IPP Software | Navigation | Tools | IPP Links | Communication | Pan-STARRS Links

Changeset 31587


Ignore:
Timestamp:
May 29, 2011, 11:26:31 AM (15 years ago)
Author:
eugene
Message:

merging changes from trunk

Location:
branches/eam_branches/ipp-20110505
Files:
30 edited

Legend:

Unmodified
Added
Removed
  • branches/eam_branches/ipp-20110505/ippScripts/scripts/camera_exp.pl

    r30071 r31587  
    153153    }
    154154    chomp $cmdflags;
     155
     156    { # Determine if FWHM is too large to bother continuing.
     157        my $command = "$ppConfigDump -camera $camera -dump-camera -";
     158        my ($success, $error_code, $full_buf, $stdout_buf, $stderr_buf ) =
     159            run(command => $command, verbose => $verbose);
     160        unless ($success) {
     161            $error_code = (($error_code >> 8) or $PS_EXIT_PROG_ERROR);
     162            &my_die("Unable to perform ppConfigDump: $error_code", $cam_id, $PS_EXIT_CONFIG_ERROR);
     163        }
     164        my $cameraConfig = $mdcParser->parse(join "", @$stdout_buf) or
     165            &my_die("Unable to parse metadata config doc", $cam_id, $PS_EXIT_CONFIG_ERROR);
     166
     167        my $maxFWHM = metadataLookupStr($cameraConfig, 'MAX_ALLOWED_FWHM');
     168        if ($maxFWHM) {
     169            my $expFWHM;
     170            ($expFWHM) = $cmdflags =~ /-fwhm_major (\d+)/;     
     171           
     172            if ($expFWHM > $maxFWHM) {
     173                print "Setting quality to 4007 due to large FWHM: exposure: $expFWHM  maximum: $maxFWHM\n";
     174                $cmdflags .= " -quality 4007 "; # This corresponds to PSASTRO_ERR_DATA
     175                $no_op = 1;
     176            }
     177        }
     178    }
    155179}
    156180
  • branches/eam_branches/ipp-20110505/ippScripts/scripts/dist_advancerun.pl

    r30489 r31587  
    114114    $tool_cmd = "$staticskytool -sky_id";
    115115    $list_mode = "-result";
    116     $component_key = "";
     116    $component_key = "skycell_id";
    117117} elsif ($stage eq "diff") {
    118118    $tool_cmd = "$difftool -diff_id";
  • branches/eam_branches/ipp-20110505/ippScripts/scripts/dist_bundle.pl

    r30922 r31587  
    6262my $streaksrelease   = can_run('streaksrelease') or (warn "Can't find streaksrelease" and $missing_tools = 1);
    6363my $bgtool   = can_run('bgtool') or (warn "Can't find bgtool" and $missing_tools = 1);
     64my $staticskytool   = can_run('staticskytool') or (warn "Can't find staticskytool" and $missing_tools = 1);
    6465my $file_cmd   = can_run('file') or (warn "can't find program file" and $missing_tools = 1);
    6566my $zcat   = can_run('zcat') or (warn "can't find program zcat" and $missing_tools = 1);
     
    106107    defined $stage_id and
    107108    defined $component and
    108     defined $exp_type and
    109109    defined $path_base and
    110110    defined $outroot;
    111111
     112if ($stage eq 'raw' and !$clean and !$no_magic) {
     113   
     114    # for raw stage need to have exposure type defined and if the type is OBJECT we need
     115    # a chip_path_base so we can find the chip mask file
     116    if (!defined $exp_type or ($exp_type eq 'OBJECT' and !defined $chip_path_base)) {
     117        pod2usage( -msg => "Required options: --chip_path_base --exp_type for raw stage", -exitval => 3);
     118    }
     119}
     120
    112121$ipprc->redirect_output($logfile) if $logfile;
    113122
    114 if ($stage eq 'raw' and !$clean and !$no_magic) {
    115     # need to be able to find chip mask file
    116     if ($exp_type eq 'OBJECT' and !defined $chip_path_base) {
    117         pod2usage( -msg => "Required options: --chip_path_base for raw stage", -exitval => 3);
    118     }
    119 }
    120 
    121123$ipprc->define_camera($camera);
    122124
    123125$ipprc->outroot_prepare($outroot);
    124126
     127my $num_sky_inputs;
     128if ($stage eq 'sky') {
     129    $num_sky_inputs = get_num_sky_inputs($stage_id);
     130}
    125131# Get the list of data products for this component
    126132# note: We my_die in get_file_list if something goes wrong.
    127133
    128 my $file_list = get_file_list($stage, $component, $path_base, $clean);
     134my $file_list = get_file_list($stage, $component, $path_base, $clean, $num_sky_inputs);
    129135
    130136if (($stage ne 'raw') and ($stage ne 'fake') and !$poor_quality) {
     
    184190        # it must exist)
    185191        next if $poor_quality;
     192
     193        # skip file stats file. Due to a bug the update process destroys them sometimes
     194        # XXX: perhaps only do this for stages where we know that this happens
     195        next if $file_rule =~ /STATS/;
    186196
    187197        &my_die("failed to resolve  $file_name", $component, $PS_EXIT_DATA_ERROR);
     
    480490    my $path_base = shift;
    481491    my $clean = shift;
     492    my $num_sky_inputs = shift;
    482493
    483494    my @file_list;
     
    531542        $config_file_rule = "PPSTACK.CONFIG";
    532543    } elsif ($stage eq "sky") {
    533         $config_file_rule = "PSPHOT.STACK.CONFIG";
     544        if ($num_sky_inputs > 1) {
     545            $config_file_rule = "PSPHOT.STACK.CONFIG";
     546        } else {
     547            $config_file_rule = "PSPHOT.SKY.CONFIG";
     548        }
    534549    } else {
    535550        &my_die("$stage is not a valid stage", $component, $PS_EXIT_CONFIG_ERROR);
     
    627642    return \@file_list;
    628643}
     644sub get_num_sky_inputs {
     645    my $sky_id = shift;
     646
     647    my $command = "$staticskytool -inputs -sky_id $sky_id -simple";
     648    $command .= " -dbname $dbname" if $dbname;
     649    $command .= " | wc";
     650    my ( $success, $error_code, $full_buf, $stdout_buf, $stderr_buf ) =
     651        run(command => $command, verbose => $verbose);
     652    unless ($success) {
     653        $error_code = (($error_code >> 8) or $PS_EXIT_PROG_ERROR);
     654        &my_die("Unable to perform $command: $error_code", $component, $error_code);
     655    }
     656    my ($num_inputs, $words, $chars) = split " ", (join "", @$stdout_buf);
     657    if (!$num_inputs) {
     658        $num_inputs = "undefined" if !defined $num_inputs;
     659        &my_die("unexpected number of static sky inputs $num_inputs",  $PS_EXIT_PROG_ERROR, $component, $error_code);
     660    }
     661
     662    return $num_inputs;
     663}
    629664
    630665sub my_die
  • branches/eam_branches/ipp-20110505/ippScripts/scripts/ipp_apply_burntool_single.pl

    • Property svn:mergeinfo changed (with no actual effect on merging)
  • branches/eam_branches/ipp-20110505/ippScripts/scripts/lap_science.pl

    r31454 r31587  
    9999    my $status = queue_chips($lap_id);
    100100
    101     if ($status) {
     101    if (!$status) { # This is the culprit.
    102102        my $command = "$laptool -updaterun -lap_id $lap_id";
    103103        $command .= " -dbname $dbname " if defined $dbname;
     
    120120    # This is a puzzler... chiptool doesn't actually return a useful metadata.  We'll just scrape it from the database for now.
    121121
    122     my $command = "$chiptool -listrun -exp_id $exp_id -label $label -data_group $data_group";
     122    my $command = "$chiptool -listrun -pstamp_order -exp_id $exp_id -label $label -data_group $data_group";
    123123    $command .= " -dbname $dbname " if defined $dbname;
    124124
     
    236236        my $comment = $exposure->{comment};
    237237
     238        # This is a hack to fix old exposures that have no object.
     239        unless(defined($comment)) {
     240            $comment = '';
     241        }
     242        if ((!defined($object))||($object eq 'NULL')||($object eq '')) {
     243            if ($comment =~ /3pi_/) {
     244                $object = $comment;
     245                $object =~ s/^.*?(3pi_\d\d_\d\d\d\d).*?$/$1/;
     246            }
     247            elsif ($comment =~ / ps1_/) {
     248                $object = $comment;
     249                $object =~ s/^.*?(ps1_\d\d_\d\d\d\d).*$/$1/;
     250            }
     251            else {
     252                $exposure->{data_state} = 'new';
     253                $exposure->{pairwise} = 0;
     254                $exposure->{private} = 1;
     255                $exposure->{pair_id} = 9223372036854775807;
     256                update_this_exposure($exposure);
     257                $counter++;
     258                next;
     259            }
     260        }
     261
     262
    238263        if (S64_IS_NOT_NULL($chip_id)) { # We already have a defined chip_id
    239264            if (($pairwise) && !($pair_id)) {
     
    255280            $counter++;
    256281        }
     282        print "ZZ: $exp_id $object $comment $matching{$object}{$comment}\n";
    257283    }
    258284
     
    262288        foreach my $comment (keys %{ $matching{$object} }) {
    263289            push @exp_ids_to_diff, $matching{$object}{$comment};
     290            print "$object $comment $matching{$object}{$comment} $indexing{$matching{$object}{$comment}} $exp_ids_to_diff[-1]\n";
    264291        }
    265292        @exp_ids_to_diff = sort { $indexing{$a} <=> $indexing{$b} } @exp_ids_to_diff;
     
    275302            my $exp_A = ${ $exposures }[$indexing{$exp_id_A}];
    276303            my $exp_B = ${ $exposures }[$indexing{$exp_id_B}];
    277 
     304            print "$exp_A $exp_B $exp_id_A $exp_id_B $indexing{$exp_id_A} $indexing{$exp_id_B}\n";
    278305            $exp_A->{pairwise} = 1;
    279306            $exp_A->{private} = 0;
     
    292319    # Scan all exposures, and ensure that pairwise and private are set correctly
    293320    foreach my $exposure (@$exposures) {
     321        print "YY: $exposure\n";
    294322        if ($exposure->{pairwise} && !($exposure->{pair_id})) {
    295323            $exposure->{pairwise} = 0; # We marked it for pairwise diffs, but didn't match it. Probably an error.
     
    299327        }
    300328
    301         $exposure = update_this_exposure($exposure);
    302     }
    303     return(1);
     329        update_this_exposure($exposure);
     330    }
     331    return(0);
    304332}
    305333
     
    432460        my $companion;
    433461
    434         if ($exposure->{pair_id}) { # Load companion exposure information
    435             if (exists($match_hash{$exposure->{chip_id}})) {
     462        if ($exposure->{pairwise}) {
     463            # Load companion exposure information
     464            if (($exposure->{pair_id})&&(exists($match_hash{$exposure->{chip_id}}))) {
    436465                $companion = ${ $exposures }[$match_hash{$exposure->{chip_id}}]; # Match!
     466            }
     467            else { # We claimed to be pairwise, but do not have a valid pair_id.
     468                $exposure->{pairwise} = 0;
     469                $exposure->{private} = 1;
     470                &update_this_exposure($exposure);
    437471            }
    438472        }
     
    446480            $exposure = remake_this_exposure($exposure);
    447481        }
    448         if ($exposure->{cam_quality}) {
    449             $needs_qstack = 1;
    450             $needs_something_private = 1;
    451             if ($companion) {
    452                 $companion->{private} = 1;
    453                 $companion->{pairwise} = 0;
    454                 &update_this_exposure($companion);
     482        # Do quality checks here
     483        my $is_bad_quality = 0;
     484        if ((defined($exposure->{chipRun_state}))&&($exposure->{chipRun_state} eq 'full')&&
     485            ($exposure->{chip_component_count} > 0)&&($exposure->{chip_bad_quality} / $exposure->{chip_component_count} > 0.05)) {
     486            printf("QUALITY: $exposure->{exp_id} has bad chip quality: %d / %d\n",
     487                   $exposure->{chip_bad_quality} , $exposure->{chip_component_count});
     488            $is_bad_quality = 1;
     489        }
     490        elsif ((defined($exposure->{camRun_state}))&&($exposure->{camRun_state} eq 'full')&&
     491               ($exposure->{cam_bad_quality} / $exposure->{cam_component_count} > 0)) {
     492            printf("QUALITY: $exposure->{exp_id} has bad cam quality: %d / %d\n",
     493                   $exposure->{cam_bad_quality} , $exposure->{cam_component_count});
     494            $is_bad_quality = 1;
     495        }
     496        elsif ((defined($exposure->{warpRun_state}))&&($exposure->{warpRun_state} eq 'full')&&
     497               ($exposure->{warp_bad_quality} / $exposure->{warp_component_count} > 0.2)) {
     498            printf("QUALITY: $exposure->{exp_id} has bad warp quality: %d / %d\n",
     499                   $exposure->{warp_bad_quality} , $exposure->{warp_component_count});
     500            $is_bad_quality = 1;
     501        }
     502        elsif ((defined($exposure->{diffRun_state}))&&($exposure->{diffRun_state} eq 'full')&&
     503               ($exposure->{diff_bad_quality} / $exposure->{diff_component_count} > 0.5)) {
     504            printf("QUALITY: $exposure->{exp_id} has bad diff quality: %d / %d\n",
     505                    $exposure->{diff_bad_quality} , $exposure->{diff_component_count});
     506            $is_bad_quality = 1;
     507        }
     508        # If we've detected a bad quality exposure, drop it, and tell the companion.
     509        if ($is_bad_quality) {
     510            unless ((defined($exposure->{diffRun_state}))&&
     511                    ($exposure->{diffRun_state} eq 'full')) {
     512                $needs_qstack = 1;
     513                $needs_something_private = 1;
     514                if ($companion) {
     515                    $companion->{private} = 1;
     516                    $companion->{pairwise} = 0;
     517                    &update_this_exposure($companion);
     518                }
     519                $exposure->{private} = 1;
     520                $exposure->{pairwise} = 0;
    455521            }
    456             $exposure->{private} = 1;
    457             $exposure->{pairwise} = 0;
    458522            $exposure->{data_state} = 'drop';
    459523            &update_this_exposure($exposure);
    460524
    461525        }
    462 #       if ($companion) { # Validate that there are no problems with the companion exposure
    463 #           if ($companion->{cam_quality}) { # Maybe other things here?
    464 #               $exposure->{private} = 1;
    465 #               $exposure->{data_state} = 'drop';
    466 #               &update_this_exposure($exposure);
    467 #               $needs_qstack = 1;
    468 #           }
    469 #       }
     526       
    470527        if  ($exposure->{data_state} eq 'drop') { # This exposure is impossible, so fudge the counts so we get through.
    471528            $can_qstack ++;
     
    564621    }
    565622
     623    my $warps = '';
     624    foreach $exposure (@$exposures) {
     625        if (($exposure->{data_state} != 'drop')&&
     626            (S64_IS_NOT_NULL($exposure->{warp_id}))) {
     627            $warps .= " -warp_id $exposure->{warp_id} ";
     628        }
     629    }
     630
    566631    my @utctime = gmtime();
    567632    $utctime[5] += 1900;
     
    570635    my $workdir_date = sprintf("%4d/%02d/%02d",$utctime[5],$utctime[4],$utctime[3]);
    571636    my $workdir = "neb://\@HOST\@.0/${dbname}/${label}/${workdir_date}";
    572     my $data_group = "${label}.${date}";
     637    my $data_group = "${label}.${proj_cell}.quick.${date}";
    573638
    574639    my $command = "$stacktool ";
     
    576641    $command .= " -dbname $dbname " if defined $dbname;
    577642    $command .= " -definebyquery -select_label $label -select_skycell_id ${proj_cell}.% -select_filter $filter ";
    578     $command .= " -set_label ${label} -set_data_group ${proj_cell}.quick.${date} ";
     643    $command .= " -set_label ${label} -set_data_group $data_group ";
    579644    $command .= "  -set_workdir $workdir  -set_dist_group NODIST ";
    580645    $command .= " -min_num 2 -set_reduction QUICKSTACK ";
     646    $command .= " $warps ";
    581647
    582648    my ($success, $error_code, $full_buf, $stdout_buf, $stderr_buf ) =
     
    589655    $command = "$stacktool ";
    590656    $command .= " -dbname $dbname " if defined $dbname;
    591     $command .= " -sassskyfile -data_group ${proj_cell}.quick.${date} ";
     657    $command .= " -sassskyfile -data_group $data_group ";
    592658    $command .= " -filter $filter -projection_cell ${proj_cell} ";
    593659
     
    632698    unless (defined($label) && defined($filter) && defined($proj_cell)) {
    633699        &my_die("Unable to perform stacktool. Insufficient information.", $lap_id);
     700    }
     701
     702    my $warps = '';
     703    foreach $exposure (@$exposures) {
     704        if (($exposure->{data_state} != 'drop')&&
     705            (S64_IS_NOT_NULL($exposure->{magicked}))&&
     706            (S64_IS_NOT_NULL($exposure->{warp_id}))) {
     707            $warps .= " -warp_id $exposure->{warp_id} ";
     708        }
    634709    }
    635710
     
    640715    my $workdir_date = sprintf("%4d/%02d/%02d",$utctime[5],$utctime[4],$utctime[3]);
    641716    my $workdir = "neb://\@HOST\@.0/${dbname}/${label}/${workdir_date}";
    642     my $data_group = "${label}.${date}";
     717    my $data_group = "${label}.${proj_cell}.final.${date}";
    643718
    644719    my $command = "$stacktool ";
     
    646721    $command .= " -dbname $dbname " if defined $dbname;
    647722    $command .= " -definebyquery -select_label $label -select_skycell_id ${proj_cell}.% -select_filter $filter ";
    648     $command .= " -set_label ${label} -set_workdir $workdir -set_data_group ${proj_cell}.final.${date} ";
    649     $command .= " -min_num 2 -set_reduction THREEPI_STACK ";
     723    $command .= " -set_label ${label} -set_workdir $workdir -set_data_group $data_group ";
     724    $command .= " -min_num 2 -set_reduction THREEPI_STACK -set_dist_group ${label} ";
     725    $command .= " $warps ";
    650726
    651727    my ($success, $error_code, $full_buf, $stdout_buf, $stderr_buf ) =
     
    659735    $command = "$stacktool ";
    660736    $command .= " -dbname $dbname " if defined $dbname;
    661     $command .= " -sassskyfile -data_group ${proj_cell}.final.${date} ";
     737    $command .= " -sassskyfile -data_group $data_group ";
    662738    $command .= " -filter $filter -projection_cell ${proj_cell} ";
    663739
     
    734810            $command .= " -set_dist_group $exposure->{dist_group} ";
    735811        }
    736        
    737         if ($exposure->{pairwise}) { # warpwarp
     812        my $retry_command;
     813        if (($exposure->{pairwise})&&(defined(${ $exposures }[$match_hash{$exposure->{chip_id}}]))) { # warpwarp
    738814            my $companion = ${ $exposures }[$match_hash{$exposure->{chip_id}}];
    739815            $command .= " -definewarpwarp ";
    740             $command .= "-input_label $label -template_label $label -backwards ";
     816            $command .= "-input_label $label -template_label $label ";
    741817            $command .= "-warp_id $exposure->{warp_id} -template_warp_id $companion->{warp_id} ";
     818            $retry_command = $command;
     819            $command .= " -backwards "; # This usually works.
    742820            $already_queued{$exposure->{warp_id}} = 1;
    743821            $already_queued{$companion->{warp_id}} = 1;
     
    763841        my $diff_id = $diff->{diff_id};
    764842        unless (defined($diff_id)) {
    765             $exposure->{data_state} = 'drop';
    766             &update_this_exposure($exposure);
    767         }
    768        
     843            if ($retry_command) {
     844                ($success, $error_code, $full_buf, $stdout_buf, $stderr_buf ) =
     845                    run(command => $retry_command, verbose => $verbose);
     846                unless ($success) {
     847                    $error_code = (($error_code >> 8) or $PS_EXIT_PROG_ERROR);
     848                    &my_die("unable to perform difftool -definewarp(warp|stack): $error_code", $exposure->{lap_id}, $exposure->{proj_cell});
     849                }
     850               
     851                $diffs = $mdcParser->parse_list(join "", @$stdout_buf) or
     852                    &my_die("Unable to parse metadata from difftool -definewarp(warp|stack)", $lap_id, "");
     853               
     854                $diff = ${ $diffs }[0];
     855                $diff_id = $diff->{diff_id};
     856            }
     857            unless (defined($diff_id)) {
     858                $exposure->{data_state} = 'drop';
     859                &update_this_exposure($exposure);
     860            }
     861        }
    769862    }
    770863}
     
    872965   
    873966    my $command = "$laptool -updateexp -lap_id $lap_id -exp_id $exp_id ";
     967    $command .= " -dbname $dbname " if defined $dbname;
    874968    if (($exposure->{chip_id})&&(S64_IS_NOT_NULL($exposure->{chip_id}))) {
    875969        $command .= " -set_chip_id  $exposure->{chip_id} ";
  • branches/eam_branches/ipp-20110505/ippScripts/scripts/receive_file.pl

    r30674 r31587  
    224224        $stage = 'sky';
    225225        # XXX: This should be skycell, but the distribution code uses exposure
    226         $comp_name = 'exposure';
    227         $current_component = $comp_name;
     226        $comp_name = 'skycell_id';
     227#        $current_component = $comp_name;
    228228    } else {
    229229        &my_die( "unexpected run type line found in $filename: $runType\n", $file_id, $PS_EXIT_UNKNOWN_ERROR);
     
    237237        $new_workdir_value = "$workdir/$destdir";
    238238    }
     239
     240    if ($stage eq 'sky') {
     241        # the dbinfo file for a skyRun only has one component and it doesn't contain
     242        # skycell_id which is the way components are listed in the dirinfo file.
     243        my @ids = keys %$components;
     244        &my_die( "unexpected number of components scalar @ids found in staticsky dirinfo file\n", $file_id, $PS_EXIT_UNKNOWN_ERROR) if scalar @ids != 1;
     245        $current_component = $ids[0];
     246    }
     247
    239248    my $component_dir;
    240249    if ($current_component) {
  • branches/eam_branches/ipp-20110505/ippScripts/scripts/warp_skycell.pl

    r30825 r31587  
    7272
    7373my ($logDest, $traceDest);
     74my $do_stats;
    7475if ($run_state eq 'new') {
    7576    $logDest = prepare_output("LOG.EXP", $outroot, $skycell_id, 0);
    7677    $traceDest = prepare_output("TRACE.EXP", $outroot, $skycell_id, 1);
     78    $do_stats = 1;
    7779} elsif ($run_state eq 'update')  {
    7880    $logDest = prepare_output("LOG.EXP.UPDATE", $outroot, $skycell_id, 1);
     
    162164my $outputBin1 = prepare_output ("PSWARP.BIN1", $outroot, $skycell_id, 1);
    163165my $outputBin2 = prepare_output ("PSWARP.BIN2", $outroot, $skycell_id, 1);
    164 my $outputStats = prepare_output ("SKYCELL.STATS", $outroot, $skycell_id, 1);
     166my $outputStats;
     167if ($do_stats) {
     168    $outputStats = prepare_output ("SKYCELL.STATS", $outroot, $skycell_id, 1) if $do_stats;
     169}
    165170my $configuration;
    166171
     
    257262# Run pswarp
    258263my $cmdflags;
    259 my $do_stats;
    260264unless ($no_op) {
    261265    my $command = "$pswarp";
     
    279283    if ($run_state eq 'new') {
    280284        $command .= " -dumpconfig $configuration";
    281         $do_stats = 1;
    282285    } else {
    283286        $command .= " -ipprc $configuration";
  • branches/eam_branches/ipp-20110505/ippTasks/lap.pro

    r31454 r31587  
    1717
    1818macro lap.show.books
     19    echo "lapNewRuns"
    1920    book listbook lapNewRuns
     21    echo "lapRunRuns"
    2022    book listbook lapRunRuns
     23    echo "lapDoneRuns"
    2124    book listbook lapDoneRuns
     25    echo "lapFullRuns"
    2226    book listbook lapFullRuns
     27end
     28
     29macro lap.clear.books
     30    book init lapNewRuns
     31    book init lapRunRuns
     32    book init lapFullRuns
     33    book init lapDoneRuns
    2334end
    2435
     
    3647      active true
    3748    end
    38     task lap.cleanup.load
    39       active true
    40     end
    41     task lap.cleanup.run
    42       active true
    43     end
    4449end
    4550
     
    5762      active false
    5863    end
    59     task lap.cleanup.load
    60       active false
    61     end
    62     task lap.cleanup.run
     64end
     65
     66macro lap.debug.mode
     67    task lap.initial.load
     68      active true
     69    end
     70    task lap.initial.run
     71      active false
     72    end
     73    task lap.monitor.load
     74      active true
     75    end
     76    task lap.monitor.run
    6377      active false
    6478    end
     
    156170  # success
    157171  task.exit  0
    158 #    book delpage lapNewRuns $options:0
    159172    ipptool2book stdout lapNewRuns -uniq -key lap_id -setword dbname $options:0 -setword pantaskState INIT
    160173
     174    process_cleanup lapNewRuns
     175
    161176    if ($VERBOSE > 2)
    162177      book listbook lapNewRuns
     
    181196  periods      -exec $LOADEXEC
    182197  periods      -timeout 600
     198  active       false
    183199# This can probably be increased and spread over hosts in the future.
    184200  npending     1           
     
    193209
    194210
    195     book getpage lapNewRuns $lap_NewPage -var lapNewPageName
     211    book getpage lapNewRuns 0 -var lapNewPageName -key pantaskState INIT
    196212
    197213    $lap_NewPage ++
    198214    if ($lap_NewPage >= $N) set lap_NewPage = 0
    199215
    200 
    201216    if ("$lapNewPageName" == "NULL") break
     217
     218
    202219
    203220    book setword lapNewRuns $lapNewPageName pantaskState RUN
     
    205222    book getword lapNewRuns $lapNewPageName dbname -var DBNAME
    206223
     224    option $LAP_ID
     225
    207226    $run = lap_science.pl --chip_mode --dbname $DBNAME --lap_id $LAP_ID
    208 
    209     command $run
    210 
    211   end
    212 
    213   # success
    214   task.exit  0
    215 #    book delpage lapNewRuns $options:0
    216 #    ipptool2book stdout lapNewRuns -uniq -key lap_id
    217 
     227   
     228    command $run
     229
     230  end
     231
     232  # success
     233  task.exit  0
     234    process_exit lapNewRuns $options:0 0
    218235    if ($VERBOSE > 2)
    219236      book listbook lapNewRuns
     
    222239  # locked list
    223240  task.exit    default
    224     showcommand failure
    225   end
    226   task.exit    crash
    227     showcommand crash
    228   end
    229   #operation times out?
    230   task.exit    timeout
     241    process_exit lapNewRuns $options:0 0
     242    showcommand failure
     243  end
     244  task.exit    crash
     245    process_exit lapNewRuns $options:0 0
     246    showcommand crash
     247  end
     248  #operation times out?
     249  task.exit    timeout
     250    process_exit lapNewRuns $options:0 0
    231251    showcommand timeout
    232252  end
     
    267287
    268288    add_poll_labels run
    269 #    echo $run
    270     command $run
    271   end
    272   # success
    273   task.exit  0
    274 #    book delpage lapRunRuns $options:0
     289    command $run
     290  end
     291  # success
     292  task.exit  0
    275293    ipptool2book stdout lapRunRuns -uniq -key lap_id -setword dbname $options:0 -setword pantaskState INIT
     294   
     295    process_cleanup lapRunRuns
    276296
    277297    if ($VERBOSE > 2)
     
    297317  periods      -exec $LOADEXEC
    298318  periods      -timeout 600
     319  active       false
    299320# This can probably be increased and spread over hosts in the future.
    300321  npending     1           
     
    306327    book npages lapRunRuns -var N
    307328
    308 #    echo $N $NETWORK
    309329    if ($N == 0) break
    310330    if ($NETWORK == 0) break
    311331
    312332
    313     book getpage lapRunRuns $lap_RunPage -var lapRunPageName
     333    book getpage lapRunRuns 0 -var lapRunPageName -key pantaskState INIT
    314334
    315335    $lap_RunPage ++
    316336    if ($lap_RunPage >= $N) set lap_RunPage = 0
    317337
    318 #    echo $lapRunPageName
    319338    if ("$lapRunPageName" == "NULL") break
    320339
     
    323342    book getword lapRunRuns $lapRunPageName dbname -var DBNAME
    324343
     344    option $LAP_ID
     345
    325346    $run = lap_science.pl --monitor_mode --dbname $DBNAME --lap_id $LAP_ID
    326347
     
    331352  # success
    332353  task.exit  0
    333 #    book delpage lapRunRuns $options:0
    334 #    ipptool2book stdout lapRunRuns -uniq -key lap_id
    335 
    336     if ($VERBOSE > 2)
     354    process_exit lapRunRuns $options:0 0
     355    if ($VERBOSE > 2)
     356
    337357      book listbook lapRunRuns
    338358    end
     
    340360  # locked list
    341361  task.exit    default
    342     showcommand failure
    343   end
    344   task.exit    crash
    345     showcommand crash
    346   end
    347   #operation times out?
    348   task.exit    timeout
     362    process_exit lapRunRuns $options:0 0
     363    showcommand failure
     364  end
     365  task.exit    crash
     366    process_exit lapRunRuns $options:0 0
     367    showcommand crash
     368  end
     369  #operation times out?
     370  task.exit    timeout
     371    process_exit lapRunRuns $options:0 0
    349372    showcommand timeout
    350373  end
     
    358381  periods      -exec $LOADEXEC
    359382  periods      -timeout 30
     383  active       false
    360384  npending     1
    361385
     
    390414    ipptool2book stdout lapDoneRuns -uniq -key lap_id  -setword dbname $options:0 -setword pantaskState INIT
    391415
     416    process_cleanup lapDoneRuns
    392417    if ($VERBOSE > 2)
    393418      book listbook lapRuns
     
    412437  periods      -exec $LOADEXEC
    413438  periods      -timeout 600
     439  active       false
    414440# This can probably be increased and spread over hosts in the future.
    415441  npending     1           
     
    424450
    425451
    426     book getpage lapDoneRuns $lap_DonePage -var lapDonePageName
     452    book getpage lapDoneRuns 0 -var lapDonePageName -key pantaskState INIT
    427453
    428454    $lap_DonePage ++
     
    435461    book getword lapDoneRuns $lapDonePageName dbname -var DBNAME
    436462
     463    option $LAP_ID
    437464    $run = lap_science.pl --cleanup_mode --dbname $DBNAME --lap_id $LAP_ID
    438465
     
    443470  # success
    444471  task.exit  0
    445 #    ipptool2book stdout lapDoneRuns -uniq -key lap_id
    446 
     472    process_exit lapDoneRuns $options:0 0
    447473    if ($VERBOSE > 2)
    448474      book listbook lapDoneRuns
     
    451477  # locked list
    452478  task.exit    default
    453     showcommand failure
    454   end
    455   task.exit    crash
    456     showcommand crash
    457   end
    458   #operation times out?
    459   task.exit    timeout
    460     showcommand timeout
    461   end
    462 end
     479    process_exit lapDoneRuns $options:0 0
     480    showcommand failure
     481  end
     482  task.exit    crash
     483    process_exit lapDoneRuns $options:0 0
     484    showcommand crash
     485  end
     486  #operation times out?
     487  task.exit    timeout
     488    process_exit lapDoneRuns $options:0 0
     489    showcommand timeout
     490  end
     491end
  • branches/eam_branches/ipp-20110505/ippToPsps/jython/batch.py

    r31405 r31587  
    3232    "../config/2/tables.vot"
    3333    '''
    34     def __init__(self, logger, batchType, inputFitsPath="", survey="", useFullTables=False):
     34    def __init__(self,
     35                 logger,
     36                 gpc1Db,
     37                 ippToPspsDb,
     38                 scratchDb,
     39                 id,
     40                 batchType,
     41                 inputFitsPath="",
     42                 survey="",
     43                 useFullTables=False):
     44
     45        self.everythingOK = False
     46        self.readHeader = False
    3547
    3648        # set up logging
     
    4052
    4153        # set up class variables
     54        self.id = id
     55        self.gpc1Db = gpc1Db
     56        self.ippToPspsDb = ippToPspsDb
     57        self.scratchDb = scratchDb
    4258        self.batchType = batchType;
    4359        self.pspsVoTableFilePath = "../config/" + batchType + "/tables.vot"
     
    4662        self.useFullTables = useFullTables
    4763
     64        if self.alreadyProcessed(): return
     65
     66        # do we have an input file?
     67        if self.inputFitsPath != "":
     68
     69            if not self.readPrimaryHeader(): return
     70
    4871        # TODO
    4972        self.tablesToExport = []
     
    5174        # open config
    5275        doc = ElementTree(file="config.xml")
    53 
    54         # create Gpc1Db object
    55         self.gpc1Db = Gpc1Db(self.logger)
    56         self.ippToPspsDb = IppToPspsDb(logger)
    57         self.scratchDb = ScratchDb(logger, self.useFullTables)
    5876
    5977        if self.survey != "":
     
    88106        self.dateStr = now.strftime("%Y-%m-%d")
    89107
    90         if self.inputFitsPath != "":
    91             file = open(self.inputFitsPath)
    92             self.header = self.parseFitsHeader(file)
    93             self.logger.info("Read primary and found " + str(len(self.header)) + " header cards")
    94             # TODO close file?
    95 
    96108        # create DVO tables if accessing DVO directly
    97109        if not self.useFullTables: self.scratchDb.createDvoTables()
    98110
     111        self.everythingOK = True
     112
    99113    '''
    100114    Destructor
     
    103117
    104118        self.logger.debug("Batch destructor")
     119
     120
     121    '''
     122    Reads the primary header of the FITS file
     123    '''
     124    def readPrimaryHeader(self):
     125
     126        if self.readHeader: return True
     127
     128        # does it exist?
     129        if not os.path.isfile(self.inputFitsPath):
     130
     131            self.logger.error("Cannot read file at '" + self.inputFitsPath + "'")
     132            return False
     133
     134        file = open(self.inputFitsPath)
     135        self.header = self.parseFitsHeader(file)
     136        self.logger.info("Read primary header and found " + str(len(self.header)) + " header cards")
     137        # TODO close file?
     138
     139        self.readHeader = True
     140
     141        return True
    105142
    106143
     
    138175            file.seek(index + 2880, 0)
    139176           
    140         if found != True: self.logger.error("...could not find extension '" + name + "'")
    141         else: self.logger.info("...read header at '" + name + "' and found " + str(len(header)) + " header cards")
     177        if found != True:
     178            self.logger.error("...could not read header in extension '" + name + "'")
     179            return
     180        #else: self.logger.info("...read header at '" + name + "' and found " + str(len(header)) + " header cards")
    142181
    143182        return header
     
    262301
    263302        first = True
     303
     304        self.totalDetections = 0
    264305        for table in tables:
    265306
    266             sql = "SELECT MIN(objID), MAX(objID) FROM " + table
    267             rs = self.scratchDb.stmt.executeQuery(sql)
     307            sql = "SELECT MIN(objID), MAX(objID), COUNT(objID) FROM " + table
     308            rs = self.scratchDb.executeQuery(sql)
    268309            rs.first()
     310
     311            self.totalDetections = self.totalDetections + rs.getLong(3)
    269312
    270313            if first:
     
    276319
    277320            first = False
     321            rs.close()
    278322
    279323        self.ippToPspsDb.updateMinMaxObjID(self.batchID, self.minObjID, self.maxObjID)
     324        self.logger.info("Total detections = %ld min objID = %ld max objID = %ld" % (self.totalDetections, self.minObjID, self.maxObjID))
     325
    280326
    281327    '''
     
    313359         self.pspsTables = stilts.treads(self.pspsVoTableFilePath)
    314360         for table in self.pspsTables:
    315              self.logger.info("Creating PSPS table: " + table.name)
     361             self.logger.debug("Creating PSPS table: " + table.name)
    316362             table.write(self.scratchDb.url + '#' + table.name)
    317363             self.tablesToExport.append(table.name)
     
    337383    Accepts a regular expression filter so not all tables need to be imported
    338384    '''
    339     def importIppTables(self, filter):
     385    def importIppTables(self, filter=""):
    340386
    341387      self.logger.info("Attempting to import tables from input FITS file")
     
    347393          match = re.match(filter, table.name)
    348394          if not match: continue
    349           self.logger.info("   Reading IPP table " + table.name + " from FITS file")
     395          self.logger.info("Reading IPP table " + table.name + " from FITS file")
    350396          table = stilts.tpipe(table, cmd='explodeall')
    351397
    352398          # drop any previous tables before import
    353           self.scratchDb.dropTable(table.name)
     399          #self.scratchDb.dropTable(table.name)
    354400
    355401          # IPP FITS files are littered with infinities, so remove these
    356           self.logger.info("   Removing Infinity values from all columns")
     402          self.logger.debug("Removing Infinity values from all columns")
    357403          table = stilts.tpipe(table, cmd='replaceval -Infinity null *')
    358404          table = stilts.tpipe(table, cmd='replaceval Infinity null *')
     
    360406          try:
    361407              table.write(self.scratchDb.url + '#' + table.name)
     408              self.scratchDb.killLastConnectionID()
     409              count = count + 1
    362410          except:
    363               self.logger.exception("   Problem writing table '" + table.name + "' to the database")
    364           count = count + 1
     411              self.logger.exception("Problem writing table '" + table.name + "' to the database")
     412
    365413
    366414      self.logger.info("Done. Imported %d tables" % count)
    367 
    368415      self.indexIppTables()
    369416
     
    373420    def exportPspsTablesToFits(self, regex="(.*)"):
    374421
    375         self.logger.info("Replacing NULLs with -999 then exporting all PSPS tables to FITS")
     422        self.logger.info("Replacing NULLs with -999, changing tables names using regex: " + regex)
    376423        _tables = []
    377424
    378         self.logger.info("    Selecting database tables")
     425        self.logger.info("Selecting database tables")
    379426        for table in self.tablesToExport:
    380427
     
    383430
    384431           # get everything from table
    385            _table = stilts.tread(self.scratchDb.url + '#SELECT * FROM ' + table)
    386 
     432           try:
     433               _table = stilts.tread(self.scratchDb.url + '#SELECT * FROM ' + table)
     434               self.scratchDb.killLastConnectionID()
     435           except:
     436               self.logger.exception("Could not read from DB table: " + table)
     437               return False
     438               
    387439           # replace nulls and empty fields with weird PSPS -999 pseudo-null
    388440           _table = stilts.tpipe(_table, cmd='replaceval "" -999 *')
     
    395447           _tables.append(_table)
    396448
    397         self.logger.info("    Writing to FITS file '" + self.outputFitsPath + "'...")
    398         stilts.twrites(_tables, self.outputFitsPath, fmt='fits')
    399         self.logger.info("    ...done")
    400         self.ippToPspsDb.updateProcessed(self.batchID, 1)
     449        self.logger.info("Writing to FITS file '" + self.outputFitsPath + "'...")
     450        try:
     451            stilts.twrites(_tables, self.outputFitsPath, fmt='fits')
     452            self.ippToPspsDb.updateProcessed(self.batchID, 1)
     453        except:
     454            self.logger.exception("Could not write to FITS")
     455            return False
     456
     457        return True
    401458
    402459    '''
     
    447504    '''
    448505    def alreadyProcessed(self):
    449            self.logger.info("Not implemented")
    450 
    451 
    452 
     506        self.logger.info("Not implemented")
     507
     508
     509    '''
     510    Creates and publishes a batch
     511    '''
     512    def run(self):
     513
     514        if not self.everythingOK: return
     515
     516        self.createEmptyPspsTables()
     517        self.importIppTables()
     518        if self.populatePspsTables():
     519            if self.exportPspsTablesToFits():
     520                self.writeBatchManifest()
     521                self.createTarball()
     522                self.publishToDatastore()
     523                #self.reportNullsInAllPspsTables(False)
     524                #sys.exit()
     525        self.logger.info("Finished.")
     526
     527
  • branches/eam_branches/ipp-20110505/ippToPsps/jython/detectionbatch.py

    r31406 r31587  
    66from java.lang import *
    77from java.sql import *
     8
    89from batch import Batch
    910from gpc1db import Gpc1Db
     11from ipptopspsdb import IppToPspsDb
     12from scratchdb import ScratchDb
    1013
    1114import logging.config
     
    1922    Constructor
    2023    '''
    21     def __init__(self, logger, camID, inputFile, test=False, useFullTables=False):
     24    def __init__(self,
     25                 logger,
     26                 gpc1Db,
     27                 ippToPspsDb,
     28                 scratchDb,
     29                 camID,
     30                 inputFile,
     31                 test=False,
     32                 useFullTables=False):
     33
    2234       super(DetectionBatch, self).__init__(
    2335               logger,
     36               gpc1Db,
     37               ippToPspsDb,
     38               scratchDb,
     39               camID,
    2440               "detection",
    2541               inputFile,
    26                "MD04",
    27                useFullTables) # TODO
     42               "MD04", # TODO
    2843               #"3PI") # TODO
    29 
    30        self.logger.info("DetectionBatch constructor. Creating batch from: '" + inputFile + "'")
    31 
    32        meta = self.gpc1Db.getCameraStageMeta(camID)
    33      
    34        self.expID = meta[0];
    35        self.expName = meta[1];
    36        self.distGroup = meta[2];
    37 
    38        self.logger.info("Processing exposure with ID: %d, name: %s and distribution group: %s" % (self.expID, self.expName, self.distGroup))
     44               useFullTables)
     45
     46       if not self.everythingOK: return
     47
     48       # meta data to the log
     49       self.logger.info("New Detection Batch:")
     50       self.logger.info("Cam ID:             %d" % self.id)
     51       self.logger.info("file:               %s" % inputFile)
     52       self.logger.info("Exp ID:             %d" % self.expID)
     53       self.logger.info("Exp name:           %s" % self.expName)
     54       self.logger.info("Distribution group: %s" % self.distGroup)
    3955
    4056       # create an output filename, which is {expID}.FITS
     
    167183        ," + self.header['PCA2X0Y2'] + " \
    168184        )"
    169         self.scratchDb.stmt.execute(sql)
     185        self.scratchDb.execute(sql)
    170186
    171187        self.scratchDb.updateAllRows("FrameMeta", "surveyID", str(self.surveyID))
     
    184200        self.scratchDb.dropTable(tableName)
    185201        sql = "CREATE TABLE " + tableName + " LIKE ImageMeta"
    186         try: self.scratchDb.stmt.execute(sql)
     202        try: self.scratchDb.execute(sql)
    187203        except: pass
    188204
     
    310326               )"
    311327
    312         self.scratchDb.stmt.execute(sql)
     328        self.scratchDb.execute(sql)
    313329        self.scratchDb.updateFilterID(tableName, self.filter)
    314330        self.scratchDb.updateAllRows(tableName, "calibModNum", str(self.calibModNum))
    315331        self.scratchDb.updateAllRows(tableName, "dataRelease", str(self.dataRelease))
    316         self.totalNumPhotoRef = self.totalNumPhotoRef + int(header['NASTRO'])
     332        if 'NASTRO' in header: self.totalNumPhotoRef = self.totalNumPhotoRef + int(header['NASTRO'])
    317333        self.scratchDb.replaceNullsInThisColumn(tableName, "polyOrder", "0")
    318334
     
    327343        self.scratchDb.dropTable(tableName)
    328344        sql = "CREATE TABLE " + tableName + " LIKE Detection"
    329         try: self.scratchDb.stmt.execute(sql)
     345        try: self.scratchDb.execute(sql)
    330346        except: pass
    331347
     
    375391               ,EXT_NSIGMA \
    376392               FROM " + ota + "_psf"
    377 
    378         self.scratchDb.stmt.execute(sql)
     393        self.scratchDb.execute(sql)
    379394
    380395        # set obsTime
    381396        sql = "UPDATE " + tableName + " SET obsTime = %f, assocDate = '%s', activeFlag = 0" % (self.obsTime, self.dateStr)
    382         self.scratchDb.stmt.execute(sql)
     397        self.scratchDb.execute(sql)
    383398        self.scratchDb.updateAllRows(tableName, "dataRelease", str(self.dataRelease))
    384399        self.scratchDb.updateAllRows(tableName, "historyModNum", "0")
     
    387402        self.scratchDb.updateFilterID(tableName, self.filter)
    388403
    389         # now delete bad flux
     404        # now delete bad flux and bad chip positions
    390405        self.scratchDb.reportAndDeleteRowsWithNULLS(tableName, "instFlux")
    391406        self.scratchDb.reportAndDeleteRowsWithNULLS(tableName, "peakADU")
     
    401416        self.scratchDb.dropTable(tableName)
    402417        sql = "CREATE TABLE " + tableName + " LIKE SkinnyObject"
    403         try: self.scratchDb.stmt.execute(sql)
     418        try: self.scratchDb.execute(sql)
    404419        except: pass
    405420
     
    415430               ,surveyID \
    416431               FROM Detection_" + ota
    417         self.scratchDb.stmt.execute(sql)
     432        self.scratchDb.execute(sql)
    418433
    419434        self.scratchDb.updateAllRows(tableName, "dataRelease", str(self.dataRelease))
     
    429444        self.scratchDb.dropTable(tableName)
    430445        sql = "CREATE TABLE " + tableName + " LIKE ObjectCalColor"
    431         try: self.scratchDb.stmt.execute(sql)
     446        try: self.scratchDb.execute(sql)
    432447        except: pass
    433448
     
    443458               ,filterID \
    444459               FROM Detection_" + ota
    445         self.scratchDb.stmt.execute(sql)
     460        self.scratchDb.execute(sql)
    446461
    447462        self.scratchDb.updateAllRows(tableName, "calibModNum", str(self.calibModNum))
     
    484499
    485500        imageID = self.scratchDb.getImageIDFromExternID(sourceID, externID)
    486         self.logger.info("Updating table '" + table + "' with DVO IDs using imageID = %d" % imageID)
     501        self.logger.debug("Updating table '" + table + "' with DVO IDs using imageID = %d" % imageID)
    487502        sql = "UPDATE IGNORE " + table + " AS a, " + self.scratchDb.dvoDetection + " AS b SET \
    488503               a.ippObjID = b.ippObjID, \
     
    494509               AND b.imageID = " + str(imageID)
    495510
    496         self.scratchDb.stmt.execute(sql)
     511        self.scratchDb.execute(sql)
    497512
    498513
     
    511526
    512527        # loop through all OTAs and populate ImageMeta extensions
     528        self.logger.info("Reading all fits headers and populating ImageMeta tables")
    513529        for x in range(self.startX, self.endX):
    514530            for y in range(self.startY, self.endY):
     
    524540                # load corresponding header into memory
    525541                header = self.findAndReadFITSHeader(ota + ".hdr", file)
     542                if not header:
     543                    self.logger.error("No header found for OTA " + ota)
     544                    continue
     545
    526546
    527547                # store sourceID/imageID combo in Db so DVO can look up later
     
    578598
    579599                # update ImageMeta with count of detections for this OTA and photoCodeID
    580                 sql = "UPDATE ImageMeta_" + ota + " SET nDetect = %d, photoCalID = %d" % (self.scratchDb.getRowCount("Detection_" + ota), self.scratchDb.getPhotoCalID(sourceIDs[ota], imageIDs[ota]))
    581                 self.scratchDb.stmt.execute(sql)
     600                sql = "UPDATE ImageMeta_" + ota + " \
     601                       SET nDetect = %d, photoCalID = %d" % (self.scratchDb.getRowCount("Detection_" + ota), self.scratchDb.getPhotoCalID(sourceIDs[ota], imageIDs[ota]))
     602                self.scratchDb.execute(sql)
    582603
    583604                self.populateSkinnyObjectTable(ota)
     
    603624        # update FrameMeta with count OTAs in this file and total number of photometric reference sources
    604625        sql = "UPDATE FrameMeta SET nOTA = %d, numPhotoRef = %d" % (otaCount, self.totalNumPhotoRef)
    605         self.scratchDb.stmt.execute(sql)
     626        self.scratchDb.execute(sql)
    606627       
    607628        return True
     
    613634
    614635        sql = "UPDATE " + tableName + " SET imageID = %d%d%d" % (self.expID, x, y)
    615         self.scratchDb.stmt.execute(sql)
     636        self.scratchDb.execute(sql)
    616637
    617638    '''
     
    619640    '''
    620641    def alreadyProcessed(self):
     642
     643        meta = self.gpc1Db.getCameraStageMeta(self.id)
     644        self.expID = meta[0];
     645        self.expName = meta[1];
     646        self.distGroup = meta[2];
    621647
    622648        return self.ippToPspsDb.alreadyProcessed("detection", "exp_id", self.expID)
     
    634660
    635661
     662    '''
     663    Overriding this method. Filter to only import *.psf extensions
     664    '''
     665    def importIppTables(self, filter=""):
     666       return super(DetectionBatch, self).importIppTables(".*.psf")
     667
     668
     669    '''
     670    Overriding this method. Use regex to trim off, eg _XY33 extension
     671    '''
     672    def exportPspsTablesToFits(self, regex="(.*)"):
     673       return super(DetectionBatch, self).exportPspsTablesToFits("([a-zA-Z]+)")
     674
     675
     676# TODO put in config
     677useFullTables=True
     678testMode=False
     679
    636680logging.config.fileConfig("logging.conf")
    637681logger = logging.getLogger("detectionbatch")
     682logger.setLevel(logging.INFO)
    638683logger.info("Starting")
    639684
    640685gpc1Db = Gpc1Db(logger)
     686ippToPspsDb = IppToPspsDb(logger)
     687scratchDb = ScratchDb(logger, useFullTables)
     688
    641689camIDs = gpc1Db.getIDsInThisDVODbForThisStage("MD04.V2", "cam")
    642690logger.info("Found %d exposures" % len(camIDs))
     
    645693for camID in camIDs:
    646694
    647     logger.info("-------------------------------------------------- cam ID: %d" % camID)
     695    #if camID < 43764: continue # TODO
    648696
    649697    file = gpc1Db.getCameraStageSmf(camID)
    650     if not os.path.isfile(file):
    651         logger.error("Cannot read file at '" + file)
    652         continue
    653 
    654     detectionBatch = DetectionBatch(logger, camID, file, False, True)
    655 
    656     if not detectionBatch.alreadyProcessed():
    657 
    658         detectionBatch.createEmptyPspsTables()
    659         detectionBatch.importIppTables(".*.psf")
    660         if detectionBatch.populatePspsTables():
    661             detectionBatch.exportPspsTablesToFits("([a-zA-Z]+)")
    662             detectionBatch.writeBatchManifest()
    663             #detectionBatch.reportNullsInAllPspsTables(False)
    664             #detectionBatch.createTarball()
    665             #detectionBatch.publishToDatastore()
    666    
    667             i = i+1
    668            # if i > 0: sys.exit()
    669 
     698
     699    detectionBatch = DetectionBatch(logger,
     700                                    gpc1Db,
     701                                    ippToPspsDb,
     702                                    scratchDb,
     703                                    camID,
     704                                    file,
     705                                    testMode,
     706                                    useFullTables)
     707    detectionBatch.run()
     708
  • branches/eam_branches/ipp-20110505/ippToPsps/jython/gpc1db.py

    r31400 r31587  
    2929        self.logger.debug("Gpc1Db destructor")
    3030
     31
     32    '''
     33    TODO
     34    '''
     35    def getIDsInThisDVODbForThisStageFudge(self):
     36
     37        sql = "SELECT staticskyRun.sky_id \
     38               FROM staticskyInput, staticskyRun, stackRun, staticskyResult \
     39               WHERE staticskyRun.sky_id = staticskyInput.sky_id \
     40               AND staticskyInput.stack_id = stackRun.stack_id \
     41               AND staticskyInput.sky_id = staticskyResult.sky_id \
     42               AND staticskyRun.label like 'MD04.staticsky' \
     43               AND stackRun.filter like 'i%'"
     44
     45        try:
     46            rs = self.executeQuery(sql)
     47        except:
     48            self.logger.exception("Can't query for ids in DVO")
     49
     50        ids = []
     51        while (rs.next()):
     52            ids.append(rs.getInt(1))
     53
     54        rs.close()
     55
     56        self.logger.info("Found %d items in DVO database '" % (len(ids)))
     57
     58        return ids
     59
    3160    '''
    3261    Gets a list of ids in this DVO database for this stage, could be cam or staticsky (so far)
     
    4069
    4170        try:
    42             rs = self.stmt.executeQuery(sql)
     71            rs = self.executeQuery(sql)
    4372        except:
    4473            self.logger.exception("Can't query for ids in DVO")
     
    5079        rs.close()
    5180
    52         self.logger.debug("Found %d items in DVO database '%s' for stage='%s'" % (len(ids), dvoDb, stage))
     81        self.logger.info("Found %d items in DVO database '%s' for stage='%s'" % (len(ids), dvoDb, stage))
    5382
    5483        return ids
     
    73102
    74103        try:
    75             rs = self.stmt.executeQuery(sql)
     104            rs = self.executeQuery(sql)
    76105        except:
    77106            self.logger.exception("Can't query for imageIDs")
     
    105134
    106135        try:
    107             rs = self.stmt.executeQuery(sql)
     136            rs = self.executeQuery(sql)
    108137            rs.first()
    109138            meta.append(rs.getInt(1))
     
    113142
    114143        return meta
     144
    115145    '''
    116146    Gets some camera-stage meta data for this cam_id
     
    127157
    128158        try:
    129             rs = self.stmt.executeQuery(sql)
     159            rs = self.executeQuery(sql)
    130160            rs.first()
    131161            meta.append(rs.getInt(1))
     
    150180
    151181        try:
    152             rs = self.stmt.executeQuery(sql)
     182            rs = self.executeQuery(sql)
    153183            rs.first()
    154184        except:
     
    171201            files = glob.glob(path + "/*.cmf")
    172202
     203        if len(files) < 1: return "NULL"
     204
    173205        return files[0] # TODO just returning first file - check
    174206
     
    186218
    187219        try:
    188             rs = self.stmt.executeQuery(sql)
     220            rs = self.executeQuery(sql)
    189221            rs.first()
    190222        except:
     
    200232
    201233            f=os.popen("neb-ls -p "+path+"%cmf")
    202             print "neb-ls -p "+path+"%cmf"
    203234            for i in f.readlines():
    204235                files.append(i.rstrip())
    205                 print i.rstrip()
    206236
    207237        # or not a neb path
     
    211241        return files
    212242
     243
     244    '''
     245    TODO hack to get exposure time for a stack
     246    '''
     247    def getStackExpTime(self, stackID):
     248
     249        self.logger.debug("Querying GPC1 for stack exposure time")
     250
     251        sql = "SELECT SUM(exp_time) * (COUNT(warp_id) - reject_images) / COUNT(warp_id) as EXPTIME \
     252               FROM staticskyRun JOIN staticskyInput using(sky_id) \
     253               JOIN stackRun using(stack_id) \
     254               JOIN stackSumSkyfile using(stack_id) \
     255               JOIN stackInputSkyfile using(stack_id) \
     256               JOIN warpRun using(warp_id) \
     257               JOIN fakeRun using(fake_id) \
     258               JOIN camRun using(cam_id) \
     259               JOIN chipRun using(chip_id) \
     260               JOIN rawExp using(exp_id) \
     261               WHERE stack_id = %d" % stackID
     262
     263        try:
     264            rs = self.executeQuery(sql)
     265            rs.first()
     266            return rs.getInt(1)
     267        except:
     268            self.logger.exception("Can't query for exposure time")
     269
     270        return 0.0
     271
     272
  • branches/eam_branches/ipp-20110505/ippToPsps/jython/ipptopspsdb.py

    r31355 r31587  
    3737               )"
    3838
    39         self.stmt.execute(sql)
     39        self.execute(sql)
    4040
    4141        sql = "SELECT MAX(batch_id) FROM batch"
     
    4444
    4545        try:
    46             rs = self.stmt.executeQuery(sql)
     46            rs = self.executeQuery(sql)
    4747            rs.first()
    4848            batchID = rs.getInt(1)
     
    6464               WHERE batch_id = " + str(batchID)
    6565
    66         self.stmt.execute(sql)
     66        self.execute(sql)
    6767
    6868    '''
     
    7575               WHERE batch_id = " + str(batchID)
    7676
    77         self.stmt.execute(sql)
     77        self.execute(sql)
    7878
    7979    '''
     
    8686               WHERE batch_id = " + str(batchID)
    8787
    88         self.stmt.execute(sql)
     88        self.execute(sql)
    8989
    9090    '''
     
    101101
    102102        try:
    103             rs = self.stmt.executeQuery(sql)
     103            rs = self.executeQuery(sql)
    104104            rs.first()
    105105            if rs.getInt(1) > 0:
     
    128128               )"
    129129
    130         self.stmt.execute(sql)
     130        self.execute(sql)
    131131
    132132    '''
     
    149149               )"
    150150
    151         self.stmt.execute(sql)
     151        self.execute(sql)
    152152
    153153
  • branches/eam_branches/ipp-20110505/ippToPsps/jython/mysql.py

    r31401 r31587  
    3737        self.url = "jdbc:mysql://"+self.dbHost+"/"+self.dbName+"?user="+self.dbUser+"&password="+self.dbPass
    3838        self.con = DriverManager.getConnection(self.url)
    39         self.stmt = self.con.createStatement()
     39        self.connectionID = self.getLastConnectionID()
     40        self.logger.info("MySQL connection to %s with ID %d" % (dbType, self.connectionID))
     41
     42        #self.stmt = self.con.createStatement()
     43
     44
     45    '''
     46    Disconnect from database
     47    '''
     48    def disconnect(self):
     49        self.con.close()
    4050
    4151    '''
     
    4555
    4656        self.logger.debug("MySql destructor")
    47         self.stmt.close()
    48         self.con.close()
     57        self.disconnect()
     58
     59    '''
     60    Kills the last connection ID, so long as it's not THIS connection ID
     61    '''
     62    def killLastConnectionID(self):
     63   
     64        connectionID = self.getLastConnectionID()
     65        if connectionID == self.connectionID:
     66            self.logger.error("NOT going to kill THIS connection ID")
     67            return
     68
     69        sql = "KILL %d" % connectionID
     70        self.execute(sql)
     71
     72    '''
     73    Gets the last connection ID
     74    '''
     75    def getLastConnectionID(self):
     76
     77        sql = "SELECT ID \
     78               FROM INFORMATION_SCHEMA.PROCESSLIST \
     79               WHERE DB='" + self.dbName + "' \
     80               ORDER BY ID"
     81        rs = self.executeQuery(sql)
     82        rs.last()
     83        return rs.getInt(1)
    4984
    5085    '''
     
    5489
    5590        sql = "UPDATE " + table + " SET " + column + " = " + value
    56         self.stmt.execute(sql)
     91        self.execute(sql)
    5792
    5893    '''
     
    6297
    6398        sql = "DROP TABLE " + table
    64         try: self.stmt.execute(sql)
     99        try: self.execute(sql)
    65100        except: return
    66101
     
    74109        sql = "ALTER TABLE " + table + " ADD UNIQUE (" + column + ")"
    75110        try:
    76             self.stmt.execute(sql)
     111            self.execute(sql)
    77112        except: pass
    78113            #self.logger.warn("Index already in place on '" + column + "' for table '" + table + "'")
     
    82117    def createIndex(self, table, column):
    83118
    84         self.logger.debug("Creating index on column '"+column+"' for table '"+table+"'")
     119        #self.logger.debug("Creating index on column '"+column+"' for table '"+table+"'")
    85120
    86121        sql = "CREATE INDEX "+table+"_"+column+"_index ON "+table+" ("+column+")"
    87122        try:
    88             self.stmt.execute(sql)
     123            self.execute(sql)
    89124        except: pass
    90125            #self.logger.warn("Index already in place on '" + column + "' for table '" + table + "'")
     126    '''
     127    TODO
     128    '''
     129    def execute(self, sql):
     130
     131        stmt = self.con.createStatement()
     132        stmt.execute(sql)
     133        stmt.close()
     134
     135    '''
     136    TODO
     137    '''
     138    def executeQuery(self, sql):
     139
     140        stmt = self.con.createStatement()
     141        rs = stmt.executeQuery(sql)
     142        #stmt.close()
     143        return rs
    91144
    92145    '''
     
    96149
    97150       sql = "SHOW COLUMNS FROM " + tableName
    98        rs = self.stmt.executeQuery(sql)
     151       rs = self.executeQuery(sql)
    99152       columns = []
    100153       while (rs.next()): columns.append(rs.getString(1))
     
    109162
    110163      sql = "UPDATE " + tableName + " SET " + column + " = " + sub + " WHERE " + column + " IS NULL"
    111       self.stmt.execute(sql)
     164      self.execute(sql)
    112165
    113166    '''
     
    123176         
    124177          sql = "UPDATE " + tableName + " SET " + column + " = " + sub + " WHERE " + column + " IS NULL"
    125           self.stmt.execute(sql)
     178          self.execute(sql)
    126179
    127180    '''
     
    131184
    132185        sql = "SELECT COUNT(*) FROM " + tableName + " WHERE " + columnName + " = " + value
    133         rs = self.stmt.executeQuery(sql)
     186        rs = self.executeQuery(sql)
    134187        rs.first()
    135         nBadFlux = rs.getInt(1)
    136         self.logger.info("%d NULL %s values in table %s. Deleting." % (nBadFlux, columnName, tableName))
     188        nBad = rs.getInt(1)
     189        self.logger.info("%5d NULL %s values in table %s. Deleting." % (nBad, columnName, tableName))
    137190
    138191        sql="DELETE from " + tableName + " WHERE " + columnName + " = " + value
    139         self.stmt.execute(sql)
     192        self.execute(sql)
    140193
    141194    '''
     
    145198
    146199        sql = "SELECT COUNT(*) FROM " + tableName + " WHERE " + columnName + " IS NULL"
    147         rs = self.stmt.executeQuery(sql)
     200        rs = self.executeQuery(sql)
    148201        rs.first()
    149         nBadFlux = rs.getInt(1)
    150         self.logger.info("%d NULL %s values in table %s. Deleting." % (nBadFlux, columnName, tableName))
     202        nBad = rs.getInt(1)
     203        self.logger.info("%5d NULL %s values in table %s. Deleting." % (nBad, columnName, tableName))
    151204
    152205        sql="DELETE from " + tableName + " WHERE " + columnName + " IS NULL"
    153         self.stmt.execute(sql)
     206        self.execute(sql)
    154207
    155208    '''
     
    160213       # first, count rows
    161214       sql = "SELECT COUNT(*) FROM " + tableName
    162        rs = self.stmt.executeQuery(sql)
     215       rs = self.executeQuery(sql)
    163216       rs.first()
    164217       numRows = rs.getInt(1)
     
    175228         
    176229          sql = "SELECT COUNT(*) FROM " + tableName + " WHERE " + column + " IS NULL"
    177           rs = self.stmt.executeQuery(sql)
     230          rs = self.executeQuery(sql)
    178231          rs.first()
    179232          if rs.getInt(1) == numRows:
     
    191244        sql = "SELECT COUNT(*) FROM " + table
    192245        try:
    193             rs = self.stmt.executeQuery(sql) 
     246            rs = self.executeQuery(sql) 
    194247            rs.first()
    195248            return rs.getInt(1)
  • branches/eam_branches/ipp-20110505/ippToPsps/jython/scratchdb.py

    r31399 r31587  
    4545        sql = "SELECT surveyID FROM Survey WHERE name = '" + name + "'"
    4646        try:
    47             rs = self.stmt.executeQuery(sql)
     47            rs = self.executeQuery(sql)
    4848            rs.first()
    4949            return rs.getInt(1)
     
    6161        sql = "SELECT flags FROM " + self.dvoMeta + " WHERE sourceID = %s AND externID = %s" % (sourceID, externID)
    6262        try:
    63             rs = self.stmt.executeQuery(sql) 
     63            rs = self.executeQuery(sql) 
    6464            rs.first()
    6565            flags = rs.getInt(1)
     
    7878        sql = "SELECT imageID FROM " + self.dvoMeta + " WHERE sourceID = %s AND externID = %s" % (sourceID, externID)
    7979        try:
    80             rs = self.stmt.executeQuery(sql) 
     80            rs = self.executeQuery(sql) 
    8181            rs.first()
    8282            imageID = rs.getInt(1)
     
    9595        sql = "SELECT photcode FROM " + self.dvoMeta + " WHERE sourceID = %s AND externID = %s" % (sourceID, externID)
    9696        try:
    97             rs = self.stmt.executeQuery(sql) 
     97            rs = self.executeQuery(sql) 
    9898            rs.first()
    9999            photcode = rs.getInt(1)
     
    109109
    110110        sql = "UPDATE "+table+" AS a, Filter AS b SET a.filterID=b.filterID WHERE b.filterType = '" + filter + "'"
    111         self.stmt.execute(sql)
     111        self.execute(sql)
    112112
    113113    '''
     
    123123               " + str(imageID) + "    \
    124124               )"
    125         self.stmt.execute(sql)
     125        self.execute(sql)
    126126
    127127    '''
     
    131131
    132132        sql = "INSERT INTO dvoDone (name) VALUES ('" + name + "')"
    133         self.stmt.execute(sql)
     133        self.execute(sql)
    134134       
    135135    '''
     
    141141
    142142        try:
    143             rs = self.stmt.executeQuery(sql)
     143            rs = self.executeQuery(sql)
    144144            rs.first()
    145145            if rs.getInt(1) > 0:
     
    160160
    161161        sql = "DROP TABLE dvoMeta"
    162         try: self.stmt.execute(sql)
     162        try: self.execute(sql)
    163163        except: pass
    164164       
    165165        sql = "DROP TABLE dvoDetection"
    166         try: self.stmt.execute(sql)
     166        try: self.execute(sql)
    167167        except: pass
    168168
     
    175175               )"
    176176
    177         try: self.stmt.execute(sql)
     177        try: self.execute(sql)
    178178        except:
    179179            self.logger.error("Unable to create DVO meta-data database table")
     
    193193               #INDEX (ippDetectID) \
    194194
    195         try: self.stmt.execute(sql)
     195        try: self.execute(sql)
    196196        except:
    197197            self.logger.error("Unable to create DVO detection database table")
  • branches/eam_branches/ipp-20110505/ippToPsps/jython/stackbatch.py

    r31402 r31587  
    88from java.sql import *
    99
     10from batch import Batch
    1011from gpc1db import Gpc1Db
    11 from batch import Batch
     12from ipptopspsdb import IppToPspsDb
     13from scratchdb import ScratchDb
     14
    1215import logging.config
    1316
     
    2023    Constructor
    2124    '''
    22     def __init__(self, logger, skyID, inputFile, stackType, useFullTables=False):
     25    def __init__(self,
     26                 logger,
     27                 gpc1Db,
     28                 ippToPspsDb,
     29                 scratchDb,
     30                 skyID,
     31                 inputFile,
     32                 stackType,
     33                 useFullTables=False):
     34
    2335       super(StackBatch, self).__init__(
    2436               logger,
     37               gpc1Db,
     38               ippToPspsDb,
     39               scratchDb,
     40               skyID,
    2541               "stack",
    2642               inputFile,
     
    2844               useFullTables) # TODO
    2945
    30        self.logger.info("StackBatch constructor. Creating batch from: '" + inputFile + "'")
    31 
    32        self.skyID = skyID
    33 
    34        # get filterID using init table
    35        self.filter = self.header['FPA.FILTER']
    36        self.filter = self.filter[0:1]
    37 
    38        self.stackType = stackType
    39        meta = self.gpc1Db.getStackStageMeta(self.skyID, self.header['FPA.FILTER'])
    40        if len(meta) < 1: return
    41        self.stackID = meta[0];
    42        self.skycell = meta[1];
    43 
    44        # determine skycell from header value
    45        #self.skycell = "skycell.34" #= self.header['SKYCELL']
    46        self.skycell = self.skycell[8:]
    47 
    48        self.logger.info("Processing stack with ID: %d, type: %s and skycell: %s filter: %s" % (self.stackID, self.stackType, self.skycell, self.filter))
    49 
     46       if not self.everythingOK: return
     47
     48       self.expTime = gpc1Db.getStackExpTime(self.stackID)
     49
     50       self.logger.info("got exp time of %d" % self.expTime)
     51
     52       # meta data to the log
     53       self.logger.info("New Stack Batch:")
     54       self.logger.info("Sky ID:     %d" % self.id)
     55       self.logger.info("File:       %s" % inputFile)
     56       self.logger.info("Stack ID:   %d" % self.stackID)
     57       self.logger.info("Stack type: %s" % self.stackType)
     58       self.logger.info("Skycell:    %s" % self.skycell)
     59       self.logger.info("Filter:     %s" % self.filter)
    5060
    5161       # delete PSPS tables
     
    5868       self.scratchDb.dropTable("ObjectCalColor")
    5969
    60        # delete IPP tables
    61        #self.scratchDb.dropTable("SkyChip_psf")
    62        #self.scratchDb.dropTable("SkyChip_xsrc")
    63        #self.scratchDb.dropTable("SkyChip_xfit")
    64        #self.scratchDb.dropTable("SkyChip_xrad")
    65 
    66        self.logger.info("Stack type: " + self.safeDictionaryAccess(self.header, self.stackType))
    67        # obs time makes no sense except for nightly stacks
    68        #if self.header['STK_TYPE'] != "NIGHTLY_STACK": self.header['MJD-OBS'] = "-999"
    69 
    7070       # create an output filename, which is {filterID}{skycellID}.FITS
    71        self.outputFitsFile = "%s%07d.FITS" % (self.filter, int(self.skycell))
     71       self.outputFitsFile = "%08d.FITS" % self.stackID
    7272       self.outputFitsPath = "%s/%s" % (self.localOutPath, self.outputFitsFile)
    7373
     
    7777
    7878       # insert what we know about this stack batch into the stack table
    79        self.ippToPspsDb.insertStackMeta(self.batchID, self.skyID, self.stackID, self.filter, self.stackType)
     79       self.ippToPspsDb.insertStackMeta(self.batchID, self.id, self.stackID, self.filter, self.stackType)
    8080
    8181       # insert sourceID/imageID combo so DVO can look it up
     
    8989
    9090        sql = "UPDATE " + table + "  SET stackMetaID=" + str(self.stackID)
    91         self.scratchDb.stmt.execute(sql)
     91        self.scratchDb.execute(sql)
    9292
    9393    '''
     
    9797
    9898        sql = "UPDATE "+table+" AS a, StackType AS b SET a.stackTypeID=b.stackTypeID WHERE b.name = '" + self.stackType + "'"
    99         self.scratchDb.stmt.execute(sql)
     99        self.scratchDb.execute(sql)
    100100
    101101
     
    148148        WHERE a.ippDetectID=b.IPP_IDET AND b.PSF_FWHM "+psfCondition
    149149
    150         self.scratchDb.stmt.execute(sql)
     150        self.scratchDb.execute(sql)
    151151
    152152    '''
     
    196196        WHERE a.ippDetectID=b.IPP_IDET AND b.MODEL_TYPE = '"+ippModelType+"'"
    197197
    198         self.scratchDb.stmt.execute(sql)
     198        self.scratchDb.execute(sql)
    199199
    200200        # sersic fit has an extra parameter
     
    213213            WHERE a.ippDetectID=b.IPP_IDET AND b.MODEL_TYPE = '"+ippModelType+"'"
    214214
    215             self.scratchDb.stmt.execute(sql)
     215            self.scratchDb.execute(sql)
    216216
    217217
     
    220220    '''
    221221    def populateStackMeta(self):
     222
    222223        self.logger.info("Procesing StackMeta table")
    223224
     
    246247        ," + str(self.scratchDb.getPhotoCalID(self.header['SOURCEID'], self.header['IMAGEID'])) + " \
    247248        ," + self.header['FPA.ZP'] + " \
    248         ," + self.header['EXPTIME'] + " \
     249        ," + str(self.expTime) + " \
    249250        ,'" + self.safeDictionaryAccess(self.header, 'PSFMODEL') + "' \
    250251        ,'" + self.header['CTYPE1'] + "' \
     
    261262        ," + self.header['PC002002'] + " \
    262263        )"
    263         self.scratchDb.stmt.execute(sql)
     264        self.scratchDb.execute(sql)
    264265
    265266        self.scratchDb.updateAllRows("StackMeta", "surveyID", str(self.surveyID))
     
    272273    '''
    273274    def populateStackDetection(self):
     275
    274276        self.logger.info("Procesing StackDetection table")
    275277
     
    321323               ,X_PSF_SIG \
    322324               ,Y_PSF_SIG \
    323                ,POW(10.0, (-0.4*PSF_INST_MAG)) / "+self.header['EXPTIME']+" \
    324                ,ABS((PSF_INST_MAG_SIG*(POW(10.0, (-0.4*PSF_INST_MAG)) / "+self.header['EXPTIME']+")) / 1.085736) \
    325                ,POW(10.0, (-0.4*PEAK_FLUX_AS_MAG)) / "+self.header['EXPTIME']+" \
     325               ,POW(10.0, (-0.4*PSF_INST_MAG)) / "+str(self.expTime)+" \
     326               ,ABS((PSF_INST_MAG_SIG*(POW(10.0, (-0.4*PSF_INST_MAG)) / "+str(self.expTime)+")) / 1.085736) \
     327               ,POW(10.0, (-0.4*PEAK_FLUX_AS_MAG)) / "+str(self.expTime)+" \
    326328               ,SKY \
    327329               ,SKY_SIGMA \
     
    352354               FROM SkyChip_psf"
    353355
    354         self.scratchDb.stmt.execute(sql)
     356        self.scratchDb.execute(sql)
    355357
    356358        self.scratchDb.updateAllRows("StackDetection", "surveyID", str(self.surveyID))
     
    362364        self.updateStackTypeID("StackDetection")
    363365        self.updateDvoIDs("StackDetection")
    364 
    365         # now delete bad flux
     366        sql = "ALTER TABLE StackDetection ADD PRIMARY KEY (objID, stackDetectID)"
     367        self.scratchDb.execute(sql)
     368
     369        if self.stackType == "DEEP_STACK":
     370
     371            #if deep stack and instFlux = null and err not null
     372            sql = "UPDATE StackDetection AS a, SkyChip_psf AS b \
     373                   SET instFlux = 2*b.PSF_INST_FLUX_SIG \
     374                   WHERE instFlux IS NULL \
     375                   AND b.PSF_INST_FLUX_SIG IS NOT NULL"
     376            self.scratchDb.execute(sql)
     377            #    instFlux = 2*PSF_INST_FLUX_SIG
     378           
    366379        self.scratchDb.reportAndDeleteRowsWithNULLS("StackDetection", "instFlux")
    367380        self.scratchDb.reportAndDeleteRowsWithNULLS("StackDetection", "objID")
     
    372385    '''
    373386    def populateStackApFlx(self):
     387
    374388        self.logger.info("Procesing StackApFlx table")
    375389 
     
    381395
    382396        try:
    383             self.scratchDb.stmt.execute(sql)
     397            self.scratchDb.execute(sql)
    384398        except: return
    385399
    386400        # TODO temporarily loading 1st convolved fluxes into unconvolved fields
    387         self.logger.info("    Adding un-convolved fluxes")
     401        self.logger.info("Adding un-convolved fluxes")
    388402        self.updateApFlxs("", "< 7.0")
    389         self.logger.info("    Adding 1st convolved fluxes")
     403        self.logger.info("Adding 1st convolved fluxes")
    390404        self.updateApFlxs("c1", "< 7.0")
    391         self.logger.info("    Adding 2nd convolved fluxes")
     405        self.logger.info("Adding 2nd convolved fluxes")
    392406        self.updateApFlxs("c2", "> 7.0")
    393407
    394         self.logger.info("    Adding petrosians for extended sources")
     408        self.logger.info("Adding petrosians for extended sources")
    395409        sql = "UPDATE StackApFlx AS a, SkyChip_xsrc AS b SET \
    396410        petRadius=b.PETRO_RADIUS \
     
    403417        ,petR90Err=b.PETRO_RADIUS_90_ERR \
    404418        WHERE a.ippDetectID=b.IPP_IDET"
    405         self.scratchDb.stmt.execute(sql)
     419        self.scratchDb.execute(sql)
    406420
    407421        self.scratchDb.updateAllRows("StackApFlx", "surveyID", str(self.surveyID))
     
    413427        self.updateStackTypeID("StackApFlx")
    414428        self.updateDvoIDs("StackApFlx")
     429        self.scratchDb.reportAndDeleteRowsWithNULLS("StackApFlx", "objID")
     430        self.deleteDetectionsNotInStackDetection("StackApFlx")
     431
    415432
    416433    '''
     
    418435    '''
    419436    def populateStackModelFit(self):
     437
    420438        self.logger.info("Procesing StackModelFit table")
    421439
     
    423441        sql = "INSERT INTO StackModelFit (ippDetectID) SELECT DISTINCT IPP_IDET from SkyChip_xfit"
    424442        try:
    425             self.scratchDb.stmt.execute(sql)
     443            self.scratchDb.execute(sql)
    426444        except:
    427445            return
    428446
    429 
    430447        # populate model parameters
    431         self.logger.info("    Adding deVaucouleurs fit")
     448        self.logger.info("Adding deVaucouleurs fit")
    432449        self.updateModelFit("deV", "PS_MODEL_DEV")
    433         self.logger.info("    Adding exponential fit")
     450        self.logger.info("Adding exponential fit")
    434451        self.updateModelFit("exp", "PS_MODEL_EXP")
    435         self.logger.info("    Adding sersic fit")
     452        self.logger.info("Adding sersic fit")
    436453        self.updateModelFit("ser", "PS_MODEL_SERSIC")
    437454
     
    444461        self.updateStackTypeID("StackModelFit")
    445462        self.updateDvoIDs("StackModelFit")
     463        self.scratchDb.reportAndDeleteRowsWithNULLS("StackModelFit", "objID")
     464        self.deleteDetectionsNotInStackDetection("StackModelFit")
     465
     466    '''
     467    Reports and deletes detections in this table that are not in StackDetection
     468    '''
     469    def deleteDetectionsNotInStackDetection(self, table):
     470
     471        sql = "SELECT COUNT(*) FROM " + table + " WHERE ippDetectID NOT IN (SELECT ippDetectID FROM StackDetection)"
     472        rs = self.scratchDb.executeQuery(sql)
     473        rs.first()
     474        nMissing = rs.getInt(1)
     475        self.logger.info("%5d detections in %s table that are not in StackDetection. Deleting" % (nMissing, table))
     476 
     477        if nMissing < 1: return
     478       
     479        sql = "DELETE FROM " + table + " WHERE ippDetectID NOT IN (SELECT ippDetectID FROM StackDetection)"
     480        self.scratchDb.execute(sql)
     481       
    446482
    447483    '''
     
    449485    '''
    450486    def populateStackToImage(self):
     487
    451488        self.logger.info("Procesing StackToImage table")
    452489
     
    457494                   VALUES (\
    458495                   " + str(self.stackID) + ", " + imageID + ")"
    459             self.scratchDb.stmt.execute(sql)
     496            self.scratchDb.execute(sql)
    460497
    461498        # now update StackMeta with correct number of inputs
    462499        sql = "UPDATE StackMeta SET nP2Images = (SELECT COUNT(*) FROM StackToImage)"
    463         self.scratchDb.stmt.execute(sql)
     500        self.scratchDb.execute(sql)
    464501
    465502    '''
     
    467504    '''
    468505    def populateSkinnyObject(self):
     506
    469507        self.logger.info("Procesing SkinnyObject table")
    470508
     
    474512               ) \
    475513               SELECT \
    476                objID \
     514               DISTINCT objID \
    477515               ,ippObjID \
    478516               FROM StackDetection"
    479         self.scratchDb.stmt.execute(sql)
     517        self.scratchDb.execute(sql)
    480518
    481519        self.scratchDb.updateAllRows("SkinnyObject", "surveyID", str(self.surveyID))
     
    486524    '''
    487525    def populateObjectCalColor(self):
     526
    488527        self.logger.info("Procesing ObjectCalColor table")
    489528
     
    493532               ) \
    494533               SELECT \
    495                objID \
     534               DISTINCT objID \
    496535               ,ippObjID \
    497536               FROM StackDetection"
    498         self.scratchDb.stmt.execute(sql)
     537        self.scratchDb.execute(sql)
    499538
    500539        self.scratchDb.updateFilterID("ObjectCalColor", self.filter)
     
    508547
    509548        self.logger.info("Altering PSPS tables")
    510         self.scratchDb.makeColumnUnique("StackDetection", "objID")
     549        #self.scratchDb.makeColumnUnique("StackDetection", "objID")
    511550        self.scratchDb.createIndex("StackDetection", "ippDetectID")
    512551        self.scratchDb.createIndex("StackApFlx", "ippDetectID")
     
    531570        imageID = self.scratchDb.getImageIDFromExternID(self.header['SOURCEID'], self.header['IMAGEID'])
    532571
    533         self.logger.info("Updating table '" + table + "' with DVO IDs...")
     572        self.logger.debug("Updating table '" + table + "' with DVO IDs...")
    534573        sql = "UPDATE IGNORE " + table + " AS a, dvoDetectionFull AS b SET \
    535574               a.ippObjID = b.ippObjID, \
     
    539578               AND b.sourceID = " + self.header['SOURCEID'] + "\
    540579               AND b.imageID = " + str(imageID)
    541         self.scratchDb.stmt.execute(sql)
    542 
     580        self.scratchDb.execute(sql)
    543581
    544582    '''
     
    553591        self.populateStackMeta()
    554592        self.populateStackDetection()
    555         self.populateStackModelFit()
    556         self.populateStackApFlx()
     593
     594        if self.stackType != "NIGHTLY_STACK":
     595            self.populateStackModelFit()
     596            self.populateStackApFlx()
     597   
    557598        self.populateStackToImage()
    558599        self.populateSkinnyObject()
     
    560601
    561602        self.setMinMaxObjID(["StackDetection"])
    562        
     603       
     604        if self.totalDetections < 1:
     605
     606            self.logger.error("No detections to publish")
     607            return False
     608
    563609        return True
    564610
     
    568614    def alreadyProcessed(self):
    569615
    570         return self.ippToPspsDb.alreadyProcessed("stack", "stack_id", self.stackID)
     616        # sadly, we have to read the FITS primary header first
     617        if not self.readPrimaryHeader(): return False
     618
     619        # get filterID using init table
     620        self.filter = self.header['FPA.FILTER']
     621        self.filter = self.filter[0:1]
     622
     623        self.stackType = stackType
     624        meta = self.gpc1Db.getStackStageMeta(self.id, self.header['FPA.FILTER'])
     625        if len(meta) < 1: return False
     626        self.stackID = meta[0];
     627        self.skycell = meta[1];
     628        self.skycell = self.skycell[8:]
     629
     630        #return self.ippToPspsDb.alreadyProcessed("stack", "stack_id", self.stackID)
     631        return False # TODOI
     632
     633
     634useFullTables=True
    571635
    572636logging.config.fileConfig("logging.conf")
    573637logger = logging.getLogger("stackbatch")
     638logger.setLevel(logging.INFO)
    574639logger.info("Starting")
     640
    575641gpc1Db = Gpc1Db(logger)
    576 stackType = "NIGHTLY_STACK"
    577 skyIDs = gpc1Db.getIDsInThisDVODbForThisStage("MD04.Staticsky", "staticsky")
    578 #skyIDs = gpc1Db.getIDsInThisDVODbForThisStage("MD04.GENE.PSPSDEEP", "staticsky")
    579 #stackType = "DEEP_STACK"
    580 #skyIDs = [689]
     642ippToPspsDb = IppToPspsDb(logger)
     643scratchDb = ScratchDb(logger, useFullTables)
     644
     645#stackType = "NIGHTLY_STACK"
     646#skyIDs = gpc1Db.getIDsInThisDVODbForThisStageFudge()
     647#skyIDs = gpc1Db.getIDsInThisDVODbForThisStage("MD04.Staticsky", "staticsky")
     648
     649stackType = "DEEP_STACK"
     650skyIDs = gpc1Db.getIDsInThisDVODbForThisStage("MD04.GENE.PSPSDEEP", "staticsky")
     651
     652#skyIDs = [942]
    581653#skyIDs = [299]
    582654#skyIDs = [302]
    583655#skyIDs = [8508]
    584 i = 0
     656#i = 0
    585657for skyID in skyIDs:
    586 
    587     logger.info("-------------------------------------------------- sky ID: %d" % skyID)
     658   
     659    #if skyID < 1340: continue # nightly
     660    #if skyID < 238: continue # deep
    588661
    589662    cmfFiles = gpc1Db.getStackStageCmfs(skyID)
     
    591664    for file in cmfFiles:
    592665
    593         if not os.path.isfile(file):
    594             logger.error("Cannot read file at '" + file)
    595             continue
    596 
    597         stackBatch = StackBatch(logger, skyID, file, stackType, True)
    598 
    599         if not stackBatch.alreadyProcessed():
    600 
    601             stackBatch.createEmptyPspsTables()
    602             stackBatch.importIppTables("")
    603             if stackBatch.populatePspsTables():
    604  
    605                 #stackBatch.reportNullsInAllPspsTables(False)
    606                 stackBatch.exportPspsTablesToFits()
    607                 stackBatch.writeBatchManifest()
    608                 #stackBatch.createTarball()
    609                 #stackBatch.publishToDatastore()
    610 
    611                 i = i + 1
    612                 #if i > 0: sys.exit()
    613 
    614 logger.info("Finished")
     666        stackBatch = StackBatch(logger,
     667                                gpc1Db,
     668                                ippToPspsDb,
     669                                scratchDb,
     670                                skyID,
     671                                file,
     672                                stackType,
     673                                useFullTables)
     674
     675        stackBatch.run()
     676
  • branches/eam_branches/ipp-20110505/ippTools/share/laptool_definerun.sql

    r31435 r31587  
    1 SELECT want.exp_id, have.chip_id, false as private, true as active, false as pairwise
     1SELECT DISTINCT want.exp_id, have.chip_id, false as private, true as active, false as pairwise
    22  FROM
    33  (SELECT exp_id FROM rawExp
     
    88  ) AS want
    99  LEFT JOIN
    10   (SELECT *
     10  (SELECT exp_id,MAX(chip_id) AS chip_id
    1111     FROM lapExp
    1212     where private IS FALSE
  • branches/eam_branches/ipp-20110505/ippTools/share/laptool_exposures.sql

    r31435 r31587  
    1 SELECT DISTINCT
    2     D.*,diffRun.state,
    3     coalesce(CONVERT(sum(others.private),SIGNED),0) AS needs_remade
    4 --      0 AS needs_remade
    5     FROM (
    6   SELECT DISTINCT
    7       W.*,CONVERT(IFNULL(diff1.diff_id,diff2.diff_id),SIGNED) AS diff_id FROM (
    8     SELECT DISTINCT
    9         lap_id,lapRun.tess_id,projection_cell,filter,lapRun.state as lapRun_state, lapRun.registered, lapRun.fault, lapRun.label, lapRun.dist_group,
    10         lapExp.exp_id,lapExp.chip_id,lapExp.pair_id,private,pairwise,active,lapExp.data_state,
    11         chipRun.state as chipRun_state,
    12         coalesce(CONVERT(sum(chipProcessedImfile.fault),SIGNED),0) as chip_faults,
    13         coalesce(CONVERT(sum(chipProcessedImfile.quality),SIGNED),0) as chip_quality,
    14         camRun.cam_id, camRun.state as camRun_state,   
    15         coalesce(CONVERT(sum(camProcessedExp.fault),SIGNED),0) AS cam_faults,
    16         coalesce(CONVERT(sum(camProcessedExp.quality),SIGNED),0) AS cam_quality,
    17         fakeRun.fake_id, fakeRun.state as fakeRun_state,
    18         coalesce(CONVERT(sum(fakeProcessedImfile.fault),SIGNED),0) as fake_faults,
    19         warpRun.warp_id, warpRun.state as warpRun_state,
    20         coalesce(CONVERT(sum(warpSkyfile.fault),SIGNED),0) as warp_faults,
    21         coalesce(CONVERT(sum(warpSkyfile.quality),SIGNED),0) as warp_quality,
    22         warpRun.magicked
    23     FROM lapRun JOIN lapExp USING(lap_id)
    24     LEFT JOIN chipRun USING(chip_id)
    25     LEFT JOIN chipProcessedImfile USING(chip_id)
    26     LEFT JOIN camRun USING(chip_id) LEFT JOIN camProcessedExp USING(cam_id)
    27     LEFT JOIN fakeRun USING(cam_id) LEFT JOIN fakeProcessedImfile USING(fake_id)
    28     LEFT JOIN warpRun USING(fake_id) LEFT JOIN warpSkyfile USING(warp_id)
    29     WHERE    @WHERE@
    30     AND (warpSkyfile.quality IS NULL OR
    31          (warpSkyfile.quality != 8007      -- known cases where quality != 0, but everything's fine.
    32           AND warpSkyfile.quality != 3006  -- known cases where quality != 0, but everything's fine.
    33           ))
    34     GROUP BY lap_id,exp_id
    35     ) AS W
    36 -- This was unreasonably slow in testing, so that's why I'm using a subquery here.
    37   LEFT JOIN diffInputSkyfile AS diff1 ON (W.warp_id = diff1.warp1)
    38   LEFT JOIN diffInputSkyfile AS diff2 ON (W.warp_id = diff2.warp2)
    39 ) AS D
     1select DISTINCT V3.*,
     2       diffRun.diff_id,diffRun.state as diffRun_state,
     3       coalesce(CONVERT(sum(diffSkyfile.quality != 0),SIGNED),0) AS diff_bad_quality,
     4       coalesce(CONVERT(count(diffSkyfile.diff_id),SIGNED),0) AS diff_component_count
     5       FROM
     6( select V2.*,
     7       warpRun.warp_id,warpRun.state as warpRun_state,
     8       coalesce(CONVERT(sum(warpSkyfile.quality != 0),SIGNED),0) AS warp_bad_quality,
     9       coalesce(CONVERT(count(warpSkyfile.warp_id),SIGNED),0) AS warp_component_count,
     10       warpRun.magicked
     11       FROM
     12( select V1.*,
     13       camRun.cam_id,camRun.state as camRun_state,
     14       coalesce(CONVERT(sum(camProcessedExp.quality != 0),SIGNED),0) AS cam_bad_quality,
     15       coalesce(CONVERT(count(camProcessedExp.cam_id),SIGNED),0) AS cam_component_count,
     16       fakeRun.fake_id,fakeRun.state as fakeRun_state FROM
     17( SELECT DISTINCT
     18       lap_id,lapRun.tess_id,projection_cell,filter,lapRun.state as lapRun_state, lapRun.registered, lapRun.fault, lapRun.label, lapRun.dist_group,
     19       lapExp.exp_id,lapExp.chip_id,lapExp.pair_id,private,pairwise,active,lapExp.data_state,
     20       chipRun.state as chipRun_state,
     21       coalesce(CONVERT(sum(chipProcessedImfile.quality != 0),SIGNED),0) AS chip_bad_quality,
     22       coalesce(CONVERT(count(chipProcessedImfile.chip_id),SIGNED),0) AS chip_component_count
     23       FROM lapRun JOIN lapExp USING(lap_id)
     24       LEFT JOIN chipRun USING(chip_id) LEFT JOIN chipProcessedImfile USING(chip_id)
     25WHERE @WHERE@
     26       GROUP BY lap_id,exp_id
     27       ) AS V1
     28       LEFT JOIN camRun USING(chip_id) LEFT JOIN camProcessedExp USING(cam_id)
     29       LEFT JOIN fakeRun USING(cam_id)
     30       GROUP BY lap_id,exp_id
     31  ) AS V2
     32  LEFT JOIN warpRun USING(fake_id) LEFT JOIN warpSkyfile USING(warp_id)
     33  GROUP BY lap_id,exp_id
     34) AS V3
     35LEFT JOIN
     36  (SELECT DISTINCT diff_id,warp1,warp2 FROM diffInputSkyfile) AS DI ON
     37  (DI.warp1 = warp_id OR DI.warp2 = warp_id)
    4038LEFT JOIN diffRun USING(diff_id)
    41 LEFT JOIN lapExp AS others ON (D.chip_id = others.chip_id AND D.lap_id != others.lap_id)
     39LEFT JOIN diffSkyfile USING(diff_id)
    4240GROUP BY lap_id,exp_id
    4341
  • branches/eam_branches/ipp-20110505/ippTools/src/disttool.c

    r30906 r31587  
    447447
    448448    if (pretend) {
    449         if (!ippdbPrintMetadatas(stdout, output, "newdistRuns", true)) {
     449        if (!ippdbPrintMetadatas(stdout, output, "newdistRuns", !simple)) {
    450450            psError(PS_ERR_UNKNOWN, false, "failed to print array");
    451451            psFree(output);
  • branches/eam_branches/ipp-20110505/ippTools/src/laptool.c

    r31435 r31587  
    384384  PXOPT_COPY_STR(config->args, where, "-projection_cell", "projection_cell", "==");
    385385  PXOPT_COPY_STR(config->args, where, "-filter", "filter", "==");
    386   PXOPT_COPY_STR(config->args, where, "-label", "label", "==");
     386  //  PXOPT_COPY_STR(config->args, where, "-label", "label", "==");
    387387  PXOPT_COPY_STR(config->args, where, "-state", "state", "==");
    388388  PXOPT_COPY_STR(config->args, where, "-fault", "fault", "==");
    389389
     390  pxAddLabelSearchArgs(config, where, "-label", "lapRun.label", "==");
     391 
    390392  psString query = pxDataGet("laptool_pendingrun.sql");
    391393  if (!query) {
     
    560562  psMetadata *where = psMetadataAlloc();
    561563  PXOPT_COPY_S64(config->args, where, "-lap_id", "lap_id", "==");
    562   PXOPT_COPY_S64(config->args, where, "-exp_id", "exp_id", "==");
     564  PXOPT_COPY_S64(config->args, where, "-exp_id", "lapExp.exp_id", "==");
    563565 
    564566  psString query = pxDataGet("laptool_exposures.sql");
  • branches/eam_branches/ipp-20110505/ippTools/src/laptoolConfig.c

    r31435 r31587  
    6969  ADD_OPT(Str, pendingrunArgs, "-projection_cell",            "search by projection cell", NULL);
    7070  ADD_OPT(Str, pendingrunArgs, "-filter",                     "search by filter", NULL);
    71   ADD_OPT(Str, pendingrunArgs, "-label",                      "search by LAP run label", NULL);
     71  //  ADD_OPT(Str, pendingrunArgs, "-label",                      "search by LAP run label", NULL);
     72  psMetadataAddStr(pendingrunArgs, PS_LIST_TAIL, "-label", PS_META_DUPLICATE_OK, "search by LAP run label", NULL);
    7273  ADD_OPT(Str, pendingrunArgs, "-state",                      "search by LAP run state", NULL);
    7374  ADD_OPT(Str, pendingrunArgs, "-fault",                      "search by LAP run fault", NULL);
    7475  ADD_OPT(Bool,pendingrunArgs, "-simple",                     "use the simple output format", false);
     76
     77  // -listrun
     78  psMetadata *listrunArgs = psMetadataAlloc();
     79  ADD_OPT(S64, listrunArgs, "-seq_id",                     "search by LAP sequence ID", 0);
     80  ADD_OPT(S64, listrunArgs, "-lap_id",                     "search by LAP run ID", 0);
     81  ADD_OPT(Str, listrunArgs, "-projection_cell",            "search by projection cell", NULL);
     82  ADD_OPT(Str, listrunArgs, "-filter",                     "search by filter", NULL);
     83  //  ADD_OPT(Str, listrunArgs, "-label",                      "search by LAP run label", NULL);
     84  psMetadataAddStr(listrunArgs, PS_LIST_TAIL, "-label", PS_META_DUPLICATE_OK, "search by LAP run label", NULL);
     85  ADD_OPT(Str, listrunArgs, "-state",                      "search by LAP run state", NULL);
     86  ADD_OPT(Str, listrunArgs, "-fault",                      "search by LAP run fault", NULL);
     87  ADD_OPT(Bool,listrunArgs, "-simple",                     "use the simple output format", false);
    7588
    7689  // -updaterun
     
    128141  psMetadata *argSets = psMetadataAlloc();
    129142  psMetadata *modes = psMetadataAlloc();
    130 
     143 
    131144  PXOPT_ADD_MODE("-definesequence",          "", LAPTOOL_MODE_DEFINESEQUENCE,   definesequenceArgs);
    132145  PXOPT_ADD_MODE("-listsequence",            "", LAPTOOL_MODE_LISTSEQUENCE,     listsequenceArgs);
    133146  PXOPT_ADD_MODE("-definerun",               "", LAPTOOL_MODE_DEFINERUN,        definerunArgs);
    134147  PXOPT_ADD_MODE("-pendingrun",              "", LAPTOOL_MODE_PENDINGRUN,       pendingrunArgs);
     148  PXOPT_ADD_MODE("-listrun",                 "", LAPTOOL_MODE_PENDINGRUN,       listrunArgs);
    135149  PXOPT_ADD_MODE("-updaterun",               "", LAPTOOL_MODE_UPDATERUN,        updaterunArgs);
    136150  PXOPT_ADD_MODE("-pendingexp",              "", LAPTOOL_MODE_PENDINGEXP,       pendingexpArgs);
  • branches/eam_branches/ipp-20110505/ippTools/src/pstamptool.c

    r30543 r31587  
    843843    psFree(where);
    844844
    845     psStringAppend(&query, " ORDER BY priority DESC, req_id");
     845    psStringAppend(&query, " ORDER BY priority DESC, req_id, job_id");
    846846
    847847    // treat limit == 0 as "no limit"
     
    13231323    psFree(where);
    13241324
    1325     psStringAppend(&query, " ORDER BY priority DESC, req_id");
     1325    psStringAppend(&query, " ORDER BY priority DESC, req_id, dep_id");
    13261326
    13271327    // treat limit == 0 as "no limit"
  • branches/eam_branches/ipp-20110505/ippTools/src/pubtoolConfig.c

    r30769 r31587  
    4848    psMetadataAddStr(defineclientArgs, PS_LIST_TAIL, "-product", 0, "define product (required)", NULL);
    4949    psMetadataAddStr(defineclientArgs, PS_LIST_TAIL, "-workdir", 0, "define workdir (required)", NULL);
     50    psMetadataAddS16(defineclientArgs, PS_LIST_TAIL, "-output_format", 0, "define output format", 2);
    5051    psMetadataAddStr(defineclientArgs, PS_LIST_TAIL, "-comment", 0, "define comment", NULL);
     52    psMetadataAddStr(defineclientArgs, PS_LIST_TAIL, "-name", 0, "define name", NULL);
    5153    psMetadataAddBool(defineclientArgs, PS_LIST_TAIL, "-unmagicked", 0, "allow unmagicked data?", false);
    5254
  • branches/eam_branches/ipp-20110505/ippTools/src/stacktool.c

    r31445 r31587  
    246246    PXOPT_COPY_STR(config->args,  where, "-select_data_group",         "warpRun.data_group", "==");
    247247    pxAddLabelSearchArgs (config, where, "-select_label",              "warpRun.label", "LIKE"); // define using warp label
    248 
     248    pxAddLabelSearchArgs (config, where, "-warp_id",                   "warpRun.warp_id", "==");
     249   
    249250    // these are used to build the HAVING restriction
    250251    PXOPT_COPY_S32(config->args, having, "-min_num", "num_warp", ">=");
     
    508509          association->sass_id = sass_id;
    509510        }
     511        else {
     512          sass_id = association->sass_id;
     513        }
    510514        // Insert the map entry for this row.
    511515        stackAssociationMapRow *maprow = stackAssociationMapRowAlloc(sass_id,stack_id);
  • branches/eam_branches/ipp-20110505/ippTools/src/stacktoolConfig.c

    r30945 r31587  
    9191    psMetadataAddF64(definebyqueryArgs, PS_LIST_TAIL, "-select_zpt_obs_max", 0, "define max zero point", NAN);
    9292    psMetadataAddF64(definebyqueryArgs, PS_LIST_TAIL, "-select_astrom", 0, "define max astrometry rms", NAN);
     93    psMetadataAddS64(definebyqueryArgs, PS_LIST_TAIL, "-warp_id", PS_META_DUPLICATE_OK, "include this warp ID (multiple OK)", 0);
    9394    psMetadataAddS32(definebyqueryArgs, PS_LIST_TAIL, "-random", 0, "use this number of random elements", 0);
    9495    psMetadataAddS32(definebyqueryArgs, PS_LIST_TAIL, "-min_num", 0, "minimum number of inputs", 0);
  • branches/eam_branches/ipp-20110505/ppStack/src/ppStackPrepare.c

    r31158 r31587  
    144144    int numCols = 0, numRows = 0;   // Size of image
    145145    options->sumExposure = 0.0;
     146    int numWithSources = 0;
    146147    for (int i = 0; i < num; i++) {
    147148        pmFPAfile *file = pmFPAfileSelectSingle(config->files, "PPSTACK.INPUT", i); // File of interest
     
    188189            pmReadout *ro = pmFPAviewThisReadout(view, file->fpa); // Readout with sources
    189190            detections = psMetadataLookupPtr(NULL, ro->analysis, "PSPHOT.DETECTIONS"); // Sources
    190             if (!detections || !detections->allSources) {
     191            if (!detections || !detections->allSources || !detections->allSources->n) {
    191192                psWarning("No detections found for image %d --- rejecting.", i);
    192193                options->inputMask->data.PS_TYPE_VECTOR_MASK_DATA[i] = PPSTACK_MASK_CAL;
     
    196197
    197198            options->sourceLists->data[i] = psMemIncrRefCounter(detections->allSources);
     199            numWithSources++;
    198200        }
    199201
     
    230232            ppStackFileActivation(config, PPSTACK_FILES_PREPARE, true);
    231233        }
     234    }
     235    if (numWithSources < 2) {
     236        // This can happen if the inputs have been destreaked
     237        psErrorStackPrint(stderr, "Not enough inputs have sources");
     238        psWarning("No inputs have sources --- suspect bad data quality.");
     239        if (options->quality == 0) {
     240            options->quality = PPSTACK_ERR_DATA;
     241        }
     242        psErrorClear();
     243        psFree(view);
     244        return false;
    232245    }
    233246
     
    308321        psFree(psfs);
    309322        if (!options->psf) {
     323#if 1
    310324            psError(psErrorCodeLast(), false, "Unable to determine output PSF.");
     325#else
     327            // This will repair the problem reported in ticket 1427 but we aren't yet sure
     327            // why ppStackPSF is failing so we are going to continue to fault for now
     328            int errorCode = psErrorCodeLast();
     329            if (errorCode == PPSTACK_ERR_PSF) {
     330                psErrorStackPrint(stderr, "Unable to determine output PSF.");
     331                psWarning("Unable to determine output PSF --- suspect bad data quality.");
     332                if (options->quality == 0) {
     333                    options->quality = errorCode;
     334                }
     335                psErrorClear();
     336            } else {
     337                psError(psErrorCodeLast(), false, "Unable to determine output PSF.");
     338            }
     339#endif // notyet
    311340            psFree(view);
    312341            return false;
  • branches/eam_branches/ipp-20110505/psModules/src/imcombine/pmSubtractionStamps.c

    r31451 r31587  
    834834            }
    835835
    836             psStatsInit (stats);
    837             if (!psVectorStats (stats, flux1, NULL, NULL, 0)) {
    838                 psAbort ("failed to generate stats");
    839             }
    840             float f1 = stats->sampleMedian;
    841 
    842             psStatsInit (stats);
    843             if (!psVectorStats (stats, flux2, NULL, NULL, 0)) {
    844                 psAbort ("failed to generate stats");
    845             }
    846             float f2 = stats->sampleMedian;
     836            float f1 = NAN;
     837            if (flux1->n > 0) {
     838                psStatsInit (stats);
     839                if (!psVectorStats (stats, flux1, NULL, NULL, 0)) {
     840                    psAbort ("failed to generate stats");
     841                }
     842                f1 = stats->sampleMedian;
     843            }
     844
     845            float f2 = NAN;
     846            if (flux2->n > 0) {
     847                psStatsInit (stats);
     848                if (!psVectorStats (stats, flux2, NULL, NULL, 0)) {
     849                    psAbort ("failed to generate stats");
     850                }
     851                f2 = stats->sampleMedian;
     852            }
    847853
    848854            stamps->window1->kernel[y][x] = f1;
     
    893899    float R2 = Sr2 / Sf2;
    894900
     901    if (!isfinite(R1) || !isfinite(R2)) {
     902        psError(PM_ERR_STAMPS, true, "Kron Radii are not finite (failure to converge).");
     903        psFree (stats);
     904        psFree (flux1);
     905        psFree (flux2);
     906        psFree (norm1);
     907        psFree (norm2);
     908        return false;
     909    }
     910
     895911    // Compare the Kron Radii (R1 & R2) above to the FWHMs : if they are too discrepant, we will need to rescale
    896912    psLogMsg ("psModules.imcombine", PS_LOG_DETAIL, "Kron Radii vs FWHMs 1: fwhm: %f, kron %f\n", fwhm1, R1);
     
    901917    stamps->normWindow2 = 2.75*R2;
    902918    psLogMsg ("psModules.imcombine", PS_LOG_DETAIL, "Windows from Kron Radii: %f for 1, %f for 2\n", stamps->normWindow1, stamps->normWindow2);
     919
    903920
    904921    // if the calculated normWindows are too large, we will fall off the stamps.  In this case, we need to try again.
     
    913930        psFree (norm2);
    914931        return false;
    915     }
    916 
    917     if (!isfinite(R1) || !isfinite(R2)) {
    918         psError(PM_ERR_STAMPS, true, "Kron Radii are not finite (failure to converge).");
    919         psFree (stats);
    920         psFree (flux1);
    921         psFree (flux2);
    922         psFree (norm1);
    923         psFree (norm2);
    924         return false;
    925932    }
    926933
  • branches/eam_branches/ipp-20110505/psModules/src/objects/models

    • Property svn:mergeinfo deleted
  • branches/eam_branches/ipp-20110505/psconfig/psbuild

    r31068 r31587  
    386386    if (@ARGV != 2) { die "USAGE: psbuild -bootstrap (install_dir)\n"; }
    387387    $psconfdir = $ARGV[1];
     388    die "Target directory must be absolute, not relative: $psconfdir\n" unless $psconfdir =~ m|^/|;
    388389
    389390    # copy psconfig.csh and psconfig.bash to psconfdir
  • branches/eam_branches/ipp-20110505/pstamp/scripts/pstamp_job_run.pl

    r30850 r31587  
    146146            # user required uncensored but since stage isn't chip we can't rebuild them
    147147            my_die("uncensored inputs not available for job $job_id", $job_id, $PSTAMP_NOT_AVAILABLE, 'stop');
    148         } elsif (($options & $PSTAMP_REQUEST_UNCENSORED) and ($params->{state} eq 'update') and ($stage ne 'chip')) {
     148        } elsif (($options & $PSTAMP_REQUEST_UNCENSORED) and ($params->{state} ne 'full') and ($stage ne 'chip')) {
    149149            # we can only restore pixels for chip stage images if the data has been updated.
    150             # XXX: this test is not quite good enough. If all components have been updated then the
    151             # state will be 'full' But this will get us going.
    152             print "Run state is update: will make stamps from destreaked $stage images.\n";
     150            # the data will have been updated if $params->{state} (the state when the job was queued) is not 'full'
     151            # XXX: we should probably be looking explicitly at the job and checking for a dep_id
     152            print "Run state was $params->{state}: will make stamps from destreaked $stage images.\n";
    153153            # make stamps from uncensored images
    154154            $muggle = 0;
  • branches/eam_branches/ipp-20110505/pstamp/scripts/pstampparse.pl

    r30793 r31587  
    754754    if ( $num_jobs == 0 ) {
    755755        print STDERR "no jobs for row $rownum\n" if $verbose;
    756         insertFakeJobForRow($row, 1, $PSTAMP_NO_JOBS_QUEUED);
     756        insertFakeJobForRow($row, 1, $PSTAMP_NO_IMAGE_MATCH);
    757757        $num_jobs = 1;
     758        $row->{job_num} = 1;
    758759    }
    759760    return $num_jobs;
Note: See TracChangeset for help on using the changeset viewer.