@@ -1124,7 +1124,7 @@ def run_test(framework='YOLT2',
     # plot
 
     # add geo coords to eall boxes?
-    if test_add_geo_coords:
+    if test_add_geo_coords and len(df_tot) > 0:
         ###########################################
         # !!!!! Skip?
         # json = None
@@ -1544,17 +1544,20 @@ def execute(args, train_cmd1, test_cmd_tot, test_cmd_tot2=''):
                           rotate_boxes=args.rotate_boxes,
                           test_add_geo_coords=args.test_add_geo_coords)
 
-        # save to csv
-        df_tot.to_csv(args.val_df_path_aug, index=False)
-        # get number of files
-        n_files = len(np.unique(df_tot['Loc_Tmp'].values))
-        # n_files = str(len(test_files_locs_list)
-        t4 = time.time()
-        cmd_time_str = '"Length of time to run test for ' \
-            + str(n_files) + ' files = ' \
-            + str(t4 - t3) + ' seconds\n"'
-        print(cmd_time_str[1:-1])
-        os.system('echo ' + cmd_time_str + ' >> ' + args.log_file)
+        if len(df_tot) == 0:
+            print("No detections found!")
+        else:
+            # save to csv
+            df_tot.to_csv(args.val_df_path_aug, index=False)
+            # get number of files
+            n_files = len(np.unique(df_tot['Loc_Tmp'].values))
+            # n_files = str(len(test_files_locs_list)
+            t4 = time.time()
+            cmd_time_str = '"Length of time to run test for ' \
+                + str(n_files) + ' files = ' \
+                + str(t4 - t3) + ' seconds\n"'
+            print(cmd_time_str[1:-1])
+            os.system('echo ' + cmd_time_str + ' >> ' + args.log_file)
 
         # run again, if desired
         if len(args.weight_file2) > 0:
@@ -1620,69 +1623,70 @@ def execute(args, train_cmd1, test_cmd_tot, test_cmd_tot2=''):
             sliced = False
         print("test data sliced?", sliced)
 
-        # refine for each plot_thresh
-        for plot_thresh_tmp in args.plot_thresh:
-            print("Plotting at:", plot_thresh_tmp)
-            groupby = 'Image_Path'
-            groupby_cat = 'Category'
-            df_refine = post_process.refine_df(df_tot,
-                                               groupby=groupby,
-                                               groupby_cat=groupby_cat,
-                                               nms_overlap_thresh=args.nms_overlap_thresh,
-                                               plot_thresh=plot_thresh_tmp,
-                                               verbose=False)
-            # make some output plots, if desired
-            if len(args.building_csv_file) > 0:
-                building_csv_file_tmp = args.building_csv_file.split('.')[0] \
-                    + '_plot_thresh_' + str(plot_thresh_tmp).replace('.', 'p') \
-                    + '.csv'
-            else:
-                building_csv_file_tmp = ''
-            if args.n_test_output_plots > 0:
-                post_process.plot_refined_df(df_refine, groupby=groupby,
-                                             label_map_dict=args.label_map_dict_tot,
-                                             outdir=args.results_dir,
-                                             plot_thresh=plot_thresh_tmp,
-                                             show_labels=bool(
-                                                 args.show_labels),
-                                             alpha_scaling=bool(
-                                                 args.alpha_scaling),
-                                             plot_line_thickness=args.plot_line_thickness,
-                                             print_iter=5,
-                                             n_plots=args.n_test_output_plots,
-                                             building_csv_file=building_csv_file_tmp,
-                                             shuffle_ims=bool(
-                                                 args.shuffle_val_output_plot_ims),
-                                             verbose=False)
-
-            # geo coords?
-            if bool(args.test_add_geo_coords):
-                df_refine, json = add_geo_coords.add_geo_coords_to_df(
-                    df_refine,
-                    create_geojson=bool(args.save_json),
-                    inProj_str='epsg:32737', outProj_str='epsg:3857',
-                    # inProj_str='epsg:4326', outProj_str='epsg:3857',
-                    verbose=False)
-
-            # save df_refine
-            outpath_tmp = os.path.join(args.results_dir,
-                                       args.val_prediction_df_refine_tot_root_part +
-                                       '_thresh=' + str(plot_thresh_tmp) + '.csv')
-            # df_refine.to_csv(args.val_prediction_df_refine_tot)
-            df_refine.to_csv(outpath_tmp)
-            print("Num objects at thresh:", plot_thresh_tmp, "=",
-                  len(df_refine))
-            # save json
-            if bool(args.save_json) and (len(json) > 0):
-                output_json_path = os.path.join(args.results_dir,
-                                                args.val_prediction_df_refine_tot_root_part +
-                                                '_thresh=' + str(plot_thresh_tmp) + '.GeoJSON')
-                json.to_file(output_json_path, driver="GeoJSON")
-
-        cmd_time_str = '"Length of time to run refine_test()' + ' ' \
-            + str(time.time() - t8) + ' seconds"'
-        print(cmd_time_str[1:-1])
-        os.system('echo ' + cmd_time_str + ' >> ' + args.log_file)
+        # refine for each plot_thresh (if we have detections)
+        if len(df_tot) > 0:
+            for plot_thresh_tmp in args.plot_thresh:
+                print("Plotting at:", plot_thresh_tmp)
+                groupby = 'Image_Path'
+                groupby_cat = 'Category'
+                df_refine = post_process.refine_df(df_tot,
+                                                   groupby=groupby,
+                                                   groupby_cat=groupby_cat,
+                                                   nms_overlap_thresh=args.nms_overlap_thresh,
+                                                   plot_thresh=plot_thresh_tmp,
+                                                   verbose=False)
+                # make some output plots, if desired
+                if len(args.building_csv_file) > 0:
+                    building_csv_file_tmp = args.building_csv_file.split('.')[0] \
+                        + '_plot_thresh_' + str(plot_thresh_tmp).replace('.', 'p') \
+                        + '.csv'
+                else:
+                    building_csv_file_tmp = ''
+                if args.n_test_output_plots > 0:
+                    post_process.plot_refined_df(df_refine, groupby=groupby,
+                                                 label_map_dict=args.label_map_dict_tot,
+                                                 outdir=args.results_dir,
+                                                 plot_thresh=plot_thresh_tmp,
+                                                 show_labels=bool(
+                                                     args.show_labels),
+                                                 alpha_scaling=bool(
+                                                     args.alpha_scaling),
+                                                 plot_line_thickness=args.plot_line_thickness,
+                                                 print_iter=5,
+                                                 n_plots=args.n_test_output_plots,
+                                                 building_csv_file=building_csv_file_tmp,
+                                                 shuffle_ims=bool(
+                                                     args.shuffle_val_output_plot_ims),
+                                                 verbose=False)
+
+                # geo coords?
+                if bool(args.test_add_geo_coords):
+                    df_refine, json = add_geo_coords.add_geo_coords_to_df(
+                        df_refine,
+                        create_geojson=bool(args.save_json),
+                        inProj_str='epsg:32737', outProj_str='epsg:3857',
+                        # inProj_str='epsg:4326', outProj_str='epsg:3857',
+                        verbose=False)
+
+                # save df_refine
+                outpath_tmp = os.path.join(args.results_dir,
+                                           args.val_prediction_df_refine_tot_root_part +
+                                           '_thresh=' + str(plot_thresh_tmp) + '.csv')
+                # df_refine.to_csv(args.val_prediction_df_refine_tot)
+                df_refine.to_csv(outpath_tmp)
+                print("Num objects at thresh:", plot_thresh_tmp, "=",
+                      len(df_refine))
+                # save json
+                if bool(args.save_json) and (len(json) > 0):
+                    output_json_path = os.path.join(args.results_dir,
+                                                    args.val_prediction_df_refine_tot_root_part +
+                                                    '_thresh=' + str(plot_thresh_tmp) + '.GeoJSON')
+                    json.to_file(output_json_path, driver="GeoJSON")
+
+            cmd_time_str = '"Length of time to run refine_test()' + ' ' \
+                + str(time.time() - t8) + ' seconds"'
+            print(cmd_time_str[1:-1])
+            os.system('echo ' + cmd_time_str + ' >> ' + args.log_file)
 
         # remove or zip test_split_dirs to save space
         if len(test_split_dir_list) > 0:
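
Note: the changes above all guard against an empty detections DataFrame before the CSV export, timing log, and refine/plot loop run. A minimal standalone sketch of that guard pattern, assuming only pandas and numpy; the df_tot name and 'Loc_Tmp' column are taken from the diff, while save_detections and its arguments are purely illustrative:

import time
import numpy as np
import pandas as pd

def save_detections(df_tot, csv_path, t_start):
    """Write detections to CSV and report timing; do nothing if there are no detections."""
    if len(df_tot) == 0:
        print("No detections found!")
        return
    df_tot.to_csv(csv_path, index=False)
    n_files = len(np.unique(df_tot['Loc_Tmp'].values))
    print("Length of time to run test for", n_files, "files =",
          time.time() - t_start, "seconds")

# usage sketch: an empty frame takes the early-return branch instead of raising on np.unique/to_csv
df_empty = pd.DataFrame(columns=['Loc_Tmp', 'Xmin', 'Ymin', 'Xmax', 'Ymax'])
save_detections(df_empty, 'preds.csv', time.time())  # prints "No detections found!"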