
Commit d17b9487 authored by Carine Rey

try different global Ne (continuation)

parent 53634b70
@@ -29,11 +29,11 @@ type t =
| HaPC_NeBig
let string_of_model m = match m with
| H0_NeG1 -> "H0_NeG1"
| H0_NeG2 -> "H0_NeG2"
| H0_NeG3 -> "H0_NeG3"
| H0_NeG4 -> "H0_NeG4"
| H0_NeG5 -> "H0_NeG5"
| HaPC_NeG1 -> "HaPC_NeG1"
| HaPC_NeG2 -> "HaPC_NeG2"
| HaPC_NeG3 -> "HaPC_NeG3"
@@ -130,14 +130,8 @@ let derive_from_tree ~tree_dir ~tree ~profile ~preview ~use_concat ~ns ~no_Ne ~n
[
H0_NeG1 ;
H0_NeG2 ;
H0_NeG3 ;
H0_NeG4 ;
H0_NeG5 ;
HaPC_NeG1;
HaPC_NeG2;
HaPC_NeG3;
HaPC_NeG4;
HaPC_NeG5;
HaPCOC ;
];
( if no_HaPC then
@@ -52,6 +52,7 @@ type post_analyses_simu = {
}
let r_env = docker_image ~account:"carinerey" ~name:"r_basics" ~tag:"08012018" ()
+let py_env = docker_image ~account:"carinerey" ~name:"python_basics" ~tag:"07252018" ()
let is_hyp ~hyp (dataset_results :dataset_res) =
let model_prefix = dataset_results.model_prefix in
@@ -63,10 +64,23 @@ let build_cmd_t_choices (opt_name : string) mr_option =
| None -> []
let make_t_choices ~h0_mr ~h0_NeBig_mr ~h0_NeSmall_mr ~haPCOC_mr ~haPC_mr ~haPC_NeBig_mr ~haPC_NeSmall_mr ~h0_NeBigInSmall_mr
-~h0_NeSmallInBig_mr ~haPC_NeBigInSmall_mr ~haPC_NeSmallInBig_mr () : post_analyses_dir directory workflow =
+~h0_NeSmallInBig_mr ~haPC_NeBigInSmall_mr ~haPC_NeSmallInBig_mr
+~h0_NeG1_mr ~h0_NeG2_mr ~h0_NeG3_mr ~h0_NeG4_mr ~h0_NeG5_mr
+~haPC_NeG1_mr ~haPC_NeG2_mr ~haPC_NeG3_mr ~haPC_NeG4_mr ~haPC_NeG5_mr () : post_analyses_dir directory workflow =
let env = r_env in
let out = dest // "out" in
let cmd_mr = List.map [
("--H0_NeG1" , h0_NeG1_mr );
("--H0_NeG2" , h0_NeG2_mr );
("--H0_NeG3" , h0_NeG3_mr );
("--H0_NeG4" , h0_NeG4_mr );
("--H0_NeG5" , h0_NeG5_mr );
("--HaPC_NeG1" , haPC_NeG1_mr );
("--HaPC_NeG2" , haPC_NeG2_mr );
("--HaPC_NeG3" , haPC_NeG3_mr );
("--HaPC_NeG4" , haPC_NeG4_mr );
("--HaPC_NeG5" , haPC_NeG5_mr );
("--H0" , h0_mr );
("--H0NeBig" , h0_NeBig_mr );
("--H0NeSmall" , h0_NeSmall_mr );
@@ -131,27 +145,31 @@ let group_simu_infos ~simu_infos_l : simu_infos directory workflow =
]
let plot_trees ~reinfered_tree_l : plot_trees directory workflow =
-let env = r_env in
+let env_r = r_env in
+let env_py = py_env in
let cmd_cp_l = List.map reinfered_tree_l ~f:(fun rt -> [
cmd "cp" [dep rt.reinfered_tree ; tmp // (rt.tree_prefix ^"@"^ rt.model_prefix ^ ".nw")];
cmd "cp" [dep rt.input_tree ; tmp // (rt.tree_prefix ^"@input_tree.nw")]
]) |> List.concat
in
let out = dest // "out" in
-workflow ~descr:"post_analyses.plot_trees" [
-docker env (
-and_list ([
-[mkdir_p dest];
-[mkdir_p tmp ];
-cmd_cp_l;
-[cmd "Rscript" [
-file_dump (string Scripts.plot_trees) ;
-opt "--input_dir" ident tmp;
-opt "--out " ident out;
-];]
-] |> List.concat)
-)
-]
+(* each command now carries its own ~env: the R plotting step and the new
+   python branch-length extraction step run in their respective docker images *)
+workflow ~descr:"post_analyses.plot_trees" ([
+[mkdir_p dest];
+[mkdir_p tmp ];
+cmd_cp_l;
+[cmd "Rscript" ~env:env_r [
+file_dump (string Scripts.plot_trees) ;
+opt "--input_dir" ident tmp;
+opt "--out" ident out;
+];
+];
+[cmd "python" ~env:env_py [
+file_dump (string Scripts.get_bl_trees) ;
+opt "--input_dir" ident tmp;
+opt "--out" ident out;
+];
+];
+] |> List.concat)
let get_merged_results_opt hx = match hx with
| Some w -> Some w.merged_results
@@ -169,10 +187,27 @@ type res_all_hyp = {
h0_NeSmallInBig_res : dataset_res option ;
ha_PC_NeBigInSmall_res : dataset_res option ;
ha_PC_NeSmallInBig_res : dataset_res option ;
+h0_NeG1_res : dataset_res option ;
+h0_NeG2_res : dataset_res option ;
+h0_NeG3_res : dataset_res option ;
+h0_NeG4_res : dataset_res option ;
+h0_NeG5_res : dataset_res option ;
+haPC_NeG1_res : dataset_res option ;
+haPC_NeG2_res : dataset_res option ;
+haPC_NeG3_res : dataset_res option ;
+haPC_NeG4_res : dataset_res option ;
+haPC_NeG5_res : dataset_res option ;
}
let make_t_choices_per_couple {h0_res; h0_NeBig_res; h0_NeSmall_res; ha_PC_res; ha_PCOC_res; ha_PC_NeBig_res ; ha_PC_NeSmall_res;
-h0_NeBigInSmall_res; h0_NeSmallInBig_res; ha_PC_NeBigInSmall_res; ha_PC_NeSmallInBig_res} =
+h0_NeBigInSmall_res; h0_NeSmallInBig_res; ha_PC_NeBigInSmall_res; ha_PC_NeSmallInBig_res;
+h0_NeG1_res; h0_NeG2_res; h0_NeG3_res; h0_NeG4_res; h0_NeG5_res;
+haPC_NeG1_res; haPC_NeG2_res; haPC_NeG3_res; haPC_NeG4_res; haPC_NeG5_res;
+} =
let h0_mr = get_merged_results_opt h0_res in
let h0_NeBig_mr = get_merged_results_opt h0_NeBig_res in
@@ -187,36 +222,71 @@ let make_t_choices_per_couple {h0_res; h0_NeBig_res; h0_NeSmall_res; ha_PC_res;
let haPC_NeSmall_mr = get_merged_results_opt ha_PC_NeSmall_res in
let haPC_NeBigInSmall_mr = get_merged_results_opt ha_PC_NeBigInSmall_res in
let haPC_NeSmallInBig_mr = get_merged_results_opt ha_PC_NeSmallInBig_res in
+let h0_NeG1_mr = get_merged_results_opt h0_NeG1_res in
+let h0_NeG2_mr = get_merged_results_opt h0_NeG2_res in
+let h0_NeG3_mr = get_merged_results_opt h0_NeG3_res in
+let h0_NeG4_mr = get_merged_results_opt h0_NeG4_res in
+let h0_NeG5_mr = get_merged_results_opt h0_NeG5_res in
+let haPC_NeG1_mr = get_merged_results_opt haPC_NeG1_res in
+let haPC_NeG2_mr = get_merged_results_opt haPC_NeG2_res in
+let haPC_NeG3_mr = get_merged_results_opt haPC_NeG3_res in
+let haPC_NeG4_mr = get_merged_results_opt haPC_NeG4_res in
+let haPC_NeG5_mr = get_merged_results_opt haPC_NeG5_res in
make_t_choices ~h0_mr ~h0_NeBig_mr ~h0_NeSmall_mr ~haPCOC_mr ~haPC_mr ~haPC_NeBig_mr ~haPC_NeSmall_mr ~h0_NeBigInSmall_mr
-~h0_NeSmallInBig_mr ~haPC_NeBigInSmall_mr ~haPC_NeSmallInBig_mr ()
+~h0_NeSmallInBig_mr ~haPC_NeBigInSmall_mr ~haPC_NeSmallInBig_mr
+~h0_NeG1_mr ~h0_NeG2_mr ~h0_NeG3_mr ~h0_NeG4_mr ~h0_NeG5_mr
+~haPC_NeG1_mr ~haPC_NeG2_mr ~haPC_NeG3_mr ~haPC_NeG4_mr ~haPC_NeG5_mr
+()
let get_t_choices ~(dataset_results_l: dataset_res list) : t_choices option =
let h0_res = List.find dataset_results_l (is_hyp ~hyp: "H0" ) in
let h0_NeBig_res = List.find dataset_results_l (is_hyp ~hyp: "H0_NeBig" ) in
let h0_NeSmall_res = List.find dataset_results_l (is_hyp ~hyp: "H0_NeSmall" ) in
let h0_NeBigInSmall_res = List.find dataset_results_l (is_hyp ~hyp: "H0_BigNeInSmallNe") in
let h0_NeSmallInBig_res = List.find dataset_results_l (is_hyp ~hyp: "H0_SmallNeInBigNe") in
let ha_PCOC_res = List.find dataset_results_l (is_hyp ~hyp: "HaPCOC" ) in
let ha_PC_res = List.find dataset_results_l (is_hyp ~hyp: "HaPC" ) in
let ha_PC_NeBig_res = List.find dataset_results_l (is_hyp ~hyp: "HaPC_NeBig" ) in
let ha_PC_NeSmall_res = List.find dataset_results_l (is_hyp ~hyp: "HaPC_NeSmall" ) in
let ha_PC_NeBigInSmall_res = List.find dataset_results_l (is_hyp ~hyp: "HaPC_BigNeInSmallNe") in
let ha_PC_NeSmallInBig_res = List.find dataset_results_l (is_hyp ~hyp: "HaPC_SmallNeInBigNe") in
-match (h0_res, ha_PCOC_res) with
-| (Some h0, Some _) ->
+let h0_NeG1_res = List.find dataset_results_l (is_hyp ~hyp: "H0_NeG1" ) in
+let h0_NeG2_res = List.find dataset_results_l (is_hyp ~hyp: "H0_NeG2" ) in
+let h0_NeG3_res = List.find dataset_results_l (is_hyp ~hyp: "H0_NeG3" ) in
+let h0_NeG4_res = List.find dataset_results_l (is_hyp ~hyp: "H0_NeG4" ) in
+let h0_NeG5_res = List.find dataset_results_l (is_hyp ~hyp: "H0_NeG5" ) in
+let haPC_NeG1_res = List.find dataset_results_l (is_hyp ~hyp: "HaPC_NeG1" ) in
+let haPC_NeG2_res = List.find dataset_results_l (is_hyp ~hyp: "HaPC_NeG2" ) in
+let haPC_NeG3_res = List.find dataset_results_l (is_hyp ~hyp: "HaPC_NeG3" ) in
+let haPC_NeG4_res = List.find dataset_results_l (is_hyp ~hyp: "HaPC_NeG4" ) in
+let haPC_NeG5_res = List.find dataset_results_l (is_hyp ~hyp: "HaPC_NeG5" ) in
+match ha_PCOC_res with
+| Some ha_PCOC ->
let t_choices_dir = make_t_choices_per_couple {h0_res; h0_NeBig_res; h0_NeSmall_res; ha_PC_res; ha_PCOC_res; ha_PC_NeBig_res ; ha_PC_NeSmall_res;
-h0_NeBigInSmall_res; h0_NeSmallInBig_res; ha_PC_NeBigInSmall_res; ha_PC_NeSmallInBig_res} in
+h0_NeBigInSmall_res; h0_NeSmallInBig_res; ha_PC_NeBigInSmall_res; ha_PC_NeSmallInBig_res;
+h0_NeG1_res; h0_NeG2_res; h0_NeG3_res; h0_NeG4_res; h0_NeG5_res;
+haPC_NeG1_res; haPC_NeG2_res; haPC_NeG3_res; haPC_NeG4_res; haPC_NeG5_res; } in
let t_choices_max = t_choices_dir / selector ["out.max_MCC_per_meth.tsv"] in
let t_choices_recall09 = t_choices_dir / selector ["out.recall09_per_meth.tsv"] in
let t_choices_complete = t_choices_dir / selector ["out.complete.tsv"] in
let t_choices_plot = t_choices_dir / selector ["out.pdf"] in
let t_choices_condensed_plot = t_choices_dir / selector ["out_condensed.pdf"] in
-let tree_prefix = h0.tree_prefix in
+let tree_prefix = ha_PCOC.tree_prefix in
Some {t_choices_max; t_choices_recall09; t_choices_complete ; t_choices_plot; t_choices_condensed_plot; tree_prefix}
| _ -> None
@@ -11,16 +11,26 @@ library("cowplot")
date = format(Sys.time(), format="%Y-%m-%d %X")
option_list = list(
make_option(c("--H0") , type="character", default=NA, help="merged_results H0", metavar="character"),
make_option(c("--H0NeBig") , type="character", default=NA, help="merged_results H0NeBig", metavar="character"),
make_option(c("--H0NeSmall") , type="character", default=NA, help="merged_results H0NeSmall", metavar="character"),
make_option(c("--H0NeBigInSmall") , type="character", default=NA, help="merged_results H0NeBigInSmall", metavar="character"),
make_option(c("--H0NeSmallInBig") , type="character", default=NA, help="merged_results H0NeSmallInBig", metavar="character"),
make_option(c("--HaPCOC") , type="character", default=NA, help="merged_results HaPCOC", metavar="character"),
make_option(c("--HaPC") , type="character", default=NA, help="merged_results HaPC", metavar="character"),
make_option(c("--HaPCNeBig") , type="character", default=NA, help="merged_results HaPCNeBig", metavar="character"),
make_option(c("--HaPCNeSmall") , type="character", default=NA, help="merged_results HaPCNeSmall", metavar="character"),
make_option(c("--HaPCNeBigInSmall") , type="character", default=NA, help="merged_results HaPCNeBigInSmall", metavar="character"),
make_option(c("--H0_NeG1" ) , type="character", default=NA, help="merged_results H0_NeG1)" , metavar="character"),
make_option(c("--H0_NeG2" ) , type="character", default=NA, help="merged_results H0_NeG2)" , metavar="character"),
make_option(c("--H0_NeG3" ) , type="character", default=NA, help="merged_results H0_NeG3)" , metavar="character"),
make_option(c("--H0_NeG4" ) , type="character", default=NA, help="merged_results H0_NeG4)" , metavar="character"),
make_option(c("--H0_NeG5" ) , type="character", default=NA, help="merged_results H0_NeG5)" , metavar="character"),
make_option(c("--HaPC_NeG1" ) , type="character", default=NA, help="merged_results HaPC_NeG1" , metavar="character"),
make_option(c("--HaPC_NeG2" ) , type="character", default=NA, help="merged_results HaPC_NeG2" , metavar="character"),
make_option(c("--HaPC_NeG3" ) , type="character", default=NA, help="merged_results HaPC_NeG3" , metavar="character"),
make_option(c("--HaPC_NeG4" ) , type="character", default=NA, help="merged_results HaPC_NeG4" , metavar="character"),
make_option(c("--HaPC_NeG5" ) , type="character", default=NA, help="merged_results HaPC_NeG5" , metavar="character"),
make_option(c("--H0" ) , type="character", default=NA, help="merged_results H0" , metavar="character"),
make_option(c("--H0NeBig" ) , type="character", default=NA, help="merged_results H0NeBig" , metavar="character"),
make_option(c("--H0NeSmall" ) , type="character", default=NA, help="merged_results H0NeSmall" , metavar="character"),
make_option(c("--H0NeBigInSmall" ) , type="character", default=NA, help="merged_results H0NeBigInSmall" , metavar="character"),
make_option(c("--H0NeSmallInBig" ) , type="character", default=NA, help="merged_results H0NeSmallInBig" , metavar="character"),
make_option(c("--HaPCOC" ) , type="character", default=NA, help="merged_results HaPCOC" , metavar="character"),
make_option(c("--HaPC" ) , type="character", default=NA, help="merged_results HaPC" , metavar="character"),
make_option(c("--HaPCNeBig" ) , type="character", default=NA, help="merged_results HaPCNeBig" , metavar="character"),
make_option(c("--HaPCNeSmall" ) , type="character", default=NA, help="merged_results HaPCNeSmall" , metavar="character"),
make_option(c("--HaPCNeBigInSmall") , type="character", default=NA, help="merged_results HaPCNeBigInSmall", metavar="character"),
make_option(c("--HaPCNeSmallInBig") , type="character", default=NA, help="merged_results HaPCNeSmallInBig", metavar="character"),
make_option(c("-o","--out"), type="character", default="out",
@@ -32,9 +42,9 @@ opt = parse_args(opt_parser);
print(opt)
-if (is.null(opt$H0)){
+if (is.na(opt$H0) & is.na(opt$H0_NeG1)){
print_help(opt_parser)
stop("At least one argument must be supplied (H0 input file)", call.=FALSE)
stop("At least one argument must be supplied (H0_NeG1 or H0 input file)", call.=FALSE)
}
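Side note on the is.null -> is.na switch above: optparse fills every unsupplied option with its declared default, so with default=NA the parsed slot holds NA, never NULL, and is.null(opt$H0) could never fire. A minimal sketch of the difference (the --x option is hypothetical, not part of this script):

library(optparse)
opts = parse_args(OptionParser(option_list = list(
    make_option("--x", type="character", default=NA)   # hypothetical option
)), args = character(0))   # simulate a call with no arguments
is.null(opts$x)   # FALSE: the slot exists and holds the default NA
is.na(opts$x)     # TRUE: this is the test that detects a missing option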
if (is.na(opt$HaPCOC)){
print_help(opt_parser)
@@ -60,9 +70,19 @@ read_hyp = function(opt_name) {
}
}
+df_H0_NeG1_melt = read_hyp(opt$H0_NeG1)
+df_H0_NeG2_melt = read_hyp(opt$H0_NeG2)
+df_H0_NeG3_melt = read_hyp(opt$H0_NeG3)
+df_H0_NeG4_melt = read_hyp(opt$H0_NeG4)
+df_H0_NeG5_melt = read_hyp(opt$H0_NeG5)
+df_HaPC_NeG1_melt = read_hyp(opt$HaPC_NeG1)
+df_HaPC_NeG2_melt = read_hyp(opt$HaPC_NeG2)
+df_HaPC_NeG3_melt = read_hyp(opt$HaPC_NeG3)
+df_HaPC_NeG4_melt = read_hyp(opt$HaPC_NeG4)
+df_HaPC_NeG5_melt = read_hyp(opt$HaPC_NeG5)
df_H0_melt = read_hyp(opt$H0)
df_H0NeBig_melt = read_hyp(opt$H0NeBig)
df_H0NeSmall_melt = read_hyp(opt$H0NeSmall)
df_H0NeBigInSmall_melt = read_hyp(opt$H0NeBigInSmall)
df_H0NeSmallInBig_melt = read_hyp(opt$H0NeSmallInBig)
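read_hyp (its body is truncated in the hunk above) evidently returns NULL when its option slot is NA, which is what lets the downstream build_df_* helpers skip absent hypotheses. A minimal sketch of that guarded-reader pattern, under the assumption that the real function reads a tsv and melts it (read_hyp_sketch and its read.table call are illustrative, not the script's actual body):

read_hyp_sketch = function(opt_name) {
    if (is.na(opt_name)) return(NULL)   # option not supplied on the command line
    df = read.table(opt_name, header = TRUE, sep = "\t")
    reshape2::melt(df)                  # long format, as the df_*_melt names suggest
}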
@@ -94,6 +114,14 @@ build_df_dist_couple = function (df_h0,df_ha,name) {
return(df_melt)
}
+df_d_H0HaPCOC_NeG1 = build_df_dist_couple(df_H0_NeG1_melt, df_HaPCOC_melt, "H0/HaPCOC NeG1")
+df_d_H0HaPC_NeG1 = build_df_dist_couple(df_H0_NeG1_melt, df_HaPC_NeG1_melt, "H0/HaPC NeG1")
+df_d_H0HaPC_NeG2 = build_df_dist_couple(df_H0_NeG2_melt, df_HaPC_NeG2_melt, "H0/HaPC NeG2")
+df_d_H0HaPC_NeG3 = build_df_dist_couple(df_H0_NeG3_melt, df_HaPC_NeG3_melt, "H0/HaPC NeG3")
+df_d_H0HaPC_NeG4 = build_df_dist_couple(df_H0_NeG4_melt, df_HaPC_NeG4_melt, "H0/HaPC NeG4")
+df_d_H0HaPC_NeG5 = build_df_dist_couple(df_H0_NeG5_melt, df_HaPC_NeG5_melt, "H0/HaPC NeG5")
df_d_H0HaPCOC = build_df_dist_couple(df_H0_melt, df_HaPCOC_melt, "H0/HaPCOC")
df_d_H0HaPC = build_df_dist_couple(df_H0_melt, df_HaPC_melt, "H0/HaPC")
df_d_H0HaPC_NeBig = build_df_dist_couple(df_H0NeBig_melt, df_HaPCNeBig_melt, "H0/HaPC NeBig")
@@ -102,7 +130,13 @@ df_d_H0HaPC_NeBigInSmall = build_df_dist_couple(df_H0NeBigInSmall_melt, df_HaPCN
df_d_H0HaPC_NeSmallInBig = build_df_dist_couple(df_H0NeSmallInBig_melt, df_HaPCNeSmallInBig_melt, "H0/HaPC NeSmallInBig")
df_d = rbind.data.frame(df_d_H0HaPC, df_d_H0HaPCOC,df_d_H0HaPC_NeBig,df_d_H0HaPC_NeSmall,
-df_d_H0HaPC_NeBigInSmall,df_d_H0HaPC_NeSmallInBig)
+df_d_H0HaPC_NeBigInSmall, df_d_H0HaPC_NeSmallInBig,
+df_d_H0HaPCOC_NeG1,
+df_d_H0HaPC_NeG1, df_d_H0HaPC_NeG2, df_d_H0HaPC_NeG3,
+df_d_H0HaPC_NeG4, df_d_H0HaPC_NeG5)
df_d = df_d[order(df_d$methode),]
@@ -148,15 +182,28 @@ calc_TN_FP_TP_FN = function(t, df_H0_melt, df_Ha_melt){
build_df_couple = function (df_h0,df_ha,name) {
print(name)
if ((! is.null(df_h0)) & (! is.null(df_ha))) {
df = do.call(rbind.data.frame,lapply(seq(0,1,0.01), calc_TN_FP_TP_FN, df_H0_melt = df_h0, df_Ha_melt = df_ha))
df$couple = name
print(head(df))
} else {
df = NULL
print(df)
}
return(df)
}
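build_df_couple sweeps calc_TN_FP_TP_FN over 101 thresholds and stacks the per-threshold confusion counts into one frame. The same pattern on toy data (scores_h0, scores_ha and confusion_at are hypothetical stand-ins, not part of the script):

set.seed(1)
scores_h0 = runif(100)            # null scores
scores_ha = runif(100, 0.3, 1)    # alternative scores, shifted upward
confusion_at = function(t) data.frame(
    threshold = t,
    TN = sum(scores_h0 <  t), FP = sum(scores_h0 >= t),
    FN = sum(scores_ha <  t), TP = sum(scores_ha >= t))
df_sweep = do.call(rbind.data.frame, lapply(seq(0, 1, 0.01), confusion_at))
head(df_sweep)   # one row of confusion counts per threshold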
+df_H0HaPCOC_NeG1 = build_df_couple(df_H0_NeG1_melt, df_HaPCOC_melt, "H0/HaPCOC NeG1")
+df_H0HaPC_NeG1 = build_df_couple(df_H0_NeG1_melt, df_HaPC_NeG1_melt, "H0/HaPC NeG1")
+df_H0HaPC_NeG2 = build_df_couple(df_H0_NeG2_melt, df_HaPC_NeG2_melt, "H0/HaPC NeG2")
+df_H0HaPC_NeG3 = build_df_couple(df_H0_NeG3_melt, df_HaPC_NeG3_melt, "H0/HaPC NeG3")
+df_H0HaPC_NeG4 = build_df_couple(df_H0_NeG4_melt, df_HaPC_NeG4_melt, "H0/HaPC NeG4")
+df_H0HaPC_NeG5 = build_df_couple(df_H0_NeG5_melt, df_HaPC_NeG5_melt, "H0/HaPC NeG5")
df_H0HaPC = build_df_couple(df_H0_melt, df_HaPC_melt, "H0/HaPC")
df_H0HaPC_NeBig = build_df_couple(df_H0NeBig_melt, df_HaPCNeBig_melt, "H0/HaPC NeBig")
df_H0HaPC_NeSmall = build_df_couple(df_H0NeSmall_melt, df_HaPCNeSmall_melt, "H0/HaPC NeSmall")
@@ -165,15 +212,17 @@ df_H0HaPC_NeSmallInBig = build_df_couple(df_H0NeSmallInBig_melt, df_HaPCNeSmallI
df_H0HaPCOC = build_df_couple(df_H0_melt, df_HaPCOC_melt, "H0/HaPCOC")
-df = rbind.data.frame(df_H0HaPC, df_H0HaPCOC,df_H0HaPC_NeBig,df_H0HaPC_NeSmall,
-df_H0HaPC_NeBigInSmall,df_H0HaPC_NeSmallInBig
-)
+df_l = list(df_H0HaPC, df_H0HaPCOC, df_H0HaPC_NeBig, df_H0HaPC_NeSmall,
+df_H0HaPC_NeBigInSmall, df_H0HaPC_NeSmallInBig, df_H0HaPCOC_NeG1,
+df_H0HaPC_NeG1, df_H0HaPC_NeG2, df_H0HaPC_NeG3, df_H0HaPC_NeG4, df_H0HaPC_NeG5)
+df_l = df_l[!sapply(df_l, is.null)]  # drop hypotheses that were not supplied
+df = do.call("rbind", df_l)
print(head(df))
print(tail(df))
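The filter above uses !sapply(df_l, is.null) rather than the tempting df_l[-which(sapply(df_l, is.null))] form: when nothing is NULL, which() returns integer(0), and x[-integer(0)] selects zero elements, silently emptying the list. A small illustration (x and y are toy lists):

x = list(a = 1, b = NULL, c = 3)
x[!sapply(x, is.null)]             # keeps a and c
x[-which(sapply(x, is.null))]      # here also keeps a and c ...
y = list(a = 1, c = 3)             # ... but with no NULLs:
y[-which(sapply(y, is.null))]      # empty list! -integer(0) selects nothing
y[!sapply(y, is.null)]             # safe in both cases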
## Sensitivity (= recall)
df$sens = df$TP / (df$TP+df$FN)
df$sens[is.na(df$sens)] = 0
@@ -274,7 +323,7 @@ plot_out = function(df_out, df_d , df_recall_sup09_per_meth, meths = NULL, suffi
df_d$methode = factor(df_d$methode, levels = meths)
couple_levels = unique(df_out$couple)
-couple_levels = c(couple_levels[couple_levels != "H0/HaPCOC"] , "H0/HaPCOC")
+couple_levels = c(couple_levels[! couple_levels %in% c("H0/HaPCOC","H0/HaPCOC NeG1")] , "H0/HaPCOC", "H0/HaPCOC NeG1")
df_out$couple = factor(df_out$couple, levels = couple_levels)
df_d$couple = factor(df_d$couple, levels = couple_levels)
@@ -35,6 +35,7 @@ input_dir2 = opt$input_dir2
## program... max_t_per_tree
files = paste0(list.files(input_dir))
+if (length(files) > 0) {
print(files)
files = files[grep("tsv", files)]
files_split = strsplit(files, ".", fixed = T)
@@ -42,7 +43,9 @@ files_df = as.data.frame(do.call(rbind, files_split))
print(files_df)
files_df_ok = data.frame(files= paste0(input_dir,"/",files), tree = gsub(".tsv", "",files), profil = opt$profil)
#files_df_ok = data.frame(files= paste0(input_dir,"/",files), tree = files_df$V1, bl = files_df$V1, profil = opt$profil)
+} else {
+stop("ERROR no input files")
+}
condensed_meths = c("PCOC","Diffsel_mean","Identical_LG08","Mutinomial_1MinusLRT","Tdg09_1MinusFDR","Msd_1MinusP","Topological_LG08")
read_dir = function(x) {
@@ -65,7 +68,7 @@ y_labs = "Threshold"
print(head(df))
df_tmp = subset(df, couple == "H0/HaPCOC")
df_tmp = subset(df, couple == "H0/HaPCOC NeG1")
df_t_per_method = do.call(rbind, lapply(split(df_tmp, paste0(df_tmp$methode, df_tmp$tree)),
function(x) {return(x[which.max(x$threshold),c("methode", "threshold","tree","profil","couple")])}))
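df_t_per_method uses the split/lapply/which.max idiom: split the frame by method-and-tree, keep the row with the largest threshold in each group, then rbind the survivors. On toy data (d is hypothetical):

d = data.frame(g = c("a", "a", "b"), threshold = c(0.2, 0.8, 0.5))
do.call(rbind, lapply(split(d, d$g),
        function(x) x[which.max(x$threshold), ]))
# one row per group: (a, 0.8) and (b, 0.5)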
@@ -112,7 +115,6 @@ print("df_t")
print(df_t)
parse_file = function(df_m, df_t) {
df_m_melt = melt(df_m)
df_m_melt$t = mapvalues(df_m_melt$variable, from=df_t$methode, to=df_t$threshold)
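plyr::mapvalues(x, from, to) replaces each element of x matching an entry of from with the corresponding entry of to; here it pairs each method's melted variable with that method's per-method threshold. A one-line toy example (the method names and thresholds are illustrative):

plyr::mapvalues(c("PCOC", "Msd_1MinusP"), from = c("PCOC", "Msd_1MinusP"), to = c(0.8, 0.99))
# returns c("0.8", "0.99"): values are coerced to the common character type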