Commit 30a50eba authored by Philippe Veber

retrieval of each model's nodes for bppseqgen

parent 25fa66f4
......@@ -4,19 +4,21 @@ open Core
open File_formats
let env = docker_image ~account:"carinerey" ~name:"bppsuite:06182018" ()
let env = docker_image ~account:"carinerey" ~name:"bppsuite" ~tag:"06182018" ()
let assign k v =
  seq ~sep:"=" [ string k ; v ]
let conf_file_bppseqgen ~tree ~nb_sites ~config =
  seq ~sep:"\n" [
    assign "input.tree.file" (dep tree) ;
    assign "output.sequence.file" dest ;
    assign "number_of_sites" (int nb_sites) ;
    string config ;
  ]
  seq ~sep:"\n" (
    [
      assign "input.tree.file" (dep tree) ;
      assign "output.sequence.file" dest ;
      assign "number_of_sites" (int nb_sites) ;
    ]
    @ config
  )
let bppseqgen ~nb_sites ~tree ~config : nucleotide_fasta workflow =
  workflow ~descr:"bppsuite.bppseqgen" [
......
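
A minimal sketch of the new calling convention (not part of the commit): the config argument is now a list of template fragments that conf_file_bppseqgen appends after its fixed assignments and joins with newlines. It assumes the module context above; placeholder_tree is a hypothetical nhx workflow, and both option lines are taken from the configuration strings further down in this commit.

(* Sketch only: build the extra bppseqgen options as a template list
   instead of one big string. Each fragment ends up on its own line of the
   generated configuration file. *)
let example_config = [
  string "nonhomogeneous = general" ;
  string "rate_distribution=Constant()" ;
]

let _example = bppseqgen ~nb_sites:100 ~tree:placeholder_tree ~config:example_config
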
......@@ -5,7 +5,7 @@ open File_formats
val bppseqgen :
  nb_sites:int ->
  tree:nhx workflow ->
  config: string ->
  config:Bistro.Template.t list ->
  nucleotide_fasta workflow
val fna2faa :
......
......@@ -10,30 +10,31 @@ let string_of_model m = match m with
| H0 -> "H0"
| Ha -> "Ha"
let bpp_config = function
| H0 -> {|\
let bpp_config_base = {|
alphabet=Codon(letter=DNA)
genetic_code = Standard
input.tree.format=Nhx
output.internal.sequences=no
nonhomogeneous = general
nonhomogeneous.number_of_models = 1
model1=Codon_AAFit(model=K80, fitness=FromModel(model=LGL08_CAT_C1(nbCat=10)))
model1.nodes_id=0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17
nonhomogeneous.root_freq=Fixed()
rate_distribution=Constant()
|};
  | Ha -> {|\
alphabet=Codon(letter=DNA)
genetic_code = Standard
input.tree.format=Nhx
output.internal.sequences=no
nonhomogeneous = general
nonhomogeneous.number_of_models = 2
|}
let bpp_config_H0 = {|nonhomogeneous.number_of_models = 1
model1=Codon_AAFit(model=K80, fitness=FromModel(model=LGL08_CAT_C1(nbCat=10)))
|}
let bpp_config_Ha = {|nonhomogeneous.number_of_models = 2
model1=Codon_AAFit(model=K80, fitness=FromModel(model=LGL08_CAT_C1(nbCat=10)))
model1.nodes_id=15,1,13,14,4
model2=Codon_AAFit(model=K80, fitness=FromModel(model=LGL08_CAT_C7(nbCat=10)))
model2.nodes_id=0,2,3,5,6,7,8,9,10,11,12,16,17
nonhomogeneous.root_freq=Fixed()
rate_distribution=Constant()
|};
|}
let bpp_config nodes hyp = [
  string bpp_config_base ;
  insert nodes ;
  string (
    match hyp with
    | H0 -> bpp_config_H0
    | Ha -> bpp_config_Ha
  ) ;
]
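
A usage sketch mirroring the pipeline change below (tree_dataset is a placeholder value): bpp_config now returns a three-part template list, base options, then the node assignments spliced in from a per-dataset file, then the hypothesis-specific model block, instead of one hardcoded string per hypothesis.

(* Sketch only: assemble the H0 configuration the way derive_from_model does.
   Tree_dataset.nodes is expected to reference the node-ids file written by
   the tree preparation script further down in this commit. *)
let _config : Bistro.Template.t list =
  let nodes = Tree_dataset.nodes tree_dataset H0 in
  bpp_config nodes H0
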
......@@ -33,7 +33,8 @@ let repo_of_raw_dataset (raw_dataset:raw_dataset) =
let derive_from_model ~model ~tree ~tree_dataset ~preview =
  let nb_sites = if preview then 10 else 100 in
  let config = Convergence_hypothesis.bpp_config model in
  let nodes = Tree_dataset.nodes tree_dataset model in
  let config = Convergence_hypothesis.bpp_config nodes model in
  let tree = Tree_dataset.tree tree_dataset `Simulation in
  let fna = Bppsuite.bppseqgen ~nb_sites ~tree ~config in
  let faa = Bppsuite.fna2faa ~fna in
......
......@@ -140,7 +140,7 @@ t.write(format=1, features=["ND"], outfile = "%s/tree.only_node_ids.nhx" %(OutDi
#### -> 1 line: all nodes ids
all_node_ids = range(nodeId-1)
with open("%s/tree.H0.node_ids" %(OutDirName), "w") as output_H0_node_ids:
output_H0_node_ids.write(",".join(map(str, all_node_ids)))
output_H0_node_ids.write("model1.nodes_id="+",".join(map(str, all_node_ids)))
### tree.Ha.node_ids: alternative hypothesis
#### -> 2 lines: 1) node ids under the ancestral model
......@@ -149,9 +149,9 @@ with open("%s/tree.H0.node_ids" %(OutDirName), "w") as output_H0_node_ids:
with open("%s/tree.Ha.node_ids" %(OutDirName), "w") as output_Ha_node_ids:
conv_node_ids = [n.ND for n in t.search_nodes(Condition = "1")]
not_conv_node_ids = [i for i in all_node_ids if i not in conv_node_ids]
output_Ha_node_ids.write(",".join(map(str, conv_node_ids)))
output_Ha_node_ids.write("model2.nodes_id="+",".join(map(str, conv_node_ids)))
output_Ha_node_ids.write("\n")
output_Ha_node_ids.write(",".join(map(str, not_conv_node_ids)))
output_Ha_node_ids.write("model1.nodes_id="+",".join(map(str, not_conv_node_ids)))
######### output trees for detection
......
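
After this change the node-ids files carry complete modelN.nodes_id= assignments, so they can be inserted verbatim between the base options and the model block of the bppseqgen configuration. As an illustration with made-up node ids, tree.Ha.node_ids would now contain two lines such as:

model2.nodes_id=3,7
model1.nodes_id=0,1,2,4,5,6,8
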