Skip to content
Merged

2.5.85 #1923

Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
46 changes: 17 additions & 29 deletions res/TemplateBatchFiles/SelectionAnalyses/BUSTED.bf
Original file line number Diff line number Diff line change
Expand Up @@ -56,9 +56,6 @@ busted.FG = "Test";
busted.BG = "Background";
busted.SRV = "Synonymous site-to-site rates";
busted.background = "background";
busted.unconstrained = "unconstrained";
busted.constrained = "constrained";
busted.optimized_null = "optimized null";
busted.ER = "Posterior prob omega class";
busted.bsER = "Posterior prob omega class by site";
busted.ER2H = "Evidence ratio for 2H";
Expand All @@ -67,8 +64,6 @@ busted.ER23H = "Evidence ratio for 2H+3H";
busted.MG94 = terms.json.mg94xrev_sep_rates;

busted.json.background = busted.background;
busted.json.site_logl = "Site Log Likelihood";
busted.json.evidence_ratios = "Evidence Ratios";
busted.json.srv_posteriors = "Synonymous site-posteriors";
busted.json.srv_viterbi = "Viterbi synonymous rate path";
busted.rate_classes = 3;
Expand All @@ -82,17 +77,17 @@ busted.json = { terms.json.analysis: busted.analysis_description,
busted.json.background: {},
terms.json.fits : {},
terms.json.timers : {},
busted.json.site_logl : {},
busted.json.evidence_ratios: {},
busted.json.site_logl : {}
terms.json.site_logl : {},
terms.json.evidence_ratios: {},
terms.json.site_logl : {}
};


busted.display_orders = {terms.original_name: -1,
terms.json.nucleotide_gtr: 0,
busted.MG94: 1,
busted.unconstrained: 2,
busted.constrained: 3 ,
terms.json.unconstrained: 2,
terms.json.constrained: 3 ,
busted.ER : 4,
busted.bsER : 5,
busted.ER2H : 6,
Expand Down Expand Up @@ -967,7 +962,7 @@ while (!busted.converged) {
busted.tree_ids = estimators.LFObjectGetTrees (busted.full_model[terms.likelihood_function]);
busted.EFV_ids = estimators.LFObjectGetEFV (busted.full_model[terms.likelihood_function]);

(busted.json [busted.json.site_logl])[busted.unconstrained] = busted.ComputeSiteLikelihoods (busted.full_model[terms.likelihood_function]);
(busted.json [terms.json.site_logl])[terms.json.unconstrained] = selection.ComputeSiteLikelihoods (busted.full_model[terms.likelihood_function]);

busted.report_multi_hit (busted.full_model, busted.distribution_for_json, "MultiHit", "alt-mh", busted.branch_length_string, busted.model_parameters);

Expand Down Expand Up @@ -1261,7 +1256,7 @@ while (!busted.converged) {
busted.full_model[terms.parameters] + 9 , // +9 comes from CF3x4
busted.codon_data_info[terms.data.sample_size],
busted.distribution_for_json,
busted.display_orders[busted.unconstrained]);
busted.display_orders[terms.json.unconstrained]);


if (busted.error_sink) {
Expand All @@ -1271,7 +1266,7 @@ while (!busted.converged) {
}

for (_key_, _value_; in; busted.filter_specification) {
selection.io.json_store_branch_attribute(busted.json, busted.unconstrained, terms.branch_length, 0,
selection.io.json_store_branch_attribute(busted.json, terms.json.unconstrained, terms.branch_length, 0,
_key_,
selection.io.extract_branch_info((busted.full_model[terms.branch_length])[_key_], "selection.io.branch.length"));
}
Expand All @@ -1288,11 +1283,11 @@ while (!busted.converged) {

io.ReportProgressMessageMD ("BUSTED", "test", "Performing the constrained (dN/dS > 1 not allowed) model fit");
parameters.SetConstraint (model.generic.GetGlobalParameter (busted.test.bsrel_model , terms.AddCategory (terms.parameters.omega_ratio,busted.rate_classes)), terms.parameters.one, terms.global);
(busted.json [busted.json.site_logl])[busted.constrained] = busted.ComputeSiteLikelihoods (busted.full_model[terms.likelihood_function]);
(busted.json [terms.json.site_logl])[terms.json.constrained] = selection.ComputeSiteLikelihoods (busted.full_model[terms.likelihood_function]);
busted.null_results = estimators.FitExistingLF (busted.full_model[terms.likelihood_function], busted.model_object_map);
(busted.json [busted.json.site_logl])[busted.optimized_null] = busted.ComputeSiteLikelihoods (busted.full_model[terms.likelihood_function]);
(busted.json [terms.json.site_logl])[terms.json.optimized_null] = selection.ComputeSiteLikelihoods (busted.full_model[terms.likelihood_function]);

busted.site_ll_diff = Transpose (((busted.json [busted.json.site_logl])[busted.unconstrained] - (busted.json [busted.json.site_logl])[busted.optimized_null])*2);
busted.site_ll_diff = Transpose (((busted.json [terms.json.site_logl])[terms.json.unconstrained] - (busted.json [terms.json.site_logl])[terms.json.optimized_null])*2);
busted.site_ll_sum = +busted.site_ll_diff;

busted.site_ll_diff = ({Rows (busted.site_ll_diff), 2})["(_MATRIX_ELEMENT_COLUMN_==0)*busted.site_ll_diff[_MATRIX_ELEMENT_ROW_]+(_MATRIX_ELEMENT_COLUMN_==1)_MATRIX_ELEMENT_ROW_"] % 0;
Expand All @@ -1313,7 +1308,7 @@ while (!busted.converged) {
selection.io.stopTimer (busted.json [terms.json.timers], "Constrained BUSTED model fitting");

utility.ForEachPair (busted.filter_specification, "_key_", "_value_",
'selection.io.json_store_branch_attribute(busted.json, busted.constrained, terms.branch_length, 1,
'selection.io.json_store_branch_attribute(busted.json, terms.json.constrained, terms.branch_length, 1,
_key_,
selection.io.extract_branch_info((busted.null_results[terms.branch_length])[_key_], "selection.io.branch.length"));');

Expand Down Expand Up @@ -1373,10 +1368,10 @@ while (!busted.converged) {
busted.null_results[terms.parameters] + 9 , // +9 comes from CF3x4
busted.codon_data_info[terms.data.sample_size],
busted.distribution_for_json,
busted.display_orders[busted.constrained]);
busted.display_orders[terms.json.constrained]);

(busted.json [busted.json.evidence_ratios])[busted.constrained] = busted.EvidenceRatios ( (busted.json [busted.json.site_logl])[busted.unconstrained], (busted.json [busted.json.site_logl])[busted.constrained]);
(busted.json [busted.json.evidence_ratios ])[busted.optimized_null] = busted.EvidenceRatios ( (busted.json [busted.json.site_logl])[busted.unconstrained], (busted.json [busted.json.site_logl])[busted.optimized_null]);
(busted.json [terms.json.evidence_ratios])[terms.json.constrained] = selection.EvidenceRatios ( (busted.json [terms.json.site_logl])[terms.json.unconstrained], (busted.json [terms.json.site_logl])[terms.json.constrained]);
(busted.json [terms.json.evidence_ratios ])[terms.json.optimized_null] = selection.EvidenceRatios ( (busted.json [terms.json.site_logl])[terms.json.unconstrained], (busted.json [terms.json.site_logl])[terms.json.optimized_null]);

if (busted.LRT[terms.LRT] < -0.01) {
parameters.RemoveConstraint (model.generic.GetGlobalParameter (busted.test.bsrel_model , terms.AddCategory (terms.parameters.omega_ratio,busted.rate_classes)));
Expand Down Expand Up @@ -1413,22 +1408,15 @@ return busted.json;

/// HELPERS

//------------------------------------------------------------------------------
// Compute per-site log-likelihoods for an existing likelihood function.
// `id` : string name of the likelihood function; `^id` dereferences it.
// Returns the matrix produced by ConstructCategoryMatrix in
// SITE_LOG_LIKELIHOODS mode (one log-likelihood entry per site pattern).
// NOTE(review): per the surrounding diff this helper appears to be superseded
// by a shared `selection.ComputeSiteLikelihoods` — confirm before reusing.
lfunction busted.ComputeSiteLikelihoods (id) {
ConstructCategoryMatrix (sl, ^id, SITE_LOG_LIKELIHOODS);
return sl;
}

//------------------------------------------------------------------------------

// Likelihood-ratio test between the alternative (`ha`) and null (`h0`)
// log-likelihoods.
// Returns a dict with:
//   terms.LRT     : the statistic 2*(ha - h0)
//   terms.p_value : 0.5*(1 - CChi2(2*(ha-h0), 2)), i.e. the chi^2 (2 df)
//                   tail probability halved — presumably a boundary-corrected
//                   mixture null distribution; confirm against the BUSTED paper.
function busted.ComputeLRT (ha, h0) {
return {terms.LRT : 2*(ha-h0),
terms.p_value : 0.5*(1-CChi2 (2*(ha-h0),2))};
}

//------------------------------------------------------------------------------
// Element-wise evidence ratios between two site log-likelihood row vectors.
// Uses HyPhy's matrix "apply" syntax: for each column, computes
// Exp(ha[col] - h0[col]), i.e. the per-site likelihood ratio alt/null.
// NOTE(review): the surrounding diff replaces call sites with
// `selection.EvidenceRatios` — this local copy may be slated for removal.
lfunction busted.EvidenceRatios (ha, h0) {
return ha["Exp(_MATRIX_ELEMENT_VALUE_-h0[_MATRIX_ELEMENT_COLUMN_])"];
}


//------------------------------------------------------------------------------
lfunction busted.FilteredEvidenceRatios (ha, h0, level) {
Expand Down
84 changes: 80 additions & 4 deletions res/TemplateBatchFiles/SelectionAnalyses/RELAX.bf
Original file line number Diff line number Diff line change
Expand Up @@ -46,7 +46,8 @@ relax.analysis_description = {
Version 4.0 adds support for synonymous rate variation.
Version 4.1 adds further support for multiple hit models.
Version 4.1.1 adds reporting for convergence diagnostics.
Version 4.5 adds support for multiple datasets for joint testing.",
Version 4.5 adds support for multiple datasets for joint testing.
Version 4.6 adds support for branch- and site-level evidence ratio calculation.",
terms.io.version : "4.5",
terms.io.reference : "RELAX: Detecting Relaxed Selection in a Phylogenetic Framework (2015). Mol Biol Evol 32 (3): 820-832",
terms.io.authors : "Sergei L Kosakovsky Pond, Ben Murrell, Steven Weaver and Temple iGEM / UCSD viral evolution g",
Expand Down Expand Up @@ -723,8 +724,6 @@ if (relax.analysis_run_mode != relax.kGroupMode) {



//console.log (relax.model_namespaces);
//explicit loop to avoid re-entrance errors

relax.relax_parameter_terms = {};
relax.bound_weights = {};
Expand All @@ -740,6 +739,8 @@ for (relax.k = 0; relax.k < relax.numbers_of_tested_groups; relax.k += 1) {
relax.filter_names,
None);



for (relax.i = 1; relax.i < relax.rate_classes; relax.i += 1) {
parameters.SetRange (model.generic.GetGlobalParameter (relax.model_object_map[relax.model_nmsp] , terms.AddCategory (terms.parameters.omega_ratio,relax.i)), terms.range01);
}
Expand Down Expand Up @@ -786,6 +787,8 @@ for (relax.k = 0; relax.k < relax.numbers_of_tested_groups; relax.k += 1) {
}
}



relax.model_map = {};
for (relax.index, relax.junk ; in; relax.filter_names) {
relax.model_map [relax.index]= utility.Map (relax.model_to_group_name, "_groupid_", 'utility.Filter (relax.selected_branches[relax.index], "_branchgroup_", "_branchgroup_ == \'" + _groupid_ + "\'")');
Expand Down Expand Up @@ -1222,6 +1225,78 @@ function relax.FitMainTestPair (prompt) {

selection.io.stopTimer (relax.json [terms.json.timers], "RELAX alternative model fitting");

/// START EBF CALCULATION
///=====================================================================================================================================

relax.tree_ids = estimators.LFObjectGetTrees (relax.alternative_model.fit[terms.likelihood_function]);

estimators.RestoreLFStateFromSnapshot (relax.alternative_model.fit[terms.likelihood_function],relax.stashLF);

utility.ToggleEnvVariable ("KEEP_OPTIMAL_ORDER", TRUE);
relax.er_report.tagged_sites = {};
for (_partition_, _selection_; in; relax.selected_branches) {


relax.tested_branches = {};
relax.full_to_short = {};
for (_b_,_class_; in; _selection_) {
if (_class_ == relax.test_branches_name) {
relax.node_name = relax.tree_ids[+_partition_] + '.' + _b_;
relax.tested_branches [relax.node_name] = 1;
relax.full_to_short [relax.node_name] = _b_;
}
}
relax.test_model_name = (relax.model_map[_partition_])[_selection_[0]];


utility.ToggleEnvVariable ("SET_MODEL_KEEP_LOCALS", TRUE);

relax.ll_by_branch = {};
relax.model_by_branch = {};


relax.altLL = relax.alternative_model.fit [terms.fit.log_likelihood];
relax.json [terms.json.site_logl] = {};
(relax.json [terms.json.site_logl])[terms.json.unconstrained] = selection.ComputeSiteLikelihoods (relax.alternative_model.fit[terms.likelihood_function]);

for (_b_,_ignore_; in; relax.tested_branches) {
GetString (relax._current_model, ^_b_, -5);
relax.model_by_branch [_b_] = relax._current_model;
SetParameter ( ^_b_ , MODEL, ^relax.reference_model_namespace);

LFCompute (^(relax.alternative_model.fit[terms.likelihood_function]),LF_START_COMPUTE);
LFCompute (^(relax.alternative_model.fit[terms.likelihood_function]),relax.ll);
relax.ll_by_branch [relax.full_to_short[_b_]] = Exp(relax.altLL-relax.ll);
LFCompute (^(relax.alternative_model.fit[terms.likelihood_function]),LF_DONE_COMPUTE);

SetParameter ( ^_b_ , MODEL, ^relax._current_model);
}


relax.json [terms.json.evidence_ratios] = {terms.json.by_branch : relax.ll_by_branch};

for (_b_,_ignore_; in; relax.tested_branches) {
SetParameter ( ^_b_ , MODEL, ^relax.reference_model_namespace);
}
LFCompute (^(relax.alternative_model.fit[terms.likelihood_function]),LF_START_COMPUTE);
LFCompute (^(relax.alternative_model.fit[terms.likelihood_function]),relax.ll);
LFCompute (^(relax.alternative_model.fit[terms.likelihood_function]),LF_DONE_COMPUTE);
(relax.json [terms.json.site_logl])[terms.json.constrained] = selection.ComputeSiteLikelihoods (relax.alternative_model.fit[terms.likelihood_function]);

for (_b_,_ignore_; in; relax.tested_branches) {
SetParameter ( ^_b_ , MODEL, ^(relax.model_by_branch[_b_]));
}

(relax.json [terms.json.evidence_ratios])[terms.json.constrained] = selection.EvidenceRatios ( (relax.json [terms.json.site_logl])[terms.json.unconstrained], (relax.json [terms.json.site_logl])[terms.json.constrained]);



}
utility.ToggleEnvVariable ("KEEP_OPTIMAL_ORDER", None);
///=====================================================================================================================================
/// END EBF CALCULATION


// NULL MODEL

selection.io.startTimer (relax.json [terms.json.timers], "RELAX null model fitting", 4);
Expand All @@ -1237,7 +1312,8 @@ function relax.FitMainTestPair (prompt) {
parameters.SetConstraint (model.generic.GetGlobalParameter (relax.model_object_map[relax.model_nmsp] , terms.relax.k), terms.parameters.one, terms.global);
}
}



relax.null_model.fit = estimators.FitExistingLF (relax.alternative_model.fit[terms.likelihood_function], relax.model_object_map);
io.ReportProgressMessageMD ("RELAX", "null", "* " + selection.io.report_fit (relax.null_model.fit, 9, relax.codon_data_info[terms.data.sample_size]));
relax.LRT = math.DoLRT (relax.null_model.fit[terms.fit.log_likelihood], relax.alternative_model.fit[terms.fit.log_likelihood], relax.numbers_of_tested_groups-1);
Expand Down
Loading
Loading