diff --git a/m_blr_core.h b/m_blr_core.h
index 433edbbc3fc0b78b4ce067185341ec941519ff55..a53bc24a0ea9c2bdb0f043dee0e83c99e5113dee 100644
--- a/m_blr_core.h
+++ b/m_blr_core.h
@@ -1,11 +1,10 @@
 #ifndef M_BLR_Core_H
 #define M_BLR_Core_H
 
-#include "linear_regressor.h"
-#include "../CORE/config/config.h"
+#include "m_core.h"
 #include "functions/function_base.h"
 #include "functions/basis_functions/bf_base.h"
-#include "m_core.h"
+#include "../CORE/config/config.h"
 
 #include <iostream>
 
@@ -42,7 +41,14 @@ class M_BLR_Core:
     }
         double predict(const rvec &v) const{
             return bf.epredict(get_weights(),v);
-        };
+        }
+
+        t_type get_weights_uncertainty() const{
+            double lambda=config.template get<double>("LAMBDA");
+            if(lambda >= 0) throw std::runtime_error(
+                    "Sigma matrix is only computed for LAMBDA < 0");
+            return Sigma.diagonal();
+        }
 
 
     private:
@@ -56,39 +62,4 @@ class M_BLR_Core:
             verbose=(config.get<int>("VERBOSE"));
         }
 };
-template
-<class BF=Function_Base&>
-class M_BLR_Train:
-    public M_BLR_Core<BF>,    // MUST NOT BE VIRUAL!!
-    public M_Train
-{
-
-    public:
-        using M_BLR_Core<BF>::trained;
-        using M_BLR_Core<BF>::config;
-        using M_BLR_Core<BF>::weights;
-        using M_BLR_Core<BF>::Sigma;
-
-        M_BLR_Train(Config &c):
-            M_BLR_Core<BF>(c)
-    {}
-
-        M_BLR_Train(BF &bf, Config &c):
-            M_BLR_Core<BF>(bf, c)
-    {}
-
-        void train(phi_type &Phi, const t_type &T) {
-            if(trained) {
-                throw std::runtime_error("This object is already trained!");
-            }
-            LinearRegressor::train(config,Phi, T,weights,Sigma);
-            trained=true;
-        }
-        t_type get_weights_uncertainty() const{
-            double lambda=config.template get<double>("LAMBDA");
-            if(lambda >= 0) throw std::runtime_error(
-                    "Sigma matrix is only computed for LAMBDA < 0");
-            return Sigma.diagonal();
-        }
-};
 #endif
diff --git a/m_blr_train.h b/m_blr_train.h
new file mode 100644
index 0000000000000000000000000000000000000000..60618dfb5f424dd916b43fd8db0a9838b9e4585d
--- /dev/null
+++ b/m_blr_train.h
@@ -0,0 +1,42 @@
+#ifndef M_BLR_TRAIN_H
+#define M_BLR_TRAIN_H
+
+#include "m_core.h"
+#include "m_blr_core.h"
+#include "linear_regressor.h"
+#include "functions/function_base.h"
+#include "functions/basis_functions/bf_base.h"
+#include "../CORE/config/config.h"
+
+#include <iostream>
+
+template
+<class BF=Function_Base&>
+class M_BLR_Train:
+    public M_BLR_Core<BF>,    // MUST NOT BE VIRTUAL!!
+    public M_Train
+{
+
+    public:
+        using M_BLR_Core<BF>::trained;
+        using M_BLR_Core<BF>::config;
+        using M_BLR_Core<BF>::weights;
+        using M_BLR_Core<BF>::Sigma;
+
+        M_BLR_Train(Config &c):
+            M_BLR_Core<BF>(c)
+    {}
+
+        M_BLR_Train(BF &bf, Config &c):
+            M_BLR_Core<BF>(bf, c)
+    {}
+
+        void train(phi_type &Phi, const t_type &T) {
+            if(trained) {
+                throw std::runtime_error("This object is already trained!");
+            }
+            LinearRegressor::train(config,Phi, T,weights,Sigma);
+            trained=true;
+        }
+};
+#endif
diff --git a/m_core.h b/m_core.h
index aaec72bee895d98adbe65ff79df994de3e208cc6..c2783bd7642d651a3dd16593b884ebd43d96ca1a 100644
--- a/m_core.h
+++ b/m_core.h
@@ -22,6 +22,7 @@ class M_Core {
             weights=w;
         }
         virtual double predict(const rvec &v)const=0;
+        virtual t_type get_weights_uncertainty()const=0;
 };
 class M_Predict {
     public:
@@ -50,6 +51,5 @@ class M_Train {
     public:
         virtual ~M_Train() {}
         virtual void train(phi_type &Phi, const t_type &T)=0;
-        virtual t_type get_weights_uncertainty()const=0;
 };
 #endif
diff --git a/m_krr_core.h b/m_krr_core.h
index 2bf5bf20402630dfb2dc7d4b114396c09de27911..ff7b37016a43e6e3437fa46c005395501367f75d 100644
--- a/m_krr_core.h
+++ b/m_krr_core.h
@@ -1,12 +1,10 @@
 #ifndef M_KRR_Core_H
 #define M_KRR_Core_H
 
-#include "linear_regressor.h"
 #include "../CORE/config/config.h"
 #include "functions/function_base.h"
 #include "functions/kernels/kern_base.h"
 #include "m_core.h"
-#include "ekm.h"
 
 #include <iostream>
 
@@ -43,7 +41,14 @@ class M_KRR_Core:
     }
         double predict(const rvec &v) const{
             return kernel.epredict(weights,v);
-        };
+        }
+
+        t_type get_weights_uncertainty() const{
+            double lambda=config.template get<double>("LAMBDA");
+            if(lambda >= 0) throw std::runtime_error(
+                    "Sigma matrix is only computed for LAMBDA < 0");
+            return Sigma.diagonal();
+        }
 
 
     private:
@@ -57,53 +62,6 @@ class M_KRR_Core:
             verbose=(config.get<int>("VERBOSE"));
         }
 };
-template
-<class K=Function_Base&>
-class M_KRR_Train:
-    public M_KRR_Core<K>,    // MUST NOT BE VIRUAL!!
-    public M_Train
-{
-
-    public:
-        EKM<K> ekm;
-        using M_KRR_Core<K>::trained;
-        using M_KRR_Core<K>::config;
-        using M_KRR_Core<K>::kernel;
-        using M_KRR_Core<K>::weights;
-        using M_KRR_Core<K>::Sigma;
-
-        M_KRR_Train(Config &c):
-            M_KRR_Core<K>(c),
-            ekm(c)
-    {}
-
-        M_KRR_Train(K &kernel, Config &c):
-            M_KRR_Core<K>(kernel, c),
-            ekm(kernel)
-    {}
-
-        void train(phi_type &Phi, const t_type &T) {
-            if(trained) {
-                throw std::runtime_error("This object is already trained!");
-            }
-            if (kernel.get_label()!="Kern_Linear") {
-                ekm.project(Phi);
-            }
-            LinearRegressor::train(config,Phi, T,weights,Sigma);
-
-            if (kernel.get_label()!="Kern_Linear") {
-                //kernalize weights
-                weights = ekm.KK.transpose()*weights;
-            }
-            trained=true;
-        }
-        t_type get_weights_uncertainty() const{
-            double lambda=config.template get<double>("LAMBDA");
-            if(lambda >= 0) throw std::runtime_error(
-                    "Sigma matrix is only computed for LAMBDA < 0");
-            return Sigma.diagonal();
-        }
-};
 
 //template
 //<class K=Function_Base&>
diff --git a/m_krr_train.h b/m_krr_train.h
new file mode 100644
index 0000000000000000000000000000000000000000..4cb7da33374cdd9387e0b08bc6c7364fef9860a2
--- /dev/null
+++ b/m_krr_train.h
@@ -0,0 +1,55 @@
+#ifndef M_KRR_TRAIN_H
+#define M_KRR_TRAIN_H
+
+#include "linear_regressor.h"
+#include "../CORE/config/config.h"
+#include "functions/function_base.h"
+#include "functions/kernels/kern_base.h"
+#include "m_core.h"
+#include "m_krr_core.h"
+#include "ekm.h"
+
+#include <iostream>
+
+template
+<class K=Function_Base&>
+class M_KRR_Train:
+    public M_KRR_Core<K>,    // MUST NOT BE VIRTUAL!!
+    public M_Train
+{
+
+    public:
+        EKM<K> ekm;
+        using M_KRR_Core<K>::trained;
+        using M_KRR_Core<K>::config;
+        using M_KRR_Core<K>::kernel;
+        using M_KRR_Core<K>::weights;
+        using M_KRR_Core<K>::Sigma;
+
+        M_KRR_Train(Config &c):
+            M_KRR_Core<K>(c),
+            ekm(c)
+    {}
+
+        M_KRR_Train(K &kernel, Config &c):
+            M_KRR_Core<K>(kernel, c),
+            ekm(kernel)
+    {}
+
+        void train(phi_type &Phi, const t_type &T) {
+            if(trained) {
+                throw std::runtime_error("This object is already trained!");
+            }
+            if (kernel.get_label()!="Kern_Linear") {
+                ekm.project(Phi);
+            }
+            LinearRegressor::train(config,Phi, T,weights,Sigma);
+
+            if (kernel.get_label()!="Kern_Linear") {
+                //kernelize weights
+                weights = ekm.KK.transpose()*weights;
+            }
+            trained=true;
+        }
+};
+#endif