Line data Source code
1 : /* +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
2 : Copyright (c) 2017-2019 The plumed team
3 : (see the PEOPLE file at the root of the distribution for a list of names)
4 :
5 : See http://www.plumed.org for more information.
6 :
7 : This file is part of plumed, version 2.
8 :
9 : plumed is free software: you can redistribute it and/or modify
10 : it under the terms of the GNU Lesser General Public License as published by
11 : the Free Software Foundation, either version 3 of the License, or
12 : (at your option) any later version.
13 :
14 : plumed is distributed in the hope that it will be useful,
15 : but WITHOUT ANY WARRANTY; without even the implied warranty of
16 : MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17 : GNU Lesser General Public License for more details.
18 :
19 : You should have received a copy of the GNU Lesser General Public License
20 : along with plumed. If not, see <http://www.gnu.org/licenses/>.
21 : +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ */
22 : #include "MetainferenceBase.h"
23 : #include "tools/File.h"
24 : #include <cmath>
25 : #include <ctime>
26 : #include <numeric>
27 :
28 : using namespace std;
29 :
30 : #ifndef M_PI
31 : #define M_PI 3.14159265358979323846
32 : #endif
33 :
34 : namespace PLMD {
35 : namespace isdb {
36 :
37 93 : void MetainferenceBase::registerKeywords( Keywords& keys ) {
38 93 : Action::registerKeywords(keys);
39 93 : ActionAtomistic::registerKeywords(keys);
40 93 : ActionWithValue::registerKeywords(keys);
41 93 : ActionWithArguments::registerKeywords(keys);
42 93 : componentsAreNotOptional(keys);
43 186 : keys.use("ARG");
44 279 : keys.addFlag("DOSCORE",false,"activate metainference");
45 279 : keys.addFlag("NOENSEMBLE",false,"don't perform any replica-averaging");
46 279 : keys.addFlag("REWEIGHT",false,"simple REWEIGHT using the ARG as energy");
47 372 : keys.add("optional","AVERAGING", "Stride for calculation of averaged weights and sigma_mean");
48 465 : keys.add("compulsory","NOISETYPE","MGAUSS","functional form of the noise (GAUSS,MGAUSS,OUTLIERS,MOUTLIERS,GENERIC)");
49 465 : keys.add("compulsory","LIKELIHOOD","GAUSS","the likelihood for the GENERIC metainference model, GAUSS or LOGN");
50 465 : keys.add("compulsory","DFTILDE","0.1","fraction of sigma_mean used to evolve ftilde");
51 279 : keys.addFlag("SCALEDATA",false,"Set to TRUE if you want to sample a scaling factor common to all values and replicas");
52 465 : keys.add("compulsory","SCALE0","1.0","initial value of the scaling factor");
53 465 : keys.add("compulsory","SCALE_PRIOR","FLAT","either FLAT or GAUSSIAN");
54 372 : keys.add("optional","SCALE_MIN","minimum value of the scaling factor");
55 372 : keys.add("optional","SCALE_MAX","maximum value of the scaling factor");
56 372 : keys.add("optional","DSCALE","maximum MC move of the scaling factor");
57 279 : keys.addFlag("ADDOFFSET",false,"Set to TRUE if you want to sample an offset common to all values and replicas");
58 465 : keys.add("compulsory","OFFSET0","0.0","initial value of the offset");
59 465 : keys.add("compulsory","OFFSET_PRIOR","FLAT","either FLAT or GAUSSIAN");
60 372 : keys.add("optional","OFFSET_MIN","minimum value of the offset");
61 372 : keys.add("optional","OFFSET_MAX","maximum value of the offset");
62 372 : keys.add("optional","DOFFSET","maximum MC move of the offset");
63 372 : keys.add("optional","REGRES_ZERO","stride for regression with zero offset");
64 465 : keys.add("compulsory","SIGMA0","1.0","initial value of the uncertainty parameter");
65 465 : keys.add("compulsory","SIGMA_MIN","0.0","minimum value of the uncertainty parameter");
66 465 : keys.add("compulsory","SIGMA_MAX","10.","maximum value of the uncertainty parameter");
67 372 : keys.add("optional","DSIGMA","maximum MC move of the uncertainty parameter");
68 465 : keys.add("compulsory","OPTSIGMAMEAN","NONE","Set to NONE/SEM to manually set sigma mean, or to estimate it on the fly");
69 372 : keys.add("optional","SIGMA_MEAN0","starting value for the uncertainty in the mean estimate");
70 372 : keys.add("optional","TEMP","the system temperature - this is only needed if code doesn't pass the temperature to plumed");
71 372 : keys.add("optional","MC_STEPS","number of MC steps");
72 372 : keys.add("optional","MC_CHUNKSIZE","MC chunksize");
73 372 : keys.add("optional","STATUS_FILE","write a file with all the data useful for restart/continuation of Metainference");
74 465 : keys.add("compulsory","WRITE_STRIDE","10000","write the status to a file every N steps, this can be used for restart/continuation");
75 372 : keys.add("optional","SELECTOR","name of selector");
76 372 : keys.add("optional","NSELECT","range of values for selector [0, N-1]");
77 186 : keys.use("RESTART");
78 372 : keys.addOutputComponent("score", "default", "the Metainference score");
79 372 : keys.addOutputComponent("sigma", "default", "uncertainty parameter");
80 372 : keys.addOutputComponent("sigmaMean", "default", "uncertainty in the mean estimate");
81 372 : keys.addOutputComponent("acceptSigma", "default", "MC acceptance for sigma values");
82 372 : keys.addOutputComponent("acceptScale", "SCALEDATA", "MC acceptance for scale value");
83 372 : keys.addOutputComponent("acceptFT", "GENERIC", "MC acceptance for general metainference f tilde value");
84 372 : keys.addOutputComponent("weight", "REWEIGHT", "weights of the weighted average");
85 372 : keys.addOutputComponent("biasDer", "REWEIGHT", "derivatives with respect to the bias");
86 372 : keys.addOutputComponent("scale", "SCALEDATA", "scale parameter");
87 372 : keys.addOutputComponent("offset", "ADDOFFSET", "offset parameter");
88 372 : keys.addOutputComponent("ftilde", "GENERIC", "ensemble average estimator");
89 93 : }
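// A hedged usage sketch: MetainferenceBase is not an action by itself; the ISDB
// actions built on it expose the keywords registered above. The action name and
// its data keywords below are placeholders, only the DOSCORE/NOISETYPE/SIGMA*
// keywords are taken from this file:
//
//   exp: SOME_ISDB_ACTION ... DOSCORE NOISETYPE=MGAUSS SIGMA0=1.0 SIGMA_MIN=0.0001
//        SIGMA_MAX=10.0 DSIGMA=0.1 SIGMA_MEAN0=1.0 WRITE_STRIDE=10000
//   PRINT ARG=exp.score,exp.acceptSigma FILE=COLVAR STRIDE=100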
90 :
91 86 : MetainferenceBase::MetainferenceBase(const ActionOptions&ao):
92 : Action(ao),
93 : ActionAtomistic(ao),
94 : ActionWithArguments(ao),
95 : ActionWithValue(ao),
96 : doscore_(false),
97 : write_stride_(0),
98 : narg(0),
99 : doscale_(false),
100 : scale_(1.),
101 : scale_mu_(0),
102 : scale_min_(1),
103 : scale_max_(-1),
104 : Dscale_(-1),
105 : dooffset_(false),
106 : offset_(0.),
107 : offset_mu_(0),
108 : offset_min_(1),
109 : offset_max_(-1),
110 : Doffset_(-1),
111 : doregres_zero_(false),
112 : nregres_zero_(0),
113 : Dftilde_(0.1),
114 : random(3),
115 : MCsteps_(1),
116 : MCaccept_(0),
117 : MCacceptScale_(0),
118 : MCacceptFT_(0),
119 : MCtrial_(0),
120 : MCchunksize_(0),
121 : firstTime(true),
122 : do_reweight_(false),
123 : do_optsigmamean_(0),
124 : nsel_(1),
125 : iselect(0),
126 : optsigmamean_stride_(0),
127 688 : decay_w_(1.)
128 : {
129 172 : parseFlag("DOSCORE", doscore_);
130 :
131 86 : bool noensemble = false;
132 172 : parseFlag("NOENSEMBLE", noensemble);
133 :
134 : // set up replica stuff
135 86 : master = (comm.Get_rank()==0);
136 86 : if(master) {
137 55 : nrep_ = multi_sim_comm.Get_size();
138 55 : replica_ = multi_sim_comm.Get_rank();
139 55 : if(noensemble) nrep_ = 1;
140 : } else {
141 31 : nrep_ = 0;
142 31 : replica_ = 0;
143 : }
144 86 : comm.Sum(&nrep_,1);
145 86 : comm.Sum(&replica_,1);
146 :
147 172 : parse("SELECTOR", selector_);
148 172 : parse("NSELECT", nsel_);
149 : // do checks
150 86 : if(selector_.length()>0 && nsel_<=1) error("With SELECTOR active, NSELECT must be greater than 1");
151 86 : if(selector_.length()==0 && nsel_>1) error("With NSELECT greater than 1, you must specify SELECTOR");
152 :
153 : // initialise firstTimeW
154 86 : firstTimeW.resize(nsel_, true);
155 :
156 : // reweight implies a different number of arguments (the last one must always be the bias)
157 172 : parseFlag("REWEIGHT", do_reweight_);
158 108 : if(do_reweight_&&getNumberOfArguments()!=1) error("To REWEIGHT one must provide one single bias as an argument");
159 86 : if(do_reweight_&&nrep_<2) error("REWEIGHT can only be used in parallel with 2 or more replicas");
160 258 : if(!getRestart()) average_weights_.resize(nsel_, vector<double> (nrep_, 1./static_cast<double>(nrep_)));
161 0 : else average_weights_.resize(nsel_, vector<double> (nrep_, 0.));
162 :
163 86 : unsigned averaging=0;
164 172 : parse("AVERAGING", averaging);
165 86 : if(averaging>0) {
166 0 : decay_w_ = 1./static_cast<double> (averaging);
167 0 : optsigmamean_stride_ = averaging;
168 : }
169 :
170 : string stringa_noise;
171 172 : parse("NOISETYPE",stringa_noise);
172 86 : if(stringa_noise=="GAUSS") noise_type_ = GAUSS;
173 79 : else if(stringa_noise=="MGAUSS") noise_type_ = MGAUSS;
174 10 : else if(stringa_noise=="OUTLIERS") noise_type_ = OUTLIERS;
175 9 : else if(stringa_noise=="MOUTLIERS") noise_type_ = MOUTLIERS;
176 1 : else if(stringa_noise=="GENERIC") noise_type_ = GENERIC;
177 0 : else error("Unknown noise type!");
178 :
179 86 : if(noise_type_== GENERIC) {
180 : string stringa_like;
181 2 : parse("LIKELIHOOD",stringa_like);
182 1 : if(stringa_like=="GAUSS") gen_likelihood_ = LIKE_GAUSS;
183 0 : else if(stringa_like=="LOGN") gen_likelihood_ = LIKE_LOGN;
184 0 : else error("Unknown likelihood type!");
185 :
186 2 : parse("DFTILDE",Dftilde_);
187 : }
188 :
189 172 : parse("WRITE_STRIDE",write_stride_);
190 172 : parse("STATUS_FILE",status_file_name_);
191 172 : if(status_file_name_=="") status_file_name_ = "MISTATUS"+getLabel();
192 0 : else status_file_name_ = status_file_name_+getLabel();
193 :
194 : string stringa_optsigma;
195 172 : parse("OPTSIGMAMEAN", stringa_optsigma);
196 86 : if(stringa_optsigma=="NONE") do_optsigmamean_=0;
197 4 : else if(stringa_optsigma=="SEM") do_optsigmamean_=1;
198 :
199 : vector<double> read_sigma_mean_;
200 172 : parseVector("SIGMA_MEAN0",read_sigma_mean_);
201 223 : if(!do_optsigmamean_ && read_sigma_mean_.size()==0 && !getRestart() && doscore_)
202 0 : error("If you don't use OPTSIGMAMEAN and you are not RESTARTING then you MUST SET SIGMA_MEAN0");
203 :
204 86 : if(noise_type_==MGAUSS||noise_type_==MOUTLIERS||noise_type_==GENERIC) {
205 78 : if(read_sigma_mean_.size()>0) {
206 23 : sigma_mean2_.resize(read_sigma_mean_.size());
207 69 : for(unsigned i=0; i<read_sigma_mean_.size(); i++) sigma_mean2_[i]=read_sigma_mean_[i]*read_sigma_mean_[i];
208 : } else {
209 55 : sigma_mean2_.resize(1,0.000001);
210 : }
211 : } else {
212 8 : if(read_sigma_mean_.size()==1) {
213 8 : sigma_mean2_.resize(1, read_sigma_mean_[0]*read_sigma_mean_[0]);
214 0 : } else if(read_sigma_mean_.size()==0) {
215 0 : sigma_mean2_.resize(1, 0.000001);
216 : } else {
217 0 : error("If you want to use more than one SIGMA_MEAN0 you should use NOISETYPE=MGAUSS|MOUTLIERS");
218 : }
219 : }
220 :
221 172 : parseFlag("SCALEDATA", doscale_);
222 86 : if(doscale_) {
223 : string stringa_noise;
224 24 : parse("SCALE_PRIOR",stringa_noise);
225 12 : if(stringa_noise=="GAUSSIAN") scale_prior_ = SC_GAUSS;
226 12 : else if(stringa_noise=="FLAT") scale_prior_ = SC_FLAT;
227 0 : else error("Unknown SCALE_PRIOR type!");
228 24 : parse("SCALE0",scale_);
229 24 : parse("DSCALE",Dscale_);
230 12 : if(Dscale_<0.) error("DSCALE must be set when using SCALEDATA");
231 12 : if(scale_prior_==SC_GAUSS) {
232 0 : scale_mu_=scale_;
233 : } else {
234 24 : parse("SCALE_MIN",scale_min_);
235 24 : parse("SCALE_MAX",scale_max_);
236 12 : if(scale_max_<scale_min_) error("SCALE_MAX and SCALE_MIN must be set when using SCALE_PRIOR=FLAT");
237 : }
238 : }
239 :
240 172 : parseFlag("ADDOFFSET", dooffset_);
241 86 : if(dooffset_) {
242 : string stringa_noise;
243 4 : parse("OFFSET_PRIOR",stringa_noise);
244 2 : if(stringa_noise=="GAUSSIAN") offset_prior_ = SC_GAUSS;
245 2 : else if(stringa_noise=="FLAT") offset_prior_ = SC_FLAT;
246 0 : else error("Unknown OFFSET_PRIOR type!");
247 4 : parse("OFFSET0",offset_);
248 4 : parse("DOFFSET",Doffset_);
249 2 : if(offset_prior_==SC_GAUSS) {
250 0 : offset_mu_=offset_;
251 0 : if(Doffset_<0.) error("DOFFSET must be set when using OFFSET_PRIOR=GAUSS");
252 : } else {
253 4 : parse("OFFSET_MIN",offset_min_);
254 4 : parse("OFFSET_MAX",offset_max_);
255 2 : if(Doffset_<0) Doffset_ = 0.05*(offset_max_ - offset_min_);
256 2 : if(offset_max_<offset_min_) error("OFFSET_MAX and OFFSET_MIN must be set when using OFFSET_PRIOR=FLAT");
257 : }
258 : }
259 :
260 : // regression with zero intercept
261 172 : parse("REGRES_ZERO", nregres_zero_);
262 86 : if(nregres_zero_>0) {
263 : // set flag
264 0 : doregres_zero_=true;
265 : // check if already sampling scale and offset
266 0 : if(doscale_) error("REGRES_ZERO and SCALEDATA are mutually exclusive");
267 0 : if(dooffset_) error("REGRES_ZERO and ADDOFFSET are mutually exclusive");
268 : }
269 :
270 : vector<double> readsigma;
271 172 : parseVector("SIGMA0",readsigma);
272 94 : if((noise_type_!=MGAUSS&&noise_type_!=MOUTLIERS&&noise_type_!=GENERIC)&&readsigma.size()>1)
273 0 : error("If you want to use more than one SIGMA you should use NOISETYPE=MGAUSS|MOUTLIERS|GENERIC");
274 86 : if(noise_type_==MGAUSS||noise_type_==MOUTLIERS||noise_type_==GENERIC) {
275 78 : sigma_.resize(readsigma.size());
276 78 : sigma_=readsigma;
277 8 : } else sigma_.resize(1, readsigma[0]);
278 :
279 : vector<double> readsigma_min;
280 172 : parseVector("SIGMA_MIN",readsigma_min);
281 94 : if((noise_type_!=MGAUSS&&noise_type_!=MOUTLIERS&&noise_type_!=GENERIC)&&readsigma_min.size()>1)
282 0 : error("If you want to use more than one SIGMA you should use NOISETYPE=MGAUSS|MOUTLIERS|GENERIC");
283 86 : if(noise_type_==MGAUSS||noise_type_==MOUTLIERS||noise_type_==GENERIC) {
284 78 : sigma_min_.resize(readsigma_min.size());
285 78 : sigma_min_=readsigma_min;
286 8 : } else sigma_min_.resize(1, readsigma_min[0]);
287 :
288 : vector<double> readsigma_max;
289 172 : parseVector("SIGMA_MAX",readsigma_max);
290 94 : if((noise_type_!=MGAUSS&&noise_type_!=MOUTLIERS&&noise_type_!=GENERIC)&&readsigma_max.size()>1)
291 0 : error("If you want to use more than one SIGMA you should use NOISETYPE=MGAUSS|MOUTLIERS|GENERIC");
292 86 : if(noise_type_==MGAUSS||noise_type_==MOUTLIERS||noise_type_==GENERIC) {
293 78 : sigma_max_.resize(readsigma_max.size());
294 78 : sigma_max_=readsigma_max;
295 8 : } else sigma_max_.resize(1, readsigma_max[0]);
296 :
297 86 : if(sigma_max_.size()!=sigma_min_.size()) error("The number of values for SIGMA_MIN and SIGMA_MAX must be the same");
298 :
299 : vector<double> read_dsigma;
300 172 : parseVector("DSIGMA",read_dsigma);
301 94 : if((noise_type_!=MGAUSS&&noise_type_!=MOUTLIERS&&noise_type_!=GENERIC)&&readsigma_max.size()>1)
302 0 : error("If you want to use more than one SIGMA you should use NOISETYPE=MGAUSS|MOUTLIERS|GENERIC");
303 86 : if(read_dsigma.size()>0) {
304 31 : Dsigma_.resize(read_dsigma.size());
305 31 : Dsigma_=read_dsigma;
306 : } else {
307 55 : Dsigma_.resize(sigma_max_.size());
308 220 : for(unsigned i=0; i<sigma_max_.size(); i++) Dsigma_[i] = 0.05*(sigma_max_[i] - sigma_min_[i]);
309 : }
310 :
311 : // monte carlo stuff
312 172 : parse("MC_STEPS",MCsteps_);
313 172 : parse("MC_CHUNKSIZE", MCchunksize_);
314 : // get temperature
315 86 : double temp=0.0;
316 172 : parse("TEMP",temp);
317 117 : if(temp>0.0) kbt_=plumed.getAtoms().getKBoltzmann()*temp;
318 110 : else kbt_=plumed.getAtoms().getKbT();
319 86 : if(kbt_==0.0&&doscore_) error("Unless the MD engine passes the temperature to plumed, you must specify it using TEMP");
320 :
321 : // initialize random seed
322 : unsigned iseed;
323 86 : if(master) iseed = time(NULL)+replica_;
324 31 : else iseed = 0;
325 86 : comm.Sum(&iseed, 1);
326 172 : random[0].setSeed(-iseed);
327 : // Random chunk
328 86 : if(master) iseed = time(NULL)+replica_;
329 31 : else iseed = 0;
330 86 : comm.Sum(&iseed, 1);
331 172 : random[2].setSeed(-iseed);
332 86 : if(doscale_||dooffset_) {
333 : // in this case we want the same seed everywhere
334 14 : iseed = time(NULL);
335 14 : if(master&&nrep_>1) multi_sim_comm.Bcast(iseed,0);
336 14 : comm.Bcast(iseed,0);
337 28 : random[1].setSeed(-iseed);
338 : }
339 :
340 : // outfile stuff
341 86 : if(write_stride_>0&&doscore_) {
342 31 : sfile_.link(*this);
343 31 : sfile_.open(status_file_name_);
344 : }
345 :
346 86 : }
347 :
348 430 : MetainferenceBase::~MetainferenceBase()
349 : {
350 86 : if(sfile_.isOpen()) sfile_.close();
351 86 : }
352 :
353 31 : void MetainferenceBase::Initialise(const unsigned input)
354 : {
355 : setNarg(input);
356 62 : if(narg!=parameters.size()) {
357 0 : std::string num1; Tools::convert(parameters.size(),num1);
358 0 : std::string num2; Tools::convert(narg,num2);
359 0 : std::string msg = "The number of experimental values " + num1 + " must be the same as the number of calculated values " + num2;
360 0 : error(msg);
361 : }
362 :
363 : // resize vector for sigma_mean history
364 31 : sigma_mean2_last_.resize(nsel_);
365 62 : for(unsigned i=0; i<nsel_; i++) sigma_mean2_last_[i].resize(narg);
366 31 : if(noise_type_==MGAUSS||noise_type_==MOUTLIERS||noise_type_==GENERIC) {
367 23 : if(sigma_mean2_.size()==1) {
368 23 : double tmp = sigma_mean2_[0];
369 23 : sigma_mean2_.resize(narg, tmp);
370 0 : } else if(sigma_mean2_.size()>1&&sigma_mean2_.size()!=narg) {
371 0 : error("SIGMA_MEAN0 can accept either one single value or as many values as the number of arguments (with NOISETYPE=MGAUSS|MOUTLIERS|GENERIC)");
372 : }
373 : // set the initial value for the history
374 7410 : for(unsigned i=0; i<nsel_; i++) for(unsigned j=0; j<narg; j++) sigma_mean2_last_[i][j].push_back(sigma_mean2_[j]);
375 : } else {
376 : // set the initial value for the history
377 54 : for(unsigned i=0; i<nsel_; i++) for(unsigned j=0; j<narg; j++) sigma_mean2_last_[i][j].push_back(sigma_mean2_[0]);
378 : }
379 :
380 : // set sigma_bias
381 31 : if(noise_type_==MGAUSS||noise_type_==MOUTLIERS||noise_type_==GENERIC) {
382 23 : if(sigma_.size()==1) {
383 23 : double tmp = sigma_[0];
384 23 : sigma_.resize(narg, tmp);
385 0 : } else if(sigma_.size()>1&&sigma_.size()!=narg) {
386 0 : error("SIGMA0 can accept either one single value or as many values as the number of arguments (with NOISETYPE=MGAUSS|MOUTLIERS|GENERIC)");
387 : }
388 23 : if(sigma_min_.size()==1) {
389 23 : double tmp = sigma_min_[0];
390 23 : sigma_min_.resize(narg, tmp);
391 0 : } else if(sigma_min_.size()>1&&sigma_min_.size()!=narg) {
392 0 : error("SIGMA_MIN can accept either one single value or as many values as the number of arguments (with NOISETYPE=MGAUSS|MOUTLIERS|GENERIC)");
393 : }
394 23 : if(sigma_max_.size()==1) {
395 23 : double tmp = sigma_max_[0];
396 23 : sigma_max_.resize(narg, tmp);
397 0 : } else if(sigma_max_.size()>1&&sigma_max_.size()!=narg) {
398 0 : error("SIGMA_MAX can accept either one single value or as many values as the number of arguments (with NOISETYPE=MGAUSS|MOUTLIERS|GENERIC)");
399 : }
400 23 : if(Dsigma_.size()==1) {
401 23 : double tmp = Dsigma_[0];
402 23 : Dsigma_.resize(narg, tmp);
403 0 : } else if(Dsigma_.size()>1&&Dsigma_.size()!=narg) {
404 0 : error("DSIGMA can accept either one single value or as many values as the number of arguments (with NOISETYPE=MGAUSS|MOUTLIERS|GENERIC)");
405 : }
406 : }
407 :
408 31 : IFile restart_sfile;
409 31 : restart_sfile.link(*this);
410 62 : if(getRestart()&&restart_sfile.FileExist(status_file_name_)) {
411 0 : firstTime = false;
412 0 : for(unsigned i=0; i<nsel_; i++) firstTimeW[i] = false;
413 0 : restart_sfile.open(status_file_name_);
414 0 : log.printf(" Restarting from %s\n", status_file_name_.c_str());
415 : double dummy;
416 0 : if(restart_sfile.scanField("time",dummy)) {
417 : // nsel
418 0 : for(unsigned i=0; i<sigma_mean2_last_.size(); i++) {
419 : std::string msg_i;
420 0 : Tools::convert(i,msg_i);
421 : // narg
422 0 : if(noise_type_==MGAUSS||noise_type_==MOUTLIERS||noise_type_==GENERIC) {
423 0 : for(unsigned j=0; j<narg; ++j) {
424 : std::string msg_j;
425 0 : Tools::convert(j,msg_j);
426 0 : std::string msg = msg_i+"_"+msg_j;
427 : double read_sm;
428 0 : restart_sfile.scanField("sigmaMean_"+msg,read_sm);
429 0 : sigma_mean2_last_[i][j][0] = read_sm*read_sm;
430 : }
431 : }
432 0 : if(noise_type_==GAUSS||noise_type_==OUTLIERS) {
433 : double read_sm;
434 : std::string msg_j;
435 0 : Tools::convert(0,msg_j);
436 0 : std::string msg = msg_i+"_"+msg_j;
437 0 : restart_sfile.scanField("sigmaMean_"+msg,read_sm);
438 0 : for(unsigned j=0; j<narg; j++) sigma_mean2_last_[i][j][0] = read_sm*read_sm;
439 : }
440 : }
441 :
442 0 : for(unsigned i=0; i<sigma_.size(); ++i) {
443 : std::string msg;
444 0 : Tools::convert(i,msg);
445 0 : restart_sfile.scanField("sigma_"+msg,sigma_[i]);
446 : }
447 0 : if(noise_type_==GENERIC) {
448 0 : for(unsigned i=0; i<ftilde_.size(); ++i) {
449 : std::string msg;
450 0 : Tools::convert(i,msg);
451 0 : restart_sfile.scanField("ftilde_"+msg,ftilde_[i]);
452 : }
453 : }
454 0 : restart_sfile.scanField("scale0_",scale_);
455 0 : restart_sfile.scanField("offset0_",offset_);
456 :
457 0 : for(unsigned i=0; i<nsel_; i++) {
458 : std::string msg;
459 0 : Tools::convert(i,msg);
460 : double tmp_w;
461 0 : restart_sfile.scanField("weight_"+msg,tmp_w);
462 0 : if(master) {
463 0 : average_weights_[i][replica_] = tmp_w;
464 0 : if(nrep_>1) multi_sim_comm.Sum(&average_weights_[i][0], nrep_);
465 : }
466 0 : comm.Sum(&average_weights_[i][0], nrep_);
467 : }
468 :
469 : }
470 0 : restart_sfile.scanField();
471 0 : restart_sfile.close();
472 : }
473 :
474 62 : addComponentWithDerivatives("score");
475 62 : componentIsNotPeriodic("score");
476 62 : valueScore=getPntrToComponent("score");
477 :
478 31 : if(do_reweight_) {
479 44 : addComponent("biasDer");
480 44 : componentIsNotPeriodic("biasDer");
481 44 : addComponent("weight");
482 44 : componentIsNotPeriodic("weight");
483 : }
484 :
485 31 : if(doscale_ || doregres_zero_) {
486 24 : addComponent("scale");
487 24 : componentIsNotPeriodic("scale");
488 24 : valueScale=getPntrToComponent("scale");
489 : }
490 :
491 31 : if(dooffset_) {
492 4 : addComponent("offset");
493 4 : componentIsNotPeriodic("offset");
494 4 : valueOffset=getPntrToComponent("offset");
495 : }
496 :
497 31 : if(dooffset_||doscale_) {
498 28 : addComponent("acceptScale");
499 28 : componentIsNotPeriodic("acceptScale");
500 28 : valueAcceptScale=getPntrToComponent("acceptScale");
501 : }
502 :
503 31 : if(noise_type_==GENERIC) {
504 2 : addComponent("acceptFT");
505 2 : componentIsNotPeriodic("acceptFT");
506 2 : valueAcceptFT=getPntrToComponent("acceptFT");
507 : }
508 :
509 62 : addComponent("acceptSigma");
510 62 : componentIsNotPeriodic("acceptSigma");
511 62 : valueAccept=getPntrToComponent("acceptSigma");
512 :
513 31 : if(noise_type_==MGAUSS||noise_type_==MOUTLIERS||noise_type_==GENERIC) {
514 4963 : for(unsigned i=0; i<sigma_mean2_.size(); ++i) {
515 2470 : std::string num; Tools::convert(i,num);
516 7410 : addComponent("sigmaMean-"+num); componentIsNotPeriodic("sigmaMean-"+num);
517 7410 : valueSigmaMean.push_back(getPntrToComponent("sigmaMean-"+num));
518 4940 : getPntrToComponent("sigmaMean-"+num)->set(sqrt(sigma_mean2_[i]));
519 7410 : addComponent("sigma-"+num); componentIsNotPeriodic("sigma-"+num);
520 7410 : valueSigma.push_back(getPntrToComponent("sigma-"+num));
521 4940 : getPntrToComponent("sigma-"+num)->set(sigma_[i]);
522 2470 : if(noise_type_==GENERIC) {
523 6 : addComponent("ftilde-"+num); componentIsNotPeriodic("ftilde-"+num);
524 6 : valueFtilde.push_back(getPntrToComponent("ftilde-"+num));
525 : }
526 : }
527 : } else {
528 24 : addComponent("sigmaMean"); componentIsNotPeriodic("sigmaMean");
529 24 : valueSigmaMean.push_back(getPntrToComponent("sigmaMean"));
530 16 : getPntrToComponent("sigmaMean")->set(sqrt(sigma_mean2_[0]));
531 24 : addComponent("sigma"); componentIsNotPeriodic("sigma");
532 24 : valueSigma.push_back(getPntrToComponent("sigma"));
533 16 : getPntrToComponent("sigma")->set(sigma_[0]);
534 : }
535 :
536 31 : switch(noise_type_) {
537 : case GENERIC:
538 1 : log.printf(" with general metainference ");
539 1 : if(gen_likelihood_==LIKE_GAUSS) log.printf(" and a gaussian likelihood\n");
540 0 : else if(gen_likelihood_==LIKE_LOGN) log.printf(" and a log-normal likelihood\n");
541 1 : log.printf(" ensemble average parameter sampled with a step %lf of sigma_mean\n", Dftilde_);
542 : break;
543 : case GAUSS:
544 7 : log.printf(" with gaussian noise and a single noise parameter for all the data\n");
545 : break;
546 : case MGAUSS:
547 14 : log.printf(" with gaussian noise and a noise parameter for each data point\n");
548 : break;
549 : case OUTLIERS:
550 1 : log.printf(" with long-tailed gaussian noise and a single noise parameter for all the data\n");
551 : break;
552 : case MOUTLIERS:
553 8 : log.printf(" with long-tailed gaussian noise and a noise parameter for each data point\n");
554 : break;
555 : }
556 :
557 31 : if(doscale_) {
558 12 : log.printf(" sampling a common scaling factor with:\n");
559 12 : log.printf(" initial scale parameter %f\n",scale_);
560 12 : if(scale_prior_==SC_GAUSS) {
561 0 : log.printf(" gaussian prior with mean %f and width %f\n",scale_mu_,Dscale_);
562 : }
563 12 : if(scale_prior_==SC_FLAT) {
564 12 : log.printf(" flat prior between %f - %f\n",scale_min_,scale_max_);
565 12 : log.printf(" maximum MC move of scale parameter %f\n",Dscale_);
566 : }
567 : }
568 :
569 31 : if(dooffset_) {
570 2 : log.printf(" sampling a common offset with:\n");
571 2 : log.printf(" initial offset parameter %f\n",offset_);
572 2 : if(offset_prior_==SC_GAUSS) {
573 0 : log.printf(" gaussian prior with mean %f and width %f\n",offset_mu_,Doffset_);
574 : }
575 2 : if(offset_prior_==SC_FLAT) {
576 2 : log.printf(" flat prior between %f - %f\n",offset_min_,offset_max_);
577 2 : log.printf(" maximum MC move of offset parameter %f\n",Doffset_);
578 : }
579 : }
580 :
581 31 : log.printf(" number of experimental data points %u\n",narg);
582 31 : log.printf(" number of replicas %u\n",nrep_);
583 31 : log.printf(" initial data uncertainties");
584 4987 : for(unsigned i=0; i<sigma_.size(); ++i) log.printf(" %f", sigma_[i]);
585 31 : log.printf("\n");
586 31 : log.printf(" minimum data uncertainties");
587 4987 : for(unsigned i=0; i<sigma_.size(); ++i) log.printf(" %f",sigma_min_[i]);
588 31 : log.printf("\n");
589 31 : log.printf(" maximum data uncertainties");
590 4987 : for(unsigned i=0; i<sigma_.size(); ++i) log.printf(" %f",sigma_max_[i]);
591 31 : log.printf("\n");
592 31 : log.printf(" maximum MC move of data uncertainties");
593 4987 : for(unsigned i=0; i<sigma_.size(); ++i) log.printf(" %f",Dsigma_[i]);
594 31 : log.printf("\n");
595 31 : log.printf(" temperature of the system %f\n",kbt_);
596 31 : log.printf(" MC steps %u\n",MCsteps_);
597 31 : log.printf(" initial standard errors of the mean");
598 4987 : for(unsigned i=0; i<sigma_mean2_.size(); ++i) log.printf(" %f", sqrt(sigma_mean2_[i]));
599 31 : log.printf("\n");
600 :
601 : //resize the number of metainference derivatives and the number of back-calculated data
602 31 : metader_.resize(narg, 0.);
603 31 : calc_data_.resize(narg, 0.);
604 :
605 93 : log<<" Bibliography "<<plumed.cite("Bonomi, Camilloni, Cavalli, Vendruscolo, Sci. Adv. 2, e150117 (2016)");
606 75 : if(do_reweight_) log<<plumed.cite("Bonomi, Camilloni, Vendruscolo, Sci. Rep. 6, 31232 (2016)");
607 39 : if(do_optsigmamean_>0) log<<plumed.cite("Loehr, Jussupow, Camilloni, J. Chem. Phys. 146, 165102 (2017)");
608 93 : log<<plumed.cite("Bonomi, Camilloni, Bioinformatics, 33, 3999 (2017)");
609 31 : log<<"\n";
610 31 : }
611 :
612 0 : void MetainferenceBase::Selector()
613 : {
614 0 : iselect = 0;
615 : // set the value of selector for REM-like stuff
616 0 : if(selector_.length()>0) iselect = static_cast<unsigned>(plumed.passMap[selector_]);
617 0 : }
618 :
619 12 : double MetainferenceBase::getEnergySP(const vector<double> &mean, const vector<double> &sigma,
620 : const double scale, const double offset)
621 : {
622 12 : const double scale2 = scale*scale;
623 12 : const double sm2 = sigma_mean2_[0];
624 12 : const double ss2 = sigma[0]*sigma[0] + scale2*sm2;
625 12 : const double sss = sigma[0]*sigma[0] + sm2;
626 :
627 : double ene = 0.0;
628 36 : #pragma omp parallel num_threads(OpenMP::getNumThreads()) shared(ene)
629 : {
630 24 : #pragma omp for reduction( + : ene)
631 : for(unsigned i=0; i<narg; ++i) {
632 249 : const double dev = scale*mean[i]-parameters[i]+offset;
633 83 : const double a2 = 0.5*dev*dev + ss2;
634 83 : ene += std::log(2.0*a2/(1.0-exp(-a2/sm2)));
635 : }
636 : }
637 : // add one single Jeffreys prior and one normalisation per data point
638 12 : ene += 0.5*std::log(sss) + static_cast<double>(narg)*0.5*std::log(0.5*M_PI*M_PI/ss2);
639 12 : if(doscale_ || doregres_zero_) ene += 0.5*std::log(sss);
640 12 : if(dooffset_) ene += 0.5*std::log(sss);
641 12 : return kbt_ * ene;
642 : }
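// Sketch of the OUTLIERS (single-sigma, long-tailed) energy computed above,
// restated as a worked equation (notation: d_i are the experimental values):
//
//   dev_i = scale*mean_i - d_i + offset
//   a2_i  = 0.5*dev_i^2 + sigma^2 + scale^2*sigma_mean^2
//   E     = kbt * [ sum_i log( 2*a2_i / (1 - exp(-a2_i/sigma_mean^2)) )
//                   + 0.5*log(sigma^2 + sigma_mean^2)                            // one Jeffreys prior
//                   + N*0.5*log(0.5*pi^2/(sigma^2 + scale^2*sigma_mean^2)) ]     // one normalisation per point
//
// with one extra Jeffreys term when the scale and/or the offset are sampled.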
643 :
644 5328 : double MetainferenceBase::getEnergySPE(const vector<double> &mean, const vector<double> &sigma,
645 : const double scale, const double offset)
646 : {
647 5328 : const double scale2 = scale*scale;
648 : double ene = 0.0;
649 15984 : #pragma omp parallel num_threads(OpenMP::getNumThreads()) shared(ene)
650 : {
651 10656 : #pragma omp for reduction( + : ene)
652 : for(unsigned i=0; i<narg; ++i) {
653 42588 : const double sm2 = sigma_mean2_[i];
654 21294 : const double ss2 = sigma[i]*sigma[i] + scale2*sm2;
655 21294 : const double sss = sigma[i]*sigma[i] + sm2;
656 42588 : const double dev = scale*mean[i]-parameters[i]+offset;
657 21294 : const double a2 = 0.5*dev*dev + ss2;
658 21294 : ene += 0.5*std::log(sss) + 0.5*std::log(0.5*M_PI*M_PI/ss2) + std::log(2.0*a2/(1.0-exp(-a2/sm2)));
659 42588 : if(doscale_ || doregres_zero_) ene += 0.5*std::log(sss);
660 21294 : if(dooffset_) ene += 0.5*std::log(sss);
661 : }
662 : }
663 5328 : return kbt_ * ene;
664 : }
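// getEnergySPE is the per-datapoint (MOUTLIERS) variant of getEnergySP: sigma_i
// and sigma_mean_i are indexed by data point, so the Jeffreys and normalisation
// terms are accumulated inside the loop instead of being added once at the end.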
665 :
666 48 : double MetainferenceBase::getEnergyMIGEN(const vector<double> &mean, const vector<double> &ftilde, const vector<double> &sigma,
667 : const double scale, const double offset)
668 : {
669 : double ene = 0.0;
670 144 : #pragma omp parallel num_threads(OpenMP::getNumThreads()) shared(ene)
671 : {
672 96 : #pragma omp for reduction( + : ene)
673 : for(unsigned i=0; i<narg; ++i) {
674 192 : const double inv_sb2 = 1./(sigma[i]*sigma[i]);
675 96 : const double inv_sm2 = 1./sigma_mean2_[i];
676 : double devb = 0;
677 286 : if(gen_likelihood_==LIKE_GAUSS) devb = scale*ftilde[i]-parameters[i]+offset;
678 1 : else if(gen_likelihood_==LIKE_LOGN) devb = std::log(scale*ftilde[i]/parameters[i]);
679 192 : double devm = mean[i] - ftilde[i];
680 : // deviation + normalisation + Jeffreys prior
681 : double normb = 0.;
682 192 : if(gen_likelihood_==LIKE_GAUSS) normb = -0.5*std::log(0.5/M_PI*inv_sb2);
683 0 : else if(gen_likelihood_==LIKE_LOGN) normb = -0.5*std::log(0.5/M_PI*inv_sb2/(parameters[i]*parameters[i]));
684 96 : const double normm = -0.5*std::log(0.5/M_PI*inv_sm2);
685 96 : const double jeffreys = -0.5*std::log(2.*inv_sb2);
686 96 : ene += 0.5*devb*devb*inv_sb2 + 0.5*devm*devm*inv_sm2 + normb + normm + jeffreys;
687 96 : if(doscale_ || doregres_zero_) ene += jeffreys;
688 96 : if(dooffset_) ene += jeffreys;
689 : }
690 : }
691 48 : return kbt_ * ene;
692 : }
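// Sketch of the GENERIC-noise energy per data point i, as evaluated above:
//
//   devb_i = scale*ftilde_i - d_i + offset     (LIKELIHOOD=GAUSS)
//          = log(scale*ftilde_i/d_i)           (LIKELIHOOD=LOGN)
//   devm_i = mean_i - ftilde_i
//   E = kbt * sum_i [ 0.5*devb_i^2/sigma_i^2 + 0.5*devm_i^2/sigma_mean_i^2
//                     + normalisations + Jeffreys priors ]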
693 :
694 554 : double MetainferenceBase::getEnergyGJ(const vector<double> &mean, const vector<double> &sigma,
695 : const double scale, const double offset)
696 : {
697 554 : const double scale2 = scale*scale;
698 1108 : const double inv_s2 = 1./(sigma[0]*sigma[0] + scale2*sigma_mean2_[0]);
699 554 : const double inv_sss = 1./(sigma[0]*sigma[0] + sigma_mean2_[0]);
700 :
701 : double ene = 0.0;
702 1662 : #pragma omp parallel num_threads(OpenMP::getNumThreads()) shared(ene)
703 : {
704 1108 : #pragma omp for reduction( + : ene)
705 : for(unsigned i=0; i<narg; ++i) {
706 4872 : double dev = scale*mean[i]-parameters[i]+offset;
707 1624 : ene += 0.5*dev*dev*inv_s2;
708 : }
709 : }
710 554 : const double normalisation = -0.5*std::log(0.5/M_PI*inv_s2);
711 554 : const double jeffreys = -0.5*std::log(2.*inv_sss);
712 : // add Jeffrey's prior in case one sigma for all data points + one normalisation per datapoint
713 554 : ene += jeffreys + static_cast<double>(narg)*normalisation;
714 554 : if(doscale_ || doregres_zero_) ene += jeffreys;
715 554 : if(dooffset_) ene += jeffreys;
716 :
717 554 : return kbt_ * ene;
718 : }
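// GAUSS noise with a single sigma reduces to weighted least squares plus one
// normalisation per data point and a single Jeffreys prior:
//
//   E = kbt * [ sum_i 0.5*(scale*mean_i - d_i + offset)^2 / (sigma^2 + scale^2*sigma_mean^2)
//               - N*0.5*log( (1/(2*pi)) / (sigma^2 + scale^2*sigma_mean^2) )
//               - 0.5*log( 2/(sigma^2 + sigma_mean^2) ) ]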
719 :
720 368 : double MetainferenceBase::getEnergyGJE(const vector<double> &mean, const vector<double> &sigma,
721 : const double scale, const double offset)
722 : {
723 368 : const double scale2 = scale*scale;
724 :
725 : double ene = 0.0;
726 1104 : #pragma omp parallel num_threads(OpenMP::getNumThreads()) shared(ene)
727 : {
728 736 : #pragma omp for reduction( + : ene)
729 : for(unsigned i=0; i<narg; ++i) {
730 23691 : const double inv_s2 = 1./(sigma[i]*sigma[i] + scale2*sigma_mean2_[i]);
731 7897 : const double inv_sss = 1./(sigma[i]*sigma[i] + sigma_mean2_[i]);
732 15794 : double dev = scale*mean[i]-parameters[i]+offset;
733 : // deviation + normalisation + Jeffreys prior
734 7897 : const double normalisation = -0.5*std::log(0.5/M_PI*inv_s2);
735 7897 : const double jeffreys = -0.5*std::log(2.*inv_sss);
736 7897 : ene += 0.5*dev*dev*inv_s2 + normalisation + jeffreys;
737 8467 : if(doscale_ || doregres_zero_) ene += jeffreys;
738 7897 : if(dooffset_) ene += jeffreys;
739 : }
740 : }
741 368 : return kbt_ * ene;
742 : }
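// getEnergyGJE is the per-datapoint (MGAUSS) variant of getEnergyGJ, with
// sigma_i and sigma_mean_i per data point and the normalisation and Jeffreys
// terms accumulated inside the loop.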
743 :
744 2225 : double MetainferenceBase::doMonteCarlo(const vector<double> &mean_)
745 : {
746 : // calculate old energy with the updated coordinates
747 2225 : double old_energy=0.;
748 :
749 2225 : switch(noise_type_) {
750 : case GAUSS:
751 271 : old_energy = getEnergyGJ(mean_,sigma_,scale_,offset_);
752 271 : break;
753 : case MGAUSS:
754 160 : old_energy = getEnergyGJE(mean_,sigma_,scale_,offset_);
755 160 : break;
756 : case OUTLIERS:
757 6 : old_energy = getEnergySP(mean_,sigma_,scale_,offset_);
758 6 : break;
759 : case MOUTLIERS:
760 1776 : old_energy = getEnergySPE(mean_,sigma_,scale_,offset_);
761 1776 : break;
762 : case GENERIC:
763 12 : old_energy = getEnergyMIGEN(mean_,ftilde_,sigma_,scale_,offset_);
764 12 : break;
765 : }
766 :
767 2225 : if(!getExchangeStep()) {
768 : // Create vector of random sigma indices
769 : vector<unsigned> indices;
770 2225 : if (MCchunksize_ > 0) {
771 0 : for (unsigned j=0; j<sigma_.size(); j++) {
772 0 : indices.push_back(j);
773 : }
774 0 : random[2].Shuffle(indices);
775 : }
776 : bool breaknow = false;
777 :
778 : // cycle on MC steps
779 6675 : for(unsigned i=0; i<MCsteps_; ++i) {
780 :
781 2225 : MCtrial_++;
782 :
783 : // propose move for ftilde
784 2225 : vector<double> new_ftilde(sigma_.size());
785 2225 : new_ftilde = ftilde_;
786 :
787 2225 : if(noise_type_==GENERIC) {
788 : // change all sigmas
789 60 : for(unsigned j=0; j<sigma_.size(); j++) {
790 24 : const double r3 = random[0].Gaussian();
791 48 : const double ds3 = Dftilde_*sqrt(sigma_mean2_[j])*r3;
792 24 : new_ftilde[j] = ftilde_[j] + ds3;
793 : }
794 : // calculate new energy
795 12 : double new_energy = getEnergyMIGEN(mean_,new_ftilde,sigma_,scale_,offset_);
796 :
797 : // accept or reject
798 12 : const double delta = ( new_energy - old_energy ) / kbt_;
799 : // if delta is negative always accept move
800 12 : if( delta <= 0.0 ) {
801 12 : old_energy = new_energy;
802 12 : ftilde_ = new_ftilde;
803 12 : MCacceptFT_++;
804 : // otherwise extract random number
805 : } else {
806 0 : const double s = random[0].RandU01();
807 0 : if( s < exp(-delta) ) {
808 0 : old_energy = new_energy;
809 0 : ftilde_ = new_ftilde;
810 0 : MCacceptFT_++;
811 : }
812 : }
813 : }
814 :
815 : // propose move for scale and/or offset
816 2225 : double new_scale = scale_;
817 2225 : double new_offset = offset_;
818 2225 : if(doscale_||dooffset_) {
819 1848 : if(doscale_) {
820 1824 : if(scale_prior_==SC_FLAT) {
821 1824 : const double r1 = random[1].Gaussian();
822 1824 : const double ds1 = Dscale_*r1;
823 1824 : new_scale += ds1;
824 : // check boundaries
825 1824 : if(new_scale > scale_max_) {new_scale = 2.0 * scale_max_ - new_scale;}
826 1824 : if(new_scale < scale_min_) {new_scale = 2.0 * scale_min_ - new_scale;}
827 : } else {
828 0 : const double r1 = random[1].Gaussian();
829 0 : const double ds1 = 0.5*(scale_mu_-new_scale)+Dscale_*exp(1)/M_PI*r1;
830 0 : new_scale += ds1;
831 : }
832 : }
833 :
834 1848 : if(dooffset_) {
835 24 : if(offset_prior_==SC_FLAT) {
836 24 : const double r1 = random[1].Gaussian();
837 24 : const double ds1 = Doffset_*r1;
838 24 : new_offset += ds1;
839 : // check boundaries
840 24 : if(new_offset > offset_max_) {new_offset = 2.0 * offset_max_ - new_offset;}
841 24 : if(new_offset < offset_min_) {new_offset = 2.0 * offset_min_ - new_offset;}
842 : } else {
843 0 : const double r1 = random[1].Gaussian();
844 0 : const double ds1 = 0.5*(offset_mu_-new_offset)+Doffset_*exp(1)/M_PI*r1;
845 0 : new_offset += ds1;
846 : }
847 : }
848 :
849 : // calculate new energy
850 : double new_energy = 0.;
851 :
852 1848 : switch(noise_type_) {
853 : case GAUSS:
854 12 : new_energy = getEnergyGJ(mean_,sigma_,new_scale,new_offset);
855 : break;
856 : case MGAUSS:
857 48 : new_energy = getEnergyGJE(mean_,sigma_,new_scale,new_offset);
858 : break;
859 : case OUTLIERS:
860 0 : new_energy = getEnergySP(mean_,sigma_,new_scale,new_offset);
861 : break;
862 : case MOUTLIERS:
863 1776 : new_energy = getEnergySPE(mean_,sigma_,new_scale,new_offset);
864 : break;
865 : case GENERIC:
866 12 : new_energy = getEnergyMIGEN(mean_,ftilde_,sigma_,new_scale,new_offset);
867 : break;
868 : }
869 : // for the scale we need to consider the total energy
870 1848 : vector<double> totenergies(2);
871 1848 : if(master) {
872 936 : totenergies[0] = old_energy;
873 936 : totenergies[1] = new_energy;
874 936 : if(nrep_>1) multi_sim_comm.Sum(totenergies);
875 : } else {
876 912 : totenergies[0] = 0;
877 912 : totenergies[1] = 0;
878 : }
879 1848 : comm.Sum(totenergies);
880 :
881 : // accept or reject
882 1848 : const double delta = ( totenergies[1] - totenergies[0] ) / kbt_;
883 : // if delta is negative always accept move
884 1848 : if( delta <= 0.0 ) {
885 1848 : old_energy = new_energy;
886 1848 : scale_ = new_scale;
887 1848 : offset_ = new_offset;
888 1848 : MCacceptScale_++;
889 : // otherwise extract random number
890 : } else {
891 0 : double s = random[1].RandU01();
892 0 : if( s < exp(-delta) ) {
893 0 : old_energy = new_energy;
894 0 : scale_ = new_scale;
895 0 : offset_ = new_offset;
896 0 : MCacceptScale_++;
897 : }
898 : }
899 : }
900 :
901 : // propose move for sigma
902 2225 : vector<double> new_sigma(sigma_.size());
903 2225 : new_sigma = sigma_;
904 :
905 : // change MCchunksize_ sigmas
906 2225 : if (MCchunksize_ > 0) {
907 0 : if ((MCchunksize_ * i) >= sigma_.size()) {
908 : // This means we are not moving any sigma, so we should break immediately
909 : breaknow = true;
910 : }
911 :
912 : // change random sigmas
913 0 : for(unsigned j=0; j<MCchunksize_; j++) {
914 0 : const unsigned shuffle_index = j + MCchunksize_ * i;
915 0 : if (shuffle_index >= sigma_.size()) {
916 : // Going any further will segfault but we should still evaluate the sigmas we changed
917 : break;
918 : }
919 0 : const unsigned index = indices[shuffle_index];
920 0 : const double r2 = random[0].Gaussian();
921 0 : const double ds2 = Dsigma_[index]*r2;
922 0 : new_sigma[index] = sigma_[index] + ds2;
923 : // check boundaries
924 0 : if(new_sigma[index] > sigma_max_[index]) {new_sigma[index] = 2.0 * sigma_max_[index] - new_sigma[index];}
925 0 : if(new_sigma[index] < sigma_min_[index]) {new_sigma[index] = 2.0 * sigma_min_[index] - new_sigma[index];}
926 : }
927 : } else {
928 : // change all sigmas
929 24747 : for(unsigned j=0; j<sigma_.size(); j++) {
930 11261 : const double r2 = random[0].Gaussian();
931 11261 : const double ds2 = Dsigma_[j]*r2;
932 11261 : new_sigma[j] = sigma_[j] + ds2;
933 : // check boundaries
934 22522 : if(new_sigma[j] > sigma_max_[j]) {new_sigma[j] = 2.0 * sigma_max_[j] - new_sigma[j];}
935 22522 : if(new_sigma[j] < sigma_min_[j]) {new_sigma[j] = 2.0 * sigma_min_[j] - new_sigma[j];}
936 : }
937 : }
938 :
939 2225 : if (breaknow) {
940 : // We didn't move any sigmas, so there is no sense in evaluating anything
941 : break;
942 : }
943 :
944 : // calculate new energy
945 : double new_energy = 0.;
946 2225 : switch(noise_type_) {
947 : case GAUSS:
948 271 : new_energy = getEnergyGJ(mean_,new_sigma,scale_,offset_);
949 : break;
950 : case MGAUSS:
951 160 : new_energy = getEnergyGJE(mean_,new_sigma,scale_,offset_);
952 : break;
953 : case OUTLIERS:
954 6 : new_energy = getEnergySP(mean_,new_sigma,scale_,offset_);
955 : break;
956 : case MOUTLIERS:
957 1776 : new_energy = getEnergySPE(mean_,new_sigma,scale_,offset_);
958 : break;
959 : case GENERIC:
960 12 : new_energy = getEnergyMIGEN(mean_,ftilde_,new_sigma,scale_,offset_);
961 : break;
962 : }
963 :
964 : // accept or reject
965 2225 : const double delta = ( new_energy - old_energy ) / kbt_;
966 : // if delta is negative always accept move
967 2225 : if( delta <= 0.0 ) {
968 2225 : old_energy = new_energy;
969 2225 : sigma_ = new_sigma;
970 2225 : MCaccept_++;
971 : // otherwise extract random number
972 : } else {
973 0 : const double s = random[0].RandU01();
974 0 : if( s < exp(-delta) ) {
975 0 : old_energy = new_energy;
976 0 : sigma_ = new_sigma;
977 0 : MCaccept_++;
978 : }
979 : }
980 :
981 : }
982 :
983 : /* save the result of the sampling */
984 2225 : double accept = static_cast<double>(MCaccept_) / static_cast<double>(MCtrial_);
985 2225 : valueAccept->set(accept);
986 2225 : if(doscale_ || doregres_zero_) valueScale->set(scale_);
987 2225 : if(dooffset_) valueOffset->set(offset_);
988 2225 : if(doscale_||dooffset_) {
989 1848 : accept = static_cast<double>(MCacceptScale_) / static_cast<double>(MCtrial_);
990 1848 : valueAcceptScale->set(accept);
991 : }
992 47269 : for(unsigned i=0; i<sigma_.size(); i++) valueSigma[i]->set(sigma_[i]);
993 2225 : if(noise_type_==GENERIC) {
994 12 : accept = static_cast<double>(MCacceptFT_) / static_cast<double>(MCtrial_);
995 12 : valueAcceptFT->set(accept);
996 108 : for(unsigned i=0; i<sigma_.size(); i++) valueFtilde[i]->set(ftilde_[i]);
997 : }
998 : }
999 :
1000 : // here we sum the score over the replicas to get the full metainference score that we save as a bias
1001 2225 : if(master) {
1002 1227 : if(nrep_>1) multi_sim_comm.Sum(old_energy);
1003 : } else {
1004 998 : old_energy=0;
1005 : }
1006 2225 : comm.Sum(old_energy);
1007 :
1008 2225 : return old_energy;
1009 : }
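// The acceptance rule used throughout doMonteCarlo is plain Metropolis; a
// minimal free-standing sketch (a hypothetical helper, not part of the class):
//
//   bool metropolis_accept(double e_new, double e_old, double kbt, Random &rng) {
//     const double delta = (e_new - e_old)/kbt;
//     if(delta <= 0.0) return true;              // downhill moves always accepted
//     return rng.RandU01() < std::exp(-delta);   // uphill with Boltzmann probability
//   }
//
// Note that scale/offset moves are accepted on the energy summed over all
// replicas (these parameters are shared, and random[1] is seeded identically
// everywhere so every replica proposes the same move), while sigma moves use
// only the local replica energy.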
1010 :
1011 : /*
1012 : In the following energy-force functions we do not add the normalisation and the Jeffreys priors
1013 : because they do not contribute to the forces; the correct metainference energy is the one
1014 : calculated in the Monte Carlo step.
1015 : */
1016 :
1017 6 : void MetainferenceBase::getEnergyForceSP(const vector<double> &mean, const vector<double> &dmean_x,
1018 : const vector<double> &dmean_b)
1019 : {
1020 6 : const double scale2 = scale_*scale_;
1021 6 : const double sm2 = sigma_mean2_[0];
1022 6 : const double ss2 = sigma_[0]*sigma_[0] + scale2*sm2;
1023 6 : vector<double> f(narg,0);
1024 :
1025 6 : if(master) {
1026 17 : #pragma omp parallel num_threads(OpenMP::getNumThreads())
1027 : {
1028 11 : #pragma omp for
1029 : for(unsigned i=0; i<narg; ++i) {
1030 123 : const double dev = scale_*mean[i]-parameters[i]+offset_;
1031 41 : const double a2 = 0.5*dev*dev + ss2;
1032 41 : const double t = exp(-a2/sm2);
1033 41 : const double dt = 1./t;
1034 41 : const double dit = 1./(1.-dt);
1035 82 : f[i] = -scale_*dev*(dit/sm2 + 1./a2);
1036 : }
1037 : }
1038 : // collect contribution to forces and energy from other replicas
1039 6 : if(nrep_>1) multi_sim_comm.Sum(&f[0],narg);
1040 : }
1041 : // intra-replica summation
1042 12 : comm.Sum(&f[0],narg);
1043 :
1044 : double w_tmp = 0.;
1045 42 : for(unsigned i=0; i<narg; ++i) {
1046 126 : setMetaDer(i, -kbt_*f[i]*dmean_x[i]);
1047 126 : w_tmp += kbt_*f[i]*dmean_b[i];
1048 : }
1049 :
1050 6 : if(do_reweight_) {
1051 0 : setArgDerivatives(valueScore, -w_tmp);
1052 0 : getPntrToComponent("biasDer")->set(-w_tmp);
1053 : }
1054 6 : }
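// Force-assembly pattern shared by all getEnergyForce* methods: with
// E = E(mean(x, V)) the chain rule gives
//
//   dE/dx_k = sum_i (dE/dmean_i) * dmean_x_i     -> stored with setMetaDer()
//   dE/dV   = sum_i (dE/dmean_i) * dmean_b_i     -> "biasDer", only with REWEIGHT
//
// where f[i] above is -(1/kbt)*dE/dmean_i, summed over replicas before use.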
1055 :
1056 1776 : void MetainferenceBase::getEnergyForceSPE(const vector<double> &mean, const vector<double> &dmean_x,
1057 : const vector<double> &dmean_b)
1058 : {
1059 1776 : const double scale2 = scale_*scale_;
1060 1776 : vector<double> f(narg,0);
1061 :
1062 1776 : if(master) {
1063 2649 : #pragma omp parallel num_threads(OpenMP::getNumThreads())
1064 : {
1065 1761 : #pragma omp for
1066 : for(unsigned i=0; i<narg; ++i) {
1067 7062 : const double sm2 = sigma_mean2_[i];
1068 3531 : const double ss2 = sigma_[i]*sigma_[i] + scale2*sm2;
1069 10593 : const double dev = scale_*mean[i]-parameters[i]+offset_;
1070 3531 : const double a2 = 0.5*dev*dev + ss2;
1071 3531 : const double t = exp(-a2/sm2);
1072 3531 : const double dt = 1./t;
1073 3531 : const double dit = 1./(1.-dt);
1074 7062 : f[i] = -scale_*dev*(dit/sm2 + 1./a2);
1075 : }
1076 : }
1077 : // collect contribution to forces and energy from other replicas
1078 1776 : if(nrep_>1) multi_sim_comm.Sum(&f[0],narg);
1079 : }
1080 3552 : comm.Sum(&f[0],narg);
1081 :
1082 : double w_tmp = 0.;
1083 7104 : for(unsigned i=0; i<narg; ++i) {
1084 21312 : setMetaDer(i, -kbt_ * dmean_x[i] * f[i]);
1085 21312 : w_tmp += kbt_ * dmean_b[i] *f[i];
1086 : }
1087 :
1088 1776 : if(do_reweight_) {
1089 1776 : setArgDerivatives(valueScore, -w_tmp);
1090 3552 : getPntrToComponent("biasDer")->set(-w_tmp);
1091 : }
1092 1776 : }
1093 :
1094 271 : void MetainferenceBase::getEnergyForceGJ(const vector<double> &mean, const vector<double> &dmean_x,
1095 : const vector<double> &dmean_b)
1096 : {
1097 271 : const double scale2 = scale_*scale_;
1098 271 : double inv_s2=0.;
1099 :
1100 271 : if(master) {
1101 458 : inv_s2 = 1./(sigma_[0]*sigma_[0] + scale2*sigma_mean2_[0]);
1102 229 : if(nrep_>1) multi_sim_comm.Sum(inv_s2);
1103 : }
1104 271 : comm.Sum(inv_s2);
1105 :
1106 271 : double w_tmp = 0.;
1107 813 : #pragma omp parallel num_threads(OpenMP::getNumThreads()) shared(w_tmp)
1108 : {
1109 542 : #pragma omp for reduction( + : w_tmp)
1110 : for(unsigned i=0; i<narg; ++i) {
1111 2400 : const double dev = scale_*mean[i]-parameters[i]+offset_;
1112 800 : const double mult = dev*scale_*inv_s2;
1113 1600 : setMetaDer(i, kbt_*dmean_x[i]*mult);
1114 1600 : w_tmp += kbt_*dmean_b[i]*mult;
1115 : }
1116 : }
1117 :
1118 271 : if(do_reweight_) {
1119 84 : setArgDerivatives(valueScore, w_tmp);
1120 168 : getPntrToComponent("biasDer")->set(w_tmp);
1121 : }
1122 271 : }
1123 :
1124 160 : void MetainferenceBase::getEnergyForceGJE(const vector<double> &mean, const vector<double> &dmean_x,
1125 : const vector<double> &dmean_b)
1126 : {
1127 160 : const double scale2 = scale_*scale_;
1128 320 : vector<double> inv_s2(sigma_.size(),0.);
1129 :
1130 160 : if(master) {
1131 5948 : for(unsigned i=0; i<sigma_.size(); ++i) inv_s2[i] = 1./(sigma_[i]*sigma_[i] + scale2*sigma_mean2_[i]);
1132 184 : if(nrep_>1) multi_sim_comm.Sum(&inv_s2[0],sigma_.size());
1133 : }
1134 320 : comm.Sum(&inv_s2[0],sigma_.size());
1135 :
1136 160 : double w_tmp = 0.;
1137 480 : #pragma omp parallel num_threads(OpenMP::getNumThreads()) shared(w_tmp)
1138 : {
1139 320 : #pragma omp for reduction( + : w_tmp)
1140 : for(unsigned i=0; i<narg; ++i) {
1141 11568 : const double dev = scale_*mean[i]-parameters[i]+offset_;
1142 7712 : const double mult = dev*scale_*inv_s2[i];
1143 7712 : setMetaDer(i, kbt_*dmean_x[i]*mult);
1144 7712 : w_tmp += kbt_*dmean_b[i]*mult;
1145 : }
1146 : }
1147 :
1148 160 : if(do_reweight_) {
1149 76 : setArgDerivatives(valueScore, w_tmp);
1150 152 : getPntrToComponent("biasDer")->set(w_tmp);
1151 : }
1152 160 : }
1153 :
1154 12 : void MetainferenceBase::getEnergyForceMIGEN(const vector<double> &mean, const vector<double> &dmean_x, const vector<double> &dmean_b)
1155 : {
1156 24 : vector<double> inv_s2(sigma_.size(),0.);
1157 24 : vector<double> dev(sigma_.size(),0.);
1158 24 : vector<double> dev2(sigma_.size(),0.);
1159 :
1160 72 : for(unsigned i=0; i<sigma_.size(); ++i) {
1161 24 : inv_s2[i] = 1./sigma_mean2_[i];
1162 24 : if(master) {
1163 48 : dev[i] = (mean[i]-ftilde_[i]);
1164 24 : dev2[i] = dev[i]*dev[i];
1165 : }
1166 : }
1167 12 : if(master&&nrep_>1) {
1168 0 : multi_sim_comm.Sum(&dev[0],dev.size());
1169 0 : multi_sim_comm.Sum(&dev2[0],dev2.size());
1170 : }
1171 12 : comm.Sum(&dev[0],dev.size());
1172 12 : comm.Sum(&dev2[0],dev2.size());
1173 :
1174 12 : double dene_b = 0.;
1175 36 : #pragma omp parallel num_threads(OpenMP::getNumThreads()) shared(dene_b)
1176 : {
1177 24 : #pragma omp for reduction( + : dene_b)
1178 : for(unsigned i=0; i<narg; ++i) {
1179 92 : const double dene_x = kbt_*inv_s2[i]*dmean_x[i]*dev[i];
1180 23 : dene_b += kbt_*inv_s2[i]*dmean_b[i]*dev[i];
1181 : setMetaDer(i, dene_x);
1182 : }
1183 : }
1184 :
1185 12 : if(do_reweight_) {
1186 0 : setArgDerivatives(valueScore, dene_b);
1187 0 : getPntrToComponent("biasDer")->set(dene_b);
1188 : }
1189 12 : }
1190 :
1191 2225 : void MetainferenceBase::get_weights(double &fact, double &var_fact)
1192 : {
1193 2225 : const double dnrep = static_cast<double>(nrep_);
1194 2225 : const double ave_fact = 1.0/dnrep;
1195 :
1196 : double norm = 0.0;
1197 :
1198 : // calculate the weights either from BIAS
1199 2225 : if(do_reweight_) {
1200 1936 : vector<double> bias(nrep_,0);
1201 1936 : if(master) {
1202 1960 : bias[replica_] = getArgument(0);
1203 1960 : if(nrep_>1) multi_sim_comm.Sum(&bias[0], nrep_);
1204 : }
1205 3872 : comm.Sum(&bias[0], nrep_);
1206 :
1207 3872 : const double maxbias = *(std::max_element(bias.begin(), bias.end()));
1208 5808 : for(unsigned i=0; i<nrep_; ++i) {
1209 7744 : bias[i] = exp((bias[i]-maxbias)/kbt_);
1210 3872 : norm += bias[i];
1211 : }
1212 :
1213 : // accumulate weights
1214 3872 : if(!firstTimeW[iselect]) {
1215 3828 : for(unsigned i=0; i<nrep_; ++i) {
1216 11484 : const double delta=bias[i]/norm-average_weights_[iselect][i];
1217 3828 : average_weights_[iselect][i]+=decay_w_*delta;
1218 : }
1219 : } else {
1220 : firstTimeW[iselect] = false;
1221 66 : for(unsigned i=0; i<nrep_; ++i) {
1222 88 : average_weights_[iselect][i] = bias[i]/norm;
1223 : }
1224 : }
1225 :
1226 : // set average back into bias and set norm to one
1227 11616 : for(unsigned i=0; i<nrep_; ++i) bias[i] = average_weights_[iselect][i];
1228 : // set local weight, norm and weight variance
1229 3872 : fact = bias[replica_];
1230 : norm = 1.0;
1231 5808 : for(unsigned i=0; i<nrep_; ++i) var_fact += (bias[i]/norm-ave_fact)*(bias[i]/norm-ave_fact);
1232 3872 : getPntrToComponent("weight")->set(fact);
1233 : } else {
1234 : // or arithmetic ones
1235 : norm = dnrep;
1236 289 : fact = 1.0/norm;
1237 : }
1238 2225 : }
1239 :
1240 2225 : void MetainferenceBase::get_sigma_mean(const double fact, const double var_fact, const vector<double> &mean)
1241 : {
1242 2225 : const double dnrep = static_cast<double>(nrep_);
1243 2225 : const double ave_fact = 1.0/dnrep;
1244 :
1245 2225 : vector<double> sigma_mean2_tmp(sigma_mean2_.size());
1246 :
1247 2225 : if(do_optsigmamean_>0) {
1248 : // remove first entry of the history vector
1249 168 : if(sigma_mean2_last_[iselect][0].size()==optsigmamean_stride_&&optsigmamean_stride_>0)
1250 0 : for(unsigned i=0; i<narg; ++i) sigma_mean2_last_[iselect][i].erase(sigma_mean2_last_[iselect][i].begin());
1251 : /* this is the current estimate of sigma mean for each argument;
1252 : there is one per argument in any case because the maximum
1253 : among them is used in the case of GAUSS/OUTLIERS */
1254 84 : vector<double> sigma_mean2_now(narg,0);
1255 84 : if(do_reweight_) {
1256 0 : if(master) {
1257 0 : for(unsigned i=0; i<narg; ++i) {
1258 0 : double tmp1 = (fact*getCalcData(i)-ave_fact*mean[i])*(fact*getCalcData(i)-ave_fact*mean[i]);
1259 0 : double tmp2 = -2.*mean[i]*(fact-ave_fact)*(fact*getCalcData(i)-ave_fact*mean[i]);
1260 0 : sigma_mean2_now[i] = tmp1 + tmp2;
1261 : }
1262 0 : if(nrep_>1) multi_sim_comm.Sum(&sigma_mean2_now[0], narg);
1263 : }
1264 0 : comm.Sum(&sigma_mean2_now[0], narg);
1265 0 : for(unsigned i=0; i<narg; ++i) sigma_mean2_now[i] = dnrep/(dnrep-1.)*(sigma_mean2_now[i] + mean[i]*mean[i]*var_fact);
1266 : } else {
1267 84 : if(master) {
1268 630 : for(unsigned i=0; i<narg; ++i) {
1269 630 : double tmp = getCalcData(i)-mean[i];
1270 630 : sigma_mean2_now[i] = fact*tmp*tmp;
1271 : }
1272 84 : if(nrep_>1) multi_sim_comm.Sum(&sigma_mean2_now[0], narg);
1273 : }
1274 168 : comm.Sum(&sigma_mean2_now[0], narg);
1275 2520 : for(unsigned i=0; i<narg; ++i) sigma_mean2_now[i] /= dnrep;
1276 : }
1277 :
1278 : // add sigma_mean2 to history
1279 84 : if(optsigmamean_stride_>0) {
1280 0 : for(unsigned i=0; i<narg; ++i) sigma_mean2_last_[iselect][i].push_back(sigma_mean2_now[i]);
1281 : } else {
1282 3780 : for(unsigned i=0; i<narg; ++i) if(sigma_mean2_now[i] > sigma_mean2_last_[iselect][i][0]) sigma_mean2_last_[iselect][i][0] = sigma_mean2_now[i];
1283 : }
1284 :
1285 84 : if(noise_type_==MGAUSS||noise_type_==MOUTLIERS||noise_type_==GENERIC) {
1286 1260 : for(unsigned i=0; i<narg; ++i) {
1287 : /* set to the maximum in history vector */
1288 5040 : sigma_mean2_tmp[i] = *max_element(sigma_mean2_last_[iselect][i].begin(), sigma_mean2_last_[iselect][i].end());
1289 : /* the standard error of the mean */
1290 2520 : valueSigmaMean[i]->set(sqrt(sigma_mean2_tmp[i]));
1291 1260 : if(noise_type_==GENERIC) {
1292 0 : sigma_min_[i] = sqrt(sigma_mean2_tmp[i]);
1293 0 : if(sigma_[i] < sigma_min_[i]) sigma_[i] = sigma_min_[i];
1294 : }
1295 : }
1296 0 : } else if(noise_type_==GAUSS||noise_type_==OUTLIERS) {
1297 : // find maximum for each data point
1298 : vector <double> max_values;
1299 0 : for(unsigned i=0; i<narg; ++i) max_values.push_back(*max_element(sigma_mean2_last_[iselect][i].begin(), sigma_mean2_last_[iselect][i].end()));
1300 : // find maximum across data points
1301 0 : const double max_now = *max_element(max_values.begin(), max_values.end());
1302 : // set new value
1303 0 : sigma_mean2_tmp[0] = max_now;
1304 0 : valueSigmaMean[0]->set(sqrt(sigma_mean2_tmp[0]));
1305 : }
1306 : // endif sigma optimization
1307 : } else {
1308 2141 : if(noise_type_==MGAUSS||noise_type_==MOUTLIERS||noise_type_==GENERIC) {
1309 9724 : for(unsigned i=0; i<narg; ++i) {
1310 29172 : sigma_mean2_tmp[i] = sigma_mean2_last_[iselect][i][0];
1311 19448 : valueSigmaMean[i]->set(sqrt(sigma_mean2_tmp[i]));
1312 : }
1313 277 : } else if(noise_type_==GAUSS||noise_type_==OUTLIERS) {
1314 554 : sigma_mean2_tmp[0] = sigma_mean2_last_[iselect][0][0];
1315 554 : valueSigmaMean[0]->set(sqrt(sigma_mean2_tmp[0]));
1316 : }
1317 : }
1318 :
1319 2225 : sigma_mean2_ = sigma_mean2_tmp;
1320 2225 : }
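// With OPTSIGMAMEAN=SEM the uncertainty of the mean is estimated on the fly
// from the spread of the replicas; in the unweighted branch above, with
// fact = 1/nrep, this amounts to the squared standard error of the mean,
//
//   sigma_mean_i^2 = [ (1/nrep) * sum_r (x_i^(r) - mean_i)^2 ] / nrep
//
// and the history vector keeps its running maximum (or a sliding window of
// length AVERAGING when that keyword is set).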
1321 :
1322 2225 : void MetainferenceBase::replica_averaging(const double fact, vector<double> &mean, vector<double> &dmean_b)
1323 : {
1324 2225 : if(master) {
1325 12490 : for(unsigned i=0; i<narg; ++i) mean[i] = fact*calc_data_[i];
1326 2249 : if(nrep_>1) multi_sim_comm.Sum(&mean[0], narg);
1327 : }
1328 4450 : comm.Sum(&mean[0], narg);
1329 : // set the derivative of the mean with respect to the bias
1330 37706 : for(unsigned i=0; i<narg; ++i) dmean_b[i] = fact/kbt_*(calc_data_[i]-mean[i])*decay_w_;
1331 :
1332 : // this is only for generic metainference
1333 2225 : if(firstTime) {ftilde_ = mean; firstTime = false;}
1334 2225 : }
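// The replica-averaged forward model and its derivative with respect to the
// local bias, as computed above:
//
//   mean_i    = sum_r w_r * x_i^(r)
//   dmean_b_i = (w/kbt) * (x_i - mean_i) * decay_w_
//
// the second line follows from differentiating the Boltzmann weight of the
// local replica through the moving average of get_weights().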
1335 :
1336 0 : void MetainferenceBase::do_regression_zero(const vector<double> &mean)
1337 : {
1338 : // parameters[i] = scale_ * mean[i]: find scale_ with linear regression
1339 : double num = 0.0;
1340 : double den = 0.0;
1341 0 : for(unsigned i=0; i<parameters.size(); ++i) {
1342 0 : num += mean[i] * parameters[i];
1343 0 : den += mean[i] * mean[i];
1344 : }
1345 0 : if(den>0) {
1346 0 : scale_ = num / den;
1347 : } else {
1348 0 : scale_ = 1.0;
1349 : }
1350 0 : }
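// Least squares through the origin: minimising chi^2(s) = sum_i (d_i - s*mean_i)^2
// gives dchi^2/ds = -2*sum_i mean_i*(d_i - s*mean_i) = 0 and hence
//
//   scale_ = sum_i mean_i*d_i / sum_i mean_i^2
//
// the den>0 guard falls back to scale_ = 1 when all the means vanish.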
1351 :
1352 2225 : double MetainferenceBase::getScore()
1353 : {
1354 : /* Metainference */
1355 : /* 1) collect weights */
1356 2225 : double fact = 0.;
1357 2225 : double var_fact = 0.;
1358 2225 : get_weights(fact, var_fact);
1359 :
1360 : /* 2) calculate average */
1361 4450 : vector<double> mean(getNarg(),0);
1362 : // this is the derivative of the mean with respect to the argument
1363 2225 : vector<double> dmean_x(getNarg(),fact);
1364 : // this is the derivative of the mean with respect to the bias
1365 4450 : vector<double> dmean_b(getNarg(),0);
1366 : // calculate it
1367 2225 : replica_averaging(fact, mean, dmean_b);
1368 :
1369 : /* 3) calculates parameters */
1370 2225 : get_sigma_mean(fact, var_fact, mean);
1371 :
1372 : // in case of regression with zero intercept, calculate scale
1373 2225 : if(doregres_zero_ && getStep()%nregres_zero_==0) do_regression_zero(mean);
1374 :
1375 : /* 4) run monte carlo */
1376 2225 : double ene = doMonteCarlo(mean);
1377 :
1378 : // calculate bias and forces
1379 2225 : switch(noise_type_) {
1380 : case GAUSS:
1381 271 : getEnergyForceGJ(mean, dmean_x, dmean_b);
1382 : break;
1383 : case MGAUSS:
1384 160 : getEnergyForceGJE(mean, dmean_x, dmean_b);
1385 : break;
1386 : case OUTLIERS:
1387 6 : getEnergyForceSP(mean, dmean_x, dmean_b);
1388 : break;
1389 : case MOUTLIERS:
1390 1776 : getEnergyForceSPE(mean, dmean_x, dmean_b);
1391 : break;
1392 : case GENERIC:
1393 12 : getEnergyForceMIGEN(mean, dmean_x, dmean_b);
1394 : break;
1395 : }
1396 :
1397 2225 : return ene;
1398 : }
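// Sketch of the per-step pipeline expected from the derived ISDB actions (the
// exact call pattern lives in the derived classes, not here):
//
//   1. store the back-calculated data with setCalcData();
//   2. call getScore(): it collects the weights, forms the replica average,
//      updates sigma_mean, runs the Monte Carlo on the nuisance parameters and
//      returns the metainference energy;
//   3. set the "score" component to the returned value and combine the stored
//      per-datapoint derivatives (setMetaDer above) with the derivatives of the
//      back-calculated data to obtain the atomic forces.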
1399 :
1400 86 : void MetainferenceBase::writeStatus()
1401 : {
1402 172 : if(!doscore_) return;
1403 31 : sfile_.rewind();
1404 62 : sfile_.printField("time",getTimeStep()*getStep());
1405 : //nsel
1406 124 : for(unsigned i=0; i<sigma_mean2_last_.size(); i++) {
1407 : std::string msg_i,msg_j;
1408 31 : Tools::convert(i,msg_i);
1409 : vector <double> max_values;
1410 : //narg
1411 2528 : for(unsigned j=0; j<narg; ++j) {
1412 2497 : Tools::convert(j,msg_j);
1413 4994 : std::string msg = msg_i+"_"+msg_j;
1414 2497 : if(noise_type_==MGAUSS||noise_type_==MOUTLIERS||noise_type_==GENERIC) {
1415 9880 : sfile_.printField("sigmaMean_"+msg,sqrt(*max_element(sigma_mean2_last_[i][j].begin(), sigma_mean2_last_[i][j].end())));
1416 : } else {
1417 : // find maximum for each data point
1418 81 : max_values.push_back(*max_element(sigma_mean2_last_[i][j].begin(), sigma_mean2_last_[i][j].end()));
1419 : }
1420 : }
1421 31 : if(noise_type_==GAUSS||noise_type_==OUTLIERS) {
1422 : // find maximum across data points
1423 16 : const double max_now = sqrt(*max_element(max_values.begin(), max_values.end()));
1424 8 : Tools::convert(0,msg_j);
1425 16 : std::string msg = msg_i+"_"+msg_j;
1426 16 : sfile_.printField("sigmaMean_"+msg, max_now);
1427 : }
1428 : }
1429 4987 : for(unsigned i=0; i<sigma_.size(); ++i) {
1430 : std::string msg;
1431 2478 : Tools::convert(i,msg);
1432 4956 : sfile_.printField("sigma_"+msg,sigma_[i]);
1433 : }
1434 31 : if(noise_type_==GENERIC) {
1435 5 : for(unsigned i=0; i<ftilde_.size(); ++i) {
1436 : std::string msg;
1437 2 : Tools::convert(i,msg);
1438 4 : sfile_.printField("ftilde_"+msg,ftilde_[i]);
1439 : }
1440 : }
1441 62 : sfile_.printField("scale0_",scale_);
1442 62 : sfile_.printField("offset0_",offset_);
1443 124 : for(unsigned i=0; i<average_weights_.size(); i++) {
1444 : std::string msg_i;
1445 31 : Tools::convert(i,msg_i);
1446 93 : sfile_.printField("weight_"+msg_i,average_weights_[i][replica_]);
1447 : }
1448 31 : sfile_.printField();
1449 31 : sfile_.flush();
1450 : }
1451 :
1452 : }
1453 5874 : }
1454 :
|