using namespace std;
using namespace cbl;
using namespace catalogue;
using namespace chainmesh;
using namespace pairs;
using namespace measure;
using namespace twopt;
// ----- write_pairs: write the pair counts of a single Pair object to file -----

string MK = "mkdir -p "+dir;
if (system(MK.c_str())) {}

string file_out = dir+file;
ofstream fout(file_out.c_str()); checkIO(fout, file_out);
if (PP->pairInfo()==PairInfo::_standard_)

  for (int i=0; i<PP->nbins(); i++)
    fout << setiosflags(ios::fixed) << setprecision(5) << setw(10) << right << i
         << " " << setiosflags(ios::fixed) << setprecision(5) << setw(10) << right << PP->scale(i)
         << " " << setiosflags(ios::fixed) << setprecision(5) << setw(10) << right << PP->PP1D(i)
         << " " << setiosflags(ios::fixed) << setprecision(5) << setw(10) << right << PP->PP1D_weighted(i) << endl;
else if (PP->pairInfo()==PairInfo::_extra_)

  for (int i=0; i<PP->nbins(); i++)
    fout << setiosflags(ios::fixed) << setprecision(5) << setw(10) << right << i
         << " " << setiosflags(ios::fixed) << setprecision(5) << setw(10) << right << PP->scale(i)
         << " " << setiosflags(ios::fixed) << setprecision(5) << setw(10) << right << PP->PP1D(i)
         << " " << setiosflags(ios::fixed) << setprecision(5) << setw(10) << right << PP->PP1D_weighted(i)
         << " " << setiosflags(ios::fixed) << setprecision(5) << setw(10) << right << PP->scale_mean(i)
         << " " << setiosflags(ios::fixed) << setprecision(5) << setw(10) << right << PP->scale_sigma(i)
         << " " << setiosflags(ios::fixed) << setprecision(5) << setw(10) << right << PP->z_mean(i)
         << " " << setiosflags(ios::fixed) << setprecision(5) << setw(10) << right << PP->z_sigma(i) << endl;
else
  ErrorCBL("no such pairInfo!", "write_pairs", "TwoPointCorrelation1D.cpp");

fout.clear(); fout.close(); coutCBL << "I wrote the file " << file_out << endl;
// ----- read_pairs: read the pair counts of a single Pair object from file -----

if (dir.size()==0)
  ErrorCBL("dir.size()=0!", "read_pairs", "TwoPointCorrelation1D.cpp");

double pairs, weighted_pairs;
if (PP->pairInfo()==PairInfo::_standard_)

  for (size_t dd=0; dd<dir.size(); dd++) {

    string file_in = dir[dd]+file;
    ifstream fin(file_in.c_str()); checkIO(fin, file_in);

    string line;
    while (getline(fin, line)) {

      stringstream ss(line);
      vector<double> num; double val;
      while (ss >> val) num.emplace_back(val);

      if (num.size()!=4)
        ErrorCBL("the number of columns in the input pair file: "+file_in+" must be 4!", "read_pairs", "TwoPointCorrelation1D.cpp");

      const int i = int(num[0]); // bin index (column 1, the scale, is not re-read)
      pairs = num[2];
      weighted_pairs = num[3];

      PP->add_data1D(i, {pairs, weighted_pairs});
    }

    fin.clear(); fin.close(); coutCBL << "I read the file " << file_in << endl;
  }
else if (PP->pairInfo()==PairInfo::_extra_)

  for (size_t dd=0; dd<dir.size(); dd++) {

    string file_in = dir[dd]+file;
    ifstream fin(file_in.c_str()); checkIO(fin, file_in);

    double scale_mean, scale_sigma, z_mean, z_sigma;

    string line;
    while (getline(fin, line)) {

      stringstream ss(line);
      vector<double> num; double val;
      while (ss >> val) num.emplace_back(val);

      if (num.size()!=8)
        ErrorCBL("the number of columns in the input pair file: "+file_in+" must be 8!", "read_pairs", "TwoPointCorrelation1D.cpp");

      const int i = int(num[0]);
      pairs = num[2];
      weighted_pairs = num[3];
      scale_mean = num[4];
      scale_sigma = num[5];
      z_mean = num[6];
      z_sigma = num[7];

      // the scatter terms are stored as weighted second moments, so that they can be summed
      // consistently when pair counts from several files are accumulated
      PP->add_data1D(i, {pairs, weighted_pairs, scale_mean, pow(scale_sigma, 2)*weighted_pairs, z_mean, pow(z_sigma, 2)*weighted_pairs});
    }

    fin.clear(); fin.close(); coutCBL << "I read the file " << file_in << endl;
  }
else
  ErrorCBL("no such pairInfo!", "read_pairs", "TwoPointCorrelation1D.cpp");
// ----- write_pairs: write region-by-region pair counts to file -----

size_t nRegions = m_data->region_list().size();

bool cross = (PP.size() == nRegions*nRegions);
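// If PP holds nRegions*nRegions Pair objects the counts are cross-region pairs and the full
// (i,j) matrix is stored; otherwise only the upper triangle (j >= i) is kept, flattened with
//   index = i*nRegions + j - i*(i+1)/2
// (written below as i*nRegions+j-(i-1)*i/2-i). For example, with nRegions = 3 the region
// pairs (0,0), (0,1), (0,2), (1,1), (1,2), (2,2) map to indices 0, 1, 2, 3, 4, 5.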
string MK = "mkdir -p "+dir;
if (system(MK.c_str())) {}

string file_out = dir+file;
ofstream fout(file_out.c_str()); checkIO(fout, file_out);
if (PP[0]->pairInfo()==PairInfo::_standard_)

  for (size_t i=0; i<nRegions; i++)
    for (size_t j=(cross) ? 0 : i; j<nRegions; j++) {
      int index = (cross) ? i*nRegions+j : i*nRegions+j-(i-1)*i/2-i;
      for (int r1=0; r1<PP[index]->nbins(); r1++)
        if (PP[index]->PP1D(r1)>0)
          fout << setiosflags(ios::fixed) << setprecision(5) << setw(10) << right << i
               << " " << setiosflags(ios::fixed) << setprecision(5) << setw(10) << right << j
               << " " << setiosflags(ios::fixed) << setprecision(5) << setw(10) << right << r1
               << " " << setiosflags(ios::fixed) << setprecision(5) << setw(10) << right << PP[index]->scale(r1)
               << " " << setiosflags(ios::fixed) << setprecision(5) << setw(10) << right << PP[index]->PP1D(r1)
               << " " << setiosflags(ios::fixed) << setprecision(5) << setw(10) << right << PP[index]->PP1D_weighted(r1) << endl;
    }
else if (PP[0]->pairInfo()==PairInfo::_extra_)

  for (size_t i=0; i<nRegions; i++)
    for (size_t j=(cross) ? 0 : i; j<nRegions; j++) {
      int index = (cross) ? i*nRegions+j : i*nRegions+j-(i-1)*i/2-i;
      for (int r1=0; r1<PP[index]->nbins(); r1++)
        if (PP[index]->PP1D(r1)>0)
          fout << setiosflags(ios::fixed) << setprecision(5) << setw(10) << right << i
               << " " << setiosflags(ios::fixed) << setprecision(5) << setw(10) << right << j
               << " " << setiosflags(ios::fixed) << setprecision(5) << setw(10) << right << r1
               << " " << setiosflags(ios::fixed) << setprecision(5) << setw(10) << right << PP[index]->scale(r1)
               << " " << setiosflags(ios::fixed) << setprecision(5) << setw(10) << right << PP[index]->PP1D(r1)
               << " " << setiosflags(ios::fixed) << setprecision(5) << setw(10) << right << PP[index]->PP1D_weighted(r1)
               << " " << setiosflags(ios::fixed) << setprecision(5) << setw(10) << right << PP[index]->scale_mean(r1)
               << " " << setiosflags(ios::fixed) << setprecision(5) << setw(10) << right << PP[index]->scale_sigma(r1)
               << " " << setiosflags(ios::fixed) << setprecision(5) << setw(10) << right << PP[index]->z_mean(r1)
               << " " << setiosflags(ios::fixed) << setprecision(5) << setw(10) << right << PP[index]->z_sigma(r1) << endl;
    }
else
  ErrorCBL("no such pairInfo!", "write_pairs", "TwoPointCorrelation1D.cpp");

fout.clear(); fout.close();
// ----- read_pairs: read region-by-region pair counts from file -----

size_t nRegions = m_data->region_list().size();

bool cross = (PP.size() == nRegions*nRegions);

int i, j, bin, index;
double rad, pairs, weighted_pairs;
if (PP[0]->pairInfo()==PairInfo::_standard_)

  for (size_t dd=0; dd<dir.size(); dd++) {

    string ff = dir[dd]+file;
    ifstream fin(ff.c_str()); checkIO(fin, ff);

    while (fin >> i >> j >> bin >> rad >> pairs >> weighted_pairs) {
      index = (cross) ? i*nRegions+j : i*nRegions+j-(i-1)*i/2-i;
      PP[index]->add_data1D(bin, {pairs, weighted_pairs});
    }

    fin.clear(); fin.close(); coutCBL << "I read the file " << ff << endl;
  }
else if (PP[0]->pairInfo()==PairInfo::_extra_)

  for (size_t dd=0; dd<dir.size(); dd++) {

    string ff = dir[dd]+file;
    ifstream fin(ff.c_str()); checkIO(fin, ff);

    double scale_mean, scale_sigma, z_mean, z_sigma;

    while (fin >> i >> j >> bin >> rad >> pairs >> weighted_pairs >> scale_mean >> scale_sigma >> z_mean >> z_sigma) {
      index = (cross) ? i*nRegions+j : i*nRegions+j-(i-1)*i/2-i;
      PP[index]->add_data1D(bin, {pairs, weighted_pairs, scale_mean, pow(scale_sigma, 2)*weighted_pairs, z_mean, pow(z_sigma, 2)*weighted_pairs});
    }

    fin.clear(); fin.close(); coutCBL << "I read the file " << ff << endl;
  }
else
  ErrorCBL("no such pairInfo!", "read_pairs", "TwoPointCorrelation1D.cpp");
// ----- data_with_extra_info: pack the measurement into a Data1D_extra object -----

vector<vector<double>> extra(4);

for (int i=0; i<dd->nbins(); ++i) {
  extra[0].push_back(dd->scale_mean(i));
  extra[1].push_back(dd->scale_sigma(i));
  extra[2].push_back(dd->z_mean(i));
  extra[3].push_back(dd->z_sigma(i));
}

return move(unique_ptr<data::Data1D_extra>(new data::Data1D_extra(rad, xi, error, extra)));
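// The four extra columns attached to the Data1D_extra object are, per bin, the weighted mean
// and standard deviation of the pair separation and the weighted mean and standard deviation
// of the redshift, in this order (scale_mean, scale_sigma, z_mean, z_sigma).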
// ----- correlation_NaturalEstimator: xi(r) from the natural estimator -----

vector<double> rad(m_dd->nbins()), xi(m_dd->nbins(), -1.), error(m_dd->nbins(), 1000.);

// (weighted) numbers of data and random objects
int nD = (nData>0) ? nData : m_data->nObjects();
double nDw = (nData_weighted>0) ? nData_weighted : m_data->weightedN();
int nR = (nRandom>0) ? nRandom : m_random->nObjects();
double nRw = (nRandom_weighted>0) ? nRandom_weighted : m_random->weightedN();

// inverse of the total numbers of data-data and random-random pairs
double nDDi = 1./(nDw*(nDw-1.)*0.5);
double nRRi = 1./(nRw*(nRw-1.)*0.5);
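// Natural estimator: with the normalised counts DD(r) = DD_w(r)*nDDi and RR(r) = RR_w(r)*nRRi,
// i.e. weighted pair counts divided by N_w*(N_w-1)/2, the correlation function in each bin is
//   xi(r) = DD(r)/RR(r) - 1,
// clipped below at -1; the error is the Poisson estimate from the raw (unweighted) counts.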
for (int i=0; i<dd->nbins(); i++) {

  rad[i] = dd->scale(i);

  if (dd->PP1D_weighted(i)>0) {

    if (rr->PP1D_weighted(i)<1.e-30)
      ErrorCBL("there are no random objects in the bin "+conv(i, par::fINT)+"; please, either increase the total number of random objects or enlarge the bin size! (dd="+conv(dd->PP1D_weighted(i), par::fDP3)+", rr="+conv(rr->PP1D_weighted(i), par::fDP3)+")!", "correlation_NaturalEstimator", "TwoPointCorrelation1D.cpp");

    // normalised data-data and random-random pair counts
    double DD_norm = dd->PP1D_weighted(i)*nDDi;
    double RR_norm = rr->PP1D_weighted(i)*nRRi;

    // natural estimator and Poisson error
    xi[i] = max(-1., DD_norm/RR_norm-1.);
    error[i] = PoissonError(Estimator::_natural_, dd->PP1D(i), rr->PP1D(i), 0, nD, nR);
  }
}

return (!m_compute_extra_info) ? move(unique_ptr<data::Data1D>(new data::Data1D(rad, xi, error))) : data_with_extra_info(dd, rad, xi, error);
// ----- correlation_LandySzalayEstimator: xi(r) from the Landy-Szalay estimator -----

// (weighted) numbers of data and random objects
int nD = (nData>0) ? nData : m_data->nObjects();
double nDw = (nData_weighted>0) ? nData_weighted : m_data->weightedN();
int nR = (nRandom>0) ? nRandom : m_random->nObjects();
double nRw = (nRandom_weighted>0) ? nRandom_weighted : m_random->weightedN();

// inverse of the total numbers of data-data, random-random (diluted) and data-random pairs
double nDDi = 1./(nDw*(nDw-1.)*0.5);
double nRRi = 1./(nRw*m_random_dilution_fraction*(nRw*m_random_dilution_fraction-1.)*0.5);
double nDRi = 1./(nDw*nRw);
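// Landy-Szalay estimator: with DD, RR and DR normalised by the corresponding total numbers of
// pairs (the RR term uses the diluted random sample, nRw*m_random_dilution_fraction),
//   xi(r) = (DD(r) - 2 DR(r) + RR(r)) / RR(r) = (DD(r) - 2 DR(r)) / RR(r) + 1,
// again clipped below at -1, with the Poisson error computed from the raw counts.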
vector<double> rad(m_dd->nbins()), xi(m_dd->nbins(), -1.), error(m_dd->nbins(), 1000.);

for (int i=0; i<dd->nbins(); i++) {

  rad[i] = dd->scale(i);

  if (dd->PP1D_weighted(i)>0) {

    if (rr->PP1D_weighted(i)<1.e-30)
      ErrorCBL("there are no random objects in the bin "+conv(i, par::fINT)+"; please, either increase the total number of random objects or enlarge the bin size! (dd="+conv(dd->PP1D_weighted(i), par::fDP3)+", rr="+conv(rr->PP1D_weighted(i), par::fDP3)+")!", "correlation_LandySzalayEstimator", "TwoPointCorrelation1D.cpp");

    // normalised pair counts
    double DD_norm = dd->PP1D_weighted(i)*nDDi;
    double RR_norm = rr->PP1D_weighted(i)*nRRi;
    double DR_norm = dr->PP1D_weighted(i)*nDRi;

    // Landy-Szalay estimator and Poisson error
    xi[i] = max(-1., (DD_norm-2.*DR_norm)/RR_norm+1.);
    error[i] = PoissonError(Estimator::_LandySzalay_, dd->PP1D(i), rr->PP1D(i), dr->PP1D(i), nD, nR);
  }
}

return (!m_compute_extra_info) ? move(unique_ptr<data::Data1D>(new data::Data1D(rad, xi, error))) : data_with_extra_info(dd, rad, xi, error);
// ----- XiJackknife: jackknife resampling of xi(r), natural estimator -----

vector<long> region_list = m_data->region_list();
size_t nRegions = region_list.size();

vector<shared_ptr<data::Data>> data;

for (size_t i=0; i<nRegions; i++) {

  coutCBL << "analysing region: " << i << " of " << nRegions << "\r"; cout.flush();

  auto dd_SS = Pair::Create(m_dd->pairType(), m_dd->pairInfo(), m_dd->sMin(), m_dd->sMax(), m_dd->nbins(), m_dd->shift(), m_dd->angularUnits(), m_dd->angularWeight());
  auto rr_SS = Pair::Create(m_rr->pairType(), m_rr->pairInfo(), m_rr->sMin(), m_rr->sMax(), m_rr->nbins(), m_rr->shift(), m_rr->angularUnits(), m_rr->angularWeight());

  // region weights: 1 everywhere and 0 for region i, which this jackknife resampling leaves out;
  // pair counts with ww = w[j]*w[k] = 0 are skipped
  vector<int> w(nRegions, 1);
  w[i] = 0;

  for (size_t j=0; j<nRegions; j++)
    for (size_t k=j; k<nRegions; k++) {
      int index = j*nRegions-(j-1)*j/2+k-j;
      double ww = w[j]*w[k];
      if (ww>0)
        for (int bin=0; bin<dd_SS->nbins(); bin++) {
          dd_SS->add_data1D(bin, dd[index]);
          rr_SS->add_data1D(bin, rr[index]);
        }
    }

  double nData_SS = m_data->nObjects_condition(Var::_Region_, region_list[i], region_list[i]+1, 1);
  double nData_SS_weighted = m_data->weightedN_condition(Var::_Region_, region_list[i], region_list[i]+1, 1);
  double nRandom_SS = m_random->nObjects_condition(Var::_Region_, region_list[i], region_list[i]+1, 1);
  double nRandom_SS_weighted = m_random->weightedN_condition(Var::_Region_, region_list[i], region_list[i]+1, 1);

  data.push_back(move(correlation_NaturalEstimator(dd_SS, rr_SS, nData_SS, nData_SS_weighted, nRandom_SS, nRandom_SS_weighted)));
}

return data;
// ----- XiJackknife: jackknife resampling of xi(r), Landy-Szalay estimator -----

vector<long> region_list = m_data->region_list();
size_t nRegions = region_list.size();

vector<shared_ptr<data::Data>> data;

for (size_t i=0; i<nRegions; i++) {

  coutCBL << "analysing region: " << i << " of " << nRegions << "\r"; cout.flush();

  auto dd_SS = Pair::Create(m_dd->pairType(), m_dd->pairInfo(), m_dd->sMin(), m_dd->sMax(), m_dd->nbins(), m_dd->shift(), m_dd->angularUnits(), m_dd->angularWeight());
  auto rr_SS = Pair::Create(m_rr->pairType(), m_rr->pairInfo(), m_rr->sMin(), m_rr->sMax(), m_rr->nbins(), m_rr->shift(), m_rr->angularUnits(), m_rr->angularWeight());
  auto dr_SS = Pair::Create(m_dr->pairType(), m_dr->pairInfo(), m_dr->sMin(), m_dr->sMax(), m_dr->nbins(), m_dr->shift(), m_dr->angularUnits(), m_dr->angularWeight());

  // region weights: 1 everywhere and 0 for region i, which this jackknife resampling leaves out
  vector<int> w(nRegions, 1);
  w[i] = 0;

  for (size_t j=0; j<nRegions; j++) {

    // auto (DD, RR) pair counts: upper triangle of the region matrix
    for (size_t k=j; k<nRegions; k++) {
      if (w[j]*w[k]>0) { // skip pair counts involving the excluded region
        int index = j*nRegions+k-(j-1)*j/2-j;
        for (int bin=0; bin<dd_SS->nbins(); bin++) {
          dd_SS->add_data1D(bin, dd[index]);
          rr_SS->add_data1D(bin, rr[index]);
        }
      }
    }

    // cross (DR) pair counts: full region matrix
    for (size_t k=0; k<nRegions; k++) {
      if (w[j]*w[k]>0) {
        int index = j*nRegions+k;
        for (int bin=0; bin<dd_SS->nbins(); bin++)
          dr_SS->add_data1D(bin, dr[index]);
      }
    }
  }

  double nData_SS = m_data->nObjects_condition(Var::_Region_, region_list[i], region_list[i]+1, 1);
  double nData_SS_weighted = m_data->weightedN_condition(Var::_Region_, region_list[i], region_list[i]+1, 1);
  double nRandom_SS = m_random->nObjects_condition(Var::_Region_, region_list[i], region_list[i]+1, 1);
  double nRandom_SS_weighted = m_random->weightedN_condition(Var::_Region_, region_list[i], region_list[i]+1, 1);

  data.push_back(move(correlation_LandySzalayEstimator(dd_SS, rr_SS, dr_SS, nData_SS, nData_SS_weighted, nRandom_SS, nRandom_SS_weighted)));
}

return data;
// ----- XiJackknifeTest: jackknife resampling of xi(r) from per-region pair counts, natural estimator -----

vector<long> region_list = m_data->region_list();
size_t nRegions = dd.size();

vector<shared_ptr<data::Data>> data;

for (size_t i=0; i<nRegions; i++) {

  double nData = m_data->nObjects_condition(Var::_Region_, region_list[i], region_list[i]+1, 1);
  double nData_weighted = m_data->weightedN_condition(Var::_Region_, region_list[i], region_list[i]+1, 1);
  double nRandom = m_random->nObjects_condition(Var::_Region_, region_list[i], region_list[i]+1, 1);
  double nRandom_weighted = m_random->weightedN_condition(Var::_Region_, region_list[i], region_list[i]+1, 1);

  data.push_back(move(correlation_NaturalEstimator(dd[i], rr[i], nData, nData_weighted, nRandom, nRandom_weighted)));
}

return data;
// ----- XiJackknifeTest: jackknife resampling of xi(r) from per-region pair counts, Landy-Szalay estimator -----

vector<long> region_list = m_data->region_list();
size_t nRegions = dd.size();

vector<shared_ptr<data::Data>> data;

for (size_t i=0; i<nRegions; i++) {

  double nData = m_data->nObjects_condition(Var::_Region_, region_list[i], region_list[i]+1, 1);
  double nData_weighted = m_data->weightedN_condition(Var::_Region_, region_list[i], region_list[i]+1, 1);
  double nRandom = m_random->nObjects_condition(Var::_Region_, region_list[i], region_list[i]+1, 1);
  double nRandom_weighted = m_random->weightedN_condition(Var::_Region_, region_list[i], region_list[i]+1, 1);

  data.push_back(move(correlation_LandySzalayEstimator(dd[i], rr[i], dr[i], nData, nData_weighted, nRandom, nRandom_weighted)));
}

return data;
// ----- XiBootstrap: bootstrap resampling of xi(r), natural estimator -----

vector<long> region_list = m_data->region_list();
size_t nRegions = region_list.size();

vector<shared_ptr<data::Data>> data;
vector<double> nData_reg, nData_reg_weighted, nRandom_reg, nRandom_reg_weighted;

// per-region numbers of (weighted) data and random objects
for (size_t i=0; i<nRegions; i++) {
  nData_reg.push_back(m_data->nObjects_condition(Var::_Region_, region_list[i], region_list[i]+1, 0));
  nData_reg_weighted.push_back(m_data->weightedN_condition(Var::_Region_, region_list[i], region_list[i]+1, 0));
  nRandom_reg.push_back(m_random->nObjects_condition(Var::_Region_, region_list[i], region_list[i]+1, 0));
  nRandom_reg_weighted.push_back(m_random->weightedN_condition(Var::_Region_, region_list[i], region_list[i]+1, 0));
}
for (int i=0; i<nMocks; i++) {

  coutCBL << "analysing mock: " << i << " of " << nMocks << "\r"; cout.flush();

  auto dd_SS = Pair::Create(m_dd->pairType(), m_dd->pairInfo(), m_dd->sMin(), m_dd->sMax(), m_dd->nbins(), m_dd->shift(), m_dd->angularUnits(), m_dd->angularWeight());
  auto rr_SS = Pair::Create(m_rr->pairType(), m_rr->pairInfo(), m_rr->sMin(), m_rr->sMax(), m_rr->nbins(), m_rr->shift(), m_rr->angularUnits(), m_rr->angularWeight());

  double nData_SS = 0., nData_SS_weighted = 0., nRandom_SS = 0., nRandom_SS_weighted = 0.;
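  // Bootstrap weights: w[r] counts how many times region r is drawn, with replacement, in this
  // resampling, so each region enters the resampled object numbers and pair counts with integer
  // multiplicity; val*nRegions draws are made per mock (val and the seeded random draw of the
  // region index are defined outside this excerpt).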
  vector<int> w(nRegions, 0);
  for (size_t n=0; n<val*nRegions; n++) {
    // draw a region index at random and increment its weight (body not captured in this excerpt)
  }

  for (size_t j=0; j<nRegions; j++) {

    nData_SS += w[j]*nData_reg[j];
    nData_SS_weighted += w[j]*nData_reg_weighted[j];
    nRandom_SS += w[j]*nRandom_reg[j];
    nRandom_SS_weighted += w[j]*nRandom_reg_weighted[j];

    for (size_t k=j; k<nRegions; k++) {

      int index = j*nRegions+k-(j-1)*j/2-j;
      double ww = w[j]*w[k];
      for (int bin=0; bin<dd_SS->nbins(); bin++) {
        dd_SS->add_data1D(bin, dd[index], ww);
        rr_SS->add_data1D(bin, rr[index], ww);
      }
    }
  }

  data.push_back(move(correlation_NaturalEstimator(dd_SS, rr_SS, nData_SS, nData_SS_weighted, nRandom_SS, nRandom_SS_weighted)));
}

return data;
// ----- XiBootstrap: bootstrap resampling of xi(r), Landy-Szalay estimator -----

std::vector<std::shared_ptr<data::Data>> cbl::measure::twopt::TwoPointCorrelation1D::XiBootstrap (const int nMocks, const std::vector<std::shared_ptr<pairs::Pair>> dd, const std::vector<std::shared_ptr<pairs::Pair>> rr, const std::vector<std::shared_ptr<pairs::Pair>> dr, const int seed)
{
  vector<long> region_list = m_data->region_list();
  size_t nRegions = region_list.size();

  vector<shared_ptr<data::Data>> data;
  vector<double> nData_reg, nData_reg_weighted, nRandom_reg, nRandom_reg_weighted;

  for (size_t i=0; i<nRegions; i++) {
    nData_reg.push_back(m_data->nObjects_condition(Var::_Region_, region_list[i], region_list[i]+1, 0));
    nData_reg_weighted.push_back(m_data->weightedN_condition(Var::_Region_, region_list[i], region_list[i]+1, 0));
    nRandom_reg.push_back(m_random->nObjects_condition(Var::_Region_, region_list[i], region_list[i]+1, 0));
    nRandom_reg_weighted.push_back(m_random->weightedN_condition(Var::_Region_, region_list[i], region_list[i]+1, 0));
  }
  for (int i=0; i<nMocks; i++) {

    coutCBL << "analysing mock: " << i << " of " << nMocks << "\r"; cout.flush();

    auto dd_SS = Pair::Create(m_dd->pairType(), m_dd->pairInfo(), m_dd->sMin(), m_dd->sMax(), m_dd->nbins(), m_dd->shift(), m_dd->angularUnits(), m_dd->angularWeight());
    auto rr_SS = Pair::Create(m_rr->pairType(), m_rr->pairInfo(), m_rr->sMin(), m_rr->sMax(), m_rr->nbins(), m_rr->shift(), m_rr->angularUnits(), m_rr->angularWeight());
    auto dr_SS = Pair::Create(m_dr->pairType(), m_dr->pairInfo(), m_dr->sMin(), m_dr->sMax(), m_dr->nbins(), m_dr->shift(), m_dr->angularUnits(), m_dr->angularWeight());

    double nData_SS = 0., nData_SS_weighted = 0., nRandom_SS = 0., nRandom_SS_weighted = 0.;
    // bootstrap weights over regions, drawn with the same scheme as in the natural-estimator overload above
    vector<int> w(nRegions, 0);
    for (size_t n=0; n<val*nRegions; n++) {
      // draw a region index at random (seeded with `seed`) and increment its weight (body not captured in this excerpt)
    }

    for (size_t j=0; j<nRegions; j++) {

      nData_SS += w[j]*nData_reg[j];
      nData_SS_weighted += w[j]*nData_reg_weighted[j];
      nRandom_SS += w[j]*nRandom_reg[j];
      nRandom_SS_weighted += w[j]*nRandom_reg_weighted[j];

      // auto (DD, RR) pair counts: upper triangle, weighted by w[j]*w[k]
      for (size_t k=j; k<nRegions; k++) {

        int index = j*nRegions+k-(j-1)*j/2-j;
        double ww = w[j]*w[k];
        for (int bin=0; bin<dd_SS->nbins(); bin++) {
          dd_SS->add_data1D(bin, dd[index], ww);
          rr_SS->add_data1D(bin, rr[index], ww);
        }
      }

      // cross (DR) pair counts: full region matrix
      for (size_t k=0; k<nRegions; k++) {

        int index = j*nRegions+k;
        double ww = w[j]*w[k];
        for (int bin=0; bin<dr_SS->nbins(); bin++)
          dr_SS->add_data1D(bin, dr[index], ww);
      }
    }

    data.push_back(move(correlation_LandySzalayEstimator(dd_SS, rr_SS, dr_SS, nData_SS, nData_SS_weighted, nRandom_SS, nRandom_SS_weighted)));
  }

  return data;
}
// read_covariance / write_covariance: read and write the measured covariance matrix
m_dataset->set_covariance(dir+file);

m_dataset->write_covariance(dir, file);
// ----- compute_covariance: covariance matrix from resampled xi measurements -----
vector<vector<double>> Xi;

for (size_t i=0; i<xi.size(); i++) {
  // collect the i-th resampled correlation function into Xi (body not captured in this excerpt)
}

vector<vector<double>> cov_mat;
covariance_matrix(Xi, cov_mat, JK);
m_dataset->set_covariance(cov_mat);
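// Covariance of the measured xi(r): C_ij is estimated from the scatter of the resampled
// measurements in Xi about their mean, with the normalisation depending on whether the
// resamplings are jackknife (JK = true) or bootstrap realisations.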
// ----- compute_covariance: covariance matrix from xi measurements read from files -----
vector<double> rad, mean;
vector<vector<double>> cov_mat;
// (reading of the resampled measurements and the covariance computation are not captured in this excerpt)
m_dataset->set_covariance(cov_mat);