CosmoBolognaLib
Free Software C++/Python libraries for cosmological calculations
TwoPointCorrelation1D.cpp
Go to the documentation of this file.
1 /********************************************************************
2  * Copyright (C) 2015 by Federico Marulli and Alfonso Veropalumbo *
3  * federico.marulli3@unibo.it *
4  * *
5  * This program is free software; you can redistribute it and/or *
6  * modify it under the terms of the GNU General Public License as *
7  * published by the Free Software Foundation; either version 2 of *
8  * the License, or (at your option) any later version. *
9  * *
10  * This program is distributed in the hope that it will be useful, *
11  * but WITHOUT ANY WARRANTY; without even the implied warranty of *
12  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
13  * GNU General Public License for more details. *
14  * *
15  * You should have received a copy of the GNU General Public *
16  * License along with this program; if not, write to the Free *
17  * Software Foundation, Inc., *
18  * 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *
19  ********************************************************************/
20 
38 #include "TwoPointCorrelation1D.h"
39 #include "Data1D_extra.h"
40 
41 using namespace std;
42 
43 using namespace cbl;
44 using namespace catalogue;
45 using namespace chainmesh;
46 using namespace pairs;
47 using namespace measure;
48 using namespace twopt;
49 
50 
51 // ============================================================================
52 
53 
54 void cbl::measure::twopt::TwoPointCorrelation1D::write_pairs (const std::shared_ptr<pairs::Pair> PP, const std::string dir, const std::string file) const
55 {
56  string MK = "mkdir -p "+dir; if (system (MK.c_str())) {}
57 
58  string file_out = dir+file;
59  ofstream fout(file_out.c_str()); checkIO(fout, file_out);
60 
61 
62  // ----- standard info: scales at the bin centre + number of pairs -----
63 
64  if (PP->pairInfo()==PairInfo::_standard_)
65  for (int i=0; i<PP->nbins(); i++)
66  fout << setiosflags(ios::fixed) << setprecision(5) << setw(10) << right << i
67  << " " << setiosflags(ios::fixed) << setprecision(5) << setw(10) << right << PP->scale(i)
68  << " " << setiosflags(ios::fixed) << setprecision(5) << setw(10) << right << PP->PP1D(i)
69  << " " << setiosflags(ios::fixed) << setprecision(5) << setw(10) << right << PP->PP1D_weighted(i) << endl;
70 
71 
72  // ----- standard + extra info -----
73 
74  else if (PP->pairInfo()==PairInfo::_extra_)
75  for (int i=0; i<PP->nbins(); i++)
76  fout << setiosflags(ios::fixed) << setprecision(5) << setw(10) << right << i
77  << " " << setiosflags(ios::fixed) << setprecision(5) << setw(10) << right << PP->scale(i)
78  << " " << setiosflags(ios::fixed) << setprecision(5) << setw(10) << right << PP->PP1D(i)
79  << " " << setiosflags(ios::fixed) << setprecision(5) << setw(10) << right << PP->PP1D_weighted(i)
80  << " " << setiosflags(ios::fixed) << setprecision(5) << setw(10) << right << PP->scale_mean(i)
81  << " " << setiosflags(ios::fixed) << setprecision(5) << setw(10) << right << PP->scale_sigma(i)
82  << " " << setiosflags(ios::fixed) << setprecision(5) << setw(10) << right << PP->z_mean(i)
83  << " " << setiosflags(ios::fixed) << setprecision(5) << setw(10) << right << PP->z_sigma(i) << endl;
84 
85  else
86  ErrorCBL("no such pairInfo!", "write_pairs", "TwoPointCorrelation1D.cpp");
87 
88  fout.clear(); fout.close(); coutCBL << "I wrote the file " << file_out << endl;
89 
90 }
91 
92 
93 // ============================================================================
94 
95 
96 void cbl::measure::twopt::TwoPointCorrelation1D::read_pairs (std::shared_ptr<pairs::Pair> PP, const std::vector<std::string> dir, const std::string file) const
97 {
98  if (dir.size()==0)
99  ErrorCBL("dir.size()=0!", "read_pairs", "TwoPointCorrelation1D.cpp");
100 
101  string line;
102  int i;
103  double pairs, weighted_pairs;
104 
105 
106  // ----- standard info: scales at the bin centre + number of pairs -----
107 
108  if (PP->pairInfo()==PairInfo::_standard_)
109 
110  for (size_t dd=0; dd<dir.size(); dd++) {
111 
112  string file_in = dir[dd]+file;
113  ifstream fin(file_in.c_str()); checkIO(fin, file_in);
114 
115  while (getline(fin, line)) {
116 
117  stringstream ss(line);
118  vector<double> num;
119  double val;
120  while (ss >> val) num.emplace_back(val);
121 
122  if (num.size()!=4)
123  ErrorCBL("the number of lines in the input pair file: "+file_in+" must be 4!", "read_pairs", "TwoPointCorrelation1D.cpp");
124 
125  i = int(num[0]);
126  pairs = num[2];
127  weighted_pairs = num[3];
128 
129  PP->add_data1D(i, {pairs, weighted_pairs});
130  }
131 
132  fin.clear(); fin.close(); coutCBL << "I read the file " << file_in << endl;
133  }
134 
135 
136  // ----- standard + extra info -----
137 
138  else if (PP->pairInfo()==PairInfo::_extra_)
139 
140  for (size_t dd=0; dd<dir.size(); dd++) {
141 
142  string file_in = dir[dd]+file;
143  ifstream fin(file_in.c_str()); checkIO(fin, file_in);
144 
145  double scale_mean, scale_sigma, z_mean, z_sigma;
146 
147  while (getline(fin, line)) {
148 
149  stringstream ss(line);
150  vector<double> num;
151  double val;
152  while (ss >> val) num.emplace_back(val);
153 
154  if (num.size()!=8)
155  ErrorCBL("the number of lines in the input pair file: "+file_in+" must be 8!", "read_pairs", "TwoPointCorrelation1D.cpp");
156 
157  i = int(num[0]);
158  pairs = num[2];
159  weighted_pairs = num[3];
160  scale_mean = num[4];
161  scale_sigma = num[5];
162  z_mean = num[6];
163  z_sigma = num[7];
164 
165  PP->add_data1D(i, {pairs, weighted_pairs, scale_mean, pow(scale_sigma, 2)*weighted_pairs, z_mean, pow(z_sigma, 2)*weighted_pairs});
166 
167  }
168 
169  fin.clear(); fin.close(); coutCBL << "I read the file " << file_in << endl;
170  }
171 
172  else
173  ErrorCBL("no such pairInfo!", "read_pairs", "TwoPointCorrelation1D.cpp");
174 
175 }
176 
177 
178 // ============================================================================
179 
180 
181 void cbl::measure::twopt::TwoPointCorrelation1D::write_pairs (const std::vector<std::shared_ptr<pairs::Pair>> PP, const std::string dir, const std::string file) const
182 {
183  size_t nRegions = m_data->region_list().size();
184 
185  bool cross = (PP.size() == nRegions*nRegions) ? true : false;
186 
187  string MK = "mkdir -p "+dir; if (system (MK.c_str())) {}
188 
189  string file_out = dir+file;
190  ofstream fout(file_out.c_str()); checkIO(fout, file_out);
191 
192 
193  // ----- standard info: scales at the bin centre + number of pairs -----
194 
195  if (PP[0]->pairInfo()==PairInfo::_standard_)
196 
197  for (size_t i=0; i<nRegions; i++)
198  for (size_t j=(cross) ? 0 : i; j<nRegions; j++) {
199  int index = (cross) ? i*nRegions+j : i*nRegions+j-(i-1)*i/2-i;
200  for (int r1=0; r1<PP[index]->nbins(); r1++)
201  if (PP[index]->PP1D(r1)>0)
202  fout << setiosflags(ios::fixed) << setprecision(5) << setw(10) << right << i
203  << " " << setiosflags(ios::fixed) << setprecision(5) << setw(10) << right << j
204  << " " << setiosflags(ios::fixed) << setprecision(5) << setw(10) << right << r1
205  << " " << setiosflags(ios::fixed) << setprecision(5) << setw(10) << right << PP[index]->scale(r1)
206  << " " << setiosflags(ios::fixed) << setprecision(5) << setw(10) << right << PP[index]->PP1D(r1)
207  << " " << setiosflags(ios::fixed) << setprecision(5) << setw(10) << right << PP[index]->PP1D_weighted(r1) << endl;
208  }
209 
210 
211  // ----- standard + extra info -----
212 
213  else if (PP[0]->pairInfo()==PairInfo::_extra_)
214 
215  for (size_t i=0; i<nRegions; i++)
216  for (size_t j=(cross) ? 0 : i; j<nRegions; j++) {
217  int index = (cross) ? i*nRegions+j : i*nRegions+j-(i-1)*i/2-i;
218  for (int r1=0; r1<PP[index]->nbins(); r1++)
219  if (PP[index]->PP1D(r1)>0)
220  fout << setiosflags(ios::fixed) << setprecision(5) << setw(10) << right << i
221  << " " << setiosflags(ios::fixed) << setprecision(5) << setw(10) << right << j
222  << " " << setiosflags(ios::fixed) << setprecision(5) << setw(10) << right << r1
223  << " " << setiosflags(ios::fixed) << setprecision(5) << setw(10) << right << PP[index]->scale(r1)
224  << " " << setiosflags(ios::fixed) << setprecision(5) << setw(10) << right << PP[index]->PP1D(r1)
225  << " " << setiosflags(ios::fixed) << setprecision(5) << setw(10) << right << PP[index]->PP1D_weighted(r1)
226  << " " << setiosflags(ios::fixed) << setprecision(5) << setw(10) << right << PP[index]->scale_mean(r1)
227  << " " << setiosflags(ios::fixed) << setprecision(5) << setw(10) << right << PP[index]->scale_sigma(r1)
228  << " " << setiosflags(ios::fixed) << setprecision(5) << setw(10) << right << PP[index]->z_mean(r1)
229  << " " << setiosflags(ios::fixed) << setprecision(5) << setw(10) << right << PP[index]->z_sigma(r1) << endl;
230  }
231 
232  else
233  ErrorCBL("no such pairInfo!", "write_pairs", "TwoPointCorrelation1D.cpp");
234 
235  fout.clear(); fout.close();
236 }
237 
238 
239 // ============================================================================
240 
241 
242 void cbl::measure::twopt::TwoPointCorrelation1D::read_pairs (std::vector<std::shared_ptr<pairs::Pair>> PP, const std::vector<std::string> dir, const std::string file) const
243 {
244  size_t nRegions = m_data->region_list().size();
245 
246  bool cross = (PP.size() == nRegions*nRegions) ? true : false;
247 
248  int i, j, bin, index;
249  double rad, pairs, weighted_pairs;
250 
251 
252  // ----- standard info: scales at the bin centre + number of pairs -----
253 
254  if (PP[0]->pairInfo()==PairInfo::_standard_)
255 
256  for (size_t dd=0; dd<dir.size(); dd++) {
257  string ff = dir[dd]+file;
258  ifstream fin(ff.c_str()); checkIO(fin, ff);
259 
260  while (fin >> i >> j >> bin >> rad >> pairs >> weighted_pairs) {
261  index = (cross) ? i*nRegions+j : i*nRegions+j-(i-1)*i/2-i;
262  PP[index]->add_data1D(bin, {pairs, weighted_pairs});
263  }
264 
265  fin.clear(); fin.close(); coutCBL << "I read the file " << ff << endl;
266  }
267 
268 
269  // ----- standard + extra info -----
270 
271  else if (PP[0]->pairInfo()==PairInfo::_extra_)
272 
273  for (size_t dd=0; dd<dir.size(); dd++) {
274  string ff = dir[dd]+file;
275  ifstream fin(ff.c_str()); checkIO(fin, ff);
276 
277  double scale_mean, scale_sigma, z_mean, z_sigma;
278 
279  while (fin >> i >> j >> bin >> rad >> pairs >> weighted_pairs >> scale_mean >> scale_sigma >> z_mean >> z_sigma) {
280  index = (cross) ? i*nRegions+j : i*nRegions+j-(i-1)*i/2-i;
281  PP[index]->add_data1D(bin, {pairs, weighted_pairs, scale_mean, pow(scale_sigma, 2)*weighted_pairs, z_mean, pow(z_sigma, 2)*weighted_pairs});
282  }
283 
284  fin.clear(); fin.close(); coutCBL << "I read the file " << ff << endl;
285  }
286 
287  else
288  ErrorCBL("no such pairInfo!", "read_pairs", "TwoPointCorrelation1D.cpp");
289 }
290 
291 
292 // ============================================================================
293 
294 
295 std::shared_ptr<data::Data> cbl::measure::twopt::TwoPointCorrelation1D::data_with_extra_info (const std::shared_ptr<pairs::Pair> dd, const std::vector<double> rad, const std::vector<double> xi, const std::vector<double> error) const
296 {
297  vector<vector<double>> extra(4);
298 
299  for (int i=0; i<dd->nbins(); ++i) {
300  extra[0].push_back(dd->scale_mean(i));
301  extra[1].push_back(dd->scale_sigma(i));
302  extra[2].push_back(dd->z_mean(i));
303  extra[3].push_back(dd->z_sigma(i));
304  }
305 
306  return move(unique_ptr<data::Data1D_extra>(new data::Data1D_extra(rad, xi, error, extra)));
307 }
308 
309 
310 // ============================================================================
311 
312 
313 std::shared_ptr<data::Data> cbl::measure::twopt::TwoPointCorrelation1D::correlation_NaturalEstimator (const std::shared_ptr<pairs::Pair> dd, const std::shared_ptr<pairs::Pair> rr, const int nData, const double nData_weighted, const int nRandom, const double nRandom_weighted)
314 {
315  vector<double> rad(m_dd->nbins()), xi(m_dd->nbins(), -1.), error(m_dd->nbins(), 1000.);
316 
317  // number of objects in the data catalogue
318  int nD = (nData>0) ? nData : m_data->nObjects();
319 
320  // weighted number of objects in the data catalogue
321  double nDw = (nData_weighted>0) ? nData_weighted : m_data->weightedN();
322 
323  // number of objects in the random catalogue
324  int nR = (nRandom>0) ? nRandom : m_random->nObjects();
325 
326  // weighted number of objects in the random catalogue
327  double nRw = (nRandom_weighted>0) ? nRandom_weighted : m_random->weightedN();
328 
329  // inverse of the total number of data-data pairs
330  double nDDi = 1./(nDw*(nDw-1.)*0.5);
331 
332  // inverse of the total number of random-random pairs
333  double nRRi = 1./(nRw*(nRw-1.)*0.5);
334 
335  for (int i=0; i<dd->nbins(); i++) {
336 
337  rad[i] = dd->scale(i);
338 
339  if (dd->PP1D_weighted(i)>0) {
340 
341  if (rr->PP1D_weighted(i)<1.e-30)
342  ErrorCBL("there are no random objects in the bin "+conv(i, par::fINT)+"; please, either increase the total number of random objects or enlarge the bin size! (dd="+conv(dd->PP1D_weighted(i), par::fDP3)+", rr="+conv(rr->PP1D_weighted(i), par::fDP3)+")!", "correlation_NaturalEstimator", "TwoPointCorrelation1D.cpp");
343 
344  // normalised number of data-data weighted pairs
345  double DD_norm = dd->PP1D_weighted(i)*nDDi;
346 
347  // normalised number of random-random weighted pairs
348  double RR_norm = rr->PP1D_weighted(i)*nRRi;
349 
350  // natural estimator
351  xi[i] = max(-1., DD_norm/RR_norm-1.);
352 
353  // Poisson error
354  error[i] = PoissonError(Estimator::_natural_, dd->PP1D(i), rr->PP1D(i), 0, nD, nR);
355 
356  }
357  }
358 
359  return (!m_compute_extra_info) ? move(unique_ptr<data::Data1D>(new data::Data1D(rad, xi, error))) : data_with_extra_info(dd, rad, xi, error);
360 }
361 
362 
363 // ============================================================================
364 
365 
366 std::shared_ptr<data::Data> cbl::measure::twopt::TwoPointCorrelation1D::correlation_LandySzalayEstimator (const std::shared_ptr<pairs::Pair> dd, const std::shared_ptr<pairs::Pair> rr, const std::shared_ptr<pairs::Pair> dr, const int nData, const double nData_weighted, const int nRandom, const double nRandom_weighted)
367 {
368 
369  // number of objects in the data catalogue
370  int nD = (nData>0) ? nData : m_data->nObjects();
371 
372  // weighted number of objects in the data catalogue
373  double nDw = (nData_weighted>0) ? nData_weighted : m_data->weightedN();
374 
375  // number of objects in the random catalogue
376  int nR = (nRandom>0) ? nRandom : m_random->nObjects();
377 
378  // weighted number of objects in the random catalogue
379  double nRw = (nRandom_weighted>0) ? nRandom_weighted : m_random->weightedN();
380 
381  // inverse of the total number of data-data pairs
382  double nDDi = 1./(nDw*(nDw-1.)*0.5);
383 
384  // inverse of the total number of random-random pairs
385  double nRRi = 1./(nRw*m_random_dilution_fraction*(nRw*m_random_dilution_fraction-1.)*0.5);
386 
387  // inverse of the total number of data-random pairs
388  double nDRi = 1./(nDw*nRw);
389 
390  vector<double> rad(m_dd->nbins()), xi(m_dd->nbins(), -1.), error(m_dd->nbins(), 1000.);
391 
392  for (int i=0; i<dd->nbins(); i++) {
393 
394  rad[i] = dd->scale(i);
395 
396  if (dd->PP1D_weighted(i)>0) {
397 
398  if (rr->PP1D_weighted(i)<1.e-30)
399  ErrorCBL("there are no random objects in the bin "+conv(i, par::fINT)+"; please, either increase the total number of random objects or enlarge the bin size! (dd="+conv(dd->PP1D_weighted(i), par::fDP3)+", rr="+conv(rr->PP1D_weighted(i), par::fDP3)+")!", "correlation_LandySzalayEstimator", "TwoPointCorrelation1D.cpp");
400 
401  // normalised number of data-data weighted pairs
402  double DD_norm = dd->PP1D_weighted(i)*nDDi;
403 
404  // normalised number of random-random weighted pairs
405  double RR_norm = rr->PP1D_weighted(i)*nRRi;
406 
407  // normalised number of data-random weighted pairs
408  double DR_norm = dr->PP1D_weighted(i)*nDRi;
409 
410  // Landy & Szalay estimator
411  xi[i] = max(-1., (DD_norm-2.*DR_norm)/RR_norm+1.);
412 
413  // Poisson error
414  error[i] = PoissonError(Estimator::_LandySzalay_, dd->PP1D(i), rr->PP1D(i), dr->PP1D(i), nD, nR);
415 
416  }
417  }
418 
419  return (!m_compute_extra_info) ? move(unique_ptr<data::Data1D>(new data::Data1D(rad, xi, error))) : data_with_extra_info(dd, rad, xi, error);
420 }
421 
422 
423 // ============================================================================
424 
425 
std::vector<std::shared_ptr<data::Data>> cbl::measure::twopt::TwoPointCorrelation1D::XiJackknife (const std::vector<std::shared_ptr<pairs::Pair>> dd, const std::vector<std::shared_ptr<pairs::Pair>> rr)
{
  // Jackknife resampling of xi(r) with the natural estimator: the i-th
  // realisation is built from all the regional pair counts except those
  // involving region i.

  // list of the region labels that define the jackknife subsamples
  vector<long> region_list = m_data->region_list();
  size_t nRegions = region_list.size();

  vector<shared_ptr<data::Data>> data;

  for (size_t i=0; i<nRegions; i++) {

    coutCBL << "analysing region: " << i << " of " << nRegions << "\r"; cout.flush();

    // empty pair objects (same binning as the full measurement) that will
    // accumulate the pair counts of the current realisation
    auto dd_SS = Pair::Create(m_dd->pairType(), m_dd->pairInfo(), m_dd->sMin(), m_dd->sMax(), m_dd->nbins(), m_dd->shift(), m_dd->angularUnits(), m_dd->angularWeight());
    auto rr_SS = Pair::Create(m_rr->pairType(), m_rr->pairInfo(), m_rr->sMin(), m_rr->sMax(), m_rr->nbins(), m_rr->shift(), m_rr->angularUnits(), m_rr->angularWeight());

    // w[j]=1 -> region j enters this realisation; w[i]=0 excludes region i
    vector<int> w(nRegions, 1);
    w[i] = 0;

    // accumulate the counts of every included (j,k) region pair, k>=j;
    // index maps (j,k) to its position in the upper-triangular storage
    for (size_t j=0; j<nRegions; j++)
      for (size_t k=j; k<nRegions; k++) {
	int index = j*nRegions-(j-1)*j/2+k-j;
	double ww = w[j]*w[k]; // 0 if either region is the excluded one
	if (ww>0)
	  for (int bin=0; bin<dd_SS->nbins(); bin++) {
	    dd_SS->add_data1D(bin, dd[index]);
	    rr_SS->add_data1D(bin, rr[index]);
	  }
      }

    // (weighted) numbers of data/random objects entering this realisation
    // — NOTE(review): the trailing argument 1 of *_condition presumably
    // selects the objects *outside* [region_list[i], region_list[i]+1);
    // confirm against the Catalogue API
    double nData_SS = m_data->nObjects_condition(Var::_Region_, region_list[i], region_list[i]+1, 1);
    double nData_SS_weighted = m_data->weightedN_condition(Var::_Region_, region_list[i], region_list[i]+1, 1);
    double nRandom_SS = m_random->nObjects_condition(Var::_Region_, region_list[i], region_list[i]+1, 1);
    double nRandom_SS_weighted = m_random->weightedN_condition(Var::_Region_, region_list[i], region_list[i]+1, 1);

    // measure xi(r) on the current realisation with the natural estimator
    data.push_back(move(correlation_NaturalEstimator(dd_SS, rr_SS, nData_SS, nData_SS_weighted, nRandom_SS, nRandom_SS_weighted)));
  }

  return data;
}
464 
465 
466 // ============================================================================
467 
468 
std::vector<std::shared_ptr<data::Data>> cbl::measure::twopt::TwoPointCorrelation1D::XiJackknife (const std::vector<std::shared_ptr<pairs::Pair>> dd, const std::vector<std::shared_ptr<pairs::Pair>> rr, const std::vector<std::shared_ptr<pairs::Pair>> dr)
{
  // Jackknife resampling of xi(r) with the Landy-Szalay estimator: the i-th
  // realisation is built from all the regional pair counts except those
  // involving region i.

  // list of the region labels that define the jackknife subsamples
  vector<long> region_list = m_data->region_list();
  size_t nRegions = region_list.size();

  vector<shared_ptr<data::Data>> data;

  for (size_t i=0; i<nRegions; i++) {

    coutCBL << "analysing region: " << i << " of " << nRegions << "\r"; cout.flush();

    // empty pair objects (same binning as the full measurement) that will
    // accumulate the pair counts of the current realisation
    auto dd_SS = Pair::Create(m_dd->pairType(), m_dd->pairInfo(), m_dd->sMin(), m_dd->sMax(), m_dd->nbins(), m_dd->shift(), m_dd->angularUnits(), m_dd->angularWeight());
    auto rr_SS = Pair::Create(m_rr->pairType(), m_rr->pairInfo(), m_rr->sMin(), m_rr->sMax(), m_rr->nbins(), m_rr->shift(), m_rr->angularUnits(), m_rr->angularWeight());
    auto dr_SS = Pair::Create(m_dr->pairType(), m_dr->pairInfo(), m_dr->sMin(), m_dr->sMax(), m_dr->nbins(), m_dr->shift(), m_dr->angularUnits(), m_dr->angularWeight());

    // w[j]=1 -> region j enters this realisation; w[i]=0 excludes region i
    vector<int> w(nRegions, 1);
    w[i] = 0;

    for (size_t j=0; j<nRegions; j++) {

      if (w[j]>0) {
	// DD and RR are symmetric: only the upper triangle (k>=j) is stored;
	// index maps (j,k) to its position in that triangular storage
	for (size_t k=j; k<nRegions; k++) { // auto pairs
	  if (w[k]>0) {
	    int index = j*nRegions+k-(j-1)*j/2-j;
	    for (int bin=0; bin<dd_SS->nbins(); bin++) {
	      dd_SS->add_data1D(bin, dd[index]);
	      rr_SS->add_data1D(bin, rr[index]);

	    }
	  }
	}

	// DR is not symmetric: the full nRegions x nRegions grid is stored
	for (size_t k=0; k<nRegions; k++) {// cross pairs
	  if (w[k]>0) {
	    int index = j*nRegions+k;
	    for (int bin=0; bin<dd_SS->nbins(); bin++)
	      dr_SS->add_data1D(bin, dr[index]);
	  }
	}
      }

    }

    // (weighted) numbers of data/random objects entering this realisation
    // — NOTE(review): the trailing argument 1 of *_condition presumably
    // selects the objects *outside* [region_list[i], region_list[i]+1);
    // confirm against the Catalogue API
    double nData_SS = m_data->nObjects_condition(Var::_Region_, region_list[i], region_list[i]+1, 1);
    double nData_SS_weighted = m_data->weightedN_condition(Var::_Region_, region_list[i], region_list[i]+1, 1);
    double nRandom_SS = m_random->nObjects_condition(Var::_Region_, region_list[i], region_list[i]+1, 1);
    double nRandom_SS_weighted = m_random->weightedN_condition(Var::_Region_, region_list[i], region_list[i]+1, 1);

    // measure xi(r) on the current realisation with the Landy-Szalay estimator
    data.push_back(move(correlation_LandySzalayEstimator(dd_SS, rr_SS, dr_SS, nData_SS, nData_SS_weighted, nRandom_SS, nRandom_SS_weighted)));
  }

  return data;
}
522 
523 
524 // ============================================================================
525 
526 
527 std::vector<std::shared_ptr<data::Data>> cbl::measure::twopt::TwoPointCorrelation1D::XiJackknifeTest (const std::vector<std::shared_ptr<pairs::Pair>> dd, const std::vector<std::shared_ptr<pairs::Pair>> rr)
528 {
529  vector<long> region_list = m_data->region_list();
530  size_t nRegions = dd.size();
531 
532  vector<shared_ptr<data::Data>> data;
533 
534  for (size_t i=0; i<nRegions; i++) {
535 
536  double nData = m_data->nObjects_condition(Var::_Region_, region_list[i], region_list[i]+1, 1);
537  double nData_weighted = m_data->weightedN_condition(Var::_Region_, region_list[i], region_list[i]+1, 1);
538  double nRandom = m_random->nObjects_condition(Var::_Region_, region_list[i], region_list[i]+1, 1);
539  double nRandom_weighted = m_random->weightedN_condition(Var::_Region_, region_list[i], region_list[i]+1, 1);
540 
541  data.push_back(move(correlation_NaturalEstimator(dd[i], rr[i], nData, nData_weighted, nRandom, nRandom_weighted)));
542  }
543 
544  return data;
545 }
546 
547 
548 // ============================================================================
549 
550 
551 std::vector<std::shared_ptr<data::Data>> cbl::measure::twopt::TwoPointCorrelation1D::XiJackknifeTest (const std::vector<std::shared_ptr<pairs::Pair>> dd, const std::vector<std::shared_ptr<pairs::Pair>> rr, const std::vector<std::shared_ptr<pairs::Pair>> dr)
552 {
553  vector<long> region_list = m_data->region_list();
554  size_t nRegions = dd.size();
555 
556  vector<shared_ptr<data::Data>> data;
557 
558  for (size_t i=0; i<nRegions; i++) {
559 
560  double nData = m_data->nObjects_condition(Var::_Region_, region_list[i], region_list[i]+1, 1);
561  double nData_weighted = m_data->weightedN_condition(Var::_Region_, region_list[i], region_list[i]+1, 1);
562  double nRandom = m_random->nObjects_condition(Var::_Region_, region_list[i], region_list[i]+1, 1);
563  double nRandom_weighted = m_random->weightedN_condition(Var::_Region_, region_list[i], region_list[i]+1, 1);
564 
565  data.push_back(move(correlation_LandySzalayEstimator(dd[i], rr[i], dr[i], nData, nData_weighted, nRandom, nRandom_weighted)));
566  }
567 
568  return data;
569 }
570 
571 // ============================================================================
572 
573 
std::vector<std::shared_ptr<data::Data>> cbl::measure::twopt::TwoPointCorrelation1D::XiBootstrap (const int nMocks, const std::vector<std::shared_ptr<pairs::Pair>> dd, const std::vector<std::shared_ptr<pairs::Pair>> rr, const int seed)
{
  // Bootstrap resampling of xi(r) with the natural estimator: each mock is
  // built by drawing regions with replacement and weighting the regional
  // pair counts by the product of the drawn multiplicities.

  vector<long> region_list = m_data->region_list();
  size_t nRegions = region_list.size();

  vector<shared_ptr<data::Data>> data;
  vector<double> nData_reg, nData_reg_weighted, nRandom_reg, nRandom_reg_weighted;

  // per-region (weighted) numbers of data and random objects, cached once
  for (size_t i=0; i<nRegions; i++) {
    nData_reg.push_back(m_data->nObjects_condition(Var::_Region_, region_list[i], region_list[i]+1, 0));
    nData_reg_weighted.push_back(m_data->weightedN_condition(Var::_Region_, region_list[i], region_list[i]+1, 0));
    nRandom_reg.push_back(m_random->nObjects_condition(Var::_Region_, region_list[i], region_list[i]+1, 0));
    nRandom_reg_weighted.push_back(m_random->weightedN_condition(Var::_Region_, region_list[i], region_list[i]+1, 0));
  }

  // uniform integer random numbers in [0, nRegions-1] used to draw regions
  random::UniformRandomNumbers_Int ran(0., nRegions-1, seed);

  int val = 3; // oversampling factor: val*nRegions draws per mock, see Norberg et al. 2009

  for (int i=0; i<nMocks; i++) {

    coutCBL << "analysing mock: " << i << " of " << nMocks << "\r"; cout.flush();

    // empty pair objects (same binning as the full measurement) that will
    // accumulate the weighted pair counts of the current mock
    auto dd_SS = Pair::Create(m_dd->pairType(), m_dd->pairInfo(), m_dd->sMin(), m_dd->sMax(), m_dd->nbins(), m_dd->shift(), m_dd->angularUnits(), m_dd->angularWeight());
    auto rr_SS = Pair::Create(m_rr->pairType(), m_rr->pairInfo(), m_rr->sMin(), m_rr->sMax(), m_rr->nbins(), m_rr->shift(), m_rr->angularUnits(), m_rr->angularWeight());

    double nData_SS = 0., nData_SS_weighted = 0., nRandom_SS = 0., nRandom_SS_weighted = 0.;

    // w[j] = how many times region j was drawn for this mock (can be 0)
    vector<int> w(nRegions, 0);
    for (size_t n=0; n<val*nRegions; n++)
      w[ran()] ++;

    for (size_t j=0; j<nRegions; j++) {

      if (w[j]>0) {

	// object counts scale linearly with the region multiplicity
	nData_SS += w[j]*nData_reg[j];
	nData_SS_weighted += w[j]*nData_reg_weighted[j];
	nRandom_SS += w[j]*nRandom_reg[j];
	nRandom_SS_weighted += w[j]*nRandom_reg_weighted[j];

	// accumulate the (j,k) pair counts, k>=j, weighted by the product of
	// multiplicities; index maps (j,k) to the upper-triangular storage
	for (size_t k=j; k<nRegions; k++) {
	  if (w[k]>0) {
	    int index = j*nRegions+k-(j-1)*j/2-j;
	    double ww = w[j]*w[k]; //(k==j) ? w[k] : w[j]*w[k];
	    for (int bin=0; bin<dd_SS->nbins(); bin++) {
	      dd_SS->add_data1D(bin, dd[index], ww);
	      rr_SS->add_data1D(bin, rr[index], ww);
	    }
	  }
	}
      }
    }

    // measure xi(r) on the current mock with the natural estimator
    data.push_back(move(correlation_NaturalEstimator(dd_SS, rr_SS, nData_SS, nData_SS_weighted, nRandom_SS, nRandom_SS_weighted)));
  }

  return data;
}
633 
634 
635 // ============================================================================
636 
637 
std::vector<std::shared_ptr<data::Data>> cbl::measure::twopt::TwoPointCorrelation1D::XiBootstrap (const int nMocks, const std::vector<std::shared_ptr<pairs::Pair>> dd, const std::vector<std::shared_ptr<pairs::Pair>> rr, const std::vector<std::shared_ptr<pairs::Pair>> dr, const int seed)
{
  // Bootstrap resampling of xi(r) with the Landy-Szalay estimator: each mock
  // is built by drawing regions with replacement and weighting the regional
  // pair counts by the product of the drawn multiplicities.

  vector<long> region_list = m_data->region_list();
  size_t nRegions = region_list.size();

  vector<shared_ptr<data::Data>> data;
  vector<double> nData_reg, nData_reg_weighted, nRandom_reg, nRandom_reg_weighted;

  // per-region (weighted) numbers of data and random objects, cached once
  for (size_t i=0; i<nRegions; i++) {
    nData_reg.push_back(m_data->nObjects_condition(Var::_Region_, region_list[i], region_list[i]+1, 0));
    nData_reg_weighted.push_back(m_data->weightedN_condition(Var::_Region_, region_list[i], region_list[i]+1, 0));
    nRandom_reg.push_back(m_random->nObjects_condition(Var::_Region_, region_list[i], region_list[i]+1, 0));
    nRandom_reg_weighted.push_back(m_random->weightedN_condition(Var::_Region_, region_list[i], region_list[i]+1, 0));
  }

  // uniform integer random numbers in [0, nRegions-1] used to draw regions
  random::UniformRandomNumbers_Int ran(0., nRegions-1, seed);

  int val = 3; // oversampling factor: val*nRegions draws per mock, see Norberg et al. 2009

  for (int i=0; i<nMocks; i++) {

    coutCBL << "analysing mock: " << i << " of " << nMocks << "\r"; cout.flush();

    // empty pair objects (same binning as the full measurement) that will
    // accumulate the weighted pair counts of the current mock
    auto dd_SS = Pair::Create(m_dd->pairType(), m_dd->pairInfo(), m_dd->sMin(), m_dd->sMax(), m_dd->nbins(), m_dd->shift(), m_dd->angularUnits(), m_dd->angularWeight());
    auto rr_SS = Pair::Create(m_rr->pairType(), m_rr->pairInfo(), m_rr->sMin(), m_rr->sMax(), m_rr->nbins(), m_rr->shift(), m_rr->angularUnits(), m_rr->angularWeight());
    auto dr_SS = Pair::Create(m_dr->pairType(), m_dr->pairInfo(), m_dr->sMin(), m_dr->sMax(), m_dr->nbins(), m_dr->shift(), m_dr->angularUnits(), m_dr->angularWeight());

    double nData_SS = 0., nData_SS_weighted = 0., nRandom_SS = 0., nRandom_SS_weighted = 0.;

    // w[j] = how many times region j was drawn for this mock (can be 0)
    vector<int> w(nRegions, 0);
    for (size_t n=0; n<val*nRegions; n++)
      w[ran()] ++;

    for (size_t j=0; j<nRegions; j++) {

      if (w[j]>0) {

	// object counts scale linearly with the region multiplicity
	nData_SS += w[j]*nData_reg[j];
	nData_SS_weighted += w[j]*nData_reg_weighted[j];
	nRandom_SS += w[j]*nRandom_reg[j];
	nRandom_SS_weighted += w[j]*nRandom_reg_weighted[j];

	// DD and RR are symmetric: only the upper triangle (k>=j) is stored;
	// pair counts are weighted by the product of the multiplicities
	for (size_t k=j; k<nRegions; k++) {
	  if (w[k]>0) {
	    int index = j*nRegions+k-(j-1)*j/2-j;
	    double ww = w[j]*w[k]; //(k==j) ? w[k] : w[j]*w[k];
	    for (int bin=0; bin<dd_SS->nbins(); bin++) {
	      dd_SS->add_data1D(bin, dd[index], ww);
	      rr_SS->add_data1D(bin, rr[index], ww);
	    }
	  }
	}

	// DR is not symmetric: the full nRegions x nRegions grid is stored
	for (size_t k=0; k<nRegions; k++) {
	  if (w[k]>0) {
	    int index = j*nRegions+k;
	    double ww = w[j]*w[k]; //(k==j) ? w[k] : w[j]*w[k];
	    for (int bin=0; bin<dr_SS->nbins(); bin++)
	      dr_SS->add_data1D(bin, dr[index], ww);
	  }
	}
      }
    }

    // measure xi(r) on the current mock with the Landy-Szalay estimator
    data.push_back(move(correlation_LandySzalayEstimator(dd_SS, rr_SS, dr_SS, nData_SS, nData_SS_weighted, nRandom_SS, nRandom_SS_weighted)));
  }

  return data;
}
707 
708 
709 // ============================================================================
710 
711 
712 void cbl::measure::twopt::TwoPointCorrelation1D::read_covariance (const std::string dir, const std::string file)
713 {
714  m_dataset->set_covariance(dir+file);
715 }
716 
717 
718 // ============================================================================
719 
720 
void cbl::measure::twopt::TwoPointCorrelation1D::write_covariance (const std::string dir, const std::string file) const
{
  // write the covariance matrix of the measured two-point correlation
  // function to dir+file, delegating the formatting to the internal dataset
  m_dataset->write_covariance(dir, file);
}
725 
726 
727 // ============================================================================
728 
729 
730 void cbl::measure::twopt::TwoPointCorrelation1D::compute_covariance (const std::vector<std::shared_ptr<data::Data>> xi, const bool JK)
731 {
732  vector<vector<double>> Xi;
733 
734  for (size_t i=0; i<xi.size(); i++) {
735  vector<double> vv;
736  xi[i]->get_data(vv);
737  Xi.push_back(vv);
738  }
739 
740  vector<vector<double>> cov_mat;
741  cbl::covariance_matrix(Xi, cov_mat, JK);
742 
743  m_dataset->set_covariance(cov_mat);
744 }
745 
746 
747 // ============================================================================
748 
749 
750 void cbl::measure::twopt::TwoPointCorrelation1D::compute_covariance (const std::vector<std::string> file, const bool JK)
751 {
752  vector<double> rad, mean;
753  vector<vector<double>> cov_mat;
754 
755  cbl::covariance_matrix(file, rad, mean, cov_mat, JK);
756 
757  m_dataset->set_covariance(cov_mat);
758 }
759 
The class Data1D_extra.
#define coutCBL
CBL print message.
Definition: Kernel.h:734
The class TwoPointCorrelation1D.
The class Data1D_extra.
Definition: Data1D_extra.h:52
The class Data1D.
Definition: Data1D.h:51
virtual void read_pairs(std::shared_ptr< pairs::Pair > PP, const std::vector< std::string > dir, const std::string file) const override
read the number of pairs
virtual std::shared_ptr< data::Data > correlation_NaturalEstimator(const std::shared_ptr< pairs::Pair > dd, const std::shared_ptr< pairs::Pair > rr, const int nData=0, const double nData_weighted=0., const int nRandom=0, const double nRandom_weighted=0.) override
get a dataset containing the two-point correlation function measured with the natural estimator,...
void read_covariance(const std::string dir, const std::string file) override
read the measured covariance matrix
virtual std::vector< std::shared_ptr< data::Data > > XiJackknifeTest(const std::vector< std::shared_ptr< pairs::Pair >> dd, const std::vector< std::shared_ptr< pairs::Pair >> rr) override
measure the jackknife resampling of the two-point correlation function, ξ(r), test
virtual void write_pairs(const std::shared_ptr< pairs::Pair > PP, const std::string dir, const std::string file) const override
write the number of pairs
virtual std::vector< std::shared_ptr< data::Data > > XiJackknife(const std::vector< std::shared_ptr< pairs::Pair >> dd, const std::vector< std::shared_ptr< pairs::Pair >> rr) override
measure the jackknife resampling of the two-point correlation function, ξ(r)
virtual std::shared_ptr< data::Data > data_with_extra_info(const std::shared_ptr< pairs::Pair > dd, const std::vector< double > rad, const std::vector< double > xi, const std::vector< double > error) const
return a data object with extra info
virtual std::shared_ptr< data::Data > correlation_LandySzalayEstimator(const std::shared_ptr< pairs::Pair > dd, const std::shared_ptr< pairs::Pair > rr, const std::shared_ptr< pairs::Pair > dr, const int nData=0, const double nData_weighted=0., const int nRandom=0, const double nRandom_weighted=0.) override
get a dataset containing the two-point correlation function measured with the Landy-Szalay estimator,...
void write_covariance(const std::string dir, const std::string file) const override
write the measured covariance matrix
void compute_covariance(const std::vector< std::shared_ptr< data::Data >> xi, const bool JK) override
compute the covariance matrix
virtual std::vector< std::shared_ptr< data::Data > > XiBootstrap(const int nMocks, const std::vector< std::shared_ptr< pairs::Pair >> dd, const std::vector< std::shared_ptr< pairs::Pair >> rr, const int seed=3213) override
measure the bootstrap resampling of the two-point correlation function, ξ(r)
The class UniformRandomNumbers_Int.
static const char fDP3[]
conversion symbol for: double -> std::string
Definition: Constants.h:136
static const char fINT[]
conversion symbol for: int -> std::string
Definition: Constants.h:121
The global namespace of the CosmoBolognaLib
Definition: CAMB.h:38
std::string conv(const T val, const char *fact)
convert a number to a std::string
Definition: Kernel.h:903
void checkIO(const std::ifstream &fin, const std::string file="NULL")
check if an input file can be opened
Definition: Kernel.cpp:160
int ErrorCBL(const std::string msg, const std::string functionCBL, const std::string fileCBL, const cbl::glob::ExitCode exitCode=cbl::glob::ExitCode::_error_)
throw an exception: it is used for handling exceptions inside the CosmoBolognaLib
Definition: Kernel.h:780
void covariance_matrix(const std::vector< std::vector< double >> mat, std::vector< std::vector< double >> &cov, const bool JK=false)
compute the covariance matrix from an input dataset
Definition: Func.cpp:761