Update README.md

README.md (CHANGED)
@@ -70,6 +70,19 @@ In these files, there are the following fields:
- **SKLEARN_LogReg_test** is a statistic of the proposed ML-method based on Logistic Regression (implemented in sklearn);
- **SKLEARN_GB_test** is a statistic of the proposed ML-method based on Gradient Boosting Machine (implemented in sklearn).

+# Citing
+
+~~~
+@misc {petr_philonenko_2024,
+author = { {Petr Philonenko} },
+title = { ML_for_TwoSampleTesting (Revision a4ae672) },
+year = 2024,
+url = { https://huggingface.co/datasets/pfilonenko/ML_for_TwoSampleTesting },
+doi = { 10.57967/hf/2978 },
+publisher = { Hugging Face }
+}
+~~~
+
# Dataset Simulation

For this dataset, the full source code (C++) is available [here](https://github.com/pfilonenko/ML_for_TwoSampleTesting/tree/main/dataset/simulation).
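The two `SKLEARN_*` columns above are plain per-record test statistics, so they can be read straight out of the data files. Below is a minimal, self-contained sketch of extracting one of them from a single record; the file name `dataset.csv`, the `;` separator (matching the files written by the simulation code further down), and reading only the first data record are assumptions for illustration, not part of the dataset card.

```C++
#include <fstream>
#include <iostream>
#include <sstream>
#include <string>
#include <vector>

// Split one ';'-separated record of the dataset into its fields.
static std::vector<std::string> split_record(const std::string& line)
{
	std::vector<std::string> fields;
	std::stringstream ss(line);
	std::string field;
	while (std::getline(ss, field, ';'))
		fields.push_back(field);
	return fields;
}

int main()
{
	std::ifstream in("dataset.csv");	// illustrative file name
	std::string header_line, data_line;
	std::getline(in, header_line);		// first record holds the column names
	std::getline(in, data_line);		// one data record

	const std::vector<std::string> header = split_record(header_line);
	const std::vector<std::string> row    = split_record(data_line);

	// Locate the column holding the Logistic-Regression-based statistic.
	for (std::size_t i = 0; i < header.size() && i < row.size(); ++i)
		if (header[i] == "SKLEARN_LogReg_test")
			std::cout << "SKLEARN_LogReg_test = " << std::stod(row[i]) << "\n";

	return 0;
}
```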
@@ -166,16 +179,171 @@ int main()
	return 0;
}
```

-# Citing
-
-~~~
-@misc {petr_philonenko_2024,
-author = { {Petr Philonenko} },
-title = { ML_for_TwoSampleTesting (Revision a4ae672) },
-year = 2024,
-url = { https://huggingface.co/datasets/pfilonenko/ML_for_TwoSampleTesting },
-doi = { 10.57967/hf/2978 },
-publisher = { Hugging Face }
-}
-~~~
+> simulation_for_machine_learning.h
+```C++
+#ifndef simulation_for_machine_learning_H
+#define simulation_for_machine_learning_H
+
+#include"HelpFucntions.h"
+
+// Object of the data simulation for training of the proposed ML-method
+class simulation_for_machine_learning{
+private:
+	// p-value computation using the Test and Test Statistic (Sn)
+	double pvalue(double Sn, HomogeneityTest* Test)
+	{
+		auto f = Test->F( Sn );
+		double pv = 0;
+		if( Test->TestType() == "right" )
+			pv = 1.0 - f;
+		else
+		if( Test->TestType() == "left" )
+			pv = f;
+		else // "double"
+			pv = 2.0*min( f, 1-f );
+		return pv;
+	}
+
+	// Process of simulation
+	void Simulation(int iter, vector<HomogeneityTest*> &D, int rank, mt19937boost Gw)
+	{
+		// build the name of the file the results are saved to
+		char file_to_save[512];
+		sprintf(file_to_save,".//to_machine_learning_2024//to_machine_learning[rank=%d].csv", rank);
+
+		// if this is the very first iteration, write the file header
+		if( iter == 0 )
+		{
+			FILE *ou = fopen(file_to_save,"w");
+			fprintf(ou, "num;H0/H1;model;n1;n2;perc;real_perc1;real_perc2;");
+			for(int i=0; i<D.size(); i++)
+			{
+				char title_of_test[512];
+				D[i]->TitleTest(title_of_test);
+				fprintf(ou, "Sn [%s];p-value [%s];", title_of_test, title_of_test);
+			}
+			fprintf(ou, "\n");
+			fclose(ou);
+		}
+
+		// Getting the list of the Alternative Hypotheses (H01 - H27)
+		vector<int> H;
+		int l = 1;
+		for(int i=100; i<940; i+=100) // Groups of Alternative Hypotheses (I, II, III, IV, V, VI, VII, VIII, IX)
+		{
+			for(int j=10; j<40; j+=10) // Alternative Hypotheses within the Group (e.g., H01, H02, H03 in Group I, and so on)
+			//for(int l=1; l<4; l++) // various families of the distribution of the censoring time F^C(t)
+				H.push_back( 1000+i+j+l );
+		}
+
+		// Sample sizes
+		vector<int> sample_sizes;
+		sample_sizes.push_back( 20 );   // n1 = n2 = 20
+		sample_sizes.push_back( 30 );   // n1 = n2 = 30
+		sample_sizes.push_back( 50 );   // n1 = n2 = 50
+		sample_sizes.push_back( 75 );   // n1 = n2 = 75
+		sample_sizes.push_back( 100 );  // n1 = n2 = 100
+		sample_sizes.push_back( 150 );  // n1 = n2 = 150
+		sample_sizes.push_back( 200 );  // n1 = n2 = 200
+		sample_sizes.push_back( 300 );  // n1 = n2 = 300
+		sample_sizes.push_back( 500 );  // n1 = n2 = 500
+		sample_sizes.push_back( 1000 ); // n1 = n2 = 1000
+
+		// Simulation (Getting H, Simulation samples, Computation of the test statistics & Save to file)
+		for(int i = 0; i<H.size(); i++)
+		{
+			int Hyp = H[i];
+
+			if(rank == 0)
+				printf("\tH = %d\n",Hyp);
+
+			for(int per = 0; per<51; per+=10)
+			{
+				// ---- Getting Hi ----
+				AlternativeHypotheses H0_1(Hyp,1,0), H0_2(Hyp,2,0);
+				AlternativeHypotheses H1_1(Hyp,1,per), H1_2(Hyp,2,per);
+
+				for(int jj=0; jj<sample_sizes.size(); jj++)
+				{
+					int n = sample_sizes[jj];
+
+					// ---- Simulation samples ----
+					// competing hypothesis H0
+					Sample A0(*H0_1.D,n,Gw);
+					Sample B0(*H0_1.D,n,Gw);
+					if( per > 0 )
+					{
+						A0.CensoredTypeThird(*H1_1.D,Gw);
+						B0.CensoredTypeThird(*H1_1.D,Gw);
+					}
+
+					// competing hypothesis H1
+					Sample A1(*H0_1.D,n,Gw);
+					Sample B1(*H0_2.D,n,Gw);
+					if( per > 0 )
+					{
+						A1.CensoredTypeThird(*H1_1.D,Gw);
+						B1.CensoredTypeThird(*H1_2.D,Gw);
+					}
+
+					// ---- Computation of the test statistics & Save to file ----
+					// Sn and p-value computation under H0
+					FILE *ou = fopen(file_to_save, "a");
+					auto perc1 = A0.RealCensoredPercent();
+					auto perc2 = B0.RealCensoredPercent();
+					fprintf(ou,"%d;", iter);
+					fprintf(ou,"H0;");
+					fprintf(ou,"%d;", Hyp);
+					fprintf(ou,"%d;%d;", n,n);
+					fprintf(ou,"%d;%lf;%lf", per, perc1, perc2);
+					for(int j=0; j<D.size(); j++)
+					{
+						auto Sn_H0 = D[j]->CalculateStatistic(A0, B0);
+						auto pv_H0 = 0.0; // p-value computation is skipped here (it is prepared later in the ML framework)
+						fprintf(ou, ";%lf;0", Sn_H0);
+					}
+					fprintf(ou, "\n");
+
+					// Sn and p-value computation under H1
+					perc1 = A1.RealCensoredPercent();
+					perc2 = B1.RealCensoredPercent();
+					fprintf(ou,"%d;", iter);
+					fprintf(ou,"H1;");
+					fprintf(ou,"%d;", Hyp);
+					fprintf(ou,"%d;%d;", n,n);
+					fprintf(ou,"%d;%lf;%lf", per, perc1, perc2);
+					for(int j=0; j<D.size(); j++)
+					{
+						auto Sn_H1 = D[j]->CalculateStatistic(A1, B1);
+						auto pv_H1 = 0.0; // p-value computation is skipped here (it is prepared later in the ML framework)
+						fprintf(ou, ";%lf;0", Sn_H1);
+					}
+					fprintf(ou, "\n");
+					fclose( ou );
+				}
+			}
+		}
+	}
+
+public:
+	// Constructor of the class
+	simulation_for_machine_learning(vector<HomogeneityTest*> &D)
+	{
+		int N = 40000; // number of the Monte-Carlo replications
+		#pragma omp parallel for
+		for(int k=0; k<N; k++)
+		{
+			int rank = omp_get_thread_num();
+			auto gen = GwMT19937[rank];
+
+			if(rank == 0)
+				printf("\r%d", k);
+
+			Simulation(k, D, rank, gen);
+		}
+	}
+};
+
+#endif
+```
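For orientation, here is a minimal driver sketch showing how the header added above might be used from a `main()` like the one shown earlier in the README. `SomeHomogeneityTest` is a hypothetical stand-in for whichever concrete `HomogeneityTest` subclasses `HelpFucntions.h` actually provides; it also assumes the output directory `to_machine_learning_2024` already exists.

```C++
#include"simulation_for_machine_learning.h"

int main()
{
	// Two-sample tests whose statistics are written to the CSV files;
	// SomeHomogeneityTest is a hypothetical concrete HomogeneityTest subclass.
	vector<HomogeneityTest*> D;
	D.push_back( new SomeHomogeneityTest() );

	// The constructor runs the 40,000 Monte-Carlo replications in parallel (OpenMP)
	// and appends records to the to_machine_learning[rank=*].csv files.
	simulation_for_machine_learning sim( D );

	for(int i=0; i<D.size(); i++)
		delete D[i];
	return 0;
}
```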
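The identifier loop in `Simulation()` encodes each of the 27 alternative hypotheses positionally as `1000 + 100*group + 10*alternative + censoring_family`, with the censoring-time family fixed to `l = 1` in this listing. The standalone snippet below simply replays that loop and prints the resulting identifiers.

```C++
#include <cstdio>

// Reproduces the identifier loop from Simulation(): one id per alternative
// hypothesis H01..H27, i.e. 1111, 1121, 1131, 1211, ..., 1931.
int main()
{
	int l = 1;	// family of the censoring-time distribution F^C(t)
	for(int i=100; i<940; i+=100)		// group of alternative hypotheses (I..IX)
		for(int j=10; j<40; j+=10)	// alternative within the group
			std::printf("%d\n", 1000 + i + j + l);
	return 0;
}
```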
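Putting the `fprintf` calls together, each (hypothesis, censoring percentage, sample size) combination appends one `H0`-labelled and one `H1`-labelled record per replication, with one `Sn`/`p-value` pair per test in `D` and a `0` placeholder where the p-value will later be prepared in the ML framework. The sketch below reproduces that record layout with the same format strings, using a single hypothetical test name and placeholder numeric values only.

```C++
#include <cstdio>

// Prints the header and one illustrative record in the format used by
// Simulation() above. "SomeTest" and all numbers are placeholders.
int main()
{
	std::printf("num;H0/H1;model;n1;n2;perc;real_perc1;real_perc2;");
	std::printf("Sn [%s];p-value [%s];\n", "SomeTest", "SomeTest");

	std::printf("%d;", 0);                  // num: replication index
	std::printf("H0;");                     // hypothesis the sample pair was simulated under
	std::printf("%d;", 1111);               // model: encoded hypothesis id
	std::printf("%d;%d;", 20, 20);          // n1;n2
	std::printf("%d;%lf;%lf", 0, 0.0, 0.0); // perc;real_perc1;real_perc2
	std::printf(";%lf;0", 1.234567);        // Sn [SomeTest];p-value placeholder
	std::printf("\n");
	return 0;
}
```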