qingy2024 committed
Commit bf620c6 · verified · 1 Parent(s): b682d43

Upload folder using huggingface_hub

This view is limited to 50 files because it contains too many changes.
Files changed (50)
  1. .gitattributes +8 -0
  2. .gitignore +49 -0
  3. .ipynb_checkpoints/requirements-checkpoint.txt +4 -0
  4. GraphUNets/README.md +78 -0
  5. GraphUNets/configs/COLLAB +12 -0
  6. GraphUNets/configs/DD +12 -0
  7. GraphUNets/configs/IMDBMULTI +12 -0
  8. GraphUNets/configs/PROTEINS +12 -0
  9. GraphUNets/doc/GPool.png +0 -0
  10. GraphUNets/doc/GUnet.png +3 -0
  11. GraphUNets/doc/GUnpool.png +0 -0
  12. GraphUNets/run_GNN.sh +48 -0
  13. GraphUNets/src/main.py +63 -0
  14. GraphUNets/src/network.py +60 -0
  15. GraphUNets/src/trainer.py +69 -0
  16. GraphUNets/src/utils/data_loader.py +101 -0
  17. GraphUNets/src/utils/dataset.py +45 -0
  18. GraphUNets/src/utils/ops.py +145 -0
  19. cora/README +29 -0
  20. input +2 -0
  21. requirements.txt +4 -0
  22. results.csv +271 -0
  23. results_clique.csv +271 -0
  24. results_lct.csv +271 -0
  25. results_mk.csv +307 -0
  26. results_mk_paper.csv +91 -0
  27. results_mk_paper2.csv +7 -0
  28. results_mk_peeling.csv +91 -0
  29. results_nosquared.csv +271 -0
  30. results_nosquared2.csv +271 -0
  31. results_qing.csv +271 -0
  32. seeds_diam_1e-2.json +3 -0
  33. seeds_diam_1e-3.json +3 -0
  34. seeds_diam_1e-4.json +3 -0
  35. seeds_diam_1e-6.json +0 -0
  36. seeds_diam_1e-6_coarsen.json +1 -0
  37. seeds_diam_1e-8.json +3 -0
  38. seeds_diam_1e0.json +3 -0
  39. seeds_diam_1e3.json +3 -0
  40. seeds_diam_1e4.json +0 -0
  41. seeds_lambda2_1e-6.json +0 -0
  42. src/.ipynb_checkpoints/2.2_lrmc_bilevel-checkpoint.py +325 -0
  43. src/.ipynb_checkpoints/2.3_lrmc_bilevel-checkpoint.py +436 -0
  44. src/.ipynb_checkpoints/2.4_lrmc_bilevel-checkpoint.py +413 -0
  45. src/.ipynb_checkpoints/export_edgelist-checkpoint.py +158 -0
  46. src/.ipynb_checkpoints/export_reddit_edgelist-checkpoint.py +41 -0
  47. src/.ipynb_checkpoints/highlight_seeds_dot-checkpoint.py +71 -0
  48. src/1_build_lrmc_levels.py +194 -0
  49. src/2.1_lrmc_bilevel.py +301 -0
  50. src/2.2_lrmc_bilevel.py +325 -0
.gitattributes CHANGED
@@ -33,3 +33,11 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ GraphUNets/doc/GUnet.png filter=lfs diff=lfs merge=lfs -text
+ seeds_diam_1e-2.json filter=lfs diff=lfs merge=lfs -text
+ seeds_diam_1e-3.json filter=lfs diff=lfs merge=lfs -text
+ seeds_diam_1e-4.json filter=lfs diff=lfs merge=lfs -text
+ seeds_diam_1e-8.json filter=lfs diff=lfs merge=lfs -text
+ seeds_diam_1e0.json filter=lfs diff=lfs merge=lfs -text
+ seeds_diam_1e3.json filter=lfs diff=lfs merge=lfs -text
+ src/synthetic/Table1-Epsilon-Ablations.png filter=lfs diff=lfs merge=lfs -text
.gitignore ADDED
@@ -0,0 +1,49 @@
+ # Compiled class file
+ *.class
+
+ # Log file
+ *.log
+
+ # BlueJ files
+ *.ctxt
+
+ # Mobile Tools for Java (J2ME)
+ .mtj.tmp/
+
+ # Package Files #
+ *.jar
+ *.war
+ *.nar
+ *.ear
+ *.zip
+ *.tar.gz
+ *.rar
+
+ # virtual machine crash logs
+ hs_err_pid*
+
+ # Temporary table files
+ src/tmp_table1/
+
+ # macOS system files
+ .DS_Store
+
+ # Dataset files
+ *.cites
+ *.content
+
+ src/synthetic/data/*
+
+ src/reddit_edges.txt
+
+ out/*
+
+ .idea/*
+
+ iclr2026_3_reorganized.tex
+
+ GraphUNets/data/*
+
+ temp*
+
+ *.tex
.ipynb_checkpoints/requirements-checkpoint.txt ADDED
@@ -0,0 +1,4 @@
+ torch
+ torch-scatter
+ torch-sparse
+ torch-geometric
GraphUNets/README.md ADDED
@@ -0,0 +1,78 @@
+ PyTorch Implementation of Graph U-Nets
+ ======================================
+
+ Created by [Hongyang Gao](https://faculty.sites.iastate.edu/hygao/) @ Iowa State University, and
+ [Shuiwang Ji](http://people.tamu.edu/~sji/) @ Texas A&M University.
+
+ About
+ -----
+
+ PyTorch implementation of Graph U-Nets. See http://proceedings.mlr.press/v97/gao19a/gao19a.pdf for more information.
+
+ Methods
+ -------
+
+ ### Graph Pooling Layer
+
+ ![gPool](./doc/GPool.png)
+
+ ### Graph Unpooling Layer
+
+ ![gUnpool](./doc/GUnpool.png)
+
+ ### Graph U-Net
+
+ ![gUnet](./doc/GUnet.png)
+
+ Installation
+ ------------
+
+ Type
+
+     ./run_GNN.sh DATA FOLD GPU
+
+ to run on a dataset using a fold number (1-10).
+
+ You can run
+
+     ./run_GNN.sh DD 0 0
+
+ to run on the DD dataset with 10-fold cross
+ validation on GPU #0.
+
+ Code
+ ----
+
+ The detailed implementation of the Graph U-Net is in src/utils/ops.py.
+
+ Datasets
+ --------
+
+ Check "data/README.md" for the format.
+
+ Results
+ -------
+
+ | Models   | DD              | IMDBMULTI       | PROTEINS        |
+ | -------- | --------------- | --------------- | --------------- |
+ | PSCN     | 76.3 ± 2.6%     | 45.2 ± 2.8%     | 75.9 ± 2.8%     |
+ | DIFFPOOL | 80.6%           | -               | 76.3%           |
+ | SAGPool  | 76.5%           | -               | 71.9%           |
+ | GIN      | 82.0 ± 2.7%     | 52.3 ± 2.8%     | 76.2 ± 2.8%     |
+ | g-U-Net  | **83.0 ± 2.2%** | **56.7 ± 2.9%** | **78.7 ± 4.2%** |
+
+ Reference
+ ---------
+
+ If you find the code useful, please cite our paper:
+
+     @inproceedings{gao2019graph,
+       title={Graph U-Nets},
+       author={Gao, Hongyang and Ji, Shuiwang},
+       booktitle={International Conference on Machine Learning},
+       pages={2083--2092},
+       year={2019}
+     }
GraphUNets/configs/COLLAB ADDED
@@ -0,0 +1,12 @@
+ num_epochs=200
+ batch_size=64
+ learning_rate=0.001
+ deg_as_tag=1
+ layer_num=3
+ hidden_dim=512
+ layer_dim=64
+ drop_network=0.3
+ drop_classifier=0.2
+ activation_network=ELU
+ activation_classifier=ReLU
+ pool_rates_layers="0.8 0.6 0.4"
GraphUNets/configs/DD ADDED
@@ -0,0 +1,12 @@
+ num_epochs=200
+ batch_size=32
+ learning_rate=0.001
+ deg_as_tag=0
+ layer_num=3
+ hidden_dim=512
+ layer_dim=128
+ drop_network=0.3
+ drop_classifier=0.3
+ activation_network=ELU
+ activation_classifier=ELU
+ pool_rates_layers="0.9 0.8 0.7"
GraphUNets/configs/IMDBMULTI ADDED
@@ -0,0 +1,12 @@
+ num_epochs=200
+ batch_size=64
+ learning_rate=0.001
+ deg_as_tag=1
+ layer_num=3
+ hidden_dim=512
+ layer_dim=48
+ drop_network=0.1
+ drop_classifier=0.1
+ activation_network=LeakyReLU
+ activation_classifier=ELU
+ pool_rates_layers="0.9 0.9 0.9"
GraphUNets/configs/PROTEINS ADDED
@@ -0,0 +1,12 @@
+ num_epochs=200
+ batch_size=64
+ learning_rate=0.001
+ deg_as_tag=0
+ layer_num=3
+ hidden_dim=512
+ layer_dim=64
+ drop_network=0.3
+ drop_classifier=0.3
+ activation_network=ELU
+ activation_classifier=ELU
+ pool_rates_layers="0.9 0.8 0.7"
GraphUNets/doc/GPool.png ADDED
GraphUNets/doc/GUnet.png ADDED

Git LFS Details

  • SHA256: aba808676951891b9095ae99b3dc791460091fda1a55a3342e1cdd5474bdd58d
  • Pointer size: 131 Bytes
  • Size of remote file: 105 kB
GraphUNets/doc/GUnpool.png ADDED
GraphUNets/run_GNN.sh ADDED
@@ -0,0 +1,48 @@
+ #!/bin/bash
+ DATA="${1-DD}"
+ fold=${2-1} # 0 for 10-fold
+ GPU=${3-0}
+
+ seed=1
+
+ CONFIG=configs/${DATA}
+ if [ ! -f "$CONFIG" ]; then
+     echo "No config file for ${DATA} in configs folder"
+     exit 128
+ fi
+ source configs/${DATA}
+
+ FOLDER=results
+ FILE=${FOLDER}/${DATA}.txt
+ if [ ! -d "$FOLDER" ]; then
+     mkdir $FOLDER
+ fi
+
+ run(){
+     CUDA_VISIBLE_DEVICES=${GPU} python3 src/main.py \
+         -seed $seed -data $DATA -fold $1 -num_epochs $num_epochs \
+         -batch $batch_size -lr $learning_rate -deg_as_tag $deg_as_tag \
+         -l_num $layer_num -h_dim $hidden_dim -l_dim $layer_dim \
+         -drop_n $drop_network -drop_c $drop_classifier \
+         -act_n $activation_network -act_c $activation_classifier \
+         -ks $pool_rates_layers -acc_file $FILE
+ }
+
+ if [ ${fold} == 0 ]; then
+     if [ -f "$FILE" ]; then
+         rm $FILE
+     fi
+     echo "Running 10-fold cross validation"
+     start=`date +%s`
+     run $fold
+     stop=`date +%s`
+     echo "End of cross-validation using $[stop - start] seconds"
+     echo "The accuracy results for ${DATA} are as follows:"
+     cat $FILE
+     echo "Mean and sstdev are:"
+     cat $FILE | datamash mean 2 sstdev 2
+ else
+     run $fold
+     echo "The accuracy result for ${DATA} fold ${fold} is:"
+     tail -1 $FILE
+ fi
GraphUNets/src/main.py ADDED
@@ -0,0 +1,63 @@
+ import argparse
+ import random
+ import time
+ import torch
+ import numpy as np
+ from network import GNet
+ from trainer import Trainer
+ from utils.data_loader import FileLoader
+
+
+ def get_args():
+     parser = argparse.ArgumentParser(description='Args for graph prediction')
+     parser.add_argument('-seed', type=int, default=1, help='seed')
+     parser.add_argument('-data', default='DD', help='data folder name')
+     parser.add_argument('-fold', type=int, default=1, help='fold (1..10)')
+     parser.add_argument('-num_epochs', type=int, default=2, help='epochs')
+     parser.add_argument('-batch', type=int, default=8, help='batch size')
+     parser.add_argument('-lr', type=float, default=0.001, help='learning rate')
+     parser.add_argument('-deg_as_tag', type=int, default=0, help='1 or degree')
+     parser.add_argument('-l_num', type=int, default=3, help='layer num')
+     parser.add_argument('-h_dim', type=int, default=512, help='hidden dim')
+     parser.add_argument('-l_dim', type=int, default=48, help='layer dim')
+     parser.add_argument('-drop_n', type=float, default=0.3, help='drop net')
+     parser.add_argument('-drop_c', type=float, default=0.2, help='drop output')
+     parser.add_argument('-act_n', type=str, default='ELU', help='network act')
+     parser.add_argument('-act_c', type=str, default='ELU', help='output act')
+     parser.add_argument('-ks', nargs='+', type=float, default='0.9 0.8 0.7')
+     parser.add_argument('-acc_file', type=str, default='re', help='acc file')
+     args, _ = parser.parse_known_args()
+     return args
+
+
+ def set_random(seed):
+     random.seed(seed)
+     np.random.seed(seed)
+     torch.manual_seed(seed)
+
+
+ def app_run(args, G_data, fold_idx):
+     G_data.use_fold_data(fold_idx)
+     net = GNet(G_data.feat_dim, G_data.num_class, args)
+     trainer = Trainer(args, net, G_data)
+     trainer.train()
+
+
+ def main():
+     args = get_args()
+     print(args)
+     set_random(args.seed)
+     start = time.time()
+     G_data = FileLoader(args).load_data()
+     print('load data using ------>', time.time()-start)
+     if args.fold == 0:
+         for fold_idx in range(10):
+             print('start training ------> fold', fold_idx+1)
+             app_run(args, G_data, fold_idx)
+     else:
+         print('start training ------> fold', args.fold)
+         app_run(args, G_data, args.fold-1)
+
+
+ if __name__ == "__main__":
+     main()
GraphUNets/src/network.py ADDED
@@ -0,0 +1,60 @@
+ import torch
+ import torch.nn as nn
+ import torch.nn.functional as F
+ from utils.ops import GCN, GraphUnet, Initializer, norm_g
+
+
+ class GNet(nn.Module):
+     def __init__(self, in_dim, n_classes, args):
+         super(GNet, self).__init__()
+         self.n_act = getattr(nn, args.act_n)()
+         self.c_act = getattr(nn, args.act_c)()
+         self.s_gcn = GCN(in_dim, args.l_dim, self.n_act, args.drop_n)
+         self.g_unet = GraphUnet(
+             args.ks, args.l_dim, args.l_dim, args.l_dim, self.n_act,
+             args.drop_n)
+         self.out_l_1 = nn.Linear(3*args.l_dim*(args.l_num+1), args.h_dim)
+         self.out_l_2 = nn.Linear(args.h_dim, n_classes)
+         self.out_drop = nn.Dropout(p=args.drop_c)
+         Initializer.weights_init(self)
+
+     def forward(self, gs, hs, labels):
+         hs = self.embed(gs, hs)
+         logits = self.classify(hs)
+         return self.metric(logits, labels)
+
+     def embed(self, gs, hs):
+         o_hs = []
+         for g, h in zip(gs, hs):
+             h = self.embed_one(g, h)
+             o_hs.append(h)
+         hs = torch.stack(o_hs, 0)
+         return hs
+
+     def embed_one(self, g, h):
+         g = norm_g(g)
+         h = self.s_gcn(g, h)
+         hs = self.g_unet(g, h)
+         h = self.readout(hs)
+         return h
+
+     def readout(self, hs):
+         h_max = [torch.max(h, 0)[0] for h in hs]
+         h_sum = [torch.sum(h, 0) for h in hs]
+         h_mean = [torch.mean(h, 0) for h in hs]
+         h = torch.cat(h_max + h_sum + h_mean)
+         return h
+
+     def classify(self, h):
+         h = self.out_drop(h)
+         h = self.out_l_1(h)
+         h = self.c_act(h)
+         h = self.out_drop(h)
+         h = self.out_l_2(h)
+         return F.log_softmax(h, dim=1)
+
+     def metric(self, logits, labels):
+         loss = F.nll_loss(logits, labels)
+         _, preds = torch.max(logits, 1)
+         acc = torch.mean((preds == labels).float())
+         return loss, acc
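A note on `out_l_1`'s input width: `GraphUnet.forward` (see src/utils/ops.py below) returns `l_num + 1` feature maps, one per up level plus the final residual map, and `readout` concatenates three pooled statistics (max, sum, mean) of each. A minimal shape check, with hypothetical sizes matching the IMDBMULTI config:

```python
# Sketch only: verifies why out_l_1 expects 3*l_dim*(l_num+1) inputs.
import torch

l_dim, l_num, n_nodes = 48, 3, 10                  # hypothetical sizes
hs = [torch.randn(n_nodes, l_dim) for _ in range(l_num + 1)]

h_max = [h.max(0)[0] for h in hs]                  # l_num+1 vectors of length l_dim
h_sum = [h.sum(0) for h in hs]
h_mean = [h.mean(0) for h in hs]
h = torch.cat(h_max + h_sum + h_mean)

assert h.shape[0] == 3 * l_dim * (l_num + 1)       # 576 for these sizes
```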
GraphUNets/src/trainer.py ADDED
@@ -0,0 +1,69 @@
+ import torch
+ from tqdm import tqdm
+ import torch.optim as optim
+ from utils.dataset import GraphData
+
+
+ class Trainer:
+     def __init__(self, args, net, G_data):
+         self.args = args
+         self.net = net
+         self.feat_dim = G_data.feat_dim
+         self.fold_idx = G_data.fold_idx
+         self.init(args, G_data.train_gs, G_data.test_gs)
+         if torch.cuda.is_available():
+             self.net.cuda()
+
+     def init(self, args, train_gs, test_gs):
+         print('#train: %d, #test: %d' % (len(train_gs), len(test_gs)))
+         train_data = GraphData(train_gs, self.feat_dim)
+         test_data = GraphData(test_gs, self.feat_dim)
+         self.train_d = train_data.loader(self.args.batch, True)
+         self.test_d = test_data.loader(self.args.batch, False)
+         self.optimizer = optim.Adam(
+             self.net.parameters(), lr=self.args.lr, amsgrad=True,
+             weight_decay=0.0008)
+
+     def to_cuda(self, gs):
+         if torch.cuda.is_available():
+             if type(gs) == list:
+                 return [g.cuda() for g in gs]
+             return gs.cuda()
+         return gs
+
+     def run_epoch(self, epoch, data, model, optimizer):
+         losses, accs, n_samples = [], [], 0
+         for batch in tqdm(data, desc=str(epoch), unit='b'):
+             cur_len, gs, hs, ys = batch
+             gs, hs, ys = map(self.to_cuda, [gs, hs, ys])
+             loss, acc = model(gs, hs, ys)
+             losses.append(loss*cur_len)
+             accs.append(acc*cur_len)
+             n_samples += cur_len
+             if optimizer is not None:
+                 optimizer.zero_grad()
+                 loss.backward()
+                 optimizer.step()
+
+         avg_loss, avg_acc = sum(losses) / n_samples, sum(accs) / n_samples
+         return avg_loss.item(), avg_acc.item()
+
+     def train(self):
+         max_acc = 0.0
+         train_str = 'Train epoch %d: loss %.5f acc %.5f'
+         test_str = 'Test epoch %d: loss %.5f acc %.5f max %.5f'
+         line_str = '%d:\t%.5f\n'
+         for e_id in range(self.args.num_epochs):
+             self.net.train()
+             loss, acc = self.run_epoch(
+                 e_id, self.train_d, self.net, self.optimizer)
+             print(train_str % (e_id, loss, acc))
+
+             with torch.no_grad():
+                 self.net.eval()
+                 loss, acc = self.run_epoch(e_id, self.test_d, self.net, None)
+             max_acc = max(max_acc, acc)
+             print(test_str % (e_id, loss, acc, max_acc))
+
+         with open(self.args.acc_file, 'a+') as f:
+             f.write(line_str % (self.fold_idx, max_acc))
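One detail in `run_epoch`: batch losses and accuracies are multiplied by the batch length before averaging, so the smaller final batch does not skew the epoch statistics. A tiny sketch with made-up numbers:

```python
# Sketch: weighted averaging over unequal batches (values are hypothetical).
batch_means = [0.50, 0.30]   # per-batch mean losses
batch_sizes = [8, 2]         # the last batch is smaller
weighted = sum(m * n for m, n in zip(batch_means, batch_sizes))
print(weighted / sum(batch_sizes))   # 0.46, vs. the naive mean 0.40
```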
GraphUNets/src/utils/data_loader.py ADDED
@@ -0,0 +1,101 @@
+ import torch
+ from tqdm import tqdm
+ import networkx as nx
+ import numpy as np
+ import torch.nn.functional as F
+ from sklearn.model_selection import StratifiedKFold
+ from functools import partial
+
+
+ class G_data(object):
+     def __init__(self, num_class, feat_dim, g_list):
+         self.num_class = num_class
+         self.feat_dim = feat_dim
+         self.g_list = g_list
+         self.sep_data()
+
+     def sep_data(self, seed=0):
+         skf = StratifiedKFold(n_splits=10, shuffle=True, random_state=seed)
+         labels = [g.label for g in self.g_list]
+         self.idx_list = list(skf.split(np.zeros(len(labels)), labels))
+
+     def use_fold_data(self, fold_idx):
+         self.fold_idx = fold_idx+1
+         train_idx, test_idx = self.idx_list[fold_idx]
+         self.train_gs = [self.g_list[i] for i in train_idx]
+         self.test_gs = [self.g_list[i] for i in test_idx]
+
+
+ class FileLoader(object):
+     def __init__(self, args):
+         self.args = args
+
+     def line_genor(self, lines):
+         for line in lines:
+             yield line
+
+     def gen_graph(self, f, i, label_dict, feat_dict, deg_as_tag):
+         row = next(f).strip().split()
+         n, label = [int(w) for w in row]
+         if label not in label_dict:
+             label_dict[label] = len(label_dict)
+         g = nx.Graph()
+         g.add_nodes_from(list(range(n)))
+         node_tags = []
+         for j in range(n):
+             row = next(f).strip().split()
+             tmp = int(row[1]) + 2
+             row = [int(w) for w in row[:tmp]]
+             if row[0] not in feat_dict:
+                 feat_dict[row[0]] = len(feat_dict)
+             for k in range(2, len(row)):
+                 if j != row[k]:
+                     g.add_edge(j, row[k])
+             if len(row) > 2:
+                 node_tags.append(feat_dict[row[0]])
+         g.label = label
+         g.remove_nodes_from(list(nx.isolates(g)))
+         if deg_as_tag:
+             g.node_tags = list(dict(g.degree).values())
+         else:
+             g.node_tags = node_tags
+         return g
+
+     def process_g(self, label_dict, tag2index, tagset, g):
+         g.label = label_dict[g.label]
+         g.feas = torch.tensor([tag2index[tag] for tag in g.node_tags])
+         g.feas = F.one_hot(g.feas, len(tagset))
+         A = torch.FloatTensor(nx.to_numpy_matrix(g))
+         g.A = A + torch.eye(g.number_of_nodes())
+         return g
+
+     def load_data(self):
+         args = self.args
+         print('loading data ...')
+         g_list = []
+         label_dict = {}
+         feat_dict = {}
+
+         with open('data/%s/%s.txt' % (args.data, args.data), 'r') as f:
+             lines = f.readlines()
+         f = self.line_genor(lines)
+         n_g = int(next(f).strip())
+         for i in tqdm(range(n_g), desc="Create graph", unit='graphs'):
+             g = self.gen_graph(f, i, label_dict, feat_dict, args.deg_as_tag)
+             g_list.append(g)
+
+         tagset = set([])
+         for g in g_list:
+             tagset = tagset.union(set(g.node_tags))
+         tagset = list(tagset)
+         tag2index = {tagset[i]: i for i in range(len(tagset))}
+
+         f_n = partial(self.process_g, label_dict, tag2index, tagset)
+         new_g_list = []
+         for g in tqdm(g_list, desc="Process graph", unit='graphs'):
+             new_g_list.append(f_n(g))
+         num_class = len(label_dict)
+         feat_dim = len(tagset)
+
+         print('# classes: %d' % num_class, '# maximum node tag: %d' % feat_dim)
+         return G_data(num_class, feat_dim, new_g_list)
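`gen_graph` implies the on-disk format `FileLoader` reads: a graph count, then per graph a `<num_nodes> <label>` header followed by one row per node of `<tag> <num_neighbors> <neighbor ids...>`. A hypothetical `data/TOY/TOY.txt` for a single triangle graph, shown as a Python string for illustration (TOY is a made-up dataset name):

```python
# Sketch of the expected text format.
toy = """\
1
3 0
0 2 1 2
0 2 0 2
0 2 0 1
"""
# line 1: one graph; line 2: 3 nodes, label 0;
# each node row: tag 0, 2 neighbors, then the neighbor ids.
```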
GraphUNets/src/utils/dataset.py ADDED
@@ -0,0 +1,45 @@
+ import random
+ import torch
+
+
+ class GraphData(object):
+
+     def __init__(self, data, feat_dim):
+         super(GraphData, self).__init__()
+         self.data = data
+         self.feat_dim = feat_dim
+         self.idx = list(range(len(data)))
+         self.pos = 0
+
+     def __reset__(self):
+         self.pos = 0
+         if self.shuffle:
+             random.shuffle(self.idx)
+
+     def __len__(self):
+         return len(self.data) // self.batch + 1
+
+     def __getitem__(self, idx):
+         g = self.data[idx]
+         return g.A, g.feas.float(), g.label
+
+     def __iter__(self):
+         return self
+
+     def __next__(self):
+         if self.pos >= len(self.data):
+             self.__reset__()
+             raise StopIteration
+
+         cur_idx = self.idx[self.pos: self.pos+self.batch]
+         data = [self.__getitem__(idx) for idx in cur_idx]
+         self.pos += len(cur_idx)
+         gs, hs, labels = map(list, zip(*data))
+         return len(gs), gs, hs, torch.LongTensor(labels)
+
+     def loader(self, batch, shuffle, *args):
+         self.batch = batch
+         self.shuffle = shuffle
+         if shuffle:
+             random.shuffle(self.idx)
+         return self
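`GraphData` is its own iterator, so consumption (as in `Trainer.init` above) reduces to this minimal sketch, assuming `train_gs` and `feat_dim` were produced by `FileLoader`:

```python
# Sketch: batches arrive as (batch_len, adjacencies, features, labels);
# graphs keep their individual sizes, so gs and hs are Python lists.
loader = GraphData(train_gs, feat_dim).loader(batch=32, shuffle=True)
for cur_len, gs, hs, ys in loader:
    pass  # gs[i]: [n_i, n_i], hs[i]: [n_i, feat_dim], ys: LongTensor
```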
GraphUNets/src/utils/ops.py ADDED
@@ -0,0 +1,145 @@
+ import torch
+ import torch.nn as nn
+ import numpy as np
+
+
+ class GraphUnet(nn.Module):
+
+     def __init__(self, ks, in_dim, out_dim, dim, act, drop_p):
+         super(GraphUnet, self).__init__()
+         self.ks = ks
+         self.bottom_gcn = GCN(dim, dim, act, drop_p)
+         self.down_gcns = nn.ModuleList()
+         self.up_gcns = nn.ModuleList()
+         self.pools = nn.ModuleList()
+         self.unpools = nn.ModuleList()
+         self.l_n = len(ks)
+         for i in range(self.l_n):
+             self.down_gcns.append(GCN(dim, dim, act, drop_p))
+             self.up_gcns.append(GCN(dim, dim, act, drop_p))
+             self.pools.append(Pool(ks[i], dim, drop_p))
+             self.unpools.append(Unpool(dim, dim, drop_p))
+
+     def forward(self, g, h):
+         adj_ms = []
+         indices_list = []
+         down_outs = []
+         hs = []
+         org_h = h
+         for i in range(self.l_n):
+             h = self.down_gcns[i](g, h)
+             adj_ms.append(g)
+             down_outs.append(h)
+             g, h, idx = self.pools[i](g, h)
+             indices_list.append(idx)
+         h = self.bottom_gcn(g, h)
+         for i in range(self.l_n):
+             up_idx = self.l_n - i - 1
+             g, idx = adj_ms[up_idx], indices_list[up_idx]
+             g, h = self.unpools[i](g, h, down_outs[up_idx], idx)
+             h = self.up_gcns[i](g, h)
+             h = h.add(down_outs[up_idx])
+             hs.append(h)
+         h = h.add(org_h)
+         hs.append(h)
+         return hs
+
+
+ class GCN(nn.Module):
+
+     def __init__(self, in_dim, out_dim, act, p):
+         super(GCN, self).__init__()
+         self.proj = nn.Linear(in_dim, out_dim)
+         self.act = act
+         self.drop = nn.Dropout(p=p) if p > 0.0 else nn.Identity()
+
+     def forward(self, g, h):
+         h = self.drop(h)
+         h = torch.matmul(g, h)
+         h = self.proj(h)
+         h = self.act(h)
+         return h
+
+
+ class Pool(nn.Module):
+
+     def __init__(self, k, in_dim, p):
+         super(Pool, self).__init__()
+         self.k = k
+         self.sigmoid = nn.Sigmoid()
+         self.proj = nn.Linear(in_dim, 1)
+         self.drop = nn.Dropout(p=p) if p > 0 else nn.Identity()
+
+     def forward(self, g, h):
+         Z = self.drop(h)
+         weights = self.proj(Z).squeeze()
+         scores = self.sigmoid(weights)
+         return top_k_graph(scores, g, h, self.k)
+
+
+ class Unpool(nn.Module):
+
+     def __init__(self, *args):
+         super(Unpool, self).__init__()
+
+     def forward(self, g, h, pre_h, idx):
+         new_h = h.new_zeros([g.shape[0], h.shape[1]])
+         new_h[idx] = h
+         return g, new_h
+
+
+ def top_k_graph(scores, g, h, k):
+     num_nodes = g.shape[0]
+     values, idx = torch.topk(scores, max(2, int(k*num_nodes)))
+     new_h = h[idx, :]
+     values = torch.unsqueeze(values, -1)
+     new_h = torch.mul(new_h, values)
+     un_g = g.bool().float()
+     un_g = torch.matmul(un_g, un_g).bool().float()
+     un_g = un_g[idx, :]
+     un_g = un_g[:, idx]
+     g = norm_g(un_g)
+     return g, new_h, idx
+
+
+ def norm_g(g):
+     degrees = torch.sum(g, 1)
+     g = g / degrees
+     return g
+
+
+ class Initializer(object):
+
+     @classmethod
+     def _glorot_uniform(cls, w):
+         if len(w.size()) == 2:
+             fan_in, fan_out = w.size()
+         elif len(w.size()) == 3:
+             fan_in = w.size()[1] * w.size()[2]
+             fan_out = w.size()[0] * w.size()[2]
+         else:
+             fan_in = np.prod(w.size())
+             fan_out = np.prod(w.size())
+         limit = np.sqrt(6.0 / (fan_in + fan_out))
+         w.uniform_(-limit, limit)
+
+     @classmethod
+     def _param_init(cls, m):
+         if isinstance(m, nn.parameter.Parameter):
+             cls._glorot_uniform(m.data)
+         elif isinstance(m, nn.Linear):
+             m.bias.data.zero_()
+             cls._glorot_uniform(m.weight.data)
+
+     @classmethod
+     def weights_init(cls, m):
+         for p in m.modules():
+             if isinstance(p, nn.ParameterList):
+                 for pp in p:
+                     cls._param_init(pp)
+             else:
+                 cls._param_init(p)
+
+         for name, p in m.named_parameters():
+             if '.' not in name:
+                 cls._param_init(p)
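To make `top_k_graph` concrete: it keeps the `max(2, int(k*n))` highest-scoring nodes, gates their features by their scores, and squares the adjacency before slicing so that two survivors joined through a dropped node stay connected. A small sketch on a 4-node path graph (hand-picked scores; in `Pool` they come from the learned projection):

```python
import torch
from utils.ops import top_k_graph   # assumes this file is on the path

# Path 0-1-2-3 with self-loops, matching data_loader's A = adj + I.
g = torch.tensor([[1., 1., 0., 0.],
                  [1., 1., 1., 0.],
                  [0., 1., 1., 1.],
                  [0., 0., 1., 1.]])
h = torch.eye(4)
scores = torch.tensor([0.9, 0.1, 0.8, 0.2])

new_g, new_h, idx = top_k_graph(scores, g, h, k=0.5)
print(idx)     # tensor([0, 2]): the top max(2, int(0.5*4)) = 2 nodes
print(new_g)   # 0 and 2 remain linked: squaring g bridges dropped node 1
print(new_h)   # the selected rows of h, scaled by their scores
```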
cora/README ADDED
@@ -0,0 +1,29 @@
+ This directory contains a selection of the Cora dataset (www.research.whizbang.com/data).
+
+ The Cora dataset consists of Machine Learning papers. These papers are classified into one of the following seven classes:
+     Case_Based
+     Genetic_Algorithms
+     Neural_Networks
+     Probabilistic_Methods
+     Reinforcement_Learning
+     Rule_Learning
+     Theory
+
+ The papers were selected in a way such that in the final corpus every paper cites or is cited by at least one other paper. There are 2708 papers in the whole corpus.
+
+ After stemming and removing stopwords we were left with a vocabulary of 1433 unique words. All words with document frequency less than 10 were removed.
+
+
+ THE DIRECTORY CONTAINS TWO FILES:
+
+ The .content file contains descriptions of the papers in the following format:
+
+     <paper_id> <word_attributes>+ <class_label>
+
+ The first entry in each line contains the unique string ID of the paper, followed by binary values indicating whether each word in the vocabulary is present (indicated by 1) or absent (indicated by 0) in the paper. Finally, the last entry in the line contains the class label of the paper.
+
+ The .cites file contains the citation graph of the corpus. Each line describes a link in the following format:
+
+     <ID of cited paper> <ID of citing paper>
+
+ Each line contains two paper IDs. The first entry is the ID of the paper being cited and the second ID stands for the paper which contains the citation. The direction of the link is from right to left. If a line is represented by "paper1 paper2" then the link is "paper2->paper1".
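A hedged loading sketch for the two files described above (the names `cora.content` and `cora.cites` are assumptions based on the extensions):

```python
# Sketch: parse the .content and .cites files into features and edges.
ids, feats, labels = [], [], []
with open('cora.content') as f:
    for line in f:
        parts = line.split()
        ids.append(parts[0])                         # <paper_id>
        feats.append([int(x) for x in parts[1:-1]])  # binary word attributes
        labels.append(parts[-1])                     # <class_label>

edges = []
with open('cora.cites') as f:
    for line in f:
        cited, citing = line.split()
        edges.append((citing, cited))    # link direction: citing -> cited
```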
input ADDED
@@ -0,0 +1,2 @@
+ 5 3 4
+ 0 2 3
requirements.txt ADDED
@@ -0,0 +1,4 @@
+ torch
+ torch-scatter
+ torch-sparse
+ torch-geometric
results.csv ADDED
@@ -0,0 +1,271 @@
+ series,n,m,trial,ms,theo_x,normalized_theory_ms
+ dens_x0.5,10000,761,1,51.152,99112.473,38.589
+ dens_x0.5,10000,761,2,50.359,99112.473,38.589
+ dens_x0.5,10000,761,3,50.185,99112.473,38.589
+ dens_x0.5,12000,1077,1,61.595,122827.840,47.823
+ dens_x0.5,12000,1077,2,61.517,122827.840,47.823
+ dens_x0.5,12000,1077,3,60.927,122827.840,47.823
+ dens_x0.5,14000,1455,1,69.076,147545.989,57.447
+ dens_x0.5,14000,1455,2,80.602,147545.989,57.447
+ dens_x0.5,14000,1455,3,69.987,147545.989,57.447
+ dens_x0.5,16000,1937,1,80.775,173636.330,67.605
+ dens_x0.5,16000,1937,2,81.025,173636.330,67.605
+ dens_x0.5,16000,1937,3,87.149,173636.330,67.605
+ dens_x0.5,19000,2715,1,97.314,213940.398,83.297
+ dens_x0.5,19000,2715,2,96.800,213940.398,83.297
+ dens_x0.5,19000,2715,3,99.001,213940.398,83.297
+ dens_x0.5,22000,3611,1,114.149,256079.209,99.704
+ dens_x0.5,22000,3611,2,107.349,256079.209,99.704
+ dens_x0.5,22000,3611,3,128.944,256079.209,99.704
+ dens_x0.5,26000,5194,1,169.081,317113.582,123.468
+ dens_x0.5,26000,5194,2,124.848,317113.582,123.468
+ dens_x0.5,26000,5194,3,126.180,317113.582,123.468
+ dens_x0.5,30000,6763,1,137.270,378988.027,147.558
+ dens_x0.5,30000,6763,2,174.605,378988.027,147.558
+ dens_x0.5,30000,6763,3,347.805,378988.027,147.558
+ dens_x0.5,36000,9660,1,170.769,479031.581,186.510
+ dens_x0.5,36000,9660,2,167.815,479031.581,186.510
+ dens_x0.5,36000,9660,3,157.285,479031.581,186.510
+ dens_x0.5,42000,13346,1,208.810,589181.686,229.397
+ dens_x0.5,42000,13346,2,191.181,589181.686,229.397
+ dens_x0.5,42000,13346,3,188.304,589181.686,229.397
+ dens_x0.5,49000,17900,1,274.029,722491.606,281.301
+ dens_x0.5,49000,17900,2,262.442,722491.606,281.301
+ dens_x0.5,49000,17900,3,268.845,722491.606,281.301
+ dens_x0.5,57000,24363,1,312.798,890990.473,346.906
+ dens_x0.5,57000,24363,2,301.761,890990.473,346.906
+ dens_x0.5,57000,24363,3,327.824,890990.473,346.906
+ dens_x0.5,67000,33829,1,424.048,1120457.009,436.248
+ dens_x0.5,67000,33829,2,375.659,1120457.009,436.248
+ dens_x0.5,67000,33829,3,375.023,1120457.009,436.248
+ dens_x0.5,79000,46917,1,447.988,1419991.587,552.871
+ dens_x0.5,79000,46917,2,543.833,1419991.587,552.871
+ dens_x0.5,79000,46917,3,490.562,1419991.587,552.871
+ dens_x0.5,92000,63864,1,605.730,1781454.424,693.606
+ dens_x0.5,92000,63864,2,597.292,1781454.424,693.606
+ dens_x0.5,92000,63864,3,650.577,1781454.424,693.606
+ dens_x0.5,108000,87350,1,608.491,2264084.329,881.518
+ dens_x0.5,108000,87350,2,568.816,2264084.329,881.518
+ dens_x0.5,108000,87350,3,605.038,2264084.329,881.518
+ dens_x0.5,127000,120912,1,701.876,2913447.536,1134.346
+ dens_x0.5,127000,120912,2,719.486,2913447.536,1134.346
+ dens_x0.5,127000,120912,3,717.672,2913447.536,1134.346
+ dens_x0.5,149000,166365,1,875.466,3756533.770,1462.600
+ dens_x0.5,149000,166365,2,841.735,3756533.770,1462.600
+ dens_x0.5,149000,166365,3,986.523,3756533.770,1462.600
+ dens_x0.5,174000,226551,1,1247.354,4833373.044,1881.866
+ dens_x0.5,174000,226551,2,1151.140,4833373.044,1881.866
+ dens_x0.5,174000,226551,3,1234.685,4833373.044,1881.866
+ dens_x0.5,204000,312325,1,1410.429,6312525.050,2457.772
+ dens_x0.5,204000,312325,2,1327.941,6312525.050,2457.772
+ dens_x0.5,204000,312325,3,1383.628,6312525.050,2457.772
+ dens_x0.5,240000,431142,1,1747.568,8314371.662,3237.187
+ dens_x0.5,240000,431142,2,1674.865,8314371.662,3237.187
+ dens_x0.5,240000,431142,3,1688.159,8314371.662,3237.187
+ dens_x0.5,281000,589828,1,2166.406,10925503.834,4253.828
+ dens_x0.5,281000,589828,2,3446.245,10925503.834,4253.828
+ dens_x0.5,281000,589828,3,2455.903,10925503.834,4253.828
+ dens_x0.5,329000,811844,1,3235.985,14493068.872,5642.853
+ dens_x0.5,329000,811844,2,3914.621,14493068.872,5642.853
+ dens_x0.5,329000,811844,3,3149.491,14493068.872,5642.853
+ dens_x0.5,386000,1115568,1,4208.325,19315559.086,7520.482
+ dens_x0.5,386000,1115568,2,4153.722,19315559.086,7520.482
+ dens_x0.5,386000,1115568,3,4030.253,19315559.086,7520.482
+ dens_x0.5,452000,1531464,1,5754.485,25827552.428,10055.916
+ dens_x0.5,452000,1531464,2,5686.995,25827552.428,10055.916
+ dens_x0.5,452000,1531464,3,5806.431,25827552.428,10055.916
+ dens_x0.5,530000,2103874,1,8092.970,34716124.680,13516.668
+ dens_x0.5,530000,2103874,2,8378.748,34716124.680,13516.668
+ dens_x0.5,530000,2103874,3,8476.747,34716124.680,13516.668
+ dens_x0.5,621000,2887681,1,12003.507,46802598.872,18222.517
+ dens_x0.5,621000,2887681,2,11475.289,46802598.872,18222.517
+ dens_x0.5,621000,2887681,3,11643.507,46802598.872,18222.517
+ dens_x0.5,728000,3971764,1,15740.247,63437679.196,24699.359
+ dens_x0.5,728000,3971764,2,15861.904,63437679.196,24699.359
+ dens_x0.5,728000,3971764,3,16104.897,63437679.196,24699.359
+ dens_x0.5,853000,5448500,1,23924.937,86056528.179,33505.972
+ dens_x0.5,853000,5448500,2,23993.406,86056528.179,33505.972
+ dens_x0.5,853000,5448500,3,25817.687,86056528.179,33505.972
+ dens_x0.5,1000000,7489974,1,33333.025,117293325.434,45667.970
+ dens_x0.5,1000000,7489974,2,32787.127,117293325.434,45667.970
+ dens_x0.5,1000000,7489974,3,35885.585,117293325.434,45667.970
+ dens_x1.0,10000,1550,1,58.206,106379.431,41.419
+ dens_x1.0,10000,1550,2,58.889,106379.431,41.419
+ dens_x1.0,10000,1550,3,59.592,106379.431,41.419
+ dens_x1.0,12000,2135,1,68.803,132765.276,51.692
+ dens_x1.0,12000,2135,2,69.910,132765.276,51.692
+ dens_x1.0,12000,2135,3,69.644,132765.276,51.692
+ dens_x1.0,14000,2963,1,94.906,161942.582,63.052
+ dens_x1.0,14000,2963,2,80.102,161942.582,63.052
+ dens_x1.0,14000,2963,3,84.479,161942.582,63.052
+ dens_x1.0,16000,3912,1,94.907,192755.010,75.049
+ dens_x1.0,16000,3912,2,97.008,192755.010,75.049
+ dens_x1.0,16000,3912,3,94.134,192755.010,75.049
+ dens_x1.0,19000,5413,1,108.859,240521.618,93.647
+ dens_x1.0,19000,5413,2,108.454,240521.618,93.647
+ dens_x1.0,19000,5413,3,108.726,240521.618,93.647
+ dens_x1.0,22000,7290,1,116.383,292864.786,114.026
+ dens_x1.0,22000,7290,2,119.034,292864.786,114.026
+ dens_x1.0,22000,7290,3,124.547,292864.786,114.026
+ dens_x1.0,26000,10078,1,134.102,366763.602,142.799
+ dens_x1.0,26000,10078,2,138.847,366763.602,142.799
+ dens_x1.0,26000,10078,3,145.234,366763.602,142.799
+ dens_x1.0,30000,13336,1,205.487,446748.773,173.941
+ dens_x1.0,30000,13336,2,201.550,446748.773,173.941
+ dens_x1.0,30000,13336,3,185.775,446748.773,173.941
+ dens_x1.0,36000,19373,1,195.098,580933.327,226.185
+ dens_x1.0,36000,19373,2,221.406,580933.327,226.185
+ dens_x1.0,36000,19373,3,229.693,580933.327,226.185
+ dens_x1.0,42000,26057,1,268.682,724495.682,282.081
+ dens_x1.0,42000,26057,2,235.697,724495.682,282.081
+ dens_x1.0,42000,26057,3,260.327,724495.682,282.081
+ dens_x1.0,49000,35903,1,319.756,916916.365,357.000
+ dens_x1.0,49000,35903,2,247.543,916916.365,357.000
+ dens_x1.0,49000,35903,3,242.526,916916.365,357.000
+ dens_x1.0,57000,48648,1,397.064,1156930.810,450.449
+ dens_x1.0,57000,48648,2,380.093,1156930.810,450.449
+ dens_x1.0,57000,48648,3,346.247,1156930.810,450.449
+ dens_x1.0,67000,67188,1,359.800,1491157.159,580.580
+ dens_x1.0,67000,67188,2,361.464,1491157.159,580.580
+ dens_x1.0,67000,67188,3,369.751,1491157.159,580.580
+ dens_x1.0,79000,93552,1,469.624,1945903.955,757.635
+ dens_x1.0,79000,93552,2,523.395,1945903.955,757.635
+ dens_x1.0,79000,93552,3,451.623,1945903.955,757.635
+ dens_x1.0,92000,126776,1,557.575,2500509.887,973.570
+ dens_x1.0,92000,126776,2,555.152,2500509.887,973.570
+ dens_x1.0,92000,126776,3,559.637,2500509.887,973.570
+ dens_x1.0,108000,175020,1,693.661,3280169.679,1277.129
+ dens_x1.0,108000,175020,2,694.646,3280169.679,1277.129
+ dens_x1.0,108000,175020,3,688.850,3280169.679,1277.129
+ dens_x1.0,127000,241838,1,887.291,4334562.918,1687.655
+ dens_x1.0,127000,241838,2,917.991,4334562.918,1687.655
+ dens_x1.0,127000,241838,3,887.282,4334562.918,1687.655
+ dens_x1.0,149000,333094,1,1218.530,5742559.864,2235.857
+ dens_x1.0,149000,333094,2,1262.687,5742559.864,2235.857
+ dens_x1.0,149000,333094,3,1221.463,5742559.864,2235.857
+ dens_x1.0,174000,454003,1,1653.458,7577993.244,2950.480
+ dens_x1.0,174000,454003,2,1589.535,7577993.244,2950.480
+ dens_x1.0,174000,454003,3,1597.942,7577993.244,2950.480
+ dens_x1.0,204000,623499,1,2081.263,10116899.562,3938.999
+ dens_x1.0,204000,623499,2,2064.348,10116899.562,3938.999
+ dens_x1.0,204000,623499,3,2011.870,10116899.562,3938.999
+ dens_x1.0,240000,862431,1,2886.728,13657349.809,5317.467
+ dens_x1.0,240000,862431,2,2914.305,13657349.809,5317.467
+ dens_x1.0,240000,862431,3,2874.904,13657349.809,5317.467
+ dens_x1.0,281000,1182168,1,4453.264,18357066.601,7147.295
+ dens_x1.0,281000,1182168,2,4139.544,18357066.601,7147.295
+ dens_x1.0,281000,1182168,3,4573.999,18357066.601,7147.295
+ dens_x1.0,329000,1623777,1,5959.977,24807713.897,9658.844
+ dens_x1.0,329000,1623777,2,6951.901,24807713.897,9658.844
+ dens_x1.0,329000,1623777,3,5727.519,24807713.897,9658.844
+ dens_x1.0,386000,2232671,1,8057.699,33685517.024,13115.402
+ dens_x1.0,386000,2232671,2,8162.875,33685517.024,13115.402
+ dens_x1.0,386000,2232671,3,8187.878,33685517.024,13115.402
+ dens_x1.0,452000,3055874,1,11484.033,45677561.904,17784.486
+ dens_x1.0,452000,3055874,2,11583.497,45677561.904,17784.486
+ dens_x1.0,452000,3055874,3,11713.283,45677561.904,17784.486
+ dens_x1.0,530000,4208065,1,16968.262,62450692.510,24315.078
+ dens_x1.0,530000,4208065,2,15686.033,62450692.510,24315.078
+ dens_x1.0,530000,4208065,3,16661.082,62450692.510,24315.078
+ dens_x1.0,621000,5774507,1,24345.921,85310220.195,33215.398
+ dens_x1.0,621000,5774507,2,24451.492,85310220.195,33215.398
+ dens_x1.0,621000,5774507,3,23900.906,85310220.195,33215.398
+ dens_x1.0,728000,7935888,1,36299.762,116945648.236,45532.602
+ dens_x1.0,728000,7935888,2,33059.393,116945648.236,45532.602
+ dens_x1.0,728000,7935888,3,37098.302,116945648.236,45532.602
+ dens_x1.0,853000,10900416,1,53083.830,160510699.866,62494.585
+ dens_x1.0,853000,10900416,2,53282.730,160510699.866,62494.585
+ dens_x1.0,853000,10900416,3,52312.206,160510699.866,62494.585
+ dens_x1.0,1000000,14986435,1,77987.779,220860761.527,85991.786
+ dens_x1.0,1000000,14986435,2,78617.841,220860761.527,85991.786
+ dens_x1.0,1000000,14986435,3,79625.105,220860761.527,85991.786
+ dens_x2.0,10000,3007,1,72.111,119798.897,46.644
+ dens_x2.0,10000,3007,2,70.572,119798.897,46.644
+ dens_x2.0,10000,3007,3,69.460,119798.897,46.644
+ dens_x2.0,12000,4365,1,88.829,153710.912,59.847
+ dens_x2.0,12000,4365,2,87.598,153710.912,59.847
+ dens_x2.0,12000,4365,3,92.566,153710.912,59.847
+ dens_x2.0,14000,5830,1,98.751,189313.294,73.709
+ dens_x2.0,14000,5830,2,99.427,189313.294,73.709
+ dens_x2.0,14000,5830,3,97.017,189313.294,73.709
+ dens_x2.0,16000,7624,1,113.441,228688.447,89.039
+ dens_x2.0,16000,7624,2,114.051,228688.447,89.039
+ dens_x2.0,16000,7624,3,111.923,228688.447,89.039
+ dens_x2.0,19000,10837,1,140.996,293959.920,114.453
+ dens_x2.0,19000,10837,2,131.449,293959.920,114.453
+ dens_x2.0,19000,10837,3,137.347,293959.920,114.453
+ dens_x2.0,22000,14556,1,165.281,365516.050,142.313
+ dens_x2.0,22000,14556,2,146.564,365516.050,142.313
+ dens_x2.0,22000,14556,3,144.894,365516.050,142.313
+ dens_x2.0,26000,20395,1,181.262,471644.695,183.634
+ dens_x2.0,26000,20395,2,198.833,471644.695,183.634
+ dens_x2.0,26000,20395,3,207.951,471644.695,183.634
+ dens_x2.0,30000,26790,1,225.380,585445.422,227.942
+ dens_x2.0,30000,26790,2,212.692,585445.422,227.942
+ dens_x2.0,30000,26790,3,220.381,585445.422,227.942
+ dens_x2.0,36000,38800,1,291.967,784747.311,305.540
+ dens_x2.0,36000,38800,2,287.036,784747.311,305.540
+ dens_x2.0,36000,38800,3,285.022,784747.311,305.540
+ dens_x2.0,42000,53391,1,306.692,1015477.726,395.375
+ dens_x2.0,42000,53391,2,317.915,1015477.726,395.375
+ dens_x2.0,42000,53391,3,324.443,1015477.726,395.375
+ dens_x2.0,49000,71945,1,431.617,1306154.668,508.549
+ dens_x2.0,49000,71945,2,411.852,1306154.668,508.549
+ dens_x2.0,49000,71945,3,442.893,1306154.668,508.549
+ dens_x2.0,57000,97182,1,444.690,1688417.255,657.383
+ dens_x2.0,57000,97182,2,414.667,1688417.255,657.383
+ dens_x2.0,57000,97182,3,413.077,1688417.255,657.383
+ dens_x2.0,67000,134082,1,528.155,2234513.248,870.004
+ dens_x2.0,67000,134082,2,532.767,2234513.248,870.004
+ dens_x2.0,67000,134082,3,528.469,2234513.248,870.004
+ dens_x2.0,79000,186876,1,676.211,2998337.660,1167.398
+ dens_x2.0,79000,186876,2,664.701,2998337.660,1167.398
+ dens_x2.0,79000,186876,3,697.816,2998337.660,1167.398
+ dens_x2.0,92000,253371,1,849.705,3947432.991,1536.927
+ dens_x2.0,92000,253371,2,904.967,3947432.991,1536.927
+ dens_x2.0,92000,253371,3,870.314,3947432.991,1536.927
+ dens_x2.0,108000,349108,1,1181.033,5297829.841,2062.702
+ dens_x2.0,108000,349108,2,1179.125,5297829.841,2062.702
+ dens_x2.0,108000,349108,3,1169.062,5297829.841,2062.702
+ dens_x2.0,127000,484181,1,1568.306,7182563.887,2796.520
+ dens_x2.0,127000,484181,2,1527.148,7182563.887,2796.520
+ dens_x2.0,127000,484181,3,1527.026,7182563.887,2796.520
+ dens_x2.0,149000,666491,1,2130.278,9713885.437,3782.086
+ dens_x2.0,149000,666491,2,2095.352,9713885.437,3782.086
+ dens_x2.0,149000,666491,3,2116.762,9713885.437,3782.086
+ dens_x2.0,174000,907603,1,2955.051,13051498.522,5081.580
+ dens_x2.0,174000,907603,2,2968.651,13051498.522,5081.580
+ dens_x2.0,174000,907603,3,2901.389,13051498.522,5081.580
+ dens_x2.0,204000,1247723,1,4011.758,17748584.329,6910.383
+ dens_x2.0,204000,1247723,2,3967.668,17748584.329,6910.383
+ dens_x2.0,204000,1247723,3,4002.986,17748584.329,6910.383
+ dens_x2.0,240000,1725541,1,5827.568,24349896.729,9480.594
+ dens_x2.0,240000,1725541,2,5824.487,24349896.729,9480.594
+ dens_x2.0,240000,1725541,3,5785.284,24349896.729,9480.594
+ dens_x2.0,281000,2365586,1,8196.369,33204358.944,12928.064
+ dens_x2.0,281000,2365586,2,8396.722,33204358.944,12928.064
+ dens_x2.0,281000,2365586,3,8292.723,33204358.944,12928.064
+ dens_x2.0,329000,3242756,1,12064.337,45374920.412,17666.653
+ dens_x2.0,329000,3242756,2,12045.907,45374920.412,17666.653
+ dens_x2.0,329000,3242756,3,12026.978,45374920.412,17666.653
+ dens_x2.0,386000,4461354,1,17662.636,62354387.279,24277.581
+ dens_x2.0,386000,4461354,2,17484.608,62354387.279,24277.581
+ dens_x2.0,386000,4461354,3,17401.132,62354387.279,24277.581
+ dens_x2.0,452000,6119886,1,31461.157,85575402.535,33318.647
+ dens_x2.0,452000,6119886,2,27648.193,85575402.535,33318.647
+ dens_x2.0,452000,6119886,3,25986.776,85575402.535,33318.647
+ dens_x2.0,530000,8413791,1,39027.499,117884820.410,45898.267
+ dens_x2.0,530000,8413791,2,37862.747,117884820.410,45898.267
+ dens_x2.0,530000,8413791,3,37738.933,117884820.410,45898.267
+ dens_x2.0,621000,11552079,1,56434.022,162377752.059,63221.519
+ dens_x2.0,621000,11552079,2,55647.184,162377752.059,63221.519
+ dens_x2.0,621000,11552079,3,55382.089,162377752.059,63221.519
+ dens_x2.0,728000,15870417,1,84297.240,224046367.608,87232.097
+ dens_x2.0,728000,15870417,2,84495.719,224046367.608,87232.097
+ dens_x2.0,728000,15870417,3,84830.940,224046367.608,87232.097
+ dens_x2.0,853000,21800415,1,127846.865,309366697.818,120451.431
+ dens_x2.0,853000,21800415,2,128578.916,309366697.818,120451.431
+ dens_x2.0,853000,21800415,3,127293.075,309366697.818,120451.431
+ dens_x2.0,1000000,29958277,1,193809.065,427704402.750,166526.028
+ dens_x2.0,1000000,29958277,2,192667.212,427704402.750,166526.028
+ dens_x2.0,1000000,29958277,3,194000.905,427704402.750,166526.028
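The results CSVs share one schema: each (series, n, m) point is timed over three trials (`ms`), next to the theoretical cost (`theo_x`) and its normalized prediction (`normalized_theory_ms`). A hedged pandas sketch for collapsing the trials into one median per point (pandas is an assumption; any aggregator works):

```python
import pandas as pd

df = pd.read_csv('results.csv')
summary = (df.groupby(['series', 'n'], as_index=False)
             .agg(ms_median=('ms', 'median'),
                  theory_ms=('normalized_theory_ms', 'first')))
print(summary.head())
```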
results_clique.csv ADDED
@@ -0,0 +1,271 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ series,n,m,trial,ms,theo_x,normalized_theory_ms
2
+ dens_x0.5,10000,761,1,38.276,99112.473,18.938
3
+ dens_x0.5,10000,761,2,44.437,99112.473,18.938
4
+ dens_x0.5,10000,761,3,47.050,99112.473,18.938
5
+ dens_x0.5,12000,1077,1,61.835,122827.840,23.469
6
+ dens_x0.5,12000,1077,2,56.548,122827.840,23.469
7
+ dens_x0.5,12000,1077,3,57.742,122827.840,23.469
8
+ dens_x0.5,14000,1455,1,56.974,147545.989,28.192
9
+ dens_x0.5,14000,1455,2,72.837,147545.989,28.192
10
+ dens_x0.5,14000,1455,3,53.347,147545.989,28.192
11
+ dens_x0.5,16000,1937,1,78.537,173636.330,33.178
12
+ dens_x0.5,16000,1937,2,85.969,173636.330,33.178
13
+ dens_x0.5,16000,1937,3,86.131,173636.330,33.178
14
+ dens_x0.5,19000,2715,1,98.627,213940.398,40.879
15
+ dens_x0.5,19000,2715,2,116.253,213940.398,40.879
16
+ dens_x0.5,19000,2715,3,108.861,213940.398,40.879
17
+ dens_x0.5,22000,3611,1,130.744,256079.209,48.930
18
+ dens_x0.5,22000,3611,2,89.643,256079.209,48.930
19
+ dens_x0.5,22000,3611,3,131.201,256079.209,48.930
20
+ dens_x0.5,26000,5194,1,120.353,317113.582,60.593
21
+ dens_x0.5,26000,5194,2,145.893,317113.582,60.593
22
+ dens_x0.5,26000,5194,3,144.641,317113.582,60.593
23
+ dens_x0.5,30000,6763,1,193.243,378988.027,72.415
24
+ dens_x0.5,30000,6763,2,138.053,378988.027,72.415
25
+ dens_x0.5,30000,6763,3,159.792,378988.027,72.415
26
+ dens_x0.5,36000,9660,1,188.103,479031.581,91.531
27
+ dens_x0.5,36000,9660,2,181.788,479031.581,91.531
28
+ dens_x0.5,36000,9660,3,183.152,479031.581,91.531
29
+ dens_x0.5,42000,13346,1,151.805,589181.686,112.578
30
+ dens_x0.5,42000,13346,2,237.285,589181.686,112.578
31
+ dens_x0.5,42000,13346,3,241.080,589181.686,112.578
32
+ dens_x0.5,49000,17900,1,225.210,722491.606,138.051
33
+ dens_x0.5,49000,17900,2,256.325,722491.606,138.051
34
+ dens_x0.5,49000,17900,3,254.573,722491.606,138.051
35
+ dens_x0.5,57000,24363,1,205.892,890990.473,170.247
36
+ dens_x0.5,57000,24363,2,218.569,890990.473,170.247
37
+ dens_x0.5,57000,24363,3,193.055,890990.473,170.247
38
+ dens_x0.5,67000,33829,1,312.891,1120457.009,214.092
39
+ dens_x0.5,67000,33829,2,332.210,1120457.009,214.092
40
+ dens_x0.5,67000,33829,3,246.322,1120457.009,214.092
41
+ dens_x0.5,79000,46917,1,302.178,1419991.587,271.326
42
+ dens_x0.5,79000,46917,2,362.654,1419991.587,271.326
43
+ dens_x0.5,79000,46917,3,388.967,1419991.587,271.326
44
+ dens_x0.5,92000,63864,1,405.253,1781454.424,340.392
45
+ dens_x0.5,92000,63864,2,428.211,1781454.424,340.392
46
+ dens_x0.5,92000,63864,3,424.595,1781454.424,340.392
47
+ dens_x0.5,108000,87350,1,531.480,2264084.329,432.611
48
+ dens_x0.5,108000,87350,2,456.242,2264084.329,432.611
49
+ dens_x0.5,108000,87350,3,480.178,2264084.329,432.611
50
+ dens_x0.5,127000,120912,1,584.835,2913447.536,556.689
51
+ dens_x0.5,127000,120912,2,574.881,2913447.536,556.689
52
+ dens_x0.5,127000,120912,3,591.637,2913447.536,556.689
53
+ dens_x0.5,149000,166365,1,770.336,3756533.770,717.782
54
+ dens_x0.5,149000,166365,2,749.424,3756533.770,717.782
55
+ dens_x0.5,149000,166365,3,758.684,3756533.770,717.782
56
+ dens_x0.5,174000,226551,1,977.870,4833373.044,923.540
57
+ dens_x0.5,174000,226551,2,911.794,4833373.044,923.540
58
+ dens_x0.5,174000,226551,3,1973.549,4833373.044,923.540
59
+ dens_x0.5,204000,312325,1,1322.555,6312525.050,1206.169
60
+ dens_x0.5,204000,312325,2,1448.643,6312525.050,1206.169
61
+ dens_x0.5,204000,312325,3,1015.363,6312525.050,1206.169
62
+ dens_x0.5,240000,431142,1,1260.775,8314371.662,1588.673
63
+ dens_x0.5,240000,431142,2,1571.054,8314371.662,1588.673
64
+ dens_x0.5,240000,431142,3,2325.376,8314371.662,1588.673
65
+ dens_x0.5,281000,589828,1,1716.171,10925503.834,2087.597
66
+ dens_x0.5,281000,589828,2,1772.932,10925503.834,2087.597
67
+ dens_x0.5,281000,589828,3,1770.317,10925503.834,2087.597
68
+ dens_x0.5,329000,811844,1,2497.336,14493068.872,2769.272
69
+ dens_x0.5,329000,811844,2,3011.600,14493068.872,2769.272
70
+ dens_x0.5,329000,811844,3,2262.160,14493068.872,2769.272
71
+ dens_x0.5,386000,1115568,1,3122.086,19315559.086,3690.732
72
+ dens_x0.5,386000,1115568,2,3034.498,19315559.086,3690.732
73
+ dens_x0.5,386000,1115568,3,3167.076,19315559.086,3690.732
74
+ dens_x0.5,452000,1531464,1,4496.003,25827552.428,4935.015
75
+ dens_x0.5,452000,1531464,2,4372.080,25827552.428,4935.015
76
+ dens_x0.5,452000,1531464,3,4231.171,25827552.428,4935.015
77
+ dens_x0.5,530000,2103874,1,6131.828,34716124.680,6633.404
78
+ dens_x0.5,530000,2103874,2,6007.724,34716124.680,6633.404
79
+ dens_x0.5,530000,2103874,3,5781.477,34716124.680,6633.404
80
+ dens_x0.5,621000,2887681,1,7942.139,46802598.872,8942.834
81
+ dens_x0.5,621000,2887681,2,7923.217,46802598.872,8942.834
82
+ dens_x0.5,621000,2887681,3,7941.885,46802598.872,8942.834
83
+ dens_x0.5,728000,3971764,1,11260.380,63437679.196,12121.392
84
+ dens_x0.5,728000,3971764,2,11239.027,63437679.196,12121.392
85
+ dens_x0.5,728000,3971764,3,10927.841,63437679.196,12121.392
86
+ dens_x0.5,853000,5448500,1,15269.468,86056528.179,16443.301
87
+ dens_x0.5,853000,5448500,2,15545.416,86056528.179,16443.301
88
+ dens_x0.5,853000,5448500,3,15453.498,86056528.179,16443.301
89
+ dens_x0.5,1000000,7489974,1,21313.952,117293325.434,22411.891
90
+ dens_x0.5,1000000,7489974,2,21260.455,117293325.434,22411.891
91
+ dens_x0.5,1000000,7489974,3,21363.975,117293325.434,22411.891
92
+ dens_x1.0,10000,1550,1,61.324,106379.431,20.327
93
+ dens_x1.0,10000,1550,2,42.309,106379.431,20.327
94
+ dens_x1.0,10000,1550,3,43.722,106379.431,20.327
95
+ dens_x1.0,12000,2135,1,59.932,132765.276,25.368
96
+ dens_x1.0,12000,2135,2,58.811,132765.276,25.368
97
+ dens_x1.0,12000,2135,3,59.965,132765.276,25.368
98
+ dens_x1.0,14000,2963,1,67.045,161942.582,30.943
99
+ dens_x1.0,14000,2963,2,64.095,161942.582,30.943
100
+ dens_x1.0,14000,2963,3,98.423,161942.582,30.943
101
+ dens_x1.0,16000,3912,1,67.059,192755.010,36.831
102
+ dens_x1.0,16000,3912,2,73.193,192755.010,36.831
103
+ dens_x1.0,16000,3912,3,78.115,192755.010,36.831
104
+ dens_x1.0,19000,5413,1,86.648,240521.618,45.958
105
+ dens_x1.0,19000,5413,2,129.568,240521.618,45.958
106
+ dens_x1.0,19000,5413,3,78.989,240521.618,45.958
107
+ dens_x1.0,22000,7290,1,111.022,292864.786,55.959
108
+ dens_x1.0,22000,7290,2,133.328,292864.786,55.959
109
+ dens_x1.0,22000,7290,3,99.040,292864.786,55.959
110
+ dens_x1.0,26000,10078,1,152.963,366763.602,70.080
111
+ dens_x1.0,26000,10078,2,122.924,366763.602,70.080
112
+ dens_x1.0,26000,10078,3,138.414,366763.602,70.080
113
+ dens_x1.0,30000,13336,1,129.110,446748.773,85.363
114
+ dens_x1.0,30000,13336,2,186.901,446748.773,85.363
115
+ dens_x1.0,30000,13336,3,145.582,446748.773,85.363
116
+ dens_x1.0,36000,19373,1,157.848,580933.327,111.002
117
+ dens_x1.0,36000,19373,2,214.319,580933.327,111.002
118
+ dens_x1.0,36000,19373,3,144.951,580933.327,111.002
119
+ dens_x1.0,42000,26057,1,274.180,724495.682,138.433
120
+ dens_x1.0,42000,26057,2,190.711,724495.682,138.433
121
+ dens_x1.0,42000,26057,3,282.941,724495.682,138.433
122
+ dens_x1.0,49000,35903,1,223.180,916916.365,175.200
123
+ dens_x1.0,49000,35903,2,323.990,916916.365,175.200
124
+ dens_x1.0,49000,35903,3,227.125,916916.365,175.200
125
+ dens_x1.0,57000,48648,1,317.633,1156930.810,221.061
126
+ dens_x1.0,57000,48648,2,269.861,1156930.810,221.061
127
+ dens_x1.0,57000,48648,3,301.058,1156930.810,221.061
128
+ dens_x1.0,67000,67188,1,372.238,1491157.159,284.924
129
+ dens_x1.0,67000,67188,2,310.594,1491157.159,284.924
130
+ dens_x1.0,67000,67188,3,327.349,1491157.159,284.924
131
+ dens_x1.0,79000,93552,1,399.597,1945903.955,371.815
132
+ dens_x1.0,79000,93552,2,399.330,1945903.955,371.815
133
+ dens_x1.0,79000,93552,3,378.416,1945903.955,371.815
134
+ dens_x1.0,92000,126776,1,436.287,2500509.887,477.786
135
+ dens_x1.0,92000,126776,2,458.982,2500509.887,477.786
136
+ dens_x1.0,92000,126776,3,455.902,2500509.887,477.786
137
+ dens_x1.0,108000,175020,1,552.531,3280169.679,626.760
138
+ dens_x1.0,108000,175020,2,541.982,3280169.679,626.760
139
+ dens_x1.0,108000,175020,3,541.280,3280169.679,626.760
140
+ dens_x1.0,127000,241838,1,737.481,4334562.918,828.229
141
+ dens_x1.0,127000,241838,2,728.767,4334562.918,828.229
142
+ dens_x1.0,127000,241838,3,720.321,4334562.918,828.229
143
+ dens_x1.0,149000,333094,1,934.288,5742559.864,1097.263
144
+ dens_x1.0,149000,333094,2,1065.436,5742559.864,1097.263
145
+ dens_x1.0,149000,333094,3,950.058,5742559.864,1097.263
146
+ dens_x1.0,174000,454003,1,1169.608,7577993.244,1447.969
147
+ dens_x1.0,174000,454003,2,1263.648,7577993.244,1447.969
148
+ dens_x1.0,174000,454003,3,1170.809,7577993.244,1447.969
149
+ dens_x1.0,204000,623499,1,1608.676,10116899.562,1933.092
+ dens_x1.0,204000,623499,2,1533.414,10116899.562,1933.092
+ dens_x1.0,204000,623499,3,1524.210,10116899.562,1933.092
+ dens_x1.0,240000,862431,1,2177.558,13657349.809,2609.586
+ dens_x1.0,240000,862431,2,2162.622,13657349.809,2609.586
+ dens_x1.0,240000,862431,3,2250.789,13657349.809,2609.586
+ dens_x1.0,281000,1182168,1,2836.469,18357066.601,3507.587
+ dens_x1.0,281000,1182168,2,2819.405,18357066.601,3507.587
+ dens_x1.0,281000,1182168,3,2877.142,18357066.601,3507.587
+ dens_x1.0,329000,1623777,1,3866.669,24807713.897,4740.148
+ dens_x1.0,329000,1623777,2,3998.567,24807713.897,4740.148
+ dens_x1.0,329000,1623777,3,4059.382,24807713.897,4740.148
+ dens_x1.0,386000,2232671,1,5485.621,33685517.024,6436.480
+ dens_x1.0,386000,2232671,2,5580.961,33685517.024,6436.480
+ dens_x1.0,386000,2232671,3,5513.234,33685517.024,6436.480
+ dens_x1.0,452000,3055874,1,7599.997,45677561.904,8727.867
+ dens_x1.0,452000,3055874,2,7705.885,45677561.904,8727.867
+ dens_x1.0,452000,3055874,3,7632.451,45677561.904,8727.867
+ dens_x1.0,530000,4208065,1,10645.239,62450692.510,11932.802
+ dens_x1.0,530000,4208065,2,10608.786,62450692.510,11932.802
+ dens_x1.0,530000,4208065,3,10661.889,62450692.510,11932.802
+ dens_x1.0,621000,5774507,1,14916.794,85310220.195,16300.700
+ dens_x1.0,621000,5774507,2,14760.646,85310220.195,16300.700
+ dens_x1.0,621000,5774507,3,15000.037,85310220.195,16300.700
+ dens_x1.0,728000,7935888,1,21350.019,116945648.236,22345.458
+ dens_x1.0,728000,7935888,2,20599.824,116945648.236,22345.458
+ dens_x1.0,728000,7935888,3,20715.817,116945648.236,22345.458
+ dens_x1.0,853000,10900416,1,28939.165,160510699.866,30669.676
+ dens_x1.0,853000,10900416,2,28468.021,160510699.866,30669.676
+ dens_x1.0,853000,10900416,3,31349.436,160510699.866,30669.676
+ dens_x1.0,1000000,14986435,1,66847.745,220860761.527,42201.099
+ dens_x1.0,1000000,14986435,2,41387.027,220860761.527,42201.099
+ dens_x1.0,1000000,14986435,3,42951.041,220860761.527,42201.099
+ dens_x2.0,10000,3007,1,87.427,119798.897,22.891
+ dens_x2.0,10000,3007,2,63.753,119798.897,22.891
+ dens_x2.0,10000,3007,3,52.585,119798.897,22.891
+ dens_x2.0,12000,4365,1,68.433,153710.912,29.370
+ dens_x2.0,12000,4365,2,82.225,153710.912,29.370
+ dens_x2.0,12000,4365,3,62.498,153710.912,29.370
+ dens_x2.0,14000,5830,1,146.832,189313.294,36.173
+ dens_x2.0,14000,5830,2,92.943,189313.294,36.173
+ dens_x2.0,14000,5830,3,77.130,189313.294,36.173
+ dens_x2.0,16000,7624,1,151.648,228688.447,43.697
+ dens_x2.0,16000,7624,2,86.382,228688.447,43.697
+ dens_x2.0,16000,7624,3,118.420,228688.447,43.697
+ dens_x2.0,19000,10837,1,115.938,293959.920,56.169
+ dens_x2.0,19000,10837,2,138.467,293959.920,56.169
+ dens_x2.0,19000,10837,3,110.107,293959.920,56.169
+ dens_x2.0,22000,14556,1,150.381,365516.050,69.841
+ dens_x2.0,22000,14556,2,126.595,365516.050,69.841
+ dens_x2.0,22000,14556,3,164.303,365516.050,69.841
+ dens_x2.0,26000,20395,1,143.082,471644.695,90.120
+ dens_x2.0,26000,20395,2,188.214,471644.695,90.120
+ dens_x2.0,26000,20395,3,150.807,471644.695,90.120
+ dens_x2.0,30000,26790,1,156.010,585445.422,111.864
+ dens_x2.0,30000,26790,2,245.802,585445.422,111.864
+ dens_x2.0,30000,26790,3,158.062,585445.422,111.864
+ dens_x2.0,36000,38800,1,193.929,784747.311,149.946
+ dens_x2.0,36000,38800,2,193.447,784747.311,149.946
+ dens_x2.0,36000,38800,3,320.627,784747.311,149.946
+ dens_x2.0,42000,53391,1,245.425,1015477.726,194.033
+ dens_x2.0,42000,53391,2,259.902,1015477.726,194.033
+ dens_x2.0,42000,53391,3,259.556,1015477.726,194.033
+ dens_x2.0,49000,71945,1,307.752,1306154.668,249.574
+ dens_x2.0,49000,71945,2,310.579,1306154.668,249.574
+ dens_x2.0,49000,71945,3,332.788,1306154.668,249.574
+ dens_x2.0,57000,97182,1,349.160,1688417.255,322.615
+ dens_x2.0,57000,97182,2,344.003,1688417.255,322.615
+ dens_x2.0,57000,97182,3,309.397,1688417.255,322.615
+ dens_x2.0,67000,134082,1,415.061,2234513.248,426.961
+ dens_x2.0,67000,134082,2,418.921,2234513.248,426.961
+ dens_x2.0,67000,134082,3,542.886,2234513.248,426.961
+ dens_x2.0,79000,186876,1,506.513,2998337.660,572.909
+ dens_x2.0,79000,186876,2,509.927,2998337.660,572.909
+ dens_x2.0,79000,186876,3,511.928,2998337.660,572.909
+ dens_x2.0,92000,253371,1,636.724,3947432.991,754.258
+ dens_x2.0,92000,253371,2,641.103,3947432.991,754.258
+ dens_x2.0,92000,253371,3,630.308,3947432.991,754.258
+ dens_x2.0,108000,349108,1,1719.294,5297829.841,1012.286
+ dens_x2.0,108000,349108,2,1146.188,5297829.841,1012.286
+ dens_x2.0,108000,349108,3,926.098,5297829.841,1012.286
+ dens_x2.0,127000,484181,1,1180.387,7182563.887,1372.413
+ dens_x2.0,127000,484181,2,1185.449,7182563.887,1372.413
+ dens_x2.0,127000,484181,3,1133.979,7182563.887,1372.413
+ dens_x2.0,149000,666491,1,1659.692,9713885.437,1856.086
+ dens_x2.0,149000,666491,2,1612.972,9713885.437,1856.086
+ dens_x2.0,149000,666491,3,1517.271,9713885.437,1856.086
+ dens_x2.0,174000,907603,1,2171.669,13051498.522,2493.823
+ dens_x2.0,174000,907603,2,2170.243,13051498.522,2493.823
+ dens_x2.0,174000,907603,3,2138.745,13051498.522,2493.823
+ dens_x2.0,204000,1247723,1,3040.113,17748584.329,3391.321
+ dens_x2.0,204000,1247723,2,2865.235,17748584.329,3391.321
+ dens_x2.0,204000,1247723,3,2818.529,17748584.329,3391.321
+ dens_x2.0,240000,1725541,1,3911.937,24349896.729,4652.671
+ dens_x2.0,240000,1725541,2,3864.313,24349896.729,4652.671
+ dens_x2.0,240000,1725541,3,4019.525,24349896.729,4652.671
+ dens_x2.0,281000,2365586,1,5440.080,33204358.944,6344.542
+ dens_x2.0,281000,2365586,2,5280.197,33204358.944,6344.542
+ dens_x2.0,281000,2365586,3,5331.377,33204358.944,6344.542
+ dens_x2.0,329000,3242756,1,7509.897,45374920.412,8670.039
+ dens_x2.0,329000,3242756,2,7297.788,45374920.412,8670.039
+ dens_x2.0,329000,3242756,3,7308.891,45374920.412,8670.039
+ dens_x2.0,386000,4461354,1,10341.541,62354387.279,11914.401
+ dens_x2.0,386000,4461354,2,11255.962,62354387.279,11914.401
+ dens_x2.0,386000,4461354,3,10464.571,62354387.279,11914.401
+ dens_x2.0,452000,6119886,1,14859.560,85575402.535,16351.370
+ dens_x2.0,452000,6119886,2,14641.356,85575402.535,16351.370
+ dens_x2.0,452000,6119886,3,14368.936,85575402.535,16351.370
+ dens_x2.0,530000,8413791,1,20623.174,117884820.410,22524.911
+ dens_x2.0,530000,8413791,2,20776.519,117884820.410,22524.911
+ dens_x2.0,530000,8413791,3,35375.719,117884820.410,22524.911
+ dens_x2.0,621000,11552079,1,35126.303,162377752.059,31026.424
+ dens_x2.0,621000,11552079,2,28352.579,162377752.059,31026.424
+ dens_x2.0,621000,11552079,3,28463.199,162377752.059,31026.424
+ dens_x2.0,728000,15870417,1,40801.199,224046367.608,42809.790
+ dens_x2.0,728000,15870417,2,39676.070,224046367.608,42809.790
+ dens_x2.0,728000,15870417,3,40929.421,224046367.608,42809.790
+ dens_x2.0,853000,21800415,1,57857.381,309366697.818,59112.422
+ dens_x2.0,853000,21800415,2,57089.829,309366697.818,59112.422
+ dens_x2.0,853000,21800415,3,56924.223,309366697.818,59112.422
+ dens_x2.0,1000000,29958277,1,81612.419,427704402.750,81723.868
+ dens_x2.0,1000000,29958277,2,81139.967,427704402.750,81723.868
+ dens_x2.0,1000000,29958277,3,85039.919,427704402.750,81723.868
results_lct.csv ADDED
@@ -0,0 +1,271 @@
+ series,n,m,trial,ms,theo_x,normalized_theory_ms
+ dens_x0.5,10000,761,1,35.961,99112.473,62.983
+ dens_x0.5,10000,761,2,34.394,99112.473,62.983
+ dens_x0.5,10000,761,3,50.859,99112.473,62.983
+ dens_x0.5,12000,1077,1,45.321,122827.840,78.053
+ dens_x0.5,12000,1077,2,62.352,122827.840,78.053
+ dens_x0.5,12000,1077,3,47.778,122827.840,78.053
+ dens_x0.5,14000,1455,1,73.521,147545.989,93.761
+ dens_x0.5,14000,1455,2,62.999,147545.989,93.761
+ dens_x0.5,14000,1455,3,60.800,147545.989,93.761
+ dens_x0.5,16000,1937,1,87.146,173636.330,110.340
+ dens_x0.5,16000,1937,2,67.794,173636.330,110.340
+ dens_x0.5,16000,1937,3,164.459,173636.330,110.340
+ dens_x0.5,19000,2715,1,145.995,213940.398,135.952
+ dens_x0.5,19000,2715,2,175.168,213940.398,135.952
+ dens_x0.5,19000,2715,3,116.191,213940.398,135.952
+ dens_x0.5,22000,3611,1,99.170,256079.209,162.730
+ dens_x0.5,22000,3611,2,92.240,256079.209,162.730
+ dens_x0.5,22000,3611,3,92.266,256079.209,162.730
+ dens_x0.5,26000,5194,1,111.996,317113.582,201.515
+ dens_x0.5,26000,5194,2,99.461,317113.582,201.515
+ dens_x0.5,26000,5194,3,110.166,317113.582,201.515
+ dens_x0.5,30000,6763,1,183.794,378988.027,240.834
+ dens_x0.5,30000,6763,2,181.493,378988.027,240.834
+ dens_x0.5,30000,6763,3,186.844,378988.027,240.834
+ dens_x0.5,36000,9660,1,182.254,479031.581,304.409
+ dens_x0.5,36000,9660,2,200.999,479031.581,304.409
+ dens_x0.5,36000,9660,3,143.599,479031.581,304.409
+ dens_x0.5,42000,13346,1,179.495,589181.686,374.405
+ dens_x0.5,42000,13346,2,181.815,589181.686,374.405
+ dens_x0.5,42000,13346,3,165.094,589181.686,374.405
+ dens_x0.5,49000,17900,1,284.586,722491.606,459.120
+ dens_x0.5,49000,17900,2,233.091,722491.606,459.120
+ dens_x0.5,49000,17900,3,208.590,722491.606,459.120
+ dens_x0.5,57000,24363,1,309.408,890990.473,566.195
+ dens_x0.5,57000,24363,2,261.349,890990.473,566.195
+ dens_x0.5,57000,24363,3,265.088,890990.473,566.195
+ dens_x0.5,67000,33829,1,335.052,1120457.009,712.013
+ dens_x0.5,67000,33829,2,449.145,1120457.009,712.013
+ dens_x0.5,67000,33829,3,410.054,1120457.009,712.013
+ dens_x0.5,79000,46917,1,511.215,1419991.587,902.358
+ dens_x0.5,79000,46917,2,531.563,1419991.587,902.358
+ dens_x0.5,79000,46917,3,437.912,1419991.587,902.358
+ dens_x0.5,92000,63864,1,600.704,1781454.424,1132.055
+ dens_x0.5,92000,63864,2,666.763,1781454.424,1132.055
+ dens_x0.5,92000,63864,3,602.090,1781454.424,1132.055
+ dens_x0.5,108000,87350,1,767.170,2264084.329,1438.751
+ dens_x0.5,108000,87350,2,736.665,2264084.329,1438.751
+ dens_x0.5,108000,87350,3,674.970,2264084.329,1438.751
+ dens_x0.5,127000,120912,1,913.971,2913447.536,1851.400
+ dens_x0.5,127000,120912,2,814.923,2913447.536,1851.400
+ dens_x0.5,127000,120912,3,740.522,2913447.536,1851.400
+ dens_x0.5,149000,166365,1,967.782,3756533.770,2387.153
+ dens_x0.5,149000,166365,2,1230.397,3756533.770,2387.153
+ dens_x0.5,149000,166365,3,1109.607,3756533.770,2387.153
+ dens_x0.5,174000,226551,1,1557.154,4833373.044,3071.449
+ dens_x0.5,174000,226551,2,1316.292,4833373.044,3071.449
+ dens_x0.5,174000,226551,3,1317.413,4833373.044,3071.449
+ dens_x0.5,204000,312325,1,1605.837,6312525.050,4011.401
+ dens_x0.5,204000,312325,2,1914.829,6312525.050,4011.401
+ dens_x0.5,204000,312325,3,1488.417,6312525.050,4011.401
+ dens_x0.5,240000,431142,1,1820.898,8314371.662,5283.508
+ dens_x0.5,240000,431142,2,1964.801,8314371.662,5283.508
+ dens_x0.5,240000,431142,3,2009.085,8314371.662,5283.508
+ dens_x0.5,281000,589828,1,2600.835,10925503.834,6942.796
+ dens_x0.5,281000,589828,2,2540.880,10925503.834,6942.796
+ dens_x0.5,281000,589828,3,2590.190,10925503.834,6942.796
+ dens_x0.5,329000,811844,1,3473.126,14493068.872,9209.866
+ dens_x0.5,329000,811844,2,3470.392,14493068.872,9209.866
+ dens_x0.5,329000,811844,3,3314.117,14493068.872,9209.866
+ dens_x0.5,386000,1115568,1,5107.043,19315559.086,12274.399
+ dens_x0.5,386000,1115568,2,4930.561,19315559.086,12274.399
+ dens_x0.5,386000,1115568,3,6380.473,19315559.086,12274.399
+ dens_x0.5,452000,1531464,1,10735.902,25827552.428,16412.555
+ dens_x0.5,452000,1531464,2,6940.655,25827552.428,16412.555
+ dens_x0.5,452000,1531464,3,11508.761,25827552.428,16412.555
+ dens_x0.5,530000,2103874,1,11136.995,34716124.680,22060.949
+ dens_x0.5,530000,2103874,2,9777.029,34716124.680,22060.949
+ dens_x0.5,530000,2103874,3,9783.617,34716124.680,22060.949
+ dens_x0.5,621000,2887681,1,14149.726,46802598.872,29741.503
+ dens_x0.5,621000,2887681,2,14757.810,46802598.872,29741.503
+ dens_x0.5,621000,2887681,3,14496.587,46802598.872,29741.503
+ dens_x0.5,728000,3971764,1,19466.477,63437679.196,40312.547
+ dens_x0.5,728000,3971764,2,20275.542,63437679.196,40312.547
+ dens_x0.5,728000,3971764,3,21114.354,63437679.196,40312.547
+ dens_x0.5,853000,5448500,1,30840.224,86056528.179,54686.077
+ dens_x0.5,853000,5448500,2,30346.139,86056528.179,54686.077
+ dens_x0.5,853000,5448500,3,31170.105,86056528.179,54686.077
+ dens_x0.5,1000000,7489974,1,47582.788,117293325.434,74536.028
+ dens_x0.5,1000000,7489974,2,46482.519,117293325.434,74536.028
+ dens_x0.5,1000000,7489974,3,44026.110,117293325.434,74536.028
+ dens_x1.0,10000,1550,1,44.766,106379.431,67.601
+ dens_x1.0,10000,1550,2,80.772,106379.431,67.601
+ dens_x1.0,10000,1550,3,64.825,106379.431,67.601
+ dens_x1.0,12000,2135,1,117.380,132765.276,84.368
+ dens_x1.0,12000,2135,2,83.140,132765.276,84.368
+ dens_x1.0,12000,2135,3,124.942,132765.276,84.368
+ dens_x1.0,14000,2963,1,153.056,161942.582,102.909
+ dens_x1.0,14000,2963,2,107.005,161942.582,102.909
+ dens_x1.0,14000,2963,3,176.352,161942.582,102.909
+ dens_x1.0,16000,3912,1,125.583,192755.010,122.489
+ dens_x1.0,16000,3912,2,134.302,192755.010,122.489
+ dens_x1.0,16000,3912,3,185.688,192755.010,122.489
+ dens_x1.0,19000,5413,1,123.266,240521.618,152.844
+ dens_x1.0,19000,5413,2,203.327,240521.618,152.844
+ dens_x1.0,19000,5413,3,122.239,240521.618,152.844
+ dens_x1.0,22000,7290,1,181.987,292864.786,186.106
+ dens_x1.0,22000,7290,2,164.418,292864.786,186.106
+ dens_x1.0,22000,7290,3,156.525,292864.786,186.106
+ dens_x1.0,26000,10078,1,259.958,366763.602,233.066
+ dens_x1.0,26000,10078,2,206.015,366763.602,233.066
+ dens_x1.0,26000,10078,3,197.615,366763.602,233.066
+ dens_x1.0,30000,13336,1,247.134,446748.773,283.894
+ dens_x1.0,30000,13336,2,249.997,446748.773,283.894
+ dens_x1.0,30000,13336,3,248.417,446748.773,283.894
+ dens_x1.0,36000,19373,1,342.043,580933.327,369.164
+ dens_x1.0,36000,19373,2,286.624,580933.327,369.164
+ dens_x1.0,36000,19373,3,352.649,580933.327,369.164
+ dens_x1.0,42000,26057,1,351.357,724495.682,460.393
+ dens_x1.0,42000,26057,2,353.756,724495.682,460.393
+ dens_x1.0,42000,26057,3,405.657,724495.682,460.393
+ dens_x1.0,49000,35903,1,419.918,916916.365,582.670
+ dens_x1.0,49000,35903,2,482.741,916916.365,582.670
+ dens_x1.0,49000,35903,3,429.988,916916.365,582.670
+ dens_x1.0,57000,48648,1,520.940,1156930.810,735.191
+ dens_x1.0,57000,48648,2,440.392,1156930.810,735.191
+ dens_x1.0,57000,48648,3,427.737,1156930.810,735.191
+ dens_x1.0,67000,67188,1,570.235,1491157.159,947.581
+ dens_x1.0,67000,67188,2,553.552,1491157.159,947.581
+ dens_x1.0,67000,67188,3,583.040,1491157.159,947.581
+ dens_x1.0,79000,93552,1,683.788,1945903.955,1236.558
+ dens_x1.0,79000,93552,2,669.714,1945903.955,1236.558
+ dens_x1.0,79000,93552,3,732.646,1945903.955,1236.558
+ dens_x1.0,92000,126776,1,769.173,2500509.887,1588.991
+ dens_x1.0,92000,126776,2,765.284,2500509.887,1588.991
+ dens_x1.0,92000,126776,3,765.463,2500509.887,1588.991
+ dens_x1.0,108000,175020,1,929.263,3280169.679,2084.439
+ dens_x1.0,108000,175020,2,956.523,3280169.679,2084.439
+ dens_x1.0,108000,175020,3,950.606,3280169.679,2084.439
+ dens_x1.0,127000,241838,1,1181.597,4334562.918,2754.471
+ dens_x1.0,127000,241838,2,1092.162,4334562.918,2754.471
+ dens_x1.0,127000,241838,3,1079.477,4334562.918,2754.471
+ dens_x1.0,149000,333094,1,1442.657,5742559.864,3649.207
+ dens_x1.0,149000,333094,2,1365.485,5742559.864,3649.207
+ dens_x1.0,149000,333094,3,1393.380,5742559.864,3649.207
+ dens_x1.0,174000,454003,1,1856.201,7577993.244,4815.564
+ dens_x1.0,174000,454003,2,1991.080,7577993.244,4815.564
+ dens_x1.0,174000,454003,3,1853.065,7577993.244,4815.564
+ dens_x1.0,204000,623499,1,2612.491,10116899.562,6428.955
+ dens_x1.0,204000,623499,2,2464.648,10116899.562,6428.955
+ dens_x1.0,204000,623499,3,2486.181,10116899.562,6428.955
+ dens_x1.0,240000,862431,1,3883.620,13657349.809,8678.794
+ dens_x1.0,240000,862431,2,3654.052,13657349.809,8678.794
+ dens_x1.0,240000,862431,3,3628.481,13657349.809,8678.794
+ dens_x1.0,281000,1182168,1,4952.134,18357066.601,11665.309
+ dens_x1.0,281000,1182168,2,5243.182,18357066.601,11665.309
+ dens_x1.0,281000,1182168,3,5014.568,18357066.601,11665.309
+ dens_x1.0,329000,1623777,1,6905.065,24807713.897,15764.482
+ dens_x1.0,329000,1623777,2,7251.999,24807713.897,15764.482
+ dens_x1.0,329000,1623777,3,6730.380,24807713.897,15764.482
+ dens_x1.0,386000,2232671,1,10210.314,33685517.024,21406.032
+ dens_x1.0,386000,2232671,2,10712.272,33685517.024,21406.032
+ dens_x1.0,386000,2232671,3,10134.142,33685517.024,21406.032
+ dens_x1.0,452000,3055874,1,15197.842,45677561.904,29026.580
+ dens_x1.0,452000,3055874,2,14589.433,45677561.904,29026.580
+ dens_x1.0,452000,3055874,3,13996.179,45677561.904,29026.580
+ dens_x1.0,530000,4208065,1,21873.394,62450692.510,39685.349
+ dens_x1.0,530000,4208065,2,20504.892,62450692.510,39685.349
+ dens_x1.0,530000,4208065,3,22332.396,62450692.510,39685.349
+ dens_x1.0,621000,5774507,1,33111.195,85310220.195,54211.823
+ dens_x1.0,621000,5774507,2,33271.836,85310220.195,54211.823
+ dens_x1.0,621000,5774507,3,33340.294,85310220.195,54211.823
+ dens_x1.0,728000,7935888,1,47822.973,116945648.236,74315.091
+ dens_x1.0,728000,7935888,2,48739.415,116945648.236,74315.091
+ dens_x1.0,728000,7935888,3,48949.025,116945648.236,74315.091
+ dens_x1.0,853000,10900416,1,84950.950,160510699.866,101999.240
+ dens_x1.0,853000,10900416,2,79421.655,160510699.866,101999.240
+ dens_x1.0,853000,10900416,3,87799.883,160510699.866,101999.240
+ dens_x1.0,1000000,14986435,1,113252.341,220860761.527,140349.708
+ dens_x1.0,1000000,14986435,2,123807.781,220860761.527,140349.708
+ dens_x1.0,1000000,14986435,3,127188.415,220860761.527,140349.708
+ dens_x2.0,10000,3007,1,123.275,119798.897,76.128
+ dens_x2.0,10000,3007,2,79.558,119798.897,76.128
+ dens_x2.0,10000,3007,3,128.487,119798.897,76.128
+ dens_x2.0,12000,4365,1,82.814,153710.912,97.678
+ dens_x2.0,12000,4365,2,120.726,153710.912,97.678
+ dens_x2.0,12000,4365,3,73.037,153710.912,97.678
+ dens_x2.0,14000,5830,1,156.142,189313.294,120.302
+ dens_x2.0,14000,5830,2,98.668,189313.294,120.302
+ dens_x2.0,14000,5830,3,111.172,189313.294,120.302
+ dens_x2.0,16000,7624,1,114.385,228688.447,145.324
+ dens_x2.0,16000,7624,2,163.465,228688.447,145.324
+ dens_x2.0,16000,7624,3,180.540,228688.447,145.324
+ dens_x2.0,19000,10837,1,212.731,293959.920,186.802
+ dens_x2.0,19000,10837,2,311.261,293959.920,186.802
+ dens_x2.0,19000,10837,3,287.109,293959.920,186.802
+ dens_x2.0,22000,14556,1,240.075,365516.050,232.273
+ dens_x2.0,22000,14556,2,150.187,365516.050,232.273
+ dens_x2.0,22000,14556,3,295.141,365516.050,232.273
+ dens_x2.0,26000,20395,1,223.860,471644.695,299.715
+ dens_x2.0,26000,20395,2,237.952,471644.695,299.715
+ dens_x2.0,26000,20395,3,202.743,471644.695,299.715
+ dens_x2.0,30000,26790,1,393.274,585445.422,372.031
+ dens_x2.0,30000,26790,2,301.000,585445.422,372.031
+ dens_x2.0,30000,26790,3,349.833,585445.422,372.031
+ dens_x2.0,36000,38800,1,370.541,784747.311,498.681
+ dens_x2.0,36000,38800,2,364.912,784747.311,498.681
+ dens_x2.0,36000,38800,3,376.424,784747.311,498.681
+ dens_x2.0,42000,53391,1,448.852,1015477.726,645.303
+ dens_x2.0,42000,53391,2,441.742,1015477.726,645.303
+ dens_x2.0,42000,53391,3,451.396,1015477.726,645.303
+ dens_x2.0,49000,71945,1,442.243,1306154.668,830.018
+ dens_x2.0,49000,71945,2,472.422,1306154.668,830.018
+ dens_x2.0,49000,71945,3,523.281,1306154.668,830.018
+ dens_x2.0,57000,97182,1,578.557,1688417.255,1072.933
+ dens_x2.0,57000,97182,2,489.410,1688417.255,1072.933
+ dens_x2.0,57000,97182,3,518.427,1688417.255,1072.933
+ dens_x2.0,67000,134082,1,780.094,2234513.248,1419.959
+ dens_x2.0,67000,134082,2,755.497,2234513.248,1419.959
+ dens_x2.0,67000,134082,3,674.201,2234513.248,1419.959
+ dens_x2.0,79000,186876,1,833.358,2998337.660,1905.344
+ dens_x2.0,79000,186876,2,961.736,2998337.660,1905.344
+ dens_x2.0,79000,186876,3,816.439,2998337.660,1905.344
+ dens_x2.0,92000,253371,1,1203.082,3947432.991,2508.463
+ dens_x2.0,92000,253371,2,1186.119,3947432.991,2508.463
+ dens_x2.0,92000,253371,3,1192.455,3947432.991,2508.463
+ dens_x2.0,108000,349108,1,1618.963,5297829.841,3366.596
+ dens_x2.0,108000,349108,2,1345.165,5297829.841,3366.596
+ dens_x2.0,108000,349108,3,1338.014,5297829.841,3366.596
+ dens_x2.0,127000,484181,1,2170.970,7182563.887,4564.282
+ dens_x2.0,127000,484181,2,2114.696,7182563.887,4564.282
+ dens_x2.0,127000,484181,3,2058.957,7182563.887,4564.282
+ dens_x2.0,149000,666491,1,2903.698,9713885.437,6172.853
+ dens_x2.0,149000,666491,2,2768.252,9713885.437,6172.853
+ dens_x2.0,149000,666491,3,2683.809,9713885.437,6172.853
+ dens_x2.0,174000,907603,1,3750.869,13051498.522,8293.796
+ dens_x2.0,174000,907603,2,3788.667,13051498.522,8293.796
+ dens_x2.0,174000,907603,3,3595.097,13051498.522,8293.796
+ dens_x2.0,204000,1247723,1,5598.056,17748584.329,11278.638
+ dens_x2.0,204000,1247723,2,5033.484,17748584.329,11278.638
+ dens_x2.0,204000,1247723,3,5031.559,17748584.329,11278.638
+ dens_x2.0,240000,1725541,1,8026.526,24349896.729,15473.554
+ dens_x2.0,240000,1725541,2,7715.847,24349896.729,15473.554
+ dens_x2.0,240000,1725541,3,7493.727,24349896.729,15473.554
+ dens_x2.0,281000,2365586,1,12315.262,33204358.944,21100.272
+ dens_x2.0,281000,2365586,2,10750.730,33204358.944,21100.272
+ dens_x2.0,281000,2365586,3,10910.945,33204358.944,21100.272
+ dens_x2.0,329000,3242756,1,17218.988,45374920.412,28834.261
+ dens_x2.0,329000,3242756,2,15704.277,45374920.412,28834.261
+ dens_x2.0,329000,3242756,3,16909.888,45374920.412,28834.261
+ dens_x2.0,386000,4461354,1,22930.235,62354387.279,39624.150
+ dens_x2.0,386000,4461354,2,23229.405,62354387.279,39624.150
+ dens_x2.0,386000,4461354,3,23342.470,62354387.279,39624.150
+ dens_x2.0,452000,6119886,1,35110.122,85575402.535,54380.337
+ dens_x2.0,452000,6119886,2,35385.954,85575402.535,54380.337
+ dens_x2.0,452000,6119886,3,35167.751,85575402.535,54380.337
+ dens_x2.0,530000,8413791,1,55796.898,117884820.410,74911.904
+ dens_x2.0,530000,8413791,2,59173.450,117884820.410,74911.904
+ dens_x2.0,530000,8413791,3,56385.714,117884820.410,74911.904
+ dens_x2.0,621000,11552079,1,87065.836,162377752.059,103185.690
+ dens_x2.0,621000,11552079,2,88510.360,162377752.059,103185.690
+ dens_x2.0,621000,11552079,3,87154.836,162377752.059,103185.690
+ dens_x2.0,728000,15870417,1,131315.993,224046367.608,142374.055
+ dens_x2.0,728000,15870417,2,129778.579,224046367.608,142374.055
+ dens_x2.0,728000,15870417,3,129852.250,224046367.608,142374.055
+ dens_x2.0,853000,21800415,1,216168.770,309366697.818,196592.303
+ dens_x2.0,853000,21800415,2,208242.155,309366697.818,196592.303
+ dens_x2.0,853000,21800415,3,198993.350,309366697.818,196592.303
+ dens_x2.0,1000000,29958277,1,365605.129,427704402.750,271792.000
+ dens_x2.0,1000000,29958277,2,338676.140,427704402.750,271792.000
+ dens_x2.0,1000000,29958277,3,326162.678,427704402.750,271792.000
results_mk.csv ADDED
@@ -0,0 +1,307 @@
+ series,n,m,trial,ms,theo_x,normalized_theory_ms
+ dens_x0.5,10000,761,1,15.688,99112.473,21.422
+ dens_x0.5,10000,761,2,15.767,99112.473,21.422
+ dens_x0.5,10000,761,3,15.154,99112.473,21.422
+ dens_x0.5,12000,1077,1,20.751,122827.840,26.548
+ dens_x0.5,12000,1077,2,20.008,122827.840,26.548
+ dens_x0.5,12000,1077,3,18.613,122827.840,26.548
+ dens_x0.5,14000,1455,1,22.026,147545.989,31.890
+ dens_x0.5,14000,1455,2,21.376,147545.989,31.890
+ dens_x0.5,14000,1455,3,22.028,147545.989,31.890
+ dens_x0.5,16000,1937,1,26.161,173636.330,37.529
+ dens_x0.5,16000,1937,2,27.869,173636.330,37.529
+ dens_x0.5,16000,1937,3,28.148,173636.330,37.529
+ dens_x0.5,19000,2715,1,39.460,213940.398,46.241
+ dens_x0.5,19000,2715,2,39.874,213940.398,46.241
+ dens_x0.5,19000,2715,3,37.501,213940.398,46.241
+ dens_x0.5,22000,3611,1,45.224,256079.209,55.348
+ dens_x0.5,22000,3611,2,44.134,256079.209,55.348
+ dens_x0.5,22000,3611,3,44.888,256079.209,55.348
+ dens_x0.5,26000,5194,1,56.877,317113.582,68.540
+ dens_x0.5,26000,5194,2,56.417,317113.582,68.540
+ dens_x0.5,26000,5194,3,50.854,317113.582,68.540
+ dens_x0.5,30000,6763,1,55.551,378988.027,81.914
+ dens_x0.5,30000,6763,2,56.281,378988.027,81.914
+ dens_x0.5,30000,6763,3,61.705,378988.027,81.914
+ dens_x0.5,36000,9660,1,58.902,479031.581,103.537
+ dens_x0.5,36000,9660,2,59.343,479031.581,103.537
+ dens_x0.5,36000,9660,3,56.766,479031.581,103.537
+ dens_x0.5,42000,13346,1,71.784,589181.686,127.344
+ dens_x0.5,42000,13346,2,70.883,589181.686,127.344
+ dens_x0.5,42000,13346,3,67.382,589181.686,127.344
+ dens_x0.5,49000,17900,1,77.784,722491.606,156.158
+ dens_x0.5,49000,17900,2,98.588,722491.606,156.158
+ dens_x0.5,49000,17900,3,78.477,722491.606,156.158
+ dens_x0.5,57000,24363,1,100.125,890990.473,192.577
+ dens_x0.5,57000,24363,2,98.099,890990.473,192.577
+ dens_x0.5,57000,24363,3,95.559,890990.473,192.577
+ dens_x0.5,67000,33829,1,120.753,1120457.009,242.173
+ dens_x0.5,67000,33829,2,157.183,1120457.009,242.173
+ dens_x0.5,67000,33829,3,119.986,1120457.009,242.173
+ dens_x0.5,79000,46917,1,158.279,1419991.587,306.914
+ dens_x0.5,79000,46917,2,160.867,1419991.587,306.914
+ dens_x0.5,79000,46917,3,188.158,1419991.587,306.914
+ dens_x0.5,92000,63864,1,217.457,1781454.424,385.039
+ dens_x0.5,92000,63864,2,201.234,1781454.424,385.039
+ dens_x0.5,92000,63864,3,202.297,1781454.424,385.039
+ dens_x0.5,108000,87350,1,280.966,2264084.329,489.354
+ dens_x0.5,108000,87350,2,249.816,2264084.329,489.354
+ dens_x0.5,108000,87350,3,300.323,2264084.329,489.354
+ dens_x0.5,127000,120912,1,303.758,2913447.536,629.706
+ dens_x0.5,127000,120912,2,353.401,2913447.536,629.706
+ dens_x0.5,127000,120912,3,611.729,2913447.536,629.706
+ dens_x0.5,149000,166365,1,520.963,3756533.770,811.928
+ dens_x0.5,149000,166365,2,567.283,3756533.770,811.928
+ dens_x0.5,149000,166365,3,464.261,3756533.770,811.928
+ dens_x0.5,174000,226551,1,579.208,4833373.044,1044.674
+ dens_x0.5,174000,226551,2,645.586,4833373.044,1044.674
+ dens_x0.5,174000,226551,3,566.923,4833373.044,1044.674
+ dens_x0.5,204000,312325,1,908.258,6312525.050,1364.374
+ dens_x0.5,204000,312325,2,597.708,6312525.050,1364.374
+ dens_x0.5,204000,312325,3,614.792,6312525.050,1364.374
+ dens_x0.5,240000,431142,1,830.654,8314371.662,1797.049
+ dens_x0.5,240000,431142,2,747.721,8314371.662,1797.049
+ dens_x0.5,240000,431142,3,793.694,8314371.662,1797.049
+ dens_x0.5,281000,589828,1,1009.589,10925503.834,2361.413
+ dens_x0.5,281000,589828,2,999.220,10925503.834,2361.413
+ dens_x0.5,281000,589828,3,968.745,10925503.834,2361.413
+ dens_x0.5,329000,811844,1,1346.701,14493068.872,3132.498
+ dens_x0.5,329000,811844,2,1300.301,14493068.872,3132.498
+ dens_x0.5,329000,811844,3,1336.633,14493068.872,3132.498
+ dens_x0.5,386000,1115568,1,1930.593,19315559.086,4174.819
+ dens_x0.5,386000,1115568,2,1943.367,19315559.086,4174.819
+ dens_x0.5,386000,1115568,3,1826.516,19315559.086,4174.819
+ dens_x0.5,452000,1531464,1,2626.389,25827552.428,5582.306
+ dens_x0.5,452000,1531464,2,2527.856,25827552.428,5582.306
+ dens_x0.5,452000,1531464,3,2552.325,25827552.428,5582.306
+ dens_x0.5,530000,2103874,1,3927.716,34716124.680,7503.461
+ dens_x0.5,530000,2103874,2,3968.094,34716124.680,7503.461
+ dens_x0.5,530000,2103874,3,3782.988,34716124.680,7503.461
+ dens_x0.5,621000,2887681,1,5852.172,46802598.872,10115.803
+ dens_x0.5,621000,2887681,2,5655.723,46802598.872,10115.803
+ dens_x0.5,621000,2887681,3,5788.834,46802598.872,10115.803
+ dens_x0.5,728000,3971764,1,8425.377,63437679.196,13711.270
+ dens_x0.5,728000,3971764,2,8226.609,63437679.196,13711.270
+ dens_x0.5,728000,3971764,3,8583.745,63437679.196,13711.270
+ dens_x0.5,853000,5448500,1,12175.891,86056528.179,18600.055
+ dens_x0.5,853000,5448500,2,15218.073,86056528.179,18600.055
+ dens_x0.5,853000,5448500,3,11273.111,86056528.179,18600.055
+ dens_x0.5,1000000,7489974,1,22260.390,117293325.434,25351.503
+ dens_x0.5,1000000,7489974,2,21735.102,117293325.434,25351.503
+ dens_x0.5,1000000,7489974,3,16119.248,117293325.434,25351.503
+ dens_x0.5,1300000,12655006,1,36062.526,196456827.614,73316.712
+ dens_x0.5,1300000,12655006,2,37491.996,196456827.614,73316.712
+ dens_x0.5,1300000,12655006,3,40901.895,196456827.614,73316.712
+ dens_x0.5,1500000,16856907,1,55060.072,261053127.751,97423.730
+ dens_x0.5,1500000,16856907,2,51489.209,261053127.751,97423.730
+ dens_x0.5,1500000,16856907,3,51762.161,261053127.751,97423.730
+ dens_x1.0,10000,1550,1,20.015,106379.431,22.993
+ dens_x1.0,10000,1550,2,16.967,106379.431,22.993
+ dens_x1.0,10000,1550,3,33.054,106379.431,22.993
+ dens_x1.0,12000,2135,1,40.238,132765.276,28.696
+ dens_x1.0,12000,2135,2,22.697,132765.276,28.696
+ dens_x1.0,12000,2135,3,23.725,132765.276,28.696
+ dens_x1.0,14000,2963,1,35.596,161942.582,35.002
+ dens_x1.0,14000,2963,2,28.078,161942.582,35.002
+ dens_x1.0,14000,2963,3,33.954,161942.582,35.002
+ dens_x1.0,16000,3912,1,44.754,192755.010,41.662
+ dens_x1.0,16000,3912,2,46.279,192755.010,41.662
+ dens_x1.0,16000,3912,3,43.305,192755.010,41.662
+ dens_x1.0,19000,5413,1,59.872,240521.618,51.986
+ dens_x1.0,19000,5413,2,54.141,240521.618,51.986
+ dens_x1.0,19000,5413,3,59.136,240521.618,51.986
+ dens_x1.0,22000,7290,1,57.037,292864.786,63.299
+ dens_x1.0,22000,7290,2,80.374,292864.786,63.299
+ dens_x1.0,22000,7290,3,51.274,292864.786,63.299
+ dens_x1.0,26000,10078,1,100.147,366763.602,79.271
+ dens_x1.0,26000,10078,2,64.036,366763.602,79.271
+ dens_x1.0,26000,10078,3,118.154,366763.602,79.271
+ dens_x1.0,30000,13336,1,63.956,446748.773,96.559
+ dens_x1.0,30000,13336,2,90.179,446748.773,96.559
+ dens_x1.0,30000,13336,3,63.027,446748.773,96.559
+ dens_x1.0,36000,19373,1,133.343,580933.327,125.562
+ dens_x1.0,36000,19373,2,82.262,580933.327,125.562
+ dens_x1.0,36000,19373,3,126.932,580933.327,125.562
+ dens_x1.0,42000,26057,1,310.700,724495.682,156.591
+ dens_x1.0,42000,26057,2,182.416,724495.682,156.591
+ dens_x1.0,42000,26057,3,146.894,724495.682,156.591
+ dens_x1.0,49000,35903,1,152.136,916916.365,198.180
+ dens_x1.0,49000,35903,2,134.394,916916.365,198.180
+ dens_x1.0,49000,35903,3,140.925,916916.365,198.180
+ dens_x1.0,57000,48648,1,283.893,1156930.810,250.056
+ dens_x1.0,57000,48648,2,234.125,1156930.810,250.056
+ dens_x1.0,57000,48648,3,190.222,1156930.810,250.056
+ dens_x1.0,67000,67188,1,289.951,1491157.159,322.295
+ dens_x1.0,67000,67188,2,237.208,1491157.159,322.295
+ dens_x1.0,67000,67188,3,287.380,1491157.159,322.295
+ dens_x1.0,79000,93552,1,275.404,1945903.955,420.583
+ dens_x1.0,79000,93552,2,307.363,1945903.955,420.583
+ dens_x1.0,79000,93552,3,253.094,1945903.955,420.583
+ dens_x1.0,92000,126776,1,342.068,2500509.887,540.454
+ dens_x1.0,92000,126776,2,276.337,2500509.887,540.454
+ dens_x1.0,92000,126776,3,288.147,2500509.887,540.454
+ dens_x1.0,108000,175020,1,316.023,3280169.679,708.968
+ dens_x1.0,108000,175020,2,353.284,3280169.679,708.968
+ dens_x1.0,108000,175020,3,339.276,3280169.679,708.968
+ dens_x1.0,127000,241838,1,391.496,4334562.918,936.862
+ dens_x1.0,127000,241838,2,384.289,4334562.918,936.862
+ dens_x1.0,127000,241838,3,400.746,4334562.918,936.862
+ dens_x1.0,149000,333094,1,601.271,5742559.864,1241.183
+ dens_x1.0,149000,333094,2,542.543,5742559.864,1241.183
+ dens_x1.0,149000,333094,3,507.314,5742559.864,1241.183
+ dens_x1.0,174000,454003,1,705.319,7577993.244,1637.890
+ dens_x1.0,174000,454003,2,708.632,7577993.244,1637.890
+ dens_x1.0,174000,454003,3,662.705,7577993.244,1637.890
+ dens_x1.0,204000,623499,1,944.243,10116899.562,2186.643
+ dens_x1.0,204000,623499,2,929.347,10116899.562,2186.643
+ dens_x1.0,204000,623499,3,942.194,10116899.562,2186.643
+ dens_x1.0,240000,862431,1,1338.912,13657349.809,2951.867
+ dens_x1.0,240000,862431,2,1197.529,13657349.809,2951.867
+ dens_x1.0,240000,862431,3,1288.236,13657349.809,2951.867
+ dens_x1.0,281000,1182168,1,1812.009,18357066.601,3967.653
+ dens_x1.0,281000,1182168,2,1831.549,18357066.601,3967.653
+ dens_x1.0,281000,1182168,3,1800.877,18357066.601,3967.653
+ dens_x1.0,329000,1623777,1,2607.569,24807713.897,5361.881
+ dens_x1.0,329000,1623777,2,2721.496,24807713.897,5361.881
+ dens_x1.0,329000,1623777,3,2692.989,24807713.897,5361.881
+ dens_x1.0,386000,2232671,1,3896.705,33685517.024,7280.708
+ dens_x1.0,386000,2232671,2,4093.978,33685517.024,7280.708
+ dens_x1.0,386000,2232671,3,4157.255,33685517.024,7280.708
+ dens_x1.0,452000,3055874,1,6800.304,45677561.904,9872.640
+ dens_x1.0,452000,3055874,2,6063.371,45677561.904,9872.640
+ dens_x1.0,452000,3055874,3,5619.696,45677561.904,9872.640
+ dens_x1.0,530000,4208065,1,7359.967,62450692.510,13497.945
+ dens_x1.0,530000,4208065,2,8442.994,62450692.510,13497.945
+ dens_x1.0,530000,4208065,3,9818.880,62450692.510,13497.945
+ dens_x1.0,621000,5774507,1,13500.980,85310220.195,18438.750
+ dens_x1.0,621000,5774507,2,15204.199,85310220.195,18438.750
+ dens_x1.0,621000,5774507,3,12344.941,85310220.195,18438.750
+ dens_x1.0,728000,7935888,1,20263.595,116945648.236,25276.356
+ dens_x1.0,728000,7935888,2,21043.008,116945648.236,25276.356
+ dens_x1.0,728000,7935888,3,20002.944,116945648.236,25276.356
+ dens_x1.0,853000,10900416,1,36276.239,160510699.866,34692.404
+ dens_x1.0,853000,10900416,2,28334.668,160510699.866,34692.404
+ dens_x1.0,853000,10900416,3,35198.553,160510699.866,34692.404
+ dens_x1.0,1000000,14986435,1,45843.089,220860761.527,47736.324
+ dens_x1.0,1000000,14986435,2,47166.816,220860761.527,47736.324
+ dens_x1.0,1000000,14986435,3,49091.110,220860761.527,47736.324
+ dens_x1.0,1300000,25315094,1,98721.978,374683961.719,139830.193
+ dens_x1.0,1300000,25315094,2,78676.425,374683961.719,139830.193
+ dens_x1.0,1300000,25315094,3,103389.019,374683961.719,139830.193
+ dens_x1.0,1500000,33713952,1,132205.466,500776754.498,186887.396
+ dens_x1.0,1500000,33713952,2,152518.556,500776754.498,186887.396
+ dens_x1.0,1500000,33713952,3,112566.137,500776754.498,186887.396
+ dens_x2.0,10000,3007,1,26.720,119798.897,25.893
+ dens_x2.0,10000,3007,2,43.318,119798.897,25.893
+ dens_x2.0,10000,3007,3,31.487,119798.897,25.893
+ dens_x2.0,12000,4365,1,48.927,153710.912,33.223
+ dens_x2.0,12000,4365,2,34.995,153710.912,33.223
+ dens_x2.0,12000,4365,3,46.545,153710.912,33.223
+ dens_x2.0,14000,5830,1,49.223,189313.294,40.918
+ dens_x2.0,14000,5830,2,37.392,189313.294,40.918
+ dens_x2.0,14000,5830,3,56.664,189313.294,40.918
+ dens_x2.0,16000,7624,1,46.943,228688.447,49.428
+ dens_x2.0,16000,7624,2,57.763,228688.447,49.428
+ dens_x2.0,16000,7624,3,47.823,228688.447,49.428
+ dens_x2.0,19000,10837,1,98.560,293959.920,63.536
+ dens_x2.0,19000,10837,2,62.016,293959.920,63.536
+ dens_x2.0,19000,10837,3,58.108,293959.920,63.536
+ dens_x2.0,22000,14556,1,101.815,365516.050,79.002
+ dens_x2.0,22000,14556,2,63.192,365516.050,79.002
+ dens_x2.0,22000,14556,3,93.428,365516.050,79.002
+ dens_x2.0,26000,20395,1,109.113,471644.695,101.940
+ dens_x2.0,26000,20395,2,82.642,471644.695,101.940
+ dens_x2.0,26000,20395,3,117.889,471644.695,101.940
+ dens_x2.0,30000,26790,1,94.074,585445.422,126.537
+ dens_x2.0,30000,26790,2,143.902,585445.422,126.537
+ dens_x2.0,30000,26790,3,124.759,585445.422,126.537
+ dens_x2.0,36000,38800,1,162.750,784747.311,169.613
+ dens_x2.0,36000,38800,2,202.980,784747.311,169.613
+ dens_x2.0,36000,38800,3,142.628,784747.311,169.613
+ dens_x2.0,42000,53391,1,206.343,1015477.726,219.483
+ dens_x2.0,42000,53391,2,142.426,1015477.726,219.483
+ dens_x2.0,42000,53391,3,289.445,1015477.726,219.483
+ dens_x2.0,49000,71945,1,264.377,1306154.668,282.309
+ dens_x2.0,49000,71945,2,199.305,1306154.668,282.309
+ dens_x2.0,49000,71945,3,262.257,1306154.668,282.309
+ dens_x2.0,57000,97182,1,307.139,1688417.255,364.931
+ dens_x2.0,57000,97182,2,326.031,1688417.255,364.931
+ dens_x2.0,57000,97182,3,350.852,1688417.255,364.931
+ dens_x2.0,67000,134082,1,247.385,2234513.248,482.962
+ dens_x2.0,67000,134082,2,359.392,2234513.248,482.962
+ dens_x2.0,67000,134082,3,399.988,2234513.248,482.962
+ dens_x2.0,79000,186876,1,342.322,2998337.660,648.054
+ dens_x2.0,79000,186876,2,332.860,2998337.660,648.054
+ dens_x2.0,79000,186876,3,344.360,2998337.660,648.054
+ dens_x2.0,92000,253371,1,379.277,3947432.991,853.189
+ dens_x2.0,92000,253371,2,425.224,3947432.991,853.189
+ dens_x2.0,92000,253371,3,363.135,3947432.991,853.189
+ dens_x2.0,108000,349108,1,491.590,5297829.841,1145.060
+ dens_x2.0,108000,349108,2,527.905,5297829.841,1145.060
+ dens_x2.0,108000,349108,3,514.033,5297829.841,1145.060
+ dens_x2.0,127000,484181,1,822.984,7182563.887,1552.422
+ dens_x2.0,127000,484181,2,713.202,7182563.887,1552.422
+ dens_x2.0,127000,484181,3,730.441,7182563.887,1552.422
+ dens_x2.0,149000,666491,1,902.169,9713885.437,2099.536
+ dens_x2.0,149000,666491,2,921.216,9713885.437,2099.536
+ dens_x2.0,149000,666491,3,1008.476,9713885.437,2099.536
+ dens_x2.0,174000,907603,1,1210.904,13051498.522,2820.920
+ dens_x2.0,174000,907603,2,1263.896,13051498.522,2820.920
+ dens_x2.0,174000,907603,3,1232.927,13051498.522,2820.920
+ dens_x2.0,204000,1247723,1,1851.569,17748584.329,3836.137
+ dens_x2.0,204000,1247723,2,1769.742,17748584.329,3836.137
+ dens_x2.0,204000,1247723,3,1841.024,17748584.329,3836.137
+ dens_x2.0,240000,1725541,1,2987.372,24349896.729,5262.929
+ dens_x2.0,240000,1725541,2,2659.560,24349896.729,5262.929
+ dens_x2.0,240000,1725541,3,2649.774,24349896.729,5262.929
+ dens_x2.0,281000,2365586,1,3782.265,33204358.944,7176.712
+ dens_x2.0,281000,2365586,2,4613.097,33204358.944,7176.712
+ dens_x2.0,281000,2365586,3,4571.901,33204358.944,7176.712
+ dens_x2.0,329000,3242756,1,7011.528,45374920.412,9807.228
+ dens_x2.0,329000,3242756,2,6958.774,45374920.412,9807.228
+ dens_x2.0,329000,3242756,3,6887.107,45374920.412,9807.228
+ dens_x2.0,386000,4461354,1,9100.421,62354387.279,13477.130
+ dens_x2.0,386000,4461354,2,11851.489,62354387.279,13477.130
+ dens_x2.0,386000,4461354,3,10179.649,62354387.279,13477.130
+ dens_x2.0,452000,6119886,1,13421.960,85575402.535,18496.066
+ dens_x2.0,452000,6119886,2,18355.367,85575402.535,18496.066
+ dens_x2.0,452000,6119886,3,15388.386,85575402.535,18496.066
+ dens_x2.0,530000,8413791,1,25985.246,117884820.410,25479.347
+ dens_x2.0,530000,8413791,2,18929.372,117884820.410,25479.347
+ dens_x2.0,530000,8413791,3,26384.270,117884820.410,25479.347
+ dens_x2.0,621000,11552079,1,30976.711,162377752.059,35095.944
+ dens_x2.0,621000,11552079,2,31445.815,162377752.059,35095.944
+ dens_x2.0,621000,11552079,3,30916.426,162377752.059,35095.944
+ dens_x2.0,728000,15870417,1,45213.376,224046367.608,48424.853
+ dens_x2.0,728000,15870417,2,46801.158,224046367.608,48424.853
+ dens_x2.0,728000,15870417,3,46000.598,224046367.608,48424.853
+ dens_x2.0,853000,21800415,1,80293.217,309366697.818,66865.788
+ dens_x2.0,853000,21800415,2,72885.247,309366697.818,66865.788
+ dens_x2.0,853000,21800415,3,73704.347,309366697.818,66865.788
+ dens_x2.0,1000000,29958277,1,93513.717,427704402.750,92443.020
+ dens_x2.0,1000000,29958277,2,93710.242,427704402.750,92443.020
+ dens_x2.0,1000000,29958277,3,114397.999,427704402.750,92443.020
+ dens_x0.5,1300000,12655006,1,36062.526,196456827.614,73316.712
+ dens_x0.5,1300000,12655006,2,37491.996,196456827.614,73316.712
+ dens_x0.5,1300000,12655006,3,40901.895,196456827.614,73316.712
+ dens_x0.5,1500000,16856907,1,55060.072,261053127.751,97423.730
+ dens_x0.5,1500000,16856907,2,51489.209,261053127.751,97423.730
+ dens_x0.5,1500000,16856907,3,51762.161,261053127.751,97423.730
+ dens_x1.0,1300000,25315094,1,98721.978,374683961.719,139830.193
+ dens_x1.0,1300000,25315094,2,78676.425,374683961.719,139830.193
+ dens_x1.0,1300000,25315094,3,103389.019,374683961.719,139830.193
+ dens_x1.0,1500000,33713952,1,132205.466,500776754.498,186887.396
+ dens_x1.0,1500000,33713952,2,152518.556,500776754.498,186887.396
+ dens_x1.0,1500000,33713952,3,112566.137,500776754.498,186887.396
+ dens_x2.0,1300000,50620887,1,269269.465,730935747.856,272781.589
+ dens_x2.0,1300000,50620887,2,256085.939,730935747.856,272781.589
+ dens_x2.0,1300000,50620887,3,281477.940,730935747.856,272781.589
+ dens_x2.0,1500000,67408553,1,490996.350,979946855.397,365711.297
+ dens_x2.0,1500000,67408553,2,398152.570,979946855.397,365711.297
+ dens_x2.0,1500000,67408553,3,409926.988,979946855.397,365711.297
+ dens_x2.0,1300000,50620887,1,269269.465,730935747.856,272781.589
+ dens_x2.0,1300000,50620887,2,256085.939,730935747.856,272781.589
+ dens_x2.0,1300000,50620887,3,281477.940,730935747.856,272781.589
+ dens_x2.0,1500000,67408553,1,490996.350,979946855.397,365711.297
+ dens_x2.0,1500000,67408553,2,398152.570,979946855.397,365711.297
+ dens_x2.0,1500000,67408553,3,409926.988,979946855.397,365711.297
results_mk_paper.csv ADDED
@@ -0,0 +1,91 @@
+ series,n,m,trial,ms,theo_x,normalized_theory_ms,p_intra,p_inter
+ p0.010,10000,6855,1,32.883,168950.287,5.622,0.010000,0.000100000
+ p0.010,10000,6855,2,35.347,168950.287,5.622,0.010000,0.000100000
+ p0.010,10000,6855,3,35.753,168950.287,5.622,0.010000,0.000100000
+ p0.010,12000,9996,1,44.591,226592.992,7.540,0.010000,0.000100000
+ p0.010,12000,9996,2,49.257,226592.992,7.540,0.010000,0.000100000
+ p0.010,12000,9996,3,46.698,226592.992,7.540,0.010000,0.000100000
+ p0.010,14000,13651,1,55.784,291280.915,9.693,0.010000,0.000100000
+ p0.010,14000,13651,2,53.707,291280.915,9.693,0.010000,0.000100000
+ p0.010,14000,13651,3,55.903,291280.915,9.693,0.010000,0.000100000
+ p0.010,16000,17847,1,57.833,381191.603,12.685,0.010000,0.000100000
+ p0.010,16000,17847,2,57.146,381191.603,12.685,0.010000,0.000100000
+ p0.010,16000,17847,3,54.132,381191.603,12.685,0.010000,0.000100000
+ p0.010,19000,25186,1,64.764,510887.055,17.000,0.010000,0.000100000
+ p0.010,19000,25186,2,73.329,510887.055,17.000,0.010000,0.000100000
+ p0.010,19000,25186,3,76.659,510887.055,17.000,0.010000,0.000100000
+ p0.010,22000,34048,1,89.388,662556.615,22.047,0.010000,0.000100000
+ p0.010,22000,34048,2,90.601,662556.615,22.047,0.010000,0.000100000
+ p0.010,22000,34048,3,90.021,662556.615,22.047,0.010000,0.000100000
+ p0.010,26000,47355,1,142.680,935136.060,31.118,0.010000,0.000100000
+ p0.010,26000,47355,2,117.901,935136.060,31.118,0.010000,0.000100000
+ p0.010,26000,47355,3,134.182,935136.060,31.118,0.010000,0.000100000
+ p0.010,31000,66944,1,190.615,1280687.626,42.616,0.010000,0.000100000
+ p0.010,31000,66944,2,168.829,1280687.626,42.616,0.010000,0.000100000
+ p0.010,31000,66944,3,170.883,1280687.626,42.616,0.010000,0.000100000
+ p0.010,37000,95444,1,215.912,1870355.152,62.238,0.010000,0.000100000
+ p0.010,37000,95444,2,268.993,1870355.152,62.238,0.010000,0.000100000
+ p0.010,37000,95444,3,242.674,1870355.152,62.238,0.010000,0.000100000
+ p0.010,43000,129467,1,265.009,2616844.730,87.079,0.010000,0.000100000
+ p0.010,43000,129467,2,283.592,2616844.730,87.079,0.010000,0.000100000
+ p0.010,43000,129467,3,254.891,2616844.730,87.079,0.010000,0.000100000
+ p0.010,51000,181666,1,333.548,3793663.932,126.239,0.010000,0.000100000
+ p0.010,51000,181666,2,349.489,3793663.932,126.239,0.010000,0.000100000
+ p0.010,51000,181666,3,410.205,3793663.932,126.239,0.010000,0.000100000
+ p0.010,59000,242699,1,503.184,5255843.829,174.895,0.010000,0.000100000
+ p0.010,59000,242699,2,381.119,5255843.829,174.895,0.010000,0.000100000
+ p0.010,59000,242699,3,478.618,5255843.829,174.895,0.010000,0.000100000
+ p0.010,70000,340953,1,518.918,7994224.620,266.018,0.010000,0.000100000
+ p0.010,70000,340953,2,594.535,7994224.620,266.018,0.010000,0.000100000
+ p0.010,70000,340953,3,581.086,7994224.620,266.018,0.010000,0.000100000
+ p0.010,82000,468721,1,739.602,11855770.726,394.516,0.010000,0.000100000
+ p0.010,82000,468721,2,671.713,11855770.726,394.516,0.010000,0.000100000
+ p0.010,82000,468721,3,737.615,11855770.726,394.516,0.010000,0.000100000
+ p0.010,97000,657203,1,926.296,18518155.499,616.215,0.010000,0.000100000
+ p0.010,97000,657203,2,954.613,18518155.499,616.215,0.010000,0.000100000
+ p0.010,97000,657203,3,1049.569,18518155.499,616.215,0.010000,0.000100000
+ p0.010,114000,905961,1,1377.284,27277715.688,907.700,0.010000,0.000100000
+ p0.010,114000,905961,2,1347.628,27277715.688,907.700,0.010000,0.000100000
+ p0.010,114000,905961,3,1253.559,27277715.688,907.700,0.010000,0.000100000
+ p0.010,134000,1254092,1,1900.711,42723184.084,1421.667,0.010000,0.000100000
+ p0.010,134000,1254092,2,1952.478,42723184.084,1421.667,0.010000,0.000100000
+ p0.010,134000,1254092,3,1959.497,42723184.084,1421.667,0.010000,0.000100000
+ p0.010,157000,1721008,1,2679.850,65493689.748,2179.384,0.010000,0.000100000
+ p0.010,157000,1721008,2,2582.330,65493689.748,2179.384,0.010000,0.000100000
+ p0.010,157000,1721008,3,2514.934,65493689.748,2179.384,0.010000,0.000100000
+ p0.010,185000,2387648,1,4411.791,102830800.776,3421.823,0.010000,0.000100000
+ p0.010,185000,2387648,2,4158.479,102830800.776,3421.823,0.010000,0.000100000
+ p0.010,185000,2387648,3,4728.596,102830800.776,3421.823,0.010000,0.000100000
+ p0.010,218000,3318758,1,7240.039,162950002.734,5422.364,0.010000,0.000100000
+ p0.010,218000,3318758,2,7185.162,162950002.734,5422.364,0.010000,0.000100000
+ p0.010,218000,3318758,3,7265.966,162950002.734,5422.364,0.010000,0.000100000
+ p0.010,256000,4576568,1,10987.979,252395500.186,8398.775,0.010000,0.000100000
+ p0.010,256000,4576568,2,10888.895,252395500.186,8398.775,0.010000,0.000100000
+ p0.010,256000,4576568,3,10992.033,252395500.186,8398.775,0.010000,0.000100000
+ p0.010,301000,6321401,1,14079.826,405932149.192,13507.898,0.010000,0.000100000
+ p0.010,301000,6321401,2,16659.217,405932149.192,13507.898,0.010000,0.000100000
+ p0.010,301000,6321401,3,16325.723,405932149.192,13507.898,0.010000,0.000100000
+ p0.010,354000,8746503,1,25978.646,649814284.806,21623.380,0.010000,0.000100000
+ p0.010,354000,8746503,2,26495.244,649814284.806,21623.380,0.010000,0.000100000
+ p0.010,354000,8746503,3,25287.206,649814284.806,21623.380,0.010000,0.000100000
+ p0.010,416000,12073015,1,32732.342,1030845457.971,34302.668,0.010000,0.000100000
+ p0.010,416000,12073015,2,28855.088,1030845457.971,34302.668,0.010000,0.000100000
+ p0.010,416000,12073015,3,29107.222,1030845457.971,34302.668,0.010000,0.000100000
+ p0.010,489000,16686318,1,46434.755,1660022036.511,55239.303,0.010000,0.000100000
+ p0.010,489000,16686318,2,50506.399,1660022036.511,55239.303,0.010000,0.000100000
+ p0.010,489000,16686318,3,45908.078,1660022036.511,55239.303,0.010000,0.000100000
+ p0.010,575000,23083981,1,67075.956,2691418413.960,89560.303,0.010000,0.000100000
+ p0.010,575000,23083981,2,72917.385,2691418413.960,89560.303,0.010000,0.000100000
+ p0.010,575000,23083981,3,77338.323,2691418413.960,89560.303,0.010000,0.000100000
+ p0.010,676000,31898369,1,130134.315,4328877665.154,144048.800,0.010000,0.000100000
+ p0.010,676000,31898369,2,148812.790,4328877665.154,144048.800,0.010000,0.000100000
+ p0.010,676000,31898369,3,136450.553,4328877665.154,144048.800,0.010000,0.000100000
+ p0.010,795000,44104536,1,232507.154,7005167189.025,233105.670,0.010000,0.000100000
+ p0.010,795000,44104536,2,173564.025,7005167189.025,233105.670,0.010000,0.000100000
+ p0.010,795000,44104536,3,202268.941,7005167189.025,233105.670,0.010000,0.000100000
+ p0.010,935000,61021179,1,389899.582,11408456214.779,379630.602,0.010000,0.000100000
+ p0.010,935000,61021179,2,392860.546,11408456214.779,379630.602,0.010000,0.000100000
+ p0.010,935000,61021179,3,349062.020,11408456214.779,379630.602,0.010000,0.000100000
+ p0.010,1100000,84452506,1,576853.282,18502869304.633,615706.040,0.010000,0.000100000
+ p0.010,1100000,84452506,2,674198.550,18502869304.633,615706.040,0.010000,0.000100000
+ p0.010,1100000,84452506,3,648210.917,18502869304.633,615706.040,0.010000,0.000100000
results_mk_paper2.csv ADDED
@@ -0,0 +1,7 @@
+ series,n,m,trial,ms,theo_x,normalized_theory_ms,p_intra,p_inter
+ p0.010,894000,55784686,1,434722.717,9981167356.202,415219.440,0.010000,0.000100000
+ p0.010,894000,55784686,2,443367.313,9981167356.202,415219.440,0.010000,0.000100000
+ p0.010,894000,55784686,3,405549.165,9981167356.202,415219.440,0.010000,0.000100000
+ p0.010,1050000,76942931,1,769922.022,16085188994.585,669148.500,0.010000,0.000100000
+ p0.010,1050000,76942931,2,633155.668,16085188994.585,669148.500,0.010000,0.000100000
+ p0.010,1050000,76942931,3,580799.949,16085188994.585,669148.500,0.010000,0.000100000
results_mk_peeling.csv ADDED
@@ -0,0 +1,91 @@
+ series,n,m,trial,ms_lrmc,ms_charikar,ratio_char_over_lrmc,p_intra,p_inter
+ p0.010,10000,6855,1,31.992,13.314,0.4162,0.010000,0.000100000
+ p0.010,10000,6855,2,31.391,5.962,0.1899,0.010000,0.000100000
+ p0.010,10000,6855,3,32.466,3.270,0.1007,0.010000,0.000100000
+ p0.010,12000,9996,1,43.876,4.399,0.1003,0.010000,0.000100000
+ p0.010,12000,9996,2,45.240,3.439,0.0760,0.010000,0.000100000
+ p0.010,12000,9996,3,47.128,3.028,0.0643,0.010000,0.000100000
+ p0.010,14000,13651,1,56.129,8.664,0.1544,0.010000,0.000100000
+ p0.010,14000,13651,2,52.710,4.570,0.0867,0.010000,0.000100000
+ p0.010,14000,13651,3,53.127,4.179,0.0787,0.010000,0.000100000
+ p0.010,16000,17847,1,55.469,5.666,0.1022,0.010000,0.000100000
+ p0.010,16000,17847,2,59.261,5.577,0.0941,0.010000,0.000100000
+ p0.010,16000,17847,3,52.821,5.330,0.1009,0.010000,0.000100000
+ p0.010,19000,25186,1,63.495,7.508,0.1182,0.010000,0.000100000
+ p0.010,19000,25186,2,70.504,7.240,0.1027,0.010000,0.000100000
+ p0.010,19000,25186,3,62.448,6.297,0.1008,0.010000,0.000100000
+ p0.010,22000,34048,1,90.847,8.440,0.0929,0.010000,0.000100000
+ p0.010,22000,34048,2,76.658,10.948,0.1428,0.010000,0.000100000
+ p0.010,22000,34048,3,80.600,9.722,0.1206,0.010000,0.000100000
+ p0.010,26000,47355,1,141.174,12.880,0.0912,0.010000,0.000100000
+ p0.010,26000,47355,2,119.388,13.316,0.1115,0.010000,0.000100000
+ p0.010,26000,47355,3,118.538,13.578,0.1145,0.010000,0.000100000
+ p0.010,31000,66944,1,186.135,17.147,0.0921,0.010000,0.000100000
+ p0.010,31000,66944,2,170.393,16.096,0.0945,0.010000,0.000100000
+ p0.010,31000,66944,3,179.323,15.829,0.0883,0.010000,0.000100000
+ p0.010,36000,90395,1,202.701,24.991,0.1233,0.010000,0.000100000
+ p0.010,36000,90395,2,251.185,24.376,0.0970,0.010000,0.000100000
+ p0.010,36000,90395,3,203.004,24.936,0.1228,0.010000,0.000100000
+ p0.010,42000,123740,1,261.486,32.913,0.1259,0.010000,0.000100000
+ p0.010,42000,123740,2,236.691,35.356,0.1494,0.010000,0.000100000
+ p0.010,42000,123740,3,364.157,32.413,0.0890,0.010000,0.000100000
+ p0.010,50000,174903,1,430.400,44.907,0.1043,0.010000,0.000100000
+ p0.010,50000,174903,2,458.256,47.685,0.1041,0.010000,0.000100000
+ p0.010,50000,174903,3,332.602,42.574,0.1280,0.010000,0.000100000
+ p0.010,58000,234422,1,606.597,65.650,0.1082,0.010000,0.000100000
+ p0.010,58000,234422,2,502.739,58.246,0.1159,0.010000,0.000100000
+ p0.010,58000,234422,3,477.328,59.802,0.1253,0.010000,0.000100000
+ p0.010,69000,331231,1,559.928,81.968,0.1464,0.010000,0.000100000
+ p0.010,69000,331231,2,514.083,86.528,0.1683,0.010000,0.000100000
+ p0.010,69000,331231,3,601.672,95.934,0.1594,0.010000,0.000100000
+ p0.010,81000,457271,1,675.088,119.821,0.1775,0.010000,0.000100000
+ p0.010,81000,457271,2,641.671,114.564,0.1785,0.010000,0.000100000
+ p0.010,81000,457271,3,651.985,119.556,0.1834,0.010000,0.000100000
+ p0.010,95000,629660,1,928.485,162.729,0.1753,0.010000,0.000100000
+ p0.010,95000,629660,2,869.368,161.968,0.1863,0.010000,0.000100000
+ p0.010,95000,629660,3,876.931,165.358,0.1886,0.010000,0.000100000
+ p0.010,111000,858910,1,1290.964,232.245,0.1799,0.010000,0.000100000
+ p0.010,111000,858910,2,1210.956,223.658,0.1847,0.010000,0.000100000
+ p0.010,111000,858910,3,1233.032,235.343,0.1909,0.010000,0.000100000
+ p0.010,130000,1180808,1,1835.596,302.229,0.1646,0.010000,0.000100000
+ p0.010,130000,1180808,2,1728.640,301.218,0.1743,0.010000,0.000100000
+ p0.010,130000,1180808,3,1894.693,323.953,0.1710,0.010000,0.000100000
+ p0.010,153000,1635104,1,2434.417,481.471,0.1978,0.010000,0.000100000
+ p0.010,153000,1635104,2,2835.456,565.099,0.1993,0.010000,0.000100000
+ p0.010,153000,1635104,3,2509.212,462.239,0.1842,0.010000,0.000100000
+ p0.010,180000,2259463,1,3470.979,753.372,0.2170,0.010000,0.000100000
+ p0.010,180000,2259463,2,3840.293,888.440,0.2313,0.010000,0.000100000
+ p0.010,180000,2259463,3,4039.016,774.241,0.1917,0.010000,0.000100000
+ p0.010,211000,3107369,1,5367.980,1196.096,0.2228,0.010000,0.000100000
+ p0.010,211000,3107369,2,5169.248,1260.174,0.2438,0.010000,0.000100000
+ p0.010,211000,3107369,3,5381.383,1125.088,0.2091,0.010000,0.000100000
+ p0.010,248000,4294862,1,8809.494,1714.655,0.1946,0.010000,0.000100000
+ p0.010,248000,4294862,2,10826.117,1550.755,0.1432,0.010000,0.000100000
+ p0.010,248000,4294862,3,9121.021,1334.802,0.1463,0.010000,0.000100000
+ p0.010,291000,5909593,1,11920.301,1879.288,0.1577,0.010000,0.000100000
+ p0.010,291000,5909593,2,12681.672,1886.153,0.1487,0.010000,0.000100000
+ p0.010,291000,5909593,3,13008.623,1904.175,0.1464,0.010000,0.000100000
+ p0.010,341000,8116027,1,20886.926,2635.949,0.1262,0.010000,0.000100000
+ p0.010,341000,8116027,2,23657.120,2641.528,0.1117,0.010000,0.000100000
+ p0.010,341000,8116027,3,24323.864,2660.392,0.1094,0.010000,0.000100000
+ p0.010,401000,11224438,1,26442.424,3698.956,0.1399,0.010000,0.000100000
+ p0.010,401000,11224438,2,27944.718,3785.824,0.1355,0.010000,0.000100000
+ p0.010,401000,11224438,3,35573.425,4131.695,0.1161,0.010000,0.000100000
+ p0.010,471000,15478323,1,46818.609,5235.147,0.1118,0.010000,0.000100000
+ p0.010,471000,15478323,2,43437.508,5320.091,0.1225,0.010000,0.000100000
+ p0.010,471000,15478323,3,44888.857,5216.460,0.1162,0.010000,0.000100000
+ p0.010,553000,21347547,1,70903.340,7237.961,0.1021,0.010000,0.000100000
+ p0.010,553000,21347547,2,86748.593,7565.931,0.0872,0.010000,0.000100000
+ p0.010,553000,21347547,3,73878.376,7251.312,0.0982,0.010000,0.000100000
+ p0.010,649000,29406812,1,110734.007,10133.955,0.0915,0.010000,0.000100000
+ p0.010,649000,29406812,2,98126.391,10132.573,0.1033,0.010000,0.000100000
+ p0.010,649000,29406812,3,94134.658,10104.309,0.1073,0.010000,0.000100000
+ p0.010,762000,40512199,1,238696.389,14388.104,0.0603,0.010000,0.000100000
+ p0.010,762000,40512199,2,197853.014,14915.448,0.0754,0.010000,0.000100000
+ p0.010,762000,40512199,3,185054.047,14096.983,0.0762,0.010000,0.000100000
+ p0.010,894000,55788477,1,342670.024,19924.599,0.0581,0.010000,0.000100000
+ p0.010,894000,55788477,2,514606.529,19798.917,0.0385,0.010000,0.000100000
+ p0.010,894000,55788477,3,448368.373,20717.981,0.0462,0.010000,0.000100000
+ p0.010,1050000,76952553,1,868027.280,37071.813,0.0427,0.010000,0.000100000
+ p0.010,1050000,76952553,2,519475.395,38502.183,0.0741,0.010000,0.000100000
+ p0.010,1050000,76952553,3,511793.094,35704.567,0.0698,0.010000,0.000100000
results_nosquared.csv ADDED
@@ -0,0 +1,271 @@
+ series,n,m,trial,ms,theo_x,normalized_theory_ms
+ dens_x0.5,10000,761,1,13.928,99112.473,32.641
+ dens_x0.5,10000,761,2,14.427,99112.473,32.641
+ dens_x0.5,10000,761,3,14.068,99112.473,32.641
+ dens_x0.5,12000,1077,1,24.819,122827.840,40.451
+ dens_x0.5,12000,1077,2,18.116,122827.840,40.451
+ dens_x0.5,12000,1077,3,17.827,122827.840,40.451
+ dens_x0.5,14000,1455,1,22.211,147545.989,48.592
+ dens_x0.5,14000,1455,2,22.745,147545.989,48.592
+ dens_x0.5,14000,1455,3,21.998,147545.989,48.592
+ dens_x0.5,16000,1937,1,25.174,173636.330,57.184
+ dens_x0.5,16000,1937,2,24.886,173636.330,57.184
+ dens_x0.5,16000,1937,3,25.206,173636.330,57.184
+ dens_x0.5,19000,2715,1,30.350,213940.398,70.458
+ dens_x0.5,19000,2715,2,27.632,213940.398,70.458
+ dens_x0.5,19000,2715,3,30.365,213940.398,70.458
+ dens_x0.5,22000,3611,1,37.591,256079.209,84.335
+ dens_x0.5,22000,3611,2,39.308,256079.209,84.335
+ dens_x0.5,22000,3611,3,37.304,256079.209,84.335
+ dens_x0.5,26000,5194,1,49.215,317113.582,104.436
+ dens_x0.5,26000,5194,2,45.546,317113.582,104.436
+ dens_x0.5,26000,5194,3,46.041,317113.582,104.436
+ dens_x0.5,30000,6763,1,43.645,378988.027,124.813
+ dens_x0.5,30000,6763,2,45.099,378988.027,124.813
+ dens_x0.5,30000,6763,3,45.535,378988.027,124.813
+ dens_x0.5,36000,9660,1,47.761,479031.581,157.761
+ dens_x0.5,36000,9660,2,67.519,479031.581,157.761
+ dens_x0.5,36000,9660,3,48.563,479031.581,157.761
+ dens_x0.5,42000,13346,1,50.651,589181.686,194.037
+ dens_x0.5,42000,13346,2,67.829,589181.686,194.037
+ dens_x0.5,42000,13346,3,58.132,589181.686,194.037
+ dens_x0.5,49000,17900,1,71.334,722491.606,237.940
+ dens_x0.5,49000,17900,2,81.959,722491.606,237.940
+ dens_x0.5,49000,17900,3,175.205,722491.606,237.940
+ dens_x0.5,57000,24363,1,100.968,890990.473,293.433
+ dens_x0.5,57000,24363,2,102.184,890990.473,293.433
+ dens_x0.5,57000,24363,3,101.227,890990.473,293.433
+ dens_x0.5,67000,33829,1,119.900,1120457.009,369.004
+ dens_x0.5,67000,33829,2,131.959,1120457.009,369.004
+ dens_x0.5,67000,33829,3,133.951,1120457.009,369.004
+ dens_x0.5,79000,46917,1,165.619,1419991.587,467.650
+ dens_x0.5,79000,46917,2,165.403,1419991.587,467.650
+ dens_x0.5,79000,46917,3,161.530,1419991.587,467.650
+ dens_x0.5,92000,63864,1,200.791,1781454.424,586.692
+ dens_x0.5,92000,63864,2,255.603,1781454.424,586.692
+ dens_x0.5,92000,63864,3,233.085,1781454.424,586.692
+ dens_x0.5,108000,87350,1,258.690,2264084.329,745.638
+ dens_x0.5,108000,87350,2,279.017,2264084.329,745.638
+ dens_x0.5,108000,87350,3,209.424,2264084.329,745.638
+ dens_x0.5,127000,120912,1,296.722,2913447.536,959.495
+ dens_x0.5,127000,120912,2,297.731,2913447.536,959.495
+ dens_x0.5,127000,120912,3,323.214,2913447.536,959.495
+ dens_x0.5,149000,166365,1,389.742,3756533.770,1237.151
+ dens_x0.5,149000,166365,2,315.302,3756533.770,1237.151
+ dens_x0.5,149000,166365,3,369.940,3756533.770,1237.151
+ dens_x0.5,174000,226551,1,384.883,4833373.044,1591.789
+ dens_x0.5,174000,226551,2,474.197,4833373.044,1591.789
+ dens_x0.5,174000,226551,3,385.850,4833373.044,1591.789
+ dens_x0.5,204000,312325,1,513.323,6312525.050,2078.923
+ dens_x0.5,204000,312325,2,548.002,6312525.050,2078.923
+ dens_x0.5,204000,312325,3,561.628,6312525.050,2078.923
+ dens_x0.5,240000,431142,1,707.811,8314371.662,2738.197
+ dens_x0.5,240000,431142,2,649.676,8314371.662,2738.197
+ dens_x0.5,240000,431142,3,701.845,8314371.662,2738.197
+ dens_x0.5,281000,589828,1,888.249,10925503.834,3598.130
+ dens_x0.5,281000,589828,2,891.876,10925503.834,3598.130
+ dens_x0.5,281000,589828,3,908.785,10925503.834,3598.130
+ dens_x0.5,329000,811844,1,1222.536,14493068.872,4773.047
+ dens_x0.5,329000,811844,2,1304.879,14493068.872,4773.047
+ dens_x0.5,329000,811844,3,1212.275,14493068.872,4773.047
+ dens_x0.5,386000,1115568,1,2245.253,19315559.086,6361.252
+ dens_x0.5,386000,1115568,2,2000.723,19315559.086,6361.252
+ dens_x0.5,386000,1115568,3,2589.654,19315559.086,6361.252
+ dens_x0.5,452000,1531464,1,3012.595,25827552.428,8505.867
+ dens_x0.5,452000,1531464,2,2750.113,25827552.428,8505.867
+ dens_x0.5,452000,1531464,3,2729.488,25827552.428,8505.867
+ dens_x0.5,530000,2103874,1,3809.212,34716124.680,11433.167
+ dens_x0.5,530000,2103874,2,4309.380,34716124.680,11433.167
+ dens_x0.5,530000,2103874,3,4481.890,34716124.680,11433.167
+ dens_x0.5,621000,2887681,1,6422.418,46802598.872,15413.643
+ dens_x0.5,621000,2887681,2,6585.869,46802598.872,15413.643
+ dens_x0.5,621000,2887681,3,6765.516,46802598.872,15413.643
+ dens_x0.5,728000,3971764,1,11958.752,63437679.196,20892.124
84
+ dens_x0.5,728000,3971764,2,10308.737,63437679.196,20892.124
85
+ dens_x0.5,728000,3971764,3,11056.578,63437679.196,20892.124
86
+ dens_x0.5,853000,5448500,1,14109.654,86056528.179,28341.259
87
+ dens_x0.5,853000,5448500,2,14769.758,86056528.179,28341.259
88
+ dens_x0.5,853000,5448500,3,14404.127,86056528.179,28341.259
89
+ dens_x0.5,1000000,7489974,1,24320.614,117293325.434,38628.569
90
+ dens_x0.5,1000000,7489974,2,24047.511,117293325.434,38628.569
91
+ dens_x0.5,1000000,7489974,3,22553.170,117293325.434,38628.569
92
+ dens_x1.0,10000,1550,1,15.795,106379.431,35.034
93
+ dens_x1.0,10000,1550,2,17.105,106379.431,35.034
94
+ dens_x1.0,10000,1550,3,17.056,106379.431,35.034
95
+ dens_x1.0,12000,2135,1,20.849,132765.276,43.724
96
+ dens_x1.0,12000,2135,2,21.907,132765.276,43.724
97
+ dens_x1.0,12000,2135,3,22.254,132765.276,43.724
98
+ dens_x1.0,14000,2963,1,28.417,161942.582,53.333
99
+ dens_x1.0,14000,2963,2,27.935,161942.582,53.333
100
+ dens_x1.0,14000,2963,3,27.837,161942.582,53.333
101
+ dens_x1.0,16000,3912,1,32.832,192755.010,63.481
102
+ dens_x1.0,16000,3912,2,32.494,192755.010,63.481
103
+ dens_x1.0,16000,3912,3,31.028,192755.010,63.481
104
+ dens_x1.0,19000,5413,1,40.669,240521.618,79.212
105
+ dens_x1.0,19000,5413,2,39.997,240521.618,79.212
106
+ dens_x1.0,19000,5413,3,43.415,240521.618,79.212
107
+ dens_x1.0,22000,7290,1,40.898,292864.786,96.450
108
+ dens_x1.0,22000,7290,2,68.058,292864.786,96.450
109
+ dens_x1.0,22000,7290,3,37.365,292864.786,96.450
110
+ dens_x1.0,26000,10078,1,49.070,366763.602,120.787
111
+ dens_x1.0,26000,10078,2,51.834,366763.602,120.787
112
+ dens_x1.0,26000,10078,3,46.146,366763.602,120.787
113
+ dens_x1.0,30000,13336,1,80.316,446748.773,147.129
114
+ dens_x1.0,30000,13336,2,74.729,446748.773,147.129
115
+ dens_x1.0,30000,13336,3,57.861,446748.773,147.129
116
+ dens_x1.0,36000,19373,1,83.950,580933.327,191.321
117
+ dens_x1.0,36000,19373,2,90.415,580933.327,191.321
118
+ dens_x1.0,36000,19373,3,77.043,580933.327,191.321
119
+ dens_x1.0,42000,26057,1,131.538,724495.682,238.600
120
+ dens_x1.0,42000,26057,2,94.107,724495.682,238.600
121
+ dens_x1.0,42000,26057,3,157.959,724495.682,238.600
122
+ dens_x1.0,49000,35903,1,121.913,916916.365,301.971
123
+ dens_x1.0,49000,35903,2,142.426,916916.365,301.971
124
+ dens_x1.0,49000,35903,3,142.150,916916.365,301.971
125
+ dens_x1.0,57000,48648,1,169.368,1156930.810,381.016
126
+ dens_x1.0,57000,48648,2,170.218,1156930.810,381.016
127
+ dens_x1.0,57000,48648,3,171.756,1156930.810,381.016
128
+ dens_x1.0,67000,67188,1,214.470,1491157.159,491.087
129
+ dens_x1.0,67000,67188,2,208.270,1491157.159,491.087
130
+ dens_x1.0,67000,67188,3,220.916,1491157.159,491.087
131
+ dens_x1.0,79000,93552,1,237.551,1945903.955,640.850
132
+ dens_x1.0,79000,93552,2,188.477,1945903.955,640.850
133
+ dens_x1.0,79000,93552,3,184.641,1945903.955,640.850
134
+ dens_x1.0,92000,126776,1,220.118,2500509.887,823.501
135
+ dens_x1.0,92000,126776,2,274.676,2500509.887,823.501
136
+ dens_x1.0,92000,126776,3,223.749,2500509.887,823.501
137
+ dens_x1.0,108000,175020,1,287.105,3280169.679,1080.268
138
+ dens_x1.0,108000,175020,2,312.388,3280169.679,1080.268
139
+ dens_x1.0,108000,175020,3,281.471,3280169.679,1080.268
140
+ dens_x1.0,127000,241838,1,374.493,4334562.918,1427.515
141
+ dens_x1.0,127000,241838,2,358.238,4334562.918,1427.515
142
+ dens_x1.0,127000,241838,3,408.865,4334562.918,1427.515
143
+ dens_x1.0,149000,333094,1,487.476,5742559.864,1891.215
144
+ dens_x1.0,149000,333094,2,564.073,5742559.864,1891.215
145
+ dens_x1.0,149000,333094,3,462.038,5742559.864,1891.215
146
+ dens_x1.0,174000,454003,1,625.079,7577993.244,2495.684
147
+ dens_x1.0,174000,454003,2,720.059,7577993.244,2495.684
148
+ dens_x1.0,174000,454003,3,753.227,7577993.244,2495.684
149
+ dens_x1.0,204000,623499,1,1540.281,10116899.562,3331.829
150
+ dens_x1.0,204000,623499,2,1002.571,10116899.562,3331.829
151
+ dens_x1.0,204000,623499,3,1068.854,10116899.562,3331.829
152
+ dens_x1.0,240000,862431,1,1298.144,13657349.809,4497.817
153
+ dens_x1.0,240000,862431,2,1314.402,13657349.809,4497.817
154
+ dens_x1.0,240000,862431,3,1310.847,13657349.809,4497.817
155
+ dens_x1.0,281000,1182168,1,2025.112,18357066.601,6045.589
156
+ dens_x1.0,281000,1182168,2,2083.471,18357066.601,6045.589
157
+ dens_x1.0,281000,1182168,3,1986.419,18357066.601,6045.589
158
+ dens_x1.0,329000,1623777,1,3467.966,24807713.897,8170.000
159
+ dens_x1.0,329000,1623777,2,2775.287,24807713.897,8170.000
160
+ dens_x1.0,329000,1623777,3,2566.352,24807713.897,8170.000
161
+ dens_x1.0,386000,2232671,1,4668.748,33685517.024,11093.754
162
+ dens_x1.0,386000,2232671,2,3965.444,33685517.024,11093.754
163
+ dens_x1.0,386000,2232671,3,4808.141,33685517.024,11093.754
164
+ dens_x1.0,452000,3055874,1,7017.716,45677561.904,15043.131
165
+ dens_x1.0,452000,3055874,2,5722.622,45677561.904,15043.131
166
+ dens_x1.0,452000,3055874,3,6966.270,45677561.904,15043.131
167
+ dens_x1.0,530000,4208065,1,9001.121,62450692.510,20567.077
168
+ dens_x1.0,530000,4208065,2,10557.556,62450692.510,20567.077
169
+ dens_x1.0,530000,4208065,3,10740.295,62450692.510,20567.077
170
+ dens_x1.0,621000,5774507,1,14674.393,85310220.195,28095.475
171
+ dens_x1.0,621000,5774507,2,16115.354,85310220.195,28095.475
172
+ dens_x1.0,621000,5774507,3,15775.575,85310220.195,28095.475
173
+ dens_x1.0,728000,7935888,1,25390.090,116945648.236,38514.067
174
+ dens_x1.0,728000,7935888,2,20846.516,116945648.236,38514.067
175
+ dens_x1.0,728000,7935888,3,21598.478,116945648.236,38514.067
176
+ dens_x1.0,853000,10900416,1,41442.053,160510699.866,52861.478
177
+ dens_x1.0,853000,10900416,2,42161.875,160510699.866,52861.478
178
+ dens_x1.0,853000,10900416,3,42103.177,160510699.866,52861.478
179
+ dens_x1.0,1000000,14986435,1,53008.511,220860761.527,72736.748
180
+ dens_x1.0,1000000,14986435,2,52939.177,220860761.527,72736.748
181
+ dens_x1.0,1000000,14986435,3,51968.340,220860761.527,72736.748
182
+ dens_x2.0,10000,3007,1,25.919,119798.897,39.454
183
+ dens_x2.0,10000,3007,2,19.887,119798.897,39.454
184
+ dens_x2.0,10000,3007,3,23.027,119798.897,39.454
185
+ dens_x2.0,12000,4365,1,27.838,153710.912,50.622
186
+ dens_x2.0,12000,4365,2,29.499,153710.912,50.622
187
+ dens_x2.0,12000,4365,3,28.616,153710.912,50.622
188
+ dens_x2.0,14000,5830,1,37.540,189313.294,62.347
189
+ dens_x2.0,14000,5830,2,31.680,189313.294,62.347
190
+ dens_x2.0,14000,5830,3,35.764,189313.294,62.347
191
+ dens_x2.0,16000,7624,1,32.257,228688.447,75.315
192
+ dens_x2.0,16000,7624,2,39.717,228688.447,75.315
193
+ dens_x2.0,16000,7624,3,31.113,228688.447,75.315
194
+ dens_x2.0,19000,10837,1,41.738,293959.920,96.811
195
+ dens_x2.0,19000,10837,2,39.956,293959.920,96.811
196
+ dens_x2.0,19000,10837,3,43.791,293959.920,96.811
197
+ dens_x2.0,22000,14556,1,49.073,365516.050,120.377
198
+ dens_x2.0,22000,14556,2,49.029,365516.050,120.377
199
+ dens_x2.0,22000,14556,3,48.099,365516.050,120.377
200
+ dens_x2.0,26000,20395,1,76.849,471644.695,155.328
201
+ dens_x2.0,26000,20395,2,76.294,471644.695,155.328
202
+ dens_x2.0,26000,20395,3,54.920,471644.695,155.328
203
+ dens_x2.0,30000,26790,1,91.247,585445.422,192.807
204
+ dens_x2.0,30000,26790,2,91.468,585445.422,192.807
205
+ dens_x2.0,30000,26790,3,90.817,585445.422,192.807
206
+ dens_x2.0,36000,38800,1,130.781,784747.311,258.443
207
+ dens_x2.0,36000,38800,2,128.351,784747.311,258.443
208
+ dens_x2.0,36000,38800,3,130.023,784747.311,258.443
209
+ dens_x2.0,42000,53391,1,157.755,1015477.726,334.430
210
+ dens_x2.0,42000,53391,2,156.454,1015477.726,334.430
211
+ dens_x2.0,42000,53391,3,172.249,1015477.726,334.430
212
+ dens_x2.0,49000,71945,1,231.629,1306154.668,430.160
213
+ dens_x2.0,49000,71945,2,201.577,1306154.668,430.160
214
+ dens_x2.0,49000,71945,3,229.360,1306154.668,430.160
215
+ dens_x2.0,57000,97182,1,165.994,1688417.255,556.052
216
+ dens_x2.0,57000,97182,2,249.906,1688417.255,556.052
217
+ dens_x2.0,57000,97182,3,226.709,1688417.255,556.052
218
+ dens_x2.0,67000,134082,1,268.547,2234513.248,735.899
219
+ dens_x2.0,67000,134082,2,211.378,2234513.248,735.899
220
+ dens_x2.0,67000,134082,3,217.940,2234513.248,735.899
221
+ dens_x2.0,79000,186876,1,268.171,2998337.660,987.452
222
+ dens_x2.0,79000,186876,2,270.091,2998337.660,987.452
223
+ dens_x2.0,79000,186876,3,366.576,2998337.660,987.452
224
+ dens_x2.0,92000,253371,1,379.773,3947432.991,1300.020
225
+ dens_x2.0,92000,253371,2,457.191,3947432.991,1300.020
226
+ dens_x2.0,92000,253371,3,438.751,3947432.991,1300.020
227
+ dens_x2.0,108000,349108,1,489.690,5297829.841,1744.750
228
+ dens_x2.0,108000,349108,2,546.835,5297829.841,1744.750
229
+ dens_x2.0,108000,349108,3,592.548,5297829.841,1744.750
230
+ dens_x2.0,127000,484181,1,648.164,7182563.887,2365.456
231
+ dens_x2.0,127000,484181,2,661.210,7182563.887,2365.456
232
+ dens_x2.0,127000,484181,3,683.688,7182563.887,2365.456
233
+ dens_x2.0,149000,666491,1,941.120,9713885.437,3199.104
234
+ dens_x2.0,149000,666491,2,939.268,9713885.437,3199.104
235
+ dens_x2.0,149000,666491,3,1079.986,9713885.437,3199.104
236
+ dens_x2.0,174000,907603,1,1457.898,13051498.522,4298.290
237
+ dens_x2.0,174000,907603,2,1491.741,13051498.522,4298.290
238
+ dens_x2.0,174000,907603,3,1509.796,13051498.522,4298.290
239
+ dens_x2.0,204000,1247723,1,1941.400,17748584.329,5845.195
240
+ dens_x2.0,204000,1247723,2,1883.289,17748584.329,5845.195
241
+ dens_x2.0,204000,1247723,3,2148.763,17748584.329,5845.195
242
+ dens_x2.0,240000,1725541,1,3554.892,24349896.729,8019.226
243
+ dens_x2.0,240000,1725541,2,3500.571,24349896.729,8019.226
244
+ dens_x2.0,240000,1725541,3,2982.789,24349896.729,8019.226
245
+ dens_x2.0,281000,2365586,1,5271.622,33204358.944,10935.293
246
+ dens_x2.0,281000,2365586,2,5262.823,33204358.944,10935.293
247
+ dens_x2.0,281000,2365586,3,5355.604,33204358.944,10935.293
248
+ dens_x2.0,329000,3242756,1,6945.261,45374920.412,14943.461
249
+ dens_x2.0,329000,3242756,2,8169.639,45374920.412,14943.461
250
+ dens_x2.0,329000,3242756,3,8000.158,45374920.412,14943.461
251
+ dens_x2.0,386000,4461354,1,12528.673,62354387.279,20535.361
252
+ dens_x2.0,386000,4461354,2,12562.673,62354387.279,20535.361
253
+ dens_x2.0,386000,4461354,3,12035.052,62354387.279,20535.361
254
+ dens_x2.0,452000,6119886,1,20186.465,85575402.535,28182.808
255
+ dens_x2.0,452000,6119886,2,19916.812,85575402.535,28182.808
256
+ dens_x2.0,452000,6119886,3,15977.399,85575402.535,28182.808
257
+ dens_x2.0,530000,8413791,1,32317.410,117884820.410,38823.367
258
+ dens_x2.0,530000,8413791,2,31720.112,117884820.410,38823.367
259
+ dens_x2.0,530000,8413791,3,31302.338,117884820.410,38823.367
260
+ dens_x2.0,621000,11552079,1,53605.814,162377752.059,53476.360
261
+ dens_x2.0,621000,11552079,2,52721.452,162377752.059,53476.360
262
+ dens_x2.0,621000,11552079,3,53208.675,162377752.059,53476.360
263
+ dens_x2.0,728000,15870417,1,88661.318,224046367.608,73785.874
264
+ dens_x2.0,728000,15870417,2,89084.669,224046367.608,73785.874
265
+ dens_x2.0,728000,15870417,3,92250.894,224046367.608,73785.874
266
+ dens_x2.0,853000,21800415,1,103414.772,309366697.818,101884.678
267
+ dens_x2.0,853000,21800415,2,108232.648,309366697.818,101884.678
268
+ dens_x2.0,853000,21800415,3,109812.620,309366697.818,101884.678
269
+ dens_x2.0,1000000,29958277,1,177792.954,427704402.750,140857.196
270
+ dens_x2.0,1000000,29958277,2,167656.235,427704402.750,140857.196
271
+ dens_x2.0,1000000,29958277,3,168312.820,427704402.750,140857.196
results_nosquared2.csv ADDED
@@ -0,0 +1,271 @@
1
+ series,n,m,trial,ms,theo_x,normalized_theory_ms
2
+ dens_x0.5,10000,761,1,15.467,99112.473,26.023
3
+ dens_x0.5,10000,761,2,15.346,99112.473,26.023
4
+ dens_x0.5,10000,761,3,15.207,99112.473,26.023
5
+ dens_x0.5,12000,1077,1,18.413,122827.840,32.250
6
+ dens_x0.5,12000,1077,2,19.956,122827.840,32.250
7
+ dens_x0.5,12000,1077,3,18.865,122827.840,32.250
8
+ dens_x0.5,14000,1455,1,28.436,147545.989,38.740
9
+ dens_x0.5,14000,1455,2,21.738,147545.989,38.740
10
+ dens_x0.5,14000,1455,3,22.054,147545.989,38.740
11
+ dens_x0.5,16000,1937,1,23.350,173636.330,45.590
12
+ dens_x0.5,16000,1937,2,24.948,173636.330,45.590
13
+ dens_x0.5,16000,1937,3,23.249,173636.330,45.590
14
+ dens_x0.5,19000,2715,1,28.115,213940.398,56.172
15
+ dens_x0.5,19000,2715,2,29.197,213940.398,56.172
16
+ dens_x0.5,19000,2715,3,29.275,213940.398,56.172
17
+ dens_x0.5,22000,3611,1,33.547,256079.209,67.236
18
+ dens_x0.5,22000,3611,2,36.908,256079.209,67.236
19
+ dens_x0.5,22000,3611,3,35.231,256079.209,67.236
20
+ dens_x0.5,26000,5194,1,38.924,317113.582,83.261
21
+ dens_x0.5,26000,5194,2,42.115,317113.582,83.261
22
+ dens_x0.5,26000,5194,3,39.713,317113.582,83.261
23
+ dens_x0.5,30000,6763,1,67.958,378988.027,99.507
24
+ dens_x0.5,30000,6763,2,42.524,378988.027,99.507
25
+ dens_x0.5,30000,6763,3,43.359,378988.027,99.507
26
+ dens_x0.5,36000,9660,1,46.321,479031.581,125.774
27
+ dens_x0.5,36000,9660,2,45.298,479031.581,125.774
28
+ dens_x0.5,36000,9660,3,42.455,479031.581,125.774
29
+ dens_x0.5,42000,13346,1,56.408,589181.686,154.695
30
+ dens_x0.5,42000,13346,2,55.323,589181.686,154.695
31
+ dens_x0.5,42000,13346,3,59.205,589181.686,154.695
32
+ dens_x0.5,49000,17900,1,60.583,722491.606,189.697
33
+ dens_x0.5,49000,17900,2,59.983,722491.606,189.697
34
+ dens_x0.5,49000,17900,3,59.318,722491.606,189.697
35
+ dens_x0.5,57000,24363,1,73.324,890990.473,233.938
36
+ dens_x0.5,57000,24363,2,73.899,890990.473,233.938
37
+ dens_x0.5,57000,24363,3,74.315,890990.473,233.938
38
+ dens_x0.5,67000,33829,1,138.137,1120457.009,294.187
39
+ dens_x0.5,67000,33829,2,137.270,1120457.009,294.187
40
+ dens_x0.5,67000,33829,3,137.367,1120457.009,294.187
41
+ dens_x0.5,79000,46917,1,184.130,1419991.587,372.832
42
+ dens_x0.5,79000,46917,2,194.899,1419991.587,372.832
43
+ dens_x0.5,79000,46917,3,221.479,1419991.587,372.832
44
+ dens_x0.5,92000,63864,1,233.290,1781454.424,467.738
45
+ dens_x0.5,92000,63864,2,284.284,1781454.424,467.738
46
+ dens_x0.5,92000,63864,3,311.293,1781454.424,467.738
47
+ dens_x0.5,108000,87350,1,263.641,2264084.329,594.457
48
+ dens_x0.5,108000,87350,2,266.367,2264084.329,594.457
49
+ dens_x0.5,108000,87350,3,280.940,2264084.329,594.457
50
+ dens_x0.5,127000,120912,1,271.834,2913447.536,764.954
51
+ dens_x0.5,127000,120912,2,284.661,2913447.536,764.954
52
+ dens_x0.5,127000,120912,3,257.204,2913447.536,764.954
53
+ dens_x0.5,149000,166365,1,335.326,3756533.770,986.314
54
+ dens_x0.5,149000,166365,2,320.044,3756533.770,986.314
55
+ dens_x0.5,149000,166365,3,302.948,3756533.770,986.314
56
+ dens_x0.5,174000,226551,1,398.037,4833373.044,1269.049
57
+ dens_x0.5,174000,226551,2,381.909,4833373.044,1269.049
58
+ dens_x0.5,174000,226551,3,395.745,4833373.044,1269.049
59
+ dens_x0.5,204000,312325,1,533.436,6312525.050,1657.414
60
+ dens_x0.5,204000,312325,2,518.033,6312525.050,1657.414
61
+ dens_x0.5,204000,312325,3,533.411,6312525.050,1657.414
62
+ dens_x0.5,240000,431142,1,654.672,8314371.662,2183.018
63
+ dens_x0.5,240000,431142,2,636.705,8314371.662,2183.018
64
+ dens_x0.5,240000,431142,3,646.470,8314371.662,2183.018
65
+ dens_x0.5,281000,589828,1,928.709,10925503.834,2868.596
66
+ dens_x0.5,281000,589828,2,938.183,10925503.834,2868.596
67
+ dens_x0.5,281000,589828,3,893.858,10925503.834,2868.596
68
+ dens_x0.5,329000,811844,1,1234.116,14493068.872,3805.295
69
+ dens_x0.5,329000,811844,2,1227.491,14493068.872,3805.295
70
+ dens_x0.5,329000,811844,3,1175.747,14493068.872,3805.295
71
+ dens_x0.5,386000,1115568,1,1811.607,19315559.086,5071.486
72
+ dens_x0.5,386000,1115568,2,1734.140,19315559.086,5071.486
73
+ dens_x0.5,386000,1115568,3,1746.189,19315559.086,5071.486
74
+ dens_x0.5,452000,1531464,1,2372.135,25827552.428,6781.272
75
+ dens_x0.5,452000,1531464,2,2490.243,25827552.428,6781.272
76
+ dens_x0.5,452000,1531464,3,2315.387,25827552.428,6781.272
77
+ dens_x0.5,530000,2103874,1,3490.322,34716124.680,9115.052
78
+ dens_x0.5,530000,2103874,2,4043.773,34716124.680,9115.052
79
+ dens_x0.5,530000,2103874,3,3668.140,34716124.680,9115.052
80
+ dens_x0.5,621000,2887681,1,5439.786,46802598.872,12288.472
81
+ dens_x0.5,621000,2887681,2,5475.330,46802598.872,12288.472
82
+ dens_x0.5,621000,2887681,3,5776.755,46802598.872,12288.472
83
+ dens_x0.5,728000,3971764,1,8193.403,63437679.196,16656.172
84
+ dens_x0.5,728000,3971764,2,8824.932,63437679.196,16656.172
85
+ dens_x0.5,728000,3971764,3,8664.769,63437679.196,16656.172
86
+ dens_x0.5,853000,5448500,1,15321.894,86056528.179,22594.969
87
+ dens_x0.5,853000,5448500,2,14151.323,86056528.179,22594.969
88
+ dens_x0.5,853000,5448500,3,11713.548,86056528.179,22594.969
89
+ dens_x0.5,1000000,7489974,1,19408.272,117293325.434,30796.490
90
+ dens_x0.5,1000000,7489974,2,19446.200,117293325.434,30796.490
91
+ dens_x0.5,1000000,7489974,3,23624.300,117293325.434,30796.490
92
+ dens_x1.0,10000,1550,1,32.667,106379.431,27.931
93
+ dens_x1.0,10000,1550,2,41.566,106379.431,27.931
94
+ dens_x1.0,10000,1550,3,42.463,106379.431,27.931
95
+ dens_x1.0,12000,2135,1,61.629,132765.276,34.859
96
+ dens_x1.0,12000,2135,2,58.200,132765.276,34.859
97
+ dens_x1.0,12000,2135,3,42.940,132765.276,34.859
98
+ dens_x1.0,14000,2963,1,57.881,161942.582,42.520
99
+ dens_x1.0,14000,2963,2,62.641,161942.582,42.520
100
+ dens_x1.0,14000,2963,3,59.948,161942.582,42.520
101
+ dens_x1.0,16000,3912,1,74.806,192755.010,50.610
102
+ dens_x1.0,16000,3912,2,70.515,192755.010,50.610
103
+ dens_x1.0,16000,3912,3,66.497,192755.010,50.610
104
+ dens_x1.0,19000,5413,1,83.372,240521.618,63.151
105
+ dens_x1.0,19000,5413,2,59.112,240521.618,63.151
106
+ dens_x1.0,19000,5413,3,70.868,240521.618,63.151
107
+ dens_x1.0,22000,7290,1,33.001,292864.786,76.894
108
+ dens_x1.0,22000,7290,2,65.463,292864.786,76.894
109
+ dens_x1.0,22000,7290,3,34.928,292864.786,76.894
110
+ dens_x1.0,26000,10078,1,57.807,366763.602,96.297
111
+ dens_x1.0,26000,10078,2,59.212,366763.602,96.297
112
+ dens_x1.0,26000,10078,3,62.940,366763.602,96.297
113
+ dens_x1.0,30000,13336,1,50.504,446748.773,117.298
114
+ dens_x1.0,30000,13336,2,83.799,446748.773,117.298
115
+ dens_x1.0,30000,13336,3,55.835,446748.773,117.298
116
+ dens_x1.0,36000,19373,1,138.905,580933.327,152.530
117
+ dens_x1.0,36000,19373,2,92.379,580933.327,152.530
118
+ dens_x1.0,36000,19373,3,145.058,580933.327,152.530
119
+ dens_x1.0,42000,26057,1,147.388,724495.682,190.223
120
+ dens_x1.0,42000,26057,2,207.063,724495.682,190.223
121
+ dens_x1.0,42000,26057,3,174.600,724495.682,190.223
122
+ dens_x1.0,49000,35903,1,227.166,916916.365,240.745
123
+ dens_x1.0,49000,35903,2,205.647,916916.365,240.745
124
+ dens_x1.0,49000,35903,3,195.526,916916.365,240.745
125
+ dens_x1.0,57000,48648,1,251.880,1156930.810,303.763
126
+ dens_x1.0,57000,48648,2,356.017,1156930.810,303.763
127
+ dens_x1.0,57000,48648,3,266.212,1156930.810,303.763
128
+ dens_x1.0,67000,67188,1,390.684,1491157.159,391.518
129
+ dens_x1.0,67000,67188,2,228.733,1491157.159,391.518
130
+ dens_x1.0,67000,67188,3,207.511,1491157.159,391.518
131
+ dens_x1.0,79000,93552,1,544.276,1945903.955,510.916
132
+ dens_x1.0,79000,93552,2,299.406,1945903.955,510.916
133
+ dens_x1.0,79000,93552,3,566.521,1945903.955,510.916
134
+ dens_x1.0,92000,126776,1,248.202,2500509.887,656.533
135
+ dens_x1.0,92000,126776,2,375.889,2500509.887,656.533
136
+ dens_x1.0,92000,126776,3,338.922,2500509.887,656.533
137
+ dens_x1.0,108000,175020,1,364.955,3280169.679,861.240
138
+ dens_x1.0,108000,175020,2,309.446,3280169.679,861.240
139
+ dens_x1.0,108000,175020,3,323.999,3280169.679,861.240
140
+ dens_x1.0,127000,241838,1,704.959,4334562.918,1138.081
141
+ dens_x1.0,127000,241838,2,494.727,4334562.918,1138.081
142
+ dens_x1.0,127000,241838,3,464.965,4334562.918,1138.081
143
+ dens_x1.0,149000,333094,1,550.220,5742559.864,1507.764
144
+ dens_x1.0,149000,333094,2,519.299,5742559.864,1507.764
145
+ dens_x1.0,149000,333094,3,539.635,5742559.864,1507.764
146
+ dens_x1.0,174000,454003,1,786.343,7577993.244,1989.675
147
+ dens_x1.0,174000,454003,2,704.575,7577993.244,1989.675
148
+ dens_x1.0,174000,454003,3,724.878,7577993.244,1989.675
149
+ dens_x1.0,204000,623499,1,965.397,10116899.562,2656.289
150
+ dens_x1.0,204000,623499,2,910.950,10116899.562,2656.289
151
+ dens_x1.0,204000,623499,3,934.879,10116899.562,2656.289
152
+ dens_x1.0,240000,862431,1,1269.612,13657349.809,3585.868
153
+ dens_x1.0,240000,862431,2,1262.191,13657349.809,3585.868
154
+ dens_x1.0,240000,862431,3,1509.854,13657349.809,3585.868
155
+ dens_x1.0,281000,1182168,1,1911.024,18357066.601,4819.824
156
+ dens_x1.0,281000,1182168,2,1828.471,18357066.601,4819.824
157
+ dens_x1.0,281000,1182168,3,1921.693,18357066.601,4819.824
158
+ dens_x1.0,329000,1623777,1,2749.311,24807713.897,6513.504
159
+ dens_x1.0,329000,1623777,2,2523.897,24807713.897,6513.504
160
+ dens_x1.0,329000,1623777,3,3497.880,24807713.897,6513.504
161
+ dens_x1.0,386000,2232671,1,3995.353,33685517.024,8844.456
162
+ dens_x1.0,386000,2232671,2,4067.755,33685517.024,8844.456
163
+ dens_x1.0,386000,2232671,3,4026.271,33685517.024,8844.456
164
+ dens_x1.0,452000,3055874,1,5745.135,45677561.904,11993.083
165
+ dens_x1.0,452000,3055874,2,6786.590,45677561.904,11993.083
166
+ dens_x1.0,452000,3055874,3,5896.035,45677561.904,11993.083
167
+ dens_x1.0,530000,4208065,1,10912.807,62450692.510,16397.030
168
+ dens_x1.0,530000,4208065,2,9795.368,62450692.510,16397.030
169
+ dens_x1.0,530000,4208065,3,9179.829,62450692.510,16397.030
170
+ dens_x1.0,621000,5774507,1,12696.188,85310220.195,22399.018
171
+ dens_x1.0,621000,5774507,2,15804.396,85310220.195,22399.018
172
+ dens_x1.0,621000,5774507,3,12631.341,85310220.195,22399.018
173
+ dens_x1.0,728000,7935888,1,20186.080,116945648.236,30705.204
174
+ dens_x1.0,728000,7935888,2,19884.957,116945648.236,30705.204
175
+ dens_x1.0,728000,7935888,3,20055.111,116945648.236,30705.204
176
+ dens_x1.0,853000,10900416,1,31703.026,160510699.866,42143.627
177
+ dens_x1.0,853000,10900416,2,31489.327,160510699.866,42143.627
178
+ dens_x1.0,853000,10900416,3,33382.287,160510699.866,42143.627
179
+ dens_x1.0,1000000,14986435,1,53515.256,220860761.527,57989.116
180
+ dens_x1.0,1000000,14986435,2,43407.301,220860761.527,57989.116
181
+ dens_x1.0,1000000,14986435,3,48171.900,220860761.527,57989.116
182
+ dens_x2.0,10000,3007,1,37.511,119798.897,31.454
183
+ dens_x2.0,10000,3007,2,21.882,119798.897,31.454
184
+ dens_x2.0,10000,3007,3,25.285,119798.897,31.454
185
+ dens_x2.0,12000,4365,1,37.716,153710.912,40.358
186
+ dens_x2.0,12000,4365,2,32.105,153710.912,40.358
187
+ dens_x2.0,12000,4365,3,45.543,153710.912,40.358
188
+ dens_x2.0,14000,5830,1,47.403,189313.294,49.706
189
+ dens_x2.0,14000,5830,2,51.361,189313.294,49.706
190
+ dens_x2.0,14000,5830,3,28.873,189313.294,49.706
191
+ dens_x2.0,16000,7624,1,55.510,228688.447,60.044
192
+ dens_x2.0,16000,7624,2,25.997,228688.447,60.044
193
+ dens_x2.0,16000,7624,3,29.947,228688.447,60.044
194
+ dens_x2.0,19000,10837,1,51.411,293959.920,77.182
195
+ dens_x2.0,19000,10837,2,33.435,293959.920,77.182
196
+ dens_x2.0,19000,10837,3,43.471,293959.920,77.182
197
+ dens_x2.0,22000,14556,1,51.011,365516.050,95.970
198
+ dens_x2.0,22000,14556,2,59.701,365516.050,95.970
199
+ dens_x2.0,22000,14556,3,63.187,365516.050,95.970
200
+ dens_x2.0,26000,20395,1,522.381,471644.695,123.835
201
+ dens_x2.0,26000,20395,2,73.043,471644.695,123.835
202
+ dens_x2.0,26000,20395,3,57.832,471644.695,123.835
203
+ dens_x2.0,30000,26790,1,173.512,585445.422,153.714
204
+ dens_x2.0,30000,26790,2,137.431,585445.422,153.714
205
+ dens_x2.0,30000,26790,3,137.061,585445.422,153.714
206
+ dens_x2.0,36000,38800,1,148.907,784747.311,206.043
207
+ dens_x2.0,36000,38800,2,268.881,784747.311,206.043
208
+ dens_x2.0,36000,38800,3,198.411,784747.311,206.043
209
+ dens_x2.0,42000,53391,1,217.649,1015477.726,266.623
210
+ dens_x2.0,42000,53391,2,351.625,1015477.726,266.623
211
+ dens_x2.0,42000,53391,3,195.803,1015477.726,266.623
212
+ dens_x2.0,49000,71945,1,267.586,1306154.668,342.943
213
+ dens_x2.0,49000,71945,2,252.261,1306154.668,342.943
214
+ dens_x2.0,49000,71945,3,260.151,1306154.668,342.943
215
+ dens_x2.0,57000,97182,1,164.837,1688417.255,443.310
216
+ dens_x2.0,57000,97182,2,164.714,1688417.255,443.310
217
+ dens_x2.0,57000,97182,3,172.108,1688417.255,443.310
218
+ dens_x2.0,67000,134082,1,216.360,2234513.248,586.693
219
+ dens_x2.0,67000,134082,2,223.478,2234513.248,586.693
220
+ dens_x2.0,67000,134082,3,209.673,2234513.248,586.693
221
+ dens_x2.0,79000,186876,1,384.747,2998337.660,787.242
222
+ dens_x2.0,79000,186876,2,314.466,2998337.660,787.242
223
+ dens_x2.0,79000,186876,3,348.417,2998337.660,787.242
224
+ dens_x2.0,92000,253371,1,389.063,3947432.991,1036.436
225
+ dens_x2.0,92000,253371,2,376.706,3947432.991,1036.436
226
+ dens_x2.0,92000,253371,3,409.028,3947432.991,1036.436
227
+ dens_x2.0,108000,349108,1,569.904,5297829.841,1390.996
228
+ dens_x2.0,108000,349108,2,509.953,5297829.841,1390.996
229
+ dens_x2.0,108000,349108,3,614.219,5297829.841,1390.996
230
+ dens_x2.0,127000,484181,1,661.230,7182563.887,1885.851
231
+ dens_x2.0,127000,484181,2,736.545,7182563.887,1885.851
232
+ dens_x2.0,127000,484181,3,690.100,7182563.887,1885.851
233
+ dens_x2.0,149000,666491,1,889.956,9713885.437,2550.474
234
+ dens_x2.0,149000,666491,2,912.750,9713885.437,2550.474
235
+ dens_x2.0,149000,666491,3,944.557,9713885.437,2550.474
236
+ dens_x2.0,174000,907603,1,1342.386,13051498.522,3426.796
237
+ dens_x2.0,174000,907603,2,1332.430,13051498.522,3426.796
238
+ dens_x2.0,174000,907603,3,1345.460,13051498.522,3426.796
239
+ dens_x2.0,204000,1247723,1,1799.321,17748584.329,4660.061
240
+ dens_x2.0,204000,1247723,2,1801.665,17748584.329,4660.061
241
+ dens_x2.0,204000,1247723,3,1804.948,17748584.329,4660.061
242
+ dens_x2.0,240000,1725541,1,2803.778,24349896.729,6393.299
243
+ dens_x2.0,240000,1725541,2,2813.138,24349896.729,6393.299
244
+ dens_x2.0,240000,1725541,3,2724.773,24349896.729,6393.299
245
+ dens_x2.0,281000,2365586,1,4194.937,33204358.944,8718.124
246
+ dens_x2.0,281000,2365586,2,4184.496,33204358.944,8718.124
247
+ dens_x2.0,281000,2365586,3,4189.026,33204358.944,8718.124
248
+ dens_x2.0,329000,3242756,1,6341.712,45374920.412,11913.622
249
+ dens_x2.0,329000,3242756,2,6127.021,45374920.412,11913.622
250
+ dens_x2.0,329000,3242756,3,6071.201,45374920.412,11913.622
251
+ dens_x2.0,386000,4461354,1,9538.741,62354387.279,16371.744
252
+ dens_x2.0,386000,4461354,2,9270.728,62354387.279,16371.744
253
+ dens_x2.0,386000,4461354,3,9227.539,62354387.279,16371.744
254
+ dens_x2.0,452000,6119886,1,14518.451,85575402.535,22468.645
255
+ dens_x2.0,452000,6119886,2,17278.281,85575402.535,22468.645
256
+ dens_x2.0,452000,6119886,3,16358.872,85575402.535,22468.645
257
+ dens_x2.0,530000,8413791,1,25026.906,117884820.410,30951.793
258
+ dens_x2.0,530000,8413791,2,26927.986,117884820.410,30951.793
259
+ dens_x2.0,530000,8413791,3,28834.072,117884820.410,30951.793
260
+ dens_x2.0,621000,11552079,1,35260.396,162377752.059,42633.840
261
+ dens_x2.0,621000,11552079,2,35232.220,162377752.059,42633.840
262
+ dens_x2.0,621000,11552079,3,35913.410,162377752.059,42633.840
263
+ dens_x2.0,728000,15870417,1,59449.804,224046367.608,58825.527
264
+ dens_x2.0,728000,15870417,2,55741.168,224046367.608,58825.527
265
+ dens_x2.0,728000,15870417,3,57488.308,224046367.608,58825.527
266
+ dens_x2.0,853000,21800415,1,86590.882,309366697.818,81227.200
267
+ dens_x2.0,853000,21800415,2,86308.070,309366697.818,81227.200
268
+ dens_x2.0,853000,21800415,3,84987.572,309366697.818,81227.200
269
+ dens_x2.0,1000000,29958277,1,135554.293,427704402.750,112297.902
270
+ dens_x2.0,1000000,29958277,2,134537.849,427704402.750,112297.902
271
+ dens_x2.0,1000000,29958277,3,151123.123,427704402.750,112297.902
results_qing.csv ADDED
@@ -0,0 +1,271 @@
1
+ series,n,m,trial,ms,theo_x,normalized_theory_ms
2
+ dens_x0.5,10000,761,1,17.752,99112.473,50.448
3
+ dens_x0.5,10000,761,2,17.676,99112.473,50.448
4
+ dens_x0.5,10000,761,3,17.359,99112.473,50.448
5
+ dens_x0.5,12000,1077,1,19.911,122827.840,62.519
6
+ dens_x0.5,12000,1077,2,18.349,122827.840,62.519
7
+ dens_x0.5,12000,1077,3,17.963,122827.840,62.519
8
+ dens_x0.5,14000,1455,1,21.243,147545.989,75.101
9
+ dens_x0.5,14000,1455,2,21.438,147545.989,75.101
10
+ dens_x0.5,14000,1455,3,21.547,147545.989,75.101
11
+ dens_x0.5,16000,1937,1,33.463,173636.330,88.381
12
+ dens_x0.5,16000,1937,2,25.635,173636.330,88.381
13
+ dens_x0.5,16000,1937,3,24.989,173636.330,88.381
14
+ dens_x0.5,19000,2715,1,29.835,213940.398,108.895
15
+ dens_x0.5,19000,2715,2,29.484,213940.398,108.895
16
+ dens_x0.5,19000,2715,3,29.684,213940.398,108.895
17
+ dens_x0.5,22000,3611,1,30.203,256079.209,130.344
18
+ dens_x0.5,22000,3611,2,30.522,256079.209,130.344
19
+ dens_x0.5,22000,3611,3,33.004,256079.209,130.344
20
+ dens_x0.5,26000,5194,1,41.147,317113.582,161.410
21
+ dens_x0.5,26000,5194,2,40.076,317113.582,161.410
22
+ dens_x0.5,26000,5194,3,36.815,317113.582,161.410
23
+ dens_x0.5,30000,6763,1,44.788,378988.027,192.904
24
+ dens_x0.5,30000,6763,2,44.445,378988.027,192.904
25
+ dens_x0.5,30000,6763,3,42.679,378988.027,192.904
26
+ dens_x0.5,36000,9660,1,45.425,479031.581,243.826
27
+ dens_x0.5,36000,9660,2,46.989,479031.581,243.826
28
+ dens_x0.5,36000,9660,3,44.779,479031.581,243.826
29
+ dens_x0.5,42000,13346,1,50.098,589181.686,299.892
30
+ dens_x0.5,42000,13346,2,51.428,589181.686,299.892
31
+ dens_x0.5,42000,13346,3,53.184,589181.686,299.892
32
+ dens_x0.5,49000,17900,1,64.897,722491.606,367.747
33
+ dens_x0.5,49000,17900,2,57.590,722491.606,367.747
34
+ dens_x0.5,49000,17900,3,62.995,722491.606,367.747
35
+ dens_x0.5,57000,24363,1,110.618,890990.473,453.512
36
+ dens_x0.5,57000,24363,2,109.511,890990.473,453.512
37
+ dens_x0.5,57000,24363,3,107.511,890990.473,453.512
38
+ dens_x0.5,67000,33829,1,157.316,1120457.009,570.310
39
+ dens_x0.5,67000,33829,2,152.683,1120457.009,570.310
40
+ dens_x0.5,67000,33829,3,149.917,1120457.009,570.310
41
+ dens_x0.5,79000,46917,1,217.186,1419991.587,722.773
42
+ dens_x0.5,79000,46917,2,218.315,1419991.587,722.773
43
+ dens_x0.5,79000,46917,3,214.770,1419991.587,722.773
44
+ dens_x0.5,92000,63864,1,254.246,1781454.424,906.757
45
+ dens_x0.5,92000,63864,2,223.552,1781454.424,906.757
46
+ dens_x0.5,92000,63864,3,225.867,1781454.424,906.757
47
+ dens_x0.5,108000,87350,1,301.682,2264084.329,1152.414
48
+ dens_x0.5,108000,87350,2,218.064,2264084.329,1152.414
49
+ dens_x0.5,108000,87350,3,215.160,2264084.329,1152.414
50
+ dens_x0.5,127000,120912,1,315.482,2913447.536,1482.939
51
+ dens_x0.5,127000,120912,2,293.023,2913447.536,1482.939
52
+ dens_x0.5,127000,120912,3,379.251,2913447.536,1482.939
53
+ dens_x0.5,149000,166365,1,338.435,3756533.770,1912.068
54
+ dens_x0.5,149000,166365,2,346.454,3756533.770,1912.068
55
+ dens_x0.5,149000,166365,3,346.208,3756533.770,1912.068
56
+ dens_x0.5,174000,226551,1,455.336,4833373.044,2460.177
57
+ dens_x0.5,174000,226551,2,504.231,4833373.044,2460.177
58
+ dens_x0.5,174000,226551,3,522.258,4833373.044,2460.177
59
+ dens_x0.5,204000,312325,1,696.115,6312525.050,3213.062
60
+ dens_x0.5,204000,312325,2,850.734,6312525.050,3213.062
61
+ dens_x0.5,204000,312325,3,922.932,6312525.050,3213.062
62
+ dens_x0.5,240000,431142,1,1927.235,8314371.662,4231.998
63
+ dens_x0.5,240000,431142,2,1138.207,8314371.662,4231.998
64
+ dens_x0.5,240000,431142,3,1115.626,8314371.662,4231.998
65
+ dens_x0.5,281000,589828,1,1827.282,10925503.834,5561.059
66
+ dens_x0.5,281000,589828,2,1339.802,10925503.834,5561.059
67
+ dens_x0.5,281000,589828,3,1169.025,10925503.834,5561.059
68
+ dens_x0.5,329000,811844,1,1522.195,14493068.872,7376.943
69
+ dens_x0.5,329000,811844,2,1501.547,14493068.872,7376.943
70
+ dens_x0.5,329000,811844,3,1607.400,14493068.872,7376.943
71
+ dens_x0.5,386000,1115568,1,2344.641,19315559.086,9831.580
72
+ dens_x0.5,386000,1115568,2,2432.236,19315559.086,9831.580
73
+ dens_x0.5,386000,1115568,3,2322.046,19315559.086,9831.580
74
+ dens_x0.5,452000,1531464,1,3316.101,25827552.428,13146.172
75
+ dens_x0.5,452000,1531464,2,3377.502,25827552.428,13146.172
76
+ dens_x0.5,452000,1531464,3,3354.746,25827552.428,13146.172
77
+ dens_x0.5,530000,2103874,1,5013.327,34716124.680,17670.437
78
+ dens_x0.5,530000,2103874,2,4984.979,34716124.680,17670.437
79
+ dens_x0.5,530000,2103874,3,5053.679,34716124.680,17670.437
80
+ dens_x0.5,621000,2887681,1,7653.489,46802598.872,23822.428
81
+ dens_x0.5,621000,2887681,2,7610.639,46802598.872,23822.428
82
+ dens_x0.5,621000,2887681,3,7519.181,46802598.872,23822.428
83
+ dens_x0.5,728000,3971764,1,12015.660,63437679.196,32289.650
84
+ dens_x0.5,728000,3971764,2,11577.906,63437679.196,32289.650
85
+ dens_x0.5,728000,3971764,3,11991.380,63437679.196,32289.650
86
+ dens_x0.5,853000,5448500,1,17834.890,86056528.179,43802.598
87
+ dens_x0.5,853000,5448500,2,17710.405,86056528.179,43802.598
88
+ dens_x0.5,853000,5448500,3,18224.804,86056528.179,43802.598
89
+ dens_x0.5,1000000,7489974,1,26985.940,117293325.434,59702.065
90
+ dens_x0.5,1000000,7489974,2,28216.802,117293325.434,59702.065
91
+ dens_x0.5,1000000,7489974,3,27674.139,117293325.434,59702.065
92
+ dens_x1.0,10000,1550,1,17.827,106379.431,54.147
93
+ dens_x1.0,10000,1550,2,16.952,106379.431,54.147
94
+ dens_x1.0,10000,1550,3,17.015,106379.431,54.147
95
+ dens_x1.0,12000,2135,1,20.646,132765.276,67.577
96
+ dens_x1.0,12000,2135,2,21.123,132765.276,67.577
97
+ dens_x1.0,12000,2135,3,20.674,132765.276,67.577
98
+ dens_x1.0,14000,2963,1,24.111,161942.582,82.428
99
+ dens_x1.0,14000,2963,2,25.719,161942.582,82.428
100
+ dens_x1.0,14000,2963,3,25.629,161942.582,82.428
101
+ dens_x1.0,16000,3912,1,24.766,192755.010,98.112
102
+ dens_x1.0,16000,3912,2,28.343,192755.010,98.112
103
+ dens_x1.0,16000,3912,3,26.101,192755.010,98.112
104
+ dens_x1.0,19000,5413,1,27.698,240521.618,122.425
105
+ dens_x1.0,19000,5413,2,30.179,240521.618,122.425
106
+ dens_x1.0,19000,5413,3,29.468,240521.618,122.425
107
+ dens_x1.0,22000,7290,1,38.453,292864.786,149.068
108
+ dens_x1.0,22000,7290,2,35.754,292864.786,149.068
109
+ dens_x1.0,22000,7290,3,34.257,292864.786,149.068
110
+ dens_x1.0,26000,10078,1,44.338,366763.602,186.682
111
+ dens_x1.0,26000,10078,2,45.863,366763.602,186.682
112
+ dens_x1.0,26000,10078,3,43.600,366763.602,186.682
113
+ dens_x1.0,30000,13336,1,45.214,446748.773,227.394
114
+ dens_x1.0,30000,13336,2,46.397,446748.773,227.394
115
+ dens_x1.0,30000,13336,3,45.272,446748.773,227.394
116
+ dens_x1.0,36000,19373,1,89.256,580933.327,295.694
117
+ dens_x1.0,36000,19373,2,89.571,580933.327,295.694
118
+ dens_x1.0,36000,19373,3,61.459,580933.327,295.694
119
+ dens_x1.0,42000,26057,1,111.164,724495.682,368.767
120
+ dens_x1.0,42000,26057,2,112.365,724495.682,368.767
121
+ dens_x1.0,42000,26057,3,111.181,724495.682,368.767
122
+ dens_x1.0,49000,35903,1,147.373,916916.365,466.709
123
+ dens_x1.0,49000,35903,2,160.224,916916.365,466.709
124
+ dens_x1.0,49000,35903,3,149.107,916916.365,466.709
125
+ dens_x1.0,57000,48648,1,204.262,1156930.810,588.875
126
+ dens_x1.0,57000,48648,2,187.035,1156930.810,588.875
127
+ dens_x1.0,57000,48648,3,213.253,1156930.810,588.875
128
+ dens_x1.0,67000,67188,1,236.565,1491157.159,758.996
129
+ dens_x1.0,67000,67188,2,233.706,1491157.159,758.996
130
+ dens_x1.0,67000,67188,3,234.320,1491157.159,758.996
131
+ dens_x1.0,79000,93552,1,197.042,1945903.955,990.461
132
+ dens_x1.0,79000,93552,2,268.478,1945903.955,990.461
133
+ dens_x1.0,79000,93552,3,308.769,1945903.955,990.461
134
+ dens_x1.0,92000,126776,1,273.573,2500509.887,1272.754
135
+ dens_x1.0,92000,126776,2,250.997,2500509.887,1272.754
136
+ dens_x1.0,92000,126776,3,275.845,2500509.887,1272.754
137
+ dens_x1.0,108000,175020,1,379.493,3280169.679,1669.600
138
+ dens_x1.0,108000,175020,2,309.677,3280169.679,1669.600
139
+ dens_x1.0,108000,175020,3,306.916,3280169.679,1669.600
140
+ dens_x1.0,127000,241838,1,468.439,4334562.918,2206.284
141
+ dens_x1.0,127000,241838,2,446.181,4334562.918,2206.284
142
+ dens_x1.0,127000,241838,3,458.373,4334562.918,2206.284
143
+ dens_x1.0,149000,333094,1,566.457,5742559.864,2922.951
144
+ dens_x1.0,149000,333094,2,579.989,5742559.864,2922.951
145
+ dens_x1.0,149000,333094,3,577.801,5742559.864,2922.951
146
+ dens_x1.0,174000,454003,1,785.385,7577993.244,3857.183
147
+ dens_x1.0,174000,454003,2,768.331,7577993.244,3857.183
148
+ dens_x1.0,174000,454003,3,799.821,7577993.244,3857.183
149
+ dens_x1.0,204000,623499,1,1114.363,10116899.562,5149.481
150
+ dens_x1.0,204000,623499,2,1120.554,10116899.562,5149.481
151
+ dens_x1.0,204000,623499,3,1099.929,10116899.562,5149.481
152
+ dens_x1.0,240000,862431,1,1680.754,13657349.809,6951.563
153
+ dens_x1.0,240000,862431,2,1659.639,13657349.809,6951.563
154
+ dens_x1.0,240000,862431,3,1635.171,13657349.809,6951.563
155
+ dens_x1.0,281000,1182168,1,2464.097,18357066.601,9343.710
156
+ dens_x1.0,281000,1182168,2,2520.973,18357066.601,9343.710
157
+ dens_x1.0,281000,1182168,3,2521.537,18357066.601,9343.710
158
+ dens_x1.0,329000,1623777,1,3657.489,24807713.897,12627.076
159
+ dens_x1.0,329000,1623777,2,3822.215,24807713.897,12627.076
160
+ dens_x1.0,329000,1623777,3,3771.764,24807713.897,12627.076
161
+ dens_x1.0,386000,2232671,1,5777.777,33685517.024,17145.860
162
+ dens_x1.0,386000,2232671,2,5853.952,33685517.024,17145.860
163
+ dens_x1.0,386000,2232671,3,5741.782,33685517.024,17145.860
164
+ dens_x1.0,452000,3055874,1,8791.834,45677561.904,23249.786
165
+ dens_x1.0,452000,3055874,2,8823.808,45677561.904,23249.786
166
+ dens_x1.0,452000,3055874,3,8832.307,45677561.904,23249.786
167
+ dens_x1.0,530000,4208065,1,13664.587,62450692.510,31787.276
168
+ dens_x1.0,530000,4208065,2,13613.387,62450692.510,31787.276
169
+ dens_x1.0,530000,4208065,3,13421.059,62450692.510,31787.276
170
+ dens_x1.0,621000,5774507,1,20937.405,85310220.195,43422.729
171
+ dens_x1.0,621000,5774507,2,21621.346,85310220.195,43422.729
172
+ dens_x1.0,621000,5774507,3,21139.229,85310220.195,43422.729
173
+ dens_x1.0,728000,7935888,1,34245.137,116945648.236,59525.098
174
+ dens_x1.0,728000,7935888,2,33147.218,116945648.236,59525.098
175
+ dens_x1.0,728000,7935888,3,34011.001,116945648.236,59525.098
176
+ dens_x1.0,853000,10900416,1,54191.487,160510699.866,81699.621
177
+ dens_x1.0,853000,10900416,2,54442.605,160510699.866,81699.621
178
+ dens_x1.0,853000,10900416,3,55017.857,160510699.866,81699.621
179
+ dens_x1.0,1000000,14986435,1,86648.996,220860761.527,112417.680
180
+ dens_x1.0,1000000,14986435,2,88775.035,220860761.527,112417.680
181
+ dens_x1.0,1000000,14986435,3,87850.833,220860761.527,112417.680
182
+ dens_x2.0,10000,3007,1,21.643,119798.897,60.977
183
+ dens_x2.0,10000,3007,2,21.430,119798.897,60.977
184
+ dens_x2.0,10000,3007,3,21.235,119798.897,60.977
185
+ dens_x2.0,12000,4365,1,25.837,153710.912,78.239
186
+ dens_x2.0,12000,4365,2,23.999,153710.912,78.239
187
+ dens_x2.0,12000,4365,3,23.257,153710.912,78.239
188
+ dens_x2.0,14000,5830,1,26.238,189313.294,96.360
189
+ dens_x2.0,14000,5830,2,25.462,189313.294,96.360
190
+ dens_x2.0,14000,5830,3,25.834,189313.294,96.360
191
+ dens_x2.0,16000,7624,1,30.745,228688.447,116.402
192
+ dens_x2.0,16000,7624,2,34.669,228688.447,116.402
193
+ dens_x2.0,16000,7624,3,29.403,228688.447,116.402
194
+ dens_x2.0,19000,10837,1,36.609,293959.920,149.625
195
+ dens_x2.0,19000,10837,2,35.257,293959.920,149.625
196
+ dens_x2.0,19000,10837,3,33.859,293959.920,149.625
197
+ dens_x2.0,22000,14556,1,48.189,365516.050,186.047
198
+ dens_x2.0,22000,14556,2,47.185,365516.050,186.047
199
+ dens_x2.0,22000,14556,3,44.113,365516.050,186.047
200
+ dens_x2.0,26000,20395,1,85.873,471644.695,240.066
201
+ dens_x2.0,26000,20395,2,87.164,471644.695,240.066
202
+ dens_x2.0,26000,20395,3,85.546,471644.695,240.066
203
+ dens_x2.0,30000,26790,1,104.921,585445.422,297.991
204
+ dens_x2.0,30000,26790,2,108.649,585445.422,297.991
205
+ dens_x2.0,30000,26790,3,106.060,585445.422,297.991
206
+ dens_x2.0,36000,38800,1,154.243,784747.311,399.435
207
+ dens_x2.0,36000,38800,2,151.319,784747.311,399.435
208
+ dens_x2.0,36000,38800,3,158.272,784747.311,399.435
209
+ dens_x2.0,42000,53391,1,204.388,1015477.726,516.876
210
+ dens_x2.0,42000,53391,2,198.844,1015477.726,516.876
211
+ dens_x2.0,42000,53391,3,197.630,1015477.726,516.876
212
+ dens_x2.0,49000,71945,1,250.340,1306154.668,664.830
213
+ dens_x2.0,49000,71945,2,279.343,1306154.668,664.830
214
+ dens_x2.0,49000,71945,3,280.425,1306154.668,664.830
215
+ dens_x2.0,57000,97182,1,296.197,1688417.255,859.401
216
+ dens_x2.0,57000,97182,2,182.517,1688417.255,859.401
217
+ dens_x2.0,57000,97182,3,181.647,1688417.255,859.401
218
+ dens_x2.0,67000,134082,1,334.311,2234513.248,1137.363
219
+ dens_x2.0,67000,134082,2,268.165,2234513.248,1137.363
220
+ dens_x2.0,67000,134082,3,282.074,2234513.248,1137.363
221
+ dens_x2.0,79000,186876,1,326.573,2998337.660,1526.148
222
+ dens_x2.0,79000,186876,2,326.265,2998337.660,1526.148
223
+ dens_x2.0,79000,186876,3,343.059,2998337.660,1526.148
224
+ dens_x2.0,92000,253371,1,468.680,3947432.991,2009.235
225
+ dens_x2.0,92000,253371,2,456.861,3947432.991,2009.235
226
+ dens_x2.0,92000,253371,3,474.351,3947432.991,2009.235
227
+ dens_x2.0,108000,349108,1,660.224,5297829.841,2696.585
228
+ dens_x2.0,108000,349108,2,637.197,5297829.841,2696.585
229
+ dens_x2.0,108000,349108,3,651.868,5297829.841,2696.585
230
+ dens_x2.0,127000,484181,1,877.044,7182563.887,3655.910
231
+ dens_x2.0,127000,484181,2,953.163,7182563.887,3655.910
232
+ dens_x2.0,127000,484181,3,986.232,7182563.887,3655.910
233
+ dens_x2.0,149000,666491,1,1297.659,9713885.437,4944.348
234
+ dens_x2.0,149000,666491,2,1257.054,9713885.437,4944.348
235
+ dens_x2.0,149000,666491,3,1284.346,9713885.437,4944.348
236
+ dens_x2.0,174000,907603,1,1867.315,13051498.522,6643.186
237
+ dens_x2.0,174000,907603,2,1903.876,13051498.522,6643.186
238
+ dens_x2.0,174000,907603,3,1907.572,13051498.522,6643.186
239
+ dens_x2.0,204000,1247723,1,3091.032,17748584.329,9033.993
240
+ dens_x2.0,204000,1247723,2,2802.598,17748584.329,9033.993
241
+ dens_x2.0,204000,1247723,3,2826.897,17748584.329,9033.993
242
+ dens_x2.0,240000,1725541,1,4427.498,24349896.729,12394.048
243
+ dens_x2.0,240000,1725541,2,4409.478,24349896.729,12394.048
244
+ dens_x2.0,240000,1725541,3,4625.596,24349896.729,12394.048
245
+ dens_x2.0,281000,2365586,1,6853.596,33204358.944,16900.951
246
+ dens_x2.0,281000,2365586,2,6972.828,33204358.944,16900.951
247
+ dens_x2.0,281000,2365586,3,6824.291,33204358.944,16900.951
248
+ dens_x2.0,329000,3242756,1,10753.997,45374920.412,23095.743
249
+ dens_x2.0,329000,3242756,2,10933.854,45374920.412,23095.743
250
+ dens_x2.0,329000,3242756,3,10953.280,45374920.412,23095.743
251
+ dens_x2.0,386000,4461354,1,16880.353,62354387.279,31738.257
252
+ dens_x2.0,386000,4461354,2,17044.714,62354387.279,31738.257
253
+ dens_x2.0,386000,4461354,3,17421.910,62354387.279,31738.257
254
+ dens_x2.0,452000,6119886,1,27124.239,85575402.535,43557.706
255
+ dens_x2.0,452000,6119886,2,27560.953,85575402.535,43557.706
256
+ dens_x2.0,452000,6119886,3,27165.046,85575402.535,43557.706
257
+ dens_x2.0,530000,8413791,1,43964.374,117884820.410,60003.135
258
+ dens_x2.0,530000,8413791,2,43212.872,117884820.410,60003.135
259
+ dens_x2.0,530000,8413791,3,44285.243,117884820.410,60003.135
260
+ dens_x2.0,621000,11552079,1,73741.057,162377752.059,82649.947
261
+ dens_x2.0,621000,11552079,2,70827.593,162377752.059,82649.947
262
+ dens_x2.0,621000,11552079,3,66956.073,162377752.059,82649.947
263
+ dens_x2.0,728000,15870417,1,107879.018,224046367.608,114039.147
264
+ dens_x2.0,728000,15870417,2,108503.864,224046367.608,114039.147
265
+ dens_x2.0,728000,15870417,3,107531.729,224046367.608,114039.147
266
+ dens_x2.0,853000,21800415,1,174361.084,309366697.818,157467.022
267
+ dens_x2.0,853000,21800415,2,173178.964,309366697.818,157467.022
268
+ dens_x2.0,853000,21800415,3,186579.072,309366697.818,157467.022
269
+ dens_x2.0,1000000,29958277,1,291293.554,427704402.750,217700.674
270
+ dens_x2.0,1000000,29958277,2,278506.552,427704402.750,217700.674
271
+ dens_x2.0,1000000,29958277,3,286580.368,427704402.750,217700.674
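Note: the three results_*.csv tables added above share one schema: series (density series), n and m (presumably vertex and edge counts), trial (1-3), ms (measured wall time), theo_x (a theoretical cost term), and normalized_theory_ms (that cost rescaled into milliseconds). As a minimal sketch, assuming those inferred column meanings (they are not documented in the commit itself), the following loads one table, takes the median measured time over the three trials per (series, n, m) point, and compares it against the theory-normalized prediction:

import pandas as pd

# Load one of the benchmark tables added in this commit; all three
# results_*.csv files above use the same columns.
df = pd.read_csv("results_qing.csv")

# Median wall time over the three trials for each (series, n, m) point,
# then the ratio of measured time to the theory-normalized prediction.
med = (df.groupby(["series", "n", "m"], as_index=False)
         .agg(ms=("ms", "median"),
              pred_ms=("normalized_theory_ms", "first")))
med["measured_over_theory"] = med["ms"] / med["pred_ms"]
print(med.sort_values(["series", "n"]).head(10))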
seeds_diam_1e-2.json ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:3cdc07c7a3e60521a69a41eea8d44b6a40583dc9ce0d777011e549ef201f05ae
3
+ size 13140898
seeds_diam_1e-3.json ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:50b8aab046e0bf1e7ccb275424ff97a64eb0a1473c5f569e01b7121d13a84359
3
+ size 13142060
seeds_diam_1e-4.json ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:ddcc4f54f5261f92098e476e78a3fa541ffc6c63243b0aa24ab9cebf65e31812
3
+ size 13140779
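Note: the seeds_diam_*.json files above are stored as Git LFS pointers (version/oid/size lines), so the diff shows only pointer metadata; run `git lfs pull` to fetch the actual JSON. As a minimal sketch, assuming these files share the {"clusters": [{"cluster_id": ..., "members": [...]}, ...]} schema visible in seeds_diam_1e-6_coarsen.json below, this loads one file and summarizes its cluster sizes:

import json

# seeds_diam_1e-6_coarsen.json is small enough to ship inline; the other
# seeds_diam_*.json files must be fetched through Git LFS first.
with open("seeds_diam_1e-6_coarsen.json") as f:
    data = json.load(f)

sizes = [len(c["members"]) for c in data["clusters"]]
print(f"{len(sizes)} clusters; "
      f"sizes min/median/max = {min(sizes)}/{sorted(sizes)[len(sizes) // 2]}/{max(sizes)}; "
      f"{sum(sizes)} members total")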
seeds_diam_1e-6.json ADDED
The diff for this file is too large to render. See raw diff
 
seeds_diam_1e-6_coarsen.json ADDED
@@ -0,0 +1 @@
1
+ {"clusters": [{"cluster_id": 0, "members": [0, 633, 926, 1310, 1862, 1866, 2582, 2692]}, {"cluster_id": 1, "members": [1, 203, 247, 470, 652, 854, 857, 1097, 1246, 2583]}, {"cluster_id": 2, "members": [2, 49, 1454, 1662, 1666, 2381]}, {"cluster_id": 3, "members": [3, 594, 749, 1333, 2195, 2312, 2544]}, {"cluster_id": 4, "members": [4, 982, 1016, 1256, 1563, 2176, 2633]}, {"cluster_id": 5, "members": [5, 689, 796, 1148, 1173, 1250, 1292, 1317, 1629, 1659, 2220, 2221, 2507, 2525]}, {"cluster_id": 6, "members": [6, 61, 149, 1008, 1416, 1468, 1922, 1925, 2575]}, {"cluster_id": 7, "members": [7, 173, 208, 290, 480, 510, 649, 681, 687, 987, 1019, 1033, 1171, 1175, 1189, 1298, 1426, 1445, 1446, 1456, 1457, 1498, 1586, 1710, 2359, 2372, 2398, 2489, 2634, 2635, 2636, 2693, 2703]}, {"cluster_id": 8, "members": [8, 14, 258, 435, 751]}, {"cluster_id": 9, "members": [9, 64, 369, 385, 494, 723, 986, 1209, 1644, 2483, 2484, 2614, 2616]}, {"cluster_id": 10, "members": [10, 192, 193, 303, 308, 420, 519, 719, 1066, 1253, 1346, 1574, 1670, 2143, 2545]}, {"cluster_id": 11, "members": [11, 77, 354, 659, 767, 770, 873, 1053, 1121, 1131, 1215, 1405, 1410, 1479, 1508, 1655, 1708, 1803, 1839, 1857, 1894, 2118, 2282, 2313, 2314, 2384, 2453, 2460]}, {"cluster_id": 12, "members": [12, 1001, 1910, 2506, 2661, 2662]}, {"cluster_id": 13, "members": [13, 224, 310, 568, 829, 875, 892, 962, 1004, 1062, 1107, 1116, 1223, 1241, 1331, 1489, 1576, 1581, 1806, 1808, 1809, 1810, 1811, 1814, 1816, 1819, 1822]}, {"cluster_id": 14, "members": [15, 395, 599, 640, 765, 894, 1090, 1093, 1147, 1271, 1598, 1845, 1865, 2367, 2368, 2370, 2371, 2420]}, {"cluster_id": 15, "members": [16, 46, 466, 970, 1305, 1738, 2366, 2444]}, {"cluster_id": 16, "members": [17, 24, 927, 1247, 1315, 1316, 1335, 1356, 1613, 1679, 1856, 2096, 2139, 2140, 2141, 2142]}, {"cluster_id": 17, "members": [18, 317, 943, 1196, 1624, 1785, 1786, 2082, 2300]}, {"cluster_id": 18, "members": [19, 216, 295, 872, 990, 1176, 1177, 1542, 1607, 1931, 1935, 1939]}, {"cluster_id": 19, "members": [20, 128, 1404, 2269, 2270, 2374, 2375, 2416, 2417]}, {"cluster_id": 20, "members": [21, 313, 722, 813, 835, 865, 1043, 1092, 1106, 1134, 1321, 1567, 1654, 1874, 1920, 2296, 2310, 2376, 2422, 2499, 2500, 2501, 2625, 2626, 2653]}, {"cluster_id": 21, "members": [22, 275, 463, 706, 789, 805, 1234, 1371, 1393, 1702, 1703, 1967, 1969, 1970, 2238, 2239]}, {"cluster_id": 22, "members": [23, 92, 108, 898, 1155, 1327, 1328, 1835, 1836, 2157, 2158, 2159, 2160, 2161, 2209]}, {"cluster_id": 23, "members": [25, 1301, 1344, 1447, 1928, 2011, 2315, 2317, 2475]}, {"cluster_id": 24, "members": [26, 99, 122, 123, 127, 177, 1236, 1437, 1639, 1688, 2454, 2455, 2479, 2578, 2604, 2679, 2682]}, {"cluster_id": 25, "members": [27, 288, 531, 606, 618, 938, 2027, 2230, 2360, 2361]}, {"cluster_id": 26, "members": [28, 54, 422, 545, 700, 877, 1010, 1061, 1069, 1111, 1182, 1569, 1578, 1582, 1651, 1683, 1684, 1685, 1687, 1690, 1691, 1726, 1736, 1909, 2513, 2677]}, {"cluster_id": 27, "members": [29, 43, 152, 375, 396, 963, 1094, 1369, 1530, 1653, 1988, 2240, 2399, 2400, 2401, 2434, 2435, 2645]}, {"cluster_id": 28, "members": [30, 405, 697, 738, 1080, 1275, 2059, 2162, 2343, 2344]}, {"cluster_id": 29, "members": [31, 76, 737, 1594, 1974, 1979, 2178]}, {"cluster_id": 30, "members": [32, 518, 666, 2151, 2423]}, {"cluster_id": 31, "members": [33, 196, 286, 332, 442, 588, 627, 698, 911, 1051, 1798, 2040, 2120, 2122, 2362, 2615, 2618, 2619, 2637, 2696]}, {"cluster_id": 32, "members": [34, 59, 101, 111, 228, 322, 326, 333, 
346, 357, 557, 569, 628, 684, 686, 757, 764, 839, 849, 882, 919, 929, 951, 977, 999, 1040, 1114, 1149, 1150, 1238, 1243, 1272, 1281, 1339, 1358, 1384, 1389, 1451, 1471, 1492, 1517, 1520, 1546, 1562, 1565, 1604, 1612, 1646, 1694, 1696, 1709, 1711, 1713, 1716, 1717, 1720, 1725, 1730, 1734, 1737, 1744, 1746, 1747, 1752, 1753, 1754, 1757, 1759, 1760, 1764, 1930, 1948, 1982, 2041, 2153, 2334, 2421, 2512, 2560, 2561, 2597, 2627, 2643]}, {"cluster_id": 33, "members": [35, 504, 702, 728, 777, 895, 945, 1187, 1188, 1296, 1377, 1436, 1913, 2069, 2070, 2388, 2640]}, {"cluster_id": 34, "members": [36, 530, 1146, 1505, 1552, 1699, 1788, 2086, 2094, 2106, 2254, 2591]}, {"cluster_id": 35, "members": [37, 60, 331, 355, 1527, 1668, 2274, 2480]}, {"cluster_id": 36, "members": [38, 226, 429, 523, 617, 705, 794, 863, 949, 1160, 1166, 1493, 1807, 2043, 2406]}, {"cluster_id": 37, "members": [39, 1522, 1697, 1965, 2253, 2594, 2680]}, {"cluster_id": 38, "members": [40, 866, 1049, 1364, 2610]}, {"cluster_id": 39, "members": [41, 96, 175, 398, 596, 644, 730, 955, 992, 2135, 2164, 2177, 2217]}, {"cluster_id": 40, "members": [42, 69, 87, 106, 262, 604, 907, 1351, 1362, 1372, 1535, 1916, 2461]}, {"cluster_id": 41, "members": [44, 2247, 2263, 2624, 2701]}, {"cluster_id": 42, "members": [45, 174, 183, 214, 220, 367, 440, 454, 459, 469, 476, 482, 486, 490, 496, 498, 499, 501, 503, 535, 539, 541, 542, 551, 561, 563, 577, 578, 579, 582, 605, 663, 676, 913, 1014, 1056, 1091, 1105, 1122, 1140, 1233, 1705, 1792, 1917, 1952, 2029, 2179, 2322, 2354, 2393, 2433, 2457, 2458, 2548, 2549]}, {"cluster_id": 43, "members": [47, 163, 448, 472, 559, 587, 844, 1032, 1183, 1579, 1825]}, {"cluster_id": 44, "members": [48, 1031, 2205, 2206, 2471]}, {"cluster_id": 45, "members": [50, 241, 590, 648, 750, 1060, 1128, 1355, 1441, 1619, 1700, 1707, 1954, 1957, 2268, 2271, 2272, 2273, 2275, 2445, 2630, 2699]}, {"cluster_id": 46, "members": [51, 421, 457, 708, 710, 768, 1392, 1723, 1724, 2121, 2212, 2214, 2215, 2216]}, {"cluster_id": 47, "members": [52, 538, 636, 858, 1139, 1286, 1467, 1515, 1838, 2072, 2172, 2182, 2233, 2234, 2502]}, {"cluster_id": 48, "members": [53, 1601, 1739, 1750, 1766]}, {"cluster_id": 49, "members": [55, 815, 1079, 2018, 2020]}, {"cluster_id": 50, "members": [56, 412, 447, 581, 1616, 2638]}, {"cluster_id": 51, "members": [57, 731, 901, 1279, 2073, 2186, 2203, 2330, 2345, 2418, 2511]}, {"cluster_id": 52, "members": [58, 246, 744, 936, 1154, 1458, 1714, 1715, 2439, 2514, 2515, 2530, 2641]}, {"cluster_id": 53, "members": [62, 373, 475, 485, 624, 1025, 1042, 1047, 1198, 1926, 2051, 2052, 2219, 2333]}, {"cluster_id": 54, "members": [63, 72, 202, 268, 488, 613, 616, 745, 771, 1020, 1257, 1322, 1345, 1719, 2053, 2054, 2087, 2091, 2099, 2107, 2165, 2175, 2181, 2213, 2259, 2261, 2337, 2363, 2496, 2539, 2540, 2571, 2577]}, {"cluster_id": 55, "members": [65, 543, 619, 1293, 2021]}, {"cluster_id": 56, "members": [66, 411, 1096, 1465, 1488, 1633, 1799, 2601, 2609, 2631]}, {"cluster_id": 57, "members": [67, 113, 282, 540, 747, 2527, 2628]}, {"cluster_id": 58, "members": [68, 307, 391, 493, 976, 978, 991, 998, 1431, 2365]}, {"cluster_id": 59, "members": [70, 344, 441, 1455, 1537, 2184, 2323]}, {"cluster_id": 60, "members": [71, 73, 206, 558, 876, 881, 1199, 1214, 1589, 1706, 1745, 1751, 2584, 2585, 2596, 2632, 2691]}, {"cluster_id": 61, "members": [74, 497, 1118, 1376, 1652, 1848, 1972, 2419, 2620]}, {"cluster_id": 62, "members": [75, 143, 205, 232, 235, 316, 335, 343, 416, 532, 547, 664, 669, 766, 845, 869, 920, 1052, 1075, 1100, 1124, 
1212, 1297, 1299, 1323, 1440, 1701, 1820, 1853, 1854, 1855, 1859, 1860, 1861, 1863, 1864, 1868, 1872, 1875, 1877, 1942, 2299, 2573]}, {"cluster_id": 63, "members": [78, 994, 1219, 1329, 1418]}, {"cluster_id": 64, "members": [79, 522, 603, 716, 975, 1057, 1821, 2097, 2098, 2409]}, {"cluster_id": 65, "members": [80, 257, 434, 964, 965, 1117, 1524, 1693, 2049, 2055, 2198, 2408]}, {"cluster_id": 66, "members": [81, 315, 347, 408, 423, 527, 647, 827, 1921, 2232]}, {"cluster_id": 67, "members": [82, 194, 673, 1050, 1081, 1217, 1230, 1320, 1342, 1349, 1379, 1380, 1491, 1634, 2523, 2563, 2564, 2646, 2684]}, {"cluster_id": 68, "members": [83, 449, 1280, 1516, 2581, 2612]}, {"cluster_id": 69, "members": [84, 284, 583, 677, 786, 947, 954, 1112, 2222, 2223, 2224, 2225, 2226]}, {"cluster_id": 70, "members": [85, 401, 864, 1065, 2210, 2211, 2487, 2488]}, {"cluster_id": 71, "members": [86, 280, 897, 993, 1141, 1152, 1203, 1983, 2155, 2293, 2294, 2295]}, {"cluster_id": 72, "members": [88, 161, 191, 272, 382, 516, 841, 842, 851, 914, 1174, 1197, 1288, 1309, 1551, 1658, 1677, 1741, 1791, 1882, 1989, 2010, 2012, 2014, 2015, 2016, 2104, 2593]}, {"cluster_id": 73, "members": [89, 761, 884, 1087, 1153, 1157, 1401]}, {"cluster_id": 74, "members": [90, 155, 156, 221, 376, 672, 817, 1689, 1763, 2456]}, {"cluster_id": 75, "members": [91, 330, 574, 576, 665, 1018, 2001, 2002, 2044, 2123, 2348, 2378, 2379, 2380]}, {"cluster_id": 76, "members": [93, 100, 104, 117, 135, 139, 146, 150, 151, 154, 162, 164, 166, 169, 172, 178, 180, 182, 190, 198, 199, 212, 222, 223, 229, 233, 234, 237, 244, 245, 254, 259, 261, 264, 265, 266, 271, 273, 281, 292, 298, 300, 311, 319, 325, 328, 334, 351, 353, 356, 358, 360, 362, 368, 372, 374, 392, 413, 455, 460, 491, 550, 593, 595, 626, 654, 655, 690, 718, 758, 782, 821, 1036, 1132, 1201, 1259, 1680, 2056, 2149, 2311, 2342, 2377, 2532, 2537, 2562, 2576, 2617]}, {"cluster_id": 77, "members": [94, 195, 560, 585, 675, 774, 934, 1649, 2355, 2356, 2357, 2424, 2490, 2526]}, {"cluster_id": 78, "members": [95, 525, 861, 1397, 1540, 1628, 2057, 2180]}, {"cluster_id": 79, "members": [97, 661, 1028, 1353, 1883, 1884, 1885, 2134, 2644]}, {"cluster_id": 80, "members": [98, 772, 908, 1013, 1156, 1435, 1521, 1566, 1625, 1661, 1729, 1840, 1851, 1895, 2050, 2071, 2485]}, {"cluster_id": 81, "members": [102, 505, 937, 1180, 1251, 1304, 1448, 1560, 1871, 1878, 2256]}, {"cluster_id": 82, "members": [103, 134, 350, 406, 484, 608, 1775, 2081, 2505]}, {"cluster_id": 83, "members": [105, 1205, 1721, 1761, 2476, 2651]}, {"cluster_id": 84, "members": [107, 388, 971, 1113, 1360, 1478, 1650, 1899, 1902, 1904, 1949, 1960, 2491, 2510, 2602, 2603]}, {"cluster_id": 85, "members": [109, 133, 153, 318, 384, 452, 508, 556, 660, 1045, 1210, 1258, 1337, 1561, 1571, 1623, 1648, 1767, 1768, 1773, 1774, 1779, 1780, 1783, 1784, 1787, 1991, 2092, 2095, 2318, 2325]}, {"cluster_id": 86, "members": [110, 1161, 1599, 2279, 2441]}, {"cluster_id": 87, "members": [112, 426, 487, 1564, 1772, 1782, 1789, 2026, 2469]}, {"cluster_id": 88, "members": [114, 610, 826, 1263, 1407, 2287, 2288, 2481]}, {"cluster_id": 89, "members": [115, 277, 451, 630, 671, 880, 935, 973, 2019]}, {"cluster_id": 90, "members": [116, 130, 211, 1076, 1394, 1487, 1501, 2017, 2137, 2467, 2468]}, {"cluster_id": 91, "members": [118, 255, 260, 365, 404, 446, 554, 685, 743, 781, 795, 836, 903, 967, 989, 1029, 1170, 1343, 1375, 1476, 1645, 2030, 2031, 2112, 2166, 2316, 2402, 2403, 2436, 2572, 2586, 2659]}, {"cluster_id": 92, "members": [119, 132, 479, 517, 646, 904, 952, 1312, 
1959, 2105, 2621]}, {"cluster_id": 93, "members": [120, 483, 514, 816, 1842, 1980, 2281, 2405]}, {"cluster_id": 94, "members": [121, 250, 589, 980, 1158, 2251, 2252, 2429]}, {"cluster_id": 95, "members": [124, 236, 287, 294, 306, 329, 417, 474, 656, 699, 773, 852, 940, 958, 1009, 1070, 1072, 1181, 1193, 1245, 1367, 1411, 1459, 1572, 1584, 1640, 1656, 1771, 1778, 1781, 1800, 1801, 1804, 1805, 1829, 2047, 2080, 2088, 2089, 2090, 2218, 2289, 2327, 2328, 2389, 2392, 2451, 2452, 2478, 2598, 2705]}, {"cluster_id": 96, "members": [125, 458, 1022, 1276, 1555, 1953, 1962, 1963]}, {"cluster_id": 97, "members": [126, 215, 1248, 1852, 2045, 2046, 2048, 2079]}, {"cluster_id": 98, "members": [129, 393, 701, 735, 739, 1237, 1543, 1881, 1956, 1958, 2060, 2061, 2062, 2649]}, {"cluster_id": 99, "members": [131, 683, 834, 1547, 2168, 2169]}, {"cluster_id": 100, "members": [136, 741, 831, 1011, 1088, 1638, 1733, 2093]}, {"cluster_id": 101, "members": [137, 397, 879, 2144, 2329, 2331, 2504]}, {"cluster_id": 102, "members": [138, 337, 784, 960, 1499, 1735, 1776, 2078, 2324, 2650]}, {"cluster_id": 103, "members": [140, 444, 584, 623, 2463]}, {"cluster_id": 104, "members": [141, 622, 740, 788, 1002, 1951, 2033, 2204, 2521]}, {"cluster_id": 105, "members": [142, 227, 638, 808, 823, 924, 1283, 1318, 2382, 2383]}, {"cluster_id": 106, "members": [144, 145, 213, 537, 1165, 1593, 1698, 2192, 2622]}, {"cluster_id": 107, "members": [147, 242, 279, 285, 304, 502, 838, 1058, 1172, 1195, 1976, 2280]}, {"cluster_id": 108, "members": [148, 378, 380, 381, 450, 477, 602, 930, 2518, 2538, 2569]}, {"cluster_id": 109, "members": [157, 639, 811, 855, 900, 2493]}, {"cluster_id": 110, "members": [158, 184, 293, 520, 691, 791, 1270, 1336, 1615, 1665, 1889, 1890, 1891, 1893, 2032, 2034, 2035, 2037, 2038, 2039, 2042]}, {"cluster_id": 111, "members": [159, 176, 240, 256, 291, 379, 512, 591, 642, 756, 1017, 1078, 1423, 1472, 1514, 1692, 2187, 2302, 2308, 2472, 2559, 2652]}, {"cluster_id": 112, "members": [160, 267, 670, 1059, 1600, 2000, 2606]}, {"cluster_id": 113, "members": [165, 302, 320, 428, 432, 521, 598, 637, 968, 1126, 1442, 1473, 1597, 1603, 1823, 2462, 2707]}, {"cluster_id": 114, "members": [167, 168, 2437, 2438, 2482]}, {"cluster_id": 115, "members": [170, 609, 762, 859, 860, 1030, 1041, 1067, 1427, 1466, 1712, 2446, 2448]}, {"cluster_id": 116, "members": [171, 775, 790, 1548, 2075, 2076, 2077, 2667, 2668]}, {"cluster_id": 117, "members": [179, 197, 231, 387, 2191, 2412]}, {"cluster_id": 118, "members": [181, 712, 850, 1359, 1464, 2278]}, {"cluster_id": 119, "members": [185, 201, 297, 570, 874, 899, 1858, 2430, 2528]}, {"cluster_id": 120, "members": [186, 1228, 1319, 1391, 1536, 1672, 2580, 2702]}, {"cluster_id": 121, "members": [187, 492, 688, 726, 956, 1109, 1206, 1208, 1533, 2671, 2672, 2673, 2674, 2675, 2678]}, {"cluster_id": 122, "members": [188, 301, 439, 524, 552, 721, 853, 1034, 1169, 1642, 1727, 1731, 1755, 2492, 2657, 2658]}, {"cluster_id": 123, "members": [189, 564, 592, 1144, 1262, 1509, 2669]}, {"cluster_id": 124, "members": [200, 500, 792, 843, 1191, 1439, 1558, 2241, 2242, 2339, 2340, 2410, 2411, 2676]}, {"cluster_id": 125, "members": [204, 415, 818, 966, 1843, 1847, 1850, 1999]}, {"cluster_id": 126, "members": [207, 345, 390, 431, 571, 625, 696, 833, 1024, 1108, 1130, 1570, 1678, 1762, 2036, 2495, 2595, 2694, 2695]}, {"cluster_id": 127, "members": [209, 361, 856, 1390, 1449, 2681, 2683]}, {"cluster_id": 128, "members": [210, 566, 674, 717, 1614, 1626, 1671, 1906, 1907, 1968]}, {"cluster_id": 129, "members": 
[217, 243, 473, 1532, 1686, 2431, 2432, 2473, 2474, 2647, 2648]}, {"cluster_id": 130, "members": [218, 575, 1348, 1382, 2283, 2605]}, {"cluster_id": 131, "members": [219, 251, 253, 402, 507, 1211, 1300, 1932, 1933, 1936, 1940, 1941, 2508]}, {"cluster_id": 132, "members": [225, 694, 941, 974, 1496, 2255, 2516, 2517, 2533]}, {"cluster_id": 133, "members": [230, 511, 549, 1000, 1095, 1325, 1497, 2613]}, {"cluster_id": 134, "members": [238, 653, 1231, 1409, 1553, 1826, 1827, 1828]}, {"cluster_id": 135, "members": [239, 887, 910, 1220, 1274, 1490, 2085]}, {"cluster_id": 136, "members": [248, 443, 946, 1264, 1531, 1964, 1966, 1971, 2623]}, {"cluster_id": 137, "members": [249, 424, 433, 436, 621, 668, 878, 885, 1039, 1142, 1289, 1332, 1494, 1526, 1622, 1879, 1977, 1978, 1981, 2326, 2425, 2426]}, {"cluster_id": 138, "members": [252, 414, 711, 1285, 2111]}, {"cluster_id": 139, "members": [263, 296, 632, 732, 1103, 1123, 1430, 1480, 1718, 1748, 1886, 1887, 1888, 2685]}, {"cluster_id": 140, "members": [269, 321, 418, 1083, 1225, 1232, 1450, 2543, 2551, 2579]}, {"cluster_id": 141, "members": [270, 276, 314, 339, 348, 467, 495, 729, 804, 1073, 1086, 1136, 1244, 1568, 1575, 1585, 1588, 1590, 1596, 1602, 1636, 1641, 1643, 1647, 1945, 1993, 2074, 2369, 2546, 2607]}, {"cluster_id": 142, "members": [274, 748, 1145, 1374, 1550, 1617]}, {"cluster_id": 143, "members": [278, 1185, 1460, 1992, 2358, 2503, 2589]}, {"cluster_id": 144, "members": [283, 471, 600, 724, 1027, 1138, 1260, 1399, 1420, 1559, 2013, 2228, 2250, 2394, 2395, 2396, 2397, 2665, 2666, 2706]}, {"cluster_id": 145, "members": [289, 312, 489, 513, 657, 1254, 1722, 1740, 2390, 2391, 2440]}, {"cluster_id": 146, "members": [299, 1046, 1162, 1484, 1510, 2386, 2387, 2498, 2587]}, {"cluster_id": 147, "members": [305, 769, 942, 1218, 1923, 1924, 1927, 2116]}, {"cluster_id": 148, "members": [309, 377, 383, 481, 715, 733, 759, 862, 1164, 1192, 1265, 1287, 1424, 1591, 1621, 1704, 1813, 2231, 2291, 2292, 2303, 2541]}, {"cluster_id": 149, "members": [323, 847, 2022, 2188, 2189, 2190, 2193, 2194]}, {"cluster_id": 150, "members": [324, 342, 438, 478, 651, 787, 1512, 1667, 1984, 1985, 2023, 2024, 2145, 2156]}, {"cluster_id": 151, "members": [327, 692, 928, 1326, 1777, 1802, 2063, 2064, 2494, 2629]}, {"cluster_id": 152, "members": [336, 352, 1294, 1432, 1605, 1815, 1944, 1946, 1947, 1950, 2663]}, {"cluster_id": 153, "members": [338, 776, 1340, 1363, 1413, 1438, 1934, 1943, 2664]}, {"cluster_id": 154, "members": [340, 341, 529, 709, 1417, 1419, 2260]}, {"cluster_id": 155, "members": [349, 419, 465, 896, 1403, 2427, 2428]}, {"cluster_id": 156, "members": [359, 389, 905, 969, 1282, 1743]}, {"cluster_id": 157, "members": [363, 536, 567, 631, 760, 848, 985, 1003, 1373, 2459, 2554, 2689]}, {"cluster_id": 158, "members": [364, 650, 819, 1306, 1486, 1606, 1632, 1756, 2642]}, {"cluster_id": 159, "members": [366, 553, 572, 1110, 1120, 1127, 1357, 1583, 1995, 2025, 2028, 2170, 2171]}, {"cluster_id": 160, "members": [370, 371, 400, 720, 1354, 1408, 1414, 1415]}, {"cluster_id": 161, "members": [386, 597, 725, 1104, 1137, 2114, 2115, 2264, 2509, 2519, 2588]}, {"cluster_id": 162, "members": [394, 906, 1063, 1064, 1129, 1518, 2351, 2352, 2353]}, {"cluster_id": 163, "members": [399, 456, 544, 680, 1089, 1580, 1769, 2199, 2200, 2201, 2347]}, {"cluster_id": 164, "members": [403, 806, 1037, 1383, 2447, 2654, 2655]}, {"cluster_id": 165, "members": [407, 409, 667, 679, 695, 948, 1005, 1012, 1395, 1541, 1681, 1682, 1797, 2084, 2266, 2267, 2319, 2320, 2321, 2364]}, {"cluster_id": 166, 
"members": [410, 612, 802, 807, 1168, 1179, 1226, 1955, 2235]}, {"cluster_id": 167, "members": [425, 464, 526, 533, 580, 615, 1007, 1216, 1742, 1749, 1758, 2307, 2443, 2466]}, {"cluster_id": 168, "members": [427, 565, 1249, 1361, 1398, 1528, 1610, 2556, 2557, 2558]}, {"cluster_id": 169, "members": [430, 912, 983, 1261, 2130, 2249]}, {"cluster_id": 170, "members": [437, 645, 755, 891, 950, 1267, 1277, 1495, 1674, 1898, 1900, 2608]}, {"cluster_id": 171, "members": [445, 909, 959, 1023, 1347, 1434, 1453, 1870, 1873, 1876, 1986, 1987, 1990, 1994, 1996, 1997, 1998, 2003, 2004, 2005, 2007, 2008, 2301, 2529, 2568]}, {"cluster_id": 172, "members": [453, 635, 1278, 1378, 1544, 1818, 2058, 2150, 2183, 2305, 2306, 2552]}, {"cluster_id": 173, "members": [461, 1021, 1268, 1869, 2309]}, {"cluster_id": 174, "members": [462, 611, 714, 783, 810, 893, 984, 1015, 1048, 1068, 1125, 1143, 1324, 1385, 1425, 1452, 1474, 1481, 1618, 1669, 1892, 2265, 2332, 2335, 2690]}, {"cluster_id": 175, "members": [468, 573, 643, 1167, 1311, 1313, 2407]}, {"cluster_id": 176, "members": [506, 778, 779, 822, 921, 1074, 1163, 1352, 1370, 1402, 1482, 1485, 1525, 1539, 1592, 1637, 1675, 1676, 1905, 1914, 1918, 1919, 1973, 2132]}, {"cluster_id": 177, "members": [509, 846, 939, 1101, 1242, 2136, 2146, 2148, 2173, 2174]}, {"cluster_id": 178, "members": [515, 798, 828, 1102, 1207, 1793, 1795, 1817, 2290, 2464, 2465, 2547]}, {"cluster_id": 179, "members": [528, 678, 803, 830, 2100, 2101]}, {"cluster_id": 180, "members": [534, 629, 883, 933, 1038]}, {"cluster_id": 181, "members": [546, 870, 1119, 1235, 1273, 1462, 1608, 1611, 1663, 2124, 2125, 2126, 2127, 2128, 2129]}, {"cluster_id": 182, "members": [548, 825, 944, 1523, 1765, 1770, 1937, 1938, 2138, 2449]}, {"cluster_id": 183, "members": [555, 1463, 1695, 2131, 2133]}, {"cluster_id": 184, "members": [562, 742, 754, 902, 1077, 1284, 1620, 2450]}, {"cluster_id": 185, "members": [586, 682, 780, 925, 1222, 1224, 1291, 1587, 1660, 1732, 2119, 2154, 2341]}, {"cluster_id": 186, "members": [601, 713, 809, 812, 814, 1044, 1365, 2229]}, {"cluster_id": 187, "members": [607, 658, 785, 1084, 1213, 1290, 1513, 1664, 1812, 2147, 2258, 2486, 2550]}, {"cluster_id": 188, "members": [614, 888, 1475, 1556, 2470]}, {"cluster_id": 189, "members": [620, 923, 1186, 2009, 2534, 2535, 2574]}, {"cluster_id": 190, "members": [634, 753, 890, 1227, 1269, 1314, 1630, 1880, 2227]}, {"cluster_id": 191, "members": [641, 868, 988, 1341, 1844, 2276, 2704]}, {"cluster_id": 192, "members": [662, 832, 932, 1469, 1929, 2346, 2349, 2590, 2600]}, {"cluster_id": 193, "members": [693, 707, 763, 1511, 1519, 1635, 2108]}, {"cluster_id": 194, "members": [703, 801, 1915, 1961, 2497]}, {"cluster_id": 195, "members": [704, 1412, 1483, 2113, 2208]}, {"cluster_id": 196, "members": [727, 1151, 1350, 1386, 2570]}, {"cluster_id": 197, "members": [734, 736, 1006, 1302, 1388, 1631, 1841, 2196, 2197, 2202, 2277]}, {"cluster_id": 198, "members": [746, 916, 1026, 1303, 1849, 2006, 2163, 2477, 2531]}, {"cluster_id": 199, "members": [752, 957, 1557, 2083, 2167]}, {"cluster_id": 200, "members": [793, 840, 1896, 1897, 1903, 2592]}, {"cluster_id": 201, "members": [797, 800, 820, 824, 889, 915, 931, 1035, 1433, 1830, 1831, 1832, 1833, 1834]}, {"cluster_id": 202, "members": [799, 961, 1728, 2257, 2338, 2555, 2599]}, {"cluster_id": 203, "members": [837, 996, 1071, 1135, 1194, 1255, 1422, 1461, 1545, 2248, 2670]}, {"cluster_id": 204, "members": [867, 871, 1229, 1252, 1308, 1504, 1506, 1507, 1627, 2442, 2522, 2656]}, {"cluster_id": 205, "members": [886, 
1295, 1400, 1573, 2697, 2698, 2700]}, {"cluster_id": 206, "members": [917, 1846, 1867, 2117, 2639]}, {"cluster_id": 207, "members": [918, 1098, 1477, 2065, 2066, 2067]}, {"cluster_id": 208, "members": [922, 1307, 1406, 1444, 2536]}, {"cluster_id": 209, "members": [953, 972, 2565, 2566, 2567]}, {"cluster_id": 210, "members": [979, 1200, 1366, 2207, 2373, 2413, 2414, 2415]}, {"cluster_id": 211, "members": [981, 1054, 1055, 1099, 1159, 1221, 1334, 1577]}, {"cluster_id": 212, "members": [995, 1338, 1609, 1790, 1912, 2102, 2103, 2152, 2304, 2385, 2404]}, {"cluster_id": 213, "members": [997, 1204, 1330, 1837, 2068]}, {"cluster_id": 214, "members": [1082, 1202, 1428, 1429, 1534, 2336, 2520, 2524]}, {"cluster_id": 215, "members": [1085, 1396, 1421, 1975, 2110]}, {"cluster_id": 216, "members": [1115, 1178, 1266, 1529, 1673, 1908, 2350, 2660]}, {"cluster_id": 217, "members": [1133, 1500, 1538, 2109, 2185]}, {"cluster_id": 218, "members": [1184, 1190, 1239, 1387, 2611]}, {"cluster_id": 219, "members": [1240, 1443, 2236, 2237, 2262, 2542]}, {"cluster_id": 220, "members": [1368, 1470, 1824, 1911, 2553]}, {"cluster_id": 221, "members": [1381, 1794, 1796, 2297, 2298]}, {"cluster_id": 222, "members": [1502, 1503, 1595, 2243, 2244, 2245, 2246]}, {"cluster_id": 223, "members": [1549, 1901, 2284, 2285, 2286]}, {"cluster_id": 224, "members": [1554, 1657, 2686, 2687, 2688]}]}
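Note: the seeds files in this commit share the layout visible above — a list of {"cluster_id": ..., "members": [...]} entries under an enclosing key. A minimal loading sketch (the file name and the "clusters" key are assumptions, inferred from the loader scripts further below):

import json
obj = json.load(open("seeds_diam_1e-6_coarsen.json"))   # hypothetical path
clusters = obj["clusters"]                              # assumed enclosing key
top = max(clusters, key=lambda c: len(c["members"]))   # largest cluster by member count
print(top["cluster_id"], len(top["members"]))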
seeds_diam_1e-8.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:38713bab14cb6b5def749b13d955bd7c5b69d807eea4a3491f2a2b203a37acec
+ size 13141004
seeds_diam_1e0.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3720e74552fda80710fa03addc8b6586425185da4ed281151fe65498a30aa514
+ size 13146594
seeds_diam_1e3.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:46249fc1669b3e62bafbd256451fae4e9253c7e65a568349cf17d8dc4ea73919
+ size 13156713
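Each Git LFS pointer above records the SHA-256 and byte size of the real payload, so a fetched file can be verified against its pointer — a minimal sketch (path hypothetical):

import hashlib
from pathlib import Path
blob = Path("seeds_diam_1e3.json").read_bytes()
print(len(blob))                          # should equal the pointer's `size`
print(hashlib.sha256(blob).hexdigest())   # should equal the pointer's `oid sha256:` value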
seeds_diam_1e4.json ADDED
The diff for this file is too large to render. See raw diff
 
seeds_lambda2_1e-6.json ADDED
The diff for this file is too large to render. See raw diff
 
src/.ipynb_checkpoints/2.2_lrmc_bilevel-checkpoint.py ADDED
@@ -0,0 +1,325 @@
+ # 2.2_lrmc_bilevel.py
+ # Top-1 LRMC ablation with debug guards so differences between seeds files are visible.
+ # Requires: torch, torch_geometric, torch_scatter, torch_sparse
+
+ import argparse, json, hashlib
+ from pathlib import Path
+ from typing import List, Tuple, Optional
+
+ import torch
+ import torch.nn as nn
+ import torch.nn.functional as F
+ from torch import Tensor
+
+ from torch_scatter import scatter_mean
+ from torch_sparse import coalesce, spspmm
+ from torch_geometric.datasets import Planetoid
+ from torch_geometric.nn import GCNConv
+
+
+ # ---------------------------
+ # Utilities: edges and seeds
+ # ---------------------------
+
+ def add_scaled_self_loops(edge_index: Tensor,
+                           edge_weight: Optional[Tensor],
+                           num_nodes: int,
+                           scale: float = 1.0) -> Tuple[Tensor, Tensor]:
+     if scale == 0.0:
+         if edge_weight is None:
+             edge_weight = torch.ones(edge_index.size(1), device=edge_index.device)
+         return edge_index, edge_weight
+     device = edge_index.device
+     self_loops = torch.arange(num_nodes, device=device)
+     self_index = torch.stack([self_loops, self_loops], dim=0)
+     self_weight = torch.full((num_nodes,), float(scale), device=device)
+     base_w = edge_weight if edge_weight is not None else torch.ones(edge_index.size(1), device=device)
+     ei = torch.cat([edge_index, self_index], dim=1)
+     ew = torch.cat([base_w, self_weight], dim=0)
+     ei, ew = coalesce(ei, ew, num_nodes, num_nodes, op='add')
+     return ei, ew
+
+
+ def adjacency_power(edge_index: Tensor, num_nodes: int, k: int = 2) -> Tensor:
+     # A^2 using spspmm; return binary, coalesced, no self loops
+     row, col = edge_index
+     val = torch.ones(row.numel(), device=edge_index.device)
+     Ai, Av = edge_index, val
+     Ri, _ = spspmm(Ai, Av, Ai, Av, num_nodes, num_nodes, num_nodes)
+     mask = Ri[0] != Ri[1]
+     Ri = Ri[:, mask]
+     Ri, _ = coalesce(Ri, torch.ones(Ri.size(1), device=edge_index.device), num_nodes, num_nodes, op='add')
+     return Ri
+
+
+ def build_cluster_graph(edge_index: Tensor,
+                         num_nodes: int,
+                         node2cluster: Tensor,
+                         weight_per_edge: Optional[Tensor] = None,
+                         num_clusters: Optional[int] = None
+                         ) -> Tuple[Tensor, Tensor, int]:
+     K = int(node2cluster.max().item()) + 1 if num_clusters is None else num_clusters
+     src, dst = edge_index
+     csrc = node2cluster[src]
+     cdst = node2cluster[dst]
+     edge_c = torch.stack([csrc, cdst], dim=0)
+     w = weight_per_edge if weight_per_edge is not None else torch.ones(edge_c.size(1), device=edge_c.device)
+     edge_c, w = coalesce(edge_c, w, K, K, op='add')
+     return edge_c, w, K
+
+
+ # -----
+ # Seeds
+ # -----
+
+ def _md5(path: Path) -> str:
+     h = hashlib.md5()
+     with path.open('rb') as f:
+         for chunk in iter(lambda: f.read(8192), b''):
+             h.update(chunk)
+     return h.hexdigest()
+
+
+ def _extract_members(cluster_obj: dict) -> List[int]:
+     """
+     Try 'members' first, then 'seed_nodes'. Raise if neither works.
+     """
+     m = cluster_obj.get("members", None)
+     if isinstance(m, list) and len(m) > 0:
+         return list(dict.fromkeys(int(x) for x in m))  # dedupe, preserve order
+     m2 = cluster_obj.get("seed_nodes", None)
+     if isinstance(m2, list) and len(m2) > 0:
+         return list(dict.fromkeys(int(x) for x in m2))
+     # If both are present but empty, return empty; caller will handle.
+     if isinstance(m, list) or isinstance(m2, list):
+         return []
+     raise KeyError("Cluster object has neither 'members' nor 'seed_nodes'.")
+
+
+ def _pick_top1_cluster(obj: dict) -> List[int]:
+     """
+     From {"clusters":[{..., "score":float, "members" or "seed_nodes"}, ...]},
+     choose max by (score, size). Returns deduped member list.
+     """
+     clusters = obj.get("clusters", [])
+     if not isinstance(clusters, list) or len(clusters) == 0:
+         return []
+     def keyfun(c):
+         score = float(c.get("score", 0.0))
+         try:
+             mem = _extract_members(c)
+         except KeyError:
+             mem = []
+         return (score, len(mem))
+     best = max(clusters, key=keyfun)
+     try:
+         members = _extract_members(best)
+     except KeyError:
+         members = []
+     return sorted(set(int(x) for x in members))
+
+
+ def load_top1_assignment(seeds_json: str, n_nodes: int, debug: bool = False) -> Tuple[Tensor, Tensor, dict]:
+     """
+     Hard assignment for top-1 LRMC cluster:
+       cluster 0 = top cluster; others are singletons.
+     Returns node2cluster[N], cluster_scores[K,1], and a small debug dict.
+     """
+     p = Path(seeds_json)
+     obj = json.loads(p.read_text(encoding='utf-8'))
+
+     C_star = _pick_top1_cluster(obj)
+     if len(C_star) > 0 and max(C_star) == n_nodes:
+         # Looks 1-indexed (since max == N, not N-1) → shift down by 1
+         C_star = [u - 1 for u in C_star]
+     C_star = torch.tensor(C_star, dtype=torch.long)
+
+     node2cluster = torch.full((n_nodes,), -1, dtype=torch.long)
+
+     if C_star.numel() == 0:
+         # FAIL LOUDLY instead of silently falling back to identity
+         raise RuntimeError(
+             f"No members found for top-1 cluster in {seeds_json}. "
+             f"Expected 'members' or 'seed_nodes' to be non-empty."
+         )
+
+     node2cluster[C_star] = 0
+     outside = torch.tensor(sorted(set(range(n_nodes)) - set(C_star.tolist())), dtype=torch.long)
+     if outside.numel() > 0:
+         node2cluster[outside] = torch.arange(1, 1 + outside.numel(), dtype=torch.long)
+     assert int(node2cluster.min()) >= 0
+
+     K = 1 + outside.numel()
+     cluster_scores = torch.zeros(K, 1, dtype=torch.float32)
+     cluster_scores[0, 0] = 1.0
+
+     info = {
+         "json_md5": _md5(p),
+         "top_cluster_size": int(C_star.numel()),
+         "K": int(K),
+         "n_outside": int(outside.numel()),
+         "first_members": [int(x) for x in C_star[:10].tolist()],
+     }
+     if debug:
+         print(f"[LRMC] Loaded {seeds_json} (md5={info['json_md5']}) | "
+               f"top_size={info['top_cluster_size']} K={info['K']} outside={info['n_outside']} "
+               f"first10={info['first_members']}")
+     return node2cluster, cluster_scores, info
+
+
+ # --------------------------
+ # Models (baseline + pooled)
+ # --------------------------
+
+ class GCN2(nn.Module):
+     def __init__(self, in_dim, hid, out_dim, dropout=0.5):
+         super().__init__()
+         self.conv1 = GCNConv(in_dim, hid)
+         self.conv2 = GCNConv(hid, out_dim)
+         self.dropout = dropout
+
+     def forward(self, x, edge_index):
+         x = F.relu(self.conv1(x, edge_index))
+         x = F.dropout(x, p=self.dropout, training=self.training)
+         x = self.conv2(x, edge_index)
+         return x
+
+
+ class OneClusterPool(nn.Module):
+     def __init__(self,
+                  in_dim: int,
+                  hid: int,
+                  out_dim: int,
+                  node2cluster: Tensor,
+                  edge_index_node: Tensor,
+                  num_nodes: int,
+                  self_loop_scale: float = 0.0,
+                  use_a2_for_clusters: bool = False,
+                  debug_header: str = ""):
+         super().__init__()
+         self.n2c = node2cluster.long()
+         self.K = int(self.n2c.max().item()) + 1
+
+         # Node graph (A + λI if desired)
+         ei_node = edge_index_node
+         ei_node, ew_node = add_scaled_self_loops(ei_node, None, num_nodes, scale=self_loop_scale)
+         self.register_buffer("edge_index_node", ei_node)
+         self.register_buffer("edge_weight_node", ew_node)
+
+         # Cluster graph
+         ei_for_c = adjacency_power(edge_index_node, num_nodes, k=2) if use_a2_for_clusters else edge_index_node
+         edge_index_c, edge_weight_c, K = build_cluster_graph(ei_for_c, num_nodes, self.n2c)
+         self.register_buffer("edge_index_c", edge_index_c)
+         self.register_buffer("edge_weight_c", edge_weight_c)
+         self.K = K
+
+         if debug_header:
+             print(f"[POOL] {debug_header} | cluster_edges={edge_index_c.size(1)} (K={K})")
+
+         # Layers
+         self.gcn_node1 = GCNConv(in_dim, hid, add_self_loops=False, normalize=True)
+         self.gcn_cluster = GCNConv(hid, hid, add_self_loops=True, normalize=True)
+         self.gcn_node2 = GCNConv(hid * 2, out_dim)  # concat [h_node, h_broadcast]
+
+     def forward(self, x: Tensor, edge_index_node: Tensor) -> Tensor:
+         h1 = F.relu(self.gcn_node1(x, self.edge_index_node, self.edge_weight_node))
+         z = scatter_mean(h1, self.n2c, dim=0, dim_size=self.K)   # [K, H]
+         z2 = F.relu(self.gcn_cluster(z, self.edge_index_c, self.edge_weight_c))
+         hb = z2[self.n2c]                                        # [N, H]
+         hcat = torch.cat([h1, hb], dim=1)                        # [N, 2H]
+         out = self.gcn_node2(hcat, edge_index_node)
+         return out
+
+
+ # -------------
+ # Training glue
+ # -------------
+
+ @torch.no_grad()
+ def accuracy(logits: Tensor, y: Tensor, mask: Tensor) -> float:
+     pred = logits[mask].argmax(dim=1)
+     return (pred == y[mask]).float().mean().item()
+
+
+ def run_train_eval(model: nn.Module, data, epochs=200, lr=0.01, wd=5e-4):
+     opt = torch.optim.Adam(model.parameters(), lr=lr, weight_decay=wd)
+     best_val, best_state = 0.0, None
+     for ep in range(1, epochs + 1):
+         model.train()
+         opt.zero_grad(set_to_none=True)
+         logits = model(data.x, data.edge_index)
+         loss = F.cross_entropy(logits[data.train_mask], data.y[data.train_mask])
+         loss.backward(); opt.step()
+
+         model.eval()
+         logits = model(data.x, data.edge_index)
+         val_acc = accuracy(logits, data.y, data.val_mask)
+         if val_acc > best_val:
+             best_val, best_state = val_acc, {k: v.detach().clone() for k, v in model.state_dict().items()}
+         if ep % 20 == 0:
+             tr = accuracy(logits, data.y, data.train_mask)
+             te = accuracy(logits, data.y, data.test_mask)
+             print(f"[{ep:04d}] loss={loss.item():.4f} train={tr:.3f} val={val_acc:.3f} test={te:.3f}")
+
+     if best_state is not None:
+         model.load_state_dict(best_state)
+     model.eval()
+     logits = model(data.x, data.edge_index)
+     return {"val": accuracy(logits, data.y, data.val_mask),
+             "test": accuracy(logits, data.y, data.test_mask)}
+
+
+ # -----------
+ # Entrypoint
+ # -----------
+
+ def main():
+     ap = argparse.ArgumentParser()
+     ap.add_argument("--dataset", required=True, choices=["Cora", "Citeseer", "Pubmed"])
+     ap.add_argument("--seeds", required=True, help="Path to LRMC seeds JSON (single large graph).")
+     ap.add_argument("--variant", choices=["baseline", "pool"], default="pool")
+     ap.add_argument("--hidden", type=int, default=128)
+     ap.add_argument("--epochs", type=int, default=200)
+     ap.add_argument("--lr", type=float, default=0.01)
+     ap.add_argument("--wd", type=float, default=5e-4)
+     ap.add_argument("--dropout", type=float, default=0.5)  # baseline only
+     ap.add_argument("--self_loop_scale", type=float, default=0.0)
+     ap.add_argument("--use_a2", action="store_true", help="Use A^2 for the cluster graph.")
+     ap.add_argument("--seed", type=int, default=42)
+     ap.add_argument("--debug", action="store_true", help="Print seeds md5, cluster size, K, etc.")
+     args = ap.parse_args()
+
+     torch.manual_seed(args.seed)
+
+     ds = Planetoid(root=f"./data/{args.dataset}", name=args.dataset)
+     data = ds[0]
+     in_dim, out_dim, n = ds.num_node_features, ds.num_classes, data.num_nodes
+
+     if args.variant == "baseline":
+         model = GCN2(in_dim, args.hidden, out_dim, dropout=args.dropout)
+         res = run_train_eval(model, data, epochs=args.epochs, lr=args.lr, wd=args.wd)
+         print(f"Baseline GCN: val={res['val']:.4f} test={res['test']:.4f}")
+         return
+
+     # pool variant
+     node2cluster, _, info = load_top1_assignment(args.seeds, n, debug=args.debug)
+     dbg_header = f"seeds_md5={info['json_md5']} top_size={info['top_cluster_size']} K={info['K']}"
+
+     model = OneClusterPool(in_dim=in_dim,
+                            hid=args.hidden,
+                            out_dim=out_dim,
+                            node2cluster=node2cluster,
+                            edge_index_node=data.edge_index,
+                            num_nodes=n,
+                            self_loop_scale=args.self_loop_scale,
+                            use_a2_for_clusters=args.use_a2,
+                            debug_header=dbg_header)
+
+     res = run_train_eval(model, data, epochs=args.epochs, lr=args.lr, wd=args.wd)
+     print(f"L-RMC (top-1 pool): val={res['val']:.4f} test={res['test']:.4f}")
+
+
+ if __name__ == "__main__":
+     main()
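For clarity, a standalone toy of the hard assignment this script builds (numbers illustrative, not from the repo): the top LRMC cluster becomes cluster 0 and every remaining node a singleton.

import torch
n_nodes = 6
C_star = torch.tensor([1, 3, 4])                  # hypothetical top cluster
node2cluster = torch.full((n_nodes,), -1, dtype=torch.long)
node2cluster[C_star] = 0                          # top cluster -> id 0
outside = torch.tensor(sorted(set(range(n_nodes)) - set(C_star.tolist())))
node2cluster[outside] = torch.arange(1, 1 + outside.numel())
print(node2cluster.tolist())                      # [1, 0, 2, 0, 0, 3] -> K = 4 clusters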
src/.ipynb_checkpoints/2.3_lrmc_bilevel-checkpoint.py ADDED
@@ -0,0 +1,436 @@
+ # 2.3_lrmc_bilevel.py
+ # Top-1 LRMC ablation with: cluster refinement (k-core), gated residual fusion,
+ # sparsified cluster graph (drop self-loops + per-row top-k), and A + γA² mix.
+ # Requires: torch, torch_geometric, torch_scatter, torch_sparse
+
+ import argparse, json, hashlib
+ from pathlib import Path
+ from typing import List, Tuple, Optional
+
+ import torch
+ import torch.nn as nn
+ import torch.nn.functional as F
+ from torch import Tensor
+
+ from torch_scatter import scatter_mean
+ from torch_sparse import coalesce, spspmm
+ from torch_geometric.datasets import Planetoid
+ from torch_geometric.nn import GCNConv
+
+
+ # ---------------------------
+ # Utilities: edges and seeds
+ # ---------------------------
+
+ def add_scaled_self_loops(edge_index: Tensor,
+                           edge_weight: Optional[Tensor],
+                           num_nodes: int,
+                           scale: float = 1.0) -> Tuple[Tensor, Tensor]:
+     if scale == 0.0:
+         if edge_weight is None:
+             edge_weight = torch.ones(edge_index.size(1), device=edge_index.device)
+         return edge_index, edge_weight
+     device = edge_index.device
+     self_loops = torch.arange(num_nodes, device=device)
+     self_index = torch.stack([self_loops, self_loops], dim=0)
+     self_weight = torch.full((num_nodes,), float(scale), device=device)
+     base_w = edge_weight if edge_weight is not None else torch.ones(edge_index.size(1), device=device)
+     ei = torch.cat([edge_index, self_index], dim=1)
+     ew = torch.cat([base_w, self_weight], dim=0)
+     ei, ew = coalesce(ei, ew, num_nodes, num_nodes, op='add')
+     return ei, ew
+
+
+ def adjacency_power(edge_index: Tensor, num_nodes: int, k: int = 2) -> Tensor:
+     # A^2 using spspmm; return binary, coalesced, no self loops
+     row, col = edge_index
+     val = torch.ones(row.numel(), device=edge_index.device)
+     Ai, Av = edge_index, val
+     Ri, _ = spspmm(Ai, Av, Ai, Av, num_nodes, num_nodes, num_nodes)
+     mask = Ri[0] != Ri[1]
+     Ri = Ri[:, mask]
+     Ri, _ = coalesce(Ri, torch.ones(Ri.size(1), device=edge_index.device), num_nodes, num_nodes, op='add')
+     return Ri
+
+
+ def _md5(path: Path) -> str:
+     h = hashlib.md5()
+     with path.open('rb') as f:
+         for chunk in iter(lambda: f.read(8192), b''):
+             h.update(chunk)
+     return h.hexdigest()
+
+
+ # -----
+ # Seeds
+ # -----
+
+ def _extract_members(cluster_obj: dict) -> List[int]:
+     m = cluster_obj.get("members", None)
+     if isinstance(m, list) and len(m) > 0:
+         return list(dict.fromkeys(int(x) for x in m))
+     m2 = cluster_obj.get("seed_nodes", None)
+     if isinstance(m2, list) and len(m2) > 0:
+         return list(dict.fromkeys(int(x) for x in m2))
+     if isinstance(m, list) or isinstance(m2, list):
+         return []
+     raise KeyError("Cluster object has neither 'members' nor 'seed_nodes'.")
+
+
+ def _pick_top1_cluster(obj: dict) -> List[int]:
+     clusters = obj.get("clusters", [])
+     if not isinstance(clusters, list) or len(clusters) == 0:
+         return []
+     def keyfun(c):
+         score = float(c.get("score", 0.0))
+         try:
+             mem = _extract_members(c)
+         except KeyError:
+             mem = []
+         return (score, len(mem))
+     best = max(clusters, key=keyfun)
+     try:
+         members = _extract_members(best)
+     except KeyError:
+         members = []
+     return sorted(set(int(x) for x in members))
+
+
+ def refine_k_core(C_star: List[int], edge_index: Tensor, k: int = 2, rounds: int = 50) -> List[int]:
+     """Refine cluster by taking a k-core of its induced subgraph (label-free purity boost)."""
+     if k <= 0 or len(C_star) == 0:
+         return C_star
+     device = edge_index.device
+     S = torch.tensor(sorted(set(C_star)), device=device, dtype=torch.long)
+     inS = torch.zeros(int(edge_index.max().item()) + 1, dtype=torch.bool, device=device)
+     inS[S] = True
+     ei = edge_index
+     for _ in range(rounds):
+         u, v = ei[0], ei[1]
+         mask_int = inS[u] & inS[v]
+         u_int, v_int = u[mask_int], v[mask_int]
+         if u_int.numel() == 0:
+             break
+         deg = torch.zeros_like(inS, dtype=torch.long)
+         deg.scatter_add_(0, u_int, torch.ones_like(u_int, dtype=torch.long))
+         deg.scatter_add_(0, v_int, torch.ones_like(v_int, dtype=torch.long))
+         keep = inS.clone()
+         kill = (deg < k) & inS
+         if not kill.any():
+             break
+         keep[kill] = False
+         if keep.sum() == inS.sum():
+             break
+         inS = keep
+     out = torch.nonzero(inS, as_tuple=False).view(-1).tolist()
+     # return only nodes that were originally in C_star
+     return sorted(set(out).intersection(set(C_star)))
+
+
+ def load_top1_assignment(seeds_json: str, n_nodes: int,
+                          debug: bool = False,
+                          refine_k: int = 0,
+                          edge_index_for_refine: Optional[Tensor] = None) -> Tuple[Tensor, Tensor, dict]:
+     """
+     Hard assignment for top-1 LRMC cluster with optional k-core refinement.
+     cluster 0 = top cluster; others are singletons.
+     """
+     p = Path(seeds_json)
+     obj = json.loads(p.read_text(encoding='utf-8'))
+     C_star = _pick_top1_cluster(obj)
+     if len(C_star) > 0 and max(C_star) == n_nodes:
+         # 1-indexed → shift down
+         C_star = [u - 1 for u in C_star]
+
+     if refine_k > 0:
+         if edge_index_for_refine is None:
+             raise ValueError("--refine_k requires access to edge_index for refinement.")
+         C_star = refine_k_core(C_star, edge_index_for_refine, k=refine_k)
+
+     C = torch.tensor(C_star, dtype=torch.long)
+     if C.numel() == 0:
+         raise RuntimeError(
+             f"No members found for top-1 cluster in {seeds_json}. "
+             f"Expected 'members' or 'seed_nodes' to be non-empty."
+         )
+
+     node2cluster = torch.full((n_nodes,), -1, dtype=torch.long)
+     node2cluster[C] = 0
+     outside = torch.tensor(sorted(set(range(n_nodes)) - set(C.tolist())), dtype=torch.long)
+     if outside.numel() > 0:
+         node2cluster[outside] = torch.arange(1, 1 + outside.numel(), dtype=torch.long)
+
+     K = 1 + outside.numel()
+     cluster_scores = torch.zeros(K, 1, dtype=torch.float32)
+     cluster_scores[0, 0] = 1.0
+
+     info = {
+         "json_md5": _md5(p),
+         "top_cluster_size": int(C.numel()),
+         "K": int(K),
+         "n_outside": int(outside.numel()),
+         "first_members": [int(x) for x in C[:10].tolist()],
+     }
+     if debug:
+         print(f"[LRMC] Loaded {seeds_json} (md5={info['json_md5']}) | "
+               f"top_size={info['top_cluster_size']} K={info['K']} outside={info['n_outside']} "
+               f"first10={info['first_members']}")
+     return node2cluster, cluster_scores, info
+
+
+ # ---------------------------
+ # Cluster graph construction
+ # ---------------------------
+
+ def _sparsify_topk(edge_index: Tensor, edge_weight: Tensor, K: int, topk: int) -> Tuple[Tensor, Tensor]:
+     """Keep per-row top-k neighbors by weight; symmetrize and coalesce."""
+     if topk <= 0:
+         return edge_index, edge_weight
+     row, col = edge_index
+     keep = torch.zeros(edge_weight.numel(), dtype=torch.bool, device=edge_weight.device)
+     # simple per-row loop (K ~ 2k is fine)
+     for r in range(K):
+         idx = (row == r).nonzero(as_tuple=False).view(-1)
+         if idx.numel():
+             k = min(topk, idx.numel())
+             _, order = torch.topk(edge_weight[idx], k)
+             keep[idx[order]] = True
+     ei = edge_index[:, keep]
+     ew = edge_weight[keep]
+     # symmetrize
+     rev = torch.stack([ei[1], ei[0]], dim=0)
+     ei2 = torch.cat([ei, rev], dim=1)
+     ew2 = torch.cat([ew, ew], dim=0)
+     ei2, ew2 = coalesce(ei2, ew2, K, K, op='max')
+     return ei2, ew2
+
+
+ def build_cluster_graph_mixed(edge_index_node: Tensor,
+                               num_nodes: int,
+                               node2cluster: Tensor,
+                               use_a2: bool,
+                               a2_gamma: float,
+                               drop_self_loops: bool,
+                               topk_per_row: int) -> Tuple[Tensor, Tensor, int]:
+     """
+     Build A_c = S^T (A + γ A²) S, optionally drop diag, then per-row top-k sparsify.
+     """
+     device = edge_index_node.device
+     # combine A and γA² at node level
+     row, col = edge_index_node
+     wA = torch.ones(row.numel(), device=device)
+     e_all = edge_index_node
+     w_all = wA
+     if use_a2 and a2_gamma > 0.0:
+         A2 = adjacency_power(edge_index_node, num_nodes, k=2)
+         wA2 = torch.full((A2.size(1),), float(a2_gamma), device=device)
+         e_all = torch.cat([e_all, A2], dim=1)
+         w_all = torch.cat([w_all, wA2], dim=0)
+
+     # project to clusters: S^T * (⋅) * S
+     K = int(node2cluster.max().item()) + 1
+     src, dst = e_all
+     csrc = node2cluster[src]
+     cdst = node2cluster[dst]
+     eC = torch.stack([csrc, cdst], dim=0)
+     eC, wC = coalesce(eC, w_all, K, K, op='add')
+
+     if drop_self_loops:
+         mask = eC[0] != eC[1]
+         eC, wC = eC[:, mask], wC[mask]
+
+     if topk_per_row > 0:
+         eC, wC = _sparsify_topk(eC, wC, K, topk_per_row)
+
+     return eC, wC, K
+
+
+ # --------------------------
+ # Models (baseline + pooled)
+ # --------------------------
+
+ class GCN2(nn.Module):
+     def __init__(self, in_dim, hid, out_dim, dropout=0.5):
+         super().__init__()
+         self.conv1 = GCNConv(in_dim, hid)
+         self.conv2 = GCNConv(hid, out_dim)
+         self.dropout = dropout
+
+     def forward(self, x, edge_index):
+         x = F.relu(self.conv1(x, edge_index))
+         x = F.dropout(x, p=self.dropout, training=self.training)
+         x = self.conv2(x, edge_index)
+         return x
+
+
+ class OneClusterPoolGated(nn.Module):
+     """
+     Node-GCN -> pool (means) -> Cluster-GCN over sparsified A_c -> residual gate -> Node-GCN -> logits
+     """
+     def __init__(self,
+                  in_dim: int,
+                  hid: int,
+                  out_dim: int,
+                  node2cluster: Tensor,
+                  edge_index_node: Tensor,
+                  num_nodes: int,
+                  self_loop_scale: float = 0.0,
+                  use_a2_for_clusters: bool = False,
+                  a2_gamma: float = 0.2,
+                  drop_cluster_self_loops: bool = True,
+                  cluster_topk: int = 24,
+                  debug_header: str = ""):
+         super().__init__()
+         self.n2c = node2cluster.long()
+         self.K = int(self.n2c.max().item()) + 1
+
+         # Node graph (A + λI)
+         ei_node = edge_index_node
+         ei_node, ew_node = add_scaled_self_loops(ei_node, None, num_nodes, scale=self_loop_scale)
+         self.register_buffer("edge_index_node", ei_node)
+         self.register_buffer("edge_weight_node", ew_node)
+
+         # Cluster graph: A_c = S^T (A + γA²) S → drop diag → per-row top-k
+         eC, wC, K = build_cluster_graph_mixed(
+             edge_index_node, num_nodes, self.n2c,
+             use_a2=use_a2_for_clusters, a2_gamma=a2_gamma,
+             drop_self_loops=drop_cluster_self_loops, topk_per_row=cluster_topk
+         )
+         self.register_buffer("edge_index_c", eC)
+         self.register_buffer("edge_weight_c", wC)
+         self.K = K
+
+         if debug_header:
+             print(f"[POOL] {debug_header} | cluster_edges={eC.size(1)} (K={K})")
+
+         # Layers: gated residual fusion
+         self.gcn_node1 = GCNConv(in_dim, hid, add_self_loops=False, normalize=True)
+         self.gcn_cluster = GCNConv(hid, hid, add_self_loops=True, normalize=True)
+         self.down = nn.Linear(hid, hid)
+         self.gate = nn.Sequential(nn.Linear(2*hid, hid//2), nn.ReLU(), nn.Linear(hid//2, 1))
+         self.lambda_logit = nn.Parameter(torch.tensor(0.0))
+         self.gcn_node2 = GCNConv(hid, out_dim)  # final node conv on gated residual
+
+     def forward(self, x: Tensor, edge_index_node: Tensor) -> Tensor:
+         # node step
+         h1 = F.relu(self.gcn_node1(x, self.edge_index_node, self.edge_weight_node))
+         # pool
+         z = scatter_mean(h1, self.n2c, dim=0, dim_size=self.K)   # [K, H]
+         # cluster step
+         z2 = F.relu(self.gcn_cluster(z, self.edge_index_c, self.edge_weight_c))
+         # broadcast + gated residual
+         hb = z2[self.n2c]                                        # [N, H]
+         inj = self.down(hb)
+         gate_dyn = torch.sigmoid(self.gate(torch.cat([h1, inj], dim=1)))  # [N, 1]
+         lam = torch.sigmoid(self.lambda_logit)                   # scalar in (0,1)
+         alpha = lam * 1.0 + (1.0 - lam) * gate_dyn
+         h2 = h1 + alpha * inj
+         h2 = F.dropout(h2, p=0.5, training=self.training)
+         # final node conv (use same weighted adjacency)
+         out = self.gcn_node2(h2, self.edge_index_node, self.edge_weight_node)
+         return out
+
+
+ # -------------
+ # Training glue
+ # -------------
+
+ @torch.no_grad()
+ def accuracy(logits: Tensor, y: Tensor, mask: Tensor) -> float:
+     pred = logits[mask].argmax(dim=1)
+     return (pred == y[mask]).float().mean().item()
+
+
+ def run_train_eval(model: nn.Module, data, epochs=200, lr=0.01, wd=5e-4):
+     opt = torch.optim.Adam(model.parameters(), lr=lr, weight_decay=wd)
+     best_val, best_state = 0.0, None
+     for ep in range(1, epochs + 1):
+         model.train()
+         opt.zero_grad(set_to_none=True)
+         logits = model(data.x, data.edge_index)
+         loss = F.cross_entropy(logits[data.train_mask], data.y[data.train_mask])
+         loss.backward(); opt.step()
+
+         model.eval()
+         logits = model(data.x, data.edge_index)
+         val_acc = accuracy(logits, data.y, data.val_mask)
+         if val_acc > best_val:
+             best_val, best_state = val_acc, {k: v.detach().clone() for k, v in model.state_dict().items()}
+         if ep % 20 == 0:
+             tr = accuracy(logits, data.y, data.train_mask)
+             te = accuracy(logits, data.y, data.test_mask)
+             print(f"[{ep:04d}] loss={loss.item():.4f} train={tr:.3f} val={val_acc:.3f} test={te:.3f}")
+
+     if best_state is not None:
+         model.load_state_dict(best_state)
+     model.eval()
+     logits = model(data.x, data.edge_index)
+     return {"val": accuracy(logits, data.y, data.val_mask),
+             "test": accuracy(logits, data.y, data.test_mask)}
+
+
+ # -----------
+ # Entrypoint
+ # -----------
+
+ def main():
+     ap = argparse.ArgumentParser()
+     ap.add_argument("--dataset", required=True, choices=["Cora", "Citeseer", "Pubmed"])
+     ap.add_argument("--seeds", required=True, help="Path to LRMC seeds JSON (single large graph).")
+     ap.add_argument("--variant", choices=["baseline", "pool"], default="pool")
+     ap.add_argument("--hidden", type=int, default=128)
+     ap.add_argument("--epochs", type=int, default=200)
+     ap.add_argument("--lr", type=float, default=0.01)
+     ap.add_argument("--wd", type=float, default=5e-4)
+     ap.add_argument("--dropout", type=float, default=0.5)  # baseline only
+     ap.add_argument("--self_loop_scale", type=float, default=0.0)
+
+     # NEW knobs for cluster graph & refinement
+     ap.add_argument("--use_a2", action="store_true", help="Include A^2 in cluster graph.")
+     ap.add_argument("--a2_gamma", type=float, default=0.2, help="Weight for A^2 in A + γA^2.")
+     ap.add_argument("--cluster_topk", type=int, default=24, help="Top-k neighbors per cluster row to keep.")
+     ap.add_argument("--drop_cluster_self_loops", action="store_true", help="Drop (c,c) in cluster graph.")
+     ap.add_argument("--refine_k", type=int, default=0, help="k-core refinement on the top cluster (e.g., 2).")
+
+     ap.add_argument("--seed", type=int, default=42)
+     ap.add_argument("--debug", action="store_true")
+     args = ap.parse_args()
+
+     torch.manual_seed(args.seed)
+
+     ds = Planetoid(root=f"./data/{args.dataset}", name=args.dataset)
+     data = ds[0]
+     in_dim, out_dim, n = ds.num_node_features, ds.num_classes, data.num_nodes
+
+     if args.variant == "baseline":
+         model = GCN2(in_dim, args.hidden, out_dim, dropout=args.dropout)
+         res = run_train_eval(model, data, epochs=args.epochs, lr=args.lr, wd=args.wd)
+         print(f"Baseline GCN: val={res['val']:.4f} test={res['test']:.4f}")
+         return
+
+     # pool variant
+     node2cluster, _, info = load_top1_assignment(
+         args.seeds, n, debug=args.debug, refine_k=args.refine_k, edge_index_for_refine=data.edge_index
+     )
+     dbg_header = f"seeds_md5={info['json_md5']} top_size={info['top_cluster_size']} K={info['K']}"
+
+     model = OneClusterPoolGated(
+         in_dim=in_dim,
+         hid=args.hidden,
+         out_dim=out_dim,
+         node2cluster=node2cluster,
+         edge_index_node=data.edge_index,
+         num_nodes=n,
+         self_loop_scale=args.self_loop_scale,
+         use_a2_for_clusters=args.use_a2,
+         a2_gamma=args.a2_gamma,
+         drop_cluster_self_loops=args.drop_cluster_self_loops,
+         cluster_topk=args.cluster_topk,
+         debug_header=dbg_header
+     )
+
+     res = run_train_eval(model, data, epochs=args.epochs, lr=args.lr, wd=args.wd)
+     print(f"L-RMC (top-1 pool, gated): val={res['val']:.4f} test={res['test']:.4f}")
+
+
+ if __name__ == "__main__":
+     main()
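A quick standalone check of the gated fusion above (illustrative numbers): the blend alpha = lam + (1 - lam) * gate never drops below the learned scalar lam, so the broadcast cluster signal always receives at least that floor.

import torch
lam = torch.sigmoid(torch.tensor(0.0))       # learned scalar gate floor; sigmoid(0) = 0.5
gate_dyn = torch.tensor([[0.2], [0.9]])      # per-node dynamic gate outputs in (0, 1)
alpha = lam + (1.0 - lam) * gate_dyn
print(alpha.squeeze(1).tolist())             # ≈ [0.60, 0.95] — lower-bounded by lam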
src/.ipynb_checkpoints/2.4_lrmc_bilevel-checkpoint.py ADDED
@@ -0,0 +1,413 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # lrmc_bilevel.py
2
+ # Top-1 LRMC ablation: one-cluster pooling vs. plain GCN on Planetoid (e.g., Cora)
3
+ # Requires: torch, torch_geometric, torch_scatter, torch_sparse
4
+ #
5
+ # Usage examples:
6
+ # python lrmc_bilevel.py --dataset Cora --seeds /path/to/lrmc_seeds.json --variant baseline
7
+ # python lrmc_bilevel.py --dataset Cora --seeds /path/to/lrmc_seeds.json --variant pool --lrmc_inv_weight 0.01 --lrmc_gamma 0.7
8
+ #
9
+ # Notes:
10
+ # - We read your LRMC JSON, pick the single cluster with the highest 'score',
11
+ # assign it to cluster id 0, and make all other nodes singletons (1..K-1).
12
+ # - For --variant pool: Node-GCN -> pool (means) -> Cluster-GCN -> broadcast + skip -> Node-GCN -> classifier
13
+ # This variant now also applies the L-RMC stability tricks.
14
+ # - For --variant baseline: Standard 2-layer GCN.
15
+ # - Keep flags like --self_loop_scale and --use_a2 if you want A+λI / A^2 augmentation.
16
+
17
+ import argparse, json
18
+ from pathlib import Path
19
+ from typing import List, Tuple, Optional
20
+
21
+ import torch
22
+ import torch.nn as nn
23
+ import torch.nn.functional as F
24
+
25
+ from torch import Tensor
26
+ from torch_scatter import scatter_add, scatter_mean
27
+ from torch_sparse import coalesce, spspmm
28
+
29
+ from torch_geometric.datasets import Planetoid
30
+ from torch_geometric.nn import GCNConv
31
+ from torch_geometric.utils import subgraph, degree # Added for stability score
32
+
33
+ # ---------------------------
34
+ # Utilities: edges and seeds
35
+ # ---------------------------
36
+
37
+ def add_scaled_self_loops(edge_index: Tensor,
38
+ edge_weight: Optional[Tensor],
39
+ num_nodes: int,
40
+ scale: float = 1.0) -> Tuple[Tensor, Tensor]:
41
+ """Add self-loops with chosen weight (scale). If scale=0, return unchanged (and create weights if None)."""
42
+ if scale == 0.0:
43
+ if edge_weight is None:
44
+ edge_weight = torch.ones(edge_index.size(1), device=edge_index.device)
45
+ return edge_index, edge_weight
46
+ device = edge_index.device
47
+ self_loops = torch.arange(num_nodes, device=device)
48
+ self_index = torch.stack([self_loops, self_loops], dim=0)
49
+ self_weight = torch.full((num_nodes,), float(scale), device=device)
50
+ base_w = edge_weight if edge_weight is not None else torch.ones(edge_index.size(1), device=device)
51
+ ei = torch.cat([edge_index, self_index], dim=1)
52
+ ew = torch.cat([base_w, self_weight], dim=0)
53
+ ei, ew = coalesce(ei, ew, num_nodes, num_nodes, op='add')
54
+ return ei, ew
55
+
56
+
57
+ def adjacency_power(edge_index: Tensor, num_nodes: int, k: int = 2) -> Tensor:
58
+ """
59
+ Compute (binary) k-th power adjacency using sparse matmul (torch_sparse.spspmm).
60
+ Here we use k=2. Returns coalesced edge_index without weights.
61
+ """
62
+ row, col = edge_index
63
+ val = torch.ones(row.numel(), device=edge_index.device)
64
+ Ai, Av = edge_index, val
65
+ # A^2
66
+ Ri, Rv = spspmm(Ai, Av, Ai, Av, num_nodes, num_nodes, num_nodes)
67
+ mask = Ri[0] != Ri[1] # drop diagonal; add custom self-loops later if desired
68
+ Ri = Ri[:, mask]
69
+ Ri, _ = coalesce(Ri, torch.ones(Ri.size(1), device=edge_index.device), num_nodes, num_nodes, op='add')
70
+ return Ri
71
+
72
+
73
+ def build_cluster_graph(edge_index: Tensor,
74
+ num_nodes: int,
75
+ node2cluster: Tensor,
76
+ weight_per_edge: Optional[Tensor] = None,
77
+ num_clusters: Optional[int] = None
78
+ ) -> Tuple[Tensor, Tensor, int]:
79
+ """
80
+ Build cluster graph A_c = S^T A S with summed multiplicities as weights.
81
+ node2cluster: [N] long tensor mapping each node -> cluster id.
82
+ """
83
+ K = int(node2cluster.max().item()) + 1 if num_clusters is None else num_clusters
84
+ src, dst = edge_index
85
+ csrc = node2cluster[src]
86
+ cdst = node2cluster[dst]
87
+ edge_c = torch.stack([csrc, cdst], dim=0)
88
+ w = weight_per_edge if weight_per_edge is not None else torch.ones(edge_c.size(1), device=edge_c.device)
89
+ edge_c, w = coalesce(edge_c, w, K, K, op='add') # sum multiplicities
90
+ return edge_c, w, K
91
+
92
+
93
+ # -----
94
+ # Seeds
95
+ # -----
96
+
97
+ def _pick_top1_cluster(obj: dict) -> List[int]:
98
+ """
99
+ From LRMC JSON with structure: {"clusters":[{"members":[...], "score":float, ...}, ...]}
100
+ choose the cluster with max (score, size) and return its members.
101
+ """
102
+ clusters = obj.get("clusters", [])
103
+ if not clusters:
104
+ return []
105
+ # choose by highest score, then by size (tiebreaker)
106
+ best = max(clusters, key=lambda c: (float(c.get("score", 0.0)), len(c.get("members", []))))
107
+ return list(best.get("members", []))
108
+
109
+
110
+ def load_top1_assignment(seeds_json: str, n_nodes: int) -> Tuple[Tensor, Tensor, Tensor]:
111
+ """
112
+ Create a hard assignment for top-1 LRMC cluster:
113
+ - cluster 0 = top-1 LRMC set
114
+ - nodes outside are singletons (1..K-1)
115
+ Returns:
116
+ node2cluster: [N] long
117
+ cluster_scores: [K,1] with 1.0 for top cluster, 0.0 for singletons
118
+ core_nodes: [|C|] long, original indices of nodes in the top-1 LRMC cluster
119
+ """
120
+ obj = json.loads(Path(seeds_json).read_text())
121
+ C_star_list = _pick_top1_cluster(obj)
122
+ C_star = torch.tensor(sorted(set(C_star_list)), dtype=torch.long) # Original indices of core nodes
123
+
124
+ node2cluster = torch.full((n_nodes,), -1, dtype=torch.long)
125
+ node2cluster[C_star] = 0
126
+ outside = torch.tensor(sorted(set(range(n_nodes)) - set(C_star.tolist())), dtype=torch.long)
127
+ if outside.numel() > 0:
128
+ node2cluster[outside] = torch.arange(1, 1 + outside.numel(), dtype=torch.long)
129
+ assert int(node2cluster.min()) >= 0, "All nodes must be assigned."
130
+
131
+ K = 1 + outside.numel()
132
+ cluster_scores = torch.zeros(K, 1, dtype=torch.float32)
133
+ if C_star.numel() > 0:
134
+ cluster_scores[0, 0] = 1.0 # emphasize the supercluster
135
+ return node2cluster, cluster_scores, C_star
136
+
137
+
138
+ # --------------------------
139
+ # Models (baseline + pooled)
140
+ # --------------------------
141
+
142
+ class GCN2(nn.Module):
143
+ """Plain 2-layer GCN baseline."""
144
+ def __init__(self, in_dim, hid, out_dim, dropout_p: float = 0.5):
145
+ super().__init__()
146
+ self.conv1 = GCNConv(in_dim, hid)
147
+ self.conv2 = GCNConv(hid, out_dim)
148
+ self.dropout_p = dropout_p
149
+
150
+ def forward(self, x, edge_index):
151
+ x = F.relu(self.conv1(x, edge_index))
152
+ x = F.dropout(x, p=self.dropout_p, training=self.training)
153
+ x = self.conv2(x, edge_index)
154
+ return x
155
+
156
+
157
+ class OneClusterPool(nn.Module):
158
+ """
159
+ Node-GCN -> pool to one-cluster + singletons -> Cluster-GCN -> broadcast + skip -> Node-GCN -> classifier
160
+ This version includes L-RMC stability tricks:
161
+ 1. Backbone-invariance regularizer (loss computed in forward).
162
+ 2. Boundary damping on node graph.
163
+ """
164
+ def __init__(self,
165
+ in_dim: int,
166
+ hid: int,
167
+ out_dim: int,
168
+ node2cluster: Tensor,
169
+ core_nodes: Tensor, # New: explicit core nodes
170
+ edge_index_node: Tensor,
171
+ num_nodes: int,
172
+ self_loop_scale: float = 0.0,
173
+ use_a2_for_clusters: bool = False,
174
+ lrmc_gamma: float = 1.0, # New: damping factor (1.0 means no damping)
175
+ dropout_p: float = 0.5):
176
+ super().__init__()
177
+ self.n2c = node2cluster.long()
178
+ self.K = int(self.n2c.max().item()) + 1
179
+ self.core_nodes = core_nodes # Store original indices of core nodes
180
+ self.lrmc_gamma = lrmc_gamma
181
+ self.dropout_p = dropout_p
182
+
183
+ # Node graph (A + λI if desired)
184
+ ei_node = edge_index_node
185
+ ew_node_base = None # Will be created by add_scaled_self_loops if None
186
+ ei_node, ew_node = add_scaled_self_loops(ei_node, ew_node_base, num_nodes, scale=self_loop_scale)
187
+
188
+ # --- Apply Boundary Damping ---
189
+ if self.lrmc_gamma < 1.0 and self.core_nodes.numel() > 0:
190
+ is_core = torch.zeros(num_nodes, dtype=torch.bool, device=ei_node.device)
191
+ is_core[self.core_nodes] = True
192
+
193
+ src_is_core = is_core[ei_node[0]]
194
+ dst_is_core = is_core[ei_node[1]]
195
+ cross_boundary_mask = (src_is_core != dst_is_core)
196
+
197
+ ew_node[cross_boundary_mask] *= self.lrmc_gamma
198
+ # --- End Boundary Damping ---
199
+
200
+ self.register_buffer("edge_index_node", ei_node)
201
+ self.register_buffer("edge_weight_node", ew_node)
202
+
203
+ # Cluster graph from A or A^2
204
+ ei_for_c = adjacency_power(edge_index_node, num_nodes, k=2) if use_a2_for_clusters else edge_index_node
205
+ edge_index_c, edge_weight_c, K = build_cluster_graph(ei_for_c, num_nodes, self.n2c)
206
+ self.register_buffer("edge_index_c", edge_index_c)
207
+ self.register_buffer("edge_weight_c", edge_weight_c)
208
+ self.K = K
209
+
210
+ # Layers
211
+ self.gcn_node1 = GCNConv(in_dim, hid, add_self_loops=False, normalize=True)
212
+ self.gcn_cluster = GCNConv(hid, hid, add_self_loops=True, normalize=True)
213
+ self.gcn_node2 = GCNConv(hid * 2, out_dim) # on concatenated [h_node, h_broadcast]
214
+
215
+ def forward(self, x: Tensor, edge_index_node: Tensor) -> Tuple[Tensor, Optional[Tensor]]:
216
+ # Node GCN (uses stored weights)
217
+ h1 = F.relu(self.gcn_node1(x, self.edge_index_node, self.edge_weight_node))
218
+ h1 = F.dropout(h1, p=self.dropout_p, training=self.training) # Consistent dropout
219
+
220
+ # --- Backbone-invariance regularizer ---
221
+ lrmc_inv_loss = None
222
+ # Apply only if core nodes exist AND regularizer weight is positive (handled by run_train_eval)
223
+ if self.core_nodes.numel() > 0:
224
+ core_embeddings = h1[self.core_nodes]
225
+ # Calculate mean embedding of the core, keepdim=True for broadcasting
226
+ avg_embedding = core_embeddings.mean(dim=0, keepdim=True)
227
+ # MSE between each core embedding and the average core embedding
228
+ lrmc_inv_loss = F.mse_loss(core_embeddings, avg_embedding.expand_as(core_embeddings), reduction='mean')
229
+ # --- End Backbone-invariance regularizer ---
230
+
231
+ # Pool to clusters: mean per cluster
232
+ z = scatter_mean(h1, self.n2c, dim=0, dim_size=self.K) # [K, H]
233
+
234
+ # Cluster GCN
235
+ z2 = F.relu(self.gcn_cluster(z, self.edge_index_c, self.edge_weight_c))
236
+
237
+ # Broadcast back + skip concat
238
+ hb = z2[self.n2c] # [N, H]
239
+ hcat = torch.cat([h1, hb], dim=1) # [N, 2H]
240
+
241
+ # Final node GCN head -> logits
242
+ out = self.gcn_node2(hcat, edge_index_node)
243
+ return out, lrmc_inv_loss
244
+
245
+
246
+ # -------------
247
+ # Training glue
248
+ # -------------
249
+
250
+ @torch.no_grad()
251
+ def accuracy(logits: Tensor, y: Tensor, mask: Tensor) -> float:
252
+ pred = logits[mask].argmax(dim=1)
253
+ return (pred == y[mask]).float().mean().item()
254
+
255
+
256
+ def run_train_eval(model: nn.Module, data, epochs=200, lr=0.01, wd=5e-4, lrmc_inv_weight: float = 0.0):
257
+ opt = torch.optim.Adam(model.parameters(), lr=lr, weight_decay=wd)
258
+ best_val, best_state = 0.0, None
259
+ for ep in range(1, epochs + 1):
260
+ model.train()
261
+ opt.zero_grad(set_to_none=True)
262
+
263
+ # Model output depends on its type
264
+ res = model(data.x, data.edge_index)
265
+
266
+ current_lrmc_inv_loss = None
267
+ if isinstance(model, OneClusterPool): # OneClusterPool returns (logits, lrmc_inv_loss)
268
+ logits, current_lrmc_inv_loss = res
269
+ loss = F.cross_entropy(logits[data.train_mask], data.y[data.train_mask])
270
+ if current_lrmc_inv_loss is not None and lrmc_inv_weight > 0:
271
+ loss += lrmc_inv_weight * current_lrmc_inv_loss
272
+ else: # GCN2 (baseline) returns only logits
273
+ logits = res
274
+ loss = F.cross_entropy(logits[data.train_mask], data.y[data.train_mask])
275
+
276
+ loss.backward(); opt.step()
277
+
278
+ # track best on val
279
+ model.eval()
280
+ # For evaluation, we only need logits. If it's OneClusterPool, ignore the loss.
281
+ logits_eval, _ = model(data.x, data.edge_index) if isinstance(model, OneClusterPool) else (model(data.x, data.edge_index), None)
282
+
283
+ val_acc = accuracy(logits_eval, data.y, data.val_mask)
284
+ if val_acc > best_val:
285
+ best_val, best_state = val_acc, {k: v.detach().clone() for k, v in model.state_dict().items()}
286
+ if ep % 20 == 0:
287
+ tr = accuracy(logits_eval, data.y, data.train_mask)
288
+ te = accuracy(logits_eval, data.y, data.test_mask)
289
+ lrmc_loss_str = f" inv_l={current_lrmc_inv_loss.item():.4f}" if current_lrmc_inv_loss is not None else ""
290
+ print(f"[{ep:04d}] loss={loss.item():.4f}{lrmc_loss_str} train={tr:.3f} val={val_acc:.3f} test={te:.3f}")
291
+
292
+ # test @ best val
293
+ if best_state is not None:
294
+ model.load_state_dict(best_state)
295
+ model.eval()
296
+ logits_final, _ = model(data.x, data.edge_index) if isinstance(model, OneClusterPool) else (model(data.x, data.edge_index), None)
297
+ return {
298
+ "val": accuracy(logits_final, data.y, data.val_mask),
299
+ "test": accuracy(logits_final, data.y, data.test_mask)
300
+ }
301
+
302
+ # --------------------------
303
+ # LRMC Stability Score (new)
304
+ # --------------------------
305
+
306
+ def compute_lrmc_stability_score(core_nodes: Tensor, edge_index: Tensor, num_nodes: int, epsilon: float = 1e-6) -> float:
307
+ """
308
+ Computes the L-RMC stability score S_L(C) = |C| / (d^T L_C d + epsilon).
309
+ Here, d^T L_C d is interpreted as sum of squared degree differences over edges within the core:
310
+ sum_{(u,v) in E_C} (deg_C(u) - deg_C(v))^2.
311
+ """
312
+ if core_nodes.numel() == 0:
313
+ return 0.0
314
+
315
+ # Get the induced subgraph for core_nodes, relabeling nodes to [0, ..., |C|-1]
316
+ # `subgraph` returns (sub_edge_index, edge_attr); unpack the 2-tuple
317
+ sub_edge_index, _ = subgraph(core_nodes, edge_index, relabel_nodes=True, num_nodes=num_nodes)
318
+
319
+ num_core_nodes = core_nodes.numel()
320
+
321
+ if sub_edge_index.numel() == 0:
322
+ # If the core has nodes but no internal edges, there's no degree variability over edges.
323
+ # d^T L_C d = 0 in this interpretation, leading to max stability score.
324
+ return float(num_core_nodes) / epsilon
325
+
326
+ # Degrees within the induced subgraph (nodes relabeled to 0..|C|-1).
327
+ # `degree(idx, ...)` counts how often each node appears in `idx`.
328
+ # For an undirected PyG graph, `sub_edge_index` stores both (u, v) and (v, u),
329
+ # so counting sources alone already yields each node's full degree; summing
330
+ # source and destination counts gives exactly 2*deg, a uniform factor that
331
+ # rescales the score but leaves relative comparisons between cores intact.
332
+ degrees_in_subgraph_full = degree(sub_edge_index[0], num_nodes=num_core_nodes, dtype=torch.float) + \
333
+ degree(sub_edge_index[1], num_nodes=num_core_nodes, dtype=torch.float)
334
+
335
+ # Calculate sum of squared degree differences for each edge within the subgraph
336
+ # deg_u_relabel and deg_v_relabel are the degrees of source and destination nodes
337
+ # (after relabeling) within the induced subgraph.
338
+ deg_u_relabel = degrees_in_subgraph_full[sub_edge_index[0]]
339
+ deg_v_relabel = degrees_in_subgraph_full[sub_edge_index[1]]
340
+
341
+ degree_variability_sum = torch.sum((deg_u_relabel - deg_v_relabel)**2)
342
+
343
+ # Compute S_L(C)
344
+ score = float(num_core_nodes) / (degree_variability_sum.item() + epsilon)
345
+ return score
346
+
347
+
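As a consistency check on the docstring above: with d the in-core degree vector and L_C = D_C - A_C the Laplacian of the induced subgraph, the standard Laplacian quadratic form gives

    d^\top L_C\, d \;=\; \sum_{(u,v)\in E_C} \bigl(\deg_C(u) - \deg_C(v)\bigr)^2,
    \qquad
    S_L(C) \;=\; \frac{|C|}{d^\top L_C\, d + \varepsilon},

so a degree-regular core drives the denominator to ε and attains the maximal score |C|/ε, which is exactly the value returned by the no-internal-edges branch.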
348
+ # -----------
349
+ # Entrypoint
350
+ # -----------
351
+
352
+ def main():
353
+ ap = argparse.ArgumentParser()
354
+ ap.add_argument("--dataset", required=True, choices=["Cora", "Citeseer", "Pubmed"])
355
+ ap.add_argument("--seeds", required=True, help="Path to LRMC seeds JSON (single large graph).")
356
+ ap.add_argument("--variant", choices=["baseline", "pool"], default="pool",
357
+ help="baseline=plain GCN; pool=top-1 LRMC one-cluster pooling (with new L-RMC tricks)")
358
+ ap.add_argument("--hidden", type=int, default=128)
359
+ ap.add_argument("--epochs", type=int, default=200)
360
+ ap.add_argument("--lr", type=float, default=0.01)
361
+ ap.add_argument("--wd", type=float, default=5e-4)
362
+ ap.add_argument("--dropout", type=float, default=0.5, help="Dropout rate for GCN layers.")
363
+ ap.add_argument("--self_loop_scale", type=float, default=0.0, help="λ for A+λI on node graph (0 disables)")
364
+ ap.add_argument("--use_a2", action="store_true", help="Use A^2 to build the cluster graph (recommended for pool)")
365
+ ap.add_argument("--lrmc_inv_weight", type=float, default=0.0,
366
+ help="Weight for the backbone-invariance regularizer (0 disables).")
367
+ ap.add_argument("--lrmc_gamma", type=float, default=1.0,
368
+ help="Damping factor for cross-boundary edges (1.0 means no damping).")
369
+ ap.add_argument("--seed", type=int, default=42)
370
+ args = ap.parse_args()
371
+
372
+ torch.manual_seed(args.seed)
373
+
374
+ # Load dataset
375
+ ds = Planetoid(root=f"./data/{args.dataset}", name=args.dataset)
376
+ data = ds[0]
377
+ in_dim, out_dim, n = ds.num_node_features, ds.num_classes, data.num_nodes
378
+
379
+ if args.variant == "baseline":
380
+ model = GCN2(in_dim, args.hidden, out_dim, dropout_p=args.dropout)
381
+ # use default add_self_loops=True behavior inside convs
382
+ res = run_train_eval(model, data, epochs=args.epochs, lr=args.lr, wd=args.wd)
383
+ print(f"Baseline GCN: val={res['val']:.4f} test={res['test']:.4f}")
384
+ return
385
+
386
+ # Top-1 LRMC assignment
387
+ node2cluster, _, core_nodes = load_top1_assignment(args.seeds, n)
388
+
389
+ # For informational purposes, compute and print the stability score for the found core
390
+ lrmc_score = compute_lrmc_stability_score(core_nodes, data.edge_index, n)
391
+ print(f"LRMC Top-1 Cluster Size: {core_nodes.numel()} nodes. Stability Score (S_L): {lrmc_score:.4f}")
392
+
393
+
394
+ # One-cluster pooled model with L-RMC tricks
395
+ model = OneClusterPool(in_dim=in_dim,
396
+ hid=args.hidden,
397
+ out_dim=out_dim,
398
+ node2cluster=node2cluster,
399
+ core_nodes=core_nodes, # Pass core nodes
400
+ edge_index_node=data.edge_index,
401
+ num_nodes=n,
402
+ self_loop_scale=args.self_loop_scale,
403
+ use_a2_for_clusters=args.use_a2,
404
+ lrmc_gamma=args.lrmc_gamma, # Pass damping factor
405
+ dropout_p=args.dropout) # Pass dropout rate
406
+
407
+ res = run_train_eval(model, data, epochs=args.epochs, lr=args.lr, wd=args.wd,
408
+ lrmc_inv_weight=args.lrmc_inv_weight) # Pass regularizer weight
409
+ print(f"L-RMC (top-1 pool with tricks): val={res['val']:.4f} test={res['test']:.4f}")
410
+
411
+
412
+ if __name__ == "__main__":
413
+ main()
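A minimal sanity check for compute_lrmc_stability_score above (a sketch with a made-up toy graph, assuming the function is in scope):

    import torch

    # Triangle 0-1-2 plus a pendant node 3, stored PyG-style with both directions.
    edge_index = torch.tensor([[0, 1, 1, 2, 0, 2, 2, 3],
                               [1, 0, 2, 1, 2, 0, 3, 2]])
    core = torch.tensor([0, 1, 2])  # degree-regular inside the core
    score = compute_lrmc_stability_score(core, edge_index, num_nodes=4)
    # All in-core degrees are equal, so the squared-difference sum is 0 and
    # the score collapses to |C| / epsilon = 3 / 1e-6 = 3e6.
    print(score)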
src/.ipynb_checkpoints/export_edgelist-checkpoint.py ADDED
@@ -0,0 +1,158 @@
1
+ #!/usr/bin/env python3
2
+ """
3
+ This script extracts canonical – 0‑based, undirected, duplicate‑free – edgelists from
4
+ PyTorch Geometric datasets.
5
+
6
+ Usage
7
+ -----
8
+
9
+ python export_edgelist.py [--data_root <root>] <dataset_name> <edges_out> [--dot <dot_file>]
10
+
11
+ Arguments
12
+ ---------
13
+ dataset_name : str
14
+ Name of a PyG dataset that can be loaded via `Planetoid(..)` or `TUDataset(..)`.
15
+
16
+ edges_out : str
17
+ * For a single‑graph dataset (e.g. Planetoid Cora) – write to this file.
18
+ * For a multi‑graph dataset (e.g. TUDataset “Facebook”) – write to this
19
+ directory, one file per graph (`graph_000000.txt`, …).
20
+
21
+ --dot path/to/file.dot : str, optional
22
+ If supplied, a GraphViz dot file is written that visualises all edgelists
23
+ (each graph gets its own subgraph in the file).
24
+
25
+ The script prints *nothing* to stdout – the only side‑effect is the edge‑list
26
+ (and, optionally, the .dot) output.
27
+
28
+ """
29
+
30
+ from __future__ import annotations
31
+
32
+ import argparse
33
+ from pathlib import Path
34
+ from typing import Iterable, Set, Tuple, List
35
+
36
+ # ---------------------------------------------------------------------------
37
+
38
+ def canonical_edges(edge_index) -> Set[Tuple[int, int]]:
39
+ """Return a set of undirected edges `(u, v)` with `u < v` and `u != v`."""
40
+ seen: Set[Tuple[int, int]] = set()
41
+ for u, v in edge_index.t().tolist():
42
+ if u == v:
43
+ continue
44
+ if u > v:
45
+ u, v = v, u
46
+ seen.add((u, v))
47
+ return seen
48
+
49
+
50
+ def write_edges(out_file: Path, edges: Iterable[Tuple[int, int]]) -> None:
51
+ """Write edges to `out_file` in `u v\n` format."""
52
+ out_file.parent.mkdir(parents=True, exist_ok=True)
53
+ with out_file.open("w", encoding="utf-8") as f:
54
+ for u, v in sorted(edges):
55
+ f.write(f"{u} {v}\n")
56
+
57
+
58
+ def dump_dot(dot_path: Path, graph_edges: List[Set[Tuple[int, int]]]) -> None:
59
+ """Create a GraphViz dot file from a list of edge sets."""
60
+ dot_lines: List[str] = ["graph G {"]
61
+ for i, edges in enumerate(graph_edges):
62
+ dot_lines.append(f" /* graph {i} */")
63
+ dot_lines.append(f" subgraph cluster_{i} {{")
64
+ dot_lines.append(" label = \"{}\";".format(i))
65
+ # add nodes
66
+ nodes: Set[int] = {n for e in edges for n in e}
67
+ for n in sorted(nodes):
68
+ dot_lines.append(f" {n};")
69
+ # add edges
70
+ for u, v in sorted(edges):
71
+ dot_lines.append(f" {u} -- {v};")
72
+ dot_lines.append(" }")
73
+ dot_lines.append("}")
74
+ dot_path.parent.mkdir(parents=True, exist_ok=True)
75
+ dot_path.write_text("\n".join(dot_lines), encoding="utf-8")
76
+
77
+
78
+ # ---------------------------------------------------------------------------
79
+
80
+ def process_planetoid(
81
+ root: Path, name: str, out_path: Path, dot_file: Path | None
82
+ ) -> None:
83
+ """Single‑graph dataset (Planetoid)."""
84
+ from torch_geometric.datasets import Planetoid
85
+
86
+ ds = Planetoid(root=str(root), name=name)
87
+ edges = canonical_edges(ds[0].edge_index)
88
+
89
+ if out_path.is_dir():
90
+ out_file = out_path / "graph_000000.txt"
91
+ else:
92
+ out_file = out_path
93
+
94
+ write_edges(out_file, edges)
95
+
96
+ if dot_file:
97
+ dump_dot(dot_file, [edges])
98
+
99
+
100
+ def process_tudataset(
101
+ root: Path, name: str, out_dir: Path, dot_file: Path | None
102
+ ) -> None:
103
+ """Multi‑graph dataset (TUDataset)."""
104
+ from torch_geometric.datasets import TUDataset
105
+
106
+ ds = TUDataset(root=str(root), name=name)
107
+ out_dir.mkdir(parents=True, exist_ok=True)
108
+
109
+ if dot_file:
110
+ all_edges: List[Set[Tuple[int, int]]] = []
111
+
112
+ for i, data in enumerate(ds):
113
+ edges = canonical_edges(data.edge_index)
114
+ out_file = out_dir / f"graph_{i:06d}.txt"
115
+ write_edges(out_file, edges)
116
+
117
+ if dot_file:
118
+ all_edges.append(edges)
119
+
120
+ if dot_file:
121
+ dump_dot(dot_file, all_edges)
122
+
123
+ # ---------------------------------------------------------------------------
124
+
125
+ def main() -> None:
126
+ parser = argparse.ArgumentParser(description=__doc__.strip(), formatter_class=argparse.RawTextHelpFormatter)
127
+ parser.add_argument("--data_root", default="./data", help="Root directory of PyG datasets")
128
+ parser.add_argument("dataset_name", help="PyG dataset name (e.g. Cora, WikipediaNetwork)")
129
+ parser.add_argument("edges_out", help="File (single‑graph) or directory (many‑graph) for edgelists")
130
+ parser.add_argument("--dot", dest="dot_file", type=str, default="", help="Optional .dot file for visualization")
131
+ args = parser.parse_args()
132
+
133
+ root = Path(args.data_root)
134
+ out_path = Path(args.edges_out)
135
+ dot_path = Path(args.dot_file) if args.dot_file else None
136
+
137
+ # Decide whether the dataset is Planetoid (single graph) or TUDataset (many)
138
+ try:
139
+ from torch_geometric.datasets import Planetoid
140
+
141
+ Planetoid(root=str(root), name=args.dataset_name)
142
+ dataset_type = "planetoid"
143
+ except Exception: # pragma: no cover
144
+ from torch_geometric.datasets import TUDataset
145
+
146
+ TUDataset(root=str(root), name=args.dataset_name)
147
+ dataset_type = "tudataset"
148
+
149
+ if dataset_type == "planetoid":
150
+ process_planetoid(root, args.dataset_name, out_path, dot_path)
151
+ else: # tudataset
152
+ if out_path.is_file():
153
+ raise ValueError("For multi‑graph datasets (--tudataset) the output must be a directory")
154
+ process_tudataset(root, args.dataset_name, out_path, dot_path)
155
+
156
+
157
+ if __name__ == "__main__":
158
+ main()
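A quick check of canonical_edges on a hand-built edge_index (illustrative values only, assuming the function is in scope):

    import torch

    ei = torch.tensor([[0, 1, 2, 3, 3],
                       [1, 0, 3, 2, 3]])   # both directions, one duplicate pair, one self-loop
    print(sorted(canonical_edges(ei)))     # -> [(0, 1), (2, 3)]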
src/.ipynb_checkpoints/export_reddit_edgelist-checkpoint.py ADDED
@@ -0,0 +1,41 @@
1
+ # export_reddit_edgelist_canonical.py
2
+ # Writes EACH undirected edge exactly once: "u v" with u < v (0-based), from PyG Reddit.
3
+ # This halves the edge count relative to to_undirected and avoids duplication downstream.
4
+ #
5
+ # Usage:
6
+ # python export_reddit_edgelist_canonical.py --out reddit_edges.txt --root ./data/Reddit
7
+
8
+ import argparse
9
+ from pathlib import Path
10
+ import torch
11
+ from torch_geometric.datasets import Reddit
12
+
13
+ def main():
14
+ ap = argparse.ArgumentParser()
15
+ ap.add_argument("--root", type=str, default="./data/Reddit")
16
+ ap.add_argument("--out", type=str, default="reddit_edges.txt")
17
+ args = ap.parse_args()
18
+
19
+ ds = Reddit(root=args.root); data = ds[0]
20
+ ei = data.edge_index # directed; in this dataset it's effectively undirected
21
+ outp = Path(args.out); outp.parent.mkdir(parents=True, exist_ok=True)
22
+
23
+ # canonical pairs u<v; de-duplicate
24
+ seen = set()
25
+ with outp.open("w") as f:
26
+ E = ei.size(1)
27
+ for e in range(E):
28
+ u = int(ei[0, e]); v = int(ei[1, e])
29
+ if u == v:
30
+ continue
31
+ if u > v:
32
+ u, v = v, u
33
+ key = (u << 32) | v
34
+ if key in seen:
35
+ continue
36
+ seen.add(key)
37
+ f.write(f"{u} {v}\n")
38
+ print(f"Wrote {len(seen)} undirected edges to {outp} (nodes: {data.num_nodes})")
39
+
40
+ if __name__ == "__main__":
41
+ main()
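The per-edge Python loop above is simple but slow on a graph of this size. A vectorized sketch of the same canonicalization (an alternative, not what the script does; it assumes the whole edge_index fits in memory):

    import torch

    def canonical_pairs_vectorized(ei: torch.Tensor) -> torch.Tensor:
        u, v = ei[0], ei[1]
        keep = u != v                               # drop self-loops
        u, v = u[keep], v[keep]
        lo = torch.minimum(u, v).to(torch.int64)
        hi = torch.maximum(u, v).to(torch.int64)
        key = torch.unique((lo << 32) | hi)         # same packing as the loop, de-duplicated
        return torch.stack([key >> 32, key & 0xFFFFFFFF], dim=0)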
src/.ipynb_checkpoints/highlight_seeds_dot-checkpoint.py ADDED
@@ -0,0 +1,71 @@
1
+ #!/usr/bin/env python3
2
+ """
3
+ highlight_seeds_dot.py
4
+
5
+ Highlight seed nodes (first cluster in a JSON seeds file) inside a GraphViz .dot file.
6
+
7
+ Usage
8
+ -----
9
+ python highlight_seeds_dot.py <input.dot> <seeds.json> <output.dot>
10
+
11
+ Every node listed in seeds.json[clusters][0]["seed_nodes"] will be rendered
12
+ [color="red", style="filled", fillcolor="red", fontcolor="white"]
13
+ and all its incident edges colored red.
14
+ The script prints *nothing* to stdout – it only writes the modified .dot file.
15
+ """
16
+
17
+ from __future__ import annotations
18
+
19
+ import argparse
20
+ import json
21
+ import re
22
+ from pathlib import Path
23
+
24
+
25
+ def load_seeds(seeds_path: Path) -> set[int]:
26
+ """Return the seed-node indices from the first cluster."""
27
+ with seeds_path.open("r", encoding="utf-8") as f:
28
+ data = json.load(f)
29
+ return {n - 1 for n in data["clusters"][0]["seed_nodes"]}
30
+
31
+
32
+ def highlight_dot(dot_path: Path, seeds: set[int], out_path: Path) -> None:
33
+ """Read dot_path, highlight seeds and their edges, write to out_path."""
34
+ content = dot_path.read_text(encoding="utf-8")
35
+
36
+ # 1) highlight seed nodes
37
+ def node_replace(m: re.Match) -> str:
38
+ node_id = int(m.group(1))
39
+ if node_id in seeds:
40
+ return f'{m.group(0).rstrip(";")} [color="red", style="filled", fillcolor="red", fontcolor="white"];'
41
+ return m.group(0)
42
+
43
+ content = re.sub(rf'^\s*({"|".join(map(str, seeds))})\s*;', node_replace, content, flags=re.MULTILINE)
44
+
45
+ # 2) highlight edges incident to seeds
46
+ def edge_replace(m: re.Match) -> str:
47
+ u, v = int(m.group(1)), int(m.group(2))
48
+ if u in seeds or v in seeds:
49
+ return f'{m.group(0).rstrip(";")} [color="red"];'
50
+ return m.group(0)
51
+
52
+ content = re.sub(rf'^\s*({"|".join(map(str, seeds))})\s*--\s*(\d+)\s*;', edge_replace, content, flags=re.MULTILINE)
53
+ content = re.sub(rf'^\s*(\d+)\s*--\s*({"|".join(map(str, seeds))})\s*;', edge_replace, content, flags=re.MULTILINE)
54
+
55
+ out_path.parent.mkdir(parents=True, exist_ok=True)
56
+ out_path.write_text(content, encoding="utf-8")
57
+
58
+
59
+ def main() -> None:
60
+ parser = argparse.ArgumentParser(description=__doc__.strip(), formatter_class=argparse.RawTextHelpFormatter)
61
+ parser.add_argument("input_dot", type=str, help="Original GraphViz .dot file")
62
+ parser.add_argument("seeds_json", type=str, help="Seeds JSON file")
63
+ parser.add_argument("output_dot", type=str, help="Path to write highlighted .dot file")
64
+ args = parser.parse_args()
65
+
66
+ seeds = load_seeds(Path(args.seeds_json))
67
+ highlight_dot(Path(args.input_dot), seeds, Path(args.output_dot))
68
+
69
+
70
+ if __name__ == "__main__":
71
+ main()
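A tiny self-contained illustration of the node-highlight substitution (the dot snippet and seed ids are made up):

    import re

    seeds = {2, 5}
    content = 'graph G {\n  2;\n  3;\n  2 -- 3;\n}'
    pat = rf'^\s*({"|".join(map(str, seeds))})\s*;'
    out = re.sub(pat,
                 lambda m: f'{m.group(0).rstrip(";")} [color="red", style="filled", fillcolor="red", fontcolor="white"];',
                 content, flags=re.MULTILINE)
    print(out)  # only the line "  2;" gains the red attributes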
src/1_build_lrmc_levels.py ADDED
@@ -0,0 +1,194 @@
1
+ #!/usr/bin/env python3
2
+ # -*- coding: utf-8 -*-
3
+ import argparse
4
+ import glob
5
+ import json
6
+ import os
7
+ import re
8
+ import shutil
9
+ import subprocess
10
+ from collections import defaultdict
11
+ from typing import Dict, List, Tuple, Iterable, Set
12
+
13
+ def ensure_dir(p: str):
14
+ os.makedirs(p, exist_ok=True)
15
+
16
+ def read_edgelist(path: str) -> Iterable[Tuple[int, int]]:
17
+ with open(path, 'r') as f:
18
+ for line in f:
19
+ s = line.strip()
20
+ if not s or s.startswith('#'):
21
+ continue
22
+ parts = s.split()
23
+ if len(parts) < 2:
24
+ continue
25
+ try:
26
+ u = int(parts[0]); v = int(parts[1])
27
+ except ValueError:
28
+ continue
29
+ if u == v:
30
+ continue
31
+ a, b = (u, v) if u < v else (v, u)
32
+ yield a, b
33
+
34
+ def write_edgelist(path: str, edges: Iterable[Tuple[int, int]]):
35
+ with open(path, 'w') as f:
36
+ for u, v in edges:
37
+ f.write(f"{u} {v}\n")
38
+
39
+ def parse_seeds(path: str) -> Tuple[Dict[int, int], List[int]]:
40
+ """
41
+ Return (node_to_cluster_index, sorted_cluster_ids).
42
+ The cluster indices are 0..C-1, sorted by cluster_id.
43
+ - On overlapping membership, choose the cluster with higher 'score', then smaller cluster_id.
44
+ """
45
+ with open(path, 'r') as f:
46
+ js = json.load(f)
47
+ clusters = js.get('clusters', [])
48
+ # Sort clusters by cluster_id for stable indexing
49
+ clusters_sorted = sorted(clusters, key=lambda c: c.get('cluster_id', 0))
50
+ cluster_id_list = [c.get('cluster_id', i) for i, c in enumerate(clusters_sorted)]
51
+ cluster_id_to_idx = {cid: i for i, cid in enumerate(cluster_id_list)}
52
+
53
+ # Build node->(best_cluster_idx, best_score, best_cid)
54
+ node_choice: Dict[int, Tuple[int, float, int]] = {}
55
+
56
+ for c in clusters_sorted:
57
+ cid = c.get('cluster_id', None)
58
+ if cid is None:
59
+ continue
60
+ idx = cluster_id_to_idx[cid]
61
+ members = c.get('members', [])
62
+ score = float(c.get('score', 0.0))
63
+ for u in members:
64
+ prev = node_choice.get(u, None)
65
+ if prev is None or (score > prev[1]) or (score == prev[1] and cid < prev[2]):
66
+ node_choice[u] = (idx, score, cid)
67
+
68
+ node_to_cluster = {u: idx for u, (idx, score, cid) in node_choice.items()}
69
+ return node_to_cluster, cluster_id_list
70
+
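A small check of the overlap rule in parse_seeds (illustrative JSON written to a throwaway temp file):

    import json, tempfile

    tmp = tempfile.NamedTemporaryFile("w", suffix=".json", delete=False)
    json.dump({"clusters": [
        {"cluster_id": 0, "score": 0.5, "members": [1, 2]},
        {"cluster_id": 1, "score": 0.9, "members": [2, 3]},
    ]}, tmp)
    tmp.close()
    n2c, ids = parse_seeds(tmp.name)
    print(n2c)  # {1: 0, 2: 1, 3: 1} -- node 2 joins cluster index 1 because 0.9 > 0.5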
71
+ def coarsen_edgelist(prev_edgelist: str, seeds_json: str, out_edgelist: str) -> int:
72
+ node_to_cluster, cluster_id_list = parse_seeds(seeds_json)
73
+ edges_set: Set[Tuple[int, int]] = set()
74
+ missing_nodes = 0
75
+ for u, v in read_edgelist(prev_edgelist):
76
+ cu = node_to_cluster.get(u, None)
77
+ cv = node_to_cluster.get(v, None)
78
+ if cu is None or cv is None:
79
+ # If a node isn't present in any cluster JSON, skip or count as missing
80
+ missing_nodes += 1
81
+ continue
82
+ if cu == cv:
83
+ continue
84
+ a, b = (cu, cv) if cu < cv else (cv, cu)
85
+ edges_set.add((a, b))
86
+
87
+ write_edgelist(out_edgelist, sorted(edges_set))
88
+ return missing_nodes
89
+
90
+ def run_java(java_exec: str, class_name: str, edgelist_path: str, out_json_path: str,
91
+ epsilon: str, java_opts: List[str]) -> None:
92
+ cmd = [java_exec] + java_opts + [class_name, edgelist_path, out_json_path, epsilon]
93
+ print("[run]", " ".join(cmd))
94
+ subprocess.run(cmd, check=True)
95
+
96
+ def build_single_graph_levels(args):
97
+ ensure_dir(args.out_dir)
98
+ # Stage 0 edgelist is the input; optionally copy for record
99
+ stage0_dir = os.path.join(args.out_dir, "stage0")
100
+ ensure_dir(stage0_dir)
101
+ e0_copy = os.path.join(stage0_dir, "edgelist_0.txt")
102
+ if args.copy_inputs:
103
+ shutil.copyfile(args.input_edgelist, e0_copy)
104
+
105
+ prev_edgelist = args.input_edgelist
106
+ for lvl in range(args.levels):
107
+ stage_dir = os.path.join(args.out_dir, f"stage{lvl}")
108
+ ensure_dir(stage_dir)
109
+ seeds_out = os.path.join(stage_dir, "seeds.json")
110
+ # Run Java to produce seeds at this level
111
+ run_java(args.java, args.class_name, prev_edgelist, seeds_out, args.epsilon, args.java_opts)
112
+
113
+ # Prepare next-level edgelist (unless last level)
114
+ if lvl < args.levels - 1:
115
+ next_stage_dir = os.path.join(args.out_dir, f"stage{lvl+1}")
116
+ ensure_dir(next_stage_dir)
117
+ next_edgelist = os.path.join(next_stage_dir, f"edgelist_{lvl+1}.txt")
118
+ missing = coarsen_edgelist(prev_edgelist, seeds_out, next_edgelist)
119
+ if missing > 0:
120
+ print(f"[warn] stage{lvl}: {missing} edges had nodes missing from seeds; skipped.")
121
+ prev_edgelist = next_edgelist
122
+
123
+ def build_multigraph_levels(args):
124
+ ensure_dir(args.out_dir)
125
+ # Enumerate graph files
126
+ graph_files = sorted(glob.glob(os.path.join(args.graphs_dir, args.glob)))
127
+ if not graph_files:
128
+ raise SystemExit(f"No graph files found in {args.graphs_dir} with pattern {args.glob}")
129
+
130
+ pattern = re.compile(r'(.*?)(\d+)(\.\w+)$') # capture numeric id
131
+ def graph_id_from_path(p: str) -> str:
132
+ base = os.path.basename(p)
133
+ m = pattern.match(base)
134
+ if m:
135
+ return m.group(2).zfill(6) # zero-pad to 6 for consistency
136
+ # fallback: strip extension
137
+ stem = os.path.splitext(base)[0]
138
+ m2 = re.search(r'(\d+)$', stem)
139
+ return (m2.group(1).zfill(6) if m2 else stem)
140
+
141
+ # Stage 0: run Java for each graph
142
+ prev_stage_edgelists: Dict[str, str] = {}
143
+ for lvl in range(args.levels):
144
+ stage_dir = os.path.join(args.out_dir, f"stage{lvl}")
145
+ ensure_dir(stage_dir)
146
+
147
+ if lvl == 0:
148
+ for gpath in graph_files:
149
+ gid = graph_id_from_path(gpath)
150
+ seeds_out = os.path.join(stage_dir, f"graph_{gid}.json")
151
+ run_java(args.java, args.class_name, gpath, seeds_out, args.epsilon, args.java_opts)
152
+ prev_stage_edgelists[gid] = gpath
153
+ else:
154
+ # For each graph, coarsen previous edgelist using previous seeds, then run Java
155
+ for gpath in graph_files:
156
+ gid = graph_id_from_path(gpath)
157
+ prev_edgelist = prev_stage_edgelists[gid]
158
+ prev_seeds = os.path.join(args.out_dir, f"stage{lvl-1}", f"graph_{gid}.json")
159
+ next_edgelist = os.path.join(stage_dir, f"graph_{gid}.txt")
160
+ missing = coarsen_edgelist(prev_edgelist, prev_seeds, next_edgelist)
161
+ if missing > 0:
162
+ print(f"[warn] stage{lvl-1} graph_{gid}: {missing} edges had nodes missing from seeds; skipped.")
163
+
164
+ seeds_out = os.path.join(stage_dir, f"graph_{gid}.json")
165
+ run_java(args.java, args.class_name, next_edgelist, seeds_out, args.epsilon, args.java_opts)
166
+ prev_stage_edgelists[gid] = next_edgelist
167
+
168
+
169
+ def main():
170
+ ap = argparse.ArgumentParser(description="Build LRMC seeds across multiple levels by invoking the Java LRMC tool and coarsening between levels.")
171
+ mode = ap.add_mutually_exclusive_group(required=True)
172
+ mode.add_argument('--input_edgelist', type=str, help='Single-graph mode: path to original edgelist.txt')
173
+ mode.add_argument('--graphs_dir', type=str, help='Multi-graph mode: directory containing per-graph edgelist files (e.g., graph_000000.txt)')
174
+ ap.add_argument('--glob', type=str, default='graph_*.txt', help='Multi-graph mode: glob pattern for graph files (default: graph_*.txt)')
175
+ ap.add_argument('--out_dir', type=str, required=True, help='Output directory (stages will be created here)')
176
+ ap.add_argument('--levels', type=int, required=True, help='Number of levels to build (e.g., 3)')
177
+ # Java settings
178
+ ap.add_argument('--java', type=str, default='java', help='Java executable (default: java)')
179
+ ap.add_argument('--class_name', type=str, default='LRMCGenerateSingleCluster', help='Fully qualified Java class name')
180
+ ap.add_argument('--epsilon', type=str, default='1e6', help='Epsilon argument for the Java tool (default: 1e6)')
181
+ ap.add_argument('--java_opts', type=str, default='', help='Extra options for java (e.g., "-Xmx16g -cp my.jar")')
182
+ ap.add_argument('--copy_inputs', action='store_true', help='Copy original edgelist under stage0 for record (single-graph mode)')
183
+ args = ap.parse_args()
184
+
185
+ # Parse java_opts into a list if provided
186
+ args.java_opts = args.java_opts.split() if args.java_opts else []
187
+
188
+ if args.input_edgelist:
189
+ build_single_graph_levels(args)
190
+ else:
191
+ build_multigraph_levels(args)
192
+
193
+ if __name__ == '__main__':
194
+ main()
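And a worked example of the coarsening contract itself (throwaway temp files; a 4-node path with two clusters collapses to a single super-edge):

    import json, os, tempfile

    tmp = tempfile.mkdtemp()
    e0, seeds, e1 = (os.path.join(tmp, n) for n in ("e0.txt", "seeds.json", "e1.txt"))
    with open(e0, "w") as f:
        f.write("0 1\n1 2\n2 3\n")                  # path graph 0-1-2-3
    with open(seeds, "w") as f:
        json.dump({"clusters": [
            {"cluster_id": 0, "score": 1.0, "members": [0, 1]},
            {"cluster_id": 1, "score": 1.0, "members": [2, 3]},
        ]}, f)
    missing = coarsen_edgelist(e0, seeds, e1)       # -> 0 missing nodes
    print(open(e1).read())                          # "0 1": intra-cluster edges vanish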
src/2.1_lrmc_bilevel.py ADDED
@@ -0,0 +1,301 @@
1
+ # lrmc_bilevel.py
2
+ # Top-1 LRMC ablation: one-cluster pooling vs. plain GCN on Planetoid (e.g., Cora)
3
+ # Requires: torch, torch_geometric, torch_scatter, torch_sparse
4
+ #
5
+ # Usage examples:
6
+ # python lrmc_bilevel.py --dataset Cora --seeds /path/to/lrmc_seeds.json --variant baseline
7
+ # python lrmc_bilevel.py --dataset Cora --seeds /path/to/lrmc_seeds.json --variant pool
8
+ #
9
+ # Notes:
10
+ # - We read your LRMC JSON, pick the single cluster with the highest 'score',
11
+ # assign it to cluster id 0, and make all other nodes singletons (1..K-1).
12
+ # - For --variant pool: Node-GCN -> pool (means) -> Cluster-GCN -> broadcast + skip -> Node-GCN -> classifier
13
+ # - For --variant baseline: Standard 2-layer GCN.
14
+ # - Keep flags like --self_loop_scale and --use_a2 if you want A+λI / A^2 augmentation.
15
+
16
+ import argparse, json
17
+ from pathlib import Path
18
+ from typing import List, Tuple, Optional
19
+
20
+ import torch
21
+ import torch.nn as nn
22
+ import torch.nn.functional as F
23
+
24
+ from torch import Tensor
25
+ from torch_scatter import scatter_add, scatter_mean
26
+ from torch_sparse import coalesce, spspmm
27
+
28
+ from torch_geometric.datasets import Planetoid
29
+ from torch_geometric.nn import GCNConv
30
+
31
+
32
+ # ---------------------------
33
+ # Utilities: edges and seeds
34
+ # ---------------------------
35
+
36
+ def add_scaled_self_loops(edge_index: Tensor,
37
+ edge_weight: Optional[Tensor],
38
+ num_nodes: int,
39
+ scale: float = 1.0) -> Tuple[Tensor, Tensor]:
40
+ """Add self-loops with chosen weight (scale). If scale=0, return unchanged (and create weights if None)."""
41
+ if scale == 0.0:
42
+ if edge_weight is None:
43
+ edge_weight = torch.ones(edge_index.size(1), device=edge_index.device)
44
+ return edge_index, edge_weight
45
+ device = edge_index.device
46
+ self_loops = torch.arange(num_nodes, device=device)
47
+ self_index = torch.stack([self_loops, self_loops], dim=0)
48
+ self_weight = torch.full((num_nodes,), float(scale), device=device)
49
+ base_w = edge_weight if edge_weight is not None else torch.ones(edge_index.size(1), device=device)
50
+ ei = torch.cat([edge_index, self_index], dim=1)
51
+ ew = torch.cat([base_w, self_weight], dim=0)
52
+ ei, ew = coalesce(ei, ew, num_nodes, num_nodes, op='add')
53
+ return ei, ew
54
+
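A toy check of add_scaled_self_loops (values made up, assuming the function above is in scope):

    import torch

    ei = torch.tensor([[0, 1], [1, 0]])             # one undirected edge
    ei2, ew2 = add_scaled_self_loops(ei, None, num_nodes=2, scale=0.5)
    # coalesced result: edges (0,0), (0,1), (1,0), (1,1) with weights [0.5, 1.0, 1.0, 0.5]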
55
+
56
+ def adjacency_power(edge_index: Tensor, num_nodes: int, k: int = 2) -> Tensor:
57
+ """
58
+ Compute the (binary) k-th power adjacency via sparse matmul (torch_sparse.spspmm).
59
+ Only k=2 is implemented; `k` is kept for interface symmetry. Returns coalesced edge_index without weights.
60
+ """
61
+ row, col = edge_index
62
+ val = torch.ones(row.numel(), device=edge_index.device)
63
+ Ai, Av = edge_index, val
64
+ # A^2
65
+ Ri, Rv = spspmm(Ai, Av, Ai, Av, num_nodes, num_nodes, num_nodes)
66
+ mask = Ri[0] != Ri[1] # drop diagonal; add custom self-loops later if desired
67
+ Ri = Ri[:, mask]
68
+ Ri, _ = coalesce(Ri, torch.ones(Ri.size(1), device=edge_index.device), num_nodes, num_nodes, op='add')
69
+ return Ri
70
+
71
+
72
+ def build_cluster_graph(edge_index: Tensor,
73
+ num_nodes: int,
74
+ node2cluster: Tensor,
75
+ weight_per_edge: Optional[Tensor] = None,
76
+ num_clusters: Optional[int] = None
77
+ ) -> Tuple[Tensor, Tensor, int]:
78
+ """
79
+ Build cluster graph A_c = S^T A S with summed multiplicities as weights.
80
+ node2cluster: [N] long tensor mapping each node -> cluster id.
81
+ """
82
+ K = int(node2cluster.max().item()) + 1 if num_clusters is None else num_clusters
83
+ src, dst = edge_index
84
+ csrc = node2cluster[src]
85
+ cdst = node2cluster[dst]
86
+ edge_c = torch.stack([csrc, cdst], dim=0)
87
+ w = weight_per_edge if weight_per_edge is not None else torch.ones(edge_c.size(1), device=edge_c.device)
88
+ edge_c, w = coalesce(edge_c, w, K, K, op='add') # sum multiplicities
89
+ return edge_c, w, K
90
+
91
+
92
+ # -----
93
+ # Seeds
94
+ # -----
95
+
96
+ def _pick_top1_cluster(obj: dict) -> List[int]:
97
+ """
98
+ From LRMC JSON with structure: {"clusters":[{"members":[...], "score":float, ...}, ...]}
99
+ choose the cluster with max (score, size) and return its members.
100
+ """
101
+ clusters = obj.get("clusters", [])
102
+ if not clusters:
103
+ return []
104
+ # choose by highest score, then by size (tiebreaker)
105
+ best = max(clusters, key=lambda c: (float(c.get("score", 0.0)), len(c.get("members", []))))
106
+ return list(best.get("members", []))
107
+
108
+
109
+ def load_top1_assignment(seeds_json: str, n_nodes: int) -> Tuple[Tensor, Tensor]:
110
+ """
111
+ Create a hard assignment for top-1 LRMC cluster:
112
+ - cluster 0 = top-1 LRMC set
113
+ - nodes outside are singletons (1..K-1)
114
+ Returns:
115
+ node2cluster: [N] long
116
+ cluster_scores: [K,1] with 1.0 for top cluster, 0.0 for singletons
117
+ """
118
+ obj = json.loads(Path(seeds_json).read_text())
119
+ C_star = _pick_top1_cluster(obj)
120
+ C_star = torch.tensor(sorted(set(C_star)), dtype=torch.long)
121
+
122
+ node2cluster = torch.full((n_nodes,), -1, dtype=torch.long)
123
+ node2cluster[C_star] = 0
124
+ outside = torch.tensor(sorted(set(range(n_nodes)) - set(C_star.tolist())), dtype=torch.long)
125
+ if outside.numel() > 0:
126
+ node2cluster[outside] = torch.arange(1, 1 + outside.numel(), dtype=torch.long)
127
+ assert int(node2cluster.min()) >= 0, "All nodes must be assigned."
128
+
129
+ K = 1 + outside.numel()
130
+ cluster_scores = torch.zeros(K, 1, dtype=torch.float32)
131
+ if C_star.numel() > 0:
132
+ cluster_scores[0, 0] = 1.0 # emphasize the supercluster
133
+ return node2cluster, cluster_scores
134
+
135
+
136
+ # --------------------------
137
+ # Models (baseline + pooled)
138
+ # --------------------------
139
+
140
+ class GCN2(nn.Module):
141
+ """Plain 2-layer GCN baseline."""
142
+ def __init__(self, in_dim, hid, out_dim):
143
+ super().__init__()
144
+ self.conv1 = GCNConv(in_dim, hid)
145
+ self.conv2 = GCNConv(hid, out_dim)
146
+
147
+ def forward(self, x, edge_index):
148
+ x = F.relu(self.conv1(x, edge_index))
149
+ x = F.dropout(x, p=0.5, training=self.training)
150
+ x = self.conv2(x, edge_index)
151
+ return x
152
+
153
+
154
+ class OneClusterPool(nn.Module):
155
+ """
156
+ Node-GCN -> pool to one-cluster + singletons -> Cluster-GCN -> broadcast + skip -> Node-GCN -> classifier
157
+ """
158
+ def __init__(self,
159
+ in_dim: int,
160
+ hid: int,
161
+ out_dim: int,
162
+ node2cluster: Tensor,
163
+ edge_index_node: Tensor,
164
+ num_nodes: int,
165
+ self_loop_scale: float = 0.0,
166
+ use_a2_for_clusters: bool = False):
167
+ super().__init__()
168
+ self.n2c = node2cluster.long()
169
+ self.K = int(self.n2c.max().item()) + 1
170
+
171
+ # Node graph (A + λI if desired)
172
+ ei_node = edge_index_node
173
+ ei_node, ew_node = add_scaled_self_loops(ei_node, None, num_nodes, scale=self_loop_scale)
174
+ self.register_buffer("edge_index_node", ei_node)
175
+ self.register_buffer("edge_weight_node", ew_node)
176
+
177
+ # Cluster graph from A or A^2
178
+ ei_for_c = adjacency_power(edge_index_node, num_nodes, k=2) if use_a2_for_clusters else edge_index_node
179
+ edge_index_c, edge_weight_c, K = build_cluster_graph(ei_for_c, num_nodes, self.n2c)
180
+ self.register_buffer("edge_index_c", edge_index_c)
181
+ self.register_buffer("edge_weight_c", edge_weight_c)
182
+ self.K = K
183
+
184
+ # Layers
185
+ self.gcn_node1 = GCNConv(in_dim, hid, add_self_loops=False, normalize=True)
186
+ self.gcn_cluster = GCNConv(hid, hid, add_self_loops=True, normalize=True)
187
+ self.gcn_node2 = GCNConv(hid * 2, out_dim) # on concatenated [h_node, h_broadcast]
188
+
189
+ def forward(self, x: Tensor, edge_index_node: Tensor) -> Tensor:
190
+ # Node GCN (uses stored weights)
191
+ h1 = F.relu(self.gcn_node1(x, self.edge_index_node, self.edge_weight_node))
192
+
193
+ # Pool to clusters: mean per cluster
194
+ z = scatter_mean(h1, self.n2c, dim=0, dim_size=self.K) # [K, H]
195
+
196
+ # Cluster GCN
197
+ z2 = F.relu(self.gcn_cluster(z, self.edge_index_c, self.edge_weight_c))
198
+
199
+ # Broadcast back + skip concat
200
+ hb = z2[self.n2c] # [N, H]
201
+ hcat = torch.cat([h1, hb], dim=1) # [N, 2H]
202
+
203
+ # Final node GCN head -> logits
204
+ out = self.gcn_node2(hcat, edge_index_node)
205
+ return out
206
+
207
+
208
+ # -------------
209
+ # Training glue
210
+ # -------------
211
+
212
+ @torch.no_grad()
213
+ def accuracy(logits: Tensor, y: Tensor, mask: Tensor) -> float:
214
+ pred = logits[mask].argmax(dim=1)
215
+ return (pred == y[mask]).float().mean().item()
216
+
217
+
218
+ def run_train_eval(model: nn.Module, data, epochs=200, lr=0.01, wd=5e-4):
219
+ opt = torch.optim.Adam(model.parameters(), lr=lr, weight_decay=wd)
220
+ best_val, best_state = 0.0, None
221
+ for ep in range(1, epochs + 1):
222
+ model.train()
223
+ opt.zero_grad(set_to_none=True)
224
+ logits = model(data.x, data.edge_index)
225
+ loss = F.cross_entropy(logits[data.train_mask], data.y[data.train_mask])
226
+ loss.backward(); opt.step()
227
+
228
+ # track best on val
229
+ model.eval()
230
+ logits = model(data.x, data.edge_index)
231
+ val_acc = accuracy(logits, data.y, data.val_mask)
232
+ if val_acc > best_val:
233
+ best_val, best_state = val_acc, {k: v.detach().clone() for k, v in model.state_dict().items()}
234
+ if ep % 20 == 0:
235
+ tr = accuracy(logits, data.y, data.train_mask)
236
+ te = accuracy(logits, data.y, data.test_mask)
237
+ print(f"[{ep:04d}] loss={loss.item():.4f} train={tr:.3f} val={val_acc:.3f} test={te:.3f}")
238
+
239
+ # test @ best val
240
+ if best_state is not None:
241
+ model.load_state_dict(best_state)
242
+ model.eval()
243
+ logits = model(data.x, data.edge_index)
244
+ return {
245
+ "val": accuracy(logits, data.y, data.val_mask),
246
+ "test": accuracy(logits, data.y, data.test_mask)
247
+ }
248
+
249
+
250
+ # -----------
251
+ # Entrypoint
252
+ # -----------
253
+
254
+ def main():
255
+ ap = argparse.ArgumentParser()
256
+ ap.add_argument("--dataset", required=True, choices=["Cora", "Citeseer", "Pubmed"])
257
+ ap.add_argument("--seeds", required=True, help="Path to LRMC seeds JSON (single large graph).")
258
+ ap.add_argument("--variant", choices=["baseline", "pool"], default="pool",
259
+ help="baseline=plain GCN; pool=top-1 LRMC one-cluster pooling")
260
+ ap.add_argument("--hidden", type=int, default=128)
261
+ ap.add_argument("--epochs", type=int, default=200)
262
+ ap.add_argument("--lr", type=float, default=0.01)
263
+ ap.add_argument("--wd", type=float, default=5e-4)
264
+ ap.add_argument("--dropout", type=float, default=0.5) # used in baseline only
265
+ ap.add_argument("--self_loop_scale", type=float, default=0.0, help="λ for A+λI on node graph (0 disables)")
266
+ ap.add_argument("--use_a2", action="store_true", help="Use A^2 to build the cluster graph (recommended for pool)")
267
+ ap.add_argument("--seed", type=int, default=42)
268
+ args = ap.parse_args()
269
+
270
+ torch.manual_seed(args.seed)
271
+
272
+ # Load dataset
273
+ ds = Planetoid(root=f"./data/{args.dataset}", name=args.dataset)
274
+ data = ds[0]
275
+ in_dim, out_dim, n = ds.num_node_features, ds.num_classes, data.num_nodes
276
+
277
+ if args.variant == "baseline":
278
+ model = GCN2(in_dim, args.hidden, out_dim)
279
+ # use default add_self_loops=True behavior inside convs
280
+ res = run_train_eval(model, data, epochs=args.epochs, lr=args.lr, wd=args.wd)
281
+ print(f"Baseline GCN: val={res['val']:.4f} test={res['test']:.4f}")
282
+ return
283
+
284
+ # Top-1 LRMC assignment
285
+ node2cluster, _ = load_top1_assignment(args.seeds, n)
286
+
287
+ # One-cluster pooled model
288
+ model = OneClusterPool(in_dim=in_dim,
289
+ hid=args.hidden,
290
+ out_dim=out_dim,
291
+ node2cluster=node2cluster,
292
+ edge_index_node=data.edge_index,
293
+ num_nodes=n,
294
+ self_loop_scale=args.self_loop_scale,
295
+ use_a2_for_clusters=args.use_a2)
296
+ res = run_train_eval(model, data, epochs=args.epochs, lr=args.lr, wd=args.wd)
297
+ print(f"L-RMC (top-1 pool): val={res['val']:.4f} test={res['test']:.4f}")
298
+
299
+
300
+ if __name__ == "__main__":
301
+ main()
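A minimal sketch of the pooled cluster graph on a toy path (assumes the functions above are in scope):

    import torch

    # Path 0-1-2-3 (both directions); nodes {0, 1} form cluster 0,
    # nodes 2 and 3 stay singleton clusters 1 and 2.
    ei = torch.tensor([[0, 1, 1, 2, 2, 3],
                       [1, 0, 2, 1, 3, 2]])
    n2c = torch.tensor([0, 0, 1, 2])
    edge_c, w, K = build_cluster_graph(ei, num_nodes=4, node2cluster=n2c)
    # K == 3; the intra-cluster edge 0-1 becomes a self-loop (0, 0) of weight 2,
    # and the summed multiplicities record how strongly clusters touch.
    print(edge_c, w, K)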
src/2.2_lrmc_bilevel.py ADDED
@@ -0,0 +1,325 @@
1
+ # 2.2_lrmc_bilevel.py
2
+ # Top-1 LRMC ablation with debug guards so seeds differences are visible.
3
+ # Requires: torch, torch_geometric, torch_scatter, torch_sparse
4
+
5
+ import argparse, json, hashlib
6
+ from pathlib import Path
7
+ from typing import List, Tuple, Optional
8
+
9
+ import torch
10
+ import torch.nn as nn
11
+ import torch.nn.functional as F
12
+ from torch import Tensor
13
+
14
+ from torch_scatter import scatter_mean
15
+ from torch_sparse import coalesce, spspmm
16
+ from torch_geometric.datasets import Planetoid
17
+ from torch_geometric.nn import GCNConv
18
+
19
+
20
+ # ---------------------------
21
+ # Utilities: edges and seeds
22
+ # ---------------------------
23
+
24
+ def add_scaled_self_loops(edge_index: Tensor,
25
+ edge_weight: Optional[Tensor],
26
+ num_nodes: int,
27
+ scale: float = 1.0) -> Tuple[Tensor, Tensor]:
28
+ if scale == 0.0:
29
+ if edge_weight is None:
30
+ edge_weight = torch.ones(edge_index.size(1), device=edge_index.device)
31
+ return edge_index, edge_weight
32
+ device = edge_index.device
33
+ self_loops = torch.arange(num_nodes, device=device)
34
+ self_index = torch.stack([self_loops, self_loops], dim=0)
35
+ self_weight = torch.full((num_nodes,), float(scale), device=device)
36
+ base_w = edge_weight if edge_weight is not None else torch.ones(edge_index.size(1), device=device)
37
+ ei = torch.cat([edge_index, self_index], dim=1)
38
+ ew = torch.cat([base_w, self_weight], dim=0)
39
+ ei, ew = coalesce(ei, ew, num_nodes, num_nodes, op='add')
40
+ return ei, ew
41
+
42
+
43
+ def adjacency_power(edge_index: Tensor, num_nodes: int, k: int = 2) -> Tensor:
44
+ # A^2 via spspmm (only k=2 is implemented); returns binary, coalesced edge_index with no self-loops
45
+ row, col = edge_index
46
+ val = torch.ones(row.numel(), device=edge_index.device)
47
+ Ai, Av = edge_index, val
48
+ Ri, _ = spspmm(Ai, Av, Ai, Av, num_nodes, num_nodes, num_nodes)
49
+ mask = Ri[0] != Ri[1]
50
+ Ri = Ri[:, mask]
51
+ Ri, _ = coalesce(Ri, torch.ones(Ri.size(1), device=edge_index.device), num_nodes, num_nodes, op='add')
52
+ return Ri
53
+
54
+
55
+ def build_cluster_graph(edge_index: Tensor,
56
+ num_nodes: int,
57
+ node2cluster: Tensor,
58
+ weight_per_edge: Optional[Tensor] = None,
59
+ num_clusters: Optional[int] = None
60
+ ) -> Tuple[Tensor, Tensor, int]:
61
+ K = int(node2cluster.max().item()) + 1 if num_clusters is None else num_clusters
62
+ src, dst = edge_index
63
+ csrc = node2cluster[src]
64
+ cdst = node2cluster[dst]
65
+ edge_c = torch.stack([csrc, cdst], dim=0)
66
+ w = weight_per_edge if weight_per_edge is not None else torch.ones(edge_c.size(1), device=edge_c.device)
67
+ edge_c, w = coalesce(edge_c, w, K, K, op='add')
68
+ return edge_c, w, K
69
+
70
+
71
+ # -----
72
+ # Seeds
73
+ # -----
74
+
75
+ def _md5(path: Path) -> str:
76
+ h = hashlib.md5()
77
+ with path.open('rb') as f:
78
+ for chunk in iter(lambda: f.read(8192), b''):
79
+ h.update(chunk)
80
+ return h.hexdigest()
81
+
82
+
83
+ def _extract_members(cluster_obj: dict) -> List[int]:
84
+ """
85
+ Try 'members' first, then 'seed_nodes'. Raise if neither works.
86
+ """
87
+ m = cluster_obj.get("members", None)
88
+ if isinstance(m, list) and len(m) > 0:
89
+ return list(dict.fromkeys(int(x) for x in m)) # dedupe/preserve order
90
+ m2 = cluster_obj.get("seed_nodes", None)
91
+ if isinstance(m2, list) and len(m2) > 0:
92
+ return list(dict.fromkeys(int(x) for x in m2))
93
+ # If both present but empty, return empty; caller will handle.
94
+ if isinstance(m, list) or isinstance(m2, list):
95
+ return []
96
+ raise KeyError("Cluster object has neither 'members' nor 'seed_nodes'.")
97
+
98
+
99
+ def _pick_top1_cluster(obj: dict) -> List[int]:
100
+ """
101
+ From {"clusters":[{..., "score":float, "members" or "seed_nodes"}, ...]},
102
+ choose max by (score, size). Returns deduped member list.
103
+ """
104
+ clusters = obj.get("clusters", [])
105
+ if not isinstance(clusters, list) or len(clusters) == 0:
106
+ return []
107
+ def keyfun(c):
108
+ score = float(c.get("score", 0.0))
109
+ try:
110
+ mem = _extract_members(c)
111
+ except KeyError:
112
+ mem = []
113
+ return (score, len(mem))
114
+ best = max(clusters, key=keyfun)
115
+ try:
116
+ members = _extract_members(best)
117
+ except KeyError:
118
+ members = []
119
+ return sorted(set(int(x) for x in members))
120
+
121
+
122
+ def load_top1_assignment(seeds_json: str, n_nodes: int, debug: bool = False) -> Tuple[Tensor, Tensor, dict]:
123
+ """
124
+ Hard assignment for top-1 LRMC cluster:
125
+ cluster 0 = top cluster; others are singletons.
126
+ Returns node2cluster[N], cluster_scores[K,1], and a small debug dict.
127
+ """
128
+ p = Path(seeds_json)
129
+ text = p.read_text(encoding='utf-8')
130
+ obj = json.loads(text)
131
+
132
+ C_star = _pick_top1_cluster(obj)
133
+ # if len(C_star) > 0 and max(C_star) == n_nodes:
134
+ # Looks 1-indexed (since max == N, not N-1) → shift down by 1
135
+ C_star = [u - 1 for u in C_star]
136
+ C_star = torch.tensor(C_star, dtype=torch.long)
137
+
138
+ # C_star = _pick_top1_cluster(obj)
139
+ # C_star = torch.tensor(C_star, dtype=torch.long)
140
+ node2cluster = torch.full((n_nodes,), -1, dtype=torch.long)
141
+
142
+ if C_star.numel() == 0:
143
+ # FAIL LOUDLY instead of silently falling back to identity
144
+ raise RuntimeError(
145
+ f"No members found for top-1 cluster in {seeds_json}. "
146
+ f"Expected 'members' or 'seed_nodes' to be non-empty."
147
+ )
148
+
149
+ node2cluster[C_star] = 0
150
+ outside = torch.tensor(sorted(set(range(n_nodes)) - set(C_star.tolist())), dtype=torch.long)
151
+ if outside.numel() > 0:
152
+ node2cluster[outside] = torch.arange(1, 1 + outside.numel(), dtype=torch.long)
153
+ assert int(node2cluster.min()) >= 0
154
+
155
+ K = 1 + outside.numel()
156
+ cluster_scores = torch.zeros(K, 1, dtype=torch.float32)
157
+ cluster_scores[0, 0] = 1.0
158
+
159
+ info = {
160
+ "json_md5": _md5(p),
161
+ "top_cluster_size": int(C_star.numel()),
162
+ "K": int(K),
163
+ "n_outside": int(outside.numel()),
164
+ "first_members": [int(x) for x in C_star[:10].tolist()],
165
+ }
166
+ if debug:
167
+ print(f"[LRMC] Loaded {seeds_json} (md5={info['json_md5']}) | "
168
+ f"top_size={info['top_cluster_size']} K={info['K']} outside={info['n_outside']} "
169
+ f"first10={info['first_members']}")
170
+ return node2cluster, cluster_scores, info
171
+
172
+
173
+ # --------------------------
174
+ # Models (baseline + pooled)
175
+ # --------------------------
176
+
177
+ class GCN2(nn.Module):
178
+ def __init__(self, in_dim, hid, out_dim, dropout=0.5):
179
+ super().__init__()
180
+ self.conv1 = GCNConv(in_dim, hid)
181
+ self.conv2 = GCNConv(hid, out_dim)
182
+ self.dropout = dropout
183
+ def forward(self, x, edge_index):
184
+ x = F.relu(self.conv1(x, edge_index))
185
+ x = F.dropout(x, p=self.dropout, training=self.training)
186
+ x = self.conv2(x, edge_index)
187
+ return x
188
+
189
+
190
+ class OneClusterPool(nn.Module):
191
+ def __init__(self,
192
+ in_dim: int,
193
+ hid: int,
194
+ out_dim: int,
195
+ node2cluster: Tensor,
196
+ edge_index_node: Tensor,
197
+ num_nodes: int,
198
+ self_loop_scale: float = 0.0,
199
+ use_a2_for_clusters: bool = False,
200
+ debug_header: str = ""):
201
+ super().__init__()
202
+ self.n2c = node2cluster.long()
203
+ self.K = int(self.n2c.max().item()) + 1
204
+
205
+ # Node graph (A + λI if desired)
206
+ ei_node = edge_index_node
207
+ ei_node, ew_node = add_scaled_self_loops(ei_node, None, num_nodes, scale=self_loop_scale)
208
+ self.register_buffer("edge_index_node", ei_node)
209
+ self.register_buffer("edge_weight_node", ew_node)
210
+
211
+ # Cluster graph
212
+ ei_for_c = adjacency_power(edge_index_node, num_nodes, k=2) if use_a2_for_clusters else edge_index_node
213
+ edge_index_c, edge_weight_c, K = build_cluster_graph(ei_for_c, num_nodes, self.n2c)
214
+ self.register_buffer("edge_index_c", edge_index_c)
215
+ self.register_buffer("edge_weight_c", edge_weight_c)
216
+ self.K = K
217
+
218
+ if debug_header:
219
+ print(f"[POOL] {debug_header} | cluster_edges={edge_index_c.size(1)} (K={K})")
220
+
221
+ # Layers
222
+ self.gcn_node1 = GCNConv(in_dim, hid, add_self_loops=False, normalize=True)
223
+ self.gcn_cluster = GCNConv(hid, hid, add_self_loops=True, normalize=True)
224
+ self.gcn_node2 = GCNConv(hid * 2, out_dim) # concat [h_node, h_broadcast]
225
+
226
+ def forward(self, x: Tensor, edge_index_node: Tensor) -> Tensor:
227
+ h1 = F.relu(self.gcn_node1(x, self.edge_index_node, self.edge_weight_node))
228
+ z = scatter_mean(h1, self.n2c, dim=0, dim_size=self.K) # [K, H]
229
+ z2 = F.relu(self.gcn_cluster(z, self.edge_index_c, self.edge_weight_c))
230
+ hb = z2[self.n2c] # [N, H]
231
+ hcat = torch.cat([h1, hb], dim=1) # [N, 2H]
232
+ out = self.gcn_node2(hcat, edge_index_node)
233
+ return out
234
+
235
+
236
+ # -------------
237
+ # Training glue
238
+ # -------------
239
+
240
+ @torch.no_grad()
241
+ def accuracy(logits: Tensor, y: Tensor, mask: Tensor) -> float:
242
+ pred = logits[mask].argmax(dim=1)
243
+ return (pred == y[mask]).float().mean().item()
244
+
245
+
246
+ def run_train_eval(model: nn.Module, data, epochs=200, lr=0.01, wd=5e-4):
247
+ opt = torch.optim.Adam(model.parameters(), lr=lr, weight_decay=wd)
248
+ best_val, best_state = 0.0, None
249
+ for ep in range(1, epochs + 1):
250
+ model.train()
251
+ opt.zero_grad(set_to_none=True)
252
+ logits = model(data.x, data.edge_index)
253
+ loss = F.cross_entropy(logits[data.train_mask], data.y[data.train_mask])
254
+ loss.backward(); opt.step()
255
+
256
+ model.eval()
257
+ logits = model(data.x, data.edge_index)
258
+ val_acc = accuracy(logits, data.y, data.val_mask)
259
+ if val_acc > best_val:
260
+ best_val, best_state = val_acc, {k: v.detach().clone() for k, v in model.state_dict().items()}
261
+ if ep % 20 == 0:
262
+ tr = accuracy(logits, data.y, data.train_mask)
263
+ te = accuracy(logits, data.y, data.test_mask)
264
+ print(f"[{ep:04d}] loss={loss.item():.4f} train={tr:.3f} val={val_acc:.3f} test={te:.3f}")
265
+
266
+ if best_state is not None:
267
+ model.load_state_dict(best_state)
268
+ model.eval()
269
+ logits = model(data.x, data.edge_index)
270
+ return {"val": accuracy(logits, data.y, data.val_mask),
271
+ "test": accuracy(logits, data.y, data.test_mask)}
272
+
273
+
274
+ # -----------
275
+ # Entrypoint
276
+ # -----------
277
+
278
+ def main():
279
+ ap = argparse.ArgumentParser()
280
+ ap.add_argument("--dataset", required=True, choices=["Cora", "Citeseer", "Pubmed"])
281
+ ap.add_argument("--seeds", required=True, help="Path to LRMC seeds JSON (single large graph).")
282
+ ap.add_argument("--variant", choices=["baseline", "pool"], default="pool")
283
+ ap.add_argument("--hidden", type=int, default=128)
284
+ ap.add_argument("--epochs", type=int, default=200)
285
+ ap.add_argument("--lr", type=float, default=0.01)
286
+ ap.add_argument("--wd", type=float, default=5e-4)
287
+ ap.add_argument("--dropout", type=float, default=0.5) # baseline only
288
+ ap.add_argument("--self_loop_scale", type=float, default=0.0)
289
+ ap.add_argument("--use_a2", action="store_true", help="Use A^2 for the cluster graph.")
290
+ ap.add_argument("--seed", type=int, default=42)
291
+ ap.add_argument("--debug", action="store_true", help="Print seeds md5, cluster size, K, etc.")
292
+ args = ap.parse_args()
293
+
294
+ torch.manual_seed(args.seed)
295
+
296
+ ds = Planetoid(root=f"./data/{args.dataset}", name=args.dataset)
297
+ data = ds[0]
298
+ in_dim, out_dim, n = ds.num_node_features, ds.num_classes, data.num_nodes
299
+
300
+ if args.variant == "baseline":
301
+ model = GCN2(in_dim, args.hidden, out_dim, dropout=args.dropout)
302
+ res = run_train_eval(model, data, epochs=args.epochs, lr=args.lr, wd=args.wd)
303
+ print(f"Baseline GCN: val={res['val']:.4f} test={res['test']:.4f}")
304
+ return
305
+
306
+ # pool variant
307
+ node2cluster, _, info = load_top1_assignment(args.seeds, n, debug=args.debug)
308
+ dbg_header = f"seeds_md5={info['json_md5']} top_size={info['top_cluster_size']} K={info['K']}"
309
+
310
+ model = OneClusterPool(in_dim=in_dim,
311
+ hid=args.hidden,
312
+ out_dim=out_dim,
313
+ node2cluster=node2cluster,
314
+ edge_index_node=data.edge_index,
315
+ num_nodes=n,
316
+ self_loop_scale=args.self_loop_scale,
317
+ use_a2_for_clusters=args.use_a2,
318
+ debug_header=dbg_header)
319
+
320
+ res = run_train_eval(model, data, epochs=args.epochs, lr=args.lr, wd=args.wd)
321
+ print(f"L-RMC (top-1 pool): val={res['val']:.4f} test={res['test']:.4f}")
322
+
323
+
324
+ if __name__ == "__main__":
325
+ main()
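_extract_members dedupes while preserving first occurrence via dict.fromkeys; a quick illustration (values made up), followed by a typical debug invocation (the seeds path is hypothetical):

    print(_extract_members({"members": [3, 1, 3, 2, 1]}))   # -> [3, 1, 2]
    print(_extract_members({"seed_nodes": [7, 7, 4]}))      # -> [7, 4]

    # python src/2.2_lrmc_bilevel.py --dataset Cora --seeds out/stage0/seeds.json \
    #        --variant pool --use_a2 --debug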