{
  "config": {
    "V": 256,
    "D": 16,
    "patch_size": 16,
    "hidden": 768,
    "depth": 4,
    "n_cross_layers": 2,
    "target_cv": 0.125,
    "dataset": "curriculum_noise",
    "img_size": 64,
    "lr": 0.0003
  },
  "image_datasets": {
    "cifar10": {
      "label": "CIFAR-10\u219264",
      "mse_mean": 0.0014347061514854431,
      "mse_std": 0.0035109403543174267,
      "mse_min": 0.00036670686677098274,
      "mse_max": 0.02804356999695301,
      "S0": 4.297059059143066,
      "SD": 1.8400815725326538,
      "ratio": 2.335254669189453,
      "erank": 15.757864952087402,
      "fidelity": 99.85653162002563
    },
    "mnist": {
      "label": "MNIST\u219264",
      "mse_mean": 0.0014255789574235678,
      "mse_std": 0.0003838766715489328,
      "mse_min": 0.0009477993589825928,
      "mse_max": 0.0027567630168050528,
      "S0": 4.296897888183594,
      "SD": 1.83693265914917,
      "ratio": 2.339169979095459,
      "erank": 15.75734806060791,
      "fidelity": 99.8574435710907
    },
    "tiny_imagenet": {
      "label": "TinyImageNet\u219264",
      "mse_mean": 0.004735944792628288,
      "mse_std": 0.0068034762516617775,
      "mse_min": 0.0003534757997840643,
      "mse_max": 0.0489242784678936,
      "S0": 4.297494888305664,
      "SD": 1.844409704208374,
      "ratio": 2.3300108909606934,
      "erank": 15.758447647094727,
      "fidelity": 99.52640533447266
    },
    "imagenet128": {
      "label": "ImageNet-128\u219264",
      "mse_mean": 0.002932386938482523,
      "mse_std": 0.008305463008582592,
      "mse_min": 0.0005107235629111528,
      "mse_max": 0.06651423126459122,
      "S0": 4.296170234680176,
      "SD": 1.8447363376617432,
      "ratio": 2.3288803100585938,
      "erank": 15.758931159973145,
      "fidelity": 99.70676302909851
    },
    "imagenet256": {
      "label": "ImageNet-256\u219264",
      "mse_mean": 0.003373820800334215,
      "mse_std": 0.00939043890684843,
      "mse_min": 0.0005268495297059417,
      "mse_max": 0.0749613344669342,
      "S0": 4.296170234680176,
      "SD": 1.8447273969650269,
      "ratio": 2.3288917541503906,
      "erank": 15.758930206298828,
      "fidelity": 99.66261982917786
    }
  },
  "noise_types": {
    "gaussian": {
      "mse_mean": 0.2886161804199219,
      "mse_std": 0.0077516548335552216,
      "S0": 4.2976789474487305,
      "SD": 1.8385813236236572,
      "ratio": 2.3374972343444824,
      "erank": 15.757594108581543,
      "byte_exact": 0.023179372772574425,
      "byte_within1": 0.06980133056640625
    },
    "uniform": {
      "mse_mean": 0.09659423679113388,
      "mse_std": 0.0027394602075219154,
      "S0": 4.296614646911621,
      "SD": 1.8364839553833008,
      "ratio": 2.339587450027466,
      "erank": 15.75722885131836,
      "byte_exact": 0.03953297808766365,
      "byte_within1": 0.1183675155043602
    },
    "uniform_scaled": {
      "mse_mean": 0.3850646913051605,
      "mse_std": 0.007132471073418856,
      "S0": 4.297467231750488,
      "SD": 1.8398751020431519,
      "ratio": 2.3357386589050293,
      "erank": 15.757889747619629,
      "byte_exact": 0.019789377227425575,
      "byte_within1": 0.059528350830078125
    },
    "poisson": {
      "mse_mean": 0.05631270632147789,
      "mse_std": 0.059722624719142914,
      "S0": 4.2967119216918945,
      "SD": 1.8365355730056763,
      "ratio": 2.3395745754241943,
      "erank": 15.757218360900879,
      "byte_exact": 0.06762949377298355,
      "byte_within1": 0.20140330493450165
    },
    "pink": {
      "mse_mean": 0.001325038610957563,
      "mse_std": 0.001498894882388413,
      "S0": 4.331319808959961,
      "SD": 2.2164804935455322,
      "ratio": 1.9541429281234741,
      "erank": 15.804304122924805,
      "byte_exact": 0.7337977290153503,
      "byte_within1": 0.9098625183105469
    },
    "brown": {
      "mse_mean": 0.0010676529491320252,
      "mse_std": 0.001300301868468523,
      "S0": 4.332561492919922,
      "SD": 2.215461254119873,
      "ratio": 1.9556024074554443,
      "erank": 15.804342269897461,
      "byte_exact": 0.7268803715705872,
      "byte_within1": 0.9237658381462097
    },
    "salt_pepper": {
      "mse_mean": 0.7266831398010254,
      "mse_std": 0.02865126170217991,
      "S0": 4.296142578125,
      "SD": 1.8696966171264648,
      "ratio": 2.2977752685546875,
      "erank": 15.764196395874023,
      "byte_exact": 0.019995370879769325,
      "byte_within1": 0.05995432659983635
    },
    "sparse": {
      "mse_mean": 0.18902762234210968,
      "mse_std": 0.008378366008400917,
      "S0": 4.297026634216309,
      "SD": 1.8373357057571411,
      "ratio": 2.3387269973754883,
      "erank": 15.757360458374023,
      "byte_exact": 0.040803272277116776,
      "byte_within1": 0.12197113037109375
    },
    "block": {
      "mse_mean": 0.011099232360720634,
      "mse_std": 0.013453341089189053,
      "S0": 4.298161506652832,
      "SD": 1.8415257930755615,
      "ratio": 2.334021806716919,
      "erank": 15.758216857910156,
      "byte_exact": 0.25016021728515625,
      "byte_within1": 0.5908978581428528
    },
    "gradient": {
      "mse_mean": 0.07368763536214828,
      "mse_std": 0.001773475669324398,
      "S0": 4.295886993408203,
      "SD": 1.841874122619629,
      "ratio": 2.332345485687256,
      "erank": 15.758503913879395,
      "byte_exact": 0.046288806945085526,
      "byte_within1": 0.13783009350299835
    },
    "checkerboard": {
      "mse_mean": 0.026502303779125214,
      "mse_std": 0.000645435182377696,
      "S0": 4.298178672790527,
      "SD": 1.8394688367843628,
      "ratio": 2.336641311645508,
      "erank": 15.757760047912598,
      "byte_exact": 0.07736968994140625,
      "byte_within1": 0.22842152416706085
    },
    "mixed": {
      "mse_mean": 0.10527042299509048,
      "mse_std": 0.03688870370388031,
      "S0": 4.296633720397949,
      "SD": 1.8366726636886597,
      "ratio": 2.339357376098633,
      "erank": 15.757261276245117,
      "byte_exact": 0.04016876220703125,
      "byte_within1": 0.1195831298828125
    },
    "structural": {
      "mse_mean": 0.17544668912887573,
      "mse_std": 0.006575456354767084,
      "S0": 4.307066917419434,
      "SD": 1.9492502212524414,
      "ratio": 2.209601879119873,
      "erank": 15.773061752319336,
      "byte_exact": 0.21276219189167023,
      "byte_within1": 0.2740936279296875
    },
    "cauchy": {
      "mse_mean": 0.8427785038948059,
      "mse_std": 0.020405219867825508,
      "S0": 4.297488212585449,
      "SD": 1.8460646867752075,
      "ratio": 2.327918529510498,
      "erank": 15.7590913772583,
      "byte_exact": 0.0134493513032794,
      "byte_within1": 0.04004160687327385
    },
    "exponential": {
      "mse_mean": 0.26751795411109924,
      "mse_std": 0.007822155952453613,
      "S0": 4.2975873947143555,
      "SD": 1.8381683826446533,
      "ratio": 2.337972640991211,
      "erank": 15.757513999938965,
      "byte_exact": 0.025768280029296875,
      "byte_within1": 0.07705942541360855
    },
    "laplace": {
      "mse_mean": 0.5420289039611816,
      "mse_std": 0.013301815837621689,
      "S0": 4.297547817230225,
      "SD": 1.8408969640731812,
      "ratio": 2.3344857692718506,
      "erank": 15.758106231689453,
      "byte_exact": 0.017190298065543175,
      "byte_within1": 0.052120208740234375
    }
  },
  "text": {
    "Hello, world! This is a test of the geom": {
      "mse": 0.0010953543242067099,
      "byte_acc": 0.03703703731298447
    },
    "The quick brown fox jumps over the lazy ": {
      "mse": 0.0010851288679987192,
      "byte_acc": 0.0181818176060915
    },
    "import torch; model = PatchSVAE(); outpu": {
      "mse": 0.0010681533021852374,
      "byte_acc": 0.0
    },
    "E = mc\u00b2 \u2014 Albert Einstein, theoretical p": {
      "mse": 0.0011298768222332,
      "byte_acc": 0.0
    },
    "To be, or not to be, that is the questio": {
      "mse": 0.0011395657202228904,
      "byte_acc": 0.017241379246115685
    },
    "\u222b\u2080^\u221e e^(-x\u00b2) dx = \u221a\u03c0/2 \u2014 Gaussian integr": {
      "mse": 0.0012817305978387594,
      "byte_acc": 0.0
    },
    "01101000 01100101 01101100 01101100 0110": {
      "mse": 0.0010446326341480017,
      "byte_acc": 0.016393441706895828
    },
    "SELECT * FROM models WHERE cv BETWEEN 0.": {
      "mse": 0.0010333505924791098,
      "byte_acc": 0.01923076994717121
    }
  },
  "piecemeal": {
    "gaussian": 0.2881821095943451,
    "uniform": 0.09624352306127548,
    "pink": 0.0006394972442649305,
    "salt_pepper": 0.7255234718322754,
    "cauchy": 0.8445295691490173
  },
  "signal_survival": {
    "cifar10": {
      "survival": 100.4367528204266,
      "snr": 28.14120036847449
    },
    "mnist": {
      "survival": 100.24542360575926,
      "snr": 27.32563422996327
    },
    "tiny_imagenet": {
      "survival": 99.39334267356153,
      "snr": 22.74953529430487
    },
    "imagenet128": {
      "survival": 100.36820346533295,
      "snr": 26.553161467892533
    },
    "imagenet256": {
      "survival": 100.34640214616964,
      "snr": 25.971234282315912
    },
    "gaussian": {
      "survival": 69.41721567412671,
      "snr": 5.408560835738218
    },
    "pink": {
      "survival": 99.90278609869983,
      "snr": 31.36067843690602
    },
    "salt_pepper": {
      "survival": 89.36883987080029,
      "snr": 7.439095668373767
    },
    "cauchy": {
      "survival": 69.29348994149203,
      "snr": 5.461273405311193
    }
  },
  "alpha": {
    "layer_0": {
      "mean": 0.05629274249076843,
      "max": 0.06006751209497452,
      "min": 0.05116058140993118,
      "std": 0.002453029388561845,
      "values": [
        0.06006751209497452,
        0.0591709204018116,
        0.059410709887742996,
        0.05934760719537735,
        0.056799840182065964,
        0.05789351090788841,
        0.0569395087659359,
        0.05661381408572197,
        0.054646458476781845,
        0.05604906007647514,
        0.05461455136537552,
        0.054723966866731644,
        0.054879408329725266,
        0.054339487105607986,
        0.05402696132659912,
        0.05116058140993118
      ]
    },
    "layer_1": {
      "mean": 0.057577453553676605,
      "max": 0.06145685911178589,
      "min": 0.05558929964900017,
      "std": 0.0017885520355775952,
      "values": [
        0.05910342559218407,
        0.059786856174468994,
        0.059539224952459335,
        0.05954492837190628,
        0.06145685911178589,
        0.05699703097343445,
        0.057837218046188354,
        0.05728701874613762,
        0.0571381039917469,
        0.055610667914152145,
        0.05702045559883118,
        0.05618014559149742,
        0.05567823722958565,
        0.05611863359808922,
        0.05635116249322891,
        0.05558929964900017
      ]
    }
  },
  "compression": {
    "input_values": 12288,
    "latent_values": 256,
    "ratio": 48.0
  }
}