OpenCores
URL https://opencores.org/ocsvn/or1k/or1k/trunk

Subversion Repositories or1k

Compare Revisions

  • This comparison shows the changes necessary to convert path
    /or1k/tags/MW_0_8_9PRE7/mw/src/demos/nxscribble
    from Rev 674 to Rev 1765
    Reverse comparison

Rev 674 → Rev 1765

/letters.cl
0,0 → 1,955
36 classes
a
b
c
d
e
f
g
h
i
j
k
l
m
n
o
p
q
r
s
t
u
v
w
x
y
z
A
B
N
P
R
S
U
V
W
X
V 12 0.399646 -0.901023 68.3035 0.853626 45.3077 0.985601 0.0577468 108.794 -1.7641 5.05968 6.51905 0.313636
M 12 12
0.252364 0.122096 -3.5644 -0.0892954 1.67161 0.0128568 -0.00446271 -5.93004 1.85371 1.28053 3.68032 -0.0247525
0 0.0604855 -1.47857 -0.0455797 1.11245 0.00642619 -0.00977233 -2.43846 0.961169 0.748691 2.03279 -0.00418033
0 0 1709.91 1.94678 1004.54 -0.876742 14.6047 2607.49 -28.7349 23.6491 -22.3418 3.25618
0 0 0 0.0560832 -1.33471 -0.0090991 0.0136832 3.74867 -0.378436 -0.23639 -0.403897 -0.0169437
0 0 0 0 707.369 -0.207999 8.21991 1484.2 -2.74835 23.6055 -1.57592 2.8232
0 0 0 0 0 0.0038999 0.000328831 -1.00935 0.0956826 0.116303 0.224319 0.0070438
0 0 0 0 0 0 0.27392 22.2565 -0.900239 -0.830305 -2.90493 -0.00931034
0 0 0 0 0 0 0 4123.04 -6.75052 94.9037 110.282 6.55017
0 0 0 0 0 0 0 0 43.1649 52.3501 131.814 1.11174
0 0 0 0 0 0 0 0 0 73.4378 175.718 2.35503
0 0 0 0 0 0 0 0 0 0 437.382 4.73112
0 0 0 0 0 0 0 0 0 0 0 0.160455
V 12 5.36574 -2.40819 1.04612 114.77 -0.517563 28.1487 -4.92446 -0.340564 -1.46704 8.21809 -2.18899 31.972
V 12 0.154076 0.980119 60.408 0.965168 35.6989 -0.0173768 0.990109 206.82 -14.0522 15.8188 17.4801 0.523333
M 12 12
0.135378 -0.0255002 -1.38321 -0.018171 0.504943 0.080105 0.00160845 -4.20524 -1.01446 -0.0653353 -1.28068 -0.00485185
0 0.00527446 0.290905 0.00276587 -0.242418 -0.0142026 -0.000362804 0.553495 0.129869 0.0227955 0.306975 0.00188893
0 0 394.828 1.24298 186.955 -5.91599 -0.228183 1269.07 10.6292 5.84897 2.67133 0.896392
0 0 0 0.0286294 0.888624 -0.0460479 0.00154784 6.01879 0.296226 0.0248924 -0.49967 -0.0018663
0 0 0 0 189.275 -3.41701 -0.112526 786.651 13.0278 -4.27161 -58.5574 0.368262
0 0 0 0 0 0.173747 0.00462184 -21.619 -0.0402202 -0.00820824 0.584956 -0.00252116
0 0 0 0 0 0 0.000692129 -0.573892 0.053466 0.0141727 -0.0345009 -0.000433994
0 0 0 0 0 0 0 4540.4 87.4294 6.41814 -109.319 2.80201
0 0 0 0 0 0 0 0 35.9685 -1.08454 9.01869 0.17514
0 0 0 0 0 0 0 0 0 2.24012 2.54924 0.00104982
0 0 0 0 0 0 0 0 0 0 51.4081 0.1368
0 0 0 0 0 0 0 0 0 0 0 0.0126
V 12 14.4493 26.856 0.200203 132.203 -0.228309 -1.5976 49.8838 -0.0787449 -7.54233 31.0478 -6.10292 -44.2283
V 12 -0.51548 -0.699395 64.3271 0.866707 33.7348 0.348184 0.924161 109.692 5.16378 5.64044 3.15367 0.342222
M 12 12
0.526885 0.238904 0.741471 0.0575203 -1.142 0.0632321 -0.0239155 4.30394 2.20899 1.6966 5.54289 0.00462132
0 0.679258 14.3878 -0.0436242 6.13672 0.093463 -0.0275413 27.9816 2.19728 2.32334 7.0296 0.0193951
0 0 2719.19 -3.82265 1433.48 14.616 -6.0784 4436.88 44.0248 43.4769 111.576 3.39087
0 0 0 0.0347512 -1.27624 -0.0478862 0.0190366 -6.34297 0.0623548 -0.0584682 0.0719218 -0.00700955
0 0 0 0 823.682 6.28675 -2.58719 2291.74 14.1173 15.8258 38.1371 1.97308
0 0 0 0 0 0.187435 -0.0795747 23.738 0.534235 0.406735 1.12765 0.0279421
0 0 0 0 0 0 0.0348195 -9.87237 -0.203084 -0.134819 -0.361028 -0.0106644
0 0 0 0 0 0 0 7315.45 91.749 89.9288 238.339 5.20584
0 0 0 0 0 0 0 0 12.0452 10.6773 32.721 0.0699095
0 0 0 0 0 0 0 0 0 11.4586 30.3658 0.105709
0 0 0 0 0 0 0 0 0 0 94.3382 0.192381
0 0 0 0 0 0 0 0 0 0 0 0.00915556
V 12 -16.2099 -9.5096 1.39521 107.291 -0.846288 6.62733 59.1719 -0.441533 2.12512 12.892 -4.48685 45.1446
V 12 0.124989 0.989823 57.4806 0.95813 38.17 -0.0198691 0.990931 164.817 -7.34595 9.40091 10.3519 0.423333
M 12 12
0.0408616 -0.00537338 -0.701603 0.00639396 -0.997237 0.0231348 0.000294168 -1.16583 -0.277993 0.0613462 -0.0185456 -0.0043116
0 0.000790587 0.21694 -0.000973071 0.199548 -0.00515744 -9.54895e-05 0.548335 0.0414939 -0.00582608 -0.00387619 0.000622278
0 0 537.835 0.559126 382.301 -3.56392 -0.362759 1594.6 -11.3773 10.1273 -11.7212 0.853839
0 0 0 0.0624367 1.87448 0.0212207 0.00402034 3.36734 -0.327806 0.0471617 -0.212145 -0.00632178
0 0 0 0 444.483 -4.17437 -0.41432 1012.88 -48.3532 -2.28018 -9.59507 -0.324902
0 0 0 0 0 0.156409 0.0152144 -5.53488 -0.0291227 0.218029 -0.151032 0.0188099
0 0 0 0 0 0 0.00253481 -0.249934 0.0543282 0.0291327 -0.0285543 0.00200127
0 0 0 0 0 0 0 5112.2 -9.15045 46.7876 -42.0839 3.60397
0 0 0 0 0 0 0 0 38.1712 3.28523 11.9653 0.0910594
0 0 0 0 0 0 0 0 0 1.33038 0.175394 0.0756697
0 0 0 0 0 0 0 0 0 0 9.60588 -0.0773521
0 0 0 0 0 0 0 0 0 0 0 0.0108
V 12 9.24202 18.8968 0.206127 123.855 -0.217878 -3.3918 52.5601 -0.0346533 -4.24773 16.4745 -3.1243 9.44457
V 12 -0.66484 -0.723315 73.4083 1.03953 51.0116 0.0579574 0.993277 154.477 11.0187 11.5011 8.31685 0.495556
M 12 12
0.209688 -0.143459 1.65727 0.0364763 0.838139 -0.0321871 0.00343876 -0.0446194 0.218527 0.0765792 1.1537 0.0433305
0 0.103547 -1.17655 -0.0211057 -0.52589 0.0248506 -0.00223118 -0.1355 -0.128449 -0.0055948 -0.827045 -0.0264364
0 0 171.011 1.1495 126.411 -0.78815 -0.0765513 281.112 2.55213 -2.84588 -9.37371 -0.00313091
0 0 0 0.0400323 2.28148 -0.00958874 0.00124195 1.46879 0.0393459 -0.150005 0.035281 -0.00154421
0 0 0 0 201.337 -1.60849 -0.0180167 111.512 0.708294 -10.884 10.0211 -0.948075
0 0 0 0 0 0.0900129 -0.00149255 1.06505 -0.00653656 0.0100605 0.00938159 0.0316957
0 0 0 0 0 0 0.000360977 0.1418 0.00230959 0.00535118 -0.018936 0.000652201
0 0 0 0 0 0 0 1347.56 1.2232 8.67589 -144.873 1.3946
0 0 0 0 0 0 0 0 0.338433 0.393695 1.26028 0.0711067
0 0 0 0 0 0 0 0 0 2.45452 0.706677 0.152416
0 0 0 0 0 0 0 0 0 0 43.7868 0.32073
0 0 0 0 0 0 0 0 0 0 0 0.0334222
V 12 -19.1726 -9.58074 0.532993 132.408 -0.0899372 1.46136 63.9953 -0.340856 4.90785 27.0273 -8.31652 -0.835639
V 12 -0.962645 -0.21878 77.6236 1.01662 73.2855 -0.529572 0.842977 102.924 1.81595 3.3993 1.87967 0.243333
M 12 12
0.0214607 -0.0624866 1.7544 0.0137475 0.295463 -0.00271355 -0.00104713 2.35578 0.0234857 -0.0725227 0.0336242 0.00420961
0 0.207583 -8.50149 -0.0576057 -4.39746 -0.00806749 -0.00696427 -11.0449 -0.126378 0.0910634 -0.0833511 -0.0136498
0 0 1642.25 8.33022 1534.27 6.17653 4.02039 1841.63 19.6411 -23.2505 -31.8406 -0.0720389
0 0 0 0.0801228 7.47494 0.0599722 0.0404192 8.49542 0.156863 -0.340109 -0.324413 -0.0060077
0 0 0 0 1544.12 6.2896 3.99783 1681.32 17.0958 -15.9198 -34.524 -0.373372
0 0 0 0 0 0.0553412 0.0370098 6.09849 0.131853 -0.265781 -0.300745 -0.00807496
0 0 0 0 0 0 0.0251418 4.00374 0.0928861 -0.187084 -0.202364 -0.00511327
0 0 0 0 0 0 0 2126.9 22.3809 -19.1556 -28.0063 0.261188
0 0 0 0 0 0 0 0 0.450451 -0.617195 -0.601283 -0.00618568
0 0 0 0 0 0 0 0 0 3.8771 2.02737 0.063605
0 0 0 0 0 0 0 0 0 0 2.09952 0.0599972
0 0 0 0 0 0 0 0 0 0 0 0.003
V 12 -24.6065 -5.48567 0.618253 125.26 0.0826266 -17.0863 48.7782 -0.359807 0.720456 8.29677 -3.10104 47.2145
V 12 -0.675943 -0.727808 59.9845 0.95858 20.7237 0.419092 0.883771 144.278 10.1362 11.1408 11.3379 0.455556
M 12 12
0.0742157 -0.0580838 6.286 -0.025757 0.728887 0.0552694 -0.0251278 16.2631 0.280223 0.289918 -0.133506 0.0343108
0 0.046352 -5.03816 0.0228141 -0.426471 -0.0553798 0.0263941 -13.4269 -0.198483 -0.218938 0.149853 -0.0269771
0 0 1223.7 -1.65129 214.798 3.0112 -1.88517 3167.39 74.9078 36.8127 -48.8185 5.81546
0 0 0 0.0362986 0.731007 -0.0879153 0.0462144 -6.56004 -0.668556 -0.076832 0.314619 -0.0197625
0 0 0 0 156.76 -2.16626 1.16504 399.983 12.5268 10.0842 -13.2323 1.12588
0 0 0 0 0 0.297582 -0.162241 13.9218 0.721033 0.0586547 -0.843902 0.0397837
0 0 0 0 0 0 0.0922086 -8.11511 -0.438964 -0.00440702 0.536478 -0.0191563
0 0 0 0 0 0 0 8562.06 223.989 90.9921 -167.622 14.8988
0 0 0 0 0 0 0 0 34.2048 3.23141 -13.2125 0.706952
0 0 0 0 0 0 0 0 0 1.70576 -1.17382 0.228729
0 0 0 0 0 0 0 0 0 0 23.422 0.0554354
0 0 0 0 0 0 0 0 0 0 0 0.0524222
V 12 -17.7135 -6.58129 1.15589 120.376 -0.834714 10.8803 57.2821 -0.352796 4.1747 22.3618 -6.5589 11.1521
V 12 0.0284837 0.779287 70.4111 1.06989 66.6917 0.485469 0.872417 131.577 -0.381 7.22736 11.1916 0.321111
M 12 12
0.117821 0.592994 -4.99679 -0.0169569 -2.21372 0.00534711 -0.00331103 -1.04189 -1.32577 -0.865289 -2.72811 -0.00171503
0 3.40928 -33.857 -0.103032 -20.1672 0.0345554 -0.0223577 -10.1579 -7.04737 -4.78343 -15.0494 0.00212784
0 0 875.551 -0.126588 819.297 -0.409428 0.356016 1686.49 31.2776 77.4361 174.347 2.50745
0 0 0 0.0287095 -0.585397 -0.0222285 0.0130025 -2.64512 0.411786 0.11636 0.68087 -0.00831696
0 0 0 0 838.983 -0.390967 0.35355 1828.86 -11.1376 61.529 118.278 2.77865
0 0 0 0 0 0.0212206 -0.0127076 -1.28228 -0.0682095 -0.138327 -0.519435 -0.000491028
0 0 0 0 0 0 0.00765406 1.13337 0.0271445 0.0980145 0.331866 0.0011215
0 0 0 0 0 0 0 5605.64 -83.0648 149.629 174.417 10.1295
0 0 0 0 0 0 0 0 51.1137 7.33029 26.6304 -0.252978
0 0 0 0 0 0 0 0 0 12.1246 28.4922 0.325419
0 0 0 0 0 0 0 0 0 0 82.0476 0.349949
0 0 0 0 0 0 0 0 0 0 0 0.0228889
V 12 3.54825 12.3942 -0.0871734 134.992 0.338007 12.1843 41.0796 -0.0702678 -1.03756 11.2804 -2.00771 9.85807
V 12 0.142394 0.749947 60.5848 1.47216 59.0054 0.0430692 0.993284 62.3978 -1.04001 1.3742 2.72956 0.103333
M 12 12
0.308103 0.261259 7.97119 -0.0414303 7.9881 0.00573119 -0.00281468 8.03354 -0.872265 0.680944 1.28981 0.00217198
0 3.44762 -42.5062 0.151165 -30.0003 -0.289186 0.0233989 -55.1187 3.62687 -4.177 -13.0798 -0.0541821
0 0 2606.31 -4.45021 2321.94 -1.55401 -0.588584 2901.91 -163.752 171.483 472.748 2.65047
0 0 0 0.0340459 -3.74047 -0.00427718 0.00352902 -5.19429 0.341379 -0.350779 -0.91562 0.000758548
0 0 0 0 2097.22 -1.87183 -0.482634 2557.21 -138.682 144.451 392.014 2.27762
0 0 0 0 0 0.103389 -0.000593543 -1.22956 0.154691 -0.170274 -0.514306 -0.00300831
0 0 0 0 0 0 0.00039423 -0.697956 0.0454853 -0.0482203 -0.134356 -0.000181723
0 0 0 0 0 0 0 3258.65 -189.33 199.056 554.67 3.02944
0 0 0 0 0 0 0 0 14.4468 -14.75 -41.8572 -0.204229
0 0 0 0 0 0 0 0 0 15.3284 43.8062 0.21751
0 0 0 0 0 0 0 0 0 0 127.6 0.633456
0 0 0 0 0 0 0 0 0 0 0 0.0048
V 12 4.79584 8.7641 0.735153 182.099 -0.115306 -3.53472 46.407 -0.35639 -1.41119 3.93101 -0.744261 43.2702
V 12 0.160005 0.862107 77.6537 1.02558 69.5941 -0.527115 0.839564 105.517 -2.75746 4.11773 4.40617 0.224444
M 12 12
0.222286 0.0926897 20.6447 0.119967 19.5034 0.130899 0.0783581 24.756 -0.785712 0.807423 1.90801 0.0263218
0 0.858235 -4.04485 0.1335 -0.247198 0.160607 0.129107 -10.3277 1.91811 -2.12849 -4.50451 -0.0134242
0 0 3605.14 11.9405 3002.93 9.4556 5.20846 4583.17 -158.138 99.0518 214.227 2.4737
0 0 0 0.106184 10.19 0.101605 0.0661116 13.4068 -0.297454 0.132366 0.326078 0.00605346
0 0 0 0 2629.93 9.15666 5.14124 3790.2 -138.243 98.4729 226.571 2.50477
0 0 0 0 0 0.10828 0.0704452 9.99985 -0.205923 0.195451 0.525073 0.00926354
0 0 0 0 0 0 0.0472553 5.23817 -0.0399084 0.0111075 0.102485 0.00489543
0 0 0 0 0 0 0 5894.27 -209.743 131.519 289.369 3.23977
0 0 0 0 0 0 0 0 14.9704 -12.9181 -32.2227 -0.123639
0 0 0 0 0 0 0 0 0 14.3419 34.209 0.156465
0 0 0 0 0 0 0 0 0 0 90.1727 0.336479
0 0 0 0 0 0 0 0 0 0 0 0.00722222
V 12 6.07624 13.2154 0.968336 130.116 -0.265711 -20.0093 41.8845 -0.402123 -2.26101 9.20482 -1.90429 30.0385
V 12 -0.139206 0.970963 75.3378 0.792136 53.976 0.130817 0.983367 128.244 -4.83599 6.06693 5.52288 0.272222
M 12 12
0.322467 0.0632006 21.4612 -0.0427538 14.5986 0.175824 -0.0203291 40.2196 -0.327818 0.511587 -1.6507 0.0276585
0 0.018208 4.24892 -0.00396177 3.01343 0.0334761 -0.00191779 7.9657 0.0574387 0.0846374 -0.443248 0.00554894
0 0 5409.92 15.6406 5211.57 3.44051 -0.244149 6893.41 248.104 97.4488 230.443 7.73058
0 0 0 0.120019 19.4141 -0.0609222 0.00991189 11.7582 1.58015 0.29151 2.02326 0.0263036
0 0 0 0 5300.28 -0.548623 0.295887 6112.12 290.17 98.8086 315.795 7.89303
0 0 0 0 0 0.140216 -0.016406 11.3767 -0.505904 0.169657 -1.51842 0.00563195
0 0 0 0 0 0 0.00267945 -1.1164 0.0954685 -0.0216125 0.126778 -0.000129618
0 0 0 0 0 0 0 10035.9 176.158 118.399 65.3093 9.45884
0 0 0 0 0 0 0 0 32.5718 3.74829 32.9772 0.313202
0 0 0 0 0 0 0 0 0 2.91776 6.12705 0.190834
0 0 0 0 0 0 0 0 0 0 60.1728 0.404189
0 0 0 0 0 0 0 0 0 0 0 0.0149556
V 12 -0.550858 15.5455 1.2347 101.668 -0.65126 -0.578875 50.2244 -0.375502 -3.12698 13.5775 -3.01742 12.8411
V 12 0.0944759 0.768043 78.7181 0.919109 73.4364 0.601519 0.793622 108.023 1.46695 4.00275 4.45712 0.233333
M 12 12
0.0910845 0.173548 -0.509415 -0.0527325 1.9364 0.0468846 -0.038293 -2.88552 -0.0124118 -0.418227 -1.27789 0.00451528
0 1.51957 -49.1981 -0.041992 -36.0649 0.0778583 -0.065024 -70.7485 -0.128134 -4.4807 -12.6754 -0.0142988
0 0 2506.12 1.34749 2079.5 -2.36516 1.90405 3399.06 18.9027 164.371 441.063 1.13322
0 0 0 0.0709515 -0.56695 -0.0542858 0.0435628 3.33205 -0.122251 0.0787093 0.314885 -0.00185947
0 0 0 0 1845.82 -0.560872 0.472882 2706.37 41.0083 121.121 336.896 0.991502
0 0 0 0 0 0.045548 -0.0365564 -4.47208 0.0685009 -0.178544 -0.643368 0.000150598
0 0 0 0 0 0 0.0294938 3.62068 -0.0508754 0.150583 0.542472 -8.70188e-05
0 0 0 0 0 0 0 4764.12 -13.184 242.833 624.742 1.74438
0 0 0 0 0 0 0 0 17.0916 -2.60429 3.03692 -0.130918
0 0 0 0 0 0 0 0 0 15.366 37.727 0.0815725
0 0 0 0 0 0 0 0 0 0 107.68 0.157028
0 0 0 0 0 0 0 0 0 0 0 0.0032
V 12 1.11745 9.3851 0.490389 116.268 0.0719881 14.7767 35.441 -0.248769 -0.0785579 8.78678 -2.11794 15.4082
V 12 -0.0412624 -0.645221 65.8038 0.715833 48.3801 0.994541 0.0143255 166.094 -4.32553 11.2014 16.581 0.403333
M 12 12
0.34227 0.0409384 -4.78292 -0.064911 -0.367605 -0.00266621 0.0386519 -24.589 0.612188 -0.845324 -3.26736 -0.042786
0 1.89562 17.9404 0.100465 12.2207 -0.0172222 0.00100917 48.7529 6.6873 5.10884 15.3968 -0.0128018
0 0 1293.75 7.78743 669.59 -0.0114715 -2.69192 4453.54 -188.434 103.456 195.417 3.41971
0 0 0 0.0792974 2.6238 -0.000218489 -0.0320352 28.106 -0.86687 0.71903 1.80943 0.0180314
0 0 0 0 422.256 -0.0444246 -1.23763 2194.25 -83.5181 53.0198 84.6088 1.88262
0 0 0 0 0 0.000286587 0.00239793 0.336021 -0.123796 -0.0312811 -0.106129 0.0004618
0 0 0 0 0 0 0.0958558 -4.65369 -0.519935 -0.0526964 -0.0898188 -0.0208584
0 0 0 0 0 0 0 16089.8 -762.441 352.067 689.085 12.2321
0 0 0 0 0 0 0 0 102.668 8.26656 49.201 -0.603818
0 0 0 0 0 0 0 0 0 20.0053 54.9071 0.193066
0 0 0 0 0 0 0 0 0 0 177.323 0.303564
0 0 0 0 0 0 0 0 0 0 0 0.0192
V 12 0.648045 5.2073 -0.163197 99.0039 0.227983 32.8841 -9.46232 0.0181511 -2.54893 17.2342 -3.4923 -26.7363
V 12 -0.1874 -0.95079 70.2905 0.967337 68.0767 0.541411 -0.836815 164.493 -0.415279 6.94474 8.99813 0.336667
M 12 12
0.515341 -0.119225 1.0335 -0.00528176 3.23902 0.0670906 0.0446989 -10.4248 0.465985 -0.535481 -1.34177 -0.0215909
0 0.0325686 0.555946 0.00206988 -0.0949028 -0.0156765 -0.0107281 3.81932 -0.104879 0.128793 0.35817 0.00426698
0 0 1688.26 2.5937 1680.51 -0.861985 -0.397562 3641.26 5.92135 -11.0336 -50.788 0.324024
0 0 0 0.0443067 2.67359 -0.0389861 -0.0228715 6.97647 0.105464 0.136984 0.515504 -9.64461e-05
0 0 0 0 1694.09 -0.717197 -0.291714 3546.68 7.35756 -11.7672 -56.2338 0.268375
0 0 0 0 0 0.0435451 0.0262955 -4.29636 -0.0367025 -0.193872 -0.68371 -0.00230559
0 0 0 0 0 0 0.0159878 -2.43089 -0.0161964 -0.122082 -0.424174 -0.00161175
0 0 0 0 0 0 0 8795.81 10.698 -16.679 -65.6867 1.62002
0 0 0 0 0 0 0 0 0.891438 -0.526444 0.103731 -0.0160222
0 0 0 0 0 0 0 0 0 2.05864 4.41922 0.00183788
0 0 0 0 0 0 0 0 0 0 16.9778 -0.000804065
0 0 0 0 0 0 0 0 0 0 0 0.002
V 12 -8.01005 0.461651 -1.01217 134.349 1.02747 26.2281 -68.7083 0.203478 -0.337059 10.6908 -2.9443 -16.9113
V 12 -0.759352 0.081353 66.9889 0.937491 11.2173 -0.757203 0.333136 161.282 7.15419 7.19526 3.24982 0.262222
M 12 12
0.574062 -0.93599 31.9103 -0.00620827 14.8007 -0.282419 -0.0128577 91.4075 1.43874 1.43679 0.736506 0.0499438
0 3.17684 -84.6624 0.0738641 -31.0503 -0.075642 0.74845 -232.931 -3.81309 -3.88318 -1.87284 -0.133139
0 0 3370.38 -0.72972 1323.33 -27.8986 -16.7554 9401.85 133.965 137.746 62.2908 4.30433
0 0 0 0.0196723 -0.414248 -0.0211307 0.0563411 -2.42748 -0.0696328 -0.0480321 0.0369146 -0.00247947
0 0 0 0 605.554 -14.0562 -2.63195 3813.92 60.8797 61.2081 29.4145 1.92561
0 0 0 0 0 1.2627 -0.131464 -84.9015 -0.868342 -0.943266 -0.307051 -0.0234355
0 0 0 0 0 0 0.640772 -40.4383 -0.169033 -0.198359 0.270761 -0.0116308
0 0 0 0 0 0 0 26443.6 389.867 398.921 184.648 12.4366
0 0 0 0 0 0 0 0 7.6948 7.70059 4.6022 0.235958
0 0 0 0 0 0 0 0 0 7.76144 4.66114 0.236958
0 0 0 0 0 0 0 0 0 0 3.71561 0.134452
0 0 0 0 0 0 0 0 0 0 0 0.00755556
V 12 -18.458 7.29231 2.02273 119.128 -1.58341 -21.1111 16.5671 -0.423463 2.53078 18.3395 -5.73911 1.48564
V 12 0.235193 0.968513 70.3223 1.07384 19.3254 0.111667 0.952075 180.315 -8.07591 8.51595 7.98512 0.344444
M 12 12
0.0564829 -0.013853 -3.45294 -0.000554399 -2.54708 -0.0209861 -0.00687987 -9.32347 -0.103082 -0.0224664 0.638568 0.00696987
0 0.00351138 0.9387 0.000199919 0.646329 0.0029629 0.00108935 2.52817 0.0230854 0.00736132 -0.136832 -0.00119725
0 0 1515.02 -1.42353 312.512 0.24175 0.235343 3698.66 -6.62775 20.8345 -89.0857 0.320876
0 0 0 0.0563413 0.208408 0.122697 0.0269983 -3.95094 0.0596896 0.0318251 0.294814 -0.00827403
0 0 0 0 438.747 -4.42212 1.40652 1165.12 -2.1318 10.1431 -19.9559 -0.270163
0 0 0 0 0 0.7044 0.0509854 -9.5045 0.292356 -0.159063 0.404213 -0.0151037
0 0 0 0 0 0 0.0253449 1.89703 0.0288603 0.0908658 -0.191656 -0.0077285
0 0 0 0 0 0 0 9553.16 -21.9494 63.1384 -222.626 0.722966
0 0 0 0 0 0 0 0 0.717282 -0.436662 -1.75159 -0.0350246
0 0 0 0 0 0 0 0 0 1.23741 -1.4902 0.00160649
0 0 0 0 0 0 0 0 0 0 23.6597 0.163705
0 0 0 0 0 0 0 0 0 0 0 0.00542222
V 12 11.0526 22.7562 1.86057 138.023 -1.57415 -1.25863 47.4868 -0.340559 -5.03336 16.9539 -3.42922 12.4228
V 12 -0.862217 -0.164243 77.8447 0.831845 33.7264 0.933615 -0.148201 203.442 9.67266 10.7585 7.69453 0.41
M 12 12
0.29897 -0.161771 -1.97598 0.0607213 -7.25277 -0.0737928 -0.335535 5.65123 0.951968 0.323466 -0.778814 0.0170785
0 1.76749 -40.188 -0.107431 -0.505546 0.185008 0.539939 -122.208 -3.09664 -1.85295 1.62645 -0.128365
0 0 2355.81 0.215553 1082.52 -1.03115 1.88534 6251.64 153.603 54.426 -90.5909 4.04343
0 0 0 0.0950519 -8.38408 -0.0550511 0.0451667 8.81263 0.198778 0.0565219 -1.03697 -0.00260359
0 0 0 0 1496.77 5.61294 3.98446 2000.99 34.3236 11.0064 31.3201 1.49415
0 0 0 0 0 0.055966 0.0800018 -9.19831 -0.343708 -0.205412 0.630613 -0.00606518
0 0 0 0 0 0 0.901638 5.11527 0.439825 -0.626391 -2.12349 -0.0737932
0 0 0 0 0 0 0 17891.1 540.202 170.315 -398.787 9.79332
0 0 0 0 0 0 0 0 42.786 6.76487 -21.8552 0.259097
0 0 0 0 0 0 0 0 0 2.60336 -3.91037 0.116031
0 0 0 0 0 0 0 0 0 0 30.7276 0.241314
0 0 0 0 0 0 0 0 0 0 0 0.0232
V 12 -25.0027 6.19343 0.702586 114.253 -0.521822 34.9476 -23.7343 -0.10455 4.00138 24.8572 -7.55836 -44.6583
V 12 0.209952 0.970277 74.7861 0.787827 62.7039 0.780341 0.620685 195.065 -6.06696 11.3916 12.9121 0.408889
M 12 12
0.124022 -0.0264448 1.24773 -0.0147862 2.79449 0.0115516 -0.0160944 3.34637 -0.875729 0.105227 -0.129721 0.00222791
0 0.00631499 -0.323615 0.00548105 -0.856079 -0.00290328 0.00394286 -0.626869 0.17791 -0.0199017 -0.0536849 0.00048681
0 0 741.332 2.61488 504.529 -2.26649 2.90116 2711.43 -99.1699 50.0689 62.4166 2.72473
0 0 0 0.0334301 0.608549 -0.0221437 0.0275474 13.856 -0.560372 0.30518 -0.159947 0.0184849
0 0 0 0 517.038 -1.37369 1.72616 1807.19 -98.488 34.6735 58.1374 1.35109
0 0 0 0 0 0.0202739 -0.0254658 -11.3286 0.516796 -0.240304 0.136927 -0.0130916
0 0 0 0 0 0 0.0320891 14.187 -0.610447 0.297164 -0.166667 0.016384
0 0 0 0 0 0 0 11166.8 -511.809 214.8 149.943 11.926
0 0 0 0 0 0 0 0 45.3783 -10.9286 0.0230053 -0.532221
0 0 0 0 0 0 0 0 0 5.32755 4.46494 0.212352
0 0 0 0 0 0 0 0 0 0 32.3301 -0.0273857
0 0 0 0 0 0 0 0 0 0 0 0.0148889
V 12 9.41738 21.1756 -0.464708 106.791 0.438611 24.5674 21.783 0.113756 -3.54582 21.8421 -4.38892 -48.4221
V 12 -0.593403 -0.695182 76.2992 0.976811 62.1096 -0.640999 0.754739 140.965 0.12594 7.94657 3.74115 0.336667
M 12 12
1.01004 -0.619078 21.5338 -0.0188284 8.83462 -0.173254 -0.160987 49.0781 0.808946 0.821354 -0.54795 0.0167427
0 0.471304 -13.494 0.0162904 -5.72435 0.128715 0.11427 -28.2805 -0.513454 -0.546214 0.235904 -0.00299806
0 0 2791.84 2.72122 1920.31 -4.51993 -3.23677 5231.85 -3.3677 22.1551 -81.9961 2.86083
0 0 0 0.052803 2.61309 0.0568742 0.0493523 2.20615 0.171269 -0.1899 -0.136688 -0.00164391
0 0 0 0 1390.54 -1.69008 -0.889029 3469.46 -5.05942 13.4581 -47.5286 1.53592
0 0 0 0 0 0.0984601 0.0859319 -11.9928 0.116127 -0.381085 -0.0680641 -0.00420051
0 0 0 0 0 0 0.076945 -9.29149 0.070785 -0.343065 -0.0902314 -0.00432504
0 0 0 0 0 0 0 10174.7 -9.68876 47.5559 -175.551 6.67667
0 0 0 0 0 0 0 0 3.0506 -0.829505 -1.06958 -0.0238468
0 0 0 0 0 0 0 0 0 3.2939 2.76036 -0.0211042
0 0 0 0 0 0 0 0 0 0 10.1479 -0.355119
0 0 0 0 0 0 0 0 0 0 0 0.0204
V 12 -15.347 -6.05029 0.599345 125.592 -0.0107668 -18.5279 46.5025 -0.368201 0.0336666 20.8994 -6.41365 3.13135
V 12 0.938507 -0.040987 83.9611 1.00163 78.3182 0.49915 0.860416 113.61 -1.45994 3.73423 4.24723 0.237778
M 12 12
0.0677477 0.145273 -1.44333 0.0142629 -0.790868 -0.0248184 0.0147916 -2.94673 0.58742 -0.408431 -1.21737 -0.00627097
0 0.989977 8.26783 -0.00230064 8.65801 -0.138978 0.0855994 5.38269 3.35901 -0.138361 0.283066 -0.0022058
0 0 1562.87 5.62155 1614.87 -7.17035 4.12594 1877.13 -0.539359 24.4959 55.4534 2.50989
0 0 0 0.0609033 7.16334 -0.0430538 0.0240445 5.09612 -0.134832 -0.346887 -0.821358 0.00983112
0 0 0 0 1733.37 -7.8329 4.5244 1887.96 8.79843 21.7555 57.2714 2.64586
0 0 0 0 0 0.0699262 -0.0414511 -7.40227 -0.0336671 0.410212 1.08962 -0.00746758
0 0 0 0 0 0 0.0248802 4.32739 0.0373163 -0.219785 -0.587664 0.0039792
0 0 0 0 0 0 0 2458.12 -12.2325 59.5136 132.2 3.13753
0 0 0 0 0 0 0 0 20.212 6.75438 21.4817 -0.0143441
0 0 0 0 0 0 0 0 0 13.7427 37.1351 0.0869814
0 0 0 0 0 0 0 0 0 0 107.387 0.254566
0 0 0 0 0 0 0 0 0 0 0 0.00515556
V 12 20.2551 1.70127 0.470311 125.074 0.0891557 9.89027 45.6579 -0.268055 -1.33629 7.43357 -1.59011 23.3727
V 12 0.0850961 0.87805 61.9882 0.937023 36.0688 0.974639 0.000385626 108.799 2.92205 3.74242 2.03137 0.205556
M 12 12
0.126704 0.0710394 -1.25277 0.0090714 -1.69575 0.0199892 -0.123875 -0.188246 0.0965135 -0.392457 -0.572182 0.000776415
0 0.869378 -0.952638 -0.008761 -0.813276 -0.0260013 0.0332028 -9.2212 2.66991 -2.28556 -6.10891 -0.0140358
0 0 284.126 -0.033764 188.572 -0.0652279 -0.157096 506.241 -3.74477 2.09651 4.10752 0.415921
0 0 0 0.0221645 -0.856808 0.00076631 -0.0158749 0.964387 -0.0731954 -0.0159182 0.06954 -0.00120782
0 0 0 0 178.374 -0.386706 2.87482 259.83 -1.80298 3.96875 3.77083 0.200446
0 0 0 0 0 0.0109753 -0.0513819 1.23184 -0.092736 0.0205474 0.174985 0.00294528
0 0 0 0 0 0 0.439724 -7.10411 0.201315 0.17826 -0.228115 -0.0186396
0 0 0 0 0 0 0 1136.42 -32.5975 17.5544 59.2093 1.11047
0 0 0 0 0 0 0 0 8.46858 -6.71924 -18.6908 -0.0418491
0 0 0 0 0 0 0 0 0 6.48442 16.1418 0.0294386
0 0 0 0 0 0 0 0 0 0 43.1039 0.0950451
0 0 0 0 0 0 0 0 0 0 0 0.00182222
V 12 -0.88259 16.7024 1.09867 126.641 -0.649323 29.9613 -22.0336 -0.265928 0.307259 10.6303 -2.96813 -1.62263
V 12 0.290129 0.464985 77.2794 0.707285 58.9079 0.990296 -0.116626 128.243 -0.207406 7.1398 8.53983 0.302222
M 12 12
0.699342 0.910191 7.34944 0.0202821 5.1711 -0.00121552 0.000665465 6.07781 2.66628 -3.04317 -9.68592 -0.00493888
0 1.59718 8.08862 -0.0328274 8.55751 0.00576635 0.075501 0.849463 4.77947 -4.83673 -15.6641 0.0135923
0 0 375.095 -1.02658 340.283 -0.13952 -1.96092 382.927 25.5828 -14.6591 -94.3123 0.220431
0 0 0 0.0460436 -2.88458 -0.00239847 -0.0119023 0.847859 -0.0952002 -0.0971751 0.289799 -0.00093762
0 0 0 0 400.749 -0.0262399 -1.16807 254.326 26.3362 -8.58571 -93.6546 0.260308
0 0 0 0 0 0.000589534 0.00496401 -0.260326 0.0179104 -0.0193289 -0.0496903 -0.00031444
0 0 0 0 0 0 0.0508178 -2.77815 0.233335 -0.307274 -0.654851 -0.00151436
0 0 0 0 0 0 0 529.968 3.9414 -0.877304 -34.2945 0.090191
0 0 0 0 0 0 0 0 14.4213 -14.4677 -46.4511 0.0489526
0 0 0 0 0 0 0 0 0 16.4229 48.3318 -0.00608587
0 0 0 0 0 0 0 0 0 0 158.175 -0.0679779
0 0 0 0 0 0 0 0 0 0 0 0.00235556
V 12 5.16363 15.7649 0.8485 101.706 -0.272068 30.6141 -26.6035 -0.294836 -1.19759 14.9576 -3.39383 -18.6401
V 12 -0.0964798 0.541643 76.3987 0.776867 50.9635 0.983487 -0.087939 175.901 3.25342 10.3855 12.7882 0.381111
M 12 12
0.190674 -0.604522 2.26548 0.060497 1.93168 -0.0201795 -0.13943 10.8225 -1.11372 1.16957 4.17967 0.00955366
0 4.08515 -40.3369 -0.155246 -29.2136 0.0570214 0.54136 -143.105 6.02901 -8.00553 -27.5476 -0.116115
0 0 2118.1 2.3645 1378.71 -0.827823 -8.51423 6296.51 67.1937 131.463 457.958 4.46704
0 0 0 0.0597761 0.151398 -0.0166495 -0.0958555 7.6966 -0.344515 0.246192 0.89827 0.00674999
0 0 0 0 977.45 -0.0979468 -3.66483 4228.16 50.2043 97.6817 367.786 2.81598
0 0 0 0 0 0.00572837 0.0332847 -2.00805 0.167367 -0.0896943 -0.187437 -0.00342872
0 0 0 0 0 0 0.21945 -25.1541 1.18894 -1.07247 -3.48208 -0.0301067
0 0 0 0 0 0 0 19517.9 189.363 442.058 1685.22 12.8152
0 0 0 0 0 0 0 0 25.454 -7.81538 -20.332 -0.0311406
0 0 0 0 0 0 0 0 0 18.6772 68.1484 0.328669
0 0 0 0 0 0 0 0 0 0 289.488 1.05429
0 0 0 0 0 0 0 0 0 0 0 0.0114889
V 12 -2.37642 17.4178 0.321796 108.693 -0.106349 33.2469 -24.3867 -0.0655701 0.48355 19.6855 -4.55732 -40.5077
V 12 0.340981 0.814973 77.3056 0.755024 53.062 -0.143329 0.978926 136.295 6.02462 6.45461 5.76851 0.323333
M 12 12
0.211672 0.276027 -5.26597 0.00440787 -2.1834 0.013395 -0.00221185 -9.8222 -1.41885 -1.61309 -4.47195 -0.00493481
0 0.764279 3.56542 -0.0163511 4.35512 -0.209632 -0.0212475 -0.155127 -1.9034 -2.21843 -6.40682 -0.0154572
0 0 1299.57 -3.68441 686.489 -9.38497 -1.08022 2448.35 20.1981 23.0075 58.8519 0.38925
0 0 0 0.0401327 -0.528948 0.0536165 0.00644273 -8.51463 0.205905 0.115329 0.33837 -0.00941497
0 0 0 0 447.812 -4.55188 -0.494033 1159.12 9.66754 5.66116 15.8547 -0.258453
0 0 0 0 0 0.188563 0.0165527 -14.2977 0.405276 0.311014 0.760535 -0.00779272
0 0 0 0 0 0 0.00188082 -1.86211 0.0575526 0.0541998 0.144731 -0.000871986
0 0 0 0 0 0 0 5208.4 39.2596 54.1796 69.0536 1.18854
0 0 0 0 0 0 0 0 17.9551 18.5615 52.8629 -0.0148615
0 0 0 0 0 0 0 0 0 19.8788 55.6602 0.0236241
0 0 0 0 0 0 0 0 0 0 174.83 0.127935
0 0 0 0 0 0 0 0 0 0 0 0.0036
V 12 8.19501 10.401 1.03137 93.1394 -0.579021 -9.17137 57.2275 -0.303538 1.82013 13.4725 -3.33402 14.3076
V 12 -0.2997 0.679588 92.8182 1.02702 56.2194 0.727629 0.672004 230.552 -3.27463 11.9623 11.3825 0.412222
M 12 12
0.8026 -0.383416 -21.6649 0.0268354 -23.54 0.0978261 -0.132776 -73.7497 1.4912 0.0529117 3.58918 -0.0737606
0 1.23246 27.6099 -0.121926 22.2584 0.0403501 -0.0448161 85.1621 -3.96968 -2.87747 -12.9931 0.0503084
0 0 1866.1 0.885354 1365.26 -3.81054 4.39173 5681.23 -81.2322 -35.2025 -295.083 2.52534
0 0 0 0.0576774 -1.13207 -0.0351994 0.0409551 0.956952 0.508848 0.298588 1.03766 -0.00917923
0 0 0 0 1195.38 -3.16213 3.98283 4307.68 -74.9826 -30.017 -245.844 2.41212
0 0 0 0 0 0.070257 -0.083427 -11.1269 -0.127722 0.0186336 0.0930677 0.00211977
0 0 0 0 0 0 0.100439 13.0741 0.132323 -0.00563659 -0.149822 -0.000987506
0 0 0 0 0 0 0 17458.9 -258.027 -98.3473 -893.791 8.55637
0 0 0 0 0 0 0 0 13.3064 9.46421 41.7924 -0.183843
0 0 0 0 0 0 0 0 0 10.5585 35.555 0.0405021
0 0 0 0 0 0 0 0 0 0 151.425 -0.271355
0 0 0 0 0 0 0 0 0 0 0 0.0117556
V 12 -4.41974 18.7194 0.591701 135.42 -0.396057 23.5597 24.3854 -0.0940777 -2.3303 25.2492 -6.03729 -43.6139
V 12 0.773225 -0.207778 80.0422 0.859908 73.3993 0.68081 0.728286 159.815 -1.14315 8.35861 10.3377 0.463636
M 12 12
1.52773 -0.257291 16.2076 0.0293153 18.1825 0.0185121 -0.0250982 42.2846 4.58306 -1.34092 -7.25112 0.366882
0 0.420743 -12.8116 -0.0124076 -6.32755 0.0286584 -0.0277651 -29.326 0.608286 -0.841183 -1.4858 0.0407537
0 0 5068.77 -3.73901 4679.5 0.347128 -1.09551 10656.2 -8.56173 234.589 316.121 32.3256
0 0 0 0.0800595 -4.27463 -0.0487384 0.0452385 -17.4725 0.133853 -1.51878 -1.81828 -0.206564
0 0 0 0 4463.03 1.31965 -2.01248 9967.33 27.0583 227.841 250.736 37.2293
0 0 0 0 0 0.0358874 -0.0333199 7.40414 0.1603 0.805563 0.623035 0.142386
0 0 0 0 0 0 0.0311801 -8.48066 -0.178783 -0.781286 -0.594066 -0.141389
0 0 0 0 0 0 0 24437.9 10.2944 784.671 958.857 108.41
0 0 0 0 0 0 0 0 19.9871 -7.47958 -34.4506 1.63125
0 0 0 0 0 0 0 0 0 72.0353 94.9972 8.29163
0 0 0 0 0 0 0 0 0 0 172.028 7.28854
0 0 0 0 0 0 0 0 0 0 0 1.33345
V 12 17.4103 2.20319 -0.268045 111.339 0.444724 17.597 40.4474 -0.0323351 -1.08088 12.9235 -2.7324 16.8933
V 12 0.773872 0.014955 87.0731 0.0852753 86.2452 0.994171 -0.0158202 88.1636 0.646201 1.51116 1.85626 0.131111
M 12 12
1.54032 0.0280236 37.5569 -0.0206688 43.1694 -0.00512635 0.117857 31.7285 -4.56974 -4.20751 -11.4986 0.0516772
0 0.0677668 -5.50236 0.0164122 -5.3691 -0.00391674 0.073224 -5.61148 -0.0273995 -0.0865782 -0.267022 -0.00184475
0 0 8641.4 -4.53122 8777.31 0.338344 -0.173881 8496.53 -124.727 -104.373 -294.426 5.78794
0 0 0 0.0504594 -4.57923 -0.00590711 0.0293711 -4.46209 -0.0492254 0.136634 0.00486763 0.00248456
0 0 0 0 8933.91 0.317638 0.279731 8611.1 -141.604 -120.007 -337.98 5.97714
0 0 0 0 0 0.000812564 -0.00640825 0.358228 0.0240114 0.00338654 0.0435463 -0.000456643
0 0 0 0 0 0 0.101546 -0.620817 -0.302318 -0.299536 -0.876754 0.00114896
0 0 0 0 0 0 0 8373.76 -107.324 -88.2955 -250.253 5.58787
0 0 0 0 0 0 0 0 14.3527 12.5747 35.9003 -0.191523
0 0 0 0 0 0 0 0 0 12.2721 33.5156 -0.132019
0 0 0 0 0 0 0 0 0 0 97.1191 -0.411629
0 0 0 0 0 0 0 0 0 0 0 0.00708889
V 12 12.4572 3.14728 0.480484 15.5328 0.233096 28.186 -11.1355 -0.236408 -0.172307 3.53586 -0.7682 -10.8211
V 12 -0.97959 -0.0471904 63.2902 0.103224 63.2633 -0.991553 -0.0186928 63.5941 0.0144769 0.984499 0.198166 0.116667
M 12 12
0.00640268 0.00454945 -1.08475 0.0195311 -1.08272 0.00299511 0.0020717 -1.08305 0.0172118 -0.0364846 -0.00974826 -0.00176275
0 0.337189 -1.82124 0.0112965 -1.81079 0.00184028 0.204692 -1.7409 -0.24695 0.232858 0.0656811 -0.00404879
0 0 2607.1 -3.45244 2606.45 -0.441897 1.64644 2606.41 -2.10229 18.8072 0.0841851 2.35876
0 0 0 0.0649139 -3.44178 0.00999834 -0.00538757 -3.45764 0.00897442 -0.0911794 -0.0281919 -0.00451163
0 0 0 0 2605.81 -0.440096 1.64311 2605.75 -2.1389 18.8234 0.0859094 2.35822
0 0 0 0 0 0.00157951 -0.000800715 -0.443542 0.00124578 -0.0140403 -0.00425499 -0.000698872
0 0 0 0 0 0 0.146687 1.71891 -0.0631095 0.132001 0.0417703 -0.00173158
0 0 0 0 0 0 0 2605.83 -2.07083 19.0164 0.167317 2.35919
0 0 0 0 0 0 0 0 0.709499 -0.444293 -0.0747735 -0.0141509
0 0 0 0 0 0 0 0 0 1.18587 0.343378 0.029349
0 0 0 0 0 0 0 0 0 0 0.125799 0.00271197
0 0 0 0 0 0 0 0 0 0 0 0.0028
V 12 -24.7854 -0.563721 0.700749 15.0806 0.0246505 -28.2674 -2.06235 -0.320158 -0.0554537 2.33491 -1.21138 26.7809
V 12 0.462624 -0.740403 68.8759 0.841956 67.9213 0.658444 -0.737602 70.1269 -0.939989 1.16231 1.84669 0.1
M 12 12
0.437832 -0.257163 12.3733 -0.144164 15.2113 0.105523 0.0958625 9.44534 1.87124 -1.94388 -4.91817 -0.00110734
0 0.702206 -13.2431 -0.0378202 -16.1944 0.0370421 0.0196569 -10.2818 -1.57506 1.57795 4.26193 -0.0126388
0 0 1531.97 0.0930538 1657.79 -0.204108 0.106278 1408.8 82.9878 -84.0843 -226.572 0.228694
0 0 0 0.205459 0.462193 -0.153175 -0.133318 -0.317303 0.331504 -0.375562 -0.998929 -0.00215626
0 0 0 0 1813.3 -0.501416 -0.109944 1503.89 103.06 -105.198 -280.517 0.206778
0 0 0 0 0 0.11462 0.0989962 0.124578 -0.255402 0.28844 0.76519 0.00152024
0 0 0 0 0 0 0.0869243 0.347834 -0.201915 0.229187 0.61466 0.00132161
0 0 0 0 0 0 0 1317.57 62.0769 -62.0749 -170.48 0.258105
0 0 0 0 0 0 0 0 14.0473 -14.8569 -37.7278 -0.0318049
0 0 0 0 0 0 0 0 0 15.778 39.9512 0.0378138
0 0 0 0 0 0 0 0 0 0 101.548 0.0821447
0 0 0 0 0 0 0 0 0 0 0 0.001
V 12 4.86119 0.340009 0.445772 117.685 0.257808 23.7015 -63.9704 -0.283323 -0.987516 3.74589 -1.2727 2.23273
V 12 -0.973334 -0.197112 61.4961 0.118074 16.0555 -0.498697 -0.0570863 107.795 3.39501 4.07805 7.94342 0.202222
M 12 12
0.00512349 -0.0233279 -2.00251 0.0109504 -0.563288 -0.0883248 -0.0340416 -3.37546 0.00830385 -0.00814727 -0.156593 -0.00305023
0 0.118789 9.20624 -0.0286473 2.23915 0.559178 0.192535 15.8732 -0.0603551 -0.00761394 0.690495 0.0137186
0 0 1383.07 -3.91519 438.883 15.1076 21.5315 2272.23 -13.4653 3.82281 80.6674 1.91462
0 0 0 0.0740178 -2.63536 0.04033 0.0453845 -5.11437 -0.0174512 -0.113381 -0.396956 -0.0079541
0 0 0 0 633.981 9.43658 3.81145 213.176 -4.17393 -2.86446 -7.79596 0.697837
0 0 0 0 0 5.62304 0.602507 20.6855 0.0626768 -0.182404 1.88804 0.0639809
0 0 0 0 0 0 0.769498 37.9398 -0.209398 -0.169099 0.996599 0.0139136
0 0 0 0 0 0 0 4255.48 -22.2504 10.6819 167.823 3.08545
0 0 0 0 0 0 0 0 0.261139 0.103105 -0.475158 -0.015483
0 0 0 0 0 0 0 0 0 0.450364 1.21092 0.017521
0 0 0 0 0 0 0 0 0 0 9.32208 0.145402
0 0 0 0 0 0 0 0 0 0 0 0.00435556
V 12 -22.642 4.12614 2.09684 13.9436 -1.56562 -15.6881 -1.44506 -0.395758 0.781686 2.2243 -0.531158 40.6026
V 12 -0.575184 0.670375 67.8362 0.846637 67.522 -0.655566 0.746104 68.3492 -0.349548 0.856767 0.961241 0.104444
M 12 12
0.431383 -0.384971 -6.90299 0.0126267 -8.52967 0.020105 0.0220021 -5.13279 -1.50634 1.38881 4.54351 -0.00670436
0 0.546471 17.8371 0.109744 19.733 0.0700628 0.0558012 15.8265 1.76257 -1.57277 -5.2812 0.0119597
0 0 4905.27 1.88294 4959.11 1.27121 0.514793 4858.38 29.8697 -27.0149 -149.678 2.82429
0 0 0 0.126414 2.09711 0.0940514 0.0804237 1.66887 0.276808 -0.13343 -0.579121 -0.00236686
0 0 0 0 5020.07 1.38597 0.594555 4904.61 36.4547 -32.968 -169.505 2.86514
0 0 0 0 0 0.0703337 0.0601325 1.15991 0.164345 -0.059984 -0.306253 -0.00185749
0 0 0 0 0 0 0.0517259 0.439307 0.126508 -0.0385583 -0.210714 -0.00213021
0 0 0 0 0 0 0 4819.69 22.7748 -20.6096 -128.428 2.78434
0 0 0 0 0 0 0 0 6.35566 -5.48313 -18.3434 0.0170977
0 0 0 0 0 0 0 0 0 5.34538 16.6545 -0.0238936
0 0 0 0 0 0 0 0 0 0 55.3 -0.114248
0 0 0 0 0 0 0 0 0 0 0 0.00282222
V 12 -12.8425 5.95694 0.792335 103.391 -0.0716873 -22.4426 38.2132 -0.358441 -0.694976 2.5957 -0.82554 41.9967
V 12 -0.0174011 -0.979302 64.7642 1.46326 64.3917 -0.00233031 -0.9936 65.372 0.182184 1.08281 0.982671 0.111111
M 12 12
0.359598 3.00221e-05 12.2567 0.0460075 11.0812 0.143008 -0.00662607 13.3859 1.33603 1.01307 2.8136 0.00592309
0 0.00637808 -0.166032 -0.0054274 -0.311534 -0.00864816 0.000759261 -0.0151752 0.11348 0.111245 0.359731 -0.000854761
0 0 3831.35 2.48665 3793.48 -4.36005 -0.189503 3868.31 71.4977 15.1299 77.9046 2.05723
0 0 0 0.0235464 2.43971 0.0373319 -0.00276148 2.52332 0.0580199 0.0337794 0.10953 0.000188908
0 0 0 0 3763.33 -4.64953 -0.183037 3822.84 64.2131 8.60789 59.1584 2.05768
0 0 0 0 0 0.114409 -0.00521462 -4.0909 0.256932 0.315387 0.727964 -0.00342699
0 0 0 0 0 0 0.000366594 -0.195259 -0.00828832 -0.00907751 -0.0148718 1.3138e-05
0 0 0 0 0 0 0 3912.81 78.6142 21.5881 96.3411 2.0543
0 0 0 0 0 0 0 0 7.71304 5.87086 17.432 0.00822736
0 0 0 0 0 0 0 0 0 6.17679 15.8575 0.00603771
0 0 0 0 0 0 0 0 0 0 45.581 -0.00382592
0 0 0 0 0 0 0 0 0 0 0 0.00368889
V 12 -6.43275 -0.863914 0.557851 198.95 0.255469 7.02272 -84.1532 -0.387754 -0.566074 5.13969 -2.20154 22.257
V 12 0.771134 0.0352195 82.0824 0.492939 74.6289 0.87202 -0.478268 111.592 2.45002 4.18321 5.48843 0.247778
M 12 12
1.52934 0.0714774 -17.6208 -0.179154 -7.00128 0.136944 0.227324 -37.4384 -4.6885 -4.2768 -16.2502 -0.0239009
0 0.107668 -4.74496 -0.0275816 -5.10219 0.0262798 0.0431138 -6.88945 -0.0498532 0.0700672 -0.06346 -0.000186207
0 0 1663.31 -2.3585 1421.41 -0.731319 -0.316074 2175.48 46.6517 72.1132 230.856 1.83342
0 0 0 0.0700929 -2.83829 -0.03505 -0.0633092 0.250713 0.490913 0.421272 1.51916 -0.00476556
0 0 0 0 1410.79 0.299216 1.23325 1742.28 10.5833 38.852 111.593 1.22555
0 0 0 0 0 0.0251042 0.0423724 -2.83669 -0.387059 -0.275314 -1.15415 0.000674122
0 0 0 0 0 0 0.0724646 -3.72487 -0.640324 -0.457842 -1.89615 0.00203943
0 0 0 0 0 0 0 3054.36 104.526 135.309 446.876 2.48897
0 0 0 0 0 0 0 0 14.8221 13.2232 50.8758 0.0741353
0 0 0 0 0 0 0 0 0 14.8741 50.1694 0.126089
0 0 0 0 0 0 0 0 0 0 183.564 0.351664
0 0 0 0 0 0 0 0 0 0 0 0.00355556
V 12 13.0731 8.96274 0.430724 73.8267 0.16042 27.8746 -45.7393 -0.21043 0.252879 7.96352 -1.82234 -10.5349
V 12 0.82344 -0.483555 100.37 0.792454 100.218 0.696784 -0.706485 141.85 6.74934 7.91229 5.52726 0.33
M 12 12
0.198124 0.320619 -3.08766 -0.0839249 -2.98122 0.0578626 0.0647806 -0.39343 0.609054 -0.845658 -1.36295 -0.0124675
0 0.594963 3.44614 -0.196244 3.53097 0.13467 0.147521 4.41804 0.882847 -1.14561 -1.72153 -0.0124529
0 0 3543.9 -5.67334 3539.65 3.52846 4.21017 3058.77 -26.5589 123.344 186.857 4.74405
0 0 0 0.138442 -5.68194 -0.097839 -0.0971721 -2.51842 -0.0718579 0.2601 0.245931 0.00753079
0 0 0 0 3536.12 3.52716 4.22241 3052.21 -26.2025 122.199 186.016 4.7377
0 0 0 0 0 0.0696352 0.0682825 1.41867 0.0437722 -0.176784 -0.174192 -0.00631838
0 0 0 0 0 0 0.06871 2.11997 0.0749355 -0.207731 -0.208558 -0.00474715
0 0 0 0 0 0 0 2975.39 -9.05054 107.507 145.021 4.29487
0 0 0 0 0 0 0 0 2.27213 -3.11262 -5.18269 -0.0415916
0 0 0 0 0 0 0 0 0 9.01883 11.9513 0.2164
0 0 0 0 0 0 0 0 0 0 19.5519 0.302994
0 0 0 0 0 0 0 0 0 0 0 0.0086
V 12 11.2359 4.4263 0.0482801 118.76 0.762602 27.0281 -62.8257 -0.305214 2.40319 21.9525 -6.26707 -55.7713
V 12 0.412447 0.890081 104.013 0.887931 100.816 -0.596114 0.784945 159.072 -8.42763 8.68363 5.77523 0.348889
M 12 12
0.249205 -0.144416 0.659428 0.1331 0.0464839 0.149015 0.0968027 5.60396 -0.291416 0.182945 0.816916 0.0130843
0 0.0895862 0.773976 -0.0807271 1.1539 -0.092564 -0.0567945 -1.18524 0.172018 -0.110142 -0.696143 -0.00820015
0 0 1075.3 -5.53915 1072.67 -6.02969 -3.1545 1931.83 -2.91689 -2.3162 -82.6341 1.64556
0 0 0 0.176717 -5.56747 0.177752 0.112928 -6.9596 -0.0716634 0.0458014 0.778848 -0.00455476
0 0 0 0 1078.75 -6.11851 -3.21736 1913.81 -0.801243 -3.42706 -82.0116 1.61979
0 0 0 0 0 0.182739 0.114666 -7.83001 -0.0880123 0.070069 0.971881 -0.00322277
0 0 0 0 0 0 0.073843 -3.70383 -0.0574488 0.0445602 0.490497 -0.00225965
0 0 0 0 0 0 0 3941.44 -10.7528 -3.25186 -162.66 3.51644
0 0 0 0 0 0 0 0 0.770771 -0.293386 -0.182348 -0.0144085
0 0 0 0 0 0 0 0 0 0.26211 0.864158 0.00443534
0 0 0 0 0 0 0 0 0 0 17.1649 -0.0237092
0 0 0 0 0 0 0 0 0 0 0 0.00488889
V 12 12.3847 18.4129 0.770276 122.123 0.140352 -20.1124 35.5366 -0.477885 -4.81855 23.1153 -5.37298 -19.8261
V 12 -0.991892 -0.0652085 67.6195 0.422765 64.8836 -0.922027 -0.384303 87.7993 -1.50683 3.27735 2.68246 0.224444
M 12 12
0.000110751 -0.000379392 -0.0944635 0.000653646 -0.0795056 0.000179832 -0.000423565 -0.093764 0.00256135 0.0044972 -0.00483256 0.000106389
0 0.106968 0.781788 0.039036 0.0665271 0.0042666 -0.010221 2.17259 -0.171051 0.188775 0.234445 0.00250576
0 0 814.432 -2.44994 811.663 -0.652778 1.56848 869.331 -3.85478 -15.4977 -37.1924 0.550305
0 0 0 0.0310815 -2.51251 0.00685859 -0.0165054 -2.06016 -0.00860383 0.0938864 0.162267 -0.000707409
0 0 0 0 839.052 -0.580019 1.41442 842.86 0.241878 -19.5684 -47.393 0.501777
0 0 0 0 0 0.00287154 -0.00691209 -0.683207 0.00910535 -0.00571845 0.0196748 -2.17578e-05
0 0 0 0 0 0 0.0167286 1.62634 -0.0190791 0.0131313 -0.058434 -7.70995e-05
0 0 0 0 0 0 0 963.461 -7.44433 -12.4621 -29.4835 0.662764
0 0 0 0 0 0 0 0 0.818063 -0.583274 -1.29739 -0.0179992
0 0 0 0 0 0 0 0 0 1.61154 1.28621 0.000450866
0 0 0 0 0 0 0 0 0 0 7.04095 -0.00323546
0 0 0 0 0 0 0 0 0 0 0 0.00142222
V 12 -24.9047 4.52565 0.706881 63.1248 0.0427796 -23.5669 -31.6684 -0.350434 -1.04458 7.07127 -2.36009 25.0877
V 36 -90.3112 -330.26 -131.373 -173.117 -249.096 -127.291 -205.317 -132.858 -173.042 -124.141 -119.529 -98.7098 -119.37 -138.594 -164.519 -209.429 -198.606 -174.073 -159.633 -116.485 -106.259 -99.172 -134.438 -114.942 -210.296 -122.523 -41.3475 -42.3311 -97.687 -53.8217 -86.3039 -204.337 -69.0683 -169.645 -195.727 -64.425
M 12 12
23.5437 1.73123 -0.0480679 0.309615 -0.0100276 -1.82389 1.63144 0.0153898 -0.234706 0.0132026 0.432145 -7.26704
1.73123 12.3643 0.139963 2.38363 -0.114633 -0.406601 -6.63841 -0.0108937 -0.406324 0.637238 0.210628 -5.43734
-0.0480679 0.139963 0.0806172 0.103168 -0.0538965 -0.150402 -0.0402053 -0.0211138 -0.0270994 0.0589144 -0.0179324 0.820913
0.309615 2.38363 0.103168 128.254 -0.00747945 1.25262 -10.9634 -0.102652 -0.3768 1.89716 -0.703515 17.5151
-0.0100276 -0.114633 -0.0538965 -0.00747945 0.0466482 0.0975592 -0.0514087 0.0101183 0.0172406 0.00563775 -0.00181087 -0.710881
-1.82389 -0.406601 -0.150402 1.25262 0.0975592 30.1171 -5.35369 0.0588838 0.0765097 0.471863 -0.206237 -10.8205
1.63144 -6.63841 -0.0402053 -10.9634 -0.0514087 -5.35369 71.4392 0.0169827 0.273192 -0.77371 0.265191 15.4563
0.0153898 -0.0108937 -0.0211138 -0.102652 0.0101183 0.0588838 0.0169827 0.00852506 0.00816925 -0.0535524 0.0156038 -0.280709
-0.234706 -0.406324 -0.0270994 -0.3768 0.0172406 0.0765097 0.273192 0.00816925 0.465283 0.00115915 -0.0463122 -0.67258
0.0132026 0.637238 0.0589144 1.89716 0.00563775 0.471863 -0.77371 -0.0535524 0.00115915 4.1365 -1.15986 -16.1727
0.432145 0.210628 -0.0179324 -0.703515 -0.00181087 -0.206237 0.265191 0.0156038 -0.0463122 -1.15986 0.448141 3.47924
-7.26704 -5.43734 0.820913 17.5151 -0.710881 -10.8205 15.4563 -0.280709 -0.67258 -16.1727 3.47924 285.155
11 a
36 33 84 33 83 34 83 35 81 36 78 38 75 40 71 43 66 46 61 49 56 51 52 53 48 55 46 56 44 57 43 57 42 58 42 58 41 58 40 59 40 60 42 61 45 62 50 64 56 66 61 68 67 70 73 71 76 73 80 74 82 74 83 75 85 75 86 76 87 76 86 76 85
28 172 78 172 79 173 78 174 75 176 72 179 67 182 62 185 55 188 49 191 45 194 41 195 38 196 37 196 38 197 40 199 43 201 48 203 55 206 61 208 67 210 72 212 76 213 79 215 80 215 82 216 82 216 83 215 82
32 35 179 35 180 35 179 36 176 37 174 38 170 41 165 44 160 46 154 49 149 52 144 54 140 56 136 56 134 57 133 57 132 57 133 58 135 59 137 61 141 63 147 65 154 68 160 70 166 71 171 73 174 74 176 75 177 75 178 75 179 76 179 76 180
28 145 188 146 187 147 185 150 182 153 178 158 172 164 166 170 159 177 153 182 148 186 144 189 142 191 141 192 142 192 144 192 147 193 152 194 159 195 166 197 173 198 179 199 184 200 187 201 189 201 190 202 190 202 191 202 190
26 230 176 231 176 232 175 233 172 234 169 237 164 240 159 243 153 246 148 249 144 251 141 252 139 253 139 253 140 254 141 255 143 256 147 258 153 260 159 262 166 264 172 265 176 267 180 268 182 269 183 269 184
28 251 96 252 95 252 94 253 91 254 86 257 79 260 72 264 63 269 55 273 48 276 42 278 40 279 39 280 40 281 42 282 45 285 52 287 60 290 69 294 79 297 89 300 96 301 102 303 105 303 107 304 108 304 109 304 108
25 54 93 54 94 54 93 55 91 55 88 56 85 58 81 60 76 63 71 66 66 68 61 70 57 72 53 73 51 74 50 74 49 75 50 76 53 78 56 80 62 82 68 84 74 86 78 87 82 88 83
28 130 148 130 146 131 144 132 141 135 137 137 131 140 124 143 117 146 111 148 106 149 103 150 102 150 101 151 103 152 106 153 111 155 118 157 125 158 132 160 139 161 143 162 146 163 148 163 149 164 149 164 150 165 150 165 149
32 204 159 203 159 203 158 204 156 205 152 208 147 211 139 216 130 220 121 224 113 227 107 230 103 231 101 231 100 232 100 233 102 234 105 236 110 239 118 242 127 245 136 247 145 250 153 252 159 255 166 256 170 257 174 258 176 258 177 258 178 258 177 258 176
52 44 100 45 100 46 99 47 98 48 97 48 95 49 93 50 91 52 89 53 86 54 84 56 81 57 78 58 75 60 72 61 69 62 66 63 63 64 61 66 59 67 57 67 55 68 54 68 53 68 52 69 53 70 55 71 58 72 61 74 65 75 68 77 72 78 75 80 78 82 81 80 78 82 81 83 84 84 86 85 87 86 89 87 90 87 91 88 91 88 92 88 93 88 94 89 94 89 95 89 96 90 96 90 97
65 170 98 170 97 170 96 170 95 171 93 171 91 172 88 172 86 173 83 174 80 175 78 176 76 178 73 179 71 180 69 182 66 183 64 184 62 185 61 186 59 186 58 187 57 188 55 189 54 190 52 190 50 191 49 191 48 192 48 192 47 192 46 192 47 193 48 194 51 195 54 196 56 198 59 199 62 200 64 201 66 202 69 203 71 204 73 205 75 205 77 206 78 207 80 208 82 209 84 211 86 213 89 215 91 213 89 215 91 217 93 218 95 219 97 220 98 221 100 221 101 222 101 222 102 222 103 222 104 223 104
9 b
49 225 123 225 125 225 129 225 134 226 140 226 147 226 154 226 159 226 162 225 162 225 161 224 157 223 152 222 146 221 138 220 132 221 126 223 120 226 117 230 115 234 115 237 118 239 122 239 127 238 132 235 137 232 141 229 143 227 144 225 143 224 142 225 140 226 139 228 138 232 137 235 138 239 139 243 141 246 144 248 147 249 150 248 154 245 156 241 159 236 160 230 162 225 162 221 161 218 160
52 65 132 66 133 66 136 68 140 69 145 69 152 70 159 69 167 68 172 66 176 65 178 63 177 62 174 62 168 61 161 62 153 64 145 68 137 72 131 75 126 79 123 83 123 86 124 88 127 88 132 87 138 85 143 82 148 79 151 76 153 73 154 71 154 70 153 70 152 72 151 74 150 77 149 81 149 85 149 89 150 92 152 94 156 94 159 93 163 90 166 87 170 82 173 77 175 72 176 68 176 64 174 63 172
51 123 41 123 40 124 40 124 42 125 44 126 49 127 54 128 61 128 67 128 72 128 76 127 77 127 76 126 73 125 68 124 62 123 55 123 49 123 44 124 39 127 36 131 35 135 34 139 35 143 37 145 40 145 44 144 48 142 52 139 55 136 57 133 58 132 59 131 58 133 57 135 55 138 54 142 53 146 53 150 54 153 56 156 59 157 63 158 66 156 69 153 72 149 73 143 75 138 76 133 76 129 76
49 239 156 239 158 240 161 240 166 241 171 241 177 240 182 240 187 239 190 239 191 238 191 238 188 237 183 236 177 236 168 236 159 237 150 238 145 241 139 245 135 250 134 254 136 257 140 258 145 258 149 255 155 252 160 248 164 245 167 242 169 241 169 241 168 243 167 246 165 250 164 255 163 260 164 264 167 266 170 267 173 267 177 266 180 263 183 258 186 253 188 250 190 244 190 238 190 234 189
56 172 80 172 81 172 84 172 89 172 97 173 105 172 113 172 119 172 124 172 126 172 125 172 121 172 116 172 110 172 102 171 95 171 88 171 83 172 79 174 76 176 73 179 72 182 72 185 73 187 75 189 79 189 83 189 86 187 90 185 94 183 96 181 98 179 98 177 99 177 98 177 97 178 96 179 95 182 94 186 93 189 93 194 94 198 96 201 99 203 104 204 108 203 112 202 116 199 119 195 121 191 122 187 122 182 122 178 120 175 119 172 117
51 64 56 64 58 64 60 65 65 64 71 64 77 64 83 64 89 63 91 63 88 62 84 62 78 61 73 60 67 61 62 62 57 64 52 68 49 72 46 76 45 79 46 81 49 82 53 82 57 81 61 79 65 76 68 74 70 72 70 71 70 71 69 72 68 74 67 77 66 81 66 84 66 88 68 91 69 93 71 95 73 95 75 94 77 91 79 88 82 83 84 78 86 74 87 71 87 68 87 67 85 67 84
60 205 151 206 155 206 161 206 167 207 175 207 181 207 187 207 191 206 193 206 194 205 192 205 190 204 186 202 181 201 175 201 169 200 162 200 157 201 151 202 146 203 142 206 138 209 135 212 134 216 134 219 136 223 138 225 141 226 145 226 149 224 153 222 158 218 162 215 165 211 168 209 169 207 169 209 167 212 166 215 165 219 164 224 164 228 165 232 166 235 168 237 170 238 173 239 176 238 179 237 182 235 185 234 187 231 189 227 192 222 194 216 195 210 196 205 195 199 194 195 193
58 160 68 161 71 161 74 162 79 163 85 164 90 164 96 164 101 164 103 163 104 163 103 162 101 162 97 161 92 161 86 160 80 160 74 160 69 161 66 162 62 164 60 166 58 169 57 172 57 174 59 175 61 177 64 177 68 176 72 174 76 171 79 168 80 165 81 163 81 162 81 162 80 163 79 165 78 167 77 170 77 174 77 178 77 181 78 184 79 187 81 188 83 189 85 190 88 189 91 187 93 184 96 180 98 176 100 171 102 167 103 163 102 160 101 158 100
57 42 46 43 49 43 52 44 56 44 61 45 66 45 70 44 73 44 76 43 77 42 74 42 71 41 67 40 62 40 58 40 53 41 49 42 45 44 42 46 39 49 37 51 36 54 35 57 36 59 38 60 40 61 44 60 48 58 51 56 55 53 58 50 59 48 60 47 60 47 59 49 58 51 57 54 56 57 56 61 56 65 57 69 58 72 59 73 60 74 63 75 65 74 67 73 70 71 72 68 74 64 76 61 77 57 78 53 78 50 78 47 77 45 76
9 c
37 108 124 108 125 109 125 109 126 109 127 108 126 108 124 107 122 105 120 104 118 101 115 99 114 96 113 93 113 90 114 87 117 84 120 81 123 79 128 77 133 76 139 76 144 76 150 77 155 79 160 81 164 83 168 87 170 91 172 96 171 102 170 108 168 114 166 119 163 123 161 125 159 126 158
38 202 89 202 90 202 91 202 90 202 89 202 88 202 86 201 85 201 83 200 82 199 81 197 81 195 81 193 81 191 82 188 83 186 85 184 87 182 91 180 95 179 99 178 103 178 108 179 113 180 117 182 120 184 123 187 125 191 126 194 126 198 126 201 124 204 123 207 122 208 120 210 119 210 118 211 117
36 81 56 82 56 82 55 82 54 81 54 81 53 80 53 80 52 79 51 78 50 76 49 75 49 74 49 72 50 70 51 68 53 65 55 63 58 62 60 62 62 61 64 62 67 62 70 63 72 64 75 65 77 68 78 70 79 73 80 76 79 79 79 82 77 84 76 87 75 88 74 89 73
41 127 144 128 144 127 144 127 143 126 142 125 141 124 140 122 138 120 137 118 137 116 136 113 136 110 136 106 138 103 139 100 142 97 146 94 150 92 156 90 162 89 170 88 177 87 184 88 190 89 196 90 202 93 206 96 210 100 213 105 214 111 214 119 213 126 210 134 207 141 203 147 200 151 197 154 195 156 194 156 193 157 193
36 185 56 185 57 185 58 185 57 185 56 185 55 185 53 184 52 183 51 182 50 181 49 179 48 177 48 175 47 172 49 170 51 167 53 164 56 162 60 160 65 159 69 158 74 158 79 159 83 160 87 163 92 167 95 170 96 175 96 181 95 187 93 193 90 198 86 201 84 203 82 204 81
36 70 35 71 35 70 35 70 34 69 34 68 33 67 32 64 31 62 30 59 29 57 28 54 27 51 28 49 29 46 31 44 33 41 35 39 38 37 41 35 45 35 49 34 54 35 58 36 61 39 65 42 67 46 69 50 71 55 72 60 72 65 71 71 69 75 68 79 66 82 65 83 64
30 131 130 131 129 130 128 129 126 128 124 126 122 124 120 121 119 118 119 116 119 113 120 111 121 108 123 106 127 104 130 102 135 101 140 100 146 99 151 100 155 101 159 103 162 106 164 111 166 116 166 123 165 128 163 133 160 136 158 138 156
33 194 43 194 44 194 45 194 44 194 43 194 42 193 41 192 39 191 38 190 37 188 36 186 35 184 35 182 35 180 36 177 38 175 40 173 43 172 47 171 51 170 56 170 61 170 65 171 69 173 73 175 75 179 76 183 77 187 77 192 76 196 75 200 74 202 73
38 65 42 65 41 65 40 64 40 63 39 61 38 59 37 58 36 56 36 54 35 53 36 51 36 49 37 47 38 44 39 42 41 40 43 38 45 36 48 35 51 35 55 34 58 35 61 35 64 36 67 37 70 39 73 41 75 43 76 46 77 50 78 53 79 57 79 61 78 64 77 69 76 73 74 75 73
9 d
45 259 144 259 145 259 147 259 150 260 155 261 160 261 166 262 171 262 175 262 178 262 179 262 177 262 175 261 171 260 166 259 161 258 156 257 152 257 148 258 145 259 142 260 139 262 136 264 135 267 134 271 133 274 134 279 136 283 139 286 141 289 145 291 150 292 155 291 160 290 165 288 170 285 174 281 176 277 177 273 178 269 178 266 177 263 176 260 176 258 176
44 172 93 173 94 173 95 173 98 174 101 174 105 175 111 175 117 175 122 174 127 174 130 173 132 173 133 173 132 173 129 173 125 173 120 173 115 173 110 172 105 172 101 172 97 173 93 174 90 176 88 178 86 181 86 184 86 188 88 191 91 195 94 197 99 199 103 200 108 200 113 199 117 197 121 195 124 192 126 188 127 184 128 181 128 177 127 175 126
51 49 44 49 45 50 48 50 52 51 58 52 64 53 71 53 77 53 82 53 85 52 89 52 92 52 94 52 96 51 96 51 95 51 93 50 91 49 87 49 83 48 78 47 73 46 67 46 62 46 56 46 51 47 46 49 42 51 39 55 36 58 35 63 35 68 35 73 36 77 39 81 42 84 46 86 52 88 57 88 63 87 69 86 74 84 78 82 81 78 83 74 85 69 87 63 88 59 88 55 89 52 88
40 85 130 85 131 85 133 86 136 86 140 87 146 87 150 87 156 86 160 86 164 85 164 85 163 85 160 84 155 83 150 82 144 82 139 82 134 82 130 82 127 84 125 86 124 89 124 93 125 97 127 101 130 105 134 108 139 111 144 112 149 112 153 111 158 109 161 106 165 101 167 96 169 89 171 82 172 75 173 70 173
44 154 52 154 53 154 55 154 58 155 62 156 67 157 72 157 78 157 82 157 86 156 88 156 89 156 88 155 86 154 83 153 79 152 74 151 69 150 65 150 60 149 56 150 53 150 50 152 48 154 46 157 46 161 46 165 47 169 49 172 51 175 55 177 59 177 62 177 67 176 70 174 74 172 77 169 79 166 81 163 82 160 83 157 84 155 84 153 83
41 37 43 37 44 38 45 38 48 39 52 39 56 40 60 40 64 40 67 40 69 39 69 39 67 38 65 38 62 37 59 36 55 35 51 35 48 35 45 37 42 38 40 40 38 43 36 45 36 48 37 51 38 54 41 57 43 60 47 62 52 63 56 64 59 63 62 62 64 59 65 55 67 51 68 46 69 42 70 40 70 39 71
39 222 105 223 106 223 108 223 112 223 118 224 125 224 132 224 140 223 146 223 150 222 152 221 149 220 144 219 138 218 130 217 123 215 117 214 112 214 108 214 104 216 102 218 100 222 99 226 99 231 102 235 106 238 112 240 119 240 126 239 133 237 139 234 145 231 149 228 152 225 154 222 156 219 156 217 155 216 154
44 141 54 142 55 142 57 142 59 142 63 142 68 142 73 143 78 143 83 142 86 142 89 142 91 142 92 142 91 141 89 141 85 140 80 139 74 139 69 139 64 140 59 141 56 142 53 144 51 147 49 151 49 155 50 159 51 164 54 168 57 171 62 174 67 176 71 176 76 175 80 173 83 171 85 166 88 160 90 154 92 149 93 144 94 141 94 139 94
44 32 33 32 34 32 35 32 38 32 42 33 47 33 53 34 58 34 62 34 65 34 66 34 67 33 65 33 63 32 60 31 56 30 52 29 48 29 44 30 40 32 37 33 35 35 32 38 29 41 29 44 29 47 31 50 34 52 38 55 42 56 47 57 52 58 56 58 59 56 62 54 64 51 66 48 67 44 68 41 69 38 70 37 70 35 70 35 71
9 e
56 279 119 279 118 278 117 277 116 277 115 276 114 275 114 273 113 271 113 269 113 266 114 263 116 259 118 255 121 253 124 251 128 250 133 251 137 252 142 254 146 256 149 258 150 261 152 263 152 265 152 267 151 269 150 270 150 270 149 271 149 270 149 268 148 267 148 265 149 263 149 261 150 259 151 257 152 254 154 252 155 250 157 248 160 247 162 247 165 248 169 251 172 254 174 258 176 263 177 268 176 273 175 278 174 283 172 286 170 289 168 290 167
54 167 80 167 79 167 78 166 78 165 77 163 76 161 75 159 75 155 75 152 76 149 77 147 79 144 82 143 87 142 93 142 100 144 106 146 112 149 115 153 117 156 116 159 115 162 113 164 112 165 110 166 108 165 107 163 105 161 105 159 104 155 104 152 104 149 105 146 106 143 107 140 109 137 110 136 112 134 115 134 118 134 122 134 126 135 131 136 134 139 138 142 140 145 143 149 143 154 143 159 142 164 140 168 137 171 135 173 134
47 62 46 63 47 62 48 62 47 61 45 60 44 59 42 57 42 55 41 53 42 50 43 48 44 45 47 43 51 41 56 40 60 40 64 41 69 43 72 46 74 49 75 52 75 55 75 58 74 60 73 61 72 61 71 60 71 59 70 56 70 53 71 50 72 46 74 43 76 41 78 39 81 38 85 38 90 38 94 40 99 43 102 47 105 51 106 57 106 61 105 66 103 70 100
53 263 145 263 144 263 143 262 143 262 142 260 141 259 140 256 140 253 140 250 141 247 143 244 145 242 148 241 150 239 154 239 158 240 162 241 165 243 168 246 169 250 170 253 170 256 170 258 169 259 169 260 169 260 168 260 169 259 169 258 169 257 170 255 170 253 171 250 172 247 173 243 175 240 176 237 178 234 180 233 182 232 183 232 186 233 188 235 190 237 193 240 195 244 197 249 198 254 198 259 197 263 196 267 195 269 193
43 183 79 183 78 182 77 181 76 180 75 178 74 174 74 170 74 167 75 163 77 160 80 159 84 158 89 158 95 159 100 161 105 164 108 167 110 169 110 172 109 174 108 175 107 176 104 175 102 174 101 172 101 169 101 166 102 162 104 158 107 155 110 152 114 151 118 150 122 150 126 152 130 154 133 157 135 162 136 167 136 172 135 177 133 182 131
39 75 75 75 74 74 73 73 72 71 71 69 69 64 69 60 70 55 70 51 72 47 75 45 79 43 83 43 88 44 93 46 98 48 101 51 102 54 102 56 101 57 99 58 98 58 97 57 97 54 98 51 100 47 103 43 107 39 111 36 115 35 119 34 124 36 128 39 131 43 134 49 135 55 134 61 133 66 130
52 256 63 256 64 257 64 256 63 256 62 255 61 254 59 252 57 250 55 248 54 245 54 242 56 239 58 236 62 233 67 231 72 230 77 231 81 232 85 234 88 237 89 241 90 244 89 247 88 249 86 250 84 251 83 250 81 249 80 247 79 245 79 242 79 240 79 237 79 234 80 231 82 228 84 226 87 224 90 223 94 222 98 222 103 223 107 225 111 227 115 231 118 236 119 241 120 247 119 252 117 257 114 261 112
54 160 42 160 41 159 40 158 39 157 39 155 38 154 37 152 36 150 35 149 35 148 35 146 36 144 37 142 40 140 43 138 47 137 51 137 55 138 58 139 62 141 65 143 66 145 67 146 68 148 67 149 66 150 65 151 63 152 61 152 60 151 59 149 58 147 59 144 59 141 60 137 61 134 63 130 64 127 66 126 67 124 69 123 71 123 73 124 75 126 78 128 80 132 82 136 84 141 85 146 86 152 85 157 84 161 83 164 81
60 71 39 71 38 71 37 71 35 70 34 70 33 69 32 68 31 66 31 64 30 62 30 60 29 57 30 54 31 51 32 48 35 46 38 44 41 43 44 42 47 41 51 42 55 43 58 44 60 47 62 49 63 51 63 54 63 56 63 58 62 59 62 59 61 60 61 59 60 58 60 56 61 54 61 52 62 50 63 48 63 46 64 44 66 42 67 41 69 40 72 39 74 38 77 38 80 38 83 38 86 39 90 40 93 42 95 45 98 47 99 52 99 57 98 62 96 66 95 71 92
9 f
23 288 93 287 93 287 92 285 92 283 92 278 92 273 93 266 95 260 98 254 100 249 102 246 104 245 106 245 109 245 113 246 120 247 128 247 138 247 149 247 159 247 169 246 176 246 180
29 199 89 199 88 198 88 196 87 193 85 189 83 185 81 180 79 175 77 171 75 168 74 165 73 164 73 163 73 162 73 163 74 163 76 163 80 163 85 163 93 163 102 162 113 161 123 160 132 160 138 159 143 159 145 158 146 158 145
24 80 58 80 59 80 58 79 57 77 56 75 55 70 55 65 54 59 55 55 56 50 57 47 59 46 61 47 64 48 68 49 75 50 85 50 95 50 106 50 115 49 122 49 125 48 127 48 128
28 269 62 268 62 267 62 266 61 264 61 261 61 257 61 252 61 246 62 240 62 235 62 231 63 228 63 226 63 225 64 225 65 225 66 226 68 227 72 227 77 228 83 228 89 228 95 227 100 227 105 226 107 225 108 225 107
29 175 50 174 49 173 49 171 49 167 48 163 48 158 47 153 48 148 48 144 48 141 49 139 49 138 50 137 52 137 53 138 56 138 60 138 65 139 70 139 76 139 83 138 89 138 95 137 100 137 104 137 107 137 109 137 110 137 111
24 72 40 71 40 68 40 65 40 61 40 57 39 53 39 49 39 46 39 44 39 43 40 42 40 42 41 42 42 42 44 43 48 43 53 43 58 44 64 44 70 44 75 44 78 44 80 44 82
26 318 111 317 110 316 110 314 109 311 109 307 108 302 108 295 108 289 109 284 109 279 110 276 111 274 112 274 113 273 115 274 118 275 123 276 130 277 140 277 150 277 160 277 170 276 178 276 183 276 187 275 188
29 238 52 238 51 237 51 236 51 234 50 232 49 228 49 223 48 217 48 212 48 206 49 202 50 198 51 196 52 195 54 196 57 196 61 197 67 198 75 199 84 199 94 199 103 199 111 198 117 198 120 198 122 197 122 197 121 197 120
27 85 42 85 41 84 41 82 41 80 41 76 41 71 40 65 40 59 39 54 39 49 39 46 40 44 41 43 42 43 43 44 45 45 49 46 54 47 61 48 69 48 76 49 84 49 90 49 94 49 97 49 98 49 99
9 g
37 270 90 270 89 270 88 269 86 267 85 265 85 263 86 260 88 257 92 254 97 252 104 252 111 252 117 254 123 257 126 261 128 264 128 269 127 272 126 274 124 276 122 276 119 275 117 273 115 270 113 267 111 264 111 262 110 261 110 260 110 262 110 264 110 266 110 270 109 273 109 276 108 278 107
48 182 55 182 56 181 54 181 53 180 51 178 49 175 48 173 46 169 46 165 47 162 49 158 52 154 57 152 63 150 69 148 77 147 85 147 92 149 99 152 104 156 107 162 109 167 110 173 109 179 107 183 104 186 101 187 97 188 93 187 88 185 85 183 81 180 79 176 77 173 77 169 77 166 77 163 78 162 78 161 79 163 79 166 79 169 79 174 78 179 78 183 77 187 76 189 75
37 77 46 76 45 76 44 74 42 72 41 70 40 67 40 64 41 61 43 59 47 58 51 58 56 58 60 60 65 62 69 65 72 69 73 73 74 76 73 78 72 80 70 81 68 80 66 79 64 77 62 75 61 72 60 70 60 68 60 68 61 70 61 72 62 74 61 77 61 79 61 81 60 82 60
50 174 145 174 144 174 143 173 143 172 141 170 140 169 139 166 138 163 137 160 137 157 138 154 140 151 143 148 147 146 153 145 159 146 165 147 171 150 176 154 179 158 182 162 182 167 181 171 180 174 177 177 174 178 171 179 168 179 166 178 163 176 161 173 160 170 159 168 158 165 159 163 159 161 160 160 160 161 160 162 160 164 160 166 160 170 159 174 158 178 157 181 156 184 155 185 155 186 155 185 155
49 213 59 213 58 213 57 212 56 211 56 210 54 208 53 207 52 204 51 201 52 199 53 196 56 193 61 190 66 188 72 187 79 187 85 190 92 193 97 197 101 202 104 207 105 212 104 215 102 218 99 220 96 221 91 220 87 219 83 217 81 214 78 212 77 208 76 205 75 202 75 200 75 198 76 197 76 197 77 198 78 200 79 203 79 207 79 212 79 216 79 221 77 224 76 227 76 229 75
41 90 51 90 50 90 49 89 48 87 47 85 46 83 46 81 46 79 46 76 48 74 50 72 54 71 58 70 62 71 67 72 71 73 75 76 79 79 82 82 85 86 86 90 86 94 85 97 83 99 80 100 78 100 75 98 72 96 71 92 70 89 70 86 70 84 71 83 72 83 73 85 73 87 73 91 73 94 72 97 72 100 71
57 304 116 304 115 305 116 305 115 304 115 304 114 303 112 302 110 300 108 298 106 295 106 292 106 288 107 284 109 279 112 276 116 272 121 270 126 268 133 267 140 267 146 268 152 270 158 272 162 276 165 280 167 285 168 290 167 295 165 299 163 303 160 305 157 307 155 307 153 307 151 306 150 305 148 302 146 300 145 297 144 294 143 292 144 289 144 287 144 285 144 284 145 285 145 286 145 288 145 291 144 295 144 300 143 304 142 308 141 311 140 313 140 314 139
51 201 92 200 92 200 91 200 90 199 89 198 88 197 87 195 87 193 87 191 88 188 90 186 92 183 95 181 99 179 104 178 110 177 117 178 123 179 128 181 132 184 135 187 137 191 137 195 136 199 134 201 132 203 130 204 127 204 125 203 124 201 122 199 121 196 121 194 121 192 121 190 121 189 122 188 122 189 122 190 122 192 122 194 122 197 121 200 121 203 120 205 120 206 119 207 119 208 119 208 118 207 118
60 73 51 74 51 74 52 74 51 73 51 73 50 72 49 71 47 69 46 67 45 64 44 61 44 58 45 55 46 53 48 50 51 49 54 48 57 47 61 47 65 47 69 48 73 49 77 51 81 53 85 56 88 59 90 62 91 66 92 70 91 73 90 77 89 79 87 81 85 82 83 83 81 83 79 83 77 82 75 80 74 78 73 76 72 74 72 71 72 69 72 67 72 64 72 63 72 62 72 63 72 64 72 67 72 69 71 72 71 75 70 77 70 80 69 81 69 83 68 84 68
9 h
40 254 104 254 103 254 102 254 103 255 106 256 112 256 120 257 129 257 140 257 150 257 158 256 163 256 166 255 167 255 168 256 167 256 164 256 161 257 158 258 153 260 149 263 144 266 141 270 138 273 136 277 135 280 135 284 135 286 136 288 138 289 141 291 145 291 150 291 156 291 161 290 165 290 168 290 170 289 170 290 169
31 155 61 154 60 155 61 155 64 156 70 157 79 158 89 158 97 158 108 158 117 157 122 157 125 157 126 157 125 157 123 158 119 160 115 163 110 167 105 171 101 175 99 178 99 182 100 184 103 186 107 188 111 189 116 189 119 190 122 190 124 189 125
40 44 39 44 37 44 36 44 37 44 40 44 44 44 51 43 59 43 68 42 76 42 83 41 88 41 91 41 93 41 94 41 93 41 92 41 91 41 88 41 85 42 81 43 77 45 73 47 70 50 67 53 64 56 62 59 60 62 59 65 58 68 59 69 60 71 63 72 67 74 71 74 76 75 81 75 85 75 89 75 91
36 247 106 246 102 245 100 245 99 245 100 245 101 246 105 247 112 248 122 250 135 251 147 252 159 251 168 251 174 251 177 251 178 251 176 252 174 253 171 255 168 258 165 261 162 265 161 270 160 273 161 276 162 279 164 280 166 282 167 282 169 282 171 283 173 283 174 283 176 283 177 283 176
38 148 67 147 67 147 66 147 65 148 66 148 67 148 69 149 73 149 80 149 90 149 102 148 113 147 123 146 131 146 135 145 136 146 136 146 135 147 132 148 126 150 119 153 112 156 104 159 98 164 93 168 90 171 89 174 89 177 91 179 95 180 100 182 106 182 112 183 118 183 123 184 128 184 131 184 133
25 44 58 43 58 43 59 44 61 44 65 45 71 45 78 45 87 45 95 45 101 45 105 45 107 45 106 46 102 47 98 49 95 51 91 53 89 56 89 58 89 60 91 62 93 63 97 64 100 64 103
34 120 121 120 120 120 122 120 126 120 132 120 139 120 148 120 157 119 166 119 174 118 179 118 183 118 185 118 184 118 182 118 179 118 175 119 172 121 166 124 162 127 158 131 155 135 153 138 153 141 153 143 155 145 158 147 162 148 167 148 171 148 175 148 178 148 180 148 181
37 176 30 176 29 176 28 176 29 176 32 177 36 177 43 177 52 177 61 177 70 176 79 176 84 175 88 175 90 175 89 175 88 175 85 176 81 178 76 180 71 183 67 186 63 190 61 195 60 199 59 203 60 206 61 209 62 211 65 212 68 212 72 212 77 212 81 211 86 211 89 211 92 211 94
27 35 27 34 26 34 27 35 30 35 35 36 42 37 51 37 60 37 68 37 75 37 79 37 81 37 82 37 81 38 79 40 76 42 72 46 68 50 64 54 62 58 62 62 62 65 65 68 68 69 71 70 74 70 76
9 i
12 172 131 173 131 174 134 175 140 176 148 177 159 179 171 180 183 181 193 182 200 182 204 182 205
8 77 121 78 124 79 129 80 137 81 145 82 155 83 162 83 168
10 282 83 282 82 283 85 284 90 284 97 285 107 286 117 286 125 286 132 286 135
14 221 66 222 64 223 63 223 64 222 69 220 77 218 89 215 102 212 115 209 128 208 136 207 141 207 143 207 142
17 128 57 128 53 128 49 127 47 127 48 128 51 129 58 130 68 133 81 135 95 137 108 139 119 141 126 142 129 143 131 142 131 142 129
11 47 46 46 45 46 46 46 49 45 55 45 63 45 73 45 83 45 91 45 98 45 102
17 197 95 198 92 198 91 198 92 198 95 199 101 199 109 199 119 199 131 199 141 198 150 198 158 198 163 198 167 198 170 198 172 198 173
11 117 32 118 32 118 33 118 36 119 40 119 46 119 53 120 59 120 64 120 68 120 71
12 39 27 39 26 39 29 38 34 38 40 38 47 38 54 38 60 38 64 38 67 38 69 39 69
9 j
26 284 68 285 67 285 66 285 67 286 70 286 75 287 83 288 93 288 105 288 116 288 126 287 135 287 141 286 145 285 147 285 149 284 149 283 149 280 149 276 148 270 147 263 146 254 145 247 142 240 139 236 136
25 207 54 208 52 208 51 208 52 209 55 209 60 209 66 210 74 210 82 210 90 210 97 210 102 209 105 208 107 205 108 201 107 195 105 189 104 182 101 176 100 170 99 166 98 164 98 161 98 160 99
22 99 37 99 36 99 37 99 38 100 41 100 46 100 52 101 59 101 66 101 72 101 75 101 78 100 79 99 78 98 77 95 76 92 75 87 75 83 75 79 75 77 76 75 77
29 297 92 297 89 298 88 298 89 298 91 299 95 299 102 300 111 301 123 302 136 302 148 303 160 304 170 304 177 304 183 304 188 303 192 302 194 301 195 299 196 296 196 292 195 288 195 283 194 277 193 271 193 266 192 262 192 260 192
21 204 44 204 43 204 44 204 47 205 52 205 60 206 69 206 80 206 91 205 101 204 110 202 116 200 120 195 122 190 122 184 119 177 116 172 113 168 110 165 107 164 105
21 92 28 93 30 93 34 94 39 95 47 95 55 96 64 96 73 95 80 95 86 95 90 94 93 93 95 91 96 89 96 85 95 81 93 77 90 73 87 69 84 65 83
24 253 95 254 94 254 92 254 93 254 95 254 100 254 107 254 115 254 125 255 134 255 143 256 150 256 156 256 159 255 162 253 164 250 165 245 165 239 166 232 166 225 166 220 166 216 165 214 165
21 198 37 197 38 197 40 198 44 198 49 198 56 198 62 199 68 199 73 198 76 198 78 197 79 195 79 193 79 189 78 184 78 178 78 173 78 168 78 163 77 161 76
27 72 34 72 33 72 34 72 35 73 37 73 41 74 46 74 52 74 58 74 65 74 70 74 74 75 78 75 80 75 82 74 82 74 83 73 83 72 83 70 84 67 84 62 85 57 85 53 85 48 85 45 84 42 85
9 k
29 122 132 123 131 124 130 124 131 124 132 123 136 121 140 118 146 113 154 106 161 99 168 91 174 83 178 76 180 70 182 64 182 62 180 62 179 64 177 70 175 77 174 85 174 94 174 102 177 110 180 115 183 118 186 122 189 124 191
35 217 51 217 52 217 53 217 55 216 58 216 62 214 66 211 72 207 78 201 84 194 90 186 95 179 99 172 101 167 103 164 104 163 104 162 105 163 105 163 104 164 104 165 104 167 103 170 103 173 103 177 103 185 106 191 110 196 115 202 121 208 128 213 137 218 146 221 154 224 160
25 102 41 102 40 102 41 102 42 101 44 100 46 97 49 94 53 89 56 84 60 79 63 74 65 70 66 67 67 65 67 68 66 70 65 74 65 78 66 83 68 87 70 91 73 94 76 96 78 98 80
28 110 130 110 131 111 133 111 137 110 142 108 148 105 154 101 159 95 163 88 165 81 166 75 166 70 166 65 164 63 163 63 161 66 160 71 159 78 159 86 159 94 160 102 163 110 166 116 170 121 174 124 177 127 178 128 179
24 204 73 205 73 205 75 205 79 203 84 200 91 196 99 191 107 185 113 180 116 174 118 170 118 167 117 165 114 166 111 168 108 173 106 179 105 186 105 194 107 200 109 206 111 210 113 212 114
24 92 54 93 54 93 55 93 56 92 59 90 62 87 67 83 71 78 76 73 81 68 84 62 85 59 85 57 83 57 81 58 78 61 76 65 74 71 73 77 73 82 74 88 75 93 77 97 79
38 206 115 207 115 208 115 208 117 208 120 207 125 206 131 203 139 200 146 196 153 190 158 186 160 180 162 172 163 165 162 159 161 154 159 152 158 152 156 155 154 159 153 165 151 172 151 179 151 186 152 193 155 199 158 204 162 209 166 213 172 217 177 220 182 223 187 225 190 226 193 227 194 228 195 228 194
29 204 53 204 52 204 54 203 56 202 60 200 64 196 69 192 73 187 76 182 78 177 78 173 78 170 77 167 76 167 74 167 72 169 71 171 70 176 70 181 71 187 73 192 76 197 80 201 83 204 87 206 90 207 92 208 94 209 95
29 74 35 74 36 74 38 74 41 73 46 71 51 69 57 64 61 60 66 55 69 51 70 47 71 44 70 42 69 41 68 40 67 41 66 43 66 47 65 51 65 56 65 62 66 68 68 73 70 76 72 79 73 80 74 81 75 82 76
9 l
28 152 142 152 141 153 143 154 147 156 153 158 161 160 171 162 181 163 190 164 196 165 201 165 203 165 204 165 205 166 205 168 205 172 204 177 204 183 204 190 204 197 204 204 204 208 204 211 205 213 205 214 204 215 204 214 203
24 189 53 189 54 190 56 190 60 191 65 191 71 190 78 190 84 189 89 188 92 188 93 187 94 187 93 188 94 189 94 192 95 196 96 202 98 208 98 216 98 222 98 228 98 232 98 235 97
28 69 48 69 45 70 43 70 42 70 43 71 47 70 53 70 62 69 73 67 86 64 97 62 107 60 115 59 120 59 123 58 124 59 124 60 125 63 125 67 125 73 124 79 124 88 122 96 121 104 120 110 118 115 118 117 118
21 85 152 85 151 85 153 85 157 86 163 85 171 85 179 85 187 84 193 84 197 85 199 86 199 89 199 92 199 96 198 101 197 106 196 111 196 116 195 119 195 122 195
23 170 81 170 78 170 77 170 78 170 82 169 88 168 97 167 108 165 120 164 130 162 138 162 144 162 147 163 149 165 150 169 151 175 151 183 152 192 154 201 156 211 158 219 160 226 162
23 42 48 43 47 43 46 43 47 43 49 44 54 43 61 43 70 42 82 41 93 40 103 38 111 38 116 38 119 39 119 42 119 47 118 52 116 58 115 65 114 71 113 76 112 79 111
26 233 78 233 77 233 78 233 81 234 87 235 95 235 105 235 116 234 127 234 134 234 140 233 144 233 146 233 147 234 147 235 146 237 146 241 146 246 145 253 144 260 144 267 143 272 142 277 142 279 143 280 143
26 134 58 134 56 134 55 134 56 135 59 135 64 135 70 134 77 134 85 133 93 132 100 131 105 130 109 130 110 130 111 131 111 132 110 135 110 139 109 144 109 150 109 156 109 162 109 166 109 169 109 171 110
24 27 41 27 40 27 41 27 42 27 45 27 49 27 55 27 61 27 68 27 75 26 80 26 84 26 87 26 88 26 89 27 89 30 89 33 89 37 88 42 87 47 86 52 86 56 86 59 86
9 m
42 266 164 267 165 267 164 267 161 267 157 267 153 268 144 270 135 272 126 274 118 277 112 279 109 281 109 283 111 286 116 288 122 289 131 291 139 291 147 292 153 291 157 291 156 291 152 291 146 292 140 293 133 294 128 296 123 298 120 300 118 303 118 305 120 308 123 311 128 313 135 315 143 316 151 316 158 316 163 315 166 315 168 315 169
37 184 146 183 147 183 146 183 145 183 143 183 142 184 138 185 134 186 130 188 127 189 125 191 124 192 124 193 126 194 130 195 133 197 138 198 141 198 144 199 145 200 143 201 140 203 137 205 133 208 129 210 126 211 124 212 124 213 124 213 126 214 129 215 133 217 138 218 142 219 146 219 148 220 149
48 64 124 63 125 63 126 62 127 62 126 61 125 62 121 62 115 64 107 65 98 68 90 71 83 72 78 74 75 75 75 75 77 76 81 78 87 80 95 82 103 84 110 85 115 86 118 87 119 87 118 87 115 88 109 89 103 91 95 94 88 96 82 98 78 100 77 101 76 102 75 102 76 103 77 104 80 105 85 106 92 107 100 107 107 108 114 109 119 109 122 110 123 110 125 110 126
43 271 175 271 177 271 180 271 182 271 183 271 181 271 178 271 173 271 166 273 158 275 151 277 145 280 141 282 140 284 141 286 144 287 147 288 153 289 159 289 165 290 169 290 171 290 170 291 167 292 162 294 155 296 148 299 141 302 136 305 132 308 130 311 130 313 132 315 136 317 141 318 147 319 153 320 157 321 160 321 163 321 165 321 166 321 167
37 191 128 191 130 191 131 191 132 191 131 191 129 191 126 192 120 193 114 195 107 198 100 200 95 203 93 205 93 207 96 209 101 210 108 211 114 212 120 212 124 212 127 213 125 214 121 215 115 217 109 220 102 222 97 224 93 226 92 228 94 230 98 231 105 233 113 235 121 236 129 237 134 237 137
46 54 104 54 107 55 109 55 110 54 108 53 104 53 98 52 90 52 82 53 74 56 67 59 62 62 58 67 57 70 57 74 60 76 64 78 70 80 77 81 85 81 92 81 98 81 102 81 103 80 102 81 97 81 92 83 85 85 78 87 71 90 67 93 64 95 61 98 60 100 59 103 60 105 61 107 65 109 70 111 77 112 86 113 94 113 102 113 107 114 111 114 112
35 74 202 74 203 74 204 74 203 74 201 74 198 75 195 76 191 78 187 80 184 82 181 84 181 86 181 87 184 89 187 89 191 90 195 90 198 90 200 91 201 91 200 93 198 95 195 97 191 100 187 103 184 105 182 107 182 109 183 110 187 112 192 113 197 114 202 115 206 115 209
48 163 118 163 119 163 120 163 119 163 118 163 114 162 108 162 100 164 91 166 83 168 76 171 71 174 69 178 69 181 71 184 74 186 78 188 84 189 92 189 99 189 104 188 109 188 112 187 112 188 111 188 107 189 102 190 96 192 89 194 83 196 78 198 75 199 73 202 72 204 72 206 72 208 72 211 74 213 76 215 80 216 85 217 93 218 100 219 107 219 113 219 117 219 118 219 117
47 32 81 32 82 32 81 32 79 32 76 32 71 33 66 34 60 35 54 36 49 37 45 38 42 38 41 39 40 40 40 40 41 42 43 43 47 45 53 47 59 49 64 51 69 52 72 53 74 54 75 54 74 54 73 55 70 56 66 58 61 60 57 63 52 67 48 69 44 71 42 72 41 73 41 74 41 74 43 75 47 75 52 76 58 78 64 78 69 79 73 80 75 81 76
9 n
34 126 204 126 203 126 201 126 199 127 196 128 191 129 185 131 179 133 173 134 169 135 166 136 165 136 166 137 167 138 170 140 174 144 180 147 186 151 192 155 197 158 200 160 202 162 203 163 202 164 199 164 194 164 187 164 178 165 169 165 161 166 154 166 149 167 146 167 144
35 211 142 211 143 211 142 211 140 210 137 210 131 210 124 211 116 211 108 212 101 212 97 212 94 213 93 214 96 216 100 219 106 222 113 226 122 230 130 234 136 236 140 238 142 239 142 239 141 240 139 240 135 240 130 241 122 241 112 241 102 241 93 241 84 241 78 240 75 240 74
37 95 131 95 132 95 133 94 132 93 130 92 126 92 120 92 111 93 103 95 94 98 86 101 81 104 78 106 76 108 77 109 79 112 83 114 88 117 94 120 101 122 108 124 113 126 118 127 121 129 124 129 126 130 126 130 125 131 120 133 113 135 103 137 93 138 84 139 77 140 73 141 72 141 71
34 237 190 237 189 237 188 237 185 238 180 240 174 242 166 244 157 247 149 249 143 251 138 252 136 253 135 254 137 256 141 258 147 261 155 265 165 269 175 272 183 275 189 277 193 279 194 280 194 281 192 281 188 282 181 284 172 285 160 286 149 286 140 286 132 286 127 286 124
34 177 117 176 117 175 116 174 114 173 111 172 105 171 99 171 91 170 84 170 79 170 75 170 73 170 72 171 72 173 75 177 80 181 87 187 95 192 103 196 111 199 116 201 119 202 120 202 119 202 115 203 110 203 103 204 95 204 86 205 79 205 73 206 68 206 65 206 64
34 54 116 54 115 53 113 53 110 53 106 53 99 53 92 55 85 56 78 58 73 60 69 62 67 63 66 64 65 67 66 68 68 70 70 72 74 74 79 76 85 78 91 79 98 81 103 81 107 82 110 82 108 83 105 84 99 85 92 86 84 87 77 88 72 89 69 90 66
39 231 160 230 160 229 159 228 157 227 153 227 149 227 141 227 130 228 119 228 109 229 99 230 93 230 90 231 89 232 90 234 95 237 101 241 109 247 119 253 130 258 139 264 146 268 151 271 155 274 157 275 157 276 157 277 155 277 152 277 147 278 140 278 131 278 121 278 110 278 100 278 91 277 84 276 79 276 77
37 148 137 147 138 147 139 146 139 146 138 146 136 145 132 144 128 144 122 144 116 143 110 143 106 143 103 143 102 144 101 144 102 146 104 147 108 150 112 154 118 158 124 161 129 164 133 167 135 168 137 169 137 170 137 170 136 170 134 170 132 170 128 170 124 170 118 170 112 170 107 170 102 170 99
36 37 111 37 112 37 111 36 109 36 106 37 101 37 94 37 85 37 77 38 70 38 64 39 61 39 59 39 58 40 60 41 62 44 66 47 73 51 82 55 90 59 98 62 103 64 107 66 108 68 107 68 106 69 104 70 100 71 96 72 90 72 84 72 78 72 72 72 67 72 63 71 61
9 o
26 268 136 267 135 266 134 265 133 263 133 261 134 258 136 255 138 251 143 249 148 247 154 248 161 250 166 254 171 259 174 264 175 269 174 273 172 277 169 279 164 280 159 280 153 279 147 277 142 274 138 270 136
31 170 68 170 66 169 64 168 62 166 62 163 62 159 65 155 70 151 77 147 87 146 97 147 108 151 117 156 125 163 129 170 130 178 129 185 125 191 119 194 111 197 101 197 91 196 82 193 74 189 68 183 65 176 63 169 64 162 65 157 68 153 70
26 79 51 77 51 76 52 74 54 72 57 70 60 69 65 69 71 70 76 73 81 77 84 81 87 85 87 88 86 91 83 93 79 94 74 94 68 93 63 91 59 88 55 84 53 81 52 78 51 76 52 76 54
25 271 158 270 158 267 159 265 160 262 164 260 169 258 176 257 183 258 190 260 196 263 200 268 202 274 201 279 198 285 194 289 189 291 183 292 176 290 170 287 164 283 160 277 157 271 157 265 158 261 159
26 175 87 174 86 172 86 170 87 167 89 164 93 161 100 159 108 158 117 160 125 163 131 169 135 176 137 183 137 189 134 195 130 200 124 202 117 203 109 203 101 200 93 195 87 188 84 181 84 173 85 166 88
29 62 68 61 67 60 66 58 66 56 68 53 71 51 76 49 83 48 92 49 101 52 109 56 115 62 119 68 120 74 119 79 116 83 111 85 105 87 97 87 89 86 81 83 74 79 70 74 67 67 67 60 68 55 71 51 75 50 78
33 264 90 265 87 265 85 263 83 261 83 257 83 253 85 247 89 241 96 236 106 231 118 228 131 228 144 230 154 235 162 243 168 251 170 260 168 270 163 279 155 286 145 291 135 293 127 294 115 291 104 286 95 279 88 271 84 261 84 253 86 244 89 239 94 235 98
24 157 117 157 116 155 116 154 117 152 120 149 124 147 130 145 136 144 143 145 149 148 153 151 155 156 155 160 154 165 150 168 146 170 140 171 135 170 129 169 125 166 121 162 119 158 119 154 120
25 71 63 69 62 65 63 62 67 58 72 55 81 54 91 53 102 55 112 59 120 64 124 71 125 77 122 82 118 86 111 88 103 89 94 88 85 87 77 84 71 79 66 75 65 70 64 67 65 63 66
9 p
39 262 144 262 143 262 144 263 146 264 150 265 155 266 161 267 167 267 173 267 177 267 180 266 181 265 180 264 177 263 172 262 166 261 159 260 151 260 145 261 139 263 135 267 131 272 128 277 126 283 125 288 127 292 129 296 134 298 138 298 143 297 147 295 152 291 155 287 157 281 158 273 159 265 160 258 160 252 159
36 195 93 195 92 196 94 196 97 197 102 198 108 198 115 199 123 199 131 198 136 198 140 197 142 196 139 195 133 194 126 193 117 192 109 192 101 192 94 194 89 196 85 199 83 203 83 207 84 211 86 215 90 217 95 219 100 218 106 217 111 214 115 210 119 205 121 200 122 196 122 193 121
36 85 78 86 80 86 84 87 89 89 95 90 103 91 110 92 117 92 122 91 125 91 127 90 126 89 124 88 121 87 114 86 105 85 96 85 88 86 80 87 75 90 72 94 70 97 69 102 69 106 71 110 74 113 78 113 82 112 86 110 89 106 92 102 95 97 98 93 99 89 101 86 101
35 248 100 248 103 249 106 250 112 251 121 252 130 253 141 254 150 254 158 255 163 254 167 253 166 251 163 249 157 247 148 244 137 242 126 241 113 242 103 244 94 248 86 253 82 259 80 266 80 273 83 279 89 284 96 287 103 286 111 284 118 279 124 273 128 266 131 259 133 254 132
32 148 129 149 130 150 133 151 137 152 143 153 150 154 157 154 162 154 166 154 169 153 169 152 167 151 163 150 158 148 151 147 142 147 134 148 127 149 122 153 119 156 117 160 117 164 119 167 122 169 126 170 130 169 134 166 138 162 141 158 143 152 144 148 145
32 57 79 57 80 58 84 58 89 60 95 61 103 62 110 62 117 61 121 60 123 58 122 56 118 54 111 52 103 51 93 51 83 53 74 56 68 61 63 67 61 73 61 79 63 83 66 86 70 87 75 85 81 81 86 76 91 71 95 64 97 61 98 58 98
37 209 121 209 122 210 124 211 128 211 133 213 140 214 148 214 155 215 161 215 166 214 169 214 171 212 171 211 169 210 164 209 157 208 147 207 137 208 126 210 116 212 108 215 100 220 95 224 93 228 92 234 93 239 97 243 102 245 108 246 114 244 121 240 126 235 129 229 131 223 132 217 132 212 132
38 131 58 131 57 131 58 132 59 133 62 134 66 135 71 136 77 136 83 136 89 136 93 136 95 135 95 134 93 134 88 132 83 131 76 130 69 130 63 130 57 132 52 135 48 138 45 141 43 145 42 149 42 154 45 157 48 159 52 161 57 160 61 159 65 156 68 153 70 148 71 144 71 140 71 136 71
37 37 62 37 61 38 60 38 61 38 63 38 66 39 71 40 77 40 84 40 89 40 94 40 97 40 99 39 97 39 94 38 89 37 82 36 75 36 68 37 61 38 56 40 51 42 47 46 44 49 43 53 42 56 43 59 46 61 50 62 54 61 59 59 63 55 66 51 69 46 71 42 72 40 71
9 q
44 277 138 277 139 277 138 276 138 275 137 273 136 271 136 268 137 264 139 259 142 254 148 251 155 248 162 248 170 250 177 254 183 260 187 266 188 272 187 278 184 284 180 288 174 291 167 292 159 291 153 289 147 285 143 280 140 275 139 271 138 267 138 264 138 263 139 264 139 266 140 271 140 276 140 283 139 289 137 295 136 300 134 303 133 306 132 307 132
39 170 81 170 79 169 78 167 78 164 80 161 84 158 90 156 99 155 108 155 118 157 126 161 132 167 136 173 137 180 135 186 133 191 128 194 122 196 113 196 104 195 95 191 88 186 82 180 79 175 77 170 77 166 79 165 82 167 86 170 89 176 92 183 94 191 93 200 91 209 88 217 85 224 82 228 80 231 79
38 67 65 67 64 67 63 65 63 63 64 60 65 56 69 52 74 48 82 45 92 44 104 44 115 46 125 50 132 55 136 61 136 66 134 73 130 77 124 79 115 81 105 81 95 80 85 78 76 75 71 71 68 65 67 61 68 57 70 55 73 54 76 56 79 59 82 64 83 72 84 77 82 84 80 88 77
36 236 157 236 156 235 156 234 157 233 159 232 162 230 165 229 170 228 175 229 179 230 183 233 186 237 188 241 189 244 189 248 188 250 185 252 181 252 177 252 171 252 166 250 161 247 158 244 155 241 155 237 155 234 156 231 157 230 159 231 160 233 160 238 160 244 159 251 157 257 154 263 152
43 174 84 174 83 174 81 173 80 172 79 170 78 168 77 164 77 160 78 156 81 151 84 147 90 144 96 142 104 142 112 143 118 145 123 149 127 154 128 160 128 166 126 171 121 176 115 179 108 181 99 182 91 181 84 179 78 176 74 172 71 167 70 163 71 158 72 155 74 154 76 154 78 156 79 160 80 165 80 170 79 176 77 181 76 184 75
38 62 54 60 52 58 51 56 51 54 52 51 54 49 56 47 60 46 66 45 73 46 81 47 90 50 96 54 101 59 104 62 104 68 101 73 97 77 90 79 82 79 74 78 68 75 62 72 58 67 55 62 53 58 53 55 53 53 54 52 56 54 58 57 60 62 62 69 63 75 63 82 62 87 60 92 59
52 244 122 243 120 242 118 241 117 239 116 236 117 232 119 228 123 224 130 220 138 217 147 216 156 217 166 220 174 225 181 232 185 239 187 246 187 254 184 260 179 267 174 272 168 275 161 278 154 279 148 278 142 276 136 272 130 267 126 262 122 255 119 249 116 243 115 238 114 234 114 232 114 231 115 231 116 232 117 233 118 234 119 235 120 236 120 237 120 240 119 244 118 250 116 258 113 266 110 274 107 280 105 284 103
40 165 67 165 66 164 65 162 66 159 67 157 68 154 72 152 76 151 82 151 89 152 95 155 100 158 103 162 104 166 103 170 100 173 96 175 90 176 85 175 80 174 75 172 71 170 69 167 67 164 66 162 65 160 64 158 64 157 63 156 63 157 63 159 64 162 63 166 63 171 62 177 61 187 59 191 59 193 59 195 58
49 47 45 47 44 46 44 45 43 44 43 42 43 40 44 38 46 35 50 34 54 33 60 33 66 34 71 37 77 40 81 45 84 50 86 55 85 61 84 65 81 69 77 71 73 72 68 72 63 71 58 68 54 64 50 61 47 58 45 54 43 51 42 49 42 47 42 45 42 44 42 44 43 43 43 43 42 43 43 44 43 46 44 49 44 54 43 59 42 67 41 74 40 80 39 85 38 88 37
9 r
41 244 138 244 140 244 142 245 147 245 152 246 158 246 163 246 167 246 170 245 168 244 164 243 159 241 152 240 144 240 137 242 131 244 127 248 123 252 121 257 121 260 123 261 127 261 132 259 138 255 144 252 149 249 152 246 154 246 155 247 154 250 154 254 154 260 155 266 156 273 159 279 162 284 165 288 168 291 170 292 171 291 170
38 145 85 146 83 147 85 149 89 150 96 151 104 152 112 152 119 152 123 151 121 150 115 150 107 149 99 150 90 152 84 155 79 160 76 165 76 169 78 172 81 174 85 174 91 173 97 170 102 168 106 165 109 163 110 162 111 164 112 167 113 171 115 176 117 183 119 190 122 195 124 200 125 203 125 204 125
38 51 64 52 63 52 64 53 66 53 70 54 76 54 82 54 88 53 94 53 98 52 100 51 99 51 96 50 90 50 83 51 74 53 66 56 60 59 56 63 54 67 54 70 56 71 59 71 63 70 67 68 72 64 76 63 79 63 82 64 85 68 87 73 90 78 92 84 94 89 96 94 98 98 100 102 100
51 244 137 245 138 246 139 247 143 248 149 249 158 250 167 251 176 251 184 251 187 251 190 250 191 248 187 247 182 245 175 243 167 242 158 241 151 242 144 243 138 245 133 249 129 253 126 258 124 264 124 269 125 273 128 276 133 276 140 274 146 270 153 265 159 260 163 256 164 251 166 247 165 246 164 246 163 250 161 255 161 261 162 268 164 275 168 282 173 287 177 291 182 295 187 297 189 298 191 299 192 299 191
51 162 51 162 53 163 57 164 62 166 70 167 78 168 87 169 95 169 101 169 106 169 109 168 110 167 109 166 106 164 103 162 98 160 91 157 85 156 78 154 71 154 65 154 59 156 54 159 49 162 45 166 43 170 41 175 41 179 43 182 45 184 50 184 55 182 60 179 66 175 70 170 73 167 75 165 76 164 77 166 77 170 77 176 78 183 80 188 82 196 86 203 89 208 93 212 95 214 96 214 97 213 96
48 49 50 49 51 50 54 51 58 52 63 53 69 54 76 53 83 53 88 52 93 51 95 50 94 49 91 48 86 46 81 44 74 42 67 41 61 41 56 41 51 43 47 46 43 50 41 55 39 59 38 64 39 68 40 70 43 70 48 69 53 65 58 62 63 59 66 56 68 55 69 54 70 55 70 58 71 62 72 68 74 74 76 80 79 85 82 90 85 93 88 94 89 95 90 95 89
39 200 156 201 159 202 163 203 168 203 175 204 181 204 186 203 190 203 191 202 190 202 186 201 181 200 175 199 168 199 162 199 157 200 153 203 150 205 149 208 149 211 150 213 152 215 154 215 158 214 161 212 164 209 167 208 169 206 170 205 171 206 172 207 173 210 174 215 176 220 178 226 179 233 181 238 182 243 183
41 146 58 146 59 146 62 146 66 146 72 147 80 147 87 147 95 146 100 146 103 146 102 146 99 145 95 144 88 143 81 142 74 141 67 141 61 143 56 147 53 152 50 157 48 163 49 167 51 170 54 171 59 169 64 166 69 162 73 158 76 155 77 153 78 153 79 155 80 159 81 169 85 176 89 183 93 188 97 193 99 196 100
40 26 51 26 50 26 52 26 56 27 60 28 66 28 73 29 79 29 83 28 86 28 87 27 85 26 81 24 75 24 68 23 61 24 54 26 48 30 43 35 41 40 40 45 40 49 43 50 46 50 52 48 58 43 63 39 68 35 71 32 74 31 75 32 75 35 76 39 77 45 78 51 81 57 84 61 87 64 89 67 90
9 s
39 262 141 262 140 263 139 263 138 263 137 262 135 261 133 258 132 254 131 250 130 245 130 241 132 237 134 234 137 233 141 232 144 234 150 236 155 240 159 244 163 249 166 254 168 258 170 261 173 263 175 263 178 263 182 260 185 257 188 251 190 245 191 238 192 231 191 224 189 219 186 215 182 213 179 212 175 212 173
31 194 65 194 64 193 63 192 62 189 61 186 61 181 63 176 65 172 68 169 72 168 76 169 79 172 82 177 86 182 89 187 93 191 97 193 101 193 104 192 108 188 110 184 112 178 113 171 113 165 112 160 111 155 109 153 107 152 106 152 104 154 102
30 84 56 84 54 83 53 82 52 80 51 77 52 74 54 70 57 65 60 64 63 62 66 63 69 65 72 70 74 75 76 79 78 82 80 84 83 85 86 84 89 82 92 79 94 74 96 69 98 62 99 57 99 52 99 49 98 48 97 48 95
34 270 96 271 94 271 92 270 89 268 87 264 84 260 82 254 81 248 81 242 83 237 87 233 92 232 98 233 102 235 107 240 114 246 121 253 127 260 133 265 138 269 142 271 147 271 151 269 156 266 160 261 163 255 166 248 167 243 167 237 165 231 163 227 160 224 157 223 154
33 174 84 174 83 174 82 173 81 171 79 169 78 166 77 161 77 157 78 152 80 148 83 146 86 146 90 147 94 151 99 156 103 161 107 167 111 172 115 175 118 177 121 177 124 176 127 174 130 170 133 166 135 160 136 157 136 151 135 145 134 140 131 136 129 133 127
27 80 50 80 49 80 47 79 46 76 46 73 47 69 49 63 52 58 57 55 61 53 66 55 70 58 74 63 76 70 79 75 82 79 86 82 91 82 96 79 100 75 105 70 108 62 110 56 111 51 111 48 110 46 107
42 302 74 301 74 300 74 299 72 298 71 296 68 294 66 292 63 289 62 284 62 280 63 275 66 270 70 266 75 264 82 263 90 264 97 266 104 269 109 274 112 279 114 284 115 289 117 293 119 297 122 300 125 301 129 302 135 302 139 300 144 297 148 293 152 288 155 282 157 275 158 268 158 261 157 255 154 250 150 246 147 244 144 242 141
37 190 95 190 94 189 93 188 91 186 90 184 89 181 89 178 90 176 92 174 95 172 99 172 102 172 106 173 109 174 112 176 114 178 115 180 116 182 117 184 117 186 118 188 119 190 121 192 123 193 125 193 127 193 130 192 132 190 134 188 136 184 137 181 138 177 139 173 139 170 138 167 137 165 136
41 71 42 70 42 69 41 67 41 64 40 60 40 57 40 53 42 50 44 48 46 46 49 45 52 46 56 47 58 48 60 51 62 53 63 56 63 60 63 63 63 67 64 70 65 73 66 75 68 77 70 78 73 79 74 79 77 79 79 78 82 77 84 75 86 72 88 68 89 63 90 59 91 55 91 51 90 48 89 45 88 42 86
9 t
28 240 78 240 79 242 79 245 80 250 81 257 81 264 81 272 81 279 81 285 81 289 81 292 80 293 80 293 81 292 82 292 85 291 90 290 98 288 108 286 118 284 129 281 138 279 146 277 151 276 155 275 157 275 158 274 158
27 147 52 145 52 145 51 144 51 144 52 146 53 150 53 155 53 162 53 169 52 175 51 180 50 184 50 186 50 187 50 187 51 186 52 186 56 187 61 187 69 188 79 189 90 190 100 191 110 192 116 193 121 193 123
24 38 38 36 38 36 37 39 37 42 37 47 37 53 37 60 37 66 37 71 37 74 38 75 38 75 40 75 42 75 46 74 52 74 59 73 69 73 79 72 88 71 95 70 100 69 102 69 101
29 268 93 266 93 265 92 264 92 263 91 264 91 266 90 271 90 278 89 286 88 294 87 302 86 308 86 312 86 314 87 315 87 315 89 315 92 315 98 314 105 314 115 314 126 314 136 315 145 315 152 315 157 316 160 316 162 315 161
30 171 77 170 77 169 77 170 77 171 77 175 76 180 76 186 76 192 75 198 75 203 75 206 75 209 76 210 76 211 78 211 81 210 85 210 92 210 102 210 113 210 125 210 136 210 146 210 155 210 163 210 168 211 173 211 175 211 177 210 177
27 57 65 56 65 56 64 58 64 61 65 67 64 72 64 78 63 84 63 89 63 92 63 94 63 94 64 94 65 94 68 93 71 93 76 92 85 92 94 92 104 92 112 92 120 92 124 92 126 92 127 92 126 92 125
24 258 80 257 80 256 80 258 79 261 79 266 77 271 76 278 74 283 73 286 72 289 72 291 71 292 71 292 72 292 73 292 75 292 80 293 87 293 96 294 105 295 113 295 119 295 123 295 125
27 159 50 157 49 157 50 159 50 163 52 168 54 175 56 183 59 190 62 197 64 201 65 204 66 206 67 207 67 207 68 207 69 206 72 205 77 203 84 201 94 198 104 195 112 193 119 193 124 192 126 193 125 193 124
25 21 41 22 41 24 42 28 41 34 40 40 40 48 39 56 38 61 38 67 38 69 38 71 39 70 41 70 43 68 47 67 53 64 61 62 70 61 78 59 87 58 93 58 98 58 101 58 100 59 97
9 u
24 229 142 230 140 230 138 231 138 231 140 231 142 231 147 232 154 233 162 235 170 237 178 240 184 244 188 249 189 253 188 257 186 261 181 263 175 265 167 266 159 266 152 266 145 266 141 265 139
22 154 93 154 91 154 90 155 92 155 96 157 102 158 110 160 119 162 128 164 135 167 140 170 142 174 142 178 140 181 136 184 131 186 123 187 114 188 105 188 97 188 91 187 89
21 60 58 60 59 60 61 60 64 60 69 61 75 62 82 63 90 66 96 70 103 73 106 77 108 81 107 84 104 87 99 90 93 92 85 93 78 93 72 93 66 92 62
24 220 119 220 117 220 118 220 120 220 124 220 130 221 137 222 145 224 153 227 160 231 165 236 168 242 168 248 166 253 162 258 157 261 149 263 141 265 133 265 127 265 121 264 117 264 114 263 112
21 142 50 141 49 142 52 142 54 143 59 144 65 147 73 150 81 154 87 160 91 165 93 170 93 175 91 178 87 180 82 182 76 182 71 182 66 181 62 180 60 180 59
20 44 40 44 41 43 45 43 50 43 57 45 65 47 73 50 81 54 87 59 91 65 92 71 90 74 87 77 82 79 77 80 72 80 67 79 63 79 59 79 57
21 224 125 225 127 225 130 226 135 227 141 228 148 230 155 233 161 236 165 240 168 243 168 247 166 250 163 252 158 254 151 255 144 256 136 257 129 257 124 257 121 257 119
23 133 67 133 66 133 65 133 66 133 69 134 73 135 80 136 88 138 98 141 106 145 112 150 116 156 116 159 115 164 110 168 102 171 94 173 85 173 77 173 70 172 66 171 63 170 60
21 30 53 30 54 30 56 30 60 31 64 33 70 35 76 39 82 42 86 46 88 50 88 54 86 57 83 59 78 61 74 61 68 61 63 60 58 59 54 58 50 57 47
9 v
31 250 157 249 156 250 156 250 157 252 160 254 165 257 171 260 179 263 186 266 192 267 196 269 198 270 198 272 195 274 191 276 184 278 176 281 168 283 162 284 157 285 155 286 154 287 154 289 154 292 155 296 155 301 155 305 155 308 154 311 154 313 153
32 164 114 163 112 163 110 163 109 163 111 165 114 167 120 170 127 173 136 175 144 178 151 181 156 184 158 187 158 190 155 193 150 196 143 199 135 201 127 202 120 202 114 203 111 203 109 203 108 204 108 206 108 209 109 213 109 218 109 222 108 226 106 229 105
31 60 66 61 65 61 64 61 65 62 66 64 68 67 74 70 81 74 90 77 99 81 107 84 113 86 117 88 117 90 114 92 108 94 101 95 92 95 84 95 77 95 71 94 68 94 65 94 64 96 63 98 62 102 60 107 59 112 58 117 56 120 55
34 259 150 258 150 258 151 259 151 260 152 261 155 262 158 264 164 267 170 269 177 271 183 274 188 277 190 281 190 284 187 288 182 291 175 292 169 293 162 294 157 293 153 293 150 292 149 292 148 293 148 294 148 297 148 301 147 306 146 311 144 316 143 319 141 322 141 324 141
33 168 101 168 98 168 97 168 96 168 97 169 100 170 104 173 111 175 118 178 126 181 132 183 137 185 140 186 141 187 140 188 136 189 131 191 122 192 113 193 105 194 98 194 94 195 92 195 91 196 91 198 91 200 91 204 91 208 91 212 90 215 89 218 89 219 89
32 57 58 57 57 57 56 58 57 58 58 60 62 62 68 64 76 66 86 69 95 71 102 74 106 76 107 79 105 81 100 83 93 85 86 86 78 87 72 87 67 87 63 87 62 86 61 87 61 87 62 89 62 91 62 95 62 99 62 104 61 107 61 110 60
31 80 155 80 153 79 152 80 153 81 155 83 160 86 167 90 174 94 181 98 187 102 191 105 192 108 192 111 190 113 185 115 179 116 172 116 165 115 159 115 155 114 153 114 152 114 151 116 151 119 151 124 151 128 151 134 150 138 150 142 149 145 149
30 160 93 161 91 162 91 164 94 166 99 168 105 171 112 174 120 177 127 180 132 183 135 185 136 187 135 189 131 190 126 191 118 192 110 192 102 191 96 191 91 191 88 192 86 194 86 197 85 202 84 207 84 212 83 217 82 221 82 223 82
29 48 58 48 57 47 56 47 55 48 57 49 59 51 64 54 71 58 79 61 88 63 95 66 100 68 102 70 101 72 97 73 91 74 84 75 76 76 69 76 63 76 59 75 57 76 56 77 56 79 56 81 56 85 56 89 56 93 55
9 w
41 257 148 257 147 257 146 257 147 257 149 257 154 257 161 257 170 258 179 259 187 260 193 262 196 264 197 267 195 269 192 272 187 273 182 275 176 276 170 277 166 278 163 278 162 278 163 279 165 281 168 283 173 286 177 289 182 291 186 294 188 296 188 299 187 300 184 301 178 303 172 304 163 304 154 304 146 304 139 303 133 302 130
43 159 64 158 61 158 60 158 61 159 65 161 70 163 79 167 90 171 101 176 113 179 122 183 128 185 131 186 130 188 125 189 117 191 107 194 96 196 85 198 78 199 74 200 72 200 70 200 71 201 72 202 76 204 83 206 92 208 101 211 111 213 119 216 125 218 128 219 129 221 126 223 121 225 112 227 102 229 90 231 79 232 69 232 62 233 57
39 37 63 37 61 37 59 36 58 37 61 38 65 38 72 40 80 42 89 44 96 46 101 48 102 50 100 52 96 54 91 56 85 58 79 59 76 60 73 60 72 60 71 61 72 62 74 64 77 67 82 70 87 72 92 75 96 76 98 77 99 79 98 80 95 81 90 82 84 83 76 84 68 84 62 84 58 83 55
37 249 123 249 122 249 123 249 124 249 128 248 132 248 139 249 146 250 153 252 159 254 162 257 164 259 163 261 161 263 158 265 154 266 150 266 147 267 146 267 145 268 146 269 149 270 152 272 157 275 161 278 165 282 167 286 166 290 163 294 159 296 153 298 146 299 140 298 135 298 130 297 128 296 126
35 159 100 159 98 159 97 158 99 158 103 158 108 158 115 158 123 159 129 161 134 164 136 167 137 171 136 174 134 177 131 179 127 180 125 181 122 181 121 182 122 182 123 183 125 184 129 186 131 189 133 191 135 194 135 196 133 199 131 200 128 202 123 202 117 202 111 201 106 200 101
44 35 49 35 47 35 46 34 47 34 48 33 52 32 57 31 64 31 72 31 79 32 88 35 96 38 102 43 105 48 106 54 103 58 99 62 93 65 87 67 81 68 76 68 73 68 71 68 70 68 71 69 73 70 75 71 79 73 84 75 90 77 94 80 98 84 101 87 102 91 101 95 99 98 95 101 89 104 82 104 74 104 66 102 59 99 53 96 48
43 229 129 229 127 229 124 230 122 230 121 230 123 230 126 230 132 230 141 230 150 231 160 232 168 234 174 237 177 241 177 245 176 249 172 253 167 256 161 258 156 259 150 260 146 260 144 261 143 261 144 262 146 263 149 264 153 266 158 269 163 270 167 273 170 275 171 277 169 280 166 282 161 284 154 286 146 286 137 286 129 285 122 284 116 282 111
37 129 69 128 68 127 67 126 69 126 73 126 78 126 86 126 94 128 103 129 109 130 114 133 116 135 115 138 112 140 107 142 102 144 96 145 92 145 90 146 90 148 92 150 97 153 102 157 108 160 113 163 117 167 119 169 118 171 115 173 109 174 101 175 93 175 85 175 80 174 74 173 71 172 69
34 33 39 33 37 32 37 32 39 32 43 32 50 33 57 33 65 34 72 35 77 37 79 38 80 40 78 42 76 45 72 46 68 48 65 49 64 51 64 53 67 55 70 59 73 62 76 65 77 69 78 71 76 73 74 75 70 75 64 76 59 76 53 75 48 74 44 73 41
9 x
37 259 144 258 143 257 142 256 141 255 140 256 141 258 143 261 146 265 150 270 155 276 160 283 165 289 168 295 171 300 172 304 172 308 172 310 170 311 168 311 165 309 163 307 161 304 160 300 161 296 161 291 163 286 166 281 169 276 174 271 178 267 183 264 188 262 191 261 193 261 194 260 195 259 195
33 181 87 180 85 180 84 179 84 180 85 180 87 182 91 185 95 188 100 193 106 199 111 205 115 211 116 217 116 222 115 226 113 228 111 229 108 229 107 227 106 224 106 220 107 215 109 208 112 202 115 194 119 187 123 181 128 175 132 171 135 169 138 168 140 168 142
30 54 48 53 47 52 47 52 48 54 51 56 56 60 61 65 67 72 74 77 79 83 83 89 85 93 86 97 85 99 83 100 81 100 79 99 76 97 75 94 74 90 74 85 75 79 78 73 82 67 87 61 91 57 95 54 99 53 101 53 103
37 227 154 227 153 227 154 227 155 228 157 230 160 233 164 237 169 241 174 246 178 251 181 255 183 258 184 261 184 262 183 263 183 263 181 263 180 263 178 262 177 261 176 260 175 258 175 255 176 251 177 247 178 242 180 238 182 233 185 229 187 226 189 224 190 223 190 222 191 221 191 220 191 218 192
37 162 72 161 71 162 72 163 75 165 79 168 84 172 90 177 97 183 103 189 108 195 110 201 111 206 110 212 107 216 103 220 99 222 95 223 92 223 89 222 88 220 86 216 86 213 85 208 85 203 86 198 87 192 89 187 92 181 96 175 100 169 106 163 113 157 119 152 125 147 129 144 131 143 132
32 46 52 45 51 45 50 46 52 48 55 51 59 56 65 61 71 66 76 72 80 77 83 81 83 85 83 87 82 89 80 89 78 89 76 89 74 88 73 87 72 85 71 83 71 81 72 78 73 73 75 69 77 63 81 59 85 55 89 53 93 52 95 52 97
34 213 130 212 130 211 130 212 131 214 134 216 139 219 144 224 151 229 157 235 161 241 164 247 166 253 166 258 164 261 162 264 159 266 157 266 154 266 152 264 151 261 151 258 151 252 153 246 155 239 159 232 162 224 166 217 170 211 174 206 177 202 180 200 182 198 184 197 186
35 141 59 140 59 140 60 140 61 142 63 144 66 147 70 152 76 157 81 162 86 168 89 173 91 177 92 181 91 184 90 186 89 187 89 187 88 186 88 185 87 184 87 183 86 182 87 181 87 180 88 178 88 176 89 170 91 164 94 155 99 146 103 138 108 132 112 127 116 126 118
32 33 49 33 50 33 51 34 53 35 56 38 60 40 64 44 69 49 73 54 75 59 77 63 78 67 78 70 78 72 76 72 75 73 74 72 72 71 71 69 70 65 70 61 70 57 71 51 73 45 76 41 79 34 85 28 91 24 96 21 101 20 104 21 104
9 y
48 251 107 251 106 250 106 249 108 247 110 246 114 244 119 243 124 244 130 245 134 248 138 252 139 257 138 262 135 267 131 271 126 273 121 276 118 277 115 277 113 277 115 277 117 278 122 279 128 280 136 281 145 282 154 283 162 283 169 282 174 280 177 277 179 273 179 267 178 261 176 256 173 251 169 248 166 247 162 249 159 254 155 261 151 270 148 280 145 290 142 297 141 302 142 304 142
38 140 59 140 58 139 60 138 62 137 66 136 72 135 79 135 85 136 90 139 91 143 90 148 87 152 82 157 77 160 72 162 69 164 68 165 70 165 75 166 83 167 93 168 104 168 115 167 124 164 131 160 135 155 137 149 137 144 136 140 133 138 128 138 123 141 118 146 113 154 108 162 103 171 100 178 99
40 22 48 22 47 21 47 22 49 22 53 24 57 26 62 29 66 32 69 35 69 38 67 42 64 44 59 47 55 48 50 49 46 49 44 49 43 50 45 52 50 54 56 58 65 63 75 68 86 71 96 73 105 73 111 72 116 69 118 63 119 57 118 51 115 45 111 40 107 39 101 40 94 43 88 49 81 56 75 63 69
40 262 126 262 124 262 123 262 122 262 123 262 126 262 130 262 135 263 140 265 145 266 148 269 149 271 148 274 145 277 142 279 138 281 135 282 133 283 133 284 135 284 140 286 146 288 154 289 163 291 171 291 178 289 183 287 185 283 186 278 185 274 182 269 180 267 177 266 174 268 170 271 166 277 161 284 157 291 154 297 151
45 158 75 157 74 156 74 155 76 154 81 153 87 153 96 154 106 156 114 159 120 163 123 168 122 174 117 179 111 183 103 186 95 189 88 190 82 191 79 192 77 192 78 193 82 194 89 195 99 197 111 198 124 200 137 200 150 199 161 196 171 192 179 185 186 177 189 169 190 161 188 156 183 153 176 154 167 159 159 166 152 175 146 185 141 194 138 203 136 209 136
37 56 59 56 56 56 54 56 53 56 56 56 59 56 64 57 69 59 74 62 77 65 78 69 77 72 74 74 71 76 67 78 63 79 59 79 58 81 59 82 63 85 69 87 77 89 87 90 97 90 107 88 115 85 120 80 124 75 125 70 124 65 122 63 117 63 112 65 106 70 100 75 95 81 91
41 254 107 254 106 254 105 253 105 253 106 252 108 252 112 252 118 252 124 253 130 255 134 258 136 262 136 267 133 272 129 276 125 279 121 281 118 283 116 284 117 285 121 285 126 285 134 286 143 286 153 285 162 282 171 278 178 273 183 265 186 257 186 250 184 245 180 243 175 244 168 250 162 258 157 269 153 279 150 289 150 296 150
45 153 62 153 61 152 62 151 63 149 65 147 69 144 75 142 81 141 87 141 93 142 98 145 100 150 100 155 97 161 93 167 88 172 83 175 79 177 76 178 74 179 73 179 74 179 76 179 81 178 87 178 95 177 104 175 113 172 121 168 128 164 134 159 137 153 139 148 139 144 138 142 136 142 132 145 127 150 122 158 118 167 114 177 111 185 110 192 110 196 111
47 32 37 32 35 31 35 30 37 29 39 28 43 27 48 27 52 28 59 30 65 32 69 36 71 41 71 45 69 50 65 53 60 56 55 57 51 59 47 59 44 59 43 59 42 60 43 60 46 61 50 62 56 63 64 65 74 67 83 68 92 69 101 69 109 67 115 64 119 60 121 56 121 51 119 47 116 43 111 41 105 41 99 42 93 46 88 52 83 58 79 64 76 71 75
11 z
109 193 53 194 53 197 53 200 52 204 52 208 52 215 52 222 52 232 52 239 52 242 52 244 52 245 51 246 51 247 51 248 51 249 50 249 51 248 53 247 55 246 57 244 60 241 63 239 67 236 70 234 73 232 76 230 79 228 81 227 83 225 85 224 87 223 89 222 90 221 90 220 91 220 92 219 92 218 93 217 95 215 96 213 97 212 99 210 100 209 101 208 102 207 103 206 104 206 105 204 106 203 107 202 108 201 109 201 110 200 111 199 111 199 112 198 112 198 113 197 113 196 114 195 114 194 115 193 115 194 115 193 115 194 115 195 115 198 115 201 115 205 115 209 115 213 115 216 115 217 115 218 115 219 115 221 115 223 114 225 114 227 114 228 114 230 114 230 113 232 113 233 113 234 113 236 114 238 114 240 114 242 114 244 114 245 114 246 114 247 114 248 114 249 114 250 114 251 114 252 114 253 114 254 113 255 113 256 113 257 113 258 113 259 113 260 113 260 112
134 22 47 22 46 23 46 24 46 26 46 28 45 32 45 36 45 40 45 47 45 54 45 58 46 61 46 64 46 65 46 67 46 68 46 69 46 70 46 72 46 74 46 76 45 79 45 81 45 83 45 84 45 85 45 86 45 87 45 88 45 89 45 88 46 86 47 85 49 84 51 82 53 81 54 80 56 78 58 76 59 75 61 73 63 71 65 70 66 70 68 69 69 68 70 68 71 67 72 68 71 67 72 66 73 65 74 64 75 62 76 61 77 60 78 60 79 59 79 58 80 57 81 56 82 55 83 54 84 54 85 53 85 52 86 52 87 51 87 50 88 50 89 49 90 49 91 48 91 48 92 47 93 46 94 44 96 44 97 43 98 42 99 41 100 40 101 39 102 38 103 37 103 37 104 36 104 36 105 35 106 34 107 34 108 33 108 32 109 31 111 30 111 30 112 31 111 33 111 36 111 40 111 44 110 47 110 50 110 51 110 52 110 53 110 54 110 55 109 57 109 59 109 62 109 64 109 65 109 67 109 69 109 71 109 73 109 76 109 78 109 79 109 80 108 81 108 82 108 83 108 84 108 85 107 86 107 87 107 89 107 91 107 94 107 95 107 96 107
31 249 136 250 135 252 135 256 134 261 132 267 131 274 130 280 129 284 130 287 132 288 135 287 139 284 145 280 152 274 159 268 167 261 174 255 180 252 183 248 187 247 189 248 190 251 190 258 190 266 189 275 189 285 189 292 189 298 189 302 190 304 190
27 168 78 167 78 166 78 168 77 170 76 174 75 179 75 184 74 189 74 193 75 195 77 196 82 195 87 192 95 189 103 185 111 181 117 179 121 177 124 177 126 178 126 181 126 186 126 191 126 197 125 202 125 207 125
30 56 61 55 61 54 62 53 62 55 61 58 60 63 59 69 58 75 57 80 56 84 57 87 60 87 65 86 71 83 77 80 82 76 90 71 97 66 103 63 108 61 111 62 113 65 113 71 113 76 112 83 112 89 112 95 111 99 111 102 111
34 220 73 218 73 220 72 224 71 230 70 238 69 245 68 253 67 259 68 263 70 266 73 266 78 265 86 263 96 258 108 253 121 247 134 240 145 233 156 226 163 221 169 218 173 216 175 218 173 222 170 225 168 233 166 242 163 252 162 261 162 270 163 277 164 282 166 285 167
31 140 103 139 103 139 102 141 102 142 102 145 101 149 100 154 99 158 97 162 97 165 97 167 98 167 101 167 106 165 112 161 118 157 124 153 131 148 136 144 139 142 142 140 144 140 145 141 145 144 144 148 144 153 143 159 142 165 142 170 141 175 141
26 53 50 51 50 50 50 51 49 53 49 55 49 59 49 63 49 68 50 71 52 72 56 73 60 71 65 69 70 64 74 61 78 58 80 57 81 56 82 56 83 58 82 62 82 65 82 71 81 74 81 77 80
36 249 144 247 143 246 143 247 142 248 142 252 141 257 141 263 140 269 140 276 139 280 140 284 141 285 143 284 146 282 151 278 157 272 164 266 172 259 180 252 188 246 194 241 199 238 203 236 205 235 205 240 204 245 202 252 199 261 197 270 195 278 194 286 194 292 195 296 197 298 198 299 200
35 173 58 175 58 178 58 182 58 187 58 193 58 198 59 203 59 206 59 208 60 209 62 208 65 207 68 204 74 200 80 195 88 189 96 183 104 178 110 174 115 172 118 170 119 170 120 172 119 174 118 177 117 181 116 187 114 194 113 202 112 210 111 217 110 224 110 229 110 233 111
34 33 54 31 53 30 53 29 52 30 52 31 52 33 52 37 52 41 52 46 52 50 52 54 53 56 54 57 57 57 60 56 65 54 71 50 76 46 82 41 88 36 93 32 97 30 100 29 102 30 103 32 102 37 100 43 99 50 97 57 97 64 97 71 98 76 98 78 99
9 A
15 239 59 237 61 236 61 239 62 245 61 253 60 264 59 275 57 286 55 296 54 302 53 307 53 308 53 308 54 307 55
12 133 129 130 129 129 129 133 129 139 128 149 128 160 127 171 127 181 126 189 126 194 125 196 125
11 60 51 63 52 69 51 79 51 91 51 105 51 119 51 133 51 144 52 153 53 158 53
18 175 189 173 189 172 189 175 189 180 188 188 187 198 186 209 184 221 182 234 180 245 178 255 177 263 176 269 175 272 174 274 174 275 175 274 175
23 131 120 132 120 135 120 138 120 144 120 151 120 160 120 171 120 184 120 198 119 212 120 227 120 242 120 255 121 267 121 277 121 285 120 290 120 294 120 295 120 295 121 294 121 293 122
14 55 77 55 78 56 78 60 77 65 77 73 77 80 77 88 77 94 76 99 76 103 75 104 75 105 75 104 75
17 180 151 179 151 181 151 184 152 190 153 197 155 206 158 215 161 225 163 233 166 241 168 247 169 251 169 253 169 254 170 255 170 254 170
16 146 104 145 104 146 104 147 104 151 104 158 104 166 103 175 103 186 102 197 100 207 99 216 97 223 97 229 96 231 96 233 96
16 23 61 22 61 22 62 23 62 25 62 28 62 34 62 41 62 50 61 59 61 70 61 79 61 87 61 92 61 96 62 98 62
9 B
14 288 170 287 170 285 170 281 169 276 169 269 169 260 168 249 168 240 168 231 169 224 169 219 169 217 169 216 169
11 234 116 232 117 229 118 224 120 218 122 210 125 202 126 194 128 187 129 182 129 179 130
13 193 92 192 92 191 91 188 90 185 89 179 87 172 85 164 83 156 81 148 80 142 78 138 77 137 76
11 297 167 296 167 293 167 289 167 284 166 277 166 270 166 264 165 260 165 257 165 256 164
16 202 122 203 122 201 122 198 121 192 121 184 121 174 122 162 122 149 123 136 124 124 124 116 125 109 124 106 124 105 124 106 124
12 126 77 125 76 122 76 118 75 113 74 107 73 100 73 94 73 88 73 84 73 81 74 79 74
15 321 78 320 78 318 78 315 79 310 79 304 79 297 80 289 81 280 81 272 81 265 82 260 82 257 82 256 81 257 79
16 216 128 215 128 214 128 211 128 207 127 201 126 193 126 184 125 174 125 163 124 154 124 145 123 139 123 136 122 133 122 132 121
15 123 74 122 74 121 75 118 75 115 74 110 74 103 73 96 73 88 73 80 74 74 74 69 75 67 75 66 75 67 74
9 N
14 253 182 251 183 250 184 249 184 250 182 254 179 258 175 265 170 272 163 280 158 287 153 292 150 296 148 297 146
11 185 147 185 146 185 144 187 140 190 134 194 126 199 115 205 104 212 93 217 84 221 78
10 75 135 75 134 76 132 77 130 79 125 83 119 88 111 94 102 100 93 106 85
10 192 172 192 171 194 169 198 165 203 160 209 153 217 146 225 139 232 132 238 126
10 124 135 126 134 130 130 136 125 144 119 153 112 158 108 166 103 172 99 177 96
10 49 93 47 95 46 97 47 94 50 90 54 84 59 77 67 69 73 62 78 57
14 174 161 174 160 175 158 178 153 182 148 188 140 195 131 203 121 210 111 215 103 219 97 221 94 222 92 222 93
11 94 120 96 118 100 113 106 106 114 99 124 89 135 80 146 72 155 66 162 61 167 58
12 29 98 29 97 31 95 35 90 40 84 46 77 52 69 58 62 64 57 69 53 71 51 73 50
9 P
23 146 162 144 162 140 162 135 161 128 161 120 161 110 161 100 161 90 162 84 162 76 162 70 162 65 162 63 162 64 162 66 163 71 163 76 163 84 163 94 162 104 161 114 162 124 163
20 243 111 244 111 243 111 242 110 238 110 234 109 227 109 220 109 211 110 202 111 194 112 187 114 183 115 181 116 183 116 187 116 194 116 203 115 213 115 222 114
18 147 71 146 71 144 70 140 69 135 69 129 68 121 68 112 69 105 70 98 70 94 71 92 71 94 72 98 72 105 72 113 72 122 71 130 70
21 288 147 289 147 287 147 284 147 279 147 273 149 264 151 255 154 246 157 239 159 233 161 230 162 229 162 230 162 233 162 240 161 249 160 261 157 273 155 285 152 294 150
21 219 126 219 125 217 125 214 125 210 124 204 123 197 123 190 122 182 122 174 121 168 121 163 121 160 121 159 122 160 122 163 123 169 123 177 123 186 123 195 123 203 122
19 137 92 136 92 134 91 130 89 126 87 121 85 114 82 107 80 101 79 96 78 92 77 90 78 91 79 94 80 98 82 104 84 111 86 118 88 123 89
25 301 155 300 155 299 154 297 154 294 154 288 154 283 154 276 154 270 155 266 155 263 156 262 156 261 156 262 156 263 156 265 156 269 156 274 156 282 156 292 155 303 155 314 154 323 153 329 153 334 153
24 234 117 233 117 232 117 229 116 225 115 219 115 211 116 201 117 191 118 182 119 174 120 167 121 163 121 160 122 159 122 160 122 164 121 170 120 179 119 189 118 200 117 210 117 218 118 224 118
20 110 82 111 82 110 82 108 81 104 80 100 79 93 79 86 79 80 79 74 80 70 80 68 81 68 82 71 82 75 82 81 82 89 81 96 80 103 80 108 79
9 R
12 292 99 293 99 293 100 292 102 288 105 283 111 275 118 265 127 253 138 243 147 234 155 228 160
10 227 79 229 78 230 78 229 79 226 81 222 85 217 90 210 97 202 105 195 112
9 137 79 138 78 137 78 135 80 132 84 128 90 123 97 118 105 113 113
13 285 79 284 80 282 82 278 85 272 91 264 98 255 107 245 116 235 126 225 134 217 140 211 144 207 147
10 197 61 197 60 197 61 195 64 191 68 186 77 179 88 172 99 166 110 161 118
13 123 54 123 55 121 56 118 58 114 62 109 66 103 71 97 76 92 80 89 83 87 85 86 85 87 84
16 229 72 230 72 230 71 230 72 229 73 227 77 223 83 218 94 210 107 201 120 192 134 185 145 179 153 176 158 174 161 173 162
14 142 94 143 94 142 94 142 95 139 97 136 100 132 105 126 111 120 117 114 123 110 127 107 129 105 130 105 129
10 84 38 83 39 81 41 78 46 73 53 67 61 60 69 55 76 50 81 48 84
9 S
14 255 169 256 170 257 170 257 167 257 163 256 157 255 148 253 134 252 118 250 103 249 89 248 78 247 70 246 67
12 213 161 213 162 212 160 211 158 210 153 209 147 208 139 207 131 206 124 205 119 205 115 205 113
13 140 136 140 137 140 136 140 135 139 133 138 128 137 120 136 110 134 99 131 88 129 79 127 72 126 68
12 239 158 240 160 240 161 240 158 241 153 241 145 242 134 243 120 244 108 245 98 245 90 246 85
9 154 144 153 141 153 137 153 130 154 120 154 109 155 99 157 90 157 84
10 82 124 82 125 82 123 82 119 82 112 83 103 83 93 84 84 85 76 85 72
14 210 134 210 135 210 134 209 133 209 132 209 129 210 125 210 121 211 116 211 111 212 108 212 105 213 103 213 102
16 142 121 142 122 142 121 142 119 142 116 142 109 142 100 143 89 143 76 144 64 144 53 144 46 144 40 144 36 144 34 145 33
11 62 117 63 118 63 117 63 114 63 109 63 102 63 93 64 83 65 73 67 65 68 61
9 U
26 238 155 237 155 239 155 243 155 249 156 256 156 264 156 272 157 279 157 285 157 290 157 293 158 296 158 297 158 298 159 299 159 299 158 298 157 298 155 297 152 297 148 296 142 296 136 296 130 296 125 296 122
27 125 140 122 140 120 140 121 140 124 140 128 140 135 141 144 142 153 142 162 143 171 144 179 145 185 145 189 145 191 145 193 146 193 145 192 143 192 140 191 136 191 130 191 124 191 116 192 110 194 103 195 99 195 95
22 72 74 71 75 73 75 76 75 82 75 89 75 98 74 106 74 114 74 120 74 124 73 127 73 128 73 129 72 128 71 128 68 128 64 128 60 128 55 128 51 128 49 127 47
29 212 183 211 183 211 184 212 184 216 184 221 184 229 184 240 184 251 184 262 184 274 184 284 184 293 183 300 183 306 183 310 182 312 183 313 183 314 183 313 182 313 180 312 178 310 174 309 169 307 163 306 157 306 152 306 148 306 146
24 156 142 156 141 157 141 161 140 166 140 174 140 182 140 191 141 199 141 207 142 214 142 219 142 222 141 225 141 226 140 227 139 227 137 227 133 227 129 226 123 226 115 225 107 224 99 223 93
24 47 99 46 100 46 101 48 101 52 101 58 101 65 101 75 102 83 102 90 103 96 104 100 105 103 105 104 105 104 103 103 101 102 98 102 94 101 90 100 85 100 80 100 76 100 73 100 72
30 210 183 208 183 209 183 211 183 216 182 223 182 233 182 243 182 254 182 265 182 274 182 281 182 286 182 290 182 292 182 293 182 292 182 292 181 291 179 290 177 288 174 287 169 286 164 286 160 285 156 285 154 284 152 284 151 284 150 284 149
27 159 120 157 121 154 122 155 121 159 121 165 120 173 119 183 118 193 118 202 118 211 118 218 118 223 118 227 118 229 118 231 118 231 119 231 118 230 116 229 113 228 108 226 101 224 93 222 86 219 80 217 76 215 73
28 31 85 30 85 31 86 35 86 40 86 47 87 55 87 63 88 72 89 79 90 85 91 90 91 94 92 98 92 100 92 101 92 102 92 102 91 101 90 101 89 99 86 97 81 95 76 94 71 92 65 91 61 91 58 90 56
9 V
34 158 185 157 185 156 184 157 184 158 184 161 184 165 183 169 182 175 179 179 176 185 171 189 166 193 161 194 156 195 152 194 149 192 148 190 147 188 148 186 149 184 151 184 153 185 156 188 158 192 160 197 159 204 158 212 154 220 148 229 142 237 136 244 130 248 124 251 119
37 104 168 105 168 106 167 107 166 110 164 113 161 117 157 120 152 123 146 125 142 126 137 126 134 125 132 124 131 123 131 122 131 121 132 120 133 119 135 118 137 118 139 119 142 120 144 123 146 127 147 131 148 135 146 139 144 143 140 146 136 148 129 151 121 152 112 154 102 155 94 155 88 155 84
31 46 135 45 135 45 136 46 135 48 134 51 132 54 128 57 124 60 118 63 112 64 106 64 101 63 98 61 98 58 99 57 102 56 107 56 111 58 114 61 116 65 116 70 114 74 110 78 106 81 100 83 95 85 91 88 87 90 84 92 82 93 80
38 174 186 173 184 173 183 173 182 173 181 175 181 177 181 181 180 186 177 191 174 196 170 200 166 204 161 206 157 207 153 206 150 205 148 204 147 202 146 200 147 199 147 198 148 198 150 199 151 202 153 206 153 211 153 216 151 222 148 228 143 234 137 239 129 244 121 247 113 249 110 250 104 251 101 250 101
32 99 170 98 170 97 171 98 170 100 170 102 169 104 168 108 166 112 162 115 157 118 152 120 146 121 142 120 138 118 136 116 136 113 136 111 138 110 141 110 143 112 146 115 147 119 147 124 146 129 142 135 138 140 133 144 128 149 123 152 118 154 114 156 111
30 39 127 38 127 37 128 38 128 39 127 41 126 43 124 46 121 49 117 52 111 54 105 56 99 55 95 54 92 52 91 50 92 48 93 47 96 47 100 48 102 51 105 56 106 61 104 69 100 76 94 82 88 87 81 91 76 94 72 96 70
36 178 189 178 188 179 188 180 189 182 189 186 189 190 187 194 184 199 180 204 175 207 169 210 164 212 159 212 156 211 153 210 152 209 152 208 153 207 154 206 155 206 157 206 158 206 160 208 162 211 164 214 164 218 164 225 163 233 160 242 155 251 148 260 141 267 133 272 128 276 124 277 122
40 98 162 98 159 98 158 98 157 98 156 98 157 100 157 102 158 105 158 110 158 116 158 122 156 129 153 134 149 139 144 141 139 142 135 142 130 141 127 140 126 138 125 136 125 135 125 133 126 133 128 133 130 135 131 139 131 144 130 150 127 156 122 163 115 169 106 174 97 179 87 182 78 184 70 186 65 186 61 186 59
32 35 109 34 110 34 111 36 111 39 110 43 107 48 103 53 98 56 92 59 85 61 79 60 75 59 71 56 69 54 68 51 69 49 72 48 76 48 80 50 84 54 87 58 87 65 84 72 80 78 74 84 68 89 62 93 56 96 52 98 49 100 47 101 46
9 W
36 308 133 307 132 308 132 308 133 309 135 310 138 310 142 310 148 309 153 307 159 304 163 300 167 295 169 291 169 286 169 282 167 279 166 278 165 278 164 279 163 281 163 284 164 287 166 290 168 291 170 292 173 291 177 287 180 281 184 274 187 266 190 257 193 250 194 244 195 240 196 237 197
37 234 68 234 67 235 67 236 68 236 69 236 72 237 76 237 81 236 88 235 95 232 102 228 109 223 115 218 119 213 121 209 122 205 120 203 118 202 116 202 115 203 113 206 113 209 114 211 115 214 118 216 123 217 128 215 134 211 140 206 145 199 150 191 153 183 156 174 157 167 157 160 156 156 156
34 136 48 137 48 138 50 139 53 140 56 140 61 140 66 138 72 135 77 130 82 125 86 121 87 117 87 114 86 113 84 112 82 113 81 116 80 118 80 121 82 124 84 125 88 125 94 123 100 119 106 113 113 106 118 99 122 91 125 84 126 78 126 74 126 72 126 71 125
37 282 80 282 79 283 80 284 80 285 82 288 85 290 90 292 96 294 102 294 109 294 115 291 121 288 125 284 127 280 129 276 129 273 129 271 128 269 128 268 127 269 126 272 126 275 127 278 128 282 131 285 134 287 139 287 143 286 148 283 152 279 156 274 158 268 161 263 163 260 164 257 164 256 164
36 209 72 210 73 210 74 211 77 211 82 211 87 210 94 209 100 207 105 203 109 199 112 194 113 190 113 186 112 183 111 182 109 182 108 183 106 185 105 188 105 191 106 194 108 196 112 196 115 196 120 194 125 190 129 184 133 178 137 170 140 162 142 155 143 149 142 143 141 140 140 137 139
34 98 61 98 62 99 63 100 65 101 68 103 72 103 77 103 83 102 88 99 93 95 97 90 99 86 100 83 99 80 97 79 95 79 93 80 91 83 91 85 91 87 92 90 95 91 99 91 103 89 109 84 114 78 120 71 125 63 128 55 130 48 131 43 131 39 130 37 129
42 291 76 292 76 292 77 293 77 294 79 295 81 296 83 296 89 296 95 295 102 294 109 291 116 288 120 284 123 280 125 276 125 272 124 269 122 266 120 264 118 263 116 263 113 264 111 266 110 269 110 274 110 279 112 284 116 288 121 290 126 290 131 287 137 281 142 274 148 264 154 253 159 242 163 233 166 225 168 218 169 214 169 212 168
36 212 63 212 62 212 63 212 64 213 65 214 68 216 72 217 77 218 80 218 85 217 90 214 95 211 98 205 101 200 103 194 103 189 103 186 102 184 101 184 100 186 98 189 97 193 97 197 99 201 102 204 106 205 111 203 117 200 122 193 128 185 133 176 138 167 142 159 144 153 146 149 146
34 93 57 93 56 93 55 93 56 93 58 95 61 96 65 96 70 97 76 96 82 95 87 92 92 89 96 85 98 80 98 76 96 73 94 71 92 71 89 72 88 75 88 79 88 81 90 85 93 87 99 88 105 87 113 84 120 80 127 74 132 68 137 61 140 57 141 54 141
9 X
26 335 197 335 198 336 198 335 198 334 198 332 198 328 198 323 199 316 199 308 200 300 201 293 203 286 203 282 204 279 205 277 205 276 206 276 205 276 204 276 201 276 197 277 191 277 184 277 180 278 174 278 171
23 234 160 234 161 234 160 232 160 229 159 225 159 218 158 211 158 202 158 193 160 185 161 178 162 173 163 170 164 168 164 168 163 167 161 167 158 167 154 167 149 168 144 168 139 169 137
25 152 107 152 108 151 108 150 107 148 107 145 106 140 107 134 106 127 107 119 108 111 108 105 108 99 109 96 109 94 109 93 109 93 110 93 109 93 108 93 106 94 102 95 97 96 91 97 86 97 83
25 307 74 306 74 305 74 302 73 298 73 295 72 290 72 282 72 273 71 257 72 249 72 244 72 241 72 240 72 239 72 238 72 238 71 238 70 238 68 237 65 237 61 237 55 237 51 237 47 237 46
24 260 156 258 156 256 155 253 155 249 154 243 154 237 154 230 154 223 155 217 155 213 156 209 156 208 157 207 157 207 156 207 155 207 154 208 151 208 148 209 143 209 138 208 134 208 131 207 130
24 134 109 133 109 133 110 131 109 129 109 126 109 121 108 115 108 108 108 100 108 93 109 88 109 83 109 80 110 79 111 78 111 79 111 79 109 80 107 81 102 82 97 83 91 84 86 84 83
25 262 180 262 179 261 179 260 179 257 179 253 179 249 179 243 180 236 180 228 180 220 179 212 178 203 177 196 177 190 177 185 177 182 177 180 177 178 177 178 175 179 172 180 168 181 162 182 155 183 149
24 265 97 263 97 261 98 257 98 253 98 247 98 239 98 232 98 225 99 219 99 215 100 212 101 211 102 210 102 210 101 210 100 209 98 208 94 208 88 207 82 206 76 206 70 205 67 205 65
22 113 98 112 97 111 97 109 96 105 97 101 98 96 99 89 100 83 101 77 102 72 101 68 101 65 101 63 101 62 101 63 99 63 96 63 92 64 87 64 82 64 77 64 75
/digits.cl
0,0 → 1,423
20 classes
 
1
2
3
4
6
7
8
9
B
N
A
S
R
P
U
V
W
X
5
V 12 -0.868832 0.336823 54.4382 0.824697 5.95937 -0.639416 0.364462 128.224 7.0839 7.21703 3.27009 0.265
M 12 12
0.0706877 0.178766 -1.55779 0.0170591 -1.18709 0.0988158 -0.0405307 -5.34896 -0.261723 -0.331571 -0.346411 0.000916717
0 0.456038 -4.0199 0.0469372 -3.26572 0.295152 -0.127233 -14.1596 -0.686385 -0.865744 -0.895775 0.00100384
0 0 44.9158 -0.766481 29.6618 -3.04356 1.50025 156.169 6.50061 9.22175 9.16945 0.0277157
0 0 0 0.0187331 -0.47486 0.0654544 -0.0372046 -2.78889 -0.0949287 -0.153827 -0.142259 -0.00178416
0 0 0 0 37.9436 -4.69544 2.31214 126.788 5.98374 7.10354 6.89876 0.0680542
0 0 0 0 0 0.657636 -0.339774 -14.574 -0.645471 -0.767322 -0.70343 -0.013669
0 0 0 0 0 0 0.179529 7.32567 0.306387 0.376506 0.335367 0.00796206
0 0 0 0 0 0 0 578.293 24.3964 33.3165 32.5936 0.201431
0 0 0 0 0 0 0 0 1.12731 1.43227 1.4347 0.00532357
0 0 0 0 0 0 0 0 0 1.94989 1.9333 0.0079861
0 0 0 0 0 0 0 0 0 0 1.93982 0.00475851
0 0 0 0 0 0 0 0 0 0 0 0.0005
V 12 -35.5271 102.284 4.34635 391.096 -9.46806 -23.3665 -34.9044 4.47625 14.6324 27.6621 -16.0167 0
V 12 0.00538372 0.987343 54.2151 1.52049 54.2022 0.00365474 0.998697 54.5982 -0.00331362 0.995045 0.271884 0.235
M 12 12
0.199726 0.0101852 -10.3976 0.0124388 -10.386 0.0572288 0.0006328 -10.4509 -0.208231 -0.151891 -0.0286168 -0.185655
0 0.00127902 -0.822632 0.00203586 -0.822163 0.00319514 0.000102679 -0.836944 -0.0119487 -0.0283753 -0.00601587 -0.0126756
0 0 773.743 -1.19665 772.848 -3.58949 -0.0528495 780.989 9.27996 21.0627 5.01607 11.3177
0 0 0 0.00394629 -1.19582 0.00316145 0.00019045 -1.21741 -0.0136189 -0.0379449 -0.00501631 -0.0155686
0 0 0 0 771.958 -3.58476 -0.052866 780.09 9.27862 21.0564 5.0159 11.3083
0 0 0 0 0 0.0207122 0.000134107 -3.61842 -0.0430791 -0.102229 -0.0299627 -0.0605664
0 0 0 0 0 0 1.02964e-05 -0.053857 -0.000883834 -0.00189239 -0.000271339 -0.000807414
0 0 0 0 0 0 0 788.481 9.29902 21.5266 5.14114 11.4383
0 0 0 0 0 0 0 0 0.584471 -0.261827 -0.100791 0.140588
0 0 0 0 0 0 0 0 0 1.69989 0.51084 0.351802
0 0 0 0 0 0 0 0 0 0 0.168177 0.0924074
0 0 0 0 0 0 0 0 0 0 0 0.2066
V 12 -22.4317 44.2829 2.80463 369.441 -2.79376 30.2952 52.2509 0.849907 6.19976 4.8771 -3.79268 0
V 12 0.748952 -0.609163 56.6561 0.961987 48.5848 0.607206 0.785999 108.045 -4.40824 7.73482 8.12382 0.2775
M 12 12
0.0941015 0.12739 -0.368146 -0.0560117 1.34438 0.0355039 -0.0293618 0.18567 0.750925 -0.46386 -0.112485 -0.00131889
0 0.177863 -0.543038 -0.0800382 1.87357 0.0557927 -0.0463136 0.378538 1.29213 -0.661829 -0.158645 -0.0019424
0 0 28.348 -0.0379026 16.9439 0.280414 -0.142676 57.2082 -33.8839 8.89766 -6.4484 0.271749
0 0 0 0.0398285 -1.09097 -0.0324639 0.026017 -0.858123 -0.346294 0.227679 0.148234 -0.00200978
0 0 0 0 39.0621 0.996094 -0.750566 54.2456 -11.006 -1.15744 -7.59302 0.20598
0 0 0 0 0 0.0332337 -0.0261331 1.32554 0.155093 -0.0995702 -0.177896 0.00410761
0 0 0 0 0 0 0.0207907 -0.905156 -0.231924 0.105935 0.124361 -0.00251432
0 0 0 0 0 0 0 134.454 -55.7632 13.4088 -15.7986 0.583338
0 0 0 0 0 0 0 0 51.0032 -12.7771 6.27666 -0.305126
0 0 0 0 0 0 0 0 0 4.24271 -1.18496 0.0754917
0 0 0 0 0 0 0 0 0 0 1.95733 -0.0676238
0 0 0 0 0 0 0 0 0 0 0 0.002675
V 12 -0.979157 30.9851 -1.84912 244.202 -0.870042 60.3329 39.6348 2.96732 1.36243 10.6481 -4.51508 0
V 12 0.810435 -0.528327 58.2447 1.03159 39.5567 0.0492026 0.991733 111.092 -4.52355 10.504 11.6369 0.3525
M 12 12
0.0685727 0.110014 -1.49958 -0.0167949 -0.991297 0.0547559 -0.00188802 -2.92445 0.204912 -0.220652 -0.252902 0.00170146
0 0.18769 -2.92345 -0.03062 -2.04737 0.088495 -0.00438014 -5.75534 0.337287 -0.368692 -0.312536 0.00332211
0 0 57.2058 0.57743 43.0214 -1.15181 0.101269 109.839 -4.90287 6.25373 3.30595 -0.0637952
0 0 0 0.00875827 0.41034 -0.00715084 0.000690427 0.782045 -0.0552861 0.122995 0.210038 -0.000541413
0 0 0 0 33.0827 -0.785157 0.0813786 84.272 -3.32259 4.11532 0.756693 -0.0484355
0 0 0 0 0 0.0559482 -0.00199208 -2.93063 0.159798 -0.0562995 0.139943 0.0015245
0 0 0 0 0 0 0.000228668 0.226732 -0.00652885 0.00382439 -0.0155073 -0.000122762
0 0 0 0 0 0 0 249.188 -9.36128 5.51768 -12.8122 -0.134631
0 0 0 0 0 0 0 0 0.620373 -0.713371 -0.803881 0.00548976
0 0 0 0 0 0 0 0 0 1.92552 4.02504 -0.00494913
0 0 0 0 0 0 0 0 0 0 10.9993 0.00229203
0 0 0 0 0 0 0 0 0 0 0 7.5e-05
V 12 11.2556 45.0956 1.28 279.094 -3.71699 30.3681 25.5477 2.66718 1.42974 12.4536 -4.97276 0
V 12 0.244181 0.969499 51.2678 0.709698 48.9359 0.778902 0.594869 70.9969 1.15957 3.57825 2.59775 0.2125
M 12 12
0.00168071 -0.00042793 -0.089638 0.00856224 0.0218155 -0.00764593 0.00899914 -0.108729 -0.0200301 -0.0129966 -0.00569979 -0.00110646
0 0.00010916 0.0183344 -0.00220556 -0.00971971 0.00196936 -0.0023198 0.0203826 0.00507744 0.00354237 0.00161264 0.000273957
0 0 106.44 -0.21332 91.9073 0.141073 -0.190651 166.463 2.49447 -5.6088 -5.0443 0.286286
0 0 0 0.0880203 0.499646 -0.0717192 0.0932151 0.445944 -0.218771 0.0522525 0.180022 -0.0118385
0 0 0 0 86.1182 -0.464945 0.558218 148.007 0.59131 -5.43315 -4.13131 0.168198
0 0 0 0 0 0.0590412 -0.0759237 -0.378034 0.175473 -0.0222636 -0.122715 0.00938347
0 0 0 0 0 0 0.0987308 0.530477 -0.231171 0.0547377 0.190597 -0.0124634
0 0 0 0 0 0 0 269.59 1.86277 -7.24373 -4.96851 0.336047
0 0 0 0 0 0 0 0 0.588132 -0.299687 -0.615411 0.0348705
0 0 0 0 0 0 0 0 0 0.898185 1.0518 -0.0260921
0 0 0 0 0 0 0 0 0 0 1.42403 -0.0423594
0 0 0 0 0 0 0 0 0 0 0 0.002275
V 12 -19.7866 43.1394 -1.23994 201.004 -0.0919554 59.6065 50.6699 1.73555 7.10613 17.5026 -8.64285 0
V 12 -0.426122 -0.107658 60.9813 1.08073 39.7653 -0.343465 0.920632 126.156 7.79931 8.32252 4.45956 0.335
M 12 12
0.20699 -0.752324 1.50432 0.0727364 3.3089 -0.146328 -0.0537387 6.34446 1.06811 0.676561 0.404955 0.0332674
0 3.02033 -3.69606 -0.218208 -6.97332 0.594901 0.223259 -21.041 -3.88625 -2.69624 -1.83203 -0.12345
0 0 57.133 0.0826384 40.0182 -0.343739 -0.0528543 138.41 8.65906 9.47495 6.00441 0.458267
0 0 0 0.0482206 2.29715 -0.0480771 -0.0177936 0.897171 0.35552 0.0741994 -0.0258259 0.00645547
0 0 0 0 148.873 -1.36801 -0.436945 102.351 16.601 3.99895 -2.19611 0.385875
0 0 0 0 0 0.120424 0.0456889 -3.29459 -0.747375 -0.474294 -0.316269 -0.0219082
0 0 0 0 0 0 0.0174509 -1.07705 -0.273396 -0.170663 -0.115598 -0.00779972
0 0 0 0 0 0 0 389.475 34.798 32.7173 21.8597 1.5278
0 0 0 0 0 0 0 0 5.53588 3.65233 2.23332 0.17778
0 0 0 0 0 0 0 0 0 3.43962 2.52814 0.150577
0 0 0 0 0 0 0 0 0 0 2.04126 0.103171
0 0 0 0 0 0 0 0 0 0 0 0.0069
V 12 -61.3591 84.1919 -1.89078 366.919 -2.95811 43.0758 34.9259 4.32043 16.6654 37.8163 -19.8766 0
V 12 0.734428 0.0144477 64.1103 0.984049 53.4913 0.295882 0.9516 91.7006 -1.0022 4.35052 5.24347 0.2425
M 12 12
0.719933 0.00994317 0.962911 0.116572 4.1176 -0.10626 0.0365987 -3.25217 -1.97055 -2.08556 -7.17241 0.00250909
0 0.121696 1.11312 0.0400482 2.90061 -0.0340006 0.00883158 -0.814637 0.184317 -0.137426 -0.0683502 0.00477204
0 0 13.5003 0.51082 34.4728 -0.39438 0.114837 -8.2985 -0.510725 -2.6508 -9.57945 0.0598054
0 0 0 0.0310524 1.57615 -0.0273603 0.0085291 -0.760483 -0.251518 -0.368507 -1.15248 0.00194711
0 0 0 0 94.7076 -1.30859 0.390587 -32.2923 -6.01988 -12.8646 -40.6817 0.14462
0 0 0 0 0 0.0251741 -0.00780603 0.742143 0.238163 0.357767 1.04549 -0.00138532
0 0 0 0 0 0 0.00247029 -0.230646 -0.0865678 -0.118146 -0.361411 0.000395286
0 0 0 0 0 0 0 23.586 7.84353 11.6043 31.8534 -0.0232151
0 0 0 0 0 0 0 0 5.78145 5.62405 19.6606 0.00264385
0 0 0 0 0 0 0 0 0 6.69474 20.6191 -0.00476439
0 0 0 0 0 0 0 0 0 0 71.4946 -0.0253875
0 0 0 0 0 0 0 0 0 0 0 0.000275
V 12 13.6302 28.7273 3.04421 252.856 -3.29672 34.0834 42.3961 1.23727 2.39582 0.8181 -0.962233 0
V 12 -0.591494 -0.72204 57.5914 1.08804 3.6586 0.374196 0.172535 151.71 0.324157 11.2057 6.54117 0.4125
M 12 12
0.204581 -0.249426 0.831037 -0.00178567 -0.60475 -0.133475 0.373433 4.35064 0.362549 0.638999 0.391795 0.00867678
0 0.310594 -0.889715 0.00210216 0.823377 0.229049 -0.484176 -5.0091 -0.488897 -0.784134 -0.485778 -0.0128904
0 0 21.9197 -0.0701513 5.58142 2.30104 1.81971 54.0733 -4.74461 -4.97306 -12.5272 0.0281503
0 0 0 0.000249784 -0.0200116 -0.00560478 -0.00616067 -0.158235 0.0175918 0.0228445 0.0496801 -0.00018901
0 0 0 0 5.45848 1.89886 -1.14995 3.22042 -3.79806 -4.90949 -6.78502 -0.0417177
0 0 0 0 0 0.918971 -0.455436 3.18392 -1.23553 -1.1985 -1.70236 -0.0256712
0 0 0 0 0 0 0.854989 8.24797 0.590087 0.795737 0.016398 0.0280625
0 0 0 0 0 0 0 164.464 -4.54517 -0.845679 -18.5819 0.149501
0 0 0 0 0 0 0 0 2.7323 3.62629 5.34509 0.0199408
0 0 0 0 0 0 0 0 0 5.44765 7.67342 0.0118914
0 0 0 0 0 0 0 0 0 0 12.8019 -0.0122934
0 0 0 0 0 0 0 0 0 0 0 0.001275
V 12 -36.8636 91.6026 2.53132 445.907 -9.12058 24.8284 -28.2419 5.59594 11.5034 31.8916 -16.6492 0
V 12 -0.537602 -0.82772 64.0858 1.05547 48.8959 0.0220152 0.997058 124.185 2.19532 10.2996 10.5937 0.37
M 12 12
0.0721133 -0.0473014 1.62635 0.0020778 1.08025 -0.0339579 0.000719243 3.12886 -0.316297 0.651679 0.428142 0.0186265
0 0.0313443 -1.19317 -0.0019644 -0.810891 0.0218198 -0.000418814 -2.25328 0.216285 -0.427327 -0.291585 -0.0126012
0 0 87.024 0.265557 65.283 -0.564903 -0.00552715 152.224 -10.4922 14.3997 14.0663 0.577812
0 0 0 0.00617392 0.167603 -0.00505571 8.79428e-05 0.0453213 -0.0614425 0.0782839 -4.87223e-05 -1.17126e-05
0 0 0 0 49.7718 -0.305801 -0.00819475 116.437 -7.16653 9.03446 10.2505 0.41709
0 0 0 0 0 0.0215377 -0.000580481 -0.764445 0.171712 -0.36634 -0.15377 -0.00695912
0 0 0 0 0 0 2.16308e-05 -0.016579 -0.00288534 0.00853182 0.00137729 7.90149e-05
0 0 0 0 0 0 0 299.123 -16.2517 23.0887 28.1708 1.15946
0 0 0 0 0 0 0 0 1.88719 -3.2828 -1.94161 -0.0832065
0 0 0 0 0 0 0 0 0 6.60671 3.47163 0.15287
0 0 0 0 0 0 0 0 0 0 3.12098 0.13194
0 0 0 0 0 0 0 0 0 0 0 0.0056
V 12 -60.9302 48.8885 -2.53682 308.912 -1.1034 65.3942 56.4014 3.46841 9.84652 28.116 -13.7268 0
V 12 -0.984615 -0.129299 46.1601 0.0835282 46.0858 -0.99806 0.0607862 46.5957 0.487011 0.714338 0.145569 0.1025
M 12 12
0.00174089 -0.00931799 -0.0457754 0.000310923 -0.0487582 -4.29543e-05 -0.000761245 -0.0365313 0.00998752 -0.00198189 0.000940758 -0.000625948
0 0.0535192 0.80185 -0.0036483 0.82158 0.00016741 0.00313017 0.732784 -0.0853471 -0.00425034 -0.0100507 0.00354991
0 0 131.982 -0.40371 132.676 -0.0142705 -0.205784 127.289 -5.97477 -4.91964 -1.68989 0.0891753
0 0 0 0.00132242 -0.406375 3.8182e-05 0.000542944 -0.388449 0.0208413 0.0131975 0.00471587 -0.000305819
0 0 0 0 133.38 -0.0142667 -0.205528 127.946 -6.02557 -4.93402 -1.69749 0.0904919
0 0 0 0 0 2.88068e-06 4.54133e-05 -0.0139761 0.000407971 0.000649566 0.00017787 6.61642e-06
0 0 0 0 0 0 0.000722967 -0.202152 0.00539376 0.0095355 0.00249151 0.000146682
0 0 0 0 0 0 0 122.796 -5.72357 -4.76417 -1.62927 0.0834326
0 0 0 0 0 0 0 0 0.351736 0.16831 0.0658074 -0.00611366
0 0 0 0 0 0 0 0 0 0.22252 0.0725071 -0.00259685
0 0 0 0 0 0 0 0 0 0 0.0250879 -0.0014433
0 0 0 0 0 0 0 0 0 0 0 0.000275
V 12 -23.3576 2.4457 2.00506 15.7816 -0.367188 -35.8958 -12.4251 -0.642924 0.499322 -0.989331 -0.135816 0
V 12 0.540953 -0.823918 53.946 0.893605 53.946 0.625758 -0.7783 54.2167 -0.241313 0.552941 0.0792705 0.1025
M 12 12
0.0719837 0.0546704 -1.33695 -0.0191639 -1.33695 0.0145437 0.0124636 -1.40764 0.135422 -0.0713532 -0.0120074 -0.00512371
0 0.042133 -1.02491 -0.0163675 -1.02491 0.0124576 0.0106013 -1.07216 0.100604 -0.0520623 -0.00933528 -0.00363581
0 0 464.302 0.0569543 464.302 -0.0285438 -0.0552163 465.65 -1.52062 -0.655411 -1.00671 0.130524
0 0 0 0.0107181 0.0569543 -0.00825252 -0.00682761 0.0565902 -0.0301094 0.0141359 0.00475473 0.000577357
0 0 0 0 464.302 -0.0285438 -0.0552163 465.65 -1.52062 -0.655411 -1.00671 0.130524
0 0 0 0 0 0.00635546 0.00525533 -0.0278824 0.0227509 -0.0106696 -0.00366395 -0.000421703
0 0 0 0 0 0 0.00435129 -0.0554437 0.0197009 -0.00926153 -0.00302404 -0.000395379
0 0 0 0 0 0 0 467.135 -1.67697 -0.563519 -0.997572 0.138258
0 0 0 0 0 0 0 0 0.265117 -0.146311 -0.0244901 -0.0104919
0 0 0 0 0 0 0 0 0 0.0867741 0.0166171 0.00579411
0 0 0 0 0 0 0 0 0 0 0.0055401 0.000654071
0 0 0 0 0 0 0 0 0 0 0 0.000475
V 12 23.5507 -5.29344 4.00421 190.253 -2.48693 5.43604 -39.2205 -0.219633 -0.424464 -8.89918 3.29379 0
V 12 0.994086 0.107285 48.5413 0.0412531 48.5413 0.999148 0.0412414 48.7202 0.122489 0.754344 0.170171 0.095
M 12 12
6.5353e-06 -6.05549e-05 -0.00541839 4.60746e-06 -0.00541839 -1.90018e-07 4.60354e-06 -0.00551706 -0.000442839 7.01653e-05 -3.56923e-05 1.80766e-05
0 0.000561091 0.0502058 -4.26919e-05 0.0502058 1.76067e-06 -4.26556e-05 0.0511201 0.00410327 -0.000650139 0.000330719 -0.000167495
0 0 4.49235 -0.00382002 4.49235 0.000157543 -0.00381677 4.57417 0.367156 -0.0581737 0.0295923 -0.0149873
0 0 0 3.24831e-06 -0.00382002 -1.33965e-07 3.24555e-06 -0.00388959 -0.000312207 4.94673e-05 -2.51635e-05 1.27442e-05
0 0 0 0 4.49235 0.000157543 -0.00381677 4.57417 0.367156 -0.0581737 0.0295923 -0.0149873
0 0 0 0 0 5.5249e-09 -1.33851e-07 0.000160412 1.28758e-05 -2.0401e-06 1.03778e-06 -5.2559e-07
0 0 0 0 0 0 3.24279e-06 -0.00388628 -0.000311941 4.94252e-05 -2.51421e-05 1.27334e-05
0 0 0 0 0 0 0 4.65747 0.373842 -0.0592331 0.0301312 -0.0152602
0 0 0 0 0 0 0 0 0.0300073 -0.00475448 0.00241855 -0.00122489
0 0 0 0 0 0 0 0 0 0.000753319 -0.000383205 0.000194077
0 0 0 0 0 0 0 0 0 0 0.000194932 -9.8725e-05
0 0 0 0 0 0 0 0 0 0 0 5e-05
V 12 32.9348 -7.22499 2.02466 -4.97987 -0.0370677 37.5587 23.7342 -0.725622 0.0259355 -1.92133 1.1174 0
V 12 -0.0204946 -0.994412 41.0208 1.41349 41.0208 0.0111527 -0.98759 41.3393 0.12096 1.33119 0.311238 0.18
M 12 12
0.0214411 -0.000441897 1.37815 -0.00233842 1.37815 0.0324384 0.000366321 1.38691 -0.0415797 0.0757787 0.0184973 0
0 9.10741e-06 -0.0284034 4.81944e-05 -0.0284034 -0.000668549 -7.54981e-06 -0.028584 0.000856949 -0.00156178 -0.000381226 0
0 0 88.5822 -0.150305 88.5822 2.08502 0.0235457 89.1453 -2.67258 4.87076 1.18894 0
0 0 0 0.000255034 -0.150305 -0.00353782 -3.99519e-05 -0.15126 0.00453478 -0.00826461 -0.00201736 0
0 0 0 0 88.5822 2.08502 0.0235457 89.1453 -2.67258 4.87076 1.18894 0
0 0 0 0 0 0.0490764 0.000554211 2.09827 -0.0629063 0.114646 0.0279848 0
0 0 0 0 0 0 6.2586e-06 0.0236954 -0.000710389 0.00129468 0.000316027 0
0 0 0 0 0 0 0 89.712 -2.68957 4.90173 1.19649 0
0 0 0 0 0 0 0 0 0.0806335 -0.146954 -0.035871 0
0 0 0 0 0 0 0 0 0 0.267823 0.0653746 0
0 0 0 0 0 0 0 0 0 0 0.0159577 0
0 0 0 0 0 0 0 0 0 0 0 0
V 12 -0.793351 12.2231 3.79934 321.507 -3.59714 -17.6949 -67.5307 0.518265 1.74858 -3.83504 0.423141 0
V 12 -0.57735 0.816025 58.8508 0.855768 57.1088 -0.648672 0.761034 61.1883 1.04288 2.99231 5.30172 0.2
M 12 12
0.00102604 0.000725936 0.292116 0.000741486 0.371028 0.000248239 0.000211588 0.208387 -0.0651262 -0.075661 -0.226351 -0.000452998
0 0.000513611 0.206677 0.000524613 0.262508 0.000175633 0.000149702 0.147437 -0.0460778 -0.0535313 -0.160147 -0.000320503
0 0 83.1666 0.211104 105.633 0.0706747 0.06024 59.3284 -18.5417 -21.541 -64.4429 -0.12897
0 0 0 0.00053585 0.268131 0.000179395 0.000152909 0.150595 -0.0470648 -0.054678 -0.163577 -0.000327368
0 0 0 0 134.169 0.0897667 0.0765132 75.3553 -23.5505 -27.36 -81.8515 -0.16381
0 0 0 0 0 6.00591e-05 5.11918e-05 0.0504171 -0.0157566 -0.0183054 -0.0547634 -0.000109598
0 0 0 0 0 0 4.36336e-05 0.0429733 -0.0134303 -0.0156027 -0.0466779 -9.34169e-05
0 0 0 0 0 0 0 42.323 -13.227 -15.3666 -45.9715 -0.0920033
0 0 0 0 0 0 0 0 4.13379 4.80247 14.3673 0.0287534
0 0 0 0 0 0 0 0 0 5.57932 16.6913 0.0334045
0 0 0 0 0 0 0 0 0 0 49.9346 0.0999346
0 0 0 0 0 0 0 0 0 0 0 0.0002
V 12 -23.6811 33.1369 2.95266 212.867 -2.03407 -6.24276 27.856 0.135007 3.6956 3.21097 -2.25166 0
V 12 -1 0 59.1364 0.0681789 11.1847 -0.985071 -0.121268 107.733 0.0383859 4.7198 8.83571 0.26
M 12 12
0 0 0 0 0 0 0 0 0 0 0 0
0 0 0 0 0 0 0 0 0 0 0 0
0 0 49.7696 -0.0574683 11.8193 0.148943 -1.20988 89.486 31.7264 1.6058 0.980488 -0.0997693
0 0 0 6.63578e-05 -0.0136475 -0.000171983 0.00139703 -0.103328 -0.0366341 -0.0018542 -0.00113216 0.000115202
0 0 0 0 2.80683 0.0353709 -0.287322 21.2511 7.53438 0.381345 0.232846 -0.0236932
0 0 0 0 0 0.000445735 -0.00362075 0.267801 0.0949462 0.00480561 0.00293426 -0.000298575
0 0 0 0 0 0 0.0294118 -2.17537 -0.771258 -0.0390364 -0.0238353 0.00242536
0 0 0 0 0 0 0 160.896 57.0443 2.88724 1.76292 -0.179386
0 0 0 0 0 0 0 0 20.2245 1.02364 0.625028 -0.0635996
0 0 0 0 0 0 0 0 0 0.0518107 0.0316351 -0.00321903
0 0 0 0 0 0 0 0 0 0 0.0193161 -0.00196551
0 0 0 0 0 0 0 0 0 0 0 0.0002
V 12 24.0395 39.5788 10.2585 155.231 -10.7363 -92.5702 -80.1173 1.29775 -1.95363 -14.0565 5.00575 0
V 12 0.994868 -0.0263938 86.1634 0.380008 81.3451 0.934392 -0.356239 115.578 4.75077 8.03333 15.1974 0.41
M 12 12
1.34205e-05 0.00050586 -0.0509884 -2.26262e-05 -0.0557919 3.97454e-06 1.04249e-05 -0.0842918 -0.014808 -0.0206802 -0.0522437 0.000103616
0 0.0190675 -1.92192 -0.000852854 -2.10298 0.000149813 0.00039295 -3.17723 -0.55816 -0.779504 -1.96923 0.00390564
0 0 193.72 0.0859638 211.97 -0.0151005 -0.0396075 320.25 56.26 78.5704 198.49 -0.39367
0 0 0 3.81466e-05 0.0940622 -6.70086e-06 -1.75759e-05 0.142112 0.0249655 0.0348658 0.0880801 -0.000174692
0 0 0 0 231.94 -0.016523 -0.0433389 350.42 61.5601 85.9724 217.189 -0.430757
0 0 0 0 0 1.17708e-06 3.0874e-06 -0.0249634 -0.00438545 -0.00612455 -0.0154722 3.06865e-05
0 0 0 0 0 0 8.09805e-06 -0.0654774 -0.0115028 -0.0160643 -0.0405826 8.04887e-05
0 0 0 0 0 0 0 529.423 93.0065 129.889 328.134 -0.650798
0 0 0 0 0 0 0 0 16.3389 22.8183 57.645 -0.114329
0 0 0 0 0 0 0 0 0 31.8671 80.5048 -0.159667
0 0 0 0 0 0 0 0 0 0 203.376 -0.403362
0 0 0 0 0 0 0 0 0 0 0 0.0008
V 12 36.8582 18.0343 1.89616 104.372 -1.21699 28.3764 -11.9442 0.845809 1.87068 2.57067 -0.822069 0
V 12 0.630903 -0.769579 68.2843 0.874553 68.2843 0.640354 -0.766077 117.757 9.68435 12.4076 17.3 0.605
M 12 12
0.0116139 0.00952111 0.810195 0.00845242 0.810195 -0.00648185 -0.00541809 1.33266 0.509902 0.633731 1.7202 -0.0022861
0 0.00780544 0.664201 0.00692933 0.664201 -0.00531385 -0.00444178 1.09252 0.418019 0.519535 1.41022 -0.00187415
0 0 56.52 0.589649 56.52 -0.45218 -0.377971 92.9677 35.5712 44.2097 120.003 -0.15948
0 0 0 0.00615155 0.589649 -0.0047174 -0.00394321 0.969892 0.371099 0.46122 1.25193 -0.00166379
0 0 0 0 56.52 -0.45218 -0.377971 92.9677 35.5712 44.2097 120.003 -0.15948
0 0 0 0 0 0.0036176 0.00302391 -0.743775 -0.284582 -0.353693 -0.960063 0.0012759
0 0 0 0 0 0 0.00252764 -0.621711 -0.237879 -0.295647 -0.802504 0.00106651
0 0 0 0 0 0 0 152.919 58.5098 72.7189 197.388 -0.262324
0 0 0 0 0 0 0 0 22.387 27.8236 75.5244 -0.10037
0 0 0 0 0 0 0 0 0 34.5806 93.8655 -0.124745
0 0 0 0 0 0 0 0 0 0 254.788 -0.338607
0 0 0 0 0 0 0 0 0 0 0 0.00045
V 12 -12.8826 48.7146 -3.7706 237.558 0.685318 45.4059 -32.8473 3.19714 10.601 30.1256 -14.0359 0
V 12 -0.315499 0.944101 87.6009 0.925596 87.6009 -0.600665 0.798058 147.142 -4.0985 11.8461 14.1237 0.66
M 12 12
0.016431 0.00549091 0.561318 0.0087056 0.561318 0.00695292 0.00523317 0.598499 -0.578053 -0.665569 -1.88782 -0.00181279
0 0.00183495 0.187581 0.00290923 0.187581 0.00232352 0.00174882 0.200006 -0.193173 -0.222419 -0.630871 -0.000605796
0 0 19.1758 0.297401 19.1758 0.237526 0.178776 20.446 -19.7475 -22.7372 -64.4919 -0.0619286
0 0 0 0.00461246 0.297401 0.00368385 0.00277268 0.317101 -0.306268 -0.352636 -1.00022 -0.000960465
0 0 0 0 19.1758 0.237526 0.178776 20.446 -19.7475 -22.7372 -64.4919 -0.0619286
0 0 0 0 0 0.00294219 0.00221446 0.25326 -0.244608 -0.281641 -0.798848 -0.000767097
0 0 0 0 0 0 0.00166673 0.190618 -0.184106 -0.211979 -0.601259 -0.000577361
0 0 0 0 0 0 0 21.8003 -21.0556 -24.2433 -68.7638 -0.0660308
0 0 0 0 0 0 0 0 20.3362 23.4151 66.4147 0.063775
0 0 0 0 0 0 0 0 0 26.9601 76.4697 0.0734303
0 0 0 0 0 0 0 0 0 0 216.899 0.208278
0 0 0 0 0 0 0 0 0 0 0 0.0002
V 12 -35.8766 78.7569 -4.91619 252.441 1.15996 14.866 13.9487 4.10515 3.67335 26.5119 -11.3298 0
V 12 -0.991935 0.0894427 62.4857 0.420066 60.8159 -0.929051 -0.369923 77.8878 -1.69329 3.00109 1.94101 0.305
M 12 12
0.00013009 0.00144272 0.0164565 0.000589377 0.0104765 2.74297e-05 -6.88889e-05 0.0272376 0.0032141 0.0029235 0.00364059 -8.06504e-05
0 0.016 0.182505 0.00653629 0.116186 0.0003042 -0.000763989 0.302069 0.0356449 0.0324221 0.0403747 -0.000894427
0 0 2.08175 0.0745566 1.32529 0.00346988 -0.00871449 3.44557 0.406586 0.369825 0.460537 -0.0102023
0 0 0 0.00267019 0.0474642 0.000124271 -0.000312104 0.123401 0.0145616 0.013245 0.0164938 -0.00036539
0 0 0 0 0.843703 0.00220899 -0.00554782 2.19352 0.258841 0.235438 0.293187 -0.00649501
0 0 0 0 0 5.78361e-06 -1.45254e-05 0.0057431 0.0006777 0.000616426 0.000767625 -1.70053e-05
0 0 0 0 0 0 3.648e-05 -0.0144236 -0.00170202 -0.00154813 -0.00192787 4.27083e-05
0 0 0 0 0 0 0 5.70287 0.672953 0.612108 0.762249 -0.0168862
0 0 0 0 0 0 0 0 0.0794101 0.0722303 0.0899472 -0.00199261
0 0 0 0 0 0 0 0 0 0.0656996 0.0818147 -0.00181245
0 0 0 0 0 0 0 0 0 0 0.101882 -0.00225702
0 0 0 0 0 0 0 0 0 0 0 5e-05
V 12 -23.0358 26.525 1.18672 112.582 -1.02739 -45.3042 -48.6908 0.763702 0.501695 4.11643 -2.39024 0
V 12 -0.987919 -0.141627 94.9528 0.8734 85.6297 -0.712269 0.700772 178.174 -0.549404 9.52428 6.05273 0.5525
M 12 12
0.000445216 -0.00259943 -0.00895692 -0.000562669 -0.0800858 -0.00108954 -0.00115701 0.0931599 -0.000455399 -0.0202862 -0.034375 0.000411806
0 0.0153888 0.210401 0.00333649 0.588145 0.00620296 0.00658468 -0.162825 -0.00465418 0.130541 0.245088 -0.00153469
0 0 220.835 -0.142725 187.267 -0.24685 -0.253012 333.984 -10.1016 5.33909 17.175 0.409252
0 0 0 0.00108402 -0.048865 0.00162075 0.00170005 -0.121687 0.00752177 0.0362544 0.0854059 0.00012444
0 0 0 0 172.192 -0.0344816 -0.0278066 248.048 -8.41642 6.71359 15.9283 0.205012
0 0 0 0 0 0.00300557 0.00317747 -0.588301 0.0134061 0.0466128 0.0753578 -0.00131824
0 0 0 0 0 0 0.00336033 -0.623609 0.0138244 0.0489248 0.0777454 -0.00143446
0 0 0 0 0 0 0 730.946 -15.5824 15.4731 64.3438 1.53509
0 0 0 0 0 0 0 0 0.463802 -0.211144 -0.742766 -0.0199497
0 0 0 0 0 0 0 0 0 1.77935 4.76738 0.0401742
0 0 0 0 0 0 0 0 0 0 14.659 0.188065
0 0 0 0 0 0 0 0 0 0 0 0.004475
V 12 -70.296 77.6869 -5.94485 283.484 0.696282 25.8011 12.6215 5.29216 8.97019 32.3299 -16.1293 0
V 20 -697.568 -354.227 -248.172 -295.169 -179.89 -549.58 -219.033 -829.408 -377.996 -52.131 -143.213 -65.0684 -266.881 -157.757 -345.82 -142.905 -307.909 -367.758 -105.318 -469.97
M 12 12
40.531 1.124 1.44352 -10.7086 -1.0284 -12.9545 -14.3517 -0.204499 -1.73503 -5.20943 2.67347 0
1.124 22.907 -0.474452 20.613 -0.205512 -7.90084 -6.1928 0.541725 1.16961 4.39697 -2.04169 0
1.44352 -0.474452 0.453725 1.04202 -0.282541 -1.41366 -0.515229 -0.121887 -0.053333 -0.49189 0.22175 0
-10.7086 20.613 1.04202 242.224 -2.20516 3.75158 -7.81967 0.950944 2.51888 0.0389675 -1.01072 0
-1.0284 -0.205512 -0.282541 -2.20516 0.245823 1.17208 0.846584 0.0297539 0.0213151 0.323688 -0.128126 0
-12.9545 -7.90084 -1.41366 3.75158 1.17208 51.2723 33.6699 0.143971 1.5798 4.46039 -1.96071 0
-14.3517 -6.1928 -0.515229 -7.81967 0.846584 33.6699 67.4665 -0.307529 1.34343 1.91357 -0.853268 0
-0.204499 0.541725 -0.121887 0.950944 0.0297539 0.143971 -0.307529 0.0757869 0.0115349 0.0462066 -0.0424633 0
-1.73503 1.16961 -0.053333 2.51888 0.0213151 1.5798 1.34343 0.0115349 0.876624 1.17863 -0.686415 0
-5.20943 4.39697 -0.49189 0.0389675 0.323688 4.46039 1.91357 0.0462066 1.17863 5.88724 -2.48812 0
2.67347 -2.04169 0.22175 -1.01072 -0.128126 -1.96071 -0.853268 -0.0424633 -0.686415 -2.48812 1.17447 0
0 0 0 0 0 0 0 0 0 0 0 0
4 0
28 111 162 110 161 109 160 108 160 107 160 105 162 102 163 100 166 98 170 97 175 97 180 97 187 100 192 105 196 111 198 118 197 126 194 133 189 138 183 142 177 142 170 139 165 134 161 127 159 120 159 113 160 108 162 104 166
27 40 155 39 154 38 154 37 153 35 153 32 155 30 157 28 160 27 164 26 169 27 176 29 182 32 188 36 192 40 194 45 193 50 190 54 184 58 178 61 171 62 164 60 158 56 154 50 152 44 153 37 155 32 159
29 102 43 103 43 102 43 101 43 99 45 97 47 95 50 94 55 93 60 94 65 95 68 97 73 101 76 106 77 112 77 117 75 122 73 126 69 128 64 128 59 127 53 124 48 120 43 114 40 109 39 104 39 100 41 98 43 97 46
26 28 39 27 38 27 39 26 40 24 41 22 44 21 48 20 52 19 58 20 64 22 69 25 73 29 76 34 77 38 75 43 72 47 68 50 63 51 57 51 51 49 46 46 41 42 38 37 37 32 37 29 39
8 1
43 158 35 158 36 157 37 157 40 156 42 156 44 156 47 155 50 155 53 154 55 154 58 153 60 153 62 153 64 153 66 153 69 153 71 153 73 153 74 153 76 153 77 153 79 153 80 153 82 153 80 153 82 153 84 153 86 153 88 153 89 153 91 153 93 154 95 154 97 154 98 154 99 154 100 154 101 154 102 154 103 154 104 154 105 154 106
60 46 31 46 32 45 33 45 34 45 36 44 38 44 39 43 41 43 44 42 46 42 48 42 49 42 51 42 54 42 55 42 57 42 59 42 60 42 62 42 63 42 64 42 65 42 66 42 67 42 69 42 71 42 73 42 74 42 73 42 74 42 76 42 77 42 78 42 79 42 80 42 81 42 82 42 83 41 83 41 84 41 85 41 86 41 88 41 89 41 90 41 91 41 92 40 92 40 93 40 94 40 95 40 96 40 97 40 98 40 99 40 100 40 101 40 102 40 103 40 102
20 112 25 112 26 112 28 112 30 113 33 113 37 113 41 113 45 113 52 113 56 113 60 113 63 113 67 112 70 112 74 112 77 112 78 111 79 111 80 111 81
30 52 28 52 29 52 31 52 33 52 35 52 37 53 40 53 43 53 45 53 48 53 50 53 53 53 55 53 58 53 60 53 62 53 64 53 65 53 67 53 68 53 69 53 70 53 71 53 72 53 73 52 73 52 74 52 75 51 75 51 76
15 117 153 117 152 117 153 117 156 117 160 117 166 117 172 117 180 117 186 117 191 117 195 118 197 118 198 119 198 119 197
14 35 150 36 149 37 149 37 151 37 155 37 160 36 167 36 175 36 182 36 189 37 193 38 196 38 197 39 196
11 109 34 109 35 110 37 110 42 111 48 111 56 111 64 111 72 112 78 112 81 112 82
13 26 30 26 32 27 36 27 41 27 48 27 56 27 63 27 69 27 74 27 78 28 79 28 80 28 81
4 2
28 100 163 99 162 100 162 102 161 105 161 110 160 116 160 121 160 126 162 129 165 129 170 127 176 123 183 118 189 113 194 108 198 105 200 104 201 103 201 105 201 107 201 110 200 114 200 119 199 125 199 130 199 134 198 138 198
30 27 161 26 160 26 159 27 159 28 158 30 157 33 157 37 157 39 158 42 161 43 165 43 171 42 179 39 186 35 193 30 199 26 202 23 204 21 204 21 202 22 200 25 198 29 196 33 195 38 196 43 197 48 198 52 200 54 201 56 202
33 96 39 96 38 96 37 96 36 97 35 99 33 101 32 104 31 106 31 109 31 112 33 113 37 115 42 115 48 115 55 114 62 112 68 109 73 106 77 102 80 100 81 96 82 94 81 94 80 95 79 98 78 102 76 108 76 114 75 119 76 124 76 127 77 129 77
25 19 37 18 36 19 35 20 33 23 32 26 31 30 31 34 32 37 34 38 38 37 44 35 51 31 58 26 65 21 71 17 75 15 78 14 78 16 78 19 76 23 76 28 75 32 75 36 75 40 75
4 3
36 84 161 85 160 86 159 88 158 91 157 95 156 100 156 104 156 109 157 112 159 113 163 113 167 110 173 107 177 104 180 102 183 101 184 100 184 101 184 103 183 105 182 108 182 110 182 113 183 115 185 116 187 116 191 115 195 113 199 109 202 105 204 100 206 95 206 91 205 88 204 85 202
38 18 163 18 164 19 163 20 163 22 162 24 162 28 161 31 161 35 162 38 163 40 164 41 167 41 169 40 172 38 175 36 177 34 179 32 180 31 180 32 180 33 180 35 180 37 180 40 181 43 182 45 184 47 188 47 191 47 195 44 199 41 202 38 204 35 204 32 203 30 201 28 199 25 197 23 197
36 93 34 92 34 93 33 94 31 97 29 101 28 105 27 110 27 113 27 115 30 116 33 115 37 112 42 109 47 106 51 103 54 102 55 101 55 102 54 104 53 107 52 110 52 113 54 115 56 117 59 118 63 118 67 116 71 113 74 109 77 104 78 100 79 95 79 92 77 90 76 88 74
36 12 29 13 28 14 27 15 26 18 24 20 23 24 22 27 22 30 22 33 24 34 27 35 30 34 34 31 38 29 42 27 45 25 46 24 47 25 46 27 45 29 45 31 45 34 45 36 48 38 52 39 57 40 62 39 67 37 71 34 74 31 76 27 77 24 76 20 75 18 73 16 71
4 4
26 107 158 107 157 108 158 108 160 109 163 109 167 109 172 108 176 107 181 106 185 106 188 106 190 106 191 108 191 110 191 112 190 116 189 120 188 124 187 130 186 136 185 141 185 147 185 151 185 154 184 155 184
24 22 159 23 160 23 161 23 162 24 164 24 167 25 170 25 173 24 176 24 179 24 181 24 182 25 183 26 183 27 183 29 183 32 182 35 182 40 181 45 180 51 179 56 179 60 178 63 177
20 104 26 105 26 105 27 106 29 106 34 106 39 106 45 106 50 105 54 105 57 106 59 108 59 110 58 114 58 118 57 123 57 127 56 131 56 135 57 137 57
22 19 24 20 24 20 25 21 27 21 31 21 35 21 41 20 47 19 53 18 58 18 62 18 64 19 66 22 66 26 66 31 66 36 65 42 65 46 65 49 65 52 66 53 66
4 6
33 114 148 114 147 113 147 113 148 111 149 110 152 108 155 106 158 104 163 103 168 103 173 103 178 105 182 107 187 111 190 115 193 119 194 123 195 128 194 132 192 135 188 139 186 140 183 141 180 140 178 138 176 134 175 129 174 123 174 118 175 113 176 109 178 107 180
35 47 154 47 153 47 152 47 151 47 150 46 149 45 147 43 146 41 146 38 147 35 149 33 152 30 157 28 163 28 169 28 177 29 184 31 190 33 195 36 197 38 198 41 198 44 196 46 194 48 190 49 187 49 184 48 181 47 179 45 178 42 178 39 179 35 180 30 183 26 186
41 127 30 127 31 127 30 128 30 128 29 128 28 128 26 127 25 126 23 125 21 123 19 121 18 119 16 117 16 115 16 113 17 110 19 108 22 106 26 103 32 102 38 100 44 100 52 100 59 101 65 103 70 106 74 110 76 114 77 119 76 123 74 126 72 127 68 127 65 125 62 121 61 117 60 113 62 109 64 106 67 105 70
30 29 16 28 16 27 17 26 20 24 23 23 28 22 35 21 42 21 50 22 57 23 63 26 67 29 69 32 70 35 69 39 68 42 66 44 64 46 61 47 59 46 56 45 53 43 51 39 49 35 49 30 49 26 51 23 53 21 56 21 60
4 7
26 98 161 97 161 97 160 98 160 99 160 102 160 107 159 112 159 118 158 124 158 130 157 134 157 135 157 136 158 135 159 134 162 133 164 131 169 127 176 124 184 120 192 116 199 114 204 113 207 113 208 113 207
27 27 152 26 153 25 153 26 153 28 153 30 154 34 154 38 154 43 154 48 155 52 156 55 156 57 157 58 158 58 157 58 158 57 159 56 161 55 163 53 168 51 175 47 183 44 192 41 201 39 207 38 211 38 213
27 102 40 102 41 104 41 106 41 110 40 115 39 120 39 125 38 129 38 132 38 134 38 135 39 136 39 136 40 136 41 136 42 135 43 134 46 133 51 130 57 127 65 125 74 122 81 120 87 119 91 118 94 118 95
28 27 36 26 37 25 37 24 37 23 37 24 37 25 36 28 36 32 36 38 36 44 35 51 34 57 34 61 33 64 33 65 34 65 37 63 40 61 45 58 51 55 58 52 66 50 72 48 77 47 81 46 82 46 83 45 83
4 8
43 123 152 124 151 125 151 125 150 125 149 124 148 123 148 121 148 117 149 113 152 109 155 106 158 104 162 104 165 105 168 108 170 112 171 116 173 121 174 125 176 128 179 129 182 129 186 127 189 124 193 119 196 114 197 110 196 108 194 106 190 107 186 109 182 113 177 116 174 121 171 124 168 127 165 129 162 129 159 128 157 126 155 123 154 122 153
46 43 146 43 145 43 144 42 143 42 142 41 141 39 140 37 140 33 141 30 142 26 144 23 147 22 150 21 153 22 156 24 159 27 162 29 164 32 165 36 167 39 169 42 171 45 173 47 176 48 179 48 183 47 186 45 188 41 190 37 191 32 191 28 190 24 187 22 184 23 180 25 175 28 170 32 166 37 162 41 159 44 156 45 154 45 152 44 150 42 149 40 149
44 120 34 120 33 120 32 120 31 120 30 118 29 117 27 114 26 111 26 108 27 105 29 102 32 100 35 100 40 100 44 102 47 105 50 109 51 113 53 117 54 121 56 124 58 126 61 127 65 127 69 126 72 124 75 120 77 116 79 110 80 106 80 101 79 99 77 98 75 99 70 103 65 108 59 113 53 117 47 121 43 123 39 124 36 123 35 122 34
41 38 23 38 22 37 21 35 20 33 20 30 21 26 22 23 24 20 26 19 29 18 32 18 35 20 38 23 40 26 41 30 43 34 44 37 45 41 47 43 49 44 53 44 57 44 61 42 65 40 68 37 70 34 70 30 69 28 67 26 64 26 61 27 56 29 51 32 45 36 39 39 34 41 31 42 27 43 23 42 20 40 19
4 9
34 127 159 127 158 126 157 126 156 125 156 123 155 121 154 118 153 114 153 110 154 107 156 104 159 102 163 102 168 103 172 105 175 108 177 112 176 116 175 119 172 121 170 123 167 124 165 124 164 125 164 125 166 126 169 126 173 127 179 128 192 129 198 130 202 130 204 131 204
40 53 158 53 157 53 155 53 154 52 153 51 152 49 150 47 149 44 149 41 150 38 151 34 154 32 157 30 161 31 165 33 168 36 170 40 171 45 171 48 169 52 168 54 166 55 163 56 162 56 160 56 159 56 160 57 163 57 167 58 173 59 180 60 188 60 195 59 201 58 206 56 210 54 211 53 211 51 211 50 210
42 127 36 128 36 128 35 128 34 128 33 127 32 125 31 123 30 120 30 115 30 111 31 107 32 104 34 102 36 102 40 103 43 106 46 110 49 113 50 118 50 123 49 127 46 131 44 133 41 135 40 136 39 136 38 136 39 135 40 135 42 135 46 135 52 134 58 133 65 133 72 132 78 131 82 130 84 129 85 127 85 126 85 125 85
40 50 34 50 33 50 32 50 31 49 31 47 30 46 29 43 29 40 28 36 29 32 30 28 31 26 33 24 36 24 40 25 44 28 47 32 49 36 50 41 49 45 47 49 43 52 40 54 37 55 35 55 34 55 33 55 35 55 38 55 42 56 48 57 54 58 62 58 70 58 76 57 81 57 84 56 85 55 85 55 83
4 B
12 143 155 142 155 141 155 139 155 135 155 130 155 124 156 116 156 108 157 100 158 95 158 90 158
11 58 154 57 154 55 154 53 153 49 153 44 153 39 153 33 154 28 154 24 155 21 157
10 142 60 141 59 139 59 136 58 132 58 126 58 119 58 110 59 102 60 96 62
12 66 61 67 61 66 61 64 61 61 60 55 60 49 60 41 60 32 61 26 62 21 63 18 64
4 N
11 102 209 104 208 106 206 109 203 113 199 118 194 123 188 128 182 132 177 136 173 138 171
14 22 206 22 205 23 203 25 199 28 195 32 190 36 184 41 178 46 173 50 169 54 165 56 163 59 161 60 160
11 97 92 97 91 98 90 100 86 103 82 108 75 114 67 122 58 129 49 134 42 138 37
13 27 66 27 67 26 67 27 65 28 63 31 59 34 54 38 49 43 43 47 38 50 34 52 32 52 33
2 A
13 13 59 14 59 17 60 21 60 31 60 38 60 48 60 55 60 59 61 63 61 64 61 65 61 64 61
14 48 106 49 106 51 106 55 106 59 107 63 107 70 108 80 108 87 108 91 108 95 108 97 108 98 108 97 108
2 S
19 52 111 52 110 52 107 51 105 51 103 50 101 50 99 49 96 49 93 48 89 48 87 48 84 48 82 48 81 48 80 48 79 48 78 48 77 47 77
22 102 140 102 139 102 137 103 134 103 131 103 128 104 125 104 121 104 118 105 115 106 112 107 109 108 106 108 103 109 101 109 99 109 97 110 95 110 93 110 92 110 91 110 92
2 R
24 75 27 75 28 74 30 72 32 69 35 67 37 64 41 61 44 58 48 55 52 52 55 49 58 47 61 45 63 43 64 42 65 41 65 40 66 40 65 41 65 42 65 43 64 44 64 45 63
21 158 29 158 30 156 32 154 35 151 38 147 42 143 46 136 53 133 57 129 64 126 68 123 71 121 74 120 76 119 77 119 78 118 78 118 79 117 79 116 79 116 80
2 P
26 92 64 90 64 88 65 84 64 80 64 70 63 60 63 50 62 43 62 36 62 32 62 28 62 26 62 26 63 27 63 29 63 32 63 35 63 39 63 49 63 56 63 66 63 73 62 77 62 79 61 80 61
28 206 64 205 64 202 64 199 64 195 64 188 64 181 64 178 64 174 65 171 65 167 66 160 67 157 68 154 68 152 68 151 68 152 68 153 68 154 67 156 67 160 67 164 66 171 65 178 65 185 65 192 64 194 64 196 64
2 U
46 20 79 21 79 22 79 24 79 27 79 34 80 41 81 51 81 61 82 68 82 72 82 74 82 77 82 78 82 79 82 80 82 82 82 84 81 86 81 88 81 89 81 91 81 92 81 91 80 90 79 89 78 88 76 88 74 87 72 87 70 87 67 87 65 87 64 87 63 87 62 87 61 87 60 87 59 87 58 87 57 87 56 87 55 87 54 86 54 86 53 86 52
41 84 168 85 167 86 167 88 167 92 167 96 168 103 169 110 170 114 170 110 170 114 170 118 171 128 171 135 170 142 170 152 169 159 169 166 169 169 168 171 168 172 168 173 168 173 167 172 167 172 166 171 165 171 164 170 162 169 160 169 158 169 156 169 153 169 151 169 147 169 145 169 142 170 139 170 137 170 136 170 135 170 134
2 V
64 38 107 39 107 39 106 40 105 41 103 42 101 44 98 45 95 46 91 46 88 47 85 48 82 48 80 48 78 48 77 48 75 48 74 48 73 47 72 46 72 45 72 44 72 44 73 43 73 42 73 42 74 42 75 42 76 42 77 41 78 41 79 42 80 42 82 42 84 42 86 43 88 44 89 45 90 45 91 46 91 47 91 48 90 50 89 53 87 56 85 59 84 62 82 66 80 68 79 70 77 72 75 73 73 74 72 76 70 76 69 77 67 78 66 78 65 79 65 79 64 80 63 81 62 81 61 82 61
65 88 138 89 137 90 136 92 135 93 133 95 131 97 130 99 128 102 125 104 123 106 121 107 119 108 117 108 115 108 113 108 112 108 111 108 110 108 109 107 109 106 109 105 109 104 109 103 109 102 109 100 108 99 108 99 109 98 110 98 111 98 112 98 113 98 114 98 116 99 117 99 119 99 120 99 122 99 123 100 124 101 124 102 124 103 123 104 121 106 120 109 117 111 115 114 111 117 107 120 103 122 100 124 96 126 93 127 89 129 86 127 89 129 86 130 83 131 81 132 79 133 78 133 77 133 78 132 78 132 79
2 W
69 107 30 107 31 106 32 106 35 104 37 103 39 102 42 100 45 98 48 96 51 93 55 96 51 93 55 91 59 88 62 86 65 83 67 80 69 78 70 76 71 75 71 74 71 73 71 72 70 71 69 70 67 69 66 69 65 68 64 67 63 67 62 67 61 67 60 67 59 68 59 69 58 70 57 72 56 73 56 74 56 75 56 75 57 76 57 77 58 78 59 80 60 81 61 81 63 82 65 82 67 82 68 82 70 82 72 81 74 80 76 78 78 77 79 74 81 72 83 70 85 68 86 66 87 64 89 62 90 60 91 57 93 55 94 53 95 52 95
70 201 44 201 45 200 47 200 50 198 57 196 64 195 71 194 78 193 85 192 89 190 92 189 94 187 96 185 98 184 99 182 100 181 101 180 101 179 101 177 101 176 101 175 100 173 99 172 99 171 98 170 97 169 96 169 95 169 94 168 93 169 91 169 90 169 89 170 88 170 87 171 87 172 86 173 85 174 85 175 84 176 84 177 84 178 85 179 86 179 87 180 88 181 90 182 92 182 94 182 96 182 99 181 101 180 104 180 107 178 109 177 111 175 113 173 114 171 115 168 115 165 116 161 117 158 117 154 118 152 119 150 119 149 120 148 120 149 121 150 121
2 X
34 130 88 128 89 126 89 123 89 119 90 116 91 109 91 102 91 95 92 91 92 84 93 82 93 81 93 80 93 79 93 78 93 79 92 79 91 79 89 79 86 78 83 78 79 77 77 76 74 75 71 74 69 74 68 74 67 74 66 73 66 73 65 73 64 72 64 72 63
34 218 155 217 155 216 155 214 155 212 155 209 155 205 155 198 155 188 155 181 155 174 156 170 156 168 156 166 156 166 157 165 157 165 155 164 153 164 152 164 150 163 148 163 145 162 143 162 141 162 139 161 138 161 137 161 136 161 135 162 135 162 134 162 133 162 132 163 132
4 5
60 89 34 88 34 87 34 86 34 84 34 81 34 77 33 73 33 69 33 64 33 60 33 56 34 53 34 50 35 48 36 47 38 46 40 46 42 46 45 47 47 48 50 49 53 50 56 50 59 50 62 50 65 50 67 49 68 49 69 49 70 50 70 51 69 52 68 54 67 56 66 59 66 62 66 66 67 69 69 72 72 75 75 77 78 79 81 81 84 81 88 82 91 81 94 80 97 78 99 76 101 73 103 69 105 64 107 59 108 53 108 47 108 41 106 36 102 32 99 29 97
59 195 69 194 69 193 69 191 68 190 67 187 67 183 66 180 66 176 66 172 65 168 65 165 65 163 65 160 65 159 65 158 66 158 68 157 70 157 72 157 75 158 79 158 82 159 87 160 90 161 93 162 96 162 97 162 98 162 99 163 98 164 97 166 97 167 96 170 96 173 97 175 98 179 99 181 102 183 105 186 108 188 112 190 117 191 120 192 124 192 127 191 129 189 131 186 133 181 135 176 136 170 137 163 138 156 138 150 138 145 136 140 134 137 132 134 128 131 124
52 81 32 80 32 78 31 76 31 74 31 71 31 67 30 64 30 60 30 56 31 53 31 51 32 49 33 48 34 48 36 48 39 48 42 49 46 51 50 52 54 53 58 54 62 55 65 54 67 55 68 56 67 58 67 60 67 62 67 64 67 67 68 70 70 72 72 75 75 76 78 78 81 78 84 78 86 77 88 75 90 72 91 69 93 64 94 59 94 53 94 48 93 43 92 38 90 34 89 31 88 29 87 28 87
54 189 68 189 69 187 69 185 68 183 68 179 67 174 67 170 67 164 68 160 69 156 70 154 71 153 72 152 73 152 74 153 76 154 79 155 82 155 85 156 89 156 92 155 95 155 97 154 98 154 99 154 100 155 100 156 101 157 100 159 100 162 101 165 101 169 103 172 105 175 108 178 112 180 116 182 120 183 123 183 127 182 130 181 133 178 136 174 139 168 141 162 143 154 145 146 146 139 145 133 143 128 141 125 139 123 137 122 135
/li_recognizer.h
0,0 → 1,38
/*
* li_recognizer.h
*
* Adapted from cmu_recognizer.h.
* Credit to Dean Rubine, Jim Kempf, and Ari Rapkin.
*/
 
#ifndef _LI_RECOGNIZER_H_

#define _LI_RECOGNIZER_H_

/* Extension function interfaces and indices.
   Each LI_* constant below is a slot index into the recognizer's
   extension-function table; the typedef beside it gives the
   signature stored in that slot. */

#define LI_ISA_LI 0 /* Is this a li recognizer? */

typedef bool (*li_isa_li)(recognizer r);

#define LI_TRAIN 1 /* Train recognizer */

/* Train the recognizer on nstrokes strokes for the given class
   element; replace_p selects replace-vs-augment behavior.
   NOTE(review): exact semantics of rec_xt/re are defined by the
   implementation in li_recognizer.c — confirm there. */
typedef int (*li_recognizer_train)(recognizer r,
rc* rec_xt,
u_int nstrokes,
pen_stroke* strokes,
rec_element* re,
bool replace_p);

#define LI_CLEAR 2 /* ari's clear-state extension fn. */

typedef int (*li_recognizer_clearState)(recognizer r);

#define LI_GET_CLASSES 3 /* ari's get-classes extension fn. */

typedef int (*li_recognizer_getClasses)(recognizer r, char ***list, int *nc);

#define LI_NUM_EX_FNS 4 /* Number of extension functions */

#endif
 
/bitvector.c
0,0 → 1,154
 
/***********************************************************************
 
bitvector.c - some routines for dealing with bitvectors
 
Copyright (C) 1991 Dean Rubine
 
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License. See ../COPYING for
the full agreement.
 
**********************************************************************/
 
/*LINTLIBRARY*/
 
#include "util.h"
#undef BITS_PER_VECTOR
#define BITS_PER_VECTOR 128
#include "bitvector.h"
/* ari -- for strlen */
#include <string.h>
 
/*
 * bitcount - return how many of the first `max` bits of `bv` are set.
 */
int
bitcount(max, bv)
int max;
BitVector bv;
{
    register int i;
    register int n = 0;

    for(i = 0; i < max; i++) {
        if(IS_SET(i, bv))
            n++;
    }
    return n;
}
 
/*
 * BitVectorToString - render the first `max` bits of `bv` as text.
 * A set bit i becomes the digit character (i % 10) + '0'; a clear
 * bit becomes '-'.  The result lives in a shared temporary buffer
 * (tempstring()), so it must be consumed before the next call.
 *
 * Fix: `max` was missing from the K&R parameter declaration list and
 * relied on implicit int, which is invalid since C99; declare it.
 */
char *
BitVectorToString(max, bv)
int max;
BitVector bv;
{
    char *string = tempstring();
    register int i;

    for(i = 0; i < max; i++)
        string[i] = IS_SET(i, bv) ? (i % 10) + '0' : '-' ;
    string[i] = '\0';
    return string;
}
 
 
/*
 * StringToBitVector - parse `string` into `bv`: any character other
 * than '-' sets the corresponding bit, '-' clears it.  The string
 * length must equal `max`, otherwise an error is reported and bv is
 * left unchanged.
 */
void
StringToBitVector(string, max, bv)
char *string;
int max;
BitVector bv;
{
    register int i;
    /* ari -- strlen returns a size_t, which depends on which OS you've got */
    int len = (int) strlen(string);

    if(len != max) {
        error("StringToBitVector: strlen(%s)=%d != %d",
              string, len, max);
        return;
    }

    for(i = 0; i < max; i++) {
        if(string[i] == '-')
            BIT_CLEAR(i, bv);
        else
            BIT_SET(i, bv);
    }
}
 
 
void
SetBitVector(v)
register BitVector v;
{
register int nints = INTS_PER_VECTOR;
 
while(--nints >= 0)
*v++ = -1;
}
 
 
void
ClearBitVector(nints, v)
register int nints;
register BitVector v;
{
 
while(--nints >= 0)
*v++ = 0;
}
 
void
AssignBitVector(nints, v1, v2)
register int nints;
register BitVector v1, v2;
{
 
while(--nints >= 0)
*v1++ = *v2++;
}
 
/*
 * BitVectorDeQ - dequeue the lowest-numbered set bit below `max`:
 * clear it and return its index, or return -1 if no bit is set.
 */
int
BitVectorDeQ(max, v)
register int max;
register BitVector v;
{
    register int i = 0;

    while(i < max) {
        if(IS_SET(i, v)) {
            BIT_CLEAR(i, v);
            return i;
        }
        i++;
    }
    return -1;
}
 
/*
 * BitVectorOr - v = v1 | v2, word by word over `ipv` words.
 * Returns v.  Like the original do-while form, at least one word is
 * processed even when ipv <= 0.
 */
int *
BitVectorOr(v, v1, v2, ipv)
int *v;
register int *v1, *v2;
register int ipv;
{
    int *out = v;
    register int i = 0;

    do {
        out[i] = v1[i] | v2[i];
        i++;
    } while(i < ipv);
    return v;
}
 
/*
 * BitVectorAnd - v = v1 & v2, word by word over `ipv` words.
 * Returns v.  Like the original do-while form, at least one word is
 * processed even when ipv <= 0.
 */
int *
BitVectorAnd(v, v1, v2, ipv)
int *v;
register int *v1, *v2;
register int ipv;
{
    int *out = v;
    register int i = 0;

    do {
        out[i] = v1[i] & v2[i];
        i++;
    } while(i < ipv);
    return v;
}
 
/*
 * BitVectorNoBitsSet - return 1 if all `ipv` words of `v` are zero,
 * 0 as soon as any nonzero word is found.
 */
int
BitVectorNoBitsSet(v, ipv)
register int *v;
register int ipv;
{
    register int i = 0;

    do {
        if(v[i] != 0)
            return 0;
        i++;
    } while(i < ipv);
    return 1;
}
/matrix.c
0,0 → 1,725
/***********************************************************************
 
matrix.c - simple matrix operations
 
Copyright (C) 1991 Dean Rubine
 
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License. See ../COPYING for
the full agreement.
 
**********************************************************************/
 
/*
Simple matrix operations
Why I am writing this stuff over is beyond me
*/
 
#undef PIQ_DEBUG
 
#include <stdio.h>
#include <stdlib.h>
#include <math.h>
#include "util.h"
#include "matrix.h"
 
 
typedef struct array_header *Array;
 
#define EPSILON (1.0e-10) /* zero range */
 
/*
Allocation functions
*/
 
 
/*
 * NewVector - allocate an r-element column vector of doubles.
 * The array_header is allocated immediately before the data so that
 * HEADER(v) can recover it later by pointer arithmetic; the returned
 * pointer addresses the data, not the header.
 */
Vector
NewVector(r)
int r;
{
register struct array_header *a;
register Vector v;

/* one allocation: header followed by r doubles */
a = (struct array_header *)
allocate(sizeof(struct array_header) + r * sizeof(double), char);
a->ndims = 1;
a->nrows = r;
a->ncols = 1;
v = (Vector) (a + 1);

#define CHECK
#ifdef CHECK
/* sanity-check that the HEADER/NDIMS/NROWS/NCOLS macros invert the
   layout built above; aborts via exit_error on mismatch.
   NOTE(review): %x with pointer arguments truncates on LP64 — %p
   would be correct, but this is a fatal-diagnostic path only. */
if(HEADER(v) != (struct array_header *) a ||
NDIMS(v) != 1 || NROWS(v) != r || NCOLS(v) != 1) {
exit_error("NewVector error: v=%x H: %x,%x D:%d,%d R:%d,%d C:%d,%d\n", v, HEADER(v), a, NDIMS(v), 1, NROWS(v), r, NCOLS(v), 1);
}
#endif

return v;
}
 
/*
 * NewMatrix - allocate an r x c matrix of doubles.
 * The array_header is followed by an array of r row pointers; each
 * row is a separate allocation of c doubles.  The returned pointer
 * addresses the row-pointer array, so HEADER(m) recovers the header.
 */
Matrix
NewMatrix(r, c)
int r, c;
{
    register struct array_header *a = (struct array_header *)
        allocate(sizeof(struct array_header) + r * sizeof(double *), char);
    register int row;
    register Matrix m;

    a->ndims = 2;
    a->nrows = r;
    a->ncols = c;
    m = (Matrix) (a + 1);
    for(row = 0; row < r; row++)
        m[row] = allocate(c, double);
    return m;
}
 
/*
 * FreeVector - release a vector created by NewVector.
 * The header and data are one allocation, so freeing the header
 * (recovered via HEADER) frees everything.
 */
void
FreeVector(v)
Vector v;
{
free(HEADER(v));
}
 
/*
 * FreeMatrix - release a matrix created by NewMatrix: each
 * separately-allocated row first, then the header block that
 * precedes the row-pointer array.
 */
void
FreeMatrix(m)
Matrix m;
{
register int i;

for(i = 0; i < NROWS(m); i++)
free(m[i]);
free(HEADER(m));
}
 
/*
 * VectorCopy - return a newly allocated copy of `v`.
 */
Vector
VectorCopy(v)
register Vector v;
{
    register int n = NROWS(v);
    register Vector dup = NewVector(n);
    register int i;

    for(i = 0; i < n; i++)
        dup[i] = v[i];
    return dup;
}
 
/*
 * MatrixCopy - return a newly allocated copy of `m`.
 *
 * Fix: the inner loop previously ran j over NROWS(m) instead of
 * NCOLS(m), so any non-square matrix was copied wrongly — columns
 * beyond NROWS(m) were left uninitialized, and when NROWS > NCOLS
 * the loop read past the end of each row.
 */
Matrix
MatrixCopy(m)
register Matrix m;
{
    register Matrix r = NewMatrix(NROWS(m), NCOLS(m));
    register int i, j;

    for(i = 0; i < NROWS(m); i++)
        for(j = 0; j < NCOLS(m); j++)
            r[i][j] = m[i][j];
    return r;
}
 
/* Null vector and matrixes */
 
 
/*
 * ZeroVector - set every element of `v` to 0.0.
 */
void
ZeroVector(v)
Vector v;
{
    register int n = NROWS(v);

    while(--n >= 0)
        v[n] = 0.0;
}
 
 
/*
 * ZeroMatrix - set every element of `m` to 0.0.
 */
void
ZeroMatrix(m)
Matrix m;
{
    register int row, col;

    for(row = 0; row < NROWS(m); row++) {
        for(col = 0; col < NCOLS(m); col++)
            m[row][col] = 0.0;
    }
}
 
/*
 * FillMatrix - set every element of `m` to the scalar `fill`.
 */
void
FillMatrix(m, fill)
Matrix m;
double fill;
{
    register int row, col;

    for(row = 0; row < NROWS(m); row++) {
        for(col = 0; col < NCOLS(m); col++)
            m[row][col] = fill;
    }
}
 
double
InnerProduct(v1, v2)
register Vector v1, v2;
{
double result = 0;
register int n = NROWS(v1);
if(n != NROWS(v2)) {
exit_error("InnerProduct %d x %d ", n, NROWS(v2));
}
while(--n >= 0)
result += *v1++ * *v2++;
return result;
}
 
/*
 * prod = m1 * m2.  Dimensions are validated first; on mismatch an
 * error is reported and prod is left untouched.
 */
void
MatrixMultiply(m1, m2, prod)
register Matrix m1, m2, prod;
{
    register int row, col, k;

    if(NCOLS(m1) != NROWS(m2)) {
        error("MatrixMultiply: Can't multiply %dx%d and %dx%d matrices",
            NROWS(m1), NCOLS(m1), NROWS(m2), NCOLS(m2));
        return;
    }
    if(NROWS(prod) != NROWS(m1) || NCOLS(prod) != NCOLS(m2)) {
        error("MatrixMultiply: %dx%d times %dx%d does not give %dx%d product",
            NROWS(m1), NCOLS(m1), NROWS(m2), NCOLS(m2),
            NROWS(prod), NCOLS(prod));
        return;
    }

    for (row = 0; row < NROWS(m1); row++) {
        for (col = 0; col < NCOLS(m2); col++) {
            double acc = 0;
            for (k = 0; k < NCOLS(m1); k++)
                acc += m1[row][k] * m2[k][col];
            prod[row][col] = acc;
        }
    }
}
 
/*
Compute result = v'm where
v is a column vector (r x 1)
m is a matrix (r x c)
result is a column vector (c x 1)
*/
 
/*
 * prod = v' m, where v is r x 1, m is r x c and prod is c x 1.
 * On a dimension mismatch an error is reported and prod is untouched.
 */
void
VectorTimesMatrix(v, m, prod)
Vector v;
Matrix m;
Vector prod;
{
    register int row, col;

    if(NROWS(v) != NROWS(m)) {
        error("VectorTimesMatrix: Can't multiply %d vector by %dx%d",
            NROWS(v), NROWS(m), NCOLS(m));
        return;
    }
    if(NROWS(prod) != NCOLS(m)) {
        error("VectorTimesMatrix: %d vector times %dx%d mat does not fit in %d product" ,
            NROWS(v), NROWS(m), NCOLS(m), NROWS(prod));
        return;
    }

    for (col = 0; col < NCOLS(m); col++)
        prod[col] = 0;
    /* row-major accumulation: each prod[col] still sums its terms in
       row order, same order as the original column-at-a-time loop */
    for (row = 0; row < NROWS(m); row++)
        for (col = 0; col < NCOLS(m); col++)
            prod[col] += v[row] * m[row][col];
}
 
void
ScalarTimesVector(s, v, product)
double s;
register Vector v, product;
{
register int n = NROWS(v);
 
if(NROWS(v) != NROWS(product)) {
error("ScalarTimesVector: result wrong size (%d!=%d)",
NROWS(v), NROWS(product));
return;
}
 
while(--n >= 0)
*product++ = s * *v++;
}
 
/*
 * product = s * m (element-wise).  Dimensions must match.
 */
void
ScalarTimesMatrix(s, m, product)
double s;
register Matrix m, product;
{
    register int row, col;

    if(NROWS(m) != NROWS(product) ||
       NCOLS(m) != NCOLS(product)) {
        error("ScalarTimesMatrix: result wrong size (%d!=%d)or(%d!=%d)",
            NROWS(m), NROWS(product),
            NCOLS(m), NCOLS(product));
        return;
    }

    for (row = 0; row < NROWS(m); row++)
        for (col = 0; col < NCOLS(m); col++)
            product[row][col] = s * m[row][col];
}
 
/*
Compute v'mv
*/
 
/*
 * Compute the scalar quadratic form v' m v, where v is n x 1 and m is
 * n x n.  Exits the program if m is not n x n.
 */
double
QuadraticForm(v, m)
register Vector v;
register Matrix m;
{
register int i, j, n;
double result = 0;

n = NROWS(v);

if(n != NROWS(m) || n != NCOLS(m)) {
exit_error("QuadraticForm: bad matrix size (%dx%d not %dx%d)",
NROWS(m), NCOLS(m), n, n);
}
for(i = 0; i < n; i++)
for(j = 0; j < n; j++) {

#ifdef PIQ_DEBUG
/* trace each term of the double sum when debugging */
printf("%g*%g*%g [%g] %s ",
m[i][j],v[i],v[j],
m[i][j] * v[i] * v[j],
i==n-1&&j==n-1? "=" : "+");
#endif

result += m[i][j] * v[i] * v[j];
}
return result;
}
 
/* Matrix inversion using full pivoting.
* The standard Gauss-Jordan method is used.
* The return value is the determinant.
* The input matrix may be the same as the result matrix
*
* det = InvertMatrix(inputmatrix, resultmatrix);
*
* HISTORY
* 26-Feb-82 David Smith (drs) at Carnegie-Mellon University
* Written.
* Sun Mar 20 19:36:16 EST 1988 - converted to this form by Dean Rubine
*
*/
 
int DebugInvertMatrix = 0; /* nonzero => InvertMatrix traces pivots and det */

#define PERMBUFSIZE 200 /* Max mat size */

/* absolute value as a macro; beware: evaluates x twice */
#define _abs(x) ((x)>=0 ? (x) : -(x))
 
/*
 * Invert square matrix ym into rm (rm may be ym itself) by
 * Gauss-Jordan elimination with full pivoting.  Returns the
 * determinant; 0.0 means singular (rm is then left partially
 * reduced).  l[k]/m[k] record the row/column swapped into the pivot
 * position at step k so the permutations can be undone at the end.
 */
double
InvertMatrix(ym, rm)
Matrix ym, rm;
{
register int i, j, k;
double det, biga, recip_biga, hold;
int l[PERMBUFSIZE], m[PERMBUFSIZE]; /* row / column permutation records */
register int n;

if(NROWS(ym) != NCOLS(ym)) {
exit_error("InvertMatrix: not square");
}

n = NROWS(ym);

if(n != NROWS(rm) || n != NCOLS(rm)) {
exit_error("InvertMatrix: result wrong size");
}

/* Copy ym to rm */
if(ym != rm)
for(i = 0; i < n; i++)
for(j = 0; j < n; j++)
rm[i][j] = ym[i][j];

/*if(DebugInvertMatrix) PrintMatrix(rm, "Inverting (det=%g)\n", det);*/

/* Allocate permutation vectors for l and m, with the same origin
as the matrix. */

if (n >= PERMBUFSIZE) {
exit_error("InvertMatrix: PERMBUFSIZE");
}

det = 1.0;
for (k = 0; k < n; k++) {
l[k] = k; m[k] = k;
biga = rm[k][k];

/* Find the biggest element in the submatrix */
for (i = k; i < n; i++)
for (j = k; j < n; j++)
if (_abs(rm[i][j]) > _abs(biga)) {
biga = rm[i][j];
l[k] = i;
m[k] = j;
}

/* NOTE(review): `m` here is the int permutation array, not a
   Matrix -- this debug call looks wrong; confirm before enabling
   DebugInvertMatrix. */
if(DebugInvertMatrix)
if(biga == 0.0)
PrintMatrix(m, "found zero biga = %g\n", biga);

/* Interchange rows (negated so det keeps its sign) */
i = l[k];
if (i > k)
for (j = 0; j < n; j++) {
hold = -rm[k][j];
rm[k][j] = rm[i][j];
rm[i][j] = hold;
}

/* Interchange columns */
j = m[k];
if (j > k)
for (i = 0; i < n; i++) {
hold = -rm[i][k];
rm[i][k] = rm[i][j];
rm[i][j] = hold;
}

/* Divide column by minus pivot
(value of pivot element is contained in biga). */
if (biga == 0.0) {
return 0.0; /* singular */
}

if(DebugInvertMatrix) printf("biga = %g\n", biga);
recip_biga = 1/biga;
for (i = 0; i < n; i++)
if (i != k)
rm[i][k] *= -recip_biga;

/* Reduce matrix */
for (i = 0; i < n; i++)
if (i != k) {
hold = rm[i][k];
for (j = 0; j < n; j++)
if (j != k)
rm[i][j] += hold * rm[k][j];
}

/* Divide row by pivot */
for (j = 0; j < n; j++)
if (j != k)
rm[k][j] *= recip_biga;

det *= biga; /* Product of pivots */
if(DebugInvertMatrix) printf("det = %g\n", det);
rm[k][k] = recip_biga;

} /* K loop */

/* Final row & column interchanges: undo the pivoting permutations
   in reverse order */
for (k = n - 1; k >= 0; k--) {
i = l[k];
if (i > k)
for (j = 0; j < n; j++) {
hold = rm[j][k];
rm[j][k] = -rm[j][i];
rm[j][i] = hold;
}
j = m[k];
if (j > k)
for (i = 0; i < n; i++) {
hold = rm[k][i];
rm[k][i] = -rm[j][i];
rm[j][i] = hold;
}
}

if(DebugInvertMatrix) printf("returning, det = %g\n", det);

return det;
}
 
 
#include "bitvector.h"
 
/*
 * Return a new vector containing only the elements of v whose index
 * bit is set in rowmask.
 */
Vector
SliceVector(v, rowmask)
Vector v;
BitVector rowmask;
{
    register int src, dst;
    Vector out = NewVector(bitcount(NROWS(v), rowmask));

    dst = 0;
    for (src = 0; src < NROWS(v); src++)
        if (IS_SET(src, rowmask))
            out[dst++] = v[src];
    return out;
}
 
/*
 * Return a new submatrix of m made of the rows selected by rowmask
 * and the columns selected by colmask (set bit => kept).
 */
Matrix
SliceMatrix(m, rowmask, colmask)
Matrix m;
BitVector rowmask, colmask;
{
    register int srow, drow, scol, dcol;
    Matrix out;

    out = NewMatrix(bitcount(NROWS(m), rowmask),
        bitcount(NCOLS(m), colmask));
    drow = 0;
    for (srow = 0; srow < NROWS(m); srow++) {
        if (!IS_SET(srow, rowmask))
            continue;
        dcol = 0;
        for (scol = 0; scol < NCOLS(m); scol++)
            if (IS_SET(scol, colmask))
                out[drow][dcol++] = m[srow][scol];
        drow++;
    }

    return out;
}
 
/*
 * Inverse of SliceMatrix: fill the full-size result r with `fill`,
 * then scatter the entries of the smaller matrix m back into the
 * rows/columns of r whose bits are set in rowmask/colmask.
 * (i,j) index r; (ri,rj) walk m.  Returns r.
 */
Matrix
DeSliceMatrix(m, fill, rowmask, colmask, r)
Matrix m;
double fill;
BitVector rowmask, colmask;
Matrix r;
{
register int i, ri, j, rj;

FillMatrix(r, fill);

for(i = ri = 0; i < NROWS(r); i++) {
if(IS_SET(i, rowmask) ) {
for(j = rj = 0; j < NCOLS(r); j++)
if(IS_SET(j, colmask))
r[i][j] = m[ri][rj++];
ri++;
}
}

return r;
}
 
/*
 * Write v to f in the textual form read back by InputVector:
 * " V <n>  e0 e1 ...".
 */
void
OutputVector(f, v)
FILE *f;
register Vector v;
{
    register int k, len = NROWS(v);

    fprintf(f, " V %d ", len);
    for (k = 0; k < len; k++)
        fprintf(f, " %g", v[k]);
    fprintf(f, "\n");
}
 
/*
 * Read a vector in OutputVector's format ("V <n>" then n doubles).
 * Exits the program on any parse error.
 */
Vector
InputVector(f)
FILE *f;
{
    register Vector vec;
    register int k;
    char tag[4];
    int len;

    if (fscanf(f, "%1s %d", tag, &len) != 2) {
        exit_error("InputVector fscanf 1");
    }
    if (tag[0] != 'V') {
        exit_error("InputVector check");
    }
    vec = NewVector(len);
    for (k = 0; k < len; k++) {
        if (fscanf(f, "%lf", &vec[k]) != 1) {
            exit_error("InputVector fscanf 2");
        }
    }
    return vec;
}
 
/*
 * Write m to f in the textual form read back by InputMatrix:
 * " M <rows> <cols>" then one line of "%g" values per row.
 */
void
OutputMatrix(f, m)
FILE* f;
register Matrix m;
{
    register int row, col;

    fprintf(f, " M %d %d\n", NROWS(m), NCOLS(m));
    for (row = 0; row < NROWS(m); row++) {
        for (col = 0; col < NCOLS(m); col++)
            fprintf(f, " %g", m[row][col]);
        fprintf(f, "\n");
    }
}
 
/*
 * Read a matrix in OutputMatrix's format ("M <rows> <cols>" then
 * rows*cols doubles).  Exits the program on any parse error.
 */
Matrix
InputMatrix(f)
FILE *f;
{
    register Matrix mat;
    register int row, col;
    char tag[4];
    int nr, nc;

    if (fscanf(f, "%1s %d %d", tag, &nr, &nc) != 3) {
        exit_error("InputMatrix fscanf 1");
    }
    if (tag[0] != 'M') {
        exit_error("InputMatrix check");
    }
    mat = NewMatrix(nr, nc);
    for (row = 0; row < nr; row++)
        for (col = 0; col < nc; col++) {
            if (fscanf(f, "%lf", &mat[row][col]) != 1) {
                exit_error("InputMatrix fscanf 2");
            }
        }

    return mat;
}
 
/*
 * Attempt to "invert" a singular matrix m: search for a maximal
 * subset of rows/columns whose submatrix is invertible, trying one,
 * then two, then three left-out rows&columns.  The best submatrix's
 * inverse is scattered back into inv (left-out rows/cols become 0).
 * Returns the determinant of the inverted submatrix, 0.0 on failure.
 *
 * BUG FIX: the two-left-out search compared determinants with the
 * integer abs(), which truncates doubles (|det| < 1 always compared
 * as 0); it now uses _abs like the one- and three-left-out searches.
 */
double
InvertSingularMatrix(m, inv)
Matrix m, inv;
{
	register int i, j, k;
	BitVector mask;
	Matrix sm;
	double det, maxdet;
	int mi = -1, mj = -1, mk = -1;

	/* pass 1: leave out a single row & column */
	maxdet = 0.0;
	for(i = 0; i < NROWS(m); i++) {
		printf("r&c%d, ", i);
		SET_BIT_VECTOR(mask);
		BIT_CLEAR(i, mask);
		sm = SliceMatrix(m, mask, mask);
		det = InvertMatrix(sm, sm);
		if(det == 0.0)
			printf("det still 0\n");
		else {
			printf("det = %g\n", det);
		}
		if(_abs(det) > _abs(maxdet))
			maxdet = det, mi = i;
		FreeMatrix(sm);
	}
	printf("\n");

	printf("maxdet=%g when row %d left out\n", maxdet, mi);
	if(fabs(maxdet) > 1.0e-6) {
		goto found;
	}

	/* pass 2: leave out two rows & columns */
	maxdet = 0.0;
	for(i = 0; i < NROWS(m); i++) {
		for(j = i+1; j < NROWS(m); j++) {
			SET_BIT_VECTOR(mask);
			BIT_CLEAR(i, mask);
			BIT_CLEAR(j, mask);
			sm = SliceMatrix(m, mask, mask);
			det = InvertMatrix(sm, sm);
			if(_abs(det) > _abs(maxdet))	/* FIX: was integer abs() */
				maxdet = det, mi = i, mj = j;
			FreeMatrix(sm);
		}
	}

	printf("maxdet=%g when rows %d,%d left out\n", maxdet, mi, mj);
	if(_abs(maxdet) > 1.0e-6) {
		goto found;
	}

	/* pass 3: leave out three rows & columns */
	maxdet = 0.0;
	for(i = 0; i < NROWS(m); i++) {
		for(j = i+1; j < NROWS(m); j++) {
			for(k = j+1; k < NROWS(m); k++) {
				SET_BIT_VECTOR(mask);
				BIT_CLEAR(i, mask);
				BIT_CLEAR(j, mask);
				BIT_CLEAR(k, mask);
				sm = SliceMatrix(m, mask, mask);
				det = InvertMatrix(sm, sm);
				if(_abs(det) > _abs(maxdet))
					maxdet = det, mi = i, mj = j, mk = k;
				FreeMatrix(sm);
			}
		}
	}
	printf("maxdet=%g when rows %d,%d&%d left out\n", maxdet, mi, mj, mk);
	if(mk == -1)
		return 0.0;

found:

	/* invert the best submatrix for real and scatter it into inv */
	SET_BIT_VECTOR(mask);
	if(mi >= 0) BIT_CLEAR(mi, mask);
	if(mj >= 0) BIT_CLEAR(mj, mask);
	if(mk >= 0) BIT_CLEAR(mk, mask);
	sm = SliceMatrix(m, mask, mask);
	det = InvertMatrix(sm, sm);
	DeSliceMatrix(sm, 0.0, mask, mask, inv);
	FreeMatrix(sm);
	PrintMatrix(inv, "desliced:\n");
	return det;
}
 
/* You can fairly confidently ignore the compiler warnings after here */
 
/*
 * printf the format s (consuming up to 8 int-sized varargs -- a
 * pre-<stdarg.h> hack) and then the elements of v on one line.
 * NOTE(review): callers pass pointers and doubles through the int
 * slots (e.g. sDumpClassifier passes sc->cnst[c] for a %g); this only
 * works on ABIs where such arguments pass like a run of ints --
 * nonportable, confirm before reuse.
 */
void
PrintVector(v, s,a1,a2,a3,a4,a5,a6,a7,a8)
register Vector v;
char *s; int a1,a2,a3,a4,a5,a6,a7,a8;
{
register int i;
printf(s,a1,a2,a3,a4,a5,a6,a7,a8);

for(i = 0; i < NROWS(v); i++) printf(" %8.4f", v[i]);
printf("\n");
}
 
/*
 * printf the format s (up to 8 int-sized varargs, same pre-stdarg
 * hack as PrintVector) followed by the rows of m, one line per row.
 */
void
PrintMatrix(m, s,a1,a2,a3,a4,a5,a6,a7,a8)
register Matrix m;
char *s; int a1,a2,a3,a4,a5,a6,a7,a8;
{
register int i, j;
printf(s,a1,a2,a3,a4,a5,a6,a7,a8);
for(i = 0; i < NROWS(m); i++) {
for(j = 0; j < NCOLS(m); j++)
printf(" %8.4f", m[i][j]);
printf("\n");
}
}
 
/*
 * Generic printer: dispatch on the array header's ndims field to
 * print a as a Vector (1) or a Matrix (2); anything else is an error.
 */
void
PrintArray(a, s,a1,a2,a3,a4,a5,a6,a7,a8)
Array a;
char *s; int a1,a2,a3,a4,a5,a6,a7,a8;
{
switch(NDIMS(a)) {
case 1: PrintVector((Vector) a, s,a1,a2,a3,a4,a5,a6,a7,a8); break;
case 2: PrintMatrix((Matrix) a, s,a1,a2,a3,a4,a5,a6,a7,a8); break;
default: error("PrintArray");
}
}
 
/nxscribble.syms
0,0 → 1,725
nxscribble_main
/sc.c
0,0 → 1,533
/***********************************************************************
 
sc.c - creates classifiers from feature vectors of examples, as well as
classifying example feature vectors.
 
Copyright (C) 1991 Dean Rubine
 
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 1, or (at your option)
any later version.
 
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
 
You should have received a copy of the GNU General Public License
along with this program (in ../COPYING); if not, write to the Free
Software Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
 
***********************************************************************/
 
 
#include "bitvector.h"
#include "matrix.h"
#include "util.h"
#include "sc.h"
#include "stdio.h"
#include "stdlib.h"
#include "math.h"
#include "zdebug.h"
 
#define EPS (1.0e-6) /* for singular matrix check */
/*
 * Create an empty classifier: no classes yet, feature count unknown
 * (-1) until the first example arrives, and no trained weights
 * (w == NULL marks "untrained").
 */
sClassifier
sNewClassifier()
{
    register sClassifier sc = allocate(1, struct sclassifier);

    sc->nclasses = 0;
    sc->nfeatures = -1;
    sc->w = NULL;
    sc->classdope = allocate(MAXSCLASSES, sClassDope);
    return sc;
}
 
/*
 * Free a classifier and everything it owns: per-class dope records
 * (name, covariance sum, mean), the trained weight vectors, the
 * constants and the inverse average covariance matrix.
 * NOTE(review): the NULL guards assume unset pointer fields read as
 * NULL; sNewClassifier only initializes w explicitly, so this
 * presumably relies on allocate() zero-filling -- confirm in util.c.
 */
void
sFreeClassifier(sc)
register sClassifier sc;
{
register int i;
register sClassDope scd;

for(i = 0; i < sc->nclasses; i++) {
scd = sc->classdope[i];
if(scd->name) free(scd->name);
if(scd->sumcov) FreeMatrix(scd->sumcov);
if(scd->average) FreeVector(scd->average);
free(scd);
if(sc->w && sc->w[i]) FreeVector(sc->w[i]);
}
free(sc->classdope);
if(sc->w) free(sc->w);
if(sc->cnst) FreeVector(sc->cnst);
if(sc->invavgcov) FreeMatrix(sc->invavgcov);
free(sc);
}
 
/*
 * Find the class named classname in sc; returns its dope record or
 * NULL.  A one-entry (classifier, dope) cache short-circuits repeated
 * lookups of the same name.
 */
sClassDope
sClassNameLookup(sc, classname)
register sClassifier sc;
register char *classname;
{
    register int i;
    register sClassDope scd;
    static sClassifier lastsc = NULL;
    static sClassDope lastscd = NULL;

    /* fast path: same classifier and name as the last successful lookup */
    if (sc == lastsc && lastscd != NULL && STREQ(lastscd->name, classname))
        return lastscd;

    /* slow path: linear scan of all classes */
    for (i = 0; i < sc->nclasses; i++) {
        scd = sc->classdope[i];
        if (STREQ(scd->name, classname)) {
            lastsc = sc;
            lastscd = scd;
            return scd;
        }
    }
    lastsc = NULL;
    lastscd = NULL;
    return NULL;
}
 
/*
 * Append a new, empty class named classname and return its dope
 * record.  NOTE(review): there is no capacity check against
 * MAXSCLASSES here; presumably callers never exceed it -- confirm
 * before reuse.
 */
static sClassDope
sAddClass(sc, classname)
register sClassifier sc;
char *classname;
{
    register sClassDope scd = allocate(1, struct sclassdope);

    scd->name = scopy(classname);
    scd->number = sc->nclasses;
    scd->nexamples = 0;
    scd->sumcov = NULL;
    sc->classdope[sc->nclasses] = scd;
    sc->nclasses++;
    return scd;
}
 
void
sAddExample(sc, classname, y)
register sClassifier sc;
char *classname;
Vector y;
{
register sClassDope scd;
register int i, j;
double nfv[50];
double nm1on, recipn;
 
scd = sClassNameLookup(sc, classname);
if(scd == NULL) {
/* fprintf(stderr, "sAddExample: calling sAddClass on %s.\n", classname); */
scd = sAddClass(sc, classname);
}
 
if(sc->nfeatures == -1) {
sc->nfeatures = NROWS(y);
/* fprintf(stderr, "sAddExample: setting sc->nfeatures to NROWS(y).\n"); */
}
 
if(scd->nexamples == 0) {
/* fprintf(stderr, "sAddExample: allocating & zeroing scd->average & scd->sumcov.\n"); */
scd->average = NewVector(sc->nfeatures);
ZeroVector(scd->average);
scd->sumcov = NewMatrix(sc->nfeatures, sc->nfeatures);
ZeroMatrix(scd->sumcov);
 
}
 
if(sc->nfeatures != NROWS(y)) {
PrintVector(y, "sAddExample: funny feature vector nrows!=%d",
sc->nfeatures);
return;
}
 
scd->nexamples++;
nm1on = ((double) scd->nexamples-1)/scd->nexamples;
recipn = 1.0/scd->nexamples;
 
/* incrementally update covariance matrix */
for(i = 0; i < sc->nfeatures; i++)
nfv[i] = y[i] - scd->average[i];
 
/* only upper triangular part computed */
for(i = 0; i < sc->nfeatures; i++)
for(j = i; j < sc->nfeatures; j++)
scd->sumcov[i][j] += nm1on * nfv[i] * nfv[j];
 
/* incrementally update mean vector */
for(i = 0; i < sc->nfeatures; i++)
scd->average[i] = nm1on * scd->average[i] + recipn * y[i];
 
}
 
/*
 * Finish training: pool the per-class covariance sums into a common
 * average covariance matrix, invert it (repairing near-singular cases
 * via FixClassifier), then compute one linear discriminant per class:
 * weight vector w[c] and constant cnst[c].
 *
 * FIX: the "too few examples" error path returned without freeing the
 * avgcov scratch matrix.
 */
void
sDoneAdding(sc)
register sClassifier sc;
{
	register int i, j;
	int c;
	int ne, denom;
	double oneoverdenom;
	register Matrix s;
	register Matrix avgcov;
	double det;
	register sClassDope scd;

	if(sc->nclasses == 0) {
		error("No classes for adding to classifier");
		return;
	}

	/* Given covariance matrices for each class (* number of examples - 1)
	   compute the average (common) covariance matrix */
	avgcov = NewMatrix(sc->nfeatures, sc->nfeatures);
	ZeroMatrix(avgcov);
	ne = 0;
	for(c = 0; c < sc->nclasses; c++) {
		scd = sc->classdope[c];
		ne += scd->nexamples;
		s = scd->sumcov;
		for(i = 0; i < sc->nfeatures; i++)
			for(j = i; j < sc->nfeatures; j++)
				avgcov[i][j] += s[i][j];
	}

	denom = ne - sc->nclasses;
	if(denom <= 0) {
		error("Number of classes must be less than number of examples");
		FreeMatrix(avgcov);	/* FIX: was leaked on this path */
		return;
	}

	/* normalize, mirroring the upper triangle into the lower */
	oneoverdenom = 1.0 / denom;
	for(i = 0; i < sc->nfeatures; i++)
		for(j = i; j < sc->nfeatures; j++)
			avgcov[j][i] = avgcov[i][j] *= oneoverdenom;

	Z('a') PrintMatrix(avgcov, "Average Covariance Matrix\n");

	/* invert the avg covariance matrix */
	sc->invavgcov = NewMatrix(sc->nfeatures, sc->nfeatures);
	det = InvertMatrix(avgcov, sc->invavgcov);
	if(fabs(det) <= EPS)
		FixClassifier(sc, avgcov);

	/* now compute discrimination functions */
	sc->w = allocate(sc->nclasses, Vector);
	sc->cnst = NewVector(sc->nclasses);
	for(c = 0; c < sc->nclasses; c++) {
		scd = sc->classdope[c];
		sc->w[c] = NewVector(sc->nfeatures);
		VectorTimesMatrix(scd->average, sc->invavgcov, sc->w[c]);
		sc->cnst[c] = -0.5 * InnerProduct(sc->w[c], scd->average);
		/* could add log(priorprob class c) to cnst[c] */
	}

	FreeMatrix(avgcov);
	return;
}
 
sClassDope
sClassify(sc, fv) {
return sClassifyAD(sc, fv, NULL, NULL);
}
 
/*
 * Classify feature vector fv with the trained classifier sc.
 * Returns the dope record of the winning class, or NULL if sc has
 * not been trained (sc->w == NULL).  Optionally reports:
 *   *ap - estimated probability that the answer is unambiguous
 *         (softmax weight of the winning discriminant);
 *   *dp - Mahalanobis distance from fv to the winner's mean.
 */
sClassDope
sClassifyAD(sc, fv, ap, dp)
sClassifier sc;
Vector fv;
double *ap;
double *dp;
{
double disc[MAXSCLASSES];
register int i, maxclass;
double denom, exp();
register sClassDope scd;
double d;

if(sc->w == NULL) {
error("%x not a trained classifier", sc);
return(NULL);
}

/* evaluate each class's linear discriminant: w[i].fv + cnst[i] */
for(i = 0; i < sc->nclasses; i++) {
/* ari */
double IP;
IP = InnerProduct(sc->w[i], fv);
/* fprintf(stderr, "sClassifyAD: InnerProduct for class %s is %f.\n", sc->classdope[i]->name, IP); */
/* fprintf(stderr, "sClassifyAD: sc->cnst[i] = %f.\n", sc->cnst[i]); */
disc[i] = IP + sc->cnst[i];
/* fprintf(stderr, "sClassifyAD: Set disc = %f for class %s.\n", disc[i],sc->classdope[i]->name); */
/* disc[i] = InnerProduct(sc->w[i], fv) + sc->cnst[i]; */
}

/* pick the class with the largest discriminant */
maxclass = 0;
for(i = 1; i < sc->nclasses; i++)
if(disc[i] > disc[maxclass])
maxclass = i;

/* ari -- feature-vector index reference (see feature.h):  */
/* PF_INIT_COS 0 initial angle (cos) */
/* PF_INIT_SIN 1 initial angle (sin) */
/* PF_BB_LEN 2 length of bounding box diagonal */
/* PF_BB_TH 3 angle of bounding box diagonal */
/* PF_SE_LEN 4 length between start and end points */
/* PF_SE_COS 5 cos of angle between start and end points */
/* PF_SE_SIN 6 sin of angle between start and end points */
/* PF_LEN 7 arc length of path */
/* PF_TH 8 total angle traversed */
/* PF_ATH 9 sum of abs vals of angles traversed */
/* PF_SQTH 10 sum of squares of angles traversed */
/* PF_DUR 11 duration of path */
/* ifndef USE_TIME */
/* NFEATURES 12 */
/* else */
/* PF_MAXV 12 maximum speed */
/* NFEATURES 13 */
/* endif */

/*
 * fprintf(stderr, "\nFeature vector:\n");
 * fprintf(stderr, " start cosine %8.4f path length %8.4f\n",
 * fv[PF_INIT_COS], fv[PF_LEN]);
 * fprintf(stderr, " start sine %8.4f total angle %8.4f\n",
 * fv[PF_INIT_SIN], fv[PF_TH]);
 * fprintf(stderr, " b.b. length %8.4f total abs. angle %8.4f\n",
 * fv[PF_BB_LEN], fv[PF_ATH]);
 * fprintf(stderr, " b.b. angle %8.4f total sq. angle %8.4f\n",
 * fv[PF_BB_TH], fv[PF_SQTH]);
 * fprintf(stderr, " st-end length %8.4f duration %8.4f\n",
 * fv[PF_SE_LEN], fv[PF_DUR]);
 * fprintf(stderr, " st-end cos %8.4f\n", fv[PF_SE_COS]);
 * fprintf(stderr, " st-end sin %8.4f\n", fv[PF_SE_SIN]);
 */
/* optional debug dumps, enabled via the zdebug Z-flags */
ZZ('C') {
scd = sc->classdope[maxclass];
PrintVector(fv, "%10.10s ", scd->name);
ZZZ('C') {
for(i = 0; i < sc->nclasses; i++) {
scd = sc->classdope[i];
PrintVector(scd->average, "%5.5s %5g ",
scd->name, disc[i]);
}
}
}

scd = sc->classdope[maxclass];
/* ari */
/* fprintf(stderr,"%s", scd->name); */
/*
fprintf(stderr,"Stroke identified as %s [ ", scd->name);
for (i = 0; i < sc->nclasses; i++) {
if ( (disc[maxclass] - disc[i] < 5.0) && (i != maxclass) )
fprintf(stderr,"%s ", sc->classdope[i]->name);
}
fprintf(stderr,"], ");
*/
if(ap) { /* calculate probability of non-ambiguity */
for(denom = 0, i = 0; i < sc->nclasses; i++)
/* quick check to avoid computing negligible term */
if((d = disc[i] - disc[maxclass]) > -7.0)
denom += exp(d);
*ap = 1.0 / denom;
}

if(dp) /* calculate distance to mean of chosen class */
*dp = MahalanobisDistance(fv, scd->average, sc->invavgcov);

return scd;
}
 
/*
Compute (v-u)' sigma (v-u)

A scratch difference vector is cached in a static and reallocated
only when the requested size changes; as a consequence this function
is not reentrant.
*/

double
MahalanobisDistance(v, u, sigma)
register Vector v, u;
register Matrix sigma;
{
register int i;
static Vector space;
double result;

/* (re)allocate the cached scratch vector if the size changed */
if(space == NULL || NROWS(space) != NROWS(v)) {
if(space) FreeVector(space);
space = NewVector(NROWS(v));
}
/* space = v - u */
for(i = 0; i < NROWS(v); i++)
space[i] = v[i] - u[i];
result = QuadraticForm(space, sigma);
return result;
}
 
/*
 * Called when the pooled covariance matrix is (near-)singular: build
 * a usable inverse by greedily adding features one at a time and
 * dropping any feature whose inclusion makes the accumulated
 * submatrix non-invertible.  The inverse of the surviving feature
 * submatrix is scattered into sc->invavgcov, zero elsewhere.
 *
 * FIX: the "Can't fix classifier!" error path returned without
 * freeing the scratch matrices m and r.
 */
void
FixClassifier(sc, avgcov)
register sClassifier sc;
Matrix avgcov;
{
	int i;
	double det;
	BitVector bv;
	Matrix m, r;

	/* just add the features one by one, discarding any that cause
	   the matrix to be non-invertible */
	CLEAR_BIT_VECTOR(bv);
	for(i = 0; i < sc->nfeatures; i++) {
		BIT_SET(i, bv);
		m = SliceMatrix(avgcov, bv, bv);
		r = NewMatrix(NROWS(m), NCOLS(m));
		det = InvertMatrix(m, r);
		if(fabs(det) <= EPS)
			BIT_CLEAR(i, bv);	/* feature i breaks invertibility */
		FreeMatrix(m);
		FreeMatrix(r);
	}

	/* invert the final feature subset for real */
	m = SliceMatrix(avgcov, bv, bv);
	r = NewMatrix(NROWS(m), NCOLS(m));
	det = InvertMatrix(m, r);
	if(fabs(det) <= EPS) {
		error("Can't fix classifier!");
		FreeMatrix(m);	/* FIX: was leaked on this path */
		FreeMatrix(r);
		return;
	}
	DeSliceMatrix(r, 0.0, bv, bv, sc->invavgcov);

	FreeMatrix(m);
	FreeMatrix(r);

}
 
/*
 * Debug dump of a trained classifier to stdout: feature count, class
 * names, per-class discrimination function (constant + weights) and
 * mean vector, and the inverse average covariance matrix if present.
 * NOTE(review): PrintVector is called with a double (sc->cnst[c])
 * through its int varargs slots -- works only on ABIs where that
 * promotes like ints; see PrintVector.
 */
void
sDumpClassifier(sc)
register sClassifier sc;
{
register sClassIndex c;

printf("\n----Classifier %x, %d features:-----\n", (int)sc, sc->nfeatures);
printf("%d classes: ", sc->nclasses);
for(c = 0; c < sc->nclasses; c++)
printf("%s ", sc->classdope[c]->name);
printf("Discrimination functions:\n");
for(c = 0; c < sc->nclasses; c++) {
PrintVector(sc->w[c], "%s: %g + ", sc->classdope[c]->name, sc->cnst[c]);
printf("Mean vectors:\n");
PrintVector(sc->classdope[c]->average, "%s: ", sc->classdope[c]->name);
}
if( sc->invavgcov != NULL ) {
PrintMatrix(sc->invavgcov, "Inverse average covariance matrix:\n");
}
printf("\n---------\n\n");
}
 
/*
 * Serialize classifier sc to outfile in the textual format read back
 * by sRead: class count, class names, then per-class mean vector,
 * covariance sum and weight vector, and finally the constant vector
 * and the inverse average covariance matrix.
 */
void
sWrite(outfile, sc)
FILE *outfile;
sClassifier sc;
{
    int c;
    register sClassDope scd;

    fprintf(outfile, "%d classes\n", sc->nclasses);
    for (c = 0; c < sc->nclasses; c++)
        fprintf(outfile, "%s\n", sc->classdope[c]->name);
    for (c = 0; c < sc->nclasses; c++) {
        scd = sc->classdope[c];
        OutputVector(outfile, scd->average);
        OutputMatrix(outfile, scd->sumcov);
        OutputVector(outfile, sc->w[c]);
    }
    OutputVector(outfile, sc->cnst);
    OutputMatrix(outfile, sc->invavgcov);
}
 
/*
 * Read a classifier previously written by sWrite.  Returns the new
 * classifier, or NULL on a malformed file.
 *
 * FIXES: the original ignored the return value of fgets(), so an
 * empty file was parsed from an uninitialized buffer; and it read
 * class names with an unchecked, unbounded "%s" that could overrun
 * buf.  Names are now read with a checked "%99s".
 */
sClassifier
sRead(infile)
FILE *infile;
{
	int i, n;
	register sClassifier sc;
	register sClassDope scd;
	char buf[100];

	Z('a') printf("Reading classifier \n");
	sc = sNewClassifier();
	/* FIX: check fgets before parsing the buffer */
	if(fgets(buf, 100, infile) == NULL || sscanf(buf, "%d", &n) != 1) {
		error("Input error in classifier file");
		sFreeClassifier(sc);
		return(NULL);
	}
	Z('a') printf(" %d classes \n", n);
	for(i = 0; i < n; i++) {
		/* FIX: bounded, checked read of the class name */
		if(fscanf(infile, "%99s", buf) != 1) {
			error("Input error in classifier file");
			/* sc is deliberately not freed here: the classes
			   added so far are only partially initialized and
			   sFreeClassifier would touch unset fields */
			return(NULL);
		}
		scd = sAddClass(sc, buf);
		Z('a') printf(" %s \n", scd->name);
	}
	sc->w = allocate(sc->nclasses, Vector);
	for(i = 0; i < sc->nclasses; i++) {
		scd = sc->classdope[i];
		scd->average = InputVector(infile);
		scd->sumcov = InputMatrix(infile);
		sc->w[i] = InputVector(infile);
	}
	sc->cnst = InputVector(infile);
	sc->invavgcov = InputMatrix(infile);
	Z('a') printf("\n");
	return sc;
}
 
 
/*
 * Report the nclosest most easily confused class pairs: compute the
 * Mahalanobis distance between every pair of class means (upper
 * triangle of d only; the diagonal and lower triangle are never
 * written or read), then repeatedly print and retire the smallest
 * remaining pair.
 * NOTE(review): nclosest has no K&R declaration and so defaults to
 * int, which matches how it is used here.
 */
void
sDistances(sc, nclosest)
register sClassifier sc;
{
register Matrix d = NewMatrix(sc->nclasses, sc->nclasses);
register int i, j;
double min, max = 0;
int n, mi, mj;

printf("-----------\n");
printf("Computing %d closest pairs of classes\n", nclosest);
/* fill the upper triangle of d; track the largest distance */
for(i = 0; i < NROWS(d); i++) {
for(j = i+1; j < NCOLS(d); j++) {
d[i][j] = MahalanobisDistance(
sc->classdope[i]->average,
sc->classdope[j]->average,
sc->invavgcov);
if(d[i][j] > max) max = d[i][j];
}
}

/* selection pass: pull out the nclosest smallest entries */
for(n = 1; n <= nclosest; n++) {
min = max;
mi = mj = -1;
for(i = 0; i < NROWS(d); i++) {
for(j = i+1; j < NCOLS(d); j++) {
if(d[i][j] < min)
min = d[mi=i][mj=j];
}
}
if(mi == -1)
break;

printf("%2d) %10.10s to %10.10s d=%g nstd=%g\n",
n,
sc->classdope[mi]->name,
sc->classdope[mj]->name,
d[mi][mj],
sqrt(d[mi][mj]));

/* retire this pair by pushing it past the maximum */
d[mi][mj] = max+1;
}
printf("-----------\n");
FreeMatrix(d);
}
/bitvector.h
0,0 → 1,220
/***********************************************************************
 
bitvector.h - some macros for dealing with bitvectors
 
Copyright (C) 1991 Dean Rubine
 
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License. See ../COPYING for
the full agreement.
 
**********************************************************************/
 
/*
Bit vector package
 
Used so that it's easier when we need more than 8*sizeof(int) bits
in a the vector.
 
Usage:
Before including this file define the identifier BITS_PER_VECTOR
 
BITS_PER_VECTOR must be one of the following values:
 
16
32
64
128
 
The high tech preprocessor hacking is sure to be nonportable. The
use of include in this file is not to include files, it is to
print out error messages. Ugly, I know, but what can I do?
 
You may include this file more than one in a single C file!
By default, when this file is included it defines a type
BitVector. You may change the name of this type (necessary
to avoid redefinitions when included more than one) by defining:
 
#define BV_TYPE_NAME MyBitVectorTypeName
 
The usual sequence for including this file is thus:
 
#undef BV_TYPE_NAME
#undef BITS_PER_VECTOR
#define BV_TYPE_NAME BVTypeName
#define BITS_PER_VECTOR how-many-bits-per-vector
 
WARNING: Once the file is re-included do not attempt to manipulate any
other vectors besides the newest type for the rest of the file or until
the file is included again.
*/
 
/*
------------- check BITS_PER_VECTOR -----------------
*/
#ifndef BITS_PER_VECTOR
# define BITS_PER_VECTOR 32
#endif
 
#if (BITS_PER_VECTOR != 16) && (BITS_PER_VECTOR != 32) && (BITS_PER_VECTOR != 64) && (BITS_PER_VECTOR != 128)
 
# include "****** illegal value for BITS_PER_VECTOR ******"
 
#endif
 
/*
------------- machine dependent stuff -----------------
*/
 
#ifndef BITS_PER_INT
 
 
# ifdef unix
# define BITS_PER_INT 32
# else
# define BITS_PER_INT 16 /* IBM XT Lattice C */
# endif
 
# define BV_CHECK_MACHINE_ASSUMPTIONS() \
if(BITS_PER_INT != 8*sizeof(int)) \
error("BV_CHECK_ASSUMPTIONS");
 
#endif
 
/*
---- If this file has been included already, redefine everything ----
*/
 
# undef BV_INDEX_MASK
# undef BV_INDEX_SHIFT
# undef INTS_PER_VECTOR
# undef VECTOR_SIZE_CHECK
# undef SET_BIT_VECTOR
# undef IS_SET
# undef ASSIGN_BIT_VECTOR
# undef CLEAR_BIT_VECTOR
# undef BIT_SET
# undef BIT_CLEAR
 
/*
--------------- round up to int size -------------------
*/
 
#if BITS_PER_VECTOR < BITS_PER_INT
# undef BITS_PER_VECTOR
# define BITS_PER_VECTOR BITS_PER_INT
#endif
 
/*
------------- Compute index shift and mask to avoid division -----
*/
 
#define BV_INDEX_MASK (BITS_PER_INT - 1)
 
#if BITS_PER_INT==16
# define BV_INDEX_SHIFT 4
#endif
 
#if BITS_PER_INT==32
# define BV_INDEX_SHIFT 5
#endif
 
#ifndef BV_INDEX_SHIFT
# include "****** bad value for BITS_PER_INT ******"
#endif
 
/*
------------- Compute INTS_PER_VECTOR ------------------
*/
 
#if BITS_PER_INT==BITS_PER_VECTOR
# define INTS_PER_VECTOR 1
#else
# if 2*BITS_PER_INT==BITS_PER_VECTOR
# define INTS_PER_VECTOR 2
# else
# define INTS_PER_VECTOR (BITS_PER_VECTOR / BITS_PER_INT)
# endif
#endif
 
 
#define BV_SIZE_CHECK(nbits_needed) \
if(nbits_needed > BITS_PER_VECTOR) \
error("%s line %d - %d bits needed, %d is vector size", \
__FILE__, __LINE__, nbits_needed, BITS_PER_VECTOR);
 
#ifndef BV_TYPE_NAME
# define BV_TYPE_NAME BitVector
#endif
 
/*
------------- Optimize INTS_PER_VECTOR=1 case
*/
 
#if INTS_PER_VECTOR==1
 
typedef int BV_TYPE_NAME[1];
 
#define CLEAR_BIT_VECTOR(v) ( (v)[0] = 0 )
#define SET_BIT_VECTOR(v) ( (v)[0] = -1 ) /* assumes 2's comp */
#define BIT_SET(bit, v) ( (v)[0] |= (1 << (bit)) )
#define BIT_CLEAR(bit, v) ( (v)[0] &= ~(1 << (bit)) )
#define IS_SET(bit, v) ( ((v)[0] >> (bit)) & 01 )
#define ASSIGN_BIT_VECTOR(v1,v2) ( (v1)[0] = (v2)[0] )
 
#else
 
/*
------------- Optimize INTS_PER_VECTOR=2 case -------
*/
 
#if INTS_PER_VECTOR==2
 
typedef int BV_TYPE_NAME[2];
 
# define CLEAR_BIT_VECTOR(v) ( (v)[0] = (v)[1] = 0 )
# define SET_BIT_VECTOR(v) ( (v)[0] = (v)[1] = -1 ) /* 2's comp */
# define ASSIGN_BIT_VECTOR(v1,v2) \
( (v1)[0] = (v2)[0], (v1)[1] = (v2)[1] )
 
#else
 
/*
------------- general case -------------------
*/
 
typedef int BV_TYPE_NAME[INTS_PER_VECTOR];
 
# define CLEAR_BIT_VECTOR(v) ( ClearBitVector(INTS_PER_VECTOR, v) )
# define SET_BIT_VECTOR(v) ( SetBitVector(INTS_PER_VECTOR, v) )
# define ASSIGN_BIT_VECTOR(v1,v2) \
( AssignBitVector(INTS_PER_VECTOR, v1, v2) )
 
#endif
 
 
#define BIT_SET(bit, v) \
( (v[bit>>BV_INDEX_SHIFT]) |= (1 << (bit&BV_INDEX_MASK)) )
 
#define BIT_CLEAR(bit, v) \
( (v[bit>>BV_INDEX_SHIFT]) &= ~(1 << (bit&BV_INDEX_MASK)) )
 
#define IS_SET(bit, v) \
( ((v[bit>>BV_INDEX_SHIFT]) >> (bit&BV_INDEX_MASK)) & 01 )
 
#endif
 
/* TODO: make efficient */

/* Multi-word boolean operations, out of line for the general case */
#define OR(v, v1, v2) ( BitVectorOr((v), (v1), (v2), INTS_PER_VECTOR) )
#define AND(v, v1, v2) ( BitVectorAnd((v), (v1), (v2), INTS_PER_VECTOR) )
#define NO_BITS_SET(v) ( BitVectorNoBitsSet( (v), INTS_PER_VECTOR ) )

/* Out-of-line helpers (implemented elsewhere).  K&R-style
   declarations; the intended parameters are noted in the comments. */
int bitcount(); /* max, bv */
char *BitVectorToString(); /* max, bv */
void StringToBitVector(); /* string, max, bv */
int BitVectorDeQ(); /* element = BitVectorDeQ(max, bv); */

int *BitVectorOr();
int *BitVectorAnd();
int BitVectorNoBitsSet();
/scribwidget.c
0,0 → 1,475
/*
* Copyright (c) 2000 Greg Haerr <greg@censoft.com>
* Copyright (c) 2000 Century Software <embedded.centurysoftware.com>
* Scribble Handwriting Recognition for Nano-X!
* Scribble object routines
*
 
*
* Permission to use, copy, modify, distribute, and sell this software and its
* documentation for any purpose is hereby granted without fee, provided that
* the above copyright notice appear in all copies and that both that
* copyright notice and this permission notice appear in supporting
* documentation, and that the name of Keith Packard not be used in
* advertising or publicity pertaining to distribution of the software without
* specific, written prior permission. Keith Packard makes no
* representations about the suitability of this software for any purpose. It
* is provided "as is" without express or implied warranty.
*
* KEITH PACKARD DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE,
* INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN NO
* EVENT SHALL KEITH PACKARD BE LIABLE FOR ANY SPECIAL, INDIRECT OR
* CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE,
* DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
* TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
* PERFORMANCE OF THIS SOFTWARE.
*/
 
#include <stdio.h>
#include <string.h>
#include "scrib.h"
 
static ScribbleRec scrib; /* static object instance*/
static char *curmsg = NULL; /* current status message; NULL => default mode text */

/* classifier files for the three character sets */
static char *cl_name[3] = {DEFAULT_LETTERS_FILE,
DEFAULT_DIGITS_FILE,
DEFAULT_PUNC_FILE};

/* forward declarations for routines defined later in this file */
static int graffiti_load_recognizers(struct graffiti *pg);
static void Recognize (ScribbleWidget w);
static void ShowMode (ScribbleWidget w);
/*
 * Discard any in-progress stroke and per-stroke recognizer state,
 * clear the status message, and repaint the mode indicator.
 */
static void
ResetStroke (ScribbleWidget w)
{
    w->lastchar = 0;
    w->ps.ps_npts = 0;
    w->ps.ps_nstate = 0;
    w->ps.ps_trans = 0;
    w->ps.ps_state = 0;
    curmsg = NULL;
    ShowMode(w);
}
 
/*
 * Draw the entire in-progress stroke as one connected polyline.
 */
static void
DisplayStroke (ScribbleWidget w)
{
GrDrawLines(w->win, w->gc, w->pt, w->ps.ps_npts);
}
 
/*
 * Draw only the newest segment of the stroke: the last two points
 * (or the single point if that is all we have).
 */
static void
DisplayLast (ScribbleWidget w)
{
    int count = w->ps.ps_npts;

    if (count > 2)
        count = 2;
    GrDrawLines(w->win, w->gc, w->pt + (w->ps.ps_npts - count), count);
}
 
/*
 * Append point (x,y) to the current stroke and draw the new segment.
 * Two parallel arrays are kept in ONE malloc block: w->ps.ps_pts
 * (pen_points, for the recognizer) followed by w->pt (GR_POINTs, for
 * drawing) starting right after ppasize pen_points.  Capacity grows
 * in chunks of 100; on allocation failure the point is silently
 * dropped.
 */
static void
AddPoint (ScribbleWidget w, int x, int y)
{
pen_point *ppa;
GR_POINT *pt;
int ppasize;
if (w->ps.ps_npts == w->ppasize)
{
ppasize = w->ppasize + 100;
/* one block: ppasize pen_points, then ppasize GR_POINTs */
ppa = malloc ((sizeof (pen_point) + sizeof (GR_POINT)) * ppasize);
if (!ppa)
return;
pt = (GR_POINT *) (ppa + ppasize);
memcpy (ppa, w->ps.ps_pts, w->ppasize * sizeof (pen_point));
memcpy (pt, w->pt, w->ppasize * sizeof (GR_POINT));
free (w->ps.ps_pts);
w->ps.ps_pts = ppa;
w->pt = pt;
w->ppasize = ppasize;
}
/* record the point in both arrays, then draw the new segment */
ppa = &w->ps.ps_pts[w->ps.ps_npts];
ppa->x = x;
ppa->y = y;
pt = &w->pt[w->ps.ps_npts];
pt->x = x;
pt->y = y;
w->ps.ps_npts++;
DisplayLast (w);
}
 
/* Create and initialize the single scribble widget: reset shift/mode
 * state, load the three recognizers, create the input window (with
 * focus disallowed so keystrokes go to the previously focused app),
 * and set up its GC.  Returns the widget (a pointer to the static
 * instance — there is only ever one). */
ScribbleWidget
create_scribble(void)
{
    ScribbleWidget new = (ScribbleWidget)&scrib;  /* static instance, never freed */
    GR_WM_PROPERTIES props;

    /* Start in lowercase letters mode with no pending shifts. */
    new->capsLock = 0;
    new->puncShift = 0;
    new->tmpShift = 0;
    new->ctrlShift = 0;
    new->curCharSet = CS_LETTERS;
    new->lastchar = 0;
    new->down = GR_FALSE;
    /*new->lastfocusid = 0;*/
    graffiti_load_recognizers (&new->graf);
    /* Point buffers are allocated lazily by AddPoint(). */
    new->ppasize = 0;
    new->ps.ps_pts = 0;
    new->pt = 0;
    new->win = GrNewWindow(GR_ROOT_WINDOW_ID,
        350, 20, 200, 150,
        0, GrGetSysColor(GR_COLOR_APPWINDOW), BLACK);
    /* set title, disallow focus on input window*/
    props.flags = GR_WM_FLAGS_TITLE | GR_WM_FLAGS_PROPS;
    props.props = GR_WM_PROPS_NOFOCUS | GR_WM_PROPS_BORDER |
        GR_WM_PROPS_CAPTION | GR_WM_PROPS_CLOSEBOX;
    props.title = "nxScribble";
    GrSetWMProperties(new->win, &props);

    GrSelectEvents(new->win, GR_EVENT_MASK_BUTTON_DOWN |
        GR_EVENT_MASK_BUTTON_UP | GR_EVENT_MASK_MOUSE_MOTION |
        GR_EVENT_MASK_KEY_DOWN | /*GR_EVENT_MASK_FOCUS_IN |*/
        GR_EVENT_MASK_EXPOSURE | GR_EVENT_MASK_CLOSE_REQ);
    GrMapWindow(new->win);

    new->gc = GrNewGC();
    GrSetGCForeground(new->gc, GrGetSysColor(GR_COLOR_APPTEXT));
    GrSetGCBackground(new->gc, GrGetSysColor(GR_COLOR_APPWINDOW));
    GrSetGCFont(new->gc, GrCreateFont(GR_FONT_OEM_FIXED, 0, NULL));

    ResetStroke (new);  /* also paints the initial mode indicator */
    return new;
}
 
/* Tear down the scribble widget: destroy its window and GC, and
 * release the shared point buffer (pt points into the same block). */
void
destroy_scribble(ScribbleWidget w)
{
    GrDestroyWindow(w->win);
    GrDestroyGC(w->gc);
    free (w->ps.ps_pts);
}
 
/* Expose handler: repaint the mode indicator (the stroke itself is
 * transient and is not redrawn). */
void
Redisplay (ScribbleWidget w)
{
    /*DisplayStroke (w);*/
    ShowMode(w);
}
 
/* Pen-down handler: raise the window, begin a fresh stroke, and record
 * the first point. */
void
ActionStart (ScribbleWidget w, int x, int y)
{
    GrRaiseWindow(w->win);
    ResetStroke (w);
    w->down = GR_TRUE;
    AddPoint (w, x, y);
}
 
/* Pen-motion handler: extend the stroke, but only while the pen is down. */
void
ActionMove (ScribbleWidget w, int x, int y)
{
    if (!w->down)
        return;
    AddPoint (w, x, y);
}
 
/* Pen-up handler: record the final point and hand the completed stroke
 * to the recognizer. */
void
ActionEnd (ScribbleWidget w, int x, int y)
{
    AddPoint (w, x, y);
    w->down = GR_FALSE;
    Recognize (w);
}
 
 
/* Deliver ch to the currently focused window as a key press followed
 * by a key release. */
static void
SendKey(ScribbleWidget w, int ch)
{
    GR_WINDOW_ID focus = GrGetFocus();

    /* FIXME: modifiers are incorrect*/
    GrInjectKeyboardEvent(focus, ch, 0, 0, 1);  /* press */
    GrInjectKeyboardEvent(focus, ch, 0, 0, 0);  /* release */
}
 
/* This procedure is called to initialize pg by loading the three
recognizers, loading the initial set of three classifiers, and
loading & verifying the recognizer extension functions. If the
directory $HOME/.recognizers exists, the classifier files will be
loaded from that directory. If not, or if there is an error, the
default files (directory specified in Makefile) will be loaded
instead. Returns non-zero on success, 0 on failure. (Adapted from
package tkgraf/src/GraffitiPkg.c.) */
 
/* Initialize pg: load the NUM_RECS recognizers, load a classifier file
 * for each (falling back to the default directory when a custom
 * classifier fails), then fetch and validate the LI extension
 * functions (training and get-classes).  Returns 1 on success,
 * 0 on failure.
 * Fix: the original re-tested `fns != NULL` before free() inside
 * branches that are only reachable after fns was already proven
 * non-NULL; those dead checks are removed. */
static int
graffiti_load_recognizers(struct graffiti *pg)
{
    bool usingDefault;
#if 0
    char* homedir;
#endif
    int i;
    rec_fn *fns;

    /* First, load the recognizers... */
    /* call recognizer_unload if an error ? */
    for (i = 0; i < NUM_RECS; i++) {
        /* Load the recognizer itself... */
        pg->rec[i] = recognizer_load(DEFAULT_REC_DIR, rec_name, NULL);
        if (pg->rec[i] == NULL) {
            fprintf(stderr,"Error loading recognizer from %s.", DEFAULT_REC_DIR);
            return 0;
        }
        /* Sanity-check the magic number at the head of the object. */
        if ((* (int *)(pg->rec[i])) != 0xfeed) {
            fprintf(stderr,"Error in recognizer_magic.");
            return 0;
        }
    }

    /* ...then figure out where the classifiers are... */
#if 0
    if ( (homedir = (char*)getenv("HOME")) == NULL ) {
#endif
    strcpy(pg->cldir, REC_DEFAULT_USER_DIR);
    usingDefault = true;
#if 0
    } else {
    strcpy(pg->cldir, homedir);
    strcat(pg->cldir, "/");
    strcat(pg->cldir, CLASSIFIER_DIR);
    usingDefault = false;
    }
#endif

    /* ...then load the classifiers... */
    for (i = 0; i < NUM_RECS; i++) {
        int rec_return;
        char *s;

        rec_return = recognizer_load_state(pg->rec[i], pg->cldir, cl_name[i]);
        if ((rec_return == -1) && (usingDefault == false)) {
            /* Custom classifier failed: fall back to the default file. */
            fprintf(stderr,
                "Unable to load custom classifier file %s/%s.\nTrying default classifier file instead.\nOriginal error: %s\n ",
                pg->cldir, cl_name[i],
                (s = recognizer_error(pg->rec[i])) ? s : "(none)");
            rec_return = recognizer_load_state(pg->rec[i],
                REC_DEFAULT_USER_DIR, cl_name[i]);
        }
        if (rec_return == -1) {
            fprintf(stderr, "Unable to load default classifier file %s.\nOriginal error: %s\n",
                cl_name[i],
                (s = recognizer_error(pg->rec[i])) ? s : "(none)");
            return 0;
        }
    }

    /* We have recognizers and classifiers now. */
    /* Get the vector of LIextension functions.. */
    fns = recognizer_get_extension_functions(pg->rec[CS_LETTERS]);
    if (fns == NULL) {
        fprintf(stderr, "LI Recognizer Training:No extension functions!");
        return 0;
    }
    /* ... and make sure the training & get-classes functions are okay.
     * From here on fns is known non-NULL, so it is freed unconditionally
     * on every exit path. */
    if( (pg->rec_train = (li_recognizer_train)fns[LI_TRAIN]) == NULL ) {
        fprintf(stderr,
            "LI Recognizer Training:li_recognizer_train() not found!");
        free(fns);
        return 0;
    }
    if( (pg->rec_getClasses = (li_recognizer_getClasses)fns[LI_GET_CLASSES]) == NULL ) {
        fprintf(stderr,
            "LI Recognizer Training:li_recognizer_getClasses() not found!");
        free(fns);
        return 0;
    }
    free(fns);
    return 1;
}
 
/* Set the status message shown next to the mode indicator.  The
 * pointer is stored, not copied, so callers must pass string literals
 * or other storage that outlives the message. */
static void
msg(char *str)
{
    curmsg = str;
}
 
/* Repaint the mode indicator: the current shift/charset mode plus
 * either the pending status message or the last recognized character.
 * Fix: the original used sprintf() into a fixed 32-byte buffer; a long
 * status string could overflow it.  snprintf() bounds every write. */
static void
ShowMode (ScribbleWidget w)
{
    char *mode;
    char buf[32];

    /* Priority order matters: ctrl > punctuation > digits > caps > shift. */
    if (w->ctrlShift)
        mode = "^C";
    else if (w->puncShift)
        mode = "#&^";
    else if (w->curCharSet == CS_DIGITS)
        mode = "123";
    else if (w->capsLock)
        mode = "ABC";
    else if (w->tmpShift)
        mode = "Abc";
    else
        mode = "abc";

    if (curmsg)
        snprintf(buf, sizeof(buf), "%s %s", mode, curmsg);
    else if (w->lastchar > ' ')  /* only printable characters */
        snprintf(buf, sizeof(buf), "%s %c", mode, w->lastchar);
    else snprintf(buf, sizeof(buf), "%s", mode);
    GrClearWindow(w->win, GR_FALSE);
    GrText(w->win, w->gc, 70, 0, buf, strlen(buf), GR_TFTOP);
}
 
/* Run the single stroke ps through the recognizer for charset and
 * return the recognized character code (-1 on failure).  The
 * alternatives list returned by the recognizer is discarded. */
static char
do_recognize(struct graffiti *pg, pen_stroke *ps, int charset)
{
    int nalt;
    rec_alternative *alts;
    int code = recognizer_translate(pg->rec[charset], 1, ps, false,
                                    &nalt, &alts);

    if (code != -1)
        delete_rec_alternative_array(nalt, alts, false);
    return code;
}
 
typedef int KeySym;
 
/* Classify the just-completed stroke and act on the result: special
 * class characters toggle modes (caps lock, num lock, ctrl, shift,
 * punctuation shift), everything else is mapped to a keysym and
 * injected into the focused window via SendKey(). */
static void
Recognize (ScribbleWidget w)
{
    struct graffiti *graf = &w->graf;
    pen_stroke *ps = &w->ps;
    KeySym keysym;
    GR_BOOL control;
    char c;
    if (ps->ps_npts == 0)
        return;  /* nothing drawn */
    w->lastchar = 0;

    /* puncShift overrides the current charset for one stroke. */
    c = do_recognize(graf, ps, w->puncShift ? CS_PUNCTUATION : w->curCharSet);

    /*printf("class %c (%d)\n", c, c);*/

    switch (c) {
    case '\000':  /* recognizer could not classify the stroke */
        msg("[Error]");
        w->tmpShift = 0;
        w->puncShift = 0;
        w->ctrlShift = 0;
        ShowMode (w);
        break;
    case 'L': /* caps lock */
        msg("[Capslock]");
        w->capsLock = !w->capsLock;
        ShowMode (w);
        break;
    case 'N': /* numlock */
        /* Toggle between the digits and letters charsets. */
        if (w->curCharSet == CS_DIGITS) {
            w->curCharSet = CS_LETTERS;
            msg("[Letters]");
        } else {
            w->curCharSet = CS_DIGITS;
            msg("[Digits]");
        }
        w->tmpShift = 0;
        w->puncShift = 0;
        w->ctrlShift = 0;
        ShowMode (w);
        break;
    case 'P': /* usually puncshift, but we'll make it CTRL */
        msg("[Ctrlshift]");
        w->ctrlShift = !w->ctrlShift;
        w->tmpShift = 0;
        w->puncShift = 0;
        ShowMode (w);
        break;
    case 'S': /* shift */
        w->tmpShift = !w->tmpShift;
        if (w->tmpShift) msg("[Shift]"); else msg("[Unshift]");
        w->puncShift = 0;
        w->ctrlShift = 0;
        ShowMode (w);
        break;
    default:
        /* An ordinary character: translate class codes to keysyms. */
        control = GR_FALSE;
        switch (c) {
        case 'A':  /* space gesture */
            msg("[Space]");
            keysym = ' ';
            break;
        case 'B':  /* backspace gesture */
            msg("[Backspace]");
            keysym = '\b';
            break;
        case 'R':  /* return gesture */
            msg("[Return]");
            keysym = '\r';
            break;
        case '.':
            /* First '.' arms puncShift; a second one emits a literal dot. */
            if (! w->puncShift) {
                msg("[Puncshift]");
                w->puncShift = 1;
                w->ctrlShift = 0;
                w->tmpShift = 0;
                ShowMode (w);
                return;
            } else {
                w->puncShift = 0;
                ShowMode (w);
            }
            keysym = '.';
            break;
        default:
            /* Remaining uppercase class codes are unassigned. */
            if ('A' <= c && c <= 'Z') {
                msg("[Notimp]");
                ShowMode (w);
                return;
            }
            keysym = (KeySym) c;
            if (w->ctrlShift)
            {
                control = GR_TRUE;
                w->ctrlShift = 0;
                /* Control applies only to a-z. */
                if (c < 'a' || 'z' < c)
                {
                    ShowMode (w);
                    return;
                }
            }
            else if ((w->capsLock && !w->tmpShift) ||
                (!w->capsLock && w->tmpShift))
            {
                /* capsLock XOR tmpShift -> uppercase */
                keysym = keysym-'a'+'A';
            }
            w->tmpShift = 0;
            w->puncShift = 0;
            ShowMode(w);
        }
        if (control)
            keysym &= 0x1f;  /* map to control character */
        w->lastchar = keysym;
        ShowMode(w);
        SendKey(w, keysym);
        break;
    }
}
/hre_api.c
0,0 → 1,1460
/*
* hre_api.c: Implementation of HRE API
 * Author: James Kempf
* Created On: Wed Dec 9 13:49:14 1992
* Last Modified By: James Kempf
* Last Modified On: Fri Sep 23 13:49:04 1994
* Update Count: 137
* Copyright (c) 1994 by Sun Microsystems Computer Company
* All rights reserved.
*
* Use and copying of this software and preparation of
* derivative works based upon this software are permitted.
* Any distribution of this software or derivative works
* must comply with all applicable United States export control
* laws.
*
* This software is made available as is, and Sun Microsystems
* Computer Company makes no warranty about the software, its
* performance, or its conformity to any specification
*/
 
 
#include <sys/types.h>
#ifdef ELX
#include <vxWorks.h>
#endif
#include <sys/stat.h>
#include <errno.h>
#include <stdio.h>
#include <string.h>
#include <locale.h>
#include <stdlib.h>
/*#include <libintl.h>*/
#include <hre_internal.h> /* includes hre.h */
 
/* ari -- prototype for rii function */
recognizer __recognizer_internal_initialize(rec_info* ri);
 
/*Version number of API.*/
 
char* REC_VERSION = "2.0";
 
/*Domain name for internationalized text.*/
 
#define INTL_DOMAIN "recognition_manager"
 
/* XXX -- Intl Hack -- Jay & Ari */
#define dgettext(domain, msg) (msg)
#define bindtextdomain(dirname, domain)
 
/*
* These magic numbers are used to ensure the integrity of the
* recognizer structure.
*/
 
 
#define REC_MAGIC 0xfeed
#define REC_END_MAGIC 0xbeef
 
/*Check the recognizer for validity*/
 
#define RI_CHECK_MAGIC(rec) \
( (rec != NULL) && \
(((recognizer)rec)->recognizer_magic == REC_MAGIC) && \
(((recognizer)rec)->recognizer_end_magic == REC_END_MAGIC) &&\
(((recognizer)rec)->recognizer_version == REC_VERSION) )
 
/*The name of the initialization & finalization functions.*/
 
/* static char rii_name[] = "__recognizer_internal_initialize";
static char rif_name[] = "__recognizer_internal_finalize"; */
 
/*User home directory for recognizer info.*/
/* ari -- changed USERRECHOME from ".recognizers" */
#define HOME "HOME"
#define USERRECHOME ".classifiers"
 
/*Local functions*/
 
#if 0
static char* shared_library_name(char* directory,char* locale,char* name);
#endif
static rec_info* make_rec_info(char* directory,char* name,char** subset);
static void delete_rec_info(rec_info* ri);
#if 0
static int check_for_user_home();
#endif
static void intl_initialize();
 
static void cleanup_rec_element(rec_element* re,bool delete_points_p);
 
/*The last error.*/
 
static char* the_last_error = NULL;
 
/* malloc() wrapper that never returns NULL: on allocation failure it
 * reports via error() and terminates the process with status 2. */
static char *safe_malloc (int nbytes)
{
    char *mem = malloc(nbytes);

    if (mem != NULL)
        return mem;

    error("malloc failure");
    exit(2);
}
 
 
/*
* Implementation of API functions
*/
 
/*
* recognizer_load - Load the recognizer matching the rec_info struct.
* If name is not null, then load the recognizer having that name. Returns
* the recognizer object, or null if it can't load the recognizer, and
* sets errno to indicate why.
*/
 
/* recognizer_load - Load the recognizer matching the rec_info struct.
 * If name is not null, then load the recognizer having that name. Returns
 * the recognizer object, or null if it can't load the recognizer, and
 * sets errno to indicate why.
 *
 * Flow: one-time i18n init -> build rec_info -> let the (statically
 * linked) recognizer create itself -> verify every required function
 * pointer is set before accepting the object. */
recognizer
recognizer_load(char* directory,char* name,char** subset)
{
    recognizer rec; /*the recognizer*/
#if 0
    recognizer_internal_initialize rii; /*the initialization function*/
#endif
    rec_info* rinf; /*rec_info for recognizer information*/
    static bool intl_init = false; /*true if recog. manager initted.*/

    /* One-time initialization of message catalogs. */
    if( intl_init == false ) {
        intl_init = true;

        intl_initialize();
    }

    /*The name takes precedence.*/
    rinf = make_rec_info(directory,name,subset);
    if (rinf == NULL) {
        the_last_error =
            dgettext(INTL_DOMAIN,
                "Ran out of memory during prelinking initialization.");
        return((recognizer)NULL);
    }
    /*fprintf(stderr, "Got past make_rec_info.\n");*/

    /*Let recognition code create recognizer and initialize*/
    rec = __recognizer_internal_initialize(rinf);
    if (rec == NULL) {
        return((recognizer)NULL);
    }
    /*fprintf(stderr, "Did rii.\n");*/
    /*Check whether it's been correctly initialized*/

    /* Every entry point must be filled in by the recognizer; a single
       missing pointer invalidates the whole object. */
    if( rec->recognizer_load_state == NULL ||
        rec->recognizer_save_state == NULL ||
        rec->recognizer_load_dictionary == NULL ||
        rec->recognizer_save_dictionary == NULL ||
        rec->recognizer_free_dictionary == NULL ||
        rec->recognizer_add_to_dictionary == NULL ||
        rec->recognizer_delete_from_dictionary == NULL ||
        rec->recognizer_error == NULL ||
        rec->recognizer_set_context == NULL ||
        rec->recognizer_get_context == NULL ||
        rec->recognizer_clear == NULL ||
        rec->recognizer_get_buffer == NULL ||
        rec->recognizer_set_buffer == NULL ||
        rec->recognizer_translate == NULL ||
        rec->recognizer_get_extension_functions == NULL ||
        rec->recognizer_get_gesture_names == NULL ||
        rec->recognizer_set_gesture_action == NULL
        ) {

        recognizer_unload(rec);
        fprintf(stderr, "Unloading b/c null function pointer.\n");
        the_last_error =
            dgettext(INTL_DOMAIN,
                "One or more recognizer function pointers is NULL.");
        return((recognizer)NULL);
    }


    /*Set the rec_info structure.*/

    rec->recognizer_info = rinf;

    /*Check whether home directory is there for recognizer info.*/

    /*
     * ari -- don't bother. We're not going to load from each user's
     * home directory at this point. Instead, we'll use a stupid
     * little a-b-c file because it loads FAST.
     *
     * if( check_for_user_home() < 0 ) {
     * recognizer_unload(rec);
     * return((recognizer)NULL);
     * }
     */
    /*We got it!*/
    /*fprintf(stderr, "Done.\n");*/

    return(rec);
}
 
/*
* recognizer_unload - Unload the recognizer.
*/
 
/* recognizer_unload - Unload the recognizer.  Returns 0 on success,
 * -1 if rec fails the magic-number validity check. */
int
recognizer_unload(recognizer rec)
{
#if 0
    recognizer_internal_finalize rif;
#endif
    if( !RI_CHECK_MAGIC(rec) ) {
        the_last_error = dgettext(INTL_DOMAIN,"Bad recognizer object.");
        return(-1);
    }

    /* Finalization is statically linked, not looked up dynamically. */
    __recognizer_internal_finalize(rec);
    return(0);
}
 
/*
* recognizer_load_state-Get any recognizer state associated with name
* in dir. Note that name may not be simple file name, since
* there may be more than one file involved. Return 0 if successful,
* -1 if not.
*/
 
/* recognizer_load_state - Load recognizer state stored under name in
 * dir (name may map to several files).  Returns 0 on success, -1 on
 * failure or if rec is invalid. */
int recognizer_load_state(recognizer rec,char* dir,char* name)
{
    if( RI_CHECK_MAGIC(rec) )
        return rec->recognizer_load_state(rec,dir,name);

    the_last_error = dgettext(INTL_DOMAIN,"Bad recognizer object.");
    return(-1);
}
 
/*
* recognizer_save_state-Save any recognizer state to name
* in dir. Note that name may not be a simple file name, since
* there may be more than one file involved. Return 0 if successful,
* -1 if not.
*/
 
/* recognizer_save_state - Save recognizer state under name in dir
 * (name may map to several files).  Returns 0 on success, -1 on
 * failure or if rec is invalid. */
int recognizer_save_state(recognizer rec,char* dir,char* name)
{
    if( RI_CHECK_MAGIC(rec) )
        return rec->recognizer_save_state(rec,dir,name);

    the_last_error = dgettext(INTL_DOMAIN,"Bad recognizer object.");
    return(-1);
}
 
/*
* recognizer_load_dictionary-Load dictionary, return pointer
* to it, or NULL if error.
*/
 
/* recognizer_load_dictionary - Load the dictionary stored under name
 * in dir.  Returns the wordset, or NULL on error / invalid rec. */
wordset recognizer_load_dictionary(recognizer rec,char* dir,char* name)
{
    if( RI_CHECK_MAGIC(rec) )
        return rec->recognizer_load_dictionary(rec,dir,name);

    the_last_error = dgettext(INTL_DOMAIN,"Bad recognizer object.");
    return(NULL);
}
 
/*
* recognizer_save_dictionary-Save the dictionary to the file, return 0 if
* OK, -1 if error.
*/
 
/* recognizer_save_dictionary - Save dict under name in dir.
 * Returns 0 on success, -1 on error / invalid rec. */
int recognizer_save_dictionary(recognizer rec,char* dir,char* name,wordset dict)
{
    if( RI_CHECK_MAGIC(rec) )
        return rec->recognizer_save_dictionary(rec,dir,name,dict);

    the_last_error = dgettext(INTL_DOMAIN,"Bad recognizer object.");
    return(-1);
}
 
/*
* recognizer_free_dictionary-Free the dictionary, return 0 if
* OK, -1 if error.
*/
 
/* recognizer_free_dictionary - Release dict.  Returns 0 on success,
 * -1 on error / invalid rec. */
int recognizer_free_dictionary(recognizer rec,wordset dict)
{
    if( RI_CHECK_MAGIC(rec) )
        return rec->recognizer_free_dictionary(rec,dict);

    the_last_error = dgettext(INTL_DOMAIN,"Bad recognizer object.");
    return(-1);
}
 
/*
* recognizer_add_to_dictionary-Add word to the dictionary,
* return 0 if OK, -1 if error.
*/
 
 
/* recognizer_add_to_dictionary - Add word to dict.  Returns 0 on
 * success, -1 on error / invalid rec. */
int recognizer_add_to_dictionary(recognizer rec,letterset* word,wordset dict)
{
    if( RI_CHECK_MAGIC(rec) )
        return rec->recognizer_add_to_dictionary(rec,word,dict);

    the_last_error = dgettext(INTL_DOMAIN,"Bad recognizer object.");
    return(-1);
}
 
/*
* recognizer_delete_from_dictionary-Delete word from the dictionary,
* return 0 if OK, -1 if error.
*/
 
/* recognizer_delete_from_dictionary - Remove word from dict.
 * Returns 0 on success, -1 on error / invalid rec. */
int
recognizer_delete_from_dictionary(recognizer rec,letterset* word,wordset dict)
{
    if( RI_CHECK_MAGIC(rec) )
        return rec->recognizer_delete_from_dictionary(rec,word,dict);

    the_last_error = dgettext(INTL_DOMAIN,"Bad recognizer object.");
    return(-1);
}
 
/*
 * recognizer_get_info-Get a pointer to the rec_info
* giving the locales and subsets supported by the recognizer
* and the shared library pathname.
*/
 
/* recognizer_get_info - Return the recognizer's rec_info (locales,
 * subsets, pathname), or NULL if rec is invalid. */
const rec_info*
recognizer_get_info(recognizer rec)
{
    if( RI_CHECK_MAGIC(rec) )
        return rec->recognizer_info;

    the_last_error = dgettext(INTL_DOMAIN,"Bad recognizer object.");
    return((rec_info*)NULL);
}
 
/*
* recognizer_manager_version-Return the version number string of the
* recognition manager.
*/
 
/* recognizer_manager_version - Return the recognition manager's
 * version string, or NULL if rec is invalid. */
const char* recognizer_manager_version(recognizer rec)
{
    if( RI_CHECK_MAGIC(rec) )
        return rec->recognizer_version;

    the_last_error = dgettext(INTL_DOMAIN,"Bad recognizer object.");
    return(NULL);
}
/*
* recognizer_error-Return the last error message, or NULL if none.
*/
 
/* recognizer_error - Return and clear the last pending error message.
 * A bad rec with no pending message yields the generic message; a
 * pending message (however set) takes precedence; otherwise the
 * recognizer itself is asked. */
char* recognizer_error(recognizer rec)
{
    if( !RI_CHECK_MAGIC(rec) && the_last_error == NULL )
        return(dgettext(INTL_DOMAIN,"Bad recognizer object."));

    if( the_last_error != NULL ) {
        char* pending = the_last_error;

        the_last_error = NULL;  /* one-shot: consume the message */
        return(pending);
    }

    return(rec->recognizer_error(rec));
}
 
/*
* recognizer_set_context-Set the recognition context for translation.
* Return 0 if successful, -1 if error.
*/
 
/* recognizer_set_context - Set the recognition context used for
 * translation.  Returns 0 on success, -1 on error / invalid rec. */
int recognizer_set_context(recognizer rec,rc* rec_xt)
{
    if( RI_CHECK_MAGIC(rec) )
        return rec->recognizer_set_context(rec,rec_xt);

    the_last_error = dgettext(INTL_DOMAIN,"Bad recognizer object.");
    return(-1);
}
 
/*
 * recognizer_get_context-Get the recognition context for translation.
* If none or error, return NULL.
*/
 
/* recognizer_get_context - Get the recognition context for
 * translation.  Returns NULL if none or on error.
 * BUG FIX: the original ended with `return(recognizer_get_context(rec))`
 * — an unconditional call to ITSELF, i.e. infinite recursion and a
 * stack overflow on any valid recognizer.  It must dispatch through
 * the recognizer's function pointer, as every sibling wrapper does. */
rc* recognizer_get_context(recognizer rec)
{

    /*Make sure magic numbers right.*/

    if( !RI_CHECK_MAGIC(rec) ) {
        the_last_error = dgettext(INTL_DOMAIN,"Bad recognizer object.");
        return(NULL);
    }

    /*Do the function.*/

    return(rec->recognizer_get_context(rec));
}
 
/*
* recognizer_clear-Clear buffer and recognition context.
* Return 0 if success, else -1.
*/
 
/* recognizer_clear - Clear the stroke buffer and recognition context;
 * delete_points_p grants permission to free the client's points.
 * Returns 0 on success, -1 on error / invalid rec. */
int recognizer_clear(recognizer rec,bool delete_points_p)
{
    if( RI_CHECK_MAGIC(rec) )
        return rec->recognizer_clear(rec,delete_points_p);

    the_last_error = dgettext(INTL_DOMAIN,"Bad recognizer object.");
    return(-1);
}
 
/*recognizer_get_buffer-Get stroke buffer. Return 0 if success, else -1.*/
 
 
/* recognizer_get_buffer - Fetch the stroke buffer into *nstrokes /
 * *strokes.  Returns 0 on success, -1 on error / invalid rec. */
int recognizer_get_buffer(recognizer rec, u_int* nstrokes,pen_stroke** strokes)
{
    if( RI_CHECK_MAGIC(rec) )
        return rec->recognizer_get_buffer(rec,nstrokes,strokes);

    the_last_error = dgettext(INTL_DOMAIN,"Bad recognizer object.");
    return(-1);
}
 
/*
* recognizer_set_buffer-Set stroke buffer to arg. Return 0 if success, else
* return -1.
*/
 
/* recognizer_set_buffer - Replace the stroke buffer with the given
 * strokes.  Returns 0 on success, -1 on error / invalid rec. */
int recognizer_set_buffer(recognizer rec,u_int nstrokes,pen_stroke* strokes)
{
    if( RI_CHECK_MAGIC(rec) )
        return rec->recognizer_set_buffer(rec,nstrokes,strokes);

    the_last_error = dgettext(INTL_DOMAIN,"Bad recognizer object.");
    return(-1);
}
 
/*
* recognizer_translate-Translate the strokes in the current context, including
* buffered strokes. If nstrokes == 0 or strokes == NULL, return
* translation of stroke buffer.
*/
 
/* recognizer_translate - Translate the strokes in the current context,
 * including buffered strokes.  If nstrokes == 0 or strokes == NULL,
 * translate the stroke buffer.  Results are returned via *nret/*ret.
 * Returns the recognizer's result code, or -1 on an invalid rec.
 * BUG FIX: on the invalid-rec path the original did
 *     char msg[80];  ...  the_last_error = dgettext(INTL_DOMAIN, msg);
 * storing a pointer derived from an UNINITIALIZED stack buffer —
 * undefined behavior and garbage error text.  Use the standard
 * "Bad recognizer object." message like every other entry point. */
int recognizer_translate(recognizer rec,
    u_int nstrokes,
    pen_stroke* strokes,
    bool correlate_p,
    int* nret,
    rec_alternative** ret)
{
    int retval;
    /*Make sure magic numbers right.*/

    if( !RI_CHECK_MAGIC(rec) ) {
        the_last_error = dgettext(INTL_DOMAIN,"Bad recognizer object.");
        return(-1);
    }

    /* ari */
    /* {
     * u_int i;
     * pen_stroke ari_pstr;
     * pen_point* ari_pts;
     * int ari;
     * for (i = 0; i < nstrokes; i++) {
     * ari_pstr = strokes[i];
     * ari_pts = ari_pstr.ps_pts;
     * fprintf(stderr, "\nrecognizer_translate: ari_pts = %ld, sizeof(Time) = %d, sizeof(ari_pts[0] = %d, %d points are...\n", ari_pts, sizeof(Time), sizeof(ari_pts[0]), ari_pstr.ps_npts);
     * for (ari = 0; ari < ari_pstr.ps_npts; ari++)
     * fprintf(stderr, "%ld -- (%d, %d) ", ari_pts[ari], ari_pts[ari].x, ari_pts[ari].y);
     * }
     * }
     */
    /*Do the function.*/
    /* ari -- this is calling cmu_recognizer_translate */
    retval = rec->recognizer_translate(rec,
        nstrokes,
        strokes,
        correlate_p,
        nret,
        ret);
    return (retval);
}
 
 
/*
* recognizer_get_extension_functions-Return a null terminated array
* of functions providing extended functionality. Their interfaces
* will change depending on the recognizer.
*/
 
/* recognizer_get_extension_functions - Return the recognizer's
 * NULL-terminated array of extension functions, or NULL if rec is
 * invalid. */
rec_fn* recognizer_get_extension_functions(recognizer rec)
{
    if( RI_CHECK_MAGIC(rec) )
        return rec->recognizer_get_extension_functions(rec);

    the_last_error = dgettext(INTL_DOMAIN,"Bad recognizer object.");
    return((rec_fn*)NULL);
}
 
 
/*
* recognizer_get_gesture_names - Return a null terminated array of
* gesture name strings.
*/
 
/* recognizer_get_gesture_names - Return the recognizer's
 * NULL-terminated array of gesture name strings, or NULL if rec is
 * invalid. */
char**
recognizer_get_gesture_names(recognizer rec)
{
    if( RI_CHECK_MAGIC(rec) )
        return rec->recognizer_get_gesture_names(rec);

    the_last_error = dgettext(INTL_DOMAIN,"Bad recognizer object.");
    return(NULL);
}
 
/*
* recognizer_set_gesture_action-Set the action function for the gesture.
*/
 
/* recognizer_train_gestures - Set the action function fn (with client
 * data wsinfo) for the named gesture.  Returns (xgesture)-1 if rec is
 * invalid; otherwise whatever the recognizer's set_gesture_action
 * entry point returns. */
xgesture
recognizer_train_gestures(recognizer rec,char* name,xgesture fn,void* wsinfo)
{
    if( RI_CHECK_MAGIC(rec) )
        return rec->recognizer_set_gesture_action(rec,name,fn,wsinfo);

    the_last_error = dgettext(INTL_DOMAIN,"Bad recognizer object.");
    return((xgesture)-1);
}
 
/*
* Local functions.
*/
 
/*
* shared_library_name-Get the full pathname to the shared library,
* based on the recognizer name and the environment.
*/
 
 
#if 0
/* (Dead code: compiled out by the surrounding #if 0.)
 * Build the full pathname of the recognizer shared library as either
 * "directory/name" (when directory is given) or "dir/locale/name"
 * (dir taken from the RECHOME environment variable).
 * NOTE(review): on the fallback path dir is set to the string literal
 * "REC_DEFAULT_HOME_DIR", not the macro's value — looks like the
 * quotes are erroneous; confirm against hre.h before re-enabling. */
static char* shared_library_name(char* directory,char* locale,char* name)
{
    char* ret = NULL;
    int len = strlen(name);

    /*If directory is there, it takes precedence.*/

    if( directory != NULL ) {
        /* +2: one for '/', one for the terminating NUL. */
        ret = (char*)safe_malloc(strlen(directory) + len + 2);
        strcpy(ret,directory);
        strcat(ret,"/");
        strcat(ret,name);

    }
    else {
        char* dir = NULL;

        /*First try the environment variable.*/

        if( (dir = getenv(RECHOME)) == NULL ) {
            dir = "REC_DEFAULT_HOME_DIR";

        }

        /* +3: two '/' separators plus the terminating NUL. */
        ret = (char*)safe_malloc(strlen(dir) + strlen(locale) + len + 3);
        /*Form the pathname.*/
        strcpy(ret,dir);
        strcat(ret,"/");
        strcat(ret,locale);
        strcat(ret,"/");
        strcat(ret,name);

    }

    return(ret);
}
#endif
 
/*
 * intl_initialize-Initialize the internationalization of messages for
* the recognition manager.
*/
 
/* Bind the recognition manager's message-catalog domain to the
 * recognizer home directory (from the RECHOME environment variable).
 * With the dgettext/bindtextdomain stub macros above, this is
 * effectively a no-op.
 * NOTE(review): the fallback assigns the string literal
 * "REC_DEFAULT_HOME_DIR" rather than the macro's value — the quotes
 * look erroneous; confirm the intended default against hre.h. */
static void intl_initialize()
{
    char* dirname;

    /*Get recognizer home directory name from environment.*/

    if( (dirname = getenv(RECHOME)) == NULL ) {
        dirname = "REC_DEFAULT_HOME_DIR";
    }

    /*Bind the text domain.*/

    bindtextdomain(dirname,INTL_DOMAIN);
}
 
 
/*make_rec_info-Create a rec_info structure*/
 
/* make_rec_info - Create a rec_info structure recording the locale
 * (from the LANG environment variable, else the default) and a deep
 * copy of the NULL-terminated subset string array.  Returns NULL on
 * allocation failure (after freeing partial state).
 * FIX: the original did locale = strdup(REC_DEFAULT_LOCALE) and then
 * strdup'd locale again into ri_locale, leaking the first copy; the
 * default is now used directly and copied exactly once. */
static rec_info* make_rec_info(char* directory,char* name,char** subset)
{
    int i,len;
    rec_info* ri;
    char* locale;

    ri = (rec_info*)safe_malloc(sizeof(rec_info));
    ri->ri_locale = NULL;
    ri->ri_name = NULL;
    ri->ri_subset = NULL;

    /*Get locale*/

    if( (locale = getenv(LANG)) == NULL ) {
        locale = REC_DEFAULT_LOCALE;  /* copied by the strdup below */
    }

    if( (ri->ri_locale = strdup(locale)) == NULL ) {
        delete_rec_info(ri);
        return(NULL);
    }

    /*Get shared library pathname.*/

    /*
     * if( (ri->ri_name = shared_library_name(directory,locale,name)) == NULL ) {
     * delete_rec_info(ri);
     * return(NULL);
     * }
     */

    /*Initialize the subset information.*/

    if( subset != NULL ) {
        /*Count the subset strings (len starts at 1: subset[0] is assumed).*/

        for( len = 1; subset[len] != NULL; len++ ) ;
        /*Copy the subset strings.*/
        ri->ri_subset = (char**)safe_malloc((len +1)*sizeof(char*));
        for( i = 0; i < len; i++ ) {
            if( subset[i] != NULL ) {
                if( (ri->ri_subset[i] = strdup(subset[i])) == NULL ) {
                    delete_rec_info(ri);
                    return(NULL);
                }
            } else {
                ri->ri_subset[i] = subset[i];
            }
        }

        ri->ri_subset[i] = NULL;  /* NULL terminator */

    } else {

        ri->ri_subset = NULL;
    }
    return(ri);
}
 
/* Free a rec_info and everything it owns (locale string and each
 * subset string plus the subset array).  NULL-safe. */
static void delete_rec_info(rec_info* ri)
{
    if( ri == NULL )
        return;

    if( ri->ri_locale != NULL )
        free(ri->ri_locale);
    /*
     * if( ri->ri_name != NULL ) {
     * free(ri->ri_name);
     * }
     */
    if( ri->ri_subset != NULL ) {
        int i;
        for( i = 0; ri->ri_subset[i] != NULL; i++)
            free(ri->ri_subset[i]);
        free(ri->ri_subset);
    }
    free(ri);
}
 
/*check_for_user_home-Check whether USERRECHOME has been created.*/
 
#if 0
/* (Dead code: compiled out by the surrounding #if 0.)
 * Ensure $HOME/USERRECHOME exists, creating it (mode 777 before
 * umask) if necessary.  Returns 0 on success, -1 if HOME is unset or
 * the directory cannot be created (EEXIST is treated as success). */
static int check_for_user_home()
{
    char* homedir = getenv(HOME);
    char* rechome = NULL;

    if( homedir == NULL ) {
        the_last_error = "Home environment variable HOME not set.";
        return(-1);
    }

    /* +2: one for '/', one for the terminating NUL. */
    rechome = (char*)safe_malloc(strlen(homedir) + strlen(USERRECHOME) + 2);

    /*Form name.*/

    strcpy(rechome,homedir);
    strcat(rechome,"/");
    strcat(rechome,USERRECHOME);

    /*Create directory.*/

    if( mkdir(rechome,S_IRWXU | S_IRWXG | S_IRWXO) < 0 ) {

        /*If errno is EEXIST, then OK.*/

        if( errno != EEXIST ) {
            the_last_error = "Error during creation of USERRECHOME.";
            free(rechome);
            return(-1);
        }
    }

    free(rechome);

    return(0);
}
#endif
 
/*
* Constructor functions for making structures.
*
* The general philosophy here is that we control all memory
* in connected data structures, *except* for pen_point arrays.
* There are likely to be lots and lots of points, they are likely
* to come from the window system; so if we wanted to control them,
* we would have to copy which would be slow. We require the client
* to deal with them directly, or the client can give us permission
* to delete them.
*/
 
/*
* recognizer
*/
 
 
/* Allocate a recognizer object: set the validity magic numbers,
 * version string and rec_info, and NULL every function pointer (the
 * concrete recognizer fills them in; recognizer_load() rejects the
 * object if any remain NULL). */
recognizer make_recognizer(rec_info* rif)
{
    recognizer rec;
    /*Allocate it.*/

    rec = (recognizer)safe_malloc(sizeof(*rec));
    /* Magic numbers bracket the struct for integrity checking. */
    rec->recognizer_magic = REC_MAGIC;
    rec->recognizer_version = REC_VERSION;
    rec->recognizer_info = rif;
    rec->recognizer_specific = NULL;
    rec->recognizer_end_magic = REC_END_MAGIC;
    /* Entry points: all NULL until the recognizer installs its own. */
    rec->recognizer_load_state = NULL;
    rec->recognizer_save_state = NULL;
    rec->recognizer_load_dictionary = NULL;
    rec->recognizer_save_dictionary = NULL;
    rec->recognizer_free_dictionary = NULL;
    rec->recognizer_add_to_dictionary = NULL;
    rec->recognizer_delete_from_dictionary = NULL;
    rec->recognizer_error = NULL;
    rec->recognizer_set_context = NULL;
    rec->recognizer_get_context = NULL;
    rec->recognizer_clear = NULL;
    rec->recognizer_get_buffer = NULL;
    rec->recognizer_set_buffer = NULL;
    rec->recognizer_translate = NULL;
    rec->recognizer_get_extension_functions = NULL;
    rec->recognizer_get_gesture_names = NULL;
    rec->recognizer_set_gesture_action = NULL;
    return(rec);
}
 
/* Free a recognizer object together with its rec_info.  NULL-safe. */
void delete_recognizer(recognizer rec)
{

    if( rec == NULL )
        return;

    if( rec->recognizer_info != NULL )
        delete_rec_info(rec->recognizer_info);
    free(rec);
}
 
/*
* rec_alternative
*/
 
/* Allocate an array of size rec_alternatives, each initialized to an
 * empty element (type REC_NONE, no result, no children). */
rec_alternative* make_rec_alternative_array(u_int size)
{
    rec_alternative* arr;
    int i;

    arr = (rec_alternative*) safe_malloc(size * sizeof(rec_alternative));

    for( i = 0; i < size; i++ ) {
        rec_alternative* a = &arr[i];

        a->ra_elem.re_type = REC_NONE;
        a->ra_elem.re_result.aval = NULL;
        a->ra_elem.re_conf = 0;
        a->ra_nalter = 0;
        a->ra_next = NULL;
    }

    return(arr);
}
 
/* Attach a freshly allocated array of nelem child alternatives to ra.
 * Returns ra, or NULL if ra is NULL or the child allocation fails. */
rec_alternative*
initialize_rec_alternative(rec_alternative* ra,
    u_int nelem)
{
    if( ra == NULL )
        return(ra);

    ra->ra_next = make_rec_alternative_array(nelem);
    if( ra->ra_next == NULL )
        return(NULL);

    ra->ra_nalter = nelem;
    return(ra);
}
 
/* Free an array of nalter rec_alternatives, recursively releasing each
 * element's payload and its child alternatives.  delete_points_p is
 * forwarded to the element cleanup.  NULL-safe. */
void delete_rec_alternative_array(u_int nalter,
    rec_alternative* ra,
    bool delete_points_p)
{
    int i;

    if( ra == NULL )
        return;

    for( i = 0; i < nalter; i++ ) {
        rec_alternative* alt = &ra[i];

        cleanup_rec_element(&alt->ra_elem,delete_points_p);
        /* Recurse into this alternative's children, if any. */
        if( alt->ra_nalter > 0 )
            delete_rec_alternative_array(alt->ra_nalter,
                alt->ra_next,
                delete_points_p);
    }

    free(ra);
}
 
 
/*initialize_rec_element-Initialize a recognition element.*/
 
/*initialize_rec_element-Initialize a recognition element.*/
/* Fill re with a deep copy of the translation payload trans, sized by
 * size and interpreted according to type (gesture struct, NUL-
 * terminated char/wchar string, or rec_correlation struct).  Returns
 * re, or NULL for an unknown type.  If size == 0 or trans == NULL the
 * result union stays NULL but the call still succeeds. */
rec_element*
initialize_rec_element(rec_element* re,
    char type,
    u_int size,
    void* trans,
    rec_confidence conf)
{
    if( re != NULL ) {

        re->re_type = type;
        re->re_conf = conf;
        re->re_result.aval = NULL;
        switch (type) {
        case REC_GESTURE:
            /* Copy a single gesture struct. */
            if( size > 0 && trans != NULL ) {
                re->re_result.gval =
                    (gesture*)safe_malloc(sizeof(gesture));
                memcpy((void*)re->re_result.gval,trans,sizeof(gesture));
            }
            break;
        case REC_ASCII:
        case REC_VAR:
        case REC_OTHER:
            /* Copy size chars and NUL-terminate. */
            if( size > 0 && trans != NULL ) {
                re->re_result.aval =
                    (char*)safe_malloc((size+1)*sizeof(char));
                memcpy((void*)re->re_result.aval,trans,size*sizeof(char));
                re->re_result.aval[size] = '\000';
            }
            break;
        case REC_WCHAR:
            /* Copy size wide chars and NUL-terminate. */
            if( size > 0 && trans != NULL ) {
                re->re_result.wval =
                    (wchar_t*)safe_malloc((size+1)*sizeof(wchar_t));
                memcpy((void*)re->re_result.wval,trans,size*sizeof(wchar_t));
                re->re_result.wval[size] = '\000';
            }
            break;
        case REC_CORR:
            /* Copy a single rec_correlation struct. */
            if( size > 0 && trans != NULL ) {
                re->re_result.rcval =
                    (rec_correlation*)safe_malloc(sizeof(rec_correlation));
                memcpy((void*)re->re_result.rcval,
                    trans,
                    sizeof(rec_correlation));
            }
            break;

        default:
            return(NULL);  /* unknown element type */
        }

    }

    return(re);
}
 
/* Release the payload owned by a rec_element according to its type
 * tag.  delete_points_p is forwarded where the payload may reference
 * client-owned pen points. */
static void cleanup_rec_element(rec_element* re,bool delete_points_p)
{
    char type = re->re_type;

    if( type == REC_NONE )
        return;

    if( type == REC_GESTURE ) {
        delete_gesture_array(1,re->re_result.gval,true);
    } else if( type == REC_CORR ) {
        delete_rec_correlation(re->re_result.rcval,
            delete_points_p);
    } else if( type == REC_ASCII || type == REC_VAR ||
               type == REC_WCHAR || type == REC_OTHER ) {
        /* All string-like variants share storage via the union. */
        free(re->re_result.aval);
    }
}
 
/*
* rec_correlation
*/
 
 
/* Allocate a rec_correlation holding a copy of the translation payload
 * (see initialize_rec_element) plus ps_size empty strokes and their
 * start/stop index arrays.  Returns NULL on failure.
 * FIX: the original leaked rc (and, on the second path, the element
 * payload) when initialize_rec_element or make_pen_stroke_array
 * failed; both error paths now release what was allocated. */
rec_correlation*
make_rec_correlation(char type,
    u_int size,
    void* trans,
    rec_confidence conf,
    u_int ps_size)
{
    rec_correlation* rc;

    rc = (rec_correlation*)safe_malloc(sizeof(rec_correlation));

    rc->ro_nstrokes = ps_size;

    /*First initialize element.*/

    if( initialize_rec_element(&(rc->ro_elem),
        type,
        size,
        trans,
        conf) == NULL ) {
        free(rc);  /* element holds nothing yet */
        return(NULL);
    }
    if( (rc->ro_strokes = make_pen_stroke_array(ps_size)) == NULL ) {
        /* Element payload was copied above; release it too. */
        cleanup_rec_element(&rc->ro_elem,false);
        free(rc);
        return(NULL);
    }
    rc->ro_start = (u_int*)safe_malloc(ps_size * sizeof(int));
    rc->ro_stop = (u_int*)safe_malloc(ps_size * sizeof(int));
    return(rc);
}
 
/* Free a rec_correlation: its element payload, stroke array, and
 * start/stop index arrays.  delete_points_p is forwarded to the
 * stroke/element cleanup.  NULL-safe. */
void delete_rec_correlation(rec_correlation* rc,bool delete_points_p)
{
    if( rc == NULL )
        return;

    cleanup_rec_element(&rc->ro_elem,delete_points_p);
    delete_pen_stroke_array(rc->ro_nstrokes,rc->ro_strokes,delete_points_p);

    if( rc->ro_start != NULL )
        free(rc->ro_start);
    if( rc->ro_stop != NULL )
        free(rc->ro_stop);

    free(rc);
}
 
 
/*
* rec_fn
*/
 
 
/* Allocate a rec_fn array with room for size entries plus a NULL
 * terminator; every slot (including the terminator) starts NULL. */
rec_fn* make_rec_fn_array(u_int size)
{
    int i;
    rec_fn* fns = (rec_fn*)safe_malloc((size + 1) * sizeof(rec_fn));

    /* Clear all size slots and the trailing terminator slot. */
    for( i = 0; i <= size; i++ )
        fns[i] = NULL;

    return(fns);
}
 
/* Free a rec_fn array.  NULL-safe. */
void delete_rec_fn_array(rec_fn* rf)
{
    if( rf == NULL )
        return;
    free(rf);
}
 
/*
* pen_stroke
*/
 
 
/* Allocate an array of size pen_strokes, each starting empty (no
 * points, no state). */
pen_stroke* make_pen_stroke_array(u_int size)
{
    pen_stroke* strokes;
    int i;

    strokes = (pen_stroke*) safe_malloc(size * sizeof(pen_stroke));
    for( i = 0; i < size; i++ ) {
        pen_stroke* s = &strokes[i];

        s->ps_npts = 0;
        s->ps_pts = NULL;
        s->ps_nstate = 0;
        s->ps_state = NULL;
    }

    return(strokes);
}
 
/* Fill ps with the given point and state arrays (pointers are stored,
 * not copied).  Returns ps unchanged; NULL-safe. */
pen_stroke* initialize_pen_stroke(pen_stroke* ps,
    u_int npts,
    pen_point* pts,
    u_int nstate,
    u_int* trans,
    pen_state* state)
{
    if( ps == NULL )
        return (ps);

    ps->ps_npts = npts;
    ps->ps_pts = pts;
    ps->ps_nstate = nstate;
    ps->ps_trans = trans;
    ps->ps_state = state;
    return (ps);
}
 
/* Free an array of size pen_strokes, including each stroke's state and
 * transition arrays.  The pen points belong to the client and are
 * freed only when delete_points_p is set.  NULL-safe. */
void delete_pen_stroke_array(u_int size,pen_stroke* ps,bool delete_points_p)
{
    int i;

    if( ps == NULL )
        return;

    for( i = 0; i < size; i++ ) {
        pen_stroke* s = &ps[i];

        if( s->ps_state != NULL )
            free(s->ps_state);
        if( s->ps_trans != NULL )
            free(s->ps_trans);
        if( delete_points_p )
            delete_pen_point_array(s->ps_pts);
    }
    free(ps);
}
 
/*
* pen_point
*/
 
 
/* Allocate an array of size pen_points, each zeroed (time and both
 * coordinates). */
pen_point* make_pen_point_array(u_int size)
{
    int i;
    pen_point* pts = (pen_point*)safe_malloc(size * sizeof(pen_point));

    for( i = 0; i < size; i++ ) {
        pts[i].time = 0;
        pts[i].x = 0;
        pts[i].y = 0;
    }

    return(pts);
}
 
/* Free a pen_point array; a NULL pointer is silently ignored. */
void delete_pen_point_array(pen_point* pp)
{
    if( pp == NULL )
        return;
    free(pp);
}
 
/*
* pen_state
*/
 
 
/*
 * Allocate an array of size pen_states with every field (button, pen,
 * pressure and the three angles) cleared.
 */
pen_state* make_pen_state_array(u_int size)
{
    u_int j;
    pen_state* ps = (pen_state*)safe_malloc(size*sizeof(pen_state));

    for( j = 0; j < size; j++ ) {
        pen_state* s = &ps[j];

        s->pt_button = 0;
        s->pt_pen = 0;
        s->pt_pressure = 0;
        s->pt_anglex = 0.0;
        s->pt_angley = 0.0;
        s->pt_barrelrotate = 0.0;
    }

    return(ps);
}
 
/*
 * Copy the supplied hardware state values into an existing pen_state.
 * A NULL pen_state is tolerated and returned unchanged.
 */
pen_state* initialize_pen_state(pen_state* ps,
                                u_short button,
                                u_short pen,
                                short pressure,
                                double anglex,
                                double angley,
                                double barrelrotate)
{
    if( ps == NULL )
        return(ps);

    ps->pt_button = button;
    ps->pt_pen = pen;
    ps->pt_pressure = pressure;
    ps->pt_anglex = anglex;
    ps->pt_angley = angley;
    ps->pt_barrelrotate = barrelrotate;
    return(ps);
}
 
/* Free a pen_state array; a NULL pointer is silently ignored. */
void delete_pen_state_array(pen_state* ps)
{
    if( ps == NULL )
        return;
    free(ps);
}
 
/*
* gesture
*/
 
/*
 * Allocate an uninitialized array of size gestures; each entry is
 * expected to be filled in via initialize_gesture().
 */
gesture*
make_gesture_array(u_int size)
{
    gesture* g = (gesture*)safe_malloc(size * sizeof(gesture));

    return(g);
}
 
/*
 * Fill in an existing gesture: the name is duplicated with strdup(),
 * the hot-spot array is stored by reference (the points come from the
 * window system and are not copied), and the bounding box is copied
 * field by field.  A NULL gesture pointer is returned unchanged.
 */
gesture* initialize_gesture(gesture* g,
                            char* name,
                            u_int nhs,
                            pen_point* hspots,
                            pen_rect bbox,
                            xgesture fn,
                            void* wsinfo)
{
    if( g == NULL )
        return(g);

    g->g_nhs = nhs;
    g->g_hspots = hspots;

    /* NOTE(review): strdup() result is not checked for NULL here. */
    g->g_name = strdup(name);

    g->g_bbox.x = bbox.x;
    g->g_bbox.y = bbox.y;
    g->g_bbox.width = bbox.width;
    g->g_bbox.height = bbox.height;

    g->g_action = fn;
    g->g_wsinfo = wsinfo;

    return(g);
}
 
/*
 * Free an array of size gestures, releasing each gesture's strdup'ed
 * name and, when delete_points_p is true, its hot-spot point array.
 * A NULL array is silently ignored.
 */
void
delete_gesture_array(u_int size,gesture* ga,bool delete_points_p)
{
    u_int i;

    if( ga == NULL )
        return;

    for( i = 0; i < size; i++ ) {
        free(ga[i].g_name);
        if( delete_points_p )
            delete_pen_point_array(ga[i].g_hspots);
    }

    free(ga);
}
 
/*
* copy fns for stroke buffer management.
*/
 
/*
 * Copy stroke ps2 into ps1.  The transition and state arrays are
 * deep-copied; the pen points themselves are shared by reference.
 * Returns ps1 on success.  On failure nothing is leaked and NULL
 * is returned.
 */
static pen_stroke*
copy_pen_stroke(pen_stroke* ps1,pen_stroke* ps2)
{
    u_int* trans;
    pen_state* state;

    trans = copy_state_trans_array(ps2->ps_nstate, ps2->ps_trans);
    if( trans == NULL )
        return(NULL);

    state = copy_pen_state_array(ps2->ps_nstate, ps2->ps_state);
    if( state == NULL ) {
        free(trans);
        return(NULL);
    }

    initialize_pen_stroke(ps1,
                          ps2->ps_npts,
                          ps2->ps_pts,	/* points shared, not copied */
                          ps2->ps_nstate,
                          trans,
                          state);
    return(ps1);
}
 
/*
 * Deep-copy an array of nstrokes pen_strokes (state and transition
 * arrays are duplicated; the pen points are shared by reference).
 * Returns the new array, or NULL on failure.
 *
 * Fix: the original ignored copy_pen_stroke() failures and could hand
 * back a partially-initialized array.  Mirror the error handling
 * already used by concatenate_pen_strokes(): on failure free what was
 * built and return NULL.
 */
pen_stroke*
copy_pen_stroke_array(u_int nstrokes,
                      pen_stroke* strokes)
{
    u_int i;
    pen_stroke* ps = make_pen_stroke_array(nstrokes);

    if( ps == NULL ) {
        return(NULL);
    }

    for( i = 0; i < nstrokes; i++ ) {
        if( copy_pen_stroke(&ps[i],&strokes[i]) == NULL ) {
            /* false: pen points are shared with the source strokes */
            delete_pen_stroke_array(nstrokes,ps,false);
            return(NULL);
        }
    }

    return(ps);
}
 
/*
 * Duplicate an array of nstate pen_states.  Returns the new array,
 * or NULL if the allocation fails.
 */
pen_state*
copy_pen_state_array(u_int nstate,pen_state* state)
{
    u_int j;
    pen_state* dup = make_pen_state_array(nstate);

    if( dup == NULL )
        return(dup);

    for( j = 0; j < nstate; j++ ) {
        initialize_pen_state(&dup[j],
                             state[j].pt_button,
                             state[j].pt_pen,
                             state[j].pt_pressure,
                             state[j].pt_anglex,
                             state[j].pt_angley,
                             state[j].pt_barrelrotate);
    }

    return(dup);
}
 
/*
 * Duplicate an array of ntrans state-transition indices.
 */
u_int*
copy_state_trans_array(u_int ntrans,u_int* trans)
{
    u_int j;
    u_int* dup = (u_int*)safe_malloc(ntrans*sizeof(u_int));

    for( j = 0; j < ntrans; j++ )
        dup[j] = trans[j];

    return(dup);
}
 
/*
 * Build a new stroke array holding the strokes of strokes1 followed by
 * those of strokes2 (states/transitions deep-copied, points shared).
 * On success the count and array are stored through nstrokes3 and
 * strokes3 and the array is also returned; on any failure everything
 * built so far is freed and NULL is returned.
 */
pen_stroke*
concatenate_pen_strokes(int nstrokes1,
                        pen_stroke* strokes1,
                        int nstrokes2,
                        pen_stroke* strokes2,
                        int* nstrokes3,
                        pen_stroke** strokes3)
{
    int i;
    int total = nstrokes1 + nstrokes2;
    pen_stroke* ps;

    /*Allocate room for both input arrays.*/
    if( (ps = make_pen_stroke_array(total)) == NULL ) {
        return(NULL);
    }

    /*Copy the first array, then the second, into the new one.*/
    for( i = 0; i < total; i++ ) {
        pen_stroke* src = (i < nstrokes1) ? &strokes1[i]
                                          : &strokes2[i - nstrokes1];

        if( copy_pen_stroke(&ps[i],src) == NULL ) {
            delete_pen_stroke_array(total,ps,false);
            return(NULL);
        }
    }

    *nstrokes3 = total;
    *strokes3 = ps;

    return(ps);
}
/scrib.h
0,0 → 1,81
/*
* Copyright (c) 2000 Greg Haerr <greg@censoft.com>
*
 
*
* Permission to use, copy, modify, distribute, and sell this software and its
* documentation for any purpose is hereby granted without fee, provided that
* the above copyright notice appear in all copies and that both that
* copyright notice and this permission notice appear in supporting
* documentation, and that the name of Keith Packard not be used in
* advertising or publicity pertaining to distribution of the software without
* specific, written prior permission. Keith Packard makes no
* representations about the suitability of this software for any purpose. It
* is provided "as is" without express or implied warranty.
*
* KEITH PACKARD DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE,
* INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN NO
* EVENT SHALL KEITH PACKARD BE LIABLE FOR ANY SPECIAL, INDIRECT OR
* CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE,
* DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
* TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
* PERFORMANCE OF THIS SOFTWARE.
*/
 
#define MWINCLUDECOLORS
#include "nano-X.h"
#include "hre_api.h"
#include "li_recognizer.h"
 
#define CS_LETTERS 0
#define CS_DIGITS 1
#define CS_PUNCTUATION 2
 
#define NUM_RECS 3
#define DEFAULT_REC_DIR "classifiers"
#ifndef REC_DEFAULT_USER_DIR
#define REC_DEFAULT_USER_DIR "bin"
#endif
/*#define REC_DEFAULT_USER_DIR "/home/greg/net/microwin/src/apps/scribble"*/
/*#define CLASSIFIER_DIR ".classifiers"*/
#define DEFAULT_LETTERS_FILE "letters.cl"
#define DEFAULT_DIGITS_FILE "digits.cl"
#define DEFAULT_PUNC_FILE "punc.cl"
#define rec_name "libli_recog.so"
 
/* Bundle of recognizer state for the scribble demo: one recognizer per
   character set (CS_LETTERS/CS_DIGITS/CS_PUNCTUATION above) plus the
   li_recognizer entry points looked up from the shared library. */
struct graffiti {
    recognizer rec[3]; /* 3 recognizers, one each for letters, digits,
			  and punctuation */
    char cldir[200]; /* directory in which the current classifier
			files are found */
    li_recognizer_train rec_train; /* pointer to training function */
    li_recognizer_getClasses rec_getClasses;
    /* pointer to the function that lists
       the characters in the classifier
       file. */
};
 
/* State for one scribble input area (poor man's widget record). */
typedef struct {
    /* private state */
    GR_WINDOW_ID win;	/* drawing window */
    GR_GC_ID gc;	/* graphics context used for drawing */
    GR_BOOL down;	/* mouse is down*/
    /*GR_WINDOW_ID lastfocusid;*/ /* last window with focus*/
    GR_POINT *pt;	/* points */
    int ppasize;	/* presumably the allocated capacity of pt -- confirm against the .c file */
    pen_stroke ps;	/* stroke data handed to the recognizer */
    struct graffiti graf;	/* the per-character-set recognizers */
    int capsLock;	/* shift/mode flags -- semantics defined by the action handlers */
    int puncShift;
    int tmpShift;
    int ctrlShift;
    int curCharSet;	/* current character set -- presumably one of CS_* above */
    int lastchar;	/* last recognized character -- TODO confirm */
} ScribbleRec, *ScribbleWidget;
 
ScribbleWidget create_scribble(void);
void destroy_scribble(ScribbleWidget w);
void ActionStart(ScribbleWidget w, int x, int y);
void ActionMove(ScribbleWidget w, int x, int y);
void ActionEnd(ScribbleWidget w, int x, int y);
void Redisplay (ScribbleWidget w);
/hre_api_internal.h
0,0 → 1,285
/*
* hre_api_internal.h: API functions, used by internal clients also.
* Author: James Kempf
* Created On: Tue Jan 12 12:52:27 1993
* Last Modified By: James Kempf
* Last Modified On: Fri Sep 23 13:50:48 1994
* Update Count: 33
* Copyright (c) 1994 by Sun Microsystems Computer Company
* All rights reserved.
*
* Use and copying of this software and preparation of
* derivative works based upon this software are permitted.
* Any distribution of this software or derivative works
* must comply with all applicable United States export control
* laws.
*
* This software is made available as is, and Sun Microsystems
* Computer Company makes no warranty about the software, its
* performance, or its conformity to any specification
*/
 
#ifndef _HRE_API_INTERNAL_H_
 
#define _HRE_API_INTERNAL_H_
 
/*Need structs for return types.*/
 
#include <hre.h>
 
/*
* ADMINISTRATION
*/
 
/*
* recognizer_load - If directory is not NULL, then use it as a pathname
* to find the recognizer. Otherwise, use the default naming conventions
* to find the recognizer having file name name. The subset argument
* contains a null-terminated array of names for character subsets which
* the recognizer should translate.
*/
 
recognizer
recognizer_load(char* directory,char* name,char** subset);
 
/*
* recognizer_unload - Unload the recognizer.
*/
 
int
recognizer_unload(recognizer rec);
 
/*
* recognizer_get_info-Get a pointer to a rec_info
* giving the locale and subsets supported by the recognizer, and shared
* library pathname.
*/
 
const rec_info*
recognizer_get_info(recognizer rec);
 
 
/*
* recognizer_manager_version-Return the version number string of the
* recognition manager.
*/
 
const char* recognizer_manager_version(recognizer rec);
 
/*
* recognizer_load_state-Get any recognizer state associated with name
* in dir. Note that name may not be simple file name, since
* there may be more than one file involved. Return 0 if successful,
* -1 if not.
*/
 
int
recognizer_load_state(recognizer rec,char* dir,char* name);
 
/*
* recognizer_save_state-Save any recognizer state to name
* in dir. Note that name may not be a simple file name, since
* there may be more than one file involved. Return 0 if successful,
* -1 if not.
*/
 
int
recognizer_save_state(recognizer rec,char* dir,char* name);
 
/*
* recognizer_error-Return the last error message, or NULL if none.
*/
 
char*
recognizer_error(recognizer rec);
 
/*
* DICTIONARIES
*/
 
/* recognizer_load_dictionary-Load a dictionary from the directory
* dir and file name. Return the dictionary pointer if successful,
* otherwise NULL.
*/
 
wordset
recognizer_load_dictionary(recognizer rec,char* directory,char* name);
 
/* recognizer_save_dictionary-Save the dictionary to the file. Return 0
 * if successful, -1 if an error occurs.
 */
 
int
recognizer_save_dictionary(recognizer rec,char* dir,char* name,wordset dict);
 
/*
* recognizer_free_dictionary-Free the dictionary. Return 0 if successful,
* -1 if error occurs.
*/
 
int
recognizer_free_dictionary(recognizer rec,wordset dict);
 
/*
* recognizer_add_to_dictionary-Add the word to the dictionary. Return 0
* if successful, -1 if error occurs.
*/
 
int
recognizer_add_to_dictionary(recognizer rec,letterset* word,wordset dict);
 
/*
* recognizer_delete_from_dictionary-Delete the word from the dictionary.
* Return 0 if successful, -1 if error occurs.
*/
 
int
recognizer_delete_from_dictionary(recognizer rec,letterset* word,wordset dict);
 
/*
* TRANSLATION
*/
 
/* recognizer_set/get_context - Set/get the recognition context for
* subsequent buffering and translation. recognizer_set_context()
* returns -1 if an error occurs, otherwise 0. recognizer_get_context()
* returns NULL if no context has been set. The context is copied to avoid
* potential memory deallocation problems.
*/
 
int
recognizer_set_context(recognizer rec,rc* rec_xt);
rc*
recognizer_get_context(recognizer rec);
 
/* recognizer_clear - Set stroke buffer to NULL and clear the context.
* Returns -1 if an error occurred, otherwise 0. Both the context and the
* stroke buffer are deallocated. If delete_points_p is true, delete the
* points also.
*/
 
int
recognizer_clear(recognizer rec,bool delete_points_p);
 
/* recognizer_get/set_buffer - Get/set the stroke buffer. The stroke buffer
* is copied to avoid potential memory allocation problems. Returns -1 if
* an error occurs, otherwise 0.
*/
 
int
recognizer_get_buffer(recognizer rec, u_int* nstrokes,pen_stroke** strokes);
 
int
recognizer_set_buffer(recognizer rec,u_int nstrokes,pen_stroke* strokes);
 
/* recognizer_translate - Copy the strokes argument into the stroke buffer and
* translate the buffer. If correlate_p is true, then provide stroke
* correlations as well. If either nstrokes is 0 or strokes is NULL, then
* just translate the stroke buffer and return the translation. Return an
* array of alternative translation segmentations in the ret pointer and the
* number of alternatives in nret, or NULL and 0 if there is no translation.
* The direction of segmentation is as specified by the rc_direction field in
* the buffered recognition context. Returns -1 if an error occurred,
* otherwise 0.
*/
 
int
recognizer_translate(recognizer rec,
u_int nstrokes,
pen_stroke* strokes,
bool correlate_p,
int* nret,
rec_alternative** ret);
 
/*
* recognizer_get_extension_functions-Return a null terminated array
* of functions providing extended functionality. Their interfaces
* will change depending on the recognizer.
*/
 
rec_fn*
recognizer_get_extension_functions(recognizer rec);
 
 
/*
* GESTURE SUPPORT
*/
 
/*
* recognizer_get_gesture_names - Return a null terminated array of
* character strings containing the gesture names.
*/
 
char**
recognizer_get_gesture_names(recognizer rec);
 
/*
* recognizer_set_gesture_action-Set the action function associated with the
* name.
*/
 
xgesture
recognizer_set_gesture_action(recognizer rec,
char* name,
xgesture fn,
void* wsinof);
 
/*
* The following functions are for deleting data structures returned
* by the API functions.
*/
 
 
void
delete_rec_alternative_array(u_int nalter,
rec_alternative* ra,
bool delete_points_p);
 
void
delete_rec_correlation(rec_correlation* corr,
bool delete_points_p);
 
/*
* These are used by clients to create arrays for passing to API
* functions.
*/
 
pen_stroke*
make_pen_stroke_array(u_int size);
pen_stroke*
initialize_pen_stroke(pen_stroke* ps,
u_int npts,
pen_point* pts,
u_int nstate,
u_int* trans,
pen_state* state);
void
delete_pen_stroke_array(u_int size,pen_stroke* ps,bool delete_points_p);
 
pen_point*
make_pen_point_array(u_int size);
void
delete_pen_point_array(pen_point* pp);
 
pen_stroke*
copy_pen_stroke_array(u_int nstrokes,pen_stroke* strokes);
pen_state*
copy_pen_state_array(u_int nstate,pen_state* state);
u_int*
copy_state_trans_array(u_int ntrans,u_int* trans);
 
pen_state*
make_pen_state_array(u_int size);
pen_state*
initialize_pen_state(pen_state* ps,
u_short button,
u_short pen,
short pressure,
double anglex,
double angley,
double barrelrotate);
void
delete_pen_state_array(pen_state* ps);
 
#endif
 
/matrix.h
0,0 → 1,84
/***********************************************************************
 
matrix.h - matrix operations
 
Copyright (C) 1991 Dean Rubine
 
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License. See ../COPYING for
the full agreement.
 
**********************************************************************/
/*
Simple matrix operations
Why I am writing this stuff over is beyond me
 
*/
 
/*
 
This package provides the Matrix and Vector data types
 
The difference between this matrix package and others is that:
Vectors may be accessed as 1d arrays
Matrices may still be accessed like two dimensional arrays
This is accomplished by putting a structure containing the bounds
of the matrix before the pointer to the (array of) doubles (in the
case of a Vector) or before the pointer to an (array of) pointers
to doubles (in the case of a Matrix).
 
 
Vectors and matrices are collectively called "arrays" herein.
*/
 
#define HEADER(a) ( ((struct array_header *) a) - 1 )
 
#define NDIMS(a) (int)(HEADER(a)->ndims)
#define NROWS(a) (int)(HEADER(a)->nrows)
#define NCOLS(a) (int)(HEADER(a)->ncols)
#define ISVECTOR(a) (NDIMS(a) == 1)
#define ISMATRIX(a) (NDIMS(a) == 2)
 
/* Note: this structure is prepended at the beginning of a Vector, and causes
the Vector data type to be 32-byte aligned, but not 64-byte aligned.
If this were a problem, filler could be filler[5] (or more) instead.
--Sharon Perl, 12/17/98. */
 
/* Bounds header prepended to every Vector/Matrix allocation; accessed
   via the HEADER/NDIMS/NROWS/NCOLS macros above.  Fields are unsigned
   char, so dimensions are limited to 255. */
struct array_header {
    unsigned char ndims; /* 1 = vector, 2 = matrix */
    unsigned char nrows; /* number of rows (max 255) */
    unsigned char ncols; /* number of columns (max 255) */
    unsigned char filler; /* padding; see the alignment note above */
};
 
typedef double **Matrix;
typedef double *Vector;
 
Vector NewVector(); /* int r; (number of rows) */
Matrix NewMatrix(); /* int r, c; (number of rows, number of columns) */
void FreeVector(); /* Vector v; */
void FreeMatrix(); /* Matrix m; */
void PrintVector(); /* Vector v; char *fmt; any a1,a2,a3,a4,a5,a6,a7,a8 */
void PrintMatrix(); /* Matrix m; char *fmt; any a1,a2,a3,a4,a5,a6,a7,a8 */
double InnerProduct(); /* Vector v1, v2 */
void MatrixMultiply(); /* Matrix m1, m2, prod; */
void VectorTimesMatrix(); /* Vector v; Matrix m; Vector prod; */
void ScalarTimesVector(); /* double s; Vector v; Vector prod; */
double QuadraticForm(); /* Vector v; Matrix m; (computes v'mv) */
double InvertMatrix(); /* Matrix input_matrix, result_matrix (returns det) */
Vector SliceVector(); /* Vector v; BitVector rowmask */
Matrix SliceMatrix(); /* Matrix m; Bitvector rowmask, colmask; */
Vector VectorCopy(); /* Vector v; */
Matrix MatrixCopy(); /* Matrix m; */
Vector InputVector(); /* FILE *f; */
Matrix InputMatrix(); /* FILE *f; */
 
double InvertSingularMatrix(); /* Matrix input, result (returns det) */
Matrix DeSliceMatrix(); /* Matrix m, double fill, BitVector rowmask, colmask;
Matrix result */
void OutputVector();
void OutputMatrix();
void ZeroVector();
void ZeroMatrix(); /* Matrix m; */
void FillMatrix(); /* Matrix m; double fill; */
/sc.h
0,0 → 1,72
/***********************************************************************
 
sc.h - creates classifiers from feature vectors of examples, as well as
classifying example feature vectors.
 
Copyright (C) 1991 Dean Rubine
 
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 1, or (at your option)
any later version.
 
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
 
You should have received a copy of the GNU General Public License
along with this program (in ../COPYING); if not, write to the Free
Software Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
 
***********************************************************************/
 
 
/*
single path classifier
*/
 
#ifndef _SC_H_
 
#define _SC_H_
 
#define MAXSCLASSES 100
 
typedef struct sclassifier *sClassifier; /* single feature-vector classifier */
typedef int sClassIndex; /* single feature-vector class index */
typedef struct sclassdope *sClassDope; /* single feature-vector class dope */
 
/* Per-class data ("dope") kept by the single-path classifier. */
struct sclassdope {
    char *name;		/* class name, e.g. the character it stands for */
    sClassIndex number;	/* index of this class within the classifier */
    Vector average;	/* presumably the mean feature vector of the examples */
    Matrix sumcov;	/* presumably the accumulated covariance -- see sc.c */
    int nexamples;	/* number of training examples seen */
};
 
/* A trained single-feature-vector classifier over nclasses classes. */
struct sclassifier {
    int nfeatures;	/* length of the feature vectors */
    int nclasses;	/* number of classes (bounded by MAXSCLASSES) */
    sClassDope *classdope;	/* per-class dope, indexed by sClassIndex */

    Vector cnst; /* constant term of discrimination function */
    Vector *w; /* array of coefficient weights */
    Matrix invavgcov; /* inverse covariance matrix */
};
 
sClassifier sNewClassifier();
sClassifier sRead(); /* FILE *f */
void sWrite(); /* FILE *f; sClassifier sc; */
void sFreeClassifier(); /* sc */
void sAddExample(); /* sc, char *classname; Vector y */
void sRemoveExample(); /* sc, classname, y */
void sDoneAdding(); /* sc */
sClassDope sClassify(); /* sc, y */
sClassDope sClassifyAD(); /* sc, y, double *ap; double *dp */
sClassDope sClassNameLookup(); /* sc, classname */
double MahalanobisDistance(); /* Vector v, u; Matrix sigma */
void FixClassifier();
void sDumpClassifier();
void sDistances();
 
#endif
/hre_api.h
0,0 → 1,43
/*
* hre_api.h: User-Level API for Handwriting Recognition
* Author: James Kempf
* Created On: Mon Nov 2 14:01:25 1992
* Last Modified By: James Kempf
* Last Modified On: Fri Sep 23 13:50:15 1994
* Update Count: 22
* Copyright (c) 1994 by Sun Microsystems Computer Company
* All rights reserved.
*
* Use and copying of this software and preparation of
* derivative works based upon this software are permitted.
* Any distribution of this software or derivative works
* must comply with all applicable United States export control
* laws.
*
* This software is made available as is, and Sun Microsystems
* Computer Company makes no warranty about the software, its
* performance, or its conformity to any specification
*/
 
#ifndef _HRE_API_H_
 
#define _HRE_API_H_
 
/*
* Opaque type for the recognizer. The toolkit must access through
* appropriate access functions.
*/
 
typedef void* recognizer;
 
/*
* Opaque type for recognizers to implement dictionaries.
*/
 
typedef void* wordset;
 
#include <hre.h>
#include <hre_api_internal.h>
 
 
#endif
/util.c
0,0 → 1,123
/***********************************************************************
 
util.c - memory allocation, error reporting, and other mundane stuff
 
Copyright (C) 1991 Dean Rubine
 
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License. See ../COPYING for
the full agreement.
 
**********************************************************************/
/*
* Mundane utility routines
* see util.h
*/
 
/*LINTLIBRARY*/
 
#include "util.h"
#include <stdio.h>
#include <ctype.h>
#include <setjmp.h>
/* ari -- for strlen */
#include <string.h>
 
extern char* li_err_msg;
static char err_msg[BUFSIZ];
 
/*
* Function used by allocation macro
*/
 
char *
myalloc(nitems, itemsize, typename)	/* nitems/itemsize default to int (K&R) */
char *typename;
{
	/* Allocate nitems * itemsize bytes.  On failure, record a
	   message via error() -- which saves it and returns, it does
	   not exit -- and return NULL.  typename is used only for the
	   error text.
	   NOTE(review): the multiplication can overflow unsigned int
	   for very large requests. */
	register unsigned int bytes = nitems * itemsize;
	register char *p = malloc(bytes);
	if(p == NULL)
		error("Can't get mem for %d %s's (each %d bytes, %d total bytes)",
		      nitems, typename, itemsize, bytes);
	return p;
}
 
/*
* Return a copy of a string
*/
 
char *
scopy(s)
char *s;
{
	/* Return a freshly allocated copy of s (a local strdup).
	   allocate() is the project's allocation macro from util.h --
	   presumably it goes through myalloc(), so a failed allocation
	   records an error and returns NULL, and strcpy would then
	   fault; confirm against util.h. */
	register char *p = allocate(strlen(s) + 1, char);
	(void) strcpy(p, s);
	return p;
}
 
/*
* Save error message, then return to recognition manager.
*/
 
/*VARARGS1*/
void
error(a, b, c, d, e, f, g, h, i, j)
char *a;	/* printf-style format; b..j are the pre-ANSI varargs hack */
{
	/* Format the message into the static buffer and publish it
	   through li_err_msg for the recognition manager; does NOT
	   print or exit.  Not reentrant: err_msg is shared. */
	sprintf(err_msg, a, b, c, d, e, f, g, h, i, j);
	li_err_msg = err_msg;
}
 
/*
* Print error message, exit.
*/
 
 
/*VARARGS1*/
void
exit_error(a, b, c, d, e, f, g, h, i, j)
char *a;	/* printf-style format; b..j are the pre-ANSI varargs hack */
{
	/* Fatal variant of error(): print the formatted message to
	   stderr, then terminate the process with status 1. */
	fprintf(stderr, a, b, c, d, e, f, g, h, i, j);
	exit(1);
}
 
 
/*
* print a message if DebugFlag is non-zero
*/
 
int DebugFlag = 1;
 
void
debug(a, b, c, d, e, f, g)
char *a;	/* printf-style format; b..g are the pre-ANSI varargs hack */
{
	/* Trace output to stderr, gated on the global DebugFlag. */
	if(DebugFlag)
		fprintf(stderr, a, b, c, d, e, f, g);
}
 
#define upper(c) (islower(c) ? toupper(c) : (c))
 
int
ucstrcmp(s1, s2)
register char *s1, *s2;
{
	/* Case-insensitive strcmp(): compare both strings upper-cased
	   (via the upper() macro above), returning <0, 0 or >0.  The
	   final return also orders strings of unequal length, since
	   the loop stops at the first NUL. */
	register int i;

	for(; *s1 && *s2; s1++, s2++)
		if( (i = (upper(*s1) - upper(*s2))) != 0)
			return i;
	return (upper(*s1) - upper(*s2));
}
 
#define NSTRINGS 3
 
char *
tempstring()
{
	/* Hand out one of NSTRINGS static 100-byte scratch buffers in
	   rotation.  Not reentrant/thread-safe; a buffer is silently
	   reused after NSTRINGS further calls. */
	static char strings[NSTRINGS][100];
	static int index;
	if(index >= NSTRINGS) index = 0;
	return strings[index++];
}
/hre_internal.h
0,0 → 1,286
/*
* hre_internal.h: Internal Interface for Recognizer.
* Author: James Kempf
* Created On: Thu Nov 5 10:54:18 1992
* Last Modified By: James Kempf
* Last Modified On: Fri Sep 23 13:51:15 1994
* Update Count: 99
* Copyright (c) 1994 by Sun Microsystems Computer Company
* All rights reserved.
*
* Use and copying of this software and preparation of
* derivative works based upon this software are permitted.
* Any distribution of this software or derivative works
* must comply with all applicable United States export control
* laws.
*
* This software is made available as is, and Sun Microsystems
* Computer Company makes no warranty about the software, its
* performance, or its conformity to any specification
*/
 
#ifndef _HRE_INTERNAL_H_
 
#define _HRE_INTERNAL_H_
 
/*Avoids forward reference problem.*/
 
#define wordset void*
 
#include <hre.h>
 
#undef wordset
 
#define recognizer void*
 
/*
* Internal view of wordset. The recognition engine uses this view to
* maintain information about which recognizer object this wordset
* belongs to, which file (in case it needs to be saved), and internal
* data structures.
*/
 
typedef struct _wordset {
char* ws_pathname; /*Path name to word set file.*/
recognizer ws_recognizer; /*To whom it belongs.*/
void* ws_internal; /*Internal data structures.*/
} *wordset;
 
#undef recognizer
 
/*
* Internal view of the recognizer struct. This view is only available
* to OEM clients who implement a recognizer shared library. Clients
* of the recognizer itself see it as an opaque data type. The struct
* contains a function pointer for each function in the client API.
*/
 
struct _Recognizer {
u_int recognizer_magic;
/* ari */
/* const char* recognizer_version; */
char* recognizer_version;
 
rec_info* recognizer_info;
void* recognizer_specific;
int
(*recognizer_load_state)(
struct _Recognizer*,
char*, char*);
/* char* dir,
char* name);
*/
int
(*recognizer_save_state)(
struct _Recognizer*,
char*, char*);
/* char* dir,
char* name);
*/
char*
(*recognizer_error)(
struct _Recognizer*);
wordset
(*recognizer_load_dictionary)(
/* struct _Recognizer* rec,
char* directory,
char* name);
*/
struct _Recognizer*,
char*, char*);
int
(*recognizer_save_dictionary)(
/* struct _Recognizer* rec,
char* directory,
char* name,
wordset dict);
*/
struct _Recognizer*,
char*, char*, wordset);
 
int
(*recognizer_free_dictionary)(
/* struct _Recognizer* rec,
wordset dict);
*/
struct _Recognizer*,
wordset);
int
(*recognizer_add_to_dictionary)(
/* struct _Recognizer* rec,
letterset* word,
wordset dict);
*/
struct _Recognizer*,
letterset*, wordset);
int
(*recognizer_delete_from_dictionary)(
/* struct _Recognizer* rec,
letterset* word,
wordset dict);
*/
struct _Recognizer*,
letterset*, wordset);
int
(*recognizer_set_context)(
/* struct _Recognizer* rec,
rc* rec_xt);
*/
struct _Recognizer*,rc*);
rc*
(*recognizer_get_context)(
/* struct _Recognizer* rec);
*/
struct _Recognizer*);
int
(*recognizer_clear)(
/* struct _Recognizer* rec,
bool delete_points_p);
*/
struct _Recognizer*, bool);
int
(*recognizer_get_buffer)(
/* struct _Recognizer* rec,
u_int* nstrokes,
pen_stroke** strokes);
*/
struct _Recognizer*, u_int*, pen_stroke**);
 
int
(*recognizer_set_buffer)(
/* struct _Recognizer* rec,
u_int nstrokes,
pen_stroke* strokes);
*/
struct _Recognizer*, u_int, pen_stroke*);
int
(*recognizer_translate)(
/* struct _Recognizer* rec,
u_int nstrokes,
pen_stroke* strokes,
bool correlate_p,
int* nret,
rec_alternative** ret);
*/
struct _Recognizer*, u_int, pen_stroke*,
bool, int*, rec_alternative**);
rec_fn*
(*recognizer_get_extension_functions)(
struct _Recognizer*);
char**
(*recognizer_get_gesture_names)(
struct _Recognizer*);
xgesture
(*recognizer_set_gesture_action)(
struct _Recognizer*,
/* char* name,
xgesture fn,
void* wsinfo);
*/
char*, xgesture, void*);
u_int recognizer_end_magic;
};
 
typedef struct _Recognizer* recognizer;
 
/*
* recognizer_internal_initialize - Allocate and initialize the recognizer
* object. The recognition shared library has the responsibility for filling
* in all the function pointers for the recognition functions. This
* function must be defined as a global function within the shared
* library, so it can be accessed using dlsym() when the recognizer
 * shared library is loaded. It returns NULL if an error occurred and
* sets errno to indicate what.
*/
 
typedef recognizer (*recognizer_internal_initialize)(rec_info* ri);
 
/*Function header definition for recognizer internal initializer.*/
 
/* ari -- This is used in cmu_recognizer.c. */
 
#define RECOGNIZER_INITIALIZE(_a) \
recognizer __recognizer_internal_initialize(rec_info* _a)
 
/*
* recognizer_internal_finalize - Deallocate and deinitialize the recognizer
* object. If the recognizer has allocated any additional storage, it should
* be deallocated as well. Returns 0 if successful, -1 if the argument
* wasn't a recognizer or wasn't a recognizer handled by this library.
*/
 
typedef int (*recognizer_internal_finalize)(recognizer r);
 
#define RECOGNIZER_FINALIZE(_a) \
int __recognizer_internal_finalize(recognizer _a)
 
 
/*
* The following are for creating HRE structures.
*/
 
recognizer
make_recognizer(rec_info* ri);
void
delete_recognizer(recognizer rec);
 
rec_alternative*
make_rec_alternative_array(u_int size);
rec_correlation*
make_rec_correlation(char type,
u_int size,
void* trans,
rec_confidence conf,
u_int ps_size);
 
rec_fn*
make_rec_fn_array(u_int size);
void
delete_rec_fn_array(rec_fn* rf);
 
gesture*
initialize_gesture(gesture* g,
char* name,
u_int nhs,
pen_point* hspots,
pen_rect bbox,
xgesture cback,
void* wsinfo);
gesture*
make_gesture_array(u_int size);
void
delete_gesture_array(u_int size,gesture* ga,bool delete_points_p);
 
pen_stroke*
concatenate_pen_strokes(int nstrokes1,
pen_stroke* strokes1,
int nstrokes2,
pen_stroke* strokes2,
int* nstrokes3,
pen_stroke** strokes3);
 
rec_alternative*
initialize_rec_alternative(rec_alternative* ra,u_int nelem);
rec_element*
initialize_rec_element(rec_element* re,
char type,
u_int size,
void* trans,
rec_confidence conf);
/*
* Pathnames, etc.
*/
 
/* these going to be handled in the makefile, for portability */
/* #define REC_DEFAULT_HOME_DIR "/udir/rapkin/vb/hre.ultrix/lib/recognizers" */
/* #define REC_DEFAULT_USER_DIR ".recognizers" */
 
#define REC_DEFAULT_LOCALE "C"
#define RECHOME "RECHOME"
#define LANG "LANG"
 
#include <hre_api_internal.h>
 
#endif
 
/punc.cl
0,0 → 1,698
33 classes
!
#
$
%
&
'
+
,
/
<
=
@
^
|
~
?
>
)
(
.
-
T
_
*
`
;
:
F
G
H
I
J
K
V 12 0.049886 -0.989604 50.057 1.52594 50.0488 0.0187451 -0.999046 50.2848 -0.0996306 0.666789 0.145865 0.12
M 12 12
0.072489 0.00262767 1.13576 -0.00469268 1.14199 0.0191112 0.000350481 1.12358 0.171161 -0.0273375 -0.0188308 -0.0021693
0 0.00029536 0.0159534 -0.000397192 0.0159646 0.000827037 1.55778e-05 0.0175581 0.00474467 0.00257862 0.000185994 -0.000238129
0 0 22.1984 -0.0218347 22.3292 0.245996 0.0043836 21.586 3.15653 -1.34531 -0.557223 -0.0107666
0 0 0 0.000995911 -0.0218806 -0.00207712 -3.99886e-05 -0.0264514 -0.00394825 -0.0110765 -0.00264208 0.000380231
0 0 0 0 22.4608 0.247315 0.00440698 21.7128 3.17425 -1.3538 -0.560529 -0.0107666
0 0 0 0 0 0.0062168 0.000116549 0.249038 0.0354797 0.00910818 0.000169102 -0.000772006
0 0 0 0 0 0 2.18952e-06 0.00445375 0.000629641 0.000203318 1.43503e-05 -1.46788e-05
0 0 0 0 0 0 0 21.0186 3.074 -1.2418 -0.524773 -0.0124504
0 0 0 0 0 0 0 0 0.483806 -0.201447 -0.0870416 -0.00321751
0 0 0 0 0 0 0 0 0 0.252058 0.080812 -0.00321751
0 0 0 0 0 0 0 0 0 0 0.0276943 -0.000517982
0 0 0 0 0 0 0 0 0 0 0 0.0002
V 12 -5.52489 -35.4601 4.03154 386.66 -1.16952 -2.41402 -25.0189 -1.0773 2.41449 -4.15137 0.34553 -59.6006
V 12 0.116751 0.989159 54.938 0.937331 47.5277 0.678989 0.730747 132.35 0.699642 9.45562 17.2364 0.385
M 12 12
0.0311973 -0.00388715 0.984021 -0.00188968 0.182776 0.0111903 -0.0110209 0.248842 -0.093359 0.285352 -0.0159444 0.00588619
0 0.000534344 -0.130414 -5.31055e-05 -0.00187312 -0.00160486 0.00157996 -0.0287152 -0.0217009 -0.0479767 -0.0290114 -0.00107193
0 0 32.2818 -0.00507341 2.63179 0.373864 -0.369023 9.13503 1.95071 11.3807 5.62052 0.24008
0 0 0 0.00532131 -0.0834584 -0.0039294 0.0035327 0.585233 0.0831266 0.219058 0.659424 0.00218559
0 0 0 0 10.4626 -0.0832467 0.0770943 10.7672 -16.0424 -1.27835 -6.52026 -0.0989823
0 0 0 0 0 0.0105333 -0.00994158 -0.6941 0.25173 -0.0530008 -0.480015 0.00279416
0 0 0 0 0 0 0.00939817 0.624544 -0.236463 0.0365085 0.426988 -0.00280439
0 0 0 0 0 0 0 108.366 -22.1698 30.2308 81.5295 0.133439
0 0 0 0 0 0 0 0 26.2241 2.08557 5.15536 0.188934
0 0 0 0 0 0 0 0 0 13.3512 29.8511 0.165301
0 0 0 0 0 0 0 0 0 0 84.1638 0.286555
0 0 0 0 0 0 0 0 0 0 0 0.0035
V 12 -38.708 73.8855 -6.75602 216.961 4.20605 18.359 28.493 3.41663 2.67563 20.5578 -5.85796 -75.5096
V 12 -0.638524 -0.742665 47.186 0.964699 37.1448 -0.567201 0.819713 92.0376 0.895497 8.14666 5.12249 0.38
M 12 12
0.0849295 -0.0800523 0.676257 0.030871 0.00950862 0.0142014 0.00841607 0.35667 -0.00629488 0.370179 0.257338 0.0099804
0 0.078012 -0.545215 -0.0282092 0.0289632 -0.00834056 -0.00442137 -0.0432412 0.0261499 -0.328391 -0.179574 -0.00873421
0 0 9.93124 0.357925 4.66285 0.370076 0.252286 13.3464 0.316612 3.70847 4.79615 0.0802977
0 0 0 0.0167778 0.22767 0.0118358 0.00812397 0.2277 -0.0190081 0.143042 0.146618 0.00232512
0 0 0 0 9.05026 0.274272 0.207614 4.23238 -0.655921 0.400151 2.21694 -0.0506975
0 0 0 0 0 0.0169423 0.0119398 0.634122 0.0165731 0.103671 0.196546 0.00155624
0 0 0 0 0 0 0.0084721 0.434866 0.00974229 0.0658633 0.13484 0.000787621
0 0 0 0 0 0 0 35.064 2.30699 3.90573 8.27455 0.120138
0 0 0 0 0 0 0 0 0.267796 0.128784 0.337826 0.0115365
0 0 0 0 0 0 0 0 0 1.77866 1.63538 0.0485104
0 0 0 0 0 0 0 0 0 0 2.51658 0.0376901
0 0 0 0 0 0 0 0 0 0 0 0.0018
V 12 -62.3217 -3.04281 -2.02307 240.69 1.44273 6.79239 36.4621 1.33272 2.46843 25.1566 -8.58086 -43.9501
V 12 0.776615 0.585384 64.4908 0.591912 53.8525 0.994566 0.068018 164.27 -12.595 13.7863 12.7074 0.535
M 12 12
0.0935925 -0.106095 3.86846 0.00430316 3.22224 -0.00218803 0.0157518 8.24276 -0.372575 0.211527 -0.439606 -0.00216241
0 0.123189 -4.94107 -0.00376449 -4.1794 0.00295353 -0.0246348 -10.451 0.48261 -0.279196 0.51579 0.00235265
0 0 341.73 0.101056 297.92 -0.273526 2.62915 740.543 -36.3439 17.7283 -15.5175 0.183215
0 0 0 0.000861708 0.0619288 -8.53894e-05 -0.00063734 0.292832 -0.0109841 -0.00265343 -0.00295858 0.000313353
0 0 0 0 260.618 -0.239564 2.34823 643.848 -31.7306 15.649 -13.2158 0.1586
0 0 0 0 0 0.000241617 -0.00230822 -0.603445 0.0300646 -0.0131478 0.0057983 -0.000275863
0 0 0 0 0 0 0.0246078 5.66804 -0.288296 0.140497 -0.060436 0.00216165
0 0 0 0 0 0 0 1616.03 -79.2222 37.2626 -30.4753 0.478155
0 0 0 0 0 0 0 0 3.90713 -1.84028 1.36584 -0.0250482
0 0 0 0 0 0 0 0 0 1.03875 -1.11224 0.00140616
0 0 0 0 0 0 0 0 0 0 2.63821 0.0294959
0 0 0 0 0 0 0 0 0 0 0 0.0009
V 12 -31.9542 82.4315 -9.83552 94.8984 6.51264 29.5118 9.78601 3.99923 -3.83964 45.7942 -13.7732 -95.0987
V 12 -0.868692 -0.438435 51.1691 1.05531 2.6586 0.0903437 -0.21898 136.553 -0.305897 10.4558 5.77095 0.385
M 12 12
0.0336443 -0.0760252 0.861637 -0.000108733 0.148433 -0.0664519 -0.12602 4.04524 0.234576 0.334564 0.192863 0.0038866
0 0.178954 -1.88182 -0.00039898 -0.314805 0.140935 0.267271 -8.36185 -0.557587 -0.756764 -0.473384 -0.00886783
0 0 45.9076 -0.128147 4.54224 -2.45966 -3.49039 206.186 3.98703 5.352 3.87775 0.064666
0 0 0 0.000672595 -0.00517841 0.00450873 0.00251529 -0.574057 0.0108171 0.0154833 0.00645726 0.000170368
0 0 0 0 0.727312 -0.335752 -0.60878 22.3611 0.913602 1.39748 0.725641 0.01609
0 0 0 0 0 0.162665 0.274446 -11.7613 -0.376566 -0.566806 -0.311675 -0.00657838
0 0 0 0 0 0 0.515223 -17.4813 -0.803515 -1.23699 -0.627398 -0.0141972
0 0 0 0 0 0 0 963.402 17.9398 26.9607 16.1459 0.317973
0 0 0 0 0 0 0 0 1.87604 2.57992 1.54387 0.0300221
0 0 0 0 0 0 0 0 0 3.77008 2.02115 0.0433647
0 0 0 0 0 0 0 0 0 0 1.325 0.0237826
0 0 0 0 0 0 0 0 0 0 0 0.0005
V 12 -86.3863 12.2898 -0.305307 303.997 -1.92801 10.219 12.2394 1.72185 1.14943 26.5568 -9.26211 -14.9363
V 12 0.0656807 0.994664 43.5374 1.53049 43.5205 0.00640072 0.999521 43.79 -0.0138746 0.786061 0.168906 0.1025
M 12 12
0.025301 -0.000544207 -0.811395 0.000236238 -0.808355 0.00584339 -5.84955e-05 -0.811931 -0.0865349 0.0135277 0.00261677 0.00033823
0 1.4252e-05 0.0156244 -2.82639e-06 0.0155235 -0.000198956 2.19515e-06 0.0157795 0.00172312 0.000174295 5.29046e-05 3.72815e-06
0 0 28.9776 0.0168483 28.9279 -0.116225 0.00159112 28.9414 3.22906 -0.879374 -0.206263 -0.0248865
0 0 0 0.000416758 0.0171986 0.000283869 6.4271e-06 0.017757 0.00468884 -0.00122853 -0.000575221 -8.43706e-05
0 0 0 0 28.8794 -0.114249 0.00157628 28.8903 3.2254 -0.88575 -0.208068 -0.0250826
0 0 0 0 0 0.00366748 -3.60873e-05 -0.119917 -0.0120029 -0.0115224 -0.00303384 -0.000307837
0 0 0 0 0 0 5.71467e-07 0.00165688 0.000232905 0.000113601 2.37551e-05 1.81796e-06
0 0 0 0 0 0 0 28.9148 3.23382 -0.856803 -0.201492 -0.0244589
0 0 0 0 0 0 0 0 0.380001 -0.0955787 -0.0243592 -0.00307876
0 0 0 0 0 0 0 0 0 0.0998066 0.0243309 0.00260778
0 0 0 0 0 0 0 0 0 0 0.00612781 0.000670961
0 0 0 0 0 0 0 0 0 0 0 7.5e-05
V 12 -17.8718 45.8351 2.39734 378.912 -0.0721122 0.822638 22.9365 -0.333549 3.75217 6.95251 -2.788 -110.803
V 12 -0.316493 0.94058 46.5157 0.710328 29.1481 0.0472818 0.99435 89.4746 -4.89048 5.69387 4.01352 0.25
M 12 12
0.0529535 0.0198291 -0.430744 -0.0124459 -1.2563 0.0348636 -0.00267616 -0.0920761 -0.107183 -0.055066 -0.142378 -0.00126644
0 0.00761095 -0.265252 -0.00372911 -0.482586 0.011599 -0.000856653 -0.256733 -0.0455328 -0.028604 -0.0491365 -0.00103348
0 0 75.1476 -0.210796 37.7582 0.673629 -0.0515556 129.675 3.05719 6.16942 3.23523 0.357294
0 0 0 0.010852 0.556934 -0.0132842 0.00148449 -1.02263 -0.014869 -0.00761269 0.123108 -0.00197871
0 0 0 0 62.5629 -0.512694 0.0664574 23.6932 1.60829 3.75926 9.91041 0.118972
0 0 0 0 0 0.0358721 -0.0028168 1.72965 -0.0370868 0.0395776 -0.0798266 0.0039096
0 0 0 0 0 0 0.00025406 -0.166707 0.000686923 -0.00271654 0.0131261 -0.000353586
0 0 0 0 0 0 0 267.654 6.36396 10.0711 -3.2687 0.682868
0 0 0 0 0 0 0 0 0.425645 0.265684 -0.107409 0.0167055
0 0 0 0 0 0 0 0 0 0.517082 0.379511 0.0285195
0 0 0 0 0 0 0 0 0 0 1.92601 0.00196841
0 0 0 0 0 0 0 0 0 0 0 0.0018
V 12 -42.9416 59.5993 -0.998314 170.702 0.897832 6.69731 30.6187 1.1504 0.28149 22.2682 -7.0215 -77.9223
V 12 -0.454688 0.842232 33.1338 0.919161 33.1338 -0.60075 0.787936 34.0438 -0.380803 0.994689 0.257463 0.095
M 12 12
0.262266 0.137965 0.48634 0.122594 0.48634 0.0959503 0.0754858 1.27174 -0.411405 0.35495 0.111878 0.0163621
0 0.0733522 0.260348 0.0656273 0.260348 0.0513642 0.0404091 0.654085 -0.211395 0.175354 0.054106 0.00835935
0 0 26.6071 0.845841 26.6071 0.620551 0.570832 27.4983 0.680926 -0.993889 -0.382865 0.100565
0 0 0 0.0735517 0.845841 0.0565784 0.0464803 1.17374 -0.151239 0.111473 0.0319328 0.00899285
0 0 0 0 26.6071 0.620551 0.570832 27.4983 0.680926 -0.993889 -0.382865 0.100565
0 0 0 0 0 0.0435757 0.0356894 0.877916 -0.120654 0.089807 0.0259012 0.00692269
0 0 0 0 0 0 0.0294509 0.771849 -0.0903681 0.065549 0.0185661 0.00567681
0 0 0 0 0 0 0 31.2358 -0.701811 0.381593 0.0809312 0.154509
0 0 0 0 0 0 0 0 0.755822 -0.717782 -0.237241 -0.023324
0 0 0 0 0 0 0 0 0 0.744891 0.255722 0.0213502
0 0 0 0 0 0 0 0 0 0 0.0891023 0.00692609
0 0 0 0 0 0 0 0 0 0 0 0.0013
V 12 -30.2222 42.7303 1.32172 228.327 0.285969 -4.48971 18.5821 -0.0939902 2.51588 8.56682 -3.27608 -78.1892
V 12 0.426123 -0.902218 42.4911 1.00884 42.4911 0.532368 -0.845448 42.6953 -0.21446 0.608858 0.0889538 0.0925
M 12 12
0.0142458 0.00699119 -0.800867 -0.00882893 -0.800867 0.00747875 0.00468643 -0.801737 0.00921448 0.0323278 0.00526008 0.000883731
0 0.00344007 -0.384225 -0.00423578 -0.384225 0.00358802 0.00224837 -0.384855 0.00474343 0.0152999 0.00246304 0.000447254
0 0 54.0244 0.571373 54.0244 -0.483622 -0.303896 53.9083 -0.276482 -2.17958 -0.367082 -0.0266136
0 0 0 0.00721138 0.571373 -0.00612261 -0.00380485 0.568596 -0.00439563 -0.0329811 -0.00614072 -0.000779043
0 0 0 0 54.0244 -0.483622 -0.303896 53.9083 -0.276482 -2.17958 -0.367082 -0.0266136
0 0 0 0 0 0.00519847 0.00322999 -0.481247 0.00374416 0.0280752 0.00523386 0.000667378
0 0 0 0 0 0 0.00200818 -0.30246 0.00229929 0.0172811 0.0032068 0.000401292
0 0 0 0 0 0 0 53.7986 -0.280258 -2.15762 -0.362114 -0.0263529
0 0 0 0 0 0 0 0 0.0128786 0.0174123 0.00291956 0.00145604
0 0 0 0 0 0 0 0 0 0.176377 0.035174 0.00485153
0 0 0 0 0 0 0 0 0 0 0.00719769 0.00101249
0 0 0 0 0 0 0 0 0 0 0 0.000275
V 12 12.8919 -33.8272 3.40329 248.816 -0.69706 2.46266 -19.495 -1.06444 1.37491 -3.33833 0.734071 -52.8205
V 12 -0.776845 -0.625498 53.3187 0.773192 38.3575 0.235755 -0.96415 78.1426 -2.20168 4.2411 5.57318 0.245
M 12 12
0.00852157 -0.0103167 0.0591229 -0.00714472 -0.093666 0.01409 0.00488685 0.139842 0.0155436 -0.0618657 -0.287751 -0.00234812
0 0.0125382 -0.0467626 0.0074002 0.0939898 -0.0158806 -0.00560917 -0.101783 -0.0180668 0.0763801 0.357456 0.00283771
0 0 106.444 -0.240436 80.9168 1.17143 0.301372 127.829 0.169533 -4.29264 -23.5846 0.34909
0 0 0 0.0406531 1.02817 -0.0401291 -0.0115439 -1.42304 -0.0341298 -0.00913043 -0.122584 0.00389068
0 0 0 0 98.8055 -0.171853 -0.0704924 61.6267 -0.793888 -4.46301 -26.3098 0.389298
0 0 0 0 0 0.0544507 0.0161429 2.34405 0.0424587 -0.0891712 -0.384799 -0.00216585
0 0 0 0 0 0 0.00489072 0.618861 0.0133286 -0.0314319 -0.137743 -0.000948452
0 0 0 0 0 0 0 187.895 0.987748 -3.50403 -17.9076 0.317623
0 0 0 0 0 0 0 0 0.0412214 -0.0735379 -0.291171 -0.00564795
0 0 0 0 0 0 0 0 0 0.724461 3.67301 -0.00137381
0 0 0 0 0 0 0 0 0 0 18.8372 -0.0253708
0 0 0 0 0 0 0 0 0 0 0 0.0021
V 12 -39.0612 -11.3373 2.43606 203.826 -0.931492 3.05023 -18.3931 -0.29605 0.417576 6.67359 -2.22274 -32.8837
V 12 0.981236 -0.173579 57.4221 0.895119 52.2756 0.681634 0.724577 110.889 -0.331935 6.29565 6.90851 0.3075
M 12 12
0.000978813 0.00512169 0.150445 0.0038076 0.15262 -0.00370616 0.00338241 0.372408 0.00279737 0.0306574 0.0650609 0.000611099
0 0.027206 0.84034 0.0204286 0.871739 -0.0203223 0.0184329 1.99437 0.0156071 0.145946 0.355372 0.00348622
0 0 44.8081 0.378314 42.4863 -0.358367 0.35068 99.7197 0.381736 5.03041 17.0635 0.196867
0 0 0 0.0204923 0.509253 -0.0217325 0.018979 0.82965 0.0153255 0.0603814 0.177014 0.0015282
0 0 0 0 43.0405 -0.531371 0.489174 89.7441 0.49826 3.59607 16.1157 0.18911
0 0 0 0 0 0.023669 -0.0204709 -0.690674 -0.0167577 -0.0331358 -0.165125 -0.0015015
0 0 0 0 0 0 0.0177836 0.712909 0.0145645 0.0400751 0.159867 0.00145981
0 0 0 0 0 0 0 237.226 0.740188 15.5069 39.0911 0.42648
0 0 0 0 0 0 0 0 0.0123835 0.026891 0.16091 0.00166083
0 0 0 0 0 0 0 0 0 1.80619 2.2721 0.0186428
0 0 0 0 0 0 0 0 0 0 6.64574 0.0738396
0 0 0 0 0 0 0 0 0 0 0 0.000875
V 12 5.40745 13.8098 -2.68708 197.346 2.64766 12.7611 30.3406 1.60241 1.87592 14.0904 -4.14369 -81.3486
V 12 -0.930148 -0.0330482 51.2067 0.808894 4.86803 -0.973607 0.111803 119.138 7.24667 7.42533 3.23527 0.2875
M 12 12
0.0352562 -0.129526 0.596513 0.0116492 0.0315672 -0.00631343 -0.0267441 1.34989 0.134008 0.125317 0.0442492 0.00264051
0 0.499675 -2.11408 -0.0466675 -0.0920962 0.0184192 0.0780251 -4.78149 -0.477013 -0.472345 -0.218809 -0.010398
0 0 10.4849 0.143848 0.463363 -0.0926726 -0.392568 24.1815 2.25763 2.17978 0.577589 0.045234
0 0 0 0.0162529 0.0495148 -0.00990296 -0.0419496 0.195961 0.0590176 0.0148769 0.0203399 0.000167744
0 0 0 0 0.20898 -0.0417961 -0.177051 0.579975 0.198205 -0.00366036 -0.0292615 -0.00131966
0 0 0 0 0 0.00835921 0.0354102 -0.115995 -0.039641 0.000732072 0.0058523 0.000263932
0 0 0 0 0 0 0.15 -0.491363 -0.167922 0.00310111 0.0247907 0.00111803
0 0 0 0 0 0 0 57.1671 4.92169 5.24373 1.33844 0.111259
0 0 0 0 0 0 0 0 0.544416 0.42699 0.127013 0.00839133
0 0 0 0 0 0 0 0 0 0.520273 0.193768 0.0117133
0 0 0 0 0 0 0 0 0 0 0.189374 0.00519831
0 0 0 0 0 0 0 0 0 0 0 0.000275
V 12 -73.9781 19.4842 2.09578 251.558 -2.73093 -7.33305 21.4949 0.948813 4.09528 16.3162 -6.2654 -25.7316
V 12 0.24977 -0.963507 56.884 0.875305 36.4624 0.994313 0.0466005 93.906 -2.481 4.24797 3.33351 0.22
M 12 12
0.0348817 0.00859054 -1.1879 -0.0119493 -0.247835 0.00200889 -0.0161239 -1.94124 0.0189749 0.154426 0.0755713 -0.00492869
0 0.00219783 -0.24316 -0.0023142 -0.0548922 0.000417326 -0.00243621 -0.396485 0.00453489 0.0444695 0.0253952 -0.000970794
0 0 76.8499 0.815057 15.5753 -0.11466 1.4209 124.887 -1.10317 1.03092 3.27894 0.364731
0 0 0 0.00903875 0.147447 -0.00127907 0.0170342 1.33313 -0.00924638 0.00727908 0.0340218 0.00377692
0 0 0 0 3.98653 -0.0199166 0.20334 24.8884 -0.336999 0.626034 0.88113 0.0792617
0 0 0 0 0 0.000188633 -0.00237643 -0.188256 0.00120732 0.0029303 -0.00196371 -0.000510655
0 0 0 0 0 0 0.0364948 2.3477 -0.00853273 0.0305964 0.0783661 0.00643354
0 0 0 0 0 0 0 203.179 -1.73582 1.31062 5.10607 0.589335
0 0 0 0 0 0 0 0 0.031392 -0.0617128 -0.0692369 -0.00592187
0 0 0 0 0 0 0 0 0 2.06122 1.50647 0.0155519
0 0 0 0 0 0 0 0 0 0 1.19346 0.0228297
0 0 0 0 0 0 0 0 0 0 0 0.0018
V 12 -6.81489 -32.8357 2.86046 220.026 -1.32994 9.78696 13.8833 -0.218079 0.637955 5.39539 -1.46656 -46.5825
V 12 -0.0437891 -0.995222 45.4825 1.47768 5.94921 0.294793 0.0694812 87.0315 1.52145 4.07295 8.98949 0.195
M 12 12
0.0304075 -0.00103396 0.159901 -0.0128643 0.368575 0.173041 -0.111201 0.433399 0.477384 0.014085 0.147656 -0.00209981
0 5.57776e-05 -0.0148651 0.000532282 -0.0165011 -0.00587208 0.0088201 -0.0234829 -0.011906 0.00119955 -0.000238553 -2.69127e-06
0 0 9.36643 -0.0960947 -8.07592 0.96607 -3.1254 28.4317 -9.37842 0.915194 -1.69541 0.042177
0 0 0 0.00593158 -0.216064 -0.0729346 0.0693864 -0.145152 -0.217148 0.00745752 -0.0414789 0.000616001
0 0 0 0 38.4274 1.92236 -1.65343 -55.2367 32.7668 -4.66675 1.66987 -0.0654721
0 0 0 0 0 0.985638 -0.633357 2.78495 2.57438 0.104626 0.838871 -0.0117098
0 0 0 0 0 0 1.65135 -4.96625 -0.131937 0.268349 0.64472 -0.0115136
0 0 0 0 0 0 0 126.331 -47.1222 7.94391 -1.42314 0.103159
0 0 0 0 0 0 0 0 31.7055 -3.20903 3.99232 -0.0939912
0 0 0 0 0 0 0 0 0 0.757093 0.348834 0.000377499
0 0 0 0 0 0 0 0 0 0 1.84547 -0.0286899
0 0 0 0 0 0 0 0 0 0 0 0.0005
V 12 -15.3537 -55.7116 6.88296 406.826 -5.89644 -3.39878 15.1903 -0.724092 2.77212 -12.8139 4.27525 4.98575
V 12 -0.0855098 0.991295 60.8098 1.00998 59.8401 -0.535396 0.84341 145.153 -0.0310887 7.43448 10.3095 0.3675
M 12 12
0.0394768 0.00475785 1.44357 0.0147329 1.44898 0.0139336 0.00902072 3.3876 0.00174021 -0.00279291 0.394413 0.000575221
0 0.000612986 0.185738 0.00194216 0.168785 0.00163229 0.00106576 0.508611 -0.00046537 -0.00108562 0.0364594 0.000161896
0 0 227.689 0.325135 227.202 0.139119 0.11582 396.574 -6.09428 0.0766059 32.8758 0.326271
0 0 0 0.0066031 0.246071 0.00554922 0.00359509 1.31378 0.00695152 -0.00481108 0.0671992 0.00017804
0 0 0 0 234.672 0.152503 0.12109 358.83 -5.95161 0.420216 38.4361 0.292519
0 0 0 0 0 0.00571503 0.00364125 0.571355 0.0138044 -0.000929844 0.107163 -0.000484525
0 0 0 0 0 0 0.00232453 0.42544 0.0079813 -0.00071847 0.0697841 -0.000255219
0 0 0 0 0 0 0 889.323 -10.0044 -1.5707 36.5651 0.67768
0 0 0 0 0 0 0 0 0.218645 -0.00128774 -0.549312 -0.011207
0 0 0 0 0 0 0 0 0 0.0153194 0.232729 -0.00114322
0 0 0 0 0 0 0 0 0 0 9.80036 0.0150597
0 0 0 0 0 0 0 0 0 0 0 0.000675
V 12 -49.5342 82.4385 -8.71418 233.865 5.62957 5.67597 28.8584 4.24355 3.15568 21.646 -7.68012 -87.2371
V 12 0.157096 -0.976327 53.4501 1.07577 36.6136 0.349972 0.932218 79.9555 -3.33794 5.15241 2.60165 0.2675
M 12 12
0.084035 0.0186076 1.8837 0.00141652 1.6145 -0.0466654 0.0163528 2.67893 -0.00669303 0.052503 -0.0217501 0.00515597
0 0.00439286 0.36222 -0.00165683 0.294655 -0.00999968 0.00340954 0.55473 -0.00781553 0.0173889 0.00448184 0.000452339
0 0 53.341 0.435641 50.0564 -1.12923 0.415396 66.7255 1.02728 0.19874 -2.28333 0.25538
0 0 0 0.0150194 0.608075 -0.00487105 0.00245715 0.212218 0.0355091 -0.0218351 -0.0596791 0.00517811
0 0 0 0 66.7877 -1.25487 0.472696 41.7045 -0.374404 2.86104 -1.23597 0.276277
0 0 0 0 0 0.0300441 -0.0107913 -1.28831 0.01853 -0.0641736 0.00592312 -0.0039484
0 0 0 0 0 0 0.00391244 0.455052 -0.00518839 0.0221534 -0.00460877 0.00163247
0 0 0 0 0 0 0 107.125 2.1724 -1.92238 -3.16413 0.245545
0 0 0 0 0 0 0 0 0.284309 -0.392898 -0.320403 0.0141326
0 0 0 0 0 0 0 0 0 0.62987 0.380905 -0.008609
0 0 0 0 0 0 0 0 0 0 0.405164 -0.0237007
0 0 0 0 0 0 0 0 0 0 0 0.002075
V 12 -15.0197 -34.955 3.42388 262.493 -1.47576 6.71518 37.3681 -0.545039 0.954194 11.3575 -3.37294 -55.9466
V 12 0.822294 -0.551128 52.2271 0.81778 37.9013 -0.0223183 -0.995935 80.2596 0.895992 5.01664 6.55857 0.2525
M 12 12
0.0208836 0.0350742 -0.207062 0.00899897 0.25059 0.00494219 -0.00032755 -0.503197 -0.736249 0.156813 0.43114 3.27205e-05
0 0.0594754 -0.24663 0.0149709 0.473517 0.0115034 -0.000621274 -0.640199 -1.28492 0.284849 0.822242 0.000595599
0 0 40.3166 -0.395058 8.05254 0.997846 -0.0157974 88.5706 -0.747366 6.75191 27.7878 0.235412
0 0 0 0.00779566 0.0786797 -0.00527204 -3.50128e-05 -0.920285 -0.312981 0.000126756 -0.0408239 -0.00205264
0 0 0 0 7.95563 0.383576 -0.010893 15.6592 -13.2886 4.12998 15.1085 0.0585063
0 0 0 0 0 0.0304429 -0.00062877 2.14441 -0.433749 0.263684 0.998663 0.00633759
0 0 0 0 0 0 1.60586e-05 -0.0326012 0.0174314 -0.00656116 -0.0236487 -0.000112136
0 0 0 0 0 0 0 195.547 1.6047 14.3917 58.9348 0.518529
0 0 0 0 0 0 0 0 30.0957 -7.23257 -23.1517 -0.0432929
0 0 0 0 0 0 0 0 0 2.98156 10.1767 0.0515524
0 0 0 0 0 0 0 0 0 0 36.3644 0.194533
0 0 0 0 0 0 0 0 0 0 0 0.001475
V 12 18.4702 -12.471 2.96634 196.719 -0.890073 -2.55766 -18.6823 -0.529183 0.897891 1.94864 -0.238543 -50.575
V 12 0.72713 0.683693 57.1052 1.26935 54.793 0.0681552 0.994559 64.5825 -1.89759 1.96854 0.546347 0.145
M 12 12
0.00777019 -0.00769131 -0.624999 0.00206644 -0.543557 0.0137079 -0.000328333 -0.861637 -0.0149567 -0.00670782 0.0145335 -0.00254463
0 0.00761322 0.618654 -0.00204546 0.538039 -0.0135687 0.000325 0.85289 0.0148049 0.00663972 -0.0143859 0.0025188
0 0 51.9897 -0.208693 44.8634 -1.06998 0.0226925 72.3971 0.841215 0.901384 -1.23365 0.241505
0 0 0 0.00216801 -0.163749 0.00280964 1.10169e-05 -0.335965 0.0128546 -0.0186162 -0.00107563 -0.00179674
0 0 0 0 38.9279 -0.937698 0.0205986 61.8462 0.931359 0.584167 -1.16392 0.19916
0 0 0 0 0 0.0248038 -0.000650152 -1.45981 -0.0336631 -0.00455663 0.0247483 -0.00377905
0 0 0 0 0 0 2.19909e-05 0.029377 0.00150399 -0.000588542 -0.000547953 2.54663e-05
0 0 0 0 0 0 0 102.734 0.58575 1.81664 -1.37818 0.359637
0 0 0 0 0 0 0 0 0.214454 -0.172752 -0.105136 -0.00576455
0 0 0 0 0 0 0 0 0 0.191454 0.0646145 0.0128594
0 0 0 0 0 0 0 0 0 0 0.10492 -0.00373607
0 0 0 0 0 0 0 0 0 0 0 0.0017
V 12 10.3343 34.706 3.04544 295.507 0.240784 -0.127427 26.649 -0.634006 2.41131 8.47404 -2.65283 -127.094
V 12 -0.652953 0.678683 63.9808 1.29194 61.9284 -0.0806341 0.990022 71.411 1.95437 2.81612 1.18916 0.4225
M 12 12
0.529368 0.419893 5.65112 0.0467522 6.05555 0.209656 0.00833685 5.63188 -0.881283 0.631111 -0.0635047 0.401133
0 0.374971 10.2093 0.0392367 10.4683 0.161685 0.00694957 10.4241 -0.913069 0.474249 -0.156813 0.426657
0 0 1638.66 1.79674 1596.92 4.83287 0.527761 1790.4 -40.4055 36.4875 2.11203 38.643
0 0 0 0.0158861 2.022 0.0274681 0.00141449 1.71771 -0.104687 0.103761 0.0479507 0.0908368
0 0 0 0 1563.01 4.94829 0.516332 1736.43 -40.999 35.5008 2.20022 38.4215
0 0 0 0 0 0.106424 0.00540343 5.32722 -0.319421 0.449226 0.0890945 0.25018
0 0 0 0 0 0 0.000413087 0.603124 -0.0166935 0.0283643 0.00891636 0.0175228
0 0 0 0 0 0 0 1971.91 -41.2578 42.1979 3.70314 41.7748
0 0 0 0 0 0 0 0 2.67424 -0.806703 0.57766 -1.26321
0 0 0 0 0 0 0 0 0 2.6792 0.834504 1.35727
0 0 0 0 0 0 0 0 0 0 0.828884 0.213875
0 0 0 0 0 0 0 0 0 0 0 1.15755
V 12 -48.0427 39.3168 1.94168 319.386 0.598383 2.22858 29.2673 -0.195782 4.40121 14.3295 -5.13821 -93.1343
V 12 -0.962961 -0.0204892 37.0606 0.1267 5.61192 0.0553648 0.289691 69.423 3.16079 4.55237 8.938 0.1825
M 12 12
0.00715537 -0.033236 0.604604 0.00280202 0.0921872 0.130801 -0.0322888 1.15234 0.0695716 0.0393536 -0.00681741 0.0010701
0 0.281989 1.32443 -0.0119486 -0.965258 -0.262047 0.119002 3.3798 -0.588997 -0.271414 0.426431 -0.00152003
0 0 194.042 0.272635 -5.81177 21.103 -4.25363 393.436 -2.85471 2.55372 13.4429 0.216217
0 0 0 0.00110637 0.0321677 0.053942 -0.0129796 0.526173 0.0250041 0.0149777 0.000810388 0.000449944
0 0 0 0 5.02563 -0.24281 -0.502882 -16.3839 1.96366 1.75316 -1.23566 0.0051119
0 0 0 0 0 3.46892 -0.608861 43.0499 0.567464 0.217165 0.789962 0.0271478
0 0 0 0 0 0 0.18314 -8.07982 -0.242321 -0.276311 -0.135782 -0.00647155
0 0 0 0 0 0 0 802.435 -7.16744 3.32861 27.7123 0.428904
0 0 0 0 0 0 0 0 1.23194 0.538752 -0.905436 0.00302558
0 0 0 0 0 0 0 0 0 0.761246 -0.0274122 0.00672504
0 0 0 0 0 0 0 0 0 0 1.39487 0.0115577
0 0 0 0 0 0 0 0 0 0 0 0.000275
V 12 -40.414 -0.846006 3.76482 59.3111 -2.92393 -1.61484 21.5935 -0.283848 1.74163 2.21325 0.139982 -1.91616
V 12 0.745967 -0.0447214 47.5546 0.0472112 46.5383 0.999161 -0.0330946 48.7735 -0.919019 1.43799 2.22858 0.105
M 12 12
0.74213 -0.0425573 -3.5898 -0.0114153 -0.629304 0.00117796 0.0300718 -6.51787 2.21437 -2.19718 -6.25273 -0.0150807
0 0.024 -0.62335 0.00143387 -0.798131 -0.000150159 -0.00592014 -0.463961 -0.0910333 0.127843 0.375236 -0.000894427
0 0 51.2526 0.0231856 37.0917 -0.00168853 0.0401286 65.9006 -12.6125 11.0463 29.7324 0.13062
0 0 0 0.000205883 -0.0224991 -2.19644e-05 -0.000639218 0.067755 -0.0322262 0.0333583 0.0966488 0.000178696
0 0 0 0 34.7425 0.00301591 0.160662 40.0584 -3.77868 2.27278 4.78309 0.0710356
0 0 0 0 0 2.5333e-06 7.39075e-05 -0.00623818 0.00316081 -0.0032906 -0.00993518 -2.13619e-05
0 0 0 0 0 0 0.00232895 -0.0749593 0.0764388 -0.0834542 -0.255054 -0.000389944
0 0 0 0 0 0 0 92.1169 -21.4042 19.753 54.406 0.190031
0 0 0 0 0 0 0 0 6.80192 -6.67998 -18.6624 -0.0453352
0 0 0 0 0 0 0 0 0 6.62513 18.545 0.042049
0 0 0 0 0 0 0 0 0 0 52.7028 0.125057
0 0 0 0 0 0 0 0 0 0 0 0.0005
V 12 33.8232 2.20635 2.8922 -14.4478 0.639046 6.39629 4.77371 -1.13631 0.0201355 3.35489 0.119112 -78.9471
V 12 -0.0276079 -0.99461 59.9495 0.952133 58.6438 0.570163 -0.819327 82.7057 0.0731404 5.1507 7.51047 0.5325
M 12 12
0.0399156 -8.01885e-05 -0.00977587 -0.019741 0.129948 0.0182814 0.0133066 0.0363877 0.264113 0.00586653 0.574095 -0.0101908
0 4.04572e-05 0.0258348 -0.000126005 0.0373632 7.75422e-05 7.37125e-05 -0.0121761 -0.0328679 -0.0332044 -0.109601 0.000782545
0 0 26.251 -0.152561 33.9645 0.154895 0.116762 -2.27158 -24.2641 -25.9379 -79.7644 0.184421
0 0 0 0.010715 -0.271176 -0.00996612 -0.00728787 0.002772 0.02071 0.158224 0.215493 0.00352408
0 0 0 0 45.6312 0.253995 0.193823 -5.58794 -32.8817 -35.7324 -109.727 0.366326
0 0 0 0 0 0.00946162 0.0068733 0.0310704 -0.00164193 -0.132897 -0.134644 -0.00522432
0 0 0 0 0 0 0.00500554 0.0134429 -0.0125279 -0.107568 -0.137122 -0.00325373
0 0 0 0 0 0 0 6.77333 7.98386 7.33289 27.3572 -0.411378
0 0 0 0 0 0 0 0 28.9473 28.3514 94.4671 -0.569131
0 0 0 0 0 0 0 0 0 29.59 94.2971 -0.481865
0 0 0 0 0 0 0 0 0 0 310.724 -1.87741
0 0 0 0 0 0 0 0 0 0 0 0.026675
V 12 -16.4286 -28.5509 0.648566 225.281 0.752478 8.46256 -13.4458 0.0147482 1.44794 5.64283 -1.6631 -22.3425
V 12 0.989354 -0.0460463 50.3481 0.0577831 8.42865 0.328389 0.0159828 94.8818 1.48455 6.90468 14.5298 0.4075
M 12 12
0.00104395 0.00823759 0.149452 -0.00036111 0.0433156 0.044291 0.0178876 0.0591334 -0.0322828 -0.1913 -0.523941 0.00328502
0 0.0751874 2.59874 -0.00190428 0.977049 0.396738 0.125803 2.55599 -0.78777 -1.65895 -4.4483 0.00337894
0 0 281.259 0.371842 142.266 16.5398 1.71289 381.385 -78.8995 -37.7814 -117.375 -1.27833
0 0 0 0.00158479 0.267502 0.00606196 -0.00154401 0.557873 -0.038293 0.101343 0.158619 0.00332028
0 0 0 0 77.8314 7.55348 0.773645 195.388 -34.5818 -9.26833 -40.2589 -0.202769
0 0 0 0 0 2.30884 0.762896 16.9627 -3.84203 -8.20184 -23.6045 0.115933
0 0 0 0 0 0 0.35644 -0.434371 0.249828 -2.8359 -8.47569 0.119201
0 0 0 0 0 0 0 539.64 -111.164 -27.7491 -92.2559 -2.60315
0 0 0 0 0 0 0 0 28.8915 13.7356 32.6326 1.07805
0 0 0 0 0 0 0 0 0 38.9998 100.85 -0.0372438
0 0 0 0 0 0 0 0 0 0 272.667 -0.922736
0 0 0 0 0 0 0 0 0 0 0 0.091475
V 12 35.8341 -15.3088 6.38201 12.6662 -4.37831 -5.88772 15.9328 -1.0548 -0.134875 -6.90526 4.63567 2.17223
V 12 0.415571 0.849692 64.4723 0.638665 38.7767 0.0785575 0.99379 129.564 5.20379 7.17519 4.22235 0.545
M 12 12
0.169987 -0.0831376 -1.68214 -0.0426701 -3.36821 -0.0458047 0.00362079 0.60341 -0.28652 0.122504 0.210121 0.00291536
0 0.0406613 0.822707 0.0208692 1.64734 0.0224023 -0.00177087 -0.295118 0.140132 -0.0599145 -0.102767 -0.00142586
0 0 16.646 0.422251 33.3309 0.453271 -0.0358303 -5.97118 2.83532 -1.21226 -2.0793 -0.0288496
0 0 0 0.0107111 0.845491 0.0114979 -0.000908892 -0.151468 0.0719223 -0.0307509 -0.0527446 -0.000731815
0 0 0 0 66.7398 0.907602 -0.0717445 -11.9563 5.67727 -2.42736 -4.16346 -0.0577667
0 0 0 0 0 0.0123426 -0.00097566 -0.162595 0.0772058 -0.0330099 -0.0566193 -0.000785575
0 0 0 0 0 0 7.71244e-05 0.0128529 -0.006103 0.00260938 0.00447567 6.20985e-05
0 0 0 0 0 0 0 2.14196 -1.01707 0.434857 0.745877 0.0103488
0 0 0 0 0 0 0 0 0.482942 -0.206485 -0.354168 -0.00491397
0 0 0 0 0 0 0 0 0 0.0882842 0.151427 0.002101
0 0 0 0 0 0 0 0 0 0 0.259731 0.00360368
0 0 0 0 0 0 0 0 0 0 0 5e-05
V 12 -26.7738 55.8115 -1.36864 157.44 1.08798 4.41654 40.0606 1.63931 3.70263 20.1494 -6.50109 -66.7932
V 12 -0.355074 -0.933914 43.8549 1.07162 7.14109 0.0789496 -0.348061 83.2347 0.0621775 5.24893 7.02273 0.33
M 12 12
0.00301799 -0.00114744 0.313123 -0.00014259 0.00543976 -0.070777 -0.0160541 0.528165 0.239244 0.047461 -0.0469832 -0.00233075
0 0.000436255 -0.119049 5.42126e-05 -0.00206819 0.0269094 0.00610377 -0.200808 -0.0909606 -0.0180447 0.017863 0.000886148
0 0 32.4872 -0.014794 0.564386 -7.34326 -1.66565 54.7983 24.8221 4.92418 -4.8746 -0.24182
0 0 0 6.73691e-06 -0.000257011 0.00334398 0.000758505 -0.0249541 -0.0113035 -0.00224238 0.0022198 0.00011012
0 0 0 0 0.00980486 -0.127572 -0.0289367 0.951989 0.431225 0.0855459 -0.0846846 -0.00420104
0 0 0 0 0 1.65984 0.376497 -12.3864 -5.61069 -1.11304 1.10184 0.05466
0 0 0 0 0 0 0.0853997 -2.80957 -1.27266 -0.252468 0.249926 0.0123984
0 0 0 0 0 0 0 92.432 41.8692 8.30595 -8.22232 -0.407894
0 0 0 0 0 0 0 0 18.9656 3.76237 -3.72449 -0.184765
0 0 0 0 0 0 0 0 0 0.746375 -0.73886 -0.0366534
0 0 0 0 0 0 0 0 0 0 0.73142 0.0362844
0 0 0 0 0 0 0 0 0 0 0 0.0018
V 12 -30.3923 -42.8372 5.32192 298.42 -4.65887 -2.03676 3.17694 -0.571773 1.28574 -2.31905 0.803894 15.8007
V 12 -0.433765 0.89836 56.2323 0.876871 12.0434 0.476463 -0.120352 103.683 -0.36495 5.55468 8.91303 0.36
M 12 12
0.00778106 0.00375702 -1.02437 0.0182585 -1.37764 -0.0594379 0.0228104 -0.940082 -0.379596 -0.00980811 -0.0478109 0.00374245
0 0.00181404 -0.494606 0.00881596 -0.665183 -0.0286991 0.0110138 -0.45391 -0.183284 -0.00473575 -0.023085 0.00180701
0 0 134.857 -2.40371 181.365 7.82492 -3.00297 123.761 49.9733 1.29122 6.29424 -0.492688
0 0 0 0.0428442 -3.23269 -0.139473 0.0535254 -2.20593 -0.890734 -0.023015 -0.11219 0.00878177
0 0 0 0 243.913 10.5235 -4.03861 166.442 67.2078 1.73653 8.46495 -0.662604
0 0 0 0 0 0.454034 -0.174244 7.18109 2.89965 0.0749221 0.365217 -0.0285878
0 0 0 0 0 0 0.0668696 -2.75588 -1.1128 -0.0287528 -0.140159 0.0109711
0 0 0 0 0 0 0 113.578 45.8615 1.18498 5.77635 -0.45215
0 0 0 0 0 0 0 0 18.5184 0.478484 2.33243 -0.182574
0 0 0 0 0 0 0 0 0 0.0123632 0.060266 -0.00471739
0 0 0 0 0 0 0 0 0 0 0.293774 -0.0229955
0 0 0 0 0 0 0 0 0 0 0 0.0018
V 12 -37.4944 38.0626 4.94527 247.606 -3.98544 0.14641 4.1165 -0.218079 1.36206 4.16442 -0.727375 -31.4672
V 12 0.228825 0.969316 45.242 1.39035 9.55633 0.827157 0.302994 85.2884 -0.0621775 5.70402 10.4161 0.305
M 12 12
0.0152786 -0.0036068 -0.570878 -0.00622944 0.43444 -0.0284572 0.0776866 -1.87998 -0.56004 -0.0207207 -0.0176894 -0.0026221
0 0.000851449 0.134766 0.00147057 -0.102557 0.00671784 -0.0183393 0.443804 0.132207 0.00489149 0.00417589 0.000618993
0 0 21.3306 0.23276 -16.2326 1.06329 -2.90272 70.2445 20.9256 0.774217 0.660954 0.0979732
0 0 0 0.00253988 -0.177131 0.0116026 -0.0316746 0.766511 0.228341 0.00844828 0.00721235 0.00106909
0 0 0 0 12.3531 -0.809166 2.20898 -53.4563 -15.9244 -0.589181 -0.502988 -0.0745579
0 0 0 0 0 0.053003 -0.144695 3.50156 1.0431 0.0385933 0.0329473 0.00488378
0 0 0 0 0 0 0.39501 -9.55906 -2.84761 -0.105358 -0.0899444 -0.0133325
0 0 0 0 0 0 0 231.325 68.9109 2.54961 2.17661 0.32264
0 0 0 0 0 0 0 0 20.5283 0.759518 0.648405 0.0961131
0 0 0 0 0 0 0 0 0 0.0281011 0.0239901 0.00355605
0 0 0 0 0 0 0 0 0 0 0.0204805 0.00303582
0 0 0 0 0 0 0 0 0 0 0 0.00045
V 12 -20.1039 37.1823 4.28742 366.959 -3.62222 6.40695 12.2317 -0.126816 2.1834 3.19474 -0.285825 -46.4311
V 12 0.487618 0.872194 43.3942 1.09937 43.3942 0.454012 0.890639 43.8087 0.0511813 1.29514 0.381767 0.106667
M 12 12
0.00343256 -0.00192763 0.0844534 0.00217316 0.0844534 -0.00192714 0.0010038 0.0781474 -0.0141199 -0.0395131 -0.0159104 -3.95112e-05
0 0.0010836 -0.0445122 -0.00119623 -0.0445122 0.00106081 -0.000552536 -0.0409522 0.00826596 0.0221452 0.00886628 3.92604e-05
0 0 9.83687 0.117774 9.83687 -0.104427 0.0544287 9.73131 0.548694 -1.09015 -0.573982 0.0444761
0 0 0 0.0019088 0.117774 -0.0016926 0.000881921 0.114193 -0.00151247 -0.0259936 -0.0115857 0.000351661
0 0 0 0 9.83687 -0.104427 0.0544287 9.73131 0.548694 -1.09015 -0.573982 0.0444761
0 0 0 0 0 0.00150088 -0.000782027 -0.101251 0.00134289 0.0230507 0.0102738 -0.000311767
0 0 0 0 0 0 0.000407472 0.0527746 -0.000695392 -0.0120071 -0.00535216 0.000162598
0 0 0 0 0 0 0 9.63766 0.580362 -1.01831 -0.54592 0.0448392
0 0 0 0 0 0 0 0 0.161573 0.148912 0.0443673 0.00541139
0 0 0 0 0 0 0 0 0 0.456639 0.185924 -0.000236266
0 0 0 0 0 0 0 0 0 0 0.0780408 -0.000886021
0 0 0 0 0 0 0 0 0 0 0 0.000266667
V 12 1.95857 41.3836 2.26481 260.904 0.407748 4.97691 22.6122 -0.530316 2.76977 8.5846 -2.68466 -111.108
V 12 -0.546576 -0.777895 61.3736 1.12423 46.7205 -0.0178412 0.998976 117.615 9.02693 11.2159 10.3209 0.403333
M 12 12
0.220956 -0.12157 0.689291 0.0228679 -0.0355328 0.0203874 0.000364972 0.722104 1.29263 0.39244 -0.791377 0.00985999
0 0.067447 -0.481436 -0.014571 -0.184632 -0.00985887 -0.000151497 -0.290878 -0.592655 -0.20705 0.398824 -0.00554708
0 0 20.8281 0.434903 37.2094 -0.184668 -0.00787444 -17.1994 -17.636 -0.397207 4.2195 0.053079
0 0 0 0.00944354 0.722764 -0.00272257 -0.000137666 -0.303901 -0.287997 0.00905391 0.0482839 0.00145492
0 0 0 0 74.5755 -0.499345 -0.0180676 -38.9833 -43.5037 -3.30296 13.4911 0.043012
0 0 0 0 0 0.00518116 0.000153478 0.325187 0.40729 0.0577628 -0.161921 0.000613092
0 0 0 0 0 0 4.95209e-06 0.0105794 0.0125913 0.00143067 -0.00453462 5.51608e-06
0 0 0 0 0 0 0 22.6182 26.791 2.9712 -9.5518 0.00897817
0 0 0 0 0 0 0 0 32.6999 4.17692 -12.3888 0.0317888
0 0 0 0 0 0 0 0 0 0.837776 -1.98619 0.0155747
0 0 0 0 0 0 0 0 0 0 5.22938 -0.0273221
0 0 0 0 0 0 0 0 0 0 0 0.000466667
V 12 -68.972 3.18951 -1.9823 288.929 1.87534 15.4578 50.975 1.45805 5.7303 30.9377 -9.93379 -75.1496
V 12 0.577748 -0.807783 62.618 0.972612 41.398 -0.247967 0.966681 120.378 -4.99129 10.8528 11.6847 0.393333
M 12 12
0.0256737 0.0198777 -1.25558 0.00735422 -0.205157 0.0159324 0.00393819 -1.93006 0.0849933 -0.0215726 0.380208 -0.000816093
0 0.0154084 -0.9612 0.00578252 -0.150315 0.0121714 0.00299684 -1.45113 0.065545 -0.0128673 0.302432 -0.000604604
0 0 67.9534 -0.306584 15.143 -0.877557 -0.223929 120.288 -4.31274 3.35335 -13.765 0.056242
0 0 0 0.00253682 -0.0173513 0.00376647 0.00087415 -0.342961 0.023081 0.0124493 0.148053 -0.000101404
0 0 0 0 5.62645 -0.204076 -0.0559166 35.6303 -0.800983 1.96575 0.730007 0.019264
0 0 0 0 0 0.0113651 0.0029146 -1.58679 0.0550896 -0.0479143 0.163399 -0.000751773
0 0 0 0 0 0 0.00075399 -0.419962 0.0137843 -0.0143052 0.0352166 -0.000203315
0 0 0 0 0 0 0 247.511 -7.00685 10.7109 -9.48452 0.125934
0 0 0 0 0 0 0 0 0.285093 -0.126205 1.14356 -0.00309098
0 0 0 0 0 0 0 0 0 0.82478 1.37547 0.00641733
0 0 0 0 0 0 0 0 0 0 9.192 -4.24416e-05
0 0 0 0 0 0 0 0 0 0 0 6.66667e-05
V 12 -11.5081 -12.9676 0.664301 217.381 0.0944379 6.05471 42.6414 0.611751 -0.258248 21.7391 -5.813 -67.6836
V 12 -0.704652 0.694281 67.0879 0.841367 39.5384 -0.159623 -0.976994 159.814 -10.7376 11.8439 10.446 0.476667
M 12 12
0.0345584 0.032023 0.0755172 -0.0134295 -0.248787 -0.0309642 0.0060184 -0.631605 -0.046038 0.123899 0.410299 0.00218417
0 0.0297606 0.0344762 -0.0110778 -0.192807 -0.0270517 0.00541485 -0.675936 -0.0440322 0.109338 0.383007 0.00173957
0 0 14.6384 -0.586394 -15.9249 -0.736563 0.079198 35.5859 0.458721 2.50118 -0.24915 0.120705
0 0 0 0.0266582 0.688666 0.0377771 -0.00488074 -1.1773 -0.00363665 -0.133992 -0.115346 -0.00531074
0 0 0 0 18.1369 0.933766 -0.113516 -34.7377 -0.262976 -3.26229 -1.73616 -0.138928
0 0 0 0 0 0.0586575 -0.00844484 -1.1425 0.0154003 -0.214094 -0.314675 -0.00731491
0 0 0 0 0 0 0.0013495 0.0586926 -0.00546522 0.0317554 0.0662258 0.000909411
0 0 0 0 0 0 0 105.957 2.26996 3.43226 -10.4251 0.25618
0 0 0 0 0 0 0 0 0.0829457 -0.0788607 -0.590868 0.00157049
0 0 0 0 0 0 0 0 0 0.787928 1.29444 0.0256966
0 0 0 0 0 0 0 0 0 0 4.96202 0.0167545
0 0 0 0 0 0 0 0 0 0 0 0.00106667
V 12 -79.2657 82.2158 -5.58274 203.156 2.9175 11.5176 -20.6847 3.06253 -2.82417 37.3673 -12.2612 -66.2319
V 12 0.290502 0.5645 67.2352 0.933692 40.3849 -0.0205627 -0.998613 155.64 10.645 13.1631 15.2814 0.48
M 12 12
0.273853 0.170197 0.290182 -0.0269221 -6.85859 -0.0214773 0.00053469 9.80414 1.98945 0.344512 -1.64906 0.0195692
0 0.516991 -1.84515 -0.00419718 -4.67079 0.0336162 0.000282079 -5.7534 2.80655 -0.379309 -2.42523 0.0113493
0 0 10.2844 -0.0902686 -5.25672 -0.254086 0.000813963 68.7407 -5.62581 3.28803 5.15025 0.0247397
0 0 0 0.00302876 0.661815 0.00354296 -5.40956e-05 -1.32494 -0.147719 -0.0519571 0.119432 -0.0019486
0 0 0 0 172.177 0.491271 -0.0133413 -233.782 -51.384 -8.0391 42.6907 -0.4893
0 0 0 0 0 0.00704807 -4.76699e-05 -2.12188 0.0232962 -0.0947922 -0.0306016 -0.00162757
0 0 0 0 0 0 1.0501e-06 0.0205892 0.00369256 0.000745129 -0.00304871 3.83076e-05
0 0 0 0 0 0 0 692.28 25.9903 29.4294 -18.6953 0.724008
0 0 0 0 0 0 0 0 20.4478 0.236924 -17.3268 0.13906
0 0 0 0 0 0 0 0 0 1.28976 -0.0537155 0.0257914
0 0 0 0 0 0 0 0 0 0 14.699 -0.115072
0 0 0 0 0 0 0 0 0 0 0 0.0014
V 12 -46.5895 72.4526 -4.24205 243.724 2.7014 11.0874 -8.51107 2.59957 4.68721 29.5403 -9.3939 -80.9054
V 12 -0.209438 -0.97543 60.1719 0.971297 57.0193 0.487059 -0.870033 136.121 -0.0570286 7.1614 8.36929 0.343333
M 12 12
0.0134198 -0.00280469 0.749448 0.0069702 1.00221 7.37967e-05 -6.84062e-05 2.01629 0.0394375 -0.0368928 -0.126895 -0.00100388
0 0.000594315 -0.163715 -0.00179434 -0.217292 0.000318187 0.000189695 -0.411679 -0.00782739 0.0065781 0.0225502 0.000153342
0 0 48.015 0.682911 62.7838 -0.286064 -0.156387 104.149 1.84153 -1.07535 -3.63301 -0.00694646
0 0 0 0.0176164 0.845309 -0.0137927 -0.00730728 0.644312 0.00328181 0.0277847 0.0986994 0.00181962
0 0 0 0 82.3825 -0.31542 -0.173841 141.23 2.5461 -1.66587 -5.65717 -0.0206507
0 0 0 0 0 0.0136682 0.00718557 0.409276 0.0172158 -0.0465957 -0.163364 -0.00231894
0 0 0 0 0 0 0.00377842 0.199073 0.00873628 -0.0242033 -0.084876 -0.00121118
0 0 0 0 0 0 0 314.543 6.42061 -6.89463 -23.8046 -0.218229
0 0 0 0 0 0 0 0 0.137039 -0.166119 -0.575224 -0.00582741
0 0 0 0 0 0 0 0 0 0.258895 0.900991 0.0106123
0 0 0 0 0 0 0 0 0 0 3.13585 0.0370254
0 0 0 0 0 0 0 0 0 0 0 0.000466667
V 12 -44.7152 -0.269373 -6.53928 238.988 4.11896 14.8743 -10.4861 3.11807 1.7694 15.4584 -5.87557 -38.594
V 33 -364.388 -326.183 -263.785 -468.398 -410.601 -363.741 -191.042 -163.15 -183.608 -145.725 -203.241 -305.709 -166.423 -431.859 -407.989 -234.162 -138.279 -285.612 -325.139 -86.2547 -69.6601 -174.1 -119.955 -221.671 -260.311 -239.724 -350.068 -218.701 -407.008 -258.397 -422.869 -374.015 -286.058
M 12 12
38.7887 -5.06163 0.762521 -13.6259 -0.220786 -2.75772 -0.538112 -0.313659 -0.418746 -4.047 1.68285 -9.15308
-5.06163 46.0395 -0.814726 -1.48431 0.68798 0.385743 -4.96499 0.280534 0.145475 4.60175 -1.48201 -24.5281
0.762521 -0.814726 0.414778 0.468638 -0.218013 -0.430306 0.179966 -0.13739 -0.0117663 -0.25093 0.1309 -1.12977
-13.6259 -1.48431 0.468638 261.616 -0.885737 1.68163 -3.89785 0.12344 1.59731 -1.73942 -0.241116 -10.3746
-0.220786 0.68798 -0.218013 -0.885737 0.175911 0.278387 -0.11062 0.0557183 0.0193273 0.307557 -0.117114 -1.00638
-2.75772 0.385743 -0.430306 1.68163 0.278387 11.4918 0.855805 0.0538267 0.00460238 2.21022 -0.480491 -2.70173
-0.538112 -4.96499 0.179966 -3.89785 -0.11062 0.855805 29.1711 0.0360441 0.569629 0.794456 -0.0120204 -4.88447
-0.313659 0.280534 -0.13739 0.12344 0.0557183 0.0538267 0.0360441 0.0619013 0.00964495 -0.0189672 -0.0173292 0.456135
-0.418746 0.145475 -0.0117663 1.59731 0.0193273 0.00460238 0.569629 0.00964495 0.372388 -0.124825 0.0082243 -0.276982
-4.047 4.60175 -0.25093 -1.73942 0.307557 2.21022 0.794456 -0.0189672 -0.124825 5.19169 -1.60976 -9.22297
1.68285 -1.48201 0.1309 -0.241116 -0.117114 -0.480491 -0.0120204 -0.0173292 0.0082243 -1.60976 0.627764 2.69088
-9.15308 -24.5281 -1.12977 -10.3746 -1.00638 -2.70173 -4.88447 0.456135 -0.276982 -9.22297 2.69088 144.155
4 !
13 109 187 109 186 109 185 110 183 110 179 110 175 110 169 110 162 111 155 111 148 111 143 111 139 112 136
14 37 182 37 183 36 183 36 181 36 179 36 175 36 170 36 163 35 156 35 150 35 144 35 139 36 136 36 135
12 110 87 110 86 111 84 111 82 112 78 112 73 113 66 113 59 113 51 113 44 113 39 113 35
15 36 89 37 89 37 88 36 86 36 83 36 79 36 74 35 67 35 59 35 52 35 46 35 42 35 39 35 38 35 37
4 #
44 95 125 95 124 95 123 95 125 96 128 97 133 97 139 98 145 99 150 99 154 100 156 101 155 102 152 104 148 107 143 109 137 113 132 116 127 118 123 120 120 122 117 123 116 123 115 124 115 124 116 123 117 124 119 124 121 124 124 124 129 125 134 126 139 127 144 128 148 128 152 129 156 129 159 129 162 130 163 130 165 130 164 130 163 130 160 130 156
38 24 112 24 111 24 112 25 115 25 119 26 125 27 133 27 141 28 148 28 153 28 156 29 157 29 156 30 153 31 149 32 144 34 138 36 132 38 127 40 123 43 119 44 116 46 115 47 114 48 114 48 115 49 117 49 120 50 124 51 129 52 134 53 140 54 145 55 150 55 153 56 155 55 153 55 151
40 92 28 92 27 92 28 92 29 92 31 93 35 93 40 93 47 93 53 93 59 93 62 94 64 95 63 97 61 99 57 101 52 104 46 107 40 111 35 113 31 116 27 118 25 120 23 120 22 121 21 121 22 121 24 121 27 122 32 122 37 122 43 123 48 124 53 125 57 125 60 126 62 126 63 127 64 126 63 126 62
40 19 27 19 29 19 33 19 38 20 44 20 49 20 55 20 61 20 65 20 67 20 68 21 66 22 63 23 59 25 54 27 50 29 45 32 39 34 35 37 31 39 27 41 25 42 23 43 23 43 24 43 25 43 28 43 31 44 35 44 41 45 47 46 53 46 58 47 62 47 65 48 66 48 65 48 64 48 63 48 62
4 $
42 123 123 123 122 123 121 122 120 122 119 121 119 119 118 118 119 115 119 112 120 110 121 108 122 107 123 106 125 106 128 107 130 108 131 109 133 111 134 114 135 117 136 119 137 122 137 124 138 126 139 127 140 128 142 129 143 128 145 128 147 126 149 125 151 122 153 120 154 116 154 113 154 109 153 106 152 103 150 100 149 99 148 99 147
38 55 117 55 116 55 115 54 114 52 114 50 114 47 115 44 115 41 117 39 118 37 120 36 122 36 124 37 125 39 127 41 128 43 129 46 130 49 131 52 132 56 133 58 134 60 135 62 137 63 139 63 142 63 144 62 146 60 147 58 148 56 149 52 150 48 150 44 150 40 149 37 149 34 148 33 147
37 117 33 117 32 117 31 116 30 115 29 113 28 112 28 109 29 106 29 104 30 102 32 100 34 100 36 99 39 100 41 101 42 102 44 105 45 107 45 110 46 113 47 116 47 118 49 119 50 120 53 121 55 120 58 119 60 118 63 116 65 113 66 109 67 105 68 102 68 98 68 96 67 95 65
43 43 29 44 28 44 27 44 26 43 26 43 25 41 25 39 24 37 24 35 25 32 26 30 28 28 30 26 32 25 35 24 37 25 39 25 40 27 41 29 42 31 42 33 42 36 43 39 43 41 44 44 46 46 47 48 49 49 51 50 54 50 56 49 59 48 61 46 63 44 65 41 66 37 67 34 67 31 66 28 65 26 64 25 62 25 60
4 %
56 26 164 25 164 26 163 27 162 28 162 30 162 32 164 35 168 37 173 40 180 41 187 42 195 41 200 40 204 38 206 37 207 36 206 35 203 35 200 35 196 37 192 40 187 43 184 47 180 51 177 55 175 58 174 61 174 63 175 65 176 66 179 68 183 69 186 70 191 71 195 70 199 70 202 69 204 67 206 66 207 64 207 64 206 63 204 63 201 63 197 65 192 67 187 70 182 73 177 77 173 80 170 84 169 87 170 89 172 91 174 92 177
57 96 114 95 114 96 114 96 113 97 113 99 114 101 116 102 119 104 123 105 128 105 134 105 139 103 143 102 146 101 146 100 145 100 143 99 140 99 136 99 133 100 131 101 129 103 127 105 124 109 122 112 120 115 118 117 116 118 116 119 115 120 116 121 117 122 119 123 121 124 124 125 128 125 131 126 135 126 139 125 141 124 143 123 145 122 145 121 144 120 142 121 139 122 136 123 132 125 128 127 124 130 121 133 118 137 116 140 114 143 113 145 113 146 113
52 25 90 26 90 27 90 29 90 31 93 33 97 35 102 37 108 38 114 38 120 38 124 37 126 36 126 35 125 33 122 33 119 32 115 32 112 33 109 35 106 38 103 42 100 45 98 49 96 52 95 55 94 56 94 57 96 58 97 59 100 59 103 60 106 61 109 61 113 61 116 61 119 60 121 59 122 58 122 57 121 56 119 56 116 56 113 57 110 59 107 60 104 64 101 67 97 71 95 74 93 76 92 78 92
56 23 30 24 30 25 32 27 35 28 38 30 43 31 48 31 53 31 56 31 59 30 61 29 60 28 59 27 57 27 54 27 51 27 48 28 45 30 43 32 41 34 38 37 37 39 35 41 33 44 32 45 32 47 31 47 32 48 33 50 34 51 35 52 37 53 39 54 42 55 45 56 48 56 51 55 54 54 57 53 60 52 62 51 62 50 61 50 59 50 55 51 51 52 46 54 42 56 38 59 36 62 34 64 32 67 32 69 32 71 33 72 34
4 &
41 126 139 126 138 126 137 125 136 123 136 121 135 117 135 112 136 108 137 104 139 101 142 99 144 99 147 101 150 104 153 108 155 112 158 117 162 121 165 124 169 127 172 128 176 128 180 126 182 123 183 119 184 114 183 109 182 104 180 101 177 99 174 100 169 103 165 107 160 113 154 118 150 122 145 125 142 126 139 125 136 123 135
42 51 138 51 137 51 136 50 136 49 135 48 134 46 134 43 135 39 137 36 138 33 141 32 144 32 146 33 148 35 151 38 152 42 154 46 157 50 159 53 163 55 166 56 169 55 172 53 175 49 177 46 178 42 178 39 176 37 173 36 170 36 166 38 161 41 157 44 153 48 150 52 146 54 142 55 140 55 137 53 136 51 135 48 135
40 113 44 112 44 112 43 111 43 109 42 107 42 104 43 101 44 98 46 95 48 93 50 93 53 93 56 95 58 97 61 101 63 105 65 109 67 112 69 115 72 117 75 117 77 116 80 113 82 110 84 107 84 103 84 100 83 98 80 97 77 97 73 98 69 101 66 104 61 108 57 111 53 114 49 115 45 115 43 113 43
40 45 40 45 39 44 39 44 38 43 37 41 36 38 37 35 37 32 39 29 41 27 43 25 46 24 49 25 51 26 53 29 55 31 56 35 58 38 60 41 62 43 65 45 68 45 71 44 75 42 78 39 79 35 80 33 79 31 77 30 74 30 70 32 65 35 61 39 56 43 53 46 49 48 46 48 43 47 41 45 41
4 '
12 110 132 111 134 111 137 111 141 110 147 110 154 110 161 110 166 110 171 110 174 111 175 111 176
14 37 128 37 127 37 128 37 131 36 135 36 142 36 151 35 160 35 167 35 173 36 176 36 177 36 176 37 175
12 109 34 110 36 110 38 110 42 111 47 111 53 111 60 111 66 111 71 111 74 111 76 111 77
14 33 33 33 34 34 36 34 39 34 43 33 48 33 54 33 59 32 65 32 69 32 72 32 74 33 75 33 76
4 +
28 133 117 134 116 134 117 134 118 133 120 132 124 131 128 128 132 125 137 121 142 117 145 112 146 108 146 104 146 101 144 99 142 98 140 98 138 100 136 104 135 109 135 115 135 121 137 127 138 131 140 135 141 137 142 138 142
28 59 115 59 114 58 114 58 115 58 116 56 118 55 122 52 126 49 130 45 135 40 139 35 142 30 144 26 145 22 144 20 144 20 142 21 140 23 138 28 137 32 137 38 137 43 139 48 142 51 144 54 147 56 148 58 149
24 122 31 122 32 121 34 120 36 119 40 116 45 114 49 110 53 106 55 102 57 99 57 95 56 93 54 92 52 92 50 94 49 97 48 101 48 106 49 111 50 115 52 118 54 121 56 122 57
26 47 29 48 29 47 31 47 33 45 36 43 39 40 44 36 48 32 52 28 55 24 58 20 59 17 59 16 58 15 57 16 55 18 54 21 53 25 53 30 53 34 54 39 56 43 58 46 59 49 61 51 61
4 ,
11 128 139 127 139 126 140 125 142 122 145 118 149 115 154 111 159 107 164 105 167 104 169
13 54 136 54 137 54 138 54 140 53 143 52 147 51 151 49 155 46 159 43 162 40 163 38 165 37 165
9 114 50 114 51 112 53 109 55 104 58 100 62 95 66 92 69 90 71
11 47 45 47 46 47 47 46 49 46 52 44 56 42 60 39 65 36 69 33 71 30 74
4 /
10 94 185 95 183 96 180 98 175 101 169 105 162 109 155 113 149 116 145 118 143
12 41 181 40 182 41 179 43 176 45 171 49 165 53 159 58 152 60 148 63 144 65 141 66 140
11 98 78 98 77 99 76 100 73 102 70 106 65 109 60 113 54 117 50 120 46 122 44
11 31 72 31 73 31 72 32 70 34 66 37 62 40 57 44 51 48 46 51 43 53 40
4 <
29 127 178 127 177 126 177 125 176 124 176 123 175 122 174 119 172 116 169 113 166 109 163 105 159 102 156 100 154 98 153 98 152 98 151 99 151 100 151 102 150 105 149 108 147 113 144 117 141 123 138 127 135 131 133 133 132 135 132
23 60 173 59 172 57 171 56 169 54 168 51 166 47 163 43 160 40 157 37 155 35 154 34 153 35 154 36 154 37 153 40 152 43 151 47 148 51 146 54 144 58 141 61 140 64 139
26 126 88 126 89 125 88 124 87 123 86 120 84 116 82 112 80 107 78 102 76 98 74 95 74 94 74 95 74 96 74 98 73 100 72 103 70 107 68 112 65 118 62 123 59 128 57 131 55 133 54 134 54
24 50 85 49 84 48 83 47 82 45 80 42 77 38 74 33 71 28 69 24 67 21 65 19 64 20 63 22 62 26 62 30 61 36 60 42 59 49 57 55 55 60 53 63 52 65 51 66 51
4 =
34 97 132 97 131 98 131 100 131 103 131 107 131 112 130 118 130 123 129 126 128 128 129 129 130 129 131 127 134 125 137 122 142 118 147 115 153 111 159 108 165 105 170 103 174 101 176 101 177 101 178 103 177 106 176 110 175 115 174 121 173 126 173 131 173 134 173 136 173
32 26 128 26 127 28 127 30 127 33 127 37 127 41 127 45 126 48 126 50 126 51 126 50 128 49 131 48 134 45 138 42 144 38 150 35 156 31 162 28 168 26 172 25 175 25 176 27 176 31 175 35 174 39 173 44 173 48 173 52 173 54 173 56 173
31 102 36 101 36 102 35 104 35 107 34 111 34 115 33 120 32 125 32 128 32 130 33 131 34 130 37 128 40 126 45 122 49 119 54 115 59 111 64 109 67 107 70 107 72 108 73 109 73 113 72 118 71 123 70 128 69 133 69 137 69 140 69
31 23 35 22 35 23 35 25 34 28 33 33 32 38 31 43 30 47 30 50 30 51 31 52 32 51 35 49 38 46 43 43 48 40 53 37 58 34 63 32 66 31 68 31 70 32 70 35 70 38 70 43 69 47 68 52 68 56 68 59 68 61 68
4 @
30 116 130 116 129 116 128 116 127 114 128 112 128 111 129 108 131 104 135 102 139 100 144 100 149 101 154 103 159 106 163 110 165 115 166 119 164 124 160 128 156 131 150 132 145 132 140 131 136 127 132 123 130 118 129 114 129 111 130 109 132
31 51 124 51 123 50 122 50 121 49 120 48 119 45 119 42 120 39 122 36 125 33 129 30 135 29 139 29 145 30 150 33 155 37 158 41 160 46 160 52 159 56 156 60 151 63 146 64 141 64 135 63 131 60 127 56 125 52 124 49 123 46 124
29 115 46 115 45 114 45 112 45 110 46 107 48 104 50 101 54 99 58 98 63 98 67 99 72 102 75 106 77 110 78 116 78 121 76 126 73 129 69 132 64 134 59 134 54 132 49 129 46 125 43 121 42 117 42 113 44 110 46
30 39 40 39 39 38 39 37 39 35 40 32 41 29 42 27 45 24 48 23 52 23 57 25 61 27 66 30 69 34 71 39 71 44 70 49 67 53 63 57 58 60 54 61 50 61 46 60 42 56 40 52 38 47 38 43 38 38 40 35 42
4 ^
26 94 185 93 185 93 183 94 181 95 176 97 170 99 164 101 157 103 151 105 146 106 142 108 140 109 140 110 141 111 143 113 146 116 151 118 158 121 166 124 174 126 180 128 185 129 189 130 190 131 191 131 192
21 26 165 27 162 28 158 29 154 31 149 33 143 36 138 39 134 41 130 43 127 45 126 47 127 48 129 49 132 51 137 53 144 55 150 57 156 59 160 61 164 62 166
24 92 81 91 82 92 81 92 79 94 76 96 71 99 66 102 60 105 54 108 49 111 45 112 42 114 41 115 41 116 44 118 47 119 53 121 59 122 66 124 72 125 78 127 81 128 84 129 85
23 22 75 21 75 22 73 23 71 24 67 27 62 29 55 33 49 36 44 39 39 41 36 43 33 45 33 46 34 46 36 47 40 49 45 51 51 53 57 54 63 56 67 58 70 59 73
4 |
21 89 170 89 169 88 169 88 167 88 163 88 158 89 152 89 145 89 139 89 134 89 132 88 135 88 140 87 147 87 155 87 162 87 168 87 173 87 176 87 177 86 176
24 36 178 36 177 36 176 35 173 35 169 34 164 34 157 33 153 33 146 33 139 33 135 33 132 33 131 34 134 34 138 35 144 35 152 35 160 35 167 35 173 35 176 35 178 36 177 36 176
21 87 82 87 81 87 79 87 74 88 69 88 62 89 55 90 48 90 42 90 38 90 37 90 40 90 44 90 51 91 59 92 66 92 73 93 79 93 83 93 85 93 84
24 31 88 31 87 31 86 31 84 31 81 31 77 31 72 31 65 31 60 31 53 31 46 30 42 30 40 30 43 30 47 31 53 32 60 33 66 34 72 35 77 36 80 36 81 35 81 35 79
4 ~
40 123 121 123 120 122 120 122 122 122 126 122 131 122 137 122 144 122 152 122 159 121 166 121 170 120 174 120 175 119 175 119 173 118 169 117 164 116 158 115 151 113 144 111 137 108 132 106 128 103 125 101 124 99 125 97 125 96 126 95 128 95 130 95 133 95 138 95 144 94 150 93 157 92 163 92 167 91 170 90 171
41 60 117 61 117 61 118 60 120 60 123 60 128 59 135 58 143 56 152 55 159 54 165 54 169 53 171 53 170 53 167 52 164 50 159 48 153 45 146 42 139 40 133 37 128 35 125 34 123 33 123 32 123 32 124 32 126 32 128 32 132 31 136 31 142 30 149 29 155 27 162 26 168 24 173 23 176 22 179 22 180 22 179
39 121 25 120 25 120 26 120 27 120 29 119 33 118 37 117 43 116 49 115 55 114 60 113 63 113 65 112 64 112 62 111 59 110 55 108 49 105 43 103 38 101 33 98 29 97 26 95 25 94 26 93 27 93 28 92 30 92 32 92 35 92 38 92 44 91 49 91 55 90 61 90 65 89 69 89 70 88 70
37 47 23 47 24 47 27 47 30 47 34 47 40 47 46 47 52 46 57 46 62 46 65 45 67 44 66 43 64 41 61 39 58 38 55 35 50 32 45 30 39 29 35 28 30 26 28 26 26 25 25 25 27 24 28 23 30 22 34 22 39 21 44 20 51 20 57 20 63 20 67 21 70 21 72
4 ?
29 29 38 28 38 27 38 27 37 27 36 28 35 29 33 32 31 36 29 40 28 45 27 49 27 53 28 56 31 57 34 57 37 56 41 54 45 51 49 48 53 44 57 42 61 39 65 38 68 38 71 38 73 38 76 38 77 39 77
30 95 36 94 36 93 36 93 35 93 34 94 32 95 30 96 28 99 26 102 25 106 25 110 26 113 28 115 30 116 33 116 36 114 40 113 44 111 48 109 51 107 54 106 57 106 59 107 62 107 66 107 69 108 71 108 73 108 74 108 75
26 35 134 34 133 33 132 33 130 34 129 35 127 38 125 42 124 46 123 50 122 54 122 57 123 59 125 59 128 59 131 58 135 56 139 53 142 51 146 50 150 49 153 49 157 49 159 49 162 49 163 49 165
31 99 139 99 140 99 139 99 138 99 137 99 135 99 132 100 130 101 127 104 125 107 123 110 122 114 122 118 122 120 124 122 127 123 130 122 134 121 138 119 142 117 146 115 150 114 153 113 157 112 160 112 163 112 166 112 168 113 170 113 171 114 172
4 >
26 25 92 25 93 26 92 28 90 31 87 34 84 38 81 43 77 47 75 51 72 54 70 55 69 56 69 56 68 55 68 54 67 51 66 49 64 45 62 40 60 35 59 29 57 25 55 21 55 20 55 21 55
25 96 100 95 101 96 100 98 99 101 97 105 94 110 92 113 90 118 87 122 85 125 83 127 82 127 81 126 80 125 80 124 80 123 80 120 79 117 77 113 75 109 73 103 71 99 68 95 65 92 64
30 32 184 31 184 32 184 33 182 36 180 40 177 46 174 52 170 59 166 66 163 71 161 74 160 75 159 74 159 73 160 72 160 70 160 69 160 67 160 66 158 63 156 59 154 56 152 51 149 47 147 42 146 39 145 36 145 35 146 36 147
28 108 197 107 198 108 198 109 198 112 196 116 194 121 191 126 187 132 184 136 182 139 180 141 180 142 179 141 180 141 181 140 181 139 181 138 181 137 180 135 178 133 176 130 174 126 170 122 167 117 163 114 159 110 157 108 156
4 )
16 27 28 28 29 29 30 32 32 35 35 38 41 41 48 42 56 42 63 41 69 39 74 37 77 35 79 33 81 32 81 31 81
14 91 28 91 29 93 30 96 32 99 35 103 39 105 46 106 54 105 61 104 68 101 74 99 78 96 80 94 82
17 34 124 33 124 34 124 36 125 39 127 42 131 46 137 49 144 51 151 52 158 51 165 50 170 48 173 46 175 44 176 42 177 41 177
18 98 128 97 127 97 128 98 129 101 132 104 135 108 140 111 146 113 152 114 157 114 164 112 170 108 176 105 180 101 184 98 186 96 187 94 188
8 (
117 159 21 158 22 158 23 157 24 156 24 156 25 156 26 155 26 155 27 154 27 154 28 153 28 153 29 153 30 152 30 152 31 151 32 151 33 150 33 150 34 150 35 149 36 148 37 148 38 147 39 147 40 146 41 146 42 146 43 145 44 145 45 144 46 144 47 144 48 144 49 144 50 143 50 143 51 143 52 143 51 143 52 143 53 142 53 142 54 142 55 142 56 142 57 142 58 141 59 141 60 141 61 141 62 141 63 141 64 140 64 140 65 140 66 140 67 140 68 140 69 140 70 140 71 140 72 140 73 140 74 140 75 140 76 140 77 141 78 141 79 141 80 141 81 141 82 142 83 142 84 142 85 142 86 143 86 143 87 143 88 144 89 144 90 144 91 145 92 145 93 145 94 146 94 146 95 146 96 147 96 147 97 148 97 148 98 149 99 149 100 150 100 150 101 151 101 151 102 152 103 152 104 153 104 153 105 153 106 154 106 154 107 155 107 156 107 156 108 157 108 157 109 158 109 158 110 159 110 159 111 160 111 160 112
101 63 19 63 20 62 20 62 21 62 22 61 22 61 23 60 23 60 24 60 25 59 25 59 26 58 26 58 27 58 28 57 28 57 29 57 30 56 30 56 31 56 32 55 32 55 33 55 34 54 34 54 35 54 36 53 37 53 38 52 38 52 39 52 40 52 41 51 41 51 42 51 43 51 44 51 45 51 46 51 47 50 47 50 48 50 49 50 50 50 51 50 52 50 53 50 54 50 55 50 56 50 57 50 58 50 59 50 60 50 61 50 62 50 63 50 64 50 65 50 66 50 67 50 68 50 69 50 70 50 71 50 72 50 73 50 74 51 74 51 75 51 76 52 77 52 78 52 79 53 80 54 81 54 82 55 82 55 83 56 84 57 85 57 86 58 86 58 87 59 88 60 89 60 90 61 90 61 91 63 92 64 93 65 93 65 94 66 94 67 94 68 94 68 95 69 95 70 95 70 96 71 96
15 39 27 38 27 38 26 36 27 35 27 33 29 30 32 28 38 26 45 25 53 26 60 27 67 28 72 31 75 32 77
13 113 27 112 27 110 28 108 30 105 34 102 40 99 48 98 57 97 66 98 73 99 77 101 80 103 81
16 46 124 45 123 44 123 43 123 41 125 38 127 35 131 32 137 29 144 27 152 27 160 28 166 30 172 33 176 35 177 37 178
17 113 128 113 127 112 127 111 127 109 128 107 131 104 136 101 142 99 150 98 159 98 166 98 173 100 177 102 180 105 182 106 183 107 183
40 50 24 50 25 49 26 49 27 49 28 49 29 49 30 49 32 47 33 47 35 45 37 45 40 44 42 43 45 43 47 43 50 43 52 43 54 43 55 43 57 43 59 43 60 43 61 44 63 43 61 44 63 45 64 45 65 47 66 47 67 48 68 49 69 50 69 51 70 52 71 53 71 53 72 54 72 55 72 55 73
40 140 25 139 27 138 28 137 29 135 31 134 33 133 34 132 36 131 37 129 39 128 41 126 44 124 46 122 49 121 51 119 53 118 55 117 58 116 60 115 63 115 65 115 68 115 71 115 73 116 75 117 78 118 80 119 82 121 83 122 85 124 87 125 89 127 90 128 91 129 92 130 92 131 93 132 93 133 93 133 94
4 .
21 133 137 133 138 132 138 130 139 126 139 122 139 116 139 109 139 103 140 98 141 94 142 91 143 92 143 95 142 100 141 107 140 115 139 124 138 131 138 137 138 140 138
21 61 137 61 136 60 135 58 135 55 134 51 134 46 134 41 136 36 137 32 139 29 140 29 141 30 140 34 140 40 139 46 138 53 138 59 138 65 138 68 137 70 138
19 126 50 125 50 124 50 122 51 119 51 115 51 111 51 105 52 99 52 95 53 92 53 91 53 92 54 96 54 101 53 106 53 112 52 116 52 120 52
18 56 54 54 54 53 54 50 54 46 54 41 55 37 55 33 56 30 56 29 56 30 57 32 57 35 57 39 57 43 57 47 57 50 57 52 57
4 -
12 87 151 88 151 91 151 96 151 103 150 111 150 118 150 124 149 129 149 132 150 134 150 133 151
12 23 131 22 131 23 131 25 131 29 130 34 129 41 129 49 129 57 129 65 130 70 130 74 131
12 108 62 107 62 105 62 107 62 111 62 116 62 123 62 131 61 139 61 146 61 151 60 153 60
14 27 60 25 60 24 60 23 60 25 59 27 59 33 59 40 58 47 58 56 57 63 57 70 57 74 57 76 57
4 T
68 39 103 39 102 39 101 39 100 39 99 39 100 39 99 39 98 38 97 38 95 38 94 38 92 38 91 37 89 37 88 37 87 37 85 37 84 37 83 37 81 37 80 37 79 37 78 37 77 37 75 37 74 37 73 37 72 37 71 37 70 37 69 37 68 37 67 37 66 37 65 37 64 37 63 38 62 38 61 38 60 38 59 38 58 38 57 38 56 38 55 38 54 38 53 39 53 40 53 42 53 44 53 46 53 49 53 51 54 53 54 55 54 57 54 59 54 61 54 62 54 63 54 64 54 65 53 66 53 67 53 68 53 69 53 70 53
45 112 99 112 98 112 97 112 95 112 93 112 91 112 87 111 84 111 81 111 77 110 74 111 77 110 74 110 72 110 69 110 68 110 66 110 65 110 63 110 62 110 61 110 59 110 58 110 56 110 55 110 54 110 53 111 53 114 54 116 54 120 55 123 55 125 55 128 55 130 55 132 55 133 55 134 55 136 55 137 55 138 55 139 55 140 55 141 55 142 55
57 32 191 32 190 33 189 33 187 33 185 33 183 34 181 34 179 34 177 34 175 34 173 34 170 34 169 34 167 34 165 34 164 34 163 34 162 34 160 34 159 34 158 34 157 34 156 35 156 35 155 35 154 35 153 35 152 35 151 36 151 36 150 36 149 36 148 37 147 37 146 37 145 38 145 38 144 39 144 41 144 43 145 46 145 50 145 54 145 58 145 61 145 64 145 65 145 66 145 67 145 68 145 69 145 70 145 71 145 72 144 73 144 74 144
51 108 190 108 189 108 187 107 185 107 183 107 181 107 179 107 177 107 175 107 173 107 171 107 173 107 171 107 169 107 167 107 165 108 163 108 161 108 159 108 158 108 156 108 155 108 154 108 153 108 152 108 151 108 150 109 149 109 148 109 147 109 146 109 145 109 144 109 143 110 143 110 142 110 141 110 140 110 139 111 138 112 138 114 138 117 138 121 138 125 138 129 139 133 139 137 139 140 139 142 139 143 139
4 _
69 17 55 18 55 19 55 20 55 22 55 23 55 26 55 28 55 31 55 34 56 37 56 40 57 43 57 46 57 48 58 50 58 52 58 53 58 54 58 55 57 56 57 57 57 58 57 59 57 60 57 61 57 63 57 65 57 67 56 69 56 71 56 71 57 70 57 69 57 67 57 64 57 62 57 58 57 56 57 54 57 52 57 50 57 49 57 48 57 47 57 46 57 45 57 44 58 42 58 41 58 40 58 41 58 40 58 39 58 38 58 37 58 36 58 35 58 34 58 32 58 31 58 31 59 30 59 29 59 28 59 27 59 26 59 25 59 24 59
44 98 78 99 78 101 78 104 78 107 78 110 78 114 78 121 78 125 78 128 78 131 78 133 78 135 78 137 78 138 78 139 78 138 78 137 78 136 78 134 78 131 78 129 78 126 78 124 79 122 79 120 79 117 79 115 78 113 78 111 78 110 78 108 78 108 79 107 79 106 79 105 79 104 79 103 79 102 79 101 79 100 79 99 79 98 79 97 79
26 21 124 22 124 24 124 28 125 32 125 39 125 46 126 59 126 69 126 76 127 80 127 83 127 84 127 85 127 85 128 84 128 80 128 76 128 66 128 59 127 52 127 45 126 41 125 38 125 35 124 34 124
36 86 165 86 164 87 164 89 164 91 164 93 163 96 163 99 164 102 164 106 164 109 163 106 164 109 163 112 164 116 164 120 164 123 164 125 164 126 164 126 165 125 165 123 165 122 165 120 165 118 165 116 165 114 165 112 165 109 165 102 164 95 163 88 163 84 162 82 162 80 162 81 162
2 *
57 35 128 35 129 38 131 41 134 44 137 46 139 49 141 51 143 53 145 56 147 60 148 63 149 66 150 69 150 73 149 76 147 79 146 81 143 83 141 85 139 86 137 87 135 87 134 87 133 86 133 86 132 85 132 83 132 82 133 79 133 76 134 73 135 69 135 67 136 63 137 61 138 59 139 58 139 57 140 55 141 54 142 53 143 51 145 50 146 49 147 48 148 47 149 46 150 45 152 43 153 41 155 39 157 38 158 37 159 36 160 35 161 34 162
56 26 38 26 39 26 40 26 41 26 43 27 46 27 49 28 52 29 55 30 57 31 60 33 63 35 66 37 68 39 70 43 71 47 72 51 73 58 73 65 73 68 73 71 72 73 71 75 69 76 68 77 66 77 65 78 65 78 64 77 63 76 63 75 62 75 63 73 63 71 63 68 64 65 65 62 65 59 66 56 67 53 68 51 69 48 70 46 71 45 72 43 73 41 74 40 75 39 76 38 77 37 78 35 79 34 81 33 81 33 82 33 83
2 `
33 90 187 90 186 90 185 89 185 89 184 89 183 88 181 87 180 86 178 85 175 83 172 82 169 80 165 77 161 75 157 73 154 71 151 70 148 69 147 68 145 67 145 66 145 67 145 67 148 69 151 72 155 75 162 78 169 81 176 83 180 84 183 85 184 85 185
37 69 103 69 102 69 101 68 101 67 99 67 98 66 96 65 94 65 92 64 90 62 87 61 84 60 81 59 78 59 75 58 73 58 70 57 68 57 67 57 66 57 67 58 68 59 70 60 72 62 76 64 79 66 83 68 86 69 89 70 92 71 94 72 96 73 98 74 99 75 100 75 101 76 102
2 ;
40 113 53 113 54 113 56 111 59 109 63 107 66 105 70 103 73 101 77 99 81 97 84 95 86 93 88 92 90 91 91 89 93 88 94 88 95 87 95 87 94 88 93 89 92 90 90 92 88 93 85 95 82 97 79 99 75 101 72 103 69 104 66 105 64 105 63 106 62 107 61 108 60 109 58 111 57 112 55 113 54
34 71 32 71 33 71 34 69 36 67 39 64 42 61 46 57 50 53 54 50 58 48 61 47 64 46 66 45 68 46 67 47 67 47 66 48 64 49 62 50 59 52 57 54 54 56 51 59 47 61 45 64 42 67 39 71 37 78 34 82 32 86 30 89 28 91 27 93 25
2 :
32 100 31 101 32 101 34 102 37 102 40 103 44 104 48 104 55 105 59 106 63 106 66 107 68 107 70 107 71 108 72 108 73 107 73 107 72 107 71 107 69 108 67 108 65 108 62 108 59 108 56 108 52 108 48 109 45 109 42 108 40 108 39 107 39
35 36 26 36 27 36 28 36 30 37 33 37 37 38 41 38 48 38 55 38 59 39 63 39 66 39 69 39 72 38 73 38 74 39 73 39 72 39 69 40 65 41 61 41 57 42 50 42 46 43 39 43 35 43 32 43 29 43 27 43 26 43 27 43 26 43 25 43 26 43 27
3 F
12 184 33 183 32 183 31 185 33 187 36 189 41 192 47 195 55 198 62 200 67 203 72 205 74
14 112 34 111 33 112 33 113 34 114 38 117 43 120 50 122 57 125 64 128 69 129 72 131 74 132 75 132 74
13 43 37 42 37 42 39 44 41 47 45 49 50 53 56 56 61 59 67 62 70 63 73 64 74 65 74
3 G
41 219 44 219 43 218 42 217 41 216 40 215 40 214 39 212 39 211 40 209 41 206 42 203 44 201 47 198 50 196 54 195 58 195 63 196 66 199 69 201 71 205 72 208 72 211 72 212 71 213 71 213 70 211 71 209 71 206 73 202 75 199 77 196 80 194 83 194 87 194 91 196 94 199 96 203 98 207 98 211 98 215 97
40 145 43 145 42 145 40 144 39 142 38 139 38 136 39 132 41 128 44 124 47 121 51 120 54 119 58 120 61 122 64 124 65 127 66 129 66 131 65 133 64 134 63 134 62 133 61 132 61 130 62 128 63 125 65 122 67 119 70 117 73 116 76 117 80 118 83 121 86 124 87 129 88 134 87 138 86 142 85 145 84
43 62 44 62 43 63 43 63 42 63 41 62 40 61 38 60 37 58 37 55 37 52 39 49 41 46 43 44 47 42 51 41 56 41 61 42 65 44 68 46 71 48 72 50 72 52 71 53 70 54 69 53 68 53 67 51 67 50 67 48 68 45 70 43 72 41 74 38 80 38 84 39 87 41 90 44 93 48 94 52 94 56 93 60 91 63 90
3 H
41 215 55 214 55 213 54 214 53 215 51 217 50 219 48 223 46 227 45 230 45 234 47 236 50 238 54 237 59 235 63 232 67 228 70 225 73 223 74 222 75 221 75 222 75 223 74 226 74 228 74 231 75 234 76 237 79 239 82 240 85 240 89 239 92 237 95 233 97 230 99 224 100 219 101 213 101 208 100 204 98 201 96
40 135 54 134 54 133 54 134 53 135 52 137 50 140 49 144 48 148 49 152 50 155 53 157 56 157 60 156 64 154 68 151 71 149 73 146 75 144 76 143 77 143 76 144 76 146 75 148 75 150 76 153 77 155 78 157 80 158 83 159 85 158 88 157 90 154 93 152 94 148 95 144 96 139 96 135 96 131 95 128 94
41 52 52 52 51 51 51 52 50 52 49 53 47 55 45 57 44 60 43 64 44 68 45 72 48 74 50 75 54 74 56 73 59 71 62 68 64 66 66 65 67 65 68 66 68 67 68 69 67 71 68 72 69 74 71 76 74 78 77 79 81 79 84 78 87 75 89 72 91 67 93 62 94 55 94 50 94 45 93 42 91 41 90
3 I
50 223 96 224 96 224 97 223 98 222 99 221 101 219 102 216 104 214 106 210 106 207 106 203 104 199 103 195 101 192 98 189 96 187 93 186 90 186 87 188 85 190 84 194 83 198 82 202 82 205 83 208 83 210 84 211 84 211 85 210 85 209 85 207 85 204 85 200 84 196 84 190 83 185 83 180 82 177 81 174 79 174 76 176 73 180 70 185 67 191 63 197 61 202 59 207 59 210 59 211 60
50 152 94 152 95 151 96 149 98 147 99 144 100 139 102 134 104 128 105 122 105 118 104 114 102 112 100 111 97 111 94 113 91 115 88 118 85 121 83 125 82 128 81 131 81 133 82 135 83 136 84 136 85 136 86 135 86 134 87 133 86 131 86 128 85 124 84 121 82 117 80 113 79 109 76 106 74 103 72 102 70 103 69 105 67 109 64 114 62 121 60 128 57 134 56 139 56 142 56 145 57
47 64 95 64 96 64 97 63 97 62 99 60 100 57 101 54 101 50 102 46 101 42 100 38 98 35 96 32 92 31 89 31 86 32 83 35 80 37 79 41 78 46 77 50 77 54 78 56 79 57 79 58 80 57 80 56 80 54 80 51 79 47 78 44 77 39 75 36 73 32 70 30 67 29 64 29 61 30 57 34 54 39 51 44 50 50 49 56 49 61 51 65 52 67 54
3 J
52 208 104 207 104 206 104 206 105 207 106 209 107 212 108 216 109 221 109 226 109 231 108 235 106 239 104 241 101 243 98 243 95 242 92 239 90 235 88 231 87 227 87 223 88 220 88 218 89 217 89 217 90 218 90 220 90 222 90 225 90 227 89 231 88 234 87 237 86 239 84 242 82 243 79 245 76 245 72 244 68 241 64 238 60 233 58 226 56 219 56 213 58 207 60 204 63 202 67 202 70 203 72 206 74
48 126 101 124 100 123 100 122 100 121 101 122 101 123 102 125 103 128 104 132 104 138 103 143 101 148 99 153 96 156 93 157 89 158 87 157 84 155 82 151 80 149 79 145 79 142 79 139 79 137 79 135 79 136 79 137 79 140 78 144 78 147 77 152 75 156 73 159 71 161 69 163 66 163 64 163 60 161 56 158 53 154 50 151 49 145 48 139 48 133 49 128 51 124 53 122 55
49 42 95 41 96 40 97 41 98 42 100 43 101 46 102 49 103 53 103 58 103 62 102 67 100 71 97 73 94 75 90 75 87 75 84 73 82 71 80 68 78 65 78 63 77 60 77 58 78 57 78 56 78 56 79 57 79 58 79 60 78 63 77 66 76 70 75 73 72 75 71 77 68 78 65 77 62 76 59 73 56 70 54 66 52 61 51 56 50 52 51 47 51 45 52 44 52 44 53
3 K
35 200 146 199 145 199 144 198 142 198 140 197 136 197 131 197 127 197 122 197 117 197 114 197 112 198 112 199 114 201 117 203 121 206 126 209 131 213 137 217 141 220 144 224 145 227 146 229 145 230 144 231 143 231 141 232 138 231 135 231 130 230 124 230 118 229 112 228 108 227 104
35 137 85 137 84 137 82 137 80 136 77 136 73 136 67 135 61 135 54 135 49 135 44 135 42 136 41 137 43 139 46 141 50 144 56 147 63 151 69 155 75 158 80 161 82 164 83 166 84 167 82 169 80 170 76 171 71 171 65 171 57 171 49 171 42 171 36 170 32 170 30
37 44 83 44 84 43 83 43 81 43 78 42 74 42 69 41 63 41 57 40 52 39 48 39 45 39 44 40 45 41 47 43 51 46 55 50 60 53 66 57 70 60 73 63 75 65 77 66 77 67 77 68 76 69 74 69 73 69 69 69 65 68 59 68 53 67 46 67 39 67 34 67 31 67 29
/util.h
0,0 → 1,68
/***********************************************************************
 
util.h - memory allocation, error reporting, and other mundane stuff
 
Copyright (C) 1991 Dean Rubine
 
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License. See ../COPYING for
the full agreement.
 
**********************************************************************/
 
/*
* General utility functions
*
* Mostly for dealing with mundane issues such as:
* Memory allocation
* Error handling
*/
 
/*
* General allocation macro
*
* Example:
* struct list *s; s = allocate(4, struct list)
* returns space for an array of 4 list structures.
* Allocate will die if there is no more memory
*/
 
/* NOTE(review): the third argument is the string literal "type", not the
 * stringized macro parameter -- in ANSI C, macro arguments are not expanded
 * inside string literals, so myalloc() always receives the name "type".
 * Harmless (it is only used for error reporting), but confirm before
 * relying on the reported type name. */
#define allocate(n, type) \
((type *) myalloc(n, sizeof(type), "type"))
 
/*
* Functions
*/
 
#define STREQ(a,b) ( ! strcmp(a,b) )
 
char *myalloc(); /* Do not call this function directly */
char *scopy(); /* allocates memory for a string */
void debug(); /* printf on stderr -
setting DebugFlag = 0 turns off debugging */
void error(); /* printf on stderr, then dies */
int ucstrcmp(); /* strcmp, upper case = lower case */
char *tempstring(); /* returns a pointer to space that will reused soon */
 
/*
this is the wrong place for all of this, but got chosen since
every file includes this one
*/
 
#ifdef unix
# define GRAPHICS /* only GDEV on unix machines */
#endif
 
#ifndef unix
 
/* various BSD to lattice C name changes */
 
#ifdef __ECOS
extern char *strdup(char *);
#else
#define bcopy movmem
#endif
#define index strchr
#define rindex strrchr
 
#endif
/hre.h
0,0 → 1,394
/*
* hre.h: API for Handwriting Recognition Engine on Unix
* Author: James Kempf
* Created On: Wed Oct 28 11:30:43 1992
* Last Modified By: James Kempf
* Last Modified On: Fri Sep 23 13:49:26 1994
* Update Count: 74
* Copyright (c) 1994 by Sun Microsystems Computer Company
* All rights reserved.
*
* Use and copying of this software and preparation of
* derivative works based upon this software are permitted.
* Any distribution of this software or derivative works
* must comply with all applicable United States export control
* laws.
*
* This software is made available as is, and Sun Microsystems
* Computer Company makes no warranty about the software, its
* performance, or its conformity to any specification
*/
 
#ifndef _HRE_H_
 
#define _HRE_H_
 
#include <sys/types.h>
#include <stdlib.h>
/*#include <libintl.h>*/
 
#ifdef ELX
typedef unsigned int wchar_t;
#endif
 
/* Scalar Type Definitions */
 
/*For better readability.*/
 
#ifndef true
 
typedef u_char bool;
 
#define true 1
#define false 0
 
#endif
 
/*For pointers to extra functions on recognizer.*/
 
typedef void (*rec_fn)();
 
/*
*rec_confidence is an integer between 0-100 giving the confidence of the
* recognizer in a particular result.
*/
 
typedef u_char rec_confidence;
 
/*Time value. This is the same as in X.h, so we conditionally define.*/
/* ari -- no it's not. *SIGH* there's an ifdef in X.h specifically */
/* for osf. */
#ifndef X_H
 
#ifndef __osf__
typedef unsigned long Time;
#else
typedef unsigned int Time;
#endif
/* (mips) typedef unsigned long Time; */
/* (osf) typedef unsigned int Time; */
 
#endif
 
/**************** RECOGNIZER CONFIGURATION INFORMATION *******************/
 
/*
* Recognizer information. Gives the locale, category of the character
* set returned by the recognizer, and any subsets to which the
* recognition can be limited. The locale and category should be
* suitable for the setlocale(3). Those recognizers which don't do text
* can simply report a blank locale and category, and report the
* graphics types they recognize in the subset.
*/
 
typedef struct {
char* ri_locale; /*The locale of the character set.*/
char* ri_name; /*Complete pathname to the recognizer.*/
char** ri_subset; /*Null terminated list of subsets supported*/
} rec_info;
 
/*These define a set of common character subset names.*/
 
#define GESTURE "GESTURE" /* gestures only */
#define MATHSET "MATHSET" /* %^*()_+={}<>,/. */
#define MONEYSET "MONEYSET" /* $, maybe cent, pound, and yen */
#define WHITESPACE "WHITESPACE" /* gaps are recognized as space */
#define KANJI_JIS1 "KANJI_JIS1" /* the JIS1 kanji only */
#define KANJI_JIS1_PLUS "KANJI_JIS1_PLUS" /* JIS1 plus some JIS2 */
#define KANJI_JIS2 "KANJI_JIS2" /* the JIS1 + JIS2 kanji */
#define HIRIGANA "HIRIGANA" /* the hirigana */
#define KATAKANA "KATAKANA" /* the katakana */
#define UPPERCASE "UPPERCASE" /* upper case alphabetics, no digits */
#define LOWERCASE "LOWERCASE" /* lower case alphabetics, no digits */
#define DIGITS "DIGITS" /* digits 0-9 only */
#define PUNCTUATION "PUNCTUATION" /* \!-;'"?()&., */
#define NONALPHABETIC "NONALPHABETIC" /* all nonalphabetics, no digits */
#define ASCII "ASCII" /* the ASCII character set */
#define ISO_LATIN12 "ISO_LATIN12" /* The ISO Latin 12 characters */
 
 
/******************** RECOGNITION INPUT STRUCTURES ***********************/
 
/*
* WINDOW SYSTEM INTERFACE
*/
 
/*Basic point. Note that it is identical to XTimeCoord, for easy conversion*/
 
typedef struct {
Time time;
short x, y;
} pen_point;
 
/*Bounding box. Structurally identical to XRectangle.*/
 
typedef struct {
short x,y; /*Upper left corner.*/
short width,height; /*Width and height.*/
} pen_rect;
 
/* Button flags - pen's button configuration. */
 
#define TABLET_TIP 0x1 /*tip switch*/
#define TABLET_BUTTON1 0x2 /*one barrel switch*/
#define TABLET_BUTTON2 0x4 /*two barrel switches*/
#define TABLET_BUTTON3 0x8 /*three barrel switches*/
 
/* Pen flags - additional state information that can be reported by the pen.*/
 
#define TABLET_PROXIMITY 0x1 /*can report position when pen not in contact*/
#define TABLET_RELATIVE 0x2 /*can report relative coords, like mouse*/
#define TABLET_ABSOLUTE 0x4 /*can report absolute co-ordinates*/
#define TABLET_RANGE 0x8 /*can report when pen goes out of range*/
#define TABLET_INVERT 0x10 /*can report when pen is inverted*/
#define TABLET_TOUCH 0x20 /*finger can be used as pen*/
 
/* Angle flags - reporting of information about the pen angle. */
 
#define TABLET_ANGLEX 0x1 /*can report angle with the x axis*/
#define TABLET_ANGLEY 0x2 /*can report angle with the y axis*/
#define TABLET_ROTATE 0x4 /*can report barrel rotation*/
 
/*
* Sensor flags - configuration and reporting capabilities
* of the tablet's sensor panel.
*/
 
#define TABLET_INTEGRATED 0x1 /*sensor panel is integrated with display*/
#define TABLET_PRESSURE 0x2 /*sensor panel can report pressure*/
#define TABLET_HEIGHT 0x4 /*sensor panel can report height*/
 
/* Units flags - in what units x and y coordinate data reported.*/
 
#define TABLET_DIMENSIONLESS 0x1 /*no units*/
#define TABLET_ENGLISH 0x2 /*thousandths of an inch*/
#define TABLET_METRIC 0x4 /*tenths of a millimeter*/
 
/* Origin flags - where the tablet's origin is located.*/
 
#define TABLET_ULEFT 0x1 /*upper left corner*/
#define TABLET_URIGHT 0x2 /*upper right corner*/
#define TABLET_LLEFT 0x4 /*lower left corner*/
#define TABLET_LRIGHT 0x8 /*lower right corner*/
#define TABLET_CENTER 0x10 /*center of tablet*/
 
/*
* Tablet capabilities structure. Defines basic information about tablet
* configuration.
*/
 
typedef struct {
char tc_id[20]; /*tablet identifier, null terminated*/
u_short tc_button; /*button capabilities*/
u_short tc_pen; /*pen capabilities*/
u_short tc_angle; /*pen angle reporting*/
u_int tc_sensor : 8; /*sensor capabilities*/
u_int tc_units : 8; /*units for xy reporting*/
u_int tc_default_units : 8; /*default units*/
u_int tc_origin : 8; /*where origin located*/
short tc_x[2]; /*minimum/maximum x*/
short tc_y[2]; /*minimum/maximum y*/
short tc_pressure[2]; /*minimum/maximum pressure/height*/
u_int tc_sample_rate; /*rate of event reporting*/
u_int tc_sample_distance; /*xy coords per sample*/
 
} tablet_cap;
 
/*
* PEN STROKE DATA
*/
 
/*
* Pen state parameters. "Basic" state is pen up/down, barrel buttons
* (if any), and in/out of range. Others may be reported by particular pens.
*/
 
typedef struct {
u_short pt_button; /*button state - same as tc_button*/
u_short pt_pen; /*other state - same as tc_pen*/
short pt_pressure; /*Pressure. + against tablet, - above tablet.*/
double pt_anglex; /*angle of tilt in the x direction, in radians.*/
double pt_angley; /*angle of tilt in the y direction, in radians.*/
double pt_barrelrotate; /*angle of barrel rotation, in radians.*/
} pen_state;
 
/*
* Stroke structure.
*/
 
/* A single pen stroke: the sampled points plus the pen-state changes
 * that occurred during the stroke. */
typedef struct {
u_int ps_npts; /*Number of pen_point in array.*/
pen_point* ps_pts; /*Array of points.*/
u_int ps_nstate; /*Number of pen_state in array.*/
u_int* ps_trans; /*State transition point indices.*/
pen_state* ps_state; /*Array of state.*/
} pen_stroke;
 
/*
* RECOGNITION CONTEXT
*/
 
/* Structure for reporting writing area geometric constraints. */
 
typedef struct {
pen_rect pr_area;
short pr_row, pr_col;
double pr_rowpitch, pr_colpitch;
} pen_frame;
 
/*User preferences*/
 
#define REC_RIGHTH 0x0 /*Right-handed writer.*/
#define REC_LEFTH 0x1 /*Left-handed writer.*/
 
/*
* Writing direction. There will generally be a preferred and a
* secondary direction (example: English is left to right, then
* top to bottom). High byte has preferred, low byte secondary.
* The recognizer can ignore this and key off of locale.
*/
 
#define REC_DEFAULT 0x0 /*Use default direction.*/
#define REC_BOTTOM_TOP 0x1 /*Bottom to top.*/
#define REC_LEFT_RIGHT 0x2 /*Left to right.*/
#define REC_RIGHT_LEFT 0x3 /*Right to left.*/
#define REC_TOP_BOTTOM 0x4 /*Top to bottom.*/
 
/*
* Structure for describing a set of letters to constrain recognition.
* ls_type is the same as the re_type field for rec_element below.
*/
 
typedef struct _letterset {
char ls_type;
union _ls_set {
char* aval;
wchar_t* wval;
} ls_set;
} letterset;
 
/*
* Recognition context. Describes the context in which the pen stroke
* data was obtained and in which recognition should proceed.
*/
 
typedef struct {
u_short rc_upref; /*User preference. */
bool rc_gesture; /*Look for gesture if true.*/
u_short rc_direction; /*Primary and secondary writing direction.*/
rec_confidence rc_cutoff; /*Cut off recognition below this confidence*/
tablet_cap* rc_tinfo; /*Tablet capabilities.*/
char** rc_subset; /*Confine recognition to these subsets.*/
pen_frame* rc_frame; /*If nonNULL, writing area geometry.*/
wordset rc_wordset; /*If nonNULL, dictionary.*/
letterset rc_letterset; /*If nonNULL, constrain to these chars.*/
void* rc_context; /*For recognizer-specific context.*/
} rc;
 
/************************** GESTURES **************************/
 
/*
* Gestures. The toolkit initializes the recognizer with a
* set of gestures having appropriate callbacks.
* When a gesture is recognized, it is returned as part of a
* recognition element. The recognizer fills in the bounding
* box and hotspots. The toolkit fills in any additional values,
* such as the current window, and calls the callback.
*/
 
typedef struct Gesture {
char* g_name; /*The gesture's name.*/
u_int g_nhs; /*Number of hotspots.*/
pen_point* g_hspots; /*The hotspots.*/
pen_rect g_bbox; /*The bounding box.*/
void (*g_action)(struct Gesture*); /*Pointer to execution function.*/
void* g_wsinfo; /*For toolkit to fill in.*/
} gesture;
 
typedef void (*xgesture)(gesture*);
 
/*These provide some common gesture names.*/
 
#define COPY "COPY" /*Copy target to clipboard*/
#define CUT "CUT" /*Copy target and delete*/
#define PASTE "PASTE" /*Paste clipboard into target*/
#define UNDO "UNDO" /*Undo the previous gesture action*/
#define CLEAR "CLEAR" /*Clear clipboard*/
#define EXTEND "EXTEND" /*Extend selection to target location*/
#define RETURN "RETURN" /*Insert newline/carriage return at target*/
#define SPACE "SPACE" /*Insert space at target*/
#define TAB "TAB" /*Insert tab at target*/
#define KKCONVERT "KKCONVERT" /*Perform kana-kanji conversion on target*/
 
/********************* RECOGNITION RETURN VALUES *************************/
 
 
/*Different types in union. "Other" indicates a cast is needed.*/
 
#define REC_NONE 0x0 /*No return value*/
#define REC_GESTURE 0x1 /*Gesture.*/
#define REC_ASCII 0x2 /*Array of 8 bit ASCII*/
#define REC_VAR 0x4 /*Array of variable width characters. */
#define REC_WCHAR 0x8 /*Array of Unicode (wide) characters. */
#define REC_OTHER 0x10 /*Undefined type.*/
#define REC_CORR 0x20 /*rec_correlation struct*/
 
/*
* Recognition elements. A recognition element is a structure having a
* confidence level member, and a union, along with a flag indicating
* the union type. The union contains a pointer to the result. This
* is the basic recognition return value, corresponding to one
* recognized word, letter, or group of letters.
*/
 
/*Ruse to make the types work*/
 
#define rec_correlation void
 
typedef struct {
char re_type; /*Union type flag.*/
union {
gesture* gval; /*Gesture.*/
char* aval; /*ASCII and variable width.*/
wchar_t* wval; /*Unicode.*/
rec_correlation* rcval; /*rec_correlation*/
} re_result;
rec_confidence re_conf; /*Confidence (0-100).*/
} rec_element;
 
/*
* Recognition alternative. The recognition alternative gives
* a translated element for a particular segmentation, and
* a pointer to an array of alternatives for the next position
* in the segmentation thread.
*/
 
struct _Rec_alternative {
rec_element ra_elem; /*the translated element*/
u_int ra_nalter; /*number of next alternatives*/
struct _Rec_alternative* ra_next; /*the array of next alternatives*/
};
 
typedef struct _Rec_alternative rec_alternative;
 
/*
* Recognition correlation. A recognition correlation is a recognition
* of the stroke input along with a correlation between the stroke
* input and the recognized text. The rec_correlation struct contains
 * a pointer to an array of pointers to strokes, and
 * two arrays of integers, giving the starting point and
 * stopping point of each corresponding recognition element returned
* in the strokes.
*/
 
#undef rec_correlation
 
typedef struct {
rec_element ro_elem; /*The recognized alternative.*/
u_int ro_nstrokes; /*Number of strokes.*/
pen_stroke* ro_strokes; /*Array of strokes.*/
u_int* ro_start; /*Starting index of points.*/
u_int* ro_stop; /*Stopping index of points.*/
} rec_correlation;
 
#endif
/zdebug.h
0,0 → 1,20
/***********************************************************************
 
zdebug.h - macros for debugging
 
Copyright (C) 1991 Dean Rubine
 
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License. See ../COPYING for
the full agreement.
 
**********************************************************************/
 
/* Per-flag debug levels, indexed by flag character. */
extern char _zdebug_flag[];

/* Guard the following statement so it executes only when debug flag f
 * is at level >= 1 / 2 / 3 / 4 respectively.  Usage: Z('x') printf(...); */
#define Z(f) if(_zdebug_flag[f] > 0)
#define ZZ(f) if(_zdebug_flag[f] >= 2)
#define ZZZ(f) if(_zdebug_flag[f] >= 3)
#define ZZZZ(f) if(_zdebug_flag[f] >= 4)
 
/* Scratch array for ad-hoc debugging values -- purpose not visible here. */
extern double kludge[];
/nxscribble.c
0,0 → 1,221
/*
* Copyright (c) 2000 Greg Haerr <greg@censoft.com>
* Copyright (c) 2000 Century Software <embedded.centurysoftware.com>
*
* Scribble Handwriting Recognition for Nano-X!
*/
#include <stdio.h>
#include <stdlib.h>
#define MWINCLUDECOLORS
#include "nano-X.h"
#include "scrib.h"
 
#define TEXTWIN_WIDTH 200 /* text window width/height*/
#define TEXTWIN_HEIGHT 150
 
static ScribbleWidget w;
 
static GR_BOOL bTextwin = GR_FALSE;
static GR_WINDOW_ID wt = 0;
static GR_GC_ID gct = 0;
static GR_GC_ID gctb = 0;
static GR_COORD xpos = 0;
static GR_COORD ypos = 0;
static GR_SIZE width; /* width of character */
static GR_SIZE height; /* height of character */
static GR_SIZE base; /* height of baseline */
static void char_out(GR_CHAR ch);
static void char_del(GR_COORD x, GR_COORD y);
 
void do_buttondown(GR_EVENT_BUTTON *bp);
void do_buttonup(GR_EVENT_BUTTON *bp);
void do_motion(GR_EVENT_MOUSE *mp);
void do_focusin(GR_EVENT_GENERAL *gp);
void do_keystroke(GR_EVENT_KEYSTROKE *kp);
void do_exposure(GR_EVENT_EXPOSURE *ep);
 
int
main(int argc, char **argv)
{
	int t = 1;
	GR_EVENT event;		/* current event */

	/*
	 * Parse command-line arguments.  Only "-t" (enable the debugging
	 * text window) is recognized; anything else is ignored.
	 * BUGFIX: the original loop advanced t only when it saw "-t", so
	 * any other argument made the loop spin forever.  t is now
	 * incremented on every iteration.
	 */
	while (t < argc) {
		if (!strcmp("-t", argv[t]))
			bTextwin = GR_TRUE;
		++t;
	}

	if (GrOpen() < 0) {
		fprintf(stderr, "cannot open graphics\n");
		exit(1);
	}

	if (bTextwin) {
		/* create text output window for debugging*/
		wt = GrNewWindow(GR_ROOT_WINDOW_ID, 50, 20,
			TEXTWIN_WIDTH, TEXTWIN_HEIGHT, 5, BLACK, GREEN);
		GrSelectEvents(wt,
			GR_EVENT_MASK_CLOSE_REQ | GR_EVENT_MASK_KEY_DOWN
			| GR_EVENT_MASK_EXPOSURE);
		GrMapWindow(wt);
		gct = GrNewGC();
		GrSetGCForeground(gct, GREEN);
		/* Measure the fixed font cell so char_out can lay out text. */
		GrGetGCTextSize(gct, "A", 1, GR_TFASCII, &width, &height, &base);
		GrSetGCFont(gct, GrCreateFont(GR_FONT_OEM_FIXED, 0, NULL));
		gctb = GrNewGC();
		GrSetGCForeground(gctb, BLACK);
	}

	/* create scribble input window*/
	w = create_scribble();

	/* Event loop: dispatch until a close request arrives. */
	while (1) {
		GrGetNextEvent(&event);

		switch (event.type) {
		case GR_EVENT_TYPE_BUTTON_DOWN:
			do_buttondown(&event.button);
			break;

		case GR_EVENT_TYPE_BUTTON_UP:
			do_buttonup(&event.button);
			break;

		case GR_EVENT_TYPE_MOUSE_POSITION:
		case GR_EVENT_TYPE_MOUSE_MOTION:
			do_motion(&event.mouse);
			break;

		case GR_EVENT_TYPE_FOCUS_IN:
			do_focusin(&event.general);
			break;

		case GR_EVENT_TYPE_KEY_DOWN:
			do_keystroke(&event.keystroke);
			break;

		case GR_EVENT_TYPE_EXPOSURE:
			do_exposure(&event.exposure);
			break;

		case GR_EVENT_TYPE_CLOSE_REQ:
			GrClose();
			exit(0);
		}
	}
}
 
 
/*
* Here when a button is pressed.
*/
void
do_buttondown(GR_EVENT_BUTTON *bp)
{
/* Begin a new stroke in the scribble widget at the pen-down position. */
ActionStart(w, bp->x, bp->y);
}
 
 
/*
* Here when a button is released.
*/
void
do_buttonup(GR_EVENT_BUTTON *bp)
{
/* Finish the current stroke at the pen-up position. */
ActionEnd(w, bp->x, bp->y);
}
 
 
/*
* Here when the mouse has a motion event.
*/
void
do_motion(GR_EVENT_MOUSE *mp)
{
/* Forward pointer motion to the scribble widget (extends the stroke). */
ActionMove(w, mp->x, mp->y);
}
 
 
/*
* Here when our window gets focus
*/
void
do_focusin(GR_EVENT_GENERAL *gp)
{
/* Currently a no-op: the focus-tracking code below is compiled out. */
#if 0
/* if the window receiving focus is scribble, remember last window*/
if (gp->wid == w->win && gp->wid != 1)
w->lastfocusid = gp->otherid;
#endif
}
 
 
/*
* Here when an exposure event occurs.
*/
void
do_exposure(GR_EVENT_EXPOSURE *ep)
{
	/* Only the scribble pad knows how to repaint itself; ignore
	 * exposure events for any other window. */
	if (ep->wid != w->win)
		return;
	Redisplay(w);
}
 
 
/*
* Here when a keyboard press or injection occurs.
*/
void
do_keystroke(GR_EVENT_KEYSTROKE *kp)
{
	/* Echo recognized/injected characters into the debug text
	 * window, but only when it was enabled with -t. */
	if (!bTextwin)
		return;
	char_out(kp->ch);
}
 
static void
char_del(GR_COORD x, GR_COORD y)
{
/* Step the cursor back one cell and erase the character cell at (x, y)
 * by painting a background-colored rectangle over it.  Assumes a fixed
 * width font (see the '\b' case in char_out). */
xpos -= width;
GrFillRect(wt, gctb, x+1, y /*- height*/ /*+ base*/, width, height);
}
 
/*
 * Emit one character into the debug text window at the (xpos, ypos)
 * cursor, handling CR/LF, bell, tab, and backspace specially.  Wraps
 * to a new line at the right margin; when the bottom is reached the
 * window is cleared (no scrolling).
 */
static void
char_out(GR_CHAR ch)
{
switch(ch) {
case '\r':
case '\n':
xpos = 0;
ypos += height;
if(ypos >= TEXTWIN_HEIGHT - height) {
ypos -= height;
 
/* FIXME: changing FALSE to TRUE crashes nano-X*/
/* clear screen, no scroll*/
ypos = 0;
GrClearWindow(wt, GR_FALSE);
}
return;
case '\007': /* bel*/
return;
case '\t':
/* Advance to the next multiple-of-8 column by emitting spaces. */
xpos += width;
while((xpos/width) & 7)
char_out(' ');
return;
case '\b': /* assumes fixed width font!!*/
if (xpos <= 0)
return;
/* Erase the previous cell, overwrite with a space, then step back
 * again so the cursor ends up one cell to the left. */
char_del(xpos, ypos);
char_out(' ');
char_del(xpos, ypos);
return;
}
GrText(wt, gct, xpos+1, ypos, &ch, 1, GR_TFTOP);
xpos += width;
 
/* Wrap at the right margin. */
if (xpos >= TEXTWIN_WIDTH-width)
char_out('\n');
}
/li_recognizer.c
0,0 → 1,2571
/*
* li_recognizer.c
*
* Copyright 2000 Compaq Computer Corporation.
* Copying or modifying this code for any purpose is permitted,
* provided that this copyright notice is preserved in its entirety
* in all copies or modifications.
* COMPAQ COMPUTER CORPORATION MAKES NO WARRANTIES, EXPRESSED OR
* IMPLIED, AS TO THE USEFULNESS OR CORRECTNESS OF THIS CODE OR
*
*
* Adapted from cmu_recognizer.c by Jay Kistler.
*
* Where is the CMU copyright???? Gotta track it down - Jim Gettys
*
* Credit to Dean Rubine, Jim Kempf, and Ari Rapkin.
*/
 
 
#include <sys/types.h>
#include <stdio.h>
#include <string.h>
#ifndef ELX
#include <stdlib.h>
#endif
#include <math.h>
#include <locale.h>
#include <hre_internal.h>
#include <setjmp.h>
#include "util.h"
#include "matrix.h"
#include "sc.h"
#include "li_recognizer.h"
#include "li_recognizer_internal.h"
 
 
int lidebug = 0;
 
/*LI Magic Number.*/
 
#define LI_MAGIC 0xACCBADDD
 
#define CHECK_LI_MAGIC(_a) \
((_a) != NULL && ((li_recognizer*)(_a))->li_magic == LI_MAGIC)
 
 
static void lialg_initialize(rClassifier *);
static int lialg_read_classifier_digest(rClassifier *);
static int lialg_canonicalize_examples(rClassifier *);
static char *lialg_recognize_stroke(rClassifier *, point_list *);
 
 
char* li_err_msg = NULL;
char _zdebug_flag[128];
 
#ifndef __ECOS
// This is standard - defined in <stdlib.h>
#define bcopy(s1,s2,n) memcpy(s2,s1,n)
#endif
 
#if 0 /* was #ifdef mips*/
char *strdup(char* from) {
char* to;
int len = strlen(from) + 1;
 
/* to = (char *) safe_malloc( len * sizeof(char) );*/
to = allocate(len, char);
memcpy(to, from, len);
return to;
}
#endif
 
 
/*Freeing classifier*/
 
static void
free_rClassifier(rClassifier* rc);
 
/*
* Point List Support
*/
 
/*
 * Prepend a copy of an npts-point stroke to list l and return the new
 * head.  (The original comment said "stick on end", but the node is in
 * fact pushed on the front; order does not matter to the callers.)
 * allocate()/make_pen_point_array() die on allocation failure.
 */
static point_list*
add_example(point_list* l,int npts,pen_point* pts)
{
	point_list *node = allocate(1, point_list);
	pen_point *copy = make_pen_point_array(npts);

	node->npts = npts;
	node->pts = copy;
	node->next = l;

	/* Deep-copy the caller's points into the new node. */
	bcopy(pts, copy, npts * sizeof(pen_point));

	return node;
}
 
/* Free an entire example list: each node's point array, then the node. */
static void
delete_examples(point_list* l)
{
	while (l != NULL) {
		point_list *next = l->next;

		free(l->pts);
		free(l);
		l = next;
	}
}
 
/*
* Local functions
*/
 
/*
* recognize_internal-Form Vector, use Classifier to classify, return char.
*/
 
/*
 * recognize_internal - wrap the stroke's points in a temporary
 * single-element point list, classify it, and release the copy.
 * Returns the class-name string from the classifier, or NULL.
 * NOTE: rconf is never written by this implementation.
 */
static char*
recognize_internal(rClassifier* rec,pen_stroke* str,int* rconf)
{
	char *result;
	point_list *tmp = add_example(NULL, str->ps_npts, str->ps_pts);

	if (tmp == NULL)
		return NULL;

	result = lialg_recognize_stroke(rec, tmp);
	delete_examples(tmp);

	return result;
}
 
/*
* file_path-Construct pathname, check for proper extension.
*/
 
/*
 * file_path - build "dir/filename" into pathname, first checking that
 * filename carries the classifier extension.  Returns 0 on success,
 * -1 if the extension is missing or wrong.  The caller must supply a
 * pathname buffer of at least strlen(dir)+strlen(filename)+2 bytes.
 */
static int
file_path(char* dir,char* filename,char* pathname)
{
	char *ext = strrchr(filename, '.');

	/* Must have an extension, and it must be the classifier one. */
	if (ext == NULL || strcmp(ext, LI_CLASSIFIER_EXTENSION) != 0)
		return -1;

	sprintf(pathname, "%s/%s", dir, filename);
	return 0;
}
 
/*read_classifier_points-Read points so classifier can be extended.*/
 
/*
 * Parse the per-class example strokes that follow the sClassifier data
 * in a classifier file.  For each of nclss classes the file holds:
 * "<nexamples> <classname>" then, per example, "<npts>" followed by
 * npts "x y" pairs.  On success the class name and example arrays are
 * copied into cnames/ex and 0 is returned; on any parse or allocation
 * failure everything built so far is freed and -1 is returned.
 */
static int
read_classifier_points(FILE* fd,int nclss,point_list** ex,char** cnames)
{
int i,j,k;
char buf[BUFSIZ];
int nex = 0;
char* names[MAXSCLASSES];
point_list* examples[MAXSCLASSES];
pen_point* pts;
int npts;
 
/*Initialize*/
 
/* NULL everything so the cleanup path can free unconditionally. */
for( i = 0; i < MAXSCLASSES; i++ ) {
names[i] = NULL;
examples[i] = NULL;
}
 
/*Go thru classes.*/
 
/* ari */
/* fprintf(stderr, "Classes: [ "); */
 
for( k = 0; k < nclss; k++ ) {
 
/*Read class name and number of examples.*/
if( fscanf(fd,"%d %s",&nex,buf) != 2 ) {
printf("%s *FAILED* - line: %d\n", __FUNCTION__, __LINE__);
goto unallocate;
}
/*Save class name*/
names[k] = strdup(buf);
/* ari */
/* fprintf(stderr, "%s ", buf); */
 
/*Read examples.*/
for( i = 0; i < nex; i++ ) {
/*Read number of points.*/
if( fscanf(fd,"%d",&npts) != 1 ) {
printf("%s *FAILED* - line: %d\n", __FUNCTION__, __LINE__);
goto unallocate; /*Boy would I like exceptions!*/
}
/*Allocate array for points.*/
if( (pts = make_pen_point_array(npts)) == NULL ) {
printf("%s *FAILED* - line: %d\n", __FUNCTION__, __LINE__);
goto unallocate;
}
/*Read in points.*/
for( j = 0; j < npts; j++ ) {
int x,y;
int jj;
if( fscanf(fd,"%d %d",&x,&y) != 2 ) {
/* Dump diagnostics (class, example, and the points read so
 * far) before bailing out through the cleanup path. */
delete_pen_point_array(pts);
printf("%s *FAILED* - line: %d\n", __FUNCTION__, __LINE__);
printf("class = %d/%d/%s, ex = %d/%d, pt: %d/%d\n",
k, nclss, names[k], i, nex, j, npts);
for (jj = 0; jj < j; jj++) {
printf("pts[%d] = %d/%d\n", jj, pts[jj].x, pts[jj].y);
}
goto unallocate;
}
pts[j].x = x;
pts[j].y = y;
}
/*Add example*/
/* add_example deep-copies pts, so the scratch array is freed in
 * both the success and failure branches. */
if( (examples[k] = add_example(examples[k],npts,pts)) == NULL ) {
delete_pen_point_array(pts);
printf("%s *FAILED* - line: %d\n", __FUNCTION__, __LINE__);
goto unallocate;
}
delete_pen_point_array(pts);
}
}
 
/* ari -- end of list of classes */
/* fprintf(stderr, "]\n"); */
 
/*Transfer to recognizer.*/
 
bcopy(examples,ex,sizeof(examples));
bcopy(names,cnames,sizeof(names));
 
return(0);
 
/*Error. Deallocate memory and return.*/
 
unallocate:
 
/* Free classes 0..k; slots not yet filled are still NULL, which both
 * delete_examples() and free() tolerate. */
for( ; k >= 0; k-- ) {
delete_examples(examples[k]);
free(names[k]);
}
 
error("Error in reading example points from classifier file");
return(-1);
}
 
/*read_classifier-Read a classifier file.*/
 
/*
 * read_classifier - load a full classifier file into rc: first the
 * sClassifier proper, then the raw example points (kept so the
 * classifier can later be extended).  Returns 0 on success, -1 on
 * failure (in which case rc is left untouched).
 */
static int read_classifier(FILE* fd,rClassifier* rc)
{
	sClassifier sc;

	li_err_msg = NULL;

	sc = sRead(fd);
	if (sc == NULL)
		return -1;

	if (read_classifier_points(fd, sc->nclasses, rc->ex, rc->cnames) != 0) {
		sFreeClassifier(sc);
		return -1;
	}

	/* Hand the parsed sClassifier over to the rClassifier. */
	rc->sc = sc;
	return 0;
}
 
/*
* Extension Functions
*/
 
/* getClasses and clearState are by Ari */
 
/*
 * recognizer_getClasses - report the class names known to the
 * classifier.  *nc receives the count and *list a freshly allocated
 * array; the strings it points at are the recognizer's own cnames
 * entries (not copies).  Returns 0 on success, -1 on a bad handle.
 */
static int
recognizer_getClasses (recognizer r, char ***list, int *nc)
{
	li_recognizer* rec = (li_recognizer*)r->recognizer_specific;
	sClassifier sc;
	char **out;
	int i, nclasses;

	if( !CHECK_LI_MAGIC(rec) ) {
		li_err_msg = "Not a LI recognizer";
		return -1;
	}

	sc = rec->li_rc.sc;
	nclasses = sc->nclasses;
	*nc = nclasses;

	out = allocate(nclasses, char*);
	for (i = 0; i < nclasses; i++)
		out[i] = rec->li_rc.cnames[i];

	*list = out;
	return 0;
}
 
/* Extension hook: clearing state is unimplemented here; always fails. */
static int
recognizer_clearState (recognizer r)
{
	li_err_msg = "Clearing state is not supported by the LI recognizer";
	return -1;
}
 
/* Extension-function probe: true iff r is a valid LI recognizer. */
static bool isa_li(recognizer r)
{
	return CHECK_LI_MAGIC(r);
}
 
/* Extension hook: training is unimplemented here; always fails. */
static int
recognizer_train(recognizer r,rc* rec_xt,u_int nstrokes,
		 pen_stroke* strokes,rec_element* re,
		 bool replace_p)
{
	li_err_msg = "Training is not supported by the LI recognizer";
	return -1;
}
 
/*
 * li_recognizer_get_example - fetch the instance'th canonical example
 * of the given class.  On success fills *name (the class name, not a
 * copy), *points and *npts (aliases into the stored example) and
 * returns 0; returns -1 on a bad handle, class index, or instance.
 */
int
li_recognizer_get_example (recognizer r,
			   int class,
			   int instance,
			   char **name,
			   pen_point **points,
			   int *npts)
{
	li_recognizer *rec = (li_recognizer*)r->recognizer_specific;
	sClassifier sc;
	point_list *pl;

	if( !CHECK_LI_MAGIC(rec) ) {
		li_err_msg = "Not a LI recognizer";
		return(-1);
	}
	sc = rec->li_rc.sc;

	/*
	 * BUGFIX: classes are indexed 0..nclasses-1, but the original
	 * test ("class > sc->nclasses") accepted class == nclasses and
	 * any negative value, reading past the canonex/cnames arrays.
	 */
	if (class < 0 || class >= sc->nclasses)
		return -1;

	/* Walk to the requested instance in the class's example list. */
	pl = rec->li_rc.canonex[class];
	while (instance && pl)
	{
		pl = pl->next;
		instance--;
	}
	if (!pl)
		return -1;

	*name = rec->li_rc.cnames[class];
	*points = pl->pts;
	*npts = pl->npts;
	return 0;
}
 
/*
* API Functions
*/
 
 
/*li_recognizer_load-Load a classifier file.*/
 
/*
 * li_recognizer_load - load a classifier file "dir/filename" into the
 * recognizer.  Tries a pre-digested classifier first; otherwise parses
 * the full file and canonicalizes its examples.  Returns 0 on success,
 * -1 on failure (with li_err_msg set where a message is available).
 */
static int li_recognizer_load(recognizer r,char* dir,char* filename)
{
	FILE *fd;
	char* pathname;
	li_recognizer* rec;
	rClassifier* rc;

	rec = (li_recognizer*)r->recognizer_specific;

	/*Make sure recognizer's OK*/
	if( !CHECK_LI_MAGIC(rec) ) {
		li_err_msg = "Not a LI recognizer";
		return(-1);
	}

	rc = &(rec->li_rc);

	/*Check parameters.*/
	if( filename == NULL ) {
		li_err_msg = "Invalid parameters";
		return(-1);
	}

	/*We let the directory be null.*/
	if( dir == NULL || (int)strlen(dir) <= 0 ) {
		dir = ".";
	}

	/*Make full pathname and check filename*/
	pathname = allocate(strlen(dir) + strlen(filename) + 2, char);
	if( file_path(dir,filename,pathname) == -1 ) {
		free(pathname);
		li_err_msg = "Not a LI recognizer classifier file";
		return(-1);
	}

	/* Try to short-circuit the full classifier-file processing. */
	rc->file_name = pathname;
	if (lialg_read_classifier_digest(rc) == 0)
		return(0);
	rc->file_name = NULL;

	/*Open the file*/
	if( (fd = fopen(pathname,"r")) == NULL ) {
		free(pathname);
		li_err_msg = "Can't open classifier file";
		return(-1);
	}

	/*
	 * NOTE(review): this guard is dead code -- rc->file_name was
	 * unconditionally set to NULL just above, so free_rClassifier()
	 * can never be called here.  Left as-is to preserve behavior;
	 * confirm intent before reviving it.
	 */
	if( rc->file_name != NULL ) {
		free_rClassifier(rc);
	}

	/*Read classifier.*/
	if( read_classifier(fd,rc) < 0 ) {
		/* BUGFIX: the stream was leaked on this error path. */
		fclose(fd);
		free(pathname);
		return(-1);
	}

	/*Close file.*/
	fclose(fd);

	/*Add classifier name.*/
	rc->file_name = pathname;

	/* Canonicalize examples. */
	if (lialg_canonicalize_examples(rc) != 0) {
		free(pathname);
		rc->file_name = NULL;
		return(-1);
	}

	return(0);
}
 
/*li_recognizer_save-Save a classifier file.*/
 
/* Saving classifiers is unimplemented; always fails with a message. */
static int li_recognizer_save(recognizer r,char* dir,char* filename)
{
	li_err_msg = "Saving is not supported by the LI recognizer";
	return -1;
}
 
/* Dictionaries are unimplemented; always fails with a message. */
static wordset
li_recognizer_load_dictionary(recognizer rec,char* directory,char* name)
{
	li_err_msg = "Dictionaries are not supported by the LI recognizer";
	return NULL;
}
 
/* Dictionaries are unimplemented; always fails with a message. */
static int
li_recognizer_save_dictionary(recognizer rec,
			      char* directory,
			      char* name,
			      wordset dict)
{
	li_err_msg = "Dictionaries are not supported by the LI recognizer";
	return -1;
}
 
/* Dictionaries are unimplemented; always fails with a message. */
static int
li_recognizer_free_dictionary(recognizer rec,wordset dict)
{
	li_err_msg = "Dictionaries are not supported by the LI recognizer";
	return -1;
}
 
/* Dictionaries are unimplemented; always fails with a message. */
static int
li_recognizer_add_to_dictionary(recognizer rec,letterset* word,wordset dict)
{
	li_err_msg = "Dictionaries are not supported by the LI recognizer";
	return -1;
}
 
/* Dictionaries are unimplemented; always fails with a message. */
static int
li_recognizer_delete_from_dictionary(recognizer rec,
				     letterset* word,
				     wordset dict)
{
	li_err_msg = "Dictionaries are not supported by the LI recognizer";
	return -1;
}
 
/*
 * li_recognizer_error - return the pending error message and clear it.
 * Returns NULL (and records a fresh error) on a bad handle.
 */
static char*
li_recognizer_error(recognizer rec)
{
	char* pending = li_err_msg;

	if( !CHECK_LI_MAGIC(rec->recognizer_specific) ) {
		li_err_msg = "Not a LI recognizer";
		return NULL;
	}

	/* Consume the message: the caller owns this one report. */
	li_err_msg = NULL;
	return pending;
}
 
/*
 * li_recognizer_clear - the LI recognizer buffers nothing, so there is
 * nothing to clear; the magic check only validates the handle.  Always
 * returns 0 (matching the original, even on a bad handle).
 */
static int
li_recognizer_clear(recognizer r,bool delete_points_p)
{
	li_recognizer* rec = (li_recognizer*)r->recognizer_specific;

	if( !CHECK_LI_MAGIC(rec) )
		li_err_msg = "Not a LI recognizer";

	return 0;
}
 
/* Recognition contexts are unimplemented; always fails with a message. */
static int
li_recognizer_set_context(recognizer r,rc* rec_xt)
{
	li_err_msg = "Contexts are not supported by the LI recognizer";
	return -1;
}
 
/* Recognition contexts are unimplemented; always fails with a message. */
static rc*
li_recognizer_get_context(recognizer r)
{
	li_err_msg = "Contexts are not supported by the LI recognizer";
	return NULL;
}
 
/* Stroke-buffer access is unimplemented; always fails with a message. */
static int
li_recognizer_get_buffer(recognizer r, u_int* nstrokes,pen_stroke** strokes)
{
	li_err_msg = "Buffer get/set are not supported by the LI recognizer";
	return -1;
}
 
/* Stroke-buffer access is unimplemented; always fails with a message. */
static int
li_recognizer_set_buffer(recognizer r,u_int nstrokes,pen_stroke* strokes)
{
	li_err_msg = "Buffer get/set are not supported by the LI recognizer";
	return -1;
}
 
/*
 * li_recognizer_translate - classify pen strokes.  This single-stroke
 * recognizer examines only the first stroke in tps (ncs is merely
 * validated to be >= 1), and *ret is always left NULL.
 *
 * Return contract (nonstandard): on a recognized stroke, *nret is set
 * to 1 and the ASCII code of the recognized character is returned; on
 * an unrecognized stroke, *nret is 1 and 0 is returned; -1 is returned
 * only for a bad handle or bad parameters.
 */
static int
li_recognizer_translate(recognizer r,
u_int ncs,
pen_stroke* tps,
bool correlate_p,
int* nret,
rec_alternative** ret)
{
char* clss = NULL;
li_recognizer* rec;
int conf;
rClassifier* rc;
rec = (li_recognizer*)r->recognizer_specific;
 
*nret = 0;
*ret = NULL;
 
/*Check for LI recognizer.*/
 
if( !CHECK_LI_MAGIC(rec) ) {
li_err_msg = "Not a LI recognizer";
return(-1);
}
 
rc = &(rec->li_rc);
 
/*Check for valid parameters.*/
if (ncs < 1) {
li_err_msg = "Invalid parameters: ncs";
return(-1);
}
if( tps == NULL) {
li_err_msg = "Invalid parameters: tps";
return(-1);
}
if( nret == NULL) {
li_err_msg = "Invalid parameters: nret";
return(-1);
}
if( ret == NULL) {
li_err_msg = "Invalid parameters: ret";
return(-1);
}
 
/* Earlier combined form of the parameter checks, kept for reference: */
/* if( ncs < 1 || tps == NULL || nret == NULL || ret == NULL) {
li_err_msg = "Invalid parameters";
return(-1);
}
*/
 
/*Check for null classifier. It must have at least one.*/
/*
if( rec->li_rc.sc == NULL ) {
li_err_msg = "No classifier";
return(-1);
}
*/
 
/*
* Go through the stroke array and recognize. Since this is a single
* stroke recognizer, each stroke is treated as a separate
* character or gesture. We allow only characters or gestures
* to be recognized at one time, since otherwise, handling
* the display of segmentation would be difficult.
*/
clss = recognize_internal(rc,tps,&conf);
if (clss == NULL) {
/*
li_err_msg = "unrecognized character";
return(-1);
*/
/* Unrecognized: report one (empty) result and return code 0. */
*nret = 1;
return(0);
}
 
/*Return values.*/
/* The recognized character's code is the return value itself. */
*nret = 1;
return(*clss);
}
 
 
/*
 * li_recognizer_get_extension_functions - hand back the table of
 * LI-specific extension entry points (freshly allocated), or NULL on
 * a bad handle.
 */
static rec_fn*
li_recognizer_get_extension_functions(recognizer rec)
{
	rec_fn* fns;

	if( !CHECK_LI_MAGIC(rec->recognizer_specific) ) {
		li_err_msg = "Not a LI recognizer";
		return NULL;
	}

	fns = make_rec_fn_array(LI_NUM_EX_FNS);

	/* ari -- clearState & getClasses are mine */
	fns[LI_GET_CLASSES] = (rec_fn)recognizer_getClasses;
	fns[LI_CLEAR] = (rec_fn)recognizer_clearState;
	fns[LI_ISA_LI] = (rec_fn)isa_li;
	fns[LI_TRAIN] = (rec_fn)recognizer_train;

	return fns;
}
 
static char**
li_recognizer_get_gesture_names(recognizer r)
{
    /* The LI recognizer has no gesture support; always fail. */
    li_err_msg = "Gestures are not supported by the LI recognizer";
    return(NULL);
}
 
static xgesture
li_recognizer_set_gesture_action(recognizer r,
                                 char* name,
                                 xgesture fn,
                                 void* wsinfo)
{
    /* The LI recognizer has no gesture support; always fail. */
    li_err_msg = "Gestures are not supported by the LI recognizer";
    return(NULL);
}
 
/*
* Exported Functions
*/
 
/*RECOGNIZER_INITIALIZE-Initialize the recognizer.*/
 
/* note from ari: this expands via pre-processor to
*
* recognizer __recognizer_internal_initialize(rec_info* ri)
*/
 
/*
 * Recognizer entry point (expands to __recognizer_internal_initialize).
 * Validates the requested locale/character sets, allocates the generic
 * recognizer plus the LI-specific state, and wires up the HRE operation
 * table.  Returns the new recognizer, or NULL on failure.
 */
RECOGNIZER_INITIALIZE(ri)
{
    recognizer r;
    li_recognizer* rec;
    int i;

    /*Check that locale matches.*/
    if( strcmp(ri->ri_locale,LI_SUPPORTED_LOCALE) != 0 ) {
        li_err_msg = "Not a supported locale";
        fprintf(stderr, "Locale error.\n");
        /* NOTE(review): the failure return is compiled out -- a locale
           mismatch only warns, and initialization continues. */
#if 0
        return(NULL);
#endif
    }

    /*
     * Check that character sets match. Note that this is only approximate,
     * since the classifier file will have more information.
     */
    if( ri->ri_subset != NULL ) {
        for(i = 0; ri->ri_subset[i] != NULL; i++ ) {
            if( strcmp(ri->ri_subset[i],UPPERCASE) != 0 &&
                strcmp(ri->ri_subset[i],LOWERCASE) != 0 &&
                strcmp(ri->ri_subset[i],DIGITS) != 0 &&
                strcmp(ri->ri_subset[i],GESTURE) != 0 ) {
                li_err_msg = "Not a supported character set";
                fprintf(stderr, "charset error.\n");
                return(NULL);
            }
        }
    }

    /* ari */
    /* Allocate the generic recognizer object. */
    r = make_recognizer(ri);
    /*fprintf(stderr, "past make_recognizer.\n");*/
    if( r == NULL ) {
        li_err_msg = "Can't allocate storage";
        return(NULL);
    }

    /*Make a LI recognizer structure.*/
    /* rec = (li_recognizer*)safe_malloc(sizeof(li_recognizer))) == NULL ); */
    rec = allocate(1, li_recognizer);
    r->recognizer_specific = rec;

    rec->li_rc.file_name = NULL;
    rec->li_rc.sc = NULL;

    /*Initialize the recognizer struct: fill in the HRE operation table.*/
    r->recognizer_load_state = li_recognizer_load;
    r->recognizer_save_state = li_recognizer_save;
    r->recognizer_load_dictionary = li_recognizer_load_dictionary;
    r->recognizer_save_dictionary = li_recognizer_save_dictionary;
    r->recognizer_free_dictionary = li_recognizer_free_dictionary;
    r->recognizer_add_to_dictionary = li_recognizer_add_to_dictionary;
    r->recognizer_delete_from_dictionary = li_recognizer_delete_from_dictionary;
    r->recognizer_error = li_recognizer_error;
    r->recognizer_translate = li_recognizer_translate;
    r->recognizer_get_context = li_recognizer_get_context;
    r->recognizer_set_context = li_recognizer_set_context;
    r->recognizer_get_buffer = li_recognizer_get_buffer;
    r->recognizer_set_buffer = li_recognizer_set_buffer;
    r->recognizer_clear = li_recognizer_clear;
    r->recognizer_get_extension_functions =
        li_recognizer_get_extension_functions;
    r->recognizer_get_gesture_names = li_recognizer_get_gesture_names;
    r->recognizer_set_gesture_action =
        li_recognizer_set_gesture_action;

    /*Initialize LI Magic Number (used by CHECK_LI_MAGIC to validate the
      recognizer_specific pointer in every entry point).*/
    rec->li_magic = LI_MAGIC;

    /*Initialize rClassifier: no file loaded, no examples, no names yet.*/
    rec->li_rc.file_name = NULL;

    for( i = 0; i < MAXSCLASSES; i++ ) {
        rec->li_rc.ex[i] = NULL;
        rec->li_rc.cnames[i] = NULL;
    }

    lialg_initialize(&rec->li_rc);

    /*Get rid of error message. Not needed here.*/
    li_err_msg = NULL;

    return(r);
}
 
/*free_rClassifier-Free the rClassifier.*/
 
/*
 * free_rClassifier - release all resources owned by an rClassifier:
 * the file name, every per-class example list and class name, and the
 * sClassifier, if any.
 */
static void
free_rClassifier(rClassifier* rc)
{
    int i;

    if( rc->file_name != NULL) {
        free(rc->file_name);
    }

    /* Bound the walk by MAXSCLASSES: the original relied solely on a
       NULL sentinel, which reads past the end of ex[]/cnames[] when
       every class slot is in use. */
    for( i = 0; i < MAXSCLASSES && rc->ex[i] != NULL; i++) {
        delete_examples(rc->ex[i]);
        free(rc->cnames[i]);
    }

    if(rc->sc != NULL ) {
        sFreeClassifier(rc->sc);
    }
}
 
/*RECOGNIZER_FINALIZE-Deallocate the recognizer, finalize.*/
 
/*
 * Recognizer tear-down entry point.  Verifies the magic number, then
 * releases the classifier resources, the LI-specific struct, and
 * finally the generic recognizer object.  Returns 0 on success,
 * -1 if r is not an LI recognizer.
 */
RECOGNIZER_FINALIZE(r)
{
    li_recognizer* rec = (li_recognizer*)r->recognizer_specific;

    /* Refuse to finalize anything that is not an LI recognizer. */
    if (!CHECK_LI_MAGIC(rec)) {
        li_err_msg = "Not a LI recognizer";
        return(-1);
    }

    /* Free in dependency order: classifier internals first, then the
       struct that holds them, then the containing recognizer. */
    free_rClassifier(&rec->li_rc);
    free(rec);
    delete_recognizer(r);

    return(0);
}
 
 
/* **************************************************
 
Implementation of the Li/Yeung recognition algorithm
 
************************************************** */
 
/*#include <assert.h>*/
#ifdef __ECOS
#define MAXINT 0x7FFFFFFF
#else
#include <values.h>
#endif
#include <sys/time.h>
 
#ifdef __ultrix
/* Ultrix doesn't have these declarations in math.h! */
extern double rint(double);
extern float expf(float);
#endif
 
#ifdef ELX
extern double rint (double);
extern float expf (float); /* N.B. exp() appears to be broken on ELX! */
#endif
 
#define WORST_SCORE MAXINT
 
/* Dynamic programming parameters */
#define DP_BAND 3
#define MIN_SIM 0
#define MAX_DIST MAXINT
#define SIM_THLD 60 /* x 100 */
#define DIST_THLD 3200 /* x 100 */
 
/* Low-pass filter parameters -- empirically derived */
#define LP_FILTER_WIDTH 6
#define LP_FILTER_ITERS 8
#define LP_FILTER_THLD 250 /* x 100 */
#define LP_FILTER_MIN 5
 
/* Pseudo-extrema parameters -- empirically derived */
#define PE_AL_THLD 1500 /* x 100 */
#define PE_ATCR_THLD 135 /* x 100 */
 
/* Contour-angle derivation parameters */
#define T_ONE 1
#define T_TWO 20
 
/* Pre-processing and canonicalization parameters */
#define CANONICAL_X 108
#define CANONICAL_Y 128
#define DIST_SQ_THRESHOLD (3*3) /* copied from fv.h */
#define NCANONICAL 50
 
/* Tap-handling parameters */
#define TAP_CHAR "."
#define TAP_TIME_THLD 150 /* msec */
#define TAP_DIST_THLD 75 /* dx * dx + dy * dy */
#define TAP_PATHLEN 1000 /* x 100 */
 
 
/* Overload the time field of the pen_point struct with the chain-code. */
#define chaincode time
 
/* region types */
#define RGN_CONVEX 0
#define RGN_CONCAVE 1
#define RGN_PLAIN 2
#define RGN_PSEUDO 3
 
 
/* A contiguous run of stroke points sharing one curvature type. */
typedef struct RegionList {
    int start;                 /* index of the region's first point */
    int end;                   /* index of the region's last point (inclusive) */
    int type;                  /* RGN_CONVEX / RGN_CONCAVE / RGN_PLAIN / RGN_PSEUDO */
    struct RegionList *next;   /* next region in the singly-linked list */
} region_list;


/* direction-code table; indexed by dx, dy */
/* Maps (dx+1, dy+1) for unit steps to an 8-way chain code.  The center
   entry (dx==0, dy==0) is a large sentinel -- NOTE(review): callers are
   presumed never to produce a zero step; confirm via the filtering pass. */
static int lialg_dctbl[3][3] = {{1, 0, 7}, {2, 0x7FFFFFFF, 6}, {3, 4, 5}};

/* low-pass filter weights */
static int lialg_lpfwts[2 * LP_FILTER_WIDTH + 1];
static int lialg_lpfconst = -1;   /* -1 == "weights not yet computed"; see lialg_compute_regions */
 
 
static int lialg_preprocess_stroke(point_list *);
static point_list *lialg_compute_dominant_points(point_list *);
static point_list *lialg_interpolate_points(point_list *);
static void lialg_bresline(pen_point *, pen_point *, point_list *, int *);
static void lialg_compute_chain_code(point_list *);
static void lialg_compute_unit_chain_code(point_list *);
static region_list *lialg_compute_regions(point_list *);
static point_list *lialg_compute_dompts(point_list *, region_list *);
static int *lialg_compute_contour_angle_set(point_list *, region_list *);
static void lialg_score_stroke(point_list *, point_list *, int *, int *);
static int lialg_compute_similarity(point_list *, point_list *);
static int lialg_compute_distance(point_list *, point_list *);
 
static int lialg_read_classifier_digest(rClassifier *);
 
static int lialg_canonicalize_examples(rClassifier *);
static int lialg_canonicalize_example_stroke(point_list *);
static int lialg_compute_equipoints(point_list *);
 
static int lialg_compute_pathlen(point_list *);
static int lialg_compute_pathlen_subset(point_list *, int, int);
static int lialg_filter_points(point_list *);
static int lialg_translate_points(point_list *, int, int, int, int);
static void lialg_get_bounding_box(point_list *, int *, int *, int *, int *);
static void lialg_compute_lpf_parameters();
static int isqrt(int);
static int likeatan(int, int);
static int quadr(int);
 
 
/*************************************************************
 
Core routines for the Li/Yeung recognition algorithm
 
*************************************************************/
 
/* Reset the per-class dominant-point lists to the empty state. */
static void lialg_initialize(rClassifier *rec) {
    int slot;

    for (slot = 0; slot < MAXSCLASSES; slot++)
        rec->dompts[slot] = NULL;
}
 
 
/*
 * Main recognition routine -- called by HRE API.
 *
 * Classifies a single stroke: first checks for a tap, then
 * pre-processes the stroke, extracts its dominant points, and scores
 * them (DP distance; lower is better) against every class's stored
 * dominant points.  Returns the best class name (owned by the
 * classifier -- do not free), TAP_CHAR for a tap, or NULL when no
 * class scores within DIST_THLD or on error.
 */
static char *lialg_recognize_stroke(rClassifier *rec, point_list *stroke) {
    int i;
    char *name = NULL;
    point_list *input_dompts = NULL;
    char *best_name = NULL;
    int best_score = WORST_SCORE;
    char *curr_name;
    point_list *curr_dompts = NULL;
    /*struct timeval stv, etv;
    int elapsed;*/

    /* (void)gettimeofday(&stv, NULL);*/

    if (stroke->npts < 1) goto done;

    /* Check for tap. */
    {
        /*
        pen_point *startpt = &stroke->pts[0];
        pen_point *endpt = &stroke->pts[stroke->npts - 1];
        int dt = endpt->time - startpt->time;
        int dx = endpt->x - startpt->x;
        int dy = endpt->y - startpt->y;
        int magsq = dx * dx + dy * dy;
        */

        /* First thing is to filter out ``close points.'' */
        if (lialg_filter_points(stroke) != 0) return(NULL);

        /* Unfortunately, we don't have the actual time that each point */
        /* was recorded (i.e., dt is invalid). Hence, we have to use a */
        /* heuristic based on total distance and the number of points. */
        if (stroke->npts == 1 || lialg_compute_pathlen(stroke) < TAP_PATHLEN)
            return(TAP_CHAR);
    }

    /* Pre-process input stroke (filter, scale, center). */
    if (lialg_preprocess_stroke(stroke) != 0) goto done;

    /* Compute its dominant points. */
    input_dompts = lialg_compute_dominant_points(stroke);
    if (input_dompts == NULL) goto done;

    /* Score input stroke against every class in classifier.
       Iteration stops at the first empty class slot. */
    for (i = 0, curr_name = rec->cnames[i], curr_dompts = rec->dompts[i];
         i < MAXSCLASSES && curr_name != NULL && curr_dompts != NULL;
         i++, curr_name = rec->cnames[i], curr_dompts = rec->dompts[i]) {
        int sim;
        int dist;
        int curr_score;

        lialg_score_stroke(input_dompts, curr_dompts, &sim, &dist);
        curr_score = dist;   /* similarity only gates; distance decides */

        if (lidebug && curr_score < DIST_THLD)
            fprintf(stderr, "(%s, %d, %d) ", curr_name, sim, dist);

        /* Is it the best so far? */
        if (curr_score < best_score && curr_score <= DIST_THLD) {
            best_score = curr_score;
            best_name = curr_name;
        }
    }

    if (lidebug)
        fprintf(stderr, "\n");

    /* No errors. */
    name = best_name;

done:
    /* input_dompts may be NULL here; delete_examples must tolerate that
       -- it is called the same way on other early-exit paths. */
    delete_examples(input_dompts);
    /* (void)gettimeofday(&etv, NULL);
    elapsed = (1000 * (etv.tv_sec - stv.tv_sec)) + ((etv.tv_usec - stv.tv_usec + 500) / 1000);
    fprintf(stderr, "elapsed = %d\n", elapsed);
    */
    return(name);
}
 
 
/*
 * Scale and center a filtered stroke into the CANONICAL_X x CANONICAL_Y
 * box.  Coordinates are fixed-point (x 100) after this call.  The final
 * x/y ranges are stored in the point list.  Returns 0 on success, -1 on
 * translation failure.
 */
static int lialg_preprocess_stroke(point_list *points) {
    int minx, miny, maxx, maxy, xrange, yrange, scale, xoff, yoff;

    /* Filter out points that are too close. */
    /* We did this earlier, when we checked for a tap. */
    /*
    if (lialg_filter_points(points) != 0) return(-1);
    */

    /* assert(points->npts > 0);*/

    /* Scale up to avoid conversion errors. */
    /* Pick the scale from whichever axis is proportionally larger
       relative to the canonical box.  NOTE(review): divides by the
       chosen range -- assumes it is non-zero, which appears guaranteed
       because taps/degenerate strokes were returned earlier; confirm. */
    lialg_get_bounding_box(points, &minx, &miny, &maxx, &maxy);
    xrange = maxx - minx;
    yrange = maxy - miny;
    scale = ( ((100 * xrange + CANONICAL_X / 2) / CANONICAL_X) >
              ((100 * yrange + CANONICAL_Y / 2) / CANONICAL_Y))
        ? (100 * CANONICAL_X + xrange / 2) / xrange
        : (100 * CANONICAL_Y + yrange / 2) / yrange;
    if (lialg_translate_points(points, minx, miny, scale, scale) != 0)
        return(-1);

    /* Center the stroke. */
    lialg_get_bounding_box(points, &minx, &miny, &maxx, &maxy);
    xrange = maxx - minx;
    yrange = maxy - miny;
    xoff = -((CANONICAL_X - xrange + 1) / 2);
    yoff = -((CANONICAL_Y - yrange + 1) / 2);
    if (lialg_translate_points(points, xoff, yoff, 100, 100) != 0) return(-1);

    /* Store the x and y ranges in the point list. */
    xrange = maxx - minx;
    yrange = maxy - miny;
    points->xrange = xrange;
    points->yrange = yrange;

    if (lidebug) {
        int i;
        fprintf(stderr, "After pre-processing: %d %d %d %d\n",
                minx, miny, maxx, maxy);
        for (i = 0; i < points->npts; i++)
            fprintf(stderr, "  (%d %d)\n",
                    points->pts[i].x, points->pts[i].y);
        fflush(stderr);
    }

    return(0);
}
 
 
/*
 * Reduce a pre-processed stroke to its dominant points: interpolate to
 * unit steps, segment into curvature regions, then pick the extreme and
 * mid points.  Returns a freshly allocated point list (caller frees via
 * delete_examples), or NULL on allocation failure.
 */
static point_list *lialg_compute_dominant_points(point_list *points) {
    point_list *ipts = NULL;
    region_list *regions = NULL;
    point_list *dpts = NULL;

    /* Interpolate points so consecutive points are unit steps apart. */
    ipts = lialg_interpolate_points(points);
    if (ipts == NULL) return(NULL);
    if (lidebug) {
        int j;
        fprintf(stderr, "After interpolation: %d ipts\n", ipts->npts);
        for (j = 0; j < ipts->npts; j++) {
            fprintf(stderr, "  (%d, %d), %ld\n",
                    ipts->pts[j].x, ipts->pts[j].y, ipts->pts[j].chaincode);
        }
        fflush(stderr);
    }

    /* Compute regions (convex/concave/plain runs). */
    regions = lialg_compute_regions(ipts);
    /* assert(regions != NULL);*/

    /* Compute dominant points from the region structure. */
    dpts = lialg_compute_dompts(ipts, regions);
    if (lidebug) {
        int j;
        fprintf(stderr, "Dominant points: ");
        for (j = 0; j < dpts->npts; j++) {
            fprintf(stderr, "%d %d (%ld)  ",
                    dpts->pts[j].x, dpts->pts[j].y, dpts->pts[j].chaincode);
        }
        fprintf(stderr, "\n");
        fflush(stderr);
    }

    /* Delete region data structure (no longer needed). */
    {
        region_list *curr, *next;
        for (curr = regions; curr != NULL; ) {
            next = curr->next;
            free(curr);
            curr = next;
        }
    }
    delete_examples(ipts);
    return(dpts);
}
 
 
/* Input points are assumed to be integer-valued! */
/*
 * Produce a new point list in which every consecutive pair of input
 * points is connected by a Bresenham line, so neighbors differ by at
 * most one in x and y.  The unit chain code is computed for the result.
 * Returns NULL on allocation failure; caller frees via delete_examples.
 */
static point_list *lialg_interpolate_points(point_list *points) {
    int i, j;
    int maxpts;
    point_list *newpts;

    /* Compute an upper-bound on the number of interpolated points:
       each segment contributes at most |dx| + |dy| points. */
    maxpts = 0;
    for (i = 0; i < (points->npts - 1); i++) {
        pen_point *pta = &(points->pts[i]);
        pen_point *ptb = &(points->pts[i+1]);
        maxpts += abs(pta->x - ptb->x) + abs(pta->y - ptb->y);
    }

    /* Allocate an array of the requisite size. */
    maxpts += points->npts;
    /* newpts = (point_list *)safe_malloc(sizeof(point_list)); */
    newpts = allocate(1, point_list);
    newpts->pts = make_pen_point_array(maxpts);
    if (newpts->pts == NULL) {
        free(newpts);
        return(NULL);
    }
    newpts->npts = maxpts;
    newpts->next = NULL;

    /* Interpolate each of the segments. */
    j = 0;
    for (i = 0; i < (points->npts - 1); i++) {
        pen_point *startpt = &(points->pts[i]);
        pen_point *endpt = &(points->pts[i+1]);

        lialg_bresline(startpt, endpt, newpts, &j);

        j--;   /* end point gets recorded as start point of next segment! */
    }

    /* Add-in last point (the loop above backed off over it). */
    newpts->pts[j++] = points->pts[points->npts - 1];
    newpts->npts = j;   /* shrink logical size to points actually written */

    /* Compute the chain code for P (the list of points). */
    lialg_compute_unit_chain_code(newpts);

    return(newpts);
}
 
 
/* This implementation is due to Kenny Hoff. */
/*
 * Integer Bresenham line rasterizer: appends the points of the segment
 * from startpt to endpt (inclusive of both) to newpts, starting at
 * index *j; *j is advanced past the last point written.  Only x and y
 * of the written points are set.
 */
static void lialg_bresline(pen_point *startpt, pen_point *endpt,
                           point_list *newpts, int *j) {
    int Ax, Ay, Bx, By, dX, dY, Xincr, Yincr;

    Ax = startpt->x;
    Ay = startpt->y;
    Bx = endpt->x;
    By = endpt->y;

    /* INITIALIZE THE COMPONENTS OF THE ALGORITHM THAT ARE NOT AFFECTED */
    /* BY THE SLOPE OR DIRECTION OF THE LINE */
    dX = abs(Bx-Ax);	/* store the change in X and Y of the line endpoints */
    dY = abs(By-Ay);

    /* DETERMINE "DIRECTIONS" TO INCREMENT X AND Y (REGARDLESS OF DECISION) */
    if (Ax > Bx) { Xincr=-1; } else { Xincr=1; }	/* which direction in X? */
    if (Ay > By) { Yincr=-1; } else { Yincr=1; }	/* which direction in Y? */

    /* DETERMINE INDEPENDENT VARIABLE (ONE THAT ALWAYS INCREMENTS BY 1 (OR -1) ) */
    /* AND INITIATE APPROPRIATE LINE DRAWING ROUTINE (BASED ON FIRST OCTANT */
    /* ALWAYS). THE X AND Y'S MAY BE FLIPPED IF Y IS THE INDEPENDENT VARIABLE. */
    if (dX >= dY) {   /* if X is the independent variable */
        int dPr = dY<<1;           /* amount to increment decision if right is chosen (always) */
        int dPru = dPr - (dX<<1);  /* amount to increment decision if up is chosen */
        int P = dPr - dX;          /* decision variable start value */

        /* process each point in the line one at a time (just use dX) */
        for (; dX>=0; dX--) {
            newpts->pts[*j].x = Ax;
            newpts->pts[*j].y = Ay;
            (*j)++;

            if (P > 0) {        /* is the pixel going right AND up? */
                Ax+=Xincr;      /* increment independent variable */
                Ay+=Yincr;      /* increment dependent variable */
                P+=dPru;        /* increment decision (for up) */
            }
            else {              /* is the pixel just going right? */
                Ax+=Xincr;      /* increment independent variable */
                P+=dPr;         /* increment decision (for right) */
            }
        }
    }
    else {            /* if Y is the independent variable */
        int dPr = dX<<1;           /* amount to increment decision if right is chosen (always) */
        int dPru = dPr - (dY<<1);  /* amount to increment decision if up is chosen */
        int P = dPr - dY;          /* decision variable start value */

        /* process each point in the line one at a time (just use dY) */
        for (; dY>=0; dY--) {
            newpts->pts[*j].x = Ax;
            newpts->pts[*j].y = Ay;
            (*j)++;

            if (P > 0) {        /* is the pixel going up AND right? */
                Ax+=Xincr;      /* increment dependent variable */
                Ay+=Yincr;      /* increment independent variable */
                P+=dPru;        /* increment decision (for up) */
            }
            else {              /* is the pixel just going up? */
                Ay+=Yincr;      /* increment independent variable */
                P+=dPr;         /* increment decision (for right) */
            }
        }
    }
}
 
 
/*
 * Assign each point (except the last) an 8-way direction code derived
 * from the integer arctangent of the segment to its successor.  The
 * code is stored in the (overloaded) chaincode field.
 */
static void lialg_compute_chain_code(point_list *pts) {
    int n = pts->npts - 1;
    int k;

    for (k = 0; k < n; k++) {
        pen_point *p = &pts->pts[k];
        pen_point *q = &pts->pts[k + 1];
        /*
        int tmp = rint(4.0 * atan2((double)dx, (double)dy) / M_PI);
        int dircode = (10 + tmp) % 8;
        */
        int angle = likeatan(q->y - p->y, q->x - p->x);

        p->chaincode = (12 - quadr(angle)) % 8;
    }
}
 
 
/*
 * Chain-code a list of interpolated points.  Adjacent points differ by
 * at most one in x and y, so (dx+1, dy+1) indexes directly into the
 * direction table.  The code is stored in the chaincode field of the
 * earlier point of each pair.
 */
static void lialg_compute_unit_chain_code(point_list *pts) {
    int k;

    for (k = 0; k + 1 < pts->npts; k++) {
        pen_point *p = &pts->pts[k];
        pen_point *q = &pts->pts[k + 1];

        /* assert(dircode < 8);*/
        p->chaincode = lialg_dctbl[(q->x - p->x) + 1][(q->y - p->y) + 1];
    }
}
 
 
/*
 * Segment an interpolated stroke into curvature regions.  Pipeline:
 *  1. derive the turning-angle set from the chain codes,
 *  2. low-pass filter it LP_FILTER_ITERS times,
 *  3. threshold near 180 degrees to "plain",
 *  4. split into runs of equal type (convex/concave/plain),
 *  5. drop non-plain runs shorter than LP_FILTER_MIN,
 *  6. insert pseudo-extrema inside long, strongly-curved plain runs.
 * All angles are in degrees x 100.  Returns a freshly-allocated region
 * list; the caller frees it.
 */
static region_list *lialg_compute_regions(point_list *pts) {
    region_list *regions = NULL;
    region_list *curr_reg = NULL;
    int *R[2 + LP_FILTER_ITERS];
    int *junk;
    int *curr, *next;
    int i, j;

    /* Initialize low-pass filter parameters if necessary. */
    if (lialg_lpfconst == -1)
        lialg_compute_lpf_parameters();

    /* Allocate a 2 x pts->npts array for use in computing the (filtered) Angle set, A_n. */
    /* junk = (int *)safe_malloc((2 + LP_FILTER_ITERS) * pts->npts * sizeof(int)); */
    junk = allocate((2 + LP_FILTER_ITERS) * pts->npts, int);
    for (i = 0; i < (2 + LP_FILTER_ITERS); i++)
        R[i] = junk + (i * pts->npts);
    curr = R[0];

    /* Compute the Angle set, A, in the first element of array R. */
    /* Values in R are in degrees, x 100. */
    curr[0] = 18000;   /* a_0 */
    for (i = 1; i < (pts->npts - 1); i++) {
        int d_i = pts->pts[i].chaincode;
        int d_iminusone = pts->pts[i-1].chaincode;
        int a_i;

        if (d_iminusone < d_i)
            d_iminusone += 8;

        a_i = (d_iminusone - d_i) % 8;

        /* convert to degrees, x 100 */
        curr[i] = ((12 - a_i) % 8) * 45 * 100;
    }
    curr[pts->npts - 1] = 18000;   /* a_L-1 */

    /* Perform a number of filtering iterations.  Each pass convolves
       with the weight table, padding with 18000 beyond the ends. */
    next = R[1];
    for (j = 0; j < LP_FILTER_ITERS; j++, curr = R[j], next = R[j+1]) {
        for (i = 0; i < pts->npts; i++) {
            int k;

            next[i] = 0;

            for (k = i - LP_FILTER_WIDTH; k <= i + LP_FILTER_WIDTH; k++) {
                int oldval = (k < 0 || k >= pts->npts) ? 18000 : curr[k];
                next[i] += oldval * lialg_lpfwts[k - (i - LP_FILTER_WIDTH)];   /* overflow? */
            }

            next[i] /= lialg_lpfconst;
        }
    }

    /* Do final thresholding around PI. */
    /* curr and next are set-up correctly at end of previous loop! */
    for (i = 0; i < pts->npts; i++) {
        next[i] = (abs(curr[i] - 18000) < LP_FILTER_THLD)
            ? 18000
            : curr[i];
    }
    curr = next;

    /* Debugging. */
    if (lidebug > 1) {
        for (i = 0; i < pts->npts; i++) {
            fprintf(stderr, "%3d: (%3d, %3d)  %ld  ",
                    i, pts->pts[i].x, pts->pts[i].y, pts->pts[i].chaincode);
            for (j = 0; j < 2 + LP_FILTER_ITERS; j++)
                fprintf(stderr, "%d ", R[j][i]);
            fprintf(stderr, "\n");
        }
    }

    /* Do the region segmentation: emit a new region each time the
       thresholded angle crosses between convex/concave/plain. */
    {
        int start, end;
        int currtype;

#define RGN_TYPE(val)\
  (((val) == 18000)\
    ? RGN_PLAIN\
    : ((val) < 18000 ? RGN_CONCAVE : RGN_CONVEX))

        start = 0;
        currtype = RGN_TYPE(curr[0]);
        /* regions = (region_list *)safe_malloc(sizeof(region_list));*/
        regions = allocate(1, region_list);
        curr_reg = regions;
        curr_reg->start = start;
        curr_reg->end = 0;
        curr_reg->type = currtype;
        curr_reg->next = NULL;
        for (i = 1; i < pts->npts; i++) {
            int nexttype = RGN_TYPE(curr[i]);

            if (nexttype != currtype) {
                region_list *next_reg = NULL;

                end = i - 1;
                curr_reg->end = end;
                if (lidebug > 1)
                    fprintf(stderr, "  (%d, %d), %d\n", start, end, currtype);

                start = i;
                currtype = nexttype;
                /* next_reg = (region_list *)safe_malloc(sizeof(region_list));*/
                next_reg = allocate(1, region_list);
                next_reg->start = start;
                next_reg->end = 0;
                next_reg->type = nexttype;
                next_reg->next = NULL;

                curr_reg->next = next_reg;
                curr_reg = next_reg;
            }
        }
        /* Close the final region (i has run to pts->npts). */
        end = i - 1;
        curr_reg->end = end;
        if (lidebug > 1)
            fprintf(stderr, "  (%d, %d), %d\n", start, end, currtype);

        /* Filter out convex/concave regions that are too short.  A short
           non-plain region between two plain regions is absorbed into the
           surrounding plain run. */
        for (curr_reg = regions; curr_reg; curr_reg = curr_reg->next)
            if (curr_reg->type == RGN_PLAIN) {
                region_list *next_reg;

                for (next_reg = curr_reg->next;
                     next_reg != NULL &&
                         (next_reg->end - next_reg->start) < LP_FILTER_MIN;
                     next_reg = curr_reg->next) {
                    /* next_reg must not be plain, and it must be followed by a plain */
                    /* assert(next_reg->type != RGN_PLAIN); */
                    /* assert(next_reg->next != NULL && (next_reg->next)->type == RGN_PLAIN); */
                    /* NOTE(review): relies on the asserted invariant that
                       next_reg->next is non-NULL -- confirm it holds when
                       the stroke ends in a short non-plain region. */

                    curr_reg->next = (next_reg->next)->next;
                    curr_reg->end = (next_reg->next)->end;

                    free(next_reg->next);
                    free(next_reg);
                }
            }

        /* Add-in pseudo-extremes: split a plain region whose arc length
           and arc-to-chord ratio both exceed their thresholds, marking
           its midpoint as an RGN_PSEUDO region. */
        {
            region_list *tmp, *prev_reg;

            tmp = regions;
            regions = NULL;
            prev_reg = NULL;
            for (curr_reg = tmp; curr_reg; curr_reg = curr_reg->next) {
                if (curr_reg->type == RGN_PLAIN) {
                    int arclen = lialg_compute_pathlen_subset(pts,
                                                              curr_reg->start,
                                                              curr_reg->end);
                    int dx = pts->pts[curr_reg->end].x -
                        pts->pts[curr_reg->start].x;
                    int dy = pts->pts[curr_reg->end].y -
                        pts->pts[curr_reg->start].y;
                    int chordlen = isqrt(10000 * (dx * dx + dy * dy));
                    int atcr = (chordlen == 0) ? 0 : (100 * arclen + chordlen / 2) / chordlen;

                    if (lidebug)
                        fprintf(stderr, "%d, %d, %d\n", arclen, chordlen, atcr);

                    /* Split region if necessary. */
                    if (arclen >= PE_AL_THLD && atcr >= PE_ATCR_THLD) {
                        int mid = curr_reg->start + (curr_reg->end - curr_reg->start) / 2;
                        int end = curr_reg->end;
                        region_list *saved_next = curr_reg->next;

                        curr_reg->end = mid - 1;
                        if (prev_reg == NULL)
                            regions = curr_reg;
                        else
                            prev_reg->next = curr_reg;
                        prev_reg = curr_reg;

                        /* curr_reg = (region_list *)safe_malloc(sizeof(region_list));*/
                        curr_reg = allocate(1, region_list);
                        curr_reg->start = mid;
                        curr_reg->end = mid;
                        curr_reg->type = RGN_PSEUDO;
                        curr_reg->next = NULL;
                        prev_reg->next = curr_reg;
                        prev_reg = curr_reg;

                        /* curr_reg = (region_list *)safe_malloc(sizeof(region_list)); */
                        curr_reg = allocate(1, region_list);
                        curr_reg->start = mid + 1;
                        curr_reg->end = end;
                        curr_reg->type = RGN_PLAIN;
                        curr_reg->next = NULL;
                        prev_reg->next = curr_reg;
                        prev_reg = curr_reg;

                        curr_reg->next = saved_next;
                        continue;
                    }
                }

                if (prev_reg == NULL)
                    regions = curr_reg;
                else
                    prev_reg->next = curr_reg;
                prev_reg = curr_reg;
            }
        }
    }

    free(junk);
    return(regions);
}
 
 
/*
 * Pick the dominant points of a stroke from its region structure: the
 * start point, the extreme point of every non-plain region, the end
 * point, and the midpoint between each consecutive pair of those.
 * Returns a freshly allocated, chain-coded point list (NULL on
 * allocation failure); caller frees via delete_examples.
 */
static point_list *lialg_compute_dompts(point_list *pts, region_list *regions) {
    point_list *dpts = NULL;
    int ndpts;
    int *cas = NULL;
    int nonplain;
    region_list *r;

    /* Compute contour angle set. */
    cas = lialg_compute_contour_angle_set(pts, regions);
    /* assert(cas != NULL);*/

    /* Dominant points include: start_pt, end_pt, extrema_of_non_plain_regions, midpts of the preceding. */
    nonplain = 0;
    for (r = regions; r != NULL; r = r->next)
        if (r->type != RGN_PLAIN) nonplain++;
    ndpts = 2 * (2 + nonplain) - 1;   /* k anchor points -> k-1 midpoints */
    /* dpts = (point_list *)safe_malloc(sizeof(point_list)); */
    dpts = allocate(1, point_list);
    dpts->pts = make_pen_point_array(ndpts);
    if (dpts->pts == NULL) {
        free(dpts);
        return(NULL);
    }
    dpts->npts = ndpts;
    dpts->next = NULL;

    /* Pick out dominant points. */
    {
        region_list *curr;
        int dp;
        int previx;
        int currix;

        /* Record start point. */
        dp = 0;
        previx = 0;
        dpts->pts[dp++] = pts->pts[previx];

        for (curr = regions; curr != NULL; curr = curr->next)
            if (curr->type != RGN_PLAIN) {
                /* Extreme point = max contour angle for convex regions,
                   min for concave (and pseudo) regions. */
                int max_v = 0;
                int min_v = MAXINT;
                int max_ix = -1;
                int min_ix = -1;
                int i;

                for (i = curr->start; i <= curr->end; i++) {
                    int v = cas[i];
                    if (v > max_v) { max_v = v; max_ix = i; }
                    if (v < min_v) { min_v = v; min_ix = i; }
                    if (lidebug > 1)
                        fprintf(stderr, "  %d\n", v);
                }

                currix = (curr->type == RGN_CONVEX ? max_ix : min_ix);

                /* Record midpoint. */
                dpts->pts[dp++] = pts->pts[previx + (currix - previx) / 2];

                /* Record extreme point. */
                dpts->pts[dp++] = pts->pts[currix];

                previx = currix;
            }

        /* Record last mid-point and end point. */
        currix = pts->npts - 1;
        dpts->pts[dp++] = pts->pts[previx + (currix - previx) / 2];
        dpts->pts[dp++] = pts->pts[currix];
    }

    /* Compute chain-code over the dominant points themselves. */
    lialg_compute_chain_code(dpts);

    free(cas);
    return(dpts);
}
 
 
/*
 * Build the contour-angle value V[i] (degrees x 100) for every point:
 * 18000 for plain regions and the endpoints; inside non-plain regions
 * the active (#else) branch simply marks the region midpoint so that
 * lialg_compute_dompts picks it as the extreme point.  Returns a
 * freshly allocated array; the caller frees it.
 */
static int *lialg_compute_contour_angle_set(point_list *pts,
                                            region_list *regions) {
    int *V = NULL;
    region_list *curr_reg, *prev_reg;
    int i;

    /* V = (int *)safe_malloc(pts->npts * sizeof(int));*/
    V = allocate(pts->npts, int);

    V[0] = 18000;
    /* NOTE(review): prev_reg is uninitialized on the first iteration,
       but it is only read inside the disabled #ifdef notdef branch. */
    for (curr_reg = regions; curr_reg != NULL;
         prev_reg = curr_reg, curr_reg = curr_reg->next) {
        for (i = curr_reg->start; i <= curr_reg->end; i++) {
            if (curr_reg->type == RGN_PLAIN) {
                V[i] = 18000;
            }
            else {
#ifdef notdef
                /* XXX - eliminate floating point */
                region_list *next_reg = curr_reg->next;
                int b = curr_reg->start;
                int h = prev_reg->start;
                int t = next_reg->end;
                int pts_before = i - h;
                int pts_after = t - i;
                int min_pts = (pts_before < pts_after)
                    ? pts_before
                    : pts_after;
                int k = (min_pts < T_ONE)
                    ? T_ONE
                    : (min_pts > T_TWO)
                    ? T_TWO
                    : min_pts;
                float sum = 0.0;

                for (j = 1; j <= k; j++) {
                    int ptA = i - j;
                    int ptB = i + j - 1;
                    int d_A = pts->pts[ptA].chaincode;
                    int d_B = pts->pts[ptB].chaincode;
                    int a_i;

                    if (d_A < d_B)
                        d_A += 8;

                    a_i = (d_A - d_B) % 8;

                    /* convert to radians */
                    if (a_i == 4 && curr_reg->type == RGN_CONVEX)
                        sum += M_2_PI;
                    else
                        sum += (float)((12 - a_i) % 8) / 4.0 * M_PI;
                }
                V[i] = sum / (float)k;
#else
                /* For now, simply choose the mid-point. */
                int isMidPt = (i == (curr_reg->start +
                                     (curr_reg->end - curr_reg->start) / 2));
                V[i] = (curr_reg->type == RGN_CONVEX)
                    ? (isMidPt ? 18000 : 0)
                    : (isMidPt ? 0 : 18000);
#endif
            }
        }
    }
    V[pts->npts - 1] = 18000;

    return(V);
}
 
 
/*
 * First compute the similarity between the two strings.
 * If it's above a threshold, compute the distance between
 * the two and return it as the ``score.''
 * Otherwise, return the constant WORST_SCORE.
 */
static void lialg_score_stroke(point_list *input_dompts, point_list *curr_dompts, int *sim, int *dist) {
    /* Pessimistic defaults, in case the similarity gate fails. */
    *sim = MIN_SIM;
    *dist = MAX_DIST;

    *sim = lialg_compute_similarity(input_dompts, curr_dompts);
    if (*sim >= SIM_THLD)
        *dist = lialg_compute_distance(input_dompts, curr_dompts);

    if (lidebug)
        fprintf(stderr, "%d, %d\n", *sim, *dist);
}
 
 
/*
 * Dynamic-programming similarity between two chain-coded dominant-point
 * sequences.  Each pairing of codes earns a gain (10 exact, 6 off-by-one,
 * 0 otherwise); the DP maximizes the total gain of an alignment.  The
 * result is normalized to the longer sequence, so it lies in 0..100.
 */
static int lialg_compute_similarity(point_list *input_dompts,
                                    point_list *curr_dompts) {
    int sim = 0;
    point_list *A, *B;
    int N, M;
    int **G = NULL;
    int *junk = NULL;
    int i, j;

    /* A is the longer sequence, length N. */
    /* B is the shorter sequence, length M. */
    if (input_dompts->npts >= curr_dompts->npts) {
        A = input_dompts;
        N = input_dompts->npts;
        B = curr_dompts;
        M = curr_dompts->npts;
    }
    else {
        A = curr_dompts;
        N = curr_dompts->npts;
        B = input_dompts;
        M = input_dompts->npts;
    }

    /* Allocate and initialize the Gain matrix, G. */
    /* The size of G is M x (N + 1). */
    /* Note that row 0 is unused. */
    /* Similarities are x 10. */
    {
        /* G = (int **)safe_malloc(M * sizeof(int *));*/
        G = allocate(M, int *);
        /* junk = (int *)safe_malloc(M * (N + 1) * sizeof(int)); */
        junk = allocate(M * (N + 1), int);
        for (i = 0; i < M; i++)
            G[i] = junk + (i * (N + 1));

        for (i = 1; i < M; i++) {
            int bval = B->pts[i-1].chaincode;

            /* Source column. */
            G[i][0] = 0;

            for (j = 1; j < N; j++) {
                int aval = A->pts[j-1].chaincode;
                int diff = abs(bval - aval);
                if (diff > 4) diff = 8 - diff;   /* wrap: codes are mod 8 */

                G[i][j] = (diff == 0)
                    ? 10
                    : (diff == 1)
                    ? 6
                    : 0;
            }

            /* Sink column. */
            G[i][N] = 0;
        }
    }

    /* Do the DP algorithm. */
    /* Proceed in column order, from highest column to the lowest. */
    /* Within each column, proceed from the highest row to the lowest. */
    /* Skip the highest column. */
    {
        for (j = N - 1; j >= 0; j--)
            for (i = M - 1; i > 0; i--) {
                /* Best continuation: stay on row i, or advance to i+1. */
                int max = G[i][j + 1];

                if (i < (M - 1)) {
                    int tmp = G[i + 1][j + 1];
                    if (tmp > max) max = tmp;
                }

                G[i][j] += max;
            }

        /* Best total gain starts at G[1][0]; normalize by N-1 segments. */
        sim = (10 * G[1][0] + (N - 1) / 2) / (N - 1);
    }

    if (G != NULL) free(G);
    if (junk != NULL) free(junk);
    return(sim);
}
 
 
/*
 * Banded dynamic-programming distance between two dominant-point
 * sequences.  Cell cost is the Euclidean distance (x 100, via isqrt)
 * between paired points; BE/TE restrict each column to a diagonal band
 * of width DP_BAND.  The minimal path cost is normalized by the longer
 * length N.  Lower is better; MAX_DIST on failure paths.
 */
static int lialg_compute_distance(point_list *input_dompts,
                                  point_list *curr_dompts) {
    int dist = MAX_DIST;
    point_list *A, *B;
    int N, M;
    int **C = NULL;
    int *junk = NULL;
    int *BE = NULL;
    int *TE = NULL;
    int i, j;

    /* A is the longer sequence, length N. */
    /* B is the shorter sequence, length M. */
    if (input_dompts->npts >= curr_dompts->npts) {
        A = input_dompts;
        N = input_dompts->npts;
        B = curr_dompts;
        M = curr_dompts->npts;
    }
    else {
        A = curr_dompts;
        N = curr_dompts->npts;
        B = input_dompts;
        M = input_dompts->npts;
    }

    /* Construct the helper vectors, BE and TE, which say for each column */
    /* what are the ``bottom'' and ``top'' rows of interest. */
    {
        /* BE = (int *)safe_malloc((N + 1) * sizeof(int));*/
        BE = allocate((N + 1), int);
        /* TE = (int *)safe_malloc((N + 1) * sizeof(int)); */
        TE = allocate((N + 1), int);

        for (j = 1; j <= N; j++) {
            int bot, top;

            bot = j + (M - DP_BAND);
            if (bot > M) bot = M;
            BE[j] = bot;

            top = j - (N - DP_BAND);
            if (top < 1) top = 1;
            TE[j] = top;
        }
    }

    /* Allocate and initialize the Cost matrix, C. */
    /* The size of C is (M + 1) x (N + 1). */
    /* Note that row and column 0 are unused. */
    /* Costs are x 100. */
    {
        /* C = (int **)safe_malloc((M + 1) * sizeof(int *)); */
        C = allocate((M + 1), int *);
        /* junk = (int *)safe_malloc((M + 1) * (N + 1) * sizeof(int)); */
        junk = allocate((M + 1) * (N + 1), int);
        for (i = 0; i <= M; i++)
            C[i] = junk + (i * (N + 1));

        for (i = 1; i <= M; i++) {
            int bx = B->pts[i-1].x;
            int by = B->pts[i-1].y;

            for (j = 1; j <= N; j++) {
                int ax = A->pts[j-1].x;
                int ay = A->pts[j-1].y;
                int dx = bx - ax;
                int dy = by - ay;
                /* local dist shadows the function-level result on purpose */
                int dist = isqrt(10000 * (dx * dx + dy * dy));

                C[i][j] = dist;
            }
        }
    }

    /* Do the DP algorithm. */
    /* Proceed in column order, from highest column to the lowest. */
    /* Within each column, proceed from the highest row to the lowest. */
    {
        for (j = N; j > 0; j--)
            for (i = M; i > 0; i--) {
                int min = MAX_DIST;

                /* Skip cells outside the band; (N, M) is the sink. */
                if (i > BE[j] || i < TE[j] || (j == N && i == M))
                    continue;

                if (j < N) {
                    if (i >= TE[j+1]) {
                        int tmp = C[i][j+1];
                        if (tmp < min) min = tmp;
                    }

                    if (i < M) {
                        int tmp = C[i+1][j+1];
                        if (tmp < min) min = tmp;
                    }
                }

                if (i < BE[j]) {
                    int tmp = C[i+1][j];
                    if (tmp < min) min = tmp;
                }

                C[i][j] += min;
            }

        /* Total path cost starts at C[1][1]; normalize by N. */
        dist = (C[1][1] + N / 2) / N;
    }

    if (C != NULL) free(C);
    if (junk != NULL) free(junk);
    if (BE != NULL) free(BE);
    if (TE != NULL) free(TE);
    return(dist);
}
 
 
/*************************************************************
 
Digest-processing routines
 
*************************************************************/
 
/*
 * Load the pre-computed dominant points ("digest") for a classifier.
 * The digest lives next to the classifier file, with the extension
 * replaced by "clx"; each record is a class name, a point count, and
 * that many x/y pairs.  Fills rec->cnames[] and rec->dompts[].
 * Returns 0 on success, -1 on any failure (with partial state undone).
 */
static int lialg_read_classifier_digest(rClassifier *rec) {
    int nclasses;
    FILE *fp = NULL;

    /* Try to open the corresponding digest file. */
    {
        char *clx_path;
        char *dot;

        /* Get a copy of the filename, with some room on the end. */
        clx_path = allocate(strlen(rec->file_name) + 5, char);
        strcpy(clx_path, rec->file_name);

        /* Truncate the path after the last dot. */
        dot = strrchr(clx_path, '.');
        if (dot == NULL) { free(clx_path); return(-1); }
        *(dot + 1) = 0;

        /* Append the classifier-digest extension. */
        strcat(clx_path, "clx");

        fp = fopen(clx_path, "r");
        if (fp == NULL) { free(clx_path); return(-1); }

        free(clx_path);
    }

    /* Read-in the name and dominant points for each class. */
    for (nclasses = 0; !feof(fp); nclasses++) {
        point_list *dpts = NULL;
        char class[BUFSIZ];
        int npts;
        int j;

        /* Guard the class arrays: a digest with more than MAXSCLASSES
           entries would otherwise overrun cnames[]/dompts[]. */
        if (nclasses >= MAXSCLASSES) {
            nclasses = MAXSCLASSES - 1;   /* clean up only valid slots */
            goto failed;
        }

        /* Bound the %s conversion so an over-long class name in a
           malformed digest cannot overflow the stack buffer.  (The
           original used an unbounded %s.) */
        if (fscanf(fp, "%127s %d", class, &npts) != 2) {
            if (feof(fp)) break;

            goto failed;
        }
        rec->cnames[nclasses] = strdup(class);

        /* Allocate a dominant-points list. */
        dpts = allocate(1, point_list);
        dpts->pts = make_pen_point_array(npts);
        if (dpts->pts == NULL) goto failed;
        dpts->npts = npts;
        dpts->next = NULL;

        /* Read in each point. */
        for (j = 0; j < npts; j++) {
            int x, y;

            if (fscanf(fp, "%d %d", &x, &y) != 2) goto failed;
            dpts->pts[j].x = x;
            dpts->pts[j].y = y;
        }

        /* Compute the chain-code. */
        lialg_compute_chain_code(dpts);

        /* Store the list in the rec data structure. */
        rec->dompts[nclasses] = dpts;

        continue;

      failed:
        /* Undo every class loaded so far.  dpts has not yet been stored
           in rec->dompts on any failure path, so freeing it separately
           cannot double-free. */
        fprintf(stderr, "read_classifier_digest failed...\n");
        for (; nclasses >= 0; nclasses--) {
            if (rec->cnames[nclasses] != NULL) {
                free(rec->cnames[nclasses]);
                rec->cnames[nclasses] = NULL;
            }
            if (rec->dompts[nclasses] != NULL) {
                delete_examples(rec->dompts[nclasses]);
                rec->dompts[nclasses] = NULL;
            }
        }
        if (dpts != NULL)
            delete_examples(dpts);
        fclose(fp);
        return(-1);
    }

    fclose(fp);
    return(0);
}
 
 
/*************************************************************
 
Canonicalization routines
 
*************************************************************/
 
/*
 * Build the canonical representation of every class in `rec':
 * each class's training examples are copied, individually canonicalized,
 * rescaled to a common size, averaged point-wise, centered on the
 * canonical box, and finally reduced to a dominant-point list.
 *
 * On success rec->canonex[] and rec->dompts[] are filled in and 0 is
 * returned; on any failure -1 is returned.
 */
static int lialg_canonicalize_examples(rClassifier *rec) {
    int i;
    int nclasses;

    if (lidebug) {
        fprintf(stderr, "lialg_canonicalize_examples working on %s\n",
                rec->file_name);
    }

    /* Initialize canonical-example arrays. */
    for (i = 0; i < MAXSCLASSES; i++) {
        rec->canonex[i] = NULL;
    }

    /* Figure out number of classes (cnames[] is NULL-terminated). */
    for (nclasses = 0;
         nclasses < MAXSCLASSES && rec->cnames[nclasses] != NULL;
         nclasses++)
        ;

    /* Canonicalize the examples for each class. */
    for (i = 0; i < nclasses; i++) {
        int j, k;
        int nex;
        point_list *pts, *tmp, *avg;
        int maxxrange, maxyrange;
        int minx, miny, maxx, maxy;
        int avgxrange, avgyrange, avgxoff, avgyoff, avgscale;

        if (lidebug) {
            fprintf(stderr, "lialg_canonicalize_examples working on class %s\n",
                    rec->cnames[i]);
        }

        /* Make a copy of the examples, so the originals stay intact. */
        /* NOTE(review): if add_example fails mid-list, the previously
           built copy is unreachable here; whether add_example releases
           it on failure must be confirmed at its definition. */
        pts = NULL;
        tmp = rec->ex[i];
        for (nex = 0; tmp != NULL; nex++, tmp = tmp->next) {
            if ((pts = add_example(pts, tmp->npts, tmp->pts)) == NULL) {
                delete_examples(pts);
                return(-1);
            }
        }

        /* BUG FIX: a class with no examples would dereference a NULL
           `avg' (and divide by zero in the averaging loop) below; fail
           cleanly instead. */
        if (nex == 0) return(-1);

        /* Canonicalize each example, and derive the max x and y ranges. */
        maxxrange = 0;
        maxyrange = 0;
        for (j = 0, tmp = pts; j < nex; j++, tmp = tmp->next) {
            if (lialg_canonicalize_example_stroke(tmp) != 0) {
                if (lidebug) {
                    fprintf(stderr, "lialg_canonicalize_example_stroke returned error\n");
                }
                /* BUG FIX: this error path leaked the copied examples;
                   every sibling error path frees them. */
                delete_examples(pts);
                return(-1);
            }

            if (tmp->xrange > maxxrange) maxxrange = tmp->xrange;
            if (tmp->yrange > maxyrange) maxyrange = tmp->yrange;
        }

        /* Normalize max ranges: the larger (relative to the canonical
           box) axis is pinned to its canonical extent and the other is
           scaled to preserve the aspect ratio.  All ratios are fixed
           point, x 100, rounded to nearest. */
        if (((100 * maxxrange + CANONICAL_X / 2) / CANONICAL_X) >
            ((100 * maxyrange + CANONICAL_Y / 2) / CANONICAL_Y)) {
            maxyrange = (maxyrange * CANONICAL_X + maxxrange / 2) / maxxrange;
            maxxrange = CANONICAL_X;
        }
        else {
            maxxrange = (maxxrange * CANONICAL_Y + maxyrange / 2) / maxyrange;
            maxyrange = CANONICAL_Y;
        }

        /* Re-scale each example to max ranges (100 == no scaling when an
           example's range is degenerate). */
        for (j = 0, tmp = pts; j < nex; j++, tmp = tmp->next) {
            int scalex = (tmp->xrange == 0) ? 100 : (100 * maxxrange + tmp->xrange / 2) / tmp->xrange;
            int scaley = (tmp->yrange == 0) ? 100 : (100 * maxyrange + tmp->yrange / 2) / tmp->yrange;
            if (lialg_translate_points(tmp, 0, 0, scalex, scaley) != 0) {
                delete_examples(pts);
                return(-1);
            }
        }

        /* Average the examples; the result is accumulated IN PLACE in
           the first example (avg aliases pts -- careful!). */
        avg = pts;
        for (k = 0; k < NCANONICAL; k++) {
            int xsum = 0;
            int ysum = 0;

            for (j = 0, tmp = pts; j < nex; j++, tmp = tmp->next) {
                xsum += tmp->pts[k].x;
                ysum += tmp->pts[k].y;
            }

            /* j == nex after the loop; round to nearest. */
            avg->pts[k].x = (xsum + j / 2) / j;
            avg->pts[k].y = (ysum + j / 2) / j;
        }

        /* Compute BB of averaged stroke and re-scale it to canonical size. */
        lialg_get_bounding_box(avg, &minx, &miny, &maxx, &maxy);
        avgxrange = maxx - minx;
        avgyrange = maxy - miny;
        avgscale = (((100 * avgxrange + CANONICAL_X / 2) / CANONICAL_X) >
                    ((100 * avgyrange + CANONICAL_Y / 2) / CANONICAL_Y))
            ? (100 * CANONICAL_X + avgxrange / 2) / avgxrange
            : (100 * CANONICAL_Y + avgyrange / 2) / avgyrange;
        if (lialg_translate_points(avg, minx, miny, avgscale, avgscale) != 0) {
            delete_examples(pts);
            return(-1);
        }

        /* Re-compute the x and y ranges and center the stroke. */
        lialg_get_bounding_box(avg, &minx, &miny, &maxx, &maxy);
        avgxrange = maxx - minx;
        avgyrange = maxy - miny;
        avgxoff = -((CANONICAL_X - avgxrange + 1) / 2);
        avgyoff = -((CANONICAL_Y - avgyrange + 1) / 2);
        if (lialg_translate_points(avg, avgxoff, avgyoff, 100, 100) != 0) {
            delete_examples(pts);
            return(-1);
        }

        /* Create a point list to serve as the ``canonical'' representation. */
        if ((rec->canonex[i] = add_example(NULL, avg->npts, avg->pts)) == NULL) {
            delete_examples(pts);
            return(-1);
        }
        (rec->canonex[i])->xrange = maxx - minx;
        (rec->canonex[i])->yrange = maxy - miny;

        if (lidebug) {
            fprintf(stderr, "%s, avgpts = %d\n", rec->cnames[i], avg->npts);
            for (j = 0; j < avg->npts; j++) {
                fprintf(stderr, " (%d, %d)\n",
                        avg->pts[j].x, avg->pts[j].y);
            }
        }

        /* Compute dominant points of canonical representation. */
        /* NOTE(review): a NULL return here is not treated as an error;
           confirm downstream code tolerates a NULL dompts entry. */
        rec->dompts[i] = lialg_compute_dominant_points(avg);

        /* Clean up the working copy (this also frees avg). */
        delete_examples(pts);
    }

    /* Sanity check: every canonical example should classify as itself.
       The POINTER comparison is intentional -- lialg_recognize_stroke
       is presumed to return one of rec->cnames[] (verify at its
       definition). */
    for (i = 0; i < nclasses; i++) {
        char *best_name = lialg_recognize_stroke(rec, rec->canonex[i]);

        if (best_name != rec->cnames[i])
            fprintf(stderr, "%s, best = %s\n", rec->cnames[i], best_name);
    }

    return(0);
}
 
 
/*
 * Put one training stroke into canonical form, in place: filter out
 * near-duplicate points, scale the stroke up to the canonical box
 * (aspect ratio preserved), resample it into equi-distant points,
 * translate it back to the origin, and record its final extents in
 * points->xrange / points->yrange.  Returns 0 on success, -1 on error.
 */
static int lialg_canonicalize_example_stroke(point_list *points) {
    int left, top, right, bottom;
    int wid, ht, factor;

    /* Drop points that sit too close to their predecessors. */
    if (lialg_filter_points(points) != 0) return(-1);

    /* A stroke needs at least two surviving points. */
    if (points->npts < 2) {
        if (lidebug) {
            fprintf(stderr, "lialg_canonicalize_example_stroke: npts=%d\n",
                    points->npts);
        }
        return(-1);
    }

    /* Blow the stroke up to canonical size so the later integer
       arithmetic keeps its precision.  The factor is fixed point
       (x 100) and identical on both axes. */
    lialg_get_bounding_box(points, &left, &top, &right, &bottom);
    wid = right - left;
    ht = bottom - top;
    if (((100 * wid + CANONICAL_X / 2) / CANONICAL_X) >
        ((100 * ht + CANONICAL_Y / 2) / CANONICAL_Y))
        factor = (100 * CANONICAL_X + wid / 2) / wid;
    else
        factor = (100 * CANONICAL_Y + ht / 2) / ht;
    if (lialg_translate_points(points, left, top, factor, factor) != 0) {
        if (lidebug) {
            fprintf(stderr, "lialg_translate_points (minx=%d,miny=%d,scale=%d) returned error\n", left, top, factor);
        }
        return(-1);
    }

    /* Resample into an equivalent stroke of equi-distant points. */
    if (lialg_compute_equipoints(points) != 0) return(-1);

    /* Drag the resampled stroke back onto the origin. */
    lialg_get_bounding_box(points, &left, &top, &right, &bottom);
    if (lialg_translate_points(points, left, top, 100, 100) != 0) {
        if (lidebug) {
            fprintf(stderr, "lialg_translate_points (minx=%d,miny=%d) returned error\n", left, top);
        }
        return(-1);
    }

    /* Record the final extents on the stroke itself. */
    points->xrange = right - left;
    points->yrange = bottom - top;

    if (lidebug) {
        int i;
        fprintf(stderr, "Canonicalized: %d, %d, %d, %d\n", left, top, right, bottom);
        for (i = 0; i < points->npts; i++)
            fprintf(stderr, " (%d %d)\n",
                    points->pts[i].x, points->pts[i].y);
        fflush(stderr);
    }

    return(0);
}
 
 
/*
 * Resample the stroke into exactly NCANONICAL points spaced equally
 * along its path length.  On success the old point array is freed,
 * points->pts is replaced by the resampled array, points->npts becomes
 * NCANONICAL, and 0 is returned.  On failure -1 is returned and the
 * input list is left untouched.
 *
 * All distances are fixed point, x 100.
 */
static int lialg_compute_equipoints(point_list *points) {
    pen_point *equipoints = make_pen_point_array(NCANONICAL);
    int nequipoints = 0;
    int pathlen = lialg_compute_pathlen(points);
    int equidist = (pathlen + (NCANONICAL - 1) / 2) / (NCANONICAL - 1);
    int i;
    int dist_since_last_eqpt;
    int remaining_seglen;
    int dist_to_next_eqpt;

    if (equipoints == NULL) {
        error("can't allocate memory in lialg_compute_equipoints");
        return(-1);
    }

    /* BUG FIX: a degenerate stroke with (near-)zero path length gives
       equidist == 0, which made the resampling loop below spin forever
       (remaining_seglen >= 0 with nothing ever subtracted). */
    if (equidist <= 0) {
        delete_pen_point_array(equipoints);
        return(-1);
    }

    if (lidebug) {
        fprintf(stderr, "compute_equipoints: npts = %d, pathlen = %d, equidist = %d\n",
                points->npts, pathlen, equidist);
        fflush(stderr);
    }

    /* First original point is an equipoint. */
    equipoints[0] = points->pts[0];
    nequipoints++;
    dist_since_last_eqpt = 0;

    for (i = 1; i < points->npts; i++) {
        /* Walk this original segment, emitting an equipoint each time
           another `equidist' of path has been covered.  (endx, endy)
           track the current position along the segment, x 100. */
        int dx1 = points->pts[i].x - points->pts[i-1].x;
        int dy1 = points->pts[i].y - points->pts[i-1].y;
        int endx = 100 * points->pts[i-1].x;
        int endy = 100 * points->pts[i-1].y;
        remaining_seglen = isqrt(10000 * (dx1 * dx1 + dy1 * dy1));
        dist_to_next_eqpt = equidist - dist_since_last_eqpt;

        while (remaining_seglen >= dist_to_next_eqpt) {
            if (dx1 == 0) {
                /* Vertical segment: x-coordinate stays the same. */
                if (dy1 >= 0)
                    endy += dist_to_next_eqpt;
                else
                    endy -= dist_to_next_eqpt;
            }
            else {
                /* Advance (dx, dy) along the segment's slope so that
                   the step length equals dist_to_next_eqpt. */
                int slope = (100 * dy1 + dx1 / 2) / dx1;
                int tmp = isqrt(10000 + slope * slope);
                int dx = (100 * dist_to_next_eqpt + tmp / 2) / tmp;
                int dy = (slope * dx + 50) / 100;

                if (dy < 0) dy = -dy;
                if (dx1 >= 0)
                    endx += dx;
                else
                    endx -= dx;
                if (dy1 >= 0)
                    endy += dy;
                else
                    endy -= dy;
            }

            /* BUG FIX: rounding could in principle yield one equipoint
               too many; the original wrote past the array (its assert
               was commented out).  Fail cleanly instead. */
            if (nequipoints >= NCANONICAL) {
                delete_pen_point_array(equipoints);
                return(-1);
            }
            equipoints[nequipoints].x = (endx + 50) / 100;
            equipoints[nequipoints].y = (endy + 50) / 100;
            nequipoints++;
            dist_since_last_eqpt = 0;
            remaining_seglen -= dist_to_next_eqpt;
            dist_to_next_eqpt = equidist;
        }

        dist_since_last_eqpt += remaining_seglen;
    }

    /* Take care of the last equipoint. */
    if (nequipoints == NCANONICAL) {
        /* The loop already produced the full complement. */
    } else if (nequipoints == (NCANONICAL - 1)) {
        /* Make last original point the last equipoint. */
        equipoints[nequipoints] = points->pts[points->npts - 1];
    } else {
        if (lidebug) {
            fprintf(stderr,"lialg_compute_equipoints: nequipoints = %d\n",
                    nequipoints);
        }
        /* BUG FIX: the replacement array was leaked on this path. */
        delete_pen_point_array(equipoints);
        return(-1);
    }

    /* Install the resampled points in place of the originals. */
    points->npts = NCANONICAL;
    delete_pen_point_array(points->pts);
    points->pts = equipoints;
    return(0);
}
 
 
/*************************************************************
 
Utility routines
 
*************************************************************/
 
/* Total polyline length of the whole stroke (all points in the list).
   Result is x 100 (fixed point, two implied decimal places). */
static int lialg_compute_pathlen(point_list *points) {
    return(lialg_compute_pathlen_subset(points, 0, points->npts - 1));
}
 
 
/* Sum of the segment lengths from pts[start] to pts[end], inclusive.
   Result is x 100 (fixed point). */
static int lialg_compute_pathlen_subset(point_list *points,
                                        int start, int end) {
    int total = 0;
    int idx;

    for (idx = start + 1; idx <= end; idx++) {
        int run  = points->pts[idx].x - points->pts[idx - 1].x;
        int rise = points->pts[idx].y - points->pts[idx - 1].y;

        /* isqrt(10000 * d^2) gives the segment length scaled by 100. */
        total += isqrt(10000 * (run * run + rise * rise));
    }

    return(total);
}
 
 
/* Remove, in place, every point whose squared distance from the last
   kept point is below DIST_SQ_THRESHOLD; the first point is always
   kept.  Returns 0 on success, -1 on error.
   Note that this does NOT update points->xrange and points->yrange! */
static int lialg_filter_points(point_list *points) {
    int filtered_npts;
    pen_point *filtered_pts;
    int i;

    /* BUG FIX: an empty list has no pts[0] to seed the filter with;
       the original read past the (empty) array. */
    if (points->npts < 1) return(-1);

    filtered_pts = make_pen_point_array(points->npts);
    if (filtered_pts == NULL) {
        error("can't allocate memory in lialg_filter_points");
        return(-1);
    }

    /* The first point always survives; each later point survives only
       if it is far enough from the last survivor. */
    filtered_pts[0] = points->pts[0];
    filtered_npts = 1;
    for (i = 1; i < points->npts; i++) {
        int j = filtered_npts - 1;
        int dx = points->pts[i].x - filtered_pts[j].x;
        int dy = points->pts[i].y - filtered_pts[j].y;
        int magsq = dx * dx + dy * dy;

        if (magsq >= DIST_SQ_THRESHOLD) {
            filtered_pts[filtered_npts] = points->pts[i];
            filtered_npts++;
        }
    }

    /* Swap the filtered array in, freeing the original points. */
    points->npts = filtered_npts;
    delete_pen_point_array(points->pts);
    points->pts = filtered_pts;
    return(0);
}
 
 
/* Shift every point by (-minx, -miny) and then scale it; scalex and
   scaley are fixed point, x 100, rounded to nearest.  Always returns 0.
   Note that this does NOT update points->xrange and points->yrange! */
static int lialg_translate_points(point_list *points,
                                  int minx, int miny,
                                  int scalex, int scaley) {
    pen_point *cur = points->pts;
    pen_point *stop = cur + points->npts;

    for (; cur < stop; cur++) {
        cur->x = ((cur->x - minx) * scalex + 50) / 100;
        cur->y = ((cur->y - miny) * scaley + 50) / 100;
    }

    return(0);
}
 
 
/* Compute the axis-aligned bounding box of the stroke and store it
   through the four output pointers.  Requires at least one point. */
static void lialg_get_bounding_box(point_list *points,
                                   int *pminx, int *pminy,
                                   int *pmaxx, int *pmaxy) {
    pen_point *cur = points->pts;
    pen_point *stop = cur + points->npts;
    int lox, loy, hix, hiy;

    /* Seed the box with the first point, then widen it as needed. */
    lox = hix = cur->x;
    loy = hiy = cur->y;
    for (cur++; cur < stop; cur++) {
        if (cur->x < lox) lox = cur->x;
        if (cur->x > hix) hix = cur->x;
        if (cur->y < loy) loy = cur->y;
        if (cur->y > hiy) hiy = cur->y;
    }

    *pminx = lox;
    *pminy = loy;
    *pmaxx = hix;
    *pmaxy = hiy;
}
 
#ifdef __ECOS
/* eCos's math library lacks single-precision expf(); provide it in
   terms of double-precision exp(). */
float
expf(float x)
{
    return exp((double)x);
}
#endif
 
 
/* Precompute the low-pass-filter weight table lialg_lpfwts[] (symmetric,
   2 * LP_FILTER_WIDTH + 1 taps) and its normalization constant
   lialg_lpfconst (the sum of all weights). */
static void lialg_compute_lpf_parameters() {
    int i;

    /* Fill both halves of the symmetric table; weights are 100*exp(x),
       rounded to the nearest integer. */
    for (i = LP_FILTER_WIDTH; i >= 0; i--) {
        float x = 0.04 * (i * i);
#if defined(ARM_LINUX) || !defined(__GLIBC__)
        /* Some libms lack a reliable expf(); use double exp() there. */
        double tmp = 100.0 * exp((double)x);
#else
        float tmp = 100.0 * expf(x);
#endif
        int wt = rint((double)tmp);

        /* NOTE(review): exp(+x) makes the weights GROW with distance
           from the center tap, which is unusual for a low-pass kernel
           (exp(-x) would be expected); confirm against the filter's
           use site before changing anything. */
        lialg_lpfwts[LP_FILTER_WIDTH - i] = wt;
        lialg_lpfwts[LP_FILTER_WIDTH + i] = wt;
    }
    /* Normalization constant: sum over all taps. */
    lialg_lpfconst = 0;
    for (i = 0; i < (2 * LP_FILTER_WIDTH + 1); i++) {
        lialg_lpfconst += lialg_lpfwts[i];
    }
}
 
 
/* Integer square root, rounded to nearest.
   Code from Joseph Hall (jnhall@sat.mot.com).
   Works by Newton iteration on 4*n (so the iterate converges to
   2*sqrt(n)), then halves the result with rounding. */
static int isqrt(int n) {
    long scaled = n;
    long guess = 2;
    long refined;
    int probe;

    /* Initial guess: double `guess' once per two bits of n. */
    for (probe = n; probe > 0; probe >>= 2)
        guess <<= 1;
    scaled <<= 2;

    /* Newton's method: stop when successive iterates differ by at most
       the low bit. */
    for (;;) {
        refined = (scaled / guess + guess) >> 1;
        if (((guess ^ refined) & ~1) == 0)
            break;
        guess = refined;
    }
    return (int) ((refined + 1) >> 1);
}
 
 
/* Helper routines from Mark Hayter. */
/* Cheap arctangent substitute: maps the direction of the vector
   (tanbot, tantop) onto a monotone angle-like value in 0..0x40000,
   using tan(theta) = top/bot to order directions without floating
   point.  Both components zero yields 0. */
static int likeatan(int tantop, int tanbot) {
    int angle;

    if ((tantop == 0) && (tanbot == 0))
        return 0;

    /* First-quadrant estimate, scaled so a full quadrant is 0x10000. */
    angle = (tantop << 16) / (abs(tantop) + abs(tanbot));

    /* Fold into the correct quadrant. */
    if (tanbot < 0)
        angle = 0x20000 - angle;
    else if (tantop < 0)
        angle = 0x40000 + angle;

    return angle;
}
 
 
/* Map a likeatan() angle (0..0x40000) onto one of 8 direction sectors
   (0..7), rounding to the nearest sector boundary. */
static int quadr(int t) {
    int sector = ((t + 0x4000) >> 15) & 7;
    return (8 - sector) & 7;
}
/li_recognizer_internal.h
0,0 → 1,49
/*
 * li_recognizer_internal.h
 *
 * Private data structures for the li (scribble) handwriting recognizer.
 *
 * Adapted from cmu_recognizer_internal.h.
 * Credit to Dean Rubine, Jim Kempf, and Ari Rapkin.
 */

#ifndef _LI_RECOGNIZER_INTERNAL_H_

#define _LI_RECOGNIZER_INTERNAL_H_

#include <hre_internal.h>
#include "sc.h"

/* A singly-linked list of strokes; each node holds one stroke. */
typedef struct PointList {
    int npts;                 /* Number of entries in pts. */
    int xrange, yrange;       /* Bounding-box extents of the stroke. */
    pen_point* pts;           /* The stroke's points. */
    struct PointList* next;   /* Next stroke in the list. */
} point_list;

/* Per-classifier state: the trained classifier plus, for each of up to
   MAXSCLASSES classes, its name, raw training examples, canonicalized
   form, and dominant-point list. */
typedef struct {
    char* file_name;                  /* The classifier file name. */
    sClassifier sc;                   /* The classifier. */
    point_list* ex[MAXSCLASSES];      /* The training examples. */
    char* cnames[MAXSCLASSES];        /* The class names. */
    point_list* canonex[MAXSCLASSES]; /* Canonicalized versions of the strokes. */
    point_list* dompts[MAXSCLASSES];  /* Dominant points. */
} rClassifier;


/* This structure contains extra fields for instance-specific data. */

typedef struct {
    /* Instance-specific data. */
    u_int li_magic;      /* Magic number: guards against bogus handles. */
    rClassifier li_rc;   /* The character classifier. */
} li_recognizer;


/* Name of the default classifier file. */
#define LI_DEFAULT_CLASSIFIER_FILE "default.cl"

/* Classifier file extension. */
#define LI_CLASSIFIER_EXTENSION ".cl"

/* Locale supported by recognizer. */
#define LI_SUPPORTED_LOCALE REC_DEFAULT_LOCALE
#endif
/Makefile
0,0 → 1,69
##############################################################################
# Microwindows template Makefile
# Copyright (c) 2000 Martin Jolicoeur, Greg Haerr
##############################################################################

ifndef TOP
TOP = ../..
CONFIG = $(TOP)/config
endif

include $(CONFIG)

######################## Additional Flags section ############################

# Directories list for header files
INCLUDEDIRS +=
# Defines for preprocessor
DEFINES +=

# Compilation flags for C files OTHER than include directories
CFLAGS +=
# Preprocessor flags OTHER than defines
CPPFLAGS +=
# Linking flags
LDFLAGS +=

############################# targets section ################################

ifeq ($(NANOX), Y)

# If you want to create a library with the objects files, define the name here
LIBNAME =

# List of objects to compile
OBJS = nxscribble.o scribwidget.o\
	hre_api.o li_recognizer.o bitvector.o matrix.o sc.o util.o

ifeq ($(ARCH), ECOS)
DEFINES += -DREC_DEFAULT_USER_DIR=\"/bin\"
TARGET = $(TOP)/bin/nxscribble.o
else
TARGET = $(TOP)/bin/nxscribble
endif

# `all' is a command, not a file; declare it phony so a stray file named
# "all" cannot mask the build.
.PHONY: all

all: default $(TARGET)
	$(CP) letters.cl digits.cl punc.cl $(TOP)/bin

endif

######################### Makefile.rules section #############################

include $(TOP)/Makefile.rules

######################## Tools targets section ###############################

ifeq ($(SHAREDLIBS), Y)
$(TOP)/bin/nxscribble: $(OBJS) $(NANOXCLIENTLIBS) $(TOP)/config
	$(CC) $(CFLAGS) $(LDFLAGS) $(OBJS) -o $@ $(CCNANOXCLIENTLIBS) -lm
else
ifeq ($(ARCH), ECOS)
# Partial link (-Wl,-r), then keep only the symbols retain.awk selects so
# the module can be loaded into eCos.  The intermediate object is derived
# from $@ (instead of a fixed XX.o in the CWD) so concurrent or repeated
# builds cannot collide on a shared temp name.
$(TOP)/bin/nxscribble.o: $(OBJS) $(TOP)/config
	$(CC) $(CFLAGS) $(LDFLAGS) $(OBJS) -Wl,-r -Wl,--defsym -Wl,nxscribble_main=main -o $@.tmp
	$(NM) $@.tmp | grep -v _main | grep ' T' | awk -f $(TOP)/ecos/retain.awk | xargs $(OBJCOPY) $@.tmp $@
	rm -f $@.tmp
else
$(TOP)/bin/nxscribble: $(OBJS) $(NANOXCLIENTLIBS) $(TOP)/config
	$(CC) $(CFLAGS) $(LDFLAGS) $(OBJS) -o $@ $(NANOXCLIENTLIBS) -lm
endif
endif

powered by: WebSVN 2.1.0

© copyright 1999-2024 OpenCores.org, equivalent to Oliscience, all rights reserved. OpenCores®, registered trademark.