repo_name | path | copies | size | content | license
---|---|---|---|---|---
rgommers/statsmodels | statsmodels/duration/tests/survival_r_results.py | 34 | 11980 |
import numpy as np
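# Regression test fixtures: fitted values from R's survival::coxph() used to
# check statsmodels PHReg. The names appear to follow
# {quantity}_{nobs}_{ncov}[_et][_st]_{ties}: the second number matches the
# length of each coef/se array (number of covariates), "bre"/"efr" are the
# Breslow/Efron tie-handling methods, and "et"/"st" presumably mark runs with
# entry times and stratification; "time"/"hazard" pairs give the cumulative
# baseline hazard at the listed times (placeholders of [0] where not used).
# This reading is inferred from the data itself, not documented here.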
coef_20_1_bre = np.array([-0.9185611])
se_20_1_bre = np.array([0.4706831])
time_20_1_bre = np.array([0.1,0.2,0.3,0.4,0.5,0.6,0.7,0.8,1.1,1.2,1.3,1.4,1.5])
hazard_20_1_bre = np.array([0,0,0.04139181,0.1755379,0.3121216,0.3121216,0.4263121,0.6196358,0.6196358,0.6196358,0.909556,1.31083,1.31083])
coef_20_1_et_bre = np.array([-0.8907007])
se_20_1_et_bre = np.array([0.4683384])
time_20_1_et_bre = np.array([0])
hazard_20_1_et_bre = np.array([0])
coef_20_1_st_bre = np.array([-0.5766809])
se_20_1_st_bre = np.array([0.4418918])
time_20_1_st_bre = np.array([0])
hazard_20_1_st_bre = np.array([0])
coef_20_1_et_st_bre = np.array([-0.5785683])
se_20_1_et_st_bre = np.array([0.4388437])
time_20_1_et_st_bre = np.array([0])
hazard_20_1_et_st_bre = np.array([0])
coef_20_1_efr = np.array([-0.9975319])
se_20_1_efr = np.array([0.4792421])
time_20_1_efr = np.array([0.1,0.2,0.3,0.4,0.5,0.6,0.7,0.8,1.1,1.2,1.3,1.4,1.5])
hazard_20_1_efr = np.array([0,0,0.03934634,0.1663316,0.2986427,0.2986427,0.4119189,0.6077373,0.6077373,0.6077373,0.8933041,1.285732,1.285732])
coef_20_1_et_efr = np.array([-0.9679541])
se_20_1_et_efr = np.array([0.4766406])
time_20_1_et_efr = np.array([0])
hazard_20_1_et_efr = np.array([0])
coef_20_1_st_efr = np.array([-0.6345294])
se_20_1_st_efr = np.array([0.4455952])
time_20_1_st_efr = np.array([0])
hazard_20_1_st_efr = np.array([0])
coef_20_1_et_st_efr = np.array([-0.6355622])
se_20_1_et_st_efr = np.array([0.4423104])
time_20_1_et_st_efr = np.array([0])
hazard_20_1_et_st_efr = np.array([0])
coef_50_1_bre = np.array([-0.6761247])
se_50_1_bre = np.array([0.25133])
time_50_1_bre = np.array([0.1,0.2,0.3,0.4,0.5,0.6,0.7,0.8,0.9,1,1.1,1.2,1.5,1.6,1.7,1.8,1.9,2.4,2.8])
hazard_50_1_bre = np.array([0,0.04895521,0.08457461,0.2073863,0.2382473,0.2793018,0.3271622,0.3842953,0.3842953,0.5310807,0.6360276,0.7648251,0.7648251,0.9294298,0.9294298,0.9294298,1.206438,1.555569,1.555569])
coef_50_1_et_bre = np.array([-0.6492871])
se_50_1_et_bre = np.array([0.2542493])
time_50_1_et_bre = np.array([0])
hazard_50_1_et_bre = np.array([0])
coef_50_1_st_bre = np.array([-0.7051135])
se_50_1_st_bre = np.array([0.2852093])
time_50_1_st_bre = np.array([0])
hazard_50_1_st_bre = np.array([0])
coef_50_1_et_st_bre = np.array([-0.8672546])
se_50_1_et_st_bre = np.array([0.3443235])
time_50_1_et_st_bre = np.array([0])
hazard_50_1_et_st_bre = np.array([0])
coef_50_1_efr = np.array([-0.7119322])
se_50_1_efr = np.array([0.2533563])
time_50_1_efr = np.array([0.1,0.2,0.3,0.4,0.5,0.6,0.7,0.8,0.9,1,1.1,1.2,1.5,1.6,1.7,1.8,1.9,2.4,2.8])
hazard_50_1_efr = np.array([0,0.04773902,0.08238731,0.2022993,0.2327053,0.2736316,0.3215519,0.3787123,0.3787123,0.526184,0.6323073,0.7627338,0.7627338,0.9288858,0.9288858,0.9288858,1.206835,1.556054,1.556054])
coef_50_1_et_efr = np.array([-0.7103063])
se_50_1_et_efr = np.array([0.2598129])
time_50_1_et_efr = np.array([0])
hazard_50_1_et_efr = np.array([0])
coef_50_1_st_efr = np.array([-0.7417904])
se_50_1_st_efr = np.array([0.2846437])
time_50_1_st_efr = np.array([0])
hazard_50_1_st_efr = np.array([0])
coef_50_1_et_st_efr = np.array([-0.9276112])
se_50_1_et_st_efr = np.array([0.3462638])
time_50_1_et_st_efr = np.array([0])
hazard_50_1_et_st_efr = np.array([0])
coef_50_2_bre = np.array([-0.5935189,0.5035724])
se_50_2_bre = np.array([0.2172841,0.2399933])
time_50_2_bre = np.array([0.1,0.2,0.3,0.4,0.5,0.6,0.7,0.8,0.9,1,1.1,1.2,1.3,1.4,1.5,1.9,2.7,2.9])
hazard_50_2_bre = np.array([0.02695812,0.09162381,0.1309537,0.1768423,0.2033353,0.2033353,0.3083449,0.3547287,0.4076453,0.4761318,0.5579718,0.7610905,0.918962,0.918962,1.136173,1.605757,2.457676,2.457676])
coef_50_2_et_bre = np.array([-0.4001465,0.4415933])
se_50_2_et_bre = np.array([0.1992302,0.2525949])
time_50_2_et_bre = np.array([0])
hazard_50_2_et_bre = np.array([0])
coef_50_2_st_bre = np.array([-0.6574891,0.4416079])
se_50_2_st_bre = np.array([0.2753398,0.269458])
time_50_2_st_bre = np.array([0])
hazard_50_2_st_bre = np.array([0])
coef_50_2_et_st_bre = np.array([-0.3607069,0.2731982])
se_50_2_et_st_bre = np.array([0.255415,0.306942])
time_50_2_et_st_bre = np.array([0])
hazard_50_2_et_st_bre = np.array([0])
coef_50_2_efr = np.array([-0.6107485,0.5309737])
se_50_2_efr = np.array([0.2177713,0.2440535])
time_50_2_efr = np.array([0.1,0.2,0.3,0.4,0.5,0.6,0.7,0.8,0.9,1,1.1,1.2,1.3,1.4,1.5,1.9,2.7,2.9])
hazard_50_2_efr = np.array([0.02610571,0.08933637,0.1279094,0.1731699,0.19933,0.19933,0.303598,0.3497025,0.4023939,0.4706978,0.5519237,0.7545023,0.9129989,0.9129989,1.13186,1.60574,2.472615,2.472615])
coef_50_2_et_efr = np.array([-0.4092002,0.4871344])
se_50_2_et_efr = np.array([0.1968905,0.2608527])
time_50_2_et_efr = np.array([0])
hazard_50_2_et_efr = np.array([0])
coef_50_2_st_efr = np.array([-0.6631286,0.4663285])
se_50_2_st_efr = np.array([0.2748224,0.273603])
time_50_2_st_efr = np.array([0])
hazard_50_2_st_efr = np.array([0])
coef_50_2_et_st_efr = np.array([-0.3656059,0.2943912])
se_50_2_et_st_efr = np.array([0.2540752,0.3124632])
time_50_2_et_st_efr = np.array([0])
hazard_50_2_et_st_efr = np.array([0])
coef_100_5_bre = np.array([-0.529776,-0.2916374,-0.1205425,0.3493476,0.6034305])
se_100_5_bre = np.array([0.1789305,0.1482505,0.1347422,0.1528205,0.1647927])
time_100_5_bre = np.array([0.1,0.2,0.3,0.4,0.5,0.6,0.7,0.8,0.9,1,1.1,1.2,1.3,1.4,1.5,1.6,1.7,1.8,1.9,2,2.1,2.5,2.8,3.2,3.3])
hazard_100_5_bre = np.array([0.02558588,0.05608812,0.1087773,0.1451098,0.1896703,0.2235791,0.3127521,0.3355107,0.439452,0.504983,0.5431706,0.5841462,0.5841462,0.5841462,0.6916466,0.7540191,0.8298704,1.027876,1.170335,1.379306,1.648758,1.943177,1.943177,1.943177,4.727101])
coef_100_5_et_bre = np.array([-0.4000784,-0.1790941,-0.1378969,0.3288529,0.533246])
se_100_5_et_bre = np.array([0.1745655,0.1513545,0.1393968,0.1487803,0.1686992])
time_100_5_et_bre = np.array([0])
hazard_100_5_et_bre = np.array([0])
coef_100_5_st_bre = np.array([-0.53019,-0.3225739,-0.1241568,0.3246598,0.6196859])
se_100_5_st_bre = np.array([0.1954581,0.1602811,0.1470644,0.17121,0.1784115])
time_100_5_st_bre = np.array([0])
hazard_100_5_st_bre = np.array([0])
coef_100_5_et_st_bre = np.array([-0.3977171,-0.2166136,-0.1387623,0.3251726,0.5664705])
se_100_5_et_st_bre = np.array([0.1951054,0.1707925,0.1501968,0.1699932,0.1843428])
time_100_5_et_st_bre = np.array([0])
hazard_100_5_et_st_bre = np.array([0])
coef_100_5_efr = np.array([-0.5641909,-0.3233021,-0.1234858,0.3712328,0.6421963])
se_100_5_efr = np.array([0.1804027,0.1496253,0.1338531,0.1529832,0.1670848])
time_100_5_efr = np.array([0.1,0.2,0.3,0.4,0.5,0.6,0.7,0.8,0.9,1,1.1,1.2,1.3,1.4,1.5,1.6,1.7,1.8,1.9,2,2.1,2.5,2.8,3.2,3.3])
hazard_100_5_efr = np.array([0.02393412,0.05276399,0.1028432,0.1383859,0.1823461,0.2158107,0.3037825,0.3264864,0.4306648,0.4964367,0.5348595,0.5760305,0.5760305,0.5760305,0.6842238,0.7468135,0.8228841,1.023195,1.166635,1.379361,1.652898,1.950119,1.950119,1.950119,4.910635])
coef_100_5_et_efr = np.array([-0.4338666,-0.2140139,-0.1397387,0.3535993,0.5768645])
se_100_5_et_efr = np.array([0.1756485,0.1527244,0.138298,0.1488427,0.1716654])
time_100_5_et_efr = np.array([0])
hazard_100_5_et_efr = np.array([0])
coef_100_5_st_efr = np.array([-0.5530876,-0.3331652,-0.128381,0.3503472,0.6397813])
se_100_5_st_efr = np.array([0.1969338,0.1614976,0.1464088,0.171299,0.1800787])
time_100_5_st_efr = np.array([0])
hazard_100_5_st_efr = np.array([0])
coef_100_5_et_st_efr = np.array([-0.421153,-0.2350069,-0.1433638,0.3538863,0.5934568])
se_100_5_et_st_efr = np.array([0.1961729,0.1724719,0.1492979,0.170464,0.1861849])
time_100_5_et_st_efr = np.array([0])
hazard_100_5_et_st_efr = np.array([0])
coef_1000_10_bre = np.array([-0.4699279,-0.464557,-0.308411,-0.2158298,-0.09048563,0.09359662,0.112588,0.3343705,0.3480601,0.5634985])
se_1000_10_bre = np.array([0.04722914,0.04785291,0.04503528,0.04586872,0.04429793,0.0446141,0.04139944,0.04464292,0.04559903,0.04864393])
time_1000_10_bre = np.array([0.1,0.2,0.3,0.4,0.5,0.6,0.7,0.8,0.9,1,1.1,1.2,1.3,1.4,1.5,1.6,1.7,1.8,1.9,2,2.1,2.2,2.3,2.4,2.5,2.6,2.7,2.8,2.9,3,3.1,3.2,3.3,3.4,3.5,3.6,3.7,3.8,3.9,4,4.1,4.2,4.3,4.4,4.6,4.8,4.9,5,5.1,5.2,5.7,5.8,5.9,6.9])
hazard_1000_10_bre = np.array([0.01610374,0.04853538,0.08984849,0.1311329,0.168397,0.2230488,0.2755388,0.3312606,0.3668702,0.4146558,0.477935,0.5290705,0.5831775,0.6503129,0.7113068,0.7830385,0.8361717,0.8910061,0.9615944,1.024011,1.113399,1.165349,1.239827,1.352902,1.409548,1.53197,1.601843,1.682158,1.714907,1.751564,1.790898,1.790898,1.83393,1.83393,1.936055,1.992303,2.050778,2.118776,2.263056,2.504999,2.739343,2.895514,3.090349,3.090349,3.391772,3.728142,4.152769,4.152769,4.152769,4.725957,4.725957,5.69653,5.69653,5.69653])
coef_1000_10_et_bre = np.array([-0.410889,-0.3929442,-0.2975845,-0.1851533,-0.0918359,0.1011997,0.106735,0.2899179,0.3220672,0.5069589])
se_1000_10_et_bre = np.array([0.04696754,0.04732169,0.04537707,0.04605371,0.04365232,0.04450021,0.04252475,0.04482007,0.04562374,0.04859727])
time_1000_10_et_bre = np.array([0])
hazard_1000_10_et_bre = np.array([0])
coef_1000_10_st_bre = np.array([-0.471015,-0.4766859,-0.3070839,-0.2091938,-0.09190845,0.0964942,0.1138269,0.3307131,0.3543551,0.562492])
se_1000_10_st_bre = np.array([0.04814778,0.04841938,0.04572291,0.04641227,0.04502525,0.04517603,0.04203737,0.04524356,0.04635037,0.04920866])
time_1000_10_st_bre = np.array([0])
hazard_1000_10_st_bre = np.array([0])
coef_1000_10_et_st_bre = np.array([-0.4165849,-0.4073504,-0.2980959,-0.1765194,-0.09152798,0.1013213,0.1009838,0.2859668,0.3247608,0.5044448])
se_1000_10_et_st_bre = np.array([0.04809818,0.04809499,0.0460829,0.04679922,0.0445294,0.04514045,0.04339298,0.04580591,0.04652447,0.04920744])
time_1000_10_et_st_bre = np.array([0])
hazard_1000_10_et_st_bre = np.array([0])
coef_1000_10_efr = np.array([-0.4894399,-0.4839746,-0.3227769,-0.2261293,-0.09318482,0.09767154,0.1173205,0.3493732,0.3640146,0.5879749])
se_1000_10_efr = np.array([0.0474181,0.04811855,0.04507655,0.04603044,0.04440409,0.04478202,0.04136728,0.04473343,0.045768,0.04891375])
time_1000_10_efr = np.array([0.1,0.2,0.3,0.4,0.5,0.6,0.7,0.8,0.9,1,1.1,1.2,1.3,1.4,1.5,1.6,1.7,1.8,1.9,2,2.1,2.2,2.3,2.4,2.5,2.6,2.7,2.8,2.9,3,3.1,3.2,3.3,3.4,3.5,3.6,3.7,3.8,3.9,4,4.1,4.2,4.3,4.4,4.6,4.8,4.9,5,5.1,5.2,5.7,5.8,5.9,6.9])
hazard_1000_10_efr = np.array([0.01549698,0.04680035,0.08682564,0.1269429,0.1632388,0.2167291,0.2682311,0.3231316,0.3582936,0.4054892,0.4681098,0.5188697,0.5727059,0.639571,0.7003012,0.7718979,0.825053,0.880063,0.950935,1.013828,1.103903,1.156314,1.231707,1.346235,1.40359,1.527475,1.598231,1.6795,1.712779,1.750227,1.790455,1.790455,1.834455,1.834455,1.938997,1.996804,2.056859,2.126816,2.275217,2.524027,2.76669,2.929268,3.13247,3.13247,3.448515,3.80143,4.249649,4.249649,4.249649,4.851365,4.851365,5.877307,5.877307,5.877307])
coef_1000_10_et_efr = np.array([-0.4373066,-0.4131901,-0.3177637,-0.1978493,-0.09679451,0.1092037,0.1136069,0.3088907,0.3442007,0.5394121])
se_1000_10_et_efr = np.array([0.04716041,0.04755342,0.04546713,0.04627802,0.04376583,0.04474868,0.04259991,0.04491564,0.04589027,0.04890847])
time_1000_10_et_efr = np.array([0])
hazard_1000_10_et_efr = np.array([0])
coef_1000_10_st_efr = np.array([-0.4911117,-0.4960756,-0.3226152,-0.220949,-0.09478141,0.1015735,0.1195524,0.3446977,0.3695904,0.5878576])
se_1000_10_st_efr = np.array([0.04833676,0.04868554,0.04578407,0.04661755,0.04518267,0.04537135,0.04202183,0.04531266,0.0464931,0.04949831])
time_1000_10_st_efr = np.array([0])
hazard_1000_10_st_efr = np.array([0])
coef_1000_10_et_st_efr = np.array([-0.444355,-0.4283278,-0.3198815,-0.1901781,-0.09727039,0.1106191,0.1092104,0.3034778,0.3451699,0.5382381])
se_1000_10_et_st_efr = np.array([0.04830664,0.04833619,0.04617371,0.04706401,0.04472699,0.0454208,0.04350539,0.04588588,0.04675675,0.04950987])
time_1000_10_et_st_efr = np.array([0])
hazard_1000_10_et_st_efr = np.array([0])
| bsd-3-clause |
forman/dectree | examples/intertidal_flat_classif/intertidal_flat_classif.py | 1 | 12362 |
from numba import jit, jitclass, float64
import numpy as np
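# Note on the generated code below (this module appears to be produced by the
# dectree translator): each _NAME_OP(x) helper is a fuzzy membership function
# whose fuzziness half-width happens to be 0.0 here. The "if 0.0 == 0.0:"
# guard therefore always takes the crisp comparison, and the linear-ramp
# branches after it are dead code; with a nonzero width w, lt(t)/gt(t) would
# ramp between 0 and 1 over [t - w, t + w] and eq(t) would be a triangle
# centred on t. (Interpretation inferred from the code, not documented here.)
# Note also that newer numba releases moved jitclass to numba.experimental.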
@jit(nopython=True)
def _B1_LT_085(x):
# B1.LT_085: lt(0.85)
if 0.0 == 0.0:
return 1.0 if x < 0.85 else 0.0
x1 = 0.85 - 0.0
x2 = 0.85 + 0.0
if x <= x1:
return 1.0
if x <= x2:
return 1.0 - (x - x1) / (x2 - x1)
return 0.0
@jit(nopython=True)
def _B1_GT_1(x):
# B1.GT_1: gt(1.0)
if 0.0 == 0.0:
return 1.0 if x > 1.0 else 0.0
x1 = 1.0 - 0.0
x2 = 1.0 + 0.0
if x <= x1:
return 0.0
if x <= x2:
return (x - x1) / (x2 - x1)
return 1.0
@jit(nopython=True)
def _B2_GT_0(x):
# B2.GT_0: gt(0.0)
if 0.0 == 0.0:
return 1.0 if x > 0.0 else 0.0
x1 = 0.0 - 0.0
x2 = 0.0 + 0.0
if x <= x1:
return 0.0
if x <= x2:
return (x - x1) / (x2 - x1)
return 1.0
@jit(nopython=True)
def _B3_LT_005(x):
# B3.LT_005: lt(0.05)
if 0.0 == 0.0:
return 1.0 if x < 0.05 else 0.0
x1 = 0.05 - 0.0
x2 = 0.05 + 0.0
if x <= x1:
return 1.0
if x <= x2:
return 1.0 - (x - x1) / (x2 - x1)
return 0.0
@jit(nopython=True)
def _B3_LT_01(x):
# B3.LT_01: lt(0.1)
if 0.0 == 0.0:
return 1.0 if x < 0.1 else 0.0
x1 = 0.1 - 0.0
x2 = 0.1 + 0.0
if x <= x1:
return 1.0
if x <= x2:
return 1.0 - (x - x1) / (x2 - x1)
return 0.0
@jit(nopython=True)
def _B3_LT_015(x):
# B3.LT_015: lt(0.15)
if 0.0 == 0.0:
return 1.0 if x < 0.15 else 0.0
x1 = 0.15 - 0.0
x2 = 0.15 + 0.0
if x <= x1:
return 1.0
if x <= x2:
return 1.0 - (x - x1) / (x2 - x1)
return 0.0
@jit(nopython=True)
def _B3_LT_02(x):
# B3.LT_02: lt(0.2)
if 0.0 == 0.0:
return 1.0 if x < 0.2 else 0.0
x1 = 0.2 - 0.0
x2 = 0.2 + 0.0
if x <= x1:
return 1.0
if x <= x2:
return 1.0 - (x - x1) / (x2 - x1)
return 0.0
@jit(nopython=True)
def _B4_NODATA(x):
# B4.NODATA: eq(0.0)
if 0.0 == 0.0:
return 1.0 if x == 0.0 else 0.0
x1 = 0.0 - 0.0
x2 = 0.0
x3 = 0.0 + 0.0
if x <= x1:
return 0.0
if x <= x2:
return (x - x1) / (x2 - x1)
if x <= x3:
return 1.0 - (x - x2) / (x3 - x2)
return 0.0
@jit(nopython=True)
def _B5_LT_01(x):
# B5.LT_01: lt(0.1)
if 0.0 == 0.0:
return 1.0 if x < 0.1 else 0.0
x1 = 0.1 - 0.0
x2 = 0.1 + 0.0
if x <= x1:
return 1.0
if x <= x2:
return 1.0 - (x - x1) / (x2 - x1)
return 0.0
@jit(nopython=True)
def _B7_LT_05(x):
# B7.LT_05: lt(0.5)
if 0.0 == 0.0:
return 1.0 if x < 0.5 else 0.0
x1 = 0.5 - 0.0
x2 = 0.5 + 0.0
if x <= x1:
return 1.0
if x <= x2:
return 1.0 - (x - x1) / (x2 - x1)
return 0.0
@jit(nopython=True)
def _B8_GT_0(x):
# B8.GT_0: gt(0.0)
if 0.0 == 0.0:
return 1.0 if x > 0.0 else 0.0
x1 = 0.0 - 0.0
x2 = 0.0 + 0.0
if x <= x1:
return 0.0
if x <= x2:
return (x - x1) / (x2 - x1)
return 1.0
@jit(nopython=True)
def _B8_LT_009(x):
# B8.LT_009: lt(0.09)
if 0.0 == 0.0:
return 1.0 if x < 0.09 else 0.0
x1 = 0.09 - 0.0
x2 = 0.09 + 0.0
if x <= x1:
return 1.0
if x <= x2:
return 1.0 - (x - x1) / (x2 - x1)
return 0.0
@jit(nopython=True)
def _B8_GT_033(x):
# B8.GT_033: gt(0.33)
if 0.0 == 0.0:
return 1.0 if x > 0.33 else 0.0
x1 = 0.33 - 0.0
x2 = 0.33 + 0.0
if x <= x1:
return 0.0
if x <= x2:
return (x - x1) / (x2 - x1)
return 1.0
@jit(nopython=True)
def _B8_GT_035(x):
# B8.GT_035: gt(0.35)
if 0.0 == 0.0:
return 1.0 if x > 0.35 else 0.0
x1 = 0.35 - 0.0
x2 = 0.35 + 0.0
if x <= x1:
return 0.0
if x <= x2:
return (x - x1) / (x2 - x1)
return 1.0
@jit(nopython=True)
def _B8_GT_04(x):
# B8.GT_04: gt(0.4)
if 0.0 == 0.0:
return 1.0 if x > 0.4 else 0.0
x1 = 0.4 - 0.0
x2 = 0.4 + 0.0
if x <= x1:
return 0.0
if x <= x2:
return (x - x1) / (x2 - x1)
return 1.0
@jit(nopython=True)
def _B8_GT_045(x):
# B8.GT_045: gt(0.45)
if 0.0 == 0.0:
return 1.0 if x > 0.45 else 0.0
x1 = 0.45 - 0.0
x2 = 0.45 + 0.0
if x <= x1:
return 0.0
if x <= x2:
return (x - x1) / (x2 - x1)
return 1.0
@jit(nopython=True)
def _B8_LT_085(x):
# B8.LT_085: lt(0.85)
if 0.0 == 0.0:
return 1.0 if x < 0.85 else 0.0
x1 = 0.85 - 0.0
x2 = 0.85 + 0.0
if x <= x1:
return 1.0
if x <= x2:
return 1.0 - (x - x1) / (x2 - x1)
return 0.0
@jit(nopython=True)
def _B16_GT_0(x):
# B16.GT_0: gt(0.0)
if 0.0 == 0.0:
return 1.0 if x > 0.0 else 0.0
x1 = 0.0 - 0.0
x2 = 0.0 + 0.0
if x <= x1:
return 0.0
if x <= x2:
return (x - x1) / (x2 - x1)
return 1.0
@jit(nopython=True)
def _B19_GT_015(x):
# B19.GT_015: gt(0.15)
if 0.0 == 0.0:
return 1.0 if x > 0.15 else 0.0
x1 = 0.15 - 0.0
x2 = 0.15 + 0.0
if x <= x1:
return 0.0
if x <= x2:
return (x - x1) / (x2 - x1)
return 1.0
@jit(nopython=True)
def _BSum_GT_011(x):
# BSum.GT_011: gt(0.11)
if 0.0 == 0.0:
return 1.0 if x > 0.11 else 0.0
x1 = 0.11 - 0.0
x2 = 0.11 + 0.0
if x <= x1:
return 0.0
if x <= x2:
return (x - x1) / (x2 - x1)
return 1.0
@jit(nopython=True)
def _BSum_GT_013(x):
# BSum.GT_013: gt(0.13)
if 0.0 == 0.0:
return 1.0 if x > 0.13 else 0.0
x1 = 0.13 - 0.0
x2 = 0.13 + 0.0
if x <= x1:
return 0.0
if x <= x2:
return (x - x1) / (x2 - x1)
return 1.0
@jit(nopython=True)
def _BSum_GT_016(x):
# BSum.GT_016: gt(0.16)
if 0.0 == 0.0:
return 1.0 if x > 0.16 else 0.0
x1 = 0.16 - 0.0
x2 = 0.16 + 0.0
if x <= x1:
return 0.0
if x <= x2:
return (x - x1) / (x2 - x1)
return 1.0
@jit(nopython=True)
def _Class_FALSE(x):
# Class.FALSE: false()
return 0.0
@jit(nopython=True)
def _Class_TRUE(x):
# Class.TRUE: true()
return 1.0
_InputsSpec = [
("b1", float64[:]),
("b2", float64[:]),
("b3", float64[:]),
("b4", float64[:]),
("b5", float64[:]),
("b6", float64[:]),
("b7", float64[:]),
("b8", float64[:]),
("b12", float64[:]),
("b13", float64[:]),
("b14", float64[:]),
("b15", float64[:]),
("b16", float64[:]),
("b19", float64[:]),
("b100", float64[:]),
("bsum", float64[:]),
]
@jitclass(_InputsSpec)
class Inputs:
def __init__(self, size: int):
self.b1 = np.zeros(size, dtype=np.float64)
self.b2 = np.zeros(size, dtype=np.float64)
self.b3 = np.zeros(size, dtype=np.float64)
self.b4 = np.zeros(size, dtype=np.float64)
self.b5 = np.zeros(size, dtype=np.float64)
self.b6 = np.zeros(size, dtype=np.float64)
self.b7 = np.zeros(size, dtype=np.float64)
self.b8 = np.zeros(size, dtype=np.float64)
self.b12 = np.zeros(size, dtype=np.float64)
self.b13 = np.zeros(size, dtype=np.float64)
self.b14 = np.zeros(size, dtype=np.float64)
self.b15 = np.zeros(size, dtype=np.float64)
self.b16 = np.zeros(size, dtype=np.float64)
self.b19 = np.zeros(size, dtype=np.float64)
self.b100 = np.zeros(size, dtype=np.float64)
self.bsum = np.zeros(size, dtype=np.float64)
_OutputsSpec = [
("nodata", float64[:]),
("Wasser", float64[:]),
("Schill", float64[:]),
("Muschel", float64[:]),
("dense2", float64[:]),
("dense1", float64[:]),
("Strand", float64[:]),
("Sand", float64[:]),
("Misch", float64[:]),
("Misch2", float64[:]),
("Schlick", float64[:]),
("schlick_t", float64[:]),
("Wasser2", float64[:]),
]
@jitclass(_OutputsSpec)
class Outputs:
def __init__(self, size: int):
self.nodata = np.zeros(size, dtype=np.float64)
self.Wasser = np.zeros(size, dtype=np.float64)
self.Schill = np.zeros(size, dtype=np.float64)
self.Muschel = np.zeros(size, dtype=np.float64)
self.dense2 = np.zeros(size, dtype=np.float64)
self.dense1 = np.zeros(size, dtype=np.float64)
self.Strand = np.zeros(size, dtype=np.float64)
self.Sand = np.zeros(size, dtype=np.float64)
self.Misch = np.zeros(size, dtype=np.float64)
self.Misch2 = np.zeros(size, dtype=np.float64)
self.Schlick = np.zeros(size, dtype=np.float64)
self.schlick_t = np.zeros(size, dtype=np.float64)
self.Wasser2 = np.zeros(size, dtype=np.float64)
@jit(nopython=True)
def apply_rules(inputs: Inputs, outputs: Outputs):
for i in range(len(outputs.nodata)):
t0 = 1.0
# if b4 is NODATA:
t1 = min(t0, _B4_NODATA(inputs.b4[i]))
# nodata = TRUE
outputs.nodata[i] = t1
# else:
t1 = min(t0, 1.0 - t1)
# if (b8 is GT_033 and b1 is LT_085) or b8 is LT_009:
t2 = min(t1, max(min(_B8_GT_033(inputs.b8[i]), _B1_LT_085(inputs.b1[i])), _B8_LT_009(inputs.b8[i])))
# if b5 is LT_01:
t3 = min(t2, _B5_LT_01(inputs.b5[i]))
# Wasser = TRUE
outputs.Wasser[i] = t3
# else:
t3 = min(t2, 1.0 - t3)
# if (b19 is GT_015 and (b8 is GT_04 and b8 is LT_085) and b7 is LT_05) or (b8 is GT_04 and bsum is GT_011) or (b8 is GT_035 and bsum is GT_016):
t4 = min(t3, max(max(min(min(_B19_GT_015(inputs.b19[i]), min(_B8_GT_04(inputs.b8[i]), _B8_LT_085(inputs.b8[i]))), _B7_LT_05(inputs.b7[i])), min(_B8_GT_04(inputs.b8[i]), _BSum_GT_011(inputs.bsum[i]))), min(_B8_GT_035(inputs.b8[i]), _BSum_GT_016(inputs.bsum[i]))))
# if bsum is GT_013:
t5 = min(t4, _BSum_GT_013(inputs.bsum[i]))
# Schill = TRUE
outputs.Schill[i] = t5
# else:
t5 = min(t4, 1.0 - t5)
# Muschel = TRUE
outputs.Muschel[i] = t5
# else:
t4 = min(t3, 1.0 - t4)
# if b8 is GT_045:
t5 = min(t4, _B8_GT_045(inputs.b8[i]))
# dense2 = TRUE
outputs.dense2[i] = t5
# else:
t5 = min(t4, 1.0 - t5)
# dense1 = TRUE
outputs.dense1[i] = t5
# else:
t2 = min(t1, 1.0 - t2)
# if b1 is GT_1:
t3 = min(t2, _B1_GT_1(inputs.b1[i]))
# Strand = TRUE
outputs.Strand[i] = t3
# else:
t3 = min(t2, 1.0 - t3)
# if b3 is LT_005:
t4 = min(t3, _B3_LT_005(inputs.b3[i]))
# Sand = TRUE
outputs.Sand[i] = t4
# else:
t4 = min(t3, 1.0 - t4)
# if b3 is LT_01 and b8 is GT_0:
t5 = min(t4, min(_B3_LT_01(inputs.b3[i]), _B8_GT_0(inputs.b8[i])))
# Misch = TRUE
outputs.Misch[i] = t5
# else:
t5 = min(t4, 1.0 - t5)
# if b3 is LT_015 and b8 is GT_0:
t6 = min(t5, min(_B3_LT_015(inputs.b3[i]), _B8_GT_0(inputs.b8[i])))
# Misch2 = TRUE
outputs.Misch2[i] = t6
# else:
t6 = min(t5, 1.0 - t6)
# if b3 is LT_02 and b2 is GT_0 and b8 is GT_0:
t7 = min(t6, min(min(_B3_LT_02(inputs.b3[i]), _B2_GT_0(inputs.b2[i])), _B8_GT_0(inputs.b8[i])))
# Schlick = TRUE
outputs.Schlick[i] = t7
# else:
t7 = min(t6, 1.0 - t7)
# if b16 is GT_0 and b8 is GT_0:
t8 = min(t7, min(_B16_GT_0(inputs.b16[i]), _B8_GT_0(inputs.b8[i])))
# schlick_t = TRUE
outputs.schlick_t[i] = t8
# else:
t8 = min(t7, 1.0 - t8)
# Wasser2 = TRUE
outputs.Wasser2[i] = t8
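# A minimal usage sketch for the generated classifier above (not part of the
# original module; the band values below are made-up illustrations):
if __name__ == "__main__":
    n = 4
    inputs = Inputs(n)                        # all bands start as zeros
    inputs.b4[:] = np.array([0.0, 0.2, 0.3, 0.1])    # b4 == 0.0 -> nodata
    inputs.b8[:] = np.array([0.50, 0.05, 0.40, 0.20])
    inputs.b1[:] = np.array([0.50, 0.60, 1.20, 0.70])
    outputs = Outputs(n)
    apply_rules(inputs, outputs)              # fills per-class memberships
    print(outputs.nodata, outputs.Wasser, outputs.Strand)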
| mit |
blacklin/kbengine | kbe/src/lib/python/Lib/pickle.py | 67 | 55641 |
"""Create portable serialized representations of Python objects.
See module copyreg for a mechanism for registering custom picklers.
See module pickletools source for extensive comments.
Classes:
Pickler
Unpickler
Functions:
dump(object, file)
dumps(object) -> string
load(file) -> object
loads(string) -> object
Misc variables:
__version__
format_version
compatible_formats
"""
from types import FunctionType
from copyreg import dispatch_table
from copyreg import _extension_registry, _inverted_registry, _extension_cache
from itertools import islice
import sys
from sys import maxsize
from struct import pack, unpack
import re
import io
import codecs
import _compat_pickle
__all__ = ["PickleError", "PicklingError", "UnpicklingError", "Pickler",
"Unpickler", "dump", "dumps", "load", "loads"]
# Shortcut for use in isinstance testing
bytes_types = (bytes, bytearray)
# These are purely informational; no code uses these.
format_version = "4.0" # File format version we write
compatible_formats = ["1.0", # Original protocol 0
"1.1", # Protocol 0 with INST added
"1.2", # Original protocol 1
"1.3", # Protocol 1 with BINFLOAT added
"2.0", # Protocol 2
"3.0", # Protocol 3
"4.0", # Protocol 4
] # Old format versions we can read
# This is the highest protocol number we know how to read.
HIGHEST_PROTOCOL = 4
# The protocol we write by default. May be less than HIGHEST_PROTOCOL.
# We intentionally write a protocol that Python 2.x cannot read;
# there are too many issues with that.
DEFAULT_PROTOCOL = 3
class PickleError(Exception):
"""A common base class for the other pickling exceptions."""
pass
class PicklingError(PickleError):
"""This exception is raised when an unpicklable object is passed to the
dump() method.
"""
pass
class UnpicklingError(PickleError):
"""This exception is raised when there is a problem unpickling an object,
such as a security violation.
Note that other exceptions may also be raised during unpickling, including
(but not necessarily limited to) AttributeError, EOFError, ImportError,
and IndexError.
"""
pass
# An instance of _Stop is raised by Unpickler.load_stop() in response to
# the STOP opcode, passing the object that is the result of unpickling.
class _Stop(Exception):
def __init__(self, value):
self.value = value
# Jython has PyStringMap; it's a dict subclass with string keys
try:
from org.python.core import PyStringMap
except ImportError:
PyStringMap = None
# Pickle opcodes. See pickletools.py for extensive docs. The listing
# here is in kind-of alphabetical order of 1-character pickle code.
# pickletools groups them by purpose.
MARK = b'(' # push special markobject on stack
STOP = b'.' # every pickle ends with STOP
POP = b'0' # discard topmost stack item
POP_MARK = b'1' # discard stack top through topmost markobject
DUP = b'2' # duplicate top stack item
FLOAT = b'F' # push float object; decimal string argument
INT = b'I' # push integer or bool; decimal string argument
BININT = b'J' # push four-byte signed int
BININT1 = b'K' # push 1-byte unsigned int
LONG = b'L' # push long; decimal string argument
BININT2 = b'M' # push 2-byte unsigned int
NONE = b'N' # push None
PERSID = b'P' # push persistent object; id is taken from string arg
BINPERSID = b'Q' # " " " ; " " " " stack
REDUCE = b'R' # apply callable to argtuple, both on stack
STRING = b'S' # push string; NL-terminated string argument
BINSTRING = b'T' # push string; counted binary string argument
SHORT_BINSTRING= b'U' # " " ; " " " " < 256 bytes
UNICODE = b'V' # push Unicode string; raw-unicode-escaped'd argument
BINUNICODE = b'X' # " " " ; counted UTF-8 string argument
APPEND = b'a' # append stack top to list below it
BUILD = b'b' # call __setstate__ or __dict__.update()
GLOBAL = b'c' # push self.find_class(modname, name); 2 string args
DICT = b'd' # build a dict from stack items
EMPTY_DICT = b'}' # push empty dict
APPENDS = b'e' # extend list on stack by topmost stack slice
GET = b'g' # push item from memo on stack; index is string arg
BINGET = b'h' # " " " " " " ; " " 1-byte arg
INST = b'i' # build & push class instance
LONG_BINGET = b'j' # push item from memo on stack; index is 4-byte arg
LIST = b'l' # build list from topmost stack items
EMPTY_LIST = b']' # push empty list
OBJ = b'o' # build & push class instance
PUT = b'p' # store stack top in memo; index is string arg
BINPUT = b'q' # " " " " " ; " " 1-byte arg
LONG_BINPUT = b'r' # " " " " " ; " " 4-byte arg
SETITEM = b's' # add key+value pair to dict
TUPLE = b't' # build tuple from topmost stack items
EMPTY_TUPLE = b')' # push empty tuple
SETITEMS = b'u' # modify dict by adding topmost key+value pairs
BINFLOAT = b'G' # push float; arg is 8-byte float encoding
TRUE = b'I01\n' # not an opcode; see INT docs in pickletools.py
FALSE = b'I00\n' # not an opcode; see INT docs in pickletools.py
# Protocol 2
PROTO = b'\x80' # identify pickle protocol
NEWOBJ = b'\x81' # build object by applying cls.__new__ to argtuple
EXT1 = b'\x82' # push object from extension registry; 1-byte index
EXT2 = b'\x83' # ditto, but 2-byte index
EXT4 = b'\x84' # ditto, but 4-byte index
TUPLE1 = b'\x85' # build 1-tuple from stack top
TUPLE2 = b'\x86' # build 2-tuple from two topmost stack items
TUPLE3 = b'\x87' # build 3-tuple from three topmost stack items
NEWTRUE = b'\x88' # push True
NEWFALSE = b'\x89' # push False
LONG1 = b'\x8a' # push long from < 256 bytes
LONG4 = b'\x8b' # push really big long
_tuplesize2code = [EMPTY_TUPLE, TUPLE1, TUPLE2, TUPLE3]
# Protocol 3 (Python 3.x)
BINBYTES = b'B' # push bytes; counted binary string argument
SHORT_BINBYTES = b'C' # " " ; " " " " < 256 bytes
# Protocol 4
SHORT_BINUNICODE = b'\x8c' # push short string; UTF-8 length < 256 bytes
BINUNICODE8 = b'\x8d' # push very long string
BINBYTES8 = b'\x8e' # push very long bytes string
EMPTY_SET = b'\x8f' # push empty set on the stack
ADDITEMS = b'\x90' # modify set by adding topmost stack items
FROZENSET = b'\x91' # build frozenset from topmost stack items
NEWOBJ_EX = b'\x92' # like NEWOBJ but work with keyword only arguments
STACK_GLOBAL = b'\x93' # same as GLOBAL but using names on the stacks
MEMOIZE = b'\x94' # store top of the stack in memo
FRAME = b'\x95' # indicate the beginning of a new frame
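# Two tiny worked streams at protocol 0 (dump() below only emits a PROTO
# header for protocol 2+), decodable with pickletools.dis():
#     pickle.dumps(None, protocol=0) == b'N.'      # NONE, then STOP
#     pickle.dumps(True, protocol=0) == b'I01\n.'  # TRUE (an INT form), STOP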
__all__.extend([x for x in dir() if re.match("[A-Z][A-Z0-9_]+$", x)])
class _Framer:
_FRAME_SIZE_TARGET = 64 * 1024
def __init__(self, file_write):
self.file_write = file_write
self.current_frame = None
def start_framing(self):
self.current_frame = io.BytesIO()
def end_framing(self):
if self.current_frame and self.current_frame.tell() > 0:
self.commit_frame(force=True)
self.current_frame = None
def commit_frame(self, force=False):
if self.current_frame:
f = self.current_frame
if f.tell() >= self._FRAME_SIZE_TARGET or force:
with f.getbuffer() as data:
n = len(data)
write = self.file_write
write(FRAME)
write(pack("<Q", n))
write(data)
f.seek(0)
f.truncate()
def write(self, data):
if self.current_frame:
return self.current_frame.write(data)
else:
return self.file_write(data)
class _Unframer:
def __init__(self, file_read, file_readline, file_tell=None):
self.file_read = file_read
self.file_readline = file_readline
self.current_frame = None
def read(self, n):
if self.current_frame:
data = self.current_frame.read(n)
if not data and n != 0:
self.current_frame = None
return self.file_read(n)
if len(data) < n:
raise UnpicklingError(
"pickle exhausted before end of frame")
return data
else:
return self.file_read(n)
def readline(self):
if self.current_frame:
data = self.current_frame.readline()
if not data:
self.current_frame = None
return self.file_readline()
            if not data.endswith(b'\n'):
raise UnpicklingError(
"pickle exhausted before end of frame")
return data
else:
return self.file_readline()
def load_frame(self, frame_size):
if self.current_frame and self.current_frame.read() != b'':
raise UnpicklingError(
"beginning of a new frame before end of current frame")
self.current_frame = io.BytesIO(self.file_read(frame_size))
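# Frame wire format (protocol 4), as produced by _Framer.commit_frame() above:
# the FRAME opcode b'\x95', an 8-byte little-endian unsigned length ("<Q"),
# then the payload; e.g. a 3-byte payload is preceded by
# b'\x95\x03\x00\x00\x00\x00\x00\x00\x00'.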
# Tools used for pickling.
def _getattribute(obj, name, allow_qualname=False):
dotted_path = name.split(".")
if not allow_qualname and len(dotted_path) > 1:
raise AttributeError("Can't get qualified attribute {!r} on {!r}; " +
"use protocols >= 4 to enable support"
.format(name, obj))
for subpath in dotted_path:
if subpath == '<locals>':
raise AttributeError("Can't get local attribute {!r} on {!r}"
.format(name, obj))
try:
obj = getattr(obj, subpath)
except AttributeError:
raise AttributeError("Can't get attribute {!r} on {!r}"
.format(name, obj))
return obj
def whichmodule(obj, name, allow_qualname=False):
"""Find the module an object belong to."""
module_name = getattr(obj, '__module__', None)
if module_name is not None:
return module_name
for module_name, module in sys.modules.items():
if module_name == '__main__' or module is None:
continue
try:
if _getattribute(module, name, allow_qualname) is obj:
return module_name
except AttributeError:
pass
return '__main__'
def encode_long(x):
r"""Encode a long to a two's complement little-endian binary string.
Note that 0 is a special case, returning an empty string, to save a
byte in the LONG1 pickling context.
>>> encode_long(0)
b''
>>> encode_long(255)
b'\xff\x00'
>>> encode_long(32767)
b'\xff\x7f'
>>> encode_long(-256)
b'\x00\xff'
>>> encode_long(-32768)
b'\x00\x80'
>>> encode_long(-128)
b'\x80'
>>> encode_long(127)
b'\x7f'
>>>
"""
if x == 0:
return b''
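    # Compute a byte count that always suffices for the two's-complement
    # encoding, then drop a redundant trailing 0xff sign byte when the
    # preceding byte already has its sign bit set, keeping the result minimal.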
nbytes = (x.bit_length() >> 3) + 1
result = x.to_bytes(nbytes, byteorder='little', signed=True)
if x < 0 and nbytes > 1:
if result[-1] == 0xff and (result[-2] & 0x80) != 0:
result = result[:-1]
return result
def decode_long(data):
r"""Decode a long from a two's complement little-endian binary string.
>>> decode_long(b'')
0
>>> decode_long(b"\xff\x00")
255
>>> decode_long(b"\xff\x7f")
32767
>>> decode_long(b"\x00\xff")
-256
>>> decode_long(b"\x00\x80")
-32768
>>> decode_long(b"\x80")
-128
>>> decode_long(b"\x7f")
127
"""
return int.from_bytes(data, byteorder='little', signed=True)
# Pickling machinery
class _Pickler:
def __init__(self, file, protocol=None, *, fix_imports=True):
"""This takes a binary file for writing a pickle data stream.
The optional *protocol* argument tells the pickler to use the
given protocol; supported protocols are 0, 1, 2, 3 and 4. The
default protocol is 3; a backward-incompatible protocol designed
for Python 3.
Specifying a negative protocol version selects the highest
protocol version supported. The higher the protocol used, the
more recent the version of Python needed to read the pickle
produced.
The *file* argument must have a write() method that accepts a
single bytes argument. It can thus be a file object opened for
        binary writing, an io.BytesIO instance, or any other custom
object that meets this interface.
If *fix_imports* is True and *protocol* is less than 3, pickle
will try to map the new Python 3 names to the old module names
used in Python 2, so that the pickle data stream is readable
with Python 2.
"""
if protocol is None:
protocol = DEFAULT_PROTOCOL
if protocol < 0:
protocol = HIGHEST_PROTOCOL
elif not 0 <= protocol <= HIGHEST_PROTOCOL:
raise ValueError("pickle protocol must be <= %d" % HIGHEST_PROTOCOL)
try:
self._file_write = file.write
except AttributeError:
raise TypeError("file must have a 'write' attribute")
self.framer = _Framer(self._file_write)
self.write = self.framer.write
self.memo = {}
self.proto = int(protocol)
self.bin = protocol >= 1
self.fast = 0
self.fix_imports = fix_imports and protocol < 3
def clear_memo(self):
"""Clears the pickler's "memo".
The memo is the data structure that remembers which objects the
pickler has already seen, so that shared or recursive objects
are pickled by reference and not by value. This method is
useful when re-using picklers.
"""
self.memo.clear()
def dump(self, obj):
"""Write a pickled representation of obj to the open file."""
# Check whether Pickler was initialized correctly. This is
# only needed to mimic the behavior of _pickle.Pickler.dump().
if not hasattr(self, "_file_write"):
raise PicklingError("Pickler.__init__() was not called by "
"%s.__init__()" % (self.__class__.__name__,))
if self.proto >= 2:
self.write(PROTO + pack("<B", self.proto))
if self.proto >= 4:
self.framer.start_framing()
self.save(obj)
self.write(STOP)
self.framer.end_framing()
def memoize(self, obj):
"""Store an object in the memo."""
# The Pickler memo is a dictionary mapping object ids to 2-tuples
# that contain the Unpickler memo key and the object being memoized.
# The memo key is written to the pickle and will become
# the key in the Unpickler's memo. The object is stored in the
# Pickler memo so that transient objects are kept alive during
# pickling.
# The use of the Unpickler memo length as the memo key is just a
# convention. The only requirement is that the memo values be unique.
# But there appears no advantage to any other scheme, and this
# scheme allows the Unpickler memo to be implemented as a plain (but
# growable) array, indexed by memo key.
if self.fast:
return
assert id(obj) not in self.memo
idx = len(self.memo)
self.write(self.put(idx))
self.memo[id(obj)] = idx, obj
# Return a PUT (BINPUT, LONG_BINPUT) opcode string, with argument i.
def put(self, idx):
if self.proto >= 4:
return MEMOIZE
elif self.bin:
if idx < 256:
return BINPUT + pack("<B", idx)
else:
return LONG_BINPUT + pack("<I", idx)
else:
return PUT + repr(idx).encode("ascii") + b'\n'
# Return a GET (BINGET, LONG_BINGET) opcode string, with argument i.
def get(self, i):
if self.bin:
if i < 256:
return BINGET + pack("<B", i)
else:
return LONG_BINGET + pack("<I", i)
return GET + repr(i).encode("ascii") + b'\n'
def save(self, obj, save_persistent_id=True):
self.framer.commit_frame()
# Check for persistent id (defined by a subclass)
pid = self.persistent_id(obj)
if pid is not None and save_persistent_id:
self.save_pers(pid)
return
# Check the memo
x = self.memo.get(id(obj))
if x is not None:
self.write(self.get(x[0]))
return
# Check the type dispatch table
t = type(obj)
f = self.dispatch.get(t)
if f is not None:
f(self, obj) # Call unbound method with explicit self
return
# Check private dispatch table if any, or else copyreg.dispatch_table
reduce = getattr(self, 'dispatch_table', dispatch_table).get(t)
if reduce is not None:
rv = reduce(obj)
else:
# Check for a class with a custom metaclass; treat as regular class
try:
issc = issubclass(t, type)
except TypeError: # t is not a class (old Boost; see SF #502085)
issc = False
if issc:
self.save_global(obj)
return
# Check for a __reduce_ex__ method, fall back to __reduce__
reduce = getattr(obj, "__reduce_ex__", None)
if reduce is not None:
rv = reduce(self.proto)
else:
reduce = getattr(obj, "__reduce__", None)
if reduce is not None:
rv = reduce()
else:
raise PicklingError("Can't pickle %r object: %r" %
(t.__name__, obj))
# Check for string returned by reduce(), meaning "save as global"
if isinstance(rv, str):
self.save_global(obj, rv)
return
# Assert that reduce() returned a tuple
if not isinstance(rv, tuple):
raise PicklingError("%s must return string or tuple" % reduce)
# Assert that it returned an appropriately sized tuple
l = len(rv)
if not (2 <= l <= 5):
raise PicklingError("Tuple returned by %s must have "
"two to five elements" % reduce)
# Save the reduce() output and finally memoize the object
self.save_reduce(obj=obj, *rv)
def persistent_id(self, obj):
# This exists so a subclass can override it
return None
def save_pers(self, pid):
# Save a persistent id reference
if self.bin:
self.save(pid, save_persistent_id=False)
self.write(BINPERSID)
else:
self.write(PERSID + str(pid).encode("ascii") + b'\n')
def save_reduce(self, func, args, state=None, listitems=None,
dictitems=None, obj=None):
# This API is called by some subclasses
if not isinstance(args, tuple):
raise PicklingError("args from save_reduce() must be a tuple")
if not callable(func):
raise PicklingError("func from save_reduce() must be callable")
save = self.save
write = self.write
func_name = getattr(func, "__name__", "")
if self.proto >= 4 and func_name == "__newobj_ex__":
cls, args, kwargs = args
if not hasattr(cls, "__new__"):
raise PicklingError("args[0] from {} args has no __new__"
.format(func_name))
if obj is not None and cls is not obj.__class__:
raise PicklingError("args[0] from {} args has the wrong class"
.format(func_name))
save(cls)
save(args)
save(kwargs)
write(NEWOBJ_EX)
elif self.proto >= 2 and func_name == "__newobj__":
# A __reduce__ implementation can direct protocol 2 or newer to
# use the more efficient NEWOBJ opcode, while still
# allowing protocol 0 and 1 to work normally. For this to
# work, the function returned by __reduce__ should be
# called __newobj__, and its first argument should be a
# class. The implementation for __newobj__
# should be as follows, although pickle has no way to
# verify this:
#
# def __newobj__(cls, *args):
# return cls.__new__(cls, *args)
#
# Protocols 0 and 1 will pickle a reference to __newobj__,
# while protocol 2 (and above) will pickle a reference to
# cls, the remaining args tuple, and the NEWOBJ code,
# which calls cls.__new__(cls, *args) at unpickling time
# (see load_newobj below). If __reduce__ returns a
# three-tuple, the state from the third tuple item will be
# pickled regardless of the protocol, calling __setstate__
# at unpickling time (see load_build below).
#
# Note that no standard __newobj__ implementation exists;
# you have to provide your own. This is to enforce
# compatibility with Python 2.2 (pickles written using
# protocol 0 or 1 in Python 2.3 should be unpicklable by
# Python 2.2).
cls = args[0]
if not hasattr(cls, "__new__"):
raise PicklingError(
"args[0] from __newobj__ args has no __new__")
if obj is not None and cls is not obj.__class__:
raise PicklingError(
"args[0] from __newobj__ args has the wrong class")
args = args[1:]
save(cls)
save(args)
write(NEWOBJ)
else:
save(func)
save(args)
write(REDUCE)
if obj is not None:
# If the object is already in the memo, this means it is
# recursive. In this case, throw away everything we put on the
# stack, and fetch the object back from the memo.
if id(obj) in self.memo:
write(POP + self.get(self.memo[id(obj)][0]))
else:
self.memoize(obj)
# More new special cases (that work with older protocols as
# well): when __reduce__ returns a tuple with 4 or 5 items,
# the 4th and 5th item should be iterators that provide list
# items and dict items (as (key, value) tuples), or None.
if listitems is not None:
self._batch_appends(listitems)
if dictitems is not None:
self._batch_setitems(dictitems)
if state is not None:
save(state)
write(BUILD)
# Methods below this point are dispatched through the dispatch table
dispatch = {}
def save_none(self, obj):
self.write(NONE)
dispatch[type(None)] = save_none
def save_bool(self, obj):
if self.proto >= 2:
self.write(NEWTRUE if obj else NEWFALSE)
else:
self.write(TRUE if obj else FALSE)
dispatch[bool] = save_bool
def save_long(self, obj):
if self.bin:
# If the int is small enough to fit in a signed 4-byte 2's-comp
# format, we can store it more efficiently than the general
# case.
# First one- and two-byte unsigned ints:
if obj >= 0:
if obj <= 0xff:
self.write(BININT1 + pack("<B", obj))
return
if obj <= 0xffff:
self.write(BININT2 + pack("<H", obj))
return
# Next check for 4-byte signed ints:
if -0x80000000 <= obj <= 0x7fffffff:
self.write(BININT + pack("<i", obj))
return
if self.proto >= 2:
encoded = encode_long(obj)
n = len(encoded)
if n < 256:
self.write(LONG1 + pack("<B", n) + encoded)
else:
self.write(LONG4 + pack("<i", n) + encoded)
return
self.write(LONG + repr(obj).encode("ascii") + b'L\n')
dispatch[int] = save_long
def save_float(self, obj):
if self.bin:
self.write(BINFLOAT + pack('>d', obj))
else:
self.write(FLOAT + repr(obj).encode("ascii") + b'\n')
dispatch[float] = save_float
def save_bytes(self, obj):
if self.proto < 3:
if not obj: # bytes object is empty
self.save_reduce(bytes, (), obj=obj)
else:
self.save_reduce(codecs.encode,
(str(obj, 'latin1'), 'latin1'), obj=obj)
return
n = len(obj)
if n <= 0xff:
self.write(SHORT_BINBYTES + pack("<B", n) + obj)
elif n > 0xffffffff and self.proto >= 4:
self.write(BINBYTES8 + pack("<Q", n) + obj)
else:
self.write(BINBYTES + pack("<I", n) + obj)
self.memoize(obj)
dispatch[bytes] = save_bytes
def save_str(self, obj):
if self.bin:
encoded = obj.encode('utf-8', 'surrogatepass')
n = len(encoded)
if n <= 0xff and self.proto >= 4:
self.write(SHORT_BINUNICODE + pack("<B", n) + encoded)
elif n > 0xffffffff and self.proto >= 4:
self.write(BINUNICODE8 + pack("<Q", n) + encoded)
else:
self.write(BINUNICODE + pack("<I", n) + encoded)
else:
obj = obj.replace("\\", "\\u005c")
obj = obj.replace("\n", "\\u000a")
self.write(UNICODE + obj.encode('raw-unicode-escape') +
b'\n')
self.memoize(obj)
dispatch[str] = save_str
def save_tuple(self, obj):
if not obj: # tuple is empty
if self.bin:
self.write(EMPTY_TUPLE)
else:
self.write(MARK + TUPLE)
return
n = len(obj)
save = self.save
memo = self.memo
if n <= 3 and self.proto >= 2:
for element in obj:
save(element)
# Subtle. Same as in the big comment below.
if id(obj) in memo:
get = self.get(memo[id(obj)][0])
self.write(POP * n + get)
else:
self.write(_tuplesize2code[n])
self.memoize(obj)
return
# proto 0 or proto 1 and tuple isn't empty, or proto > 1 and tuple
# has more than 3 elements.
write = self.write
write(MARK)
for element in obj:
save(element)
if id(obj) in memo:
# Subtle. d was not in memo when we entered save_tuple(), so
# the process of saving the tuple's elements must have saved
# the tuple itself: the tuple is recursive. The proper action
# now is to throw away everything we put on the stack, and
# simply GET the tuple (it's already constructed). This check
# could have been done in the "for element" loop instead, but
# recursive tuples are a rare thing.
get = self.get(memo[id(obj)][0])
if self.bin:
write(POP_MARK + get)
else: # proto 0 -- POP_MARK not available
write(POP * (n+1) + get)
return
# No recursion.
write(TUPLE)
self.memoize(obj)
dispatch[tuple] = save_tuple
def save_list(self, obj):
if self.bin:
self.write(EMPTY_LIST)
else: # proto 0 -- can't use EMPTY_LIST
self.write(MARK + LIST)
self.memoize(obj)
self._batch_appends(obj)
dispatch[list] = save_list
_BATCHSIZE = 1000
def _batch_appends(self, items):
# Helper to batch up APPENDS sequences
save = self.save
write = self.write
if not self.bin:
for x in items:
save(x)
write(APPEND)
return
it = iter(items)
while True:
tmp = list(islice(it, self._BATCHSIZE))
n = len(tmp)
if n > 1:
write(MARK)
for x in tmp:
save(x)
write(APPENDS)
elif n:
save(tmp[0])
write(APPEND)
# else tmp is empty, and we're done
if n < self._BATCHSIZE:
return
def save_dict(self, obj):
if self.bin:
self.write(EMPTY_DICT)
else: # proto 0 -- can't use EMPTY_DICT
self.write(MARK + DICT)
self.memoize(obj)
self._batch_setitems(obj.items())
dispatch[dict] = save_dict
if PyStringMap is not None:
dispatch[PyStringMap] = save_dict
def _batch_setitems(self, items):
# Helper to batch up SETITEMS sequences; proto >= 1 only
save = self.save
write = self.write
if not self.bin:
for k, v in items:
save(k)
save(v)
write(SETITEM)
return
it = iter(items)
while True:
tmp = list(islice(it, self._BATCHSIZE))
n = len(tmp)
if n > 1:
write(MARK)
for k, v in tmp:
save(k)
save(v)
write(SETITEMS)
elif n:
k, v = tmp[0]
save(k)
save(v)
write(SETITEM)
# else tmp is empty, and we're done
if n < self._BATCHSIZE:
return
def save_set(self, obj):
save = self.save
write = self.write
if self.proto < 4:
self.save_reduce(set, (list(obj),), obj=obj)
return
write(EMPTY_SET)
self.memoize(obj)
it = iter(obj)
while True:
batch = list(islice(it, self._BATCHSIZE))
n = len(batch)
if n > 0:
write(MARK)
for item in batch:
save(item)
write(ADDITEMS)
if n < self._BATCHSIZE:
return
dispatch[set] = save_set
def save_frozenset(self, obj):
save = self.save
write = self.write
if self.proto < 4:
self.save_reduce(frozenset, (list(obj),), obj=obj)
return
write(MARK)
for item in obj:
save(item)
if id(obj) in self.memo:
# If the object is already in the memo, this means it is
# recursive. In this case, throw away everything we put on the
# stack, and fetch the object back from the memo.
write(POP_MARK + self.get(self.memo[id(obj)][0]))
return
write(FROZENSET)
self.memoize(obj)
dispatch[frozenset] = save_frozenset
def save_global(self, obj, name=None):
write = self.write
memo = self.memo
if name is None and self.proto >= 4:
name = getattr(obj, '__qualname__', None)
if name is None:
name = obj.__name__
module_name = whichmodule(obj, name, allow_qualname=self.proto >= 4)
try:
__import__(module_name, level=0)
module = sys.modules[module_name]
obj2 = _getattribute(module, name, allow_qualname=self.proto >= 4)
except (ImportError, KeyError, AttributeError):
raise PicklingError(
"Can't pickle %r: it's not found as %s.%s" %
(obj, module_name, name))
else:
if obj2 is not obj:
raise PicklingError(
"Can't pickle %r: it's not the same object as %s.%s" %
(obj, module_name, name))
if self.proto >= 2:
code = _extension_registry.get((module_name, name))
if code:
assert code > 0
if code <= 0xff:
write(EXT1 + pack("<B", code))
elif code <= 0xffff:
write(EXT2 + pack("<H", code))
else:
write(EXT4 + pack("<i", code))
return
# Non-ASCII identifiers are supported only with protocols >= 3.
if self.proto >= 4:
self.save(module_name)
self.save(name)
write(STACK_GLOBAL)
elif self.proto >= 3:
write(GLOBAL + bytes(module_name, "utf-8") + b'\n' +
bytes(name, "utf-8") + b'\n')
else:
if self.fix_imports:
r_name_mapping = _compat_pickle.REVERSE_NAME_MAPPING
r_import_mapping = _compat_pickle.REVERSE_IMPORT_MAPPING
if (module_name, name) in r_name_mapping:
module_name, name = r_name_mapping[(module_name, name)]
if module_name in r_import_mapping:
module_name = r_import_mapping[module_name]
try:
write(GLOBAL + bytes(module_name, "ascii") + b'\n' +
bytes(name, "ascii") + b'\n')
except UnicodeEncodeError:
raise PicklingError(
"can't pickle global identifier '%s.%s' using "
"pickle protocol %i" % (module, name, self.proto))
self.memoize(obj)
def save_type(self, obj):
if obj is type(None):
return self.save_reduce(type, (None,), obj=obj)
elif obj is type(NotImplemented):
return self.save_reduce(type, (NotImplemented,), obj=obj)
elif obj is type(...):
return self.save_reduce(type, (...,), obj=obj)
return self.save_global(obj)
dispatch[FunctionType] = save_global
dispatch[type] = save_type
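# A minimal round-trip sketch using the internal classes above (illustrative;
# the public dump()/dumps()/load()/loads() wrappers appear later in the full
# module, beyond this excerpt):
#
#     buf = io.BytesIO()
#     _Pickler(buf, protocol=2).dump({"a": 1})
#     buf.seek(0)
#     assert _Unpickler(buf).load() == {"a": 1}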
# Unpickling machinery
class _Unpickler:
def __init__(self, file, *, fix_imports=True,
encoding="ASCII", errors="strict"):
"""This takes a binary file for reading a pickle data stream.
The protocol version of the pickle is detected automatically, so
no proto argument is needed.
The argument *file* must have two methods, a read() method that
takes an integer argument, and a readline() method that requires
no arguments. Both methods should return bytes. Thus *file*
        can be a binary file object opened for reading, an io.BytesIO
object, or any other custom object that meets this interface.
Optional keyword arguments are *fix_imports*, *encoding* and
        *errors*, which are used to control compatibility support for
pickle stream generated by Python 2. If *fix_imports* is True,
pickle will try to map the old Python 2 names to the new names
used in Python 3. The *encoding* and *errors* tell pickle how
to decode 8-bit string instances pickled by Python 2; these
default to 'ASCII' and 'strict', respectively. *encoding* can be
        'bytes' to read these 8-bit string instances as bytes objects.
"""
self._file_readline = file.readline
self._file_read = file.read
self.memo = {}
self.encoding = encoding
self.errors = errors
self.proto = 0
self.fix_imports = fix_imports
def load(self):
"""Read a pickled object representation from the open file.
Return the reconstituted object hierarchy specified in the file.
"""
# Check whether Unpickler was initialized correctly. This is
        # only needed to mimic the behavior of _pickle.Unpickler.load().
if not hasattr(self, "_file_read"):
raise UnpicklingError("Unpickler.__init__() was not called by "
"%s.__init__()" % (self.__class__.__name__,))
self._unframer = _Unframer(self._file_read, self._file_readline)
self.read = self._unframer.read
self.readline = self._unframer.readline
self.mark = object() # any new unique object
self.stack = []
self.append = self.stack.append
self.proto = 0
read = self.read
dispatch = self.dispatch
try:
while True:
key = read(1)
if not key:
raise EOFError
assert isinstance(key, bytes_types)
dispatch[key[0]](self)
except _Stop as stopinst:
return stopinst.value
# Return largest index k such that self.stack[k] is self.mark.
# If the stack doesn't contain a mark, eventually raises IndexError.
# This could be sped by maintaining another stack, of indices at which
# the mark appears. For that matter, the latter stack would suffice,
# and we wouldn't need to push mark objects on self.stack at all.
# Doing so is probably a good thing, though, since if the pickle is
# corrupt (or hostile) we may get a clue from finding self.mark embedded
# in unpickled objects.
def marker(self):
stack = self.stack
mark = self.mark
k = len(stack)-1
while stack[k] is not mark: k = k-1
return k
def persistent_load(self, pid):
raise UnpicklingError("unsupported persistent id encountered")
dispatch = {}
def load_proto(self):
proto = self.read(1)[0]
if not 0 <= proto <= HIGHEST_PROTOCOL:
raise ValueError("unsupported pickle protocol: %d" % proto)
self.proto = proto
dispatch[PROTO[0]] = load_proto
def load_frame(self):
frame_size, = unpack('<Q', self.read(8))
if frame_size > sys.maxsize:
raise ValueError("frame size > sys.maxsize: %d" % frame_size)
self._unframer.load_frame(frame_size)
dispatch[FRAME[0]] = load_frame
def load_persid(self):
pid = self.readline()[:-1].decode("ascii")
self.append(self.persistent_load(pid))
dispatch[PERSID[0]] = load_persid
def load_binpersid(self):
pid = self.stack.pop()
self.append(self.persistent_load(pid))
dispatch[BINPERSID[0]] = load_binpersid
def load_none(self):
self.append(None)
dispatch[NONE[0]] = load_none
def load_false(self):
self.append(False)
dispatch[NEWFALSE[0]] = load_false
def load_true(self):
self.append(True)
dispatch[NEWTRUE[0]] = load_true
def load_int(self):
data = self.readline()
if data == FALSE[1:]:
val = False
elif data == TRUE[1:]:
val = True
else:
val = int(data, 0)
self.append(val)
dispatch[INT[0]] = load_int
def load_binint(self):
self.append(unpack('<i', self.read(4))[0])
dispatch[BININT[0]] = load_binint
def load_binint1(self):
self.append(self.read(1)[0])
dispatch[BININT1[0]] = load_binint1
def load_binint2(self):
self.append(unpack('<H', self.read(2))[0])
dispatch[BININT2[0]] = load_binint2
def load_long(self):
val = self.readline()[:-1]
if val and val[-1] == b'L'[0]:
val = val[:-1]
self.append(int(val, 0))
dispatch[LONG[0]] = load_long
def load_long1(self):
n = self.read(1)[0]
data = self.read(n)
self.append(decode_long(data))
dispatch[LONG1[0]] = load_long1
def load_long4(self):
n, = unpack('<i', self.read(4))
if n < 0:
# Corrupt or hostile pickle -- we never write one like this
raise UnpicklingError("LONG pickle has negative byte count")
data = self.read(n)
self.append(decode_long(data))
dispatch[LONG4[0]] = load_long4
def load_float(self):
self.append(float(self.readline()[:-1]))
dispatch[FLOAT[0]] = load_float
def load_binfloat(self):
self.append(unpack('>d', self.read(8))[0])
dispatch[BINFLOAT[0]] = load_binfloat
def _decode_string(self, value):
# Used to allow strings from Python 2 to be decoded either as
# bytes or Unicode strings. This should be used only with the
# STRING, BINSTRING and SHORT_BINSTRING opcodes.
if self.encoding == "bytes":
return value
else:
return value.decode(self.encoding, self.errors)
def load_string(self):
data = self.readline()[:-1]
# Strip outermost quotes
if len(data) >= 2 and data[0] == data[-1] and data[0] in b'"\'':
data = data[1:-1]
else:
raise UnpicklingError("the STRING opcode argument must be quoted")
self.append(self._decode_string(codecs.escape_decode(data)[0]))
dispatch[STRING[0]] = load_string
def load_binstring(self):
# Deprecated BINSTRING uses signed 32-bit length
len, = unpack('<i', self.read(4))
if len < 0:
raise UnpicklingError("BINSTRING pickle has negative byte count")
data = self.read(len)
self.append(self._decode_string(data))
dispatch[BINSTRING[0]] = load_binstring
def load_binbytes(self):
len, = unpack('<I', self.read(4))
if len > maxsize:
raise UnpicklingError("BINBYTES exceeds system's maximum size "
"of %d bytes" % maxsize)
self.append(self.read(len))
dispatch[BINBYTES[0]] = load_binbytes
def load_unicode(self):
self.append(str(self.readline()[:-1], 'raw-unicode-escape'))
dispatch[UNICODE[0]] = load_unicode
def load_binunicode(self):
len, = unpack('<I', self.read(4))
if len > maxsize:
raise UnpicklingError("BINUNICODE exceeds system's maximum size "
"of %d bytes" % maxsize)
self.append(str(self.read(len), 'utf-8', 'surrogatepass'))
dispatch[BINUNICODE[0]] = load_binunicode
def load_binunicode8(self):
len, = unpack('<Q', self.read(8))
if len > maxsize:
raise UnpicklingError("BINUNICODE8 exceeds system's maximum size "
"of %d bytes" % maxsize)
self.append(str(self.read(len), 'utf-8', 'surrogatepass'))
dispatch[BINUNICODE8[0]] = load_binunicode8
def load_short_binstring(self):
len = self.read(1)[0]
data = self.read(len)
self.append(self._decode_string(data))
dispatch[SHORT_BINSTRING[0]] = load_short_binstring
def load_short_binbytes(self):
len = self.read(1)[0]
self.append(self.read(len))
dispatch[SHORT_BINBYTES[0]] = load_short_binbytes
def load_short_binunicode(self):
len = self.read(1)[0]
self.append(str(self.read(len), 'utf-8', 'surrogatepass'))
dispatch[SHORT_BINUNICODE[0]] = load_short_binunicode
def load_tuple(self):
k = self.marker()
self.stack[k:] = [tuple(self.stack[k+1:])]
dispatch[TUPLE[0]] = load_tuple
def load_empty_tuple(self):
self.append(())
dispatch[EMPTY_TUPLE[0]] = load_empty_tuple
def load_tuple1(self):
self.stack[-1] = (self.stack[-1],)
dispatch[TUPLE1[0]] = load_tuple1
def load_tuple2(self):
self.stack[-2:] = [(self.stack[-2], self.stack[-1])]
dispatch[TUPLE2[0]] = load_tuple2
def load_tuple3(self):
self.stack[-3:] = [(self.stack[-3], self.stack[-2], self.stack[-1])]
dispatch[TUPLE3[0]] = load_tuple3
def load_empty_list(self):
self.append([])
dispatch[EMPTY_LIST[0]] = load_empty_list
def load_empty_dictionary(self):
self.append({})
dispatch[EMPTY_DICT[0]] = load_empty_dictionary
def load_empty_set(self):
self.append(set())
dispatch[EMPTY_SET[0]] = load_empty_set
def load_frozenset(self):
k = self.marker()
self.stack[k:] = [frozenset(self.stack[k+1:])]
dispatch[FROZENSET[0]] = load_frozenset
def load_list(self):
k = self.marker()
self.stack[k:] = [self.stack[k+1:]]
dispatch[LIST[0]] = load_list
def load_dict(self):
k = self.marker()
items = self.stack[k+1:]
d = {items[i]: items[i+1]
for i in range(0, len(items), 2)}
self.stack[k:] = [d]
dispatch[DICT[0]] = load_dict
    # INST and OBJ differ only in how they get a class object.  Doing the
    # rest in a common routine is not just sensible: when the two code paths
    # were separate, they diverged and grew different bugs.
# klass is the class to instantiate, and k points to the topmost mark
# object, following which are the arguments for klass.__init__.
def _instantiate(self, klass, k):
args = tuple(self.stack[k+1:])
del self.stack[k:]
if (args or not isinstance(klass, type) or
hasattr(klass, "__getinitargs__")):
try:
value = klass(*args)
            except TypeError as err:
                raise TypeError("in constructor for %s: %s" %
                                (klass.__name__,
                                 str(err))).with_traceback(sys.exc_info()[2])
else:
value = klass.__new__(klass)
self.append(value)
def load_inst(self):
module = self.readline()[:-1].decode("ascii")
name = self.readline()[:-1].decode("ascii")
klass = self.find_class(module, name)
self._instantiate(klass, self.marker())
dispatch[INST[0]] = load_inst
def load_obj(self):
# Stack is ... markobject classobject arg1 arg2 ...
k = self.marker()
klass = self.stack.pop(k+1)
self._instantiate(klass, k)
dispatch[OBJ[0]] = load_obj
def load_newobj(self):
args = self.stack.pop()
cls = self.stack.pop()
obj = cls.__new__(cls, *args)
self.append(obj)
dispatch[NEWOBJ[0]] = load_newobj
def load_newobj_ex(self):
kwargs = self.stack.pop()
args = self.stack.pop()
cls = self.stack.pop()
obj = cls.__new__(cls, *args, **kwargs)
self.append(obj)
dispatch[NEWOBJ_EX[0]] = load_newobj_ex
def load_global(self):
module = self.readline()[:-1].decode("utf-8")
name = self.readline()[:-1].decode("utf-8")
klass = self.find_class(module, name)
self.append(klass)
dispatch[GLOBAL[0]] = load_global
def load_stack_global(self):
name = self.stack.pop()
module = self.stack.pop()
if type(name) is not str or type(module) is not str:
raise UnpicklingError("STACK_GLOBAL requires str")
self.append(self.find_class(module, name))
dispatch[STACK_GLOBAL[0]] = load_stack_global
def load_ext1(self):
code = self.read(1)[0]
self.get_extension(code)
dispatch[EXT1[0]] = load_ext1
def load_ext2(self):
code, = unpack('<H', self.read(2))
self.get_extension(code)
dispatch[EXT2[0]] = load_ext2
def load_ext4(self):
code, = unpack('<i', self.read(4))
self.get_extension(code)
dispatch[EXT4[0]] = load_ext4
def get_extension(self, code):
nil = []
obj = _extension_cache.get(code, nil)
if obj is not nil:
self.append(obj)
return
key = _inverted_registry.get(code)
if not key:
if code <= 0: # note that 0 is forbidden
# Corrupt or hostile pickle.
raise UnpicklingError("EXT specifies code <= 0")
raise ValueError("unregistered extension code %d" % code)
obj = self.find_class(*key)
_extension_cache[code] = obj
self.append(obj)
def find_class(self, module, name):
# Subclasses may override this.
if self.proto < 3 and self.fix_imports:
if (module, name) in _compat_pickle.NAME_MAPPING:
module, name = _compat_pickle.NAME_MAPPING[(module, name)]
if module in _compat_pickle.IMPORT_MAPPING:
module = _compat_pickle.IMPORT_MAPPING[module]
__import__(module, level=0)
return _getattribute(sys.modules[module], name,
allow_qualname=self.proto >= 4)
def load_reduce(self):
stack = self.stack
args = stack.pop()
func = stack[-1]
        stack[-1] = func(*args)
dispatch[REDUCE[0]] = load_reduce
def load_pop(self):
del self.stack[-1]
dispatch[POP[0]] = load_pop
def load_pop_mark(self):
k = self.marker()
del self.stack[k:]
dispatch[POP_MARK[0]] = load_pop_mark
def load_dup(self):
self.append(self.stack[-1])
dispatch[DUP[0]] = load_dup
def load_get(self):
i = int(self.readline()[:-1])
self.append(self.memo[i])
dispatch[GET[0]] = load_get
def load_binget(self):
i = self.read(1)[0]
self.append(self.memo[i])
dispatch[BINGET[0]] = load_binget
def load_long_binget(self):
i, = unpack('<I', self.read(4))
self.append(self.memo[i])
dispatch[LONG_BINGET[0]] = load_long_binget
def load_put(self):
i = int(self.readline()[:-1])
if i < 0:
raise ValueError("negative PUT argument")
self.memo[i] = self.stack[-1]
dispatch[PUT[0]] = load_put
def load_binput(self):
i = self.read(1)[0]
if i < 0:
raise ValueError("negative BINPUT argument")
self.memo[i] = self.stack[-1]
dispatch[BINPUT[0]] = load_binput
def load_long_binput(self):
i, = unpack('<I', self.read(4))
        if i > maxsize:
            raise ValueError("LONG_BINPUT argument exceeds maxsize")
self.memo[i] = self.stack[-1]
dispatch[LONG_BINPUT[0]] = load_long_binput
def load_memoize(self):
memo = self.memo
memo[len(memo)] = self.stack[-1]
dispatch[MEMOIZE[0]] = load_memoize
def load_append(self):
stack = self.stack
value = stack.pop()
list = stack[-1]
list.append(value)
dispatch[APPEND[0]] = load_append
def load_appends(self):
stack = self.stack
mark = self.marker()
list_obj = stack[mark - 1]
items = stack[mark + 1:]
if isinstance(list_obj, list):
list_obj.extend(items)
else:
append = list_obj.append
for item in items:
append(item)
del stack[mark:]
dispatch[APPENDS[0]] = load_appends
def load_setitem(self):
stack = self.stack
value = stack.pop()
key = stack.pop()
dict = stack[-1]
dict[key] = value
dispatch[SETITEM[0]] = load_setitem
def load_setitems(self):
stack = self.stack
mark = self.marker()
dict = stack[mark - 1]
for i in range(mark + 1, len(stack), 2):
dict[stack[i]] = stack[i + 1]
del stack[mark:]
dispatch[SETITEMS[0]] = load_setitems
def load_additems(self):
stack = self.stack
mark = self.marker()
set_obj = stack[mark - 1]
items = stack[mark + 1:]
if isinstance(set_obj, set):
set_obj.update(items)
else:
add = set_obj.add
for item in items:
add(item)
del stack[mark:]
dispatch[ADDITEMS[0]] = load_additems
def load_build(self):
stack = self.stack
state = stack.pop()
inst = stack[-1]
setstate = getattr(inst, "__setstate__", None)
if setstate is not None:
setstate(state)
return
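        # No __setstate__: apply the default convention, where state may be
        # a (dict, slots_dict) pair as produced for classes using __slots__.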
slotstate = None
if isinstance(state, tuple) and len(state) == 2:
state, slotstate = state
if state:
inst_dict = inst.__dict__
intern = sys.intern
for k, v in state.items():
if type(k) is str:
inst_dict[intern(k)] = v
else:
inst_dict[k] = v
if slotstate:
for k, v in slotstate.items():
setattr(inst, k, v)
dispatch[BUILD[0]] = load_build
def load_mark(self):
self.append(self.mark)
dispatch[MARK[0]] = load_mark
def load_stop(self):
value = self.stack.pop()
raise _Stop(value)
dispatch[STOP[0]] = load_stop
# Shorthands
def _dump(obj, file, protocol=None, *, fix_imports=True):
_Pickler(file, protocol, fix_imports=fix_imports).dump(obj)
def _dumps(obj, protocol=None, *, fix_imports=True):
f = io.BytesIO()
_Pickler(f, protocol, fix_imports=fix_imports).dump(obj)
res = f.getvalue()
assert isinstance(res, bytes_types)
return res
def _load(file, *, fix_imports=True, encoding="ASCII", errors="strict"):
return _Unpickler(file, fix_imports=fix_imports,
encoding=encoding, errors=errors).load()
def _loads(s, *, fix_imports=True, encoding="ASCII", errors="strict"):
if isinstance(s, str):
raise TypeError("Can't load pickle from unicode string")
file = io.BytesIO(s)
return _Unpickler(file, fix_imports=fix_imports,
encoding=encoding, errors=errors).load()
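# Example (illustrative) round-trip through the pure-Python shorthands
# defined above:
#
#     payload = _dumps({"answer": 42}, protocol=2)
#     assert _loads(payload) == {"answer": 42}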
# Use the faster _pickle if possible
try:
from _pickle import (
PickleError,
PicklingError,
UnpicklingError,
Pickler,
Unpickler,
dump,
dumps,
load,
loads
)
except ImportError:
Pickler, Unpickler = _Pickler, _Unpickler
dump, dumps, load, loads = _dump, _dumps, _load, _loads
# Doctest
def _test():
import doctest
return doctest.testmod()
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser(
description='display contents of the pickle files')
parser.add_argument(
'pickle_file', type=argparse.FileType('br'),
nargs='*', help='the pickle file')
parser.add_argument(
'-t', '--test', action='store_true',
help='run self-test suite')
parser.add_argument(
'-v', action='store_true',
help='run verbosely; only affects self-test run')
args = parser.parse_args()
if args.test:
_test()
else:
if not args.pickle_file:
parser.print_help()
else:
import pprint
for f in args.pickle_file:
obj = load(f)
pprint.pprint(obj)
| lgpl-3.0 |
Shabbypenguin/Jellybean_kernel | tools/perf/scripts/python/Perf-Trace-Util/lib/Perf/Trace/SchedGui.py | 12980 | 5411 | # SchedGui.py - Python extension for perf script, basic GUI code for
# traces drawing and overview.
#
# Copyright (C) 2010 by Frederic Weisbecker <[email protected]>
#
# This software is distributed under the terms of the GNU General
# Public License ("GPL") version 2 as published by the Free Software
# Foundation.
try:
import wx
except ImportError:
raise ImportError, "You need to install the wxpython lib for this script"
class RootFrame(wx.Frame):
Y_OFFSET = 100
RECT_HEIGHT = 100
RECT_SPACE = 50
EVENT_MARKING_WIDTH = 5
def __init__(self, sched_tracer, title, parent = None, id = -1):
wx.Frame.__init__(self, parent, id, title)
(self.screen_width, self.screen_height) = wx.GetDisplaySize()
self.screen_width -= 10
self.screen_height -= 10
self.zoom = 0.5
self.scroll_scale = 20
self.sched_tracer = sched_tracer
self.sched_tracer.set_root_win(self)
(self.ts_start, self.ts_end) = sched_tracer.interval()
self.update_width_virtual()
self.nr_rects = sched_tracer.nr_rectangles() + 1
self.height_virtual = RootFrame.Y_OFFSET + (self.nr_rects * (RootFrame.RECT_HEIGHT + RootFrame.RECT_SPACE))
# whole window panel
self.panel = wx.Panel(self, size=(self.screen_width, self.screen_height))
# scrollable container
self.scroll = wx.ScrolledWindow(self.panel)
self.scroll.SetScrollbars(self.scroll_scale, self.scroll_scale, self.width_virtual / self.scroll_scale, self.height_virtual / self.scroll_scale)
self.scroll.EnableScrolling(True, True)
self.scroll.SetFocus()
# scrollable drawing area
self.scroll_panel = wx.Panel(self.scroll, size=(self.screen_width - 15, self.screen_height / 2))
self.scroll_panel.Bind(wx.EVT_PAINT, self.on_paint)
self.scroll_panel.Bind(wx.EVT_KEY_DOWN, self.on_key_press)
self.scroll_panel.Bind(wx.EVT_LEFT_DOWN, self.on_mouse_down)
self.scroll.Bind(wx.EVT_PAINT, self.on_paint)
self.scroll.Bind(wx.EVT_KEY_DOWN, self.on_key_press)
self.scroll.Bind(wx.EVT_LEFT_DOWN, self.on_mouse_down)
self.scroll.Fit()
self.Fit()
self.scroll_panel.SetDimensions(-1, -1, self.width_virtual, self.height_virtual, wx.SIZE_USE_EXISTING)
self.txt = None
self.Show(True)
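	# Time<->pixel helpers: trace timestamps are in microseconds, so at
	# the current zoom factor one millisecond of trace maps to 'zoom'
	# pixels on screen.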
def us_to_px(self, val):
return val / (10 ** 3) * self.zoom
def px_to_us(self, val):
return (val / self.zoom) * (10 ** 3)
def scroll_start(self):
(x, y) = self.scroll.GetViewStart()
return (x * self.scroll_scale, y * self.scroll_scale)
def scroll_start_us(self):
(x, y) = self.scroll_start()
return self.px_to_us(x)
def paint_rectangle_zone(self, nr, color, top_color, start, end):
offset_px = self.us_to_px(start - self.ts_start)
width_px = self.us_to_px(end - self.ts_start)
offset_py = RootFrame.Y_OFFSET + (nr * (RootFrame.RECT_HEIGHT + RootFrame.RECT_SPACE))
width_py = RootFrame.RECT_HEIGHT
dc = self.dc
if top_color is not None:
(r, g, b) = top_color
top_color = wx.Colour(r, g, b)
brush = wx.Brush(top_color, wx.SOLID)
dc.SetBrush(brush)
dc.DrawRectangle(offset_px, offset_py, width_px, RootFrame.EVENT_MARKING_WIDTH)
width_py -= RootFrame.EVENT_MARKING_WIDTH
offset_py += RootFrame.EVENT_MARKING_WIDTH
		(r, g, b) = color
color = wx.Colour(r, g, b)
brush = wx.Brush(color, wx.SOLID)
dc.SetBrush(brush)
dc.DrawRectangle(offset_px, offset_py, width_px, width_py)
def update_rectangles(self, dc, start, end):
start += self.ts_start
end += self.ts_start
self.sched_tracer.fill_zone(start, end)
def on_paint(self, event):
dc = wx.PaintDC(self.scroll_panel)
self.dc = dc
width = min(self.width_virtual, self.screen_width)
(x, y) = self.scroll_start()
start = self.px_to_us(x)
end = self.px_to_us(x + width)
self.update_rectangles(dc, start, end)
def rect_from_ypixel(self, y):
y -= RootFrame.Y_OFFSET
rect = y / (RootFrame.RECT_HEIGHT + RootFrame.RECT_SPACE)
height = y % (RootFrame.RECT_HEIGHT + RootFrame.RECT_SPACE)
if rect < 0 or rect > self.nr_rects - 1 or height > RootFrame.RECT_HEIGHT:
return -1
return rect
def update_summary(self, txt):
if self.txt:
self.txt.Destroy()
self.txt = wx.StaticText(self.panel, -1, txt, (0, (self.screen_height / 2) + 50))
def on_mouse_down(self, event):
(x, y) = event.GetPositionTuple()
rect = self.rect_from_ypixel(y)
if rect == -1:
return
t = self.px_to_us(x) + self.ts_start
self.sched_tracer.mouse_down(rect, t)
def update_width_virtual(self):
self.width_virtual = self.us_to_px(self.ts_end - self.ts_start)
def __zoom(self, x):
self.update_width_virtual()
(xpos, ypos) = self.scroll.GetViewStart()
xpos = self.us_to_px(x) / self.scroll_scale
self.scroll.SetScrollbars(self.scroll_scale, self.scroll_scale, self.width_virtual / self.scroll_scale, self.height_virtual / self.scroll_scale, xpos, ypos)
self.Refresh()
def zoom_in(self):
x = self.scroll_start_us()
self.zoom *= 2
self.__zoom(x)
def zoom_out(self):
x = self.scroll_start_us()
self.zoom /= 2
self.__zoom(x)
def on_key_press(self, event):
key = event.GetRawKeyCode()
if key == ord("+"):
self.zoom_in()
return
if key == ord("-"):
self.zoom_out()
return
key = event.GetKeyCode()
(x, y) = self.scroll.GetViewStart()
if key == wx.WXK_RIGHT:
self.scroll.Scroll(x + 1, y)
elif key == wx.WXK_LEFT:
self.scroll.Scroll(x - 1, y)
elif key == wx.WXK_DOWN:
self.scroll.Scroll(x, y + 1)
elif key == wx.WXK_UP:
self.scroll.Scroll(x, y - 1)
| gpl-2.0 |
rajashreer7/autotest-client-tests | linux-tools/libpwquality/libpwquality.py | 4 | 1206 | #!/bin/python
import os, subprocess
import logging
from autotest.client import test
from autotest.client.shared import error
class libpwquality(test.test):
"""
Autotest module for testing basic functionality
of libpwquality
@author Tejaswini Sambamurthy <tejaswin.linux.vnet.ibm.com>
"""
version = 1
nfail = 0
path = ''
def initialize(self):
"""
Sets the overall failure counter for the test.
"""
self.nfail = 0
        logging.info('\n Test initialized successfully')
def run_once(self, test_path=''):
"""
Trigger test run
"""
try:
os.environ["LTPBIN"] = "%s/shared" %(test_path)
ret_val = subprocess.call(test_path + '/libpwquality' + '/libpwquality.sh', shell=True)
if ret_val != 0:
self.nfail += 1
except error.CmdError, e:
self.nfail += 1
logging.error("Test Failed: %s", e)
def postprocess(self):
if self.nfail != 0:
logging.info('\n nfails is non-zero')
raise error.TestError('\nTest failed')
else:
logging.info('\n Test completed successfully ')
| gpl-2.0 |
pfouque/deezer-python | deezer/tests/test_resources.py | 1 | 9053 | # -*- coding: utf-8 -*-
import json
import unittest
from types import GeneratorType
import deezer
from mock import patch
from .mocked_methods import fake_urlopen
class TestResources(unittest.TestCase):
def setUp(self):
self.patcher = patch('deezer.client.urlopen', fake_urlopen)
self.patcher.start()
def tearDown(self):
self.patcher.stop()
def test_resource_dict(self):
"""
Test that resource can be converted to dict
"""
client = deezer.Client()
response = fake_urlopen(client.object_url('track', 3135556))
resp_str = response.read().decode('utf-8')
response.close()
data = json.loads(resp_str)
resource = deezer.resources.Resource(client, data)
self.assertEqual(resource.asdict(), data)
def test_resource_relation(self):
"""
Test passing parent object when using get_relation
"""
client = deezer.Client()
album = client.get_album(302127)
tracks = album.get_tracks()
self.assertTrue(tracks[0].album is album)
def test_album_attributes(self):
"""
Test album resource
"""
client = deezer.Client()
album = client.get_album(302127)
self.assertTrue(hasattr(album, 'title'))
self.assertEqual(repr(album), '<Album: Discovery>')
artist = album.get_artist()
self.assertIsInstance(artist, deezer.resources.Artist)
self.assertEqual(repr(artist), '<Artist: Daft Punk>')
def test_album_tracks(self):
"""
Test tracks method of album resource
"""
client = deezer.Client()
album = client.get_album(302127)
tracks = album.get_tracks()
self.assertIsInstance(tracks, list)
track = tracks[0]
self.assertIsInstance(track, deezer.resources.Track)
self.assertEqual(repr(track), '<Track: One More Time>')
self.assertEqual(type(album.iter_tracks()), GeneratorType)
track = list(album.iter_tracks())[0]
self.assertIsInstance(track, deezer.resources.Track)
def test_artist_attributes(self):
"""
Test artist resource
"""
client = deezer.Client()
artist = client.get_artist(27)
self.assertTrue(hasattr(artist, 'name'))
self.assertIsInstance(artist, deezer.resources.Artist)
self.assertEqual(repr(artist), '<Artist: Daft Punk>')
def test_artist_albums(self):
"""
Test albums method of artist resource
"""
client = deezer.Client()
artist = client.get_artist(27)
albums = artist.get_albums()
self.assertIsInstance(albums, list)
album = albums[0]
self.assertIsInstance(album, deezer.resources.Album)
self.assertEqual(repr(album),
'<Album: Human After All (Remixes) (Remixes)>')
self.assertEqual(type(artist.iter_albums()), GeneratorType)
def test_artist_top(self):
"""
Test top method of artist resource
"""
client = deezer.Client()
artist = client.get_artist(27)
tracks = artist.get_top()
self.assertIsInstance(tracks, list)
track = tracks[0]
self.assertIsInstance(track, deezer.resources.Track)
self.assertEqual(repr(track), '<Track: Get Lucky (Radio Edit)>')
def test_artist_radio(self):
"""
Test radio method of artist resource
"""
client = deezer.Client()
artist = client.get_artist(27)
tracks = artist.get_radio()
self.assertIsInstance(tracks, list)
track = tracks[0]
self.assertIsInstance(track, deezer.resources.Track)
self.assertEqual(repr(track), '<Track: Lose Yourself to Dance>')
def test_artist_related(self):
"""
Test related method of artist resource
"""
client = deezer.Client()
artist = client.get_artist(27)
artists = artist.get_related()
self.assertIsInstance(artists, list)
artist = artists[0]
self.assertIsInstance(artist, deezer.resources.Artist)
self.assertEqual(repr(artist), '<Artist: Justice>')
self.assertEqual(type(artist.iter_related()), GeneratorType)
def test_track_attributes(self):
"""
Test track resource
"""
client = deezer.Client()
track = client.get_track(3135556)
artist = track.get_artist()
album = track.get_album()
self.assertTrue(hasattr(track, 'title'))
self.assertIsInstance(track, deezer.resources.Track)
self.assertIsInstance(artist, deezer.resources.Artist)
self.assertIsInstance(album, deezer.resources.Album)
self.assertEqual(repr(track), '<Track: Harder Better Faster Stronger>')
self.assertEqual(repr(artist), '<Artist: Daft Punk>')
self.assertEqual(repr(album), '<Album: Discovery>')
def test_radio_attributes(self):
"""
Test radio resource
"""
client = deezer.Client()
radio = client.get_radio(23261)
self.assertTrue(hasattr(radio, 'title'))
self.assertIsInstance(radio, deezer.resources.Radio)
self.assertEqual(repr(radio), '<Radio: Telegraph Classical>')
def test_radio_tracks(self):
"""
Test tracks method of radio resource
"""
client = deezer.Client()
radio = client.get_radio(23261)
tracks = radio.get_tracks()
self.assertIsInstance(tracks, list)
track = tracks[2]
self.assertIsInstance(track, deezer.resources.Track)
self.assertEqual(repr(track), '<Track: Schumann: Kinderszenen, Op.15 - 11. Fürchtenmachen>')
self.assertEqual(type(radio.iter_tracks()), GeneratorType)
def test_genre_attributes(self):
"""
Test genre resource
"""
client = deezer.Client()
genre = client.get_genre(106)
self.assertTrue(hasattr(genre, 'name'))
self.assertIsInstance(genre, deezer.resources.Genre)
self.assertEqual(repr(genre), '<Genre: Electro>')
def test_genre_artists(self):
"""
Test artists method of genre resource
"""
client = deezer.Client()
genre = client.get_genre(106)
artists = genre.get_artists()
self.assertIsInstance(artists, list)
artist = artists[0]
self.assertIsInstance(artist, deezer.resources.Artist)
self.assertEqual(repr(artist), '<Artist: Calvin Harris>')
self.assertEqual(type(genre.iter_artists()), GeneratorType)
def test_genre_radios(self):
"""
Test radios method of genre resource
"""
client = deezer.Client()
genre = client.get_genre(106)
radios = genre.get_radios()
self.assertIsInstance(radios, list)
radio = radios[0]
self.assertIsInstance(radio, deezer.resources.Radio)
self.assertEqual(repr(radio), '<Radio: Techno/House>')
self.assertEqual(type(genre.iter_radios()), GeneratorType)
def test_chart_tracks(self):
"""
Test tracks method of chart resource
"""
client = deezer.Client()
chart = client.get_chart()
tracks = chart.get_tracks()
self.assertIsInstance(tracks, list)
track = tracks[0]
self.assertIsInstance(track, deezer.resources.Track)
self.assertEqual(repr(track), '<Track: Starboy>')
self.assertEqual(type(chart.iter_tracks()), GeneratorType)
def test_chart_artists(self):
"""
Test artists method of chart resource
"""
client = deezer.Client()
chart = client.get_chart()
artists = chart.get_artists()
self.assertIsInstance(artists, list)
artist = artists[0]
self.assertIsInstance(artist, deezer.resources.Artist)
self.assertEqual(repr(artist), '<Artist: Pnl>')
self.assertEqual(type(chart.iter_artists()), GeneratorType)
def test_chart_albums(self):
"""
Test albums method of chart resource
"""
client = deezer.Client()
chart = client.get_chart()
albums = chart.get_albums()
self.assertIsInstance(albums, list)
album = albums[0]
self.assertIsInstance(album, deezer.resources.Album)
self.assertEqual(repr(album),
"<Album: Where Is l'album de Gradur>")
self.assertEqual(type(chart.iter_albums()), GeneratorType)
def test_chart_playlists(self):
"""
Test playlists method of chart resource
"""
client = deezer.Client()
chart = client.get_chart()
playlists = chart.get_playlists()
self.assertIsInstance(playlists, list)
playlist = playlists[0]
self.assertIsInstance(playlist, deezer.resources.Playlist)
self.assertEqual(repr(playlist),
"<Playlist: Top France>")
self.assertEqual(type(chart.iter_playlists()), GeneratorType)
| mit |
gsnedders/presto-testo | wpt/websockets/autobahn/oberstet-Autobahn-643d2ee/demo/broadcast/broadcast_server.py | 4 | 2351 | ###############################################################################
##
## Copyright 2011 Tavendo GmbH
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
##
###############################################################################
import sys
from twisted.internet import reactor
from twisted.python import log
from autobahn.websocket import WebSocketServerFactory, WebSocketServerProtocol, listenWS
class BroadcastServerProtocol(WebSocketServerProtocol):
def onOpen(self):
self.factory.register(self)
def onMessage(self, msg, binary):
if not binary:
self.factory.broadcast("received message '%s' from %s" % (msg, self.peerstr))
def connectionLost(self, reason):
WebSocketServerProtocol.connectionLost(self, reason)
self.factory.unregister(self)
class BroadcastServerFactory(WebSocketServerFactory):
protocol = BroadcastServerProtocol
def __init__(self, url):
WebSocketServerFactory.__init__(self, url)
self.clients = []
self.tickcount = 0
self.tick()
def tick(self):
self.tickcount += 1
self.broadcast("tick %d" % self.tickcount)
reactor.callLater(1, self.tick)
def register(self, client):
if not client in self.clients:
print "registered client " + client.peerstr
self.clients.append(client)
def unregister(self, client):
if client in self.clients:
print "unregistered client " + client.peerstr
self.clients.remove(client)
def broadcast(self, msg):
print "broadcasting message '%s' .." % msg
for c in self.clients:
print "send to " + c.peerstr
c.sendMessage(msg)
if __name__ == '__main__':
log.startLogging(sys.stdout)
factory = BroadcastServerFactory("ws://localhost:9000")
listenWS(factory)
reactor.run()
| bsd-3-clause |
isabellemao/Hello-World | python/Junior2015CCCJ4.py | 1 | 1278 | #Problem J4: Arrival Time
departure_time = input()
split_departure = list(departure_time) #The time of departure, split into a list.
#Split the list
departure_hour = split_departure[0:2]
departure_minute = split_departure[3:5]
#Change the split list to integers.
departure_hour = int("".join(departure_hour))
departure_minute = int("".join(departure_minute))
#The start and end of the rush hours
rh_start_1 = 7
rh_end_1 = 10
rh_start_2 = 15
rh_end_2 = 19
#Set the current time
hour = departure_hour
minute = departure_minute
#For the 120 minutes it usually takes Fiona to commute
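#Each loop iteration is one minute of the usual 120-minute commute; during
#rush hour, one commute minute takes two wall-clock minutes.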
for counter in range(1, 121):
#If it's currently rush hour
if hour >= rh_start_1 and hour < rh_end_1 or hour >= rh_start_2 and hour < rh_end_2:
#Twice as slow if rush hour
minute += 2
else:
#Normal speed if normal time
minute += 1
  if minute >= 60:
    #Carry the overflow into the next hour (keep any extra minute).
    minute -= 60
    hour += 1
if hour == 24:
hour = 0
#Add fake zeroes if required.
if hour < 10:
hour = str(hour)
hour = "0" + hour
else:
hour = str(hour)
if minute < 10:
minute = str(minute)
minute = "0" + minute
else:
minute = str(minute)
#Make a valid output.
output = hour , ":" , minute
output = "".join(output)
print(output)
| apache-2.0 |
JosephCastro/selenium | py/selenium/webdriver/support/wait.py | 81 | 4070 | # Licensed to the Software Freedom Conservancy (SFC) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The SFC licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import time
from selenium.common.exceptions import NoSuchElementException
from selenium.common.exceptions import TimeoutException
POLL_FREQUENCY = 0.5  # How long to sleep in between calls to the method
IGNORED_EXCEPTIONS = (NoSuchElementException,) # exceptions ignored during calls to the method
class WebDriverWait(object):
def __init__(self, driver, timeout, poll_frequency=POLL_FREQUENCY, ignored_exceptions=None):
"""Constructor, takes a WebDriver instance and timeout in seconds.
:Args:
- driver - Instance of WebDriver (Ie, Firefox, Chrome or Remote)
- timeout - Number of seconds before timing out
- poll_frequency - sleep interval between calls
By default, it is 0.5 second.
- ignored_exceptions - iterable structure of exception classes ignored during calls.
By default, it contains NoSuchElementException only.
Example:
from selenium.webdriver.support.ui import WebDriverWait \n
element = WebDriverWait(driver, 10).until(lambda x: x.find_element_by_id("someId")) \n
is_disappeared = WebDriverWait(driver, 30, 1, (ElementNotVisibleException)).\ \n
until_not(lambda x: x.find_element_by_id("someId").is_displayed())
"""
self._driver = driver
self._timeout = timeout
self._poll = poll_frequency
# avoid the divide by zero
if self._poll == 0:
self._poll = POLL_FREQUENCY
exceptions = list(IGNORED_EXCEPTIONS)
if ignored_exceptions is not None:
try:
exceptions.extend(iter(ignored_exceptions))
except TypeError: # ignored_exceptions is not iterable
exceptions.append(ignored_exceptions)
self._ignored_exceptions = tuple(exceptions)
def __repr__(self):
return '<{0.__module__}.{0.__name__} (session="{1}")>'.format(
type(self), self._driver.session_id)
def until(self, method, message=''):
"""Calls the method provided with the driver as an argument until the \
return value is not False."""
screen = None
stacktrace = None
end_time = time.time() + self._timeout
while True:
try:
value = method(self._driver)
if value:
return value
except self._ignored_exceptions as exc:
screen = getattr(exc, 'screen', None)
stacktrace = getattr(exc, 'stacktrace', None)
time.sleep(self._poll)
if time.time() > end_time:
break
raise TimeoutException(message, screen, stacktrace)
def until_not(self, method, message=''):
"""Calls the method provided with the driver as an argument until the \
return value is False."""
end_time = time.time() + self._timeout
while True:
try:
value = method(self._driver)
if not value:
return value
except self._ignored_exceptions:
return True
time.sleep(self._poll)
if time.time() > end_time:
break
raise TimeoutException(message)
| apache-2.0 |
FUSED-Wind/fusedwind | src/fusedwind/lib/cubicspline.py | 2 | 2234 |
import numpy as np
from scipy.linalg import solve_banded
from fusedwind.lib.utilities import _checkIfFloat
class NaturalCubicSpline(object):
"""
class implementation of utilities.cubic_with_deriv
"""
def __init__(self, xp, yp):
if np.any(np.diff(xp) < 0):
raise TypeError('xp must be in ascending order')
# n = len(x)
self.m = len(xp)
xk = xp[1:-1]
yk = yp[1:-1]
xkp = xp[2:]
ykp = yp[2:]
xkm = xp[:-2]
ykm = yp[:-2]
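        # The interior second derivatives fpp solve the standard natural
        # cubic spline tridiagonal system:
        #     l[k]*fpp[k-1] + d[k]*fpp[k] + u[k]*fpp[k+1] = b[k]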
b = (ykp - yk)/(xkp - xk) - (yk - ykm)/(xk - xkm)
l = (xk - xkm)/6.0
d = (xkp - xkm)/3.0
u = (xkp - xk)/6.0
# u[0] = 0.0 # non-existent entries
# l[-1] = 0.0
# solve for second derivatives
        fpp = solve_banded((1, 1), np.array([u, d, l]), b)
self.fpp = np.concatenate([[0.0], fpp, [0.0]]) # natural spline
self.xp = xp
self.yp = yp
def __call__(self, x, deriv=False):
x, n = _checkIfFloat(x)
y = np.zeros(n)
dydx = np.zeros(n)
dydxp = np.zeros((n, self.m))
dydyp = np.zeros((n, self.m))
# find location in vector
for i in range(n):
if x[i] < self.xp[0]:
j = 0
elif x[i] > self.xp[-1]:
j = self.m-2
else:
for j in range(self.m-1):
if self.xp[j+1] > x[i]:
break
x1 = self.xp[j]
y1 = self.yp[j]
x2 = self.xp[j+1]
y2 = self.yp[j+1]
A = (x2 - x[i])/(x2 - x1)
B = 1 - A
C = 1.0/6*(A**3 - A)*(x2 - x1)**2
D = 1.0/6*(B**3 - B)*(x2 - x1)**2
y[i] = A * y1 + B * y2 + C * self.fpp[j] + D * self.fpp[j+1]
dAdx = -1.0/(x2 - x1)
dBdx = -dAdx
dCdx = 1.0/6 * (3 * A**2 - 1) * dAdx * (x2 - x1)**2
dDdx = 1.0/6 * (3 * B**2 - 1) * dBdx * (x2 - x1)**2
dydx[i] = dAdx * y1 + dBdx * y2 + dCdx * self.fpp[j] + dDdx * self.fpp[j+1]
if n == 1:
y = y[0]
dydx = dydx[0]
if deriv:
return y, dydx
else:
return y
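if __name__ == "__main__":
    # Minimal usage sketch (illustrative only): fit a natural spline through
    # samples of sin(x) and evaluate the value and first derivative at two
    # points. Assumes numpy/scipy and the fusedwind package are importable.
    xs = np.linspace(0.0, 2.0 * np.pi, 10)
    spline = NaturalCubicSpline(xs, np.sin(xs))
    y, dydx = spline(np.array([0.5, 1.5]), deriv=True)
    print(y, dydx)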
| apache-2.0 |
googleapis/python-pubsublite | google/cloud/pubsublite_v1/types/__init__.py | 1 | 4702 | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from .admin import (
CreateReservationRequest,
CreateSubscriptionRequest,
CreateTopicRequest,
DeleteReservationRequest,
DeleteSubscriptionRequest,
DeleteTopicRequest,
GetReservationRequest,
GetSubscriptionRequest,
GetTopicPartitionsRequest,
GetTopicRequest,
ListReservationsRequest,
ListReservationsResponse,
ListReservationTopicsRequest,
ListReservationTopicsResponse,
ListSubscriptionsRequest,
ListSubscriptionsResponse,
ListTopicsRequest,
ListTopicsResponse,
ListTopicSubscriptionsRequest,
ListTopicSubscriptionsResponse,
OperationMetadata,
SeekSubscriptionRequest,
SeekSubscriptionResponse,
TopicPartitions,
UpdateReservationRequest,
UpdateSubscriptionRequest,
UpdateTopicRequest,
)
from .common import (
AttributeValues,
Cursor,
PubSubMessage,
Reservation,
SequencedMessage,
Subscription,
TimeTarget,
Topic,
)
from .cursor import (
CommitCursorRequest,
CommitCursorResponse,
InitialCommitCursorRequest,
InitialCommitCursorResponse,
ListPartitionCursorsRequest,
ListPartitionCursorsResponse,
PartitionCursor,
SequencedCommitCursorRequest,
SequencedCommitCursorResponse,
StreamingCommitCursorRequest,
StreamingCommitCursorResponse,
)
from .publisher import (
InitialPublishRequest,
InitialPublishResponse,
MessagePublishRequest,
MessagePublishResponse,
PublishRequest,
PublishResponse,
)
from .subscriber import (
FlowControlRequest,
InitialPartitionAssignmentRequest,
InitialSubscribeRequest,
InitialSubscribeResponse,
MessageResponse,
PartitionAssignment,
PartitionAssignmentAck,
PartitionAssignmentRequest,
SeekRequest,
SeekResponse,
SubscribeRequest,
SubscribeResponse,
)
from .topic_stats import (
ComputeHeadCursorRequest,
ComputeHeadCursorResponse,
ComputeMessageStatsRequest,
ComputeMessageStatsResponse,
ComputeTimeCursorRequest,
ComputeTimeCursorResponse,
)
__all__ = (
"CreateReservationRequest",
"CreateSubscriptionRequest",
"CreateTopicRequest",
"DeleteReservationRequest",
"DeleteSubscriptionRequest",
"DeleteTopicRequest",
"GetReservationRequest",
"GetSubscriptionRequest",
"GetTopicPartitionsRequest",
"GetTopicRequest",
"ListReservationsRequest",
"ListReservationsResponse",
"ListReservationTopicsRequest",
"ListReservationTopicsResponse",
"ListSubscriptionsRequest",
"ListSubscriptionsResponse",
"ListTopicsRequest",
"ListTopicsResponse",
"ListTopicSubscriptionsRequest",
"ListTopicSubscriptionsResponse",
"OperationMetadata",
"SeekSubscriptionRequest",
"SeekSubscriptionResponse",
"TopicPartitions",
"UpdateReservationRequest",
"UpdateSubscriptionRequest",
"UpdateTopicRequest",
"AttributeValues",
"Cursor",
"PubSubMessage",
"Reservation",
"SequencedMessage",
"Subscription",
"TimeTarget",
"Topic",
"CommitCursorRequest",
"CommitCursorResponse",
"InitialCommitCursorRequest",
"InitialCommitCursorResponse",
"ListPartitionCursorsRequest",
"ListPartitionCursorsResponse",
"PartitionCursor",
"SequencedCommitCursorRequest",
"SequencedCommitCursorResponse",
"StreamingCommitCursorRequest",
"StreamingCommitCursorResponse",
"InitialPublishRequest",
"InitialPublishResponse",
"MessagePublishRequest",
"MessagePublishResponse",
"PublishRequest",
"PublishResponse",
"FlowControlRequest",
"InitialPartitionAssignmentRequest",
"InitialSubscribeRequest",
"InitialSubscribeResponse",
"MessageResponse",
"PartitionAssignment",
"PartitionAssignmentAck",
"PartitionAssignmentRequest",
"SeekRequest",
"SeekResponse",
"SubscribeRequest",
"SubscribeResponse",
"ComputeHeadCursorRequest",
"ComputeHeadCursorResponse",
"ComputeMessageStatsRequest",
"ComputeMessageStatsResponse",
"ComputeTimeCursorRequest",
"ComputeTimeCursorResponse",
)
| apache-2.0 |
jounex/hue | desktop/core/ext-py/Mako-0.8.1/test/test_tgplugin.py | 36 | 1260 | import unittest
from mako.ext.turbogears import TGPlugin
from test.util import flatten_result, result_lines
from test import TemplateTest, template_base
tl = TGPlugin(options=dict(directories=[template_base]), extension='html')
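# TGPlugin templates can be addressed by path ('/subdir/index.html'), by
# dotted name ('subdir.index'), or compiled from an inline string passed as
# the second argument (see the tests below).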
class TestTGPlugin(TemplateTest):
def test_basic(self):
t = tl.load_template('/index.html')
assert result_lines(t.render()) == [
"this is index"
]
def test_subdir(self):
t = tl.load_template('/subdir/index.html')
assert result_lines(t.render()) == [
"this is sub index",
"this is include 2"
]
assert tl.load_template('/subdir/index.html').module_id == '_subdir_index_html'
def test_basic_dot(self):
t = tl.load_template('index')
assert result_lines(t.render()) == [
"this is index"
]
def test_subdir_dot(self):
t = tl.load_template('subdir.index')
assert result_lines(t.render()) == [
"this is sub index",
"this is include 2"
]
assert tl.load_template('subdir.index').module_id == '_subdir_index_html'
def test_string(self):
t = tl.load_template('foo', "hello world")
assert t.render() == "hello world"
| apache-2.0 |
gisce/OCB | openerp/report/render/rml2pdf/__init__.py | 75 | 1135 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from trml2pdf import parseString, parseNode
#.apidoc title: RML to PDF engine
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
robosafe/testbench_vRAL_hydro | bert2_simulator/sim_step_monitors/assertion_monitor_manager.py | 1 | 2830 | #!/usr/bin/env python
"""
Assertion Monitor Manager
Created by David Western, June 2015.
"""
from coverage import coverage
import imp
import rospkg
import rospy
from std_msgs.msg import UInt64
from std_srvs.srv import Empty
import sys
class AMM:
def __init__(self,AM_list_file,trace_label):
# Read list of assertion monitors to run (from file?):
rospack = rospkg.RosPack()
path = rospack.get_path('bert2_simulator')
path = path+'/sim_step_monitors/'
print("--- Assertion monitors to run:")
self.AM_names = [line.rstrip('\n') for line in open(path+AM_list_file)]
print(self.AM_names)
# Instantiate assertion monitors:
self.AMs = [] # Initialise empty list of AMs.
for idx, class_name in enumerate(self.AM_names):
print(class_name)
print path+class_name+'.py'
module = imp.load_source(class_name, path+class_name+'.py')
#module = __import__(path+class_name) # N.B. These two lines imply that we
class_ = getattr(module, class_name) # require the AM to be defined in a
# file with the same name as the class.
self.AMs.append(class_(trace_label))
# Check AM has the mandatory attributes:
mand_attrs = ['step']
            for attr in mand_attrs:
                if not hasattr(self.AMs[idx], attr):
                    rospy.logerr("Assertion monitor specification '%s' does not define the attribute \
                             '%s', which is required by AMM (assertion_monitor_manager.py). \
                             Does %s inherit from an assertion monitor base class?",
                             class_name, attr, class_name)
# Get service
self.unpause_gazebo = rospy.ServiceProxy('gazebo/unpause_physics',Empty)
# Subscriber to triggers, which come on each sim step:
rospy.Subscriber("AM_trigger", UInt64, self.trigger_AMs)
def trigger_AMs(self,data):
iteration = data.data
sim_time = rospy.get_time()
# Step all assertion monitors:
for idx, AM in enumerate(self.AMs):
AM.step(iteration,sim_time)
# Release gazebo now we've finished the checks for this step:
#print "unpausing"
#self.unpause_gazebo()
# Problem: This line prevents Gazebo's pause button from working (unless you
# get a lucky click).
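# A minimal assertion-monitor skeleton, for illustration only (the module
# file and the class it defines must share the same name, and 'step' is the
# attribute the manager checks for):
#
#     class MyMonitor(object):
#         def __init__(self, trace_label):
#             self.trace_label = trace_label
#         def step(self, iteration, sim_time):
#             pass  # evaluate the assertion for this simulation step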
if __name__ == '__main__':
try:
if len(sys.argv) < 3:
print("usage: rosrun [package_name] assertion_monitor_manager.py AM_list_file.txt report_file_name")
else:
rospy.init_node('AMM')
AMMInst = AMM(sys.argv[1],sys.argv[2])
rospy.spin()
    except rospy.ROSInterruptException: #to stop the code when pressing Ctrl+C
pass
| gpl-3.0 |
ContinuumIO/chaco | examples/demo/xray_plot.py | 3 | 5751 | """
Implementation of a plot using a custom overlay and tool
"""
from __future__ import with_statement
import numpy
from traits.api import HasTraits, Instance, Enum
from traitsui.api import View, Item
from enable.api import ComponentEditor
from chaco.api import Plot, ArrayPlotData, AbstractOverlay
from enable.api import BaseTool
from enable.markers import DOT_MARKER, DotMarker
class BoxSelectTool(BaseTool):
""" Tool for selecting all points within a box
There are 2 states for this tool, normal and selecting. While the
left mouse button is down the metadata on the datasources will be
updated with the current selected bounds.
Note that the tool does not actually store the selected point, but the
bounds of the box.
"""
event_state = Enum("normal", "selecting")
def normal_left_down(self, event):
self.event_state = "selecting"
self.selecting_mouse_move(event)
def selecting_left_up(self, event):
self.event_state = "normal"
def selecting_mouse_move(self, event):
x1, y1 = self.map_to_data(event.x - 25, event.y - 25)
x2, y2 = self.map_to_data(event.x + 25, event.y + 25)
index_datasource = self.component.index
index_datasource.metadata['selections'] = (x1, x2)
value_datasource = self.component.value
value_datasource.metadata['selections'] = (y1, y2)
self.component.request_redraw()
def map_to_data(self, x, y):
""" Returns the data space coordinates of the given x and y.
Takes into account orientation of the plot and the axis setting.
"""
plot = self.component
if plot.orientation == "h":
index = plot.x_mapper.map_data(x)
value = plot.y_mapper.map_data(y)
else:
index = plot.y_mapper.map_data(y)
value = plot.x_mapper.map_data(x)
return index, value
class XRayOverlay(AbstractOverlay):
""" Overlay which draws scatter markers on top of plot data points.
This overlay should be combined with a tool which updates the
    datasources' metadata with selection bounds.
"""
marker = DotMarker()
def overlay(self, component, gc, view_bounds=None, mode='normal'):
x_range = self._get_selection_index_screen_range()
y_range = self._get_selection_value_screen_range()
if len(x_range) == 0:
return
x1, x2 = x_range
y1, y2 = y_range
with gc:
gc.set_alpha(0.8)
gc.set_fill_color((1.0, 1.0, 1.0))
gc.rect(x1, y1, x2 - x1, y2 - y1)
gc.draw_path()
pts = self._get_selected_points()
if len(pts) == 0:
return
screen_pts = self.component.map_screen(pts)
if hasattr(gc, 'draw_marker_at_points'):
gc.draw_marker_at_points(screen_pts, 3, DOT_MARKER)
else:
gc.save_state()
for sx, sy in screen_pts:
gc.translate_ctm(sx, sy)
gc.begin_path()
self.marker.add_to_path(gc, 3)
gc.draw_path(self.marker.draw_mode)
gc.translate_ctm(-sx, -sy)
gc.restore_state()
def _get_selected_points(self):
""" gets all the points within the bounds defined in the datasources
metadata
"""
index_datasource = self.component.index
index_selection = index_datasource.metadata['selections']
index = index_datasource.get_data()
value_datasource = self.component.value
value_selection = value_datasource.metadata['selections']
value = value_datasource.get_data()
x_indices = numpy.where((index > index_selection[0]) &
(index < index_selection[-1]))
y_indices = numpy.where((value > value_selection[0]) &
(value < value_selection[-1]))
indices = list(set(x_indices[0]) & set(y_indices[0]))
sel_index = index[indices]
sel_value = value[indices]
return zip(sel_index, sel_value)
def _get_selection_index_screen_range(self):
""" maps the selected bounds which were set by the tool into screen
space. The screen space points can be used for drawing the overlay
"""
index_datasource = self.component.index
index_mapper = self.component.index_mapper
index_selection = index_datasource.metadata['selections']
return tuple(index_mapper.map_screen(numpy.array(index_selection)))
def _get_selection_value_screen_range(self):
""" maps the selected bounds which were set by the tool into screen
space. The screen space points can be used for drawing the overlay
"""
value_datasource = self.component.value
value_mapper = self.component.value_mapper
value_selection = value_datasource.metadata['selections']
return tuple(value_mapper.map_screen(numpy.array(value_selection)))
class PlotExample(HasTraits):
plot = Instance(Plot)
traits_view = View(Item('plot', editor=ComponentEditor()),
width=600, height=600)
def __init__(self, index, value, *args, **kw):
super(PlotExample, self).__init__(*args, **kw)
plot_data = ArrayPlotData(index=index)
plot_data.set_data('value', value)
self.plot = Plot(plot_data)
line = self.plot.plot(('index', 'value'))[0]
line.overlays.append(XRayOverlay(line))
line.tools.append(BoxSelectTool(line))
index = numpy.arange(0, 25, 0.25)
value = numpy.sin(index) + numpy.arange(0, 10, 0.1)
example = PlotExample(index, value)
example.configure_traits()
| bsd-3-clause |
xuweiliang/Codelibrary | nova/db/sqlalchemy/migrate_repo/versions/302_pgsql_add_instance_system_metadata_index.py | 43 | 1322 | # Copyright 2015 Huawei.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_db.sqlalchemy import utils
INDEX_COLUMNS = ['instance_uuid']
# using the index name same with mysql
INDEX_NAME = 'instance_uuid'
SYS_META_TABLE_NAME = 'instance_system_metadata'
def upgrade(migrate_engine):
"""Add instance_system_metadata indexes missing on PostgreSQL and other DB.
"""
# This index was already added by migration 216 for MySQL
if migrate_engine.name != 'mysql':
# Adds index for PostgreSQL and other DB
if not utils.index_exists(migrate_engine, SYS_META_TABLE_NAME,
INDEX_NAME):
utils.add_index(migrate_engine, SYS_META_TABLE_NAME, INDEX_NAME,
INDEX_COLUMNS)
| apache-2.0 |
0x7E/ubuntu-tweak | ubuntutweak/settings/ccm/Utils.py | 5 | 12852 | # -*- coding: UTF-8 -*-
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# Authors: Quinn Storm ([email protected])
# Patrick Niklaus ([email protected])
# Guillaume Seguin ([email protected])
# Christopher Williams ([email protected])
# Copyright (C) 2007 Quinn Storm
import os
import weakref
from gi.repository import GObject, Gtk, Gdk, Pango
from Constants import *
from cgi import escape as protect_pango_markup
import operator
import itertools
import locale
import gettext
locale.setlocale(locale.LC_ALL, "")
gettext.bindtextdomain("ccsm", DataDir + "/locale")
gettext.textdomain("ccsm")
_ = gettext.gettext
IconTheme = Gtk.IconTheme.get_default()
#TODO
#if not IconDir in IconTheme.get_search_path():
# IconTheme.prepend_search_path(IconDir)
def gtk_process_events ():
while Gtk.events_pending ():
Gtk.main_iteration ()
def getScreens():
screens = []
display = Gdk.Display.get_default()
nScreens = display.get_n_screens()
for i in range(nScreens):
screens.append(i)
return screens
def getDefaultScreen():
display = Gdk.Display.get_default()
return display.get_default_screen().get_number()
def protect_markup_dict (dict_):
return dict((k, protect_pango_markup (v)) for (k, v) in dict_.items())
class Image (Gtk.Image):
def __init__ (self, name = None, type = ImageNone, size = 32,
useMissingImage = False):
GObject.GObject.__init__ (self)
if not name:
return
if useMissingImage:
self.set_from_stock (Gtk.STOCK_MISSING_IMAGE,
Gtk.IconSize.LARGE_TOOLBAR)
return
try:
if type in (ImagePlugin, ImageCategory, ImageThemed):
pixbuf = None
if type == ImagePlugin:
name = "plugin-" + name
try:
pixbuf = IconTheme.load_icon (name, size, 0)
except GObject.GError:
pixbuf = IconTheme.load_icon ("plugin-unknown", size, 0)
elif type == ImageCategory:
name = "plugins-" + name
try:
pixbuf = IconTheme.load_icon (name, size, 0)
except GObject.GError:
pixbuf = IconTheme.load_icon ("plugins-unknown", size, 0)
else:
pixbuf = IconTheme.load_icon (name, size, 0)
self.set_from_pixbuf (pixbuf)
elif type == ImageStock:
self.set_from_stock (name, size)
except GObject.GError as e:
self.set_from_stock (Gtk.STOCK_MISSING_IMAGE, Gtk.IconSize.BUTTON)
class ActionImage (Gtk.Alignment):
map = {
"keyboard" : "input-keyboard",
"button" : "input-mouse",
"edges" : "video-display",
"bell" : "audio-x-generic"
}
def __init__ (self, action):
GObject.GObject.__init__ (self, 0, 0.5)
self.set_padding (0, 0, 0, 10)
if action in self.map: action = self.map[action]
self.add (Image (name = action, type = ImageThemed, size = 22))
class SizedButton (Gtk.Button):
minWidth = -1
minHeight = -1
def __init__ (self, minWidth = -1, minHeight = -1):
super (SizedButton, self).__init__ ()
self.minWidth = minWidth
self.minHeight = minHeight
self.connect ("size-request", self.adjust_size)
def adjust_size (self, widget, requisition):
width, height = requisition.width, requisition.height
newWidth = max (width, self.minWidth)
newHeight = max (height, self.minHeight)
self.set_size_request (newWidth, newHeight)
class PrettyButton (Gtk.Button):
__gsignals__ = {
'draw': 'override',
}
_old_toplevel = None
def __init__ (self):
super (PrettyButton, self).__init__ ()
self.states = {
"focus" : False,
"pointer" : False
}
self.set_size_request (200, -1)
self.set_relief (Gtk.ReliefStyle.NONE)
self.connect ("focus-in-event", self.update_state_in, "focus")
self.connect ("focus-out-event", self.update_state_out, "focus")
self.connect ("hierarchy-changed", self.hierarchy_changed)
def hierarchy_changed (self, widget, old_toplevel):
if old_toplevel == self._old_toplevel:
return
if not old_toplevel and self.state != Gtk.StateType.NORMAL:
self.set_state(Gtk.StateType.PRELIGHT)
self.set_state(Gtk.StateType.NORMAL)
self._old_toplevel = old_toplevel
def update_state_in (self, *args):
state = args[-1]
self.set_state (Gtk.StateType.PRELIGHT)
self.states[state] = True
def update_state_out (self, *args):
state = args[-1]
self.states[state] = False
if True in self.states.values ():
self.set_state (Gtk.StateType.PRELIGHT)
else:
self.set_state (Gtk.StateType.NORMAL)
def do_expose_event (self, event):
has_focus = self.flags () & Gtk.HAS_FOCUS
if has_focus:
self.unset_flags (Gtk.HAS_FOCUS)
ret = super (PrettyButton, self).do_expose_event (self, event)
if has_focus:
self.set_flags (Gtk.HAS_FOCUS)
return ret
class Label(Gtk.Label):
def __init__(self, value = "", wrap = 160):
GObject.GObject.__init__(self, value)
self.props.xalign = 0
self.props.wrap_mode = Pango.WrapMode.WORD
self.set_line_wrap(True)
self.set_size_request(wrap, -1)
class NotFoundBox(Gtk.Alignment):
def __init__(self, value=""):
GObject.GObject.__init__(self, 0.5, 0.5, 0.0, 0.0)
box = Gtk.HBox()
self.Warning = Gtk.Label()
self.Markup = _("<span size=\"large\"><b>No matches found.</b> </span><span>\n\n Your filter \"<b>%s</b>\" does not match any items.</span>")
value = protect_pango_markup(value)
self.Warning.set_markup(self.Markup % value)
image = Image("face-surprise", ImageThemed, 48)
box.pack_start(image, False, False, 0)
box.pack_start(self.Warning, True, True, 15)
self.add(box)
def update(self, value):
value = protect_pango_markup(value)
self.Warning.set_markup(self.Markup % value)
class IdleSettingsParser:
def __init__(self, context, main):
def FilterPlugin (p):
return not p.Initialized and p.Enabled
self.Context = context
self.Main = main
self.PluginList = [p for p in self.Context.Plugins.items() if FilterPlugin(p[1])]
nCategories = len (main.MainPage.RightWidget._boxes)
self.CategoryLoadIconsList = list(range(3, nCategories)) # Skip the first 3
print('Loading icons...')
GObject.timeout_add (150, self.Wait)
def Wait(self):
if not self.PluginList:
return False
if len (self.CategoryLoadIconsList) == 0: # If we're done loading icons
GObject.idle_add (self.ParseSettings)
else:
GObject.idle_add (self.LoadCategoryIcons)
return False
def ParseSettings(self):
name, plugin = self.PluginList[0]
if not plugin.Initialized:
plugin.Update ()
self.Main.RefreshPage(plugin)
self.PluginList.remove (self.PluginList[0])
GObject.timeout_add (200, self.Wait)
return False
def LoadCategoryIcons(self):
from ccm.Widgets import PluginButton
catIndex = self.CategoryLoadIconsList[0]
pluginWindow = self.Main.MainPage.RightWidget
categoryBox = pluginWindow._boxes[catIndex]
for (pluginIndex, plugin) in \
enumerate (categoryBox.get_unfiltered_plugins()):
categoryBox._buttons[pluginIndex] = PluginButton (plugin)
categoryBox.rebuild_table (categoryBox._current_cols, True)
pluginWindow.connect_buttons (categoryBox)
self.CategoryLoadIconsList.remove (self.CategoryLoadIconsList[0])
GObject.timeout_add (150, self.Wait)
return False
# Updates all registered settings when they are changed through CompizConfig
class Updater:
def __init__ (self):
self.VisibleSettings = {}
self.Plugins = []
self.Block = 0
def SetContext (self, context):
self.Context = context
GObject.timeout_add (2000, self.Update)
def Append (self, widget):
reference = weakref.ref(widget)
setting = widget.Setting
self.VisibleSettings.setdefault((setting.Plugin.Name, setting.Name), []).append(reference)
def AppendPlugin (self, plugin):
self.Plugins.append (plugin)
def Remove (self, widget):
setting = widget.Setting
l = self.VisibleSettings.get((setting.Plugin.Name, setting.Name))
if not l:
return
for i, ref in enumerate(list(l)):
if ref() is widget:
l.remove(ref)
break
def UpdatePlugins(self):
for plugin in self.Plugins:
plugin.Read()
def UpdateSetting (self, setting):
widgets = self.VisibleSettings.get((setting.Plugin.Name, setting.Name))
if not widgets:
return
for reference in widgets:
widget = reference()
if widget is not None:
widget.Read()
def Update (self):
if self.Block > 0:
return True
if self.Context.ProcessEvents():
changed = self.Context.ChangedSettings
if [s for s in changed if s.Plugin.Name == "core" and s.Name == "active_plugins"]:
self.UpdatePlugins()
for setting in list(changed):
widgets = self.VisibleSettings.get((setting.Plugin.Name, setting.Name))
if widgets:
for reference in widgets:
widget = reference()
if widget is not None:
widget.Read()
if widget.List:
widget.ListWidget.Read()
changed.remove(setting)
self.Context.ChangedSettings = changed
return True
GlobalUpdater = Updater ()
class PluginSetting:
def __init__ (self, plugin, widget, handler):
self.Widget = widget
self.Plugin = plugin
self.Handler = handler
GlobalUpdater.AppendPlugin (self)
def Read (self):
widget = self.Widget
widget.handler_block(self.Handler)
widget.set_active (self.Plugin.Enabled)
widget.set_sensitive (self.Plugin.Context.AutoSort)
widget.handler_unblock(self.Handler)
class PureVirtualError(Exception):
pass
def SettingKeyFunc(value):
return value.Plugin.Ranking[value.Name]
def CategoryKeyFunc(category):
if 'General' == category:
return ''
else:
return category or 'zzzzzzzz'
def GroupIndexKeyFunc(item):
return item[1][0]
FirstItemKeyFunc = operator.itemgetter(0)
EnumSettingKeyFunc = operator.itemgetter(1)
PluginKeyFunc = operator.attrgetter('ShortDesc')
def HasOnlyType (settings, stype):
return settings and not [s for s in settings if s.Type != stype]
def GetSettings(group, types=None):
def TypeFilter (settings, types):
for setting in settings:
if setting.Type in types:
yield setting
if types:
screen = TypeFilter(iter(group.Screen.values()), types)
else:
screen = iter(group.Screen.values())
return screen
# Support python 2.4
try:
any
all
except NameError:
def any(iterable):
for element in iterable:
if element:
return True
return False
def all(iterable):
for element in iterable:
if not element:
return False
return True
| gpl-2.0 |
YongseopKim/crosswalk-test-suite | webapi/tct-csp-w3c-tests/csp-py/csp_media-src_none_audio_blocked_int.py | 30 | 3064 | def main(request, response):
import simplejson as json
    f = file('config.json')
    source = f.read()
    f.close()
s = json.JSONDecoder().decode(source)
url1 = "http://" + s['host'] + ":" + str(s['ports']['http'][1])
url2 = "http://" + s['host'] + ":" + str(s['ports']['http'][0])
_CSP = "media-src 'none'; script-src 'self' 'unsafe-inline'"
response.headers.set("Content-Security-Policy", _CSP)
response.headers.set("X-Content-Security-Policy", _CSP)
response.headers.set("X-WebKit-CSP", _CSP)
return """<!DOCTYPE html>
<!--
Copyright (c) 2013 Intel Corporation.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
* Redistributions of works must retain the original copyright notice, this list
of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the original copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of Intel Corporation nor the names of its contributors
may be used to endorse or promote products derived from this work without
specific prior written permission.
THIS SOFTWARE IS PROVIDED BY INTEL CORPORATION "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL INTEL CORPORATION BE LIABLE FOR ANY DIRECT,
INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
Authors:
Zhang, Zhiqiang <[email protected]>
-->
<html>
<head>
<title>CSP Test: csp_media-src_none_audio_blocked_int</title>
<link rel="author" title="Intel" href="http://www.intel.com"/>
<link rel="help" href="http://www.w3.org/TR/2012/CR-CSP-20121115/#media-src"/>
<meta name="flags" content=""/>
<meta name="assert" content="media-src *; script-src 'self' 'unsafe-inline'"/>
<meta charset="utf-8"/>
<script src="../resources/testharness.js"></script>
<script src="../resources/testharnessreport.js"></script>
</head>
<body>
<div id="log"></div>
<audio id="m"></audio>
<script>
var t = async_test(document.title);
var m = document.getElementById("m");
m.src = "support/khronos/red-green.theora.ogv";
window.setTimeout(function() {
t.step(function() {
assert_true(m.currentSrc == "",
"audio.currentSrc should be empty after setting src attribute");
});
t.done();
}, 0);
</script>
</body>
</html> """
| bsd-3-clause |
gccpacman/shadowsocks | shadowsocks/shell.py | 652 | 12736 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright 2015 clowwindy
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import absolute_import, division, print_function, \
with_statement
import os
import json
import sys
import getopt
import logging
from shadowsocks.common import to_bytes, to_str, IPNetwork
from shadowsocks import encrypt
VERBOSE_LEVEL = 5
verbose = 0
def check_python():
info = sys.version_info
if info[0] == 2 and not info[1] >= 6:
print('Python 2.6+ required')
sys.exit(1)
elif info[0] == 3 and not info[1] >= 3:
print('Python 3.3+ required')
sys.exit(1)
elif info[0] not in [2, 3]:
print('Python version not supported')
sys.exit(1)
def print_exception(e):
global verbose
logging.error(e)
if verbose > 0:
import traceback
traceback.print_exc()
def print_shadowsocks():
version = ''
try:
import pkg_resources
version = pkg_resources.get_distribution('shadowsocks').version
except Exception:
pass
print('Shadowsocks %s' % version)
def find_config():
config_path = 'config.json'
if os.path.exists(config_path):
return config_path
config_path = os.path.join(os.path.dirname(__file__), '../', 'config.json')
if os.path.exists(config_path):
return config_path
return None
def check_config(config, is_local):
if config.get('daemon', None) == 'stop':
# no need to specify configuration for daemon stop
return
if is_local and not config.get('password', None):
logging.error('password not specified')
print_help(is_local)
sys.exit(2)
if not is_local and not config.get('password', None) \
and not config.get('port_password', None) \
and not config.get('manager_address'):
logging.error('password or port_password not specified')
print_help(is_local)
sys.exit(2)
if 'local_port' in config:
config['local_port'] = int(config['local_port'])
if config.get('server_port', None) and type(config['server_port']) != list:
config['server_port'] = int(config['server_port'])
if config.get('local_address', '') in [b'0.0.0.0']:
logging.warn('warning: local set to listen on 0.0.0.0, it\'s not safe')
if config.get('server', '') in ['127.0.0.1', 'localhost']:
logging.warn('warning: server set to listen on %s:%s, are you sure?' %
(to_str(config['server']), config['server_port']))
if (config.get('method', '') or '').lower() == 'table':
logging.warn('warning: table is not safe; please use a safer cipher, '
'like AES-256-CFB')
if (config.get('method', '') or '').lower() == 'rc4':
logging.warn('warning: RC4 is not safe; please use a safer cipher, '
'like AES-256-CFB')
if config.get('timeout', 300) < 100:
logging.warn('warning: your timeout %d seems too short' %
int(config.get('timeout')))
if config.get('timeout', 300) > 600:
logging.warn('warning: your timeout %d seems too long' %
int(config.get('timeout')))
if config.get('password') in [b'mypassword']:
logging.error('DON\'T USE DEFAULT PASSWORD! Please change it in your '
'config.json!')
sys.exit(1)
if config.get('user', None) is not None:
if os.name != 'posix':
logging.error('user can be used only on Unix')
sys.exit(1)
encrypt.try_cipher(config['password'], config['method'])
def get_config(is_local):
global verbose
logging.basicConfig(level=logging.INFO,
format='%(levelname)-s: %(message)s')
if is_local:
shortopts = 'hd:s:b:p:k:l:m:c:t:vq'
longopts = ['help', 'fast-open', 'pid-file=', 'log-file=', 'user=',
'version']
else:
shortopts = 'hd:s:p:k:m:c:t:vq'
longopts = ['help', 'fast-open', 'pid-file=', 'log-file=', 'workers=',
'forbidden-ip=', 'user=', 'manager-address=', 'version']
try:
config_path = find_config()
optlist, args = getopt.getopt(sys.argv[1:], shortopts, longopts)
for key, value in optlist:
if key == '-c':
config_path = value
if config_path:
logging.info('loading config from %s' % config_path)
with open(config_path, 'rb') as f:
try:
config = parse_json_in_str(f.read().decode('utf8'))
except ValueError as e:
logging.error('found an error in config.json: %s',
e.message)
sys.exit(1)
else:
config = {}
v_count = 0
for key, value in optlist:
if key == '-p':
config['server_port'] = int(value)
elif key == '-k':
config['password'] = to_bytes(value)
elif key == '-l':
config['local_port'] = int(value)
elif key == '-s':
config['server'] = to_str(value)
elif key == '-m':
config['method'] = to_str(value)
elif key == '-b':
config['local_address'] = to_str(value)
elif key == '-v':
v_count += 1
# '-vv' turns on more verbose mode
config['verbose'] = v_count
elif key == '-t':
config['timeout'] = int(value)
elif key == '--fast-open':
config['fast_open'] = True
elif key == '--workers':
config['workers'] = int(value)
elif key == '--manager-address':
config['manager_address'] = value
elif key == '--user':
config['user'] = to_str(value)
elif key == '--forbidden-ip':
config['forbidden_ip'] = to_str(value).split(',')
elif key in ('-h', '--help'):
if is_local:
print_local_help()
else:
print_server_help()
sys.exit(0)
elif key == '--version':
print_shadowsocks()
sys.exit(0)
elif key == '-d':
config['daemon'] = to_str(value)
elif key == '--pid-file':
config['pid-file'] = to_str(value)
elif key == '--log-file':
config['log-file'] = to_str(value)
elif key == '-q':
v_count -= 1
config['verbose'] = v_count
except getopt.GetoptError as e:
print(e, file=sys.stderr)
print_help(is_local)
sys.exit(2)
if not config:
logging.error('config not specified')
print_help(is_local)
sys.exit(2)
config['password'] = to_bytes(config.get('password', b''))
config['method'] = to_str(config.get('method', 'aes-256-cfb'))
config['port_password'] = config.get('port_password', None)
config['timeout'] = int(config.get('timeout', 300))
config['fast_open'] = config.get('fast_open', False)
config['workers'] = config.get('workers', 1)
config['pid-file'] = config.get('pid-file', '/var/run/shadowsocks.pid')
config['log-file'] = config.get('log-file', '/var/log/shadowsocks.log')
config['verbose'] = config.get('verbose', False)
config['local_address'] = to_str(config.get('local_address', '127.0.0.1'))
config['local_port'] = config.get('local_port', 1080)
if is_local:
if config.get('server', None) is None:
logging.error('server addr not specified')
print_local_help()
sys.exit(2)
else:
config['server'] = to_str(config['server'])
else:
config['server'] = to_str(config.get('server', '0.0.0.0'))
try:
config['forbidden_ip'] = \
IPNetwork(config.get('forbidden_ip', '127.0.0.0/8,::1/128'))
except Exception as e:
logging.error(e)
sys.exit(2)
config['server_port'] = config.get('server_port', None)
logging.getLogger('').handlers = []
logging.addLevelName(VERBOSE_LEVEL, 'VERBOSE')
if config['verbose'] >= 2:
level = VERBOSE_LEVEL
elif config['verbose'] == 1:
level = logging.DEBUG
elif config['verbose'] == -1:
level = logging.WARN
elif config['verbose'] <= -2:
level = logging.ERROR
else:
level = logging.INFO
verbose = config['verbose']
logging.basicConfig(level=level,
format='%(asctime)s %(levelname)-8s %(message)s',
datefmt='%Y-%m-%d %H:%M:%S')
check_config(config, is_local)
return config
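# Hedged usage sketch: an entry point such as shadowsocks' local.py would
# typically call get_config(is_local=True) once sys.argv is populated, e.g.
#   sslocal -s server.example.com -p 8388 -k password -m aes-256-cfb
# (the host and password values above are illustrative only)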
def print_help(is_local):
if is_local:
print_local_help()
else:
print_server_help()
def print_local_help():
print('''usage: sslocal [OPTION]...
A fast tunnel proxy that helps you bypass firewalls.
You can supply configurations via either config file or command line arguments.
Proxy options:
-c CONFIG path to config file
-s SERVER_ADDR server address
-p SERVER_PORT server port, default: 8388
-b LOCAL_ADDR local binding address, default: 127.0.0.1
-l LOCAL_PORT local port, default: 1080
-k PASSWORD password
-m METHOD encryption method, default: aes-256-cfb
-t TIMEOUT timeout in seconds, default: 300
--fast-open use TCP_FASTOPEN, requires Linux 3.7+
General options:
-h, --help show this help message and exit
-d start/stop/restart daemon mode
--pid-file PID_FILE pid file for daemon mode
--log-file LOG_FILE log file for daemon mode
--user USER username to run as
-v, -vv verbose mode
-q, -qq quiet mode, only show warnings/errors
--version show version information
Online help: <https://github.com/shadowsocks/shadowsocks>
''')
def print_server_help():
print('''usage: ssserver [OPTION]...
A fast tunnel proxy that helps you bypass firewalls.
You can supply configurations via either config file or command line arguments.
Proxy options:
-c CONFIG path to config file
-s SERVER_ADDR server address, default: 0.0.0.0
-p SERVER_PORT server port, default: 8388
-k PASSWORD password
-m METHOD encryption method, default: aes-256-cfb
-t TIMEOUT timeout in seconds, default: 300
--fast-open use TCP_FASTOPEN, requires Linux 3.7+
--workers WORKERS number of workers, available on Unix/Linux
  --forbidden-ip IPLIST comma separated IP list forbidden to connect
--manager-address ADDR optional server manager UDP address, see wiki
General options:
-h, --help show this help message and exit
-d start/stop/restart daemon mode
--pid-file PID_FILE pid file for daemon mode
--log-file LOG_FILE log file for daemon mode
--user USER username to run as
-v, -vv verbose mode
-q, -qq quiet mode, only show warnings/errors
--version show version information
Online help: <https://github.com/shadowsocks/shadowsocks>
''')
def _decode_list(data):
rv = []
for item in data:
if hasattr(item, 'encode'):
item = item.encode('utf-8')
elif isinstance(item, list):
item = _decode_list(item)
elif isinstance(item, dict):
item = _decode_dict(item)
rv.append(item)
return rv
def _decode_dict(data):
rv = {}
for key, value in data.items():
if hasattr(value, 'encode'):
value = value.encode('utf-8')
elif isinstance(value, list):
value = _decode_list(value)
elif isinstance(value, dict):
value = _decode_dict(value)
rv[key] = value
return rv
def parse_json_in_str(data):
# parse json and convert everything from unicode to str
return json.loads(data, object_hook=_decode_dict)
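# Hedged example: parse_json_in_str('{"server": "host"}') returns a dict whose
# string values have been converted to UTF-8 byte strings by _decode_dict;
# note the dictionary keys themselves pass through unconverted.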
| apache-2.0 |
reingart/rad2py | assignments/program10A.py | 16 | 3944 | #!/usr/bin/env python
# coding:utf-8
"PSP Program 6A - Linear Regression Prediction Interval"
__author__ = "Mariano Reingart ([email protected])"
__copyright__ = "Copyright (C) 2011 Mariano Reingart"
__license__ = "GPL 3.0"
from math import sqrt, pi
# reuse previous programs
from program1A import mean
from program5A import simpson_rule_integrate, gamma
def double_sided_student_t_probability(t, n):
"Calculate the p-value using a double sided student t distribution"
# create the function for n degrees of freedom:
k = gamma(n + 1, 2) / (sqrt(n * pi) * gamma(n, 2))
f_t_dist = lambda u: k * (1 + (u ** 2) / float(n)) ** (- (n + 1) / 2.0)
# integrate a finite area from the origin to t
p_aux = simpson_rule_integrate(f_t_dist, 0, t)
# return the area of the two tails of the distribution (symmetrical)
return (0.5 - p_aux) * 2
def double_sided_student_t_value(p, n):
"Calculate the t-value using a double sided student t distribution"
# replaces table lookup, thanks to http://statpages.org/pdfs.html
v = dv = 0.5
t = 0
while dv > 0.000001:
t = 1 / v - 1
dv = dv / 2
if double_sided_student_t_probability(t, n) > p:
v = v - dv
else:
v = v + dv
return t
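# Hedged usage sketch (figures taken from test_student_t_integration below):
# the bisection halves dv each pass, so roughly 20 iterations pin t to the
# 1e-6 tolerance:
#   double_sided_student_t_value(p=0.1, n=8)          -> ~1.8595
#   double_sided_student_t_probability(t=1.8595, n=8) -> ~0.1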
def variance(x_values, y_values, b0, b1):
"Calculate the mean square deviation of the linear regeression line"
# take the variance from the regression line instead of the data average
sum_aux = sum([(y - b0 - b1 * x) ** 2 for x, y in zip(x_values, y_values)])
n = float(len(x_values))
return (1 / (n - 2.0)) * sum_aux
def prediction_interval(x_values, y_values, x_k, alpha):
"""Calculate the linear regression parameters for a set of n values
then calculate the upper and lower prediction interval
"""
# calculate aux variables
x_avg = mean(x_values)
y_avg = mean(y_values)
n = len(x_values)
sum_xy = sum([(x_values[i] * y_values[i]) for i in range(n)])
sum_x2 = sum([(x_values[i] ** 2) for i in range(n)])
# calculate regression coefficients
b1 = (sum_xy - (n * x_avg * y_avg)) / (sum_x2 - n * (x_avg ** 2))
b0 = y_avg - b1 * x_avg
# calculate the t-value for the given alpha p-value
t = double_sided_student_t_value(1 - alpha, n - 2)
# calculate the standard deviation
sigma = sqrt(variance(x_values, y_values, b0, b1))
# calculate the range
sum_xi_xavg = sum([(x - x_avg) ** 2 for x in x_values], 0.0)
aux = 1 + (1 / float(n)) + ((x_k - x_avg) ** 2) / sum_xi_xavg
p_range = t * sigma * sqrt(aux)
# combine the range with the x_k projection:
return b0, b1, p_range, x_k + p_range, x_k - p_range, t
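# The returned range implements the standard prediction-interval formula:
#   Range = t(1 - alpha, n - 2) * sigma * sqrt(1 + 1/n +
#           (x_k - x_avg)**2 / sum((x_i - x_avg)**2))
# and the UPI/LPI returned above are x_k + Range and x_k - Range.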
def test_student_t_integration():
# test student t values
assert round(double_sided_student_t_probability(t=1.8595, n=8), 4) == 0.1
assert round(double_sided_student_t_value(p=0.1, n=8), 4) == 1.8595
if __name__ == "__main__":
test_student_t_integration()
# Table D8 "Size Estimating regression data"
est_loc = [130, 650, 99, 150, 128, 302, 95, 945, 368, 961]
act_new_chg_loc = [186, 699, 132, 272, 291, 331, 199, 1890, 788, 1601]
projection = 644.429
# 70 percent
b0, b1, p_range, upi, lpi, t = prediction_interval(
est_loc, act_new_chg_loc, projection, alpha=0.7)
print "70% Prediction interval: ", b0, b1, p_range, upi, lpi, t
assert round(t, 3) == 1.108
assert round(b0, 2) == -22.55
assert round(b1, 4) == 1.7279
assert round(p_range, 3) == 236.563
assert round(upi, 2) == 880.99
assert round(lpi, 2) == 407.87
# 90 percent
b0, b1, p_range, upi, lpi, t = prediction_interval(
est_loc, act_new_chg_loc, projection, alpha=0.9)
print "90% Prediction interval: ", b0, b1, p_range, upi, lpi, t
assert round(t, 2) == 1.86
assert round(p_range, 2) == 396.97
assert round(upi, 2) == 1041.4
assert round(lpi, 2) == 247.46
| gpl-3.0 |
40223245/2015cdb_g6-team1 | static/Brython3.1.1-20150328-091302/Lib/site-packages/highlight.py | 617 | 2518 | import keyword
import _jsre as re
from browser import html
letters = 'abcdefghijklmnopqrstuvwxyz'
letters += letters.upper()+'_'
digits = '0123456789'
builtin_funcs = ("abs|divmod|input|open|staticmethod|all|enumerate|int|ord|str|any|" +
"eval|isinstance|pow|sum|basestring|execfile|issubclass|print|super|" +
"binfile|iter|property|tuple|bool|filter|len|range|type|bytearray|" +
"float|list|raw_input|unichr|callable|format|locals|reduce|unicode|" +
"chr|frozenset|long|reload|vars|classmethod|getattr|map|repr|xrange|" +
"cmp|globals|max|reversed|zip|compile|hasattr|memoryview|round|" +
"__import__|complex|hash|min|set|apply|delattr|help|next|setattr|" +
"buffer|dict|hex|object|slice|coerce|dir|id|oct|sorted|intern")
kw_pattern = '^('+'|'.join(keyword.kwlist)+')$'
bf_pattern = '^('+builtin_funcs+')$'
def highlight(txt, string_color="blue", comment_color="green",
keyword_color="purple"):
res = html.PRE()
i = 0
name = ''
while i<len(txt):
car = txt[i]
if car in ["'",'"']:
k = i+1
while k<len(txt):
if txt[k]==car:
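                    # count the backslashes immediately preceding the quote;
                    # an even count means the quote is unescaped and closes
                    # the string literal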
nb_as = 0
j = k-1
while True:
if txt[j]=='\\':
nb_as+=1
j -= 1
else:
break
if nb_as % 2 == 0:
res <= html.SPAN(txt[i:k+1],
style=dict(color=string_color))
i = k
break
k += 1
elif car == '#': # comment
end = txt.find('\n', i)
if end== -1:
res <= html.SPAN(txt[i:],style=dict(color=comment_color))
break
else:
res <= html.SPAN(txt[i:end],style=dict(color=comment_color))
i = end-1
elif car in letters:
name += car
elif car in digits and name:
name += car
else:
if name:
if re.search(kw_pattern,name):
res <= html.SPAN(name,style=dict(color=keyword_color))
elif re.search(bf_pattern,name):
res <= html.SPAN(name,style=dict(color=keyword_color))
else:
res <= name
name = ''
res <= car
i += 1
res <= name
    return res
| gpl-3.0 |
JeremyRubin/bitcoin | test/functional/test_framework/siphash.py | 91 | 2014 | #!/usr/bin/env python3
# Copyright (c) 2016-2018 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Specialized SipHash-2-4 implementations.
This implements SipHash-2-4 for 256-bit integers.
"""
def rotl64(n, b):
return n >> (64 - b) | (n & ((1 << (64 - b)) - 1)) << b
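# One SipRound: the add-rotate-xor (ARX) mixing step shared by the
# compression and finalization passes in siphash256 below.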
def siphash_round(v0, v1, v2, v3):
v0 = (v0 + v1) & ((1 << 64) - 1)
v1 = rotl64(v1, 13)
v1 ^= v0
v0 = rotl64(v0, 32)
v2 = (v2 + v3) & ((1 << 64) - 1)
v3 = rotl64(v3, 16)
v3 ^= v2
v0 = (v0 + v3) & ((1 << 64) - 1)
v3 = rotl64(v3, 21)
v3 ^= v0
v2 = (v2 + v1) & ((1 << 64) - 1)
v1 = rotl64(v1, 17)
v1 ^= v2
v2 = rotl64(v2, 32)
return (v0, v1, v2, v3)
def siphash256(k0, k1, h):
n0 = h & ((1 << 64) - 1)
n1 = (h >> 64) & ((1 << 64) - 1)
n2 = (h >> 128) & ((1 << 64) - 1)
n3 = (h >> 192) & ((1 << 64) - 1)
v0 = 0x736f6d6570736575 ^ k0
v1 = 0x646f72616e646f6d ^ k1
v2 = 0x6c7967656e657261 ^ k0
v3 = 0x7465646279746573 ^ k1 ^ n0
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0 ^= n0
v3 ^= n1
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0 ^= n1
v3 ^= n2
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0 ^= n2
v3 ^= n3
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0 ^= n3
v3 ^= 0x2000000000000000
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0 ^= 0x2000000000000000
v2 ^= 0xFF
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
return v0 ^ v1 ^ v2 ^ v3
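# Hedged usage sketch (the key and message values are illustrative): k0/k1
# are the two 64-bit key halves and h the 256-bit message, consumed as four
# low-word-first 64-bit words with two SipRounds each plus four finalization
# rounds.
#   digest = siphash256(0x0706050403020100, 0x0f0e0d0c0b0a0908, 1 << 255)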
| mit |
tempesta-tech/linux-4.1-tfw | tools/perf/scripts/python/failed-syscalls-by-pid.py | 1996 | 2233 | # failed system call counts, by pid
# (c) 2010, Tom Zanussi <[email protected]>
# Licensed under the terms of the GNU GPL License version 2
#
# Displays system-wide failed system call totals, broken down by pid.
# If a [comm] arg is specified, only syscalls called by [comm] are displayed.
import os
import sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from Util import *
usage = "perf script -s syscall-counts-by-pid.py [comm|pid]\n";
for_comm = None
for_pid = None
if len(sys.argv) > 2:
sys.exit(usage)
if len(sys.argv) > 1:
try:
for_pid = int(sys.argv[1])
except:
for_comm = sys.argv[1]
syscalls = autodict()
def trace_begin():
print "Press control+C to stop and show the summary"
def trace_end():
print_error_totals()
def raw_syscalls__sys_exit(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
common_callchain, id, ret):
if (for_comm and common_comm != for_comm) or \
(for_pid and common_pid != for_pid ):
return
if ret < 0:
try:
syscalls[common_comm][common_pid][id][ret] += 1
except TypeError:
syscalls[common_comm][common_pid][id][ret] = 1
def syscalls__sys_exit(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
id, ret):
raw_syscalls__sys_exit(**locals())
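# Hedged usage sketch (the tracepoint name mirrors the raw_syscalls handler
# above):
#   perf record -e raw_syscalls:sys_exit -a -- sleep 5
#   perf script -s failed-syscalls-by-pid.py [comm|pid]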
def print_error_totals():
if for_comm is not None:
print "\nsyscall errors for %s:\n\n" % (for_comm),
else:
print "\nsyscall errors:\n\n",
print "%-30s %10s\n" % ("comm [pid]", "count"),
print "%-30s %10s\n" % ("------------------------------", \
"----------"),
comm_keys = syscalls.keys()
for comm in comm_keys:
pid_keys = syscalls[comm].keys()
for pid in pid_keys:
print "\n%s [%d]\n" % (comm, pid),
id_keys = syscalls[comm][pid].keys()
for id in id_keys:
print " syscall: %-16s\n" % syscall_name(id),
ret_keys = syscalls[comm][pid][id].keys()
for ret, val in sorted(syscalls[comm][pid][id].iteritems(), key = lambda(k, v): (v, k), reverse = True):
print " err = %-20s %10d\n" % (strerror(ret), val),
| gpl-2.0 |
yuanke/hadoop-hbase | contrib/hod/hodlib/Common/setup.py | 182 | 45814 | #Licensed to the Apache Software Foundation (ASF) under one
#or more contributor license agreements. See the NOTICE file
#distributed with this work for additional information
#regarding copyright ownership. The ASF licenses this file
#to you under the Apache License, Version 2.0 (the
#"License"); you may not use this file except in compliance
#with the License. You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#Unless required by applicable law or agreed to in writing, software
#distributed under the License is distributed on an "AS IS" BASIS,
#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#See the License for the specific language governing permissions and
#limitations under the License.
# $Id:setup.py 5158 2007-04-09 00:14:35Z zim $
#
#------------------------------------------------------------------------------
"""'setup' provides for reading and verifing configuration files based on
Python's SafeConfigParser class."""
import sys, os, re, pprint
from ConfigParser import SafeConfigParser
from optparse import OptionParser, IndentedHelpFormatter, OptionGroup
from util import get_perms, replace_escapes
from types import typeValidator, typeValidatorInstance, is_valid_type, \
typeToString
from hodlib.Hod.hod import hodHelp
reEmailAddress = re.compile("^.*@.*$")
reEmailDelimit = re.compile("@")
reComma = re.compile("\s*,\s*")
reDot = re.compile("\.")
reCommentHack = re.compile("^.*?\s+#|;.*", flags=re.S)
reCommentNewline = re.compile("\n|\r$")
reKeyVal = r"(?<!\\)="
reKeyVal = re.compile(reKeyVal)
reKeyValList = r"(?<!\\),"
reKeyValList = re.compile(reKeyValList)
errorPrefix = 'error'
requiredPerms = '0660'
class definition:
def __init__(self):
"""Generates a configuration definition object."""
self.__def = {}
self.__defOrder = []
def __repr__(self):
return pprint.pformat(self.__def)
def __getitem__(self, section):
return self.__def[section]
def __iter__(self):
return iter(self.__def)
def sections(self):
"""Returns a list of sections/groups."""
if len(self.__defOrder):
return self.__defOrder
else:
return self.__def.keys()
def add_section(self, section):
"""Add a configuration section / option group."""
if self.__def.has_key(section):
raise Exception("Section already exists: '%s'" % section)
else:
self.__def[section] = {}
def add_def(self, section, var, type, desc, help = True, default = None,
req = True, validate = True, short = None):
""" Add a variable definition.
section - section name
var - variable name
type - valid hodlib.types
desc - description of variable
help - display help for this variable
default - default value
req - bool, requried?
validate - bool, validate type value?
short - short symbol (1 character),
help - bool, display help?"""
if self.__def.has_key(section):
if not is_valid_type(type):
raise Exception("Type (type) is invalid: %s.%s - '%s'" % (section, var,
type))
if not isinstance(desc, str):
raise Exception("Description (desc) must be a string: %s.%s - '%s'" % (
section, var, desc))
if not isinstance(req, bool):
raise Exception("Required (req) must be a bool: %s.%s - '%s'" % (section,
var,
req))
if not isinstance(validate, bool):
raise Exception("Validate (validate) must be a bool: %s.%s - '%s'" % (
section, var, validate))
if self.__def[section].has_key(var):
raise Exception("Variable name already defined: '%s'" % var)
else:
self.__def[section][var] = { 'type' : type,
'desc' : desc,
'help' : help,
'default' : default,
'req' : req,
'validate' : validate,
'short' : short }
else:
raise Exception("Section does not exist: '%s'" % section)
def add_defs(self, defList, defOrder=None):
""" Add a series of definitions.
defList = { section0 : ((name0,
type0,
desc0,
help0,
default0,
req0,
validate0,
short0),
....
(nameN,
typeN,
descN,
helpN,
defaultN,
reqN,
validateN,
shortN)),
....
sectionN : ... }
Where the short synmbol is optional and can only be one char."""
for section in defList.keys():
self.add_section(section)
for defTuple in defList[section]:
if isinstance(defTuple, tuple):
if len(defTuple) < 7:
raise Exception(
"section %s is missing an element: %s" % (
section, pprint.pformat(defTuple)))
else:
raise Exception("section %s of defList is not a tuple" %
section)
if len(defTuple) == 7:
self.add_def(section, defTuple[0], defTuple[1],
defTuple[2], defTuple[3], defTuple[4],
defTuple[5], defTuple[6])
else:
self.add_def(section, defTuple[0], defTuple[1],
defTuple[2], defTuple[3], defTuple[4],
defTuple[5], defTuple[6], defTuple[7])
if defOrder:
for section in defOrder:
if section in self.__def:
self.__defOrder.append(section)
for section in self.__def:
if not section in defOrder:
raise Exception(
"section %s is missing from specified defOrder." %
section)
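# Hedged usage sketch (the section, variable and type names below are
# illustrative only):
#   confDef = definition()
#   confDef.add_defs({'hod': (('debug', 'pos_int', 'debug level', True,
#                              3, False, True, 'b'),)},
#                    defOrder=['hod'])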
class baseConfig:
def __init__(self, configDef, originalDir=None):
self.__toString = typeToString()
self.__validated = False
self._configDef = configDef
self._options = None
self._mySections = []
self._dict = {}
self.configFile = None
self.__originalDir = originalDir
if self._configDef:
self._mySections = configDef.sections()
def __repr__(self):
"""Returns a string representation of a config object including all
normalizations."""
print_string = '';
for section in self._mySections:
print_string = "%s[%s]\n" % (print_string, section)
options = self._dict[section].keys()
for option in options:
print_string = "%s%s = %s\n" % (print_string, option,
self._dict[section][option])
print_string = "%s\n" % (print_string)
print_string = re.sub("\n\n$", "", print_string)
return print_string
def __getitem__(self, section):
""" Returns a dictionary of configuration name and values by section.
"""
return self._dict[section]
def __setitem__(self, section, value):
self._dict[section] = value
def __iter__(self):
return iter(self._dict)
def has_key(self, section):
status = False
if section in self._dict:
status = True
return status
# Prints configuration error messages
def var_error(self, section, option, *addData):
errorStrings = []
if not self._dict[section].has_key(option):
self._dict[section][option] = None
errorStrings.append("%s: invalid '%s' specified in section %s (--%s.%s): %s" % (
errorPrefix, option, section, section, option, self._dict[section][option]))
if addData:
errorStrings.append("%s: additional info: %s\n" % (errorPrefix,
addData[0]))
return errorStrings
def var_error_suggest(self, errorStrings):
if self.configFile:
errorStrings.append("Check your command line options and/or " + \
"your configuration file %s" % self.configFile)
def __get_args(self, section):
def __dummyToString(type, value):
return value
toString = __dummyToString
if self.__validated:
toString = self.__toString
args = []
if isinstance(self._dict[section], dict):
for option in self._dict[section]:
if section in self._configDef and \
option in self._configDef[section]:
if self._configDef[section][option]['type'] == 'bool':
if self._dict[section][option] == 'True' or \
self._dict[section][option] == True:
args.append("--%s.%s" % (section, option))
else:
args.append("--%s.%s" % (section, option))
args.append(toString(
self._configDef[section][option]['type'],
self._dict[section][option]))
else:
if section in self._configDef:
if self._configDef[section][option]['type'] == 'bool':
if self._dict[section] == 'True' or \
self._dict[section] == True:
args.append("--%s" % section)
else:
if self._dict[section] != 'config':
args.append("--%s" % section)
args.append(toString(self._configDef[section]['type'],
self._dict[section]))
return args
def values(self):
return self._dict.values()
def keys(self):
return self._dict.keys()
def get_args(self, exclude=None, section=None):
"""Retrieve a tuple of config arguments."""
args = []
if section:
args = self.__get_args(section)
else:
for section in self._dict:
if exclude:
if not section in exclude:
args.extend(self.__get_args(section))
else:
args.extend(self.__get_args(section))
return tuple(args)
def verify(self):
"""Verifies each configuration variable, using the configValidator
class, based on its type as defined by the dictionary configDef.
Upon encountering a problem an error is printed to STDERR and
false is returned."""
oldDir = os.getcwd()
if self.__originalDir:
os.chdir(self.__originalDir)
status = True
statusMsgs = []
if self._configDef:
errorCount = 0
configValidator = typeValidator(self.__originalDir)
# foreach section and option by type string as defined in configDef
# add value to be validated to validator
for section in self._mySections:
for option in self._configDef[section].keys():
configVarName = "%s.%s" % (section, option)
if self._dict[section].has_key(option):
if self._configDef[section][option].has_key('validate'):
if self._configDef[section][option]['validate']:
# is the section.option needed to be validated?
configValidator.add(configVarName,
self._configDef[section][option]['type'],
self._dict[section][option])
else:
# If asked not to validate, just normalize
self[section][option] = \
configValidator.normalize(
self._configDef[section][option]['type'],
self._dict[section][option])
if self._configDef[section][option]['default'] != \
None:
self._configDef[section][option]['default'] = \
configValidator.normalize(
self._configDef[section][option]['type'],
self._configDef[section][option]['default']
)
self._configDef[section][option]['default'] = \
self.__toString(
self._configDef[section][option]['type'],
self._configDef[section][option]['default']
)
else:
# This should not happen. Just in case, take this as 'to be validated' case.
configValidator.add(configVarName,
self._configDef[section][option]['type'],
self._dict[section][option])
elif self._configDef[section][option]['req']:
statusMsgs.append("%s: %s.%s is not defined."
% (errorPrefix, section, option))
errorCount = errorCount + 1
configValidator.validate()
for valueInfo in configValidator.validatedInfo:
sectionsOptions = reDot.split(valueInfo['name'])
if valueInfo['isValid'] == 1:
self._dict[sectionsOptions[0]][sectionsOptions[1]] = \
valueInfo['normalized']
else:
if valueInfo['errorData']:
statusMsgs.extend(self.var_error(sectionsOptions[0],
sectionsOptions[1], valueInfo['errorData']))
else:
statusMsgs.extend(self.var_error(sectionsOptions[0],
sectionsOptions[1]))
errorCount = errorCount + 1
if errorCount > 1:
statusMsgs.append( "%s: %s problems found." % (
errorPrefix, errorCount))
self.var_error_suggest(statusMsgs)
status = False
elif errorCount > 0:
statusMsgs.append( "%s: %s problem found." % (
errorPrefix, errorCount))
self.var_error_suggest(statusMsgs)
status = False
self.__validated = True
if self.__originalDir:
os.chdir(oldDir)
return status,statusMsgs
def normalizeValue(self, section, option) :
return typeValidatorInstance.normalize(
self._configDef[section][option]['type'],
self[section][option])
def validateValue(self, section, option):
# Validates a section.option and exits on error
valueInfo = typeValidatorInstance.verify(
self._configDef[section][option]['type'],
self[section][option])
if valueInfo['isValid'] == 1:
return []
else:
if valueInfo['errorData']:
return self.var_error(section, option, valueInfo['errorData'])
else:
return self.var_error(section, option)
class config(SafeConfigParser, baseConfig):
def __init__(self, configFile, configDef=None, originalDir=None,
options=None, checkPerms=False):
"""Constructs config object.
configFile - configuration file to read
configDef - definition object
options - options object
checkPerms - check file permission on config file, 0660
sample configuration file:
[snis]
modules_dir = modules/ ; location of infoModules
md5_defs_dir = etc/md5_defs ; location of infoTree md5 defs
info_store = var/info ; location of nodeInfo store
cam_daemon = localhost:8200 ; cam daemon address"""
SafeConfigParser.__init__(self)
baseConfig.__init__(self, configDef, originalDir)
if(os.path.exists(configFile)):
self.configFile = configFile
else:
raise IOError
self._options = options
## UNUSED CODE : checkPerms is never True
## zim: this code is used if one instantiates config() with checkPerms set to
## True.
if checkPerms: self.__check_perms()
self.read(configFile)
self._configDef = configDef
if not self._configDef:
self._mySections = self.sections()
self.__initialize_config_dict()
def __initialize_config_dict(self):
""" build a dictionary of config vars keyed by section name defined in
configDef, if options defined override config"""
for section in self._mySections:
items = self.items(section)
self._dict[section] = {}
# First fill self._dict with whatever is given in hodrc.
# Going by this, options given at the command line either override
# options in hodrc, or get appended to the list, like for
# hod.client-params. Note that after this dict has _only_ hodrc
# params
for keyValuePair in items:
# stupid commenting bug in ConfigParser class, lines without an
# option value pair or section required that ; or # are at the
# beginning of the line, :(
newValue = reCommentHack.sub("", keyValuePair[1])
newValue = reCommentNewline.sub("", newValue)
self._dict[section][keyValuePair[0]] = newValue
# end of filling with options given in hodrc
# now start filling in command line options
if self._options:
for option in self._configDef[section].keys():
if self._options[section].has_key(option):
# the user has given an option
compoundOpt = "%s.%s" %(section,option)
if ( compoundOpt == \
'gridservice-mapred.final-server-params' \
or compoundOpt == \
'gridservice-hdfs.final-server-params' \
or compoundOpt == \
'gridservice-mapred.server-params' \
or compoundOpt == \
'gridservice-hdfs.server-params' \
or compoundOpt == \
'hod.client-params' ):
if ( compoundOpt == \
'gridservice-mapred.final-server-params' \
or compoundOpt == \
'gridservice-hdfs.final-server-params' ):
overwrite = False
else: overwrite = True
# Append to the current list of values in self._dict
if not self._dict[section].has_key(option):
self._dict[section][option] = ""
dictOpts = reKeyValList.split(self._dict[section][option])
dictOptsKeyVals = {}
for opt in dictOpts:
if opt != '':
# when dict _has_ params from hodrc
if reKeyVal.search(opt):
(key, val) = reKeyVal.split(opt,1)
# we only consider the first '=' for splitting
# we do this to support passing params like
# mapred.child.java.opts=-Djava.library.path=some_dir
# Even in case of an invalid error like unescaped '=',
# we don't want to fail here itself. We leave such errors
# to be caught during validation which happens after this
dictOptsKeyVals[key] = val
else:
# this means an invalid option. Leaving it
#for config.verify to catch
dictOptsKeyVals[opt] = None
cmdLineOpts = reKeyValList.split(self._options[section][option])
for opt in cmdLineOpts:
if reKeyVal.search(opt):
# Same as for hodrc options. only consider
# the first =
( key, val ) = reKeyVal.split(opt,1)
else:
key = opt
val = None
# whatever is given at cmdline overrides
# what is given in hodrc only for non-final params
if dictOptsKeyVals.has_key(key):
if overwrite:
dictOptsKeyVals[key] = val
else: dictOptsKeyVals[key] = val
self._dict[section][option] = ""
for key in dictOptsKeyVals:
if self._dict[section][option] == "":
if dictOptsKeyVals[key]:
self._dict[section][option] = key + "=" + \
dictOptsKeyVals[key]
else: #invalid option. let config.verify catch
self._dict[section][option] = key
else:
if dictOptsKeyVals[key]:
self._dict[section][option] = \
self._dict[section][option] + "," + key + \
"=" + dictOptsKeyVals[key]
else: #invalid option. let config.verify catch
self._dict[section][option] = \
self._dict[section][option] + "," + key
else:
# for rest of the options, that don't need
# appending business.
# options = cmdline opts + defaults
# dict = hodrc opts only
# only non default opts can overwrite any opt
# currently in dict
if not self._dict[section].has_key(option):
# options not mentioned in hodrc
self._dict[section][option] = \
self._options[section][option]
elif self._configDef[section][option]['default'] != \
self._options[section][option]:
# option mentioned in hodrc but user has given a
# non-default option
self._dict[section][option] = \
self._options[section][option]
## UNUSED METHOD
## zim: is too :)
def __check_perms(self):
perms = None
if self._options:
try:
perms = get_perms(self.configFile)
except OSError, data:
self._options.print_help()
raise Exception("*** could not find config file: %s" % data)
sys.exit(1)
else:
perms = get_perms(self.configFile)
if perms != requiredPerms:
error = "*** '%s' has invalid permission: %s should be %s\n" % \
(self.configFile, perms, requiredPerms)
raise Exception( error)
sys.exit(1)
def replace_escape_seqs(self):
""" replace any escaped characters """
replace_escapes(self)
class formatter(IndentedHelpFormatter):
def format_option_strings(self, option):
"""Return a comma-separated list of option strings & metavariables."""
if option.takes_value():
metavar = option.metavar or option.dest.upper()
short_opts = [sopt
for sopt in option._short_opts]
long_opts = [self._long_opt_fmt % (lopt, metavar)
for lopt in option._long_opts]
else:
short_opts = option._short_opts
long_opts = option._long_opts
if self.short_first:
opts = short_opts + long_opts
else:
opts = long_opts + short_opts
return ", ".join(opts)
class options(OptionParser, baseConfig):
def __init__(self, optionDef, usage, version, originalDir=None,
withConfig=False, defaultConfig=None, defaultLocation=None,
name=None):
"""Constructs and options object.
optionDef - definition object
usage - usage statement
version - version string
withConfig - used in conjunction with a configuration file
defaultConfig - default configuration file
"""
OptionParser.__init__(self, usage=usage)
baseConfig.__init__(self, optionDef, originalDir)
self.formatter = formatter(4, max_help_position=100, width=180,
short_first=1)
self.__name = name
self.__version = version
self.__withConfig = withConfig
self.__defaultConfig = defaultConfig
self.__defaultLoc = defaultLocation
self.args = []
self.__optionList = []
self.__compoundOpts = []
self.__shortMap = {}
        self.__alphaString = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890'
self.__alpha = []
self.__parsedOptions = {}
self.__reserved = [ 'h' ]
self.__orig_grps = []
self.__orig_grp_lists = {}
self.__orig_option_list = []
self.__display_grps = []
self.__display_grp_lists = {}
self.__display_option_list = []
self.config = None
if self.__withConfig:
self.__reserved.append('c')
self.__reserved.append('v')
self.__gen_alpha()
# build self.__optionList, so it contains all the options that are
# possible. the list elements are of the form section.option
for section in self._mySections:
if self.__withConfig and section == 'config':
raise Exception(
"withConfig set 'config' cannot be used as a section name")
for option in self._configDef[section].keys():
if '.' in option:
raise Exception("Options cannot contain: '.'")
elif self.__withConfig and option == 'config':
raise Exception(
"With config set, option config is not allowed.")
elif self.__withConfig and option == 'verbose-help':
raise Exception(
"With config set, option verbose-help is not allowed.")
self.__optionList.append(self.__splice_compound(section,
option))
self.__build_short_map()
self.__add_options()
self.__init_display_options()
(self.__parsedOptions, self.args) = self.parse_args()
# Now process the positional arguments only for the client side
if self.__name == 'hod':
hodhelp = hodHelp()
_operation = getattr(self.__parsedOptions,'hod.operation')
_script = getattr(self.__parsedOptions, 'hod.script')
nArgs = self.args.__len__()
if _operation:
# -o option is given
if nArgs != 0:
self.error('invalid syntax : command and operation(-o) cannot coexist')
elif nArgs == 0 and _script:
# for a script option, without subcommand: hod -s script ...
pass
elif nArgs == 0:
print "Usage: ",hodhelp.help()
sys.exit(0)
else:
# subcommand is given
cmdstr = self.args[0] # the subcommand itself
cmdlist = hodhelp.ops
if cmdstr not in cmdlist:
print "Usage: ", hodhelp.help()
sys.exit(2)
numNodes = None
clusterDir = None
# Check which subcommand. cmdstr = subcommand itself now.
if cmdstr == "allocate":
clusterDir = getattr(self.__parsedOptions, 'hod.clusterdir')
numNodes = getattr(self.__parsedOptions, 'hod.nodecount')
if not clusterDir or not numNodes:
print hodhelp.usage(cmdstr)
sys.exit(3)
cmdstr = cmdstr + ' ' + clusterDir + ' ' + numNodes
setattr(self.__parsedOptions,'hod.operation', cmdstr)
elif cmdstr == "deallocate" or cmdstr == "info":
clusterDir = getattr(self.__parsedOptions, 'hod.clusterdir')
if not clusterDir:
print hodhelp.usage(cmdstr)
sys.exit(3)
cmdstr = cmdstr + ' ' + clusterDir
setattr(self.__parsedOptions,'hod.operation', cmdstr)
elif cmdstr == "list":
setattr(self.__parsedOptions,'hod.operation', cmdstr)
pass
elif cmdstr == "script":
clusterDir = getattr(self.__parsedOptions, 'hod.clusterdir')
numNodes = getattr(self.__parsedOptions, 'hod.nodecount')
originalDir = getattr(self.__parsedOptions, 'hod.original-dir')
if originalDir and clusterDir:
self.remove_exit_code_file(originalDir, clusterDir)
if not _script or not clusterDir or not numNodes:
print hodhelp.usage(cmdstr)
sys.exit(3)
pass
elif cmdstr == "help":
if nArgs == 1:
self.print_help()
sys.exit(0)
elif nArgs != 2:
self.print_help()
sys.exit(3)
elif self.args[1] == 'options':
self.print_options()
sys.exit(0)
cmdstr = cmdstr + ' ' + self.args[1]
setattr(self.__parsedOptions,'hod.operation', cmdstr)
# end of processing for arguments on the client side
if self.__withConfig:
self.config = self.__parsedOptions.config
if not self.config:
self.error("configuration file must be specified")
if not os.path.isabs(self.config):
# A relative path. Append the original directory which would be the
# current directory at the time of launch
try:
origDir = getattr(self.__parsedOptions, 'hod.original-dir')
if origDir is not None:
self.config = os.path.join(origDir, self.config)
self.__parsedOptions.config = self.config
except AttributeError, e:
self.error("hod.original-dir is not defined.\
Cannot get current directory")
if not os.path.exists(self.config):
if self.__defaultLoc and not re.search("/", self.config):
self.__parsedOptions.config = os.path.join(
self.__defaultLoc, self.config)
self.__build_dict()
def norm_cluster_dir(self, orig_dir, directory):
directory = os.path.expanduser(directory)
if not os.path.isabs(directory):
directory = os.path.join(orig_dir, directory)
directory = os.path.abspath(directory)
return directory
def remove_exit_code_file(self, orig_dir, dir):
try:
dir = self.norm_cluster_dir(orig_dir, dir)
if os.path.exists(dir):
exit_code_file = os.path.join(dir, "script.exitcode")
if os.path.exists(exit_code_file):
os.remove(exit_code_file)
except:
print >>sys.stderr, "Could not remove the script.exitcode file."
def __init_display_options(self):
self.__orig_option_list = self.option_list[:]
optionListTitleMap = {}
for option in self.option_list:
optionListTitleMap[option._long_opts[0]] = option
self.__orig_grps = self.option_groups[:]
for group in self.option_groups:
self.__orig_grp_lists[group.title] = group.option_list[:]
groupTitleMap = {}
optionTitleMap = {}
for group in self.option_groups:
groupTitleMap[group.title] = group
optionTitleMap[group.title] = {}
for option in group.option_list:
(sectionName, optionName) = \
self.__split_compound(option._long_opts[0])
optionTitleMap[group.title][optionName] = option
for section in self._mySections:
for option in self._configDef[section]:
if self._configDef[section][option]['help']:
if groupTitleMap.has_key(section):
if not self.__display_grp_lists.has_key(section):
self.__display_grp_lists[section] = []
self.__display_grp_lists[section].append(
optionTitleMap[section][option])
try:
self.__display_option_list.append(
optionListTitleMap["--" + self.__splice_compound(
section, option)])
except KeyError:
pass
try:
self.__display_option_list.append(optionListTitleMap['--config'])
except KeyError:
pass
self.__display_option_list.append(optionListTitleMap['--help'])
self.__display_option_list.append(optionListTitleMap['--verbose-help'])
self.__display_option_list.append(optionListTitleMap['--version'])
self.__display_grps = self.option_groups[:]
for section in self._mySections:
if self.__display_grp_lists.has_key(section):
self.__orig_grp_lists[section] = \
groupTitleMap[section].option_list
else:
try:
self.__display_grps.remove(groupTitleMap[section])
except KeyError:
pass
def __gen_alpha(self):
assignedOptions = []
for section in self._configDef:
for option in self._configDef[section]:
if self._configDef[section][option]['short']:
assignedOptions.append(
self._configDef[section][option]['short'])
for symbol in self.__alphaString:
if not symbol in assignedOptions:
self.__alpha.append(symbol)
def __splice_compound(self, section, option):
return "%s.%s" % (section, option)
def __split_compound(self, compound):
return compound.split('.')
def __build_short_map(self):
""" build a short_map of parametername : short_option. This is done
only for those parameters that don't have short options already
defined in configDef.
If possible, the first letter in the option that is not already
used/reserved as a short option is allotted. Otherwise the first
letter in __alpha that isn't still used is allotted.
e.g. { 'hodring.java-home': 'T', 'resource_manager.batch-home': 'B' }
"""
optionsKey = {}
for compound in self.__optionList:
(section, option) = self.__split_compound(compound)
if not optionsKey.has_key(section):
optionsKey[section] = []
optionsKey[section].append(option)
for section in self._configDef.sections():
options = optionsKey[section]
options.sort()
for option in options:
if not self._configDef[section][option]['short']:
compound = self.__splice_compound(section, option)
shortOptions = self.__shortMap.values()
for i in range(0, len(option)):
letter = option[i]
letter = letter.lower()
if letter in self.__alpha:
if not letter in shortOptions and \
not letter in self.__reserved:
self.__shortMap[compound] = letter
break
if not self.__shortMap.has_key(compound):
for i in range(0, len(self.__alpha)):
letter = self.__alpha[i]
if not letter in shortOptions and \
not letter in self.__reserved:
self.__shortMap[compound] = letter
def __add_option(self, config, compoundOpt, section, option, group=None):
addMethod = self.add_option
if group: addMethod=group.add_option
self.__compoundOpts.append(compoundOpt)
if compoundOpt == 'gridservice-mapred.final-server-params' or \
compoundOpt == 'gridservice-hdfs.final-server-params' or \
compoundOpt == 'gridservice-mapred.server-params' or \
compoundOpt == 'gridservice-hdfs.server-params' or \
compoundOpt == 'hod.client-params':
_action = 'append'
elif config[section][option]['type'] == 'bool':
_action = 'store_true'
else:
_action = 'store'
if self.__shortMap.has_key(compoundOpt):
addMethod("-" + self.__shortMap[compoundOpt],
"--" + compoundOpt, dest=compoundOpt,
action= _action,
metavar=config[section][option]['type'],
default=config[section][option]['default'],
help=config[section][option]['desc'])
else:
if config[section][option]['short']:
addMethod("-" + config[section][option]['short'],
"--" + compoundOpt, dest=compoundOpt,
action= _action,
metavar=config[section][option]['type'],
default=config[section][option]['default'],
help=config[section][option]['desc'])
else:
addMethod('', "--" + compoundOpt, dest=compoundOpt,
action= _action,
metavar=config[section][option]['type'],
default=config[section][option]['default'],
help=config[section][option]['desc'])
def __add_options(self):
if self.__withConfig:
self.add_option("-c", "--config", dest='config',
action='store', default=self.__defaultConfig,
metavar='config_file',
help="Full path to configuration file.")
self.add_option("", "--verbose-help",
action='help', default=None,
metavar='flag',
help="Display verbose help information.")
self.add_option("-v", "--version",
action='version', default=None,
metavar='flag',
help="Display version information.")
self.version = self.__version
if len(self._mySections) > 1:
for section in self._mySections:
group = OptionGroup(self, section)
for option in self._configDef[section]:
compoundOpt = self.__splice_compound(section, option)
self.__add_option(self._configDef, compoundOpt, section,
option, group)
self.add_option_group(group)
else:
for section in self._mySections:
for option in self._configDef[section]:
compoundOpt = self.__splice_compound(section, option)
self.__add_option(self._configDef, compoundOpt, section,
option)
def __build_dict(self):
if self.__withConfig:
self._dict['config'] = str(getattr(self.__parsedOptions, 'config'))
for compoundOption in dir(self.__parsedOptions):
if compoundOption in self.__compoundOpts:
(section, option) = self.__split_compound(compoundOption)
if not self._dict.has_key(section):
self._dict[section] = {}
if getattr(self.__parsedOptions, compoundOption):
_attr = getattr(self.__parsedOptions, compoundOption)
# when we have multi-valued parameters passed separately
# from command line, python optparser pushes them into a
# list. So converting all such lists to strings
if type(_attr) == type([]):
import string
_attr = string.join(_attr,',')
self._dict[section][option] = _attr
for section in self._configDef:
for option in self._configDef[section]:
if self._configDef[section][option]['type'] == 'bool':
compoundOption = self.__splice_compound(section, option)
if not self._dict.has_key(section):
self._dict[section] = {}
if option not in self._dict[section]:
self._dict[section][option] = False
def __set_display_groups(self):
if not '--verbose-help' in sys.argv:
self.option_groups = self.__display_grps
self.option_list = self.__display_option_list
for group in self.option_groups:
group.option_list = self.__display_grp_lists[group.title]
def __unset_display_groups(self):
if not '--verbose-help' in sys.argv:
self.option_groups = self.__orig_grps
self.option_list = self.__orig_option_list
for group in self.option_groups:
group.option_list = self.__orig_grp_lists[group.title]
def print_help(self, file=None):
self.__set_display_groups()
OptionParser.print_help(self, file)
self.__unset_display_groups()
def print_options(self):
_usage = self.usage
self.set_usage('')
self.print_help()
self.set_usage(_usage)
def verify(self):
return baseConfig.verify(self)
def replace_escape_seqs(self):
replace_escapes(self)
| apache-2.0 |
an7oine/WinVHS | Cygwin/lib/python2.7/distutils/command/bdist_msi.py | 164 | 35191 | # -*- coding: iso-8859-1 -*-
# Copyright (C) 2005, 2006 Martin von Löwis
# Licensed to PSF under a Contributor Agreement.
# The bdist_wininst command proper
# based on bdist_wininst
"""
Implements the bdist_msi command.
"""
import sys, os
from sysconfig import get_python_version
from distutils.core import Command
from distutils.dir_util import remove_tree
from distutils.version import StrictVersion
from distutils.errors import DistutilsOptionError
from distutils import log
from distutils.util import get_platform
import msilib
from msilib import schema, sequence, text
from msilib import Directory, Feature, Dialog, add_data
class PyDialog(Dialog):
"""Dialog class with a fixed layout: controls at the top, then a ruler,
then a list of buttons: back, next, cancel. Optionally a bitmap at the
left."""
def __init__(self, *args, **kw):
"""Dialog(database, name, x, y, w, h, attributes, title, first,
default, cancel, bitmap=true)"""
Dialog.__init__(self, *args)
ruler = self.h - 36
#if kw.get("bitmap", True):
# self.bitmap("Bitmap", 0, 0, bmwidth, ruler, "PythonWin")
self.line("BottomLine", 0, ruler, self.w, 0)
def title(self, title):
"Set the title text of the dialog at the top."
# name, x, y, w, h, flags=Visible|Enabled|Transparent|NoPrefix,
# text, in VerdanaBold10
self.text("Title", 15, 10, 320, 60, 0x30003,
r"{\VerdanaBold10}%s" % title)
def back(self, title, next, name = "Back", active = 1):
"""Add a back button with a given title, the tab-next button,
its name in the Control table, possibly initially disabled.
Return the button, so that events can be associated"""
if active:
flags = 3 # Visible|Enabled
else:
flags = 1 # Visible
return self.pushbutton(name, 180, self.h-27 , 56, 17, flags, title, next)
def cancel(self, title, next, name = "Cancel", active = 1):
"""Add a cancel button with a given title, the tab-next button,
its name in the Control table, possibly initially disabled.
Return the button, so that events can be associated"""
if active:
flags = 3 # Visible|Enabled
else:
flags = 1 # Visible
return self.pushbutton(name, 304, self.h-27, 56, 17, flags, title, next)
def next(self, title, next, name = "Next", active = 1):
"""Add a Next button with a given title, the tab-next button,
its name in the Control table, possibly initially disabled.
Return the button, so that events can be associated"""
if active:
flags = 3 # Visible|Enabled
else:
flags = 1 # Visible
return self.pushbutton(name, 236, self.h-27, 56, 17, flags, title, next)
def xbutton(self, name, title, next, xpos):
"""Add a button with a given title, the tab-next button,
its name in the Control table, giving its x position; the
y-position is aligned with the other buttons.
Return the button, so that events can be associated"""
return self.pushbutton(name, int(self.w*xpos - 28), self.h-27, 56, 17, 3, title, next)
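# Hedged usage sketch (geometry, names and titles are illustrative): per the
# Dialog signature quoted in __init__ above,
#   d = PyDialog(db, "FeatureDlg", 50, 50, 370, 300, 3, "Title",
#                "Next", "Next", "Cancel")
#   d.title("Select features")
#   d.back("< Back", "Next"); d.next("Next >", "Cancel")
#   d.cancel("Cancel", "Back")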
class bdist_msi (Command):
description = "create a Microsoft Installer (.msi) binary distribution"
user_options = [('bdist-dir=', None,
"temporary directory for creating the distribution"),
('plat-name=', 'p',
"platform name to embed in generated filenames "
"(default: %s)" % get_platform()),
('keep-temp', 'k',
"keep the pseudo-installation tree around after " +
"creating the distribution archive"),
('target-version=', None,
"require a specific python version" +
" on the target system"),
('no-target-compile', 'c',
"do not compile .py to .pyc on the target system"),
('no-target-optimize', 'o',
"do not compile .py to .pyo (optimized)"
"on the target system"),
('dist-dir=', 'd',
"directory to put final built distributions in"),
('skip-build', None,
"skip rebuilding everything (for testing/debugging)"),
('install-script=', None,
"basename of installation script to be run after"
"installation or before deinstallation"),
('pre-install-script=', None,
"Fully qualified filename of a script to be run before "
"any files are installed. This script need not be in the "
"distribution"),
]
boolean_options = ['keep-temp', 'no-target-compile', 'no-target-optimize',
'skip-build']
all_versions = ['2.0', '2.1', '2.2', '2.3', '2.4',
'2.5', '2.6', '2.7', '2.8', '2.9',
'3.0', '3.1', '3.2', '3.3', '3.4',
'3.5', '3.6', '3.7', '3.8', '3.9']
other_version = 'X'
def initialize_options (self):
self.bdist_dir = None
self.plat_name = None
self.keep_temp = 0
self.no_target_compile = 0
self.no_target_optimize = 0
self.target_version = None
self.dist_dir = None
self.skip_build = None
self.install_script = None
self.pre_install_script = None
self.versions = None
def finalize_options (self):
self.set_undefined_options('bdist', ('skip_build', 'skip_build'))
if self.bdist_dir is None:
bdist_base = self.get_finalized_command('bdist').bdist_base
self.bdist_dir = os.path.join(bdist_base, 'msi')
short_version = get_python_version()
if (not self.target_version) and self.distribution.has_ext_modules():
self.target_version = short_version
if self.target_version:
self.versions = [self.target_version]
if not self.skip_build and self.distribution.has_ext_modules()\
and self.target_version != short_version:
raise DistutilsOptionError, \
"target version can only be %s, or the '--skip-build'" \
" option must be specified" % (short_version,)
else:
self.versions = list(self.all_versions)
self.set_undefined_options('bdist',
('dist_dir', 'dist_dir'),
('plat_name', 'plat_name'),
)
if self.pre_install_script:
raise DistutilsOptionError, "the pre-install-script feature is not yet implemented"
if self.install_script:
for script in self.distribution.scripts:
if self.install_script == os.path.basename(script):
break
else:
raise DistutilsOptionError, \
"install_script '%s' not found in scripts" % \
self.install_script
self.install_script_key = None
# finalize_options()
def run (self):
if not self.skip_build:
self.run_command('build')
install = self.reinitialize_command('install', reinit_subcommands=1)
install.prefix = self.bdist_dir
install.skip_build = self.skip_build
install.warn_dir = 0
install_lib = self.reinitialize_command('install_lib')
# we do not want to include pyc or pyo files
install_lib.compile = 0
install_lib.optimize = 0
if self.distribution.has_ext_modules():
# If we are building an installer for a Python version other
# than the one we are currently running, then we need to ensure
# our build_lib reflects the other Python version rather than ours.
# Note that for target_version!=sys.version, we must have skipped the
# build step, so there is no issue with enforcing the build of this
# version.
target_version = self.target_version
if not target_version:
assert self.skip_build, "Should have already checked this"
target_version = sys.version[0:3]
plat_specifier = ".%s-%s" % (self.plat_name, target_version)
build = self.get_finalized_command('build')
build.build_lib = os.path.join(build.build_base,
'lib' + plat_specifier)
log.info("installing to %s", self.bdist_dir)
install.ensure_finalized()
# avoid warning of 'install_lib' about installing
# into a directory not in sys.path
sys.path.insert(0, os.path.join(self.bdist_dir, 'PURELIB'))
install.run()
del sys.path[0]
self.mkpath(self.dist_dir)
fullname = self.distribution.get_fullname()
installer_name = self.get_installer_filename(fullname)
installer_name = os.path.abspath(installer_name)
if os.path.exists(installer_name): os.unlink(installer_name)
metadata = self.distribution.metadata
author = metadata.author
if not author:
author = metadata.maintainer
if not author:
author = "UNKNOWN"
version = metadata.get_version()
# ProductVersion must be strictly numeric
# XXX need to deal with prerelease versions
sversion = "%d.%d.%d" % StrictVersion(version).version
# Prefix ProductName with Python x.y, so that
# it sorts together with the other Python packages
        # in Add/Remove Programs (ARP)
fullname = self.distribution.get_fullname()
if self.target_version:
product_name = "Python %s %s" % (self.target_version, fullname)
else:
product_name = "Python %s" % (fullname)
self.db = msilib.init_database(installer_name, schema,
product_name, msilib.gen_uuid(),
sversion, author)
msilib.add_tables(self.db, sequence)
props = [('DistVersion', version)]
email = metadata.author_email or metadata.maintainer_email
if email:
props.append(("ARPCONTACT", email))
if metadata.url:
props.append(("ARPURLINFOABOUT", metadata.url))
if props:
add_data(self.db, 'Property', props)
self.add_find_python()
self.add_files()
self.add_scripts()
self.add_ui()
self.db.Commit()
if hasattr(self.distribution, 'dist_files'):
tup = 'bdist_msi', self.target_version or 'any', fullname
self.distribution.dist_files.append(tup)
if not self.keep_temp:
remove_tree(self.bdist_dir, dry_run=self.dry_run)
def add_files(self):
db = self.db
cab = msilib.CAB("distfiles")
rootdir = os.path.abspath(self.bdist_dir)
root = Directory(db, cab, None, rootdir, "TARGETDIR", "SourceDir")
f = Feature(db, "Python", "Python", "Everything",
0, 1, directory="TARGETDIR")
items = [(f, root, '')]
for version in self.versions + [self.other_version]:
target = "TARGETDIR" + version
name = default = "Python" + version
desc = "Everything"
if version is self.other_version:
title = "Python from another location"
level = 2
else:
title = "Python %s from registry" % version
level = 1
f = Feature(db, name, title, desc, 1, level, directory=target)
dir = Directory(db, cab, root, rootdir, target, default)
items.append((f, dir, version))
db.Commit()
seen = {}
for feature, dir, version in items:
todo = [dir]
while todo:
dir = todo.pop()
for file in os.listdir(dir.absolute):
afile = os.path.join(dir.absolute, file)
if os.path.isdir(afile):
short = "%s|%s" % (dir.make_short(file), file)
default = file + version
newdir = Directory(db, cab, dir, file, default, short)
todo.append(newdir)
else:
if not dir.component:
dir.start_component(dir.logical, feature, 0)
if afile not in seen:
key = seen[afile] = dir.add_file(file)
if file==self.install_script:
if self.install_script_key:
raise DistutilsOptionError(
"Multiple files with name %s" % file)
self.install_script_key = '[#%s]' % key
else:
key = seen[afile]
add_data(self.db, "DuplicateFile",
[(key + version, dir.component, key, None, dir.logical)])
db.Commit()
cab.commit(db)
def add_find_python(self):
"""Adds code to the installer to compute the location of Python.
Properties PYTHON.MACHINE.X.Y and PYTHON.USER.X.Y will be set from the
registry for each version of Python.
Properties TARGETDIRX.Y will be set from PYTHON.USER.X.Y if defined,
else from PYTHON.MACHINE.X.Y.
Properties PYTHONX.Y will be set to TARGETDIRX.Y\\python.exe"""
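        # For example (illustration only), with ver == "2.7" the loop below
        # creates RegLocator rows python.machine.2.7 / python.user.2.7 that
        # read SOFTWARE\Python\PythonCore\2.7\InstallPath, AppSearch rows
        # filling PYTHON.MACHINE.2.7 / PYTHON.USER.2.7, and custom actions
        # deriving TARGETDIR2.7 and PYTHON2.7 (TARGETDIR2.7\python.exe).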
start = 402
for ver in self.versions:
install_path = r"SOFTWARE\Python\PythonCore\%s\InstallPath" % ver
machine_reg = "python.machine." + ver
user_reg = "python.user." + ver
machine_prop = "PYTHON.MACHINE." + ver
user_prop = "PYTHON.USER." + ver
machine_action = "PythonFromMachine" + ver
user_action = "PythonFromUser" + ver
exe_action = "PythonExe" + ver
target_dir_prop = "TARGETDIR" + ver
exe_prop = "PYTHON" + ver
if msilib.Win64:
# type: msidbLocatorTypeRawValue + msidbLocatorType64bit
Type = 2+16
else:
Type = 2
add_data(self.db, "RegLocator",
[(machine_reg, 2, install_path, None, Type),
(user_reg, 1, install_path, None, Type)])
add_data(self.db, "AppSearch",
[(machine_prop, machine_reg),
(user_prop, user_reg)])
add_data(self.db, "CustomAction",
[(machine_action, 51+256, target_dir_prop, "[" + machine_prop + "]"),
(user_action, 51+256, target_dir_prop, "[" + user_prop + "]"),
(exe_action, 51+256, exe_prop, "[" + target_dir_prop + "]\\python.exe"),
])
add_data(self.db, "InstallExecuteSequence",
[(machine_action, machine_prop, start),
(user_action, user_prop, start + 1),
(exe_action, None, start + 2),
])
add_data(self.db, "InstallUISequence",
[(machine_action, machine_prop, start),
(user_action, user_prop, start + 1),
(exe_action, None, start + 2),
])
add_data(self.db, "Condition",
[("Python" + ver, 0, "NOT TARGETDIR" + ver)])
start += 4
assert start < 500
def add_scripts(self):
if self.install_script:
start = 6800
for ver in self.versions + [self.other_version]:
install_action = "install_script." + ver
exe_prop = "PYTHON" + ver
add_data(self.db, "CustomAction",
[(install_action, 50, exe_prop, self.install_script_key)])
add_data(self.db, "InstallExecuteSequence",
[(install_action, "&Python%s=3" % ver, start)])
start += 1
# XXX pre-install scripts are currently refused in finalize_options()
# but if this feature is completed, it will also need to add
# entries for each version as the above code does
if self.pre_install_script:
scriptfn = os.path.join(self.bdist_dir, "preinstall.bat")
f = open(scriptfn, "w")
# The batch file will be executed with [PYTHON], so that %1
# is the path to the Python interpreter; %0 will be the path
# of the batch file.
# rem ="""
# %1 %0
# exit
# """
# <actual script>
f.write('rem ="""\n%1 %0\nexit\n"""\n')
f.write(open(self.pre_install_script).read())
f.close()
add_data(self.db, "Binary",
[("PreInstall", msilib.Binary(scriptfn))
])
add_data(self.db, "CustomAction",
[("PreInstall", 2, "PreInstall", None)
])
add_data(self.db, "InstallExecuteSequence",
[("PreInstall", "NOT Installed", 450)])
def add_ui(self):
db = self.db
x = y = 50
w = 370
h = 300
title = "[ProductName] Setup"
# see "Dialog Style Bits"
modal = 3 # visible | modal
modeless = 1 # visible
# UI customization properties
add_data(db, "Property",
# See "DefaultUIFont Property"
[("DefaultUIFont", "DlgFont8"),
# See "ErrorDialog Style Bit"
("ErrorDialog", "ErrorDlg"),
("Progress1", "Install"), # modified in maintenance type dlg
("Progress2", "installs"),
("MaintenanceForm_Action", "Repair"),
# possible values: ALL, JUSTME
("WhichUsers", "ALL")
])
# Fonts, see "TextStyle Table"
add_data(db, "TextStyle",
[("DlgFont8", "Tahoma", 9, None, 0),
("DlgFontBold8", "Tahoma", 8, None, 1), #bold
("VerdanaBold10", "Verdana", 10, None, 1),
("VerdanaRed9", "Verdana", 9, 255, 0),
])
# UI Sequences, see "InstallUISequence Table", "Using a Sequence Table"
        # Numbers indicate sequence; see sequence.py for how these actions integrate
add_data(db, "InstallUISequence",
[("PrepareDlg", "Not Privileged or Windows9x or Installed", 140),
("WhichUsersDlg", "Privileged and not Windows9x and not Installed", 141),
# In the user interface, assume all-users installation if privileged.
("SelectFeaturesDlg", "Not Installed", 1230),
# XXX no support for resume installations yet
#("ResumeDlg", "Installed AND (RESUME OR Preselected)", 1240),
("MaintenanceTypeDlg", "Installed AND NOT RESUME AND NOT Preselected", 1250),
("ProgressDlg", None, 1280)])
add_data(db, 'ActionText', text.ActionText)
add_data(db, 'UIText', text.UIText)
#####################################################################
# Standard dialogs: FatalError, UserExit, ExitDialog
fatal=PyDialog(db, "FatalError", x, y, w, h, modal, title,
"Finish", "Finish", "Finish")
fatal.title("[ProductName] Installer ended prematurely")
fatal.back("< Back", "Finish", active = 0)
fatal.cancel("Cancel", "Back", active = 0)
fatal.text("Description1", 15, 70, 320, 80, 0x30003,
"[ProductName] setup ended prematurely because of an error. Your system has not been modified. To install this program at a later time, please run the installation again.")
fatal.text("Description2", 15, 155, 320, 20, 0x30003,
"Click the Finish button to exit the Installer.")
c=fatal.next("Finish", "Cancel", name="Finish")
c.event("EndDialog", "Exit")
user_exit=PyDialog(db, "UserExit", x, y, w, h, modal, title,
"Finish", "Finish", "Finish")
user_exit.title("[ProductName] Installer was interrupted")
user_exit.back("< Back", "Finish", active = 0)
user_exit.cancel("Cancel", "Back", active = 0)
user_exit.text("Description1", 15, 70, 320, 80, 0x30003,
"[ProductName] setup was interrupted. Your system has not been modified. "
"To install this program at a later time, please run the installation again.")
user_exit.text("Description2", 15, 155, 320, 20, 0x30003,
"Click the Finish button to exit the Installer.")
c = user_exit.next("Finish", "Cancel", name="Finish")
c.event("EndDialog", "Exit")
exit_dialog = PyDialog(db, "ExitDialog", x, y, w, h, modal, title,
"Finish", "Finish", "Finish")
exit_dialog.title("Completing the [ProductName] Installer")
exit_dialog.back("< Back", "Finish", active = 0)
exit_dialog.cancel("Cancel", "Back", active = 0)
exit_dialog.text("Description", 15, 235, 320, 20, 0x30003,
"Click the Finish button to exit the Installer.")
c = exit_dialog.next("Finish", "Cancel", name="Finish")
c.event("EndDialog", "Return")
#####################################################################
# Required dialog: FilesInUse, ErrorDlg
inuse = PyDialog(db, "FilesInUse",
x, y, w, h,
19, # KeepModeless|Modal|Visible
title,
"Retry", "Retry", "Retry", bitmap=False)
inuse.text("Title", 15, 6, 200, 15, 0x30003,
r"{\DlgFontBold8}Files in Use")
inuse.text("Description", 20, 23, 280, 20, 0x30003,
"Some files that need to be updated are currently in use.")
inuse.text("Text", 20, 55, 330, 50, 3,
"The following applications are using files that need to be updated by this setup. Close these applications and then click Retry to continue the installation or Cancel to exit it.")
inuse.control("List", "ListBox", 20, 107, 330, 130, 7, "FileInUseProcess",
None, None, None)
c=inuse.back("Exit", "Ignore", name="Exit")
c.event("EndDialog", "Exit")
c=inuse.next("Ignore", "Retry", name="Ignore")
c.event("EndDialog", "Ignore")
c=inuse.cancel("Retry", "Exit", name="Retry")
c.event("EndDialog","Retry")
# See "Error Dialog". See "ICE20" for the required names of the controls.
error = Dialog(db, "ErrorDlg",
50, 10, 330, 101,
65543, # Error|Minimize|Modal|Visible
title,
"ErrorText", None, None)
error.text("ErrorText", 50,9,280,48,3, "")
#error.control("ErrorIcon", "Icon", 15, 9, 24, 24, 5242881, None, "py.ico", None, None)
error.pushbutton("N",120,72,81,21,3,"No",None).event("EndDialog","ErrorNo")
error.pushbutton("Y",240,72,81,21,3,"Yes",None).event("EndDialog","ErrorYes")
error.pushbutton("A",0,72,81,21,3,"Abort",None).event("EndDialog","ErrorAbort")
error.pushbutton("C",42,72,81,21,3,"Cancel",None).event("EndDialog","ErrorCancel")
error.pushbutton("I",81,72,81,21,3,"Ignore",None).event("EndDialog","ErrorIgnore")
error.pushbutton("O",159,72,81,21,3,"Ok",None).event("EndDialog","ErrorOk")
error.pushbutton("R",198,72,81,21,3,"Retry",None).event("EndDialog","ErrorRetry")
#####################################################################
# Global "Query Cancel" dialog
cancel = Dialog(db, "CancelDlg", 50, 10, 260, 85, 3, title,
"No", "No", "No")
cancel.text("Text", 48, 15, 194, 30, 3,
"Are you sure you want to cancel [ProductName] installation?")
#cancel.control("Icon", "Icon", 15, 15, 24, 24, 5242881, None,
# "py.ico", None, None)
c=cancel.pushbutton("Yes", 72, 57, 56, 17, 3, "Yes", "No")
c.event("EndDialog", "Exit")
c=cancel.pushbutton("No", 132, 57, 56, 17, 3, "No", "Yes")
c.event("EndDialog", "Return")
#####################################################################
# Global "Wait for costing" dialog
costing = Dialog(db, "WaitForCostingDlg", 50, 10, 260, 85, modal, title,
"Return", "Return", "Return")
costing.text("Text", 48, 15, 194, 30, 3,
"Please wait while the installer finishes determining your disk space requirements.")
c = costing.pushbutton("Return", 102, 57, 56, 17, 3, "Return", None)
c.event("EndDialog", "Exit")
#####################################################################
# Preparation dialog: no user input except cancellation
prep = PyDialog(db, "PrepareDlg", x, y, w, h, modeless, title,
"Cancel", "Cancel", "Cancel")
prep.text("Description", 15, 70, 320, 40, 0x30003,
"Please wait while the Installer prepares to guide you through the installation.")
prep.title("Welcome to the [ProductName] Installer")
c=prep.text("ActionText", 15, 110, 320, 20, 0x30003, "Pondering...")
c.mapping("ActionText", "Text")
c=prep.text("ActionData", 15, 135, 320, 30, 0x30003, None)
c.mapping("ActionData", "Text")
prep.back("Back", None, active=0)
prep.next("Next", None, active=0)
c=prep.cancel("Cancel", None)
c.event("SpawnDialog", "CancelDlg")
#####################################################################
# Feature (Python directory) selection
seldlg = PyDialog(db, "SelectFeaturesDlg", x, y, w, h, modal, title,
"Next", "Next", "Cancel")
seldlg.title("Select Python Installations")
seldlg.text("Hint", 15, 30, 300, 20, 3,
"Select the Python locations where %s should be installed."
% self.distribution.get_fullname())
seldlg.back("< Back", None, active=0)
c = seldlg.next("Next >", "Cancel")
order = 1
c.event("[TARGETDIR]", "[SourceDir]", ordering=order)
for version in self.versions + [self.other_version]:
order += 1
c.event("[TARGETDIR]", "[TARGETDIR%s]" % version,
"FEATURE_SELECTED AND &Python%s=3" % version,
ordering=order)
c.event("SpawnWaitDialog", "WaitForCostingDlg", ordering=order + 1)
c.event("EndDialog", "Return", ordering=order + 2)
c = seldlg.cancel("Cancel", "Features")
c.event("SpawnDialog", "CancelDlg")
c = seldlg.control("Features", "SelectionTree", 15, 60, 300, 120, 3,
"FEATURE", None, "PathEdit", None)
c.event("[FEATURE_SELECTED]", "1")
ver = self.other_version
install_other_cond = "FEATURE_SELECTED AND &Python%s=3" % ver
dont_install_other_cond = "FEATURE_SELECTED AND &Python%s<>3" % ver
c = seldlg.text("Other", 15, 200, 300, 15, 3,
"Provide an alternate Python location")
c.condition("Enable", install_other_cond)
c.condition("Show", install_other_cond)
c.condition("Disable", dont_install_other_cond)
c.condition("Hide", dont_install_other_cond)
c = seldlg.control("PathEdit", "PathEdit", 15, 215, 300, 16, 1,
"TARGETDIR" + ver, None, "Next", None)
c.condition("Enable", install_other_cond)
c.condition("Show", install_other_cond)
c.condition("Disable", dont_install_other_cond)
c.condition("Hide", dont_install_other_cond)
#####################################################################
# Disk cost
cost = PyDialog(db, "DiskCostDlg", x, y, w, h, modal, title,
"OK", "OK", "OK", bitmap=False)
cost.text("Title", 15, 6, 200, 15, 0x30003,
"{\DlgFontBold8}Disk Space Requirements")
cost.text("Description", 20, 20, 280, 20, 0x30003,
"The disk space required for the installation of the selected features.")
cost.text("Text", 20, 53, 330, 60, 3,
"The highlighted volumes (if any) do not have enough disk space "
"available for the currently selected features. You can either "
"remove some files from the highlighted volumes, or choose to "
"install less features onto local drive(s), or select different "
"destination drive(s).")
cost.control("VolumeList", "VolumeCostList", 20, 100, 330, 150, 393223,
None, "{120}{70}{70}{70}{70}", None, None)
cost.xbutton("OK", "Ok", None, 0.5).event("EndDialog", "Return")
#####################################################################
# WhichUsers Dialog. Only available on NT, and for privileged users.
# This must be run before FindRelatedProducts, because that will
# take into account whether the previous installation was per-user
# or per-machine. We currently don't support going back to this
# dialog after "Next" was selected; to support this, we would need to
# find how to reset the ALLUSERS property, and how to re-run
# FindRelatedProducts.
        # On Windows9x, the ALLUSERS property is ignored on the command line
        # and in the Property table but, according to the documentation, the
        # installer fails if a dialog attempts to set ALLUSERS.
whichusers = PyDialog(db, "WhichUsersDlg", x, y, w, h, modal, title,
"AdminInstall", "Next", "Cancel")
whichusers.title("Select whether to install [ProductName] for all users of this computer.")
# A radio group with two options: allusers, justme
g = whichusers.radiogroup("AdminInstall", 15, 60, 260, 50, 3,
"WhichUsers", "", "Next")
g.add("ALL", 0, 5, 150, 20, "Install for all users")
g.add("JUSTME", 0, 25, 150, 20, "Install just for me")
whichusers.back("Back", None, active=0)
c = whichusers.next("Next >", "Cancel")
c.event("[ALLUSERS]", "1", 'WhichUsers="ALL"', 1)
c.event("EndDialog", "Return", ordering = 2)
c = whichusers.cancel("Cancel", "AdminInstall")
c.event("SpawnDialog", "CancelDlg")
#####################################################################
# Installation Progress dialog (modeless)
progress = PyDialog(db, "ProgressDlg", x, y, w, h, modeless, title,
"Cancel", "Cancel", "Cancel", bitmap=False)
progress.text("Title", 20, 15, 200, 15, 0x30003,
"{\DlgFontBold8}[Progress1] [ProductName]")
progress.text("Text", 35, 65, 300, 30, 3,
"Please wait while the Installer [Progress2] [ProductName]. "
"This may take several minutes.")
progress.text("StatusLabel", 35, 100, 35, 20, 3, "Status:")
c=progress.text("ActionText", 70, 100, w-70, 20, 3, "Pondering...")
c.mapping("ActionText", "Text")
#c=progress.text("ActionData", 35, 140, 300, 20, 3, None)
#c.mapping("ActionData", "Text")
c=progress.control("ProgressBar", "ProgressBar", 35, 120, 300, 10, 65537,
None, "Progress done", None, None)
c.mapping("SetProgress", "Progress")
progress.back("< Back", "Next", active=False)
progress.next("Next >", "Cancel", active=False)
progress.cancel("Cancel", "Back").event("SpawnDialog", "CancelDlg")
###################################################################
# Maintenance type: repair/uninstall
maint = PyDialog(db, "MaintenanceTypeDlg", x, y, w, h, modal, title,
"Next", "Next", "Cancel")
maint.title("Welcome to the [ProductName] Setup Wizard")
maint.text("BodyText", 15, 63, 330, 42, 3,
"Select whether you want to repair or remove [ProductName].")
g=maint.radiogroup("RepairRadioGroup", 15, 108, 330, 60, 3,
"MaintenanceForm_Action", "", "Next")
#g.add("Change", 0, 0, 200, 17, "&Change [ProductName]")
g.add("Repair", 0, 18, 200, 17, "&Repair [ProductName]")
g.add("Remove", 0, 36, 200, 17, "Re&move [ProductName]")
maint.back("< Back", None, active=False)
c=maint.next("Finish", "Cancel")
# Change installation: Change progress dialog to "Change", then ask
# for feature selection
#c.event("[Progress1]", "Change", 'MaintenanceForm_Action="Change"', 1)
#c.event("[Progress2]", "changes", 'MaintenanceForm_Action="Change"', 2)
# Reinstall: Change progress dialog to "Repair", then invoke reinstall
# Also set list of reinstalled features to "ALL"
c.event("[REINSTALL]", "ALL", 'MaintenanceForm_Action="Repair"', 5)
c.event("[Progress1]", "Repairing", 'MaintenanceForm_Action="Repair"', 6)
c.event("[Progress2]", "repairs", 'MaintenanceForm_Action="Repair"', 7)
c.event("Reinstall", "ALL", 'MaintenanceForm_Action="Repair"', 8)
# Uninstall: Change progress to "Remove", then invoke uninstall
# Also set list of removed features to "ALL"
c.event("[REMOVE]", "ALL", 'MaintenanceForm_Action="Remove"', 11)
c.event("[Progress1]", "Removing", 'MaintenanceForm_Action="Remove"', 12)
c.event("[Progress2]", "removes", 'MaintenanceForm_Action="Remove"', 13)
c.event("Remove", "ALL", 'MaintenanceForm_Action="Remove"', 14)
# Close dialog when maintenance action scheduled
c.event("EndDialog", "Return", 'MaintenanceForm_Action<>"Change"', 20)
#c.event("NewDialog", "SelectFeaturesDlg", 'MaintenanceForm_Action="Change"', 21)
maint.cancel("Cancel", "RepairRadioGroup").event("SpawnDialog", "CancelDlg")
def get_installer_filename(self, fullname):
# Factored out to allow overriding in subclasses
if self.target_version:
base_name = "%s.%s-py%s.msi" % (fullname, self.plat_name,
self.target_version)
else:
base_name = "%s.%s.msi" % (fullname, self.plat_name)
installer_name = os.path.join(self.dist_dir, base_name)
return installer_name
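        # e.g. (illustration): fullname "foo-1.0" with plat_name "win32" and
        # target_version "2.7" yields "<dist_dir>/foo-1.0.win32-py2.7.msi".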
| gpl-3.0 |
m-sanders/wagtail | wagtail/wagtailimages/migrations/0001_initial.py | 31 | 3250 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import wagtail.wagtailimages.models
import taggit.managers
from django.conf import settings
import wagtail.wagtailadmin.taggable
class Migration(migrations.Migration):
dependencies = [
('taggit', '__latest__'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Filter',
fields=[
('id', models.AutoField(primary_key=True, serialize=False, auto_created=True, verbose_name='ID')),
('spec', models.CharField(db_index=True, max_length=255)),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Image',
fields=[
('id', models.AutoField(primary_key=True, serialize=False, auto_created=True, verbose_name='ID')),
('title', models.CharField(verbose_name='Title', max_length=255)),
('file', models.ImageField(width_field='width', upload_to=wagtail.wagtailimages.models.get_upload_to, verbose_name='File', height_field='height')),
('width', models.IntegerField(editable=False)),
('height', models.IntegerField(editable=False)),
('created_at', models.DateTimeField(auto_now_add=True)),
('focal_point_x', models.PositiveIntegerField(editable=False, null=True)),
('focal_point_y', models.PositiveIntegerField(editable=False, null=True)),
('focal_point_width', models.PositiveIntegerField(editable=False, null=True)),
('focal_point_height', models.PositiveIntegerField(editable=False, null=True)),
('tags', taggit.managers.TaggableManager(verbose_name='Tags', blank=True, help_text=None, to='taggit.Tag', through='taggit.TaggedItem')),
('uploaded_by_user', models.ForeignKey(editable=False, blank=True, null=True, to=settings.AUTH_USER_MODEL)),
],
options={
'abstract': False,
},
bases=(models.Model, wagtail.wagtailadmin.taggable.TagSearchable),
),
migrations.CreateModel(
name='Rendition',
fields=[
('id', models.AutoField(primary_key=True, serialize=False, auto_created=True, verbose_name='ID')),
('file', models.ImageField(width_field='width', upload_to='images', height_field='height')),
('width', models.IntegerField(editable=False)),
('height', models.IntegerField(editable=False)),
('focal_point_key', models.CharField(editable=False, max_length=255, null=True)),
('filter', models.ForeignKey(related_name='+', to='wagtailimages.Filter')),
('image', models.ForeignKey(related_name='renditions', to='wagtailimages.Image')),
],
options={
},
bases=(models.Model,),
),
migrations.AlterUniqueTogether(
name='rendition',
unique_together=set([('image', 'filter', 'focal_point_key')]),
),
]
| bsd-3-clause |
Huskerboy/startbootstrap-freelancer | freelancer_env/Lib/re.py | 36 | 15501 | #
# Secret Labs' Regular Expression Engine
#
# re-compatible interface for the sre matching engine
#
# Copyright (c) 1998-2001 by Secret Labs AB. All rights reserved.
#
# This version of the SRE library can be redistributed under CNRI's
# Python 1.6 license. For any other use, please contact Secret Labs
# AB ([email protected]).
#
# Portions of this engine have been developed in cooperation with
# CNRI. Hewlett-Packard provided funding for 1.6 integration and
# other compatibility work.
#
r"""Support for regular expressions (RE).
This module provides regular expression matching operations similar to
those found in Perl. It supports both 8-bit and Unicode strings; both
the pattern and the strings being processed can contain null bytes and
characters outside the US ASCII range.
Regular expressions can contain both special and ordinary characters.
Most ordinary characters, like "A", "a", or "0", are the simplest
regular expressions; they simply match themselves. You can
concatenate ordinary characters, so last matches the string 'last'.
The special characters are:
"." Matches any character except a newline.
"^" Matches the start of the string.
"$" Matches the end of the string or just before the newline at
the end of the string.
"*" Matches 0 or more (greedy) repetitions of the preceding RE.
Greedy means that it will match as many repetitions as possible.
"+" Matches 1 or more (greedy) repetitions of the preceding RE.
"?" Matches 0 or 1 (greedy) of the preceding RE.
*?,+?,?? Non-greedy versions of the previous three special characters.
{m,n} Matches from m to n repetitions of the preceding RE.
{m,n}? Non-greedy version of the above.
"\\" Either escapes special characters or signals a special sequence.
[] Indicates a set of characters.
A "^" as the first character indicates a complementing set.
"|" A|B, creates an RE that will match either A or B.
(...) Matches the RE inside the parentheses.
The contents can be retrieved or matched later in the string.
(?aiLmsux) Set the A, I, L, M, S, U, or X flag for the RE (see below).
(?:...) Non-grouping version of regular parentheses.
(?P<name>...) The substring matched by the group is accessible by name.
(?P=name) Matches the text matched earlier by the group named name.
(?#...) A comment; ignored.
(?=...) Matches if ... matches next, but doesn't consume the string.
(?!...) Matches if ... doesn't match next.
(?<=...) Matches if preceded by ... (must be fixed length).
(?<!...) Matches if not preceded by ... (must be fixed length).
(?(id/name)yes|no) Matches yes pattern if the group with id/name matched,
the (optional) no pattern otherwise.
The special sequences consist of "\\" and a character from the list
below. If the ordinary character is not on the list, then the
resulting RE will match the second character.
\number Matches the contents of the group of the same number.
\A Matches only at the start of the string.
\Z Matches only at the end of the string.
\b Matches the empty string, but only at the start or end of a word.
\B Matches the empty string, but not at the start or end of a word.
\d Matches any decimal digit; equivalent to the set [0-9] in
bytes patterns or string patterns with the ASCII flag.
In string patterns without the ASCII flag, it will match the whole
range of Unicode digits.
\D Matches any non-digit character; equivalent to [^\d].
\s Matches any whitespace character; equivalent to [ \t\n\r\f\v] in
bytes patterns or string patterns with the ASCII flag.
In string patterns without the ASCII flag, it will match the whole
range of Unicode whitespace characters.
\S Matches any non-whitespace character; equivalent to [^\s].
\w Matches any alphanumeric character; equivalent to [a-zA-Z0-9_]
in bytes patterns or string patterns with the ASCII flag.
In string patterns without the ASCII flag, it will match the
range of Unicode alphanumeric characters (letters plus digits
plus underscore).
With LOCALE, it will match the set [0-9_] plus characters defined
as letters for the current locale.
\W Matches the complement of \w.
\\ Matches a literal backslash.
This module exports the following functions:
match Match a regular expression pattern to the beginning of a string.
fullmatch Match a regular expression pattern to all of a string.
search Search a string for the presence of a pattern.
sub Substitute occurrences of a pattern found in a string.
subn Same as sub, but also return the number of substitutions made.
split Split a string by the occurrences of a pattern.
findall Find all occurrences of a pattern in a string.
finditer Return an iterator yielding a match object for each match.
compile Compile a pattern into a RegexObject.
purge Clear the regular expression cache.
escape Backslash all non-alphanumerics in a string.
Some of the functions in this module take flags as optional parameters:
A ASCII For string patterns, make \w, \W, \b, \B, \d, \D
match the corresponding ASCII character categories
(rather than the whole Unicode categories, which is the
default).
For bytes patterns, this flag is the only available
behaviour and needn't be specified.
I IGNORECASE Perform case-insensitive matching.
L LOCALE Make \w, \W, \b, \B, dependent on the current locale.
M MULTILINE "^" matches the beginning of lines (after a newline)
as well as the string.
"$" matches the end of lines (before a newline) as well
as the end of the string.
S DOTALL "." matches any character at all, including the newline.
X VERBOSE Ignore whitespace and comments for nicer looking RE's.
U UNICODE For compatibility only. Ignored for string patterns (it
is the default), and forbidden for bytes patterns.
This module also defines an exception 'error'.
"""
import sys
import sre_compile
import sre_parse
try:
import _locale
except ImportError:
_locale = None
# public symbols
__all__ = [
"match", "fullmatch", "search", "sub", "subn", "split",
"findall", "finditer", "compile", "purge", "template", "escape",
"error", "A", "I", "L", "M", "S", "X", "U",
"ASCII", "IGNORECASE", "LOCALE", "MULTILINE", "DOTALL", "VERBOSE",
"UNICODE",
]
__version__ = "2.2.1"
# flags
A = ASCII = sre_compile.SRE_FLAG_ASCII # assume ascii "locale"
I = IGNORECASE = sre_compile.SRE_FLAG_IGNORECASE # ignore case
L = LOCALE = sre_compile.SRE_FLAG_LOCALE # assume current 8-bit locale
U = UNICODE = sre_compile.SRE_FLAG_UNICODE # assume unicode "locale"
M = MULTILINE = sre_compile.SRE_FLAG_MULTILINE # make anchors look for newline
S = DOTALL = sre_compile.SRE_FLAG_DOTALL # make dot match newline
X = VERBOSE = sre_compile.SRE_FLAG_VERBOSE # ignore whitespace and comments
# sre extensions (experimental, don't rely on these)
T = TEMPLATE = sre_compile.SRE_FLAG_TEMPLATE # disable backtracking
DEBUG = sre_compile.SRE_FLAG_DEBUG # dump pattern after compilation
# sre exception
error = sre_compile.error
# --------------------------------------------------------------------
# public interface
def match(pattern, string, flags=0):
"""Try to apply the pattern at the start of the string, returning
a match object, or None if no match was found."""
return _compile(pattern, flags).match(string)
def fullmatch(pattern, string, flags=0):
"""Try to apply the pattern to all of the string, returning
a match object, or None if no match was found."""
return _compile(pattern, flags).fullmatch(string)
def search(pattern, string, flags=0):
"""Scan through string looking for a match to the pattern, returning
a match object, or None if no match was found."""
return _compile(pattern, flags).search(string)
def sub(pattern, repl, string, count=0, flags=0):
"""Return the string obtained by replacing the leftmost
non-overlapping occurrences of the pattern in string by the
replacement repl. repl can be either a string or a callable;
if a string, backslash escapes in it are processed. If it is
a callable, it's passed the match object and must return
a replacement string to be used."""
return _compile(pattern, flags).sub(repl, string, count)
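# Illustration of a callable repl (input data hypothetical):
#   >>> sub(r'\d+', lambda m: str(int(m.group()) * 2), 'a1 b2')
#   'a2 b4'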
def subn(pattern, repl, string, count=0, flags=0):
"""Return a 2-tuple containing (new_string, number).
new_string is the string obtained by replacing the leftmost
non-overlapping occurrences of the pattern in the source
string by the replacement repl. number is the number of
substitutions that were made. repl can be either a string or a
callable; if a string, backslash escapes in it are processed.
If it is a callable, it's passed the match object and must
return a replacement string to be used."""
return _compile(pattern, flags).subn(repl, string, count)
def split(pattern, string, maxsplit=0, flags=0):
"""Split the source string by the occurrences of the pattern,
returning a list containing the resulting substrings. If
capturing parentheses are used in pattern, then the text of all
groups in the pattern are also returned as part of the resulting
list. If maxsplit is nonzero, at most maxsplit splits occur,
and the remainder of the string is returned as the final element
of the list."""
return _compile(pattern, flags).split(string, maxsplit)
def findall(pattern, string, flags=0):
"""Return a list of all non-overlapping matches in the string.
If one or more capturing groups are present in the pattern, return
a list of groups; this will be a list of tuples if the pattern
has more than one group.
Empty matches are included in the result."""
return _compile(pattern, flags).findall(string)
def finditer(pattern, string, flags=0):
"""Return an iterator over all non-overlapping matches in the
string. For each match, the iterator returns a match object.
Empty matches are included in the result."""
return _compile(pattern, flags).finditer(string)
def compile(pattern, flags=0):
"Compile a regular expression pattern, returning a pattern object."
return _compile(pattern, flags)
def purge():
"Clear the regular expression caches"
_cache.clear()
_cache_repl.clear()
def template(pattern, flags=0):
"Compile a template pattern, returning a pattern object"
return _compile(pattern, flags|T)
_alphanum_str = frozenset(
"_abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ01234567890")
_alphanum_bytes = frozenset(
b"_abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ01234567890")
def escape(pattern):
"""
Escape all the characters in pattern except ASCII letters, numbers and '_'.
"""
if isinstance(pattern, str):
alphanum = _alphanum_str
s = list(pattern)
for i, c in enumerate(pattern):
if c not in alphanum:
if c == "\000":
s[i] = "\\000"
else:
s[i] = "\\" + c
return "".join(s)
else:
alphanum = _alphanum_bytes
s = []
esc = ord(b"\\")
for c in pattern:
if c in alphanum:
s.append(c)
else:
if c == 0:
s.extend(b"\\000")
else:
s.append(esc)
s.append(c)
return bytes(s)
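# e.g. (illustration): escape('www.python.org') == 'www\\.python\\.org'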
# --------------------------------------------------------------------
# internals
_cache = {}
_cache_repl = {}
_pattern_type = type(sre_compile.compile("", 0))
_MAXCACHE = 512
def _compile(pattern, flags):
# internal: compile pattern
try:
p, loc = _cache[type(pattern), pattern, flags]
if loc is None or loc == _locale.setlocale(_locale.LC_CTYPE):
return p
except KeyError:
pass
if isinstance(pattern, _pattern_type):
if flags:
raise ValueError(
"cannot process flags argument with a compiled pattern")
return pattern
if not sre_compile.isstring(pattern):
raise TypeError("first argument must be string or compiled pattern")
p = sre_compile.compile(pattern, flags)
if not (flags & DEBUG):
if len(_cache) >= _MAXCACHE:
_cache.clear()
if p.flags & LOCALE:
if not _locale:
return p
loc = _locale.setlocale(_locale.LC_CTYPE)
else:
loc = None
_cache[type(pattern), pattern, flags] = p, loc
return p
def _compile_repl(repl, pattern):
# internal: compile replacement pattern
try:
return _cache_repl[repl, pattern]
except KeyError:
pass
p = sre_parse.parse_template(repl, pattern)
if len(_cache_repl) >= _MAXCACHE:
_cache_repl.clear()
_cache_repl[repl, pattern] = p
return p
def _expand(pattern, match, template):
# internal: match.expand implementation hook
template = sre_parse.parse_template(template, pattern)
return sre_parse.expand_template(template, match)
def _subx(pattern, template):
# internal: pattern.sub/subn implementation helper
template = _compile_repl(template, pattern)
if not template[0] and len(template[1]) == 1:
# literal replacement
return template[1][0]
def filter(match, template=template):
return sre_parse.expand_template(template, match)
return filter
# register myself for pickling
import copyreg
def _pickle(p):
return _compile, (p.pattern, p.flags)
copyreg.pickle(_pattern_type, _pickle, _compile)
# --------------------------------------------------------------------
# experimental stuff (see python-dev discussions for details)
class Scanner:
def __init__(self, lexicon, flags=0):
from sre_constants import BRANCH, SUBPATTERN
self.lexicon = lexicon
# combine phrases into a compound pattern
p = []
s = sre_parse.Pattern()
s.flags = flags
for phrase, action in lexicon:
gid = s.opengroup()
p.append(sre_parse.SubPattern(s, [
(SUBPATTERN, (gid, sre_parse.parse(phrase, flags))),
]))
s.closegroup(gid, p[-1])
p = sre_parse.SubPattern(s, [(BRANCH, (None, p))])
self.scanner = sre_compile.compile(p)
def scan(self, string):
result = []
append = result.append
match = self.scanner.scanner(string).match
i = 0
while True:
m = match()
if not m:
break
j = m.end()
if i == j:
break
action = self.lexicon[m.lastindex-1][1]
if callable(action):
self.match = m
action = action(self, m.group())
if action is not None:
append(action)
i = j
return result, string[i:]
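# Usage sketch for the experimental Scanner (lexicon and input assumed):
#   >>> s = Scanner([(r"\d+", lambda sc, tok: ("INT", tok)),
#   ...              (r"\s+", None)])
#   >>> s.scan("12 34 x")
#   ([('INT', '12'), ('INT', '34')], 'x')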
| mit |
zcbenz/cefode-chromium | tools/telemetry/telemetry/core/extension_dict.py | 35 | 1135 | # Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.core import extension_to_load
class ExtensionDict(object):
"""Dictionary of ExtensionPage instances, with extension_id as key"""
def __init__(self, extension_dict_backend):
self._extension_dict_backend = extension_dict_backend
def __getitem__(self, load_extension):
"""Given an ExtensionToLoad instance, returns the corresponding
ExtensionPage instance."""
if not isinstance(load_extension, extension_to_load.ExtensionToLoad):
raise Exception("Input param must be of type ExtensionToLoad")
return self._extension_dict_backend.__getitem__(
load_extension.extension_id)
def __contains__(self, load_extension):
"""Checks if this ExtensionToLoad instance has been loaded"""
if not isinstance(load_extension, extension_to_load.ExtensionToLoad):
raise Exception("Input param must be of type ExtensionToLoad")
return self._extension_dict_backend.__contains__(
load_extension.extension_id)
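# Usage sketch (names hypothetical): given an ExtensionToLoad `ext` that was
# passed to the browser at launch,
#   page = browser.extensions[ext]    # ExtensionPage for ext (__getitem__)
#   assert ext in browser.extensions  # loaded check (__contains__)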
| bsd-3-clause |
pyKun/rally | rally/plugins/openstack/scenarios/ceilometer/samples.py | 3 | 1169 | # All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from rally import consts
from rally.plugins.openstack.scenarios.ceilometer import utils as ceiloutils
from rally.task.scenarios import base
from rally.task import validation
class CeilometerSamples(ceiloutils.CeilometerScenario):
"""Benchmark scenarios for Ceilometer Samples API."""
@validation.required_services(consts.Service.CEILOMETER)
@validation.required_openstack(users=True)
@base.scenario()
def list_samples(self):
"""Fetch all samples.
        This scenario fetches the list of all samples.
"""
self._list_samples()
| apache-2.0 |
jlspyaozhongkai/Uter | third_party_backup/Python-2.7.9/Lib/email/mime/image.py | 573 | 1764 | # Copyright (C) 2001-2006 Python Software Foundation
# Author: Barry Warsaw
# Contact: [email protected]
"""Class representing image/* type MIME documents."""
__all__ = ['MIMEImage']
import imghdr
from email import encoders
from email.mime.nonmultipart import MIMENonMultipart
class MIMEImage(MIMENonMultipart):
"""Class for generating image/* type MIME documents."""
def __init__(self, _imagedata, _subtype=None,
_encoder=encoders.encode_base64, **_params):
"""Create an image/* type MIME document.
_imagedata is a string containing the raw image data. If this data
can be decoded by the standard Python `imghdr' module, then the
subtype will be automatically included in the Content-Type header.
Otherwise, you can specify the specific image subtype via the _subtype
parameter.
_encoder is a function which will perform the actual encoding for
transport of the image data. It takes one argument, which is this
Image instance. It should use get_payload() and set_payload() to
change the payload to the encoded form. It should also add any
Content-Transfer-Encoding or other headers to the message as
necessary. The default encoding is Base64.
Any additional keyword arguments are passed to the base class
constructor, which turns them into parameters on the Content-Type
header.
"""
if _subtype is None:
_subtype = imghdr.what(None, _imagedata)
if _subtype is None:
raise TypeError('Could not guess image MIME subtype')
MIMENonMultipart.__init__(self, 'image', _subtype, **_params)
self.set_payload(_imagedata)
_encoder(self)
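# Usage sketch (file path assumed):
#   with open('logo.png', 'rb') as fp:
#       img = MIMEImage(fp.read())    # subtype guessed via imghdr
#   img.add_header('Content-ID', '<logo>')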
| gpl-3.0 |
eayunstack/eayunstack-upgrade | ansible/library/keystone_v2_endpoint.py | 1 | 9178 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2014, Kevin Carter <[email protected]>
#
# Copyright 2014, Rackspace US, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Based on Jimmy Tang's implementation
DOCUMENTATION = """
---
module: keystone_v2_endpoint
short_description:
- Manage OpenStack Identity (keystone) v2 endpoint.
description:
    - Manage OpenStack Identity (keystone) v2 endpoints.
options:
token:
description:
        - The token to be used in case the password is not specified
required: true
default: None
endpoint:
description:
- The keystone url for authentication
required: true
service_name:
description:
- Name of the service.
required: true
default: None
region_name:
description:
- Name of the region.
required: true
default: None
service_type:
description:
- Type of service.
required: true
default: None
endpoint_dict:
description:
- Dict of endpoint urls to add to keystone for a service
required: true
default: None
type: dict
state:
description:
- Ensuring the endpoint is either present, absent.
- It always ensures endpoint is updated to latest url.
required: False
default: 'present'
requirements: [ python-keystoneclient ]
"""
EXAMPLES = """
# Create an endpoint
- keystone_v2_endpoint:
region_name: "RegionOne"
service_name: "glance"
service_type: "image"
endpoint: "http://127.0.0.1:5000/v2.0/"
token: "ChangeMe"
endpoint_dict:
publicurl: "http://127.0.0.1:9292"
adminurl: "http://127.0.0.1:9292"
internalurl: "http://127.0.0.1:9292"
"""
try:
from keystoneclient.v2_0 import client
except ImportError:
keystoneclient_found = False
else:
keystoneclient_found = True
class ManageKeystoneV2Endpoint(object):
def __init__(self, module):
"""Manage Keystone via Ansible."""
self.state_change = False
self.keystone = None
# Load AnsibleModule
self.module = module
@staticmethod
def _facts(facts):
"""Return a dict for our Ansible facts.
:param facts: ``dict`` Dict with data to return
"""
return {'keystone_facts': facts}
def failure(self, error, rc, msg):
"""Return a Failure when running an Ansible command.
:param error: ``str`` Error that occurred.
:param rc: ``int`` Return code while executing an Ansible command.
:param msg: ``str`` Message to report.
"""
self.module.fail_json(msg=msg, rc=rc, err=error)
def _authenticate(self):
"""Return a keystone client object."""
endpoint = self.module.params.get('endpoint')
token = self.module.params.get('token')
if token is None:
self.failure(
error='Missing Auth Token',
rc=2,
                msg='Auth token is required!'
)
if token:
self.keystone = client.Client(
endpoint=endpoint,
token=token
)
def _get_service(self, name, srv_type=None):
for entry in self.keystone.services.list():
if srv_type is not None:
if entry.type == srv_type and name == entry.name:
return entry
elif entry.name == name:
return entry
else:
return None
def _get_endpoint(self, region, service_id):
""" Getting endpoints per complete definition
Returns the endpoint details for an endpoint matching
region, service id.
:param service_id: service to which the endpoint belongs
:param region: geographic location of the endpoint
"""
for entry in self.keystone.endpoints.list():
check = [
entry.region == region,
entry.service_id == service_id,
]
if all(check):
return entry
else:
return None
def _compare_endpoint_info(self, endpoint, endpoint_dict):
""" Compare existed endpoint with module parameters
Return True if public, admin, internal urls are all the same.
:param endpoint: endpoint existed
:param endpoint_dict: endpoint info passed in
"""
check = [
endpoint.adminurl == endpoint_dict.get('adminurl'),
endpoint.publicurl == endpoint_dict.get('publicurl'),
endpoint.internalurl == endpoint_dict.get('internalurl')
]
if all(check):
return True
else:
return False
def ensure_endpoint(self):
"""Ensures the deletion/modification/addition of endpoints
within Keystone.
        Exits the module with endpoint facts on a successful run.
"""
self._authenticate()
service_name = self.module.params.get('service_name')
service_type = self.module.params.get('service_type')
region = self.module.params.get('region_name')
endpoint_dict = self.module.params.get('endpoint_dict')
state = self.module.params.get('state')
endpoint_dict = {
'adminurl': endpoint_dict.get('adminurl', ''),
'publicurl': endpoint_dict.get('publicurl', ''),
'internalurl': endpoint_dict.get('internalurl', '')
}
service = self._get_service(name=service_name, srv_type=service_type)
if service is None:
self.failure(
error='service [ %s ] was not found.' % service_name,
rc=2,
msg='Service was not found, does it exist?'
)
existed_endpoint = self._get_endpoint(
region=region,
service_id=service.id,
)
delete_existed = False
if state == 'present':
            ''' Create the endpoint if it does not
            exist. If it exists but its urls differ,
            create a new endpoint and delete the
            existing one that matches the service
            type, name, and region.
'''
if existed_endpoint:
if not self._compare_endpoint_info(existed_endpoint,
endpoint_dict):
delete_existed = True
else:
endpoint = existed_endpoint
if (not existed_endpoint or
delete_existed):
self.state_change = True
endpoint = self.keystone.endpoints.create(
region=region,
service_id=service.id,
**endpoint_dict
)
elif state == 'absent':
if existed_endpoint is not None:
self.state_change = True
delete_existed = True
if delete_existed:
result = self.keystone.endpoints.delete(existed_endpoint.id)
if result[0].status_code != 204:
                self.module.fail_json(msg='failed to delete endpoint')
if state != 'absent':
facts = self._facts(endpoint.to_dict())
else:
facts = self._facts({})
self.module.exit_json(
changed=self.state_change,
ansible_facts=facts
)
# TODO(evrardjp): Deprecate state=update in Q.
def main():
module = AnsibleModule(
argument_spec=dict(
token=dict(
required=True
),
endpoint=dict(
required=True,
),
region_name=dict(
required=True
),
service_name=dict(
required=True
),
service_type=dict(
required=True
),
endpoint_dict=dict(
required=True,
type='dict'
),
state=dict(
choices=['present', 'absent'],
required=False,
default='present'
)
),
supports_check_mode=False,
)
km = ManageKeystoneV2Endpoint(module=module)
if not keystoneclient_found:
km.failure(
error='python-keystoneclient is missing',
rc=2,
msg='keystone client was not importable, is it installed?'
)
facts = km.ensure_endpoint()
# import module snippets
from ansible.module_utils.basic import * # NOQA
if __name__ == '__main__':
main()
| apache-2.0 |
haroldl/homeworklog | django/core/management/commands/reset.py | 229 | 2598 | from optparse import make_option
from django.conf import settings
from django.core.management.base import AppCommand, CommandError
from django.core.management.color import no_style
from django.core.management.sql import sql_reset
from django.db import connections, transaction, DEFAULT_DB_ALIAS
class Command(AppCommand):
option_list = AppCommand.option_list + (
make_option('--noinput', action='store_false', dest='interactive', default=True,
help='Tells Django to NOT prompt the user for input of any kind.'),
make_option('--database', action='store', dest='database',
default=DEFAULT_DB_ALIAS, help='Nominates a database to reset. '
'Defaults to the "default" database.'),
)
help = "Executes ``sqlreset`` for the given app(s) in the current database."
args = '[appname ...]'
output_transaction = True
def handle_app(self, app, **options):
# This command breaks a lot and should be deprecated
import warnings
warnings.warn(
'This command has been deprecated. The command ``flush`` can be used to delete everything. You can also use ALTER TABLE or DROP TABLE statements manually.',
PendingDeprecationWarning
)
using = options.get('database', DEFAULT_DB_ALIAS)
connection = connections[using]
app_name = app.__name__.split('.')[-2]
self.style = no_style()
sql_list = sql_reset(app, self.style, connection)
if options.get('interactive'):
confirm = raw_input("""
You have requested a database reset.
This will IRREVERSIBLY DESTROY any data for
the "%s" application in the database "%s".
Are you sure you want to do this?
Type 'yes' to continue, or 'no' to cancel: """ % (app_name, connection.settings_dict['NAME']))
else:
confirm = 'yes'
if confirm == 'yes':
try:
cursor = connection.cursor()
for sql in sql_list:
cursor.execute(sql)
except Exception, e:
transaction.rollback_unless_managed()
raise CommandError("""Error: %s couldn't be reset. Possible reasons:
* The database isn't running or isn't configured correctly.
* At least one of the database tables doesn't exist.
* The SQL was invalid.
Hint: Look at the output of 'django-admin.py sqlreset %s'. That's the SQL this command wasn't able to run.
The full error: %s""" % (app_name, app_name, e))
transaction.commit_unless_managed()
else:
print "Reset cancelled."
| bsd-3-clause |
fards/Ainol_fire_kernel | tools/perf/scripts/python/futex-contention.py | 11261 | 1486 | # futex contention
# (c) 2010, Arnaldo Carvalho de Melo <[email protected]>
# Licensed under the terms of the GNU GPL License version 2
#
# Translation of:
#
# http://sourceware.org/systemtap/wiki/WSFutexContention
#
# to perf python scripting.
#
# Measures futex contention
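#
# Typical invocation (illustration; record options assumed):
#   perf record -e syscalls:sys_enter_futex -e syscalls:sys_exit_futex -a
#   perf script -s futex-contention.py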
import os, sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + '/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from Util import *
thread_thislock = {}
thread_blocktime = {}
lock_waits = {} # long-lived stats on (tid,lock) blockage elapsed time
process_names = {} # long-lived pid-to-execname mapping
def syscalls__sys_enter_futex(event, ctxt, cpu, s, ns, tid, comm,
nr, uaddr, op, val, utime, uaddr2, val3):
cmd = op & FUTEX_CMD_MASK
if cmd != FUTEX_WAIT:
return # we don't care about originators of WAKE events
process_names[tid] = comm
thread_thislock[tid] = uaddr
thread_blocktime[tid] = nsecs(s, ns)
def syscalls__sys_exit_futex(event, ctxt, cpu, s, ns, tid, comm,
nr, ret):
if thread_blocktime.has_key(tid):
elapsed = nsecs(s, ns) - thread_blocktime[tid]
add_stats(lock_waits, (tid, thread_thislock[tid]), elapsed)
del thread_blocktime[tid]
del thread_thislock[tid]
def trace_begin():
print "Press control+C to stop and show the summary"
def trace_end():
for (tid, lock) in lock_waits:
min, max, avg, count = lock_waits[tid, lock]
print "%s[%d] lock %x contended %d times, %d avg ns" % \
(process_names[tid], tid, lock, count, avg)
| gpl-2.0 |
TinLe/Diamond | src/collectors/snmpraw/snmpraw.py | 56 | 6074 | # coding=utf-8
"""
The SNMPRawCollector is designed for collecting data from SNMP-enables devices,
using a set of specified OIDs
#### Configuration
Below is an example configuration for the SNMPRawCollector. The collector
can collect data any number of devices by adding configuration sections
under the *devices* header. By default the collector will collect every 60
seconds. This might be a bit excessive and put unnecessary load on the
devices being polled. You may wish to change this to every 300 seconds. However
you need to modify your graphite data retentions to handle this properly.
```
# Options for SNMPRawCollector
enabled = True
interval = 60
[devices]
# Start the device configuration
# Note: this name will be used in the metric path.
[[my-identification-for-this-host]]
host = localhost
port = 161
community = public
# Start the OID list for this device
# Note: the value part will be used in the metric path.
[[[oids]]]
1.3.6.1.4.1.2021.10.1.3.1 = cpu.load.1min
1.3.6.1.4.1.2021.10.1.3.2 = cpu.load.5min
1.3.6.1.4.1.2021.10.1.3.3 = cpu.load.15min
# If you want another host, you can. But you probably won't need it.
[[another-identification]]
host = router1.example.com
port = 161
community = public
[[[oids]]]
oid = metric.path
oid = metric.path
```
Note: If you modify the SNMPRawCollector configuration, you will need to
restart diamond.
#### Dependencies
* pysnmp (which depends on pyasn1 0.1.7 and pycrypto)
"""
import os
import sys
import time
sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(__file__)),
'snmp'))
from snmp import SNMPCollector as parent_SNMPCollector
from diamond.metric import Metric
class SNMPRawCollector(parent_SNMPCollector):
def process_config(self):
super(SNMPRawCollector, self).process_config()
        # list of non-existing OIDs per device, kept to avoid repeating
        # errors in logging. Signal HUP to diamond/collector to flush it
self.skip_list = []
def get_default_config(self):
"""
Override SNMPCollector.get_default_config method to provide
default_config for the SNMPInterfaceCollector
"""
default_config = super(SNMPRawCollector,
self).get_default_config()
default_config.update({
'oids': {},
'path_prefix': 'servers',
'path_suffix': 'snmp',
})
return default_config
def _precision(self, value):
"""
Return the precision of the number
"""
value = str(value)
decimal = value.rfind('.')
if decimal == -1:
return 0
return len(value) - decimal - 1
def _skip(self, device, oid, reason=None):
self.skip_list.append((device, oid))
if reason is not None:
self.log.warn('Muted \'{0}\' on \'{1}\', because: {2}'.format(
oid, device, reason))
def _get_value_walk(self, device, oid, host, port, community):
data = self.walk(oid, host, port, community)
if data is None:
self._skip(device, oid, 'device down (#2)')
return
self.log.debug('Data received from WALK \'{0}\': [{1}]'.format(
device, data))
if len(data) != 1:
self._skip(
device,
oid,
'unexpected response, data has {0} entries'.format(
len(data)))
return
# because we only allow 1-key dicts, we can pick with absolute index
value = data.items()[0][1]
return value
def _get_value(self, device, oid, host, port, community):
data = self.get(oid, host, port, community)
if data is None:
self._skip(device, oid, 'device down (#1)')
return
self.log.debug('Data received from GET \'{0}\': [{1}]'.format(
device, data))
if len(data) == 0:
self._skip(device, oid, 'empty response, device down?')
return
if oid not in data:
# oid is not even in hierarchy, happens when using 9.9.9.9
# but not when using 1.9.9.9
self._skip(device, oid, 'no object at OID (#1)')
return
value = data[oid]
if value == 'No Such Object currently exists at this OID':
self._skip(device, oid, 'no object at OID (#2)')
return
if value == 'No Such Instance currently exists at this OID':
return self._get_value_walk(device, oid, host, port, community)
return value
def collect_snmp(self, device, host, port, community):
"""
Collect SNMP interface data from device
"""
self.log.debug(
'Collecting raw SNMP statistics from device \'{0}\''.format(device))
dev_config = self.config['devices'][device]
if 'oids' in dev_config:
for oid, metricName in dev_config['oids'].items():
if (device, oid) in self.skip_list:
self.log.debug(
'Skipping OID \'{0}\' ({1}) on device \'{2}\''.format(
oid, metricName, device))
continue
timestamp = time.time()
value = self._get_value(device, oid, host, port, community)
if value is None:
continue
self.log.debug(
'\'{0}\' ({1}) on device \'{2}\' - value=[{3}]'.format(
oid, metricName, device, value))
path = '.'.join([self.config['path_prefix'], device,
self.config['path_suffix'], metricName])
metric = Metric(path=path, value=value, timestamp=timestamp,
precision=self._precision(value),
metric_type='GAUGE')
self.publish_metric(metric)
| mit |
sinhrks/scikit-learn | sklearn/externals/joblib/_memory_helpers.py | 303 | 3605 | try:
# Available in Python 3
from tokenize import open as open_py_source
except ImportError:
# Copied from python3 tokenize
from codecs import lookup, BOM_UTF8
import re
from io import TextIOWrapper, open
    cookie_re = re.compile(r"coding[:=]\s*([-\w.]+)")
def _get_normal_name(orig_enc):
"""Imitates get_normal_name in tokenizer.c."""
# Only care about the first 12 characters.
enc = orig_enc[:12].lower().replace("_", "-")
if enc == "utf-8" or enc.startswith("utf-8-"):
return "utf-8"
if enc in ("latin-1", "iso-8859-1", "iso-latin-1") or \
enc.startswith(("latin-1-", "iso-8859-1-", "iso-latin-1-")):
return "iso-8859-1"
return orig_enc
def _detect_encoding(readline):
"""
The detect_encoding() function is used to detect the encoding that
should be used to decode a Python source file. It requires one
        argument, readline, in the same way as the tokenize() generator.
It will call readline a maximum of twice, and return the encoding used
(as a string) and a list of any lines (left as bytes) it has read in.
It detects the encoding from the presence of a utf-8 bom or an encoding
cookie as specified in pep-0263. If both a bom and a cookie are
present, but disagree, a SyntaxError will be raised. If the encoding
cookie is an invalid charset, raise a SyntaxError. Note that if a
utf-8 bom is found, 'utf-8-sig' is returned.
If no encoding is specified, then the default of 'utf-8' will be
returned.
"""
bom_found = False
encoding = None
default = 'utf-8'
def read_or_stop():
try:
return readline()
except StopIteration:
return b''
def find_cookie(line):
try:
line_string = line.decode('ascii')
except UnicodeDecodeError:
return None
matches = cookie_re.findall(line_string)
if not matches:
return None
encoding = _get_normal_name(matches[0])
try:
codec = lookup(encoding)
except LookupError:
# This behaviour mimics the Python interpreter
raise SyntaxError("unknown encoding: " + encoding)
if bom_found:
if codec.name != 'utf-8':
# This behaviour mimics the Python interpreter
raise SyntaxError('encoding problem: utf-8')
encoding += '-sig'
return encoding
first = read_or_stop()
if first.startswith(BOM_UTF8):
bom_found = True
first = first[3:]
default = 'utf-8-sig'
if not first:
return default, []
encoding = find_cookie(first)
if encoding:
return encoding, [first]
second = read_or_stop()
if not second:
return default, [first]
encoding = find_cookie(second)
if encoding:
return encoding, [first, second]
return default, [first, second]
def open_py_source(filename):
"""Open a file in read only mode using the encoding detected by
detect_encoding().
"""
buffer = open(filename, 'rb')
encoding, lines = _detect_encoding(buffer.readline)
buffer.seek(0)
text = TextIOWrapper(buffer, encoding, line_buffering=True)
text.mode = 'r'
return text | bsd-3-clause |
lexus42/2015cd_midterm2 | static/Brython3.1.1-20150328-091302/Lib/unittest/test/test_skipping.py | 744 | 5173 | import unittest
from .support import LoggingResult
class Test_TestSkipping(unittest.TestCase):
def test_skipping(self):
class Foo(unittest.TestCase):
def test_skip_me(self):
self.skipTest("skip")
events = []
result = LoggingResult(events)
test = Foo("test_skip_me")
test.run(result)
self.assertEqual(events, ['startTest', 'addSkip', 'stopTest'])
self.assertEqual(result.skipped, [(test, "skip")])
# Try letting setUp skip the test now.
class Foo(unittest.TestCase):
def setUp(self):
self.skipTest("testing")
def test_nothing(self): pass
events = []
result = LoggingResult(events)
test = Foo("test_nothing")
test.run(result)
self.assertEqual(events, ['startTest', 'addSkip', 'stopTest'])
self.assertEqual(result.skipped, [(test, "testing")])
self.assertEqual(result.testsRun, 1)
def test_skipping_decorators(self):
op_table = ((unittest.skipUnless, False, True),
(unittest.skipIf, True, False))
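        # each row is (decorator, argument that skips, argument that doesn't)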
for deco, do_skip, dont_skip in op_table:
class Foo(unittest.TestCase):
@deco(do_skip, "testing")
def test_skip(self): pass
@deco(dont_skip, "testing")
def test_dont_skip(self): pass
test_do_skip = Foo("test_skip")
test_dont_skip = Foo("test_dont_skip")
suite = unittest.TestSuite([test_do_skip, test_dont_skip])
events = []
result = LoggingResult(events)
suite.run(result)
self.assertEqual(len(result.skipped), 1)
expected = ['startTest', 'addSkip', 'stopTest',
'startTest', 'addSuccess', 'stopTest']
self.assertEqual(events, expected)
self.assertEqual(result.testsRun, 2)
self.assertEqual(result.skipped, [(test_do_skip, "testing")])
self.assertTrue(result.wasSuccessful())
def test_skip_class(self):
@unittest.skip("testing")
class Foo(unittest.TestCase):
def test_1(self):
record.append(1)
record = []
result = unittest.TestResult()
test = Foo("test_1")
suite = unittest.TestSuite([test])
suite.run(result)
self.assertEqual(result.skipped, [(test, "testing")])
self.assertEqual(record, [])
def test_skip_non_unittest_class(self):
@unittest.skip("testing")
class Mixin:
def test_1(self):
record.append(1)
class Foo(Mixin, unittest.TestCase):
pass
record = []
result = unittest.TestResult()
test = Foo("test_1")
suite = unittest.TestSuite([test])
suite.run(result)
self.assertEqual(result.skipped, [(test, "testing")])
self.assertEqual(record, [])
def test_expected_failure(self):
class Foo(unittest.TestCase):
@unittest.expectedFailure
def test_die(self):
self.fail("help me!")
events = []
result = LoggingResult(events)
test = Foo("test_die")
test.run(result)
self.assertEqual(events,
['startTest', 'addExpectedFailure', 'stopTest'])
self.assertEqual(result.expectedFailures[0][0], test)
self.assertTrue(result.wasSuccessful())
def test_unexpected_success(self):
class Foo(unittest.TestCase):
@unittest.expectedFailure
def test_die(self):
pass
events = []
result = LoggingResult(events)
test = Foo("test_die")
test.run(result)
self.assertEqual(events,
['startTest', 'addUnexpectedSuccess', 'stopTest'])
self.assertFalse(result.failures)
self.assertEqual(result.unexpectedSuccesses, [test])
self.assertTrue(result.wasSuccessful())
def test_skip_doesnt_run_setup(self):
class Foo(unittest.TestCase):
wasSetUp = False
wasTornDown = False
def setUp(self):
Foo.wasSetUp = True
            def tearDown(self):
Foo.wasTornDown = True
@unittest.skip('testing')
def test_1(self):
pass
result = unittest.TestResult()
test = Foo("test_1")
suite = unittest.TestSuite([test])
suite.run(result)
self.assertEqual(result.skipped, [(test, "testing")])
self.assertFalse(Foo.wasSetUp)
self.assertFalse(Foo.wasTornDown)
def test_decorated_skip(self):
def decorator(func):
def inner(*a):
return func(*a)
return inner
class Foo(unittest.TestCase):
@decorator
@unittest.skip('testing')
def test_1(self):
pass
result = unittest.TestResult()
test = Foo("test_1")
suite = unittest.TestSuite([test])
suite.run(result)
self.assertEqual(result.skipped, [(test, "testing")])
| agpl-3.0 |
gaddman/ansible | lib/ansible/modules/storage/ibm/ibm_sa_domain.py | 9 | 4173 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2018, IBM CORPORATION
# Author(s): Tzur Eliyahu <[email protected]>
#
# GNU General Public License v3.0+ (see COPYING or
# https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.1'}
DOCUMENTATION = '''
---
module: ibm_sa_domain
short_description: Manages domains on IBM Spectrum Accelerate Family storage systems
version_added: "2.8"
description:
- "This module can be used to add domains to or removes them from IBM Spectrum Accelerate Family storage systems."
options:
domain:
description:
- Name of the domain to be managed.
required: true
state:
description:
- The desired state of the domain.
required: true
default: "present"
choices: [ "present", "absent" ]
ldap_id:
description:
- ldap id to add to the domain.
required: false
size:
description:
- Size of the domain.
required: false
hard_capacity:
description:
- Hard capacity of the domain.
required: false
soft_capacity:
description:
- Soft capacity of the domain.
required: false
max_cgs:
description:
- Number of max cgs.
required: false
max_dms:
description:
- Number of max dms.
required: false
max_mirrors:
description:
- Number of max_mirrors.
required: false
max_pools:
description:
- Number of max_pools.
required: false
max_volumes:
description:
- Number of max_volumes.
required: false
perf_class:
description:
- Add the domain to a performance class.
required: false
extends_documentation_fragment:
- ibm_storage
author:
- Tzur Eliyahu (@tzure)
'''
EXAMPLES = '''
- name: Define new domain.
ibm_sa_domain:
domain: domain_name
size: domain_size
state: present
username: admin
password: secret
endpoints: hostdev-system
- name: Delete domain.
ibm_sa_domain:
domain: domain_name
state: absent
username: admin
password: secret
endpoints: hostdev-system
'''
RETURN = '''
msg:
description: module return status.
returned: as needed
type: string
sample: "domain 'domain_name' created successfully."
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.ibm_sa_utils import execute_pyxcli_command, \
connect_ssl, spectrum_accelerate_spec, is_pyxcli_installed
def main():
argument_spec = spectrum_accelerate_spec()
argument_spec.update(
dict(
state=dict(default='present', choices=['present', 'absent']),
domain=dict(required=True),
size=dict(),
max_dms=dict(),
max_cgs=dict(),
ldap_id=dict(),
max_mirrors=dict(),
max_pools=dict(),
max_volumes=dict(),
perf_class=dict(),
hard_capacity=dict(),
soft_capacity=dict()
)
)
module = AnsibleModule(argument_spec)
is_pyxcli_installed(module)
xcli_client = connect_ssl(module)
domain = xcli_client.cmd.domain_list(
domain=module.params['domain']).as_single_element
state = module.params['state']
state_changed = False
msg = 'Domain \'{0}\''.format(module.params['domain'])
if state == 'present' and not domain:
state_changed = execute_pyxcli_command(
module, 'domain_create', xcli_client)
msg += " created successfully."
elif state == 'absent' and domain:
state_changed = execute_pyxcli_command(
module, 'domain_delete', xcli_client)
msg += " deleted successfully."
else:
msg += " state unchanged."
module.exit_json(changed=state_changed, msg=msg)
if __name__ == '__main__':
main()
| gpl-3.0 |
shakamunyi/nova | nova/tests/unit/db/test_migrations.py | 1 | 32632 | # Copyright 2010-2011 OpenStack Foundation
# Copyright 2012-2013 IBM Corp.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Tests for database migrations.
There are "opportunistic" tests which allows testing against all 3 databases
(sqlite in memory, mysql, pg) in a properly configured unit test environment.
For the opportunistic testing you need to set up databases named 'openstack_citest'
with user 'openstack_citest' and password 'openstack_citest' on localhost. The
test will then use that db and u/p combo to run the tests.
For postgres on Ubuntu this can be done with the following commands::
| sudo -u postgres psql
| postgres=# create user openstack_citest with createdb login password
| 'openstack_citest';
| postgres=# create database openstack_citest with owner openstack_citest;
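A roughly equivalent setup for mysql (one possible variant; the test user
must be able to create databases) is::
| mysql -u root
| mysql> create database openstack_citest;
| mysql> create user 'openstack_citest'@'localhost'
|        identified by 'openstack_citest';
| mysql> grant all privileges on *.* to 'openstack_citest'@'localhost';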
"""
import glob
import logging
import os
from migrate.versioning import repository
import mock
from oslo.db.sqlalchemy import test_base
from oslo.db.sqlalchemy import test_migrations
from oslo.db.sqlalchemy import utils as oslodbutils
import sqlalchemy
from sqlalchemy.engine import reflection
import sqlalchemy.exc
from sqlalchemy.sql import null
from nova.db import migration
from nova.db.sqlalchemy import migrate_repo
from nova.db.sqlalchemy import migration as sa_migration
from nova.db.sqlalchemy import utils as db_utils
from nova import exception
from nova import test
LOG = logging.getLogger(__name__)
class NovaMigrationsCheckers(test_migrations.WalkVersionsMixin):
"""Test sqlalchemy-migrate migrations."""
TIMEOUT_SCALING_FACTOR = 2
snake_walk = True
downgrade = True
@property
def INIT_VERSION(self):
return migration.db_initial_version()
@property
def REPOSITORY(self):
return repository.Repository(
os.path.abspath(os.path.dirname(migrate_repo.__file__)))
@property
def migration_api(self):
return sa_migration.versioning_api
@property
def migrate_engine(self):
return self.engine
def setUp(self):
super(NovaMigrationsCheckers, self).setUp()
# NOTE(viktors): We should reduce log output because it causes issues,
# when we run tests with testr
migrate_log = logging.getLogger('migrate')
old_level = migrate_log.level
migrate_log.setLevel(logging.WARN)
self.addCleanup(migrate_log.setLevel, old_level)
def assertColumnExists(self, engine, table_name, column):
self.assertTrue(oslodbutils.column_exists(engine, table_name, column))
def assertColumnNotExists(self, engine, table_name, column):
self.assertFalse(oslodbutils.column_exists(engine, table_name, column))
def assertTableNotExists(self, engine, table):
self.assertRaises(sqlalchemy.exc.NoSuchTableError,
oslodbutils.get_table, engine, table)
def assertIndexExists(self, engine, table_name, index):
self.assertTrue(oslodbutils.index_exists(engine, table_name, index))
def assertIndexNotExists(self, engine, table_name, index):
self.assertFalse(oslodbutils.index_exists(engine, table_name, index))
def assertIndexMembers(self, engine, table, index, members):
# NOTE(johannes): Order of columns can matter. Most SQL databases
# can use the leading columns for optimizing queries that don't
# include all of the covered columns.
self.assertIndexExists(engine, table, index)
t = oslodbutils.get_table(engine, table)
index_columns = None
for idx in t.indexes:
if idx.name == index:
index_columns = [c.name for c in idx.columns]
break
self.assertEqual(members, index_columns)
def _skippable_migrations(self):
special = [
216, # Havana
272, # NOOP migration due to revert
]
havana_placeholders = range(217, 227)
icehouse_placeholders = range(235, 244)
juno_placeholders = range(255, 265)
return (special +
havana_placeholders +
icehouse_placeholders +
juno_placeholders)
def migrate_up(self, version, with_data=False):
if with_data:
check = getattr(self, "_check_%03d" % version, None)
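            # (e.g. migrating up to version 267 looks for a _check_267 method)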
if version not in self._skippable_migrations():
self.assertIsNotNone(check,
('DB Migration %i does not have a '
'test. Please add one!') % version)
super(NovaMigrationsCheckers, self).migrate_up(version, with_data)
def test_walk_versions(self):
self.walk_versions(self.snake_walk, self.downgrade)
def _check_227(self, engine, data):
table = oslodbutils.get_table(engine, 'project_user_quotas')
# Insert fake_quotas with the longest resource name.
fake_quotas = {'id': 5,
'project_id': 'fake_project',
'user_id': 'fake_user',
'resource': 'injected_file_content_bytes',
'hard_limit': 10}
table.insert().execute(fake_quotas)
# Check we can get the longest resource name.
quota = table.select(table.c.id == 5).execute().first()
self.assertEqual(quota['resource'], 'injected_file_content_bytes')
def _check_228(self, engine, data):
self.assertColumnExists(engine, 'compute_nodes', 'metrics')
compute_nodes = oslodbutils.get_table(engine, 'compute_nodes')
self.assertIsInstance(compute_nodes.c.metrics.type,
sqlalchemy.types.Text)
def _post_downgrade_228(self, engine):
self.assertColumnNotExists(engine, 'compute_nodes', 'metrics')
def _check_229(self, engine, data):
self.assertColumnExists(engine, 'compute_nodes', 'extra_resources')
compute_nodes = oslodbutils.get_table(engine, 'compute_nodes')
self.assertIsInstance(compute_nodes.c.extra_resources.type,
sqlalchemy.types.Text)
def _post_downgrade_229(self, engine):
self.assertColumnNotExists(engine, 'compute_nodes', 'extra_resources')
def _check_230(self, engine, data):
for table_name in ['instance_actions_events',
'shadow_instance_actions_events']:
self.assertColumnExists(engine, table_name, 'host')
self.assertColumnExists(engine, table_name, 'details')
action_events = oslodbutils.get_table(engine,
'instance_actions_events')
self.assertIsInstance(action_events.c.host.type,
sqlalchemy.types.String)
self.assertIsInstance(action_events.c.details.type,
sqlalchemy.types.Text)
def _post_downgrade_230(self, engine):
for table_name in ['instance_actions_events',
'shadow_instance_actions_events']:
self.assertColumnNotExists(engine, table_name, 'host')
self.assertColumnNotExists(engine, table_name, 'details')
def _check_231(self, engine, data):
self.assertColumnExists(engine, 'instances', 'ephemeral_key_uuid')
instances = oslodbutils.get_table(engine, 'instances')
self.assertIsInstance(instances.c.ephemeral_key_uuid.type,
sqlalchemy.types.String)
self.assertTrue(db_utils.check_shadow_table(engine, 'instances'))
def _post_downgrade_231(self, engine):
self.assertColumnNotExists(engine, 'instances', 'ephemeral_key_uuid')
self.assertTrue(db_utils.check_shadow_table(engine, 'instances'))
def _check_232(self, engine, data):
table_names = ['compute_node_stats', 'compute_nodes',
'instance_actions', 'instance_actions_events',
'instance_faults', 'migrations']
for table_name in table_names:
self.assertTableNotExists(engine, 'dump_' + table_name)
def _check_233(self, engine, data):
self.assertColumnExists(engine, 'compute_nodes', 'stats')
compute_nodes = oslodbutils.get_table(engine, 'compute_nodes')
self.assertIsInstance(compute_nodes.c.stats.type,
sqlalchemy.types.Text)
self.assertRaises(sqlalchemy.exc.NoSuchTableError,
oslodbutils.get_table, engine, 'compute_node_stats')
def _post_downgrade_233(self, engine):
self.assertColumnNotExists(engine, 'compute_nodes', 'stats')
# confirm compute_node_stats exists
oslodbutils.get_table(engine, 'compute_node_stats')
def _check_234(self, engine, data):
self.assertIndexMembers(engine, 'reservations',
'reservations_deleted_expire_idx',
['deleted', 'expire'])
def _check_244(self, engine, data):
volume_usage_cache = oslodbutils.get_table(
engine, 'volume_usage_cache')
self.assertEqual(64, volume_usage_cache.c.user_id.type.length)
def _post_downgrade_244(self, engine):
volume_usage_cache = oslodbutils.get_table(
engine, 'volume_usage_cache')
self.assertEqual(36, volume_usage_cache.c.user_id.type.length)
def _pre_upgrade_245(self, engine):
# create a fake network
networks = oslodbutils.get_table(engine, 'networks')
fake_network = {'id': 1}
networks.insert().execute(fake_network)
def _check_245(self, engine, data):
networks = oslodbutils.get_table(engine, 'networks')
network = networks.select(networks.c.id == 1).execute().first()
# mtu should default to None
self.assertIsNone(network.mtu)
# dhcp_server should default to None
self.assertIsNone(network.dhcp_server)
# enable dhcp should default to true
self.assertTrue(network.enable_dhcp)
# share address should default to false
self.assertFalse(network.share_address)
def _post_downgrade_245(self, engine):
self.assertColumnNotExists(engine, 'networks', 'mtu')
self.assertColumnNotExists(engine, 'networks', 'dhcp_server')
self.assertColumnNotExists(engine, 'networks', 'enable_dhcp')
self.assertColumnNotExists(engine, 'networks', 'share_address')
def _check_246(self, engine, data):
pci_devices = oslodbutils.get_table(engine, 'pci_devices')
self.assertEqual(1, len([fk for fk in pci_devices.foreign_keys
if fk.parent.name == 'compute_node_id']))
def _post_downgrade_246(self, engine):
pci_devices = oslodbutils.get_table(engine, 'pci_devices')
self.assertEqual(0, len([fk for fk in pci_devices.foreign_keys
if fk.parent.name == 'compute_node_id']))
def _check_247(self, engine, data):
quota_usages = oslodbutils.get_table(engine, 'quota_usages')
self.assertFalse(quota_usages.c.resource.nullable)
pci_devices = oslodbutils.get_table(engine, 'pci_devices')
self.assertTrue(pci_devices.c.deleted.nullable)
self.assertFalse(pci_devices.c.product_id.nullable)
self.assertFalse(pci_devices.c.vendor_id.nullable)
self.assertFalse(pci_devices.c.dev_type.nullable)
def _post_downgrade_247(self, engine):
quota_usages = oslodbutils.get_table(engine, 'quota_usages')
self.assertTrue(quota_usages.c.resource.nullable)
pci_devices = oslodbutils.get_table(engine, 'pci_devices')
self.assertFalse(pci_devices.c.deleted.nullable)
self.assertTrue(pci_devices.c.product_id.nullable)
self.assertTrue(pci_devices.c.vendor_id.nullable)
self.assertTrue(pci_devices.c.dev_type.nullable)
def _check_248(self, engine, data):
self.assertIndexMembers(engine, 'reservations',
'reservations_deleted_expire_idx',
['deleted', 'expire'])
def _post_downgrade_248(self, engine):
reservations = oslodbutils.get_table(engine, 'reservations')
index_names = [idx.name for idx in reservations.indexes]
self.assertNotIn('reservations_deleted_expire_idx', index_names)
def _check_249(self, engine, data):
# Assert that only one index exists that covers columns
# instance_uuid and device_name
bdm = oslodbutils.get_table(engine, 'block_device_mapping')
self.assertEqual(1, len([i for i in bdm.indexes
if [c.name for c in i.columns] ==
['instance_uuid', 'device_name']]))
def _post_downgrade_249(self, engine):
# The duplicate index is not created on downgrade, so this
# asserts that only one index exists that covers columns
# instance_uuid and device_name
bdm = oslodbutils.get_table(engine, 'block_device_mapping')
self.assertEqual(1, len([i for i in bdm.indexes
if [c.name for c in i.columns] ==
['instance_uuid', 'device_name']]))
def _check_250(self, engine, data):
self.assertTableNotExists(engine, 'instance_group_metadata')
self.assertTableNotExists(engine, 'shadow_instance_group_metadata')
def _post_downgrade_250(self, engine):
oslodbutils.get_table(engine, 'instance_group_metadata')
oslodbutils.get_table(engine, 'shadow_instance_group_metadata')
def _check_251(self, engine, data):
self.assertColumnExists(engine, 'compute_nodes', 'numa_topology')
self.assertColumnExists(engine, 'shadow_compute_nodes',
'numa_topology')
compute_nodes = oslodbutils.get_table(engine, 'compute_nodes')
shadow_compute_nodes = oslodbutils.get_table(engine,
'shadow_compute_nodes')
self.assertIsInstance(compute_nodes.c.numa_topology.type,
sqlalchemy.types.Text)
self.assertIsInstance(shadow_compute_nodes.c.numa_topology.type,
sqlalchemy.types.Text)
def _post_downgrade_251(self, engine):
self.assertColumnNotExists(engine, 'compute_nodes', 'numa_topology')
self.assertColumnNotExists(engine, 'shadow_compute_nodes',
'numa_topology')
def _check_252(self, engine, data):
oslodbutils.get_table(engine, 'instance_extra')
oslodbutils.get_table(engine, 'shadow_instance_extra')
self.assertIndexMembers(engine, 'instance_extra',
'instance_extra_idx',
['instance_uuid'])
def _post_downgrade_252(self, engine):
self.assertTableNotExists(engine, 'instance_extra')
self.assertTableNotExists(engine, 'shadow_instance_extra')
def _check_253(self, engine, data):
self.assertColumnExists(engine, 'instance_extra', 'pci_requests')
self.assertColumnExists(
engine, 'shadow_instance_extra', 'pci_requests')
instance_extra = oslodbutils.get_table(engine, 'instance_extra')
shadow_instance_extra = oslodbutils.get_table(engine,
'shadow_instance_extra')
self.assertIsInstance(instance_extra.c.pci_requests.type,
sqlalchemy.types.Text)
self.assertIsInstance(shadow_instance_extra.c.pci_requests.type,
sqlalchemy.types.Text)
def _post_downgrade_253(self, engine):
self.assertColumnNotExists(engine, 'instance_extra', 'pci_requests')
self.assertColumnNotExists(engine, 'shadow_instance_extra',
'pci_requests')
def _check_254(self, engine, data):
self.assertColumnExists(engine, 'pci_devices', 'request_id')
self.assertColumnExists(
engine, 'shadow_pci_devices', 'request_id')
pci_devices = oslodbutils.get_table(engine, 'pci_devices')
shadow_pci_devices = oslodbutils.get_table(
engine, 'shadow_pci_devices')
self.assertIsInstance(pci_devices.c.request_id.type,
sqlalchemy.types.String)
self.assertIsInstance(shadow_pci_devices.c.request_id.type,
sqlalchemy.types.String)
def _post_downgrade_254(self, engine):
self.assertColumnNotExists(engine, 'pci_devices', 'request_id')
self.assertColumnNotExists(
engine, 'shadow_pci_devices', 'request_id')
def _check_265(self, engine, data):
# Assert that only one index exists that covers columns
# host and deleted
instances = oslodbutils.get_table(engine, 'instances')
self.assertEqual(1, len([i for i in instances.indexes
if [c.name for c in i.columns][:2] ==
['host', 'deleted']]))
# and only one index covers host column
iscsi_targets = oslodbutils.get_table(engine, 'iscsi_targets')
self.assertEqual(1, len([i for i in iscsi_targets.indexes
if [c.name for c in i.columns][:1] ==
['host']]))
def _post_downgrade_265(self, engine):
# The duplicated index is not created on downgrade, so this
# asserts that only one index exists that covers columns
# host and deleted
instances = oslodbutils.get_table(engine, 'instances')
self.assertEqual(1, len([i for i in instances.indexes
if [c.name for c in i.columns][:2] ==
['host', 'deleted']]))
# and only one index covers host column
iscsi_targets = oslodbutils.get_table(engine, 'iscsi_targets')
self.assertEqual(1, len([i for i in iscsi_targets.indexes
if [c.name for c in i.columns][:1] ==
['host']]))
def _check_266(self, engine, data):
self.assertColumnExists(engine, 'tags', 'resource_id')
self.assertColumnExists(engine, 'tags', 'tag')
table = oslodbutils.get_table(engine, 'tags')
self.assertIsInstance(table.c.resource_id.type,
sqlalchemy.types.String)
self.assertIsInstance(table.c.tag.type,
sqlalchemy.types.String)
def _post_downgrade_266(self, engine):
self.assertTableNotExists(engine, 'tags')
def _pre_upgrade_267(self, engine):
# Create a fixed_ips row with a null instance_uuid (if not already
# there) to make sure that's not deleted.
fixed_ips = oslodbutils.get_table(engine, 'fixed_ips')
fake_fixed_ip = {'id': 1}
fixed_ips.insert().execute(fake_fixed_ip)
# Create an instance record with a valid (non-null) UUID so we make
# sure we don't do something stupid and delete valid records.
instances = oslodbutils.get_table(engine, 'instances')
fake_instance = {'id': 1, 'uuid': 'fake-non-null-uuid'}
instances.insert().execute(fake_instance)
# Add a null instance_uuid entry for the volumes table
# since it doesn't have a foreign key back to the instances table.
volumes = oslodbutils.get_table(engine, 'volumes')
fake_volume = {'id': '9c3c317e-24db-4d57-9a6f-96e6d477c1da'}
volumes.insert().execute(fake_volume)
def _check_267(self, engine, data):
        # Make sure instances.uuid is non-nullable and the UC exists, while
        # fixed_ips.instance_uuid stays nullable.
fixed_ips = oslodbutils.get_table(engine, 'fixed_ips')
self.assertTrue(fixed_ips.c.instance_uuid.nullable)
fixed_ip = fixed_ips.select(fixed_ips.c.id == 1).execute().first()
self.assertIsNone(fixed_ip.instance_uuid)
instances = oslodbutils.get_table(engine, 'instances')
self.assertFalse(instances.c.uuid.nullable)
inspector = reflection.Inspector.from_engine(engine)
constraints = inspector.get_unique_constraints('instances')
constraint_names = [constraint['name'] for constraint in constraints]
self.assertIn('uniq_instances0uuid', constraint_names)
# Make sure the instances record with the valid uuid is still there.
instance = instances.select(instances.c.id == 1).execute().first()
self.assertIsNotNone(instance)
# Check that the null entry in the volumes table is still there since
# we skipped tables that don't have FK's back to the instances table.
volumes = oslodbutils.get_table(engine, 'volumes')
self.assertTrue(volumes.c.instance_uuid.nullable)
        volume = volumes.select(
volumes.c.id == '9c3c317e-24db-4d57-9a6f-96e6d477c1da'
).execute().first()
self.assertIsNone(volume.instance_uuid)
def _post_downgrade_267(self, engine):
# Make sure the UC is gone and the column is nullable again.
instances = oslodbutils.get_table(engine, 'instances')
self.assertTrue(instances.c.uuid.nullable)
inspector = reflection.Inspector.from_engine(engine)
constraints = inspector.get_unique_constraints('instances')
constraint_names = [constraint['name'] for constraint in constraints]
self.assertNotIn('uniq_instances0uuid', constraint_names)
def test_migration_267(self):
# This is separate from test_walk_versions so we can test the case
# where there are non-null instance_uuid entries in the database which
# cause the 267 migration to fail.
engine = self.migrate_engine
self.migration_api.version_control(
engine, self.REPOSITORY, self.INIT_VERSION)
self.migration_api.upgrade(engine, self.REPOSITORY, 266)
# Create a consoles record with a null instance_uuid so
# we can test that the upgrade fails if that entry is found.
# NOTE(mriedem): We use the consoles table since that's the only table
# created in the 216 migration with a ForeignKey created on the
# instance_uuid table for sqlite.
consoles = oslodbutils.get_table(engine, 'consoles')
fake_console = {'id': 1}
consoles.insert().execute(fake_console)
# NOTE(mriedem): We handle the 267 migration where we expect to
# hit a ValidationError on the consoles table to have
# a null instance_uuid entry
ex = self.assertRaises(exception.ValidationError,
self.migration_api.upgrade,
engine, self.REPOSITORY, 267)
self.assertIn("There are 1 records in the "
"'consoles' table where the uuid or "
"instance_uuid column is NULL.",
ex.kwargs['detail'])
# Remove the consoles entry with the null instance_uuid column.
rows = consoles.delete().where(
consoles.c['instance_uuid'] == null()).execute().rowcount
self.assertEqual(1, rows)
# Now run the 267 upgrade again.
self.migration_api.upgrade(engine, self.REPOSITORY, 267)
# Make sure the consoles entry with the null instance_uuid
# was deleted.
console = consoles.select(consoles.c.id == 1).execute().first()
self.assertIsNone(console)
def _check_268(self, engine, data):
# We can only assert that the col exists, not the unique constraint
# as the engine is running sqlite
self.assertColumnExists(engine, 'compute_nodes', 'host')
self.assertColumnExists(engine, 'shadow_compute_nodes', 'host')
compute_nodes = oslodbutils.get_table(engine, 'compute_nodes')
shadow_compute_nodes = oslodbutils.get_table(
engine, 'shadow_compute_nodes')
self.assertIsInstance(compute_nodes.c.host.type,
sqlalchemy.types.String)
self.assertIsInstance(shadow_compute_nodes.c.host.type,
sqlalchemy.types.String)
def _post_downgrade_268(self, engine):
self.assertColumnNotExists(engine, 'compute_nodes', 'host')
self.assertColumnNotExists(engine, 'shadow_compute_nodes', 'host')
def _check_269(self, engine, data):
self.assertColumnExists(engine, 'pci_devices', 'numa_node')
self.assertColumnExists(engine, 'shadow_pci_devices', 'numa_node')
pci_devices = oslodbutils.get_table(engine, 'pci_devices')
shadow_pci_devices = oslodbutils.get_table(
engine, 'shadow_pci_devices')
self.assertIsInstance(pci_devices.c.numa_node.type,
sqlalchemy.types.Integer)
self.assertTrue(pci_devices.c.numa_node.nullable)
self.assertIsInstance(shadow_pci_devices.c.numa_node.type,
sqlalchemy.types.Integer)
self.assertTrue(shadow_pci_devices.c.numa_node.nullable)
def _post_downgrade_269(self, engine):
self.assertColumnNotExists(engine, 'pci_devices', 'numa_node')
self.assertColumnNotExists(engine, 'shadow_pci_devices', 'numa_node')
def _check_270(self, engine, data):
self.assertColumnExists(engine, 'instance_extra', 'flavor')
self.assertColumnExists(engine, 'shadow_instance_extra', 'flavor')
instance_extra = oslodbutils.get_table(engine, 'instance_extra')
shadow_instance_extra = oslodbutils.get_table(
engine, 'shadow_instance_extra')
self.assertIsInstance(instance_extra.c.flavor.type,
sqlalchemy.types.Text)
self.assertIsInstance(shadow_instance_extra.c.flavor.type,
sqlalchemy.types.Text)
def _post_downgrade_270(self, engine):
self.assertColumnNotExists(engine, 'instance_extra', 'flavor')
self.assertColumnNotExists(engine, 'shadow_instance_extra', 'flavor')
def _check_271(self, engine, data):
self.assertIndexMembers(engine, 'block_device_mapping',
'snapshot_id', ['snapshot_id'])
self.assertIndexMembers(engine, 'block_device_mapping',
'volume_id', ['volume_id'])
self.assertIndexMembers(engine, 'dns_domains',
'dns_domains_project_id_idx',
['project_id'])
self.assertIndexMembers(engine, 'fixed_ips',
'network_id', ['network_id'])
self.assertIndexMembers(engine, 'fixed_ips',
'fixed_ips_instance_uuid_fkey',
['instance_uuid'])
self.assertIndexMembers(engine, 'fixed_ips',
'fixed_ips_virtual_interface_id_fkey',
['virtual_interface_id'])
self.assertIndexMembers(engine, 'floating_ips',
'fixed_ip_id', ['fixed_ip_id'])
self.assertIndexMembers(engine, 'iscsi_targets',
'iscsi_targets_volume_id_fkey', ['volume_id'])
self.assertIndexMembers(engine, 'virtual_interfaces',
'virtual_interfaces_network_id_idx',
['network_id'])
self.assertIndexMembers(engine, 'virtual_interfaces',
'virtual_interfaces_instance_uuid_fkey',
['instance_uuid'])
# Removed on MySQL, never existed on other databases
self.assertIndexNotExists(engine, 'dns_domains', 'project_id')
self.assertIndexNotExists(engine, 'virtual_interfaces', 'network_id')
def _post_downgrade_271(self, engine):
self.assertIndexNotExists(engine, 'dns_domains',
'dns_domains_project_id_idx')
self.assertIndexNotExists(engine, 'virtual_interfaces',
'virtual_interfaces_network_id_idx')
if engine.name == 'mysql':
self.assertIndexMembers(engine, 'dns_domains',
'project_id',
['project_id'])
self.assertIndexMembers(engine, 'virtual_interfaces',
'network_id',
['network_id'])
# Rest of indexes will still exist on MySQL
return
# Never existed on non-MySQL databases, so shouldn't exist now
self.assertIndexNotExists(engine, 'dns_domains', 'project_id')
self.assertIndexNotExists(engine, 'virtual_interfaces', 'network_id')
for table_name, index_name in [
('block_device_mapping', 'snapshot_id'),
('block_device_mapping', 'volume_id'),
('dns_domains', 'dns_domains_project_id_idx'),
('fixed_ips', 'network_id'),
('fixed_ips', 'fixed_ips_instance_uuid_fkey'),
('fixed_ips', 'fixed_ips_virtual_interface_id_fkey'),
('floating_ips', 'fixed_ip_id'),
('iscsi_targets', 'iscsi_targets_volume_id_fkey'),
('virtual_interfaces', 'virtual_interfaces_network_id_idx'),
('virtual_interfaces',
'virtual_interfaces_instance_uuid_fkey')]:
self.assertIndexNotExists(engine, table_name, index_name)
class TestNovaMigrationsSQLite(NovaMigrationsCheckers,
test.TestCase,
test_base.DbTestCase):
pass
class TestNovaMigrationsMySQL(NovaMigrationsCheckers,
test.TestCase,
test_base.MySQLOpportunisticTestCase):
def test_innodb_tables(self):
with mock.patch.object(sa_migration, 'get_engine',
return_value=self.migrate_engine):
sa_migration.db_sync()
total = self.migrate_engine.execute(
"SELECT count(*) "
"FROM information_schema.TABLES "
"WHERE TABLE_SCHEMA = '%(database)s'" %
{'database': self.migrate_engine.url.database})
self.assertTrue(total.scalar() > 0, "No tables found. Wrong schema?")
noninnodb = self.migrate_engine.execute(
"SELECT count(*) "
"FROM information_schema.TABLES "
"WHERE TABLE_SCHEMA='%(database)s' "
"AND ENGINE != 'InnoDB' "
"AND TABLE_NAME != 'migrate_version'" %
{'database': self.migrate_engine.url.database})
count = noninnodb.scalar()
self.assertEqual(count, 0, "%d non InnoDB tables created" % count)
class TestNovaMigrationsPostgreSQL(NovaMigrationsCheckers,
test.TestCase,
test_base.PostgreSQLOpportunisticTestCase):
pass
class ProjectTestCase(test.NoDBTestCase):
def test_all_migrations_have_downgrade(self):
topdir = os.path.normpath(os.path.dirname(__file__) + '/../../../')
py_glob = os.path.join(topdir, "nova", "db", "sqlalchemy",
"migrate_repo", "versions", "*.py")
missing_downgrade = []
for path in glob.iglob(py_glob):
has_upgrade = False
has_downgrade = False
with open(path, "r") as f:
for line in f:
if 'def upgrade(' in line:
has_upgrade = True
if 'def downgrade(' in line:
has_downgrade = True
if has_upgrade and not has_downgrade:
fname = os.path.basename(path)
missing_downgrade.append(fname)
helpful_msg = ("The following migrations are missing a downgrade:"
"\n\t%s" % '\n\t'.join(sorted(missing_downgrade)))
self.assertFalse(missing_downgrade, helpful_msg)
| apache-2.0 |
jotes/ansible | v2/test/parsing/test_mod_args.py | 109 | 4913 | # (c) 2012-2014, Michael DeHaan <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.parsing.mod_args import ModuleArgsParser
from ansible.errors import AnsibleParserError
from ansible.compat.tests import unittest
class TestModArgsDwim(unittest.TestCase):
# TODO: add tests that construct ModuleArgsParser with a task reference
# TODO: verify the AnsibleError raised on failure knows the task
# and the task knows the line numbers
def setUp(self):
pass
def _debug(self, mod, args, to):
print("RETURNED module = {0}".format(mod))
print(" args = {0}".format(args))
print(" to = {0}".format(to))
def tearDown(self):
pass
def test_basic_shell(self):
m = ModuleArgsParser(dict(shell='echo hi'))
mod, args, to = m.parse()
self._debug(mod, args, to)
self.assertEqual(mod, 'command')
self.assertEqual(args, dict(
_raw_params = 'echo hi',
_uses_shell = True,
))
self.assertIsNone(to)
def test_basic_command(self):
m = ModuleArgsParser(dict(command='echo hi'))
mod, args, to = m.parse()
self._debug(mod, args, to)
self.assertEqual(mod, 'command')
self.assertEqual(args, dict(
_raw_params = 'echo hi',
))
self.assertIsNone(to)
def test_shell_with_modifiers(self):
m = ModuleArgsParser(dict(shell='/bin/foo creates=/tmp/baz removes=/tmp/bleep'))
mod, args, to = m.parse()
self._debug(mod, args, to)
self.assertEqual(mod, 'command')
self.assertEqual(args, dict(
creates = '/tmp/baz',
removes = '/tmp/bleep',
_raw_params = '/bin/foo',
_uses_shell = True,
))
self.assertIsNone(to)
def test_normal_usage(self):
m = ModuleArgsParser(dict(copy='src=a dest=b'))
mod, args, to = m.parse()
self._debug(mod, args, to)
self.assertEqual(mod, 'copy')
self.assertEqual(args, dict(src='a', dest='b'))
self.assertIsNone(to)
def test_complex_args(self):
m = ModuleArgsParser(dict(copy=dict(src='a', dest='b')))
mod, args, to = m.parse()
self._debug(mod, args, to)
self.assertEqual(mod, 'copy')
self.assertEqual(args, dict(src='a', dest='b'))
self.assertIsNone(to)
def test_action_with_complex(self):
m = ModuleArgsParser(dict(action=dict(module='copy', src='a', dest='b')))
mod, args, to = m.parse()
self._debug(mod, args, to)
self.assertEqual(mod, 'copy')
self.assertEqual(args, dict(src='a', dest='b'))
self.assertIsNone(to)
def test_action_with_complex_and_complex_args(self):
m = ModuleArgsParser(dict(action=dict(module='copy', args=dict(src='a', dest='b'))))
mod, args, to = m.parse()
self._debug(mod, args, to)
self.assertEqual(mod, 'copy')
self.assertEqual(args, dict(src='a', dest='b'))
self.assertIsNone(to)
def test_local_action_string(self):
m = ModuleArgsParser(dict(local_action='copy src=a dest=b'))
mod, args, to = m.parse()
self._debug(mod, args, to)
self.assertEqual(mod, 'copy')
self.assertEqual(args, dict(src='a', dest='b'))
self.assertIs(to, 'localhost')
def test_multiple_actions(self):
m = ModuleArgsParser(dict(action='shell echo hi', local_action='shell echo hi'))
self.assertRaises(AnsibleParserError, m.parse)
m = ModuleArgsParser(dict(action='shell echo hi', shell='echo hi'))
self.assertRaises(AnsibleParserError, m.parse)
m = ModuleArgsParser(dict(local_action='shell echo hi', shell='echo hi'))
self.assertRaises(AnsibleParserError, m.parse)
m = ModuleArgsParser(dict(ping='data=hi', shell='echo hi'))
self.assertRaises(AnsibleParserError, m.parse)
| gpl-3.0 |
ryuunosukeyoshi/PartnerPoi-Bot | lib/youtube_dl/extractor/tvigle.py | 48 | 4229 | # coding: utf-8
from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..utils import (
ExtractorError,
float_or_none,
int_or_none,
parse_age_limit,
)
class TvigleIE(InfoExtractor):
IE_NAME = 'tvigle'
IE_DESC = 'Интернет-телевидение Tvigle.ru'
_VALID_URL = r'https?://(?:www\.)?(?:tvigle\.ru/(?:[^/]+/)+(?P<display_id>[^/]+)/$|cloud\.tvigle\.ru/video/(?P<id>\d+))'
_GEO_BYPASS = False
_GEO_COUNTRIES = ['RU']
_TESTS = [
{
'url': 'http://www.tvigle.ru/video/sokrat/',
'md5': '36514aed3657d4f70b4b2cef8eb520cd',
'info_dict': {
'id': '1848932',
'display_id': 'sokrat',
'ext': 'flv',
'title': 'Сократ',
'description': 'md5:d6b92ffb7217b4b8ebad2e7665253c17',
'duration': 6586,
'age_limit': 12,
},
'skip': 'georestricted',
},
{
'url': 'http://www.tvigle.ru/video/vladimir-vysotskii/vedushchii-teleprogrammy-60-minut-ssha-o-vladimire-vysotskom/',
'md5': 'e7efe5350dd5011d0de6550b53c3ba7b',
'info_dict': {
'id': '5142516',
'ext': 'flv',
'title': 'Ведущий телепрограммы «60 минут» (США) о Владимире Высоцком',
'description': 'md5:027f7dc872948f14c96d19b4178428a4',
'duration': 186.080,
'age_limit': 0,
},
'skip': 'georestricted',
}, {
'url': 'https://cloud.tvigle.ru/video/5267604/',
'only_matching': True,
}
]
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
video_id = mobj.group('id')
display_id = mobj.group('display_id')
if not video_id:
webpage = self._download_webpage(url, display_id)
video_id = self._html_search_regex(
(r'<div[^>]+class=["\']player["\'][^>]+id=["\'](\d+)',
r'var\s+cloudId\s*=\s*["\'](\d+)',
r'class="video-preview current_playing" id="(\d+)"'),
webpage, 'video id')
video_data = self._download_json(
'http://cloud.tvigle.ru/api/play/video/%s/' % video_id, display_id)
item = video_data['playlist']['items'][0]
videos = item.get('videos')
error_message = item.get('errorMessage')
if not videos and error_message:
if item.get('isGeoBlocked') is True:
self.raise_geo_restricted(
msg=error_message, countries=self._GEO_COUNTRIES)
else:
raise ExtractorError(
'%s returned error: %s' % (self.IE_NAME, error_message),
expected=True)
title = item['title']
description = item.get('description')
thumbnail = item.get('thumbnail')
duration = float_or_none(item.get('durationMilliseconds'), 1000)
age_limit = parse_age_limit(item.get('ageRestrictions'))
formats = []
for vcodec, fmts in item['videos'].items():
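            # skip the HLS rendition group here; stray 'm3u8' entries inside
            # other groups are skipped in the inner loop below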
if vcodec == 'hls':
continue
for format_id, video_url in fmts.items():
if format_id == 'm3u8':
continue
height = self._search_regex(
r'^(\d+)[pP]$', format_id, 'height', default=None)
formats.append({
'url': video_url,
'format_id': '%s-%s' % (vcodec, format_id),
'vcodec': vcodec,
'height': int_or_none(height),
'filesize': int_or_none(item.get('video_files_size', {}).get(vcodec, {}).get(format_id)),
})
self._sort_formats(formats)
return {
'id': video_id,
'display_id': display_id,
'title': title,
'description': description,
'thumbnail': thumbnail,
'duration': duration,
'age_limit': age_limit,
'formats': formats,
}
| gpl-3.0 |
ukanga/SickRage | sickbeard/show_queue.py | 3 | 30406 | # coding=utf-8
# Author: Nic Wolfe <[email protected]>
# URL: https://sickrage.github.io
#
# This file is part of SickRage.
#
# SickRage is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickRage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from collections import namedtuple
import os
import traceback
from imdb import _exceptions as imdb_exceptions
from libtrakt import TraktAPI
import sickbeard
from sickbeard import generic_queue, logger, name_cache, notifiers, ui
from sickbeard.blackandwhitelist import BlackAndWhiteList
from sickbeard.common import WANTED
from sickbeard.helpers import chmodAsParent, get_showname_from_indexer, makeDir, sortable_name
from sickbeard.tv import TVShow
from sickrage.helper.common import sanitize_filename
from sickrage.helper.encoding import ek
from sickrage.helper.exceptions import CantRefreshShowException, CantRemoveShowException, CantUpdateShowException, \
EpisodeDeletedException, MultipleShowObjectsException, ShowDirectoryNotFoundException
from sickrage.show.Show import Show
import six
class ShowQueue(generic_queue.GenericQueue):
def __init__(self):
super(ShowQueue, self).__init__()
self.queue_name = 'SHOWQUEUE'
def _is_in_queue(self, show, actions):
if not show:
return False
return show.indexerid in (x.show.indexerid if x.show else 0 for x in self.queue if x.action_id in actions)
def _is_being_somethinged(self, show, actions):
return self.currentItem is not None and show == self.currentItem.show and self.currentItem.action_id in actions
# def is_in_add_queue(self, show):
# return self._isInQueue(show, (ShowQueueActions.ADD,))
def is_in_update_queue(self, show):
return self._is_in_queue(show, (ShowQueueActions.UPDATE, ShowQueueActions.FORCEUPDATE))
def is_in_refresh_queue(self, show):
return self._is_in_queue(show, (ShowQueueActions.REFRESH,))
def is_in_rename_queue(self, show):
return self._is_in_queue(show, (ShowQueueActions.RENAME,))
def is_in_remove_queue(self, show):
return self._is_in_queue(show, (ShowQueueActions.REMOVE,))
def is_in_subtitle_queue(self, show):
return self._is_in_queue(show, (ShowQueueActions.SUBTITLE,))
def is_being_added(self, show):
return self._is_being_somethinged(show, (ShowQueueActions.ADD,))
def is_being_updated(self, show):
return self._is_being_somethinged(show, (ShowQueueActions.UPDATE, ShowQueueActions.FORCEUPDATE))
def is_being_refreshed(self, show):
return self._is_being_somethinged(show, (ShowQueueActions.REFRESH,))
def is_being_renamed(self, show):
return self._is_being_somethinged(show, (ShowQueueActions.RENAME,))
def is_being_removed(self, show):
return self._is_being_somethinged(show, (ShowQueueActions.REMOVE,))
def is_being_subtitled(self, show):
return self._is_being_somethinged(show, (ShowQueueActions.SUBTITLE,))
@property
def loading_show_list(self):
return {x for x in self.queue + [self.currentItem] if x and x.is_loading}
def update_show(self, show, force=False):
if self.is_being_added(show):
raise CantUpdateShowException(
'{0} is still being added, wait until it is finished before you update.'.format(show.name)
)
if self.is_being_updated(show):
raise CantUpdateShowException(
'{0} is already being updated by Post-processor or manually started, can\'t update again until it\'s done.'.format(show.name)
)
if self.is_in_update_queue(show):
raise CantUpdateShowException(
'{0} is in process of being updated by Post-processor or manually started, can\'t update again until it\'s done.'.format(show.name)
)
queue_item_obj = QueueItemUpdate(show, force=force)
self.add_item(queue_item_obj)
return queue_item_obj
def refresh_show(self, show, force=False):
if self.is_being_refreshed(show) and not force:
raise CantRefreshShowException('This show is already being refreshed, not refreshing again.')
if (self.is_being_updated(show) or self.is_in_update_queue(show)) and not force:
logger.log(
'A refresh was attempted but there is already an update queued or in progress. Updates do a refresh at the end so I\'m skipping this request.',
logger.DEBUG)
return
if show.paused and not force:
logger.log('Skipping show [{0}] because it is paused.'.format(show.name), logger.DEBUG)
return
logger.log('Queueing show refresh for {0}'.format(show.name), logger.DEBUG)
queue_item_obj = QueueItemRefresh(show, force=force)
self.add_item(queue_item_obj)
return queue_item_obj
def rename_show_episodes(self, show, force=False):
queue_item_obj = QueueItemRename(show)
self.add_item(queue_item_obj)
return queue_item_obj
def download_subtitles(self, show, force=False):
queue_item_obj = QueueItemSubtitle(show)
self.add_item(queue_item_obj)
return queue_item_obj
def add_show(self, # pylint: disable=too-many-arguments, too-many-locals
indexer, indexer_id, showDir, default_status=None, quality=None, season_folders=None,
lang=None, subtitles=None, subtitles_sr_metadata=None, anime=None, scene=None, paused=None,
blacklist=None, whitelist=None, default_status_after=None, root_dir=None):
if lang is None:
lang = sickbeard.INDEXER_DEFAULT_LANGUAGE
if default_status_after is None:
default_status_after = sickbeard.STATUS_DEFAULT_AFTER
queue_item_obj = QueueItemAdd(indexer, indexer_id, showDir, default_status, quality, season_folders, lang,
subtitles, subtitles_sr_metadata, anime, scene, paused, blacklist, whitelist,
default_status_after, root_dir)
self.add_item(queue_item_obj)
return queue_item_obj
def remove_show(self, show, full=False):
if not show:
raise CantRemoveShowException('Failed removing show: Show does not exist')
if not hasattr(show, 'indexerid'):
raise CantRemoveShowException('Failed removing show: Show does not have an indexer id')
if self._is_in_queue(show, (ShowQueueActions.REMOVE,)):
raise CantRemoveShowException('{0} is already queued to be removed'.format(show.name))
# remove other queued actions for this show.
for item in self.queue:
if item and item.show and item != self.currentItem and show.indexerid == item.show.indexerid:
self.queue.remove(item)
queue_item_obj = QueueItemRemove(show=show, full=full)
self.add_item(queue_item_obj)
return queue_item_obj
class ShowQueueActions(object): # pylint: disable=too-few-public-methods
def __init__(self):
pass
REFRESH = 1
ADD = 2
UPDATE = 3
FORCEUPDATE = 4
RENAME = 5
SUBTITLE = 6
REMOVE = 7
names = {
REFRESH: 'Refresh',
ADD: 'Add',
UPDATE: 'Update',
FORCEUPDATE: 'Force Update',
RENAME: 'Rename',
SUBTITLE: 'Subtitle',
REMOVE: 'Remove Show'
}
class ShowQueueItem(generic_queue.QueueItem):
"""
Represents an item in the queue waiting to be executed
Can be either:
- show being added (may or may not be associated with a show object)
- show being refreshed
- show being updated
- show being force updated
- show being subtitled
"""
def __init__(self, action_id, show):
super(ShowQueueItem, self).__init__(ShowQueueActions.names[action_id], action_id)
self.show = show
def is_in_queue(self):
return self in sickbeard.showQueueScheduler.action.queue + [
sickbeard.showQueueScheduler.action.currentItem]
@property
def show_name(self):
return self.show.name if self.show else 'UNSET'
@property
def is_loading(self): # pylint: disable=no-self-use
return False
class QueueItemAdd(ShowQueueItem): # pylint: disable=too-many-instance-attributes
def __init__(self, # pylint: disable=too-many-arguments, too-many-locals
indexer, indexer_id, showDir, default_status, quality, season_folders,
lang, subtitles, subtitles_sr_metadata, anime, scene, paused, blacklist, whitelist,
default_status_after, root_dir):
super(QueueItemAdd, self).__init__(ShowQueueActions.ADD, None)
if isinstance(showDir, bytes):
self.showDir = showDir.decode('utf-8')
else:
self.showDir = showDir
self.indexer = indexer
self.indexer_id = indexer_id
self.default_status = default_status
self.quality = quality
self.season_folders = season_folders
self.lang = lang
self.subtitles = subtitles
self.subtitles_sr_metadata = subtitles_sr_metadata
self.anime = anime
self.scene = scene
self.paused = paused
self.blacklist = blacklist
self.whitelist = whitelist
self.default_status_after = default_status_after
self.root_dir = root_dir
self.show = None
# Process add show in priority
self.priority = generic_queue.QueuePriorities.HIGH
@property
def show_name(self):
"""
Returns the show name if there is a show object created, if not returns
the dir that the show is being added to.
"""
return self.show.name if self.show else self.showDir.rsplit(os.sep)[-1]
@property
def is_loading(self):
"""
Returns True if we've gotten far enough to have a show object, or False
if we still only know the folder name.
"""
return self.show not in sickbeard.showList or not self.show
@property
def info(self):
info = namedtuple('LoadingShowInfo', 'id name sort_name network quality')
if self.show:
return info(id=self.show.indexerid, name=self.show.name, sort_name=self.show.sort_name, network=self.show.network, quality=self.show.quality)
return info(id=self.show_name, name=self.show_name, sort_name=sortable_name(self.show_name), network=_('Loading'), quality=0)
def run(self): # pylint: disable=too-many-branches, too-many-statements, too-many-return-statements
super(QueueItemAdd, self).run()
if self.showDir:
try:
assert isinstance(self.showDir, six.text_type)
except AssertionError:
logger.log(traceback.format_exc(), logger.WARNING)
self._finish_early()
return
logger.log('Starting to add show {0}'.format('by ShowDir: {0}'.format(self.showDir) if self.showDir else 'by Indexer Id: {0}'.format(self.indexer_id)))
# make sure the Indexer IDs are valid
try:
lINDEXER_API_PARMS = sickbeard.indexerApi(self.indexer).api_params.copy()
lINDEXER_API_PARMS['language'] = self.lang or sickbeard.INDEXER_DEFAULT_LANGUAGE
logger.log('{0}: {1!r}'.format(sickbeard.indexerApi(self.indexer).name, lINDEXER_API_PARMS))
t = sickbeard.indexerApi(self.indexer).indexer(**lINDEXER_API_PARMS)
s = t[self.indexer_id]
            # Let's try to create the show dir if it's not provided, so that it
            # is built using the indexer-provided series name
if self.root_dir and not self.showDir:
show_name = get_showname_from_indexer(self.indexer, self.indexer_id, self.lang)
if not show_name:
logger.log('Unable to get a show {0}, can\'t add the show'.format(self.showDir))
self._finish_early()
return
self.showDir = ek(os.path.join, self.root_dir, sanitize_filename(show_name))
dir_exists = makeDir(self.showDir)
if not dir_exists:
logger.log('Unable to create the folder {0}, can\'t add the show'.format(self.showDir))
self._finish_early()
return
chmodAsParent(self.showDir)
            # this usually only happens if they have an NFO in their show dir which gave us an Indexer ID that has no proper English version of the show
if getattr(s, 'seriesname', None) is None:
error_string = 'Show in {0} has no name on {1}, probably searched with the wrong language. Delete .nfo and add manually in the correct language.'.format(
self.showDir, sickbeard.indexerApi(self.indexer).name)
logger.log(error_string, logger.WARNING)
ui.notifications.error('Unable to add show', error_string)
self._finish_early()
return
# if the show has no episodes/seasons
if not s:
error_string = 'Show {0} is on {1} but contains no season/episode data.'.format(
s[b'seriesname'], sickbeard.indexerApi(self.indexer).name)
logger.log(error_string)
ui.notifications.error('Unable to add show', error_string)
self._finish_early()
return
except Exception as error:
error_string = 'Unable to look up the show in {0} on {1} using ID {2}, not using the NFO. Delete .nfo and try adding manually again.'.format(
self.showDir, sickbeard.indexerApi(self.indexer).name, self.indexer_id)
logger.log('{0}: {1}'.format(error_string, error), logger.ERROR)
ui.notifications.error(
'Unable to add show', error_string)
if sickbeard.USE_TRAKT:
trakt_id = sickbeard.indexerApi(self.indexer).config[b'trakt_id']
trakt_api = TraktAPI(sickbeard.SSL_VERIFY, sickbeard.TRAKT_TIMEOUT)
title = self.showDir.split('/')[-1]
data = {
'shows': [
{
'title': title,
'ids': {}
}
]
}
if trakt_id == 'tvdb_id':
data[b'shows'][0][b'ids'][b'tvdb'] = self.indexer_id
else:
data[b'shows'][0][b'ids'][b'tvrage'] = self.indexer_id
trakt_api.traktRequest('sync/watchlist/remove', data, method='POST')
self._finish_early()
return
try:
try:
newShow = TVShow(self.indexer, self.indexer_id, self.lang)
except MultipleShowObjectsException as error:
# If we have the show in our list, but the location is wrong, lets fix it and refresh!
existing_show = Show.find(sickbeard.showList, self.indexer_id)
if existing_show and not ek(os.path.isdir, existing_show._location): # pylint: disable=protected-access
newShow = existing_show
else:
raise error
newShow.loadFromIndexer()
self.show = newShow
# set up initial values
self.show.location = self.showDir
self.show.subtitles = self.subtitles if self.subtitles is not None else sickbeard.SUBTITLES_DEFAULT
self.show.subtitles_sr_metadata = self.subtitles_sr_metadata
self.show.quality = self.quality if self.quality else sickbeard.QUALITY_DEFAULT
self.show.season_folders = self.season_folders if self.season_folders is not None else sickbeard.SEASON_FOLDERS_DEFAULT
self.show.anime = self.anime if self.anime is not None else sickbeard.ANIME_DEFAULT
self.show.scene = self.scene if self.scene is not None else sickbeard.SCENE_DEFAULT
self.show.paused = self.paused if self.paused is not None else False
# set up default new/missing episode status
            self.show.default_ep_status = self.default_status
            logger.log('Setting all episodes to the specified default status: {0}'.format(self.show.default_ep_status))
if self.show.anime:
self.show.release_groups = BlackAndWhiteList(self.show.indexerid)
if self.blacklist:
self.show.release_groups.set_black_keywords(self.blacklist)
if self.whitelist:
self.show.release_groups.set_white_keywords(self.whitelist)
# # be smartish about this
# if self.show.genre and 'talk show' in self.show.genre.lower():
# self.show.air_by_date = 1
# if self.show.genre and 'documentary' in self.show.genre.lower():
# self.show.air_by_date = 0
# if self.show.classification and 'sports' in self.show.classification.lower():
# self.show.sports = 1
except sickbeard.indexer_exception as error:
error_string = 'Unable to add {0} due to an error with {1}'.format(
self.show.name if self.show else 'show', sickbeard.indexerApi(self.indexer).name)
logger.log('{0}: {1}'.format(error_string, error), logger.ERROR)
ui.notifications.error('Unable to add show', error_string)
self._finish_early()
return
except MultipleShowObjectsException:
error_string = 'The show in {0} is already in your show list, skipping'.format(self.showDir)
logger.log(error_string, logger.WARNING)
ui.notifications.error('Show skipped', error_string)
self._finish_early()
return
except Exception as error:
logger.log('Error trying to add show: {0}'.format(error), logger.ERROR)
logger.log(traceback.format_exc(), logger.DEBUG)
self._finish_early()
raise
logger.log('Retrieving show info from IMDb', logger.DEBUG)
try:
self.show.loadIMDbInfo()
except imdb_exceptions.IMDbError as error:
            logger.log('Something wrong on IMDb api: {0}'.format(error), logger.WARNING)
except Exception as error:
logger.log('Error loading IMDb info: {0}'.format(error), logger.ERROR)
try:
self.show.saveToDB()
except Exception as error:
logger.log('Error saving the show to the database: {0}'.format(error), logger.ERROR)
logger.log(traceback.format_exc(), logger.DEBUG)
self._finish_early()
raise
# add it to the show list
if not Show.find(sickbeard.showList, self.indexer_id):
sickbeard.showList.append(self.show)
try:
self.show.loadEpisodesFromIndexer()
except Exception as error:
logger.log(
'Error with {0}, not creating episode list: {1}'.format
(sickbeard.indexerApi(self.show.indexer).name, error), logger.ERROR)
logger.log(traceback.format_exc(), logger.DEBUG)
# update internal name cache
name_cache.buildNameCache(self.show)
try:
self.show.loadEpisodesFromDir()
except Exception as error:
logger.log('Error searching dir for episodes: {0}'.format(error), logger.ERROR)
logger.log(traceback.format_exc(), logger.DEBUG)
# if they set default ep status to WANTED then run the backlog to search for episodes
# FIXME: This needs to be a backlog queue item!!!
if self.show.default_ep_status == WANTED:
logger.log('Launching backlog for this show since its episodes are WANTED')
sickbeard.backlogSearchScheduler.action.searchBacklog([self.show])
self.show.writeMetadata()
self.show.updateMetadata()
self.show.populateCache()
self.show.flushEpisodes()
if sickbeard.USE_TRAKT:
# if there are specific episodes that need to be added by trakt
sickbeard.traktCheckerScheduler.action.manageNewShow(self.show)
# add show to trakt.tv library
if sickbeard.TRAKT_SYNC:
sickbeard.traktCheckerScheduler.action.addShowToTraktLibrary(self.show)
if sickbeard.TRAKT_SYNC_WATCHLIST:
logger.log('update watchlist')
notifiers.trakt_notifier.update_watchlist(show_obj=self.show)
# Load XEM data to DB for show
sickbeard.scene_numbering.xem_refresh(self.show.indexerid, self.show.indexer, force=True)
        # check if show has XEM mapping so we can determine if searches should go by scene numbering or indexer numbering.
if not self.scene and sickbeard.scene_numbering.get_xem_numbering_for_show(self.show.indexerid,
self.show.indexer):
self.show.scene = 1
# After initial add, set to default_status_after.
self.show.default_ep_status = self.default_status_after
super(QueueItemAdd, self).finish()
self.finish()
def _finish_early(self):
if self.show is not None:
sickbeard.showQueueScheduler.action.remove_show(self.show)
super(QueueItemAdd, self).finish()
self.finish()
class QueueItemRefresh(ShowQueueItem):
def __init__(self, show=None, force=False):
super(QueueItemRefresh, self).__init__(ShowQueueActions.REFRESH, show)
# do refreshes first because they're quick
self.priority = generic_queue.QueuePriorities.HIGH
# force refresh certain items
self.force = force
def run(self):
super(QueueItemRefresh, self).run()
logger.log('Performing refresh on {0}'.format(self.show.name))
self.show.refreshDir()
self.show.writeMetadata()
if self.force:
self.show.updateMetadata()
self.show.populateCache()
# Load XEM data to DB for show
sickbeard.scene_numbering.xem_refresh(self.show.indexerid, self.show.indexer)
super(QueueItemRefresh, self).finish()
self.finish()
class QueueItemRename(ShowQueueItem):
def __init__(self, show=None):
super(QueueItemRename, self).__init__(ShowQueueActions.RENAME, show)
def run(self):
super(QueueItemRename, self).run()
logger.log('Performing rename on {0}'.format(self.show.name))
try:
self.show.location
except ShowDirectoryNotFoundException:
logger.log('Can\'t perform rename on {0} when the show dir is missing.'.format(self.show.name), logger.WARNING)
super(QueueItemRename, self).finish()
self.finish()
return
ep_obj_rename_list = []
ep_obj_list = self.show.getAllEpisodes(has_location=True)
for cur_ep_obj in ep_obj_list:
# Only want to rename if we have a location
if cur_ep_obj.location:
if cur_ep_obj.relatedEps:
                    # check whether one of the related multi-episodes is already in the rename list
have_already = False
for cur_related_ep in cur_ep_obj.relatedEps + [cur_ep_obj]:
if cur_related_ep in ep_obj_rename_list:
have_already = True
break
if not have_already:
ep_obj_rename_list.append(cur_ep_obj)
else:
ep_obj_rename_list.append(cur_ep_obj)
for cur_ep_obj in ep_obj_rename_list:
cur_ep_obj.rename()
super(QueueItemRename, self).finish()
self.finish()
class QueueItemSubtitle(ShowQueueItem):
def __init__(self, show=None):
super(QueueItemSubtitle, self).__init__(ShowQueueActions.SUBTITLE, show)
def run(self):
super(QueueItemSubtitle, self).run()
logger.log('Downloading subtitles for {0} '.format(self.show.name))
self.show.download_subtitles()
super(QueueItemSubtitle, self).finish()
self.finish()
class QueueItemUpdate(ShowQueueItem):
def __init__(self, show=None, force=False):
action = ShowQueueActions.FORCEUPDATE if force else ShowQueueActions.UPDATE
super(QueueItemUpdate, self).__init__(action, show)
self.force = force
self.priority = generic_queue.QueuePriorities.HIGH
def run(self): # pylint: disable=too-many-branches, too-many-statements
super(QueueItemUpdate, self).run()
logger.log('Beginning update of {0}'.format(self.show.name), logger.DEBUG)
logger.log('Retrieving show info from {0}'.format(sickbeard.indexerApi(self.show.indexer).name), logger.DEBUG)
try:
self.show.loadFromIndexer(cache=not self.force)
except sickbeard.indexer_error as error:
logger.log('Unable to contact {0}, aborting: {1}'.format
(sickbeard.indexerApi(self.show.indexer).name, error), logger.WARNING)
super(QueueItemUpdate, self).finish()
self.finish()
return
except sickbeard.indexer_attributenotfound as error:
logger.log('Data retrieved from {0} was incomplete, aborting: {1}'.format
(sickbeard.indexerApi(self.show.indexer).name, error), logger.ERROR)
super(QueueItemUpdate, self).finish()
self.finish()
return
logger.log('Retrieving show info from IMDb', logger.DEBUG)
try:
self.show.loadIMDbInfo()
except imdb_exceptions.IMDbError as error:
logger.log('Something wrong on IMDb api: {0}'.format(error), logger.WARNING)
except Exception as error:
logger.log('Error loading IMDb info: {0}'.format(error), logger.ERROR)
logger.log(traceback.format_exc(), logger.DEBUG)
# have to save show before reading episodes from db
try:
self.show.saveToDB()
except Exception as error:
logger.log('Error saving show info to the database: {0}'.format(error), logger.ERROR)
logger.log(traceback.format_exc(), logger.DEBUG)
# get episode list from DB
logger.log('Loading all episodes from the database', logger.DEBUG)
DBEpList = self.show.loadEpisodesFromDB()
# get episode list from TVDB
logger.log('Loading all episodes from {0}'.format(sickbeard.indexerApi(self.show.indexer).name), logger.DEBUG)
try:
IndexerEpList = self.show.loadEpisodesFromIndexer(cache=not self.force)
except sickbeard.indexer_exception as error:
logger.log('Unable to get info from {0}, the show info will not be refreshed: {1}'.format
(sickbeard.indexerApi(self.show.indexer).name, error), logger.ERROR)
IndexerEpList = None
if not IndexerEpList:
logger.log('No data returned from {0}, unable to update this show.'.format
(sickbeard.indexerApi(self.show.indexer).name), logger.ERROR)
else:
# for each ep we found on the Indexer delete it from the DB list
for curSeason in IndexerEpList:
for curEpisode in IndexerEpList[curSeason]:
curEp = self.show.getEpisode(curSeason, curEpisode)
curEp.saveToDB()
if curSeason in DBEpList and curEpisode in DBEpList[curSeason]:
del DBEpList[curSeason][curEpisode]
# remaining episodes in the DB list are not on the indexer, just delete them from the DB
for curSeason in DBEpList:
for curEpisode in DBEpList[curSeason]:
                    logger.log('Permanently deleting episode S{0:02d}E{1:02d} from the database'.format
(curSeason, curEpisode), logger.INFO)
curEp = self.show.getEpisode(curSeason, curEpisode)
try:
curEp.deleteEpisode()
except EpisodeDeletedException:
pass
# save show again, in case episodes have changed
try:
self.show.saveToDB()
except Exception as error:
logger.log('Error saving show info to the database: {0}'.format(error), logger.ERROR)
logger.log(traceback.format_exc(), logger.DEBUG)
logger.log('Finished update of {0}'.format(self.show.name), logger.DEBUG)
sickbeard.showQueueScheduler.action.refresh_show(self.show, self.force)
super(QueueItemUpdate, self).finish()
self.finish()
class QueueItemRemove(ShowQueueItem):
def __init__(self, show=None, full=False):
super(QueueItemRemove, self).__init__(ShowQueueActions.REMOVE, show)
        # let's make sure this happens before any other high-priority actions
self.priority = generic_queue.QueuePriorities.HIGH ** 2
self.full = full
def run(self):
super(QueueItemRemove, self).run()
logger.log('Removing {0}'.format(self.show.name))
self.show.deleteShow(full=self.full)
if sickbeard.USE_TRAKT:
try:
sickbeard.traktCheckerScheduler.action.removeShowFromTraktLibrary(self.show)
except Exception as error:
logger.log('Unable to delete show from Trakt: {0}. Error: {1}'.format(self.show.name, error), logger.WARNING)
super(QueueItemRemove, self).finish()
self.finish()
| gpl-3.0 |
Kivvix/stage-LPC | compareSrc/searchSDSSdata.py | 1 | 4221 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import time
import os
import glob
from config import *
import data.calexp
import data.src
## @def attributs
# @brief attributes selected from the SDSS DB and from the src FITS files
attributs = 'objid,run,camcol,field,ra,dec,u,g,r,i,z'
## Calexp treatment ##
def coordCalexp( fitsNum , calexpFits , first=True ):
coordMin, coordMax = data.calexp.coord( calexpFits , first )
if ( first ):
return coordMin
else:
return coordMax
def savCalexp( coordMin , coordMax , fitsNum ):
global attributs , PATH_OUTPUT
calexpLines = data.calexp.query( coordMin , coordMax , attributs , fitsNum )
data.calexp.write( calexpLines , attributs , fitsNum , PATH_OUTPUT , True )
def calexp( fitsNum , calexpFits , first=True ):
"""
find and write calexp data (id,ra,dec,mag)
:param fitsNum: number of fits file (``rrrrrr-bc-ffff``)
:param calexpFits: name of calexp fits file
:param first: take all the picture or less 128 first pixels
:type fitsNum: string
:type calexpFits: string
:type first: boolean
"""
global attributs , PATH_OUTPUT
coordMin, coordMax = data.calexp.coord( calexpFits , first )
calexpLines = data.calexp.query( coordMin , coordMax , attributs , fitsNum )
data.calexp.write( calexpLines , attributs , fitsNum[0:9] , PATH_OUTPUT , first )
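# A minimal usage sketch (the run/band/camcol/field numbers and the path are
# placeholders; fitsNum follows the rrrrrr-bc-ffff convention described above):
#   calexp('004192-r1-0300',
#          PATH_DATA + '/4192/1/r/calexp/calexp-004192-r1-0300.fits')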
## Src treatment ##
def src( fitsNum , srcFits , first=True ):
"""
find and write src data (id,ra,dec,mag)
:param fitsNum: number of fits file (``rrrrrr-bc-ffff``)
:param srcFits: name of src fits file
:param first: take all the picture or less 128 first pixels
:type fitsNum: string
:type srcFits: string
:type first: boolean
"""
global attributs , PATH_OUTPUT
srcCoord,srcMag = data.src.coord( srcFits , fitsNum , first )
srcLines = data.src.map( srcCoord , srcMag )
data.src.write( srcLines , attributs , fitsNum[0:9] , PATH_OUTPUT , first )
def analyCol( runNum , c ):
"""
    search the data for one CCD column of a run (one call per column)

    :param runNum: run number
    :type runNum: string
    :param c: column of the CCD (1-6)
    :type c: string
"""
global b , PATH_DATA , PWD
print " " + str(c) + " ",
# data of each pair of fits files
first = True
for fits in glob.glob( c + "/" + b + "/calexp/calexp*.fits" ):
fitsNum = fits[18:32]
## @def calexpFits
# @brief path and name of calexp fits file
calexpFits = PATH_DATA + "/" + runNum + "/" + c + "/" + b + "/calexp/calexp-" + fitsNum + ".fits"
## @def srcFits
# @brief path and name of src fits file
#srcFits = PATH_DATA + "/" + runNum + "/" + c + "/" + b + "/src/src-" + fitsNum + ".fits"
#calexp( fitsNum , calexpFits , first )
if ( first ):
coordMin = coordCalexp( fitsNum , calexpFits , first )
else:
coordMax = coordCalexp( fitsNum , calexpFits , first )
#src( fitsNum , srcFits , first )
first = False
savCalexp( coordMin , coordMax , "%06d" % int(runNum) + "-" + b + c )
def analyRun( runNum ):
global b , PWD , PATH_DATA , PATH_OUTPUT , attributs
print "run : " + str(runNum ) + " : ",
os.chdir( PATH_DATA + "/" + runNum )
columns = glob.glob( "*" )
for c in columns :
analyCol( runNum , c )
if __name__ == '__main__':
os.chdir( PATH_DATA )
runs = glob.glob( "*" )
#runs = ( 7158, 7112, 5924, 5566, 6421, 7057, 6430, 4895, 5895, 6474, 6383, 7038, 5642, 6409, 6513, 6501, 6552, 2650, 6559, 6355, 7177, 7121, 3465, 7170, 7051, 6283, 6458, 5853, 6484, 5765, 2708, 5786, 4253, 6934, 6508, 2662, 6518, 6584, 4188, 6976, 7202, 7173, 4153, 5820, 2649, 7140, 6330, 3388, 7117, 6504, 6314, 4128, 6596, 6564, 5807, 6367, 6373, 5622, 5882, 7034, 7136, 6577, 6600, 2768, 3437, 4927, 6414, 3434, 5813, 7084, 4858, 7124, 6982, 4917, 4192, 5898, 6479, 4868, 7106, 7195, 5744, 3360, 4198, 6963, 6533, 4933, 5603, 3384, 7155, 5619, 4207, 4849, 5582, 7024, 1755, 5709, 5781, 5770, 7145, 5754, 5646, 5800, 5759, 6287, 6568, 7054, 4203, 5776, 6433, 4247, 5823, 5052, 3325, 5836, 5590, 6580, 7161, 2728, 4145, 5633, 6461, 6555, 6955, 4874, 5792, 5918, 6425, 6377, 4263, 5878, 6441, 6447, 7080, 5905, 5713, 6618, 6537, 5637, 6402, 6530, 7047, 6524, 7101, 6293 )
for r in runs :
analyRun( r )
print " "
time.sleep(60)
| mit |
pinkavaj/gnuradio | gnuradio-runtime/python/gnuradio/gru/msgq_runner.py | 94 | 2529 | #
# Copyright 2009 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
"""
Convenience class for dequeuing messages from a gr.msg_queue and
invoking a callback.
Creates a Python thread that does a blocking read on the supplied
gr.msg_queue, then invokes callback each time a msg is received.
If the msg type is not 0, then it is treated as a signal to exit
its loop.
If the callback raises an exception, and the runner was created
with 'exit_on_error' equal to True, then the runner will store the
exception and exit its loop, otherwise the exception is ignored.
To get the exception that the callback raised, if any, call
exit_error() on the object.
To manually stop the runner, call stop() on the object.
To determine if the runner has exited, call exited() on the object.
"""
from gnuradio import gr
import gnuradio.gr.gr_threading as _threading
class msgq_runner(_threading.Thread):
def __init__(self, msgq, callback, exit_on_error=False):
_threading.Thread.__init__(self)
self._msgq = msgq
self._callback = callback
self._exit_on_error = exit_on_error
self._done = False
self._exited = False
self._exit_error = None
self.setDaemon(1)
self.start()
def run(self):
while not self._done:
msg = self._msgq.delete_head()
if msg.type() != 0:
self.stop()
else:
try:
self._callback(msg)
                except Exception as e:
if self._exit_on_error:
self._exit_error = e
self.stop()
self._exited = True
def stop(self):
self._done = True
def exited(self):
return self._exited
def exit_error(self):
return self._exit_error
| gpl-3.0 |
yamila-moreno/django | tests/gis_tests/geoadmin/tests.py | 304 | 3157 | from __future__ import unicode_literals
from django.contrib.gis import admin
from django.contrib.gis.geos import Point
from django.test import TestCase, override_settings, skipUnlessDBFeature
from .admin import UnmodifiableAdmin
from .models import City, site
@skipUnlessDBFeature("gis_enabled")
@override_settings(ROOT_URLCONF='django.contrib.gis.tests.geoadmin.urls')
class GeoAdminTest(TestCase):
def test_ensure_geographic_media(self):
geoadmin = site._registry[City]
admin_js = geoadmin.media.render_js()
self.assertTrue(any(geoadmin.openlayers_url in js for js in admin_js))
def test_olmap_OSM_rendering(self):
delete_all_btn = """<a href="javascript:geodjango_point.clearFeatures()">Delete all Features</a>"""
original_geoadmin = site._registry[City]
params = original_geoadmin.get_map_widget(City._meta.get_field('point')).params
result = original_geoadmin.get_map_widget(City._meta.get_field('point'))(
).render('point', Point(-79.460734, 40.18476), params)
self.assertIn(
"""geodjango_point.layers.base = new OpenLayers.Layer.OSM("OpenStreetMap (Mapnik)");""",
result)
self.assertIn(delete_all_btn, result)
site.unregister(City)
site.register(City, UnmodifiableAdmin)
try:
geoadmin = site._registry[City]
params = geoadmin.get_map_widget(City._meta.get_field('point')).params
result = geoadmin.get_map_widget(City._meta.get_field('point'))(
).render('point', Point(-79.460734, 40.18476), params)
self.assertNotIn(delete_all_btn, result)
finally:
site.unregister(City)
site.register(City, original_geoadmin.__class__)
def test_olmap_WMS_rendering(self):
geoadmin = admin.GeoModelAdmin(City, site)
result = geoadmin.get_map_widget(City._meta.get_field('point'))(
).render('point', Point(-79.460734, 40.18476))
self.assertIn(
"""geodjango_point.layers.base = new OpenLayers.Layer.WMS("OpenLayers WMS", """
""""http://vmap0.tiles.osgeo.org/wms/vmap0", {layers: 'basic', format: 'image/jpeg'});""",
result)
def test_olwidget_has_changed(self):
"""
Check that changes are accurately noticed by OpenLayersWidget.
"""
geoadmin = site._registry[City]
form = geoadmin.get_changelist_form(None)()
has_changed = form.fields['point'].has_changed
initial = Point(13.4197458572965953, 52.5194108501149799, srid=4326)
data_same = "SRID=3857;POINT(1493879.2754093995 6894592.019687599)"
data_almost_same = "SRID=3857;POINT(1493879.2754093990 6894592.019687590)"
data_changed = "SRID=3857;POINT(1493884.0527237 6894593.8111804)"
self.assertTrue(has_changed(None, data_changed))
self.assertTrue(has_changed(initial, ""))
self.assertFalse(has_changed(None, ""))
self.assertFalse(has_changed(initial, data_same))
self.assertFalse(has_changed(initial, data_almost_same))
self.assertTrue(has_changed(initial, data_changed))
| bsd-3-clause |
xaled/wunderous-analytics | wunderous/drive.py | 1 | 5688 | import os
import sys
import httplib2
from oauth2client.file import Storage
from apiclient import discovery
from oauth2client.client import OAuth2WebServerFlow
from wunderous.config import config
OAUTH_SCOPE = 'https://www.googleapis.com/auth/drive'
SHEETS_OAUTH_SCOPE = 'https://www.googleapis.com/auth/drive https://www.googleapis.com/auth/drive.readonly https://www.googleapis.com/auth/drive.file https://www.googleapis.com/auth/spreadsheets https://www.googleapis.com/auth/spreadsheets.readonly'
REDIRECT_URI = 'urn:ietf:wg:oauth:2.0:oob'
CREDS_FILE = os.path.join(os.path.dirname(os.path.realpath(sys.argv[0])), 'credentials.json')
SHEETS_CREDS_FILE = os.path.join(os.path.dirname(os.path.realpath(sys.argv[0])), 'sheets_credentials.json')
# CONFIG_FILE = os.path.join(os.path.dirname(os.path.realpath(sys.argv[0])), "wunderous.config.json")
sheet_service = None
drive_service = None
# def load_configs():
# client_secret = config['client_secret']
# client_id = config['client_id']
# return client_id, client_secret
def init_drive_service():
global drive_service
if drive_service:
return drive_service
storage = Storage(CREDS_FILE)
credentials = storage.get()
if credentials is None:
# Run through the OAuth flow and retrieve credentials
# client_id, client_secret = load_configs()
flow = OAuth2WebServerFlow(config['drive']['client_id'], config['drive']['client_secret'], OAUTH_SCOPE, REDIRECT_URI)
authorize_url = flow.step1_get_authorize_url()
print('Go to the following link in your browser: ' + authorize_url)
code = input('Enter verification code: ').strip()
credentials = flow.step2_exchange(code)
storage.put(credentials)
# Create an httplib2.Http object and authorize it with our credentials
http = httplib2.Http()
http = credentials.authorize(http)
drive_service = discovery.build('drive', 'v2', http=http)
return drive_service
def init_sheet_service():
global sheet_service
if sheet_service:
return sheet_service
storage = Storage(SHEETS_CREDS_FILE)
credentials = storage.get()
if credentials is None:
# Run through the OAuth flow and retrieve credentials
# client_id, client_secret = load_configs()
        # use the Sheets scopes here; the Drive-only scope is not enough for the Sheets API
        flow = OAuth2WebServerFlow(config['drive']['client_id'], config['drive']['client_secret'], SHEETS_OAUTH_SCOPE, REDIRECT_URI)
authorize_url = flow.step1_get_authorize_url()
print('Go to the following link in your browser: ' + authorize_url)
code = input('Enter verification code: ').strip()
credentials = flow.step2_exchange(code)
storage.put(credentials)
# Create an httplib2.Http object and authorize it with our credentials
http = httplib2.Http()
http = credentials.authorize(http)
sheet_service = discovery.build('sheets', 'v4', http=http)
return sheet_service
def list_files(service):
page_token = None
while True:
param = {}
if page_token:
param['pageToken'] = page_token
files = service.files().list(**param).execute()
for item in files['items']:
yield item
page_token = files.get('nextPageToken')
if not page_token:
break
def _download_file(drive_service, download_url, outfile):
resp, content = drive_service._http.request(download_url)
if resp.status == 200:
with open(outfile, 'wb') as f:
f.write(content)
print("OK")
return
else:
raise Exception("ERROR downloading %s, response code is not 200!" % outfile)
def download_file(outfile, fileid):
drive_service = init_drive_service()
for item in list_files(drive_service):
if fileid == item.get('id'):
if 'downloadUrl' in item:
_download_file(drive_service, item['downloadUrl'], outfile)
return
else:
raise Exception("No download link is found for file: %s" % item['title'])
raise Exception("No file with id: %s is found " % fileid)
def get_sheet_metadata(spreadsheet_id):
sheet_service = init_sheet_service()
sheet_metadata = sheet_service.spreadsheets().get(spreadsheetId=spreadsheet_id).execute()
return sheet_metadata
def get_sheet_values(spreadsheet_id, range_):
sheet_service = init_sheet_service()
request = sheet_service.spreadsheets().values().get(spreadsheetId=spreadsheet_id, range=range_,
valueRenderOption='FORMATTED_VALUE',
dateTimeRenderOption='SERIAL_NUMBER')
response = request.execute()
return response
def get_sheet_value(spreadsheet_id, range_):
response = get_sheet_values(spreadsheet_id, range_)
try:
return response['values'][0][0]
    except (KeyError, IndexError):
return ''
def update_sheet_values(spreadsheet_id, range_, values):
sheet_service = init_sheet_service()
body = {'values': values}
result = sheet_service.spreadsheets().values().update(spreadsheetId=spreadsheet_id, range=range_, body=body,
valueInputOption='USER_ENTERED').execute()
return result.get('updatedCells')
def append_sheet_values(spreadsheet_id, range_, values):
sheet_service = init_sheet_service()
body = {'values': values}
result = sheet_service.spreadsheets().values().append(spreadsheetId=spreadsheet_id, range=range_, body=body,
valueInputOption='USER_ENTERED').execute()
return result.get('updates').get('updatedCells')
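# Example usage (sketch; the spreadsheet id and ranges below are placeholders,
# and the first call triggers the interactive OAuth flow when no stored
# credentials are found):
#   rows = get_sheet_values('1AbC-placeholder-spreadsheet-id', 'Sheet1!A1:C10')
#   update_sheet_values('1AbC-placeholder-spreadsheet-id', 'Sheet1!E1', [['total', 42]])
#   append_sheet_values('1AbC-placeholder-spreadsheet-id', 'Sheet1!A:C', [['a', 'b', 'c']])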
| mit |
astocko/statsmodels | statsmodels/tsa/varma_process.py | 30 | 19986 | # -*- coding: utf-8 -*-
""" Helper and filter functions for VAR and VARMA, and basic VAR class
Created on Mon Jan 11 11:04:23 2010
Author: josef-pktd
License: BSD
This is a new version, I didn't look at the old version again, but similar
ideas.
not copied/cleaned yet:
* fftn based filtering, creating samples with fft
* Tests: I ran examples but did not convert them to tests
examples look good for parameter estimate and forecast, and filter functions
main TODOs:
* result statistics
* see whether Bayesian dummy observation can be included without changing
the single call to linalg.lstsq
* impulse response function does not treat correlation, see Hamilton and jplv
Extensions
* constraints, Bayesian priors/penalization
* Error Correction Form and Cointegration
* Factor Models Stock-Watson, ???
see also VAR section in Notes.txt
"""
from __future__ import print_function
import numpy as np
from numpy.testing import assert_equal
from scipy import signal
#might not (yet) need the following
from scipy.signal.signaltools import _centered as trim_centered
from statsmodels.tsa.tsatools import lagmat
def varfilter(x, a):
'''apply an autoregressive filter to a series x
Warning: I just found out that convolve doesn't work as I
thought, this likely doesn't work correctly for
nvars>3
x can be 2d, a can be 1d, 2d, or 3d
Parameters
----------
x : array_like
data array, 1d or 2d, if 2d then observations in rows
a : array_like
autoregressive filter coefficients, ar lag polynomial
see Notes
Returns
-------
y : ndarray, 2d
filtered array, number of columns determined by x and a
Notes
-----
In general form this uses the linear filter ::
y = a(L)x
where
x : nobs, nvars
a : nlags, nvars, npoly
Depending on the shape and dimension of a this uses different
Lag polynomial arrays
case 1 : a is 1d or (nlags,1)
one lag polynomial is applied to all variables (columns of x)
case 2 : a is 2d, (nlags, nvars)
each series is independently filtered with its own
lag polynomial, uses loop over nvar
case 3 : a is 3d, (nlags, nvars, npoly)
the ith column of the output array is given by the linear filter
defined by the 2d array a[:,:,i], i.e. ::
y[:,i] = a(.,.,i)(L) * x
y[t,i] = sum_p sum_j a(p,j,i)*x(t-p,j)
for p = 0,...nlags-1, j = 0,...nvars-1,
for all t >= nlags
    Note: maybe convert to axis=1 (not done yet)
TODO: initial conditions
'''
x = np.asarray(x)
a = np.asarray(a)
if x.ndim == 1:
x = x[:,None]
if x.ndim > 2:
raise ValueError('x array has to be 1d or 2d')
nvar = x.shape[1]
nlags = a.shape[0]
ntrim = nlags//2
# for x is 2d with ncols >1
if a.ndim == 1:
# case: identical ar filter (lag polynomial)
return signal.convolve(x, a[:,None], mode='valid')
# alternative:
#return signal.lfilter(a,[1],x.astype(float),axis=0)
elif a.ndim == 2:
if min(a.shape) == 1:
# case: identical ar filter (lag polynomial)
return signal.convolve(x, a, mode='valid')
# case: independent ar
#(a bit like recserar in gauss, but no x yet)
#(no, reserar is inverse filter)
result = np.zeros((x.shape[0]-nlags+1, nvar))
for i in range(nvar):
            # could also use np.convolve, but easier for switching to fft
result[:,i] = signal.convolve(x[:,i], a[:,i], mode='valid')
return result
elif a.ndim == 3:
# case: vector autoregressive with lag matrices
# #not necessary:
# if np.any(a.shape[1:] != nvar):
# raise ValueError('if 3d shape of a has to be (nobs,nvar,nvar)')
yf = signal.convolve(x[:,:,None], a)
yvalid = yf[ntrim:-ntrim, yf.shape[1]//2,:]
return yvalid
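# A short sketch of the three coefficient layouts described in the docstring
# (random data and illustrative coefficients only):
#   x = np.random.randn(100, 2)
#   varfilter(x, np.array([1., -0.5]))                # case 1: one common filter
#   varfilter(x, np.array([[1., 1.], [-0.5, -0.8]]))  # case 2: one filter per column
#   a3 = np.array([[[1., 0.], [0., 1.]],
#                  [[-0.5, 0.1], [0.0, -0.4]]])
#   varfilter(x, a3)                                  # case 3: full lag matrices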
def varinversefilter(ar, nobs, version=1):
'''creates inverse ar filter (MA representation) recursively
The VAR lag polynomial is defined by ::
ar(L) y_t = u_t or
y_t = -ar_{-1}(L) y_{t-1} + u_t
the returned lagpolynomial is arinv(L)=ar^{-1}(L) in ::
y_t = arinv(L) u_t
Parameters
----------
ar : array, (nlags,nvars,nvars)
matrix lagpolynomial, currently no exog
first row should be identity
Returns
-------
arinv : array, (nobs,nvars,nvars)
Notes
-----
'''
nlags, nvars, nvarsex = ar.shape
if nvars != nvarsex:
        print('exogenous variables not implemented, not tested')
arinv = np.zeros((nobs+1, nvarsex, nvars))
arinv[0,:,:] = ar[0]
arinv[1:nlags,:,:] = -ar[1:]
if version == 1:
for i in range(2,nobs+1):
tmp = np.zeros((nvars,nvars))
for p in range(1,nlags):
tmp += np.dot(-ar[p],arinv[i-p,:,:])
arinv[i,:,:] = tmp
if version == 0:
for i in range(nlags+1,nobs+1):
print(ar[1:].shape, arinv[i-1:i-nlags:-1,:,:].shape)
#arinv[i,:,:] = np.dot(-ar[1:],arinv[i-1:i-nlags:-1,:,:])
#print(np.tensordot(-ar[1:],arinv[i-1:i-nlags:-1,:,:],axes=([2],[1])).shape
#arinv[i,:,:] = np.tensordot(-ar[1:],arinv[i-1:i-nlags:-1,:,:],axes=([2],[1]))
raise NotImplementedError('waiting for generalized ufuncs or something')
return arinv
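# Example (sketch): for a bivariate VAR(1) the rows of the result are the
# moving-average / impulse-response matrices Psi_0, Psi_1, ...:
#   ar = np.array([[[1., 0.], [0., 1.]],
#                  [[-0.8, 0.], [0., -0.6]]])
#   varinversefilter(ar, 5)   # row k equals diag(0.8**k, 0.6**k) here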
def vargenerate(ar, u, initvalues=None):
'''generate an VAR process with errors u
similar to gauss
uses loop
Parameters
----------
ar : array (nlags,nvars,nvars)
matrix lagpolynomial
u : array (nobs,nvars)
exogenous variable, error term for VAR
Returns
-------
sar : array (1+nobs,nvars)
sample of var process, inverse filtered u
does not trim initial condition y_0 = 0
Examples
--------
# generate random sample of VAR
nobs, nvars = 10, 2
    u = np.random.randn(nobs,nvars)
a21 = np.array([[[ 1. , 0. ],
[ 0. , 1. ]],
[[-0.8, 0. ],
[ 0., -0.6]]])
vargenerate(a21,u)
# Impulse Response to an initial shock to the first variable
imp = np.zeros((nobs, nvars))
imp[0,0] = 1
vargenerate(a21,imp)
'''
nlags, nvars, nvarsex = ar.shape
nlagsm1 = nlags - 1
nobs = u.shape[0]
if nvars != nvarsex:
        print('exogenous variables not implemented, not tested')
if u.shape[1] != nvars:
raise ValueError('u needs to have nvars columns')
if initvalues is None:
sar = np.zeros((nobs+nlagsm1, nvars))
start = nlagsm1
else:
start = max(nlagsm1, initvalues.shape[0])
sar = np.zeros((nobs+start, nvars))
sar[start-initvalues.shape[0]:start] = initvalues
#sar[nlagsm1:] = u
sar[start:] = u
#if version == 1:
for i in range(start,start+nobs):
for p in range(1,nlags):
sar[i] += np.dot(sar[i-p,:],-ar[p])
return sar
def padone(x, front=0, back=0, axis=0, fillvalue=0):
'''pad with zeros along one axis, currently only axis=0
can be used sequentially to pad several axis
Examples
--------
>>> padone(np.ones((2,3)),1,3,axis=1)
array([[ 0., 1., 1., 1., 0., 0., 0.],
[ 0., 1., 1., 1., 0., 0., 0.]])
>>> padone(np.ones((2,3)),1,1, fillvalue=np.nan)
array([[ NaN, NaN, NaN],
[ 1., 1., 1.],
[ 1., 1., 1.],
[ NaN, NaN, NaN]])
'''
#primitive version
shape = np.array(x.shape)
shape[axis] += (front + back)
shapearr = np.array(x.shape)
out = np.empty(shape)
out.fill(fillvalue)
    startind = np.zeros(x.ndim, dtype=int)  # integer dtype: floats are not valid slice bounds
startind[axis] = front
endind = startind + shapearr
myslice = [slice(startind[k], endind[k]) for k in range(len(endind))]
#print(myslice
#print(out.shape
#print(out[tuple(myslice)].shape
out[tuple(myslice)] = x
return out
def trimone(x, front=0, back=0, axis=0):
'''trim number of array elements along one axis
Examples
--------
>>> xp = padone(np.ones((2,3)),1,3,axis=1)
>>> xp
array([[ 0., 1., 1., 1., 0., 0., 0.],
[ 0., 1., 1., 1., 0., 0., 0.]])
>>> trimone(xp,1,3,1)
array([[ 1., 1., 1.],
[ 1., 1., 1.]])
'''
shape = np.array(x.shape)
shape[axis] -= (front + back)
#print(shape, front, back
shapearr = np.array(x.shape)
    startind = np.zeros(x.ndim, dtype=int)  # integer dtype: floats are not valid slice bounds
startind[axis] = front
endind = startind + shape
myslice = [slice(startind[k], endind[k]) for k in range(len(endind))]
#print(myslice
#print(shape, endind
#print(x[tuple(myslice)].shape
return x[tuple(myslice)]
def ar2full(ar):
'''make reduced lagpolynomial into a right side lagpoly array
'''
nlags, nvar,nvarex = ar.shape
return np.r_[np.eye(nvar,nvarex)[None,:,:],-ar]
def ar2lhs(ar):
'''convert full (rhs) lagpolynomial into a reduced, left side lagpoly array
this is mainly a reminder about the definition
'''
return -ar[1:]
class _Var(object):
'''obsolete VAR class, use tsa.VAR instead, for internal use only
Examples
--------
>>> v = Var(ar2s)
>>> v.fit(1)
>>> v.arhat
array([[[ 1. , 0. ],
[ 0. , 1. ]],
[[-0.77784898, 0.01726193],
[ 0.10733009, -0.78665335]]])
'''
def __init__(self, y):
self.y = y
self.nobs, self.nvars = y.shape
def fit(self, nlags):
'''estimate parameters using ols
Parameters
----------
nlags : integer
number of lags to include in regression, same for all variables
Returns
-------
None, but attaches
arhat : array (nlags, nvar, nvar)
full lag polynomial array
arlhs : array (nlags-1, nvar, nvar)
reduced lag polynomial for left hand side
other statistics as returned by linalg.lstsq : need to be completed
This currently assumes all parameters are estimated without restrictions.
In this case SUR is identical to OLS
estimation results are attached to the class instance
'''
self.nlags = nlags # without current period
nvars = self.nvars
#TODO: ar2s looks like a module variable, bug?
#lmat = lagmat(ar2s, nlags, trim='both', original='in')
lmat = lagmat(self.y, nlags, trim='both', original='in')
self.yred = lmat[:,:nvars]
self.xred = lmat[:,nvars:]
res = np.linalg.lstsq(self.xred, self.yred)
self.estresults = res
self.arlhs = res[0].reshape(nlags, nvars, nvars)
self.arhat = ar2full(self.arlhs)
self.rss = res[1]
self.xredrank = res[2]
def predict(self):
'''calculate estimated timeseries (yhat) for sample
'''
if not hasattr(self, 'yhat'):
self.yhat = varfilter(self.y, self.arhat)
return self.yhat
def covmat(self):
''' covariance matrix of estimate
# not sure it's correct, need to check orientation everywhere
# looks ok, display needs getting used to
>>> v.rss[None,None,:]*np.linalg.inv(np.dot(v.xred.T,v.xred))[:,:,None]
array([[[ 0.37247445, 0.32210609],
[ 0.1002642 , 0.08670584]],
[[ 0.1002642 , 0.08670584],
[ 0.45903637, 0.39696255]]])
>>>
>>> v.rss[0]*np.linalg.inv(np.dot(v.xred.T,v.xred))
array([[ 0.37247445, 0.1002642 ],
[ 0.1002642 , 0.45903637]])
>>> v.rss[1]*np.linalg.inv(np.dot(v.xred.T,v.xred))
array([[ 0.32210609, 0.08670584],
[ 0.08670584, 0.39696255]])
'''
#check if orientation is same as self.arhat
self.paramcov = (self.rss[None,None,:] *
np.linalg.inv(np.dot(self.xred.T, self.xred))[:,:,None])
def forecast(self, horiz=1, u=None):
        '''calculates forecast for horiz number of periods at end of sample
Parameters
----------
horiz : int (optional, default=1)
forecast horizon
u : array (horiz, nvars)
error term for forecast periods. If None, then u is zero.
Returns
-------
yforecast : array (nobs+horiz, nvars)
this includes the sample and the forecasts
'''
if u is None:
u = np.zeros((horiz, self.nvars))
return vargenerate(self.arhat, u, initvalues=self.y)
class VarmaPoly(object):
'''class to keep track of Varma polynomial format
Examples
--------
ar23 = np.array([[[ 1. , 0. ],
[ 0. , 1. ]],
[[-0.6, 0. ],
[ 0.2, -0.6]],
[[-0.1, 0. ],
[ 0.1, -0.1]]])
ma22 = np.array([[[ 1. , 0. ],
[ 0. , 1. ]],
[[ 0.4, 0. ],
[ 0.2, 0.3]]])
'''
def __init__(self, ar, ma=None):
self.ar = ar
self.ma = ma
nlags, nvarall, nvars = ar.shape
self.nlags, self.nvarall, self.nvars = nlags, nvarall, nvars
self.isstructured = not (ar[0,:nvars] == np.eye(nvars)).all()
if self.ma is None:
self.ma = np.eye(nvars)[None,...]
self.isindependent = True
else:
self.isindependent = not (ma[0] == np.eye(nvars)).all()
        self.malags = self.ma.shape[0]
self.hasexog = nvarall > nvars
self.arm1 = -ar[1:]
#@property
def vstack(self, a=None, name='ar'):
'''stack lagpolynomial vertically in 2d array
'''
        if a is None:
            if name == 'ar':
                a = self.ar
            elif name == 'ma':
                a = self.ma
            else:
                raise ValueError('no array or name given')
return a.reshape(-1, self.nvarall)
#@property
def hstack(self, a=None, name='ar'):
'''stack lagpolynomial horizontally in 2d array
'''
        if a is None:
            if name == 'ar':
                a = self.ar
            elif name == 'ma':
                a = self.ma
            else:
                raise ValueError('no array or name given')
return a.swapaxes(1,2).reshape(-1, self.nvarall).T
#@property
def stacksquare(self, a=None, name='ar', orientation='vertical'):
'''stack lagpolynomial vertically in 2d square array with eye
'''
        if a is None:
            if name == 'ar':
                a = self.ar
            elif name == 'ma':
                a = self.ma
            else:
                raise ValueError('no array or name given')
astacked = a.reshape(-1, self.nvarall)
lenpk, nvars = astacked.shape #[0]
amat = np.eye(lenpk, k=nvars)
amat[:,:nvars] = astacked
return amat
#@property
def vstackarma_minus1(self):
        '''stack ar and ma lag polynomials (without lag 0) vertically in 2d array
'''
a = np.concatenate((self.ar[1:], self.ma[1:]),0)
return a.reshape(-1, self.nvarall)
#@property
def hstackarma_minus1(self):
        '''stack ar and ma lag polynomials (without lag 0) horizontally in 2d array
this is the Kalman Filter representation, I think
'''
a = np.concatenate((self.ar[1:], self.ma[1:]),0)
return a.swapaxes(1,2).reshape(-1, self.nvarall)
def getisstationary(self, a=None):
'''check whether the auto-regressive lag-polynomial is stationary
Returns
-------
isstationary : boolean
*attaches*
areigenvalues : complex array
eigenvalues sorted by absolute value
References
----------
formula taken from NAG manual
'''
        if a is None:
            if self.isstructured:
                a = -self.reduceform(self.ar)[1:]
            else:
                a = -self.ar[1:]
amat = self.stacksquare(a)
ev = np.sort(np.linalg.eigvals(amat))[::-1]
self.areigenvalues = ev
return (np.abs(ev) < 1).all()
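    # Example (sketch, using the ar23/ma22 polynomials from the class
    # docstring): all companion-matrix eigenvalues must lie inside the
    # unit circle for stationarity.
    #   vp = VarmaPoly(ar23, ma22)
    #   vp.getisstationary()   # -> True
    #   vp.areigenvalues       # companion-matrix eigenvalues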
def getisinvertible(self, a=None):
        '''check whether the moving-average lag-polynomial is invertible
Returns
-------
isinvertible : boolean
*attaches*
maeigenvalues : complex array
eigenvalues sorted by absolute value
References
----------
formula taken from NAG manual
'''
        if a is None:
            if self.isindependent:
                a = self.reduceform(self.ma)[1:]
            else:
                a = self.ma[1:]
if a.shape[0] == 0:
# no ma lags
            self.maeigenvalues = np.array([], dtype=complex)
return True
amat = self.stacksquare(a)
ev = np.sort(np.linalg.eigvals(amat))[::-1]
self.maeigenvalues = ev
return (np.abs(ev) < 1).all()
def reduceform(self, apoly):
'''
this assumes no exog, todo
'''
if apoly.ndim != 3:
raise ValueError('apoly needs to be 3d')
nlags, nvarsex, nvars = apoly.shape
a = np.empty_like(apoly)
try:
            a0inv = np.linalg.inv(apoly[0, :nvars, :])
except np.linalg.LinAlgError:
raise ValueError('matrix not invertible',
'ask for implementation of pinv')
for lag in range(nlags):
a[lag] = np.dot(a0inv, apoly[lag])
return a
if __name__ == "__main__":
# some example lag polynomials
a21 = np.array([[[ 1. , 0. ],
[ 0. , 1. ]],
[[-0.8, 0. ],
[ 0., -0.6]]])
a22 = np.array([[[ 1. , 0. ],
[ 0. , 1. ]],
[[-0.8, 0. ],
[ 0.1, -0.8]]])
a23 = np.array([[[ 1. , 0. ],
[ 0. , 1. ]],
[[-0.8, 0.2],
[ 0.1, -0.6]]])
a24 = np.array([[[ 1. , 0. ],
[ 0. , 1. ]],
[[-0.6, 0. ],
[ 0.2, -0.6]],
[[-0.1, 0. ],
[ 0.1, -0.1]]])
a31 = np.r_[np.eye(3)[None,:,:], 0.8*np.eye(3)[None,:,:]]
a32 = np.array([[[ 1. , 0. , 0. ],
[ 0. , 1. , 0. ],
[ 0. , 0. , 1. ]],
[[ 0.8, 0. , 0. ],
[ 0.1, 0.6, 0. ],
[ 0. , 0. , 0.9]]])
########
ut = np.random.randn(1000,2)
ar2s = vargenerate(a22,ut)
#res = np.linalg.lstsq(lagmat(ar2s,1)[:,1:], ar2s)
res = np.linalg.lstsq(lagmat(ar2s,1), ar2s)
bhat = res[0].reshape(1,2,2)
arhat = ar2full(bhat)
#print(maxabs(arhat - a22)
v = _Var(ar2s)
v.fit(1)
v.forecast()
v.forecast(25)[-30:]
ar23 = np.array([[[ 1. , 0. ],
[ 0. , 1. ]],
[[-0.6, 0. ],
[ 0.2, -0.6]],
[[-0.1, 0. ],
[ 0.1, -0.1]]])
ma22 = np.array([[[ 1. , 0. ],
[ 0. , 1. ]],
[[ 0.4, 0. ],
[ 0.2, 0.3]]])
ar23ns = np.array([[[ 1. , 0. ],
[ 0. , 1. ]],
[[-1.9, 0. ],
[ 0.4, -0.6]],
[[ 0.3, 0. ],
[ 0.1, -0.1]]])
vp = VarmaPoly(ar23, ma22)
print(vars(vp))
print(vp.vstack())
print(vp.vstack(a24))
print(vp.hstackarma_minus1())
print(vp.getisstationary())
print(vp.getisinvertible())
vp2 = VarmaPoly(ar23ns)
print(vp2.getisstationary())
print(vp2.getisinvertible()) # no ma lags
| bsd-3-clause |
timesking/sublime-evernote | lib/pygments/lexers/templates.py | 9 | 56066 | # -*- coding: utf-8 -*-
"""
pygments.lexers.templates
~~~~~~~~~~~~~~~~~~~~~~~~~
Lexers for various template engines' markup.
:copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from pygments.lexers.web import \
PhpLexer, HtmlLexer, XmlLexer, JavascriptLexer, CssLexer, LassoLexer
from pygments.lexers.agile import PythonLexer, PerlLexer
from pygments.lexers.compiled import JavaLexer
from pygments.lexers.jvm import TeaLangLexer
from pygments.lexer import Lexer, DelegatingLexer, RegexLexer, bygroups, \
include, using, this
from pygments.token import Error, Punctuation, \
Text, Comment, Operator, Keyword, Name, String, Number, Other, Token
from pygments.util import html_doctype_matches, looks_like_xml
__all__ = ['HtmlPhpLexer', 'XmlPhpLexer', 'CssPhpLexer',
'JavascriptPhpLexer', 'ErbLexer', 'RhtmlLexer',
'XmlErbLexer', 'CssErbLexer', 'JavascriptErbLexer',
'SmartyLexer', 'HtmlSmartyLexer', 'XmlSmartyLexer',
'CssSmartyLexer', 'JavascriptSmartyLexer', 'DjangoLexer',
'HtmlDjangoLexer', 'CssDjangoLexer', 'XmlDjangoLexer',
'JavascriptDjangoLexer', 'GenshiLexer', 'HtmlGenshiLexer',
'GenshiTextLexer', 'CssGenshiLexer', 'JavascriptGenshiLexer',
'MyghtyLexer', 'MyghtyHtmlLexer', 'MyghtyXmlLexer',
'MyghtyCssLexer', 'MyghtyJavascriptLexer', 'MasonLexer', 'MakoLexer',
'MakoHtmlLexer', 'MakoXmlLexer', 'MakoJavascriptLexer',
'MakoCssLexer', 'JspLexer', 'CheetahLexer', 'CheetahHtmlLexer',
'CheetahXmlLexer', 'CheetahJavascriptLexer', 'EvoqueLexer',
'EvoqueHtmlLexer', 'EvoqueXmlLexer', 'ColdfusionLexer',
'ColdfusionHtmlLexer', 'VelocityLexer', 'VelocityHtmlLexer',
'VelocityXmlLexer', 'SspLexer', 'TeaTemplateLexer', 'LassoHtmlLexer',
'LassoXmlLexer', 'LassoCssLexer', 'LassoJavascriptLexer']
class ErbLexer(Lexer):
"""
Generic `ERB <http://ruby-doc.org/core/classes/ERB.html>`_ (Ruby Templating)
lexer.
Just highlights ruby code between the preprocessor directives, other data
is left untouched by the lexer.
All options are also forwarded to the `RubyLexer`.
"""
name = 'ERB'
aliases = ['erb']
mimetypes = ['application/x-ruby-templating']
_block_re = re.compile(r'(<%%|%%>|<%=|<%#|<%-|<%|-%>|%>|^%[^%].*?$)', re.M)
def __init__(self, **options):
from pygments.lexers.agile import RubyLexer
self.ruby_lexer = RubyLexer(**options)
Lexer.__init__(self, **options)
def get_tokens_unprocessed(self, text):
"""
        Since ERB doesn't allow "<%" and other tags inside of ruby
        blocks, we can use a split approach here; it only fails on
        input that ERB itself would reject.
"""
tokens = self._block_re.split(text)
tokens.reverse()
state = idx = 0
try:
while True:
# text
if state == 0:
val = tokens.pop()
yield idx, Other, val
idx += len(val)
state = 1
# block starts
elif state == 1:
tag = tokens.pop()
# literals
if tag in ('<%%', '%%>'):
yield idx, Other, tag
idx += 3
state = 0
# comment
elif tag == '<%#':
yield idx, Comment.Preproc, tag
val = tokens.pop()
yield idx + 3, Comment, val
idx += 3 + len(val)
state = 2
# blocks or output
elif tag in ('<%', '<%=', '<%-'):
yield idx, Comment.Preproc, tag
idx += len(tag)
data = tokens.pop()
r_idx = 0
for r_idx, r_token, r_value in \
self.ruby_lexer.get_tokens_unprocessed(data):
yield r_idx + idx, r_token, r_value
idx += len(data)
state = 2
elif tag in ('%>', '-%>'):
yield idx, Error, tag
idx += len(tag)
state = 0
# % raw ruby statements
else:
yield idx, Comment.Preproc, tag[0]
r_idx = 0
for r_idx, r_token, r_value in \
self.ruby_lexer.get_tokens_unprocessed(tag[1:]):
yield idx + 1 + r_idx, r_token, r_value
idx += len(tag)
state = 0
# block ends
elif state == 2:
tag = tokens.pop()
if tag not in ('%>', '-%>'):
yield idx, Other, tag
else:
yield idx, Comment.Preproc, tag
idx += len(tag)
state = 0
except IndexError:
return
def analyse_text(text):
if '<%' in text and '%>' in text:
return 0.4
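# A quick usage sketch for these template lexers (assumes a regular Pygments
# installation providing highlight() and HtmlFormatter):
#   from pygments import highlight
#   from pygments.formatters import HtmlFormatter
#   print(highlight('<%= @user.name %>', ErbLexer(), HtmlFormatter()))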
class SmartyLexer(RegexLexer):
"""
Generic `Smarty <http://smarty.php.net/>`_ template lexer.
Just highlights smarty code between the preprocessor directives, other
data is left untouched by the lexer.
"""
name = 'Smarty'
aliases = ['smarty']
filenames = ['*.tpl']
mimetypes = ['application/x-smarty']
flags = re.MULTILINE | re.DOTALL
tokens = {
'root': [
(r'[^{]+', Other),
(r'(\{)(\*.*?\*)(\})',
bygroups(Comment.Preproc, Comment, Comment.Preproc)),
(r'(\{php\})(.*?)(\{/php\})',
bygroups(Comment.Preproc, using(PhpLexer, startinline=True),
Comment.Preproc)),
(r'(\{)(/?[a-zA-Z_][a-zA-Z0-9_]*)(\s*)',
bygroups(Comment.Preproc, Name.Function, Text), 'smarty'),
(r'\{', Comment.Preproc, 'smarty')
],
'smarty': [
(r'\s+', Text),
(r'\}', Comment.Preproc, '#pop'),
(r'#[a-zA-Z_][a-zA-Z0-9_]*#', Name.Variable),
(r'\$[a-zA-Z_][a-zA-Z0-9_]*(\.[a-zA-Z0-9_]+)*', Name.Variable),
(r'[~!%^&*()+=|\[\]:;,.<>/?{}@-]', Operator),
(r'(true|false|null)\b', Keyword.Constant),
(r"[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|"
r"0[xX][0-9a-fA-F]+[Ll]?", Number),
(r'"(\\\\|\\"|[^"])*"', String.Double),
(r"'(\\\\|\\'|[^'])*'", String.Single),
(r'[a-zA-Z_][a-zA-Z0-9_]*', Name.Attribute)
]
}
def analyse_text(text):
rv = 0.0
if re.search('\{if\s+.*?\}.*?\{/if\}', text):
rv += 0.15
if re.search('\{include\s+file=.*?\}', text):
rv += 0.15
if re.search('\{foreach\s+.*?\}.*?\{/foreach\}', text):
rv += 0.15
if re.search('\{\$.*?\}', text):
rv += 0.01
return rv
class VelocityLexer(RegexLexer):
"""
Generic `Velocity <http://velocity.apache.org/>`_ template lexer.
Just highlights velocity directives and variable references, other
data is left untouched by the lexer.
"""
name = 'Velocity'
aliases = ['velocity']
filenames = ['*.vm','*.fhtml']
flags = re.MULTILINE | re.DOTALL
identifier = r'[a-zA-Z_][a-zA-Z0-9_]*'
tokens = {
'root': [
(r'[^{#$]+', Other),
(r'(#)(\*.*?\*)(#)',
bygroups(Comment.Preproc, Comment, Comment.Preproc)),
(r'(##)(.*?$)',
bygroups(Comment.Preproc, Comment)),
(r'(#\{?)(' + identifier + r')(\}?)(\s?\()',
bygroups(Comment.Preproc, Name.Function, Comment.Preproc, Punctuation),
'directiveparams'),
(r'(#\{?)(' + identifier + r')(\}|\b)',
bygroups(Comment.Preproc, Name.Function, Comment.Preproc)),
(r'\$\{?', Punctuation, 'variable')
],
'variable': [
(identifier, Name.Variable),
(r'\(', Punctuation, 'funcparams'),
(r'(\.)(' + identifier + r')',
bygroups(Punctuation, Name.Variable), '#push'),
(r'\}', Punctuation, '#pop'),
(r'', Other, '#pop')
],
'directiveparams': [
(r'(&&|\|\||==?|!=?|[-<>+*%&\|\^/])|\b(eq|ne|gt|lt|ge|le|not|in)\b',
Operator),
(r'\[', Operator, 'rangeoperator'),
(r'\b' + identifier + r'\b', Name.Function),
include('funcparams')
],
'rangeoperator': [
(r'\.\.', Operator),
include('funcparams'),
(r'\]', Operator, '#pop')
],
'funcparams': [
(r'\$\{?', Punctuation, 'variable'),
(r'\s+', Text),
(r',', Punctuation),
(r'"(\\\\|\\"|[^"])*"', String.Double),
(r"'(\\\\|\\'|[^'])*'", String.Single),
(r"0[xX][0-9a-fA-F]+[Ll]?", Number),
(r"\b[0-9]+\b", Number),
(r'(true|false|null)\b', Keyword.Constant),
(r'\(', Punctuation, '#push'),
(r'\)', Punctuation, '#pop'),
(r'\[', Punctuation, '#push'),
(r'\]', Punctuation, '#pop'),
]
}
def analyse_text(text):
rv = 0.0
if re.search(r'#\{?macro\}?\(.*?\).*?#\{?end\}?', text):
rv += 0.25
if re.search(r'#\{?if\}?\(.+?\).*?#\{?end\}?', text):
rv += 0.15
if re.search(r'#\{?foreach\}?\(.+?\).*?#\{?end\}?', text):
rv += 0.15
if re.search(r'\$\{?[a-zA-Z_][a-zA-Z0-9_]*(\([^)]*\))?'
r'(\.[a-zA-Z0-9_]+(\([^)]*\))?)*\}?', text):
rv += 0.01
return rv
class VelocityHtmlLexer(DelegatingLexer):
"""
    Subclass of the `VelocityLexer` that highlights unlexed data
with the `HtmlLexer`.
"""
name = 'HTML+Velocity'
aliases = ['html+velocity']
alias_filenames = ['*.html','*.fhtml']
mimetypes = ['text/html+velocity']
def __init__(self, **options):
super(VelocityHtmlLexer, self).__init__(HtmlLexer, VelocityLexer,
**options)
class VelocityXmlLexer(DelegatingLexer):
"""
    Subclass of the `VelocityLexer` that highlights unlexed data
with the `XmlLexer`.
"""
name = 'XML+Velocity'
aliases = ['xml+velocity']
alias_filenames = ['*.xml','*.vm']
mimetypes = ['application/xml+velocity']
def __init__(self, **options):
super(VelocityXmlLexer, self).__init__(XmlLexer, VelocityLexer,
**options)
def analyse_text(text):
rv = VelocityLexer.analyse_text(text) - 0.01
if looks_like_xml(text):
rv += 0.5
return rv
class DjangoLexer(RegexLexer):
"""
Generic `django <http://www.djangoproject.com/documentation/templates/>`_
and `jinja <http://wsgiarea.pocoo.org/jinja/>`_ template lexer.
It just highlights django/jinja code between the preprocessor directives,
other data is left untouched by the lexer.
"""
name = 'Django/Jinja'
aliases = ['django', 'jinja']
mimetypes = ['application/x-django-templating', 'application/x-jinja']
flags = re.M | re.S
tokens = {
'root': [
(r'[^{]+', Other),
(r'\{\{', Comment.Preproc, 'var'),
# jinja/django comments
(r'\{[*#].*?[*#]\}', Comment),
# django comments
(r'(\{%)(-?\s*)(comment)(\s*-?)(%\})(.*?)'
r'(\{%)(-?\s*)(endcomment)(\s*-?)(%\})',
bygroups(Comment.Preproc, Text, Keyword, Text, Comment.Preproc,
Comment, Comment.Preproc, Text, Keyword, Text,
Comment.Preproc)),
# raw jinja blocks
(r'(\{%)(-?\s*)(raw)(\s*-?)(%\})(.*?)'
r'(\{%)(-?\s*)(endraw)(\s*-?)(%\})',
bygroups(Comment.Preproc, Text, Keyword, Text, Comment.Preproc,
Text, Comment.Preproc, Text, Keyword, Text,
Comment.Preproc)),
# filter blocks
(r'(\{%)(-?\s*)(filter)(\s+)([a-zA-Z_][a-zA-Z0-9_]*)',
bygroups(Comment.Preproc, Text, Keyword, Text, Name.Function),
'block'),
(r'(\{%)(-?\s*)([a-zA-Z_][a-zA-Z0-9_]*)',
bygroups(Comment.Preproc, Text, Keyword), 'block'),
(r'\{', Other)
],
'varnames': [
(r'(\|)(\s*)([a-zA-Z_][a-zA-Z0-9_]*)',
bygroups(Operator, Text, Name.Function)),
(r'(is)(\s+)(not)?(\s+)?([a-zA-Z_][a-zA-Z0-9_]*)',
bygroups(Keyword, Text, Keyword, Text, Name.Function)),
(r'(_|true|false|none|True|False|None)\b', Keyword.Pseudo),
(r'(in|as|reversed|recursive|not|and|or|is|if|else|import|'
r'with(?:(?:out)?\s*context)?|scoped|ignore\s+missing)\b',
Keyword),
(r'(loop|block|super|forloop)\b', Name.Builtin),
(r'[a-zA-Z][a-zA-Z0-9_-]*', Name.Variable),
(r'\.[a-zA-Z0-9_]+', Name.Variable),
(r':?"(\\\\|\\"|[^"])*"', String.Double),
(r":?'(\\\\|\\'|[^'])*'", String.Single),
(r'([{}()\[\]+\-*/,:~]|[><=]=?)', Operator),
(r"[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|"
r"0[xX][0-9a-fA-F]+[Ll]?", Number),
],
'var': [
(r'\s+', Text),
(r'(-?)(\}\})', bygroups(Text, Comment.Preproc), '#pop'),
include('varnames')
],
'block': [
(r'\s+', Text),
(r'(-?)(%\})', bygroups(Text, Comment.Preproc), '#pop'),
include('varnames'),
(r'.', Punctuation)
]
}
def analyse_text(text):
rv = 0.0
if re.search(r'\{%\s*(block|extends)', text) is not None:
rv += 0.4
if re.search(r'\{%\s*if\s*.*?%\}', text) is not None:
rv += 0.1
if re.search(r'\{\{.*?\}\}', text) is not None:
rv += 0.1
return rv
class MyghtyLexer(RegexLexer):
"""
Generic `myghty templates`_ lexer. Code that isn't Myghty
markup is yielded as `Token.Other`.
.. versionadded:: 0.6
.. _myghty templates: http://www.myghty.org/
"""
name = 'Myghty'
aliases = ['myghty']
filenames = ['*.myt', 'autodelegate']
mimetypes = ['application/x-myghty']
tokens = {
'root': [
(r'\s+', Text),
(r'(<%(?:def|method))(\s*)(.*?)(>)(.*?)(</%\2\s*>)(?s)',
bygroups(Name.Tag, Text, Name.Function, Name.Tag,
using(this), Name.Tag)),
(r'(<%\w+)(.*?)(>)(.*?)(</%\2\s*>)(?s)',
bygroups(Name.Tag, Name.Function, Name.Tag,
using(PythonLexer), Name.Tag)),
(r'(<&[^|])(.*?)(,.*?)?(&>)',
bygroups(Name.Tag, Name.Function, using(PythonLexer), Name.Tag)),
(r'(<&\|)(.*?)(,.*?)?(&>)(?s)',
bygroups(Name.Tag, Name.Function, using(PythonLexer), Name.Tag)),
(r'</&>', Name.Tag),
(r'(<%!?)(.*?)(%>)(?s)',
bygroups(Name.Tag, using(PythonLexer), Name.Tag)),
(r'(?<=^)#[^\n]*(\n|\Z)', Comment),
(r'(?<=^)(%)([^\n]*)(\n|\Z)',
bygroups(Name.Tag, using(PythonLexer), Other)),
(r"""(?sx)
(.+?) # anything, followed by:
(?:
(?<=\n)(?=[%#]) | # an eval or comment line
(?=</?[%&]) | # a substitution or block or
# call start or end
# - don't consume
(\\\n) | # an escaped newline
\Z # end of string
)""", bygroups(Other, Operator)),
]
}
class MyghtyHtmlLexer(DelegatingLexer):
"""
    Subclass of the `MyghtyLexer` that highlights unlexed data
with the `HtmlLexer`.
.. versionadded:: 0.6
"""
name = 'HTML+Myghty'
aliases = ['html+myghty']
mimetypes = ['text/html+myghty']
def __init__(self, **options):
super(MyghtyHtmlLexer, self).__init__(HtmlLexer, MyghtyLexer,
**options)
class MyghtyXmlLexer(DelegatingLexer):
"""
    Subclass of the `MyghtyLexer` that highlights unlexed data
with the `XmlLexer`.
.. versionadded:: 0.6
"""
name = 'XML+Myghty'
aliases = ['xml+myghty']
mimetypes = ['application/xml+myghty']
def __init__(self, **options):
super(MyghtyXmlLexer, self).__init__(XmlLexer, MyghtyLexer,
**options)
class MyghtyJavascriptLexer(DelegatingLexer):
"""
    Subclass of the `MyghtyLexer` that highlights unlexed data
with the `JavascriptLexer`.
.. versionadded:: 0.6
"""
name = 'JavaScript+Myghty'
aliases = ['js+myghty', 'javascript+myghty']
mimetypes = ['application/x-javascript+myghty',
'text/x-javascript+myghty',
'text/javascript+mygthy']
def __init__(self, **options):
super(MyghtyJavascriptLexer, self).__init__(JavascriptLexer,
MyghtyLexer, **options)
class MyghtyCssLexer(DelegatingLexer):
"""
    Subclass of the `MyghtyLexer` that highlights unlexed data
with the `CssLexer`.
.. versionadded:: 0.6
"""
name = 'CSS+Myghty'
aliases = ['css+myghty']
mimetypes = ['text/css+myghty']
def __init__(self, **options):
super(MyghtyCssLexer, self).__init__(CssLexer, MyghtyLexer,
**options)
class MasonLexer(RegexLexer):
"""
Generic `mason templates`_ lexer. Stolen from Myghty lexer. Code that isn't
Mason markup is HTML.
.. _mason templates: http://www.masonhq.com/
.. versionadded:: 1.4
"""
name = 'Mason'
aliases = ['mason']
filenames = ['*.m', '*.mhtml', '*.mc', '*.mi', 'autohandler', 'dhandler']
mimetypes = ['application/x-mason']
tokens = {
'root': [
(r'\s+', Text),
(r'(<%doc>)(.*?)(</%doc>)(?s)',
bygroups(Name.Tag, Comment.Multiline, Name.Tag)),
(r'(<%(?:def|method))(\s*)(.*?)(>)(.*?)(</%\2\s*>)(?s)',
bygroups(Name.Tag, Text, Name.Function, Name.Tag,
using(this), Name.Tag)),
(r'(<%\w+)(.*?)(>)(.*?)(</%\2\s*>)(?s)',
bygroups(Name.Tag, Name.Function, Name.Tag,
using(PerlLexer), Name.Tag)),
(r'(<&[^|])(.*?)(,.*?)?(&>)(?s)',
bygroups(Name.Tag, Name.Function, using(PerlLexer), Name.Tag)),
(r'(<&\|)(.*?)(,.*?)?(&>)(?s)',
bygroups(Name.Tag, Name.Function, using(PerlLexer), Name.Tag)),
(r'</&>', Name.Tag),
(r'(<%!?)(.*?)(%>)(?s)',
bygroups(Name.Tag, using(PerlLexer), Name.Tag)),
(r'(?<=^)#[^\n]*(\n|\Z)', Comment),
(r'(?<=^)(%)([^\n]*)(\n|\Z)',
bygroups(Name.Tag, using(PerlLexer), Other)),
(r"""(?sx)
(.+?) # anything, followed by:
(?:
(?<=\n)(?=[%#]) | # an eval or comment line
(?=</?[%&]) | # a substitution or block or
# call start or end
# - don't consume
(\\\n) | # an escaped newline
\Z # end of string
)""", bygroups(using(HtmlLexer), Operator)),
]
}
def analyse_text(text):
rv = 0.0
if re.search('<&', text) is not None:
rv = 1.0
return rv
class MakoLexer(RegexLexer):
"""
Generic `mako templates`_ lexer. Code that isn't Mako
markup is yielded as `Token.Other`.
.. versionadded:: 0.7
.. _mako templates: http://www.makotemplates.org/
"""
name = 'Mako'
aliases = ['mako']
filenames = ['*.mao']
mimetypes = ['application/x-mako']
tokens = {
'root': [
(r'(\s*)(%)(\s*end(?:\w+))(\n|\Z)',
bygroups(Text, Comment.Preproc, Keyword, Other)),
(r'(\s*)(%)([^\n]*)(\n|\Z)',
bygroups(Text, Comment.Preproc, using(PythonLexer), Other)),
(r'(\s*)(##[^\n]*)(\n|\Z)',
bygroups(Text, Comment.Preproc, Other)),
(r'(?s)<%doc>.*?</%doc>', Comment.Preproc),
(r'(<%)([\w\.\:]+)',
bygroups(Comment.Preproc, Name.Builtin), 'tag'),
(r'(</%)([\w\.\:]+)(>)',
bygroups(Comment.Preproc, Name.Builtin, Comment.Preproc)),
(r'<%(?=([\w\.\:]+))', Comment.Preproc, 'ondeftags'),
(r'(<%(?:!?))(.*?)(%>)(?s)',
bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
(r'(\$\{)(.*?)(\})',
bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
(r'''(?sx)
(.+?) # anything, followed by:
(?:
(?<=\n)(?=%|\#\#) | # an eval or comment line
(?=\#\*) | # multiline comment
(?=</?%) | # a python block
# call start or end
(?=\$\{) | # a substitution
(?<=\n)(?=\s*%) |
# - don't consume
(\\\n) | # an escaped newline
\Z # end of string
)
''', bygroups(Other, Operator)),
(r'\s+', Text),
],
'ondeftags': [
(r'<%', Comment.Preproc),
(r'(?<=<%)(include|inherit|namespace|page)', Name.Builtin),
include('tag'),
],
'tag': [
(r'((?:\w+)\s*=)(\s*)(".*?")',
bygroups(Name.Attribute, Text, String)),
(r'/?\s*>', Comment.Preproc, '#pop'),
(r'\s+', Text),
],
'attr': [
('".*?"', String, '#pop'),
("'.*?'", String, '#pop'),
(r'[^\s>]+', String, '#pop'),
],
}
class MakoHtmlLexer(DelegatingLexer):
"""
Subclass of the `MakoLexer` that highlights unlexed data
with the `HtmlLexer`.
.. versionadded:: 0.7
"""
name = 'HTML+Mako'
aliases = ['html+mako']
mimetypes = ['text/html+mako']
def __init__(self, **options):
super(MakoHtmlLexer, self).__init__(HtmlLexer, MakoLexer,
**options)
class MakoXmlLexer(DelegatingLexer):
"""
    Subclass of the `MakoLexer` that highlights unlexed data
with the `XmlLexer`.
.. versionadded:: 0.7
"""
name = 'XML+Mako'
aliases = ['xml+mako']
mimetypes = ['application/xml+mako']
def __init__(self, **options):
super(MakoXmlLexer, self).__init__(XmlLexer, MakoLexer,
**options)
class MakoJavascriptLexer(DelegatingLexer):
"""
    Subclass of the `MakoLexer` that highlights unlexed data
with the `JavascriptLexer`.
.. versionadded:: 0.7
"""
name = 'JavaScript+Mako'
aliases = ['js+mako', 'javascript+mako']
mimetypes = ['application/x-javascript+mako',
'text/x-javascript+mako',
'text/javascript+mako']
def __init__(self, **options):
super(MakoJavascriptLexer, self).__init__(JavascriptLexer,
MakoLexer, **options)
class MakoCssLexer(DelegatingLexer):
"""
    Subclass of the `MakoLexer` that highlights unlexed data
with the `CssLexer`.
.. versionadded:: 0.7
"""
name = 'CSS+Mako'
aliases = ['css+mako']
mimetypes = ['text/css+mako']
def __init__(self, **options):
super(MakoCssLexer, self).__init__(CssLexer, MakoLexer,
**options)
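# --- Illustrative usage sketch (not part of the original module) ---
# How one of the delegating lexers above can be driven through the
# standard pygments API; the Mako template string is invented for the
# example. The function is defined but never called, so importing the
# module stays side-effect free.
def _demo_mako_highlight():
    from pygments import highlight
    from pygments.formatters import HtmlFormatter
    code = u'<p>${", ".join(names)}</p>\n% if names:\n<hr/>\n% endif\n'
    # MakoHtmlLexer lexes the Mako constructs and hands everything else
    # to HtmlLexer, which is exactly the DelegatingLexer pattern.
    return highlight(code, MakoHtmlLexer(), HtmlFormatter())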
# Genshi and Cheetah lexers courtesy of Matt Good.
class CheetahPythonLexer(Lexer):
"""
Lexer for handling Cheetah's special $ tokens in Python syntax.
"""
def get_tokens_unprocessed(self, text):
pylexer = PythonLexer(**self.options)
for pos, type_, value in pylexer.get_tokens_unprocessed(text):
if type_ == Token.Error and value == '$':
type_ = Comment.Preproc
yield pos, type_, value
class CheetahLexer(RegexLexer):
"""
Generic `cheetah templates`_ lexer. Code that isn't Cheetah
markup is yielded as `Token.Other`. This also works for
`spitfire templates`_ which use the same syntax.
.. _cheetah templates: http://www.cheetahtemplate.org/
.. _spitfire templates: http://code.google.com/p/spitfire/
"""
name = 'Cheetah'
aliases = ['cheetah', 'spitfire']
filenames = ['*.tmpl', '*.spt']
mimetypes = ['application/x-cheetah', 'application/x-spitfire']
tokens = {
'root': [
(r'(##[^\n]*)$',
(bygroups(Comment))),
(r'#[*](.|\n)*?[*]#', Comment),
(r'#end[^#\n]*(?:#|$)', Comment.Preproc),
(r'#slurp$', Comment.Preproc),
(r'(#[a-zA-Z]+)([^#\n]*)(#|$)',
(bygroups(Comment.Preproc, using(CheetahPythonLexer),
Comment.Preproc))),
# TODO support other Python syntax like $foo['bar']
(r'(\$)([a-zA-Z_][a-zA-Z0-9_\.]*[a-zA-Z0-9_])',
bygroups(Comment.Preproc, using(CheetahPythonLexer))),
(r'(\$\{!?)(.*?)(\})(?s)',
bygroups(Comment.Preproc, using(CheetahPythonLexer),
Comment.Preproc)),
(r'''(?sx)
(.+?) # anything, followed by:
(?:
(?=[#][#a-zA-Z]*) | # an eval comment
(?=\$[a-zA-Z_{]) | # a substitution
\Z # end of string
)
''', Other),
(r'\s+', Text),
],
}
class CheetahHtmlLexer(DelegatingLexer):
"""
    Subclass of the `CheetahLexer` that highlights unlexed data
with the `HtmlLexer`.
"""
name = 'HTML+Cheetah'
aliases = ['html+cheetah', 'html+spitfire', 'htmlcheetah']
mimetypes = ['text/html+cheetah', 'text/html+spitfire']
def __init__(self, **options):
super(CheetahHtmlLexer, self).__init__(HtmlLexer, CheetahLexer,
**options)
class CheetahXmlLexer(DelegatingLexer):
"""
    Subclass of the `CheetahLexer` that highlights unlexed data
with the `XmlLexer`.
"""
name = 'XML+Cheetah'
aliases = ['xml+cheetah', 'xml+spitfire']
mimetypes = ['application/xml+cheetah', 'application/xml+spitfire']
def __init__(self, **options):
super(CheetahXmlLexer, self).__init__(XmlLexer, CheetahLexer,
**options)
class CheetahJavascriptLexer(DelegatingLexer):
"""
    Subclass of the `CheetahLexer` that highlights unlexed data
with the `JavascriptLexer`.
"""
name = 'JavaScript+Cheetah'
aliases = ['js+cheetah', 'javascript+cheetah',
'js+spitfire', 'javascript+spitfire']
mimetypes = ['application/x-javascript+cheetah',
'text/x-javascript+cheetah',
'text/javascript+cheetah',
'application/x-javascript+spitfire',
'text/x-javascript+spitfire',
'text/javascript+spitfire']
def __init__(self, **options):
super(CheetahJavascriptLexer, self).__init__(JavascriptLexer,
CheetahLexer, **options)
class GenshiTextLexer(RegexLexer):
"""
A lexer that highlights `genshi <http://genshi.edgewall.org/>`_ text
templates.
"""
name = 'Genshi Text'
aliases = ['genshitext']
mimetypes = ['application/x-genshi-text', 'text/x-genshi']
tokens = {
'root': [
(r'[^#\$\s]+', Other),
(r'^(\s*)(##.*)$', bygroups(Text, Comment)),
(r'^(\s*)(#)', bygroups(Text, Comment.Preproc), 'directive'),
include('variable'),
(r'[#\$\s]', Other),
],
'directive': [
(r'\n', Text, '#pop'),
(r'(?:def|for|if)\s+.*', using(PythonLexer), '#pop'),
(r'(choose|when|with)([^\S\n]+)(.*)',
bygroups(Keyword, Text, using(PythonLexer)), '#pop'),
(r'(choose|otherwise)\b', Keyword, '#pop'),
(r'(end\w*)([^\S\n]*)(.*)', bygroups(Keyword, Text, Comment), '#pop'),
],
'variable': [
(r'(?<!\$)(\$\{)(.+?)(\})',
bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
(r'(?<!\$)(\$)([a-zA-Z_][a-zA-Z0-9_\.]*)',
Name.Variable),
]
}
class GenshiMarkupLexer(RegexLexer):
"""
Base lexer for Genshi markup, used by `HtmlGenshiLexer` and
`GenshiLexer`.
"""
flags = re.DOTALL
tokens = {
'root': [
(r'[^<\$]+', Other),
(r'(<\?python)(.*?)(\?>)',
bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
# yield style and script blocks as Other
(r'<\s*(script|style)\s*.*?>.*?<\s*/\1\s*>', Other),
(r'<\s*py:[a-zA-Z0-9]+', Name.Tag, 'pytag'),
(r'<\s*[a-zA-Z0-9:]+', Name.Tag, 'tag'),
include('variable'),
(r'[<\$]', Other),
],
'pytag': [
(r'\s+', Text),
(r'[a-zA-Z0-9_:-]+\s*=', Name.Attribute, 'pyattr'),
(r'/?\s*>', Name.Tag, '#pop'),
],
'pyattr': [
('(")(.*?)(")', bygroups(String, using(PythonLexer), String), '#pop'),
("(')(.*?)(')", bygroups(String, using(PythonLexer), String), '#pop'),
(r'[^\s>]+', String, '#pop'),
],
'tag': [
(r'\s+', Text),
(r'py:[a-zA-Z0-9_-]+\s*=', Name.Attribute, 'pyattr'),
(r'[a-zA-Z0-9_:-]+\s*=', Name.Attribute, 'attr'),
(r'/?\s*>', Name.Tag, '#pop'),
],
'attr': [
('"', String, 'attr-dstring'),
("'", String, 'attr-sstring'),
(r'[^\s>]*', String, '#pop')
],
'attr-dstring': [
('"', String, '#pop'),
include('strings'),
("'", String)
],
'attr-sstring': [
("'", String, '#pop'),
include('strings'),
("'", String)
],
'strings': [
('[^"\'$]+', String),
include('variable')
],
'variable': [
(r'(?<!\$)(\$\{)(.+?)(\})',
bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
(r'(?<!\$)(\$)([a-zA-Z_][a-zA-Z0-9_\.]*)',
Name.Variable),
]
}
class HtmlGenshiLexer(DelegatingLexer):
"""
A lexer that highlights `genshi <http://genshi.edgewall.org/>`_ and
`kid <http://kid-templating.org/>`_ kid HTML templates.
"""
name = 'HTML+Genshi'
aliases = ['html+genshi', 'html+kid']
alias_filenames = ['*.html', '*.htm', '*.xhtml']
mimetypes = ['text/html+genshi']
def __init__(self, **options):
super(HtmlGenshiLexer, self).__init__(HtmlLexer, GenshiMarkupLexer,
**options)
def analyse_text(text):
rv = 0.0
        if re.search(r'\$\{.*?\}', text) is not None:
            rv += 0.2
        if re.search(r'py:(.*?)=["\']', text) is not None:
            rv += 0.2
return rv + HtmlLexer.analyse_text(text) - 0.01
class GenshiLexer(DelegatingLexer):
"""
A lexer that highlights `genshi <http://genshi.edgewall.org/>`_ and
`kid <http://kid-templating.org/>`_ kid XML templates.
"""
name = 'Genshi'
aliases = ['genshi', 'kid', 'xml+genshi', 'xml+kid']
filenames = ['*.kid']
alias_filenames = ['*.xml']
mimetypes = ['application/x-genshi', 'application/x-kid']
def __init__(self, **options):
super(GenshiLexer, self).__init__(XmlLexer, GenshiMarkupLexer,
**options)
def analyse_text(text):
rv = 0.0
        if re.search(r'\$\{.*?\}', text) is not None:
            rv += 0.2
        if re.search(r'py:(.*?)=["\']', text) is not None:
            rv += 0.2
return rv + XmlLexer.analyse_text(text) - 0.01
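# Illustrative sketch (invented Genshi fragment): analyse_text() scores
# like the ones above are consumed by pygments.lexers.guess_lexer(),
# which returns the highest-scoring registered lexer. Defined but never
# called here.
def _demo_guess_genshi():
    from pygments.lexers import guess_lexer
    text = u'<div py:if="items">${len(items)} item(s)</div>'
    return guess_lexer(text)  # expected to favour a Genshi lexer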
class JavascriptGenshiLexer(DelegatingLexer):
"""
A lexer that highlights javascript code in genshi text templates.
"""
name = 'JavaScript+Genshi Text'
aliases = ['js+genshitext', 'js+genshi', 'javascript+genshitext',
'javascript+genshi']
alias_filenames = ['*.js']
mimetypes = ['application/x-javascript+genshi',
'text/x-javascript+genshi',
'text/javascript+genshi']
def __init__(self, **options):
super(JavascriptGenshiLexer, self).__init__(JavascriptLexer,
GenshiTextLexer,
**options)
def analyse_text(text):
return GenshiLexer.analyse_text(text) - 0.05
class CssGenshiLexer(DelegatingLexer):
"""
A lexer that highlights CSS definitions in genshi text templates.
"""
name = 'CSS+Genshi Text'
aliases = ['css+genshitext', 'css+genshi']
alias_filenames = ['*.css']
mimetypes = ['text/css+genshi']
def __init__(self, **options):
super(CssGenshiLexer, self).__init__(CssLexer, GenshiTextLexer,
**options)
def analyse_text(text):
return GenshiLexer.analyse_text(text) - 0.05
class RhtmlLexer(DelegatingLexer):
"""
Subclass of the ERB lexer that highlights the unlexed data with the
html lexer.
    Nested Javascript and CSS are highlighted too.
"""
name = 'RHTML'
aliases = ['rhtml', 'html+erb', 'html+ruby']
filenames = ['*.rhtml']
alias_filenames = ['*.html', '*.htm', '*.xhtml']
mimetypes = ['text/html+ruby']
def __init__(self, **options):
super(RhtmlLexer, self).__init__(HtmlLexer, ErbLexer, **options)
def analyse_text(text):
rv = ErbLexer.analyse_text(text) - 0.01
if html_doctype_matches(text):
# one more than the XmlErbLexer returns
rv += 0.5
return rv
class XmlErbLexer(DelegatingLexer):
"""
Subclass of `ErbLexer` which highlights data outside preprocessor
directives with the `XmlLexer`.
"""
name = 'XML+Ruby'
aliases = ['xml+erb', 'xml+ruby']
alias_filenames = ['*.xml']
mimetypes = ['application/xml+ruby']
def __init__(self, **options):
super(XmlErbLexer, self).__init__(XmlLexer, ErbLexer, **options)
def analyse_text(text):
rv = ErbLexer.analyse_text(text) - 0.01
if looks_like_xml(text):
rv += 0.4
return rv
class CssErbLexer(DelegatingLexer):
"""
Subclass of `ErbLexer` which highlights unlexed data with the `CssLexer`.
"""
name = 'CSS+Ruby'
aliases = ['css+erb', 'css+ruby']
alias_filenames = ['*.css']
mimetypes = ['text/css+ruby']
def __init__(self, **options):
super(CssErbLexer, self).__init__(CssLexer, ErbLexer, **options)
def analyse_text(text):
return ErbLexer.analyse_text(text) - 0.05
class JavascriptErbLexer(DelegatingLexer):
"""
Subclass of `ErbLexer` which highlights unlexed data with the
`JavascriptLexer`.
"""
name = 'JavaScript+Ruby'
aliases = ['js+erb', 'javascript+erb', 'js+ruby', 'javascript+ruby']
alias_filenames = ['*.js']
mimetypes = ['application/x-javascript+ruby',
'text/x-javascript+ruby',
'text/javascript+ruby']
def __init__(self, **options):
super(JavascriptErbLexer, self).__init__(JavascriptLexer, ErbLexer,
**options)
def analyse_text(text):
return ErbLexer.analyse_text(text) - 0.05
class HtmlPhpLexer(DelegatingLexer):
"""
Subclass of `PhpLexer` that highlights unhandled data with the `HtmlLexer`.
    Nested Javascript and CSS are highlighted too.
"""
name = 'HTML+PHP'
aliases = ['html+php']
filenames = ['*.phtml']
alias_filenames = ['*.php', '*.html', '*.htm', '*.xhtml',
'*.php[345]']
mimetypes = ['application/x-php',
'application/x-httpd-php', 'application/x-httpd-php3',
'application/x-httpd-php4', 'application/x-httpd-php5']
def __init__(self, **options):
super(HtmlPhpLexer, self).__init__(HtmlLexer, PhpLexer, **options)
def analyse_text(text):
rv = PhpLexer.analyse_text(text) - 0.01
if html_doctype_matches(text):
rv += 0.5
return rv
class XmlPhpLexer(DelegatingLexer):
"""
    Subclass of `PhpLexer` that highlights unhandled data with the `XmlLexer`.
"""
name = 'XML+PHP'
aliases = ['xml+php']
alias_filenames = ['*.xml', '*.php', '*.php[345]']
mimetypes = ['application/xml+php']
def __init__(self, **options):
super(XmlPhpLexer, self).__init__(XmlLexer, PhpLexer, **options)
def analyse_text(text):
rv = PhpLexer.analyse_text(text) - 0.01
if looks_like_xml(text):
rv += 0.4
return rv
class CssPhpLexer(DelegatingLexer):
"""
Subclass of `PhpLexer` which highlights unmatched data with the `CssLexer`.
"""
name = 'CSS+PHP'
aliases = ['css+php']
alias_filenames = ['*.css']
mimetypes = ['text/css+php']
def __init__(self, **options):
super(CssPhpLexer, self).__init__(CssLexer, PhpLexer, **options)
def analyse_text(text):
return PhpLexer.analyse_text(text) - 0.05
class JavascriptPhpLexer(DelegatingLexer):
"""
Subclass of `PhpLexer` which highlights unmatched data with the
`JavascriptLexer`.
"""
name = 'JavaScript+PHP'
aliases = ['js+php', 'javascript+php']
alias_filenames = ['*.js']
mimetypes = ['application/x-javascript+php',
'text/x-javascript+php',
'text/javascript+php']
def __init__(self, **options):
super(JavascriptPhpLexer, self).__init__(JavascriptLexer, PhpLexer,
**options)
def analyse_text(text):
return PhpLexer.analyse_text(text)
class HtmlSmartyLexer(DelegatingLexer):
"""
    Subclass of the `SmartyLexer` that highlights unlexed data with the
`HtmlLexer`.
    Nested Javascript and CSS are highlighted too.
"""
name = 'HTML+Smarty'
aliases = ['html+smarty']
alias_filenames = ['*.html', '*.htm', '*.xhtml', '*.tpl']
mimetypes = ['text/html+smarty']
def __init__(self, **options):
super(HtmlSmartyLexer, self).__init__(HtmlLexer, SmartyLexer, **options)
def analyse_text(text):
rv = SmartyLexer.analyse_text(text) - 0.01
if html_doctype_matches(text):
rv += 0.5
return rv
class XmlSmartyLexer(DelegatingLexer):
"""
Subclass of the `SmartyLexer` that highlights unlexed data with the
`XmlLexer`.
"""
name = 'XML+Smarty'
aliases = ['xml+smarty']
alias_filenames = ['*.xml', '*.tpl']
mimetypes = ['application/xml+smarty']
def __init__(self, **options):
super(XmlSmartyLexer, self).__init__(XmlLexer, SmartyLexer, **options)
def analyse_text(text):
rv = SmartyLexer.analyse_text(text) - 0.01
if looks_like_xml(text):
rv += 0.4
return rv
class CssSmartyLexer(DelegatingLexer):
"""
Subclass of the `SmartyLexer` that highlights unlexed data with the
`CssLexer`.
"""
name = 'CSS+Smarty'
aliases = ['css+smarty']
alias_filenames = ['*.css', '*.tpl']
mimetypes = ['text/css+smarty']
def __init__(self, **options):
super(CssSmartyLexer, self).__init__(CssLexer, SmartyLexer, **options)
def analyse_text(text):
return SmartyLexer.analyse_text(text) - 0.05
class JavascriptSmartyLexer(DelegatingLexer):
"""
Subclass of the `SmartyLexer` that highlights unlexed data with the
`JavascriptLexer`.
"""
name = 'JavaScript+Smarty'
aliases = ['js+smarty', 'javascript+smarty']
alias_filenames = ['*.js', '*.tpl']
mimetypes = ['application/x-javascript+smarty',
'text/x-javascript+smarty',
'text/javascript+smarty']
def __init__(self, **options):
super(JavascriptSmartyLexer, self).__init__(JavascriptLexer, SmartyLexer,
**options)
def analyse_text(text):
return SmartyLexer.analyse_text(text) - 0.05
class HtmlDjangoLexer(DelegatingLexer):
"""
    Subclass of the `DjangoLexer` that highlights unlexed data with the
`HtmlLexer`.
    Nested Javascript and CSS are highlighted too.
"""
name = 'HTML+Django/Jinja'
aliases = ['html+django', 'html+jinja', 'htmldjango']
alias_filenames = ['*.html', '*.htm', '*.xhtml']
mimetypes = ['text/html+django', 'text/html+jinja']
def __init__(self, **options):
super(HtmlDjangoLexer, self).__init__(HtmlLexer, DjangoLexer, **options)
def analyse_text(text):
rv = DjangoLexer.analyse_text(text) - 0.01
if html_doctype_matches(text):
rv += 0.5
return rv
class XmlDjangoLexer(DelegatingLexer):
"""
Subclass of the `DjangoLexer` that highlights unlexed data with the
`XmlLexer`.
"""
name = 'XML+Django/Jinja'
aliases = ['xml+django', 'xml+jinja']
alias_filenames = ['*.xml']
mimetypes = ['application/xml+django', 'application/xml+jinja']
def __init__(self, **options):
super(XmlDjangoLexer, self).__init__(XmlLexer, DjangoLexer, **options)
def analyse_text(text):
rv = DjangoLexer.analyse_text(text) - 0.01
if looks_like_xml(text):
rv += 0.4
return rv
class CssDjangoLexer(DelegatingLexer):
"""
Subclass of the `DjangoLexer` that highlights unlexed data with the
`CssLexer`.
"""
name = 'CSS+Django/Jinja'
aliases = ['css+django', 'css+jinja']
alias_filenames = ['*.css']
mimetypes = ['text/css+django', 'text/css+jinja']
def __init__(self, **options):
super(CssDjangoLexer, self).__init__(CssLexer, DjangoLexer, **options)
def analyse_text(text):
return DjangoLexer.analyse_text(text) - 0.05
class JavascriptDjangoLexer(DelegatingLexer):
"""
Subclass of the `DjangoLexer` that highlights unlexed data with the
`JavascriptLexer`.
"""
name = 'JavaScript+Django/Jinja'
aliases = ['js+django', 'javascript+django',
'js+jinja', 'javascript+jinja']
alias_filenames = ['*.js']
mimetypes = ['application/x-javascript+django',
'application/x-javascript+jinja',
'text/x-javascript+django',
'text/x-javascript+jinja',
'text/javascript+django',
'text/javascript+jinja']
def __init__(self, **options):
super(JavascriptDjangoLexer, self).__init__(JavascriptLexer, DjangoLexer,
**options)
def analyse_text(text):
return DjangoLexer.analyse_text(text) - 0.05
class JspRootLexer(RegexLexer):
"""
    Base for the `JspLexer`. Yields `Token.Other` for areas outside of
JSP tags.
.. versionadded:: 0.7
"""
tokens = {
'root': [
(r'<%\S?', Keyword, 'sec'),
# FIXME: I want to make these keywords but still parse attributes.
(r'</?jsp:(forward|getProperty|include|plugin|setProperty|useBean).*?>',
Keyword),
(r'[^<]+', Other),
(r'<', Other),
],
'sec': [
(r'%>', Keyword, '#pop'),
# note: '\w\W' != '.' without DOTALL.
(r'[\w\W]+?(?=%>|\Z)', using(JavaLexer)),
],
}
class JspLexer(DelegatingLexer):
"""
Lexer for Java Server Pages.
.. versionadded:: 0.7
"""
name = 'Java Server Page'
aliases = ['jsp']
filenames = ['*.jsp']
mimetypes = ['application/x-jsp']
def __init__(self, **options):
super(JspLexer, self).__init__(XmlLexer, JspRootLexer, **options)
def analyse_text(text):
rv = JavaLexer.analyse_text(text) - 0.01
if looks_like_xml(text):
rv += 0.4
if '<%' in text and '%>' in text:
rv += 0.1
return rv
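# Illustrative sketch (invented JSP fragment): get_tokens() is the
# generic pygments Lexer entry point, so the delegating JspLexer above
# can be exercised directly. Defined but never called.
def _demo_jsp_tokens():
    fragment = u'<html><% int i = 0; %></html>'
    return list(JspLexer().get_tokens(fragment))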
class EvoqueLexer(RegexLexer):
"""
For files using the Evoque templating system.
.. versionadded:: 1.1
"""
name = 'Evoque'
aliases = ['evoque']
filenames = ['*.evoque']
mimetypes = ['application/x-evoque']
flags = re.DOTALL
tokens = {
'root': [
(r'[^#$]+', Other),
(r'#\[', Comment.Multiline, 'comment'),
(r'\$\$', Other),
# svn keywords
(r'\$\w+:[^$\n]*\$', Comment.Multiline),
# directives: begin, end
(r'(\$)(begin|end)(\{(%)?)(.*?)((?(4)%)\})',
bygroups(Punctuation, Name.Builtin, Punctuation, None,
String, Punctuation)),
# directives: evoque, overlay
# see doc for handling first name arg: /directives/evoque/
#+ minor inconsistency: the "name" in e.g. $overlay{name=site_base}
# should be using(PythonLexer), not passed out as String
(r'(\$)(evoque|overlay)(\{(%)?)(\s*[#\w\-"\'.]+[^=,%}]+?)?'
r'(.*?)((?(4)%)\})',
bygroups(Punctuation, Name.Builtin, Punctuation, None,
String, using(PythonLexer), Punctuation)),
# directives: if, for, prefer, test
(r'(\$)(\w+)(\{(%)?)(.*?)((?(4)%)\})',
bygroups(Punctuation, Name.Builtin, Punctuation, None,
using(PythonLexer), Punctuation)),
# directive clauses (no {} expression)
(r'(\$)(else|rof|fi)', bygroups(Punctuation, Name.Builtin)),
# expressions
(r'(\$\{(%)?)(.*?)((!)(.*?))?((?(2)%)\})',
bygroups(Punctuation, None, using(PythonLexer),
Name.Builtin, None, None, Punctuation)),
(r'#', Other),
],
'comment': [
(r'[^\]#]', Comment.Multiline),
(r'#\[', Comment.Multiline, '#push'),
(r'\]#', Comment.Multiline, '#pop'),
(r'[\]#]', Comment.Multiline)
],
}
class EvoqueHtmlLexer(DelegatingLexer):
"""
Subclass of the `EvoqueLexer` that highlights unlexed data with the
`HtmlLexer`.
.. versionadded:: 1.1
"""
name = 'HTML+Evoque'
aliases = ['html+evoque']
filenames = ['*.html']
mimetypes = ['text/html+evoque']
def __init__(self, **options):
super(EvoqueHtmlLexer, self).__init__(HtmlLexer, EvoqueLexer,
**options)
class EvoqueXmlLexer(DelegatingLexer):
"""
Subclass of the `EvoqueLexer` that highlights unlexed data with the
`XmlLexer`.
.. versionadded:: 1.1
"""
name = 'XML+Evoque'
aliases = ['xml+evoque']
filenames = ['*.xml']
mimetypes = ['application/xml+evoque']
def __init__(self, **options):
super(EvoqueXmlLexer, self).__init__(XmlLexer, EvoqueLexer,
**options)
class ColdfusionLexer(RegexLexer):
"""
Coldfusion statements
"""
name = 'cfstatement'
aliases = ['cfs']
filenames = []
mimetypes = []
flags = re.IGNORECASE | re.MULTILINE
tokens = {
'root': [
(r'//.*', Comment),
(r'\+\+|--', Operator),
(r'[-+*/^&=!]', Operator),
(r'<=|>=|<|>', Operator),
(r'mod\b', Operator),
(r'(eq|lt|gt|lte|gte|not|is|and|or)\b', Operator),
(r'\|\||&&', Operator),
(r'"', String.Double, 'string'),
# There is a special rule for allowing html in single quoted
# strings, evidently.
(r"'.*?'", String.Single),
(r'\d+', Number),
(r'(if|else|len|var|case|default|break|switch)\b', Keyword),
(r'([A-Za-z_$][A-Za-z0-9_.]*)(\s*)(\()',
bygroups(Name.Function, Text, Punctuation)),
(r'[A-Za-z_$][A-Za-z0-9_.]*', Name.Variable),
(r'[()\[\]{};:,.\\]', Punctuation),
(r'\s+', Text),
],
'string': [
(r'""', String.Double),
(r'#.+?#', String.Interp),
(r'[^"#]+', String.Double),
(r'#', String.Double),
(r'"', String.Double, '#pop'),
],
}
class ColdfusionMarkupLexer(RegexLexer):
"""
Coldfusion markup only
"""
name = 'Coldfusion'
aliases = ['cf']
filenames = []
mimetypes = []
tokens = {
'root': [
(r'[^<]+', Other),
include('tags'),
(r'<[^<>]*', Other),
],
'tags': [
(r'(?s)<!---.*?--->', Comment.Multiline),
(r'(?s)<!--.*?-->', Comment),
(r'<cfoutput.*?>', Name.Builtin, 'cfoutput'),
(r'(?s)(<cfscript.*?>)(.+?)(</cfscript.*?>)',
bygroups(Name.Builtin, using(ColdfusionLexer), Name.Builtin)),
# negative lookbehind is for strings with embedded >
(r'(?s)(</?cf(?:component|include|if|else|elseif|loop|return|'
r'dbinfo|dump|abort|location|invoke|throw|file|savecontent|'
r'mailpart|mail|header|content|zip|image|lock|argument|try|'
r'catch|break|directory|http|set|function|param)\b)(.*?)((?<!\\)>)',
bygroups(Name.Builtin, using(ColdfusionLexer), Name.Builtin)),
],
'cfoutput': [
(r'[^#<]+', Other),
(r'(#)(.*?)(#)', bygroups(Punctuation, using(ColdfusionLexer),
Punctuation)),
#(r'<cfoutput.*?>', Name.Builtin, '#push'),
(r'</cfoutput.*?>', Name.Builtin, '#pop'),
include('tags'),
(r'(?s)<[^<>]*', Other),
(r'#', Other),
],
}
class ColdfusionHtmlLexer(DelegatingLexer):
"""
Coldfusion markup in html
"""
name = 'Coldfusion HTML'
aliases = ['cfm']
filenames = ['*.cfm', '*.cfml', '*.cfc']
mimetypes = ['application/x-coldfusion']
def __init__(self, **options):
super(ColdfusionHtmlLexer, self).__init__(HtmlLexer, ColdfusionMarkupLexer,
**options)
class SspLexer(DelegatingLexer):
"""
Lexer for Scalate Server Pages.
.. versionadded:: 1.4
"""
name = 'Scalate Server Page'
aliases = ['ssp']
filenames = ['*.ssp']
mimetypes = ['application/x-ssp']
def __init__(self, **options):
super(SspLexer, self).__init__(XmlLexer, JspRootLexer, **options)
def analyse_text(text):
rv = 0.0
        if re.search(r'val \w+\s*:', text):
rv += 0.6
if looks_like_xml(text):
rv += 0.2
if '<%' in text and '%>' in text:
rv += 0.1
return rv
class TeaTemplateRootLexer(RegexLexer):
"""
    Base for the `TeaTemplateLexer`. Yields `Token.Other` for areas outside of
code blocks.
.. versionadded:: 1.5
"""
tokens = {
'root': [
(r'<%\S?', Keyword, 'sec'),
(r'[^<]+', Other),
(r'<', Other),
],
'sec': [
(r'%>', Keyword, '#pop'),
# note: '\w\W' != '.' without DOTALL.
(r'[\w\W]+?(?=%>|\Z)', using(TeaLangLexer)),
],
}
class TeaTemplateLexer(DelegatingLexer):
"""
Lexer for `Tea Templates <http://teatrove.org/>`_.
.. versionadded:: 1.5
"""
name = 'Tea'
aliases = ['tea']
filenames = ['*.tea']
mimetypes = ['text/x-tea']
def __init__(self, **options):
super(TeaTemplateLexer, self).__init__(XmlLexer,
TeaTemplateRootLexer, **options)
def analyse_text(text):
rv = TeaLangLexer.analyse_text(text) - 0.01
if looks_like_xml(text):
rv += 0.4
if '<%' in text and '%>' in text:
rv += 0.1
return rv
class LassoHtmlLexer(DelegatingLexer):
"""
Subclass of the `LassoLexer` which highlights unhandled data with the
`HtmlLexer`.
    Nested JavaScript and CSS are also highlighted.
.. versionadded:: 1.6
"""
name = 'HTML+Lasso'
aliases = ['html+lasso']
alias_filenames = ['*.html', '*.htm', '*.xhtml', '*.lasso', '*.lasso[89]',
'*.incl', '*.inc', '*.las']
mimetypes = ['text/html+lasso',
'application/x-httpd-lasso',
'application/x-httpd-lasso[89]']
def __init__(self, **options):
super(LassoHtmlLexer, self).__init__(HtmlLexer, LassoLexer, **options)
def analyse_text(text):
rv = LassoLexer.analyse_text(text) - 0.01
if re.search(r'<\w+>', text, re.I):
rv += 0.2
if html_doctype_matches(text):
rv += 0.5
return rv
class LassoXmlLexer(DelegatingLexer):
"""
Subclass of the `LassoLexer` which highlights unhandled data with the
`XmlLexer`.
.. versionadded:: 1.6
"""
name = 'XML+Lasso'
aliases = ['xml+lasso']
alias_filenames = ['*.xml', '*.lasso', '*.lasso[89]',
'*.incl', '*.inc', '*.las']
mimetypes = ['application/xml+lasso']
def __init__(self, **options):
super(LassoXmlLexer, self).__init__(XmlLexer, LassoLexer, **options)
def analyse_text(text):
rv = LassoLexer.analyse_text(text) - 0.01
if looks_like_xml(text):
rv += 0.4
return rv
class LassoCssLexer(DelegatingLexer):
"""
Subclass of the `LassoLexer` which highlights unhandled data with the
`CssLexer`.
.. versionadded:: 1.6
"""
name = 'CSS+Lasso'
aliases = ['css+lasso']
alias_filenames = ['*.css']
mimetypes = ['text/css+lasso']
def __init__(self, **options):
options['requiredelimiters'] = True
super(LassoCssLexer, self).__init__(CssLexer, LassoLexer, **options)
def analyse_text(text):
rv = LassoLexer.analyse_text(text) - 0.05
if re.search(r'\w+:.+?;', text):
rv += 0.1
if 'padding:' in text:
rv += 0.1
return rv
class LassoJavascriptLexer(DelegatingLexer):
"""
Subclass of the `LassoLexer` which highlights unhandled data with the
`JavascriptLexer`.
.. versionadded:: 1.6
"""
name = 'JavaScript+Lasso'
aliases = ['js+lasso', 'javascript+lasso']
alias_filenames = ['*.js']
mimetypes = ['application/x-javascript+lasso',
'text/x-javascript+lasso',
'text/javascript+lasso']
def __init__(self, **options):
options['requiredelimiters'] = True
super(LassoJavascriptLexer, self).__init__(JavascriptLexer, LassoLexer,
**options)
def analyse_text(text):
rv = LassoLexer.analyse_text(text) - 0.05
if 'function' in text:
rv += 0.2
return rv
| mit |
dgladkov/django | django/db/models/signals.py | 399 | 2734 | from django.apps import apps
from django.dispatch import Signal
from django.utils import six
class_prepared = Signal(providing_args=["class"])
class ModelSignal(Signal):
"""
Signal subclass that allows the sender to be lazily specified as a string
of the `app_label.ModelName` form.
"""
def __init__(self, *args, **kwargs):
super(ModelSignal, self).__init__(*args, **kwargs)
self.unresolved_references = {}
class_prepared.connect(self._resolve_references)
def _resolve_references(self, sender, **kwargs):
opts = sender._meta
reference = (opts.app_label, opts.object_name)
try:
receivers = self.unresolved_references.pop(reference)
except KeyError:
pass
else:
for receiver, weak, dispatch_uid in receivers:
super(ModelSignal, self).connect(
receiver, sender=sender, weak=weak, dispatch_uid=dispatch_uid
)
def connect(self, receiver, sender=None, weak=True, dispatch_uid=None):
if isinstance(sender, six.string_types):
try:
app_label, model_name = sender.split('.')
except ValueError:
raise ValueError(
"Specified sender must either be a model or a "
"model name of the 'app_label.ModelName' form."
)
try:
sender = apps.get_registered_model(app_label, model_name)
except LookupError:
ref = (app_label, model_name)
refs = self.unresolved_references.setdefault(ref, [])
refs.append((receiver, weak, dispatch_uid))
return
super(ModelSignal, self).connect(
receiver, sender=sender, weak=weak, dispatch_uid=dispatch_uid
)
pre_init = ModelSignal(providing_args=["instance", "args", "kwargs"], use_caching=True)
post_init = ModelSignal(providing_args=["instance"], use_caching=True)
pre_save = ModelSignal(providing_args=["instance", "raw", "using", "update_fields"],
use_caching=True)
post_save = ModelSignal(providing_args=["instance", "raw", "created", "using", "update_fields"], use_caching=True)
pre_delete = ModelSignal(providing_args=["instance", "using"], use_caching=True)
post_delete = ModelSignal(providing_args=["instance", "using"], use_caching=True)
m2m_changed = ModelSignal(
providing_args=["action", "instance", "reverse", "model", "pk_set", "using"],
use_caching=True,
)
pre_migrate = Signal(providing_args=["app_config", "verbosity", "interactive", "using"])
post_migrate = Signal(providing_args=["app_config", "verbosity", "interactive", "using"])
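# Illustrative usage (the receiver and the "shop.Order" label are
# hypothetical): because post_save is a ModelSignal, the sender may be
# named as a string before the model class exists; _resolve_references()
# completes the connection once class_prepared fires for that model.
# Defined but never called.
def _demo_lazy_connect():
    def log_save(sender, instance, created, **kwargs):
        print('saved %s (created=%s)' % (instance, created))
    post_save.connect(log_save, sender='shop.Order')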
| bsd-3-clause |
DiptoDas8/Biponi | lib/python2.7/site-packages/django/template/backends/django.py | 44 | 3018 | # Since this package contains a "django" module, this is required on Python 2.
from __future__ import absolute_import
import warnings
from django.conf import settings
from django.template.context import Context, RequestContext, make_context
from django.template.engine import Engine, _dirs_undefined
from django.utils.deprecation import RemovedInDjango110Warning
from .base import BaseEngine
class DjangoTemplates(BaseEngine):
app_dirname = 'templates'
def __init__(self, params):
params = params.copy()
options = params.pop('OPTIONS').copy()
options.setdefault('debug', settings.DEBUG)
options.setdefault('file_charset', settings.FILE_CHARSET)
super(DjangoTemplates, self).__init__(params)
self.engine = Engine(self.dirs, self.app_dirs, **options)
def from_string(self, template_code):
return Template(self.engine.from_string(template_code))
def get_template(self, template_name, dirs=_dirs_undefined):
return Template(self.engine.get_template(template_name, dirs))
class Template(object):
def __init__(self, template):
self.template = template
@property
def origin(self):
# TODO: define the Origin API. For now simply forwarding to the
# underlying Template preserves backwards-compatibility.
return self.template.origin
def render(self, context=None, request=None):
# A deprecation path is required here to cover the following usage:
# >>> from django.template import Context
# >>> from django.template.loader import get_template
# >>> template = get_template('hello.html')
# >>> template.render(Context({'name': 'world'}))
# In Django 1.7 get_template() returned a django.template.Template.
# In Django 1.8 it returns a django.template.backends.django.Template.
# In Django 1.10 the isinstance checks should be removed. If passing a
# Context or a RequestContext works by accident, it won't be an issue
# per se, but it won't be officially supported either.
if isinstance(context, RequestContext):
if request is not None and request is not context.request:
raise ValueError(
"render() was called with a RequestContext and a request "
"argument which refer to different requests. Make sure "
"that the context argument is a dict or at least that "
"the two arguments refer to the same request.")
warnings.warn(
"render() must be called with a dict, not a RequestContext.",
RemovedInDjango110Warning, stacklevel=2)
elif isinstance(context, Context):
warnings.warn(
"render() must be called with a dict, not a Context.",
RemovedInDjango110Warning, stacklevel=2)
else:
context = make_context(context, request)
return self.template.render(context)
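# Illustrative usage (template string invented; assumes the default
# 'django' backend is configured in settings.TEMPLATES): render() is
# meant to be called with a plain dict -- Context/RequestContext only
# work through the deprecation path above. Defined but never called.
def _demo_render():
    from django.template import engines
    template = engines['django'].from_string('Hello {{ name }}!')
    return template.render({'name': 'world'})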
| mit |
ex1usive-m4d/TemplateDocx | controllers/phpdocx/lib/openoffice/openoffice.org/basis3.4/program/python-core-2.6.1/lib/encodings/__init__.py | 60 | 5638 | """ Standard "encodings" Package
Standard Python encoding modules are stored in this package
directory.
Codec modules must have names corresponding to normalized encoding
names as defined in the normalize_encoding() function below, e.g.
'utf-8' must be implemented by the module 'utf_8.py'.
Each codec module must export the following interface:
* getregentry() -> codecs.CodecInfo object
The getregentry() API must return a CodecInfo object with encoder, decoder,
incrementalencoder, incrementaldecoder, streamwriter and streamreader
attributes which adhere to the Python Codec Interface Standard.
In addition, a module may optionally also define the following
APIs which are then used by the package's codec search function:
* getaliases() -> sequence of encoding name strings to use as aliases
Alias names returned by getaliases() must be normalized encoding
names as defined by normalize_encoding().
Written by Marc-Andre Lemburg ([email protected]).
(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
"""#"
import codecs
from encodings import aliases
import __builtin__
_cache = {}
_unknown = '--unknown--'
_import_tail = ['*']
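# 256-byte translate table: keep [0-9A-Za-z] and '.', map every other byte to a space.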
_norm_encoding_map = ('                                              . '
                      '0123456789       ABCDEFGHIJKLMNOPQRSTUVWXYZ     '
                      ' abcdefghijklmnopqrstuvwxyz                     '
                      '                                                '
                      '                                                '
                      '                ')
_aliases = aliases.aliases
class CodecRegistryError(LookupError, SystemError):
pass
def normalize_encoding(encoding):
""" Normalize an encoding name.
Normalization works as follows: all non-alphanumeric
characters except the dot used for Python package names are
collapsed and replaced with a single underscore, e.g. ' -;#'
becomes '_'. Leading and trailing underscores are removed.
Note that encoding names should be ASCII only; if they do use
non-ASCII characters, these must be Latin-1 compatible.
"""
# Make sure we have an 8-bit string, because .translate() works
# differently for Unicode strings.
if hasattr(__builtin__, "unicode") and isinstance(encoding, unicode):
# Note that .encode('latin-1') does *not* use the codec
# registry, so this call doesn't recurse. (See unicodeobject.c
# PyUnicode_AsEncodedString() for details)
encoding = encoding.encode('latin-1')
return '_'.join(encoding.translate(_norm_encoding_map).split())
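# Illustrative results of the rules described above:
# >>> normalize_encoding('UTF-8')
# 'UTF_8'
# >>> normalize_encoding(' -;#latin-1 ')
# 'latin_1'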
def search_function(encoding):
# Cache lookup
entry = _cache.get(encoding, _unknown)
if entry is not _unknown:
return entry
# Import the module:
#
# First try to find an alias for the normalized encoding
# name and lookup the module using the aliased name, then try to
# lookup the module using the standard import scheme, i.e. first
# try in the encodings package, then at top-level.
#
norm_encoding = normalize_encoding(encoding)
aliased_encoding = _aliases.get(norm_encoding) or \
_aliases.get(norm_encoding.replace('.', '_'))
if aliased_encoding is not None:
modnames = [aliased_encoding,
norm_encoding]
else:
modnames = [norm_encoding]
for modname in modnames:
if not modname or '.' in modname:
continue
try:
# Import is absolute to prevent the possibly malicious import of a
# module with side-effects that is not in the 'encodings' package.
mod = __import__('encodings.' + modname, fromlist=_import_tail,
level=0)
except ImportError:
pass
else:
break
else:
mod = None
try:
getregentry = mod.getregentry
except AttributeError:
# Not a codec module
mod = None
if mod is None:
# Cache misses
_cache[encoding] = None
return None
# Now ask the module for the registry entry
entry = getregentry()
if not isinstance(entry, codecs.CodecInfo):
if not 4 <= len(entry) <= 7:
raise CodecRegistryError,\
'module "%s" (%s) failed to register' % \
(mod.__name__, mod.__file__)
if not callable(entry[0]) or \
not callable(entry[1]) or \
(entry[2] is not None and not callable(entry[2])) or \
(entry[3] is not None and not callable(entry[3])) or \
(len(entry) > 4 and entry[4] is not None and not callable(entry[4])) or \
(len(entry) > 5 and entry[5] is not None and not callable(entry[5])):
raise CodecRegistryError,\
'incompatible codecs in module "%s" (%s)' % \
(mod.__name__, mod.__file__)
if len(entry)<7 or entry[6] is None:
entry += (None,)*(6-len(entry)) + (mod.__name__.split(".", 1)[1],)
entry = codecs.CodecInfo(*entry)
# Cache the codec registry entry
_cache[encoding] = entry
# Register its aliases (without overwriting previously registered
# aliases)
try:
codecaliases = mod.getaliases()
except AttributeError:
pass
else:
for alias in codecaliases:
if not _aliases.has_key(alias):
_aliases[alias] = modname
# Return the registry entry
return entry
# Register the search_function in the Python codec registry
codecs.register(search_function)
| bsd-3-clause |
0k/OpenUpgrade | addons/purchase_double_validation/__init__.py | 441 | 1090 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import purchase_double_validation_installer
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
LuckDragon82/demo | boilerplate/external/babel/tests/core.py | 61 | 1726 | # -*- coding: utf-8 -*-
#
# Copyright (C) 2007 Edgewall Software
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://babel.edgewall.org/wiki/License.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://babel.edgewall.org/log/.
import doctest
import os
import unittest
from babel import core
from babel.core import default_locale
class DefaultLocaleTest(unittest.TestCase):
def setUp(self):
self._old_locale_settings = self._current_locale_settings()
def tearDown(self):
self._set_locale_settings(self._old_locale_settings)
def _current_locale_settings(self):
settings = {}
for name in ('LANGUAGE', 'LC_ALL', 'LC_CTYPE', 'LANG'):
settings[name] = os.environ[name]
return settings
def _set_locale_settings(self, settings):
for name, value in settings.items():
os.environ[name] = value
def test_ignore_invalid_locales_in_lc_ctype(self):
# This is a regression test specifically for a bad LC_CTYPE setting on
# MacOS X 10.6 (#200)
os.environ['LC_CTYPE'] = 'UTF-8'
# must not throw an exception
default_locale('LC_CTYPE')
def suite():
suite = unittest.TestSuite()
suite.addTest(doctest.DocTestSuite(core))
suite.addTest(unittest.makeSuite(DefaultLocaleTest))
return suite
if __name__ == '__main__':
unittest.main(defaultTest='suite')
| lgpl-3.0 |
lnielsen/invenio | invenio/legacy/pdfchecker/arxiv.py | 3 | 16346 | # -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2011 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""
ArXiv Pdf Checker Task
Checks arxiv records for missing pdfs and downloads them from arXiv
"""
import os
import time
import re
from datetime import datetime
from tempfile import NamedTemporaryFile
from xml.dom import minidom
import socket
from invenio.legacy.bibdocfile.cli import bibupload_ffts
from invenio.legacy.docextract.task import store_last_updated, \
fetch_last_updated
from invenio.utils.shell import split_cli_ids_arg
from invenio.legacy.dbquery import run_sql
from invenio.legacy.bibsched.bibtask import task_low_level_submission
from invenio.legacy.refextract.api import record_has_fulltext, \
check_record_for_refextract
from invenio.legacy.bibsched.bibtask import task_init, \
write_message, \
task_update_progress, \
task_get_option, \
task_set_option, \
task_sleep_now_if_required
from invenio.legacy.bibrecord import get_fieldvalues
from invenio.config import CFG_VERSION, \
CFG_TMPSHAREDDIR, \
CFG_TMPDIR, \
CFG_ARXIV_URL_PATTERN
# Help message is the usage() print out of how to use this task
from invenio.legacy.docextract.record import get_record
from invenio.legacy.bibdocfile.api import BibRecDocs, \
calculate_md5
from invenio.legacy.oaiharvest import utils as oai_harvest_daemon
from invenio.utils.filedownload import (download_external_url,
InvenioFileDownloadError)
NAME = 'arxiv-pdf-checker'
ARXIV_VERSION_PATTERN = re.compile(ur'v\d$', re.UNICODE)
STATUS_OK = 'ok'
STATUS_MISSING = 'missing'
class PdfNotAvailable(Exception):
pass
class FoundExistingPdf(Exception):
pass
class AlreadyHarvested(Exception):
def __init__(self, status):
Exception.__init__(self)
self.status = status
def build_arxiv_url(arxiv_id, version):
return CFG_ARXIV_URL_PATTERN % (arxiv_id, version)
def extract_arxiv_ids_from_recid(recid):
"""Extract arxiv # for given recid
We get them from the record which has this format:
037__ $9arXiv$arXiv:1010.1111
"""
record = get_record(recid)
for report_number_field in record.get('037', []):
try:
source = report_number_field.get_subfield_values('9')[0]
except IndexError:
continue
else:
if source != 'arXiv':
continue
try:
report_number = report_number_field.get_subfield_values('a')[0]
except IndexError:
continue
else:
# Extract arxiv id
if report_number.startswith('arXiv'):
report_number = report_number.split(':')[1]
if ARXIV_VERSION_PATTERN.search(report_number):
report_number = report_number[:-2]
yield report_number
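# Illustration (record id and field contents hypothetical): a record whose
# 037 field holds $9 arXiv / $a arXiv:1010.1111v2 yields '1010.1111' --
# the 'arXiv:' prefix and the trailing version marker are both stripped.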
def cb_parse_option(key, value, opts, args):
"""Parse command line options"""
if args:
# There should be no standalone arguments
raise StandardError("Error: Unrecognised argument '%s'." % args[0])
if key in ('-i', '--id'):
recids = task_get_option('recids')
if not recids:
recids = set()
task_set_option('recids', recids)
recids.update(split_cli_ids_arg(value))
return True
def store_arxiv_pdf_status(recid, status, version):
"""Store pdf harvesting status in the database"""
valid_status = (STATUS_OK, STATUS_MISSING)
if status not in valid_status:
raise ValueError('invalid status %s' % status)
now = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
run_sql("""REPLACE INTO bibARXIVPDF (id_bibrec, status, date_harvested, version)
VALUES (%s, %s, %s, %s)""", (recid, status, now, version))
def fetch_arxiv_pdf_status(recid):
"""Fetch from the database the harvest status of given recid"""
ret = run_sql("""SELECT status, version FROM bibARXIVPDF
WHERE id_bibrec = %s""", [recid])
return ret and ret[0] or (None, None)
def download_one(recid, version):
"""Download given version of the PDF from arxiv"""
write_message('fetching %s' % recid)
for count, arxiv_id in enumerate(extract_arxiv_ids_from_recid(recid)):
if count != 0:
write_message("Warning: %s has multiple arxiv #" % recid)
continue
url_for_pdf = build_arxiv_url(arxiv_id, version)
filename_arxiv_id = arxiv_id.replace('/', '_')
temp_file = NamedTemporaryFile(prefix="arxiv-pdf-checker",
dir=CFG_TMPSHAREDDIR,
suffix="%s.pdf" % filename_arxiv_id)
write_message('downloading pdf from %s' % url_for_pdf)
path = download_external_url(url_for_pdf,
temp_file.name,
content_type='pdf')
        # Check that this is not an HTML "not found" page
filesize = os.path.getsize(path)
if filesize < 25000:
f = open(path)
try:
for line in f:
if 'PDF unavailable' in line:
raise PdfNotAvailable()
finally:
f.close()
docs = BibRecDocs(recid)
bibdocfiles = docs.list_latest_files(doctype="arXiv")
needs_update = False
try:
bibdocfile = bibdocfiles[0]
except IndexError:
bibdocfile = None
needs_update = True
else:
existing_md5 = calculate_md5(bibdocfile.fullpath)
new_md5 = calculate_md5(path.encode('utf-8'))
if new_md5 != existing_md5:
write_message('md5 differs updating')
needs_update = True
else:
write_message('md5 matches existing pdf, skipping')
if needs_update:
if bibdocfiles:
write_message('adding as new version')
docs.add_new_version(path, docname=bibdocfile.name)
else:
write_message('adding as new file')
docs.add_new_file(path,
doctype="arXiv",
docname="arXiv:%s" % filename_arxiv_id)
else:
raise FoundExistingPdf()
def oai_harvest_query(arxiv_id, prefix='arXivRaw', verb='GetRecord',
max_retries=5, repositories=[]):
"""Wrapper of oai_harvest_daemon.oai_harvest_get that handles retries"""
if not len(repositories):
from invenio.modules.oaiharvester.models import OaiHARVEST
repository = OaiHARVEST.query.filter(
OaiHARVEST.name == 'arxiv'
).first().todict()
else:
repository = repositories[0]
harvestpath = os.path.join(CFG_TMPDIR, "arxiv-pdf-checker-oai-")
def get():
return oai_harvest_daemon.oai_harvest_get(
prefix=prefix,
baseurl=repository['baseurl'],
harvestpath=harvestpath,
verb=verb,
identifier='oai:arXiv.org:%s' % arxiv_id)
responses = None
for retry_count in range(1, max_retries + 1):
try:
responses = get()
except (socket.timeout, socket.error):
write_message('socket error, arxiv is down?')
else:
if not responses:
write_message('no responses from oai server')
break
if retry_count <= 2:
write_message('sleeping for 10s')
time.sleep(10)
else:
write_message('sleeping for 30 minutes')
time.sleep(1800)
if responses is None:
raise Exception('arXiv is down')
return responses
def fetch_arxiv_version(recid):
"""Query arxiv and extract the version of the pdf from the response"""
for count, arxiv_id in enumerate(extract_arxiv_ids_from_recid(recid)):
if count != 0:
write_message("Warning: %s has multiple arxiv #" % recid)
continue
responses = oai_harvest_query(arxiv_id)
if not responses:
return None
# The response is roughly in this format
# <OAI-PMH>
# <GetRecord>
# <metadata>
# <version version="v1">
# <date>Mon, 15 Apr 2013 19:33:21 GMT</date>
# <size>609kb</size>
# <source_type>D</source_type>
# <version version="v2">
# <date>Mon, 25 Apr 2013 19:33:21 GMT</date>
# <size>620kb</size>
# <source_type>D</source_type>
# </version>
# </<metadata>
# </<GetRecord>
# </<OAI-PMH>
# We pass one arxiv id, we are assuming a single response file
tree = minidom.parse(responses[0])
version_tags = tree.getElementsByTagName('version')
if version_tags:
version = version_tags[-1].getAttribute('version')
else:
version = 'v1'
        # We have to remove the response files manually.
        # For some reason the response is written to disk instead of
        # being returned as a string.
for file_path in responses:
os.unlink(file_path)
return int(version[1:])
def process_one(recid):
"""Checks given recid for updated pdfs on arxiv"""
write_message('checking %s' % recid)
# Last version we have harvested
harvest_status, harvest_version = fetch_arxiv_pdf_status(recid)
# Fetch arxiv version
arxiv_version = fetch_arxiv_version(recid)
if not arxiv_version:
msg = 'version information unavailable'
write_message(msg)
raise PdfNotAvailable(msg)
write_message('harvested_version %s' % harvest_version)
write_message('arxiv_version %s' % arxiv_version)
if record_has_fulltext(recid) and harvest_version == arxiv_version:
write_message('our version matches arxiv')
raise AlreadyHarvested(status=harvest_status)
# We already tried to harvest this record but failed
if harvest_status == STATUS_MISSING and harvest_version == arxiv_version:
raise PdfNotAvailable()
updated = False
try:
download_one(recid, arxiv_version)
except PdfNotAvailable:
store_arxiv_pdf_status(recid, STATUS_MISSING, arxiv_version)
raise
except FoundExistingPdf:
store_arxiv_pdf_status(recid, STATUS_OK, arxiv_version)
raise
else:
store_arxiv_pdf_status(recid, STATUS_OK, arxiv_version)
updated = True
return updated
def submit_fixmarc_task(recids):
"""Submit a task that synchronizes the 8564 tags
This should be done right after changing the files attached to a record"""
field = [{'doctype' : 'FIX-MARC'}]
ffts = {}
for recid in recids:
ffts[recid] = field
bibupload_ffts(ffts, append=False, interactive=False)
def submit_refextract_task(recids):
"""Submit a refextract task if needed"""
# First filter out recids we cannot safely extract references from
# (mostly because they have been curated)
recids = [recid for recid in recids if check_record_for_refextract(recid)]
if recids:
recids_str = ','.join(str(recid) for recid in recids)
task_low_level_submission('refextract', NAME, '-i', recids_str)
def fetch_updated_arxiv_records(date):
"""Fetch all the arxiv records modified since the last run"""
def check_arxiv(recid):
"""Returns True for arxiv papers"""
for report_number in get_fieldvalues(recid, '037__9'):
if report_number == 'arXiv':
return True
return False
# Fetch all records inserted since last run
sql = "SELECT `id`, `modification_date` FROM `bibrec` " \
"WHERE `modification_date` >= %s " \
"ORDER BY `modification_date`"
records = run_sql(sql, [date.isoformat()])
records = [(r, mod_date) for r, mod_date in records if check_arxiv(r)]
# Show all records for debugging purposes
if task_get_option('verbose') >= 9:
write_message('recids:', verbose=9)
for recid, mod_date in records:
write_message("* %s, %s" % (recid, mod_date), verbose=9)
task_update_progress("Done fetching %s arxiv record ids" % len(records))
return records
def task_run_core(name=NAME):
"""Entry point for the arxiv-pdf-checker task"""
# First gather recids to process
recids = task_get_option('recids')
if recids:
start_date = None
recids = [(recid, None) for recid in recids]
else:
start_date = datetime.now()
dummy, last_date = fetch_last_updated(name)
recids = fetch_updated_arxiv_records(last_date)
updated_recids = set()
try:
for count, (recid, dummy) in enumerate(recids):
if count % 50 == 0:
msg = 'Done %s of %s' % (count, len(recids))
write_message(msg)
task_update_progress(msg)
# BibTask sleep
task_sleep_now_if_required(can_stop_too=True)
write_message('processing %s' % recid, verbose=9)
try:
if process_one(recid):
updated_recids.add(recid)
time.sleep(6)
except AlreadyHarvested:
write_message('already harvested successfully')
time.sleep(6)
except FoundExistingPdf:
write_message('pdf already attached (matching md5)')
time.sleep(6)
except PdfNotAvailable:
write_message("no pdf available")
time.sleep(20)
except InvenioFileDownloadError, e:
write_message("failed to download: %s" % e)
time.sleep(20)
finally:
# We want to process updated records even in case we are interrupted
msg = 'Updated %s records' % len(updated_recids)
write_message(msg)
task_update_progress(msg)
write_message(repr(updated_recids))
# For all updated records, we want to sync the 8564 tags
# and reextract references
if updated_recids:
submit_fixmarc_task(updated_recids)
submit_refextract_task(updated_recids)
# Store last run date of the daemon
# not if it ran on specific recids from the command line with --id
# but only if it ran on the modified records
if start_date:
store_last_updated(0, start_date, name)
return True
def main():
"""Constructs the refextract bibtask."""
# Build and submit the task
task_init(authorization_action='runarxivpdfchecker',
authorization_msg="Arxiv Pdf Checker Task Submission",
description="""Daemon that checks if we have the latest version of arxiv PDFs""",
help_specific_usage="""
Scheduled (daemon) options:
-i, --id Record id to check.
Examples:
(run a daemon job)
arxiv-pdf-checker
""",
version="Invenio v%s" % CFG_VERSION,
specific_params=("i:", ["id="]),
task_submit_elaborate_specific_parameter_fnc=cb_parse_option,
task_run_fnc=task_run_core)
| gpl-2.0 |
Red-M/CloudBot-legacy | util/http.py | 7 | 3042 | # convenience wrapper for urllib2 & friends
import cookielib
import json
import urllib
import urllib2
import urlparse
from urllib import quote, quote_plus as _quote_plus
from lxml import etree, html
from bs4 import BeautifulSoup
# used in plugins that import this
from urllib2 import URLError, HTTPError
ua_cloudbot = 'Cloudbot/DEV http://github.com/CloudDev/CloudBot'
ua_firefox = 'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:17.0) Gecko/17.0' \
' Firefox/17.0'
ua_old_firefox = 'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; ' \
'rv:1.8.1.6) Gecko/20070725 Firefox/2.0.0.6'
ua_internetexplorer = 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1)'
ua_chrome = 'Mozilla/5.0 (X11; Linux i686) AppleWebKit/537.4 (KHTML, ' \
'like Gecko) Chrome/22.0.1229.79 Safari/537.4'
jar = cookielib.CookieJar()
def get(*args, **kwargs):
return open(*args, **kwargs).read()
def get_url(*args, **kwargs):
return open(*args, **kwargs).geturl()
def get_html(*args, **kwargs):
return html.fromstring(get(*args, **kwargs))
def get_soup(*args, **kwargs):
return BeautifulSoup(get(*args, **kwargs), 'lxml')
def get_xml(*args, **kwargs):
return etree.fromstring(get(*args, **kwargs))
def get_json(*args, **kwargs):
return json.loads(get(*args, **kwargs))
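# NB: the helper below deliberately shadows the builtin open() inside this
# module; all of the wrappers above go through it.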
def open(url, query_params=None, user_agent=None, post_data=None,
referer=None, get_method=None, cookies=False, timeout=None, headers=None, **kwargs):
if query_params is None:
query_params = {}
if user_agent is None:
user_agent = ua_cloudbot
query_params.update(kwargs)
url = prepare_url(url, query_params)
request = urllib2.Request(url, post_data)
if get_method is not None:
request.get_method = lambda: get_method
if headers is not None:
for header_key, header_value in headers.iteritems():
request.add_header(header_key, header_value)
request.add_header('User-Agent', user_agent)
if referer is not None:
request.add_header('Referer', referer)
if cookies:
opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(jar))
else:
opener = urllib2.build_opener()
if timeout:
return opener.open(request, timeout=timeout)
else:
return opener.open(request)
def prepare_url(url, queries):
if queries:
scheme, netloc, path, query, fragment = urlparse.urlsplit(url)
query = dict(urlparse.parse_qsl(query))
query.update(queries)
query = urllib.urlencode(dict((to_utf8(key), to_utf8(value))
for key, value in query.iteritems()))
url = urlparse.urlunsplit((scheme, netloc, path, query, fragment))
return url
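# Illustrative behaviour (example.com is a placeholder; parameter order in
# the merged query string depends on dict ordering):
# >>> prepare_url('http://example.com/api?a=1', {'b': 2})
# 'http://example.com/api?a=1&b=2'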
def to_utf8(s):
if isinstance(s, unicode):
return s.encode('utf8', 'ignore')
else:
return str(s)
def quote_plus(s):
return _quote_plus(to_utf8(s))
def unescape(s):
if not s.strip():
return s
return html.fromstring(s).text_content()
| gpl-3.0 |
dennisobrien/bokeh | examples/embed/autoload_static.py | 5 | 2050 | from jinja2 import Template
from tornado.ioloop import IOLoop
from tornado.web import Application, RequestHandler
from bokeh.embed import autoload_static
from bokeh.plotting import figure
from bokeh.resources import CDN
from bokeh.sampledata.iris import flowers
from bokeh.util.browser import view
template = Template("""
<!doctype html>
<html lang="en">
<head>
<meta charset="utf-8">
</head>
<body>
<div>
The plot embedded below is a standalone plot that was embedded using
      <code>autoload_static</code>. For more information see the
<a target="_blank" href="https://bokeh.pydata.org/en/latest/docs/user_guide/embed.html#static-data">
documentation</a>.
</div>
{{ script|safe }}
</body>
</html>
""")
class IndexHandler(RequestHandler):
def initialize(self, script):
self.script = script
def get(self):
self.write(template.render(script=self.script))
# Normally, you might save the .js files to some location on disk, and serve
# them from there. Here we use this request handler, just to make the example
# completely self-contained.
class JSHandler(RequestHandler):
def initialize(self, js):
self.js = js
    def get(self):
        self.write(self.js)
def make_plot():
colormap = {'setosa': 'red', 'versicolor': 'green', 'virginica': 'blue'}
colors = [colormap[x] for x in flowers['species']]
p = figure(title = "Iris Morphology")
p.xaxis.axis_label = 'Petal Length'
p.yaxis.axis_label = 'Petal Width'
p.circle(flowers["petal_length"], flowers["petal_width"],
color=colors, fill_alpha=0.2, size=10)
return p
if __name__ == '__main__':
print('Opening Tornado app with embedded Bokeh plot on http://localhost:8080/')
js, script = autoload_static(make_plot(), CDN, "embed.js")
app = Application([
(r"/", IndexHandler, dict(script=script)),
(r"/embed.js", JSHandler, dict(js=js))
])
app.listen(8080)
io_loop = IOLoop.current()
io_loop.add_callback(view, "http://localhost:8080/")
io_loop.start()
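# Note (sketch): autoload_static() returns (js, script) -- `js` is the
# JavaScript source that must be served at the path passed in ("embed.js"
# above), and `script` is the <script> tag to place in the page. Saving
# `js` to a real file and serving it statically works just as well as the
# JSHandler used here.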
| bsd-3-clause |
aaxelb/SHARE | share/harvesters/org_elife.py | 3 | 3365 | import time
import logging
import requests
from django.conf import settings
from furl import furl
from lxml import etree
from share.harvest import BaseHarvester
logger = logging.getLogger(__name__)
class ELifeHarvester(BaseHarvester):
VERSION = 1
BASE_DATA_URL = 'https://raw.githubusercontent.com/elifesciences/elife-article-xml/master/{}'
BASE_URL = 'https://api.github.com/repos/elifesciences/elife-article-xml/commits{}'
def request(self, *args, **kwargs):
if settings.GITHUB_API_KEY:
kwargs.setdefault('headers', {})['Authorization'] = 'token {}'.format(settings.GITHUB_API_KEY)
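        # Retry loop: GitHub answers rate-limited calls with HTTP 403 and
        # X-RateLimit-Remaining: 0, so on exhaustion sleep until the
        # advertised X-RateLimit-Reset time and retry; any other error
        # status is raised immediately.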
while True:
response = self.requests.get(*args, **kwargs)
if int(response.headers.get('X-RateLimit-Remaining', 0)) == 0:
reset = int(response.headers.get('X-RateLimit-Reset', time.time())) - time.time()
logger.warning('Hit GitHub ratelimit sleeping for %s seconds', reset)
time.sleep(reset)
if response.status_code != 403:
response.raise_for_status()
return response
def do_harvest(self, start_date, end_date):
end_date = end_date.date()
start_date = start_date.date()
logger.info("The data for each record must be requested individually - this may take a while... ")
for sha in self.fetch_commits(start_date, end_date):
for file_name in self.fetch_file_names(sha):
if not file_name.endswith('.xml'):
continue
record = self.fetch_xml(file_name)
                if record is None:
continue
doc = etree.tostring(record)
doc_id = record.xpath('//article-id[@*]')[0].text
yield (doc_id, doc)
def fetch_commits(self, start_date, end_date):
page = -1
url = self.BASE_URL.format('?')
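        # Page through the commits endpoint 100 entries at a time; a final
        # page shorter than 100 signals the end of the listing.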
while True:
page += 1
response = self.request(furl(url).set(query_params={
'since': start_date.isoformat(),
'until': end_date.isoformat(),
'page': page,
'per_page': 100
}).url)
commits = response.json()
for commit in commits:
if commit.get('sha'):
yield commit['sha']
if len(commits) != 100:
break
def fetch_file_names(self, sha):
page = -1
url = self.BASE_URL.format('/{}'.format(sha))
while True:
page += 1
response = self.request(furl(url).set(query_params={
'page': page,
'per_page': 100
}))
files = response.json()['files']
for f in files:
yield f['filename']
if len(files) != 100:
break
def fetch_xml(self, file_name):
file_url = furl(self.BASE_DATA_URL.format(file_name))
# Not using self.requests when getting the file contents because the eLife rate limit (1, 60) does not apply
resp = requests.get(file_url.url)
if resp.status_code == 404:
logger.warning('Could not download file %s', file_name)
return None
resp.raise_for_status()
xml = etree.XML(resp.content)
return xml
| apache-2.0 |
angryrancor/kivy | examples/audio/main.py | 40 | 2207 | '''
Audio example
=============
This example plays sounds of different formats. You should see a grid of
buttons labelled with filenames. Clicking on the buttons will play, or
restart, each sound. Not all sound formats will play on all platforms.
All the sounds are from the http://woolyss.com/chipmusic-samples.php
"THE FREESOUND PROJECT", Under Creative Commons Sampling Plus 1.0 License.
'''
import kivy
kivy.require('1.0.8')
from kivy.app import App
from kivy.uix.button import Button
from kivy.uix.boxlayout import BoxLayout
from kivy.core.audio import SoundLoader
from kivy.properties import StringProperty, ObjectProperty, NumericProperty
from glob import glob
from os.path import dirname, join, basename
class AudioButton(Button):
filename = StringProperty(None)
sound = ObjectProperty(None, allownone=True)
volume = NumericProperty(1.0)
def on_press(self):
if self.sound is None:
self.sound = SoundLoader.load(self.filename)
# stop the sound if it's currently playing
if self.sound.status != 'stop':
self.sound.stop()
self.sound.volume = self.volume
self.sound.play()
def release_audio(self):
if self.sound:
self.sound.stop()
self.sound.unload()
self.sound = None
def set_volume(self, volume):
self.volume = volume
if self.sound:
self.sound.volume = volume
class AudioBackground(BoxLayout):
pass
class AudioApp(App):
def build(self):
root = AudioBackground(spacing=5)
for fn in glob(join(dirname(__file__), '*.wav')):
btn = AudioButton(
text=basename(fn[:-4]).replace('_', ' '), filename=fn,
size_hint=(None, None), halign='center',
size=(128, 128), text_size=(118, None))
root.ids.sl.add_widget(btn)
return root
def release_audio(self):
for audiobutton in self.root.ids.sl.children:
audiobutton.release_audio()
def set_volume(self, value):
for audiobutton in self.root.ids.sl.children:
audiobutton.set_volume(value)
if __name__ == '__main__':
AudioApp().run()
| mit |
lattwood/phantomjs | src/qt/qtwebkit/Tools/Scripts/webkitpy/layout_tests/servers/http_server.py | 121 | 10730 | # Copyright (C) 2011 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""A class to help start/stop the lighttpd server used by layout tests."""
import logging
import os
import time
from webkitpy.layout_tests.servers import http_server_base
_log = logging.getLogger(__name__)
class Lighttpd(http_server_base.HttpServerBase):
def __init__(self, port_obj, output_dir, background=False, port=None,
root=None, run_background=None, additional_dirs=None,
layout_tests_dir=None, number_of_servers=None):
"""Args:
output_dir: the absolute path to the layout test result directory
"""
# Webkit tests
http_server_base.HttpServerBase.__init__(self, port_obj, number_of_servers)
self._name = 'lighttpd'
self._output_dir = output_dir
self._port = port
self._root = root
self._run_background = run_background
self._additional_dirs = additional_dirs
self._layout_tests_dir = layout_tests_dir
self._pid_file = self._filesystem.join(self._runtime_path, '%s.pid' % self._name)
if self._port:
self._port = int(self._port)
if not self._layout_tests_dir:
self._layout_tests_dir = self._port_obj.layout_tests_dir()
self._webkit_tests = os.path.join(self._layout_tests_dir, 'http', 'tests')
self._js_test_resource = os.path.join(self._layout_tests_dir, 'fast', 'js', 'resources')
self._media_resource = os.path.join(self._layout_tests_dir, 'media')
# Self generated certificate for SSL server (for client cert get
# <base-path>\chrome\test\data\ssl\certs\root_ca_cert.crt)
self._pem_file = os.path.join(
os.path.dirname(os.path.abspath(__file__)), 'httpd2.pem')
# One mapping where we can get to everything
self.VIRTUALCONFIG = []
if self._webkit_tests:
self.VIRTUALCONFIG.extend(
# Three mappings (one with SSL) for LayoutTests http tests
[{'port': 8000, 'docroot': self._webkit_tests},
{'port': 8080, 'docroot': self._webkit_tests},
{'port': 8443, 'docroot': self._webkit_tests,
'sslcert': self._pem_file}])
def _prepare_config(self):
base_conf_file = self._port_obj.path_from_webkit_base('Tools',
'Scripts', 'webkitpy', 'layout_tests', 'servers', 'lighttpd.conf')
out_conf_file = os.path.join(self._output_dir, 'lighttpd.conf')
time_str = time.strftime("%d%b%Y-%H%M%S")
access_file_name = "access.log-" + time_str + ".txt"
access_log = os.path.join(self._output_dir, access_file_name)
log_file_name = "error.log-" + time_str + ".txt"
error_log = os.path.join(self._output_dir, log_file_name)
# Write out the config
base_conf = self._filesystem.read_text_file(base_conf_file)
# FIXME: This should be re-worked so that this block can
# use with open() instead of a manual file.close() call.
f = self._filesystem.open_text_file_for_writing(out_conf_file)
f.write(base_conf)
# Write out our cgi handlers. Run perl through env so that it
# processes the #! line and runs perl with the proper command
# line arguments. Emulate apache's mod_asis with a cat cgi handler.
f.write(('cgi.assign = ( ".cgi" => "/usr/bin/env",\n'
' ".pl" => "/usr/bin/env",\n'
' ".asis" => "/bin/cat",\n'
' ".php" => "%s" )\n\n') %
self._port_obj._path_to_lighttpd_php())
# Setup log files
f.write(('server.errorlog = "%s"\n'
'accesslog.filename = "%s"\n\n') % (error_log, access_log))
        # Setup upload folders. The upload folder holds temporary upload files
        # and also POST data. This is used to support XHR layout tests that
        # do POST.
f.write(('server.upload-dirs = ( "%s" )\n\n') % (self._output_dir))
# Setup a link to where the js test templates are stored
f.write(('alias.url = ( "/js-test-resources" => "%s" )\n\n') %
(self._js_test_resource))
if self._additional_dirs:
for alias, path in self._additional_dirs.iteritems():
f.write(('alias.url += ( "%s" => "%s" )\n\n') % (alias, path))
# Setup a link to where the media resources are stored.
f.write(('alias.url += ( "/media-resources" => "%s" )\n\n') %
(self._media_resource))
        # Dump out our virtual host config at the bottom.
if self._root:
if self._port:
# Have both port and root dir.
mappings = [{'port': self._port, 'docroot': self._root}]
else:
                # Have only a root dir - use the default set of ports as for
                # LayoutTests, but with the specified root. This is used in
                # ui_tests to run http tests against a browser.
mappings = [{'port': 8000, 'docroot': self._root},
{'port': 8080, 'docroot': self._root},
{'port': 8443, 'docroot': self._root,
'sslcert': self._pem_file}]
else:
mappings = self.VIRTUALCONFIG
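        # Each mapping becomes one lighttpd socket stanza; a sketch of the
        # emitted text for the SSL mapping (paths illustrative):
        #
        #   $SERVER["socket"] == "127.0.0.1:8443" {
        #       server.document-root = "/.../LayoutTests/http/tests"
        #       ssl.engine = "enable"
        #       ssl.pemfile = "/.../httpd2.pem"
        #   }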
for mapping in mappings:
ssl_setup = ''
if 'sslcert' in mapping:
ssl_setup = (' ssl.engine = "enable"\n'
' ssl.pemfile = "%s"\n' % mapping['sslcert'])
f.write(('$SERVER["socket"] == "127.0.0.1:%d" {\n'
' server.document-root = "%s"\n' +
ssl_setup +
'}\n\n') % (mapping['port'], mapping['docroot']))
f.close()
executable = self._port_obj._path_to_lighttpd()
module_path = self._port_obj._path_to_lighttpd_modules()
start_cmd = [executable,
# Newly written config file
'-f', os.path.join(self._output_dir, 'lighttpd.conf'),
# Where it can find its module dynamic libraries
'-m', module_path]
if not self._run_background:
            # Don't background.
            start_cmd.append('-D')
# Copy liblightcomp.dylib to /tmp/lighttpd/lib to work around the
# bug that mod_alias.so loads it from the hard coded path.
if self._port_obj.host.platform.is_mac():
tmp_module_path = '/tmp/lighttpd/lib'
if not self._filesystem.exists(tmp_module_path):
self._filesystem.maybe_make_directory(tmp_module_path)
lib_file = 'liblightcomp.dylib'
self._filesystem.copyfile(self._filesystem.join(module_path, lib_file),
self._filesystem.join(tmp_module_path, lib_file))
self._start_cmd = start_cmd
self._env = self._port_obj.setup_environ_for_server('lighttpd')
self._mappings = mappings
def _remove_stale_logs(self):
# Sometimes logs are open in other processes but they should clear eventually.
for log_prefix in ('access.log-', 'error.log-'):
try:
self._remove_log_files(self._output_dir, log_prefix)
except OSError, e:
_log.warning('Failed to remove old %s %s files' % (self._name, log_prefix))
def _spawn_process(self):
_log.debug('Starting %s server, cmd="%s"' % (self._name, self._start_cmd))
process = self._executive.popen(self._start_cmd, env=self._env, shell=False, stderr=self._executive.PIPE)
pid = process.pid
self._filesystem.write_text_file(self._pid_file, str(pid))
return pid
def _stop_running_server(self):
# FIXME: It would be nice if we had a cleaner way of killing this process.
# Currently we throw away the process object created in _spawn_process,
# since there doesn't appear to be any way to kill the server any more
# cleanly using it than just killing the pid, and we need to support
# killing a pid directly anyway for run-webkit-httpd and run-webkit-websocketserver.
self._wait_for_action(self._check_and_kill)
if self._filesystem.exists(self._pid_file):
self._filesystem.remove(self._pid_file)
def _check_and_kill(self):
if self._executive.check_running_pid(self._pid):
host = self._port_obj.host
if host.platform.is_win() and not host.platform.is_cygwin():
# FIXME: https://bugs.webkit.org/show_bug.cgi?id=106838
# We need to kill all of the child processes as well as the
# parent, so we can't use executive.kill_process().
#
# If this is actually working, we should figure out a clean API.
self._executive.run_command(["taskkill.exe", "/f", "/t", "/pid", self._pid], error_handler=self._executive.ignore_error)
else:
self._executive.kill_process(self._pid)
return False
return True
| bsd-3-clause |
jiangzhuo/kbengine | kbe/res/scripts/common/Lib/test/test_email/test_utils.py | 87 | 5422 | import datetime
from email import utils
import test.support
import time
import unittest
import sys
import os.path
class DateTimeTests(unittest.TestCase):
datestring = 'Sun, 23 Sep 2001 20:10:55'
dateargs = (2001, 9, 23, 20, 10, 55)
offsetstring = ' -0700'
utcoffset = datetime.timedelta(hours=-7)
tz = datetime.timezone(utcoffset)
naive_dt = datetime.datetime(*dateargs)
aware_dt = datetime.datetime(*dateargs, tzinfo=tz)
def test_naive_datetime(self):
self.assertEqual(utils.format_datetime(self.naive_dt),
self.datestring + ' -0000')
def test_aware_datetime(self):
self.assertEqual(utils.format_datetime(self.aware_dt),
self.datestring + self.offsetstring)
def test_usegmt(self):
utc_dt = datetime.datetime(*self.dateargs,
tzinfo=datetime.timezone.utc)
self.assertEqual(utils.format_datetime(utc_dt, usegmt=True),
self.datestring + ' GMT')
def test_usegmt_with_naive_datetime_raises(self):
with self.assertRaises(ValueError):
utils.format_datetime(self.naive_dt, usegmt=True)
def test_usegmt_with_non_utc_datetime_raises(self):
with self.assertRaises(ValueError):
utils.format_datetime(self.aware_dt, usegmt=True)
def test_parsedate_to_datetime(self):
self.assertEqual(
utils.parsedate_to_datetime(self.datestring + self.offsetstring),
self.aware_dt)
def test_parsedate_to_datetime_naive(self):
self.assertEqual(
utils.parsedate_to_datetime(self.datestring + ' -0000'),
self.naive_dt)
class LocaltimeTests(unittest.TestCase):
def test_localtime_is_tz_aware_daylight_true(self):
test.support.patch(self, time, 'daylight', True)
t = utils.localtime()
self.assertIsNotNone(t.tzinfo)
def test_localtime_is_tz_aware_daylight_false(self):
test.support.patch(self, time, 'daylight', False)
t = utils.localtime()
self.assertIsNotNone(t.tzinfo)
def test_localtime_daylight_true_dst_false(self):
test.support.patch(self, time, 'daylight', True)
t0 = datetime.datetime(2012, 3, 12, 1, 1)
t1 = utils.localtime(t0, isdst=-1)
t2 = utils.localtime(t1)
self.assertEqual(t1, t2)
def test_localtime_daylight_false_dst_false(self):
test.support.patch(self, time, 'daylight', False)
t0 = datetime.datetime(2012, 3, 12, 1, 1)
t1 = utils.localtime(t0, isdst=-1)
t2 = utils.localtime(t1)
self.assertEqual(t1, t2)
def test_localtime_daylight_true_dst_true(self):
test.support.patch(self, time, 'daylight', True)
t0 = datetime.datetime(2012, 3, 12, 1, 1)
t1 = utils.localtime(t0, isdst=1)
t2 = utils.localtime(t1)
self.assertEqual(t1, t2)
def test_localtime_daylight_false_dst_true(self):
test.support.patch(self, time, 'daylight', False)
t0 = datetime.datetime(2012, 3, 12, 1, 1)
t1 = utils.localtime(t0, isdst=1)
t2 = utils.localtime(t1)
self.assertEqual(t1, t2)
@test.support.run_with_tz('EST+05EDT,M3.2.0,M11.1.0')
def test_localtime_epoch_utc_daylight_true(self):
test.support.patch(self, time, 'daylight', True)
t0 = datetime.datetime(1990, 1, 1, tzinfo = datetime.timezone.utc)
t1 = utils.localtime(t0)
t2 = t0 - datetime.timedelta(hours=5)
t2 = t2.replace(tzinfo = datetime.timezone(datetime.timedelta(hours=-5)))
self.assertEqual(t1, t2)
@test.support.run_with_tz('EST+05EDT,M3.2.0,M11.1.0')
def test_localtime_epoch_utc_daylight_false(self):
test.support.patch(self, time, 'daylight', False)
t0 = datetime.datetime(1990, 1, 1, tzinfo = datetime.timezone.utc)
t1 = utils.localtime(t0)
t2 = t0 - datetime.timedelta(hours=5)
t2 = t2.replace(tzinfo = datetime.timezone(datetime.timedelta(hours=-5)))
self.assertEqual(t1, t2)
def test_localtime_epoch_notz_daylight_true(self):
test.support.patch(self, time, 'daylight', True)
t0 = datetime.datetime(1990, 1, 1)
t1 = utils.localtime(t0)
t2 = utils.localtime(t0.replace(tzinfo=None))
self.assertEqual(t1, t2)
def test_localtime_epoch_notz_daylight_false(self):
test.support.patch(self, time, 'daylight', False)
t0 = datetime.datetime(1990, 1, 1)
t1 = utils.localtime(t0)
t2 = utils.localtime(t0.replace(tzinfo=None))
self.assertEqual(t1, t2)
# XXX: Need a more robust test for Olson's tzdata
@unittest.skipIf(sys.platform.startswith('win'),
"Windows does not use Olson's TZ database")
@unittest.skipUnless(os.path.exists('/usr/share/zoneinfo') or
os.path.exists('/usr/lib/zoneinfo'),
"Can't find the Olson's TZ database")
@test.support.run_with_tz('Europe/Kiev')
def test_variable_tzname(self):
t0 = datetime.datetime(1984, 1, 1, tzinfo=datetime.timezone.utc)
t1 = utils.localtime(t0)
self.assertEqual(t1.tzname(), 'MSK')
t0 = datetime.datetime(1994, 1, 1, tzinfo=datetime.timezone.utc)
t1 = utils.localtime(t0)
self.assertEqual(t1.tzname(), 'EET')
if __name__ == '__main__':
unittest.main()
| lgpl-3.0 |
Goamaral/SCC | inputWindow.py | 1 | 31922 | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'inputWindow.ui'
#
# Created by: PyQt4 UI code generator 4.11.4
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
def _fromUtf8(s):
return s
try:
_encoding = QtGui.QApplication.UnicodeUTF8
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig)
class Ui_MainWindow(object):
def setupUi(self, MainWindow):
MainWindow.setObjectName(_fromUtf8("MainWindow"))
MainWindow.resize(708, 428)
self.centralwidget = QtGui.QWidget(MainWindow)
self.centralwidget.setObjectName(_fromUtf8("centralwidget"))
self.verticalLayout_4 = QtGui.QVBoxLayout(self.centralwidget)
self.verticalLayout_4.setObjectName(_fromUtf8("verticalLayout_4"))
self.List = QtGui.QVBoxLayout()
self.List.setObjectName(_fromUtf8("List"))
self.listItem_3 = QtGui.QWidget(self.centralwidget)
self.listItem_3.setMinimumSize(QtCore.QSize(0, 0))
self.listItem_3.setMaximumSize(QtCore.QSize(10000, 100))
self.listItem_3.setObjectName(_fromUtf8("listItem_3"))
self.horizontalLayout_5 = QtGui.QHBoxLayout(self.listItem_3)
self.horizontalLayout_5.setObjectName(_fromUtf8("horizontalLayout_5"))
self.nameLabel_3 = QtGui.QLabel(self.listItem_3)
self.nameLabel_3.setMinimumSize(QtCore.QSize(125, 0))
self.nameLabel_3.setMaximumSize(QtCore.QSize(75, 16777215))
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.nameLabel_3.setFont(font)
self.nameLabel_3.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.nameLabel_3.setObjectName(_fromUtf8("nameLabel_3"))
self.horizontalLayout_5.addWidget(self.nameLabel_3)
self.nameLabel_27 = QtGui.QLabel(self.listItem_3)
self.nameLabel_27.setMinimumSize(QtCore.QSize(100, 0))
self.nameLabel_27.setMaximumSize(QtCore.QSize(100, 16777215))
font = QtGui.QFont()
font.setBold(False)
font.setWeight(50)
self.nameLabel_27.setFont(font)
self.nameLabel_27.setAlignment(QtCore.Qt.AlignCenter)
self.nameLabel_27.setObjectName(_fromUtf8("nameLabel_27"))
self.horizontalLayout_5.addWidget(self.nameLabel_27)
self.mediaChegadaA = QtGui.QLineEdit(self.listItem_3)
self.mediaChegadaA.setMinimumSize(QtCore.QSize(50, 25))
self.mediaChegadaA.setMaximumSize(QtCore.QSize(50, 25))
self.mediaChegadaA.setText(_fromUtf8(""))
self.mediaChegadaA.setObjectName(_fromUtf8("mediaChegadaA"))
self.horizontalLayout_5.addWidget(self.mediaChegadaA)
spacerItem = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.horizontalLayout_5.addItem(spacerItem)
self.List.addWidget(self.listItem_3)
self.listItem_6 = QtGui.QWidget(self.centralwidget)
self.listItem_6.setMinimumSize(QtCore.QSize(0, 0))
self.listItem_6.setMaximumSize(QtCore.QSize(10000, 100))
self.listItem_6.setObjectName(_fromUtf8("listItem_6"))
self.horizontalLayout_7 = QtGui.QHBoxLayout(self.listItem_6)
self.horizontalLayout_7.setObjectName(_fromUtf8("horizontalLayout_7"))
self.nameLabel_7 = QtGui.QLabel(self.listItem_6)
self.nameLabel_7.setMinimumSize(QtCore.QSize(125, 0))
self.nameLabel_7.setMaximumSize(QtCore.QSize(75, 16777215))
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.nameLabel_7.setFont(font)
self.nameLabel_7.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.nameLabel_7.setObjectName(_fromUtf8("nameLabel_7"))
self.horizontalLayout_7.addWidget(self.nameLabel_7)
self.nameLabel_8 = QtGui.QLabel(self.listItem_6)
self.nameLabel_8.setMinimumSize(QtCore.QSize(100, 0))
self.nameLabel_8.setMaximumSize(QtCore.QSize(100, 16777215))
font = QtGui.QFont()
font.setBold(False)
font.setWeight(50)
self.nameLabel_8.setFont(font)
self.nameLabel_8.setAlignment(QtCore.Qt.AlignCenter)
self.nameLabel_8.setObjectName(_fromUtf8("nameLabel_8"))
self.horizontalLayout_7.addWidget(self.nameLabel_8)
self.mediaPerfuracaoA = QtGui.QLineEdit(self.listItem_6)
self.mediaPerfuracaoA.setMinimumSize(QtCore.QSize(50, 25))
self.mediaPerfuracaoA.setMaximumSize(QtCore.QSize(50, 25))
self.mediaPerfuracaoA.setText(_fromUtf8(""))
self.mediaPerfuracaoA.setObjectName(_fromUtf8("mediaPerfuracaoA"))
self.horizontalLayout_7.addWidget(self.mediaPerfuracaoA)
self.nameLabel_9 = QtGui.QLabel(self.listItem_6)
self.nameLabel_9.setMinimumSize(QtCore.QSize(100, 0))
self.nameLabel_9.setMaximumSize(QtCore.QSize(100, 16777215))
font = QtGui.QFont()
font.setBold(False)
font.setWeight(50)
self.nameLabel_9.setFont(font)
self.nameLabel_9.setAlignment(QtCore.Qt.AlignCenter)
self.nameLabel_9.setObjectName(_fromUtf8("nameLabel_9"))
self.horizontalLayout_7.addWidget(self.nameLabel_9)
self.desvioPerfuracaoA = QtGui.QLineEdit(self.listItem_6)
self.desvioPerfuracaoA.setMinimumSize(QtCore.QSize(50, 25))
self.desvioPerfuracaoA.setMaximumSize(QtCore.QSize(50, 25))
self.desvioPerfuracaoA.setText(_fromUtf8(""))
self.desvioPerfuracaoA.setObjectName(_fromUtf8("desvioPerfuracaoA"))
self.horizontalLayout_7.addWidget(self.desvioPerfuracaoA)
self.nameLabel_10 = QtGui.QLabel(self.listItem_6)
self.nameLabel_10.setMinimumSize(QtCore.QSize(100, 0))
self.nameLabel_10.setMaximumSize(QtCore.QSize(100, 16777215))
font = QtGui.QFont()
font.setBold(False)
font.setWeight(50)
self.nameLabel_10.setFont(font)
self.nameLabel_10.setAlignment(QtCore.Qt.AlignCenter)
self.nameLabel_10.setObjectName(_fromUtf8("nameLabel_10"))
self.horizontalLayout_7.addWidget(self.nameLabel_10)
self.nMaquinasPerfuracaoA = QtGui.QLineEdit(self.listItem_6)
self.nMaquinasPerfuracaoA.setMinimumSize(QtCore.QSize(50, 25))
self.nMaquinasPerfuracaoA.setMaximumSize(QtCore.QSize(50, 25))
self.nMaquinasPerfuracaoA.setText(_fromUtf8(""))
self.nMaquinasPerfuracaoA.setObjectName(_fromUtf8("nMaquinasPerfuracaoA"))
self.horizontalLayout_7.addWidget(self.nMaquinasPerfuracaoA)
spacerItem1 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.horizontalLayout_7.addItem(spacerItem1)
self.List.addWidget(self.listItem_6)
self.listItem_7 = QtGui.QWidget(self.centralwidget)
self.listItem_7.setMinimumSize(QtCore.QSize(0, 0))
self.listItem_7.setMaximumSize(QtCore.QSize(10000, 100))
self.listItem_7.setObjectName(_fromUtf8("listItem_7"))
self.horizontalLayout_8 = QtGui.QHBoxLayout(self.listItem_7)
self.horizontalLayout_8.setObjectName(_fromUtf8("horizontalLayout_8"))
self.nameLabel_11 = QtGui.QLabel(self.listItem_7)
self.nameLabel_11.setMinimumSize(QtCore.QSize(125, 0))
self.nameLabel_11.setMaximumSize(QtCore.QSize(75, 16777215))
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.nameLabel_11.setFont(font)
self.nameLabel_11.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.nameLabel_11.setObjectName(_fromUtf8("nameLabel_11"))
self.horizontalLayout_8.addWidget(self.nameLabel_11)
self.nameLabel_12 = QtGui.QLabel(self.listItem_7)
self.nameLabel_12.setMinimumSize(QtCore.QSize(100, 0))
self.nameLabel_12.setMaximumSize(QtCore.QSize(100, 16777215))
font = QtGui.QFont()
font.setBold(False)
font.setWeight(50)
self.nameLabel_12.setFont(font)
self.nameLabel_12.setAlignment(QtCore.Qt.AlignCenter)
self.nameLabel_12.setObjectName(_fromUtf8("nameLabel_12"))
self.horizontalLayout_8.addWidget(self.nameLabel_12)
self.mediaPolimentoA = QtGui.QLineEdit(self.listItem_7)
self.mediaPolimentoA.setMinimumSize(QtCore.QSize(50, 25))
self.mediaPolimentoA.setMaximumSize(QtCore.QSize(50, 25))
self.mediaPolimentoA.setText(_fromUtf8(""))
self.mediaPolimentoA.setObjectName(_fromUtf8("mediaPolimentoA"))
self.horizontalLayout_8.addWidget(self.mediaPolimentoA)
self.nameLabel_13 = QtGui.QLabel(self.listItem_7)
self.nameLabel_13.setMinimumSize(QtCore.QSize(100, 0))
self.nameLabel_13.setMaximumSize(QtCore.QSize(100, 16777215))
font = QtGui.QFont()
font.setBold(False)
font.setWeight(50)
self.nameLabel_13.setFont(font)
self.nameLabel_13.setAlignment(QtCore.Qt.AlignCenter)
self.nameLabel_13.setObjectName(_fromUtf8("nameLabel_13"))
self.horizontalLayout_8.addWidget(self.nameLabel_13)
self.desvioPolimentoA = QtGui.QLineEdit(self.listItem_7)
self.desvioPolimentoA.setMinimumSize(QtCore.QSize(50, 25))
self.desvioPolimentoA.setMaximumSize(QtCore.QSize(50, 25))
self.desvioPolimentoA.setText(_fromUtf8(""))
self.desvioPolimentoA.setObjectName(_fromUtf8("desvioPolimentoA"))
self.horizontalLayout_8.addWidget(self.desvioPolimentoA)
self.nameLabel_14 = QtGui.QLabel(self.listItem_7)
self.nameLabel_14.setMinimumSize(QtCore.QSize(100, 0))
self.nameLabel_14.setMaximumSize(QtCore.QSize(100, 16777215))
font = QtGui.QFont()
font.setBold(False)
font.setWeight(50)
self.nameLabel_14.setFont(font)
self.nameLabel_14.setAlignment(QtCore.Qt.AlignCenter)
self.nameLabel_14.setObjectName(_fromUtf8("nameLabel_14"))
self.horizontalLayout_8.addWidget(self.nameLabel_14)
self.nMaquinasPolimentoA = QtGui.QLineEdit(self.listItem_7)
self.nMaquinasPolimentoA.setMinimumSize(QtCore.QSize(50, 25))
self.nMaquinasPolimentoA.setMaximumSize(QtCore.QSize(50, 25))
self.nMaquinasPolimentoA.setText(_fromUtf8(""))
self.nMaquinasPolimentoA.setObjectName(_fromUtf8("nMaquinasPolimentoA"))
self.horizontalLayout_8.addWidget(self.nMaquinasPolimentoA)
spacerItem2 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.horizontalLayout_8.addItem(spacerItem2)
self.List.addWidget(self.listItem_7)
self.line_2 = QtGui.QFrame(self.centralwidget)
self.line_2.setMinimumSize(QtCore.QSize(5, 0))
font = QtGui.QFont()
font.setBold(False)
font.setWeight(50)
self.line_2.setFont(font)
self.line_2.setFrameShape(QtGui.QFrame.HLine)
self.line_2.setFrameShadow(QtGui.QFrame.Sunken)
self.line_2.setObjectName(_fromUtf8("line_2"))
self.List.addWidget(self.line_2)
self.listItem_4 = QtGui.QWidget(self.centralwidget)
self.listItem_4.setMinimumSize(QtCore.QSize(0, 0))
self.listItem_4.setMaximumSize(QtCore.QSize(10000, 100))
self.listItem_4.setObjectName(_fromUtf8("listItem_4"))
self.horizontalLayout_6 = QtGui.QHBoxLayout(self.listItem_4)
self.horizontalLayout_6.setObjectName(_fromUtf8("horizontalLayout_6"))
self.nameLabel_4 = QtGui.QLabel(self.listItem_4)
self.nameLabel_4.setMinimumSize(QtCore.QSize(125, 0))
self.nameLabel_4.setMaximumSize(QtCore.QSize(75, 16777215))
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.nameLabel_4.setFont(font)
self.nameLabel_4.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.nameLabel_4.setObjectName(_fromUtf8("nameLabel_4"))
self.horizontalLayout_6.addWidget(self.nameLabel_4)
self.nameLabel_31 = QtGui.QLabel(self.listItem_4)
self.nameLabel_31.setMinimumSize(QtCore.QSize(100, 0))
self.nameLabel_31.setMaximumSize(QtCore.QSize(100, 16777215))
font = QtGui.QFont()
font.setBold(False)
font.setWeight(50)
self.nameLabel_31.setFont(font)
self.nameLabel_31.setAlignment(QtCore.Qt.AlignCenter)
self.nameLabel_31.setObjectName(_fromUtf8("nameLabel_31"))
self.horizontalLayout_6.addWidget(self.nameLabel_31)
self.mediaChegadaB = QtGui.QLineEdit(self.listItem_4)
self.mediaChegadaB.setMinimumSize(QtCore.QSize(50, 25))
self.mediaChegadaB.setMaximumSize(QtCore.QSize(50, 25))
self.mediaChegadaB.setText(_fromUtf8(""))
self.mediaChegadaB.setObjectName(_fromUtf8("mediaChegadaB"))
self.horizontalLayout_6.addWidget(self.mediaChegadaB)
spacerItem3 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.horizontalLayout_6.addItem(spacerItem3)
self.List.addWidget(self.listItem_4)
self.listItem_9 = QtGui.QWidget(self.centralwidget)
self.listItem_9.setMinimumSize(QtCore.QSize(0, 0))
self.listItem_9.setMaximumSize(QtCore.QSize(10000, 100))
self.listItem_9.setObjectName(_fromUtf8("listItem_9"))
self.horizontalLayout_13 = QtGui.QHBoxLayout(self.listItem_9)
self.horizontalLayout_13.setObjectName(_fromUtf8("horizontalLayout_13"))
self.nameLabel_36 = QtGui.QLabel(self.listItem_9)
self.nameLabel_36.setMinimumSize(QtCore.QSize(125, 0))
self.nameLabel_36.setMaximumSize(QtCore.QSize(75, 16777215))
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.nameLabel_36.setFont(font)
self.nameLabel_36.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.nameLabel_36.setObjectName(_fromUtf8("nameLabel_36"))
self.horizontalLayout_13.addWidget(self.nameLabel_36)
self.nameLabel_37 = QtGui.QLabel(self.listItem_9)
self.nameLabel_37.setMinimumSize(QtCore.QSize(100, 0))
self.nameLabel_37.setMaximumSize(QtCore.QSize(100, 16777215))
font = QtGui.QFont()
font.setBold(False)
font.setWeight(50)
self.nameLabel_37.setFont(font)
self.nameLabel_37.setAlignment(QtCore.Qt.AlignCenter)
self.nameLabel_37.setObjectName(_fromUtf8("nameLabel_37"))
self.horizontalLayout_13.addWidget(self.nameLabel_37)
self.mediaPerfuracaoB = QtGui.QLineEdit(self.listItem_9)
self.mediaPerfuracaoB.setMinimumSize(QtCore.QSize(50, 25))
self.mediaPerfuracaoB.setMaximumSize(QtCore.QSize(50, 25))
self.mediaPerfuracaoB.setText(_fromUtf8(""))
self.mediaPerfuracaoB.setObjectName(_fromUtf8("mediaPerfuracaoB"))
self.horizontalLayout_13.addWidget(self.mediaPerfuracaoB)
self.nameLabel_38 = QtGui.QLabel(self.listItem_9)
self.nameLabel_38.setMinimumSize(QtCore.QSize(100, 0))
self.nameLabel_38.setMaximumSize(QtCore.QSize(100, 16777215))
font = QtGui.QFont()
font.setBold(False)
font.setWeight(50)
self.nameLabel_38.setFont(font)
self.nameLabel_38.setAlignment(QtCore.Qt.AlignCenter)
self.nameLabel_38.setObjectName(_fromUtf8("nameLabel_38"))
self.horizontalLayout_13.addWidget(self.nameLabel_38)
self.desvioPerfuracaoB = QtGui.QLineEdit(self.listItem_9)
self.desvioPerfuracaoB.setMinimumSize(QtCore.QSize(50, 25))
self.desvioPerfuracaoB.setMaximumSize(QtCore.QSize(50, 25))
self.desvioPerfuracaoB.setText(_fromUtf8(""))
self.desvioPerfuracaoB.setObjectName(_fromUtf8("desvioPerfuracaoB"))
self.horizontalLayout_13.addWidget(self.desvioPerfuracaoB)
self.nameLabel_39 = QtGui.QLabel(self.listItem_9)
self.nameLabel_39.setMinimumSize(QtCore.QSize(100, 0))
self.nameLabel_39.setMaximumSize(QtCore.QSize(100, 16777215))
font = QtGui.QFont()
font.setBold(False)
font.setWeight(50)
self.nameLabel_39.setFont(font)
self.nameLabel_39.setAlignment(QtCore.Qt.AlignCenter)
self.nameLabel_39.setObjectName(_fromUtf8("nameLabel_39"))
self.horizontalLayout_13.addWidget(self.nameLabel_39)
self.nMaquinasPerfuracaoB = QtGui.QLineEdit(self.listItem_9)
self.nMaquinasPerfuracaoB.setMinimumSize(QtCore.QSize(50, 25))
self.nMaquinasPerfuracaoB.setMaximumSize(QtCore.QSize(50, 25))
self.nMaquinasPerfuracaoB.setText(_fromUtf8(""))
self.nMaquinasPerfuracaoB.setObjectName(_fromUtf8("nMaquinasPerfuracaoB"))
self.horizontalLayout_13.addWidget(self.nMaquinasPerfuracaoB)
spacerItem4 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.horizontalLayout_13.addItem(spacerItem4)
self.List.addWidget(self.listItem_9)
self.listItem_8 = QtGui.QWidget(self.centralwidget)
self.listItem_8.setMinimumSize(QtCore.QSize(0, 0))
self.listItem_8.setMaximumSize(QtCore.QSize(10000, 100))
self.listItem_8.setObjectName(_fromUtf8("listItem_8"))
self.horizontalLayout_10 = QtGui.QHBoxLayout(self.listItem_8)
self.horizontalLayout_10.setObjectName(_fromUtf8("horizontalLayout_10"))
self.nameLabel_19 = QtGui.QLabel(self.listItem_8)
self.nameLabel_19.setMinimumSize(QtCore.QSize(125, 0))
self.nameLabel_19.setMaximumSize(QtCore.QSize(75, 16777215))
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.nameLabel_19.setFont(font)
self.nameLabel_19.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.nameLabel_19.setObjectName(_fromUtf8("nameLabel_19"))
self.horizontalLayout_10.addWidget(self.nameLabel_19)
self.nameLabel_20 = QtGui.QLabel(self.listItem_8)
self.nameLabel_20.setMinimumSize(QtCore.QSize(100, 0))
self.nameLabel_20.setMaximumSize(QtCore.QSize(100, 16777215))
font = QtGui.QFont()
font.setBold(False)
font.setWeight(50)
self.nameLabel_20.setFont(font)
self.nameLabel_20.setAlignment(QtCore.Qt.AlignCenter)
self.nameLabel_20.setObjectName(_fromUtf8("nameLabel_20"))
self.horizontalLayout_10.addWidget(self.nameLabel_20)
self.mediaPolimentoB = QtGui.QLineEdit(self.listItem_8)
self.mediaPolimentoB.setMinimumSize(QtCore.QSize(50, 25))
self.mediaPolimentoB.setMaximumSize(QtCore.QSize(50, 25))
self.mediaPolimentoB.setText(_fromUtf8(""))
self.mediaPolimentoB.setObjectName(_fromUtf8("mediaPolimentoB"))
self.horizontalLayout_10.addWidget(self.mediaPolimentoB)
self.nameLabel_21 = QtGui.QLabel(self.listItem_8)
self.nameLabel_21.setMinimumSize(QtCore.QSize(100, 0))
self.nameLabel_21.setMaximumSize(QtCore.QSize(100, 16777215))
font = QtGui.QFont()
font.setBold(False)
font.setWeight(50)
self.nameLabel_21.setFont(font)
self.nameLabel_21.setAlignment(QtCore.Qt.AlignCenter)
self.nameLabel_21.setObjectName(_fromUtf8("nameLabel_21"))
self.horizontalLayout_10.addWidget(self.nameLabel_21)
self.desvioPolimentoB = QtGui.QLineEdit(self.listItem_8)
self.desvioPolimentoB.setMinimumSize(QtCore.QSize(50, 25))
self.desvioPolimentoB.setMaximumSize(QtCore.QSize(50, 25))
self.desvioPolimentoB.setText(_fromUtf8(""))
self.desvioPolimentoB.setObjectName(_fromUtf8("desvioPolimentoB"))
self.horizontalLayout_10.addWidget(self.desvioPolimentoB)
self.nameLabel_22 = QtGui.QLabel(self.listItem_8)
self.nameLabel_22.setMinimumSize(QtCore.QSize(100, 0))
self.nameLabel_22.setMaximumSize(QtCore.QSize(100, 16777215))
font = QtGui.QFont()
font.setBold(False)
font.setWeight(50)
self.nameLabel_22.setFont(font)
self.nameLabel_22.setAlignment(QtCore.Qt.AlignCenter)
self.nameLabel_22.setObjectName(_fromUtf8("nameLabel_22"))
self.horizontalLayout_10.addWidget(self.nameLabel_22)
self.nMaquinasPolimentoB = QtGui.QLineEdit(self.listItem_8)
self.nMaquinasPolimentoB.setMinimumSize(QtCore.QSize(50, 25))
self.nMaquinasPolimentoB.setMaximumSize(QtCore.QSize(50, 25))
self.nMaquinasPolimentoB.setText(_fromUtf8(""))
self.nMaquinasPolimentoB.setObjectName(_fromUtf8("nMaquinasPolimentoB"))
self.horizontalLayout_10.addWidget(self.nMaquinasPolimentoB)
spacerItem5 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.horizontalLayout_10.addItem(spacerItem5)
self.List.addWidget(self.listItem_8)
self.line = QtGui.QFrame(self.centralwidget)
self.line.setMinimumSize(QtCore.QSize(0, 5))
self.line.setFrameShape(QtGui.QFrame.HLine)
self.line.setFrameShadow(QtGui.QFrame.Sunken)
self.line.setObjectName(_fromUtf8("line"))
self.List.addWidget(self.line)
self.listItem_11 = QtGui.QWidget(self.centralwidget)
self.listItem_11.setMinimumSize(QtCore.QSize(0, 0))
self.listItem_11.setMaximumSize(QtCore.QSize(10000, 100))
self.listItem_11.setObjectName(_fromUtf8("listItem_11"))
self.horizontalLayout_12 = QtGui.QHBoxLayout(self.listItem_11)
self.horizontalLayout_12.setObjectName(_fromUtf8("horizontalLayout_12"))
self.nameLabel_23 = QtGui.QLabel(self.listItem_11)
self.nameLabel_23.setMinimumSize(QtCore.QSize(125, 0))
self.nameLabel_23.setMaximumSize(QtCore.QSize(125, 16777215))
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.nameLabel_23.setFont(font)
self.nameLabel_23.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.nameLabel_23.setObjectName(_fromUtf8("nameLabel_23"))
self.horizontalLayout_12.addWidget(self.nameLabel_23)
self.nameLabel_24 = QtGui.QLabel(self.listItem_11)
self.nameLabel_24.setMinimumSize(QtCore.QSize(100, 0))
self.nameLabel_24.setMaximumSize(QtCore.QSize(100, 16777215))
font = QtGui.QFont()
font.setBold(False)
font.setWeight(50)
self.nameLabel_24.setFont(font)
self.nameLabel_24.setAlignment(QtCore.Qt.AlignCenter)
self.nameLabel_24.setObjectName(_fromUtf8("nameLabel_24"))
self.horizontalLayout_12.addWidget(self.nameLabel_24)
self.mediaEnvernizamento = QtGui.QLineEdit(self.listItem_11)
self.mediaEnvernizamento.setMinimumSize(QtCore.QSize(50, 25))
self.mediaEnvernizamento.setMaximumSize(QtCore.QSize(50, 25))
self.mediaEnvernizamento.setText(_fromUtf8(""))
self.mediaEnvernizamento.setObjectName(_fromUtf8("mediaEnvernizamento"))
self.horizontalLayout_12.addWidget(self.mediaEnvernizamento)
self.nameLabel_25 = QtGui.QLabel(self.listItem_11)
self.nameLabel_25.setMinimumSize(QtCore.QSize(100, 0))
self.nameLabel_25.setMaximumSize(QtCore.QSize(100, 16777215))
font = QtGui.QFont()
font.setBold(False)
font.setWeight(50)
self.nameLabel_25.setFont(font)
self.nameLabel_25.setAlignment(QtCore.Qt.AlignCenter)
self.nameLabel_25.setObjectName(_fromUtf8("nameLabel_25"))
self.horizontalLayout_12.addWidget(self.nameLabel_25)
self.desvioEnvernizamento = QtGui.QLineEdit(self.listItem_11)
self.desvioEnvernizamento.setMinimumSize(QtCore.QSize(50, 25))
self.desvioEnvernizamento.setMaximumSize(QtCore.QSize(50, 25))
self.desvioEnvernizamento.setText(_fromUtf8(""))
self.desvioEnvernizamento.setObjectName(_fromUtf8("desvioEnvernizamento"))
self.horizontalLayout_12.addWidget(self.desvioEnvernizamento)
self.nameLabel_26 = QtGui.QLabel(self.listItem_11)
self.nameLabel_26.setMinimumSize(QtCore.QSize(100, 0))
self.nameLabel_26.setMaximumSize(QtCore.QSize(100, 16777215))
font = QtGui.QFont()
font.setBold(False)
font.setWeight(50)
self.nameLabel_26.setFont(font)
self.nameLabel_26.setAlignment(QtCore.Qt.AlignCenter)
self.nameLabel_26.setObjectName(_fromUtf8("nameLabel_26"))
self.horizontalLayout_12.addWidget(self.nameLabel_26)
self.nMaquinasEnvernizamento = QtGui.QLineEdit(self.listItem_11)
self.nMaquinasEnvernizamento.setMinimumSize(QtCore.QSize(50, 25))
self.nMaquinasEnvernizamento.setMaximumSize(QtCore.QSize(50, 25))
self.nMaquinasEnvernizamento.setText(_fromUtf8(""))
self.nMaquinasEnvernizamento.setObjectName(_fromUtf8("nMaquinasEnvernizamento"))
self.horizontalLayout_12.addWidget(self.nMaquinasEnvernizamento)
spacerItem6 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.horizontalLayout_12.addItem(spacerItem6)
self.List.addWidget(self.listItem_11)
self.verticalLayout_4.addLayout(self.List)
self.footer = QtGui.QWidget(self.centralwidget)
self.footer.setMaximumSize(QtCore.QSize(100000, 50))
self.footer.setObjectName(_fromUtf8("footer"))
self.horizontalLayout = QtGui.QHBoxLayout(self.footer)
self.horizontalLayout.setObjectName(_fromUtf8("horizontalLayout"))
self.nameLabel_30 = QtGui.QLabel(self.footer)
self.nameLabel_30.setMinimumSize(QtCore.QSize(130, 0))
self.nameLabel_30.setMaximumSize(QtCore.QSize(130, 16777215))
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.nameLabel_30.setFont(font)
self.nameLabel_30.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.nameLabel_30.setObjectName(_fromUtf8("nameLabel_30"))
self.horizontalLayout.addWidget(self.nameLabel_30)
self.tipoLimite = QtGui.QComboBox(self.footer)
self.tipoLimite.setMinimumSize(QtCore.QSize(125, 0))
self.tipoLimite.setMaximumSize(QtCore.QSize(125, 16777215))
self.tipoLimite.setObjectName(_fromUtf8("tipoLimite"))
self.tipoLimite.addItem(_fromUtf8(""))
self.tipoLimite.addItem(_fromUtf8(""))
self.horizontalLayout.addWidget(self.tipoLimite)
self.nameLabel_28 = QtGui.QLabel(self.footer)
self.nameLabel_28.setMinimumSize(QtCore.QSize(50, 0))
self.nameLabel_28.setMaximumSize(QtCore.QSize(50, 16777215))
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.nameLabel_28.setFont(font)
self.nameLabel_28.setAlignment(QtCore.Qt.AlignCenter)
self.nameLabel_28.setObjectName(_fromUtf8("nameLabel_28"))
self.horizontalLayout.addWidget(self.nameLabel_28)
self.valorLimite = QtGui.QLineEdit(self.footer)
self.valorLimite.setMinimumSize(QtCore.QSize(75, 25))
self.valorLimite.setMaximumSize(QtCore.QSize(75, 25))
self.valorLimite.setText(_fromUtf8(""))
self.valorLimite.setObjectName(_fromUtf8("valorLimite"))
self.horizontalLayout.addWidget(self.valorLimite)
spacerItem7 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.horizontalLayout.addItem(spacerItem7)
self.nameLabel_29 = QtGui.QLabel(self.footer)
self.nameLabel_29.setMinimumSize(QtCore.QSize(100, 0))
self.nameLabel_29.setMaximumSize(QtCore.QSize(100, 16777215))
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.nameLabel_29.setFont(font)
self.nameLabel_29.setAlignment(QtCore.Qt.AlignCenter)
self.nameLabel_29.setObjectName(_fromUtf8("nameLabel_29"))
self.horizontalLayout.addWidget(self.nameLabel_29)
self.nRepeticoes = QtGui.QLineEdit(self.footer)
self.nRepeticoes.setMinimumSize(QtCore.QSize(50, 25))
self.nRepeticoes.setMaximumSize(QtCore.QSize(50, 25))
self.nRepeticoes.setText(_fromUtf8(""))
self.nRepeticoes.setObjectName(_fromUtf8("nRepeticoes"))
self.horizontalLayout.addWidget(self.nRepeticoes)
self.botaoSimular = QtGui.QPushButton(self.footer)
self.botaoSimular.setMinimumSize(QtCore.QSize(100, 25))
self.botaoSimular.setMaximumSize(QtCore.QSize(100, 25))
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.botaoSimular.setFont(font)
self.botaoSimular.setLayoutDirection(QtCore.Qt.RightToLeft)
self.botaoSimular.setAutoFillBackground(False)
self.botaoSimular.setStyleSheet(_fromUtf8(""))
self.botaoSimular.setFlat(False)
self.botaoSimular.setObjectName(_fromUtf8("botaoSimular"))
self.horizontalLayout.addWidget(self.botaoSimular)
self.verticalLayout_4.addWidget(self.footer)
MainWindow.setCentralWidget(self.centralwidget)
self.retranslateUi(MainWindow)
QtCore.QMetaObject.connectSlotsByName(MainWindow)
def retranslateUi(self, MainWindow):
MainWindow.setWindowTitle(_translate("MainWindow", "Descriçao da simulaçao", None))
self.nameLabel_3.setText(_translate("MainWindow", "Peças grandes (A)", None))
self.nameLabel_27.setText(_translate("MainWindow", "Media chegada", None))
self.nameLabel_7.setText(_translate("MainWindow", "Perfuraçao", None))
self.nameLabel_8.setText(_translate("MainWindow", "Media", None))
self.nameLabel_9.setText(_translate("MainWindow", "Desvio padrao", None))
self.nameLabel_10.setText(_translate("MainWindow", "Nº maquinas", None))
self.nameLabel_11.setText(_translate("MainWindow", "Polimento", None))
self.nameLabel_12.setText(_translate("MainWindow", "Media", None))
self.nameLabel_13.setText(_translate("MainWindow", "Desvio padrao", None))
self.nameLabel_14.setText(_translate("MainWindow", "Nº maquinas", None))
self.nameLabel_4.setText(_translate("MainWindow", "Peças grandes (B)", None))
self.nameLabel_31.setText(_translate("MainWindow", "Media chegada", None))
self.nameLabel_36.setText(_translate("MainWindow", "Perfuraçao", None))
self.nameLabel_37.setText(_translate("MainWindow", "Media", None))
self.nameLabel_38.setText(_translate("MainWindow", "Desvio padrao", None))
self.nameLabel_39.setText(_translate("MainWindow", "Nº maquinas", None))
self.nameLabel_19.setText(_translate("MainWindow", "Polimento", None))
self.nameLabel_20.setText(_translate("MainWindow", "Media", None))
self.nameLabel_21.setText(_translate("MainWindow", "Desvio padrao", None))
self.nameLabel_22.setText(_translate("MainWindow", "Nº maquinas", None))
self.nameLabel_23.setText(_translate("MainWindow", "Envernizamento", None))
self.nameLabel_24.setText(_translate("MainWindow", "Media", None))
self.nameLabel_25.setText(_translate("MainWindow", "Desvio padrao", None))
self.nameLabel_26.setText(_translate("MainWindow", "Nº maquinas", None))
self.nameLabel_30.setText(_translate("MainWindow", "Limites da simulacao", None))
self.tipoLimite.setItemText(0, _translate("MainWindow", "Tempo simulacao", None))
self.tipoLimite.setItemText(1, _translate("MainWindow", "Nº Clientes", None))
self.nameLabel_28.setText(_translate("MainWindow", "Valor", None))
self.nameLabel_29.setText(_translate("MainWindow", "Nº Repeticoes", None))
self.botaoSimular.setText(_translate("MainWindow", "Simular", None))
| mit |
treejames/erpnext | erpnext/buying/doctype/quality_inspection/quality_inspection.py | 61 | 2058 | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
from frappe.model.document import Document
class QualityInspection(Document):
def get_item_specification_details(self):
self.set('readings', [])
variant_of = frappe.db.get_value("Item", self.item_code, "variant_of")
if variant_of:
specification = frappe.db.sql("select specification, value from `tabItem Quality Inspection Parameter` \
where parent in (%s, %s) order by idx", (self.item_code, variant_of))
else:
specification = frappe.db.sql("select specification, value from `tabItem Quality Inspection Parameter` \
where parent = %s order by idx", self.item_code)
for d in specification:
child = self.append('readings', {})
child.specification = d[0]
child.value = d[1]
child.status = 'Accepted'
def on_submit(self):
if self.purchase_receipt_no:
frappe.db.sql("""update `tabPurchase Receipt Item` t1, `tabPurchase Receipt` t2
set t1.qa_no = %s, t2.modified = %s
where t1.parent = %s and t1.item_code = %s and t1.parent = t2.name""",
(self.name, self.modified, self.purchase_receipt_no,
self.item_code))
def on_cancel(self):
if self.purchase_receipt_no:
frappe.db.sql("""update `tabPurchase Receipt Item` t1, `tabPurchase Receipt` t2
set t1.qa_no = '', t2.modified = %s
where t1.parent = %s and t1.item_code = %s and t1.parent = t2.name""",
(self.modified, self.purchase_receipt_no, self.item_code))
def item_query(doctype, txt, searchfield, start, page_len, filters):
if filters.get("from"):
from frappe.desk.reportview import get_match_cond
filters.update({
"txt": txt,
"mcond": get_match_cond(filters["from"]),
"start": start,
"page_len": page_len
})
return frappe.db.sql("""select item_code from `tab%(from)s`
where parent='%(parent)s' and docstatus < 2 and item_code like '%%%(txt)s%%' %(mcond)s
order by item_code limit %(start)s, %(page_len)s""" % filters)
| agpl-3.0 |
kirca/odoo | addons/point_of_sale/wizard/pos_session_opening.py | 40 | 5025 |
from openerp.osv import osv, fields
from openerp.tools.translate import _
from openerp.addons.point_of_sale.point_of_sale import pos_session
class pos_session_opening(osv.osv_memory):
_name = 'pos.session.opening'
_columns = {
'pos_config_id' : fields.many2one('pos.config', 'Point of Sale', required=True),
'pos_session_id' : fields.many2one('pos.session', 'PoS Session'),
'pos_state' : fields.related('pos_session_id', 'state',
type='selection',
selection=pos_session.POS_SESSION_STATE,
string='Session Status', readonly=True),
'pos_state_str' : fields.char('Status', 32, readonly=True),
'show_config' : fields.boolean('Show Config', readonly=True),
'pos_session_name' : fields.related('pos_session_id', 'name',
type='char', size=64, readonly=True),
'pos_session_username' : fields.related('pos_session_id', 'user_id', 'name',
type='char', size=64, readonly=True)
}
def open_ui(self, cr, uid, ids, context=None):
context = context or {}
data = self.browse(cr, uid, ids[0], context=context)
context['active_id'] = data.pos_session_id.id
return {
'type' : 'ir.actions.act_url',
'url': '/pos/web/',
'target': 'self',
}
def open_existing_session_cb_close(self, cr, uid, ids, context=None):
wizard = self.browse(cr, uid, ids[0], context=context)
self.pool.get('pos.session').signal_cashbox_control(cr, uid, [wizard.pos_session_id.id])
return self.open_session_cb(cr, uid, ids, context)
def open_session_cb(self, cr, uid, ids, context=None):
assert len(ids) == 1, "you can open only one session at a time"
proxy = self.pool.get('pos.session')
wizard = self.browse(cr, uid, ids[0], context=context)
if not wizard.pos_session_id:
values = {
'user_id' : uid,
'config_id' : wizard.pos_config_id.id,
}
session_id = proxy.create(cr, uid, values, context=context)
s = proxy.browse(cr, uid, session_id, context=context)
if s.state=='opened':
return self.open_ui(cr, uid, ids, context=context)
return self._open_session(session_id)
return self._open_session(wizard.pos_session_id.id)
def open_existing_session_cb(self, cr, uid, ids, context=None):
assert len(ids) == 1
wizard = self.browse(cr, uid, ids[0], context=context)
return self._open_session(wizard.pos_session_id.id)
def _open_session(self, session_id):
return {
'name': _('Session'),
'view_type': 'form',
'view_mode': 'form,tree',
'res_model': 'pos.session',
'res_id': session_id,
'view_id': False,
'type': 'ir.actions.act_window',
}
def on_change_config(self, cr, uid, ids, config_id, context=None):
result = {
'pos_session_id': False,
'pos_state': False,
'pos_state_str' : '',
'pos_session_username' : False,
'pos_session_name' : False,
}
if not config_id:
return {'value' : result}
proxy = self.pool.get('pos.session')
session_ids = proxy.search(cr, uid, [
('state', '!=', 'closed'),
('config_id', '=', config_id),
('user_id', '=', uid),
], context=context)
if session_ids:
session = proxy.browse(cr, uid, session_ids[0], context=context)
result['pos_state'] = str(session.state)
result['pos_state_str'] = dict(pos_session.POS_SESSION_STATE).get(session.state, '')
result['pos_session_id'] = session.id
result['pos_session_name'] = session.name
result['pos_session_username'] = session.user_id.name
return {'value' : result}
def default_get(self, cr, uid, fieldnames, context=None):
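        # Pick the default POS config in priority order: the config of the
        # user's currently open session, then the user's preferred
        # pos_config, then the first config found in the database.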
so = self.pool.get('pos.session')
session_ids = so.search(cr, uid, [('state','<>','closed'), ('user_id','=',uid)], context=context)
if session_ids:
result = so.browse(cr, uid, session_ids[0], context=context).config_id.id
else:
current_user = self.pool.get('res.users').browse(cr, uid, uid, context=context)
result = current_user.pos_config and current_user.pos_config.id or False
if not result:
r = self.pool.get('pos.config').search(cr, uid, [], context=context)
result = r and r[0] or False
count = self.pool.get('pos.config').search_count(cr, uid, [('state', '=', 'active')], context=context)
show_config = bool(count > 1)
return {
'pos_config_id' : result,
'show_config' : show_config,
}
| agpl-3.0 |
Product-Foundry/vaultier | vaultier/nodes/business/permissions.py | 3 | 1731 | from rest_framework import permissions
from accounts.models import Member
def _has_membership(user, node):
return Member.objects.filter(node=node.get_root(), user=user).exists()
def _get_membership(user, node):
if not _has_membership(user, node):
return
return Member.objects.to_node(user, node)
class NodePermission(permissions.BasePermission):
"""
    Permission checks for Nodes: GET requests scoped to a parent/node kwarg
    require read access on that node; other requests are granted here and
    enforced per-object below.
"""
def has_permission(self, request, view):
"""
Grant permission
"""
parent = view.kwargs.get('parent') or view.kwargs.get('node')
if request.method == "GET" and parent:
member = _get_membership(request.user, parent)
if not member:
return
return parent.acl.has_permission('read', member)
return True
def has_object_permission(self, request, view, obj):
"""
Grant object permission
"""
member = _get_membership(request.user, obj)
if not member:
return
if request.method == "GET":
return obj.acl.has_permission('read', member)
if request.method in ('PUT', 'PATCH', 'DELETE'):
return obj.acl.has_permission('update', member)
class PolicyPermission(NodePermission):
def has_object_permission(self, request, view, obj):
node = view.kwargs.get('node')
member = _get_membership(request.user, obj)
if not member:
return
if view.action == "retrieve":
return node.acl.has_permission('read', member)
if view.action in ('update', 'partial_update'):
return node.acl.has_permission('update', member)
| bsd-3-clause |
ennoborg/gramps | gramps/gen/plug/_pluginreg.py | 2 | 47851 | #
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2009 Benny Malengier
# Copyright (C) 2011 Tim G L Lyons
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
"""
This module provides the base class for plugin registration.
It provides an object containing data about the plugin (version, filename, ...)
and a register for the data of all plugins.
"""
#-------------------------------------------------------------------------
#
# Standard Python modules
#
#-------------------------------------------------------------------------
import os
import sys
import re
import traceback
#-------------------------------------------------------------------------
#
# Gramps modules
#
#-------------------------------------------------------------------------
from ...version import VERSION as GRAMPSVERSION, VERSION_TUPLE
from ..const import IMAGE_DIR
from ..const import GRAMPS_LOCALE as glocale
_ = glocale.translation.gettext
import logging
LOG = logging.getLogger('._manager')
#-------------------------------------------------------------------------
#
# PluginData
#
#-------------------------------------------------------------------------
#a plugin is stable or unstable
STABLE = 0
UNSTABLE = 1
STATUS = [STABLE, UNSTABLE]
STATUSTEXT = {STABLE: _('Stable'), UNSTABLE: _('Unstable')}
#possible plugin types
REPORT = 0
QUICKREPORT = 1 # deprecated
QUICKVIEW = 1
TOOL = 2
IMPORT = 3
EXPORT = 4
DOCGEN = 5
GENERAL = 6
MAPSERVICE = 7
VIEW = 8
RELCALC = 9
GRAMPLET = 10
SIDEBAR = 11
DATABASE = 12
PTYPE = [REPORT , QUICKREPORT, TOOL, IMPORT, EXPORT, DOCGEN, GENERAL,
MAPSERVICE, VIEW, RELCALC, GRAMPLET, SIDEBAR, DATABASE]
PTYPE_STR = {
REPORT: _('Report') ,
QUICKREPORT: _('Quickreport'),
TOOL: _('Tool'),
IMPORT: _('Importer'),
EXPORT: _('Exporter'),
DOCGEN: _('Doc creator'),
GENERAL: _('Plugin lib'),
MAPSERVICE: _('Map service'),
VIEW: _('Gramps View'),
RELCALC: _('Relationships'),
GRAMPLET: _('Gramplet'),
SIDEBAR: _('Sidebar'),
DATABASE: _('Database'),
}
#possible report categories
CATEGORY_TEXT = 0
CATEGORY_DRAW = 1
CATEGORY_CODE = 2
CATEGORY_WEB = 3
CATEGORY_BOOK = 4
CATEGORY_GRAPHVIZ = 5
REPORT_CAT = [ CATEGORY_TEXT, CATEGORY_DRAW, CATEGORY_CODE,
CATEGORY_WEB, CATEGORY_BOOK, CATEGORY_GRAPHVIZ]
#possible tool categories
TOOL_DEBUG = -1
TOOL_ANAL = 0
TOOL_DBPROC = 1
TOOL_DBFIX = 2
TOOL_REVCTL = 3
TOOL_UTILS = 4
TOOL_CAT = [ TOOL_DEBUG, TOOL_ANAL, TOOL_DBPROC, TOOL_DBFIX, TOOL_REVCTL,
TOOL_UTILS]
#possible quickreport categories
CATEGORY_QR_MISC = -1
CATEGORY_QR_PERSON = 0
CATEGORY_QR_FAMILY = 1
CATEGORY_QR_EVENT = 2
CATEGORY_QR_SOURCE = 3
CATEGORY_QR_PLACE = 4
CATEGORY_QR_REPOSITORY = 5
CATEGORY_QR_NOTE = 6
CATEGORY_QR_DATE = 7
CATEGORY_QR_MEDIA = 8
CATEGORY_QR_CITATION = 9
CATEGORY_QR_SOURCE_OR_CITATION = 10
# Modes for generating reports
REPORT_MODE_GUI = 1 # Standalone report using GUI
REPORT_MODE_BKI = 2 # Book Item interface using GUI
REPORT_MODE_CLI = 4 # Command line interface (CLI)
REPORT_MODES = [REPORT_MODE_GUI, REPORT_MODE_BKI, REPORT_MODE_CLI]
# Modes for running tools
TOOL_MODE_GUI = 1 # Standard tool using GUI
TOOL_MODE_CLI = 2 # Command line interface (CLI)
TOOL_MODES = [TOOL_MODE_GUI, TOOL_MODE_CLI]
# possible view orders
START = 1
END = 2
#-------------------------------------------------------------------------
#
# Functions and classes
#
#-------------------------------------------------------------------------
def myint(s):
"""
Protected version of int()
"""
try:
v = int(s)
    except (ValueError, TypeError):
v = s
return v
def version(sversion):
"""
Return the tuple version of a string version.
"""
return tuple([myint(x or "0") for x in (sversion + "..").split(".")])
def valid_plugin_version(plugin_version_string):
"""
Checks to see if string is a valid version string for this version
of Gramps.
"""
if not isinstance(plugin_version_string, str): return False
dots = plugin_version_string.count(".")
if dots == 1:
plugin_version = tuple(map(int, plugin_version_string.split(".", 1)))
return plugin_version == VERSION_TUPLE[:2]
elif dots == 2:
plugin_version = tuple(map(int, plugin_version_string.split(".", 2)))
return (plugin_version[:2] == VERSION_TUPLE[:2] and
plugin_version <= VERSION_TUPLE)
return False
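# Illustrative sketch (not part of the original source): how the version
# helpers above behave, assuming VERSION_TUPLE is e.g. (5, 1, 2).
#
#   version("5.1")      -> (5, 1, 0, 0)    # padded via the appended ".."
#   version("5.1.2")    -> (5, 1, 2, 0, 0)
#   version("5.x")      -> (5, 'x', 0, 0)  # myint() falls back to the string
#
#   valid_plugin_version("5.1")    -> True   (major.minor match)
#   valid_plugin_version("5.1.3")  -> False  (newer than Gramps itself)
#   valid_plugin_version(5.1)      -> False  (not a string)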
class PluginData:
"""
This is the base class for all plugin data objects.
The workflow is:
1. plugin manager reads all register files, and stores plugin data
objects in a plugin register
2. when plugin is needed, the plugin register creates the plugin, and
the manager stores this, after which it can be executed.
Attributes present for all plugins
.. attribute:: id
A unique identifier for the plugin. This is eg used to store the plugin
settings.
.. attribute:: name
A friendly name to call this plugin (normally translated)
    .. attribute:: name_accell
        A friendly name to call this plugin (normally translated), with an
        accelerator present (eg '_Descendant report', with D as the
        accelerator key)
.. attribute:: description
A friendly description of what the plugin does
.. attribute:: version
The version of the plugin
.. attribute:: status
The status of the plugin, STABLE or UNSTABLE
UNSTABLE is only visible in development code, not in release
.. attribute:: fname
The python file where the plugin implementation can be found
.. attribute:: fpath
The python path where the plugin implementation can be found
.. attribute:: ptype
        The plugin type. One of REPORT, QUICKREPORT, TOOL, IMPORT, EXPORT,
        DOCGEN, GENERAL, MAPSERVICE, VIEW, RELCALC, GRAMPLET, SIDEBAR,
        DATABASE
.. attribute:: authors
List of authors of the plugin, default=[]
.. attribute:: authors_email
List of emails of the authors of the plugin, default=[]
.. attribute:: supported
Bool value indicating if the plugin is still supported, default=True
    .. attribute:: load_on_reg
        bool value; if True, the plugin is loaded on Gramps startup. Only set
        this to True if the plugin must be loaded immediately on startup
        (eg for testing). default=False
    .. attribute:: icons
        New stock icons to register. A list of tuples (stock_id, icon_label),
        eg:
        [('gramps_myplugin', _('My Plugin')),
        ('gramps_myplugin_open', _('Open Plugin'))]
The icon directory must contain the directories scalable, 48x48, 22x22
and 16x16 with the icons, eg:
scalable/gramps_myplugin.svg
48x48/gramps_myplugin.png
22x22/gramps_myplugin.png
    .. attribute:: icondir
        The directory to use for the icons. If icondir is not set or None, it
        reverts to the plugin directory itself.
Attributes for RELCALC plugins:
.. attribute:: relcalcclass
The class in the module that is the relationcalc class
.. attribute:: lang_list
List of languages this plugin handles
Attributes for REPORT plugins:
.. attribute:: require_active
        Bool, whether the report requires an active person to be set
.. attribute:: reportclass
The class in the module that is the report class
.. attribute:: report_modes
        The report modes: a list of REPORT_MODE_GUI, REPORT_MODE_BKI, REPORT_MODE_CLI
Attributes for REPORT and TOOL and QUICKREPORT and VIEW plugins
.. attribute:: category
Or the report category the plugin belongs to, default=CATEGORY_TEXT
or the tool category a plugin belongs to, default=TOOL_UTILS
or the quickreport category a plugin belongs to, default=CATEGORY_QR_PERSON
or the view category a plugin belongs to,
default=("Miscellaneous", _("Miscellaneous"))
Attributes for REPORT and TOOL and DOCGEN plugins
.. attribute:: optionclass
The class in the module that is the option class
Attributes for TOOL plugins
.. attribute:: toolclass
The class in the module that is the tool class
.. attribute:: tool_modes
The tool modes: list of TOOL_MODE_GUI, TOOL_MODE_CLI
Attributes for DOCGEN plugins
    .. attribute:: docclass
        The class in the module that defines the BaseDoc
    .. attribute:: paper
        bool, Indicates whether the plugin uses paper or not, default=True
    .. attribute:: style
        bool, Indicates whether the plugin uses styles or not, default=True
    Attributes for DOCGEN and EXPORT plugins
    .. attribute:: extension
        str, The file extension to use for output produced by the docgen/export,
default=''
Attributes for QUICKREPORT plugins
.. attribute:: runfunc
The function that executes the quick report
Attributes for MAPSERVICE plugins
.. attribute:: mapservice
The class in the module that is a mapservice
Attributes for EXPORT plugins
.. attribute:: export_function
Function that produces the export
.. attribute:: export_options
Class to set options
.. attribute:: export_options_title
Title for the option page
Attributes for IMPORT plugins
.. attribute:: import_function
Function that starts an import
Attributes for GRAMPLET plugins
.. attribute:: gramplet
The function or class that defines the gramplet.
.. attribute:: height
The height the gramplet should have in a column on GrampletView,
default = 200
.. attribute:: detached_height
The height the gramplet should have detached, default 300
.. attribute:: detached_width
The width the gramplet should have detached, default 400
.. attribute:: expand
        If the gramplet should be expanded on start, default False
.. attribute:: gramplet_title
Title to use for the gramplet, default = _('Gramplet')
.. attribute:: navtypes
Navigation types that the gramplet is appropriate for, default = []
.. attribute:: help_url
        The URL where documentation for the gramplet can be found
Attributes for VIEW plugins
.. attribute:: viewclass
A class of type ViewCreator that holds the needed info of the
view to be created: icon, viewclass that derives from pageview, ...
.. attribute:: stock_icon
The icon in the toolbar or sidebar used to select the view
Attributes for SIDEBAR plugins
.. attribute:: sidebarclass
The class that defines the sidebar.
.. attribute:: menu_label
        A label to use on the selection menu.
Attributes for VIEW and SIDEBAR plugins
.. attribute:: order
order can be START or END. Default is END. For END, on registering,
the plugin is appended to the list of plugins. If START, then the
plugin is prepended. Only set START if you want a plugin to be the
first in the order of plugins
Attributes for DATABASE plugins
.. attribute:: databaseclass
The class in the module that is the database class
.. attribute:: reset_system
Boolean to indicate that the system (sys.modules) should
be reset.
"""
def __init__(self):
#read/write attribute
self.directory = None
#base attributes
self._id = None
self._name = None
self._name_accell = None
self._version = None
self._gramps_target_version = None
self._description = None
self._status = UNSTABLE
self._fname = None
self._fpath = None
self._ptype = None
self._authors = []
self._authors_email = []
self._supported = True
self._load_on_reg = False
self._icons = []
self._icondir = None
self._depends_on = []
self._include_in_listing = True
#derived var
self.mod_name = None
#RELCALC attr
self._relcalcclass = None
self._lang_list = None
#REPORT attr
self._reportclass = None
self._require_active = True
self._report_modes = [REPORT_MODE_GUI]
#REPORT and TOOL and GENERAL attr
self._category = None
#REPORT and TOOL attr
self._optionclass = None
#TOOL attr
self._toolclass = None
self._tool_modes = [TOOL_MODE_GUI]
#DOCGEN attr
self._paper = True
self._style = True
self._extension = ''
#QUICKREPORT attr
self._runfunc = None
#MAPSERVICE attr
self._mapservice = None
#EXPORT attr
self._export_function = None
self._export_options = None
self._export_options_title = ''
#IMPORT attr
self._import_function = None
#GRAMPLET attr
self._gramplet = None
self._height = 200
self._detached_height = 300
self._detached_width = 400
self._expand = False
self._gramplet_title = _('Gramplet')
self._navtypes = []
self._orientation = None
self._help_url = None
#VIEW attr
self._viewclass = None
self._stock_icon = None
#SIDEBAR attr
self._sidebarclass = None
self._menu_label = ''
#VIEW and SIDEBAR attr
self._order = END
#DATABASE attr
self._databaseclass = None
self._reset_system = False
#GENERAL attr
self._data = []
self._process = None
def _set_id(self, id):
self._id = id
def _get_id(self):
return self._id
def _set_name(self, name):
self._name = name
def _get_name(self):
return self._name
def _set_name_accell(self, name):
self._name_accell = name
def _get_name_accell(self):
if self._name_accell is None:
return self._name
else:
return self._name_accell
def _set_description(self, description):
self._description = description
def _get_description(self):
return self._description
def _set_version(self, version):
self._version = version
def _get_version(self):
return self._version
def _set_gramps_target_version(self, version):
self._gramps_target_version = version
def _get_gramps_target_version(self):
return self._gramps_target_version
def _set_status(self, status):
if status not in STATUS:
raise ValueError('plugin status cannot be %s' % str(status))
self._status = status
def _get_status(self):
return self._status
def _set_fname(self, fname):
self._fname = fname
def _get_fname(self):
return self._fname
def _set_fpath(self, fpath):
self._fpath = fpath
def _get_fpath(self):
return self._fpath
def _set_ptype(self, ptype):
if ptype not in PTYPE:
raise ValueError('Plugin type cannot be %s' % str(ptype))
elif self._ptype is not None:
raise ValueError('Plugin type may not be changed')
self._ptype = ptype
if self._ptype == REPORT:
self._category = CATEGORY_TEXT
elif self._ptype == TOOL:
self._category = TOOL_UTILS
elif self._ptype == QUICKREPORT:
self._category = CATEGORY_QR_PERSON
elif self._ptype == VIEW:
self._category = ("Miscellaneous", _("Miscellaneous"))
#if self._ptype == DOCGEN:
# self._load_on_reg = True
def _get_ptype(self):
return self._ptype
def _set_authors(self, authors):
if not authors or not isinstance(authors, list):
return
self._authors = authors
def _get_authors(self):
return self._authors
def _set_authors_email(self, authors_email):
if not authors_email or not isinstance(authors_email, list):
return
self._authors_email = authors_email
def _get_authors_email(self):
return self._authors_email
def _set_supported(self, supported):
if not isinstance(supported, bool):
raise ValueError('Plugin must have supported=True or False')
self._supported = supported
def _get_supported(self):
return self._supported
def _set_load_on_reg(self, load_on_reg):
if not isinstance(load_on_reg, bool):
raise ValueError('Plugin must have load_on_reg=True or False')
self._load_on_reg = load_on_reg
def _get_load_on_reg(self):
return self._load_on_reg
def _get_icons(self):
return self._icons
def _set_icons(self, icons):
if not isinstance(icons, list):
raise ValueError('Plugin must have icons as a list')
self._icons = icons
def _get_icondir(self):
return self._icondir
def _set_icondir(self, icondir):
self._icondir = icondir
def _get_depends_on(self):
return self._depends_on
def _set_depends_on(self, depends):
if not isinstance(depends, list):
raise ValueError('Plugin must have depends_on as a list')
self._depends_on = depends
def _get_include_in_listing(self):
return self._include_in_listing
def _set_include_in_listing(self, include):
if not isinstance(include, bool):
raise ValueError('Plugin must have include_in_listing as a bool')
self._include_in_listing = include
id = property(_get_id, _set_id)
name = property(_get_name, _set_name)
name_accell = property(_get_name_accell, _set_name_accell)
description = property(_get_description, _set_description)
version = property(_get_version, _set_version)
gramps_target_version = property(_get_gramps_target_version,
_set_gramps_target_version)
status = property(_get_status, _set_status)
fname = property(_get_fname, _set_fname)
fpath = property(_get_fpath, _set_fpath)
ptype = property(_get_ptype, _set_ptype)
authors = property(_get_authors, _set_authors)
authors_email = property(_get_authors_email, _set_authors_email)
supported = property(_get_supported, _set_supported)
load_on_reg = property(_get_load_on_reg, _set_load_on_reg)
icons = property(_get_icons, _set_icons)
icondir = property(_get_icondir, _set_icondir)
depends_on = property(_get_depends_on, _set_depends_on)
include_in_listing = property(_get_include_in_listing, _set_include_in_listing)
def statustext(self):
return STATUSTEXT[self.status]
#type specific plugin attributes
#RELCALC attributes
def _set_relcalcclass(self, relcalcclass):
if not self._ptype == RELCALC:
raise ValueError('relcalcclass may only be set for RELCALC plugins')
self._relcalcclass = relcalcclass
def _get_relcalcclass(self):
return self._relcalcclass
def _set_lang_list(self, lang_list):
if not self._ptype == RELCALC:
            raise ValueError('lang_list may only be set for RELCALC plugins')
self._lang_list = lang_list
def _get_lang_list(self):
return self._lang_list
relcalcclass = property(_get_relcalcclass, _set_relcalcclass)
lang_list = property(_get_lang_list, _set_lang_list)
#REPORT attributes
def _set_require_active(self, require_active):
if not self._ptype == REPORT:
raise ValueError('require_active may only be set for REPORT plugins')
if not isinstance(require_active, bool):
raise ValueError('Report must have require_active=True or False')
self._require_active = require_active
def _get_require_active(self):
return self._require_active
def _set_reportclass(self, reportclass):
if not self._ptype == REPORT:
raise ValueError('reportclass may only be set for REPORT plugins')
self._reportclass = reportclass
def _get_reportclass(self):
return self._reportclass
def _set_report_modes(self, report_modes):
if not self._ptype == REPORT:
raise ValueError('report_modes may only be set for REPORT plugins')
if not isinstance(report_modes, list):
raise ValueError('report_modes must be a list')
self._report_modes = [x for x in report_modes if x in REPORT_MODES]
if not self._report_modes:
raise ValueError('report_modes not a valid list of modes')
def _get_report_modes(self):
return self._report_modes
    #REPORT or TOOL or QUICKREPORT or VIEW or GENERAL attributes
def _set_category(self, category):
if self._ptype not in [REPORT, TOOL, QUICKREPORT, VIEW, GENERAL]:
raise ValueError('category may only be set for ' \
'REPORT/TOOL/QUICKREPORT/VIEW/GENERAL plugins')
self._category = category
def _get_category(self):
return self._category
    #REPORT or TOOL or DOCGEN attributes
def _set_optionclass(self, optionclass):
        if not (self._ptype == REPORT or self._ptype == TOOL or self._ptype == DOCGEN):
raise ValueError('optionclass may only be set for REPORT/TOOL/DOCGEN plugins')
self._optionclass = optionclass
def _get_optionclass(self):
return self._optionclass
#TOOL attributes
def _set_toolclass(self, toolclass):
if not self._ptype == TOOL:
raise ValueError('toolclass may only be set for TOOL plugins')
self._toolclass = toolclass
def _get_toolclass(self):
return self._toolclass
def _set_tool_modes(self, tool_modes):
if not self._ptype == TOOL:
raise ValueError('tool_modes may only be set for TOOL plugins')
if not isinstance(tool_modes, list):
raise ValueError('tool_modes must be a list')
self._tool_modes = [x for x in tool_modes if x in TOOL_MODES]
if not self._tool_modes:
raise ValueError('tool_modes not a valid list of modes')
def _get_tool_modes(self):
return self._tool_modes
require_active = property(_get_require_active, _set_require_active)
reportclass = property(_get_reportclass, _set_reportclass)
report_modes = property(_get_report_modes, _set_report_modes)
category = property(_get_category, _set_category)
optionclass = property(_get_optionclass, _set_optionclass)
toolclass = property(_get_toolclass, _set_toolclass)
tool_modes = property(_get_tool_modes, _set_tool_modes)
#DOCGEN attributes
def _set_paper(self, paper):
if not self._ptype == DOCGEN:
raise ValueError('paper may only be set for DOCGEN plugins')
if not isinstance(paper, bool):
raise ValueError('Plugin must have paper=True or False')
self._paper = paper
def _get_paper(self):
return self._paper
def _set_style(self, style):
if not self._ptype == DOCGEN:
raise ValueError('style may only be set for DOCGEN plugins')
if not isinstance(style, bool):
raise ValueError('Plugin must have style=True or False')
self._style = style
def _get_style(self):
return self._style
def _set_extension(self, extension):
if not (self._ptype == DOCGEN or self._ptype == EXPORT
or self._ptype == IMPORT):
raise ValueError('extension may only be set for DOCGEN/EXPORT/'\
'IMPORT plugins')
self._extension = extension
def _get_extension(self):
return self._extension
paper = property(_get_paper, _set_paper)
style = property(_get_style, _set_style)
extension = property(_get_extension, _set_extension)
#QUICKREPORT attributes
def _set_runfunc(self, runfunc):
if not self._ptype == QUICKREPORT:
raise ValueError('runfunc may only be set for QUICKREPORT plugins')
self._runfunc = runfunc
def _get_runfunc(self):
return self._runfunc
runfunc = property(_get_runfunc, _set_runfunc)
#MAPSERVICE attributes
def _set_mapservice(self, mapservice):
if not self._ptype == MAPSERVICE:
raise ValueError('mapservice may only be set for MAPSERVICE plugins')
self._mapservice = mapservice
def _get_mapservice(self):
return self._mapservice
mapservice = property(_get_mapservice, _set_mapservice)
#EXPORT attributes
def _set_export_function(self, export_function):
if not self._ptype == EXPORT:
raise ValueError('export_function may only be set for EXPORT plugins')
self._export_function = export_function
def _get_export_function(self):
return self._export_function
def _set_export_options(self, export_options):
if not self._ptype == EXPORT:
raise ValueError('export_options may only be set for EXPORT plugins')
self._export_options = export_options
def _get_export_options(self):
return self._export_options
def _set_export_options_title(self, export_options_title):
if not self._ptype == EXPORT:
raise ValueError('export_options_title may only be set for EXPORT plugins')
self._export_options_title = export_options_title
def _get_export_options_title(self):
return self._export_options_title
export_function = property(_get_export_function, _set_export_function)
export_options = property(_get_export_options, _set_export_options)
export_options_title = property(_get_export_options_title,
_set_export_options_title)
#IMPORT attributes
def _set_import_function(self, import_function):
if not self._ptype == IMPORT:
raise ValueError('import_function may only be set for IMPORT plugins')
self._import_function = import_function
def _get_import_function(self):
return self._import_function
import_function = property(_get_import_function, _set_import_function)
#GRAMPLET attributes
def _set_gramplet(self, gramplet):
if not self._ptype == GRAMPLET:
raise ValueError('gramplet may only be set for GRAMPLET plugins')
self._gramplet = gramplet
def _get_gramplet(self):
return self._gramplet
def _set_height(self, height):
if not self._ptype == GRAMPLET:
raise ValueError('height may only be set for GRAMPLET plugins')
if not isinstance(height, int):
raise ValueError('Plugin must have height an integer')
self._height = height
def _get_height(self):
return self._height
def _set_detached_height(self, detached_height):
if not self._ptype == GRAMPLET:
raise ValueError('detached_height may only be set for GRAMPLET plugins')
if not isinstance(detached_height, int):
raise ValueError('Plugin must have detached_height an integer')
self._detached_height = detached_height
def _get_detached_height(self):
return self._detached_height
def _set_detached_width(self, detached_width):
if not self._ptype == GRAMPLET:
raise ValueError('detached_width may only be set for GRAMPLET plugins')
if not isinstance(detached_width, int):
raise ValueError('Plugin must have detached_width an integer')
self._detached_width = detached_width
def _get_detached_width(self):
return self._detached_width
def _set_expand(self, expand):
if not self._ptype == GRAMPLET:
raise ValueError('expand may only be set for GRAMPLET plugins')
if not isinstance(expand, bool):
raise ValueError('Plugin must have expand as a bool')
self._expand = expand
def _get_expand(self):
return self._expand
def _set_gramplet_title(self, gramplet_title):
if not self._ptype == GRAMPLET:
raise ValueError('gramplet_title may only be set for GRAMPLET plugins')
if not isinstance(gramplet_title, str):
raise ValueError('gramplet_title is type %s, string or unicode required' % type(gramplet_title))
self._gramplet_title = gramplet_title
def _get_gramplet_title(self):
return self._gramplet_title
def _set_help_url(self, help_url):
if not self._ptype == GRAMPLET:
raise ValueError('help_url may only be set for GRAMPLET plugins')
self._help_url = help_url
def _get_help_url(self):
return self._help_url
def _set_navtypes(self, navtypes):
if not self._ptype == GRAMPLET:
raise ValueError('navtypes may only be set for GRAMPLET plugins')
self._navtypes = navtypes
def _get_navtypes(self):
return self._navtypes
def _set_orientation(self, orientation):
if not self._ptype == GRAMPLET:
raise ValueError('orientation may only be set for GRAMPLET plugins')
self._orientation = orientation
def _get_orientation(self):
return self._orientation
gramplet = property(_get_gramplet, _set_gramplet)
height = property(_get_height, _set_height)
detached_height = property(_get_detached_height, _set_detached_height)
detached_width = property(_get_detached_width, _set_detached_width)
expand = property(_get_expand, _set_expand)
gramplet_title = property(_get_gramplet_title, _set_gramplet_title)
navtypes = property(_get_navtypes, _set_navtypes)
orientation = property(_get_orientation, _set_orientation)
help_url = property(_get_help_url, _set_help_url)
def _set_viewclass(self, viewclass):
if not self._ptype == VIEW:
raise ValueError('viewclass may only be set for VIEW plugins')
self._viewclass = viewclass
def _get_viewclass(self):
return self._viewclass
def _set_stock_icon(self, stock_icon):
if not self._ptype == VIEW:
raise ValueError('stock_icon may only be set for VIEW plugins')
self._stock_icon = stock_icon
def _get_stock_icon(self):
return self._stock_icon
viewclass = property(_get_viewclass, _set_viewclass)
stock_icon = property(_get_stock_icon, _set_stock_icon)
#SIDEBAR attributes
def _set_sidebarclass(self, sidebarclass):
if not self._ptype == SIDEBAR:
raise ValueError('sidebarclass may only be set for SIDEBAR plugins')
self._sidebarclass = sidebarclass
def _get_sidebarclass(self):
return self._sidebarclass
def _set_menu_label(self, menu_label):
if not self._ptype == SIDEBAR:
raise ValueError('menu_label may only be set for SIDEBAR plugins')
self._menu_label = menu_label
def _get_menu_label(self):
return self._menu_label
sidebarclass = property(_get_sidebarclass, _set_sidebarclass)
menu_label = property(_get_menu_label, _set_menu_label)
#VIEW and SIDEBAR attributes
def _set_order(self, order):
if not self._ptype in (VIEW, SIDEBAR):
raise ValueError('order may only be set for VIEW and SIDEBAR plugins')
self._order = order
def _get_order(self):
return self._order
order = property(_get_order, _set_order)
#DATABASE attributes
def _set_databaseclass(self, databaseclass):
if not self._ptype == DATABASE:
raise ValueError('databaseclass may only be set for DATABASE plugins')
self._databaseclass = databaseclass
def _get_databaseclass(self):
return self._databaseclass
def _set_reset_system(self, reset_system):
if not self._ptype == DATABASE:
raise ValueError('reset_system may only be set for DATABASE plugins')
self._reset_system = reset_system
def _get_reset_system(self):
return self._reset_system
databaseclass = property(_get_databaseclass, _set_databaseclass)
reset_system = property(_get_reset_system, _set_reset_system)
#GENERAL attr
def _set_data(self, data):
if not self._ptype in (GENERAL,):
raise ValueError('data may only be set for GENERAL plugins')
self._data = data
def _get_data(self):
return self._data
def _set_process(self, process):
if not self._ptype in (GENERAL,):
raise ValueError('process may only be set for GENERAL plugins')
self._process = process
def _get_process(self):
return self._process
data = property(_get_data, _set_data)
process = property(_get_process, _set_process)
def newplugin():
"""
Function to create a new plugindata object, add it to list of
registered plugins
:returns: a newly created PluginData which is already part of the register
"""
gpr = PluginRegister.get_instance()
pgd = PluginData()
gpr.add_plugindata(pgd)
return pgd
def register(ptype, **kwargs):
"""
Convenience function to register a new plugin using a dictionary as input.
The register functions will call newplugin() function, and use the
dictionary kwargs to assign data to the PluginData newplugin() created,
as in: plugindata.key = data
:param ptype: the plugin type, one of REPORT, TOOL, ...
:param kwargs: dictionary with keys attributes of the plugin, and data
the value
:returns: a newly created PluginData which is already part of the register
and which has kwargs assigned as attributes
"""
plg = newplugin()
plg.ptype = ptype
for prop in kwargs:
#check it is a valid attribute with getattr
getattr(plg, prop)
#set the value
setattr(plg, prop, kwargs[prop])
return plg
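# Illustrative sketch (not part of the original source): a minimal plugin
# registration file (a "*.gpr.py" file) as scan_dir() below executes it
# inside make_environment(). The id, file name and class names here are
# hypothetical.
#
#   register(TOOL,
#            id="mytool",
#            name=_("My Tool"),
#            description=_("An example tool plugin"),
#            version="1.0.0",
#            gramps_target_version="5.1",
#            status=STABLE,
#            fname="mytool.py",
#            toolclass="MyTool",
#            optionclass="MyToolOptions",
#            tool_modes=[TOOL_MODE_GUI, TOOL_MODE_CLI])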
def make_environment(**kwargs):
env = {
'newplugin': newplugin,
'register': register,
'STABLE': STABLE,
'UNSTABLE': UNSTABLE,
'REPORT': REPORT,
'QUICKREPORT': QUICKREPORT,
'TOOL': TOOL,
'IMPORT': IMPORT,
'EXPORT': EXPORT,
'DOCGEN': DOCGEN,
'GENERAL': GENERAL,
'MAPSERVICE': MAPSERVICE,
'VIEW': VIEW,
'RELCALC': RELCALC,
'GRAMPLET': GRAMPLET,
'SIDEBAR': SIDEBAR,
'CATEGORY_TEXT': CATEGORY_TEXT,
'CATEGORY_DRAW': CATEGORY_DRAW,
'CATEGORY_CODE': CATEGORY_CODE,
'CATEGORY_WEB': CATEGORY_WEB,
'CATEGORY_BOOK': CATEGORY_BOOK,
'CATEGORY_GRAPHVIZ': CATEGORY_GRAPHVIZ,
'TOOL_DEBUG': TOOL_DEBUG,
'TOOL_ANAL': TOOL_ANAL,
'TOOL_DBPROC': TOOL_DBPROC,
'TOOL_DBFIX': TOOL_DBFIX,
'TOOL_REVCTL': TOOL_REVCTL,
'TOOL_UTILS': TOOL_UTILS,
'CATEGORY_QR_MISC': CATEGORY_QR_MISC,
'CATEGORY_QR_PERSON': CATEGORY_QR_PERSON,
'CATEGORY_QR_FAMILY': CATEGORY_QR_FAMILY,
'CATEGORY_QR_EVENT': CATEGORY_QR_EVENT,
'CATEGORY_QR_SOURCE': CATEGORY_QR_SOURCE,
'CATEGORY_QR_CITATION': CATEGORY_QR_CITATION,
'CATEGORY_QR_SOURCE_OR_CITATION': CATEGORY_QR_SOURCE_OR_CITATION,
'CATEGORY_QR_PLACE': CATEGORY_QR_PLACE,
'CATEGORY_QR_MEDIA': CATEGORY_QR_MEDIA,
'CATEGORY_QR_REPOSITORY': CATEGORY_QR_REPOSITORY,
'CATEGORY_QR_NOTE': CATEGORY_QR_NOTE,
'CATEGORY_QR_DATE': CATEGORY_QR_DATE,
'REPORT_MODE_GUI': REPORT_MODE_GUI,
'REPORT_MODE_BKI': REPORT_MODE_BKI,
'REPORT_MODE_CLI': REPORT_MODE_CLI,
'TOOL_MODE_GUI': TOOL_MODE_GUI,
'TOOL_MODE_CLI': TOOL_MODE_CLI,
'DATABASE': DATABASE,
'GRAMPSVERSION': GRAMPSVERSION,
'START': START,
'END': END,
'IMAGE_DIR': IMAGE_DIR,
}
env.update(kwargs)
return env
#-------------------------------------------------------------------------
#
# PluginRegister
#
#-------------------------------------------------------------------------
class PluginRegister:
"""
PluginRegister is a Singleton which holds plugin data
.. attribute : stable_only
Bool, include stable plugins only or not. Default True
"""
__instance = None
def get_instance():
""" Use this function to get the instance of the PluginRegister """
if PluginRegister.__instance is None:
PluginRegister.__instance = 1 # Set to 1 for __init__()
PluginRegister.__instance = PluginRegister()
return PluginRegister.__instance
get_instance = staticmethod(get_instance)
def __init__(self):
""" This function should only be run once by get_instance() """
        if PluginRegister.__instance != 1:
raise Exception("This class is a singleton. "
"Use the get_instance() method")
self.stable_only = True
if __debug__:
self.stable_only = False
self.__plugindata = []
self.__id_to_pdata = {}
def add_plugindata(self, plugindata):
""" This is used to add an entry to the registration list. The way it
is used, this entry is not yet filled in, so we cannot use the id to
add to the __id_to_pdata dict at this time. """
self.__plugindata.append(plugindata)
def scan_dir(self, dir, filenames, uistate=None):
"""
The dir name will be scanned for plugin registration code, which will
be loaded in :class:`PluginData` objects if they satisfy some checks.
:returns: A list with :class:`PluginData` objects
"""
# if the directory does not exist, do nothing
if not (os.path.isdir(dir) or os.path.islink(dir)):
return []
ext = r".gpr.py"
extlen = -len(ext)
pymod = re.compile(r"^(.*)\.py$")
for filename in filenames:
if not filename[extlen:] == ext:
continue
lenpd = len(self.__plugindata)
full_filename = os.path.join(dir, filename)
try:
with open(full_filename, "r", encoding='utf-8') as fd:
stream = fd.read()
except Exception as msg:
print(_('ERROR: Failed reading plugin registration %(filename)s') % \
{'filename' : filename})
print(msg)
continue
if os.path.exists(os.path.join(os.path.dirname(full_filename),
'locale')):
try:
local_gettext = glocale.get_addon_translator(full_filename).gettext
except ValueError:
print(_('WARNING: Plugin %(plugin_name)s has no translation'
' for any of your configured languages, using US'
' English instead') %
{'plugin_name' : filename.split('.')[0] })
local_gettext = glocale.translation.gettext
else:
local_gettext = glocale.translation.gettext
try:
exec (compile(stream, filename, 'exec'),
make_environment(_=local_gettext), {'uistate': uistate})
for pdata in self.__plugindata[lenpd:]:
# should not be duplicate IDs in different plugins
assert pdata.id not in self.__id_to_pdata
# if pdata.id in self.__id_to_pdata:
# print("Error: %s is duplicated!" % pdata.id)
self.__id_to_pdata[pdata.id] = pdata
except ValueError as msg:
print(_('ERROR: Failed reading plugin registration %(filename)s') % \
{'filename' : filename})
print(msg)
self.__plugindata = self.__plugindata[:lenpd]
except:
print(_('ERROR: Failed reading plugin registration %(filename)s') % \
{'filename' : filename})
print("".join(traceback.format_exception(*sys.exc_info())))
self.__plugindata = self.__plugindata[:lenpd]
#check if:
# 1. plugin exists, if not remove, otherwise set module name
# 2. plugin not stable, if stable_only=True, remove
# 3. TOOL_DEBUG only if __debug__ True
rmlist = []
ind = lenpd-1
for plugin in self.__plugindata[lenpd:]:
#LOG.warning("\nPlugin scanned %s at registration", plugin.id)
ind += 1
plugin.directory = dir
if not valid_plugin_version(plugin.gramps_target_version):
                print(_('ERROR: Plugin file %(filename)s has a version of '
                        '"%(gramps_target_version)s" which is invalid for Gramps '
                        '"%(gramps_version)s".') %
                      {'filename': os.path.join(dir, plugin.fname),
                       'gramps_version': GRAMPSVERSION,
                       'gramps_target_version': plugin.gramps_target_version,})
rmlist.append(ind)
continue
if not plugin.status == STABLE and self.stable_only:
rmlist.append(ind)
continue
if plugin.ptype == TOOL and plugin.category == TOOL_DEBUG \
and not __debug__:
rmlist.append(ind)
continue
if plugin.fname is None:
continue
match = pymod.match(plugin.fname)
if not match:
rmlist.append(ind)
print(_('ERROR: Wrong python file %(filename)s in register file '
'%(regfile)s') % {
'filename': os.path.join(dir, plugin.fname),
'regfile': os.path.join(dir, filename)
})
continue
if not os.path.isfile(os.path.join(dir, plugin.fname)):
rmlist.append(ind)
print(_('ERROR: Python file %(filename)s in register file '
'%(regfile)s does not exist') % {
'filename': os.path.join(dir, plugin.fname),
'regfile': os.path.join(dir, filename)
})
continue
module = match.groups()[0]
plugin.mod_name = module
plugin.fpath = dir
#LOG.warning("\nPlugin added %s at registration", plugin.id)
rmlist.reverse()
for ind in rmlist:
del self.__id_to_pdata[self.__plugindata[ind].id]
del self.__plugindata[ind]
def get_plugin(self, id):
"""
Return the :class:`PluginData` for the plugin with id
"""
assert(len(self.__id_to_pdata) == len(self.__plugindata))
# if len(self.__id_to_pdata) != len(self.__plugindata):
# print(len(self.__id_to_pdata), len(self.__plugindata))
return self.__id_to_pdata.get(id, None)
def type_plugins(self, ptype):
"""
Return a list of :class:`PluginData` that are of type ptype
"""
return [self.get_plugin(id) for id in
set([x.id for x in self.__plugindata if x.ptype == ptype])]
def report_plugins(self, gui=True):
"""
Return a list of gui or cli :class:`PluginData` that are of type REPORT
:param gui: bool, if True then gui plugin, otherwise cli plugin
"""
if gui:
return [x for x in self.type_plugins(REPORT) if REPORT_MODE_GUI
in x.report_modes]
else:
return [x for x in self.type_plugins(REPORT) if REPORT_MODE_CLI
in x.report_modes]
def tool_plugins(self, gui=True):
"""
Return a list of :class:`PluginData` that are of type TOOL
"""
if gui:
return [x for x in self.type_plugins(TOOL) if TOOL_MODE_GUI
in x.tool_modes]
else:
return [x for x in self.type_plugins(TOOL) if TOOL_MODE_CLI
in x.tool_modes]
def bookitem_plugins(self):
"""
        Return a list of REPORT :class:`PluginData` that can be used as a
        bookitem
"""
return [x for x in self.type_plugins(REPORT) if REPORT_MODE_BKI
in x.report_modes]
def quickreport_plugins(self):
"""
Return a list of :class:`PluginData` that are of type QUICKREPORT
"""
return self.type_plugins(QUICKREPORT)
def import_plugins(self):
"""
Return a list of :class:`PluginData` that are of type IMPORT
"""
return self.type_plugins(IMPORT)
def export_plugins(self):
"""
Return a list of :class:`PluginData` that are of type EXPORT
"""
return self.type_plugins(EXPORT)
def docgen_plugins(self):
"""
Return a list of :class:`PluginData` that are of type DOCGEN
"""
return self.type_plugins(DOCGEN)
def general_plugins(self, category=None):
"""
Return a list of :class:`PluginData` that are of type GENERAL
"""
plugins = self.type_plugins(GENERAL)
if category:
return [plugin for plugin in plugins
if plugin.category == category]
return plugins
def mapservice_plugins(self):
"""
Return a list of :class:`PluginData` that are of type MAPSERVICE
"""
return self.type_plugins(MAPSERVICE)
def view_plugins(self):
"""
Return a list of :class:`PluginData` that are of type VIEW
"""
return self.type_plugins(VIEW)
def relcalc_plugins(self):
"""
Return a list of :class:`PluginData` that are of type RELCALC
"""
return self.type_plugins(RELCALC)
def gramplet_plugins(self):
"""
Return a list of :class:`PluginData` that are of type GRAMPLET
"""
return self.type_plugins(GRAMPLET)
def sidebar_plugins(self):
"""
Return a list of :class:`PluginData` that are of type SIDEBAR
"""
return self.type_plugins(SIDEBAR)
def database_plugins(self):
"""
Return a list of :class:`PluginData` that are of type DATABASE
"""
return self.type_plugins(DATABASE)
def filter_load_on_reg(self):
"""
Return a list of :class:`PluginData` that have load_on_reg == True
"""
        return [self.get_plugin(id) for id in
                set([x.id for x in self.__plugindata
                     if x.load_on_reg])]
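# Illustrative sketch (not part of the original source): typical use of the
# register; the plugin directory here is hypothetical.
#
#   gpr = PluginRegister.get_instance()
#   gpr.scan_dir(plugin_dir, os.listdir(plugin_dir))
#   for pdata in gpr.report_plugins(gui=True):
#       print(pdata.id, pdata.name, pdata.statustext())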
| gpl-2.0 |
iglootools/genconf | genconf/filegenerator/_filegenerator.py | 2 | 2425 | """
Copyright 2011 Sami Dalouche
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os
import codecs
from genconf.filegenerator._defaulteventlistener import DefaultEventListener
from genconf.filegenerator._defaulterrorlistener import DefaultErrorListener
from genconf.manifest import TemplateNotFoundException, TemplateProcessingException
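# Illustrative sketch (not part of the original source): intended use of the
# FileGenerator defined below. The template loader and manifest objects come
# from the sibling genconf modules; the target directory is hypothetical.
#
#   generator = FileGenerator(template_loader, "/tmp/generated")
#   generator.generate_files(manifest)  # default listeners handle events/errors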
class FileGenerator(object):
def __init__(self, template_loader, targetdir):
assert template_loader is not None, "template_loader is required"
assert targetdir is not None, "targetdir is required"
self._template_loader = template_loader
self._targetdir = targetdir
def generate_files(self, manifest, error_listener=DefaultErrorListener(), event_listener=DefaultEventListener()):
profiles = manifest.concrete_profiles()
for p in profiles:
event_listener.on_before_profile(p)
for f in p.output_files:
filename = os.path.join(self._targetdir, f.target_path)
event_listener.on_before_file_update(filename)
try:
directory = os.path.dirname(filename)
if not os.path.exists(directory):
os.makedirs(directory)
                    content = f.render(self._template_loader)
                    # use a distinct name so the file handle does not shadow
                    # the output-file object 'f' from the enclosing loop
                    with codecs.open(filename, "wb", encoding="utf-8") as out:
                        out.write(content)
event_listener.on_after_file_update(filename, content)
except TemplateNotFoundException as e:
error_listener.on_template_not_found(e)
except TemplateProcessingException as e:
error_listener.on_template_processing_error(e)
except Exception as e:
error_listener.on_write_error(filename, e)
event_listener.on_after_profile(p) | apache-2.0 |
shakalaca/ASUS_ZenFone_ZE500CL | linux/kernel/tools/perf/util/setup.py | 2079 | 1438 | #!/usr/bin/python2
from distutils.core import setup, Extension
from os import getenv
from distutils.command.build_ext import build_ext as _build_ext
from distutils.command.install_lib import install_lib as _install_lib
class build_ext(_build_ext):
def finalize_options(self):
_build_ext.finalize_options(self)
self.build_lib = build_lib
self.build_temp = build_tmp
class install_lib(_install_lib):
def finalize_options(self):
_install_lib.finalize_options(self)
self.build_dir = build_lib
cflags = ['-fno-strict-aliasing', '-Wno-write-strings']
cflags += getenv('CFLAGS', '').split()
build_lib = getenv('PYTHON_EXTBUILD_LIB')
build_tmp = getenv('PYTHON_EXTBUILD_TMP')
libtraceevent = getenv('LIBTRACEEVENT')
liblk = getenv('LIBLK')
ext_sources = [f.strip() for f in file('util/python-ext-sources')
if len(f.strip()) > 0 and f[0] != '#']
perf = Extension('perf',
sources = ext_sources,
include_dirs = ['util/include'],
extra_compile_args = cflags,
extra_objects = [libtraceevent, liblk],
)
setup(name='perf',
version='0.1',
description='Interface with the Linux profiling infrastructure',
author='Arnaldo Carvalho de Melo',
author_email='[email protected]',
license='GPLv2',
url='http://perf.wiki.kernel.org',
ext_modules=[perf],
cmdclass={'build_ext': build_ext, 'install_lib': install_lib})
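# Illustrative note (not part of the original source): this script is driven
# by the perf build system, which exports the variables read above before
# invoking it, along the lines of:
#
#   PYTHON_EXTBUILD_LIB=... PYTHON_EXTBUILD_TMP=... \
#   LIBTRACEEVENT=.../libtraceevent.a LIBLK=.../liblk.a \
#   python2 util/setup.py --quiet build_ext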
| gpl-2.0 |
wgcv/SWW-Crashphone | lib/python2.7/site-packages/django/contrib/auth/management/commands/createsuperuser.py | 63 | 7419 | """
Management utility to create superusers.
"""
from __future__ import unicode_literals
import getpass
import sys
from optparse import make_option
from django.contrib.auth import get_user_model
from django.contrib.auth.management import get_default_username
from django.core import exceptions
from django.core.management.base import BaseCommand, CommandError
from django.db import DEFAULT_DB_ALIAS
from django.utils.encoding import force_str
from django.utils.six.moves import input
from django.utils.text import capfirst
class NotRunningInTTYException(Exception):
pass
class Command(BaseCommand):
def __init__(self, *args, **kwargs):
# Options are defined in an __init__ method to support swapping out
# custom user models in tests.
super(Command, self).__init__(*args, **kwargs)
self.UserModel = get_user_model()
self.username_field = self.UserModel._meta.get_field(self.UserModel.USERNAME_FIELD)
self.option_list = BaseCommand.option_list + (
make_option('--%s' % self.UserModel.USERNAME_FIELD, dest=self.UserModel.USERNAME_FIELD, default=None,
help='Specifies the login for the superuser.'),
make_option('--noinput', action='store_false', dest='interactive', default=True,
help=('Tells Django to NOT prompt the user for input of any kind. '
'You must use --%s with --noinput, along with an option for '
'any other required field. Superusers created with --noinput will '
                         'not be able to log in until they\'re given a valid password.' %
self.UserModel.USERNAME_FIELD)),
make_option('--database', action='store', dest='database',
default=DEFAULT_DB_ALIAS, help='Specifies the database to use. Default is "default".'),
) + tuple(
make_option('--%s' % field, dest=field, default=None,
help='Specifies the %s for the superuser.' % field)
for field in self.UserModel.REQUIRED_FIELDS
)
option_list = BaseCommand.option_list
help = 'Used to create a superuser.'
def execute(self, *args, **options):
self.stdin = options.get('stdin', sys.stdin) # Used for testing
return super(Command, self).execute(*args, **options)
def handle(self, *args, **options):
username = options.get(self.UserModel.USERNAME_FIELD, None)
interactive = options.get('interactive')
verbosity = int(options.get('verbosity', 1))
database = options.get('database')
# If not provided, create the user with an unusable password
password = None
user_data = {}
# Do quick and dirty validation if --noinput
if not interactive:
try:
if not username:
raise CommandError("You must use --%s with --noinput." %
self.UserModel.USERNAME_FIELD)
username = self.username_field.clean(username, None)
for field_name in self.UserModel.REQUIRED_FIELDS:
if options.get(field_name):
field = self.UserModel._meta.get_field(field_name)
user_data[field_name] = field.clean(options[field_name], None)
else:
raise CommandError("You must use --%s with --noinput." % field_name)
except exceptions.ValidationError as e:
raise CommandError('; '.join(e.messages))
else:
# Prompt for username/password, and any other required fields.
# Enclose this whole thing in a try/except to trap for a
# keyboard interrupt and exit gracefully.
default_username = get_default_username()
try:
if hasattr(self.stdin, 'isatty') and not self.stdin.isatty():
raise NotRunningInTTYException("Not running in a TTY")
# Get a username
verbose_field_name = self.username_field.verbose_name
while username is None:
if not username:
input_msg = capfirst(verbose_field_name)
if default_username:
input_msg = "%s (leave blank to use '%s')" % (
input_msg, default_username)
raw_value = input(force_str('%s: ' % input_msg))
if default_username and raw_value == '':
raw_value = default_username
try:
username = self.username_field.clean(raw_value, None)
except exceptions.ValidationError as e:
self.stderr.write("Error: %s" % '; '.join(e.messages))
username = None
continue
try:
self.UserModel._default_manager.db_manager(database).get_by_natural_key(username)
except self.UserModel.DoesNotExist:
pass
else:
self.stderr.write("Error: That %s is already taken." %
verbose_field_name)
username = None
for field_name in self.UserModel.REQUIRED_FIELDS:
field = self.UserModel._meta.get_field(field_name)
user_data[field_name] = options.get(field_name)
while user_data[field_name] is None:
raw_value = input(force_str('%s: ' % capfirst(field.verbose_name)))
try:
user_data[field_name] = field.clean(raw_value, None)
except exceptions.ValidationError as e:
self.stderr.write("Error: %s" % '; '.join(e.messages))
user_data[field_name] = None
# Get a password
while password is None:
if not password:
password = getpass.getpass()
password2 = getpass.getpass(force_str('Password (again): '))
if password != password2:
self.stderr.write("Error: Your passwords didn't match.")
password = None
continue
if password.strip() == '':
self.stderr.write("Error: Blank passwords aren't allowed.")
password = None
continue
except KeyboardInterrupt:
self.stderr.write("\nOperation cancelled.")
sys.exit(1)
except NotRunningInTTYException:
self.stdout.write(
"Superuser creation skipped due to not running in a TTY. "
"You can run `manage.py createsuperuser` in your project "
"to create one manually."
)
if username:
user_data[self.UserModel.USERNAME_FIELD] = username
user_data['password'] = password
self.UserModel._default_manager.db_manager(database).create_superuser(**user_data)
if verbosity >= 1:
self.stdout.write("Superuser created successfully.")
| apache-2.0 |
redbear/micropython | tests/extmod/ure1.py | 35 | 1169 | try:
import ure as re
except ImportError:
import re
r = re.compile(".+")
m = r.match("abc")
print(m.group(0))
try:
m.group(1)
except IndexError:
print("IndexError")
r = re.compile("(.+)1")
m = r.match("xyz781")
print(m.group(0))
print(m.group(1))
try:
m.group(2)
except IndexError:
print("IndexError")
r = re.compile("[a-cu-z]")
m = r.match("a")
print(m.group(0))
m = r.match("z")
print(m.group(0))
m = r.match("d")
print(m)
m = r.match("A")
print(m)
print("===")
r = re.compile("[^a-cu-z]")
m = r.match("a")
print(m)
m = r.match("z")
print(m)
m = r.match("d")
print(m.group(0))
m = r.match("A")
print(m.group(0))
r = re.compile("o+")
m = r.search("foobar")
print(m.group(0))
try:
m.group(1)
except IndexError:
print("IndexError")
m = re.match(".*", "foo")
print(m.group(0))
m = re.search("w.r", "hello world")
print(m.group(0))
m = re.match('a+?', 'ab'); print(m.group(0))
m = re.match('a*?', 'ab'); print(m.group(0))
m = re.match('^ab$', 'ab'); print(m.group(0))
m = re.match('a|b', 'b'); print(m.group(0))
m = re.match('a|b|c', 'c'); print(m.group(0))
try:
re.compile("*")
except:
print("Caught invalid regex")
| mit |
PrismTech/opensplice | build/scripts/overnight/python/DBMSConnect.py | 2 | 12435 | import sys
import os
import json
import shutil
import subprocess
import fileinput
import platform
import time
from shutil import copy
import example_logparser
from example_exceptions import LogCheckFail
from Example import Example
from Example import ExeThread
"""
Class specific to the DBMSConnect example, which is very different from all
the other examples: it has a different directory structure and runs more
than a simple publisher/subscriber.
"""
class dbmsconnect (Example):
def __init__(self, host, logger):
super(dbmsconnect, self).__init__(host, logger, "dbmsconnect", "services")
with open ('examples.json') as data_file:
data = json.load(data_file)
self.odbcMsgBoard_params = data["services"]["dbmsconnect"]["params"]["odbcMsgBoard_params"]
self.odbcChatter1_params = data["services"]["dbmsconnect"]["params"]["odbcChatter1_params"]
self.odbcChatter2_params = data["services"]["dbmsconnect"]["params"]["odbcChatter2_params"]
self.cppChatter1_params = data["services"]["dbmsconnect"]["params"]["cppChatter1_params"]
self.cppChatter2_params = data["services"]["dbmsconnect"]["params"]["cppChatter2_params"]
self.odbcChatterQuit_params = data["services"]["dbmsconnect"]["params"]["odbcChatterQuit_params"]
self.cppChatterQuit_params = data["services"]["dbmsconnect"]["params"]["cppChatterQuit_params"]
super(dbmsconnect, self).setPath(os.path.join(os.environ['OSPL_HOME'], 'examples', 'services', 'dbmsconnect', 'SQL', 'C++', 'ODBC'))
if os.environ['EXRUNTYPE'] == "shm":
self.uri = "file://" + os.path.join(os.environ['OSPL_HOME'], 'examples', 'services', 'dbmsconnect', self.shm_uri)
else:
self.uri = "file://" + os.path.join(os.environ['OSPL_HOME'], 'examples', 'services', 'dbmsconnect', self.sp_uri)
self.runDBMSConnect = self.host.runExample(self.expath, self.name, "")
def runExample(self):
print "In runExample for " + self.expath + ": " + self.name
currPath = os.getcwd()
try:
self.exdir = "servicesdbmsconnectSQLCPPODBC"
exSfx = ""
if self.host.isWindows():
exSfx = ".exe"
os.putenv("ODBC_LIB_NAME", "odbc32")
else:
os.putenv("ODBC_LIB_NAME", "odbc")
msg = "NONE"
result = "PASS"
dsn = self.odbcMsgBoard_params[0]
os.putenv("MY_DSN", dsn);
os.environ["MY_DSN"]= dsn;
os.putenv("OSPL_URI", self.uri)
os.environ["OSPL_URI"] = self.uri
try:
self.convertConfig()
self.setLogPathAndLogs("", "")
odbcMsgBoardLog = os.path.join(self.pPath, 'odbcMsgBoard.log')
odbcChatter1Log = os.path.join(self.pPath, 'odbcChatter1.log')
odbcChatter2Log = os.path.join(self.pPath, 'odbcChatter2.log')
odbcChatterQuitLog = os.path.join(self.pPath, 'odbcChatterQuit.log')
cppMsgBoardLog = os.path.join(self.pPath, 'cppMsgBoard.log')
cppChatter1Log = os.path.join(self.pPath, 'cppChatter1.log')
cppChatter2Log = os.path.join(self.pPath, 'cppChatter2.log')
cppChatterQuitLog = os.path.join(self.pPath, 'cppChatterQuit.log')
with open ('examples.json') as data_file:
data = json.load(data_file)
odbcMsgBoardName = data[self.expath][self.name]["executables"]["odbc"]["msgBoardName"]
odbcChatterName = data[self.expath][self.name]["executables"]["odbc"]["chatterName"]
cppMsgBoardName = data[self.expath][self.name]["executables"]["cpp"]["msgBoardName"]
cppChatterName = data[self.expath][self.name]["executables"]["cpp"]["chatterName"]
odbcmsgboard_conds_file = data[self.expath][self.name]["log_conditions_file"]["odbcmsgboard_conds"]
cppmsgboard_conds_file = data[self.expath][self.name]["log_conditions_file"]["msgboard_conds"]
odbcchatter_conds_file = data[self.expath][self.name]["log_conditions_file"]["odbcchatter_conds"]
chatter_conds_file = data[self.expath][self.name]["log_conditions_file"]["chatter_conds"]
odbcmsgboard_conds = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'yaml', odbcmsgboard_conds_file)
odbcchatter_conds = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'yaml', odbcchatter_conds_file)
cppmsgboard_conds = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'yaml', cppmsgboard_conds_file)
cppchatter_conds = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'yaml', chatter_conds_file)
if odbcMsgBoardName != "":
if self.classpath == "":
odbcMsgBoardExe = os.path.join(self.pPath, odbcMsgBoardName) + exSfx
if not os.path.isfile (odbcMsgBoardExe):
msg = "MissingExecutable: " + odbcMsgBoardExe
else:
odbcMsgBoardExe = odbcMsgBoardName
if odbcChatterName != "":
if self.classpath == "":
odbcChatterNameExe = os.path.join(self.pPath, odbcChatterName) + exSfx
if not os.path.isfile (odbcChatterNameExe):
msg = "MissingExecutable: " + odbcChatterNameExe
else:
odbcChatterNameExe = odbcChatterName
cppPath = os.path.join(os.environ['OSPL_HOME'], 'examples', "dcps", "Tutorial", "cpp", "standalone")
if cppMsgBoardName != "":
if self.classpath == "":
cppMsgBoardExe = os.path.join(cppPath, cppMsgBoardName) + exSfx
if not os.path.isfile (cppMsgBoardExe):
msg = "MissingExecutable: " + cppMsgBoardExe
else:
cppMsgBoardExe = cppMsgBoardName
if cppChatterName != "":
if self.classpath == "":
cppChatterNameExe = os.path.join(cppPath, cppChatterName) + exSfx
if not os.path.isfile (cppChatterNameExe):
msg = "MissingExecutable: " + cppChatterNameExe
else:
cppChatterNameExe = cppChatterName
if msg == "NONE":
odbcMsgBoard_Thread = ExeThread(self.classpath, odbcMsgBoardLog, "", odbcMsgBoardExe, self.odbcMsgBoard_params, self.example_timeout * 2)
odbcChatter1_Thread = ExeThread(self.classpath, odbcChatter1Log, "", odbcChatterNameExe, self.odbcChatter1_params, self.example_timeout)
odbcChatter2_Thread = ExeThread(self.classpath, odbcChatter2Log, "", odbcChatterNameExe, self.odbcChatter2_params, self.example_timeout)
cppMsgBoard_Thread = ExeThread(self.classpath, cppMsgBoardLog, "", cppMsgBoardExe, "", self.example_timeout * 2)
cppChatter1_Thread = ExeThread(self.classpath, cppChatter1Log, "", cppChatterNameExe, self.cppChatter1_params, self.example_timeout)
cppChatter2_Thread = ExeThread(self.classpath, cppChatter2Log, "", cppChatterNameExe, self.cppChatter2_params, self.example_timeout)
odbcChatterQuit_Thread = ExeThread(self.classpath, odbcChatterQuitLog, "", odbcChatterNameExe, self.odbcChatterQuit_params, self.example_timeout)
cppChatterQuit_Thread = ExeThread(self.classpath, cppChatterQuitLog, "", cppChatterNameExe, self.cppChatterQuit_params, self.example_timeout)
os.chdir(self.pPath)
self.startOSPL()
cppMsgBoard_Thread.start()
odbcMsgBoard_Thread.start()
time.sleep(5)
odbcChatter1_Thread.start()
odbcChatter2_Thread.start()
cppChatter1_Thread.start()
cppChatter2_Thread.start()
odbcChatter1_Thread.join(self.example_timeout)
odbcChatter2_Thread.join(self.example_timeout)
cppChatter1_Thread.join(self.example_timeout)
cppChatter2_Thread.join(self.example_timeout)
time.sleep(10)
odbcChatterQuit_Thread.start()
cppChatterQuit_Thread.start()
odbcChatterQuit_Thread.join(self.example_timeout)
cppChatterQuit_Thread.join(self.example_timeout)
cppMsgBoard_Thread.join(self.example_timeout)
odbcMsgBoard_Thread.join(self.example_timeout)
except Exception as ex:
msg = "Exception running ", str(ex)
try:
self.stopOSPL()
except Exception as ex:
print "Exception stopping OpenSplice ", str(ex)
if msg == "NONE":
try:
#Allow time for all messages to be written to log
time.sleep (15)
super(dbmsconnect, self).copyLogs()
if os.path.isfile (self.ospl_error_log):
msg = "ospl-error.log found"
print "checking odbcMsgBoardLog with odbcmsgboard_conds", odbcMsgBoardLog, odbcmsgboard_conds
self.checkResults(odbcMsgBoardLog, odbcmsgboard_conds)
print "checking odbcChatter1Log with odbcchatter_conds", odbcChatter1Log, odbcchatter_conds
self.checkResults(odbcChatter1Log, odbcchatter_conds)
print "checking odbcChatter2Log with odbcchatter_conds", odbcChatter2Log, odbcchatter_conds
self.checkResults(odbcChatter2Log, odbcchatter_conds)
self.checkResults(cppMsgBoardLog, cppmsgboard_conds)
self.checkResults(cppChatter1Log, cppchatter_conds)
self.checkResults(cppChatter2Log, cppchatter_conds)
self.checkOSPLInfoLog(self.ospl_info_log)
except LogCheckFail as lf:
reason = str(lf)
if "OpenSpliceDDS Warnings" in reason:
msg = "LogCheckFail: OpenSpliceDDS Warnings in ospl-info.log"
else:
msg = "LogCheckFail: " + str(lf)
except Exception:
msg = "Exception checking logs " + str(sys.exc_info()[0])
logdir = os.path.join(os.environ['LOGDIR'], "examples", "run_" + os.environ['EXRUNTYPE'], self.exdir)
dbmsconnLog = os.path.join(self.pPath, 'dbmsconnect.log')
print "dbmsconnect.log is ", dbmsconnLog
copy(dbmsconnLog, logdir)
if msg != "NONE":
result = "FAIL"
try:
self.writeResult (result, self.exdir, "", msg)
except Exception as ex:
print "Exception writing result", str(ex)
try:
self.cleanUp()
except Exception as ex:
print "Exception cleaning up", str(ex)
except Exception as ex:
print "Unexpected exception ", str(ex)
finally:
os.chdir(currPath)
def convertConfig(self):
if os.environ['EXRUNTYPE'] == "shm":
uri = self.shm_uri
else:
uri = self.sp_uri
fcfg = os.path.join(os.environ['OSPL_HOME'], 'examples', 'services', 'dbmsconnect', uri)
forig = os.path.join(os.environ['OSPL_HOME'], 'examples', 'services', 'dbmsconnect', uri+'.orig')
os.rename(fcfg, forig)
if self.host.name != "default":
hn = self.host.name
else:
hn = platform.uname()[1]
prefix = hn[:16].replace('-', '_') + '_'
fout = open(fcfg, "w")
for line in fileinput.input(forig):
fout.write(line.replace("Sql", prefix))
fout.close()
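# Illustrative sketch (not part of the original source): what convertConfig()
# above does to the example config, assuming the hostname is "build-host42":
#
#   prefix = "build_host42_"   # hostname truncated to 16 chars, '-' -> '_'
#   every "Sql" occurrence in the copied config becomes the prefix,
#   e.g. "SqlCHAT_MESSAGE" -> "build_host42_CHAT_MESSAGE"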
| gpl-3.0 |
MakeHer/edx-platform | cms/djangoapps/contentstore/views/tests/test_course_index.py | 25 | 36973 | """
Unit tests for getting the list of courses and the course outline.
"""
import ddt
import json
import lxml
import datetime
import mock
import pytz
from django.conf import settings
from django.core.exceptions import PermissionDenied
from django.test.utils import override_settings
from django.utils.translation import ugettext as _
from contentstore.courseware_index import CoursewareSearchIndexer, SearchIndexingError
from contentstore.tests.utils import CourseTestCase
from contentstore.utils import reverse_course_url, reverse_library_url, add_instructor, reverse_usage_url
from contentstore.views.course import (
course_outline_initial_state, reindex_course_and_check_access, _deprecated_blocks_info
)
from contentstore.views.item import create_xblock_info, VisibilityState
from course_action_state.managers import CourseRerunUIStateManager
from course_action_state.models import CourseRerunState
from opaque_keys.edx.locator import CourseLocator
from search.api import perform_search
from student.auth import has_course_author_access
from student.tests.factories import UserFactory
from util.date_utils import get_default_time_display
from xmodule.modulestore import ModuleStoreEnum
from xmodule.modulestore.exceptions import ItemNotFoundError
from xmodule.modulestore.django import modulestore
from xmodule.modulestore.tests.factories import CourseFactory, ItemFactory, LibraryFactory
class TestCourseIndex(CourseTestCase):
"""
Unit tests for getting the list of courses and the course outline.
"""
def setUp(self):
"""
Add a course with odd characters in the fields
"""
super(TestCourseIndex, self).setUp()
# had a problem where index showed course but has_access failed to retrieve it for non-staff
self.odd_course = CourseFactory.create(
org='test.org_1-2',
number='test-2.3_course',
display_name='dotted.course.name-2',
)
def check_index_and_outline(self, authed_client):
"""
Test getting the list of courses and then pulling up their outlines
"""
index_url = '/home/'
index_response = authed_client.get(index_url, {}, HTTP_ACCEPT='text/html')
parsed_html = lxml.html.fromstring(index_response.content)
course_link_eles = parsed_html.find_class('course-link')
self.assertGreaterEqual(len(course_link_eles), 2)
for link in course_link_eles:
self.assertRegexpMatches(
link.get("href"),
'course/{}'.format(settings.COURSE_KEY_PATTERN)
)
# now test that url
outline_response = authed_client.get(link.get("href"), {}, HTTP_ACCEPT='text/html')
# ensure it has the expected 2 self referential links
outline_parsed = lxml.html.fromstring(outline_response.content)
outline_link = outline_parsed.find_class('course-link')[0]
self.assertEqual(outline_link.get("href"), link.get("href"))
course_menu_link = outline_parsed.find_class('nav-course-courseware-outline')[0]
self.assertEqual(course_menu_link.find("a").get("href"), link.get("href"))
def test_libraries_on_course_index(self):
"""
Test getting the list of libraries from the course listing page
"""
# Add a library:
lib1 = LibraryFactory.create()
index_url = '/home/'
index_response = self.client.get(index_url, {}, HTTP_ACCEPT='text/html')
parsed_html = lxml.html.fromstring(index_response.content)
library_link_elements = parsed_html.find_class('library-link')
self.assertEqual(len(library_link_elements), 1)
link = library_link_elements[0]
self.assertEqual(
link.get("href"),
reverse_library_url('library_handler', lib1.location.library_key),
)
# now test that url
outline_response = self.client.get(link.get("href"), {}, HTTP_ACCEPT='text/html')
self.assertEqual(outline_response.status_code, 200)
def test_is_staff_access(self):
"""
Test that people with is_staff see the courses and can navigate into them
"""
self.check_index_and_outline(self.client)
def test_negative_conditions(self):
"""
        Test the error conditions for access.
"""
outline_url = reverse_course_url('course_handler', self.course.id)
# register a non-staff member and try to delete the course branch
non_staff_client, _ = self.create_non_staff_authed_user_client()
response = non_staff_client.delete(outline_url, {}, HTTP_ACCEPT='application/json')
self.assertEqual(response.status_code, 403)
def test_course_staff_access(self):
"""
Make and register course_staff and ensure they can access the courses
"""
course_staff_client, course_staff = self.create_non_staff_authed_user_client()
for course in [self.course, self.odd_course]:
permission_url = reverse_course_url('course_team_handler', course.id, kwargs={'email': course_staff.email})
self.client.post(
permission_url,
data=json.dumps({"role": "staff"}),
content_type="application/json",
HTTP_ACCEPT="application/json",
)
# test access
self.check_index_and_outline(course_staff_client)
def test_json_responses(self):
outline_url = reverse_course_url('course_handler', self.course.id)
chapter = ItemFactory.create(parent_location=self.course.location, category='chapter', display_name="Week 1")
lesson = ItemFactory.create(parent_location=chapter.location, category='sequential', display_name="Lesson 1")
subsection = ItemFactory.create(
parent_location=lesson.location,
category='vertical',
display_name='Subsection 1'
)
ItemFactory.create(parent_location=subsection.location, category="video", display_name="My Video")
resp = self.client.get(outline_url, HTTP_ACCEPT='application/json')
json_response = json.loads(resp.content)
# First spot check some values in the root response
self.assertEqual(json_response['category'], 'course')
self.assertEqual(json_response['id'], unicode(self.course.location))
self.assertEqual(json_response['display_name'], self.course.display_name)
self.assertTrue(json_response['published'])
self.assertIsNone(json_response['visibility_state'])
# Now verify the first child
children = json_response['child_info']['children']
self.assertTrue(len(children) > 0)
first_child_response = children[0]
self.assertEqual(first_child_response['category'], 'chapter')
self.assertEqual(first_child_response['id'], unicode(chapter.location))
self.assertEqual(first_child_response['display_name'], 'Week 1')
self.assertTrue(json_response['published'])
self.assertEqual(first_child_response['visibility_state'], VisibilityState.unscheduled)
self.assertTrue(len(first_child_response['child_info']['children']) > 0)
# Finally, validate the entire response for consistency
self.assert_correct_json_response(json_response)
def test_notifications_handler_get(self):
state = CourseRerunUIStateManager.State.FAILED
action = CourseRerunUIStateManager.ACTION
should_display = True
# try when no notification exists
notification_url = reverse_course_url('course_notifications_handler', self.course.id, kwargs={
'action_state_id': 1,
})
resp = self.client.get(notification_url, HTTP_ACCEPT='application/json')
# verify that we get an empty dict out
self.assertEquals(resp.status_code, 400)
# create a test notification
rerun_state = CourseRerunState.objects.update_state(
course_key=self.course.id,
new_state=state,
allow_not_found=True
)
CourseRerunState.objects.update_should_display(
entry_id=rerun_state.id,
user=UserFactory(),
should_display=should_display
)
# try to get information on this notification
notification_url = reverse_course_url('course_notifications_handler', self.course.id, kwargs={
'action_state_id': rerun_state.id,
})
resp = self.client.get(notification_url, HTTP_ACCEPT='application/json')
json_response = json.loads(resp.content)
self.assertEquals(json_response['state'], state)
self.assertEquals(json_response['action'], action)
self.assertEquals(json_response['should_display'], should_display)
def test_notifications_handler_dismiss(self):
state = CourseRerunUIStateManager.State.FAILED
should_display = True
rerun_course_key = CourseLocator(org='testx', course='test_course', run='test_run')
# add an instructor to this course
user2 = UserFactory()
add_instructor(rerun_course_key, self.user, user2)
# create a test notification
rerun_state = CourseRerunState.objects.update_state(
course_key=rerun_course_key,
new_state=state,
allow_not_found=True
)
CourseRerunState.objects.update_should_display(
entry_id=rerun_state.id,
user=user2,
should_display=should_display
)
# try to get information on this notification
notification_dismiss_url = reverse_course_url('course_notifications_handler', self.course.id, kwargs={
'action_state_id': rerun_state.id,
})
resp = self.client.delete(notification_dismiss_url)
self.assertEquals(resp.status_code, 200)
with self.assertRaises(CourseRerunState.DoesNotExist):
            # dismissed notifications are deleted
CourseRerunState.objects.get(id=rerun_state.id)
self.assertFalse(has_course_author_access(user2, rerun_course_key))
def assert_correct_json_response(self, json_response):
"""
Asserts that the JSON response is syntactically consistent
"""
self.assertIsNotNone(json_response['display_name'])
self.assertIsNotNone(json_response['id'])
self.assertIsNotNone(json_response['category'])
self.assertTrue(json_response['published'])
if json_response.get('child_info', None):
for child_response in json_response['child_info']['children']:
self.assert_correct_json_response(child_response)
def test_course_updates_invalid_url(self):
"""
Tests the error conditions for the invalid course updates URL.
"""
# Testing the response code by passing slash separated course id whose format is valid but no course
# having this id exists.
invalid_course_key = '{}_blah_blah_blah'.format(self.course.id)
course_updates_url = reverse_course_url('course_info_handler', invalid_course_key)
response = self.client.get(course_updates_url)
self.assertEqual(response.status_code, 404)
# Testing the response code by passing split course id whose format is valid but no course
# having this id exists.
split_course_key = CourseLocator(org='orgASD', course='course_01213', run='Run_0_hhh_hhh_hhh')
course_updates_url_split = reverse_course_url('course_info_handler', split_course_key)
response = self.client.get(course_updates_url_split)
self.assertEqual(response.status_code, 404)
# Testing the response by passing split course id whose format is invalid.
invalid_course_id = 'invalid.course.key/{}'.format(split_course_key)
course_updates_url_split = reverse_course_url('course_info_handler', invalid_course_id)
response = self.client.get(course_updates_url_split)
self.assertEqual(response.status_code, 404)
def test_course_index_invalid_url(self):
"""
Tests the error conditions for the invalid course index URL.
"""
# Testing the response code by passing slash separated course key, no course
# having this key exists.
invalid_course_key = '{}_some_invalid_run'.format(self.course.id)
course_outline_url = reverse_course_url('course_handler', invalid_course_key)
response = self.client.get_html(course_outline_url)
self.assertEqual(response.status_code, 404)
# Testing the response code by passing split course key, no course
# having this key exists.
split_course_key = CourseLocator(org='invalid_org', course='course_01111', run='Run_0_invalid')
course_outline_url_split = reverse_course_url('course_handler', split_course_key)
response = self.client.get_html(course_outline_url_split)
self.assertEqual(response.status_code, 404)
def test_course_outline_with_display_course_number_as_none(self):
"""
Tests course outline when 'display_coursenumber' field is none.
"""
# Change 'display_coursenumber' field to None and update the course.
self.course.display_coursenumber = None
updated_course = self.update_course(self.course, self.user.id)
# Assert that 'display_coursenumber' field has been changed successfully.
self.assertEqual(updated_course.display_coursenumber, None)
# Perform GET request on course outline url with the course id.
course_outline_url = reverse_course_url('course_handler', updated_course.id)
response = self.client.get_html(course_outline_url)
# Assert that response code is 200.
self.assertEqual(response.status_code, 200)
# Assert that 'display_course_number' is being set to "" (as display_coursenumber was None).
self.assertIn('display_course_number: ""', response.content)
@ddt.ddt
class TestCourseOutline(CourseTestCase):
"""
Unit tests for the course outline.
"""
def setUp(self):
"""
        Set up for the course outline tests.
"""
super(TestCourseOutline, self).setUp()
self.chapter = ItemFactory.create(
parent_location=self.course.location, category='chapter', display_name="Week 1"
)
self.sequential = ItemFactory.create(
parent_location=self.chapter.location, category='sequential', display_name="Lesson 1"
)
self.vertical = ItemFactory.create(
parent_location=self.sequential.location, category='vertical', display_name='Subsection 1'
)
self.video = ItemFactory.create(
parent_location=self.vertical.location, category="video", display_name="My Video"
)
def test_json_responses(self):
"""
Verify the JSON responses returned for the course.
"""
outline_url = reverse_course_url('course_handler', self.course.id)
resp = self.client.get(outline_url, HTTP_ACCEPT='application/json')
json_response = json.loads(resp.content)
# First spot check some values in the root response
self.assertEqual(json_response['category'], 'course')
self.assertEqual(json_response['id'], unicode(self.course.location))
self.assertEqual(json_response['display_name'], self.course.display_name)
self.assertTrue(json_response['published'])
self.assertIsNone(json_response['visibility_state'])
# Now verify the first child
children = json_response['child_info']['children']
self.assertTrue(len(children) > 0)
first_child_response = children[0]
self.assertEqual(first_child_response['category'], 'chapter')
self.assertEqual(first_child_response['id'], unicode(self.chapter.location))
self.assertEqual(first_child_response['display_name'], 'Week 1')
self.assertTrue(json_response['published'])
self.assertEqual(first_child_response['visibility_state'], VisibilityState.unscheduled)
self.assertTrue(len(first_child_response['child_info']['children']) > 0)
# Finally, validate the entire response for consistency
self.assert_correct_json_response(json_response)
def assert_correct_json_response(self, json_response):
"""
Asserts that the JSON response is syntactically consistent
"""
self.assertIsNotNone(json_response['display_name'])
self.assertIsNotNone(json_response['id'])
self.assertIsNotNone(json_response['category'])
self.assertTrue(json_response['published'])
if json_response.get('child_info', None):
for child_response in json_response['child_info']['children']:
self.assert_correct_json_response(child_response)
def test_course_outline_initial_state(self):
course_module = modulestore().get_item(self.course.location)
course_structure = create_xblock_info(
course_module,
include_child_info=True,
include_children_predicate=lambda xblock: not xblock.category == 'vertical'
)
# Verify that None is returned for a non-existent locator
self.assertIsNone(course_outline_initial_state('no-such-locator', course_structure))
# Verify that the correct initial state is returned for the test chapter
chapter_locator = unicode(self.chapter.location)
initial_state = course_outline_initial_state(chapter_locator, course_structure)
self.assertEqual(initial_state['locator_to_show'], chapter_locator)
expanded_locators = initial_state['expanded_locators']
self.assertIn(unicode(self.sequential.location), expanded_locators)
self.assertIn(unicode(self.vertical.location), expanded_locators)
def test_start_date_on_page(self):
"""
Verify that the course start date is included on the course outline page.
"""
def _get_release_date(response):
"""Return the release date from the course page"""
parsed_html = lxml.html.fromstring(response.content)
return parsed_html.find_class('course-status')[0].find_class('status-release-value')[0].text_content()
def _assert_settings_link_present(response):
"""
Asserts there's a course settings link on the course page by the course release date.
"""
parsed_html = lxml.html.fromstring(response.content)
settings_link = parsed_html.find_class('course-status')[0].find_class('action-edit')[0].find('a')
self.assertIsNotNone(settings_link)
self.assertEqual(settings_link.get('href'), reverse_course_url('settings_handler', self.course.id))
outline_url = reverse_course_url('course_handler', self.course.id)
response = self.client.get(outline_url, {}, HTTP_ACCEPT='text/html')
# A course with the default release date should display as "Unscheduled"
self.assertEqual(_get_release_date(response), 'Unscheduled')
_assert_settings_link_present(response)
self.course.start = datetime.datetime(2014, 1, 1, tzinfo=pytz.utc)
modulestore().update_item(self.course, ModuleStoreEnum.UserID.test)
response = self.client.get(outline_url, {}, HTTP_ACCEPT='text/html')
self.assertEqual(_get_release_date(response), get_default_time_display(self.course.start))
_assert_settings_link_present(response)
def _create_test_data(self, course_module, create_blocks=False, publish=True, block_types=None):
"""
Create data for test.
"""
if create_blocks:
for block_type in block_types:
ItemFactory.create(
parent_location=self.vertical.location,
category=block_type,
display_name='{} Problem'.format(block_type)
)
if not publish:
self.store.unpublish(self.vertical.location, self.user.id)
course_module.advanced_modules.extend(block_types)
def _verify_deprecated_info(self, course_id, advanced_modules, info, deprecated_block_types):
"""
Verify deprecated info.
"""
expected_blocks = []
for block_type in deprecated_block_types:
expected_blocks.append(
[
reverse_usage_url('container_handler', self.vertical.location),
'{} Problem'.format(block_type)
]
)
self.assertEqual(info['block_types'], deprecated_block_types)
self.assertEqual(
info['block_types_enabled'],
any(component in advanced_modules for component in deprecated_block_types)
)
self.assertItemsEqual(info['blocks'], expected_blocks)
self.assertEqual(
info['advance_settings_url'],
reverse_course_url('advanced_settings_handler', course_id)
)
@ddt.data(
{'publish': True},
{'publish': False},
)
@ddt.unpack
def test_verify_deprecated_warning_message_with_single_feature(self, publish):
"""
Verify deprecated warning info for single deprecated feature.
"""
block_types = ['notes']
with override_settings(DEPRECATED_BLOCK_TYPES=block_types):
course_module = modulestore().get_item(self.course.location)
self._create_test_data(course_module, create_blocks=True, block_types=block_types, publish=publish)
info = _deprecated_blocks_info(course_module, block_types)
self._verify_deprecated_info(
course_module.id,
course_module.advanced_modules,
info,
block_types
)
def test_verify_deprecated_warning_message_with_multiple_features(self):
"""
Verify deprecated warning info for multiple deprecated features.
"""
block_types = ['notes', 'lti']
with override_settings(DEPRECATED_BLOCK_TYPES=block_types):
course_module = modulestore().get_item(self.course.location)
self._create_test_data(course_module, create_blocks=True, block_types=block_types)
info = _deprecated_blocks_info(course_module, block_types)
self._verify_deprecated_info(course_module.id, course_module.advanced_modules, info, block_types)
@ddt.data(
{'delete_vertical': True},
{'delete_vertical': False},
)
@ddt.unpack
def test_deprecated_blocks_list_updated_correctly(self, delete_vertical):
"""
Verify that deprecated blocks list shown on banner is updated correctly.
Here is the scenario:
        The list of deprecated blocks shown on the banner contains published
        and un-published blocks. That list should be updated when we delete
        un-published block(s), and the behavior should be the same whether we
        delete an unpublished vertical or an unpublished problem.
"""
block_types = ['notes']
course_module = modulestore().get_item(self.course.location)
vertical1 = ItemFactory.create(
parent_location=self.sequential.location, category='vertical', display_name='Vert1 Subsection1'
)
problem1 = ItemFactory.create(
parent_location=vertical1.location,
category='notes',
display_name='notes problem in vert1',
publish_item=False
)
info = _deprecated_blocks_info(course_module, block_types)
# info['blocks'] should be empty here because there is nothing
# published or un-published present
self.assertEqual(info['blocks'], [])
vertical2 = ItemFactory.create(
parent_location=self.sequential.location, category='vertical', display_name='Vert2 Subsection1'
)
ItemFactory.create(
parent_location=vertical2.location,
category='notes',
display_name='notes problem in vert2',
            publish_item=True
)
# At this point CourseStructure will contain both the above
# published and un-published verticals
info = _deprecated_blocks_info(course_module, block_types)
self.assertItemsEqual(
info['blocks'],
[
[reverse_usage_url('container_handler', vertical1.location), 'notes problem in vert1'],
[reverse_usage_url('container_handler', vertical2.location), 'notes problem in vert2']
]
)
# Delete the un-published vertical or problem so that CourseStructure updates its data
if delete_vertical:
self.store.delete_item(vertical1.location, self.user.id)
else:
self.store.delete_item(problem1.location, self.user.id)
info = _deprecated_blocks_info(course_module, block_types)
# info['blocks'] should only contain the info about vertical2 which is published.
# There shouldn't be any info present about un-published vertical1
self.assertEqual(
info['blocks'],
[[reverse_usage_url('container_handler', vertical2.location), 'notes problem in vert2']]
)
class TestCourseReIndex(CourseTestCase):
"""
Unit tests for the course outline.
"""
SUCCESSFUL_RESPONSE = _("Course has been successfully reindexed.")
def setUp(self):
"""
        Set up for the course outline tests.
"""
super(TestCourseReIndex, self).setUp()
self.course.start = datetime.datetime(2014, 1, 1, tzinfo=pytz.utc)
modulestore().update_item(self.course, self.user.id)
self.chapter = ItemFactory.create(
parent_location=self.course.location, category='chapter', display_name="Week 1"
)
self.sequential = ItemFactory.create(
parent_location=self.chapter.location, category='sequential', display_name="Lesson 1"
)
self.vertical = ItemFactory.create(
parent_location=self.sequential.location, category='vertical', display_name='Subsection 1'
)
self.video = ItemFactory.create(
parent_location=self.vertical.location, category="video", display_name="My Video"
)
self.html = ItemFactory.create(
parent_location=self.vertical.location, category="html", display_name="My HTML",
data="<div>This is my unique HTML content</div>",
)
def test_reindex_course(self):
"""
Verify that course gets reindexed.
"""
index_url = reverse_course_url('course_search_index_handler', self.course.id)
response = self.client.get(index_url, {}, HTTP_ACCEPT='application/json')
        # The reindex handler should return the success message
self.assertIn(self.SUCCESSFUL_RESPONSE, response.content)
self.assertEqual(response.status_code, 200)
response = self.client.post(index_url, {}, HTTP_ACCEPT='application/json')
self.assertEqual(response.content, '')
self.assertEqual(response.status_code, 405)
self.client.logout()
response = self.client.get(index_url, {}, HTTP_ACCEPT='application/json')
self.assertEqual(response.status_code, 302)
def test_negative_conditions(self):
"""
        Test the error conditions for access.
"""
index_url = reverse_course_url('course_search_index_handler', self.course.id)
# register a non-staff member and try to delete the course branch
non_staff_client, _ = self.create_non_staff_authed_user_client()
response = non_staff_client.get(index_url, {}, HTTP_ACCEPT='application/json')
self.assertEqual(response.status_code, 403)
def test_empty_content_type(self):
"""
Test json content type is set if '' is selected
"""
index_url = reverse_course_url('course_search_index_handler', self.course.id)
response = self.client.get(index_url, {}, CONTENT_TYPE='')
        # With an empty content type the handler should still return the success message
self.assertIn(self.SUCCESSFUL_RESPONSE, response.content)
self.assertEqual(response.status_code, 200)
@mock.patch('xmodule.html_module.HtmlDescriptor.index_dictionary')
def test_reindex_course_search_index_error(self, mock_index_dictionary):
"""
Test json response with mocked error data for html
"""
# set mocked exception response
err = SearchIndexingError
mock_index_dictionary.return_value = err
index_url = reverse_course_url('course_search_index_handler', self.course.id)
# Start manual reindex and check error in response
response = self.client.get(index_url, {}, HTTP_ACCEPT='application/json')
self.assertEqual(response.status_code, 500)
def test_reindex_json_responses(self):
"""
Test json response with real data
"""
# results are indexed because they are published from ItemFactory
response = perform_search(
"unique",
user=self.user,
size=10,
from_=0,
course_id=unicode(self.course.id))
self.assertEqual(response['total'], 1)
# Start manual reindex
reindex_course_and_check_access(self.course.id, self.user)
# Check results remain the same
response = perform_search(
"unique",
user=self.user,
size=10,
from_=0,
course_id=unicode(self.course.id))
self.assertEqual(response['total'], 1)
@mock.patch('xmodule.video_module.VideoDescriptor.index_dictionary')
def test_reindex_video_error_json_responses(self, mock_index_dictionary):
"""
Test json response with mocked error data for video
"""
# results are indexed because they are published from ItemFactory
response = perform_search(
"unique",
user=self.user,
size=10,
from_=0,
course_id=unicode(self.course.id))
self.assertEqual(response['total'], 1)
# set mocked exception response
err = SearchIndexingError
mock_index_dictionary.return_value = err
# Start manual reindex and check error in response
with self.assertRaises(SearchIndexingError):
reindex_course_and_check_access(self.course.id, self.user)
@mock.patch('xmodule.html_module.HtmlDescriptor.index_dictionary')
def test_reindex_html_error_json_responses(self, mock_index_dictionary):
"""
Test json response with mocked error data for html
"""
# results are indexed because they are published from ItemFactory
response = perform_search(
"unique",
user=self.user,
size=10,
from_=0,
course_id=unicode(self.course.id))
self.assertEqual(response['total'], 1)
# set mocked exception response
err = SearchIndexingError
mock_index_dictionary.return_value = err
# Start manual reindex and check error in response
with self.assertRaises(SearchIndexingError):
reindex_course_and_check_access(self.course.id, self.user)
@mock.patch('xmodule.seq_module.SequenceDescriptor.index_dictionary')
def test_reindex_seq_error_json_responses(self, mock_index_dictionary):
"""
Test json response with mocked error data for sequence
"""
# results are indexed because they are published from ItemFactory
response = perform_search(
"unique",
user=self.user,
size=10,
from_=0,
course_id=unicode(self.course.id))
self.assertEqual(response['total'], 1)
# set mocked exception response
err = Exception
mock_index_dictionary.return_value = err
# Start manual reindex and check error in response
with self.assertRaises(SearchIndexingError):
reindex_course_and_check_access(self.course.id, self.user)
@mock.patch('xmodule.modulestore.mongo.base.MongoModuleStore.get_course')
def test_reindex_no_item(self, mock_get_course):
"""
Test system logs an error if no item found.
"""
# set mocked exception response
err = ItemNotFoundError
mock_get_course.return_value = err
# Start manual reindex and check error in response
with self.assertRaises(SearchIndexingError):
reindex_course_and_check_access(self.course.id, self.user)
def test_reindex_no_permissions(self):
# register a non-staff member and try to delete the course branch
user2 = UserFactory()
with self.assertRaises(PermissionDenied):
reindex_course_and_check_access(self.course.id, user2)
def test_indexing_responses(self):
"""
Test do_course_reindex response with real data
"""
# results are indexed because they are published from ItemFactory
response = perform_search(
"unique",
user=self.user,
size=10,
from_=0,
course_id=unicode(self.course.id))
self.assertEqual(response['total'], 1)
# Start manual reindex
CoursewareSearchIndexer.do_course_reindex(modulestore(), self.course.id)
# Check results are the same following reindex
response = perform_search(
"unique",
user=self.user,
size=10,
from_=0,
course_id=unicode(self.course.id))
self.assertEqual(response['total'], 1)
@mock.patch('xmodule.video_module.VideoDescriptor.index_dictionary')
def test_indexing_video_error_responses(self, mock_index_dictionary):
"""
Test do_course_reindex response with mocked error data for video
"""
# results are indexed because they are published from ItemFactory
response = perform_search(
"unique",
user=self.user,
size=10,
from_=0,
course_id=unicode(self.course.id))
self.assertEqual(response['total'], 1)
# set mocked exception response
err = Exception
mock_index_dictionary.return_value = err
# Start manual reindex and check error in response
with self.assertRaises(SearchIndexingError):
CoursewareSearchIndexer.do_course_reindex(modulestore(), self.course.id)
@mock.patch('xmodule.html_module.HtmlDescriptor.index_dictionary')
def test_indexing_html_error_responses(self, mock_index_dictionary):
"""
Test do_course_reindex response with mocked error data for html
"""
# results are indexed because they are published from ItemFactory
response = perform_search(
"unique",
user=self.user,
size=10,
from_=0,
course_id=unicode(self.course.id))
self.assertEqual(response['total'], 1)
# set mocked exception response
err = Exception
mock_index_dictionary.return_value = err
# Start manual reindex and check error in response
with self.assertRaises(SearchIndexingError):
CoursewareSearchIndexer.do_course_reindex(modulestore(), self.course.id)
@mock.patch('xmodule.seq_module.SequenceDescriptor.index_dictionary')
def test_indexing_seq_error_responses(self, mock_index_dictionary):
"""
Test do_course_reindex response with mocked error data for sequence
"""
# results are indexed because they are published from ItemFactory
response = perform_search(
"unique",
user=self.user,
size=10,
from_=0,
course_id=unicode(self.course.id))
self.assertEqual(response['total'], 1)
# set mocked exception response
err = Exception
mock_index_dictionary.return_value = err
# Start manual reindex and check error in response
with self.assertRaises(SearchIndexingError):
CoursewareSearchIndexer.do_course_reindex(modulestore(), self.course.id)
@mock.patch('xmodule.modulestore.mongo.base.MongoModuleStore.get_course')
def test_indexing_no_item(self, mock_get_course):
"""
Test system logs an error if no item found.
"""
# set mocked exception response
err = ItemNotFoundError
mock_get_course.return_value = err
# Start manual reindex and check error in response
with self.assertRaises(SearchIndexingError):
CoursewareSearchIndexer.do_course_reindex(modulestore(), self.course.id)
| agpl-3.0 |
robbiet480/home-assistant | homeassistant/components/xs1/__init__.py | 8 | 2700 | """Support for the EZcontrol XS1 gateway."""
import asyncio
import logging
import voluptuous as vol
import xs1_api_client
from homeassistant.const import (
CONF_HOST,
CONF_PASSWORD,
CONF_PORT,
CONF_SSL,
CONF_USERNAME,
)
from homeassistant.helpers import discovery
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
_LOGGER = logging.getLogger(__name__)
DOMAIN = "xs1"
ACTUATORS = "actuators"
SENSORS = "sensors"
# define configuration parameters
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Required(CONF_HOST): cv.string,
vol.Optional(CONF_PASSWORD): cv.string,
vol.Optional(CONF_PORT, default=80): cv.string,
vol.Optional(CONF_SSL, default=False): cv.boolean,
vol.Optional(CONF_USERNAME): cv.string,
}
)
},
extra=vol.ALLOW_EXTRA,
)
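# A minimal sketch of a config dict this schema accepts (host and credentials
# are illustrative assumptions, not component defaults):
#
#   {"xs1": {"host": "192.168.1.10", "port": "80", "ssl": False,
#            "username": "admin", "password": "secret"}}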
XS1_COMPONENTS = ["climate", "sensor", "switch"]
# Lock used to limit the amount of concurrent update requests
# as the XS1 Gateway can only handle a very
# small amount of concurrent requests
UPDATE_LOCK = asyncio.Lock()
def setup(hass, config):
"""Set up XS1 Component."""
_LOGGER.debug("Initializing XS1")
host = config[DOMAIN][CONF_HOST]
port = config[DOMAIN][CONF_PORT]
ssl = config[DOMAIN][CONF_SSL]
user = config[DOMAIN].get(CONF_USERNAME)
password = config[DOMAIN].get(CONF_PASSWORD)
# initialize XS1 API
try:
xs1 = xs1_api_client.XS1(
host=host, port=port, ssl=ssl, user=user, password=password
)
except ConnectionError as error:
_LOGGER.error(
"Failed to create XS1 API client because of a connection error: %s", error,
)
return False
_LOGGER.debug("Establishing connection to XS1 gateway and retrieving data...")
hass.data[DOMAIN] = {}
actuators = xs1.get_all_actuators(enabled=True)
sensors = xs1.get_all_sensors(enabled=True)
hass.data[DOMAIN][ACTUATORS] = actuators
hass.data[DOMAIN][SENSORS] = sensors
_LOGGER.debug("Loading components for XS1 platform...")
# Load components for supported devices
for component in XS1_COMPONENTS:
discovery.load_platform(hass, component, DOMAIN, {}, config)
return True
class XS1DeviceEntity(Entity):
"""Representation of a base XS1 device."""
def __init__(self, device):
"""Initialize the XS1 device."""
self.device = device
async def async_update(self):
"""Retrieve latest device state."""
async with UPDATE_LOCK:
await self.hass.async_add_executor_job(self.device.update)
| apache-2.0 |
hendradarwin/VTK | Filters/General/Testing/Python/clipImage.py | 20 | 1534 | #!/usr/bin/env python
import vtk
from vtk.test import Testing
from vtk.util.misc import vtkGetDataRoot
VTK_DATA_ROOT = vtkGetDataRoot()
# create pipeline
#
v16 = vtk.vtkVolume16Reader()
v16.SetDataDimensions(64,64)
v16.GetOutput().SetOrigin(0.0,0.0,0.0)
v16.SetDataByteOrderToLittleEndian()
v16.SetFilePrefix("" + str(VTK_DATA_ROOT) + "/Data/headsq/quarter")
v16.SetImageRange(45,45)
v16.SetDataSpacing(3.2,3.2,1.5)
v16.Update()
# do the pixel clipping
clip = vtk.vtkClipDataSet()
clip.SetInputConnection(v16.GetOutputPort())
clip.SetValue(1000)
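# (Illustrative note, not from the original script: with the default InsideOut
# setting, vtkClipDataSet is expected to keep the cells whose scalar values
# lie above the clip value of 1000 and discard the rest.)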
clipMapper = vtk.vtkDataSetMapper()
clipMapper.SetInputConnection(clip.GetOutputPort())
clipMapper.ScalarVisibilityOff()
clipActor = vtk.vtkActor()
clipActor.SetMapper(clipMapper)
# put an outline around the data
outline = vtk.vtkOutlineFilter()
outline.SetInputConnection(v16.GetOutputPort())
outlineMapper = vtk.vtkPolyDataMapper()
outlineMapper.SetInputConnection(outline.GetOutputPort())
outlineActor = vtk.vtkActor()
outlineActor.SetMapper(outlineMapper)
outlineActor.VisibilityOff()
# Create the RenderWindow, Renderer and both Actors
#
ren1 = vtk.vtkRenderer()
renWin = vtk.vtkRenderWindow()
renWin.AddRenderer(ren1)
iren = vtk.vtkRenderWindowInteractor()
iren.SetRenderWindow(renWin)
# Add the actors to the renderer, set the background and size
#
ren1.AddActor(outlineActor)
ren1.AddActor(clipActor)
ren1.SetBackground(0,0,0)
renWin.SetSize(200,200)
iren.Initialize()
# render the image
#
# prevent the tk window from showing up then start the event loop
# --- end of script --
| bsd-3-clause |
dhruv13J/scikit-learn | sklearn/decomposition/nmf.py | 15 | 19103 | """ Non-negative matrix factorization
"""
# Author: Vlad Niculae
# Lars Buitinck <[email protected]>
# Author: Chih-Jen Lin, National Taiwan University (original projected gradient
# NMF implementation)
# Author: Anthony Di Franco (original Python and NumPy port)
# License: BSD 3 clause
from __future__ import division
from math import sqrt
import warnings
import numpy as np
import scipy.sparse as sp
from scipy.optimize import nnls
from ..base import BaseEstimator, TransformerMixin
from ..utils import check_random_state, check_array
from ..utils.extmath import randomized_svd, safe_sparse_dot, squared_norm
from ..utils.validation import check_is_fitted
def safe_vstack(Xs):
if any(sp.issparse(X) for X in Xs):
return sp.vstack(Xs)
else:
return np.vstack(Xs)
def norm(x):
"""Dot product-based Euclidean norm implementation
See: http://fseoane.net/blog/2011/computing-the-vector-norm/
"""
return sqrt(squared_norm(x))
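# For intuition (illustrative example, not part of the original module):
# norm(np.ones(4)) == sqrt(1 + 1 + 1 + 1) == 2.0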
def trace_dot(X, Y):
"""Trace of np.dot(X, Y.T)."""
return np.dot(X.ravel(), Y.ravel())
def _sparseness(x):
"""Hoyer's measure of sparsity for a vector"""
sqrt_n = np.sqrt(len(x))
return (sqrt_n - np.linalg.norm(x, 1) / norm(x)) / (sqrt_n - 1)
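# Worked examples of Hoyer's measure (illustrative, not from the original
# source): for n == 3, a one-hot vector such as [0, 0, 1] has
# ||x||_1 / ||x||_2 == 1, giving sparseness (sqrt(3) - 1) / (sqrt(3) - 1) == 1.0,
# while the uniform vector np.ones(3) has ||x||_1 / ||x||_2 == sqrt(3),
# giving sparseness 0.0.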
def check_non_negative(X, whom):
X = X.data if sp.issparse(X) else X
if (X < 0).any():
raise ValueError("Negative values in data passed to %s" % whom)
def _initialize_nmf(X, n_components, variant=None, eps=1e-6,
random_state=None):
"""NNDSVD algorithm for NMF initialization.
Computes a good initial guess for the non-negative
rank k matrix approximation for X: X = WH
Parameters
----------
X : array, [n_samples, n_features]
The data matrix to be decomposed.
n_components : array, [n_components, n_features]
The number of components desired in the approximation.
variant : None | 'a' | 'ar'
The variant of the NNDSVD algorithm.
Accepts None, 'a', 'ar'
None: leaves the zero entries as zero
'a': Fills the zero entries with the average of X
'ar': Fills the zero entries with standard normal random variates.
Default: None
eps: float
        Truncate all values less than this in output to zero.
random_state : numpy.RandomState | int, optional
The generator used to fill in the zeros, when using variant='ar'
Default: numpy.random
Returns
-------
(W, H) :
Initial guesses for solving X ~= WH such that
the number of columns in W is n_components.
References
----------
C. Boutsidis, E. Gallopoulos: SVD based initialization: A head start for
nonnegative matrix factorization - Pattern Recognition, 2008
http://tinyurl.com/nndsvd
"""
check_non_negative(X, "NMF initialization")
if variant not in (None, 'a', 'ar'):
raise ValueError("Invalid variant name")
U, S, V = randomized_svd(X, n_components)
W, H = np.zeros(U.shape), np.zeros(V.shape)
# The leading singular triplet is non-negative
# so it can be used as is for initialization.
W[:, 0] = np.sqrt(S[0]) * np.abs(U[:, 0])
H[0, :] = np.sqrt(S[0]) * np.abs(V[0, :])
for j in range(1, n_components):
x, y = U[:, j], V[j, :]
# extract positive and negative parts of column vectors
x_p, y_p = np.maximum(x, 0), np.maximum(y, 0)
x_n, y_n = np.abs(np.minimum(x, 0)), np.abs(np.minimum(y, 0))
# and their norms
x_p_nrm, y_p_nrm = norm(x_p), norm(y_p)
x_n_nrm, y_n_nrm = norm(x_n), norm(y_n)
m_p, m_n = x_p_nrm * y_p_nrm, x_n_nrm * y_n_nrm
# choose update
if m_p > m_n:
u = x_p / x_p_nrm
v = y_p / y_p_nrm
sigma = m_p
else:
u = x_n / x_n_nrm
v = y_n / y_n_nrm
sigma = m_n
lbd = np.sqrt(S[j] * sigma)
W[:, j] = lbd * u
H[j, :] = lbd * v
W[W < eps] = 0
H[H < eps] = 0
if variant == "a":
avg = X.mean()
W[W == 0] = avg
H[H == 0] = avg
elif variant == "ar":
random_state = check_random_state(random_state)
avg = X.mean()
W[W == 0] = abs(avg * random_state.randn(len(W[W == 0])) / 100)
H[H == 0] = abs(avg * random_state.randn(len(H[H == 0])) / 100)
return W, H
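# A usage sketch for this private helper (shapes follow the docstring; the
# data values are illustrative assumptions):
#
#   >>> X = np.abs(np.random.RandomState(0).randn(6, 4))
#   >>> W, H = _initialize_nmf(X, n_components=2, variant='ar', random_state=0)
#   >>> W.shape, H.shape
#   ((6, 2), (2, 4))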
def _nls_subproblem(V, W, H, tol, max_iter, sigma=0.01, beta=0.1):
"""Non-negative least square solver
Solves a non-negative least squares subproblem using the
projected gradient descent algorithm.
min || WH - V ||_2
Parameters
----------
V, W : array-like
Constant matrices.
H : array-like
Initial guess for the solution.
tol : float
Tolerance of the stopping condition.
max_iter : int
Maximum number of iterations before timing out.
sigma : float
Constant used in the sufficient decrease condition checked by the line
search. Smaller values lead to a looser sufficient decrease condition,
thus reducing the time taken by the line search, but potentially
increasing the number of iterations of the projected gradient
procedure. 0.01 is a commonly used value in the optimization
literature.
beta : float
Factor by which the step size is decreased (resp. increased) until
(resp. as long as) the sufficient decrease condition is satisfied.
Larger values allow to find a better step size but lead to longer line
search. 0.1 is a commonly used value in the optimization literature.
Returns
-------
H : array-like
Solution to the non-negative least squares problem.
grad : array-like
The gradient.
n_iter : int
The number of iterations done by the algorithm.
References
----------
C.-J. Lin. Projected gradient methods for non-negative matrix factorization.
Neural Computation, 19(2007), 2756-2779.
http://www.csie.ntu.edu.tw/~cjlin/nmf/
"""
WtV = safe_sparse_dot(W.T, V)
WtW = np.dot(W.T, W)
# values justified in the paper
alpha = 1
for n_iter in range(1, max_iter + 1):
grad = np.dot(WtW, H) - WtV
# The following multiplication with a boolean array is more than twice
# as fast as indexing into grad.
if norm(grad * np.logical_or(grad < 0, H > 0)) < tol:
break
Hp = H
for inner_iter in range(19):
# Gradient step.
Hn = H - alpha * grad
# Projection step.
Hn *= Hn > 0
d = Hn - H
gradd = np.dot(grad.ravel(), d.ravel())
dQd = np.dot(np.dot(WtW, d).ravel(), d.ravel())
suff_decr = (1 - sigma) * gradd + 0.5 * dQd < 0
if inner_iter == 0:
decr_alpha = not suff_decr
if decr_alpha:
if suff_decr:
H = Hn
break
else:
alpha *= beta
elif not suff_decr or (Hp == Hn).all():
H = Hp
break
else:
alpha /= beta
Hp = Hn
if n_iter == max_iter:
warnings.warn("Iteration limit reached in nls subproblem.")
return H, grad, n_iter
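# A small self-check sketch (illustrative assumption: V is exactly factorable,
# so the projected gradient solution should approximately reconstruct it):
#
#   >>> rng = np.random.RandomState(0)
#   >>> W = np.abs(rng.randn(5, 3))
#   >>> V = np.dot(W, np.abs(rng.randn(3, 4)))
#   >>> H, grad, n_iter = _nls_subproblem(V, W, np.ones((3, 4)), 1e-4, 200)
#   >>> np.allclose(np.dot(W, H), V, atol=1e-2)  # expected under these assumptions
#   True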
class ProjectedGradientNMF(BaseEstimator, TransformerMixin):
"""Non-Negative matrix factorization by Projected Gradient (NMF)
Read more in the :ref:`User Guide <NMF>`.
Parameters
----------
n_components : int or None
        Number of components; if n_components is not set, all components
        are kept.
init : 'nndsvd' | 'nndsvda' | 'nndsvdar' | 'random'
Method used to initialize the procedure.
Default: 'nndsvdar' if n_components < n_features, otherwise random.
Valid options::
'nndsvd': Nonnegative Double Singular Value Decomposition (NNDSVD)
initialization (better for sparseness)
'nndsvda': NNDSVD with zeros filled with the average of X
(better when sparsity is not desired)
'nndsvdar': NNDSVD with zeros filled with small random values
(generally faster, less accurate alternative to NNDSVDa
for when sparsity is not desired)
'random': non-negative random matrices
sparseness : 'data' | 'components' | None, default: None
Where to enforce sparsity in the model.
beta : double, default: 1
Degree of sparseness, if sparseness is not None. Larger values mean
more sparseness.
eta : double, default: 0.1
Degree of correctness to maintain, if sparsity is not None. Smaller
values mean larger error.
tol : double, default: 1e-4
Tolerance value used in stopping conditions.
max_iter : int, default: 200
Number of iterations to compute.
nls_max_iter : int, default: 2000
Number of iterations in NLS subproblem.
random_state : int or RandomState
Random number generator seed control.
Attributes
----------
components_ : array, [n_components, n_features]
Non-negative components of the data.
reconstruction_err_ : number
Frobenius norm of the matrix difference between
the training data and the reconstructed data from
the fit produced by the model. ``|| X - WH ||_2``
n_iter_ : int
Number of iterations run.
Examples
--------
>>> import numpy as np
>>> X = np.array([[1,1], [2, 1], [3, 1.2], [4, 1], [5, 0.8], [6, 1]])
>>> from sklearn.decomposition import ProjectedGradientNMF
>>> model = ProjectedGradientNMF(n_components=2, init='random',
... random_state=0)
>>> model.fit(X) #doctest: +ELLIPSIS +NORMALIZE_WHITESPACE
ProjectedGradientNMF(beta=1, eta=0.1, init='random', max_iter=200,
n_components=2, nls_max_iter=2000, random_state=0, sparseness=None,
tol=0.0001)
>>> model.components_
array([[ 0.77032744, 0.11118662],
[ 0.38526873, 0.38228063]])
>>> model.reconstruction_err_ #doctest: +ELLIPSIS
0.00746...
>>> model = ProjectedGradientNMF(n_components=2,
... sparseness='components', init='random', random_state=0)
>>> model.fit(X) #doctest: +ELLIPSIS +NORMALIZE_WHITESPACE
ProjectedGradientNMF(beta=1, eta=0.1, init='random', max_iter=200,
n_components=2, nls_max_iter=2000, random_state=0,
sparseness='components', tol=0.0001)
>>> model.components_
array([[ 1.67481991, 0.29614922],
[ 0. , 0.4681982 ]])
>>> model.reconstruction_err_ #doctest: +ELLIPSIS
0.513...
References
----------
This implements
C.-J. Lin. Projected gradient methods
for non-negative matrix factorization. Neural
Computation, 19(2007), 2756-2779.
http://www.csie.ntu.edu.tw/~cjlin/nmf/
P. Hoyer. Non-negative Matrix Factorization with
Sparseness Constraints. Journal of Machine Learning
Research 2004.
NNDSVD is introduced in
C. Boutsidis, E. Gallopoulos: SVD based
initialization: A head start for nonnegative
matrix factorization - Pattern Recognition, 2008
http://tinyurl.com/nndsvd
"""
def __init__(self, n_components=None, init=None, sparseness=None, beta=1,
eta=0.1, tol=1e-4, max_iter=200, nls_max_iter=2000,
random_state=None):
self.n_components = n_components
self.init = init
self.tol = tol
if sparseness not in (None, 'data', 'components'):
raise ValueError(
'Invalid sparseness parameter: got %r instead of one of %r' %
(sparseness, (None, 'data', 'components')))
self.sparseness = sparseness
self.beta = beta
self.eta = eta
self.max_iter = max_iter
self.nls_max_iter = nls_max_iter
self.random_state = random_state
def _init(self, X):
n_samples, n_features = X.shape
init = self.init
if init is None:
if self.n_components_ < n_features:
init = 'nndsvd'
else:
init = 'random'
random_state = self.random_state
if init == 'nndsvd':
W, H = _initialize_nmf(X, self.n_components_)
elif init == 'nndsvda':
W, H = _initialize_nmf(X, self.n_components_, variant='a')
elif init == 'nndsvdar':
W, H = _initialize_nmf(X, self.n_components_, variant='ar')
elif init == "random":
rng = check_random_state(random_state)
W = rng.randn(n_samples, self.n_components_)
# we do not write np.abs(W, out=W) to stay compatible with
# numpy 1.5 and earlier where the 'out' keyword is not
# supported as a kwarg on ufuncs
np.abs(W, W)
H = rng.randn(self.n_components_, n_features)
np.abs(H, H)
else:
raise ValueError(
'Invalid init parameter: got %r instead of one of %r' %
(init, (None, 'nndsvd', 'nndsvda', 'nndsvdar', 'random')))
return W, H
def _update_W(self, X, H, W, tolW):
n_samples, n_features = X.shape
if self.sparseness is None:
W, gradW, iterW = _nls_subproblem(X.T, H.T, W.T, tolW,
self.nls_max_iter)
elif self.sparseness == 'data':
W, gradW, iterW = _nls_subproblem(
safe_vstack([X.T, np.zeros((1, n_samples))]),
safe_vstack([H.T, np.sqrt(self.beta) * np.ones((1,
self.n_components_))]),
W.T, tolW, self.nls_max_iter)
elif self.sparseness == 'components':
W, gradW, iterW = _nls_subproblem(
safe_vstack([X.T,
np.zeros((self.n_components_, n_samples))]),
safe_vstack([H.T,
np.sqrt(self.eta) * np.eye(self.n_components_)]),
W.T, tolW, self.nls_max_iter)
return W.T, gradW.T, iterW
def _update_H(self, X, H, W, tolH):
n_samples, n_features = X.shape
if self.sparseness is None:
H, gradH, iterH = _nls_subproblem(X, W, H, tolH,
self.nls_max_iter)
elif self.sparseness == 'data':
H, gradH, iterH = _nls_subproblem(
safe_vstack([X, np.zeros((self.n_components_, n_features))]),
safe_vstack([W,
np.sqrt(self.eta) * np.eye(self.n_components_)]),
H, tolH, self.nls_max_iter)
elif self.sparseness == 'components':
H, gradH, iterH = _nls_subproblem(
safe_vstack([X, np.zeros((1, n_features))]),
safe_vstack([W,
np.sqrt(self.beta)
* np.ones((1, self.n_components_))]),
H, tolH, self.nls_max_iter)
return H, gradH, iterH
def fit_transform(self, X, y=None):
"""Learn a NMF model for the data X and returns the transformed data.
This is more efficient than calling fit followed by transform.
Parameters
----------
X: {array-like, sparse matrix}, shape = [n_samples, n_features]
Data matrix to be decomposed
Returns
-------
data: array, [n_samples, n_components]
Transformed data
"""
X = check_array(X, accept_sparse='csr')
check_non_negative(X, "NMF.fit")
n_samples, n_features = X.shape
if not self.n_components:
self.n_components_ = n_features
else:
self.n_components_ = self.n_components
W, H = self._init(X)
gradW = (np.dot(W, np.dot(H, H.T))
- safe_sparse_dot(X, H.T, dense_output=True))
gradH = (np.dot(np.dot(W.T, W), H)
- safe_sparse_dot(W.T, X, dense_output=True))
init_grad = norm(np.r_[gradW, gradH.T])
tolW = max(0.001, self.tol) * init_grad # why max?
tolH = tolW
tol = self.tol * init_grad
for n_iter in range(1, self.max_iter + 1):
# stopping condition
# as discussed in paper
proj_norm = norm(np.r_[gradW[np.logical_or(gradW < 0, W > 0)],
gradH[np.logical_or(gradH < 0, H > 0)]])
if proj_norm < tol:
break
# update W
W, gradW, iterW = self._update_W(X, H, W, tolW)
if iterW == 1:
tolW = 0.1 * tolW
# update H
H, gradH, iterH = self._update_H(X, H, W, tolH)
if iterH == 1:
tolH = 0.1 * tolH
if not sp.issparse(X):
error = norm(X - np.dot(W, H))
else:
sqnorm_X = np.dot(X.data, X.data)
norm_WHT = trace_dot(np.dot(np.dot(W.T, W), H), H)
cross_prod = trace_dot((X * H.T), W)
error = sqrt(sqnorm_X + norm_WHT - 2. * cross_prod)
self.reconstruction_err_ = error
self.comp_sparseness_ = _sparseness(H.ravel())
self.data_sparseness_ = _sparseness(W.ravel())
H[H == 0] = 0 # fix up negative zeros
self.components_ = H
if n_iter == self.max_iter:
warnings.warn("Iteration limit reached during fit. Solving for W exactly.")
return self.transform(X)
self.n_iter_ = n_iter
return W
def fit(self, X, y=None, **params):
"""Learn a NMF model for the data X.
Parameters
----------
X: {array-like, sparse matrix}, shape = [n_samples, n_features]
Data matrix to be decomposed
Returns
-------
self
"""
self.fit_transform(X, **params)
return self
def transform(self, X):
"""Transform the data X according to the fitted NMF model
Parameters
----------
X: {array-like, sparse matrix}, shape = [n_samples, n_features]
Data matrix to be transformed by the model
Returns
-------
data: array, [n_samples, n_components]
Transformed data
"""
check_is_fitted(self, 'n_components_')
X = check_array(X, accept_sparse='csc')
Wt = np.zeros((self.n_components_, X.shape[0]))
check_non_negative(X, "ProjectedGradientNMF.transform")
if sp.issparse(X):
Wt, _, _ = _nls_subproblem(X.T, self.components_.T, Wt,
tol=self.tol,
max_iter=self.nls_max_iter)
else:
for j in range(0, X.shape[0]):
Wt[:, j], _ = nnls(self.components_.T, X[j, :])
return Wt.T
class NMF(ProjectedGradientNMF):
__doc__ = ProjectedGradientNMF.__doc__
pass
| bsd-3-clause |
toinbis/369old | src/web369/conf/base.py | 1 | 2325 | from pkg_resources import resource_filename
DEBUG = False
TEMPLATE_DEBUG = DEBUG
ADMINS = (
# ('Your Name', '[email protected]'),
)
MANAGERS = ADMINS
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': 'web369',
'USER': 'root',
'PASSWORD': '',
}
}
TIME_ZONE = 'Europe/Vilnius'
LANGUAGE_CODE = 'lt'
SITE_ID = 1
USE_I18N = True
USE_L10N = True
STATIC_URL = '/static/'
STATIC_ROOT = resource_filename('web369', '../../var/htdocs/static')
STATICFILES_DIRS = (
resource_filename('web369', 'static'),
)
MEDIA_URL = '/media/'
MEDIA_ROOT = resource_filename('web369', '../../var/htdocs/media')
ADMIN_MEDIA_PREFIX = STATIC_URL + 'admin/'
SECRET_KEY = 'SBX*YTL!cANetM&uFTf6R5Je(@PX3!rtgo)kgwNT'
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
'django.template.loaders.eggs.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.locale.LocaleMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
)
ROOT_URLCONF = 'web369.urls.default'
TEMPLATE_DIRS = (
resource_filename('web369', 'templates'),
)
TEMPLATE_CONTEXT_PROCESSORS = (
'django.contrib.auth.context_processors.auth',
# 'django.core.context_processors.debug',
'django.core.context_processors.i18n',
'django.core.context_processors.media',
'django.core.context_processors.static',
'django.core.context_processors.request',
)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.sitemaps',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.admin',
# 'south',
'web369',
)
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.filebased.FileBasedCache',
'LOCATION': '/tmp/django_cache',
'TIMEOUT': 60,
'OPTIONS': {
'MAX_ENTRIES': 1000
}
}
}
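# Usage sketch for the cache configured above (standard Django cache API; the
# key and value are illustrative):
#
# from django.core.cache import cache
# cache.set('recent_word_count', 42, timeout=60)
# cache.get('recent_word_count')  # -> 42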
# Word count will be updated when new documents are scraped:
LIVE_WORD_COUNT = True
| bsd-3-clause |
ZhangXinNan/tensorflow | tensorflow/python/ops/parallel_for/pfor.py | 2 | 101653 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Compiled parallel-for loop."""
# pylint: disable=missing-docstring
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
from absl import flags
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import sparse_tensor
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import tensor_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import check_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import data_flow_ops
from tensorflow.python.ops import functional_ops
from tensorflow.python.ops import gen_parsing_ops
from tensorflow.python.ops import gen_sparse_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn_ops
from tensorflow.python.ops import parsing_ops
from tensorflow.python.ops import sparse_ops
from tensorflow.python.ops import tensor_array_ops
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.util import nest
flags.DEFINE_bool(
"op_conversion_fallback_to_while_loop", False,
"If true, falls back to using a while loop for ops for "
"which a converter is not defined.")
def _stack(t, length):
"""stacks `t` `length` times."""
ones = array_ops.ones_like(array_ops.shape(t))
multiples = array_ops.concat([length, ones], 0)
t = array_ops.tile(array_ops.expand_dims(t, 0), multiples)
return wrap(t, True)
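# Illustrative shape behavior (a reading of the code above; `length` is a 1-D
# tensor as used by callers in this file): with t of shape [2, 3] and
# length == [4], the result wraps a tensor of shape [4, 2, 3], i.e. t tiled
# along a new leading axis.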
# The following stateful ops can be safely called once, and with the same
# signature as the unconverted version, if their inputs are loop invariant.
# TODO(agarwal): implement a strategy for converting Variable reads/writes. The
# plan is to map each read/write in the loop_fn to a corresponding merged
# read/write in the converted graph. Writes need to be mergeable (e.g.
# AssignAdd) to be used in `pfor`. Given a certain read/write order in the
# loop_fn, doing a one-to-one conversion will simulate executing such
# instructions in lock-step across all iterations.
passthrough_stateful_ops = set([
"VariableV2",
"VarHandleOp",
"ReadVariableOp",
"StackV2",
"TensorArrayWriteV3",
"TensorArrayReadV3",
"TensorArraySizeV3",
])
def _is_stateful_pfor_op(op):
if isinstance(op, WhileOp):
return op.is_stateful
if op.type == "Const":
    # Const doesn't have an op_def.
return False
if op.type in passthrough_stateful_ops:
return False
assert hasattr(op, "op_def") and op.op_def is not None, op
return op.op_def.is_stateful
# pylint: disable=protected-access
class WhileOp(object):
"""Object for storing state for converting the outputs of a while_loop."""
def __init__(self, exit_node, pfor_ops):
"""Initializer.
Args:
exit_node: A tensor output from the while_loop.
pfor_ops: list of ops inside the current pfor loop.
"""
self._pfor_ops = set(pfor_ops)
self._pfor_op_ids = set([x._id for x in pfor_ops])
assert isinstance(exit_node, ops.Tensor)
self._while_context = exit_node.op._get_control_flow_context()
assert isinstance(self._while_context, control_flow_ops.WhileContext)
self._context_name = self._while_context.name
self._condition = self._while_context.pivot.op.inputs[0]
# Parts of an external while_loop could be created inside a pfor loop.
# However for the purpose here, we declare such loops to be external. Also
# note that we check if the condition was created inside or outside to
# determine if the while_loop was first created inside or outside.
# TODO(agarwal): check that the Enter and Exit of this loop are unstacked.
self._is_inside_loop = self.op_is_inside_loop(self._condition.op)
if self._is_inside_loop:
for e in self._while_context.loop_exits:
assert self.op_is_inside_loop(e.op)
# Note the code below tries to reverse engineer an existing while_loop graph
# by assuming the following pattern of nodes.
#
# NextIteration <---- Body <--- Enter
# | ^
# V ___| Y
# Enter -> Merge -> Switch___
# ^ | N
# | V
# LoopCond Exit
    # Note that elements in the list below correspond one-to-one with each
# other. i.e. these lists are the same size, and the i_th entry corresponds
# to different Operations/Tensors of a single cycle as illustrated above.
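    # For intuition (illustrative example, not from the original comments):
    # a graph-mode loop such as
    #   tf.while_loop(lambda i: i < 10, lambda i: i + 1, [0])
    # creates exactly one such Enter/Merge/Switch/NextIteration/Exit cycle
    # for its single loop variable `i`.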
# List of Switch ops (ops.Operation) that feed into an Exit Node.
self._exit_switches = []
# List of inputs (ops.Tensor) to NextIteration.
self._body_outputs = []
# List of list of control inputs of the NextIteration nodes.
self._next_iter_control_inputs = []
# List of Merge ops (ops.Operation).
self._enter_merges = []
# List of output (ops.Tensor) of Exit nodes.
self._outputs = []
# List of Enter Tensors.
# There are two types of Enter nodes:
# - The Enter nodes that are used in the `loop_vars` argument to
# `while_loop` (see
# https://www.tensorflow.org/api_docs/python/tf/while_loop). We collect
# these Enter nodes immediately below by tracing backwards from the Exit
# nodes via Exit <- Switch <- Merge <- Enter. You can see this chain in the
# diagram above. This allows us to have a 1:1 correspondence between the
# self._outputs and the first elements in self._enters.
# - The Enter nodes that are used only by the body. They don't appear in the
# `loop_vars` and are not returned from the `while_loop`. In Python code,
# they are usually captured by the body lambda. We collect them below by
# iterating over all the ops in the graph. They are appended to the end of
# self._enters or self._direct_enters, and don't correspond to any outputs
# in self._outputs. Note that we keep the resource/variant Enter nodes in
# self._direct_enters and the constructed while_loop's body uses them
# directly as opposed to passing them as loop variables. This is done
# because the while_body cannot partition the resource/variant Tensors, so
# it has to leave them unchanged.
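    # For example (illustrative, with a hypothetical captured constant `c`):
    # in
    #   c = tf.constant(1.)
    #   tf.while_loop(lambda i: i < 10, lambda i: i + c, [i0])
    # the loop variable `i0` flows through an Enter of the first kind (part
    # of `loop_vars`), while the captured `c` gets an Enter of the second
    # kind that only the body uses.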
self._enters = []
self._direct_enters = []
for e in self._while_context.loop_exits:
self._outputs.append(e.op.outputs[0])
switch = e.op.inputs[0].op
assert switch.type == "Switch", switch
self._exit_switches.append(switch)
merge = switch.inputs[0].op
assert merge.type == "Merge", merge
self._enter_merges.append(merge)
enter = merge.inputs[0].op
assert enter.type == "Enter", enter
self._enters.append(enter.outputs[0])
next_iter = merge.inputs[1].op
assert next_iter.type == "NextIteration", next_iter
self._body_outputs.append(next_iter.inputs[0])
self._next_iter_control_inputs.append(next_iter.control_inputs)
# Collect all the Enter nodes that are not part of `loop_vars`, the second
# category described above.
# Also track whether the loop body has any stateful ops.
self._is_stateful = False
for op in ops.get_default_graph().get_operations():
# TODO(agarwal): make sure this works with nested case.
control_flow_context = op._get_control_flow_context()
if control_flow_context is None:
continue
if control_flow_context.name == self._context_name:
self._is_stateful |= _is_stateful_pfor_op(op)
if op.type == "Enter":
output = op.outputs[0]
if output not in self._enters:
if output.dtype in (dtypes.resource, dtypes.variant):
if output not in self._direct_enters:
self._direct_enters.append(output)
else:
self._enters.append(output)
def __str__(self):
"""String representation."""
return "while_loop(%s)" % self.name
@property
def inputs(self):
"""Input to all the Enter nodes."""
return [x.op.inputs[0] for x in self._enters + self._direct_enters]
@property
def control_inputs(self):
"""Control input to all the Enter nodes."""
control_inputs = []
for x in self._enters + self._direct_enters:
control_inputs.extend(x.op.control_inputs)
return control_inputs
@property
def outputs(self):
"""Outputs of all the Exit nodes."""
return self._outputs
@property
def name(self):
"""Context name for the while loop."""
return self._context_name
@property
def is_inside_loop(self):
"""Returns true if the while_loop was created inside the pfor."""
return self._is_inside_loop
def op_is_inside_loop(self, op):
"""True if op was created inside the pfor loop body."""
assert isinstance(op, ops.Operation)
# Note that we use self._pfor_op_ids for the check and not self._pfor_ops
# since it appears the TensorFlow API could return different Python
# objects representing the same Operation node.
return op._id in self._pfor_op_ids
@property
def is_stateful(self):
return self._is_stateful
@property
def pfor_converter(self):
"""Return a converter for the while loop."""
return self
def _init_pfor(self, parent_pfor, indices, cond_stacked, inputs,
inputs_stacked):
"""Create a PFor object for converting parts of the while_loop.
Args:
parent_pfor: PFor object being used for converting the while_loop.
indices: int32 Tensor of ids for the iterations that are still active
(i.e. did not exit the while_loop).
cond_stacked: True if the while_loop condition is stacked.
inputs: list of input Tensors corresponding 1-to-1 with self._enters. Note
that these Tensors are a subset of the loop variables for the generated
while_loop.
inputs_stacked: List of booleans corresponding 1-to-1 with `inputs`,
indicating if the value is stacked or not.
Returns:
A PFor instance. The instance is initialized by adding conversion mappings
of nodes that will be external to the conversion that the returned
instance will be used for. e.g. Enter nodes as well as Merge and Switch
outputs are mapped to converted values.
"""
num_outputs = len(self._outputs)
assert len(inputs) == len(self._enters)
assert len(inputs_stacked) == len(self._enters)
loop_var = parent_pfor.loop_var
loop_len = array_ops.size(indices)
pfor = PFor(
loop_var,
loop_len,
pfor_ops=self._pfor_ops,
all_indices=indices,
all_indices_partitioned=cond_stacked)
# Map all inputs of Enter nodes in self._direct_enters to their converted
# values.
for enter in self._direct_enters:
enter_input = enter.op.inputs[0]
converted_enter, stacked, is_sparse_stacked = parent_pfor._convert_helper(
enter_input)
# Since these are resources / variants, they should be unstacked.
assert not stacked and not is_sparse_stacked, (enter, converted_enter)
pfor._add_conversion(enter, wrap(converted_enter, False))
# Map all Enter nodes to the inputs.
for enter, inp, stacked in zip(self._enters, inputs, inputs_stacked):
pfor._add_conversion(enter, wrap(inp, stacked))
# Map outputs of Switch and Merge.
for i in range(num_outputs):
wrapped_inp = wrap(inputs[i], inputs_stacked[i])
merge = self._enter_merges[i]
pfor._add_conversion(merge.outputs[0], wrapped_inp)
# Note that the second output of Merge is typically not used, except possibly
# as a control dependency. To avoid trying to output the correct value, we
# employ a hack here. We output a dummy invalid value with an incorrect
# dtype. This will allow control dependency to work but if using it as an
# input, it should typically lead to errors during graph construction due
# to dtype mismatch.
# TODO(agarwal): Check in the original graph to see if there are any
# consumers of this Tensor that use it as an input.
pfor._add_conversion(merge.outputs[1],
wrap(constant_op.constant(-1.0), False))
switch = self._exit_switches[i]
# Don't need to worry about switch.outputs[0], which will feed the Exit node.
pfor._add_conversion(switch.outputs[1], wrapped_inp)
return pfor
def _convert_enter(self, parent_pfor, enter):
"""Converts an Enter node."""
inp, stacked, _ = parent_pfor._convert_helper(enter.op.inputs[0])
control_inputs = [
parent_pfor._convert_helper(x).t for x in enter.op.control_inputs
]
if control_inputs:
with ops.control_dependencies(control_inputs):
inp = array_ops.identity(inp)
return inp, stacked
def _maybe_stacked(self, cache, inp):
"""Heuristic to figue out if the coverting inp leads to a stacked value.
Args:
cache: map from Tensor to boolean indicating stacked/unstacked.
inp: input Tensor.
Returns:
True if `inp` could get stacked. If the function returns False, the
converted value should be guaranteed to be unstacked. If returning True,
it may or may not be stacked.
"""
if inp in cache:
return cache[inp]
if not self.op_is_inside_loop(inp.op):
return False
op = inp.op
output = False
if op.type in [
"Shape",
"Rank"
"ShapeN",
"ZerosLike",
"TensorArrayV3",
"TensorArraySizeV3",
]:
output = False
elif _is_stateful_pfor_op(op):
# This may be fairly aggressive.
output = True
elif op.type == "Exit":
# This may be fairly aggressive.
output = True
else:
for t in op.inputs:
if self._maybe_stacked(cache, t):
output = True
break
cache[inp] = output
return output
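# Hedged illustration of the heuristic above: a "Shape" op always yields an
# unstacked value (every active iteration sees the same shape vector), so it
# maps to False, while a stateful op (e.g. a random op) is conservatively
# treated as producing a stacked value even if all its inputs are unstacked.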
def _create_init_values(self, pfor_input):
"""Create arguments passed to converted while_loop."""
with ops.name_scope("while_init"):
loop_len_vector = pfor_input.pfor.loop_len_vector
loop_len = loop_len_vector[0]
num_outputs = len(self._outputs)
inputs = []
maybe_stacked_cache = {}
# Convert all the Enters. Need to do this before checking for stacking
# below.
for i, enter in enumerate(self._enters):
inp, stacked = self._convert_enter(pfor_input.pfor, enter)
inputs.append(inp)
maybe_stacked_cache[enter] = stacked
# Since this enter node is part of the `loop_vars`, it corresponds to an
# output and its preceding switch. We mark this switch's output with the
# same stackedness, to act as the base case for the logic below, where we
# walk through the body to figure out which inputs might need to be
# stacked and which inputs can safely remain unstacked.
if i < num_outputs:
maybe_stacked_cache[self._exit_switches[i].outputs[1]] = stacked
# Shape invariants for init_values corresponding to self._enters.
input_shape_invariants = []
# TensorArrays for outputs of converted while loop
output_tas = []
# Shape invariants for output TensorArrays.
ta_shape_invariants = []
# List of booleans indicating stackedness of inputs, i.e. tensors
# corresponding to self._enters.
inputs_stacked = []
for i, inp in enumerate(inputs):
enter = self._enters[i]
inp_stacked = self._maybe_stacked(maybe_stacked_cache, enter)
# Note that even when an input is unstacked, the body could make it
# stacked. We use a heuristic below to figure out if the body may be making
# it stacked.
if i < num_outputs:
body_output = self._body_outputs[i]
if enter.op in self._pfor_ops:
body_output_stacked = self._maybe_stacked(maybe_stacked_cache,
body_output)
else:
# If constructed outside of pfor loop, then the output would not be
# stacked.
body_output_stacked = False
if body_output_stacked and not inp_stacked:
inp = _stack(inp, loop_len_vector).t
inputs[i] = inp
inp_stacked = True
# TODO(agarwal): other attributes for the TensorArray ?
output_tas.append(tensor_array_ops.TensorArray(inp.dtype, loop_len))
ta_shape_invariants.append(tensor_shape.TensorShape(None))
inputs_stacked.append(inp_stacked)
input_shape_invariants.append(tensor_shape.TensorShape(None))
# See documentation for __call__ for the structure of init_values.
init_values = [True, pfor_input.pfor.all_indices] + inputs + output_tas
# TODO(agarwal): try stricter shape invariants
shape_invariants = (
[tensor_shape.TensorShape(None),
tensor_shape.TensorShape(None)
] + input_shape_invariants + ta_shape_invariants)
return init_values, inputs_stacked, shape_invariants
def _process_cond_unstacked(self, conditions, indices, inputs, output_tas):
"""Handles case when condition is unstacked.
Note that all iterations end together. So we don't need to partition the
inputs. When all iterations are done, we write the inputs to the
TensorArrays. Note that we only write to index 0 of output_tas. Since all
iterations end together, they can all be output together.
"""
not_all_done = array_ops.reshape(conditions, [])
new_output_tas = []
# pylint: disable=cell-var-from-loop
for i, out_ta in enumerate(output_tas):
inp = inputs[i]
new_output_tas.append(
control_flow_ops.cond(not_all_done,
lambda: out_ta,
lambda: out_ta.write(0, inp)))
# pylint: enable=cell-var-from-loop
return not_all_done, indices, inputs, new_output_tas
def _process_cond_stacked(self, conditions, indices, inputs, inputs_stacked,
output_tas):
num_outputs = len(self._outputs)
# Compute if all iterations are done.
not_all_done = math_ops.reduce_any(conditions)
conditions_int = math_ops.cast(conditions, dtypes.int32)
# Partition the indices.
done_indices, new_indices = data_flow_ops.dynamic_partition(
indices, conditions_int, 2)
new_inputs = []
new_output_tas = []
for i, (inp, stacked) in enumerate(zip(inputs, inputs_stacked)):
# Partition the inputs.
if stacked:
done_inp, new_inp = data_flow_ops.dynamic_partition(
inp, conditions_int, 2)
else:
# TODO(agarwal): avoid this stacking. See TODO earlier in
# _process_cond_unstacked.
done_inp = _stack(inp, [array_ops.size(done_indices)]).t
new_inp = inp
new_inputs.append(new_inp)
# For iterations that are done, write them to TensorArrays.
if i < num_outputs:
out_ta = output_tas[i]
# Note that done_indices can be empty. done_inp should also be empty in
# that case.
new_output_tas.append(out_ta.scatter(done_indices, done_inp))
return not_all_done, new_indices, new_inputs, new_output_tas
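# Worked example of the partitioning above (illustrative only): with
# indices = [0, 1, 2, 3] and conditions = [False, True, False, True],
# conditions_int = [0, 1, 0, 1], so dynamic_partition yields
# done_indices = [0, 2] (their values get written to the TensorArrays) and
# new_indices = [1, 3] (still active in the next while iteration).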
def _process_body(self, pfor_input, inputs_stacked,
new_indices, cond_stacked, new_inputs,
not_all_done):
"""Convert the body function."""
def true_fn(control_inputs, body_pfor, body_output, stacked):
"""Converts the body function for all but last iteration.
This essentially converts body_output. Additionally, it needs to handle
any control dependencies on the NextIteration node. So it creates another
Identity node with the converted dependencies.
"""
converted_control_inp = []
for x in control_inputs:
for t in x.outputs:
converted_control_inp.append(body_pfor._convert_helper(t).t)
if stacked:
# Note convert always does the stacking.
output = body_pfor.convert(body_output)
else:
output, convert_stacked, _ = body_pfor._convert_helper(body_output)
assert convert_stacked == stacked, body_output
with ops.control_dependencies(converted_control_inp):
return array_ops.identity(output)
body_pfor = self._init_pfor(pfor_input.pfor, new_indices,
cond_stacked, new_inputs,
inputs_stacked)
new_outputs = []
for i, (body_output, stacked) in enumerate(
zip(self._body_outputs, inputs_stacked)):
control_inp = self._next_iter_control_inputs[i]
out_dtype = body_output.dtype
# Note that we want to run the body only if not all pfor iterations are
# done. If all are done, we return empty tensors since these values will
# not be used. Notice that the value returned by the loop is based on
# TensorArrays and not directly on these returned values.
# pylint: disable=cell-var-from-loop
new_output = control_flow_ops.cond(
not_all_done,
lambda: true_fn(control_inp, body_pfor, body_output, stacked),
lambda: constant_op.constant([], dtype=out_dtype))
# pylint: enable=cell-var-from-loop
new_outputs.append(new_output)
return new_outputs
def __call__(self, pfor_input):
"""Converter for the while_loop.
The conversion of a while_loop is another while_loop.
The arguments to this converted while_loop are as follows:
not_all_done: Boolean scalar Tensor indicating if all the pfor iterations
are done.
indices: int32 1-D Tensor storing the id of the iterations that are not
done.
args: Remaining arguments. These can be divided into 3 categories:
- First set of arguments are the tensors that correspond to the initial
elements of self._enters, i.e. the elements that appear in the original
while loop's `loop_vars`.
- The second set of arguments are the tensors that correspond to the
remaining elements of self._enters. These are the tensors that directly
enter the original while loop body.
- Finally, the last set of arguments are TensorArrays. These TensorArrays
correspond to the outputs of the original while_loop, i.e. to the
elements in self._outputs. Each TensorArray has `PFor.loop_len`
elements, i.e. the number of pfor iterations. At the end, the i'th
element of each TensorArray will contain the output computed by the
i'th iteration of pfor. Note that elements can be written into these
tensor arrays in any order, depending on when the corresponding pfor
iteration is done.
If the original while_loop had `k` tensors in its `loop_vars` and its body
directly captured `m` tensors, the `args` will contain `2 * k + m` values.
In each iteration, the while_loop body recomputes the condition for all
active pfor iterations to see which of them are now done. It then partitions
all the inputs and passes them along to the converted body. Values for all
the iterations that are done are written to TensorArrays indexed by the pfor
iteration number. When all iterations are done, the TensorArrays are stacked
to get the final value.
Args:
pfor_input: A PForInput object corresponding to the output of any Exit
node from this while loop.
Returns:
List of converted outputs.
"""
# Create init_values that will be passed to the while_loop.
init_values, inputs_stacked, shape_invariants = self._create_init_values(
pfor_input)
# Note that we use a list as a hack since we need the nested function body
# to set the value of cond_is_stacked. python2.x doesn't support nonlocal
# variables.
cond_is_stacked = [None]
def cond(not_all_done, *_):
return not_all_done
def body(not_all_done, indices, *args):
# See documentation for __call__ for the structure of *args.
num_enters = len(self._enters)
inputs = args[:num_enters]
output_tas = args[num_enters:]
# TODO(agarwal): see which outputs have consumers and only populate the
# TensorArrays corresponding to those. Or do those paths get trimmed out
# from inside the while_loop body?
assert len(inputs) >= len(output_tas)
assert len(inputs) == len(inputs_stacked)
# Convert condition
with ops.name_scope("while_cond"):
# Note that we set cond_stacked to True here. At this point we don't
# know if it could be loop invariant, hence the conservative value is
# to assume stacked.
cond_pfor = self._init_pfor(pfor_input.pfor, indices,
cond_stacked=True,
inputs=inputs,
inputs_stacked=inputs_stacked)
conditions, cond_stacked, _ = cond_pfor._convert_helper(self._condition)
cond_is_stacked[0] = cond_stacked
# Recompute the new condition, write outputs of done iterations, and
# partition the inputs if needed.
if not cond_stacked:
(not_all_done, new_indices,
new_inputs, new_output_tas) = self._process_cond_unstacked(
conditions, indices, inputs, output_tas)
else:
(not_all_done, new_indices,
new_inputs, new_output_tas) = self._process_cond_stacked(
conditions, indices, inputs, inputs_stacked, output_tas)
# Convert body
with ops.name_scope("while_body"):
# Compute the outputs from the body.
new_outputs = self._process_body(pfor_input, inputs_stacked,
new_indices, cond_stacked, new_inputs,
not_all_done)
# Note that the first num_outputs new values of inputs are computed using
# the body. The rest of them were direct Enters into the condition/body and
# the partitioning done earlier is sufficient to give the new value.
num_outputs = len(self._outputs)
new_args = ([not_all_done, new_indices] + new_outputs + list(
new_inputs[num_outputs:]) + new_output_tas)
return tuple(new_args)
while_outputs = control_flow_ops.while_loop(
cond, body, init_values, shape_invariants=shape_invariants)
output_tas = while_outputs[-len(self._outputs):]
outputs = []
assert cond_is_stacked[0] is not None
for inp_stacked, ta in zip(inputs_stacked, output_tas):
if cond_is_stacked[0]:
outputs.append(wrap(ta.stack(), True))
else:
# Note that if while_loop condition is unstacked, all iterations exit at
# the same time and we wrote those outputs in index 0 of the tensor
# array.
outputs.append(wrap(ta.read(0), inp_stacked))
return outputs
class _PforInput(object):
"""Input object passed to registered pfor converters."""
def __init__(self, pfor, op, inputs):
"""Creates a _PforInput object.
Args:
pfor: PFor converter object.
op: the Operation object that is being converted.
inputs: list of WrappedTensor objects representing converted values of the
inputs of `op`.
"""
self.pfor = pfor
self._op = op
self._inputs = inputs
def stack_inputs(self, stack_indices=None):
"""Stacks unstacked inputs at `stack_indices`.
Args:
stack_indices: indices of inputs at which stacking is done. If None,
stacking is done at all indices.
"""
if stack_indices is None:
stack_indices = range(len(self._inputs))
length = self.pfor.loop_len_vector
for i in stack_indices:
inp = self._inputs[i]
if not inp.is_stacked:
self._inputs[i] = _stack(inp.t, length)
def expanddim_inputs_for_broadcast(self):
"""Reshapes stacked inputs to prepare them for broadcast.
Since stacked inputs have an extra leading dimension, automatic broadcasting
rules could incorrectly try to expand dimensions before that leading
dimension. To avoid that, we reshape these stacked inputs to the maximum
rank they will need to be broadcasted to.
"""
if not self._inputs:
return
# Find max rank
def _get_rank(x):
rank = array_ops.rank(x.t)
if not x.is_stacked:
rank += 1
return rank
ranks = [_get_rank(x) for x in self._inputs]
max_rank = ranks[0]
for rank in ranks[1:]:
max_rank = math_ops.maximum(rank, max_rank)
for i, inp in enumerate(self._inputs):
if inp.is_stacked:
shape = array_ops.shape(inp.t)
rank_diff = array_ops.reshape(max_rank - ranks[i], [1])
ones = array_ops.tile([1], rank_diff)
new_shape = array_ops.concat([shape[:1], ones, shape[1:]], axis=0)
self._inputs[i] = wrap(array_ops.reshape(inp.t, new_shape), True)
@property
def inputs(self):
return self._inputs
@property
def num_inputs(self):
return len(self._inputs)
def input(self, index):
assert len(self._inputs) > index, (index, self._inputs)
return self._inputs[index]
def stacked_input(self, index):
t, is_stacked, _ = self.input(index)
if not is_stacked:
op_type = self.op_type
op_def = getattr(self._op, "op_def", None)
if op_def is None:
input_name = "at index %d" % index
else:
input_name = "\"%s\"" % op_def.input_arg[index].name
raise ValueError("Input %s of op \"%s\" expected to be not loop invariant"
".\nError while converting op %s"
"with converted inputs\n%s" % (input_name, op_type,
self._op, self.inputs))
return t
def unstacked_input(self, index):
t, is_stacked, _ = self.input(index)
if is_stacked:
op_type = self.op_type
op_def = getattr(self._op, "op_def", None)
if op_def is None:
input_name = "at index %d" % index
else:
input_name = "\"%s\"" % op_def.input_arg[index].name
raise ValueError("Input %s of op \"%s\" expected to be loop invariant"
".\nError while converting op %s"
"with converted inputs\n%s" % (input_name, op_type,
self._op, self.inputs))
return t
@property
def op(self):
return self._op
@property
def op_type(self):
return self._op.type
def get_attr(self, attr):
return self._op.get_attr(attr)
@property
def outputs(self):
return self._op.outputs
def output(self, index):
assert index < len(self._op.outputs)
return self._op.outputs[index]
_pfor_converter_registry = {}
class RegisterPFor(object):
"""Utility to register converters for pfor.
Usage:
@RegisterPFor(foo_op_type)
def _foo_converter(pfor_input):
...
The above will register conversion function `_foo_converter` for handling
conversion of `foo_op_type`. During conversion, the registered function will be
called with a single argument of type `_PforInput` which will contain the state
needed for the conversion. The registered function should output a list of
WrappedTensor objects with the same length as the number of outputs of the op
being converted. If the op had zero outputs, then it should return an
ops.Operation object.
"""
def __init__(self, op_type):
"""Creates an object to register a converter for op with type `op_type`."""
self.op_type = op_type
def __call__(self, converter):
name = self.op_type
assert name not in _pfor_converter_registry, "Re-registering %s " % name
_pfor_converter_registry[name] = converter
return converter
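# A minimal sketch of how a converter is registered (the op type "Foo" and
# `foo_op` below are hypothetical; real registrations appear later in this
# file):
#
#   @RegisterPFor("Foo")
#   def _convert_foo(pfor_input):
#     x = pfor_input.stacked_input(0)  # shape [loop_len, ...]
#     return wrap(foo_op(x), True)     # output stacked along axis 0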
class RegisterPForWithArgs(RegisterPFor):
"""Utility to register converters for pfor.
Usage:
@RegisterPForWithArgs(foo_op_type, foo=value, ...)
def _foo_converter(pfor_input, op_type, foo=None, ...):
...
See RegisterPFor for details on the conversion function.
`RegisterPForWithArgs` allows binding extra arguments to the
conversion function at registration time.
"""
def __init__(self, op_type, *args, **kw_args):
super(RegisterPForWithArgs, self).__init__(op_type)
self._args = args
self._kw_args = kw_args
def __call__(self, converter):
def _f(pfor_input):
return converter(pfor_input, self.op_type, *self._args, **self._kw_args)
super(RegisterPForWithArgs, self).__call__(_f)
return converter
def _create_op(op_type, inputs, op_dtypes, attrs=None):
"""Utility to create an op."""
return ops.get_default_graph().create_op(
op_type, inputs, op_dtypes, attrs=attrs, compute_device=True)
WrappedTensor = collections.namedtuple("WrappedTensor",
["t", "is_stacked", "is_sparse_stacked"])
"""Wrapper around the result of a Tensor conversion.
The additional fields are useful for keeping track of the conversion state as
data flows through the ops in the loop body. For every op whose output is a
Tensor, its converter should return either a WrappedTensor or a list of
WrappedTensors.
Args:
t: The converted tensor
is_stacked: True if the tensor is stacked, i.e. represents the results of all
the iterations of the loop, where each row i of the tensor corresponds to
that op's output on iteration i of the loop. False if the tensor is not
stacked, i.e. represents the result of the op for a single iteration of
the loop, where the result does not vary between iterations.
is_sparse_stacked: True if the tensor corresponds to a component tensor
(indices, values, or dense_shape) of a sparse tensor, and has been logically
stacked via a sparse conversion.
"""
def wrap(tensor, is_stacked=True, is_sparse_stacked=False):
"""Helper to create a WrappedTensor object."""
assert isinstance(is_stacked, bool)
assert isinstance(is_sparse_stacked, bool)
assert isinstance(tensor, ops.Tensor)
assert not is_sparse_stacked or is_stacked, ("If the wrapped tensor is "
"stacked via a sparse "
"conversion, it must also be "
"stacked.")
return WrappedTensor(tensor, is_stacked, is_sparse_stacked)
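# Hedged illustration of the stacking convention (shapes assume a pfor with
# two iterations):
#
#   per_iter = wrap(constant_op.constant([[1.], [2.]]), True)  # row i holds
#   # iteration i's value, so the leading dimension is the loop dimension.
#   invariant = wrap(constant_op.constant([3.]), False)        # one copy
#   # shared by all iterations; no leading loop dimension.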
def _fallback_converter(pfor_input):
logging.warn("Using a while_loop for converting %s", pfor_input.op_type)
output_dtypes = [x.dtype for x in pfor_input.outputs]
iters = pfor_input.pfor.loop_len_vector[0]
def while_body(i, *ta_list):
"""Body of while loop."""
inputs = [
x[i, ...] if stacked else x for x, stacked, _ in pfor_input.inputs
]
op_outputs = _create_op(
pfor_input.op_type,
inputs,
output_dtypes,
attrs=pfor_input.op.node_def.attr).outputs
outputs = []
for out, ta in zip(op_outputs, ta_list):
assert isinstance(out, ops.Tensor)
outputs.append(ta.write(i, array_ops.expand_dims(out, 0)))
return tuple([i + 1] + outputs)
ta_list = control_flow_ops.while_loop(
lambda i, *ta: i < iters, while_body, [0] + [
tensor_array_ops.TensorArray(dtype, iters) for dtype in output_dtypes
])[1:]
return tuple([wrap(ta.concat(), True) for ta in ta_list])
class PFor(object):
"""Implementation of rewrite of parallel-for loops.
This class takes a DAG or a set of DAGs representing the body of a
parallel-for loop, and adds new operations to the graph that implements
functionality equivalent to running that loop body for a specified number of
iterations. This new set of nodes may or may not use a tensorflow loop
construct.
The process of conversion does not delete or change any existing operations.
It only adds operations that efficiently implement the equivalent
functionality. We refer to the added ops as "converted ops".
The conversion process uses a simple greedy heuristic. It walks the loop body
and tries to express the functionality of running each node in a loop with a
new set of nodes. When converting an op several cases are possible:
- The op is not inside the loop body. Hence it can be used as is.
- The op does not depend on the iteration number and is stateless. In this
case, it can be used as is.
- The op is not stateful, and depends on iteration number only through control
dependencies. In this case, we can create a single op with same inputs and
attributes, but with "converted" control dependencies.
- The op is not stateful, and all its inputs are loop invariant. In this
case, similar to above, we can create a single op with same inputs and
attributes, but with "converted" control dependencies.
- The op is stateful or at least one of the inputs is not loop invariant. In
this case, we run the registered converter for that op to create a set of
converted ops. All nodes in the set will have converted control dependencies
corresponding to control dependencies of the original op. If the op returned
multiple outputs, "converted outputs" could be produced by different ops in
this set.
"""
def __init__(self,
loop_var,
loop_len,
pfor_ops,
all_indices=None,
all_indices_partitioned=False):
"""Creates an object to rewrite a parallel-for loop.
Args:
loop_var: ops.Tensor output of a Placeholder operation. The value should
be an int32 scalar representing the loop iteration number.
loop_len: A scalar or scalar Tensor representing the number of iterations
the loop is run for.
pfor_ops: List of all ops inside the loop body.
all_indices: If not None, an int32 vector with size `loop_len`
representing the iteration ids that are still active. These values
should be unique and sorted. However they may not be contiguous. This is
typically the case when inside a control flow construct which has
partitioned the indices of the iterations that are being converted.
all_indices_partitioned: If True, this object is being constructed from a
control flow construct where not all the pfor iterations are guaranteed
to be active.
"""
assert isinstance(loop_var, ops.Tensor)
assert loop_var.op.type == "Placeholder"
self._loop_var = loop_var
loop_len_value = tensor_util.constant_value(loop_len)
if loop_len_value is not None:
loop_len = loop_len_value
self._loop_len_vector = array_ops.reshape(loop_len, [1])
self._all_indices_partitioned = all_indices_partitioned
if all_indices_partitioned:
assert all_indices is not None
self.all_indices = (
math_ops.range(loop_len) if all_indices is None else all_indices)
self._conversion_map = {}
self._conversion_map[loop_var] = wrap(self.all_indices, True)
self._pfor_ops = set(pfor_ops)
self._pfor_op_ids = set([x._id for x in pfor_ops])
def op_is_inside_loop(self, op):
"""True if op was created inside the pfor loop body."""
assert isinstance(op, ops.Operation)
# Note that we use self._pfor_op_ids for the check and not self._pfor_ops
# since it appears the TensorFlow API could return different Python
# objects representing the same Operation node.
return op._id in self._pfor_op_ids
def _convert_sparse(self, y):
"""Returns the converted value corresponding to SparseTensor y.
For SparseTensors, instead of stacking the component tensors separately,
resulting in component tensors with shapes (N, m, rank), (N, m), and (N,
rank) respectively for indices, values, and dense_shape (where N is the loop
length and m is the number of sparse tensor values per loop iter), we want
to logically stack the SparseTensors, to create a SparseTensor whose
components are size (N * m, rank + 1), (N * m, ), and (rank + 1,)
respectively.
Here, we try to get the conversion of each component tensor.
If the tensors are stacked via a sparse conversion, return the resulting
SparseTensor composed of the converted components. Otherwise, the component
tensors are either unstacked or stacked naively. In the latter case, we
unstack the component tensors to reform loop_len SparseTensor elements,
then correctly batch them.
The unstacked tensors must have the same rank. Each dimension of each
SparseTensor will expand to be the largest among all SparseTensor elements
for that dimension. For example, if there are N SparseTensors of rank 3
being stacked, with N dense shapes, where the i_th shape is (x_i, y_i, z_i),
the new dense shape will be (N, max_i(x_i), max_i(y_i), max_i(z_i)).
Args:
y: A tf.SparseTensor.
Returns:
A tf.SparseTensor that is the converted value corresponding to y.
"""
outputs = [
self._convert_helper(t) for t in (y.indices, y.values, y.dense_shape)
]
assert all(isinstance(o, WrappedTensor) for o in outputs)
if all(w.is_sparse_stacked for w in outputs):
return sparse_tensor.SparseTensor(*[w.t for w in outputs])
assert not any(w.is_sparse_stacked for w in outputs), (
"Error converting SparseTensor. All components should be logically "
"stacked, or none.")
# If component tensors were not sparsely stacked, they are either unstacked
# or stacked without knowledge that they are components of sparse tensors.
# In this case, we have to restack them.
return self._restack_sparse_tensor_logically(
*[self._unwrap_or_tile(w) for w in outputs])
def _restack_sparse_tensor_logically(self, indices, values, shape):
sparse_tensor_rank = indices.get_shape()[-1].value
if sparse_tensor_rank is not None:
sparse_tensor_rank += 1
def map_fn(args):
res = gen_sparse_ops.serialize_sparse(
args[0], args[1], args[2], out_type=dtypes.variant)
return res
# Applies a map function to the component tensors to serialize each
# sparse tensor element and batch them all, then deserializes the batch.
# TODO(rachelim): Try to do this without map_fn -- add the right offsets
# to shape and indices tensors instead.
result = functional_ops.map_fn(
map_fn, [indices, values, shape], dtype=dtypes.variant)
return sparse_ops.deserialize_sparse(
result, dtype=values.dtype, rank=sparse_tensor_rank)
def _unwrap_or_tile(self, wrapped_tensor):
"""Given a wrapped tensor, unwrap if stacked. Otherwise, tiles it."""
output, is_stacked = wrapped_tensor.t, wrapped_tensor.is_stacked
if is_stacked:
return output
else:
return _stack(output, self._loop_len_vector).t
def convert(self, y):
"""Returns the converted value corresponding to y.
Args:
y: A ops.Tensor or a ops.Operation object. If latter, y should not have
any outputs.
Returns:
If y does not need to be converted, it returns y as is. Else it returns
the "converted value" corresponding to y.
"""
if isinstance(y, sparse_tensor.SparseTensor):
return self._convert_sparse(y)
output = self._convert_helper(y)
if isinstance(output, WrappedTensor):
assert isinstance(y, ops.Tensor)
return self._unwrap_or_tile(output)
else:
assert isinstance(y, ops.Operation)
assert not y.outputs
assert isinstance(output, ops.Operation)
return output
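# A hedged usage sketch (the names below are assumptions, not library API):
# if `loop_var` is the int32 scalar Placeholder the loop body reads its
# iteration id from, and `out` is a Tensor computed from it, then
#
#   converter = PFor(loop_var, loop_len=8, pfor_ops=loop_body_ops)
#   vectorized = converter.convert(out)  # shape: [8] + shape of `out`
#
# returns the values of `out` for all 8 iterations, stacked along axis 0.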
def _was_converted(self, t):
"""True if t is not a conversion of itself."""
converted_t = self._conversion_map[t]
return converted_t.t is not t
def _add_conversion(self, old_output, new_output):
self._conversion_map[old_output] = new_output
def _convert_helper(self, op_or_tensor):
stack = [op_or_tensor]
while stack:
y = stack[0]
if y in self._conversion_map:
assert isinstance(self._conversion_map[y],
(WrappedTensor, ops.Operation))
stack.pop(0)
continue
if isinstance(y, ops.Operation):
assert not y.outputs, (
"We only support converting Operation objects with no outputs. "
"Got %s", y)
y_op = y
else:
assert isinstance(y, ops.Tensor), y
y_op = y.op
is_while_loop = y_op.type == "Exit"
if is_while_loop:
while_op = WhileOp(y, pfor_ops=self._pfor_ops)
is_inside_loop = while_op.is_inside_loop
# If all nodes in the while_loop graph were created inside the pfor, we
# treat the whole loop subgraph as a single op (y_op) and try to convert
# it. For while_loops that are created completely or partially outside,
# we treat them as external and should be able to simply return the Exit
# node output as is without needing any conversion. Note that for
# while_loops that are partially constructed inside, we assume they will
# be loop invariant. If that is not the case, it will create runtime
# errors since the converted graph would depend on the self._loop_var
# placeholder.
if is_inside_loop:
y_op = while_op
else:
is_inside_loop = self.op_is_inside_loop(y_op)
# If this op was not created inside the loop body, we will return as is.
# 1. Convert inputs and control inputs.
def _add_to_stack(x):
if x not in self._conversion_map:
stack.insert(0, x)
return True
else:
return False
if is_inside_loop:
added_to_stack = False
for inp in y_op.inputs:
added_to_stack |= _add_to_stack(inp)
for cinp in y_op.control_inputs:
if cinp.outputs:
for t in cinp.outputs:
added_to_stack |= _add_to_stack(t)
else:
added_to_stack |= _add_to_stack(cinp)
if added_to_stack:
continue
converted_inputs = [self._conversion_map[inp] for inp in y_op.inputs]
some_input_converted = any(
[self._was_converted(x) for x in y_op.inputs])
some_input_stacked = any([x.is_stacked for x in converted_inputs])
converted_control_ops = set()
some_control_input_converted = False
for cinp in y_op.control_inputs:
if cinp.outputs:
for t in cinp.outputs:
converted_t = self._conversion_map[t]
if self._was_converted(t):
some_control_input_converted = True
converted_control_ops.add(converted_t.t.op)
else:
converted_cinp = self._conversion_map[cinp]
assert isinstance(converted_cinp, ops.Operation)
if converted_cinp != cinp:
some_control_input_converted = True
converted_control_ops.add(converted_cinp)
converted_control_ops = list(converted_control_ops)
is_stateful = _is_stateful_pfor_op(y_op)
else:
converted_inputs = []
converted_control_ops = []
logging.vlog(3, "converting op:%s\ninputs:%s\ncontrol_inputs:%s", y_op,
converted_inputs, converted_control_ops)
# 2. Convert y_op
# If converting a while_loop, we let the while_loop converter deal with
# putting the control dependencies appropriately.
control_dependencies = [] if is_while_loop else converted_control_ops
with ops.control_dependencies(control_dependencies), ops.name_scope(
y_op.name + "/pfor/"):
# None of the inputs and control inputs were converted.
if (not is_inside_loop or
(not is_stateful and not some_input_converted and
not some_control_input_converted)):
if y == y_op:
assert not isinstance(y_op, WhileOp)
new_outputs = y_op
else:
new_outputs = [wrap(x, False) for x in y_op.outputs]
elif not (is_stateful or is_while_loop or some_input_stacked):
# All inputs are unstacked or unconverted but some control inputs are
# converted.
# TODO(rachelim): Handle the case where some inputs are sparsely
# stacked (i.e. any([x.is_sparse_stacked for x in converted_inputs]))
new_op = _create_op(y_op.type, [x.t for x in converted_inputs],
[x.dtype for x in y_op.outputs],
y_op.node_def.attr)
if y == y_op:
new_outputs = new_op
else:
new_outputs = [wrap(x, False) for x in new_op.outputs]
else:
# Either some inputs are not loop invariant or op is stateful.
if hasattr(y_op, "pfor_converter"):
converter = y_op.pfor_converter
else:
converter = _pfor_converter_registry.get(y_op.type, None)
if converter is None:
if flags.FLAGS.op_conversion_fallback_to_while_loop:
converter = _fallback_converter
else:
raise ValueError(
"No converter defined for %s\n%s\ninputs: %s. "
"\nEither add a converter or set "
"--op_conversion_fallback_to_while_loop=True, "
"which may run slower" % (y_op.type, y_op, converted_inputs))
# TODO(rachelim): Handle the case where some inputs are sparsely
# stacked. We should only call the converter if it supports handling
# those inputs.
new_outputs = converter(_PforInput(self, y_op, converted_inputs))
if isinstance(new_outputs, WrappedTensor):
new_outputs = [new_outputs]
assert isinstance(new_outputs,
(list, tuple, ops.Operation)), new_outputs
logging.vlog(2, "converted %s %s", y_op, new_outputs)
# Insert into self._conversion_map
if y == y_op:
assert isinstance(new_outputs, ops.Operation)
self._add_conversion(y_op, new_outputs)
else:
for old_output, new_output in zip(y_op.outputs, new_outputs):
assert isinstance(new_output, WrappedTensor), (new_output, y, y_op)
self._add_conversion(old_output, new_output)
stack.pop(0)
return self._conversion_map[op_or_tensor]
@property
def loop_len_vector(self):
"""Returns a single element vector whose value is number of iterations."""
return self._loop_len_vector
@property
def loop_var(self):
"""Returns placeholder loop variable."""
return self._loop_var
@property
def pfor_ops(self):
return self._pfor_ops
@property
def all_indices_partitioned(self):
"""all_indices_partitioned property.
Returns:
True if we are inside a control flow construct and not all pfor iterations
may be active.
"""
return self._all_indices_partitioned
# nn_ops
def _flatten_first_two_dims(x):
"""Merges first two dimensions."""
old_shape = array_ops.shape(x)
new_shape = array_ops.concat([[-1], old_shape[2:]], axis=0)
return array_ops.reshape(x, new_shape)
def _unflatten_first_dim(x, first_dim):
"""Splits first dimension into [first_dim, -1]."""
old_shape = array_ops.shape(x)
new_shape = array_ops.concat([first_dim, [-1], old_shape[1:]], axis=0)
return array_ops.reshape(x, new_shape)
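# Shape sketch for the helpers above (hedged): a stacked tensor of shape
# [N, B, H, W, C] becomes [N * B, H, W, C] under _flatten_first_two_dims,
# and _unflatten_first_dim(y, [N]) restores the leading [N, B] split on the
# op's [N * B, ...] output.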
def _inputs_with_flattening(pfor_input, input_indices):
"""Stacks and flattens first dim of inputs at indices `input_indices`."""
if input_indices is None:
input_indices = []
pfor_input.stack_inputs(stack_indices=input_indices)
inputs = []
for i in range(pfor_input.num_inputs):
if i in input_indices:
inp = pfor_input.stacked_input(i)
inp = _flatten_first_two_dims(inp)
else:
inp = pfor_input.unstacked_input(i)
inputs.append(inp)
return inputs
@RegisterPForWithArgs("Conv2D", dims=[0])
@RegisterPForWithArgs("AvgPool", dims=[0])
@RegisterPForWithArgs("MaxPool", dims=[0])
@RegisterPForWithArgs("MaxPoolGrad", dims=[0, 1, 2])
@RegisterPForWithArgs("SoftmaxCrossEntropyWithLogits", dims=[0, 1])
def _convert_flatten_batch(pfor_input, op_type, dims):
del op_type
inputs = _inputs_with_flattening(pfor_input, dims)
outputs = _create_op(
pfor_input.op_type,
inputs, [x.dtype for x in pfor_input.outputs],
attrs=pfor_input.op.node_def.attr).outputs
n = pfor_input.pfor.loop_len_vector
outputs = [_unflatten_first_dim(x, n) for x in outputs]
return [wrap(x, True) for x in outputs]
_channel_flatten_input_cache = {}
def _channel_flatten_input(x, data_format):
"""Merge the stack dimension with the channel dimension.
If S is pfor's stacking dimension, then,
- for SNCHW, we transpose to NSCHW. If the N dimension has size 1, the transpose
should be cheap.
- for SNHWC, we transpose to NHWSC.
We then merge the S and C dimension.
Args:
x: ops.Tensor to transform.
data_format: "NCHW" or "NHWC".
Returns:
A 3-element tuple with the transformed value, along with the shape for
reshape and order for transpose required to transform back.
"""
graph = ops.get_default_graph()
cache_key = (graph, x, data_format)
if cache_key not in _channel_flatten_input_cache:
x_shape = array_ops.shape(x)
if data_format == b"NCHW":
order = [1, 0, 2, 3, 4]
shape = array_ops.concat([x_shape[1:2], [-1], x_shape[3:]], axis=0)
reverse_order = order
else:
order = [1, 2, 3, 0, 4]
shape = array_ops.concat([x_shape[1:4], [-1]], axis=0)
reverse_order = [3, 0, 1, 2, 4]
# Move S dimension next to C dimension.
x = array_ops.transpose(x, order)
reverse_shape = array_ops.shape(x)
# Reshape to merge the S and C dimension.
x = array_ops.reshape(x, shape)
outputs = x, reverse_order, reverse_shape
_channel_flatten_input_cache[cache_key] = outputs
else:
outputs = _channel_flatten_input_cache[cache_key]
return outputs
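# Worked shapes for the transform above (hedged): with data_format ==
# b"NHWC", x of shape [S, N, H, W, C] is transposed by order [1, 2, 3, 0, 4]
# to [N, H, W, S, C] and reshaped to [N, H, W, S * C]; reverse_order and
# reverse_shape then undo the transform on the op's output.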
# Note that with training=True, running FusedBatchNorm on individual examples
# is very different from running FusedBatchNorm on a batch of those examples.
# This is because, for the latter case, the operation can be considered as first
# computing the mean and variance over all the examples and then using these
# to scale all those examples. This creates a data dependency between these
# different "iterations" since the inputs to the scaling step depends on the
# statistics coming from all these inputs.
# As with other kernels, the conversion here effectively runs the kernel
# independently for each iteration, and returns outputs by stacking outputs from
# each of those iterations.
@RegisterPFor("FusedBatchNorm")
def _convert_fused_batch_norm(pfor_input):
is_training = pfor_input.get_attr("is_training")
# When BatchNorm is used with training=False, mean and variance are provided
# externally and used as is by the op. Thus, we can merge the S and N
# dimensions as we do for regular operations.
# When BatchNorm is used with training=True, mean and variance are computed
# for each channel across the batch dimension (first one). If we merge S and N
# dimensions, mean and variances will be computed over a larger set. So, we
# merge the S and C dimensions instead.
if not is_training:
# We return zeros for batch_mean and batch_variance output. Note that CPU
# and GPU seem to have different behavior for those two outputs. CPU outputs
# zero because these values are not used during inference. GPU outputs
# something, probably real means and variances.
inputs = _inputs_with_flattening(pfor_input, [0])
outputs = _create_op(
pfor_input.op_type,
inputs, [x.dtype for x in pfor_input.outputs],
attrs=pfor_input.op.node_def.attr).outputs
y = outputs[0]
n = pfor_input.pfor.loop_len_vector
y = _unflatten_first_dim(y, n)
mean = pfor_input.unstacked_input(3)
zeros = array_ops.zeros_like(mean)
return [wrap(y, True), wrap(zeros, False), wrap(zeros, False)]
pfor_input.stack_inputs()
data_format = pfor_input.get_attr("data_format")
# We merge the first dimension with the "C" dimension, run FusedBatchNorm, and
# then transpose back.
x = pfor_input.stacked_input(0)
x, reverse_order, reverse_shape = _channel_flatten_input(x, data_format)
# Note that we stack all the other inputs as well so that they are the same
# size as the new size of the channel dimension.
inputs = [x] + [
array_ops.reshape(pfor_input.stacked_input(i), [-1])
for i in range(1, pfor_input.num_inputs)
]
outputs = _create_op(
pfor_input.op_type,
inputs, [x.dtype for x in pfor_input.outputs],
attrs=pfor_input.op.node_def.attr).outputs
y = outputs[0]
y = array_ops.reshape(y, reverse_shape)
y = array_ops.transpose(y, reverse_order)
n = pfor_input.pfor.loop_len_vector
outputs = [_unflatten_first_dim(x, n) for x in outputs[1:]]
outputs = [y] + outputs
return [wrap(x, True) for x in outputs]
@RegisterPFor("FusedBatchNormGrad")
def _convert_fused_batch_norm_grad(pfor_input):
pfor_input.stack_inputs()
data_format = pfor_input.get_attr("data_format")
y_backprop = pfor_input.stacked_input(0)
y_backprop, _, _ = _channel_flatten_input(y_backprop, data_format)
x = pfor_input.stacked_input(1)
x, x_reverse_order, x_reverse_shape = _channel_flatten_input(x, data_format)
inputs = [y_backprop, x] + [
array_ops.reshape(pfor_input.stacked_input(i), [-1])
for i in range(2, pfor_input.num_inputs)
]
outputs = _create_op(
pfor_input.op_type,
inputs, [x.dtype for x in pfor_input.outputs],
attrs=pfor_input.op.node_def.attr).outputs
x_backprop = outputs[0]
x_backprop = array_ops.reshape(x_backprop, x_reverse_shape)
x_backprop = array_ops.transpose(x_backprop, x_reverse_order)
n = pfor_input.pfor.loop_len_vector
outputs = [_unflatten_first_dim(x, n) for x in outputs[1:]]
outputs = [x_backprop] + outputs
return [wrap(output, True) for output in outputs]
@RegisterPForWithArgs("Conv2DBackpropInput", flatten_dims=[2], shape_dim=0)
@RegisterPForWithArgs("AvgPoolGrad", flatten_dims=[1], shape_dim=0)
def _convert_flatten_batch_shape_input(pfor_input, op_type, flatten_dims,
shape_dim):
del op_type
inputs = _inputs_with_flattening(pfor_input, flatten_dims)
n = pfor_input.pfor.loop_len_vector
# Adjust the `input_sizes` input.
ones = array_ops.ones(
[array_ops.shape(inputs[shape_dim])[0] - 1], dtype=n.dtype)
inputs[shape_dim] *= array_ops.concat([n, ones], axis=0)
outputs = _create_op(
pfor_input.op_type,
inputs, [x.dtype for x in pfor_input.outputs],
attrs=pfor_input.op.node_def.attr).outputs
outputs = [_unflatten_first_dim(x, n) for x in outputs]
return [wrap(x, True) for x in outputs]
@RegisterPFor("Conv2DBackpropFilter")
def _convert_conv2d_backprop_filter(pfor_input):
pfor_input.stack_inputs(stack_indices=[2])
inputs, inputs_stacked, _ = pfor_input.input(0)
filter_sizes = pfor_input.unstacked_input(1)
grads = pfor_input.stacked_input(2)
strides = pfor_input.get_attr("strides")
padding = pfor_input.get_attr("padding")
use_cudnn_on_gpu = pfor_input.get_attr("use_cudnn_on_gpu")
data_format = pfor_input.get_attr("data_format")
dilations = pfor_input.get_attr("dilations")
if inputs_stacked:
# TODO(agarwal): Implement this efficiently.
logging.warn("Conv2DBackpropFilter uses a while_loop. Fix that!")
def while_body(i, ta):
inp_i = inputs[i, ...]
grad_i = grads[i, ...]
output = nn_ops.conv2d_backprop_filter(
inp_i,
filter_sizes,
grad_i,
strides=strides,
padding=padding,
use_cudnn_on_gpu=use_cudnn_on_gpu,
data_format=data_format,
dilations=dilations)
return i + 1, ta.write(i, array_ops.expand_dims(output, 0))
n = array_ops.reshape(pfor_input.pfor.loop_len_vector, [])
_, ta = control_flow_ops.while_loop(
lambda i, ta: i < n, while_body,
(0, tensor_array_ops.TensorArray(inputs.dtype, n)))
output = ta.concat()
return wrap(output, True)
else:
# We merge the stack dimension with the channel dimension of the gradients
# and pretend we had a larger filter (see change to filter_sizes below).
# Once the filter backprop is computed, we reshape and transpose back
# appropriately.
grads, _, _ = _channel_flatten_input(grads, data_format)
n = pfor_input.pfor.loop_len_vector
old_filter_sizes = filter_sizes
filter_sizes *= array_ops.concat([[1, 1, 1], n], axis=0)
output = nn_ops.conv2d_backprop_filter(
inputs,
filter_sizes,
grads,
strides=strides,
padding=padding,
use_cudnn_on_gpu=use_cudnn_on_gpu,
data_format=data_format,
dilations=dilations)
new_filter_shape = array_ops.concat([old_filter_sizes[:3], n, [-1]], axis=0)
output = array_ops.reshape(output, new_filter_shape)
output = array_ops.transpose(output, [3, 0, 1, 2, 4])
return wrap(output, True)
# array_ops
@RegisterPForWithArgs("Identity", array_ops.identity)
@RegisterPForWithArgs("StopGradient", array_ops.stop_gradient)
def _convert_identity(pfor_input, op_type, op_func):
del op_type
return wrap(op_func(*[x.t for x in pfor_input.inputs]), True)
@RegisterPFor("Reshape")
def _convert_reshape(pfor_input):
t = pfor_input.stacked_input(0)
shape = pfor_input.unstacked_input(1)
new_dim = array_ops.shape(t)[:1]
new_shape = array_ops.concat([new_dim, shape], axis=0)
return wrap(array_ops.reshape(t, new_shape), True)
@RegisterPFor("ExpandDims")
def _convert_expanddims(pfor_input):
t = pfor_input.stacked_input(0)
dim = pfor_input.unstacked_input(1)
dim += math_ops.cast(dim >= 0, dtypes.int32)
return wrap(array_ops.expand_dims(t, axis=dim), True)
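# The `dim += cast(dim >= 0)` trick above (also used by Split, ConcatV2 and
# the reductions below) shifts non-negative axes by one to skip the leading
# pfor dimension of a stacked tensor, while negative axes already count from
# the end and need no adjustment. E.g. dim = 1 becomes 2; dim = -1 stays -1.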
@RegisterPFor("Slice")
def _convert_slice(pfor_input):
t = pfor_input.stacked_input(0)
begin = pfor_input.unstacked_input(1)
size = pfor_input.unstacked_input(2)
begin = array_ops.concat([[0], begin], axis=0)
size = array_ops.concat([[-1], size], axis=0)
return wrap(array_ops.slice(t, begin, size), True)
@RegisterPFor("Tile")
def _convert_tile(pfor_input):
t = pfor_input.stacked_input(0)
multiples = pfor_input.unstacked_input(1)
multiples = array_ops.concat([[1], multiples], 0)
return wrap(array_ops.tile(t, multiples), True)
@RegisterPFor("Pack")
def _convert_pack(pfor_input):
pfor_input.stack_inputs()
axis = pfor_input.get_attr("axis")
if axis >= 0:
axis += 1
return wrap(
array_ops.stack([x.t for x in pfor_input.inputs], axis=axis), True)
@RegisterPFor("Unpack")
def _convert_unpack(pfor_input):
value = pfor_input.stacked_input(0)
axis = pfor_input.get_attr("axis")
if axis >= 0:
axis += 1
num = pfor_input.get_attr("num")
return [wrap(x, True) for x in array_ops.unstack(value, axis=axis, num=num)]
@RegisterPFor("Pad")
def _convert_pad(pfor_input):
t = pfor_input.stacked_input(0)
paddings = pfor_input.unstacked_input(1)
paddings = array_ops.concat([[[0, 0]], paddings], 0)
return wrap(array_ops.pad(t, paddings, mode="CONSTANT"), True)
@RegisterPFor("Split")
def _convert_split(pfor_input):
split_dim = pfor_input.unstacked_input(0)
t = pfor_input.stacked_input(1)
num_split = pfor_input.get_attr("num_split")
split_dim += math_ops.cast(split_dim >= 0, dtypes.int32)
return [wrap(x, True) for x in array_ops.split(t, num_split, axis=split_dim)]
@RegisterPFor("Transpose")
def _convert_transpose(pfor_input):
t = pfor_input.stacked_input(0)
perm = pfor_input.unstacked_input(1)
new_perm = array_ops.concat([[0], perm + 1], axis=0)
return wrap(array_ops.transpose(t, new_perm), True)
@RegisterPFor("ZerosLike")
def _convert_zeroslike(pfor_input):
t = pfor_input.stacked_input(0)
shape = array_ops.shape(t)[1:]
return wrap(array_ops.zeros(shape, dtype=t.dtype), False)
@RegisterPFor("Gather")
@RegisterPFor("GatherV2")
def _convert_gather(pfor_input):
param, param_stacked, _ = pfor_input.input(0)
indices, indices_stacked, _ = pfor_input.input(1)
op_type = pfor_input.op_type
if op_type == "Gather":
validate_indices = pfor_input.get_attr("validate_indices")
axis = 0
else:
validate_indices = None
axis = pfor_input.unstacked_input(2)
axis_value = tensor_util.constant_value(axis)
if axis_value is not None:
axis = axis_value
if indices_stacked and not param_stacked:
if indices == pfor_input.pfor.all_indices and axis == 0:
param_shape0 = param.shape[0].value
indices_shape0 = indices.shape[0].value
if param_shape0 is not None and indices_shape0 == param_shape0:
# Note that with loops and conditionals, indices may not be contiguous.
# However they will be sorted and unique. So if the shape matches, then
# it must be picking up all the rows of param.
return wrap(param, True)
# TODO(agarwal): use array_ops.slice here.
output = array_ops.gather(
param, indices, validate_indices=validate_indices, axis=axis)
if axis != 0:
axis = control_flow_ops.cond(
axis < 0, lambda: axis + array_ops.rank(param), lambda: axis)
order = array_ops.concat(
[[axis],
math_ops.range(axis),
math_ops.range(axis + 1, array_ops.rank(output))],
axis=0)
output = control_flow_ops.cond(
math_ops.equal(axis, 0), lambda: output,
lambda: array_ops.transpose(output, order))
return wrap(output, True)
if param_stacked:
loop_len_vector = pfor_input.pfor.loop_len_vector
pfor_input.stack_inputs(stack_indices=[1])
indices = pfor_input.stacked_input(1)
param_flat = _flatten_first_two_dims(param)
# Recompute indices to handle stacked param.
indices_offset = math_ops.range(
loop_len_vector[0]) * array_ops.shape(param)[1]
# Reshape indices_offset to allow broadcast addition
ones = array_ops.ones([array_ops.rank(indices) - 1], dtype=dtypes.int32)
new_shape = array_ops.concat([loop_len_vector, ones], axis=0)
indices_offset = array_ops.reshape(indices_offset, new_shape)
indices += indices_offset
# TODO(agarwal): handle axis != 0. May need to transpose param or
# array_ops.gather_nd.
if isinstance(axis, ops.Tensor):
axis_value = tensor_util.constant_value(axis)
else:
try:
axis_value = int(axis)
except TypeError:
axis_value = None
msg = ("Gather, where indices and param are both loop dependent, currently "
"requires axis=0")
if axis_value is not None and axis_value != 0:
raise ValueError("Error while converting %s. %s. Got axis=%d" %
(pfor_input.op, msg, axis))
with ops.control_dependencies(
[check_ops.assert_equal(axis, 0, message=msg)]):
output = array_ops.gather(param_flat, indices)
return wrap(output, True)
@RegisterPFor("ConcatV2")
def _convert_concatv2(pfor_input):
n = pfor_input.num_inputs
pfor_input.stack_inputs(stack_indices=range(n - 1))
axis = pfor_input.unstacked_input(n - 1)
axis += math_ops.cast(axis >= 0, axis.dtype)
return wrap(
array_ops.concat([x.t for x in pfor_input.inputs[:n - 1]], axis=axis),
True)
@RegisterPFor("StridedSlice")
def _convert_strided_slice(pfor_input):
inp = pfor_input.stacked_input(0)
begin = pfor_input.unstacked_input(1)
end = pfor_input.unstacked_input(2)
strides = pfor_input.unstacked_input(3)
begin_mask = pfor_input.get_attr("begin_mask")
end_mask = pfor_input.get_attr("end_mask")
ellipsis_mask = pfor_input.get_attr("ellipsis_mask")
new_axis_mask = pfor_input.get_attr("new_axis_mask")
shrink_axis_mask = pfor_input.get_attr("shrink_axis_mask")
begin = array_ops.concat([[0], begin], axis=0)
end = array_ops.concat([[0], end], axis=0)
strides = array_ops.concat([[1], strides], axis=0)
begin_mask = begin_mask << 1 | 1
end_mask = end_mask << 1 | 1
ellipsis_mask <<= 1
new_axis_mask <<= 1
shrink_axis_mask <<= 1
return wrap(
array_ops.strided_slice(
inp,
begin,
end,
strides,
begin_mask=begin_mask,
end_mask=end_mask,
ellipsis_mask=ellipsis_mask,
new_axis_mask=new_axis_mask,
shrink_axis_mask=shrink_axis_mask), True)
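# Example of the mask arithmetic above (hedged): if the original begin_mask
# was 0b10, the converted begin_mask is 0b101. Each original bit moves up one
# position to account for the new leading dimension, and the low bit is set
# so the loop dimension is always taken whole.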
@RegisterPFor("StridedSliceGrad")
def _convert_strided_slice_grad(pfor_input):
shape = pfor_input.unstacked_input(0)
begin = pfor_input.unstacked_input(1)
end = pfor_input.unstacked_input(2)
strides = pfor_input.unstacked_input(3)
dy = pfor_input.stacked_input(4)
begin_mask = pfor_input.get_attr("begin_mask")
end_mask = pfor_input.get_attr("end_mask")
ellipsis_mask = pfor_input.get_attr("ellipsis_mask")
new_axis_mask = pfor_input.get_attr("new_axis_mask")
shrink_axis_mask = pfor_input.get_attr("shrink_axis_mask")
shape = array_ops.concat([pfor_input.pfor.loop_len_vector, shape], axis=0)
begin = array_ops.concat([[0], begin], axis=0)
end = array_ops.concat([[0], end], axis=0)
strides = array_ops.concat([[1], strides], axis=0)
begin_mask = begin_mask << 1 | 1
end_mask = end_mask << 1 | 1
ellipsis_mask <<= 1
new_axis_mask <<= 1
shrink_axis_mask <<= 1
return wrap(
array_ops.strided_slice_grad(
shape,
begin,
end,
strides,
dy,
begin_mask=begin_mask,
end_mask=end_mask,
ellipsis_mask=ellipsis_mask,
new_axis_mask=new_axis_mask,
shrink_axis_mask=shrink_axis_mask), True)
# math_ops
@RegisterPFor("MatMul")
def _convert_matmul(pfor_input):
# TODO(agarwal): Check if tiling is faster than two transposes.
a, a_stacked, _ = pfor_input.input(0)
b, b_stacked, _ = pfor_input.input(1)
tr_a = pfor_input.get_attr("transpose_a")
tr_b = pfor_input.get_attr("transpose_b")
if a_stacked and b_stacked:
output = wrap(math_ops.matmul(a, b, adjoint_a=tr_a, adjoint_b=tr_b), True)
return output
elif a_stacked:
if tr_a:
a = array_ops.transpose(a, [0, 2, 1])
if a.shape.is_fully_defined():
x, y, z = a.shape
else:
x, y, z = [
array_ops.reshape(i, [])
for i in array_ops.split(array_ops.shape(a), 3)
]
a = array_ops.reshape(a, [x * y, z])
prod = math_ops.matmul(a, b, transpose_b=tr_b)
return wrap(array_ops.reshape(prod, [x, y, -1]), True)
else:
assert b_stacked
if tr_b:
perm = [2, 0, 1]
b = array_ops.transpose(b, perm)
else:
# As an optimization, if one of the first two dimensions is 1, then we can
# reshape instead of transpose.
# TODO(agarwal): This check can be done inside Transpose kernel.
b_shape = array_ops.shape(b)
min_dim = math_ops.minimum(b_shape[0], b_shape[1])
perm = control_flow_ops.cond(
math_ops.equal(min_dim, 1), lambda: [0, 1, 2], lambda: [1, 0, 2])
new_shape = array_ops.stack([b_shape[1], b_shape[0], b_shape[2]])
b = array_ops.transpose(b, perm)
b = array_ops.reshape(b, new_shape)
if b.shape.is_fully_defined():
x, y, z = b.shape
else:
x, y, z = [
array_ops.reshape(i, [])
for i in array_ops.split(array_ops.shape(b), 3)
]
b = array_ops.reshape(b, [x, y * z])
prod = math_ops.matmul(a, b, transpose_a=tr_a)
prod = array_ops.reshape(prod, [-1, y, z])
prod = array_ops.transpose(prod, [1, 0, 2])
return wrap(prod, True)
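# Shape sketch for the a_stacked branch above (hedged): a of shape [x, y, z]
# (x pfor iterations of [y, z] matrices) is flattened to [x * y, z],
# multiplied with b of shape [z, w] into an [x * y, w] product, then reshaped
# back to [x, y, w].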
@RegisterPFor("BatchMatMul")
def _convert_batch_mat_mul(pfor_input):
# TODO(agarwal): There may be a more efficient way to do this instead of
# stacking the inputs.
pfor_input.stack_inputs()
x = pfor_input.stacked_input(0)
y = pfor_input.stacked_input(1)
adj_x = pfor_input.get_attr("adj_x")
adj_y = pfor_input.get_attr("adj_y")
x = _flatten_first_two_dims(x)
y = _flatten_first_two_dims(y)
output = math_ops.matmul(x, y, adjoint_a=adj_x, adjoint_b=adj_y)
output = _unflatten_first_dim(output, pfor_input.pfor.loop_len_vector)
return wrap(output, True)
@RegisterPForWithArgs("Sum", math_ops.reduce_sum)
@RegisterPForWithArgs("Prod", math_ops.reduce_prod)
@RegisterPForWithArgs("Max", math_ops.reduce_max)
@RegisterPForWithArgs("Min", math_ops.reduce_min)
def _convert_reduction(pfor_input, _, op_func):
t = pfor_input.stacked_input(0)
indices = pfor_input.unstacked_input(1)
# Shift positive indices by one to account for the extra dimension.
indices += math_ops.cast(indices >= 0, dtypes.int32)
keep_dims = pfor_input.get_attr("keep_dims")
return wrap(op_func(t, indices, keepdims=keep_dims), True)
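# Worked example (illustration only): a per-iteration Sum over axis 0 becomes
# a reduction over axis 1 of the stacked tensor, since axis 0 is now the loop
# dimension. For a stacked input of shape [n, 3, 4], op_func(t, [1]) yields
# shape [n, 4].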
@RegisterPForWithArgs("Cumsum", math_ops.cumsum)
@RegisterPForWithArgs("Cumprod", math_ops.cumprod)
def _convert_cumfoo(pfor_input, _, op_func):
t = pfor_input.stacked_input(0)
axis = pfor_input.unstacked_input(1)
# Shift positive indices by one to account for the extra dimension.
axis += math_ops.cast(axis >= 0, dtypes.int32)
exclusive = pfor_input.get_attr("exclusive")
reverse = pfor_input.get_attr("reverse")
return wrap(op_func(t, axis, exclusive=exclusive, reverse=reverse), True)
@RegisterPFor("BiasAdd")
def _convert_biasadd(pfor_input):
t = pfor_input.stacked_input(0)
bias = pfor_input.unstacked_input(1)
data_format = pfor_input.get_attr("data_format")
if data_format != b"NCHW":
return wrap(nn_ops.bias_add(t, bias, data_format=data_format), True)
shape = array_ops.shape(t)
flattened_shape = array_ops.concat([[-1], shape[2:]], axis=0)
t = array_ops.reshape(t, flattened_shape)
t = nn_ops.bias_add(t, bias, data_format=b"NCHW")
t = array_ops.reshape(t, shape)
return wrap(t, True)
@RegisterPFor("UnsortedSegmentSum")
def _convert_unsortedsegmentsum(pfor_input):
data, data_stacked, _ = pfor_input.input(0)
# TODO(agarwal): handle unstacked?
segment_ids = pfor_input.stacked_input(1)
# TODO(agarwal): handle stacked?
num_segments = pfor_input.unstacked_input(2)
if not data_stacked:
data = _stack(data, pfor_input.pfor.loop_len_vector).t
segment_shape = array_ops.shape(segment_ids)
n = segment_shape[0]
ones = array_ops.ones_like(segment_shape)[1:]
segment_offset = num_segments * math_ops.range(n)
segment_offset = array_ops.reshape(segment_offset,
array_ops.concat([[n], ones], axis=0))
segment_ids += segment_offset
num_segments *= n
output = math_ops.unsorted_segment_sum(data, segment_ids, num_segments)
new_output_shape = array_ops.concat(
[[n, -1], array_ops.shape(output)[1:]], axis=0)
output = array_ops.reshape(output, new_output_shape)
return wrap(output, True)
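# Worked example (illustration only): with n = 2 iterations and
# num_segments = 3, iteration 0 keeps segment ids {0, 1, 2} while iteration
# 1's ids are offset to {3, 4, 5}. One unsorted_segment_sum over 6 segments
# then handles both iterations without collisions, and the final reshape
# recovers the per-iteration layout [2, 3, ...].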
@RegisterPFor("Cast")
def _convert_cast(pfor_input):
inp = pfor_input.stacked_input(0)
dtype = pfor_input.get_attr("DstT")
return wrap(math_ops.cast(inp, dtype), True)
# Note that ops handled here do not have attributes except "T", and hence don't
# need extra arguments passed to the cwise_op call below.
@RegisterPForWithArgs("Add", math_ops.add)
@RegisterPForWithArgs("Ceil", math_ops.ceil)
@RegisterPForWithArgs("Equal", math_ops.equal)
@RegisterPForWithArgs("NotEqual", math_ops.not_equal)
@RegisterPForWithArgs("Floor", math_ops.floor)
@RegisterPForWithArgs("Greater", math_ops.greater)
@RegisterPForWithArgs("GreaterEqual", math_ops.greater_equal)
@RegisterPForWithArgs("Less", math_ops.less)
@RegisterPForWithArgs("LessEqual", math_ops.less_equal)
@RegisterPForWithArgs("LogicalOr", math_ops.logical_or)
@RegisterPForWithArgs("LogicalAnd", math_ops.logical_and)
@RegisterPForWithArgs("LogicalNot", math_ops.logical_not)
@RegisterPForWithArgs("LogicalXor", math_ops.logical_xor)
@RegisterPForWithArgs("Maximum", math_ops.maximum)
@RegisterPForWithArgs("Minimum", math_ops.minimum)
@RegisterPForWithArgs("Mul", math_ops.multiply)
@RegisterPForWithArgs("Neg", math_ops.negative)
@RegisterPForWithArgs("RealDiv", math_ops.divide)
@RegisterPForWithArgs("Relu", nn_ops.relu)
@RegisterPForWithArgs("Sigmoid", math_ops.sigmoid)
@RegisterPForWithArgs("Square", math_ops.square)
@RegisterPForWithArgs("Sub", math_ops.subtract)
@RegisterPForWithArgs("Tanh", math_ops.tanh)
def _convert_cwise(pfor_input, op_type, op_func):
del op_type
pfor_input.expanddim_inputs_for_broadcast()
return wrap(op_func(*[x.t for x in pfor_input.inputs]), True)
@RegisterPFor("Shape")
def _convert_shape(pfor_input):
out_type = pfor_input.get_attr("out_type")
return wrap(
array_ops.shape(pfor_input.stacked_input(0), out_type=out_type)[1:],
False)
@RegisterPFor("ShapeN")
def _convert_shape_n(pfor_input):
out_type = pfor_input.get_attr("out_type")
shapes = [
array_ops.shape(x, out_type=out_type)[1:]
if stacked else array_ops.shape(x) for x, stacked, _ in pfor_input.inputs
]
return [wrap(x, False) for x in shapes]
@RegisterPFor("Size")
def _convert_size(pfor_input):
out_type = pfor_input.get_attr("out_type")
n = math_ops.cast(pfor_input.pfor.loop_len_vector[0], out_type)
return wrap(
array_ops.size(pfor_input.stacked_input(0), out_type=out_type) // n,
False)
@RegisterPFor("Rank")
def _convert_rank(pfor_input):
return wrap(array_ops.rank(pfor_input.stacked_input(0)) - 1, False)
@RegisterPFor("AddN")
def _convert_addn(pfor_input):
# AddN does not support broadcasting.
pfor_input.stack_inputs()
return wrap(math_ops.add_n([x.t for x in pfor_input.inputs]), True)
@RegisterPFor("BiasAddGrad")
def _convert_biasaddgrad(pfor_input):
grad = pfor_input.stacked_input(0)
fmt = pfor_input.get_attr("data_format")
if fmt == b"NCHW":
output = math_ops.reduce_sum(grad, axis=[1, 3, 4], keepdims=False)
else:
grad_shape = array_ops.shape(grad)
last_dim_shape = grad_shape[-1]
first_dim_shape = grad_shape[0]
output = array_ops.reshape(grad, [first_dim_shape, -1, last_dim_shape])
output = math_ops.reduce_sum(output, axis=[1], keepdims=False)
return wrap(output, True)
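# Worked example (illustration only): for NCHW the stacked gradient has shape
# [n, N, C, H, W]; summing over axes [1, 3, 4] keeps the loop and channel
# dimensions, giving a per-iteration bias gradient of shape [n, C].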
# Some required ops are not exposed under the tf namespace. Hence relying on
# _create_op to create them.
@RegisterPForWithArgs("ReluGrad")
@RegisterPForWithArgs("TanhGrad")
@RegisterPForWithArgs("SigmoidGrad")
def _convert_grads(pfor_input, op_type, *args, **kw_args):
del args
del kw_args
# TODO(agarwal): Looks like these ops don't support broadcasting. Hence we
# have to use tiling here.
pfor_input.stack_inputs()
outputs = _create_op(
op_type, [x.t for x in pfor_input.inputs],
[x.dtype for x in pfor_input.outputs],
attrs=pfor_input.op.node_def.attr).outputs
return [wrap(x, True) for x in outputs]
@RegisterPFor("Select")
def _convert_select(pfor_input):
pfor_input.stack_inputs()
cond = pfor_input.stacked_input(0)
t = pfor_input.stacked_input(1)
e = pfor_input.stacked_input(2)
cond_rank = array_ops.rank(cond)
cond, t, e = control_flow_ops.cond(
cond_rank > 1, lambda: _inputs_with_flattening(pfor_input, [0, 1, 2]),
lambda: [cond, t, e])
outputs = _create_op(
pfor_input.op_type, [cond, t, e], [x.dtype for x in pfor_input.outputs],
attrs=pfor_input.op.node_def.attr).outputs
n = pfor_input.pfor.loop_len_vector
out = control_flow_ops.cond(cond_rank > 1,
lambda: _unflatten_first_dim(outputs[0], n),
lambda: outputs[0])
  return [wrap(out, True) for _ in outputs]
# random_ops
@RegisterPForWithArgs("RandomUniform")
@RegisterPForWithArgs("RandomUniformInt")
@RegisterPForWithArgs("RandomStandardNormal")
@RegisterPForWithArgs("TruncatedNormal")
@RegisterPForWithArgs("RandomGamma")
@RegisterPForWithArgs("RandomPoissonV2")
def _convert_random(pfor_input, op_type, *args, **kw_args):
del args
del kw_args
inputs = [pfor_input.unstacked_input(i) for i in range(pfor_input.num_inputs)]
# inputs[0] is "shape"
inputs[0] = array_ops.concat(
[pfor_input.pfor.loop_len_vector, inputs[0]], axis=0)
logging.warning(
"Note that %s inside pfor op may not give same output as "
"inside a sequential loop.", op_type)
outputs = _create_op(
op_type,
inputs, [x.dtype for x in pfor_input.outputs],
attrs=pfor_input.op.node_def.attr).outputs
return [wrap(x, True) for x in outputs]
# logging_ops
@RegisterPFor("Assert")
def _convert_assert(pfor_input):
cond, cond_stacked, _ = pfor_input.input(0)
if cond_stacked:
cond = math_ops.reduce_all(cond)
data_list = [x.t for x in pfor_input.inputs][1:]
return _create_op("Assert", [cond] + data_list, [],
attrs=pfor_input.op.node_def.attr)
@RegisterPFor("Print")
def _convert_print(pfor_input):
# Note that we don't stack all the inputs. Hence unstacked values are printed
# once here vs multiple times in a while_loop.
pfor_input.stack_inputs([0])
outputs = _create_op(
"Print", [x.t for x in pfor_input.inputs],
[x.dtype for x in pfor_input.outputs],
attrs=pfor_input.op.node_def.attr).outputs
return [wrap(x, True) for x in outputs]
# data_flow_ops
# TensorArray conversion is tricky since we don't support arrays of
# TensorArrays. For converting them, we consider two distinct cases:
#
# 1. The array is constructed outside the pfor call, and read/written inside the
# loop.
# This is an easier case since we don't need to make an array of TensorArrays.
# A correctness requirement is that these parallel iterations shouldn't attempt
# to write to the same location. Hence at conversion time we disallow indices to
# be loop-invariant as that would guarantee a collision. Even if the indices are
# not loop-invariant, they could still conflict, which would trigger runtime
# errors.
#
# 2. The array is constructed and used entirely inside each pfor iteration.
# For simplicity, here we require that the indices used for write/scatter are
# "unstacked". Otherwise it becomes hard to merge the TensorArrays created in
# different pfor iterations. We consider two sub-cases:
#
# 2a Elements written to the array are "stacked"
# To simulate multiple TensorArrays, we may increase the dimension of each
# element of the array. i.e. the i_th row of the j_th entry of the converted
# TensorArray corresponds to the j_th entry of the TensorArray in the i_th
# pfor iteration.
#
# 2b Elements written to the array are "unstacked"
# In this case we don't increase the dimensions to avoid redundant tiling. Each
# iteration is trying to write the same value. So we convert that to a single
# write.
#
# Here are some tricks used to implement the above:
# - TensorArrayV3 constructor encodes the element shape as an attr. Instead of
# trying to trace whether future writes are stacked or unstacked in order to set
# this attr, we set it to correspond to unknown shape.
# - We use the "flow" output of the different ops to track whether the array
# elements are stacked or unstacked. If a stacked write/scatter is done, we make
# the flow stacked as well.
# - We use some heuristic traversal of the graph to track whether the
# TensorArray handle was created inside or outside the pfor loop.
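# Illustrative sketch of case 2a above (not part of this module), assuming the
# public tf.vectorized_map API:
#
#   def body(x):
#     ta = tf.TensorArray(tf.float32, size=2)
#     ta = ta.write(0, x)         # unstacked index, stacked value
#     ta = ta.write(1, 2.0 * x)
#     return ta.stack()
#   out = tf.vectorized_map(body, tf.constant([1.0, 2.0, 3.0]))  # shape [3, 2]
#
# Each iteration conceptually owns its own TensorArray; the conversion
# simulates that by widening every element with a leading loop dimension.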
@RegisterPFor("TensorArrayV3")
def _convert_tensor_array_v3(pfor_input):
size = pfor_input.unstacked_input(0)
dtype = pfor_input.get_attr("dtype")
dynamic_size = pfor_input.get_attr("dynamic_size")
clear_after_read = pfor_input.get_attr("clear_after_read")
identical_element_shapes = pfor_input.get_attr("identical_element_shapes")
tensor_array_name = pfor_input.get_attr("tensor_array_name")
handle, flow = data_flow_ops.tensor_array_v3(
size,
dtype=dtype,
# We don't set element shape since we don't know if writes are stacked or
# not yet.
element_shape=None,
dynamic_size=dynamic_size,
clear_after_read=clear_after_read,
identical_element_shapes=identical_element_shapes,
tensor_array_name=tensor_array_name)
# Note we keep flow unstacked for now since we don't know if writes will be
# stacked or not.
return wrap(handle, False), wrap(flow, False)
@RegisterPFor("TensorArraySizeV3")
def _convert_tensor_array_size_v3(pfor_input):
handle = pfor_input.unstacked_input(0)
flow, flow_stacked, _ = pfor_input.input(1)
if flow_stacked:
flow = _unstack_flow(flow)
size = data_flow_ops.tensor_array_size_v3(handle, flow)
return wrap(size, False)
def _handle_inside_pfor(pfor_input, handle):
"""Returns True if handle was created inside the pfor loop."""
# We use some heuristic to find the original TensorArray creation op.
# The logic should handle the common cases (except cond based subgraphs).
# In theory the user could perform different operations on the handle (like
# Reshape, stack multiple handles, etc) which could break this logic.
# TODO(agarwal): handle Switch/Merge.
while handle.op.type in ("Enter", "Identity"):
handle = handle.op.inputs[0]
if handle.op.type not in [
"TensorArrayV3", "TensorArrayGradV3", "TensorArrayGradWithShape"]:
raise ValueError("Unable to find source for handle %s" % handle)
else:
return pfor_input.pfor.op_is_inside_loop(handle.op)
def _unstack_flow(value):
# TODO(agarwal): consider looking if this is a Tile op then get its input.
# This may avoid running the Tile operations.
return array_ops.gather(value, 0)
@RegisterPFor("TensorArrayReadV3")
def _convert_tensor_array_read_v3(pfor_input):
handle = pfor_input.unstacked_input(0)
index, index_stacked, _ = pfor_input.input(1)
dtype = pfor_input.get_attr("dtype")
flow, flow_stacked, _ = pfor_input.input(2)
if flow_stacked:
flow = _unstack_flow(flow)
is_inside_pfor = _handle_inside_pfor(pfor_input, pfor_input.op.inputs[0])
if is_inside_pfor:
# Note that if we are inside a control flow construct inside the pfor, and
# only some of the iterations are doing the read (i.e.
# `all_indices_partitioned` is True), then the read operation should only
# return values for the currently active pfor iterations (`all_indices`
# below). Hence, whenever the returned value is stacked (i.e. `flow` is
# stacked), we may need to do an extra gather after reading the values. Also
    # note that if `is_inside_pfor` is false, then values in the tensor array are
# unstacked. So the check is only needed in this branch.
all_indices = pfor_input.pfor.all_indices
all_indices_partitioned = pfor_input.pfor.all_indices_partitioned
# Note: flow_stacked indicates if values in the TensorArray are stacked or
# not.
if index_stacked:
if flow_stacked:
raise ValueError(
"It looks like TensorArrayReadV3 was called on a TensorArray whose"
" values are not loop-invariant, and the read indices were also"
" not loop invariant. This is currently unsupported.")
value = data_flow_ops.tensor_array_gather_v3(
handle, index, flow, dtype=dtype)
return wrap(value, True)
value = data_flow_ops.tensor_array_read_v3(
handle, index, flow, dtype=dtype)
if flow_stacked and all_indices_partitioned:
value = array_ops.gather(value, all_indices)
return wrap(value, flow_stacked)
# Values in the TensorArray should be unstacked (since different iterations
# couldn't write to the same location). So whether output is stacked or not
# depends on index_stacked.
if index_stacked:
value = data_flow_ops.tensor_array_gather_v3(
handle, index, flow, dtype=dtype)
else:
value = data_flow_ops.tensor_array_read_v3(
handle, index, flow, dtype=dtype)
return wrap(value, index_stacked)
@RegisterPFor("TensorArrayWriteV3")
def _convert_tensor_array_write_v3(pfor_input):
handle = pfor_input.unstacked_input(0)
index, index_stacked, _ = pfor_input.input(1)
value, value_stacked, _ = pfor_input.input(2)
flow, flow_stacked, _ = pfor_input.input(3)
if value_stacked and pfor_input.pfor.all_indices_partitioned:
# Looks like we are in a control flow in a pfor where not all iterations are
# active now. We don't allow that since that could lead to different indices
# having different shapes which will be hard to merge later.
raise ValueError("Writing non loop invariant values to TensorArray from "
"inside a while_loop/cond not supported.")
if flow_stacked:
flow = _unstack_flow(flow)
is_inside = _handle_inside_pfor(pfor_input, pfor_input.op.inputs[0])
if is_inside:
if index_stacked:
raise ValueError("Need indices for %s to be loop invariant" % handle)
if not flow_stacked and not value_stacked:
flow_out = data_flow_ops.tensor_array_write_v3(handle, index, value, flow)
return wrap(flow_out, False)
else:
if not value_stacked:
value = _stack(value, pfor_input.pfor.loop_len_vector).t
# TODO(agarwal): Note that if flow is unstacked and value is stacked, then
# this may or may not be a safe situation. flow is unstacked both for a
# freshly created TensorArray, as well as after unstacked values are
# written to it. If it is the latter, then we cannot write a stacked value
# now since that may cause runtime errors due to different shapes in the
# array. At the moment we are not able to handle this gracefully and
# distinguish between the two cases. That would require some heuristic
# traversal of the graph to figure out whether all the writes are
# unstacked or not.
flow_out = data_flow_ops.tensor_array_write_v3(handle, index, value, flow)
return _stack(flow_out, pfor_input.pfor.loop_len_vector)
else:
if not index_stacked:
raise ValueError("Need indices for %s to be not loop invariant" % handle)
# Note that even when index_stacked is true, actual values in index may
# still not be unique. However that will cause runtime error when executing
# the scatter operation below.
if not value_stacked:
value = _stack(value, pfor_input.pfor.loop_len_vector).t
flow_out = data_flow_ops.tensor_array_scatter_v3(handle, index, value, flow)
return _stack(flow_out, pfor_input.pfor.loop_len_vector)
def _transpose_first_two_dims(value):
# TODO(agarwal): optimize if one of the dims == 1.
value_shape = array_ops.shape(value)
v0 = value_shape[0]
v1 = value_shape[1]
value = array_ops.reshape(value, [v0, v1, -1])
value = array_ops.transpose(value, [1, 0, 2])
new_shape = array_ops.concat([[v1, v0], value_shape[2:]], axis=0)
return array_ops.reshape(value, new_shape)
@RegisterPFor("TensorArrayGatherV3")
def _convert_tensor_array_gather_v3(pfor_input):
handle = pfor_input.unstacked_input(0)
indices, indices_stacked, _ = pfor_input.input(1)
indices = array_ops.reshape(indices, [-1])
flow, flow_stacked, _ = pfor_input.input(2)
if flow_stacked:
flow = _unstack_flow(flow)
dtype = pfor_input.get_attr("dtype")
# TODO(agarwal): support element_shape attr?
n = pfor_input.pfor.loop_len_vector
value = data_flow_ops.tensor_array_gather_v3(
handle, indices, flow, dtype=dtype)
is_inside = _handle_inside_pfor(pfor_input, pfor_input.op.inputs[0])
if is_inside:
# flow_stacked indicates if values in the TensorArray are stacked or not.
if indices_stacked:
if flow_stacked:
raise ValueError(
"It looks like TensorArrayGatherV3 was called on a TensorArray "
"whose values are not loop-invariant, and the indices were also "
"not loop invariant. This is currently unsupported.")
else:
value = _unflatten_first_dim(value, n)
return wrap(value, True)
else:
if flow_stacked:
# Since elements in this array are stacked and `value` was produced by
# gather, its first two dims are "gathered elements" and "stack
# dimension". Our semantics require these two to be flipped.
value = _transpose_first_two_dims(value)
return wrap(value, flow_stacked)
else:
# Values in the TensorArray should be unstacked (since different iterations
# couldn't write to the same location). So whether output is stacked or not
# depends on indices_stacked.
if indices_stacked:
value = _unflatten_first_dim(value, n)
return wrap(value, indices_stacked)
@RegisterPFor("TensorArrayScatterV3")
def _convert_tensor_array_scatter_v3(pfor_input):
handle = pfor_input.unstacked_input(0)
indices, indices_stacked, _ = pfor_input.input(1)
indices = array_ops.reshape(indices, [-1])
value, value_stacked, _ = pfor_input.input(2)
flow, flow_stacked, _ = pfor_input.input(3)
if flow_stacked:
flow = _unstack_flow(flow)
is_inside = _handle_inside_pfor(pfor_input, pfor_input.op.inputs[0])
if is_inside:
if indices_stacked:
raise ValueError("Need indices for %s to be loop invariant" % handle)
# Note that flow_stacked indicates if existing values in the array are
# stacked or not.
if not flow_stacked and not value_stacked:
flow_out = data_flow_ops.tensor_array_scatter_v3(handle, indices, value,
flow)
return wrap(flow_out, False)
if not value_stacked:
# TODO(agarwal): tile in the second dimension directly instead of
# transposing below.
value = _stack(value, pfor_input.pfor.loop_len_vector).t
value = _transpose_first_two_dims(value)
# TODO(agarwal): Note that if a previous write was unstacked, flow will be
# unstacked, and a stacked value may be written here which may cause
# runtime error due to different elements having different shape. We do
# not try to prevent that.
flow_out = data_flow_ops.tensor_array_scatter_v3(handle, indices, value,
flow)
return _stack(flow_out, pfor_input.pfor.loop_len_vector)
if not indices_stacked:
raise ValueError("Need indices for %s to be not loop invariant" % handle)
if not value_stacked:
value = _stack(value, pfor_input.pfor.loop_len_vector).t
value = _flatten_first_two_dims(value)
flow_out = data_flow_ops.tensor_array_scatter_v3(handle, indices, value,
flow)
return _stack(flow_out, pfor_input.pfor.loop_len_vector)
@RegisterPFor("TensorArrayGradV3")
def _convert_tensor_array_grad_v3(pfor_input):
handle = pfor_input.unstacked_input(0)
flow, flow_stacked, _ = pfor_input.input(1)
if flow_stacked:
flow = _unstack_flow(flow)
source = pfor_input.get_attr("source")
# TODO(agarwal): For now, we assume that gradients are stacked if the
# TensorArrayGradV3 call is being done inside the pfor. Getting that wrong
# will give runtime error due to incorrect shape being written to the
# accumulator. It is difficult to know in advance if gradients written will be
# stacked or not. Note that flow being stacked is not indicative of the
# gradient being stacked or not. Revisit this later.
shape_to_prepend = pfor_input.pfor.loop_len_vector
grad_handle, flow_out = data_flow_ops.tensor_array_grad_with_shape(
handle=handle,
flow_in=flow,
shape_to_prepend=shape_to_prepend,
source=source)
flow_out = _stack(flow_out, pfor_input.pfor.loop_len_vector).t
return [wrap(grad_handle, False), wrap(flow_out, True)]
# StackV2 conversion is tricky since we don't have arrays of StackV2. So similar
# to TensorArrays, we convert them by changing the dimension of the elements
# inside the stack.
#
# We consider two cases:
#
# 1. StackV2 is constructed and used entirely inside the pfor loop.
# We keep a single Stack and perform the push/pop operations of all the
# iterations in lock-step. We also assume that all the iterations perform these
# operations. In case of dynamic control flow, if only some of the iterations
# try to perform a push/pop, then the conversion may not work correctly and may
# cause undefined behavior.
# TODO(agarwal): test StackV2 with dynamic control flow.
#
# 2. StackV2 is constructed outside the pfor loop.
# Performing stack push/pop in a parallel fashion is ill-defined. However given
# that reading stacks created externally is a common operation when computing
# jacobians, we provide some special semantics here as follows.
# - disallow push operations to the stack
# - pop operations are performed in lock step by all iterations, similar to the
# case when the stack is created inside. A single value is popped during the
# lock-step operation and broadcast to all the iterations. Values in the stack
# are assumed to be loop-invariant.
#
# Some other implementation details:
# We use an ugly logic to find whether values in Stack data structure are
# loop invariant or not. When converting push/pop operations, we keep track of
# whether the last conversion used a stacked value or not (see _stack_cache
# below). As a result if an unstacked value is written first, subsequent stacked
# writes are disallowed when they could have been allowed in theory.
# Map from cache key based on StackV2 handle to a bool indicating whether values
# are stacked or not.
# TODO(agarwal): move _stack_cache inside pfor?
_stack_cache = {}
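# Illustrative sketch (not part of this module): each cache entry maps
# (graph, pfor, stack_handle) -> bool. After converting a push of a stacked
# element, the matching pop is treated as stacked too:
#
#   _stack_cache[(graph, pfor, handle)] = True          # set during push
#   stacked = _stack_cache.get((graph, pfor, handle))   # read during pop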
def _stack_cache_key(pfor_input):
"""Create cache key corresponding to a stack handle."""
op_type = pfor_input.op_type
assert op_type in ["StackPushV2", "StackPopV2"], op_type
orig_handle = pfor_input.op.inputs[0]
while orig_handle.op.type in ["Identity", "Enter"]:
orig_handle = orig_handle.op.inputs[0]
assert orig_handle.op.type == "StackV2", orig_handle.op
return ops.get_default_graph(), pfor_input.pfor, orig_handle
def _stack_handle_inside_pfor(handle, pfor_input):
while handle.op.type in ["Identity", "Enter"]:
handle = handle.op.inputs[0]
assert handle.op.type == "StackV2", (
"Unable to find StackV2 op. Got %s" % handle.op)
return pfor_input.pfor.op_is_inside_loop(handle.op)
@RegisterPFor("StackPushV2")
def _convert_stack_push_v2(pfor_input):
handle = pfor_input.unstacked_input(0)
elem, elem_stacked, _ = pfor_input.input(1)
swap_memory = pfor_input.get_attr("swap_memory")
if not _stack_handle_inside_pfor(pfor_input.op.inputs[0], pfor_input):
raise ValueError("StackPushV2 not allowed on stacks created outside pfor")
stack_cache_key = _stack_cache_key(pfor_input)
stacked = _stack_cache.get(stack_cache_key, None)
if stacked is None:
stacked = elem_stacked
_stack_cache[stack_cache_key] = stacked
else:
# If we previously made it unstacked then we can't revert to being stacked.
if not stacked and elem_stacked:
raise ValueError(
"It looks like the stack was previously determined to be loop"
" invariant, but we are now trying to push a loop dependent value"
" to it. This is currently unsupported.")
if stacked and not elem_stacked:
elem = _stack(elem, pfor_input.pfor.loop_len_vector).t
out = data_flow_ops.stack_push_v2(handle, elem, swap_memory=swap_memory)
return wrap(out, stacked)
# Note that inputs to this converter will be unstacked. However it should still
# get called since it is a stateful op.
@RegisterPFor("StackPopV2")
def _convert_stack_pop_v2(pfor_input):
handle = pfor_input.unstacked_input(0)
stack_cache_key = _stack_cache_key(pfor_input)
stacked = _stack_cache.get(stack_cache_key, None)
# If a StackPushV2 has not been converted yet, we default to unstacked since
# the push could be outside of pfor, or the converter may not be called if the
# inputs are unconverted.
if stacked is None:
stacked = False
_stack_cache[stack_cache_key] = False
elem_type = pfor_input.get_attr("elem_type")
out = data_flow_ops.stack_pop_v2(handle, elem_type)
return wrap(out, stacked)
# parsing_ops
@RegisterPFor("DecodeCSV")
def _convert_decode_csv(pfor_input):
lines = pfor_input.stacked_input(0)
record_defaults = [
pfor_input.unstacked_input(i) for i in range(1, pfor_input.num_inputs)
]
field_delim = pfor_input.get_attr("field_delim")
use_quote_delim = pfor_input.get_attr("use_quote_delim")
select_cols = pfor_input.get_attr("select_cols")
if not select_cols:
select_cols = None
return [
wrap(t, True) for t in parsing_ops.decode_csv(
lines,
record_defaults,
field_delim=field_delim,
use_quote_delim=use_quote_delim,
select_cols=select_cols)
]
@RegisterPFor("ParseSingleExample")
def _convert_parse_single_example(pfor_input):
serialized = pfor_input.stacked_input(0)
dense_defaults = [
pfor_input.unstacked_input(i) for i in range(1, pfor_input.num_inputs)
]
sparse_keys = pfor_input.get_attr("sparse_keys")
dense_keys = pfor_input.get_attr("dense_keys")
sparse_types = pfor_input.get_attr("sparse_types")
dense_shapes = pfor_input.get_attr("dense_shapes")
output = gen_parsing_ops.parse_example(
serialized=serialized,
names=[],
dense_defaults=dense_defaults,
sparse_keys=sparse_keys,
dense_keys=dense_keys,
sparse_types=sparse_types,
dense_shapes=dense_shapes)
return [wrap(t, True, True) for t in nest.flatten(output)]
| apache-2.0 |
danviv/trading-with-python | cookbook/reconstructVXX/reconstructVXX.py | 77 | 3574 | # -*- coding: utf-8 -*-
"""
Reconstructing VXX from futures data
author: Jev Kuznetsov
License : BSD
"""
from __future__ import division
from pandas import *
import numpy as np
import os
class Future(object):
""" vix future class, used to keep data structures simple """
def __init__(self,series,code=None):
""" code is optional, example '2010_01' """
self.series = series.dropna() # price data
self.settleDate = self.series.index[-1]
self.dt = len(self.series) # roll period (this is default, should be recalculated)
self.code = code # string code 'YYYY_MM'
def monthNr(self):
""" get month nr from the future code """
return int(self.code.split('_')[1])
def dr(self,date):
""" days remaining before settlement, on a given date """
        return sum(self.series.index > date)
def price(self,date):
""" price on a date """
return self.series.get_value(date)
def returns(df):
""" daily return """
return (df/df.shift(1)-1)
def reconstructVXX():
"""
calculate VXX returns
needs a previously preprocessed file vix_futures.csv
"""
dataDir = os.path.expanduser('~')+'/twpData'
X = DataFrame.from_csv(dataDir+'/vix_futures.csv') # raw data table
# build end dates list & futures classes
futures = []
codes = X.columns
endDates = []
for code in codes:
f = Future(X[code],code=code)
        print code, ':', f.settleDate
endDates.append(f.settleDate)
futures.append(f)
endDates = np.array(endDates)
# set roll period of each future
for i in range(1,len(futures)):
futures[i].dt = futures[i].dr(futures[i-1].settleDate)
# Y is the result table
idx = X.index
Y = DataFrame(index=idx, columns=['first','second','days_left','w1','w2',
'ret','30days_avg'])
# W is the weight matrix
W = DataFrame(data = np.zeros(X.values.shape),index=idx,columns = X.columns)
# for VXX calculation see http://www.ipathetn.com/static/pdf/vix-prospectus.pdf
# page PS-20
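    # Worked example (illustration only): with dt = 21 business days in the
    # roll period and dr = 14 days remaining before settlement, the index
    # holds w1 = 100*14/21 = 66.67% in the front-month future and
    # w2 = 100*7/21 = 33.33% in the second month, rolling a fixed fraction of
    # the position each day.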
for date in idx:
        i = np.nonzero(endDates >= date)[0][0]  # find first not expired future
first = futures[i] # first month futures class
second = futures[i+1] # second month futures class
        dr = first.dr(date)  # number of remaining days in the first futures contract
        dt = first.dt  # number of business days in roll period
W.set_value(date,codes[i],100*dr/dt)
W.set_value(date,codes[i+1],100*(dt-dr)/dt)
# this is all just debug info
p1 = first.price(date)
p2 = second.price(date)
w1 = 100*dr/dt
w2 = 100*(dt-dr)/dt
Y.set_value(date,'first',p1)
Y.set_value(date,'second',p2)
Y.set_value(date,'days_left',first.dr(date))
Y.set_value(date,'w1',w1)
Y.set_value(date,'w2',w2)
Y.set_value(date,'30days_avg',(p1*w1+p2*w2)/100)
valCurr = (X*W.shift(1)).sum(axis=1) # value on day N
valYest = (X.shift(1)*W.shift(1)).sum(axis=1) # value on day N-1
Y['ret'] = valCurr/valYest-1 # index return on day N
return Y
##-------------------Main script---------------------------
if __name__=="__main__":
    Y = reconstructVXX()
print Y.head(30)#
Y.to_csv('reconstructedVXX.csv')
| bsd-3-clause |
plaes/numpy | doc/source/conf.py | 6 | 8773 | # -*- coding: utf-8 -*-
import sys, os, re
# Check Sphinx version
import sphinx
if sphinx.__version__ < "0.5":
raise RuntimeError("Sphinx 0.5.dev or newer required")
# -----------------------------------------------------------------------------
# General configuration
# -----------------------------------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
sys.path.insert(0, os.path.abspath('../sphinxext'))
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.pngmath', 'numpydoc',
'sphinx.ext.intersphinx', 'sphinx.ext.coverage',
'sphinx.ext.doctest',
'plot_directive']
if sphinx.__version__ >= "0.7":
extensions.append('sphinx.ext.autosummary')
else:
extensions.append('autosummary')
extensions.append('only_directives')
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The master toctree document.
#master_doc = 'index'
# General substitutions.
project = 'NumPy'
copyright = '2008-2009, The Scipy community'
# The default replacements for |version| and |release|, also used in various
# other places throughout the built documents.
#
import numpy
# The short X.Y version (including .devXXXX, rcX, b1 suffixes if present)
version = re.sub(r'(\d+\.\d+)\.\d+(.*)', r'\1\2', numpy.__version__)
version = re.sub(r'(\.dev\d+).*?$', r'\1', version)
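# For example (illustration only): '1.4.0.dev7072' -> '1.4.dev7072' and
# '1.4.0rc1' -> '1.4rc1', while |release| below keeps the full string.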
# The full version, including alpha/beta/rc tags.
release = numpy.__version__
print version, release
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
today_fmt = '%B %d, %Y'
# List of documents that shouldn't be included in the build.
#unused_docs = []
# The reST default role (used for this markup: `text`) to use for all documents.
default_role = "autolink"
# List of directories, relative to source directories, that shouldn't be searched
# for source files.
exclude_dirs = []
# If true, '()' will be appended to :func: etc. cross-reference text.
add_function_parentheses = False
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# -----------------------------------------------------------------------------
# HTML output
# -----------------------------------------------------------------------------
# The style sheet to use for HTML and HTML Help pages. A file of that name
# must exist either in Sphinx' static/ path, or in one of the custom paths
# given in html_static_path.
html_style = 'scipy.css'
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
html_title = "%s v%s Manual (DRAFT)" % (project, version)
# The name of an image file (within the static path) to place at the top of
# the sidebar.
html_logo = 'scipyshiny_small.png'
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
html_sidebars = {
'index': 'indexsidebar.html'
}
# Additional templates that should be rendered to pages, maps page names to
# template names.
html_additional_pages = {
'index': 'indexcontent.html',
}
# If false, no module index is generated.
html_use_modindex = True
# If true, the reST sources are included in the HTML build as _sources/<name>.
#html_copy_source = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# If nonempty, this is the file name suffix for HTML files (e.g. ".html").
#html_file_suffix = '.html'
# Output file base name for HTML help builder.
htmlhelp_basename = 'numpy'
# Pngmath should try to align formulas properly
pngmath_use_preview = True
# -----------------------------------------------------------------------------
# LaTeX output
# -----------------------------------------------------------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, document class [howto/manual]).
_stdauthor = 'Written by the NumPy community'
latex_documents = [
('reference/index', 'numpy-ref.tex', 'NumPy Reference',
_stdauthor, 'manual'),
('user/index', 'numpy-user.tex', 'NumPy User Guide',
_stdauthor, 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# Additional stuff for the LaTeX preamble.
latex_preamble = r'''
\usepackage{amsmath}
\DeclareUnicodeCharacter{00A0}{\nobreakspace}
% In the parameters section, place a newline after the Parameters
% header
\usepackage{expdlist}
\let\latexdescription=\description
\def\description{\latexdescription{}{} \breaklabel}
% Make Examples/etc section headers smaller and more compact
\makeatletter
\titleformat{\paragraph}{\normalsize\py@HeaderFamily}%
{\py@TitleColor}{0em}{\py@TitleColor}{\py@NormalColor}
\titlespacing*{\paragraph}{0pt}{1ex}{0pt}
\makeatother
% Fix footer/header
\renewcommand{\chaptermark}[1]{\markboth{\MakeUppercase{\thechapter.\ #1}}{}}
\renewcommand{\sectionmark}[1]{\markright{\MakeUppercase{\thesection.\ #1}}}
'''
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
latex_use_modindex = False
# -----------------------------------------------------------------------------
# Intersphinx configuration
# -----------------------------------------------------------------------------
intersphinx_mapping = {'http://docs.python.org/dev': None}
# -----------------------------------------------------------------------------
# Numpy extensions
# -----------------------------------------------------------------------------
# If we want to do a phantom import from an XML file for all autodocs
phantom_import_file = 'dump.xml'
# Make numpydoc to generate plots for example sections
numpydoc_use_plots = True
# -----------------------------------------------------------------------------
# Autosummary
# -----------------------------------------------------------------------------
if sphinx.__version__ >= "0.7":
import glob
autosummary_generate = glob.glob("reference/*.rst")
# -----------------------------------------------------------------------------
# Coverage checker
# -----------------------------------------------------------------------------
coverage_ignore_modules = r"""
""".split()
coverage_ignore_functions = r"""
test($|_) (some|all)true bitwise_not cumproduct pkgload
generic\.
""".split()
coverage_ignore_classes = r"""
""".split()
coverage_c_path = []
coverage_c_regexes = {}
coverage_ignore_c_items = {}
# -----------------------------------------------------------------------------
# Plots
# -----------------------------------------------------------------------------
plot_pre_code = """
import numpy as np
np.random.seed(0)
"""
plot_include_source = True
plot_formats = [('png', 100), 'pdf']
import math
phi = (math.sqrt(5) + 1)/2
import matplotlib
matplotlib.rcParams.update({
'font.size': 8,
'axes.titlesize': 8,
'axes.labelsize': 8,
'xtick.labelsize': 8,
'ytick.labelsize': 8,
'legend.fontsize': 8,
'figure.figsize': (3*phi, 3),
'figure.subplot.bottom': 0.2,
'figure.subplot.left': 0.2,
'figure.subplot.right': 0.9,
'figure.subplot.top': 0.85,
'figure.subplot.wspace': 0.4,
'text.usetex': False,
})
| bsd-3-clause |
smallyear/linuxLearn | salt/salt/client/ssh/state.py | 1 | 6047 | # -*- coding: utf-8 -*-
'''
Create ssh executor system
'''
from __future__ import absolute_import
# Import python libs
import os
import tarfile
import tempfile
import json
import shutil
from contextlib import closing
# Import salt libs
import salt.client.ssh.shell
import salt.client.ssh
import salt.utils
import salt.utils.thin
import salt.utils.url
import salt.roster
import salt.state
import salt.loader
import salt.minion
class SSHState(salt.state.State):
'''
Create a State object which wraps the SSH functions for state operations
'''
def __init__(self, opts, pillar=None, wrapper=None):
self.wrapper = wrapper
super(SSHState, self).__init__(opts, pillar)
def load_modules(self, data=None, proxy=None):
'''
Load up the modules for remote compilation via ssh
'''
self.functions = self.wrapper
self.utils = salt.loader.utils(self.opts)
locals_ = salt.loader.minion_mods(self.opts, utils=self.utils)
self.states = salt.loader.states(self.opts, locals_, self.utils)
self.rend = salt.loader.render(self.opts, self.functions)
def check_refresh(self, data, ret):
'''
Stub out check_refresh
'''
return
def module_refresh(self):
'''
Module refresh is not needed, stub it out
'''
return
class SSHHighState(salt.state.BaseHighState):
'''
Used to compile the highstate on the master
'''
stack = []
def __init__(self, opts, pillar=None, wrapper=None, fsclient=None):
self.client = fsclient
salt.state.BaseHighState.__init__(self, opts)
self.state = SSHState(opts, pillar, wrapper)
self.matcher = salt.minion.Matcher(self.opts)
def load_dynamic(self, matches):
'''
Stub out load_dynamic
'''
return
def lowstate_file_refs(chunks, extras=''):
'''
Create a list of file ref objects to reconcile
'''
refs = {}
for chunk in chunks:
if not isinstance(chunk, dict):
continue
saltenv = 'base'
crefs = []
for state in chunk:
if state == '__env__':
saltenv = chunk[state]
elif state.startswith('__'):
continue
crefs.extend(salt_refs(chunk[state]))
if crefs:
if saltenv not in refs:
refs[saltenv] = []
refs[saltenv].append(crefs)
if extras:
extra_refs = extras.split(',')
if extra_refs:
for env in refs:
for x in extra_refs:
refs[env].append([x])
return refs
def salt_refs(data, ret=None):
'''
Pull salt file references out of the states
'''
proto = 'salt://'
if ret is None:
ret = []
if isinstance(data, str):
if data.startswith(proto) and data not in ret:
ret.append(data)
if isinstance(data, list):
for comp in data:
salt_refs(comp, ret)
if isinstance(data, dict):
for comp in data:
salt_refs(data[comp], ret)
return ret
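# Illustrative example (not executed): salt_refs pulls every 'salt://' URL out
# of an arbitrarily nested low-state structure.
#
#   salt_refs({'file.managed': ['salt://web/nginx.conf', {'user': 'root'}]})
#   # -> ['salt://web/nginx.conf']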
def prep_trans_tar(file_client, chunks, file_refs, pillar=None, id_=None):
'''
Generate the execution package from the saltenv file refs and a low state
data structure
'''
gendir = tempfile.mkdtemp()
trans_tar = salt.utils.mkstemp()
lowfn = os.path.join(gendir, 'lowstate.json')
pillarfn = os.path.join(gendir, 'pillar.json')
sync_refs = [
[salt.utils.url.create('_modules')],
[salt.utils.url.create('_states')],
[salt.utils.url.create('_grains')],
[salt.utils.url.create('_renderers')],
[salt.utils.url.create('_returners')],
[salt.utils.url.create('_output')],
[salt.utils.url.create('_utils')],
]
with salt.utils.fopen(lowfn, 'w+') as fp_:
fp_.write(json.dumps(chunks))
if pillar:
with salt.utils.fopen(pillarfn, 'w+') as fp_:
fp_.write(json.dumps(pillar))
cachedir = os.path.join('salt-ssh', id_)
for saltenv in file_refs:
file_refs[saltenv].extend(sync_refs)
env_root = os.path.join(gendir, saltenv)
if not os.path.isdir(env_root):
os.makedirs(env_root)
for ref in file_refs[saltenv]:
for name in ref:
short = salt.utils.url.parse(name)[0]
path = file_client.cache_file(name, saltenv, cachedir=cachedir)
if path:
tgt = os.path.join(env_root, short)
tgt_dir = os.path.dirname(tgt)
if not os.path.isdir(tgt_dir):
os.makedirs(tgt_dir)
shutil.copy(path, tgt)
continue
files = file_client.cache_dir(name, saltenv, cachedir=cachedir)
if files:
for filename in files:
fn = filename[filename.find(short) + len(short):]
if fn.startswith('/'):
fn = fn.strip('/')
tgt = os.path.join(
env_root,
short,
fn,
)
tgt_dir = os.path.dirname(tgt)
if not os.path.isdir(tgt_dir):
os.makedirs(tgt_dir)
shutil.copy(filename, tgt)
continue
try: # cwd may not exist if it was removed but salt was run from it
cwd = os.getcwd()
except OSError:
cwd = None
os.chdir(gendir)
with closing(tarfile.open(trans_tar, 'w:gz')) as tfp:
for root, dirs, files in os.walk(gendir):
for name in files:
full = os.path.join(root, name)
tfp.add(full[len(gendir):].lstrip(os.sep))
if cwd:
os.chdir(cwd)
shutil.rmtree(gendir)
return trans_tar
| apache-2.0 |
doganov/edx-platform | common/djangoapps/enrollment/data.py | 41 | 9880 | """
Data Aggregation Layer of the Enrollment API. Collects all enrollment specific data into a single
source to be used throughout the API.
"""
import logging
from django.contrib.auth.models import User
from opaque_keys.edx.keys import CourseKey
from enrollment.errors import (
CourseEnrollmentClosedError, CourseEnrollmentFullError,
CourseEnrollmentExistsError, UserNotFoundError, InvalidEnrollmentAttribute
)
from enrollment.serializers import CourseEnrollmentSerializer, CourseSerializer
from openedx.core.djangoapps.content.course_overviews.models import CourseOverview
from openedx.core.lib.exceptions import CourseNotFoundError
from student.models import (
CourseEnrollment, NonExistentCourseError, EnrollmentClosedError,
CourseFullError, AlreadyEnrolledError, CourseEnrollmentAttribute
)
log = logging.getLogger(__name__)
def get_course_enrollments(user_id):
"""Retrieve a list representing all aggregated data for a user's course enrollments.
Construct a representation of all course enrollment data for a specific user.
Args:
user_id (str): The name of the user to retrieve course enrollment information for.
Returns:
A serializable list of dictionaries of all aggregated enrollment data for a user.
"""
qset = CourseEnrollment.objects.filter(
user__username=user_id,
is_active=True
).order_by('created')
enrollments = CourseEnrollmentSerializer(qset, many=True).data
# Find deleted courses and filter them out of the results
deleted = []
valid = []
for enrollment in enrollments:
if enrollment.get("course_details") is not None:
valid.append(enrollment)
else:
deleted.append(enrollment)
if deleted:
log.warning(
(
u"Course enrollments for user %s reference "
u"courses that do not exist (this can occur if a course is deleted)."
), user_id,
)
return valid
def get_course_enrollment(username, course_id):
"""Retrieve an object representing all aggregated data for a user's course enrollment.
Get the course enrollment information for a specific user and course.
Args:
username (str): The name of the user to retrieve course enrollment information for.
course_id (str): The course to retrieve course enrollment information for.
Returns:
A serializable dictionary representing the course enrollment.
"""
course_key = CourseKey.from_string(course_id)
try:
enrollment = CourseEnrollment.objects.get(
user__username=username, course_id=course_key
)
return CourseEnrollmentSerializer(enrollment).data
except CourseEnrollment.DoesNotExist:
return None
def create_course_enrollment(username, course_id, mode, is_active):
"""Create a new course enrollment for the given user.
Creates a new course enrollment for the specified user username.
Args:
username (str): The name of the user to create a new course enrollment for.
course_id (str): The course to create the course enrollment for.
mode (str): (Optional) The mode for the new enrollment.
is_active (boolean): (Optional) Determines if the enrollment is active.
Returns:
A serializable dictionary representing the new course enrollment.
Raises:
CourseNotFoundError
CourseEnrollmentFullError
EnrollmentClosedError
CourseEnrollmentExistsError
"""
course_key = CourseKey.from_string(course_id)
try:
user = User.objects.get(username=username)
except User.DoesNotExist:
msg = u"Not user with username '{username}' found.".format(username=username)
log.warn(msg)
raise UserNotFoundError(msg)
try:
enrollment = CourseEnrollment.enroll(user, course_key, check_access=True)
return _update_enrollment(enrollment, is_active=is_active, mode=mode)
except NonExistentCourseError as err:
raise CourseNotFoundError(err.message)
except EnrollmentClosedError as err:
raise CourseEnrollmentClosedError(err.message)
except CourseFullError as err:
raise CourseEnrollmentFullError(err.message)
except AlreadyEnrolledError as err:
enrollment = get_course_enrollment(username, course_id)
raise CourseEnrollmentExistsError(err.message, enrollment)
def update_course_enrollment(username, course_id, mode=None, is_active=None):
"""Modify a course enrollment for a user.
Allows updates to a specific course enrollment.
Args:
username (str): The name of the user to retrieve course enrollment information for.
course_id (str): The course to retrieve course enrollment information for.
mode (str): (Optional) If specified, modify the mode for this enrollment.
is_active (boolean): (Optional) Determines if the enrollment is active.
Returns:
A serializable dictionary representing the modified course enrollment.
"""
course_key = CourseKey.from_string(course_id)
try:
user = User.objects.get(username=username)
except User.DoesNotExist:
msg = u"Not user with username '{username}' found.".format(username=username)
log.warn(msg)
raise UserNotFoundError(msg)
try:
enrollment = CourseEnrollment.objects.get(user=user, course_id=course_key)
return _update_enrollment(enrollment, is_active=is_active, mode=mode)
except CourseEnrollment.DoesNotExist:
return None
def add_or_update_enrollment_attr(user_id, course_id, attributes):
"""Set enrollment attributes for the enrollment of given user in the
course provided.
Args:
course_id (str): The Course to set enrollment attributes for.
user_id (str): The User to set enrollment attributes for.
attributes (list): Attributes to be set.
Example:
>>>add_or_update_enrollment_attr(
"Bob",
"course-v1-edX-DemoX-1T2015",
[
{
"namespace": "credit",
"name": "provider_id",
"value": "hogwarts",
},
]
)
"""
course_key = CourseKey.from_string(course_id)
user = _get_user(user_id)
enrollment = CourseEnrollment.get_enrollment(user, course_key)
if not _invalid_attribute(attributes) and enrollment is not None:
CourseEnrollmentAttribute.add_enrollment_attr(enrollment, attributes)
def get_enrollment_attributes(user_id, course_id):
"""Retrieve enrollment attributes for given user for provided course.
Args:
user_id: The User to get enrollment attributes for
course_id (str): The Course to get enrollment attributes for.
Example:
>>>get_enrollment_attributes("Bob", "course-v1-edX-DemoX-1T2015")
[
{
"namespace": "credit",
"name": "provider_id",
"value": "hogwarts",
},
]
Returns: list
"""
course_key = CourseKey.from_string(course_id)
user = _get_user(user_id)
enrollment = CourseEnrollment.get_enrollment(user, course_key)
return CourseEnrollmentAttribute.get_enrollment_attributes(enrollment)
def _get_user(user_id):
"""Retrieve user with provided user_id
Args:
user_id(str): username of the user for which object is to retrieve
Returns: obj
"""
try:
return User.objects.get(username=user_id)
except User.DoesNotExist:
msg = u"Not user with username '{username}' found.".format(username=user_id)
log.warn(msg)
raise UserNotFoundError(msg)
def _update_enrollment(enrollment, is_active=None, mode=None):
enrollment.update_enrollment(is_active=is_active, mode=mode)
enrollment.save()
return CourseEnrollmentSerializer(enrollment).data
def _invalid_attribute(attributes):
"""Validate enrollment attribute
Args:
attributes(dict): dict of attribute
Return:
list of invalid attributes
"""
invalid_attributes = []
for attribute in attributes:
if "namespace" not in attribute:
msg = u"'namespace' not in enrollment attribute"
log.warn(msg)
invalid_attributes.append("namespace")
raise InvalidEnrollmentAttribute(msg)
if "name" not in attribute:
msg = u"'name' not in enrollment attribute"
log.warn(msg)
invalid_attributes.append("name")
raise InvalidEnrollmentAttribute(msg)
if "value" not in attribute:
msg = u"'value' not in enrollment attribute"
log.warn(msg)
invalid_attributes.append("value")
raise InvalidEnrollmentAttribute(msg)
return invalid_attributes
def get_course_enrollment_info(course_id, include_expired=False):
"""Returns all course enrollment information for the given course.
Based on the course id, return all related course information.
Args:
course_id (str): The course to retrieve enrollment information for.
include_expired (bool): Boolean denoting whether expired course modes
should be included in the returned JSON data.
Returns:
A serializable dictionary representing the course's enrollment information.
Raises:
CourseNotFoundError
"""
course_key = CourseKey.from_string(course_id)
try:
course = CourseOverview.get_from_id(course_key)
except CourseOverview.DoesNotExist:
msg = u"Requested enrollment information for unknown course {course}".format(course=course_id)
log.warning(msg)
raise CourseNotFoundError(msg)
else:
return CourseSerializer(course, include_expired=include_expired).data
| agpl-3.0 |
xodus7/tensorflow | tensorflow/java/maven/tensorflow-android/update.py | 27 | 3972 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Fetch android artifacts and update pom properties."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import json
import string
import sys
import urllib2
def get_args():
"""Parse command line args."""
parser = argparse.ArgumentParser()
parser.add_argument(
'--version', required=True, help='Version for the artifact.')
parser.add_argument(
'--dir',
required=True,
help='Directory where the pom and aar artifact will be written.')
parser.add_argument(
'--template', required=True, help='Path to pom template file.')
return parser.parse_args()
def get_json(url):
"""Load the contents of the URL as a json object."""
return json.load(urllib2.urlopen(url))
def get_commit_id(build_info):
"""Fetch the git commit id from the build info json object."""
release_commit_id = build_info.get('build_commit_id')
if release_commit_id:
return release_commit_id
actions = build_info.get('actions')
  build_data = next(
      (a for a in actions
       if a.get('_class') == 'hudson.plugins.git.util.BuildData'), None)
if not build_data:
raise ValueError('Missing BuildData: %s' % build_info)
revision_info = build_data.get('lastBuiltRevision')
if not revision_info:
raise ValueError('Missing lastBuiltRevision: %s' % build_info)
return revision_info.get('SHA1')
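# Example build_info shapes handled above (hypothetical, trimmed):
#
#   {"build_commit_id": "8a94b0..."}
#   {"actions": [{"_class": "hudson.plugins.git.util.BuildData",
#                 "lastBuiltRevision": {"SHA1": "8a94b0..."}}]}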
def get_aar_url(build_info):
"""Given the json build info, find the URL to the tensorflow.aar artifact."""
base_url = build_info.get('url')
if not base_url:
raise ValueError('Missing url: %s' % build_info)
build_class = build_info.get('_class')
if (build_class == 'hudson.model.FreeStyleBuild' or
build_class == 'hudson.matrix.MatrixRun'):
    aar_info = next(
        (a for a in build_info.get('artifacts')
         if a.get('fileName') == 'tensorflow.aar'), None)
if not aar_info:
raise ValueError('Missing aar artifact: %s' % build_info)
return '%s/artifact/%s' % (base_url, aar_info.get('relativePath'))
raise ValueError('Unknown build_type %s' % build_info)
def read_template(path):
with open(path) as f:
return string.Template(f.read())
def main():
args = get_args()
release_prefix = 'https://storage.googleapis.com/tensorflow/libtensorflow'
info_url = '%s/android_buildinfo-%s.json' % (release_prefix, args.version)
aar_url = '%s/tensorflow-%s.aar' % (release_prefix, args.version)
build_type = 'release-android'
# Retrieve build information
build_info = get_json(info_url)
# Check all required build info is present
build_commit_id = get_commit_id(build_info)
if not build_commit_id:
raise ValueError('Missing commit id: %s' % build_info)
# Write the pom file updated with build attributes.
template = read_template(args.template)
with open('%s/pom-android.xml' % args.dir, 'w') as f:
f.write(
template.substitute({
'build_commit_id': build_commit_id,
'build_type': build_type,
'version': args.version
}))
# Retrieve the aar location if needed.
if not aar_url:
aar_url = get_aar_url(build_info)
# And download the aar to the desired location.
with open('%s/tensorflow.aar' % args.dir, 'w') as f:
aar = urllib2.urlopen(aar_url)
f.write(aar.read())
if __name__ == '__main__':
sys.exit(main())
| apache-2.0 |
marcoarruda/MissionPlanner | Lib/site-packages/scipy/fftpack/benchmarks/bench_basic.py | 63 | 7559 | """ Test functions for fftpack.basic module
"""
import sys
from numpy.testing import *
from scipy.fftpack import ifft, fft, fftn, irfft, rfft
from numpy import arange, asarray, zeros, dot, exp, pi, double, cdouble
import numpy.fft
from numpy.random import rand
def random(size):
return rand(*size)
def direct_dft(x):
x = asarray(x)
n = len(x)
y = zeros(n,dtype=cdouble)
w = -arange(n)*(2j*pi/n)
for i in range(n):
y[i] = dot(exp(i*w),x)
return y
def direct_idft(x):
x = asarray(x)
n = len(x)
y = zeros(n,dtype=cdouble)
w = arange(n)*(2j*pi/n)
for i in range(n):
y[i] = dot(exp(i*w),x)/n
return y
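# The two helpers above implement the textbook transforms used as references:
#   DFT:  y[k] = sum_m x[m] * exp(-2j*pi*k*m/n)
#   IDFT: y[k] = (1/n) * sum_m x[m] * exp(+2j*pi*k*m/n)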
class TestFft(TestCase):
def bench_random(self):
from numpy.fft import fft as numpy_fft
print
print ' Fast Fourier Transform'
print '================================================='
print ' | real input | complex input '
print '-------------------------------------------------'
print ' size | scipy | numpy | scipy | numpy '
print '-------------------------------------------------'
for size,repeat in [(100,7000),(1000,2000),
(256,10000),
(512,10000),
(1024,1000),
(2048,1000),
(2048*2,500),
(2048*4,500),
]:
print '%5s' % size,
sys.stdout.flush()
for x in [random([size]).astype(double),
random([size]).astype(cdouble)+random([size]).astype(cdouble)*1j
]:
if size > 500: y = fft(x)
else: y = direct_dft(x)
assert_array_almost_equal(fft(x),y)
print '|%8.2f' % measure('fft(x)',repeat),
sys.stdout.flush()
assert_array_almost_equal(numpy_fft(x),y)
print '|%8.2f' % measure('numpy_fft(x)',repeat),
sys.stdout.flush()
print ' (secs for %s calls)' % (repeat)
sys.stdout.flush()
class TestIfft(TestCase):
def bench_random(self):
from numpy.fft import ifft as numpy_ifft
print
print ' Inverse Fast Fourier Transform'
print '==============================================='
print ' | real input | complex input '
print '-----------------------------------------------'
print ' size | scipy | numpy | scipy | numpy '
print '-----------------------------------------------'
for size,repeat in [(100,7000),(1000,2000),
(256,10000),
(512,10000),
(1024,1000),
(2048,1000),
(2048*2,500),
(2048*4,500),
]:
print '%5s' % size,
sys.stdout.flush()
for x in [random([size]).astype(double),
random([size]).astype(cdouble)+random([size]).astype(cdouble)*1j
]:
if size > 500: y = ifft(x)
else: y = direct_idft(x)
assert_array_almost_equal(ifft(x),y)
print '|%8.2f' % measure('ifft(x)',repeat),
sys.stdout.flush()
assert_array_almost_equal(numpy_ifft(x),y)
print '|%8.2f' % measure('numpy_ifft(x)',repeat),
sys.stdout.flush()
print ' (secs for %s calls)' % (repeat)
sys.stdout.flush()
class TestRfft(TestCase):
def bench_random(self):
from numpy.fft import rfft as numpy_rfft
print
print 'Fast Fourier Transform (real data)'
print '=================================='
print ' size | scipy | numpy '
print '----------------------------------'
for size,repeat in [(100,7000),(1000,2000),
(256,10000),
(512,10000),
(1024,1000),
(2048,1000),
(2048*2,500),
(2048*4,500),
]:
print '%5s' % size,
sys.stdout.flush()
x = random([size]).astype(double)
print '|%8.2f' % measure('rfft(x)',repeat),
sys.stdout.flush()
print '|%8.2f' % measure('numpy_rfft(x)',repeat),
sys.stdout.flush()
print ' (secs for %s calls)' % (repeat)
sys.stdout.flush()
class TestIrfft(TestCase):
def bench_random(self):
from numpy.fft import irfft as numpy_irfft
print
print 'Inverse Fast Fourier Transform (real data)'
        print '=========================================='
print ' size | scipy | numpy '
print '----------------------------------'
for size,repeat in [(100,7000),(1000,2000),
(256,10000),
(512,10000),
(1024,1000),
(2048,1000),
(2048*2,500),
(2048*4,500),
]:
print '%5s' % size,
sys.stdout.flush()
x = random([size]).astype(double)
x1 = zeros(size/2+1,dtype=cdouble)
x1[0] = x[0]
for i in range(1,size/2):
x1[i] = x[2*i-1] + 1j * x[2*i]
if not size%2:
x1[-1] = x[-1]
y = irfft(x)
print '|%8.2f' % measure('irfft(x)',repeat),
sys.stdout.flush()
assert_array_almost_equal(numpy_irfft(x1,size),y)
print '|%8.2f' % measure('numpy_irfft(x1,size)',repeat),
sys.stdout.flush()
print ' (secs for %s calls)' % (repeat)
sys.stdout.flush()
class TestFftn(TestCase):
def bench_random(self):
from numpy.fft import fftn as numpy_fftn
print
print ' Multi-dimensional Fast Fourier Transform'
print '==================================================='
print ' | real input | complex input '
print '---------------------------------------------------'
print ' size | scipy | numpy | scipy | numpy '
print '---------------------------------------------------'
for size,repeat in [((100,100),100),((1000,100),7),
((256,256),10),
((512,512),3),
]:
print '%9s' % ('%sx%s'%size),
sys.stdout.flush()
for x in [random(size).astype(double),
random(size).astype(cdouble)+random(size).astype(cdouble)*1j
]:
y = fftn(x)
#if size > 500: y = fftn(x)
#else: y = direct_dft(x)
assert_array_almost_equal(fftn(x),y)
print '|%8.2f' % measure('fftn(x)',repeat),
sys.stdout.flush()
assert_array_almost_equal(numpy_fftn(x),y)
print '|%8.2f' % measure('numpy_fftn(x)',repeat),
sys.stdout.flush()
print ' (secs for %s calls)' % (repeat)
sys.stdout.flush()
if __name__ == "__main__":
run_module_suite()
| gpl-3.0 |
cbeck88/fifengine | engine/python/fife/extensions/cegui/ceguibasicapplication.py | 2 | 4813 | # -*- coding: utf-8 -*-
# ####################################################################
# Copyright (C) 2005-2013 by the FIFE team
# http://www.fifengine.net
# This file is part of FIFE.
#
# FIFE is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
# ####################################################################
"""
The basic application and main loop.
See the L{ApplicationBase} documentation.
"""
from fife import fife
from fife.extensions.basicapplication import ApplicationBase
import PyCEGUI
class CEGUIEventListener(fife.IKeyListener, fife.ICommandListener):
"""
Default, rudimentary event listener.
Will cause the application to quit on pressing ESC.
"""
def __init__(self, app):
self.app = app
self.engine = app.engine
eventmanager = self.engine.getEventManager()
#eventmanager.setNonConsumableKeys([fife.Key.ESCAPE])
fife.IKeyListener.__init__(self)
eventmanager.addKeyListener(self)
fife.ICommandListener.__init__(self)
eventmanager.addCommandListener(self)
self.quitrequested = False
self.debuggeractive = False
def keyPressed(self, evt):
keyval = evt.getKey().getValue()
if keyval == fife.Key.ESCAPE:
self.app.quit()
def keyReleased(self, evt):
pass
def onCommand(self, command):
if command.getCommandType() == fife.CMD_QUIT_GAME:
self.quitrequested = True
command.consume()
DEFAULT_GUI_DIR = "gui/"
class CEGUIApplicationBase(ApplicationBase):
def __init__(self, setting=None):
super(CEGUIApplicationBase, self).__init__(setting)
self._initGuiManager()
self._loadCEGuiSettings()
def _initGuiManager(self):
settings = self.engine.getSettings()
major_v, minor_v = map(int, PyCEGUI.Version__.split('.')[:2])
#For CEGUI versions lower than 0.8.0 we use the old CEGuiManager
if major_v == 0 and minor_v <= 7:
guimanager = fife.CEGuiManager()
else:
guimanager = fife.CEGui_0Manager()
#transfer ownership to the engine
guimanager.thisown = 0
self.guimanager = guimanager
self.engine.setGuiManager(self.guimanager)
self.engine.getEventManager().addSdlEventListener(self.guimanager)
def _loadCEGuiSettings(self):
self._loadResourcePaths()
def _loadResourcePaths(self):
resourceprovider = PyCEGUI.System.getSingleton().getResourceProvider()
major_v, minor_v = map(int, PyCEGUI.Version__.split('.')[:2])
if major_v == 0 and minor_v <= 7:
resourcetypemap = { "schemes" : PyCEGUI.Scheme.setDefaultResourceGroup,
"imagesets" : PyCEGUI.Imageset.setDefaultResourceGroup,
"fonts" : PyCEGUI.Font.setDefaultResourceGroup,
"layouts" : PyCEGUI.WindowManager.setDefaultResourceGroup,
"looksnfeels" : PyCEGUI.WidgetLookManager.setDefaultResourceGroup,
}
else:
resourcetypemap = { "schemes" : PyCEGUI.Scheme.setDefaultResourceGroup,
"imagesets" : PyCEGUI.ImageManager.setImagesetDefaultResourceGroup,
"fonts" : PyCEGUI.Font.setDefaultResourceGroup,
"layouts" : PyCEGUI.WindowManager.setDefaultResourceGroup,
"looksnfeels" : PyCEGUI.WidgetLookManager.setDefaultResourceGroup,
}
if not self._setting:
for restype, res_setfunc in resourcetypemap.iteritems():
resourceprovider.setResourceGroupDirectory(restype, DEFAULT_GUI_DIR + restype)
res_setfunc(restype)
else:
for restype, res_setfunc in resourcetypemap.iteritems():
path = self._setting.get("CEGUI", restype)
if path:
resourceprovider.setResourceGroupDirectory(restype, path)
res_setfunc(restype)
else:
#set default path
resourceprovider.setResourceGroupDirectory(restype, DEFAULT_GUI_DIR + restype)
res_setfunc(restype)
parser = PyCEGUI.System.getSingleton().getXMLParser()
if parser.isPropertyPresent("SchemaDefaultResourceGroup"):
path = self._setting.get("CEGUI", "schemas")
if path:
rp.setResourceGroupDirectory("schemas", path)
else:
rp.setResourceGroupDirectory("schemas", DEFAULT_GUI_DIR + "schemas")
parser.setProperty("SchemaDefaultResourceGroup", "schemas")
def createListener(self):
self._listener = CEGUIEventListener(self)
return self._listener
| lgpl-2.1 |
shuangshuangwang/spark | examples/src/main/python/mllib/gradient_boosting_classification_example.py | 27 | 2446 | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
Gradient Boosted Trees Classification Example.
"""
from pyspark import SparkContext
# $example on$
from pyspark.mllib.tree import GradientBoostedTrees, GradientBoostedTreesModel
from pyspark.mllib.util import MLUtils
# $example off$
if __name__ == "__main__":
sc = SparkContext(appName="PythonGradientBoostedTreesClassificationExample")
# $example on$
# Load and parse the data file.
data = MLUtils.loadLibSVMFile(sc, "data/mllib/sample_libsvm_data.txt")
# Split the data into training and test sets (30% held out for testing)
(trainingData, testData) = data.randomSplit([0.7, 0.3])
# Train a GradientBoostedTrees model.
# Notes: (a) Empty categoricalFeaturesInfo indicates all features are continuous.
# (b) Use more iterations in practice.
model = GradientBoostedTrees.trainClassifier(trainingData,
categoricalFeaturesInfo={}, numIterations=3)
# Evaluate model on test instances and compute test error
predictions = model.predict(testData.map(lambda x: x.features))
labelsAndPredictions = testData.map(lambda lp: lp.label).zip(predictions)
testErr = labelsAndPredictions.filter(
lambda lp: lp[0] != lp[1]).count() / float(testData.count())
print('Test Error = ' + str(testErr))
print('Learned classification GBT model:')
print(model.toDebugString())
# Save and load model
model.save(sc, "target/tmp/myGradientBoostingClassificationModel")
sameModel = GradientBoostedTreesModel.load(sc,
"target/tmp/myGradientBoostingClassificationModel")
# $example off$
| apache-2.0 |
rapidpro/chatpro | chatpro/rooms/models.py | 1 | 2494 | from __future__ import absolute_import, unicode_literals
from chatpro.profiles.tasks import sync_org_contacts
from dash.orgs.models import Org
from django.contrib.auth.models import User
from django.db import models
from django.utils.translation import ugettext_lazy as _
class Room(models.Model):
"""
Corresponds to a RapidPro contact group
"""
uuid = models.CharField(max_length=36, unique=True)
org = models.ForeignKey(Org, verbose_name=_("Organization"), related_name='rooms')
name = models.CharField(verbose_name=_("Name"), max_length=128, blank=True,
help_text=_("Name of this room"))
users = models.ManyToManyField(User, verbose_name=_("Users"), related_name='rooms',
help_text=_("Users who can chat in this room"))
managers = models.ManyToManyField(User, verbose_name=_("Managers"), related_name='manage_rooms',
help_text=_("Users who can manage contacts in this room"))
is_active = models.BooleanField(default=True, help_text="Whether this room is active")
@classmethod
def create(cls, org, name, uuid):
return cls.objects.create(org=org, name=name, uuid=uuid)
@classmethod
def get_all(cls, org):
return cls.objects.filter(org=org, is_active=True)
@classmethod
def update_room_groups(cls, org, group_uuids):
"""
Updates an org's chat rooms based on the selected groups UUIDs
"""
# de-activate rooms not included
org.rooms.exclude(uuid__in=group_uuids).update(is_active=False)
# fetch group details
groups = org.get_temba_client().get_groups()
group_names = {group.uuid: group.name for group in groups}
for group_uuid in group_uuids:
existing = org.rooms.filter(uuid=group_uuid).first()
if existing:
existing.name = group_names[group_uuid]
existing.is_active = True
existing.save()
else:
cls.create(org, group_names[group_uuid], group_uuid)
sync_org_contacts.delay(org.id)
def get_contacts(self):
return self.contacts.filter(is_active=True)
def get_users(self):
return self.users.filter(is_active=True).select_related('profile')
def get_managers(self):
return self.managers.filter(is_active=True).select_related('profile')
def __unicode__(self):
return self.name
| bsd-3-clause |
sbalde/edxplatform | common/djangoapps/third_party_auth/tests/specs/test_testshib.py | 24 | 12276 | """
Third_party_auth integration tests using a mock version of the TestShib provider
"""
import json
import unittest
import httpretty
from mock import patch
from django.core.urlresolvers import reverse
from openedx.core.lib.json_utils import EscapedEdxJSONEncoder
from student.tests.factories import UserFactory
from third_party_auth.tasks import fetch_saml_metadata
from third_party_auth.tests import testutil
TESTSHIB_ENTITY_ID = 'https://idp.testshib.org/idp/shibboleth'
TESTSHIB_METADATA_URL = 'https://mock.testshib.org/metadata/testshib-providers.xml'
TESTSHIB_SSO_URL = 'https://idp.testshib.org/idp/profile/SAML2/Redirect/SSO'
TPA_TESTSHIB_LOGIN_URL = '/auth/login/tpa-saml/?auth_entry=login&next=%2Fdashboard&idp=testshib'
TPA_TESTSHIB_REGISTER_URL = '/auth/login/tpa-saml/?auth_entry=register&next=%2Fdashboard&idp=testshib'
TPA_TESTSHIB_COMPLETE_URL = '/auth/complete/tpa-saml/'
@unittest.skipUnless(testutil.AUTH_FEATURE_ENABLED, 'third_party_auth not enabled')
class TestShibIntegrationTest(testutil.SAMLTestCase):
"""
TestShib provider Integration Test, to test SAML functionality
"""
def setUp(self):
super(TestShibIntegrationTest, self).setUp()
self.login_page_url = reverse('signin_user')
self.register_page_url = reverse('register_user')
self.enable_saml(
private_key=self._get_private_key(),
public_key=self._get_public_key(),
entity_id="https://saml.example.none",
)
# Mock out HTTP requests that may be made to TestShib:
httpretty.enable()
def metadata_callback(_request, _uri, headers):
""" Return a cached copy of TestShib's metadata by reading it from disk """
return (200, headers, self.read_data_file('testshib_metadata.xml'))
httpretty.register_uri(httpretty.GET, TESTSHIB_METADATA_URL, content_type='text/xml', body=metadata_callback)
self.addCleanup(httpretty.disable)
self.addCleanup(httpretty.reset)
# Configure the SAML library to use the same request ID for every request.
# Doing this and freezing the time allows us to play back recorded request/response pairs
uid_patch = patch('onelogin.saml2.utils.OneLogin_Saml2_Utils.generate_unique_id', return_value='TESTID')
uid_patch.start()
self.addCleanup(uid_patch.stop)
def test_login_before_metadata_fetched(self):
self._configure_testshib_provider(fetch_metadata=False)
# The user goes to the login page, and sees a button to login with TestShib:
self._check_login_page()
# The user clicks on the TestShib button:
try_login_response = self.client.get(TPA_TESTSHIB_LOGIN_URL)
# The user should be redirected to back to the login page:
self.assertEqual(try_login_response.status_code, 302)
self.assertEqual(try_login_response['Location'], self.url_prefix + self.login_page_url)
# When loading the login page, the user will see an error message:
response = self.client.get(self.login_page_url)
self.assertEqual(response.status_code, 200)
self.assertIn('Authentication with TestShib is currently unavailable.', response.content)
def test_register(self):
self._configure_testshib_provider()
self._freeze_time(timestamp=1434326820) # This is the time when the saved request/response was recorded.
# The user goes to the register page, and sees a button to register with TestShib:
self._check_register_page()
# The user clicks on the TestShib button:
try_login_response = self.client.get(TPA_TESTSHIB_REGISTER_URL)
# The user should be redirected to TestShib:
self.assertEqual(try_login_response.status_code, 302)
self.assertTrue(try_login_response['Location'].startswith(TESTSHIB_SSO_URL))
# Now the user will authenticate with the SAML provider
testshib_response = self._fake_testshib_login_and_return()
# We should be redirected to the register screen since this account is not linked to an edX account:
self.assertEqual(testshib_response.status_code, 302)
self.assertEqual(testshib_response['Location'], self.url_prefix + self.register_page_url)
register_response = self.client.get(self.register_page_url)
# We'd now like to see if the "You've successfully signed into TestShib" message is
# shown, but it's managed by a JavaScript runtime template, and we can't run JS in this
# type of test, so we just check for the variable that triggers that message.
self.assertIn('"currentProvider": "TestShib"', register_response.content)
self.assertIn('"errorMessage": null', register_response.content)
# Now do a crude check that the data (e.g. email) from the provider is displayed in the form:
self.assertIn('"defaultValue": "[email protected]"', register_response.content)
self.assertIn('"defaultValue": "Me Myself And I"', register_response.content)
# Now complete the form:
ajax_register_response = self.client.post(
reverse('user_api_registration'),
{
'email': '[email protected]',
'name': 'Myself',
'username': 'myself',
'honor_code': True,
}
)
self.assertEqual(ajax_register_response.status_code, 200)
# Then the AJAX will finish the third party auth:
continue_response = self.client.get(TPA_TESTSHIB_COMPLETE_URL)
# And we should be redirected to the dashboard:
self.assertEqual(continue_response.status_code, 302)
self.assertEqual(continue_response['Location'], self.url_prefix + reverse('dashboard'))
# Now check that we can login again:
self.client.logout()
self.verify_user_email('[email protected]')
self._test_return_login()
def test_login(self):
self._configure_testshib_provider()
self._freeze_time(timestamp=1434326820) # This is the time when the saved request/response was recorded.
user = UserFactory.create()
# The user goes to the login page, and sees a button to login with TestShib:
self._check_login_page()
# The user clicks on the TestShib button:
try_login_response = self.client.get(TPA_TESTSHIB_LOGIN_URL)
# The user should be redirected to TestShib:
self.assertEqual(try_login_response.status_code, 302)
self.assertTrue(try_login_response['Location'].startswith(TESTSHIB_SSO_URL))
# Now the user will authenticate with the SAML provider
testshib_response = self._fake_testshib_login_and_return()
# We should be redirected to the login screen since this account is not linked to an edX account:
self.assertEqual(testshib_response.status_code, 302)
self.assertEqual(testshib_response['Location'], self.url_prefix + self.login_page_url)
login_response = self.client.get(self.login_page_url)
# We'd now like to see if the "You've successfully signed into TestShib" message is
# shown, but it's managed by a JavaScript runtime template, and we can't run JS in this
# type of test, so we just check for the variable that triggers that message.
self.assertIn('"currentProvider": "TestShib"', login_response.content)
self.assertIn('"errorMessage": null', login_response.content)
# Now the user enters their username and password.
# The AJAX on the page will log them in:
ajax_login_response = self.client.post(
reverse('user_api_login_session'),
{'email': user.email, 'password': 'test'}
)
self.assertEqual(ajax_login_response.status_code, 200)
# Then the AJAX will finish the third party auth:
continue_response = self.client.get(TPA_TESTSHIB_COMPLETE_URL)
# And we should be redirected to the dashboard:
self.assertEqual(continue_response.status_code, 302)
self.assertEqual(continue_response['Location'], self.url_prefix + reverse('dashboard'))
# Now check that we can login again:
self.client.logout()
self._test_return_login()
def _test_return_login(self):
""" Test logging in to an account that is already linked. """
# Make sure we're not logged in:
dashboard_response = self.client.get(reverse('dashboard'))
self.assertEqual(dashboard_response.status_code, 302)
# The user goes to the login page, and sees a button to login with TestShib:
self._check_login_page()
# The user clicks on the TestShib button:
try_login_response = self.client.get(TPA_TESTSHIB_LOGIN_URL)
# The user should be redirected to TestShib:
self.assertEqual(try_login_response.status_code, 302)
self.assertTrue(try_login_response['Location'].startswith(TESTSHIB_SSO_URL))
# Now the user will authenticate with the SAML provider
login_response = self._fake_testshib_login_and_return()
# There will be one weird redirect required to set the login cookie:
self.assertEqual(login_response.status_code, 302)
self.assertEqual(login_response['Location'], self.url_prefix + TPA_TESTSHIB_COMPLETE_URL)
# And then we should be redirected to the dashboard:
login_response = self.client.get(TPA_TESTSHIB_COMPLETE_URL)
self.assertEqual(login_response.status_code, 302)
self.assertEqual(login_response['Location'], self.url_prefix + reverse('dashboard'))
# Now we are logged in:
dashboard_response = self.client.get(reverse('dashboard'))
self.assertEqual(dashboard_response.status_code, 200)
def _freeze_time(self, timestamp):
""" Mock the current time for SAML, so we can replay canned requests/responses """
now_patch = patch('onelogin.saml2.utils.OneLogin_Saml2_Utils.now', return_value=timestamp)
now_patch.start()
self.addCleanup(now_patch.stop)
def _check_login_page(self):
""" Load the login form and check that it contains a TestShib button """
response = self.client.get(self.login_page_url)
self.assertEqual(response.status_code, 200)
self.assertIn("TestShib", response.content)
self.assertIn(json.dumps(TPA_TESTSHIB_LOGIN_URL, cls=EscapedEdxJSONEncoder), response.content)
return response
def _check_register_page(self):
""" Load the login form and check that it contains a TestShib button """
response = self.client.get(self.register_page_url)
self.assertEqual(response.status_code, 200)
self.assertIn("TestShib", response.content)
self.assertIn(json.dumps(TPA_TESTSHIB_REGISTER_URL, cls=EscapedEdxJSONEncoder), response.content)
return response
def _configure_testshib_provider(self, **kwargs):
""" Enable and configure the TestShib SAML IdP as a third_party_auth provider """
fetch_metadata = kwargs.pop('fetch_metadata', True)
kwargs.setdefault('name', 'TestShib')
kwargs.setdefault('enabled', True)
kwargs.setdefault('idp_slug', 'testshib')
kwargs.setdefault('entity_id', TESTSHIB_ENTITY_ID)
kwargs.setdefault('metadata_source', TESTSHIB_METADATA_URL)
kwargs.setdefault('icon_class', 'fa-university')
kwargs.setdefault('attr_email', 'urn:oid:1.3.6.1.4.1.5923.1.1.1.6') # eduPersonPrincipalName
self.configure_saml_provider(**kwargs)
if fetch_metadata:
self.assertTrue(httpretty.is_enabled())
num_changed, num_failed, num_total = fetch_saml_metadata()
self.assertEqual(num_failed, 0)
self.assertEqual(num_changed, 1)
self.assertEqual(num_total, 1)
def _fake_testshib_login_and_return(self):
""" Mocked: the user logs in to TestShib and then gets redirected back """
# The SAML provider (TestShib) will authenticate the user, then get the browser to POST a response:
return self.client.post(
TPA_TESTSHIB_COMPLETE_URL,
content_type='application/x-www-form-urlencoded',
data=self.read_data_file('testshib_response.txt'),
)
| agpl-3.0 |
JoKnopp/wp-import | test/test_postgresql.py | 1 | 4427 | # -*- coding: UTF-8 -*-
# © Copyright 2009 Wolodja Wentland. All Rights Reserved.
# This file is part of wp-import.
#
# wp-import is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# wp-import is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with wp-import. If not, see <http://www.gnu.org/licenses/>.
"""Tests for wp_import.postgresql
"""
from __future__ import absolute_import
from __future__ import unicode_literals
import os
import re
import tempfile
from nose.tools import *
import wp_import.utils as wpi_utils
import wp_import.postgresql as wpi_psql
PREFIX = os.path.join(*os.path.split(os.path.dirname(__file__))[:-1])
TEST_DATA_DIR = os.path.join(PREFIX, 'test', 'data')
DOWNLOAD_DIR = os.path.join(TEST_DATA_DIR, 'download')
EXPECTED_STMTS = {
'categorylinks': [
"""INSERT INTO "categorylinks" VALUES """ \
"(130,'Linux','Linux\u5185\u6838','2006-07-25T19:03:22Z')"],
'langlinks': [
"""INSERT INTO "langlinks" VALUES """ \
"(43017,'af','Dante Alighieri')"],
'pagelinks': [
"""INSERT INTO "pagelinks" VALUES (12,0,'P/NP\u554f\u984c')"""],
'redirect': [
"""INSERT INTO "redirect" VALUES (71247,0,'ASCII\u827a\u672f')"""]}
class FakeOptions(object):
pass
def test_insert_statements():
fn_pat = re.compile(
r'''(?P<language>\w+)wiki-(?P<date>\d{8})-(?P<table>[\w_]+).*''')
for dump_path in sorted(wpi_utils.find('*.sql.gz', DOWNLOAD_DIR)):
filename = os.path.basename(dump_path)
mat = fn_pat.match(filename)
        stmts = list(wpi_psql.insert_statements(dump_path))
        eq_(stmts, EXPECTED_STMTS[mat.group('table')])
def test_categorylink_pipeline():
for file_path in wpi_utils.find('*categorylinks*.sql.gz', DOWNLOAD_DIR):
with wpi_utils.open_compressed(file_path) as cl_file:
eq_(list(wpi_psql.categorylinks_pipeline(cl_file)),
EXPECTED_STMTS['categorylinks'])
def test_psql_quotation():
eq_(list(wpi_psql.psql_quotation(['f `b`', 'baz', 'shrubbery ``'])),
['f "b"', 'baz', 'shrubbery ""'])
def test_timestamp_to_iso_8601():
eq_(list(wpi_psql.timestamp_to_iso_8601([',20080218135752) foo'])),
[",'2008-02-18T13:57:52Z') foo"])
def test_parse_pgpass():
with tempfile.NamedTemporaryFile() as tmp_f:
tmp_f.write('*:*:*:*:GrailQuest\n')
tmp_f.seek(0)
eq_(wpi_psql._parse_pgpass(tmp_f.name).next(),
{'user': '*', 'host': '*', 'port': '*', 'database': '*',
'password': 'GrailQuest'})
tmp_f.write('hostname:port:database:username:password\n')
tmp_f.seek(0)
eq_(wpi_psql._parse_pgpass(tmp_f.name).next(),
{'user': 'username', 'host': 'hostname', 'port': 'port',
'database': 'database',
'password': 'password'})
def test_password_from_pgpass():
with tempfile.NamedTemporaryFile() as tmp_f:
options = FakeOptions()
options.pg_passfile = tmp_f.name
options.pg_user = 'KingArthur'
options.pg_port = '2342'
options.pg_host = 'Camelot'
# test generic pgpass line
tmp_f.write('*:*:*:*:GrailQuest\n')
tmp_f.seek(0)
eq_(wpi_psql.password_from_pgpass(options),
'GrailQuest')
# test specific pgpass line
tmp_f.write('Camelot:2342:postgres:KingArthur:GrailQuest\n')
tmp_f.seek(0)
eq_(wpi_psql.password_from_pgpass(options),
'GrailQuest')
# test pick most specific
tmp_f.write('Jerusalem:2342:postgres:Brian:Jehova\n')
tmp_f.write('Camelot:2342:postgres:KingArthur:GrailQuest\n')
tmp_f.write('*:*:*:*:UnladenSwallow\n')
tmp_f.seek(0)
eq_(wpi_psql.password_from_pgpass(options),
'GrailQuest')
tmp_f.write('*:*:*:*\n')
tmp_f.seek(0)
assert_raises(KeyError, wpi_psql.password_from_pgpass,
options=options)
| gpl-3.0 |
simonfork/aquaria | ExternalLibs/freetype2/src/tools/glnames.py | 259 | 103407 | #!/usr/bin/env python
#
#
# FreeType 2 glyph name builder
#
# Copyright 1996-2000, 2003, 2005, 2007, 2008 by
# David Turner, Robert Wilhelm, and Werner Lemberg.
#
# This file is part of the FreeType project, and may only be used, modified,
# and distributed under the terms of the FreeType project license,
# LICENSE.TXT. By continuing to use, modify, or distribute this file you
# indicate that you have read the license and understand and accept it
# fully.
"""\
usage: %s <output-file>
This python script generates the glyph names tables defined in the
`psnames' module.
Its single argument is the name of the header file to be created.
"""
import sys, string, struct, re, os.path
# This table lists the glyphs according to the Macintosh specification.
# It is used by the TrueType Postscript names table.
#
# See
#
# http://fonts.apple.com/TTRefMan/RM06/Chap6post.html
#
# for the official list.
#
mac_standard_names = \
[
# 0
".notdef", ".null", "nonmarkingreturn", "space", "exclam",
"quotedbl", "numbersign", "dollar", "percent", "ampersand",
# 10
"quotesingle", "parenleft", "parenright", "asterisk", "plus",
"comma", "hyphen", "period", "slash", "zero",
# 20
"one", "two", "three", "four", "five",
"six", "seven", "eight", "nine", "colon",
# 30
"semicolon", "less", "equal", "greater", "question",
"at", "A", "B", "C", "D",
# 40
"E", "F", "G", "H", "I",
"J", "K", "L", "M", "N",
# 50
"O", "P", "Q", "R", "S",
"T", "U", "V", "W", "X",
# 60
"Y", "Z", "bracketleft", "backslash", "bracketright",
"asciicircum", "underscore", "grave", "a", "b",
# 70
"c", "d", "e", "f", "g",
"h", "i", "j", "k", "l",
# 80
"m", "n", "o", "p", "q",
"r", "s", "t", "u", "v",
# 90
"w", "x", "y", "z", "braceleft",
"bar", "braceright", "asciitilde", "Adieresis", "Aring",
# 100
"Ccedilla", "Eacute", "Ntilde", "Odieresis", "Udieresis",
"aacute", "agrave", "acircumflex", "adieresis", "atilde",
# 110
"aring", "ccedilla", "eacute", "egrave", "ecircumflex",
"edieresis", "iacute", "igrave", "icircumflex", "idieresis",
# 120
"ntilde", "oacute", "ograve", "ocircumflex", "odieresis",
"otilde", "uacute", "ugrave", "ucircumflex", "udieresis",
# 130
"dagger", "degree", "cent", "sterling", "section",
"bullet", "paragraph", "germandbls", "registered", "copyright",
# 140
"trademark", "acute", "dieresis", "notequal", "AE",
"Oslash", "infinity", "plusminus", "lessequal", "greaterequal",
# 150
"yen", "mu", "partialdiff", "summation", "product",
"pi", "integral", "ordfeminine", "ordmasculine", "Omega",
# 160
"ae", "oslash", "questiondown", "exclamdown", "logicalnot",
"radical", "florin", "approxequal", "Delta", "guillemotleft",
# 170
"guillemotright", "ellipsis", "nonbreakingspace", "Agrave", "Atilde",
"Otilde", "OE", "oe", "endash", "emdash",
# 180
"quotedblleft", "quotedblright", "quoteleft", "quoteright", "divide",
"lozenge", "ydieresis", "Ydieresis", "fraction", "currency",
# 190
"guilsinglleft", "guilsinglright", "fi", "fl", "daggerdbl",
"periodcentered", "quotesinglbase", "quotedblbase", "perthousand",
"Acircumflex",
# 200
"Ecircumflex", "Aacute", "Edieresis", "Egrave", "Iacute",
"Icircumflex", "Idieresis", "Igrave", "Oacute", "Ocircumflex",
# 210
"apple", "Ograve", "Uacute", "Ucircumflex", "Ugrave",
"dotlessi", "circumflex", "tilde", "macron", "breve",
# 220
"dotaccent", "ring", "cedilla", "hungarumlaut", "ogonek",
"caron", "Lslash", "lslash", "Scaron", "scaron",
# 230
"Zcaron", "zcaron", "brokenbar", "Eth", "eth",
"Yacute", "yacute", "Thorn", "thorn", "minus",
# 240
"multiply", "onesuperior", "twosuperior", "threesuperior", "onehalf",
"onequarter", "threequarters", "franc", "Gbreve", "gbreve",
# 250
"Idotaccent", "Scedilla", "scedilla", "Cacute", "cacute",
"Ccaron", "ccaron", "dcroat"
]
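# Example (illustrative): a format 2.0 `post` table entry whose glyph name
# index is below 258 refers directly into the list above, so index 0 maps
# to ".notdef" and index 3 to "space"; indices >= 258 select names stored
# in the font itself.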
# The list of standard `SID' glyph names. For the official list,
# see Annex A of document at
#
# http://partners.adobe.com/asn/developer/pdfs/tn/5176.CFF.pdf.
#
sid_standard_names = \
[
# 0
".notdef", "space", "exclam", "quotedbl", "numbersign",
"dollar", "percent", "ampersand", "quoteright", "parenleft",
# 10
"parenright", "asterisk", "plus", "comma", "hyphen",
"period", "slash", "zero", "one", "two",
# 20
"three", "four", "five", "six", "seven",
"eight", "nine", "colon", "semicolon", "less",
# 30
"equal", "greater", "question", "at", "A",
"B", "C", "D", "E", "F",
# 40
"G", "H", "I", "J", "K",
"L", "M", "N", "O", "P",
# 50
"Q", "R", "S", "T", "U",
"V", "W", "X", "Y", "Z",
# 60
"bracketleft", "backslash", "bracketright", "asciicircum", "underscore",
"quoteleft", "a", "b", "c", "d",
# 70
"e", "f", "g", "h", "i",
"j", "k", "l", "m", "n",
# 80
"o", "p", "q", "r", "s",
"t", "u", "v", "w", "x",
# 90
"y", "z", "braceleft", "bar", "braceright",
"asciitilde", "exclamdown", "cent", "sterling", "fraction",
# 100
"yen", "florin", "section", "currency", "quotesingle",
"quotedblleft", "guillemotleft", "guilsinglleft", "guilsinglright", "fi",
# 110
"fl", "endash", "dagger", "daggerdbl", "periodcentered",
"paragraph", "bullet", "quotesinglbase", "quotedblbase", "quotedblright",
# 120
"guillemotright", "ellipsis", "perthousand", "questiondown", "grave",
"acute", "circumflex", "tilde", "macron", "breve",
# 130
"dotaccent", "dieresis", "ring", "cedilla", "hungarumlaut",
"ogonek", "caron", "emdash", "AE", "ordfeminine",
# 140
"Lslash", "Oslash", "OE", "ordmasculine", "ae",
"dotlessi", "lslash", "oslash", "oe", "germandbls",
# 150
"onesuperior", "logicalnot", "mu", "trademark", "Eth",
"onehalf", "plusminus", "Thorn", "onequarter", "divide",
# 160
"brokenbar", "degree", "thorn", "threequarters", "twosuperior",
"registered", "minus", "eth", "multiply", "threesuperior",
# 170
"copyright", "Aacute", "Acircumflex", "Adieresis", "Agrave",
"Aring", "Atilde", "Ccedilla", "Eacute", "Ecircumflex",
# 180
"Edieresis", "Egrave", "Iacute", "Icircumflex", "Idieresis",
"Igrave", "Ntilde", "Oacute", "Ocircumflex", "Odieresis",
# 190
"Ograve", "Otilde", "Scaron", "Uacute", "Ucircumflex",
"Udieresis", "Ugrave", "Yacute", "Ydieresis", "Zcaron",
# 200
"aacute", "acircumflex", "adieresis", "agrave", "aring",
"atilde", "ccedilla", "eacute", "ecircumflex", "edieresis",
# 210
"egrave", "iacute", "icircumflex", "idieresis", "igrave",
"ntilde", "oacute", "ocircumflex", "odieresis", "ograve",
# 220
"otilde", "scaron", "uacute", "ucircumflex", "udieresis",
"ugrave", "yacute", "ydieresis", "zcaron", "exclamsmall",
# 230
"Hungarumlautsmall", "dollaroldstyle", "dollarsuperior", "ampersandsmall",
"Acutesmall",
"parenleftsuperior", "parenrightsuperior", "twodotenleader",
"onedotenleader", "zerooldstyle",
# 240
"oneoldstyle", "twooldstyle", "threeoldstyle", "fouroldstyle",
"fiveoldstyle",
"sixoldstyle", "sevenoldstyle", "eightoldstyle", "nineoldstyle",
"commasuperior",
# 250
"threequartersemdash", "periodsuperior", "questionsmall", "asuperior",
"bsuperior",
"centsuperior", "dsuperior", "esuperior", "isuperior", "lsuperior",
# 260
"msuperior", "nsuperior", "osuperior", "rsuperior", "ssuperior",
"tsuperior", "ff", "ffi", "ffl", "parenleftinferior",
# 270
"parenrightinferior", "Circumflexsmall", "hyphensuperior", "Gravesmall",
"Asmall",
"Bsmall", "Csmall", "Dsmall", "Esmall", "Fsmall",
# 280
"Gsmall", "Hsmall", "Ismall", "Jsmall", "Ksmall",
"Lsmall", "Msmall", "Nsmall", "Osmall", "Psmall",
# 290
"Qsmall", "Rsmall", "Ssmall", "Tsmall", "Usmall",
"Vsmall", "Wsmall", "Xsmall", "Ysmall", "Zsmall",
# 300
"colonmonetary", "onefitted", "rupiah", "Tildesmall", "exclamdownsmall",
"centoldstyle", "Lslashsmall", "Scaronsmall", "Zcaronsmall",
"Dieresissmall",
# 310
"Brevesmall", "Caronsmall", "Dotaccentsmall", "Macronsmall", "figuredash",
"hypheninferior", "Ogoneksmall", "Ringsmall", "Cedillasmall",
"questiondownsmall",
# 320
"oneeighth", "threeeighths", "fiveeighths", "seveneighths", "onethird",
"twothirds", "zerosuperior", "foursuperior", "fivesuperior",
"sixsuperior",
# 330
"sevensuperior", "eightsuperior", "ninesuperior", "zeroinferior",
"oneinferior",
"twoinferior", "threeinferior", "fourinferior", "fiveinferior",
"sixinferior",
# 340
"seveninferior", "eightinferior", "nineinferior", "centinferior",
"dollarinferior",
"periodinferior", "commainferior", "Agravesmall", "Aacutesmall",
"Acircumflexsmall",
# 350
"Atildesmall", "Adieresissmall", "Aringsmall", "AEsmall", "Ccedillasmall",
"Egravesmall", "Eacutesmall", "Ecircumflexsmall", "Edieresissmall",
"Igravesmall",
# 360
"Iacutesmall", "Icircumflexsmall", "Idieresissmall", "Ethsmall",
"Ntildesmall",
"Ogravesmall", "Oacutesmall", "Ocircumflexsmall", "Otildesmall",
"Odieresissmall",
# 370
"OEsmall", "Oslashsmall", "Ugravesmall", "Uacutesmall",
"Ucircumflexsmall",
"Udieresissmall", "Yacutesmall", "Thornsmall", "Ydieresissmall",
"001.000",
# 380
"001.001", "001.002", "001.003", "Black", "Bold",
"Book", "Light", "Medium", "Regular", "Roman",
# 390
"Semibold"
]
# This table maps character codes of the Adobe Standard Type 1
# encoding to glyph indices in the sid_standard_names table.
#
t1_standard_encoding = \
[
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 1, 2, 3, 4, 5, 6, 7, 8,
9, 10, 11, 12, 13, 14, 15, 16, 17, 18,
19, 20, 21, 22, 23, 24, 25, 26, 27, 28,
29, 30, 31, 32, 33, 34, 35, 36, 37, 38,
39, 40, 41, 42, 43, 44, 45, 46, 47, 48,
49, 50, 51, 52, 53, 54, 55, 56, 57, 58,
59, 60, 61, 62, 63, 64, 65, 66, 67, 68,
69, 70, 71, 72, 73, 74, 75, 76, 77, 78,
79, 80, 81, 82, 83, 84, 85, 86, 87, 88,
89, 90, 91, 92, 93, 94, 95, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 96, 97, 98, 99, 100, 101, 102, 103, 104,
105, 106, 107, 108, 109, 110, 0, 111, 112, 113,
114, 0, 115, 116, 117, 118, 119, 120, 121, 122,
0, 123, 0, 124, 125, 126, 127, 128, 129, 130,
131, 0, 132, 133, 0, 134, 135, 136, 137, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 138, 0, 139, 0, 0,
0, 0, 140, 141, 142, 143, 0, 0, 0, 0,
0, 144, 0, 0, 0, 145, 0, 0, 146, 147,
148, 149, 0, 0, 0, 0
]
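# Example (illustrative): decoding a character code is a double lookup,
# first through the encoding table and then into the SID name list:
#
#   sid_standard_names[t1_standard_encoding[ord("A")]] == "A"
#   sid_standard_names[t1_standard_encoding[ord("$")]] == "dollar"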
# This table maps character codes of the Adobe Expert Type 1
# encoding to glyph indices in the sid_standard_names table.
#
t1_expert_encoding = \
[
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 1, 229, 230, 0, 231, 232, 233, 234,
235, 236, 237, 238, 13, 14, 15, 99, 239, 240,
241, 242, 243, 244, 245, 246, 247, 248, 27, 28,
249, 250, 251, 252, 0, 253, 254, 255, 256, 257,
0, 0, 0, 258, 0, 0, 259, 260, 261, 262,
0, 0, 263, 264, 265, 0, 266, 109, 110, 267,
268, 269, 0, 270, 271, 272, 273, 274, 275, 276,
277, 278, 279, 280, 281, 282, 283, 284, 285, 286,
287, 288, 289, 290, 291, 292, 293, 294, 295, 296,
297, 298, 299, 300, 301, 302, 303, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 304, 305, 306, 0, 0, 307, 308, 309, 310,
311, 0, 312, 0, 0, 313, 0, 0, 314, 315,
0, 0, 316, 317, 318, 0, 0, 0, 158, 155,
163, 319, 320, 321, 322, 323, 324, 325, 0, 0,
326, 150, 164, 169, 327, 328, 329, 330, 331, 332,
333, 334, 335, 336, 337, 338, 339, 340, 341, 342,
343, 344, 345, 346, 347, 348, 349, 350, 351, 352,
353, 354, 355, 356, 357, 358, 359, 360, 361, 362,
363, 364, 365, 366, 367, 368, 369, 370, 371, 372,
373, 374, 375, 376, 377, 378
]
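# The expert encoding is resolved the same way, but maps into the "expert"
# glyph set, e.g. (illustrative):
#
#   sid_standard_names[t1_expert_encoding[ord("!")]] == "exclamsmall"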
# This data has been taken literally from the file `glyphlist.txt',
# version 2.0, 22 Sept 2002. It is available from
#
# http://partners.adobe.com/asn/developer/typeforum/unicodegn.html
# http://partners.adobe.com/public/developer/en/opentype/glyphlist.txt
#
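# Each record below has the form `glyphname;XXXX`, i.e. a glyph name and
# its Unicode value in uppercase hexadecimal (the AGL format in general
# also allows several space-separated values per name).  A minimal parsing
# sketch, for illustration only:
#
#   name, codes = record.split(";")
#   values = [int(c, 16) for c in codes.split()]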
adobe_glyph_list = """\
A;0041
AE;00C6
AEacute;01FC
AEmacron;01E2
AEsmall;F7E6
Aacute;00C1
Aacutesmall;F7E1
Abreve;0102
Abreveacute;1EAE
Abrevecyrillic;04D0
Abrevedotbelow;1EB6
Abrevegrave;1EB0
Abrevehookabove;1EB2
Abrevetilde;1EB4
Acaron;01CD
Acircle;24B6
Acircumflex;00C2
Acircumflexacute;1EA4
Acircumflexdotbelow;1EAC
Acircumflexgrave;1EA6
Acircumflexhookabove;1EA8
Acircumflexsmall;F7E2
Acircumflextilde;1EAA
Acute;F6C9
Acutesmall;F7B4
Acyrillic;0410
Adblgrave;0200
Adieresis;00C4
Adieresiscyrillic;04D2
Adieresismacron;01DE
Adieresissmall;F7E4
Adotbelow;1EA0
Adotmacron;01E0
Agrave;00C0
Agravesmall;F7E0
Ahookabove;1EA2
Aiecyrillic;04D4
Ainvertedbreve;0202
Alpha;0391
Alphatonos;0386
Amacron;0100
Amonospace;FF21
Aogonek;0104
Aring;00C5
Aringacute;01FA
Aringbelow;1E00
Aringsmall;F7E5
Asmall;F761
Atilde;00C3
Atildesmall;F7E3
Aybarmenian;0531
B;0042
Bcircle;24B7
Bdotaccent;1E02
Bdotbelow;1E04
Becyrillic;0411
Benarmenian;0532
Beta;0392
Bhook;0181
Blinebelow;1E06
Bmonospace;FF22
Brevesmall;F6F4
Bsmall;F762
Btopbar;0182
C;0043
Caarmenian;053E
Cacute;0106
Caron;F6CA
Caronsmall;F6F5
Ccaron;010C
Ccedilla;00C7
Ccedillaacute;1E08
Ccedillasmall;F7E7
Ccircle;24B8
Ccircumflex;0108
Cdot;010A
Cdotaccent;010A
Cedillasmall;F7B8
Chaarmenian;0549
Cheabkhasiancyrillic;04BC
Checyrillic;0427
Chedescenderabkhasiancyrillic;04BE
Chedescendercyrillic;04B6
Chedieresiscyrillic;04F4
Cheharmenian;0543
Chekhakassiancyrillic;04CB
Cheverticalstrokecyrillic;04B8
Chi;03A7
Chook;0187
Circumflexsmall;F6F6
Cmonospace;FF23
Coarmenian;0551
Csmall;F763
D;0044
DZ;01F1
DZcaron;01C4
Daarmenian;0534
Dafrican;0189
Dcaron;010E
Dcedilla;1E10
Dcircle;24B9
Dcircumflexbelow;1E12
Dcroat;0110
Ddotaccent;1E0A
Ddotbelow;1E0C
Decyrillic;0414
Deicoptic;03EE
Delta;2206
Deltagreek;0394
Dhook;018A
Dieresis;F6CB
DieresisAcute;F6CC
DieresisGrave;F6CD
Dieresissmall;F7A8
Digammagreek;03DC
Djecyrillic;0402
Dlinebelow;1E0E
Dmonospace;FF24
Dotaccentsmall;F6F7
Dslash;0110
Dsmall;F764
Dtopbar;018B
Dz;01F2
Dzcaron;01C5
Dzeabkhasiancyrillic;04E0
Dzecyrillic;0405
Dzhecyrillic;040F
E;0045
Eacute;00C9
Eacutesmall;F7E9
Ebreve;0114
Ecaron;011A
Ecedillabreve;1E1C
Echarmenian;0535
Ecircle;24BA
Ecircumflex;00CA
Ecircumflexacute;1EBE
Ecircumflexbelow;1E18
Ecircumflexdotbelow;1EC6
Ecircumflexgrave;1EC0
Ecircumflexhookabove;1EC2
Ecircumflexsmall;F7EA
Ecircumflextilde;1EC4
Ecyrillic;0404
Edblgrave;0204
Edieresis;00CB
Edieresissmall;F7EB
Edot;0116
Edotaccent;0116
Edotbelow;1EB8
Efcyrillic;0424
Egrave;00C8
Egravesmall;F7E8
Eharmenian;0537
Ehookabove;1EBA
Eightroman;2167
Einvertedbreve;0206
Eiotifiedcyrillic;0464
Elcyrillic;041B
Elevenroman;216A
Emacron;0112
Emacronacute;1E16
Emacrongrave;1E14
Emcyrillic;041C
Emonospace;FF25
Encyrillic;041D
Endescendercyrillic;04A2
Eng;014A
Enghecyrillic;04A4
Enhookcyrillic;04C7
Eogonek;0118
Eopen;0190
Epsilon;0395
Epsilontonos;0388
Ercyrillic;0420
Ereversed;018E
Ereversedcyrillic;042D
Escyrillic;0421
Esdescendercyrillic;04AA
Esh;01A9
Esmall;F765
Eta;0397
Etarmenian;0538
Etatonos;0389
Eth;00D0
Ethsmall;F7F0
Etilde;1EBC
Etildebelow;1E1A
Euro;20AC
Ezh;01B7
Ezhcaron;01EE
Ezhreversed;01B8
F;0046
Fcircle;24BB
Fdotaccent;1E1E
Feharmenian;0556
Feicoptic;03E4
Fhook;0191
Fitacyrillic;0472
Fiveroman;2164
Fmonospace;FF26
Fourroman;2163
Fsmall;F766
G;0047
GBsquare;3387
Gacute;01F4
Gamma;0393
Gammaafrican;0194
Gangiacoptic;03EA
Gbreve;011E
Gcaron;01E6
Gcedilla;0122
Gcircle;24BC
Gcircumflex;011C
Gcommaaccent;0122
Gdot;0120
Gdotaccent;0120
Gecyrillic;0413
Ghadarmenian;0542
Ghemiddlehookcyrillic;0494
Ghestrokecyrillic;0492
Gheupturncyrillic;0490
Ghook;0193
Gimarmenian;0533
Gjecyrillic;0403
Gmacron;1E20
Gmonospace;FF27
Grave;F6CE
Gravesmall;F760
Gsmall;F767
Gsmallhook;029B
Gstroke;01E4
H;0048
H18533;25CF
H18543;25AA
H18551;25AB
H22073;25A1
HPsquare;33CB
Haabkhasiancyrillic;04A8
Hadescendercyrillic;04B2
Hardsigncyrillic;042A
Hbar;0126
Hbrevebelow;1E2A
Hcedilla;1E28
Hcircle;24BD
Hcircumflex;0124
Hdieresis;1E26
Hdotaccent;1E22
Hdotbelow;1E24
Hmonospace;FF28
Hoarmenian;0540
Horicoptic;03E8
Hsmall;F768
Hungarumlaut;F6CF
Hungarumlautsmall;F6F8
Hzsquare;3390
I;0049
IAcyrillic;042F
IJ;0132
IUcyrillic;042E
Iacute;00CD
Iacutesmall;F7ED
Ibreve;012C
Icaron;01CF
Icircle;24BE
Icircumflex;00CE
Icircumflexsmall;F7EE
Icyrillic;0406
Idblgrave;0208
Idieresis;00CF
Idieresisacute;1E2E
Idieresiscyrillic;04E4
Idieresissmall;F7EF
Idot;0130
Idotaccent;0130
Idotbelow;1ECA
Iebrevecyrillic;04D6
Iecyrillic;0415
Ifraktur;2111
Igrave;00CC
Igravesmall;F7EC
Ihookabove;1EC8
Iicyrillic;0418
Iinvertedbreve;020A
Iishortcyrillic;0419
Imacron;012A
Imacroncyrillic;04E2
Imonospace;FF29
Iniarmenian;053B
Iocyrillic;0401
Iogonek;012E
Iota;0399
Iotaafrican;0196
Iotadieresis;03AA
Iotatonos;038A
Ismall;F769
Istroke;0197
Itilde;0128
Itildebelow;1E2C
Izhitsacyrillic;0474
Izhitsadblgravecyrillic;0476
J;004A
Jaarmenian;0541
Jcircle;24BF
Jcircumflex;0134
Jecyrillic;0408
Jheharmenian;054B
Jmonospace;FF2A
Jsmall;F76A
K;004B
KBsquare;3385
KKsquare;33CD
Kabashkircyrillic;04A0
Kacute;1E30
Kacyrillic;041A
Kadescendercyrillic;049A
Kahookcyrillic;04C3
Kappa;039A
Kastrokecyrillic;049E
Kaverticalstrokecyrillic;049C
Kcaron;01E8
Kcedilla;0136
Kcircle;24C0
Kcommaaccent;0136
Kdotbelow;1E32
Keharmenian;0554
Kenarmenian;053F
Khacyrillic;0425
Kheicoptic;03E6
Khook;0198
Kjecyrillic;040C
Klinebelow;1E34
Kmonospace;FF2B
Koppacyrillic;0480
Koppagreek;03DE
Ksicyrillic;046E
Ksmall;F76B
L;004C
LJ;01C7
LL;F6BF
Lacute;0139
Lambda;039B
Lcaron;013D
Lcedilla;013B
Lcircle;24C1
Lcircumflexbelow;1E3C
Lcommaaccent;013B
Ldot;013F
Ldotaccent;013F
Ldotbelow;1E36
Ldotbelowmacron;1E38
Liwnarmenian;053C
Lj;01C8
Ljecyrillic;0409
Llinebelow;1E3A
Lmonospace;FF2C
Lslash;0141
Lslashsmall;F6F9
Lsmall;F76C
M;004D
MBsquare;3386
Macron;F6D0
Macronsmall;F7AF
Macute;1E3E
Mcircle;24C2
Mdotaccent;1E40
Mdotbelow;1E42
Menarmenian;0544
Mmonospace;FF2D
Msmall;F76D
Mturned;019C
Mu;039C
N;004E
NJ;01CA
Nacute;0143
Ncaron;0147
Ncedilla;0145
Ncircle;24C3
Ncircumflexbelow;1E4A
Ncommaaccent;0145
Ndotaccent;1E44
Ndotbelow;1E46
Nhookleft;019D
Nineroman;2168
Nj;01CB
Njecyrillic;040A
Nlinebelow;1E48
Nmonospace;FF2E
Nowarmenian;0546
Nsmall;F76E
Ntilde;00D1
Ntildesmall;F7F1
Nu;039D
O;004F
OE;0152
OEsmall;F6FA
Oacute;00D3
Oacutesmall;F7F3
Obarredcyrillic;04E8
Obarreddieresiscyrillic;04EA
Obreve;014E
Ocaron;01D1
Ocenteredtilde;019F
Ocircle;24C4
Ocircumflex;00D4
Ocircumflexacute;1ED0
Ocircumflexdotbelow;1ED8
Ocircumflexgrave;1ED2
Ocircumflexhookabove;1ED4
Ocircumflexsmall;F7F4
Ocircumflextilde;1ED6
Ocyrillic;041E
Odblacute;0150
Odblgrave;020C
Odieresis;00D6
Odieresiscyrillic;04E6
Odieresissmall;F7F6
Odotbelow;1ECC
Ogoneksmall;F6FB
Ograve;00D2
Ogravesmall;F7F2
Oharmenian;0555
Ohm;2126
Ohookabove;1ECE
Ohorn;01A0
Ohornacute;1EDA
Ohorndotbelow;1EE2
Ohorngrave;1EDC
Ohornhookabove;1EDE
Ohorntilde;1EE0
Ohungarumlaut;0150
Oi;01A2
Oinvertedbreve;020E
Omacron;014C
Omacronacute;1E52
Omacrongrave;1E50
Omega;2126
Omegacyrillic;0460
Omegagreek;03A9
Omegaroundcyrillic;047A
Omegatitlocyrillic;047C
Omegatonos;038F
Omicron;039F
Omicrontonos;038C
Omonospace;FF2F
Oneroman;2160
Oogonek;01EA
Oogonekmacron;01EC
Oopen;0186
Oslash;00D8
Oslashacute;01FE
Oslashsmall;F7F8
Osmall;F76F
Ostrokeacute;01FE
Otcyrillic;047E
Otilde;00D5
Otildeacute;1E4C
Otildedieresis;1E4E
Otildesmall;F7F5
P;0050
Pacute;1E54
Pcircle;24C5
Pdotaccent;1E56
Pecyrillic;041F
Peharmenian;054A
Pemiddlehookcyrillic;04A6
Phi;03A6
Phook;01A4
Pi;03A0
Piwrarmenian;0553
Pmonospace;FF30
Psi;03A8
Psicyrillic;0470
Psmall;F770
Q;0051
Qcircle;24C6
Qmonospace;FF31
Qsmall;F771
R;0052
Raarmenian;054C
Racute;0154
Rcaron;0158
Rcedilla;0156
Rcircle;24C7
Rcommaaccent;0156
Rdblgrave;0210
Rdotaccent;1E58
Rdotbelow;1E5A
Rdotbelowmacron;1E5C
Reharmenian;0550
Rfraktur;211C
Rho;03A1
Ringsmall;F6FC
Rinvertedbreve;0212
Rlinebelow;1E5E
Rmonospace;FF32
Rsmall;F772
Rsmallinverted;0281
Rsmallinvertedsuperior;02B6
S;0053
SF010000;250C
SF020000;2514
SF030000;2510
SF040000;2518
SF050000;253C
SF060000;252C
SF070000;2534
SF080000;251C
SF090000;2524
SF100000;2500
SF110000;2502
SF190000;2561
SF200000;2562
SF210000;2556
SF220000;2555
SF230000;2563
SF240000;2551
SF250000;2557
SF260000;255D
SF270000;255C
SF280000;255B
SF360000;255E
SF370000;255F
SF380000;255A
SF390000;2554
SF400000;2569
SF410000;2566
SF420000;2560
SF430000;2550
SF440000;256C
SF450000;2567
SF460000;2568
SF470000;2564
SF480000;2565
SF490000;2559
SF500000;2558
SF510000;2552
SF520000;2553
SF530000;256B
SF540000;256A
Sacute;015A
Sacutedotaccent;1E64
Sampigreek;03E0
Scaron;0160
Scarondotaccent;1E66
Scaronsmall;F6FD
Scedilla;015E
Schwa;018F
Schwacyrillic;04D8
Schwadieresiscyrillic;04DA
Scircle;24C8
Scircumflex;015C
Scommaaccent;0218
Sdotaccent;1E60
Sdotbelow;1E62
Sdotbelowdotaccent;1E68
Seharmenian;054D
Sevenroman;2166
Shaarmenian;0547
Shacyrillic;0428
Shchacyrillic;0429
Sheicoptic;03E2
Shhacyrillic;04BA
Shimacoptic;03EC
Sigma;03A3
Sixroman;2165
Smonospace;FF33
Softsigncyrillic;042C
Ssmall;F773
Stigmagreek;03DA
T;0054
Tau;03A4
Tbar;0166
Tcaron;0164
Tcedilla;0162
Tcircle;24C9
Tcircumflexbelow;1E70
Tcommaaccent;0162
Tdotaccent;1E6A
Tdotbelow;1E6C
Tecyrillic;0422
Tedescendercyrillic;04AC
Tenroman;2169
Tetsecyrillic;04B4
Theta;0398
Thook;01AC
Thorn;00DE
Thornsmall;F7FE
Threeroman;2162
Tildesmall;F6FE
Tiwnarmenian;054F
Tlinebelow;1E6E
Tmonospace;FF34
Toarmenian;0539
Tonefive;01BC
Tonesix;0184
Tonetwo;01A7
Tretroflexhook;01AE
Tsecyrillic;0426
Tshecyrillic;040B
Tsmall;F774
Twelveroman;216B
Tworoman;2161
U;0055
Uacute;00DA
Uacutesmall;F7FA
Ubreve;016C
Ucaron;01D3
Ucircle;24CA
Ucircumflex;00DB
Ucircumflexbelow;1E76
Ucircumflexsmall;F7FB
Ucyrillic;0423
Udblacute;0170
Udblgrave;0214
Udieresis;00DC
Udieresisacute;01D7
Udieresisbelow;1E72
Udieresiscaron;01D9
Udieresiscyrillic;04F0
Udieresisgrave;01DB
Udieresismacron;01D5
Udieresissmall;F7FC
Udotbelow;1EE4
Ugrave;00D9
Ugravesmall;F7F9
Uhookabove;1EE6
Uhorn;01AF
Uhornacute;1EE8
Uhorndotbelow;1EF0
Uhorngrave;1EEA
Uhornhookabove;1EEC
Uhorntilde;1EEE
Uhungarumlaut;0170
Uhungarumlautcyrillic;04F2
Uinvertedbreve;0216
Ukcyrillic;0478
Umacron;016A
Umacroncyrillic;04EE
Umacrondieresis;1E7A
Umonospace;FF35
Uogonek;0172
Upsilon;03A5
Upsilon1;03D2
Upsilonacutehooksymbolgreek;03D3
Upsilonafrican;01B1
Upsilondieresis;03AB
Upsilondieresishooksymbolgreek;03D4
Upsilonhooksymbol;03D2
Upsilontonos;038E
Uring;016E
Ushortcyrillic;040E
Usmall;F775
Ustraightcyrillic;04AE
Ustraightstrokecyrillic;04B0
Utilde;0168
Utildeacute;1E78
Utildebelow;1E74
V;0056
Vcircle;24CB
Vdotbelow;1E7E
Vecyrillic;0412
Vewarmenian;054E
Vhook;01B2
Vmonospace;FF36
Voarmenian;0548
Vsmall;F776
Vtilde;1E7C
W;0057
Wacute;1E82
Wcircle;24CC
Wcircumflex;0174
Wdieresis;1E84
Wdotaccent;1E86
Wdotbelow;1E88
Wgrave;1E80
Wmonospace;FF37
Wsmall;F777
X;0058
Xcircle;24CD
Xdieresis;1E8C
Xdotaccent;1E8A
Xeharmenian;053D
Xi;039E
Xmonospace;FF38
Xsmall;F778
Y;0059
Yacute;00DD
Yacutesmall;F7FD
Yatcyrillic;0462
Ycircle;24CE
Ycircumflex;0176
Ydieresis;0178
Ydieresissmall;F7FF
Ydotaccent;1E8E
Ydotbelow;1EF4
Yericyrillic;042B
Yerudieresiscyrillic;04F8
Ygrave;1EF2
Yhook;01B3
Yhookabove;1EF6
Yiarmenian;0545
Yicyrillic;0407
Yiwnarmenian;0552
Ymonospace;FF39
Ysmall;F779
Ytilde;1EF8
Yusbigcyrillic;046A
Yusbigiotifiedcyrillic;046C
Yuslittlecyrillic;0466
Yuslittleiotifiedcyrillic;0468
Z;005A
Zaarmenian;0536
Zacute;0179
Zcaron;017D
Zcaronsmall;F6FF
Zcircle;24CF
Zcircumflex;1E90
Zdot;017B
Zdotaccent;017B
Zdotbelow;1E92
Zecyrillic;0417
Zedescendercyrillic;0498
Zedieresiscyrillic;04DE
Zeta;0396
Zhearmenian;053A
Zhebrevecyrillic;04C1
Zhecyrillic;0416
Zhedescendercyrillic;0496
Zhedieresiscyrillic;04DC
Zlinebelow;1E94
Zmonospace;FF3A
Zsmall;F77A
Zstroke;01B5
a;0061
aabengali;0986
aacute;00E1
aadeva;0906
aagujarati;0A86
aagurmukhi;0A06
aamatragurmukhi;0A3E
aarusquare;3303
aavowelsignbengali;09BE
aavowelsigndeva;093E
aavowelsigngujarati;0ABE
abbreviationmarkarmenian;055F
abbreviationsigndeva;0970
abengali;0985
abopomofo;311A
abreve;0103
abreveacute;1EAF
abrevecyrillic;04D1
abrevedotbelow;1EB7
abrevegrave;1EB1
abrevehookabove;1EB3
abrevetilde;1EB5
acaron;01CE
acircle;24D0
acircumflex;00E2
acircumflexacute;1EA5
acircumflexdotbelow;1EAD
acircumflexgrave;1EA7
acircumflexhookabove;1EA9
acircumflextilde;1EAB
acute;00B4
acutebelowcmb;0317
acutecmb;0301
acutecomb;0301
acutedeva;0954
acutelowmod;02CF
acutetonecmb;0341
acyrillic;0430
adblgrave;0201
addakgurmukhi;0A71
adeva;0905
adieresis;00E4
adieresiscyrillic;04D3
adieresismacron;01DF
adotbelow;1EA1
adotmacron;01E1
ae;00E6
aeacute;01FD
aekorean;3150
aemacron;01E3
afii00208;2015
afii08941;20A4
afii10017;0410
afii10018;0411
afii10019;0412
afii10020;0413
afii10021;0414
afii10022;0415
afii10023;0401
afii10024;0416
afii10025;0417
afii10026;0418
afii10027;0419
afii10028;041A
afii10029;041B
afii10030;041C
afii10031;041D
afii10032;041E
afii10033;041F
afii10034;0420
afii10035;0421
afii10036;0422
afii10037;0423
afii10038;0424
afii10039;0425
afii10040;0426
afii10041;0427
afii10042;0428
afii10043;0429
afii10044;042A
afii10045;042B
afii10046;042C
afii10047;042D
afii10048;042E
afii10049;042F
afii10050;0490
afii10051;0402
afii10052;0403
afii10053;0404
afii10054;0405
afii10055;0406
afii10056;0407
afii10057;0408
afii10058;0409
afii10059;040A
afii10060;040B
afii10061;040C
afii10062;040E
afii10063;F6C4
afii10064;F6C5
afii10065;0430
afii10066;0431
afii10067;0432
afii10068;0433
afii10069;0434
afii10070;0435
afii10071;0451
afii10072;0436
afii10073;0437
afii10074;0438
afii10075;0439
afii10076;043A
afii10077;043B
afii10078;043C
afii10079;043D
afii10080;043E
afii10081;043F
afii10082;0440
afii10083;0441
afii10084;0442
afii10085;0443
afii10086;0444
afii10087;0445
afii10088;0446
afii10089;0447
afii10090;0448
afii10091;0449
afii10092;044A
afii10093;044B
afii10094;044C
afii10095;044D
afii10096;044E
afii10097;044F
afii10098;0491
afii10099;0452
afii10100;0453
afii10101;0454
afii10102;0455
afii10103;0456
afii10104;0457
afii10105;0458
afii10106;0459
afii10107;045A
afii10108;045B
afii10109;045C
afii10110;045E
afii10145;040F
afii10146;0462
afii10147;0472
afii10148;0474
afii10192;F6C6
afii10193;045F
afii10194;0463
afii10195;0473
afii10196;0475
afii10831;F6C7
afii10832;F6C8
afii10846;04D9
afii299;200E
afii300;200F
afii301;200D
afii57381;066A
afii57388;060C
afii57392;0660
afii57393;0661
afii57394;0662
afii57395;0663
afii57396;0664
afii57397;0665
afii57398;0666
afii57399;0667
afii57400;0668
afii57401;0669
afii57403;061B
afii57407;061F
afii57409;0621
afii57410;0622
afii57411;0623
afii57412;0624
afii57413;0625
afii57414;0626
afii57415;0627
afii57416;0628
afii57417;0629
afii57418;062A
afii57419;062B
afii57420;062C
afii57421;062D
afii57422;062E
afii57423;062F
afii57424;0630
afii57425;0631
afii57426;0632
afii57427;0633
afii57428;0634
afii57429;0635
afii57430;0636
afii57431;0637
afii57432;0638
afii57433;0639
afii57434;063A
afii57440;0640
afii57441;0641
afii57442;0642
afii57443;0643
afii57444;0644
afii57445;0645
afii57446;0646
afii57448;0648
afii57449;0649
afii57450;064A
afii57451;064B
afii57452;064C
afii57453;064D
afii57454;064E
afii57455;064F
afii57456;0650
afii57457;0651
afii57458;0652
afii57470;0647
afii57505;06A4
afii57506;067E
afii57507;0686
afii57508;0698
afii57509;06AF
afii57511;0679
afii57512;0688
afii57513;0691
afii57514;06BA
afii57519;06D2
afii57534;06D5
afii57636;20AA
afii57645;05BE
afii57658;05C3
afii57664;05D0
afii57665;05D1
afii57666;05D2
afii57667;05D3
afii57668;05D4
afii57669;05D5
afii57670;05D6
afii57671;05D7
afii57672;05D8
afii57673;05D9
afii57674;05DA
afii57675;05DB
afii57676;05DC
afii57677;05DD
afii57678;05DE
afii57679;05DF
afii57680;05E0
afii57681;05E1
afii57682;05E2
afii57683;05E3
afii57684;05E4
afii57685;05E5
afii57686;05E6
afii57687;05E7
afii57688;05E8
afii57689;05E9
afii57690;05EA
afii57694;FB2A
afii57695;FB2B
afii57700;FB4B
afii57705;FB1F
afii57716;05F0
afii57717;05F1
afii57718;05F2
afii57723;FB35
afii57793;05B4
afii57794;05B5
afii57795;05B6
afii57796;05BB
afii57797;05B8
afii57798;05B7
afii57799;05B0
afii57800;05B2
afii57801;05B1
afii57802;05B3
afii57803;05C2
afii57804;05C1
afii57806;05B9
afii57807;05BC
afii57839;05BD
afii57841;05BF
afii57842;05C0
afii57929;02BC
afii61248;2105
afii61289;2113
afii61352;2116
afii61573;202C
afii61574;202D
afii61575;202E
afii61664;200C
afii63167;066D
afii64937;02BD
agrave;00E0
agujarati;0A85
agurmukhi;0A05
ahiragana;3042
ahookabove;1EA3
aibengali;0990
aibopomofo;311E
aideva;0910
aiecyrillic;04D5
aigujarati;0A90
aigurmukhi;0A10
aimatragurmukhi;0A48
ainarabic;0639
ainfinalarabic;FECA
aininitialarabic;FECB
ainmedialarabic;FECC
ainvertedbreve;0203
aivowelsignbengali;09C8
aivowelsigndeva;0948
aivowelsigngujarati;0AC8
akatakana;30A2
akatakanahalfwidth;FF71
akorean;314F
alef;05D0
alefarabic;0627
alefdageshhebrew;FB30
aleffinalarabic;FE8E
alefhamzaabovearabic;0623
alefhamzaabovefinalarabic;FE84
alefhamzabelowarabic;0625
alefhamzabelowfinalarabic;FE88
alefhebrew;05D0
aleflamedhebrew;FB4F
alefmaddaabovearabic;0622
alefmaddaabovefinalarabic;FE82
alefmaksuraarabic;0649
alefmaksurafinalarabic;FEF0
alefmaksurainitialarabic;FEF3
alefmaksuramedialarabic;FEF4
alefpatahhebrew;FB2E
alefqamatshebrew;FB2F
aleph;2135
allequal;224C
alpha;03B1
alphatonos;03AC
amacron;0101
amonospace;FF41
ampersand;0026
ampersandmonospace;FF06
ampersandsmall;F726
amsquare;33C2
anbopomofo;3122
angbopomofo;3124
angkhankhuthai;0E5A
angle;2220
anglebracketleft;3008
anglebracketleftvertical;FE3F
anglebracketright;3009
anglebracketrightvertical;FE40
angleleft;2329
angleright;232A
angstrom;212B
anoteleia;0387
anudattadeva;0952
anusvarabengali;0982
anusvaradeva;0902
anusvaragujarati;0A82
aogonek;0105
apaatosquare;3300
aparen;249C
apostrophearmenian;055A
apostrophemod;02BC
apple;F8FF
approaches;2250
approxequal;2248
approxequalorimage;2252
approximatelyequal;2245
araeaekorean;318E
araeakorean;318D
arc;2312
arighthalfring;1E9A
aring;00E5
aringacute;01FB
aringbelow;1E01
arrowboth;2194
arrowdashdown;21E3
arrowdashleft;21E0
arrowdashright;21E2
arrowdashup;21E1
arrowdblboth;21D4
arrowdbldown;21D3
arrowdblleft;21D0
arrowdblright;21D2
arrowdblup;21D1
arrowdown;2193
arrowdownleft;2199
arrowdownright;2198
arrowdownwhite;21E9
arrowheaddownmod;02C5
arrowheadleftmod;02C2
arrowheadrightmod;02C3
arrowheadupmod;02C4
arrowhorizex;F8E7
arrowleft;2190
arrowleftdbl;21D0
arrowleftdblstroke;21CD
arrowleftoverright;21C6
arrowleftwhite;21E6
arrowright;2192
arrowrightdblstroke;21CF
arrowrightheavy;279E
arrowrightoverleft;21C4
arrowrightwhite;21E8
arrowtableft;21E4
arrowtabright;21E5
arrowup;2191
arrowupdn;2195
arrowupdnbse;21A8
arrowupdownbase;21A8
arrowupleft;2196
arrowupleftofdown;21C5
arrowupright;2197
arrowupwhite;21E7
arrowvertex;F8E6
asciicircum;005E
asciicircummonospace;FF3E
asciitilde;007E
asciitildemonospace;FF5E
ascript;0251
ascriptturned;0252
asmallhiragana;3041
asmallkatakana;30A1
asmallkatakanahalfwidth;FF67
asterisk;002A
asteriskaltonearabic;066D
asteriskarabic;066D
asteriskmath;2217
asteriskmonospace;FF0A
asterisksmall;FE61
asterism;2042
asuperior;F6E9
asymptoticallyequal;2243
at;0040
atilde;00E3
atmonospace;FF20
atsmall;FE6B
aturned;0250
aubengali;0994
aubopomofo;3120
audeva;0914
augujarati;0A94
augurmukhi;0A14
aulengthmarkbengali;09D7
aumatragurmukhi;0A4C
auvowelsignbengali;09CC
auvowelsigndeva;094C
auvowelsigngujarati;0ACC
avagrahadeva;093D
aybarmenian;0561
ayin;05E2
ayinaltonehebrew;FB20
ayinhebrew;05E2
b;0062
babengali;09AC
backslash;005C
backslashmonospace;FF3C
badeva;092C
bagujarati;0AAC
bagurmukhi;0A2C
bahiragana;3070
bahtthai;0E3F
bakatakana;30D0
bar;007C
barmonospace;FF5C
bbopomofo;3105
bcircle;24D1
bdotaccent;1E03
bdotbelow;1E05
beamedsixteenthnotes;266C
because;2235
becyrillic;0431
beharabic;0628
behfinalarabic;FE90
behinitialarabic;FE91
behiragana;3079
behmedialarabic;FE92
behmeeminitialarabic;FC9F
behmeemisolatedarabic;FC08
behnoonfinalarabic;FC6D
bekatakana;30D9
benarmenian;0562
bet;05D1
beta;03B2
betasymbolgreek;03D0
betdagesh;FB31
betdageshhebrew;FB31
bethebrew;05D1
betrafehebrew;FB4C
bhabengali;09AD
bhadeva;092D
bhagujarati;0AAD
bhagurmukhi;0A2D
bhook;0253
bihiragana;3073
bikatakana;30D3
bilabialclick;0298
bindigurmukhi;0A02
birusquare;3331
blackcircle;25CF
blackdiamond;25C6
blackdownpointingtriangle;25BC
blackleftpointingpointer;25C4
blackleftpointingtriangle;25C0
blacklenticularbracketleft;3010
blacklenticularbracketleftvertical;FE3B
blacklenticularbracketright;3011
blacklenticularbracketrightvertical;FE3C
blacklowerlefttriangle;25E3
blacklowerrighttriangle;25E2
blackrectangle;25AC
blackrightpointingpointer;25BA
blackrightpointingtriangle;25B6
blacksmallsquare;25AA
blacksmilingface;263B
blacksquare;25A0
blackstar;2605
blackupperlefttriangle;25E4
blackupperrighttriangle;25E5
blackuppointingsmalltriangle;25B4
blackuppointingtriangle;25B2
blank;2423
blinebelow;1E07
block;2588
bmonospace;FF42
bobaimaithai;0E1A
bohiragana;307C
bokatakana;30DC
bparen;249D
bqsquare;33C3
braceex;F8F4
braceleft;007B
braceleftbt;F8F3
braceleftmid;F8F2
braceleftmonospace;FF5B
braceleftsmall;FE5B
bracelefttp;F8F1
braceleftvertical;FE37
braceright;007D
bracerightbt;F8FE
bracerightmid;F8FD
bracerightmonospace;FF5D
bracerightsmall;FE5C
bracerighttp;F8FC
bracerightvertical;FE38
bracketleft;005B
bracketleftbt;F8F0
bracketleftex;F8EF
bracketleftmonospace;FF3B
bracketlefttp;F8EE
bracketright;005D
bracketrightbt;F8FB
bracketrightex;F8FA
bracketrightmonospace;FF3D
bracketrighttp;F8F9
breve;02D8
brevebelowcmb;032E
brevecmb;0306
breveinvertedbelowcmb;032F
breveinvertedcmb;0311
breveinverteddoublecmb;0361
bridgebelowcmb;032A
bridgeinvertedbelowcmb;033A
brokenbar;00A6
bstroke;0180
bsuperior;F6EA
btopbar;0183
buhiragana;3076
bukatakana;30D6
bullet;2022
bulletinverse;25D8
bulletoperator;2219
bullseye;25CE
c;0063
caarmenian;056E
cabengali;099A
cacute;0107
cadeva;091A
cagujarati;0A9A
cagurmukhi;0A1A
calsquare;3388
candrabindubengali;0981
candrabinducmb;0310
candrabindudeva;0901
candrabindugujarati;0A81
capslock;21EA
careof;2105
caron;02C7
caronbelowcmb;032C
caroncmb;030C
carriagereturn;21B5
cbopomofo;3118
ccaron;010D
ccedilla;00E7
ccedillaacute;1E09
ccircle;24D2
ccircumflex;0109
ccurl;0255
cdot;010B
cdotaccent;010B
cdsquare;33C5
cedilla;00B8
cedillacmb;0327
cent;00A2
centigrade;2103
centinferior;F6DF
centmonospace;FFE0
centoldstyle;F7A2
centsuperior;F6E0
chaarmenian;0579
chabengali;099B
chadeva;091B
chagujarati;0A9B
chagurmukhi;0A1B
chbopomofo;3114
cheabkhasiancyrillic;04BD
checkmark;2713
checyrillic;0447
chedescenderabkhasiancyrillic;04BF
chedescendercyrillic;04B7
chedieresiscyrillic;04F5
cheharmenian;0573
chekhakassiancyrillic;04CC
cheverticalstrokecyrillic;04B9
chi;03C7
chieuchacirclekorean;3277
chieuchaparenkorean;3217
chieuchcirclekorean;3269
chieuchkorean;314A
chieuchparenkorean;3209
chochangthai;0E0A
chochanthai;0E08
chochingthai;0E09
chochoethai;0E0C
chook;0188
cieucacirclekorean;3276
cieucaparenkorean;3216
cieuccirclekorean;3268
cieuckorean;3148
cieucparenkorean;3208
cieucuparenkorean;321C
circle;25CB
circlemultiply;2297
circleot;2299
circleplus;2295
circlepostalmark;3036
circlewithlefthalfblack;25D0
circlewithrighthalfblack;25D1
circumflex;02C6
circumflexbelowcmb;032D
circumflexcmb;0302
clear;2327
clickalveolar;01C2
clickdental;01C0
clicklateral;01C1
clickretroflex;01C3
club;2663
clubsuitblack;2663
clubsuitwhite;2667
cmcubedsquare;33A4
cmonospace;FF43
cmsquaredsquare;33A0
coarmenian;0581
colon;003A
colonmonetary;20A1
colonmonospace;FF1A
colonsign;20A1
colonsmall;FE55
colontriangularhalfmod;02D1
colontriangularmod;02D0
comma;002C
commaabovecmb;0313
commaaboverightcmb;0315
commaaccent;F6C3
commaarabic;060C
commaarmenian;055D
commainferior;F6E1
commamonospace;FF0C
commareversedabovecmb;0314
commareversedmod;02BD
commasmall;FE50
commasuperior;F6E2
commaturnedabovecmb;0312
commaturnedmod;02BB
compass;263C
congruent;2245
contourintegral;222E
control;2303
controlACK;0006
controlBEL;0007
controlBS;0008
controlCAN;0018
controlCR;000D
controlDC1;0011
controlDC2;0012
controlDC3;0013
controlDC4;0014
controlDEL;007F
controlDLE;0010
controlEM;0019
controlENQ;0005
controlEOT;0004
controlESC;001B
controlETB;0017
controlETX;0003
controlFF;000C
controlFS;001C
controlGS;001D
controlHT;0009
controlLF;000A
controlNAK;0015
controlRS;001E
controlSI;000F
controlSO;000E
controlSOT;0002
controlSTX;0001
controlSUB;001A
controlSYN;0016
controlUS;001F
controlVT;000B
copyright;00A9
copyrightsans;F8E9
copyrightserif;F6D9
cornerbracketleft;300C
cornerbracketlefthalfwidth;FF62
cornerbracketleftvertical;FE41
cornerbracketright;300D
cornerbracketrighthalfwidth;FF63
cornerbracketrightvertical;FE42
corporationsquare;337F
cosquare;33C7
coverkgsquare;33C6
cparen;249E
cruzeiro;20A2
cstretched;0297
curlyand;22CF
curlyor;22CE
currency;00A4
cyrBreve;F6D1
cyrFlex;F6D2
cyrbreve;F6D4
cyrflex;F6D5
d;0064
daarmenian;0564
dabengali;09A6
dadarabic;0636
dadeva;0926
dadfinalarabic;FEBE
dadinitialarabic;FEBF
dadmedialarabic;FEC0
dagesh;05BC
dageshhebrew;05BC
dagger;2020
daggerdbl;2021
dagujarati;0AA6
dagurmukhi;0A26
dahiragana;3060
dakatakana;30C0
dalarabic;062F
dalet;05D3
daletdagesh;FB33
daletdageshhebrew;FB33
dalethatafpatah;05D3 05B2
dalethatafpatahhebrew;05D3 05B2
dalethatafsegol;05D3 05B1
dalethatafsegolhebrew;05D3 05B1
dalethebrew;05D3
dalethiriq;05D3 05B4
dalethiriqhebrew;05D3 05B4
daletholam;05D3 05B9
daletholamhebrew;05D3 05B9
daletpatah;05D3 05B7
daletpatahhebrew;05D3 05B7
daletqamats;05D3 05B8
daletqamatshebrew;05D3 05B8
daletqubuts;05D3 05BB
daletqubutshebrew;05D3 05BB
daletsegol;05D3 05B6
daletsegolhebrew;05D3 05B6
daletsheva;05D3 05B0
daletshevahebrew;05D3 05B0
dalettsere;05D3 05B5
dalettserehebrew;05D3 05B5
dalfinalarabic;FEAA
dammaarabic;064F
dammalowarabic;064F
dammatanaltonearabic;064C
dammatanarabic;064C
danda;0964
dargahebrew;05A7
dargalefthebrew;05A7
dasiapneumatacyrilliccmb;0485
dblGrave;F6D3
dblanglebracketleft;300A
dblanglebracketleftvertical;FE3D
dblanglebracketright;300B
dblanglebracketrightvertical;FE3E
dblarchinvertedbelowcmb;032B
dblarrowleft;21D4
dblarrowright;21D2
dbldanda;0965
dblgrave;F6D6
dblgravecmb;030F
dblintegral;222C
dbllowline;2017
dbllowlinecmb;0333
dbloverlinecmb;033F
dblprimemod;02BA
dblverticalbar;2016
dblverticallineabovecmb;030E
dbopomofo;3109
dbsquare;33C8
dcaron;010F
dcedilla;1E11
dcircle;24D3
dcircumflexbelow;1E13
dcroat;0111
ddabengali;09A1
ddadeva;0921
ddagujarati;0AA1
ddagurmukhi;0A21
ddalarabic;0688
ddalfinalarabic;FB89
dddhadeva;095C
ddhabengali;09A2
ddhadeva;0922
ddhagujarati;0AA2
ddhagurmukhi;0A22
ddotaccent;1E0B
ddotbelow;1E0D
decimalseparatorarabic;066B
decimalseparatorpersian;066B
decyrillic;0434
degree;00B0
dehihebrew;05AD
dehiragana;3067
deicoptic;03EF
dekatakana;30C7
deleteleft;232B
deleteright;2326
delta;03B4
deltaturned;018D
denominatorminusonenumeratorbengali;09F8
dezh;02A4
dhabengali;09A7
dhadeva;0927
dhagujarati;0AA7
dhagurmukhi;0A27
dhook;0257
dialytikatonos;0385
dialytikatonoscmb;0344
diamond;2666
diamondsuitwhite;2662
dieresis;00A8
dieresisacute;F6D7
dieresisbelowcmb;0324
dieresiscmb;0308
dieresisgrave;F6D8
dieresistonos;0385
dihiragana;3062
dikatakana;30C2
dittomark;3003
divide;00F7
divides;2223
divisionslash;2215
djecyrillic;0452
dkshade;2593
dlinebelow;1E0F
dlsquare;3397
dmacron;0111
dmonospace;FF44
dnblock;2584
dochadathai;0E0E
dodekthai;0E14
dohiragana;3069
dokatakana;30C9
dollar;0024
dollarinferior;F6E3
dollarmonospace;FF04
dollaroldstyle;F724
dollarsmall;FE69
dollarsuperior;F6E4
dong;20AB
dorusquare;3326
dotaccent;02D9
dotaccentcmb;0307
dotbelowcmb;0323
dotbelowcomb;0323
dotkatakana;30FB
dotlessi;0131
dotlessj;F6BE
dotlessjstrokehook;0284
dotmath;22C5
dottedcircle;25CC
doubleyodpatah;FB1F
doubleyodpatahhebrew;FB1F
downtackbelowcmb;031E
downtackmod;02D5
dparen;249F
dsuperior;F6EB
dtail;0256
dtopbar;018C
duhiragana;3065
dukatakana;30C5
dz;01F3
dzaltone;02A3
dzcaron;01C6
dzcurl;02A5
dzeabkhasiancyrillic;04E1
dzecyrillic;0455
dzhecyrillic;045F
e;0065
eacute;00E9
earth;2641
ebengali;098F
ebopomofo;311C
ebreve;0115
ecandradeva;090D
ecandragujarati;0A8D
ecandravowelsigndeva;0945
ecandravowelsigngujarati;0AC5
ecaron;011B
ecedillabreve;1E1D
echarmenian;0565
echyiwnarmenian;0587
ecircle;24D4
ecircumflex;00EA
ecircumflexacute;1EBF
ecircumflexbelow;1E19
ecircumflexdotbelow;1EC7
ecircumflexgrave;1EC1
ecircumflexhookabove;1EC3
ecircumflextilde;1EC5
ecyrillic;0454
edblgrave;0205
edeva;090F
edieresis;00EB
edot;0117
edotaccent;0117
edotbelow;1EB9
eegurmukhi;0A0F
eematragurmukhi;0A47
efcyrillic;0444
egrave;00E8
egujarati;0A8F
eharmenian;0567
ehbopomofo;311D
ehiragana;3048
ehookabove;1EBB
eibopomofo;311F
eight;0038
eightarabic;0668
eightbengali;09EE
eightcircle;2467
eightcircleinversesansserif;2791
eightdeva;096E
eighteencircle;2471
eighteenparen;2485
eighteenperiod;2499
eightgujarati;0AEE
eightgurmukhi;0A6E
eighthackarabic;0668
eighthangzhou;3028
eighthnotebeamed;266B
eightideographicparen;3227
eightinferior;2088
eightmonospace;FF18
eightoldstyle;F738
eightparen;247B
eightperiod;248F
eightpersian;06F8
eightroman;2177
eightsuperior;2078
eightthai;0E58
einvertedbreve;0207
eiotifiedcyrillic;0465
ekatakana;30A8
ekatakanahalfwidth;FF74
ekonkargurmukhi;0A74
ekorean;3154
elcyrillic;043B
element;2208
elevencircle;246A
elevenparen;247E
elevenperiod;2492
elevenroman;217A
ellipsis;2026
ellipsisvertical;22EE
emacron;0113
emacronacute;1E17
emacrongrave;1E15
emcyrillic;043C
emdash;2014
emdashvertical;FE31
emonospace;FF45
emphasismarkarmenian;055B
emptyset;2205
enbopomofo;3123
encyrillic;043D
endash;2013
endashvertical;FE32
endescendercyrillic;04A3
eng;014B
engbopomofo;3125
enghecyrillic;04A5
enhookcyrillic;04C8
enspace;2002
eogonek;0119
eokorean;3153
eopen;025B
eopenclosed;029A
eopenreversed;025C
eopenreversedclosed;025E
eopenreversedhook;025D
eparen;24A0
epsilon;03B5
epsilontonos;03AD
equal;003D
equalmonospace;FF1D
equalsmall;FE66
equalsuperior;207C
equivalence;2261
erbopomofo;3126
ercyrillic;0440
ereversed;0258
ereversedcyrillic;044D
escyrillic;0441
esdescendercyrillic;04AB
esh;0283
eshcurl;0286
eshortdeva;090E
eshortvowelsigndeva;0946
eshreversedloop;01AA
eshsquatreversed;0285
esmallhiragana;3047
esmallkatakana;30A7
esmallkatakanahalfwidth;FF6A
estimated;212E
esuperior;F6EC
eta;03B7
etarmenian;0568
etatonos;03AE
eth;00F0
etilde;1EBD
etildebelow;1E1B
etnahtafoukhhebrew;0591
etnahtafoukhlefthebrew;0591
etnahtahebrew;0591
etnahtalefthebrew;0591
eturned;01DD
eukorean;3161
euro;20AC
evowelsignbengali;09C7
evowelsigndeva;0947
evowelsigngujarati;0AC7
exclam;0021
exclamarmenian;055C
exclamdbl;203C
exclamdown;00A1
exclamdownsmall;F7A1
exclammonospace;FF01
exclamsmall;F721
existential;2203
ezh;0292
ezhcaron;01EF
ezhcurl;0293
ezhreversed;01B9
ezhtail;01BA
f;0066
fadeva;095E
fagurmukhi;0A5E
fahrenheit;2109
fathaarabic;064E
fathalowarabic;064E
fathatanarabic;064B
fbopomofo;3108
fcircle;24D5
fdotaccent;1E1F
feharabic;0641
feharmenian;0586
fehfinalarabic;FED2
fehinitialarabic;FED3
fehmedialarabic;FED4
feicoptic;03E5
female;2640
ff;FB00
ffi;FB03
ffl;FB04
fi;FB01
fifteencircle;246E
fifteenparen;2482
fifteenperiod;2496
figuredash;2012
filledbox;25A0
filledrect;25AC
finalkaf;05DA
finalkafdagesh;FB3A
finalkafdageshhebrew;FB3A
finalkafhebrew;05DA
finalkafqamats;05DA 05B8
finalkafqamatshebrew;05DA 05B8
finalkafsheva;05DA 05B0
finalkafshevahebrew;05DA 05B0
finalmem;05DD
finalmemhebrew;05DD
finalnun;05DF
finalnunhebrew;05DF
finalpe;05E3
finalpehebrew;05E3
finaltsadi;05E5
finaltsadihebrew;05E5
firsttonechinese;02C9
fisheye;25C9
fitacyrillic;0473
five;0035
fivearabic;0665
fivebengali;09EB
fivecircle;2464
fivecircleinversesansserif;278E
fivedeva;096B
fiveeighths;215D
fivegujarati;0AEB
fivegurmukhi;0A6B
fivehackarabic;0665
fivehangzhou;3025
fiveideographicparen;3224
fiveinferior;2085
fivemonospace;FF15
fiveoldstyle;F735
fiveparen;2478
fiveperiod;248C
fivepersian;06F5
fiveroman;2174
fivesuperior;2075
fivethai;0E55
fl;FB02
florin;0192
fmonospace;FF46
fmsquare;3399
fofanthai;0E1F
fofathai;0E1D
fongmanthai;0E4F
forall;2200
four;0034
fourarabic;0664
fourbengali;09EA
fourcircle;2463
fourcircleinversesansserif;278D
fourdeva;096A
fourgujarati;0AEA
fourgurmukhi;0A6A
fourhackarabic;0664
fourhangzhou;3024
fourideographicparen;3223
fourinferior;2084
fourmonospace;FF14
fournumeratorbengali;09F7
fouroldstyle;F734
fourparen;2477
fourperiod;248B
fourpersian;06F4
fourroman;2173
foursuperior;2074
fourteencircle;246D
fourteenparen;2481
fourteenperiod;2495
fourthai;0E54
fourthtonechinese;02CB
fparen;24A1
fraction;2044
franc;20A3
g;0067
gabengali;0997
gacute;01F5
gadeva;0917
gafarabic;06AF
gaffinalarabic;FB93
gafinitialarabic;FB94
gafmedialarabic;FB95
gagujarati;0A97
gagurmukhi;0A17
gahiragana;304C
gakatakana;30AC
gamma;03B3
gammalatinsmall;0263
gammasuperior;02E0
gangiacoptic;03EB
gbopomofo;310D
gbreve;011F
gcaron;01E7
gcedilla;0123
gcircle;24D6
gcircumflex;011D
gcommaaccent;0123
gdot;0121
gdotaccent;0121
gecyrillic;0433
gehiragana;3052
gekatakana;30B2
geometricallyequal;2251
gereshaccenthebrew;059C
gereshhebrew;05F3
gereshmuqdamhebrew;059D
germandbls;00DF
gershayimaccenthebrew;059E
gershayimhebrew;05F4
getamark;3013
ghabengali;0998
ghadarmenian;0572
ghadeva;0918
ghagujarati;0A98
ghagurmukhi;0A18
ghainarabic;063A
ghainfinalarabic;FECE
ghaininitialarabic;FECF
ghainmedialarabic;FED0
ghemiddlehookcyrillic;0495
ghestrokecyrillic;0493
gheupturncyrillic;0491
ghhadeva;095A
ghhagurmukhi;0A5A
ghook;0260
ghzsquare;3393
gihiragana;304E
gikatakana;30AE
gimarmenian;0563
gimel;05D2
gimeldagesh;FB32
gimeldageshhebrew;FB32
gimelhebrew;05D2
gjecyrillic;0453
glottalinvertedstroke;01BE
glottalstop;0294
glottalstopinverted;0296
glottalstopmod;02C0
glottalstopreversed;0295
glottalstopreversedmod;02C1
glottalstopreversedsuperior;02E4
glottalstopstroke;02A1
glottalstopstrokereversed;02A2
gmacron;1E21
gmonospace;FF47
gohiragana;3054
gokatakana;30B4
gparen;24A2
gpasquare;33AC
gradient;2207
grave;0060
gravebelowcmb;0316
gravecmb;0300
gravecomb;0300
gravedeva;0953
gravelowmod;02CE
gravemonospace;FF40
gravetonecmb;0340
greater;003E
greaterequal;2265
greaterequalorless;22DB
greatermonospace;FF1E
greaterorequivalent;2273
greaterorless;2277
greateroverequal;2267
greatersmall;FE65
gscript;0261
gstroke;01E5
guhiragana;3050
guillemotleft;00AB
guillemotright;00BB
guilsinglleft;2039
guilsinglright;203A
gukatakana;30B0
guramusquare;3318
gysquare;33C9
h;0068
haabkhasiancyrillic;04A9
haaltonearabic;06C1
habengali;09B9
hadescendercyrillic;04B3
hadeva;0939
hagujarati;0AB9
hagurmukhi;0A39
haharabic;062D
hahfinalarabic;FEA2
hahinitialarabic;FEA3
hahiragana;306F
hahmedialarabic;FEA4
haitusquare;332A
hakatakana;30CF
hakatakanahalfwidth;FF8A
halantgurmukhi;0A4D
hamzaarabic;0621
hamzadammaarabic;0621 064F
hamzadammatanarabic;0621 064C
hamzafathaarabic;0621 064E
hamzafathatanarabic;0621 064B
hamzalowarabic;0621
hamzalowkasraarabic;0621 0650
hamzalowkasratanarabic;0621 064D
hamzasukunarabic;0621 0652
hangulfiller;3164
hardsigncyrillic;044A
harpoonleftbarbup;21BC
harpoonrightbarbup;21C0
hasquare;33CA
hatafpatah;05B2
hatafpatah16;05B2
hatafpatah23;05B2
hatafpatah2f;05B2
hatafpatahhebrew;05B2
hatafpatahnarrowhebrew;05B2
hatafpatahquarterhebrew;05B2
hatafpatahwidehebrew;05B2
hatafqamats;05B3
hatafqamats1b;05B3
hatafqamats28;05B3
hatafqamats34;05B3
hatafqamatshebrew;05B3
hatafqamatsnarrowhebrew;05B3
hatafqamatsquarterhebrew;05B3
hatafqamatswidehebrew;05B3
hatafsegol;05B1
hatafsegol17;05B1
hatafsegol24;05B1
hatafsegol30;05B1
hatafsegolhebrew;05B1
hatafsegolnarrowhebrew;05B1
hatafsegolquarterhebrew;05B1
hatafsegolwidehebrew;05B1
hbar;0127
hbopomofo;310F
hbrevebelow;1E2B
hcedilla;1E29
hcircle;24D7
hcircumflex;0125
hdieresis;1E27
hdotaccent;1E23
hdotbelow;1E25
he;05D4
heart;2665
heartsuitblack;2665
heartsuitwhite;2661
hedagesh;FB34
hedageshhebrew;FB34
hehaltonearabic;06C1
heharabic;0647
hehebrew;05D4
hehfinalaltonearabic;FBA7
hehfinalalttwoarabic;FEEA
hehfinalarabic;FEEA
hehhamzaabovefinalarabic;FBA5
hehhamzaaboveisolatedarabic;FBA4
hehinitialaltonearabic;FBA8
hehinitialarabic;FEEB
hehiragana;3078
hehmedialaltonearabic;FBA9
hehmedialarabic;FEEC
heiseierasquare;337B
hekatakana;30D8
hekatakanahalfwidth;FF8D
hekutaarusquare;3336
henghook;0267
herutusquare;3339
het;05D7
hethebrew;05D7
hhook;0266
hhooksuperior;02B1
hieuhacirclekorean;327B
hieuhaparenkorean;321B
hieuhcirclekorean;326D
hieuhkorean;314E
hieuhparenkorean;320D
hihiragana;3072
hikatakana;30D2
hikatakanahalfwidth;FF8B
hiriq;05B4
hiriq14;05B4
hiriq21;05B4
hiriq2d;05B4
hiriqhebrew;05B4
hiriqnarrowhebrew;05B4
hiriqquarterhebrew;05B4
hiriqwidehebrew;05B4
hlinebelow;1E96
hmonospace;FF48
hoarmenian;0570
hohipthai;0E2B
hohiragana;307B
hokatakana;30DB
hokatakanahalfwidth;FF8E
holam;05B9
holam19;05B9
holam26;05B9
holam32;05B9
holamhebrew;05B9
holamnarrowhebrew;05B9
holamquarterhebrew;05B9
holamwidehebrew;05B9
honokhukthai;0E2E
hookabovecomb;0309
hookcmb;0309
hookpalatalizedbelowcmb;0321
hookretroflexbelowcmb;0322
hoonsquare;3342
horicoptic;03E9
horizontalbar;2015
horncmb;031B
hotsprings;2668
house;2302
hparen;24A3
hsuperior;02B0
hturned;0265
huhiragana;3075
huiitosquare;3333
hukatakana;30D5
hukatakanahalfwidth;FF8C
hungarumlaut;02DD
hungarumlautcmb;030B
hv;0195
hyphen;002D
hypheninferior;F6E5
hyphenmonospace;FF0D
hyphensmall;FE63
hyphensuperior;F6E6
hyphentwo;2010
i;0069
iacute;00ED
iacyrillic;044F
ibengali;0987
ibopomofo;3127
ibreve;012D
icaron;01D0
icircle;24D8
icircumflex;00EE
icyrillic;0456
idblgrave;0209
ideographearthcircle;328F
ideographfirecircle;328B
ideographicallianceparen;323F
ideographiccallparen;323A
ideographiccentrecircle;32A5
ideographicclose;3006
ideographiccomma;3001
ideographiccommaleft;FF64
ideographiccongratulationparen;3237
ideographiccorrectcircle;32A3
ideographicearthparen;322F
ideographicenterpriseparen;323D
ideographicexcellentcircle;329D
ideographicfestivalparen;3240
ideographicfinancialcircle;3296
ideographicfinancialparen;3236
ideographicfireparen;322B
ideographichaveparen;3232
ideographichighcircle;32A4
ideographiciterationmark;3005
ideographiclaborcircle;3298
ideographiclaborparen;3238
ideographicleftcircle;32A7
ideographiclowcircle;32A6
ideographicmedicinecircle;32A9
ideographicmetalparen;322E
ideographicmoonparen;322A
ideographicnameparen;3234
ideographicperiod;3002
ideographicprintcircle;329E
ideographicreachparen;3243
ideographicrepresentparen;3239
ideographicresourceparen;323E
ideographicrightcircle;32A8
ideographicsecretcircle;3299
ideographicselfparen;3242
ideographicsocietyparen;3233
ideographicspace;3000
ideographicspecialparen;3235
ideographicstockparen;3231
ideographicstudyparen;323B
ideographicsunparen;3230
ideographicsuperviseparen;323C
ideographicwaterparen;322C
ideographicwoodparen;322D
ideographiczero;3007
ideographmetalcircle;328E
ideographmooncircle;328A
ideographnamecircle;3294
ideographsuncircle;3290
ideographwatercircle;328C
ideographwoodcircle;328D
ideva;0907
idieresis;00EF
idieresisacute;1E2F
idieresiscyrillic;04E5
idotbelow;1ECB
iebrevecyrillic;04D7
iecyrillic;0435
ieungacirclekorean;3275
ieungaparenkorean;3215
ieungcirclekorean;3267
ieungkorean;3147
ieungparenkorean;3207
igrave;00EC
igujarati;0A87
igurmukhi;0A07
ihiragana;3044
ihookabove;1EC9
iibengali;0988
iicyrillic;0438
iideva;0908
iigujarati;0A88
iigurmukhi;0A08
iimatragurmukhi;0A40
iinvertedbreve;020B
iishortcyrillic;0439
iivowelsignbengali;09C0
iivowelsigndeva;0940
iivowelsigngujarati;0AC0
ij;0133
ikatakana;30A4
ikatakanahalfwidth;FF72
ikorean;3163
ilde;02DC
iluyhebrew;05AC
imacron;012B
imacroncyrillic;04E3
imageorapproximatelyequal;2253
imatragurmukhi;0A3F
imonospace;FF49
increment;2206
infinity;221E
iniarmenian;056B
integral;222B
integralbottom;2321
integralbt;2321
integralex;F8F5
integraltop;2320
integraltp;2320
intersection;2229
intisquare;3305
invbullet;25D8
invcircle;25D9
invsmileface;263B
iocyrillic;0451
iogonek;012F
iota;03B9
iotadieresis;03CA
iotadieresistonos;0390
iotalatin;0269
iotatonos;03AF
iparen;24A4
irigurmukhi;0A72
ismallhiragana;3043
ismallkatakana;30A3
ismallkatakanahalfwidth;FF68
issharbengali;09FA
istroke;0268
isuperior;F6ED
iterationhiragana;309D
iterationkatakana;30FD
itilde;0129
itildebelow;1E2D
iubopomofo;3129
iucyrillic;044E
ivowelsignbengali;09BF
ivowelsigndeva;093F
ivowelsigngujarati;0ABF
izhitsacyrillic;0475
izhitsadblgravecyrillic;0477
j;006A
jaarmenian;0571
jabengali;099C
jadeva;091C
jagujarati;0A9C
jagurmukhi;0A1C
jbopomofo;3110
jcaron;01F0
jcircle;24D9
jcircumflex;0135
jcrossedtail;029D
jdotlessstroke;025F
jecyrillic;0458
jeemarabic;062C
jeemfinalarabic;FE9E
jeeminitialarabic;FE9F
jeemmedialarabic;FEA0
jeharabic;0698
jehfinalarabic;FB8B
jhabengali;099D
jhadeva;091D
jhagujarati;0A9D
jhagurmukhi;0A1D
jheharmenian;057B
jis;3004
jmonospace;FF4A
jparen;24A5
jsuperior;02B2
k;006B
kabashkircyrillic;04A1
kabengali;0995
kacute;1E31
kacyrillic;043A
kadescendercyrillic;049B
kadeva;0915
kaf;05DB
kafarabic;0643
kafdagesh;FB3B
kafdageshhebrew;FB3B
kaffinalarabic;FEDA
kafhebrew;05DB
kafinitialarabic;FEDB
kafmedialarabic;FEDC
kafrafehebrew;FB4D
kagujarati;0A95
kagurmukhi;0A15
kahiragana;304B
kahookcyrillic;04C4
kakatakana;30AB
kakatakanahalfwidth;FF76
kappa;03BA
kappasymbolgreek;03F0
kapyeounmieumkorean;3171
kapyeounphieuphkorean;3184
kapyeounpieupkorean;3178
kapyeounssangpieupkorean;3179
karoriisquare;330D
kashidaautoarabic;0640
kashidaautonosidebearingarabic;0640
kasmallkatakana;30F5
kasquare;3384
kasraarabic;0650
kasratanarabic;064D
kastrokecyrillic;049F
katahiraprolongmarkhalfwidth;FF70
kaverticalstrokecyrillic;049D
kbopomofo;310E
kcalsquare;3389
kcaron;01E9
kcedilla;0137
kcircle;24DA
kcommaaccent;0137
kdotbelow;1E33
keharmenian;0584
kehiragana;3051
kekatakana;30B1
kekatakanahalfwidth;FF79
kenarmenian;056F
kesmallkatakana;30F6
kgreenlandic;0138
khabengali;0996
khacyrillic;0445
khadeva;0916
khagujarati;0A96
khagurmukhi;0A16
khaharabic;062E
khahfinalarabic;FEA6
khahinitialarabic;FEA7
khahmedialarabic;FEA8
kheicoptic;03E7
khhadeva;0959
khhagurmukhi;0A59
khieukhacirclekorean;3278
khieukhaparenkorean;3218
khieukhcirclekorean;326A
khieukhkorean;314B
khieukhparenkorean;320A
khokhaithai;0E02
khokhonthai;0E05
khokhuatthai;0E03
khokhwaithai;0E04
khomutthai;0E5B
khook;0199
khorakhangthai;0E06
khzsquare;3391
kihiragana;304D
kikatakana;30AD
kikatakanahalfwidth;FF77
kiroguramusquare;3315
kiromeetorusquare;3316
kirosquare;3314
kiyeokacirclekorean;326E
kiyeokaparenkorean;320E
kiyeokcirclekorean;3260
kiyeokkorean;3131
kiyeokparenkorean;3200
kiyeoksioskorean;3133
kjecyrillic;045C
klinebelow;1E35
klsquare;3398
kmcubedsquare;33A6
kmonospace;FF4B
kmsquaredsquare;33A2
kohiragana;3053
kohmsquare;33C0
kokaithai;0E01
kokatakana;30B3
kokatakanahalfwidth;FF7A
kooposquare;331E
koppacyrillic;0481
koreanstandardsymbol;327F
koroniscmb;0343
kparen;24A6
kpasquare;33AA
ksicyrillic;046F
ktsquare;33CF
kturned;029E
kuhiragana;304F
kukatakana;30AF
kukatakanahalfwidth;FF78
kvsquare;33B8
kwsquare;33BE
l;006C
labengali;09B2
lacute;013A
ladeva;0932
lagujarati;0AB2
lagurmukhi;0A32
lakkhangyaothai;0E45
lamaleffinalarabic;FEFC
lamalefhamzaabovefinalarabic;FEF8
lamalefhamzaaboveisolatedarabic;FEF7
lamalefhamzabelowfinalarabic;FEFA
lamalefhamzabelowisolatedarabic;FEF9
lamalefisolatedarabic;FEFB
lamalefmaddaabovefinalarabic;FEF6
lamalefmaddaaboveisolatedarabic;FEF5
lamarabic;0644
lambda;03BB
lambdastroke;019B
lamed;05DC
lameddagesh;FB3C
lameddageshhebrew;FB3C
lamedhebrew;05DC
lamedholam;05DC 05B9
lamedholamdagesh;05DC 05B9 05BC
lamedholamdageshhebrew;05DC 05B9 05BC
lamedholamhebrew;05DC 05B9
lamfinalarabic;FEDE
lamhahinitialarabic;FCCA
laminitialarabic;FEDF
lamjeeminitialarabic;FCC9
lamkhahinitialarabic;FCCB
lamlamhehisolatedarabic;FDF2
lammedialarabic;FEE0
lammeemhahinitialarabic;FD88
lammeeminitialarabic;FCCC
lammeemjeeminitialarabic;FEDF FEE4 FEA0
lammeemkhahinitialarabic;FEDF FEE4 FEA8
largecircle;25EF
lbar;019A
lbelt;026C
lbopomofo;310C
lcaron;013E
lcedilla;013C
lcircle;24DB
lcircumflexbelow;1E3D
lcommaaccent;013C
ldot;0140
ldotaccent;0140
ldotbelow;1E37
ldotbelowmacron;1E39
leftangleabovecmb;031A
lefttackbelowcmb;0318
less;003C
lessequal;2264
lessequalorgreater;22DA
lessmonospace;FF1C
lessorequivalent;2272
lessorgreater;2276
lessoverequal;2266
lesssmall;FE64
lezh;026E
lfblock;258C
lhookretroflex;026D
lira;20A4
liwnarmenian;056C
lj;01C9
ljecyrillic;0459
ll;F6C0
lladeva;0933
llagujarati;0AB3
llinebelow;1E3B
llladeva;0934
llvocalicbengali;09E1
llvocalicdeva;0961
llvocalicvowelsignbengali;09E3
llvocalicvowelsigndeva;0963
lmiddletilde;026B
lmonospace;FF4C
lmsquare;33D0
lochulathai;0E2C
logicaland;2227
logicalnot;00AC
logicalnotreversed;2310
logicalor;2228
lolingthai;0E25
longs;017F
lowlinecenterline;FE4E
lowlinecmb;0332
lowlinedashed;FE4D
lozenge;25CA
lparen;24A7
lslash;0142
lsquare;2113
lsuperior;F6EE
ltshade;2591
luthai;0E26
lvocalicbengali;098C
lvocalicdeva;090C
lvocalicvowelsignbengali;09E2
lvocalicvowelsigndeva;0962
lxsquare;33D3
m;006D
mabengali;09AE
macron;00AF
macronbelowcmb;0331
macroncmb;0304
macronlowmod;02CD
macronmonospace;FFE3
macute;1E3F
madeva;092E
magujarati;0AAE
magurmukhi;0A2E
mahapakhhebrew;05A4
mahapakhlefthebrew;05A4
mahiragana;307E
maichattawalowleftthai;F895
maichattawalowrightthai;F894
maichattawathai;0E4B
maichattawaupperleftthai;F893
maieklowleftthai;F88C
maieklowrightthai;F88B
maiekthai;0E48
maiekupperleftthai;F88A
maihanakatleftthai;F884
maihanakatthai;0E31
maitaikhuleftthai;F889
maitaikhuthai;0E47
maitholowleftthai;F88F
maitholowrightthai;F88E
maithothai;0E49
maithoupperleftthai;F88D
maitrilowleftthai;F892
maitrilowrightthai;F891
maitrithai;0E4A
maitriupperleftthai;F890
maiyamokthai;0E46
makatakana;30DE
makatakanahalfwidth;FF8F
male;2642
mansyonsquare;3347
maqafhebrew;05BE
mars;2642
masoracirclehebrew;05AF
masquare;3383
mbopomofo;3107
mbsquare;33D4
mcircle;24DC
mcubedsquare;33A5
mdotaccent;1E41
mdotbelow;1E43
meemarabic;0645
meemfinalarabic;FEE2
meeminitialarabic;FEE3
meemmedialarabic;FEE4
meemmeeminitialarabic;FCD1
meemmeemisolatedarabic;FC48
meetorusquare;334D
mehiragana;3081
meizierasquare;337E
mekatakana;30E1
mekatakanahalfwidth;FF92
mem;05DE
memdagesh;FB3E
memdageshhebrew;FB3E
memhebrew;05DE
menarmenian;0574
merkhahebrew;05A5
merkhakefulahebrew;05A6
merkhakefulalefthebrew;05A6
merkhalefthebrew;05A5
mhook;0271
mhzsquare;3392
middledotkatakanahalfwidth;FF65
middot;00B7
mieumacirclekorean;3272
mieumaparenkorean;3212
mieumcirclekorean;3264
mieumkorean;3141
mieumpansioskorean;3170
mieumparenkorean;3204
mieumpieupkorean;316E
mieumsioskorean;316F
mihiragana;307F
mikatakana;30DF
mikatakanahalfwidth;FF90
minus;2212
minusbelowcmb;0320
minuscircle;2296
minusmod;02D7
minusplus;2213
minute;2032
miribaarusquare;334A
mirisquare;3349
mlonglegturned;0270
mlsquare;3396
mmcubedsquare;33A3
mmonospace;FF4D
mmsquaredsquare;339F
mohiragana;3082
mohmsquare;33C1
mokatakana;30E2
mokatakanahalfwidth;FF93
molsquare;33D6
momathai;0E21
moverssquare;33A7
moverssquaredsquare;33A8
mparen;24A8
mpasquare;33AB
mssquare;33B3
msuperior;F6EF
mturned;026F
mu;00B5
mu1;00B5
muasquare;3382
muchgreater;226B
muchless;226A
mufsquare;338C
mugreek;03BC
mugsquare;338D
muhiragana;3080
mukatakana;30E0
mukatakanahalfwidth;FF91
mulsquare;3395
multiply;00D7
mumsquare;339B
munahhebrew;05A3
munahlefthebrew;05A3
musicalnote;266A
musicalnotedbl;266B
musicflatsign;266D
musicsharpsign;266F
mussquare;33B2
muvsquare;33B6
muwsquare;33BC
mvmegasquare;33B9
mvsquare;33B7
mwmegasquare;33BF
mwsquare;33BD
n;006E
nabengali;09A8
nabla;2207
nacute;0144
nadeva;0928
nagujarati;0AA8
nagurmukhi;0A28
nahiragana;306A
nakatakana;30CA
nakatakanahalfwidth;FF85
napostrophe;0149
nasquare;3381
nbopomofo;310B
nbspace;00A0
ncaron;0148
ncedilla;0146
ncircle;24DD
ncircumflexbelow;1E4B
ncommaaccent;0146
ndotaccent;1E45
ndotbelow;1E47
nehiragana;306D
nekatakana;30CD
nekatakanahalfwidth;FF88
newsheqelsign;20AA
nfsquare;338B
ngabengali;0999
ngadeva;0919
ngagujarati;0A99
ngagurmukhi;0A19
ngonguthai;0E07
nhiragana;3093
nhookleft;0272
nhookretroflex;0273
nieunacirclekorean;326F
nieunaparenkorean;320F
nieuncieuckorean;3135
nieuncirclekorean;3261
nieunhieuhkorean;3136
nieunkorean;3134
nieunpansioskorean;3168
nieunparenkorean;3201
nieunsioskorean;3167
nieuntikeutkorean;3166
nihiragana;306B
nikatakana;30CB
nikatakanahalfwidth;FF86
nikhahitleftthai;F899
nikhahitthai;0E4D
nine;0039
ninearabic;0669
ninebengali;09EF
ninecircle;2468
ninecircleinversesansserif;2792
ninedeva;096F
ninegujarati;0AEF
ninegurmukhi;0A6F
ninehackarabic;0669
ninehangzhou;3029
nineideographicparen;3228
nineinferior;2089
ninemonospace;FF19
nineoldstyle;F739
nineparen;247C
nineperiod;2490
ninepersian;06F9
nineroman;2178
ninesuperior;2079
nineteencircle;2472
nineteenparen;2486
nineteenperiod;249A
ninethai;0E59
nj;01CC
njecyrillic;045A
nkatakana;30F3
nkatakanahalfwidth;FF9D
nlegrightlong;019E
nlinebelow;1E49
nmonospace;FF4E
nmsquare;339A
nnabengali;09A3
nnadeva;0923
nnagujarati;0AA3
nnagurmukhi;0A23
nnnadeva;0929
nohiragana;306E
nokatakana;30CE
nokatakanahalfwidth;FF89
nonbreakingspace;00A0
nonenthai;0E13
nonuthai;0E19
noonarabic;0646
noonfinalarabic;FEE6
noonghunnaarabic;06BA
noonghunnafinalarabic;FB9F
noonhehinitialarabic;FEE7 FEEC
nooninitialarabic;FEE7
noonjeeminitialarabic;FCD2
noonjeemisolatedarabic;FC4B
noonmedialarabic;FEE8
noonmeeminitialarabic;FCD5
noonmeemisolatedarabic;FC4E
noonnoonfinalarabic;FC8D
notcontains;220C
notelement;2209
notelementof;2209
notequal;2260
notgreater;226F
notgreaternorequal;2271
notgreaternorless;2279
notidentical;2262
notless;226E
notlessnorequal;2270
notparallel;2226
notprecedes;2280
notsubset;2284
notsucceeds;2281
notsuperset;2285
nowarmenian;0576
nparen;24A9
nssquare;33B1
nsuperior;207F
ntilde;00F1
nu;03BD
nuhiragana;306C
nukatakana;30CC
nukatakanahalfwidth;FF87
nuktabengali;09BC
nuktadeva;093C
nuktagujarati;0ABC
nuktagurmukhi;0A3C
numbersign;0023
numbersignmonospace;FF03
numbersignsmall;FE5F
numeralsigngreek;0374
numeralsignlowergreek;0375
numero;2116
nun;05E0
nundagesh;FB40
nundageshhebrew;FB40
nunhebrew;05E0
nvsquare;33B5
nwsquare;33BB
nyabengali;099E
nyadeva;091E
nyagujarati;0A9E
nyagurmukhi;0A1E
o;006F
oacute;00F3
oangthai;0E2D
obarred;0275
obarredcyrillic;04E9
obarreddieresiscyrillic;04EB
obengali;0993
obopomofo;311B
obreve;014F
ocandradeva;0911
ocandragujarati;0A91
ocandravowelsigndeva;0949
ocandravowelsigngujarati;0AC9
ocaron;01D2
ocircle;24DE
ocircumflex;00F4
ocircumflexacute;1ED1
ocircumflexdotbelow;1ED9
ocircumflexgrave;1ED3
ocircumflexhookabove;1ED5
ocircumflextilde;1ED7
ocyrillic;043E
odblacute;0151
odblgrave;020D
odeva;0913
odieresis;00F6
odieresiscyrillic;04E7
odotbelow;1ECD
oe;0153
oekorean;315A
ogonek;02DB
ogonekcmb;0328
ograve;00F2
ogujarati;0A93
oharmenian;0585
ohiragana;304A
ohookabove;1ECF
ohorn;01A1
ohornacute;1EDB
ohorndotbelow;1EE3
ohorngrave;1EDD
ohornhookabove;1EDF
ohorntilde;1EE1
ohungarumlaut;0151
oi;01A3
oinvertedbreve;020F
okatakana;30AA
okatakanahalfwidth;FF75
okorean;3157
olehebrew;05AB
omacron;014D
omacronacute;1E53
omacrongrave;1E51
omdeva;0950
omega;03C9
omega1;03D6
omegacyrillic;0461
omegalatinclosed;0277
omegaroundcyrillic;047B
omegatitlocyrillic;047D
omegatonos;03CE
omgujarati;0AD0
omicron;03BF
omicrontonos;03CC
omonospace;FF4F
one;0031
onearabic;0661
onebengali;09E7
onecircle;2460
onecircleinversesansserif;278A
onedeva;0967
onedotenleader;2024
oneeighth;215B
onefitted;F6DC
onegujarati;0AE7
onegurmukhi;0A67
onehackarabic;0661
onehalf;00BD
onehangzhou;3021
oneideographicparen;3220
oneinferior;2081
onemonospace;FF11
onenumeratorbengali;09F4
oneoldstyle;F731
oneparen;2474
oneperiod;2488
onepersian;06F1
onequarter;00BC
oneroman;2170
onesuperior;00B9
onethai;0E51
onethird;2153
oogonek;01EB
oogonekmacron;01ED
oogurmukhi;0A13
oomatragurmukhi;0A4B
oopen;0254
oparen;24AA
openbullet;25E6
option;2325
ordfeminine;00AA
ordmasculine;00BA
orthogonal;221F
oshortdeva;0912
oshortvowelsigndeva;094A
oslash;00F8
oslashacute;01FF
osmallhiragana;3049
osmallkatakana;30A9
osmallkatakanahalfwidth;FF6B
ostrokeacute;01FF
osuperior;F6F0
otcyrillic;047F
otilde;00F5
otildeacute;1E4D
otildedieresis;1E4F
oubopomofo;3121
overline;203E
overlinecenterline;FE4A
overlinecmb;0305
overlinedashed;FE49
overlinedblwavy;FE4C
overlinewavy;FE4B
overscore;00AF
ovowelsignbengali;09CB
ovowelsigndeva;094B
ovowelsigngujarati;0ACB
p;0070
paampssquare;3380
paasentosquare;332B
pabengali;09AA
pacute;1E55
padeva;092A
pagedown;21DF
pageup;21DE
pagujarati;0AAA
pagurmukhi;0A2A
pahiragana;3071
paiyannoithai;0E2F
pakatakana;30D1
palatalizationcyrilliccmb;0484
palochkacyrillic;04C0
pansioskorean;317F
paragraph;00B6
parallel;2225
parenleft;0028
parenleftaltonearabic;FD3E
parenleftbt;F8ED
parenleftex;F8EC
parenleftinferior;208D
parenleftmonospace;FF08
parenleftsmall;FE59
parenleftsuperior;207D
parenlefttp;F8EB
parenleftvertical;FE35
parenright;0029
parenrightaltonearabic;FD3F
parenrightbt;F8F8
parenrightex;F8F7
parenrightinferior;208E
parenrightmonospace;FF09
parenrightsmall;FE5A
parenrightsuperior;207E
parenrighttp;F8F6
parenrightvertical;FE36
partialdiff;2202
paseqhebrew;05C0
pashtahebrew;0599
pasquare;33A9
patah;05B7
patah11;05B7
patah1d;05B7
patah2a;05B7
patahhebrew;05B7
patahnarrowhebrew;05B7
patahquarterhebrew;05B7
patahwidehebrew;05B7
pazerhebrew;05A1
pbopomofo;3106
pcircle;24DF
pdotaccent;1E57
pe;05E4
pecyrillic;043F
pedagesh;FB44
pedageshhebrew;FB44
peezisquare;333B
pefinaldageshhebrew;FB43
peharabic;067E
peharmenian;057A
pehebrew;05E4
pehfinalarabic;FB57
pehinitialarabic;FB58
pehiragana;307A
pehmedialarabic;FB59
pekatakana;30DA
pemiddlehookcyrillic;04A7
perafehebrew;FB4E
percent;0025
percentarabic;066A
percentmonospace;FF05
percentsmall;FE6A
period;002E
periodarmenian;0589
periodcentered;00B7
periodhalfwidth;FF61
periodinferior;F6E7
periodmonospace;FF0E
periodsmall;FE52
periodsuperior;F6E8
perispomenigreekcmb;0342
perpendicular;22A5
perthousand;2030
peseta;20A7
pfsquare;338A
phabengali;09AB
phadeva;092B
phagujarati;0AAB
phagurmukhi;0A2B
phi;03C6
phi1;03D5
phieuphacirclekorean;327A
phieuphaparenkorean;321A
phieuphcirclekorean;326C
phieuphkorean;314D
phieuphparenkorean;320C
philatin;0278
phinthuthai;0E3A
phisymbolgreek;03D5
phook;01A5
phophanthai;0E1E
phophungthai;0E1C
phosamphaothai;0E20
pi;03C0
pieupacirclekorean;3273
pieupaparenkorean;3213
pieupcieuckorean;3176
pieupcirclekorean;3265
pieupkiyeokkorean;3172
pieupkorean;3142
pieupparenkorean;3205
pieupsioskiyeokkorean;3174
pieupsioskorean;3144
pieupsiostikeutkorean;3175
pieupthieuthkorean;3177
pieuptikeutkorean;3173
pihiragana;3074
pikatakana;30D4
pisymbolgreek;03D6
piwrarmenian;0583
plus;002B
plusbelowcmb;031F
pluscircle;2295
plusminus;00B1
plusmod;02D6
plusmonospace;FF0B
plussmall;FE62
plussuperior;207A
pmonospace;FF50
pmsquare;33D8
pohiragana;307D
pointingindexdownwhite;261F
pointingindexleftwhite;261C
pointingindexrightwhite;261E
pointingindexupwhite;261D
pokatakana;30DD
poplathai;0E1B
postalmark;3012
postalmarkface;3020
pparen;24AB
precedes;227A
prescription;211E
primemod;02B9
primereversed;2035
product;220F
projective;2305
prolongedkana;30FC
propellor;2318
propersubset;2282
propersuperset;2283
proportion;2237
proportional;221D
psi;03C8
psicyrillic;0471
psilipneumatacyrilliccmb;0486
pssquare;33B0
puhiragana;3077
pukatakana;30D7
pvsquare;33B4
pwsquare;33BA
q;0071
qadeva;0958
qadmahebrew;05A8
qafarabic;0642
qaffinalarabic;FED6
qafinitialarabic;FED7
qafmedialarabic;FED8
qamats;05B8
qamats10;05B8
qamats1a;05B8
qamats1c;05B8
qamats27;05B8
qamats29;05B8
qamats33;05B8
qamatsde;05B8
qamatshebrew;05B8
qamatsnarrowhebrew;05B8
qamatsqatanhebrew;05B8
qamatsqatannarrowhebrew;05B8
qamatsqatanquarterhebrew;05B8
qamatsqatanwidehebrew;05B8
qamatsquarterhebrew;05B8
qamatswidehebrew;05B8
qarneyparahebrew;059F
qbopomofo;3111
qcircle;24E0
qhook;02A0
qmonospace;FF51
qof;05E7
qofdagesh;FB47
qofdageshhebrew;FB47
qofhatafpatah;05E7 05B2
qofhatafpatahhebrew;05E7 05B2
qofhatafsegol;05E7 05B1
qofhatafsegolhebrew;05E7 05B1
qofhebrew;05E7
qofhiriq;05E7 05B4
qofhiriqhebrew;05E7 05B4
qofholam;05E7 05B9
qofholamhebrew;05E7 05B9
qofpatah;05E7 05B7
qofpatahhebrew;05E7 05B7
qofqamats;05E7 05B8
qofqamatshebrew;05E7 05B8
qofqubuts;05E7 05BB
qofqubutshebrew;05E7 05BB
qofsegol;05E7 05B6
qofsegolhebrew;05E7 05B6
qofsheva;05E7 05B0
qofshevahebrew;05E7 05B0
qoftsere;05E7 05B5
qoftserehebrew;05E7 05B5
qparen;24AC
quarternote;2669
qubuts;05BB
qubuts18;05BB
qubuts25;05BB
qubuts31;05BB
qubutshebrew;05BB
qubutsnarrowhebrew;05BB
qubutsquarterhebrew;05BB
qubutswidehebrew;05BB
question;003F
questionarabic;061F
questionarmenian;055E
questiondown;00BF
questiondownsmall;F7BF
questiongreek;037E
questionmonospace;FF1F
questionsmall;F73F
quotedbl;0022
quotedblbase;201E
quotedblleft;201C
quotedblmonospace;FF02
quotedblprime;301E
quotedblprimereversed;301D
quotedblright;201D
quoteleft;2018
quoteleftreversed;201B
quotereversed;201B
quoteright;2019
quoterightn;0149
quotesinglbase;201A
quotesingle;0027
quotesinglemonospace;FF07
r;0072
raarmenian;057C
rabengali;09B0
racute;0155
radeva;0930
radical;221A
radicalex;F8E5
radoverssquare;33AE
radoverssquaredsquare;33AF
radsquare;33AD
rafe;05BF
rafehebrew;05BF
ragujarati;0AB0
ragurmukhi;0A30
rahiragana;3089
rakatakana;30E9
rakatakanahalfwidth;FF97
ralowerdiagonalbengali;09F1
ramiddlediagonalbengali;09F0
ramshorn;0264
ratio;2236
rbopomofo;3116
rcaron;0159
rcedilla;0157
rcircle;24E1
rcommaaccent;0157
rdblgrave;0211
rdotaccent;1E59
rdotbelow;1E5B
rdotbelowmacron;1E5D
referencemark;203B
reflexsubset;2286
reflexsuperset;2287
registered;00AE
registersans;F8E8
registerserif;F6DA
reharabic;0631
reharmenian;0580
rehfinalarabic;FEAE
rehiragana;308C
rehyehaleflamarabic;0631 FEF3 FE8E 0644
rekatakana;30EC
rekatakanahalfwidth;FF9A
resh;05E8
reshdageshhebrew;FB48
reshhatafpatah;05E8 05B2
reshhatafpatahhebrew;05E8 05B2
reshhatafsegol;05E8 05B1
reshhatafsegolhebrew;05E8 05B1
reshhebrew;05E8
reshhiriq;05E8 05B4
reshhiriqhebrew;05E8 05B4
reshholam;05E8 05B9
reshholamhebrew;05E8 05B9
reshpatah;05E8 05B7
reshpatahhebrew;05E8 05B7
reshqamats;05E8 05B8
reshqamatshebrew;05E8 05B8
reshqubuts;05E8 05BB
reshqubutshebrew;05E8 05BB
reshsegol;05E8 05B6
reshsegolhebrew;05E8 05B6
reshsheva;05E8 05B0
reshshevahebrew;05E8 05B0
reshtsere;05E8 05B5
reshtserehebrew;05E8 05B5
reversedtilde;223D
reviahebrew;0597
reviamugrashhebrew;0597
revlogicalnot;2310
rfishhook;027E
rfishhookreversed;027F
rhabengali;09DD
rhadeva;095D
rho;03C1
rhook;027D
rhookturned;027B
rhookturnedsuperior;02B5
rhosymbolgreek;03F1
rhotichookmod;02DE
rieulacirclekorean;3271
rieulaparenkorean;3211
rieulcirclekorean;3263
rieulhieuhkorean;3140
rieulkiyeokkorean;313A
rieulkiyeoksioskorean;3169
rieulkorean;3139
rieulmieumkorean;313B
rieulpansioskorean;316C
rieulparenkorean;3203
rieulphieuphkorean;313F
rieulpieupkorean;313C
rieulpieupsioskorean;316B
rieulsioskorean;313D
rieulthieuthkorean;313E
rieultikeutkorean;316A
rieulyeorinhieuhkorean;316D
rightangle;221F
righttackbelowcmb;0319
righttriangle;22BF
rihiragana;308A
rikatakana;30EA
rikatakanahalfwidth;FF98
ring;02DA
ringbelowcmb;0325
ringcmb;030A
ringhalfleft;02BF
ringhalfleftarmenian;0559
ringhalfleftbelowcmb;031C
ringhalfleftcentered;02D3
ringhalfright;02BE
ringhalfrightbelowcmb;0339
ringhalfrightcentered;02D2
rinvertedbreve;0213
rittorusquare;3351
rlinebelow;1E5F
rlongleg;027C
rlonglegturned;027A
rmonospace;FF52
rohiragana;308D
rokatakana;30ED
rokatakanahalfwidth;FF9B
roruathai;0E23
rparen;24AD
rrabengali;09DC
rradeva;0931
rragurmukhi;0A5C
rreharabic;0691
rrehfinalarabic;FB8D
rrvocalicbengali;09E0
rrvocalicdeva;0960
rrvocalicgujarati;0AE0
rrvocalicvowelsignbengali;09C4
rrvocalicvowelsigndeva;0944
rrvocalicvowelsigngujarati;0AC4
rsuperior;F6F1
rtblock;2590
rturned;0279
rturnedsuperior;02B4
ruhiragana;308B
rukatakana;30EB
rukatakanahalfwidth;FF99
rupeemarkbengali;09F2
rupeesignbengali;09F3
rupiah;F6DD
ruthai;0E24
rvocalicbengali;098B
rvocalicdeva;090B
rvocalicgujarati;0A8B
rvocalicvowelsignbengali;09C3
rvocalicvowelsigndeva;0943
rvocalicvowelsigngujarati;0AC3
s;0073
sabengali;09B8
sacute;015B
sacutedotaccent;1E65
sadarabic;0635
sadeva;0938
sadfinalarabic;FEBA
sadinitialarabic;FEBB
sadmedialarabic;FEBC
sagujarati;0AB8
sagurmukhi;0A38
sahiragana;3055
sakatakana;30B5
sakatakanahalfwidth;FF7B
sallallahoualayhewasallamarabic;FDFA
samekh;05E1
samekhdagesh;FB41
samekhdageshhebrew;FB41
samekhhebrew;05E1
saraaathai;0E32
saraaethai;0E41
saraaimaimalaithai;0E44
saraaimaimuanthai;0E43
saraamthai;0E33
saraathai;0E30
saraethai;0E40
saraiileftthai;F886
saraiithai;0E35
saraileftthai;F885
saraithai;0E34
saraothai;0E42
saraueeleftthai;F888
saraueethai;0E37
saraueleftthai;F887
sarauethai;0E36
sarauthai;0E38
sarauuthai;0E39
sbopomofo;3119
scaron;0161
scarondotaccent;1E67
scedilla;015F
schwa;0259
schwacyrillic;04D9
schwadieresiscyrillic;04DB
schwahook;025A
scircle;24E2
scircumflex;015D
scommaaccent;0219
sdotaccent;1E61
sdotbelow;1E63
sdotbelowdotaccent;1E69
seagullbelowcmb;033C
second;2033
secondtonechinese;02CA
section;00A7
seenarabic;0633
seenfinalarabic;FEB2
seeninitialarabic;FEB3
seenmedialarabic;FEB4
segol;05B6
segol13;05B6
segol1f;05B6
segol2c;05B6
segolhebrew;05B6
segolnarrowhebrew;05B6
segolquarterhebrew;05B6
segoltahebrew;0592
segolwidehebrew;05B6
seharmenian;057D
sehiragana;305B
sekatakana;30BB
sekatakanahalfwidth;FF7E
semicolon;003B
semicolonarabic;061B
semicolonmonospace;FF1B
semicolonsmall;FE54
semivoicedmarkkana;309C
semivoicedmarkkanahalfwidth;FF9F
sentisquare;3322
sentosquare;3323
seven;0037
sevenarabic;0667
sevenbengali;09ED
sevencircle;2466
sevencircleinversesansserif;2790
sevendeva;096D
seveneighths;215E
sevengujarati;0AED
sevengurmukhi;0A6D
sevenhackarabic;0667
sevenhangzhou;3027
sevenideographicparen;3226
seveninferior;2087
sevenmonospace;FF17
sevenoldstyle;F737
sevenparen;247A
sevenperiod;248E
sevenpersian;06F7
sevenroman;2176
sevensuperior;2077
seventeencircle;2470
seventeenparen;2484
seventeenperiod;2498
seventhai;0E57
sfthyphen;00AD
shaarmenian;0577
shabengali;09B6
shacyrillic;0448
shaddaarabic;0651
shaddadammaarabic;FC61
shaddadammatanarabic;FC5E
shaddafathaarabic;FC60
shaddafathatanarabic;0651 064B
shaddakasraarabic;FC62
shaddakasratanarabic;FC5F
shade;2592
shadedark;2593
shadelight;2591
shademedium;2592
shadeva;0936
shagujarati;0AB6
shagurmukhi;0A36
shalshelethebrew;0593
shbopomofo;3115
shchacyrillic;0449
sheenarabic;0634
sheenfinalarabic;FEB6
sheeninitialarabic;FEB7
sheenmedialarabic;FEB8
sheicoptic;03E3
sheqel;20AA
sheqelhebrew;20AA
sheva;05B0
sheva115;05B0
sheva15;05B0
sheva22;05B0
sheva2e;05B0
shevahebrew;05B0
shevanarrowhebrew;05B0
shevaquarterhebrew;05B0
shevawidehebrew;05B0
shhacyrillic;04BB
shimacoptic;03ED
shin;05E9
shindagesh;FB49
shindageshhebrew;FB49
shindageshshindot;FB2C
shindageshshindothebrew;FB2C
shindageshsindot;FB2D
shindageshsindothebrew;FB2D
shindothebrew;05C1
shinhebrew;05E9
shinshindot;FB2A
shinshindothebrew;FB2A
shinsindot;FB2B
shinsindothebrew;FB2B
shook;0282
sigma;03C3
sigma1;03C2
sigmafinal;03C2
sigmalunatesymbolgreek;03F2
sihiragana;3057
sikatakana;30B7
sikatakanahalfwidth;FF7C
siluqhebrew;05BD
siluqlefthebrew;05BD
similar;223C
sindothebrew;05C2
siosacirclekorean;3274
siosaparenkorean;3214
sioscieuckorean;317E
sioscirclekorean;3266
sioskiyeokkorean;317A
sioskorean;3145
siosnieunkorean;317B
siosparenkorean;3206
siospieupkorean;317D
siostikeutkorean;317C
six;0036
sixarabic;0666
sixbengali;09EC
sixcircle;2465
sixcircleinversesansserif;278F
sixdeva;096C
sixgujarati;0AEC
sixgurmukhi;0A6C
sixhackarabic;0666
sixhangzhou;3026
sixideographicparen;3225
sixinferior;2086
sixmonospace;FF16
sixoldstyle;F736
sixparen;2479
sixperiod;248D
sixpersian;06F6
sixroman;2175
sixsuperior;2076
sixteencircle;246F
sixteencurrencydenominatorbengali;09F9
sixteenparen;2483
sixteenperiod;2497
sixthai;0E56
slash;002F
slashmonospace;FF0F
slong;017F
slongdotaccent;1E9B
smileface;263A
smonospace;FF53
sofpasuqhebrew;05C3
softhyphen;00AD
softsigncyrillic;044C
sohiragana;305D
sokatakana;30BD
sokatakanahalfwidth;FF7F
soliduslongoverlaycmb;0338
solidusshortoverlaycmb;0337
sorusithai;0E29
sosalathai;0E28
sosothai;0E0B
sosuathai;0E2A
space;0020
spacehackarabic;0020
spade;2660
spadesuitblack;2660
spadesuitwhite;2664
sparen;24AE
squarebelowcmb;033B
squarecc;33C4
squarecm;339D
squarediagonalcrosshatchfill;25A9
squarehorizontalfill;25A4
squarekg;338F
squarekm;339E
squarekmcapital;33CE
squareln;33D1
squarelog;33D2
squaremg;338E
squaremil;33D5
squaremm;339C
squaremsquared;33A1
squareorthogonalcrosshatchfill;25A6
squareupperlefttolowerrightfill;25A7
squareupperrighttolowerleftfill;25A8
squareverticalfill;25A5
squarewhitewithsmallblack;25A3
srsquare;33DB
ssabengali;09B7
ssadeva;0937
ssagujarati;0AB7
ssangcieuckorean;3149
ssanghieuhkorean;3185
ssangieungkorean;3180
ssangkiyeokkorean;3132
ssangnieunkorean;3165
ssangpieupkorean;3143
ssangsioskorean;3146
ssangtikeutkorean;3138
ssuperior;F6F2
sterling;00A3
sterlingmonospace;FFE1
strokelongoverlaycmb;0336
strokeshortoverlaycmb;0335
subset;2282
subsetnotequal;228A
subsetorequal;2286
succeeds;227B
suchthat;220B
suhiragana;3059
sukatakana;30B9
sukatakanahalfwidth;FF7D
sukunarabic;0652
summation;2211
sun;263C
superset;2283
supersetnotequal;228B
supersetorequal;2287
svsquare;33DC
syouwaerasquare;337C
t;0074
tabengali;09A4
tackdown;22A4
tackleft;22A3
tadeva;0924
tagujarati;0AA4
tagurmukhi;0A24
taharabic;0637
tahfinalarabic;FEC2
tahinitialarabic;FEC3
tahiragana;305F
tahmedialarabic;FEC4
taisyouerasquare;337D
takatakana;30BF
takatakanahalfwidth;FF80
tatweelarabic;0640
tau;03C4
tav;05EA
tavdages;FB4A
tavdagesh;FB4A
tavdageshhebrew;FB4A
tavhebrew;05EA
tbar;0167
tbopomofo;310A
tcaron;0165
tccurl;02A8
tcedilla;0163
tcheharabic;0686
tchehfinalarabic;FB7B
tchehinitialarabic;FB7C
tchehmedialarabic;FB7D
tchehmeeminitialarabic;FB7C FEE4
tcircle;24E3
tcircumflexbelow;1E71
tcommaaccent;0163
tdieresis;1E97
tdotaccent;1E6B
tdotbelow;1E6D
tecyrillic;0442
tedescendercyrillic;04AD
teharabic;062A
tehfinalarabic;FE96
tehhahinitialarabic;FCA2
tehhahisolatedarabic;FC0C
tehinitialarabic;FE97
tehiragana;3066
tehjeeminitialarabic;FCA1
tehjeemisolatedarabic;FC0B
tehmarbutaarabic;0629
tehmarbutafinalarabic;FE94
tehmedialarabic;FE98
tehmeeminitialarabic;FCA4
tehmeemisolatedarabic;FC0E
tehnoonfinalarabic;FC73
tekatakana;30C6
tekatakanahalfwidth;FF83
telephone;2121
telephoneblack;260E
telishagedolahebrew;05A0
telishaqetanahebrew;05A9
tencircle;2469
tenideographicparen;3229
tenparen;247D
tenperiod;2491
tenroman;2179
tesh;02A7
tet;05D8
tetdagesh;FB38
tetdageshhebrew;FB38
tethebrew;05D8
tetsecyrillic;04B5
tevirhebrew;059B
tevirlefthebrew;059B
thabengali;09A5
thadeva;0925
thagujarati;0AA5
thagurmukhi;0A25
thalarabic;0630
thalfinalarabic;FEAC
thanthakhatlowleftthai;F898
thanthakhatlowrightthai;F897
thanthakhatthai;0E4C
thanthakhatupperleftthai;F896
theharabic;062B
thehfinalarabic;FE9A
thehinitialarabic;FE9B
thehmedialarabic;FE9C
thereexists;2203
therefore;2234
theta;03B8
theta1;03D1
thetasymbolgreek;03D1
thieuthacirclekorean;3279
thieuthaparenkorean;3219
thieuthcirclekorean;326B
thieuthkorean;314C
thieuthparenkorean;320B
thirteencircle;246C
thirteenparen;2480
thirteenperiod;2494
thonangmonthothai;0E11
thook;01AD
thophuthaothai;0E12
thorn;00FE
thothahanthai;0E17
thothanthai;0E10
thothongthai;0E18
thothungthai;0E16
thousandcyrillic;0482
thousandsseparatorarabic;066C
thousandsseparatorpersian;066C
three;0033
threearabic;0663
threebengali;09E9
threecircle;2462
threecircleinversesansserif;278C
threedeva;0969
threeeighths;215C
threegujarati;0AE9
threegurmukhi;0A69
threehackarabic;0663
threehangzhou;3023
threeideographicparen;3222
threeinferior;2083
threemonospace;FF13
threenumeratorbengali;09F6
threeoldstyle;F733
threeparen;2476
threeperiod;248A
threepersian;06F3
threequarters;00BE
threequartersemdash;F6DE
threeroman;2172
threesuperior;00B3
threethai;0E53
thzsquare;3394
tihiragana;3061
tikatakana;30C1
tikatakanahalfwidth;FF81
tikeutacirclekorean;3270
tikeutaparenkorean;3210
tikeutcirclekorean;3262
tikeutkorean;3137
tikeutparenkorean;3202
tilde;02DC
tildebelowcmb;0330
tildecmb;0303
tildecomb;0303
tildedoublecmb;0360
tildeoperator;223C
tildeoverlaycmb;0334
tildeverticalcmb;033E
timescircle;2297
tipehahebrew;0596
tipehalefthebrew;0596
tippigurmukhi;0A70
titlocyrilliccmb;0483
tiwnarmenian;057F
tlinebelow;1E6F
tmonospace;FF54
toarmenian;0569
tohiragana;3068
tokatakana;30C8
tokatakanahalfwidth;FF84
tonebarextrahighmod;02E5
tonebarextralowmod;02E9
tonebarhighmod;02E6
tonebarlowmod;02E8
tonebarmidmod;02E7
tonefive;01BD
tonesix;0185
tonetwo;01A8
tonos;0384
tonsquare;3327
topatakthai;0E0F
tortoiseshellbracketleft;3014
tortoiseshellbracketleftsmall;FE5D
tortoiseshellbracketleftvertical;FE39
tortoiseshellbracketright;3015
tortoiseshellbracketrightsmall;FE5E
tortoiseshellbracketrightvertical;FE3A
totaothai;0E15
tpalatalhook;01AB
tparen;24AF
trademark;2122
trademarksans;F8EA
trademarkserif;F6DB
tretroflexhook;0288
triagdn;25BC
triaglf;25C4
triagrt;25BA
triagup;25B2
ts;02A6
tsadi;05E6
tsadidagesh;FB46
tsadidageshhebrew;FB46
tsadihebrew;05E6
tsecyrillic;0446
tsere;05B5
tsere12;05B5
tsere1e;05B5
tsere2b;05B5
tserehebrew;05B5
tserenarrowhebrew;05B5
tserequarterhebrew;05B5
tserewidehebrew;05B5
tshecyrillic;045B
tsuperior;F6F3
ttabengali;099F
ttadeva;091F
ttagujarati;0A9F
ttagurmukhi;0A1F
tteharabic;0679
ttehfinalarabic;FB67
ttehinitialarabic;FB68
ttehmedialarabic;FB69
tthabengali;09A0
tthadeva;0920
tthagujarati;0AA0
tthagurmukhi;0A20
tturned;0287
tuhiragana;3064
tukatakana;30C4
tukatakanahalfwidth;FF82
tusmallhiragana;3063
tusmallkatakana;30C3
tusmallkatakanahalfwidth;FF6F
twelvecircle;246B
twelveparen;247F
twelveperiod;2493
twelveroman;217B
twentycircle;2473
twentyhangzhou;5344
twentyparen;2487
twentyperiod;249B
two;0032
twoarabic;0662
twobengali;09E8
twocircle;2461
twocircleinversesansserif;278B
twodeva;0968
twodotenleader;2025
twodotleader;2025
twodotleadervertical;FE30
twogujarati;0AE8
twogurmukhi;0A68
twohackarabic;0662
twohangzhou;3022
twoideographicparen;3221
twoinferior;2082
twomonospace;FF12
twonumeratorbengali;09F5
twooldstyle;F732
twoparen;2475
twoperiod;2489
twopersian;06F2
tworoman;2171
twostroke;01BB
twosuperior;00B2
twothai;0E52
twothirds;2154
u;0075
uacute;00FA
ubar;0289
ubengali;0989
ubopomofo;3128
ubreve;016D
ucaron;01D4
ucircle;24E4
ucircumflex;00FB
ucircumflexbelow;1E77
ucyrillic;0443
udattadeva;0951
udblacute;0171
udblgrave;0215
udeva;0909
udieresis;00FC
udieresisacute;01D8
udieresisbelow;1E73
udieresiscaron;01DA
udieresiscyrillic;04F1
udieresisgrave;01DC
udieresismacron;01D6
udotbelow;1EE5
ugrave;00F9
ugujarati;0A89
ugurmukhi;0A09
uhiragana;3046
uhookabove;1EE7
uhorn;01B0
uhornacute;1EE9
uhorndotbelow;1EF1
uhorngrave;1EEB
uhornhookabove;1EED
uhorntilde;1EEF
uhungarumlaut;0171
uhungarumlautcyrillic;04F3
uinvertedbreve;0217
ukatakana;30A6
ukatakanahalfwidth;FF73
ukcyrillic;0479
ukorean;315C
umacron;016B
umacroncyrillic;04EF
umacrondieresis;1E7B
umatragurmukhi;0A41
umonospace;FF55
underscore;005F
underscoredbl;2017
underscoremonospace;FF3F
underscorevertical;FE33
underscorewavy;FE4F
union;222A
universal;2200
uogonek;0173
uparen;24B0
upblock;2580
upperdothebrew;05C4
upsilon;03C5
upsilondieresis;03CB
upsilondieresistonos;03B0
upsilonlatin;028A
upsilontonos;03CD
uptackbelowcmb;031D
uptackmod;02D4
uragurmukhi;0A73
uring;016F
ushortcyrillic;045E
usmallhiragana;3045
usmallkatakana;30A5
usmallkatakanahalfwidth;FF69
ustraightcyrillic;04AF
ustraightstrokecyrillic;04B1
utilde;0169
utildeacute;1E79
utildebelow;1E75
uubengali;098A
uudeva;090A
uugujarati;0A8A
uugurmukhi;0A0A
uumatragurmukhi;0A42
uuvowelsignbengali;09C2
uuvowelsigndeva;0942
uuvowelsigngujarati;0AC2
uvowelsignbengali;09C1
uvowelsigndeva;0941
uvowelsigngujarati;0AC1
v;0076
vadeva;0935
vagujarati;0AB5
vagurmukhi;0A35
vakatakana;30F7
vav;05D5
vavdagesh;FB35
vavdagesh65;FB35
vavdageshhebrew;FB35
vavhebrew;05D5
vavholam;FB4B
vavholamhebrew;FB4B
vavvavhebrew;05F0
vavyodhebrew;05F1
vcircle;24E5
vdotbelow;1E7F
vecyrillic;0432
veharabic;06A4
vehfinalarabic;FB6B
vehinitialarabic;FB6C
vehmedialarabic;FB6D
vekatakana;30F9
venus;2640
verticalbar;007C
verticallineabovecmb;030D
verticallinebelowcmb;0329
verticallinelowmod;02CC
verticallinemod;02C8
vewarmenian;057E
vhook;028B
vikatakana;30F8
viramabengali;09CD
viramadeva;094D
viramagujarati;0ACD
visargabengali;0983
visargadeva;0903
visargagujarati;0A83
vmonospace;FF56
voarmenian;0578
voicediterationhiragana;309E
voicediterationkatakana;30FE
voicedmarkkana;309B
voicedmarkkanahalfwidth;FF9E
vokatakana;30FA
vparen;24B1
vtilde;1E7D
vturned;028C
vuhiragana;3094
vukatakana;30F4
w;0077
wacute;1E83
waekorean;3159
wahiragana;308F
wakatakana;30EF
wakatakanahalfwidth;FF9C
wakorean;3158
wasmallhiragana;308E
wasmallkatakana;30EE
wattosquare;3357
wavedash;301C
wavyunderscorevertical;FE34
wawarabic;0648
wawfinalarabic;FEEE
wawhamzaabovearabic;0624
wawhamzaabovefinalarabic;FE86
wbsquare;33DD
wcircle;24E6
wcircumflex;0175
wdieresis;1E85
wdotaccent;1E87
wdotbelow;1E89
wehiragana;3091
weierstrass;2118
wekatakana;30F1
wekorean;315E
weokorean;315D
wgrave;1E81
whitebullet;25E6
whitecircle;25CB
whitecircleinverse;25D9
whitecornerbracketleft;300E
whitecornerbracketleftvertical;FE43
whitecornerbracketright;300F
whitecornerbracketrightvertical;FE44
whitediamond;25C7
whitediamondcontainingblacksmalldiamond;25C8
whitedownpointingsmalltriangle;25BF
whitedownpointingtriangle;25BD
whiteleftpointingsmalltriangle;25C3
whiteleftpointingtriangle;25C1
whitelenticularbracketleft;3016
whitelenticularbracketright;3017
whiterightpointingsmalltriangle;25B9
whiterightpointingtriangle;25B7
whitesmallsquare;25AB
whitesmilingface;263A
whitesquare;25A1
whitestar;2606
whitetelephone;260F
whitetortoiseshellbracketleft;3018
whitetortoiseshellbracketright;3019
whiteuppointingsmalltriangle;25B5
whiteuppointingtriangle;25B3
wihiragana;3090
wikatakana;30F0
wikorean;315F
wmonospace;FF57
wohiragana;3092
wokatakana;30F2
wokatakanahalfwidth;FF66
won;20A9
wonmonospace;FFE6
wowaenthai;0E27
wparen;24B2
wring;1E98
wsuperior;02B7
wturned;028D
wynn;01BF
x;0078
xabovecmb;033D
xbopomofo;3112
xcircle;24E7
xdieresis;1E8D
xdotaccent;1E8B
xeharmenian;056D
xi;03BE
xmonospace;FF58
xparen;24B3
xsuperior;02E3
y;0079
yaadosquare;334E
yabengali;09AF
yacute;00FD
yadeva;092F
yaekorean;3152
yagujarati;0AAF
yagurmukhi;0A2F
yahiragana;3084
yakatakana;30E4
yakatakanahalfwidth;FF94
yakorean;3151
yamakkanthai;0E4E
yasmallhiragana;3083
yasmallkatakana;30E3
yasmallkatakanahalfwidth;FF6C
yatcyrillic;0463
ycircle;24E8
ycircumflex;0177
ydieresis;00FF
ydotaccent;1E8F
ydotbelow;1EF5
yeharabic;064A
yehbarreearabic;06D2
yehbarreefinalarabic;FBAF
yehfinalarabic;FEF2
yehhamzaabovearabic;0626
yehhamzaabovefinalarabic;FE8A
yehhamzaaboveinitialarabic;FE8B
yehhamzaabovemedialarabic;FE8C
yehinitialarabic;FEF3
yehmedialarabic;FEF4
yehmeeminitialarabic;FCDD
yehmeemisolatedarabic;FC58
yehnoonfinalarabic;FC94
yehthreedotsbelowarabic;06D1
yekorean;3156
yen;00A5
yenmonospace;FFE5
yeokorean;3155
yeorinhieuhkorean;3186
yerahbenyomohebrew;05AA
yerahbenyomolefthebrew;05AA
yericyrillic;044B
yerudieresiscyrillic;04F9
yesieungkorean;3181
yesieungpansioskorean;3183
yesieungsioskorean;3182
yetivhebrew;059A
ygrave;1EF3
yhook;01B4
yhookabove;1EF7
yiarmenian;0575
yicyrillic;0457
yikorean;3162
yinyang;262F
yiwnarmenian;0582
ymonospace;FF59
yod;05D9
yoddagesh;FB39
yoddageshhebrew;FB39
yodhebrew;05D9
yodyodhebrew;05F2
yodyodpatahhebrew;FB1F
yohiragana;3088
yoikorean;3189
yokatakana;30E8
yokatakanahalfwidth;FF96
yokorean;315B
yosmallhiragana;3087
yosmallkatakana;30E7
yosmallkatakanahalfwidth;FF6E
yotgreek;03F3
yoyaekorean;3188
yoyakorean;3187
yoyakthai;0E22
yoyingthai;0E0D
yparen;24B4
ypogegrammeni;037A
ypogegrammenigreekcmb;0345
yr;01A6
yring;1E99
ysuperior;02B8
ytilde;1EF9
yturned;028E
yuhiragana;3086
yuikorean;318C
yukatakana;30E6
yukatakanahalfwidth;FF95
yukorean;3160
yusbigcyrillic;046B
yusbigiotifiedcyrillic;046D
yuslittlecyrillic;0467
yuslittleiotifiedcyrillic;0469
yusmallhiragana;3085
yusmallkatakana;30E5
yusmallkatakanahalfwidth;FF6D
yuyekorean;318B
yuyeokorean;318A
yyabengali;09DF
yyadeva;095F
z;007A
zaarmenian;0566
zacute;017A
zadeva;095B
zagurmukhi;0A5B
zaharabic;0638
zahfinalarabic;FEC6
zahinitialarabic;FEC7
zahiragana;3056
zahmedialarabic;FEC8
zainarabic;0632
zainfinalarabic;FEB0
zakatakana;30B6
zaqefgadolhebrew;0595
zaqefqatanhebrew;0594
zarqahebrew;0598
zayin;05D6
zayindagesh;FB36
zayindageshhebrew;FB36
zayinhebrew;05D6
zbopomofo;3117
zcaron;017E
zcircle;24E9
zcircumflex;1E91
zcurl;0291
zdot;017C
zdotaccent;017C
zdotbelow;1E93
zecyrillic;0437
zedescendercyrillic;0499
zedieresiscyrillic;04DF
zehiragana;305C
zekatakana;30BC
zero;0030
zeroarabic;0660
zerobengali;09E6
zerodeva;0966
zerogujarati;0AE6
zerogurmukhi;0A66
zerohackarabic;0660
zeroinferior;2080
zeromonospace;FF10
zerooldstyle;F730
zeropersian;06F0
zerosuperior;2070
zerothai;0E50
zerowidthjoiner;FEFF
zerowidthnonjoiner;200C
zerowidthspace;200B
zeta;03B6
zhbopomofo;3113
zhearmenian;056A
zhebrevecyrillic;04C2
zhecyrillic;0436
zhedescendercyrillic;0497
zhedieresiscyrillic;04DD
zihiragana;3058
zikatakana;30B8
zinorhebrew;05AE
zlinebelow;1E95
zmonospace;FF5A
zohiragana;305E
zokatakana;30BE
zparen;24B5
zretroflexhook;0290
zstroke;01B6
zuhiragana;305A
zukatakana;30BA
"""
# string table management
#
class StringTable:
def __init__( self, name_list, master_table_name ):
self.names = name_list
self.master_table = master_table_name
self.indices = {}
index = 0
for name in name_list:
self.indices[name] = index
index += len( name ) + 1
self.total = index
def dump( self, file ):
write = file.write
write( " static const char " + self.master_table +
"[" + repr( self.total ) + "] =\n" )
write( " {\n" )
line = ""
for name in self.names:
line += " '"
line += string.join( ( re.findall( ".", name ) ), "','" )
line += "', 0,\n"
write( line + " };\n\n\n" )
def dump_sublist( self, file, table_name, macro_name, sublist ):
write = file.write
write( "#define " + macro_name + " " + repr( len( sublist ) ) + "\n\n" )
write( " /* Values are offsets into the `" +
self.master_table + "' table */\n\n" )
write( " static const short " + table_name +
"[" + macro_name + "] =\n" )
write( " {\n" )
line = " "
comma = ""
col = 0
for name in sublist:
line += comma
line += "%4d" % self.indices[name]
col += 1
comma = ","
if col == 14:
col = 0
comma = ",\n "
write( line + "\n };\n\n\n" )
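# For illustration only (hypothetical input, not part of the generated
# tables): StringTable( ["ab", "c"], "names" ) assigns offsets 0 and 3,
# and its `dump' method would emit a C fragment equivalent to
#
#   static const char  names[5] =
#   {
#     'a','b', 0,
#     'c', 0,
#   };
#
# `dump_sublist' then writes those offsets (0, 3) as `short' indices into
# this master table.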
# We now store the Adobe Glyph List in compressed form. The list is put
# into a data structure called `trie' (because it has a tree-like
# appearance). Consider, for example, that you want to store the
# following name mapping:
#
# A => 1
# Aacute => 6
# Abalon => 2
# Abstract => 4
#
# It is possible to store the entries as follows.
#
# A => 1
# |
# +-acute => 6
# |
# +-b
# |
# +-alon => 2
# |
# +-stract => 4
#
# We see that each node in the trie has:
#
# - one or more `letters'
# - an optional value
# - zero or more child nodes
#
# The first step is to call
#
# root = StringNode( "", 0 )
# for word in map.keys():
# root.add( word, map[word] )
#
# which creates a large trie where each node has only one child.
#
# Executing
#
# root = root.optimize()
#
# optimizes the trie by merging the letters of successive nodes whenever
# possible.
#
# Each node of the trie is stored as follows.
#
# - First the node's letter, according to the following scheme. We
# use the fact that in the AGL no name contains character codes > 127.
#
# name bitsize description
# ----------------------------------------------------------------
# notlast 1 Set to 1 if this is not the last letter
# in the word.
# ascii 7 The letter's ASCII value.
#
# - The letter is followed by a children count and the value of the
# current key (if any). Again we can do some optimization because all
# AGL entries are from the BMP; this means that 16 bits are sufficient
# to store their Unicode values. Additionally, no node has more than
# 127 children.
#
# name bitsize description
# -----------------------------------------
# hasvalue 1 Set to 1 if a 16-bit Unicode value follows.
# num_children 7 Number of children. Can be 0 only if
# `hasvalue' is set to 1.
# value 16 Optional Unicode value.
#
# - A node is finished by a list of 16bit absolute offsets to the
# children, which must be sorted in increasing order of their first
# letter.
#
# For simplicity, all 16bit quantities are stored in big-endian order.
#
# The root node has first letter = 0, and no value.
#
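# As a worked example (illustrative only -- this tiny map is not part of
# the real AGL data): storing { "A": 0x0041, "B": 0x0042 } via
#
#   root = StringNode( "", 0 )
#   root.add( "A", 0x0041 )
#   root.add( "B", 0x0042 )
#   root = root.optimize()
#
# yields a 14-byte table (root.locate( 0 ) == 14):
#
#   offset  0: 00       root letter (none)
#   offset  1: 02       hasvalue=0, 2 children
#   offset  2: 00 06    16bit offset of child `A'
#   offset  4: 00 0A    16bit offset of child `B'
#   offset  6: 41       letter `A', notlast=0
#   offset  7: 80       hasvalue=1, 0 children
#   offset  8: 00 41    value U+0041
#   offset 10: 42       letter `B', notlast=0
#   offset 11: 80       hasvalue=1, 0 children
#   offset 12: 00 42    value U+0042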
class StringNode:
def __init__( self, letter, value ):
self.letter = letter
self.value = value
self.children = {}
def __cmp__( self, other ):
return ord( self.letter[0] ) - ord( other.letter[0] )
def add( self, word, value ):
if len( word ) == 0:
self.value = value
return
letter = word[0]
word = word[1:]
if self.children.has_key( letter ):
child = self.children[letter]
else:
child = StringNode( letter, 0 )
self.children[letter] = child
child.add( word, value )
def optimize( self ):
# optimize all children first
children = self.children.values()
self.children = {}
for child in children:
self.children[child.letter[0]] = child.optimize()
# don't optimize if there's a value,
# if we don't have any child or if we
# have more than one child
if ( self.value != 0 ) or ( not children ) or len( children ) > 1:
return self
child = children[0]
self.letter += child.letter
self.value = child.value
self.children = child.children
return self
def dump_debug( self, write, margin ):
# this is used during debugging
line = margin + "+-"
if len( self.letter ) == 0:
line += "<NOLETTER>"
else:
line += self.letter
if self.value:
line += " => " + repr( self.value )
write( line + "\n" )
if self.children:
margin += "| "
for child in self.children.values():
child.dump_debug( write, margin )
def locate( self, index ):
self.index = index
if len( self.letter ) > 0:
index += len( self.letter ) + 1
else:
index += 2
if self.value != 0:
index += 2
children = self.children.values()
children.sort()
index += 2 * len( children )
for child in children:
index = child.locate( index )
return index
def store( self, storage ):
# write the letters
l = len( self.letter )
if l == 0:
storage += struct.pack( "B", 0 )
else:
for n in range( l ):
val = ord( self.letter[n] )
if n < l - 1:
val += 128
storage += struct.pack( "B", val )
# write the count
children = self.children.values()
children.sort()
count = len( children )
if self.value != 0:
storage += struct.pack( "!BH", count + 128, self.value )
else:
storage += struct.pack( "B", count )
for child in children:
storage += struct.pack( "!H", child.index )
for child in children:
storage = child.store( storage )
return storage
def adobe_glyph_values():
"""return the list of glyph names and their unicode values"""
lines = string.split( adobe_glyph_list, '\n' )
glyphs = []
values = []
for line in lines:
if line:
fields = string.split( line, ';' )
# print fields[1] + ' - ' + fields[0]
subfields = string.split( fields[1], ' ' )
if len( subfields ) == 1:
glyphs.append( fields[0] )
values.append( fields[1] )
return glyphs, values
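# Example (the first line is a real single-value entry, the second is a
# hypothetical multi-value one): "A;0041" contributes "A" to `glyphs' and
# "0041" to `values', while a line such as "foo;0041 0301" is skipped by
# the len( subfields ) == 1 test above.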
def filter_glyph_names( alist, filter ):
"""filter `alist' by taking _out_ all glyph names that are in `filter'"""
count = 0
extras = []
for name in alist:
try:
filter.index( name )
except ValueError:
extras.append( name )
return extras
def dump_encoding( file, encoding_name, encoding_list ):
"""dump a given encoding"""
write = file.write
write( " /* the following are indices into the SID name table */\n" )
write( " static const unsigned short " + encoding_name +
"[" + repr( len( encoding_list ) ) + "] =\n" )
write( " {\n" )
line = " "
comma = ""
col = 0
for value in encoding_list:
line += comma
line += "%3d" % value
comma = ","
col += 1
if col == 16:
col = 0
comma = ",\n "
write( line + "\n };\n\n\n" )
def dump_array( the_array, write, array_name ):
"""dumps a given encoding"""
write( " static const unsigned char " + array_name +
"[" + repr( len( the_array ) ) + "L] =\n" )
write( " {\n" )
line = ""
comma = " "
col = 0
for value in the_array:
line += comma
line += "%3d" % ord( value )
comma = ","
col += 1
if col == 16:
col = 0
comma = ",\n "
if len( line ) > 1024:
write( line )
line = ""
write( line + "\n };\n\n\n" )
def main():
"""main program body"""
if len( sys.argv ) != 2:
print __doc__ % sys.argv[0]
sys.exit( 1 )
file = open( sys.argv[1], "w" )
write = file.write
count_sid = len( sid_standard_names )
# `mac_extras' contains the list of glyph names in the Macintosh standard
# encoding which are not in the SID Standard Names.
#
mac_extras = filter_glyph_names( mac_standard_names, sid_standard_names )
# `base_list' contains the names of our final glyph names table.
# It consists of the `mac_extras' glyph names, followed by the SID
# standard names.
#
mac_extras_count = len( mac_extras )
base_list = mac_extras + sid_standard_names
write( "/***************************************************************************/\n" )
write( "/* */\n" )
write( "/* %-71s*/\n" % os.path.basename( sys.argv[1] ) )
write( "/* */\n" )
write( "/* PostScript glyph names. */\n" )
write( "/* */\n" )
write( "/* Copyright 2005, 2008 by */\n" )
write( "/* David Turner, Robert Wilhelm, and Werner Lemberg. */\n" )
write( "/* */\n" )
write( "/* This file is part of the FreeType project, and may only be used, */\n" )
write( "/* modified, and distributed under the terms of the FreeType project */\n" )
write( "/* license, LICENSE.TXT. By continuing to use, modify, or distribute */\n" )
write( "/* this file you indicate that you have read the license and */\n" )
write( "/* understand and accept it fully. */\n" )
write( "/* */\n" )
write( "/***************************************************************************/\n" )
write( "\n" )
write( "\n" )
write( " /* This file has been generated automatically -- do not edit! */\n" )
write( "\n" )
write( "\n" )
# dump final glyph list (mac extras + sid standard names)
#
st = StringTable( base_list, "ft_standard_glyph_names" )
st.dump( file )
st.dump_sublist( file, "ft_mac_names",
"FT_NUM_MAC_NAMES", mac_standard_names )
st.dump_sublist( file, "ft_sid_names",
"FT_NUM_SID_NAMES", sid_standard_names )
dump_encoding( file, "t1_standard_encoding", t1_standard_encoding )
dump_encoding( file, "t1_expert_encoding", t1_expert_encoding )
# dump the AGL in its compressed form
#
agl_glyphs, agl_values = adobe_glyph_values()
dict = StringNode( "", 0 )
for g in range( len( agl_glyphs ) ):
dict.add( agl_glyphs[g], int( agl_values[g], 16 ) )
dict = dict.optimize()
dict_len = dict.locate( 0 )
dict_array = dict.store( "" )
write( """\
/*
* This table is a compressed version of the Adobe Glyph List (AGL),
* optimized for efficient searching. It has been generated by the
* `glnames.py' python script located in the `src/tools' directory.
*
* The lookup function to get the Unicode value for a given string
* is defined below the table.
*/
#ifdef FT_CONFIG_OPTION_ADOBE_GLYPH_LIST
""" )
dump_array( dict_array, write, "ft_adobe_glyph_list" )
# write the lookup routine now
#
write( """\
/*
* This function searches the compressed table efficiently.
*/
static unsigned long
ft_get_adobe_glyph_index( const char* name,
const char* limit )
{
int c = 0;
int count, min, max;
const unsigned char* p = ft_adobe_glyph_list;
if ( name == 0 || name >= limit )
goto NotFound;
c = *name++;
count = p[1];
p += 2;
min = 0;
max = count;
while ( min < max )
{
int mid = ( min + max ) >> 1;
const unsigned char* q = p + mid * 2;
int c2;
q = ft_adobe_glyph_list + ( ( (int)q[0] << 8 ) | q[1] );
c2 = q[0] & 127;
if ( c2 == c )
{
p = q;
goto Found;
}
if ( c2 < c )
min = mid + 1;
else
max = mid;
}
goto NotFound;
Found:
for (;;)
{
/* assert (*p & 127) == c */
if ( name >= limit )
{
if ( (p[0] & 128) == 0 &&
(p[1] & 128) != 0 )
return (unsigned long)( ( (int)p[2] << 8 ) | p[3] );
goto NotFound;
}
c = *name++;
if ( p[0] & 128 )
{
p++;
if ( c != (p[0] & 127) )
goto NotFound;
continue;
}
p++;
count = p[0] & 127;
if ( p[0] & 128 )
p += 2;
p++;
for ( ; count > 0; count--, p += 2 )
{
int offset = ( (int)p[0] << 8 ) | p[1];
const unsigned char* q = ft_adobe_glyph_list + offset;
if ( c == ( q[0] & 127 ) )
{
p = q;
goto NextIter;
}
}
goto NotFound;
NextIter:
;
}
NotFound:
return 0;
}
#endif /* FT_CONFIG_OPTION_ADOBE_GLYPH_LIST */
""" )
if 0: # generate unit test, or don't
#
# now write the unit test to check that everything works OK
#
write( "#ifdef TEST\n\n" )
write( "static const char* const the_names[] = {\n" )
for name in agl_glyphs:
write( ' "' + name + '",\n' )
write( " 0\n};\n" )
write( "static const unsigned long the_values[] = {\n" )
for val in agl_values:
write( ' 0x' + val + ',\n' )
write( " 0\n};\n" )
write( """
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
int
main( void )
{
int result = 0;
const char* const* names = the_names;
const unsigned long* values = the_values;
for ( ; *names; names++, values++ )
{
const char* name = *names;
unsigned long reference = *values;
unsigned long value;
value = ft_get_adobe_glyph_index( name, name + strlen( name ) );
if ( value != reference )
{
result = 1;
fprintf( stderr, "name '%s' => %04x instead of %04x\\n",
name, value, reference );
}
}
return result;
}
""" )
write( "#endif /* TEST */\n" )
write("\n/* END */\n")
# Now run the main routine
#
main()
# END
| gpl-2.0 |
ep1cman/workload-automation | wlauto/instrumentation/daq/__init__.py | 2 | 20324 | # Copyright 2013-2015 ARM Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# pylint: disable=W0613,E1101,access-member-before-definition,attribute-defined-outside-init
from __future__ import division
import os
import sys
import csv
import shutil
import tempfile
from collections import OrderedDict, defaultdict
from string import ascii_lowercase
from multiprocessing import Process, Queue
from wlauto import Instrument, Parameter
from wlauto.core import signal
from wlauto.exceptions import ConfigError, InstrumentError, DeviceError
from wlauto.utils.misc import ensure_directory_exists as _d
from wlauto.utils.types import list_of_ints, list_of_strs, boolean
# pylint: disable=wrong-import-position,wrong-import-order
daqpower_path = os.path.join(os.path.dirname(__file__), '..', '..', 'external', 'daq_server', 'src')
sys.path.insert(0, daqpower_path)
try:
import daqpower.client as daq # pylint: disable=F0401
from daqpower.config import DeviceConfiguration, ServerConfiguration, ConfigurationError # pylint: disable=F0401
except ImportError, e:
daq, DeviceConfiguration, ServerConfiguration, ConfigurationError = None, None, None, None
import_error_mesg = e.message
sys.path.pop(0)
UNITS = {
'energy': 'Joules',
'power': 'Watts',
'voltage': 'Volts',
}
GPIO_ROOT = '/sys/class/gpio'
TRACE_MARKER_PATH = '/sys/kernel/debug/tracing/trace_marker'
def dict_or_bool(value):
"""
Ensures that either a dictionary or a boolean is used as a parameter.
"""
if isinstance(value, dict):
return value
return boolean(value)
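# Illustrative behaviour: dict_or_bool({'A15': ['A15a', 'A15b']}) returns
# the dict unchanged, while dict_or_bool(True) falls through to boolean()
# and returns True.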
class Daq(Instrument):
name = 'daq'
description = """
DAQ instrument obtains the power consumption of the target device's cores
as measured by a National Instruments Data Acquisition (DAQ) device.
WA communicates with a DAQ device server running on a Windows machine
(Please refer to :ref:`daq_setup`) over a network. You must specify the IP
address and port the server is listening on in the config file as follows ::
daq_server_host = '10.1.197.176'
daq_server_port = 45677
These values will be output by the server when you run it on Windows.
You must also specify the values of resistors (in Ohms) across which the
voltages are measured (Please refer to :ref:`daq_setup`). The values should be
specified as a list with an entry for each resistor, e.g.::
daq_resistor_values = [0.005, 0.005]
In addition to this mandatory configuration, you can also optionally specify the
following::
:daq_labels: Labels to be used for ports. Defaults to ``'PORT_<pnum>'``, where
'pnum' is the number of the port.
:daq_device_id: The ID under which the DAQ is registered with the driver.
Defaults to ``'Dev1'``.
:daq_v_range: Specifies the voltage range for the SOC voltage channel on the DAQ
(please refer to :ref:`daq_setup` for details). Defaults to ``2.5``.
:daq_dv_range: Specifies the voltage range for the resistor voltage channel on
the DAQ (please refer to :ref:`daq_setup` for details).
Defaults to ``0.2``.
:daq_sampling_rate: DAQ sampling rate. DAQ will take this many samples each
second. Please note that this maybe limitted by your DAQ model
and then number of ports you're measuring (again, see
:ref:`daq_setup`). Defaults to ``10000``.
:daq_channel_map: Represents mapping from logical AI channel number to physical
connector on the DAQ (varies between DAQ models). The default
assumes DAQ 6363 and similar with AI channels on connectors
0-7 and 16-23.
"""
parameters = [
Parameter('server_host', kind=str, default='localhost',
global_alias='daq_server_host',
description='The host address of the machine that runs the daq Server which the '
'instrument communicates with.'),
Parameter('server_port', kind=int, default=45677,
global_alias='daq_server_port',
description='The port number of the daq Server with which the daq instrument '
'communicates.'),
Parameter('device_id', kind=str, default='Dev1',
global_alias='daq_device_id',
description='The ID under which the DAQ is registered with the driver.'),
Parameter('v_range', kind=float, default=2.5,
global_alias='daq_v_range',
description='Specifies the voltage range for the SOC voltage channel on the DAQ '
'(please refer to :ref:`daq_setup` for details).'),
Parameter('dv_range', kind=float, default=0.2,
global_alias='daq_dv_range',
description='Specifies the voltage range for the resistor voltage channel on '
'the DAQ (please refer to :ref:`daq_setup` for details).'),
Parameter('sampling_rate', kind=int, default=10000,
global_alias='daq_sampling_rate',
description='DAQ sampling rate. DAQ will take this many samples each '
'second. Please note that this may be limited by your DAQ model '
'and the number of ports you\'re measuring (again, see '
':ref:`daq_setup`)'),
Parameter('resistor_values', kind=list, mandatory=True,
global_alias='daq_resistor_values',
description='The values of resistors (in Ohms) across which the voltages are measured on '
'each port.'),
Parameter('channel_map', kind=list_of_ints, default=(0, 1, 2, 3, 4, 5, 6, 7, 16, 17, 18, 19, 20, 21, 22, 23),
global_alias='daq_channel_map',
description='Represents mapping from logical AI channel number to physical '
'connector on the DAQ (varies between DAQ models). The default '
'assumes DAQ 6363 and similar with AI channels on connectors '
'0-7 and 16-23.'),
Parameter('labels', kind=list_of_strs,
global_alias='daq_labels',
description='List of port labels. If specified, the length of the list must match '
'the length of ``resistor_values``. Defaults to "PORT_<pnum>", where '
'"pnum" is the number of the port.'),
Parameter('negative_samples', default='keep', allowed_values=['keep', 'zero', 'drop', 'abs'],
global_alias='daq_negative_samples',
description="""
Specifies how negative power samples should be handled. The following
methods are possible:
:keep: keep them as they are
:zero: turn negative values to zero
:drop: drop samples if they contain negative values. *warning:* this may result in
port files containing different numbers of samples
:abs: take the absolute value of negative samples
"""),
Parameter('gpio_sync', kind=int, constraint=lambda x: x > 0,
description="""
If specified, the instrument will simultaneously set the
specified GPIO pin high and put a marker into ftrace. This is
to facilitate syncing kernel trace events to the DAQ power
trace.
"""),
Parameter('merge_channels', kind=dict_or_bool, default=False,
description="""
If set to ``True``, channels with consecutive letter suffixes will be summed.
e.g. If you have channels A7a, A7b, A7c, A15a, A15b they will be summed to A7, A15
You can also manually specify the name of channels to be merged and the name of the
result like so:
merge_channels:
A15: [A15dvfs, A15ram]
NonCPU: [GPU, RoS, Mem]
In the above example the DAQ channels labeled A15dvfs and A15ram will be summed
together with the result being saved as 'A15', and GPU, RoS and Mem will be
summed to 'NonCPU' (see the sketch after this parameter list).
""")
]
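# A sketch of the two accepted ``merge_channels`` forms (label names here
# are hypothetical, not defaults):
#
#   merge_channels: True
#     suffix-merging: labels A7a, A7b, A7c are summed row-wise into A7;
#     a label without such a group (e.g. GPU) is left untouched.
#
#   merge_channels: {'A15': ['A15dvfs', 'A15ram']}
#     explicit form: A15dvfs.csv and A15ram.csv are summed sample-by-sample
#     into A15.csv in the output directory.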
def initialize(self, context):
status, devices = self._execute_command('list_devices')
if status == daq.Status.OK and not devices:
raise InstrumentError('DAQ: server did not report any devices registered with the driver.')
self._results = OrderedDict()
self.gpio_path = None
if self.gpio_sync:
if not self.device.file_exists(GPIO_ROOT):
raise InstrumentError('GPIO sysfs not enabled on the device.')
try:
export_path = self.device.path.join(GPIO_ROOT, 'export')
self.device.set_sysfile_value(export_path, self.gpio_sync, verify=False)
pin_root = self.device.path.join(GPIO_ROOT, 'gpio{}'.format(self.gpio_sync))
direction_path = self.device.path.join(pin_root, 'direction')
self.device.set_sysfile_value(direction_path, 'out')
self.gpio_path = self.device.path.join(pin_root, 'value')
self.device.set_sysfile_value(self.gpio_path, 0, verify=False)
signal.connect(self.insert_start_marker, signal.BEFORE_WORKLOAD_EXECUTION, priority=11)
signal.connect(self.insert_stop_marker, signal.AFTER_WORKLOAD_EXECUTION, priority=11)
except DeviceError as e:
raise InstrumentError('Could not configure GPIO on device: {}'.format(e))
def setup(self, context):
self.logger.debug('Initialising session.')
self._execute_command('configure', config=self.device_config)
def slow_start(self, context):
self.logger.debug('Starting collecting measurements.')
self._execute_command('start')
def slow_stop(self, context):
self.logger.debug('Stopping collecting measurements.')
self._execute_command('stop')
def update_result(self, context): # pylint: disable=R0914
self.logger.debug('Downloading data files.')
output_directory = _d(os.path.join(context.output_directory, 'daq'))
self._execute_command('get_data', output_directory=output_directory)
if self.merge_channels:
self._merge_channels(context)
for entry in os.listdir(output_directory):
context.add_iteration_artifact('DAQ_{}'.format(os.path.splitext(entry)[0]),
path=os.path.join('daq', entry),
kind='data',
description='DAQ power measurements.')
port = os.path.splitext(entry)[0]
path = os.path.join(output_directory, entry)
key = (context.spec.id, context.spec.label, context.current_iteration)
if key not in self._results:
self._results[key] = {}
temp_file = os.path.join(tempfile.gettempdir(), entry)
writer, wfh = None, None
with open(path) as fh:
if self.negative_samples != 'keep':
wfh = open(temp_file, 'wb')
writer = csv.writer(wfh)
reader = csv.reader(fh)
metrics = reader.next()
if writer:
writer.writerow(metrics)
self._metrics |= set(metrics)
rows = _get_rows(reader, writer, self.negative_samples)
data = zip(*rows)
if writer:
wfh.close()
shutil.move(temp_file, os.path.join(output_directory, entry))
n = len(data[0])
means = [s / n for s in map(sum, data)]
for metric, value in zip(metrics, means):
metric_name = '{}_{}'.format(port, metric)
context.result.add_metric(metric_name, round(value, 3), UNITS[metric])
self._results[key][metric_name] = round(value, 3)
energy = sum(data[metrics.index('power')]) * (self.sampling_rate / 1000000)
context.result.add_metric('{}_energy'.format(port), round(energy, 3), UNITS['energy'])
def teardown(self, context):
self.logger.debug('Terminating session.')
self._execute_command('close')
def finalize(self, context):
if self.gpio_path:
unexport_path = self.device.path.join(GPIO_ROOT, 'unexport')
self.device.set_sysfile_value(unexport_path, self.gpio_sync, verify=False)
def validate(self): # pylint: disable=too-many-branches
if not daq:
raise ImportError(import_error_mesg)
self._results = None
self._metrics = set()
if self.labels:
if len(self.labels) != len(self.resistor_values):
raise ConfigError('Number of DAQ port labels does not match the number of resistor values.')
duplicates = set([x for x in self.labels if self.labels.count(x) > 1])
if len(duplicates) > 0:
raise ConfigError('Duplicate labels: {}'.format(', '.join(duplicates)))
else:
self.labels = ['PORT_{}'.format(i) for i, _ in enumerate(self.resistor_values)]
self.server_config = ServerConfiguration(host=self.server_host,
port=self.server_port)
self.device_config = DeviceConfiguration(device_id=self.device_id,
v_range=self.v_range,
dv_range=self.dv_range,
sampling_rate=self.sampling_rate,
resistor_values=self.resistor_values,
channel_map=self.channel_map,
labels=self.labels)
try:
self.server_config.validate()
self.device_config.validate()
except ConfigurationError, ex:
raise ConfigError('DAQ configuration: ' + ex.message) # Re-raise as a WA error
self.grouped_suffixes = defaultdict(str)
if isinstance(self.merge_channels, bool):
if self.merge_channels:
# Create a dict of potential prefixes and a list of their suffixes
grouped_suffixes = defaultdict(list)
for label in sorted(self.labels):
if len(label) > 1:
grouped_suffixes[label[:-1]].append(label)
# Only merge channels if more than one channel has the same prefix and the prefixes
# are consecutive letters starting with 'a'.
self.label_map = {}
for channel, suffixes in grouped_suffixes.iteritems():
if len(suffixes) > 1:
if "".join([s[-1] for s in suffixes]) in ascii_lowercase[:len(suffixes)]:
self.label_map[channel] = suffixes
elif isinstance(self.merge_channels, dict):
# Check if given channel names match labels
for old_names in self.merge_channels.values():
for name in old_names:
if name not in self.labels:
raise ConfigError("No channel with label {} specified".format(name))
self.label_map = self.merge_channels # pylint: disable=redefined-variable-type
self.merge_channels = True
else: # Should never reach here
raise AssertionError("``merge_channels`` is of invalid type")
def before_overall_results_processing(self, context):
if self._results:
headers = ['id', 'workload', 'iteration']
metrics = ['{}_{}'.format(p, m) for p in self.labels for m in sorted(self._metrics)]
headers += metrics
rows = [headers]
for key, value in self._results.iteritems():
rows.append(list(key) + [value[m] for m in metrics])
outfile = os.path.join(context.output_directory, 'daq_power.csv')
with open(outfile, 'wb') as fh:
writer = csv.writer(fh)
writer.writerows(rows)
def insert_start_marker(self, context):
if self.gpio_path:
command = 'echo DAQ_START_MARKER > {}; echo 1 > {}'.format(TRACE_MARKER_PATH, self.gpio_path)
self.device.execute(command, as_root=self.device.is_rooted)
def insert_stop_marker(self, context):
if self.gpio_path:
command = 'echo DAQ_STOP_MARKER > {}; echo 0 > {}'.format(TRACE_MARKER_PATH, self.gpio_path)
self.device.execute(command, as_root=self.device.is_rooted)
def _execute_command(self, command, **kwargs):
# pylint: disable=E1101
q = Queue()
p = Process(target=_send_daq_command, args=(q, self.server_config, command), kwargs=kwargs)
p.start()
result = q.get()
p.join()
if result.status == daq.Status.OK:
pass # all good
elif result.status == daq.Status.OKISH:
self.logger.debug(result.message)
elif result.status == daq.Status.ERROR:
raise InstrumentError('DAQ: {}'.format(result.message))
else:
raise InstrumentError('DAQ: Unexpected result: {} - {}'.format(result.status, result.message))
return (result.status, result.data)
def _merge_channels(self, context): # pylint: disable=r0914
output_directory = _d(os.path.join(context.output_directory, 'daq'))
for name, labels in self.label_map.iteritems():
summed = None
for label in labels:
path = os.path.join(output_directory, "{}.csv".format(label))
with open(path) as fh:
reader = csv.reader(fh)
metrics = reader.next()
rows = _get_rows(reader, None, self.negative_samples)
if summed:
summed = [[x + y for x, y in zip(a, b)] for a, b in zip(rows, summed)]
else:
summed = rows
output_path = os.path.join(output_directory, "{}.csv".format(name))
with open(output_path, 'wb') as wfh:
writer = csv.writer(wfh)
writer.writerow(metrics)
for row in summed:
writer.writerow(row)
def _send_daq_command(q, *args, **kwargs):
result = daq.execute_command(*args, **kwargs)
q.put(result)
def _get_rows(reader, writer, negative_samples):
rows = []
for row in reader:
row = map(float, row)
if negative_samples == 'keep':
rows.append(row)
elif negative_samples == 'zero':
def nonneg(v):
return v if v >= 0 else 0
rows.append([nonneg(v) for v in row])
elif negative_samples == 'drop':
if all(v >= 0 for v in row):
rows.append(row)
elif negative_samples == 'abs':
rows.append([abs(v) for v in row])
else:
raise AssertionError(negative_samples) # should never get here
if writer:
writer.writerow(row)
return rows
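# Illustration of `_get_rows' (sample values hypothetical): given a row
# [0.5, -0.1], negative_samples='zero' yields [0.5, 0], 'drop' omits the
# row entirely, 'abs' yields [0.5, 0.1], and 'keep' passes it through.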
| apache-2.0 |
danielchalef/gensim | gensim/corpora/bleicorpus.py | 68 | 4496 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2010 Radim Rehurek <[email protected]>
# Licensed under the GNU LGPL v2.1 - http://www.gnu.org/licenses/lgpl.html
"""
Blei's LDA-C format.
"""
from __future__ import with_statement
from os import path
import logging
from gensim import interfaces, utils
from gensim.corpora import IndexedCorpus
from six.moves import xrange
logger = logging.getLogger('gensim.corpora.bleicorpus')
class BleiCorpus(IndexedCorpus):
"""
Corpus in Blei's LDA-C format.
The corpus is represented as two files: one describing the documents, and another
describing the mapping between words and their ids.
Each document is one line::
N fieldId1:fieldValue1 fieldId2:fieldValue2 ... fieldIdN:fieldValueN
The vocabulary is a file with words, one word per line; word at line K has an
implicit ``id=K``.
"""
def __init__(self, fname, fname_vocab=None):
"""
Initialize the corpus from a file.
`fname_vocab` is the file with vocabulary; if not specified, it defaults to
`fname.vocab`.
"""
IndexedCorpus.__init__(self, fname)
logger.info("loading corpus from %s" % fname)
if fname_vocab is None:
fname_base, _ = path.splitext(fname)
fname_dir = path.dirname(fname)
for fname_vocab in [
utils.smart_extension(fname, '.vocab'),
utils.smart_extension(fname, '/vocab.txt'),
utils.smart_extension(fname_base, '.vocab'),
utils.smart_extension(fname_dir, '/vocab.txt'),
]:
if path.exists(fname_vocab):
break
else:
raise IOError('BleiCorpus: could not find vocabulary file')
self.fname = fname
with utils.smart_open(fname_vocab) as fin:
words = [utils.to_unicode(word).rstrip() for word in fin]
self.id2word = dict(enumerate(words))
def __iter__(self):
"""
Iterate over the corpus, returning one sparse vector at a time.
"""
lineno = -1
with utils.smart_open(self.fname) as fin:
for lineno, line in enumerate(fin):
yield self.line2doc(line)
self.length = lineno + 1
def line2doc(self, line):
parts = utils.to_unicode(line).split()
if int(parts[0]) != len(parts) - 1:
raise ValueError("invalid format in %s: %s" % (self.fname, repr(line)))
doc = [part.rsplit(':', 1) for part in parts[1:]]
doc = [(int(p1), float(p2)) for p1, p2 in doc]
return doc
@staticmethod
def save_corpus(fname, corpus, id2word=None, metadata=False):
"""
Save a corpus in the LDA-C format.
There are actually two files saved: `fname` and `fname.vocab`, where
`fname.vocab` is the vocabulary file.
This function is automatically called by `BleiCorpus.serialize`; don't
call it directly, call `serialize` instead.
"""
if id2word is None:
logger.info("no word id mapping provided; initializing from corpus")
id2word = utils.dict_from_corpus(corpus)
num_terms = len(id2word)
else:
num_terms = 1 + max([-1] + id2word.keys())
logger.info("storing corpus in Blei's LDA-C format into %s" % fname)
with utils.smart_open(fname, 'wb') as fout:
offsets = []
for doc in corpus:
doc = list(doc)
offsets.append(fout.tell())
parts = ["%i:%g" % p for p in doc if abs(p[1]) > 1e-7]
fout.write(utils.to_utf8("%i %s\n" % (len(doc), ' '.join(parts))))
# write out vocabulary, in a format compatible with Blei's topics.py script
fname_vocab = utils.smart_extension(fname, '.vocab')
logger.info("saving vocabulary of %i words to %s" % (num_terms, fname_vocab))
with utils.smart_open(fname_vocab, 'wb') as fout:
for featureid in xrange(num_terms):
fout.write(utils.to_utf8("%s\n" % id2word.get(featureid, '---')))
return offsets
def docbyoffset(self, offset):
"""
Return the document stored at file position `offset`.
"""
with utils.smart_open(self.fname) as f:
f.seek(offset)
return self.line2doc(f.readline())
# endclass BleiCorpus
| gpl-3.0 |
40223220/2015cd_midterm | static/Brython3.1.1-20150328-091302/Lib/site-packages/pygame/draw.py | 603 | 6456 | from javascript import console
from browser import timer
import math
class Queue:
def __init__(self):
self._list=[]
def empty(self):
return len(self._list) == 0
def put(self, element):
self._list.append(element)
def get(self):
if len(self._list) == 0:
raise IndexError('get from empty queue')  # BaseError was undefined
_element=self._list[0]
if len(self._list) == 1:
self._list=[]
else:
self._list=self._list[1:]
return _element
dm={}
def aaline(canvas, color, startpos, endpos, width, outline, blend=1):
#console.log("aaline")
if canvas not in dm:
dm[canvas]=DrawManager(canvas)
dm[canvas].process()
_dl=DrawLine(startpos[0], startpos[1], endpos[0], endpos[1], color,
width, outline, speed=10)
dm[canvas].add_line(_dl) #color, startpos, endpos, width, outline)
def aapolygon(canvas, color, coordinates, width, outline, blend=1):
#console.log("aapolygon")
if canvas not in dm:
dm[canvas]=DrawManager(canvas)
dm[canvas].process()
_dp=DrawPolygon(coordinates, color, width, outline, speed=10)
dm[canvas].add_polygon(_dp)
def aapolygon_bg(canvas, shape):
if canvas not in dm:
dm[canvas]=DrawManager(canvas)
dm[canvas].process()
dm[canvas].add_polygon_bg(shape)
class DrawPolygon:
def __init__(self, coordinates, color, width, outline, speed=10):
self.moveTo=coordinates[0]
self.segments=coordinates[1:]
self.color=color
self.width=width
self.outline=outline
class DrawLine:
def __init__(self, x0, y0, x1, y1, color, width, outline, speed=None):
self._type='LINE'
self._x0=x0
self._x1=x1
self._y0=y0
self._y1=y1
self._speed=speed
self._color=color
self._width=width
self._outline=outline
def get_segments(self):
if self._speed==0: #no animate since speed is 0 (return one segment)
return [{'type': self._type, 'x0':self._x0, 'y0': self._y0,
'x1': self._x1, 'y1': self._y1, 'color': self._color}]
#need to figure out how to translate speed into pixels, etc
#maybe speed is pixels per ms? 10 = 10 pixels per millisecond?
_x=(self._x1 - self._x0)
_x*=_x
_y=(self._y1 - self._y0)
_y*=_y
_distance=math.sqrt(_x + _y)
if _distance < self._speed: # we can do this in one segment
return [{'type': self._type, 'x0':self._x0, 'y0': self._y0,
'x1': self._x1, 'y1': self._y1, 'color': self._color}]
_segments=[]
_num_segments=math.floor(_distance/self._speed)
_pos_x=self._x0
_pos_y=self._y0
_x_diff=self._x1 - self._x0
_y_diff=self._y1 - self._y0
for _i in range(1,_num_segments+1):
_x=self._x0 + _i/_num_segments * _x_diff
_y=self._y0 + _i/_num_segments * _y_diff
_segments.append({'type': 'LINE', 'x0': _pos_x, 'y0': _pos_y,
'x1': _x, 'y1': _y, 'color': self._color})
_pos_x=_x
_pos_y=_y
if _pos_x != self._x1 or _pos_y != self._y1:
_segments.append({'type': 'LINE', 'x0': _pos_x, 'y0': _pos_y,
'x1': self._x1, 'y1': self._y1, 'color': self._color}) # finish at the exact endpoint
return _segments
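# Worked example (numbers illustrative): DrawLine(0, 0, 30, 40, color, 1,
# None, speed=10) spans sqrt(30**2 + 40**2) == 50 pixels, so get_segments()
# produces floor(50 / 10) == 5 segments, each advancing by (6, 8):
# (0,0) -> (6,8) -> (12,16) -> (18,24) -> (24,32) -> (30,40).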
class DrawManager:
def __init__(self, canvas):
self._queue=Queue()
self._canvas=canvas
self._ctx=canvas.getContext('2d')
self._interval=None
self._bg=None #used to capture bg before polygon is drawn
def __del__(self):
if self._interval is not None:
timer.clear_interval(self._interval)
self._interval=None
del self._queue
def rect_from_shape(self, points):
_width=self._canvas.width
_height=self._canvas.height
_min_x=_width
_max_x=0
_min_y=_height
_max_y=0
for _point in points:
_x, _y = _point
_min_x=min(_min_x, _x)
_min_y=min(_min_y, _y)
_max_x=max(_max_x, _x)
_max_y=max(_max_y, _y)
_w2=_width/2
_h2=_height/2
return math.floor(_min_x-0.5)+_w2, math.floor(_min_y-0.5)+_h2, \
math.ceil(_max_x+0.5)+_w2, math.ceil(_max_y+0.5)+_h2
def __interval(self):
if not self._queue.empty():
_dict=self._queue.get()
if _dict['type'] == 'LINE':
self._ctx.beginPath()
self._ctx.moveTo(_dict['x0'], _dict['y0'])
self._ctx.lineTo(_dict['x1'], _dict['y1'])
#if _dict['outline'] is not None:
# self._ctx.strokeStyle=_dict['outline'] #set line color
if _dict['color'] is not None:
self._ctx.strokeStyle=_dict['color'] # stroke() uses strokeStyle, not fillStyle
self._ctx.stroke()
elif _dict['type'] == 'POLYGON':
if self._bg is not None:
self._ctx.putImageData(self._bg[0], self._bg[1], self._bg[2])
console.log(self._bg[0])
self._bg=None
self._ctx.beginPath()
_moveTo=_dict['moveTo']
self._ctx.moveTo(_moveTo[0], _moveTo[1])
for _segment in _dict['segments']:
self._ctx.lineTo(_segment[0], _segment[1])
if _dict['width']:
self._ctx.lineWidth=_dict['width']
if _dict['outline']:
self._ctx.strokeStyle=_dict['outline']
if _dict['color']:
self._ctx.fillStyle=_dict['color']
self._ctx.fill()
self._ctx.closePath()
self._ctx.stroke()
elif _dict['type'] == 'POLYGON_BG':
_x0,_y0,_x1,_y1=self.rect_from_shape(_dict['shape'])
console.log(_x0,_y0,_x1, _y1)
self._bg=[]
self._bg.append(self._ctx.getImageData(_x0,_y0,abs(_x1)-abs(_x0),abs(_y1)-abs(_y0)))
self._bg.append(_x0)
self._bg.append(_y0)
def process(self):
self._interval=timer.set_interval(self.__interval, 10)
def add_line(self, dl): #color, startpos, endpos, width, outline, speed=None):
for _segment in dl.get_segments():
self._queue.put(_segment)
def add_polygon(self, dp):
self._queue.put({'type': 'POLYGON', 'moveTo': dp.moveTo,
'segments': dp.segments, 'color': dp.color,
'outline': dp.outline, 'width': dp.width})
def add_polygon_bg(self, shape):
self._queue.put({'type': 'POLYGON_BG', 'shape': shape})
| gpl-3.0 |
BadDNA/anolis | web/env/lib/python2.6/site-packages/pip-0.7.2-py2.6.egg/pip/locations.py | 3 | 1508 | """Locations where we look for configs, install stuff, etc"""
import sys
import os
from distutils import sysconfig
if getattr(sys, 'real_prefix', None):
## FIXME: is build/ a good name?
build_prefix = os.path.join(sys.prefix, 'build')
src_prefix = os.path.join(sys.prefix, 'src')
else:
## FIXME: this isn't a very good default
build_prefix = os.path.join(os.getcwd(), 'build')
src_prefix = os.path.join(os.getcwd(), 'src')
# FIXME doesn't account for venv linked to global site-packages
site_packages = sysconfig.get_python_lib()
user_dir = os.path.expanduser('~')
if sys.platform == 'win32':
bin_py = os.path.join(sys.prefix, 'Scripts')
# buildout uses 'bin' on Windows too?
if not os.path.exists(bin_py):
bin_py = os.path.join(sys.prefix, 'bin')
user_dir = os.environ.get('APPDATA', user_dir) # Use %APPDATA% for roaming
default_storage_dir = os.path.join(user_dir, 'pip')
default_config_file = os.path.join(default_storage_dir, 'pip.ini')
default_log_file = os.path.join(default_storage_dir, 'pip.log')
else:
bin_py = os.path.join(sys.prefix, 'bin')
default_storage_dir = os.path.join(user_dir, '.pip')
default_config_file = os.path.join(default_storage_dir, 'pip.conf')
default_log_file = os.path.join(default_storage_dir, 'pip.log')
# Forcing to use /usr/local/bin for standard Mac OS X framework installs
if sys.platform[:6] == 'darwin' and sys.prefix[:16] == '/System/Library/':
bin_py = '/usr/local/bin'
| bsd-3-clause |
bgris/ODL_bgris | lib/python3.5/site-packages/qtconsole/mainwindow.py | 7 | 31388 | """The Qt MainWindow for the QtConsole
This is a tabbed pseudo-terminal of Jupyter sessions, with a menu bar for
common actions.
"""
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
import sys
import webbrowser
from threading import Thread
from qtconsole.qt import QtGui,QtCore
from qtconsole.usage import gui_reference
def background(f):
"""call a function in a simple thread, to prevent blocking"""
t = Thread(target=f)
t.start()
return t
class MainWindow(QtGui.QMainWindow):
#---------------------------------------------------------------------------
# 'object' interface
#---------------------------------------------------------------------------
def __init__(self, app,
confirm_exit=True,
new_frontend_factory=None, slave_frontend_factory=None,
):
""" Create a tabbed MainWindow for managing FrontendWidgets
Parameters
----------
app : reference to QApplication parent
confirm_exit : bool, optional
Whether we should prompt on close of tabs
new_frontend_factory : callable
A callable that returns a new JupyterWidget instance, attached to
its own running kernel.
slave_frontend_factory : callable
A callable that takes an existing JupyterWidget, and returns a new
JupyterWidget instance, attached to the same kernel.
"""
super(MainWindow, self).__init__()
self._kernel_counter = 0
self._app = app
self.confirm_exit = confirm_exit
self.new_frontend_factory = new_frontend_factory
self.slave_frontend_factory = slave_frontend_factory
self.tab_widget = QtGui.QTabWidget(self)
self.tab_widget.setDocumentMode(True)
self.tab_widget.setTabsClosable(True)
self.tab_widget.tabCloseRequested[int].connect(self.close_tab)
self.setCentralWidget(self.tab_widget)
# hide tab bar at first, since we have no tabs:
self.tab_widget.tabBar().setVisible(False)
# prevent focus in tab bar
self.tab_widget.setFocusPolicy(QtCore.Qt.NoFocus)
def update_tab_bar_visibility(self):
""" update visibility of the tabBar depending of the number of tab
0 or 1 tab, tabBar hidden
2+ tabs, tabBar visible
send a self.close if number of tab ==0
need to be called explicitly, or be connected to tabInserted/tabRemoved
"""
if self.tab_widget.count() <= 1:
self.tab_widget.tabBar().setVisible(False)
else:
self.tab_widget.tabBar().setVisible(True)
if self.tab_widget.count()==0 :
self.close()
@property
def next_kernel_id(self):
"""constantly increasing counter for kernel IDs"""
c = self._kernel_counter
self._kernel_counter += 1
return c
@property
def active_frontend(self):
return self.tab_widget.currentWidget()
def create_tab_with_new_frontend(self):
"""create a new frontend and attach it to a new tab"""
widget = self.new_frontend_factory()
self.add_tab_with_frontend(widget)
def create_tab_with_current_kernel(self):
"""create a new frontend attached to the same kernel as the current tab"""
current_widget = self.tab_widget.currentWidget()
current_widget_index = self.tab_widget.indexOf(current_widget)
current_widget_name = self.tab_widget.tabText(current_widget_index)
widget = self.slave_frontend_factory(current_widget)
if 'slave' in current_widget_name:
# don't keep stacking slaves
name = current_widget_name
else:
name = '(%s) slave' % current_widget_name
self.add_tab_with_frontend(widget,name=name)
def close_tab(self,current_tab):
""" Called when you need to try to close a tab.
It takes the number of the tab to be closed as argument, or a reference
to the widget inside this tab
"""
# let's be sure "tab" and "closing widget" are respectively the index
# of the tab to close and a reference to the frontend to close
if type(current_tab) is not int :
current_tab = self.tab_widget.indexOf(current_tab)
closing_widget=self.tab_widget.widget(current_tab)
# when trying to be closed, widget might re-send a request to be
# closed again, but will be deleted when event will be processed. So
# need to check that widget still exists and skip if not. One example
# of this is when 'exit' is sent in a slave tab. 'exit' will be
# re-sent by this function on the master widget, which ask all slave
# widgets to exit
if closing_widget is None:
return
#get a list of all slave widgets on the same kernel.
slave_tabs = self.find_slave_widgets(closing_widget)
keepkernel = None #Use the prompt by default
if hasattr(closing_widget,'_keep_kernel_on_exit'): #set by exit magic
keepkernel = closing_widget._keep_kernel_on_exit
# If the signal was sent by the exit magic (_keep_kernel_on_exit exists
# and is not None), we set each local slave tab's _hidden to True to
# avoid prompting for kernel restart when they get the signal, and then
# "forward" the 'exit' to the main window
if keepkernel is not None:
for tab in slave_tabs:
tab._hidden = True
if closing_widget in slave_tabs:
try :
self.find_master_tab(closing_widget).execute('exit')
except AttributeError:
self.log.info("Master already closed or not local, closing only current tab")
self.tab_widget.removeTab(current_tab)
self.update_tab_bar_visibility()
return
kernel_client = closing_widget.kernel_client
kernel_manager = closing_widget.kernel_manager
if keepkernel is None and not closing_widget._confirm_exit:
# don't prompt, just terminate the kernel if we own it
# or leave it alone if we don't
keepkernel = closing_widget._existing
if keepkernel is None: #show prompt
if kernel_client and kernel_client.channels_running:
title = self.window().windowTitle()
cancel = QtGui.QMessageBox.Cancel
okay = QtGui.QMessageBox.Ok
if closing_widget._may_close:
msg = "You are closing the tab : "+'"'+self.tab_widget.tabText(current_tab)+'"'
info = "Would you like to quit the Kernel and close all attached Consoles as well?"
justthis = QtGui.QPushButton("&No, just this Tab", self)
justthis.setShortcut('N')
closeall = QtGui.QPushButton("&Yes, close all", self)
closeall.setShortcut('Y')
# allow ctrl-d ctrl-d exit, like in terminal
closeall.setShortcut('Ctrl+D')
box = QtGui.QMessageBox(QtGui.QMessageBox.Question,
title, msg)
box.setInformativeText(info)
box.addButton(cancel)
box.addButton(justthis, QtGui.QMessageBox.NoRole)
box.addButton(closeall, QtGui.QMessageBox.YesRole)
box.setDefaultButton(closeall)
box.setEscapeButton(cancel)
pixmap = QtGui.QPixmap(self._app.icon.pixmap(QtCore.QSize(64,64)))
box.setIconPixmap(pixmap)
reply = box.exec_()
if reply == 1: # close All
for slave in slave_tabs:
background(slave.kernel_client.stop_channels)
self.tab_widget.removeTab(self.tab_widget.indexOf(slave))
kernel_manager.shutdown_kernel()
self.tab_widget.removeTab(current_tab)
background(kernel_client.stop_channels)
elif reply == 0: # close Console
if not closing_widget._existing:
# Have kernel: don't quit, just close the tab
closing_widget.execute("exit True")
self.tab_widget.removeTab(current_tab)
background(kernel_client.stop_channels)
else:
reply = QtGui.QMessageBox.question(self, title,
"Are you sure you want to close this Console?"+
"\nThe Kernel and other Consoles will remain active.",
okay|cancel,
defaultButton=okay
)
if reply == okay:
self.tab_widget.removeTab(current_tab)
elif keepkernel: #close console but leave kernel running (no prompt)
self.tab_widget.removeTab(current_tab)
background(kernel_client.stop_channels)
else: #close console and kernel (no prompt)
self.tab_widget.removeTab(current_tab)
if kernel_client and kernel_client.channels_running:
for slave in slave_tabs:
background(slave.kernel_client.stop_channels)
self.tab_widget.removeTab(self.tab_widget.indexOf(slave))
if kernel_manager:
kernel_manager.shutdown_kernel()
background(kernel_client.stop_channels)
self.update_tab_bar_visibility()
def add_tab_with_frontend(self,frontend,name=None):
""" insert a tab with a given frontend in the tab bar, and give it a name
"""
if not name:
name = 'kernel %i' % self.next_kernel_id
self.tab_widget.addTab(frontend,name)
self.update_tab_bar_visibility()
self.make_frontend_visible(frontend)
frontend.exit_requested.connect(self.close_tab)
def next_tab(self):
self.tab_widget.setCurrentIndex((self.tab_widget.currentIndex()+1))
def prev_tab(self):
self.tab_widget.setCurrentIndex((self.tab_widget.currentIndex()-1))
def make_frontend_visible(self,frontend):
widget_index=self.tab_widget.indexOf(frontend)
if widget_index > 0 :
self.tab_widget.setCurrentIndex(widget_index)
def find_master_tab(self,tab,as_list=False):
"""
Try to return the frontend that owns the kernel attached to the given widget/tab.
Only finds frontends owned by the current application. Selection
based on the port of the kernel might be inaccurate if several kernels
on different IPs use the same port number.
This function does the conversion tabNumber/widget if needed.
Might return None if no master widget (non local kernel)
Will crash if more than 1 masterWidget
When asList set to True, always return a list of widget(s) owning
the kernel. The list might be empty or containing several Widget.
"""
#convert from/to int/richIpythonWidget if needed
if isinstance(tab, int):
tab = self.tab_widget.widget(tab)
km=tab.kernel_client
#build list of all widgets
widget_list = [self.tab_widget.widget(i) for i in range(self.tab_widget.count())]
# widgets that are candidates to own the kernel share the connection file of the current widget
# and should have a _may_close attribute
filtered_widget_list = [ widget for widget in widget_list if
widget.kernel_client.connection_file == km.connection_file and
hasattr(widget,'_may_close') ]
# the master widget is the one that may close the kernel
master_widget= [ widget for widget in filtered_widget_list if widget._may_close]
if as_list:
return master_widget
assert(len(master_widget)<=1 )
if len(master_widget)==0:
return None
return master_widget[0]
def find_slave_widgets(self,tab):
"""return all the frontends that do not own the kernel attached to the given widget/tab.
Only finds frontends owned by the current application. Selection is
based on the connection file of the kernel.
This function does the conversion tabNumber/widget if needed.
"""
#convert from/to int/richIpythonWidget if needed
if isinstance(tab, int):
tab = self.tab_widget.widget(tab)
km=tab.kernel_client
#build list of all widgets
widget_list = [self.tab_widget.widget(i) for i in range(self.tab_widget.count())]
# widgets that are candidates not to own the kernel share the connection file of the current widget
filtered_widget_list = ( widget for widget in widget_list if
widget.kernel_client.connection_file == km.connection_file)
# Get a list of all widgets owning the same kernel and remove them from
# the previous candidates. (better using sets ?)
master_widget_list = self.find_master_tab(tab, as_list=True)
slave_list = [widget for widget in filtered_widget_list if widget not in master_widget_list]
return slave_list
# Populate the menu bar with common actions and shortcuts
def add_menu_action(self, menu, action, defer_shortcut=False):
"""Add action to menu as well as self
So that when the menu bar is invisible, its actions are still available.
If defer_shortcut is True, set the shortcut context to widget-only,
where it will avoid conflict with shortcuts already bound to the
widgets themselves.
"""
menu.addAction(action)
self.addAction(action)
if defer_shortcut:
action.setShortcutContext(QtCore.Qt.WidgetShortcut)
def init_menu_bar(self):
#create menu in the order they should appear in the menu bar
self.init_file_menu()
self.init_edit_menu()
self.init_view_menu()
self.init_kernel_menu()
self.init_window_menu()
self.init_help_menu()
def init_file_menu(self):
self.file_menu = self.menuBar().addMenu("&File")
self.new_kernel_tab_act = QtGui.QAction("New Tab with &New kernel",
self,
shortcut="Ctrl+T",
triggered=self.create_tab_with_new_frontend)
self.add_menu_action(self.file_menu, self.new_kernel_tab_act)
self.slave_kernel_tab_act = QtGui.QAction("New Tab with Sa&me kernel",
self,
shortcut="Ctrl+Shift+T",
triggered=self.create_tab_with_current_kernel)
self.add_menu_action(self.file_menu, self.slave_kernel_tab_act)
self.file_menu.addSeparator()
self.close_action=QtGui.QAction("&Close Tab",
self,
shortcut=QtGui.QKeySequence.Close,
triggered=self.close_active_frontend
)
self.add_menu_action(self.file_menu, self.close_action)
self.export_action=QtGui.QAction("&Save to HTML/XHTML",
self,
shortcut=QtGui.QKeySequence.Save,
triggered=self.export_action_active_frontend
)
self.add_menu_action(self.file_menu, self.export_action, True)
self.file_menu.addSeparator()
printkey = QtGui.QKeySequence(QtGui.QKeySequence.Print)
if printkey.matches("Ctrl+P") and sys.platform != 'darwin':
# Only override the default if there is a collision.
# Qt ctrl = cmd on OSX, so the match gets a false positive on OSX.
printkey = "Ctrl+Shift+P"
self.print_action = QtGui.QAction("&Print",
self,
shortcut=printkey,
triggered=self.print_action_active_frontend)
self.add_menu_action(self.file_menu, self.print_action, True)
if sys.platform != 'darwin':
# OSX always has Quit in the Application menu, only add it
# to the File menu elsewhere.
self.file_menu.addSeparator()
self.quit_action = QtGui.QAction("&Quit",
self,
shortcut=QtGui.QKeySequence.Quit,
triggered=self.close,
)
self.add_menu_action(self.file_menu, self.quit_action)
def init_edit_menu(self):
self.edit_menu = self.menuBar().addMenu("&Edit")
self.undo_action = QtGui.QAction("&Undo",
self,
shortcut=QtGui.QKeySequence.Undo,
statusTip="Undo last action if possible",
triggered=self.undo_active_frontend
)
self.add_menu_action(self.edit_menu, self.undo_action)
self.redo_action = QtGui.QAction("&Redo",
self,
shortcut=QtGui.QKeySequence.Redo,
statusTip="Redo last action if possible",
triggered=self.redo_active_frontend)
self.add_menu_action(self.edit_menu, self.redo_action)
self.edit_menu.addSeparator()
self.cut_action = QtGui.QAction("&Cut",
self,
shortcut=QtGui.QKeySequence.Cut,
triggered=self.cut_active_frontend
)
self.add_menu_action(self.edit_menu, self.cut_action, True)
self.copy_action = QtGui.QAction("&Copy",
self,
shortcut=QtGui.QKeySequence.Copy,
triggered=self.copy_active_frontend
)
self.add_menu_action(self.edit_menu, self.copy_action, True)
self.copy_raw_action = QtGui.QAction("Copy (&Raw Text)",
self,
shortcut="Ctrl+Shift+C",
triggered=self.copy_raw_active_frontend
)
self.add_menu_action(self.edit_menu, self.copy_raw_action, True)
self.paste_action = QtGui.QAction("&Paste",
self,
shortcut=QtGui.QKeySequence.Paste,
triggered=self.paste_active_frontend
)
self.add_menu_action(self.edit_menu, self.paste_action, True)
self.edit_menu.addSeparator()
selectall = QtGui.QKeySequence(QtGui.QKeySequence.SelectAll)
if selectall.matches("Ctrl+A") and sys.platform != 'darwin':
# Only override the default if there is a collision.
# Qt ctrl = cmd on OSX, so the match gets a false positive on OSX.
selectall = "Ctrl+Shift+A"
self.select_all_action = QtGui.QAction("Select &All",
self,
shortcut=selectall,
triggered=self.select_all_active_frontend
)
self.add_menu_action(self.edit_menu, self.select_all_action, True)
def init_view_menu(self):
self.view_menu = self.menuBar().addMenu("&View")
if sys.platform != 'darwin':
# disable on OSX, where there is always a menu bar
self.toggle_menu_bar_act = QtGui.QAction("Toggle &Menu Bar",
self,
shortcut="Ctrl+Shift+M",
statusTip="Toggle visibility of menubar",
triggered=self.toggle_menu_bar)
self.add_menu_action(self.view_menu, self.toggle_menu_bar_act)
fs_key = "Ctrl+Meta+F" if sys.platform == 'darwin' else "F11"
self.full_screen_act = QtGui.QAction("&Full Screen",
self,
shortcut=fs_key,
statusTip="Toggle between Fullscreen and Normal Size",
triggered=self.toggleFullScreen)
self.add_menu_action(self.view_menu, self.full_screen_act)
self.view_menu.addSeparator()
self.increase_font_size = QtGui.QAction("Zoom &In",
self,
shortcut=QtGui.QKeySequence.ZoomIn,
triggered=self.increase_font_size_active_frontend
)
self.add_menu_action(self.view_menu, self.increase_font_size, True)
self.decrease_font_size = QtGui.QAction("Zoom &Out",
self,
shortcut=QtGui.QKeySequence.ZoomOut,
triggered=self.decrease_font_size_active_frontend
)
self.add_menu_action(self.view_menu, self.decrease_font_size, True)
self.reset_font_size = QtGui.QAction("Zoom &Reset",
self,
shortcut="Ctrl+0",
triggered=self.reset_font_size_active_frontend
)
self.add_menu_action(self.view_menu, self.reset_font_size, True)
self.view_menu.addSeparator()
self.clear_action = QtGui.QAction("&Clear Screen",
self,
shortcut='Ctrl+L',
statusTip="Clear the console",
triggered=self.clear_active_frontend)
self.add_menu_action(self.view_menu, self.clear_action)
self.pager_menu = self.view_menu.addMenu("&Pager")
hsplit_action = QtGui.QAction(".. &Horizontal Split",
self,
triggered=lambda: self.set_paging_active_frontend('hsplit'))
vsplit_action = QtGui.QAction(" : &Vertical Split",
self,
triggered=lambda: self.set_paging_active_frontend('vsplit'))
inside_action = QtGui.QAction(" &Inside Pager",
self,
triggered=lambda: self.set_paging_active_frontend('inside'))
self.pager_menu.addAction(hsplit_action)
self.pager_menu.addAction(vsplit_action)
self.pager_menu.addAction(inside_action)
def init_kernel_menu(self):
self.kernel_menu = self.menuBar().addMenu("&Kernel")
# Qt on OSX maps Ctrl to Cmd, and Meta to Ctrl
# keep the signal shortcuts to ctrl, rather than
# platform-default like we do elsewhere.
ctrl = "Meta" if sys.platform == 'darwin' else "Ctrl"
self.interrupt_kernel_action = QtGui.QAction("&Interrupt current Kernel",
self,
triggered=self.interrupt_kernel_active_frontend,
shortcut=ctrl+"+C",
)
self.add_menu_action(self.kernel_menu, self.interrupt_kernel_action)
self.restart_kernel_action = QtGui.QAction("&Restart current Kernel",
self,
triggered=self.restart_kernel_active_frontend,
shortcut=ctrl+"+.",
)
self.add_menu_action(self.kernel_menu, self.restart_kernel_action)
self.kernel_menu.addSeparator()
self.confirm_restart_kernel_action = QtGui.QAction("&Confirm kernel restart",
self,
checkable=True,
checked=self.active_frontend.confirm_restart,
triggered=self.toggle_confirm_restart_active_frontend
)
self.add_menu_action(self.kernel_menu, self.confirm_restart_kernel_action)
self.tab_widget.currentChanged.connect(self.update_restart_checkbox)
def init_window_menu(self):
self.window_menu = self.menuBar().addMenu("&Window")
if sys.platform == 'darwin':
# add min/maximize actions to OSX, which lacks default bindings.
self.minimizeAct = QtGui.QAction("Mini&mize",
self,
shortcut="Ctrl+m",
statusTip="Minimize the window/Restore Normal Size",
triggered=self.toggleMinimized)
# maximize is called 'Zoom' on OSX for some reason
self.maximizeAct = QtGui.QAction("&Zoom",
self,
shortcut="Ctrl+Shift+M",
statusTip="Maximize the window/Restore Normal Size",
triggered=self.toggleMaximized)
self.add_menu_action(self.window_menu, self.minimizeAct)
self.add_menu_action(self.window_menu, self.maximizeAct)
self.window_menu.addSeparator()
prev_key = "Ctrl+Shift+Left" if sys.platform == 'darwin' else "Ctrl+PgUp"
self.prev_tab_act = QtGui.QAction("Pre&vious Tab",
self,
shortcut=prev_key,
statusTip="Select previous tab",
triggered=self.prev_tab)
self.add_menu_action(self.window_menu, self.prev_tab_act)
next_key = "Ctrl+Shift+Right" if sys.platform == 'darwin' else "Ctrl+PgDown"
self.next_tab_act = QtGui.QAction("Ne&xt Tab",
self,
shortcut=next_key,
statusTip="Select next tab",
triggered=self.next_tab)
self.add_menu_action(self.window_menu, self.next_tab_act)
def init_help_menu(self):
# Please keep the Help menu on Mac OS even if it is empty: Qt automatically
# adds a search field to it for searching inside menus. Please also keep it
# spelled "Help" in English; as long as Qt doesn't support a QAction.MenuRole
# such as HelpMenuRole, renaming or translating it would lose the
# search-field functionality.
self.help_menu = self.menuBar().addMenu("&Help")
# Help Menu
self.help_action = QtGui.QAction("Show &QtConsole help", self,
triggered=self._show_help)
self.online_help_action = QtGui.QAction("Open online &help", self,
triggered=self._open_online_help)
self.add_menu_action(self.help_menu, self.help_action)
self.add_menu_action(self.help_menu, self.online_help_action)
def _set_active_frontend_focus(self):
# This is a hack: self.active_frontend._control seems to be a private
# member. Unfortunately it is the only way to set focus reliably.
QtCore.QTimer.singleShot(200, self.active_frontend._control.setFocus)
# minimize/maximize/fullscreen actions:
def toggle_menu_bar(self):
menu_bar = self.menuBar()
menu_bar.setVisible(not menu_bar.isVisible())
def toggleMinimized(self):
if not self.isMinimized():
self.showMinimized()
else:
self.showNormal()
def _show_help(self):
self.active_frontend._page(gui_reference)
def _open_online_help(self):
filename = "http://ipython.org/ipython-doc/stable/index.html"
webbrowser.open(filename, new=1, autoraise=True)
def toggleMaximized(self):
if not self.isMaximized():
self.showMaximized()
else:
self.showNormal()
# Minimizing/maximizing while in full screen triggers a bug when leaving
# full screen, at least on OSX.
def toggleFullScreen(self):
if not self.isFullScreen():
self.showFullScreen()
if sys.platform == 'darwin':
self.maximizeAct.setEnabled(False)
self.minimizeAct.setEnabled(False)
else:
self.showNormal()
if sys.platform == 'darwin':
self.maximizeAct.setEnabled(True)
self.minimizeAct.setEnabled(True)
def set_paging_active_frontend(self, paging):
self.active_frontend._set_paging(paging)
def close_active_frontend(self):
self.close_tab(self.active_frontend)
def restart_kernel_active_frontend(self):
self.active_frontend.request_restart_kernel()
def interrupt_kernel_active_frontend(self):
self.active_frontend.request_interrupt_kernel()
def toggle_confirm_restart_active_frontend(self):
widget = self.active_frontend
widget.confirm_restart = not widget.confirm_restart
self.confirm_restart_kernel_action.setChecked(widget.confirm_restart)
def update_restart_checkbox(self):
if self.active_frontend is None:
return
widget = self.active_frontend
self.confirm_restart_kernel_action.setChecked(widget.confirm_restart)
def clear_active_frontend(self):
self.active_frontend.clear()
def cut_active_frontend(self):
widget = self.active_frontend
if widget.can_cut():
widget.cut()
def copy_active_frontend(self):
widget = self.active_frontend
widget.copy()
def copy_raw_active_frontend(self):
self.active_frontend._copy_raw_action.trigger()
def paste_active_frontend(self):
widget = self.active_frontend
if widget.can_paste():
widget.paste()
def undo_active_frontend(self):
self.active_frontend.undo()
def redo_active_frontend(self):
self.active_frontend.redo()
def print_action_active_frontend(self):
self.active_frontend.print_action.trigger()
def export_action_active_frontend(self):
self.active_frontend.export_action.trigger()
def select_all_active_frontend(self):
self.active_frontend.select_all_action.trigger()
def increase_font_size_active_frontend(self):
self.active_frontend.increase_font_size.trigger()
def decrease_font_size_active_frontend(self):
self.active_frontend.decrease_font_size.trigger()
def reset_font_size_active_frontend(self):
self.active_frontend.reset_font_size.trigger()
#---------------------------------------------------------------------------
# QWidget interface
#---------------------------------------------------------------------------
def closeEvent(self, event):
""" Forward the close event to every tabs contained by the windows
"""
if self.tab_widget.count() == 0:
# no tabs, just close
event.accept()
return
# Do not loop on the widget count, as it changes while tabs are closing
title = self.window().windowTitle()
cancel = QtGui.QMessageBox.Cancel
okay = QtGui.QMessageBox.Ok
accept_role = QtGui.QMessageBox.AcceptRole
if self.confirm_exit:
if self.tab_widget.count() > 1:
msg = "Close all tabs, stop all kernels, and Quit?"
else:
msg = "Close console, stop kernel, and Quit?"
info = "Kernels not started here (e.g. notebooks) will be left alone."
closeall = QtGui.QPushButton("&Quit", self)
closeall.setShortcut('Q')
box = QtGui.QMessageBox(QtGui.QMessageBox.Question,
title, msg)
box.setInformativeText(info)
box.addButton(cancel)
box.addButton(closeall, QtGui.QMessageBox.YesRole)
box.setDefaultButton(closeall)
box.setEscapeButton(cancel)
pixmap = QtGui.QPixmap(self._app.icon.pixmap(QtCore.QSize(64,64)))
box.setIconPixmap(pixmap)
reply = box.exec_()
else:
reply = okay
if reply == cancel:
event.ignore()
return
if reply == okay or reply == accept_role:
while self.tab_widget.count() >= 1:
# prevent further confirmations:
widget = self.active_frontend
widget._confirm_exit = False
self.close_tab(widget)
event.accept()
| gpl-3.0 |
CSC-ORG/Dynamic-Dashboard-2015 | engine/lib/python2.7/site-packages/django/contrib/contenttypes/tests/tests.py | 12 | 10973 | from __future__ import unicode_literals
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes.views import shortcut
from django.contrib.sites.shortcuts import get_current_site
from django.core.management import call_command
from django.http import HttpRequest, Http404
from django.test import TestCase, override_settings, skipUnlessDBFeature
from django.test.utils import override_system_checks
from django.utils import six
from .models import ConcreteModel, ProxyModel, FooWithoutUrl, FooWithUrl, FooWithBrokenAbsoluteUrl
class ContentTypesTests(TestCase):
def setUp(self):
ContentType.objects.clear_cache()
def tearDown(self):
ContentType.objects.clear_cache()
def test_lookup_cache(self):
"""
Make sure that the content type cache (see ContentTypeManager)
works correctly. Lookups for a particular content type -- by model, ID
or natural key -- should hit the database only on the first lookup.
"""
# At this point, a lookup for a ContentType should hit the DB
with self.assertNumQueries(1):
ContentType.objects.get_for_model(ContentType)
# A second hit, though, won't hit the DB, nor will a lookup by ID
# or natural key
with self.assertNumQueries(0):
ct = ContentType.objects.get_for_model(ContentType)
with self.assertNumQueries(0):
ContentType.objects.get_for_id(ct.id)
with self.assertNumQueries(0):
ContentType.objects.get_by_natural_key('contenttypes',
'contenttype')
# Once we clear the cache, another lookup will again hit the DB
ContentType.objects.clear_cache()
with self.assertNumQueries(1):
ContentType.objects.get_for_model(ContentType)
# The same should happen with a lookup by natural key
ContentType.objects.clear_cache()
with self.assertNumQueries(1):
ContentType.objects.get_by_natural_key('contenttypes',
'contenttype')
# And a second hit shouldn't hit the DB
with self.assertNumQueries(0):
ContentType.objects.get_by_natural_key('contenttypes',
'contenttype')
def test_get_for_models_empty_cache(self):
# Empty cache.
with self.assertNumQueries(1):
cts = ContentType.objects.get_for_models(ContentType, FooWithUrl)
self.assertEqual(cts, {
ContentType: ContentType.objects.get_for_model(ContentType),
FooWithUrl: ContentType.objects.get_for_model(FooWithUrl),
})
def test_get_for_models_partial_cache(self):
# Partial cache
ContentType.objects.get_for_model(ContentType)
with self.assertNumQueries(1):
cts = ContentType.objects.get_for_models(ContentType, FooWithUrl)
self.assertEqual(cts, {
ContentType: ContentType.objects.get_for_model(ContentType),
FooWithUrl: ContentType.objects.get_for_model(FooWithUrl),
})
def test_get_for_models_full_cache(self):
# Full cache
ContentType.objects.get_for_model(ContentType)
ContentType.objects.get_for_model(FooWithUrl)
with self.assertNumQueries(0):
cts = ContentType.objects.get_for_models(ContentType, FooWithUrl)
self.assertEqual(cts, {
ContentType: ContentType.objects.get_for_model(ContentType),
FooWithUrl: ContentType.objects.get_for_model(FooWithUrl),
})
def test_get_for_concrete_model(self):
"""
Make sure the `for_concrete_model` kwarg correctly works
with concrete, proxy and deferred models
"""
concrete_model_ct = ContentType.objects.get_for_model(ConcreteModel)
self.assertEqual(concrete_model_ct,
ContentType.objects.get_for_model(ProxyModel))
self.assertEqual(concrete_model_ct,
ContentType.objects.get_for_model(ConcreteModel,
for_concrete_model=False))
proxy_model_ct = ContentType.objects.get_for_model(ProxyModel,
for_concrete_model=False)
self.assertNotEqual(concrete_model_ct, proxy_model_ct)
# Make sure deferred models are correctly handled
ConcreteModel.objects.create(name="Concrete")
DeferredConcreteModel = ConcreteModel.objects.only('pk').get().__class__
DeferredProxyModel = ProxyModel.objects.only('pk').get().__class__
self.assertEqual(concrete_model_ct,
ContentType.objects.get_for_model(DeferredConcreteModel))
self.assertEqual(concrete_model_ct,
ContentType.objects.get_for_model(DeferredConcreteModel,
for_concrete_model=False))
self.assertEqual(concrete_model_ct,
ContentType.objects.get_for_model(DeferredProxyModel))
self.assertEqual(proxy_model_ct,
ContentType.objects.get_for_model(DeferredProxyModel,
for_concrete_model=False))
def test_get_for_concrete_models(self):
"""
Make sure the `for_concrete_models` kwarg correctly works
with concrete, proxy and deferred models.
"""
concrete_model_ct = ContentType.objects.get_for_model(ConcreteModel)
cts = ContentType.objects.get_for_models(ConcreteModel, ProxyModel)
self.assertEqual(cts, {
ConcreteModel: concrete_model_ct,
ProxyModel: concrete_model_ct,
})
proxy_model_ct = ContentType.objects.get_for_model(ProxyModel,
for_concrete_model=False)
cts = ContentType.objects.get_for_models(ConcreteModel, ProxyModel,
for_concrete_models=False)
self.assertEqual(cts, {
ConcreteModel: concrete_model_ct,
ProxyModel: proxy_model_ct,
})
# Make sure deferred models are correctly handled
ConcreteModel.objects.create(name="Concrete")
DeferredConcreteModel = ConcreteModel.objects.only('pk').get().__class__
DeferredProxyModel = ProxyModel.objects.only('pk').get().__class__
cts = ContentType.objects.get_for_models(DeferredConcreteModel,
DeferredProxyModel)
self.assertEqual(cts, {
DeferredConcreteModel: concrete_model_ct,
DeferredProxyModel: concrete_model_ct,
})
cts = ContentType.objects.get_for_models(DeferredConcreteModel,
DeferredProxyModel,
for_concrete_models=False)
self.assertEqual(cts, {
DeferredConcreteModel: concrete_model_ct,
DeferredProxyModel: proxy_model_ct,
})
@override_settings(ALLOWED_HOSTS=['example.com'])
def test_shortcut_view(self):
"""
Check that the shortcut view (used for the admin "view on site"
functionality) returns a complete URL regardless of whether the sites
framework is installed
"""
request = HttpRequest()
request.META = {
"SERVER_NAME": "Example.com",
"SERVER_PORT": "80",
}
user_ct = ContentType.objects.get_for_model(FooWithUrl)
obj = FooWithUrl.objects.create(name="john")
with self.modify_settings(INSTALLED_APPS={'append': 'django.contrib.sites'}):
response = shortcut(request, user_ct.id, obj.id)
self.assertEqual("http://%s/users/john/" % get_current_site(request).domain,
response._headers.get("location")[1])
with self.modify_settings(INSTALLED_APPS={'remove': 'django.contrib.sites'}):
response = shortcut(request, user_ct.id, obj.id)
self.assertEqual("http://Example.com/users/john/",
response._headers.get("location")[1])
def test_shortcut_view_without_get_absolute_url(self):
"""
Check that the shortcut view (used for the admin "view on site"
functionality) returns 404 when get_absolute_url is not defined.
"""
request = HttpRequest()
request.META = {
"SERVER_NAME": "Example.com",
"SERVER_PORT": "80",
}
user_ct = ContentType.objects.get_for_model(FooWithoutUrl)
obj = FooWithoutUrl.objects.create(name="john")
self.assertRaises(Http404, shortcut, request, user_ct.id, obj.id)
def test_shortcut_view_with_broken_get_absolute_url(self):
"""
Check that the shortcut view does not catch an AttributeError raised
by the model's get_absolute_url method.
Refs #8997.
"""
request = HttpRequest()
request.META = {
"SERVER_NAME": "Example.com",
"SERVER_PORT": "80",
}
user_ct = ContentType.objects.get_for_model(FooWithBrokenAbsoluteUrl)
obj = FooWithBrokenAbsoluteUrl.objects.create(name="john")
self.assertRaises(AttributeError, shortcut, request, user_ct.id, obj.id)
def test_missing_model(self):
"""
Ensures that displaying content types in admin (or anywhere) doesn't
break on leftover content type records in the DB for which no model
is defined anymore.
"""
ct = ContentType.objects.create(
name='Old model',
app_label='contenttypes',
model='OldModel',
)
self.assertEqual(six.text_type(ct), 'Old model')
self.assertIsNone(ct.model_class())
# Make sure stale ContentTypes can be fetched like any other object.
# Before Django 1.6 this caused a NoneType error in the caching mechanism.
# Instead, just return the ContentType object and let the app detect stale states.
ct_fetched = ContentType.objects.get_for_id(ct.pk)
self.assertIsNone(ct_fetched.model_class())
class MigrateTests(TestCase):
@skipUnlessDBFeature('can_rollback_ddl')
@override_system_checks([])
def test_unmigrating_first_migration_post_migrate_signal(self):
"""
#24075 - When unmigrating an app before its first migration,
post_migrate signal handler must be aware of the missing tables.
"""
try:
with override_settings(
INSTALLED_APPS=["django.contrib.contenttypes"],
MIGRATION_MODULES={'contenttypes': 'django.contrib.contenttypes.migrations'},
):
call_command("migrate", "contenttypes", "zero", verbosity=0)
finally:
call_command("migrate", verbosity=0)
| mit |
koichi626/hadoop-gpu | hadoop-gpu-0.20.1/build/contrib/hod/testing/testModule.py | 182 | 2187 | #Licensed to the Apache Software Foundation (ASF) under one
#or more contributor license agreements. See the NOTICE file
#distributed with this work for additional information
#regarding copyright ownership. The ASF licenses this file
#to you under the Apache License, Version 2.0 (the
#"License"); you may not use this file except in compliance
#with the License. You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#Unless required by applicable law or agreed to in writing, software
#distributed under the License is distributed on an "AS IS" BASIS,
#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#See the License for the specific language governing permissions and
#limitations under the License.
import unittest, os, sys, re, threading, time
myDirectory = os.path.realpath(sys.argv[0])
rootDirectory = re.sub("/testing/.*", "", myDirectory)
sys.path.append(rootDirectory)
from testing.lib import BaseTestSuite
excludes = ['test_MINITEST3']
# All test-case classes should have the naming convention test_.*
class test_MINITEST1(unittest.TestCase):
def setUp(self):
pass
# All testMethods have to have their names start with 'test'
def testSuccess(self):
pass
def testFailure(self):
pass
def tearDown(self):
pass
class test_MINITEST2(unittest.TestCase):
def setUp(self):
pass
# All testMethods have to have their names start with 'test'
def testSuccess(self):
pass
def testFailure(self):
pass
def tearDown(self):
pass
class test_MINITEST3(unittest.TestCase):
def setUp(self):
pass
# All testMethods have to have their names start with 'test'
def testSuccess(self):
pass
def testFailure(self):
pass
def tearDown(self):
pass
class ModuleTestSuite(BaseTestSuite):
def __init__(self):
# suite setup
BaseTestSuite.__init__(self, __name__, excludes)
pass
def cleanUp(self):
# suite tearDown
pass
def RunModuleTests():
# modulename_suite
suite = ModuleTestSuite()
testResult = suite.runTests()
suite.cleanUp()
return testResult
if __name__ == "__main__":
RunModuleTests()
| apache-2.0 |
sonnyhu/numpy | numpy/lib/info.py | 61 | 6353 | """
Basic functions used by several sub-packages and
useful to have in the main name-space.
Type Handling
-------------
================ ===================
iscomplexobj Test for complex object, scalar result
isrealobj Test for real object, scalar result
iscomplex Test for complex elements, array result
isreal Test for real elements, array result
imag Imaginary part
real Real part
real_if_close Turns complex number with tiny imaginary part to real
isneginf Tests for negative infinity, array result
isposinf Tests for positive infinity, array result
isnan Tests for nans, array result
isinf Tests for infinity, array result
isfinite Tests for finite numbers, array result
isscalar True if argument is a scalar
nan_to_num Replaces NaN's with 0 and infinities with large numbers
cast Dictionary of functions to force cast to each type
common_type Determine the minimum common type code for a group
of arrays
mintypecode Return minimal allowed common typecode.
================ ===================
Index Tricks
------------
================ ===================
mgrid Method which allows easy construction of N-d
'mesh-grids'
``r_`` Append and construct arrays: turns slice objects into
ranges and concatenates them, for 2d arrays appends rows.
index_exp Konrad Hinsen's index_expression class instance which
can be useful for building complicated slicing syntax.
================ ===================
Useful Functions
----------------
================ ===================
select Extension of where to multiple conditions and choices
extract Extract 1d array from flattened array according to mask
insert Insert 1d array of values into Nd array according to mask
linspace Evenly spaced samples in linear space
logspace Evenly spaced samples in logarithmic space
fix Round x to nearest integer towards zero
mod Modulo mod(x,y) = x % y except keeps sign of y
amax Array maximum along axis
amin Array minimum along axis
ptp Array max-min along axis
cumsum Cumulative sum along axis
prod Product of elements along axis
cumprod Cumulative product along axis
diff Discrete differences along axis
angle Returns angle of complex argument
unwrap Unwrap phase along given axis (1-d algorithm)
sort_complex Sort a complex-array (based on real, then imaginary)
trim_zeros Trim the leading and trailing zeros from 1D array.
vectorize A class that wraps a Python function taking scalar
arguments into a generalized function which can handle
arrays of arguments using the broadcast rules of
numerix Python.
================ ===================
Shape Manipulation
------------------
================ ===================
squeeze Return a with length-one dimensions removed.
atleast_1d Force arrays to be > 1D
atleast_2d Force arrays to be > 2D
atleast_3d Force arrays to be > 3D
vstack Stack arrays vertically (row on row)
hstack Stack arrays horizontally (column on column)
column_stack Stack 1D arrays as columns into 2D array
dstack Stack arrays depthwise (along third dimension)
stack Stack arrays along a new axis
split Divide array into a list of sub-arrays
hsplit Split into columns
vsplit Split into rows
dsplit Split along third dimension
================ ===================
Matrix (2D Array) Manipulations
-------------------------------
================ ===================
fliplr 2D array with columns flipped
flipud 2D array with rows flipped
rot90 Rotate a 2D array a multiple of 90 degrees
eye Return a 2D array with ones down a given diagonal
diag Construct a 2D array from a vector, or return a given
diagonal from a 2D array.
mat Construct a Matrix
bmat Build a Matrix from blocks
================ ===================
Polynomials
-----------
================ ===================
poly1d A one-dimensional polynomial class
poly Return polynomial coefficients from roots
roots Find roots of polynomial given coefficients
polyint Integrate polynomial
polyder Differentiate polynomial
polyadd Add polynomials
polysub Subtract polynomials
polymul Multiply polynomials
polydiv Divide polynomials
polyval Evaluate polynomial at given argument
================ ===================
Import Tricks
-------------
================ ===================
ppimport Postpone module import until trying to use it
ppimport_attr Postpone module import until trying to use its attribute
ppresolve Import postponed module and return it.
================ ===================
Machine Arithmetics
-------------------
================ ===================
machar_single Single precision floating point arithmetic parameters
machar_double Double precision floating point arithmetic parameters
================ ===================
Threading Tricks
----------------
================ ===================
ParallelExec Execute commands in parallel thread.
================ ===================
1D Array Set Operations
-----------------------
Set operations for 1D numeric arrays based on sort() function.
================ ===================
ediff1d Array difference (auxiliary function).
unique Unique elements of an array.
intersect1d Intersection of 1D arrays with unique elements.
setxor1d Set exclusive-or of 1D arrays with unique elements.
in1d Test whether elements in a 1D array are also present in
another array.
union1d Union of 1D arrays with unique elements.
setdiff1d Set difference of 1D arrays with unique elements.
================ ===================
"""
from __future__ import division, absolute_import, print_function
depends = ['core', 'testing']
global_symbols = ['*']
| bsd-3-clause |
fillycheezstake/MissionPlanner | ExtLibs/Mavlink/mavgen.py | 34 | 3007 | #!/usr/bin/env python
'''
parse a MAVLink protocol XML file and generate a python implementation
Copyright Andrew Tridgell 2011
Released under GNU GPL version 3 or later
'''
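# Typical invocation (illustrative; the flags are defined by the OptionParser
# at the bottom of this file, and the XML filename is hypothetical):
#   python mavgen.py --lang=python --output=mavlink common.xml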
def mavgen(opts, args):
"""Generate MAVLink message formatters and parsers (C and Python) using options
and args where args are a list of xml files. This function allows python
scripts under Windows to control mavgen using the same interface as
shell scripts under Unix"""
import sys, textwrap, os
import mavparse
import mavgen_python
import mavgen_c
import mavgen_csharp
xml = []
for fname in args:
print("Parsing %s" % fname)
xml.append(mavparse.MAVXML(fname, opts.wire_protocol))
# expand includes
for x in xml[:]:
for i in x.include:
fname = os.path.join(os.path.dirname(x.filename), i)
print("Parsing %s" % fname)
xml.append(mavparse.MAVXML(fname, opts.wire_protocol))
# include message lengths and CRCs too
for idx in range(0, 256):
if x.message_lengths[idx] == 0:
x.message_lengths[idx] = xml[-1].message_lengths[idx]
x.message_crcs[idx] = xml[-1].message_crcs[idx]
x.message_names[idx] = xml[-1].message_names[idx]
# work out max payload size across all includes
largest_payload = 0
for x in xml:
if x.largest_payload > largest_payload:
largest_payload = x.largest_payload
for x in xml:
x.largest_payload = largest_payload
if mavparse.check_duplicates(xml):
sys.exit(1)
print("Found %u MAVLink message types in %u XML files" % (
mavparse.total_msgs(xml), len(xml)))
if opts.language == 'python':
mavgen_python.generate(opts.output, xml)
elif opts.language == 'C':
mavgen_c.generate(opts.output, xml)
elif opts.language == 'csharp':
mavgen_csharp.generate(opts.output, xml)
else:
print("Unsupported language %s" % opts.language)
if __name__=="__main__":
import sys, textwrap, os
from optparse import OptionParser
# allow import from the parent directory, where mavutil.py is
sys.path.insert(0, os.path.join(os.path.dirname(os.path.realpath(__file__)), '..'))
import mavparse
import mavgen_python
import mavgen_c
parser = OptionParser("mavgen.py [options] <XML files>")
parser.add_option("-o", "--output", dest="output", default="mavlink", help="output base name")
parser.add_option("--lang", dest="language", default="python", help="language to generate")
parser.add_option("--wire-protocol", dest="wire_protocol", default=mavparse.PROTOCOL_0_9, help="wire protocol version")
(opts, args) = parser.parse_args()
if len(args) < 1:
parser.error("You must supply at least one MAVLink XML protocol definition")
mavgen(opts, args)
| gpl-3.0 |
SebastianLloret/Clever-Bot | libpasteurize/fixes/fix_kwargs.py | 61 | 6008 | u"""
Fixer for Python 3 function parameter syntax
This fixer is rather sensitive to incorrect py3k syntax.
"""
# Note: "relevant" parameters are parameters following the first STAR in the list.
from lib2to3 import fixer_base
from lib2to3.fixer_util import token, String, Newline, Comma, Name
from libfuturize.fixer_util import indentation, suitify, DoubleStar
_assign_template = u"%(name)s = %(kwargs)s['%(name)s']; del %(kwargs)s['%(name)s']"
_if_template = u"if '%(name)s' in %(kwargs)s: %(assign)s"
_else_template = u"else: %(name)s = %(default)s"
_kwargs_default_name = u"_3to2kwargs"
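# Rendered with name=u'a', kwargs=_kwargs_default_name and default u'1', the
# templates above expand to (illustrative):
#   if 'a' in _3to2kwargs: a = _3to2kwargs['a']; del _3to2kwargs['a']
#   else: a = 1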
def gen_params(raw_params):
u"""
Generator that yields tuples of (name, default_value) for each parameter in the list
If no default is given, default_value is None (not Leaf(token.NAME, 'None'))
"""
assert raw_params[0].type == token.STAR and len(raw_params) > 2
curr_idx = 2 # the first place a keyword-only parameter name can be is index 2
max_idx = len(raw_params)
while curr_idx < max_idx:
curr_item = raw_params[curr_idx]
prev_item = curr_item.prev_sibling
if curr_item.type != token.NAME:
curr_idx += 1
continue
if prev_item is not None and prev_item.type == token.DOUBLESTAR:
break
name = curr_item.value
nxt = curr_item.next_sibling
if nxt is not None and nxt.type == token.EQUAL:
default_value = nxt.next_sibling
curr_idx += 2
else:
default_value = None
yield (name, default_value)
curr_idx += 1
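# Illustrative walk-through (hypothetical parse leaves): for
#   def f(*, a, b=1, **kw): ...
# raw_params is roughly [STAR, COMMA, NAME('a'), COMMA, NAME('b'), EQUAL,
# NUMBER('1'), COMMA, DOUBLESTAR, NAME('kw')], and gen_params yields
# ('a', None) then ('b', <leaf '1'>), stopping at the '**'.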
def remove_params(raw_params, kwargs_default=_kwargs_default_name):
u"""
Removes all keyword-only args from the params list and a bare star, if any.
Does not add the kwargs dict if needed.
Returns True if more action is needed, False if not
(more action is needed if no kwargs dict exists)
"""
assert raw_params[0].type == token.STAR
if raw_params[1].type == token.COMMA:
raw_params[0].remove()
raw_params[1].remove()
kw_params = raw_params[2:]
else:
kw_params = raw_params[3:]
for param in kw_params:
if param.type != token.DOUBLESTAR:
param.remove()
else:
return False
else:
return True
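# e.g. (assumed): for "*, a, **kw" only the "**kw" part survives and False
# is returned (a kwargs dict already exists); for "*, a" everything after
# the star is stripped and True is returned, so the caller must add one.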
def needs_fixing(raw_params, kwargs_default=_kwargs_default_name):
u"""
Returns a string with the name of the kwargs dict if the params after the
first star need fixing; otherwise returns an empty string.
"""
found_kwargs = False
needs_fix = False
for t in raw_params[2:]:
if t.type == token.COMMA:
# Commas are irrelevant at this stage.
continue
elif t.type == token.NAME and not found_kwargs:
# Keyword-only argument: definitely need to fix.
needs_fix = True
elif t.type == token.NAME and found_kwargs:
# Return the 'foobar' of **foobar, if fixing is needed.
return t.value if needs_fix else u''
elif t.type == token.DOUBLESTAR:
# Found the '**' of a **foobar parameter.
found_kwargs = True
else:
# Never found **foobar. Return a synthetic name, if needed.
return kwargs_default if needs_fix else u''
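# e.g. (assumed): "*, a, **kw" -> u'kw'; "*, a" -> _kwargs_default_name;
# "*args, **kw" (no keyword-only names) -> u''.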
class FixKwargs(fixer_base.BaseFix):
run_order = 7 # Run after function annotations are removed
PATTERN = u"funcdef< 'def' NAME parameters< '(' arglist=typedargslist< params=any* > ')' > ':' suite=any >"
def transform(self, node, results):
params_rawlist = results[u"params"]
for i, item in enumerate(params_rawlist):
if item.type == token.STAR:
params_rawlist = params_rawlist[i:]
break
else:
return
# params is guaranteed to be a list starting with *.
# if fixing is needed, there will be at least 3 items in this list:
# [STAR, COMMA, NAME] is the minimum that we need to worry about.
new_kwargs = needs_fixing(params_rawlist)
# new_kwargs is the name of the kwargs dictionary.
if not new_kwargs:
return
suitify(node)
# At this point, params_rawlist is guaranteed to be a list
# beginning with a star that includes at least one keyword-only param
# e.g., [STAR, NAME, COMMA, NAME, COMMA, DOUBLESTAR, NAME] or
# [STAR, COMMA, NAME], or [STAR, COMMA, NAME, COMMA, DOUBLESTAR, NAME]
# Anatomy of a funcdef: ['def', 'name', parameters, ':', suite]
# Anatomy of that suite: [NEWLINE, INDENT, first_stmt, all_other_stmts]
# We need to insert our new stuff before the first_stmt and change the
# first_stmt's prefix.
suite = node.children[4]
first_stmt = suite.children[2]
ident = indentation(first_stmt)
for name, default_value in gen_params(params_rawlist):
if default_value is None:
suite.insert_child(2, Newline())
suite.insert_child(2, String(_assign_template %{u'name':name, u'kwargs':new_kwargs}, prefix=ident))
else:
suite.insert_child(2, Newline())
suite.insert_child(2, String(_else_template %{u'name':name, u'default':default_value}, prefix=ident))
suite.insert_child(2, Newline())
suite.insert_child(2, String(_if_template %{u'assign':_assign_template %{u'name':name, u'kwargs':new_kwargs}, u'name':name, u'kwargs':new_kwargs}, prefix=ident))
first_stmt.prefix = ident
suite.children[2].prefix = u""
# Now, we need to fix up the list of params.
must_add_kwargs = remove_params(params_rawlist)
if must_add_kwargs:
arglist = results[u'arglist']
if len(arglist.children) > 0 and arglist.children[-1].type != token.COMMA:
arglist.append_child(Comma())
arglist.append_child(DoubleStar(prefix=u" "))
arglist.append_child(Name(new_kwargs))
| gpl-3.0 |
jazkarta/edx-platform | common/lib/xmodule/xmodule/vertical_block.py | 56 | 5689 | """
VerticalBlock - an XBlock which renders its children in a column.
"""
import logging
from copy import copy
from lxml import etree
from xblock.core import XBlock
from xblock.fragment import Fragment
from xmodule.mako_module import MakoTemplateBlockBase
from xmodule.progress import Progress
from xmodule.seq_module import SequenceFields
from xmodule.studio_editable import StudioEditableBlock
from xmodule.x_module import STUDENT_VIEW, XModuleFields
from xmodule.xml_module import XmlParserMixin
log = logging.getLogger(__name__)
# HACK: This shouldn't be hard-coded to two types
# OBSOLETE: This obsoletes 'type'
CLASS_PRIORITY = ['video', 'problem']
class VerticalBlock(SequenceFields, XModuleFields, StudioEditableBlock, XmlParserMixin, MakoTemplateBlockBase, XBlock):
"""
Layout XBlock for rendering subblocks vertically.
"""
mako_template = 'widgets/sequence-edit.html'
js_module_name = "VerticalBlock"
has_children = True
show_in_read_only_mode = True
def student_view(self, context):
"""
Renders the student view of the block in the LMS.
"""
fragment = Fragment()
contents = []
child_context = {} if not context else copy(context)
child_context['child_of_vertical'] = True
# pylint: disable=no-member
for child in self.get_display_items():
rendered_child = child.render(STUDENT_VIEW, child_context)
fragment.add_frag_resources(rendered_child)
contents.append({
'id': child.location.to_deprecated_string(),
'content': rendered_child.content
})
fragment.add_content(self.system.render_template('vert_module.html', {
'items': contents,
'xblock_context': context,
}))
return fragment
def author_view(self, context):
"""
Renders the Studio preview view, which supports drag and drop.
"""
fragment = Fragment()
root_xblock = context.get('root_xblock')
is_root = root_xblock and root_xblock.location == self.location # pylint: disable=no-member
# For the container page we want the full drag-and-drop, but for unit pages we want
# a more concise version that appears alongside the "View =>" link-- unless it is
# the unit page and the vertical being rendered is itself the unit vertical (is_root == True).
if is_root or not context.get('is_unit_page'):
self.render_children(context, fragment, can_reorder=True, can_add=True)
return fragment
def get_progress(self):
"""
Returns the progress on this block and all children.
"""
# TODO: Cache progress or children array?
children = self.get_children()
progresses = [child.get_progress() for child in children]
progress = reduce(Progress.add_counts, progresses, None)
return progress
def get_icon_class(self):
"""
Returns the highest priority icon class.
"""
child_classes = set(child.get_icon_class() for child in self.get_children())
new_class = 'other'
for higher_class in CLASS_PRIORITY:
if higher_class in child_classes:
new_class = higher_class
return new_class
@classmethod
def definition_from_xml(cls, xml_object, system):
children = []
for child in xml_object:
try:
child_block = system.process_xml(etree.tostring(child, encoding='unicode')) # pylint: disable=no-member
children.append(child_block.scope_ids.usage_id)
except Exception as exc: # pylint: disable=broad-except
log.exception("Unable to load child when parsing Vertical. Continuing...")
if system.error_tracker is not None:
system.error_tracker(u"ERROR: {0}".format(exc))
continue
return {}, children
def definition_to_xml(self, resource_fs):
xml_object = etree.Element('vertical') # pylint: disable=no-member
for child in self.get_children():
self.runtime.add_block_as_child_node(child, xml_object)
return xml_object
@property
def non_editable_metadata_fields(self):
"""
Gather all fields which can't be edited.
"""
non_editable_fields = super(VerticalBlock, self).non_editable_metadata_fields
non_editable_fields.extend([
self.fields['due'],
])
return non_editable_fields
def studio_view(self, context):
fragment = super(VerticalBlock, self).studio_view(context)
# This continues to use the old XModuleDescriptor javascript code to enable studio editing.
# TODO: Remove this when studio better supports editing of pure XBlocks.
fragment.add_javascript('VerticalBlock = XModule.Descriptor;')
return fragment
def index_dictionary(self):
"""
Return dictionary prepared with module content and type for indexing.
"""
# return key/value fields in a Python dict object
# values may be numeric / string or dict
# default implementation is an empty dict
xblock_body = super(VerticalBlock, self).index_dictionary()
index_body = {
"display_name": self.display_name,
}
if "content" in xblock_body:
xblock_body["content"].update(index_body)
else:
xblock_body["content"] = index_body
# We use "Sequence" for sequentials and verticals
xblock_body["content_type"] = "Sequence"
return xblock_body
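# Example of the resulting dict (illustrative):
#   {"content": {"display_name": "Unit 1"}, "content_type": "Sequence"}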
| agpl-3.0 |
jonathonwalz/ansible | lib/ansible/modules/storage/zfs/zpool_facts.py | 69 | 6653 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2016, Adam Števko <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: zpool_facts
short_description: Gather facts about ZFS pools.
description:
- Gather facts from ZFS pool properties.
version_added: "2.3"
author: Adam Števko (@xen0l)
options:
name:
description:
- ZFS pool name.
aliases: [ "pool", "zpool" ]
required: false
parsable:
description:
- Specifies if property values should be displayed in machine
friendly format.
type: bool
default: False
required: false
properties:
description:
- Specifies which pool properties should be queried in comma-separated format.
For more information about pool properties, check the zpool(1M) man page.
aliases: [ "props" ]
default: all
required: false
'''
EXAMPLES = '''
# Gather facts about ZFS pool rpool
zpool_facts: pool=rpool
# Gather space usage about all imported ZFS pools
zpool_facts: properties='free,size'
debug: msg='ZFS pool {{ item.name }} has {{ item.free }} free space out of {{ item.size }}.'
with_items: '{{ ansible_zfs_pools }}'
'''
RETURN = '''
name:
description: ZFS pool name
returned: always
type: string
sample: rpool
parsable:
description: if parsable output should be provided in machine friendly format.
returned: if 'parsable' is set to True
type: boolean
sample: True
zfs_pools:
description: ZFS pool facts
returned: always
type: string
sample:
{
"allocated": "3.46G",
"altroot": "-",
"autoexpand": "off",
"autoreplace": "off",
"bootfs": "rpool/ROOT/openindiana",
"cachefile": "-",
"capacity": "6%",
"comment": "-",
"dedupditto": "0",
"dedupratio": "1.00x",
"delegation": "on",
"expandsize": "-",
"failmode": "wait",
"feature@async_destroy": "enabled",
"feature@bookmarks": "enabled",
"feature@edonr": "enabled",
"feature@embedded_data": "active",
"feature@empty_bpobj": "active",
"feature@enabled_txg": "active",
"feature@extensible_dataset": "enabled",
"feature@filesystem_limits": "enabled",
"feature@hole_birth": "active",
"feature@large_blocks": "enabled",
"feature@lz4_compress": "active",
"feature@multi_vdev_crash_dump": "enabled",
"feature@sha512": "enabled",
"feature@skein": "enabled",
"feature@spacemap_histogram": "active",
"fragmentation": "3%",
"free": "46.3G",
"freeing": "0",
"guid": "15729052870819522408",
"health": "ONLINE",
"leaked": "0",
"listsnapshots": "off",
"name": "rpool",
"readonly": "off",
"size": "49.8G",
"version": "-"
}
'''
import os
from collections import defaultdict
from ansible.module_utils.six import iteritems
from ansible.module_utils.basic import AnsibleModule
class ZPoolFacts(object):
def __init__(self, module):
self.module = module
self.name = module.params['name']
self.parsable = module.params['parsable']
self.properties = module.params['properties']
self._pools = defaultdict(dict)
self.facts = []
def pool_exists(self):
cmd = [self.module.get_bin_path('zpool')]
cmd.append('list')
cmd.append(self.name)
(rc, out, err) = self.module.run_command(cmd)
return rc == 0
def get_facts(self):
cmd = [self.module.get_bin_path('zpool')]
cmd.append('get')
cmd.append('-H')
if self.parsable:
cmd.append('-p')
cmd.append('-o')
cmd.append('name,property,value')
cmd.append(self.properties)
if self.name:
cmd.append(self.name)
(rc, out, err) = self.module.run_command(cmd)
if rc == 0:
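# Each line of 'zpool get -H -o name,property,value' output is a
# tab-separated triple, e.g. (illustrative): "rpool\tsize\t49.8G"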
for line in out.splitlines():
pool, property, value = line.split('\t')
self._pools[pool].update({property: value})
for k, v in iteritems(self._pools):
v.update({'name': k})
self.facts.append(v)
return {'ansible_zfs_pools': self.facts}
else:
self.module.fail_json(msg='Error while trying to get facts about ZFS pool: %s' % self.name,
stderr=err,
rc=rc)
def main():
module = AnsibleModule(
argument_spec=dict(
name=dict(required=False, aliases=['pool', 'zpool'], type='str'),
parsable=dict(required=False, default=False, type='bool'),
properties=dict(required=False, default='all', type='str'),
),
supports_check_mode=True
)
zpool_facts = ZPoolFacts(module)
result = {}
result['changed'] = False
result['name'] = zpool_facts.name
if zpool_facts.parsable:
result['parsable'] = zpool_facts.parsable
if zpool_facts.name is not None:
if zpool_facts.pool_exists():
result['ansible_facts'] = zpool_facts.get_facts()
else:
module.fail_json(msg='ZFS pool %s does not exist!' % zpool_facts.name)
else:
result['ansible_facts'] = zpool_facts.get_facts()
module.exit_json(**result)
if __name__ == '__main__':
main()
| gpl-3.0 |
zzicewind/nova | nova/tests/unit/objects/test_virt_cpu_topology.py | 94 | 1397 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova import objects
from nova.tests.unit.objects import test_objects
_top_dict = {
'sockets': 2,
'cores': 4,
'threads': 8
}
class _TestVirtCPUTopologyObject(object):
def test_object_from_dict(self):
top_obj = objects.VirtCPUTopology.from_dict(_top_dict)
self.compare_obj(top_obj, _top_dict)
def test_object_to_dict(self):
top_obj = objects.VirtCPUTopology()
top_obj.sockets = 2
top_obj.cores = 4
top_obj.threads = 8
spec = top_obj.to_dict()
self.assertEqual(_top_dict, spec)
class TestVirtCPUTopologyObject(test_objects._LocalTest,
_TestVirtCPUTopologyObject):
pass
class TestRemoteVirtCPUTopologyObject(test_objects._RemoteTest,
_TestVirtCPUTopologyObject):
pass
| apache-2.0 |