import gc, os, sys, string, re, pdb, scipy.stats, cPickle
import Mapping2, getNewer1000GSNPAnnotations, Bowtie, binom, GetCNVAnnotations, dictmerge, utils, InBindingSite
TABLE=string.maketrans('ACGTacgt', 'TGCAtgca')
USAGE="%s mindepth snpfile bindingsites cnvfile outfile logfile countfile [countfile ...]"
def reverseComplement(seq):
tmp=seq[::-1]
return tmp.translate(TABLE)
def makeMappers(maptmplt):
mappers={}
cs=['chr%s' % str(c) for c in range(1,23)] + ['chrX', 'chrY', 'chrM']
for c in cs:
f=maptmplt % c
if os.path.exists(f):
mappers[c] = Mapping2.Mapping(f)
return mappers
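# Thresholds used by testCounts() below: THRESH1 is the minimum fraction of reads
# that must match the expected alleles at a site; THRESH2 is the two-sided binomial
# p-value cutoff below which an allelic imbalance is flagged as asymmetric.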
THRESH1=0.90
THRESH2=0.05
SYMMETRIC="Sym"
ASYMMETRIC="Asym"
HOMOZYGOUS="Homo"
WEIRD="Weird"
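# IUPAC nucleotide ambiguity codes mapped to the corresponding allele pair.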
tbl={
'a':('a','a'),
'c':('c','c'),
'g':('g','g'),
't':('t','t'),
'r':('a','g'),
'y':('c','t'),
's':('c','g'),
'w':('a','t'),
'k':('g','t'),
'm':('a','c')
}
def convert(a):
return tbl[a.lower()]
def testCounts(counts, chrm, snprec):
winningParent='?'
ref_pos, mat_genotype, pat_genotype, child_genotype, mat_allele, pat_allele, typ, ref, hetSNP = snprec
# first, make sure that the expected alleles are the bulk of the counts
total = counts['a']+counts['c']+counts['g']+counts['t']
a1,a2=convert(child_genotype)
if a1==a2:
allelecnts = counts[a1]
else:
allelecnts = counts[a1]+counts[a2]
both=counts[a1]+counts[a2]
sortedCounts=sorted([(counts['a'], 'a'), (counts['c'],'c'), (counts['g'], 'g'), (counts['t'], 't')], reverse=True)
majorAllele=sortedCounts[0][1]
smaller=min(counts[a1], counts[a2])
#pval=binomialDist.cdf(smaller, both, 0.5)*2 # This had problems for large sample sizes. Switched to using scipy
pval = binom.binomtest(smaller, both, 0.5) # scipy.binom_test was unstable for large counts
if float(allelecnts)/total < THRESH1:
print >>LOGFP, "WARNING %s:%d failed thresh 1 %d %d" % (chrm, ref_pos, allelecnts, total)
return (WEIRD, pval, a1, a2, counts, winningParent)
# if the snp was phased
if mat_allele and pat_allele:
if mat_allele.lower()==majorAllele.lower():
winningParent='M'
elif pat_allele.lower()==majorAllele.lower():
winningParent='P'
else:
winningParent='?'
if a1!=a2:
# we expect roughly 50/50.
if pval < THRESH2:
print >>LOGFP, "NOTE %s:%d Looks interesting: failed thresh 2 %d %d %f" % (chrm, ref_pos, both, smaller, pval)
print >>LOGFP, "SNPS %s/%s, COUNTS a:%d c:%d g:%d t:%d" % (a1, a2, counts['a'], counts['c'], counts['g'], counts['t'])
print >>LOGFP, "Phasing P:%s M:%s D:%s" % (pat_allele, mat_allele, snprec)
print >>LOGFP, "\n"
return (ASYMMETRIC, pval, a1, a2, counts, winningParent)
else:
return (SYMMETRIC, pval, a1, a2, counts, winningParent)
else:
return (HOMOZYGOUS, pval, a1, a2, counts, winningParent)
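# Worked sketch of the classification above (illustrative numbers, not from the
# source): for a het call 'm' (a/c) with counts a=18, c=2, g=0, t=0, every read
# matches an expected allele (passes THRESH1), smaller=2, both=20, and the
# two-sided binomial p-value against 0.5 is ~4e-4 < THRESH2, so the site is
# reported as ASYMMETRIC.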
def process(chrm, snppos, counts, snprec, CNVhandler):
ref_pos, mat_genotype, pat_genotype, child_genotype, mat_allele, pat_allele, typ, ref, hetSNP = snprec
t, pval, a1, a2, counts, winningParent = testCounts(counts, chrm, snprec)
#if t==ASYMMETRIC or t==SYMMETRIC:
# hetSnps+=1
#if t==ASYMMETRIC:
# interestingSnps+=1
if BShandler:
inBS=1 if BShandler.check("chr%s"%chrm, snppos) else 0
else:
inBS=-1
cnv=CNVhandler.getAnnotation("chr%s"%chrm, snppos)
if cnv:
cnv=cnv[2]
else:
cnv='1.0'
#nd, np = scipy.stats.kstest(ksvals, 'uniform', (0.0, 1.0))
print >>OUTFP, utils.myFormat('\t', (chrm, snppos, ref, mat_genotype, pat_genotype, child_genotype, typ, mat_allele, pat_allele, counts['a'], counts['c'], counts['g'], counts['t'], winningParent, t, pval, inBS, cnv))
OUTFP.flush()
# This is used to order the chromosomes 1,2,3,...,22,X,Y. Tricky, eh?
def chrcmp(a, b):
try:
a=int(a)
except:
pass
try:
b=int(b)
except:
pass
return cmp(a,b)
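# Minimal sketch of the resulting order (hypothetical helper, not used by the
# pipeline): Python 2's cmp() orders ints before strings, so numeric chromosome
# names sort numerically and 'X', 'Y', 'M' follow.
def _chrcmp_example():
    return sorted(['2', 'X', '10', '1'], chrcmp)  # -> ['1', '2', '10', 'X']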
if __name__=='__main__':
if len(sys.argv) < 7:
print USAGE % sys.argv[0]
sys.exit(-1)
mindepth=int(sys.argv[1])
snpfile=sys.argv[2]
BindingSitefile=sys.argv[3]
CNVFile=sys.argv[4]
OUTFP = open(sys.argv[5], 'w')
LOGFP = open(sys.argv[6], 'w')
countfiles=sys.argv[7:]
if os.access(BindingSitefile, os.R_OK):
BShandler=InBindingSite.BSHandler(BindingSitefile)
else:
BShandler=None
CNVhandler=GetCNVAnnotations.Handler(CNVFile)
hetSnps=0
interestingSnps=0
gc.disable()
pat=re.compile('chr(\w+)_([mp]aternal)')
print >>OUTFP, utils.myFormat('\t', ['chrm', 'snppos ', 'ref', 'mat_gtyp', 'pat_gtyp', 'c_gtyp', 'phase', 'mat_all', 'pat_all', 'cA', 'cC', 'cG', 'cT', 'winning', 'SymCls', 'SymPval', 'BindingSite', 'cnv'])
ref_1000G=getNewer1000GSNPAnnotations.Handler(snpfile, None, 'PAT', hasHeader=True, onlyHets=True)
counts={}
for countfile in countfiles:
temp=cPickle.load(open(countfile))
dictmerge.accum(counts, temp, lambda : 0, lambda a, b: a+b)
for chrm in sorted(counts.keys(), chrcmp):
for pos in sorted(counts[chrm].keys()):
total = sum(counts[chrm][pos].values())
if total >= mindepth:
process(chrm, pos, counts[chrm][pos], ref_1000G.getAnnotation(chrm, pos), CNVhandler)
|
# coding: utf8
import BTrees
import StringIO
import ZODB
import os
import threading
import transaction
import zeit.connector.cache
import zeit.connector.testing
import zope.app.testing.functional
class TestResourceCache(zope.app.testing.functional.FunctionalTestCase):
layer = zeit.connector.testing.zope_connector_layer
def setUp(self):
super(TestResourceCache, self).setUp()
self.cache = zeit.connector.cache.ResourceCache()
self.getRootFolder()['cache'] = self.cache
self.properties1 = {('getetag', 'DAV:'): 'etag1'}
self.properties2 = {('getetag', 'DAV:'): 'etag2'}
self.uniqueId = u'föö'
self.key = zeit.connector.cache.get_storage_key(self.uniqueId)
self.BUFFER_SIZE = zeit.connector.cache.Body.BUFFER_SIZE
def test_etag_migration(self):
self.cache._etags = BTrees.family64.OO.BTree()
self.cache._etags[self.key] = 'etag1'
data = zeit.connector.cache.SlottedStringRef('data')
self.cache._data[self.key] = data
self.assertEquals(
'data',
self.cache.getData(self.uniqueId, self.properties1).read())
del self.cache._etags[self.key]
self.assertRaises(
KeyError, self.cache.getData, self.uniqueId, self.properties1)
del self.cache._etags
self.assertRaises(
KeyError, self.cache.getData, self.uniqueId, self.properties1)
def test_missing_blob_file(self):
data1 = StringIO.StringIO(self.BUFFER_SIZE* 2 * 'x')
data2 = StringIO.StringIO(self.BUFFER_SIZE* 2 * 'y')
self.cache.setData(self.uniqueId, self.properties1, data1)
transaction.commit()
body = self.cache._data[self.key]
os.remove(body.data.committed())
del body.data._p_changed # Invalidate, thus force reload
self.assertRaises(KeyError,
self.cache.getData, self.uniqueId, self.properties1)
self.cache.setData(self.uniqueId, self.properties2, data2)
self.assertEquals(
data2.getvalue(),
self.cache.getData(self.uniqueId, self.properties2).read())
def test_missing_blob_file_with_legacy_data(self):
data = ZODB.blob.Blob()
data.open('w').write('ablob')
self.cache._data[self.key] = data
self.cache._etags = BTrees.family64.OO.BTree()
self.cache._etags[self.key] = 'etag1'
transaction.commit()
os.remove(data.committed())
del data._p_changed
self.assertRaises(KeyError,
self.cache.getData, self.uniqueId, self.properties1)
data2 = StringIO.StringIO(self.BUFFER_SIZE * 2 * 'y')
self.cache.setData(self.uniqueId, self.properties2, data2)
self.assertEquals(
data2.getvalue(),
self.cache.getData(self.uniqueId, self.properties2).read())
def test_blob_conflict_resolution(self):
size = zeit.connector.cache.Body.BUFFER_SIZE
body = StringIO.StringIO('body' * size)
def store():
transaction.abort()
self.cache.setData(self.uniqueId, self.properties1, body)
transaction.commit()
t1 = threading.Thread(target=store)
t2 = threading.Thread(target=store)
t1.start()
t2.start()
t1.join()
t2.join()
|
from nsbaseresource import NSBaseResource
from nsconfig import NSConfig
from nscspolicy import NSCSPolicy
from nscsvserver import NSCSVServer
from nscsvservercspolicybinding import NSCSVServerCSPolicyBinding
from nscsvserverresponderpolicybinding import NSCSVServerResponderPolicyBinding
from nscsvserverrewritepolicybinding import NSCSVServerRewritePolicyBinding
from nslbvserver import NSLBVServer
from nslbvserverservicebinding import NSLBVServerServiceBinding
from nslbvservercsvserverbinding import NSLBVServerCSVserverBinding
from nsresponderaction import NSResponderAction
from nsresponderpolicy import NSResponderPolicy
from nsresponderpolicylabel import NSResponderPolicyLabel
from nsresponderpolicylabelbinding import NSResponderPolicyLabelBinding
from nsresponderpolicycsvserverbinding import NSResponderPolicyCSVServerBinding
from nsrewritepolicy import NSRewritePolicy
from nsrewritepolicycsvserverbinding import NSRewritePolicyCSVServerBinding
from nsservice import NSService
from nsserver import NSServer
from nsservicegroup import NSServiceGroup
from nsservicelbmonitorbinding import NSServiceLBMonitorBinding
from nssslcertkey import NSSSLCertKey
from nssslcertkeysslvserverbinding import NSSSLCertKeySSLVServerBinding
from nssslvserver import NSSSLVServer
from nssslvserversslcertkeybinding import NSSSLVServerSSLCertKeyBinding
from nshanode import NSHANode
from nsip import NSIP
from nsvlan import NSVLAN
from nsvlaninterfacebinding import NSVLANInterfaceBinding
from nsvlannsipbinding import NSVLANNSIPBinding
from nsfeature import NSFeature
from nsrewriteaction import NSRewriteAction
from nslbmonitorservicebinding import NSLBMonitorServiceBinding
from nssystemcmdpolicy import NSSystemCMDPolicy
from nsacl import NSAcl
from nsacls import NSAcls
__all__ = ['NSBaseResource',
'NSConfig',
'NSCSPolicy',
'NSCSVServer',
'NSCSVServerCSPolicyBinding',
'NSCSVServerResponderPolicyBinding',
'NSCSVServerRewritePolicyBinding',
'NSLBVServer',
'NSLBVServerServiceBinding',
'NSLBVServerCSVserverBinding',
'NSResponderAction',
'NSResponderPolicy',
'NSResponderPolicyLabel',
'NSResponderPolicyLabelBinding',
'NSResponderPolicyCSVServerBinding',
'NSRewritePolicy',
'NSRewritePolicyCSVServerBinding',
'NSServer',
'NSService',
'NSServiceGroup',
'NSServiceLBMonitorBinding',
'NSSSLCertKey',
'NSSSLCertKeySSLVServerBinding',
'NSSSLVServer',
'NSSSLVServerSSLCertKeyBinding',
'NSHANode',
'NSIP',
'NSVLAN',
'NSVLANInterfaceBinding',
'NSVLANNSIPBinding',
'NSFeature',
'NSRewriteAction',
'NSLBMonitorServiceBinding',
'NSSystemCMDPolicy',
'NSAcl',
'NSAcls'
]
|
# -*- coding: utf-8 -*-
from CsAddress import CsAddress
from CsDatabag import CsCmdLine
class CsConfig(object):
"""
A class to cache all the stuff that the other classes need
"""
__LOG_FILE = "/var/log/cloud.log"
__LOG_LEVEL = "DEBUG"
__LOG_FORMAT = "%(asctime)s %(levelname)-8s %(message)s"
cl = None
def __init__(self):
self.fw = []
self.ingress_rules = {}
def set_address(self):
self.ips = CsAddress("ips", self)
@classmethod
def get_cmdline_instance(cls):
if cls.cl is None:
cls.cl = CsCmdLine("cmdline")
return cls.cl
def cmdline(self):
return self.get_cmdline_instance()
def address(self):
return self.ips
def get_fw(self):
return self.fw
def get_ingress_rules(self, key):
if self.ingress_rules.has_key(key):
return self.ingress_rules[key]
return None
def set_ingress_rules(self, key, ingress_rules):
self.ingress_rules[key] = ingress_rules
def get_logger(self):
return self.__LOG_FILE
def get_level(self):
return self.__LOG_LEVEL
def is_vpc(self):
return self.cl.get_type() == 'vpcrouter'
def is_router(self):
return self.cl.get_type() == 'router'
def is_dhcp(self):
return self.cl.get_type() == 'dhcpsrvr'
def has_dns(self):
return not self.use_extdns()
def has_metadata(self):
return any((self.is_vpc(), self.is_router(), self.is_dhcp()))
def use_extdns(self):
return self.cmdline().idata().get('useextdns', 'false') == 'true'
def get_domain(self):
return self.cl.get_domain()
def get_dns(self):
conf = self.cmdline().idata()
dns = []
if not self.use_extdns():
if not self.is_vpc() and self.cl.is_redundant() and self.cl.get_guest_gw():
dns.append(self.cl.get_guest_gw())
else:
dns.append(self.address().get_guest_ip())
for name in ["dns1", "dns2"]:
if name in conf:
dns.append(conf[name])
return dns
def get_format(self):
return self.__LOG_FORMAT
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from .unittestbase import PostgreSQLTestCase
from nose.plugins.attrib import attr
from nose.tools import eq_, assert_raises
import datetime
from socorro.external.postgresql.backfill import Backfill
from socorro.external.postgresql import staticdata, fakedata
from socorro.external import MissingArgumentError
from socorro.lib import datetimeutil
#==============================================================================
@attr(integration='postgres')
class TestBackfill(PostgreSQLTestCase):
"""Tests the calling of all backfill functions"""
#--------------------------------------------------------------------------
def setUp(self):
""" Populate tables with fake data """
super(TestBackfill, self).setUp()
cursor = self.connection.cursor()
self.tables = []
for table in staticdata.tables + fakedata.tables:
# staticdata has no concept of duration
if table.__module__ == 'socorro.external.postgresql.staticdata':
table = table()
else:
table = table(days=1)
table.releases = {
'WaterWolf': {
'channels': {
'Nightly': {
'versions': [{
'number': '18.0',
'probability': 0.5,
'buildid': '%s000020'
}],
'adu': '10',
'repository': 'nightly',
'throttle': '1',
'update_channel': 'nightly',
},
},
'crashes_per_hour': '5',
'guid': '{[email protected]}'
},
'B2G': {
'channels': {
'Nightly': {
'versions': [{
'number': '18.0',
'probability': 0.5,
'buildid': '%s000020'
}],
'adu': '10',
'repository': 'nightly',
'throttle': '1',
'update_channel': 'nightly',
},
},
'crashes_per_hour': '5',
'guid': '{[email protected]}'
}
}
table_name = table.table
table_columns = table.columns
values = str(tuple(["%(" + i + ")s" for i in table_columns]))
columns = str(tuple(table_columns))
self.tables.append(table_name)
# TODO: backfill_reports_clean() sometimes tries to insert an
# os_version_id that already exists
if table_name != "os_versions":
for rows in table.generate_rows():
data = dict(zip(table_columns, rows))
query = "INSERT INTO %(table)s " % {'table': table_name}
query = query + columns.replace("'", "").replace(",)", ")")
query = query + " VALUES "
query = query + values.replace(",)", ")").replace("'", "")
cursor.execute(query, data)
self.connection.commit()
#--------------------------------------------------------------------------
def tearDown(self):
""" Cleanup the database, delete tables and functions """
cursor = self.connection.cursor()
tables = str(self.tables).replace("[", "").replace("]", "")
cursor.execute("TRUNCATE " + tables.replace("'", "") + " CASCADE;")
self.connection.commit()
self.connection.close()
super(TestBackfill, self).tearDown()
#--------------------------------------------------------------------------
def setup_data(self):
self.now = datetimeutil.utc_now()
now = self.now.date()
yesterday = now - datetime.timedelta(days=1)
lastweek = now - datetime.timedelta(days=7)
now_str = datetimeutil.date_to_string(now)
yesterday_str = datetimeutil.date_to_string(yesterday)
lastweek_str = datetimeutil.date_to_string(lastweek)
self.test_source_data = {
# Test backfill_adu
'adu': {
'params': {
"update_day": yesterday_str,
},
'res_expected': [(True,)],
},
# Test backfill_all_dups
'all_dups': {
'params': {
"start_date": yesterday_str,
"end_date": now_str,
},
'res_expected': [(True,)],
},
# Test backfill_build_adu
'build_adu': {
'params': {
"update_day": yesterday_str,
},
'res_expected': [(True,)],
},
# Test backfill_correlations
'correlations': {
'params': {
"update_day": yesterday_str,
},
'res_expected': [(True,)],
},
# Test backfill_crashes_by_user_build
'crashes_by_user_build': {
'params': {
"update_day": yesterday_str,
},
'res_expected': [(True,)],
},
# Test backfill_crashes_by_user
'crashes_by_user': {
'params': {
"update_day": yesterday_str,
},
'res_expected': [(True,)],
},
# TODO: Test backfill_daily_crashes tries to insert into a table
# that does not exist. It can be fixed by creating a temporary one.
#'daily_crashes': {
# 'params': {
# "update_day": now_str,
# },
# 'res_expected': [(True,)],
# },
# Test backfill_exploitability
'exploitability': {
'params': {
"update_day": yesterday_str,
},
'res_expected': [(True,)],
},
# Test backfill_explosiveness
'explosiveness': {
'params': {
"update_day": yesterday_str,
},
'res_expected': [(True,)],
},
# Test backfill_home_page_graph_build
'home_page_graph_build': {
'params': {
"update_day": yesterday_str,
},
'res_expected': [(True,)],
},
# Test backfill_home_page_graph
'home_page_graph': {
'params': {
"update_day": yesterday_str,
},
'res_expected': [(True,)],
},
# Test backfill_matviews
'matviews': {
'params': {
"start_date": yesterday_str,
"reports_clean": 'false',
},
'res_expected': [(True,)],
},
# Test backfill_nightly_builds
'nightly_builds': {
'params': {
"update_day": yesterday_str,
},
'res_expected': [(True,)],
},
# Test backfill_rank_compare
'rank_compare': {
'params': {
"update_day": yesterday_str,
},
'res_expected': [(True,)],
},
# Test backfill_reports_clean
'reports_clean': {
'params': {
"start_date": yesterday_str,
"end_date": now_str,
},
'res_expected': [(True,)],
},
# TODO: Test backfill_reports_duplicates tries to insert into a
# table that does not exist. It can be fixed by using the update
# function inside the backfill.
#'reports_duplicates': {
# 'params': {
# "start_date": yesterday_str,
# "end_date": now_str,
# },
# 'res_expected': [(True,)],
# },
# TODO: Test backfill_signature_counts tries to insert into
# tables and to update functions that do not exist.
#'signature_counts': {
# 'params': {
# "start_date": yesterday_str,
# "end_date": now_str,
# },
# 'res_expected': [(True,)],
# },
# Test backfill_tcbs_build
'tcbs_build': {
'params': {
"update_day": yesterday_str,
},
'res_expected': [(True,)],
},
# Test backfill_tcbs
'tcbs': {
'params': {
"update_day": yesterday_str,
},
'res_expected': [(True,)],
},
# Test backfill_weekly_report_partitions
'weekly_report_partitions': {
'params': {
"start_date": lastweek_str,
"end_date": now_str,
"table_name": 'raw_crashes',
},
'res_expected': [(True,)],
},
# TODO: Update Backfill to support signature_summary backfill
# through the API
#'signature_summary_products': {
# 'params': {
# "update_day": yesterday_str,
# },
# 'res_expected': [(True,)],
#},
#'signature_summary_installations': {
# 'params': {
# "update_day": yesterday_str,
# },
# 'res_expected': [(True,)],
#},
#'signature_summary_uptime': {
# 'params': {
# "update_day": yesterday_str,
# },
# 'res_expected': [(True,)],
#},
#'signature_summary_os': {
# 'params': {
# "update_day": yesterday_str,
# },
# 'res_expected': [(True,)],
#},
#'signature_summary_process_type': {
# 'params': {
# "update_day": yesterday_str,
# },
# 'res_expected': [(True,)],
#},
#'signature_summary_architecture': {
# 'params': {
# "update_day": yesterday_str,
# },
# 'res_expected': [(True,)],
#},
#'signature_summary_flash_version': {
# 'params': {
# "update_day": yesterday_str,
# },
# 'res_expected': [(True,)],
#},
#'signature_summary_device': {
# 'params': {
# "update_day": yesterday_str,
# },
# 'res_expected': [(True,)],
#},
#'signature_summary_graphics': {
# 'params': {
# "update_day": yesterday_str,
# },
# 'res_expected': [(True,)],
#},
}
#--------------------------------------------------------------------------
def test_get(self):
backfill = Backfill(config=self.config)
#......................................................................
# Test raise error if kind of backfill is not passed
params = {"backfill_type": ''}
assert_raises(MissingArgumentError, backfill.get, **params)
#......................................................................
# Test all the backfill functions
self.setup_data()
for test, data in self.test_source_data.items():
data['params']['backfill_type'] = str(test)
res = backfill.get(**data['params'])
eq_(res[0], data['res_expected'][0])
|
from django.db import models
class SponsorManager(models.Manager):
def active(self):
return self.get_query_set().filter(active=True).order_by("level")
def with_weblogo(self):
queryset = self.raw("""
SELECT DISTINCT
"sponsorship_sponsor"."id",
"sponsorship_sponsor"."applicant_id",
"sponsorship_sponsor"."name",
"sponsorship_sponsor"."external_url",
"sponsorship_sponsor"."annotation",
"sponsorship_sponsor"."contact_name",
"sponsorship_sponsor"."contact_email",
"sponsorship_sponsor"."level_id",
"sponsorship_sponsor"."added",
"sponsorship_sponsor"."active",
"sponsorship_sponsorlevel"."order"
FROM
"sponsorship_sponsor"
INNER JOIN
"sponsorship_sponsorbenefit" ON ("sponsorship_sponsor"."id" = "sponsorship_sponsorbenefit"."sponsor_id")
INNER JOIN
"sponsorship_benefit" ON ("sponsorship_sponsorbenefit"."benefit_id" = "sponsorship_benefit"."id")
LEFT OUTER JOIN
"sponsorship_sponsorlevel" ON ("sponsorship_sponsor"."level_id" = "sponsorship_sponsorlevel"."id")
WHERE (
"sponsorship_sponsor"."active" = 't' AND
"sponsorship_benefit"."type" = 'weblogo' AND
"sponsorship_sponsorbenefit"."upload" != ''
)
ORDER BY "sponsorship_sponsorlevel"."order" ASC, "sponsorship_sponsor"."added" ASC
""")
return queryset
|
# update_hi - receive binary and i) parse it, ii) update json tally as needed, iii) store .hi file for later
# get_hi -- fetch a bin for the emu
# get_json_tally - dump highscore table as json (for fancy frontend to display, say)
# get_html_tally - dump highscore in vaguely readable html table (for web browser quickies)
# get_last_modify_epoch - get epoch-time of last tally modify
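# A rough sketch (assumption, inferred from the keys used below) of the request
# dict these handlers expect:
#   req = { 'gamename': 'mspacman', 'prid': '<profile id>',
#           '_bindata': <raw .hi bytes>, '_binlen': <length of _bindata> }
# update_hi(req) merges the parsed score into the JSON tally and rewrites the
# .hi file; get_json_tally(req) / get_html_tally(req) put their output back
# into req['_bindata'] and req['_binlen'].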
import logging
import json
import array
import os
import pprint
import time
import traceback
import profile
from paths import _basepath
import modulemap
import activity_log
SCOREBOARD_MAX=500
logging.info ( "LOADING: singlescore_handler" )
# "score" should not be supplied, unless its multiscore sending its shit here
def update_hi ( req, score_int=None ):
#pp = pprint.PrettyPrinter ( indent=4 )
# base game path
writepath = _basepath ( req )
try:
logging.debug ( "Attempt to create dirs %s" % ( writepath ) )
os.makedirs ( writepath )
except:
pass
# pull up existing tally file
#
tally = _read_tally ( req )
sb = tally [ 'scoreboard' ]
# parse new hi buffer
#
if score_int:
hi = score_int
else:
hi = parse_hi_bin ( req, req [ '_bindata' ] )
# is any of this new buffer better than existing tally?
# if so, update tally file and record it
# if not, we're done
# new tally update? great ..
# .. store hi-file
# .. store new tally file
# -------
# does this score factor into the high score table, or is it too low to count?
if False and hi < sb [ SCOREBOARD_MAX - 1 ][ 'score' ]:
logging.info ( "hidb - %s - submitter score of %d is NOT sufficient to enter scoreboard (lowest %d, highest %d)" % ( req [ 'gamename' ], hi, sb [ SCOREBOARD_MAX - 1 ][ 'score' ], sb [ 0 ][ 'score' ] ) )
return
# is the score the same as the existing top? if so, they are likely just resubmitting the score they pulled down, so.. discard
if False and hi == sb [ 0 ][ 'score' ]:
logging.info ( "hidb - %s - submitter score of %d is same as highest score .. probably just looping. (lowest %d, highest %d)" % ( req [ 'gamename' ], hi, sb [ SCOREBOARD_MAX - 1 ][ 'score' ], sb [ 0 ][ 'score' ] ) )
return
# okay, so the guy's score is at least better than one of them.. start at the top, pushing our way down
if False:
logging.info ( "hidb - %s - submitter score of %d IS sufficient to enter scoreboard (lowest %d, highest %d)" % ( req [ 'gamename' ], hi, sb [ SCOREBOARD_MAX - 1 ][ 'score' ], sb [ 0 ][ 'score' ] ) )
# determine desired sort order
order = 'highest-first'
try:
_order = modulemap.gamemap [ req [ 'gamename' ] ] [ '_general'] [ 'ordering' ]
logging.info ( 'hidb - %s - ordering from conf is %s' % ( req [ 'gamename' ], _order ) )
if _order in ( 'highest-first' ,'lowest-first' ):
order = _order
else:
order = 'highest-first'
except:
pp = pprint.PrettyPrinter(indent=4)
pp.pprint ( modulemap.gamemap [ req [ 'gamename' ] ] )
print modulemap.gamemap [ req [ 'gamename' ] ]
traceback.print_exc()
logging.info ( 'hidb - %s - ordering -> exception .. assuming highest-first' % ( req [ 'gamename' ] ) )
order = 'highest-first'
logging.info ( 'hidb - %s - ordering to use is %s' % ( req [ 'gamename' ], order ) )
# create new score entry
d = dict()
d [ 'prid' ] = req [ 'prid' ]
d [ 'score' ] = hi
d [ 'time' ] = int ( time.time() )
# old: insert with manual assumed-ascending sort order
if False:
for i in range ( SCOREBOARD_MAX ):
if hi > sb [ i ][ 'score' ]:
# log the activity
activity_log.log_entry ( req, d, i )
# insert
sb.insert ( i, d )
# drop off last guy
sb.pop()
# if we updated the first entry, the very highest score, spit out a new .hi file
# (mspacman only has a single high score, so we only update it for the highest score.. not a whole table)
if i == 0 and score_int == None:
f = open ( writepath + req [ 'gamename' ] + ".hi", "w" )
f.write ( build_hi_bin ( req, sb [ 0 ][ 'score' ] ) )
f.close()
break
# insert at first, assuming a post-sort; we can drop the 'worst' entry after sort
if True:
sb.insert ( 0, d )
# update activity log.. try to find the entry match and publish it
if True:
for i in range ( SCOREBOARD_MAX ):
if d [ 'prid' ] == sb [ i ] [ 'prid' ] and d [ 'score' ] == sb [ i ] [ 'score' ] and d [ 'time' ] == sb [ i ] [ 'time' ]:
activity_log.log_entry ( req, d, i )
break
# post-sort to games desired sort order
# reverse=False -> ascending (lowest first), lowest is best
# reverse=True -> descending (highest first), highest is best -> most typical case
def _sortvalue ( entry ):
if entry [ 'score' ] == 0:
if order == 'lowest-first':
return 999999999999
else:
return -1
else:
return entry [ 'score' ]
if True:
reversify = True
if order == 'lowest-first':
reversify = False
try:
sb.sort ( key=_sortvalue, reverse=reversify )
except:
traceback.print_exc()
# drop 'worst' (last, since we sorted) entry
if True:
sb.pop()
#logging.info ( 'hidb - %s - sorted ' % ( req [ 'gamename' ] ) )
# update stats and write out the updated tally file
tally [ 'hi' ] = sb [ 0 ][ 'score' ]
tally [ 'prid' ] = sb [ 0 ][ 'prid' ]
tallyfile = json.dumps ( tally )
f = open ( writepath + req [ 'gamename' ] + ".json", "w" )
f.write ( tallyfile )
f.close()
#logging.debug ( "received len %d" % ( req [ '_binlen' ] ) )
return
def get_hi ( req ):
req [ '_bindata' ] = build_hi_bin ( req, 0 )
req [ '_binlen' ] = len ( req [ '_bindata' ] )
logging.info ( "%s - pulled generated zero-score hi file (len %s)" % ( req [ 'gamename' ], req [ '_binlen' ] ) )
'''
writepath = _basepath ( req )
try:
f = open ( writepath + req [ 'gamename' ] + ".hi", "r" )
bindata = f.read()
f.close()
req [ '_bindata' ] = bindata
req [ '_binlen' ] = len ( bindata )
logging.info ( "%s - pulled existant hi file (len %s)" % ( req [ 'gamename' ], req [ '_binlen' ] ) )
except:
req [ '_bindata' ] = build_hi_bin ( req, 270 )
req [ '_binlen' ] = len ( req [ '_bindata' ] )
logging.info ( "%s - pulled generated zero-score hi file (len %s)" % ( req [ 'gamename' ], req [ '_binlen' ] ) )
'''
return
def get_json_tally ( req, raw=False ):
tally = _read_tally ( req )
for ent in tally [ 'scoreboard' ]:
prident = profile.fetch_pridfile_as_dict ( ent [ 'prid' ] )
if prident == None:
prident = profile.NULL_PROFILE
ent [ 'shortname' ] = prident [ 'shortname' ]
ent [ 'longname' ] = prident [ 'longname' ]
if '_general' in modulemap.gamemap [ req [ 'gamename' ] ]:
if 'dispunit' in modulemap.gamemap [ req [ 'gamename' ] ][ '_general' ]:
ent [ 'dispunit' ] = modulemap.gamemap [ req [ 'gamename' ] ] [ '_general' ][ 'dispunit' ]
del ent [ 'prid' ]
if raw:
req [ '_bindata' ] = tally
else:
req [ '_bindata' ] = json.dumps ( tally )
req [ '_binlen' ] = len ( req [ '_bindata' ] )
return
def get_html_tally ( req ):
tally = _read_tally ( req )
if '_backdate' in req:
if req [ '_backdate' ].isdigit():
timeframe = 'Specific Month: ' + req [ '_backdate' ]
else:
timeframe = 'All Time'
else:
timeframe = 'Current Month'
html = ''
html += "<h2>" + req [ 'gamename' ] + "</h2>\n"
html += "<h3>" + timeframe + "</h3>\n"
html += "<table>\n"
html += '<tr>\n'
html += ' <td style="padding:0 15px 0 15px;"><b>Rank</b></td>\n'
html += ' <td style="padding:0 15px 0 15px;"><b>Initial</b></td>\n'
html += ' <td style="padding:0 15px 0 15px;"><b>Name</b></td>\n'
html += ' <td style="padding:0 15px 0 15px;"><b>Score</b></td>\n'
html += ' <td style="padding:0 15px 0 15px;"><b>When</b></td>\n'
html += '</tr>\n'
i = 1
pridcache = dict()
lastprident = None
lastrun = 0 # for an RLE-like run count
for ent in tally [ 'scoreboard' ]:
prident = None
if ent [ 'prid' ]:
try:
prident = pridcache [ ent [ 'prid' ] ]
except:
prident = profile.fetch_pridfile_as_dict ( ent [ 'prid' ] )
pridcache [ ent [ 'prid' ] ] = prident
if prident == None:
prident = profile.NULL_PROFILE
tlocal = time.localtime ( ent [ 'time' ] )
tdisplay = time.strftime ( '%d-%b-%Y', tlocal )
# units
unit = ''
if '_general' in modulemap.gamemap [ req [ 'gamename' ] ]:
if 'dispunit' in modulemap.gamemap [ req [ 'gamename' ] ][ '_general' ]:
unit = ' ' + str ( modulemap.gamemap [ req [ 'gamename' ] ][ '_general' ][ 'dispunit' ] )
showrow = 1 # 0 no, 1 yes, 2 ellipses
if False: # True -> force to full length display
lastprident = None # if uncommented, forces full display .. no ellipses hidden entries
if lastprident == prident:
showrow = 0
lastrun += 1
else:
# if not first row, and the RLE run is significant .. show an ellipsis
if lastprident != None and lastrun > 0:
showrow = 2
else:
showrow = 1
# last and current are not the same, so RLE is back to zero
lastrun = 0
if showrow == 0:
pass # suppress
else:
if showrow == 2:
# so our last row is not same as this row, and last guy was not also the first
# row.. so show "..."
html += '<tr>\n'
html += ' <td style="padding:0 15px 0 15px;">' + "" + "</td>\n"
html += ' <td style="padding:0 15px 0 15px;">' + "" + "</td>\n"
html += ' <td style="padding:0 15px 0 15px;">' + "..." + "</td>\n"
html += ' <td style="padding:0 15px 0 15px;"></td>\n'
html += ' <td style="padding:0 15px 0 15px;"></td>\n'
html += '</tr>\n'
# showrow == 1, or showrow == 2 .. show this line
html += '<tr>\n'
html += ' <td style="padding:0 15px 0 15px;">' + str ( i ) + "</td>\n"
html += ' <td style="padding:0 15px 0 15px;">' + prident [ 'shortname' ] + "</td>\n"
html += ' <td style="padding:0 15px 0 15px;">' + prident [ 'longname' ] + "</td>\n"
if ent [ 'score' ] > 0:
html += ' <td style="padding:0 15px 0 15px;">' + str ( ent [ 'score' ] ) + unit + "</td>\n"
else:
html += ' <td style="padding:0 15px 0 15px;">-</td>\n'
if ent [ 'time' ] > 0:
html += ' <td style="padding:0 15px 0 15px;">' + tdisplay + "</td>\n"
else:
html += ' <td style="padding:0 15px 0 15px;"></td>\n'
html += '</tr>\n'
lastprident = prident
i += 1
html += "</table>\n"
html += "<p>%d unique profiles in the leaderboard</p>\n" % ( len ( pridcache ) )
req [ '_bindata' ] = html
req [ '_binlen' ] = len ( req [ '_bindata' ] )
return
def get_last_modify_epoch ( req ):
try:
filename = _basepath ( req ) + req [ 'gamename' ] + ".json"
return int ( os.path.getmtime ( filename ) )
except:
return 0
# ---------------
def _read_tally ( req ):
writepath = _basepath ( req )
try:
f = open ( writepath + req [ 'gamename' ] + ".json", "r" )
tallyfile = f.read()
f.close()
tally = json.loads ( tallyfile )
except:
logging.warning ( "%s - assuming new score file (all zeroes)" % ( req [ 'gamename' ] ) )
tally = dict()
tally [ 'hi' ] = 0
tally [ 'prid' ] = '_default_'
scoreboard = list()
for i in range ( SCOREBOARD_MAX ):
scoreboard.append ( { 'prid': '_default_', 'score': 0, 'time': 0 } )
tally [ 'scoreboard' ] = scoreboard
return tally
def parse_hi_bin ( req, bindata ):
return modulemap.gamemap [ req [ 'gamename' ] ][ 'module' ].parse_hi_bin ( req, bindata )
def build_hi_bin ( req, hiscore ):
return modulemap.gamemap [ req [ 'gamename' ] ][ 'module' ].build_hi_bin ( req, hiscore )
def done ( req ):
pass
|
# Copyright (C) 2016 ABRT Team
# Copyright (C) 2016 Red Hat, Inc.
#
# This file is part of faf.
#
# faf is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# faf is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with faf. If not, see <http://www.gnu.org/licenses/>.
from pyfaf.actions import Action
from pyfaf.storage.opsys import (BuildOpSysReleaseArch, Build, Package,
PackageDependency, BuildArch, BuildComponent)
from pyfaf.storage.report import ReportPackage
from pyfaf.storage.problem import ProblemOpSysRelease
from pyfaf.storage.llvm import LlvmBuild, LlvmBcFile, LlvmResultFile
class CleanupUnassigned(Action):
name = "cleanup-unassigned"
def run(self, cmdline, db) -> None:
# find all builds that are not assigned to any opsysrelease
all_builds = (db.session.query(Build)
.filter(~ db.session.query().exists().where(BuildOpSysReleaseArch.build_id == Build.id))
.yield_per(1000))
count = 0
# delete all unassigned builds and the packages belonging to them
for build in all_builds:
count += 1
q = db.session.query(Package).filter(Package.build_id == build.id)
for pkg in q.all():
self.log_info("Processing package {0}".format(pkg.nevr()))
self.delete_package(pkg, not(cmdline.force))
if cmdline.force:
db.session.query(PackageDependency).filter(PackageDependency.package_id == pkg.id).delete()
db.session.query(ReportPackage).filter(ReportPackage.installed_package_id == pkg.id).delete()
if cmdline.force:
q.delete()
db.session.query(BuildArch).filter(build.id == BuildArch.build_id).delete()
db.session.query(BuildComponent).filter(build.id == BuildComponent.build_id).delete()
db.session.query(ProblemOpSysRelease).filter(build.id
== ProblemOpSysRelease.probable_fix_build_id).delete()
q_llvm = db.session.query(LlvmBuild).filter(LlvmBuild.build_id == build.id)
for llvm in q_llvm.all():
db.session.query(LlvmBcFile).filter(LlvmBcFile.llvmbuild_id == llvm.id).delete()
db.session.query(LlvmResultFile).filter(LlvmResultFile.llvmbuild_id == llvm.id).delete()
db.session.query(Build).filter(Build.id == build.id).delete()
if count > 1000:
db.session.flush()
count = 0
def tweak_cmdline_parser(self, parser) -> None:
parser.add_argument("-f", "--force", action="store_true",
help="delete all unassigned packages."
" Without -f acts like --dry-run.")
|
from celery.utils.log import get_task_logger
from celery.signals import after_setup_task_logger
from thehonestgenepipeline.celery import celery
from riskpredictor.core import predictor as pred
from os import path
from . import GENOTYPE_FOLDER,DATA_FOLDER
from . import get_platform_from_genotype
from .progress_logger import CeleryProgressLogHandler
import h5py
import logging
logger = get_task_logger(pred.__name__)
# pass through environment
@after_setup_task_logger.connect
def setup_task_logger(**kwargs):
progress_handler = CeleryProgressLogHandler(celery,'riskprediction')
logger.addHandler(progress_handler)
@celery.task(serializer='json')
def run(id,trait):
try:
log_extra={'id':id,'progress':0,'data':trait}
logger.info('Starting Risk Prediction',extra=log_extra)
genotype_file= '%s/IMPUTED/%s.hdf5' % (GENOTYPE_FOLDER,id)
platform = get_platform_from_genotype(genotype_file)
trait_folder = '%s/PRED_DATA/%s/%s/' % (DATA_FOLDER,trait,platform)
risk = pred.predict(genotype_file,trait_folder,log_extra=log_extra)
result = {'trait':trait,'risk':risk}
logger.info('Finished Risk Prediction',extra={'id':id,'progress':100,'state':'FINISHED','data':trait})
except Exception as err:
logger.error('Error calculating risk prediction',extra=log_extra)
raise err
return result
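# Typical invocation sketch (assumption; requires a configured Celery broker and
# the genotype/trait data folders): run.delay('<genotype id>', '<trait name>')
# queues the prediction, and progress is reported through the 'riskprediction'
# channel of CeleryProgressLogHandler.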
|
#Copyright ReportLab Europe Ltd. 2000-2004
#see license.txt for license details
#history http://www.reportlab.co.uk/cgi-bin/viewcvs.cgi/public/reportlab/trunk/reportlab/graphics/widgetbase.py
__version__=''' $Id: widgetbase.py,v 1.1 2006/05/26 19:19:38 thomas Exp $ '''
import string
from reportlab.graphics import shapes
from reportlab import rl_config
from reportlab.lib import colors
from reportlab.lib.validators import *
from reportlab.lib.attrmap import *
class PropHolder:
'''Base for property holders'''
_attrMap = None
def verify(self):
"""If the _attrMap attribute is not None, this
checks all expected attributes are present; no
unwanted attributes are present; and (if a
checking function is found) checks each
attribute has a valid value. Either succeeds
or raises an informative exception.
"""
if self._attrMap is not None:
for key in self.__dict__.keys():
if key[0] <> '_':
msg = "Unexpected attribute %s found in %s" % (key, self)
assert self._attrMap.has_key(key), msg
for (attr, metavalue) in self._attrMap.items():
msg = "Missing attribute %s from %s" % (attr, self)
assert hasattr(self, attr), msg
value = getattr(self, attr)
args = (value, attr, self.__class__.__name__)
assert metavalue.validate(value), "Invalid value %s for attribute %s in class %s" % args
if rl_config.shapeChecking:
"""This adds the ability to check every attribute assignment
as it is made. It slows down shapes but is a big help when
developing. It does not get defined if rl_config.shapeChecking = 0.
"""
def __setattr__(self, name, value):
"""By default we verify. This could be off
in some parallel base classes."""
validateSetattr(self,name,value)
def getProperties(self,recur=1):
"""Returns a list of all properties which can be edited and
which are not marked as private. This may include 'child
widgets' or 'primitive shapes'. You are free to override
this and provide alternative implementations; the default
one simply returns everything without a leading underscore.
"""
from reportlab.lib.validators import isValidChild
# TODO when we need it, but not before -
# expose sequence contents?
props = {}
for name in self.__dict__.keys():
if name[0:1] <> '_':
component = getattr(self, name)
if recur and isValidChild(component):
# child object, get its properties too
childProps = component.getProperties(recur=recur)
for (childKey, childValue) in childProps.items():
#key might be something indexed like '[2].fillColor'
#or simple like 'fillColor'; in the former case we
#don't need a '.' between me and my child.
if childKey[0] == '[':
props['%s%s' % (name, childKey)] = childValue
else:
props['%s.%s' % (name, childKey)] = childValue
else:
props[name] = component
return props
def setProperties(self, propDict):
"""Permits bulk setting of properties. These may include
child objects e.g. "chart.legend.width = 200".
All assignments will be validated by the object as if they
were set individually in python code.
All properties of a top-level object are guaranteed to be
set before any of the children, which may be helpful to
widget designers.
"""
childPropDicts = {}
for (name, value) in propDict.items():
parts = string.split(name, '.', 1)
if len(parts) == 1:
#simple attribute, set it now
setattr(self, name, value)
else:
(childName, remains) = parts
try:
childPropDicts[childName][remains] = value
except KeyError:
childPropDicts[childName] = {remains: value}
# now assign to children
for (childName, childPropDict) in childPropDicts.items():
child = getattr(self, childName)
child.setProperties(childPropDict)
def dumpProperties(self, prefix=""):
"""Convenience. Lists them on standard output. You
may provide a prefix - mostly helps to generate code
samples for documentation.
"""
propList = self.getProperties().items()
propList.sort()
if prefix:
prefix = prefix + '.'
for (name, value) in propList:
print '%s%s = %s' % (prefix, name, value)
class Widget(PropHolder, shapes.UserNode):
"""Base for all user-defined widgets. Keep as simple as possible. Does
not inherit from Shape so that we can rewrite shapes without breaking
widgets and vice versa."""
def _setKeywords(self,**kw):
for k,v in kw.items():
if not self.__dict__.has_key(k):
setattr(self,k,v)
def draw(self):
msg = "draw() must be implemented for each Widget!"
raise shapes.NotImplementedError, msg
def demo(self):
msg = "demo() must be implemented for each Widget!"
raise shapes.NotImplementedError, msg
def provideNode(self):
return self.draw()
def getBounds(self):
"Return outer boundary as x1,y1,x2,y2. Can be overridden for efficiency"
return self.draw().getBounds()
_ItemWrapper={}
class TypedPropertyCollection(PropHolder):
"""A container with properties for objects of the same kind.
This makes it easy to create lists of objects. You initialize
it with a class of what it is to contain, and that is all you
can add to it. You can assign properties to the collection
as a whole, or to a numeric index within it; if so it creates
a new child object to hold that data.
So:
wedges = TypedPropertyCollection(WedgeProperties)
wedges.strokeWidth = 2 # applies to all
wedges.strokeColor = colors.red # applies to all
wedges[3].strokeColor = colors.blue # only to one
The last line should be taken as a prescription of how to
create wedge no. 3 if one is needed; no error is raised if
there are only two data points.
"""
def __init__(self, exampleClass):
#give it same validation rules as what it holds
self.__dict__['_value'] = exampleClass()
self.__dict__['_children'] = {}
def __getitem__(self, index):
try:
return self._children[index]
except KeyError:
Klass = self._value.__class__
if _ItemWrapper.has_key(Klass):
WKlass = _ItemWrapper[Klass]
else:
class WKlass(Klass):
def __getattr__(self,name):
try:
return self.__class__.__bases__[0].__getattr__(self,name)
except:
if self._index and self._parent._children.has_key(self._index):
if self._parent._children[self._index].__dict__.has_key(name):
return getattr(self._parent._children[self._index],name)
return getattr(self._parent,name)
_ItemWrapper[Klass] = WKlass
child = WKlass()
child._parent = self
if type(index) in (type(()),type([])):
index = tuple(index)
if len(index)>1:
child._index = tuple(index[:-1])
else:
child._index = None
else:
child._index = None
for i in filter(lambda x,K=child.__dict__.keys(): x in K,child._attrMap.keys()):
del child.__dict__[i]
self._children[index] = child
return child
def has_key(self,key):
if type(key) in (type(()),type([])): key = tuple(key)
return self._children.has_key(key)
def __setitem__(self, key, value):
msg = "This collection can only hold objects of type %s" % self._value.__class__.__name__
assert isinstance(value, self._value.__class__), msg
def __len__(self):
return len(self._children.keys())
def getProperties(self,recur=1):
# return any children which are defined and whatever
# differs from the parent
props = {}
for (key, value) in self._value.getProperties(recur=recur).items():
props['%s' % key] = value
for idx in self._children.keys():
childProps = self._children[idx].getProperties(recur=recur)
for (key, value) in childProps.items():
if not hasattr(self,key) or getattr(self, key)<>value:
newKey = '[%s].%s' % (idx, key)
props[newKey] = value
return props
def setVector(self,**kw):
for name, value in kw.items():
for i in xrange(len(value)):
setattr(self[i],name,value[i])
def __getattr__(self,name):
return getattr(self._value,name)
def __setattr__(self,name,value):
return setattr(self._value,name,value)
## No longer needed!
class StyleProperties(PropHolder):
"""A container class for attributes used in charts and legends.
Attributes contained can be those for any graphical element
(shape?) in the ReportLab graphics package. The idea for this
container class is to be useful in combination with legends
and/or the individual appearance of data series in charts.
A legend could be as simple as a wrapper around a list of style
properties, where the 'desc' attribute contains a descriptive
string and the rest could be used by the legend e.g. to draw
something like a color swatch. The graphical presentation of
the legend would be its own business, though.
A chart could be inspecting a legend or, more directly, a list
of style properties to pick individual attributes that it knows
about in order to render a particular row of the data. A bar
chart e.g. could simply use 'strokeColor' and 'fillColor' for
drawing the bars while a line chart could also use additional
ones like strokeWidth.
"""
_attrMap = AttrMap(
strokeWidth = AttrMapValue(isNumber),
strokeLineCap = AttrMapValue(isNumber),
strokeLineJoin = AttrMapValue(isNumber),
strokeMiterLimit = AttrMapValue(None),
strokeDashArray = AttrMapValue(isListOfNumbersOrNone),
strokeOpacity = AttrMapValue(isNumber),
strokeColor = AttrMapValue(isColorOrNone),
fillColor = AttrMapValue(isColorOrNone),
desc = AttrMapValue(isString),
)
def __init__(self, **kwargs):
"Initialize with attributes if any."
for k, v in kwargs.items():
setattr(self, k, v)
def __setattr__(self, name, value):
"Verify attribute name and value, before setting it."
validateSetattr(self,name,value)
class TwoCircles(Widget):
def __init__(self):
self.leftCircle = shapes.Circle(100,100,20, fillColor=colors.red)
self.rightCircle = shapes.Circle(300,100,20, fillColor=colors.red)
def draw(self):
return shapes.Group(self.leftCircle, self.rightCircle)
class Face(Widget):
"""This draws a face with two eyes.
It exposes a couple of properties
to configure itself and hides all other details.
"""
_attrMap = AttrMap(
x = AttrMapValue(isNumber),
y = AttrMapValue(isNumber),
size = AttrMapValue(isNumber),
skinColor = AttrMapValue(isColorOrNone),
eyeColor = AttrMapValue(isColorOrNone),
mood = AttrMapValue(OneOf('happy','sad','ok')),
)
def __init__(self):
self.x = 10
self.y = 10
self.size = 80
self.skinColor = None
self.eyeColor = colors.blue
self.mood = 'happy'
def demo(self):
pass
def draw(self):
s = self.size # abbreviate as we will use this a lot
g = shapes.Group()
g.transform = [1,0,0,1,self.x, self.y]
# background
g.add(shapes.Circle(s * 0.5, s * 0.5, s * 0.5, fillColor=self.skinColor))
# left eye
g.add(shapes.Circle(s * 0.35, s * 0.65, s * 0.1, fillColor=colors.white))
g.add(shapes.Circle(s * 0.35, s * 0.65, s * 0.05, fillColor=self.eyeColor))
# right eye
g.add(shapes.Circle(s * 0.65, s * 0.65, s * 0.1, fillColor=colors.white))
g.add(shapes.Circle(s * 0.65, s * 0.65, s * 0.05, fillColor=self.eyeColor))
# nose
g.add(shapes.Polygon(
points=[s * 0.5, s * 0.6, s * 0.4, s * 0.3, s * 0.6, s * 0.3],
fillColor=None))
# mouth
if self.mood == 'happy':
offset = -0.05
elif self.mood == 'sad':
offset = +0.05
else:
offset = 0
g.add(shapes.Polygon(
points = [
s * 0.3, s * 0.2, #left of mouth
s * 0.7, s * 0.2, #right of mouth
s * 0.6, s * (0.2 + offset), # the bit going up or down
s * 0.4, s * (0.2 + offset) # the bit going up or down
],
fillColor = colors.pink,
strokeColor = colors.red,
strokeWidth = s * 0.03
))
return g
class TwoFaces(Widget):
def __init__(self):
self.faceOne = Face()
self.faceOne.mood = "happy"
self.faceTwo = Face()
self.faceTwo.x = 100
self.faceTwo.mood = "sad"
def draw(self):
"""Just return a group"""
return shapes.Group(self.faceOne, self.faceTwo)
def demo(self):
"""The default case already looks good enough,
no implementation needed here"""
pass
class Sizer(Widget):
"Container to show size of all enclosed objects"
_attrMap = AttrMap(BASE=shapes.SolidShape,
contents = AttrMapValue(isListOfShapes,desc="Contained drawable elements"),
)
def __init__(self, *elements):
self.contents = []
self.fillColor = colors.cyan
self.strokeColor = colors.magenta
for elem in elements:
self.add(elem)
def _addNamedNode(self,name,node):
'if name is not None add an attribute pointing to node and add to the attrMap'
if name:
if name not in self._attrMap.keys():
self._attrMap[name] = AttrMapValue(isValidChild)
setattr(self, name, node)
def add(self, node, name=None):
"""Appends non-None child node to the 'contents' attribute. In addition,
if a name is provided, it is subsequently accessible by name
"""
# propagates properties down
if node is not None:
assert isValidChild(node), "Can only add Shape or UserNode objects to a Group"
self.contents.append(node)
self._addNamedNode(name,node)
def getBounds(self):
# get bounds of each object
if self.contents:
b = []
for elem in self.contents:
b.append(elem.getBounds())
return shapes.getRectsBounds(b)
else:
return (0,0,0,0)
def draw(self):
g = shapes.Group()
(x1, y1, x2, y2) = self.getBounds()
r = shapes.Rect(
x = x1,
y = y1,
width = x2-x1,
height = y2-y1,
fillColor = self.fillColor,
strokeColor = self.strokeColor
)
g.add(r)
for elem in self.contents:
g.add(elem)
return g
def test():
from reportlab.graphics.charts.piecharts import WedgeProperties
wedges = TypedPropertyCollection(WedgeProperties)
wedges.fillColor = colors.red
wedges.setVector(fillColor=(colors.blue,colors.green,colors.white))
print len(_ItemWrapper)
d = shapes.Drawing(400, 200)
tc = TwoCircles()
d.add(tc)
import renderPDF
renderPDF.drawToFile(d, 'sample_widget.pdf', 'A Sample Widget')
print 'saved sample_widget.pdf'
d = shapes.Drawing(400, 200)
f = Face()
f.skinColor = colors.yellow
f.mood = "sad"
d.add(f, name='theFace')
print 'drawing 1 properties:'
d.dumpProperties()
renderPDF.drawToFile(d, 'face.pdf', 'A Sample Widget')
print 'saved face.pdf'
d2 = d.expandUserNodes()
renderPDF.drawToFile(d2, 'face_copy.pdf', 'An expanded drawing')
print 'saved face_copy.pdf'
print 'drawing 2 properties:'
d2.dumpProperties()
if __name__=='__main__':
test()
|
import sc, random, contextlib, wave, os, math
import shlex, subprocess, signal
import NRTOSCParser3
import numpy as np
import scipy.signal
import matplotlib.pyplot as plt
# generator class for random 8-bit values
#
# Pass an optional initval to seed the starting value; otherwise a random
# starting value is chosen.
#
# call next() (or the instance itself) to actually make a new random selection
#
class RandomGenerator_8Bit(object):
def __init__(self, initval=-1):
if initval >= 0:
self.val = initval
else:
self.val = random.randint(0,256)
def next(self, scale=1.0):
self.val = random.randint(0,256) # note: the upper bound is inclusive here
return self.val
def __call__(self): return self.next()
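# Minimal usage sketch: gen = RandomGenerator_8Bit(); v = gen()
# (equivalent to gen.next()) draws a fresh random value on each call.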
# slot assignments for sigmaSynth
ALPHA = 0
C_DELAY = 1
C_DECAY = 2
BETA = 3
D_MULT = 4
GAMMA = 5
MS_BINS = 6
DELTA = 7
class GenomicExplorer:
def __init__(self, anchor, sfilenames, subdir=None, out_dir='out', size=50, margin=10, report_interval=20, mut_prob=0.01, stop_slope=0.000001, stop_maxgens=5, rev_flag=False): #, start_state=[1.0, 0.0, 1.0, 1.0, 1.0, 0.0, 1.0]
self.anchor = anchor
if subdir is not None:
self.sfpaths = [os.path.join(anchor, 'snd', subdir, sfilename) for sfilename in sfilenames]
else:
self.sfpaths = [os.path.join(anchor, 'snd', sfilename) for sfilename in sfilenames]
self.out_dir = out_dir
self.filenames = sfilenames
self.sfinfos = []
for path in self.sfpaths:
with contextlib.closing(wave.open(path,'r')) as f:
frames = f.getnframes()
rate = f.getframerate()
dur = frames/float(rate)
self.sfinfos += [{'rate':rate,'dur':dur}]
self.mutation_prob = mut_prob
self.depth = margin
self.rev_flag = rev_flag
# 'alpha', 'c_delay', 'c_decay', 'beta', 'd_mult', 'gamma', 'ms_bins'
self.parser = NRTOSCParser3.NRTOSCParser3(anchor=self.anchor)
self.rawtable, self.rawmaps, self.dists, self.pool_means, self.pool_stdevs = dict(), dict(), dict(), dict(), dict()
self.reporting_interval = report_interval
self.stopping_slope = stop_slope
self.running_avg_mean_stdevs = dict()
self.stopping_crit_min_gens = stop_maxgens
self.init_population(size=size)
def init_population(self, size):
self.population = []
for n in range(size):
# self.population += [Genome()] #random seed
self.population += [Genome(values=[0,0,0,0,0,0,0,0])] #random seed
self.population[0] = Genome(values=[0,0,0,0,0,0,0,0])
self.analyze_individual(0)
self.activate_raw_data(0)
# print self.population[0]
self.compare_all_individuals(aflag=True)
def mutate_pop(self):
for indiv in range(1, len(self.population)):
if random.random() < self.mutation_prob:
print "indiv: ", indiv
self.population[ indiv ].mutate()
self.do_update_cascade(indiv)
# This is performed once per mutation
# - only affects the individual being mutated
def do_update_cascade(self, index, clearedits=False):
if clearedits is True:
self.population[ index ].edits = 0
else:
self.population[ index ].edits += 1
self.analyze_individual( index )
self.activate_raw_data( index )
# self.compare_individual_chi_squared( index )
self.compare_individual( index )
def mate(self, a, b, kill_index):
offspring = None
if random.random() < 0.5:
offspring = self.population[a].values[:]
else:
offspring = self.population[b].values[:]
# basic random gene selection from 2 parents
for i in range(7):
if random.random() < 0.5:
offspring[i] = self.population[a].values[i]
else:
offspring[i] = self.population[b].values[i]
# replace the killed individual with our new individual
self.population[kill_index] = Genome(offspring)
self.do_update_cascade(kill_index, True)
def sort_by_distances(self, depth, rev=False):
sorted_dists = [[k, self.dists[k], self.population[k].age, self.population[k].edits] for k in sorted(self.dists.keys())]
sorted_dists = sorted(sorted_dists[1:], key = lambda row: row[1], reverse=rev) # + (maxedits - row[3])))
print 'sorted dists: '
print sorted_dists
return sorted_dists[:depth], sorted_dists[(-1*depth):]
def reproduce(self, depth=25):
#kills, duplicates = self.sort_by_distances(depth)
duplicates, kills = self.sort_by_distances(depth, self.rev_flag)
for dup in duplicates:
self.render_individual(dup[0], 'gen'+str(self.population[0].age))
print 'depth: ', depth
# depth times: choose 2 random parents to mate; their offspring overwrites an unfit individual's slot
for n in range(depth):
print 'num. duplicates: ', len(duplicates)
aidx = duplicates[ random.randint(0, depth-1) ][0]
bidx = duplicates[ random.randint(0, depth-1) ][0]
kidx = kills[ random.randint(0, depth-1) ][0]
self.mate(aidx, bidx, kidx)
def age_pop(self):
for i in range(len(self.population)): self.population[i].age += 1
def iterate(self, iters=1):
sc.quit()
for iter in range(iters):
self.age_pop()
self.mutate_pop()
# self.crossover()
if (iter%self.reporting_interval)==0:
print self.population[0].age
self.reproduce(self.depth)
self.collect_population_data()
res = self.check_for_stopping_conditions()
if (res == 1) and (self.population[0].age > self.reporting_interval):
return
def print_all_individuals(self):
print '== pop ==========================='
for g in self.population: print g
def start_sc(self):
try:
sc.start(verbose=1, spew=1, startscsynth=1)
except OSError: # in case we've already started the synth
print 'QUIT!'
sc.quit()
print 'sfpaths: ', self.sfpaths
for i, sfpath in enumerate(self.sfpaths):
bnum = sc.loadSnd(os.path.basename(sfpath), wait=False)
print 'bnum: ', bnum
self.sfinfos[i]['bnum'] = bnum
return 1
def play_genome(self, index):
vals = self.population[index].realvalues
if vals[C_DELAY] < 1.0:
cdelay = 0.0
else:
cdelay = vals[C_DELAY]
decay = 0.9
tr = self.population[index].tratio
if index == 0:
slot = 0
else:
slot = 1
print '===================\n', self.sfinfos[slot]['dur']
# |outbus=20, srcbufNum, start=0.0, dur=1.0, transp=1.0, c_delay=0.0, c_decay=0.0, d_mult=1.0, d_amp=0.7, ms_bins=0, alpha=1, beta=1, gamma=1|
sc.Synth('sigmaSynth',
args=[
'srcbufNum', self.sfinfos[slot]['bnum'],
'start', 0,
'dur', self.sfinfos[slot]['dur']*1000,
'transp', tr,
'c_delay', cdelay,
'c_decay', decay,
'd_mult', vals[D_MULT],
'ms_bins', vals[MS_BINS],
'alpha', vals[ALPHA],
'beta', vals[BETA],
'gamma', vals[GAMMA],
'delta', vals[DELTA]])
def analyze_individual(self, index):
print "%%%%%%%%%%%%%%%%%%"
print "INDEX: ", index
print len(self.sfpaths)
if index == 0:
oscpath = os.path.join(self.anchor, 'snd', 'osc', `index`, (os.path.splitext(self.filenames[0])[0] + '_sigmaAnalyzer2.osc'))
# mdpath = os.path.join(self.anchor, 'snd', 'md', `index`, self.filenames[0])
mdpath = os.path.join(self.anchor, 'snd', 'md', `index`, (os.path.splitext(self.filenames[0])[0] + '.md.wav'))
else:
oscpath = os.path.join(self.anchor, 'snd', 'osc', `index`, (os.path.splitext(self.filenames[1])[0] + '_sigmaAnalyzer2.osc'))
# mdpath = os.path.join(self.anchor, 'snd', 'md', `index`, self.filenames[0])
mdpath = os.path.join(self.anchor, 'snd', 'md', `index`, (os.path.splitext(self.filenames[1])[0] + '.md.wav'))
print "-----------------------------"
print oscpath
print mdpath
vals = self.population[index].realvalues
if vals[C_DELAY] < 0.01:
cdelay = 0.0
else:
cdelay = vals[C_DELAY]
# decay = 0.9
tr = self.population[index].tratio
if index == 0:
slot = 0
else:
slot = 1
print (self.sfpaths[slot], index, tr, self.sfinfos[slot]['rate'], self.sfinfos[slot]['dur'])
print ''
print ['c_delay', cdelay, 'c_decay', vals[C_DECAY], 'd_mult', vals[D_MULT], 'ms_bins', vals[MS_BINS], 'alpha', vals[ALPHA], 'beta', vals[BETA], 'gamma', vals[GAMMA], 'delta', vals[DELTA]]
print ''
oscpath, mdpath = self.parser.createNRTScore(self.sfpaths[slot],
index=index,
tratio=tr,
srate=self.sfinfos[slot]['rate'],
duration=self.sfinfos[slot]['dur'],
params=[
'c_delay', cdelay,
'c_decay', vals[C_DECAY],
'd_mult', vals[D_MULT],
'ms_bins', vals[MS_BINS],
'alpha', vals[ALPHA],
'beta', vals[BETA],
'gamma', vals[GAMMA],
'delta', vals[DELTA]])
cmd = 'scsynth -N ' + oscpath + ' _ _ 44100 AIFF int16 -o 1'
print cmd
args = shlex.split(cmd)
p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE) #, shell=True, close_fds=True)
print 'PID: ', p.pid
rc = p.wait()
print 'RC: ', rc
if rc == 1:
num_frames = int(math.ceil(self.sfinfos[slot]['dur'] / 0.04 / tr))
# print 'num frames: ', num_frames
self.rawtable[index] = (mdpath, num_frames)
# print self.rawtable
def render_individual(self, index, generation_subdir='gen0'):
vals = self.population[index].realvalues
if vals[C_DELAY] < 0.01:
cdelay = 0.0
else:
cdelay = vals[C_DELAY]
# decay = 0.9
tr = self.population[index].tratio
if index == 0:
slot = 0
else:
slot = 1
oscpath, mdpath = self.parser.createNRTScore(self.sfpaths[slot],
index=index,
tratio=tr,
srate=self.sfinfos[slot]['rate'],
duration=self.sfinfos[slot]['dur'],
params=[
'c_delay', cdelay,
'c_decay', vals[C_DECAY],
'd_mult', vals[D_MULT],
'ms_bins', vals[MS_BINS],
'alpha', vals[ALPHA],
'beta', vals[BETA],
'gamma', vals[GAMMA],
'delta', vals[DELTA]])
if os.path.exists(os.path.join(self.anchor, 'snd', self.out_dir, str(generation_subdir))) is False:
os.mkdir(os.path.join(self.anchor, 'snd', self.out_dir, str(generation_subdir)))
cmd = 'scsynth -N ' + oscpath + ' _ ' + os.path.join(self.anchor, 'snd', self.out_dir, generation_subdir, (str(index) + '.aiff')) + ' 44100 AIFF int16 -o 1'
args = shlex.split(cmd)
p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE) #, shell=True, close_fds=True)
rc = p.wait()
if rc == 1:
print 'SUCCESS: ', os.path.join(self.anchor, 'snd', self.out_dir, (str(index) + '.aiff'))
rc = 0
else:
return None
# cannot get this to work:
# cmd = 'sox -b 16 ' + os.path.join(self.anchor, 'snd', self.out_dir, str(generation_subdir), (str(index) + '.aiff')) + ' ' + os.path.join(self.anchor, 'snd', self.out_dir, str(generation_subdir), (str(index) + '.wav')) + '; rm ' + os.path.join(self.anchor, 'snd', self.out_dir, str(generation_subdir), (str(index) + '.aiff'))
print cmd
args = shlex.split(cmd)
p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE) #, shell=True, close_fds=True)
rc = p.wait()
print rc
		if rc == 1: print 'DOUBLE SUCCESS!!'
def activate_raw_data(self, index):
mdpath = self.rawtable[index][0]
num_frames = self.rawtable[index][1]
self.rawmaps[index] = np.memmap(mdpath, dtype=np.float32, mode='r', offset=272, shape=(num_frames, 25))
"""
COMPARE_ALL_INDIVIDUALS:
... to individual in slot 0!
"""
def compare_all_individuals(self, aflag=False):
print self.population
for i in range(1, len(self.population)):
if aflag:
self.analyze_individual(i)
self.activate_raw_data(i)
# self.compare_individual_chi_squared(i)
self.compare_individual(i)
print self.dists
return self.dists
"""
COMPARE_INDIVIDUAL:
... to individual in the slot that is stipulated by the arg zeroindex!
- by convention, we should usually put what we are comparing to in slot 0
"""
def compare_individual_resample(self, index, zeroindex=0):
i_length = self.rawmaps[index].shape[0]
zr0_length = self.rawmaps[zeroindex].shape[0]
print i_length, ' | ', zr0_length
# i1_length = self.rawmaps[index-1].shape[0] ## <--- NEIGHBOR comparison
# print i_length, ' | ', i1_length, ' | ', zr0_length
		# based on length comparison, resample the mutated individuals so that they are the same length as the zeroth individual (which does not mutate)
# if indiv. is longer, resample indiv., take abs. diff., sum, div. by length
if zr0_length < i_length:
mfccs_dist_to_zero = float(np.sum(np.abs(scipy.signal.signaltools.resample(self.rawmaps[index][:,1:14], zr0_length, window='hanning') - self.rawmaps[0][:,1:14]))) / float(zr0_length)
total_dist = (float(np.sqrt(np.sum(np.abs(self.rawmaps[index][:zr0_length,0] - self.rawmaps[0][:zr0_length,0])))) / float(zr0_length)) + mfccs_dist_to_zero
# if zeroth indiv. is longer, resample zeroth indiv., take abs. diff., sum, div. by length, then do same comparison with "neighbor"
elif i_length < zr0_length:
mfccs_dist_to_zero = float(np.sum(np.abs(self.rawmaps[index][:,1:14] - scipy.signal.signaltools.resample(self.rawmaps[0][:,1:14], float(i_length), window='hanning')))) / float(i_length)
total_dist = (float(np.sqrt(np.sum(np.abs(self.rawmaps[index][:i_length,0] - self.rawmaps[0][:i_length,0])))) / float(i_length)) + mfccs_dist_to_zero
else:
# otherwise, take abs. diff., sum, div. by length, then do amp
mfccs_dist_to_zero = float(np.sum(np.abs(self.rawmaps[index][:,1:14] - self.rawmaps[0][:,1:14]))) / float(zr0_length)
total_dist = float(np.sqrt(np.sum(np.abs(self.rawmaps[index][:,0] - self.rawmaps[0][:,0])))) / float(zr0_length) + mfccs_dist_to_zero
self.dists[index] = total_dist
def compare_individual(self, index, zeroindex=0):
i_length = self.rawmaps[index].shape[0]
zr0_length = self.rawmaps[zeroindex].shape[0]
print i_length, ' | ', zr0_length
min_length = min(i_length, zr0_length)
print i_length, ' | ', zr0_length, ' | ', min_length
		# unlike the resample variant above, simply truncate both maps to the shorter
		# length, take the abs. diff. of the MFCC columns, sum, and divide by that length
mfccs_dist_to_zero = float(np.sum(np.abs( self.rawmaps[index][:zr0_length,1:14] - self.rawmaps[0][:min_length,1:14]))) / float(min_length)
total_dist = (float(np.sqrt(np.sum(np.abs(self.rawmaps[index][:min_length,0] - self.rawmaps[0][:min_length,0])))) / float(min_length)) + mfccs_dist_to_zero
self.dists[index] = (total_dist / float(min_length))
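	# Illustrative sketch (not in the original) of the distance computed above on
	# toy data, assuming the 25-column analysis frames set up by activate_raw_data
	# (column 0 = amplitude, columns 1-13 = MFCCs):
	#   a = np.zeros((10, 25)); b = np.ones((12, 25))
	#   n = min(a.shape[0], b.shape[0])
	#   mfcc_d = np.sum(np.abs(a[:n, 1:14] - b[:n, 1:14])) / float(n)
	#   amp_d = np.sqrt(np.sum(np.abs(a[:n, 0] - b[:n, 0]))) / float(n)
	#   dist = (amp_d + mfcc_d) / float(n)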
def compare_individual_chi_squared(self, index):
i_length = self.rawmaps[index].shape[0]
i1_length = self.rawmaps[index-1].shape[0]
zr0_length = self.rawmaps[0].shape[0]
# print i_length, '|', zr0_length
		# based on length comparison, resample the mutated individuals so that they are the same length as the zeroth individual (which does not mutate)
# if indiv. is longer, resample indiv., take abs. diff., sum, div. by length
if zr0_length < i_length:
mfccs_dist_to_zero = scipy.stats.mstats.chisquare(scipy.signal.signaltools.resample(self.rawmaps[index], zr0_length, window='hanning'), self.rawmaps[0])
# print self.dists[index]
# if zeroth indiv. is longer, resample zeroth indiv., take abs. diff., sum, div. by length, then do same comparison with "neighbor"
elif i_length < zr0_length:
mfccs_dist_to_zero = scipy.stats.mstats.chisquare(self.rawmaps[index], scipy.signal.signaltools.resample(self.rawmaps[0], i_length, window='hanning'))
else:
# otherwise, take abs. diff., sum, div. by length, then do same comparison with "neighbor"
print 'CHI-ZERO'
mfccs_dist_to_zero = scipy.stats.mstats.chisquare(self.rawmaps[index], self.rawmaps[0])
if i1_length < i_length:
neighbor_dist = scipy.stats.mstats.chisquare(scipy.signal.signaltools.resample(self.rawmaps[index-1], i_length, window='hanning') - self.rawmaps[index])
elif i_length < i1_length:
neighbor_dist = scipy.stats.mstats.chisquare(self.rawmaps[index-1], scipy.signal.signaltools.resample(self.rawmaps[index], i1_length, window='hanning'))
else:
print 'CHI-NEIGHBOR'
neighbor_dist = scipy.stats.mstats.chisquare(self.rawmaps[index-1], scipy.signal.signaltools.resample(self.rawmaps[index], i1_length, window='hanning'))
nsum = np.sum(np.abs(neighbor_dist[0].data[:24]))
zsum = np.sum(np.abs(mfccs_dist_to_zero[0].data[:24]))
nasum = neighbor_dist[0].data[24]
zasum = mfccs_dist_to_zero[0].data[24]
self.dists[index] = nsum + zsum - (24.0 * nasum) - (24.0 * zasum)
def collect_population_data(self):
diffs = [self.dists[k] for k in self.dists.keys()]
print 'diffs: ', diffs
age0 = self.population[0].age
age1 = self.population[1].age
print 'ages: ', age0, '|', age1
self.pool_means[age1] = np.mean(diffs)
self.pool_stdevs[age1] = np.std(diffs)
def collect_population_data_resample(self):
zero_data = np.array(self.rawmaps[0][:,1:14])
zr0_length = zero_data.shape[0]
diffs = []
for indiv in range(1, len(self.population)):
data = np.array(self.rawmaps[indiv][:,1:14])
i_length = data.shape[0]
if (i_length > zr0_length):
diffs += [np.sum(np.abs(scipy.signal.signaltools.resample(data, zr0_length, window='hanning') - zero_data))]
elif (i_length < zr0_length):
diffs += [np.sum(np.abs(data - scipy.signal.signaltools.resample(zero_data, i_length, window='hanning')))]
else:
diffs += [np.sum(np.abs(data - zero_data))]
diffs = np.array(diffs)
age0 = self.population[0].age
age1 = self.population[1].age
print 'ages: ', age0, '|', age1
self.pool_means[age1] = np.mean(diffs)
self.pool_stdevs[age1] = np.std(diffs)
def check_for_stopping_conditions(self):
"""
0 = continue
1 = stop
"""
age0 = self.population[0].age
stdevs_skeys = sorted(self.pool_stdevs.keys())
self.stdevs_ordered = [self.pool_stdevs[key] for key in stdevs_skeys]
lastNstdevs = self.stdevs_ordered[(-1*self.stopping_crit_min_gens):]
self.running_avg_mean_stdevs[age0] = mean(lastNstdevs)
print ">>>>>>>>>>>>>>>>> STOP??? ::: ", (abs(max(lastNstdevs) - min(lastNstdevs)) / self.stopping_crit_min_gens)
print ">>>>>>>>>>>>>>>>> MEAN ::: ", mean(lastNstdevs)
if (len(lastNstdevs) < self.stopping_crit_min_gens) or ((abs(max(lastNstdevs) - min(lastNstdevs)) / self.stopping_crit_min_gens) > self.stopping_slope):
print " continue ..."
return 0
else:
print "**STOP**"
return 1 # signal stop
def population_realvalues_as_array(self):
realvals = []
for indiv in self.population:
realvals += indiv.realvalues
return np.array(realvals).reshape((-1,7))
def population_8bitvalues_as_array(self):
vals = []
for indiv in self.population:
vals += indiv.values
return np.array(vals).reshape((-1,7))
class Genome:
def __init__(self, values=None, slotranges=[[1.0,0.5],[0.0,0.05],[0.0,1.0],[1.0, 0.5],[1.0,10.],[1.0,0.5],[0.0,50.],[1.0,0.5]]):
# """
# 'alpha', 'c_delay', 'decay' 'beta', 'd_mult', 'gamma', 'ms_bins', 'delta'
# [[1.0,0.5],[0.0,0.05],[0.0,1.0],[1.0,0.5],[1.0,10.],[1.0,0.5],[0.0,50.]],[1.0,0.5]
# """
self.tratio = 1.0 # CHECK THIS... WHY IS IT HERE/in Hertz!!! ???
self.boundaries = slotranges
self.generators = [RandomGenerator_8Bit(-1) for n in range(8)] ### CONSTANT WARNING
#StaticGenerator_8Bit(VAL) ???
if values is None:
self.values = [gen.val for gen in self.generators]
else:
self.values = values
self.bitlength = len(self.values) * 8
self.binarystring = vals_to_binarystring(self.values)
# print self.values
# print type(self.values[0])
self.realvalues = [lininterp(val,self.boundaries[i]) for i,val in enumerate(self.values)]
self.age = 0
self.edits = 0
def __repr__(self):
#print '1. ', tuple(self.values)
print '2. ', ((self.age, self.edits) + tuple(self.values) + tuple(self.binarystring))
return "%9i/%9i || %.6f|%.6f|%.6f|%.6f|%.6f|%.6f|%.6f|%.6f" % ((self.age, self.edits) + tuple(self.realvalues)) # + tuple(self.binarystring)
def mutate(self):
pos = random.randint(0,(self.bitlength-1))
# flip bit
print 'bit flipped to: ', abs(1 - int(self.binarystring[pos],2))
self.binarystring = substitute_char_in_string(self.binarystring, pos, abs(1 - int(self.binarystring[pos],2)))
# recalc binary string
self.values = binarystring_to_vals(self.binarystring)
print "values: ", self.values
self.realvalues = [lininterp(val,self.boundaries[i]) for i,val in enumerate(self.values)]
# def xover_sub(self, pos, incomingSeq, headortail=0):
# if headortail == 0:
# print '<<>> ', self.binarystring
# print '<<>> ', pos
# print '<<>> ', incomingSeq
# self.binarystring = incomingSeq[:pos] + self.binarystring[pos:]
# else:
# print '<<>> ', self.binarystring
# print '<<>> ', pos
# print '<<>> ', incomingSeq
# self.binarystring = self.binarystring[:pos] + incomingSeq[:(len(self.binarystring)-pos)]
# # recalc binary string
# print '==== ', self.binarystring
# self.values = binarystring_to_vals(self.binarystring)
# print "values: ", self.values
# self.realvalues = [lininterp(val,self.boundaries[i]) for i,val in enumerate(self.values)]
def mean(arr):
return sum([(float(val)/len(arr)) for val in arr])
def lininterp(val,bounds=[0.,1.]):
return (((val/128.0)*(bounds[1]-bounds[0]))+bounds[0])
def substitute_char_in_string(s, p, c):
l = list(s)
l[p] = str(c)
return "".join(l)
# conversion function (NB: as written this returns the 2**(m/12) frequency ratio,
# not an absolute frequency in Hz; a standard MIDI-to-Hz mapping would be
# 440.0 * pow(2.0, (m - 69) / 12.0))
def midi2hz(m): return pow(2.0, (m/12.0))
def vals_to_binarystring(vals = [0, 0, 0, 0, 0]):
return ''.join((("{0:08b}".format(val)) for val in vals))
# never a '0bXXX' string!
def binarystring_to_vals(binstring):
mystring = binstring[:]
length = len(mystring) / 8 # ignore the last digits if it doesn't chunk into 8-item substrings
res = []
# print mystring[(n*8):((n+1)*8)]
return [int(mystring[(n*8):((n+1)*8)], 2) for n in range(length)]
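# Illustrative round-trip sketch (not part of the original module), assuming each
# genome value is an 8-bit integer in the range 0-255:
#   vals = [12, 255, 0, 7]
#   bstr = vals_to_binarystring(vals)        # 32-character string of 0s and 1s
#   assert binarystring_to_vals(bstr) == vals
#   print lininterp(64, [0.0, 50.0])         # maps a raw value into a slot range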
def plot_one_generational_means_stdevs(pool_means1, pool_stdevs1, poolsize1):
fig, ax1 = plt.subplots(nrows=1, ncols=1)
timepoints1 = sorted(pool_means1.keys())
means1 = np.array([pool_means1[tp] for tp in timepoints1])
stdevs1 = np.array([pool_stdevs1[tp] for tp in timepoints1])
lower_stdevs1 = np.where(np.subtract(means1, (stdevs1/2))>0, stdevs1/2, means1) # 0
ax1.errorbar(timepoints1, means1, yerr=[lower_stdevs1, stdevs1/2], fmt='o')
ax1.set_xlabel('Number of generations')
ax1.set_ylabel('Fitness score/dissimilarity')
plt.show()
def plot_generational_means_stdevs(pool_means1, pool_stdevs1, poolsize1, pool_means2, pool_stdevs2, poolsize2):
fig, (ax1, ax2) = plt.subplots(nrows=1, ncols=2, sharey=True)
timepoints1 = sorted(pool_means1.keys())
means1 = np.array([pool_means1[tp] for tp in timepoints1])
stdevs1 = np.array([pool_stdevs1[tp] for tp in timepoints1])
lower_stdevs1 = np.where(np.subtract(means1, stdevs1)>0, stdevs1, means1) # 0
ax1.errorbar(timepoints1, means1, yerr=[lower_stdevs1/2, stdevs1/2], fmt='o')
#
timepoints2 = sorted(pool_means2.keys())
means2 = np.array([pool_means2[tp] for tp in timepoints2])
stdevs2 = np.array([pool_stdevs2[tp] for tp in timepoints2])
lower_stdevs2 = np.where(np.subtract(means2, stdevs2)>0, stdevs2, means2) # 0
ax2.errorbar(timepoints2, means2, yerr=[lower_stdevs2/2, stdevs2/2], fmt='o')
ax1.set_xlabel('Number of generations')
ax2.set_xlabel('Number of generations')
ax1.set_ylabel('Fitness score/dissimilarity')
plt.show()
# if __name__=='__main__':
# genex = GenomicExplorer('/Users/kfl/dev/python/sc-0.3.1/genomic', 'test.wav')
# genex.analyze_genome(1)
|
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright (c) 2015, Nicolas P. Rougier
# Distributed under the (new) BSD License.
#
# Contributors: Nicolas P. Rougier ([email protected])
# -----------------------------------------------------------------------------
# --- Time ---
ms = 0.001
dt = 1*ms
tau = 10*ms
# --- Learning ---
alpha_CUE = 0.050
alpha_LTP = 0.002
alpha_LTD = 0.001
# --- Sigmoid ---
Vmin = 0
Vmax = 20
Vh = 16
Vc = 3
# --- Model ---
decision_threshold = 40
noise = 0.001
CTX_rest = -3.0
STR_rest = 0.0
STN_rest = -10.0
GPI_rest = 10.0
THL_rest = -40.0
# --- Cues & Rewards ---
V_cue = 7
rewards = 3/3.,2/3.,1/3.,0/3.
# -- Weight ---
Wmin = 0.25
Wmax = 0.75
gains = { "CTX.cog -> STR.cog" : +1.0,
"CTX.mot -> STR.mot" : +1.0,
"CTX.ass -> STR.ass" : +1.0,
"CTX.cog -> STR.ass" : +0.2,
"CTX.mot -> STR.ass" : +0.2,
"CTX.cog -> STN.cog" : +1.0,
"CTX.mot -> STN.mot" : +1.0,
"STR.cog -> GPI.cog" : -2.0,
"STR.mot -> GPI.mot" : -2.0,
"STR.ass -> GPI.cog" : -2.0,
"STR.ass -> GPI.mot" : -2.0,
"STN.cog -> GPI.cog" : +1.0,
"STN.mot -> GPI.mot" : +1.0,
"GPI.cog -> THL.cog" : -0.25,
"GPI.mot -> THL.mot" : -0.25,
"THL.cog -> CTX.cog" : +0.4,
"THL.mot -> CTX.mot" : +0.4,
"CTX.cog -> THL.cog" : +0.1,
"CTX.mot -> THL.mot" : +0.1,
"CTX.mot -> CTX.mot" : +0.5,
"CTX.cog -> CTX.cog" : +0.5,
"CTX.ass -> CTX.ass" : +0.5,
"CTX.ass -> CTX.cog" : +0.01,
"CTX.ass -> CTX.mot" : +0.025,
"CTX.cog -> CTX.ass" : +0.025,
"CTX.mot -> CTX.ass" : +0.01,
}
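# Illustrative sketch (an assumption, not part of the original parameter file):
# sigmoid parameters like Vmin/Vmax/Vh/Vc above are typically plugged into a
# Boltzmann-style activation function, e.g.
#   import math
#   def sigmoid(V, Vmin=Vmin, Vmax=Vmax, Vh=Vh, Vc=Vc):
#       return Vmin + (Vmax - Vmin) / (1.0 + math.exp((Vh - V) / Vc))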
|
# -*- coding: utf-8 -*-
# Copyright 2019 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Package utility functionality."""
from __future__ import print_function
import collections
import fileinput
import functools
import json
import os
import re
import sys
import six
from google.protobuf import json_format
from chromite.api.gen.config import replication_config_pb2
from chromite.cbuildbot import manifest_version
from chromite.lib import constants
from chromite.lib import cros_build_lib
from chromite.lib import cros_logging as logging
from chromite.lib import git
from chromite.lib import osutils
from chromite.lib import portage_util
from chromite.lib import replication_lib
from chromite.lib import uprev_lib
if cros_build_lib.IsInsideChroot():
from chromite.service import dependency
# Registered handlers for uprevving versioned packages.
_UPREV_FUNCS = {}
class Error(Exception):
"""Module's base error class."""
class UnknownPackageError(Error):
"""Uprev attempted for a package without a registered handler."""
class UprevError(Error):
"""An error occurred while uprevving packages."""
class NoAndroidVersionError(Error):
"""An error occurred while trying to determine the android version."""
class NoAndroidBranchError(Error):
"""An error occurred while trying to determine the android branch."""
class NoAndroidTargetError(Error):
"""An error occurred while trying to determine the android target."""
class AndroidIsPinnedUprevError(UprevError):
"""Raised when we try to uprev while Android is pinned."""
def __init__(self, new_android_atom):
"""Initialize a AndroidIsPinnedUprevError.
Args:
new_android_atom: The Android atom that we failed to
uprev to, due to Android being pinned.
"""
assert new_android_atom
    msg = ('Failed to uprev to Android version %s as Android was pinned.' %
new_android_atom)
super(AndroidIsPinnedUprevError, self).__init__(msg)
self.new_android_atom = new_android_atom
class EbuildManifestError(Error):
"""Error when running ebuild manifest."""
class GeneratedCrosConfigFilesError(Error):
"""Error when cros_config_schema does not produce expected files"""
def __init__(self, expected_files, found_files):
msg = ('Expected to find generated C files: %s. Actually found: %s' %
(expected_files, found_files))
super(GeneratedCrosConfigFilesError, self).__init__(msg)
UprevVersionedPackageModifications = collections.namedtuple(
'UprevVersionedPackageModifications', ('new_version', 'files'))
class UprevVersionedPackageResult(object):
"""Data object for uprev_versioned_package."""
def __init__(self):
self.modified = []
def add_result(self, new_version, modified_files):
"""Adds version/ebuilds tuple to result.
Args:
new_version: New version number of package.
modified_files: List of files modified for the given version.
"""
result = UprevVersionedPackageModifications(new_version, modified_files)
self.modified.append(result)
return self
@property
def uprevved(self):
return bool(self.modified)
def patch_ebuild_vars(ebuild_path, variables):
"""Updates variables in ebuild.
Use this function rather than portage_util.EBuild.UpdateEBuild when you
want to preserve the variable position and quotes within the ebuild.
Args:
ebuild_path: The path of the ebuild.
variables: Dictionary of variables to update in ebuild.
"""
try:
for line in fileinput.input(ebuild_path, inplace=1):
varname, eq, _ = line.partition('=')
if eq == '=' and varname.strip() in variables:
value = variables[varname]
sys.stdout.write('%s="%s"\n' % (varname, value))
else:
sys.stdout.write(line)
finally:
fileinput.close()
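# Illustrative usage sketch (hypothetical path and value, not taken from the source):
#   patch_ebuild_vars('/path/to/foo-9999.ebuild',
#                     dict(AFDO_PROFILE_VERSION='R99-1234.0-1111111111'))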
def uprevs_versioned_package(package):
"""Decorator to register package uprev handlers."""
assert package
def register(func):
"""Registers |func| as a handler for |package|."""
_UPREV_FUNCS[package] = func
@functools.wraps(func)
def pass_through(*args, **kwargs):
return func(*args, **kwargs)
return pass_through
return register
def uprev_android(tracking_branch,
android_package,
android_build_branch,
chroot,
build_targets=None,
android_version=None):
"""Returns the portage atom for the revved Android ebuild - see man emerge."""
command = [
'cros_mark_android_as_stable',
'--tracking_branch=%s' % tracking_branch,
'--android_package=%s' % android_package,
'--android_build_branch=%s' % android_build_branch,
]
if build_targets:
command.append('--boards=%s' % ':'.join(bt.name for bt in build_targets))
if android_version:
command.append('--force_version=%s' % android_version)
result = cros_build_lib.run(
command,
stdout=True,
enter_chroot=True,
encoding='utf-8',
chroot_args=chroot.get_enter_args())
portage_atom_string = result.stdout.strip()
android_atom = None
if portage_atom_string:
android_atom = portage_atom_string.splitlines()[-1].partition('=')[-1]
if not android_atom:
logging.info('Found nothing to rev.')
return None
for target in build_targets or []:
# Sanity check: We should always be able to merge the version of
# Android we just unmasked.
command = ['emerge-%s' % target.name, '-p', '--quiet', '=%s' % android_atom]
try:
cros_build_lib.run(
command, enter_chroot=True, chroot_args=chroot.get_enter_args())
except cros_build_lib.RunCommandError:
logging.error(
'Cannot emerge-%s =%s\nIs Android pinned to an older '
'version?', target, android_atom)
raise AndroidIsPinnedUprevError(android_atom)
return android_atom
def uprev_build_targets(build_targets,
overlay_type,
chroot=None,
output_dir=None):
"""Uprev the set provided build targets, or all if not specified.
Args:
build_targets (list[build_target_lib.BuildTarget]|None): The build targets
whose overlays should be uprevved, empty or None for all.
overlay_type (str): One of the valid overlay types except None (see
constants.VALID_OVERLAYS).
chroot (chroot_lib.Chroot|None): The chroot to clean, if desired.
output_dir (str|None): The path to optionally dump result files.
"""
# Need a valid overlay, but exclude None.
assert overlay_type and overlay_type in constants.VALID_OVERLAYS
if build_targets:
overlays = portage_util.FindOverlaysForBoards(
overlay_type, boards=[t.name for t in build_targets])
else:
overlays = portage_util.FindOverlays(overlay_type)
return uprev_overlays(
overlays,
build_targets=build_targets,
chroot=chroot,
output_dir=output_dir)
def uprev_overlays(overlays, build_targets=None, chroot=None, output_dir=None):
"""Uprev the given overlays.
Args:
overlays (list[str]): The list of overlay paths.
build_targets (list[build_target_lib.BuildTarget]|None): The build targets
to clean in |chroot|, if desired. No effect unless |chroot| is provided.
chroot (chroot_lib.Chroot|None): The chroot to clean, if desired.
output_dir (str|None): The path to optionally dump result files.
Returns:
list[str] - The paths to all of the modified ebuild files. This includes the
new files that were added (i.e. the new versions) and all of the removed
files (i.e. the old versions).
"""
assert overlays
manifest = git.ManifestCheckout.Cached(constants.SOURCE_ROOT)
uprev_manager = uprev_lib.UprevOverlayManager(
overlays,
manifest,
build_targets=build_targets,
chroot=chroot,
output_dir=output_dir)
uprev_manager.uprev()
return uprev_manager.modified_ebuilds
def uprev_versioned_package(package, build_targets, refs, chroot):
"""Call registered uprev handler function for the package.
Args:
package (portage_util.CPV): The package being uprevved.
build_targets (list[build_target_lib.BuildTarget]): The build targets to
clean on a successful uprev.
refs (list[uprev_lib.GitRef]):
chroot (chroot_lib.Chroot): The chroot to enter for cleaning.
Returns:
UprevVersionedPackageResult: The result.
"""
assert package
if package.cp not in _UPREV_FUNCS:
raise UnknownPackageError(
'Package "%s" does not have a registered handler.' % package.cp)
return _UPREV_FUNCS[package.cp](build_targets, refs, chroot)
@uprevs_versioned_package('afdo/kernel-profiles')
def uprev_kernel_afdo(*_args, **_kwargs):
"""Updates kernel ebuilds with versions from kernel_afdo.json.
See: uprev_versioned_package.
Raises:
    EbuildManifestError: When the ebuild manifest does not complete successfully.
"""
path = os.path.join(constants.SOURCE_ROOT, 'src', 'third_party',
'toolchain-utils', 'afdo_metadata', 'kernel_afdo.json')
with open(path, 'r') as f:
versions = json.load(f)
result = UprevVersionedPackageResult()
for version, version_info in versions.items():
path = os.path.join('src', 'third_party', 'chromiumos-overlay',
'sys-kernel', version)
ebuild_path = os.path.join(constants.SOURCE_ROOT, path,
'%s-9999.ebuild' % version)
chroot_ebuild_path = os.path.join(constants.CHROOT_SOURCE_ROOT, path,
'%s-9999.ebuild' % version)
afdo_profile_version = version_info['name']
patch_ebuild_vars(ebuild_path,
dict(AFDO_PROFILE_VERSION=afdo_profile_version))
try:
cmd = ['ebuild', chroot_ebuild_path, 'manifest', '--force']
cros_build_lib.run(cmd, enter_chroot=True)
except cros_build_lib.RunCommandError as e:
raise EbuildManifestError(
'Error encountered when regenerating the manifest for ebuild: %s\n%s'
% (chroot_ebuild_path, e), e)
manifest_path = os.path.join(constants.SOURCE_ROOT, path, 'Manifest')
result.add_result(afdo_profile_version, [ebuild_path, manifest_path])
return result
@uprevs_versioned_package('chromeos-base/termina-image-amd64')
def uprev_termina_amd64(_build_targets, _refs, chroot):
"""Updates termina amd64 VM - chromeos-base/termina-image-amd64.
See: uprev_versioned_package.
"""
return uprev_termina('termina-image-amd64', chroot)
@uprevs_versioned_package('chromeos-base/termina-image-arm')
def uprev_termina_arm(_build_targets, _refs, chroot):
"""Updates termina arm VM - chromeos-base/termina-image-arm.
See: uprev_versioned_package.
"""
return uprev_termina('termina-image-arm', chroot)
def uprev_termina(package, chroot):
"""Helper function to uprev termina VM.
Args:
package (string): name of the package
chroot (chroot_lib.Chroot): specify a chroot to enter.
Returns:
UprevVersionedPackageResult: The result.
"""
package_path = os.path.join(constants.CHROMIUMOS_OVERLAY_DIR, 'chromeos-base',
package)
version_pin_path = os.path.join(package_path, 'VERSION-PIN')
return uprev_ebuild_from_pin(package_path, version_pin_path, chroot)
@uprevs_versioned_package('chromeos-base/chromeos-dtc-vm')
def uprev_sludge(_build_targets, _refs, chroot):
"""Updates sludge VM - chromeos-base/chromeos-dtc-vm.
See: uprev_versioned_package.
"""
package = 'chromeos-dtc-vm'
package_path = os.path.join('src', 'private-overlays',
'project-wilco-private', 'chromeos-base', package)
version_pin_path = os.path.join(package_path, 'VERSION-PIN')
return uprev_ebuild_from_pin(package_path, version_pin_path, chroot)
def uprev_ebuild_from_pin(package_path, version_pin_path, chroot):
"""Changes the package ebuild's version to match the version pin file.
Args:
package_path: The path of the package relative to the src root. This path
should contain a single ebuild with the same name as the package.
version_pin_path: The path of the version_pin file that contains only a
version string. The ebuild's version will be directly set to this
number.
chroot (chroot_lib.Chroot): specify a chroot to enter.
Returns:
UprevVersionedPackageResult: The result.
"""
package = os.path.basename(package_path)
package_src_path = os.path.join(constants.SOURCE_ROOT, package_path)
ebuild_paths = list(portage_util.EBuild.List(package_src_path))
if not ebuild_paths:
raise UprevError('No ebuilds found for %s' % package)
elif len(ebuild_paths) > 1:
raise UprevError('Multiple ebuilds found for %s' % package)
else:
ebuild_path = ebuild_paths[0]
version_pin_src_path = os.path.join(constants.SOURCE_ROOT, version_pin_path)
version = osutils.ReadFile(version_pin_src_path).strip()
new_ebuild_path = os.path.join(package_path,
'%s-%s-r1.ebuild' % (package, version))
new_ebuild_src_path = os.path.join(constants.SOURCE_ROOT, new_ebuild_path)
os.rename(ebuild_path, new_ebuild_src_path)
manifest_src_path = os.path.join(package_src_path, 'Manifest')
new_ebuild_chroot_path = os.path.join(constants.CHROOT_SOURCE_ROOT,
new_ebuild_path)
try:
portage_util.UpdateEbuildManifest(new_ebuild_chroot_path, chroot=chroot)
except cros_build_lib.RunCommandError as e:
raise EbuildManifestError(
'Unable to update manifest for %s: %s' % (package, e.stderr))
result = UprevVersionedPackageResult()
result.add_result(version,
[new_ebuild_src_path, ebuild_path, manifest_src_path])
return result
@uprevs_versioned_package(constants.CHROME_CP)
def uprev_chrome(build_targets, refs, chroot):
"""Uprev chrome and its related packages.
See: uprev_versioned_package.
"""
# Determine the version from the refs (tags), i.e. the chrome versions are the
# tag names.
chrome_version = uprev_lib.get_chrome_version_from_refs(refs)
logging.debug('Chrome version determined from refs: %s', chrome_version)
uprev_manager = uprev_lib.UprevChromeManager(
chrome_version, build_targets=build_targets, chroot=chroot)
result = UprevVersionedPackageResult()
# Start with chrome itself, as we can't do anything else unless chrome
# uprevs successfully.
# TODO(crbug.com/1080429): Handle all possible outcomes of a Chrome uprev
# attempt. The expected behavior is documented in the following table:
#
# Outcome of Chrome uprev attempt:
# NEWER_VERSION_EXISTS:
# Do nothing.
# SAME_VERSION_EXISTS or REVISION_BUMP:
# Uprev followers
# Assert not VERSION_BUMP (any other outcome is fine)
# VERSION_BUMP or NEW_EBUILD_CREATED:
# Uprev followers
# Assert that Chrome & followers are at same package version
if not uprev_manager.uprev(constants.CHROME_CP):
return result
# With a successful chrome rev, also uprev related packages.
for package in constants.OTHER_CHROME_PACKAGES:
uprev_manager.uprev(package)
return result.add_result(chrome_version, uprev_manager.modified_ebuilds)
def _generate_platform_c_files(replication_config, chroot):
"""Generates platform C files from a platform JSON payload.
Args:
replication_config (replication_config_pb2.ReplicationConfig): A
ReplicationConfig that has already been run. If it produced a
build_config.json file, that file will be used to generate platform C
files. Otherwise, nothing will be generated.
chroot (chroot_lib.Chroot): The chroot to use to generate.
Returns:
A list of generated files.
"""
# Generate the platform C files from the build config. Note that it would be
# more intuitive to generate the platform C files from the platform config;
# however, cros_config_schema does not allow this, because the platform config
# payload is not always valid input. For example, if a property is both
# 'required' and 'build-only', it will fail schema validation. Thus, use the
# build config, and use '-f' to filter.
build_config_path = [
rule.destination_path
for rule in replication_config.file_replication_rules
if rule.destination_path.endswith('build_config.json')
]
if not build_config_path:
logging.info(
'No build_config.json found, will not generate platform C files. '
'Replication config: %s', replication_config)
return []
if len(build_config_path) > 1:
raise ValueError('Expected at most one build_config.json destination path. '
'Replication config: %s' % replication_config)
build_config_path = build_config_path[0]
# Paths to the build_config.json and dir to output C files to, in the
# chroot.
build_config_chroot_path = os.path.join(constants.CHROOT_SOURCE_ROOT,
build_config_path)
generated_output_chroot_dir = os.path.join(constants.CHROOT_SOURCE_ROOT,
os.path.dirname(build_config_path))
command = [
'cros_config_schema', '-m', build_config_chroot_path, '-g',
generated_output_chroot_dir, '-f', '"TRUE"'
]
cros_build_lib.run(
command, enter_chroot=True, chroot_args=chroot.get_enter_args())
# A relative (to the source root) path to the generated C files.
generated_output_dir = os.path.dirname(build_config_path)
generated_files = []
expected_c_files = ['config.c', 'ec_config.c', 'ec_config.h']
for f in expected_c_files:
if os.path.exists(
os.path.join(constants.SOURCE_ROOT, generated_output_dir, f)):
generated_files.append(os.path.join(generated_output_dir, f))
if len(expected_c_files) != len(generated_files):
raise GeneratedCrosConfigFilesError(expected_c_files, generated_files)
return generated_files
def _get_private_overlay_package_root(ref, package):
"""Returns the absolute path to the root of a given private overlay.
Args:
ref (uprev_lib.GitRef): GitRef for the private overlay.
package (str): Path to the package in the overlay.
"""
# There might be a cleaner way to map from package -> path within the source
# tree. For now, just use string patterns.
private_overlay_ref_pattern = r'/chromeos\/overlays\/overlay-([\w-]+)-private'
match = re.match(private_overlay_ref_pattern, ref.path)
if not match:
raise ValueError('ref.path must match the pattern: %s. Actual ref: %s' %
(private_overlay_ref_pattern, ref))
overlay = match.group(1)
return os.path.join(constants.SOURCE_ROOT,
'src/private-overlays/overlay-%s-private' % overlay,
package)
@uprevs_versioned_package('chromeos-base/chromeos-config-bsp')
def replicate_private_config(_build_targets, refs, chroot):
"""Replicate a private cros_config change to the corresponding public config.
See uprev_versioned_package for args
"""
package = 'chromeos-base/chromeos-config-bsp'
if len(refs) != 1:
raise ValueError('Expected exactly one ref, actual %s' % refs)
# Expect a replication_config.jsonpb in the package root.
package_root = _get_private_overlay_package_root(refs[0], package)
replication_config_path = os.path.join(package_root,
'replication_config.jsonpb')
try:
replication_config = json_format.Parse(
osutils.ReadFile(replication_config_path),
replication_config_pb2.ReplicationConfig())
except IOError:
raise ValueError(
'Expected ReplicationConfig missing at %s' % replication_config_path)
replication_lib.Replicate(replication_config)
modified_files = [
rule.destination_path
for rule in replication_config.file_replication_rules
]
# The generated platform C files are not easily filtered by replication rules,
# i.e. JSON / proto filtering can be described by a FieldMask, arbitrary C
# files cannot. Therefore, replicate and filter the JSON payloads, and then
# generate filtered C files from the JSON payload.
modified_files.extend(_generate_platform_c_files(replication_config, chroot))
# Use the private repo's commit hash as the new version.
new_private_version = refs[0].revision
# modified_files should contain only relative paths at this point, but the
# returned UprevVersionedPackageResult must contain only absolute paths.
for i, modified_file in enumerate(modified_files):
assert not os.path.isabs(modified_file)
modified_files[i] = os.path.join(constants.SOURCE_ROOT, modified_file)
return UprevVersionedPackageResult().add_result(new_private_version,
modified_files)
def get_best_visible(atom, build_target=None):
"""Returns the best visible CPV for the given atom.
Args:
atom (str): The atom to look up.
build_target (build_target_lib.BuildTarget): The build target whose
sysroot should be searched, or the SDK if not provided.
Returns:
portage_util.CPV|None: The best visible package.
"""
assert atom
board = build_target.name if build_target else None
return portage_util.PortageqBestVisible(atom, board=board)
def has_prebuilt(atom, build_target=None, useflags=None):
"""Check if a prebuilt exists.
Args:
atom (str): The package whose prebuilt is being queried.
build_target (build_target_lib.BuildTarget): The build target whose
sysroot should be searched, or the SDK if not provided.
useflags: Any additional USE flags that should be set. May be a string
of properly formatted USE flags, or an iterable of individual flags.
Returns:
bool: True iff there is an available prebuilt, False otherwise.
"""
assert atom
board = build_target.name if build_target else None
extra_env = None
if useflags:
new_flags = useflags
if not isinstance(useflags, six.string_types):
new_flags = ' '.join(useflags)
existing = os.environ.get('USE', '')
final_flags = '%s %s' % (existing, new_flags)
extra_env = {'USE': final_flags.strip()}
return portage_util.HasPrebuilt(atom, board=board, extra_env=extra_env)
def builds(atom, build_target, packages=None):
"""Check if |build_target| builds |atom| (has it in its depgraph)."""
cros_build_lib.AssertInsideChroot()
graph, _sdk_graph = dependency.GetBuildDependency(build_target.name, packages)
return any(atom in package for package in graph['package_deps'])
def determine_chrome_version(build_target):
"""Returns the current Chrome version for the board (or in buildroot).
Args:
build_target (build_target_lib.BuildTarget): The board build target.
Returns:
str|None: The chrome version if available.
"""
# TODO(crbug/1019770): Long term we should not need the try/catch here once
# the builds function above only returns True for chrome when
# determine_chrome_version will succeed.
try:
cpv = portage_util.PortageqBestVisible(
constants.CHROME_CP, build_target.name, cwd=constants.SOURCE_ROOT)
except cros_build_lib.RunCommandError as e:
# Return None because portage failed when trying to determine the chrome
# version.
    logging.warning('Caught exception in determine_chrome_version: %s', e)
return None
# Something like 78.0.3877.4_rc -> 78.0.3877.4
return cpv.version_no_rev.partition('_')[0]
def determine_android_package(board):
"""Returns the active Android container package in use by the board.
Args:
board: The board name this is specific to.
Returns:
str|None: The android package string if there is one.
"""
try:
packages = portage_util.GetPackageDependencies(board, 'virtual/target-os')
except cros_build_lib.RunCommandError as e:
# Return None because a command (likely portage) failed when trying to
# determine the package.
logging.warning('Caught exception in determine_android_package: %s', e)
return None
# We assume there is only one Android package in the depgraph.
for package in packages:
if package.startswith('chromeos-base/android-container-') or \
package.startswith('chromeos-base/android-vm-'):
return package
return None
def determine_android_version(boards=None):
"""Determine the current Android version in buildroot now and return it.
This uses the typical portage logic to determine which version of Android
is active right now in the buildroot.
Args:
boards: List of boards to check version of.
Returns:
The Android build ID of the container for the boards.
Raises:
NoAndroidVersionError: if no unique Android version can be determined.
"""
if not boards:
return None
# Verify that all boards have the same version.
version = None
for board in boards:
package = determine_android_package(board)
if not package:
return None
cpv = portage_util.SplitCPV(package)
if not cpv:
raise NoAndroidVersionError(
'Android version could not be determined for %s' % board)
if not version:
version = cpv.version_no_rev
elif version != cpv.version_no_rev:
raise NoAndroidVersionError('Different Android versions (%s vs %s) for %s'
% (version, cpv.version_no_rev, boards))
return version
def determine_android_branch(board):
"""Returns the Android branch in use by the active container ebuild."""
try:
android_package = determine_android_package(board)
except cros_build_lib.RunCommandError:
raise NoAndroidBranchError(
'Android branch could not be determined for %s' % board)
if not android_package:
return None
ebuild_path = portage_util.FindEbuildForBoardPackage(android_package, board)
# We assume all targets pull from the same branch and that we always
# have an ARM_TARGET, ARM_USERDEBUG_TARGET, or an X86_USERDEBUG_TARGET.
targets = ['ARM_TARGET', 'ARM_USERDEBUG_TARGET', 'X86_USERDEBUG_TARGET']
ebuild_content = osutils.SourceEnvironment(ebuild_path, targets)
for target in targets:
if target in ebuild_content:
branch = re.search(r'(.*?)-linux-', ebuild_content[target])
if branch is not None:
return branch.group(1)
raise NoAndroidBranchError(
'Android branch could not be determined for %s (ebuild empty?)' % board)
def determine_android_target(board):
"""Returns the Android target in use by the active container ebuild."""
try:
android_package = determine_android_package(board)
except cros_build_lib.RunCommandError:
raise NoAndroidTargetError(
'Android Target could not be determined for %s' % board)
if not android_package:
return None
if android_package.startswith('chromeos-base/android-vm-'):
return 'bertha'
elif android_package.startswith('chromeos-base/android-container-'):
return 'cheets'
raise NoAndroidTargetError(
'Android Target cannot be determined for the package: %s' %
android_package)
def determine_platform_version():
"""Returns the platform version from the source root."""
# Platform version is something like '12575.0.0'.
version = manifest_version.VersionInfo.from_repo(constants.SOURCE_ROOT)
return version.VersionString()
def determine_milestone_version():
"""Returns the platform version from the source root."""
# Milestone version is something like '79'.
version = manifest_version.VersionInfo.from_repo(constants.SOURCE_ROOT)
return version.chrome_branch
def determine_full_version():
"""Returns the full version from the source root."""
# Full version is something like 'R79-12575.0.0'.
milestone_version = determine_milestone_version()
platform_version = determine_platform_version()
full_version = ('R%s-%s' % (milestone_version, platform_version))
return full_version
FirmwareVersions = collections.namedtuple(
'FirmwareVersions', ['model', 'main', 'main_rw', 'ec', 'ec_rw'])
def get_firmware_versions(build_target):
"""Extract version information from the firmware updater, if one exists.
Args:
build_target (build_target_lib.BuildTarget): The build target.
Returns:
A FirmwareVersions namedtuple instance.
Each element will either be set to the string output by the firmware
updater shellball, or None if there is no firmware updater.
"""
cros_build_lib.AssertInsideChroot()
cmd_result = _get_firmware_version_cmd_result(build_target)
if cmd_result:
return _find_firmware_versions(cmd_result)
else:
return FirmwareVersions(None, None, None, None, None)
def _get_firmware_version_cmd_result(build_target):
"""Gets the raw result output of the firmware updater version command.
Args:
build_target (build_target_lib.BuildTarget): The build target.
Returns:
Command execution result.
"""
updater = os.path.join(build_target.root,
'usr/sbin/chromeos-firmwareupdate')
logging.info('Calling updater %s', updater)
# Call the updater using the chroot-based path.
return cros_build_lib.run([updater, '-V'],
capture_output=True, log_output=True,
encoding='utf-8').stdout
def _find_firmware_versions(cmd_output):
"""Finds firmware version output via regex matches against the cmd_output.
Args:
cmd_output: The raw output to search against.
Returns:
FirmwareVersions namedtuple with results.
Each element will either be set to the string output by the firmware
updater shellball, or None if there is no match.
"""
# Sometimes a firmware bundle includes a special combination of RO+RW
# firmware. In this case, the RW firmware version is indicated with a "(RW)
# version" field. In other cases, the "(RW) version" field is not present.
# Therefore, search for the "(RW)" fields first and if they aren't present,
# fallback to the other format. e.g. just "BIOS version:".
# TODO(mmortensen): Use JSON once the firmware updater supports it.
main = None
main_rw = None
ec = None
ec_rw = None
model = None
match = re.search(r'BIOS version:\s*(?P<version>.*)', cmd_output)
if match:
main = match.group('version')
match = re.search(r'BIOS \(RW\) version:\s*(?P<version>.*)', cmd_output)
if match:
main_rw = match.group('version')
match = re.search(r'EC version:\s*(?P<version>.*)', cmd_output)
if match:
ec = match.group('version')
match = re.search(r'EC \(RW\) version:\s*(?P<version>.*)', cmd_output)
if match:
ec_rw = match.group('version')
match = re.search(r'Model:\s*(?P<model>.*)', cmd_output)
if match:
model = match.group('model')
return FirmwareVersions(model, main, main_rw, ec, ec_rw)
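# Illustrative sketch (hypothetical updater output, not taken from the source):
#   sample = 'BIOS version: Google_Board.1.1.1\nEC version: board_v1.1.1\nModel: board'
#   print(_find_firmware_versions(sample))
#   # -> FirmwareVersions(model='board', main='Google_Board.1.1.1', main_rw=None,
#   #                     ec='board_v1.1.1', ec_rw=None)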
MainEcFirmwareVersions = collections.namedtuple(
'MainEcFirmwareVersions', ['main_fw_version', 'ec_fw_version'])
def determine_firmware_versions(build_target):
"""Returns a namedtuple with main and ec firmware versions.
Args:
build_target (build_target_lib.BuildTarget): The build target.
Returns:
MainEcFirmwareVersions namedtuple with results.
"""
fw_versions = get_firmware_versions(build_target)
main_fw_version = fw_versions.main_rw or fw_versions.main
ec_fw_version = fw_versions.ec_rw or fw_versions.ec
return MainEcFirmwareVersions(main_fw_version, ec_fw_version)
|
#
#
# Copyright (C) 2006, 2007, 2010, 2011 Google Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA.
"""Module encapsulating ssh functionality.
"""
import os
import logging
from ganeti import utils
from ganeti import errors
from ganeti import constants
from ganeti import netutils
from ganeti import pathutils
from ganeti import vcluster
from ganeti import compat
def GetUserFiles(user, mkdir=False, dircheck=True, kind=constants.SSHK_DSA,
_homedir_fn=None):
"""Return the paths of a user's SSH files.
@type user: string
@param user: Username
@type mkdir: bool
@param mkdir: Whether to create ".ssh" directory if it doesn't exist
@type dircheck: bool
@param dircheck: Whether to check if ".ssh" directory exists
@type kind: string
@param kind: One of L{constants.SSHK_ALL}
@rtype: tuple; (string, string, string)
@return: Tuple containing three file system paths; the private SSH key file,
the public SSH key file and the user's C{authorized_keys} file
@raise errors.OpExecError: When home directory of the user can not be
determined
@raise errors.OpExecError: Regardless of the C{mkdir} parameters, this
exception is raised if C{~$user/.ssh} is not a directory and C{dircheck}
is set to C{True}
"""
if _homedir_fn is None:
_homedir_fn = utils.GetHomeDir
user_dir = _homedir_fn(user)
if not user_dir:
raise errors.OpExecError("Cannot resolve home of user '%s'" % user)
if kind == constants.SSHK_DSA:
suffix = "dsa"
elif kind == constants.SSHK_RSA:
suffix = "rsa"
else:
raise errors.ProgrammerError("Unknown SSH key kind '%s'" % kind)
ssh_dir = utils.PathJoin(user_dir, ".ssh")
if mkdir:
utils.EnsureDirs([(ssh_dir, constants.SECURE_DIR_MODE)])
elif dircheck and not os.path.isdir(ssh_dir):
raise errors.OpExecError("Path %s is not a directory" % ssh_dir)
return [utils.PathJoin(ssh_dir, base)
for base in ["id_%s" % suffix, "id_%s.pub" % suffix,
"authorized_keys"]]
def GetAllUserFiles(user, mkdir=False, dircheck=True, _homedir_fn=None):
"""Wrapper over L{GetUserFiles} to retrieve files for all SSH key types.
See L{GetUserFiles} for details.
@rtype: tuple; (string, dict with string as key, tuple of (string, string) as
value)
"""
helper = compat.partial(GetUserFiles, user, mkdir=mkdir, dircheck=dircheck,
_homedir_fn=_homedir_fn)
result = [(kind, helper(kind=kind)) for kind in constants.SSHK_ALL]
authorized_keys = [i for (_, (_, _, i)) in result]
assert len(frozenset(authorized_keys)) == 1, \
"Different paths for authorized_keys were returned"
return (authorized_keys[0],
dict((kind, (privkey, pubkey))
for (kind, (privkey, pubkey, _)) in result))
class SshRunner:
"""Wrapper for SSH commands.
"""
def __init__(self, cluster_name, ipv6=False):
"""Initializes this class.
@type cluster_name: str
@param cluster_name: name of the cluster
@type ipv6: bool
@param ipv6: If true, force ssh to use IPv6 addresses only
"""
self.cluster_name = cluster_name
self.ipv6 = ipv6
def _BuildSshOptions(self, batch, ask_key, use_cluster_key,
strict_host_check, private_key=None, quiet=True):
"""Builds a list with needed SSH options.
@param batch: same as ssh's batch option
@param ask_key: allows ssh to ask for key confirmation; this
parameter conflicts with the batch one
@param use_cluster_key: if True, use the cluster name as the
HostKeyAlias name
@param strict_host_check: this makes the host key checking strict
@param private_key: use this private key instead of the default
@param quiet: whether to enable -q to ssh
@rtype: list
@return: the list of options ready to use in L{utils.process.RunCmd}
"""
options = [
"-oEscapeChar=none",
"-oHashKnownHosts=no",
"-oGlobalKnownHostsFile=%s" % pathutils.SSH_KNOWN_HOSTS_FILE,
"-oUserKnownHostsFile=/dev/null",
"-oCheckHostIp=no",
]
if use_cluster_key:
options.append("-oHostKeyAlias=%s" % self.cluster_name)
if quiet:
options.append("-q")
if private_key:
options.append("-i%s" % private_key)
# TODO: Too many boolean options, maybe convert them to more descriptive
# constants.
# Note: ask_key conflicts with batch mode
if batch:
if ask_key:
raise errors.ProgrammerError("SSH call requested conflicting options")
options.append("-oBatchMode=yes")
if strict_host_check:
options.append("-oStrictHostKeyChecking=yes")
else:
options.append("-oStrictHostKeyChecking=no")
else:
# non-batch mode
if ask_key:
options.append("-oStrictHostKeyChecking=ask")
elif strict_host_check:
options.append("-oStrictHostKeyChecking=yes")
else:
options.append("-oStrictHostKeyChecking=no")
if self.ipv6:
options.append("-6")
else:
options.append("-4")
return options
def BuildCmd(self, hostname, user, command, batch=True, ask_key=False,
tty=False, use_cluster_key=True, strict_host_check=True,
private_key=None, quiet=True):
"""Build an ssh command to execute a command on a remote node.
@param hostname: the target host, string
@param user: user to auth as
@param command: the command
@param batch: if true, ssh will run in batch mode with no prompting
@param ask_key: if true, ssh will run with
StrictHostKeyChecking=ask, so that we can connect to an
unknown host (not valid in batch mode)
@param use_cluster_key: whether to expect and use the
cluster-global SSH key
@param strict_host_check: whether to check the host's SSH key at all
@param private_key: use this private key instead of the default
@param quiet: whether to enable -q to ssh
@return: the ssh call to run 'command' on the remote host.
"""
argv = [constants.SSH]
argv.extend(self._BuildSshOptions(batch, ask_key, use_cluster_key,
strict_host_check, private_key,
quiet=quiet))
if tty:
argv.extend(["-t", "-t"])
argv.append("%s@%s" % (user, hostname))
# Insert variables for virtual nodes
argv.extend("export %s=%s;" %
(utils.ShellQuote(name), utils.ShellQuote(value))
for (name, value) in
vcluster.EnvironmentForHost(hostname).items())
argv.append(command)
return argv
def Run(self, *args, **kwargs):
"""Runs a command on a remote node.
This method has the same return value as `utils.RunCmd()`, which it
uses to launch ssh.
Args: see SshRunner.BuildCmd.
@rtype: L{utils.process.RunResult}
@return: the result as from L{utils.process.RunCmd()}
"""
return utils.RunCmd(self.BuildCmd(*args, **kwargs))
def CopyFileToNode(self, node, filename):
"""Copy a file to another node with scp.
@param node: node in the cluster
@param filename: absolute pathname of a local file
@rtype: boolean
@return: the success of the operation
"""
if not os.path.isabs(filename):
logging.error("File %s must be an absolute path", filename)
return False
if not os.path.isfile(filename):
logging.error("File %s does not exist", filename)
return False
command = [constants.SCP, "-p"]
command.extend(self._BuildSshOptions(True, False, True, True))
command.append(filename)
if netutils.IP6Address.IsValid(node):
node = netutils.FormatAddress((node, None))
command.append("%s:%s" % (node, vcluster.ExchangeNodeRoot(node, filename)))
result = utils.RunCmd(command)
if result.failed:
logging.error("Copy to node %s failed (%s) error '%s',"
" command was '%s'",
node, result.fail_reason, result.output, result.cmd)
return not result.failed
def VerifyNodeHostname(self, node):
"""Verify hostname consistency via SSH.
    This function connects via ssh to a node and compares the hostname
    reported by the node to the name we have (the one that we
    connected to).
    This is used to detect problems in ssh known_hosts files
    (conflicting known hosts) and inconsistencies between DNS/hosts
    entries and local machine names.
@param node: nodename of a host to check; can be short or
full qualified hostname
@return: (success, detail), where:
- success: True/False
- detail: string with details
"""
cmd = ("if test -z \"$GANETI_HOSTNAME\"; then"
" hostname --fqdn;"
"else"
" echo \"$GANETI_HOSTNAME\";"
"fi")
retval = self.Run(node, constants.SSH_LOGIN_USER, cmd, quiet=False)
if retval.failed:
msg = "ssh problem"
output = retval.output
if output:
msg += ": %s" % output
else:
msg += ": %s (no output)" % retval.fail_reason
logging.error("Command %s failed: %s", retval.cmd, msg)
return False, msg
remotehostname = retval.stdout.strip()
if not remotehostname or remotehostname != node:
if node.startswith(remotehostname + "."):
msg = "hostname not FQDN"
else:
msg = "hostname mismatch"
return False, ("%s: expected %s but got %s" %
(msg, node, remotehostname))
return True, "host matches"
def WriteKnownHostsFile(cfg, file_name):
"""Writes the cluster-wide equally known_hosts file.
"""
data = ""
if cfg.GetRsaHostKey():
data += "%s ssh-rsa %s\n" % (cfg.GetClusterName(), cfg.GetRsaHostKey())
if cfg.GetDsaHostKey():
data += "%s ssh-dss %s\n" % (cfg.GetClusterName(), cfg.GetDsaHostKey())
utils.WriteFile(file_name, mode=0600, data=data)
|
# coding=utf-8
#
# Copyright 2018 F5 Networks Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Directory: cm module: failover-status.
REST URI
``https://localhost/mgmt/tm/cm/failover-status``
GUI Path
``XXX``
REST Kind
``tm:cm:failover-status:*``
"""
from f5.bigip.resource import UnnamedResource
class Failover_Status(UnnamedResource):
"""BIG-IP® cluster resource"""
def __init__(self, cm):
super(Failover_Status, self).__init__(cm)
self._meta_data['required_json_kind'] =\
"tm:cm:failover-status:failover-statusstats"
|
from os import environ as env
import unittest
from .wpull import WpullArgs
from seesaw.item import Item
# taken from pipeline/pipeline.py
if 'WARC_MAX_SIZE' in env:
WARC_MAX_SIZE = env['WARC_MAX_SIZE']
else:
WARC_MAX_SIZE = '5368709120'
def joined(args):
return str.join(' ', args)
class TestWpullArgs(unittest.TestCase):
def setUp(self):
self.item = {
'cookie_jar': '/foobar/cookies.txt',
'ident': 'abc123',
'item_dir': '/foobar',
'url': 'http://www.example.com',
'warc_file_base': '/foobar/warc'
}
self.args = WpullArgs(default_user_agent='Default/1',
wpull_exe='/bin/wpull',
youtube_dl_exe='/usr/bin/youtube-dl',
phantomjs_exe='/usr/bin/phantomjs',
finished_warcs_dir='/lost+found/',
warc_max_size=WARC_MAX_SIZE
)
def test_user_agent_can_be_set(self):
self.item['user_agent'] = 'Frobinator/20.1'
self.assertIn('-U Frobinator/20.1', joined(self.args.realize(self.item)))
def test_youtube_dl_activation(self):
self.item['youtube_dl'] = True
self.assertIn('--youtube-dl', joined(self.args.realize(self.item)))
def test_uses_default_user_agent(self):
self.assertIn('-U Default/1', joined(self.args.realize(self.item)))
def test_recursive_fetch_settings(self):
self.item['recursive'] = True
self.item['depth'] = 'inf'
cmdline = joined(self.args.realize(self.item))
self.assertIn('--recursive', cmdline)
self.assertIn('--level inf', cmdline)
def test_nonrecursive_fetch_settings(self):
self.item['recursive'] = False
cmdline = joined(self.args.realize(self.item))
self.assertNotIn('--recursive', cmdline)
self.assertNotIn('--level inf', cmdline)
def test_recursive_fetch_enables_linked_pages_and_requisites(self):
self.item['recursive'] = True
self.item['depth'] = 'inf'
cmdline = joined(self.args.realize(self.item))
self.assertIn('--span-hosts-allow page-requisites,linked-pages',
cmdline)
def test_recursive_fetch_with_no_offsite_links_enables_requisites(self):
self.item['recursive'] = True
self.item['depth'] = 'inf'
self.item['no_offsite_links'] = True
cmdline = joined(self.args.realize(self.item))
self.assertIn('--span-hosts-allow page-requisites', cmdline)
self.assertNotIn('linked-pages', cmdline)
def test_nonrecursive_fetch_enables_requisites(self):
self.item['recursive'] = False
cmdline = joined(self.args.realize(self.item))
self.assertIn('--span-hosts-allow page-requisites', cmdline)
self.assertNotIn('linked-pages', cmdline)
# vim:ts=4:sw=4:et:tw=78
|
import os
import sys
import textwrap
from typing import Optional
import requests
__version__ = "1.1"
class AkismetError(Exception):
"""
Base exception class for Akismet errors.
"""
pass
class UnknownArgumentError(AkismetError):
"""
Indicates an unknown argument was used as part of an API request.
"""
pass
class ProtocolError(AkismetError):
"""
Indicates an unexpected or non-standard response was received from
Akismet.
"""
pass
class ConfigurationError(AkismetError):
"""
Indicates an Akismet configuration error (config missing or invalid).
"""
pass
class APIKeyError(ConfigurationError):
"""
Indicates the supplied Akismet API key/URL are invalid.
"""
pass
class Akismet:
"""
A Python wrapper for the Akismet web API.
Two configuration parameters -- your Akismet API key and
registered URL -- are required; they can be passed when
instantiating, or set in the environment variables
PYTHON_AKISMET_API_KEY and PYTHON_AKISMET_BLOG_URL.
All the operations of the Akismet API are exposed here:
* verify_key
* comment_check
* submit_spam
* submit_ham
For full details of the Akismet API, see the Akismet documentation:
https://akismet.com/development/api/#detailed-docs
The verify_key operation will be automatically called for you as
this class is instantiated; ConfigurationError will be raised if
the configuration cannot be found or if the supplied key/URL are
invalid.
"""
COMMENT_CHECK_URL = "https://{}.rest.akismet.com/1.1/comment-check"
SUBMIT_HAM_URL = "https://{}.rest.akismet.com/1.1/submit-ham"
SUBMIT_SPAM_URL = "https://{}.rest.akismet.com/1.1/submit-spam"
VERIFY_KEY_URL = "https://rest.akismet.com/1.1/verify-key"
SUBMIT_SUCCESS_RESPONSE = "Thanks for making the web a better place."
OPTIONAL_KEYS = [
"blog_charset",
"blog_lang",
"comment_author",
"comment_author_email",
"comment_author_url",
"comment_content",
"comment_date_gmt",
"comment_post_modified_gmt",
"comment_type",
"is_test",
"permalink",
"recheck_reason",
"referrer",
"user_role",
]
user_agent_header = {
"User-Agent": "Python/{} | akismet.py/{}".format(
"{}.{}".format(*sys.version_info[:2]), __version__
)
}
def __init__(self, key: Optional[str] = None, blog_url: Optional[str] = None):
maybe_key = key if key is not None else os.getenv("PYTHON_AKISMET_API_KEY", "")
maybe_url = (
blog_url
if blog_url is not None
else os.getenv("PYTHON_AKISMET_BLOG_URL", "")
)
if maybe_key == "" or maybe_url == "":
raise ConfigurationError(
textwrap.dedent(
"""
Could not find full Akismet configuration.
Found API key: {}
Found blog URL: {}
""".format(
maybe_key, maybe_url
)
)
)
if not self.verify_key(maybe_key, maybe_url):
raise APIKeyError(
"Akismet key ({}, {}) is invalid.".format(maybe_key, maybe_url)
)
self.api_key = maybe_key
self.blog_url = maybe_url
def _api_request(
self, endpoint: str, user_ip: str, user_agent: str, **kwargs: str
) -> requests.Response:
"""
Makes a request to the Akismet API.
This method is used for all API calls except key verification,
since all endpoints other than key verification must
interpolate the API key into the URL and supply certain basic
data.
"""
unknown_args = [k for k in kwargs if k not in self.OPTIONAL_KEYS]
if unknown_args:
raise UnknownArgumentError(
"Unknown arguments while making request: {}.".format(
", ".join(unknown_args)
)
)
data = {
"blog": self.blog_url,
"user_ip": user_ip,
"user_agent": user_agent,
**kwargs,
}
return requests.post(
endpoint.format(self.api_key), data=data, headers=self.user_agent_header
)
def _submission_request(
self, operation: str, user_ip: str, user_agent: str, **kwargs: str
) -> bool:
"""
Submits spam or ham to the Akismet API.
"""
endpoint = {
"submit_spam": self.SUBMIT_SPAM_URL,
"submit_ham": self.SUBMIT_HAM_URL,
}[operation]
response = self._api_request(endpoint, user_ip, user_agent, **kwargs)
if response.text == self.SUBMIT_SUCCESS_RESPONSE:
return True
self._protocol_error(operation, response)
@classmethod
def _protocol_error(cls, operation: str, response: requests.Response) -> None:
"""
Raises an appropriate exception for unexpected API responses.
"""
raise ProtocolError(
textwrap.dedent(
"""
Received unexpected or non-standard response from Akismet API.
API operation was: {}
API response received was: {}
Debug header value was: {}
"""
).format(
operation, response.text, response.headers.get("X-akismet-debug-help")
)
)
@classmethod
def verify_key(cls, key: str, blog_url: str) -> bool:
"""
Verifies an Akismet API key and URL.
Returns True if the key and URL are valid, False otherwise.
"""
if not blog_url.startswith(("http://", "https://")):
raise ConfigurationError(
textwrap.dedent(
"""
Invalid site URL specified: {}
Akismet requires the full URL including the leading
'http://' or 'https://'.
"""
).format(blog_url)
)
response = requests.post(
cls.VERIFY_KEY_URL,
data={"key": key, "blog": blog_url},
headers=cls.user_agent_header,
)
if response.text == "valid":
return True
elif response.text == "invalid":
return False
else:
cls._protocol_error("verify_key", response)
def comment_check(self, user_ip: str, user_agent: str, **kwargs: str) -> bool:
"""
Checks a comment to determine whether it is spam.
The IP address and user-agent string of the remote user are
required. All other arguments documented by Akismet (other
than the PHP server information) are also optionally accepted.
See the Akismet API documentation for a full list:
https://akismet.com/development/api/#comment-check
Like the Akismet web API, returns True for a comment that is
spam, and False for a comment that is not spam.
"""
response = self._api_request(
self.COMMENT_CHECK_URL, user_ip, user_agent, **kwargs
)
if response.text == "true":
return True
elif response.text == "false":
return False
else:
self._protocol_error("comment_check", response)
def submit_spam(self, user_ip: str, user_agent: str, **kwargs: str) -> bool:
"""
Informs Akismet that a comment is spam.
The IP address and user-agent string of the remote user are
required. All other arguments documented by Akismet (other
than the PHP server information) are also optionally accepted.
See the Akismet API documentation for a full list:
https://akismet.com/development/api/#submit-spam
Returns True on success (the only expected response).
"""
return self._submission_request("submit_spam", user_ip, user_agent, **kwargs)
def submit_ham(self, user_ip: str, user_agent: str, **kwargs: str) -> bool:
"""
Informs Akismet that a comment is not spam.
The IP address and user-agent string of the remote user are
required. All other arguments documented by Akismet (other
than the PHP server information) are also optionally accepted.
See the Akismet API documentation for a full list:
https://akismet.com/development/api/#submit-ham
Returns True on success (the only expected response).
"""
return self._submission_request("submit_ham", user_ip, user_agent, **kwargs)
|
# encoding: utf-8
#
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
#
# Contact: Kyle Lahnakoski ([email protected])
#
from __future__ import absolute_import, division, unicode_literals
import os
import platform
import sys
from datetime import datetime
from mo_dots import Data, FlatList, coalesce, is_data, is_list, listwrap, unwraplist, wrap
from mo_future import PY3, is_text, text
from mo_logs import constants, exceptions, strings
from mo_logs.exceptions import Except, LogItem, suppress_exception
from mo_logs.strings import CR, indent
_Thread = None
if PY3:
STDOUT = sys.stdout.buffer
else:
STDOUT = sys.stdout
class Log(object):
"""
FOR STRUCTURED LOGGING AND EXCEPTION CHAINING
"""
trace = False
main_log = None
logging_multi = None
profiler = None # simple pypy-friendly profiler
error_mode = False # prevent error loops
@classmethod
def start(cls, settings=None):
"""
RUN ME FIRST TO SETUP THE THREADED LOGGING
http://victorlin.me/2012/08/good-logging-practice-in-python/
log - LIST OF PARAMETERS FOR LOGGER(S)
trace - SHOW MORE DETAILS IN EVERY LOG LINE (default False)
cprofile - True==ENABLE THE C-PROFILER THAT COMES WITH PYTHON (default False)
USE THE LONG FORM TO SET THE FILENAME {"enabled": True, "filename": "cprofile.tab"}
profile - True==ENABLE pyLibrary SIMPLE PROFILING (default False) (eg with Profiler("some description"):)
USE THE LONG FORM TO SET FILENAME {"enabled": True, "filename": "profile.tab"}
constants - UPDATE MODULE CONSTANTS AT STARTUP (PRIMARILY INTENDED TO CHANGE DEBUG STATE)
"""
global _Thread
if not settings:
return
settings = wrap(settings)
Log.stop()
cls.settings = settings
cls.trace = coalesce(settings.trace, False)
if cls.trace:
from mo_threads import Thread as _Thread
_ = _Thread
# ENABLE CPROFILE
if settings.cprofile is False:
settings.cprofile = {"enabled": False}
elif settings.cprofile is True:
if isinstance(settings.cprofile, bool):
settings.cprofile = {"enabled": True, "filename": "cprofile.tab"}
if settings.cprofile.enabled:
from mo_threads import profiles
profiles.enable_profilers(settings.cprofile.filename)
if settings.profile is True or (is_data(settings.profile) and settings.profile.enabled):
Log.error("REMOVED 2018-09-02, Activedata revision 3f30ff46f5971776f8ba18")
# from mo_logs import profiles
#
# if isinstance(settings.profile, bool):
# profiles.ON = True
# settings.profile = {"enabled": True, "filename": "profile.tab"}
#
# if settings.profile.enabled:
# profiles.ON = True
if settings.constants:
constants.set(settings.constants)
logs = coalesce(settings.log, settings.logs)
if logs:
cls.logging_multi = StructuredLogger_usingMulti()
for log in listwrap(logs):
Log.add_log(Log.new_instance(log))
from mo_logs.log_usingThread import StructuredLogger_usingThread
cls.main_log = StructuredLogger_usingThread(cls.logging_multi)
@classmethod
def stop(cls):
"""
DECONSTRUCTS ANY LOGGING, AND RETURNS TO DIRECT-TO-stdout LOGGING
        EXECUTING MULTIPLE TIMES IN A ROW IS SAFE, IT HAS NO NET EFFECT, IT STILL LOGS TO stdout
:return: NOTHING
"""
main_log, cls.main_log = cls.main_log, StructuredLogger_usingStream(STDOUT)
main_log.stop()
@classmethod
def new_instance(cls, settings):
settings = wrap(settings)
if settings["class"]:
if settings["class"].startswith("logging.handlers."):
from mo_logs.log_usingHandler import StructuredLogger_usingHandler
return StructuredLogger_usingHandler(settings)
else:
with suppress_exception:
from mo_logs.log_usingLogger import make_log_from_settings
return make_log_from_settings(settings)
# OH WELL :(
if settings.log_type == "logger":
from mo_logs.log_usingLogger import StructuredLogger_usingLogger
return StructuredLogger_usingLogger(settings)
if settings.log_type == "file" or settings.file:
return StructuredLogger_usingFile(settings.file)
if settings.log_type == "file" or settings.filename:
return StructuredLogger_usingFile(settings.filename)
if settings.log_type == "console":
from mo_logs.log_usingThreadedStream import StructuredLogger_usingThreadedStream
return StructuredLogger_usingThreadedStream(STDOUT)
if settings.log_type == "mozlog":
from mo_logs.log_usingMozLog import StructuredLogger_usingMozLog
return StructuredLogger_usingMozLog(STDOUT, coalesce(settings.app_name, settings.appname))
if settings.log_type == "stream" or settings.stream:
from mo_logs.log_usingThreadedStream import StructuredLogger_usingThreadedStream
return StructuredLogger_usingThreadedStream(settings.stream)
if settings.log_type == "elasticsearch" or settings.stream:
from mo_logs.log_usingElasticSearch import StructuredLogger_usingElasticSearch
return StructuredLogger_usingElasticSearch(settings)
if settings.log_type == "email":
from mo_logs.log_usingEmail import StructuredLogger_usingEmail
return StructuredLogger_usingEmail(settings)
if settings.log_type == "ses":
from mo_logs.log_usingSES import StructuredLogger_usingSES
return StructuredLogger_usingSES(settings)
if settings.log_type.lower() in ["nothing", "none", "null"]:
from mo_logs.log_usingNothing import StructuredLogger
return StructuredLogger()
Log.error("Log type of {{log_type|quote}} is not recognized", log_type=settings.log_type)
@classmethod
def add_log(cls, log):
cls.logging_multi.add_log(log)
@classmethod
def note(
cls,
template,
default_params={},
stack_depth=0,
log_context=None,
**more_params
):
"""
:param template: *string* human readable string with placeholders for parameters
:param default_params: *dict* parameters to fill in template
:param stack_depth: *int* how many calls you want popped off the stack to report the *true* caller
:param log_context: *dict* extra key:value pairs for your convenience
:param more_params: *any more parameters (which will overwrite default_params)
:return:
"""
timestamp = datetime.utcnow()
if not is_text(template):
Log.error("Log.note was expecting a unicode template")
Log._annotate(
LogItem(
context=exceptions.NOTE,
format=template,
template=template,
params=dict(default_params, **more_params)
),
timestamp,
stack_depth+1
)
@classmethod
def unexpected(
cls,
template,
default_params={},
cause=None,
stack_depth=0,
log_context=None,
**more_params
):
"""
:param template: *string* human readable string with placeholders for parameters
:param default_params: *dict* parameters to fill in template
:param cause: *Exception* for chaining
:param stack_depth: *int* how many calls you want popped off the stack to report the *true* caller
:param log_context: *dict* extra key:value pairs for your convenience
:param more_params: *any more parameters (which will overwrite default_params)
:return:
"""
timestamp = datetime.utcnow()
if not is_text(template):
Log.error("Log.warning was expecting a unicode template")
if isinstance(default_params, BaseException):
cause = default_params
default_params = {}
if "values" in more_params.keys():
Log.error("Can not handle a logging parameter by name `values`")
params = Data(dict(default_params, **more_params))
cause = unwraplist([Except.wrap(c) for c in listwrap(cause)])
trace = exceptions.get_stacktrace(stack_depth + 1)
e = Except(exceptions.UNEXPECTED, template=template, params=params, cause=cause, trace=trace)
Log._annotate(
e,
timestamp,
stack_depth+1
)
@classmethod
def alarm(
cls,
template,
default_params={},
stack_depth=0,
log_context=None,
**more_params
):
"""
:param template: *string* human readable string with placeholders for parameters
:param default_params: *dict* parameters to fill in template
:param stack_depth: *int* how many calls you want popped off the stack to report the *true* caller
:param log_context: *dict* extra key:value pairs for your convenience
:param more_params: more parameters (which will overwrite default_params)
:return:
"""
timestamp = datetime.utcnow()
format = ("*" * 80) + CR + indent(template, prefix="** ").strip() + CR + ("*" * 80)
Log._annotate(
LogItem(
context=exceptions.ALARM,
format=format,
template=template,
params=dict(default_params, **more_params)
),
timestamp,
stack_depth + 1
)
alert = alarm
@classmethod
def warning(
cls,
template,
default_params={},
cause=None,
stack_depth=0,
log_context=None,
**more_params
):
"""
:param template: *string* human readable string with placeholders for parameters
:param default_params: *dict* parameters to fill in template
:param cause: *Exception* for chaining
:param stack_depth: *int* how many calls you want popped off the stack to report the *true* caller
:param log_context: *dict* extra key:value pairs for your convenience
:param more_params: *any more parameters (which will overwrite default_params)
:return:
"""
timestamp = datetime.utcnow()
if not is_text(template):
Log.error("Log.warning was expecting a unicode template")
if isinstance(default_params, BaseException):
cause = default_params
default_params = {}
if "values" in more_params.keys():
Log.error("Can not handle a logging parameter by name `values`")
params = Data(dict(default_params, **more_params))
cause = unwraplist([Except.wrap(c) for c in listwrap(cause)])
trace = exceptions.get_stacktrace(stack_depth + 1)
e = Except(exceptions.WARNING, template=template, params=params, cause=cause, trace=trace)
Log._annotate(
e,
timestamp,
stack_depth+1
)
@classmethod
def error(
cls,
template, # human readable template
default_params={}, # parameters for template
        cause=None, # possible cause
stack_depth=0,
**more_params
):
"""
raise an exception with a trace for the cause too
:param template: *string* human readable string with placeholders for parameters
:param default_params: *dict* parameters to fill in template
:param cause: *Exception* for chaining
:param stack_depth: *int* how many calls you want popped off the stack to report the *true* caller
:param log_context: *dict* extra key:value pairs for your convenience
:param more_params: *any more parameters (which will overwrite default_params)
:return:
"""
if not is_text(template):
sys.stderr.write(str("Log.error was expecting a unicode template"))
Log.error("Log.error was expecting a unicode template")
if default_params and isinstance(listwrap(default_params)[0], BaseException):
cause = default_params
default_params = {}
params = Data(dict(default_params, **more_params))
add_to_trace = False
if cause == None:
causes = None
elif is_list(cause):
causes = []
for c in listwrap(cause): # CAN NOT USE LIST-COMPREHENSION IN PYTHON3 (EXTRA STACK DEPTH FROM THE IN-LINED GENERATOR)
causes.append(Except.wrap(c, stack_depth=1))
causes = FlatList(causes)
elif isinstance(cause, BaseException):
causes = Except.wrap(cause, stack_depth=1)
else:
causes = None
Log.error("can only accept Exception, or list of exceptions")
trace = exceptions.get_stacktrace(stack_depth + 1)
if add_to_trace:
cause[0].trace.extend(trace[1:])
e = Except(context=exceptions.ERROR, template=template, params=params, cause=causes, trace=trace)
raise_from_none(e)
@classmethod
def _annotate(
cls,
item,
timestamp,
stack_depth
):
"""
        :param item: A LogItem, THE TYPE OF MESSAGE
:param stack_depth: FOR TRACKING WHAT LINE THIS CAME FROM
:return:
"""
item.timestamp = timestamp
item.machine = machine_metadata
item.template = strings.limit(item.template, 10000)
item.format = strings.limit(item.format, 10000)
if item.format == None:
format = text(item)
else:
format = item.format.replace("{{", "{{params.")
if not format.startswith(CR) and format.find(CR) > -1:
format = CR + format
if cls.trace:
log_format = item.format = "{{machine.name}} (pid {{machine.pid}}) - {{timestamp|datetime}} - {{thread.name}} - \"{{location.file}}:{{location.line}}\" - ({{location.method}}) - " + format
f = sys._getframe(stack_depth + 1)
item.location = {
"line": f.f_lineno,
"file": text(f.f_code.co_filename),
"method": text(f.f_code.co_name)
}
thread = _Thread.current()
item.thread = {"name": thread.name, "id": thread.id}
else:
log_format = item.format = "{{timestamp|datetime}} - " + format
cls.main_log.write(log_format, item.__data__())
def write(self):
raise NotImplementedError
def _same_frame(frameA, frameB):
return (frameA.line, frameA.file) == (frameB.line, frameB.file)
# GET THE MACHINE METADATA
machine_metadata = wrap({
"pid": os.getpid(),
"python": text(platform.python_implementation()),
"os": text(platform.system() + platform.release()).strip(),
"name": text(platform.node())
})
def raise_from_none(e):
raise e
if PY3:
exec("def raise_from_none(e):\n raise e from None\n", globals(), locals())
from mo_logs.log_usingFile import StructuredLogger_usingFile
from mo_logs.log_usingMulti import StructuredLogger_usingMulti
from mo_logs.log_usingStream import StructuredLogger_usingStream
if not Log.main_log:
Log.main_log = StructuredLogger_usingStream(STDOUT)
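# --- Illustrative usage sketch (not part of the original module) -------------
# Shows the settings shape documented in Log.start(); the console log entry,
# port value, and template text are assumptions for demonstration, and the
# threaded main logger requires the optional mo_threads dependency.
if __name__ == "__main__":  # pragma: no cover
    Log.start({
        "trace": False,
        "cprofile": False,
        "log": [{"log_type": "console"}],
    })
    Log.note("service started on port {{port}}", port=8080)
    Log.stop()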
|
import json, logging
from collections import OrderedDict
from functools import wraps
from django.conf import settings
from django.contrib.admin.views.decorators import staff_member_required
from django.contrib.staticfiles.storage import staticfiles_storage
from django.core.exceptions import ValidationError
from django.core.urlresolvers import reverse
from django.http import JsonResponse, HttpResponseRedirect, Http404
from django.shortcuts import get_object_or_404, render
from django.template import Context, Template
from django.views.decorators.csrf import csrf_exempt
from awl.decorators import post_required
from wrench.utils import dynamic_load
from .forms import SurveyForm
from .models import (EditNotAllowedException, Survey, SurveyVersion, Question,
AnswerGroup)
logger = logging.getLogger(__name__)
# ============================================================================
# Security Decorator
# ============================================================================
def permission_hook(target):
@wraps(target)
def wrapper(*args, **kwargs):
if hasattr(settings, 'DFORM_PERMISSION_HOOK'):
fn = dynamic_load(settings.DFORM_PERMISSION_HOOK)
fn(target.__name__, *args, **kwargs)
# everything verified, run the view
return target(*args, **kwargs)
return wrapper
# ============================================================================
# Admin Methods
# ============================================================================
@staff_member_required
@post_required(['delta'])
def survey_delta(request, survey_version_id):
delta = json.loads(request.POST['delta'], object_pairs_hook=OrderedDict)
if survey_version_id == '0':
# new survey
survey = Survey.factory(name=delta['name'])
version = survey.latest_version
else:
version = get_object_or_404(SurveyVersion, id=survey_version_id)
response = {
'success':True,
}
try:
version.replace_from_dict(delta)
except ValidationError as ve:
response['success'] = False
response['errors'] = ve.params
except EditNotAllowedException:
raise Http404('Survey %s is not editable' % version.survey)
except Question.DoesNotExist as dne:
raise Http404('Bad question id: %s' % dne)
# issue a 200 response
return JsonResponse(response)
@staff_member_required
def survey_editor(request, survey_version_id):
if survey_version_id == '0':
# new survey
survey = Survey.factory(name='New Survey')
version = survey.latest_version
else:
version = get_object_or_404(SurveyVersion, id=survey_version_id)
admin_link = reverse('admin:index')
return_url = request.META.get('HTTP_REFERER', admin_link)
save_url = reverse('dform-survey-delta', args=(version.id, ))
data = {
'survey_version':json.dumps(version.to_dict()),
'save_url':save_url,
'return_url':return_url,
}
return render(request, 'dform/edit_survey.html', data)
@staff_member_required
def new_version(request, survey_id):
survey = get_object_or_404(Survey, id=survey_id)
survey.new_version()
admin_link = reverse('admin:index')
return_url = request.META.get('HTTP_REFERER', admin_link)
return HttpResponseRedirect(return_url)
@staff_member_required
def survey_links(request, survey_version_id):
"""Shows links and embedding code for pointing to this survey on an HTML
page.
"""
version = get_object_or_404(SurveyVersion, id=survey_version_id)
survey_url = request.build_absolute_uri(
reverse('dform-survey', args=(version.id, version.survey.token)))
embedded_survey_url = request.build_absolute_uri(
reverse('dform-embedded-survey', args=(version.id,
version.survey.token)))
survey_latest_url = request.build_absolute_uri(
reverse('dform-survey-latest', args=(version.survey.id,
version.survey.token)))
embedded_survey_latest_url = request.build_absolute_uri(
reverse('dform-embedded-survey-latest', args=(version.survey.id,
version.survey.token)))
pym_url = request.build_absolute_uri(
staticfiles_storage.url('dform/js/pym.min.js'))
data = {
'title':'Links for: %s' % version.survey.name,
'survey_url':survey_url,
'embedded_survey_url':embedded_survey_url,
'survey_latest_url':survey_latest_url,
'embedded_survey_latest_url':embedded_survey_latest_url,
'pym_url':pym_url,
'version':version,
}
return render(request, 'dform/links_survey.html', data)
@staff_member_required
def answer_links(request, answer_group_id):
"""Shows links and embedding code for pointing to this AnswerGroup on an
HTML page so a user could edit their data.
"""
answer_group = get_object_or_404(AnswerGroup, id=answer_group_id)
survey_url = request.build_absolute_uri(
reverse('dform-survey-with-answers', args=(
answer_group.survey_version.id,
answer_group.survey_version.survey.token, answer_group.id,
answer_group.token)))
data = {
'title':'Answer Links for: %s' % (
answer_group.survey_version.survey.name),
'survey_url':survey_url,
}
return render(request, 'dform/links_answers.html', data)
# ============================================================================
# Form Views
# ============================================================================
@permission_hook
def sample_survey(request, survey_version_id):
"""A view for displaying a sample version of a form. The submit mechanism
does nothing.
URL name reference for this view: ``dform-sample-survey``
:param survey_version_id:
Id of a :class:`SurveyVersion` object
"""
version = get_object_or_404(SurveyVersion, id=survey_version_id)
form = SurveyForm(survey_version=version)
data = {
'title':'Sample: %s' % version.survey.name,
'survey_version':version,
'form':form,
'submit_action':'',
}
return render(request, 'dform/survey.html', data)
# -------------------
def _survey_view(request, survey_version_id, token, is_embedded):
"""General view code for handling a survey, called by survey() or
embedded_survey()
"""
version = get_object_or_404(SurveyVersion, id=survey_version_id,
survey__token=token)
if request.method == 'POST':
form = SurveyForm(request.POST, survey_version=version,
ip_address=request.META['REMOTE_ADDR'])
if form.is_valid():
form.save()
name = getattr(settings, 'DFORM_SUBMIT_HOOK', '')
if name:
fn = dynamic_load(name)
fn(form)
return HttpResponseRedirect(version.on_success())
else:
form = SurveyForm(survey_version=version)
try:
# check if we have an alternate submit mechanism defined
template = Template(settings.DFORM_SURVEY_SUBMIT)
context = Context({'survey_version':version})
submit_action = template.render(context)
except AttributeError:
# use our default submit url
name = 'dform-embedded-survey' if is_embedded else 'dform-survey'
submit_action = reverse(name, args=(version.id, version.survey.token))
data = {
'title':version.survey.name,
'survey_version':version,
'form':form,
'is_embedded':is_embedded,
'submit_action':submit_action,
}
return render(request, 'dform/survey.html', data)
@permission_hook
def survey(request, survey_version_id, token):
"""View for submitting the answers to a survey version.
URL name reference for this view: ``dform-survey``
"""
return _survey_view(request, survey_version_id, token, False)
@permission_hook
@csrf_exempt
def embedded_survey(request, survey_version_id, token):
"""View for submitting the answers to a survey version with additional
Javascript handling for being embedded in an iframe.
    URL name reference for this view: ``dform-embedded-survey``
"""
return _survey_view(request, survey_version_id, token, True)
@permission_hook
def survey_latest(request, survey_id, token):
"""View for submitting the answers to the latest version of a survey.
    URL name reference for this view: ``dform-survey-latest``
"""
survey = get_object_or_404(Survey, id=survey_id, token=token)
return _survey_view(request, survey.latest_version.id, token, False)
@permission_hook
@csrf_exempt
def embedded_survey_latest(request, survey_id, token):
"""View for submitting the answers to the latest version of a survey with
additional Javascript handling for being embedded in an iframe.
    URL name reference for this view: ``dform-embedded-survey-latest``
"""
survey = get_object_or_404(Survey, id=survey_id, token=token)
return _survey_view(request, survey.latest_version.id, token, True)
#------------------
def _survey_with_answers_view(request, survey_version_id, survey_token,
answer_group_id, answer_token, is_embedded):
"""General view code for editing answer for a survey. Called by
survey_with_answers() and embedded_survey_with_answers()
"""
version = get_object_or_404(SurveyVersion, id=survey_version_id,
survey__token=survey_token)
answer_group = get_object_or_404(AnswerGroup, id=answer_group_id,
token=answer_token)
if request.method == 'POST':
form = SurveyForm(request.POST, survey_version=version,
answer_group=answer_group)
if form.is_valid():
form.save()
name = getattr(settings, 'DFORM_EDIT_HOOK', '')
if name:
fn = dynamic_load(name)
fn(form)
return HttpResponseRedirect(version.on_success())
else:
form = SurveyForm(survey_version=version, answer_group=answer_group)
try:
# check for alternate survey edit handler
template = Template(settings.DFORM_SURVEY_WITH_ANSWERS_SUBMIT)
context = Context({
'survey_version':version,
'answer_group':answer_group
})
submit_action = template.render(context)
except AttributeError:
# use default survey edit handler
            name = 'dform-embedded-survey-with-answers' if is_embedded else \
                'dform-survey-with-answers'
submit_action = reverse(name, args=(version.id, version.survey.token,
answer_group.id, answer_group.token))
data = {
'title':version.survey.name,
'survey_version':version,
'answer_group':answer_group,
'form':form,
'is_embedded':is_embedded,
'submit_action':submit_action,
}
return render(request, 'dform/survey.html', data)
@permission_hook
def survey_with_answers(request, survey_version_id, survey_token,
answer_group_id, answer_token):
"""View for viewing and changing the answers to a survey that already has
answers.
URL name reference for this view: ``dform-survey-with-answers``
"""
return _survey_with_answers_view(request, survey_version_id, survey_token,
answer_group_id, answer_token, False)
@permission_hook
@csrf_exempt
def embedded_survey_with_answers(request, survey_version_id, survey_token,
answer_group_id, answer_token):
"""View for viewing and changing the answers to a survey that already has
answers with additional Javascript for being handled in an iframe.
URL name reference for this view: ``dform-survey-with-answers``
"""
return _survey_with_answers_view(request, survey_version_id, survey_token,
answer_group_id, answer_token, True)
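# ============================================================================
# Illustrative hook sketch (assumption, not part of this app): a permission
# hook referenced by settings.DFORM_PERMISSION_HOOK. permission_hook() above
# calls it with the view name followed by the view's own arguments (request
# first); raising an exception such as Http404 denies access.
#
#     # settings.py
#     DFORM_PERMISSION_HOOK = 'myproject.hooks.dform_permission'
#
#     # myproject/hooks.py
#     from django.http import Http404
#
#     def dform_permission(view_name, request, *args, **kwargs):
#         if view_name.startswith('survey') and not request.user.is_staff:
#             raise Http404('Not allowed')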
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2008 Tiny SPRL (<http://tiny.be>). All Rights Reserved
# $Id$
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2008 Netquatro, C.A. (<http://openerp.netquatro.com>). All Rights Reserved
# $Id$
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
"name" : "Venezuela - Generic Test",
"version" : "1.0",
"author" : ['OpenERP SA', "Vauxoo"],
"category" : "Localization/Account Charts",
"description":
'''
This is the module to manage the accounting chart for Venezuela in OpenERP.
===========================================================================
This module manages a sample chart of accounts for Venezuela.
''',
"depends" : ["account", "base_vat", "account_chart"],
"demo_xml" : [],
"update_xml" : ['account_tax_code.xml',"account_chart.xml",
'account_tax.xml','l10n_chart_ve_wizard.xml'],
"auto_install": False,
"installable": True,
'images': ['images/config_chart_l10n_ve.jpeg','images/l10n_ve_chart.jpeg'],
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'CommandConfirmation'
db.create_table(u'mail_commandconfirmation', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('confirmation_key', self.gf('django.db.models.fields.CharField')(unique=True, max_length=40)),
('date_created', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
('commands', self.gf('django.db.models.fields.TextField')()),
))
db.send_create_signal(u'mail', ['CommandConfirmation'])
# Adding model 'BounceStats'
db.create_table(u'mail_bouncestats', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('email_user', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['core.EmailUser'])),
('mails_sent', self.gf('django.db.models.fields.IntegerField')(default=0)),
('mails_bounced', self.gf('django.db.models.fields.IntegerField')(default=0)),
('date', self.gf('django.db.models.fields.DateField')()),
))
db.send_create_signal(u'mail', ['BounceStats'])
# Adding unique constraint on 'BounceStats', fields ['email_user', 'date']
db.create_unique(u'mail_bouncestats', ['email_user_id', 'date'])
def backwards(self, orm):
# Removing unique constraint on 'BounceStats', fields ['email_user', 'date']
db.delete_unique(u'mail_bouncestats', ['email_user_id', 'date'])
# Deleting model 'CommandConfirmation'
db.delete_table(u'mail_commandconfirmation')
# Deleting model 'BounceStats'
db.delete_table(u'mail_bouncestats')
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'core.emailuser': {
'Meta': {'object_name': 'EmailUser'},
'default_keywords': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['core.Keyword']", 'symmetrical': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user_email': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['django_email_accounts.UserEmail']", 'unique': 'True'})
},
u'core.keyword': {
'Meta': {'object_name': 'Keyword'},
'default': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '50'})
},
u'django_email_accounts.user': {
'Meta': {'object_name': 'User'},
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'main_email': ('django.db.models.fields.EmailField', [], {'unique': 'True', 'max_length': '255'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'django_email_accounts.useremail': {
'Meta': {'object_name': 'UserEmail'},
'email': ('django.db.models.fields.EmailField', [], {'unique': 'True', 'max_length': '244'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'emails'", 'null': 'True', 'to': u"orm['django_email_accounts.User']"})
},
u'mail.bouncestats': {
'Meta': {'ordering': "[u'-date']", 'unique_together': "((u'email_user', u'date'),)", 'object_name': 'BounceStats'},
'date': ('django.db.models.fields.DateField', [], {}),
'email_user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['core.EmailUser']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'mails_bounced': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'mails_sent': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
u'mail.commandconfirmation': {
'Meta': {'object_name': 'CommandConfirmation'},
'commands': ('django.db.models.fields.TextField', [], {}),
'confirmation_key': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '40'}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
}
}
complete_apps = ['mail']
|
# This file is part of Beneath a Binary Sky.
# Copyright (C) 2016, Aidin Gharibnavaz <[email protected]>
#
# Beneath a Binary Sky is free software: you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# Beneath a Binary Sky is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Beneath a Binary Sky. If not, see
# <http://www.gnu.org/licenses/>.
import time
from actions.action import Action
from actions.exceptions import InvalidArgumentsError, RobotHaveNoWaterError
from world.world import World
from database.exceptions import LockAlreadyAquiredError
class WaterAction(Action):
def __init__(self):
super().__init__()
self._world = World()
def do_action(self, robot, args):
'''Waters the square robot stands on.
@param robot: Instance of `objects.robot.Robot'.
'''
if len(args) != 1:
raise InvalidArgumentsError("`water' action takes no arguments.")
if not robot.get_has_water():
raise RobotHaveNoWaterError("Robot does not carry water.")
try:
square = self._world.get_square(robot.get_location(), for_update=True)
except LockAlreadyAquiredError:
# Waiting a little, and trying one more time.
time.sleep(0.02)
square = self._world.get_square(robot.get_location(), for_update=True)
# Note: we don't raise an exception if there's no plant. A robot can waste its water.
plant = square.get_plant()
if plant is not None:
plant.set_water_level(100)
robot.set_honor(robot.get_honor() + 1)
robot.set_has_water(False)
|
#!/usr/bin/env python3
import os, logging, argparse, json, datetime
import requests
import dns.resolver
from bottle import route, request, response, redirect, hook, error, default_app, view, static_file, template
def set_content_type(fn):
def _return_type(*args, **kwargs):
if request.headers.get('Accept') == "application/json":
response.headers['Content-Type'] = 'application/json'
if request.headers.get('Accept') == "text/plain":
response.headers['Content-Type'] = 'text/plain'
if request.method != 'OPTIONS':
return fn(*args, **kwargs)
return _return_type
def enable_cors(fn):
def _enable_cors(*args, **kwargs):
response.headers['Access-Control-Allow-Origin'] = '*'
response.headers['Access-Control-Allow-Methods'] = 'GET, POST, PUT, OPTIONS'
response.headers['Access-Control-Allow-Headers'] = 'Origin, Accept, Content-Type, X-Requested-With, X-CSRF-Token'
if request.method != 'OPTIONS':
return fn(*args, **kwargs)
return _enable_cors
def resolveDomain(domain, recordType, args):
records = []
if args.doh:
try:
payload = {
'name': domain,
'type': recordType
}
data = requests.get("{}".format(args.resolver), params=payload)
for rec in data.json()['Answer']:
records.append(rec['data'])
except:
return records
return records
else:
try:
resolver = dns.resolver.Resolver()
resolver.nameservers = args.resolver.split(',')
if recordType in args.records.split(','):
lookup = resolver.resolve(domain, recordType)
for data in lookup:
if recordType in ['A', 'AAAA']:
records.append(data.address)
elif recordType in ['TXT']:
for rec in data.strings:
records.append(rec.decode("utf-8").replace('"', '').strip())
else:
records.append(str(data).replace('"', '').strip())
return records
except dns.resolver.NXDOMAIN:
return records
except dns.resolver.NoAnswer:
return records
except dns.exception.Timeout:
return records
except dns.resolver.NoNameservers:
return records
@error('404')
@error('403')
def returnError(code, msg, contentType="text/plain"):
response.status = int(code)
response.content_type = contentType
return template('error')
@route('/static/<filepath:path>')
def static(filepath):
return static_file(filepath, root='views/static')
@route('/servers')
def servers():
try:
response.content_type = 'text/plain'
return "\r\n".join(args.resolver.split(","))
except:
return "Unable to open servers file."
@route('/version')
def version():
try:
dirname, filename = os.path.split(os.path.abspath(__file__))
del filename
f = open(os.getenv('VERSION_PATH', dirname + '/.git/refs/heads/master'), 'r')
content = f.read()
response.content_type = 'text/plain'
return content
except:
return "Unable to open version file."
@route('/<record>')
def route_redirect(record):
return redirect("/{}/A".format(record))
@route('/<record>/<type>')
@route('/<record>/<type>.<ext>')
@set_content_type
@enable_cors
def loadRecord(record, type='A', ext='html'):
try:
if record == "":
raise ValueError
if not ext in ["html","txt", "text", "json"]:
raise ValueError
if not type.upper() in args.records.split(','):
raise ValueError
except ValueError:
return returnError(404, "Not Found", "text/html")
if ext in ["json"]:
response.content_type = 'application/json'
if ext in ["txt", "text"]:
response.content_type = 'text/plain'
# We make a request to get information
data = resolveDomain(record, type.upper(), args)
if response.content_type == 'application/json':
return json.dumps({
'results': {
'name': record,
'type': type.upper(),
'records': data,
}
})
elif response.content_type == "text/plain":
return "\r\n".join(data)
else:
return template('rec', {
'name': record,
'type': type.upper(),
'records': data,
'recTypes': args.records.split(',')
})
@route('/', ('GET', 'POST'))
def index():
if request.method == "POST":
recordName = request.forms.get('recordName', '')
recordType = request.forms.get('recordType', '')
if recordName != '' and recordType in args.records.split(','):
return redirect("/{}/{}".format(recordName, recordType))
else:
return returnError(404, "We were not able to figure out what you were asking for", "text/html")
return template("home", {
'recTypes': args.records.split(',')
})
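# Illustrative request examples (assumptions, not part of the app): with the
# routes above, a deployment would answer e.g.
#
#     GET /example.com/A         -> HTML page listing the A records
#     GET /example.com/TXT.json  -> {"results": {"name": ..., "type": "TXT", "records": [...]}}
#     GET /example.com/MX.txt    -> one record per line as text/plain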
if __name__ == '__main__':
parser = argparse.ArgumentParser()
# Server settings
parser.add_argument("-i", "--host", default=os.getenv('HOST', '127.0.0.1'), help="server ip")
parser.add_argument("-p", "--port", default=os.getenv('PORT', 5000), help="server port")
# Redis settings
parser.add_argument("--redis", default=os.getenv('REDIS', 'redis://localhost:6379/0'), help="redis connection string")
# Application settings
parser.add_argument("--doh", help="use DNS-over-HTTPS and treat --resolver as DNS-over-HTTPS capable (beta)", action="store_true")
parser.add_argument("--records", default=os.getenv('RECORDS', "A,AAAA,CAA,CNAME,DS,DNSKEY,MX,NS,NSEC,NSEC3,RRSIG,SOA,TXT"), help="supported records")
parser.add_argument("--resolver", default=os.getenv('RESOLVER', '8.8.8.8'), help="resolver address")
# Verbose mode
parser.add_argument("--verbose", "-v", help="increase output verbosity", action="store_true")
args = parser.parse_args()
if args.verbose:
logging.basicConfig(level=logging.DEBUG)
else:
logging.basicConfig(level=logging.INFO)
log = logging.getLogger(__name__)
try:
app = default_app()
app.run(host=args.host, port=args.port, server='tornado')
except:
log.error("Unable to start server on {}:{}".format(args.host, args.port))
|
from Quotes import Quote
from Command import Command
class Quote_Command(Command):
def __init__(self, config):
self.connection = config['connection']
self.event = config['event']
self.channel = config['channel']
pass
def resolve(self):
args = self.event.arguments[0].split()
# Don't let people skip last 10 (for voting!)
if not self.channel.quote_last_ten:
#Check if they asked for a source
if len(args) > 1:
try:
#Grab a random quote from given source
q = self.channel.quotes_list.random_quote(args[1])
except Exception:
#Invalid source name
q = Quote("your_boss", "Don't you think you should be getting back to work?")
else:
#Grab random quote from random source
q = self.channel.quotes_list.random_quote()
self.channel.last_quote = q
#Print the quote
self.respond(self.event.target, q)
pass
def respond(self, target, message):
self.connection.privmsg(target, message)
|
from django.conf.urls import include, url
from django.contrib import admin
from django.contrib.auth.views import login, logout, password_reset, password_reset_done, password_reset_confirm,\
password_reset_complete
from django.conf import settings
from django.conf.urls.static import static
from seadssite import views as v
admin.autodiscover()
urlpatterns = [
url(r'^login/$', login),
url(r'^logout/$', logout, {'next_page': '/'}),
url(r'^admin/', include(admin.site.urls)),
url(r'^accounts/password/reset/$', password_reset,
{'post_reset_redirect': '/accounts/password/reset/done/'}),
url(r'^accounts/password/reset/done/$', password_reset_done),
url(r'^accounts/password/reset/(?P<uidb64>[0-9A-Za-z_\-]+)/(?P<token>.+)/$',
password_reset_confirm, {'post_reset_redirect': '/accounts/password/done/'}),
url(r'^accounts/password/done/$', password_reset_complete),
url(r'^$', v.IndexView.as_view()),
url(r'^dashboard/$', v.DashboardView),
url(r'^dashboard/[0-9]+/$', v.graph),
url(r'^dashboard/[0-9]+/timer/$', v.TimerView),
url(r'^dashboard/[0-9]+/appliances/$', v.DevicesView),
url(r'^register', v.RegisterView.as_view()),
]
|
"""
Django models specific to peer assessment.
NOTE: We've switched to migrations, so if you make any edits to this file, you
need to then generate a matching migration for it using:
./manage.py schemamigration openassessment.assessment --auto
"""
import random
from datetime import timedelta
from django.db import models, DatabaseError
from django.utils.timezone import now
from openassessment.assessment.models.base import Assessment
from openassessment.assessment.errors import PeerAssessmentWorkflowError, PeerAssessmentInternalError
import logging
logger = logging.getLogger("openassessment.assessment.models")
class AssessmentFeedbackOption(models.Model):
"""
Option a student can select to provide feedback on the feedback they received.
`AssessmentFeedback` stands in a one-to-many relationship with `AssessmentFeedbackOption`s:
a student can select zero or more `AssessmentFeedbackOption`s when providing feedback.
Over time, we may decide to add, delete, or reword assessment feedback options.
To preserve data integrity, we will always get-or-create `AssessmentFeedbackOption`s
based on the option text.
"""
text = models.CharField(max_length=255, unique=True)
class Meta:
app_label = "assessment"
def __unicode__(self):
return u'"{}"'.format(self.text)
class AssessmentFeedback(models.Model):
"""
Feedback on feedback. When students receive their grades, they
can provide feedback on how they were assessed, to be reviewed by course staff.
This consists of free-form written feedback
("Please provide any thoughts or comments on the feedback you received from your peers")
as well as zero or more feedback options
("Please select the statements below that reflect what you think of this peer grading experience")
"""
MAXSIZE = 1024 * 100 # 100KB
submission_uuid = models.CharField(max_length=128, unique=True, db_index=True)
assessments = models.ManyToManyField(Assessment, related_name='assessment_feedback', default=None)
feedback_text = models.TextField(max_length=10000, default="")
options = models.ManyToManyField(AssessmentFeedbackOption, related_name='assessment_feedback', default=None)
class Meta:
app_label = "assessment"
def add_options(self, selected_options):
"""
Select feedback options for this assessment.
Students can select zero or more options.
Note: you *must* save the model before calling this method.
Args:
            selected_options (list of unicode): List of options that the user selected.
Raises:
DatabaseError
"""
# First, retrieve options that already exist
options = list(AssessmentFeedbackOption.objects.filter(text__in=selected_options))
# If there are additional options that do not yet exist, create them
new_options = [text for text in selected_options if text not in [opt.text for opt in options]]
for new_option_text in new_options:
options.append(AssessmentFeedbackOption.objects.create(text=new_option_text))
# Add all options to the feedback model
# Note that we've already saved each of the AssessmentFeedbackOption models, so they have primary keys
# (required for adding to a many-to-many relationship)
self.options.add(*options) # pylint:disable=E1101
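# Illustrative sketch (assumption, not part of the original models): recording
# option selections on a saved AssessmentFeedback row via add_options() above;
# the submission UUID and option text are made up for demonstration.
#
#     feedback = AssessmentFeedback.objects.create(submission_uuid="abc123")
#     feedback.add_options([u"These assessments were fair and accurate"])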
class PeerWorkflow(models.Model):
"""Internal Model for tracking Peer Assessment Workflow
This model can be used to determine the following information required
throughout the Peer Assessment Workflow:
1) Get next submission that requires assessment.
2) Does a submission have enough assessments?
3) Has a student completed enough assessments?
4) Does a student already have a submission open for assessment?
5) Close open assessments when completed.
6) Should 'over grading' be allowed for a submission?
The student item is the author of the submission. Peer Workflow Items are
created for each assessment made by this student.
"""
# Amount of time before a lease on a submission expires
TIME_LIMIT = timedelta(hours=8)
student_id = models.CharField(max_length=40, db_index=True)
item_id = models.CharField(max_length=128, db_index=True)
course_id = models.CharField(max_length=255, db_index=True)
submission_uuid = models.CharField(max_length=128, db_index=True, unique=True)
created_at = models.DateTimeField(default=now, db_index=True)
completed_at = models.DateTimeField(null=True, db_index=True)
grading_completed_at = models.DateTimeField(null=True, db_index=True)
cancelled_at = models.DateTimeField(null=True, db_index=True)
class Meta:
ordering = ["created_at", "id"]
app_label = "assessment"
@property
def is_cancelled(self):
"""
Check if workflow is cancelled.
Returns:
True/False
"""
return bool(self.cancelled_at)
@classmethod
def get_by_submission_uuid(cls, submission_uuid):
"""
Retrieve the Peer Workflow associated with the given submission UUID.
Args:
submission_uuid (str): The string representation of the UUID belonging
to the associated Peer Workflow.
Returns:
workflow (PeerWorkflow): The most recent peer workflow associated with
this submission UUID.
Raises:
PeerAssessmentWorkflowError: Thrown when no workflow can be found for
the associated submission UUID. This should always exist before a
                student is allowed to request submissions for peer assessment.
Examples:
            >>> PeerWorkflow.get_by_submission_uuid("abc123")
{
'student_id': u'Bob',
'item_id': u'type_one',
'course_id': u'course_1',
'submission_uuid': u'1',
'created_at': datetime.datetime(2014, 1, 29, 17, 14, 52, 668850, tzinfo=<UTC>)
}
"""
try:
return cls.objects.get(submission_uuid=submission_uuid)
except cls.DoesNotExist:
return None
except DatabaseError:
error_message = (
u"Error finding workflow for submission UUID {}. Workflow must be "
u"created for submission before beginning peer assessment."
).format(submission_uuid)
logger.exception(error_message)
raise PeerAssessmentWorkflowError(error_message)
@classmethod
def create_item(cls, scorer_workflow, submission_uuid):
"""
Create a new peer workflow for a student item and submission.
Args:
scorer_workflow (PeerWorkflow): The peer workflow associated with the scorer.
submission_uuid (str): The submission associated with this workflow.
Raises:
PeerAssessmentInternalError: Raised when there is an internal error
creating the Workflow.
"""
peer_workflow = cls.get_by_submission_uuid(submission_uuid)
try:
workflow_items = PeerWorkflowItem.objects.filter(
scorer=scorer_workflow,
author=peer_workflow,
submission_uuid=submission_uuid
)
if len(workflow_items) > 0:
item = workflow_items[0]
else:
item = PeerWorkflowItem.objects.create(
scorer=scorer_workflow,
author=peer_workflow,
submission_uuid=submission_uuid
)
item.started_at = now()
item.save()
return item
except DatabaseError:
error_message = (
u"An internal error occurred while creating a new peer workflow "
u"item for workflow {}"
).format(scorer_workflow)
logger.exception(error_message)
raise PeerAssessmentInternalError(error_message)
def find_active_assessments(self):
"""Given a student item, return an active assessment if one is found.
Before retrieving a new submission for a peer assessor, check to see if that
assessor already has a submission out for assessment. If an unfinished
assessment is found that has not expired or has not been cancelled,
return the associated submission.
TODO: If a user begins an assessment, then resubmits, this will never find
the unfinished assessment. Is this OK?
Args:
workflow (PeerWorkflow): See if there is an associated active assessment
for this PeerWorkflow.
Returns:
(PeerWorkflowItem) The PeerWorkflowItem for the submission that the
student has open for active assessment.
"""
oldest_acceptable = now() - self.TIME_LIMIT
items = list(self.graded.all().select_related('author').order_by("-started_at", "-id"))
valid_open_items = []
completed_sub_uuids = []
# First, remove all completed items.
for item in items:
if item.assessment is not None or item.author.is_cancelled:
completed_sub_uuids.append(item.submission_uuid)
else:
valid_open_items.append(item)
        # Remove any open items whose submission has been completed; iterate
        # over a copy so that removing items does not skip elements.
        for item in list(valid_open_items):
if (item.started_at < oldest_acceptable or
item.submission_uuid in completed_sub_uuids):
valid_open_items.remove(item)
return valid_open_items[0] if valid_open_items else None
def get_submission_for_review(self, graded_by):
"""
Find a submission for peer assessment. This function will find the next
submission that requires assessment, excluding any submission that has been
completely graded, or is actively being reviewed by other students.
Args:
graded_by (unicode): Student ID of the scorer.
Returns:
submission_uuid (str): The submission_uuid for the submission to review.
Raises:
PeerAssessmentInternalError: Raised when there is an error retrieving
the workflows or workflow items for this request.
"""
timeout = (now() - self.TIME_LIMIT).strftime("%Y-%m-%d %H:%M:%S")
        # The following query behaves as the Peer Assessment Queue. This will
# find the next submission (via PeerWorkflow) in this course / question
# that:
# 1) Does not belong to you
# 2) Does not have enough completed assessments
# 3) Is not something you have already scored.
# 4) Does not have a combination of completed assessments or open
# assessments equal to or more than the requirement.
# 5) Has not been cancelled.
try:
peer_workflows = list(PeerWorkflow.objects.raw(
"select pw.id, pw.submission_uuid "
"from assessment_peerworkflow pw "
"where pw.item_id=%s "
"and pw.course_id=%s "
"and pw.student_id<>%s "
"and pw.grading_completed_at is NULL "
"and pw.cancelled_at is NULL "
"and pw.id not in ("
" select pwi.author_id "
" from assessment_peerworkflowitem pwi "
" where pwi.scorer_id=%s "
" and pwi.assessment_id is not NULL "
") "
"and ("
" select count(pwi.id) as c "
" from assessment_peerworkflowitem pwi "
" where pwi.author_id=pw.id "
" and (pwi.assessment_id is not NULL or pwi.started_at > %s) "
") < %s "
"order by pw.created_at, pw.id "
"limit 1; ",
[
self.item_id,
self.course_id,
self.student_id,
self.id,
timeout,
graded_by
]
))
if not peer_workflows:
return None
return peer_workflows[0].submission_uuid
except DatabaseError:
error_message = (
u"An internal error occurred while retrieving a peer submission "
u"for learner {}"
).format(self)
logger.exception(error_message)
raise PeerAssessmentInternalError(error_message)
def get_submission_for_over_grading(self):
"""
Retrieve the next submission uuid for over grading in peer assessment.
"""
        # The following query behaves as the Peer Assessment Over Grading Queue. This
# will find a random submission (via PeerWorkflow) in this course / question
# that:
# 1) Does not belong to you
# 2) Is not something you have already scored
# 3) Has not been cancelled.
try:
query = list(PeerWorkflow.objects.raw(
"select pw.id, pw.submission_uuid "
"from assessment_peerworkflow pw "
"where course_id=%s "
"and item_id=%s "
"and student_id<>%s "
"and pw.cancelled_at is NULL "
"and pw.id not in ( "
"select pwi.author_id "
"from assessment_peerworkflowitem pwi "
"where pwi.scorer_id=%s"
"); ",
[self.course_id, self.item_id, self.student_id, self.id]
))
workflow_count = len(query)
if workflow_count < 1:
return None
random_int = random.randint(0, workflow_count - 1)
random_workflow = query[random_int]
return random_workflow.submission_uuid
except DatabaseError:
error_message = (
u"An internal error occurred while retrieving a peer submission "
u"for learner {}"
).format(self)
logger.exception(error_message)
raise PeerAssessmentInternalError(error_message)
def close_active_assessment(self, submission_uuid, assessment, num_required_grades):
"""
Updates a workflow item on the student's workflow with the associated
assessment. When a workflow item has an assessment, it is considered
finished.
Args:
submission_uuid (str): The submission the scorer is grading.
            assessment (Assessment): The associated assessment for this action.
            num_required_grades (int): The number of completed assessments the
                author's peer workflow requires before it is considered complete.
Returns:
None
"""
try:
item_query = self.graded.filter(
submission_uuid=submission_uuid
).order_by("-started_at", "-id") # pylint:disable=E1101
items = list(item_query[:1])
if not items:
msg = (
u"No open assessment was found for learner {} while assessing "
u"submission UUID {}."
).format(self.student_id, submission_uuid)
raise PeerAssessmentWorkflowError(msg)
item = items[0]
item.assessment = assessment
item.save()
if (
not item.author.grading_completed_at and
item.author.graded_by.filter(assessment__isnull=False).count() >= num_required_grades
):
item.author.grading_completed_at = now()
item.author.save()
except (DatabaseError, PeerWorkflowItem.DoesNotExist):
error_message = (
u"An internal error occurred while retrieving a workflow item for "
u"learner {}. Workflow Items are created when submissions are "
u"pulled for assessment."
).format(self.student_id)
logger.exception(error_message)
raise PeerAssessmentWorkflowError(error_message)
def num_peers_graded(self):
"""
Returns the number of peers the student owning the workflow has graded.
Returns:
integer
"""
return self.graded.filter(assessment__isnull=False).count() # pylint:disable=E1101
def __repr__(self):
return (
"PeerWorkflow(student_id={0.student_id}, item_id={0.item_id}, "
"course_id={0.course_id}, submission_uuid={0.submission_uuid}"
"created_at={0.created_at}, completed_at={0.completed_at})"
).format(self)
def __unicode__(self):
return repr(self)
class PeerWorkflowItem(models.Model):
"""Represents an assessment associated with a particular workflow
Created every time a submission is requested for peer assessment. The
associated workflow represents the scorer of the given submission, and the
assessment represents the completed assessment for this work item.
"""
scorer = models.ForeignKey(PeerWorkflow, related_name='graded')
author = models.ForeignKey(PeerWorkflow, related_name='graded_by')
submission_uuid = models.CharField(max_length=128, db_index=True)
started_at = models.DateTimeField(default=now, db_index=True)
assessment = models.ForeignKey(Assessment, null=True)
# This WorkflowItem was used to determine the final score for the Workflow.
scored = models.BooleanField(default=False)
@classmethod
def get_scored_assessments(cls, submission_uuid):
"""
Return all scored assessments for a given submission.
Args:
submission_uuid (str): The UUID of the submission.
Returns:
QuerySet of Assessment objects.
"""
return Assessment.objects.filter(
pk__in=[
item.assessment.pk for item in PeerWorkflowItem.objects.filter(
submission_uuid=submission_uuid, scored=True
)
]
)
class Meta:
ordering = ["started_at", "id"]
app_label = "assessment"
def __repr__(self):
return (
"PeerWorkflowItem(scorer={0.scorer}, author={0.author}, "
"submission_uuid={0.submission_uuid}, "
"started_at={0.started_at}, assessment={0.assessment}, "
"scored={0.scored})"
).format(self)
def __unicode__(self):
return repr(self)
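# ---------------------------------------------------------------------------
# Illustrative sketch only (not part of the model code): how the methods above
# are typically combined for one peer-assessment cycle. `scorer_workflow`,
# `assessment` and `required_grades` are hypothetical caller-supplied values;
# the PeerWorkflowItem linking scorer and author is assumed to have already
# been created when the submission was pulled for assessment (see the error
# message in close_active_assessment above).
def _example_peer_assessment_cycle(scorer_workflow, assessment, required_grades=3):
    """Sketch of one review cycle for a scorer's PeerWorkflow."""
    # Reuse an unfinished review if one is still open for this scorer.
    open_item = scorer_workflow.get_active_assessment()
    if open_item is not None:
        submission_uuid = open_item.submission_uuid
    else:
        # Otherwise pull the next submission from the peer-assessment queue.
        submission_uuid = scorer_workflow.get_submission_for_review(required_grades)
    if submission_uuid is None:
        return None
    # Attach the finished assessment to the open workflow item; this may also
    # mark the author's workflow as fully graded.
    scorer_workflow.close_active_assessment(submission_uuid, assessment, required_grades)
    return submission_uuid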
|
import os
import sys
from pyramid.paster import get_appsettings
from pyramid.paster import setup_logging
from pyramid.scripts.common import parse_vars
from skosprovider_sqlalchemy.models import ConceptScheme
from skosprovider_sqlalchemy.models import Label
from skosprovider_sqlalchemy.utils import import_provider
from sqlalchemy import engine_from_config
from sqlalchemy.orm import sessionmaker
def usage(argv):
cmd = os.path.basename(argv[0])
print('usage: %s <config_uri> [var=value]\n'
'(example: "%s development.ini")' % (cmd, cmd))
sys.exit(1)
def main(argv=sys.argv):
from fixtures.data import trees, geo
from fixtures.styles_and_cultures import styles_and_cultures
from fixtures.materials import materials
from fixtures.eventtypes import eventtypes
from fixtures.heritagetypes import heritagetypes
from fixtures.periods import periods
from fixtures.species import species
if len(argv) < 2:
usage(argv)
config_uri = argv[1]
options = parse_vars(argv[2:])
setup_logging(config_uri)
settings = get_appsettings(config_uri, options=options)
engine = engine_from_config(settings, 'sqlalchemy.')
db_session = sessionmaker(bind=engine)()
import_provider(
trees,
ConceptScheme(
id=1,
uri='urn:x-skosprovider:trees',
labels=[
Label('Verschillende soorten bomen', u'prefLabel', u'nl'),
Label('Different types of trees', u'prefLabel', u'en')
]
),
db_session
)
import_provider(
geo,
ConceptScheme(
id=2,
uri='urn:x-skosprovider:geo',
labels=[
Label('Geografie', u'prefLabel', u'nl'),
Label('Geography', u'prefLabel', u'en')
]
),
db_session
)
import_provider(
styles_and_cultures,
ConceptScheme(
id=3,
uri='https://id.erfgoed.net/thesauri/stijlen_en_culturen',
labels=[
Label('Stijlen en Culturen', u'prefLabel', u'nl'),
Label('Styles and Cultures', u'prefLabel', u'en')
]
),
db_session
)
import_provider(
materials,
ConceptScheme(
id=4,
uri='https://id.erfgoed.net/thesauri/materialen',
labels=[
Label('Materialen', u'prefLabel', u'nl'),
Label('Materials', u'prefLabel', u'en')
]
),
db_session
)
import_provider(
eventtypes,
ConceptScheme(
id=5,
uri='https://id.erfgoed.net/thesauri/gebeurtenistypes',
labels=[
Label('Gebeurtenistypes', u'prefLabel', u'nl'),
Label('Event types', u'prefLabel', u'en')
]
),
db_session
)
import_provider(
heritagetypes,
ConceptScheme(
id=6,
uri='https://id.erfgoed.net/thesauri/erfgoedtypes',
labels=[
Label('Erfgoedtypes', u'prefLabel', u'nl'),
Label('Heritage types', u'prefLabel', u'en')
]
),
db_session
)
import_provider(
periods,
ConceptScheme(
id=7,
uri='https://id.erfgoed.net/thesauri/dateringen',
labels=[
Label('Dateringen', u'prefLabel', u'nl'),
Label('Periods', u'prefLabel', u'en')
]
),
db_session
)
import_provider(
species,
ConceptScheme(
id=8,
uri='https://id.erfgoed.net/thesauri/soorten',
labels=[
Label('Soorten', u'prefLabel', u'nl'),
Label('Species', u'prefLabel', u'en')
]
),
db_session
)
db_session.commit()
db_session.close()
print('--atramhasis-db-initialized--')
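# ---------------------------------------------------------------------------
# Optional refactoring sketch (not used above): the eight import_provider()
# calls differ only in their data, so they could be driven by a table. The
# tuples in the example merely restate values already hard-coded in main();
# nothing new is introduced.
def _import_all(db_session, schemes):
    """Import every (id, uri, dutch label, english label, data) tuple."""
    for scheme_id, uri, label_nl, label_en, data in schemes:
        import_provider(
            data,
            ConceptScheme(
                id=scheme_id,
                uri=uri,
                labels=[
                    Label(label_nl, u'prefLabel', u'nl'),
                    Label(label_en, u'prefLabel', u'en')
                ]
            ),
            db_session
        )
# Example (inside main, after the fixture imports):
#   _import_all(db_session, [
#       (1, 'urn:x-skosprovider:trees',
#        'Verschillende soorten bomen', 'Different types of trees', trees),
#       (2, 'urn:x-skosprovider:geo', 'Geografie', 'Geography', geo),
#       # ... remaining schemes as above ...
#   ])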
|
#! /usr/bin/env python
from optparse import OptionParser
import os
import os.path
import sys
import re
import json
from alignment_stats import Align_Stats
class QC_Sample:
def __init__(self, options):
self.options = options
self.sample_json = json.load(open(options.json))
self.__softwareDirectory = "/rawdata/legos"
self.__QCDirectory = "/rawdata/legos/scripts/QC"
self.no_errors = True
def runCommandLine(self, systemCall):
#run the call and return the status
print 'Starting %s' % (systemCall)
status = os.system(systemCall)
return(status)
# will find all of the runs in a sample and QC them with each other
def QC_all_runs(self):
# if this is a germline sample, QC all of the normal runs with each other.
if self.sample_json['sample_type'] == 'germline':
# QC the normal runs with each other
self.QC_normal_runs(self.sample_json['runs'])
# if this is a tumor_normal sample, find the normal and tumor runs, and then QC them with each other.
elif self.sample_json['sample_type'] == 'tumor_normal':
normal_runs = []
tumor_runs = []
for run in self.sample_json['runs']:
run_json = json.load(open(run))
if run_json['run_type'] == 'normal':
normal_runs.append(run)
elif run_json['run_type'] == 'tumor':
tumor_runs.append(run)
else:
print "ERROR run type is not normal or tumor."
if self.sample_json['analysis']['settings']['type'] == 'all_tumor_normal':
# QC the normal runs with each other
self.QC_normal_runs(normal_runs, 'normal_')
# QC the tumor runs with each other
self.QC_tumor_runs(tumor_runs, 'tumor_')
# now QC the tumor and normal runs together.
self.QC_normal_tumor_runs(normal_runs, tumor_runs)
elif self.sample_json['analysis']['settings']['type'] == 'normal_only':
# QC the normal runs with each other
self.QC_normal_runs(normal_runs, 'normal_')
elif self.sample_json['analysis']['settings']['type'] == 'tumor_only':
# QC the tumor runs with each other
self.QC_tumor_runs(tumor_runs, 'tumor_')
# need to run TVC and COV first still...
#elif self.sample_json['analysis']['settings']['type'] == 'tumor_normal_only':
# # now QC the tumor and normal runs together.
# self.QC_normal_tumor_runs(self, normal_runs, tumor_runs):
# QC the normal runs with each other
def QC_normal_runs(self, normal_runs, pref=''):
# first run TVC_CV and get the Run info to prepare for QC2Runs
for normal_run1 in normal_runs:
self.runTVC_COV(normal_run1, pref)
self.getRunInfo(normal_run1, pref)
for normal_run1 in normal_runs:
normal_run1_json = json.load(open(normal_run1))
for normal_run2 in normal_runs:
normal_run2_json = json.load(open(normal_run2))
# check to see if these two runs should be QC'd together.
if int(normal_run1_json['run_num']) < int(normal_run2_json['run_num']):
self.QC_2Runs(normal_run1, normal_run2, pref, pref)
# QC the tumor runs with each other
def QC_tumor_runs(self, tumor_runs, pref):
# first run TVC_CV and get the Run info to prepare for QC2Runs
for tumor_run1 in tumor_runs:
self.runTVC_COV(tumor_run1, pref)
self.getRunInfo(tumor_run1, pref)
for tumor_run1 in tumor_runs:
tumor_run1_json = json.load(open(tumor_run1))
for tumor_run2 in tumor_runs:
tumor_run2_json = json.load(open(tumor_run2))
# check to see if these two runs should be QC'd together.
if int(tumor_run1_json['run_num']) < int(tumor_run2_json['run_num']):
self.QC_2Runs(tumor_run1, tumor_run2, pref, pref)
# now QC the tumor and normal runs together.
def QC_normal_tumor_runs(self, normal_runs, tumor_runs):
for normal_run in normal_runs:
for tumor_run in tumor_runs:
# QC the normal and tumor runs together
self.QC_2Runs(normal_run, tumor_run, 'normal_', 'tumor_')
# @param run the run for which to run TVC and coverage analysis
def runTVC_COV(self, run, pref):
#default is to not flag dups
dupFlag = '--remove_dup_flags'
#see if settings want to mark dups though
if 'mark_dups' in self.sample_json['analysis']['settings']:
#if it is set to true, then we change the flag
if self.sample_json['analysis']['settings']['mark_dups'] == 'true':
dupFlag = '--flag_dups'
#default is AmpliSeq for coverage analysis
coverageAnalysisFlag = '--ampliseq'
#see if the settings say targetseq
if 'capture_type' in self.sample_json['analysis']['settings']:
#if it is set to true, then we change the flag
if self.sample_json['analysis']['settings']['capture_type'].lower() == 'targetseq' or self.sample_json['analysis']['settings']['capture_type'].lower() == 'target_seq':
coverageAnalysisFlag = '--targetseq'
run_json = json.load(open(run))
#print run_json
for file in run_json['analysis']['files']:
command = 'bash %s/scripts/runTVC_COV.sh '%self.__softwareDirectory + \
'--ptrim PTRIM.bam ' + \
'--cleanup %s %s '%(dupFlag, coverageAnalysisFlag) + \
'--cov %s %s '%(self.sample_json['analysis']['settings']['qc_merged_bed'], self.sample_json['analysis']['settings']['qc_unmerged_bed']) + \
'--tvc %s %s '%(self.sample_json['analysis']['settings']['project_bed'], self.sample_json['analysis']['settings']['%stvc_json'%pref]) + \
'--output_dir %s %s/%s '%(run_json['run_folder'], run_json['run_folder'], file)
# run TVC and Cov analysis on this sample.
status = self.runCommandLine(command)
if status != 0:
sys.stderr.write("%s runTVC_COV.sh had an error!!\n"%run)
self.no_errors = False
# @param run the json file of the run
def getRunInfo(self, run, pref):
run_json = json.load(open(run))
# QC_getRunInfo.sh gets the following metrics: % amps covered at the beg and end, Ts/Tv ratio, # Total variants, # HET variants, # HOM variants
# It also gets the metrics from the report.pdf if it is available.
# I had to put it all on one line because python kept complaining about formatting issues.
qcgetruninfo="bash %s/QC_getRunInfo.sh "%self.__QCDirectory + \
"--run_dir %s "%run_json['run_folder'] + \
"--out_dir %s/Analysis_Files/temp_files "%run_json['run_folder'] + \
"--amp_cov_cutoff %s "%self.sample_json['analysis']['settings']['min_amplicon_coverage'] + \
"--depth_cutoff %s "%self.sample_json['analysis']['settings']['%smin_base_coverage'%pref] + \
"--wt_hom_cutoff %s %s "%(self.sample_json['analysis']['settings']['%swt_cutoff'%pref], self.sample_json['analysis']['settings']['%shom_cutoff'%pref])+ \
"--beg_bed %s "%self.sample_json['analysis']['settings']['beg_bed'] + \
"--end_bed %s "%self.sample_json['analysis']['settings']['end_bed'] + \
"--project_bed %s "%str(self.sample_json['analysis']['settings']['project_bed']) + \
"--ptrim_json %s/PTRIM.bam "%run_json['run_folder']
#if [ "$CDS_BED" != "" ]; then
# qcgetruninfo="$qcgetruninfo --cds_bed $CDS_BED "
        # QC_getRunInfo.sh will also run the pool dropout script
if self.sample_json['analysis']['settings']['pool_dropout'] == True:
qcgetruninfo += "--pool_dropout "
# cleanup will be done at the end of this script
#run the qcgetruninfo command
status = self.runCommandLine(qcgetruninfo)
if status == 1:
sys.stderr.write("%s QC_getRunInfo.sh had an error!!\n"%run)
self.no_errors = False
if status == 4:
sys.stderr.write("%s QC_getRunInfo.sh got a file not found error...\n"%run)
self.no_errors = False
if status == 8:
sys.stderr.write("%s QC_getRunInfo.sh got a usage error...\n"%run)
self.no_errors = False
# if the median read length was not gathered from the report PDF, or if this is a merged bam file, then calculate the median read length
new_run_json = json.load(open(run))
if 'median_read_length' not in new_run_json or new_run_json['median_read_length'] == "":
            align_stats = Align_Stats()
            new_run_json['median_read_length'] = align_stats.calcMedianFromBam(new_run_json['analysis']['files'][0])
#write new json file
with open(run, 'w') as newJobFile:
json.dump(new_run_json, newJobFile, sort_keys=True, indent=4)
# QC two runs with each other
# For Tumor / Normal pairs, Run1 should be the normal run, and Run2 should be the tumor run.
# Output will be put into a dir like: sample1/QC/Run1vsRun2
def QC_2Runs(self, run1, run2, pref1, pref2):
# This if statement is not needed, it's just an extra catch
if run1 != run2:
run1_json = json.load(open(run1))
run2_json = json.load(open(run2))
if 'results_QC_json' in self.sample_json:
output_json = self.sample_json['results_QC_json']
else:
output_json = "%s/results_QC.json"%self.sample_json['output_folder']
# QC these two runs for every chr type that is listed in chromosomes to analyze.
for chromosome in self.sample_json['analysis']['settings']['chromosomes_to_analyze']:
# QC these two runs. QC_2Runs.sh takes the two run dirs and finds a .bam, .vcf, and .cov.xls file in the same dir as the .bam file
qc2runs = "bash %s/QC_2Runs.sh "%self.__QCDirectory + \
"--run_dirs %s %s "%(run1_json['run_folder'], run2_json['run_folder']) + \
"--json_out %s "%output_json + \
"--project_bed %s "%self.sample_json['analysis']['settings']['project_bed'] + \
"-a %s "%self.sample_json['analysis']['settings']['min_amplicon_coverage'] + \
"-jp %s %s "%(self.sample_json['analysis']['settings']['%stvc_json'%pref1], self.sample_json['analysis']['settings']['%stvc_json'%pref2]) + \
"-d %s %s "%(self.sample_json['analysis']['settings']['%smin_base_coverage'%pref1], self.sample_json['analysis']['settings']['%smin_base_coverage'%pref2]) + \
"-gt %s %s %s %s "%(self.sample_json['analysis']['settings']['%swt_cutoff'%pref1], self.sample_json['analysis']['settings']['%shom_cutoff'%pref1], self.sample_json['analysis']['settings']['%swt_cutoff'%pref2], self.sample_json['analysis']['settings']['%shom_cutoff'%pref2])
# now set the output_dir
output_dir = "%s/%s%svs%s"%(self.sample_json['output_folder'], chromosome, run1_json['run_name'], run2_json['run_name'])
if chromosome != "all":
qc2runs += "--subset_chr %s "%chromosome
output_dir = "%s/%s%svs%s"%(self.sample_json['output_folder'], chromosome, run1_json['run_name'], run2_json['run_name'])
qc2runs += "--output_dir %s "%output_dir
#if [ "$CDS_BED" != "" ]; then
# qc2runs="$qc2runs -cb $CDS_BED "
#if [ "$SUBSET_BED" != "" ]; then
# qc2runs="$qc2runs --subset_bed $SUBSET_BED "
#if [ "$RUN_GATK_CDS" == "True" ]; then
# qc2runs="$qc2runs --run_gatk_cds "
# The cleanup will be done at the end of this script because the PTRIM.bam is needed for QC_getRunInfo.sh, and the chr_subset is needed for each run comparison
#if [ "$CLEANUP" == "True" ]; then
# qc2runs="$qc2runs --cleanup "
                #run the qc2runs command
                status = self.runCommandLine(qc2runs)
                if status == 1:
                    sys.stderr.write("%s vs %s QC_2Runs.sh had an error!!\n"%(run1, run2))
                    self.no_errors = False
                if status == 4:
                    sys.stderr.write("%s vs %s QC_2Runs.sh got a file not found error...\n"%(run1, run2))
                    self.no_errors = False
                if status == 8:
                    sys.stderr.write("%s vs %s QC_2Runs.sh got a usage error...\n"%(run1, run2))
                    self.no_errors = False
if __name__ == '__main__':
# set up the option parser
parser = OptionParser()
# add the options to parse
parser.add_option('-j', '--json', dest='json', help="A sample's json file which contains the necessary options and list of runs to QC with each other")
(options, args) = parser.parse_args()
# check to make sure the inputs are valid
if not options.json:
print "USAGE-ERROR-- --json is required"
parser.print_help()
sys.exit(1)
if not os.path.isfile(options.json):
print "ERROR-- %s not found"%options.json
parser.print_help()
sys.exit(1)
qc_sample = QC_Sample(options)
qc_sample.QC_all_runs()
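# ---------------------------------------------------------------------------
# For reference, a hypothetical sketch of the sample JSON this script expects;
# only keys that are actually read above are listed, and every value shown is
# made up. Optional keys include mark_dups, capture_type and results_QC_json.
#
# {
#   "sample_type": "tumor_normal",              # or "germline"
#   "output_folder": "/path/to/sample1",
#   "runs": ["/path/to/run1.json", "/path/to/run2.json"],
#   "analysis": {
#     "settings": {
#       "type": "all_tumor_normal",             # or "normal_only" / "tumor_only"
#       "project_bed": "...", "qc_merged_bed": "...", "qc_unmerged_bed": "...",
#       "beg_bed": "...", "end_bed": "...",
#       "min_amplicon_coverage": 30,
#       "pool_dropout": false,
#       "chromosomes_to_analyze": ["all"],
#       "normal_tvc_json": "...", "tumor_tvc_json": "...",
#       "normal_min_base_coverage": 30, "tumor_min_base_coverage": 100,
#       "normal_wt_cutoff": 0.2, "normal_hom_cutoff": 0.8,
#       "tumor_wt_cutoff": 0.2, "tumor_hom_cutoff": 0.8
#     }
#   }
# }
#
# For germline samples the settings keys are read without the "normal_"/"tumor_"
# prefix (e.g. "tvc_json", "min_base_coverage", "wt_cutoff", "hom_cutoff").
# Each per-run JSON is expected to provide at least "run_type", "run_num",
# "run_name", "run_folder" and "analysis": {"files": [...]}.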
|
#!/usr/bin/env python
#
# @file CMakeFiles.py
# @brief class for generating the cmake files
# @author Frank Bergmann
# @author Sarah Keating
#
# <!--------------------------------------------------------------------------
#
# Copyright (c) 2013-2015 by the California Institute of Technology
# (California, USA), the European Bioinformatics Institute (EMBL-EBI, UK)
# and the University of Heidelberg (Germany), with support from the National
# Institutes of Health (USA) under grant R01GM070923. All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
#
# Neither the name of the California Institute of Technology (Caltech), nor
# of the European Bioinformatics Institute (EMBL-EBI), nor of the University
# of Heidelberg, nor the names of any contributors, may be used to endorse
# or promote products derived from this software without specific prior
# written permission.
# ------------------------------------------------------------------------ -->
import os
from util import global_variables
from . import PackageFile
from . import RegisterFile
from . import BaseCMakeFiles
class CMakeFiles():
"""Class for all cmake files"""
def __init__(self, pkg_object, this_dir, verbose=False):
self.verbose = verbose
self.this_dir = this_dir
# # members from object
self.package = pkg_object['name']
self.language = global_variables.language
self.elements = pkg_object['baseElements']
self.plugins = pkg_object['plugins']
#########################################################################
def write_package_files(self):
name = '{0}-package'.format(self.package)
ext = PackageFile.PackageFile(name, self.package, False)
if self.verbose:
print('Writing file {0}'.format(ext.fileout.filename))
ext.write_file()
ext.close_file()
os.chdir('src')
ext = PackageFile.PackageFile(name, self.package, True)
if self.verbose:
print('Writing file {0}'.format(ext.fileout.filename))
ext.write_file()
ext.close_file()
os.chdir(self.this_dir)
def write_register_files(self):
name = '{0}-register'.format(self.package)
ext = RegisterFile.RegisterFile(name, self.package, False)
if self.verbose:
print('Writing file {0}'.format(ext.fileout.filename))
ext.write_file()
ext.close_file()
ext = RegisterFile.RegisterFile(name, self.package, True)
if self.verbose:
print('Writing file {0}'.format(ext.fileout.filename))
ext.write_file()
ext.close_file()
########################################################################
def write_files(self):
self.write_package_files()
os.chdir('src/{0}/packages'.format(self.language))
self.write_register_files()
os.chdir(self.this_dir)
def write_other_library_files(self):
os.chdir(self.this_dir)
cmake = BaseCMakeFiles.BaseCMakeFiles(self.verbose)
cmake.write_files()
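# ---------------------------------------------------------------------------
# Illustrative usage sketch (not part of deviser itself). The pkg_object shown
# is a made-up, minimal stand-in for the parsed package description; only the
# keys read in __init__ are included, and global_variables.language is assumed
# to have been set by the surrounding tooling.
#
#   from util import global_variables
#   global_variables.language = 'sbml'
#   pkg_object = {'name': 'groups', 'baseElements': [], 'plugins': []}
#   cmake = CMakeFiles(pkg_object, this_dir=os.getcwd(), verbose=True)
#   cmake.write_files()   # chdirs into 'src' and 'src/<language>/packages',
#                         # so it must be run from the library root directory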
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# -*- coding: utf-8 -*-
# Copyright 2010-2011 OpenStack, LLC
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import datetime
import hashlib
import json
import StringIO
from oslo.config import cfg
import routes
import six
import webob
import glance.api
import glance.api.common
from glance.api.v1 import filters
from glance.api.v1 import images
from glance.api.v1 import router
from glance.common import exception
import glance.common.config
import glance.context
from glance.db.sqlalchemy import api as db_api
from glance.db.sqlalchemy import models as db_models
from glance.openstack.common import timeutils
from glance.openstack.common import uuidutils
import glance.store.filesystem
from glance.tests.unit import base
from glance.tests import utils as test_utils
import glance.tests.unit.utils as unit_test_utils
CONF = cfg.CONF
_gen_uuid = uuidutils.generate_uuid
UUID1 = _gen_uuid()
UUID2 = _gen_uuid()
class TestGlanceAPI(base.IsolatedUnitTest):
def setUp(self):
"""Establish a clean test environment"""
super(TestGlanceAPI, self).setUp()
self.mapper = routes.Mapper()
self.api = test_utils.FakeAuthMiddleware(router.API(self.mapper))
self.FIXTURES = [
{'id': UUID1,
'name': 'fake image #1',
'status': 'active',
'disk_format': 'ami',
'container_format': 'ami',
'is_public': False,
'created_at': timeutils.utcnow(),
'updated_at': timeutils.utcnow(),
'deleted_at': None,
'deleted': False,
'checksum': None,
'size': 13,
'locations': [{'url': "file:///%s/%s" % (self.test_dir, UUID1),
'metadata': {}}],
'properties': {'type': 'kernel'}},
{'id': UUID2,
'name': 'fake image #2',
'status': 'active',
'disk_format': 'vhd',
'container_format': 'ovf',
'is_public': True,
'created_at': timeutils.utcnow(),
'updated_at': timeutils.utcnow(),
'deleted_at': None,
'deleted': False,
'checksum': 'abc123',
'size': 19,
'locations': [{'url': "file:///%s/%s" % (self.test_dir, UUID2),
'metadata': {}}],
'properties': {}}]
self.context = glance.context.RequestContext(is_admin=True)
db_api.setup_db_env()
db_api.get_engine()
self.destroy_fixtures()
self.create_fixtures()
def tearDown(self):
"""Clear the test environment"""
super(TestGlanceAPI, self).tearDown()
self.destroy_fixtures()
def create_fixtures(self):
for fixture in self.FIXTURES:
db_api.image_create(self.context, fixture)
# We write a fake image file to the filesystem
with open("%s/%s" % (self.test_dir, fixture['id']), 'wb') as image:
image.write("chunk00000remainder")
image.flush()
def destroy_fixtures(self):
# Easiest to just drop the models and re-create them...
db_models.unregister_models(db_api._ENGINE)
db_models.register_models(db_api._ENGINE)
def _do_test_defaulted_format(self, format_key, format_value):
fixture_headers = {'x-image-meta-name': 'defaulted',
'x-image-meta-location': 'http://localhost:0/image',
format_key: format_value}
req = webob.Request.blank("/images")
req.method = 'POST'
for k, v in fixture_headers.iteritems():
req.headers[k] = v
res = req.get_response(self.api)
self.assertEquals(res.status_int, 201)
res_body = json.loads(res.body)['image']
self.assertEquals(format_value, res_body['disk_format'])
self.assertEquals(format_value, res_body['container_format'])
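    # -----------------------------------------------------------------------
    # Hypothetical helper (not used by the existing tests): the POST-and-check
    # pattern repeated throughout this class could be factored out like this.
    def _post_image(self, headers, body=None):
        """Sketch: POST /images with the given headers and optional body."""
        req = webob.Request.blank("/images")
        req.method = 'POST'
        for k, v in headers.iteritems():
            req.headers[k] = v
        if body is not None:
            req.headers['Content-Type'] = 'application/octet-stream'
            req.body = body
        return req.get_response(self.api)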
def test_defaulted_amazon_format(self):
for key in ('x-image-meta-disk-format',
'x-image-meta-container-format'):
for value in ('aki', 'ari', 'ami'):
self._do_test_defaulted_format(key, value)
def test_bad_disk_format(self):
fixture_headers = {
'x-image-meta-store': 'bad',
'x-image-meta-name': 'bogus',
'x-image-meta-location': 'http://localhost:0/image.tar.gz',
'x-image-meta-disk-format': 'invalid',
'x-image-meta-container-format': 'ami',
}
req = webob.Request.blank("/images")
req.method = 'POST'
for k, v in fixture_headers.iteritems():
req.headers[k] = v
res = req.get_response(self.api)
self.assertEquals(res.status_int, 400)
self.assertTrue('Invalid disk format' in res.body, res.body)
def test_configured_disk_format_good(self):
self.config(disk_formats=['foo'])
fixture_headers = {
'x-image-meta-store': 'bad',
'x-image-meta-name': 'bogus',
'x-image-meta-location': 'http://localhost:0/image.tar.gz',
'x-image-meta-disk-format': 'foo',
'x-image-meta-container-format': 'bare',
}
req = webob.Request.blank("/images")
req.method = 'POST'
for k, v in fixture_headers.iteritems():
req.headers[k] = v
res = req.get_response(self.api)
self.assertEquals(res.status_int, 201)
def test_configured_disk_format_bad(self):
self.config(disk_formats=['foo'])
fixture_headers = {
'x-image-meta-store': 'bad',
'x-image-meta-name': 'bogus',
'x-image-meta-location': 'http://localhost:0/image.tar.gz',
'x-image-meta-disk-format': 'bar',
'x-image-meta-container-format': 'bare',
}
req = webob.Request.blank("/images")
req.method = 'POST'
for k, v in fixture_headers.iteritems():
req.headers[k] = v
res = req.get_response(self.api)
self.assertEquals(res.status_int, 400)
self.assertTrue('Invalid disk format' in res.body, res.body)
def test_configured_container_format_good(self):
self.config(container_formats=['foo'])
fixture_headers = {
'x-image-meta-store': 'bad',
'x-image-meta-name': 'bogus',
'x-image-meta-location': 'http://localhost:0/image.tar.gz',
'x-image-meta-disk-format': 'raw',
'x-image-meta-container-format': 'foo',
}
req = webob.Request.blank("/images")
req.method = 'POST'
for k, v in fixture_headers.iteritems():
req.headers[k] = v
res = req.get_response(self.api)
self.assertEquals(res.status_int, 201)
def test_configured_container_format_bad(self):
self.config(container_formats=['foo'])
fixture_headers = {
'x-image-meta-store': 'bad',
'x-image-meta-name': 'bogus',
'x-image-meta-location': 'http://localhost:0/image.tar.gz',
'x-image-meta-disk-format': 'raw',
'x-image-meta-container-format': 'bar',
}
req = webob.Request.blank("/images")
req.method = 'POST'
for k, v in fixture_headers.iteritems():
req.headers[k] = v
res = req.get_response(self.api)
self.assertEquals(res.status_int, 400)
self.assertTrue('Invalid container format' in res.body, res.body)
def test_container_and_disk_amazon_format_differs(self):
fixture_headers = {
'x-image-meta-store': 'bad',
'x-image-meta-name': 'bogus',
'x-image-meta-location': 'http://localhost:0/image.tar.gz',
'x-image-meta-disk-format': 'aki',
'x-image-meta-container-format': 'ami'}
req = webob.Request.blank("/images")
req.method = 'POST'
for k, v in fixture_headers.iteritems():
req.headers[k] = v
res = req.get_response(self.api)
expected = ("Invalid mix of disk and container formats. "
"When setting a disk or container format to one of "
"'aki', 'ari', or 'ami', "
"the container and disk formats must match.")
self.assertEquals(res.status_int, 400)
self.assertTrue(expected in res.body, res.body)
def test_create_with_location_no_container_format(self):
fixture_headers = {
'x-image-meta-store': 'bad',
'x-image-meta-name': 'bogus',
'x-image-meta-location': 'http://localhost:0/image.tar.gz',
'x-image-meta-disk-format': 'vhd',
}
req = webob.Request.blank("/images")
req.method = 'POST'
for k, v in fixture_headers.iteritems():
req.headers[k] = v
res = req.get_response(self.api)
self.assertEquals(res.status_int, 400)
self.assertTrue('Invalid container format' in res.body)
def test_bad_container_format(self):
fixture_headers = {
'x-image-meta-store': 'bad',
'x-image-meta-name': 'bogus',
'x-image-meta-location': 'http://localhost:0/image.tar.gz',
'x-image-meta-disk-format': 'vhd',
'x-image-meta-container-format': 'invalid',
}
req = webob.Request.blank("/images")
req.method = 'POST'
for k, v in fixture_headers.iteritems():
req.headers[k] = v
res = req.get_response(self.api)
self.assertEquals(res.status_int, 400)
self.assertTrue('Invalid container format' in res.body)
def test_bad_image_size(self):
fixture_headers = {
'x-image-meta-store': 'bad',
'x-image-meta-name': 'bogus',
'x-image-meta-location': 'http://example.com/image.tar.gz',
'x-image-meta-disk-format': 'vhd',
'x-image-meta-size': 'invalid',
'x-image-meta-container-format': 'bare',
}
req = webob.Request.blank("/images")
req.method = 'POST'
for k, v in fixture_headers.iteritems():
req.headers[k] = v
res = req.get_response(self.api)
self.assertEquals(res.status_int, 400)
self.assertTrue('Incoming image size' in res.body)
def test_bad_image_name(self):
fixture_headers = {
'x-image-meta-store': 'bad',
'x-image-meta-name': 'X' * 256,
'x-image-meta-location': 'http://example.com/image.tar.gz',
'x-image-meta-disk-format': 'vhd',
'x-image-meta-container-format': 'bare',
}
req = webob.Request.blank("/images")
req.method = 'POST'
for k, v in fixture_headers.iteritems():
req.headers[k] = v
res = req.get_response(self.api)
self.assertEquals(res.status_int, 400)
def test_add_image_no_location_no_image_as_body(self):
"""Tests creates a queued image for no body and no loc header"""
fixture_headers = {'x-image-meta-store': 'file',
'x-image-meta-disk-format': 'vhd',
'x-image-meta-container-format': 'ovf',
'x-image-meta-name': 'fake image #3'}
req = webob.Request.blank("/images")
req.method = 'POST'
for k, v in fixture_headers.iteritems():
req.headers[k] = v
res = req.get_response(self.api)
self.assertEquals(res.status_int, 201)
res_body = json.loads(res.body)['image']
self.assertEquals('queued', res_body['status'])
image_id = res_body['id']
# Test that we are able to edit the Location field
# per LP Bug #911599
req = webob.Request.blank("/images/%s" % image_id)
req.method = 'PUT'
req.headers['x-image-meta-location'] = 'http://localhost:0/images/123'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 200)
res_body = json.loads(res.body)['image']
# Once the location is set, the image should be activated
# see LP Bug #939484
self.assertEquals('active', res_body['status'])
self.assertFalse('location' in res_body) # location never shown
def test_add_image_no_location_no_content_type(self):
"""Tests creates a queued image for no body and no loc header"""
fixture_headers = {'x-image-meta-store': 'file',
'x-image-meta-disk-format': 'vhd',
'x-image-meta-container-format': 'ovf',
'x-image-meta-name': 'fake image #3'}
req = webob.Request.blank("/images")
req.method = 'POST'
req.body = "chunk00000remainder"
for k, v in fixture_headers.iteritems():
req.headers[k] = v
res = req.get_response(self.api)
self.assertEquals(res.status_int, 400)
def test_add_image_size_header_too_big(self):
"""Tests raises BadRequest for supplied image size that is too big"""
fixture_headers = {'x-image-meta-size': CONF.image_size_cap + 1,
'x-image-meta-name': 'fake image #3'}
req = webob.Request.blank("/images")
req.method = 'POST'
for k, v in fixture_headers.iteritems():
req.headers[k] = v
res = req.get_response(self.api)
self.assertEquals(res.status_int, 400)
def test_add_image_size_chunked_data_too_big(self):
self.config(image_size_cap=512)
fixture_headers = {
'x-image-meta-name': 'fake image #3',
'x-image-meta-container_format': 'ami',
'x-image-meta-disk_format': 'ami',
'transfer-encoding': 'chunked',
'content-type': 'application/octet-stream',
}
req = webob.Request.blank("/images")
req.method = 'POST'
req.body_file = StringIO.StringIO('X' * (CONF.image_size_cap + 1))
for k, v in fixture_headers.iteritems():
req.headers[k] = v
res = req.get_response(self.api)
self.assertEquals(res.status_int, 413)
def test_add_image_size_data_too_big(self):
self.config(image_size_cap=512)
fixture_headers = {
'x-image-meta-name': 'fake image #3',
'x-image-meta-container_format': 'ami',
'x-image-meta-disk_format': 'ami',
'content-type': 'application/octet-stream',
}
req = webob.Request.blank("/images")
req.method = 'POST'
req.body = 'X' * (CONF.image_size_cap + 1)
for k, v in fixture_headers.iteritems():
req.headers[k] = v
res = req.get_response(self.api)
self.assertEquals(res.status_int, 400)
def test_add_image_size_header_exceed_quota(self):
quota = 500
self.config(user_storage_quota=quota)
fixture_headers = {'x-image-meta-size': quota + 1,
'x-image-meta-name': 'fake image #3',
'x-image-meta-container_format': 'bare',
'x-image-meta-disk_format': 'qcow2',
'content-type': 'application/octet-stream',
}
req = webob.Request.blank("/images")
req.method = 'POST'
for k, v in fixture_headers.iteritems():
req.headers[k] = v
req.body = 'X' * (quota + 1)
res = req.get_response(self.api)
self.assertEquals(res.status_int, 413)
def test_add_image_size_data_exceed_quota(self):
quota = 500
self.config(user_storage_quota=quota)
fixture_headers = {
'x-image-meta-name': 'fake image #3',
'x-image-meta-container_format': 'bare',
'x-image-meta-disk_format': 'qcow2',
'content-type': 'application/octet-stream',
}
req = webob.Request.blank("/images")
req.method = 'POST'
req.body = 'X' * (quota + 1)
for k, v in fixture_headers.iteritems():
req.headers[k] = v
res = req.get_response(self.api)
self.assertEquals(res.status_int, 413)
def test_add_image_size_data_exceed_quota_readd(self):
quota = 500
self.config(user_storage_quota=quota)
fixture_headers = {
'x-image-meta-name': 'fake image #3',
'x-image-meta-container_format': 'bare',
'x-image-meta-disk_format': 'qcow2',
'content-type': 'application/octet-stream',
}
req = webob.Request.blank("/images")
req.method = 'POST'
req.body = 'X' * (quota + 1)
for k, v in fixture_headers.iteritems():
req.headers[k] = v
res = req.get_response(self.api)
self.assertEquals(res.status_int, 413)
used_size = sum([f['size'] for f in self.FIXTURES])
req = webob.Request.blank("/images")
req.method = 'POST'
req.body = 'X' * (quota - used_size)
for k, v in fixture_headers.iteritems():
req.headers[k] = v
res = req.get_response(self.api)
self.assertEquals(res.status_int, 201)
def _add_check_no_url_info(self):
fixture_headers = {'x-image-meta-disk-format': 'ami',
'x-image-meta-container-format': 'ami',
'x-image-meta-size': '0',
'x-image-meta-name': 'empty image'}
req = webob.Request.blank("/images")
req.method = 'POST'
for k, v in fixture_headers.iteritems():
req.headers[k] = v
res = req.get_response(self.api)
res_body = json.loads(res.body)['image']
self.assertFalse('locations' in res_body)
self.assertFalse('direct_url' in res_body)
image_id = res_body['id']
# HEAD empty image
req = webob.Request.blank("/images/%s" % image_id)
req.method = 'HEAD'
res = req.get_response(self.api)
self.assertEqual(res.status_int, 200)
self.assertFalse('x-image-meta-locations' in res.headers)
self.assertFalse('x-image-meta-direct_url' in res.headers)
def test_add_check_no_url_info_ml(self):
self.config(show_multiple_locations=True)
self._add_check_no_url_info()
def test_add_check_no_url_info_direct_url(self):
self.config(show_image_direct_url=True)
self._add_check_no_url_info()
def test_add_check_no_url_info_both_on(self):
self.config(show_image_direct_url=True)
self.config(show_multiple_locations=True)
self._add_check_no_url_info()
def test_add_check_no_url_info_both_off(self):
self._add_check_no_url_info()
def test_add_image_zero_size(self):
"""Tests creating an active image with explicitly zero size"""
fixture_headers = {'x-image-meta-disk-format': 'ami',
'x-image-meta-container-format': 'ami',
'x-image-meta-size': '0',
'x-image-meta-name': 'empty image'}
req = webob.Request.blank("/images")
req.method = 'POST'
for k, v in fixture_headers.iteritems():
req.headers[k] = v
res = req.get_response(self.api)
self.assertEquals(res.status_int, 201)
res_body = json.loads(res.body)['image']
self.assertEquals('active', res_body['status'])
image_id = res_body['id']
# GET empty image
req = webob.Request.blank("/images/%s" % image_id)
res = req.get_response(self.api)
self.assertEqual(res.status_int, 200)
self.assertEqual(len(res.body), 0)
def _do_test_add_image_attribute_mismatch(self, attributes):
fixture_headers = {
'x-image-meta-name': 'fake image #3',
}
fixture_headers.update(attributes)
req = webob.Request.blank("/images")
req.method = 'POST'
for k, v in fixture_headers.iteritems():
req.headers[k] = v
req.headers['Content-Type'] = 'application/octet-stream'
req.body = "XXXX"
res = req.get_response(self.api)
self.assertEquals(res.status_int, 400)
def test_add_image_checksum_mismatch(self):
attributes = {
'x-image-meta-checksum': 'asdf',
}
self._do_test_add_image_attribute_mismatch(attributes)
def test_add_image_size_mismatch(self):
attributes = {
'x-image-meta-size': str(len("XXXX") + 1),
}
self._do_test_add_image_attribute_mismatch(attributes)
def test_add_image_checksum_and_size_mismatch(self):
attributes = {
'x-image-meta-checksum': 'asdf',
'x-image-meta-size': str(len("XXXX") + 1),
}
self._do_test_add_image_attribute_mismatch(attributes)
def test_add_image_bad_store(self):
"""Tests raises BadRequest for invalid store header"""
fixture_headers = {'x-image-meta-store': 'bad',
'x-image-meta-name': 'fake image #3'}
req = webob.Request.blank("/images")
req.method = 'POST'
for k, v in fixture_headers.iteritems():
req.headers[k] = v
req.headers['Content-Type'] = 'application/octet-stream'
req.body = "chunk00000remainder"
res = req.get_response(self.api)
self.assertEquals(res.status_int, 400)
def test_add_image_basic_file_store(self):
"""Tests to add a basic image in the file store"""
fixture_headers = {'x-image-meta-store': 'file',
'x-image-meta-disk-format': 'vhd',
'x-image-meta-container-format': 'ovf',
'x-image-meta-name': 'fake image #3'}
req = webob.Request.blank("/images")
req.method = 'POST'
for k, v in fixture_headers.iteritems():
req.headers[k] = v
req.headers['Content-Type'] = 'application/octet-stream'
req.body = "chunk00000remainder"
res = req.get_response(self.api)
self.assertEquals(res.status_int, 201)
# Test that the Location: header is set to the URI to
# edit the newly-created image, as required by APP.
# See LP Bug #719825
self.assertTrue('location' in res.headers,
"'location' not in response headers.\n"
"res.headerlist = %r" % res.headerlist)
res_body = json.loads(res.body)['image']
self.assertTrue('/images/%s' % res_body['id']
in res.headers['location'])
self.assertEquals('active', res_body['status'])
image_id = res_body['id']
# Test that we are NOT able to edit the Location field
# per LP Bug #911599
req = webob.Request.blank("/images/%s" % image_id)
req.method = 'PUT'
req.headers['x-image-meta-location'] = 'http://example.com/images/123'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 400)
def test_add_image_unauthorized(self):
rules = {"add_image": '!'}
self.set_policy_rules(rules)
fixture_headers = {'x-image-meta-store': 'file',
'x-image-meta-disk-format': 'vhd',
'x-image-meta-container-format': 'ovf',
'x-image-meta-name': 'fake image #3'}
req = webob.Request.blank("/images")
req.method = 'POST'
for k, v in fixture_headers.iteritems():
req.headers[k] = v
req.headers['Content-Type'] = 'application/octet-stream'
req.body = "chunk00000remainder"
res = req.get_response(self.api)
self.assertEquals(res.status_int, 403)
def test_add_publicize_image_unauthorized(self):
rules = {"add_image": '@', "modify_image": '@',
"publicize_image": '!'}
self.set_policy_rules(rules)
fixture_headers = {'x-image-meta-store': 'file',
'x-image-meta-is-public': 'true',
'x-image-meta-disk-format': 'vhd',
'x-image-meta-container-format': 'ovf',
'x-image-meta-name': 'fake image #3'}
req = webob.Request.blank("/images")
req.method = 'POST'
for k, v in fixture_headers.iteritems():
req.headers[k] = v
req.headers['Content-Type'] = 'application/octet-stream'
req.body = "chunk00000remainder"
res = req.get_response(self.api)
self.assertEquals(res.status_int, 403)
def test_add_publicize_image_authorized(self):
rules = {"add_image": '@', "modify_image": '@',
"publicize_image": '@'}
self.set_policy_rules(rules)
fixture_headers = {'x-image-meta-store': 'file',
'x-image-meta-is-public': 'true',
'x-image-meta-disk-format': 'vhd',
'x-image-meta-container-format': 'ovf',
'x-image-meta-name': 'fake image #3'}
req = webob.Request.blank("/images")
req.method = 'POST'
for k, v in fixture_headers.iteritems():
req.headers[k] = v
req.headers['Content-Type'] = 'application/octet-stream'
req.body = "chunk00000remainder"
res = req.get_response(self.api)
self.assertEquals(res.status_int, 201)
def test_add_copy_from_image_unauthorized(self):
rules = {"add_image": '@', "copy_from": '!'}
self.set_policy_rules(rules)
fixture_headers = {'x-image-meta-store': 'file',
'x-image-meta-disk-format': 'vhd',
'x-glance-api-copy-from': 'http://glance.com/i.ovf',
'x-image-meta-container-format': 'ovf',
'x-image-meta-name': 'fake image #F'}
req = webob.Request.blank("/images")
req.method = 'POST'
for k, v in fixture_headers.iteritems():
req.headers[k] = v
req.headers['Content-Type'] = 'application/octet-stream'
req.body = "chunk00000remainder"
res = req.get_response(self.api)
self.assertEquals(res.status_int, 403)
def test_add_copy_from_image_authorized(self):
rules = {"add_image": '@', "copy_from": '@'}
self.set_policy_rules(rules)
fixture_headers = {'x-image-meta-store': 'file',
'x-image-meta-disk-format': 'vhd',
'x-glance-api-copy-from': 'http://glance.com/i.ovf',
'x-image-meta-container-format': 'ovf',
'x-image-meta-name': 'fake image #F'}
req = webob.Request.blank("/images")
req.method = 'POST'
for k, v in fixture_headers.iteritems():
req.headers[k] = v
req.headers['Content-Type'] = 'application/octet-stream'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 201)
def test_add_copy_from_with_nonempty_body(self):
"""Tests creates an image from copy-from and nonempty body"""
fixture_headers = {'x-image-meta-store': 'file',
'x-image-meta-disk-format': 'vhd',
'x-glance-api-copy-from': 'http://a/b/c.ovf',
'x-image-meta-container-format': 'ovf',
'x-image-meta-name': 'fake image #F'}
req = webob.Request.blank("/images")
req.headers['Content-Type'] = 'application/octet-stream'
req.method = 'POST'
req.body = "chunk00000remainder"
for k, v in fixture_headers.iteritems():
req.headers[k] = v
res = req.get_response(self.api)
self.assertEquals(res.status_int, 400)
def test_add_location_with_nonempty_body(self):
"""Tests creates an image from location and nonempty body"""
fixture_headers = {'x-image-meta-store': 'file',
'x-image-meta-disk-format': 'vhd',
'x-image-meta-location': 'http://a/b/c.tar.gz',
'x-image-meta-container-format': 'ovf',
'x-image-meta-name': 'fake image #F'}
req = webob.Request.blank("/images")
req.headers['Content-Type'] = 'application/octet-stream'
req.method = 'POST'
req.body = "chunk00000remainder"
for k, v in fixture_headers.iteritems():
req.headers[k] = v
res = req.get_response(self.api)
self.assertEquals(res.status_int, 400)
def test_add_location_with_conflict_image_size(self):
"""Tests creates an image from location and conflict image size"""
self.stubs.Set(glance.api.v1.images, 'get_size_from_backend',
lambda *args, **kwargs: 2)
fixture_headers = {'x-image-meta-store': 'file',
'x-image-meta-disk-format': 'vhd',
'x-image-meta-location': 'http://a/b/c.tar.gz',
'x-image-meta-container-format': 'ovf',
'x-image-meta-name': 'fake image #F',
'x-image-meta-size': '1'}
req = webob.Request.blank("/images")
req.headers['Content-Type'] = 'application/octet-stream'
req.method = 'POST'
for k, v in fixture_headers.iteritems():
req.headers[k] = v
res = req.get_response(self.api)
self.assertEquals(res.status_int, 409)
def test_add_copy_from_with_location(self):
"""Tests creates an image from copy-from and location"""
fixture_headers = {'x-image-meta-store': 'file',
'x-image-meta-disk-format': 'vhd',
'x-glance-api-copy-from': 'http://a/b/c.ovf',
'x-image-meta-container-format': 'ovf',
'x-image-meta-name': 'fake image #F',
'x-image-meta-location': 'http://a/b/c.tar.gz'}
req = webob.Request.blank("/images")
req.method = 'POST'
for k, v in fixture_headers.iteritems():
req.headers[k] = v
res = req.get_response(self.api)
self.assertEquals(res.status_int, 400)
def _do_test_post_image_content_missing_format(self, missing):
"""Tests creation of an image with missing format"""
fixture_headers = {'x-image-meta-store': 'file',
'x-image-meta-disk-format': 'vhd',
'x-image-meta-container-format': 'ovf',
'x-image-meta-name': 'fake image #3'}
header = 'x-image-meta-' + missing.replace('_', '-')
del fixture_headers[header]
req = webob.Request.blank("/images")
req.method = 'POST'
for k, v in fixture_headers.iteritems():
req.headers[k] = v
req.headers['Content-Type'] = 'application/octet-stream'
req.body = "chunk00000remainder"
res = req.get_response(self.api)
self.assertEqual(res.status_int, 400)
def test_add_copy_from_with_restricted_sources(self):
"""Tests creates an image from copy-from with restricted sources"""
header_template = {'x-image-meta-store': 'file',
'x-image-meta-disk-format': 'vhd',
'x-image-meta-container-format': 'ovf',
'x-image-meta-name': 'fake image #F'}
schemas = ["file:///etc/passwd",
"swift+config:///xxx",
"filesystem:///etc/passwd"]
for schema in schemas:
req = webob.Request.blank("/images")
req.method = 'POST'
for k, v in six.iteritems(header_template):
req.headers[k] = v
req.headers['x-glance-api-copy-from'] = schema
res = req.get_response(self.api)
self.assertEqual(400, res.status_int)
def test_post_image_content_missing_disk_format(self):
"""Tests creation of an image with missing disk format"""
self._do_test_post_image_content_missing_format('disk_format')
def test_post_image_content_missing_container_type(self):
"""Tests creation of an image with missing container format"""
self._do_test_post_image_content_missing_format('container_format')
def _do_test_put_image_content_missing_format(self, missing):
"""Tests delayed activation of an image with missing format"""
fixture_headers = {'x-image-meta-store': 'file',
'x-image-meta-disk-format': 'vhd',
'x-image-meta-container-format': 'ovf',
'x-image-meta-name': 'fake image #3'}
header = 'x-image-meta-' + missing.replace('_', '-')
del fixture_headers[header]
req = webob.Request.blank("/images")
req.method = 'POST'
for k, v in fixture_headers.iteritems():
req.headers[k] = v
res = req.get_response(self.api)
self.assertEquals(res.status_int, 201)
res_body = json.loads(res.body)['image']
self.assertEquals('queued', res_body['status'])
image_id = res_body['id']
req = webob.Request.blank("/images/%s" % image_id)
req.method = 'PUT'
for k, v in fixture_headers.iteritems():
req.headers[k] = v
req.headers['Content-Type'] = 'application/octet-stream'
req.body = "chunk00000remainder"
res = req.get_response(self.api)
self.assertEquals(res.status_int, 400)
def test_put_image_content_missing_disk_format(self):
"""Tests delayed activation of image with missing disk format"""
self._do_test_put_image_content_missing_format('disk_format')
def test_put_image_content_missing_container_type(self):
"""Tests delayed activation of image with missing container format"""
self._do_test_put_image_content_missing_format('container_format')
def test_update_deleted_image(self):
"""Tests that exception raised trying to update a deleted image"""
req = webob.Request.blank("/images/%s" % UUID2)
req.method = 'DELETE'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 200)
fixture = {'name': 'test_del_img'}
req = webob.Request.blank('/images/%s' % UUID2)
req.method = 'PUT'
req.content_type = 'application/json'
req.body = json.dumps(dict(image=fixture))
res = req.get_response(self.api)
self.assertEquals(res.status_int, 403)
self.assertTrue('Forbidden to update deleted image' in res.body)
def test_delete_deleted_image(self):
"""Tests that exception raised trying to delete a deleted image"""
req = webob.Request.blank("/images/%s" % UUID2)
req.method = 'DELETE'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 200)
# Verify the status is deleted
req = webob.Request.blank("/images/%s" % UUID2)
req.method = 'HEAD'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 200)
self.assertEqual("deleted", res.headers['x-image-meta-status'])
req = webob.Request.blank("/images/%s" % UUID2)
req.method = 'DELETE'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 404)
msg = "Image %s not found." % UUID2
self.assertTrue(msg in res.body)
# Verify the status is still deleted
req = webob.Request.blank("/images/%s" % UUID2)
req.method = 'HEAD'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 200)
self.assertEqual("deleted", res.headers['x-image-meta-status'])
def test_delete_pending_delete_image(self):
"""
        Tests that the correct response is returned when deleting
        a pending_delete image
"""
# First deletion
self.config(delayed_delete=True, scrubber_datadir='/tmp/scrubber')
req = webob.Request.blank("/images/%s" % UUID2)
req.method = 'DELETE'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 200)
# Verify the status is pending_delete
req = webob.Request.blank("/images/%s" % UUID2)
req.method = 'HEAD'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 200)
self.assertEqual("pending_delete", res.headers['x-image-meta-status'])
# Second deletion
req = webob.Request.blank("/images/%s" % UUID2)
req.method = 'DELETE'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 403)
self.assertTrue('Forbidden to delete a pending_delete image'
in res.body)
# Verify the status is still pending_delete
req = webob.Request.blank("/images/%s" % UUID2)
req.method = 'HEAD'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 200)
self.assertEqual("pending_delete", res.headers['x-image-meta-status'])
def test_register_and_upload(self):
"""
Test that the process of registering an image with
some metadata, then uploading an image file with some
more metadata doesn't mark the original metadata deleted
:see LP Bug#901534
"""
fixture_headers = {'x-image-meta-store': 'file',
'x-image-meta-disk-format': 'vhd',
'x-image-meta-container-format': 'ovf',
'x-image-meta-name': 'fake image #3',
'x-image-meta-property-key1': 'value1'}
req = webob.Request.blank("/images")
req.method = 'POST'
for k, v in fixture_headers.iteritems():
req.headers[k] = v
res = req.get_response(self.api)
self.assertEquals(res.status_int, 201)
res_body = json.loads(res.body)['image']
self.assertTrue('id' in res_body)
image_id = res_body['id']
self.assertTrue('/images/%s' % image_id in res.headers['location'])
# Verify the status is queued
self.assertTrue('status' in res_body)
self.assertEqual('queued', res_body['status'])
# Check properties are not deleted
self.assertTrue('properties' in res_body)
self.assertTrue('key1' in res_body['properties'])
self.assertEqual('value1', res_body['properties']['key1'])
# Now upload the image file along with some more
# metadata and verify original metadata properties
# are not marked deleted
req = webob.Request.blank("/images/%s" % image_id)
req.method = 'PUT'
req.headers['Content-Type'] = 'application/octet-stream'
req.headers['x-image-meta-property-key2'] = 'value2'
req.body = "chunk00000remainder"
res = req.get_response(self.api)
self.assertEquals(res.status_int, 200)
        # Verify the image is now active and the original property is intact
req = webob.Request.blank("/images/%s" % image_id)
req.method = 'HEAD'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 200)
self.assertTrue('x-image-meta-property-key1' in res.headers,
"Did not find required property in headers. "
"Got headers: %r" % res.headers)
self.assertEqual("active", res.headers['x-image-meta-status'])
def test_disable_purge_props(self):
"""
Test the special x-glance-registry-purge-props header controls
the purge property behaviour of the registry.
:see LP Bug#901534
"""
fixture_headers = {'x-image-meta-store': 'file',
'x-image-meta-disk-format': 'vhd',
'x-image-meta-container-format': 'ovf',
'x-image-meta-name': 'fake image #3',
'x-image-meta-property-key1': 'value1'}
req = webob.Request.blank("/images")
req.method = 'POST'
for k, v in fixture_headers.iteritems():
req.headers[k] = v
req.headers['Content-Type'] = 'application/octet-stream'
req.body = "chunk00000remainder"
res = req.get_response(self.api)
self.assertEquals(res.status_int, 201)
res_body = json.loads(res.body)['image']
self.assertTrue('id' in res_body)
image_id = res_body['id']
self.assertTrue('/images/%s' % image_id in res.headers['location'])
        # Verify the status is active (the image data was uploaded with the POST)
self.assertTrue('status' in res_body)
self.assertEqual('active', res_body['status'])
# Check properties are not deleted
self.assertTrue('properties' in res_body)
self.assertTrue('key1' in res_body['properties'])
self.assertEqual('value1', res_body['properties']['key1'])
# Now update the image, setting new properties without
# passing the x-glance-registry-purge-props header and
# verify that original properties are marked deleted.
req = webob.Request.blank("/images/%s" % image_id)
req.method = 'PUT'
req.headers['x-image-meta-property-key2'] = 'value2'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 200)
# Verify the original property no longer in headers
req = webob.Request.blank("/images/%s" % image_id)
req.method = 'HEAD'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 200)
self.assertTrue('x-image-meta-property-key2' in res.headers,
"Did not find required property in headers. "
"Got headers: %r" % res.headers)
self.assertFalse('x-image-meta-property-key1' in res.headers,
"Found property in headers that was not expected. "
"Got headers: %r" % res.headers)
# Now update the image, setting new properties and
# passing the x-glance-registry-purge-props header with
# a value of "false" and verify that second property
# still appears in headers.
req = webob.Request.blank("/images/%s" % image_id)
req.method = 'PUT'
req.headers['x-image-meta-property-key3'] = 'value3'
req.headers['x-glance-registry-purge-props'] = 'false'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 200)
# Verify the second and third property in headers
req = webob.Request.blank("/images/%s" % image_id)
req.method = 'HEAD'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 200)
self.assertTrue('x-image-meta-property-key2' in res.headers,
"Did not find required property in headers. "
"Got headers: %r" % res.headers)
self.assertTrue('x-image-meta-property-key3' in res.headers,
"Did not find required property in headers. "
"Got headers: %r" % res.headers)
def test_publicize_image_unauthorized(self):
"""Create a non-public image then fail to make public"""
rules = {"add_image": '@', "publicize_image": '!'}
self.set_policy_rules(rules)
fixture_headers = {'x-image-meta-store': 'file',
'x-image-meta-disk-format': 'vhd',
'x-image-meta-is-public': 'false',
'x-image-meta-container-format': 'ovf',
'x-image-meta-name': 'fake image #3'}
req = webob.Request.blank("/images")
req.method = 'POST'
for k, v in fixture_headers.iteritems():
req.headers[k] = v
res = req.get_response(self.api)
self.assertEquals(res.status_int, 201)
res_body = json.loads(res.body)['image']
req = webob.Request.blank("/images/%s" % res_body['id'])
req.method = 'PUT'
req.headers['x-image-meta-is-public'] = 'true'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 403)
def test_update_image_size_header_too_big(self):
"""Tests raises BadRequest for supplied image size that is too big"""
fixture_headers = {'x-image-meta-size': CONF.image_size_cap + 1}
req = webob.Request.blank("/images/%s" % UUID2)
req.method = 'PUT'
for k, v in fixture_headers.iteritems():
req.headers[k] = v
res = req.get_response(self.api)
self.assertEquals(res.status_int, 400)
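    # A request body larger than image_size_cap is rejected with 400.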
def test_update_image_size_data_too_big(self):
self.config(image_size_cap=512)
fixture_headers = {'content-type': 'application/octet-stream'}
req = webob.Request.blank("/images/%s" % UUID2)
req.method = 'PUT'
req.body = 'X' * (CONF.image_size_cap + 1)
for k, v in fixture_headers.iteritems():
req.headers[k] = v
res = req.get_response(self.api)
self.assertEquals(res.status_int, 400)
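    # With Transfer-Encoding: chunked there is no Content-Length to check up
    # front, so the oversized body is only detected while streaming and the
    # request is rejected with 413.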
def test_update_image_size_chunked_data_too_big(self):
self.config(image_size_cap=512)
# Create new image that has no data
req = webob.Request.blank("/images")
req.method = 'POST'
req.headers['x-image-meta-name'] = 'something'
req.headers['x-image-meta-container_format'] = 'ami'
req.headers['x-image-meta-disk_format'] = 'ami'
res = req.get_response(self.api)
image_id = json.loads(res.body)['image']['id']
fixture_headers = {
'content-type': 'application/octet-stream',
'transfer-encoding': 'chunked',
}
req = webob.Request.blank("/images/%s" % image_id)
req.method = 'PUT'
req.body_file = StringIO.StringIO('X' * (CONF.image_size_cap + 1))
for k, v in fixture_headers.iteritems():
req.headers[k] = v
res = req.get_response(self.api)
self.assertEquals(res.status_int, 413)
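    # Updating an image id that does not exist should return 404.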
def test_update_non_existing_image(self):
self.config(image_size_cap=100)
req = webob.Request.blank("images/%s" % _gen_uuid)
req.method = 'PUT'
req.body = 'test'
req.headers['x-image-meta-name'] = 'test'
req.headers['x-image-meta-container_format'] = 'ami'
req.headers['x-image-meta-disk_format'] = 'ami'
req.headers['x-image-meta-is_public'] = 'False'
res = req.get_response(self.api)
self.assertEqual(res.status_int, 404)
def test_update_public_image(self):
fixture_headers = {'x-image-meta-store': 'file',
'x-image-meta-disk-format': 'vhd',
'x-image-meta-is-public': 'true',
'x-image-meta-container-format': 'ovf',
'x-image-meta-name': 'fake image #3'}
req = webob.Request.blank("/images")
req.method = 'POST'
for k, v in fixture_headers.iteritems():
req.headers[k] = v
res = req.get_response(self.api)
self.assertEquals(res.status_int, 201)
res_body = json.loads(res.body)['image']
req = webob.Request.blank("/images/%s" % res_body['id'])
req.method = 'PUT'
req.headers['x-image-meta-name'] = 'updated public image'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 200)
def test_get_index_sort_name_asc(self):
"""
Tests that the /images registry API returns list of
public images sorted alphabetically by name in
ascending order.
"""
UUID3 = _gen_uuid()
extra_fixture = {'id': UUID3,
'status': 'active',
'is_public': True,
'disk_format': 'vhd',
'container_format': 'ovf',
'name': 'asdf',
'size': 19,
'checksum': None}
db_api.image_create(self.context, extra_fixture)
UUID4 = _gen_uuid()
extra_fixture = {'id': UUID4,
'status': 'active',
'is_public': True,
'disk_format': 'vhd',
'container_format': 'ovf',
'name': 'xyz',
'size': 20,
'checksum': None}
db_api.image_create(self.context, extra_fixture)
req = webob.Request.blank('/images?sort_key=name&sort_dir=asc')
res = req.get_response(self.api)
self.assertEquals(res.status_int, 200)
res_dict = json.loads(res.body)
images = res_dict['images']
self.assertEquals(len(images), 3)
self.assertEquals(images[0]['id'], UUID3)
self.assertEquals(images[1]['id'], UUID2)
self.assertEquals(images[2]['id'], UUID4)
def test_get_details_filter_changes_since(self):
"""
        Tests that the /images/detail registry API correctly filters the
        returned images using the changes-since query parameter
"""
dt1 = timeutils.utcnow() - datetime.timedelta(1)
iso1 = timeutils.isotime(dt1)
date_only1 = dt1.strftime('%Y-%m-%d')
date_only2 = dt1.strftime('%Y%m%d')
date_only3 = dt1.strftime('%Y-%m%d')
dt2 = timeutils.utcnow() + datetime.timedelta(1)
iso2 = timeutils.isotime(dt2)
image_ts = timeutils.utcnow() + datetime.timedelta(2)
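        # The same timestamp with +01:00 / -01:00 offsets (one hour either
        # side in UTC); %%2B renders as '%2B', the URL-encoded '+' sign.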
hour_before = image_ts.strftime('%Y-%m-%dT%H:%M:%S%%2B01:00')
hour_after = image_ts.strftime('%Y-%m-%dT%H:%M:%S-01:00')
dt4 = timeutils.utcnow() + datetime.timedelta(3)
iso4 = timeutils.isotime(dt4)
UUID3 = _gen_uuid()
extra_fixture = {'id': UUID3,
'status': 'active',
'is_public': True,
'disk_format': 'vhd',
'container_format': 'ovf',
'name': 'fake image #3',
'size': 18,
'checksum': None}
db_api.image_create(self.context, extra_fixture)
db_api.image_destroy(self.context, UUID3)
UUID4 = _gen_uuid()
extra_fixture = {'id': UUID4,
'status': 'active',
'is_public': True,
'disk_format': 'ami',
'container_format': 'ami',
'name': 'fake image #4',
'size': 20,
'checksum': None,
'created_at': image_ts,
'updated_at': image_ts}
db_api.image_create(self.context, extra_fixture)
# Check a standard list, 4 images in db (2 deleted)
req = webob.Request.blank('/images/detail')
res = req.get_response(self.api)
self.assertEquals(res.status_int, 200)
res_dict = json.loads(res.body)
images = res_dict['images']
self.assertEquals(len(images), 2)
self.assertEqual(images[0]['id'], UUID4)
self.assertEqual(images[1]['id'], UUID2)
# Expect 3 images (1 deleted)
req = webob.Request.blank('/images/detail?changes-since=%s' % iso1)
res = req.get_response(self.api)
self.assertEquals(res.status_int, 200)
res_dict = json.loads(res.body)
images = res_dict['images']
self.assertEquals(len(images), 3)
self.assertEqual(images[0]['id'], UUID4)
self.assertEqual(images[1]['id'], UUID3) # deleted
self.assertEqual(images[2]['id'], UUID2)
        # Expect 1 image (0 deleted)
req = webob.Request.blank('/images/detail?changes-since=%s' % iso2)
res = req.get_response(self.api)
self.assertEquals(res.status_int, 200)
res_dict = json.loads(res.body)
images = res_dict['images']
self.assertEquals(len(images), 1)
self.assertEqual(images[0]['id'], UUID4)
        # Expect 1 image (0 deleted)
req = webob.Request.blank('/images/detail?changes-since=%s' %
hour_before)
res = req.get_response(self.api)
self.assertEquals(res.status_int, 200)
res_dict = json.loads(res.body)
images = res_dict['images']
self.assertEquals(len(images), 1)
self.assertEqual(images[0]['id'], UUID4)
# Expect 0 images (0 deleted)
req = webob.Request.blank('/images/detail?changes-since=%s' %
hour_after)
res = req.get_response(self.api)
self.assertEquals(res.status_int, 200)
res_dict = json.loads(res.body)
images = res_dict['images']
self.assertEquals(len(images), 0)
# Expect 0 images (0 deleted)
req = webob.Request.blank('/images/detail?changes-since=%s' % iso4)
res = req.get_response(self.api)
self.assertEquals(res.status_int, 200)
res_dict = json.loads(res.body)
images = res_dict['images']
self.assertEquals(len(images), 0)
for param in [date_only1, date_only2, date_only3]:
# Expect 3 images (1 deleted)
req = webob.Request.blank('/images/detail?changes-since=%s' %
param)
res = req.get_response(self.api)
self.assertEquals(res.status_int, 200)
res_dict = json.loads(res.body)
images = res_dict['images']
self.assertEquals(len(images), 3)
self.assertEqual(images[0]['id'], UUID4)
self.assertEqual(images[1]['id'], UUID3) # deleted
self.assertEqual(images[2]['id'], UUID2)
# Bad request (empty changes-since param)
req = webob.Request.blank('/images/detail?changes-since=')
res = req.get_response(self.api)
self.assertEquals(res.status_int, 400)
def test_get_images_bad_urls(self):
"""Check that routes collections are not on (LP bug 1185828)"""
req = webob.Request.blank('/images/detail.xxx')
res = req.get_response(self.api)
self.assertEquals(res.status_int, 404)
req = webob.Request.blank('/images.xxx')
res = req.get_response(self.api)
self.assertEquals(res.status_int, 404)
req = webob.Request.blank('/images/new')
res = req.get_response(self.api)
self.assertEquals(res.status_int, 404)
req = webob.Request.blank("/images/%s/members" % UUID1)
res = req.get_response(self.api)
self.assertEquals(res.status_int, 200)
req = webob.Request.blank("/images/%s/members.xxx" % UUID1)
res = req.get_response(self.api)
self.assertEquals(res.status_int, 404)
def test_get_images_detailed_unauthorized(self):
rules = {"get_images": '!'}
self.set_policy_rules(rules)
req = webob.Request.blank('/images/detail')
res = req.get_response(self.api)
self.assertEquals(res.status_int, 403)
def test_get_images_unauthorized(self):
rules = {"get_images": '!'}
self.set_policy_rules(rules)
        req = webob.Request.blank('/images')
res = req.get_response(self.api)
self.assertEquals(res.status_int, 403)
def test_store_location_not_revealed(self):
"""
Test that the internal store location is NOT revealed
through the API server
"""
# Check index and details...
for url in ('/images', '/images/detail'):
req = webob.Request.blank(url)
res = req.get_response(self.api)
self.assertEquals(res.status_int, 200)
res_dict = json.loads(res.body)
images = res_dict['images']
num_locations = sum([1 for record in images
if 'location' in record.keys()])
self.assertEquals(0, num_locations, images)
# Check GET
req = webob.Request.blank("/images/%s" % UUID2)
res = req.get_response(self.api)
self.assertEqual(res.status_int, 200)
self.assertFalse('X-Image-Meta-Location' in res.headers)
# Check HEAD
req = webob.Request.blank("/images/%s" % UUID2)
req.method = 'HEAD'
res = req.get_response(self.api)
self.assertEqual(res.status_int, 200)
self.assertFalse('X-Image-Meta-Location' in res.headers)
# Check PUT
req = webob.Request.blank("/images/%s" % UUID2)
req.body = res.body
req.method = 'PUT'
res = req.get_response(self.api)
self.assertEqual(res.status_int, 200)
res_body = json.loads(res.body)
self.assertFalse('location' in res_body['image'])
# Check POST
req = webob.Request.blank("/images")
headers = {'x-image-meta-location': 'http://localhost',
'x-image-meta-disk-format': 'vhd',
'x-image-meta-container-format': 'ovf',
'x-image-meta-name': 'fake image #3'}
for k, v in headers.iteritems():
req.headers[k] = v
req.method = 'POST'
res = req.get_response(self.api)
self.assertEqual(res.status_int, 201)
res_body = json.loads(res.body)
self.assertFalse('location' in res_body['image'])
def test_image_is_checksummed(self):
"""Test that the image contents are checksummed properly"""
fixture_headers = {'x-image-meta-store': 'file',
'x-image-meta-disk-format': 'vhd',
'x-image-meta-container-format': 'ovf',
'x-image-meta-name': 'fake image #3'}
image_contents = "chunk00000remainder"
image_checksum = hashlib.md5(image_contents).hexdigest()
req = webob.Request.blank("/images")
req.method = 'POST'
for k, v in fixture_headers.iteritems():
req.headers[k] = v
req.headers['Content-Type'] = 'application/octet-stream'
req.body = image_contents
res = req.get_response(self.api)
self.assertEquals(res.status_int, 201)
res_body = json.loads(res.body)['image']
self.assertEquals(image_checksum, res_body['checksum'],
"Mismatched checksum. Expected %s, got %s" %
(image_checksum, res_body['checksum']))
def test_etag_equals_checksum_header(self):
"""Test that the ETag header matches the x-image-meta-checksum"""
fixture_headers = {'x-image-meta-store': 'file',
'x-image-meta-disk-format': 'vhd',
'x-image-meta-container-format': 'ovf',
'x-image-meta-name': 'fake image #3'}
image_contents = "chunk00000remainder"
image_checksum = hashlib.md5(image_contents).hexdigest()
req = webob.Request.blank("/images")
req.method = 'POST'
for k, v in fixture_headers.iteritems():
req.headers[k] = v
req.headers['Content-Type'] = 'application/octet-stream'
req.body = image_contents
res = req.get_response(self.api)
self.assertEquals(res.status_int, 201)
image = json.loads(res.body)['image']
# HEAD the image and check the ETag equals the checksum header...
expected_headers = {'x-image-meta-checksum': image_checksum,
'etag': image_checksum}
req = webob.Request.blank("/images/%s" % image['id'])
req.method = 'HEAD'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 200)
for key in expected_headers.keys():
self.assertTrue(key in res.headers,
"required header '%s' missing from "
"returned headers" % key)
for key, value in expected_headers.iteritems():
self.assertEquals(value, res.headers[key])
def test_bad_checksum_prevents_image_creation(self):
"""Test that the image contents are checksummed properly"""
image_contents = "chunk00000remainder"
bad_checksum = hashlib.md5("invalid").hexdigest()
fixture_headers = {'x-image-meta-store': 'file',
'x-image-meta-disk-format': 'vhd',
'x-image-meta-container-format': 'ovf',
'x-image-meta-name': 'fake image #3',
'x-image-meta-checksum': bad_checksum,
'x-image-meta-is-public': 'true'}
req = webob.Request.blank("/images")
req.method = 'POST'
for k, v in fixture_headers.iteritems():
req.headers[k] = v
req.headers['Content-Type'] = 'application/octet-stream'
req.body = image_contents
res = req.get_response(self.api)
self.assertEquals(res.status_int, 400)
        # Verify that only the one pre-existing image is returned
req = webob.Request.blank("/images")
req.method = 'GET'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 200)
images = json.loads(res.body)['images']
self.assertEqual(len(images), 1)
def test_image_meta(self):
"""Test for HEAD /images/<ID>"""
expected_headers = {'x-image-meta-id': UUID2,
'x-image-meta-name': 'fake image #2'}
req = webob.Request.blank("/images/%s" % UUID2)
req.method = 'HEAD'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 200)
for key, value in expected_headers.iteritems():
self.assertEquals(value, res.headers[key])
def test_image_meta_unauthorized(self):
rules = {"get_image": '!'}
self.set_policy_rules(rules)
req = webob.Request.blank("/images/%s" % UUID2)
req.method = 'HEAD'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 403)
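    # GET on an existing image returns the raw image data as
    # application/octet-stream.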
def test_show_image_basic(self):
req = webob.Request.blank("/images/%s" % UUID2)
res = req.get_response(self.api)
self.assertEqual(res.status_int, 200)
self.assertEqual(res.content_type, 'application/octet-stream')
self.assertEqual('chunk00000remainder', res.body)
def test_show_non_exists_image(self):
req = webob.Request.blank("/images/%s" % _gen_uuid())
res = req.get_response(self.api)
self.assertEquals(res.status_int, 404)
def test_show_image_unauthorized(self):
rules = {"get_image": '!'}
self.set_policy_rules(rules)
req = webob.Request.blank("/images/%s" % UUID2)
res = req.get_response(self.api)
self.assertEqual(res.status_int, 403)
def test_show_image_unauthorized_download(self):
rules = {"download_image": '!'}
self.set_policy_rules(rules)
req = webob.Request.blank("/images/%s" % UUID2)
res = req.get_response(self.api)
self.assertEqual(res.status_int, 403)
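    # After a DELETE the image data is gone (GET returns 404) but HEAD still
    # reports the soft-deleted metadata via the x-image-meta-deleted and
    # x-image-meta-status headers.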
def test_delete_image(self):
req = webob.Request.blank("/images/%s" % UUID2)
req.method = 'DELETE'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 200)
self.assertEquals(res.body, '')
req = webob.Request.blank("/images/%s" % UUID2)
req.method = 'GET'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 404,
res.body)
req = webob.Request.blank("/images/%s" % UUID2)
req.method = 'HEAD'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 200)
self.assertEquals(res.headers['x-image-meta-deleted'], 'True')
self.assertEquals(res.headers['x-image-meta-status'], 'deleted')
def test_delete_non_exists_image(self):
req = webob.Request.blank("/images/%s" % _gen_uuid())
req.method = 'DELETE'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 404)
def test_delete_not_allowed(self):
# Verify we can get the image data
req = webob.Request.blank("/images/%s" % UUID2)
req.method = 'GET'
req.headers['X-Auth-Token'] = 'user:tenant:'
res = req.get_response(self.api)
self.assertEqual(res.status_int, 200)
self.assertEqual(len(res.body), 19)
# Verify we cannot delete the image
req.method = 'DELETE'
res = req.get_response(self.api)
self.assertEqual(res.status_int, 403)
# Verify the image data is still there
req.method = 'GET'
res = req.get_response(self.api)
self.assertEqual(res.status_int, 200)
self.assertEqual(len(res.body), 19)
def test_delete_queued_image(self):
"""Delete an image in a queued state
Bug #747799 demonstrated that trying to DELETE an image
that had had its save process killed manually results in failure
because the location attribute is None.
Bug #1048851 demonstrated that the status was not properly
being updated to 'deleted' from 'queued'.
"""
fixture_headers = {'x-image-meta-store': 'file',
'x-image-meta-disk-format': 'vhd',
'x-image-meta-container-format': 'ovf',
'x-image-meta-name': 'fake image #3'}
req = webob.Request.blank("/images")
req.method = 'POST'
for k, v in fixture_headers.iteritems():
req.headers[k] = v
res = req.get_response(self.api)
self.assertEquals(res.status_int, 201)
res_body = json.loads(res.body)['image']
self.assertEquals('queued', res_body['status'])
# Now try to delete the image...
req = webob.Request.blank("/images/%s" % res_body['id'])
req.method = 'DELETE'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 200)
req = webob.Request.blank('/images/%s' % res_body['id'])
req.method = 'HEAD'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 200)
self.assertEquals(res.headers['x-image-meta-deleted'], 'True')
self.assertEquals(res.headers['x-image-meta-status'], 'deleted')
def test_delete_queued_image_delayed_delete(self):
"""Delete an image in a queued state when delayed_delete is on
Bug #1048851 demonstrated that the status was not properly
being updated to 'deleted' from 'queued'.
"""
self.config(delayed_delete=True)
fixture_headers = {'x-image-meta-store': 'file',
'x-image-meta-disk-format': 'vhd',
'x-image-meta-container-format': 'ovf',
'x-image-meta-name': 'fake image #3'}
req = webob.Request.blank("/images")
req.method = 'POST'
for k, v in fixture_headers.iteritems():
req.headers[k] = v
res = req.get_response(self.api)
self.assertEquals(res.status_int, 201)
res_body = json.loads(res.body)['image']
self.assertEquals('queued', res_body['status'])
# Now try to delete the image...
req = webob.Request.blank("/images/%s" % res_body['id'])
req.method = 'DELETE'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 200)
req = webob.Request.blank('/images/%s' % res_body['id'])
req.method = 'HEAD'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 200)
self.assertEquals(res.headers['x-image-meta-deleted'], 'True')
self.assertEquals(res.headers['x-image-meta-status'], 'deleted')
def test_delete_protected_image(self):
fixture_headers = {'x-image-meta-store': 'file',
'x-image-meta-name': 'fake image #3',
'x-image-meta-disk-format': 'vhd',
'x-image-meta-container-format': 'ovf',
'x-image-meta-protected': 'True'}
req = webob.Request.blank("/images")
req.method = 'POST'
for k, v in fixture_headers.iteritems():
req.headers[k] = v
res = req.get_response(self.api)
self.assertEquals(res.status_int, 201)
res_body = json.loads(res.body)['image']
self.assertEquals('queued', res_body['status'])
# Now try to delete the image...
req = webob.Request.blank("/images/%s" % res_body['id'])
req.method = 'DELETE'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 403)
def test_delete_image_unauthorized(self):
rules = {"delete_image": '!'}
self.set_policy_rules(rules)
req = webob.Request.blank("/images/%s" % UUID2)
req.method = 'DELETE'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 403)
def test_get_details_invalid_marker(self):
"""
Tests that the /images/detail registry API returns a 400
when an invalid marker is provided
"""
req = webob.Request.blank('/images/detail?marker=%s' % _gen_uuid())
res = req.get_response(self.api)
self.assertEquals(res.status_int, 400)
def test_get_image_members(self):
"""
Tests members listing for existing images
"""
req = webob.Request.blank('/images/%s/members' % UUID2)
req.method = 'GET'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 200)
memb_list = json.loads(res.body)
num_members = len(memb_list['members'])
self.assertEquals(num_members, 0)
def test_get_image_members_allowed_by_policy(self):
rules = {"get_members": '@'}
self.set_policy_rules(rules)
req = webob.Request.blank('/images/%s/members' % UUID2)
req.method = 'GET'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 200)
memb_list = json.loads(res.body)
num_members = len(memb_list['members'])
self.assertEquals(num_members, 0)
def test_get_image_members_forbidden_by_policy(self):
rules = {"get_members": '!'}
self.set_policy_rules(rules)
req = webob.Request.blank('/images/%s/members' % UUID2)
req.method = 'GET'
res = req.get_response(self.api)
self.assertEquals(res.status_int, webob.exc.HTTPForbidden.code)
def test_get_image_members_not_existing(self):
"""
Tests proper exception is raised if attempt to get members of
non-existing image
"""
req = webob.Request.blank('/images/%s/members' % _gen_uuid())
req.method = 'GET'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 404)
def test_add_member(self):
"""
Tests adding image members
"""
test_router_api = router.API(self.mapper)
self.api = test_utils.FakeAuthMiddleware(
test_router_api, is_admin=True)
req = webob.Request.blank('/images/%s/members/test' % UUID2)
req.method = 'PUT'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 201)
def test_get_member_images(self):
"""
Tests image listing for members
"""
req = webob.Request.blank('/shared-images/pattieblack')
req.method = 'GET'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 200)
memb_list = json.loads(res.body)
num_members = len(memb_list['shared_images'])
self.assertEquals(num_members, 0)
def test_replace_members(self):
"""
Tests replacing image members raises right exception
"""
test_router_api = router.API(self.mapper)
self.api = test_utils.FakeAuthMiddleware(
test_router_api, is_admin=False)
fixture = dict(member_id='pattieblack')
req = webob.Request.blank('/images/%s/members' % UUID2)
req.method = 'PUT'
req.content_type = 'application/json'
req.body = json.dumps(dict(image_memberships=fixture))
res = req.get_response(self.api)
self.assertEquals(res.status_int, 401)
def test_active_image_immutable_props_for_user(self):
"""
Tests user cannot update immutable props of active image
"""
test_router_api = router.API(self.mapper)
self.api = test_utils.FakeAuthMiddleware(
test_router_api, is_admin=False)
fixture_header_list = [{'x-image-meta-checksum': '1234'},
{'x-image-meta-size': '12345'}]
for fixture_header in fixture_header_list:
req = webob.Request.blank('/images/%s' % UUID2)
req.method = 'PUT'
for k, v in fixture_header.iteritems():
req = webob.Request.blank('/images/%s' % UUID2)
req.method = 'HEAD'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 200)
orig_value = res.headers[k]
req = webob.Request.blank('/images/%s' % UUID2)
req.headers[k] = v
req.method = 'PUT'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 403)
prop = k[len('x-image-meta-'):]
self.assertNotEqual(res.body.find("Forbidden to modify \'%s\' "
"of active "
"image" % prop), -1)
req = webob.Request.blank('/images/%s' % UUID2)
req.method = 'HEAD'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 200)
self.assertEquals(orig_value, res.headers[k])
def test_props_of_active_image_mutable_for_admin(self):
"""
Tests admin can update 'immutable' props of active image
"""
test_router_api = router.API(self.mapper)
self.api = test_utils.FakeAuthMiddleware(
test_router_api, is_admin=True)
fixture_header_list = [{'x-image-meta-checksum': '1234'},
{'x-image-meta-size': '12345'}]
for fixture_header in fixture_header_list:
req = webob.Request.blank('/images/%s' % UUID2)
req.method = 'PUT'
for k, v in fixture_header.iteritems():
req = webob.Request.blank('/images/%s' % UUID2)
req.method = 'HEAD'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 200)
orig_value = res.headers[k]
req = webob.Request.blank('/images/%s' % UUID2)
req.headers[k] = v
req.method = 'PUT'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 200)
req = webob.Request.blank('/images/%s' % UUID2)
req.method = 'HEAD'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 200)
self.assertEquals(v, res.headers[k])
def test_replace_members_non_existing_image(self):
"""
Tests replacing image members raises right exception
"""
test_router_api = router.API(self.mapper)
self.api = test_utils.FakeAuthMiddleware(
test_router_api, is_admin=True)
fixture = dict(member_id='pattieblack')
req = webob.Request.blank('/images/%s/members' % _gen_uuid())
req.method = 'PUT'
req.content_type = 'application/json'
req.body = json.dumps(dict(image_memberships=fixture))
res = req.get_response(self.api)
self.assertEquals(res.status_int, 404)
def test_replace_members_bad_request(self):
"""
Tests replacing image members raises bad request if body is wrong
"""
test_router_api = router.API(self.mapper)
self.api = test_utils.FakeAuthMiddleware(
test_router_api, is_admin=True)
fixture = dict(member_id='pattieblack')
req = webob.Request.blank('/images/%s/members' % UUID2)
req.method = 'PUT'
req.content_type = 'application/json'
req.body = json.dumps(dict(image_memberships=fixture))
res = req.get_response(self.api)
self.assertEquals(res.status_int, 400)
def test_replace_members_positive(self):
"""
Tests replacing image members
"""
test_router = router.API(self.mapper)
self.api = test_utils.FakeAuthMiddleware(
test_router, is_admin=True)
fixture = [dict(member_id='pattieblack', can_share=False)]
# Replace
req = webob.Request.blank('/images/%s/members' % UUID2)
req.method = 'PUT'
req.content_type = 'application/json'
req.body = json.dumps(dict(memberships=fixture))
res = req.get_response(self.api)
self.assertEquals(res.status_int, 204)
def test_replace_members_forbidden_by_policy(self):
rules = {"modify_member": '!'}
self.set_policy_rules(rules)
self.api = test_utils.FakeAuthMiddleware(router.API(self.mapper),
is_admin=True)
fixture = [{'member_id': 'pattieblack', 'can_share': 'false'}]
req = webob.Request.blank('/images/%s/members' % UUID1)
req.method = 'PUT'
req.content_type = 'application/json'
req.body = json.dumps(dict(memberships=fixture))
res = req.get_response(self.api)
self.assertEquals(res.status_int, webob.exc.HTTPForbidden.code)
def test_replace_members_allowed_by_policy(self):
rules = {"modify_member": '@'}
self.set_policy_rules(rules)
self.api = test_utils.FakeAuthMiddleware(router.API(self.mapper),
is_admin=True)
fixture = [{'member_id': 'pattieblack', 'can_share': 'false'}]
req = webob.Request.blank('/images/%s/members' % UUID1)
req.method = 'PUT'
req.content_type = 'application/json'
req.body = json.dumps(dict(memberships=fixture))
res = req.get_response(self.api)
self.assertEquals(res.status_int, webob.exc.HTTPNoContent.code)
    def test_add_member_unauthorized(self):
"""
Tests adding image members raises right exception
"""
test_router = router.API(self.mapper)
self.api = test_utils.FakeAuthMiddleware(
test_router, is_admin=False)
req = webob.Request.blank('/images/%s/members/pattieblack' % UUID2)
req.method = 'PUT'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 401)
def test_add_member_non_existing_image(self):
"""
Tests adding image members raises right exception
"""
test_router = router.API(self.mapper)
self.api = test_utils.FakeAuthMiddleware(
test_router, is_admin=True)
test_uri = '/images/%s/members/pattieblack'
req = webob.Request.blank(test_uri % _gen_uuid())
req.method = 'PUT'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 404)
def test_add_member_positive(self):
"""
Tests adding image members
"""
test_router = router.API(self.mapper)
self.api = test_utils.FakeAuthMiddleware(
test_router, is_admin=True)
req = webob.Request.blank('/images/%s/members/pattieblack' % UUID2)
req.method = 'PUT'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 204)
def test_add_member_with_body(self):
"""
Tests adding image members
"""
fixture = dict(can_share=True)
test_router = router.API(self.mapper)
self.api = test_utils.FakeAuthMiddleware(
test_router, is_admin=True)
req = webob.Request.blank('/images/%s/members/pattieblack' % UUID2)
req.method = 'PUT'
req.body = json.dumps(dict(member=fixture))
res = req.get_response(self.api)
self.assertEquals(res.status_int, 204)
def test_add_member_forbidden_by_policy(self):
rules = {"modify_member": '!'}
self.set_policy_rules(rules)
self.api = test_utils.FakeAuthMiddleware(router.API(self.mapper),
is_admin=True)
req = webob.Request.blank('/images/%s/members/pattieblack' % UUID1)
req.method = 'PUT'
res = req.get_response(self.api)
self.assertEquals(res.status_int, webob.exc.HTTPForbidden.code)
def test_add_member_allowed_by_policy(self):
rules = {"modify_member": '@'}
self.set_policy_rules(rules)
self.api = test_utils.FakeAuthMiddleware(router.API(self.mapper),
is_admin=True)
req = webob.Request.blank('/images/%s/members/pattieblack' % UUID1)
req.method = 'PUT'
res = req.get_response(self.api)
self.assertEquals(res.status_int, webob.exc.HTTPNoContent.code)
def test_get_members_of_deleted_image_raises_404(self):
"""
Tests members listing for deleted image raises 404.
"""
req = webob.Request.blank("/images/%s" % UUID2)
req.method = 'DELETE'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 200)
req = webob.Request.blank('/images/%s/members' % UUID2)
req.method = 'GET'
res = req.get_response(self.api)
self.assertEquals(res.status_int, webob.exc.HTTPNotFound.code)
self.assertTrue(
'Image with identifier %s has been deleted.' % UUID2 in res.body)
def test_delete_member_of_deleted_image_raises_404(self):
"""
Tests deleting members of deleted image raises 404.
"""
test_router = router.API(self.mapper)
self.api = test_utils.FakeAuthMiddleware(test_router, is_admin=True)
req = webob.Request.blank("/images/%s" % UUID2)
req.method = 'DELETE'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 200)
req = webob.Request.blank('/images/%s/members/pattieblack' % UUID2)
req.method = 'DELETE'
res = req.get_response(self.api)
self.assertEquals(res.status_int, webob.exc.HTTPNotFound.code)
self.assertTrue(
'Image with identifier %s has been deleted.' % UUID2 in res.body)
def test_update_members_of_deleted_image_raises_404(self):
"""
Tests update members of deleted image raises 404.
"""
test_router = router.API(self.mapper)
self.api = test_utils.FakeAuthMiddleware(test_router, is_admin=True)
req = webob.Request.blank('/images/%s/members/pattieblack' % UUID2)
req.method = 'PUT'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 204)
req = webob.Request.blank("/images/%s" % UUID2)
req.method = 'DELETE'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 200)
fixture = [{'member_id': 'pattieblack', 'can_share': 'false'}]
req = webob.Request.blank('/images/%s/members' % UUID2)
req.method = 'PUT'
req.content_type = 'application/json'
req.body = json.dumps(dict(memberships=fixture))
res = req.get_response(self.api)
self.assertEquals(res.status_int, webob.exc.HTTPNotFound.code)
self.assertTrue(
'Image with identifier %s has been deleted.' % UUID2 in res.body)
def test_create_member_to_deleted_image_raises_404(self):
"""
Tests adding members to deleted image raises 404.
"""
test_router = router.API(self.mapper)
self.api = test_utils.FakeAuthMiddleware(test_router, is_admin=True)
req = webob.Request.blank("/images/%s" % UUID2)
req.method = 'DELETE'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 200)
req = webob.Request.blank('/images/%s/members/pattieblack' % UUID2)
req.method = 'PUT'
res = req.get_response(self.api)
self.assertEquals(res.status_int, webob.exc.HTTPNotFound.code)
self.assertTrue(
'Image with identifier %s has been deleted.' % UUID2 in res.body)
def test_delete_member(self):
"""
Tests deleting image members raises right exception
"""
test_router = router.API(self.mapper)
self.api = test_utils.FakeAuthMiddleware(
test_router, is_admin=False)
req = webob.Request.blank('/images/%s/members/pattieblack' % UUID2)
req.method = 'DELETE'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 401)
def test_delete_member_on_non_existing_image(self):
"""
Tests deleting image members raises right exception
"""
test_router = router.API(self.mapper)
api = test_utils.FakeAuthMiddleware(test_router, is_admin=True)
test_uri = '/images/%s/members/pattieblack'
req = webob.Request.blank(test_uri % _gen_uuid())
req.method = 'DELETE'
res = req.get_response(api)
self.assertEquals(res.status_int, 404)
def test_delete_non_exist_member(self):
"""
Test deleting image members raises right exception
"""
test_router = router.API(self.mapper)
api = test_utils.FakeAuthMiddleware(
test_router, is_admin=True)
req = webob.Request.blank('/images/%s/members/test_user' % UUID2)
req.method = 'DELETE'
res = req.get_response(api)
self.assertEquals(res.status_int, 404)
def test_delete_image_member(self):
test_rserver = router.API(self.mapper)
self.api = test_utils.FakeAuthMiddleware(
test_rserver, is_admin=True)
# Add member to image:
fixture = dict(can_share=True)
test_uri = '/images/%s/members/test_add_member_positive'
req = webob.Request.blank(test_uri % UUID2)
req.method = 'PUT'
req.content_type = 'application/json'
req.body = json.dumps(dict(member=fixture))
res = req.get_response(self.api)
self.assertEquals(res.status_int, 204)
# Delete member
test_uri = '/images/%s/members/test_add_member_positive'
req = webob.Request.blank(test_uri % UUID2)
req.headers['X-Auth-Token'] = 'test1:test1:'
req.method = 'DELETE'
req.content_type = 'application/json'
res = req.get_response(self.api)
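        # The 'test1' token is not allowed to delete the member: the API
        # answers 404 and the body mentions 'Forbidden'.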
self.assertEquals(res.status_int, 404)
self.assertTrue('Forbidden' in res.body)
def test_delete_member_allowed_by_policy(self):
rules = {"delete_member": '@', "modify_member": '@'}
self.set_policy_rules(rules)
self.api = test_utils.FakeAuthMiddleware(router.API(self.mapper),
is_admin=True)
req = webob.Request.blank('/images/%s/members/pattieblack' % UUID2)
req.method = 'PUT'
res = req.get_response(self.api)
self.assertEquals(res.status_int, webob.exc.HTTPNoContent.code)
req.method = 'DELETE'
res = req.get_response(self.api)
self.assertEquals(res.status_int, webob.exc.HTTPNoContent.code)
def test_delete_member_forbidden_by_policy(self):
rules = {"delete_member": '!', "modify_member": '@'}
self.set_policy_rules(rules)
self.api = test_utils.FakeAuthMiddleware(router.API(self.mapper),
is_admin=True)
req = webob.Request.blank('/images/%s/members/pattieblack' % UUID2)
req.method = 'PUT'
res = req.get_response(self.api)
self.assertEquals(res.status_int, webob.exc.HTTPNoContent.code)
req.method = 'DELETE'
res = req.get_response(self.api)
self.assertEquals(res.status_int, webob.exc.HTTPForbidden.code)
class TestImageSerializer(base.IsolatedUnitTest):
def setUp(self):
"""Establish a clean test environment"""
super(TestImageSerializer, self).setUp()
self.receiving_user = 'fake_user'
self.receiving_tenant = 2
self.context = glance.context.RequestContext(
is_admin=True,
user=self.receiving_user,
tenant=self.receiving_tenant)
self.serializer = images.ImageSerializer()
def image_iter():
for x in ['chunk', '678911234', '56789']:
yield x
self.FIXTURE = {
'image_iterator': image_iter(),
'image_meta': {
'id': UUID2,
'name': 'fake image #2',
'status': 'active',
'disk_format': 'vhd',
'container_format': 'ovf',
'is_public': True,
'created_at': timeutils.utcnow(),
'updated_at': timeutils.utcnow(),
'deleted_at': None,
'deleted': False,
'checksum': '06ff575a2856444fbe93100157ed74ab92eb7eff',
'size': 19,
'owner': _gen_uuid(),
'location': "file:///tmp/glance-tests/2",
'properties': {},
}
}
def test_meta(self):
exp_headers = {'x-image-meta-id': UUID2,
'x-image-meta-location': 'file:///tmp/glance-tests/2',
'ETag': self.FIXTURE['image_meta']['checksum'],
'x-image-meta-name': 'fake image #2'}
req = webob.Request.blank("/images/%s" % UUID2)
req.method = 'HEAD'
req.remote_addr = "1.2.3.4"
req.context = self.context
response = webob.Response(request=req)
self.serializer.meta(response, self.FIXTURE)
for key, value in exp_headers.iteritems():
self.assertEquals(value, response.headers[key])
def test_meta_utf8(self):
# We get unicode strings from JSON, and therefore all strings in the
# metadata will actually be unicode when handled internally. But we
# want to output utf-8.
FIXTURE = {
'image_meta': {
'id': unicode(UUID2),
'name': u'fake image #2 with utf-8 éàè',
'status': u'active',
'disk_format': u'vhd',
'container_format': u'ovf',
'is_public': True,
'created_at': timeutils.utcnow(),
'updated_at': timeutils.utcnow(),
'deleted_at': None,
'deleted': False,
'checksum': u'06ff575a2856444fbe93100157ed74ab92eb7eff',
'size': 19,
'owner': unicode(_gen_uuid()),
'location': u"file:///tmp/glance-tests/2",
'properties': {
u'prop_éé': u'ça marche',
u'prop_çé': u'çé',
}
}
}
exp_headers = {'x-image-meta-id': UUID2.encode('utf-8'),
'x-image-meta-location': 'file:///tmp/glance-tests/2',
'ETag': '06ff575a2856444fbe93100157ed74ab92eb7eff',
'x-image-meta-size': '19', # str, not int
'x-image-meta-name': 'fake image #2 with utf-8 éàè',
'x-image-meta-property-prop_éé': 'ça marche',
'x-image-meta-property-prop_çé': u'çé'.encode('utf-8')}
req = webob.Request.blank("/images/%s" % UUID2)
req.method = 'HEAD'
req.remote_addr = "1.2.3.4"
req.context = self.context
response = webob.Response(request=req)
self.serializer.meta(response, FIXTURE)
self.assertNotEqual(type(FIXTURE['image_meta']['name']),
type(response.headers['x-image-meta-name']))
self.assertEqual(response.headers['x-image-meta-name'].decode('utf-8'),
FIXTURE['image_meta']['name'])
for key, value in exp_headers.iteritems():
self.assertEquals(value, response.headers[key])
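        # A property value that is a raw byte string rather than unicode
        # should make the serializer raise UnicodeDecodeError.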
FIXTURE['image_meta']['properties'][u'prop_bad'] = 'çé'
self.assertRaises(UnicodeDecodeError,
self.serializer.meta, response, FIXTURE)
def test_show(self):
exp_headers = {'x-image-meta-id': UUID2,
'x-image-meta-location': 'file:///tmp/glance-tests/2',
'ETag': self.FIXTURE['image_meta']['checksum'],
'x-image-meta-name': 'fake image #2'}
req = webob.Request.blank("/images/%s" % UUID2)
req.method = 'GET'
req.context = self.context
response = webob.Response(request=req)
self.serializer.show(response, self.FIXTURE)
for key, value in exp_headers.iteritems():
self.assertEquals(value, response.headers[key])
self.assertEqual(response.body, 'chunk67891123456789')
def test_show_notify(self):
"""Make sure an eventlet posthook for notify_image_sent is added."""
req = webob.Request.blank("/images/%s" % UUID2)
req.method = 'GET'
req.context = self.context
response = webob.Response(request=req)
response.request.environ['eventlet.posthooks'] = []
self.serializer.show(response, self.FIXTURE)
#just make sure the app_iter is called
for chunk in response.app_iter:
pass
self.assertNotEqual(response.request.environ['eventlet.posthooks'], [])
def test_image_send_notification(self):
req = webob.Request.blank("/images/%s" % UUID2)
req.method = 'GET'
req.remote_addr = '1.2.3.4'
req.context = self.context
image_meta = self.FIXTURE['image_meta']
called = {"notified": False}
expected_payload = {
'bytes_sent': 19,
'image_id': UUID2,
'owner_id': image_meta['owner'],
'receiver_tenant_id': self.receiving_tenant,
'receiver_user_id': self.receiving_user,
'destination_ip': '1.2.3.4',
}
def fake_info(_event_type, _payload):
self.assertEqual(_payload, expected_payload)
called['notified'] = True
self.stubs.Set(self.serializer.notifier, 'info', fake_info)
glance.api.common.image_send_notification(19, 19, image_meta, req,
self.serializer.notifier)
self.assertTrue(called['notified'])
def test_image_send_notification_error(self):
"""Ensure image.send notification is sent on error."""
req = webob.Request.blank("/images/%s" % UUID2)
req.method = 'GET'
req.remote_addr = '1.2.3.4'
req.context = self.context
image_meta = self.FIXTURE['image_meta']
called = {"notified": False}
expected_payload = {
'bytes_sent': 17,
'image_id': UUID2,
'owner_id': image_meta['owner'],
'receiver_tenant_id': self.receiving_tenant,
'receiver_user_id': self.receiving_user,
'destination_ip': '1.2.3.4',
}
def fake_error(_event_type, _payload):
self.assertEqual(_payload, expected_payload)
called['notified'] = True
self.stubs.Set(self.serializer.notifier, 'error', fake_error)
#expected and actually sent bytes differ
glance.api.common.image_send_notification(17, 19, image_meta, req,
self.serializer.notifier)
self.assertTrue(called['notified'])
def test_redact_location(self):
"""Ensure location redaction does not change original metadata"""
image_meta = {'size': 3, 'id': '123', 'location': 'http://localhost'}
redacted_image_meta = {'size': 3, 'id': '123'}
copy_image_meta = copy.deepcopy(image_meta)
tmp_image_meta = glance.api.v1.images.redact_loc(image_meta)
self.assertEqual(image_meta, copy_image_meta)
self.assertEqual(tmp_image_meta, redacted_image_meta)
def test_noop_redact_location(self):
"""Check no-op location redaction does not change original metadata"""
image_meta = {'size': 3, 'id': '123'}
redacted_image_meta = {'size': 3, 'id': '123'}
copy_image_meta = copy.deepcopy(image_meta)
tmp_image_meta = glance.api.v1.images.redact_loc(image_meta)
self.assertEqual(image_meta, copy_image_meta)
self.assertEqual(tmp_image_meta, redacted_image_meta)
self.assertEqual(image_meta, redacted_image_meta)
class TestFilterValidator(base.IsolatedUnitTest):
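    # size_max rejects negative values and accepts positive integers;
    # protected accepts boolean strings ('True'/'FALSE') but not '-1'.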
def test_filter_validator(self):
self.assertFalse(glance.api.v1.filters.validate('size_max', -1))
self.assertTrue(glance.api.v1.filters.validate('size_max', 1))
self.assertTrue(glance.api.v1.filters.validate('protected', 'True'))
self.assertTrue(glance.api.v1.filters.validate('protected', 'FALSE'))
self.assertFalse(glance.api.v1.filters.validate('protected', '-1'))
class TestAPIProtectedProps(base.IsolatedUnitTest):
def setUp(self):
"""Establish a clean test environment"""
super(TestAPIProtectedProps, self).setUp()
self.mapper = routes.Mapper()
# turn on property protections
self.set_property_protections()
self.api = test_utils.FakeAuthMiddleware(router.API(self.mapper))
db_api.setup_db_env()
db_api.get_engine()
db_models.unregister_models(db_api._ENGINE)
db_models.register_models(db_api._ENGINE)
def tearDown(self):
"""Clear the test environment"""
super(TestAPIProtectedProps, self).tearDown()
self.destroy_fixtures()
def destroy_fixtures(self):
# Easiest to just drop the models and re-create them...
db_models.unregister_models(db_api._ENGINE)
db_models.register_models(db_api._ENGINE)
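    # Helper: create an image through the API as the admin role, optionally
    # merging in extra headers, and return the new image's id.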
    def _create_admin_image(self, props=None):
        props = props or {}
request = unit_test_utils.get_fake_request(path='/images')
headers = {'x-image-meta-disk-format': 'ami',
'x-image-meta-container-format': 'ami',
'x-image-meta-name': 'foo',
'x-image-meta-size': '0',
'x-auth-token': 'user:tenant:admin'}
headers.update(props)
for k, v in headers.iteritems():
request.headers[k] = v
created_image = request.get_response(self.api)
res_body = json.loads(created_image.body)['image']
image_id = res_body['id']
return image_id
def test_prop_protection_with_create_and_permitted_role(self):
"""
        As admin role, create an image and verify permitted role 'member' can
create a protected property
"""
image_id = self._create_admin_image()
another_request = unit_test_utils.get_fake_request(
path='/images/%s' % image_id, method='PUT')
headers = {'x-auth-token': 'user:tenant:member',
'x-image-meta-property-x_owner_foo': 'bar'}
for k, v in headers.iteritems():
another_request.headers[k] = v
output = another_request.get_response(self.api)
res_body = json.loads(output.body)['image']
self.assertEqual(res_body['properties']['x_owner_foo'], 'bar')
def test_prop_protection_with_create_and_unpermitted_role(self):
"""
As admin role, create an image and verify unpermitted role
'fake_member' can *not* create a protected property
"""
image_id = self._create_admin_image()
another_request = unit_test_utils.get_fake_request(
path='/images/%s' % image_id, method='PUT')
headers = {'x-auth-token': 'user:tenant:fake_member',
'x-image-meta-property-x_owner_foo': 'bar'}
for k, v in headers.iteritems():
another_request.headers[k] = v
another_request.get_response(self.api)
output = another_request.get_response(self.api)
self.assertEquals(output.status_int, webob.exc.HTTPForbidden.code)
self.assertIn("Property '%s' is protected" %
"x_owner_foo", output.body)
def test_prop_protection_with_show_and_permitted_role(self):
"""
As admin role, create an image with a protected property, and verify
permitted role 'member' can read that protected property via HEAD
"""
image_id = self._create_admin_image(
{'x-image-meta-property-x_owner_foo': 'bar'})
another_request = unit_test_utils.get_fake_request(
method='HEAD', path='/images/%s' % image_id)
headers = {'x-auth-token': 'user:tenant:member'}
for k, v in headers.iteritems():
another_request.headers[k] = v
res2 = another_request.get_response(self.api)
self.assertEqual(res2.headers['x-image-meta-property-x_owner_foo'],
'bar')
def test_prop_protection_with_show_and_unpermitted_role(self):
"""
As admin role, create an image with a protected property, and verify
permitted role 'fake_role' can *not* read that protected property via
HEAD
"""
image_id = self._create_admin_image(
{'x-image-meta-property-x_owner_foo': 'bar'})
another_request = unit_test_utils.get_fake_request(
method='HEAD', path='/images/%s' % image_id)
headers = {'x-auth-token': 'user:tenant:fake_role'}
for k, v in headers.iteritems():
another_request.headers[k] = v
output = another_request.get_response(self.api)
self.assertEqual(output.status_int, 200)
self.assertEqual('', output.body)
self.assertNotIn('x-image-meta-property-x_owner_foo', output.headers)
def test_prop_protection_with_get_and_permitted_role(self):
"""
As admin role, create an image with a protected property, and verify
permitted role 'member' can read that protected property via GET
"""
image_id = self._create_admin_image(
{'x-image-meta-property-x_owner_foo': 'bar'})
another_request = unit_test_utils.get_fake_request(
method='GET', path='/images/%s' % image_id)
headers = {'x-auth-token': 'user:tenant:member'}
for k, v in headers.iteritems():
another_request.headers[k] = v
res2 = another_request.get_response(self.api)
self.assertEqual(res2.headers['x-image-meta-property-x_owner_foo'],
'bar')
def test_prop_protection_with_get_and_unpermitted_role(self):
"""
As admin role, create an image with a protected property, and verify
permitted role 'fake_role' can *not* read that protected property via
GET
"""
image_id = self._create_admin_image(
{'x-image-meta-property-x_owner_foo': 'bar'})
another_request = unit_test_utils.get_fake_request(
method='GET', path='/images/%s' % image_id)
headers = {'x-auth-token': 'user:tenant:fake_role'}
for k, v in headers.iteritems():
another_request.headers[k] = v
output = another_request.get_response(self.api)
self.assertEqual(output.status_int, 200)
self.assertEqual('', output.body)
self.assertNotIn('x-image-meta-property-x_owner_foo', output.headers)
def test_prop_protection_with_detail_and_permitted_role(self):
"""
As admin role, create an image with a protected property, and verify
permitted role 'member' can read that protected property via
/images/detail
"""
image_id = self._create_admin_image(
{'x-image-meta-property-x_owner_foo': 'bar'})
another_request = unit_test_utils.get_fake_request(
method='GET', path='/images/detail')
headers = {'x-auth-token': 'user:tenant:member'}
for k, v in headers.iteritems():
another_request.headers[k] = v
output = another_request.get_response(self.api)
self.assertEqual(output.status_int, 200)
res_body = json.loads(output.body)['images'][0]
self.assertEqual(res_body['properties']['x_owner_foo'], 'bar')
def test_prop_protection_with_detail_and_unpermitted_role(self):
"""
As admin role, create an image with a protected property, and verify
permitted role 'fake_role' can *not* read that protected property via
/images/detail
"""
image_id = self._create_admin_image(
{'x-image-meta-property-x_owner_foo': 'bar'})
another_request = unit_test_utils.get_fake_request(
method='GET', path='/images/detail')
headers = {'x-auth-token': 'user:tenant:fake_role'}
for k, v in headers.iteritems():
another_request.headers[k] = v
output = another_request.get_response(self.api)
self.assertEqual(output.status_int, 200)
res_body = json.loads(output.body)['images'][0]
self.assertNotIn('x-image-meta-property-x_owner_foo',
res_body['properties'])
def test_prop_protection_with_update_and_permitted_role(self):
"""
As admin role, create an image with protected property, and verify
permitted role 'member' can update that protected property
"""
image_id = self._create_admin_image(
{'x-image-meta-property-x_owner_foo': 'bar'})
another_request = unit_test_utils.get_fake_request(
path='/images/%s' % image_id, method='PUT')
headers = {'x-auth-token': 'user:tenant:member',
'x-image-meta-property-x_owner_foo': 'baz'}
for k, v in headers.iteritems():
another_request.headers[k] = v
output = another_request.get_response(self.api)
res_body = json.loads(output.body)['image']
self.assertEqual(res_body['properties']['x_owner_foo'], 'baz')
def test_prop_protection_with_update_and_unpermitted_role(self):
"""
As admin role, create an image with protected property, and verify
unpermitted role 'fake_role' can *not* update that protected property
"""
image_id = self._create_admin_image(
{'x-image-meta-property-x_owner_foo': 'bar'})
another_request = unit_test_utils.get_fake_request(
path='/images/%s' % image_id, method='PUT')
headers = {'x-auth-token': 'user:tenant:fake_role',
'x-image-meta-property-x_owner_foo': 'baz'}
for k, v in headers.iteritems():
another_request.headers[k] = v
output = another_request.get_response(self.api)
self.assertEquals(output.status_int, webob.exc.HTTPForbidden.code)
self.assertIn("Property '%s' is protected" %
"x_owner_foo", output.body)
def test_prop_protection_update_without_read(self):
"""
Test protected property cannot be updated without read permission
"""
image_id = self._create_admin_image(
{'x-image-meta-property-spl_update_only_prop': 'foo'})
another_request = unit_test_utils.get_fake_request(
path='/images/%s' % image_id, method='PUT')
headers = {'x-auth-token': 'user:tenant:spl_role',
'x-image-meta-property-spl_update_only_prop': 'bar'}
for k, v in headers.iteritems():
another_request.headers[k] = v
output = another_request.get_response(self.api)
self.assertEquals(output.status_int, webob.exc.HTTPForbidden.code)
self.assertIn("Property '%s' is protected" %
"spl_update_only_prop", output.body)
def test_prop_protection_update_noop(self):
"""
Test protected property update is allowed as long as the user has read
access and the value is unchanged
"""
image_id = self._create_admin_image(
{'x-image-meta-property-spl_read_prop': 'foo'})
another_request = unit_test_utils.get_fake_request(
path='/images/%s' % image_id, method='PUT')
headers = {'x-auth-token': 'user:tenant:spl_role',
'x-image-meta-property-spl_read_prop': 'foo'}
for k, v in headers.iteritems():
another_request.headers[k] = v
output = another_request.get_response(self.api)
res_body = json.loads(output.body)['image']
self.assertEqual(res_body['properties']['spl_read_prop'], 'foo')
self.assertEquals(output.status_int, 200)
def test_prop_protection_with_delete_and_permitted_role(self):
"""
As admin role, create an image with protected property, and verify
        permitted role 'member' can delete that protected property
"""
image_id = self._create_admin_image(
{'x-image-meta-property-x_owner_foo': 'bar'})
another_request = unit_test_utils.get_fake_request(
path='/images/%s' % image_id, method='PUT')
headers = {'x-auth-token': 'user:tenant:member',
'X-Glance-Registry-Purge-Props': 'True'}
for k, v in headers.iteritems():
another_request.headers[k] = v
output = another_request.get_response(self.api)
res_body = json.loads(output.body)['image']
self.assertEqual(res_body['properties'], {})
def test_prop_protection_with_delete_and_unpermitted_read(self):
"""
Test protected property cannot be deleted without read permission
"""
image_id = self._create_admin_image(
{'x-image-meta-property-x_owner_foo': 'bar'})
another_request = unit_test_utils.get_fake_request(
path='/images/%s' % image_id, method='PUT')
headers = {'x-auth-token': 'user:tenant:fake_role',
'X-Glance-Registry-Purge-Props': 'True'}
for k, v in headers.iteritems():
another_request.headers[k] = v
output = another_request.get_response(self.api)
self.assertEquals(output.status_int, 200)
self.assertNotIn('x-image-meta-property-x_owner_foo', output.headers)
another_request = unit_test_utils.get_fake_request(
method='HEAD', path='/images/%s' % image_id)
headers = {'x-auth-token': 'user:tenant:admin'}
for k, v in headers.iteritems():
another_request.headers[k] = v
output = another_request.get_response(self.api)
self.assertEqual(output.status_int, 200)
self.assertEqual('', output.body)
self.assertEqual(output.headers['x-image-meta-property-x_owner_foo'],
'bar')
def test_prop_protection_with_delete_and_unpermitted_delete(self):
"""
Test protected property cannot be deleted without delete permission
"""
image_id = self._create_admin_image(
{'x-image-meta-property-spl_update_prop': 'foo'})
another_request = unit_test_utils.get_fake_request(
path='/images/%s' % image_id, method='PUT')
headers = {'x-auth-token': 'user:tenant:spl_role',
'X-Glance-Registry-Purge-Props': 'True'}
for k, v in headers.iteritems():
another_request.headers[k] = v
output = another_request.get_response(self.api)
self.assertEquals(output.status_int, 403)
self.assertIn("Property '%s' is protected" %
"spl_update_prop", output.body)
another_request = unit_test_utils.get_fake_request(
method='HEAD', path='/images/%s' % image_id)
headers = {'x-auth-token': 'user:tenant:admin'}
for k, v in headers.iteritems():
another_request.headers[k] = v
output = another_request.get_response(self.api)
self.assertEqual(output.status_int, 200)
self.assertEqual('', output.body)
self.assertEqual(
output.headers['x-image-meta-property-spl_update_prop'], 'foo')
def test_read_protected_props_leak_with_update(self):
"""
Verify when updating props that ones we don't have read permission for
are not disclosed
"""
image_id = self._create_admin_image(
{'x-image-meta-property-spl_update_prop': '0',
'x-image-meta-property-foo': 'bar'})
another_request = unit_test_utils.get_fake_request(
path='/images/%s' % image_id, method='PUT')
headers = {'x-auth-token': 'user:tenant:spl_role',
'x-image-meta-property-spl_update_prop': '1',
'X-Glance-Registry-Purge-Props': 'False'}
for k, v in headers.iteritems():
another_request.headers[k] = v
output = another_request.get_response(self.api)
res_body = json.loads(output.body)['image']
self.assertEqual(res_body['properties']['spl_update_prop'], '1')
self.assertNotIn('foo', res_body['properties'])
def test_update_protected_props_mix_no_read(self):
"""
Create an image with two props - one only readable by admin, and one
        readable/updatable by member. Verify member can successfully update
        their property while the admin-owned one is ignored transparently
"""
image_id = self._create_admin_image(
{'x-image-meta-property-admin_foo': 'bar',
'x-image-meta-property-x_owner_foo': 'bar'})
another_request = unit_test_utils.get_fake_request(
path='/images/%s' % image_id, method='PUT')
headers = {'x-auth-token': 'user:tenant:member',
'x-image-meta-property-x_owner_foo': 'baz'}
for k, v in headers.iteritems():
another_request.headers[k] = v
output = another_request.get_response(self.api)
res_body = json.loads(output.body)['image']
self.assertEqual(res_body['properties']['x_owner_foo'], 'baz')
self.assertNotIn('admin_foo', res_body['properties'])
def test_update_protected_props_mix_read(self):
"""
Create an image with two props - one readable/updatable by admin, but
also readable by spl_role. The other is readable/updatable by
spl_role. Verify spl_role can successfully update their property but
not the admin owned one
"""
custom_props = {
'x-image-meta-property-spl_read_only_prop': '1',
'x-image-meta-property-spl_update_prop': '2'
}
image_id = self._create_admin_image(custom_props)
another_request = unit_test_utils.get_fake_request(
path='/images/%s' % image_id, method='PUT')
        # verify spl_role can update its prop
headers = {'x-auth-token': 'user:tenant:spl_role',
'x-image-meta-property-spl_read_only_prop': '1',
'x-image-meta-property-spl_update_prop': '1'}
for k, v in headers.iteritems():
another_request.headers[k] = v
output = another_request.get_response(self.api)
res_body = json.loads(output.body)['image']
self.assertEqual(output.status_int, 200)
self.assertEqual(res_body['properties']['spl_read_only_prop'], '1')
self.assertEqual(res_body['properties']['spl_update_prop'], '1')
        # verify spl_role cannot update the admin-controlled prop
headers = {'x-auth-token': 'user:tenant:spl_role',
'x-image-meta-property-spl_read_only_prop': '2',
'x-image-meta-property-spl_update_prop': '1'}
for k, v in headers.iteritems():
another_request.headers[k] = v
output = another_request.get_response(self.api)
self.assertEqual(output.status_int, 403)
def test_delete_protected_props_mix_no_read(self):
"""
        Create an image with two props - one only readable by admin, and one
        readable/deletable by member. Verify member can successfully delete
        their property while the admin-owned one is ignored transparently
"""
image_id = self._create_admin_image(
{'x-image-meta-property-admin_foo': 'bar',
'x-image-meta-property-x_owner_foo': 'bar'})
another_request = unit_test_utils.get_fake_request(
path='/images/%s' % image_id, method='PUT')
headers = {'x-auth-token': 'user:tenant:member',
'X-Glance-Registry-Purge-Props': 'True'}
for k, v in headers.iteritems():
another_request.headers[k] = v
output = another_request.get_response(self.api)
res_body = json.loads(output.body)['image']
self.assertNotIn('x_owner_foo', res_body['properties'])
self.assertNotIn('admin_foo', res_body['properties'])
def test_delete_protected_props_mix_read(self):
"""
Create an image with two props - one readable/deletable by admin, but
also readable by spl_role. The other is readable/deletable by
spl_role. Verify spl_role is forbidden to purge_props in this scenario
without retaining the readable prop.
"""
custom_props = {
'x-image-meta-property-spl_read_only_prop': '1',
'x-image-meta-property-spl_delete_prop': '2'
}
image_id = self._create_admin_image(custom_props)
another_request = unit_test_utils.get_fake_request(
path='/images/%s' % image_id, method='PUT')
headers = {'x-auth-token': 'user:tenant:spl_role',
'X-Glance-Registry-Purge-Props': 'True'}
for k, v in headers.iteritems():
another_request.headers[k] = v
output = another_request.get_response(self.api)
self.assertEqual(output.status_int, 403)
|
#!/usr/bin/python
# version 4
# april 2012
# this was written by saikia81 and is licensed under the GNU General Public License v3
# it was written in notepad++, a program I recommend!
# whitespace ftw!
# import the os, sys, random and time modules
import os, sys
import random, time  # time module
#pickling for data2file
import cPickle as pickle
#introducing the player
def instructions():
print 'welcome to the guess my number game V4'
print "I'll think of a number and you have to guess it\n"
# build a list of all possible numbers for every difficulty
def list_numbers():
list_easy = []
list_medium = []
list_hard = []
for n in range(1,101):
list_easy.append(n)
list_medium.append(n)
list_hard.append(n)
for n in range(101,201):
list_medium.append(n)
list_hard.append(n)
for n in range(-201,0):
n += 1
list_hard.append(n)
return list_easy, list_medium, list_hard
# does the player want to change the difficulty?
def change_dificulty(dificulty):
if dificulty == None:
dificulty = choose_dificulty()
return dificulty
if raw_input("do you want to change dificulty? yes/no: ") == 'yes':
dificulty = choose_dificulty()
return dificulty
else:
return dificulty
# the difficulty the player wants to choose
def choose_dificulty():
    print '\nwhat difficulty do you want to play in?'
dificulty = raw_input('choose between "easy", "medium" or "hard":\n')
dificulties = 'easy', 'medium', 'hard'
#if anybody tries to be smart: help them get it right
wrong = -1
if dificulty in dificulties: wrong = 0
elif dificulty not in dificulties:
wrong += 1
for n in (1,2,3):
if n == 3:
print "\nseems like you can't handle choosing a dificulty..."
dificulty = "easy"
time.sleep(2)
print ""
elif (dificulty not in dificulties):
print 'something went wrong!!! please try again\n'
dificulty = raw_input('choose between "easy", "medium" or "hard":\n')
wrong += 1
elif dificulty in dificulties:
print "\nalright so let's get started :D\n"
break
else:
print "you're doing something wrong! I'll chooce a dificulty for you\a\a\a\a\n"
dificulty = 'easy'
print "ERROR: 008"
time.sleep(2)
else:
        print '\a\a\asomething went wrong; the program will shut down.'
print "ERROR: 009"
time.sleep(2.5)
sys.exit()
return dificulty
# here a random number will be chosen depending on the difficulty
def random_number(dificulty, list_easy, list_medium, list_hard):
if dificulty == 'easy':
NUMBER = random.randrange(100) + 1
print "you have choosen the dificulty easy."
number_range = '1 and 100: '
numbers = list_easy
elif dificulty == 'medium':
NUMBER = random.randrange(200) + 1
print "you have choosen the dificulty medium."
number_range = '1 and 200: '
numbers = list_medium
elif dificulty =='hard':
NUMBER = random.randrange(-200,201)
print "you have choosen the dificulty hard."
number_range = '-200 and 200: '
numbers = list_hard
else:
print "dificulty malfunction"
print "ERROR: 003"
time.sleep(2.5)
exit()
return NUMBER, number_range, numbers
# if the guess != "the (predefined) number": loop.
def game(dificulty, NUMBER, number_range, numbers):
time.sleep(2.5)
os.system('cls')
guesses=0
guess='nothing'
while guess != NUMBER:
if guess == 'nothing':
print 'guess a number between', number_range
try:
guess = input()
except:
print "\nsomething went wrong\nyou're getting another try\n\n"
continue
guesses += 1
elif guess == 'cheater':
guess = NUMBER
elif guess not in numbers:
print "\nthe guess you made isn't in the range of valid numbers.\nAre you sure you want to make this guess?"
answ = raw_input("'yes'/'no' \n")
if answ == 'yes':
print "it's your funeral"
                print '\nguess a number between', number_range
guesses += 1
elif answ == 'no':
print "good choice"
print '\nguess a number between', number_range
try:
guess = input()
except:
print "something went wrong\nyou're getting another try\n"
continue
else:
print "that isn't a valid option"
print "let's continue\n"
#if the number is higher than the guess
elif guess < NUMBER:
print 'higher...'
print '\nguess a number between', number_range
try:
guess = input()
except:
print "something went wrong\nyou're getting another try\n"
continue
guesses += 1
continue
#if the number is 'lower...'
elif guess > NUMBER:
print 'lower...'
print '\nguess a number between', number_range
try:
guess = input()
except:
print "something went wrong\n you'll get another try"
continue
            guesses += 1
#this is actually an error that will never occur... but better safe than sorry.
else:
print '\a\a\asorry, something went wrong. The game will now end itself.'
sys.exit()
print
    print 'you did it! the NUMBER was:', NUMBER,
    print 'it cost you', guesses, 'guesses to get it right', 'on difficulty', dificulty
print
return guesses
## Here I use the 'os' module to keep a highscore system
# in the user's default appdata folder.
# everything here checks that everything is in its place.
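# On disk (per the code below) the highscores live in
# %LOCALAPPDATA%\Number_game\highscores.dat as a pickled list of three dicts
# -- [easy, medium, hard] -- each mapping a player's name to their recorded
# guess count.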
def highscore(dificulty,guesses):
FOLDER_LOCALAPPDATA = os.environ['LOCALAPPDATA']
FOLDER_NUMBER_GAME = FOLDER_LOCALAPPDATA + '\\Number_game'
#deciding if a new highscore file and/or dir is needed
if os.access(FOLDER_NUMBER_GAME, 0) == False: #dir
try:
os.mkdir(FOLDER_NUMBER_GAME)
except:
os.system('cls')
print 'creating folder: ERROR\nError code: 002'
os.system('pause')
sys.exit()
try:
HIGHSCORES_DAT = open(FOLDER_NUMBER_GAME+"\\highscores.dat", "w+")
easy_highscores={}
medium_highscores={}
hard_highscores={}
all_highscores = [easy_highscores,medium_highscores,hard_highscores]
pickle.dump(all_highscores,HIGHSCORES_DAT)
HIGHSCORES_DAT.close()
HIGHSCORES_DAT = open(FOLDER_NUMBER_GAME+"\\highscores.dat", "r+")
unpickled_file = pickle.load(HIGHSCORES_DAT)
except:
os.system('cls')
print 'loading file: ERROR\nError code: 001'
os.system('pause')
sys.exit()
else:
HIGHSCORES_DAT.close()
#done with file and folder creation
#
#showing highscores
HIGHSCORES_DAT = open(FOLDER_NUMBER_GAME+"\\highscores.dat", "r")
try:
unpickled_file = pickle.load(HIGHSCORES_DAT)
except:
print "couldn't locate or unpickle file"
print "ERROR: 005"
print "\n if this was your first run of the game: this is common"
print "if not, please send a message at [email protected], thank you"
time.sleep(1)
print "everything went worse then expected. shutting down"
time.sleep(2.5)
sys.exit()
else:
HIGHSCORES_DAT.close()
if dificulty == "easy": l=0
if dificulty == "medium": l=1
if dificulty == "hard": l=2
highscores = unpickled_file[l]
#creating your highscore...
your_name = raw_input('what is your name?: ')
try:
if highscores[your_name]>guesses:
os.system('cls')
print "congratulations, new highscore!!"
if raw_input('do you want to replace your score yes/no: ') =="yes": highscores[your_name]=guesses
except:
print "new user"
highscores[your_name]=guesses
list_keys= highscores.keys()
list_values= highscores.values()
list_values.sort()
time.sleep(4)
os.system('cls')
#deeply annoying part
#highscore display
print" ---HIGHSCORE---"
print "highscores in", dificulty,"dificulty"
print"\nname attempts"
print"----------------------------------------"
i=0
#for values in sorted values list
for n in list_values:
#reset found to find next highscore
found = False
#set p to 0: to try different keys
p=0
#while the matching key and value not found keep looking
while found != True:
#m = the next key in list
m=list_keys[p]
if highscores[m] == n: found=True
p+=1
b=len(m)
b=21-b
print m,' '*b,highscores[m]
HIGHSCORES_DAT = open(FOLDER_NUMBER_GAME+"\\highscores.dat", "r")
unpickled_file = pickle.load(HIGHSCORES_DAT)
HIGHSCORES_DAT.close()
if l==0: unpickled_file[0]=highscores
if l==1: unpickled_file[1]=highscores
if l==2: unpickled_file[2]=highscores
HIGHSCORES_DAT = open(FOLDER_NUMBER_GAME+"\\highscores.dat", "w")
pickle.dump(unpickled_file,HIGHSCORES_DAT)
HIGHSCORES_DAT.close()
def end():
time.sleep(1)
print('''
The number Game V4
Copyright (C) 2012 Saikia81
''')
time.sleep(5)
os.system('cls')
print("""
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
""")
time.sleep(7)
try:
if pygame.mixer.get_busy()>0:
try:
pygame.mixer.music.fadeout(3000)
except:
print "ERROR: 012"
except:
pass
time.sleep(3)
os.system('pause')
sys.exit()
def main():
#initializing
ask_music = raw_input('music "on"?: ')
if (ask_music == 'on') or (ask_music == 'yes'):
try:
import pygame.mixer
pygame.mixer.init()
pygame.mixer.music.load("song.mp3")
pygame.mixer.music.play(-1)
except:
print "pygame not working!\nError: 013"
os.system('cls')
list_easy, list_medium, list_hard = list_numbers()
dificulty = None
instructions()
while 1:
dificulty=change_dificulty(dificulty)
NUMBER, number_range, numbers = random_number(dificulty, list_easy, list_medium, list_hard)
guesses = game(dificulty, NUMBER, number_range, numbers)
highscore(dificulty,guesses)
ask_again = raw_input('\ndo you want to play again? yes/no: ')
os.system('cls')
if ask_again == 'no': end()
#start
main()
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import logging
try:
import raven
except ImportError:
raven = None
from socorro.signature.signature_utilities import (
SignatureGenerationRule,
StackwalkerErrorSignatureRule,
OOMSignature,
AbortSignature,
SignatureShutdownTimeout,
SignatureRunWatchDog,
SignatureIPCChannelError,
SignatureIPCMessageName,
SigTrim,
SigTrunc,
SignatureJitCategory,
)
DEFAULT_PIPELINE = [
SignatureGenerationRule(),
StackwalkerErrorSignatureRule(),
OOMSignature(),
AbortSignature(),
SignatureShutdownTimeout(),
SignatureRunWatchDog(),
SignatureIPCChannelError(),
SignatureIPCMessageName(),
SigTrim(),
SigTrunc(),
SignatureJitCategory(),
]
logger = logging.getLogger(__name__)
class SignatureGenerator:
def __init__(self, pipeline=None, sentry_dsn=None, debug=False):
self.pipeline = pipeline or list(DEFAULT_PIPELINE)
self.sentry_dsn = sentry_dsn
self.debug = debug
def _send_to_sentry(self, rule, raw_crash, processed_crash):
"""Sends an unhandled error to Sentry
If self.sentry_dsn is non-None, it will try to send it to Sentry.
"""
if self.sentry_dsn is None:
logger.warning('Sentry DSN is not configured and an exception happened')
return
extra = {
'rule': rule.__class__.__name__,
}
if 'uuid' in raw_crash:
extra['crash_id'] = raw_crash['uuid']
try:
client = raven.Client(dsn=self.sentry_dsn)
client.context.activate()
client.context.merge({'extra': extra})
try:
identifier = client.captureException()
logger.info('Error captured in Sentry! Reference: {}'.format(identifier))
finally:
client.context.clear()
except Exception:
logger.error('Unable to report error with Raven', exc_info=True)
def generate(self, raw_crash, processed_crash):
"""Takes data and returns a signature
:arg dict raw_crash: the raw crash data
:arg dict processed_crash: the processed crash data
:returns: dict containing ``signature`` and ``notes`` keys representing the
signature and processor notes
"""
all_notes = []
for rule in self.pipeline:
notes = []
try:
if rule.predicate(raw_crash, processed_crash):
sig = processed_crash.get('signature', '')
rule.action(raw_crash, processed_crash, notes)
if self.debug:
notes.append('%s: %s -> %s' % (
rule.__class__.__name__, sig, processed_crash['signature']
))
except Exception as exc:
self._send_to_sentry(rule, raw_crash, processed_crash)
notes.append('Rule %s failed: %s' % (rule.__class__.__name__, exc))
if notes:
all_notes.extend(notes)
return {
'signature': processed_crash.get('signature', ''),
'notes': all_notes
}
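# Minimal usage sketch (illustrative only; the crash dicts below are made-up
# examples, not real Socorro data):
#
#   generator = SignatureGenerator(debug=True)
#   result = generator.generate(
#       raw_crash={'uuid': '00000000-0000-0000-0000-000000000000'},
#       processed_crash={'signature': ''},
#   )
#   # result['signature'] is the generated signature; result['notes'] holds the
#   # processor notes accumulated by the pipeline rules.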
|
#!/usr/bin/python3
"""
Given an unsorted array of integers, find the number of longest increasing
subsequence.
Example 1:
Input: [1,3,5,4,7]
Output: 2
Explanation: The two longest increasing subsequence are [1, 3, 4, 7] and
[1, 3, 5, 7].
Example 2:
Input: [2,2,2,2,2]
Output: 5
Explanation: The length of the longest increasing subsequence is 1, and there
are 5 subsequences of length 1, so the output is 5.
Note: The length of the given array will not exceed 2000 and the answer is
guaranteed to fit in a 32-bit signed int.
"""
from typing import List
class LenCnt:
def __init__(self, l, c):
self.l = l
self.c = c
def __repr__(self):
return repr((self.l, self.c))
class Solution:
def findNumberOfLIS(self, A: List[int]) -> int:
"""
        DP over pairs (i, j): let F[i] hold the length and count of the LIS
        ending at A[i]; mx tracks the overall best as the table is filled.
"""
if not A:
return 0
n = len(A)
F = [LenCnt(l=1, c=1) for _ in A]
mx = LenCnt(l=1, c=1)
for i in range(1, n):
for j in range(i):
if A[i] > A[j]:
if F[i].l < F[j].l + 1:
F[i].l = F[j].l + 1
F[i].c = F[j].c
elif F[i].l == F[j].l + 1:
F[i].c += F[j].c
if F[i].l > mx.l:
                # note: mx = F[i] would alias the same object; copy the fields instead
mx.l = F[i].l
mx.c = F[i].c
elif F[i].l == mx.l:
mx.c += F[i].c
return mx.c
if __name__ == "__main__":
assert Solution().findNumberOfLIS([1,1,1,2,2,2,3,3,3]) == 27
assert Solution().findNumberOfLIS([1, 3, 5, 4, 7]) == 2
assert Solution().findNumberOfLIS([2, 2, 2, 2, 2]) == 5
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2008-2009 Adriano Monteiro Marques.
#
# Author: Bartosz SKOWRON <getxsick at gmail dot com>
#
# This library is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation; either version 2.1 of the License, or
# (at your option) any later version.
#
# This library is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
# License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this library; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
import glob
import os
import os.path
from stat import ST_MODE
from distutils.core import setup
from distutils.command.install import install
UMPA_VERSION = '0.2'
SHARE_DIR = os.path.join('share', 'umpa')
DOCS_DIR = os.path.join('share', 'doc', 'umpa')
TESTS_DIR = [
os.path.join('tests'),
os.path.join('tests', 'system'),
os.path.join('tests', 'system', 'test_snd'),
os.path.join('tests', 'system', 'test_sndrcv'),
os.path.join('tests', 'a_unit'),
os.path.join('tests', 'a_unit', 'test_extensions'),
os.path.join('tests', 'a_unit', 'test_protocols'),
os.path.join('tests', 'a_unit', 'test_utils'),
os.path.join('tests', 'a_unit', 'test_sniffing'),
os.path.join('tests', 'a_unit', 'test_sniffing', 'test_libpcap'),
]
class umpa_install(install):
def run(self):
install.run(self)
self.create_uninstaller()
def create_uninstaller(self):
uninstaller_filename = os.path.join(
self.install_data, SHARE_DIR, 'uninstall_umpa')
uninstaller = []
uninstaller.append(
"#!/usr/bin/env python\n"
"import os, sys, shutil\n"
"\n"
"print\n"
"print '%(line)s Uninstall UMPA %(version)s %(line)s'\n"
"print\n"
"\n"
"answer = raw_input('Are you sure that you want to '\n"
" 'completly uninstall UMPA %(version)s? (yes/no) ')\n"
"\n"
"if answer.lower() not in ['yes', 'y']:\n"
" sys.exit(0)\n"
"\n"
"print\n"
"print '%(line)s Uninstalling UMPA %(version)s... %(line)s'\n"
"print\n" % {'version': UMPA_VERSION, 'line': '-' * 10})
for output in self.get_outputs():
uninstaller.append(
'print "Removing %(output)s..."\n'
'if os.path.exists("%(output)s"):\n'
' os.remove("%(output)s")\n' % {'output': output})
uninstaller.append(
"print 'Removing uninstaller itself...'\n"
"os.remove('%s')\n" % uninstaller_filename)
uninstaller.append('print "Removing empty directories..."\n')
for dir in (
os.path.join(self.install_data, SHARE_DIR),
os.path.join(self.install_data, DOCS_DIR),
os.path.join(self.install_lib, 'umpa'),
):
uninstaller.append(
'if os.path.exists("%(dir)s"):\n'
' shutil.rmtree("%(dir)s")\n' % {'dir' : dir})
uninstaller_file = open(uninstaller_filename, 'w')
uninstaller_file.writelines(uninstaller)
uninstaller_file.close()
# Set exec bit for uninstaller
mode = ((os.stat(uninstaller_filename)[ST_MODE]) | 0555) & 07777
os.chmod(uninstaller_filename, mode)
cmdclasses = {
'install' : umpa_install,
}
test_files = []
for dir in TESTS_DIR:
test_files = test_files + [ (os.path.join(SHARE_DIR, dir),
glob.glob(os.path.join(dir,'*.py')))]
data_files = [ (os.path.join(SHARE_DIR,'examples'),
glob.glob(os.path.join('examples','*'))),
(os.path.join(DOCS_DIR,'API'),
glob.glob(os.path.join('docs','API','*'))),
(os.path.join(DOCS_DIR,'tutorials','_sources'),
glob.glob(os.path.join('docs','tutorials','_sources','*'))),
(os.path.join(DOCS_DIR,'tutorials','_static'),
glob.glob(os.path.join('docs','tutorials','_static','*'))),
(os.path.join(DOCS_DIR,'tutorials'),
glob.glob(os.path.join('docs','tutorials','*.*'))),
(SHARE_DIR, ('run_tests.sh', 'run_tests.bat')),
(DOCS_DIR,
('README', 'COPYING', 'AUTHORS', 'TODO', 'CHANGES',
'INSTALL')),
(os.path.join(SHARE_DIR, 'tests'),
(os.path.join('tests','README'),
os.path.join('tests','IMPORTANT'))),
] + test_files
setup( name = "UMPA",
version = UMPA_VERSION,
description = "Umit's Manipulations of Packets Art",
author = "Bartosz SKOWRON",
author_email = "[email protected]",
url = "http://www.umpa.umitproject.org",
license = "GNU LGPLv2",
platforms = ["Platform Independent"],
packages = [ "umit",
"umit.umpa",
"umit.umpa.protocols",
"umit.umpa.sniffing",
"umit.umpa.sniffing.libpcap",
"umit.umpa.extensions",
"umit.umpa.utils",
],
data_files = data_files,
cmdclass = cmdclasses,
)
|
#! /usr/bin/env python3
#
# In this script we solve the linear elasticity problem on a unit square
# domain, clamped at the left boundary, and stretched at the right boundary
# while keeping vertical displacements free.
from nutils import mesh, function, solver, export, cli, testing
def main(nelems:int, etype:str, btype:str, degree:int, poisson:float):
'''
Horizontally loaded linear elastic plate.
.. arguments::
nelems [10]
Number of elements along edge.
etype [square]
Type of elements (square/triangle/mixed).
btype [std]
Type of basis function (std/spline), with availability depending on the
configured element type.
degree [1]
Polynomial degree.
poisson [.25]
Poisson's ratio, nonnegative and strictly smaller than 1/2.
'''
domain, geom = mesh.unitsquare(nelems, etype)
ns = function.Namespace()
ns.x = geom
ns.basis = domain.basis(btype, degree=degree).vector(2)
ns.u_i = 'basis_ni ?lhs_n'
ns.X_i = 'x_i + u_i'
ns.lmbda = 2 * poisson
ns.mu = 1 - 2 * poisson
ns.strain_ij = '(d(u_i, x_j) + d(u_j, x_i)) / 2'
ns.stress_ij = 'lmbda strain_kk δ_ij + 2 mu strain_ij'
sqr = domain.boundary['left'].integral('u_k u_k J(x)' @ ns, degree=degree*2)
sqr += domain.boundary['right'].integral('(u_0 - .5)^2 J(x)' @ ns, degree=degree*2)
cons = solver.optimize('lhs', sqr, droptol=1e-15)
res = domain.integral('d(basis_ni, x_j) stress_ij J(x)' @ ns, degree=degree*2)
lhs = solver.solve_linear('lhs', res, constrain=cons)
bezier = domain.sample('bezier', 5)
X, sxy = bezier.eval(['X', 'stress_01'] @ ns, lhs=lhs)
export.triplot('shear.png', X, sxy, tri=bezier.tri, hull=bezier.hull)
return cons, lhs
# If the script is executed (as opposed to imported), :func:`nutils.cli.run`
# calls the main function with arguments provided from the command line. For
# example, to keep with the default arguments simply run :sh:`python3
# elasticity.py`. To select mixed elements and quadratic basis functions run
# :sh:`python3 elasticity.py etype=mixed degree=2`.
if __name__ == '__main__':
cli.run(main)
# Once a simulation is developed and tested, it is good practice to save a few
# strategic return values for regression testing. The :mod:`nutils.testing`
# module, which builds on the standard :mod:`unittest` framework, facilitates
# this by providing :func:`nutils.testing.TestCase.assertAlmostEqual64` for the
# embedding of desired results as compressed base64 data.
class test(testing.TestCase):
@testing.requires('matplotlib')
def test_default(self):
cons, lhs = main(nelems=4, etype='square', btype='std', degree=1, poisson=.25)
with self.subTest('constraints'): self.assertAlmostEqual64(cons, '''
eNpjYMACGsiHP0wxMQBKlBdi''')
with self.subTest('left-hand side'): self.assertAlmostEqual64(lhs, '''
eNpjYMAEKcaiRmLGQQZCxgwMYsbrzqcYvz672KTMaIKJimG7CQPDBJM75xabdJ3NMO0xSjG1MUw0Beox
PXIuw7Tk7A/TXqMfQLEfQLEfQLEfpsVnAUzzHtI=''')
@testing.requires('matplotlib')
def test_mixed(self):
cons, lhs = main(nelems=4, etype='mixed', btype='std', degree=1, poisson=.25)
with self.subTest('constraints'): self.assertAlmostEqual64(cons, '''
eNpjYICCBiiEsdFpIuEPU0wMAG6UF2I=''')
with self.subTest('left-hand side'): self.assertAlmostEqual64(lhs, '''
eNpjYICAJGMOI3ljcQMwx3i/JohSMr51HkQnGP8422eiYrjcJM+o3aToWq/Jy3PLTKafzTDtM0oxtTRM
MF2okmJ67lyGacnZH6aOhj9Mu41+mMZq/DA9dO6HaflZAAMdIls=''')
@testing.requires('matplotlib')
def test_quadratic(self):
cons, lhs = main(nelems=4, etype='square', btype='std', degree=2, poisson=.25)
with self.subTest('constraints'): self.assertAlmostEqual64(cons, '''
eNpjYCACNIxc+MOUMAYA/+NOFg==''')
with self.subTest('left-hand side'): self.assertAlmostEqual64(lhs, '''
eNqFzL9KA0EQx/HlLI5wprBJCol/rtfN7MxobZEXOQIJQdBCwfgAItwVStQmZSAvcOmtVW6z5wP4D2yE
aKOwEhTnDRz4VvPhp9T/1zeP0ILF5hhSnUK5cQlKpaDvx3DoWvA57Zt128PIMO5CjHvNOn5s1lCpOi6V
MZ5PGS/k/1U0qGcqVMIcQ5jhmX4XM8N9N8dvWyFtG3RVjOjADOkNBrQMGV3rlJTKaMcN6NUOqWZHlBVV
PjER/0DIDAE/6ICVCjh2Id/ZiBdslY+LrpiOmLaYhJ90IibhNdcW0xHTFTPhUzPhX8h5W3rRuZicV1zO
N3bCgXRUeDFedjxvSc/ai/G86jzfWi87Xswfg5Nx3Q==''')
@testing.requires('matplotlib')
def test_poisson(self):
cons, lhs = main(nelems=4, etype='square', btype='std', degree=1, poisson=.4)
with self.subTest('constraints'): self.assertAlmostEqual64(cons, '''
eNpjYMACGsiHP0wxMQBKlBdi''')
with self.subTest('left-hand side'): self.assertAlmostEqual64(lhs, '''
eNpjYMAEFsaTjdcYvTFcasTAsMZI5JyFce6ZKSavjbNMFhhFmDAwZJkknJ1iInom0ZTJJNx0q1GgKQND
uKn32UTTf6d/mLKY/DDdZvQDKPbD1OvsD9M/pwGZyh9l''')
|
import json
import random
import datetime
from codalib import APP_AUTHOR
from codalib.bagatom import wrapAtom, makeObjectFeed
from dateutil import parser
from django.conf import settings
from django.contrib.sites.models import Site
from django.contrib.syndication.views import Feed
from django.core.paginator import Paginator
from django.http import HttpResponse, HttpResponseNotFound
from django.shortcuts import get_object_or_404, render
from django.utils.feedgenerator import Atom1Feed
from lxml import etree
from django.views.generic import ListView
from .models import Validate
XML_HEADER = b"<?xml version=\"1.0\"?>\n%s"
class CorrectMimeTypeFeed(Atom1Feed):
mime_type = 'application/xml'
class AtomNextNewsFeed(Feed):
"""
    Next view.
    An AtomPub representation of the next validation to occur.
    Should be a single item.
"""
feed_type = Atom1Feed
link = "/validate/next/"
title = "UNT Coda Validate App"
subtitle = "The highest priority validation item"
reason = 'None'
author_name = APP_AUTHOR.get('name', None)
author_link = APP_AUTHOR.get('uri', None)
feed_type = CorrectMimeTypeFeed
def get_object(self, request, server):
if server:
return server
else:
return None
def items(self, obj):
# need to filter by server first, if provided
reason = ''
if obj:
validations = Validate.objects.all().filter(server=obj)
reason = 'This selection was filtered to only consider \
server %s. ' % obj
else:
validations = Validate.objects.all()
# next check if we have any with a priority above 0
v = validations.filter(
priority__gt=0).order_by('priority_change_date')
if v.exists():
reason += 'Item was chosen because it is the \
oldest prioritized.'
# if set is empty, go with any priority with last_verified older than
# settings.VALIDATION_PERIOD
else:
# It might seem natural to use django's built-in random ordering,
# but that technique becomes slow when using large sets
# because 'order by ?' is very expensive against MySQL dbs.
# v = Validate.objects.all().filter(
# last_verified__gte=datetime.datetime.now() -
# settings.VALIDATION_PERIOD
# ).order_by('?')
# instead, let's do this:
# http://elpenia.wordpress.com/2010/05/11/getting-random-objects-from-a-queryset-in-django/
now = datetime.datetime.now()
v = validations.filter(
last_verified__lte=now - settings.VALIDATION_PERIOD
)
if v.exists():
random_slice = int(random.random() * v.count())
v = v[random_slice:]
reason += 'Item was randomly selected and within the \
past year because there is no prioritized record.'
# if that set has no objects, pick the oldest verified item.
else:
v = validations.order_by('last_verified')
reason += 'Item was chosen because there \
is no prioritized record and it had not been validated in the longest \
duration of time.'
self.reason = reason
return v[:1]
def item_title(self, item):
return item.identifier
def item_description(self, item):
return self.reason
def item_link(self, item):
return '/APP/validate/%s/' % item.identifier
# For some reason, I couldn't get AtomNextFeed to work without a server.
# I don't think optional arguments are supported for class-based syndication
# feeds, so I have this workaround to make it work.
class AtomNextFeedNoServer(AtomNextNewsFeed):
def get_object(self, request):
pass
def index(request):
context = {
'recently_prioritized': Validate.objects.filter(
priority__gt=0).order_by('-priority_change_date')[:20],
'recently_verified': Validate.objects.all().order_by('-last_verified')[:20],
'verified_counts': Validate.objects.last_verified_status_counts()
}
return render(request, 'coda_validate/index.html', context)
def last_day_of_month(year, month):
""" Work out the last day of the month """
last_days = [31, 30, 29, 28, 27]
for i in last_days:
try:
end = datetime.datetime(year, month, i)
except ValueError:
continue
else:
return end.day
return None
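# For example, last_day_of_month(2015, 2) == 28 and last_day_of_month(2016, 2) == 29:
# the first candidate day that yields a valid date wins.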
def stats(request):
"""
stats page
"""
if not Validate.objects.exists():
return render(
request,
'coda_validate/stats.html',
{
'sums_by_date': {},
'validations': None,
'this_month': None,
'last_24h': None,
'last_vp': None,
'unverified': 0,
'passed': 0,
'failed': 0,
'validation_period': '%s days' % str(
settings.VALIDATION_PERIOD.days
),
}
)
# resolve the range for last month filter
today = datetime.date.today()
first = datetime.date(day=1, month=today.month, year=today.year)
last_day = last_day_of_month(first.year, first.month)
this_month_range = [
'%s-%s-01 00:00:00' % (first.year, first.month),
'%s-%s-%s 23:59:59' % (first.year, first.month, last_day),
]
# resolve the range for last 24 hours filter
now = datetime.datetime.now()
twenty_four_hours_ago = now - datetime.timedelta(hours=24)
since_validation_period = now - datetime.timedelta(
days=settings.VALIDATION_PERIOD.days)
# make a set of data that makes sense for the heatmap
result_counts = Validate.objects.last_verified_status_counts()
total = sum(result_counts.values())
sums_by_date = Validate.sums_by_date()
sums_by_date_g = {}
years = set()
for dt, ct in sums_by_date.items():
y, m, d = dt
dt = (y, m - 1, d)
sums_by_date_g[dt] = ct
years.add(y)
sums_by_date = sums_by_date_g
num_years = len(years)
return render(
request,
'coda_validate/stats.html',
{
'sums_by_date': dict((('%d, %d, %d' % s, c)
for s, c in sums_by_date.items())),
'num_years': num_years,
'validations': total,
'this_month': Validate.objects.filter(
last_verified__range=this_month_range).count(),
'last_24h': Validate.objects.filter(
last_verified__range=[twenty_four_hours_ago, now]).count(),
'last_vp': Validate.objects.filter(
last_verified__range=[since_validation_period, now]).count(),
'unverified': result_counts.get('Unverified'),
'passed': result_counts.get('Passed'),
'failed': result_counts.get('Failed'),
'validation_period': '%s days' % str(settings.VALIDATION_PERIOD.days),
}
)
def prioritize(request):
"""
prioritize view
"""
identifier = request.GET.get('identifier')
prioritized = False
if identifier:
v = get_object_or_404(Validate, identifier=identifier)
v.priority = 1
v.priority_change_date = datetime.datetime.now()
v.save()
prioritized = True
return render(
request,
'coda_validate/prioritize.html',
{
'identifier': identifier,
'prioritized': prioritized,
}
)
def validate(request, identifier):
"""
    validate detail view
"""
# this view always gets an identifier, if it's wrong, 404
v = get_object_or_404(Validate, identifier=identifier)
# clicked priority button on validate detail page
p = request.GET.get('priority')
if p == '1':
v.priority = 1
v.priority_change_date = datetime.datetime.now()
v.save()
return render(
request,
'coda_validate/validate.html',
{
'validate': v,
}
)
def prioritize_json(request):
"""
prioritize json view
"""
DOMAIN = Site.objects.get_current().domain
identifier = request.GET.get('identifier')
json_dict = {}
json_dict['status'] = 'failure'
status = 404
if identifier:
json_dict['requested_identifier'] = identifier
try:
v = Validate.objects.get(identifier=identifier)
except Exception:
v = None
if v:
v.priority = 1
v.priority_change_date = datetime.datetime.now()
v.save()
json_dict['status'] = 'success'
json_dict['priority'] = v.priority
json_dict['priority_change_date'] = str(v.priority_change_date)
json_dict['atom_pub_url'] = '%s/APP/validate/%s' % \
(DOMAIN, v.identifier)
status = 200
else:
json_dict['response'] = 'identifier was not found'
json_dict['requested_identifier'] = identifier
else:
json_dict['response'] = 'missing identifier parameter'
json_dict['requested_identifier'] = ''
status = 400
response = HttpResponse(content_type='application/json', status=status)
json.dump(
json_dict,
fp=response,
indent=4,
sort_keys=True,
)
return response
def validateToXML(validateObject):
"""
This is the reverse of xmlToValidateObject.
Given a "Validate" object, it generates an
XML object representative of such.
"""
# define namespace
validate_namespace = "http://digital2.library.unt.edu/coda/validatexml/"
val = "{%s}" % validate_namespace
validate_nsmap = {"validate": validate_namespace}
# build xml from object and return
XML = etree.Element("{0}validate".format(val), nsmap=validate_nsmap)
label = etree.SubElement(XML, "{0}identifier".format(val))
label.text = validateObject.identifier
last_verified = etree.SubElement(XML, "{0}last_verified".format(val))
last_verified.text = validateObject.last_verified.isoformat()
last_verified_status = etree.SubElement(XML, "{0}last_verified_status".format(val))
last_verified_status.text = validateObject.last_verified_status
priority_change_date = etree.SubElement(XML, "{0}priority_change_date".format(val))
priority_change_date.text = validateObject.priority_change_date.isoformat()
priority = etree.SubElement(XML, "{0}priority".format(val))
priority.text = str(validateObject.priority)
server = etree.SubElement(XML, "{0}server".format(val))
server.text = validateObject.server
return XML
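# Illustrative shape of the XML built above (element text comes from the
# Validate object; the identifier value here is made up):
#
#   <validate:validate xmlns:validate="http://digital2.library.unt.edu/coda/validatexml/">
#     <validate:identifier>example-identifier</validate:identifier>
#     <validate:last_verified>2015-01-01T00:00:00</validate:last_verified>
#     <validate:last_verified_status>Passed</validate:last_verified_status>
#     <validate:priority_change_date>2015-01-01T00:00:00</validate:priority_change_date>
#     <validate:priority>0</validate:priority>
#     <validate:server>coda-server</validate:server>
#   </validate:validate>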
def xmlToValidateObject(validateXML):
"""
Parse the XML in a POST request and create the validate object
"""
entryRoot = etree.XML(validateXML)
if entryRoot is None:
raise ValueError("Unable to parse uploaded XML")
# parse XML
contentElement = entryRoot.xpath("*[local-name() = 'content']")[0]
validateXML = contentElement.xpath("*[local-name() = 'validate']")[0]
identifier = validateXML.xpath(
"*[local-name() = 'identifier']")[0].text.strip()
last_verified = validateXML.xpath(
"*[local-name() = 'last_verified']")[0].text.strip()
last_verified = parser.parse(last_verified)
last_verified_status = validateXML.xpath(
"*[local-name() = 'last_verified_status']")[0].text.strip()
priority_change_date = validateXML.xpath(
"*[local-name() = 'priority_change_date']")[0].text.strip()
priority_change_date = parser.parse(priority_change_date)
priority = validateXML.xpath(
"*[local-name() = 'priority']")[0].text.strip()
server = validateXML.xpath("*[local-name() = 'server']")[0].text.strip()
# make the object and return
validate = Validate(
identifier=identifier,
last_verified=last_verified,
last_verified_status=last_verified_status,
priority_change_date=priority_change_date,
priority=priority,
server=server,
)
return validate
def xmlToUpdateValidateObject(validateXML):
"""
Parse the XML in a PUT request and adjust the validate based on that
*ONLY MODIFIES 'last_verified_status'*
"""
entryRoot = etree.XML(validateXML)
if entryRoot is None:
raise ValueError("Unable to parse uploaded XML")
# parse XML
contentElement = entryRoot.xpath("*[local-name() = 'content']")[0]
validateXML = contentElement.xpath("*[local-name() = 'validate']")[0]
identifier = validateXML.xpath(
"*[local-name() = 'identifier']")[0].text.strip()
last_verified_status = validateXML.xpath(
"*[local-name() = 'last_verified_status']")[0].text.strip()
# get the object (or 404) and return to the APP view to finish up.
validate = get_object_or_404(Validate, identifier=identifier)
validate.last_verified_status = last_verified_status
validate.last_verified = datetime.datetime.now()
validate.priority = 0
validate.save()
return validate
def app_validate(request, identifier=None):
"""
This method handles the ATOMpub protocol for validate objects
"""
# are we POSTing a new identifier here?
if request.method == 'POST' and not identifier:
# to object
validateObject = xmlToValidateObject(request.body)
validateObject.save()
# and back to xml
validateObjectXML = validateToXML(validateObject)
atomXML = wrapAtom(
xml=validateObjectXML,
id='http://%s/APP/validate/%s/' % (
request.META['HTTP_HOST'], validateObject.identifier
),
title=validateObject.identifier,
)
atomText = XML_HEADER % etree.tostring(atomXML, pretty_print=True)
resp = HttpResponse(atomText, content_type="application/atom+xml")
resp.status_code = 201
resp['Location'] = 'http://%s/APP/validate/%s/' % \
(request.META['HTTP_HOST'], validateObject.identifier)
elif request.method == 'HEAD':
resp = HttpResponse(content_type="application/atom+xml")
resp.status_code = 200
# if not, return a feed
elif request.method == 'GET' and not identifier:
# negotiate the details of our feed here
validates = Validate.objects.all()
page = int(request.GET['page']) if request.GET.get('page') else 1
atomFeed = makeObjectFeed(
paginator=Paginator(validates, 20),
objectToXMLFunction=validateToXML,
feedId=request.path[1:],
webRoot='http://%s' % request.META.get('HTTP_HOST'),
title="validate Entry Feed",
idAttr="identifier",
nameAttr="identifier",
dateAttr="added",
request=request,
page=page,
author={
"name": APP_AUTHOR.get('name', None),
"uri": APP_AUTHOR.get('uri', None)
},
)
atomFeedText = XML_HEADER % etree.tostring(atomFeed, pretty_print=True)
resp = HttpResponse(atomFeedText, content_type="application/atom+xml")
resp.status_code = 200
# updating an existing record
elif request.method == 'PUT' and identifier:
returnValidate = xmlToUpdateValidateObject(request.body)
validateObjectXML = validateToXML(returnValidate)
atomXML = wrapAtom(
xml=validateObjectXML,
id='http://%s/APP/validate/%s/' % (
request.META['HTTP_HOST'], identifier
),
title=identifier,
)
atomText = XML_HEADER % etree.tostring(atomXML, pretty_print=True)
resp = HttpResponse(atomText, content_type="application/atom+xml")
resp.status_code = 200
elif request.method == 'GET' and identifier:
# attempt to retrieve record -- error if unable
try:
validate_object = Validate.objects.get(identifier=identifier)
except Validate.DoesNotExist:
return HttpResponseNotFound(
"There is no validate for identifier %s.\n" % identifier
)
returnValidate = validate_object
validateObjectXML = validateToXML(returnValidate)
atomXML = wrapAtom(
xml=validateObjectXML,
id='http://%s/APP/validate/%s/' % (
request.META['HTTP_HOST'], identifier
),
title=identifier,
author=APP_AUTHOR.get('name', None),
author_uri=APP_AUTHOR.get('uri', None)
)
atomText = XML_HEADER % etree.tostring(atomXML, pretty_print=True)
resp = HttpResponse(atomText, content_type="application/atom+xml")
resp.status_code = 200
elif request.method == 'DELETE' and identifier:
# attempt to retrieve record -- error if unable
try:
validate_object = Validate.objects.get(identifier=identifier)
        except Validate.DoesNotExist:
return HttpResponseNotFound(
"Unable to Delete. There is no identifier %s.\n" % identifier)
# grab the validate, delete it, and inform the user.
returnValidate = validate_object
validateObjectXML = validateToXML(returnValidate)
validate_object.delete()
atomXML = wrapAtom(
xml=validateObjectXML,
id='http://%s/APP/validate/%s/' % (
request.META['HTTP_HOST'], identifier
),
title=identifier,
)
atomText = XML_HEADER % etree.tostring(atomXML, pretty_print=True)
resp = HttpResponse(atomText, content_type="application/atom+xml")
resp.status_code = 200
return resp
def check_json(request):
counts = Validate.objects.last_verified_status_counts()
return HttpResponse(json.dumps(counts), content_type='application/json')
class ValidateListView(ListView):
model = Validate
template_name = 'coda_validate/list.html'
context_object_name = 'validation_list'
paginate_by = 20
def get_queryset(self):
queryset = super(ValidateListView, self).get_queryset()
status = self.request.GET.get('status')
if status:
queryset = queryset.filter(last_verified_status=status)
return queryset
|
from morse.builder import *
# Land robot
morsy = Morsy()
morsy.translate(2.25, -0.75)
pose = Pose()
pose.translate(x=-0.0, z=0.0)
morsy.append(pose)
gripper = Gripper()
gripper.translate(x=0.2000, z=0.5000)
gripper.properties(Angle=360.0, Distance=0.5)
morsy.append(gripper)
camera = VideoCamera()
camera.translate(x=0.2000, z=0.9000)
camera.properties(cam_width=128, cam_height=128)
morsy.append(camera)
waypoint = Waypoint()
waypoint.translate(x=0.0, y=0.0, z=0.0)
waypoint.properties(AngleTolerance=0.1, Speed=50.0)
morsy.append(waypoint)
motion = MotionVW()
morsy.append(motion)
motion.add_service('socket')
gripper.add_service('socket')
pose.add_service('socket')
camera.add_interface('socket')
waypoint.add_interface('socket')
duke = Morsy()
duke.translate(-18.25, -0.25)
pose = Pose()
pose.translate(x=-0.0, z=0.0)
duke.append(pose)
gripper = Gripper()
gripper.translate(x=0.2000, z=0.5000)
gripper.properties(Angle=360.0, Distance=0.5)
duke.append(gripper)
camera = VideoCamera()
camera.translate(x=0.2000, z=0.9000)
camera.properties(cam_width=128, cam_height=128)
duke.append(camera)
waypoint = Waypoint()
waypoint.translate(x=0.0, y=0.0, z=0.0)
waypoint.properties(AngleTolerance=0.1, Speed=50.0)
duke.append(waypoint)
motion = MotionVW()
duke.append(motion)
motion.add_service('socket')
gripper.add_service('socket')
pose.add_service('socket')
camera.add_interface('socket')
waypoint.add_interface('socket')
lara = Morsy()
lara.translate(-8.75, -9.25)
pose = Pose()
pose.translate(x=-0.0, z=0.0)
lara.append(pose)
gripper = Gripper()
gripper.translate(x=0.2000, z=0.5000)
gripper.properties(Angle=360.0, Distance=0.5)
lara.append(gripper)
camera = VideoCamera()
camera.translate(x=0.2000, z=0.9000)
camera.properties(cam_width=128, cam_height=128)
lara.append(camera)
waypoint = Waypoint()
waypoint.translate(x=0.0, y=0.0, z=0.0)
waypoint.properties(AngleTolerance=0.1, Speed=50.0)
lara.append(waypoint)
motion = MotionVW()
lara.append(motion)
motion.add_service('socket')
gripper.add_service('socket')
pose.add_service('socket')
camera.add_interface('socket')
waypoint.add_interface('socket')
env = Environment('/home/orocos/Desktop/cnambot/code/environment_blender/indoor-5')
env.set_camera_rotation([1.0470, 0, 0.7854])
|
#!/usr/bin/python
import numpy as np
import argparse
parser = argparse.ArgumentParser(add_help=True)
parser.register('type', 'bool',
lambda v: v.lower() in ("yes", "true", "t", "1"))
parser.add_argument("-db", type="bool", action="store", default=False)
parser.add_argument("-qoi_dim", type=int, action="store",
default=1, help="MIMC dim")
parser.add_argument("-qoi_df_nu", type=float, action="store",
default=3.5, help="MIMC dim")
args, unknowns = parser.parse_known_args()
if args.qoi_dim:
base = "\
mimc_run.py -mimc_TOL {TOL} -qoi_seed 0 \
-qoi_problem 0 -qoi_sigma 0.2 \
-mimc_min_dim {qoi_dim} -qoi_dim {qoi_dim} -qoi_df_nu {qoi_df_nu} \
-qoi_x0 0.3 0.4 0.6 -ksp_rtol 1e-25 -ksp_type gmres \
-qoi_a0 0 -qoi_f0 1 \
-qoi_scale 10 -qoi_df_sig 0.5 -mimc_M0 1 \
-mimc_beta {beta} -mimc_gamma {gamma} -mimc_h0inv 3 \
-mimc_bayes_fit_lvls 3 -mimc_moments 1 -mimc_bayesian False \
".format(TOL="{TOL}", qoi_df_nu=args.qoi_df_nu, qoi_dim=args.qoi_dim,
beta=" ".join([str("2")]*args.qoi_dim),
gamma=" ".join([str("1")]*args.qoi_dim))
else:
assert False
base += " ".join(unknowns)
if not args.db:
cmd_single = "python " + base + " -mimc_verbose 10 -db False "
print(cmd_single.format(TOL=0.001))
else:
cmd_multi = "python " + base + " -mimc_verbose 0 -db True -db_tag {tag} "
    print(cmd_multi.format(tag="misc_matern_d{:d}_nu{:.2g}".format(args.qoi_dim, args.qoi_df_nu), TOL=1e-10))
|
#!/usr/bin/env python2
# coding=utf-8
"""
Config Handler
"""
__author__ = "Manuel Ebert"
__copyright__ = "Copyright 2015, summer.ai"
__date__ = "2015-11-09"
__email__ = "[email protected]"
import boto3
import os
from util import AttrDict
path = os.path.dirname(os.path.abspath(__file__))
def load_yaml(filename):
"""
This is a shitty YAML parser. If we were grown ups, we'd use PyYaml of course.
But since PyYaml refuses to run on AWS Lambda, we'll do this instead.
Args:
filename - filename to load
Returns:
dict
"""
def parse_value(value):
if "#" in value:
value = value[:value.index("#")]
value = value.strip(" \n")
if not value:
return None
if value.lower() == "true":
return True
if value.lower() == "false":
return False
try:
return int(value)
        except ValueError:
try:
return float(value)
            except ValueError:
return value
result = {}
current_key = None
with open(filename) as f:
for line in f.readlines():
if ":" in line:
key, value = line.split(":", 1)
key = key.strip()
current_key = key
result[key] = parse_value(value)
elif line.strip().startswith("-"):
value = line.strip(" -\n")
if not isinstance(result[current_key], list):
result[current_key] = [parse_value(value)]
else:
result[current_key].append(parse_value(value))
return result
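# Illustrative example of the small YAML subset this parser understands (this
# snippet is not from the original config files):
#
#   region: us-east-1    # trailing comments are stripped
#   debug: false
#   buckets:
#     - words
#     - cache
#
# parses to {'region': 'us-east-1', 'debug': False, 'buckets': ['words', 'cache']}.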
def abs_path(filename):
return os.path.join(path, "config", "{}.yaml".format(filename))
def load_config(config):
keys = load_yaml(abs_path("default"))
keys['credentials'] = {}
if os.path.exists(abs_path("credentials")):
keys['credentials'] = load_yaml(abs_path("credentials"))
if config != 'default':
keys.update(load_yaml(abs_path(config)))
if "aws_access_key" in keys['credentials']:
keys['s3'] = boto3.resource(
's3', region_name=keys['region'],
aws_access_key_id=keys['credentials']['aws_access_key'],
aws_secret_access_key=keys['credentials']['aws_access_secret']
)
keys['s3_client'] = boto3.client(
's3', region_name=keys['region'],
aws_access_key_id=keys['credentials']['aws_access_key'],
aws_secret_access_key=keys['credentials']['aws_access_secret']
)
else:
keys['s3'] = boto3.resource('s3', region_name=keys['region'])
keys['s3_client'] = boto3.client('s3', region_name=keys['region'])
return AttrDict(keys)
config = load_config(os.environ.get('WORDNIK_CONFIG', 'default'))
def update_config(config_name):
global config
config.__data.update(load_yaml(abs_path(config_name)))
|
#!/usr/bin/env python3
# vim:fileencoding=utf-8
#
# (C) Copyright 2012 lilydjwg <[email protected]>
#
# This file is part of xmpptalk.
#
# xmpptalk is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# xmpptalk is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with xmpptalk. If not, see <http://www.gnu.org/licenses/>.
#
import sys
import os
import logging
import datetime
import base64
import hashlib
from collections import defaultdict
from functools import partial
from xml.etree import ElementTree as ET
import pyxmpp2.exceptions
from pyxmpp2.jid import JID
from pyxmpp2.message import Message
from pyxmpp2.presence import Presence
from pyxmpp2.client import Client
from pyxmpp2.settings import XMPPSettings
from pyxmpp2.roster import RosterReceivedEvent
from pyxmpp2.interfaces import EventHandler, event_handler, QUIT, NO_CHANGE
from pyxmpp2.streamevents import AuthorizedEvent, DisconnectedEvent
from pyxmpp2.interfaces import XMPPFeatureHandler
from pyxmpp2.interfaces import presence_stanza_handler, message_stanza_handler
from pyxmpp2.ext.version import VersionProvider
from pyxmpp2.expdict import ExpiringDictionary
from pyxmpp2.iq import Iq
try:
from xmpp_receipt import ReceiptSender
except ImportError:
ReceiptSender = None
from misc import *
import config
import models
from models import ValidationError
from messages import MessageMixin
from user import UserMixin
if getattr(config, 'conn_lost_interval_minutes', False):
conn_lost_interval = datetime.timedelta(minutes=config.conn_lost_interval_minutes)
else:
conn_lost_interval = None
class ChatBot(MessageMixin, UserMixin, EventHandler, XMPPFeatureHandler):
got_roster = False
message_queue = None
receipt_sender = None
ignore = set()
def __init__(self, jid, settings, botsettings=None):
if 'software_name' not in settings:
settings['software_name'] = self.__class__.__name__
if 'software_version' not in settings:
settings['software_version'] = __version__
version_provider = VersionProvider(settings)
handlers = []
if ReceiptSender:
self.receipt_sender = rs = ReceiptSender()
handlers.append(rs)
handlers.extend([self, version_provider])
self.client = Client(jid, handlers, settings)
self.presence = defaultdict(dict)
self.subscribes = ExpiringDictionary(default_timeout=5)
self.invited = {}
self.avatar_hash = None
self.settings = botsettings
def run(self):
self.client.connect()
self.jid = self.client.jid.bare()
logger.info('self jid: %r', self.jid)
self.update_on_setstatus = set()
if self.receipt_sender:
self.receipt_sender.stream = self.client.stream
self.client.run()
def disconnect(self):
    '''Request disconnection and let the main loop run for 2 more
    seconds for a graceful disconnection.'''
self.client.disconnect()
while True:
try:
self.client.run(timeout = 2)
except pyxmpp2.exceptions.StreamParseError:
# we raise SystemExit to exit, expat says XML_ERROR_FINISHED
pass
else:
break
def handle_early_message(self):
self.got_roster = True
q = self.message_queue
if q:
self.now = datetime.datetime.utcnow()
for sender, stanza in q:
self.current_jid = sender
self._cached_jid = None
try:
timestamp = stanza.as_xml().find('{urn:xmpp:delay}delay').attrib['stamp']
except AttributeError:
timestamp = None
self.handle_message(stanza.body, timestamp)
self.message_queue = self.__class__.message_queue = None
@event_handler(RosterReceivedEvent)
  def roster_received(self, stanza):
self.delayed_call(2, self.handle_early_message)
self.delayed_call(getattr(config, 'reconnect_timeout', 24 * 3600), self.signal_connect)
nick, avatar_type, avatar_file = (getattr(config, x, None) for x in ('nick', 'avatar_type', 'avatar_file'))
if nick or (avatar_type and avatar_file):
self.set_vcard(nick, (avatar_type, avatar_file))
return True
def signal_connect(self):
logging.info('Schedule to re-connecting...')
self.client.disconnect()
@message_stanza_handler()
def message_received(self, stanza):
if stanza.stanza_type != 'chat':
return True
if not stanza.body:
logging.info("%s message: %s", stanza.from_jid, stanza.serialize())
return True
sender = stanza.from_jid
body = stanza.body
self.current_jid = sender
self.now = datetime.datetime.utcnow()
logging.info('[%s] %s', sender, stanza.body)
if '@' not in str(sender.bare()):
logging.info('(server messages ignored)')
return True
if str(sender.bare()) in self.ignore:
logging.info('(The above message is ignored on purpose)')
return True
if getattr(config, 'ban_russian'):
if str(sender.bare()).endswith('.ru'):
logging.info('(Russian messager banned)')
return True
elif is_russian(body):
logging.info('(Russian message banned)')
return True
if not self.got_roster:
if not self.message_queue:
self.message_queue = []
self.message_queue.append((sender, stanza))
else:
self.handle_message(body)
logging.info('done with new message')
return True
def send_message(self, receiver, msg):
if isinstance(receiver, str):
receiver = JID(receiver)
m = Message(
stanza_type = 'chat',
from_jid = self.jid,
to_jid = receiver,
body = msg,
)
self.send(m)
def reply(self, msg):
self.send_message(self.current_jid, msg)
def send(self, stanza):
self.client.stream.send(stanza)
def delayed_call(self, seconds, func, *args, **kwargs):
self.client.main_loop.delayed_call(seconds, partial(func, *args, **kwargs))
@event_handler(DisconnectedEvent)
def handle_disconnected(self, event):
return QUIT
@property
def roster(self):
return self.client.roster
def get_online_users(self):
ret = [x.jid for x in self.roster if x.subscription == 'both' and \
str(x.jid) in self.presence]
logging.info('%d online buddies: %r', len(ret), ret)
return ret
def get_xmpp_status(self, jid):
return sorted(self.presence[str(jid)].values(), key=lambda x: x['priority'], reverse=True)[0]
def xmpp_setstatus(self, status, to_jid=None):
if isinstance(to_jid, str):
to_jid = JID(to_jid)
presence = Presence(status=status, to_jid=to_jid)
self.send(presence)
def update_roster(self, jid, name=NO_CHANGE, groups=NO_CHANGE):
self.client.roster_client.update_item(jid, name, groups)
def removeInvitation(self):
for ri in self.roster.values():
if ri.ask is not None:
self.client.roster_client.remove_item(ri.jid)
logging.info('%s removed', ri.jid)
def unsubscribe(self, jid, type='unsubscribe'):
presence = Presence(to_jid=jid, stanza_type=type)
self.send(presence)
def subscribe(self, jid):
self.invited[jid] = 2
presence = Presence(to_jid=jid, stanza_type='subscribe')
self.send(presence)
@presence_stanza_handler('subscribe')
def handle_presence_subscribe(self, stanza):
logging.info('%s subscribe', stanza.from_jid)
sender = stanza.from_jid
bare = sender.bare()
# avoid repeated request
invited = False
if bare not in self.subscribes:
invited = self.invited.get(bare, False)
if invited is not False:
if invited == 2:
self.invited[bare] = 1
else:
del self.invited[bare]
return stanza.make_accept_response()
        # We won't deny invited members
self.handle_userjoin_before()
else:
if config.private and str(bare) != config.root:
self.send_message(sender, _('Sorry, this is a private group, and you are not invited.'))
return stanza.make_deny_response()
if not self.handle_userjoin_before():
return stanza.make_deny_response()
self.current_jid = sender
self.now = datetime.datetime.utcnow()
try:
self.handle_userjoin(action=stanza.stanza_type)
except ValidationError:
#The server is subscribing
pass
self.subscribes[bare] = True
if stanza.stanza_type.endswith('ed'):
return stanza.make_accept_response()
if invited is False:
presence = Presence(to_jid=stanza.from_jid.bare(),
stanza_type='subscribe')
return [stanza.make_accept_response(), presence]
@presence_stanza_handler('subscribed')
def handle_presence_subscribed(self, stanza):
# use the same function
logging.info('%s subscribed', stanza.from_jid)
return self.handle_presence_subscribe(stanza)
@presence_stanza_handler('unsubscribe')
def handle_presence_unsubscribe(self, stanza):
logging.info('%s unsubscribe', stanza.from_jid)
sender = stanza.from_jid
self.current_jid = sender
self.now = datetime.datetime.utcnow()
self.handle_userleave(action=stanza.stanza_type)
if stanza.stanza_type.endswith('ed'):
return stanza.make_accept_response()
presence = Presence(to_jid=stanza.from_jid.bare(),
stanza_type='unsubscribe')
return [stanza.make_accept_response(), presence]
@presence_stanza_handler('unsubscribed')
def handle_presence_unsubscribed(self, stanza):
# use the same function
logging.info('%s unsubscribed', stanza.from_jid)
return self.handle_presence_unsubscribe(stanza)
@presence_stanza_handler()
def handle_presence_available(self, stanza):
if stanza.stanza_type not in ('available', None):
return False
jid = stanza.from_jid
plainjid = str(jid.bare())
if plainjid == str(self.jid):
return
self.now = datetime.datetime.utcnow()
if plainjid not in self.presence:
type = 'new'
self.current_jid = jid
self.user_update_presence(plainjid)
if conn_lost_interval and self.current_user and self.current_user.last_seen and \
self.now - self.current_user.last_seen < conn_lost_interval:
type = 'reconnect'
self.send_lost_message()
logging.info('%s[%s] (%s)', jid, stanza.show or 'available', type)
if self.roster and jid.bare() not in self.roster:
presence = Presence(to_jid=jid.bare(), stanza_type='subscribe')
self.send(presence)
presence = Presence(to_jid=jid.bare(), stanza_type='subscribed')
self.send(presence)
else:
if jid.resource not in self.presence[plainjid]:
self.user_update_presence(plainjid)
logging.info('%s[%s]', jid, stanza.show or 'available')
self.presence[plainjid][jid.resource] = {
'show': stanza.show,
'status': stanza.status,
'priority': stanza.priority,
}
if self.get_user_by_jid(plainjid) is None:
try:
self.current_jid = jid
self.handle_userjoin()
except ValidationError:
#The server is subscribing
pass
if config.warnv105 and jid.resource and \
jid.resource.startswith('Talk.') and not jid.resource.startswith('Talk.v104'):
# Got a Talk.v107...
# No need to translate; GTalk only has a v105 for Chinese.
self.send_message(jid, '警告:你正在使用的可能是不加密的 GTalk v105 版本。网络上的其它人可能会截获您的消息。这样不安全!请使用 GTalk v104 英文版或者其它 XMPP 客户端。\nGTalk 英文版: http://www.google.com/talk/index.html\nPidgin: http://www.pidgin.im/')
return True
@presence_stanza_handler('unavailable')
def handle_presence_unavailable(self, stanza):
jid = stanza.from_jid
plainjid = str(jid.bare())
if plainjid in self.presence and plainjid != str(self.jid):
try:
del self.presence[plainjid][jid.resource]
except KeyError:
pass
if self.presence[plainjid]:
logging.info('%s[unavailable] (partly)', jid)
else:
del self.presence[plainjid]
self.now = datetime.datetime.utcnow()
self.user_disappeared(plainjid)
logging.info('%s[unavailable] (totally)', jid)
return True
@event_handler()
def handle_all(self, event):
'''Log all events.'''
logging.info('-- {0}'.format(event))
def get_name(self, jid):
if isinstance(jid, str):
jid = JID(jid)
else:
jid = jid.bare()
try:
return self.roster[jid].name or hashjid(jid)
except KeyError:
return hashjid(jid)
def get_vcard(self, jid=None, callback=None):
'''callback is used as both result handler and error handler'''
q = Iq(
to_jid = jid and jid.bare(),
stanza_type = 'get',
)
vc = ET.Element("{vcard-temp}vCard")
q.add_payload(vc)
if callback:
self.stanza_processor.set_response_handlers(q, callback, callback)
self.send(q)
def set_vcard(self, nick=None, avatar=None):
self.get_vcard(callback=partial(self._set_vcard, nick, avatar))
def _set_vcard(self, nick=None, avatar=None, stanza=None):
#FIXME: This doesn't seem to work with jabber.org
q = Iq(
from_jid = self.jid,
stanza_type = 'set',
)
vc = ET.Element("{vcard-temp}vCard")
if nick is not None:
n = ET.SubElement(vc, '{vcard-temp}FN')
n.text = nick
if avatar is not None:
type, picfile = avatar
photo = ET.SubElement(vc, '{vcard-temp}PHOTO')
t = ET.SubElement(photo, '{vcard-temp}TYPE')
t.text = type
d = ET.SubElement(photo, '{vcard-temp}BINVAL')
data = open(picfile, 'rb').read()
d.text = base64.b64encode(data).decode('ascii')
self.avatar_hash = hashlib.new('sha1', data).hexdigest()
q.add_payload(vc)
self.stanza_processor.set_response_handlers(
q, self._set_vcard_callback, self._set_vcard_callback)
self.send(q)
def _set_vcard_callback(self, stanza):
if stanza.stanza_type == 'error':
logging.error('failed to set my vCard.')
else:
logging.info('my vCard set.')
self.update_presence()
def update_presence(self):
#TODO: update for individual users
presence = self.settings['presence']
x = ET.Element('{vcard-temp:x:update}x')
if self.avatar_hash:
photo = ET.SubElement(x, '{vcard-temp:x:update}photo')
photo.text = self.avatar_hash
presence.add_payload(x)
self.send(presence)
def runit(settings, mysettings):
bot = ChatBot(JID(config.jid), settings, mysettings)
try:
bot.run()
        # bot.run() only returns when the connection is reset; raise so the
        # caller (restart_if_failed) can restart the bot.
        raise Exception
except SystemExit as e:
if e.code == CMD_RESTART:
# restart
bot.disconnect()
models.connection.disconnect()
try:
os.close(lock_fd[0])
except:
pass
logging.info('restart...')
os.execv(sys.executable, [sys.executable] + sys.argv)
except KeyboardInterrupt:
pass
finally:
ChatBot.message_queue = bot.message_queue
bot.disconnect()
def main():
gp = models.connection.Group.one()
if gp and gp.status:
st = gp.status
else:
st = None
settings = dict(
# deliver here even if the admin logs in
initial_presence = Presence(priority=30, status=st),
poll_interval = 3,
)
botsettings = {
'presence': settings['initial_presence'],
}
settings.update(config.settings)
settings = XMPPSettings(settings)
if config.trace:
logging.info('enabling trace')
for logger in ('pyxmpp2.IN', 'pyxmpp2.OUT'):
logger = logging.getLogger(logger)
logger.setLevel(logging.DEBUG)
for logger in (
'pyxmpp2.mainloop.base', 'pyxmpp2.expdict',
'pyxmpp2.mainloop.poll', 'pyxmpp2.mainloop.events',
'pyxmpp2.transport', 'pyxmpp2.mainloop.events',
):
logger = logging.getLogger(logger)
logger.setLevel(max((logging.INFO, config.logging_level)))
if config.logging_level > logging.DEBUG:
restart_if_failed(runit, 3, args=(settings, botsettings))
else:
runit(settings, botsettings)
if __name__ == '__main__':
setup_logging()
models.init()
main()
|
# -*- coding: UTF-8 -*-
# webseach
# created: 2015/10/30
# author: qianqians
import sys
reload(sys)
sys.setdefaultencoding('utf8')
sys.path.append('../')
from webget import gethtml
import pymongo
from doclex import doclex
import time
collection_key = None
def seach(urllist):
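    # Added note on the crawl flow: root URLs are fetched first, their sub-URLs
    # and keyword->URL maps are collected, then process_urllist/process_keyurl
    # recurse over those results, storing keyword/URL hits in MongoDB via
    # gethtml.collection.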
def process_keyurl(keyurl):
if keyurl is not None:
for key, urllist in keyurl.iteritems():
for url in urllist:
urlinfo = gethtml.process_url(url)
if urlinfo is None:
continue
list, keyurl1 = urlinfo
if list is not None:
gethtml.collection.insert({'key':key, 'url':url, 'timetmp':time.time()})
if keyurl1 is not None:
process_keyurl(keyurl1)
def process_urllist(url_list):
for url in url_list:
#print url,"sub url"
urlinfo = gethtml.process_url(url)
if urlinfo is None:
continue
list, keyurl = urlinfo
if list is not None:
process_urllist(list)
if keyurl is not None:
process_keyurl(keyurl)
time.sleep(0.1)
suburl = []
subkeyurl = {}
for url in urllist:
print url, "root url"
urlinfo = gethtml.process_url(url)
if urlinfo is None:
continue
list, keyurl = urlinfo
        if list is not None:
            suburl.extend(list)
        if keyurl is not None:
            subkeyurl.update(keyurl)
try:
process_urllist(suburl)
process_keyurl(subkeyurl)
except:
import traceback
traceback.print_exc()
urllist = ["http://www.qidian.com/Default.aspx",
"http://www.zongheng.com/",
"http://chuangshi.qq.com/"
]
def refkeywords():
c = collection_key.find()
keywords = []
for it in c:
keywords.append(it["key"])
doclex.keykorks = keywords
if __name__ == '__main__':
conn = pymongo.Connection('localhost',27017)
db = conn.webseach
gethtml.collection = db.webpage
gethtml.collection_url_profile = db.urlprofile
gethtml.collection_url_title = db.urltitle
collection_key = db.keys
t = 0
while True:
timetmp = time.time()-t
if timetmp > 86400:
refkeywords()
t = time.time()
#urllist = seach(urllist)
seach(urllist)
|
import doctest
from insights.parsers import neutron_l3_agent_log
from insights.parsers.neutron_l3_agent_log import NeutronL3AgentLog
from insights.tests import context_wrap
from datetime import datetime
AGENT_LOG = """
2017-09-17 10:05:06.241 141544 INFO neutron.agent.l3.ha [-] Router 01d51830-0e3e-4100-a891-efd7dbc000b1 transitioned to backup
2017-09-17 10:05:07.828 141544 WARNING neutron.agent.linux.iptables_manager [-] Duplicate iptables rule detected. This may indicate a bug in the the iptables rule generation code. Line: -A neutron-l3-agent-INPUT -p tcp -m tcp --dport 9697 -j DROP
2017-09-17 10:05:07.829 141544 WARNING neutron.agent.linux.iptables_manager [-] Duplicate iptables rule detected. This may indicate a bug in the the iptables rule generation code. Line: -A neutron-l3-agent-INPUT -m mark --mark 0x1/0xffff -j ACCEP
"""
def test_metrics_log():
log = NeutronL3AgentLog(context_wrap(AGENT_LOG))
assert len(log.get('INFO')) == 1
assert 'Duplicate iptables rule detected' in log
assert len(log.get('Duplicate iptables rule detected')) == 2
assert len(list(log.get_after(datetime(2017, 2, 17, 19, 36, 38)))) == 3
def test_doc():
env = {'agent_log': NeutronL3AgentLog(context_wrap(AGENT_LOG, path='/var/log/neutron/l3-agent.log'))}
failed, total = doctest.testmod(neutron_l3_agent_log, globs=env)
assert failed == 0
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
from lib.core.settings import IS_WIN, UNICODE_ENCODING
def singleTimeWarnMessage(message): # Cross-linked function
sys.stdout.write(message)
sys.stdout.write("\n")
sys.stdout.flush()
def stdoutencode(data):
retVal = None
try:
data = data or ""
# Reference: http://bugs.python.org/issue1602
if IS_WIN:
output = data.encode(sys.stdout.encoding, "replace")
if '?' in output and '?' not in data:
warnMsg = "cannot properly display Unicode characters "
warnMsg += "inside Windows OS command prompt "
warnMsg += "(http://bugs.python.org/issue1602). All "
warnMsg += "unhandled occurances will result in "
warnMsg += "replacement with '?' character. Please, find "
warnMsg += "proper character representation inside "
warnMsg += "corresponding output files. "
singleTimeWarnMessage(warnMsg)
retVal = output
else:
retVal = data.encode(sys.stdout.encoding)
except:
retVal = data.encode(UNICODE_ENCODING) if isinstance(data, unicode) else data
return retVal
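# Minimal usage sketch (added; not part of the original module). Assuming the
# module is run directly, it simply shows stdoutencode degrading gracefully for
# non-ASCII text on consoles with a limited output encoding.
if __name__ == "__main__":
    sample = u"caf\xe9 \u2713"
    sys.stdout.write(stdoutencode(sample))
    sys.stdout.write("\n")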
|
from datetime import datetime, timedelta
from netCDF4 import Dataset
from netCDF4 import num2date
import numpy as np
import time
import os
__author__ = 'Trond Kristiansen'
__email__ = '[email protected]'
__created__ = datetime(2014, 1, 23)
__modified__ = datetime(2014, 1, 23)
__version__ = "0.1"
__status__ = "Development"
def help():
    """
    This function generates a netCDF4 file and saves the running-average values for
    specific years into a file for each IPCC AR5 model.
    Used together with extractIce.py.
    """
def writeCMIP5File(modelName,scenario,myvarname,lon,lat,time,mydata,mydataanomaly,outfilename):
myformat='NETCDF3_CLASSIC'
if os.path.exists(outfilename):
os.remove(outfilename)
print "Results written to netcdf file: %s"%(outfilename)
if myvarname=="sic": myvar="SIC"
f1 = Dataset(outfilename, mode='w', format=myformat)
f1.title = "IPCC AR5 %s"%(myvar)
f1.description = "IPCC AR5 running averages of %s for model %s for scenario %s"%(myvar,modelName,scenario)
f1.history = "Created " + str(datetime.now())
f1.source = "Trond Kristiansen ([email protected])"
f1.type = "File in NetCDF3 format created using iceExtract.py"
f1.Conventions = "CF-1.0"
"""Define dimensions"""
f1.createDimension('x', len(lon))
f1.createDimension('y', len(lat))
f1.createDimension('time', None)
vnc = f1.createVariable('longitude', 'd', ('x',),zlib=False)
vnc.long_name = 'Longitude'
vnc.units = 'degree_east'
vnc.standard_name = 'longitude'
vnc[:] = lon
vnc = f1.createVariable('latitude', 'd', ('y',),zlib=False)
vnc.long_name = 'Latitude'
vnc.units = 'degree_north'
vnc.standard_name = 'latitude'
vnc[:] = lat
v_time = f1.createVariable('time', 'd', ('time',),zlib=False)
v_time.long_name = 'Years'
v_time.units = 'Years'
v_time.field = 'time, scalar, series'
v_time[:]=time
v_temp=f1.createVariable('SIC', 'd', ('time', 'y', 'x',),zlib=False)
v_temp.long_name = "Sea-ice area fraction (%)"
v_temp.units = "%"
v_temp.time = "time"
v_temp.field="SIC, scalar, series"
v_temp.missing_value = 1e20
if myvarname=='sic':
f1.variables['SIC'][:,:,:] = mydata
f1.close()
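# Minimal usage sketch (added; not part of the original script). It writes a
# tiny synthetic sea-ice field just to exercise writeCMIP5File; the model and
# scenario names and the output filename are illustrative placeholders.
if __name__ == '__main__':
    lon = np.linspace(-180.0, 180.0, 10)
    lat = np.linspace(-90.0, 90.0, 5)
    years = np.arange(2006, 2009)
    sic = np.random.uniform(0.0, 100.0, (len(years), len(lat), len(lon)))
    writeCMIP5File("DemoModel", "rcp85", "sic", lon, lat, years, sic, None,
                   "demo_sic_runningavg.nc")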
|
# =================================================================
#
# Authors: Tom Kralidis <[email protected]>
# Just van den Broecke <[email protected]>
#
# Copyright (c) 2014 Tom Kralidis
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation
# files (the "Software"), to deal in the Software without
# restriction, including without limitation the rights to use,
# copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following
# conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
#
# =================================================================
import base64
import csv
import json
import logging
from io import StringIO
from flask import (abort, flash, g, jsonify, redirect,
render_template, request, url_for)
from flask_babel import gettext
from flask_login import (LoginManager, login_user, logout_user,
current_user, login_required)
from flask_migrate import Migrate
from itertools import chain
import views
from __init__ import __version__
from enums import RESOURCE_TYPES
from factory import Factory
from init import App
from models import Resource, Run, ProbeVars, CheckVars, Tag, User, Recipient
from resourceauth import ResourceAuth
from util import send_email, geocode, format_checked_datetime, \
format_run_status, format_obj_value
# Module globals for convenience
LOGGER = logging.getLogger(__name__)
APP = App.get_app()
CONFIG = App.get_config()
DB = App.get_db()
BABEL = App.get_babel()
MIGRATE = Migrate(APP, DB)
LOGIN_MANAGER = LoginManager()
LOGIN_MANAGER.init_app(APP)
LANGUAGES = (
('en', 'English'),
('fr', 'Français'),
('de', 'German'),
('nl_NL', 'Nederlands (Nederland)'),
('es_BO', 'Español (Bolivia)'),
('hr_HR', 'Croatian (Croatia)')
)
# Should GHC Runner be run within GHC webapp?
if CONFIG['GHC_RUNNER_IN_WEBAPP'] is True:
LOGGER.info('Running GHC Scheduler in WebApp')
from scheduler import start_schedule
# Start scheduler
start_schedule()
else:
LOGGER.info('NOT Running GHC Scheduler in WebApp')
# commit or rollback shorthand
def db_commit():
    err = None
    try:
        DB.session.commit()
    except Exception as exc:
        # Capture the error so callers can report it (previously it was lost)
        err = exc
        DB.session.rollback()
    # finally:
    #     DB.session.close()
    return err
@APP.before_request
def before_request():
g.user = current_user
if request.args and 'lang' in request.args and request.args['lang'] != '':
g.current_lang = request.args['lang']
if not hasattr(g, 'current_lang'):
g.current_lang = 'en'
if CONFIG['GHC_REQUIRE_WEBAPP_AUTH'] is True:
# Login is required to access GHC Webapp.
# We need to pass-through static resources like CSS.
if any(['/static/' in request.path,
request.path.endswith('.ico'),
g.user.is_authenticated(), # This is from Flask-Login
(request.endpoint is not None
and getattr(APP.view_functions[request.endpoint],
'is_public', False))]):
return # Access granted
else:
return redirect(url_for('login'))
# Marks (endpoint-) function as always to be accessible
# (used for GHC_REQUIRE_WEBAPP_AUTH)
def public_route(decorated_function):
decorated_function.is_public = True
return decorated_function
@APP.teardown_appcontext
def shutdown_session(exception=None):
DB.session.remove()
@BABEL.localeselector
def get_locale():
return g.get('current_lang', 'en')
# return request.accept_languages.best_match(LANGUAGES.keys())
@LOGIN_MANAGER.user_loader
def load_user(identifier):
return User.query.get(int(identifier))
@LOGIN_MANAGER.unauthorized_handler
def unauthorized_callback():
if request.query_string:
url = '%s%s?%s' % (request.script_root, request.path,
request.query_string)
else:
url = '%s%s' % (request.script_root, request.path)
return redirect(url_for('login', lang=g.current_lang, next=url))
@LOGIN_MANAGER.request_loader
def load_user_from_request(request):
# Try to login using Basic Auth
# Inspiration: https://flask-login.readthedocs.io
# /en/latest/#custom-login-using-request-loader
basic_auth_val = request.headers.get('Authorization')
if basic_auth_val:
basic_auth_val = basic_auth_val.replace('Basic ', '', 1)
        authenticated = False
        username = None
        try:
            username, password = base64.b64decode(basic_auth_val).split(':')
            user = User.query.filter_by(username=username).first()
            if user:
                authenticated = user.authenticate(password)
        except Exception:
            # Ignore errors; any failure here simply fails the auth attempt
            pass
if not authenticated:
LOGGER.warning('Unauthorized access for user=%s' % username)
abort(401)
else:
return user
# TODO: may add login via api-key or token here
# finally, return None if both methods did not login the user
return None
@APP.template_filter('cssize_reliability')
def cssize_reliability(value, css_type=None):
"""returns CSS button class snippet based on score"""
number = int(value)
if CONFIG['GHC_RELIABILITY_MATRIX']['red']['min'] <= number <= \
CONFIG['GHC_RELIABILITY_MATRIX']['red']['max']:
score = 'danger'
panel = 'red'
elif (CONFIG['GHC_RELIABILITY_MATRIX']['orange']['min'] <= number <=
CONFIG['GHC_RELIABILITY_MATRIX']['orange']['max']):
score = 'warning'
panel = 'yellow'
elif (CONFIG['GHC_RELIABILITY_MATRIX']['green']['min'] <= number <=
CONFIG['GHC_RELIABILITY_MATRIX']['green']['max']):
score = 'success'
panel = 'green'
else: # should never really get here
score = 'info'
panel = 'blue'
if css_type is not None and css_type == 'panel':
return panel
else:
return score
@APP.template_filter('cssize_reliability2')
def cssize_reliability2(value):
"""returns CSS panel class snippet based on score"""
return cssize_reliability(value, 'panel')
@APP.template_filter('round2')
def round2(value):
"""rounds a number to 2 decimal places except for values of 0 or 100"""
if value in [0.0, 100.0]:
return int(value)
return round(value, 2)
@APP.context_processor
def context_processors():
"""global context processors for templates"""
rtc = views.get_resource_types_counts()
tags = views.get_tag_counts()
return {
'app_version': __version__,
'resource_types': RESOURCE_TYPES,
'resource_types_counts': rtc['counts'],
'resources_total': rtc['total'],
'languages': LANGUAGES,
'tags': tags,
'tagnames': list(tags.keys())
}
@APP.route('/')
def home():
"""homepage"""
response = views.get_health_summary()
return render_template('home.html', response=response)
@APP.route('/csv', endpoint='csv')
@APP.route('/json', endpoint='json')
def export():
"""export resource list as JSON"""
resource_type = None
if request.args.get('resource_type') in RESOURCE_TYPES.keys():
resource_type = request.args['resource_type']
query = request.args.get('q')
response = views.list_resources(resource_type, query)
if request.url_rule.rule == '/json':
json_dict = {'total': response['total'], 'resources': []}
for r in response['resources']:
try:
ghc_url = '%s/resource/%s' % \
(CONFIG['GHC_SITE_URL'], r.identifier)
last_run_report = '-'
if r.last_run:
last_run_report = r.last_run.report
json_dict['resources'].append({
'resource_type': r.resource_type,
'title': r.title,
'url': r.url,
'ghc_url': ghc_url,
'ghc_json': '%s/json' % ghc_url,
'ghc_csv': '%s/csv' % ghc_url,
'first_run': format_checked_datetime(r.first_run),
'last_run': format_checked_datetime(r.last_run),
'status': format_run_status(r.last_run),
'min_response_time': round(r.min_response_time, 2),
'average_response_time': round(r.average_response_time, 2),
'max_response_time': round(r.max_response_time, 2),
'reliability': round(r.reliability, 2),
'last_report': format_obj_value(last_run_report)
})
except Exception as e:
LOGGER.warning(
'JSON error resource id=%d: %s' % (r.identifier, str(e)))
return jsonify(json_dict)
elif request.url_rule.rule == '/csv':
output = StringIO()
writer = csv.writer(output)
header = [
'resource_type', 'title', 'url', 'ghc_url', 'ghc_json', 'ghc_csv',
'first_run', 'last_run', 'status', 'min_response_time',
'average_response_time', 'max_response_time', 'reliability'
]
writer.writerow(header)
for r in response['resources']:
try:
ghc_url = '%s%s' % (CONFIG['GHC_SITE_URL'],
url_for('get_resource_by_id',
identifier=r.identifier))
writer.writerow([
r.resource_type,
r.title,
r.url,
ghc_url,
'%s/json' % ghc_url,
'%s/csv' % ghc_url,
format_checked_datetime(r.first_run),
format_checked_datetime(r.last_run),
format_run_status(r.last_run),
round(r.min_response_time, 2),
round(r.average_response_time, 2),
round(r.max_response_time, 2),
round(r.reliability, 2)
])
except Exception as e:
LOGGER.warning(
'CSV error resource id=%d: %s' % (r.identifier, str(e)))
return output.getvalue(), 200, {'Content-type': 'text/csv'}
@APP.route('/opensearch')
def opensearch():
"""generate OpenSearch description document"""
content = render_template('opensearch_description.xml')
return content, 200, {'Content-type': 'text/xml'}
@APP.route('/resource/<identifier>/csv', endpoint='csv-resource')
@APP.route('/resource/<identifier>/json', endpoint='json-resource')
def export_resource(identifier):
"""export resource as JSON or CSV"""
resource = views.get_resource_by_id(identifier)
history_csv = '%s/resource/%s/history/csv' % (CONFIG['GHC_SITE_URL'],
resource.identifier)
history_json = '%s/resource/%s/history/json' % (CONFIG['GHC_SITE_URL'],
resource.identifier)
if 'json' in request.url_rule.rule:
last_run_report = '-'
if resource.last_run:
last_run_report = resource.last_run.report
json_dict = {
'identifier': resource.identifier,
'title': resource.title,
'url': resource.url,
'resource_type': resource.resource_type,
'owner': resource.owner.username,
'min_response_time': resource.min_response_time,
'average_response_time': resource.average_response_time,
'max_response_time': resource.max_response_time,
'reliability': resource.reliability,
'status': format_run_status(resource.last_run),
'first_run': format_checked_datetime(resource.first_run),
'last_run': format_checked_datetime(resource.last_run),
'history_csv': history_csv,
'history_json': history_json,
'last_report': format_obj_value(last_run_report)
}
return jsonify(json_dict)
elif 'csv' in request.url_rule.rule:
output = StringIO()
writer = csv.writer(output)
header = [
'identifier', 'title', 'url', 'resource_type', 'owner',
'min_response_time', 'average_response_time', 'max_response_time',
'reliability', 'status', 'first_run', 'last_run', 'history_csv',
'history_json'
]
writer.writerow(header)
writer.writerow([
resource.identifier,
resource.title,
resource.url,
resource.resource_type,
resource.owner.username,
resource.min_response_time,
resource.average_response_time,
resource.max_response_time,
resource.reliability,
format_run_status(resource.last_run),
format_checked_datetime(resource.first_run),
format_checked_datetime(resource.last_run),
history_csv,
history_json
])
return output.getvalue(), 200, {'Content-type': 'text/csv'}
@APP.route('/resource/<identifier>/history/csv',
endpoint='csv-resource-history')
@APP.route('/resource/<identifier>/history/json',
endpoint='json-resource-history')
def export_resource_history(identifier):
"""export resource history as JSON or CSV"""
resource = views.get_resource_by_id(identifier)
if 'json' in request.url_rule.rule:
json_dict = {'runs': []}
for run in resource.runs:
json_dict['runs'].append({
'owner': resource.owner.username,
'resource_type': resource.resource_type,
'checked_datetime': format_checked_datetime(run),
'title': resource.title,
'url': resource.url,
'response_time': round(run.response_time, 2),
'status': format_run_status(run)
})
return jsonify(json_dict)
elif 'csv' in request.url_rule.rule:
output = StringIO()
writer = csv.writer(output)
header = [
'owner', 'resource_type', 'checked_datetime', 'title', 'url',
'response_time', 'status'
]
writer.writerow(header)
for run in resource.runs:
writer.writerow([
resource.owner.username,
resource.resource_type,
format_checked_datetime(run),
resource.title,
resource.url,
round(run.response_time, 2),
format_run_status(run),
])
return output.getvalue(), 200, {'Content-type': 'text/csv'}
@APP.route('/settings')
def settings():
"""settings"""
pass
@APP.route('/resources')
def resources():
"""lists resources with optional filter"""
resource_type = None
if request.args.get('resource_type') in RESOURCE_TYPES.keys():
resource_type = request.args['resource_type']
tag = request.args.get('tag')
query = request.args.get('q')
response = views.list_resources(resource_type, query, tag)
return render_template('resources.html', response=response)
@APP.route('/resource/<identifier>')
def get_resource_by_id(identifier):
"""show resource"""
response = views.get_resource_by_id(identifier)
return render_template('resource.html', resource=response)
@APP.route('/register', methods=['GET', 'POST'])
def register():
"""register a new user"""
if not CONFIG['GHC_SELF_REGISTER']:
msg1 = gettext('This site is not configured for self-registration')
msg2 = gettext('Please contact')
msg = '%s. %s %s' % (msg1, msg2,
CONFIG['GHC_ADMIN_EMAIL'])
flash('%s' % msg, 'danger')
return render_template('register.html', errmsg=msg)
if request.method == 'GET':
return render_template('register.html')
# Check for existing user or email
user = User.query.filter_by(username=request.form['username']).first()
email = User.query.filter_by(email=request.form['email']).first()
if user or email:
flash('%s' % gettext('Invalid username or email'), 'danger')
return render_template('register.html')
user = User(request.form['username'],
request.form['password'], request.form['email'])
DB.session.add(user)
try:
DB.session.commit()
except Exception as err:
DB.session.rollback()
        bad_column = str(err).split()[2]
bad_value = request.form[bad_column]
msg = gettext('already registered')
flash('%s %s %s' % (bad_column, bad_value, msg), 'danger')
return redirect(url_for('register', lang=g.current_lang))
return redirect(url_for('login', lang=g.current_lang))
@APP.route('/add', methods=['GET', 'POST'])
@login_required
def add():
"""add resource"""
if not g.user.is_authenticated():
return render_template('add.html')
if request.method == 'GET':
return render_template('add.html')
resource_type = request.form['resource_type']
tags = request.form.getlist('tags')
url = request.form['url'].strip()
resources_to_add = []
from healthcheck import sniff_test_resource, run_test_resource
sniffed_resources = sniff_test_resource(CONFIG, resource_type, url)
if not sniffed_resources:
msg = gettext("No resources detected")
        LOGGER.exception(msg)
flash(msg, 'danger')
for (resource_type, resource_url,
title, success, response_time,
message, start_time, resource_tags,) in sniffed_resources:
tags_to_add = []
for tag in chain(tags, resource_tags):
tag_obj = tag
if not isinstance(tag, Tag):
tag_obj = Tag.query.filter_by(name=tag).first()
if tag_obj is None:
tag_obj = Tag(name=tag)
tags_to_add.append(tag_obj)
resource_to_add = Resource(current_user,
resource_type,
title,
resource_url,
tags=tags_to_add)
resources_to_add.append(resource_to_add)
probe_to_add = None
checks_to_add = []
# Always add a default Probe and Check(s)
# from the GHC_PROBE_DEFAULTS conf
if resource_type in CONFIG['GHC_PROBE_DEFAULTS']:
resource_settings = CONFIG['GHC_PROBE_DEFAULTS'][resource_type]
probe_class = resource_settings['probe_class']
if probe_class:
# Add the default Probe
probe_obj = Factory.create_obj(probe_class)
probe_to_add = ProbeVars(
resource_to_add, probe_class,
probe_obj.get_default_parameter_values())
# Add optional default (parameterized)
# Checks to add to this Probe
checks_info = probe_obj.get_checks_info()
checks_param_info = probe_obj.get_plugin_vars()['CHECKS_AVAIL']
for check_class in checks_info:
check_param_info = checks_param_info[check_class]
if 'default' in checks_info[check_class]:
if checks_info[check_class]['default']:
# Filter out params for Check with fixed values
param_defs = check_param_info['PARAM_DEFS']
param_vals = {}
for param in param_defs:
if param_defs[param]['value']:
param_vals[param] = \
param_defs[param]['value']
check_vars = CheckVars(
probe_to_add, check_class, param_vals)
checks_to_add.append(check_vars)
result = run_test_resource(resource_to_add)
run_to_add = Run(resource_to_add, result)
DB.session.add(resource_to_add)
# prepopulate notifications for current user
resource_to_add.set_recipients('email', [g.user.email])
if probe_to_add:
DB.session.add(probe_to_add)
for check_to_add in checks_to_add:
DB.session.add(check_to_add)
DB.session.add(run_to_add)
try:
DB.session.commit()
msg = gettext('Services registered')
flash('%s (%s, %s)' % (msg, resource_type, url), 'success')
except Exception as err:
DB.session.rollback()
flash(str(err), 'danger')
return redirect(url_for('home', lang=g.current_lang))
if len(resources_to_add) == 1:
return edit_resource(resources_to_add[0].identifier)
return redirect(url_for('home', lang=g.current_lang))
@APP.route('/resource/<int:resource_identifier>/update', methods=['POST'])
@login_required
def update(resource_identifier):
"""update a resource"""
update_counter = 0
status = 'success'
try:
resource_identifier_dict = request.get_json()
resource = Resource.query.filter_by(
identifier=resource_identifier).first()
for key, value in resource_identifier_dict.items():
if key == 'tags':
resource_tags = [t.name for t in resource.tags]
tags_to_add = set(value) - set(resource_tags)
tags_to_delete = set(resource_tags) - set(value)
# Existing Tags: create relation else add new Tag
all_tag_objs = Tag.query.all()
for tag in tags_to_add:
tag_add_obj = None
for tag_obj in all_tag_objs:
if tag == tag_obj.name:
# use existing
tag_add_obj = tag_obj
break
if not tag_add_obj:
# add new
tag_add_obj = Tag(name=tag)
DB.session.add(tag_add_obj)
resource.tags.append(tag_add_obj)
for tag in tags_to_delete:
tag_to_delete = Tag.query.filter_by(name=tag).first()
resource.tags.remove(tag_to_delete)
update_counter += 1
elif key == 'probes':
# Remove all existing ProbeVars for Resource
for probe_var in resource.probe_vars:
resource.probe_vars.remove(probe_var)
# Add ProbeVars anew each with optional CheckVars
for probe in value:
LOGGER.info('adding Probe class=%s parms=%s' %
(probe['probe_class'], str(probe)))
probe_vars = ProbeVars(resource, probe['probe_class'],
probe['parameters'])
for check in probe['checks']:
check_vars = CheckVars(
probe_vars, check['check_class'],
check['parameters'])
probe_vars.check_vars.append(check_vars)
resource.probe_vars.append(probe_vars)
update_counter += 1
elif key == 'notify_emails':
resource.set_recipients('email',
[v for v in value if v.strip()])
elif key == 'notify_webhooks':
resource.set_recipients('webhook',
[v for v in value if v.strip()])
elif key == 'auth':
resource.auth = value
elif getattr(resource, key) != resource_identifier_dict[key]:
# Update other resource attrs, mainly 'name'
setattr(resource, key, resource_identifier_dict[key])
min_run_freq = CONFIG['GHC_MINIMAL_RUN_FREQUENCY_MINS']
if int(resource.run_frequency) < min_run_freq:
resource.run_frequency = min_run_freq
update_counter += 1
# Always update geo-IP: maybe failure on creation or
# IP-address of URL may have changed.
latitude, longitude = geocode(resource.url)
if latitude != 0.0 and longitude != 0.0:
# Only update for valid lat/lon
resource.latitude = latitude
resource.longitude = longitude
update_counter += 1
except Exception as err:
LOGGER.error("Cannot update resource: %s", err, exc_info=err)
DB.session.rollback()
status = str(err)
update_counter = 0
# finally:
# DB.session.close()
if update_counter > 0:
err = db_commit()
if err:
status = str(err)
return jsonify({'status': status})
@APP.route('/resource/<int:resource_identifier>/test', methods=['GET', 'POST'])
@login_required
def test(resource_identifier):
"""test a resource"""
resource = Resource.query.filter_by(identifier=resource_identifier).first()
if resource is None:
flash(gettext('Resource not found'), 'danger')
return redirect(request.referrer)
from healthcheck import run_test_resource
result = run_test_resource(
resource)
if request.method == 'GET':
if result.message == 'Skipped':
msg = gettext('INFO')
flash('%s: %s' % (msg, result.message), 'info')
elif result.message not in ['OK', None, 'None']:
msg = gettext('ERROR')
flash('%s: %s' % (msg, result.message), 'danger')
else:
flash(gettext('Resource tested successfully'), 'success')
return redirect(url_for('get_resource_by_id', lang=g.current_lang,
identifier=resource_identifier))
elif request.method == 'POST':
return jsonify(result.get_report())
@APP.route('/resource/<int:resource_identifier>/edit')
@login_required
def edit_resource(resource_identifier):
"""edit a resource"""
resource = Resource.query.filter_by(identifier=resource_identifier).first()
if resource is None:
flash(gettext('Resource not found'), 'danger')
return redirect(request.referrer)
probes_avail = views.get_probes_avail(resource.resource_type, resource)
suggestions = json.dumps(Recipient.get_suggestions('email',
g.user.username))
return render_template('edit_resource.html',
lang=g.current_lang,
resource=resource,
suggestions=suggestions,
auths_avail=ResourceAuth.get_auth_defs(),
probes_avail=probes_avail)
@APP.route('/resource/<int:resource_identifier>/delete')
@login_required
def delete(resource_identifier):
"""delete a resource"""
    resource = Resource.query.filter_by(identifier=resource_identifier).first()
    if resource is None:
        flash(gettext('Resource not found'), 'danger')
        return redirect(url_for('home', lang=g.current_lang))
    if g.user.role != 'admin' and g.user.username != resource.owner.username:
        msg = gettext('You do not have access to delete this resource')
        flash(msg, 'danger')
        return redirect(url_for('get_resource_by_id', lang=g.current_lang,
                                identifier=resource_identifier))
resource.clear_recipients()
DB.session.delete(resource)
try:
DB.session.commit()
flash(gettext('Resource deleted'), 'success')
return redirect(url_for('home', lang=g.current_lang))
except Exception as err:
DB.session.rollback()
flash(str(err), 'danger')
        return redirect(request.referrer)
@APP.route('/probe/<string:probe_class>/<int:resource_identifier>/edit_form')
@APP.route('/probe/<string:probe_class>/edit_form')
@login_required
def get_probe_edit_form(probe_class, resource_identifier=None):
"""get the form to edit a Probe"""
probe_obj = Factory.create_obj(probe_class)
if resource_identifier:
resource = views.get_resource_by_id(resource_identifier)
if resource:
probe_obj._resource = resource
probe_obj.expand_params(resource)
probe_info = probe_obj.get_plugin_vars()
probe_vars = ProbeVars(
None, probe_class, probe_obj.get_default_parameter_values())
# Get only the default Checks for this Probe class
checks_avail = probe_obj.get_checks_info_defaults()
checks_avail = probe_obj.expand_check_vars(checks_avail)
for check_class in checks_avail:
check_obj = Factory.create_obj(check_class)
check_params = check_obj.get_default_parameter_values()
probe_check_param_defs = \
probe_info['CHECKS_AVAIL'][check_class]['PARAM_DEFS']
for param in probe_check_param_defs:
if 'value' in probe_check_param_defs[param]:
check_params[param] = probe_check_param_defs[param]['value']
# Appends 'check_vars' to 'probe_vars' (SQLAlchemy)
CheckVars(probe_vars, check_class, check_params)
return render_template('includes/probe_edit_form.html',
lang=g.current_lang,
probe=probe_vars, probe_info=probe_info)
@APP.route('/check/<string:check_class>/edit_form')
@login_required
def get_check_edit_form(check_class):
"""get the form to edit a Check"""
check_obj = Factory.create_obj(check_class)
check_info = check_obj.get_plugin_vars()
check_vars = CheckVars(
None, check_class, check_obj.get_default_parameter_values())
return render_template('includes/check_edit_form.html',
lang=g.current_lang,
check=check_vars, check_info=check_info)
@APP.route('/login', methods=['GET', 'POST'])
@public_route
def login():
"""login"""
if request.method == 'GET':
return render_template('login.html')
username = request.form['username']
password = request.form['password']
registered_user = User.query.filter_by(username=username).first()
authenticated = False
if registered_user:
# May not have upgraded to pw encryption: warn
if len(registered_user.password) < 80:
msg = 'Please upgrade GHC to encrypted passwords first, see docs!'
flash(gettext(msg), 'danger')
return redirect(url_for('login', lang=g.current_lang))
try:
authenticated = registered_user.authenticate(password)
finally:
pass
if not authenticated:
flash(gettext('Invalid username and / or password'), 'danger')
return redirect(url_for('login', lang=g.current_lang))
# Login ok
login_user(registered_user)
if 'next' in request.args:
return redirect(request.args.get('next'))
return redirect(url_for('home', lang=g.current_lang))
@APP.route('/logout')
def logout():
"""logout"""
logout_user()
flash(gettext('Logged out'), 'success')
if request.referrer:
return redirect(request.referrer)
else:
return redirect(url_for('home', lang=g.current_lang))
@APP.route('/reset_req', methods=['GET', 'POST'])
@public_route
def reset_req():
"""
Reset password request handling.
"""
if request.method == 'GET':
return render_template('reset_password_request.html')
# Reset request form with email
email = request.form['email']
registered_user = User.query.filter_by(email=email).first()
if registered_user is None:
LOGGER.warn('Invalid email for reset_req: %s' % email)
flash(gettext('Invalid email'), 'danger')
return redirect(url_for('reset_req', lang=g.current_lang))
# Generate reset url using user-specific token
token = registered_user.get_token()
reset_url = '%s/reset/%s' % (CONFIG['GHC_SITE_URL'], token)
# Create message body with reset link
msg_body = render_template('reset_password_email.txt',
lang=g.current_lang, config=CONFIG,
reset_url=reset_url,
username=registered_user.username)
try:
from email.mime.text import MIMEText
from email.utils import formataddr
msg = MIMEText(msg_body, 'plain', 'utf-8')
msg['From'] = formataddr((CONFIG['GHC_SITE_TITLE'],
CONFIG['GHC_ADMIN_EMAIL']))
msg['To'] = registered_user.email
msg['Subject'] = '[%s] %s' % (CONFIG['GHC_SITE_TITLE'],
gettext('reset password'))
from_addr = '%s <%s>' % (CONFIG['GHC_SITE_TITLE'],
CONFIG['GHC_ADMIN_EMAIL'])
to_addr = registered_user.email
msg_text = msg.as_string()
send_email(CONFIG['GHC_SMTP'], from_addr, to_addr, msg_text)
except Exception as err:
msg = 'Cannot send email. Contact admin: '
LOGGER.warn(msg + ' err=' + str(err))
flash(gettext(msg) + CONFIG['GHC_ADMIN_EMAIL'], 'danger')
return redirect(url_for('login', lang=g.current_lang))
flash(gettext('Password reset link sent via email'), 'success')
if 'next' in request.args:
return redirect(request.args.get('next'))
return redirect(url_for('home', lang=g.current_lang))
@APP.route('/reset/<token>', methods=['GET', 'POST'])
@public_route
def reset(token=None):
"""
Reset password submit form handling.
"""
# Must have at least a token to proceed.
if token is None:
return redirect(url_for('reset_req', lang=g.current_lang))
# Token received: verify if ok, may also time-out.
registered_user = User.verify_token(token)
if registered_user is None:
LOGGER.warn('Cannot find User from token: %s' % token)
flash(gettext('Invalid token'), 'danger')
return redirect(url_for('login', lang=g.current_lang))
# Token and user ok: return reset form.
if request.method == 'GET':
return render_template('reset_password_form.html')
# Valid token and user: change password from form-value
password = request.form['password']
if not password:
flash(gettext('Password required'), 'danger')
        return redirect(url_for('reset', token=token, lang=g.current_lang))
registered_user.set_password(password)
DB.session.add(registered_user)
try:
DB.session.commit()
flash(gettext('Update password OK'), 'success')
except Exception as err:
msg = 'Update password failed!'
LOGGER.warn(msg + ' err=' + str(err))
DB.session.rollback()
flash(gettext(msg), 'danger')
# Finally redirect user to login page
return redirect(url_for('login', lang=g.current_lang))
#
# REST Interface Calls
#
@APP.route('/api/v1.0/summary')
@APP.route('/api/v1.0/summary/')
@APP.route('/api/v1.0/summary.<content_type>')
def api_summary(content_type='json'):
"""
Get health summary for all Resources within this instance.
"""
health_summary = views.get_health_summary()
# Convert Runs to dict-like structure
for run in ['first_run', 'last_run']:
run_obj = health_summary.get(run, None)
if run_obj:
health_summary[run] = run_obj.for_json()
# Convert Resources failing to dict-like structure
failed_resources = []
for resource in health_summary['failed_resources']:
failed_resources.append(resource.for_json())
health_summary['failed_resources'] = failed_resources
if content_type == 'json':
result = jsonify(health_summary)
else:
result = '<pre>\n%s\n</pre>' % \
render_template('status_report_email.txt',
lang=g.current_lang, summary=health_summary)
return result
@APP.route('/api/v1.0/probes-avail/')
@APP.route('/api/v1.0/probes-avail/<resource_type>')
@APP.route('/api/v1.0/probes-avail/<resource_type>/<int:resource_id>')
def api_probes_avail(resource_type=None, resource_id=None):
"""
Get available (configured) Probes for this
installation, optional for resource type
"""
resource = None
if resource_id:
resource = views.get_resource_by_id(resource_id)
probes = views.get_probes_avail(resource_type=resource_type,
resource=resource)
return jsonify(probes)
@APP.route('/api/v1.0/runs/<int:resource_id>')
@APP.route('/api/v1.0/runs/<int:resource_id>.<content_type>')
@APP.route('/api/v1.0/runs/<int:resource_id>/<int:run_id>')
@APP.route('/api/v1.0/runs/<int:resource_id>/<int:run_id>.<content_type>')
def api_runs(resource_id, run_id=None, content_type='json'):
"""
Get Runs (History of results) for Resource.
"""
if run_id:
runs = [views.get_run_by_id(run_id)]
else:
runs = views.get_run_by_resource_id(resource_id)
run_arr = []
for run in runs:
run_dict = {
'id': run.identifier,
'success': run.success,
'response_time': run.response_time,
'checked_datetime': run.checked_datetime,
'message': run.message,
'report': run.report
}
run_arr.append(run_dict)
runs_dict = {'total': len(run_arr), 'runs': run_arr}
result = 'unknown'
if content_type == 'json':
result = jsonify(runs_dict)
elif content_type == 'html':
result = render_template('includes/runs.html',
lang=g.current_lang, runs=runs_dict['runs'])
return result
if __name__ == '__main__': # run locally, for fun
import sys
HOST = '0.0.0.0'
PORT = 8000
if len(sys.argv) > 1:
HOST, PORT = sys.argv[1].split(':')
APP.run(host=HOST, port=int(PORT), use_reloader=True, debug=True)
|
# coding=utf-8
from __future__ import absolute_import, unicode_literals, division
from moarcofy.models import Photo, Album, User
from moarcofy.forms import PhotoForm, UploadPhotoForm
from moarcofy import app, config
from urllib import unquote_plus
from flask import session, request, url_for, abort, redirect, flash
"""Private routes"""
@app.route("/my/photos/")
@app.template("photo/home.html")
@app.logged_in
def photo_list():
return {"photos": Photo.objects(owner=session["user"]),
"user": session["user"]}
@app.route("/my/photos/new/", methods=["GET", "POST"])
@app.route("/my/photos/add/", methods=["GET", "POST"])
@app.template("photo/form.html")
@app.logged_in
def add_photo():
albums = Album.objects(owner=session["user"])
form = UploadPhotoForm(request.form)
form.albums.choices = [(str(album.id), album.name) for album in albums]
if request.method == "POST" and form.validate():
photo = form.photo_object
photo.name = form.name.data
photo.description = form.description.data
photo.owner = session["user"]
photo.albums = form.albums.data
photo.save()
flash("Your photo has been uploaded!", "success")
return redirect(url_for("edit_photo", id=str(photo.id)))
return {"form": form,
"albums": Album.objects(owner=session["user"])}
@app.route("/my/photos/<id>/", methods=["GET", "POST"])
@app.template("photo/form.html")
@app.logged_in
def edit_photo(id):
try:
photo = Photo.objects.get(id=unquote_plus(id))
if photo.owner != session["user"]:
raise Photo.DoesNotExist
except Photo.DoesNotExist:
return abort(404)
albums = Album.objects(owner=session["user"])
form = PhotoForm(request.form, photo)
form.albums.choices = [(str(album.id), album.name) for album in albums]
if request.method == "POST" and form.validate():
photo.name = form.name.data
photo.description = form.description.data
photo.albums = form.albums.data
photo.save()
flash("Your photo has been modified!", "success")
return redirect(url_for("edit_photo", id=str(photo.id)))
return {"form": form,
"photo": photo,
"albums": albums}
@app.route("/my/photos/<id>/delete/")
@app.logged_in
def delete_photo(id):
try:
photo = Photo.objects.get(id=unquote_plus(id))
if photo.owner != session["user"]:
raise Photo.DoesNotExist
except Photo.DoesNotExist:
return abort(404)
photo.delete()
flash("Your photo has been deleted!", "warning")
return redirect(url_for("photo_list"))
"""Public routes"""
@app.route("/photos/<id>/")
@app.template("photo/list.html")
def public_photos(id):
try:
user = User.objects.get(id=unquote_plus(id))
except User.DoesNotExist:
return abort(404)
return {"user": user,
"photos": Photo.objects(owner=user)}
@app.route("/photo/<id>/")
@app.template("photo/view.html")
def public_photo(id):
try:
photo = Photo.objects.get(id=unquote_plus(id))
if photo.owner is None:
raise User.DoesNotExist
except (Photo.DoesNotExist, User.DoesNotExist):
return abort(404)
return {"user": photo.owner,
"albums": Album.objects(__raw__={"photos": str(photo.id)}),
"photo": photo}
@app.route("/photo/<id>/download/")
def download_photo(id):
try:
photo = Photo.objects.get(id=unquote_plus(id))
except Photo.DoesNotExist:
return abort(404)
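    # Note (added): the original-size image is fetched through an internal
    # test-client request to the static path so the stored MIME type and an
    # attachment filename can be set on the response headers below.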
response = app.test_client().get("/static/{0}/photo/orig/{1}".format(
config["version"]["media"], id), headers=list(request.headers))
response.headers["content-type"] = photo.mime
response.headers["content-disposition"] = \
"attachment; filename={0}".format(photo.filename)
return response
|
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import mock
from heat.engine.clients.os.keystone import fake_keystoneclient as fake_ks
from heat.engine import properties
from heat.engine import resource
from heat.engine.resources.openstack.keystone import service
from heat.engine import stack
from heat.engine import template
from heat.tests import common
from heat.tests import utils
keystone_service_template = {
'heat_template_version': '2015-04-30',
'resources': {
'test_service': {
'type': 'OS::Keystone::Service',
'properties': {
'name': 'test_service_1',
'description': 'Test service',
'type': 'orchestration',
'enabled': False
}
}
}
}
class KeystoneServiceTest(common.HeatTestCase):
def setUp(self):
super(KeystoneServiceTest, self).setUp()
self.ctx = utils.dummy_context()
# Mock client
self.keystoneclient = mock.Mock()
self.patchobject(resource.Resource, 'client',
return_value=fake_ks.FakeKeystoneClient(
client=self.keystoneclient))
self.services = self.keystoneclient.services
# Mock client plugin
self.keystone_client_plugin = mock.MagicMock()
def _setup_service_resource(self, stack_name, use_default=False):
tmpl_data = copy.deepcopy(keystone_service_template)
if use_default:
props = tmpl_data['resources']['test_service']['properties']
del props['name']
del props['enabled']
del props['description']
test_stack = stack.Stack(
self.ctx, stack_name,
template.Template(tmpl_data)
)
r_service = test_stack['test_service']
r_service.client = mock.MagicMock()
r_service.client.return_value = self.keystoneclient
r_service.client_plugin = mock.MagicMock()
r_service.client_plugin.return_value = self.keystone_client_plugin
return r_service
def _get_mock_service(self):
value = mock.MagicMock()
value.id = '477e8273-60a7-4c41-b683-fdb0bc7cd152'
return value
def test_service_handle_create(self):
rsrc = self._setup_service_resource('test_service_create')
mock_service = self._get_mock_service()
self.services.create.return_value = mock_service
# validate the properties
self.assertEqual(
'test_service_1',
rsrc.properties.get(service.KeystoneService.NAME))
self.assertEqual(
'Test service',
rsrc.properties.get(
service.KeystoneService.DESCRIPTION))
self.assertEqual(
'orchestration',
rsrc.properties.get(service.KeystoneService.TYPE))
self.assertFalse(rsrc.properties.get(
service.KeystoneService.ENABLED))
rsrc.handle_create()
# validate service creation
self.services.create.assert_called_once_with(
name='test_service_1',
description='Test service',
type='orchestration',
enabled=False)
# validate physical resource id
self.assertEqual(mock_service.id, rsrc.resource_id)
def test_service_handle_create_default(self):
rsrc = self._setup_service_resource('test_create_with_defaults',
use_default=True)
mock_service = self._get_mock_service()
self.services.create.return_value = mock_service
rsrc.physical_resource_name = mock.MagicMock()
rsrc.physical_resource_name.return_value = 'foo'
# validate the properties
self.assertIsNone(
rsrc.properties.get(service.KeystoneService.NAME))
self.assertIsNone(rsrc.properties.get(
service.KeystoneService.DESCRIPTION))
self.assertEqual(
'orchestration',
rsrc.properties.get(service.KeystoneService.TYPE))
self.assertTrue(rsrc.properties.get(service.KeystoneService.ENABLED))
rsrc.handle_create()
# validate service creation with physical resource name
self.services.create.assert_called_once_with(
name='foo',
description=None,
type='orchestration',
enabled=True)
def test_service_handle_update(self):
rsrc = self._setup_service_resource('test_update')
rsrc.resource_id = '477e8273-60a7-4c41-b683-fdb0bc7cd151'
prop_diff = {service.KeystoneService.NAME: 'test_service_1_updated',
service.KeystoneService.DESCRIPTION:
'Test Service updated',
service.KeystoneService.TYPE: 'heat_updated',
service.KeystoneService.ENABLED: False}
rsrc.handle_update(json_snippet=None,
tmpl_diff=None,
prop_diff=prop_diff)
self.services.update.assert_called_once_with(
service=rsrc.resource_id,
name=prop_diff[service.KeystoneService.NAME],
description=prop_diff[service.KeystoneService.DESCRIPTION],
type=prop_diff[service.KeystoneService.TYPE],
enabled=prop_diff[service.KeystoneService.ENABLED]
)
def test_service_handle_update_default_name(self):
rsrc = self._setup_service_resource('test_update_default_name')
rsrc.resource_id = '477e8273-60a7-4c41-b683-fdb0bc7cd151'
rsrc.physical_resource_name = mock.MagicMock()
rsrc.physical_resource_name.return_value = 'foo'
# Name is reset to None, so default to physical resource name
prop_diff = {service.KeystoneService.NAME: None}
rsrc.handle_update(json_snippet=None,
tmpl_diff=None,
prop_diff=prop_diff)
# validate default name to physical resource name
self.services.update.assert_called_once_with(
service=rsrc.resource_id,
name='foo',
type=None,
description=None,
enabled=None
)
def test_service_handle_update_only_enabled(self):
rsrc = self._setup_service_resource('test_update_enabled_only')
rsrc.resource_id = '477e8273-60a7-4c41-b683-fdb0bc7cd151'
prop_diff = {service.KeystoneService.ENABLED: False}
rsrc.handle_update(json_snippet=None,
tmpl_diff=None,
prop_diff=prop_diff)
self.services.update.assert_called_once_with(
service=rsrc.resource_id,
name=None,
description=None,
type=None,
enabled=prop_diff[service.KeystoneService.ENABLED]
)
def test_properties_title(self):
property_title_map = {
service.KeystoneService.NAME: 'name',
service.KeystoneService.DESCRIPTION: 'description',
service.KeystoneService.TYPE: 'type',
service.KeystoneService.ENABLED: 'enabled'
}
for actual_title, expected_title in property_title_map.items():
self.assertEqual(
expected_title,
actual_title,
'KeystoneService PROPERTIES(%s) title modified.' %
actual_title)
def test_property_name_validate_schema(self):
schema = service.KeystoneService.properties_schema[
service.KeystoneService.NAME]
self.assertTrue(
schema.update_allowed,
'update_allowed for property %s is modified' %
service.KeystoneService.NAME)
self.assertEqual(properties.Schema.STRING,
schema.type,
'type for property %s is modified' %
service.KeystoneService.NAME)
self.assertEqual('Name of keystone service.',
schema.description,
'description for property %s is modified' %
service.KeystoneService.NAME)
def test_property_description_validate_schema(self):
schema = service.KeystoneService.properties_schema[
service.KeystoneService.DESCRIPTION]
self.assertTrue(
schema.update_allowed,
'update_allowed for property %s is modified' %
service.KeystoneService.DESCRIPTION)
self.assertEqual(properties.Schema.STRING,
schema.type,
'type for property %s is modified' %
service.KeystoneService.DESCRIPTION)
self.assertEqual('Description of keystone service.',
schema.description,
'description for property %s is modified' %
service.KeystoneService.DESCRIPTION)
def test_property_type_validate_schema(self):
schema = service.KeystoneService.properties_schema[
service.KeystoneService.TYPE]
self.assertTrue(
schema.update_allowed,
'update_allowed for property %s is modified' %
service.KeystoneService.TYPE)
self.assertTrue(
schema.required,
'required for property %s is modified' %
service.KeystoneService.TYPE)
self.assertEqual(properties.Schema.STRING,
schema.type,
'type for property %s is modified' %
service.KeystoneService.TYPE)
self.assertEqual('Type of keystone Service.',
schema.description,
'description for property %s is modified' %
service.KeystoneService.TYPE)
def test_show_resource(self):
rsrc = self._setup_service_resource('test_show_resource')
moc_service = mock.Mock()
moc_service.to_dict.return_value = {'attr': 'val'}
self.services.get.return_value = moc_service
attributes = rsrc._show_resource()
self.assertEqual({'attr': 'val'}, attributes)
|
#!/usr/bin/python
#-*- coding:utf-8 -*-
__author__ = 'david'
from __builtin__ import *
import gc
import numpy as np
from skimage.feature import greycomatrix, greycoprops
import matplotlib as mpl
mpl.use('TkAgg') # Solve runtime issue
import matplotlib.pyplot as plt
## Fake image and label volumes to quickly test functionality
def loadImg():
return np.random.random_sample((100,100,100))
def loadAtlas():
atlas_volume = np.zeros((100,100,100),dtype=np.uint32)
atlas_volume[10:50,10:50,10:50]=np.ones((40,40,40),dtype=np.uint32)*1
atlas_volume[50:90,10:50,10:50]=np.ones((40,40,40),dtype=np.uint32)*2
atlas_volume[10:50,50:90,10:50]=np.ones((40,40,40),dtype=np.uint32)*3
atlas_volume[50:90,50:90,10:50]=np.ones((40,40,40),dtype=np.uint32)*4
atlas_volume[10:50,10:50,50:90]=np.ones((40,40,40),dtype=np.uint32)*5
atlas_volume[50:90,10:50,50:90]=np.ones((40,40,40),dtype=np.uint32)*6
atlas_volume[10:50,50:90,50:90]=np.ones((40,40,40),dtype=np.uint32)*7
atlas_volume[50:90,50:90,50:90]=np.ones((40,40,40),dtype=np.uint32)*8
return atlas_volume
## END
## True data
# path = "~/Workspaces/claritycontrol/code/data/raw/"
# token = "Fear199"
# pathname = path+token+".img"
#
# img_volume = nib.load(pathname).get_data()[:,:,:,0]
## END
## get atlas values
atlas_volume = loadAtlas()
print atlas_volume.shape
atlas_values, atlas_count = np.unique(atlas_volume,return_counts=True)
atlas_values = atlas_values[1:] # remove background
## get img
img_volume = loadImg()
print img_volume.shape
class_id = 0 # Fear, Control, Cocaine
subject_id = 199
## normalize volume Z-standardization
img_volume = (img_volume-np.mean(img_volume))/np.std(img_volume)
## prepare results matrix
columns = ['class_id', 'subject_id', 'roi', 'mean', 'std', 'energy', 'entropy', 'correlation', 'contrast', 'variance', 'sumMean',
'inertial', 'clusterShade', 'clusterTendency', 'homogeneity', 'maxProbability', 'inverseVariance']
features = np.zeros((len(atlas_values), len(columns)), dtype=np.float32)
## compute GLCM and properties
for roi_id in range(len(atlas_values)):
features[roi_id, 0] = class_id
features[roi_id, 1] = subject_id
features[roi_id, 2] = atlas_values[roi_id]
## mask img and get roi block
mask_volume = (atlas_volume == atlas_values[roi_id])
xs, ys, zs = mask_volume.nonzero()
roi_block = np.multiply(img_volume, mask_volume)[min(xs):max(xs), min(ys):max(ys), min(zs):max(zs)]
del mask_volume # memory collect
## compute mean and std
features[roi_id, 3] = np.mean(roi_block[roi_block != 0])
features[roi_id, 4] = np.std(roi_block[roi_block != 0])
## compute GLCM and properties
# features[roi_id, 5] = 0
# features[roi_id, 6] = 0
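## Added sketch (not part of the original script): one possible way to fill a
## few of the GLCM columns above using the already-imported greycomatrix and
## greycoprops. Assumptions: a single mid-slice of the ROI is used and
## intensities are rescaled to 32 grey levels; distance/angle choices are
## illustrative only.
def glcm_features_sketch(roi_block, levels=32):
    slice2d = roi_block[:, :, roi_block.shape[2] // 2]
    grey = np.uint8((slice2d - slice2d.min()) /
                    (slice2d.ptp() + 1e-12) * (levels - 1))
    glcm = greycomatrix(grey, distances=[1], angles=[0],
                        levels=levels, symmetric=True, normed=True)
    return {'energy': greycoprops(glcm, 'energy')[0, 0],
            'correlation': greycoprops(glcm, 'correlation')[0, 0],
            'contrast': greycoprops(glcm, 'contrast')[0, 0],
            'homogeneity': greycoprops(glcm, 'homogeneity')[0, 0]}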
|
# Copyright (c) 2012 Intel
# Copyright (c) 2012 OpenStack Foundation
# Copyright (c) 2015 EMC Corporation
#
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import math
from oslo_log import log as logging
from cinder.scheduler import filters
LOG = logging.getLogger(__name__)
class CapacityFilter(filters.BaseBackendFilter):
"""Capacity filters based on volume backend's capacity utilization."""
def backend_passes(self, backend_state, filter_properties):
"""Return True if host has sufficient capacity."""
volid = None
# If the volume already exists on this host, don't fail it for
# insufficient capacity (e.g., if we are retyping)
if backend_state.backend_id == filter_properties.get('vol_exists_on'):
return True
spec = filter_properties.get('request_spec')
if spec:
volid = spec.get('volume_id')
grouping = 'cluster' if backend_state.cluster_name else 'host'
if filter_properties.get('new_size'):
# If new_size is passed, we are allocating space to extend a volume
requested_size = (int(filter_properties.get('new_size')) -
int(filter_properties.get('size')))
LOG.debug('Checking if %(grouping)s %(grouping_name)s can extend '
'the volume %(id)s in %(size)s GB',
{'grouping': grouping,
'grouping_name': backend_state.backend_id, 'id': volid,
'size': requested_size})
else:
requested_size = filter_properties.get('size')
LOG.debug('Checking if %(grouping)s %(grouping_name)s can create '
'a %(size)s GB volume (%(id)s)',
{'grouping': grouping,
'grouping_name': backend_state.backend_id, 'id': volid,
'size': requested_size})
# requested_size is 0 means that it's a manage request.
if requested_size == 0:
return True
if backend_state.free_capacity_gb is None:
# Fail Safe
LOG.error("Free capacity not set: "
"volume node info collection broken.")
return False
free_space = backend_state.free_capacity_gb
total_space = backend_state.total_capacity_gb
reserved = float(backend_state.reserved_percentage) / 100
if free_space in ['infinite', 'unknown']:
            # NOTE(zhiteng) For back-ends that cannot report their actual
            # available capacity, we assume they are able to serve the
            # request. Even if they are not, the retry mechanism can
            # handle the failure by rescheduling.
return True
elif total_space in ['infinite', 'unknown']:
# If total_space is 'infinite' or 'unknown' and reserved
# is 0, we assume the back-ends can serve the request.
# If total_space is 'infinite' or 'unknown' and reserved
# is not 0, we cannot calculate the reserved space.
# float(total_space) will throw an exception. total*reserved
# also won't work. So the back-ends cannot serve the request.
if reserved == 0:
return True
LOG.debug("Cannot calculate GB of reserved space (%s%%) with "
"backend's reported total capacity '%s'",
backend_state.reserved_percentage, total_space)
return False
total = float(total_space)
if total <= 0:
LOG.warning("Insufficient free space for volume creation. "
"Total capacity is %(total).2f on %(grouping)s "
"%(grouping_name)s.",
{"total": total,
"grouping": grouping,
"grouping_name": backend_state.backend_id})
return False
# Calculate how much free space is left after taking into account
# the reserved space.
free = free_space - math.floor(total * reserved)
# NOTE(xyang): If 'provisioning:type' is 'thick' in extra_specs,
# we will not use max_over_subscription_ratio and
# provisioned_capacity_gb to determine whether a volume can be
# provisioned. Instead free capacity will be used to evaluate.
thin = True
vol_type = filter_properties.get('volume_type', {}) or {}
provision_type = vol_type.get('extra_specs', {}).get(
'provisioning:type')
if provision_type == 'thick':
thin = False
# Only evaluate using max_over_subscription_ratio if
# thin_provisioning_support is True. Check if the ratio of
# provisioned capacity over total capacity has exceeded over
# subscription ratio.
if (thin and backend_state.thin_provisioning_support and
backend_state.max_over_subscription_ratio >= 1):
provisioned_ratio = ((backend_state.provisioned_capacity_gb +
requested_size) / total)
if provisioned_ratio > backend_state.max_over_subscription_ratio:
msg_args = {
"provisioned_ratio": provisioned_ratio,
"oversub_ratio": backend_state.max_over_subscription_ratio,
"grouping": grouping,
"grouping_name": backend_state.backend_id,
}
LOG.warning(
"Insufficient free space for thin provisioning. "
"The ratio of provisioned capacity over total capacity "
"%(provisioned_ratio).2f has exceeded the maximum over "
"subscription ratio %(oversub_ratio).2f on %(grouping)s "
"%(grouping_name)s.", msg_args)
return False
else:
# Thin provisioning is enabled and projected over-subscription
# ratio does not exceed max_over_subscription_ratio. The host
# passes if "adjusted" free virtual capacity is enough to
# accommodate the volume. Adjusted free virtual capacity is
# the currently available free capacity (taking into account
# of reserved space) which we can over-subscribe.
adjusted_free_virtual = (
free * backend_state.max_over_subscription_ratio)
res = adjusted_free_virtual >= requested_size
if not res:
msg_args = {"available": adjusted_free_virtual,
"size": requested_size,
"grouping": grouping,
"grouping_name": backend_state.backend_id}
LOG.warning("Insufficient free virtual space "
"(%(available)sGB) to accommodate thin "
"provisioned %(size)sGB volume on %(grouping)s"
" %(grouping_name)s.", msg_args)
return res
elif thin and backend_state.thin_provisioning_support:
LOG.warning("Filtering out %(grouping)s %(grouping_name)s "
"with an invalid maximum over subscription ratio "
"of %(oversub_ratio).2f. The ratio should be a "
"minimum of 1.0.",
{"oversub_ratio":
backend_state.max_over_subscription_ratio,
"grouping": grouping,
"grouping_name": backend_state.backend_id})
return False
msg_args = {"grouping_name": backend_state.backend_id,
"grouping": grouping,
"requested": requested_size,
"available": free}
if free < requested_size:
LOG.warning("Insufficient free space for volume creation "
"on %(grouping)s %(grouping_name)s (requested / "
"avail): %(requested)s/%(available)s",
msg_args)
return False
LOG.debug("Space information for volume creation "
"on %(grouping)s %(grouping_name)s (requested / avail): "
"%(requested)s/%(available)s", msg_args)
return True
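# Illustrative sketch (not part of the upstream filter): the thin-provisioning
# arithmetic used in backend_passes(), reduced to plain numbers. The function
# name and the sample values below are hypothetical, not Cinder APIs.
def _sketch_thin_provisioning_check(total_gb, free_gb, reserved_pct,
                                    provisioned_gb, max_ratio, requested_gb):
    """Return True if a hypothetical thin-provisioned backend fits the request."""
    # Free space left after setting aside the reserved percentage.
    free = free_gb - math.floor(total_gb * (reserved_pct / 100.0))
    # Reject if the projected provisioned/total ratio exceeds the allowed ratio.
    if (provisioned_gb + requested_gb) / float(total_gb) > max_ratio:
        return False
    # Otherwise the request fits if the over-subscribed ("virtual") free
    # capacity covers it.
    return free * max_ratio >= requested_gb
# Example: _sketch_thin_provisioning_check(100, 40, 10, 150, 2.0, 10) is True,
# since (150 + 10) / 100 = 1.6 <= 2.0 and (40 - 10) * 2.0 = 60 >= 10.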
|
import os
import logging
from flask import Flask
from slack_sdk.web import WebClient
from slackeventsapi import SlackEventAdapter
from onboarding_tutorial import OnboardingTutorial
# Initialize a Flask app to host the events adapter
app = Flask(__name__)
slack_events_adapter = SlackEventAdapter(os.environ["SLACK_SIGNING_SECRET"], "/slack/events", app)
# Initialize a Web API client
slack_web_client = WebClient(token=os.environ['SLACK_BOT_TOKEN'])
# For simplicity we'll store our app data in-memory with the following data structure.
# onboarding_tutorials_sent = {"channel": {"user_id": OnboardingTutorial}}
onboarding_tutorials_sent = {}
def start_onboarding(user_id: str, channel: str):
# Create a new onboarding tutorial.
onboarding_tutorial = OnboardingTutorial(channel)
# Get the onboarding message payload
message = onboarding_tutorial.get_message_payload()
# Post the onboarding message in Slack
response = slack_web_client.chat_postMessage(**message)
# Capture the timestamp of the message we've just posted so
# we can use it to update the message after a user
# has completed an onboarding task.
onboarding_tutorial.timestamp = response["ts"]
# Store the message sent in onboarding_tutorials_sent
if channel not in onboarding_tutorials_sent:
onboarding_tutorials_sent[channel] = {}
onboarding_tutorials_sent[channel][user_id] = onboarding_tutorial
# ================ Team Join Event =============== #
# When the user first joins a team, the type of the event will be 'team_join'.
# Here we'll link the onboarding_message callback to the 'team_join' event.
@slack_events_adapter.on("team_join")
def onboarding_message(payload):
"""Create and send an onboarding welcome message to new users. Save the
time stamp of this message so we can update this message in the future.
"""
event = payload.get("event", {})
# Get the id of the Slack user associated with the incoming event
user_id = event.get("user", {}).get("id")
    # Open a DM with the new user; conversations.open supersedes the retired
    # im.open method in the Slack Web API.
    response = slack_web_client.conversations_open(users=user_id)
channel = response["channel"]["id"]
# Post the onboarding message.
start_onboarding(user_id, channel)
# ============= Reaction Added Events ============= #
# When a users adds an emoji reaction to the onboarding message,
# the type of the event will be 'reaction_added'.
# Here we'll link the update_emoji callback to the 'reaction_added' event.
@slack_events_adapter.on("reaction_added")
def update_emoji(payload):
"""Update the onboarding welcome message after receiving a "reaction_added"
event from Slack. Update timestamp for welcome message as well.
"""
event = payload.get("event", {})
channel_id = event.get("item", {}).get("channel")
user_id = event.get("user")
if channel_id not in onboarding_tutorials_sent:
return
# Get the original tutorial sent.
onboarding_tutorial = onboarding_tutorials_sent[channel_id][user_id]
# Mark the reaction task as completed.
onboarding_tutorial.reaction_task_completed = True
# Get the new message payload
message = onboarding_tutorial.get_message_payload()
# Post the updated message in Slack
updated_message = slack_web_client.chat_update(**message)
# Update the timestamp saved on the onboarding tutorial object
onboarding_tutorial.timestamp = updated_message["ts"]
# =============== Pin Added Events ================ #
# When a users pins a message the type of the event will be 'pin_added'.
# Here we'll link the update_pin callback to the 'pin_added' event.
@slack_events_adapter.on("pin_added")
def update_pin(payload):
"""Update the onboarding welcome message after receiving a "pin_added"
event from Slack. Update timestamp for welcome message as well.
"""
event = payload.get("event", {})
channel_id = event.get("channel_id")
user_id = event.get("user")
# Get the original tutorial sent.
onboarding_tutorial = onboarding_tutorials_sent[channel_id][user_id]
# Mark the pin task as completed.
onboarding_tutorial.pin_task_completed = True
# Get the new message payload
message = onboarding_tutorial.get_message_payload()
# Post the updated message in Slack
updated_message = slack_web_client.chat_update(**message)
# Update the timestamp saved on the onboarding tutorial object
onboarding_tutorial.timestamp = updated_message["ts"]
# ============== Message Events ============= #
# When a user sends a DM, the event type will be 'message'.
# Here we'll link the message callback to the 'message' event.
@slack_events_adapter.on("message")
def message(payload):
"""Display the onboarding welcome message after receiving a message
that contains "start".
"""
event = payload.get("event", {})
channel_id = event.get("channel")
user_id = event.get("user")
text = event.get("text")
if text and text.lower() == "start":
return start_onboarding(user_id, channel_id)
if __name__ == "__main__":
logger = logging.getLogger()
logger.setLevel(logging.DEBUG)
logger.addHandler(logging.StreamHandler())
app.run(port=3000)
|
#! /usr/bin/env python
"""
Build a JSON file with geo-locations from plane crash places in the DB.
"""
import os
import time
import json
import random
import geopy
from geopy.geocoders import Nominatim
import pandas as pd
from planecrashinfo_light import clean_database
geolocator = Nominatim()
def location_to_latlon(place):
"""
Get lat/lon dict or None for some place.
"""
try:
# Can raise geopy.exc.GeocoderServiceError,
# so be gentle and give it some time to breathe.
location = geolocator.geocode(place)
time.sleep(random.random())
except: # geopy.exc.GeocoderTimedOut:
print('* ' + place, None)
return None
if location is None:
print('* ' + place, None)
return None
print(place, (location.latitude, location.longitude))
return {'lat': location.latitude, 'lon': location.longitude}
def get_geolocations(df, previous=None):
"""
    Return a dict with lat/lon for origins and destinations of one df.
{
'Bergen': {'lat': 60.3943532, 'lon': 5.325551},
'Cairo': {'lat': 30.0488185, 'lon': 31.2436663},
'Moroni Hahaya': None,
...
}
"""
res = previous or {}
for name in ['Origin', 'Destination']:
s = df.groupby(name).size().sort_values(ascending=True)
for loc, count in s.items():
# ignore unspecific place
if loc in ('Sightseeing', 'Training', 'Test flight', 'Military exercises', 'aerial survelliance'):
print('* Ignoring: %s' % loc)
continue
# ignore known place
            if res.get(loc, None) is not None:
print('* Already found: %s' % loc)
else:
latlon = location_to_latlon(loc)
res[loc] = latlon
return res
def build():
"""
    Build a file with geolocations for places in the database.
"""
geolocs = {}
geolocs_path = 'data/geolocs.json'
if os.path.exists(geolocs_path):
        geolocs = json.load(open(geolocs_path))
print('Starting with %d' % len(geolocs))
for y in range(1921, 2017):
path = 'data/%d_original.csv' % y
print('Loading %s' % path)
df = pd.read_csv(path)
df = clean_database(df)
geolocs.update(get_geolocations(df, geolocs))
        json.dump(geolocs, open(geolocs_path, 'w'), indent=4)
print('Saved %d to %s\n' % (len(geolocs), geolocs_path))
if __name__ == '__main__':
build()
|
"""
data processing and playing with pandexo results
Horizontal Bin: 100ppm
data should be a combination between NIRSpec and MIRI
detection could indicate if the data is from NIRSpec or MIRI.
3 sigma detection: signal1-signal2 > 3*(signal1_error+signal2_error)
"""
"""
['FinalSpectrum', 'OriginalInput', 'timing_div', 'warnings_div',
'PandeiaOutTrans', 'input_div', 'warning', 'RawData', 'timing', 'input']
FinalSpectrum
['spectrum_w_rand', 'spectrum', 'error_w_floor', 'wave']
OriginalInput
['model_spec', 'model_wave']
timing_div warnings_div PandeiaOutTrans
['sub_reports', 'information', 'warnings', 'transform', '2d', 'scalar', '1d', 'input']
input_div warning
['Num Groups Reset?', 'Group Number Too Low?', 'Group Number Too High?', 'Saturated?', 'Non linear?', '% full well high?']
RawData
['var_in', 'rn[out,in]', 'electrons_out', 'e_rate_in', 'wave', 'electrons_in', 'error_no_floor', 'bkg[out,in]', 'e_rate_out', 'var_out']
timing
['Seconds per Frame', 'Number of Transits', 'Time/Integration incl reset (sec)', 'Observing Efficiency (%)', 'Num Integrations Out of Transit', 'Num Integrations In Transit', 'APT: Num Integrations per Occultation', 'APT: Num Groups per Integration', 'Transit+Baseline, no overhead (hrs)', 'Transit Duration']
input
['Target Mag', 'Readmode', 'Instrument', 'Disperser', 'Filter', 'Calculation Type', 'Mode', 'Saturation Level (electons)', 'Aperture', 'Subarray', 'Primary/Secondary']
"""
import pickle
import pprint
import numpy as np
import matplotlib.pyplot as plt
import pandexo.engine.justdoit as jdi
import pandexo.engine.justplotit as jpi
def load_pandexo(instrument,filename):
if type(instrument) == str:
pkl_file = open(filename, 'rb')
data = pickle.load(pkl_file)
wave_ori = data["OriginalInput"]["model_wave"]
spec_ori = data["OriginalInput"]["model_spec"]#*10**6
wave = data["FinalSpectrum"]["wave"]
spec = data["FinalSpectrum"]["spectrum"]
error = data["FinalSpectrum"]["error_w_floor"]
return wave_ori,spec_ori,wave,spec,error
if len(instrument) == 1:
pkl_file = open(filename, 'rb')
data = pickle.load(pkl_file)
        # Inspect the available keys for the single requested instrument.
        print data[0][instrument[0]]["FinalSpectrum"].keys()
wave_ori = data[0][instrument[0]]["OriginalInput"]["model_wave"]
spec_ori = data[0][instrument[0]]["OriginalInput"]["model_spec"]#*10**6
wave = data[0][instrument[0]]["FinalSpectrum"]["wave"]
spec = data[0][instrument[0]]["FinalSpectrum"]["spectrum"]
error = data[0][instrument[0]]["FinalSpectrum"]["error_w_floor"]
return wave_ori,spec_ori,wave,spec,error
else:
pkl_file1 = open(filename, 'rb')
data = pickle.load(pkl_file1)
wave_ori = []
spec_ori = []
wave = []
spec = []
error = []
for i,inst in enumerate(instrument):
wave_ori = np.concatenate([wave_ori,data[i][inst]["OriginalInput"]["model_wave"]])
            spec_ori = np.concatenate([spec_ori,data[i][inst]["OriginalInput"]["model_spec"]])
            wave = np.concatenate([wave,data[i][inst]["FinalSpectrum"]["wave"]])
            spec = np.concatenate([spec,data[i][inst]["FinalSpectrum"]["spectrum"]])
            error = np.concatenate([error,data[i][inst]["FinalSpectrum"]["error_w_floor"]])
return wave_ori,spec_ori,wave,spec,error
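# Hedged sketch of the 3-sigma criterion stated in the module docstring
# ("signal1-signal2 > 3*(signal1_error+signal2_error)"). The function name is
# illustrative and not part of pandexo itself.
def three_sigma_detection_mask(spec1, err1, spec2, err2):
    """Return a boolean array marking bins where two spectra differ at 3 sigma."""
    spec1, err1 = np.asarray(spec1), np.asarray(err1)
    spec2, err2 = np.asarray(spec2), np.asarray(err2)
    return (spec2 - spec1) > 3.0 * (err1 + err2)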
def play_with_pandexo_output():
file1 = "ETC.p"
file2 = "ETC-2.p"
    # load_pandexo takes (instrument, filename); a plain string instrument
    # selects the single-file branch, which ignores its value.
    wave_ori1,spec_ori1,wave1,spec1,error1 = load_pandexo("", file1)
    wave_ori2,spec_ori2,wave2,spec2,error2 = load_pandexo("", file2)
plt.plot(wave_ori1,spec_ori1)
plt.plot(wave_ori2,spec_ori2)
#plt.errorbar(wave1,spec1,yerr=error1)
plt.show()
def display_pandexo_output():
file1 = "Test_multi_instrument.p"
file1 = "Test_NIRSpec_instrument2.p"
instrument = ""#['NIRSpec Prism']#,'MIRI LRS']
#instrument = ['NIRSpec G140M']#, 'NIRSpec G235M','NIRSpec G395M']
#run.run_pandexo(file1)
pkl_file = open(file1, 'rb')
out = pickle.load(pkl_file)
"""
num_tran = 100
R = 100
ntran_old = out['timing']['Number of Transits']
to = out['timing']["Num Integrations Out of Transit"]
ti = out['timing']["Num Integrations In Transit"]
#remove any nans
y = out['FinalSpectrum']['spectrum_w_rand']
x = out['FinalSpectrum']['wave'][~np.isnan(y)]
err = out['FinalSpectrum']['error_w_floor'][~np.isnan(y)]
y = y[~np.isnan(y)]
new_wave = jpi.bin_wave_to_R(x, R)
print out['RawData']['electrons_in']
out = jpi.uniform_tophat_sum(new_wave,x, out['RawData']['electrons_out']*num_tran/ntran_old)
inn = jpi.uniform_tophat_sum(new_wave,x, out['RawData']['electrons_in']*num_tran/ntran_old)
return
vout = jpi.uniform_tophat_sum(new_wave,x, out['RawData']['var_out']*num_tran/ntran_old)
vin = jpi.uniform_tophat_sum(new_wave,x, out['RawData']['var_in']*num_tran/ntran_old)
var_tot = (to/ti/out)**2.0 * vin + (inn*to/ti/out**2.0)**2.0 * vout
if dict['input']['Primary/Secondary']=='fp/f*':
fac = -1.0
else:
fac = 1.0
rand_noise = np.sqrt((var_tot))*(np.random.randn(len(new_wave)))
raw_spec = (out/to-inn/ti)/(out/to)
sim_spec = fac*(raw_spec + rand_noise )
x = new_wave
y = sim_spec
err = np.sqrt(var_tot)
"""
x,y, e = jpi.jwst_1d_spec(out, R=50, num_tran=100, model=True, x_range=[.8,11.28])
#wave_ori1,spec_ori1,wave1,spec1,error1 = load_pandexo(instrument, file1)
#plt.plot(wave_ori1,spec_ori1)
#plt.errorbar(wave1,spec1,yerr=error1)
plt.show()
def display_compare():
file1 = "ref_trans_sim.p"
file2 = "bio_trans_sim_c.p"
Bins = 100
Tran = 100
out1 = pickle.load(open(file1, 'rb'))
out2 = pickle.load(open(file2, 'rb'))
m1,n1,x1,y1,e1 = jpi.jwst_1d_spec(out1, R=Bins, num_tran=Tran, model=True, x_range=[1,13])
m2,n2,x2,y2,e2 = jpi.jwst_1d_spec(out2, R=Bins, num_tran=Tran, model=True, x_range=[1,13])
plt.plot(m1,n1, label="base_ref")
plt.errorbar(x1,y1,e1,fmt="o",markersize='5', label="base_obs")
plt.plot(m2,n2, label="bio_ref")
plt.errorbar(x2,y2,e2,fmt="*",markersize='5', label="bio_obs")
plt.legend()
plt.title("Simulated Exoplanet Atmosphere Observed Spectra using Pandexo ETC \nwith %s bins and %s transits"%(Bins,Tran))
plt.xlabel('Wavelength [microns]')
plt.ylabel('fp/f*')
plt.show()
"""
"""
#detection = y2-y1# +3*(e1+e2)
x1 = np.array(x1[0])
y1 = np.array(y1[0])
y2 = np.array(y2[0])
e1 = np.array(e1[0])
e2 = np.array(e2[0])
detection = y2-y1# +3*(e1+e2)
error = 3*(e1+e2)
"""
plt.errorbar(x1,detection,error)
plt.title("Simulated Exoplanet Atmosphere Detection with %s bins and %s transits"%(Bins,Tran))
plt.xlabel('Wavelength [microns]')
plt.ylabel('fp/f*')
plt.show()
"""
detected_x = []
detected_y1 = []
detected_y2 = []
for i,bin in enumerate(detection-error):
if bin > 0:
print x1[i],"detected"
detected_x.append(x1[i])
detected_y1.append(y1[i])
detected_y2.append(y2[i])
plt.title("Simulated Exoplanet Atmosphere Detection with %s bins and %s transits at 3 Sigma"%(Bins,Tran))
plt.xlabel('Wavelength [microns]')
plt.ylabel('fp/f*')
plt.plot(m1,n1)
plt.plot(m2,n2)
plt.plot(detected_x,detected_y1,".")
plt.plot(detected_x,detected_y2,".")
plt.show()
# next, find local maximum...
if __name__ == "__main__":
display_compare()
|
##
# Copyright 2009-2020 Ghent University
#
# This file is part of EasyBuild,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://www.vscentrum.be),
# Flemish Research Foundation (FWO) (http://www.fwo.be/en)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# https://github.com/easybuilders/easybuild
#
# EasyBuild is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# EasyBuild is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
##
"""
EasyBuild support for MTL4, implemented as an easyblock
@author: Kenneth Hoste (Ghent University)
"""
import os
from easybuild.easyblocks.generic.tarball import Tarball
class EB_MTL4(Tarball):
"""Support for installing MTL4."""
def sanity_check_step(self):
"""Custom sanity check for MTL4."""
incpref = os.path.join('include', 'boost', 'numeric')
custom_paths = {
'files': [],
'dirs': [os.path.join(incpref, x) for x in ["itl", "linear_algebra", "meta_math", "mtl"]],
}
super(EB_MTL4, self).sanity_check_step(custom_paths=custom_paths)
def make_module_req_guess(self):
"""Adjust CPATH for MTL4."""
guesses = super(EB_MTL4, self).make_module_req_guess()
guesses.update({'CPATH': 'include'})
return guesses
|
from .util import cli_run
def test_list(client, first_label):
output = cli_run(client, ['label', 'list'])
assert first_label.short_id in output
output = cli_run(client, ['label', 'list', first_label.short_id])
assert first_label.short_id in output
def test_sensors(client, tmp_label, tmp_sensor):
output = cli_run(client, ['label', 'sensor', tmp_label.id])
assert output is not None
output = cli_run(client, ['label', 'sensor', tmp_label.id,
'--add', tmp_sensor.short_id])
assert tmp_sensor.short_id in output
output = cli_run(client, ['label', 'sensor', tmp_label.id,
'--remove', tmp_sensor.short_id])
assert tmp_sensor.short_id not in output
output = cli_run(client, ['label', 'sensor', tmp_label.id,
'--replace', tmp_sensor.short_id])
assert tmp_sensor.short_id in output
output = cli_run(client, ['label', 'sensor', tmp_label.id,
'--replace', 'none'])
assert tmp_sensor.short_id not in output
def test_elements(client, tmp_label, first_element):
output = cli_run(client, ['label', 'element', tmp_label.id])
assert output is not None
output = cli_run(client, ['label', 'element', tmp_label.id,
'--add', first_element.short_id])
assert first_element.short_id in output
output = cli_run(client, ['label', 'element', tmp_label.id,
'--remove', first_element.short_id])
assert first_element.short_id not in output
output = cli_run(client, ['label', 'element', tmp_label.id,
'--replace', first_element.short_id])
assert first_element.short_id in output
output = cli_run(client, ['label', 'element', tmp_label.id,
'--replace', 'none'])
assert first_element.short_id not in output
def test_create_delete(client, first_sensor, first_element):
output = cli_run(client, ['label', 'create', 'test_label',
'--sensors', first_sensor.short_id,
'--elements', first_element.short_id])
assert 'test_label' in output
output = cli_run(client, ['label', 'update',
'test_label',
'--name', 'renamed_label'])
assert 'renamed_label' in output
output = cli_run(client, ['label', 'delete', 'renamed_label'])
assert output.startswith('Deleted')
def test_metadata(client, tmp_label):
output = cli_run(client, ['label', 'metadata', 'list',
tmp_label.short_id])
assert output is not None
output = cli_run(client, ['label', 'metadata', 'update',
tmp_label.short_id,
'{"test": 42}'])
assert "42" in output
output = cli_run(client, ['label', 'list',
'--metadata', '{"test": 42}'])
assert tmp_label.short_id in output
output = cli_run(client, ['label', 'metadata', 'replace',
tmp_label.short_id,
'{}'])
assert "42" not in output
|
from django import forms
from django.conf import settings
from django.http import HttpResponseRedirect
from django.views.decorators.cache import never_cache
from django.contrib.auth import authenticate, REDIRECT_FIELD_NAME
from django.contrib.formtools.wizard import FormWizard
from pyppp.django import login
from pyppp.django.models import UserPPP
class UserFormBase(forms.Form):
def __init__(self, *args, **kwargs):
self.user_cache = None
super(UserFormBase, self).__init__(*args, **kwargs)
def get_user_id(self):
if self.user_cache:
return self.user_cache.id
return None
def get_user(self):
return self.user_cache
class AuthenticationForm(UserFormBase):
username = forms.CharField(max_length=30)
password = forms.CharField(widget=forms.PasswordInput)
def clean(self):
username = self.cleaned_data.get('username')
password = self.cleaned_data.get('password')
if username and password:
self.user_cache = authenticate(username=username, password=password)
if self.user_cache is None:
raise forms.ValidationError('Please enter a correct username and password. Note that both fields are case-sensitive.')
elif not self.user_cache.is_active:
raise forms.ValidationError('This account is inactive')
return self.cleaned_data
class PasscodeForm(UserFormBase):
username = forms.CharField(max_length=30)
passcode = forms.CharField(max_length=4)
card = forms.CharField(max_length=8)
code = forms.CharField(max_length=8)
def __init__(self, *args, **kwargs):
super(PasscodeForm, self).__init__(*args, **kwargs)
self.fields['username'].widget.attrs['readonly'] = True
self.fields['card'].widget.attrs['readonly'] = True
self.fields['code'].widget.attrs['readonly'] = True
def clean(self):
if self.user_cache is not None:
return self.cleaned_data
username = self.cleaned_data.get('username')
passcode = self.cleaned_data.get('passcode')
if username and passcode:
self.user_cache = authenticate(username=username, passcode=passcode)
if self.user_cache is None:
raise forms.ValidationError('Incorrect passcode.')
return self.cleaned_data
class LoginWizard(FormWizard):
def parse_params(self, request, *args, **kwargs):
current_step = self.determine_step(request, *args, **kwargs)
if request.method == 'POST' and current_step == 0:
request.session.set_test_cookie()
form = self.get_form(current_step, request.POST)
if form.is_valid():
ppp, created = UserPPP.objects.get_or_create(user=form.user_cache)
passcode_info = ppp.get_current_sequence_info()
self.initial[(current_step + 1)] = {
'username': form.cleaned_data.get('username'),
'card': passcode_info['card'],
'code': '%s%s' % (passcode_info['row'], passcode_info['column'])
}
def get_template(self, step):
return 'pyppp/form.html'
def done(self, request, form_list):
if not request.session.test_cookie_worked():
print "Your Web browser doesn't appear to have cookies enabled. Cookies are required for logging in."
redirect_to = request.REQUEST.get(REDIRECT_FIELD_NAME, '')
if not redirect_to or '//' in redirect_to or ' ' in redirect_to:
redirect_to = settings.LOGIN_REDIRECT_URL
login(request, form_list[1].get_user())
return HttpResponseRedirect(redirect_to)
|
# -*- coding: utf-8 -*-
# Copyright 2010 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Static data and helper functions."""
from __future__ import absolute_import
import collections
import errno
import logging
import math
import multiprocessing
import os
import pkgutil
import re
import struct
import sys
import tempfile
import textwrap
import threading
import traceback
import xml.etree.ElementTree as ElementTree
import boto
from boto import config
import boto.auth
from boto.exception import NoAuthHandlerFound
from boto.gs.connection import GSConnection
from boto.provider import Provider
from boto.pyami.config import BotoConfigLocations
import httplib2
from oauth2client.client import HAS_CRYPTO
from retry_decorator import retry_decorator
import gslib
from gslib.exception import CommandException
from gslib.storage_url import StorageUrlFromString
from gslib.translation_helper import AclTranslation
from gslib.translation_helper import GenerationFromUrlAndString
from gslib.translation_helper import S3_ACL_MARKER_GUID
from gslib.translation_helper import S3_DELETE_MARKER_GUID
from gslib.translation_helper import S3_MARKER_GUIDS
# Detect platform types.
PLATFORM = str(sys.platform).lower()
IS_WINDOWS = 'win32' in PLATFORM
IS_CYGWIN = 'cygwin' in PLATFORM
IS_LINUX = 'linux' in PLATFORM
IS_OSX = 'darwin' in PLATFORM
# pylint: disable=g-import-not-at-top
if IS_WINDOWS:
from ctypes import c_int
from ctypes import c_uint64
from ctypes import c_char_p
from ctypes import c_wchar_p
from ctypes import windll
from ctypes import POINTER
from ctypes import WINFUNCTYPE
from ctypes import WinError
# pylint: disable=g-import-not-at-top
try:
# This module doesn't necessarily exist on Windows.
import resource
HAS_RESOURCE_MODULE = True
except ImportError, e:
HAS_RESOURCE_MODULE = False
ONE_KIB = 1024
ONE_MIB = 1024 * 1024
TWO_MIB = 2 * ONE_MIB
EIGHT_MIB = 8 * ONE_MIB
TEN_MIB = 10 * ONE_MIB
DEFAULT_FILE_BUFFER_SIZE = 8 * ONE_KIB
_DEFAULT_LINES = 25
# By default, the timeout for SSL read errors is infinite. This could
# cause gsutil to hang on network disconnect, so pick a more reasonable
# timeout.
SSL_TIMEOUT = 60
# Start with a progress callback every 64 KiB during uploads/downloads (JSON
# API). Callback implementation should back off until it hits the maximum size
# so that callbacks do not create huge amounts of log output.
START_CALLBACK_PER_BYTES = 1024*64
MAX_CALLBACK_PER_BYTES = 1024*1024*100
# Upload/download files in 8 KiB chunks over the HTTP connection.
TRANSFER_BUFFER_SIZE = 1024*8
# Default number of progress callbacks during transfer (XML API).
XML_PROGRESS_CALLBACKS = 10
# For files >= this size, output a message indicating that we're running an
# operation on the file (like hashing or gzipping) so it does not appear to the
# user that the command is hanging.
MIN_SIZE_COMPUTE_LOGGING = 100*1024*1024 # 100 MiB
NO_MAX = sys.maxint
UTF8 = 'utf-8'
VERSION_MATCHER = re.compile(r'^(?P<maj>\d+)(\.(?P<min>\d+)(?P<suffix>.*))?')
RELEASE_NOTES_URL = 'https://pub.storage.googleapis.com/gsutil_ReleaseNotes.txt'
# Binary exponentiation strings.
_EXP_STRINGS = [
(0, 'B', 'bit'),
(10, 'KiB', 'Kibit', 'K'),
(20, 'MiB', 'Mibit', 'M'),
(30, 'GiB', 'Gibit', 'G'),
(40, 'TiB', 'Tibit', 'T'),
(50, 'PiB', 'Pibit', 'P'),
(60, 'EiB', 'Eibit', 'E'),
]
global manager # pylint: disable=global-at-module-level
certs_file_lock = threading.Lock()
configured_certs_files = []
def _GenerateSuffixRegex():
"""Creates a suffix regex for human-readable byte counts."""
human_bytes_re = r'(?P<num>\d*\.\d+|\d+)\s*(?P<suffix>%s)?'
suffixes = []
suffix_to_si = {}
for i, si in enumerate(_EXP_STRINGS):
si_suffixes = [s.lower() for s in list(si)[1:]]
for suffix in si_suffixes:
suffix_to_si[suffix] = i
suffixes.extend(si_suffixes)
human_bytes_re %= '|'.join(suffixes)
matcher = re.compile(human_bytes_re)
return suffix_to_si, matcher
SUFFIX_TO_SI, MATCH_HUMAN_BYTES = _GenerateSuffixRegex()
SECONDS_PER_DAY = 3600 * 24
# On Unix-like systems, we will set the maximum number of open files to avoid
# hitting the limit imposed by the OS. This number was obtained experimentally.
MIN_ACCEPTABLE_OPEN_FILES_LIMIT = 1000
GSUTIL_PUB_TARBALL = 'gs://pub/gsutil.tar.gz'
Retry = retry_decorator.retry # pylint: disable=invalid-name
# Cache the values from this check such that they're available to all callers
# without needing to run all the checks again (some of these, such as calling
# multiprocessing.Manager(), are expensive operations).
cached_multiprocessing_is_available = None
cached_multiprocessing_check_stack_trace = None
cached_multiprocessing_is_available_message = None
# Enum class for specifying listing style.
class ListingStyle(object):
SHORT = 'SHORT'
LONG = 'LONG'
LONG_LONG = 'LONG_LONG'
def UsingCrcmodExtension(crcmod):
return (getattr(crcmod, 'crcmod', None) and
getattr(crcmod.crcmod, '_usingExtension', None))
def CheckFreeSpace(path):
"""Return path/drive free space (in bytes)."""
if IS_WINDOWS:
try:
# pylint: disable=invalid-name
get_disk_free_space_ex = WINFUNCTYPE(c_int, c_wchar_p,
POINTER(c_uint64),
POINTER(c_uint64),
POINTER(c_uint64))
get_disk_free_space_ex = get_disk_free_space_ex(
('GetDiskFreeSpaceExW', windll.kernel32), (
(1, 'lpszPathName'),
(2, 'lpFreeUserSpace'),
(2, 'lpTotalSpace'),
(2, 'lpFreeSpace'),))
except AttributeError:
get_disk_free_space_ex = WINFUNCTYPE(c_int, c_char_p,
POINTER(c_uint64),
POINTER(c_uint64),
POINTER(c_uint64))
get_disk_free_space_ex = get_disk_free_space_ex(
('GetDiskFreeSpaceExA', windll.kernel32), (
(1, 'lpszPathName'),
(2, 'lpFreeUserSpace'),
(2, 'lpTotalSpace'),
(2, 'lpFreeSpace'),))
def GetDiskFreeSpaceExErrCheck(result, unused_func, args):
if not result:
raise WinError()
return args[1].value
get_disk_free_space_ex.errcheck = GetDiskFreeSpaceExErrCheck
return get_disk_free_space_ex(os.getenv('SystemDrive'))
else:
(_, f_frsize, _, _, f_bavail, _, _, _, _, _) = os.statvfs(path)
return f_frsize * f_bavail
def CreateDirIfNeeded(dir_path, mode=0777):
"""Creates a directory, suppressing already-exists errors."""
if not os.path.exists(dir_path):
try:
# Unfortunately, even though we catch and ignore EEXIST, this call will
# output a (needless) error message (no way to avoid that in Python).
os.makedirs(dir_path, mode)
# Ignore 'already exists' in case user tried to start up several
# resumable uploads concurrently from a machine where no tracker dir had
# yet been created.
except OSError as e:
if e.errno != errno.EEXIST:
raise
def DivideAndCeil(dividend, divisor):
"""Returns ceil(dividend / divisor).
Takes care to avoid the pitfalls of floating point arithmetic that could
otherwise yield the wrong result for large numbers.
Args:
dividend: Dividend for the operation.
divisor: Divisor for the operation.
Returns:
Quotient.
"""
quotient = dividend // divisor
if (dividend % divisor) != 0:
quotient += 1
return quotient
def GetGsutilStateDir():
"""Returns the location of the directory for gsutil state files.
Certain operations, such as cross-process credential sharing and
resumable transfer tracking, need a known location for state files which
are created by gsutil as-needed.
This location should only be used for storing data that is required to be in
a static location.
Returns:
Path to directory for gsutil static state files.
"""
config_file_dir = config.get(
'GSUtil', 'state_dir',
os.path.expanduser(os.path.join('~', '.gsutil')))
CreateDirIfNeeded(config_file_dir)
return config_file_dir
def GetCredentialStoreFilename():
return os.path.join(GetGsutilStateDir(), 'credstore')
def GetGceCredentialCacheFilename():
return os.path.join(GetGsutilStateDir(), 'gcecredcache')
def GetTabCompletionLogFilename():
return os.path.join(GetGsutilStateDir(), 'tab-completion-logs')
def GetTabCompletionCacheFilename():
tab_completion_dir = os.path.join(GetGsutilStateDir(), 'tab-completion')
# Limit read permissions on the directory to owner for privacy.
CreateDirIfNeeded(tab_completion_dir, mode=0700)
return os.path.join(tab_completion_dir, 'cache')
def PrintTrackerDirDeprecationWarningIfNeeded():
# TODO: Remove this along with the tracker_dir config value 1 year after
# 4.6 release date. Use state_dir instead.
if config.has_option('GSUtil', 'resumable_tracker_dir'):
sys.stderr.write('Warning: you have set resumable_tracker_dir in your '
'.boto configuration file. This configuration option is '
'deprecated; please use the state_dir configuration '
'option instead.\n')
# Name of file where we keep the timestamp for the last time we checked whether
# a new version of gsutil is available.
PrintTrackerDirDeprecationWarningIfNeeded()
CreateDirIfNeeded(GetGsutilStateDir())
LAST_CHECKED_FOR_GSUTIL_UPDATE_TIMESTAMP_FILE = (
os.path.join(GetGsutilStateDir(), '.last_software_update_check'))
def HasConfiguredCredentials():
"""Determines if boto credential/config file exists."""
has_goog_creds = (config.has_option('Credentials', 'gs_access_key_id') and
config.has_option('Credentials', 'gs_secret_access_key'))
has_amzn_creds = (config.has_option('Credentials', 'aws_access_key_id') and
config.has_option('Credentials', 'aws_secret_access_key'))
has_oauth_creds = (
config.has_option('Credentials', 'gs_oauth2_refresh_token'))
has_service_account_creds = (
HAS_CRYPTO and
config.has_option('Credentials', 'gs_service_client_id') and
config.has_option('Credentials', 'gs_service_key_file'))
if (has_goog_creds or has_amzn_creds or has_oauth_creds or
has_service_account_creds):
return True
valid_auth_handler = None
try:
valid_auth_handler = boto.auth.get_auth_handler(
GSConnection.DefaultHost, config, Provider('google'),
requested_capability=['s3'])
# Exclude the no-op auth handler as indicating credentials are configured.
# Note we can't use isinstance() here because the no-op module may not be
# imported so we can't get a reference to the class type.
if getattr(getattr(valid_auth_handler, '__class__', None),
'__name__', None) == 'NoOpAuth':
valid_auth_handler = None
except NoAuthHandlerFound:
pass
return valid_auth_handler
def ConfigureNoOpAuthIfNeeded():
"""Sets up no-op auth handler if no boto credentials are configured."""
if not HasConfiguredCredentials():
if (config.has_option('Credentials', 'gs_service_client_id')
and not HAS_CRYPTO):
if os.environ.get('CLOUDSDK_WRAPPER') == '1':
raise CommandException('\n'.join(textwrap.wrap(
'Your gsutil is configured with an OAuth2 service account, but '
'you do not have PyOpenSSL or PyCrypto 2.6 or later installed. '
'Service account authentication requires one of these libraries; '
'please reactivate your service account via the gcloud auth '
'command and ensure any gcloud packages necessary for '
'service accounts are present.')))
else:
raise CommandException('\n'.join(textwrap.wrap(
'Your gsutil is configured with an OAuth2 service account, but '
'you do not have PyOpenSSL or PyCrypto 2.6 or later installed. '
'Service account authentication requires one of these libraries; '
'please install either of them to proceed, or configure a '
'different type of credentials with "gsutil config".')))
else:
# With no boto config file the user can still access publicly readable
# buckets and objects.
from gslib import no_op_auth_plugin # pylint: disable=unused-variable
def GetConfigFilePath():
config_path = 'no config found'
for path in BotoConfigLocations:
try:
with open(path, 'r'):
config_path = path
break
except IOError:
pass
return config_path
def GetBotoConfigFileList():
"""Returns list of boto config files that exist."""
config_paths = boto.pyami.config.BotoConfigLocations
if 'AWS_CREDENTIAL_FILE' in os.environ:
config_paths.append(os.environ['AWS_CREDENTIAL_FILE'])
config_files = {}
for config_path in config_paths:
if os.path.exists(config_path):
config_files[config_path] = 1
cf_list = []
for config_file in config_files:
cf_list.append(config_file)
return cf_list
def GetCertsFile():
"""Configures and returns the CA Certificates file.
If one is already configured, use it. Otherwise, amend the configuration
(in boto.config) to use the cert roots distributed with gsutil.
Returns:
string filename of the certs file to use.
"""
certs_file = boto.config.get('Boto', 'ca_certificates_file', None)
if not certs_file:
with certs_file_lock:
if configured_certs_files:
disk_certs_file = configured_certs_files[0]
else:
disk_certs_file = os.path.abspath(
os.path.join(gslib.GSLIB_DIR, 'data', 'cacerts.txt'))
if not os.path.exists(disk_certs_file):
# If the file is not present on disk, this means the gslib module
# doesn't actually exist on disk anywhere. This can happen if it's
# being imported from a zip file. Unfortunately, we have to copy the
# certs file to a local temp file on disk because the underlying SSL
# socket requires it to be a filesystem path.
certs_data = pkgutil.get_data('gslib', 'data/cacerts.txt')
if not certs_data:
raise CommandException('Certificates file not found. Please '
'reinstall gsutil from scratch')
fd, fname = tempfile.mkstemp(suffix='.txt', prefix='gsutil-cacerts')
f = os.fdopen(fd, 'w')
f.write(certs_data)
f.close()
configured_certs_files.append(fname)
disk_certs_file = fname
certs_file = disk_certs_file
return certs_file
def GetCleanupFiles():
"""Returns a list of temp files to delete (if possible) when program exits."""
cleanup_files = []
if configured_certs_files:
cleanup_files += configured_certs_files
return cleanup_files
def ProxyInfoFromEnvironmentVar(proxy_env_var):
"""Reads proxy info from the environment and converts to httplib2.ProxyInfo.
Args:
proxy_env_var: Environment variable string to read, such as http_proxy or
https_proxy.
Returns:
httplib2.ProxyInfo constructed from the environment string.
"""
proxy_url = os.environ.get(proxy_env_var)
if not proxy_url or not proxy_env_var.lower().startswith('http'):
return httplib2.ProxyInfo(httplib2.socks.PROXY_TYPE_HTTP, None, 0)
proxy_protocol = proxy_env_var.lower().split('_')[0]
if not proxy_url.lower().startswith('http'):
# proxy_info_from_url requires a protocol, which is always http or https.
proxy_url = proxy_protocol + '://' + proxy_url
return httplib2.proxy_info_from_url(proxy_url, method=proxy_protocol)
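# Example (hypothetical value): with os.environ['http_proxy'] set to
# 'myproxy.example.com:3128', ProxyInfoFromEnvironmentVar('http_proxy')
# prepends the protocol and returns the result of
# httplib2.proxy_info_from_url('http://myproxy.example.com:3128', method='http').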
def GetNewHttp(http_class=httplib2.Http, **kwargs):
"""Creates and returns a new httplib2.Http instance.
Args:
http_class: Optional custom Http class to use.
**kwargs: Arguments to pass to http_class constructor.
Returns:
An initialized httplib2.Http instance.
"""
proxy_info = httplib2.ProxyInfo(
proxy_type=3,
proxy_host=boto.config.get('Boto', 'proxy', None),
proxy_port=boto.config.getint('Boto', 'proxy_port', 0),
proxy_user=boto.config.get('Boto', 'proxy_user', None),
proxy_pass=boto.config.get('Boto', 'proxy_pass', None),
proxy_rdns=boto.config.get('Boto', 'proxy_rdns', False))
if not (proxy_info.proxy_host and proxy_info.proxy_port):
# Fall back to using the environment variable.
for proxy_env_var in ['http_proxy', 'https_proxy', 'HTTPS_PROXY']:
if proxy_env_var in os.environ and os.environ[proxy_env_var]:
proxy_info = ProxyInfoFromEnvironmentVar(proxy_env_var)
        # Assume proxy_rdns is True if a proxy environment variable exists.
proxy_info.proxy_rdns = boto.config.get('Boto', 'proxy_rdns', True)
break
# Some installers don't package a certs file with httplib2, so use the
# one included with gsutil.
kwargs['ca_certs'] = GetCertsFile()
# Use a non-infinite SSL timeout to avoid hangs during network flakiness.
kwargs['timeout'] = SSL_TIMEOUT
http = http_class(proxy_info=proxy_info, **kwargs)
http.disable_ssl_certificate_validation = (not config.getbool(
'Boto', 'https_validate_certificates'))
return http
# Retry for 10 minutes with exponential backoff, which corresponds to
# the maximum Downtime Period specified in the GCS SLA
# (https://cloud.google.com/storage/sla)
def GetNumRetries():
return config.getint('Boto', 'num_retries', 23)
def GetMaxRetryDelay():
return config.getint('Boto', 'max_retry_delay', 32)
# Resumable downloads and uploads make one HTTP call per chunk (and must be
# in multiples of 256KiB). Overridable for testing.
def GetJsonResumableChunkSize():
chunk_size = config.getint('GSUtil', 'json_resumable_chunk_size',
1024*1024*100L)
if chunk_size == 0:
chunk_size = 1024*256L
  elif chunk_size % (1024*256L) != 0:
chunk_size += (1024*256L - (chunk_size % (1024*256L)))
return chunk_size
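# Example: a configured json_resumable_chunk_size of 300 KiB (307200 bytes) is
# not a multiple of 256 KiB, so it is rounded up to 512 KiB (524288 bytes).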
def _RoundToNearestExponent(num):
i = 0
while i+1 < len(_EXP_STRINGS) and num >= (2 ** _EXP_STRINGS[i+1][0]):
i += 1
return i, round(float(num) / 2 ** _EXP_STRINGS[i][0], 2)
def MakeHumanReadable(num):
"""Generates human readable string for a number of bytes.
Args:
num: The number, in bytes.
Returns:
A string form of the number using size abbreviations (KiB, MiB, etc.).
"""
i, rounded_val = _RoundToNearestExponent(num)
return '%g %s' % (rounded_val, _EXP_STRINGS[i][1])
def MakeBitsHumanReadable(num):
"""Generates human readable string for a number of bits.
Args:
num: The number, in bits.
Returns:
A string form of the number using bit size abbreviations (kbit, Mbit, etc.)
"""
i, rounded_val = _RoundToNearestExponent(num)
return '%g %s' % (rounded_val, _EXP_STRINGS[i][2])
def HumanReadableToBytes(human_string):
"""Tries to convert a human-readable string to a number of bytes.
Args:
human_string: A string supplied by user, e.g. '1M', '3 GiB'.
Returns:
An integer containing the number of bytes.
Raises:
ValueError: on an invalid string.
"""
human_string = human_string.lower()
m = MATCH_HUMAN_BYTES.match(human_string)
if m:
num = float(m.group('num'))
if m.group('suffix'):
power = _EXP_STRINGS[SUFFIX_TO_SI[m.group('suffix')]][0]
num *= (2.0 ** power)
num = int(round(num))
return num
raise ValueError('Invalid byte string specified: %s' % human_string)
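# Examples (using the suffix table above): HumanReadableToBytes('1M') and
# HumanReadableToBytes('1 MiB') both return 1048576, while
# MakeHumanReadable(1536) returns '1.5 KiB'.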
def Percentile(values, percent, key=lambda x: x):
"""Find the percentile of a list of values.
Taken from: http://code.activestate.com/recipes/511478/
Args:
values: a list of numeric values. Note that the values MUST BE already
sorted.
percent: a float value from 0.0 to 1.0.
key: optional key function to compute value from each element of the list
of values.
Returns:
The percentile of the values.
"""
if not values:
return None
k = (len(values) - 1) * percent
f = math.floor(k)
c = math.ceil(k)
if f == c:
return key(values[int(k)])
d0 = key(values[int(f)]) * (c-k)
d1 = key(values[int(c)]) * (k-f)
return d0 + d1
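# Example: Percentile([1, 2, 3, 4], 0.5) interpolates between the two middle
# values and returns 2.5, while Percentile([1, 2, 3], 0.5) returns 2.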
def RemoveCRLFFromString(input_str):
"""Returns the input string with all \\n and \\r removed."""
return re.sub(r'[\r\n]', '', input_str)
def UnaryDictToXml(message):
"""Generates XML representation of a nested dict.
This dict contains exactly one top-level entry and an arbitrary number of
2nd-level entries, e.g. capturing a WebsiteConfiguration message.
Args:
message: The dict encoding the message.
Returns:
XML string representation of the input dict.
Raises:
Exception: if dict contains more than one top-level entry.
"""
if len(message) != 1:
raise Exception('Expected dict of size 1, got size %d' % len(message))
name, content = message.items()[0]
element_type = ElementTree.Element(name)
for element_property, value in sorted(content.items()):
node = ElementTree.SubElement(element_type, element_property)
node.text = value
return ElementTree.tostring(element_type)
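# Example (hypothetical message): UnaryDictToXml(
#     {'WebsiteConfiguration': {'MainPageSuffix': 'index.html'}}) returns
# '<WebsiteConfiguration><MainPageSuffix>index.html</MainPageSuffix></WebsiteConfiguration>'.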
def LookUpGsutilVersion(gsutil_api, url_str):
"""Looks up the gsutil version of the specified gsutil tarball URL.
Version is specified in the metadata field set on that object.
Args:
gsutil_api: gsutil Cloud API to use when retrieving gsutil tarball.
url_str: tarball URL to retrieve (such as 'gs://pub/gsutil.tar.gz').
Returns:
Version string if URL is a cloud URL containing x-goog-meta-gsutil-version
metadata, else None.
"""
url = StorageUrlFromString(url_str)
if url.IsCloudUrl():
obj = gsutil_api.GetObjectMetadata(url.bucket_name, url.object_name,
provider=url.scheme,
fields=['metadata'])
if obj.metadata and obj.metadata.additionalProperties:
for prop in obj.metadata.additionalProperties:
if prop.key == 'gsutil_version':
return prop.value
def GetGsutilVersionModifiedTime():
"""Returns unix timestamp of when the VERSION file was last modified."""
if not gslib.VERSION_FILE:
return 0
return int(os.path.getmtime(gslib.VERSION_FILE))
def IsRunningInteractively():
"""Returns True if currently running interactively on a TTY."""
return sys.stdout.isatty() and sys.stderr.isatty() and sys.stdin.isatty()
def _HttpsValidateCertifcatesEnabled():
return config.get('Boto', 'https_validate_certificates', True)
CERTIFICATE_VALIDATION_ENABLED = _HttpsValidateCertifcatesEnabled()
def _BotoIsSecure():
return config.get('Boto', 'is_secure', True)
BOTO_IS_SECURE = _BotoIsSecure()
def ResumableThreshold():
return config.getint('GSUtil', 'resumable_threshold', EIGHT_MIB)
def AddAcceptEncoding(headers):
"""Adds accept-encoding:gzip to the dictionary of headers."""
# If Accept-Encoding is not already set, set it to enable gzip.
if 'accept-encoding' not in headers:
headers['accept-encoding'] = 'gzip'
# pylint: disable=too-many-statements
def PrintFullInfoAboutObject(bucket_listing_ref, incl_acl=True):
"""Print full info for given object (like what displays for gsutil ls -L).
Args:
bucket_listing_ref: BucketListingRef being listed.
Must have ref_type OBJECT and a populated root_object
with the desired fields.
incl_acl: True if ACL info should be output.
Returns:
Tuple (number of objects, object_length)
Raises:
Exception: if calling bug encountered.
"""
url_str = bucket_listing_ref.url_string
storage_url = StorageUrlFromString(url_str)
obj = bucket_listing_ref.root_object
if (obj.metadata and S3_DELETE_MARKER_GUID in
obj.metadata.additionalProperties):
num_bytes = 0
num_objs = 0
url_str += '<DeleteMarker>'
else:
num_bytes = obj.size
num_objs = 1
print '%s:' % url_str.encode(UTF8)
if obj.updated:
print '\tCreation time:\t\t%s' % obj.updated.strftime(
'%a, %d %b %Y %H:%M:%S GMT')
if obj.cacheControl:
print '\tCache-Control:\t\t%s' % obj.cacheControl
if obj.contentDisposition:
print '\tContent-Disposition:\t\t%s' % obj.contentDisposition
if obj.contentEncoding:
print '\tContent-Encoding:\t\t%s' % obj.contentEncoding
if obj.contentLanguage:
print '\tContent-Language:\t%s' % obj.contentLanguage
print '\tContent-Length:\t\t%s' % obj.size
print '\tContent-Type:\t\t%s' % obj.contentType
if obj.componentCount:
print '\tComponent-Count:\t%d' % obj.componentCount
marker_props = {}
if obj.metadata and obj.metadata.additionalProperties:
non_marker_props = []
for add_prop in obj.metadata.additionalProperties:
if add_prop.key not in S3_MARKER_GUIDS:
non_marker_props.append(add_prop)
else:
marker_props[add_prop.key] = add_prop.value
if non_marker_props:
print '\tMetadata:'
for ap in non_marker_props:
meta_string = '\t\t%s:\t\t%s' % (ap.key, ap.value)
print meta_string.encode(UTF8)
if obj.crc32c: print '\tHash (crc32c):\t\t%s' % obj.crc32c
if obj.md5Hash: print '\tHash (md5):\t\t%s' % obj.md5Hash
print '\tETag:\t\t\t%s' % obj.etag.strip('"\'')
if obj.generation:
generation_str = GenerationFromUrlAndString(storage_url, obj.generation)
print '\tGeneration:\t\t%s' % generation_str
if obj.metageneration:
print '\tMetageneration:\t\t%s' % obj.metageneration
if incl_acl:
# JSON API won't return acls as part of the response unless we have
# full control scope
if obj.acl:
print '\tACL:\t\t%s' % AclTranslation.JsonFromMessage(obj.acl)
elif S3_ACL_MARKER_GUID in marker_props:
print '\tACL:\t\t%s' % marker_props[S3_ACL_MARKER_GUID]
else:
print ('\tACL:\t\t\tACCESS DENIED. Note: you need OWNER '
'permission\n\t\t\t\ton the object to read its ACL.')
return (num_objs, num_bytes)
def CompareVersions(first, second):
"""Compares the first and second gsutil version strings.
For example, 3.33 > 3.7, and 4.1 is a greater major version than 3.33.
Does not handle multiple periods (e.g. 3.3.4) or complicated suffixes
(e.g., 3.3RC4 vs. 3.3RC5). A version string with a suffix is treated as
less than its non-suffix counterpart (e.g. 3.32 > 3.32pre).
Args:
first: First gsutil version string.
second: Second gsutil version string.
Returns:
(g, m):
g is True if first known to be greater than second, else False.
m is True if first known to be greater by at least 1 major version,
else False.
"""
m1 = VERSION_MATCHER.match(str(first))
m2 = VERSION_MATCHER.match(str(second))
# If passed strings we don't know how to handle, be conservative.
if not m1 or not m2:
return (False, False)
major_ver1 = int(m1.group('maj'))
minor_ver1 = int(m1.group('min')) if m1.group('min') else 0
suffix_ver1 = m1.group('suffix')
major_ver2 = int(m2.group('maj'))
minor_ver2 = int(m2.group('min')) if m2.group('min') else 0
suffix_ver2 = m2.group('suffix')
if major_ver1 > major_ver2:
return (True, True)
elif major_ver1 == major_ver2:
if minor_ver1 > minor_ver2:
return (True, False)
elif minor_ver1 == minor_ver2:
return (bool(suffix_ver2) and not suffix_ver1, False)
return (False, False)
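# Examples matching the docstring: CompareVersions('4.1', '3.33') returns
# (True, True), CompareVersions('3.32', '3.32pre') returns (True, False), and
# CompareVersions('3.32pre', '3.32') returns (False, False).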
def _IncreaseSoftLimitForResource(resource_name, fallback_value):
"""Sets a new soft limit for the maximum number of open files.
The soft limit is used for this process (and its children), but the
hard limit is set by the system and cannot be exceeded.
We will first try to set the soft limit to the hard limit's value; if that
fails, we will try to set the soft limit to the fallback_value iff this would
increase the soft limit.
Args:
resource_name: Name of the resource to increase the soft limit for.
fallback_value: Fallback value to be used if we couldn't set the
soft value to the hard value (e.g., if the hard value
is "unlimited").
Returns:
Current soft limit for the resource (after any changes we were able to
make), or -1 if the resource doesn't exist.
"""
# Get the value of the resource.
try:
(soft_limit, hard_limit) = resource.getrlimit(resource_name)
except (resource.error, ValueError):
# The resource wasn't present, so we can't do anything here.
return -1
# Try to set the value of the soft limit to the value of the hard limit.
if hard_limit > soft_limit: # Some OS's report 0 for "unlimited".
try:
resource.setrlimit(resource_name, (hard_limit, hard_limit))
return hard_limit
except (resource.error, ValueError):
# We'll ignore this and try the fallback value.
pass
# Try to set the value of the soft limit to the fallback value.
if soft_limit < fallback_value:
try:
resource.setrlimit(resource_name, (fallback_value, hard_limit))
return fallback_value
except (resource.error, ValueError):
# We couldn't change the soft limit, so just report the current
# value of the soft limit.
return soft_limit
else:
return soft_limit
def GetCloudApiInstance(cls, thread_state=None):
"""Gets a gsutil Cloud API instance.
Since Cloud API implementations are not guaranteed to be thread-safe, each
thread needs its own instance. These instances are passed to each thread
via the thread pool logic in command.
Args:
cls: Command class to be used for single-threaded case.
thread_state: Per thread state from this thread containing a gsutil
Cloud API instance.
Returns:
gsutil Cloud API instance.
"""
return thread_state or cls.gsutil_api
def GetFileSize(fp, position_to_eof=False):
"""Returns size of file, optionally leaving fp positioned at EOF."""
if not position_to_eof:
cur_pos = fp.tell()
fp.seek(0, os.SEEK_END)
cur_file_size = fp.tell()
if not position_to_eof:
fp.seek(cur_pos)
return cur_file_size
def GetStreamFromFileUrl(storage_url):
if storage_url.IsStream():
return sys.stdin
else:
return open(storage_url.object_name, 'rb')
def UrlsAreForSingleProvider(url_args):
"""Tests whether the URLs are all for a single provider.
Args:
url_args: Strings to check.
Returns:
True if URLs are for single provider, False otherwise.
"""
provider = None
url = None
for url_str in url_args:
url = StorageUrlFromString(url_str)
if not provider:
provider = url.scheme
elif url.scheme != provider:
return False
return provider is not None
def HaveFileUrls(args_to_check):
"""Checks whether args_to_check contain any file URLs.
Args:
args_to_check: Command-line argument subset to check.
Returns:
True if args_to_check contains any file URLs.
"""
for url_str in args_to_check:
storage_url = StorageUrlFromString(url_str)
if storage_url.IsFileUrl():
return True
return False
def HaveProviderUrls(args_to_check):
"""Checks whether args_to_check contains any provider URLs (like 'gs://').
Args:
args_to_check: Command-line argument subset to check.
Returns:
True if args_to_check contains any provider URLs.
"""
for url_str in args_to_check:
storage_url = StorageUrlFromString(url_str)
if storage_url.IsCloudUrl() and storage_url.IsProvider():
return True
return False
# This must be defined at the module level for pickling across processes.
MultiprocessingIsAvailableResult = collections.namedtuple(
'MultiprocessingIsAvailableResult', ['is_available', 'stack_trace'])
def CheckMultiprocessingAvailableAndInit(logger=None):
"""Checks if multiprocessing is available.
There are some environments in which there is no way to use multiprocessing
logic that's built into Python (e.g., if /dev/shm is not available, then
we can't create semaphores). This simply tries out a few things that will be
needed to make sure the environment can support the pieces of the
multiprocessing module that we need.
If multiprocessing is available, this performs necessary initialization for
multiprocessing. See gslib.command.InitializeMultiprocessingVariables for
an explanation of why this is necessary.
Args:
logger: logging.logger to use for debug output.
Returns:
(multiprocessing_is_available, stack_trace):
multiprocessing_is_available: True iff the multiprocessing module is
available for use.
stack_trace: The stack trace generated by the call we tried that failed.
"""
# pylint: disable=global-variable-undefined
global cached_multiprocessing_is_available
global cached_multiprocessing_check_stack_trace
global cached_multiprocessing_is_available_message
if cached_multiprocessing_is_available is not None:
if logger:
logger.debug(cached_multiprocessing_check_stack_trace)
logger.warn(cached_multiprocessing_is_available_message)
return MultiprocessingIsAvailableResult(
is_available=cached_multiprocessing_is_available,
stack_trace=cached_multiprocessing_check_stack_trace)
if IS_WINDOWS:
message = """
Multiple processes are not supported on Windows. Operations requesting
parallelism will be executed with multiple threads in a single process only.
"""
if logger:
logger.warn(message)
return MultiprocessingIsAvailableResult(is_available=False,
stack_trace=None)
stack_trace = None
multiprocessing_is_available = True
message = """
You have requested multiple processes for an operation, but the
required functionality of Python\'s multiprocessing module is not available.
Operations requesting parallelism will be executed with multiple threads in a
single process only.
"""
try:
# Fails if /dev/shm (or some equivalent thereof) is not available for use
# (e.g., there's no implementation, or we can't write to it, etc.).
try:
multiprocessing.Value('i', 0)
except:
message += """
Please ensure that you have write access to both /dev/shm and /run/shm.
"""
raise # We'll handle this in one place below.
# Manager objects and Windows are generally a pain to work with, so try it
# out as a sanity check. This definitely works on some versions of Windows,
# but it's certainly possible that there is some unknown configuration for
# which it won't.
global manager # pylint: disable=global-variable-undefined
manager = multiprocessing.Manager()
# Check that the max number of open files is reasonable. Always check this
# after we're sure that the basic multiprocessing functionality is
# available, since this won't matter unless that's true.
limit = -1
if HAS_RESOURCE_MODULE:
# Try to set this with both resource names - RLIMIT_NOFILE for most Unix
# platforms, and RLIMIT_OFILE for BSD. Ignore AttributeError because the
# "resource" module is not guaranteed to know about these names.
try:
limit = max(limit,
_IncreaseSoftLimitForResource(
resource.RLIMIT_NOFILE,
MIN_ACCEPTABLE_OPEN_FILES_LIMIT))
except AttributeError:
pass
try:
limit = max(limit,
_IncreaseSoftLimitForResource(
resource.RLIMIT_OFILE, MIN_ACCEPTABLE_OPEN_FILES_LIMIT))
except AttributeError:
pass
if limit < MIN_ACCEPTABLE_OPEN_FILES_LIMIT:
message += ("""
Your max number of open files, %s, is too low to allow safe multiprocessing.
On Linux you can fix this by adding something like "ulimit -n 10000" to your
~/.bashrc or equivalent file and opening a new terminal.
On MacOS, you may also need to run a command like this once (in addition to the
above instructions), which might require a restart of your system to take
effect:
launchctl limit maxfiles 10000
Alternatively, edit /etc/launchd.conf with something like:
limit maxfiles 10000 10000
""" % limit)
raise Exception('Max number of open files, %s, is too low.' % limit)
except: # pylint: disable=bare-except
stack_trace = traceback.format_exc()
multiprocessing_is_available = False
if logger is not None:
logger.debug(stack_trace)
logger.warn(message)
# Set the cached values so that we never need to do this check again.
cached_multiprocessing_is_available = multiprocessing_is_available
cached_multiprocessing_check_stack_trace = stack_trace
cached_multiprocessing_is_available_message = message
return MultiprocessingIsAvailableResult(
is_available=cached_multiprocessing_is_available,
stack_trace=cached_multiprocessing_check_stack_trace)
def CreateLock():
"""Returns either a multiprocessing lock or a threading lock.
Use Multiprocessing lock iff we have access to the parts of the
multiprocessing module that are necessary to enable parallelism in operations.
Returns:
Multiprocessing or threading lock.
"""
if CheckMultiprocessingAvailableAndInit().is_available:
return manager.Lock()
else:
return threading.Lock()
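# Sketch (not part of gsutil): whichever branch CreateLock takes, the returned
# lock supports the standard context-manager protocol, so callers can guard
# shared state identically under multiprocessing and threading. The helper and
# its argument below are illustrative only.
def _ExampleGuardedIncrement(counter_holder):
  """Increments counter_holder[0] while holding a lock from CreateLock()."""
  lock = CreateLock()
  with lock:
    counter_holder[0] += 1
  return counter_holder[0]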
def IsCloudSubdirPlaceholder(url, blr=None):
"""Determines if URL is a cloud subdir placeholder.
This function is needed because GUI tools (like the GCS cloud console) allow
users to create empty "folders" by creating a placeholder object; and parts
of gsutil need to treat those placeholder objects specially. For example,
gsutil rsync needs to avoid downloading those objects because they can cause
conflicts (see comments in rsync command for details).
We currently detect two cases:
- Cloud objects whose name ends with '_$folder$'
- Cloud objects whose name ends with '/'
Args:
url: The URL to be checked.
blr: BucketListingRef to check, or None if not available.
If None, size won't be checked.
Returns:
True/False.
"""
if not url.IsCloudUrl():
return False
url_str = url.url_string
if url_str.endswith('_$folder$'):
return True
if blr and blr.IsObject():
size = blr.root_object.size
else:
size = 0
return size == 0 and url_str.endswith('/')
def GetTermLines():
"""Returns number of terminal lines."""
# fcntl isn't supported in Windows.
try:
import fcntl # pylint: disable=g-import-not-at-top
import termios # pylint: disable=g-import-not-at-top
except ImportError:
return _DEFAULT_LINES
def ioctl_GWINSZ(fd): # pylint: disable=invalid-name
try:
return struct.unpack(
'hh', fcntl.ioctl(fd, termios.TIOCGWINSZ, '1234'))[0]
except: # pylint: disable=bare-except
return 0 # Failure (so will retry on different file descriptor below).
# Try to find a valid number of lines from termio for stdin, stdout,
# or stderr, in that order.
ioc = ioctl_GWINSZ(0) or ioctl_GWINSZ(1) or ioctl_GWINSZ(2)
if not ioc:
try:
fd = os.open(os.ctermid(), os.O_RDONLY)
ioc = ioctl_GWINSZ(fd)
os.close(fd)
except: # pylint: disable=bare-except
pass
if not ioc:
ioc = os.environ.get('LINES', _DEFAULT_LINES)
return int(ioc)
class GsutilStreamHandler(logging.StreamHandler):
"""A subclass of StreamHandler for use in gsutil."""
def flush(self):
# Note: we override the flush method here due to a python 2.6 bug. The
# python logging module will try to flush all stream handlers at exit.
# If the StreamHandler is pointing to a file that is already closed, the
# method throws an exception. Our unit tests temporarily redirect stderr,
# which causes the default StreamHandler to open its stream against a
# temporary file. By the time the process shuts down, the underlying file
# is closed, causing an exception. This was fixed in Python 2.7, but to
# remove the flake from Python 2.6, we maintain this here.
try:
logging.StreamHandler.flush(self)
except ValueError:
pass
def StdinIterator():
"""A generator function that returns lines from stdin."""
for line in sys.stdin:
# Strip CRLF.
yield line.rstrip()
|
# Copyright (c) Victor van den Elzen
# Released under the Expat license, see LICENSE file for details
from struct import pack, unpack, calcsize
from collections import OrderedDict
def getbytes(s, n):
b = s.read(n)
assert len(b) == n, "Unexpected EOF"
return b
def getbyte(s):
return getbytes(s, 1)
class Seek(object):
def __init__(self, s, *args, **kwargs):
self.old_pos = None
self.s = s
self.args = args
self.kwargs = kwargs
def __enter__(self):
self.old_pos = self.s.tell()
self.s.seek(*self.args, **self.kwargs)
def __exit__(self, exc_type, exc_value, traceback):
self.s.seek(self.old_pos)
class FakeWriteStream(object):
def __init__(self, offset=0):
self.offset = offset
def seek(self, offset):
self.offset = offset
def tell(self):
return self.offset
def write(self, data):
self.offset += len(data)
return len(data)
class BaseField(object):
def unpack(self, s):
self.data = self.unpack_data(s)
def unpack_data(self, s):
        raise NotImplementedError(self)
def pack(self, s):
self.pack_data(s, self.data)
def pack_data(self, s, data):
raise NotImplementedError(self)
def full_pack(self, s):
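        # Two-pass packing: pack repeatedly into a FakeWriteStream (which only
        # advances an offset) so position-dependent fields such as Offset can
        # settle, and only write to the real stream once the data stabilizes.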
new_data = self.data
while True:
old_data = new_data
self.pack(FakeWriteStream(s.tell()))
new_data = self.data
if old_data == new_data:
break
self.pack(s)
def serialize(self):
return self.data
class ContainerField(BaseField):
def __getitem__(self, key):
return self.field[key]
def __setitem__(self, key, value):
self.field[key] = value
def __delitem__(self, key):
del self.field[key]
def __len__(self):
return len(self.field)
def __iter__(self):
return iter(self.field)
def __contains__(self, key):
return key in self.field
def serialize(self):
return self.field.serialize()
class Struct(ContainerField):
def __init__(self, *args, **kwargs):
self.args = args
self.kwargs = kwargs
def add_field(self, name, f):
assert name not in self, name
self[name] = f
input_type, v = self.input
if input_type == "data":
f.data = v.get(name, None)
elif input_type == "stream":
f.unpack(v)
else:
assert False, input_type
return f
def F(self, name, f):
return self.add_field(name, f)
def unpack(self, s):
self.field = OrderedDict()
self.input = ("stream", s)
self.fields(*self.args, **self.kwargs)
del self.input
def pack(self, s):
for name, f in self.field.items():
f.pack(s)
@property
def data(self):
data = OrderedDict()
for k, v in self.field.items():
data[k] = v.data
return data
@data.setter
def data(self, v):
self.field = OrderedDict()
self.input = ("data", v)
self.fields(*self.args, **self.kwargs)
del self.input
def serialize(self):
data = OrderedDict()
for k, v in self.field.items():
if self.should_serialize(k, v):
data[k] = v.serialize()
return data
def should_serialize(self, k, v):
return True
def fields(self):
raise NotImplementedError(self)
class Magic(BaseField):
def __init__(self, magic):
if isinstance(magic, str):
magic = magic.encode()
self.magic = magic
def unpack(self, s):
data = getbytes(s, len(self.magic))
assert data == self.magic
def pack(self, s):
s.write(self.magic)
@property
def data(self):
return self.magic.decode()
@data.setter
def data(self, v):
assert v == self.magic or v is None, v
class Format(BaseField):
def __init__(self, fmt):
if fmt[0] in "@=<>!":
bosa = fmt[0]
fmt = fmt[1:]
else:
bosa = "<"
self.bosa = bosa
self.fmt = fmt
self.single = len(fmt) == 1
def unpack_data(self, s):
fmt = self.bosa + self.fmt
size = calcsize(fmt)
b = getbytes(s, size)
data = unpack(fmt, b)
if self.single:
assert len(data) == 1
data = data[0]
return data
def pack_data(self, s, data):
if self.single:
data = (data,)
        s.write(pack(self.bosa + self.fmt, *data))
class BaseArray(ContainerField):
def __init__(self, field_maker=None, field_function=None):
if field_function is None:
field_function = lambda i, f: field_maker()
self.field_fun = field_function
self._dict = None
def unpack(self, s):
self.field = [self.field_fun(i, self) for i in range(self.size)]
for f in self:
f.unpack(s)
def pack(self, s):
for f in self:
f.pack(s)
@property
def data(self):
return [f.data for f in self]
def index(self, field):
if self._dict is None:
self._dict = {}
for i in range(len(self.field)):
self._dict[self.field[i]] = i
return self._dict[field]
@data.setter
def data(self, v):
self.field = [self.field_fun(i, self) for i in range(len(v))]
for f, fv in zip(self.field, v):
f.data = fv
self._dict = None
def serialize(self):
return [f.serialize() for f in self]
def append_data(self, v):
idx = len(self.field)
f = self.field_fun(idx, self)
self.field.append(f)
f.data = v
if self._dict is not None:
self._dict[f] = idx
class Array(BaseArray):
def __init__(self, size, *args, **kwargs):
self.size = size
BaseArray.__init__(self, *args, **kwargs)
class PrefixedArray(BaseArray):
def __init__(self, prefix_field, *args, **kwargs):
self.prefix_field = prefix_field
BaseArray.__init__(self, *args, **kwargs)
@property
def size(self):
return self.prefix_field.data
def unpack(self, s):
self.prefix_field.unpack(s)
BaseArray.unpack(self, s)
def pack(self, s):
self.prefix_field.data = len(self)
self.prefix_field.pack(s)
BaseArray.pack(self, s)
class BaseBlob(BaseField):
def unpack_data(self, s):
return getbytes(s, self.size)
def pack_data(self, s, data):
s.write(data)
class Blob(BaseBlob):
def __init__(self, size):
self.size = size
def serialize(self):
return None
class PrefixedBlob(BaseBlob):
def __init__(self, prefix_field, *args, **kwargs):
self.prefix_field = prefix_field
BaseBlob.__init__(self, *args, **kwargs)
@property
def size(self):
return self.prefix_field.data
def unpack(self, s):
self.prefix_field.unpack(s)
BaseBlob.unpack(self, s)
def pack(self, s):
self.prefix_field.data = len(self)
self.prefix_field.pack(s)
BaseBlob.pack(self, s)
class String(BaseField):
def unpack_data(self, s):
lc = []
c = getbyte(s)
while c != b"\0":
lc.append(c)
c = getbyte(s)
return b"".join(lc).decode()
def pack_data(self, s, data):
s.write(data.encode())
s.write(b"\0")
class FixedString(BaseField):
def __init__(self, size):
self.size = size
def unpack_data(self, s):
data = getbytes(s, self.size)
data = data.rstrip(b"\0").decode()
return data
def pack_data(self, s, data):
data = data.encode().ljust(self.size, b"\0")
s.write(data)
class Index(BaseField):
def __init__(self, array, index_field):
self.array = array
self.index_field = index_field
def unpack_data(self, s):
self.index_field.unpack(s)
return self.array[self.index_field.data].data
def pack_data(self, s, data):
try:
index = self.array.data.index(data)
except ValueError:
index = len(self.array)
self.array.append_data(data)
self.index_field.data = index
self.index_field.pack(s)
class Offset(BaseField):
def unpack_data(self, s):
return s.tell()
def pack_data(self, s, data):
self.data = s.tell()
class Pointer(ContainerField):
def __init__(self, offset, field):
self.offset = offset
self.field = field
def unpack(self, s):
with Seek(s, self.offset):
self.field.unpack(s)
@property
def data(self):
return self.field.data
@data.setter
def data(self, v):
self.field.data = v
def pack_data(self, s, data):
pass
class DataPointer(ContainerField):
def __init__(self, offset_field, field):
self.offset_field = offset_field
self.field = field
def unpack(self, s):
self.offset_field.unpack(s)
with Seek(s, self.offset_field.data):
self.field.unpack(s)
@property
def data(self):
return self.field.data
@data.setter
def data(self, v):
self.field.data = v
class Mapping(BaseField):
def __init__(self, field, mapping):
self.field = field
self.mapping = mapping
def unpack_data(self, s):
data = self.field.unpack_data(s)
return self.mapping[data]
class Flags(BaseField):
def __init__(self, field, flags):
self.field = field
self.flags = flags
def unpack_data(self, s):
data = self.field.unpack_data(s)
flag_data = []
for mask, name in self.flags:
if mask & data:
flag_data.append(name)
return flag_data
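# Sketch (not part of the library above): a minimal Struct subclass showing how
# fields are declared and unpacked from a byte stream. The layout ("DEMO"
# magic, a uint32 version, a NUL-terminated name and a count-prefixed list of
# uint16 values) is purely illustrative.
class ExampleHeader(Struct):
    def fields(self):
        self.F("magic", Magic(b"DEMO"))
        self.F("version", Format("I"))
        self.F("name", String())
        self.F("values", PrefixedArray(Format("I"), lambda: Format("H")))
def example_unpack():
    from io import BytesIO
    raw = (b"DEMO" + pack("<I", 2) + b"hero\x00" +
           pack("<I", 3) + pack("<HHH", 10, 20, 30))
    header = ExampleHeader()
    header.unpack(BytesIO(raw))
    # header.data -> OrderedDict with 'magic', 'version', 'name', 'values'
    return header.data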
|
#!/usr/bin/env python
# coding: utf-8
# © 2015 Instacart
# Published as part of http://tech.instacart.com/ahab/
from contextlib import contextmanager
import logging
from pprint import pformat
from random import randint
import subprocess
from ahab import Ahab
import iptc
log = logging.getLogger()
def main():
logging.basicConfig(level=logging.INFO)
listener = Ahab(handlers=[nat_handler])
listener.listen()
def nat_handler(event, data):
log.info('Event:\n%s', pformat(event))
if 'Config' in data and 'Hostname' in data['Config']:
ident = data['Id']
f = {
'start': create_nat, # On 'start', we create the NAT rules
'die': clear_nat # On 'die', we remove them
}.get(event['status'])
# The 'start' and 'die' events are the only ones relevant for
# managing our NAT rules.
if f is None:
return
host = data['Config']['Hostname']
ip = data['NetworkSettings']['IPAddress']
        # We make a few attempts at the IP Tables operation, in case
# there is overlap with another event handler trying to do the
# same thing for another container.
for n in range(1, 5):
try:
f(host, ip)
break
except iptc.IPTCError as e:
if 'Resource temporarily unavailable' not in str(e):
log.error('IP Tables trouble for %s during NAT '
'setup, not continuing: %s', ident, e)
break
except Exception as e:
log.error('Unexpected error while handling NAT for %s: '
'%s', ident, e)
break
# No matter what happens, we don't error out, because that
# would crash other handlers that might be in the midst of
# configuring other containers.
def create_nat(host, container_ip):
with table(iptc.Table.NAT) as nat:
free_ips = list(secondary_ips() - ips_in_use())
free = free_ips[randint(1, len(free_ips)) - 1]
# Send packets that come in on the outer IP to the inner IP.
dnat = iptc.Rule()
dnat.dst = free
target = dnat.create_target('DNAT')
target.to_destination = container_ip
comment = dnat.create_match('comment')
comment.comment = 'ahab//' + host
iptc.Chain(nat, 'DOCKER').insert_rule(dnat)
# Rewrite packets from the inner IP so they go out on the outer IP.
snat = iptc.Rule()
snat.src = container_ip
target = snat.create_target('SNAT')
target.to_source = free
comment = snat.create_match('comment')
comment.comment = 'ahab//' + host
iptc.Chain(nat, 'POSTROUTING').insert_rule(snat)
def clear_nat(host, container_ip):
del container_ip # Could be used for sanity check
with table(iptc.Table.NAT) as nat:
token = 'ahab//' + host
chains = ['DOCKER', 'POSTROUTING']
for chain in [iptc.Chain(nat, name) for name in chains]:
for rule in chain.rules:
comments = [m for m in rule.matches if m.name == 'comment']
if any(c.comment == token for c in comments):
chain.delete_rule(rule)
def ips_in_use():
with table(iptc.Table.NAT) as nat:
ips = set()
token = 'ahab//'
chains = ['DOCKER', 'POSTROUTING']
for chain in [iptc.Chain(nat, name) for name in chains]:
for rule in chain.rules:
comments = [m for m in rule.matches if m.name == 'comment']
if any(c.comment.startswith(token) for c in comments):
if rule.dst is not None:
ips |= set([rule.dst.split('/')[0]])
log.info('IPs in use: %s', ips)
return ips
def secondary_ips():
secondary_ips = []
script = 'ip addr list dev eth0 | fgrep secondary'
text = subprocess.check_output(['sh', '-c', script])
for line in text.splitlines():
fields = line.split()
if len(fields) < 2:
continue
secondary_ips += [fields[1].split('/')[0]]
return set(secondary_ips)
open_tables = {}
@contextmanager
def table(tab):
"""Access IPTables transactionally in a uniform way.
Ensures all access is done without autocommit and that only the outer
most task commits, and also ensures we refresh once and commit once.
"""
global open_tables
if tab in open_tables:
yield open_tables[tab]
else:
open_tables[tab] = iptc.Table(tab)
open_tables[tab].refresh()
open_tables[tab].autocommit = False
yield open_tables[tab]
open_tables[tab].commit()
del open_tables[tab]
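# Sketch (illustrative only, not invoked): nested uses of table() share the one
# open table, so rules added at any depth are committed exactly once when the
# outermost context exits. The chain names mirror those used above.
def example_nested_table_use():
    with table(iptc.Table.NAT) as nat:
        docker_chain = iptc.Chain(nat, 'DOCKER')
        with table(iptc.Table.NAT) as same_nat:
            # Same underlying table object as `nat`; no refresh or commit here.
            postrouting_chain = iptc.Chain(same_nat, 'POSTROUTING')
        # Any rules inserted via either chain are committed together when the
        # outer `with` block exits.
        return docker_chain, postrouting_chain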
if __name__ == '__main__':
main()
|
import func.core.config as c
import func.core.intro as intro
from func.core.lang import t
from func.core.viz import subselector
from func.core.prsnj import Pj
from func.core.export import imprimir_clases
import os
def cargar_archivo(prompt, carpeta):
from func.data.setup import data as s
ars, nom = [], []
for ar in os.listdir(carpeta):
if os.path.isfile(carpeta+'/'+ar):
personaje = c.abrir_json(carpeta+'/'+ar)
nom.append(personaje['nombre']+' ('+imprimir_clases(personaje['cla'],s.CLASES)+')')
ars.append(ar)
sel = subselector(prompt,nom,True)
data = c.abrir_json(carpeta+'/'+ars[sel])
return data
def menu ():
while True:
opciones = [t('Crear un nuevo personaje'),
t('Avanzar un personaje existente'),
t('Editar preferencias'),
t('Salir'),
'\n'+t('Ver licencia')]
intro.imprimir_titulo()
intro.introduccion()
print(t('Elije una opción'))
op = subselector(t('Opción'),opciones)
        if op == 0: # Create a new Pj (character)
import func.core.chargen
Pj.nuevo_pj()
func.core.chargen.go()
        elif op == 1: # Advance an existing Pj
import func.core.chargen
Pj.cargar_pj(cargar_archivo('Personaje','Guardar'))
func.core.chargen.go()
        elif op == 2: # preferences
c.preferencias(c.abrir_json('config.json'))
elif op == 3: # exit
break
elif op == 4:
intro.licencia('LICENSE.txt')
input(t('\n[Presione Enter para continuar]\n'))
if __name__ == '__main__':
os.system(['clear','cls'][os.name == 'nt'])
menu()
|
from req import WebRequestHandler
from req import Service
import tornado
import math
class WebUsersHandler(WebRequestHandler):
@tornado.gen.coroutine
def get(self):
if self.map_power['user_manage'] not in self.account['power']:
self.write_error(403)
return
args = ["page"]
meta = self.get_args(args)
meta['count'] = 10
### default page is 1
if not meta['page']:
meta['page'] = 1
### if get page is not int then redirect to page 1
try:
meta["page"] = int(meta["page"])
        except (TypeError, ValueError):
self.redirect('/users/')
return
### modify page in range (1, page_count)
err, count = yield from Service.User.get_user_list_count()
print(count, type(count))
page_count = max(math.ceil(count / meta['count']), 1)
if int(meta['page']) < 1:
self.redirect('/users/')
return
if int(meta['page']) > page_count:
self.redirect('/users/?page=%s'%str(page_count))
return
err, data = yield from Service.User.get_user_list(meta)
### about pagination
page = {}
page['total'] = page_count
page['current'] = meta['page']
page['url'] = '/users/'
page['get'] = {}
self.render('./users/users.html', data=data, page=page)
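# Sketch (not wired into the handler above): the same page arithmetic as a pure
# helper. The handler redirects out-of-range pages instead of clamping; this
# only shows the page_count computation. The name and defaults are illustrative.
def _clamp_page(page, count, per_page=10):
    page_count = max(math.ceil(count / per_page), 1)
    return min(max(page, 1), page_count), page_count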
class WebUserHandler(WebRequestHandler):
""" single user data """
@tornado.gen.coroutine
def get(self, id=None, action=None):
if not id: id = self.account["id"]
###err, meta = yield from Service.User.get_user_advanced_info(id)
err, meta = yield from Service.User.get_user_basic_info({'id': id})
err, meta['group'] = yield from Service.User.get_user_group_info({'id': id})
if err:
self.write_error(err)
return
self.render('./users/user.html', data=meta)
class WebUserEditHandler(WebRequestHandler):
@tornado.gen.coroutine
def get(self, id):
if int(id) != self.account['id']:
self.write_error(403)
return
data = {}
err, data['schools'] = yield from Service.School.get_school_list()
self.render('./users/user_edit.html', data=data)
class WebUserSignHandler(WebRequestHandler):
@tornado.gen.coroutine
def get(self, action):
if action == "signin":
            if self.account['id'] != 0:
                self.redirect('/')
                return
self.render('./users/user_signin.html')
elif action == "signout":
Service.User.signout(self)
self.redirect('/users/signin/')
elif action == "signup":
            if self.account['id'] != 0:
                self.redirect('/')
                return
err, school = yield from Service.School.get_school_list()
self.render('./users/user_signup.html', school=school)
else:
self.write_error(404)
|
# All Rights Reserved.
# Copyright 2013 SolidFire Inc
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import json
import math
import random
import socket
import string
import time
import warnings
from oslo_config import cfg
from oslo_log import log as logging
from oslo_utils import importutils
from oslo_utils import timeutils
from oslo_utils import units
import requests
from requests.packages.urllib3 import exceptions
import six
from cinder import context
from cinder import exception
from cinder.i18n import _, _LE, _LI, _LW
from cinder.image import image_utils
from cinder.volume.drivers.san import san
from cinder.volume import qos_specs
from cinder.volume.targets import iscsi as iscsi_driver
from cinder.volume import volume_types
LOG = logging.getLogger(__name__)
sf_opts = [
cfg.BoolOpt('sf_emulate_512',
default=True,
help='Set 512 byte emulation on volume creation; '),
cfg.BoolOpt('sf_allow_tenant_qos',
default=False,
help='Allow tenants to specify QOS on create'),
cfg.StrOpt('sf_account_prefix',
default=None,
help='Create SolidFire accounts with this prefix. Any string '
'can be used here, but the string \"hostname\" is special '
'and will create a prefix using the cinder node hostname '
'(previous default behavior). The default is NO prefix.'),
cfg.StrOpt('sf_template_account_name',
default='openstack-vtemplate',
help='Account name on the SolidFire Cluster to use as owner of '
'template/cache volumes (created if does not exist).'),
cfg.BoolOpt('sf_allow_template_caching',
default=True,
help='Create an internal cache of copy of images when '
'a bootable volume is created to eliminate fetch from '
'glance and qemu-conversion on subsequent calls.'),
cfg.IntOpt('sf_api_port',
default=443,
help='SolidFire API port. Useful if the device api is behind '
'a proxy on a different port.')]
CONF = cfg.CONF
CONF.register_opts(sf_opts)
def retry(exc_tuple, tries=5, delay=1, backoff=2):
def retry_dec(f):
@six.wraps(f)
def func_retry(*args, **kwargs):
_tries, _delay = tries, delay
while _tries > 1:
try:
return f(*args, **kwargs)
except exc_tuple:
time.sleep(_delay)
_tries -= 1
_delay *= backoff
LOG.debug('Retrying %(args)s, %(tries)s attempts '
'remaining...',
{'args': args, 'tries': _tries})
# NOTE(jdg): Don't log the params passed here
# some cmds like createAccount will have sensitive
# info in the params, grab only the second tuple
# which should be the Method
msg = (_('Retry count exceeded for command: %s') %
(args[1],))
LOG.error(msg)
raise exception.SolidFireAPIException(message=msg)
return func_retry
return retry_dec
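# Sketch (not used by the driver below): applying the retry decorator to a
# plain callable. The exception tuple and timing values are illustrative; after
# the retries are exhausted the decorator logs and raises
# SolidFireAPIException.
@retry((exception.SolidFireRetryableException,), tries=3, delay=0.1)
def _example_flaky_request(client, method):
    # Each SolidFireRetryableException raised here triggers another attempt
    # with exponentially increasing delay.
    return client(method)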
class SolidFireDriver(san.SanISCSIDriver):
"""OpenStack driver to enable SolidFire cluster.
Version history:
1.0 - Initial driver
1.1 - Refactor, clone support, qos by type and minor bug fixes
1.2 - Add xfr and retype support
1.2.1 - Add export/import support
1.2.2 - Catch VolumeNotFound on accept xfr
2.0.0 - Move from httplib to requests
2.0.1 - Implement SolidFire Snapshots
2.0.2 - Implement secondary account
"""
VERSION = '2.0.2'
sf_qos_dict = {'slow': {'minIOPS': 100,
'maxIOPS': 200,
'burstIOPS': 200},
'medium': {'minIOPS': 200,
'maxIOPS': 400,
'burstIOPS': 400},
'fast': {'minIOPS': 500,
'maxIOPS': 1000,
'burstIOPS': 1000},
'performant': {'minIOPS': 2000,
'maxIOPS': 4000,
'burstIOPS': 4000},
'off': None}
sf_qos_keys = ['minIOPS', 'maxIOPS', 'burstIOPS']
cluster_stats = {}
retry_exc_tuple = (exception.SolidFireRetryableException,
requests.exceptions.ConnectionError)
retryable_errors = ['xDBVersionMismatch',
'xMaxSnapshotsPerVolumeExceeded',
'xMaxClonesPerVolumeExceeded',
'xMaxSnapshotsPerNodeExceeded',
'xMaxClonesPerNodeExceeded',
'xNotReadyForIO']
def __init__(self, *args, **kwargs):
super(SolidFireDriver, self).__init__(*args, **kwargs)
self.configuration.append_config_values(sf_opts)
self._endpoint = self._build_endpoint_info()
self.template_account_id = None
self.max_volumes_per_account = 1990
try:
self._update_cluster_status()
except exception.SolidFireAPIException:
pass
if self.configuration.sf_allow_template_caching:
account = self.configuration.sf_template_account_name
self.template_account_id = self._create_template_account(account)
self.target_driver = (
importutils.import_object(
'cinder.volume.drivers.solidfire.SolidFireISCSI',
solidfire_driver=self,
configuration=self.configuration))
def _create_template_account(self, account_name):
# We raise an API exception if the account doesn't exist
# We need to take account_prefix settings into consideration
# This just uses the same method to do template account create
# as we use for any other OpenStack account
account_name = self._get_sf_account_name(account_name)
try:
id = self._issue_api_request(
'GetAccountByName',
{'username': account_name})['result']['account']['accountID']
except exception.SolidFireAPIException:
chap_secret = self._generate_random_string(12)
params = {'username': account_name,
'initiatorSecret': chap_secret,
'targetSecret': chap_secret,
'attributes': {}}
id = self._issue_api_request('AddAccount',
params)['result']['accountID']
return id
def _build_endpoint_info(self, **kwargs):
endpoint = {}
endpoint['mvip'] = (
kwargs.get('mvip', self.configuration.san_ip))
endpoint['login'] = (
kwargs.get('login', self.configuration.san_login))
endpoint['passwd'] = (
kwargs.get('passwd', self.configuration.san_password))
endpoint['port'] = (
kwargs.get('port', self.configuration.sf_api_port))
endpoint['url'] = 'https://%s:%s' % (endpoint['mvip'],
endpoint['port'])
# TODO(jdg): consider a call to GetAPI and setting version
return endpoint
@retry(retry_exc_tuple, tries=6)
def _issue_api_request(self, method, params, version='1.0', endpoint=None):
if params is None:
params = {}
if endpoint is None:
endpoint = self._endpoint
payload = {'method': method, 'params': params}
url = '%s/json-rpc/%s/' % (endpoint['url'], version)
with warnings.catch_warnings():
warnings.simplefilter("ignore", exceptions.InsecureRequestWarning)
req = requests.post(url,
data=json.dumps(payload),
auth=(endpoint['login'], endpoint['passwd']),
verify=False,
timeout=30)
response = req.json()
req.close()
if (('error' in response) and
(response['error']['name'] in self.retryable_errors)):
msg = ('Retryable error (%s) encountered during '
'SolidFire API call.' % response['error']['name'])
LOG.debug(msg)
raise exception.SolidFireRetryableException(message=msg)
if 'error' in response:
msg = _('API response: %s') % response
raise exception.SolidFireAPIException(msg)
return response
def _get_volumes_by_sfaccount(self, account_id):
"""Get all volumes on cluster for specified account."""
params = {'accountID': account_id}
data = self._issue_api_request('ListVolumesForAccount', params)
if 'result' in data:
return data['result']['volumes']
def _get_sfaccount_by_name(self, sf_account_name):
"""Get SolidFire account object by name."""
sfaccount = None
params = {'username': sf_account_name}
try:
data = self._issue_api_request('GetAccountByName', params)
if 'result' in data and 'account' in data['result']:
LOG.debug('Found solidfire account: %s', sf_account_name)
sfaccount = data['result']['account']
except exception.SolidFireAPIException as ex:
if 'xUnknownAccount' in ex.msg:
return sfaccount
else:
raise
return sfaccount
def _get_sf_account_name(self, project_id):
"""Build the SolidFire account name to use."""
prefix = self.configuration.sf_account_prefix or ''
if prefix == 'hostname':
prefix = socket.gethostname()
return '%s%s%s' % (prefix, '-' if prefix else '', project_id)
def _get_sfaccount(self, project_id):
sf_account_name = self._get_sf_account_name(project_id)
sfaccount = self._get_sfaccount_by_name(sf_account_name)
if sfaccount is None:
raise exception.SolidFireAccountNotFound(
account_name=sf_account_name)
return sfaccount
def _create_sfaccount(self, project_id):
"""Create account on SolidFire device if it doesn't already exist.
We're first going to check if the account already exists, if it does
just return it. If not, then create it.
"""
sf_account_name = self._get_sf_account_name(project_id)
sfaccount = self._get_sfaccount_by_name(sf_account_name)
if sfaccount is None:
LOG.debug('solidfire account: %s does not exist, create it...',
sf_account_name)
chap_secret = self._generate_random_string(12)
params = {'username': sf_account_name,
'initiatorSecret': chap_secret,
'targetSecret': chap_secret,
'attributes': {}}
data = self._issue_api_request('AddAccount', params)
if 'result' in data:
sfaccount = self._get_sfaccount_by_name(sf_account_name)
return sfaccount
def _get_cluster_info(self):
"""Query the SolidFire cluster for some property info."""
params = {}
data = self._issue_api_request('GetClusterInfo', params)
if 'result' not in data:
msg = _("API response: %s") % data
raise exception.SolidFireAPIException(msg)
return data['result']
def _generate_random_string(self, length):
"""Generates random_string to use for CHAP password."""
char_set = string.ascii_uppercase + string.digits
return ''.join(random.sample(char_set, length))
def _get_model_info(self, sfaccount, sf_volume_id):
"""Gets the connection info for specified account and volume."""
cluster_info = self._get_cluster_info()
iscsi_portal = cluster_info['clusterInfo']['svip'] + ':3260'
chap_secret = sfaccount['targetSecret']
found_volume = False
iteration_count = 0
while not found_volume and iteration_count < 600:
volume_list = self._get_volumes_by_sfaccount(
sfaccount['accountID'])
iqn = None
for v in volume_list:
if v['volumeID'] == sf_volume_id:
iqn = v['iqn']
found_volume = True
break
if not found_volume:
time.sleep(2)
iteration_count += 1
if not found_volume:
LOG.error(_LE('Failed to retrieve volume SolidFire-'
'ID: %s in get_by_account!'), sf_volume_id)
raise exception.VolumeNotFound(volume_id=sf_volume_id)
model_update = {}
# NOTE(john-griffith): SF volumes are always at lun 0
model_update['provider_location'] = ('%s %s %s'
% (iscsi_portal, iqn, 0))
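        # e.g. "<svip>:3260 <target iqn> 0" -- portal, target IQN and LUN 0
        # (placeholder values; the real strings come from the cluster).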
model_update['provider_auth'] = ('CHAP %s %s'
% (sfaccount['username'],
chap_secret))
if not self.configuration.sf_emulate_512:
model_update['provider_geometry'] = ('%s %s' % (4096, 4096))
model_update['provider_id'] = ('%s' % sf_volume_id)
return model_update
def _do_clone_volume(self, src_uuid,
src_project_id,
vref):
"""Create a clone of an existing volume or snapshot."""
attributes = {}
qos = {}
sf_accounts = self._get_sfaccounts_for_tenant(vref['project_id'])
if not sf_accounts:
sf_account = self._create_sfaccount(vref['project_id'])
else:
# Check availability for creates
sf_account = self._get_account_create_availability(sf_accounts)
if not sf_account:
# TODO(jdg): We're not doing tertiaries, so fail
msg = _('volumes/account exceeded on both primary '
'and secondary SolidFire accounts')
raise exception.SolidFireDriverException(msg)
params = {'name': 'UUID-%s' % vref['id'],
'newAccountID': sf_account['accountID']}
# NOTE(jdg): First check the SF snapshots
# if we don't find a snap by the given name, just move on to check
# volumes. This may be a running system that was updated from
# before we did snapshots, so need to check both
is_clone = False
snap_name = 'UUID-%s' % src_uuid
snaps = self._get_sf_snapshots()
snap = next((s for s in snaps if s["name"] == snap_name), None)
if snap:
params['snapshotID'] = int(snap['snapshotID'])
params['volumeID'] = int(snap['volumeID'])
params['newSize'] = int(vref['size'] * units.Gi)
else:
sf_vol = self._get_sf_volume(
src_uuid, {'accountID': sf_account['accountID']})
if sf_vol is None:
raise exception.VolumeNotFound(volume_id=src_uuid)
params['volumeID'] = int(sf_vol['volumeID'])
params['newSize'] = int(vref['size'] * units.Gi)
is_clone = True
data = self._issue_api_request('CloneVolume', params, version='6.0')
if (('result' not in data) or ('volumeID' not in data['result'])):
msg = _("API response: %s") % data
raise exception.SolidFireAPIException(msg)
sf_volume_id = data['result']['volumeID']
if (self.configuration.sf_allow_tenant_qos and
                vref.get('volume_metadata') is not None):
qos = self._set_qos_presets(vref)
ctxt = context.get_admin_context()
type_id = vref.get('volume_type_id', None)
if type_id is not None:
qos = self._set_qos_by_volume_type(ctxt, type_id)
# NOTE(jdg): all attributes are copied via clone, need to do an update
# to set any that were provided
params = {'volumeID': sf_volume_id}
create_time = vref['created_at'].isoformat()
attributes = {'uuid': vref['id'],
'is_clone': 'True',
'src_uuid': src_uuid,
'created_at': create_time}
if qos:
params['qos'] = qos
for k, v in qos.items():
attributes[k] = str(v)
params['attributes'] = attributes
data = self._issue_api_request('ModifyVolume', params)
model_update = self._get_model_info(sf_account, sf_volume_id)
if model_update is None:
mesg = _('Failed to get model update from clone')
raise exception.SolidFireAPIException(mesg)
# Increment the usage count, just for data collection
# We're only doing this for clones, not create_from snaps
if is_clone:
data = self._update_attributes(sf_vol)
return (data, sf_account, model_update)
def _update_attributes(self, sf_vol):
cloned_count = sf_vol['attributes'].get('cloned_count', 0)
cloned_count += 1
attributes = sf_vol['attributes']
attributes['cloned_count'] = cloned_count
params = {'volumeID': int(sf_vol['volumeID'])}
params['attributes'] = attributes
return self._issue_api_request('ModifyVolume', params)
def _do_volume_create(self, sf_account, params):
data = self._issue_api_request('CreateVolume', params)
if (('result' not in data) or ('volumeID' not in data['result'])):
msg = _("Failed volume create: %s") % data
raise exception.SolidFireAPIException(msg)
sf_volume_id = data['result']['volumeID']
return self._get_model_info(sf_account, sf_volume_id)
def _do_snapshot_create(self, params):
data = self._issue_api_request('CreateSnapshot', params, version='6.0')
if (('result' not in data) or ('snapshotID' not in data['result'])):
msg = _("Failed snapshot create: %s") % data
raise exception.SolidFireAPIException(msg)
return data['result']['snapshotID']
def _set_qos_presets(self, volume):
qos = {}
valid_presets = self.sf_qos_dict.keys()
# First look to see if they included a preset
presets = [i.value for i in volume.get('volume_metadata')
if i.key == 'sf-qos' and i.value in valid_presets]
if len(presets) > 0:
if len(presets) > 1:
LOG.warning(_LW('More than one valid preset was '
'detected, using %s'), presets[0])
qos = self.sf_qos_dict[presets[0]]
else:
# look for explicit settings
for i in volume.get('volume_metadata'):
if i.key in self.sf_qos_keys:
qos[i.key] = int(i.value)
return qos
def _set_qos_by_volume_type(self, ctxt, type_id):
qos = {}
volume_type = volume_types.get_volume_type(ctxt, type_id)
qos_specs_id = volume_type.get('qos_specs_id')
specs = volume_type.get('extra_specs')
# NOTE(jdg): We prefer the qos_specs association
# and over-ride any existing
# extra-specs settings if present
if qos_specs_id is not None:
kvs = qos_specs.get_qos_specs(ctxt, qos_specs_id)['specs']
else:
kvs = specs
for key, value in kvs.items():
if ':' in key:
fields = key.split(':')
key = fields[1]
if key in self.sf_qos_keys:
qos[key] = int(value)
return qos
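    # Example of the scoped-key handling in _set_qos_by_volume_type above
    # (hypothetical spec): extra specs {'qos:minIOPS': '100', 'foo:bar': '1'}
    # reduce to {'minIOPS': 100}; keys not listed in sf_qos_keys are ignored.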
def _get_sf_volume(self, uuid, params):
data = self._issue_api_request('ListVolumesForAccount', params)
if 'result' not in data:
msg = _("Failed to get SolidFire Volume: %s") % data
raise exception.SolidFireAPIException(msg)
found_count = 0
sf_volref = None
for v in data['result']['volumes']:
# NOTE(jdg): In the case of "name" we can't
# update that on manage/import, so we use
# the uuid attribute
meta = v.get('attributes')
alt_id = meta.get('uuid', 'empty')
if uuid in v['name'] or uuid in alt_id:
found_count += 1
sf_volref = v
LOG.debug("Mapped SolidFire volumeID %(volume_id)s "
"to cinder ID %(uuid)s.",
{'volume_id': v['volumeID'], 'uuid': uuid})
if found_count == 0:
# NOTE(jdg): Previously we would raise here, but there are cases
# where this might be a cleanup for a failed delete.
# Until we get better states we'll just log an error
LOG.error(_LE("Volume %s, not found on SF Cluster."), uuid)
if found_count > 1:
LOG.error(_LE("Found %(count)s volumes mapped to id: %(uuid)s."),
{'count': found_count,
'uuid': uuid})
raise exception.DuplicateSfVolumeNames(vol_name=uuid)
return sf_volref
def _get_sf_snapshots(self, sf_volid=None):
params = {}
if sf_volid:
params = {'volumeID': sf_volid}
data = self._issue_api_request('ListSnapshots', params, version='6.0')
if 'result' not in data:
msg = _("Failed to get SolidFire Snapshot: %s") % data
raise exception.SolidFireAPIException(msg)
return data['result']['snapshots']
def _create_image_volume(self, context,
image_meta, image_service,
image_id):
# NOTE(jdg): It's callers responsibility to ensure that
# the optional properties.virtual_size is set on the image
# before we get here
virt_size = int(image_meta['properties'].get('virtual_size'))
min_sz_in_bytes = (
math.ceil(virt_size / float(units.Gi)) * float(units.Gi))
min_sz_in_gig = math.ceil(min_sz_in_bytes / float(units.Gi))
attributes = {}
attributes['image_info'] = {}
attributes['image_info']['image_updated_at'] = (
image_meta['updated_at'].isoformat())
attributes['image_info']['image_name'] = (
image_meta['name'])
attributes['image_info']['image_created_at'] = (
image_meta['created_at'].isoformat())
attributes['image_info']['image_id'] = image_meta['id']
params = {'name': 'OpenStackIMG-%s' % image_id,
'accountID': self.template_account_id,
'sliceCount': 1,
'totalSize': int(min_sz_in_bytes),
'enable512e': self.configuration.sf_emulate_512,
'attributes': attributes,
'qos': {}}
sf_account = self._issue_api_request(
'GetAccountByID',
{'accountID': self.template_account_id})
template_vol = self._do_volume_create(sf_account, params)
tvol = {}
tvol['id'] = image_id
tvol['provider_location'] = template_vol['provider_location']
tvol['provider_auth'] = template_vol['provider_auth']
connector = 'na'
conn = self.initialize_connection(tvol, connector)
attach_info = super(SolidFireDriver, self)._connect_device(conn)
properties = 'na'
try:
image_utils.fetch_to_raw(context,
image_service,
image_id,
attach_info['device']['path'],
self.configuration.volume_dd_blocksize,
size=min_sz_in_gig)
except Exception as exc:
params['volumeID'] = template_vol['volumeID']
LOG.error(_LE('Failed image conversion during cache creation: %s'),
exc)
LOG.debug('Removing SolidFire Cache Volume (SF ID): %s',
template_vol['volumeID'])
self._detach_volume(context, attach_info, tvol, properties)
self._issue_api_request('DeleteVolume', params)
return
self._detach_volume(context, attach_info, tvol, properties)
sf_vol = self._get_sf_volume(image_id, params)
LOG.debug('Successfully created SolidFire Image Template '
'for image-id: %s', image_id)
return sf_vol
def _verify_image_volume(self, context, image_meta, image_service):
# This method just verifies that IF we have a cache volume that
# it's still up to date and current WRT the image in Glance
# ie an image-update hasn't occurred since we grabbed it
# If it's out of date, just delete it and we'll create a new one
# Any other case we don't care and just return without doing anything
params = {'accountID': self.template_account_id}
sf_vol = self._get_sf_volume(image_meta['id'], params)
if sf_vol is None:
return
# Check updated_at field, delete copy and update if needed
if sf_vol['attributes']['image_info']['image_updated_at'] == (
image_meta['updated_at'].isoformat()):
return
else:
# Bummer, it's been updated, delete it
params = {'accountID': self.template_account_id}
params['volumeID'] = sf_vol['volumeID']
data = self._issue_api_request('DeleteVolume', params)
if 'result' not in data:
msg = _("Failed to delete SolidFire Image-Volume: %s") % data
raise exception.SolidFireAPIException(msg)
if not self._create_image_volume(context,
image_meta,
image_service,
image_meta['id']):
msg = _("Failed to create SolidFire Image-Volume")
raise exception.SolidFireAPIException(msg)
def _get_sfaccounts_for_tenant(self, cinder_project_id):
data = self._issue_api_request('ListAccounts', {})
if 'result' not in data:
msg = _("API response: %s") % data
raise exception.SolidFireAPIException(msg)
# Note(jdg): On SF we map account-name to OpenStack's tenant ID
# we use tenantID in here to get secondaries that might exist
# Also: we expect this to be sorted, so we get the primary first
# in the list
return sorted([acc for acc in data['result']['accounts'] if
cinder_project_id in acc['username']])
def _get_all_active_volumes(self, cinder_uuid=None):
params = {}
data = self._issue_api_request('ListActiveVolumes',
params)
if 'result' not in data:
msg = _("Failed get active SolidFire volumes: %s") % data
raise exception.SolidFireAPIException(msg)
        if cinder_uuid:
            vols = ([v for v in data['result']['volumes'] if
                     cinder_uuid in v['name']])
        else:
            vols = [v for v in data['result']['volumes']]
        return vols
def _get_all_deleted_volumes(self, cinder_uuid=None):
params = {}
data = self._issue_api_request('ListDeletedVolumes',
params)
if 'result' not in data:
msg = _("Failed get Deleted SolidFire volumes: %s") % data
raise exception.SolidFireAPIException(msg)
if cinder_uuid:
deleted_vols = ([v for v in data['result']['volumes'] if
cinder_uuid in v['name']])
else:
deleted_vols = [v for v in data['result']['volumes']]
return deleted_vols
def _get_account_create_availability(self, accounts):
# we'll check both the primary and the secondary
# if it exists and return whichever one has count
# available.
for acc in accounts:
            if len(self._get_volumes_for_account(
                    acc['accountID'])) < self.max_volumes_per_account:
                return acc
if len(accounts) == 1:
            sfaccount = self._create_sfaccount(accounts[0]['username'] + '_')
return sfaccount
return None
def _get_volumes_for_account(self, sf_account_id, cinder_uuid=None):
# ListVolumesForAccount gives both Active and Deleted
# we require the solidfire accountID, uuid of volume
# is optional
params = {'accountID': sf_account_id}
response = self._issue_api_request('ListVolumesForAccount',
params)
if cinder_uuid:
vlist = [v for v in response['result']['volumes'] if
cinder_uuid in v['name']]
else:
vlist = [v for v in response['result']['volumes']]
vlist = sorted(vlist, key=lambda k: k['volumeID'])
return vlist
def clone_image(self, context,
volume, image_location,
image_meta, image_service):
# Check out pre-requisites:
# Is template caching enabled?
if not self.configuration.sf_allow_template_caching:
return None, False
# Is the image owned by this tenant or public?
if ((not image_meta.get('is_public', False)) and
(image_meta['owner'] != volume['project_id'])):
LOG.warning(_LW("Requested image is not "
"accessible by current Tenant."))
return None, False
# Is virtual_size property set on the image?
if ((not image_meta.get('properties', None)) or
(not image_meta['properties'].get('virtual_size', None))):
LOG.info(_LI('Unable to create cache volume because image: %s '
'does not include properties.virtual_size'),
image_meta['id'])
return None, False
try:
self._verify_image_volume(context,
image_meta,
image_service)
except exception.SolidFireAPIException:
return None, False
account = self.configuration.sf_template_account_name
try:
(data, sfaccount, model) = self._do_clone_volume(image_meta['id'],
account,
volume)
except exception.VolumeNotFound:
if self._create_image_volume(context,
image_meta,
image_service,
image_meta['id']) is None:
# We failed, dump out
return None, False
# Ok, should be good to go now, try it again
(data, sfaccount, model) = self._do_clone_volume(image_meta['id'],
account,
volume)
return model, True
def create_volume(self, volume):
"""Create volume on SolidFire device.
The account is where CHAP settings are derived from, volume is
created and exported. Note that the new volume is immediately ready
for use.
One caveat here is that an existing user account must be specified
in the API call to create a new volume. We use a set algorithm to
determine account info based on passed in cinder volume object. First
we check to see if the account already exists (and use it), or if it
does not already exist, we'll go ahead and create it.
"""
slice_count = 1
attributes = {}
qos = {}
if (self.configuration.sf_allow_tenant_qos and
                volume.get('volume_metadata') is not None):
qos = self._set_qos_presets(volume)
ctxt = context.get_admin_context()
type_id = volume['volume_type_id']
if type_id is not None:
qos = self._set_qos_by_volume_type(ctxt, type_id)
create_time = volume['created_at'].isoformat()
attributes = {'uuid': volume['id'],
'is_clone': 'False',
'created_at': create_time}
if qos:
for k, v in qos.items():
attributes[k] = str(v)
sf_accounts = self._get_sfaccounts_for_tenant(volume['project_id'])
if not sf_accounts:
sf_account = self._create_sfaccount(volume['project_id'])
else:
sf_account = self._get_account_create_availability(sf_accounts)
params = {'name': 'UUID-%s' % volume['id'],
'accountID': sf_account['accountID'],
'sliceCount': slice_count,
'totalSize': int(volume['size'] * units.Gi),
'enable512e': self.configuration.sf_emulate_512,
'attributes': attributes,
'qos': qos}
# NOTE(jdg): Check if we're a migration tgt, if so
# use the old volume-id here for the SF Name
migration_status = volume.get('migration_status', None)
if migration_status and 'target' in migration_status:
k, v = migration_status.split(':')
params['name'] = 'UUID-%s' % v
params['attributes']['migration_uuid'] = volume['id']
params['attributes']['uuid'] = v
return self._do_volume_create(sf_account, params)
def create_cloned_volume(self, volume, src_vref):
"""Create a clone of an existing volume."""
(_data, _sfaccount, model) = self._do_clone_volume(
src_vref['id'],
src_vref['project_id'],
volume)
return model
def delete_volume(self, volume):
"""Delete SolidFire Volume from device.
SolidFire allows multiple volumes with same name,
volumeID is what's guaranteed unique.
"""
accounts = self._get_sfaccounts_for_tenant(volume['project_id'])
        if not accounts:
LOG.error(_LE("Account for Volume ID %s was not found on "
"the SolidFire Cluster while attempting "
"delete_volume operation!"), volume['id'])
LOG.error(_LE("This usually means the volume was never "
"successfully created."))
return
        sf_vol = None
        for acc in accounts:
            vols = self._get_volumes_for_account(acc['accountID'],
                                                 volume['id'])
            if vols:
                sf_vol = vols[0]
                break
if sf_vol is not None:
params = {'volumeID': sf_vol['volumeID']}
data = self._issue_api_request('DeleteVolume', params)
if 'result' not in data:
msg = _("Failed to delete SolidFire Volume: %s") % data
raise exception.SolidFireAPIException(msg)
else:
LOG.error(_LE("Volume ID %s was not found on "
"the SolidFire Cluster while attempting "
"delete_volume operation!"), volume['id'])
def delete_snapshot(self, snapshot):
"""Delete the specified snapshot from the SolidFire cluster."""
sf_snap_name = 'UUID-%s' % snapshot['id']
accounts = self._get_sfaccounts_for_tenant(snapshot['project_id'])
snap = None
for a in accounts:
params = {'accountID': a['accountID']}
sf_vol = self._get_sf_volume(snapshot['volume_id'], params)
sf_snaps = self._get_sf_snapshots(sf_vol['volumeID'])
snap = next((s for s in sf_snaps if s["name"] == sf_snap_name),
None)
if snap:
params = {'snapshotID': snap['snapshotID']}
data = self._issue_api_request('DeleteSnapshot',
params,
version='6.0')
if 'result' not in data:
msg = (_("Failed to delete SolidFire Snapshot: %s") %
data)
raise exception.SolidFireAPIException(msg)
return
# Make sure it's not "old style" using clones as snaps
LOG.debug("Snapshot not found, checking old style clones.")
self.delete_volume(snapshot)
def create_snapshot(self, snapshot):
sfaccount = self._get_sfaccount(snapshot['project_id'])
if sfaccount is None:
LOG.error(_LE("Account for Volume ID %s was not found on "
"the SolidFire Cluster while attempting "
"create_snapshot operation!"), snapshot['volume_id'])
params = {'accountID': sfaccount['accountID']}
sf_vol = self._get_sf_volume(snapshot['volume_id'], params)
if sf_vol is None:
raise exception.VolumeNotFound(volume_id=snapshot['volume_id'])
params = {'volumeID': sf_vol['volumeID'],
'name': 'UUID-%s' % snapshot['id']}
self._do_snapshot_create(params)
def create_volume_from_snapshot(self, volume, snapshot):
"""Create a volume from the specified snapshot."""
(_data, _sfaccount, model) = self._do_clone_volume(
snapshot['id'],
snapshot['project_id'],
volume)
return model
def get_volume_stats(self, refresh=False):
"""Get volume status.
If 'refresh' is True, run update first.
The name is a bit misleading as
the majority of the data here is cluster
data
"""
if refresh:
try:
self._update_cluster_status()
except exception.SolidFireAPIException:
pass
return self.cluster_stats
def extend_volume(self, volume, new_size):
"""Extend an existing volume."""
sfaccount = self._get_sfaccount(volume['project_id'])
params = {'accountID': sfaccount['accountID']}
sf_vol = self._get_sf_volume(volume['id'], params)
if sf_vol is None:
LOG.error(_LE("Volume ID %s was not found on "
"the SolidFire Cluster while attempting "
"extend_volume operation!"), volume['id'])
raise exception.VolumeNotFound(volume_id=volume['id'])
params = {
'volumeID': sf_vol['volumeID'],
'totalSize': int(new_size * units.Gi)
}
data = self._issue_api_request('ModifyVolume',
params, version='5.0')
if 'result' not in data:
raise exception.SolidFireAPIDataException(data=data)
def _update_cluster_status(self):
"""Retrieve status info for the Cluster."""
params = {}
# NOTE(jdg): The SF api provides an UNBELIEVABLE amount
# of stats data, this is just one of the calls
results = self._issue_api_request('GetClusterCapacity', params)
if 'result' not in results:
LOG.error(_LE('Failed to get updated stats'))
results = results['result']['clusterCapacity']
free_capacity = (
results['maxProvisionedSpace'] - results['usedSpace'])
data = {}
backend_name = self.configuration.safe_get('volume_backend_name')
data["volume_backend_name"] = backend_name or self.__class__.__name__
data["vendor_name"] = 'SolidFire Inc'
data["driver_version"] = self.VERSION
data["storage_protocol"] = 'iSCSI'
data['total_capacity_gb'] = (
float(results['maxProvisionedSpace'] / units.Gi))
data['free_capacity_gb'] = float(free_capacity / units.Gi)
data['reserved_percentage'] = self.configuration.reserved_percentage
data['QoS_support'] = True
data['compression_percent'] = (
results['compressionPercent'])
        data['deduplication_percent'] = (
results['deDuplicationPercent'])
data['thin_provision_percent'] = (
results['thinProvisioningPercent'])
self.cluster_stats = data
def attach_volume(self, context, volume,
instance_uuid, host_name,
mountpoint):
sfaccount = self._get_sfaccount(volume['project_id'])
params = {'accountID': sfaccount['accountID']}
sf_vol = self._get_sf_volume(volume['id'], params)
if sf_vol is None:
LOG.error(_LE("Volume ID %s was not found on "
"the SolidFire Cluster while attempting "
"attach_volume operation!"), volume['id'])
raise exception.VolumeNotFound(volume_id=volume['id'])
attributes = sf_vol['attributes']
attributes['attach_time'] = volume.get('attach_time', None)
attributes['attached_to'] = instance_uuid
params = {
'volumeID': sf_vol['volumeID'],
'attributes': attributes
}
data = self._issue_api_request('ModifyVolume', params)
if 'result' not in data:
raise exception.SolidFireAPIDataException(data=data)
def detach_volume(self, context, volume, attachment=None):
sfaccount = self._get_sfaccount(volume['project_id'])
params = {'accountID': sfaccount['accountID']}
sf_vol = self._get_sf_volume(volume['id'], params)
if sf_vol is None:
LOG.error(_LE("Volume ID %s was not found on "
"the SolidFire Cluster while attempting "
"detach_volume operation!"), volume['id'])
raise exception.VolumeNotFound(volume_id=volume['id'])
attributes = sf_vol['attributes']
attributes['attach_time'] = None
attributes['attached_to'] = None
params = {
'volumeID': sf_vol['volumeID'],
'attributes': attributes
}
data = self._issue_api_request('ModifyVolume', params)
if 'result' not in data:
raise exception.SolidFireAPIDataException(data=data)
def accept_transfer(self, context, volume,
new_user, new_project):
sfaccount = self._get_sfaccount(volume['project_id'])
params = {'accountID': sfaccount['accountID']}
sf_vol = self._get_sf_volume(volume['id'], params)
if sf_vol is None:
LOG.error(_LE("Volume ID %s was not found on "
"the SolidFire Cluster while attempting "
"accept_transfer operation!"), volume['id'])
raise exception.VolumeNotFound(volume_id=volume['id'])
if new_project != volume['project_id']:
# do a create_sfaccount here as this tenant
# may not exist on the cluster yet
sfaccount = self._create_sfaccount(new_project)
params = {
'volumeID': sf_vol['volumeID'],
'accountID': sfaccount['accountID']
}
data = self._issue_api_request('ModifyVolume',
params, version='5.0')
if 'result' not in data:
raise exception.SolidFireAPIDataException(data=data)
volume['project_id'] = new_project
volume['user_id'] = new_user
return self.target_driver.ensure_export(context, volume, None)
def retype(self, ctxt, volume, new_type, diff, host):
"""Convert the volume to be of the new type.
Returns a boolean indicating whether the retype occurred.
:param ctxt: Context
:param volume: A dictionary describing the volume to migrate
:param new_type: A dictionary describing the volume type to convert to
:param diff: A dictionary with the difference between the two types
:param host: A dictionary describing the host to migrate to, where
host['host'] is its name, and host['capabilities'] is a
dictionary of its reported capabilities (Not Used).
"""
qos = {}
attributes = {}
sfaccount = self._get_sfaccount(volume['project_id'])
params = {'accountID': sfaccount['accountID']}
sf_vol = self._get_sf_volume(volume['id'], params)
if sf_vol is None:
raise exception.VolumeNotFound(volume_id=volume['id'])
attributes = sf_vol['attributes']
attributes['retyped_at'] = timeutils.utcnow().isoformat()
params = {'volumeID': sf_vol['volumeID']}
qos = self._set_qos_by_volume_type(ctxt, new_type['id'])
if qos:
params['qos'] = qos
for k, v in qos.items():
attributes[k] = str(v)
params['attributes'] = attributes
self._issue_api_request('ModifyVolume', params)
return True
def manage_existing(self, volume, external_ref):
"""Manages an existing SolidFire Volume (import to Cinder).
Renames the Volume to match the expected name for the volume.
Also need to consider things like QoS, Emulation, account/tenant.
"""
sfid = external_ref.get('source-id', None)
sfname = external_ref.get('name', None)
if sfid is None:
raise exception.SolidFireAPIException(_("Manage existing volume "
"requires 'source-id'."))
# First get the volume on the SF cluster (MUST be active)
params = {'startVolumeID': sfid,
'limit': 1}
data = self._issue_api_request('ListActiveVolumes', params)
if 'result' not in data:
raise exception.SolidFireAPIDataException(data=data)
sf_ref = data['result']['volumes'][0]
sfaccount = self._create_sfaccount(volume['project_id'])
attributes = {}
qos = {}
if (self.configuration.sf_allow_tenant_qos and
                volume.get('volume_metadata') is not None):
qos = self._set_qos_presets(volume)
ctxt = context.get_admin_context()
type_id = volume.get('volume_type_id', None)
if type_id is not None:
qos = self._set_qos_by_volume_type(ctxt, type_id)
import_time = volume['created_at'].isoformat()
attributes = {'uuid': volume['id'],
'is_clone': 'False',
'os_imported_at': import_time,
'old_name': sfname}
if qos:
for k, v in qos.items():
attributes[k] = str(v)
params = {'name': volume['name'],
'volumeID': sf_ref['volumeID'],
'accountID': sfaccount['accountID'],
'enable512e': self.configuration.sf_emulate_512,
'attributes': attributes,
'qos': qos}
data = self._issue_api_request('ModifyVolume',
params, version='5.0')
if 'result' not in data:
raise exception.SolidFireAPIDataException(data=data)
return self._get_model_info(sfaccount, sf_ref['volumeID'])
def manage_existing_get_size(self, volume, external_ref):
"""Return size of an existing LV for manage_existing.
existing_ref is a dictionary of the form:
{'name': <name of existing volume on SF Cluster>}
"""
sfid = external_ref.get('source-id', None)
if sfid is None:
raise exception.SolidFireAPIException(_("Manage existing get size "
"requires 'id'."))
params = {'startVolumeID': int(sfid),
'limit': 1}
data = self._issue_api_request('ListActiveVolumes', params)
if 'result' not in data:
raise exception.SolidFireAPIDataException(data=data)
sf_ref = data['result']['volumes'][0]
return int(sf_ref['totalSize']) / int(units.Gi)
def unmanage(self, volume):
"""Mark SolidFire Volume as unmanaged (export from Cinder)."""
sfaccount = self._get_sfaccount(volume['project_id'])
if sfaccount is None:
LOG.error(_LE("Account for Volume ID %s was not found on "
"the SolidFire Cluster while attempting "
"unmanage operation!"), volume['id'])
raise exception.SolidFireAPIException(_("Failed to find account "
"for volume."))
params = {'accountID': sfaccount['accountID']}
sf_vol = self._get_sf_volume(volume['id'], params)
if sf_vol is None:
raise exception.VolumeNotFound(volume_id=volume['id'])
export_time = timeutils.utcnow().isoformat()
attributes = sf_vol['attributes']
attributes['os_exported_at'] = export_time
params = {'volumeID': int(sf_vol['volumeID']),
'attributes': attributes}
data = self._issue_api_request('ModifyVolume',
params, version='5.0')
if 'result' not in data:
raise exception.SolidFireAPIDataException(data=data)
# #### Interface methods for transport layer #### #
    # TODO(jdg): SolidFire can mix iSCSI and FC on the same cluster;
    # we'll modify these later to check, based on the volume info,
    # whether we need an FC target driver or an iSCSI target driver.
def ensure_export(self, context, volume):
return self.target_driver.ensure_export(context, volume, None)
def create_export(self, context, volume, connector):
return self.target_driver.create_export(
context,
volume,
None)
def remove_export(self, context, volume):
return self.target_driver.remove_export(context, volume)
def initialize_connection(self, volume, connector):
return self.target_driver.initialize_connection(volume, connector)
def validate_connector(self, connector):
return self.target_driver.validate_connector(connector)
def terminate_connection(self, volume, connector, **kwargs):
return self.target_driver.terminate_connection(volume, connector,
**kwargs)
class SolidFireISCSI(iscsi_driver.SanISCSITarget):
def __init__(self, *args, **kwargs):
super(SolidFireISCSI, self).__init__(*args, **kwargs)
self.sf_driver = kwargs.get('solidfire_driver')
def _do_iscsi_export(self, volume):
sfaccount = self.sf_driver._get_sfaccount(volume['project_id'])
model_update = {}
model_update['provider_auth'] = ('CHAP %s %s'
% (sfaccount['username'],
sfaccount['targetSecret']))
return model_update
def create_export(self, context, volume, volume_path):
return self._do_iscsi_export(volume)
def ensure_export(self, context, volume, volume_path):
try:
return self._do_iscsi_export(volume)
except exception.SolidFireAPIException:
return None
    # The following are abstract methods (abc's) that must be implemented.
    # We don't use them in this driver, so they are simply stubbed out here.
def remove_export(self, context, volume):
pass
def terminate_connection(self, volume, connector, **kwargs):
pass
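# Illustrative sketch (not part of the driver): the external_ref passed to
# manage_existing()/manage_existing_get_size() above is assumed to carry the
# SolidFire volumeID under 'source-id' (and, optionally, the volume's current
# name on the cluster under 'name'), e.g.:
#
#     external_ref = {'source-id': 42, 'name': 'sf-original-volume'}
#     size_gb = driver.manage_existing_get_size(volume, external_ref)  # totalSize / units.Gi
#     driver.manage_existing(volume, external_ref)
#
# unmanage() then only re-tags the volume's attributes ('os_exported_at')
# rather than deleting it from the cluster.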
|
# Copyright 2012 (C) Mickael Menu <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
from string import Template
from .tags import *
class FileTemplate(object):
"""
The file template tool generates a full LaTeX/TikZ source from a template, preamble
and source.
"""
def __init__(self, template, preamble, source):
assert preamble is not None and source is not None
super(FileTemplate, self).__init__()
self.content = ""
self.preamble = preamble
self.source = source
self.latex_template = Template(template)
def buildFileContent(self):
"""
        Builds the TikZ document from the document template with the given preamble and source.
"""
self._buildPreambleChunk()
self._buildSourceChunk()
self._buildContentFromTemplate()
return self.content
def _buildPreambleChunk(self):
self.preamble = "%s\n%s\n%s\n" % (PREAMBLE_BEGIN_TAG, self.preamble, PREAMBLE_END_TAG)
def _buildSourceChunk(self):
self.source = "%s\n%s\n%s\n" % (SOURCE_BEGIN_TAG, self.source, SOURCE_END_TAG)
def _buildContentFromTemplate(self):
self.content = TIKZ_TAG + "\n"
self.content += self.latex_template.safe_substitute(PREAMBLE=self.preamble, SOURCE=self.source)
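# Minimal usage sketch (hypothetical template string; the PREAMBLE_*/SOURCE_*/
# TIKZ tag constants come from .tags): the template's $PREAMBLE and $SOURCE
# placeholders are filled via Template.safe_substitute().
#
#     template = ("\\documentclass{article}\n"
#                 "$PREAMBLE\n"
#                 "\\begin{document}\n$SOURCE\n\\end{document}\n")
#     builder = FileTemplate(template,
#                            preamble="\\usepackage{tikz}",
#                            source="\\begin{tikzpicture}\\draw (0,0)--(1,1);\\end{tikzpicture}")
#     latex_source = builder.buildFileContent()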
|
import copy
from corehq.apps.accounting.models import Subscription
from corehq.apps.domain.models import Domain
from corehq.pillows.base import HQPillow
from corehq.pillows.mappings.domain_mapping import DOMAIN_MAPPING, DOMAIN_INDEX
from dimagi.utils.decorators.memoized import memoized
from django.conf import settings
from django_countries.countries import OFFICIAL_COUNTRIES
class DomainPillow(HQPillow):
"""
    Domain document indexer for the hqdomains Elasticsearch index
"""
document_class = Domain
couch_filter = "domain/domains_inclusive"
es_alias = "hqdomains"
es_type = "hqdomain"
es_index = DOMAIN_INDEX
default_mapping = DOMAIN_MAPPING
es_meta = {
"settings": {
"analysis": {
"analyzer": {
"default": {
"type": "custom",
"tokenizer": "whitespace",
"filter": ["lowercase"]
},
"comma": {
"type": "pattern",
"pattern": "\s*,\s*"
},
}
}
}
}
def get_unique_id(self):
return DOMAIN_INDEX
@memoized
def calc_meta(self):
"""
        Override of the meta calculator. Since we're separating out all the types,
        we just hash the "prototype" (es_meta plus mapping) to determine the md5.
"""
return self.calc_mapping_hash({"es_meta": self.es_meta,
"mapping": self.default_mapping})
def change_transform(self, doc_dict):
doc_ret = copy.deepcopy(doc_dict)
sub = Subscription.objects.filter(
subscriber__domain=doc_dict['name'],
is_active=True)
        # work on the copied doc so the incoming doc_dict is left untouched
        doc_ret['deployment'] = doc_dict.get('deployment', None) or {}
        countries = doc_ret['deployment'].get('countries', [])
        doc_ret['deployment']['countries'] = []
if sub:
doc_ret['subscription'] = sub[0].plan_version.plan.edition
for country in countries:
doc_ret['deployment']['countries'].append(OFFICIAL_COUNTRIES[country])
return doc_ret
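# Illustrative sketch (hypothetical input): given a domain doc such as
#
#     {'name': 'demo', 'deployment': {'countries': ['FR']}}
#
# change_transform() above is expected to return a deep copy whose
# deployment.countries entries are expanded through OFFICIAL_COUNTRIES
# (ISO code mapped to its official country name) and which gains a
# 'subscription' key holding the plan edition whenever an active
# Subscription exists for the domain.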
|
# Copyright (c) 2012 NTT DOCOMO, INC.
# Copyright 2011 OpenStack Foundation
# Copyright 2011 Ilya Alekseyev
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import base64
import gzip
import os
import shutil
import stat
import tempfile
import time
import types
import mock
from oslo_concurrency import processutils
from oslo_config import cfg
from oslo_utils import uuidutils
import requests
import testtools
from ironic.common import boot_devices
from ironic.common import disk_partitioner
from ironic.common import exception
from ironic.common import images
from ironic.common import states
from ironic.common import utils as common_utils
from ironic.conductor import task_manager
from ironic.conductor import utils as manager_utils
from ironic.drivers.modules import agent_client
from ironic.drivers.modules import deploy_utils as utils
from ironic.drivers.modules import image_cache
from ironic.tests import base as tests_base
from ironic.tests.conductor import utils as mgr_utils
from ironic.tests.db import base as db_base
from ironic.tests.db import utils as db_utils
from ironic.tests.objects import utils as obj_utils
_PXECONF_DEPLOY = b"""
default deploy
label deploy
kernel deploy_kernel
append initrd=deploy_ramdisk
ipappend 3
label boot_partition
kernel kernel
append initrd=ramdisk root={{ ROOT }}
label boot_whole_disk
COM32 chain.c32
append mbr:{{ DISK_IDENTIFIER }}
"""
_PXECONF_BOOT_PARTITION = """
default boot_partition
label deploy
kernel deploy_kernel
append initrd=deploy_ramdisk
ipappend 3
label boot_partition
kernel kernel
append initrd=ramdisk root=UUID=12345678-1234-1234-1234-1234567890abcdef
label boot_whole_disk
COM32 chain.c32
append mbr:{{ DISK_IDENTIFIER }}
"""
_PXECONF_BOOT_WHOLE_DISK = """
default boot_whole_disk
label deploy
kernel deploy_kernel
append initrd=deploy_ramdisk
ipappend 3
label boot_partition
kernel kernel
append initrd=ramdisk root={{ ROOT }}
label boot_whole_disk
COM32 chain.c32
append mbr:0x12345678
"""
_IPXECONF_DEPLOY = b"""
#!ipxe
dhcp
goto deploy
:deploy
kernel deploy_kernel
initrd deploy_ramdisk
boot
:boot_partition
kernel kernel
append initrd=ramdisk root={{ ROOT }}
boot
:boot_whole_disk
kernel chain.c32
append mbr:{{ DISK_IDENTIFIER }}
boot
"""
_IPXECONF_BOOT_PARTITION = """
#!ipxe
dhcp
goto boot_partition
:deploy
kernel deploy_kernel
initrd deploy_ramdisk
boot
:boot_partition
kernel kernel
append initrd=ramdisk root=UUID=12345678-1234-1234-1234-1234567890abcdef
boot
:boot_whole_disk
kernel chain.c32
append mbr:{{ DISK_IDENTIFIER }}
boot
"""
_IPXECONF_BOOT_WHOLE_DISK = """
#!ipxe
dhcp
goto boot_whole_disk
:deploy
kernel deploy_kernel
initrd deploy_ramdisk
boot
:boot_partition
kernel kernel
append initrd=ramdisk root={{ ROOT }}
boot
:boot_whole_disk
kernel chain.c32
append mbr:0x12345678
boot
"""
_UEFI_PXECONF_DEPLOY = b"""
default=deploy
image=deploy_kernel
label=deploy
initrd=deploy_ramdisk
append="ro text"
image=kernel
label=boot_partition
initrd=ramdisk
append="root={{ ROOT }}"
image=chain.c32
label=boot_whole_disk
append="mbr:{{ DISK_IDENTIFIER }}"
"""
_UEFI_PXECONF_BOOT_PARTITION = """
default=boot_partition
image=deploy_kernel
label=deploy
initrd=deploy_ramdisk
append="ro text"
image=kernel
label=boot_partition
initrd=ramdisk
append="root=UUID=12345678-1234-1234-1234-1234567890abcdef"
image=chain.c32
label=boot_whole_disk
append="mbr:{{ DISK_IDENTIFIER }}"
"""
_UEFI_PXECONF_BOOT_WHOLE_DISK = """
default=boot_whole_disk
image=deploy_kernel
label=deploy
initrd=deploy_ramdisk
append="ro text"
image=kernel
label=boot_partition
initrd=ramdisk
append="root={{ ROOT }}"
image=chain.c32
label=boot_whole_disk
append="mbr:0x12345678"
"""
@mock.patch.object(time, 'sleep', lambda seconds: None)
class PhysicalWorkTestCase(tests_base.TestCase):
def _mock_calls(self, name_list):
patch_list = [mock.patch.object(utils, name,
spec_set=types.FunctionType)
for name in name_list]
mock_list = [patcher.start() for patcher in patch_list]
for patcher in patch_list:
self.addCleanup(patcher.stop)
parent_mock = mock.MagicMock(spec=[])
for mocker, name in zip(mock_list, name_list):
parent_mock.attach_mock(mocker, name)
return parent_mock
def _test_deploy_partition_image(self, boot_option=None, boot_mode=None):
"""Check loosely all functions are called with right args."""
address = '127.0.0.1'
port = 3306
iqn = 'iqn.xyz'
lun = 1
image_path = '/tmp/xyz/image'
root_mb = 128
swap_mb = 64
ephemeral_mb = 0
ephemeral_format = None
configdrive_mb = 0
node_uuid = "12345678-1234-1234-1234-1234567890abcxyz"
dev = '/dev/fake'
swap_part = '/dev/fake-part1'
root_part = '/dev/fake-part2'
root_uuid = '12345678-1234-1234-12345678-12345678abcdef'
name_list = ['get_dev', 'get_image_mb', 'discovery', 'login_iscsi',
'logout_iscsi', 'delete_iscsi', 'make_partitions',
'is_block_device', 'populate_image', 'mkfs',
'block_uuid', 'notify', 'destroy_disk_metadata']
parent_mock = self._mock_calls(name_list)
parent_mock.get_dev.return_value = dev
parent_mock.get_image_mb.return_value = 1
parent_mock.is_block_device.return_value = True
parent_mock.block_uuid.return_value = root_uuid
parent_mock.make_partitions.return_value = {'root': root_part,
'swap': swap_part}
make_partitions_expected_args = [dev, root_mb, swap_mb, ephemeral_mb,
configdrive_mb]
make_partitions_expected_kwargs = {'commit': True}
deploy_kwargs = {}
if boot_option:
make_partitions_expected_kwargs['boot_option'] = boot_option
deploy_kwargs['boot_option'] = boot_option
else:
make_partitions_expected_kwargs['boot_option'] = 'netboot'
if boot_mode:
make_partitions_expected_kwargs['boot_mode'] = boot_mode
deploy_kwargs['boot_mode'] = boot_mode
else:
make_partitions_expected_kwargs['boot_mode'] = 'bios'
# If no boot_option, then it should default to netboot.
calls_expected = [mock.call.get_image_mb(image_path),
mock.call.get_dev(address, port, iqn, lun),
mock.call.discovery(address, port),
mock.call.login_iscsi(address, port, iqn),
mock.call.is_block_device(dev),
mock.call.destroy_disk_metadata(dev, node_uuid),
mock.call.make_partitions(
*make_partitions_expected_args,
**make_partitions_expected_kwargs),
mock.call.is_block_device(root_part),
mock.call.is_block_device(swap_part),
mock.call.populate_image(image_path, root_part),
mock.call.mkfs(dev=swap_part, fs='swap',
label='swap1'),
mock.call.block_uuid(root_part),
mock.call.logout_iscsi(address, port, iqn),
mock.call.delete_iscsi(address, port, iqn)]
uuids_dict_returned = utils.deploy_partition_image(
address, port, iqn, lun, image_path, root_mb, swap_mb,
ephemeral_mb, ephemeral_format, node_uuid, **deploy_kwargs)
self.assertEqual(calls_expected, parent_mock.mock_calls)
expected_uuid_dict = {
'root uuid': root_uuid,
'efi system partition uuid': None}
self.assertEqual(expected_uuid_dict, uuids_dict_returned)
def test_deploy_partition_image_without_boot_option(self):
self._test_deploy_partition_image()
def test_deploy_partition_image_netboot(self):
self._test_deploy_partition_image(boot_option="netboot")
def test_deploy_partition_image_localboot(self):
self._test_deploy_partition_image(boot_option="local")
def test_deploy_partition_image_wo_boot_option_and_wo_boot_mode(self):
self._test_deploy_partition_image()
def test_deploy_partition_image_netboot_bios(self):
self._test_deploy_partition_image(boot_option="netboot",
boot_mode="bios")
def test_deploy_partition_image_localboot_bios(self):
self._test_deploy_partition_image(boot_option="local",
boot_mode="bios")
def test_deploy_partition_image_netboot_uefi(self):
self._test_deploy_partition_image(boot_option="netboot",
boot_mode="uefi")
@mock.patch.object(utils, 'get_image_mb', return_value=129, autospec=True)
def test_deploy_partition_image_image_exceeds_root_partition(self,
gim_mock):
address = '127.0.0.1'
port = 3306
iqn = 'iqn.xyz'
lun = 1
image_path = '/tmp/xyz/image'
root_mb = 128
swap_mb = 64
ephemeral_mb = 0
ephemeral_format = None
node_uuid = "12345678-1234-1234-1234-1234567890abcxyz"
self.assertRaises(exception.InstanceDeployFailure,
utils.deploy_partition_image, address, port, iqn,
lun, image_path, root_mb, swap_mb, ephemeral_mb,
ephemeral_format, node_uuid)
gim_mock.assert_called_once_with(image_path)
# We mock utils.block_uuid separately here because we can't predict
# the order in which it will be called.
@mock.patch.object(utils, 'block_uuid', autospec=True)
def test_deploy_partition_image_localboot_uefi(self, block_uuid_mock):
"""Check loosely all functions are called with right args."""
address = '127.0.0.1'
port = 3306
iqn = 'iqn.xyz'
lun = 1
image_path = '/tmp/xyz/image'
root_mb = 128
swap_mb = 64
ephemeral_mb = 0
ephemeral_format = None
configdrive_mb = 0
node_uuid = "12345678-1234-1234-1234-1234567890abcxyz"
dev = '/dev/fake'
swap_part = '/dev/fake-part2'
root_part = '/dev/fake-part3'
efi_system_part = '/dev/fake-part1'
root_uuid = '12345678-1234-1234-12345678-12345678abcdef'
efi_system_part_uuid = '9036-482'
name_list = ['get_dev', 'get_image_mb', 'discovery', 'login_iscsi',
'logout_iscsi', 'delete_iscsi', 'make_partitions',
'is_block_device', 'populate_image', 'mkfs',
'notify', 'destroy_disk_metadata']
parent_mock = self._mock_calls(name_list)
parent_mock.get_dev.return_value = dev
parent_mock.get_image_mb.return_value = 1
parent_mock.is_block_device.return_value = True
def block_uuid_side_effect(device):
if device == root_part:
return root_uuid
if device == efi_system_part:
return efi_system_part_uuid
block_uuid_mock.side_effect = block_uuid_side_effect
parent_mock.make_partitions.return_value = {
'root': root_part, 'swap': swap_part,
'efi system partition': efi_system_part}
# If no boot_option, then it should default to netboot.
calls_expected = [mock.call.get_image_mb(image_path),
mock.call.get_dev(address, port, iqn, lun),
mock.call.discovery(address, port),
mock.call.login_iscsi(address, port, iqn),
mock.call.is_block_device(dev),
mock.call.destroy_disk_metadata(dev, node_uuid),
mock.call.make_partitions(dev, root_mb, swap_mb,
ephemeral_mb,
configdrive_mb,
commit=True,
boot_option="local",
boot_mode="uefi"),
mock.call.is_block_device(root_part),
mock.call.is_block_device(swap_part),
mock.call.is_block_device(efi_system_part),
mock.call.mkfs(dev=efi_system_part, fs='vfat',
label='efi-part'),
mock.call.populate_image(image_path, root_part),
mock.call.mkfs(dev=swap_part, fs='swap',
label='swap1'),
mock.call.logout_iscsi(address, port, iqn),
mock.call.delete_iscsi(address, port, iqn)]
uuid_dict_returned = utils.deploy_partition_image(
address, port, iqn, lun, image_path, root_mb, swap_mb,
ephemeral_mb, ephemeral_format, node_uuid, boot_option="local",
boot_mode="uefi")
self.assertEqual(calls_expected, parent_mock.mock_calls)
block_uuid_mock.assert_any_call('/dev/fake-part1')
block_uuid_mock.assert_any_call('/dev/fake-part3')
expected_uuid_dict = {
'root uuid': root_uuid,
'efi system partition uuid': efi_system_part_uuid}
self.assertEqual(expected_uuid_dict, uuid_dict_returned)
def test_deploy_partition_image_without_swap(self):
"""Check loosely all functions are called with right args."""
address = '127.0.0.1'
port = 3306
iqn = 'iqn.xyz'
lun = 1
image_path = '/tmp/xyz/image'
root_mb = 128
swap_mb = 0
ephemeral_mb = 0
ephemeral_format = None
configdrive_mb = 0
node_uuid = "12345678-1234-1234-1234-1234567890abcxyz"
dev = '/dev/fake'
root_part = '/dev/fake-part1'
root_uuid = '12345678-1234-1234-12345678-12345678abcdef'
name_list = ['get_dev', 'get_image_mb', 'discovery', 'login_iscsi',
'logout_iscsi', 'delete_iscsi', 'make_partitions',
'is_block_device', 'populate_image', 'block_uuid',
'notify', 'destroy_disk_metadata']
parent_mock = self._mock_calls(name_list)
parent_mock.get_dev.return_value = dev
parent_mock.get_image_mb.return_value = 1
parent_mock.is_block_device.return_value = True
parent_mock.block_uuid.return_value = root_uuid
parent_mock.make_partitions.return_value = {'root': root_part}
calls_expected = [mock.call.get_image_mb(image_path),
mock.call.get_dev(address, port, iqn, lun),
mock.call.discovery(address, port),
mock.call.login_iscsi(address, port, iqn),
mock.call.is_block_device(dev),
mock.call.destroy_disk_metadata(dev, node_uuid),
mock.call.make_partitions(dev, root_mb, swap_mb,
ephemeral_mb,
configdrive_mb,
commit=True,
boot_option="netboot",
boot_mode="bios"),
mock.call.is_block_device(root_part),
mock.call.populate_image(image_path, root_part),
mock.call.block_uuid(root_part),
mock.call.logout_iscsi(address, port, iqn),
mock.call.delete_iscsi(address, port, iqn)]
uuid_dict_returned = utils.deploy_partition_image(address, port, iqn,
lun, image_path,
root_mb, swap_mb,
ephemeral_mb,
ephemeral_format,
node_uuid)
self.assertEqual(calls_expected, parent_mock.mock_calls)
self.assertEqual(root_uuid, uuid_dict_returned['root uuid'])
def test_deploy_partition_image_with_ephemeral(self):
"""Check loosely all functions are called with right args."""
address = '127.0.0.1'
port = 3306
iqn = 'iqn.xyz'
lun = 1
image_path = '/tmp/xyz/image'
root_mb = 128
swap_mb = 64
ephemeral_mb = 256
configdrive_mb = 0
ephemeral_format = 'exttest'
node_uuid = "12345678-1234-1234-1234-1234567890abcxyz"
dev = '/dev/fake'
ephemeral_part = '/dev/fake-part1'
swap_part = '/dev/fake-part2'
root_part = '/dev/fake-part3'
root_uuid = '12345678-1234-1234-12345678-12345678abcdef'
name_list = ['get_dev', 'get_image_mb', 'discovery', 'login_iscsi',
'logout_iscsi', 'delete_iscsi', 'make_partitions',
'is_block_device', 'populate_image', 'mkfs',
'block_uuid', 'notify', 'destroy_disk_metadata']
parent_mock = self._mock_calls(name_list)
parent_mock.get_dev.return_value = dev
parent_mock.get_image_mb.return_value = 1
parent_mock.is_block_device.return_value = True
parent_mock.block_uuid.return_value = root_uuid
parent_mock.make_partitions.return_value = {'swap': swap_part,
'ephemeral': ephemeral_part,
'root': root_part}
calls_expected = [mock.call.get_image_mb(image_path),
mock.call.get_dev(address, port, iqn, lun),
mock.call.discovery(address, port),
mock.call.login_iscsi(address, port, iqn),
mock.call.is_block_device(dev),
mock.call.destroy_disk_metadata(dev, node_uuid),
mock.call.make_partitions(dev, root_mb, swap_mb,
ephemeral_mb,
configdrive_mb,
commit=True,
boot_option="netboot",
boot_mode="bios"),
mock.call.is_block_device(root_part),
mock.call.is_block_device(swap_part),
mock.call.is_block_device(ephemeral_part),
mock.call.populate_image(image_path, root_part),
mock.call.mkfs(dev=swap_part, fs='swap',
label='swap1'),
mock.call.mkfs(dev=ephemeral_part,
fs=ephemeral_format,
label='ephemeral0'),
mock.call.block_uuid(root_part),
mock.call.logout_iscsi(address, port, iqn),
mock.call.delete_iscsi(address, port, iqn)]
uuid_dict_returned = utils.deploy_partition_image(address, port, iqn,
lun, image_path,
root_mb, swap_mb,
ephemeral_mb,
ephemeral_format,
node_uuid)
self.assertEqual(calls_expected, parent_mock.mock_calls)
self.assertEqual(root_uuid, uuid_dict_returned['root uuid'])
def test_deploy_partition_image_preserve_ephemeral(self):
"""Check if all functions are called with right args."""
address = '127.0.0.1'
port = 3306
iqn = 'iqn.xyz'
lun = 1
image_path = '/tmp/xyz/image'
root_mb = 128
swap_mb = 64
ephemeral_mb = 256
ephemeral_format = 'exttest'
configdrive_mb = 0
node_uuid = "12345678-1234-1234-1234-1234567890abcxyz"
dev = '/dev/fake'
ephemeral_part = '/dev/fake-part1'
swap_part = '/dev/fake-part2'
root_part = '/dev/fake-part3'
root_uuid = '12345678-1234-1234-12345678-12345678abcdef'
name_list = ['get_dev', 'get_image_mb', 'discovery', 'login_iscsi',
'logout_iscsi', 'delete_iscsi', 'make_partitions',
'is_block_device', 'populate_image', 'mkfs',
'block_uuid', 'notify', 'get_dev_block_size']
parent_mock = self._mock_calls(name_list)
parent_mock.get_dev.return_value = dev
parent_mock.get_image_mb.return_value = 1
parent_mock.is_block_device.return_value = True
parent_mock.block_uuid.return_value = root_uuid
parent_mock.make_partitions.return_value = {'swap': swap_part,
'ephemeral': ephemeral_part,
'root': root_part}
parent_mock.block_uuid.return_value = root_uuid
calls_expected = [mock.call.get_image_mb(image_path),
mock.call.get_dev(address, port, iqn, lun),
mock.call.discovery(address, port),
mock.call.login_iscsi(address, port, iqn),
mock.call.is_block_device(dev),
mock.call.make_partitions(dev, root_mb, swap_mb,
ephemeral_mb,
configdrive_mb,
commit=False,
boot_option="netboot",
boot_mode="bios"),
mock.call.is_block_device(root_part),
mock.call.is_block_device(swap_part),
mock.call.is_block_device(ephemeral_part),
mock.call.populate_image(image_path, root_part),
mock.call.mkfs(dev=swap_part, fs='swap',
label='swap1'),
mock.call.block_uuid(root_part),
mock.call.logout_iscsi(address, port, iqn),
mock.call.delete_iscsi(address, port, iqn)]
uuid_dict_returned = utils.deploy_partition_image(
address, port, iqn, lun, image_path, root_mb, swap_mb,
ephemeral_mb, ephemeral_format, node_uuid,
preserve_ephemeral=True, boot_option="netboot")
self.assertEqual(calls_expected, parent_mock.mock_calls)
self.assertFalse(parent_mock.get_dev_block_size.called)
self.assertEqual(root_uuid, uuid_dict_returned['root uuid'])
@mock.patch.object(common_utils, 'unlink_without_raise', autospec=True)
def test_deploy_partition_image_with_configdrive(self, mock_unlink):
"""Check loosely all functions are called with right args."""
address = '127.0.0.1'
port = 3306
iqn = 'iqn.xyz'
lun = 1
image_path = '/tmp/xyz/image'
root_mb = 128
swap_mb = 0
ephemeral_mb = 0
configdrive_mb = 10
ephemeral_format = None
node_uuid = "12345678-1234-1234-1234-1234567890abcxyz"
configdrive_url = 'http://1.2.3.4/cd'
dev = '/dev/fake'
configdrive_part = '/dev/fake-part1'
root_part = '/dev/fake-part2'
root_uuid = '12345678-1234-1234-12345678-12345678abcdef'
name_list = ['get_dev', 'get_image_mb', 'discovery', 'login_iscsi',
'logout_iscsi', 'delete_iscsi', 'make_partitions',
'is_block_device', 'populate_image', 'block_uuid',
'notify', 'destroy_disk_metadata', 'dd',
'_get_configdrive']
parent_mock = self._mock_calls(name_list)
parent_mock.get_dev.return_value = dev
parent_mock.get_image_mb.return_value = 1
parent_mock.is_block_device.return_value = True
parent_mock.block_uuid.return_value = root_uuid
parent_mock.make_partitions.return_value = {'root': root_part,
'configdrive':
configdrive_part}
parent_mock._get_configdrive.return_value = (10, 'configdrive-path')
calls_expected = [mock.call.get_image_mb(image_path),
mock.call.get_dev(address, port, iqn, lun),
mock.call.discovery(address, port),
mock.call.login_iscsi(address, port, iqn),
mock.call.is_block_device(dev),
mock.call.destroy_disk_metadata(dev, node_uuid),
mock.call._get_configdrive(configdrive_url,
node_uuid),
mock.call.make_partitions(dev, root_mb, swap_mb,
ephemeral_mb,
configdrive_mb,
commit=True,
boot_option="netboot",
boot_mode="bios"),
mock.call.is_block_device(root_part),
mock.call.is_block_device(configdrive_part),
mock.call.dd(mock.ANY, configdrive_part),
mock.call.populate_image(image_path, root_part),
mock.call.block_uuid(root_part),
mock.call.logout_iscsi(address, port, iqn),
mock.call.delete_iscsi(address, port, iqn)]
uuid_dict_returned = utils.deploy_partition_image(
address, port, iqn, lun, image_path, root_mb, swap_mb,
ephemeral_mb, ephemeral_format, node_uuid,
configdrive=configdrive_url)
self.assertEqual(calls_expected, parent_mock.mock_calls)
self.assertEqual(root_uuid, uuid_dict_returned['root uuid'])
mock_unlink.assert_called_once_with('configdrive-path')
@mock.patch.object(utils, 'get_disk_identifier', autospec=True)
def test_deploy_whole_disk_image(self, mock_gdi):
"""Check loosely all functions are called with right args."""
address = '127.0.0.1'
port = 3306
iqn = 'iqn.xyz'
lun = 1
image_path = '/tmp/xyz/image'
node_uuid = "12345678-1234-1234-1234-1234567890abcxyz"
dev = '/dev/fake'
name_list = ['get_dev', 'discovery', 'login_iscsi', 'logout_iscsi',
'delete_iscsi', 'is_block_device', 'populate_image',
'notify']
parent_mock = self._mock_calls(name_list)
parent_mock.get_dev.return_value = dev
parent_mock.is_block_device.return_value = True
mock_gdi.return_value = '0x12345678'
calls_expected = [mock.call.get_dev(address, port, iqn, lun),
mock.call.discovery(address, port),
mock.call.login_iscsi(address, port, iqn),
mock.call.is_block_device(dev),
mock.call.populate_image(image_path, dev),
mock.call.logout_iscsi(address, port, iqn),
mock.call.delete_iscsi(address, port, iqn)]
uuid_dict_returned = utils.deploy_disk_image(address, port, iqn, lun,
image_path, node_uuid)
self.assertEqual(calls_expected, parent_mock.mock_calls)
self.assertEqual('0x12345678', uuid_dict_returned['disk identifier'])
@mock.patch.object(common_utils, 'execute', autospec=True)
def test_verify_iscsi_connection_raises(self, mock_exec):
iqn = 'iqn.xyz'
mock_exec.return_value = ['iqn.abc', '']
self.assertRaises(exception.InstanceDeployFailure,
utils.verify_iscsi_connection, iqn)
self.assertEqual(3, mock_exec.call_count)
@mock.patch.object(os.path, 'exists', autospec=True)
def test_check_file_system_for_iscsi_device_raises(self, mock_os):
iqn = 'iqn.xyz'
ip = "127.0.0.1"
port = "22"
mock_os.return_value = False
self.assertRaises(exception.InstanceDeployFailure,
utils.check_file_system_for_iscsi_device, ip, port, iqn)
self.assertEqual(3, mock_os.call_count)
@mock.patch.object(os.path, 'exists', autospec=True)
def test_check_file_system_for_iscsi_device(self, mock_os):
iqn = 'iqn.xyz'
ip = "127.0.0.1"
port = "22"
check_dir = "/dev/disk/by-path/ip-%s:%s-iscsi-%s-lun-1" % (ip,
port,
iqn)
mock_os.return_value = True
utils.check_file_system_for_iscsi_device(ip, port, iqn)
mock_os.assert_called_once_with(check_dir)
@mock.patch.object(common_utils, 'execute', autospec=True)
def test_verify_iscsi_connection(self, mock_exec):
iqn = 'iqn.xyz'
mock_exec.return_value = ['iqn.xyz', '']
utils.verify_iscsi_connection(iqn)
mock_exec.assert_called_once_with('iscsiadm',
'-m', 'node',
'-S',
run_as_root=True,
check_exit_code=[0])
@mock.patch.object(common_utils, 'execute', autospec=True)
def test_force_iscsi_lun_update(self, mock_exec):
iqn = 'iqn.xyz'
utils.force_iscsi_lun_update(iqn)
mock_exec.assert_called_once_with('iscsiadm',
'-m', 'node',
'-T', iqn,
'-R',
run_as_root=True,
check_exit_code=[0])
@mock.patch.object(common_utils, 'execute', autospec=True)
@mock.patch.object(utils, 'verify_iscsi_connection', autospec=True)
@mock.patch.object(utils, 'force_iscsi_lun_update', autospec=True)
@mock.patch.object(utils, 'check_file_system_for_iscsi_device',
autospec=True)
def test_login_iscsi_calls_verify_and_update(self,
mock_check_dev,
mock_update,
mock_verify,
mock_exec):
address = '127.0.0.1'
port = 3306
iqn = 'iqn.xyz'
mock_exec.return_value = ['iqn.xyz', '']
utils.login_iscsi(address, port, iqn)
mock_exec.assert_called_once_with('iscsiadm',
'-m', 'node',
'-p', '%s:%s' % (address, port),
'-T', iqn,
'--login',
run_as_root=True,
check_exit_code=[0],
attempts=5,
delay_on_retry=True)
mock_verify.assert_called_once_with(iqn)
mock_update.assert_called_once_with(iqn)
mock_check_dev.assert_called_once_with(address, port, iqn)
@mock.patch.object(utils, 'is_block_device', lambda d: True)
def test_always_logout_and_delete_iscsi(self):
"""Check if logout_iscsi() and delete_iscsi() are called.
        Make sure that logout_iscsi() and delete_iscsi() are called once
        login_iscsi() has been invoked, even if the disk work fails afterwards.
"""
address = '127.0.0.1'
port = 3306
iqn = 'iqn.xyz'
lun = 1
image_path = '/tmp/xyz/image'
root_mb = 128
swap_mb = 64
ephemeral_mb = 256
ephemeral_format = 'exttest'
node_uuid = "12345678-1234-1234-1234-1234567890abcxyz"
dev = '/dev/fake'
class TestException(Exception):
pass
name_list = ['get_dev', 'get_image_mb', 'discovery', 'login_iscsi',
'logout_iscsi', 'delete_iscsi', 'work_on_disk']
patch_list = [mock.patch.object(utils, name,
spec_set=types.FunctionType)
for name in name_list]
mock_list = [patcher.start() for patcher in patch_list]
for patcher in patch_list:
self.addCleanup(patcher.stop)
parent_mock = mock.MagicMock(spec=[])
for mocker, name in zip(mock_list, name_list):
parent_mock.attach_mock(mocker, name)
parent_mock.get_dev.return_value = dev
parent_mock.get_image_mb.return_value = 1
parent_mock.work_on_disk.side_effect = TestException
calls_expected = [mock.call.get_image_mb(image_path),
mock.call.get_dev(address, port, iqn, lun),
mock.call.discovery(address, port),
mock.call.login_iscsi(address, port, iqn),
mock.call.work_on_disk(dev, root_mb, swap_mb,
ephemeral_mb,
ephemeral_format, image_path,
node_uuid, configdrive=None,
preserve_ephemeral=False,
boot_option="netboot",
boot_mode="bios"),
mock.call.logout_iscsi(address, port, iqn),
mock.call.delete_iscsi(address, port, iqn)]
self.assertRaises(TestException, utils.deploy_partition_image,
address, port, iqn, lun, image_path,
root_mb, swap_mb, ephemeral_mb, ephemeral_format,
node_uuid)
self.assertEqual(calls_expected, parent_mock.mock_calls)
class SwitchPxeConfigTestCase(tests_base.TestCase):
def _create_config(self, ipxe=False, boot_mode=None):
(fd, fname) = tempfile.mkstemp()
if boot_mode == 'uefi':
pxe_cfg = _UEFI_PXECONF_DEPLOY
else:
pxe_cfg = _IPXECONF_DEPLOY if ipxe else _PXECONF_DEPLOY
os.write(fd, pxe_cfg)
os.close(fd)
self.addCleanup(os.unlink, fname)
return fname
def test_switch_pxe_config_partition_image(self):
boot_mode = 'bios'
fname = self._create_config()
utils.switch_pxe_config(fname,
'12345678-1234-1234-1234-1234567890abcdef',
boot_mode,
False)
with open(fname, 'r') as f:
pxeconf = f.read()
self.assertEqual(_PXECONF_BOOT_PARTITION, pxeconf)
def test_switch_pxe_config_whole_disk_image(self):
boot_mode = 'bios'
fname = self._create_config()
utils.switch_pxe_config(fname,
'0x12345678',
boot_mode,
True)
with open(fname, 'r') as f:
pxeconf = f.read()
self.assertEqual(_PXECONF_BOOT_WHOLE_DISK, pxeconf)
def test_switch_ipxe_config_partition_image(self):
boot_mode = 'bios'
cfg.CONF.set_override('ipxe_enabled', True, 'pxe')
fname = self._create_config(ipxe=True)
utils.switch_pxe_config(fname,
'12345678-1234-1234-1234-1234567890abcdef',
boot_mode,
False)
with open(fname, 'r') as f:
pxeconf = f.read()
self.assertEqual(_IPXECONF_BOOT_PARTITION, pxeconf)
def test_switch_ipxe_config_whole_disk_image(self):
boot_mode = 'bios'
cfg.CONF.set_override('ipxe_enabled', True, 'pxe')
fname = self._create_config(ipxe=True)
utils.switch_pxe_config(fname,
'0x12345678',
boot_mode,
True)
with open(fname, 'r') as f:
pxeconf = f.read()
self.assertEqual(_IPXECONF_BOOT_WHOLE_DISK, pxeconf)
def test_switch_uefi_pxe_config_partition_image(self):
boot_mode = 'uefi'
fname = self._create_config(boot_mode=boot_mode)
utils.switch_pxe_config(fname,
'12345678-1234-1234-1234-1234567890abcdef',
boot_mode,
False)
with open(fname, 'r') as f:
pxeconf = f.read()
self.assertEqual(_UEFI_PXECONF_BOOT_PARTITION, pxeconf)
def test_switch_uefi_config_whole_disk_image(self):
boot_mode = 'uefi'
fname = self._create_config(boot_mode=boot_mode)
utils.switch_pxe_config(fname,
'0x12345678',
boot_mode,
True)
with open(fname, 'r') as f:
pxeconf = f.read()
self.assertEqual(_UEFI_PXECONF_BOOT_WHOLE_DISK, pxeconf)
@mock.patch('time.sleep', lambda sec: None)
class OtherFunctionTestCase(db_base.DbTestCase):
def setUp(self):
super(OtherFunctionTestCase, self).setUp()
mgr_utils.mock_the_extension_manager(driver="fake_pxe")
self.node = obj_utils.create_test_node(self.context, driver='fake_pxe')
def test_get_dev(self):
expected = '/dev/disk/by-path/ip-1.2.3.4:5678-iscsi-iqn.fake-lun-9'
actual = utils.get_dev('1.2.3.4', 5678, 'iqn.fake', 9)
self.assertEqual(expected, actual)
@mock.patch.object(os, 'stat', autospec=True)
@mock.patch.object(stat, 'S_ISBLK', autospec=True)
def test_is_block_device_works(self, mock_is_blk, mock_os):
device = '/dev/disk/by-path/ip-1.2.3.4:5678-iscsi-iqn.fake-lun-9'
mock_is_blk.return_value = True
mock_os().st_mode = 10000
self.assertTrue(utils.is_block_device(device))
mock_is_blk.assert_called_once_with(mock_os().st_mode)
@mock.patch.object(os, 'stat', autospec=True)
def test_is_block_device_raises(self, mock_os):
device = '/dev/disk/by-path/ip-1.2.3.4:5678-iscsi-iqn.fake-lun-9'
mock_os.side_effect = OSError
self.assertRaises(exception.InstanceDeployFailure,
utils.is_block_device, device)
mock_os.assert_has_calls([mock.call(device)] * 3)
@mock.patch.object(os.path, 'getsize', autospec=True)
@mock.patch.object(images, 'converted_size', autospec=True)
def test_get_image_mb(self, mock_csize, mock_getsize):
mb = 1024 * 1024
mock_getsize.return_value = 0
mock_csize.return_value = 0
self.assertEqual(0, utils.get_image_mb('x', False))
self.assertEqual(0, utils.get_image_mb('x', True))
mock_getsize.return_value = 1
mock_csize.return_value = 1
self.assertEqual(1, utils.get_image_mb('x', False))
self.assertEqual(1, utils.get_image_mb('x', True))
mock_getsize.return_value = mb
mock_csize.return_value = mb
self.assertEqual(1, utils.get_image_mb('x', False))
self.assertEqual(1, utils.get_image_mb('x', True))
mock_getsize.return_value = mb + 1
mock_csize.return_value = mb + 1
self.assertEqual(2, utils.get_image_mb('x', False))
self.assertEqual(2, utils.get_image_mb('x', True))
def test_parse_root_device_hints(self):
self.node.properties['root_device'] = {'wwn': 123456}
expected = 'wwn=123456'
result = utils.parse_root_device_hints(self.node)
self.assertEqual(expected, result)
def test_parse_root_device_hints_string_space(self):
self.node.properties['root_device'] = {'model': 'fake model'}
expected = 'model=fake%20model'
result = utils.parse_root_device_hints(self.node)
self.assertEqual(expected, result)
def test_parse_root_device_hints_no_hints(self):
self.node.properties = {}
result = utils.parse_root_device_hints(self.node)
self.assertIsNone(result)
def test_parse_root_device_hints_invalid_hints(self):
self.node.properties['root_device'] = {'vehicle': 'Owlship'}
self.assertRaises(exception.InvalidParameterValue,
utils.parse_root_device_hints, self.node)
def test_parse_root_device_hints_invalid_size(self):
self.node.properties['root_device'] = {'size': 'not-int'}
self.assertRaises(exception.InvalidParameterValue,
utils.parse_root_device_hints, self.node)
@mock.patch.object(disk_partitioner.DiskPartitioner, 'commit', lambda _: None)
class WorkOnDiskTestCase(tests_base.TestCase):
def setUp(self):
super(WorkOnDiskTestCase, self).setUp()
self.image_path = '/tmp/xyz/image'
self.root_mb = 128
self.swap_mb = 64
self.ephemeral_mb = 0
self.ephemeral_format = None
self.configdrive_mb = 0
self.dev = '/dev/fake'
self.swap_part = '/dev/fake-part1'
self.root_part = '/dev/fake-part2'
self.mock_ibd_obj = mock.patch.object(
utils, 'is_block_device', autospec=True)
self.mock_ibd = self.mock_ibd_obj.start()
self.addCleanup(self.mock_ibd_obj.stop)
self.mock_mp_obj = mock.patch.object(
utils, 'make_partitions', autospec=True)
self.mock_mp = self.mock_mp_obj.start()
self.addCleanup(self.mock_mp_obj.stop)
self.mock_remlbl_obj = mock.patch.object(
utils, 'destroy_disk_metadata', autospec=True)
self.mock_remlbl = self.mock_remlbl_obj.start()
self.addCleanup(self.mock_remlbl_obj.stop)
self.mock_mp.return_value = {'swap': self.swap_part,
'root': self.root_part}
def test_no_root_partition(self):
self.mock_ibd.return_value = False
self.assertRaises(exception.InstanceDeployFailure,
utils.work_on_disk, self.dev, self.root_mb,
self.swap_mb, self.ephemeral_mb,
self.ephemeral_format, self.image_path, 'fake-uuid')
self.mock_ibd.assert_called_once_with(self.root_part)
self.mock_mp.assert_called_once_with(self.dev, self.root_mb,
self.swap_mb, self.ephemeral_mb,
self.configdrive_mb, commit=True,
boot_option="netboot",
boot_mode="bios")
def test_no_swap_partition(self):
self.mock_ibd.side_effect = iter([True, False])
calls = [mock.call(self.root_part),
mock.call(self.swap_part)]
self.assertRaises(exception.InstanceDeployFailure,
utils.work_on_disk, self.dev, self.root_mb,
self.swap_mb, self.ephemeral_mb,
self.ephemeral_format, self.image_path, 'fake-uuid')
self.assertEqual(self.mock_ibd.call_args_list, calls)
self.mock_mp.assert_called_once_with(self.dev, self.root_mb,
self.swap_mb, self.ephemeral_mb,
self.configdrive_mb, commit=True,
boot_option="netboot",
boot_mode="bios")
def test_no_ephemeral_partition(self):
ephemeral_part = '/dev/fake-part1'
swap_part = '/dev/fake-part2'
root_part = '/dev/fake-part3'
ephemeral_mb = 256
ephemeral_format = 'exttest'
self.mock_mp.return_value = {'ephemeral': ephemeral_part,
'swap': swap_part,
'root': root_part}
self.mock_ibd.side_effect = iter([True, True, False])
calls = [mock.call(root_part),
mock.call(swap_part),
mock.call(ephemeral_part)]
self.assertRaises(exception.InstanceDeployFailure,
utils.work_on_disk, self.dev, self.root_mb,
self.swap_mb, ephemeral_mb, ephemeral_format,
self.image_path, 'fake-uuid')
self.assertEqual(self.mock_ibd.call_args_list, calls)
self.mock_mp.assert_called_once_with(self.dev, self.root_mb,
self.swap_mb, ephemeral_mb,
self.configdrive_mb, commit=True,
boot_option="netboot",
boot_mode="bios")
@mock.patch.object(common_utils, 'unlink_without_raise', autospec=True)
@mock.patch.object(utils, '_get_configdrive', autospec=True)
def test_no_configdrive_partition(self, mock_configdrive, mock_unlink):
mock_configdrive.return_value = (10, 'fake-path')
swap_part = '/dev/fake-part1'
configdrive_part = '/dev/fake-part2'
root_part = '/dev/fake-part3'
configdrive_url = 'http://1.2.3.4/cd'
configdrive_mb = 10
self.mock_mp.return_value = {'swap': swap_part,
'configdrive': configdrive_part,
'root': root_part}
self.mock_ibd.side_effect = iter([True, True, False])
calls = [mock.call(root_part),
mock.call(swap_part),
mock.call(configdrive_part)]
self.assertRaises(exception.InstanceDeployFailure,
utils.work_on_disk, self.dev, self.root_mb,
self.swap_mb, self.ephemeral_mb,
self.ephemeral_format, self.image_path, 'fake-uuid',
preserve_ephemeral=False,
configdrive=configdrive_url,
boot_option="netboot")
self.assertEqual(self.mock_ibd.call_args_list, calls)
self.mock_mp.assert_called_once_with(self.dev, self.root_mb,
self.swap_mb, self.ephemeral_mb,
configdrive_mb, commit=True,
boot_option="netboot",
boot_mode="bios")
mock_unlink.assert_called_once_with('fake-path')
@mock.patch.object(common_utils, 'execute', autospec=True)
class MakePartitionsTestCase(tests_base.TestCase):
def setUp(self):
super(MakePartitionsTestCase, self).setUp()
self.dev = 'fake-dev'
self.root_mb = 1024
self.swap_mb = 512
self.ephemeral_mb = 0
self.configdrive_mb = 0
self.parted_static_cmd = ['parted', '-a', 'optimal', '-s', self.dev,
'--', 'unit', 'MiB', 'mklabel', 'msdos']
def _test_make_partitions(self, mock_exc, boot_option):
mock_exc.return_value = (None, None)
utils.make_partitions(self.dev, self.root_mb, self.swap_mb,
self.ephemeral_mb, self.configdrive_mb,
boot_option=boot_option)
expected_mkpart = ['mkpart', 'primary', 'linux-swap', '1', '513',
'mkpart', 'primary', '', '513', '1537']
if boot_option == "local":
expected_mkpart.extend(['set', '2', 'boot', 'on'])
parted_cmd = self.parted_static_cmd + expected_mkpart
parted_call = mock.call(*parted_cmd, run_as_root=True,
check_exit_code=[0])
fuser_cmd = ['fuser', 'fake-dev']
fuser_call = mock.call(*fuser_cmd, run_as_root=True,
check_exit_code=[0, 1])
mock_exc.assert_has_calls([parted_call, fuser_call])
def test_make_partitions(self, mock_exc):
self._test_make_partitions(mock_exc, boot_option="netboot")
def test_make_partitions_local_boot(self, mock_exc):
self._test_make_partitions(mock_exc, boot_option="local")
def test_make_partitions_with_ephemeral(self, mock_exc):
self.ephemeral_mb = 2048
expected_mkpart = ['mkpart', 'primary', '', '1', '2049',
'mkpart', 'primary', 'linux-swap', '2049', '2561',
'mkpart', 'primary', '', '2561', '3585']
cmd = self.parted_static_cmd + expected_mkpart
mock_exc.return_value = (None, None)
utils.make_partitions(self.dev, self.root_mb, self.swap_mb,
self.ephemeral_mb, self.configdrive_mb)
parted_call = mock.call(*cmd, run_as_root=True, check_exit_code=[0])
mock_exc.assert_has_calls([parted_call])
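# Partition-offset arithmetic assumed by the expected mkpart arguments above:
# parted works in MiB with 1 MiB left in front for the label/alignment, so with
# swap_mb=512 and root_mb=1024 the swap partition spans 1-513 and the root
# partition 513-1537; with ephemeral_mb=2048 the layout becomes 1-2049
# (ephemeral), 2049-2561 (swap) and 2561-3585 (root).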
@mock.patch.object(utils, 'get_dev_block_size', autospec=True)
@mock.patch.object(common_utils, 'execute', autospec=True)
class DestroyMetaDataTestCase(tests_base.TestCase):
def setUp(self):
super(DestroyMetaDataTestCase, self).setUp()
self.dev = 'fake-dev'
self.node_uuid = "12345678-1234-1234-1234-1234567890abcxyz"
def test_destroy_disk_metadata(self, mock_exec, mock_gz):
mock_gz.return_value = 64
expected_calls = [mock.call('dd', 'if=/dev/zero', 'of=fake-dev',
'bs=512', 'count=36', run_as_root=True,
check_exit_code=[0]),
mock.call('dd', 'if=/dev/zero', 'of=fake-dev',
'bs=512', 'count=36', 'seek=28',
run_as_root=True,
check_exit_code=[0])]
utils.destroy_disk_metadata(self.dev, self.node_uuid)
mock_exec.assert_has_calls(expected_calls)
self.assertTrue(mock_gz.called)
def test_destroy_disk_metadata_get_dev_size_fail(self, mock_exec, mock_gz):
mock_gz.side_effect = processutils.ProcessExecutionError
expected_call = [mock.call('dd', 'if=/dev/zero', 'of=fake-dev',
'bs=512', 'count=36', run_as_root=True,
check_exit_code=[0])]
self.assertRaises(processutils.ProcessExecutionError,
utils.destroy_disk_metadata,
self.dev,
self.node_uuid)
mock_exec.assert_has_calls(expected_call)
def test_destroy_disk_metadata_dd_fail(self, mock_exec, mock_gz):
mock_exec.side_effect = processutils.ProcessExecutionError
expected_call = [mock.call('dd', 'if=/dev/zero', 'of=fake-dev',
'bs=512', 'count=36', run_as_root=True,
check_exit_code=[0])]
self.assertRaises(processutils.ProcessExecutionError,
utils.destroy_disk_metadata,
self.dev,
self.node_uuid)
mock_exec.assert_has_calls(expected_call)
self.assertFalse(mock_gz.called)
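# Sketch of what the expected dd calls above encode: destroy_disk_metadata is
# expected to zero the first 36 sectors of the device and, using the size
# reported by get_dev_block_size (64 sectors in the test), the last 36 sectors
# as well (seek = 64 - 36 = 28), wiping both the primary and the backup
# partition metadata.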
@mock.patch.object(common_utils, 'execute', autospec=True)
class GetDeviceBlockSizeTestCase(tests_base.TestCase):
def setUp(self):
super(GetDeviceBlockSizeTestCase, self).setUp()
self.dev = 'fake-dev'
self.node_uuid = "12345678-1234-1234-1234-1234567890abcxyz"
def test_get_dev_block_size(self, mock_exec):
mock_exec.return_value = ("64", "")
expected_call = [mock.call('blockdev', '--getsz', self.dev,
run_as_root=True, check_exit_code=[0])]
utils.get_dev_block_size(self.dev)
mock_exec.assert_has_calls(expected_call)
@mock.patch.object(utils, 'dd', autospec=True)
@mock.patch.object(images, 'qemu_img_info', autospec=True)
@mock.patch.object(images, 'convert_image', autospec=True)
class PopulateImageTestCase(tests_base.TestCase):
def setUp(self):
super(PopulateImageTestCase, self).setUp()
def test_populate_raw_image(self, mock_cg, mock_qinfo, mock_dd):
type(mock_qinfo.return_value).file_format = mock.PropertyMock(
return_value='raw')
utils.populate_image('src', 'dst')
mock_dd.assert_called_once_with('src', 'dst')
self.assertFalse(mock_cg.called)
def test_populate_qcow2_image(self, mock_cg, mock_qinfo, mock_dd):
type(mock_qinfo.return_value).file_format = mock.PropertyMock(
return_value='qcow2')
utils.populate_image('src', 'dst')
mock_cg.assert_called_once_with('src', 'dst', 'raw', True)
self.assertFalse(mock_dd.called)
@mock.patch.object(utils, 'is_block_device', lambda d: True)
@mock.patch.object(utils, 'block_uuid', lambda p: 'uuid')
@mock.patch.object(utils, 'dd', lambda *_: None)
@mock.patch.object(images, 'convert_image', lambda *_: None)
@mock.patch.object(common_utils, 'mkfs', lambda *_: None)
# NOTE(dtantsur): destroy_disk_metadata resets file size, disabling it
@mock.patch.object(utils, 'destroy_disk_metadata', lambda *_: None)
class RealFilePartitioningTestCase(tests_base.TestCase):
"""This test applies some real-world partitioning scenario to a file.
This test covers the whole partitioning, mocking everything not possible
on a file. That helps us assure, that we do all partitioning math properly
and also conducts integration testing of DiskPartitioner.
"""
def setUp(self):
super(RealFilePartitioningTestCase, self).setUp()
# NOTE(dtantsur): no parted utility on gate-ironic-python26
try:
common_utils.execute('parted', '--version')
except OSError as exc:
self.skipTest('parted utility was not found: %s' % exc)
self.file = tempfile.NamedTemporaryFile(delete=False)
# NOTE(ifarkas): the file needs to be closed, so fuser won't report
# any usage
self.file.close()
# NOTE(dtantsur): 20 MiB file with zeros
common_utils.execute('dd', 'if=/dev/zero', 'of=%s' % self.file.name,
'bs=1', 'count=0', 'seek=20MiB')
@staticmethod
def _run_without_root(func, *args, **kwargs):
"""Make sure root is not required when using utils.execute."""
real_execute = common_utils.execute
def fake_execute(*cmd, **kwargs):
kwargs['run_as_root'] = False
return real_execute(*cmd, **kwargs)
with mock.patch.object(common_utils, 'execute', fake_execute):
return func(*args, **kwargs)
def test_different_sizes(self):
# NOTE(dtantsur): Keep this list in order with expected partitioning
fields = ['ephemeral_mb', 'swap_mb', 'root_mb']
variants = ((0, 0, 12), (4, 2, 8), (0, 4, 10), (5, 0, 10))
for variant in variants:
kwargs = dict(zip(fields, variant))
self._run_without_root(utils.work_on_disk, self.file.name,
ephemeral_format='ext4', node_uuid='',
image_path='path', **kwargs)
part_table = self._run_without_root(
disk_partitioner.list_partitions, self.file.name)
for part, expected_size in zip(part_table, filter(None, variant)):
self.assertEqual(expected_size, part['size'],
"comparison failed for %s" % list(variant))
def test_whole_disk(self):
# 6 MiB ephemeral + 3 MiB swap + 9 MiB root + 1 MiB for MBR
# + 1 MiB MAGIC == 20 MiB whole disk
# TODO(dtantsur): figure out why we need 'magic' 1 more MiB
        # and why it is different on Ubuntu and Fedora (see below)
self._run_without_root(utils.work_on_disk, self.file.name,
root_mb=9, ephemeral_mb=6, swap_mb=3,
ephemeral_format='ext4', node_uuid='',
image_path='path')
part_table = self._run_without_root(
disk_partitioner.list_partitions, self.file.name)
sizes = [part['size'] for part in part_table]
# NOTE(dtantsur): parted in Ubuntu 12.04 will occupy the last MiB,
# parted in Fedora 20 won't - thus two possible variants for last part
self.assertEqual([6, 3], sizes[:2],
"unexpected partitioning %s" % part_table)
self.assertIn(sizes[2], (9, 10))
@mock.patch.object(image_cache, 'clean_up_caches', autospec=True)
def test_fetch_images(self, mock_clean_up_caches):
mock_cache = mock.MagicMock(
spec_set=['fetch_image', 'master_dir'], master_dir='master_dir')
utils.fetch_images(None, mock_cache, [('uuid', 'path')])
mock_clean_up_caches.assert_called_once_with(None, 'master_dir',
[('uuid', 'path')])
mock_cache.fetch_image.assert_called_once_with('uuid', 'path',
ctx=None,
force_raw=True)
@mock.patch.object(image_cache, 'clean_up_caches', autospec=True)
def test_fetch_images_fail(self, mock_clean_up_caches):
exc = exception.InsufficientDiskSpace(path='a',
required=2,
actual=1)
mock_cache = mock.MagicMock(
spec_set=['master_dir'], master_dir='master_dir')
mock_clean_up_caches.side_effect = iter([exc])
self.assertRaises(exception.InstanceDeployFailure,
utils.fetch_images,
None,
mock_cache,
[('uuid', 'path')])
mock_clean_up_caches.assert_called_once_with(None, 'master_dir',
[('uuid', 'path')])
@mock.patch.object(shutil, 'copyfileobj', autospec=True)
@mock.patch.object(requests, 'get', autospec=True)
class GetConfigdriveTestCase(tests_base.TestCase):
@mock.patch.object(gzip, 'GzipFile', autospec=True)
def test_get_configdrive(self, mock_gzip, mock_requests, mock_copy):
mock_requests.return_value = mock.MagicMock(
spec_set=['content'], content='Zm9vYmFy')
utils._get_configdrive('http://1.2.3.4/cd', 'fake-node-uuid')
mock_requests.assert_called_once_with('http://1.2.3.4/cd')
mock_gzip.assert_called_once_with('configdrive', 'rb',
fileobj=mock.ANY)
mock_copy.assert_called_once_with(mock.ANY, mock.ANY)
@mock.patch.object(gzip, 'GzipFile', autospec=True)
def test_get_configdrive_base64_string(self, mock_gzip, mock_requests,
mock_copy):
utils._get_configdrive('Zm9vYmFy', 'fake-node-uuid')
self.assertFalse(mock_requests.called)
mock_gzip.assert_called_once_with('configdrive', 'rb',
fileobj=mock.ANY)
mock_copy.assert_called_once_with(mock.ANY, mock.ANY)
def test_get_configdrive_bad_url(self, mock_requests, mock_copy):
mock_requests.side_effect = requests.exceptions.RequestException
self.assertRaises(exception.InstanceDeployFailure,
utils._get_configdrive, 'http://1.2.3.4/cd',
'fake-node-uuid')
self.assertFalse(mock_copy.called)
@mock.patch.object(base64, 'b64decode', autospec=True)
def test_get_configdrive_base64_error(self, mock_b64, mock_requests,
mock_copy):
mock_b64.side_effect = TypeError
self.assertRaises(exception.InstanceDeployFailure,
utils._get_configdrive,
'malformed', 'fake-node-uuid')
mock_b64.assert_called_once_with('malformed')
self.assertFalse(mock_copy.called)
@mock.patch.object(gzip, 'GzipFile', autospec=True)
def test_get_configdrive_gzip_error(self, mock_gzip, mock_requests,
mock_copy):
mock_requests.return_value = mock.MagicMock(
spec_set=['content'], content='Zm9vYmFy')
mock_copy.side_effect = IOError
self.assertRaises(exception.InstanceDeployFailure,
utils._get_configdrive, 'http://1.2.3.4/cd',
'fake-node-uuid')
mock_requests.assert_called_once_with('http://1.2.3.4/cd')
mock_gzip.assert_called_once_with('configdrive', 'rb',
fileobj=mock.ANY)
mock_copy.assert_called_once_with(mock.ANY, mock.ANY)
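# As exercised above, _get_configdrive is assumed to accept either an HTTP(S)
# URL or a base64 string ('Zm9vYmFy' is base64 for 'foobar'), base64-decode the
# payload, gunzip it via gzip.GzipFile and copy it to a temporary file with
# shutil.copyfileobj, returning a (size_mb, file_path) tuple; see the
# (10, 'configdrive-path') return value mocked in the deploy test earlier.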
class VirtualMediaDeployUtilsTestCase(db_base.DbTestCase):
def setUp(self):
super(VirtualMediaDeployUtilsTestCase, self).setUp()
mgr_utils.mock_the_extension_manager(driver="iscsi_ilo")
info_dict = db_utils.get_test_ilo_info()
self.node = obj_utils.create_test_node(self.context,
driver='iscsi_ilo', driver_info=info_dict)
def test_get_single_nic_with_vif_port_id(self):
obj_utils.create_test_port(self.context, node_id=self.node.id,
address='aa:bb:cc', uuid=uuidutils.generate_uuid(),
extra={'vif_port_id': 'test-vif-A'}, driver='iscsi_ilo')
with task_manager.acquire(self.context, self.node.uuid,
shared=False) as task:
address = utils.get_single_nic_with_vif_port_id(task)
self.assertEqual('aa:bb:cc', address)
class ParseInstanceInfoCapabilitiesTestCase(tests_base.TestCase):
def setUp(self):
super(ParseInstanceInfoCapabilitiesTestCase, self).setUp()
self.node = obj_utils.get_test_node(self.context, driver='fake')
def test_parse_instance_info_capabilities_string(self):
self.node.instance_info = {'capabilities': '{"cat": "meow"}'}
expected_result = {"cat": "meow"}
result = utils.parse_instance_info_capabilities(self.node)
self.assertEqual(expected_result, result)
def test_parse_instance_info_capabilities(self):
self.node.instance_info = {'capabilities': {"dog": "wuff"}}
expected_result = {"dog": "wuff"}
result = utils.parse_instance_info_capabilities(self.node)
self.assertEqual(expected_result, result)
def test_parse_instance_info_invalid_type(self):
self.node.instance_info = {'capabilities': 'not-a-dict'}
self.assertRaises(exception.InvalidParameterValue,
utils.parse_instance_info_capabilities, self.node)
def test_is_secure_boot_requested_true(self):
self.node.instance_info = {'capabilities': {"secure_boot": "tRue"}}
self.assertTrue(utils.is_secure_boot_requested(self.node))
def test_is_secure_boot_requested_false(self):
self.node.instance_info = {'capabilities': {"secure_boot": "false"}}
self.assertFalse(utils.is_secure_boot_requested(self.node))
def test_is_secure_boot_requested_invalid(self):
self.node.instance_info = {'capabilities': {"secure_boot": "invalid"}}
self.assertFalse(utils.is_secure_boot_requested(self.node))
def test_get_boot_mode_for_deploy_using_capabilities(self):
properties = {'capabilities': 'boot_mode:uefi,cap2:value2'}
self.node.properties = properties
result = utils.get_boot_mode_for_deploy(self.node)
self.assertEqual('uefi', result)
def test_get_boot_mode_for_deploy_using_instance_info_cap(self):
instance_info = {'capabilities': {'secure_boot': 'True'}}
self.node.instance_info = instance_info
result = utils.get_boot_mode_for_deploy(self.node)
self.assertEqual('uefi', result)
def test_get_boot_mode_for_deploy_using_instance_info(self):
instance_info = {'deploy_boot_mode': 'bios'}
self.node.instance_info = instance_info
result = utils.get_boot_mode_for_deploy(self.node)
self.assertEqual('bios', result)
class TrySetBootDeviceTestCase(db_base.DbTestCase):
def setUp(self):
super(TrySetBootDeviceTestCase, self).setUp()
mgr_utils.mock_the_extension_manager(driver="fake")
self.node = obj_utils.create_test_node(self.context, driver="fake")
@mock.patch.object(manager_utils, 'node_set_boot_device', autospec=True)
def test_try_set_boot_device_okay(self, node_set_boot_device_mock):
with task_manager.acquire(self.context, self.node.uuid,
shared=False) as task:
utils.try_set_boot_device(task, boot_devices.DISK,
persistent=True)
node_set_boot_device_mock.assert_called_once_with(
task, boot_devices.DISK, persistent=True)
@mock.patch.object(utils, 'LOG', autospec=True)
@mock.patch.object(manager_utils, 'node_set_boot_device', autospec=True)
def test_try_set_boot_device_ipmifailure_uefi(self,
node_set_boot_device_mock, log_mock):
self.node.properties = {'capabilities': 'boot_mode:uefi'}
self.node.save()
node_set_boot_device_mock.side_effect = exception.IPMIFailure(cmd='a')
with task_manager.acquire(self.context, self.node.uuid,
shared=False) as task:
utils.try_set_boot_device(task, boot_devices.DISK,
persistent=True)
node_set_boot_device_mock.assert_called_once_with(
task, boot_devices.DISK, persistent=True)
log_mock.warning.assert_called_once_with(mock.ANY)
@mock.patch.object(manager_utils, 'node_set_boot_device', autospec=True)
def test_try_set_boot_device_ipmifailure_bios(
self, node_set_boot_device_mock):
node_set_boot_device_mock.side_effect = exception.IPMIFailure(cmd='a')
with task_manager.acquire(self.context, self.node.uuid,
shared=False) as task:
self.assertRaises(exception.IPMIFailure,
utils.try_set_boot_device,
task, boot_devices.DISK, persistent=True)
node_set_boot_device_mock.assert_called_once_with(
task, boot_devices.DISK, persistent=True)
@mock.patch.object(manager_utils, 'node_set_boot_device', autospec=True)
def test_try_set_boot_device_some_other_exception(
self, node_set_boot_device_mock):
exc = exception.IloOperationError(operation="qwe", error="error")
node_set_boot_device_mock.side_effect = exc
with task_manager.acquire(self.context, self.node.uuid,
shared=False) as task:
self.assertRaises(exception.IloOperationError,
utils.try_set_boot_device,
task, boot_devices.DISK, persistent=True)
node_set_boot_device_mock.assert_called_once_with(
task, boot_devices.DISK, persistent=True)
class AgentCleaningTestCase(db_base.DbTestCase):
def setUp(self):
super(AgentCleaningTestCase, self).setUp()
mgr_utils.mock_the_extension_manager(driver='fake_agent')
n = {'driver': 'fake_agent',
'driver_internal_info': {'agent_url': 'http://127.0.0.1:9999'}}
self.node = obj_utils.create_test_node(self.context, **n)
self.ports = [obj_utils.create_test_port(self.context,
node_id=self.node.id)]
self.clean_steps = {
'hardware_manager_version': '1',
'clean_steps': {
'GenericHardwareManager': [
{'interface': 'deploy',
'step': 'erase_devices',
'priority': 20},
],
'SpecificHardwareManager': [
{'interface': 'deploy',
'step': 'update_firmware',
'priority': 30},
{'interface': 'raid',
'step': 'create_raid',
'priority': 10},
]
}
}
@mock.patch('ironic.objects.Port.list_by_node_id',
spec_set=types.FunctionType)
@mock.patch.object(agent_client.AgentClient, 'get_clean_steps',
autospec=True)
def test_get_clean_steps(self, client_mock, list_ports_mock):
client_mock.return_value = {
'command_result': self.clean_steps}
list_ports_mock.return_value = self.ports
with task_manager.acquire(
self.context, self.node['uuid'], shared=False) as task:
response = utils.agent_get_clean_steps(task)
client_mock.assert_called_once_with(mock.ANY, task.node,
self.ports)
self.assertEqual('1', task.node.driver_internal_info[
'hardware_manager_version'])
# Since steps are returned in dicts, they have non-deterministic
# ordering
self.assertEqual(2, len(response))
self.assertIn(self.clean_steps['clean_steps'][
'GenericHardwareManager'][0], response)
self.assertIn(self.clean_steps['clean_steps'][
'SpecificHardwareManager'][0], response)
@mock.patch('ironic.objects.Port.list_by_node_id',
spec_set=types.FunctionType)
@mock.patch.object(agent_client.AgentClient, 'get_clean_steps',
autospec=True)
def test_get_clean_steps_missing_steps(self, client_mock,
list_ports_mock):
del self.clean_steps['clean_steps']
client_mock.return_value = {
'command_result': self.clean_steps}
list_ports_mock.return_value = self.ports
with task_manager.acquire(
self.context, self.node['uuid'], shared=False) as task:
self.assertRaises(exception.NodeCleaningFailure,
utils.agent_get_clean_steps,
task)
client_mock.assert_called_once_with(mock.ANY, task.node,
self.ports)
@mock.patch('ironic.objects.Port.list_by_node_id',
spec_set=types.FunctionType)
@mock.patch.object(agent_client.AgentClient, 'execute_clean_step',
autospec=True)
def test_execute_clean_step(self, client_mock, list_ports_mock):
client_mock.return_value = {
'command_status': 'SUCCEEDED'}
list_ports_mock.return_value = self.ports
with task_manager.acquire(
self.context, self.node['uuid'], shared=False) as task:
response = utils.agent_execute_clean_step(
task,
self.clean_steps['clean_steps']['GenericHardwareManager'][0])
self.assertEqual(states.CLEANING, response)
@mock.patch('ironic.objects.Port.list_by_node_id',
spec_set=types.FunctionType)
@mock.patch.object(agent_client.AgentClient, 'execute_clean_step',
autospec=True)
def test_execute_clean_step_running(self, client_mock, list_ports_mock):
client_mock.return_value = {
'command_status': 'RUNNING'}
list_ports_mock.return_value = self.ports
with task_manager.acquire(
self.context, self.node['uuid'], shared=False) as task:
response = utils.agent_execute_clean_step(
task,
self.clean_steps['clean_steps']['GenericHardwareManager'][0])
self.assertEqual(states.CLEANING, response)
@mock.patch('ironic.objects.Port.list_by_node_id',
spec_set=types.FunctionType)
@mock.patch.object(agent_client.AgentClient, 'execute_clean_step',
autospec=True)
def test_execute_clean_step_version_mismatch(self, client_mock,
list_ports_mock):
client_mock.return_value = {
'command_status': 'RUNNING'}
list_ports_mock.return_value = self.ports
with task_manager.acquire(
self.context, self.node['uuid'], shared=False) as task:
response = utils.agent_execute_clean_step(
task,
self.clean_steps['clean_steps']['GenericHardwareManager'][0])
self.assertEqual(states.CLEANING, response)
@mock.patch.object(utils, 'is_block_device', autospec=True)
@mock.patch.object(utils, 'login_iscsi', lambda *_: None)
@mock.patch.object(utils, 'discovery', lambda *_: None)
@mock.patch.object(utils, 'logout_iscsi', lambda *_: None)
@mock.patch.object(utils, 'delete_iscsi', lambda *_: None)
@mock.patch.object(utils, 'get_dev', lambda *_: '/dev/fake')
class ISCSISetupAndHandleErrorsTestCase(tests_base.TestCase):
def test_no_parent_device(self, mock_ibd):
address = '127.0.0.1'
port = 3306
iqn = 'iqn.xyz'
lun = 1
mock_ibd.return_value = False
expected_dev = '/dev/fake'
with testtools.ExpectedException(exception.InstanceDeployFailure):
with utils._iscsi_setup_and_handle_errors(
address, port, iqn, lun) as dev:
self.assertEqual(expected_dev, dev)
mock_ibd.assert_called_once_with(expected_dev)
def test_parent_device_yield(self, mock_ibd):
address = '127.0.0.1'
port = 3306
iqn = 'iqn.xyz'
lun = 1
expected_dev = '/dev/fake'
mock_ibd.return_value = True
with utils._iscsi_setup_and_handle_errors(address, port,
iqn, lun) as dev:
self.assertEqual(expected_dev, dev)
mock_ibd.assert_called_once_with(expected_dev)
|
import numpy
import matplotlib
import matplotlib.mlab as mlab
import matplotlib.pyplot as plt
from matplotlib.ticker import NullFormatter
from matplotlib import rcParams
from scipy.optimize import curve_fit
from data_plots.utils import labeler, titler
rcParams['text.usetex'] = True
def scatter_hist(x, y, *args,
bins=10,
linestyle='r--', scatterstyle='k+',
histtype='stepfilled', facecolor='#FFFFFF', hatch='/',
show_mean=True, show_std=True,
**kwargs):
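    """Scatter plot of x vs y with marginal histograms.

    Histograms of x and y are drawn on the top and right axes, a normal PDF
    computed from the sample mean/std is overlaid on each, and the mean/std
    can optionally be shown as legend entries. Returns the matplotlib Figure.
    """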
# no labels
nullfmt = NullFormatter()
# definitions for axes
left, width = 0.1, 0.65
bottom, height = 0.1, 0.65
bottom_h = left_h = left+width+0.02
rect_scatter = [left, bottom, width, height]
rect_histx = [left, bottom_h, width, 0.2]
rect_histy = [left_h, bottom, 0.2, height]
# start with a rectangular Figure
fig = plt.figure(1, figsize=(8, 8))
axScatter = fig.add_axes(rect_scatter)
axHistx = fig.add_axes(rect_histx)
axHisty = fig.add_axes(rect_histy)
# no labels on some axes
axHistx.xaxis.set_major_formatter(nullfmt)
axHisty.yaxis.set_major_formatter(nullfmt)
# the scatter plot:
axScatter.plot(x, y, scatterstyle)
# determine limits
xmin, ymin = numpy.min(x), numpy.min(y)
xmax, ymax = numpy.max(x), numpy.max(y)
x_mean, y_mean = x.mean(), y.mean()
x_std, y_std = x.std(), y.std()
# xlims = ((numpy.array([-xmin, xmax]) // binwidth) + 1) * binwidth
# ylims = ((numpy.array([-ymin, ymax]) // binwidth) + 1) * binwidth
xbins = numpy.linspace(xmin, xmax, bins)
ybins = numpy.linspace(ymin, ymax, bins)
# xbins = numpy.arange(-xlims[0], xlims[1]+binwidth, binwidth)
# ybins = numpy.arange(-ylims[0], ylims[1]+binwidth, binwidth)
n, xbins, xpatches = axHistx.hist(x, bins=xbins, normed=1,
histtype=histtype, facecolor=facecolor,
hatch=hatch)
n, ybins, ypatches = axHisty.hist(y, bins=ybins, normed=1,
histtype=histtype, facecolor=facecolor,
hatch=hatch,
orientation='horizontal')
mean_formatter = r'$\mu = {0:.5f}$'.format
std_formatter = r'$\sigma = {0:.5f}$'.format
xhandles, yhandles = [], []
xlabels, ylabels = [], []
if show_mean:
p = plt.Rectangle((0, 0), 1, 1, fc="r")
xlabels.append(mean_formatter(x_mean))
ylabels.append(mean_formatter(y_mean))
xhandles.append(p)
yhandles.append(p)
if show_std:
p = plt.Rectangle((0, 0), 1, 1, fc="b")
xlabels.append(std_formatter(x_std))
ylabels.append(std_formatter(y_std))
xhandles.append(p)
yhandles.append(p)
if show_mean or show_std:
axHistx.legend(xhandles, xlabels,
fontsize='small', loc='upper right')
        axHisty.legend(yhandles, ylabels,
fontsize='small', loc='upper right')
xpdf = mlab.normpdf(xbins, x_mean, x_std)
ypdf = mlab.normpdf(ybins, y_mean, y_std)
axHistx.plot(xbins, xpdf, linestyle)
axHisty.plot(ypdf, ybins, linestyle)
axHistx.set_xlim(axScatter.get_xlim())
axHisty.set_ylim(axScatter.get_ylim())
axHistx.locator_params(tight=False, nbins=3)
axHisty.locator_params(tight=False, nbins=3)
axHistx = titler(axHistx, **kwargs)
axScatter = labeler(axScatter, **kwargs)
return fig
def scatter_hist_from_file(input, *args, usecols=range(2), **kwargs):
x, y = numpy.loadtxt(input, usecols=usecols, unpack=True)
return scatter_hist(x, y, *args, **kwargs)
def _gauss(x, *p):
A, mu, sigma = p
return A*numpy.exp(-(x-mu)**2/(2.*sigma**2))
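# A minimal sketch (not part of the original module) of how _gauss could be fed
# to scipy's curve_fit, which is already imported above; the _fit_gauss name and
# its bin_centres/counts parameters are illustrative, not an existing API.
def _fit_gauss(bin_centres, counts, p0=(1.0, 0.0, 1.0)):
    """Return the fitted (A, mu, sigma) of a Gaussian through the histogram."""
    coeffs, _ = curve_fit(_gauss, bin_centres, counts, p0=p0)
    return coeffs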
|
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Entry point for both build and try bots.
This script is invoked from XXX, usually without arguments
to package an SDK. It automatically determines whether
this SDK is for mac, win, linux.
The script inspects the following environment variables:
BUILDBOT_BUILDERNAME to determine whether the script is run locally
and whether it should upload an SDK to file storage (GSTORE)
"""
# pylint: disable=W0621
# std python includes
import argparse
import datetime
import glob
import os
import re
import sys
if sys.version_info < (2, 7, 0):
sys.stderr.write("python 2.7 or later is required run this script\n")
sys.exit(1)
# local includes
import buildbot_common
import build_projects
import build_updater
import build_version
import generate_notice
import manifest_util
import parse_dsc
import verify_filelist
from build_paths import SCRIPT_DIR, SDK_SRC_DIR, SRC_DIR, NACL_DIR, OUT_DIR
from build_paths import NACLPORTS_DIR, GSTORE, GONACL_APPENGINE_SRC_DIR
# Add SDK make tools scripts to the python path.
sys.path.append(os.path.join(SDK_SRC_DIR, 'tools'))
sys.path.append(os.path.join(NACL_DIR, 'build'))
import getos
import oshelpers
BUILD_DIR = os.path.join(NACL_DIR, 'build')
NACL_TOOLCHAIN_DIR = os.path.join(NACL_DIR, 'toolchain')
NACL_TOOLCHAINTARS_DIR = os.path.join(NACL_TOOLCHAIN_DIR, '.tars')
CYGTAR = os.path.join(BUILD_DIR, 'cygtar.py')
PKGVER = os.path.join(BUILD_DIR, 'package_version', 'package_version.py')
NACLPORTS_URL = 'https://chromium.googlesource.com/external/naclports.git'
NACLPORTS_REV = '65c71c1524a74ff8415573e5e5ef7c59ce4ac437'
GYPBUILD_DIR = 'gypbuild'
options = None
# Map of: ToolchainName: (PackageName, SDKDir, arch).
TOOLCHAIN_PACKAGE_MAP = {
'arm_glibc': ('nacl_arm_glibc', '%(platform)s_arm_glibc', 'arm'),
'x86_glibc': ('nacl_x86_glibc', '%(platform)s_x86_glibc', 'x86'),
'pnacl': ('pnacl_newlib', '%(platform)s_pnacl', 'pnacl')
}
def GetToolchainDirName(tcname):
"""Return the directory name for a given toolchain"""
return TOOLCHAIN_PACKAGE_MAP[tcname][1] % {'platform': getos.GetPlatform()}
def GetToolchainDir(pepperdir, tcname):
"""Return the full path to a given toolchain within a given sdk root"""
return os.path.join(pepperdir, 'toolchain', GetToolchainDirName(tcname))
def GetToolchainLibc(tcname):
if tcname == 'pnacl':
return 'newlib'
for libc in ('glibc', 'newlib', 'host'):
if libc in tcname:
return libc
def GetToolchainNaClInclude(pepperdir, tcname, arch=None):
tcpath = GetToolchainDir(pepperdir, tcname)
if arch is None:
arch = TOOLCHAIN_PACKAGE_MAP[tcname][2]
if arch == 'x86':
return os.path.join(tcpath, 'x86_64-nacl', 'include')
elif arch == 'pnacl':
return os.path.join(tcpath, 'le32-nacl', 'include')
elif arch == 'arm':
return os.path.join(tcpath, 'arm-nacl', 'include')
else:
buildbot_common.ErrorExit('Unknown architecture: %s' % arch)
def GetConfigDir(arch):
if arch.endswith('x64') and getos.GetPlatform() == 'win':
return 'Release_x64'
else:
return 'Release'
def GetNinjaOutDir(arch):
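  # e.g. GetNinjaOutDir('arm') -> <OUT_DIR>/gypbuild-arm/Release.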
return os.path.join(OUT_DIR, GYPBUILD_DIR + '-' + arch, GetConfigDir(arch))
def GetGypBuiltLib(tcname, arch):
if arch == 'ia32':
lib_suffix = '32'
elif arch == 'x64':
lib_suffix = '64'
elif arch == 'arm':
lib_suffix = 'arm'
else:
lib_suffix = ''
tcdir = 'tc_' + GetToolchainLibc(tcname)
if tcname == 'pnacl':
if arch is None:
lib_suffix = ''
tcdir = 'tc_pnacl_newlib'
arch = 'x64'
else:
arch = 'clang-' + arch
return os.path.join(GetNinjaOutDir(arch), 'gen', tcdir, 'lib' + lib_suffix)
def GetToolchainNaClLib(tcname, tcpath, arch):
if arch == 'ia32':
return os.path.join(tcpath, 'x86_64-nacl', 'lib32')
elif arch == 'x64':
return os.path.join(tcpath, 'x86_64-nacl', 'lib')
elif arch == 'arm':
return os.path.join(tcpath, 'arm-nacl', 'lib')
elif tcname == 'pnacl':
return os.path.join(tcpath, 'le32-nacl', 'lib')
def GetOutputToolchainLib(pepperdir, tcname, arch):
tcpath = os.path.join(pepperdir, 'toolchain', GetToolchainDirName(tcname))
return GetToolchainNaClLib(tcname, tcpath, arch)
def GetPNaClTranslatorLib(tcpath, arch):
if arch not in ['arm', 'x86-32', 'x86-64']:
buildbot_common.ErrorExit('Unknown architecture %s.' % arch)
return os.path.join(tcpath, 'translator', arch, 'lib')
def BuildStepDownloadToolchains(toolchains):
buildbot_common.BuildStep('Running package_version.py')
args = [sys.executable, PKGVER, '--mode', 'nacl_core_sdk']
args.extend(['sync', '--extract'])
buildbot_common.Run(args, cwd=NACL_DIR)
def BuildStepCleanPepperDirs(pepperdir, pepperdir_old):
buildbot_common.BuildStep('Clean Pepper Dirs')
dirs_to_remove = (
pepperdir,
pepperdir_old,
os.path.join(OUT_DIR, 'arm_trusted')
)
for dirname in dirs_to_remove:
if os.path.exists(dirname):
buildbot_common.RemoveDir(dirname)
buildbot_common.MakeDir(pepperdir)
def BuildStepMakePepperDirs(pepperdir, subdirs):
for subdir in subdirs:
buildbot_common.MakeDir(os.path.join(pepperdir, subdir))
TEXT_FILES = [
'AUTHORS',
'COPYING',
'LICENSE',
'README.Makefiles',
'getting_started/README',
]
def BuildStepCopyTextFiles(pepperdir, pepper_ver, chrome_revision,
nacl_revision):
buildbot_common.BuildStep('Add Text Files')
InstallFiles(SDK_SRC_DIR, pepperdir, TEXT_FILES)
# Replace a few placeholders in README
readme_text = open(os.path.join(SDK_SRC_DIR, 'README')).read()
readme_text = readme_text.replace('${VERSION}', pepper_ver)
readme_text = readme_text.replace('${CHROME_REVISION}', chrome_revision)
readme_text = readme_text.replace('${CHROME_COMMIT_POSITION}',
build_version.ChromeCommitPosition())
readme_text = readme_text.replace('${NACL_REVISION}', nacl_revision)
# Year/Month/Day Hour:Minute:Second
time_format = '%Y/%m/%d %H:%M:%S'
readme_text = readme_text.replace('${DATE}',
datetime.datetime.now().strftime(time_format))
open(os.path.join(pepperdir, 'README'), 'w').write(readme_text)
def BuildStepUntarToolchains(pepperdir, toolchains):
buildbot_common.BuildStep('Untar Toolchains')
platform = getos.GetPlatform()
build_platform = '%s_x86' % platform
tmpdir = os.path.join(OUT_DIR, 'tc_temp')
buildbot_common.RemoveDir(tmpdir)
buildbot_common.MakeDir(tmpdir)
  # Create a list of extract package tuples; the first part should be
# "$PACKAGE_TARGET/$PACKAGE". The second part should be the destination
# directory relative to pepperdir/toolchain.
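  # e.g. on linux: ('linux_x86/nacl_x86_glibc', 'linux_x86_glibc').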
extract_packages = []
for toolchain in toolchains:
toolchain_map = TOOLCHAIN_PACKAGE_MAP.get(toolchain, None)
if toolchain_map:
package_name, tcdir, _ = toolchain_map
package_tuple = (os.path.join(build_platform, package_name),
tcdir % {'platform': platform})
extract_packages.append(package_tuple)
# On linux we also want to extract the arm_trusted package which contains
# the ARM libraries we ship in support of sel_ldr_arm.
if platform == 'linux':
extract_packages.append((os.path.join(build_platform, 'arm_trusted'),
'arm_trusted'))
if extract_packages:
# Extract all of the packages into the temp directory.
package_names = [package_tuple[0] for package_tuple in extract_packages]
buildbot_common.Run([sys.executable, PKGVER,
'--packages', ','.join(package_names),
'--tar-dir', NACL_TOOLCHAINTARS_DIR,
'--dest-dir', tmpdir,
'extract'])
# Move all the packages we extracted to the correct destination.
for package_name, dest_dir in extract_packages:
full_src_dir = os.path.join(tmpdir, package_name)
full_dst_dir = os.path.join(pepperdir, 'toolchain', dest_dir)
buildbot_common.Move(full_src_dir, full_dst_dir)
# Cleanup the temporary directory we are no longer using.
buildbot_common.RemoveDir(tmpdir)
# List of toolchain headers to install.
# Source is relative to top of Chromium tree, destination is relative
# to the toolchain header directory.
NACL_HEADER_MAP = {
'newlib': [
('native_client/src/include/nacl/nacl_exception.h', 'nacl/'),
('native_client/src/include/nacl/nacl_minidump.h', 'nacl/'),
('native_client/src/untrusted/irt/irt.h', ''),
('native_client/src/untrusted/irt/irt_dev.h', ''),
('native_client/src/untrusted/irt/irt_extension.h', ''),
('native_client/src/untrusted/nacl/nacl_dyncode.h', 'nacl/'),
('native_client/src/untrusted/nacl/nacl_startup.h', 'nacl/'),
('native_client/src/untrusted/pthread/pthread.h', ''),
('native_client/src/untrusted/pthread/semaphore.h', ''),
('native_client/src/untrusted/valgrind/dynamic_annotations.h', 'nacl/'),
('ppapi/nacl_irt/public/irt_ppapi.h', ''),
],
'glibc': [
('native_client/src/include/nacl/nacl_exception.h', 'nacl/'),
('native_client/src/include/nacl/nacl_minidump.h', 'nacl/'),
('native_client/src/untrusted/irt/irt.h', ''),
('native_client/src/untrusted/irt/irt_dev.h', ''),
('native_client/src/untrusted/irt/irt_extension.h', ''),
('native_client/src/untrusted/nacl/nacl_dyncode.h', 'nacl/'),
('native_client/src/untrusted/nacl/nacl_startup.h', 'nacl/'),
('native_client/src/untrusted/valgrind/dynamic_annotations.h', 'nacl/'),
('ppapi/nacl_irt/public/irt_ppapi.h', ''),
],
}
def InstallFiles(src_root, dest_root, file_list):
"""Copy a set of files from src_root to dest_root according
  to the given mapping. This allows a file to be copied to a
  location in the destination tree that is different from its
  location in the source tree.
If the destination mapping ends with a '/' then the destination
  basename is inherited from the source file.
Wildcards can be used in the source list but it is not recommended
as this can end up adding things to the SDK unintentionally.
"""
for file_spec in file_list:
# The list of files to install can be a simple list of
# strings or a list of pairs, where each pair corresponds
# to a mapping from source to destination names.
if type(file_spec) == str:
src_file = dest_file = file_spec
else:
src_file, dest_file = file_spec
src_file = os.path.join(src_root, src_file)
# Expand sources files using glob.
sources = glob.glob(src_file)
if not sources:
sources = [src_file]
if len(sources) > 1 and not dest_file.endswith('/'):
buildbot_common.ErrorExit("Target file must end in '/' when "
"using globbing to install multiple files")
for source in sources:
if dest_file.endswith('/'):
dest = os.path.join(dest_file, os.path.basename(source))
else:
dest = dest_file
dest = os.path.join(dest_root, dest)
if not os.path.isdir(os.path.dirname(dest)):
buildbot_common.MakeDir(os.path.dirname(dest))
buildbot_common.CopyFile(source, dest)
def InstallNaClHeaders(tc_dst_inc, tcname):
"""Copies NaCl headers to expected locations in the toolchain."""
InstallFiles(SRC_DIR, tc_dst_inc, NACL_HEADER_MAP[GetToolchainLibc(tcname)])
def MakeNinjaRelPath(path):
return os.path.join(os.path.relpath(OUT_DIR, SRC_DIR), path)
# TODO(ncbray): stop building and copying libraries into the SDK that are
# already provided by the toolchain.
# Mapping from libc to the list of gyp-built libraries to install into the SDK
TOOLCHAIN_LIBS = {
'newlib' : [
'libminidump_generator.a',
'libnacl.a',
'libnacl_dyncode.a',
'libnacl_exception.a',
'libnacl_list_mappings.a',
'libnosys.a',
'libppapi.a',
'libppapi_stub.a',
'libpthread.a',
],
'glibc': [
'libminidump_generator.a',
'libminidump_generator.so',
'libnacl.a',
'libnacl_dyncode.a',
'libnacl_dyncode.so',
'libnacl_exception.a',
'libnacl_exception.so',
'libnacl_list_mappings.a',
'libnacl_list_mappings.so',
'libppapi.a',
'libppapi.so',
'libppapi_stub.a',
]
}
def GypNinjaInstall(pepperdir, toolchains):
tools_files_32 = [
['sel_ldr', 'sel_ldr_x86_32'],
['irt_core_newlib_x32.nexe', 'irt_core_x86_32.nexe'],
['irt_core_newlib_x64.nexe', 'irt_core_x86_64.nexe'],
]
arm_files = [
['elf_loader_newlib_arm.nexe', 'elf_loader_arm.nexe'],
]
tools_files_64 = []
platform = getos.GetPlatform()
# TODO(binji): dump_syms doesn't currently build on Windows. See
# http://crbug.com/245456
if platform != 'win':
tools_files_64 += [
['dump_syms', 'dump_syms'],
['minidump_dump', 'minidump_dump'],
['minidump_stackwalk', 'minidump_stackwalk']
]
tools_files_64.append(['sel_ldr', 'sel_ldr_x86_64'])
tools_files_64.append(['ncval_new', 'ncval'])
if platform == 'linux':
tools_files_32.append(['nacl_helper_bootstrap',
'nacl_helper_bootstrap_x86_32'])
tools_files_64.append(['nacl_helper_bootstrap',
'nacl_helper_bootstrap_x86_64'])
tools_files_32.append(['nonsfi_loader_newlib_x32_nonsfi.nexe',
'nonsfi_loader_x86_32'])
tools_dir = os.path.join(pepperdir, 'tools')
buildbot_common.MakeDir(tools_dir)
# Add .exe extensions to all windows tools
for pair in tools_files_32 + tools_files_64:
if platform == 'win' and not pair[0].endswith('.nexe'):
pair[0] += '.exe'
pair[1] += '.exe'
# Add ARM binaries
if platform == 'linux' and not options.no_arm_trusted:
arm_files += [
['irt_core_newlib_arm.nexe', 'irt_core_arm.nexe'],
['nacl_helper_bootstrap', 'nacl_helper_bootstrap_arm'],
['nonsfi_loader_newlib_arm_nonsfi.nexe', 'nonsfi_loader_arm'],
['sel_ldr', 'sel_ldr_arm']
]
InstallFiles(GetNinjaOutDir('x64'), tools_dir, tools_files_64)
InstallFiles(GetNinjaOutDir('ia32'), tools_dir, tools_files_32)
InstallFiles(GetNinjaOutDir('arm'), tools_dir, arm_files)
for tc in toolchains:
if tc in ('host', 'clang-newlib'):
continue
elif tc == 'pnacl':
xarches = (None, 'ia32', 'x64', 'arm')
elif tc in ('x86_glibc', 'x86_newlib'):
xarches = ('ia32', 'x64')
elif tc == 'arm_glibc':
xarches = ('arm',)
else:
raise AssertionError('unexpected toolchain value: %s' % tc)
for xarch in xarches:
src_dir = GetGypBuiltLib(tc, xarch)
dst_dir = GetOutputToolchainLib(pepperdir, tc, xarch)
libc = GetToolchainLibc(tc)
InstallFiles(src_dir, dst_dir, TOOLCHAIN_LIBS[libc])
def GypNinjaBuild_NaCl(rel_out_dir):
gyp_py = os.path.join(NACL_DIR, 'build', 'gyp_nacl')
nacl_core_sdk_gyp = os.path.join(NACL_DIR, 'build', 'nacl_core_sdk.gyp')
all_gyp = os.path.join(NACL_DIR, 'build', 'all.gyp')
out_dir_32 = MakeNinjaRelPath(rel_out_dir + '-ia32')
out_dir_64 = MakeNinjaRelPath(rel_out_dir + '-x64')
out_dir_arm = MakeNinjaRelPath(rel_out_dir + '-arm')
out_dir_clang_32 = MakeNinjaRelPath(rel_out_dir + '-clang-ia32')
out_dir_clang_64 = MakeNinjaRelPath(rel_out_dir + '-clang-x64')
out_dir_clang_arm = MakeNinjaRelPath(rel_out_dir + '-clang-arm')
GypNinjaBuild('ia32', gyp_py, nacl_core_sdk_gyp, 'nacl_core_sdk', out_dir_32,
gyp_defines=['use_nacl_clang=0'])
GypNinjaBuild('x64', gyp_py, nacl_core_sdk_gyp, 'nacl_core_sdk', out_dir_64,
gyp_defines=['use_nacl_clang=0'])
GypNinjaBuild('arm', gyp_py, nacl_core_sdk_gyp, 'nacl_core_sdk', out_dir_arm,
gyp_defines=['use_nacl_clang=0'])
GypNinjaBuild('ia32', gyp_py, nacl_core_sdk_gyp, 'nacl_core_sdk',
out_dir_clang_32, gyp_defines=['use_nacl_clang=1'])
GypNinjaBuild('x64', gyp_py, nacl_core_sdk_gyp, 'nacl_core_sdk',
out_dir_clang_64, gyp_defines=['use_nacl_clang=1'])
GypNinjaBuild('arm', gyp_py, nacl_core_sdk_gyp, 'nacl_core_sdk',
out_dir_clang_arm, gyp_defines=['use_nacl_clang=1'])
GypNinjaBuild('x64', gyp_py, all_gyp, 'ncval_new', out_dir_64)
def GypNinjaBuild_Breakpad(rel_out_dir):
# TODO(binji): dump_syms doesn't currently build on Windows. See
# http://crbug.com/245456
if getos.GetPlatform() == 'win':
return
gyp_py = os.path.join(SRC_DIR, 'build', 'gyp_chromium')
out_dir = MakeNinjaRelPath(rel_out_dir)
gyp_file = os.path.join(SRC_DIR, 'breakpad', 'breakpad.gyp')
build_list = ['dump_syms', 'minidump_dump', 'minidump_stackwalk']
GypNinjaBuild('x64', gyp_py, gyp_file, build_list, out_dir)
def GypNinjaBuild_PPAPI(arch, rel_out_dir, gyp_defines=None):
gyp_py = os.path.join(SRC_DIR, 'build', 'gyp_chromium')
out_dir = MakeNinjaRelPath(rel_out_dir)
gyp_file = os.path.join(SRC_DIR, 'ppapi', 'native_client',
'native_client.gyp')
GypNinjaBuild(arch, gyp_py, gyp_file, 'ppapi_lib', out_dir,
gyp_defines=gyp_defines)
def GypNinjaBuild_Pnacl(rel_out_dir, target_arch):
# TODO(binji): This will build the pnacl_irt_shim twice; once as part of the
# Chromium build, and once here. When we move more of the SDK build process
# to gyp, we can remove this.
gyp_py = os.path.join(SRC_DIR, 'build', 'gyp_chromium')
out_dir = MakeNinjaRelPath(rel_out_dir)
gyp_file = os.path.join(SRC_DIR, 'ppapi', 'native_client', 'src',
'untrusted', 'pnacl_irt_shim', 'pnacl_irt_shim.gyp')
targets = ['aot']
GypNinjaBuild(target_arch, gyp_py, gyp_file, targets, out_dir)
def GypNinjaBuild(arch, gyp_py_script, gyp_file, targets,
out_dir, gyp_defines=None):
gyp_env = dict(os.environ)
gyp_env['GYP_GENERATORS'] = 'ninja'
gyp_defines = gyp_defines or []
gyp_defines.append('nacl_allow_thin_archives=0')
if not options.no_use_sysroot:
gyp_defines.append('use_sysroot=1')
if options.mac_sdk:
gyp_defines.append('mac_sdk=%s' % options.mac_sdk)
if arch is not None:
gyp_defines.append('target_arch=%s' % arch)
if arch == 'arm':
gyp_env['GYP_CROSSCOMPILE'] = '1'
if options.no_arm_trusted:
gyp_defines.append('disable_cross_trusted=1')
if getos.GetPlatform() == 'mac':
gyp_defines.append('clang=1')
gyp_env['GYP_DEFINES'] = ' '.join(gyp_defines)
# We can't use windows path separators in GYP_GENERATOR_FLAGS since
# gyp uses shlex to parse them and treats '\' as an escape char.
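  # e.g. an out_dir of 'out\\gypbuild-x64' is handed to gyp as 'out/gypbuild-x64'.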
gyp_env['GYP_GENERATOR_FLAGS'] = 'output_dir=%s' % out_dir.replace('\\', '/')
# Print relevant environment variables
for key, value in gyp_env.iteritems():
if key.startswith('GYP') or key in ('CC',):
print ' %s="%s"' % (key, value)
buildbot_common.Run(
[sys.executable, gyp_py_script, gyp_file, '--depth=.'],
cwd=SRC_DIR,
env=gyp_env)
NinjaBuild(targets, out_dir, arch)
def NinjaBuild(targets, out_dir, arch):
if type(targets) is not list:
targets = [targets]
out_config_dir = os.path.join(out_dir, GetConfigDir(arch))
buildbot_common.Run(['ninja', '-C', out_config_dir] + targets, cwd=SRC_DIR)
def BuildStepBuildToolchains(pepperdir, toolchains, build, clean):
buildbot_common.BuildStep('SDK Items')
if clean:
for dirname in glob.glob(os.path.join(OUT_DIR, GYPBUILD_DIR + '*')):
buildbot_common.RemoveDir(dirname)
build = True
if build:
GypNinjaBuild_NaCl(GYPBUILD_DIR)
GypNinjaBuild_Breakpad(GYPBUILD_DIR + '-x64')
if set(toolchains) & set(['x86_glibc', 'x86_newlib']):
GypNinjaBuild_PPAPI('ia32', GYPBUILD_DIR + '-ia32',
['use_nacl_clang=0'])
GypNinjaBuild_PPAPI('x64', GYPBUILD_DIR + '-x64',
['use_nacl_clang=0'])
if 'arm_glibc' in toolchains:
GypNinjaBuild_PPAPI('arm', GYPBUILD_DIR + '-arm',
['use_nacl_clang=0'] )
if 'pnacl' in toolchains:
GypNinjaBuild_PPAPI('ia32', GYPBUILD_DIR + '-clang-ia32',
['use_nacl_clang=1'])
GypNinjaBuild_PPAPI('x64', GYPBUILD_DIR + '-clang-x64',
['use_nacl_clang=1'])
GypNinjaBuild_PPAPI('arm', GYPBUILD_DIR + '-clang-arm',
['use_nacl_clang=1'])
# NOTE: For ia32, gyp builds both x86-32 and x86-64 by default.
for arch in ('ia32', 'arm'):
# Fill in the latest native pnacl shim library from the chrome build.
build_dir = GYPBUILD_DIR + '-pnacl-' + arch
GypNinjaBuild_Pnacl(build_dir, arch)
GypNinjaInstall(pepperdir, toolchains)
for toolchain in toolchains:
if toolchain not in ('host', 'clang-newlib'):
InstallNaClHeaders(GetToolchainNaClInclude(pepperdir, toolchain),
toolchain)
if 'pnacl' in toolchains:
# NOTE: For ia32, gyp builds both x86-32 and x86-64 by default.
for arch in ('ia32', 'arm'):
# Fill in the latest native pnacl shim library from the chrome build.
build_dir = GYPBUILD_DIR + '-pnacl-' + arch
if arch == 'ia32':
nacl_arches = ['x86-32', 'x86-64']
elif arch == 'arm':
nacl_arches = ['arm']
else:
buildbot_common.ErrorExit('Unknown architecture: %s' % arch)
for nacl_arch in nacl_arches:
release_build_dir = os.path.join(OUT_DIR, build_dir, 'Release',
'gen', 'tc_pnacl_translate',
'lib-' + nacl_arch)
pnacldir = GetToolchainDir(pepperdir, 'pnacl')
pnacl_translator_lib_dir = GetPNaClTranslatorLib(pnacldir, nacl_arch)
if not os.path.isdir(pnacl_translator_lib_dir):
buildbot_common.ErrorExit('Expected %s directory to exist.' %
pnacl_translator_lib_dir)
buildbot_common.CopyFile(
os.path.join(release_build_dir, 'libpnacl_irt_shim.a'),
pnacl_translator_lib_dir)
InstallNaClHeaders(GetToolchainNaClInclude(pepperdir, 'pnacl', 'x86'),
'pnacl')
InstallNaClHeaders(GetToolchainNaClInclude(pepperdir, 'pnacl', 'arm'),
'pnacl')
def MakeDirectoryOrClobber(pepperdir, dirname, clobber):
dirpath = os.path.join(pepperdir, dirname)
if clobber:
buildbot_common.RemoveDir(dirpath)
buildbot_common.MakeDir(dirpath)
return dirpath
def BuildStepUpdateHelpers(pepperdir, clobber):
buildbot_common.BuildStep('Update project helpers')
build_projects.UpdateHelpers(pepperdir, clobber=clobber)
def BuildStepUpdateUserProjects(pepperdir, toolchains,
build_experimental, clobber):
buildbot_common.BuildStep('Update examples and libraries')
filters = {}
if not build_experimental:
filters['EXPERIMENTAL'] = False
dsc_toolchains = []
for t in toolchains:
if t.startswith('x86_') or t.startswith('arm_'):
if t[4:] not in dsc_toolchains:
dsc_toolchains.append(t[4:])
elif t == 'host':
dsc_toolchains.append(getos.GetPlatform())
else:
dsc_toolchains.append(t)
filters['TOOLS'] = dsc_toolchains
# Update examples and libraries
filters['DEST'] = [
'getting_started',
'examples/api',
'examples/demo',
'examples/tutorial',
'src'
]
tree = parse_dsc.LoadProjectTree(SDK_SRC_DIR, include=filters)
build_projects.UpdateProjects(pepperdir, tree, clobber=clobber,
toolchains=dsc_toolchains)
def BuildStepMakeAll(pepperdir, directory, step_name,
deps=True, clean=False, config='Debug', args=None):
buildbot_common.BuildStep(step_name)
build_projects.BuildProjectsBranch(pepperdir, directory, clean,
deps, config, args)
def BuildStepBuildLibraries(pepperdir, directory):
BuildStepMakeAll(pepperdir, directory, 'Build Libraries Debug',
clean=True, config='Debug')
BuildStepMakeAll(pepperdir, directory, 'Build Libraries Release',
clean=True, config='Release')
  # Clean up .pyc files generated while building libraries. Without
  # this we would end up shipping them in the SDK tarball.
buildbot_common.RemoveFile(os.path.join(pepperdir, 'tools', '*.pyc'))
def GenerateNotice(fileroot, output_filename='NOTICE', extra_files=None):
# Look for LICENSE files
license_filenames_re = re.compile('LICENSE|COPYING|COPYRIGHT')
license_files = []
for root, _, files in os.walk(fileroot):
for filename in files:
if license_filenames_re.match(filename):
path = os.path.join(root, filename)
license_files.append(path)
if extra_files:
license_files += [os.path.join(fileroot, f) for f in extra_files]
print '\n'.join(license_files)
if not os.path.isabs(output_filename):
output_filename = os.path.join(fileroot, output_filename)
generate_notice.Generate(output_filename, fileroot, license_files)
def BuildStepVerifyFilelist(pepperdir):
buildbot_common.BuildStep('Verify SDK Files')
file_list_path = os.path.join(SCRIPT_DIR, 'sdk_files.list')
try:
print 'SDK directory: %s' % pepperdir
verify_filelist.Verify(file_list_path, pepperdir)
print 'OK'
except verify_filelist.ParseException, e:
buildbot_common.ErrorExit('Parsing sdk_files.list failed:\n\n%s' % e)
except verify_filelist.VerifyException, e:
file_list_rel = os.path.relpath(file_list_path)
verify_filelist_py = os.path.splitext(verify_filelist.__file__)[0] + '.py'
verify_filelist_py = os.path.relpath(verify_filelist_py)
pepperdir_rel = os.path.relpath(pepperdir)
msg = """\
SDK verification failed:
%s
Add/remove files from %s to fix.
Run:
./%s %s %s
to test.""" % (e, file_list_rel, verify_filelist_py, file_list_rel,
pepperdir_rel)
buildbot_common.ErrorExit(msg)
def BuildStepTarBundle(pepper_ver, tarfile):
buildbot_common.BuildStep('Tar Pepper Bundle')
buildbot_common.MakeDir(os.path.dirname(tarfile))
buildbot_common.Run([sys.executable, CYGTAR, '-C', OUT_DIR, '-cjf', tarfile,
'pepper_' + pepper_ver], cwd=NACL_DIR)
def GetManifestBundle(pepper_ver, chrome_revision, nacl_revision, tarfile,
archive_url):
with open(tarfile, 'rb') as tarfile_stream:
archive_sha1, archive_size = manifest_util.DownloadAndComputeHash(
tarfile_stream)
archive = manifest_util.Archive(manifest_util.GetHostOS())
archive.url = archive_url
archive.size = archive_size
archive.checksum = archive_sha1
bundle = manifest_util.Bundle('pepper_' + pepper_ver)
bundle.revision = int(chrome_revision)
bundle.repath = 'pepper_' + pepper_ver
bundle.version = int(pepper_ver)
bundle.description = (
'Chrome %s bundle. Chrome revision: %s. NaCl revision: %s' % (
pepper_ver, chrome_revision, nacl_revision))
bundle.stability = 'dev'
bundle.recommended = 'no'
bundle.archives = [archive]
return bundle
def Archive(filename, from_directory, step_link=True):
if buildbot_common.IsSDKBuilder():
bucket_path = 'nativeclient-mirror/nacl/nacl_sdk/'
else:
bucket_path = 'nativeclient-mirror/nacl/nacl_sdk_test/'
bucket_path += build_version.ChromeVersion()
buildbot_common.Archive(filename, bucket_path, from_directory, step_link)
def BuildStepArchiveBundle(name, pepper_ver, chrome_revision, nacl_revision,
tarfile):
buildbot_common.BuildStep('Archive %s' % name)
tarname = os.path.basename(tarfile)
tarfile_dir = os.path.dirname(tarfile)
Archive(tarname, tarfile_dir)
# generate "manifest snippet" for this archive.
archive_url = GSTORE + 'nacl_sdk/%s/%s' % (
build_version.ChromeVersion(), tarname)
bundle = GetManifestBundle(pepper_ver, chrome_revision, nacl_revision,
tarfile, archive_url)
manifest_snippet_file = os.path.join(OUT_DIR, tarname + '.json')
with open(manifest_snippet_file, 'wb') as manifest_snippet_stream:
manifest_snippet_stream.write(bundle.GetDataAsString())
Archive(tarname + '.json', OUT_DIR, step_link=False)
def BuildStepBuildPNaClComponent(version, revision):
  # Sadly revision can go backwards for a given version since when a version
  # is built from master, revision will be a huge number (in the hundreds of
  # thousands). Once the branch happens the revision will reset to zero.
# TODO(sbc): figure out how to compensate for this in some way such that
# revisions always go forward for a given version.
buildbot_common.BuildStep('PNaCl Component')
# Version numbers must follow the format specified in:
# https://developer.chrome.com/extensions/manifest/version
# So ensure that rev_major/rev_minor don't overflow and ensure there
# are no leading zeros.
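  # e.g. version='49', revision='123456' -> rev_major=12, rev_minor=3456,
  # giving component version '0.49.12.3456'.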
if len(revision) > 4:
rev_minor = int(revision[-4:])
rev_major = int(revision[:-4])
version = "0.%s.%s.%s" % (version, rev_major, rev_minor)
else:
version = "0.%s.0.%s" % (version, revision)
buildbot_common.Run(['./make_pnacl_component.sh',
'pnacl_multicrx_%s.zip' % revision,
version], cwd=SCRIPT_DIR)
def BuildStepArchivePNaClComponent(revision):
buildbot_common.BuildStep('Archive PNaCl Component')
Archive('pnacl_multicrx_%s.zip' % revision, OUT_DIR)
def BuildStepArchiveSDKTools():
buildbot_common.BuildStep('Build SDK Tools')
build_updater.BuildUpdater(OUT_DIR)
buildbot_common.BuildStep('Archive SDK Tools')
Archive('sdk_tools.tgz', OUT_DIR, step_link=False)
Archive('nacl_sdk.zip', OUT_DIR, step_link=False)
def BuildStepBuildAppEngine(pepperdir, chrome_revision):
"""Build the projects found in src/gonacl_appengine/src"""
buildbot_common.BuildStep('Build GoNaCl AppEngine Projects')
cmd = ['make', 'upload', 'REVISION=%s' % chrome_revision]
env = dict(os.environ)
env['NACL_SDK_ROOT'] = pepperdir
env['NACLPORTS_NO_ANNOTATE'] = "1"
buildbot_common.Run(cmd, env=env, cwd=GONACL_APPENGINE_SRC_DIR)
def main(args):
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('--qemu', help='Add qemu for ARM.',
action='store_true')
parser.add_argument('--tar', help='Force the tar step.',
action='store_true')
parser.add_argument('--archive', help='Force the archive step.',
action='store_true')
parser.add_argument('--release', help='PPAPI release version.',
dest='release', default=None)
parser.add_argument('--build-app-engine',
help='Build AppEngine demos.', action='store_true')
parser.add_argument('--experimental',
help='build experimental examples and libraries', action='store_true',
dest='build_experimental')
parser.add_argument('--skip-toolchain', help='Skip toolchain untar',
action='store_true')
parser.add_argument('--no-clean', dest='clean', action='store_false',
help="Don't clean gypbuild directories")
parser.add_argument('--mac-sdk',
help='Set the mac-sdk (e.g. 10.6) to use when building with ninja.')
parser.add_argument('--no-arm-trusted', action='store_true',
help='Disable building of ARM trusted components (sel_ldr, etc).')
parser.add_argument('--no-use-sysroot', action='store_true',
help='Disable building against sysroot.')
# To setup bash completion for this command first install optcomplete
# and then add this line to your .bashrc:
# complete -F _optcomplete build_sdk.py
try:
import optcomplete
optcomplete.autocomplete(parser)
except ImportError:
pass
global options
options = parser.parse_args(args)
buildbot_common.BuildStep('build_sdk')
if buildbot_common.IsSDKBuilder():
options.archive = True
# TODO(binji): re-enable app_engine build when the linux builder stops
# breaking when trying to git clone from github.
# See http://crbug.com/412969.
options.build_app_engine = False
options.tar = True
# NOTE: order matters here. This will be the order that is specified in the
# Makefiles; the first toolchain will be the default.
toolchains = ['pnacl', 'x86_glibc', 'arm_glibc', 'clang-newlib', 'host']
print 'Building: ' + ' '.join(toolchains)
platform = getos.GetPlatform()
if options.archive and not options.tar:
parser.error('Incompatible arguments with archive.')
chrome_version = int(build_version.ChromeMajorVersion())
chrome_revision = build_version.ChromeRevision()
nacl_revision = build_version.NaClRevision()
pepper_ver = str(chrome_version)
pepper_old = str(chrome_version - 1)
pepperdir = os.path.join(OUT_DIR, 'pepper_' + pepper_ver)
pepperdir_old = os.path.join(OUT_DIR, 'pepper_' + pepper_old)
tarname = 'naclsdk_%s.tar.bz2' % platform
tarfile = os.path.join(OUT_DIR, tarname)
if options.release:
pepper_ver = options.release
print 'Building PEPPER %s at %s' % (pepper_ver, chrome_revision)
if 'NACL_SDK_ROOT' in os.environ:
# We don't want the currently configured NACL_SDK_ROOT to have any effect
    # on the build.
del os.environ['NACL_SDK_ROOT']
if platform == 'linux':
# Linux-only: make sure the debian/stable sysroot image is installed
install_script = os.path.join(SRC_DIR, 'build', 'linux', 'sysroot_scripts',
'install-sysroot.py')
buildbot_common.Run([sys.executable, install_script, '--arch=arm'])
buildbot_common.Run([sys.executable, install_script, '--arch=i386'])
buildbot_common.Run([sys.executable, install_script, '--arch=amd64'])
if not options.skip_toolchain:
BuildStepCleanPepperDirs(pepperdir, pepperdir_old)
BuildStepMakePepperDirs(pepperdir, ['include', 'toolchain', 'tools'])
BuildStepDownloadToolchains(toolchains)
BuildStepUntarToolchains(pepperdir, toolchains)
if platform == 'linux':
buildbot_common.Move(os.path.join(pepperdir, 'toolchain', 'arm_trusted'),
os.path.join(OUT_DIR, 'arm_trusted'))
if platform == 'linux':
# Linux-only: Copy arm libraries from the arm_trusted package. These are
# needed to be able to run sel_ldr_arm under qemu.
arm_libs = [
'lib/arm-linux-gnueabihf/librt.so.1',
'lib/arm-linux-gnueabihf/libpthread.so.0',
'lib/arm-linux-gnueabihf/libgcc_s.so.1',
'lib/arm-linux-gnueabihf/libc.so.6',
'lib/arm-linux-gnueabihf/ld-linux-armhf.so.3',
'lib/arm-linux-gnueabihf/libm.so.6',
'usr/lib/arm-linux-gnueabihf/libstdc++.so.6'
]
arm_lib_dir = os.path.join(pepperdir, 'tools', 'lib', 'arm_trusted', 'lib')
buildbot_common.MakeDir(arm_lib_dir)
for arm_lib in arm_libs:
arm_lib = os.path.join(OUT_DIR, 'arm_trusted', arm_lib)
buildbot_common.CopyFile(arm_lib, arm_lib_dir)
buildbot_common.CopyFile(os.path.join(OUT_DIR, 'arm_trusted', 'qemu-arm'),
os.path.join(pepperdir, 'tools'))
BuildStepBuildToolchains(pepperdir, toolchains,
not options.skip_toolchain,
options.clean)
BuildStepUpdateHelpers(pepperdir, True)
BuildStepUpdateUserProjects(pepperdir, toolchains,
options.build_experimental, True)
BuildStepCopyTextFiles(pepperdir, pepper_ver, chrome_revision, nacl_revision)
# Ship with libraries prebuilt, so run that first.
BuildStepBuildLibraries(pepperdir, 'src')
GenerateNotice(pepperdir)
# Verify the SDK contains what we expect.
BuildStepVerifyFilelist(pepperdir)
if options.tar:
BuildStepTarBundle(pepper_ver, tarfile)
if platform == 'linux':
BuildStepBuildPNaClComponent(pepper_ver, chrome_revision)
if options.build_app_engine and platform == 'linux':
BuildStepBuildAppEngine(pepperdir, chrome_revision)
if options.qemu:
qemudir = os.path.join(NACL_DIR, 'toolchain', 'linux_arm-trusted')
oshelpers.Copy(['-r', qemudir, pepperdir])
# Archive the results on Google Cloud Storage.
if options.archive:
BuildStepArchiveBundle('build', pepper_ver, chrome_revision, nacl_revision,
tarfile)
# Only archive sdk_tools/naclport/pnacl_component on linux.
if platform == 'linux':
BuildStepArchiveSDKTools()
BuildStepArchivePNaClComponent(chrome_revision)
return 0
if __name__ == '__main__':
try:
sys.exit(main(sys.argv[1:]))
except KeyboardInterrupt:
buildbot_common.ErrorExit('build_sdk: interrupted')
|
# MIT License
# Copyright (c) 2016 Diogo Dutra <[email protected]>
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import asyncio
import tempfile
from datetime import datetime
from time import sleep
from unittest import mock
from swaggerit.models._base import _all_models
from tests.integration.fixtures import TopSellerArrayTest
import pytest
import ujson
@pytest.fixture
def init_db(models, session, api):
user = {
'name': 'test',
'email': 'test',
'password': 'test',
'admin': True
}
session.loop.run_until_complete(models['users'].insert(session, user))
tmp = tempfile.TemporaryDirectory()
store = {
'name': 'test',
'country': 'test',
'configuration': {}
}
session.loop.run_until_complete(models['stores'].insert(session, store))
item_type = {
'name': 'products',
'schema': {
'type': 'object',
'id_names': ['sku'],
'properties': {'sku': {'type': 'string'}}
},
'stores': [{'id': 1}]
}
session.loop.run_until_complete(models['item_types'].insert(session, item_type))
strategy = {
'name': 'test',
'class_module': 'tests.integration.fixtures',
'class_name': 'EngineStrategyTest'
}
session.loop.run_until_complete(models['engine_strategies'].insert(session, strategy))
engine_object = {
'name': 'Top Seller Object',
'type': 'top_seller_array',
'configuration': {'days_interval': 7},
'store_id': 1,
'item_type_id': 1,
'strategy_id': 1
}
session.loop.run_until_complete(models['engine_objects'].insert(session, engine_object))
yield tmp.name
tmp.cleanup()
_all_models.pop('store_items_products_1', None)
class TestEngineObjectsModelPost(object):
async def test_post_without_body(self, init_db, client, headers, headers_without_content_type):
client = await client
resp = await client.post('/engine_objects/', headers=headers)
assert resp.status == 400
assert (await resp.json()) == {'message': 'Request body is missing'}
async def test_post_with_invalid_body(self, init_db, client, headers, headers_without_content_type):
client = await client
resp = await client.post('/engine_objects/', headers=headers, data='[{}]')
assert resp.status == 400
assert (await resp.json()) == {
'message': "'name' is a required property. "\
"Failed validating instance['0'] for schema['items']['required']",
'schema': {
'type': 'object',
'additionalProperties': False,
'required': ['name', 'type', 'configuration', 'strategy_id', 'item_type_id', 'store_id'],
'properties': {
'name': {'type': 'string'},
'type': {'type': 'string'},
'strategy_id': {'type': 'integer'},
'item_type_id': {'type': 'integer'},
'store_id': {'type': 'integer'},
'configuration': {}
}
}
}
async def test_post(self, init_db, client, headers, headers_without_content_type):
client = await client
body = [{
'name': 'Top Seller Object Test',
'type': 'top_seller_array',
'configuration': {'days_interval': 7},
'store_id': 1,
'item_type_id': 1,
'strategy_id': 1
}]
resp = await client.post('/engine_objects/', headers=headers, data=ujson.dumps(body))
resp_json = (await resp.json())
body[0]['id'] = 2
body[0]['store'] = resp_json[0]['store']
body[0]['strategy'] = resp_json[0]['strategy']
body[0]['item_type'] = resp_json[0]['item_type']
assert resp.status == 201
assert resp_json == body
async def test_post_with_invalid_grant(self, client):
client = await client
body = [{
'name': 'Top Seller Object Test',
'type': 'top_seller_array',
'configuration': {'days_interval': 7},
'store_id': 1,
'item_type_id': 1,
'strategy_id': 1
}]
resp = await client.post('/engine_objects/', headers={'Authorization': 'invalid'}, data=ujson.dumps(body))
assert resp.status == 401
assert (await resp.json()) == {'message': 'Invalid authorization'}
class TestEngineObjectsModelGet(object):
async def test_get_not_found(self, init_db, headers_without_content_type, client):
client = await client
resp = await client.get(
'/engine_objects/?store_id=2&item_type_id=1&strategy_id=1',
headers=headers_without_content_type
)
assert resp.status == 404
async def test_get_invalid_with_body(self, init_db, headers, client):
client = await client
resp = await client.get(
'/engine_objects/?store_id=1&item_type_id=1&strategy_id=1',
headers=headers,
data='{}'
)
assert resp.status == 400
assert await resp.json() == {'message': 'Request body is not acceptable'}
async def test_get_valid(self, init_db, headers, headers_without_content_type, client):
body = [{
'name': 'Top Seller Object',
'type': 'top_seller_array',
'configuration': {"days_interval": 7},
'store_id': 1,
'item_type_id': 1,
'strategy_id': 1,
'id': 1,
'store': {
'id': 1,
'name': 'test',
'country': 'test',
'configuration': {}
},
'item_type': {
'id': 1,
'store_items_class': None,
'stores': [{
'configuration': {},
'country': 'test',
'id': 1,
'name': 'test'
}],
'name': 'products',
'schema': {
'type': 'object',
'id_names': ['sku'],
'properties': {'sku': {'type': 'string'}}
},
'available_filters': [{'name': 'sku', 'schema': {'type': 'string'}}]
},
'strategy': {
'id': 1,
'name': 'test',
'class_module': 'tests.integration.fixtures',
'class_name': 'EngineStrategyTest',
'object_types': ['top_seller_array']
}
}]
client = await client
resp = await client.get(
'/engine_objects/?store_id=1&item_type_id=1&strategy_id=1',
headers=headers_without_content_type
)
assert resp.status == 200
assert await resp.json() == body
class TestEngineObjectsModelUriTemplatePatch(object):
async def test_patch_without_body(self, init_db, client, headers, headers_without_content_type):
client = await client
resp = await client.patch('/engine_objects/1/', headers=headers, data='')
assert resp.status == 400
assert (await resp.json()) == {'message': 'Request body is missing'}
async def test_patch_with_invalid_body(self, init_db, client, headers, headers_without_content_type):
client = await client
resp = await client.patch('/engine_objects/1/', headers=headers, data='{}')
assert resp.status == 400
assert (await resp.json()) == {
'message': '{} does not have enough properties. '\
"Failed validating instance for schema['minProperties']",
'schema': {
'type': 'object',
'additionalProperties': False,
'minProperties': 1,
'properties': {
'name': {'type': 'string'},
'configuration': {}
}
}
}
async def test_patch_with_invalid_config(self, init_db, client, headers, headers_without_content_type):
client = await client
body = {
'configuration': {}
}
resp = await client.patch('/engine_objects/1/', headers=headers, data=ujson.dumps(body))
assert resp.status == 400
print(ujson.dumps(await resp.json(), indent=4))
assert (await resp.json()) == {
'message': "'days_interval' is a required property. "\
"Failed validating instance for schema['required']",
'schema': {
'type': 'object',
'required': ['days_interval'],
'additionalProperties': False,
'properties': {
'days_interval': {'type': 'integer'}
}
}
}
async def test_patch_not_found(self, init_db, client, headers, headers_without_content_type):
client = await client
body = {
'name': 'Top Seller Object Test'
}
resp = await client.patch('/engine_objects/2/', headers=headers, data=ujson.dumps(body))
assert resp.status == 404
async def test_patch(self, init_db, client, headers, headers_without_content_type):
client = await client
body = [{
'name': 'Top Seller Object Test',
'type': 'top_seller_array',
'configuration': {'days_interval': 7},
'store_id': 1,
'item_type_id': 1,
'strategy_id': 1
}]
resp = await client.post('/engine_objects/', headers=headers, data=ujson.dumps(body))
obj = (await resp.json())[0]
body = {
'name': 'test2'
}
resp = await client.patch('/engine_objects/2/', headers=headers, data=ujson.dumps(body))
obj['name'] = 'test2'
assert resp.status == 200
assert (await resp.json()) == obj
class TestEngineObjectsModelUriTemplateGet(object):
async def test_get_with_body(self, init_db, headers, client):
client = await client
resp = await client.get('/engine_objects/1/', headers=headers, data='{}')
assert resp.status == 400
assert await resp.json() == {'message': 'Request body is not acceptable'}
async def test_get_not_found(self, init_db, headers_without_content_type, client):
client = await client
resp = await client.get('/engine_objects/2/', headers=headers_without_content_type)
assert resp.status == 404
async def test_get(self, init_db, headers, headers_without_content_type, client):
client = await client
resp = await client.get('/engine_objects/1/', headers=headers_without_content_type)
body = {
'name': 'Top Seller Object',
'type': 'top_seller_array',
'configuration': {"days_interval": 7},
'store_id': 1,
'item_type_id': 1,
'strategy_id': 1,
'id': 1,
'store': {
'id': 1,
'name': 'test',
'country': 'test',
'configuration': {}
},
'item_type': {
'id': 1,
'store_items_class': None,
'stores': [{
'configuration': {},
'country': 'test',
'id': 1,
'name': 'test'
}],
'name': 'products',
'schema': {
'type': 'object',
'id_names': ['sku'],
'properties': {'sku': {'type': 'string'}}
},
'available_filters': [{'name': 'sku', 'schema': {'type': 'string'}}]
},
'strategy': {
'id': 1,
'name': 'test',
'class_module': 'tests.integration.fixtures',
'class_name': 'EngineStrategyTest',
'object_types': ['top_seller_array']
}
}
assert resp.status == 200
assert await resp.json() == body
class TestEngineObjectsModelUriTemplateDelete(object):
async def test_delete_with_body(self, init_db, client, headers):
client = await client
resp = await client.delete('/engine_objects/1/', headers=headers, data='{}')
assert resp.status == 400
assert (await resp.json()) == {'message': 'Request body is not acceptable'}
async def test_delete_valid(self, init_db, client, headers, headers_without_content_type):
client = await client
resp = await client.get('/engine_objects/1/', headers=headers_without_content_type)
assert resp.status == 200
resp = await client.delete('/engine_objects/1/', headers=headers_without_content_type)
assert resp.status == 204
resp = await client.get('/engine_objects/1/', headers=headers_without_content_type)
assert resp.status == 404
def datetime_mock():
mock_ = mock.MagicMock()
mock_.now.return_value = datetime(1900, 1, 1)
return mock_
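# Poll the job status endpoint until the job leaves the 'running' state, then
# return the final response.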
async def _wait_job_finish(client, headers_without_content_type, job_name='export'):
sleep(0.05)
while True:
resp = await client.get(
'/engine_objects/1/{}?job_hash=6342e10bd7dca3240c698aa79c98362e'.format(job_name),
headers=headers_without_content_type)
if (await resp.json())['status'] != 'running':
break
return resp
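# Pin the RNG and the clock so the generated job hash and time_info fields are
# deterministic across test runs.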
def set_patches(monkeypatch):
monkeypatch.setattr('swaggerit.models.orm._jobs_meta.random.getrandbits',
mock.MagicMock(return_value=131940827655846590526331314439483569710))
monkeypatch.setattr('swaggerit.models.orm._jobs_meta.datetime', datetime_mock())
class TestEngineObjectsModelsDataImporter(object):
async def test_importer_post(self, init_db, headers, headers_without_content_type, client, monkeypatch):
set_patches(monkeypatch)
client = await client
resp = await client.post('/engine_objects/1/import_data', headers=headers_without_content_type)
assert resp.status == 201
assert await resp.json() == {'job_hash': '6342e10bd7dca3240c698aa79c98362e'}
await _wait_job_finish(client, headers_without_content_type, 'import_data')
async def test_importer_get_running(self, init_db, headers_without_content_type, client, monkeypatch):
set_patches(monkeypatch)
client = await client
await client.post('/engine_objects/1/import_data', headers=headers_without_content_type)
resp = await client.get('/engine_objects/1/import_data?job_hash=6342e10bd7dca3240c698aa79c98362e',
headers=headers_without_content_type)
assert await resp.json() == {'status': 'running'}
await _wait_job_finish(client, headers_without_content_type, 'import_data')
async def test_importer_get_done(self, init_db, headers_without_content_type, client, monkeypatch):
set_patches(monkeypatch)
client = await client
await client.post('/engine_objects/1/import_data', headers=headers_without_content_type)
resp = await _wait_job_finish(client, headers_without_content_type, 'import_data')
assert await resp.json() == {
'status': 'done',
'result': {'lines_count': 3},
'time_info': {
'elapsed': '0:00',
'start': '1900-01-01 00:00',
'end': '1900-01-01 00:00'
}
}
async def test_importer_get_with_error(self, init_db, headers_without_content_type, client, monkeypatch):
set_patches(monkeypatch)
monkeypatch.setattr('tests.integration.fixtures.TopSellerArrayTest.get_data',
mock.MagicMock(side_effect=Exception('testing')))
client = await client
await client.post('/engine_objects/1/import_data', headers=headers_without_content_type)
resp = await _wait_job_finish(client, headers_without_content_type, 'import_data')
assert await resp.json() == {
'status': 'error',
'result': {'message': 'testing', 'name': 'Exception'},
'time_info': {
'elapsed': '0:00',
'start': '1900-01-01 00:00',
'end': '1900-01-01 00:00'
}
}
async def _post_products(client, headers, headers_without_content_type, products=[{'sku': 'test'}]):
resp = await client.post('/item_types/1/items?store_id=1',
data=ujson.dumps(products), headers=headers)
resp = await client.post('/item_types/1/update_filters?store_id=1',
headers=headers_without_content_type)
sleep(0.05)
while True:
resp = await client.get(
'/item_types/1/update_filters?store_id=1&job_hash=6342e10bd7dca3240c698aa79c98362e',
headers=headers_without_content_type)
if (await resp.json())['status'] != 'running':
break
return resp
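# Replace the engine object's CSV readers builder with a mock returning canned
# rows, so the export step consumes fixed data instead of reading real files.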
def set_readers_builders_patch(monkeypatch, values=None):
if values is None:
values = [[ujson.dumps({'value': 1, 'item_key': 'test'}).encode()]]
readers_builder = values
mock_ = mock.MagicMock()
mock_.return_value = readers_builder
monkeypatch.setattr(
'myreco.engine_objects.object_base.EngineObjectBase._build_csv_readers',
mock_
)
class TestEngineObjectsModelsObjectsExporter(object):
async def test_exporter_post(self, init_db, headers_without_content_type, headers, client, monkeypatch):
set_patches(monkeypatch)
set_readers_builders_patch(monkeypatch)
client = await client
await _post_products(client, headers, headers_without_content_type)
resp = await client.post('/engine_objects/1/export', headers=headers_without_content_type)
assert await resp.json() == {'job_hash': '6342e10bd7dca3240c698aa79c98362e'}
await _wait_job_finish(client, headers_without_content_type)
async def test_exporter_get_running(self, init_db, headers_without_content_type, headers, client, monkeypatch, loop):
set_patches(monkeypatch)
prods = [ujson.dumps({'value': i, 'item_key': 'test{}'.format(i)}).encode() for i in range(100)]
set_readers_builders_patch(monkeypatch, [[b'\n'.join(prods)]])
client = await client
products = [{'sku': 'test{}'.format(i)} for i in range(10)]
await _post_products(client, headers, headers_without_content_type, products)
await client.post('/engine_objects/1/export', headers=headers_without_content_type)
resp = await client.get(
'/engine_objects/1/export?job_hash=6342e10bd7dca3240c698aa79c98362e', headers=headers_without_content_type)
assert await resp.json() == {'status': 'running'}
await _wait_job_finish(client, headers_without_content_type)
async def test_exporter_get_done(self, init_db, headers_without_content_type, headers, client, monkeypatch):
set_patches(monkeypatch)
client = await client
await _post_products(client, headers, headers_without_content_type)
set_readers_builders_patch(monkeypatch)
await client.post('/engine_objects/1/export', headers=headers_without_content_type)
resp = await _wait_job_finish(client, headers_without_content_type)
assert await resp.json() == {
'status': 'done',
'result': {'length': 1, 'max_sells': 1, 'min_sells': 1},
'time_info': {
'elapsed': '0:00',
'start': '1900-01-01 00:00',
'end': '1900-01-01 00:00'
}
}
async def test_exporter_get_with_error(
self, init_db, headers_without_content_type, headers, client, monkeypatch):
set_patches(monkeypatch)
client = await client
await _post_products(client, headers, headers_without_content_type)
set_readers_builders_patch(monkeypatch, [])
await client.post('/engine_objects/1/export', headers=headers_without_content_type)
resp = await _wait_job_finish(client, headers_without_content_type)
assert await resp.json() == {
'status': 'error',
'result': {
'message': "No data found for engine object 'Top Seller Object'",
'name': 'EngineError'
},
'time_info': {
'elapsed': '0:00',
'start': '1900-01-01 00:00',
'end': '1900-01-01 00:00'
}
}
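# Wrap a MagicMock in a coroutine function so awaited call sites can be mocked;
# the underlying mock is exposed as the .coro attribute.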
def CoroMock():
coro = mock.MagicMock(name="CoroutineResult")
corofunc = mock.MagicMock(name="CoroutineFunction", side_effect=asyncio.coroutine(coro))
corofunc.coro = coro
return corofunc
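# Hedged usage sketch (added for illustration; the attribute path below is
# made up): CoroMock() yields an awaitable mock whose calls can be asserted
# through its `.coro` attribute, e.g.
#
#   mocked = CoroMock()
#   monkeypatch.setattr('some.module.async_func', mocked)
#   ...exercise the code under test...
#   mocked.coro.assert_called_once_with(expected_args)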
def set_data_importer_patch(monkeypatch, mock_=None):
if mock_ is None:
mock_ = mock.MagicMock()
monkeypatch.setattr('tests.integration.fixtures.TopSellerArrayTest.get_data', mock_)
return mock_
class TestEngineObjectsModelsObjectsExporterWithImport(object):
async def test_exporter_post_with_import(self, init_db, headers, headers_without_content_type, client, monkeypatch):
set_patches(monkeypatch)
client = await client
await _post_products(client, headers, headers_without_content_type)
set_readers_builders_patch(monkeypatch)
get_data_patch = set_data_importer_patch(monkeypatch)
get_data_patch.return_value = {}
resp = await client.post('/engine_objects/1/export?import_data=true',
headers=headers_without_content_type)
hash_ = await resp.json()
await _wait_job_finish(client, headers_without_content_type)
called = bool(TopSellerArrayTest.get_data.called)
TopSellerArrayTest.get_data.reset_mock()
assert hash_ == {'job_hash': '6342e10bd7dca3240c698aa79c98362e'}
assert called
async def test_exporter_get_running_with_import(self, init_db, headers, headers_without_content_type, client, monkeypatch):
set_patches(monkeypatch)
client = await client
def func(x, y, z):
sleep(1)
return {}
await _post_products(client, headers, headers_without_content_type)
set_readers_builders_patch(monkeypatch)
set_data_importer_patch(monkeypatch, func)
await client.post('/engine_objects/1/export?import_data=true',
headers=headers_without_content_type)
resp = await client.get(
'/engine_objects/1/export?job_hash=6342e10bd7dca3240c698aa79c98362e',
headers=headers_without_content_type)
assert await resp.json() == {'status': 'running'}
await _wait_job_finish(client, headers_without_content_type)
async def test_exporter_get_done_with_import(self, init_db, headers, headers_without_content_type, client, monkeypatch):
set_patches(monkeypatch)
client = await client
await _post_products(client, headers, headers_without_content_type)
set_readers_builders_patch(monkeypatch)
await client.post('/engine_objects/1/export?import_data=true',
headers=headers_without_content_type)
await _wait_job_finish(client, headers_without_content_type)
resp = await client.get(
'/engine_objects/1/export?job_hash=6342e10bd7dca3240c698aa79c98362e',
headers=headers_without_content_type)
assert await resp.json() == {
'status': 'done',
'result': {
'importer': {'lines_count': 3},
'exporter': {
'length': 1,
'max_sells': 1,
'min_sells': 1
}
},
'time_info': {
'elapsed': '0:00',
'start': '1900-01-01 00:00',
'end': '1900-01-01 00:00'
}
}
async def test_exporter_get_with_error_in_import_with_import(
self, init_db, headers, headers_without_content_type, client, monkeypatch):
set_patches(monkeypatch)
client = await client
await _post_products(client, headers, headers_without_content_type)
get_data_patch = set_data_importer_patch(monkeypatch)
get_data_patch.side_effect = Exception('testing')
await client.post('/engine_objects/1/export?import_data=true', headers=headers_without_content_type)
await _wait_job_finish(client, headers_without_content_type)
resp = await client.get(
'/engine_objects/1/export?job_hash=6342e10bd7dca3240c698aa79c98362e', headers=headers_without_content_type)
assert await resp.json() == {
'status': 'error',
'result': {'message': 'testing', 'name': 'Exception'},
'time_info': {
'elapsed': '0:00',
'start': '1900-01-01 00:00',
'end': '1900-01-01 00:00'
}
}
async def test_exporter_get_with_error_in_export_with_import(
self, init_db, headers, headers_without_content_type, client, monkeypatch):
set_patches(monkeypatch)
client = await client
await _post_products(client, headers, headers_without_content_type)
set_readers_builders_patch(monkeypatch, [])
await client.post('/engine_objects/1/export?import_data=true', headers=headers_without_content_type)
await _wait_job_finish(client, headers_without_content_type)
resp = await client.get(
'/engine_objects/1/export?job_hash=6342e10bd7dca3240c698aa79c98362e', headers=headers_without_content_type)
assert await resp.json() == {
'status': 'error',
'result': {
'message': "No data found for engine object 'Top Seller Object'",
'name': 'EngineError'
},
'time_info': {
'elapsed': '0:00',
'start': '1900-01-01 00:00',
'end': '1900-01-01 00:00'
}
}
|
from hcsvlab_robochef.rdf.map import *
PARADISEC = "PARADISEC"
paradisecSpeakerMap = FieldMapper(AUSNC)
paradisecSpeakerMap.add('name', mapto=FOAF.name)
paradisecSpeakerMap.add('role', ignore=True)
paradisecMap = MetadataMapper(PARADISEC, speakerMap=paradisecSpeakerMap, documentMap = get_generic_doc_mapper())
paradisecMap.add('Box', mapto=DC.box)
paradisecMap.add('DCMIType', mapto=DC.type, ignore=True)
paradisecMap.add('ISO3166', mapto=DC.coverage)
paradisecMap.add('URI', ignore=True)
paradisecMap.add('W3CDTF', mapto=DC.created)
paradisecMap.add('accessRights', mapto=DC.accessRights)
paradisecMap.add('author', mapto=OLAC.author, ignore=True)
paradisecMap.add('bibliographicCitation', mapto=DC.bibliographicCitation)
paradisecMap.add('compiler', mapto=OLAC.compiler, ignore=True)
paradisecMap.add('consultant', mapto=OLAC.consultant, ignore=True)
paradisecMap.add('data_inputter', mapto=OLAC.data_inputter, ignore=True)
paradisecMap.add('depositor', mapto=OLAC.depositor, ignore=True)
paradisecMap.add('description', mapto=DC.description)
paradisecMap.add('discourse-type', mapto=OLAC.discourse_type)
paradisecMap.add('format', ignore=True)
paradisecMap.add('identifier', mapto=DC.identifier)
paradisecMap.add('interviewer', mapto=OLAC.interviewer, ignore=True)
paradisecMap.add('language', mapto=OLAC.language)
paradisecMap.add('linguistic-field', mapto=OLAC.linguistic_field)
paradisecMap.add('linguistic-type', mapto=OLAC.linguistic_type)
paradisecMap.add('photographer', mapto=OLAC.photographer, ignore=True)
paradisecMap.add('recorder', mapto=OLAC.recorder, ignore=True)
paradisecMap.add('researcher', mapto=OLAC.researcher, ignore=True)
paradisecMap.add('rights', mapto=DC.rights)
paradisecMap.add('speaker', mapto=OLAC.speaker, ignore=True)
paradisecMap.add('tableOfContents', ignore=True)
paradisecMap.add('title', mapto=DC.title)
paradisecMap.add('type', mapto=DC.type, ignore=True)
|
#!/bin/env python
# encoding:utf-8
#
# Author: CORDEA
# Created: 2014-09-12
#
infile = open("ilm_missene.onmis.vcf", "r")
lines = infile.readlines()
infile.close()
# count genotype codes (columns 0-3) for each pipe-separated sample field
msDict = {}
for line in lines:
samples = line.split(",")[5]
tmp = samples.split("|")
for i in range(len(tmp)):
try:
msDict[i][int(tmp[i])] += 1
        except KeyError:  # first time this sample column appears
msDict[i] = [0, 0, 0, 0]
msDict[i][int(tmp[i])] += 1
outFile = open("missene.snp_freq", "w")
totals = [0, 0, 0, 0]
for k, v in msDict.items():
    outFile.write(str(k) + ",")
    oSUM = v[1] + v[2] + v[3]
    outFile.write(str(round( (v[1]/float(oSUM))*100, 2)) + ",")
    outFile.write(str(round( ((v[2]+v[3])/float(oSUM))*100, 2)) + ",")
    outFile.write(str(round( (v[1]/float(oSUM))*100, 2)) + ",")
    outFile.write(str(round( (v[2]/float(oSUM))*100, 2)) + ",")
    outFile.write(str(round( (v[3]/float(oSUM))*100, 2)) + "\n")
    totals[0] += v[0]
    totals[1] += v[1]
    totals[2] += v[2]
    totals[3] += v[3]
# final line: the same percentages computed over all sample columns combined
oSUM = totals[1] + totals[2] + totals[3]
outFile.write(str(round( (totals[1]/float(oSUM))*100, 2)) + ",")
outFile.write(str(round( ((totals[2]+totals[3])/float(oSUM))*100, 2)) + ",")
outFile.write(str(round( (totals[1]/float(oSUM))*100, 2)) + ",")
outFile.write(str(round( (totals[2]/float(oSUM))*100, 2)) + ",")
outFile.write(str(round( (totals[3]/float(oSUM))*100, 2)) + "\n")
outFile.close()
|
# coding=utf-8
#
# Copyright 2017 F5 Networks Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from f5.bigip.resource import AsmResource
from f5.bigip.resource import Collection
from f5.sdk_exception import UnsupportedOperation
class Response_Pages_s(Collection):
"""BIG-IP® ASM Response Pages sub-collection."""
def __init__(self, policy):
super(Response_Pages_s, self).__init__(policy)
self._meta_data['object_has_stats'] = False
self._meta_data['minimum_version'] = '11.6.0'
self._meta_data['allowed_lazy_attributes'] = [Response_Page]
self._meta_data['required_json_kind'] = 'tm:asm:policies:response-pages:response-pagecollectionstate'
self._meta_data['attribute_registry'] = {
'tm:asm:policies:response-pages:response-pagestate': Response_Page
}
class Response_Page(AsmResource):
"""BIG-IP® ASM Response Page resource."""
def __init__(self, response_pages_s):
super(Response_Page, self).__init__(response_pages_s)
self._meta_data['required_json_kind'] = 'tm:asm:policies:response-pages:response-pagestate'
def create(self, **kwargs):
"""Create is not supported for Response Page resources
:raises: UnsupportedOperation
"""
raise UnsupportedOperation(
"%s does not support the create method" % self.__class__.__name__
)
def delete(self, **kwargs):
"""Delete is not supported for Response Page resources
:raises: UnsupportedOperation
"""
raise UnsupportedOperation(
"%s does not support the delete method" % self.__class__.__name__
)
|
from django.conf import settings
from django.db import models
from django.utils.translation import ugettext_lazy as _
from wishlist import exceptions
class WishListItem(models.Model):
user = models.ForeignKey(
settings.AUTH_USER_MODEL, related_name='wishlist_items',
verbose_name=_('Owner'), on_delete=models.CASCADE)
product = models.ForeignKey(
'products.Product', verbose_name=_('Product'),
on_delete=models.CASCADE)
date_created = models.DateTimeField(
_('Date created'), auto_now_add=True, editable=False)
def __str__(self):
return str(self.product)
class Meta:
unique_together = ['user', 'product']
verbose_name = _('Wish list item')
verbose_name_plural = _('Wish list items')
class WishList(object):
def __init__(self, user):
self._user = user
@property
def _items(self):
if not self._user.is_authenticated:
raise exceptions.UserIsNotAuthenticated()
if not hasattr(self, '_items_cache'):
self._items_cache = {
i.product_id: i for i in
self._user.wishlist_items.all().select_related('product')
}
return self._items_cache
def add(self, product):
if product.id in self._items:
raise exceptions.ProductAlreadyAdded()
item = self._user.wishlist_items.create(product=product)
self._items_cache[product.id] = item
def remove(self, product_id):
if product_id not in self._items:
raise exceptions.ItemDoesNotExists()
self._items[product_id].delete()
del self._items[product_id]
def has_product(self, product_id):
if not self._user.is_authenticated:
return False
return product_id in self._items
def __iter__(self):
return iter(self._items.values())
def __len__(self):
if self._user.is_authenticated:
return len(self._items)
return 0
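# Hedged usage sketch (added for illustration; `user` and `product` are
# assumed to come from the surrounding Django project):
def _demo_wishlist_flow(user, product):
    """Illustrative only: typical add/lookup flow for the WishList wrapper."""
    wishlist = WishList(user)
    if not wishlist.has_product(product.id):
        wishlist.add(product)
    # Iterating yields the cached WishListItem instances.
    return [item.product for item in wishlist], len(wishlist)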
|
#!/usr/bin/env python
# standard library imports
# third party related imports
from boto.exception import BotoServerError
from mock import MagicMock
# local library imports
from mobile_push.actors.create_topic import CreateTopicActor
from mobile_push.db import Session, Topic
from ..base import BaseTestCase
class TestRun(BaseTestCase):
def setUp(self):
self.actor = CreateTopicActor()
self.sns_conn = MagicMock()
self.actor.connect_sns = MagicMock(return_value=self.sns_conn)
def test_when_name_is_not_present(self):
self.actor.run({'args': {}})
self.assertFalse(self.sns_conn.create_topic.called)
def test_when_sns_api_failed(self):
self.sns_conn.create_topic = MagicMock(
side_effect=BotoServerError(403, ':)')
)
self.actor.run({'args': {'name': 'qq'}})
session = Session()
self.assertEqual(session.query(Topic).count(), 0)
def test_when_everything_is_ok(self):
self.sns_conn.create_topic.return_value = {
'CreateTopicResponse': {
'CreateTopicResult': {'TopicArn': 'an-arn'},
'ResponseMetadata': {'RequestId': 'xxx'}
}
}
self.actor.run({'args': {'name': 'qq'}})
session = Session()
t = session.query(Topic).first()
self.assertEqual(t.name, 'qq')
self.assertEqual(t.arn, 'an-arn')
|
# -*- encoding: utf-8 -*-
#
# Module Writen to OpenERP, Open Source Management Solution
#
# Copyright (c) 2014 Vauxoo - http://www.vauxoo.com/
# All Rights Reserved.
# info Vauxoo ([email protected])
#
# Coded by: Vauxoo ([email protected])
#
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
{
'name': 'Runbot sync remote info',
'category': 'Website',
'summary': 'Runbot',
'version': '1.0',
'description': """This module create a connection with
remote host of git to sync information.
e.g. Status of pull request
e.g. name source branch of a pull request""",
'author': 'Vauxoo',
'depends': ['runbot'],
'external_dependencies': {
},
'data': [
'data/ir_cron_data.xml',
'view/runbot_view.xml',
],
'installable': True,
}
|
#!/usr/bin/env python3
'''
Helpers for fetching, caching and parsing EuroIX member-list JSON data
'''
import aiohttp
import asyncio
import async_timeout
import glob
import json
import logging
import os
import re
import time
from . import peer
class PeermeDb():
'''
Replaces talking to the Peering DB and generates output based on
EuroIX JSON data
    We download the data over HTTP and cache it locally
'''
    # fetch_json() gets JSON files from each IXP and saves them under proper names
BASE_PATH = 'peerme/euroix-json/'
def __init__(self, config, refresh_data=False, loop=None):
self.global_config = config.config
self.HTTP_TIMEOUT = int(self.global_config['peerme']['http_timeout'])
self.loop = loop if loop else asyncio.get_event_loop()
if refresh_data:
self.fetch_json('peerme/euroix-list.json')
async def _get_via_http(self, url):
''' async JSON fetching coro '''
try:
async with aiohttp.ClientSession(loop=self.loop) as session:
with async_timeout.timeout(self.HTTP_TIMEOUT):
async with session.get(url) as response:
data = await response.text()
except Exception as e:
logging.error("{} unable to be fetched: {}".format(
url, str(e)), exc_info=True,
)
data = None
return url, data
def _create_base_path(self):
if not os.path.exists(self.BASE_PATH):
os.mkdir(self.BASE_PATH)
def fetch_json(self, ixp_json_file):
async_json_fetch_start = time.time()
with open(ixp_json_file, 'r') as f:
ixp_data_urls = json.load(f)
logging.info("Refreshing {} IXP JSON Datasets".format(
len(ixp_data_urls)
))
http_tasks = [
asyncio.ensure_future(
self._get_via_http(url)
) for url in ixp_data_urls
]
completed_tasks, _ = self.loop.run_until_complete(
asyncio.wait(http_tasks, timeout=self.HTTP_TIMEOUT)
)
for task in completed_tasks:
url, data = task.result()
if not data:
continue
logging.debug("Writing {} to disk".format(url))
ixp = json.loads(data)
            # Strip everything after the first space
file_name = re.sub(' .*$', '', ixp['ixp_list'][0]['shortname'])
# Make London Great Again - Hack
if file_name == "London":
file_name = "LINX"
            # Ensure we have the local cache directory
self._create_base_path()
# TODO: Lets do smarter caching and in memory storage + be atomic
with open(self.BASE_PATH + file_name, 'w') as out_file:
out_file.write(data)
fetch_time = time.time() - async_json_fetch_start
logging.debug("HTTP JSON data fetch took {} seconds".format(fetch_time))
    async def session_on_all_ixp(self):
        ''' Gives all the sessions on all the IXPs we have '''
        full_peers_list = []
        file_list = glob.glob(self.BASE_PATH + "*")
        for filename in file_list:
            # strip the folder name
            filename = re.sub('^.*\/', '', filename)
            # get the list per IXP and merge it (get_session_by_ix is a coroutine)
            peers_list = await self.get_session_by_ix(filename)
            for peer in peers_list:
                full_peers_list.append(peer)
        return full_peers_list
async def get_session_by_ix(self, ix_name, dest_asn=None):
'''
gives the list of sessions you could establish on an IXP
if dest_asn is provided, it will only return peer information for that one
'''
my_asn = self.global_config['peerme']['my_asn']
peers_list = []
        # open the file for the given IXP
with open(self.BASE_PATH + ix_name, 'r') as f:
data = json.load(f)
#there can be several IXP in one file (AMS-IX HK, Chicago, etc...)
for ixp in data['ixp_list']:
try:
                    # name is not mandatory, shortname is
ixp_name = ixp["name"]
except KeyError:
ixp_name = ixp["shortname"]
for member in data['member_list']:
if not member:
logging.debug('Empty member on: {}'.format(ixp_name))
continue
if 'connection_list' not in member:
logging.debug(
                        'Member doesn\'t have any connections:'
' {} {}'.format(ixp_name, member))
continue
#a member can have several connections on the same IXP/LAN
for connection in member["connection_list"]:
my_peer = peer.Peer()
#my_peer.ix_desc = ixp_name
my_peer.ix_desc = ixp["shortname"]
                        # connection_list lists connections for all IXPs in the file...
if ixp["ixp_id"] == connection["ixp_id"]:
my_peer.asn = member["asnum"]
my_peer.name = member["name"]
try:
for vlan in connection["vlan_list"]:
my_peer.peer_ipv4 = vlan["ipv4"]["address"]
try:
my_peer.peer_ipv6 = vlan["ipv6"]["address"]
except KeyError:
                                        # because LINX has problems with IPv6
my_peer.peer_ipv6 = ''
for inetF in ["ipv4", "ipv6"]:
for optionals in ["max_prefix", "as_macro"]:
try:
vlan[inetF][optionals]
except KeyError:
pass
else:
if inetF == "ipv4" and optionals == "max_prefix": my_peer.prefix_limit_v4 = vlan[inetF][optionals]
if inetF == "ipv6" and optionals == "max_prefix": my_peer.prefix_limit_v6 = vlan[inetF][optionals]
if inetF == "ipv4" and optionals == "as_macro": my_peer.as_set_v4 = vlan[inetF][optionals]
if inetF == "ipv6" and optionals == "as_macro": my_peer.as_set_v6 = vlan[inetF][optionals]
except KeyError:
pass
except TypeError:
try:
#this case is due to AMS-IX not properly using vlan_list yet
my_peer.peer_ipv4 = connection["vlan_list"]["ipv4"]["address"]
my_peer.peer_ipv6 = connection["vlan_list"]["ipv6"]["address"]
for inetF in ["ipv4", "ipv6"]:
for optionals in ["max_prefix", "as_macro"]:
try:
connection["vlan_list"][inetF][optionals]
except KeyError:
pass
else:
if inetF == "ipv4" and optionals == "max_prefix": my_peer.prefix_limit_v4 = connection["vlan_list"][inetF][optionals]
if inetF == "ipv6" and optionals == "max_prefix": my_peer.prefix_limit_v6 = connection["vlan_list"][inetF][optionals]
if inetF == "ipv4" and optionals == "as_macro": my_peer.as_set_v4 = connection["vlan_list"][inetF][optionals]
if inetF == "ipv6" and optionals == "as_macro": my_peer.as_set_v6 = connection["vlan_list"][inetF][optionals]
except KeyError:
pass
#if we only want result for dest_asn
if dest_asn and (int(dest_asn) != my_peer.asn):
pass
else:
peers_list.append(my_peer)
return peers_list
    # gives the list of sessions you could establish with asn
    # only sessions on IXPs where my_asn (from the config) is also present are returned
async def get_session_by_asn(self, asn):
my_asn = self.global_config['peerme']['my_asn']
peers_list = []
file_list = glob.glob(self.BASE_PATH + "*")
#load all files in order to seek on all IXP
for filename in file_list:
            # strip the folder name
filename = re.sub('^.*\/', '', filename)
ixp_peers_list = await self.get_session_by_ix(filename)
#we seek on the peers_list if my_asn is present and mark it
present = []
for peer in ixp_peers_list:
if peer.asn == int(my_asn):
present.append(peer.ix_desc)
#we seek for the asn we want to peer with, and make sure we are on the same IX
for peer in ixp_peers_list:
if peer.asn == int(asn) and peer.ix_desc in present:
                # add peer to peers_list if my_asn is not defined OR my_asn is present
if (my_asn and present) or my_asn is None:
peers_list.append(peer)
return peers_list
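# Hedged usage sketch (added for illustration; the Config object, IXP name and
# ASN arguments below are assumptions, not part of the original module):
async def _demo_session_lookup(config, ix_name='LINX', dest_asn=None):
    """Illustrative only: look up potential sessions from the local cache."""
    db = PeermeDb(config, refresh_data=False)
    peers = await db.get_session_by_ix(ix_name, dest_asn=dest_asn)
    for p in peers:
        print(p.asn, p.name, p.peer_ipv4, p.peer_ipv6)
    return peers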
|
# -*- encoding: utf-8 -*-
"""Implements Locations UI"""
from robottelo.constants import FILTER
from robottelo.ui.base import Base
from robottelo.ui.locators import common_locators, locators, tab_locators
from robottelo.ui.navigator import Navigator
class Location(Base):
"""Implements CRUD functions for UI"""
def navigate_to_entity(self):
"""Navigate to Locations entity page"""
Navigator(self.browser).go_to_loc()
def _search_locator(self):
"""Specify locator for Locations entity search procedure"""
return locators['location.select_name']
def _configure_location(self, users=None, capsules=None, all_capsules=None,
subnets=None, resources=None, medias=None,
templates=None, ptables=None, domains=None,
envs=None, hostgroups=None, organizations=None,
new_users=None, new_capsules=None,
new_subnets=None, new_resources=None,
new_medias=None, new_templates=None,
new_ptables=None, new_domains=None, new_envs=None,
new_hostgroups=None, new_organizations=None,
params=None, new_params=None, select=None):
"""Configures different entities of selected location."""
loc = tab_locators
if users or new_users:
self.configure_entity(users, FILTER['loc_user'],
tab_locator=loc['context.tab_users'],
new_entity_list=new_users,
entity_select=select)
if capsules or new_capsules:
self.configure_entity(capsules, FILTER['loc_capsules'],
tab_locator=loc['context.tab_capsules'],
new_entity_list=new_capsules,
entity_select=select)
if all_capsules is not None:
self.click(loc['context.tab_capsules'])
self.assign_value(locators['location.all_capsules'], all_capsules)
if subnets or new_subnets:
self.configure_entity(subnets, FILTER['loc_subnet'],
tab_locator=loc['context.tab_subnets'],
new_entity_list=new_subnets,
entity_select=select)
if resources or new_resources:
self.configure_entity(resources, FILTER['loc_resource'],
tab_locator=loc['context.tab_resources'],
new_entity_list=new_resources,
entity_select=select)
if medias or new_medias:
self.configure_entity(medias, FILTER['loc_media'],
tab_locator=loc['context.tab_media'],
new_entity_list=new_medias,
entity_select=select)
if templates or new_templates:
self.configure_entity(templates, FILTER['loc_template'],
tab_locator=loc['context.tab_template'],
new_entity_list=new_templates,
entity_select=select)
if ptables or new_ptables:
self.configure_entity(ptables, FILTER['loc_ptable'],
tab_locator=loc['context.tab_ptable'],
new_entity_list=new_ptables,
entity_select=select)
if domains or new_domains:
self.configure_entity(domains, FILTER['loc_domain'],
tab_locator=loc['context.tab_domains'],
new_entity_list=new_domains,
entity_select=select)
if envs or new_envs:
self.configure_entity(envs, FILTER['loc_envs'],
tab_locator=loc['context.tab_env'],
new_entity_list=new_envs,
entity_select=select)
if hostgroups or new_hostgroups:
self.configure_entity(hostgroups, FILTER['loc_hostgroup'],
tab_locator=loc['context.tab_hostgrps'],
new_entity_list=new_hostgroups,
entity_select=select)
if organizations or new_organizations:
            self.configure_entity(organizations, FILTER['loc_org'],
tab_locator=loc['context.tab_organizations'],
new_entity_list=new_organizations,
entity_select=select)
if params or new_params:
for param in (params or new_params):
self.set_parameter(*param, submit=False)
def create(self, name, parent=None, users=None, capsules=None,
all_capsules=None, subnets=None, resources=None, medias=None,
templates=None, ptables=None, domains=None, envs=None,
hostgroups=None, organizations=None, params=None, select=True):
"""Creates new Location from UI."""
self.click(locators['location.new'])
self.assign_value(locators['location.name'], name)
if parent:
self.select(locators['location.parent'], parent)
self.click(common_locators['submit'])
to_edit_locator = locators['location.proceed_to_edit']
if self.wait_until_element(to_edit_locator):
# In this case there is unassigned host and we need to skip step
# "2 Select Hosts"
self.click(to_edit_locator)
self._configure_location(
users=users, capsules=capsules, all_capsules=all_capsules,
subnets=subnets, resources=resources,
medias=medias, templates=templates,
ptables=ptables, domains=domains, envs=envs,
hostgroups=hostgroups,
organizations=organizations,
params=params,
select=select,
)
self.click(common_locators['submit'])
def update(self, loc_name, new_name=None, users=None, capsules=None,
all_capsules=None, subnets=None, resources=None, medias=None,
templates=None, ptables=None, domains=None, envs=None,
hostgroups=None, organizations=None, new_organizations=None,
new_users=None, new_capsules=None, new_subnets=None,
new_resources=None, new_medias=None, new_templates=None,
new_ptables=None, new_domains=None, new_envs=None,
new_hostgroups=None, new_params=None, select=False):
"""Update Location in UI."""
self.search_and_click(loc_name)
if new_name:
self.assign_value(locators['location.name'], new_name)
self._configure_location(
users=users, capsules=capsules,
all_capsules=all_capsules, subnets=subnets,
resources=resources, medias=medias,
templates=templates, ptables=ptables,
domains=domains, envs=envs, hostgroups=hostgroups,
organizations=organizations,
new_organizations=new_organizations,
new_users=new_users,
new_capsules=new_capsules,
new_subnets=new_subnets,
new_resources=new_resources,
new_medias=new_medias,
new_templates=new_templates,
new_ptables=new_ptables,
new_domains=new_domains,
new_envs=new_envs,
new_hostgroups=new_hostgroups,
new_params=new_params,
select=select
)
self.click(common_locators['submit'])
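# Hedged usage sketch (added for illustration; `browser` and the entity names
# below are assumptions from the surrounding robottelo test framework):
#
#   loc = Location(browser)
#   loc.create(name='Tokyo', organizations=['Default Organization'])
#   loc.update('Tokyo', new_name='Tokyo DC', new_subnets=['office-subnet'])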
|
from django.shortcuts import render
from django.conf import settings
from django.core.mail import send_mail
from Internet_store.products.models import ProductFeatured, Product
def home(request):
title = 'Sign up now!'
featured_image = ProductFeatured.objects.filter(active=True).first()
products = Product.objects.all().order_by('?')[:6]
featured_products = Product.objects.all().order_by('?')[:6]
context = {
'title': title,
'featured_image': featured_image,
'products': products,
'featured_products': featured_products,
}
# if form.is_valid():
# # form.save()
# # print request.POST['email'] #not recommended
# instance = form.save(commit=False)
#
# full_name = form.cleaned_data.get("full_name")
# if not full_name:
# full_name = "New full name"
# instance.full_name = full_name
# # if not instance.full_name:
# # instance.full_name = "Justin"
# instance.save()
# context = {
# "title": "Thank you"
# }
return render(request, 'pages/home.html', context)
|
#!/usr/bin/env python3
import os
os.environ['TF_CPP_MIN_LOG_LEVEL']='2'
import tensorflow as tf
import matplotlib as mpl
import pandas as pd
import numpy as np
mpl.use('Agg')
import time
import matplotlib.pyplot as plt
def convolve_inner_layers(x, W, b):
'''
    inner layers of network --- ReLU activation
'''
y = tf.nn.conv2d(x, W, strides = [1,1,1,1], padding='VALID')
y = tf.nn.bias_add(y, b)
return tf.nn.relu(y)
def convolve_output_layer(x, W, b):
'''
output layer of network --- linear activation
'''
y = tf.nn.conv2d(x, W, strides = [1,1,1,1], padding='VALID')
y = tf.nn.bias_add(y, b)
return y
def conv_net(x, W, b):
'''
entire conv net. each layer feed to following layer as well as output layer
'''
conv1 = convolve_inner_layers(x, W['weights1'], b['bias1'])
conv2 = convolve_inner_layers(conv1, W['weights2'], b['bias2'])
conv3 = convolve_inner_layers(conv2, W['weights3'], b['bias3'])
conv4 = convolve_inner_layers(conv3, W['weights4'], b['bias4'])
output_feed = tf.concat([conv1, conv2, conv3, conv4],3)
    output = convolve_output_layer(output_feed, W['weights_out'], b['bias_out'])
return output
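# Hedged note (added for illustration): with the layer sizes used in main(),
# the concatenation feeding the output layer stacks 100 + 50 + 25 + 10 = 185
# feature maps, which is why 'weights_out' is sized
# fourth_layer + third_layer + second_layer + first_layer on its input axis.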
def get_variance(training_target):
'''
returns variance of the target data. used in normalizing the error.
'''
all_pixels = training_target.flatten()
return all_pixels.var()
def normalize_input(train_data, test_data):
'''
    normalizing input across each pixel and each channel (i.e. normalize each input to the network).
'''
mean, std_dev = np.mean(train_data, axis=0), np.std(train_data, axis=0)
return (train_data - mean) / std_dev, (test_data - mean) / std_dev
def get_epoch(x, y, n):
'''
    splits the entire data set into one epoch of minibatches of size n. returns a dict keyed by
    minibatch number whose values are length-2 lists: features at index 0, targets at index 1.
'''
input_size = x.shape[0]
number_batches = input_size // n
extra_examples = input_size % n
batches = {}
batch_indices = np.arange(input_size)
np.random.shuffle(batch_indices)
for i in range(number_batches):
temp_indices = batch_indices[n*i:n*(i+1)]
temp_x = []
temp_y = []
for j in temp_indices:
temp_x.append(x[j])
temp_y.append(y[j])
batches[i] = [np.asarray(temp_x), np.asarray(temp_y)]
if extra_examples != 0:
extra_indices = batch_indices[input_size-extra_examples:input_size]
temp_x = []
temp_y = []
for k in extra_indices:
temp_x.append(x[k])
temp_y.append(y[k])
batches[i+1] = [np.asarray(temp_x), np.asarray(temp_y)]
return batches
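# Hedged usage sketch (added for illustration; the shapes below are made up,
# not taken from the real training data):
def _demo_get_epoch():
    """Illustrative only: get_epoch() shuffles the examples and returns a dict
    of minibatches keyed by batch index."""
    x_demo = np.random.rand(10, 96, 96, 4)
    y_demo = np.random.rand(10, 86, 86, 1)
    batches = get_epoch(x_demo, y_demo, 4)
    # 10 examples with batch size 4 -> batches 0 and 1 hold 4 examples each,
    # batch 2 holds the 2 leftovers.
    return {key: value[0].shape for key, value in batches.items()}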
def main():
print('welcome to structure net.')
# parameters
filter_dim, filter_dim2 = 11, 1
batch_size = 4
image_dim, result_dim = 96, 86
input_layer, first_layer, second_layer, third_layer, fourth_layer, output_layer = 4, 100, 50, 25, 10, 1
learning_rate = .001
epochs = 5000
# data input
data_path = 'https://raw.githubusercontent.com/michaelneuder/image_quality_analysis/master/data/sample_data/'
# train data --- 500 images, 96x96 pixels
orig_500 = pd.read_csv('{}orig_500.txt'.format(data_path), header=None, delim_whitespace = True)
recon_500 = pd.read_csv('{}recon_500.txt'.format(data_path), header=None, delim_whitespace = True)
# test data --- 140 images, 96x96 pixels
orig_140 = pd.read_csv('{}orig_140.txt'.format(data_path), header=None, delim_whitespace = True)
recon_140 = pd.read_csv('{}recon_140.txt'.format(data_path), header=None, delim_whitespace = True)
    # train target --- 500 images, 86x86 pixels (dimension reduction due to no zero padding being used)
structure_500= pd.read_csv('{}structure_500.csv'.format(data_path), header=None)
structure_140 = pd.read_csv('{}structure_140.csv'.format(data_path), header=None)
print('images loaded...')
# getting 4 input channels for train and test --- (orig, recon, orig squared, recon squared)
original_images_train = orig_500.values
original_images_train_sq = orig_500.values**2
reconstructed_images_train = recon_500.values
reconstructed_images_train_sq = recon_500.values**2
original_images_test = orig_140.values
original_images_test_sq = orig_140.values**2
reconstructed_images_test = recon_140.values
reconstructed_images_test_sq = recon_140.values**2
# stack inputs
training_input = np.dstack((original_images_train, reconstructed_images_train, original_images_train_sq, reconstructed_images_train_sq))
testing_input = np.dstack((original_images_test, reconstructed_images_test, original_images_test_sq, reconstructed_images_test_sq))
# normalize inputs
training_input_normalized, testing_input_normalized = normalize_input(training_input, testing_input)
# target values
training_target = structure_500.values
testing_target = structure_140.values
# get size of training and testing set
train_size = original_images_train.shape[0]
test_size = original_images_test.shape[0]
# reshaping features to (num images, 96x96, 4 channels)
train_features = np.reshape(training_input_normalized, [train_size,image_dim,image_dim,input_layer])
test_features = np.reshape(testing_input_normalized, [test_size,image_dim,image_dim,input_layer])
# reshaping target to --- (num images, 86x86, 1)
train_target = np.reshape(training_target, [train_size, result_dim, result_dim, output_layer])
test_target = np.reshape(testing_target, [test_size, result_dim, result_dim, output_layer])
# initializing filters, this is what we are trying to learn --- fan in
scaling_factor = 0.1
initializer = tf.contrib.layers.xavier_initializer()
weights = {
'weights1': tf.get_variable('weights1', [filter_dim,filter_dim,input_layer,first_layer], initializer=initializer),
'weights2': tf.get_variable('weights2', [filter_dim2,filter_dim2,first_layer,second_layer], initializer=initializer),
'weights3': tf.get_variable('weights3', [filter_dim2,filter_dim2,second_layer,third_layer], initializer=initializer),
'weights4': tf.get_variable('weights4', [filter_dim2,filter_dim2,third_layer,fourth_layer], initializer=initializer),
'weights_out': tf.get_variable('weights_out', [filter_dim2,filter_dim2,fourth_layer+third_layer+second_layer+first_layer,output_layer], initializer=initializer)
}
biases = {
'bias1': tf.get_variable('bias1', [first_layer], initializer=initializer),
'bias2': tf.get_variable('bias2', [second_layer], initializer=initializer),
'bias3': tf.get_variable('bias3', [third_layer], initializer=initializer),
'bias4': tf.get_variable('bias4', [fourth_layer], initializer=initializer),
'bias_out': tf.get_variable('bias_out', [output_layer], initializer=initializer)
}
# tensorflow setup
x = tf.placeholder(tf.float32, [None, image_dim, image_dim, input_layer])
y = tf.placeholder(tf.float32, [None, result_dim, result_dim, output_layer])
# model
prediction = conv_net(x, weights, biases)
# get variance to normalize error terms during training
variance = get_variance(train_target)
# loss and optimization
cost = tf.reduce_mean(tf.square(tf.subtract(prediction, y)))
optimizer = tf.train.AdamOptimizer(learning_rate=learning_rate).minimize(cost)
init = tf.global_variables_initializer()
# error arrays
training_error, testing_error = [], []
epoch_time = np.asarray([])
# tensorflow session & training
with tf.Session() as sess:
sess.run(init)
global_start_time = time.time()
print('starting training...')
for epoch_count in range(epochs):
start_time = time.time()
epoch = get_epoch(train_features, train_target, batch_size)
for i in epoch:
x_data_train, y_data_train = np.asarray(epoch[i][0]), np.asarray(epoch[i][1])
sess.run(optimizer, feed_dict={x : x_data_train, y : y_data_train})
train_loss = sess.run(cost, feed_dict={x : x_data_train, y : y_data_train})
training_error.append(100*train_loss/variance)
test_loss = sess.run(cost, feed_dict={x : test_features, y : test_target})
testing_error.append(100*test_loss/variance)
end_time = time.time()
epoch_time = np.append(epoch_time, end_time-start_time)
print('current epoch: {} -- '.format(epoch_count)
+'current train error: {:.4f} -- '.format(100*train_loss/variance)
+'average epoch time: {:.4}s '.format(epoch_time.mean()))
f, axarr = plt.subplots(nrows=1, ncols=1, figsize=(9,6))
axarr.plot(np.arange(epoch_count+1), training_error, label='train')
axarr.plot(np.arange(epoch_count+1), testing_error, label='test')
axarr.legend()
axarr.set_ylim(0,100)
plt.savefig('relu_1521_struct.png')
print('training finished.')
if __name__ == '__main__':
main()
|
#!/usr/bin/python
import math
import rospy
import random
from sensor_msgs.msg import *
from geometry_msgs.msg import *
from constrained_path_generator.msg import *
from constrained_path_generator.srv import *
def make_pose((px, py, pz), (rx, ry, rz, rw)):
new_pose = Pose()
new_pose.position.x = px
new_pose.position.y = py
new_pose.position.z = pz
new_pose.orientation.x = rx
new_pose.orientation.y = ry
new_pose.orientation.z = rz
new_pose.orientation.w = rw
return new_pose
def make_pose_stamped((px, py, pz), (rx, ry, rz, rw), frame):
pose_stamped = PoseStamped()
pose_stamped.pose = make_pose((px, py, pz), (rx, ry, rz, rw))
pose_stamped.header.frame_id = frame
return pose_stamped
def make_quaternion(w, x, y, z):
new_quat = Quaternion()
new_quat.w = w
new_quat.x = x
new_quat.y = y
new_quat.z = z
return new_quat
def make_vector(x, y, z):
new_vector = Vector3()
new_vector.x = x
new_vector.y = y
new_vector.z = z
return new_vector
_joint_state = None
def joint_state_cb(msg):
global _joint_state
_joint_state = msg
def test():
test_node = rospy.init_node("test_planner")
js_sub = rospy.Subscriber("joint_states", JointState, joint_state_cb)
planner_client = rospy.ServiceProxy("plan_constrained_path", PlanConstrainedPath)
# Wait for a joint state
while _joint_state is None and not rospy.is_shutdown():
rospy.sleep(0.1)
print "got robot state"
# Make the waypoints
pose_1 = make_pose_stamped((0.585, 0.15, 1.250), (0.0, 0.888, 0.0, -0.460), "base_link")
waypoints = [pose_1]
# Make the request
query = PlanConstrainedPathQuery()
query.path_type = PlanConstrainedPathQuery.CHECK_ENVIRONMENT_COLLISIONS | PlanConstrainedPathQuery.CARTESIAN_IK | PlanConstrainedPathQuery.PLAN
query.waypoints = waypoints
query.group_name = "left_arm"
query.target_link = "l_wrist_roll_link"
query.planning_time = 5.0
query.max_cspace_jump = 0.05
query.task_space_step_size = 0.025
query.initial_state.joint_state = _joint_state
query.path_orientation_constraint = make_quaternion(0.0, 0.888, 0.0, -0.460)
query.path_angle_tolerance = make_vector(0.01, 0.01, 0.01)
query.path_position_tolerance = make_vector(0.02, 0.02, 0.02)
query.goal_angle_tolerance = make_vector(0.01, 0.01, 0.01)
query.goal_position_tolerance = make_vector(0.01, 0.01, 0.01)
full_req = PlanConstrainedPathRequest()
full_req.query = query
full_res = planner_client.call(full_req)
print full_res
# Make some collision_planes
raw_input("Press ENTER to close...")
print "Done"
if __name__ == '__main__':
test()
|
# Copyright 2013 The Distro Tracker Developers
# See the COPYRIGHT file at the top-level directory of this distribution and
# at http://deb.li/DTAuthors
#
# This file is part of Distro Tracker. It is subject to the license terms
# in the LICENSE file found in the top-level directory of this
# distribution and at http://deb.li/DTLicense. No part of Distro Tracker,
# including this file, may be copied, modified, propagated, or distributed
# except according to the terms contained in the LICENSE file.
from __future__ import unicode_literals
from django.contrib.auth.middleware import RemoteUserMiddleware
from django.contrib.auth.backends import RemoteUserBackend
from django.contrib import auth
from pts.accounts.models import UserEmail
from pts.core.utils import get_or_none
from pts.accounts.models import User
import ldap
class DebianSsoUserMiddleware(RemoteUserMiddleware):
"""
Middleware that initiates user authentication based on the REMOTE_USER
field provided by Debian's SSO system.
If the currently logged in user is a DD (as identified by having a @debian.org
address), he is forcefully logged out if the header is no longer found or is
invalid.
"""
header = 'REMOTE_USER'
def extract_email(self, username):
parts = [part for part in username.split(':') if part]
federation, jurisdiction = parts[:2]
if (federation, jurisdiction) != ('DEBIANORG', 'DEBIAN'):
return
return parts[-1] + '@debian.org'
def is_debian_user(self, user):
return any(
email.email.endswith('@debian.org')
for email in user.emails.all()
)
def log_out_user(self, request):
if request.user.is_authenticated():
if self.is_debian_user(request.user):
auth.logout(request)
def process_request(self, request):
if self.header not in request.META:
# If a user is logged in to the PTS by Debian SSO, sign him out
self.log_out_user(request)
return
username = request.META[self.header]
if not username:
self.log_out_user(request)
return
email = self.extract_email(username)
if request.user.is_authenticated():
if request.user.emails.filter(email=email).exists():
# The currently logged in user matches the one given by the
# headers.
return
user = auth.authenticate(remote_user=email)
if user:
request.user = user
auth.login(request, user)
class DebianSsoUserBackend(RemoteUserBackend):
"""
The authentication backend which authenticates the provided remote user
(identified by his @debian.org email) in the PTS. If a matching User
model instance does not exist, one is automatically created. In that case
the DDs first and last name are pulled from Debian's LDAP.
"""
def authenticate(self, remote_user):
if not remote_user:
return
email = remote_user
email_user = get_or_none(UserEmail, email=email)
if not email_user:
names = self.get_user_details(remote_user)
kwargs = {}
if names:
kwargs.update(names)
user = User.objects.create_user(main_email=email, **kwargs)
else:
user = email_user.user
return user
def get_uid(self, remote_user):
# Strips off the @debian.org part of the email leaving the uid
return remote_user[:-11]
def get_user_details(self, remote_user):
"""
Gets the details of the given user from the Debian LDAP.
        :return: Dict with the keys ``first_name`` and ``last_name``, or
            ``None`` if the LDAP lookup did not return anything.
"""
l = ldap.initialize('ldap://db.debian.org')
result_set = l.search_s(
'dc=debian,dc=org',
ldap.SCOPE_SUBTREE,
'uid={}'.format(self.get_uid(remote_user)),
None)
if not result_set:
return None
result = result_set[0]
return {
'first_name': result[1]['cn'][0].decode('utf-8'),
'last_name': result[1]['sn'][0].decode('utf-8'),
}
def get_user(self, user_id):
try:
return User.objects.get(pk=user_id)
except User.DoesNotExist:
return None
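# Hedged example (added for illustration; the username is made up):
# DebianSsoUserMiddleware.extract_email() expects the SSO REMOTE_USER value in
# the form "<federation>:<jurisdiction>:<uid>". For 'DEBIANORG:DEBIAN:jdoe' it
# returns 'jdoe@debian.org'; any other federation/jurisdiction yields None, and
# DebianSsoUserBackend.get_uid() reverses the mapping by stripping the trailing
# '@debian.org'.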
|
"""Utilities for manipulating Wine"""
import os
from collections import OrderedDict
from functools import lru_cache
from gettext import gettext as _
from lutris import runtime, settings
from lutris.gui.dialogs import DontShowAgainDialog, ErrorDialog
from lutris.runners.steam import steam
from lutris.util import linux, system
from lutris.util.log import logger
from lutris.util.strings import parse_version, version_sort
from lutris.util.wine import fsync
WINE_DIR = os.path.join(settings.RUNNER_DIR, "wine")
WINE_DEFAULT_ARCH = "win64" if linux.LINUX_SYSTEM.is_64_bit else "win32"
WINE_PATHS = {
"winehq-devel": "/opt/wine-devel/bin/wine",
"winehq-staging": "/opt/wine-staging/bin/wine",
"wine-development": "/usr/lib/wine-development/wine",
"system": "wine",
}
ESYNC_LIMIT_CHECK = os.environ.get("ESYNC_LIMIT_CHECK", "").lower()
FSYNC_SUPPORT_CHECK = os.environ.get("FSYNC_SUPPORT_CHECK", "").lower()
def get_playonlinux():
"""Return the folder containing PoL config files"""
pol_path = os.path.expanduser("~/.PlayOnLinux")
if system.path_exists(os.path.join(pol_path, "wine")):
return pol_path
return None
def _iter_proton_locations():
"""Iterate through all existing Proton locations"""
for path in [os.path.join(p, "common") for p in steam().get_steamapps_dirs()]:
if os.path.isdir(path):
yield path
for path in [os.path.join(p, "") for p in steam().get_steamapps_dirs()]:
if os.path.isdir(path):
yield path
def get_proton_paths():
"""Get the Folder that contains all the Proton versions. Can probably be improved"""
paths = set()
for path in _iter_proton_locations():
proton_versions = [p for p in os.listdir(path) if "Proton" in p]
for version in proton_versions:
if system.path_exists(os.path.join(path, version, "dist/bin/wine")):
paths.add(path)
return list(paths)
POL_PATH = get_playonlinux()
def detect_arch(prefix_path=None, wine_path=None):
"""Given a Wine prefix path, return its architecture"""
arch = detect_prefix_arch(prefix_path)
if arch:
return arch
if wine_path and system.path_exists(wine_path + "64"):
return "win64"
return "win32"
def detect_prefix_arch(prefix_path=None):
"""Return the architecture of the prefix found in `prefix_path`.
If no `prefix_path` given, return the arch of the system's default prefix.
If no prefix found, return None."""
if not prefix_path:
prefix_path = "~/.wine"
prefix_path = os.path.expanduser(prefix_path)
registry_path = os.path.join(prefix_path, "system.reg")
if not os.path.isdir(prefix_path) or not os.path.isfile(registry_path):
# No prefix_path exists or invalid prefix
logger.debug("Prefix not found: %s", prefix_path)
return None
with open(registry_path, "r") as registry:
for _line_no in range(5):
line = registry.readline()
if "win64" in line:
return "win64"
if "win32" in line:
return "win32"
logger.debug("Failed to detect Wine prefix architecture in %s", prefix_path)
return None
def set_drive_path(prefix, letter, path):
"""Changes the path to a Wine drive"""
dosdevices_path = os.path.join(prefix, "dosdevices")
if not system.path_exists(dosdevices_path):
raise OSError("Invalid prefix path %s" % prefix)
drive_path = os.path.join(dosdevices_path, letter + ":")
if system.path_exists(drive_path):
os.remove(drive_path)
logger.debug("Linking %s to %s", drive_path, path)
os.symlink(path, drive_path)
def use_lutris_runtime(wine_path, force_disable=False):
"""Returns whether to use the Lutris runtime.
The runtime can be forced to be disabled, otherwise it's disabled
automatically if Wine is installed system wide.
"""
if force_disable or runtime.RUNTIME_DISABLED:
logger.info("Runtime is forced disabled")
return False
if WINE_DIR in wine_path:
logger.debug("%s is provided by Lutris, using runtime", wine_path)
return True
if is_installed_systemwide():
logger.info("Using system wine version, not using runtime")
return False
logger.debug("Using Lutris runtime for wine")
return True
def is_mingw_build(wine_path):
"""Returns whether a wine build is built with MingW"""
base_path = os.path.dirname(os.path.dirname(wine_path))
# A MingW build has an .exe file while a GCC one will have a .so
return system.path_exists(os.path.join(base_path, "lib/wine/iexplore.exe"))
def is_gstreamer_build(wine_path):
"""Returns whether a wine build ships with gstreamer libraries.
This allows to set GST_PLUGIN_SYSTEM_PATH_1_0 for the builds that support it.
"""
base_path = os.path.dirname(os.path.dirname(wine_path))
return system.path_exists(os.path.join(base_path, "lib64/gstreamer-1.0"))
def is_installed_systemwide():
"""Return whether Wine is installed outside of Lutris"""
for build in WINE_PATHS.values():
if system.find_executable(build):
# if wine64 is installed but not wine32, don't consider it
# a system-wide installation.
if (
build == "wine" and system.path_exists("/usr/lib/wine/wine64")
and not system.path_exists("/usr/lib/wine/wine")
):
logger.warning("wine32 is missing from system")
return False
return True
return False
def get_system_wine_versions():
"""Return the list of wine versions installed on the system"""
versions = []
for build in sorted(WINE_PATHS.keys()):
version = get_wine_version(WINE_PATHS[build])
if version:
versions.append(build)
return versions
def get_lutris_wine_versions():
"""Return the list of wine versions installed by lutris"""
versions = []
if system.path_exists(WINE_DIR):
dirs = version_sort(os.listdir(WINE_DIR), reverse=True)
for dirname in dirs:
if is_version_installed(dirname):
versions.append(dirname)
return versions
def get_proton_versions():
"""Return the list of Proton versions installed in Steam"""
versions = []
for proton_path in get_proton_paths():
proton_versions = [p for p in os.listdir(proton_path) if "Proton" in p]
for version in proton_versions:
path = os.path.join(proton_path, version, "dist/bin/wine")
if os.path.isfile(path):
versions.append(version)
return versions
def get_pol_wine_versions():
"""Return the list of wine versions installed by Play on Linux"""
if not POL_PATH:
return []
versions = []
for arch in ['x86', 'amd64']:
builds_path = os.path.join(POL_PATH, "wine/linux-%s" % arch)
if not system.path_exists(builds_path):
continue
for version in os.listdir(builds_path):
if system.path_exists(os.path.join(builds_path, version, "bin/wine")):
versions.append("PlayOnLinux %s-%s" % (version, arch))
return versions
@lru_cache(maxsize=8)
def get_wine_versions():
"""Return the list of Wine versions installed"""
versions = []
versions += get_system_wine_versions()
versions += get_lutris_wine_versions()
if os.environ.get("LUTRIS_ENABLE_PROTON"):
versions += get_proton_versions()
versions += get_pol_wine_versions()
return versions
def get_wine_version_exe(version):
if not version:
version = get_default_version()
if not version:
raise RuntimeError("Wine is not installed")
return os.path.join(WINE_DIR, "{}/bin/wine".format(version))
def is_version_installed(version):
return os.path.isfile(get_wine_version_exe(version))
def is_esync_limit_set():
"""Checks if the number of files open is acceptable for esync usage."""
if ESYNC_LIMIT_CHECK in ("0", "off"):
logger.info("fd limit check for esync was manually disabled")
return True
return linux.LINUX_SYSTEM.has_enough_file_descriptors()
def is_fsync_supported():
"""Checks if the running kernel has Valve's futex patch applied."""
if FSYNC_SUPPORT_CHECK in ("0", "off"):
logger.info("futex patch check for fsync was manually disabled")
return True
return fsync.is_fsync_supported()
def get_default_version():
"""Return the default version of wine. Prioritize 64bit builds"""
installed_versions = get_wine_versions()
wine64_versions = [version for version in installed_versions if "64" in version]
if wine64_versions:
return wine64_versions[0]
if installed_versions:
return installed_versions[0]
return
def get_wine_version(wine_path="wine"):
"""Return the version of Wine installed on the system."""
if wine_path != "wine" and not system.path_exists(wine_path):
return
if wine_path == "wine" and not system.find_executable("wine"):
return
if os.path.isabs(wine_path):
wine_stats = os.stat(wine_path)
if wine_stats.st_size < 2000:
# This version is a script, ignore it
return
version = system.read_process_output([wine_path, "--version"])
if not version:
logger.error("Error reading wine version for %s", wine_path)
return
if version.startswith("wine-"):
version = version[5:]
return version
def is_version_esync(path):
"""Determines if a Wine build is Esync capable
Params:
path: the path to the Wine version
Returns:
        bool: True if the build is Esync capable
"""
try:
version = path.split("/")[-3].lower()
except IndexError:
logger.error("Invalid path '%s'", path)
return False
_version_number, version_prefix, version_suffix = parse_version(version)
esync_compatible_versions = ["esync", "lutris", "tkg", "ge", "proton", "staging"]
for esync_version in esync_compatible_versions:
if esync_version in version_prefix or esync_version in version_suffix:
return True
wine_version = get_wine_version(path).lower()
return "esync" in wine_version or "staging" in wine_version
def is_version_fsync(path):
"""Determines if a Wine build is Fsync capable
Params:
path: the path to the Wine version
Returns:
bool: True is the build is Fsync capable
"""
try:
version = path.split("/")[-3].lower()
except IndexError:
logger.error("Invalid path '%s'", path)
return False
_, version_prefix, version_suffix = parse_version(version)
fsync_compatible_versions = ["fsync", "lutris", "ge", "proton"]
for fsync_version in fsync_compatible_versions:
if fsync_version in version_prefix or fsync_version in version_suffix:
return True
return "fsync" in get_wine_version(path).lower()
def get_real_executable(windows_executable, working_dir=None):
"""Given a Windows executable, return the real program
capable of launching it along with necessary arguments."""
exec_name = windows_executable.lower()
if exec_name.endswith(".msi"):
return ("msiexec", ["/i", windows_executable], working_dir)
if exec_name.endswith(".bat"):
if not working_dir or os.path.dirname(windows_executable) == working_dir:
working_dir = os.path.dirname(windows_executable) or None
windows_executable = os.path.basename(windows_executable)
return ("cmd", ["/C", windows_executable], working_dir)
if exec_name.endswith(".lnk"):
return ("start", ["/unix", windows_executable], working_dir)
return (windows_executable, [], working_dir)
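# Hedged examples (added for illustration; the paths are made up):
#   get_real_executable("C:/setup.msi") -> ("msiexec", ["/i", "C:/setup.msi"], None)
#   get_real_executable("C:/run.bat")   -> ("cmd", ["/C", "run.bat"], "C:/")
# Anything else is returned unchanged with no extra arguments.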
def display_vulkan_error(on_launch):
if on_launch:
checkbox_message = _("Launch anyway and do not show this message again.")
else:
checkbox_message = _("Enable anyway and do not show this message again.")
setting = "hide-no-vulkan-warning"
DontShowAgainDialog(
setting,
_("Vulkan is not installed or is not supported by your system"),
secondary_message=_(
"If you have compatible hardware, please follow "
"the installation procedures as described in\n"
"<a href='https://github.com/lutris/lutris/wiki/How-to:-DXVK'>"
"How-to:-DXVK (https://github.com/lutris/lutris/wiki/How-to:-DXVK)</a>"
),
checkbox_message=checkbox_message,
)
return settings.read_setting(setting) == "True"
def esync_display_limit_warning():
ErrorDialog(_(
"Your limits are not set correctly."
" Please increase them as described here:"
" <a href='https://github.com/lutris/lutris/wiki/How-to:-Esync'>"
"How-to:-Esync (https://github.com/lutris/lutris/wiki/How-to:-Esync)</a>"
))
def fsync_display_support_warning():
ErrorDialog(_(
"Your kernel is not patched for fsync."
" Please get a patched kernel to use fsync."
))
def esync_display_version_warning(on_launch=False):
setting = "hide-wine-non-esync-version-warning"
if on_launch:
checkbox_message = _("Launch anyway and do not show this message again.")
else:
checkbox_message = _("Enable anyway and do not show this message again.")
DontShowAgainDialog(
setting,
_("Incompatible Wine version detected"),
secondary_message=_(
"The Wine build you have selected "
"does not support Esync.\n"
"Please switch to an Esync-capable version."
),
checkbox_message=checkbox_message,
)
return settings.read_setting(setting) == "True"
def fsync_display_version_warning(on_launch=False):
setting = "hide-wine-non-fsync-version-warning"
if on_launch:
checkbox_message = _("Launch anyway and do not show this message again.")
else:
checkbox_message = _("Enable anyway and do not show this message again.")
DontShowAgainDialog(
setting,
_("Incompatible Wine version detected"),
secondary_message=_(
"The Wine build you have selected "
"does not support Fsync.\n"
"Please switch to an Fsync-capable version."
),
checkbox_message=checkbox_message,
)
return settings.read_setting(setting) == "True"
def get_overrides_env(overrides):
"""
Output a string of dll overrides usable with WINEDLLOVERRIDES
See: https://wiki.winehq.org/Wine_User%27s_Guide#WINEDLLOVERRIDES.3DDLL_Overrides
"""
if not overrides:
return ""
override_buckets = OrderedDict([("n,b", []), ("b,n", []), ("b", []), ("n", []), ("d", []), ("", [])])
for dll, value in overrides.items():
if not value:
value = ""
value = value.replace(" ", "")
value = value.replace("builtin", "b")
value = value.replace("native", "n")
value = value.replace("disabled", "")
try:
override_buckets[value].append(dll)
except KeyError:
logger.error("Invalid override value %s", value)
continue
override_strings = []
for value, dlls in override_buckets.items():
if not dlls:
continue
override_strings.append("{}={}".format(",".join(sorted(dlls)), value))
return ";".join(override_strings)
|
"""Canadian Insider Transactions.
Usage:
sedi_transactions <issuer_num>...
Options:
-h --help Show this screen.
--version Show version.
"""
import os
from docopt import docopt
from sedi_transactions.transactions import SEDIView
OUTPUT_PATH = os.path.abspath(
os.path.join(os.path.abspath(__file__), '..', 'output')
)
if not os.path.exists(OUTPUT_PATH):
os.mkdir(OUTPUT_PATH)
def write_html(html_text, encoding, filename):
with open(filename, 'w', encoding=encoding) as outfile:
outfile.write(html_text)
def main():
arguments = docopt(__doc__, version='Canadian Insider Transactions 0.1')
sedar_issuers = arguments.get('<issuer_num>')
with SEDIView() as sv:
        for issuer in sedar_issuers:
            html = sv.get_transactions_view(issuer)
            filename = os.path.join(OUTPUT_PATH,
                                    '{0}.html'.format(issuer))
            if html:
                print("Downloading HTML to {0}".format(filename))
                write_html(html, sv.encoding, filename)
if __name__ == '__main__':
main()
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('patchwork', '0014_remove_userprofile_primary_project'),
]
operations = [
migrations.CreateModel(
name='SeriesReference',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('msgid', models.CharField(max_length=255, unique=True)),
],
),
migrations.CreateModel(
name='Series',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(blank=True, help_text=b'An optional name to associate with the series, e.g. "John\'s PCI series".', max_length=255, null=True)),
('date', models.DateTimeField()),
('version', models.IntegerField(default=1, help_text=b'Version of series as indicated by the subject prefix(es)')),
('total', models.IntegerField(help_text=b'Number of patches in series as indicated by the subject prefix(es)')),
('cover_letter', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='series', to='patchwork.CoverLetter')),
],
options={
'ordering': ('date',),
},
),
migrations.CreateModel(
name='SeriesPatch',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('number', models.PositiveSmallIntegerField(help_text=b'The number assigned to this patch in the series')),
('patch', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='patchwork.Patch')),
('series', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='patchwork.Series')),
],
options={
'ordering': ['number'],
},
),
migrations.AddField(
model_name='series',
name='patches',
field=models.ManyToManyField(related_name='series', through='patchwork.SeriesPatch', to='patchwork.Patch'),
),
migrations.AddField(
model_name='series',
name='submitter',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='patchwork.Person'),
),
migrations.AddField(
model_name='seriesreference',
name='series',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='references', related_query_name=b'reference', to='patchwork.Series'),
),
migrations.AlterUniqueTogether(
name='seriespatch',
unique_together=set([('series', 'number'), ('series', 'patch')]),
),
]
|
from pprint import pprint
from django.shortcuts import render
from django.views import View
class Mapa(View):
def __init__(self):
self.template_name = 'cd/mapa.html'
self.title_name = 'Mapa'
def mount_context(self):
enderecos = {}
letras = [
{'letra': 'A', 'int_ini': 1},
{'letra': 'A', 'int_ini': 29},
{'letra': 'A', 'int_ini': 57},
{'letra': 'r'},
{'letra': 'B', 'int_ini': 57},
{'letra': 'B', 'int_ini': 29},
{'letra': 'B', 'int_ini': 1},
{'letra': 'l'},
{'letra': 'C', 'int_ini': 1},
{'letra': 'C', 'int_ini': 29},
{'letra': 'C', 'int_ini': 57},
{'letra': 'r'},
{'letra': 'l'},
{'letra': 'D', 'int_ini': 1},
{'letra': 'D', 'int_ini': 29},
{'letra': 'D', 'int_ini': 57},
{'letra': 'r'},
{'letra': 'E', 'int_ini': 57},
{'letra': 'E', 'int_ini': 29},
{'letra': 'E', 'int_ini': 1},
{'letra': 'l'},
{'letra': 'F', 'int_ini': 1},
{'letra': 'F', 'int_ini': 29},
{'letra': 'F', 'int_ini': 57},
{'letra': 'r'},
{'letra': 'G', 'int_ini': 57},
{'letra': 'G', 'int_ini': 29},
{'letra': 'G', 'int_ini': 1},
]
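        # Each letter entry expands into 28 addresses laid out right-to-left,
        # e.g. {'letra': 'A', 'int_ini': 1} yields 'A28', 'A27', ..., 'A1';
        # 'r' entries render as blank street rows and 'l' entries as '===' separators.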
for num, letra in enumerate(letras):
ends_linha = []
for int_end in range(28):
if letra['letra'] == 'r':
conteudo = ''
elif letra['letra'] == 'l':
conteudo = '==='
else:
conteudo = '{}{}'.format(
letra['letra'], letra['int_ini']+27-int_end)
ends_linha.append(conteudo)
enderecos[num] = ends_linha
context = {
'linhas': [
'A3º', 'A2º', 'A1º', 'rua A/B', 'B1º', 'B2º', 'B3º', '===',
'C3º', 'C2º', 'C1º', 'rua C', '===',
'D3º', 'D2º', 'D1º', 'rua D/E', 'E1º', 'E2º', 'E3º', '===',
'F3º', 'F2º', 'F1º', 'rua F/G', 'G1º', 'G2º', 'G3º'
],
'enderecos': enderecos,
}
return context
def get(self, request, *args, **kwargs):
context = {'titulo': self.title_name}
data = self.mount_context()
context.update(data)
return render(request, self.template_name, context)
|
import theano as _th
import theano.tensor as _T
class Module:
def __init__(self):
self.training_mode = True
self.fn_forward = None
self.fn_accum_grads = None
self.fn_accum_stats = None
#def __hash__(self):
# raise NotImplementedError("You *need* to reimplement hash, even if it's just python's default. See the documentation for more info.")
def zero_grad_parameters(self):
_, grads = self.parameters()
for grad in grads:
grad.set_value(0 * grad.get_value())
def parameters(self):
params, grads = [], []
if hasattr(self, 'weight'):
            assert hasattr(self, 'grad_weight'), "The layer {} has a `weight` variable but no `grad_weight`; you probably forgot to implement it.".format(type(self))
params += [self.weight]
grads += [self.grad_weight]
if hasattr(self, 'bias'):
            assert hasattr(self, 'grad_bias'), "The layer {} has a `bias` variable but no `grad_bias`; you probably forgot to implement it.".format(type(self))
params += [self.bias]
grads += [self.grad_bias]
return params, grads
def evaluate(self):
self.training_mode = False
def training(self):
self.training_mode = True
def symb_forward(self, symb_input):
raise NotImplementedError
def forward(self, data):
if self.fn_forward is None:
symb_in = _T.TensorType(_th.config.floatX, (False,) * data.ndim)('X')
symb_out = self.symb_forward(symb_in)
self.fn_forward = _th.function(inputs=[symb_in], outputs=symb_out)
return self.fn_forward(data)
def accumulate_gradients(self, data_in, data_tgt, loss):
if self.fn_accum_grads is None:
symb_in = _T.TensorType(_th.config.floatX, (False,) * data_in.ndim)('X')
symb_tgt = _T.TensorType(_th.config.floatX, (False,) * data_tgt.ndim)('T')
symb_out = self.symb_forward(symb_in)
symb_err = loss.symb_forward(symb_out, symb_tgt)
params, grads = self.parameters()
symb_grads = _th.grad(cost=symb_err, wrt=params)
grads_updates = [(grad, grad + symb_grad) for grad, symb_grad in zip(grads, symb_grads)]
self.fn_accum_grads = _th.function(
inputs=[symb_in, symb_tgt],
outputs=symb_err,
updates=grads_updates
)
return self.fn_accum_grads(data_in, data_tgt)
def get_stat_updates(self):
return []
def accumulate_statistics(self, data_in):
if self.fn_accum_stats is None:
symb_in = _T.TensorType(_th.config.floatX, (False,) * data_in.ndim)('X')
self.symb_forward(symb_in)
stat_updates = self.get_stat_updates()
if not stat_updates:
# If there's no layer collecting statistics, we don't need to
# compile and call a function. This prevents theano errors.
return
self.fn_accum_stats = _th.function(
inputs=[symb_in],
updates=stat_updates
)
self.fn_accum_stats(data_in)
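
# Illustrative sketch (not part of the library above): a minimal affine layer
# built on Module, assuming numpy is available. It demonstrates the
# weight/grad_weight and bias/grad_bias naming convention that `parameters()`
# and `zero_grad_parameters()` rely on.
import numpy as _np

class _DemoLinear(Module):
    def __init__(self, nin, nout):
        Module.__init__(self)
        self.weight = _th.shared(_np.zeros((nin, nout), dtype=_th.config.floatX), name='W')
        self.grad_weight = _th.shared(_np.zeros((nin, nout), dtype=_th.config.floatX), name='gW')
        self.bias = _th.shared(_np.zeros(nout, dtype=_th.config.floatX), name='b')
        self.grad_bias = _th.shared(_np.zeros(nout, dtype=_th.config.floatX), name='gb')

    def symb_forward(self, symb_input):
        # y = xW + b, broadcasting the bias over the batch dimension
        return _T.dot(symb_input, self.weight) + self.bias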
|
from django.db import models
from django.core.exceptions import ValidationError
from django.core.validators import MinValueValidator, RegexValidator
from filer.fields.file import FilerFileField
from filer.fields.image import FilerImageField
class Browser(models.Model):
class Meta:
ordering = ['name']
verbose_name = "Genome Browser"
verbose_name_plural = "Genome Browsers"
name = models.CharField('browser name',
help_text='Enter a brief, descriptive name for the browser.',
max_length=255,
unique=True,
)
description = models.TextField('browser description',
blank=True,
help_text='Enter a description for the browser.',
)
image = FilerImageField(
blank=True,
null=True,
help_text='Upload/select an image to represent this genome browser.',
related_name='%(app_label)s_%(class)s_browser_image',
)
slug = models.SlugField('slug',
help_text='Enter a unique slug for this genome browser. ' \
'This should get auto-generated.',
max_length=255,
unique=True,
)
chr = models.CharField('default chromosome',
help_text='The chromosome to display when the browser loads.',
max_length=64,
)
start = models.IntegerField('default start position',
help_text='The start position of range to display when the browser loads.',
validators=[
MinValueValidator(1),
],
)
end = models.IntegerField('default end position',
help_text='The end position of range to display when the browser loads.',
validators=[
MinValueValidator(1),
],
)
coordinate_system = models.ForeignKey('cms_genome_browser.CoordSystem',
help_text='Select a coordinate system. Taxonomy ID, authority, version, ' \
'and UCSC name are shown in parentheses, if present.',
)
created = models.DateTimeField(auto_now_add=True)
modified = models.DateTimeField(auto_now=True)
def clean(self):
if self.start > self.end:
raise ValidationError('Start position cannot come after end position.')
def __str__(self):
return self.name
class CoordSystem(models.Model):
class Meta:
ordering = ['species', 'auth', 'version']
verbose_name = "Coordinate System"
verbose_name_plural = "Coordinate Systems"
UCSC_OLD_REGEX = r'^[a-z]{2}\d+$' # gs#
UCSC_NEW_REGEX = r'^[a-z]{3}[A-Z][a-z]{2}\d+$' # gggSss#
species = models.ForeignKey('cms_genome_browser.Species',
help_text='Select a species. Taxonomy ID is shown in parentheses, if present.',
)
auth = models.CharField('authority',
blank=True,
help_text='Authority string used in the ' \
'<a href="http://dasregistry.org/" target="_blank">DAS Registry</a>.',
max_length=10,
)
version = models.CharField('version',
blank=True,
help_text='Version string used in the ' \
'<a href="http://dasregistry.org/" target="_blank">DAS Registry</a>.',
max_length=10,
)
ucsc_name = models.CharField('UCSC name',
blank=True,
help_text='UCSC genome browser name of the assembly, if defined in the list of ' \
'<a href="https://genome.ucsc.edu/FAQ/FAQreleases.html#release1" target="_blank">' \
'UCSC genome releases</a>.',
max_length=10,
validators=[
RegexValidator(
regex='%s|%s' % (UCSC_OLD_REGEX, UCSC_NEW_REGEX),
message="UCSC name must be of the format 'gs#' or 'gggSss#'.",
code='invalid_UCSC_name'
),
]
)
def __str__(self):
coord_system_str = self.species.name
supplemental = []
if self.species.taxid:
supplemental.append(str(self.species.taxid))
supplemental.append(' '.join([self.auth, self.version]))
if self.ucsc_name:
supplemental.append(self.ucsc_name)
if supplemental:
coord_system_str += ' (%s)' % '; '.join(supplemental)
return coord_system_str
class Species(models.Model):
class Meta:
ordering = ['name', 'taxid']
verbose_name = "Species"
verbose_name_plural = "Species"
name = models.CharField('species name',
help_text='Enter the species name.',
max_length=255,
)
taxid = models.IntegerField('taxonomy ID',
blank=True,
null=True,
help_text='Enter the Taxonomy ID for the species. ' \
'Taxonomy names and IDs can be found at ' \
'<a href="http://www.ncbi.nlm.nih.gov/taxonomy" target="_blank">NCBI</a>.',
)
def __str__(self):
species_str = self.name
if self.taxid:
species_str += ' (%s)' % self.taxid
return species_str
class Stylesheet(models.Model):
class Meta:
ordering = ['name',]
verbose_name='Stylesheet'
verbose_name_plural='Stylesheets'
STYLESHEET_TYPE_CHOICES = (
('XML', 'DAS XML Stylesheet'),
('JSON', 'JSON-encoded Stylesheet'),
)
name = models.CharField('stylesheet name',
help_text='Enter a brief, descriptive name for this stylesheet.',
max_length=255,
unique=True,
)
description = models.TextField('stylesheet description',
blank=True,
help_text='Describe the style this stylesheet provides.',
)
style_file = FilerFileField(
        help_text='Upload/select a stylesheet for the track. More info can be found in the ' \
'<a href="https://www.biodalliance.org/stylesheets.html" target="_blank">' \
'Stylesheets for Dalliance</a> documentation.',
related_name='%(app_label)s_%(class)s_stylesheet',
)
is_downloadable = models.BooleanField('stylesheet downloadable?',
default=True,
help_text="Add download button for stylesheet file to the genome browser's info window.",
)
style_type = models.CharField('stylesheet type',
choices=STYLESHEET_TYPE_CHOICES,
help_text='Select the type of stylesheet being used.',
max_length=4,
)
created = models.DateTimeField(auto_now_add=True)
modified = models.DateTimeField(auto_now=True)
def __str__(self):
return self.name
class Track(models.Model):
class Meta:
ordering = ['browser', 'order']
verbose_name = 'Track'
verbose_name_plural = 'Tracks'
TRACK_TYPE_CHOICES = (
('BAM', 'BAM'),
('BED', (
('BED-MemStore', 'BED (MemStore)'),
('BED-Tabix', 'BED (Tabix)'),
)
),
('bigWig', 'bigWig'),
('bigBed', 'bigBed'),
('DAS', (
('DAS-feature', 'DAS (feature)'),
('DAS-sequence', 'DAS (sequence)'),
)
),
('twoBit', 'twoBit'),
('VCF', (
('VCF-MemStore', 'VCF (MemStore)'),
('VCF-Tabix', 'VCF (Tabix)'),
)
),
('WIG', 'WIG'),
)
order = models.PositiveIntegerField()
name = models.CharField('track name',
help_text='Enter a brief name for the track.',
max_length=64,
)
description = models.CharField('track description',
blank=True,
help_text='Enter a short description for the track.',
max_length=255,
)
browser = models.ForeignKey(Browser,
blank=True,
null=True,
help_text='Specify genome browser this track belongs to.',
)
track_type = models.CharField('track type',
choices = TRACK_TYPE_CHOICES,
help_text='Select the source type for this track.',
max_length=20,
)
data_file = FilerFileField(
help_text='Upload/select a data file for the track. More info can be found in the ' \
'<a href="http://www.biodalliance.org/config-source.html" target="_blank">' \
'Configuring a source</a> documentation.',
related_name='%(app_label)s_%(class)s_data',
)
index_file = FilerFileField(
blank=True,
null=True,
help_text="<strong>If data file is a BAM or Tabix file</strong>, upload/select " \
"an index file (.bai or .tbi) that corresponds to the track's BAM/Tabix file.",
related_name='%(app_label)s_%(class)s_index',
)
collapse_super_groups = models.BooleanField('CSG?',
default=False,
help_text="Attempt to allow more 'gene-like' rendering for some data sources.",
)
provides_entrypoint = models.BooleanField('entry?',
default=False,
# What are Entry Points? http://genboree.org/theCommons/ezfaq/show/epigenome-workshop?faq_id=467
help_text='Does this track provide entry points? ' \
'Entry points comprise the coordinate system on which annotations are made.',
)
pinned = models.BooleanField('pin?',
default=False,
help_text="'Pin' this trackc in the non-scrolling section at the top of the browser.",
)
is_downloadable = models.BooleanField('D/L?',
default=True,
help_text="Add download button for data file to the genome browser's info window.",
)
stylesheet = models.ForeignKey('cms_genome_browser.Stylesheet',
blank=True,
null=True,
        help_text='Choose a stylesheet to add custom styles to this track.',
)
publish_track = models.BooleanField('publish?',
default=True,
help_text='Display track in the genome browser.'
)
created = models.DateTimeField(auto_now_add=True)
modified = models.DateTimeField(auto_now=True)
@property
def payload(self):
PAYLOADS = {
'BAM': '',
'BED-MemStore': 'bed',
'BED-Tabix': 'bed',
'bigWig': '',
'bigBed': '',
'DAS-feature': '',
'DAS-sequence': '',
'VCF-MemStore': 'vcf',
'VCF-Tabix': 'vcf',
'twoBit': '',
'WIG': 'wig',
}
return PAYLOADS[self.track_type]
@property
def tier_type(self):
TIER_TYPES = {
'BAM': '',
'BED-MemStore': 'memstore',
'BED-Tabix': 'tabix',
'bigWig': '',
'bigBed': '',
'DAS-feature': '',
'DAS-sequence': 'sequence',
'VCF-MemStore': 'memstore',
'VCF-Tabix': 'tabix',
'twoBit': 'sequence',
'WIG': 'memstore',
}
return TIER_TYPES[self.track_type]
@property
def uri_label(self):
URI_LABELS = {
'BAM': 'bamURI',
'BED-MemStore': 'uri',
'BED-Tabix': 'uri',
'bigWig': 'bwgURI',
'bigBed': 'bwgURI',
'DAS-feature': 'uri',
'DAS-sequence': 'uri',
'VCF-MemStore': 'uri',
'VCF-Tabix': 'uri',
'twoBit': 'twoBitURI',
'WIG': 'uri',
}
return URI_LABELS[self.track_type]
def clean(self):
        if self.index_file is None:
if self.track_type == 'BAM':
raise ValidationError("Must upload/select BAM index (.bai) " \
"file for '{}'.".format(self.data_file))
if self.tier_type == 'tabix':
raise ValidationError("Must upload/select Tabix index (.tbi) " \
"file for '{}'.".format(self.data_file))
else:
if self.track_type != 'BAM' and self.tier_type != 'tabix':
raise ValidationError("Index files are only needed if data file is " \
"BAM, BED (Tabix), or VCF (Tabix). " \
"Please remove index file '{}' or switch data file type." \
.format(self.index_file))
def __str__(self):
return self.name
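
# Illustrative sketch (not part of the models above; `data_url` is assumed to
# be the resolved URL of track.data_file): how the payload/tier_type/uri_label
# properties could be combined into a Dalliance-style source entry.
def _demo_dalliance_source(track, data_url):
    source = {'name': track.name, track.uri_label: data_url}
    if track.tier_type:
        source['tier_type'] = track.tier_type
    if track.payload:
        source['payload'] = track.payload
    return source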
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# (c) Camille Scott, 2019
# File : cdbg_stream.py
# License: MIT
# Author : Camille Scott <[email protected]>
# Date : 11.03.2020
from goetia import libgoetia
from goetia.cdbg import (compute_connected_component_callback,
compute_unitig_fragmentation_callback,
write_cdbg_metrics_callback,
write_cdbg_callback)
from goetia.dbg import get_graph_args, process_graph_args
from goetia.parsing import get_fastx_args, iter_fastx_inputs
from goetia.processors import AsyncSequenceProcessor, at_modulo_interval
from goetia.messages import (Interval, SampleStarted, SampleFinished, Error, AllMessages)
from goetia.metadata import CUR_TIME
from goetia.serialization import cDBGSerialization
from goetia.cli.args import get_output_interval_args, print_interval_settings
from goetia.cli.runner import CommandRunner
import curio
import os
import sys
class cDBGRunner(CommandRunner):
def __init__(self, parser):
get_graph_args(parser)
get_cdbg_args(parser)
get_output_interval_args(parser)
group = get_fastx_args(parser)
group.add_argument('-o', dest='output_filename', default='/dev/stdout')
group.add_argument('-i', '--inputs', dest='inputs', nargs='+', required=True)
parser.add_argument('--echo', default=None,
help='echo all events to the given file.')
parser.add_argument('--curio-monitor', default=False, action='store_true',
help='Run curio kernel monitor for async debugging.')
parser.add_argument('--verbose', default=False, action='store_true')
super().__init__(parser)
def postprocess_args(self, args):
process_graph_args(args)
process_cdbg_args(args)
def setup(self, args):
os.makedirs(args.results_dir, exist_ok=True)
self.dbg_t = args.graph_t
self.hasher = args.hasher_t(args.ksize)
self.storage = args.storage.build(*args.storage_args)
self.dbg = args.graph_t.build(self.storage, self.hasher)
self.cdbg_t = libgoetia.cdbg.cDBG[type(self.dbg)]
self.compactor_t = libgoetia.cdbg.StreamingCompactor[type(self.dbg)]
self.compactor = self.compactor_t.Compactor.build(self.dbg)
if args.normalize:
self.file_processor = self.compactor_t.NormalizingCompactor[FastxReader].build(self.compactor,
args.normalize,
args.interval)
else:
self.file_processor = self.compactor_t.Processor.build(self.compactor,
args.interval)
# Iterator over samples (pairs or singles, depending on pairing-mode)
sample_iter = iter_fastx_inputs(args.inputs, args.pairing_mode, names=args.names)
# AsyncSequenceProcessor does event management and callback for the FileProcessors
self.processor = AsyncSequenceProcessor(self.file_processor, sample_iter, args.echo)
# Subscribe a listener to the FileProcessor producer
self.worker_listener = self.processor.add_listener('worker_q', 'cdbg.consumer')
#
# Register callbacks for data outputs.
        # Track the JSON output files that need to be terminated with a
        # closing ']' when we're done.
#
self.to_close = []
if args.track_cdbg_metrics:
self.worker_listener.on_message(Interval,
write_cdbg_metrics_callback,
self.compactor,
args.track_cdbg_metrics,
args.verbose)
self.to_close.append(args.track_cdbg_metrics)
if args.track_unitig_bp:
if args.unitig_bp_bins is None:
bins = [args.ksize, 100, 200, 500, 1000]
else:
bins = args.unitig_bp_bins
self.worker_listener.on_message(Interval,
at_modulo_interval(compute_unitig_fragmentation_callback,
modulus=args.unitig_bp_tick),
self.cdbg_t,
self.compactor.cdbg,
args.track_unitig_bp,
bins,
verbose=args.verbose)
self.to_close.append(args.track_unitig_bp)
if args.track_cdbg_components:
self.worker_listener.on_message(Interval,
at_modulo_interval(compute_connected_component_callback,
modulus=args.cdbg_components_tick),
self.cdbg_t,
self.compactor.cdbg,
args.track_cdbg_components,
args.component_sample_size,
verbose=args.verbose)
self.to_close.append(args.track_cdbg_components)
if args.save_cdbg:
for cdbg_format in args.save_cdbg_format:
self.worker_listener.on_message(Interval,
at_modulo_interval(write_cdbg_callback,
modulus=args.cdbg_tick),
args.save_cdbg,
cdbg_format,
verbose=args.verbose)
self.worker_listener.on_message(SampleFinished,
write_cdbg_callback,
args.save_cdbg,
cdbg_format,
verbose=args.verbose)
# Close all files when done
async def close_files(msg, files):
for file_name in files:
async with curio.aopen(file_name, 'a') as fp:
await fp.write('\n]\n')
self.worker_listener.on_message(SampleFinished, close_files, self.to_close)
#
# Regular diagnostics output
#
def info_output(msg):
info = f'{msg.msg_type}: {getattr(msg, "state", "")}'\
f'\n\tSample: {msg.sample_name}'\
f'\n\tSequences: {msg.sequence}'\
f'\n\tk-mers: {msg.t}'
if msg.msg_type == 'Error':
info += f'\n\tError: {msg.error}'
print(info, file=sys.stderr)
self.worker_listener.on_message(AllMessages, info_output)
def execute(self, args):
curio.run(self.processor.start, with_monitor=args.curio_monitor)
def teardown(self):
pass
def get_cdbg_args(parser):
default_prefix = 'goetia.build-cdbg.' + CUR_TIME
parser.default_prefix = default_prefix
group = parser.add_argument_group('cDBG')
group.add_argument('--results-dir',
default=default_prefix)
group.add_argument('--normalize',
type=int,
nargs='?',
const=10)
group.add_argument('--save-cdbg',
metavar='PREFIX.<format>',
nargs='?',
const='goetia.cdbg.graph',
help='Save a copy of the cDBG.')
group.add_argument('--save-cdbg-format',
nargs='+',
choices=cDBGSerialization.FORMATS,
default=['gfa1'])
group.add_argument('--cdbg-tick',
type=int,
default=10,
help='Save every N interval ticks.')
group.add_argument('--track-cdbg-metrics',
metavar='FILE_NAME.json',
nargs='?',
const='goetia.cdbg.stats.json',
help='Output basic cDBG metrics.')
group.add_argument('--cdbg-metrics-tick',
type=int,
default=5,
help='Output every N interval ticks.')
group.add_argument('--track-cdbg-components',
metavar='FILE_NAME.json',
nargs='?',
const='goetia.cdbg.components.json',
help='Save the distribution of component sizes.')
group.add_argument('--component-sample-size',
type=int,
default=10000,
help='Number of components to sample for size.')
group.add_argument('--cdbg-components-tick',
type=int,
default=5,
help='Sample and save distribution every N interval ticks.')
group.add_argument('--track-unitig-bp',
metavar='FILENAME.json',
nargs='?',
const='goetia.cdbg.unitigs.bp.json',
help='Track the distribution of unitig sizes.')
group.add_argument('--unitig-bp-bins',
nargs='+',
type=int,
help='Bin sizes of distribution.')
group.add_argument('--unitig-bp-tick',
type=int,
default=10)
group.add_argument('--validate',
metavar='FILENAME.csv',
nargs='?',
const='goetia.cdbg.validation.csv')
return group
def process_cdbg_args(args):
def join(p):
return p if p is None else os.path.join(args.results_dir, p)
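    # join() leaves None untouched so optional outputs stay disabled; any
    # provided filename is relocated under args.results_dir.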
    args.track_cdbg_metrics = args.track_cdbg_stats = join(args.track_cdbg_metrics)
    args.track_cdbg_components = join(args.track_cdbg_components)
    args.save_cdbg = join(args.save_cdbg)
    args.track_unitig_bp = args.track_cdbg_unitig_bp = join(args.track_unitig_bp)
def print_cdbg_args(args):
print('* cDBG Params', file=sys.stderr)
print('* Directory: ', args.results_dir, file=sys.stderr)
if args.save_cdbg:
print('* Saving cDBG every {0} sequences with file prefix {1}'.format(args.coarse_interval,
args.save_cdbg),
file=sys.stderr)
print('* cDBG save formats: {0}'.format(', '.join(args.save_cdbg_format)))
if args.track_cdbg_stats:
print('* Tracking cDBG stats and reporting every {0} sequences'.format(args.fine_interval),
file=sys.stderr)
print('* Saving tracking information to', args.track_cdbg_stats, file=sys.stderr)
if args.track_cdbg_history:
print('* Tracking cDBG history and saving to', args.track_cdbg_history, file=sys.stderr)
if args.validate:
print('* cDBG will be validated on completion and results saved to', args.validate,
file=sys.stderr)
print('*', '*' * 10, '*', sep='\n', file=sys.stderr)
|
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Changing field 'GipsyToolbarMenu.url'
db.alter_column(u'gipsy_toolbar_gipsytoolbarmenu', 'url', self.gf('django.db.models.fields.CharField')(max_length=255, null=True))
def backwards(self, orm):
# User chose to not deal with backwards NULL issues for 'GipsyToolbarMenu.url'
raise RuntimeError("Cannot reverse this migration. 'GipsyToolbarMenu.url' and its values cannot be restored.")
# The following code is provided here to aid in writing a correct migration
# Changing field 'GipsyToolbarMenu.url'
db.alter_column(u'gipsy_toolbar_gipsytoolbarmenu', 'url', self.gf('django.db.models.fields.CharField')(max_length=255))
models = {
u'gipsy_toolbar.gipsytoolbarmenu': {
'Meta': {'ordering': "['order']", 'object_name': 'GipsyToolbarMenu'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'order': ('django.db.models.fields.PositiveSmallIntegerField', [], {}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['gipsy_toolbar.GipsyToolbarMenu']", 'null': 'True', 'blank': 'True'}),
'url': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'})
}
}
complete_apps = ['gipsy_toolbar']
|
import itertools
import random
# Directions
NORTH = 'N'
EAST = 'E'
SOUTH = 'S'
WEST = 'W'
DIRECTIONS = [NORTH, EAST, SOUTH, WEST]
REVERSE = {
NORTH: SOUTH,
EAST: WEST,
SOUTH: NORTH,
WEST: EAST,
}
OFFSET = {
NORTH: -16,
EAST: 1,
SOUTH: 16,
WEST: -1,
}
# Masks
M_NORTH = 0x01
M_EAST = 0x02
M_SOUTH = 0x04
M_WEST = 0x08
M_ROBOT = 0x10
M_LOOKUP = {
NORTH: M_NORTH,
EAST: M_EAST,
SOUTH: M_SOUTH,
WEST: M_WEST,
}
# Colors
RED = 'R'
GREEN = 'G'
BLUE = 'B'
YELLOW = 'Y'
COLORS = [RED, GREEN, BLUE, YELLOW]
# Shapes
CIRCLE = 'C'
TRIANGLE = 'T'
SQUARE = 'Q'
HEXAGON = 'H'
SHAPES = [CIRCLE, TRIANGLE, SQUARE, HEXAGON]
# Tokens
TOKENS = [''.join(token) for token in itertools.product(COLORS, SHAPES)]
# Quadrants
QUAD_1A = (
'NW,N,N,N,NE,NW,N,N,'
'W,S,X,X,X,X,SEYH,W,'
'WE,NWGT,X,X,X,X,N,X,'
'W,X,X,X,X,X,X,X,'
'W,X,X,X,X,X,S,X,'
'SW,X,X,X,X,X,NEBQ,W,'
'NW,X,E,SWRC,X,X,X,S,'
'W,X,X,N,X,X,E,NW'
)
QUAD_1B = (
'NW,NE,NW,N,NS,N,N,N,'
'W,S,X,E,NWRC,X,X,X,'
'W,NEGT,W,X,X,X,X,X,'
'W,X,X,X,X,X,SEYH,W,'
'W,X,X,X,X,X,N,X,'
'SW,X,X,X,X,X,X,X,'
'NW,X,E,SWBQ,X,X,X,S,'
'W,X,X,N,X,X,E,NW'
)
QUAD_2A = (
'NW,N,N,NE,NW,N,N,N,'
'W,X,X,X,X,E,SWBC,X,'
'W,S,X,X,X,X,N,X,'
'W,NEYT,W,X,X,S,X,X,'
'W,X,X,X,E,NWGQ,X,X,'
'W,X,SERH,W,X,X,X,X,'
'SW,X,N,X,X,X,X,S,'
'NW,X,X,X,X,X,E,NW'
)
QUAD_2B = (
'NW,N,N,N,NE,NW,N,N,'
'W,X,SERH,W,X,X,X,X,'
'W,X,N,X,X,X,X,X,'
'WE,SWGQ,X,X,X,X,S,X,'
'SW,N,X,X,X,E,NWYT,X,'
'NW,X,X,X,X,S,X,X,'
'W,X,X,X,X,NEBC,W,S,'
'W,X,X,X,X,X,E,NW'
)
QUAD_3A = (
'NW,N,N,NE,NW,N,N,N,'
'W,X,X,X,X,SEGH,W,X,'
'WE,SWRQ,X,X,X,N,X,X,'
'SW,N,X,X,X,X,S,X,'
'NW,X,X,X,X,E,NWYC,X,'
'W,X,S,X,X,X,X,X,'
'W,X,NEBT,W,X,X,X,S,'
'W,X,X,X,X,X,E,NW'
)
QUAD_3B = (
'NW,N,NS,N,NE,NW,N,N,'
'W,E,NWYC,X,X,X,X,X,'
'W,X,X,X,X,X,X,X,'
'W,X,X,X,X,E,SWBT,X,'
'SW,X,X,X,S,X,N,X,'
'NW,X,X,X,NERQ,W,X,X,'
'W,SEGH,W,X,X,X,X,S,'
'W,N,X,X,X,X,E,NW'
)
QUAD_4A = (
'NW,N,N,NE,NW,N,N,N,'
'W,X,X,X,X,X,X,X,'
'W,X,X,X,X,SEBH,W,X,'
'W,X,S,X,X,N,X,X,'
'SW,X,NEGC,W,X,X,X,X,'
'NW,S,X,X,X,X,E,SWRT,'
'WE,NWYQ,X,X,X,X,X,NS,'
'W,X,X,X,X,X,E,NW'
)
QUAD_4B = (
'NW,N,N,NE,NW,N,N,N,'
'WE,SWRT,X,X,X,X,S,X,'
'W,N,X,X,X,X,NEGC,W,'
'W,X,X,X,X,X,X,X,'
'W,X,SEBH,W,X,X,X,S,'
'SW,X,N,X,X,X,E,NWYQ,'
'NW,X,X,X,X,X,X,S,'
'W,X,X,X,X,X,E,NW'
)
QUADS = [
(QUAD_1A, QUAD_1B),
(QUAD_2A, QUAD_2B),
(QUAD_3A, QUAD_3B),
(QUAD_4A, QUAD_4B),
]
# Rotation
ROTATE_QUAD = [
56, 48, 40, 32, 24, 16, 8, 0,
57, 49, 41, 33, 25, 17, 9, 1,
58, 50, 42, 34, 26, 18, 10, 2,
59, 51, 43, 35, 27, 19, 11, 3,
60, 52, 44, 36, 28, 20, 12, 4,
61, 53, 45, 37, 29, 21, 13, 5,
62, 54, 46, 38, 30, 22, 14, 6,
63, 55, 47, 39, 31, 23, 15, 7,
]
ROTATE_WALL = {
NORTH: EAST,
EAST: SOUTH,
SOUTH: WEST,
WEST: NORTH,
}
# Helper Functions
def idx(x, y, size=16):
return y * size + x
def xy(index, size=16):
x = index % size
    y = index // size
return (x, y)
def rotate_quad(data, times=1):
for i in range(times):
result = [data[index] for index in ROTATE_QUAD]
result = [''.join(ROTATE_WALL.get(c, c) for c in x) for x in result]
data = result
return data
def create_grid(quads=None):
if quads is None:
quads = [random.choice(pair) for pair in QUADS]
random.shuffle(quads)
quads = [quad.split(',') for quad in quads]
quads = [rotate_quad(quads[i], i) for i in [0, 1, 3, 2]]
result = [None for i in range(16 * 16)]
for i, quad in enumerate(quads):
dx, dy = xy(i, 2)
for j, data in enumerate(quad):
x, y = xy(j, 8)
x += dx * 8
y += dy * 8
index = idx(x, y)
result[index] = data
return result
def to_mask(cell):
result = 0
for letter, mask in M_LOOKUP.items():
if letter in cell:
result |= mask
return result
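# e.g. to_mask('NWGT') -> M_NORTH | M_WEST == 0x09; the robot bit (M_ROBOT) is
# OR'd in separately when exporting the grid.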
# Game
class Game(object):
@staticmethod
def hardest():
quads = [QUAD_2B, QUAD_4B, QUAD_3B, QUAD_1B]
robots = [226, 48, 43, 18]
token = 'BT'
return Game(quads=quads, robots=robots, token=token)
def __init__(self, seed=None, quads=None, robots=None, token=None):
if seed:
random.seed(seed)
self.grid = create_grid(quads)
if robots is None:
self.robots = self.place_robots()
else:
self.robots = dict(zip(COLORS, robots))
self.token = token or random.choice(TOKENS)
self.moves = 0
self.last = None
def place_robots(self):
result = {}
used = set()
for color in COLORS:
while True:
index = random.randint(0, 255)
if index in (119, 120, 135, 136):
continue
if self.grid[index][-2:] in TOKENS:
continue
if index in used:
continue
result[color] = index
used.add(index)
break
return result
def get_robot(self, index):
for color, position in self.robots.iteritems():
if position == index:
return color
return None
def can_move(self, color, direction):
if self.last == (color, REVERSE[direction]):
return False
index = self.robots[color]
if direction in self.grid[index]:
return False
new_index = index + OFFSET[direction]
if new_index in self.robots.itervalues():
return False
return True
def compute_move(self, color, direction):
index = self.robots[color]
robots = self.robots.values()
while True:
if direction in self.grid[index]:
break
new_index = index + OFFSET[direction]
if new_index in robots:
break
index = new_index
return index
def do_move(self, color, direction):
start = self.robots[color]
last = self.last
if last == (color, REVERSE[direction]):
print 'reverse'
#raise Exception
end = self.compute_move(color, direction)
if start == end:
print 'wall move'
#raise Exception
self.moves += 1
self.robots[color] = end
self.last = (color, direction)
return (color, start, last)
def undo_move(self, data):
color, start, last = data
self.moves -= 1
self.robots[color] = start
self.last = last
def get_moves(self, colors=None):
result = []
colors = colors or COLORS
for color in colors:
for direction in DIRECTIONS:
if self.can_move(color, direction):
result.append((color, direction))
return result
def over(self):
color = self.token[0]
return self.token in self.grid[self.robots[color]]
def key(self):
return tuple(self.robots.itervalues())
def search(self):
max_depth = 1
while True:
#print 'Searching to depth:', max_depth
result = self._search([], set(), 0, max_depth)
if result is not None:
return result
max_depth += 1
def _search(self, path, memo, depth, max_depth):
if self.over():
return list(path)
if depth == max_depth:
return None
key = (depth, self.key())
if key in memo:
return None
memo.add(key)
if depth == max_depth - 1:
colors = [self.token[0]]
else:
colors = None
moves = self.get_moves(colors)
for move in moves:
data = self.do_move(*move)
path.append(move)
result = self._search(path, memo, depth + 1, max_depth)
path.pop(-1)
self.undo_move(data)
if result:
return result
return None
def export(self):
grid = []
token = None
robots = [self.robots[color] for color in COLORS]
for index, cell in enumerate(self.grid):
mask = to_mask(cell)
if index in robots:
mask |= M_ROBOT
grid.append(mask)
if self.token in cell:
token = index
robot = COLORS.index(self.token[0])
return {
'grid': grid,
'robot': robot,
'token': token,
'robots': robots,
}
def export2(self):
grid = []
token = None
robots = [self.robots[color] for color in COLORS]
for index, cell in enumerate(self.grid):
mask = to_mask(cell)
grid.append(mask)
if self.token in cell:
token = index
robot = COLORS.index(self.token[0])
return {
'grid': grid,
'robot': robot,
'token': token,
'robots': robots,
}
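
if __name__ == '__main__':
    # Illustrative usage sketch: build a seeded game and solve it with the
    # iterative-deepening search above (may take a moment for hard positions).
    game = Game(seed=42)
    path = game.search()
    print 'Solved %s in %d moves: %s' % (game.token, len(path), path)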
|
# -*- coding: utf-8 -*-
import xbmc, xbmcgui, xbmcplugin, xbmcaddon, urllib2, urllib, re, string, sys, os, gzip, StringIO, math, urlparse
import base64, time, cookielib
import simplejson
# Plugin constants
__addon__ = xbmcaddon.Addon()
__addonname__ = __addon__.getAddonInfo('name')
__profile__ = xbmc.translatePath( __addon__.getAddonInfo('profile') ).decode("utf-8")
UserAgent = 'Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/6.0)'
ORDER_LIST1 = [['1','最多播放'], ['2','最多评论'], ['4','最受欢迎'], ['5','最近上映'], ['6','最近更新']]
DAYS_LIST1 = [['1','今日'], ['2','本周'], ['4','历史']]
ORDER_LIST2 = [['1','最多播放'], ['2','最新发布'], ['3','最多评论'], ['4','最多收藏'], ['5','最受欢迎']]
DAYS_LIST2 = [['1','今日'], ['2','本周'], ['3','本月'], ['4','历史']]
class youkuDecoder:
def __init__( self ):
return
def getFileIDMixString(self,seed):
mixed = []
source = list("abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ/\:._-1234567890")
seed = float(seed)
for i in range(len(source)):
seed = (seed * 211 + 30031 ) % 65536
index = math.floor(seed /65536 *len(source))
mixed.append(source[int(index)])
source.remove(source[int(index)])
return mixed
def getFileId(self,fileId,seed):
mixed = self.getFileIDMixString(seed)
ids = fileId.split('*')
realId = []
for i in range(0,len(ids)-1):
realId.append(mixed[int(ids[i])])
return ''.join(realId)
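    # trans_e below is a plain RC4 stream cipher: `a` is the key and `c` the
    # plaintext/ciphertext (the XOR makes encryption and decryption identical).
    # It is used with the two f_code keys further down to build Youku's "ep"
    # request parameter.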
def trans_e(self, a, c):
b = range(256)
f = 0
result = ''
h = 0
while h < 256:
f = (f + b[h] + ord(a[h % len(a)])) % 256
b[h], b[f] = b[f], b[h]
h += 1
q = f = h = 0
while q < len(c):
h = (h + 1) % 256
f = (f + b[h]) % 256
b[h], b[f] = b[f], b[h]
result += chr(ord(c[q]) ^ b[(b[h] + b[f]) % 256])
q += 1
return result
def trans_f(self, a, c):
"""
:argument a: list
:param c:
:return:
"""
b = []
for f in range(len(a)):
i = ord(a[f][0]) - 97 if "a" <= a[f] <= "z" else int(a[f]) + 26
e = 0
while e < 36:
if c[e] == i:
i = e
break
e += 1
v = i - 26 if i > 25 else chr(i + 97)
b.append(str(v))
return ''.join(b)
f_code_1 = 'becaf9be'
f_code_2 = 'bf7e5f01'
def _calc_ep(self, sid, fileId, token):
ep = self.trans_e(self.f_code_2, '%s_%s_%s' % (sid, fileId, token))
return base64.b64encode(ep)
def _calc_ep2(self, vid, ep):
e_code = self.trans_e(self.f_code_1, base64.b64decode(ep))
sid, token = e_code.split('_')
new_ep = self.trans_e(self.f_code_2, '%s_%s_%s' % (sid, vid, token))
return base64.b64encode(new_ep), token, sid
def get_sid(self, ep):
e_code = self.trans_e(self.f_code_1, base64.b64decode(ep))
return e_code.split('_')
def generate_ep(self, no, fileid, sid, token):
ep = urllib.quote(self._calc_ep(sid, fileid, token).encode('latin1'),
safe="~()*!.'"
)
return ep
def log(txt):
message = '%s: %s' % (__addonname__, txt)
xbmc.log(msg=message, level=xbmc.LOGDEBUG)
def GetHttpData(url, referer=''):
log("%s::url - %s" % (sys._getframe().f_code.co_name, url))
req = urllib2.Request(url)
req.add_header('User-Agent', UserAgent)
if referer:
req.add_header('Referer', referer)
try:
response = urllib2.urlopen(req)
httpdata = response.read()
if response.headers.get('content-encoding', None) == 'gzip':
httpdata = gzip.GzipFile(fileobj=StringIO.StringIO(httpdata)).read()
charset = response.headers.getparam('charset')
response.close()
except:
log( "%s (%d) [%s]" % (
sys.exc_info()[2].tb_frame.f_code.co_name,
sys.exc_info()[2].tb_lineno,
sys.exc_info()[1]
))
return ''
match = re.compile('<meta http-equiv=["]?[Cc]ontent-[Tt]ype["]? content="text/html;[\s]?charset=(.+?)"').findall(httpdata)
if match:
charset = match[0]
else:
match = re.compile('<meta charset="(.+?)"').findall(httpdata)
if match:
charset = match[0]
if charset:
charset = charset.lower()
if (charset != 'utf-8') and (charset != 'utf8'):
httpdata = httpdata.decode(charset, 'ignore').encode('utf8', 'ignore')
return httpdata
def searchDict(dlist,idx):
for i in range(0,len(dlist)):
if dlist[i][0] == idx:
return dlist[i][1]
return ''
def getCurrent(text,list,id):
match = re.compile('<li class="current"\s*><span>(.+?)</span>').search(text)
if match:
list.append([id, match.group(1)])
def getList(listpage,id,genre,area,year):
if id == 'c_95':
str1 = '风格:'
str3a = '发行:'
str3b = 'r'
elif id == 'c_84' or id == 'c_87':
str1 = '类型:'
str3a = '出品:'
str3b = 'pr'
else:
str1 = '类型:'
str3a = '时间:'
str3b = 'r'
match = re.compile('<label>%s</label>(.+?)</ul>' % (str1), re.DOTALL).search(listpage)
genrelist = re.compile('_g_([^_\.]*)[^>]*>([^<]+)</a>').findall(match.group(1))
getCurrent(match.group(1), genrelist, genre)
if id == 'c_84' or id == 'c_87':
arealist = []
else:
match = re.compile('<label>地区:</label>(.+?)</ul>', re.DOTALL).search(listpage)
arealist = re.compile('_a_([^_\.]*)[^>]*>([^<]+)</a>').findall(match.group(1))
getCurrent(match.group(1), arealist, area)
match = re.compile('<label>%s</label>(.+?)</ul>' % (str3a), re.DOTALL).search(listpage)
yearlist = re.compile('_%s_([^_\.]*)[^>]*>([^<]+)</a>' % (str3b)).findall(match.group(1))
getCurrent(match.group(1), yearlist, year)
return genrelist,arealist,yearlist
def getList2(listpage,genre):
match = re.compile('<label>类型:</label>(.+?)</ul>', re.DOTALL).search(listpage)
if match:
genrelist = re.compile('<li><a href=".*?/category/video/[^g]*g_([0-9]+)[^\.]*\.html"[^>]*>(.+?)</a></li>').findall(match.group(1))
getCurrent(match.group(1), genrelist, genre)
else:
genrelist = []
return genrelist
def rootList():
link = GetHttpData('http://list.youku.com/')
match0 = re.compile('<label>分类:</label>(.+?)</ul>', re.DOTALL).search(link)
match = re.compile('<li><a\s*href="/category/([^/]+)/([^\.]+)\.html">(.+?)</a></li>', re.DOTALL).findall(match0.group(1))
totalItems = len(match)
for path, id, name in match:
if path == 'show':
u = sys.argv[0]+"?mode=1&name="+urllib.quote_plus(name)+"&id="+urllib.quote_plus(id)+"&genre=&area=&year=&order=1&days=1&page=1"
else:
u = sys.argv[0]+"?mode=11&name="+urllib.quote_plus(name)+"&id="+urllib.quote_plus(id)+"&genre=0&year=1&order=1&days=1&page=1"
li = xbmcgui.ListItem(name)
xbmcplugin.addDirectoryItem(int(sys.argv[1]),u,li,True,totalItems)
xbmcplugin.endOfDirectory(int(sys.argv[1]))
def progList(name,id,page,genre,area,year,order,days):
url = 'http://list.youku.com/category/show/%s_g_%s_a_%s_s_%s_d_%s_r_%s_p_%s.html' % (id, genre, area, order, days, year, page)
link = GetHttpData(url)
match = re.compile('<ul class="yk-pages">(.+?)</ul>', re.DOTALL).search(link)
plist = []
if match:
match1 = re.compile('<li.+?>([0-9]+)(</a>|</span>)</li>', re.DOTALL).findall(match.group(1))
if match1:
for num, temp in match1:
if (num not in plist) and (num != page):
plist.append(num)
totalpages = int(match1[len(match1)-1][0])
else:
totalpages = 1
match = re.compile('<div class="yk-filter" id="filter">(.+?)<div class="yk-filter-handle">', re.DOTALL).search(link)
if match:
listpage = match.group(1)
else:
listpage = ''
if id == 'c_95':
match = re.compile('<div class="yk-pack p-list"(.+?)</ul></div>', re.DOTALL).findall(link)
else:
match = re.compile('<div class="yk-pack pack-film">(.+?)</ul></div>', re.DOTALL).findall(link)
totalItems = len(match) + 1 + len(plist)
currpage = int(page)
genrelist,arealist,yearlist = getList(listpage,id,genre,area,year)
if genre:
genrestr = searchDict(genrelist,genre)
else:
genrestr = '全部类型'
if area:
areastr = searchDict(arealist,area)
else:
areastr = '全部地区'
if year:
yearstr = searchDict(yearlist,year)
else:
if id == 'c_84' or id == 'c_87':
yearstr = '全部出品'
else:
yearstr = '全部年份'
li = xbmcgui.ListItem(name+'(第'+str(currpage)+'/'+str(totalpages)+'页)【[COLOR FFFF0000]' + genrestr + '[/COLOR]/[COLOR FF00FF00]' + areastr + '[/COLOR]/[COLOR FFFFFF00]' + yearstr + '[/COLOR]/[COLOR FF00FF00]' + searchDict(DAYS_LIST1,days) + '[/COLOR]/[COLOR FF00FFFF]' + searchDict(ORDER_LIST1,order) + '[/COLOR]】(按此选择)')
u = sys.argv[0]+"?mode=4&name="+urllib.quote_plus(name)+"&id="+urllib.quote_plus(id)+"&genre="+urllib.quote_plus(genre)+"&area="+urllib.quote_plus(area)+"&year="+urllib.quote_plus(year)+"&order="+order+"&days="+days+"&page="+urllib.quote_plus(listpage)
xbmcplugin.addDirectoryItem(int(sys.argv[1]), u, li, True, totalItems)
for i in range(0,len(match)):
if id in ('c_96','c_95'):
mode = 2
isdir = False
else:
mode = 3
isdir = True
match1 = re.compile('/id_(.+?).html"').search(match[i])
p_id = match1.group(1)
match1 = re.compile('<img class="quic".*?src="(.+?)"').search(match[i])
p_thumb = match1.group(1)
match1 = re.compile('<li class="title"><a .*?">(.+?)</a>').search(match[i])
p_name = match1.group(1)
match1 = re.compile('<li class="status hover-hide"><span .*?<span>(.+?)</span>').search(match[i])
if match1:
p_name1 = p_name + '(' + match1.group(1) + ')'
else:
p_name1 = p_name
match1 = re.compile('<span class="vip-free">(.+?)</span>').search(match[i])
if match1:
p_name1 = p_name1 + '[' + match1.group(1) + ']'
li = xbmcgui.ListItem(str(i + 1) + '. ' + p_name1, iconImage = '', thumbnailImage = p_thumb)
u = sys.argv[0]+"?mode="+str(mode)+"&name="+urllib.quote_plus(p_name)+"&id="+urllib.quote_plus(p_id)+"&thumb="+urllib.quote_plus(p_thumb)
#li.setInfo(type = "Video", infoLabels = {"Title":p_name, "Director":p_director, "Genre":p_genre, "Plot":p_plot, "Year":p_year, "Cast":p_cast, "Tagline":p_tagline})
xbmcplugin.addDirectoryItem(int(sys.argv[1]), u, li, isdir, totalItems)
for num in plist:
li = xbmcgui.ListItem("... 第" + num + "页")
u = sys.argv[0]+"?mode=1&name="+urllib.quote_plus(name)+"&id="+urllib.quote_plus(id)+"&genre="+urllib.quote_plus(genre)+"&area="+urllib.quote_plus(area)+"&year="+year+"&order="+order+"&days="+days+"&page="+str(num)
xbmcplugin.addDirectoryItem(int(sys.argv[1]), u, li, True, totalItems)
xbmcplugin.setContent(int(sys.argv[1]), 'movies')
xbmcplugin.endOfDirectory(int(sys.argv[1]))
def getMovie(name,id,thumb):
if len(id)==21:
link = GetHttpData('http://www.youku.com/show_page/id_' + id + '.html')
match = re.compile('<a class="btnShow btnplayposi".*?href="http://v.youku.com/v_show/id_(.+?)\.html[^"]*"', re.DOTALL).search(link)
if not match:
match = re.compile('<div class="btnplay">.*?href="http://v.youku.com/v_show/id_(.+?)\.html[^"]*"', re.DOTALL).search(link)
if match:
            # Play the main feature
PlayVideo(name, match.group(1), thumb)
else:
            # Otherwise, try to resolve the trailer link
match = re.compile('class="btnShow btnplaytrailer".*?href="http://v.youku.com/v_show/id_(.+?)\.html[^"]*"', re.DOTALL).search(link)
if match:
PlayVideo(name, match.group(1), thumb)
else:
xbmcgui.Dialog().ok(__addonname__, '解析地址异常,可能是收费节目,无法播放')
else:
PlayVideo(name, id, thumb)
def seriesList(name,id,thumb):
url = "http://v.youku.com/v_show/id_%s.html" % (id)
data = GetHttpData(url)
#pages = re.compile('<li data="(point_reload_[0-9]+)"', re.DOTALL).findall(data)
#if len(pages)>1:
# for i in range(1,len(pages)):
# url = "http://www.youku.com/show_point/id_%s.html?dt=json&divid=%s&tab=0&__rt=1&__ro=%s" % (id, pages[i], pages[i])
# link = GetHttpData(url)
# data += link
match = re.compile('class="item(.+?)</div>', re.DOTALL).findall(data)
totalItems = len(match)
for i in range(0,len(match)):
match1 = re.compile('//v.youku.com/v_show/id_(.+?)\.html').search(match[i])
if match1:
p_id = match1.group(1)
else:
continue
#match1 = re.compile('<div class="thumb"><img .*?src="(.+?)"').search(match[i])
p_thumb = thumb
match1 = re.compile('title="(.+?)"').search(match[i])
p_name = "%s %s" % (name, match1.group(1))
p_name1 = p_name
li = xbmcgui.ListItem(p_name1, iconImage = '', thumbnailImage = p_thumb)
u = sys.argv[0]+"?mode=10&name="+urllib.quote_plus(p_name)+"&id="+urllib.quote_plus(p_id)+"&thumb="+urllib.quote_plus(p_thumb)
#li.setInfo(type = "Video", infoLabels = {"Title":p_name, "Director":p_director, "Genre":p_genre, "Plot":p_plot, "Year":p_year, "Cast":p_cast, "Tagline":p_tagline})
xbmcplugin.addDirectoryItem(int(sys.argv[1]), u, li, False, totalItems)
xbmcplugin.setContent(int(sys.argv[1]), 'movies')
xbmcplugin.endOfDirectory(int(sys.argv[1]))
def progList2(name,id,page,genre,order,days):
url = 'http://list.youku.com/category/video/%s_g_%s_s_%s_d_%s_p_%s.html' % (id, genre, order, days, page)
link = GetHttpData(url)
match = re.compile('<ul class="yk-pages">(.+?)</ul>', re.DOTALL).search(link)
plist = []
if match:
match1 = re.compile('<li.+?>([0-9]+)(</a>|</span>)</li>', re.DOTALL).findall(match.group(1))
if match1:
for num, temp in match1:
if (num not in plist) and (num != page):
plist.append(num)
totalpages = int(match1[len(match1)-1][0])
else:
totalpages = 1
match = re.compile('<div class="yk-filter\s*" id="filter">(.+?)<div class="yk-filter-handle">', re.DOTALL).search(link)
if match:
listpage = match.group(1)
else:
listpage = ''
match = re.compile('<div class="yk-pack p-list"(.+?)</ul></div>', re.DOTALL).findall(link)
totalItems = len(match) + 1 + len(plist)
currpage = int(page)
genrelist = getList2(listpage, genre)
if genre == '0':
genrestr = '全部类型'
else:
genrestr = searchDict(genrelist,genre)
li = xbmcgui.ListItem(name+'(第'+str(currpage)+'/'+str(totalpages)+'页)【[COLOR FFFF0000]' + genrestr + '[/COLOR]/[COLOR FF00FF00]' + searchDict(DAYS_LIST2,days) + '[/COLOR]/[COLOR FF00FFFF]' + searchDict(ORDER_LIST2,order) + '[/COLOR]】(按此选择)')
u = sys.argv[0]+"?mode=12&name="+urllib.quote_plus(name)+"&id="+urllib.quote_plus(id)+"&genre="+urllib.quote_plus(genre)+"&order="+order+"&days="+days+"&page="+urllib.quote_plus(listpage)
xbmcplugin.addDirectoryItem(int(sys.argv[1]), u, li, True, totalItems)
for i in range(0,len(match)):
match1 = re.compile('/id_(.+?).html"').search(match[i])
p_id = match1.group(1)
match1 = re.compile('<img class="quic".*?src="(.+?)"').search(match[i])
p_thumb = match1.group(1)
match1 = re.compile('<li class="title"><a .*?">(.+?)</a>').search(match[i])
p_name = match1.group(1)
p_name1 = p_name
li = xbmcgui.ListItem(str(i + 1) + '. ' + p_name1, iconImage = '', thumbnailImage = p_thumb)
u = sys.argv[0]+"?mode=10&name="+urllib.quote_plus(p_name)+"&id="+urllib.quote_plus(p_id)+"&thumb="+urllib.quote_plus(p_thumb)
#li.setInfo(type = "Video", infoLabels = {"Title":p_name, "Director":p_director, "Genre":p_genre, "Plot":p_plot, "Year":p_year, "Cast":p_cast, "Tagline":p_tagline})
xbmcplugin.addDirectoryItem(int(sys.argv[1]), u, li, False, totalItems)
for num in plist:
li = xbmcgui.ListItem("... 第" + num + "页")
u = sys.argv[0]+"?mode=11&name="+urllib.quote_plus(name)+"&id="+urllib.quote_plus(id)+"&genre="+urllib.quote_plus(genre)+"&order="+order+"&days="+days+"&page="+str(num)
xbmcplugin.addDirectoryItem(int(sys.argv[1]), u, li, True, totalItems)
xbmcplugin.setContent(int(sys.argv[1]), 'movies')
xbmcplugin.endOfDirectory(int(sys.argv[1]))
def selResolution(streamtypes):
ratelist = []
for i in range(0,len(streamtypes)):
        if streamtypes[i] in ('flv', 'flvhd'): ratelist.append([4, '标清', i, 'flv']) # [resolution setting value, quality label, streamtypes index, container]
if streamtypes[i] in ('mp4', 'mp4hd'): ratelist.append([3, '高清', i, 'mp4'])
if streamtypes[i] in ('hd2', 'hd2v2', 'mp4hd2', 'mp4hd2v2'): ratelist.append([2, '超清', i, 'hd2'])
if streamtypes[i] in ('hd3', 'hd3v2', 'mp4hd3', 'mp4hd3v2'): ratelist.append([1, '1080P', i, 'hd3'])
ratelist.sort()
if len(ratelist) > 1:
resolution = int(__addon__.getSetting('resolution'))
        if resolution == 0: # ask for the video resolution every time
list = [x[1] for x in ratelist]
sel = xbmcgui.Dialog().select('清晰度(低网速请选择低清晰度)', list)
if sel == -1:
return None, None, None, None
else:
sel = 0
while sel < len(ratelist)-1 and resolution > ratelist[sel][0]: sel += 1
else:
sel = 0
return streamtypes[ratelist[sel][2]], ratelist[sel][1], ratelist[sel][2], ratelist[sel][3]
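# e.g. with streamtypes ['flvhd', 'mp4hd', 'hd2'] and the addon's resolution
# setting at 2 (超清), selResolution returns ('hd2', '超清', 2, 'hd2').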
def youku_ups(id):
res = urllib2.urlopen('https://log.mmstat.com/eg.js')
cna = res.headers['etag'][1:-1]
query = urllib.urlencode(dict(
vid = id,
ccode = '0516',
client_ip = '192.168.1.1',
utid = cna,
client_ts = time.time() / 1000,
ckey = 'DIl58SLFxFNndSV1GFNnMQVYkx1PP5tKe1siZu/86PR1u/Wh1Ptd+WOZsHHWxysSfAOhNJpdVWsdVJNsfJ8Sxd8WKVvNfAS8aS8fAOzYARzPyPc3JvtnPHjTdKfESTdnuTW6ZPvk2pNDh4uFzotgdMEFkzQ5wZVXl2Pf1/Y6hLK0OnCNxBj3+nb0v72gZ6b0td+WOZsHHWxysSo/0y9D2K42SaB8Y/+aD2K42SaB8Y/+ahU+WOZsHcrxysooUeND'
))
url = 'https://ups.youku.com/ups/get.json?%s' % (query)
link = GetHttpData(url, referer='http://v.youku.com/')
json_response = simplejson.loads(link)
api_data = json_response['data']
data_error = api_data.get('error')
if data_error:
api_error_code = data_error.get('code')
api_error_msg = data_error.get('note').encode('utf-8')
dialog = xbmcgui.Dialog()
ok = dialog.ok(__addonname__,'地址解析错误(%d):\n%s' % (api_error_code,api_error_msg))
return {}
else:
return api_data
def change_cdn(url):
    # If the cdn_url starts with an IP address, it is likely Youku's old CDN,
    # which randomly rejects HTTP requests with status codes > 400.
    # Rewriting the host to the aliCDN dispatcher works better and is at least
    # a little more recoverable from HTTP 403.
dispatcher_url = 'vali.cp31.ott.cibntv.net'
if dispatcher_url in url:
return url
elif 'k.youku.com' in url:
return url
else:
url_seg_list = list(urlparse.urlsplit(url))
url_seg_list[1] = dispatcher_url
return urlparse.urlunsplit(url_seg_list)
def PlayVideo(name,id,thumb):
movdat = youku_ups(id)
if not movdat:
return
vid = id
    lang_select = int(__addon__.getSetting('lang_select')) # default | choose every time | auto-prefer
if lang_select != 0 and movdat.has_key('dvd') and 'audiolang' in movdat['dvd']:
langlist = movdat['dvd']['audiolang']
if lang_select == 1:
list = [x['lang'] for x in langlist]
sel = xbmcgui.Dialog().select('选择语言', list)
if sel ==-1:
return
vid = langlist[sel]['vid'].encode('utf-8')
name = '%s %s' % (name, langlist[sel]['lang'].encode('utf-8'))
else:
            lang_prefer = __addon__.getSetting('lang_prefer') # Mandarin | Cantonese
for i in range(0,len(langlist)):
if langlist[i]['lang'].encode('utf-8') == lang_prefer:
vid = langlist[i]['vid'].encode('utf-8')
name = '%s %s' % (name, langlist[i]['lang'].encode('utf-8'))
break
if vid != id:
movdat = youku_ups(vid)
if not movdat:
return
streamtypes = [stream['stream_type'].encode('utf-8') for stream in movdat['stream']]
typeid, typename, streamno, resolution = selResolution(streamtypes)
if typeid:
'''
oip = movdat['security']['ip']
ep = movdat['security']['encrypt_string']
sid, token = youkuDecoder().get_sid(ep)
play_method = int(__addon__.getSetting('play_method'))
        if play_method != 0: # m3u8 mode
query = urllib.urlencode(dict(
vid=vid, ts=int(time.time()), keyframe=1, type=resolution,
ep=ep, oip=oip, ctype=12, ev=1, token=token, sid=sid,
))
cookie = ['%s=%s' % (x.name, x.value) for x in cj][0]
movurl = 'http://pl.youku.com/playlist/m3u8?%s|Cookie=%s' % (query, cookie)
        else: # default playback mode
if typeid in ('mp4', 'mp4hd'):
type = 'mp4'
else:
type = 'flv'
urls = []
segs = movdat['stream'][streamno]['segs']
total = len(segs)
for no in range(0, total):
k = segs[no]['key']
if k == -1:
dialog = xbmcgui.Dialog()
ok = dialog.ok(__addonname__,'会员节目,无法播放')
return
fileid = segs[no]['fileid']
ep = youkuDecoder().generate_ep(no, fileid, sid, token)
query = urllib.urlencode(dict(
ctype = 12,
ev = 1,
K = k,
ep = urllib.unquote(ep),
oip = oip,
token = token,
yxon = 1
))
url = 'http://k.youku.com/player/getFlvPath/sid/{sid}_00/st/{container}/fileid/{fileid}?{query}'.format(
sid = sid,
container = type,
fileid = fileid,
query = query
)
link = GetHttpData(url)
json_response = simplejson.loads(link)
urls.append(json_response[0]['server'].encode('utf-8'))
movurl = 'stack://' + ' , '.join(urls)
'''
movurl = movdat['stream'][streamno]['m3u8_url']
#urls = []
#is_preview = False
#for seg in movdat['stream'][streamno]['segs']:
# if seg.get('cdn_url'):
# urls.append(change_cdn(seg['cdn_url'].encode('utf-8')))
# else:
# is_preview = True
#if not is_preview:
# movurl = 'stack://' + ' , '.join(urls)
name = '%s[%s]' % (name, typename)
listitem=xbmcgui.ListItem(name,thumbnailImage=thumb)
listitem.setInfo(type="Video",infoLabels={"Title":name})
xbmc.Player().play(movurl, listitem)
def performChanges(name,id,listpage,genre,area,year,order,days):
genrelist,arealist,yearlist = getList(listpage,id,genre,area,year)
change = False
if id == 'c_95':
str1 = '风格'
str3 = '发行'
elif id == 'c_84' or id == 'c_87':
str1 = '类型'
str3 = '出品'
else:
str1 = '类型'
str3 = '时间'
dialog = xbmcgui.Dialog()
if len(genrelist)>0:
list = [x[1] for x in genrelist]
sel = dialog.select(str1, list)
if sel != -1:
genre = genrelist[sel][0]
change = True
if len(arealist)>0:
list = [x[1] for x in arealist]
sel = dialog.select('地区', list)
if sel != -1:
area = arealist[sel][0]
change = True
if len(yearlist)>0:
list = [x[1] for x in yearlist]
sel = dialog.select(str3, list)
if sel != -1:
year = yearlist[sel][0]
change = True
list = [x[1] for x in DAYS_LIST1]
sel = dialog.select('范围', list)
if sel != -1:
days = DAYS_LIST1[sel][0]
change = True
list = [x[1] for x in ORDER_LIST1]
sel = dialog.select('排序', list)
if sel != -1:
order = ORDER_LIST1[sel][0]
change = True
if change:
progList(name,id,'1',genre,area,year,order,days)
def performChanges2(name,id,listpage,genre,order,days):
genrelist = getList2(listpage, genre)
change = False
dialog = xbmcgui.Dialog()
if len(genrelist)>0:
list = [x[1] for x in genrelist]
sel = dialog.select('类型', list)
if sel != -1:
genre = genrelist[sel][0]
change = True
list = [x[1] for x in DAYS_LIST2]
sel = dialog.select('范围', list)
if sel != -1:
days = DAYS_LIST2[sel][0]
change = True
list = [x[1] for x in ORDER_LIST2]
sel = dialog.select('排序', list)
if sel != -1:
order = ORDER_LIST2[sel][0]
change = True
if change:
progList2(name,id,'1',genre,order,days)
def get_params():
param = []
paramstring = sys.argv[2]
if len(paramstring) >= 2:
params = sys.argv[2]
cleanedparams = params.replace('?', '')
if (params[len(params) - 1] == '/'):
params = params[0:len(params) - 2]
pairsofparams = cleanedparams.split('&')
param = {}
for i in range(len(pairsofparams)):
splitparams = {}
splitparams = pairsofparams[i].split('=')
if (len(splitparams)) == 2:
param[splitparams[0]] = splitparams[1]
return param
params = get_params()
mode = None
name = ''
id = ''
genre = ''
area = ''
year = ''
order = ''
page = '1'
url = None
thumb = None
try:
thumb = urllib.unquote_plus(params["thumb"])
except:
pass
try:
url = urllib.unquote_plus(params["url"])
except:
pass
try:
page = urllib.unquote_plus(params["page"])
except:
pass
try:
order = urllib.unquote_plus(params["order"])
except:
pass
try:
days = urllib.unquote_plus(params["days"])
except:
pass
try:
year = urllib.unquote_plus(params["year"])
except:
pass
try:
area = urllib.unquote_plus(params["area"])
except:
pass
try:
genre = urllib.unquote_plus(params["genre"])
except:
pass
try:
id = urllib.unquote_plus(params["id"])
except:
pass
try:
name = urllib.unquote_plus(params["name"])
except:
pass
try:
mode = int(params["mode"])
except:
pass
cj = cookielib.CookieJar()
opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
urllib2.install_opener(opener)
if mode == None:
rootList()
elif mode == 1:
progList(name,id,page,genre,area,year,order,days)
elif mode == 2:
getMovie(name,id,thumb)
elif mode == 3:
seriesList(name,id,thumb)
elif mode == 4:
performChanges(name,id,page,genre,area,year,order,days)
elif mode == 10:
PlayVideo(name,id,thumb)
elif mode == 11:
progList2(name,id,page,genre,order,days)
elif mode == 12:
performChanges2(name,id,page,genre,order,days)
|
import contextlib
import sys
import random
from blessings import Terminal
terminal = Terminal()
@contextlib.contextmanager
def noop():
yield
def on_entry(overwrite):
sys.stdout.write(terminal.civis + terminal.normal)
if not overwrite:
sys.stdout.write(terminal.clear)
def on_exit(overwrite):
if overwrite:
# move cursor to bottom of terminal
sys.stdout.write(terminal.move(terminal.height - 1, 0))
sys.stdout.write(terminal.cnorm + terminal.normal)
@contextlib.contextmanager
def reset_on_exit(overwrite):
try:
with (noop if overwrite else terminal.fullscreen)():
on_entry(overwrite)
yield
except KeyboardInterrupt:
pass
finally:
on_exit(overwrite)
def center():
return terminal.height // 2, terminal.width // 2
def rand_coord():
return (
random.randint(0, terminal.height - 1),
random.randint(0, terminal.width - 1)
)
def get_max_dimension():
return max(terminal.height, terminal.width)
def clip(strips):
'''
Yield a sequence of strips after clipping them to (0, 0), (height, width).
(inclusive)
'''
for y, x, length in strips:
if y < 0 or y >= terminal.height:
continue
if x < 0:
length += x
x = 0
if length + x > terminal.width:
length = terminal.width - x
if length > 0:
yield y, x, length
def render(strips):
return ''.join(
terminal.move(y, x) + ' ' * length
for y, x, length in clip(strips)
)
on_color = terminal.on_color
number_of_colors = terminal.number_of_colors
normal = terminal.normal
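
if __name__ == '__main__':
    # Illustrative sketch (assumes an ANSI-capable terminal): paint one colored
    # strip across the middle row, then restore cursor/screen state on exit.
    with reset_on_exit(overwrite=True):
        y, _ = center()
        sys.stdout.write(on_color(1) + render([(y, 0, terminal.width)]) + normal)
        sys.stdout.flush()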
|
from collections import OrderedDict
from functools import partial
from math import floor, ceil
from operator import is_not
from django.contrib.auth.models import User
from django.contrib.postgres.fields import ArrayField
from django.contrib.staticfiles.templatetags.staticfiles import static
from django.core.exceptions import ValidationError
from django.db import models
from django.db.models import Q
from django.utils.safestring import mark_safe
from django.utils.text import slugify
class Monster(models.Model):
ELEMENT_PURE = 'pure'
ELEMENT_FIRE = 'fire'
ELEMENT_WIND = 'wind'
ELEMENT_WATER = 'water'
ELEMENT_LIGHT = 'light'
ELEMENT_DARK = 'dark'
TYPE_ATTACK = 'attack'
TYPE_HP = 'hp'
TYPE_SUPPORT = 'support'
TYPE_DEFENSE = 'defense'
TYPE_MATERIAL = 'material'
TYPE_NONE = 'none'
ELEMENT_CHOICES = (
(ELEMENT_PURE, 'Pure'),
(ELEMENT_FIRE, 'Fire'),
(ELEMENT_WIND, 'Wind'),
(ELEMENT_WATER, 'Water'),
(ELEMENT_LIGHT, 'Light'),
(ELEMENT_DARK, 'Dark'),
)
TYPE_CHOICES = (
(TYPE_NONE, 'None'),
(TYPE_ATTACK, 'Attack'),
(TYPE_HP, 'HP'),
(TYPE_SUPPORT, 'Support'),
(TYPE_DEFENSE, 'Defense'),
(TYPE_MATERIAL, 'Material'),
)
STAR_CHOICES = (
(1, mark_safe('1<span class="glyphicon glyphicon-star"></span>')),
(2, mark_safe('2<span class="glyphicon glyphicon-star"></span>')),
(3, mark_safe('3<span class="glyphicon glyphicon-star"></span>')),
(4, mark_safe('4<span class="glyphicon glyphicon-star"></span>')),
(5, mark_safe('5<span class="glyphicon glyphicon-star"></span>')),
(6, mark_safe('6<span class="glyphicon glyphicon-star"></span>')),
)
name = models.CharField(max_length=40)
com2us_id = models.IntegerField(blank=True, null=True, help_text='ID given in game data files')
family_id = models.IntegerField(blank=True, null=True, help_text='Identifier that matches same family monsters')
image_filename = models.CharField(max_length=250, null=True, blank=True)
element = models.CharField(max_length=6, choices=ELEMENT_CHOICES, default=ELEMENT_FIRE)
archetype = models.CharField(max_length=10, choices=TYPE_CHOICES, default=TYPE_ATTACK)
base_stars = models.IntegerField(choices=STAR_CHOICES, help_text='Default stars a monster is summoned at')
obtainable = models.BooleanField(default=True, help_text='Is available for players to acquire')
can_awaken = models.BooleanField(default=True, help_text='Has an awakened form')
is_awakened = models.BooleanField(default=False, help_text='Is the awakened form')
awaken_bonus = models.TextField(blank=True, help_text='Bonus given upon awakening')
skills = models.ManyToManyField('Skill', blank=True)
skill_ups_to_max = models.IntegerField(null=True, blank=True, help_text='Number of skill-ups required to max all skills')
leader_skill = models.ForeignKey('LeaderSkill', on_delete=models.SET_NULL, null=True, blank=True)
# 1-star lvl 1 values from data source
raw_hp = models.IntegerField(null=True, blank=True, help_text='HP value from game data files')
raw_attack = models.IntegerField(null=True, blank=True, help_text='ATK value from game data files')
raw_defense = models.IntegerField(null=True, blank=True, help_text='DEF value from game data files')
# Base-star lvl MAX values as seen in-game
    base_hp = models.IntegerField(null=True, blank=True, help_text='HP at base_stars lvl MAX')
    base_attack = models.IntegerField(null=True, blank=True, help_text='ATK at base_stars lvl MAX')
    base_defense = models.IntegerField(null=True, blank=True, help_text='DEF at base_stars lvl MAX')
# 6-star lvl MAX values
max_lvl_hp = models.IntegerField(null=True, blank=True, help_text='HP at 6-stars lvl 40')
max_lvl_attack = models.IntegerField(null=True, blank=True, help_text='ATK at 6-stars lvl 40')
max_lvl_defense = models.IntegerField(null=True, blank=True, help_text='DEF at 6-stars lvl 40')
speed = models.IntegerField(null=True, blank=True)
crit_rate = models.IntegerField(null=True, blank=True)
crit_damage = models.IntegerField(null=True, blank=True)
resistance = models.IntegerField(null=True, blank=True)
accuracy = models.IntegerField(null=True, blank=True)
# Homunculus monster fields
homunculus = models.BooleanField(default=False)
craft_materials = models.ManyToManyField('CraftMaterial', through='MonsterCraftCost')
craft_cost = models.IntegerField(null=True, blank=True, help_text='Mana cost to craft this monster')
# Unicorn fields
transforms_into = models.ForeignKey('self', on_delete=models.SET_NULL, null=True, blank=True, related_name='+', help_text='Monster which this monster can transform into during battle')
awakens_from = models.ForeignKey('self', on_delete=models.SET_NULL, null=True, blank=True, related_name='+', help_text='Unawakened form of this monster')
awakens_to = models.ForeignKey('self', on_delete=models.SET_NULL, null=True, blank=True, related_name='+', help_text='Awakened form of this monster')
awaken_mats_fire_low = models.IntegerField(blank=True, default=0)
awaken_mats_fire_mid = models.IntegerField(blank=True, default=0)
awaken_mats_fire_high = models.IntegerField(blank=True, default=0)
awaken_mats_water_low = models.IntegerField(blank=True, default=0)
awaken_mats_water_mid = models.IntegerField(blank=True, default=0)
awaken_mats_water_high = models.IntegerField(blank=True, default=0)
awaken_mats_wind_low = models.IntegerField(blank=True, default=0)
awaken_mats_wind_mid = models.IntegerField(blank=True, default=0)
awaken_mats_wind_high = models.IntegerField(blank=True, default=0)
awaken_mats_light_low = models.IntegerField(blank=True, default=0)
awaken_mats_light_mid = models.IntegerField(blank=True, default=0)
awaken_mats_light_high = models.IntegerField(blank=True, default=0)
awaken_mats_dark_low = models.IntegerField(blank=True, default=0)
awaken_mats_dark_mid = models.IntegerField(blank=True, default=0)
awaken_mats_dark_high = models.IntegerField(blank=True, default=0)
awaken_mats_magic_low = models.IntegerField(blank=True, default=0)
awaken_mats_magic_mid = models.IntegerField(blank=True, default=0)
awaken_mats_magic_high = models.IntegerField(blank=True, default=0)
source = models.ManyToManyField('Source', blank=True, help_text='Where this monster can be acquired from')
farmable = models.BooleanField(default=False, help_text='Monster can be acquired easily without luck')
fusion_food = models.BooleanField(default=False, help_text='Monster is used as a fusion ingredient')
bestiary_slug = models.SlugField(max_length=255, editable=False, null=True)
def image_url(self):
if self.image_filename:
return mark_safe('<img src="%s" height="42" width="42"/>' % static('herders/images/monsters/' + self.image_filename))
else:
return 'No Image'
def max_level_from_stars(self, stars=None):
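        # Max level grows 5 per star: e.g. a 4-star monster caps at level 30, a 6-star at 40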
if stars:
return 10 + stars * 5
else:
return 10 + self.base_stars * 5
def get_stats(self):
from collections import OrderedDict
start_grade = self.base_stars
stats_list = OrderedDict()
if self.is_awakened and self.base_stars > 1:
start_grade -= 1
for grade in range(start_grade, 7):
max_level = self.max_level_from_stars(grade)
# Add the actual calculated stats
stats_list[str(grade)] = {
'HP': self.actual_hp(grade, max_level),
'ATK': self.actual_attack(grade, max_level),
'DEF': self.actual_defense(grade, max_level),
}
return stats_list
def actual_hp(self, grade, level):
# Check that base stat exists first
if not self.raw_hp:
return None
else:
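            # HP is scaled by 15 because the raw game-data value is 1/15 of the displayed stat (ATK and DEF are used as-is)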
return self._calculate_actual_stat(self.raw_hp, grade, level) * 15
    def actual_attack(self, grade=None, level=1):
        # Default to the monster's own base grade; base_stars is a Field object at class definition time and cannot be a default argument
        if grade is None:
            grade = self.base_stars
        # Check that base stat exists first
        if not self.raw_attack:
            return None
        else:
            return self._calculate_actual_stat(self.raw_attack, grade, level)
    def actual_defense(self, grade=None, level=1):
        # Default to the monster's own base grade; base_stars is a Field object at class definition time and cannot be a default argument
        if grade is None:
            grade = self.base_stars
        # Check that base stat exists first
        if not self.raw_defense:
            return None
        else:
            return self._calculate_actual_stat(self.raw_defense, grade, level)
@staticmethod
def _calculate_actual_stat(stat, grade, level):
# Magic multipliers taken from summoner's war wikia calculator. Used to calculate stats for lvl 1 and lvl MAX
magic_multipliers = [
{'1': 1.0, 'max': 1.9958},
{'1': 1.5966, 'max': 3.03050646},
{'1': 2.4242774, 'max': 4.364426603},
{'1': 3.4914444, 'max': 5.941390935},
{'1': 4.7529032, 'max': 8.072330795},
{'1': 6.4582449, 'max': 10.97901633},
]
max_lvl = 10 + grade * 5
stat_lvl_1 = round(stat * magic_multipliers[grade - 1]['1'], 0)
stat_lvl_max = round(stat * magic_multipliers[grade - 1]['max'], 0)
if level == 1:
return int(stat_lvl_1)
elif level == max_lvl:
return int(stat_lvl_max)
else:
# Use exponential function in format value=ae^(bx)
# a=stat_lvl_1*e^(-b)
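            # Fitting value(level) = a * e^(b * level) through (1, stat_lvl_1) and
            # (max_lvl, stat_lvl_max) gives b = ln(stat_lvl_max / stat_lvl_1) / (max_lvl - 1)
            # and a = stat_lvl_1 * e^(-b); intermediate levels follow that curve.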
from math import log, exp
b_coeff = log(stat_lvl_max / stat_lvl_1) / (max_lvl - 1)
return int(round((stat_lvl_1 * exp(-b_coeff)) * exp(b_coeff * level)))
def monster_family(self):
should_be_shown = Q(obtainable=True) | Q(transforms_into__isnull=False)
family = Monster.objects.filter(family_id=self.family_id).filter(should_be_shown).order_by('element', 'is_awakened')
return [
family.filter(element=Monster.ELEMENT_FIRE).first(),
family.filter(element=Monster.ELEMENT_WATER).first(),
family.filter(element=Monster.ELEMENT_WIND).first(),
family.filter(element=Monster.ELEMENT_LIGHT).first(),
family.filter(element=Monster.ELEMENT_DARK).first(),
]
def all_skill_effects(self):
return SkillEffect.objects.filter(pk__in=self.skills.exclude(skill_effect=None).values_list('skill_effect', flat=True))
def get_awakening_materials(self):
mats = OrderedDict()
mats['magic'] = OrderedDict()
mats['magic']['low'] = self.awaken_mats_magic_low
mats['magic']['mid'] = self.awaken_mats_magic_mid
mats['magic']['high'] = self.awaken_mats_magic_high
mats['fire'] = OrderedDict()
mats['fire']['low'] = self.awaken_mats_fire_low
mats['fire']['mid'] = self.awaken_mats_fire_mid
mats['fire']['high'] = self.awaken_mats_fire_high
mats['water'] = OrderedDict()
mats['water']['low'] = self.awaken_mats_water_low
mats['water']['mid'] = self.awaken_mats_water_mid
mats['water']['high'] = self.awaken_mats_water_high
mats['wind'] = OrderedDict()
mats['wind']['low'] = self.awaken_mats_wind_low
mats['wind']['mid'] = self.awaken_mats_wind_mid
mats['wind']['high'] = self.awaken_mats_wind_high
mats['light'] = OrderedDict()
mats['light']['low'] = self.awaken_mats_light_low
mats['light']['mid'] = self.awaken_mats_light_mid
mats['light']['high'] = self.awaken_mats_light_high
mats['dark'] = OrderedDict()
mats['dark']['low'] = self.awaken_mats_dark_low
mats['dark']['mid'] = self.awaken_mats_dark_mid
mats['dark']['high'] = self.awaken_mats_dark_high
return mats
def clean(self):
        # Treat null awakening material quantities as zero
        for element in ('fire', 'water', 'wind', 'light', 'dark', 'magic'):
            for size in ('low', 'mid', 'high'):
                field_name = 'awaken_mats_{}_{}'.format(element, size)
                if getattr(self, field_name) is None:
                    setattr(self, field_name, 0)
super(Monster, self).clean()
def save(self, *args, **kwargs):
        # Treat null awakening material quantities as zero
        for element in ('fire', 'water', 'wind', 'light', 'dark', 'magic'):
            for size in ('low', 'mid', 'high'):
                field_name = 'awaken_mats_{}_{}'.format(element, size)
                if getattr(self, field_name) is None:
                    setattr(self, field_name, 0)
if self.raw_hp:
self.base_hp = self._calculate_actual_stat(
self.raw_hp,
self.base_stars,
self.max_level_from_stars(self.base_stars)
) * 15
self.max_lvl_hp = self.actual_hp(6, 40)
if self.raw_attack:
self.base_attack = self._calculate_actual_stat(
self.raw_attack,
self.base_stars,
self.max_level_from_stars(self.base_stars)
)
self.max_lvl_attack = self.actual_attack(6, 40)
if self.raw_defense:
self.base_defense = self._calculate_actual_stat(
self.raw_defense,
self.base_stars,
self.max_level_from_stars(self.base_stars)
)
self.max_lvl_defense = self.actual_defense(6, 40)
if self.is_awakened and self.awakens_from:
self.bestiary_slug = self.awakens_from.bestiary_slug
else:
if self.awakens_to is not None:
self.bestiary_slug = slugify(" ".join([str(self.com2us_id), self.element, self.name, self.awakens_to.name]))
else:
self.bestiary_slug = slugify(" ".join([str(self.com2us_id), self.element, self.name]))
# Pull info from unawakened version of this monster. This copying of data is one directional only
if self.awakens_from:
# Copy awaken bonus from unawakened version
if self.is_awakened and self.awakens_from.awaken_bonus:
self.awaken_bonus = self.awakens_from.awaken_bonus
super(Monster, self).save(*args, **kwargs)
# Automatically set awakens from/to relationship if none exists
if self.awakens_from and self.awakens_from.awakens_to is not self:
self.awakens_from.awakens_to = self
self.awakens_from.save()
elif self.awakens_to and self.awakens_to.awakens_from is not self:
self.awakens_to.awakens_from = self
self.awakens_to.save()
class Meta:
ordering = ['name', 'element']
def __str__(self):
if self.is_awakened:
return self.name
else:
return self.name + ' (' + self.element.capitalize() + ')'
class Skill(models.Model):
name = models.CharField(max_length=40)
com2us_id = models.IntegerField(blank=True, null=True, help_text='ID given in game data files')
description = models.TextField()
slot = models.IntegerField(default=1, help_text='Which button position the skill is in during battle')
skill_effect = models.ManyToManyField('SkillEffect', blank=True)
effect = models.ManyToManyField('SkillEffect', through='SkillEffectDetail', blank=True, related_name='effect', help_text='Detailed skill effect information')
cooltime = models.IntegerField(null=True, blank=True, help_text='Number of turns until skill can be used again')
hits = models.IntegerField(default=1, help_text='Number of times this skill hits an enemy')
aoe = models.BooleanField(default=False, help_text='Skill affects all enemies or allies')
passive = models.BooleanField(default=False, help_text='Skill activates automatically')
max_level = models.IntegerField()
    level_progress_description = models.TextField(null=True, blank=True, help_text='Description of the bonus gained at each skill level')
icon_filename = models.CharField(max_length=100, null=True, blank=True)
multiplier_formula = models.TextField(null=True, blank=True, help_text='Parsed multiplier formula')
multiplier_formula_raw = models.CharField(max_length=150, null=True, blank=True, help_text='Multiplier formula given in game data files')
scaling_stats = models.ManyToManyField('ScalingStat', blank=True, help_text='Monster stats which this skill scales on')
def image_url(self):
if self.icon_filename:
return mark_safe('<img src="%s" height="42" width="42"/>' % static('herders/images/skills/' + self.icon_filename))
else:
return 'No Image'
def level_progress_description_list(self):
return self.level_progress_description.splitlines()
def __str__(self):
if self.name:
name = self.name
else:
name = ''
if self.icon_filename:
icon = ' - ' + self.icon_filename
else:
icon = ''
if self.com2us_id:
com2us_id = ' - ' + str(self.com2us_id)
else:
com2us_id = ''
return name + com2us_id + icon
class Meta:
ordering = ['slot', 'name']
verbose_name = 'Skill'
verbose_name_plural = 'Skills'
class LeaderSkill(models.Model):
ATTRIBUTE_HP = 1
ATTRIBUTE_ATK = 2
ATTRIBUTE_DEF = 3
ATTRIBUTE_SPD = 4
ATTRIBUTE_CRIT_RATE = 5
ATTRIBUTE_RESIST = 6
ATTRIBUTE_ACCURACY = 7
ATTRIBUTE_CRIT_DMG = 8
ATTRIBUTE_CHOICES = (
(ATTRIBUTE_HP, 'HP'),
(ATTRIBUTE_ATK, 'Attack Power'),
(ATTRIBUTE_DEF, 'Defense'),
(ATTRIBUTE_SPD, 'Attack Speed'),
(ATTRIBUTE_CRIT_RATE, 'Critical Rate'),
(ATTRIBUTE_RESIST, 'Resistance'),
(ATTRIBUTE_ACCURACY, 'Accuracy'),
(ATTRIBUTE_CRIT_DMG, 'Critical DMG'),
)
AREA_GENERAL = 1
AREA_DUNGEON = 2
AREA_ELEMENT = 3
AREA_ARENA = 4
AREA_GUILD = 5
AREA_CHOICES = (
(AREA_GENERAL, 'General'),
(AREA_DUNGEON, 'Dungeon'),
(AREA_ELEMENT, 'Element'),
(AREA_ARENA, 'Arena'),
(AREA_GUILD, 'Guild'),
)
attribute = models.IntegerField(choices=ATTRIBUTE_CHOICES, help_text='Monster stat which is granted the bonus')
amount = models.IntegerField(help_text='Amount of bonus granted')
area = models.IntegerField(choices=AREA_CHOICES, default=AREA_GENERAL, help_text='Where this leader skill has an effect')
element = models.CharField(max_length=6, null=True, blank=True, choices=Monster.ELEMENT_CHOICES, help_text='Element of monster which this leader skill applies to')
def skill_string(self):
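        # e.g. attribute=ATTRIBUTE_HP, amount=33, area=AREA_ARENA -> 'Increase the HP of ally monsters in the Arena by 33%'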
if self.area == self.AREA_DUNGEON:
condition = 'in the Dungeons '
elif self.area == self.AREA_ARENA:
condition = 'in the Arena '
elif self.area == self.AREA_GUILD:
condition = 'in Guild Content '
elif self.area == self.AREA_ELEMENT:
condition = 'with {} attribute '.format(self.get_element_display())
else:
condition = ''
return "Increase the {0} of ally monsters {1}by {2}%".format(self.get_attribute_display(), condition, self.amount)
def icon_filename(self):
if self.area == self.AREA_ELEMENT:
suffix = '_{}'.format(self.get_element_display())
elif self.area == self.AREA_GENERAL:
suffix = ''
else:
suffix = '_{}'.format(self.get_area_display())
return 'leader_skill_{0}{1}.png'.format(self.get_attribute_display().replace(' ', '_'), suffix)
def image_url(self):
return mark_safe('<img src="{}" height="42" width="42"/>'.format(
static('herders/images/skills/leader/' + self.icon_filename())
))
def __str__(self):
if self.area == self.AREA_ELEMENT:
condition = ' {}'.format(self.get_element_display())
elif self.area == self.AREA_GENERAL:
condition = ''
else:
condition = ' {}'.format(self.get_area_display())
return self.get_attribute_display() + ' ' + str(self.amount) + '%' + condition
class Meta:
ordering = ['attribute', 'amount', 'element']
verbose_name = 'Leader Skill'
verbose_name_plural = 'Leader Skills'
class SkillEffectBuffsManager(models.Manager):
def get_queryset(self):
return super(SkillEffectBuffsManager, self).get_queryset().values_list('pk', 'icon_filename').filter(is_buff=True).exclude(icon_filename='')
class SkillEffectDebuffsManager(models.Manager):
def get_queryset(self):
return super(SkillEffectDebuffsManager, self).get_queryset().values_list('pk', 'icon_filename').filter(is_buff=False).exclude(icon_filename='')
class SkillEffectOtherManager(models.Manager):
def get_queryset(self):
return super(SkillEffectOtherManager, self).get_queryset().filter(icon_filename='')
class SkillEffect(models.Model):
is_buff = models.BooleanField(default=True, help_text='Effect is beneficial to affected monster')
name = models.CharField(max_length=40)
description = models.TextField()
icon_filename = models.CharField(max_length=100, blank=True, default='')
objects = models.Manager()
class Meta:
ordering = ['name']
verbose_name = 'Skill Effect'
verbose_name_plural = 'Skill Effects'
def image_url(self):
if self.icon_filename:
return mark_safe('<img src="%s" height="42" width="42"/>' % static('herders/images/buffs/' + self.icon_filename))
else:
return 'No Image'
def __str__(self):
return self.name
class SkillEffectDetail(models.Model):
skill = models.ForeignKey(Skill, on_delete=models.CASCADE)
effect = models.ForeignKey(SkillEffect, on_delete=models.CASCADE)
aoe = models.BooleanField(default=False, help_text='Effect applies to entire friendly or enemy group')
single_target = models.BooleanField(default=False, help_text='Effect applies to a single monster')
self_effect = models.BooleanField(default=False, help_text='Effect applies to the monster using the skill')
    chance = models.IntegerField(null=True, blank=True, help_text='Chance of effect occurring per hit')
on_crit = models.BooleanField(default=False)
on_death = models.BooleanField(default=False)
random = models.BooleanField(default=False, help_text='Skill effect applies randomly to the target')
quantity = models.IntegerField(null=True, blank=True, help_text='Number of items this effect affects on the target')
all = models.BooleanField(default=False, help_text='This effect affects all items on the target')
self_hp = models.BooleanField(default=False, help_text="Amount of this effect is based on casting monster's HP")
target_hp = models.BooleanField(default=False, help_text="Amount of this effect is based on target monster's HP")
damage = models.BooleanField(default=False, help_text='Amount of this effect is based on damage dealt')
note = models.TextField(blank=True, null=True, help_text="Explain anything else that doesn't fit in other fields")
class ScalingStat(models.Model):
stat = models.CharField(max_length=20)
com2us_desc = models.CharField(max_length=30, null=True, blank=True)
description = models.TextField(null=True, blank=True)
def __str__(self):
return self.stat
class Meta:
ordering = ['stat',]
verbose_name = 'Scaling Stat'
verbose_name_plural = 'Scaling Stats'
class HomunculusSkill(models.Model):
skill = models.ForeignKey(Skill, on_delete=models.CASCADE)
monsters = models.ManyToManyField(Monster)
craft_materials = models.ManyToManyField('CraftMaterial', through='HomunculusSkillCraftCost', help_text='Crafting materials required to purchase')
mana_cost = models.IntegerField(default=0, help_text='Cost to purchase')
prerequisites = models.ManyToManyField(Skill, blank=True, related_name='homunculus_prereq', help_text='Skills which must be acquired first')
def __str__(self):
return '{} ({})'.format(self.skill, self.skill.com2us_id)
class Source(models.Model):
name = models.CharField(max_length=100)
description = models.TextField(null=True, blank=True)
icon_filename = models.CharField(max_length=100, null=True, blank=True)
farmable_source = models.BooleanField(default=False)
meta_order = models.IntegerField(db_index=True, default=0)
def image_url(self):
if self.icon_filename:
return mark_safe('<img src="%s" height="42" width="42"/>' % static('herders/images/icons/' + self.icon_filename))
else:
return 'No Image'
def __str__(self):
return self.name
class Meta:
ordering = ['meta_order', 'icon_filename', 'name']
class Fusion(models.Model):
product = models.ForeignKey('Monster', on_delete=models.CASCADE, related_name='product')
stars = models.IntegerField()
cost = models.IntegerField()
ingredients = models.ManyToManyField('Monster')
meta_order = models.IntegerField(db_index=True, default=0)
def __str__(self):
return str(self.product) + ' Fusion'
class Meta:
ordering = ['meta_order']
def sub_fusion_available(self):
return Fusion.objects.filter(product__in=self.ingredients.values_list('awakens_from__pk', flat=True)).exists()
def total_awakening_cost(self, owned_ingredients=None):
cost = {
'magic': {
'low': 0,
'mid': 0,
'high': 0,
},
'fire': {
'low': 0,
'mid': 0,
'high': 0,
},
'water': {
'low': 0,
'mid': 0,
'high': 0,
},
'wind': {
'low': 0,
'mid': 0,
'high': 0,
},
'light': {
'low': 0,
'mid': 0,
'high': 0,
},
'dark': {
'low': 0,
'mid': 0,
'high': 0,
},
}
if owned_ingredients:
qs = self.ingredients.exclude(pk__in=[o.monster.pk for o in owned_ingredients])
else:
qs = self.ingredients.all()
for ingredient in qs:
if ingredient.awakens_from:
cost['magic']['low'] += ingredient.awakens_from.awaken_mats_magic_low
cost['magic']['mid'] += ingredient.awakens_from.awaken_mats_magic_mid
cost['magic']['high'] += ingredient.awakens_from.awaken_mats_magic_high
cost['fire']['low'] += ingredient.awakens_from.awaken_mats_fire_low
cost['fire']['mid'] += ingredient.awakens_from.awaken_mats_fire_mid
cost['fire']['high'] += ingredient.awakens_from.awaken_mats_fire_high
cost['water']['low'] += ingredient.awakens_from.awaken_mats_water_low
cost['water']['mid'] += ingredient.awakens_from.awaken_mats_water_mid
cost['water']['high'] += ingredient.awakens_from.awaken_mats_water_high
cost['wind']['low'] += ingredient.awakens_from.awaken_mats_wind_low
cost['wind']['mid'] += ingredient.awakens_from.awaken_mats_wind_mid
cost['wind']['high'] += ingredient.awakens_from.awaken_mats_wind_high
cost['light']['low'] += ingredient.awakens_from.awaken_mats_light_low
cost['light']['mid'] += ingredient.awakens_from.awaken_mats_light_mid
cost['light']['high'] += ingredient.awakens_from.awaken_mats_light_high
cost['dark']['low'] += ingredient.awakens_from.awaken_mats_dark_low
cost['dark']['mid'] += ingredient.awakens_from.awaken_mats_dark_mid
cost['dark']['high'] += ingredient.awakens_from.awaken_mats_dark_high
return cost
class Building(models.Model):
AREA_GENERAL = 0
AREA_GUILD = 1
AREA_CHOICES = [
(AREA_GENERAL, 'Everywhere'),
(AREA_GUILD, 'Guild Content'),
]
STAT_HP = 0
STAT_ATK = 1
STAT_DEF = 2
STAT_SPD = 3
STAT_CRIT_RATE_PCT = 4
STAT_CRIT_DMG_PCT = 5
STAT_RESIST_PCT = 6
STAT_ACCURACY_PCT = 7
MAX_ENERGY = 8
MANA_STONE_STORAGE = 9
MANA_STONE_PRODUCTION = 10
ENERGY_PRODUCTION = 11
ARCANE_TOWER_ATK = 12
ARCANE_TOWER_SPD = 13
STAT_CHOICES = [
(STAT_HP, 'HP'),
(STAT_ATK, 'ATK'),
(STAT_DEF, 'DEF'),
(STAT_SPD, 'SPD'),
(STAT_CRIT_RATE_PCT, 'CRI Rate'),
(STAT_CRIT_DMG_PCT, 'CRI Dmg'),
(STAT_RESIST_PCT, 'Resistance'),
(STAT_ACCURACY_PCT, 'Accuracy'),
(MAX_ENERGY, 'Max. Energy'),
(MANA_STONE_STORAGE, 'Mana Stone Storage'),
(MANA_STONE_PRODUCTION, 'Mana Stone Production Rate'),
(ENERGY_PRODUCTION, 'Energy Production Rate'),
(ARCANE_TOWER_ATK, 'Arcane Tower ATK'),
(ARCANE_TOWER_SPD, 'Arcane Tower SPD'),
]
PERCENT_STATS = [
STAT_HP,
STAT_ATK,
STAT_DEF,
STAT_SPD,
STAT_CRIT_RATE_PCT,
STAT_CRIT_DMG_PCT,
STAT_RESIST_PCT,
STAT_ACCURACY_PCT,
MANA_STONE_PRODUCTION,
ENERGY_PRODUCTION,
ARCANE_TOWER_ATK,
ARCANE_TOWER_SPD,
]
com2us_id = models.IntegerField()
name = models.CharField(max_length=30)
max_level = models.IntegerField()
area = models.IntegerField(choices=AREA_CHOICES, null=True, blank=True)
affected_stat = models.IntegerField(choices=STAT_CHOICES, null=True, blank=True)
element = models.CharField(max_length=6, choices=Monster.ELEMENT_CHOICES, blank=True, null=True)
stat_bonus = ArrayField(models.IntegerField(blank=True, null=True))
upgrade_cost = ArrayField(models.IntegerField(blank=True, null=True))
description = models.TextField(null=True, blank=True)
icon_filename = models.CharField(max_length=100, null=True, blank=True)
def image_url(self):
if self.icon_filename:
return mark_safe('<img src="%s" height="42" width="42"/>' % static('herders/images/buildings/' + self.icon_filename))
else:
return 'No Image'
def __str__(self):
return self.name
class CraftMaterial(models.Model):
com2us_id = models.IntegerField()
name = models.CharField(max_length=40)
icon_filename = models.CharField(max_length=100, null=True, blank=True)
sell_value = models.IntegerField(blank=True, null=True)
source = models.ManyToManyField(Source, blank=True)
def image_url(self):
if self.icon_filename:
return mark_safe('<img src="%s" height="42" width="42"/>' % static('herders/images/crafts/' + self.icon_filename))
else:
return 'No Image'
def __str__(self):
return self.name
class MonsterCraftCost(models.Model):
monster = models.ForeignKey(Monster, on_delete=models.CASCADE)
craft = models.ForeignKey(CraftMaterial, on_delete=models.CASCADE)
quantity = models.IntegerField()
def __str__(self):
return '{} - qty. {}'.format(self.craft.name, self.quantity)
class HomunculusSkillCraftCost(models.Model):
skill = models.ForeignKey(HomunculusSkill, on_delete=models.CASCADE)
craft = models.ForeignKey(CraftMaterial, on_delete=models.CASCADE)
quantity = models.IntegerField()
def __str__(self):
return '{} - qty. {}'.format(self.craft.name, self.quantity)
class RuneObjectBase:
# Provides basic rune related constants
TYPE_ENERGY = 1
TYPE_FATAL = 2
TYPE_BLADE = 3
TYPE_RAGE = 4
TYPE_SWIFT = 5
TYPE_FOCUS = 6
TYPE_GUARD = 7
TYPE_ENDURE = 8
TYPE_VIOLENT = 9
TYPE_WILL = 10
TYPE_NEMESIS = 11
TYPE_SHIELD = 12
TYPE_REVENGE = 13
TYPE_DESPAIR = 14
TYPE_VAMPIRE = 15
TYPE_DESTROY = 16
TYPE_FIGHT = 17
TYPE_DETERMINATION = 18
TYPE_ENHANCE = 19
TYPE_ACCURACY = 20
TYPE_TOLERANCE = 21
TYPE_CHOICES = (
(TYPE_ENERGY, 'Energy'),
(TYPE_FATAL, 'Fatal'),
(TYPE_BLADE, 'Blade'),
(TYPE_RAGE, 'Rage'),
(TYPE_SWIFT, 'Swift'),
(TYPE_FOCUS, 'Focus'),
(TYPE_GUARD, 'Guard'),
(TYPE_ENDURE, 'Endure'),
(TYPE_VIOLENT, 'Violent'),
(TYPE_WILL, 'Will'),
(TYPE_NEMESIS, 'Nemesis'),
(TYPE_SHIELD, 'Shield'),
(TYPE_REVENGE, 'Revenge'),
(TYPE_DESPAIR, 'Despair'),
(TYPE_VAMPIRE, 'Vampire'),
(TYPE_DESTROY, 'Destroy'),
(TYPE_FIGHT, 'Fight'),
(TYPE_DETERMINATION, 'Determination'),
(TYPE_ENHANCE, 'Enhance'),
(TYPE_ACCURACY, 'Accuracy'),
(TYPE_TOLERANCE, 'Tolerance'),
)
STAR_CHOICES = (
(1, 1),
(2, 2),
(3, 3),
(4, 4),
(5, 5),
(6, 6),
)
STAT_HP = 1
STAT_HP_PCT = 2
STAT_ATK = 3
STAT_ATK_PCT = 4
STAT_DEF = 5
STAT_DEF_PCT = 6
STAT_SPD = 7
STAT_CRIT_RATE_PCT = 8
STAT_CRIT_DMG_PCT = 9
STAT_RESIST_PCT = 10
STAT_ACCURACY_PCT = 11
# Used for selecting type of stat in form
STAT_CHOICES = (
(STAT_HP, 'HP'),
(STAT_HP_PCT, 'HP %'),
(STAT_ATK, 'ATK'),
(STAT_ATK_PCT, 'ATK %'),
(STAT_DEF, 'DEF'),
(STAT_DEF_PCT, 'DEF %'),
(STAT_SPD, 'SPD'),
(STAT_CRIT_RATE_PCT, 'CRI Rate %'),
(STAT_CRIT_DMG_PCT, 'CRI Dmg %'),
(STAT_RESIST_PCT, 'Resistance %'),
(STAT_ACCURACY_PCT, 'Accuracy %'),
)
# The STAT_DISPLAY is used to construct rune values for display as 'HP: 5%' rather than 'HP %: 5' using
# the built in get_FOO_display() functions
STAT_DISPLAY = {
STAT_HP: 'HP',
STAT_HP_PCT: 'HP',
STAT_ATK: 'ATK',
STAT_ATK_PCT: 'ATK',
STAT_DEF: 'DEF',
STAT_DEF_PCT: 'DEF',
STAT_SPD: 'SPD',
STAT_CRIT_RATE_PCT: 'CRI Rate',
STAT_CRIT_DMG_PCT: 'CRI Dmg',
STAT_RESIST_PCT: 'Resistance',
STAT_ACCURACY_PCT: 'Accuracy',
}
PERCENT_STATS = [
STAT_HP_PCT,
STAT_ATK_PCT,
STAT_DEF_PCT,
STAT_CRIT_RATE_PCT,
STAT_CRIT_DMG_PCT,
STAT_RESIST_PCT,
STAT_ACCURACY_PCT,
]
FLAT_STATS = [
STAT_HP,
STAT_ATK,
STAT_DEF,
STAT_SPD,
]
QUALITY_NORMAL = 0
QUALITY_MAGIC = 1
QUALITY_RARE = 2
QUALITY_HERO = 3
QUALITY_LEGEND = 4
QUALITY_CHOICES = (
(QUALITY_NORMAL, 'Normal'),
(QUALITY_MAGIC, 'Magic'),
(QUALITY_RARE, 'Rare'),
(QUALITY_HERO, 'Hero'),
(QUALITY_LEGEND, 'Legend'),
)
class Rune(models.Model, RuneObjectBase):
MAIN_STAT_VALUES = {
# [stat][stars][level]: value
RuneObjectBase.STAT_HP: {
1: [40, 85, 130, 175, 220, 265, 310, 355, 400, 445, 490, 535, 580, 625, 670, 804],
2: [70, 130, 190, 250, 310, 370, 430, 490, 550, 610, 670, 730, 790, 850, 910, 1092],
3: [100, 175, 250, 325, 400, 475, 550, 625, 700, 775, 850, 925, 1000, 1075, 1150, 1380],
4: [160, 250, 340, 430, 520, 610, 700, 790, 880, 970, 1060, 1150, 1240, 1330, 1420, 1704],
5: [270, 375, 480, 585, 690, 795, 900, 1005, 1110, 1215, 1320, 1425, 1530, 1635, 1740, 2088],
6: [360, 480, 600, 720, 840, 960, 1080, 1200, 1320, 1440, 1560, 1680, 1800, 1920, 2040, 2448],
},
RuneObjectBase.STAT_HP_PCT: {
1: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 18],
2: [2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 19],
3: [4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 38],
4: [5, 7, 9, 11, 13, 16, 18, 20, 22, 24, 27, 29, 31, 33, 36, 43],
5: [8, 10, 12, 15, 17, 20, 22, 24, 27, 29, 32, 34, 37, 40, 43, 51],
6: [11, 14, 17, 20, 23, 26, 29, 32, 35, 38, 41, 44, 47, 50, 53, 63],
},
RuneObjectBase.STAT_ATK: {
1: [3, 6, 9, 12, 15, 18, 21, 24, 27, 30, 33, 36, 39, 42, 45, 54],
2: [5, 9, 13, 17, 21, 25, 29, 33, 37, 41, 45, 49, 53, 57, 61, 73],
3: [7, 12, 17, 22, 27, 32, 37, 42, 47, 52, 57, 62, 67, 72, 77, 92],
4: [10, 16, 22, 28, 34, 40, 46, 52, 58, 64, 70, 76, 82, 88, 94, 112],
5: [15, 22, 29, 36, 43, 50, 57, 64, 71, 78, 85, 92, 99, 106, 113, 135],
6: [22, 30, 38, 46, 54, 62, 70, 78, 86, 94, 102, 110, 118, 126, 134, 160],
},
RuneObjectBase.STAT_ATK_PCT: {
1: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 18],
2: [2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 19],
3: [4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 38],
4: [5, 7, 9, 11, 13, 16, 18, 20, 22, 24, 27, 29, 31, 33, 36, 43],
5: [8, 10, 12, 15, 17, 20, 22, 24, 27, 29, 32, 34, 37, 40, 43, 51],
6: [11, 14, 17, 20, 23, 26, 29, 32, 35, 38, 41, 44, 47, 50, 53, 63],
},
RuneObjectBase.STAT_DEF: {
1: [3, 6, 9, 12, 15, 18, 21, 24, 27, 30, 33, 36, 39, 42, 45, 54],
2: [5, 9, 13, 17, 21, 25, 29, 33, 37, 41, 45, 49, 53, 57, 61, 73],
3: [7, 12, 17, 22, 27, 32, 37, 42, 47, 52, 57, 62, 67, 72, 77, 92],
4: [10, 16, 22, 28, 34, 40, 46, 52, 58, 64, 70, 76, 82, 88, 94, 112],
5: [15, 22, 29, 36, 43, 50, 57, 64, 71, 78, 85, 92, 99, 106, 113, 135],
6: [22, 30, 38, 46, 54, 62, 70, 78, 86, 94, 102, 110, 118, 126, 134, 160],
},
RuneObjectBase.STAT_DEF_PCT: {
1: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 18],
2: [2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 19],
3: [4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 38],
4: [5, 7, 9, 11, 13, 16, 18, 20, 22, 24, 27, 29, 31, 33, 36, 43],
5: [8, 10, 12, 15, 17, 20, 22, 24, 27, 29, 32, 34, 37, 40, 43, 51],
6: [11, 14, 17, 20, 23, 26, 29, 32, 35, 38, 41, 44, 47, 50, 53, 63],
},
RuneObjectBase.STAT_SPD: {
1: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 18],
2: [2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 19],
3: [3, 4, 5, 6, 8, 9, 10, 12, 13, 14, 16, 17, 18, 19, 21, 25],
4: [4, 5, 7, 8, 10, 11, 13, 14, 16, 17, 19, 20, 22, 23, 25, 30],
5: [5, 7, 9, 11, 13, 15, 17, 19, 21, 23, 25, 27, 29, 31, 33, 39],
6: [7, 9, 11, 13, 15, 17, 19, 21, 23, 25, 27, 29, 31, 33, 35, 42],
},
RuneObjectBase.STAT_CRIT_RATE_PCT: {
1: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 18],
2: [2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 19],
3: [3, 5, 7, 9, 11, 13, 15, 17, 19, 21, 23, 25, 27, 29, 31, 37],
4: [4, 6, 8, 11, 13, 15, 17, 19, 22, 24, 26, 28, 30, 33, 35, 41],
5: [5, 7, 10, 12, 15, 17, 19, 22, 24, 27, 29, 31, 34, 36, 39, 47],
6: [7, 10, 13, 16, 19, 22, 25, 28, 31, 34, 37, 40, 43, 46, 49, 58],
},
RuneObjectBase.STAT_CRIT_DMG_PCT: {
1: [2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 19],
2: [3, 5, 7, 9, 11, 13, 15, 17, 19, 21, 23, 25, 27, 29, 31, 37],
3: [4, 6, 9, 11, 13, 16, 18, 20, 22, 25, 27, 29, 32, 34, 36, 43],
4: [6, 9, 12, 15, 18, 21, 24, 27, 30, 33, 36, 39, 42, 45, 48, 57],
5: [8, 11, 15, 18, 21, 25, 28, 31, 34, 38, 41, 44, 48, 51, 54, 65],
6: [11, 15, 19, 23, 27, 31, 35, 39, 43, 47, 51, 55, 59, 63, 67, 80],
},
RuneObjectBase.STAT_RESIST_PCT: {
1: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 18],
2: [2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 19],
3: [4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 38],
4: [6, 8, 10, 13, 15, 17, 19, 21, 24, 26, 28, 30, 32, 35, 37, 44],
5: [9, 11, 14, 16, 19, 21, 23, 26, 28, 31, 33, 35, 38, 40, 43, 51],
6: [12, 15, 18, 21, 24, 27, 30, 33, 36, 39, 42, 45, 48, 51, 54, 64],
},
RuneObjectBase.STAT_ACCURACY_PCT: {
1: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 18],
2: [2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 19],
3: [4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 38],
4: [6, 8, 10, 13, 15, 17, 19, 21, 24, 26, 28, 30, 32, 35, 37, 44],
5: [9, 11, 14, 16, 19, 21, 23, 26, 28, 31, 33, 35, 38, 40, 43, 51],
6: [12, 15, 18, 21, 24, 27, 30, 33, 36, 39, 42, 45, 48, 51, 54, 64],
},
}
MAIN_STATS_BY_SLOT = {
1: [
RuneObjectBase.STAT_ATK,
],
2: [
RuneObjectBase.STAT_ATK,
RuneObjectBase.STAT_ATK_PCT,
RuneObjectBase.STAT_DEF,
RuneObjectBase.STAT_DEF_PCT,
RuneObjectBase.STAT_HP,
RuneObjectBase.STAT_HP_PCT,
RuneObjectBase.STAT_SPD,
],
3: [
RuneObjectBase.STAT_DEF,
],
4: [
RuneObjectBase.STAT_ATK,
RuneObjectBase.STAT_ATK_PCT,
RuneObjectBase.STAT_DEF,
RuneObjectBase.STAT_DEF_PCT,
RuneObjectBase.STAT_HP,
RuneObjectBase.STAT_HP_PCT,
RuneObjectBase.STAT_CRIT_RATE_PCT,
RuneObjectBase.STAT_CRIT_DMG_PCT,
],
5: [
RuneObjectBase.STAT_HP,
],
6: [
RuneObjectBase.STAT_ATK,
RuneObjectBase.STAT_ATK_PCT,
RuneObjectBase.STAT_DEF,
RuneObjectBase.STAT_DEF_PCT,
RuneObjectBase.STAT_HP,
RuneObjectBase.STAT_HP_PCT,
RuneObjectBase.STAT_RESIST_PCT,
RuneObjectBase.STAT_ACCURACY_PCT,
]
}
SUBSTAT_INCREMENTS = {
# [stat][stars]: value
RuneObjectBase.STAT_HP: {
1: 60,
2: 105,
3: 165,
4: 225,
5: 300,
6: 375,
},
RuneObjectBase.STAT_HP_PCT: {
1: 2,
2: 3,
3: 5,
4: 6,
5: 7,
6: 8,
},
RuneObjectBase.STAT_ATK: {
1: 4,
2: 5,
3: 8,
4: 10,
5: 15,
6: 20,
},
RuneObjectBase.STAT_ATK_PCT: {
1: 2,
2: 3,
3: 5,
4: 6,
5: 7,
6: 8,
},
RuneObjectBase.STAT_DEF: {
1: 4,
2: 5,
3: 8,
4: 10,
5: 15,
6: 20,
},
RuneObjectBase.STAT_DEF_PCT: {
1: 2,
2: 3,
3: 5,
4: 6,
5: 7,
6: 8,
},
RuneObjectBase.STAT_SPD: {
1: 1,
2: 2,
3: 3,
4: 4,
5: 5,
6: 6,
},
RuneObjectBase.STAT_CRIT_RATE_PCT: {
1: 1,
2: 2,
3: 3,
4: 4,
5: 5,
6: 6,
},
RuneObjectBase.STAT_CRIT_DMG_PCT: {
1: 2,
2: 3,
3: 4,
4: 5,
5: 6,
6: 7,
},
RuneObjectBase.STAT_RESIST_PCT: {
1: 2,
2: 3,
3: 5,
4: 6,
5: 7,
6: 8,
},
RuneObjectBase.STAT_ACCURACY_PCT: {
1: 2,
2: 3,
3: 5,
4: 6,
5: 7,
6: 8,
},
}
INNATE_STAT_TITLES = {
RuneObjectBase.STAT_HP: 'Strong',
RuneObjectBase.STAT_HP_PCT: 'Tenacious',
RuneObjectBase.STAT_ATK: 'Ferocious',
RuneObjectBase.STAT_ATK_PCT: 'Powerful',
RuneObjectBase.STAT_DEF: 'Sturdy',
RuneObjectBase.STAT_DEF_PCT: 'Durable',
RuneObjectBase.STAT_SPD: 'Quick',
RuneObjectBase.STAT_CRIT_RATE_PCT: 'Mortal',
RuneObjectBase.STAT_CRIT_DMG_PCT: 'Cruel',
RuneObjectBase.STAT_RESIST_PCT: 'Resistant',
RuneObjectBase.STAT_ACCURACY_PCT: 'Intricate',
}
RUNE_SET_COUNT_REQUIREMENTS = {
RuneObjectBase.TYPE_ENERGY: 2,
RuneObjectBase.TYPE_FATAL: 4,
RuneObjectBase.TYPE_BLADE: 2,
RuneObjectBase.TYPE_RAGE: 4,
RuneObjectBase.TYPE_SWIFT: 4,
RuneObjectBase.TYPE_FOCUS: 2,
RuneObjectBase.TYPE_GUARD: 2,
RuneObjectBase.TYPE_ENDURE: 2,
RuneObjectBase.TYPE_VIOLENT: 4,
RuneObjectBase.TYPE_WILL: 2,
RuneObjectBase.TYPE_NEMESIS: 2,
RuneObjectBase.TYPE_SHIELD: 2,
RuneObjectBase.TYPE_REVENGE: 2,
RuneObjectBase.TYPE_DESPAIR: 4,
RuneObjectBase.TYPE_VAMPIRE: 4,
RuneObjectBase.TYPE_DESTROY: 2,
RuneObjectBase.TYPE_FIGHT: 2,
RuneObjectBase.TYPE_DETERMINATION: 2,
RuneObjectBase.TYPE_ENHANCE: 2,
RuneObjectBase.TYPE_ACCURACY: 2,
RuneObjectBase.TYPE_TOLERANCE: 2,
}
RUNE_SET_BONUSES = {
RuneObjectBase.TYPE_ENERGY: {
'count': 2,
'stat': RuneObjectBase.STAT_HP_PCT,
'value': 15.0,
'team': False,
'description': '2 Set: HP +15%',
},
RuneObjectBase.TYPE_FATAL: {
'count': 4,
'stat': RuneObjectBase.STAT_ATK_PCT,
'value': 35.0,
'team': False,
'description': '4 Set: Attack Power +35%',
},
RuneObjectBase.TYPE_BLADE: {
'count': 2,
'stat': RuneObjectBase.STAT_CRIT_RATE_PCT,
'value': 12.0,
'team': False,
'description': '2 Set: Critical Rate +12%',
},
RuneObjectBase.TYPE_RAGE: {
'count': 4,
'stat': RuneObjectBase.STAT_CRIT_DMG_PCT,
'value': 40.0,
'team': False,
'description': '4 Set: Critical Damage +40%',
},
RuneObjectBase.TYPE_SWIFT: {
'count': 4,
'stat': RuneObjectBase.STAT_SPD,
'value': 25.0,
'team': False,
'description': '4 Set: Attack Speed +25%',
},
RuneObjectBase.TYPE_FOCUS: {
'count': 2,
'stat': RuneObjectBase.STAT_ACCURACY_PCT,
'value': 20.0,
'team': False,
'description': '2 Set: Accuracy +20%',
},
RuneObjectBase.TYPE_GUARD: {
'count': 2,
'stat': RuneObjectBase.STAT_DEF_PCT,
'value': 15.0,
'team': False,
'description': '2 Set: Defense +15%',
},
RuneObjectBase.TYPE_ENDURE: {
'count': 2,
'stat': RuneObjectBase.STAT_RESIST_PCT,
'value': 20.0,
'team': False,
'description': '2 Set: Resistance +20%',
},
RuneObjectBase.TYPE_VIOLENT: {
'count': 4,
'stat': None,
'value': None,
'team': False,
'description': '4 Set: Get Extra Turn +22%',
},
RuneObjectBase.TYPE_WILL: {
'count': 2,
'stat': None,
'value': None,
'team': False,
'description': '2 Set: Immunity +1 turn',
},
RuneObjectBase.TYPE_NEMESIS: {
'count': 2,
'stat': None,
'value': None,
'team': False,
'description': '2 Set: ATK Gauge +4% (for every 7% HP lost)',
},
RuneObjectBase.TYPE_SHIELD: {
'count': 2,
'stat': None,
'value': None,
'team': True,
'description': '2 Set: Ally Shield 3 turns (15% of HP)',
},
RuneObjectBase.TYPE_REVENGE: {
'count': 2,
'stat': None,
'value': None,
'team': False,
'description': '2 Set: Counterattack +15%',
},
RuneObjectBase.TYPE_DESPAIR: {
'count': 4,
'stat': None,
'value': None,
'team': False,
'description': '4 Set: Stun Rate +25%',
},
RuneObjectBase.TYPE_VAMPIRE: {
'count': 4,
'stat': None,
'value': None,
'team': False,
'description': '4 Set: Life Drain +35%',
},
RuneObjectBase.TYPE_DESTROY: {
'count': 2,
'stat': None,
'value': None,
'team': False,
'description': "2 Set: 30% of the damage dealt will reduce up to 4% of the enemy's Max HP",
},
RuneObjectBase.TYPE_FIGHT: {
'count': 2,
'stat': RuneObjectBase.STAT_ATK,
'value': 7.0,
'team': True,
'description': '2 Set: Increase the Attack Power of all allies by 7%',
},
RuneObjectBase.TYPE_DETERMINATION: {
'count': 2,
'stat': RuneObjectBase.STAT_DEF,
'value': 7.0,
'team': True,
'description': '2 Set: Increase the Defense of all allies by 7%',
},
RuneObjectBase.TYPE_ENHANCE: {
'count': 2,
'stat': RuneObjectBase.STAT_HP,
'value': 7.0,
'team': True,
'description': '2 Set: Increase the HP of all allies by 7%',
},
RuneObjectBase.TYPE_ACCURACY: {
'count': 2,
'stat': RuneObjectBase.STAT_ACCURACY_PCT,
'value': 10.0,
'team': True,
'description': '2 Set: Increase the Accuracy of all allies by 10%',
},
RuneObjectBase.TYPE_TOLERANCE: {
'count': 2,
'stat': RuneObjectBase.STAT_RESIST_PCT,
'value': 10.0,
'team': True,
'description': '2 Set: Increase the Resistance of all allies by 10%',
},
}
type = models.IntegerField(choices=RuneObjectBase.TYPE_CHOICES)
stars = models.IntegerField()
level = models.IntegerField()
slot = models.IntegerField()
quality = models.IntegerField(default=0, choices=RuneObjectBase.QUALITY_CHOICES)
original_quality = models.IntegerField(choices=RuneObjectBase.QUALITY_CHOICES, blank=True, null=True)
value = models.IntegerField(blank=True, null=True)
main_stat = models.IntegerField(choices=RuneObjectBase.STAT_CHOICES)
main_stat_value = models.IntegerField()
innate_stat = models.IntegerField(choices=RuneObjectBase.STAT_CHOICES, null=True, blank=True)
innate_stat_value = models.IntegerField(null=True, blank=True)
substats = ArrayField(
models.IntegerField(choices=RuneObjectBase.STAT_CHOICES, null=True, blank=True),
size=4,
default=list,
)
substat_values = ArrayField(
models.IntegerField(blank=True, null=True),
size=4,
default=list,
)
# The following fields exist purely to allow easier filtering and are updated on model save
has_hp = models.BooleanField(default=False)
has_atk = models.BooleanField(default=False)
has_def = models.BooleanField(default=False)
has_crit_rate = models.BooleanField(default=False)
has_crit_dmg = models.BooleanField(default=False)
has_speed = models.BooleanField(default=False)
has_resist = models.BooleanField(default=False)
has_accuracy = models.BooleanField(default=False)
efficiency = models.FloatField(blank=True, null=True)
max_efficiency = models.FloatField(blank=True, null=True)
substat_upgrades_remaining = models.IntegerField(blank=True, null=True)
class Meta:
abstract = True
def get_main_stat_rune_display(self):
return RuneObjectBase.STAT_DISPLAY.get(self.main_stat, '')
def get_innate_stat_rune_display(self):
return RuneObjectBase.STAT_DISPLAY.get(self.innate_stat, '')
def get_substat_rune_display(self, idx):
if len(self.substats) > idx:
return RuneObjectBase.STAT_DISPLAY.get(self.substats[idx], '')
else:
return ''
def get_stat(self, stat_type, sub_stats_only=False):
if self.main_stat == stat_type and not sub_stats_only:
return self.main_stat_value
elif self.innate_stat == stat_type and not sub_stats_only:
return self.innate_stat_value
else:
for idx, substat in enumerate(self.substats):
if substat == stat_type:
return self.substat_values[idx]
return 0
@property
def substat_upgrades_received(self):
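        # A rune gains or upgrades a substat every 3 levels up to +12; the +1 counts the rolls present when it dropped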
return int(floor(min(self.level, 12) / 3) + 1)
def get_efficiency(self):
# https://www.youtube.com/watch?v=SBWeptNNbYc
# All runes are compared against max stat values for perfect 6* runes.
# Main stat efficiency
running_sum = float(self.MAIN_STAT_VALUES[self.main_stat][self.stars][15]) / float(self.MAIN_STAT_VALUES[self.main_stat][6][15])
# Substat efficiencies
if self.innate_stat is not None:
running_sum += self.innate_stat_value / float(self.SUBSTAT_INCREMENTS[self.innate_stat][6] * 5)
for substat, value in zip(self.substats, self.substat_values):
running_sum += value / float(self.SUBSTAT_INCREMENTS[substat][6] * 5)
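        # The 2.8 divisor represents a theoretically perfect 6-star rune: 1.0 for the main stat
        # plus nine maximum substat rolls (innate + four initial + four upgrades) worth 0.2 each.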
return running_sum / 2.8 * 100
def update_fields(self):
# Set filterable fields
rune_stat_types = [self.main_stat, self.innate_stat] + self.substats
self.has_hp = any([i for i in rune_stat_types if i in [self.STAT_HP, self.STAT_HP_PCT]])
self.has_atk = any([i for i in rune_stat_types if i in [self.STAT_ATK, self.STAT_ATK_PCT]])
self.has_def = any([i for i in rune_stat_types if i in [self.STAT_DEF, self.STAT_DEF_PCT]])
self.has_crit_rate = self.STAT_CRIT_RATE_PCT in rune_stat_types
self.has_crit_dmg = self.STAT_CRIT_DMG_PCT in rune_stat_types
self.has_speed = self.STAT_SPD in rune_stat_types
self.has_resist = self.STAT_RESIST_PCT in rune_stat_types
self.has_accuracy = self.STAT_ACCURACY_PCT in rune_stat_types
self.quality = len([substat for substat in self.substats if substat])
self.substat_upgrades_remaining = 5 - self.substat_upgrades_received
self.efficiency = self.get_efficiency()
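        # Remaining upgrade chances (one per 3 levels up to +12) are assumed to be perfect
        # rolls worth 0.2 each, converted to percent with the same 2.8 divisor as above.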
self.max_efficiency = self.efficiency + max(ceil((12 - self.level) / 3.0), 0) * 0.2 / 2.8 * 100
# Cap stat values to appropriate value
# Very old runes can have different values, but never higher than the cap
if self.main_stat_value:
self.main_stat_value = min(self.MAIN_STAT_VALUES[self.main_stat][self.stars][15], self.main_stat_value)
else:
self.main_stat_value = self.MAIN_STAT_VALUES[self.main_stat][self.stars][self.level]
if self.innate_stat and self.innate_stat_value > self.SUBSTAT_INCREMENTS[self.innate_stat][self.stars]:
self.innate_stat_value = self.SUBSTAT_INCREMENTS[self.innate_stat][self.stars]
for idx, substat in enumerate(self.substats):
max_sub_value = self.SUBSTAT_INCREMENTS[substat][self.stars] * self.substat_upgrades_received
if self.substat_values[idx] > max_sub_value:
self.substat_values[idx] = max_sub_value
def clean(self):
# Check slot, level, etc for valid ranges
if self.level is None or self.level < 0 or self.level > 15:
raise ValidationError({
'level': ValidationError(
'Level must be 0 through 15.',
code='invalid_rune_level',
)
})
if self.stars is None or (self.stars < 1 or self.stars > 6):
raise ValidationError({
'stars': ValidationError(
'Stars must be between 1 and 6.',
code='invalid_rune_stars',
)
})
if self.slot is not None:
if self.slot < 1 or self.slot > 6:
raise ValidationError({
'slot': ValidationError(
'Slot must be 1 through 6.',
code='invalid_rune_slot',
)
})
# Do slot vs stat check
if self.main_stat not in self.MAIN_STATS_BY_SLOT[self.slot]:
raise ValidationError({
'main_stat': ValidationError(
'Unacceptable stat for slot %(slot)s. Must be %(valid_stats)s.',
params={
'slot': self.slot,
'valid_stats': ', '.join([RuneObjectBase.STAT_CHOICES[stat - 1][1] for stat in self.MAIN_STATS_BY_SLOT[self.slot]])
},
code='invalid_rune_main_stat'
),
})
# Check that the same stat type was not used multiple times
stat_list = list(filter(
partial(is_not, None),
[self.main_stat, self.innate_stat] + self.substats
))
if len(stat_list) != len(set(stat_list)):
raise ValidationError(
'All stats and substats must be unique.',
code='duplicate_stats'
)
# Check if stat type was specified that it has value > 0
if self.main_stat_value is None:
raise ValidationError({
'main_stat_value': ValidationError(
'Missing main stat value.',
code='main_stat_missing_value',
)
})
max_main_stat_value = self.MAIN_STAT_VALUES[self.main_stat][self.stars][self.level]
if self.main_stat_value > max_main_stat_value:
raise ValidationError(
                f'Main stat value for {self.get_main_stat_display()} at {self.stars}* lv. {self.level} must be less than or equal to {max_main_stat_value}',
code='main_stat_value_invalid',
)
if self.innate_stat is not None:
if self.innate_stat_value is None or self.innate_stat_value <= 0:
raise ValidationError({
'innate_stat_value': ValidationError(
'Must be greater than 0.',
code='invalid_rune_innate_stat_value'
)
})
max_sub_value = self.SUBSTAT_INCREMENTS[self.innate_stat][self.stars]
if self.innate_stat_value > max_sub_value:
raise ValidationError({
'innate_stat_value': ValidationError(
'Must be less than or equal to ' + str(max_sub_value) + '.',
code='invalid_rune_innate_stat_value'
)
})
for substat, value in zip(self.substats, self.substat_values):
if value is None or value <= 0:
raise ValidationError({
                    'substat_values': ValidationError(
                        'Must be greater than 0.',
                        code='invalid_rune_substat_values'
)
})
max_sub_value = self.SUBSTAT_INCREMENTS[substat][self.stars] * self.substat_upgrades_received
if value > max_sub_value:
raise ValidationError({
                    'substat_values': ValidationError(
                        'Must be less than or equal to ' + str(max_sub_value) + '.',
                        code='invalid_rune_substat_value'
)
})
class RuneCraft(RuneObjectBase):
CRAFT_GRINDSTONE = 0
CRAFT_ENCHANT_GEM = 1
CRAFT_IMMEMORIAL_GRINDSTONE = 2
CRAFT_IMMEMORIAL_GEM = 3
CRAFT_CHOICES = (
(CRAFT_GRINDSTONE, 'Grindstone'),
(CRAFT_ENCHANT_GEM, 'Enchant Gem'),
(CRAFT_IMMEMORIAL_GRINDSTONE, 'Immemorial Grindstone'),
(CRAFT_IMMEMORIAL_GEM, 'Immemorial Gem'),
)
CRAFT_ENCHANT_GEMS = [
CRAFT_ENCHANT_GEM,
CRAFT_IMMEMORIAL_GEM,
]
CRAFT_GRINDSTONES = [
CRAFT_GRINDSTONE,
CRAFT_IMMEMORIAL_GRINDSTONE,
]
# Type > Stat > Quality > Min/Max
CRAFT_VALUE_RANGES = {
CRAFT_GRINDSTONE: {
RuneObjectBase.STAT_HP: {
RuneObjectBase.QUALITY_NORMAL: {'min': 80, 'max': 120},
RuneObjectBase.QUALITY_MAGIC: {'min': 100, 'max': 200},
RuneObjectBase.QUALITY_RARE: {'min': 180, 'max': 250},
RuneObjectBase.QUALITY_HERO: {'min': 230, 'max': 450},
RuneObjectBase.QUALITY_LEGEND: {'min': 430, 'max': 550},
},
RuneObjectBase.STAT_HP_PCT: {
RuneObjectBase.QUALITY_NORMAL: {'min': 1, 'max': 3},
RuneObjectBase.QUALITY_MAGIC: {'min': 2, 'max': 5},
RuneObjectBase.QUALITY_RARE: {'min': 3, 'max': 6},
RuneObjectBase.QUALITY_HERO: {'min': 4, 'max': 7},
RuneObjectBase.QUALITY_LEGEND: {'min': 5, 'max': 10},
},
RuneObjectBase.STAT_ATK: {
RuneObjectBase.QUALITY_NORMAL: {'min': 4, 'max': 8},
RuneObjectBase.QUALITY_MAGIC: {'min': 6, 'max': 12},
RuneObjectBase.QUALITY_RARE: {'min': 10, 'max': 18},
RuneObjectBase.QUALITY_HERO: {'min': 12, 'max': 22},
RuneObjectBase.QUALITY_LEGEND: {'min': 18, 'max': 30},
},
RuneObjectBase.STAT_ATK_PCT: {
RuneObjectBase.QUALITY_NORMAL: {'min': 1, 'max': 3},
RuneObjectBase.QUALITY_MAGIC: {'min': 2, 'max': 5},
RuneObjectBase.QUALITY_RARE: {'min': 3, 'max': 6},
RuneObjectBase.QUALITY_HERO: {'min': 4, 'max': 7},
RuneObjectBase.QUALITY_LEGEND: {'min': 5, 'max': 10},
},
RuneObjectBase.STAT_DEF: {
RuneObjectBase.QUALITY_NORMAL: {'min': 4, 'max': 8},
RuneObjectBase.QUALITY_MAGIC: {'min': 6, 'max': 12},
RuneObjectBase.QUALITY_RARE: {'min': 10, 'max': 18},
RuneObjectBase.QUALITY_HERO: {'min': 12, 'max': 22},
RuneObjectBase.QUALITY_LEGEND: {'min': 18, 'max': 30},
},
RuneObjectBase.STAT_DEF_PCT: {
RuneObjectBase.QUALITY_NORMAL: {'min': 1, 'max': 3},
RuneObjectBase.QUALITY_MAGIC: {'min': 2, 'max': 5},
RuneObjectBase.QUALITY_RARE: {'min': 3, 'max': 6},
RuneObjectBase.QUALITY_HERO: {'min': 4, 'max': 7},
RuneObjectBase.QUALITY_LEGEND: {'min': 5, 'max': 10},
},
RuneObjectBase.STAT_SPD: {
RuneObjectBase.QUALITY_NORMAL: {'min': 1, 'max': 2},
RuneObjectBase.QUALITY_MAGIC: {'min': 1, 'max': 2},
RuneObjectBase.QUALITY_RARE: {'min': 2, 'max': 3},
RuneObjectBase.QUALITY_HERO: {'min': 3, 'max': 4},
RuneObjectBase.QUALITY_LEGEND: {'min': 4, 'max': 5},
},
RuneObjectBase.STAT_CRIT_RATE_PCT: {
RuneObjectBase.QUALITY_NORMAL: {'min': 1, 'max': 2},
RuneObjectBase.QUALITY_MAGIC: {'min': 1, 'max': 3},
RuneObjectBase.QUALITY_RARE: {'min': 2, 'max': 4},
RuneObjectBase.QUALITY_HERO: {'min': 3, 'max': 5},
RuneObjectBase.QUALITY_LEGEND: {'min': 4, 'max': 6},
},
RuneObjectBase.STAT_CRIT_DMG_PCT: {
RuneObjectBase.QUALITY_NORMAL: {'min': 1, 'max': 3},
RuneObjectBase.QUALITY_MAGIC: {'min': 2, 'max': 4},
RuneObjectBase.QUALITY_RARE: {'min': 2, 'max': 5},
RuneObjectBase.QUALITY_HERO: {'min': 3, 'max': 5},
RuneObjectBase.QUALITY_LEGEND: {'min': 4, 'max': 7},
},
RuneObjectBase.STAT_RESIST_PCT: {
RuneObjectBase.QUALITY_NORMAL: {'min': 1, 'max': 3},
RuneObjectBase.QUALITY_MAGIC: {'min': 2, 'max': 4},
RuneObjectBase.QUALITY_RARE: {'min': 2, 'max': 5},
RuneObjectBase.QUALITY_HERO: {'min': 3, 'max': 7},
RuneObjectBase.QUALITY_LEGEND: {'min': 4, 'max': 8},
},
RuneObjectBase.STAT_ACCURACY_PCT: {
RuneObjectBase.QUALITY_NORMAL: {'min': 1, 'max': 3},
RuneObjectBase.QUALITY_MAGIC: {'min': 2, 'max': 4},
RuneObjectBase.QUALITY_RARE: {'min': 2, 'max': 5},
RuneObjectBase.QUALITY_HERO: {'min': 3, 'max': 7},
RuneObjectBase.QUALITY_LEGEND: {'min': 4, 'max': 8},
},
},
CRAFT_ENCHANT_GEM: {
RuneObjectBase.STAT_HP: {
RuneObjectBase.QUALITY_NORMAL: {'min': 100, 'max': 150},
RuneObjectBase.QUALITY_MAGIC: {'min': 130, 'max': 220},
RuneObjectBase.QUALITY_RARE: {'min': 200, 'max': 310},
RuneObjectBase.QUALITY_HERO: {'min': 290, 'max': 420},
RuneObjectBase.QUALITY_LEGEND: {'min': 400, 'max': 580},
},
RuneObjectBase.STAT_HP_PCT: {
RuneObjectBase.QUALITY_NORMAL: {'min': 2, 'max': 4},
RuneObjectBase.QUALITY_MAGIC: {'min': 3, 'max': 7},
RuneObjectBase.QUALITY_RARE: {'min': 5, 'max': 9},
RuneObjectBase.QUALITY_HERO: {'min': 7, 'max': 11},
RuneObjectBase.QUALITY_LEGEND: {'min': 9, 'max': 13},
},
RuneObjectBase.STAT_ATK: {
RuneObjectBase.QUALITY_NORMAL: {'min': 8, 'max': 12},
RuneObjectBase.QUALITY_MAGIC: {'min': 10, 'max': 16},
RuneObjectBase.QUALITY_RARE: {'min': 15, 'max': 23},
RuneObjectBase.QUALITY_HERO: {'min': 20, 'max': 30},
RuneObjectBase.QUALITY_LEGEND: {'min': 28, 'max': 40},
},
RuneObjectBase.STAT_ATK_PCT: {
RuneObjectBase.QUALITY_NORMAL: {'min': 2, 'max': 4},
RuneObjectBase.QUALITY_MAGIC: {'min': 3, 'max': 7},
RuneObjectBase.QUALITY_RARE: {'min': 5, 'max': 9},
RuneObjectBase.QUALITY_HERO: {'min': 7, 'max': 11},
RuneObjectBase.QUALITY_LEGEND: {'min': 9, 'max': 13},
},
RuneObjectBase.STAT_DEF: {
RuneObjectBase.QUALITY_NORMAL: {'min': 8, 'max': 12},
RuneObjectBase.QUALITY_MAGIC: {'min': 10, 'max': 16},
RuneObjectBase.QUALITY_RARE: {'min': 15, 'max': 23},
RuneObjectBase.QUALITY_HERO: {'min': 20, 'max': 30},
RuneObjectBase.QUALITY_LEGEND: {'min': 28, 'max': 40},
},
RuneObjectBase.STAT_DEF_PCT: {
RuneObjectBase.QUALITY_NORMAL: {'min': 2, 'max': 4},
RuneObjectBase.QUALITY_MAGIC: {'min': 3, 'max': 7},
RuneObjectBase.QUALITY_RARE: {'min': 5, 'max': 9},
RuneObjectBase.QUALITY_HERO: {'min': 7, 'max': 11},
RuneObjectBase.QUALITY_LEGEND: {'min': 9, 'max': 13},
},
RuneObjectBase.STAT_SPD: {
RuneObjectBase.QUALITY_NORMAL: {'min': 1, 'max': 3},
RuneObjectBase.QUALITY_MAGIC: {'min': 2, 'max': 4},
RuneObjectBase.QUALITY_RARE: {'min': 3, 'max': 6},
RuneObjectBase.QUALITY_HERO: {'min': 5, 'max': 8},
RuneObjectBase.QUALITY_LEGEND: {'min': 7, 'max': 10},
},
RuneObjectBase.STAT_CRIT_RATE_PCT: {
RuneObjectBase.QUALITY_NORMAL: {'min': 1, 'max': 3},
RuneObjectBase.QUALITY_MAGIC: {'min': 2, 'max': 4},
RuneObjectBase.QUALITY_RARE: {'min': 3, 'max': 5},
RuneObjectBase.QUALITY_HERO: {'min': 4, 'max': 7},
RuneObjectBase.QUALITY_LEGEND: {'min': 6, 'max': 9},
},
RuneObjectBase.STAT_CRIT_DMG_PCT: {
RuneObjectBase.QUALITY_NORMAL: {'min': 2, 'max': 4},
RuneObjectBase.QUALITY_MAGIC: {'min': 3, 'max': 5},
RuneObjectBase.QUALITY_RARE: {'min': 4, 'max': 6},
RuneObjectBase.QUALITY_HERO: {'min': 5, 'max': 8},
RuneObjectBase.QUALITY_LEGEND: {'min': 7, 'max': 10},
},
RuneObjectBase.STAT_RESIST_PCT: {
RuneObjectBase.QUALITY_NORMAL: {'min': 2, 'max': 4},
RuneObjectBase.QUALITY_MAGIC: {'min': 3, 'max': 6},
RuneObjectBase.QUALITY_RARE: {'min': 5, 'max': 8},
RuneObjectBase.QUALITY_HERO: {'min': 6, 'max': 9},
RuneObjectBase.QUALITY_LEGEND: {'min': 8, 'max': 11},
},
RuneObjectBase.STAT_ACCURACY_PCT: {
RuneObjectBase.QUALITY_NORMAL: {'min': 2, 'max': 4},
RuneObjectBase.QUALITY_MAGIC: {'min': 3, 'max': 6},
RuneObjectBase.QUALITY_RARE: {'min': 5, 'max': 8},
RuneObjectBase.QUALITY_HERO: {'min': 6, 'max': 9},
RuneObjectBase.QUALITY_LEGEND: {'min': 8, 'max': 11},
},
}
}
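    # Immemorial crafts use the same value ranges as their regular counterparts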
CRAFT_VALUE_RANGES[CRAFT_IMMEMORIAL_GEM] = CRAFT_VALUE_RANGES[CRAFT_ENCHANT_GEM]
CRAFT_VALUE_RANGES[CRAFT_IMMEMORIAL_GRINDSTONE] = CRAFT_VALUE_RANGES[CRAFT_GRINDSTONE]
class Dungeon(models.Model):
CATEGORY_SCENARIO = 0
CATEGORY_RUNE_DUNGEON = 1
CATEGORY_ESSENCE_DUNGEON = 2
CATEGORY_OTHER_DUNGEON = 3
CATEGORY_RAID = 4
CATEGORY_HALL_OF_HEROES = 5
CATEGORY_CHOICES = [
(CATEGORY_SCENARIO, 'Scenarios'),
(CATEGORY_RUNE_DUNGEON, 'Rune Dungeons'),
(CATEGORY_ESSENCE_DUNGEON, 'Elemental Dungeons'),
(CATEGORY_OTHER_DUNGEON, 'Other Dungeons'),
(CATEGORY_RAID, 'Raids'),
(CATEGORY_HALL_OF_HEROES, 'Hall of Heroes'),
]
id = models.IntegerField(primary_key=True)
name = models.CharField(max_length=100)
max_floors = models.IntegerField(default=10)
slug = models.SlugField(blank=True, null=True)
category = models.IntegerField(choices=CATEGORY_CHOICES, blank=True, null=True)
# TODO: Remove following fields when Level model is fully utilized everywhere: energy_cost, xp, monster_slots
# For the following fields:
# Outer array index is difficulty (normal, hard, hell). Inner array index is the stage/floor
# Example: Hell B2 is dungeon.energy_cost[RunLog.DIFFICULTY_HELL][1]
energy_cost = ArrayField(ArrayField(models.IntegerField(blank=True, null=True)), blank=True, null=True)
xp = ArrayField(ArrayField(models.IntegerField(blank=True, null=True)), blank=True, null=True)
monster_slots = ArrayField(ArrayField(models.IntegerField(blank=True, null=True)), blank=True, null=True)
class Meta:
ordering = ['id', ]
def __str__(self):
return self.name
def save(self, *args, **kwargs):
self.slug = slugify(self.name)
super(Dungeon, self).save(*args, **kwargs)
class Level(models.Model):
DIFFICULTY_NORMAL = 1
DIFFICULTY_HARD = 2
DIFFICULTY_HELL = 3
DIFFICULTY_CHOICES = (
(DIFFICULTY_NORMAL, 'Normal'),
(DIFFICULTY_HARD, 'Hard'),
(DIFFICULTY_HELL, 'Hell'),
)
dungeon = models.ForeignKey(Dungeon, on_delete=models.CASCADE)
floor = models.IntegerField()
difficulty = models.IntegerField(choices=DIFFICULTY_CHOICES, blank=True, null=True)
energy_cost = models.IntegerField(blank=True, null=True, help_text='Energy cost to start a run')
xp = models.IntegerField(blank=True, null=True, help_text='XP gained by fully clearing the level')
frontline_slots = models.IntegerField(
default=5,
help_text='Serves as general slots if dungeon does not have front/back lines'
)
backline_slots = models.IntegerField(blank=True, null=True, help_text='Leave null for normal dungeons')
max_slots = models.IntegerField(
blank=True,
null=True,
help_text='Maximum monsters combined front/backline. Not required if backline not specified.'
)
class Meta:
ordering = ('difficulty', 'floor')
unique_together = ('dungeon', 'floor', 'difficulty')
def __str__(self):
return f'{self.dungeon_id} {self.floor} - {self.get_difficulty_display()}'
class GuideBase(models.Model):
short_text = models.TextField(blank=True, default='')
long_text = models.TextField(blank=True, default='')
last_updated = models.DateTimeField(auto_now=True)
edited_by = models.ForeignKey(User, null=True, blank=True, on_delete=models.SET_NULL, editable=False)
class Meta:
abstract = True
class MonsterGuide(GuideBase):
monster = models.OneToOneField(Monster, on_delete=models.CASCADE)
def __str__(self):
return f'Monster Guide - {self.monster}'
class Meta:
ordering = ['monster__name']
|
# coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import os
from textwrap import dedent
from pants.backend.core.tasks.listtargets import ListTargets
from pants.backend.jvm.artifact import Artifact
from pants.backend.jvm.repository import Repository
from pants.backend.jvm.scala_artifact import ScalaArtifact
from pants.backend.jvm.targets.java_library import JavaLibrary
from pants.build_graph.build_file_aliases import BuildFileAliases
from pants.build_graph.target import Target
from pants_test.tasks.task_test_base import ConsoleTaskTestBase
class BaseListTargetsTest(ConsoleTaskTestBase):
@classmethod
def task_type(cls):
return ListTargets
class ListTargetsTestEmpty(BaseListTargetsTest):
def test_list_all_empty(self):
self.assertEqual('', self.execute_task())
self.assertEqual('', self.execute_task(options={'sep': '###'}))
self.assertEqual([], self.execute_console_task())
class ListTargetsTest(BaseListTargetsTest):
@property
def alias_groups(self):
return BuildFileAliases(
targets={
'target': Target,
'java_library': JavaLibrary,
},
objects={
'pants': lambda x: x,
'artifact': Artifact,
'scala_artifact': ScalaArtifact,
'public': Repository(name='public',
url='http://maven.example.com',
push_db_basedir='/tmp'),
}
)
def setUp(self):
super(ListTargetsTest, self).setUp()
# Setup a BUILD tree for various list tests
class Lib(object):
def __init__(self, name, provides=False):
self.name = name
self.provides = dedent("""
artifact(
org='com.example',
name='{0}',
repo=public
)
""".format(name)).strip() if provides else 'None'
def create_library(path, *libs):
libs = libs or [Lib(os.path.basename(os.path.dirname(self.build_path(path))))]
for lib in libs:
target = "java_library(name='{name}', provides={provides}, sources=[])\n".format(
name=lib.name, provides=lib.provides)
self.add_to_build_file(path, target)
create_library('a')
create_library('a/b', Lib('b', provides=True))
create_library('a/b/c', Lib('c'), Lib('c2', provides=True), Lib('c3'))
create_library('a/b/d')
create_library('a/b/e', Lib('e1'))
self.add_to_build_file('f', dedent('''
target(
name='alias',
dependencies=[
'a/b/c:c3',
'a/b/d:d',
],
description = """
Exercises alias resolution.
Further description.
""",
)
'''))
def test_list_path(self):
self.assert_console_output('a/b:b', targets=[self.target('a/b')])
def test_list_siblings(self):
self.assert_console_output('a/b:b', targets=self.targets('a/b:'))
self.assert_console_output('a/b/c:c', 'a/b/c:c2', 'a/b/c:c3',
targets=self.targets('a/b/c/:'))
def test_list_descendants(self):
self.assert_console_output('a/b/c:c', 'a/b/c:c2', 'a/b/c:c3',
targets=self.targets('a/b/c/::'))
self.assert_console_output(
'a/b:b',
'a/b/c:c',
'a/b/c:c2',
'a/b/c:c3',
'a/b/d:d',
'a/b/e:e1',
targets=self.targets('a/b::'))
def test_list_all(self):
self.assert_entries('\n',
'a:a',
'a/b:b',
'a/b/c:c',
'a/b/c:c2',
'a/b/c:c3',
'a/b/d:d',
'a/b/e:e1',
'f:alias')
self.assert_entries(', ',
'a:a',
'a/b:b',
'a/b/c:c',
'a/b/c:c2',
'a/b/c:c3',
'a/b/d:d',
'a/b/e:e1',
'f:alias',
options={'sep': ', '})
self.assert_console_output(
'a:a',
'a/b:b',
'a/b/c:c',
'a/b/c:c2',
'a/b/c:c3',
'a/b/d:d',
'a/b/e:e1',
'f:alias')
def test_list_provides(self):
self.assert_console_output(
'a/b:b com.example#b',
'a/b/c:c2 com.example#c2',
options={'provides': True})
def test_list_provides_customcols(self):
self.assert_console_output(
'/tmp a/b:b http://maven.example.com public com.example#b',
'/tmp a/b/c:c2 http://maven.example.com public com.example#c2',
options={'provides': True,
'provides_columns': 'push_db_basedir,address,repo_url,repo_name,artifact_id'}
)
def test_list_dedups(self):
targets = []
targets.extend(self.targets('a/b/d/::'))
targets.extend(self.target('f:alias').dependencies)
self.assertEquals(3, len(targets), "Expected a duplicate of a/b/d:d")
self.assert_console_output(
'a/b/c:c3',
'a/b/d:d',
targets=targets
)
def test_list_documented(self):
self.assert_console_output(
# Confirm empty listing
targets=[self.target('a/b')],
options={'documented': True},
)
self.assert_console_output(
dedent("""
f:alias
Exercises alias resolution.
Further description.
""").strip(),
options={'documented': True}
)
|
from users.views import *
from .add_leader_form import *
from django.db.utils import IntegrityError
def create_leader_user(request, username):
form = LeaderForm()
if request.method == 'POST':
form = LeaderForm(request.POST)
if form.is_valid():
username = request.POST.get('username')
first_name = request.POST.get('first_name')
last_name = request.POST.get('last_name')
email = request.POST.get('email')
password = get_default_password()
try:
user = LeaderUser.objects.create_user(username=username, first_name=first_name, last_name=last_name,
email=email, password=password)
except IntegrityError as e:
return render(request, 'users/leaderuser_form.html',
{'form': form, 'mail_error': 'The email is not unique!'})
user.set_password(password)
mail_kickoff(user, password)
user.save()
update_session_auth_hash(request, request.user)
return redirect('display_admin', request.user.username)
return render(request, 'users/leaderuser_form.html', {'form': form})
@login_required
def display_leader_detail(request, username):
user = LeaderUser.objects.get(username__iexact=username)
return render(request, 'users/leaderdetail.html', {'user': user})
@login_required
def update_leader_detail(request, username):
user = LeaderUser.objects.get(username__iexact=username)
form_data = {'username': user.username, 'first_name': user.first_name, 'last_name': user.last_name,
'email': user.email,
'password': user.password, 'bio': user.bio}
    form = UpdateLeaderForm(initial=form_data)
    if request.method == 'POST':
        form = UpdateLeaderForm(request.POST, initial=form_data)
        if form.is_valid():
user.first_name = request.POST.get('first_name')
user.last_name = request.POST.get('last_name')
user.email = request.POST.get('email')
pw = request.POST['password']
            if pw and len(pw.strip()) >= 8:
user.set_password(pw)
user.bio = request.POST.get('bio')
user.save()
update_session_auth_hash(request, request.user)
return redirect('display_leader', username)
return render(request, 'users/update_leader_form.html', {'user': user, 'form': form, 'errors': form.errors})
|
""" Convenience Classes building on the base PMAPI extension module """
#
# Copyright (C) 2013-2015 Red Hat
# Copyright (C) 2009-2012 Michael T. Werner
#
# This file is part of the "pcp" module, the python interfaces for the
# Performance Co-Pilot toolkit.
#
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation; either version 2 of the License, or (at your
# option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
# for more details.
#
from sys import stderr
from ctypes import c_int, c_uint, c_char_p, cast, POINTER
from pcp.pmapi import (pmContext, pmResult, pmValueSet, pmValue, pmDesc,
pmErr, pmOptions, timeval)
from cpmapi import (PM_CONTEXT_HOST, PM_CONTEXT_ARCHIVE, PM_INDOM_NULL,
PM_IN_NULL, PM_ID_NULL, PM_SEM_COUNTER, PM_ERR_EOL, PM_TYPE_DOUBLE)
class MetricCore(object):
"""
    Core metric information that can be queried from the PMAPI.
    PMAPI metrics are unique by name, and MetricCores should be too;
    rarely, some PMAPI metrics with different names may share the same PMID.
    In other words, metrics are unique by (name) and by (name, pmid), and
    usually by (pmid) as well. Note that names here (and only here) are stored
    as byte strings for direct PMAPI access. All dictionaries/caching
    strategies built using the core structure use native strings (i.e., not
    byte strings in python3).
"""
def __init__(self, ctx, name, pmid):
self.ctx = ctx
if type(name) != type(b''):
name = name.encode('utf-8')
self.name = name
self.pmid = pmid
self.desc = None
self.text = None
self.help = None
class Metric(object):
"""
Additional metric information, such as conversion factors and values
several instances of Metric may share a MetricCore instance
"""
##
# constructor
def __init__(self, core):
self._core = core # MetricCore
self._vset = None # pmValueSet member
self._values = None
self._prevvset = None
self._prevValues = None
self._convType = core.desc.contents.type
self._convUnits = None
self._errorStatus = None
self._netValues = None # (instance, name, value)
self._netPrevValues = None # (instance, name, value)
        self._netConvValues = None # (instance, name, value)
##
# core property read methods
def _R_ctx(self):
return self._core.ctx
def _R_name(self):
return self._core.name.decode()
def _R_pmid(self):
return self._core.pmid
def _R_desc(self):
return self._core.desc
def _R_text(self):
return self._core.text
def _R_help(self):
return self._core.help
def get_vlist(self, vset, vlist_idx):
""" Return the vlist[vlist_idx] of vset[vset_idx] """
listptr = cast(vset.contents.vlist, POINTER(pmValue))
return listptr[vlist_idx]
def get_inst(self, vset, vlist_idx):
""" Return the inst for vlist[vlist_idx] of vset[vset_idx] """
return self.get_vlist(vset, vset_idx, vlist_idx).inst
def computeValues(self, inValues):
""" Extract the value for a singleton or list of instances
as a triple (inst, name, val)
"""
vset = inValues
ctx = self.ctx
instD = ctx.mcGetInstD(self.desc.contents.indom)
valL = []
for i in range(vset.numval):
instval = self.get_vlist(vset, i)
try:
name = instD[instval.inst]
except KeyError:
name = ''
outAtom = self.ctx.pmExtractValue(
vset.valfmt, instval, self.desc.type, self._convType)
if self._convUnits:
desc = (POINTER(pmDesc) * 1)()
desc[0] = self.desc
outAtom = self.ctx.pmConvScale(
self._convType, outAtom, desc, 0, self._convUnits)
value = outAtom.dref(self._convType)
valL.append((instval, name, value))
return valL
def _find_previous_instval(self, index, inst, pvset):
""" Find a metric instance in the previous resultset """
        if index < pvset.numval:
pinstval = self.get_vlist(pvset, index)
if inst == pinstval.inst:
return pinstval
for pi in range(pvset.numval):
pinstval = self.get_vlist(pvset, pi)
if inst == pinstval.inst:
return pinstval
return None
def convertValues(self, values, prevValues, delta):
""" Extract the value for a singleton or list of instances as a
triple (inst, name, val) for COUNTER metrics with the value
delta calculation applied (for rate conversion).
"""
if self.desc.sem != PM_SEM_COUNTER:
return self.computeValues(values)
if prevValues == None:
return None
pvset = prevValues
vset = values
ctx = self.ctx
instD = ctx.mcGetInstD(self.desc.contents.indom)
valL = []
for i in range(vset.numval):
instval = self.get_vlist(vset, i)
pinstval = self._find_previous_instval(i, instval.inst, pvset)
if pinstval == None:
continue
try:
name = instD[instval.inst]
except KeyError:
name = ''
outAtom = self.ctx.pmExtractValue(vset.valfmt,
instval, self.desc.type, PM_TYPE_DOUBLE)
poutAtom = self.ctx.pmExtractValue(pvset.valfmt,
pinstval, self.desc.type, PM_TYPE_DOUBLE)
if self._convUnits:
desc = (POINTER(pmDesc) * 1)()
desc[0] = self.desc
outAtom = self.ctx.pmConvScale(
PM_TYPE_DOUBLE, outAtom, desc, 0, self._convUnits)
poutAtom = self.ctx.pmConvScale(
PM_TYPE_DOUBLE, poutAtom, desc, 0, self._convUnits)
value = outAtom.dref(PM_TYPE_DOUBLE)
pvalue = poutAtom.dref(PM_TYPE_DOUBLE)
if (value >= pvalue):
valL.append((instval, name, (value - pvalue) / delta))
return valL
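    # Rate-conversion sketch for convertValues (illustrative numbers only, not
    # from any real fetch): with a previous counter sample of 100, a current
    # sample of 160 and a 4-second delta, the reported value is
    # (160 - 100) / 4 = 15 units per second.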
def _R_values(self):
return self._values
def _R_prevValues(self):
return self._prevValues
def _R_convType(self):
return self._convType
def _R_convUnits(self):
return self._convUnits
def _R_errorStatus(self):
return self._errorStatus
def _R_netConvValues(self):
return self._netConvValues
def _R_netPrevValues(self):
if not self._prevvset:
return None
self._netPrevValues = self.computeValues(self._prevvset)
return self._netPrevValues
def _R_netValues(self):
if not self._vset:
return None
self._netValues = self.computeValues(self._vset)
return self._netValues
    def _W_values(self, values):
        self._prevValues = self._values
        self._values = values
        self._netPrevValues = self._netValues
        self._netValues = None
def _W_convType(self, value):
self._convType = value
def _W_convUnits(self, value):
self._convUnits = value
# interface to properties in MetricCore
ctx = property(_R_ctx, None, None, None)
name = property(_R_name, None, None, None)
pmid = property(_R_pmid, None, None, None)
desc = property(_R_desc, None, None, None)
text = property(_R_text, None, None, None)
help = property(_R_help, None, None, None)
# properties specific to this instance
values = property(_R_values, _W_values, None, None)
prevValues = property(_R_prevValues, None, None, None)
convType = property(_R_convType, _W_convType, None, None)
convUnits = property(_R_convUnits, _W_convUnits, None, None)
errorStatus = property(_R_errorStatus, None, None, None)
netValues = property(_R_netValues, None, None, None)
netPrevValues = property(_R_netPrevValues, None, None, None)
netConvValues = property(_R_netConvValues, None, None, None)
def metricPrint(self):
indomstr = self.ctx.pmInDomStr(self.desc.indom)
print(" ", "indom:", indomstr)
instD = self.ctx.mcGetInstD(self.desc.indom)
for inst, name, val in self.netValues:
print(" ", name, val)
def metricConvert(self, delta):
convertedList = self.convertValues(self._vset, self._prevvset, delta)
self._netConvValues = convertedList
return self._netConvValues
class MetricCache(pmContext):
"""
    A cache of MetricCores is kept to reduce calls into the PMAPI library;
    this also slightly reduces the memory footprint of Metric instances
    that share a common MetricCore.
    A cache of instance domain information is also kept, which further
    reduces calls into the PMAPI and reduces the memory footprint of
    Metric objects that share a common instance domain.
"""
##
# overloads
def __init__(self, typed = PM_CONTEXT_HOST, target = "local:"):
pmContext.__init__(self, typed, target)
self._mcIndomD = {}
self._mcByNameD = {}
self._mcByPmidD = {}
##
# methods
def mcGetInstD(self, indom):
""" Query the instance : instance_list dictionary """
return self._mcIndomD[indom]
def _mcAdd(self, core):
""" Update the dictionary """
indom = core.desc.contents.indom
if indom not in self._mcIndomD:
if c_int(indom).value == c_int(PM_INDOM_NULL).value:
instmap = { PM_IN_NULL : b'PM_IN_NULL' }
else:
if self._type == PM_CONTEXT_ARCHIVE:
instL, nameL = self.pmGetInDomArchive(core.desc)
else:
instL, nameL = self.pmGetInDom(core.desc)
if instL != None and nameL != None:
instmap = dict(zip(instL, nameL))
else:
instmap = {}
self._mcIndomD.update({indom: instmap})
self._mcByNameD.update({core.name.decode(): core})
self._mcByPmidD.update({core.pmid: core})
def mcGetCoresByName(self, nameL):
""" Update the core (metric id, description,...) list """
coreL = []
missD = None
errL = None
# lookup names in cache
for index, name in enumerate(nameL):
if type(name) == type(b''):
name = name.decode()
# lookup metric core in cache
core = self._mcByNameD.get(name)
if not core:
# cache miss
if not missD:
missD = {}
missD.update({name: index})
coreL.append(core)
# some cache lookups missed, fetch pmids and build missing MetricCores
if missD:
idL, errL = self.mcFetchPmids(missD.keys())
for name, pmid in idL:
if pmid == PM_ID_NULL:
# fetch failed for the given metric name
if not errL:
errL = []
errL.append(name)
else:
# create core pmDesc
newcore = self._mcCreateCore(name, pmid)
# update core ref in return list
coreL[missD[name]] = newcore
return coreL, errL
def _mcCreateCore(self, name, pmid):
""" Update the core description """
newcore = MetricCore(self, name, pmid)
try:
newcore.desc = self.pmLookupDesc(pmid)
except pmErr as error:
fail = "%s: pmLookupDesc: %s" % (error.progname(), error.message())
            print(fail, file=stderr)
raise SystemExit(1)
# insert core into cache
self._mcAdd(newcore)
return newcore
def mcFetchPmids(self, nameL):
""" Update the core metric ids. note: some names have identical pmids """
errL = None
nameA = (c_char_p * len(nameL))()
for index, name in enumerate(nameL):
if type(name) != type(b''):
name = name.encode('utf-8')
nameA[index] = c_char_p(name)
try:
pmidArray = self.pmLookupName(nameA)
if len(pmidArray) < len(nameA):
missing = "%d of %d metric names" % (len(pmidArray), len(nameA))
print >> stderr, "Cannot resolve", missing
raise SystemExit(1)
except pmErr as error:
fail = "%s: pmLookupName: %s" % (error.progname(), error.message())
            print(fail, file=stderr)
raise SystemExit(1)
return zip(nameL, pmidArray), errL
class MetricGroup(dict):
"""
    Manages a group of metrics for fetching their values.
    A MetricGroup is a dictionary of Metric objects, for which data can
    be fetched from a target system using a single call to pmFetch.
    The Metric objects are indexed by the metric name.
    pmFetch fetches data for a list of pmIDs, so there is also a shadow
    dictionary keyed by pmID, along with a shadow list of pmIDs.
"""
##
# property read methods
def _R_contextCache(self):
return self._ctx
def _R_pmidArray(self):
return self._pmidArray
def _R_timestamp(self):
return self._result.contents.timestamp
def _R_result(self):
return self._result
def _R_prevTimestamp(self):
return self._prev.contents.timestamp
def _R_prev(self):
return self._prev
##
# property write methods
def _W_result(self, pmresult):
self._prev = self._result
self._result = pmresult
##
# property definitions
contextCache = property(_R_contextCache, None, None, None)
pmidArray = property(_R_pmidArray, None, None, None)
result = property(_R_result, _W_result, None, None)
timestamp = property(_R_timestamp, None, None, None)
prev = property(_R_prev, None, None, None)
prevTimestamp = property(_R_prevTimestamp, None, None, None)
##
# overloads
def __init__(self, contextCache, inL = []):
dict.__init__(self)
self._ctx = contextCache
self._pmidArray = None
self._result = None
self._prev = None
self._altD = {}
self.mgAdd(inL)
def __setitem__(self, attr, value = []):
if attr in self:
raise KeyError("metric group with that key already exists")
else:
dict.__setitem__(self, attr, MetricGroup(self, inL = value))
##
# methods
def mgAdd(self, nameL):
""" Create the list of Metric(s) """
coreL, errL = self._ctx.mcGetCoresByName(nameL)
for core in coreL:
metric = Metric(core)
self.update({metric.name: metric})
self._altD.update({metric.pmid: metric})
n = len(self)
self._pmidArray = (c_uint * n)()
for x, key in enumerate(self.keys()):
self._pmidArray[x] = c_uint(self[key].pmid)
def mgFetch(self):
""" Fetch the list of Metric values. Save the old value. """
try:
self.result = self._ctx.pmFetch(self._pmidArray)
# update the result entries in each metric
result = self.result.contents
for i in range(self.result.contents.numpmid):
pmid = self.result.contents.get_pmid(i)
vset = self.result.contents.get_vset(i)
self._altD[pmid]._prevvset = self._altD[pmid]._vset
self._altD[pmid]._vset = vset
except pmErr as error:
if error.args[0] == PM_ERR_EOL:
raise SystemExit(0)
fail = "%s: pmFetch: %s" % (error.progname(), error.message())
            print(fail, file=stderr)
raise SystemExit(1)
def mgDelta(self):
"""
Sample delta - used for rate conversion calculations, which
requires timestamps from successive samples.
"""
if self._prev != None:
prevTimestamp = float(self.prevTimestamp)
else:
prevTimestamp = 0.0
return float(self.timestamp) - prevTimestamp
class MetricGroupPrinter(object):
"""
Handles reporting of MetricGroups within a GroupManager.
This object is called upon at the end of each fetch when
new values are available. It is also responsible for
producing any initial (or on-going) header information
that the tool may wish to report.
"""
def report(self, manager):
""" Base implementation, all tools should override """
for group_name in manager.keys():
group = manager[group_name]
for metric_name in group.keys():
group[metric_name].metricPrint()
def convert(self, manager):
""" Do conversion for all metrics across all groups """
for group_name in manager.keys():
group = manager[group_name]
delta = group.mgDelta()
for metric_name in group.keys():
group[metric_name].metricConvert(delta)
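# A minimal printer sketch (hypothetical subclass, not part of the original
# module): it overrides report() to show rate-converted values, relying on
# the netConvValues populated by convert() above.
class _ExampleRatePrinter(MetricGroupPrinter):
    """ Illustrative only: print name/value pairs after rate conversion """
    def report(self, manager):
        self.convert(manager)   # populate netConvValues on every metric
        for group_name in manager.keys():
            group = manager[group_name]
            for metric_name in group.keys():
                values = group[metric_name].netConvValues
                if not values:
                    continue
                for _inst, name, value in values:
                    print(metric_name, name, value)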
class MetricGroupManager(dict, MetricCache):
"""
Manages a dictionary of MetricGroups which can be pmFetch'ed
inherits from MetricCache, which inherits from pmContext
"""
##
# property access methods
def _R_options(self): # command line option object
return self._options
def _W_options(self, options):
self._options = options
def _R_default_delta(self): # default interval unless command line set
return self._default_delta
def _W_default_delta(self, delta):
self._default_delta = delta
def _R_default_pause(self): # default reporting delay (archives only)
return self._default_pause
def _W_default_pause(self, pause):
self._default_pause = pause
def _W_printer(self, printer): # helper class for reporting
self._printer = printer
def _R_counter(self): # fetch iteration count, useful for printer
return self._counter
##
# property definitions
options = property(_R_options, _W_options, None, None)
default_delta = property(_R_default_delta, _W_default_delta, None, None)
default_pause = property(_R_default_pause, _W_default_pause, None, None)
printer = property(None, _W_printer, None, None)
counter = property(_R_counter, None, None, None)
##
# overloads
def __init__(self, typed = PM_CONTEXT_HOST, target = "local:"):
dict.__init__(self)
MetricCache.__init__(self, typed, target)
self._options = None
self._default_delta = timeval(1, 0)
self._default_pause = None
self._printer = None
self._counter = 0
def __setitem__(self, attr, value = []):
if attr in self:
raise KeyError("metric group with that key already exists")
else:
dict.__setitem__(self, attr, MetricGroup(self, inL = value))
@classmethod
def builder(build, options, argv):
""" Helper interface, simple PCP monitor argument parsing. """
manager = build.fromOptions(options, argv)
manager._default_delta = timeval(options.delta, 0)
manager._options = options
return manager
##
# methods
def _computeSamples(self):
""" Calculate the number of samples we are to take.
This is based on command line options --samples but also
must consider --start, --finish and --interval. If none
of these were presented, a zero return means "infinite".
"""
if self._options == None:
return 0 # loop until interrupted or PM_ERR_EOL
samples = self._options.pmGetOptionSamples()
if samples != None:
return samples
if self._options.pmGetOptionFinishOptarg() == None:
return 0 # loop until interrupted or PM_ERR_EOL
origin = self._options.pmGetOptionOrigin()
finish = self._options.pmGetOptionFinish()
delta = self._options.pmGetOptionInterval()
if delta == None:
delta = self._default_delta
period = (delta.tv_sec * 1.0e6 + delta.tv_usec) / 1e6
window = float(finish.tv_sec - origin.tv_sec)
window += float((finish.tv_usec - origin.tv_usec) / 1e6)
window /= period
        return int(window + 0.5) # round to the nearest positive sample count
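    # Illustrative arithmetic for _computeSamples (hypothetical option values):
    # with a 2-second interval and a 10-second span between --start and
    # --finish, window/period is 10 / 2 = 5.0, so int(5.0 + 0.5) = 5 samples.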
def _computePauseTime(self):
""" Figure out how long to sleep between samples.
This needs to take into account whether we were explicitly
asked for a delay (independent of context type, --pause),
whether this is an archive or live context, and the sampling
--interval (including the default value, if none requested).
"""
if self._default_pause != None:
return self._default_pause
if self.type == PM_CONTEXT_ARCHIVE:
self._default_pause = timeval(0, 0)
elif self._options != None:
pause = self._options.pmGetOptionInterval()
if pause != None:
self._default_pause = pause
else:
self._default_pause = self._default_delta
else:
self._default_pause = self._default_delta
return self._default_pause
def fetch(self):
""" Perform fetch operation on all of the groups. """
for group in self.keys():
self[group].mgFetch()
def run(self):
""" Using options specification, loop fetching and reporting,
pausing for the requested time interval between updates.
Transparently handles archive/live mode differences.
Note that this can be different to the sampling interval
in archive mode, but is usually the same as the sampling
interval in live mode.
"""
samples = self._computeSamples()
timer = self._computePauseTime()
try:
self.fetch()
while True:
if samples == 0 or self._counter <= samples:
self._printer.report(self)
if self._counter == samples:
break
                    # We need two fetches to report rate-converted counter
                    # metrics, so the actual number of output samples would
                    # otherwise be one less than requested with the '-s' and
                    # '-T' options; counting one extra iteration ('+1') fixes this.
self._counter += 1
timer.sleep()
self.fetch()
except SystemExit as code:
return code
except KeyboardInterrupt:
pass
return 0
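# Typical driver sketch (illustrative only; assumes an `opts` pmOptions object
# prepared by the calling tool and uses the base MetricGroupPrinter):
#     manager = MetricGroupManager.builder(opts, sys.argv)
#     manager['kernel'] = ['kernel.all.load', 'kernel.all.cpu.user']
#     manager.printer = MetricGroupPrinter()
#     sts = manager.run()
# builder() creates the context from the command line options, each dictionary
# assignment creates a MetricGroup from a list of metric names, and run()
# loops fetch/report until the sample count or the archive end is reached.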
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='Word',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('finnish', models.CharField(unique=True, max_length=128)),
('english', models.CharField(max_length=128)),
('chinese', models.CharField(blank=True, max_length=128)),
('sentence', models.CharField(blank=True, max_length=256)),
('note', models.CharField(blank=True, max_length=256)),
('category', models.CharField(blank=True, max_length=128)),
('slug', models.SlugField(unique=True)),
('times_practiced', models.PositiveIntegerField(default=1)),
],
options={
},
bases=(models.Model,),
),
]
|
# Copyright 2014 Douglas RAILLARD
#
# This file is part of BrownBat.
#
# BrownBat is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# BrownBat is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with BrownBat. If not, see <http://www.gnu.org/licenses/>.
"""
.. moduleauthor:: Douglas RAILLARD <[email protected]>
This module mostly provides base classes intended to be subclassed for building
language-specific source code generation libraries.
They implement functionality related to operator overloading that can be used in any language.
Every class representing a source code construct is known as a node.
The following helpers functions are provided:
* :func:`listify`: create a list from an iterable or a single element.
* :func:`format_string`: format a string according to the given convention (camel case, upper case, etc.).
* :func:`strip_starting_blank_lines`: strip the blank lines at the beginning of a multiline string.
The following classes are provided:
* :class:`Indentation`: manage the indentation level in the code generator.
* :class:`NonIterable`: inheriting that class allows a class which can be considered as iterable to be considered as a non iterable by :func:`listify`.
* :class:`NodeMeta`: metaclass of all class representing some source code constructs.
* :class:`NodeABC`: abstract base class of all class representing some source code constructs.
* :class:`NodeBase`: base class of almost all class representing some source code constructs.
* :class:`NodeAttrProxy`: proxy class that forwards the calls to the :class:`NodeABC` API to an attribute which is itself a :class:`NodeABC`. It implements composition.
* :class:`EnsureNode`: descriptor used to build attributes that guarantee that they contain an instance of NodeABC.
* :class:`DelegatedAttribute`: descriptor used to delegate an attribute to another instance which has the given attribute name.
* :class:`NodeViewBase`: base class for class representing a view of another node (for example a variable declaration is a view of a variable).
* :class:`PhantomNode`: class which can be used as an empty placeholder when a node is required.
* :class:`NodeContainerBase`: base class for node containers. It mostly implements operator overloading.
* :class:`TokenListABC`: abstract base class for token lists. This is a node that can contain a list of any object that can be used as a string, and concatenate them when printed.
* :class:`DelegatedTokenListBase`: base class for a token list that uses a specific attribute to really hold the token list instance (thus implementing composition instead of inheritance).
* :class:`TokenListBase`: base class for a token list.
* :class:`IndentedTokenListBase`: base class for a token list which indents it content when printed.
* :class:`IndentedDelegatedTokenListBase`: mix of :class:`IndentedTokenListBase` and :class:`DelegatedTokenListBase`.
* :class:`BacktraceBase`: base class for special token list that output a simplified backtrace of Python code that was used to build the instance. Useful when trying to debug the code generator.
"""
import collections
import numbers
import abc
import inspect
import copy
import functools
import os
def listify(iterable_or_single_elem):
"""Create a list out of:
* an iterable object: the result will be like ``list(iterable_or_single_elem)``
    * an object which cannot be iterated over: return a list with only one item (just the object)
* an object which is iterable, but also a subclass of :class:`NonIterable`:
return a list with just the object, as if it was not iterable.
"""
if iterable_or_single_elem is None:
return []
# We exclude iterables such as strings or NonIterable (StmtContainer for example)
# because we want to keep them as one object and not split them
if isinstance(iterable_or_single_elem, collections.Iterable) \
and not isinstance(iterable_or_single_elem, (str, NonIterable)):
return list(iterable_or_single_elem)
else:
return [iterable_or_single_elem]
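# Illustrative behaviour of listify (example values only):
#   listify([1, 2, 3])  -> [1, 2, 3]
#   listify('abc')      -> ['abc']    (strings are kept whole)
#   listify(42)         -> [42]
#   listify(None)       -> []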
def format_string(string, style, separator="_"):
""" Format a string according to a convention.
    It can be used to write identifier names in a unified format before applying a naming convention.
    :param string: the string to be modified. It must be in a format where the word separator is always the same.
:param style: the convention. It can be one of:
* UpperCamelCase
* lowerCamelCase
* lower_underscore_case
* UPPER_UNDERSCORE_CASE
    :param separator: the word separator used to split the words apart before applying the convention.
It defaults to '_'.
"""
if isinstance(string, collections.Iterable) and not isinstance(string, (str, NonIterable)):
token_list = string
else:
token_list = str(string).split(separator)
# If there is only one token in the list and in case it is an empty
    # string, we don't want to replace it with a _
if len(token_list) != 1:
for i, token in enumerate(token_list):
if not token:
token_list[i] = separator
if style == "UpperCamelCase":
return "".join(token.capitalize() for token in token_list)
if style == "lowerCamelCase":
first_word = token_list[0].lower()
remain_list = token_list[1:]
return first_word+"".join(token.capitalize() for token in remain_list)
if style == "lower_underscore_case":
return "_".join(token.lower() for token in token_list)
if style == "UPPER_UNDERSCORE_CASE":
return "_".join(token.upper() for token in token_list)
def strip_starting_blank_lines(snippet):
"""Strip blank lines at the beginning of a multiline string."""
last_new_line_pos = 0
for position, char in enumerate(snippet):
if char=='\n':
last_new_line_pos = position
elif char!='\t' and char!=' ' and char!='\v':
break
# Only keep one new line at the beginning, to avoid multiple blank lines
return snippet[last_new_line_pos:]
class Indentation:
"""This class manages the indentation in the source code output.
Instances can be printed to give the string to put at the beginning of a new indented line.
>>> idt = Indentation()
>>> idt.indent()
>>> print('*'+str(idt)+'indented Hello World')
    *    indented Hello World
"""
# Default indentation style (4 spaces)
    indentation_string = '    '
@classmethod
def ensure_idt(cls, idt):
"""Create a new indentation instance if *idt* is None,
or return *idt* if it is already an :class:`Indentation` instance.
"""
if idt is None:
idt = cls()
elif isinstance(idt, numbers.Integral):
idt = cls(idt)
elif isinstance(idt, str):
idt = cls(indentator=idt)
return idt
def __init__(self, level=0, indentator=None):
"""
:param level: the initial indentation level
:type level: int
:param indentator: the string used to display indentation.
It defaults to the class attribute *indentation_string* which is four spaces.
"""
self.indentation_level = level
# If an indentation is string is given, override the classwide default with
# an instance-local string
if indentator is not None:
self.indentation_string = indentator
def indent(self, level=1):
"""Increase the indentation level by *level* levels."""
self.indentation_level += level
def dedent(self, level=1):
"""Decrease the indentation level by *level* levels."""
self.indentation_level -= level
def __str__(self):
"""Return the string to be used at the beginning of a line to display the indentation."""
return self.indentation_string * self.indentation_level
class NonIterable:
""" Inheriting from this class will prevent a class to be considered as
:class:`collections.Iterable` by :func:`listify`.
"""
pass
class NodeMeta(abc.ABCMeta):
"""Meta class used for every node, i.e. every class representing source code constructs.
Currently, it only does a bit of black magic on :meth:`NodeABC.inline_str` and :meth:`NodeABC.self_inline_str` methods:
    it creates a wrapper around them that calls *inline_str_filter*, if it exists, on their return string, to
    let the user apply some naming convention at the last stage.
"""
def __new__(meta, name, bases, dct):
# Add automatic 'inheritance' for __format_string class attribute
attr_name = '_'+name+'__format_string'
if bases and not attr_name in dct:
try:
dct[attr_name] = bases[0].__dict__['_'+bases[0].__name__+'__format_string']
except KeyError:
pass
# Wrap inline_str function to allow automatic filtering on its output
def make_wrapper(wrapped_fun):
@functools.wraps(wrapped_fun)
def wrapper_fun(self, *args, **kwargs):
result = wrapped_fun(self, *args, **kwargs)
try:
filter_fun = self.inline_str_filter
except AttributeError:
# Just return the string as is, no filter hook is installed
return result
else:
# Call the filter on the resulting string
return filter_fun(result)
return wrapper_fun
for stringify_fun_name in ['inline_str', 'self_inline_str']:
if stringify_fun_name in dct:
wrapped_fun = dct[stringify_fun_name]
dct[stringify_fun_name] = make_wrapper(wrapped_fun)
return super().__new__(meta, name, bases, dct)
class NodeABC(metaclass=NodeMeta):
"""This class is an Abstract Base Class describing the most basic API evey node should conform to."""
__format_string = ''
@abc.abstractmethod
def inline_str(self, idt=None):
"""This function is called to print the content of the node in an inline context.
This can be for example when the node is printed inside an expression.
This function should not try to print a preceding new line or indentation string.
"""
pass
@abc.abstractmethod
def freestanding_str(self, idt=None):
"""This function is called to print the content of the node in a freestanding context.
        This can be for example when the node is printed directly in the source file.
This function should print the preceding new line and indentation if the source code constructs
requires it.
"""
pass
@abc.abstractmethod
def adopt_node(self, child):
pass
class NodeAttrProxy(NodeABC):
"""This class is a proxy that redirects calls to the :class:`NodeABC` API to a given
attribute of a given instance.
    It creates stubs that allow transparent composition for the most limited subset of the APIs
    provided by this library, to avoid getting into overly complex situations.
    This class should really be used only when it enables a lot of code to be factored out. A design based
    on hooks implemented in subclasses and called by a base class is preferable in most cases where you
    would be tempted to use this proxy.
"""
def __init__(self, obj, attr_name):
self.obj = obj
self.attr_name = attr_name
def inline_str(self, idt=None):
return getattr(self.obj, self.attr_name).inline_str(idt)
def freestanding_str(self, idt=None):
return getattr(self.obj, self.attr_name).freestanding_str(idt)
def adopt_node(self, child):
return getattr(self.obj, self.attr_name).adopt_node(child)
class EnsureNode:
"""This class is a descriptor that makes sure that the attribute that uses it holds a reference
to an instance of one of the classes given in *node_classinfo*.
    When set, this descriptor checks whether the given object is indeed an instance of one of the *node_classinfo* classes.
    If not, it calls *node_factory* to build an object and stores its return value. Therefore,
the content of the attribute using this descriptor is always some instance of the classes
contained in *node_classinfo*. This descriptor is used as a gatekeeper to be able to make some assumptions
    on the type of data held by the attribute.
.. note:: The *node_classinfo* always contains the class :class:`NodeABC`.
"""
def __init__(self, storage_attr_name, node_factory, node_classinfo=()):
"""
:param storage_attr_name: the underlying attribute used to store the object.
:param node_factory: the factory called when someone tries to store a non :class:`NodeABC` inside the attribute.
        :param node_classinfo: this is a tuple that contains classes.
The value stored in the attribute is checked against this tuple using :func:`isinstance` to
determine if the factory should be used. This always contains at least :class:`NodeABC`
"""
self.storage_attr_name = storage_attr_name
self.node_factory = node_factory
node_classinfo = listify(node_classinfo)+[NodeABC]
if inspect.isclass(self.node_factory):
node_classinfo.append(self.node_factory)
node_classinfo = tuple(node_classinfo)
self.node_classinfo = node_classinfo
def __get__(self, instance, owner):
if instance is not None:
return instance.__dict__[self.storage_attr_name]
# If the descriptor is called as a class attribute, it
# just returns itself, to allow the world to see that it
# is a descriptor
else:
return self
def __set__(self, instance, value):
if not isinstance(value, self.node_classinfo):
value = self.node_factory(value)
instance.__dict__[self.storage_attr_name] = value
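# Usage sketch for EnsureNode (hypothetical attribute declaration in a node
# class defined further down; the factory shown is illustrative):
#   class SomeNode(NodeBase):
#       comment = EnsureNode('_comment', node_factory=lambda x: PhantomNode())
# Assigning any non-NodeABC value to `some_node.comment` then stores the
# factory's return value, so the attribute always holds a NodeABC instance.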
class NodeBase(NodeABC):
"""This class is the base classes of most nodes.
It provides some default implementations for methods of :class:`NodeABC`.
"""
@classmethod
def ensure_node(cls, obj, factory=None):
"""Ensure that the given object *obj* is an instance of the class this method is called from or of :class:`NodeABC`
, and if not, tries to build a node from it using the class this class method is called from or *factory*.
.. note:: You should better use the :class:`EnsureNode` descriptor when possible, instead of making a use of
this class method.
        .. warning:: Not every class supports being called with only one parameter, so a call to this
        class method is not guaranteed to succeed.
:param obj: the object to build a node from.
:param factory: an optional factory used to build the node from *obj*. If not provided, the class this
        method is called from is called with *obj* as its first and only parameter.
"""
if isinstance(obj, (cls, NodeABC)):
return obj
else:
if factory is not None:
return factory(obj)
else:
return cls(obj)
def __init__(self, comment=None, side_comment=None, parent=None):
""" All of the paramaters should be used as keyword arguments, because they are forwarded from
the children classes and the order at the arrival is not guaranteed.
:param comment: a comment node that will be printed next to the current node when the source code of
the node is generated. Usually, it is a block comment printed before the node
in languages that supports them. This comment is printed by the containers such as
:class:`NodeContainerBase`, so it does not require any support from the class.
:param side_comment: a comment that will be printed just by the current node when the source code of
the node is generated. Usually, it is a one line comment, printed right to the
node. Be aware that this parameter is used by the class in whatever way it wants to,
and there is no guarantee it will be printed at all.
"""
# Should be EnsureNode descriptors with factory using phantom_node when given None in derived classes
self.comment = comment
# Should be EnsureNode descriptors with factory using phantom_node when given None in derived classes
self.side_comment = side_comment
        # We don't use try: ... except: to avoid catching exceptions
        # occurring inside the adopt_node call
if parent is not None:
if hasattr(parent, 'adopt_node'):
parent.adopt_node(self)
else:
raise NotImplementedError("The given parent does not support child adoption")
def freestanding_str(self, idt=None):
"""See :class:`NodeABC` for the role of this function.
This implementation just calls *inline_str* and prepends a new line and indentation string.
"""
idt = Indentation.ensure_idt(idt)
snippet = self.inline_str(idt)
# Do not output anything if the string is empty
if snippet:
return '\n'+str(idt)+snippet
else:
return ''
def __str__(self, idt=None):
"""This implementation tries to print the node by probing the object for some methods:
1. *decl()*: it is usually used to return a :class:`NodeViewBase` corresponding to the declaration of the node
2. *defi()*: it is usually used to return a :class:`NodeViewBase` corresponding to the definition of the node
3. *freestanding_str()*: see :class:`NodeABC`
"""
        # We don't use try: ... except: to avoid catching exceptions
        # occurring inside the freestanding_str call
# Try to display a declaration
if hasattr(self, 'decl'):
self_decl = self.decl()
if isinstance(self_decl, NodeABC):
return self_decl.freestanding_str(idt)
# Or a definition
elif hasattr(self, 'defi'):
self_defi = self.defi()
if isinstance(self_defi, NodeABC):
return self_defi.freestanding_str(idt)
else:
return self.freestanding_str(idt)
def adopt_node(self, child):
self.append(child)
class DelegatedAttribute:
"""This class is a descriptor that allows an object to use the value of that attribute of another instance.
For example, the comment attribute of a parent node of a :class:`NodeViewBase` instance is used as the comment
attribute of the :class:`NodeViewBase` instance if the comment attribute was not explicitly set on the
    :class:`NodeViewBase` instance. When that attribute is set, it uses its own object instead of referring to its parent
one.
"""
def __init__(self, attr_name, delegated_to_attr_name, descriptor=None, default_value_list=tuple()):
"""
:param attr_name: the name of the attribute to manage.
:param delegated_to_attr_name: the name of the attribute holding a reference to the other instance also
holding an *attr_name* attribute.
:param descriptor: a descriptor class, in case the attribute should be managed through a descriptor.
This allows basic descriptor chaining.
        :param default_value_list: a list of default values that do not trigger the switch to the local attribute.
For example, if a class set by default a *comment* attribute to None, the attribute
look up should still be made in the other instance. That way, it allows some placeholder
value to be set, without altering the intended behavior.
"""
self.attr_name = attr_name
self.delegated_to_attr_name = delegated_to_attr_name
self.descriptor = descriptor
self.default_value_list = default_value_list
def __get__(self, instance, owner):
if instance is not None:
# If the attribute has been set on the instance, just get it
if instance.__dict__.get('__'+self.attr_name+'_is_set', False):
if self.descriptor is not None:
return self.descriptor.__get__(instance, owner)
else:
return instance.__dict__[self.attr_name]
# Else it means that the attribute has not been set,
# so we delegate to the parent
else:
parent = getattr(instance, self.delegated_to_attr_name)
return getattr(parent, self.attr_name)
# If the descriptor is called as a class attribute, it
# just returns itself, to allow the world to see that it
# is a descriptor
else:
return self
def __set__(self, instance, value):
if self.descriptor is not None:
self.descriptor.__set__(instance, value)
else:
instance.__dict__[self.attr_name] = value
# If the value is one of the default_value_list, do not consider that the attribute was
# set. This allows some code in base classes to set the attribute to None
# by default, and still get the parent's attribute when it is the case
if value not in self.default_value_list:
instance.__dict__['__'+self.attr_name+'_is_set'] = True
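# Usage sketch for DelegatedAttribute (hypothetical; mirrors the comment/parent
# example described in the docstring above):
#   class SomeView(NodeViewBase):
#       comment = DelegatedAttribute('comment', 'parent', default_value_list=(None,))
# Reading `view.comment` falls back to `view.parent.comment` until a real
# (non-default) comment is assigned directly on the view.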
class NodeViewBase(NodeBase):
"""This is the base class of the node that are view of other node.
For example, a variable declaration is a view of the variable, as it only displays
    information already contained in the variable object.
View nodes should store the reference of their parent in a *parent* attribute.
"""
def __init__(self, parent, *args, **kwargs):
self.parent = parent
super().__init__(*args, **kwargs)
def __eq__(self, other):
"""implementation of the equality test between two views:
        it tests to see if they have the same parent and if the two views
are of the exact same type.
"""
return type(self) is type(other) and self.parent is other.parent
class PhantomNode(NodeBase):
"""This class is a node that will be printed as an empty string.
This is intended to be used as a placeholder when a :class:`NodeABC` instance is required.
"""
    # PhantomNode must not call NodeBase.__init__ because it causes infinite
    # recursion when built from NodeBase.__init__
def __init__(self, *args, **kwargs):
self.parent = self
self.comment = self
self.side_comment = self
def inline_str(self, idt=None):
return ''
freestanding_str = inline_str
# Instance used everywhere, instead of creating billions of identical PhantomNode
PHANTOM_NODE = PhantomNode()
class NodeContainerBase(NodeBase, collections.MutableSequence, NonIterable):
"""This is the base class of all the nodes that contains a list of other nodes.
It implements all the logic for operators overloading, and printing the nodes that it takes care of.
It also derives from the :class:`collections.MutableSequence` abstract base class, so it behaves
like a list. The only exception is when given to :func:`listify`, it remains as a single object, because
it also derives from :class:`NonIterable`. This is intended to allow the user to add nodes to it later,
and the result should be taken into account by the consumer that used :func:`listify` on it. If it was not the case,
the consumer using :func:`listify` would end up with a list of nodes frozen at the time :func:`listify` is called.
The other important aspect of this class is that it can guarantee the type of the contained nodes, even when
overloaded operators like *+=* are used. See the *node_classinfo* and *node_factory* constructor arguments.
"""
default_node_classinfo = (NodeABC,)
def __init__(self, node_list=None, node_classinfo=None, node_factory=None, *args, **kwargs):
"""
:param node_list: the list of nodes that the container contains
:param node_classinfo: a tuple of classes used to check the nodes that enters the container.
If a node is not an instance of one of the *node_classinfo* classes, it is
passed to *node_factory*. All of the classes in *node_classinfo* must be
subclasses of :class:`NodeABC`.
:param node_factory: a factory used when an object which is not an instance of one of the classes of
*node_classinfo* tries to enter the container. The return value of this factory
is then allowed inside.
"""
node_classinfo_tuple = tuple(listify(node_classinfo))
for classinfo in node_classinfo_tuple:
if not issubclass(classinfo, NodeABC):
raise ValueError('node_classinfo must be a subclass of NodeABC')
node_list = listify(node_list)
if node_classinfo is None:
self.node_classinfo = self.default_node_classinfo
else:
self.node_classinfo = node_classinfo_tuple
if node_factory is None:
# If the node_classinfo is None, then self.node_classinfo contains default_node_classinfo
# which is only composed of NodeABC, and therefore cannot be used as a factory
if node_classinfo is None:
raise ValueError(
'You must specify a node factory or give a class that can be used as a factory as first item of node_classinfo'
)
# The first element in the tuple is taken as the factory
node_factory = self.node_classinfo[0]
# A wrapper to make sure that the output of the node_factory is
# indeed a NodeABC
def make_node_factory_wrapper(factory):
def wrapper(node):
result = factory(node)
if not isinstance(result, NodeABC):
raise ValueError("The node factory did not give a NodeABC")
else:
return result
return wrapper
self.node_factory = make_node_factory_wrapper(node_factory)
self.node_list = [
item if isinstance(item, self.node_classinfo) else self.node_factory(item)
for item in node_list
]
super().__init__(*args, **kwargs)
def inline_str(self, idt=None):
"""Print all the contained nodes using their *freestanding_str* method,
because a container is a freestanding context.
It also strips the blank lines at the beginning.
"""
snippet = ""
for node in self.node_list:
if hasattr(node, 'comment'):
snippet += node.comment.freestanding_str(idt)
snippet += node.freestanding_str(idt)
return strip_starting_blank_lines(snippet)
def freestanding_str(self, idt=None):
"""Calls super().freestanding_str, and strip the blank lines
at the beginning.
"""
snippet = super().freestanding_str(idt)
return strip_starting_blank_lines(snippet)
def __copy__(self):
cls = type(self)
new_obj = cls.__new__(cls)
new_obj.__dict__.update(self.__dict__)
new_obj.node_list = copy.copy(self.node_list)
new_obj.node_classinfo = copy.copy(self.node_classinfo)
new_obj.node_factory = copy.copy(self.node_factory)
return new_obj
def clear(self):
        # We preserve the object itself; we do not build a new one
self[:] = []
def insert(self, index, value):
elem_list = listify(value)
for i, elem in enumerate(elem_list):
if not isinstance(elem, self.node_classinfo):
elem = self.node_factory(elem)
self.node_list.insert(index+i, elem)
def index(self, *args, **kwargs):
return self.node_list.index(*args, **kwargs)
def count(self, *args, **kwargs):
return self.node_list.count(*args, **kwargs)
def pop(self, *args, **kwargs):
return self.node_list.pop(*args, **kwargs)
def reverse(self):
self.node_list.reverse()
def remove(self, *args, **kwargs):
self.node_list.remove(*args, **kwargs)
@abc.abstractmethod
def __add__(self, other):
return type(self)((self, other))
@abc.abstractmethod
def __radd__(self, other):
return type(self)((other, self))
def __iadd__(self, other):
other_list = listify(other)
typed_other_list = [
item if isinstance(item, self.node_classinfo) else self.node_factory(item)
for item in other_list
]
self.node_list.extend(typed_other_list)
return self
def append(self, other):
self.__iadd__(other)
def extend(self, other_list):
other_list = listify(other_list)
for other in other_list:
self.append(other)
def __mul__(self, other):
if isinstance(other, numbers.Integral):
self_copy = copy.copy(self)
self_copy.node_list = self.node_list * other
return self_copy
else:
return NotImplemented
def __rmul__(self, other):
return self.__mul__(other)
def __imul__(self, other):
if isinstance(other, numbers.Integral):
self.node_list *= other
return self
else:
return NotImplemented
def __contains__(self, item):
return item in self.node_list
def __reversed__(self):
return reversed(self.node_list)
def __getitem__(self, key):
return self.node_list[key]
def __setitem__(self, key, value):
if not isinstance(value, self.node_classinfo):
value = self.node_factory(value)
self.node_list[key] = value
def __delitem__(self, key):
del self.node_list[key]
def __len__(self):
return len(self.node_list)
def __iter__(self):
return iter(self.node_list)
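# Sketch of a concrete container (hypothetical; real subclasses such as the
# StmtContainer mentioned in listify's comments live in the language-specific
# modules built on top of this one). A subclass mainly has to pick a
# node_classinfo/node_factory and provide the abstract __add__/__radd__:
#   class StmtContainer(NodeContainerBase):
#       def __init__(self, node_list=None, **kwargs):
#           super().__init__(node_list, node_classinfo=TokenListBase, **kwargs)
#       def __add__(self, other):
#           return type(self)((self, other))
#       def __radd__(self, other):
#           return type(self)((other, self))
# Anything appended that is not already an instance of node_classinfo is run
# through node_factory before entering the container.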
class TokenListABC(NodeBase, NonIterable, collections.MutableSequence):
"""This class is an abstract base class for all classes that are token lists.
A token list is an object that holds a sequence of tokens, which get concatenated when printed.
    The tokens are turned into strings only when the token list is printed, which makes it
    the lazy building block of source code constructs like expressions and many others.
    When printed, the token list should call *inline_str* on a token if the token is a :class:`NodeABC`,
or the builtin :func:`str` otherwise.
"""
pass
class DelegatedTokenListBase(TokenListABC):
"""This is the base class for token lists classes that forward the calls to the :class:`TokenListABC` API
to an attribute.
This class implements stubs to allow transparent object composition.
"""
@property
def tokenlist_attr(self):
"""This property gives the attribute holding the real token list."""
attr = getattr(self, self.tokenlist_attr_name)
if not isinstance(attr, TokenListABC):
raise AttributeError('The attribute '+self.tokenlist_attr_name+' is not a TokenListABC')
else:
return attr
@tokenlist_attr.setter
def tokenlist_attr(self, value):
return setattr(self, self.tokenlist_attr_name, value)
def __init__(self, tokenlist_attr_name, *args, **kwargs):
"""
:param tokenlist_attr_name: the name of the attribute holding the real token list
"""
self.tokenlist_attr_name = tokenlist_attr_name
super().__init__(*args, **kwargs)
def inline_str(self, idt=None):
return self.tokenlist_attr.inline_str(idt)
def freestanding_str(self, idt=None):
return self.tokenlist_attr.freestanding_str(idt)
def index(self, *args, **kwargs):
return self.tokenlist_attr.index(*args, **kwargs)
def insert(self, *args, **kwargs):
return self.tokenlist_attr.insert(*args, **kwargs)
def count(self, *args, **kwargs):
return self.tokenlist_attr.count(*args, **kwargs)
def pop(self, *args, **kwargs):
return self.tokenlist_attr.pop(*args, **kwargs)
def reverse(self):
self.tokenlist_attr.reverse()
def remove(self, *args, **kwargs):
self.tokenlist_attr.remove(*args, **kwargs)
def __add__(self, other):
self_copy = copy.copy(self)
self_copy.tokenlist_attr = self_copy.tokenlist_attr.__add__(other)
return self_copy
def __radd__(self, other):
self_copy = copy.copy(self)
self_copy.tokenlist_attr = self_copy.tokenlist_attr.__radd__(other)
return self_copy
def append(self, other):
self.tokenlist_attr.append(other)
def __iadd__(self, *args, **kwargs):
self.tokenlist_attr.__iadd__(*args, **kwargs)
return self
def extend(self, other_list):
self.tokenlist_attr.extend(other_list)
def __mul__(self, other):
self_copy = copy.copy(self)
self_copy.tokenlist_attr = self_copy.tokenlist_attr.__mul__(other)
return self_copy
def __rmul__(self, *args, **kwargs):
self_copy = copy.copy(self)
self_copy.tokenlist_attr = self_copy.tokenlist_attr.__rmul__(*args, **kwargs)
return self_copy
def __imul__(self, other):
self.tokenlist_attr.__imul__(other)
return self
def __contains__(self, *args, **kwargs):
return self.tokenlist_attr.__contains__(*args, **kwargs)
def __iter__(self):
return self.tokenlist_attr.__iter__()
def __reversed__(self):
return self.tokenlist_attr.__reversed__()
def __getitem__(self, key):
return self.tokenlist_attr.__getitem__(key)
def __setitem__(self, key, value):
self.tokenlist_attr.__setitem__(key, value)
def __delitem__(self, key):
self.tokenlist_attr.__delitem__(key)
def __len__(self):
return self.tokenlist_attr.__len__()
class TokenListBase(TokenListABC):
"""This base class implements the :class:`TokenListABC` API with all of the operators overloading logic.
"""
def __init__(self, token_list=None, *args, **kwargs):
"""
:param token_list: the list of tokens to store inside the token list
"""
self._token_list = listify(token_list)
super().__init__(*args, **kwargs)
def inline_str(self, idt=None):
"""Print the tokens of the token list usin, and concatenate all the strings.
If the token is a :class:`NodeABC`, its *inline_str* method is used.
otherwise, :func:`str` builtin is called on the token.
"""
string = ''
for token in self._token_list:
if token is self:
                # Special handling of self: allows the token list to print itself using
                # a different method, to avoid infinite recursion and to provide
                # a means for subclasses to implement self printing without creating a
                # "self-printer" class dedicated to printing themselves
string += self.self_inline_str(idt)
elif isinstance(token, NodeABC):
string += token.inline_str(idt)
else:
string += str(token)
return string
def index(self, *args, **kwargs):
return self._token_list.index(*args, **kwargs)
def insert(self, *args, **kwargs):
return self._token_list.insert(*args, **kwargs)
def count(self, *args, **kwargs):
return self._token_list.count(*args, **kwargs)
def pop(self, *args, **kwargs):
return self._token_list.pop(*args, **kwargs)
def reverse(self):
self._token_list.reverse()
def remove(self, *args, **kwargs):
self._token_list.remove(*args, **kwargs)
def __add__(self, other):
if isinstance(other, TokenListABC):
other_list = list(other)
self_copy = copy.copy(self)
self_copy._token_list = self._token_list+other_list
return self_copy
# The result of the addition with a NodeContainer is a NodeContainer
elif isinstance(other, NodeContainerBase):
return other.__radd__(self)
else:
other_list = listify(other)
self_copy = copy.copy(self)
self_copy._token_list = self._token_list+other_list
return self_copy
def __radd__(self, other):
other_list = listify(other)
self_copy = copy.copy(self)
self_copy._token_list = other_list+self._token_list
return self_copy
def append(self, other):
if isinstance(other, TokenListABC):
other_list = tuple(other)
else:
other_list = listify(other)
self._token_list.extend(other_list)
return self
def __iadd__(self, *args, **kwargs):
self.append(*args, **kwargs)
return self
def extend(self, other_list):
other_list = listify(other_list)
for other in other_list:
self.append(other)
def __mul__(self, other):
if isinstance(other, numbers.Integral):
self_copy = copy.copy(self)
self_copy._token_list = self._token_list * other
return self_copy
else:
return NotImplemented
def __rmul__(self, *args, **kwargs):
return self.__mul__(*args, **kwargs)
def __imul__(self, other):
if isinstance(other, numbers.Integral):
self._token_list *= other
return self
else:
return NotImplemented
def __contains__(self, *args, **kwargs):
return self._token_list.__contains__(*args, **kwargs)
def __iter__(self):
return iter(self._token_list)
def __reversed__(self):
return reversed(self._token_list)
def __getitem__(self, key):
return self._token_list[key]
def __setitem__(self, key, value):
self._token_list[key] = value
def __delitem__(self, key):
del self._token_list[key]
def __len__(self):
return len(self._token_list)
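# Illustrative sketch (not in the original module): TokenListBase concatenates its tokens when
# printed, calling inline_str() on NodeABC tokens and str() on everything else, and supports
# list-style operators (+, *, +=). It assumes the module's listify helper accepts a plain list
# of strings, as the constructor suggests. The function is never called.
def _token_list_base_example():
    args = TokenListBase(['a', ', ', 'b'])
    call = TokenListBase(['f', '(']) + args + [')']   # __add__ returns a new token list
    call += [';']                                     # __iadd__ appends in place
    return call.inline_str()                          # expected: 'f(a, b);'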
class _IndentedTokenListBase:
"""This class is the base class that implements a token list which indents its content when printed."""
def inline_str(self, idt=None):
idt = Indentation.ensure_idt(idt)
snippet = super().inline_str(idt)
indented_new_line = "\n"+str(idt)
snippet = snippet.replace("\n", indented_new_line)
return snippet
class IndentedTokenListBase(_IndentedTokenListBase, TokenListBase):
"""This class is a base class for token lists that indent their content when printed."""
pass
class IndentedDelegatedTokenListBase(_IndentedTokenListBase, DelegatedTokenListBase):
"""This is a mix between :class:`DelegatedTokenListBase` and :class:`IndentedTokenListBase`."""
pass
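# Illustrative note (not part of the original module): when rendered through
# _IndentedTokenListBase.inline_str, every newline in the generated snippet is replaced by a
# newline followed by str(idt), so with an indentation prefix of four spaces the rendered text
# 'a\nb' would become 'a\n    b'. The exact prefix depends on the Indentation class defined
# earlier in this module.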
class BacktraceBase(TokenListBase, NonIterable, metaclass=abc.ABCMeta):
"""This base class allows the instances to record the backtrace of the Python code that
created them.
    This allows one to add comments in the generated source code showing which file and line of the Python
    script were responsible for creating it. This is a convenience when debugging the source code generator,
    and can save headaches when one wants to track down which line of Python generated which line of
    generated source code.
As a convenience, it is a subclass of :class:`TokenListBase` so it can be used inside a comment for example.
"""
__frame_format_string = '{filename}:{lineno}({function})'
__frame_joiner = ', '
def __init__(self, level=0, *args, **kwargs):
stack = inspect.stack()
self.stack_frame_list = [
frame[1:] for frame in stack
if os.path.dirname(frame[1]) != os.path.dirname(__file__)
]
super().__init__(self, *args, **kwargs)
@abc.abstractmethod
def freestanding_str(self, idt=None):
#Construct a comment by giving itself as a token and use its freestanding_str method
pass
def self_inline_str(self, idt=None):
return self.__frame_joiner.join(
self.__frame_format_string.format(
filename = os.path.relpath(frame[0]),
lineno = frame[1],
function = frame[2],
line_content = frame[3][frame[4]] if frame[3] is not None else ''
) for frame in self.stack_frame_list
)
|
#!/usr/bin/env python
import csv
import sys
def getChrSizes(chrmFile):
"""
    Reads a tab-delimited file with two columns describing the chromosomes and their lengths.
    Returns a dictionary of chromosome name -> size.
"""
with open(chrmFile, 'r') as f:
chrmSizes = {}
        for line in f:
            row = line.strip().split('\t')
            chrmSizes[row[0]] = int(row[1])
return chrmSizes
chrSizes = {
"hg19": "/fhgfs/groups/lab_bock/arendeiro/share/hg19.chrom.sizes",
"mm10": "/fhgfs/groups/lab_bock/arendeiro/share/mm10.chrom.sizes",
"dr7": "/fhgfs/groups/lab_bock/arendeiro/share/danRer7.chrom.sizes"
}
genome = sys.argv[1]
chrms = getChrSizes(chrSizes[genome]) # get size of chromosomes
wr = csv.writer(sys.stdout, delimiter='\t', lineterminator='\n')
for row in csv.reader(iter(sys.stdin.readline, ''), delimiter='\t'):
chrm = row[0]
start = int(row[1])
end = int(row[2])
    if chrm in chrms:  # skip weird chromosomes
if start >= 1 and end <= chrms[chrm] and start < end:
wr.writerow(row)
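# Example invocation (hypothetical file names): the script reads BED-like rows on stdin and
# writes only the rows whose interval fits inside a known chromosome, e.g.
#   python filter_bed.py hg19 < regions.bed > regions.filtered.bed
# The script name above is made up; the genome argument must be one of the keys in chrSizes.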
|
#!/usr/bin/env python2.7
# -*- coding: utf-8 -*-
import json, sys, traceback
import turtle
from functools import partial
class LoggedTurtle(object):
"""Class emulating Turtle behavior while logging all commands.
    It won't actually display anything; it only executes movement commands
    through a TNavigator.
The class' log variable will contain the log for all LoggedTurtles."""
log = []
next_id = 0
def _log(self, items):
"""Add to log."""
self.__class__.log.append(items)
def __init__(self, *args, **kwargs):
# Turtle ID
self.tid = self.__class__.next_id
self.__class__.next_id += 1
# Navigator which will handle all movements
self.navigator = turtle.TNavigator()
self.navigator.speed(0)
# Log initialization
self._log((self.tid, 'turtle', '__init__', args, kwargs))
def logNavigator(self, *args, **kwargs):
# Log a movement command and execute it
funcName = kwargs.pop('funcName')
self._log((self.tid, 'nav', funcName, args, kwargs))
return getattr(self.navigator, funcName)(*args, **kwargs)
def logTurtle(self, *args, **kwargs):
# Log a non-movement command
funcName = kwargs.pop('funcName')
self._log((self.tid, 'turtle', funcName, args, kwargs))
def __getattr__(self, attr):
# Handle calls to this class
# Check if it's a movement command
if hasattr(self.navigator, attr):
subAttr = getattr(self.navigator, attr)
if hasattr(subAttr, '__call__'):
return partial(self.logNavigator, funcName=attr)
else:
return subAttr
# Check if it's another Turtle command
elif hasattr(turtle.Turtle, attr):
subAttr = getattr(turtle.Turtle, attr)
if hasattr(subAttr, '__call__'):
return partial(self.logTurtle, funcName=attr)
else:
return subAttr
# Not a Turtle command at all
else:
raise AttributeError
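# Illustrative sketch (not part of the original script): each log entry is a tuple of
# (turtle id, 'nav' or 'turtle', method name, positional args, keyword args). For example:
#   t = LoggedTurtle()
#   t.forward(100)            # movement command, executed through the TNavigator
#   t.color('red')            # non-movement Turtle command, logged but not executed
# after which LoggedTurtle.log would contain entries such as:
#   (0, 'turtle', '__init__', (), {})
#   (0, 'nav', 'forward', (100,), {})
#   (0, 'turtle', 'color', ('red',), {})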
def changeTurtle(scriptPath):
"""Modify a script to use the LoggedTurtle."""
newScriptLines = []
for l in open(scriptPath, 'r'):
# Remove turtle from imports
if l[:6] == 'import':
imported = map(lambda x: x.strip(), l[7:].strip().split(','))
if 'turtle' in imported:
imported.remove('turtle')
                if len(imported) > 0:
                    newScriptLines.append("import %s\n" % ', '.join(imported))
                # Skip the original import line so it is not appended again below
                continue
# Modify Turtle instances to LoggedTurtle instances
if 'Turtle' in l:
newl = l.replace('turtle.Turtle(', 'LoggedTurtle(')
newl = newl.replace('Turtle(', 'LoggedTurtle(')
newl = newl.replace('LoggedLoggedTurtle', 'LoggedTurtle') # safety
newScriptLines.append(newl)
else:
newScriptLines.append(l)
open(scriptPath, 'w').writelines(newScriptLines)
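# For illustration (hypothetical input): changeTurtle rewrites a solution script so that
#   import turtle
#   t = turtle.Turtle()
# becomes
#   t = LoggedTurtle()
# i.e. the turtle import is dropped and Turtle constructions are redirected to LoggedTurtle.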
# Modify the solution
changeTurtle("solution.py")
# Execute the solution
try:
execfile("solution.py")
except:
# Remove the runner from the traceback
excInfo = sys.exc_info()
traceback.print_exception(excInfo[0], excInfo[1], excInfo[2].tb_next)
sys.exit(1)
# Output as JSON
print(json.dumps(LoggedTurtle.log))
|
# -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import unittest
import pytest
from airflow import DAG
from airflow.contrib.hooks.redis_hook import RedisHook
from airflow.contrib.sensors.redis_key_sensor import RedisKeySensor
from airflow.utils import timezone
DEFAULT_DATE = timezone.datetime(2017, 1, 1)
@pytest.mark.integration("redis")
class TestRedisSensor(unittest.TestCase):
def setUp(self):
args = {
'owner': 'airflow',
'start_date': DEFAULT_DATE
}
self.dag = DAG('test_dag_id', default_args=args)
self.sensor = RedisKeySensor(
task_id='test_task',
redis_conn_id='redis_default',
dag=self.dag,
key='test_key'
)
def test_poke(self):
hook = RedisHook(redis_conn_id='redis_default')
redis = hook.get_conn()
redis.set('test_key', 'test_value')
self.assertTrue(self.sensor.poke(None), "Key exists on first call.")
redis.delete('test_key')
        self.assertFalse(self.sensor.poke(None), "Key does NOT exist on second call.")
if __name__ == '__main__':
unittest.main()
|
"""Manage Treadmill allocations.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import logging
import click
import six
from treadmill import admin
from treadmill import cli
from treadmill import context
from treadmill import restclient
_DEFAULT_PRIORITY = 1
_LOGGER = logging.getLogger(__name__)
def _display_tenant(restapi, tenant):
"""Display allocations for the given tenant."""
tenant_url = '/tenant/%s' % tenant
alloc_url = '/allocation/%s' % tenant
tenant_obj = restclient.get(restapi, tenant_url).json()
allocations_obj = restclient.get(restapi, alloc_url).json()
tenant_obj['allocations'] = allocations_obj
tenant_formatter = cli.make_formatter('tenant')
cli.out(tenant_formatter(tenant_obj))
def _check_reserve_usage(empty, memory, cpu, disk):
"""Checks params constraints for reserve verb."""
if empty:
if memory:
raise click.UsageError('Cannot combine --empty and --memory')
if cpu:
raise click.UsageError('Cannot combine --empty and --cpu')
if disk:
raise click.UsageError('Cannot combine --empty and --disk')
def _check_tenant_exists(restapi, allocation):
"""Check if tenant exist."""
tenant_url = '/tenant/{}'.format(allocation)
# Check if tenant exists.
try:
restclient.get(restapi, tenant_url).json()
except restclient.NotFoundError:
raise click.UsageError(
'Allocation not found, '
'run allocation configure {} --systems ...'.format(allocation))
def _make_allocation(restapi, allocation, env):
"""Ensure allocation exists for given environment."""
# Make sure allocation exists for given environment.
alloc_url = '/allocation/{}/{}'.format(allocation, env)
try:
restclient.post(restapi, alloc_url, payload={'environment': env})
except restclient.AlreadyExistsError:
pass
def init():
"""Return top level command handler."""
alloc_formatter = cli.make_formatter('tenant')
ctx = {}
@click.group(name='allocation')
@click.option('--api', required=False, help='API url to use.',
envvar='TREADMILL_RESTAPI')
def allocation_grp(api):
"""Manage Treadmill allocations.
        Allocation is a group of applications that share the same capacity.
        Each allocation is partitioned by environment and cell. Given an
        allocation, cell and environment, users reserve capacity for their
        apps.
        Allocations form a hierarchy, so that when a reservation is underused,
        extra capacity is offered to sibling apps first (by environment), and
        then up the tree to applications in parent allocations.
"""
if api:
ctx['api'] = api
@allocation_grp.command(name='list')
@cli.handle_exceptions(restclient.CLI_REST_EXCEPTIONS)
def _list():
"""List allocations."""
restapi = context.GLOBAL.admin_api(ctx.get('api'))
response = restclient.get(restapi, '/tenant/')
cli.out(alloc_formatter(response.json()))
@allocation_grp.command()
@click.option('--set', 'set_', help='If specified then the allocation\'s'
' system id(s) will be replaced instead of updated',
is_flag=True, default=False)
@click.option('-s', '--systems', help='System ID', type=cli.LIST)
@click.argument('allocation', required=True)
@cli.handle_exceptions(restclient.CLI_REST_EXCEPTIONS)
def configure(allocation, systems, set_):
"""Configure allocation.
Allocation name is global, and is associated with list of systems.
"""
restapi = context.GLOBAL.admin_api(ctx.get('api'))
url = '/tenant/{}'.format(allocation)
if systems:
# If tenant exists, update or replace it with new systems.
# If update fails with resource does not exist error, try creating
# tenants from parent to child, those that do not exist will be
# created with provided systems.
try:
existing = restclient.get(restapi, url).json()
all_systems = set(six.moves.map(int, systems))
# if the system ids have to be extended instead of replaced
if not set_:
all_systems.update(existing['systems'])
restclient.put(
restapi,
url,
payload={'systems': list(all_systems)}
)
except restclient.NotFoundError:
# Create parent tenants recursively.
#
# If parent does not exist, it will be created with the systems
# specified.
parts = allocation.split(':')
for idx in range(1, len(parts) + 1):
url = '/tenant/{}'.format(':'.join(parts[:idx]))
try:
existing = restclient.get(restapi, url).json()
except restclient.NotFoundError:
restclient.post(
restapi,
url,
payload={
'systems': list(six.moves.map(int, systems))
})
_display_tenant(restapi, allocation)
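    # Example invocations (hypothetical tenant and system ids, assuming the group is mounted
    # under the standard 'treadmill' entry point):
    #   treadmill allocation configure acme:web --systems 1234
    #   treadmill allocation configure acme:web --systems 5678 --set
    # The first call adds system 1234 to the tenant's systems; the second replaces the list.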
@allocation_grp.command()
@click.option('-e', '--env', help='Environment.', required=True)
@click.option('-c', '--cell', help='Treadmill cell', required=True)
@click.option('-p', '--partition', help='Allocation partition')
@click.option('-r', '--rank', help='Allocation rank', type=int)
@click.option('--rank-adjustment', help='Rank adjustment', type=int)
@click.option('--max-utilization', help='Maximum utilization', type=float)
@click.option('--empty', help='Make empty (zero capacity) reservation.',
is_flag=True, default=False)
@click.option('--memory', help='Memory demand.',
metavar='G|M',
callback=cli.validate_memory)
@click.option('--cpu', help='CPU demand, %.',
metavar='XX%',
callback=cli.validate_cpu)
@click.option('--disk', help='Disk demand.',
metavar='G|M',
callback=cli.validate_disk)
@click.argument('allocation', required=True)
@cli.handle_exceptions(restclient.CLI_REST_EXCEPTIONS)
# pylint: disable=R0912
def reserve(allocation, env, cell, partition,
rank, rank_adjustment, max_utilization, empty,
memory, cpu, disk):
"""Reserve capacity on the cell for given environment."""
_check_reserve_usage(empty, memory, cpu, disk)
restapi = context.GLOBAL.admin_api(ctx.get('api'))
_check_tenant_exists(restapi, allocation)
_make_allocation(restapi, allocation, env)
data = {}
if empty:
data['memory'] = '0M'
data['disk'] = '0M'
data['cpu'] = '0%'
if memory:
data['memory'] = memory
if cpu:
data['cpu'] = cpu
if disk:
data['disk'] = disk
if rank is not None:
data['rank'] = rank
if rank_adjustment is not None:
data['rank_adjustment'] = rank_adjustment
if max_utilization is not None:
data['max_utilization'] = max_utilization
if partition:
data['partition'] = partition
if data:
reservation_url = '/allocation/{}/{}/reservation/{}'.format(
allocation, env, cell
)
try:
existing = restclient.get(restapi, reservation_url).json()
# TODO: need cleaner way of deleting attributes that are not
# valid for update. It is a hack.
                for attr in list(existing):
if (attr not in
['memory', 'cpu', 'disk', 'partition']):
del existing[attr]
existing.update(data)
restclient.put(restapi, reservation_url, payload=existing)
except restclient.NotFoundError:
# some attributes need default values when creating
if not partition:
data['partition'] = admin.DEFAULT_PARTITION
restclient.post(restapi, reservation_url, payload=data)
_display_tenant(restapi, allocation)
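    # Example invocation (hypothetical values, assuming the standard 'treadmill' entry point):
    #   treadmill allocation reserve acme:web --env prod --cell my-cell \
    #       --memory 2G --cpu 100% --disk 1G
    # Passing --empty instead creates a zero-capacity placeholder reservation (0M/0%/0M).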
@allocation_grp.command()
@click.option('-e', '--env', help='Environment.', required=True)
@click.option('-c', '--cell', help='Treadmill cell', required=True)
@click.option('--pattern', help='Application pattern.', required=True)
@click.option('--priority', help='Assignment priority.', type=int)
@click.option('--delete', help='Delete assignment.',
is_flag=True, default=False)
@click.argument('allocation', required=True)
@cli.handle_exceptions(restclient.CLI_REST_EXCEPTIONS)
def assign(allocation, env, cell, pattern, priority, delete):
"""Assign application pattern:priority to the allocation.
        Application pattern must start with <PROID>. and is a glob expression.
        The proid's environment and the one specified on the command line with
        the --env option must match.
        Once scheduled, the Treadmill scheduler matches the application against
        all available patterns and assigns it to a reserved capacity.
        All applications assigned to a capacity are ordered by priority from
        high to low.
"""
restapi = context.GLOBAL.admin_api(ctx.get('api'))
_check_tenant_exists(restapi, allocation)
_make_allocation(restapi, allocation, env)
reservation_url = '/allocation/{}/{}/reservation/{}'.format(
allocation, env, cell
)
try:
restclient.get(restapi, reservation_url)
except restclient.NotFoundError:
# TODO: default partition should be resolved in API, not in CLI.
restclient.post(restapi, reservation_url,
payload={'memory': '0M',
'disk': '0M',
'cpu': '0%',
'partition': admin.DEFAULT_PARTITION})
url = '/allocation/{}/{}/assignment/{}/{}'.format(
allocation, env, cell, pattern
)
if delete:
restclient.delete(restapi, url)
else:
default_prio = None
existing = restclient.get(restapi, url).json()
for assignment in existing:
if assignment['pattern'] == pattern:
default_prio = assignment['priority']
if default_prio is None:
default_prio = _DEFAULT_PRIORITY
data = {'priority': priority if priority else default_prio}
restclient.put(restapi, url, payload=data)
_display_tenant(restapi, allocation)
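    # Example invocation (hypothetical proid and pattern, assuming the standard 'treadmill'
    # entry point):
    #   treadmill allocation assign acme:web --env prod --cell my-cell \
    #       --pattern proid1.web.* --priority 10
    # Passing --delete with the same pattern removes the assignment instead.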
@allocation_grp.command()
@click.argument('item', required=True)
@cli.handle_exceptions(restclient.CLI_REST_EXCEPTIONS)
def delete(item):
"""Delete a tenant/allocation/reservation."""
restapi = context.GLOBAL.admin_api(ctx.get('api'))
path = item.split('/')
if len(path) == 1:
# delete a tenant
url = '/tenant/%s' % item
restclient.delete(restapi, url)
elif len(path) == 2:
# delete an allocation
url = '/allocation/%s' % item
restclient.delete(restapi, url)
elif len(path) == 3:
# delete a reservation
url = '/allocation/%s/%s/reservation/%s' % (path[0],
path[1],
path[2])
restclient.delete(restapi, url)
else:
# error
click.echo('Wrong format: %s' % item, err=True)
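        # Example item formats (hypothetical names): 'acme' deletes the tenant,
        # 'acme/prod' deletes the allocation, and 'acme/prod/my-cell' deletes the
        # reservation for that cell.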
del assign
del reserve
del configure
del _list
del delete
return allocation_grp
|