#!/usr/bin/env python
"""Appends metadata to estimators files reading from input xml files.
The current version of the DMRG does not include any metadata into the
estimator files. Metadata are comment lines that have information about the run
the estimator was obtained from, such as the value of the Hamiltonian
parameters. These data can be useful when calling the scripts that extract,
analyze, and plot the estimator data.
This script crawls down a directory, finding all the estimator files, i.e. those
whose name is 'estimators.dat'. For each of them, it finds the corresponding
input xml file, which was generated by the DMRG code and contains the value of
the parameters of the run. It extracts the information for selected parameters,
and, finally, prepends this information in the proper metadata format to each
estimator file.
The script only works with fromScratch runs, and probably fails when you have
restarts.
The list of parameters you want to add as metadata, `keys_to_watch`, should be
modified depending on your project.
Usage:
append_metadata_from_xml.py [--dir=DIR]
append_metadata_from_xml.py -h | --help
Options:
-h --help Shows this screen.
--dir=DIR Output directory [default: ./]
"""
import os
# Temporary patch to avoid installing the dmrg_helpers package.
import inspect
import sys
script_full_path = os.path.abspath(inspect.getfile(inspect.currentframe()))
sys.path.insert(0, os.path.dirname(os.path.dirname(script_full_path)))
# patch ends
from dmrg_helpers.extract.input_file_reader import InputFileReader
from dmrg_helpers.extract.locate_estimator_files import locate_estimator_files
from itertools import izip
from docopt import docopt
def parent_dir_of_parent_dir(filename):
"""Returns the parent dir of the dir where the file lives.
"""
filename = os.path.abspath(filename)
return os.path.dirname(os.path.dirname(filename))
def main(args):
estimator_files = locate_estimator_files(args['--dir'])
input_files = [os.path.join(parent_dir_of_parent_dir(f), 'input.log')
for f in estimator_files]
keys_to_watch = ['t', 'tp', 'U', 'J1', 'J2', 'Kring', 'numberOfSites']
for pair in izip(estimator_files, input_files):
reader = InputFileReader(keys_to_watch)
reader.read(pair[1])
reader.prepend_data_to_file(pair[0])
if __name__ == '__main__':
args = docopt(__doc__, version = 0.1)
main(args)
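# Illustration only: a dependency-free sketch of the crawl-and-pair step that
# the script performs through dmrg_helpers (locate_estimator_files plus
# parent_dir_of_parent_dir). It assumes the layout described in the docstring:
# the matching 'input.log' lives one directory above the one holding
# 'estimators.dat'. This is not the dmrg_helpers implementation.
def find_estimator_pairs(root='.'):
    """Yield (estimators.dat path, matching input.log path) pairs."""
    for dirpath, _dirnames, filenames in os.walk(root):
        if 'estimators.dat' in filenames:
            estimator = os.path.join(dirpath, 'estimators.dat')
            input_log = os.path.join(
                os.path.dirname(os.path.abspath(dirpath)), 'input.log')
            yield estimator, input_log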
|
import math
import numpy as np
import parakeet
from parakeet.testing_helpers import run_local_tests, expect_each
xs = [0.1, 0.5, 2.0]
def test_sin():
expect_each(parakeet.sin, np.sin, xs)
expect_each(math.sin, np.sin, xs)
expect_each(np.sin, np.sin, xs)
def test_parakeet_sinh():
expect_each(parakeet.sinh, np.sinh, xs)
expect_each(math.sinh, np.sinh, xs)
expect_each(np.sinh, np.sinh, xs)
def test_cos():
expect_each(parakeet.cos, np.cos, xs)
expect_each(math.cos, np.cos, xs)
expect_each(np.cos, np.cos, xs)
def test_cosh():
expect_each(parakeet.cosh, np.cosh, xs)
expect_each(math.cosh, np.cosh, xs)
expect_each(np.cosh, np.cosh, xs)
def test_tan():
expect_each(parakeet.tan, np.tan, xs)
expect_each(math.tan, np.tan, xs)
expect_each(np.tan, np.tan, xs)
def test_tanh():
expect_each(parakeet.tanh, np.tanh, xs)
expect_each(math.tanh, np.tanh, xs)
expect_each(np.tanh, np.tanh, xs)
def test_log():
expect_each(parakeet.log, np.log, xs)
expect_each(math.log, np.log, xs)
expect_each(np.log, np.log, xs)
def test_log10():
expect_each(parakeet.log10, np.log10, xs)
expect_each(math.log10, np.log10, xs)
expect_each(np.log10, np.log10, xs)
def test_exp():
expect_each(parakeet.exp, np.exp, xs)
expect_each(math.exp, np.exp, xs)
expect_each(np.exp, np.exp, xs)
if __name__ == '__main__':
run_local_tests()
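# Sketch of the comparison expect_each is assumed to perform (an assumption
# about parakeet.testing_helpers, not its actual implementation): evaluate the
# candidate and the reference function on every input and require the results
# to be numerically close.
def expect_each_sketch(candidate_fn, reference_fn, inputs):
    for x in inputs:
        np.testing.assert_allclose(candidate_fn(x), reference_fn(x))
# e.g. expect_each_sketch(math.cos, np.cos, xs)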
|
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_log import log as logging
from oslo_serialization import jsonutils
from requests import exceptions
from heat.common import exception
from heat.common import grouputils
from heat.common.i18n import _
from heat.common import template_format
from heat.common import urlfetch
from heat.engine import attributes
from heat.engine import environment
from heat.engine import properties
from heat.engine.resources import stack_resource
from heat.engine import template
from heat.rpc import api as rpc_api
LOG = logging.getLogger(__name__)
REMOTE_SCHEMES = ('http', 'https')
LOCAL_SCHEMES = ('file',)
STACK_ID_OUTPUT = 'OS::stack_id'
def generate_class_from_template(name, data, param_defaults):
tmpl = template.Template(template_format.parse(data))
props, attrs = TemplateResource.get_schemas(tmpl, param_defaults)
cls = type(name, (TemplateResource,),
{'properties_schema': props,
'attributes_schema': attrs,
'__doc__': tmpl.t.get(tmpl.DESCRIPTION)})
return cls
class TemplateResource(stack_resource.StackResource):
"""A resource implemented by a nested stack.
This implementation passes resource properties as parameters to the nested
stack. Outputs of the nested stack are exposed as attributes of this
resource.
"""
def __init__(self, name, json_snippet, stack):
self._parsed_nested = None
self.stack = stack
self.validation_exception = None
tri = self._get_resource_info(json_snippet)
self.properties_schema = {}
self.attributes_schema = {}
# run Resource.__init__() so we can call self.nested()
super(TemplateResource, self).__init__(name, json_snippet, stack)
self.resource_info = tri
if self.validation_exception is None:
self._generate_schema()
self.reparse()
def _get_resource_info(self, rsrc_defn):
try:
tri = self.stack.env.get_resource_info(
rsrc_defn.resource_type,
resource_name=rsrc_defn.name,
registry_type=environment.TemplateResourceInfo)
except exception.EntityNotFound:
self.validation_exception = ValueError(_(
'Only Templates with an extension of .yaml or '
'.template are supported'))
else:
self._template_name = tri.template_name
self.resource_type = tri.name
self.resource_path = tri.path
if tri.user_resource:
self.allowed_schemes = REMOTE_SCHEMES
else:
self.allowed_schemes = REMOTE_SCHEMES + LOCAL_SCHEMES
return tri
@staticmethod
def get_template_file(template_name, allowed_schemes):
try:
return urlfetch.get(template_name, allowed_schemes=allowed_schemes)
except (IOError, exceptions.RequestException) as r_exc:
args = {'name': template_name, 'exc': str(r_exc)}
msg = _('Could not fetch remote template '
'"%(name)s": %(exc)s') % args
raise exception.NotFound(msg_fmt=msg)
@staticmethod
def get_schemas(tmpl, param_defaults):
return ((properties.Properties.schema_from_params(
tmpl.param_schemata(param_defaults))),
(attributes.Attributes.schema_from_outputs(
tmpl[tmpl.OUTPUTS])))
def _generate_schema(self):
self._parsed_nested = None
try:
tmpl = template.Template(self.child_template())
except (exception.NotFound, ValueError) as download_error:
self.validation_exception = download_error
tmpl = template.Template(
{"HeatTemplateFormatVersion": "2012-12-12"})
# re-generate the properties and attributes from the template.
self.properties_schema, self.attributes_schema = self.get_schemas(
tmpl, self.stack.env.param_defaults)
self.attributes_schema.update(self.base_attributes_schema)
self.attributes.set_schema(self.attributes_schema)
def child_params(self):
"""Override method of child_params for the resource.
:return: parameter values for our nested stack based on our properties
"""
params = {}
for pname, pval in iter(self.properties.props.items()):
if not pval.implemented():
continue
try:
val = self.properties.get_user_value(pname)
except ValueError:
if self.action == self.INIT:
prop = self.properties.props[pname]
val = prop.get_value(None)
else:
raise
if val is not None:
# take a list and create a CommaDelimitedList
if pval.type() == properties.Schema.LIST:
if len(val) == 0:
params[pname] = ''
elif isinstance(val[0], dict):
flattened = []
for (count, item) in enumerate(val):
for (ik, iv) in iter(item.items()):
mem_str = '.member.%d.%s=%s' % (count, ik, iv)
flattened.append(mem_str)
params[pname] = ','.join(flattened)
else:
# When None is returned from get_attr, creating a
# delimited list with it fails during validation.
# we should sanitize the None values to empty strings.
# FIXME(rabi) this needs a permanent solution
# to sanitize attributes and outputs in the future.
params[pname] = ','.join(
(x if x is not None else '') for x in val)
else:
# for MAP, the JSON param takes either a collection or
# string, so just pass it on and let the param validate
# as appropriate
params[pname] = val
return params
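# Worked example of the flattening above (illustration only):
#   LIST of scalars   ['a', None, 'c']            -> 'a,,c'
#   LIST of dicts     [{'k1': 'v1', 'k2': 'v2'}]  -> '.member.0.k1=v1,.member.0.k2=v2'
#                     (key order within one member follows dict iteration order)
#   empty LIST        []                          -> ''
#   MAP               passed through unchanged; the JSON parameter validates it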
def child_template(self):
if not self._parsed_nested:
self._parsed_nested = template_format.parse(self.template_data(),
self.template_url)
return self._parsed_nested
def regenerate_info_schema(self, definition):
self._get_resource_info(definition)
self._generate_schema()
@property
def template_url(self):
return self._template_name
def template_data(self):
# we want to have the latest possible template.
# 1. look in files
# 2. try download
# 3. look in the db
reported_excp = None
t_data = self.stack.t.files.get(self.template_url)
stored_t_data = t_data
if t_data is None:
LOG.debug('TemplateResource data file "%s" not found in files.',
self.template_url)
if not t_data and self.template_url.endswith((".yaml", ".template")):
try:
t_data = self.get_template_file(self.template_url,
self.allowed_schemes)
except exception.NotFound as err:
if self.action == self.UPDATE:
raise
reported_excp = err
if t_data is None:
nested_identifier = self.nested_identifier()
if nested_identifier is not None:
nested_t = self.rpc_client().get_template(self.context,
nested_identifier)
t_data = jsonutils.dumps(nested_t)
if t_data is not None:
if t_data != stored_t_data:
self.stack.t.files[self.template_url] = t_data
self.stack.t.env.register_class(self.resource_type,
self.template_url,
path=self.resource_path)
return t_data
if reported_excp is None:
reported_excp = ValueError(_('Unknown error retrieving %s') %
self.template_url)
raise reported_excp
def _validate_against_facade(self, facade_cls):
facade_schemata = properties.schemata(facade_cls.properties_schema)
for n, fs in facade_schemata.items():
if fs.required and n not in self.properties_schema:
msg = (_("Required property %(n)s for facade %(type)s "
"missing in provider") % {'n': n, 'type': self.type()})
raise exception.StackValidationFailed(message=msg)
ps = self.properties_schema.get(n)
if (n in self.properties_schema and
(fs.allowed_param_prop_type() != ps.type)):
# Type mismatch
msg = (_("Property %(n)s type mismatch between facade %(type)s"
" (%(fs_type)s) and provider (%(ps_type)s)") % {
'n': n, 'type': self.type(),
'fs_type': fs.type, 'ps_type': ps.type})
raise exception.StackValidationFailed(message=msg)
for n, ps in self.properties_schema.items():
if ps.required and n not in facade_schemata:
# Required property for template not present in facade
msg = (_("Provider requires property %(n)s "
"unknown in facade %(type)s") % {
'n': n, 'type': self.type()})
raise exception.StackValidationFailed(message=msg)
facade_attrs = facade_cls.attributes_schema.copy()
facade_attrs.update(facade_cls.base_attributes_schema)
for attr in facade_attrs:
if attr not in self.attributes_schema:
msg = (_("Attribute %(attr)s for facade %(type)s "
"missing in provider") % {
'attr': attr, 'type': self.type()})
raise exception.StackValidationFailed(message=msg)
def validate(self):
# Calls validate_template()
result = super(TemplateResource, self).validate()
try:
self.template_data()
except ValueError as ex:
msg = _("Failed to retrieve template data: %s") % ex
raise exception.StackValidationFailed(message=msg)
# If we're using an existing resource type as a facade for this
# template, check for compatibility between the interfaces.
try:
fri = self.stack.env.get_resource_info(
self.type(),
resource_name=self.name,
ignore=self.resource_info)
except exception.EntityNotFound:
pass
else:
facade_cls = fri.get_class(files=self.stack.t.files)
self._validate_against_facade(facade_cls)
return result
def validate_template(self):
if self.validation_exception is not None:
msg = str(self.validation_exception)
raise exception.StackValidationFailed(message=msg)
return super(TemplateResource, self).validate_template()
def handle_adopt(self, resource_data=None):
return self.create_with_template(self.child_template(),
self.child_params(),
adopt_data=resource_data)
def handle_create(self):
return self.create_with_template(self.child_template(),
self.child_params())
def metadata_update(self, new_metadata=None):
"""Refresh the metadata if new_metadata is None."""
if new_metadata is None:
self.metadata_set(self.t.metadata())
def handle_update(self, json_snippet, tmpl_diff, prop_diff):
self.properties = json_snippet.properties(self.properties_schema,
self.context)
return self.update_with_template(self.child_template(),
self.child_params())
def get_reference_id(self):
if self.resource_id is None:
return str(self.name)
if STACK_ID_OUTPUT in self.attributes.cached_attrs:
return self.attributes.cached_attrs[STACK_ID_OUTPUT]
stack_identity = self.nested_identifier()
reference_id = stack_identity.arn()
try:
if self._outputs is not None:
reference_id = self.get_output(STACK_ID_OUTPUT)
elif STACK_ID_OUTPUT in self.attributes:
output = self.rpc_client().show_output(self.context,
dict(stack_identity),
STACK_ID_OUTPUT)
if rpc_api.OUTPUT_ERROR in output:
raise exception.TemplateOutputError(
resource=self.name,
attribute=STACK_ID_OUTPUT,
message=output[rpc_api.OUTPUT_ERROR])
reference_id = output[rpc_api.OUTPUT_VALUE]
except exception.TemplateOutputError as err:
LOG.info('%s', err)
except exception.NotFound:
pass
self.attributes.set_cached_attr(STACK_ID_OUTPUT, reference_id)
return reference_id
def get_attribute(self, key, *path):
if self.resource_id is None:
return None
# first look for explicit resource.x.y
if key.startswith('resource.'):
return grouputils.get_nested_attrs(self, key, False, *path)
# then look for normal outputs
try:
return attributes.select_from_attribute(self.get_output(key),
path)
except exception.NotFound:
raise exception.InvalidTemplateAttribute(resource=self.name,
key=key)
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
db.execute("create index canvas_comment_id_and_visibility_and_parent_comment_id on canvas_comment (id, visibility, parent_comment_id);")
def backwards(self, orm):
raise RuntimeError("Cannot reverse this migration.")
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '254', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'canvas.apiapp': {
'Meta': {'object_name': 'APIApp'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'})
},
'canvas.apiauthtoken': {
'Meta': {'unique_together': "(('user', 'app'),)", 'object_name': 'APIAuthToken'},
'app': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['canvas.APIApp']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'token': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '40'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'canvas.bestof': {
'Meta': {'object_name': 'BestOf'},
'category': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': "'best_of'", 'null': 'True', 'blank': 'True', 'to': "orm['canvas.Category']"}),
'chosen_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'comment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'best_of'", 'to': "orm['canvas.Comment']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'timestamp': ('canvas.util.UnixTimestampField', [], {})
},
'canvas.category': {
'Meta': {'object_name': 'Category'},
'description': ('django.db.models.fields.CharField', [], {'max_length': '140'}),
'founded': ('django.db.models.fields.FloatField', [], {'default': '1298956320'}),
'founder': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': "'founded_groups'", 'null': 'True', 'blank': 'True', 'to': "orm['auth.User']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'moderators': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'moderated_categories'", 'symmetrical': 'False', 'to': "orm['auth.User']"}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '20'}),
'visibility': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'canvas.comment': {
'Meta': {'object_name': 'Comment'},
'anonymous': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': "'comments'", 'null': 'True', 'blank': 'True', 'to': "orm['auth.User']"}),
'category': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': "'comments'", 'null': 'True', 'blank': 'True', 'to': "orm['canvas.Category']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip': ('django.db.models.fields.IPAddressField', [], {'default': "'0.0.0.0'", 'max_length': '15'}),
'judged': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'ot_hidden': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'parent_comment': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': "'replies'", 'null': 'True', 'blank': 'True', 'to': "orm['canvas.Comment']"}),
'parent_content': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'comments'", 'null': 'True', 'to': "orm['canvas.Content']"}),
'posted_on_quest_of_the_day': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'replied_comment': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['canvas.Comment']", 'null': 'True', 'blank': 'True'}),
'reply_content': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'used_in_comments'", 'null': 'True', 'to': "orm['canvas.Content']"}),
'reply_text': ('django.db.models.fields.CharField', [], {'max_length': '2000', 'blank': 'True'}),
'score': ('django.db.models.fields.FloatField', [], {'default': '0', 'db_index': 'True'}),
'skip_moderation': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'star_count': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'}),
'timestamp': ('canvas.util.UnixTimestampField', [], {'default': '0'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '140', 'blank': 'True'}),
'visibility': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'canvas.commentflag': {
'Meta': {'object_name': 'CommentFlag'},
'comment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'flags'", 'to': "orm['canvas.Comment']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip': ('django.db.models.fields.IPAddressField', [], {'max_length': '15'}),
'timestamp': ('canvas.util.UnixTimestampField', [], {}),
'type_id': ('django.db.models.fields.IntegerField', [], {}),
'undone': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'flags'", 'to': "orm['auth.User']"})
},
'canvas.commentmoderationlog': {
'Meta': {'object_name': 'CommentModerationLog'},
'comment': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['canvas.Comment']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'moderator': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}),
'note': ('django.db.models.fields.TextField', [], {}),
'timestamp': ('canvas.util.UnixTimestampField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'moderated_comments_log'", 'to': "orm['auth.User']"}),
'visibility': ('django.db.models.fields.IntegerField', [], {})
},
'canvas.commentpin': {
'Meta': {'object_name': 'CommentPin'},
'auto': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'comment': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['canvas.Comment']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'timestamp': ('canvas.util.UnixTimestampField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'canvas.commentsticker': {
'Meta': {'object_name': 'CommentSticker'},
'comment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'stickers'", 'to': "orm['canvas.Comment']"}),
'epic_message': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '140', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip': ('django.db.models.fields.IPAddressField', [], {'max_length': '15'}),
'timestamp': ('canvas.util.UnixTimestampField', [], {}),
'type_id': ('django.db.models.fields.IntegerField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'})
},
'canvas.commentstickerlog': {
'Meta': {'object_name': 'CommentStickerLog'},
'comment': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['canvas.Comment']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'canvas.content': {
'Meta': {'object_name': 'Content'},
'alpha': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'animated': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '40', 'primary_key': 'True'}),
'ip': ('django.db.models.fields.IPAddressField', [], {'default': "'0.0.0.0'", 'max_length': '15'}),
'remix_of': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'remixes'", 'null': 'True', 'to': "orm['canvas.Content']"}),
'remix_text': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '1000', 'blank': 'True'}),
'source_url': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '4000', 'blank': 'True'}),
'stamps_used': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'used_as_stamp'", 'blank': 'True', 'to': "orm['canvas.Content']"}),
'timestamp': ('canvas.util.UnixTimestampField', [], {}),
'url_mapping': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['canvas.ContentUrlMapping']", 'null': 'True', 'blank': 'True'}),
'visibility': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'canvas.contenturlmapping': {
'Meta': {'object_name': 'ContentUrlMapping'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'canvas.emailunsubscribe': {
'Meta': {'object_name': 'EmailUnsubscribe'},
'email': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'canvas.externalcontent': {
'Meta': {'object_name': 'ExternalContent'},
'_data': ('django.db.models.fields.TextField', [], {'default': "'{}'"}),
'content_type': ('django.db.models.fields.CharField', [], {'max_length': '2'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'parent_comment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'external_content'", 'to': "orm['canvas.Comment']"}),
'source_url': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '4000', 'null': 'True', 'blank': 'True'})
},
'canvas.facebookinvite': {
'Meta': {'object_name': 'FacebookInvite'},
'fb_message_id': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'invited_fbid': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'invitee': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': "'facebook_invited_from'", 'null': 'True', 'blank': 'True', 'to': "orm['auth.User']"}),
'inviter': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': "'facebook_sent_invites'", 'null': 'True', 'blank': 'True', 'to': "orm['auth.User']"})
},
'canvas.facebookuser': {
'Meta': {'object_name': 'FacebookUser'},
'email': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'fb_uid': ('django.db.models.fields.BigIntegerField', [], {'unique': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'gender': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_invited': ('canvas.util.UnixTimestampField', [], {'default': '0'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True', 'null': 'True', 'blank': 'True'})
},
'canvas.followcategory': {
'Meta': {'unique_together': "(('user', 'category'),)", 'object_name': 'FollowCategory'},
'category': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'followers'", 'to': "orm['canvas.Category']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'following'", 'to': "orm['auth.User']"})
},
'canvas.invitecode': {
'Meta': {'object_name': 'InviteCode'},
'code': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'invitee': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': "'invited_from'", 'null': 'True', 'blank': 'True', 'to': "orm['auth.User']"}),
'inviter': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': "'sent_invites'", 'null': 'True', 'blank': 'True', 'to': "orm['auth.User']"})
},
'canvas.remixplugin': {
'Meta': {'object_name': 'RemixPlugin'},
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
's3md5': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'timestamp': ('canvas.util.UnixTimestampField', [], {'default': '0'})
},
'canvas.stashcontent': {
'Meta': {'object_name': 'StashContent'},
'content': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['canvas.Content']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'canvas.userinfo': {
'Meta': {'object_name': 'UserInfo'},
'avatar': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['canvas.Content']", 'null': 'True'}),
'bio_text': ('django.db.models.fields.CharField', [], {'max_length': '2000', 'blank': 'True'}),
'enable_timeline': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'enable_timeline_posts': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'facebook_id': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'follower_count': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'}),
'free_invites': ('django.db.models.fields.IntegerField', [], {'default': '10'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'invite_bypass': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255', 'blank': 'True'}),
'is_qa': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'post_anonymously': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'profile_image': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['canvas.Comment']", 'null': 'True'}),
'trust_changed': ('canvas.util.UnixTimestampField', [], {'null': 'True', 'blank': 'True'}),
'trusted': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True'})
},
'canvas.usermoderationlog': {
'Meta': {'object_name': 'UserModerationLog'},
'action': ('django.db.models.fields.IntegerField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'moderator': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}),
'note': ('django.db.models.fields.TextField', [], {}),
'timestamp': ('canvas.util.UnixTimestampField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'moderation_log'", 'to': "orm['auth.User']"})
},
'canvas.userwarning': {
'Meta': {'object_name': 'UserWarning'},
'comment': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['canvas.Comment']", 'null': 'True', 'blank': 'True'}),
'confirmed': ('canvas.util.UnixTimestampField', [], {'default': '0'}),
'custom_message': ('django.db.models.fields.TextField', [], {}),
'disable_user': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'issued': ('canvas.util.UnixTimestampField', [], {}),
'stock_message': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'user_warnings'", 'to': "orm['auth.User']"}),
'viewed': ('canvas.util.UnixTimestampField', [], {'default': '0'})
},
'canvas.welcomeemailrecipient': {
'Meta': {'object_name': 'WelcomeEmailRecipient'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'recipient': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'unique': 'True'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
}
}
complete_apps = ['canvas']
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Finds android browsers that can be started and controlled by telemetry."""
from __future__ import absolute_import
import contextlib
import logging
import os
import platform
import posixpath
import shutil
import subprocess
from devil import base_error
from devil.android import apk_helper
from devil.android import flag_changer
from devil.android.sdk import version_codes
from py_utils import dependency_util
from py_utils import file_util
from py_utils import tempfile_ext
from telemetry import compat_mode_options
from telemetry import decorators
from telemetry.core import exceptions
from telemetry.core import platform as telemetry_platform
from telemetry.core import util
from telemetry.internal.backends import android_browser_backend_settings
from telemetry.internal.backends.chrome import android_browser_backend
from telemetry.internal.backends.chrome import chrome_startup_args
from telemetry.internal.browser import browser
from telemetry.internal.browser import possible_browser
from telemetry.internal.platform import android_device
from telemetry.internal.util import binary_manager
from telemetry.internal.util import format_for_logging
from telemetry.internal.util import local_first_binary_manager
ANDROID_BACKEND_SETTINGS = (
android_browser_backend_settings.ANDROID_BACKEND_SETTINGS)
@contextlib.contextmanager
def _ProfileWithExtraFiles(profile_dir, profile_files_to_copy):
"""Yields a temporary directory populated with input files.
Args:
profile_dir: A directory whose contents will be copied to the output
directory.
profile_files_to_copy: A list of (source, dest) tuples to be copied to
the output directory.
Yields: A path to a temporary directory, named "_default_profile". This
directory will be cleaned up when this context exits.
"""
with tempfile_ext.NamedTemporaryDirectory() as tempdir:
# TODO(csharrison): "_default_profile" was chosen because this directory
# will be pushed to the device's sdcard. We don't want to choose a
# random name due to the extra failure mode of filling up the sdcard
# in the case of unclean test teardown. We should consider changing
# PushProfile to avoid writing to this intermediate location.
host_profile = os.path.join(tempdir, '_default_profile')
if profile_dir:
shutil.copytree(profile_dir, host_profile)
else:
os.mkdir(host_profile)
# Add files from |profile_files_to_copy| into the host profile
# directory. Don't copy files if they already exist.
for source, dest in profile_files_to_copy:
host_path = os.path.join(host_profile, dest)
if not os.path.exists(host_path):
file_util.CopyFileWithIntermediateDirectories(source, host_path)
yield host_profile
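# Usage sketch (illustration only; mirrors the call in _SetupProfile below,
# and push_profile_to_device is a hypothetical consumer):
#   files = [('/host/extra/file.json', 'Default/file.json')]
#   with _ProfileWithExtraFiles('/host/profile', files) as host_profile:
#       push_profile_to_device(host_profile)  # directory is deleted on exit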
class PossibleAndroidBrowser(possible_browser.PossibleBrowser):
"""A launchable android browser instance."""
def __init__(self, browser_type, finder_options, android_platform,
backend_settings, local_apk=None, target_os='android'):
super(PossibleAndroidBrowser, self).__init__(
browser_type, target_os, backend_settings.supports_tab_control)
assert browser_type in FindAllBrowserTypes(), (
'Please add %s to android_browser_finder.FindAllBrowserTypes' %
browser_type)
self._platform = android_platform
self._platform_backend = (
android_platform._platform_backend) # pylint: disable=protected-access
self._backend_settings = backend_settings
self._local_apk = local_apk
self._flag_changer = None
self._modules_to_install = None
self._compile_apk = finder_options.compile_apk
if self._local_apk is None and finder_options.chrome_root is not None:
self._local_apk = self._backend_settings.FindLocalApk(
self._platform_backend.device, finder_options.chrome_root)
# At this point the local_apk, if any, must exist.
assert self._local_apk is None or os.path.exists(self._local_apk)
self._build_dir = util.GetBuildDirFromHostApkPath(self._local_apk)
if finder_options.modules_to_install:
self._modules_to_install = set(['base'] +
finder_options.modules_to_install)
self._support_apk_list = []
if (self._backend_settings.requires_embedder or
self._backend_settings.has_additional_apk):
if finder_options.webview_embedder_apk:
self._support_apk_list = finder_options.webview_embedder_apk
else:
self._support_apk_list = self._backend_settings.FindSupportApks(
self._local_apk, finder_options.chrome_root)
elif finder_options.webview_embedder_apk:
logging.warning(
'No embedder needed for %s, ignoring --webview-embedder-apk option',
self._backend_settings.browser_type)
# At this point the apks in _support_apk_list, if any, must exist.
for apk in self._support_apk_list:
assert os.path.exists(apk)
def __repr__(self):
return 'PossibleAndroidBrowser(browser_type=%s)' % self.browser_type
@property
def settings(self):
"""Get the backend_settings for this possible browser."""
return self._backend_settings
@property
def browser_directory(self):
# On Android L+ the directory where base APK resides is also used for
# keeping extracted native libraries and .odex. Here is an example layout:
# /data/app/$package.apps.chrome-1/
# base.apk
# lib/arm/libchrome.so
# oat/arm/base.odex
# Declaring this toplevel directory as 'browser_directory' allows the cold
# startup benchmarks to flush OS pagecache for the native library, .odex and
# the APK.
apks = self._platform_backend.device.GetApplicationPaths(
self._backend_settings.package)
# A package can map to multiple APKs if the package overrides the app on
# the system image. Such overrides should not happen on perf bots. The
# package can also map to multiple apks if splits are used. In all cases, we
# want the directory that contains base.apk.
for apk in apks:
if apk.endswith('/base.apk'):
return apk[:-9]
return None
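# Example (illustration only), using the layout sketched above: if
# GetApplicationPaths returns ['/data/app/$package.apps.chrome-1/base.apk'],
# stripping the 9-character '/base.apk' suffix yields
# '/data/app/$package.apps.chrome-1' as the browser directory.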
@property
def profile_directory(self):
return self._platform_backend.GetProfileDir(self._backend_settings.package)
@property
def last_modification_time(self):
if self._local_apk:
return os.path.getmtime(self._local_apk)
return -1
def _GetPathsForOsPageCacheFlushing(self):
return [self.profile_directory, self.browser_directory]
def _InitPlatformIfNeeded(self):
pass
def _SetupProfile(self):
if self._browser_options.dont_override_profile:
return
# Just remove the existing profile if we don't have any files to copy over.
# This is because PushProfile does not support pushing completely empty
# directories.
profile_files_to_copy = self._browser_options.profile_files_to_copy
if not self._browser_options.profile_dir and not profile_files_to_copy:
self._platform_backend.RemoveProfile(
self._backend_settings.package,
self._backend_settings.profile_ignore_list)
return
with _ProfileWithExtraFiles(self._browser_options.profile_dir,
profile_files_to_copy) as profile_dir:
self._platform_backend.PushProfile(self._backend_settings.package,
profile_dir)
def SetUpEnvironment(self, browser_options):
super(PossibleAndroidBrowser, self).SetUpEnvironment(browser_options)
self._platform_backend.DismissCrashDialogIfNeeded()
device = self._platform_backend.device
startup_args = self.GetBrowserStartupArgs(self._browser_options)
device.adb.Logcat(clear=True)
# use legacy commandline path if in compatibility mode
self._flag_changer = flag_changer.FlagChanger(
device, self._backend_settings.command_line_name, use_legacy_path=
compat_mode_options.LEGACY_COMMAND_LINE_PATH in
browser_options.compatibility_mode)
self._flag_changer.ReplaceFlags(startup_args, log_flags=False)
formatted_args = format_for_logging.ShellFormat(
startup_args, trim=browser_options.trim_logs)
logging.info('Flags set on device were %s', formatted_args)
# Stop any existing browser found already running on the device. This is
# done *after* setting the command line flags, in case some other Android
# process manages to trigger Chrome's startup before we do.
self._platform_backend.StopApplication(self._backend_settings.package)
self._SetupProfile()
# Remove any old crash dumps
self._platform_backend.device.RemovePath(
self._platform_backend.GetDumpLocation(self._backend_settings.package),
recursive=True, force=True)
def _TearDownEnvironment(self):
self._RestoreCommandLineFlags()
def _RestoreCommandLineFlags(self):
if self._flag_changer is not None:
try:
self._flag_changer.Restore()
finally:
self._flag_changer = None
def Create(self):
"""Launch the browser on the device and return a Browser object."""
return self._GetBrowserInstance(existing=False)
def FindExistingBrowser(self):
"""Find a browser running on the device and bind a Browser object to it.
The returned Browser object will only be bound to a running browser
instance whose package name matches the one specified by the backend
settings of this possible browser.
A BrowserGoneException is raised if the browser cannot be found.
"""
return self._GetBrowserInstance(existing=True)
def _GetBrowserInstance(self, existing):
# Init the LocalFirstBinaryManager if this is the first time we're creating
# a browser. Note that we use the host's OS and architecture since the
# retrieved dependencies are used on the host, not the device.
if local_first_binary_manager.LocalFirstBinaryManager.NeedsInit():
local_first_binary_manager.LocalFirstBinaryManager.Init(
self._build_dir, self._local_apk, platform.system().lower(),
platform.machine())
browser_backend = android_browser_backend.AndroidBrowserBackend(
self._platform_backend, self._browser_options,
self.browser_directory, self.profile_directory,
self._backend_settings,
build_dir=self._build_dir)
try:
return browser.Browser(
browser_backend, self._platform_backend, startup_args=(),
find_existing=existing)
except Exception:
browser_backend.Close()
raise
def GetBrowserStartupArgs(self, browser_options):
startup_args = chrome_startup_args.GetFromBrowserOptions(browser_options)
# use the flag `--ignore-certificate-errors` if in compatibility mode
supports_spki_list = (
self._backend_settings.supports_spki_list and
compat_mode_options.IGNORE_CERTIFICATE_ERROR
not in browser_options.compatibility_mode)
startup_args.extend(chrome_startup_args.GetReplayArgs(
self._platform_backend.network_controller_backend,
supports_spki_list=supports_spki_list))
startup_args.append('--enable-remote-debugging')
startup_args.append('--disable-fre')
startup_args.append('--disable-external-intent-requests')
# Need to specify the user profile directory for
# --ignore-certificate-errors-spki-list to work.
startup_args.append('--user-data-dir=' + self.profile_directory)
# Needed so that non-browser-process crashes avoid automatic dump upload
# and subsequent deletion. The extra "Crashpad" is necessary because
# crashpad_stackwalker.py is hard-coded to look for a "Crashpad" directory
# in the dump directory that it is provided.
startup_args.append('--breakpad-dump-location=' + posixpath.join(
self._platform_backend.GetDumpLocation(self._backend_settings.package),
'Crashpad'))
return startup_args
def SupportsOptions(self, browser_options):
if len(browser_options.extensions_to_load) != 0:
return False
return True
def IsAvailable(self):
"""Returns True if the browser is or can be installed on the platform."""
has_local_apks = self._local_apk and (
not self._backend_settings.requires_embedder or self._support_apk_list)
return has_local_apks or self.platform.CanLaunchApplication(
self.settings.package)
@decorators.Cache
def UpdateExecutableIfNeeded(self):
# TODO(crbug.com/815133): This logic should belong to backend_settings.
for apk in self._support_apk_list:
logging.warn('Installing %s on device if needed.', apk)
self.platform.InstallApplication(apk)
apk_name = self._backend_settings.GetApkName(
self._platform_backend.device)
is_webview_apk = apk_name is not None and ('SystemWebView' in apk_name or
'system_webview' in apk_name or
'TrichromeWebView' in apk_name or
'trichrome_webview' in apk_name)
# The WebView fallback logic prevents sideloaded WebView APKs from being
# installed and set as the WebView implementation correctly. Disable the
# fallback logic before installing the WebView APK to make sure the fallback
# logic doesn't interfere.
if is_webview_apk:
self._platform_backend.device.SetWebViewFallbackLogic(False)
if self._local_apk:
logging.warn('Installing %s on device if needed.', self._local_apk)
self.platform.InstallApplication(
self._local_apk, modules=self._modules_to_install)
if self._compile_apk:
package_name = apk_helper.GetPackageName(self._local_apk)
logging.warn('Compiling %s.', package_name)
self._platform_backend.device.RunShellCommand(
['cmd', 'package', 'compile', '-m', self._compile_apk, '-f',
package_name],
check_return=True)
sdk_version = self._platform_backend.device.build_version_sdk
# Bundles are in the ../bin directory, so it's safer to just check the
# correct name is part of the path.
is_monochrome = apk_name is not None and (apk_name == 'Monochrome.apk' or
'monochrome_bundle' in apk_name)
if ((is_webview_apk or
(is_monochrome and sdk_version < version_codes.Q)) and
sdk_version >= version_codes.NOUGAT):
package_name = apk_helper.GetPackageName(self._local_apk)
logging.warn('Setting %s as WebView implementation.', package_name)
self._platform_backend.device.SetWebViewImplementation(package_name)
def GetTypExpectationsTags(self):
tags = super(PossibleAndroidBrowser, self).GetTypExpectationsTags()
if 'webview' in self.browser_type:
tags.append('android-webview')
else:
tags.append('android-not-webview')
if 'weblayer' in self.browser_type:
tags.append('android-weblayer')
return tags
def SelectDefaultBrowser(possible_browsers):
"""Return the newest possible browser."""
if not possible_browsers:
return None
return max(possible_browsers, key=lambda b: b.last_modification_time)
def CanFindAvailableBrowsers():
return android_device.CanDiscoverDevices()
def _CanPossiblyHandlePath(apk_path):
if not apk_path:
return False
try:
apk_helper.ToHelper(apk_path)
return True
except apk_helper.ApkHelperError:
return False
def FindAllBrowserTypes():
browser_types = [b.browser_type for b in ANDROID_BACKEND_SETTINGS]
return browser_types + ['exact', 'reference']
def _FetchReferenceApk(android_platform, is_bundle=False):
"""Fetch the apk for reference browser type from gcloud.
Local path to the apk will be returned upon success.
Otherwise, None will be returned.
"""
os_version = dependency_util.GetChromeApkOsVersion(
android_platform.GetOSVersionName())
if is_bundle:
os_version += '_bundle'
arch = android_platform.GetArchName()
try:
reference_build = binary_manager.FetchPath(
'chrome_stable', 'android', arch, os_version)
if reference_build and os.path.exists(reference_build):
return reference_build
except binary_manager.NoPathFoundError:
logging.warning('Cannot find path for reference apk for device %s',
android_platform.GetDeviceId())
except binary_manager.CloudStorageError:
logging.warning('Failed to download reference apk for device %s',
android_platform.GetDeviceId())
return None
def _GetReferenceAndroidBrowser(android_platform, finder_options):
reference_build = _FetchReferenceApk(android_platform)
if reference_build:
return PossibleAndroidBrowser(
'reference',
finder_options,
android_platform,
android_browser_backend_settings.ANDROID_CHROME,
reference_build)
def _FindAllPossibleBrowsers(finder_options, android_platform):
"""Testable version of FindAllAvailableBrowsers."""
if not android_platform:
return []
possible_browsers = []
for apk in finder_options.webview_embedder_apk:
if not os.path.exists(apk):
raise exceptions.PathMissingError(
'Unable to find apk specified by --webview-embedder-apk=%s' % apk)
# Add the exact APK if given.
if _CanPossiblyHandlePath(finder_options.browser_executable):
if not os.path.exists(finder_options.browser_executable):
raise exceptions.PathMissingError(
'Unable to find exact apk specified by --browser-executable=%s' %
finder_options.browser_executable)
package_name = apk_helper.GetPackageName(finder_options.browser_executable)
try:
backend_settings = next(
b for b in ANDROID_BACKEND_SETTINGS if b.package == package_name)
except StopIteration:
raise exceptions.UnknownPackageError(
'%s specified by --browser-executable has an unknown package: %s' %
(finder_options.browser_executable, package_name))
possible_browsers.append(PossibleAndroidBrowser(
'exact',
finder_options,
android_platform,
backend_settings,
finder_options.browser_executable))
if finder_options.IsBrowserTypeRelevant('reference'):
reference_browser = _GetReferenceAndroidBrowser(
android_platform, finder_options)
if reference_browser:
possible_browsers.append(reference_browser)
# Add any other known available browsers.
for settings in ANDROID_BACKEND_SETTINGS:
if finder_options.IsBrowserTypeRelevant(settings.browser_type):
local_apk = None
if finder_options.IsBrowserTypeReference():
local_apk = _FetchReferenceApk(
android_platform, finder_options.IsBrowserTypeBundle())
if settings.IsWebView():
p_browser = PossibleAndroidBrowser(
settings.browser_type, finder_options, android_platform, settings,
local_apk=local_apk, target_os='android_webview')
else:
p_browser = PossibleAndroidBrowser(
settings.browser_type, finder_options, android_platform, settings,
local_apk=local_apk)
if p_browser.IsAvailable():
possible_browsers.append(p_browser)
return possible_browsers
def FindAllAvailableBrowsers(finder_options, device):
"""Finds all the possible browsers on one device.
The device is either the only device on the host platform,
or |finder_options| specifies a particular device.
"""
if not isinstance(device, android_device.AndroidDevice):
return []
try:
android_platform = telemetry_platform.GetPlatformForDevice(
device, finder_options)
return _FindAllPossibleBrowsers(finder_options, android_platform)
except base_error.BaseError as e:
logging.error('Unable to find browsers on %s: %s', device.device_id, str(e))
ps_output = subprocess.check_output(['ps', '-ef'])
logging.error('Ongoing processes:\n%s', ps_output)
return []
|
from BaseDaemon import BaseDaemon
import time
import os
class SecureShellServer(BaseDaemon):
"""
Controls the OpenSSH server and shellinabox daemons
"""
def start(self, interface, settings):
"""
Start processes
:param interface:
:param settings:
:return:
"""
# openssh-server
if 'openssh' in settings:
if settings['openssh']:
# copy /etc/ssh/sshd_raspap into /etc/ssh/sshd_config preserving permissions
if os.path.isfile('/etc/ssh/sshd_raspap'):
with open('/etc/ssh/sshd_raspap') as src, open('/etc/ssh/sshd_config', 'w') as dst:
    dst.write(src.read())
self.callSystemService('sshd', 'restart')
else:
self.callSystemService('sshd', 'stop')
# shell in a box support
if 'shellinabox' in settings and settings['shellinabox']:
port = 8021
# allow custom port
if 'shellinabox_port' in settings:
port = int(settings['shellinabox_port'])
result, output = self.app.executeCommand('cd /tmp && screen -d -m shellinaboxd --port ' + str(port) + ' --no-beep -d', shell=True)
time.sleep(5)
if not self.find_process('shellinaboxd', withoutSudo=False):
self.app.logging.output('shellinaboxd process unexpectedly quit', interface)
return False
return True
def finish(self):
"""
Shutdown processes
:return:
"""
self.callSystemService('sshd', 'stop')
return self.find_and_kill_process('shellinaboxd', self.interface)
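# Illustrative only: the shape of the 'settings' mapping that start() reads.
# The keys mirror the lookups above; this is an assumption, not a documented schema.
EXAMPLE_SETTINGS = {
    'openssh': True,           # restore sshd_config from sshd_raspap and restart sshd
    'shellinabox': True,       # launch shellinaboxd in a detached screen session
    'shellinabox_port': 8022,  # optional; the code defaults to port 8021
}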
|
# Copyright (c) 2014 The Johns Hopkins University/Applied Physics Laboratory
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from testtools import TestCase
from kmip.core.messages.contents import ProtocolVersion
from kmip.core.utils import BytearrayStream
class TestProtocolVersion(TestCase):
def setUp(self):
super(TestProtocolVersion, self).setUp()
self.major_default = ProtocolVersion.ProtocolVersionMajor()
self.minor_default = ProtocolVersion.ProtocolVersionMinor()
self.major = ProtocolVersion.ProtocolVersionMajor(1)
self.minor = ProtocolVersion.ProtocolVersionMinor(1)
self.encoding_default = BytearrayStream((
b'\x42\x00\x69\x01\x00\x00\x00\x20\x42\x00\x6A\x02\x00\x00\x00\x04'
b'\x00\x00\x00\x00\x00\x00\x00\x00\x42\x00\x6B\x02\x00\x00\x00\x04'
b'\x00\x00\x00\x00\x00\x00\x00\x00'))
self.encoding = BytearrayStream((
b'\x42\x00\x69\x01\x00\x00\x00\x20\x42\x00\x6A\x02\x00\x00\x00\x04'
b'\x00\x00\x00\x01\x00\x00\x00\x00\x42\x00\x6B\x02\x00\x00\x00\x04'
b'\x00\x00\x00\x01\x00\x00\x00\x00'))
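# TTLV layout of self.encoding above (tag / type / length / value):
#   42 00 69  01  00 00 00 20   ProtocolVersion, Structure, 32-byte payload
#   42 00 6A  02  00 00 00 04   ProtocolVersionMajor, Integer, 4-byte value
#   00 00 00 01  00 00 00 00      value 1, padded to an 8-byte boundary
#   42 00 6B  02  00 00 00 04   ProtocolVersionMinor, Integer, 4-byte value
#   00 00 00 01  00 00 00 00      value 1, padded to an 8-byte boundary
# self.encoding_default is identical except that both integer values are 0.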
def tearDown(self):
super(TestProtocolVersion, self).tearDown()
def _test_init(self, protocol_version_major, protocol_version_minor):
protocol_version = ProtocolVersion(
protocol_version_major, protocol_version_minor)
if protocol_version_major is None:
self.assertEqual(ProtocolVersion.ProtocolVersionMajor(),
protocol_version.protocol_version_major)
else:
self.assertEqual(protocol_version_major,
protocol_version.protocol_version_major)
if protocol_version_minor is None:
self.assertEqual(ProtocolVersion.ProtocolVersionMinor(),
protocol_version.protocol_version_minor)
else:
self.assertEqual(protocol_version_minor,
protocol_version.protocol_version_minor)
def test_init_with_none(self):
self._test_init(None, None)
def test_init_with_args(self):
major = ProtocolVersion.ProtocolVersionMajor(1)
minor = ProtocolVersion.ProtocolVersionMinor(0)
self._test_init(major, minor)
def test_validate_on_invalid_protocol_version_major(self):
major = "invalid"
minor = ProtocolVersion.ProtocolVersionMinor(0)
args = [major, minor]
self.assertRaisesRegex(
TypeError, "invalid protocol version major", self._test_init,
*args)
def test_validate_on_invalid_protocol_version_minor(self):
major = ProtocolVersion.ProtocolVersionMajor(1)
minor = "invalid"
args = [major, minor]
self.assertRaisesRegex(
TypeError, "invalid protocol version minor", self._test_init,
*args)
def _test_read(self, stream, major, minor):
protocol_version = ProtocolVersion()
protocol_version.read(stream)
msg = "protocol version major decoding mismatch"
msg += "; expected {0}, received {1}".format(
major, protocol_version.protocol_version_major)
self.assertEqual(major, protocol_version.protocol_version_major, msg)
msg = "protocol version minor decoding mismatch"
msg += "; expected {0}, received {1}".format(
minor, protocol_version.protocol_version_minor)
self.assertEqual(minor, protocol_version.protocol_version_minor, msg)
def test_read_with_none(self):
self._test_read(self.encoding_default, self.major_default,
self.minor_default)
def test_read_with_args(self):
self._test_read(self.encoding, self.major, self.minor)
def _test_write(self, stream_expected, major, minor):
stream_observed = BytearrayStream()
protocol_version = ProtocolVersion(major, minor)
protocol_version.write(stream_observed)
length_expected = len(stream_expected)
length_observed = len(stream_observed)
msg = "encoding lengths not equal"
msg += "; expected {0}, received {1}".format(
length_expected, length_observed)
self.assertEqual(length_expected, length_observed, msg)
msg = "encoding mismatch"
msg += ";\nexpected:\n{0}\nreceived:\n{1}".format(
stream_expected, stream_observed)
self.assertEqual(stream_expected, stream_observed, msg)
def test_write_with_none(self):
self._test_write(self.encoding_default, self.major_default,
self.minor_default)
def test_write_with_args(self):
self._test_write(self.encoding, self.major, self.minor)
def test_equal_on_equal(self):
a = ProtocolVersion.create(1, 0)
b = ProtocolVersion.create(1, 0)
self.assertTrue(a == b)
def test_equal_on_not_equal(self):
a = ProtocolVersion.create(1, 0)
b = ProtocolVersion.create(0, 1)
self.assertFalse(a == b)
def test_equal_on_type_mismatch(self):
a = ProtocolVersion.create(1, 0)
b = "invalid"
self.assertFalse(a == b)
def test_not_equal_on_equal(self):
a = ProtocolVersion.create(1, 0)
b = ProtocolVersion.create(1, 0)
self.assertFalse(a != b)
def test_not_equal_on_not_equal(self):
a = ProtocolVersion.create(1, 0)
b = ProtocolVersion.create(0, 1)
self.assertTrue(a != b)
def test_not_equal_on_type_mismatch(self):
a = ProtocolVersion.create(1, 0)
b = "invalid"
self.assertTrue(a != b)
def test_less_than(self):
"""
Test that the less than operator returns True/False when comparing
two different ProtocolVersions.
"""
a = ProtocolVersion.create(1, 0)
b = ProtocolVersion.create(1, 1)
c = ProtocolVersion.create(2, 0)
d = ProtocolVersion.create(0, 2)
self.assertTrue(a < b)
self.assertFalse(b < a)
self.assertFalse(a < a)
self.assertTrue(a < c)
self.assertFalse(c < a)
self.assertFalse(c < d)
self.assertTrue(d < c)
def test_greater_than(self):
"""
Test that the greater than operator returns True/False when
comparing two different ProtocolVersions.
"""
a = ProtocolVersion.create(1, 0)
b = ProtocolVersion.create(1, 1)
c = ProtocolVersion.create(2, 0)
d = ProtocolVersion.create(0, 2)
self.assertFalse(a > b)
self.assertTrue(b > a)
self.assertFalse(a > a)
self.assertFalse(a > c)
self.assertTrue(c > a)
self.assertTrue(c > d)
self.assertFalse(d > c)
def test_less_than_or_equal(self):
"""
Test that the less than or equal operator returns True/False when
comparing two different ProtocolVersions.
"""
a = ProtocolVersion.create(1, 0)
b = ProtocolVersion.create(1, 1)
c = ProtocolVersion.create(2, 0)
d = ProtocolVersion.create(0, 2)
self.assertTrue(a <= b)
self.assertFalse(b <= a)
self.assertTrue(a <= a)
self.assertTrue(a <= c)
self.assertFalse(c <= a)
self.assertFalse(c <= d)
self.assertTrue(d <= c)
def test_greater_than_or_equal(self):
"""
Test that the greater than or equal operator returns True/False when
comparing two different ProtocolVersions.
"""
a = ProtocolVersion.create(1, 0)
b = ProtocolVersion.create(1, 1)
c = ProtocolVersion.create(2, 0)
d = ProtocolVersion.create(0, 2)
self.assertFalse(a >= b)
self.assertTrue(b >= a)
self.assertTrue(a >= a)
self.assertFalse(a >= c)
self.assertTrue(c >= a)
self.assertTrue(c >= d)
self.assertFalse(d >= c)
def test_repr(self):
a = ProtocolVersion.create(1, 0)
self.assertEqual("1.0", "{0}".format(a))
def _test_create(self, major, minor):
protocol_version = ProtocolVersion.create(major, minor)
if major is None:
expected = ProtocolVersion.ProtocolVersionMajor()
else:
expected = ProtocolVersion.ProtocolVersionMajor(major)
self.assertEqual(expected, protocol_version.protocol_version_major)
if minor is None:
expected = ProtocolVersion.ProtocolVersionMinor()
else:
expected = ProtocolVersion.ProtocolVersionMinor(minor)
self.assertEqual(expected, protocol_version.protocol_version_minor)
def test_create_with_none(self):
self._test_create(None, None)
def test_create_with_args(self):
self._test_create(1, 0)
|
"""
A test spanning all the capabilities of all the serializers.
This class defines sample data and a dynamically generated
test case that is capable of testing the capabilities of
the serializers. This includes all valid data values, plus
forward, backwards and self references.
"""
from __future__ import unicode_literals
import datetime
import decimal
from unittest import expectedFailure, skipUnless
try:
import yaml
except ImportError:
yaml = None
from django.core import serializers
from django.core.serializers import SerializerDoesNotExist
from django.core.serializers.base import DeserializationError
from django.core.serializers.xml_serializer import DTDForbidden
from django.db import connection, models
from django.http import HttpResponse
from django.test import TestCase
from django.utils import six
from django.utils.encoding import force_text
from django.utils.functional import curry
from .models import (BinaryData, BooleanData, CharData, DateData, DateTimeData, EmailData,
FileData, FilePathData, DecimalData, FloatData, IntegerData, IPAddressData,
GenericIPAddressData, NullBooleanData, PositiveIntegerData,
PositiveSmallIntegerData, SlugData, SmallData, TextData, TimeData,
GenericData, Anchor, UniqueAnchor, FKData, M2MData, O2OData,
FKSelfData, M2MSelfData, FKDataToField, FKDataToO2O, M2MIntermediateData,
Intermediate, BooleanPKData, CharPKData, EmailPKData, FilePathPKData,
DecimalPKData, FloatPKData, IntegerPKData, IPAddressPKData,
GenericIPAddressPKData, PositiveIntegerPKData,
PositiveSmallIntegerPKData, SlugPKData, SmallPKData,
AutoNowDateTimeData, ModifyingSaveData, InheritAbstractModel, BaseModel,
ExplicitInheritBaseModel, InheritBaseModel, ProxyBaseModel,
ProxyProxyBaseModel, BigIntegerData, LengthModel, Tag, ComplexModel,
NaturalKeyAnchor, FKDataNaturalKey)
# A set of functions that can be used to recreate
# test data objects of various kinds.
# The save method is a raw base model save, to make
# sure that the data in the database matches the
# exact test case.
def data_create(pk, klass, data):
instance = klass(id=pk)
instance.data = data
models.Model.save_base(instance, raw=True)
return [instance]
def generic_create(pk, klass, data):
instance = klass(id=pk)
instance.data = data[0]
models.Model.save_base(instance, raw=True)
for tag in data[1:]:
instance.tags.create(data=tag)
return [instance]
def fk_create(pk, klass, data):
instance = klass(id=pk)
setattr(instance, 'data_id', data)
models.Model.save_base(instance, raw=True)
return [instance]
def m2m_create(pk, klass, data):
instance = klass(id=pk)
models.Model.save_base(instance, raw=True)
instance.data = data
return [instance]
def im2m_create(pk, klass, data):
instance = klass(id=pk)
models.Model.save_base(instance, raw=True)
return [instance]
def im_create(pk, klass, data):
instance = klass(id=pk)
instance.right_id = data['right']
instance.left_id = data['left']
if 'extra' in data:
instance.extra = data['extra']
models.Model.save_base(instance, raw=True)
return [instance]
def o2o_create(pk, klass, data):
instance = klass()
instance.data_id = data
models.Model.save_base(instance, raw=True)
return [instance]
def pk_create(pk, klass, data):
instance = klass()
instance.data = data
models.Model.save_base(instance, raw=True)
return [instance]
def inherited_create(pk, klass, data):
instance = klass(id=pk,**data)
# This isn't a raw save because:
# 1) we're testing inheritance, not field behavior, so none
# of the field values need to be protected.
# 2) saving the child class and having the parent created
# automatically is easier than manually creating both.
models.Model.save(instance)
created = [instance]
for klass,field in instance._meta.parents.items():
created.append(klass.objects.get(id=pk))
return created
# A set of functions that can be used to compare
# test data objects of various kinds
def data_compare(testcase, pk, klass, data):
instance = klass.objects.get(id=pk)
if klass == BinaryData and data is not None:
testcase.assertEqual(bytes(data), bytes(instance.data),
"Objects with PK=%d not equal; expected '%s' (%s), got '%s' (%s)" % (
pk, repr(bytes(data)), type(data), repr(bytes(instance.data)),
type(instance.data))
)
else:
testcase.assertEqual(data, instance.data,
"Objects with PK=%d not equal; expected '%s' (%s), got '%s' (%s)" % (
pk, data, type(data), instance, type(instance.data))
)
def generic_compare(testcase, pk, klass, data):
instance = klass.objects.get(id=pk)
testcase.assertEqual(data[0], instance.data)
testcase.assertEqual(data[1:], [t.data for t in instance.tags.order_by('id')])
def fk_compare(testcase, pk, klass, data):
instance = klass.objects.get(id=pk)
testcase.assertEqual(data, instance.data_id)
def m2m_compare(testcase, pk, klass, data):
instance = klass.objects.get(id=pk)
testcase.assertEqual(data, [obj.id for obj in instance.data.order_by('id')])
def im2m_compare(testcase, pk, klass, data):
instance = klass.objects.get(id=pk)
    # Nothing else to check; the get() above already verifies that the instance exists.
def im_compare(testcase, pk, klass, data):
instance = klass.objects.get(id=pk)
testcase.assertEqual(data['left'], instance.left_id)
testcase.assertEqual(data['right'], instance.right_id)
if 'extra' in data:
testcase.assertEqual(data['extra'], instance.extra)
else:
testcase.assertEqual("doesn't matter", instance.extra)
def o2o_compare(testcase, pk, klass, data):
instance = klass.objects.get(data=data)
testcase.assertEqual(data, instance.data_id)
def pk_compare(testcase, pk, klass, data):
instance = klass.objects.get(data=data)
testcase.assertEqual(data, instance.data)
def inherited_compare(testcase, pk, klass, data):
instance = klass.objects.get(id=pk)
for key,value in data.items():
testcase.assertEqual(value, getattr(instance,key))
# Define some data types. Each data type is
# actually a pair of functions; one to create
# and one to compare objects of that type
data_obj = (data_create, data_compare)
generic_obj = (generic_create, generic_compare)
fk_obj = (fk_create, fk_compare)
m2m_obj = (m2m_create, m2m_compare)
im2m_obj = (im2m_create, im2m_compare)
im_obj = (im_create, im_compare)
o2o_obj = (o2o_create, o2o_compare)
pk_obj = (pk_create, pk_compare)
inherited_obj = (inherited_create, inherited_compare)
test_data = [
# Format: (data type, PK value, Model Class, data)
(data_obj, 1, BinaryData, six.memoryview(b"\x05\xFD\x00")),
(data_obj, 2, BinaryData, None),
(data_obj, 5, BooleanData, True),
(data_obj, 6, BooleanData, False),
(data_obj, 10, CharData, "Test Char Data"),
(data_obj, 11, CharData, ""),
(data_obj, 12, CharData, "None"),
(data_obj, 13, CharData, "null"),
(data_obj, 14, CharData, "NULL"),
(data_obj, 15, CharData, None),
# (We use something that will fit into a latin1 database encoding here,
# because that is still the default used on many system setups.)
(data_obj, 16, CharData, '\xa5'),
(data_obj, 20, DateData, datetime.date(2006,6,16)),
(data_obj, 21, DateData, None),
(data_obj, 30, DateTimeData, datetime.datetime(2006,6,16,10,42,37)),
(data_obj, 31, DateTimeData, None),
(data_obj, 40, EmailData, "[email protected]"),
(data_obj, 41, EmailData, None),
(data_obj, 42, EmailData, ""),
(data_obj, 50, FileData, 'file:///foo/bar/whiz.txt'),
# (data_obj, 51, FileData, None),
(data_obj, 52, FileData, ""),
(data_obj, 60, FilePathData, "/foo/bar/whiz.txt"),
(data_obj, 61, FilePathData, None),
(data_obj, 62, FilePathData, ""),
(data_obj, 70, DecimalData, decimal.Decimal('12.345')),
(data_obj, 71, DecimalData, decimal.Decimal('-12.345')),
(data_obj, 72, DecimalData, decimal.Decimal('0.0')),
(data_obj, 73, DecimalData, None),
(data_obj, 74, FloatData, 12.345),
(data_obj, 75, FloatData, -12.345),
(data_obj, 76, FloatData, 0.0),
(data_obj, 77, FloatData, None),
(data_obj, 80, IntegerData, 123456789),
(data_obj, 81, IntegerData, -123456789),
(data_obj, 82, IntegerData, 0),
(data_obj, 83, IntegerData, None),
#(XX, ImageData
(data_obj, 90, IPAddressData, "127.0.0.1"),
(data_obj, 91, IPAddressData, None),
(data_obj, 95, GenericIPAddressData, "fe80:1424:2223:6cff:fe8a:2e8a:2151:abcd"),
(data_obj, 96, GenericIPAddressData, None),
(data_obj, 100, NullBooleanData, True),
(data_obj, 101, NullBooleanData, False),
(data_obj, 102, NullBooleanData, None),
(data_obj, 120, PositiveIntegerData, 123456789),
(data_obj, 121, PositiveIntegerData, None),
(data_obj, 130, PositiveSmallIntegerData, 12),
(data_obj, 131, PositiveSmallIntegerData, None),
(data_obj, 140, SlugData, "this-is-a-slug"),
(data_obj, 141, SlugData, None),
(data_obj, 142, SlugData, ""),
(data_obj, 150, SmallData, 12),
(data_obj, 151, SmallData, -12),
(data_obj, 152, SmallData, 0),
(data_obj, 153, SmallData, None),
(data_obj, 160, TextData, """This is a long piece of text.
It contains line breaks.
Several of them.
The end."""),
(data_obj, 161, TextData, ""),
(data_obj, 162, TextData, None),
(data_obj, 170, TimeData, datetime.time(10,42,37)),
(data_obj, 171, TimeData, None),
(generic_obj, 200, GenericData, ['Generic Object 1', 'tag1', 'tag2']),
(generic_obj, 201, GenericData, ['Generic Object 2', 'tag2', 'tag3']),
(data_obj, 300, Anchor, "Anchor 1"),
(data_obj, 301, Anchor, "Anchor 2"),
(data_obj, 302, UniqueAnchor, "UAnchor 1"),
(fk_obj, 400, FKData, 300), # Post reference
(fk_obj, 401, FKData, 500), # Pre reference
(fk_obj, 402, FKData, None), # Empty reference
(m2m_obj, 410, M2MData, []), # Empty set
(m2m_obj, 411, M2MData, [300,301]), # Post reference
(m2m_obj, 412, M2MData, [500,501]), # Pre reference
(m2m_obj, 413, M2MData, [300,301,500,501]), # Pre and Post reference
(o2o_obj, None, O2OData, 300), # Post reference
(o2o_obj, None, O2OData, 500), # Pre reference
(fk_obj, 430, FKSelfData, 431), # Pre reference
(fk_obj, 431, FKSelfData, 430), # Post reference
(fk_obj, 432, FKSelfData, None), # Empty reference
(m2m_obj, 440, M2MSelfData, []),
(m2m_obj, 441, M2MSelfData, []),
(m2m_obj, 442, M2MSelfData, [440, 441]),
(m2m_obj, 443, M2MSelfData, [445, 446]),
(m2m_obj, 444, M2MSelfData, [440, 441, 445, 446]),
(m2m_obj, 445, M2MSelfData, []),
(m2m_obj, 446, M2MSelfData, []),
(fk_obj, 450, FKDataToField, "UAnchor 1"),
(fk_obj, 451, FKDataToField, "UAnchor 2"),
(fk_obj, 452, FKDataToField, None),
(fk_obj, 460, FKDataToO2O, 300),
(im2m_obj, 470, M2MIntermediateData, None),
    # Testing post- and pre-references and extra fields
(im_obj, 480, Intermediate, {'right': 300, 'left': 470}),
(im_obj, 481, Intermediate, {'right': 300, 'left': 490}),
(im_obj, 482, Intermediate, {'right': 500, 'left': 470}),
(im_obj, 483, Intermediate, {'right': 500, 'left': 490}),
(im_obj, 484, Intermediate, {'right': 300, 'left': 470, 'extra': "extra"}),
(im_obj, 485, Intermediate, {'right': 300, 'left': 490, 'extra': "extra"}),
(im_obj, 486, Intermediate, {'right': 500, 'left': 470, 'extra': "extra"}),
(im_obj, 487, Intermediate, {'right': 500, 'left': 490, 'extra': "extra"}),
(im2m_obj, 490, M2MIntermediateData, []),
(data_obj, 500, Anchor, "Anchor 3"),
(data_obj, 501, Anchor, "Anchor 4"),
(data_obj, 502, UniqueAnchor, "UAnchor 2"),
(pk_obj, 601, BooleanPKData, True),
(pk_obj, 602, BooleanPKData, False),
(pk_obj, 610, CharPKData, "Test Char PKData"),
# (pk_obj, 620, DatePKData, datetime.date(2006,6,16)),
# (pk_obj, 630, DateTimePKData, datetime.datetime(2006,6,16,10,42,37)),
(pk_obj, 640, EmailPKData, "[email protected]"),
# (pk_obj, 650, FilePKData, 'file:///foo/bar/whiz.txt'),
(pk_obj, 660, FilePathPKData, "/foo/bar/whiz.txt"),
(pk_obj, 670, DecimalPKData, decimal.Decimal('12.345')),
(pk_obj, 671, DecimalPKData, decimal.Decimal('-12.345')),
(pk_obj, 672, DecimalPKData, decimal.Decimal('0.0')),
(pk_obj, 673, FloatPKData, 12.345),
(pk_obj, 674, FloatPKData, -12.345),
(pk_obj, 675, FloatPKData, 0.0),
(pk_obj, 680, IntegerPKData, 123456789),
(pk_obj, 681, IntegerPKData, -123456789),
(pk_obj, 682, IntegerPKData, 0),
# (XX, ImagePKData
(pk_obj, 690, IPAddressPKData, "127.0.0.1"),
(pk_obj, 695, GenericIPAddressPKData, "fe80:1424:2223:6cff:fe8a:2e8a:2151:abcd"),
# (pk_obj, 700, NullBooleanPKData, True),
# (pk_obj, 701, NullBooleanPKData, False),
(pk_obj, 720, PositiveIntegerPKData, 123456789),
(pk_obj, 730, PositiveSmallIntegerPKData, 12),
(pk_obj, 740, SlugPKData, "this-is-a-slug"),
(pk_obj, 750, SmallPKData, 12),
(pk_obj, 751, SmallPKData, -12),
(pk_obj, 752, SmallPKData, 0),
# (pk_obj, 760, TextPKData, """This is a long piece of text.
# It contains line breaks.
# Several of them.
# The end."""),
# (pk_obj, 770, TimePKData, datetime.time(10,42,37)),
# (pk_obj, 790, XMLPKData, "<foo></foo>"),
(data_obj, 800, AutoNowDateTimeData, datetime.datetime(2006,6,16,10,42,37)),
(data_obj, 810, ModifyingSaveData, 42),
(inherited_obj, 900, InheritAbstractModel, {'child_data':37,'parent_data':42}),
(inherited_obj, 910, ExplicitInheritBaseModel, {'child_data':37,'parent_data':42}),
(inherited_obj, 920, InheritBaseModel, {'child_data':37,'parent_data':42}),
(data_obj, 1000, BigIntegerData, 9223372036854775807),
(data_obj, 1001, BigIntegerData, -9223372036854775808),
(data_obj, 1002, BigIntegerData, 0),
(data_obj, 1003, BigIntegerData, None),
(data_obj, 1004, LengthModel, 0),
(data_obj, 1005, LengthModel, 1),
]
natural_key_test_data = [
    (data_obj, 1100, NaturalKeyAnchor, "Natural Key Anchor"),
(fk_obj, 1101, FKDataNaturalKey, 1100),
(fk_obj, 1102, FKDataNaturalKey, None),
]
# Because Oracle treats the empty string as NULL, Oracle is expected to fail
# when field.empty_strings_allowed is True and the value is None; skip these
# tests.
if connection.features.interprets_empty_strings_as_nulls:
test_data = [data for data in test_data
if not (data[0] == data_obj and
data[2]._meta.get_field('data').empty_strings_allowed and
data[3] is None)]
# Regression test for #8651 -- a FK to an object with PK of 0.
# This won't work on MySQL since it won't let you create an object
# with a primary key of 0.
if connection.features.allows_primary_key_0:
test_data.extend([
(data_obj, 0, Anchor, "Anchor 0"),
(fk_obj, 465, FKData, 0),
])
# Dynamically create serializer tests to ensure that all
# registered serializers are automatically tested.
class SerializerTests(TestCase):
def test_get_unknown_serializer(self):
"""
#15889: get_serializer('nonsense') raises a SerializerDoesNotExist
"""
with self.assertRaises(SerializerDoesNotExist):
serializers.get_serializer("nonsense")
with self.assertRaises(KeyError):
serializers.get_serializer("nonsense")
# SerializerDoesNotExist is instantiated with the nonexistent format
with self.assertRaises(SerializerDoesNotExist) as cm:
serializers.get_serializer("nonsense")
self.assertEqual(cm.exception.args, ("nonsense",))
    def test_unregister_unknown_serializer(self):
with self.assertRaises(SerializerDoesNotExist):
serializers.unregister_serializer("nonsense")
    def test_get_unknown_deserializer(self):
with self.assertRaises(SerializerDoesNotExist):
serializers.get_deserializer("nonsense")
def test_json_deserializer_exception(self):
with self.assertRaises(DeserializationError):
for obj in serializers.deserialize("json", """[{"pk":1}"""):
pass
@skipUnless(yaml, "PyYAML not installed")
def test_yaml_deserializer_exception(self):
with self.assertRaises(DeserializationError):
for obj in serializers.deserialize("yaml", "{"):
pass
def test_serialize_proxy_model(self):
BaseModel.objects.create(parent_data=1)
base_objects = BaseModel.objects.all()
proxy_objects = ProxyBaseModel.objects.all()
proxy_proxy_objects = ProxyProxyBaseModel.objects.all()
base_data = serializers.serialize("json", base_objects)
proxy_data = serializers.serialize("json", proxy_objects)
proxy_proxy_data = serializers.serialize("json", proxy_proxy_objects)
self.assertEqual(base_data, proxy_data.replace('proxy', ''))
self.assertEqual(base_data, proxy_proxy_data.replace('proxy', ''))
def serializerTest(format, self):
# Create all the objects defined in the test data
objects = []
instance_count = {}
    for (func, pk, klass, datum) in test_data:
        with connection.constraint_checks_disabled():
            objects.extend(func[0](pk, klass, datum))
        # Track every model class we created so the count check below is not a no-op
        instance_count[klass] = 0
    # Get a count of the number of objects created for each class
    for klass in instance_count:
        instance_count[klass] = klass.objects.count()
# Add the generic tagged objects to the object list
objects.extend(Tag.objects.all())
# Serialize the test database
serialized_data = serializers.serialize(format, objects, indent=2)
for obj in serializers.deserialize(format, serialized_data):
obj.save()
# Assert that the deserialized data is the same
# as the original source
for (func, pk, klass, datum) in test_data:
func[1](self, pk, klass, datum)
# Assert that the number of objects deserialized is the
# same as the number that was serialized.
for klass, count in instance_count.items():
self.assertEqual(count, klass.objects.count())
if connection.vendor == 'mysql' and six.PY3:
# Existing MySQL DB-API drivers fail on binary data.
serializerTest = expectedFailure(serializerTest)
def naturalKeySerializerTest(format, self):
# Create all the objects defined in the test data
objects = []
instance_count = {}
    for (func, pk, klass, datum) in natural_key_test_data:
        with connection.constraint_checks_disabled():
            objects.extend(func[0](pk, klass, datum))
        # Track every model class we created so the count check below is not a no-op
        instance_count[klass] = 0
    # Get a count of the number of objects created for each class
    for klass in instance_count:
        instance_count[klass] = klass.objects.count()
# Serialize the test database
serialized_data = serializers.serialize(format, objects, indent=2,
use_natural_keys=True)
for obj in serializers.deserialize(format, serialized_data):
obj.save()
# Assert that the deserialized data is the same
# as the original source
for (func, pk, klass, datum) in natural_key_test_data:
func[1](self, pk, klass, datum)
# Assert that the number of objects deserialized is the
# same as the number that was serialized.
for klass, count in instance_count.items():
self.assertEqual(count, klass.objects.count())
def fieldsTest(format, self):
obj = ComplexModel(field1='first', field2='second', field3='third')
obj.save_base(raw=True)
# Serialize then deserialize the test database
serialized_data = serializers.serialize(format, [obj], indent=2, fields=('field1','field3'))
result = next(serializers.deserialize(format, serialized_data))
# Check that the deserialized object contains data in only the serialized fields.
self.assertEqual(result.object.field1, 'first')
self.assertEqual(result.object.field2, '')
self.assertEqual(result.object.field3, 'third')
def streamTest(format, self):
obj = ComplexModel(field1='first',field2='second',field3='third')
obj.save_base(raw=True)
# Serialize the test database to a stream
for stream in (six.StringIO(), HttpResponse()):
serializers.serialize(format, [obj], indent=2, stream=stream)
# Serialize normally for a comparison
string_data = serializers.serialize(format, [obj], indent=2)
# Check that the two are the same
if isinstance(stream, six.StringIO):
self.assertEqual(string_data, stream.getvalue())
else:
self.assertEqual(string_data, stream.content.decode('utf-8'))
for format in serializers.get_serializer_formats():
setattr(SerializerTests, 'test_' + format + '_serializer', curry(serializerTest, format))
setattr(SerializerTests, 'test_' + format + '_natural_key_serializer', curry(naturalKeySerializerTest, format))
setattr(SerializerTests, 'test_' + format + '_serializer_fields', curry(fieldsTest, format))
if format != 'python':
setattr(SerializerTests, 'test_' + format + '_serializer_stream', curry(streamTest, format))
class XmlDeserializerSecurityTests(TestCase):
def test_no_dtd(self):
"""
The XML deserializer shouldn't allow a DTD.
This is the most straightforward way to prevent all entity definitions
and avoid both external entities and entity-expansion attacks.
"""
xml = '<?xml version="1.0" standalone="no"?><!DOCTYPE example SYSTEM "http://example.com/example.dtd">'
with self.assertRaises(DTDForbidden):
next(serializers.deserialize('xml', xml))
|
"""
mobile
=======
Devices which (are supposed to) move in a predictable way
Configurable parameters::
{
"area_centre" : e.g. "London, UK" } optional, but both must be specified if either are. Points-to-visit will be within this set.
"area_radius" : e.g. "Manchester, UK" }
"num_locations" : 10 The total number of defined locations
"points_to_visit" : 4 Number of these locations that any individual device can visit (MUST be <= num_locations!). Optional, but if specified then must be at least 2
"update_period" : "PT1H" (optional) How often to update position
"generate_fleet_management_metrics" : False If true then output several properties to do with fleet management (fuel, miles etc.)
"route_plan" : null (optional) If set then use realistic route-planning, with given mode (e.g. "walking", "driving")
"google_maps_key" : "xyz" Google Maps now requires this. Often defined in ../synth_accounts/default.json
}
Device properties created::
{
"latitude" : latitude in degrees as a floating-point number
"longitude" : longitude in degrees as a floating-point number
}
"""
from .device import Device
from common.geo import google_maps, geo
import random, math
import isodate
import logging
MINUTES = 60
HOURS = MINUTES * 60
DAYS = HOURS * 24
WEEKS = DAYS * 7
# Because we need to cache the geo point-picker, we have two levels of hierarchy:
# 1) Mobile behaviour may be instantiated by entirely different, unconnected groups of devices - for example mobile pallets in England and mobile trucks in LA.
# So we call each of these a "loc_group" and cache the (expensive) point picker and location lookups per loc_group.
# 2) All devices in that loc_group then share a set of potential locations they can visit
# But each device sets its own unique fixed itinerary between some of those locations (points[])
# One reason for the above design is to minimise the combinations of routes, which would otherwise drive up our Google Maps bill by the factorial of the number of locations!
# We gradually move from point to point, and dwell for a while at each point
DEFAULT_UPDATE_PERIOD = "PT1H"
MPH_MIN = 5
MPH_MAX = 70
SEND_AT_LEAST_EVERY = 99999999999 # Even when not moving, send an update at least this often (large number for never)
DEFAULT_NUMBER_OF_LOCATIONS = 10
DEFAULT_POINTS_TO_VISIT = 4
DEFAULT_MIN_DWELL_H = 3
DEFAULT_MAX_DWELL_H = 24*14
DEFAULT_STUCK_IN_TRANSIT_MTBF = 1 * WEEKS # amount of travel time, not elapsed time
DEFAULT_STUCK_IN_TRANSIT_RECOVERY_DURATION = 1 * WEEKS
MPG = 8 # USA levels of fuel-efficiency!
LATLON_TO_MILES = 88 # Very approximate conversion factor from "latlong distance in degrees" to miles!
class Location_group():
""" A group of locations that devices might visit """
def __init__(self, context, num_locs, area_centre, area_radius, first_location_at_centre=False):
self.pp = geo.point_picker() # Very expensive, so do only once
area = None
if area_centre != None:
area = [area_centre, area_radius]
self.google_maps_key = context.get("google_maps_key", None)
self.locations = [] # Array of (lon,lat,address)
for L in range(num_locs): # Choose the locations that any devices in this loc group can visit
first_loc = first_location_at_centre and (L==0)
while True:
if first_loc:
(lon,lat) = google_maps.address_to_lon_lat(area_centre)
else:
(lon,lat) = self.pp.pick_point(area, self.google_maps_key)
address_info = google_maps.lon_lat_to_address(lon, lat, self.google_maps_key)
if ("address_postal_code" in address_info) and (("address_postal_town" in address_info) or ("address_route" in address_info)): # Only use locations which have addresses (e.g. don't accidentally pick the sea!)
break
if first_loc: # Avoid infinite loop if first location doesn't have required address info
break
if "address_postal_town" in address_info:
addr = address_info["address_postal_town"] + " " + address_info["address_postal_code"]
else:
addr = address_info["address_route"] + " " + address_info["address_postal_code"]
logging.info("Location "+str(L)+" for mobile devices to visit is "+repr(addr)+" at "+str((lon,lat)))
self.locations.append( (lon,lat, addr) )
self.base_location = random.randrange(0, num_locs)
class Route_Follower():
""" Understands how to follow a route made of individual segments """
def __init__(self, route):
self.route = route
# logging.info("Route_Follower with route: ")
# for r in self.route:
# logging.info(str(r))
self.route_segment = 0
self.seconds_into_segment = 0
def current_latlon(self):
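        # Linearly interpolate between the current segment's start and end
        # coordinates, according to the fraction of its duration elapsed.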
seg = self.route[self.route_segment]
frac = float(self.seconds_into_segment) / seg["duration"]
# logging.info("frac="+str(frac))
lat = seg["start_lat"] * (1.0-frac) + seg["end_lat"] * frac
lon = seg["start_lng"] * (1.0-frac) + seg["end_lng"] * frac
return { "latitude" : lat, "longitude" : lon }
def time_has_passed(self, secs):
# logging.info("time_has_passed("+str(secs)+")")
remaining_secs = secs
while True:
seg = self.route[self.route_segment]
# logging.info("route_segment="+str(self.route_segment)+" duration="+str(seg["duration"])+" seconds_into_segment="+str(self.seconds_into_segment)+" remaining_secs="+str(remaining_secs))
if self.seconds_into_segment + remaining_secs < seg["duration"]:
self.seconds_into_segment += remaining_secs
break
else: # Move to next segment
remaining_secs -= seg["duration"] - self.seconds_into_segment
if self.route_segment >= len(self.route)-1: # If this was the last segment
self.seconds_into_segment = seg["duration"] # go to the end of it
break
else:
self.seconds_into_segment = 0
self.route_segment += 1
# logging.info("Leaving thp() with route_segment = "+str(self.route_segment)+" seconds_into_segment="+str(self.seconds_into_segment)+" remaining_secs="+str(remaining_secs))
def journey_complete(self):
if self.route_segment == len(self.route)-1:
if self.seconds_into_segment >= self.route[self.route_segment]["duration"]:
return True
return False
def total_journey_time(self):
t = 0
for seg in self.route:
t += seg["duration"]
return t
class Mobile(Device):
# Class variables
loc_groups = {}
def __init__(self, instance_name, time, engine, update_callback, context, params):
super(Mobile,self).__init__(instance_name, time, engine, update_callback, context, params)
self.generate_addresses = params["mobile"].get("generate_addresses", False)
self.area_centre = params["mobile"].get("area_centre", None)
self.area_radius = params["mobile"].get("area_radius", None)
num_locs = params["mobile"].get("num_locations", DEFAULT_NUMBER_OF_LOCATIONS)
self.points_to_visit = params["mobile"].get("points_to_visit", DEFAULT_POINTS_TO_VISIT)
assert self.points_to_visit <= num_locs, "for mobile devices, points_to_visit must be <= num_locations"
self.fleet_mgmt = params["mobile"].get("generate_fleet_management_metrics", False)
self.update_period = isodate.parse_duration(params["mobile"].get("update_period", DEFAULT_UPDATE_PERIOD)).total_seconds()
self.route_plan = params["mobile"].get("route_plan", None)
self.dwell_h_min = params["mobile"].get("dwell_h_min", DEFAULT_MIN_DWELL_H) # "dwell" is how long an asset dwells at each target location
self.dwell_h_max = params["mobile"].get("dwell_h_max", DEFAULT_MAX_DWELL_H)
self.stuck_in_transit_mtbf = params["mobile"].get("stuck_in_transit_mtbf", DEFAULT_STUCK_IN_TRANSIT_MTBF)
self.stuck_in_transit_recovery_duration = params["mobile"].get("stuck_in_transit_recovery_duration", DEFAULT_STUCK_IN_TRANSIT_RECOVERY_DURATION)
self.stuck_in_transit = False
self.tire_deflation_rate = min(1.0, 1.0 - random.gauss(0.001, 0.0001))
first_location_at_centre = params["mobile"].get("first_location_at_centre", False)
the_key = str(self.area_centre) + "." + str(self.area_radius) # Needs to be unique-enough between location groups
if the_key not in Mobile.loc_groups:
Mobile.loc_groups[the_key] = Location_group(context, num_locs, self.area_centre, self.area_radius, first_location_at_centre) # Creates a new group
self.loc_group = Mobile.loc_groups[the_key]
# Choose which points this device will move between
self.points = [] # Array of indices into self.loc_group.locations[]
self.points.append(self.loc_group.base_location) # All devices start at the base location
for P in range(self.points_to_visit-1):
while True:
loc = random.randrange(0, len(self.loc_group.locations))
if loc not in self.points:
break # Ensure no repeats (which means we'll hang if we try to choose more points than locations!)
self.points.append(loc)
if self.fleet_mgmt:
self.pump_up_tires()
self.prepare_new_journey(0,1)
self.engine.register_event_in(self.update_period, self.tick_update_position, self, self)
def comms_ok(self):
return super(Mobile,self).comms_ok()
def external_event(self, event_name, arg):
super(Mobile,self).external_event(event_name, arg)
def close(self):
super(Mobile,self).close()
# Private methods
def miles_between(self, lon1,lat1, lon2,lat2):
(delta_lon, delta_lat) = (lon2-lon1, lat2-lat1)
return math.sqrt(delta_lon * delta_lon + delta_lat * delta_lat) * LATLON_TO_MILES
def update_lon_lat(self):
if self.route_plan:
self.set_properties(self.route_follower.current_latlon())
else: # Just driven linearly between the two points
(prev_lon, prev_lat) = (self.get_property_or_None("longitude"), self.get_property_or_None("latitude"))
(lon_from, lat_from) = self.loc_group.locations[self.points[self.from_point]][0:2]
(lon_to, lat_to) = self.loc_group.locations[self.points[self.to_point]][0:2]
lon = lon_from * (1.0 - self.travel_fraction) + lon_to * self.travel_fraction
lat = lat_from * (1.0 - self.travel_fraction) + lat_to * self.travel_fraction
self.set_properties({ "longitude" : lon, "latitude" : lat }) # Important to update these together (some client apps don't cope well with lat/lon being split between messages, even if contemporaneous)
if self.fleet_mgmt:
if prev_lon is not None:
delta_miles = self.miles_between(prev_lon, prev_lat, lon, lat)
self.set_property("miles", int(10*delta_miles)/10.0)
self.set_property("av_speed_mph", int(delta_miles/(self.update_period/3600)))
self.set_property("fuel_gallons", int(100*(delta_miles/MPG))/100.0)
def update_moving_and_location(self):
self.set_property("moving", self.dwell_count == 0)
if self.dwell_count == 0:
self.set_property("location_mobile", None)
else:
self.set_property("location_mobile", self.loc_group.locations[self.points[self.from_point]][2])
def update_everything(self):
self.update_lon_lat()
self.update_moving_and_location()
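    # Called once per update_period. While dwell_count > 0 the device sits at a
    # location; when it reaches zero the device travels (either along a planned
    # route or by straight-line interpolation) until the journey completes, at
    # which point prepare_new_journey() picks the next leg and a new dwell time.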
def tick_update_position(self, _):
if self.dwell_count > 0: # Stationary at an official Location
if (self.dwell_count % SEND_AT_LEAST_EVERY)==0:
self.update_lon_lat()
self.dwell_count -= 1
if self.dwell_count == 0: # About to move
self.update_everything()
else: # In transit (should be moving)
if not self.stuck_in_transit:
if self.stuck_in_transit_mtbf is not None:
if random.random() < float(self.update_period) / self.stuck_in_transit_mtbf:
logging.info(self.get_property("$id")+" is now stuck in transit")
self.stuck_in_transit = True
else: # IS stuck in transit
if random.random() < float(self.update_period) / self.stuck_in_transit_recovery_duration:
logging.info(self.get_property("$id")+" is now unstuck and resuming transit")
self.stuck_in_transit = False
if not self.stuck_in_transit:
if self.route_plan:
self.route_follower.time_has_passed(self.update_period)
self.update_lon_lat()
if self.route_follower.journey_complete():
self.prepare_new_journey((self.from_point + 1) % self.points_to_visit, (self.to_point + 1) % self.points_to_visit)
else:
self.travel_fraction += self.travel_rate
if self.travel_fraction <= 1.0:
self.update_lon_lat()
else: # Reached destination
self.prepare_new_journey((self.from_point + 1) % self.points_to_visit, (self.to_point + 1) % self.points_to_visit)
if self.fleet_mgmt:
tp = self.get_property("tire_pressure_psi")
if tp < 25:
self.pump_up_tires() # Pump tire up again
else:
self.set_property("tire_pressure_psi", tp * self.tire_deflation_rate)
self.engine.register_event_in(self.update_period, self.tick_update_position, self, self)
def prepare_new_journey(self, from_point, to_point):
self.from_point = from_point
self.to_point = to_point
self.travel_fraction = 0.0
# How far to travel, and speed?
(lon_from, lat_from) = self.loc_group.locations[self.points[self.from_point]][0:2]
(lon_to, lat_to) = self.loc_group.locations[self.points[self.to_point]][0:2]
if self.route_plan:
self.route_follower = Route_Follower(google_maps.get_route_from_lat_lons(lat_from, lon_from, lat_to, lon_to, mode=self.route_plan, google_maps_api_key = self.loc_group.google_maps_key))
logging.info("Journey prepared for " + str(self.get_property("$id")) +
" from " + self.loc_group.locations[self.points[self.from_point]][2] +
" to " + self.loc_group.locations[self.points[self.to_point]][2] +
" with total journey time " + str(self.route_follower.total_journey_time()))
else:
miles = self.miles_between(lon_from, lat_from, lon_to, lat_to)
mph = random.randrange(MPH_MIN, MPH_MAX)
ticks_of_travel = (miles / mph) / (self.update_period / 3600.0) # If we try to move from a point to itself, this will be zero
# therefore what fraction of entire distance to travel in each tick
if ticks_of_travel == 0:
self.travel_rate = 0
else:
self.travel_rate = 1.0 / ticks_of_travel
self.dwell_count = random.randrange(self.dwell_h_min / (self.update_period / 3600.0), self.dwell_h_max / (self.update_period / 3600.0)) # Wait here for a while before commencing
self.update_everything()
def pump_up_tires(self):
self.set_property("tire_pressure_psi", random.gauss(35,5))
|
import threading
def ebv_list(list_submit,list_dict,i,ppid):
import os
lineindex = 0
timehold = time.time()
list_out = []
out = open('/tmp/tmpf_' + str(i) + '_' + str(ppid),'w')
for line in list_submit:
tt = re.split('\s+',line)
ra = float(tt[0])
dec = float(tt[1])
EBV = calc_EBV(float(ra),float(dec),i)
list_out.append(EBV)
#print EBV
lineindex += 1
out.write(str(EBV) + '\n')
if lineindex % 100 == 0:
print 'thread ' + str(i), lineindex, len(list_submit), time.time() - timehold
timehold = time.time()
list_dict[str(i)]['list_out'] = list_out
out.close()
def calc_EBV(coord_in_ra,coord_in_dec,i):
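    # Convert an equatorial position (RA, Dec in decimal degrees) to galactic
    # coordinates with PyEphem, then shell out to dust_getval to look up the
    # Schlegel et al. (SFD) E(B-V) reddening at that galactic longitude/latitude.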
#coord_in_ra='12:51:26.28'
#coord_in_dec='27:07:42.'
coord = Equatorial( str(coord_in_ra*(24./360.)), str(coord_in_dec), epoch='2000') # input needs to be in HOURS as a STRING
g = Galactic(coord, epoch='2000') # output is in degrees not hours--it's latitude/longitude
spt = re.split('\:',str(g.lat))
#print spt, abs(float(spt[0])), float(spt[1])/60.
gallat = float(spt[0]) / abs(float(spt[0])) * (abs(float(spt[0])) + float(spt[1])/60. + float(spt[2])/3600. )
#print gallat
#print g.long
spt = re.split('\:',str(g.long))
#print spt
gallong = float(spt[0]) / abs(float(spt[0])) * (abs(float(spt[0])) + float(spt[1])/60. + float(spt[2])/3600. )
#print gallong
#coordtest = Equatorial(Galactic(g.long,g.lat, epoch='2000'), epoch='2000')
output = commands.getoutput('dust_getval ' + str(gallong) + ' ' + str(gallat) + ' interp=y ipath=/nfs/slac/g/ki/ki03/xoc/pkelly/DUST/maps_' + str(i) )
spt = re.split('\s',output)
#print spt
EBV = spt[-1]
#print EBV, float(coord_in_ra), float(coord_in_dec)
return EBV
class MyThread ( threading.Thread ):
def __init__ ( self, list_submit,list_dict, i, ppid):
self.i = i
self.list_submit = list_submit
self.list_dict = list_dict
self.ppid = ppid
threading.Thread.__init__(self)
def run ( self ):
        ebv_list(self.list_submit, self.list_dict, self.i, self.ppid)
return
#add E(B-V) to ldac table
import re, commands, sys, bashreader, os
from ephem import *
dict = bashreader.parseFile('progs.ini')
table = sys.argv[1]
import time
tempfile = '/tmp/outkey'
ebvfile = '/tmp/outebv'
os.system('rm ' + ebvfile)
ppid = os.getppid()
print ppid
command = "ldactoasc -b -i " + table + " -t OBJECTS -k ALPHA_J2000 DELTA_J2000 > " + ebvfile
print command
os.system(command)
list = []
import re
outkey=open(tempfile,'w')
lines = open(ebvfile,'r').readlines()
number_interval = 4
length_int = len(lines)/number_interval
start = 0
my_threads = []
list_dict = {}
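# Split the input coordinates into number_interval chunks and fork one worker
# process per chunk. Each child computes E(B-V) for its chunk and writes the
# values to /tmp/tmpf_<i>_<ppid>; the parent waits for all children and then
# merges the per-chunk results.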
for i in range(number_interval):
end = start + length_int
if i + 1 == number_interval:
list_submit = lines[start:]
else:
list_submit = lines[start:end]
start = end
list_dict[str(i)] = {'list_submit':list_submit}
#s = MyThread(list_submit,list_dict,i,ppid)
#stat = os.fork()
print i, 'started'
s = os.fork()
if not s:
ebv_list(list_submit,list_dict,i,ppid)
sys.exit()
#s.start()
my_threads.append(s)
print my_threads
#print threading.enumerate()
for s in my_threads:
os.waitpid(s,0)
print 'done'
# The forked children cannot update the parent's list_dict, so gather the
# per-chunk results back from the temporary files each child wrote.
list_out = []
for i in range(number_interval):
    list_out = list_out + [l.strip() for l in open('/tmp/tmpf_' + str(i) + '_' + str(ppid)).readlines()]
print len(lines), len(list_out)
print lines[0:2], list_out[0:2]
# READ IN COLUMN INFO
for val in list_out:
outkey.write(str(val) + '\n')
outkey.close()
command = "asctoldac -i " + tempfile + " -o " + tempfile + ".cat -c " + dict['photconf'] + "/EBV.conf -t OBJECTS "
os.system(command)
command = "ldacjoinkey -o test -i " + table + " -p " + tempfile + ".cat -t OBJECTS -k EBV"
os.system(command)
|
from flask import request, Response
from sqlalchemy.orm import contains_eager
from zeus import auth
from zeus.constants import PERMISSION_MAP
from zeus.models import ChangeRequest, Repository, RepositoryProvider
from .base import Resource
class BaseChangeRequestResource(Resource):
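    # Shared request dispatch for change request endpoints: resolve the
    # ChangeRequest by provider/owner/repo/number, optionally lock the row for
    # update, check the current tenant's permission for the HTTP method, and
    # then delegate to the normal Resource dispatch with the resolved object.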
def dispatch_request(
self,
provider: str,
owner_name: str,
repo_name: str,
cr_number: int,
*args,
**kwargs
) -> Response:
queryset = ChangeRequest.query.join(
Repository, Repository.id == ChangeRequest.repository_id
).filter(
Repository.provider == RepositoryProvider(provider),
Repository.owner_name == owner_name,
Repository.name == repo_name,
ChangeRequest.number == cr_number,
)
if self.select_resource_for_update():
queryset = queryset.with_for_update()
else:
# HACK(dcramer): we dont want to lock the repo row, so for now just deal
# w/ the consequences of this
queryset = queryset.options(contains_eager("repository"))
cr = queryset.first()
if not cr:
return self.not_found()
tenant = auth.get_current_tenant()
if not tenant.has_permission(cr.repository_id, PERMISSION_MAP[request.method]):
return self.error("permission denied", 400)
return Resource.dispatch_request(self, cr, *args, **kwargs)
|
#coding=utf-8
import smtplib
from datetime import datetime
from hashlib import md5
import sys, re, os, random
from .misc import *
from .parts import *
from collections import OrderedDict as odict
class Mimemail():
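    # Hand-built multipart MIME message: alternative text/html parts, optional
    # inline images and attachments, sent one copy per recipient via SMTP.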
def __init__(self, **kwargs):
self.headers = odict()
self.headers['MIME-Version'] = '1.0'
self.headers['From'] = MM_DEFAULT_FROM
self.headers['Date'] = datetime.utcnow().strftime('%a, %d %b %Y %H:%M:%S GMT')
self.body = ''
self.html = None
self.text = None
self.images = []
self.attachments = []
self.charset = 'UTF-8'
self.recipients = {}
self.from_email = 'root@localhost'
self.kw = kwargs
def set_from(self, from_email, from_name):
self.headers['From'] = '%s <%s>' % (encode_header(from_name, self.charset), from_email)
self.from_email = from_email
def set_html(self, html):
self.html = html
def set_text(self, text):
self.text = text
def add_image(self, image):
self.images.append(image)
def add_attachment(self, att):
self.attachments.append(att)
def set_subject(self, subject):
self.subject = subject
def create_images_part(self, boundary):
lines = []
for image in self.images:
lines.extend([
MM_DEFAULT_CRLF,
'--%s%s' % (boundary, MM_DEFAULT_CRLF),
image.get_part_header(),
MM_DEFAULT_CRLF,
MM_DEFAULT_CRLF,
image.get_part_body()
])
return ''.join(lines)
def create_attachments_part(self, boundary):
lines = []
for att in self.attachments:
lines.extend([
MM_DEFAULT_CRLF,
'--%s%s' % (boundary, MM_DEFAULT_CRLF),
att.get_part_header(),
MM_DEFAULT_CRLF,
MM_DEFAULT_CRLF,
att.get_part_body()
])
return ''.join(lines)
def build(self):
has_html = self.html != None
has_text = self.text != None
has_img = len(self.images) > 0
has_att = len(self.attachments) > 0
if has_text and not has_html:
            self.html = MimemailPartHtml(re.sub(r'\n', '<br>', self.text.plain_content, flags=re.M | re.S), charset=self.charset)
elif has_html and not has_text:
            self.text = MimemailPartText(re.sub(r'<|>|/', '', self.html.plain_content, flags=re.M | re.S | re.U), charset=self.charset)
elif not has_html and not has_text and not has_att:
raise MimemailException('An email has no content to send')
if has_img:
for image in self.images:
src = image.get_file_path()
dst = 'cid:' + image.get_image_cid()
self.html.plain_content = self.html.plain_content.replace(os.path.basename(src), dst)
boundary = 'alt_' + gen_boundary_hash()
self.headers['Content-Type'] = 'multipart/alternative; boundary="' + boundary + '"'
self.body = ''.join([
'--%s%s' % ( boundary, MM_DEFAULT_CRLF ),
self.text.get_part_header(),
MM_DEFAULT_CRLF,
MM_DEFAULT_CRLF,
self.text.get_part_body(),
'%s--%s%s' % ( MM_DEFAULT_CRLF, boundary, MM_DEFAULT_CRLF ),
self.html.get_part_header(),
MM_DEFAULT_CRLF,
MM_DEFAULT_CRLF,
self.html.get_part_body(),
'%s--%s--%s%s' % ( MM_DEFAULT_CRLF, boundary, MM_DEFAULT_CRLF, MM_DEFAULT_CRLF )
])
if has_img:
boundary = 'rel_' + gen_boundary_hash()
self.body = ''.join([
'--%s%s' % ( boundary, MM_DEFAULT_CRLF ),
'Content-Type: %s%s%s' % (self.headers['Content-Type'], MM_DEFAULT_CRLF, MM_DEFAULT_CRLF),
self.body,
self.create_images_part(boundary),
'%s--%s--%s%s' % ( MM_DEFAULT_CRLF, boundary, MM_DEFAULT_CRLF, MM_DEFAULT_CRLF )
])
self.headers['Content-Type'] = 'multipart/related; boundary="%s"' % (boundary)
if has_att:
boundary = 'att_' + gen_boundary_hash()
self.body = ''.join([
'--%s%s' % (boundary, MM_DEFAULT_CRLF ),
'Content-Type: %s%s%s' % (self.headers['Content-Type'], MM_DEFAULT_CRLF, MM_DEFAULT_CRLF),
self.body,
self.create_attachments_part(boundary),
'%s--%s--%s%s' % ( MM_DEFAULT_CRLF, boundary, MM_DEFAULT_CRLF, MM_DEFAULT_CRLF )
])
self.headers['Content-Type'] = 'multipart/mixed; boundary="%s"' % (boundary)
self.headers['Message-ID'] = self.gen_message_id()
if hasattr(self, 'subject'):
self.headers['Subject'] = encode_header(self.subject, self.charset)
def gen_message_id(self):
        return '<%s.%08x@%s>' % (datetime.now().strftime('%Y%m%d%H%M%S'), random.randint(0, sys.maxint), self.kw.get('host', 'localhost'))
def add_recipient(self, email, name = None):
self.recipients[email] = name if name else email
def send(self):
self.build()
extra_headers = self.get_extra_headers()
for email, name in self.recipients.iteritems():
message = '%s%sTo: %s <%s>%s%s%s' % (extra_headers, MM_DEFAULT_CRLF, encode_header(name, self.charset), email, MM_DEFAULT_CRLF, MM_DEFAULT_CRLF, self.body)
s = smtplib.SMTP(self.kw.get('smtp_relay', '127.0.0.1'))
s.sendmail(self.from_email, email, message)
s.quit()
def get_extra_headers(self):
return MM_DEFAULT_CRLF.join([ '%s: %s' % (k, v) for k,v in self.headers.iteritems() ])
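# Example usage -- a minimal sketch, under the assumption that MimemailPartHtml
# can be constructed from an HTML string as build() does above and that an SMTP
# relay is reachable at the default 127.0.0.1:
#
#   mail = Mimemail(host='example.com', smtp_relay='127.0.0.1')
#   mail.set_from('[email protected]', 'No Reply')
#   mail.set_subject('Test message')
#   mail.set_html(MimemailPartHtml('<p>Hello</p>'))
#   mail.add_recipient('[email protected]', 'User')
#   mail.send()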
|
__author__ = '[email protected] (J. Matthew Landis)'
import os
import logging
import pickle
import webapp2
import time
import httplib2
import json
import tweepy
import haigha
from collections import Counter
from haigha.connections.rabbit_connection import RabbitConnection
from apiclient import discovery
from oauth2client import appengine
from oauth2client import client
from google.appengine.api import memcache
from google.appengine.api import users
from google.appengine.ext import ndb
from google.appengine.ext.webapp import template
#######################################################################
PROJECTID = '934763316754'
CLIENT_SECRETS = os.path.join(os.path.dirname(__file__), 'client_secrets.json')
# Helpful message to display in the browser if the CLIENT_SECRETS file
# is missing.
MISSING_CLIENT_SECRETS_MESSAGE = """
<h1>Warning: Please configure OAuth 2.0</h1>
<p>
To make this sample run you will need to populate the client_secrets.json file
found at:
</p>
<p>
<code>%s</code>.
</p>
<p>with information found on the <a
href="https://code.google.com/apis/console">APIs Console</a>.
</p>
""" % CLIENT_SECRETS
http = httplib2.Http(memcache)
service = discovery.build("plus", "v1", http=http)
bigquery_service = discovery.build("bigquery","v2", http=http)
consumer_key = "9xNrmD6hE0xnRSYdZt5t0XT0B"
consumer_secret = "kperqjklvPhBCVvHI96aZIfJu5w1DHI2BZoNMdBEvBPfmuZIYG"
access_token = "46501499-cijYvv9ixtQKHLSiLt9QaRtcmWeEKvvGZK5s6ukw7"
access_token_secret = "D127XCAN02BPb0ZtcreCG6dpBJyiiLCeD6ckS2MgdHqwG"
auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(access_token, access_token_secret)
api = tweepy.API(auth)
decorator = appengine.oauth2decorator_from_clientsecrets(
CLIENT_SECRETS,
scope='https://www.googleapis.com/auth/plus.me',
message=MISSING_CLIENT_SECRETS_MESSAGE)
bq_decorator = appengine.oauth2decorator_from_clientsecrets(
CLIENT_SECRETS,
scope='https://www.googleapis.com/auth/bigquery',
message=MISSING_CLIENT_SECRETS_MESSAGE)
## Function to retrieve and render a template
def render_template(handler, templatename, templatevalues):
path = os.path.join(os.path.dirname(__file__), 'templates/' + templatename)
html = template.render(path, templatevalues)
handler.response.out.write(html)
#######################################################################
## Handles and loads index page
class MainPage(webapp2.RequestHandler):
def get(self):
nickname = "null"
email = "null"
user = users.get_current_user()
if user:
res = UserModel.query(UserModel.uid == user.user_id()).fetch()
if res:
ui = res[0]
nickname = ui.fname+ " " +ui.lname
email = user.email()
login = users.create_login_url('/')
else:
nickname = user.nickname()
email = user.email()
login = '/createProfile'
else:
ui = None
login = users.create_login_url('/')
logout = users.create_logout_url('/')
os.system("python stream.py")
template_values = {
'login': login,
'logout': logout,
'user': user,
'nickname': nickname,
'email': email
}
render_template(self, 'index.html', template_values)
#######################################################################
## Handle user info and profile
class CreateProfile(webapp2.RequestHandler):
def get(self):
user = users.get_current_user()
if user:
res = UserModel.query(UserModel.uid == user.user_id()).fetch()
if res:
self.redirect('/profile')
else:
                template_data = {'logout':users.create_logout_url('/'), 'nickname': user.nickname()}
template_path = 'templates/createProfile.html'
self.response.out.write(template.render(template_path,template_data))
else:
            self.redirect(users.create_login_url('/'))
#######################################################################
## process user profile
## check for user signed in, if so, save the entered information, otherwise, redirect them to the login page
class ProcessUser(webapp2.RequestHandler) :
def post(self) :
user = users.get_current_user()
if user:
fname = self.request.get('fname')
lname = self.request.get('lname')
            fname = fname.replace(" ", "")
            lname = lname.replace(" ", "")
words = self.request.get_all('word')
if (not(not fname)) & (not(not lname)):
NewUser = UserModel()
NewUser.uid = user.user_id()
NewUser.fname = fname
NewUser.lname = lname
NewUser.words = []
for word in words:
                    word = word.replace(" ", "")
if word:
NewUser.words+=[word]
NewUser.put()
self.redirect('/profile')
else:
self.redirect('/createProfile')
else:
self.redirect(users.create_login_url('/'))
#######################################################################
## Model Data
class DataHandler(webapp2.RequestHandler) :
@bq_decorator.oauth_aware
def get(self) :
if bq_decorator.has_credentials():
http = bq_decorator.http()
inputData = self.request.get("inputData")
queryData = {'query':'SELECT SUM(word_count) as WCount,corpus_date,group_concat(corpus) as Work FROM '
'[publicdata:samples.shakespeare] WHERE word="'+inputData+'" and corpus_date>0 GROUP BY corpus_date ORDER BY WCount'}
tableData = bigquery_service.jobs()
dataList = tableData.query(projectId=PROJECTID,body=queryData).execute(http)
resp = []
if 'rows' in dataList:
#parse dataList
for row in dataList['rows']:
for key,dict_list in row.iteritems():
count = dict_list[0]
year = dict_list[1]
corpus = dict_list[2]
resp.append({'count': count['v'],'year':year['v'],'corpus':corpus['v']})
else:
resp.append({'count':'0','year':'0','corpus':'0'})
self.response.headers['Content-Type'] = 'application/json'
self.response.out.write(json.dumps(resp))
else:
self.response.write(json.dumps({'error':'No credentials'}))
#######################################################################
## Model Words
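## NOTE: two classes named WordsHandler are defined in this module; Python keeps
## only the last definition, so this first one is shadowed and never handles
## /getWords.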
class WordsHandler(webapp2.RequestHandler) :
@bq_decorator.oauth_aware
def get(self) :
if bq_decorator.has_credentials():
http = bq_decorator.http()
inputData = self.request.get("inputData")
queryData = {'query':'SELECT text FROM '
'[doctor-know:rtda.tweets] WHERE Words CONTAINS "'+inputData+'"GROUP BY text ORDER BY text LIMIT 150'}
tableData = bigquery_service.jobs()
dataList = tableData.query(projectId=PROJECTID,body=queryData).execute(http)
resp = {}
resp['text'] = status.text
resp['created_at'] = time.mktime(status.created_at.timetuple())
resp['geo'] = status.geo
resp['source'] = status.source
self.response.headers['Content-Type'] = 'application/json'
self.response.out.write(json.dumps(resp))
else:
self.response.write(json.dumps({'error':'No credentials'}))
#######################################################################
## Model Words
class WordsHandler(webapp2.RequestHandler) :
inputData = "yes"
@bq_decorator.oauth_aware
def get(self) :
if bq_decorator.has_credentials():
http = bq_decorator.http()
inputData = self.request.get("inputData")
queryData = {'query':'SELECT text FROM '
'[doctor-know:rtda.tweets] WHERE text CONTAINS "'+inputData+'" GROUP BY text ORDER BY text LIMIT 300'}
tableData = bigquery_service.jobs()
dataList = tableData.query(projectId=PROJECTID,body=queryData).execute(http)
tweets = []
if 'rows' in dataList:
#parse dataList
count = 0
for row in dataList['rows']:
for key,dict_list in row.iteritems():
tweet = dict_list[0]
count += 1
tweets.append({'text': tweet})
if count == 300:
break
ignore_words = [ "fuck", "shit", "cock", "penis", "porn"]
words = []
for tweet in tweets:
tt = tweet.get('text', "")
for word in tt.split():
if "http" in word:
continue
if word not in ignore_words:
words.append(word)
                resp = Counter(words)
                self.response.headers['Content-Type'] = 'application/json'
                self.response.headers['Access-Control-Allow-Origin'] = '*'
                self.response.out.write(json.dumps(resp))
# self.response.headers['Content-Type'] = 'application/json'
# self.response.out.write(json.dumps(tweets))
# else:
# self.response.write(json.dumps({'error':'No credentials'}))
#######################################################################
## Profile Page
class ProfilePage(webapp2.RequestHandler) :
def get(self):
user = users.get_current_user()
if user:
res = UserModel.query(UserModel.uid == user.user_id()).fetch()
if res:
ui = res[0]
template_data = {'firstname': ui.fname, 'lastname': ui.lname, 'words': ui.words, 'nickname': ui.fname+ " " +ui.lname, 'logout': users.create_logout_url('/')}
template_path = 'templates/profile.html'
self.response.out.write(template.render(template_path,template_data))
else:
self.redirect('/createProfile')
else:
self.redirect(users.create_login_url('/'))
#######################################################################
## Artificial Creativity Engine
class DisplayEngine(webapp2.RequestHandler) :
def get(self):
user = users.get_current_user()
if user:
res = UserModel.query(UserModel.uid == user.user_id()).fetch()
if res:
ui = res[0]
template_data = {'nickname': ui.fname+ " " +ui.lname, 'logout': users.create_logout_url('/')}
template_path = 'templates/engine.html'
self.response.out.write(template.render(template_path,template_data))
else:
template_data = {'nickname': user.nickname(), 'logout': users.create_logout_url('/')}
template_path = 'templates/engine.html'
self.response.out.write(template.render(template_path,template_data))
else:
self.redirect(users.create_login_url('/'))
#######################################################################
## Data Analysis
class DisplayData(webapp2.RequestHandler) :
def get(self):
user = users.get_current_user()
if user:
res = UserModel.query(UserModel.uid == user.user_id()).fetch()
if res:
ui = res[0]
template_data = {'nickname': ui.fname+ " " +ui.lname, 'logout': users.create_logout_url('/')}
template_path = 'templates/data.html'
self.response.out.write(template.render(template_path,template_data))
else:
template_data = {'nickname': user.nickname(), 'logout': users.create_logout_url('/')}
template_path = 'templates/data.html'
self.response.out.write(template.render(template_path,template_data))
else:
self.redirect(users.create_login_url('/'))
#######################################################################
## Establish/Update User Profile
class UserModel(ndb.Model) :
uid = ndb.StringProperty(indexed=True)
fname = ndb.StringProperty(indexed = False)
lname = ndb.StringProperty(indexed = False)
words = ndb.StringProperty(indexed=False,repeated=True)
#######################################################################
## Establish/Update User Profile
# class CustomStreamListener(tweepy.StreamListener):
# def __init__(self, api):
# self.api = api
# super(tweepy.StreamListener, self).__init__()
# #setup rabbitMQ Connection
# self.connection = RabbitConnection(host='130.211.189.207', heartbeat=None, debug=True)
# self.channel = self.connection.channel()
# #set max queue size
# args = {"x-max-length": 2000}
# self.channel.queue.declare(queue='twitter_topic_feed', arguments=args)
# def on_status(self, status):
# print status.text, "\n"
# data = {}
# data['text'] = status.text
# data['created_at'] = time.mktime(status.created_at.timetuple())
# data['geo'] = status.geo
# data['source'] = status.source
# #queue the tweet
# self.channel.basic.publish(exchange='',
# routing_key='twitter_topic_feed',
# body=json.dumps(data))
# def on_error(self, status_code):
# print >> sys.stderr, 'Encountered error with status code:', status_code
# return True # Don't kill the stream
# def on_timeout(self):
# print >> sys.stderr, 'Timeout...'
# return True # Don't kill the stream
# sapi = tweepy.streaming.Stream(auth, CustomStreamListener(api))
# # my keyword today is chelsea as the team just had a big win
# sapi.filter(track=[self.request.get("inputData")])
app = webapp2.WSGIApplication( [
('/', MainPage),
('/profile', ProfilePage),
('/createProfile', CreateProfile),
('/userRegister', ProcessUser),
('/getData', DataHandler),
('/getWords', WordsHandler),
('/data', DisplayData),
('/engine', DisplayEngine),
(decorator.callback_path, decorator.callback_handler()),
(bq_decorator.callback_path, bq_decorator.callback_handler())
], debug=True)
|
#!/usr/bin/python
import sys
import os
import re
import fnmatch
import string
import matplotlib.pyplot as plt
def print_period(stat):
# use to profile the application running solo.
# stat is an iterator or array
i = 0
for item in stat:
plt.plot(item, label=str(i))
plt.legend()
i = i + 1
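# print_period usage sketch: print_period([[1, 2, 3], [2, 2, 2]]) draws one
# labelled line per inner sequence; call plt.show() afterwards to display it.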
def print_double_array (x):
for x_i in x:
sys.stdout.write(str("%.2f" % x_i) + ' ')
print "\n"
sys.stdout.flush()
def print_int_array (x):
for x_i in x:
sys.stdout.write(str(x_i) + ' ')
print "\n"
sys.stdout.flush()
def print_stat_dict (my_stat):
for key, value in iter(sorted(my_stat.iteritems())):
if type(value) is not list:
print key.ljust(20), value.ljust(20)
# else:
# for element in value:
# print element
def print_power (stat):
output_str = '\n\n############# Power Distribution ################\n\n'
output_str = output_str + ''.ljust(15) + 'Static'.ljust(20) + 'Dynamic'.ljust(20) + 'Overall'.ljust(20) + '\n'
## print BLESS
static_percent = "{:.2f}".format(stat[0]/stat[2]*100)
dynamic_percent = "{:.2f}".format(stat[1]/stat[2]*100)
output_str = output_str + 'BLESS'.ljust(15) + ('%s (%s%%)'%("{:.2f}".format(stat[0]),static_percent)).ljust(20) + ('%s (%s%%)'%("{:.2f}".format(stat[1]),dynamic_percent)).ljust(20) + str(stat[2]).ljust(20) + '\n'
# print MBNoC
static_percent = "{:.2f}".format(stat[3]/stat[5]*100)
dynamic_percent = "{:.2f}".format(stat[4]/stat[5]*100)
output_str = output_str + 'MBNoC'.ljust(15) + ('%s (%s%%)'%("{:.2f}".format(stat[3]),static_percent)).ljust(20) + ('%s (%s%%)'%("{:.2f}".format(stat[4]),dynamic_percent)).ljust(20) + str(stat[5]).ljust(20)
output_str = output_str + '\n'
print output_str
def print_power_breakdown (stat):
output_str = '\n\n############# Power Breakdown ################\n\n'
output_str = output_str + ''.ljust(15) + 'Static'.ljust(20) + 'Dynamic'.ljust(20) + 'Overall'.ljust(20) + '\n'
output_str = output_str + 'Component'.ljust(15) + 'BLESS'.ljust(10) + 'MBNoC'.ljust(10) + 'BLESS'.ljust(10) + 'MBNoC'.ljust(10) + 'BLESS'.ljust(10) + 'MBNoC'.ljust(10) + '\n'
print_order = ['DFF', 'portAlloc', 'RC', 'Xbar', 'Local', 'permNet', 'link']
for component in range (0, 7):
output_str = output_str + print_order[component].ljust(15)
for metric in stat:
output_str = output_str + str(metric[component+1]).ljust(10)
output_str = output_str + '\n'
print output_str
def print_final_stat (stat):
output_str = '\n\n############# Overall ################\n\n'
output_str = output_str + ''.ljust(20) + 'weighted_speedup'.ljust(20) + 'Energy'.ljust(20) + 'Throughput'.ljust(20) + 'Defection Rate'.ljust(20) + '\n'
output_str = output_str + 'Load'.ljust(10) + 'Count'.ljust(10)
for i in range (0, 4):
output_str = output_str + 'BLESS'.ljust(10) + 'MBNoC'.ljust(10)
output_str = output_str + '\n' + 'Low'.ljust(10)
for metric in stat[0]:
output_str = output_str + str(metric).ljust(10)
output_str = output_str + '\n'
output_str = output_str + 'Medium'.ljust(10)
for metric in stat[1]:
output_str = output_str + str(metric).ljust(10)
output_str = output_str + '\n'
output_str = output_str + 'High'.ljust(10)
for metric in stat[2]:
output_str = output_str + str(metric).ljust(10)
output_str = output_str + '\n'
output_str = output_str + 'Average'.ljust(10)
for metric in stat[3]:
output_str = output_str + str(metric).ljust(10)
output_str = output_str + '\n'
print output_str
return output_str
def print_for_plot (stat):
output_str = '\n\n############# Print for plot ################\n\n'
    output_str = output_str + 'Baseline of each metric of interest is 1.\nEach metric is normalized to BLESS with the same network size.\n\n'
    output_str = output_str + 'Load'.ljust(8) + 'Count'.ljust(8) + 'ws'.ljust(8) + '4x4'.ljust(8) + '8x8'.ljust(8) + '16x16'.ljust(8) + 'engy'.ljust(8) + '4x4'.ljust(8) + '8x8'.ljust(8) + '16x16'.ljust(8) + 'th'.ljust(8) + '4x4'.ljust(8) + '8x8'.ljust(8) + '16x16'.ljust(8) + 'defl'.ljust(8) + '4x4'.ljust(8) + '8x8'.ljust(8) + '16x16'.ljust(8) + '\n'
groups = ['Low','Medium','High','Average']
i = 0
for element in stat:
output_str = output_str + groups[i].ljust(8)
for metric in element:
output_str = output_str + str(metric).ljust(8)
i = i + 1
output_str = output_str + '\n'
print output_str
return output_str
def print_synth (stat, design):
traffic = str(stat.pop(0))
network = str(stat.pop(0))
#output_str = '\n\n############# ' + "Traffic = " + traffic.ljust(20) + "Network = " + network.ljust(20) + ' ################\n\n'
#output_str = output_str + 'Inject_rate'.ljust(20) + 'Energy'.ljust(20) + 'Latency'.ljust(20) + 'Deflect_rate'.ljust(20) + 'Throughput'.ljust(20) + '\n\n'
#output_str = output_str + 'BLESS'.ljust(10) + 'MBNoC'.ljust(10) + 'BLESS'.ljust(10) + 'MBNoC'.ljust(10) + 'BLESS'.ljust(10) + 'MBNoC'.ljust(10) + 'BLESS'.ljust(10) + 'MBNoC'.ljust(10) + 'BLESS'.ljust(10) + 'MBNoC'.ljust(10) + '\n'
output_str = '\n\n############# ' + 'Traffic = ' + traffic.ljust(20) + 'Network = ' + network.ljust(20) + ' ################\n\n'
type_stat = len(stat) / len(design)
#for i in range (0, type_stat):
space = (len(design)+1)*10
output_str = output_str + 'Energy'.ljust(space) + 'Latency'.ljust(space) + 'Throughput'.ljust(space) + 'Deflect_rate'.ljust(space) + '\n\n'
for i in range (1, 80, 1):
load = "{:.2f}".format(float(i)/100)
for j in range (0, len(stat)):
            if j % len(design) == 0:
output_str = output_str + load.ljust(10)
if load in stat[j]:
output_str = output_str + str(stat[j][load]).ljust(10)
else:
output_str = output_str + '-'.ljust(10)
output_str = output_str + '\n'
#for i in range (0, len(stat[0])):
# for j in range (0, len(stat)):
# output_str = output_str + str(stat[j][i]).ljust(10)
# output_str = output_str + '\n'
output_str = output_str + '********* Based on %u data points ************' % len(stat[0])
print output_str
def print_synth_wrt_load (stat, design):
traffic = str(stat.pop(0))
network = str(stat.pop(0))
output_str = '\n\n############# ' + 'Traffic = ' + traffic.ljust(20) + 'Network = ' + network.ljust(20) + ' ################\n\n'
type_stat = len(stat) / len(design)
#for i in range (0, type_stat):
space = (len(design)+1)*10
output_str = output_str + 'Latency'.ljust(space) + 'Throughput'.ljust(space) + 'Deflect_rate'.ljust(space) + '\n\n'
for i in range (0, type_stat):
output_str = output_str + 'InjRate'.ljust(10)
for element in design:
output_str = output_str + element.ljust(10)
output_str = output_str + '\n'
for i in range (1, 80, 1):
load = "{:.2f}".format(float(i)/100)
for j in range (0, len(stat)):
            if j % len(design) == 0:
output_str = output_str + load.ljust(10)
if load in stat[j]:
output_str = output_str + str(stat[j][load]).ljust(10)
else:
output_str = output_str + '-'.ljust(10)
output_str = output_str + '\n'
output_str = output_str + '********* Based on %u data points ************' % len(stat[0])
print output_str
def print_synth_avg_reduction (stat, design):
output_str = ''
for element in design:
output_str = output_str + element.ljust(10)
baseline = stat[0]
output_str = output_str + '\n' + '1'.ljust(10)
stat.pop(0)
for element in stat:
reduction = ''
if baseline > 0: reduction = "{:.2f}".format((baseline - element) / baseline)
output_str = output_str + reduction.ljust(10)
output_str = output_str + '\n'
print output_str
def print_synth_avg_gain (stat, design):
output_str = ''
for element in design:
output_str = output_str + element.ljust(10)
baseline = stat[0]
output_str = output_str + '\n' + '1'.ljust(10)
stat.pop(0)
for element in stat:
reduction = ''
if baseline > 0: reduction = "{:.2f}".format((element - baseline) / baseline)
output_str = output_str + reduction.ljust(10)
output_str = output_str + '\n'
print output_str
def print_final (stat, design):
output_str = ''
for element in design:
output_str = output_str + element.ljust(10)
output_str = output_str + '\n'
for element in stat:
output_str = output_str + "{:.2f}".format(float(element)).ljust(10)
output_str = output_str + '\n'
print output_str
|
import json
import os
from crank.core.workouts import (Workouts, WorkoutsJSONEncoder,
WorkoutsJSONDecoder)
parent = os.path.dirname(os.path.abspath(__file__))
TEST_WKT_FILE = os.path.join(parent, 'fixtures', 'squat.wkt')
def test_workouts_storage():
"""Parse, save, and load workouts from file(s)."""
wkts = Workouts.parse_wkt_file(TEST_WKT_FILE)
assert len(wkts.workouts) == 43
wkts_filename = 'workouts.json.test'
wkts.filename = wkts_filename
wkts.save()
assert os.path.exists(wkts_filename)
del wkts
wkts2 = Workouts.load(wkts_filename)
assert len(wkts2.workouts) == 43, wkts2.workouts
assert not isinstance(wkts2.workouts, list), \
"Workouts shouldn't be in a list"
def test_workouts_encoding():
wkts = Workouts.parse_wkt_file(TEST_WKT_FILE)
wkts_json = json.dumps(wkts, cls=WorkoutsJSONEncoder)
wkts2 = json.loads(wkts_json, cls=WorkoutsJSONDecoder)
assert wkts.filename == wkts2.filename
assert wkts.workouts == wkts2.workouts
|
#!/usr/bin/python
# Copyright (c) 2014-2015 Cedric Bellegarde <[email protected]>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from lollypop.sqlcursor import SqlCursor
class DatabaseUpgrade:
"""
Manage database schema upgrades
"""
def __init__(self, version, db):
"""
Init object
@param version as int
@param db as Database
"""
self._version = version
self._db = db
# Here are schema upgrade, key is database version,
# value is sql request
self._UPGRADES = {
1: "update tracks set duration=CAST(duration as INTEGER);",
2: "update albums set artist_id=-2001 where artist_id=-999;"
}
"""
Return upgrade count
@return int
"""
def count(self):
return len(self._UPGRADES)
"""
Upgrade database based on version
@return new db version as int
"""
def do_db_upgrade(self):
with SqlCursor(self._db) as sql:
for i in range(self._version+1, len(self._UPGRADES)+1):
try:
sql.execute(self._UPGRADES[i])
except Exception as e:
print("Database upgrade failed: ", e)
sql.commit()
return len(self._UPGRADES)
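# Minimal usage sketch (assumption: the caller tracks the current schema
# version and persists the value returned by do_db_upgrade()). count() only
# inspects the upgrade table, so no real database is needed to query it.
if __name__ == "__main__":
    upgrader = DatabaseUpgrade(0, None)
    print("Known schema upgrades:", upgrader.count())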
|
#!/usr/bin/env python
"""test_shacl.py: Test generated ontologies against SHACL shapes."""
import os
import libshacl
import pytest
def test_execute_testShacl():
""" Can we execute testShacl at all? """
(rc, stdout, stderr) = libshacl.exec_testShacl(["--version"])
print stdout
print stderr
assert rc == 0
assert stdout.startswith("testShacl ")
def test_validate_shacl_against_nodeshape(path_owl):
""" Execute testShacl on every OWL file against NodeShape.ttl. """
path_shacl = path_owl[:-3] + "shacl.ttl"
libshacl.validateShacl("tests/shapes/NodeShape.ttl", path_owl)
def test_validate_shacl_against_custom_shacl(path_owl):
""" Execute testShacl on the corresponding shacl.ttl file, if one exists. """
path_shacl = path_owl[:-3] + "shacl.ttl"
if os.path.isfile(path_shacl):
print "Validating {0} against its custom SHACL file, {1}".format(path_owl, path_shacl)
libshacl.validateShacl(path_shacl, path_owl)
else:
pytest.skip("OWL file '{0}' doesn't have a custom SHACL file to test at '{1}'".format(
path_owl,
path_shacl
))
|
"""
Module containing class with colors
"""
COLORS = {
'white': "\033[1;37m",
'yellow': "\033[1;33m",
'green': "\033[1;32m",
'blue': "\033[1;34m",
'cyan': "\033[1;36m",
'red': "\033[1;31m",
'magenta': "\033[1;35m",
'black': "\033[1;30m",
'darkwhite': "\033[0;37m",
'darkyellow': "\033[0;33m",
'darkgreen': "\033[0;32m",
'darkblue': "\033[0;34m",
'darkcyan': "\033[0;36m",
'darkred': "\033[0;31m",
'darkmagenta': "\033[0;35m",
'darkblack': "\033[0;30m",
'end': "\033[0;0m"
}
class color:
"""
Class that contains colors used for TorBot in terminal and a method
that adds color to a string.
Attributes:
message (string): Message to be wrapped in color.
selected (string): Color to be displayed.
"""
def __init__(self, message, selected):
"""Initialise color object with specified text and selected color."""
self._msg = message
self._color = COLORS[selected]
def __str__(self):
return self._color + self._msg + COLORS['end']
def __add__(self, other):
return str(self) + other
def __radd__(self, other):
return other + str(self)
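if __name__ == "__main__":
    # Quick self-test sketch (assumes an ANSI-capable terminal; the codes in
    # COLORS above are standard ANSI escape sequences).
    print(color("TorBot", "green") + " " + color("warning", "yellow"))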
|
# -*- coding: utf-8 -*-
"""Thread of structural synthesis."""
__author__ = "Yuan Chang"
__copyright__ = "Copyright (C) 2016-2021"
__license__ = "AGPL"
__email__ = "[email protected]"
from typing import Sequence, Dict, List
from qtpy.QtCore import Signal
from qtpy.QtWidgets import QWidget, QTreeWidgetItem
from pyslvs.graph import (
link_synthesis,
contracted_link_synthesis,
contracted_graph,
conventional_graph,
Graph,
)
from pyslvs_ui.synthesis.thread import BaseThread
Assortment = Sequence[int]
def assortment_eval(links_expr: str) -> Assortment:
"""Return link assortment from expr."""
return tuple(int(n.split('=')[-1]) for n in links_expr.split(", "))
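# For example (sketch; key names are placeholders): assortment_eval("NL2=4, NL3=2")
# returns (4, 2) -- only the integer after each '=' is kept, as implemented above.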
class LinkThread(BaseThread):
"""Link assortment synthesis thread."""
progress_update = Signal(int)
result = Signal(dict)
size_update = Signal(int)
def __init__(self, nl: int, nj: int, parent: QWidget):
super(LinkThread, self).__init__(parent)
self.nl = nl
self.nj = nj
def run(self) -> None:
"""Run and return contracted link assortment."""
try:
la_list = link_synthesis(self.nl, self.nj, lambda: self.is_stop)
except ValueError:
self.progress_update.emit(1)
self.result.emit({})
self.finished.emit()
return
self.size_update.emit(len(la_list))
assortment = {}
for i, la in enumerate(la_list):
if self.is_stop:
break
assortment[la] = contracted_link_synthesis(la, lambda: self.is_stop)
self.progress_update.emit(1 + i)
self.result.emit(assortment)
self.finished.emit()
class GraphThread(BaseThread):
"""Graphs enumeration thread."""
progress_update = Signal(int)
count_update = Signal(QTreeWidgetItem, int)
result = Signal(list)
def __init__(self, jobs: Sequence[QTreeWidgetItem], degenerate: int, parent: QWidget):
super(GraphThread, self).__init__(parent)
self.jobs = jobs
self.degenerate = degenerate
def run(self) -> None:
"""Run and return conventional graph."""
cg_list: Dict[Sequence[int], List[Graph]] = {}
answers = []
for i, item in enumerate(self.jobs):
if self.is_stop:
break
root = item.parent()
la = assortment_eval(root.text(0))
cla = assortment_eval(item.text(0))
if la not in cg_list:
cg_list[la] = contracted_graph(la, lambda: self.is_stop)
answer = conventional_graph(
cg_list[la],
cla,
self.degenerate,
lambda: self.is_stop
)
self.count_update.emit(item, len(answer))
answers.extend(answer)
self.progress_update.emit(1 + i)
self.result.emit(answers)
self.finished.emit()
|
# ----------------------------------------------------------------------------
# pyglet
# Copyright (c) 2006-2008 Alex Holkner
# Copyright (c) 2008-2021 pyglet contributors
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# * Neither the name of pyglet nor the names of its
# contributors may be used to endorse or promote products
# derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# ----------------------------------------------------------------------------
from pyglet.gl import *
def get_max_color_attachments():
"""Get the maximum allow Framebuffer Color attachements"""
number = GLint()
glGetIntegerv(GL_MAX_COLOR_ATTACHMENTS, number)
return number.value
class Renderbuffer:
"""OpenGL Renderbuffer Object"""
def __init__(self, width, height, internal_format, samples=1):
"""Create an instance of a Renderbuffer object."""
self._id = GLuint()
self._width = width
self._height = height
self._internal_format = internal_format
glGenRenderbuffers(1, self._id)
glBindRenderbuffer(GL_RENDERBUFFER, self._id)
if samples > 1:
glRenderbufferStorageMultisample(GL_RENDERBUFFER, samples, internal_format, width, height)
else:
glRenderbufferStorage(GL_RENDERBUFFER, internal_format, width, height)
glBindRenderbuffer(GL_RENDERBUFFER, 0)
@property
def id(self):
return self._id.value
@property
def width(self):
return self._width
@property
def height(self):
return self._height
def bind(self):
glBindRenderbuffer(GL_RENDERBUFFER, self._id)
@staticmethod
def unbind():
glBindRenderbuffer(GL_RENDERBUFFER, 0)
def delete(self):
glDeleteRenderbuffers(1, self._id)
def __del__(self):
try:
glDeleteRenderbuffers(1, self._id)
# Python interpreter is shutting down:
except ImportError:
pass
def __repr__(self):
return "{}(id={})".format(self.__class__.__name__, self._id.value)
class Framebuffer:
"""OpenGL Framebuffer Object"""
def __init__(self, target=GL_FRAMEBUFFER):
"""Create an OpenGL Framebuffer object.
:rtype: :py:class:`~pyglet.image.Framebuffer`
.. versionadded:: 2.0
"""
self._id = GLuint()
glGenFramebuffers(1, self._id)
self._attachment_types = 0
self._width = 0
self._height = 0
self.target = target
@property
def id(self):
return self._id.value
@property
def width(self):
"""The width of the widest attachment."""
return self._width
@property
def height(self):
"""The width of the widest attachment."""
return self._height
def bind(self):
glBindFramebuffer(self.target, self._id)
def unbind(self):
glBindFramebuffer(self.target, 0)
def clear(self):
if self._attachment_types:
self.bind()
glClear(self._attachment_types)
self.unbind()
def delete(self):
glDeleteFramebuffers(1, self._id)
@property
def is_complete(self):
return glCheckFramebufferStatus(GL_FRAMEBUFFER) == GL_FRAMEBUFFER_COMPLETE
@staticmethod
def get_status():
states = {GL_FRAMEBUFFER_UNSUPPORTED: "Framebuffer unsupported. Try another format.",
GL_FRAMEBUFFER_INCOMPLETE_ATTACHMENT: "Framebuffer incomplete attachment.",
GL_FRAMEBUFFER_INCOMPLETE_MISSING_ATTACHMENT: "Framebuffer missing attachment.",
GL_FRAMEBUFFER_INCOMPLETE_DIMENSIONS_EXT: "Framebuffer unsupported dimension.",
GL_FRAMEBUFFER_INCOMPLETE_FORMATS_EXT: "Framebuffer incomplete formats.",
GL_FRAMEBUFFER_INCOMPLETE_DRAW_BUFFER: "Framebuffer incomplete draw buffer.",
GL_FRAMEBUFFER_INCOMPLETE_READ_BUFFER: "Framebuffer incomplete read buffer.",
GL_FRAMEBUFFER_COMPLETE: "Framebuffer is complete."}
gl_status = glCheckFramebufferStatus(GL_FRAMEBUFFER)
return states.get(gl_status, "Unknown error")
def attach_texture(self, texture, target=GL_FRAMEBUFFER, attachment=GL_COLOR_ATTACHMENT0):
"""Attach a Texture to the Framebuffer
:Parameters:
`texture` : pyglet.image.Texture
Specifies the texture object to attach to the framebuffer attachment
point named by attachment.
`target` : int
Specifies the framebuffer target. target must be GL_DRAW_FRAMEBUFFER,
GL_READ_FRAMEBUFFER, or GL_FRAMEBUFFER. GL_FRAMEBUFFER is equivalent
to GL_DRAW_FRAMEBUFFER.
`attachment` : int
Specifies the attachment point of the framebuffer. attachment must be
GL_COLOR_ATTACHMENTi, GL_DEPTH_ATTACHMENT, GL_STENCIL_ATTACHMENT or
GL_DEPTH_STENCIL_ATTACHMENT.
"""
self.bind()
glFramebufferTexture(target, attachment, texture.id, texture.level)
# glFramebufferTexture2D(target, attachment, texture.target, texture.id, texture.level)
self._attachment_types |= attachment
self._width = max(texture.width, self._width)
self._height = max(texture.height, self._height)
self.unbind()
def attach_texture_layer(self, texture, layer, level, target=GL_FRAMEBUFFER, attachment=GL_COLOR_ATTACHMENT0):
"""Attach a Texture layer to the Framebuffer
:Parameters:
`texture` : pyglet.image.TextureArray
Specifies the texture object to attach to the framebuffer attachment
point named by attachment.
`layer` : int
Specifies the layer of texture to attach.
`level` : int
Specifies the mipmap level of texture to attach.
`target` : int
Specifies the framebuffer target. target must be GL_DRAW_FRAMEBUFFER,
GL_READ_FRAMEBUFFER, or GL_FRAMEBUFFER. GL_FRAMEBUFFER is equivalent
to GL_DRAW_FRAMEBUFFER.
`attachment` : int
Specifies the attachment point of the framebuffer. attachment must be
GL_COLOR_ATTACHMENTi, GL_DEPTH_ATTACHMENT, GL_STENCIL_ATTACHMENT or
GL_DEPTH_STENCIL_ATTACHMENT.
"""
self.bind()
glFramebufferTextureLayer(target, attachment, texture.id, level, layer)
self._attachment_types |= attachment
self._width = max(texture.width, self._width)
self._height = max(texture.height, self._height)
self.unbind()
def attach_renderbuffer(self, renderbuffer, target=GL_FRAMEBUFFER, attachment=GL_COLOR_ATTACHMENT0):
""""Attach a Renderbuffer to the Framebuffer
:Parameters:
`renderbuffer` : pyglet.image.Renderbuffer
Specifies the Renderbuffer to attach to the framebuffer attachment
point named by attachment.
`target` : int
Specifies the framebuffer target. target must be GL_DRAW_FRAMEBUFFER,
GL_READ_FRAMEBUFFER, or GL_FRAMEBUFFER. GL_FRAMEBUFFER is equivalent
to GL_DRAW_FRAMEBUFFER.
`attachment` : int
Specifies the attachment point of the framebuffer. attachment must be
GL_COLOR_ATTACHMENTi, GL_DEPTH_ATTACHMENT, GL_STENCIL_ATTACHMENT or
GL_DEPTH_STENCIL_ATTACHMENT.
"""
self.bind()
glFramebufferRenderbuffer(target, attachment, GL_RENDERBUFFER, renderbuffer.id)
self._attachment_types |= attachment
self._width = max(renderbuffer.width, self._width)
self._height = max(renderbuffer.height, self._height)
self.unbind()
def __del__(self):
try:
glDeleteFramebuffers(1, self._id)
# Python interpreter is shutting down:
except ImportError:
pass
def __repr__(self):
return "{}(id={})".format(self.__class__.__name__, self._id.value)
|
import pybullet as p
import time
import math
p.connect(p.GUI)
useMaximalCoordinates = False
p.setGravity(0, 0, -10)
plane = p.loadURDF("plane.urdf", [0, 0, -1], useMaximalCoordinates=useMaximalCoordinates)
p.setRealTimeSimulation(0)
velocity = 1
num = 40
p.configureDebugVisualizer(p.COV_ENABLE_GUI, 0)
p.configureDebugVisualizer(p.COV_ENABLE_RENDERING, 1)  # set to 0 to disable rendering and speed things up
p.configureDebugVisualizer(p.COV_ENABLE_TINY_RENDERER, 0)
p.setPhysicsEngineParameter(enableConeFriction=1)
for i in range(num):
print("progress:", i, num)
x = velocity * math.sin(2. * 3.1415 * float(i) / num)
y = velocity * math.cos(2. * 3.1415 * float(i) / num)
print("velocity=", x, y)
sphere = p.loadURDF("sphere_small_zeroinertia.urdf",
flags=p.URDF_USE_INERTIA_FROM_FILE,
useMaximalCoordinates=useMaximalCoordinates)
p.changeDynamics(sphere, -1, lateralFriction=0.02)
#p.changeDynamics(sphere,-1,rollingFriction=10)
p.changeDynamics(sphere, -1, linearDamping=0)
p.changeDynamics(sphere, -1, angularDamping=0)
p.resetBaseVelocity(sphere, linearVelocity=[x, y, 0])
prevPos = [0, 0, 0]
for i in range(2048):
p.stepSimulation()
pos = p.getBasePositionAndOrientation(sphere)[0]
if (i & 64):
p.addUserDebugLine(prevPos, pos, [1, 0, 0], 1)
prevPos = pos
p.configureDebugVisualizer(p.COV_ENABLE_RENDERING, 1)
while (1):
time.sleep(0.01)
|
#!/usr/bin/env python3
import xml.etree.ElementTree as ET
def get_target():
return SVG()
class SVG:
def __init__(self):
self.svg = ET.parse('skeleton.svg')
self.mmpx = 3.543307
def add_package(self, package):
'''
        Target SVG only handles one drawing at a time; only the last added drawing will be part of the output
'''
self.svg = ET.parse('skeleton.svg')
self.package = \
{
'name': package['name'],
'pads': [],
'mnt_pads': [],
'holes': [],
'lines': [],
'circles': [],
'rectangles': [] ,
'texts': []
}
def output(self, fout):
package = self.package
for pad in package['pads']:
self.gen_pac_pad(pad)
for mnt_pad in package['mnt_pads']: # TODO, adding mnt_pads not done
self.gen_pac_mnt_pad(mnt_pad)
for hole in package['holes']:
self.gen_pac_hole(hole)
for line in package['lines']:
self.gen_pac_line(line)
if(0):
for circle in package['circles']:
self.gen_pac_circle(circle)
for rect in package['rectangles']:
self.gen_pac_rectangle(rect)
for text in package['texts']:
self.gen_pac_text(text)
self.svg.write(fout)
def add_pac_pad(self, type, angle, size, pos, number):
self.package['pads'].append(
{
'type': type,
'angle': angle,
'size': size,
'pos': pos,
'number': number
})
def add_pac_hole(self, diameter, pos):
self.package['holes'].append(
{
'd': diameter,
'pos': pos
})
def add_pac_line(self, layer, width, vertices):
self.package['lines'].append(
{
'layer': layer,
'width': width,
'vertices': vertices
})
def gen_pac_pad(self, pad): # type, angle, size, pos, number
top_layer = self.svg.find('.//g[@id="Top"]')
# TODO: Types and angle
el = ET.SubElement(top_layer, 'rect')
el.set('style', 'fill:#ff0000;fill-opacity:1;stroke:none;stroke-width:10;stroke-linecap:square;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1')
el.set('id', 'pin_{}'.format(pad['number']))
el.set('width', '{}'.format(pad['size'][0]*self.mmpx))
el.set('height', '{}'.format(pad['size'][1]*self.mmpx))
el.set('x', '{}'.format((pad['pos'][0] - pad['size'][0]/2)*self.mmpx))
el.set('y', '{}'.format((pad['pos'][1] - pad['size'][1]/2)*self.mmpx))
def gen_pac_hole(self, hole):
top_layer = self.svg.find('.//g[@id="Holes"]')
circle = ET.SubElement(top_layer, 'circle')
        circle.set('style', 'fill:#eeee00;fill-opacity:1;stroke:none;stroke-width:0.0;stroke-linecap:square;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1')
circle.set('cx', '{}'.format(hole['pos'][0]*self.mmpx))
circle.set('cy', '{}'.format(hole['pos'][1]*self.mmpx))
circle.set('r', '{}'.format(hole['d']/2*self.mmpx))
def gen_pac_line(self, line):
layer = self.svg.find('.//g[@id="{}"]'.format(line['layer']))
if(line['layer'] == 'Courtyard'):
color = '#e63a81'
elif(line['layer'] == 'Silk'):
color = '#111111'
else:
color = '#000000'
el = ET.SubElement(layer, 'path')
el.set('style', 'fill:none;fill-rule:evenodd;stroke:{color};stroke-width:{}mm;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;stroke-miterlimit:4;stroke-dasharray:none'.format(line['width'], color=color))
pathdata = ''
first = True
for (x,y) in line['vertices']:
if(first):
pathdata += 'M ' + '{},{}'.format(x*self.mmpx,y*self.mmpx)
first = False
elif(x == 'end'):
pathdata += ' z'
else:
pathdata += ' L ' + '{},{}'.format(x*self.mmpx,y*self.mmpx)
el.set('d', pathdata)
def gen_circle(self, layer_name, diameter, pos):
layer = self.svg.find('.//g[@id="{}"]'.format(layer_name))
if(layer_name == 'Courtyard'):
color = '#e63a81'
elif(layer_name == 'Silk'):
color = '#111111'
else:
color = '#000000'
circle = ET.SubElement(layer, 'circle')
        circle.set('style', 'fill:{color};fill-opacity:1;stroke:none;stroke-width:0.0;stroke-linecap:square;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1'.format(color=color))
circle.set('cx', '{}'.format(pos[0]*self.mmpx))
circle.set('cy', '{}'.format(pos[1]*self.mmpx))
circle.set('r', '{}'.format(diameter/2*self.mmpx))
if(__name__ == '__main__'):
target = get_target()
target.output('test.svg')
|
#!/usr/bin/env python
# coding=utf-8
__author__ = 'ZHang Chuan'
'''
Models for user, blog, comment.
'''
import time, uuid
from transwarp.db import next_id
from transwarp.orm import Model, StringField, BooleanField, FloatField, TextField
class User(Model):
__table__ = 'users'
id = StringField(primary_key=True, default=next_id, ddl='varchar(50)')
email = StringField(updatable=False, ddl='varchar(50)')
password = StringField(ddl='varchar(50)')
admin = BooleanField()
name = StringField(ddl='varchar(50)')
image = StringField(ddl='varchar(500)')
created_at = FloatField(updatable=False, default=time.time)
class Blog(Model):
__table__ = 'blogs'
id = StringField(primary_key=True, default=next_id, ddl='varchar(50)')
user_id = StringField(updatable=False, ddl='varchar(50)')
user_name = StringField(ddl='varchar(50)')
user_image = StringField(ddl='varchar(500)')
name = StringField(ddl='varchar(50)')
summary = StringField(ddl='varchar(200)')
content = TextField()
created_at = FloatField(updatable=False, default=time.time)
class Comment(Model):
__table__ = 'comments'
id = StringField(primary_key=True, default=next_id, ddl='varchar(50)')
blog_id = StringField(updatable=False, ddl='varchar(50)')
user_id = StringField(updatable=False, ddl='varchar(50)')
user_name = StringField(ddl='varchar(50)')
user_image = StringField(ddl='varchar(500)')
content = TextField()
created_at = FloatField(updatable=False, default=time.time)
|
"""
`calcifer.tree` module
This module implements a non-deterministic nested dictionary (tree).
The tree comprises leaf nodes, dict nodes, and "unknown nodes" -- nodes
which are known to exist but undefined beyond that.
Ultimately, the policy tree contains *definitions*, a higher-level abstraction
on "value": LeafPolicyNode uses the property `definition`, which may compare
to specific values or generate a template for procuring the value.
"""
from abc import ABCMeta, abstractmethod
import logging
from calcifer.definitions import Value
logger = logging.getLogger(__name__)
class PolicyNode:
"""
Abstract class for node tree.
"""
__metaclass__ = ABCMeta
@abstractmethod
def get_template(self):
"""
Generate the template for the node (recursively)
"""
pass
@abstractmethod
def select(self, path=None):
"""
Traverse the tree and retrieve a specific node with a given path.
`select` retrieves existing nodes or populates default nodes based
on path values.
Returns a tuple of (selected_node, new_root)
"""
if not path:
return (self, self)
@abstractmethod
def match(self, value):
"""
`match` compares a node with a given value, possibly returning an
altered node in the process. For unknown nodes, this means populating
the node with a leaf node defined as having that value.
For nodes with a more complex definition, the behavior of `match`
defers to the definition of the node.
"""
return False, self
@abstractmethod
def choose(self, step):
"""
Moves down the given step and returns:
(the chosen node, the new version of itself (list or dict), and a dict of the steps not taken)
"""
return (None, None, {})
@abstractmethod
def reconstruct(self, possible_steps):
"""
This method takes in a dictionary of possible steps that could be taken and returns a node object
"""
raise NotImplementedError
@staticmethod
def from_obj(obj):
"""
To facilitate converting nested dict data structures, the static
method `from_obj` recursively constructs a PolicyNode tree from
an object
"""
if isinstance(obj, PolicyNode):
return obj
if isinstance(obj, dict):
return DictPolicyNode(**obj)
if isinstance(obj, list):
return ListPolicyNode(*obj)
return LeafPolicyNode(Value(obj))
class UnknownPolicyNode(PolicyNode):
def __init__(self):
pass
@property
def value(self):
return None
def reconstruct(self, possible_steps):
raise TypeError
def get_template(self):
return {}
def choose(self, step):
if isinstance(step, int):
new_self = ListPolicyNode()
steps_not_taken = {k: UnknownPolicyNode() for k in range(step)}
else:
new_self = DictPolicyNode()
steps_not_taken = {}
return (UnknownPolicyNode(), new_self, steps_not_taken)
def select(self, path=None):
if not path:
return (self, self)
# recurse
first = path[0]
rest = path[1:]
value, subpolicy = UnknownPolicyNode().select(rest)
return value, DictPolicyNode(**{first: subpolicy})
def match(self, value):
return True, LeafPolicyNode(Value(value))
def __repr__(self):
return "UnknownPolicyNode()"
def __eq__(self, other):
return isinstance(other, UnknownPolicyNode)
class LeafPolicyNode(PolicyNode):
def __init__(self, definition=None):
self._definition = definition
@property
def definition(self):
return self._definition
@property
def value(self):
return self._definition.value
def reconstruct(self, possible_steps):
if possible_steps:
raise TypeError
return self.__class__(self._definition)
def get_template(self):
return self.definition.get_template()
def choose(self, step):
raise TypeError("You're at the end dummy!")
def select(self, path=None):
if path:
logger.debug((
"Attempting to select sub-path %r of %r"
), path, self)
raise Exception(
"Node cannot be traversed, attempted sub-path: {}".format(path)
)
return (self, self)
def match(self, value):
matches, new_definition = self.definition.match(value)
return matches, LeafPolicyNode(new_definition)
def __repr__(self):
return (
"LeafPolicyNode("
"definition={definition}"
")"
).format(definition=self.definition)
def __eq__(self, other):
return (
isinstance(other, LeafPolicyNode) and
other.definition == self.definition
)
class DictPolicyNode(PolicyNode):
def __init__(self, **nodes):
self._nodes = {
k: PolicyNode.from_obj(v)
for k, v in nodes.items()
}
@property
def nodes(self):
return self._nodes
@property
def keys(self):
return self._nodes.keys()
@property
def value(self):
return {
name: node.value
for name, node in self.nodes.items()
}
def reconstruct(self, possible_steps):
return DictPolicyNode(**possible_steps)
def choose(self, step):
chosen_node = self._nodes.get(step, UnknownPolicyNode())
new_self = self
steps_not_taken = {k: v for k, v in self._nodes.items() if k != step}
return chosen_node, new_self, steps_not_taken
def get_template(self):
return {
k: v.get_template() for k, v in self.nodes.items()
}
def select(self, path=None):
if not path:
return (self, self)
first = path[0]
rest = path[1:]
node, new_first = self[first].select(rest)
new_nodes = {k: v for k, v in self.nodes.items()}
new_nodes[first] = new_first
return node, DictPolicyNode(**new_nodes)
def match(self, value):
return False, self
def __setitem__(self, key, node):
self._nodes[key] = node
def __getitem__(self, key):
if key not in self._nodes:
return UnknownPolicyNode()
return self._nodes[key]
def __repr__(self):
args = ['{}={}'.format(k, v) for k, v in self.nodes.items()]
return "DictPolicyNode({})".format(", ".join(args))
def __eq__(self, other):
return (
isinstance(other, DictPolicyNode) and
other.nodes == self.nodes
)
class ListPolicyNode(PolicyNode):
def __init__(self, *nodes):
self._nodes = [
PolicyNode.from_obj(v)
for v in nodes
]
@property
def nodes(self):
return self._nodes
@property
def keys(self):
return [key for key in range(len(self._nodes))]
@property
def value(self):
return [
node.value
for node in self.nodes
]
def reconstruct(self, possible_steps):
if not possible_steps:
return ListPolicyNode()
highest_key = sorted(possible_steps.keys(), reverse=True)[0]
return ListPolicyNode(*[
possible_steps.get(i, UnknownPolicyNode())
for i in range(highest_key + 1)
])
def choose(self, step):
if len(self._nodes) > step:
# We have the step for sure
chosen_node = self._nodes[step]
else:
# step does not exist yet, must populate list with UnknownPolicyNodes
chosen_node = UnknownPolicyNode()
new_self = self
steps_not_taken = {i: self._nodes[i] for i in range(len(self._nodes)) if i != step}
return chosen_node, new_self, steps_not_taken
def get_template(self):
return [
v.get_template() for v in self.nodes
]
def select(self, path=None):
if not path:
return (self, self)
first = int(path[0])
rest = path[1:]
node, new_first = self[first].select(rest)
new_nodes = [v for v in self.nodes]
new_nodes[first] = new_first
return node, ListPolicyNode(*new_nodes)
def match(self, value):
return False, self
def __setitem__(self, key, node):
key = int(key)
sparsity = key - len(self._nodes) + 1
self._nodes.extend([UnknownPolicyNode()] * sparsity)
self._nodes[key] = node
def __getitem__(self, key):
try:
            key = int(key)
            return self._nodes[key]
        except (TypeError, ValueError, IndexError):
return UnknownPolicyNode()
def __repr__(self):
args = ['{}'.format(v) for v in self.nodes]
return "ListPolicyNode({})".format(", ".join(args))
def __eq__(self, other):
return (
isinstance(other, ListPolicyNode) and
other.nodes == self.nodes
)
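if __name__ == "__main__":
    # Minimal sketch of the API defined above. It assumes that
    # calcifer.definitions.Value(obj) simply wraps `obj` and exposes it as
    # `.value`, which is how the leaf nodes above use it.
    root = PolicyNode.from_obj({"user": {"name": "alice"}, "tags": ["a", "b"]})
    node, root = root.select(["user", "name"])
    print(node)          # LeafPolicyNode(definition=...)
    print(root.value)    # nested plain values reconstructed from the tree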
|
#######################################
# pyGPGO examples
# example2d: Shows how Bayesian optimization works on a two-dimensional
# rastrigin function, step by step.
#######################################
import os
from collections import OrderedDict
import numpy as np
import matplotlib.pyplot as plt
from pyGPGO.GPGO import GPGO
from pyGPGO.surrogates.GaussianProcess import GaussianProcess
from pyGPGO.acquisition import Acquisition
from pyGPGO.covfunc import squaredExponential
def rastrigin(x, y, A=10):
return (2 * A + (x ** 2 - A * np.cos(2 * np.pi * x)) + (y ** 2 - A * np.cos(2 * np.pi * y)))
def plot_f(x_values, y_values, f):
z = np.zeros((len(x_values), len(y_values)))
for i in range(len(x_values)):
for j in range(len(y_values)):
z[i, j] = f(x_values[i], y_values[j])
plt.imshow(z.T, origin='lower', extent=[np.min(x_values), np.max(x_values), np.min(y_values), np.max(y_values)])
plt.colorbar()
    plt.savefig(os.path.join(os.getcwd(), 'mthesis_text/figures/chapter3/rosen/rosen.pdf'))
    plt.show()
def plot2dgpgo(gpgo):
tested_X = gpgo.GP.X
n = 100
r_x, r_y = gpgo.parameter_range[0], gpgo.parameter_range[1]
x_test = np.linspace(r_x[0], r_x[1], n)
y_test = np.linspace(r_y[0], r_y[1], n)
z_hat = np.empty((len(x_test), len(y_test)))
z_var = np.empty((len(x_test), len(y_test)))
ac = np.empty((len(x_test), len(y_test)))
for i in range(len(x_test)):
for j in range(len(y_test)):
res = gpgo.GP.predict([x_test[i], y_test[j]])
z_hat[i, j] = res[0]
z_var[i, j] = res[1][0]
ac[i, j] = -gpgo._acqWrapper(np.atleast_1d([x_test[i], y_test[j]]))
fig = plt.figure()
a = fig.add_subplot(2, 2, 1)
a.set_title('Posterior mean')
plt.imshow(z_hat.T, origin='lower', extent=[r_x[0], r_x[1], r_y[0], r_y[1]])
plt.colorbar()
plt.plot(tested_X[:, 0], tested_X[:, 1], 'wx', markersize=10)
a = fig.add_subplot(2, 2, 2)
a.set_title('Posterior variance')
plt.imshow(z_var.T, origin='lower', extent=[r_x[0], r_x[1], r_y[0], r_y[1]])
plt.plot(tested_X[:, 0], tested_X[:, 1], 'wx', markersize=10)
plt.colorbar()
a = fig.add_subplot(2, 2, 3)
a.set_title('Acquisition function')
plt.imshow(ac.T, origin='lower', extent=[r_x[0], r_x[1], r_y[0], r_y[1]])
plt.colorbar()
gpgo._optimizeAcq(method='L-BFGS-B', n_start=500)
plt.plot(gpgo.best[0], gpgo.best[1], 'gx', markersize=15)
plt.tight_layout()
plt.savefig(os.path.join(os.getcwd(), 'mthesis_text/figures/chapter3/rosen/{}.pdf'.format(item)))
plt.show()
if __name__ == '__main__':
x = np.linspace(-1, 1, 1000)
y = np.linspace(-1, 1, 1000)
plot_f(x, y, rastrigin)
np.random.seed(20)
sexp = squaredExponential()
gp = GaussianProcess(sexp)
acq = Acquisition(mode='ExpectedImprovement')
param = OrderedDict()
param['x'] = ('cont', [-1, 1])
param['y'] = ('cont', [-1, 1])
gpgo = GPGO(gp, acq, rastrigin, param, n_jobs=-1)
gpgo._firstRun()
for item in range(7):
plot2dgpgo(gpgo)
gpgo.updateGP()
|
# -*- coding: utf-8 -*-
"""meta(\*\*metadata): Marker for metadata addition.
To add metadata to a test simply pass the kwargs as plugins wish.
You can write your own plugins. They generally live in ``metaplugins/`` directory but you can
define them pretty much everywhere py.test loads modules. Plugin has a name and a set
of callbacks that are called when certain combination of keys is present in the metadata.
To define plugin, do like this:
.. code-block:: python
@plugin("plugin_name")
def someaction(plugin_name):
print(plugin_name) # Will contain value of `plugin_name` key of metadict
This is the simplest usage, where it is supposed that the plugin checks only one key with the
same name as the plugin's name. I won't use this one in the later examples; I will use the
more verbose one.
.. code-block:: python
@plugin("plugin_name", keys=["plugin_name", "another_key"])
def someaction(plugin_name, another_key):
print(plugin_name) # Will contain value of `plugin_name` key of metadict
print(another_key) # Similarly this one
This one reacts when the two keys are present. You can make even more complex setups:
.. code-block:: python
@plugin("plugin_name", keys=["plugin_name"])
@plugin("plugin_name", ["plugin_name", "another_key"]) # You don't have to write keys=
def someaction(plugin_name, another_key=None):
print(plugin_name) # Will contain value of `plugin_name` key of metadict
print(another_key) # Similarly this one if specified, otherwise None
This creates a non-required parameter for the action.
You can specify as many actions as you wish per plugin. The only thing that limits you is the
correct action choice. First, all the actions are filtered by present keys in metadata. Then
after this selection, only the action with the most matched keywords is called. Bear this
in mind. If this is not enough in the future, it can be extended if you wish.
It has a command-line option that allows you to disable certain plugins. Just specify
``--disablemetaplugins a,b,c`` where a, b and c are the plugins that should be disabled
"""
from collections import namedtuple
from kwargify import kwargify
from types import FunctionType
import pytest
from lya import AttrDict
from utils.log import logger
def pytest_configure(config):
config.addinivalue_line("markers", __doc__.splitlines()[0])
def pytest_addoption(parser):
group = parser.getgroup('Meta plugins')
group.addoption('--disablemetaplugins',
action='store',
default="",
dest='disable_metaplugins',
help='Comma-separated list of metaplugins to disable')
@pytest.mark.hookwrapper
def pytest_pycollect_makeitem(collector, name, obj):
# Put the meta mark on objects as soon as pytest begins to collect them
if isinstance(obj, FunctionType) and not hasattr(obj, 'meta'):
pytest.mark.meta(obj)
yield
@pytest.mark.hookwrapper
def pytest_collection_modifyitems(session, config, items):
for item in items:
try:
item._metadata = AttrDict(item.function.meta.kwargs)
except AttributeError:
logger.warning('AttributeError getting metadata from item: {}'.format(
str(item.nodeid))
)
item._metadata = AttrDict()
meta = item.get_marker("meta")
if meta is None:
continue
metas = reversed([x.kwargs for x in meta]) # Extract the kwargs, reverse the order
for meta in metas:
item._metadata.update(meta)
yield
@pytest.fixture(scope="function")
def meta(request):
return request.node._metadata
Plugin = namedtuple('Plugin', ['name', 'metas', 'function', 'kwargs'])
class PluginContainer(object):
SETUP = "setup"
TEARDOWN = "teardown"
BEFORE_RUN = "before_run"
AFTER_RUN = "after_run"
DEFAULT = SETUP
def __init__(self):
self._plugins = []
def __call__(self, name, keys=None, **kwargs):
if keys is None:
keys = [name]
def f(g):
self._plugins.append(Plugin(name, keys, kwargify(g), kwargs))
return g # So the markers can be chained
return f
if "plugin" not in globals():
plugin = PluginContainer()
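# Registration sketch (kept as a comment so importing this module does not
# register a real plugin; the plugin name and metadata key are placeholders):
#
#     @plugin("server_roles", keys=["server_roles"])
#     def configure_roles(item, server_roles):
#         ...  # runs at setup time (the default) when the key is in metadata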
def run_plugins(item, when):
possible_plugins = []
for plug in plugin._plugins:
if all([meta in item._metadata.keys() for meta in plug.metas])\
and plug.kwargs.get("run", plugin.DEFAULT) == when:
possible_plugins.append(plug)
by_names = {}
for plug in possible_plugins:
if plug.name not in by_names:
by_names[plug.name] = []
by_names[plug.name].append(plug)
disabled_plugins = item.config.getvalue("disable_metaplugins") or ""
if not disabled_plugins:
disabled_plugins = []
else:
disabled_plugins = [name.strip() for name in disabled_plugins.split(",")]
for plugin_name, plugin_objects in by_names.iteritems():
if plugin_name in disabled_plugins:
logger.info("Ignoring plugin {} due to commandline option".format(plugin_name))
continue
plugin_objects.sort(key=lambda p: len(p.metas), reverse=True)
plug = plugin_objects[0]
env = {"item": item}
for meta in plug.metas:
env[meta] = item._metadata[meta]
logger.info(
"Calling metaplugin {}({}) with meta signature {} {}".format(
plugin_name, plug.function.__name__, str(plug.metas), str(plug.kwargs)))
plug.function(**env)
logger.info(
"Metaplugin {}({}) with meta signature {} {} has finished".format(
plugin_name, plug.function.__name__, str(plug.metas), str(plug.kwargs)))
def pytest_runtest_setup(item):
run_plugins(item, plugin.SETUP)
def pytest_runtest_teardown(item):
run_plugins(item, plugin.TEARDOWN)
@pytest.mark.hookwrapper
def pytest_runtest_call(item):
run_plugins(item, plugin.BEFORE_RUN)
try:
yield
finally:
run_plugins(item, plugin.AFTER_RUN)
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from django import forms
from django.utils.text import slugify
from django.contrib.auth import authenticate
from mozart.core.messages import custom_error_messages, media_messages
def eval_blank(data):
if str(data).isspace():
raise forms.ValidationError(custom_error_messages['blank'], code='blank')
return data
def eval_iexact(data, model, field, label):
original = data
model_name = (model._meta.verbose_name).lower()
field_label = (model._meta.get_field(label).verbose_name).lower()
lookup = '%s__iexact' % field
if field == 'slug':
data = slugify(data)
lookup = field
try:
model.objects.get(**{lookup: data})
except model.DoesNotExist:
return original
raise forms.ValidationError(custom_error_messages['unique'], code='unique',
params={'model_name': model_name, 'field_label': field_label})
def eval_matching(data_1, data_2):
if data_1 != data_2:
raise forms.ValidationError(custom_error_messages['mismatch'],)
return data_1 and data_2
def eval_password(username, password):
user_cache = authenticate(username=username, password=password)
if user_cache is None:
raise forms.ValidationError(custom_error_messages['incorrect_password'])
return username and password
# Media Validators
def eval_audio(data):
file_type = str(data.content_type)
if file_type == 'audio/mp3':
return data
raise forms.ValidationError(media_messages['invalid_audio'],)
def eval_image(data):
file_type = str(data.content_type)
if file_type == 'image/jpeg' or file_type == 'image/bmp' \
or file_type == 'image/png':
return data
raise forms.ValidationError(media_messages['invalid_image'],)
def eval_general(data):
file_type = str(data.content_type)
if file_type == 'image/jpeg' or file_type == 'image/bmp' \
or file_type == 'image/png' or file_type == 'audio/mp3':
return data
raise forms.ValidationError(media_messages['invalid_archive'],)
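# Usage sketch (hypothetical form; the field, model, and label names below are
# placeholders, not part of this module):
#
#     class SignupForm(forms.Form):
#         username = forms.CharField()
#
#         def clean_username(self):
#             data = eval_blank(self.cleaned_data['username'])
#             return eval_iexact(data, User, 'username', 'username')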
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from datetime import datetime
from dateutil.relativedelta import relativedelta
from odoo import api, fields, models
from odoo.tools import DEFAULT_SERVER_DATE_FORMAT as DF
import odoo.addons.decimal_precision as dp
class AccountInvoice(models.Model):
_inherit = 'account.invoice'
@api.multi
def action_cancel(self):
res = super(AccountInvoice, self).action_cancel()
self.env['account.asset.asset'].sudo().search([('invoice_id', 'in', self.ids)]).write({'active': False})
return res
@api.multi
def action_move_create(self):
result = super(AccountInvoice, self).action_move_create()
for inv in self:
context = dict(self.env.context)
# Within the context of an invoice,
# this default value is for the type of the invoice, not the type of the asset.
# This has to be cleaned from the context before creating the asset,
# otherwise it tries to create the asset with the type of the invoice.
context.pop('default_type', None)
inv.invoice_line_ids.with_context(context).asset_create()
return result
class AccountInvoiceLine(models.Model):
_inherit = 'account.invoice.line'
asset_category_id = fields.Many2one('account.asset.category', string='Asset Category')
asset_start_date = fields.Date(string='Asset Start Date', compute='_get_asset_date', readonly=True, store=True)
asset_end_date = fields.Date(string='Asset End Date', compute='_get_asset_date', readonly=True, store=True)
asset_mrr = fields.Float(string='Monthly Recurring Revenue', compute='_get_asset_date', readonly=True, digits=dp.get_precision('Account'), store=True)
@api.one
@api.depends('asset_category_id', 'invoice_id.date_invoice')
def _get_asset_date(self):
self.asset_mrr = 0
self.asset_start_date = False
self.asset_end_date = False
cat = self.asset_category_id
if cat:
months = cat.method_number * cat.method_period
if self.invoice_id.type in ['out_invoice', 'out_refund']:
self.asset_mrr = self.price_subtotal_signed / months
if self.invoice_id.date_invoice:
start_date = datetime.strptime(self.invoice_id.date_invoice, DF).replace(day=1)
end_date = (start_date + relativedelta(months=months, days=-1))
self.asset_start_date = start_date.strftime(DF)
self.asset_end_date = end_date.strftime(DF)
@api.one
def asset_create(self):
if self.asset_category_id:
vals = {
'name': self.name,
'code': self.invoice_id.number or False,
'category_id': self.asset_category_id.id,
'value': self.price_subtotal_signed,
'partner_id': self.invoice_id.partner_id.id,
'company_id': self.invoice_id.company_id.id,
'currency_id': self.invoice_id.company_currency_id.id,
'date': self.invoice_id.date_invoice,
'invoice_id': self.invoice_id.id,
}
changed_vals = self.env['account.asset.asset'].onchange_category_id_values(vals['category_id'])
vals.update(changed_vals['value'])
asset = self.env['account.asset.asset'].create(vals)
if self.asset_category_id.open_asset:
asset.validate()
return True
@api.onchange('asset_category_id')
def onchange_asset_category_id(self):
if self.invoice_id.type == 'out_invoice' and self.asset_category_id:
self.account_id = self.asset_category_id.account_asset_id.id
elif self.invoice_id.type == 'in_invoice' and self.asset_category_id:
self.account_id = self.asset_category_id.account_asset_id.id
@api.onchange('uom_id')
def _onchange_uom_id(self):
result = super(AccountInvoiceLine, self)._onchange_uom_id()
self.onchange_asset_category_id()
return result
@api.onchange('product_id')
def _onchange_product_id(self):
vals = super(AccountInvoiceLine, self)._onchange_product_id()
if self.product_id:
if self.invoice_id.type == 'out_invoice':
self.asset_category_id = self.product_id.product_tmpl_id.deferred_revenue_category_id
elif self.invoice_id.type == 'in_invoice':
self.asset_category_id = self.product_id.product_tmpl_id.asset_category_id
return vals
def _set_additional_fields(self, invoice):
if not self.asset_category_id:
if invoice.type == 'out_invoice':
self.asset_category_id = self.product_id.product_tmpl_id.deferred_revenue_category_id.id
elif invoice.type == 'in_invoice':
self.asset_category_id = self.product_id.product_tmpl_id.asset_category_id.id
super(AccountInvoiceLine, self)._set_additional_fields(invoice)
|
# DDE support for Pythonwin
#
# Seems to work fine (in the context that IE4 seems to have broken
# DDE on _all_ NT4 machines I have tried, but only when a "Command Prompt" window
# is open. Strange, but true. If you have problems with this, close all Command Prompts!
import win32ui
import win32api, win32con
from pywin.mfc import object
from dde import *
import traceback
import string
class DDESystemTopic(object.Object):
def __init__(self, app):
self.app = app
object.Object.__init__(self, CreateServerSystemTopic())
def Exec(self, data):
try:
# print "Executing", cmd
self.app.OnDDECommand(data)
except:
# The DDE Execution failed.
print "Error executing DDE command."
traceback.print_exc()
return 0
class DDEServer(object.Object):
def __init__(self, app):
self.app = app
object.Object.__init__(self, CreateServer())
self.topic = self.item = None
def CreateSystemTopic(self):
return DDESystemTopic(self.app)
def Shutdown(self):
self._obj_.Shutdown()
self._obj_.Destroy()
if self.topic is not None:
self.topic.Destroy()
self.topic = None
if self.item is not None:
self.item.Destroy()
self.item = None
def OnCreate(self):
return 1
def Status(self, msg):
try:
win32ui.SetStatusText(msg)
except win32ui.error:
pass
|
"""
Filesystem-related utilities.
"""
from __future__ import print_function
from threading import Lock
from tempfile import mkdtemp
from contextlib import contextmanager
from uuid import uuid4
import errno
import weakref
import atexit
import posixpath
import ntpath
import os.path
import shutil
import os
import re
import stat
import platform
from rez.vendor.six import six
from rez.utils.platform_ import platform_
is_windows = platform.system() == "Windows"
class TempDirs(object):
"""Tempdir manager.
Makes tmpdirs and ensures they're cleaned up on program exit.
"""
instances_lock = Lock()
instances = []
def __init__(self, tmpdir, prefix="rez_"):
self.tmpdir = tmpdir
self.prefix = prefix
self.dirs = set()
self.lock = Lock()
with TempDirs.instances_lock:
TempDirs.instances.append(weakref.ref(self))
def mkdtemp(self, cleanup=True):
path = mkdtemp(dir=self.tmpdir, prefix=self.prefix)
if not cleanup:
return path
with self.lock:
self.dirs.add(path)
return path
def __del__(self):
self.clear()
def clear(self):
with self.lock:
if not self.dirs:
return
dirs = self.dirs
self.dirs = set()
for path in dirs:
if os.path.exists(path) and not os.getenv("REZ_KEEP_TMPDIRS"):
shutil.rmtree(path)
@classmethod
def clear_all(cls):
with TempDirs.instances_lock:
instances = cls.instances[:]
for ref in instances:
instance = ref()
if instance is not None:
instance.clear()
atexit.register(TempDirs.clear_all)
@contextmanager
def make_path_writable(path):
"""Temporarily make `path` writable, if possible.
Args:
path (str): Path to make temporarily writable
"""
try:
orig_mode = os.stat(path).st_mode
new_mode = orig_mode
if not os.access(path, os.W_OK):
new_mode = orig_mode | stat.S_IWUSR
# make writable
if new_mode != orig_mode:
os.chmod(path, new_mode)
except OSError:
# ignore access errors here, and just do nothing. It will be more
# intuitive for the calling code to fail on access instead.
#
orig_mode = None
new_mode = None
# yield, then reset mode back to original
try:
yield
finally:
if new_mode != orig_mode:
os.chmod(path, orig_mode)
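# Example sketch: temporarily lift a read-only bit while writing, then restore
# the original mode on exit (the path below is only an illustration).
#
#     with make_path_writable("/path/to/readonly/dir"):
#         ...  # create or modify files under the directory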
@contextmanager
def retain_cwd():
"""Context manager that keeps cwd unchanged afterwards.
"""
cwd = os.getcwd()
try:
yield
finally:
os.chdir(cwd)
def get_existing_path(path, topmost_path=None):
"""Get the longest parent path in `path` that exists.
If `path` exists, it is returned.
Args:
path (str): Path to test
topmost_path (str): Do not test this path or above
Returns:
str: Existing path, or None if no path was found.
"""
prev_path = None
if topmost_path:
topmost_path = os.path.normpath(topmost_path)
while True:
if os.path.exists(path):
return path
path = os.path.dirname(path)
if path == prev_path:
return None
if topmost_path and os.path.normpath(path) == topmost_path:
return None
prev_path = path
def safe_listdir(path):
"""Safe listdir.
Works in a multithread/proc scenario where dirs may be deleted at any time
"""
try:
return os.listdir(path)
except OSError as e:
if e.errno in (errno.ENOENT, errno.ENOTDIR):
return []
raise
def safe_makedirs(path):
"""Safe makedirs.
Works in a multithreaded scenario.
"""
if not os.path.exists(path):
try:
os.makedirs(path)
except OSError:
if not os.path.exists(path):
raise
def safe_remove(path):
"""Safely remove the given file or directory.
Works in a multithreaded scenario.
"""
if not os.path.exists(path):
return
try:
if os.path.isdir(path) and not os.path.islink(path):
shutil.rmtree(path)
else:
os.remove(path)
except OSError:
if os.path.exists(path):
raise
def forceful_rmtree(path):
"""Like shutil.rmtree, but may change permissions.
Specifically, non-writable dirs within `path` can cause rmtree to fail. This
func chmod's to writable to avoid this issue, if possible.
Also handled:
* path length over 259 char (on Windows)
* unicode path
"""
if six.PY2:
path = unicode(path)
def _on_error(func, path, exc_info):
try:
if is_windows:
path = windows_long_path(path)
parent_path = os.path.dirname(path)
if not os.access(parent_path, os.W_OK):
st = os.stat(parent_path)
os.chmod(parent_path, st.st_mode | stat.S_IWUSR)
if not os.access(path, os.W_OK):
st = os.stat(path)
os.chmod(path, st.st_mode | stat.S_IWUSR)
except:
# avoid confusion by ensuring original exception is reraised
pass
func(path)
shutil.rmtree(path, onerror=_on_error)
def replacing_symlink(source, link_name):
"""Create symlink that overwrites any existing target.
"""
with make_tmp_name(link_name) as tmp_link_name:
os.symlink(source, tmp_link_name)
replace_file_or_dir(link_name, tmp_link_name)
def replacing_copy(src, dest, follow_symlinks=False):
"""Perform copy that overwrites any existing target.
Will copy/copytree `src` to `dest`, and will remove `dest` if it exists,
regardless of what it is.
If `follow_symlinks` is False, symlinks are preserved, otherwise their
contents are copied.
Note that this behavior is different to `shutil.copy`, which copies src
into dest if dest is an existing dir.
"""
with make_tmp_name(dest) as tmp_dest:
if os.path.islink(src) and not follow_symlinks:
# special case - copy just a symlink
src_ = os.readlink(src)
os.symlink(src_, tmp_dest)
elif os.path.isdir(src):
# copy a dir
shutil.copytree(src, tmp_dest, symlinks=(not follow_symlinks))
else:
# copy a file
shutil.copy2(src, tmp_dest)
replace_file_or_dir(dest, tmp_dest)
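# Illustrative sketch (not part of the original module): unlike shutil.copy,
# replacing_copy removes an existing `dest` (file, dir or symlink) and puts
# the copy in its place rather than copying into it. The paths below are
# hypothetical.
def _example_replacing_copy(build_payload, install_path):
    # symlinks inside build_payload are preserved because follow_symlinks=False
    replacing_copy(build_payload, install_path, follow_symlinks=False)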
def replace_file_or_dir(dest, source):
"""Replace `dest` with `source`.
    Acts like an `os.rename` if `dest` does not exist. Otherwise, `dest` is
    deleted and `source` is renamed to `dest`.
"""
from rez.vendor.atomicwrites import replace_atomic
if not os.path.exists(dest):
try:
os.rename(source, dest)
return
except:
if not os.path.exists(dest):
raise
try:
replace_atomic(source, dest)
return
except:
pass
with make_tmp_name(dest) as tmp_dest:
os.rename(dest, tmp_dest)
os.rename(source, dest)
def additive_copytree(src, dst, symlinks=False, ignore=None):
"""Version of `copytree` that merges into an existing directory.
"""
if not os.path.exists(dst):
os.makedirs(dst)
for item in os.listdir(src):
s = os.path.join(src, item)
d = os.path.join(dst, item)
if os.path.isdir(s):
additive_copytree(s, d, symlinks, ignore)
else:
shutil.copy2(s, d)
@contextmanager
def make_tmp_name(name):
"""Generates a tmp name for a file or dir.
This is a tempname that sits in the same dir as `name`. If it exists on
disk at context exit time, it is deleted.
"""
path, base = os.path.split(name)
# there's a reason this isn't a hidden file:
# https://github.com/nerdvegas/rez/pull/1088
#
tmp_base = "_tmp-%s-%s" % (base, uuid4().hex)
tmp_name = os.path.join(path, tmp_base)
try:
yield tmp_name
finally:
safe_remove(tmp_name)
def is_subdirectory(path_a, path_b):
"""Returns True if `path_a` is a subdirectory of `path_b`."""
path_a = os.path.realpath(path_a)
path_b = os.path.realpath(path_b)
try:
relative = os.path.relpath(path_a, path_b)
except ValueError:
# Different mounts on Windows:
# ValueError: path is on mount 'c:', start on mount 'd:'
#
return False
return not relative.startswith(os.pardir + os.sep)
def find_matching_symlink(path, source):
"""Find a symlink under `path` that points at `source`.
If source is relative, it is considered relative to `path`.
Returns:
str: Name of symlink found, or None.
"""
def to_abs(target):
if os.path.isabs(target):
return target
else:
return os.path.normpath(os.path.join(path, target))
abs_source = to_abs(source)
for name in os.listdir(path):
linkpath = os.path.join(path, name)
if os.path.islink(linkpath):
source_ = os.readlink(linkpath)
if to_abs(source_) == abs_source:
return name
return None
def copy_or_replace(src, dst):
'''try to copy with mode, and if it fails, try replacing
'''
try:
shutil.copy(src, dst)
except (OSError, IOError) as e:
# It's possible that the file existed, but was owned by someone
# else - in that situation, shutil.copy might then fail when it
# tries to copy perms.
# However, it's possible that we have write perms to the dir -
# in which case, we can just delete and replace
import errno
if e.errno == errno.EPERM:
import tempfile
# try copying into a temporary location beside the old
# file - if we have perms to do that, we should have perms
# to then delete the old file, and move the new one into
# place
if os.path.isdir(dst):
dst = os.path.join(dst, os.path.basename(src))
dst_dir, dst_name = os.path.split(dst)
dst_temp = tempfile.mktemp(prefix=dst_name + '.', dir=dst_dir)
shutil.copy(src, dst_temp)
if not os.path.isfile(dst_temp):
raise RuntimeError(
"shutil.copy completed successfully, but path"
" '%s' still did not exist" % dst_temp)
os.remove(dst)
shutil.move(dst_temp, dst)
def copytree(src, dst, symlinks=False, ignore=None, hardlinks=False):
'''copytree that supports hard-linking
'''
names = os.listdir(src)
if ignore is not None:
ignored_names = ignore(src, names)
else:
ignored_names = set()
if hardlinks:
def copy(srcname, dstname):
try:
# try hard-linking first
os.link(srcname, dstname)
except OSError:
shutil.copy2(srcname, dstname)
else:
copy = shutil.copy2
if not os.path.isdir(dst):
os.makedirs(dst)
errors = []
for name in names:
if name in ignored_names:
continue
srcname = os.path.join(src, name)
dstname = os.path.join(dst, name)
try:
if symlinks and os.path.islink(srcname):
linkto = os.readlink(srcname)
os.symlink(linkto, dstname)
elif os.path.isdir(srcname):
copytree(srcname, dstname, symlinks, ignore)
else:
copy(srcname, dstname)
# XXX What about devices, sockets etc.?
except (IOError, os.error) as why:
errors.append((srcname, dstname, str(why)))
# catch the Error from the recursive copytree so that we can
# continue with other files
except shutil.Error as err:
errors.extend(err.args[0])
try:
shutil.copystat(src, dst)
except shutil.WindowsError:
# can't copy file access times on Windows
pass
except OSError as why:
errors.extend((src, dst, str(why)))
if errors:
raise shutil.Error(errors)
def movetree(src, dst):
"""Attempts a move, and falls back to a copy+delete if this fails
"""
try:
shutil.move(src, dst)
except:
copytree(src, dst, symlinks=True, hardlinks=True)
shutil.rmtree(src)
def safe_chmod(path, mode):
"""Set the permissions mode on path, but only if it differs from the current mode.
"""
if stat.S_IMODE(os.stat(path).st_mode) != mode:
os.chmod(path, mode)
def to_nativepath(path):
path = path.replace('\\', '/')
return os.path.join(*path.split('/'))
def to_ntpath(path):
return ntpath.sep.join(path.split(posixpath.sep))
def to_posixpath(path):
return posixpath.sep.join(path.split(ntpath.sep))
def canonical_path(path, platform=None):
""" Resolves symlinks, and formats filepath.
Resolves symlinks, lowercases if filesystem is case-insensitive,
formats filepath using slashes appropriate for platform.
Args:
path (str): Filepath being formatted
platform (rez.utils.platform_.Platform): Indicates platform path is being
formatted for. Defaults to current platform.
Returns:
str: Provided path, formatted for platform.
"""
if platform is None:
platform = platform_
path = os.path.normpath(os.path.realpath(path))
if not platform.has_case_sensitive_filesystem:
return path.lower()
return path
def encode_filesystem_name(input_str):
"""Encodes an arbitrary unicode string to a generic filesystem-compatible
non-unicode filename.
The result after encoding will only contain the standard ascii lowercase
letters (a-z), the digits (0-9), or periods, underscores, or dashes
(".", "_", or "-"). No uppercase letters will be used, for
    compatibility with case-insensitive filesystems.
The rules for the encoding are:
1) Any lowercase letter, digit, period, or dash (a-z, 0-9, ., or -) is
encoded as-is.
2) Any underscore is encoded as a double-underscore ("__")
3) Any uppercase ascii letter (A-Z) is encoded as an underscore followed
by the corresponding lowercase letter (ie, "A" => "_a")
4) All other characters are encoded using their UTF-8 encoded unicode
       representation, in the following format: "_NHH...", where:
a) N represents the number of bytes needed for the UTF-8 encoding,
except with N=0 for one-byte representation (the exception for N=1
is made both because it means that for "standard" ascii characters
in the range 0-127, their encoding will be _0xx, where xx is their
ascii hex code; and because it mirrors the ways UTF-8 encoding
itself works, where the number of bytes needed for the character can
be determined by counting the number of leading "1"s in the binary
representation of the character, except that if it is a 1-byte
sequence, there are 0 leading 1's).
b) HH represents the bytes of the corresponding UTF-8 encoding, in
hexadecimal (using lower-case letters)
As an example, the character "*", whose (hex) UTF-8 representation
of 2A, would be encoded as "_02a", while the "euro" symbol, which
has a UTF-8 representation of E2 82 AC, would be encoded as
"_3e282ac". (Note that, strictly speaking, the "N" part of the
encoding is redundant information, since it is essentially encoded
in the UTF-8 representation itself, but it makes the resulting
string more human-readable, and easier to decode).
As an example, the string "Foo_Bar (fun).txt" would get encoded as:
_foo___bar_020_028fun_029.txt
"""
if isinstance(input_str, six.string_types):
input_str = unicode(input_str)
elif not isinstance(input_str, unicode):
raise TypeError("input_str must be a %s" % six.string_types[0].__name__)
as_is = u'abcdefghijklmnopqrstuvwxyz0123456789.-'
uppercase = u'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
result = []
for char in input_str:
if char in as_is:
result.append(char)
elif char == u'_':
result.append('__')
elif char in uppercase:
result.append('_%s' % char.lower())
else:
utf8 = char.encode('utf8')
N = len(utf8)
if N == 1:
N = 0
HH = ''.join('%x' % ord(c) for c in utf8)
result.append('_%d%s' % (N, HH))
return ''.join(result)
_FILESYSTEM_TOKEN_RE = re.compile(r'(?P<as_is>[a-z0-9.-])|(?P<underscore>__)|_(?P<uppercase>[a-z])|_(?P<N>[0-9])')
_HEX_RE = re.compile('[0-9a-f]+$')
def decode_filesystem_name(filename):
"""Decodes a filename encoded using the rules given in encode_filesystem_name
to a unicode string.
"""
result = []
remain = filename
i = 0
while remain:
# use match, to ensure it matches from the start of the string...
match = _FILESYSTEM_TOKEN_RE.match(remain)
if not match:
raise ValueError("incorrectly encoded filesystem name %r"
" (bad index: %d - %r)" % (filename, i,
remain[:2]))
match_str = match.group(0)
match_len = len(match_str)
i += match_len
remain = remain[match_len:]
match_dict = match.groupdict()
if match_dict['as_is']:
result.append(unicode(match_str))
elif match_dict['underscore']:
result.append(u'_')
elif match_dict['uppercase']:
result.append(unicode(match_dict['uppercase'].upper()))
elif match_dict['N']:
N = int(match_dict['N'])
if N == 0:
N = 1
# hex-encoded, so need to grab 2*N chars
bytes_len = 2 * N
i += bytes_len
bytes = remain[:bytes_len]
remain = remain[bytes_len:]
# need this check to ensure that we don't end up eval'ing
# something nasty...
if not _HEX_RE.match(bytes):
raise ValueError("Bad utf8 encoding in name %r"
" (bad index: %d - %r)" % (filename, i, bytes))
bytes_repr = ''.join('\\x%s' % bytes[i:i + 2]
for i in xrange(0, bytes_len, 2))
bytes_repr = "'%s'" % bytes_repr
result.append(eval(bytes_repr).decode('utf8'))
else:
raise ValueError("Unrecognized match type in filesystem name %r"
" (bad index: %d - %r)" % (filename, i, remain[:2]))
return u''.join(result)
def test_encode_decode():
def do_test(orig, expected_encoded):
print('=' * 80)
print(orig)
encoded = encode_filesystem_name(orig)
print(encoded)
assert encoded == expected_encoded
decoded = decode_filesystem_name(encoded)
print(decoded)
assert decoded == orig
do_test("Foo_Bar (fun).txt", '_foo___bar_020_028fun_029.txt')
# u'\u20ac' == Euro symbol
do_test(u"\u20ac3 ~= $4.06", '_3e282ac3_020_07e_03d_020_0244.06')
def walk_up_dirs(path):
"""Yields absolute directories starting with the given path, and iterating
    up through all its parents, until it reaches a root directory."""
prev_path = None
current_path = os.path.abspath(path)
while current_path != prev_path:
yield current_path
prev_path = current_path
current_path = os.path.dirname(prev_path)
def windows_long_path(dos_path):
"""Prefix '\\?\' for path longer than 259 char (Win32API limitation)
"""
path = os.path.abspath(dos_path)
if path.startswith("\\\\?\\"):
pass
elif path.startswith("\\\\"):
path = "\\\\?\\UNC\\" + path[2:]
else:
path = "\\\\?\\" + path
return path
# Copyright 2013-2016 Allan Johns.
#
# This library is free software: you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation, either
# version 3 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library. If not, see <http://www.gnu.org/licenses/>.
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Backend'
db.create_table(u'backend_backend', (
('created', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime.now, blank=True)),
('modified', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime.now, blank=True)),
('backendId', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('kind', self.gf('django.db.models.fields.CharField')(unique=True, max_length=20)),
('description', self.gf('django.db.models.fields.CharField')(max_length=1000)),
('serverIp', self.gf('django.db.models.fields.GenericIPAddressField')(max_length=39)),
('serverFqdn', self.gf('django.db.models.fields.CharField')(max_length=200)),
))
db.send_create_signal(u'backend', ['Backend'])
def backwards(self, orm):
# Deleting model 'Backend'
db.delete_table(u'backend_backend')
models = {
u'backend.backend': {
'Meta': {'ordering': "('-modified', '-created')", 'object_name': 'Backend'},
'backendId': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '1000'}),
'kind': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '20'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'serverFqdn': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'serverIp': ('django.db.models.fields.GenericIPAddressField', [], {'max_length': '39'})
}
}
complete_apps = ['backend']
|
# -*- coding: utf-8 -*-
# Automatic provisioning of AWS S3 buckets.
import time
import botocore
import boto3
import nixops.util
import nixops.resources
import nixops.ec2_utils
class S3BucketDefinition(nixops.resources.ResourceDefinition):
"""Definition of an S3 bucket."""
@classmethod
def get_type(cls):
return "s3-bucket"
@classmethod
def get_resource_type(cls):
return "s3Buckets"
def __init__(self, xml, config={}):
nixops.resources.ResourceDefinition.__init__(self, xml, config)
self.bucket_name = xml.find("attrs/attr[@name='name']/string").get("value")
self.region = xml.find("attrs/attr[@name='region']/string").get("value")
self.access_key_id = xml.find("attrs/attr[@name='accessKeyId']/string").get("value")
self.policy = xml.find("attrs/attr[@name='policy']/string").get("value")
self.website_enabled = self.config["website"]["enabled"]
self.website_suffix = self.config["website"]["suffix"]
self.website_error_document = self.config["website"]["errorDocument"]
def show_type(self):
return "{0} [{1}]".format(self.get_type(), self.region)
class S3BucketState(nixops.resources.ResourceState):
"""State of an S3 bucket."""
state = nixops.util.attr_property("state", nixops.resources.ResourceState.MISSING, int)
bucket_name = nixops.util.attr_property("ec2.bucketName", None)
access_key_id = nixops.util.attr_property("ec2.accessKeyId", None)
region = nixops.util.attr_property("ec2.region", None)
@classmethod
def get_type(cls):
return "s3-bucket"
def __init__(self, depl, name, id):
nixops.resources.ResourceState.__init__(self, depl, name, id)
self._conn = None
def show_type(self):
s = super(S3BucketState, self).show_type()
if self.region: s = "{0} [{1}]".format(s, self.region)
return s
@property
def resource_id(self):
return self.bucket_name
def get_definition_prefix(self):
return "resources.s3Buckets."
def connect(self):
if self._conn: return
(access_key_id, secret_access_key) = nixops.ec2_utils.fetch_aws_secret_key(self.access_key_id)
self._conn = boto3.session.Session(region_name=self.region if self.region != "US" else "us-east-1",
aws_access_key_id=access_key_id,
aws_secret_access_key=secret_access_key)
def create(self, defn, check, allow_reboot, allow_recreate):
self.access_key_id = defn.access_key_id or nixops.ec2_utils.get_access_key_id()
if not self.access_key_id:
raise Exception("please set ‘accessKeyId’, $EC2_ACCESS_KEY or $AWS_ACCESS_KEY_ID")
if len(defn.bucket_name) > 63:
raise Exception("bucket name ‘{0}’ is longer than 63 characters.".format(defn.bucket_name))
self.connect()
s3client = self._conn.client('s3')
if check or self.state != self.UP:
self.log("creating S3 bucket ‘{0}’...".format(defn.bucket_name))
try:
ACL = 'private' # ..or: public-read, public-read-write, authenticated-read
s3loc = region_to_s3_location(defn.region)
if s3loc == "US":
s3client.create_bucket(ACL = ACL,
Bucket = defn.bucket_name)
else:
s3client.create_bucket(ACL = ACL,
Bucket = defn.bucket_name,
CreateBucketConfiguration = {
'LocationConstraint': s3loc
})
except botocore.exceptions.ClientError as e:
if e.response['Error']['Code'] != "BucketAlreadyOwnedByYou": raise
with self.depl._db:
self.state = self.UP
self.bucket_name = defn.bucket_name
self.region = defn.region
if defn.policy:
self.log("setting S3 bucket policy on ‘{0}’...".format(defn.bucket_name))
s3client.put_bucket_policy(Bucket = defn.bucket_name,
Policy = defn.policy.strip())
else:
try:
s3client.delete_bucket_policy(Bucket = defn.bucket_name)
except botocore.exceptions.ClientError as e:
# This seems not to happen - despite docs indicating it should:
# [http://docs.aws.amazon.com/AmazonS3/latest/API/RESTBucketDELETEpolicy.html]
if e.response['ResponseMetadata']['HTTPStatusCode'] != 204: raise # (204 : Bucket didn't have any policy to delete)
if not defn.website_enabled:
try:
s3client.delete_bucket_website(Bucket = defn.bucket_name)
except botocore.exceptions.ClientError as e:
if e.response['ResponseMetadata']['HTTPStatusCode'] != 204: raise
else:
website_config = { 'IndexDocument': { 'Suffix': defn.website_suffix } }
if defn.website_error_document != "":
website_config['ErrorDocument'] = { 'Key': defn.website_error_document}
s3client.put_bucket_website(Bucket = defn.bucket_name, WebsiteConfiguration = website_config)
def destroy(self, wipe=False):
if self.state == self.UP:
self.connect()
try:
self.log("destroying S3 bucket ‘{0}’...".format(self.bucket_name))
bucket = self._conn.resource('s3').Bucket(self.bucket_name)
try:
bucket.delete()
except botocore.exceptions.ClientError as e:
if e.response['Error']['Code'] != "BucketNotEmpty": raise
if not self.depl.logger.confirm("are you sure you want to destroy S3 bucket ‘{0}’?".format(self.bucket_name)): return False
bucket.objects.all().delete()
bucket.delete()
except botocore.exceptions.ClientError as e:
if e.response['Error']['Code'] != "NoSuchBucket": raise
return True
def region_to_s3_location(region):
# S3 location names are identical to EC2 regions, except for
# us-east-1 and eu-west-1.
if region == "eu-west-1": return "EU"
elif region == "us-east-1": return "US"
else: return region
|
# -*- coding: utf-8 -*-
from ...qt import QtWidgets, QtCore
from ..custom import FlatButton, HorizontalLine, LabelAlignRight
class TransferFunctionWidget(QtWidgets.QWidget):
def __init__(self, *args):
super(TransferFunctionWidget, self).__init__(*args)
self.create_widgets()
self.create_layout()
self.style_widgets()
self.create_signals()
def create_widgets(self):
self.load_std_btn = FlatButton("Load Std")
self.load_std_bkg_btn = FlatButton("Load Std Bkg")
self.load_sample_btn = FlatButton("Load Sample")
self.load_sample_bkg_btn = FlatButton("Load Sample Bkg")
self.std_filename_lbl = LabelAlignRight('')
self.std_bkg_filename_lbl = LabelAlignRight("")
self.sample_filename_lbl = LabelAlignRight("")
self.sample_bkg_filename_lbl = LabelAlignRight("")
self.std_bkg_scaling_sb = QtWidgets.QDoubleSpinBox()
self.std_bkg_scaling_sb.setValue(1.0)
self.std_bkg_scaling_sb.setSingleStep(0.01)
self.sample_bkg_scaling_sb = QtWidgets.QDoubleSpinBox()
self.sample_bkg_scaling_sb.setValue(1.0)
self.sample_bkg_scaling_sb.setSingleStep(0.01)
self.smooth_sb = QtWidgets.QDoubleSpinBox()
self.smooth_sb.setValue(1.0)
self.smooth_sb.setSingleStep(0.1)
def create_layout(self):
self.main_layout = QtWidgets.QVBoxLayout()
self.activate_cb = QtWidgets.QCheckBox("activate")
self.main_layout.addWidget(self.activate_cb)
self.main_layout.addWidget(HorizontalLine())
self.transfer_layout = QtWidgets.QGridLayout()
self.transfer_layout.addWidget(self.load_sample_btn, 0, 0)
self.transfer_layout.addWidget(self.sample_filename_lbl, 0, 1)
self.transfer_layout.addWidget(self.load_sample_bkg_btn, 1, 0)
self.transfer_layout.addWidget(self.sample_bkg_filename_lbl, 1, 1)
self.transfer_layout.addWidget(self.load_std_btn, 2, 0)
self.transfer_layout.addWidget(self.std_filename_lbl, 2, 1)
self.transfer_layout.addWidget(self.load_std_bkg_btn, 3, 0)
self.transfer_layout.addWidget(self.std_bkg_filename_lbl, 3, 1)
self.scaling_gb = QtWidgets.QGroupBox("")
self.scaling_layout = QtWidgets.QGridLayout()
self.scaling_layout.addItem(QtWidgets.QSpacerItem(0, 0, QtWidgets.QSizePolicy.MinimumExpanding,
QtWidgets.QSizePolicy.Fixed), 0, 0)
self.scaling_layout.addWidget(LabelAlignRight("Sample bkg scaling:"), 0, 1)
self.scaling_layout.addWidget(self.sample_bkg_scaling_sb, 0, 2)
self.scaling_layout.addWidget(LabelAlignRight("Std bkg scaling:"), 1, 1)
self.scaling_layout.addWidget(self.std_bkg_scaling_sb, 1, 2)
self.scaling_layout.addWidget(LabelAlignRight("Smoothing:"), 2, 1)
self.scaling_layout.addWidget(self.smooth_sb, 2, 2)
self.scaling_gb.setLayout(self.scaling_layout)
self.transfer_layout.addWidget(self.scaling_gb, 4, 0, 1, 2)
self.main_layout.addLayout(self.transfer_layout)
self.setLayout(self.main_layout)
def style_widgets(self):
self.main_layout.setContentsMargins(0, 0, 0, 0)
self.main_layout.setSpacing(5)
self.transfer_layout.setContentsMargins(5, 5, 5, 5)
self.sample_bkg_scaling_sb.setAlignment(QtCore.Qt.AlignRight | QtCore.Qt.AlignVCenter)
self.std_bkg_scaling_sb.setAlignment(QtCore.Qt.AlignRight | QtCore.Qt.AlignVCenter)
self.smooth_sb.setAlignment(QtCore.Qt.AlignRight | QtCore.Qt.AlignVCenter)
self.sample_bkg_scaling_sb.setMinimumWidth(75)
self.std_bkg_scaling_sb.setMinimumWidth(75)
self.smooth_sb.setMinimumWidth(75)
def create_signals(self):
pass
|
#
# Copyright (c) 2010-2014, MIT Probabilistic Computing Project
#
# Lead Developers: Dan Lovell and Jay Baxter
# Authors: Dan Lovell, Baxter Eaves, Jay Baxter, Vikash Mansinghka
# Research Leads: Vikash Mansinghka, Patrick Shafto
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
import sys
from collections import Counter
#
import numpy
import pylab
pylab.ion()
pylab.show()
#
import crosscat.tests.plot_utils as pu
import crosscat.utils.file_utils as fu
import crosscat.utils.sample_utils as su
import crosscat.utils.api_utils as au
# parse some arguments
parser = argparse.ArgumentParser()
parser.add_argument('pkl_name', type=str)
parser.add_argument('--inf_seed', default=0, type=int)
parser.add_argument('--hostname', default='127.0.0.1', type=str)
args = parser.parse_args()
pkl_name = args.pkl_name
inf_seed = args.inf_seed
hostname = args.hostname
# FIXME: getting weird error on conversion to int: too large from inside pyx
def get_next_seed(max_val=32767): # sys.maxint):
return random_state.randint(max_val)
# resume from saved name
save_dict = fu.unpickle(pkl_name)
random_state = numpy.random.RandomState(inf_seed)
M_c = save_dict['M_c']
X_L = save_dict['X_L']
X_D = save_dict['X_D']
# FIXME: test constraints
# Y = [su.Bunch(index=2,value=2.3), su.Bunch(index=0,value=-4.)]
Y = None
# test simple_predictive_sample_observed
views_replicating_samples_params = su.determine_replicating_samples_params(X_L, X_D)
views_samples = []
for replicating_samples_params in views_replicating_samples_params:
this_view_samples = []
for replicating_sample_params in replicating_samples_params:
this_view_this_sample = su.simple_predictive_sample(
M_c, X_L, X_D, get_next_seed=get_next_seed, **replicating_sample_params)
this_view_samples.extend(this_view_this_sample)
views_samples.append(this_view_samples)
for view_idx, view_samples in enumerate(views_samples):
data_array = numpy.array(view_samples)
pu.plot_T(data_array)
pylab.title('simple_predictive_sample observed, view %s on local' % view_idx)
# test simple_predictive_sample_observed REMOTE
# hostname = 'ec2-23-22-208-4.compute-1.amazonaws.com'
URI = 'http://' + hostname + ':8007'
method_name = 'simple_predictive_sample'
#
views_samples = []
for replicating_samples_params in views_replicating_samples_params:
this_view_samples = []
for replicating_sample_params in replicating_samples_params:
args_dict = dict(
M_c=save_dict['M_c'],
X_L=save_dict['X_L'],
X_D=save_dict['X_D'],
Y=replicating_sample_params['Y'],
Q=replicating_sample_params['Q'],
n=replicating_sample_params['n'],
)
this_view_this_sample, id = au.call(
method_name, args_dict, URI)
print id
this_view_samples.extend(this_view_this_sample)
views_samples.append(this_view_samples)
for view_idx, view_samples in enumerate(views_samples):
data_array = numpy.array(view_samples)
pu.plot_T(data_array)
pylab.title('simple_predictive_sample observed, view %s on remote' % view_idx)
# test simple_predictive_sample_unobserved
observed_Q = views_replicating_samples_params[0][0]['Q']
Q = [(int(1E6), old_tuple[1]) for old_tuple in observed_Q]
new_row_samples = []
new_row_sample = su.simple_predictive_sample(
M_c, X_L, X_D, Y, Q, get_next_seed, n=1000)
new_row_samples.extend(new_row_sample)
new_row_samples = numpy.array(new_row_samples)
pu.plot_T(new_row_samples)
# once more with constraint
Y = [(int(1E6), 0, 100)]
new_row_sample = su.simple_predictive_sample(
M_c, X_L, X_D, Y, Q, get_next_seed, n=1)
# test impute
# imputed_value = su.impute(M_c, X_L, X_D, Y, [Q[3]], 100, get_next_seed)
|
########
# Copyright (c) 2013-2019 Cloudify Platform Ltd. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# * See the License for the specific language governing permissions and
# * limitations under the License.
from os.path import join
from integration_tests import BaseTestCase
from integration_tests.framework import docker
from integration_tests.tests.constants import MANAGER_PYTHON
from integration_tests.tests.utils import (assert_messages_in_log,
get_resource as resource)
from integration_tests.tests.utils import run_postgresql_command
COLLECTOR_SCRIPTS = ['collect_cloudify_uptime', 'collect_cloudify_usage']
SCRIPTS_DESTINATION_PATH = '/opt/cloudify/usage_collector'
LOG_PATH = '/var/log/cloudify/usage_collector'
LOG_FILE = 'usage_collector.log'
class TestUsageCollectorBase(BaseTestCase):
def run_scripts_with_deployment(self, yaml_path, messages):
deployment, _ = self.deploy_application(resource(yaml_path),
timeout_seconds=120)
self.run_collector_scripts_and_assert(messages)
self.undeploy_application(deployment.id)
def run_collector_scripts_and_assert(self, messages):
for script in COLLECTOR_SCRIPTS:
docker.execute(self.env.container_id, '{0} {1}.py'.format(
MANAGER_PYTHON,
join(SCRIPTS_DESTINATION_PATH, script))
)
assert_messages_in_log(self.env.container_id,
self.workdir,
messages,
join(LOG_PATH, LOG_FILE))
def clean_timestamps(self):
        # This is necessary to force the collector scripts to actually run in
        # subsequent tests, even though not enough time has passed since the
        # last run.
run_postgresql_command(
self.env.container_id,
"UPDATE usage_collector SET hourly_timestamp=NULL, "
"daily_timestamp=NULL")
def clean_usage_collector_log(self):
# We need to clean the usage_collector log before each test, because
# each test uses it for asserting different values.
old_usage_log = join(LOG_PATH, self._testMethodName)
test_usage_log = join(LOG_PATH, LOG_FILE)
self.execute_on_manager(['mv', test_usage_log, old_usage_log])
self.execute_on_manager(['touch', test_usage_log])
|
# -*- coding: utf-8 -*-
# Copyright 2016 IBM Corp. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
from watson_developer_cloud import LanguageTranslationV2 as LanguageTranslationService
def getTranslationService():
return LanguageTranslationService(username='<your username key for the Watson language translation service>',
password='<your password key for the service>')
def identifyLanguage(app, data):
txt = data.encode("utf-8", "replace")
language_translation = getTranslationService()
langsdetected = language_translation.identify(txt)
app.logger.info(json.dumps(langsdetected, indent=2))
primarylang = langsdetected["languages"][0]
retData = {key: primarylang[key] for key in ('language', 'confidence')}
app.logger.info(json.dumps(retData, indent=2))
return retData
def checkForTranslation(app, fromlang, tolang):
supportedModels = []
lt = getTranslationService()
models = lt.list_models()
modelList = models.get("models")
supportedModels = [model['model_id'] for model in modelList
if fromlang == model['source']
and tolang == model['target']]
return supportedModels
def performTranslation(app, txt, primarylang, targetlang):
lt = getTranslationService()
translation = lt.translate(txt, source=primarylang, target=targetlang)
theTranslation = None
if translation and ("translations" in translation):
theTranslation = translation['translations'][0]['translation']
return theTranslation
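# Illustrative sketch (not part of the original module): chaining the helpers
# above to translate arbitrary text into English. `app` is assumed to be a
# Flask app (only app.logger is used), and the service credentials in
# getTranslationService() must be filled in first.
def _example_translate_to_english(app, text):
    detected = identifyLanguage(app, text)
    sourcelang = detected['language']
    if sourcelang == 'en':
        return text
    if not checkForTranslation(app, sourcelang, 'en'):
        return None  # no model available for this language pair
    return performTranslation(app, text, sourcelang, 'en')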
|
# -*- coding: utf-8 -*-
"""
.. module:: deck
:synopsis: Encapsulates the behavior of card collections
.. moduleauthor:: Zach Mitchell <[email protected]>
"""
from random import shuffle
from typing import List
from .cards import (
Card,
CardFaction,
CardEffect,
CardAction,
CardTarget
)
from .cardrepo import CardRepo
from .exceptions import (
RealmsException,
MainDeckEmpty,
PlayerDeckEmpty,
PlayerDeckInitSize,
PlayerDeckInitContents,
UUIDNotFoundError,
HandInitError
)
from collections import Counter
from typing import NamedTuple
CardList = List[Card]
EffectList = List[CardEffect]
FactionList = List[CardFaction]
EffectRecord = NamedTuple('EffectRecord', [
('target', CardTarget),
('action', CardAction),
('value', int),
('uuid', str),
('provider', str)])
class PlayerDeck(object):
"""
Records the state of the player's deck
At any given point in time the player may have three piles of cards: undrawn cards, a
hand of cards, and a pile of used (discarded) cards. PlayerDeck records which cards are
in which pile, provides an interface from which a hand of cards can be assembled, and
shuffles the deck when necessary.
Parameters
----------
player_cards : List[Card]
The list of cards from which the player's starting deck will be constructed
Raises
------
PlayerDeckInitSize
Raised when constructing the deck with the wrong number of cards
PlayerDeckInitContents
Raised when constructing the deck with cards other than Vipers and Scouts
"""
starting_size = 10
def __init__(self, player_cards: CardList):
try:
self._validate_deck_size(player_cards)
self._validate_deck_contents(player_cards)
except RealmsException:
raise
self._undrawn: CardList = player_cards
shuffle(self._undrawn) # shuffled in place
self._discards: CardList = []
@staticmethod
def _validate_deck_size(cards: CardList) -> None:
"""Ensures that the starting deck contains the correct
number of cards
Parameters
----------
cards : CardList
The tentative starting deck
Raises
------
PlayerDeckInitSize
Raised if the tentative starting deck is not the correct size
"""
if len(cards) != PlayerDeck.starting_size:
raise PlayerDeckInitSize(len(cards))
return
@staticmethod
def _validate_deck_contents(cards) -> None:
"""Ensures that the tentative starting deck contains only Vipers and Scouts
Parameters
----------
cards : CardList
The tentative starting deck
Raises
------
PlayerDeckInitContents
Raised if the tentative starting deck contains cards other than Vipers or Scouts
"""
for c in cards:
if (c.name != 'Viper') and (c.name != 'Scout'):
raise PlayerDeckInitContents(c.name)
return
def _next_card(self) -> Card:
"""Produces the next card from the player's deck
Attempts to draw a card from the top of the undrawn pile. If
the undrawn pile is empty, the undrawn pile is replenished from
the discard pile and shuffled before attempting to draw a card again.
An attempt to draw a card from the undrawn pile while both the undrawn
pile and discard pile are empty will raise a ``PlayerDeckEmpty`` exception.
Returns
-------
Card
A card from the top of the undrawn pile
Raises
------
PlayerDeckEmpty
Raised when attempting to draw a card while both undrawn and discard
piles are empty
"""
if len(self._undrawn) > 0:
return self._undrawn.pop()
elif len(self._discards) > 0:
self._refill_undrawn()
return self._undrawn.pop()
else:
raise PlayerDeckEmpty
@property
def cards_remaining(self) -> int:
"""The total number of cards left in the undrawn and discard piles
Returns
-------
int
The number of cards left to draw from
"""
return len(self._undrawn) + len(self._discards)
def _refill_undrawn(self) -> None:
"""Refills the undrawn pile with cards from the discard pile
Note
----
The cards in the discard pile are shuffled before being placed
back into the undrawn pile
"""
self._undrawn: CardList = self._discards
shuffle(self._undrawn) # shuffled in place
self._discards: CardList = []
return
def discard(self, card: Card) -> None:
"""Sends the card to the discard pile
Parameters
----------
card : Card
The card to send to the discard pile
"""
self._discards.append(card)
return
def draw(self, num=5) -> CardList:
"""Draws the specified number of cards from the undrawn pile
Parameters
----------
num : int (Optional)
The number of cards to draw (Default is 5)
Returns
-------
List[Card]
The list of cards that were drawn
Raises
------
IndexError
Raised if no cards are left to draw, or the number of cards requested
is not a positive integer
Note
----
If there are cards remaining in the deck but there are fewer cards than
were requested, then as many cards as possible are returned.
"""
if (num <= 0) or (self.cards_remaining == 0) or (not isinstance(num, int)):
raise IndexError
cards: CardList = []
for _ in range(num):
try:
cards.append(self._next_card())
except PlayerDeckEmpty:
break
return cards
def _scrap(self, card):
"""
Permanently removes a card from the discard pile
"""
pass
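# Illustrative sketch (not part of the original module): the draw/discard
# cycle PlayerDeck is built around. `starter_cards` is assumed to be the list
# of 10 Viper/Scout Card objects that CardRepo normally provides.
def _example_player_deck_cycle(starter_cards: CardList) -> int:
    deck = PlayerDeck(starter_cards)
    hand = deck.draw(5)            # first hand of five
    for card in hand:
        deck.discard(card)         # used cards go to the discard pile
    deck.draw(5)                   # second hand empties the undrawn pile
    # the next draw refills the undrawn pile from the discards and reshuffles
    return len(deck.draw(5))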
class MainDeck(object):
"""The deck from which players can acquire cards
Parameters
----------
cardrepo : CardRepo
The repository from which the cards are obtained
"""
def __init__(self, cardrepo: CardRepo):
self._repo: CardRepo = cardrepo
self._cards: CardList = self._repo.main_deck_cards()
shuffle(self._cards)
return
def next_card(self) -> Card:
"""Produces the next card from the main deck
Returns
-------
Card
A card from the top of the main deck
Raises
------
MainDeckEmpty
Raised when attempting to draw a card when the deck is empty
"""
if len(self._cards) > 0:
return self._cards.pop()
else:
raise MainDeckEmpty
class TradeRow(object):
"""Presents the cards that players may acquire
Parameters
----------
maindeck : MainDeck
The deck from which the trade row is drawn
cardrepo : CardRepo
The repository from which cards are obtained
"""
def __init__(self, maindeck: MainDeck, cardrepo: CardRepo):
self._maindeck: MainDeck = maindeck
self._repo: CardRepo = cardrepo
self._explorer = None
self._cards = []
@property
def available(self) -> CardList:
"""Produces the list of all cards available for purchase
Returns
-------
List[Card]
The list of cards available for purchase
"""
return self.cards + [self.explorer]
@property
def cards(self) -> CardList:
"""Produces the list of cards available for purchase
from the main deck
Returns
-------
List[Card]
The list of available cards from the main deck
"""
while len(self._cards) < 5:
try:
card: Card = self._maindeck.next_card()
except MainDeckEmpty:
break
self._cards.append(card)
return self._cards
@property
def explorer(self) -> Card:
"""Produces the current Explorer available for purchase
Returns
-------
Card
The current Explorer
"""
if self._explorer is None:
self._explorer: Card = self._repo.new_explorer()
return self._explorer
def acquire(self, uuid: str) -> Card:
"""Produces the card with the specified UUID
Parameters
----------
uuid : str
The UUID of the card the player wishes to acquire
Returns
-------
Card
The card with the specified UUID
Raises
------
UUIDNotFoundError
Raised when the UUID of the requested card is not found
in the list of available cards
"""
cards_bools = [c.uuid == uuid for c in self.cards]
if True in cards_bools:
i = cards_bools.index(True)
return self._cards.pop(i)
elif self.explorer.uuid == uuid:
card = self._explorer
self._explorer = None
return card
else:
raise UUIDNotFoundError
def scrap(self, uuid: str) -> None:
"""Permanently removes a card from the trade row
Parameters
----------
uuid : str
The UUID of the card to remove
"""
cards_bools = [c.uuid == uuid for c in self.cards]
if True in cards_bools:
i = cards_bools.index(True)
del self._cards[i]
elif self.explorer.uuid == uuid:
self._explorer = None
else:
raise UUIDNotFoundError
return
class Hand(object):
"""The player's hand of cards
A Hand is made from a list of cards drawn from the undrawn pile of the player's deck,
as well as any bases that were played previously and have not been destroyed.
The processing of cards into a collection of effects is a multi-step process:
1. The basic effects are pulled from each card
2. The factions are tallied up to see which cards may activate their ally abilities
3. Ally abilities are pulled from each card
4. The effects are aggregated by their action types
5. Effects are applied in whatever order the user chooses
6. If cards are drawn as the result of an action, the effects list is updated
Parameters
----------
to_draw : int
The number of cards to draw initially
existing_bases : List[Card]
Any bases that were played previously and have not yet been destroyed
playerdeck : PlayerDeck
The player's deck
"""
def __init__(self, to_draw: int, existing_bases: CardList, playerdeck: PlayerDeck):
if (to_draw < 0) or (to_draw > 5):
raise HandInitError
try:
drawn: CardList = playerdeck.draw(to_draw)
except IndexError:
drawn: CardList = []
self.cards = drawn + existing_bases
self._playerdeck = playerdeck
return
@staticmethod
def _collect_basic_effects(cards: List[Card]) -> List[EffectRecord]:
"""Assembles a list of `EffectRecord`s from the cards in the hand
"""
basic_effects: List[EffectRecord] = []
for c in cards:
effects: List[CardEffect] = c.effects_basic
records = [EffectRecord(target=e.target,
action=e.action,
value=e.value,
uuid=e.uuid,
provider=c.uuid)
for e in effects]
basic_effects += records
        return basic_effects
@staticmethod
def _collect_ally_factions(cards: List[Card]) -> List[CardFaction]:
"""Assembles a list of factions that should have their ally abilities activated
"""
        factions: List[CardFaction] = [c.faction for c in cards]
if CardFaction.ALL in factions:
return [CardFaction.BLOB, CardFaction.STAR, CardFaction.FEDERATION, CardFaction.MACHINE]
counts = Counter(factions)
allies: List[CardFaction] = [key for key in counts.keys()
if counts[key] > 1 and key != CardFaction.UNALIGNED]
return allies
@staticmethod
def _collect_ally_effects(cards: List[Card], facs: List[CardFaction]) -> List[EffectRecord]:
"""Assembles a list of the ally effects that are applicable
"""
ally_effects: List[EffectRecord] = []
for c in cards:
effects: List[CardEffect] = c.effects_ally
records = [EffectRecord(target=e.target,
action=e.action,
value=e.value,
uuid=e.uuid,
provider=c.uuid)
for e in effects if c.faction in facs]
ally_effects += records
return ally_effects
def _collect_effects(self) -> List[EffectRecord]:
"""Assembles a list of effects provided by the player's hand
"""
basic_effects: List[EffectRecord] = Hand._collect_basic_effects(self.cards)
ally_factions: List[CardFaction] = Hand._collect_ally_factions(self.cards)
ally_effects: List[EffectRecord] = Hand._collect_ally_effects(self.cards, ally_factions)
return basic_effects + ally_effects
|
import pytest
import asyncio
from aioredis import ConnectionClosedError, ReplyError
from aioredis.pool import ConnectionsPool
from aioredis import Redis
@pytest.mark.run_loop
async def test_repr(create_redis, loop, server):
redis = await create_redis(
server.tcp_address, db=1, loop=loop)
assert repr(redis) in {
'<Redis <RedisConnection [db:1]>>',
'<Redis <ConnectionsPool [db:1, size:[1:10], free:1]>>',
}
redis = await create_redis(
server.tcp_address, db=0, loop=loop)
assert repr(redis) in {
'<Redis <RedisConnection [db:0]>>',
'<Redis <ConnectionsPool [db:0, size:[1:10], free:1]>>',
}
@pytest.mark.run_loop
async def test_auth(redis):
expected_message = "ERR Client sent AUTH, but no password is set"
with pytest.raises(ReplyError, match=expected_message):
await redis.auth('')
@pytest.mark.run_loop
async def test_echo(redis):
resp = await redis.echo('ECHO')
assert resp == b'ECHO'
with pytest.raises(TypeError):
await redis.echo(None)
@pytest.mark.run_loop
async def test_ping(redis):
assert await redis.ping() == b'PONG'
@pytest.mark.run_loop
async def test_quit(redis, loop):
expected = (ConnectionClosedError, ConnectionError)
try:
assert b'OK' == await redis.quit()
except expected:
pass
if not isinstance(redis.connection, ConnectionsPool):
        # reader task may not yet have been cancelled and _do_close not called
# so the ConnectionClosedError may be raised (or ConnectionError)
with pytest.raises(expected):
try:
await redis.ping()
except asyncio.CancelledError:
assert False, "Cancelled error must not be raised"
# wait one loop iteration until it get surely closed
await asyncio.sleep(0, loop=loop)
assert redis.connection.closed
with pytest.raises(ConnectionClosedError):
await redis.ping()
@pytest.mark.run_loop
async def test_select(redis):
assert redis.db == 0
resp = await redis.select(1)
assert resp is True
assert redis.db == 1
assert redis.connection.db == 1
@pytest.mark.run_loop
async def test_encoding(create_redis, loop, server):
redis = await create_redis(
server.tcp_address,
db=1, encoding='utf-8',
loop=loop)
assert redis.encoding == 'utf-8'
@pytest.mark.run_loop
async def test_yield_from_backwards_compatability(create_redis, server, loop):
redis = await create_redis(server.tcp_address, loop=loop)
assert isinstance(redis, Redis)
# TODO: there should not be warning
# with pytest.warns(UserWarning):
with await redis as client:
assert isinstance(client, Redis)
assert client is not redis
assert await client.ping()
@pytest.redis_version(4, 0, 0, reason="SWAPDB is available since redis>=4.0.0")
@pytest.mark.run_loop
async def test_swapdb(create_redis, start_server, loop):
server = start_server('swapdb_1')
cli1 = await create_redis(server.tcp_address, db=0, loop=loop)
cli2 = await create_redis(server.tcp_address, db=1, loop=loop)
await cli1.flushall()
assert await cli1.set('key', 'val') is True
assert await cli1.exists('key')
assert not await cli2.exists('key')
assert await cli1.swapdb(0, 1) is True
assert not await cli1.exists('key')
assert await cli2.exists('key')
|
#!/usr/bin/env python
#
# Copyright (c) 2011 Somia Dynamoid Oy
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
import BaseHTTPServer as basehttpserver
import Queue as queuelib
import argparse
import asyncore
import contextlib
import os
import signal
import socket
import stat
import struct
import syslog
import threading
import time
import MySQLdb as mysql
import signalfd.async
class Stat(object):
def __init__(self):
self.value = 0
self.lock = threading.Lock()
def increment(self):
with self.lock:
self.value += 1
def get(self):
with self.lock:
return self.value
class Stats(object):
def __init__(self):
self.__stats = { name: Stat() for name in ["input", "output", "error"] }
def __getattr__(self, name):
return self.__stats[name]
def __iter__(self):
return self.__stats.iteritems()
class Item(object):
def __init__(self, database=None, query=None, terminate=False):
self.database = database
self.query = query
self.terminate = terminate
class Listener(asyncore.dispatcher):
def __init__(self, queue, address, stats):
asyncore.dispatcher.__init__(self)
self.__queue = queue
self.__address = address
self.__stats = stats
self.__remove_socket_file()
self.create_socket(socket.AF_UNIX, socket.SOCK_STREAM)
self.bind(self.__address)
self.listen(socket.SOMAXCONN)
def __remove_socket_file(self):
if os.path.exists(self.__address) and stat.S_ISSOCK(os.stat(self.__address).st_mode):
os.remove(self.__address)
def writable(self):
return False
def handle_accept(self):
self.__stats.input.increment()
sock, _ = self.accept()
Receiver(sock, self, self.__stats)
def handle_close(self):
self.close()
def enqueue_query(self, database, query):
self.__queue.put(Item(database, query))
def enqueue_terminate(self):
self.close()
self.__queue.put(Item(terminate=True))
self.__remove_socket_file()
class Receiver(asyncore.dispatcher_with_send):
__version = 1
__headsize = 8
def __init__(self, sock, listener, stats):
asyncore.dispatcher_with_send.__init__(self, sock)
self.__listener = listener
self.__stats = stats
self.__head = bytearray()
self.__data = bytearray()
def __recv(self, data, size):
data.extend(self.recv(size - len(data)))
return len(data) == size
def readable(self):
return self.connected and not self.out_buffer
def handle_read(self):
if self.__data is None or not self.__listener.accepting:
self.close()
return
if len(self.__head) < self.__headsize:
self.__recv(self.__head, self.__headsize)
else:
version, size = struct.unpack("<II", str(self.__head))
assert version == self.__version
if self.__recv(self.__data, size):
database_size, = struct.unpack("<I", str(self.__data[:4]))
strings = str(self.__data[4:])
database = strings[:database_size]
query = strings[database_size:]
self.__head = None
self.__data = None
self.__listener.enqueue_query(database, query)
result = 1
self.send(struct.pack("<II", self.__version, result))
def handle_write(self):
if not self.__listener.accepting:
self.close()
return
asyncore.dispatcher_with_send.handle_write(self)
if not self.out_buffer:
self.close()
def handle_close(self):
self.close()
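# Illustrative sketch (not part of the original daemon): a minimal client for
# the wire protocol Receiver implements above (version 1, little-endian
# headers: an 8-byte <version, size> header followed by a payload of
# <database length, database, query>). The socket path, database and query
# below are assumptions made for the example.
def _example_send_query(address="/tmp/insertbuffer.socket",
                        database="exampledb",
                        query="INSERT INTO example_table (c) VALUES (1)"):
    payload = struct.pack("<I", len(database)) + database + query
    sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
    try:
        sock.connect(address)
        sock.sendall(struct.pack("<II", 1, len(payload)) + payload)
        version, result = struct.unpack("<II", sock.recv(8))
        return version == 1 and result == 1
    finally:
        sock.close()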
class Signaler(signalfd.async.dispatcher):
def __init__(self, listener):
signalfd.async.dispatcher.__init__(self)
self.__listener = listener
self.__count = 0
def handle_signal(self, signum):
if signum in (signal.SIGTERM, signal.SIGINT):
if self.__count == 0:
syslog.syslog(syslog.LOG_INFO, "Terminating")
self.__listener.enqueue_terminate()
self.__count += 1
@property
def user_insists(self):
return self.__count > 1
class Database(object):
def __init__(self, params, queue, stats, signaler):
self.params = params
self.queue = queue
self.stats = stats
self.signaler = signaler
self.connect()
@property
def connected(self):
return self.conn is not None
def connect(self):
conn = mysql.connect(**self.params)
conn.autocommit(True)
self.conn = conn
def disconnect(self):
if self.conn:
try:
self.conn.close()
except:
pass
self.conn = None
def execute(self):
while True:
item = self.queue.get()
if item.terminate:
syslog.syslog(syslog.LOG_INFO, "Terminated")
self.queue.task_done()
break
if self.execute_item(item):
self.stats.output.increment()
else:
self.stats.error.increment()
syslog.syslog(syslog.LOG_ERR, "Could not execute query: %s" % item.query)
self.queue.task_done()
self.disconnect()
def execute_item(self, item):
assert item.database == self.params["db"]
for i in xrange(2):
while not self.connected:
try:
self.connect()
break
except Exception as e:
syslog.syslog(syslog.LOG_ERR, str(e))
if self.signaler.user_insists:
syslog.syslog(syslog.LOG_INFO, "Giving up due to persistent user")
self.drain_queue(item)
raise
time.sleep(1)
try:
with contextlib.closing(self.conn.cursor()) as cursor:
if cursor.execute(item.query) > 0:
return True
except Exception as e:
syslog.syslog(syslog.LOG_ERR, str(e))
self.disconnect()
return False
def drain_queue(self, item):
while not item.terminate:
syslog.syslog(syslog.LOG_ERR, "Could not execute query: %s" % item.query)
item = self.queue.get()
class StatusServer(basehttpserver.HTTPServer):
def __init__(self, stats, *args):
basehttpserver.HTTPServer.__init__(self, *args)
self.stats = stats
class StatusRequestHandler(basehttpserver.BaseHTTPRequestHandler):
def do_GET(self):
if self.path == "/":
self.send_response(200)
self.end_headers()
for name, stat in sorted(self.server.stats):
print >>self.wfile, "%s: %d" % (name, stat.get())
else:
try:
stat = getattr(self.server.stats, self.path[1:])
except KeyError:
self.send_error(404)
self.end_headers()
else:
self.send_response(200)
self.end_headers()
print >>self.wfile, stat.get()
def log_message(self, *args):
pass
def main():
parser = argparse.ArgumentParser()
parser.add_argument(metavar="PARAMS", dest="params", help="MySQLdb connection parameters")
parser.add_argument("--bufsize", metavar="NUM", dest="maxsize", type=int, default=20000, help="maximum buffer length")
parser.add_argument("--socket", metavar="PATH", dest="address", default="/tmp/insertbuffer.socket", help="listening socket path")
parser.add_argument("--status", metavar="ADDR", dest="status", help="status HTTP server address ([HOST]:PORT)")
args = parser.parse_args()
params = {
name: value
for name, value
in (
pair.split("=", 1)
for pair
in args.params.split()
)
}
assert "db" in params
syslog.openlog("insertbufferd")
queue = queuelib.Queue(args.maxsize)
stats = Stats()
listener = Listener(queue, args.address, stats)
signaler = Signaler(listener)
database = Database(params, queue, stats, signaler)
if args.status:
host, port = args.status.split(":", 1)
status_server = StatusServer(stats, (host, int(port)), StatusRequestHandler)
else:
status_server = None
receiver_thread = threading.Thread(target=asyncore.loop, kwargs=dict(use_poll=True))
receiver_thread.daemon = True
receiver_thread.start()
if status_server:
status_thread = threading.Thread(target=status_server.serve_forever)
status_thread.daemon = True
status_thread.start()
syslog.syslog(syslog.LOG_INFO, "Initialized")
database.execute()
if __name__ == "__main__":
main()
|
"""
WSGI config for lambda project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "lambdaproject.settings")
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
|
import os
class AnalyzeFileObjBug(Exception):
msg = ("\n"
"Expected file object to have %d bytes, instead we read %d bytes.\n"
"File size detection may have failed (see dropbox.util.AnalyzeFileObj)\n")
def __init__(self, expected, actual):
self.expected = expected
self.actual = actual
def __str__(self):
return self.msg % (self.expected, self.actual)
def analyze_file_obj(obj):
''' Get the size and contents of a file-like object.
Returns: (size, raw_data)
size: The amount of data waiting to be read
raw_data: If not None, the entire contents of the stream (as a string).
None if the stream should be read() in chunks.
'''
pos = 0
if hasattr(obj, 'tell'):
pos = obj.tell()
# Handle cStringIO and StringIO
if hasattr(obj, 'getvalue'):
# Why using getvalue() makes sense:
# For StringIO, this string is pre-computed anyway by read().
# For cStringIO, getvalue() is the only way
# to determine the length without read()'ing the whole thing.
raw_data = obj.getvalue()
if pos == 0:
return (len(raw_data), raw_data)
else:
# We could return raw_data[pos:], but that could drastically
# increase memory usage. Better to read it block at a time.
size = max(0, len(raw_data) - pos)
return (size, None)
# Handle real files
if hasattr(obj, 'fileno'):
size = max(0, os.fstat(obj.fileno()).st_size - pos)
return (size, None)
# User-defined object with len()
if hasattr(obj, '__len__'):
size = max(0, len(obj) - pos)
return (size, None)
# We don't know what kind of stream this is.
# To determine the size, we must read the whole thing.
raw_data = obj.read()
return (len(raw_data), raw_data)
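# Illustrative sketch (not part of the original module): how analyze_file_obj
# behaves for an in-memory stream versus a real file. The file name below is
# hypothetical.
def _example_analyze_file_obj():
    from io import BytesIO
    size, raw_data = analyze_file_obj(BytesIO(b"hello"))
    assert (size, raw_data) == (5, b"hello")  # whole contents available up front
    with open("example.bin", "rb") as f:
        size, raw_data = analyze_file_obj(f)
        assert raw_data is None  # size is known, but read() it in chunks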
|
# -*- coding: utf-8 -*-
from django.http import HttpResponseRedirect
from django.contrib.auth import authenticate, logout, login
from .utils import TianYuClient
from .settings import TIANYUYUN_LOGIN_URL
LOGIN_SUCCESS_REDIRECT_URL = '/dashboard'
LOGIN_CREATE_SUCCESS_REDIRECT_URL = '/dashboard' # '/account/settings'
LOGIN_ERROR_REDIRECT_URL = TIANYUYUN_LOGIN_URL.split('?')[0]
def login_tianyuyun(request):
ticket = request.GET.get('ticket', '')
if ticket:
client = TianYuClient()
usesessionid = client.get_usesessionid_by_ticket(ticket)
if usesessionid:
userinfo = client.get_userinfo_by_sessionid(usesessionid)
if userinfo.get('idcardno', ''):
user = request.user if request.user.is_authenticated() else None
oauth_obj, create = client.get_or_create_oauth_by_userinfo(userinfo, user)
if oauth_obj and oauth_obj.user:
user = authenticate(oauth_obj=oauth_obj, username='')
login(request, user)
if create:
return HttpResponseRedirect(LOGIN_CREATE_SUCCESS_REDIRECT_URL)
else:
return HttpResponseRedirect(LOGIN_SUCCESS_REDIRECT_URL)
return HttpResponseRedirect(LOGIN_SUCCESS_REDIRECT_URL)
|
# -*- coding: utf-8 -*-
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
from __future__ import print_function, unicode_literals
from future import standard_library
standard_library.install_aliases()
from builtins import open, str, bytes
import os
import tempfile
import shutil
import warnings
import simplejson as json
from nipype.testing import (assert_equal, assert_not_equal, assert_raises,
assert_true, assert_false, with_setup, package_check,
skipif, example_data)
import nipype.interfaces.base as nib
from nipype.utils.filemanip import split_filename
from nipype.interfaces.base import Undefined, config
from traits.testing.nose_tools import skip
import traits.api as traits
def test_bunch():
b = nib.Bunch()
yield assert_equal, b.__dict__, {}
b = nib.Bunch(a=1, b=[2, 3])
yield assert_equal, b.__dict__, {'a': 1, 'b': [2, 3]}
def test_bunch_attribute():
b = nib.Bunch(a=1, b=[2, 3], c=None)
yield assert_equal, b.a, 1
yield assert_equal, b.b, [2, 3]
yield assert_equal, b.c, None
def test_bunch_repr():
b = nib.Bunch(b=2, c=3, a=dict(n=1, m=2))
yield assert_equal, repr(b), "Bunch(a={'m': 2, 'n': 1}, b=2, c=3)"
def test_bunch_methods():
b = nib.Bunch(a=2)
b.update(a=3)
newb = b.dictcopy()
yield assert_equal, b.a, 3
yield assert_equal, b.get('a'), 3
yield assert_equal, b.get('badkey', 'otherthing'), 'otherthing'
yield assert_not_equal, b, newb
yield assert_equal, type(dict()), type(newb)
yield assert_equal, newb['a'], 3
def test_bunch_hash():
# NOTE: Since the path to the json file is included in the Bunch,
# the hash will be unique to each machine.
pth = os.path.split(os.path.abspath(__file__))[0]
json_pth = os.path.join(pth, 'realign_json.json')
b = nib.Bunch(infile=json_pth,
otherthing='blue',
yat=True)
newbdict, bhash = b._get_bunch_hash()
yield assert_equal, bhash, 'ddcc7b4ec5675df8cf317a48bd1857fa'
# Make sure the hash stored in the json file for `infile` is correct.
jshash = nib.md5()
with open(json_pth, 'r') as fp:
jshash.update(fp.read().encode('utf-8'))
yield assert_equal, newbdict['infile'][0][1], jshash.hexdigest()
yield assert_equal, newbdict['yat'], True
# create a temp file
# global tmp_infile, tmp_dir
# tmp_infile = None
# tmp_dir = None
def setup_file():
# global tmp_infile, tmp_dir
tmp_dir = tempfile.mkdtemp()
tmp_infile = os.path.join(tmp_dir, 'foo.txt')
with open(tmp_infile, 'w') as fp:
fp.writelines(['123456789'])
return tmp_infile
def teardown_file(tmp_dir):
shutil.rmtree(tmp_dir)
def test_TraitedSpec():
yield assert_true, nib.TraitedSpec().get_hashval()
yield assert_equal, nib.TraitedSpec().__repr__(), '\n\n'
class spec(nib.TraitedSpec):
foo = nib.traits.Int
goo = nib.traits.Float(usedefault=True)
yield assert_equal, spec().foo, Undefined
yield assert_equal, spec().goo, 0.0
specfunc = lambda x: spec(hoo=x)
yield assert_raises, nib.traits.TraitError, specfunc, 1
infields = spec(foo=1)
hashval = ([('foo', 1), ('goo', '0.0000000000')], 'e89433b8c9141aa0fda2f8f4d662c047')
yield assert_equal, infields.get_hashval(), hashval
# yield assert_equal, infields.hashval[1], hashval[1]
yield assert_equal, infields.__repr__(), '\nfoo = 1\ngoo = 0.0\n'
@skip
def test_TraitedSpec_dynamic():
from pickle import dumps, loads
a = nib.BaseTraitedSpec()
a.add_trait('foo', nib.traits.Int)
a.foo = 1
assign_a = lambda: setattr(a, 'foo', 'a')
yield assert_raises, Exception, assign_a
pkld_a = dumps(a)
unpkld_a = loads(pkld_a)
assign_a_again = lambda: setattr(unpkld_a, 'foo', 'a')
yield assert_raises, Exception, assign_a_again
def test_TraitedSpec_logic():
class spec3(nib.TraitedSpec):
_xor_inputs = ('foo', 'bar')
foo = nib.traits.Int(xor=_xor_inputs,
desc='foo or bar, not both')
bar = nib.traits.Int(xor=_xor_inputs,
desc='bar or foo, not both')
kung = nib.traits.Float(requires=('foo',),
position=0,
desc='kung foo')
class out3(nib.TraitedSpec):
output = nib.traits.Int
class MyInterface(nib.BaseInterface):
input_spec = spec3
output_spec = out3
myif = MyInterface()
yield assert_raises, TypeError, setattr(myif.inputs, 'kung', 10.0)
myif.inputs.foo = 1
yield assert_equal, myif.inputs.foo, 1
set_bar = lambda: setattr(myif.inputs, 'bar', 1)
yield assert_raises, IOError, set_bar
yield assert_equal, myif.inputs.foo, 1
myif.inputs.kung = 2
yield assert_equal, myif.inputs.kung, 2.0
def test_deprecation():
with warnings.catch_warnings(record=True) as w:
warnings.filterwarnings('always', '', UserWarning)
class DeprecationSpec1(nib.TraitedSpec):
foo = nib.traits.Int(deprecated='0.1')
spec_instance = DeprecationSpec1()
set_foo = lambda: setattr(spec_instance, 'foo', 1)
yield assert_raises, nib.TraitError, set_foo
yield assert_equal, len(w), 0, 'no warnings, just errors'
with warnings.catch_warnings(record=True) as w:
warnings.filterwarnings('always', '', UserWarning)
class DeprecationSpec1numeric(nib.TraitedSpec):
foo = nib.traits.Int(deprecated='0.1')
spec_instance = DeprecationSpec1numeric()
set_foo = lambda: setattr(spec_instance, 'foo', 1)
yield assert_raises, nib.TraitError, set_foo
yield assert_equal, len(w), 0, 'no warnings, just errors'
with warnings.catch_warnings(record=True) as w:
warnings.filterwarnings('always', '', UserWarning)
class DeprecationSpec2(nib.TraitedSpec):
foo = nib.traits.Int(deprecated='100', new_name='bar')
spec_instance = DeprecationSpec2()
set_foo = lambda: setattr(spec_instance, 'foo', 1)
yield assert_raises, nib.TraitError, set_foo
yield assert_equal, len(w), 0, 'no warnings, just errors'
with warnings.catch_warnings(record=True) as w:
warnings.filterwarnings('always', '', UserWarning)
class DeprecationSpec3(nib.TraitedSpec):
foo = nib.traits.Int(deprecated='1000', new_name='bar')
bar = nib.traits.Int()
spec_instance = DeprecationSpec3()
not_raised = True
try:
spec_instance.foo = 1
except nib.TraitError:
not_raised = False
yield assert_true, not_raised
yield assert_equal, len(w), 1, 'deprecated warning 1 %s' % [w1.message for w1 in w]
with warnings.catch_warnings(record=True) as w:
warnings.filterwarnings('always', '', UserWarning)
class DeprecationSpec3(nib.TraitedSpec):
foo = nib.traits.Int(deprecated='1000', new_name='bar')
bar = nib.traits.Int()
spec_instance = DeprecationSpec3()
not_raised = True
try:
spec_instance.foo = 1
except nib.TraitError:
not_raised = False
yield assert_true, not_raised
yield assert_equal, spec_instance.foo, Undefined
yield assert_equal, spec_instance.bar, 1
yield assert_equal, len(w), 1, 'deprecated warning 2 %s' % [w1.message for w1 in w]
def test_namesource():
tmp_infile = setup_file()
tmpd, nme, ext = split_filename(tmp_infile)
pwd = os.getcwd()
os.chdir(tmpd)
class spec2(nib.CommandLineInputSpec):
moo = nib.File(name_source=['doo'], hash_files=False, argstr="%s",
position=2)
doo = nib.File(exists=True, argstr="%s", position=1)
goo = traits.Int(argstr="%d", position=4)
poo = nib.File(name_source=['goo'], hash_files=False, argstr="%s", position=3)
class TestName(nib.CommandLine):
_cmd = "mycommand"
input_spec = spec2
testobj = TestName()
testobj.inputs.doo = tmp_infile
testobj.inputs.goo = 99
yield assert_true, '%s_generated' % nme in testobj.cmdline
testobj.inputs.moo = "my_%s_template"
yield assert_true, 'my_%s_template' % nme in testobj.cmdline
os.chdir(pwd)
teardown_file(tmpd)
def test_chained_namesource():
tmp_infile = setup_file()
tmpd, nme, ext = split_filename(tmp_infile)
pwd = os.getcwd()
os.chdir(tmpd)
class spec2(nib.CommandLineInputSpec):
doo = nib.File(exists=True, argstr="%s", position=1)
moo = nib.File(name_source=['doo'], hash_files=False, argstr="%s",
position=2, name_template='%s_mootpl')
poo = nib.File(name_source=['moo'], hash_files=False,
argstr="%s", position=3)
class TestName(nib.CommandLine):
_cmd = "mycommand"
input_spec = spec2
testobj = TestName()
testobj.inputs.doo = tmp_infile
res = testobj.cmdline
yield assert_true, '%s' % tmp_infile in res
yield assert_true, '%s_mootpl ' % nme in res
yield assert_true, '%s_mootpl_generated' % nme in res
os.chdir(pwd)
teardown_file(tmpd)
def test_cycle_namesource1():
tmp_infile = setup_file()
tmpd, nme, ext = split_filename(tmp_infile)
pwd = os.getcwd()
os.chdir(tmpd)
class spec3(nib.CommandLineInputSpec):
moo = nib.File(name_source=['doo'], hash_files=False, argstr="%s",
position=1, name_template='%s_mootpl')
poo = nib.File(name_source=['moo'], hash_files=False,
argstr="%s", position=2)
doo = nib.File(name_source=['poo'], hash_files=False,
argstr="%s", position=3)
class TestCycle(nib.CommandLine):
_cmd = "mycommand"
input_spec = spec3
# Check that an exception is raised
to0 = TestCycle()
not_raised = True
try:
to0.cmdline
except nib.NipypeInterfaceError:
not_raised = False
yield assert_false, not_raised
os.chdir(pwd)
teardown_file(tmpd)
def test_cycle_namesource2():
tmp_infile = setup_file()
tmpd, nme, ext = split_filename(tmp_infile)
pwd = os.getcwd()
os.chdir(tmpd)
class spec3(nib.CommandLineInputSpec):
moo = nib.File(name_source=['doo'], hash_files=False, argstr="%s",
position=1, name_template='%s_mootpl')
poo = nib.File(name_source=['moo'], hash_files=False,
argstr="%s", position=2)
doo = nib.File(name_source=['poo'], hash_files=False,
argstr="%s", position=3)
class TestCycle(nib.CommandLine):
_cmd = "mycommand"
input_spec = spec3
# Check that loop can be broken by setting one of the inputs
to1 = TestCycle()
to1.inputs.poo = tmp_infile
    not_raised = True
    res = None
try:
res = to1.cmdline
except nib.NipypeInterfaceError:
not_raised = False
print(res)
yield assert_true, not_raised
yield assert_true, '%s' % tmp_infile in res
yield assert_true, '%s_generated' % nme in res
yield assert_true, '%s_generated_mootpl' % nme in res
os.chdir(pwd)
teardown_file(tmpd)
def checknose():
"""check version of nose for known incompatability"""
mod = __import__('nose')
if mod.__versioninfo__[1] <= 11:
return 0
else:
return 1
@skipif(checknose)
def test_TraitedSpec_withFile():
tmp_infile = setup_file()
tmpd, nme = os.path.split(tmp_infile)
yield assert_true, os.path.exists(tmp_infile)
class spec2(nib.TraitedSpec):
moo = nib.File(exists=True)
doo = nib.traits.List(nib.File(exists=True))
infields = spec2(moo=tmp_infile, doo=[tmp_infile])
hashval = infields.get_hashval(hash_method='content')
yield assert_equal, hashval[1], 'a00e9ee24f5bfa9545a515b7a759886b'
teardown_file(tmpd)
@skipif(checknose)
def test_TraitedSpec_withNoFileHashing():
tmp_infile = setup_file()
tmpd, nme = os.path.split(tmp_infile)
pwd = os.getcwd()
os.chdir(tmpd)
yield assert_true, os.path.exists(tmp_infile)
class spec2(nib.TraitedSpec):
moo = nib.File(exists=True, hash_files=False)
doo = nib.traits.List(nib.File(exists=True))
infields = spec2(moo=nme, doo=[tmp_infile])
hashval = infields.get_hashval(hash_method='content')
yield assert_equal, hashval[1], '8da4669ff5d72f670a46ea3e7a203215'
class spec3(nib.TraitedSpec):
moo = nib.File(exists=True, name_source="doo")
doo = nib.traits.List(nib.File(exists=True))
infields = spec3(moo=nme, doo=[tmp_infile])
hashval1 = infields.get_hashval(hash_method='content')
class spec4(nib.TraitedSpec):
moo = nib.File(exists=True)
doo = nib.traits.List(nib.File(exists=True))
infields = spec4(moo=nme, doo=[tmp_infile])
hashval2 = infields.get_hashval(hash_method='content')
yield assert_not_equal, hashval1[1], hashval2[1]
os.chdir(pwd)
teardown_file(tmpd)
def test_Interface():
yield assert_equal, nib.Interface.input_spec, None
yield assert_equal, nib.Interface.output_spec, None
yield assert_raises, NotImplementedError, nib.Interface
yield assert_raises, NotImplementedError, nib.Interface.help
yield assert_raises, NotImplementedError, nib.Interface._inputs_help
yield assert_raises, NotImplementedError, nib.Interface._outputs_help
yield assert_raises, NotImplementedError, nib.Interface._outputs
class DerivedInterface(nib.Interface):
def __init__(self):
pass
nif = DerivedInterface()
yield assert_raises, NotImplementedError, nif.run
yield assert_raises, NotImplementedError, nif.aggregate_outputs
yield assert_raises, NotImplementedError, nif._list_outputs
yield assert_raises, NotImplementedError, nif._get_filecopy_info
def test_BaseInterface():
yield assert_equal, nib.BaseInterface.help(), None
yield assert_equal, nib.BaseInterface._get_filecopy_info(), []
class InputSpec(nib.TraitedSpec):
foo = nib.traits.Int(desc='a random int')
goo = nib.traits.Int(desc='a random int', mandatory=True)
moo = nib.traits.Int(desc='a random int', mandatory=False)
hoo = nib.traits.Int(desc='a random int', usedefault=True)
zoo = nib.File(desc='a file', copyfile=False)
woo = nib.File(desc='a file', copyfile=True)
class OutputSpec(nib.TraitedSpec):
foo = nib.traits.Int(desc='a random int')
class DerivedInterface(nib.BaseInterface):
input_spec = InputSpec
yield assert_equal, DerivedInterface.help(), None
yield assert_true, 'moo' in ''.join(DerivedInterface._inputs_help())
yield assert_equal, DerivedInterface()._outputs(), None
yield assert_equal, DerivedInterface._get_filecopy_info()[0]['key'], 'woo'
yield assert_true, DerivedInterface._get_filecopy_info()[0]['copy']
yield assert_equal, DerivedInterface._get_filecopy_info()[1]['key'], 'zoo'
yield assert_false, DerivedInterface._get_filecopy_info()[1]['copy']
yield assert_equal, DerivedInterface().inputs.foo, Undefined
yield assert_raises, ValueError, DerivedInterface()._check_mandatory_inputs
yield assert_equal, DerivedInterface(goo=1)._check_mandatory_inputs(), None
yield assert_raises, ValueError, DerivedInterface().run
yield assert_raises, NotImplementedError, DerivedInterface(goo=1).run
class DerivedInterface2(DerivedInterface):
output_spec = OutputSpec
def _run_interface(self, runtime):
return runtime
yield assert_equal, DerivedInterface2.help(), None
yield assert_equal, DerivedInterface2()._outputs().foo, Undefined
yield assert_raises, NotImplementedError, DerivedInterface2(goo=1).run
nib.BaseInterface.input_spec = None
yield assert_raises, Exception, nib.BaseInterface
def test_BaseInterface_load_save_inputs():
tmp_dir = tempfile.mkdtemp()
tmp_json = os.path.join(tmp_dir, 'settings.json')
class InputSpec(nib.TraitedSpec):
input1 = nib.traits.Int()
input2 = nib.traits.Float()
input3 = nib.traits.Bool()
input4 = nib.traits.Str()
class DerivedInterface(nib.BaseInterface):
input_spec = InputSpec
def __init__(self, **inputs):
super(DerivedInterface, self).__init__(**inputs)
inputs_dict = {'input1': 12, 'input3': True,
'input4': 'some string'}
bif = DerivedInterface(**inputs_dict)
bif.save_inputs_to_json(tmp_json)
bif2 = DerivedInterface()
bif2.load_inputs_from_json(tmp_json)
yield assert_equal, bif2.inputs.get_traitsfree(), inputs_dict
bif3 = DerivedInterface(from_file=tmp_json)
yield assert_equal, bif3.inputs.get_traitsfree(), inputs_dict
inputs_dict2 = inputs_dict.copy()
inputs_dict2.update({'input4': 'some other string'})
bif4 = DerivedInterface(from_file=tmp_json, input4=inputs_dict2['input4'])
yield assert_equal, bif4.inputs.get_traitsfree(), inputs_dict2
bif5 = DerivedInterface(input4=inputs_dict2['input4'])
bif5.load_inputs_from_json(tmp_json, overwrite=False)
yield assert_equal, bif5.inputs.get_traitsfree(), inputs_dict2
bif6 = DerivedInterface(input4=inputs_dict2['input4'])
bif6.load_inputs_from_json(tmp_json)
yield assert_equal, bif6.inputs.get_traitsfree(), inputs_dict
# test get hashval in a complex interface
from nipype.interfaces.ants import Registration
settings = example_data(example_data('smri_ants_registration_settings.json'))
with open(settings) as setf:
data_dict = json.load(setf)
tsthash = Registration()
tsthash.load_inputs_from_json(settings)
yield assert_equal, {}, check_dict(data_dict, tsthash.inputs.get_traitsfree())
tsthash2 = Registration(from_file=settings)
yield assert_equal, {}, check_dict(data_dict, tsthash2.inputs.get_traitsfree())
_, hashvalue = tsthash.inputs.get_hashval(hash_method='timestamp')
yield assert_equal, 'ec5755e07287e04a4b409e03b77a517c', hashvalue
def test_input_version():
class InputSpec(nib.TraitedSpec):
foo = nib.traits.Int(desc='a random int', min_ver='0.9')
class DerivedInterface1(nib.BaseInterface):
input_spec = InputSpec
obj = DerivedInterface1()
yield assert_not_raises, obj._check_version_requirements, obj.inputs
config.set('execution', 'stop_on_unknown_version', True)
yield assert_raises, Exception, obj._check_version_requirements, obj.inputs
config.set_default_config()
class InputSpec(nib.TraitedSpec):
foo = nib.traits.Int(desc='a random int', min_ver='0.9')
class DerivedInterface1(nib.BaseInterface):
input_spec = InputSpec
_version = '0.8'
obj = DerivedInterface1()
obj.inputs.foo = 1
yield assert_raises, Exception, obj._check_version_requirements
class InputSpec(nib.TraitedSpec):
foo = nib.traits.Int(desc='a random int', min_ver='0.9')
class DerivedInterface1(nib.BaseInterface):
input_spec = InputSpec
_version = '0.10'
obj = DerivedInterface1()
yield assert_not_raises, obj._check_version_requirements, obj.inputs
class InputSpec(nib.TraitedSpec):
foo = nib.traits.Int(desc='a random int', min_ver='0.9')
class DerivedInterface1(nib.BaseInterface):
input_spec = InputSpec
_version = '0.9'
obj = DerivedInterface1()
obj.inputs.foo = 1
yield assert_not_raises, obj._check_version_requirements, obj.inputs
class InputSpec(nib.TraitedSpec):
foo = nib.traits.Int(desc='a random int', max_ver='0.7')
class DerivedInterface2(nib.BaseInterface):
input_spec = InputSpec
_version = '0.8'
obj = DerivedInterface2()
obj.inputs.foo = 1
yield assert_raises, Exception, obj._check_version_requirements
class InputSpec(nib.TraitedSpec):
foo = nib.traits.Int(desc='a random int', max_ver='0.9')
class DerivedInterface1(nib.BaseInterface):
input_spec = InputSpec
_version = '0.9'
obj = DerivedInterface1()
obj.inputs.foo = 1
yield assert_not_raises, obj._check_version_requirements, obj.inputs
def test_output_version():
class InputSpec(nib.TraitedSpec):
foo = nib.traits.Int(desc='a random int')
class OutputSpec(nib.TraitedSpec):
foo = nib.traits.Int(desc='a random int', min_ver='0.9')
class DerivedInterface1(nib.BaseInterface):
input_spec = InputSpec
output_spec = OutputSpec
_version = '0.10'
obj = DerivedInterface1()
yield assert_equal, obj._check_version_requirements(obj._outputs()), []
class InputSpec(nib.TraitedSpec):
foo = nib.traits.Int(desc='a random int')
class OutputSpec(nib.TraitedSpec):
foo = nib.traits.Int(desc='a random int', min_ver='0.11')
class DerivedInterface1(nib.BaseInterface):
input_spec = InputSpec
output_spec = OutputSpec
_version = '0.10'
obj = DerivedInterface1()
yield assert_equal, obj._check_version_requirements(obj._outputs()), ['foo']
class InputSpec(nib.TraitedSpec):
foo = nib.traits.Int(desc='a random int')
class OutputSpec(nib.TraitedSpec):
foo = nib.traits.Int(desc='a random int', min_ver='0.11')
class DerivedInterface1(nib.BaseInterface):
input_spec = InputSpec
output_spec = OutputSpec
_version = '0.10'
def _run_interface(self, runtime):
return runtime
def _list_outputs(self):
return {'foo': 1}
obj = DerivedInterface1()
yield assert_raises, KeyError, obj.run
def test_Commandline():
yield assert_raises, Exception, nib.CommandLine
ci = nib.CommandLine(command='which')
yield assert_equal, ci.cmd, 'which'
yield assert_equal, ci.inputs.args, Undefined
ci2 = nib.CommandLine(command='which', args='ls')
yield assert_equal, ci2.cmdline, 'which ls'
ci3 = nib.CommandLine(command='echo')
ci3.inputs.environ = {'MYENV': 'foo'}
res = ci3.run()
yield assert_equal, res.runtime.environ['MYENV'], 'foo'
yield assert_equal, res.outputs, None
class CommandLineInputSpec1(nib.CommandLineInputSpec):
foo = nib.Str(argstr='%s', desc='a str')
goo = nib.traits.Bool(argstr='-g', desc='a bool', position=0)
hoo = nib.traits.List(argstr='-l %s', desc='a list')
moo = nib.traits.List(argstr='-i %d...', desc='a repeated list',
position=-1)
noo = nib.traits.Int(argstr='-x %d', desc='an int')
roo = nib.traits.Str(desc='not on command line')
soo = nib.traits.Bool(argstr="-soo")
nib.CommandLine.input_spec = CommandLineInputSpec1
ci4 = nib.CommandLine(command='cmd')
ci4.inputs.foo = 'foo'
ci4.inputs.goo = True
ci4.inputs.hoo = ['a', 'b']
ci4.inputs.moo = [1, 2, 3]
ci4.inputs.noo = 0
ci4.inputs.roo = 'hello'
ci4.inputs.soo = False
cmd = ci4._parse_inputs()
yield assert_equal, cmd[0], '-g'
yield assert_equal, cmd[-1], '-i 1 -i 2 -i 3'
yield assert_true, 'hello' not in ' '.join(cmd)
yield assert_true, '-soo' not in ' '.join(cmd)
ci4.inputs.soo = True
cmd = ci4._parse_inputs()
yield assert_true, '-soo' in ' '.join(cmd)
class CommandLineInputSpec2(nib.CommandLineInputSpec):
foo = nib.File(argstr='%s', desc='a str', genfile=True)
nib.CommandLine.input_spec = CommandLineInputSpec2
ci5 = nib.CommandLine(command='cmd')
yield assert_raises, NotImplementedError, ci5._parse_inputs
class DerivedClass(nib.CommandLine):
input_spec = CommandLineInputSpec2
def _gen_filename(self, name):
return 'filename'
ci6 = DerivedClass(command='cmd')
yield assert_equal, ci6._parse_inputs()[0], 'filename'
nib.CommandLine.input_spec = nib.CommandLineInputSpec
def test_Commandline_environ():
from nipype import config
config.set_default_config()
ci3 = nib.CommandLine(command='echo')
res = ci3.run()
yield assert_equal, res.runtime.environ['DISPLAY'], ':1'
config.set('execution', 'display_variable', ':3')
res = ci3.run()
yield assert_false, 'DISPLAY' in ci3.inputs.environ
yield assert_equal, res.runtime.environ['DISPLAY'], ':3'
ci3.inputs.environ = {'DISPLAY': ':2'}
res = ci3.run()
yield assert_equal, res.runtime.environ['DISPLAY'], ':2'
def test_CommandLine_output():
tmp_infile = setup_file()
tmpd, name = os.path.split(tmp_infile)
pwd = os.getcwd()
os.chdir(tmpd)
yield assert_true, os.path.exists(tmp_infile)
ci = nib.CommandLine(command='ls -l')
ci.inputs.terminal_output = 'allatonce'
res = ci.run()
yield assert_equal, res.runtime.merged, ''
yield assert_true, name in res.runtime.stdout
ci = nib.CommandLine(command='ls -l')
ci.inputs.terminal_output = 'file'
res = ci.run()
yield assert_true, 'stdout.nipype' in res.runtime.stdout
yield assert_true, isinstance(res.runtime.stdout, (str, bytes))
ci = nib.CommandLine(command='ls -l')
ci.inputs.terminal_output = 'none'
res = ci.run()
yield assert_equal, res.runtime.stdout, ''
ci = nib.CommandLine(command='ls -l')
res = ci.run()
yield assert_true, 'stdout.nipype' in res.runtime.stdout
os.chdir(pwd)
teardown_file(tmpd)
def test_global_CommandLine_output():
tmp_infile = setup_file()
tmpd, name = os.path.split(tmp_infile)
pwd = os.getcwd()
os.chdir(tmpd)
ci = nib.CommandLine(command='ls -l')
res = ci.run()
yield assert_true, name in res.runtime.stdout
yield assert_true, os.path.exists(tmp_infile)
nib.CommandLine.set_default_terminal_output('allatonce')
ci = nib.CommandLine(command='ls -l')
res = ci.run()
yield assert_equal, res.runtime.merged, ''
yield assert_true, name in res.runtime.stdout
nib.CommandLine.set_default_terminal_output('file')
ci = nib.CommandLine(command='ls -l')
res = ci.run()
yield assert_true, 'stdout.nipype' in res.runtime.stdout
nib.CommandLine.set_default_terminal_output('none')
ci = nib.CommandLine(command='ls -l')
res = ci.run()
yield assert_equal, res.runtime.stdout, ''
os.chdir(pwd)
teardown_file(tmpd)
def assert_not_raises(fn, *args, **kwargs):
fn(*args, **kwargs)
return True
def check_dict(ref_dict, tst_dict):
"""Compare dictionaries of inputs and and those loaded from json files"""
def to_list(x):
if isinstance(x, tuple):
x = list(x)
if isinstance(x, list):
for i, xel in enumerate(x):
x[i] = to_list(xel)
return x
failed_dict = {}
for key, value in list(ref_dict.items()):
newval = to_list(tst_dict[key])
if newval != value:
failed_dict[key] = (value, newval)
return failed_dict
|
from mpl_toolkits.mplot3d import Axes3D
import matplotlib.pyplot as plt
import numpy as np
import matplotlib.ticker as tik
import os
from matplotlib import cm
from neural import NeuralState
def plot_weigth_matrix_bars(m: np.ndarray):
"""
    Plot a weight matrix as a 3D bar diagram
:param m: Weight matrix
:return: -
"""
# Create a figure for plotting the data as a 3D histogram.
fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')
# Create an X-Y mesh of the same dimension as the 2D data
x_s, y_s = np.meshgrid(np.arange(m.shape[1]), np.arange(m.shape[0]))
x_s = x_s.flatten()
y_s = y_s.flatten()
z_data = m.flatten()
    # One bar per matrix entry; the bar height (z) encodes the weight value.
    ax.bar3d(x_s, y_s, np.zeros(len(z_data)), 0.8, 0.8, z_data, alpha=0.8)
    ax.set_xlabel('Column')
    ax.set_ylabel('Row')
ax.set_zlabel('Weight')
plt.show()
def hinton(matrix: np.ndarray, file: str = "", max_weight=None):
"""
Draw Hinton diagram for visualizing a weight matrix.
:param matrix: Input 2D matrix.
:param file: File path for saving the plot.
:param max_weight: Manually set upper limit for values.
:return: Shows the Hinton diagram as new window or saves it to a file.
"""
ax = plt.gca()
if not max_weight:
max_weight = 2 ** np.ceil(np.log(np.abs(matrix).max()) / np.log(2))
ax.patch.set_facecolor('none')
ax.set_aspect('equal', 'box')
ax.xaxis.set_major_locator(plt.NullLocator())
ax.yaxis.set_major_locator(plt.NullLocator())
for (x, y), w in np.ndenumerate(matrix):
color = 'white' if w > 0 else 'black'
size = np.sqrt(np.abs(w) / max_weight)
rect = plt.Rectangle([x - size / 2, y - size / 2], size, size,
facecolor=color, edgecolor=color)
ax.add_patch(rect)
ax.autoscale_view()
ax.invert_yaxis()
if file == "":
plt.show()
else:
plt.savefig(file)
plt.close()
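# Hedged usage sketch (not from the original module); the file name below is
# purely illustrative:
#     w = np.random.randn(8, 8)
#     hinton(w)                          # opens an interactive window
#     hinton(w, file="hinton_demo.png")  # or writes the figure to disk instead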
def height_plot(matrix: np.ndarray, file: str = ""):
"""
    Draw a heat-map style diagram of the matrix values.
:param matrix: Input 2D matrix.
:param file: File path for saving the plot.
:return: Shows the height map diagram as new window or saves it to a file.
"""
# Create heights in the grid
z = matrix
# Build a figure with 2 subplots, the first is 3D
fig = plt.figure()
ax2 = fig.add_subplot(111)
im = ax2.imshow(z, cmap="hot", interpolation='none')
ax2.invert_yaxis()
# add an explanatory colour bar
plt.colorbar(im, orientation='vertical')
if file == "":
plt.show()
else:
plt.savefig(file)
plt.close()
def combined_plot1(weights: list, times: list, dweights: list, stepsize: int,
neurons: np.ndarray, hopfield: np.ndarray, file: str = None, metadata: str = ""):
"""
:param weights:
:param times:
:param dweights:
:param stepsize:
:param neurons:
:param hopfield:
:param file:
:param metadata:
:return:
"""
l = len(weights)
w = weights[0::stepsize]
c_w = len(w)
dw = [sum(dweights[i:i+stepsize]) for i in range(0, l - 1, stepsize)]
c_dw = len(dw)
l_ax = max(4, c_w + 1)
# Build a figure with 2 subplots, the first is 3D
fig, axes = plt.subplots(ncols=l_ax, nrows=4)
size = 5
fig.set_size_inches(l_ax * size, 3 * size)
#
# Title
fig.suptitle(metadata, fontsize=14, fontweight='bold')
for i in range(2, l_ax - 2):
fig.delaxes(axes[0][i])
#
# Neuron Map
major_locator_n = tik.MultipleLocator(neurons.shape[0] // 2)
major_formatter_n = tik.FormatStrFormatter('%d')
minor_locator_n = tik.MultipleLocator(1)
ax = axes[0][-1]
z = neurons
im = ax.imshow(z, cmap="hot", interpolation='none')
ax.set_aspect('equal')
ax.set_title("Active Neurons")
ax.yaxis.set_major_locator(major_locator_n)
ax.yaxis.set_major_formatter(major_formatter_n)
ax.yaxis.set_minor_locator(minor_locator_n)
ax.xaxis.set_major_locator(major_locator_n)
ax.xaxis.set_major_formatter(major_formatter_n)
ax.xaxis.set_minor_locator(minor_locator_n)
ax = axes[0][-2]
ax.set_aspect(8)
fig.colorbar(im, orientation='vertical', cax=ax)
#
# Hopfield
major_locator_w = tik.MultipleLocator(hopfield.shape[0] // 2)
major_formatter_w = tik.FormatStrFormatter('%d')
minor_locator_w = tik.MultipleLocator(hopfield.shape[0] // 4)
ax = axes[0][0]
z = hopfield
im = ax.imshow(z, cmap="hot", interpolation='none')
ax.invert_yaxis()
ax.set_aspect('equal')
ax.set_title("Hopfield weights")
ax.yaxis.tick_right()
ax.yaxis.set_major_locator(major_locator_w)
ax.yaxis.set_major_formatter(major_formatter_w)
ax.yaxis.set_minor_locator(minor_locator_w)
ax.xaxis.set_major_locator(major_locator_w)
ax.xaxis.set_major_formatter(major_formatter_w)
ax.xaxis.set_minor_locator(minor_locator_w)
ax = axes[0][1]
ax.set_aspect(8)
fig.colorbar(im, orientation='vertical', cax=ax)
ax.yaxis.tick_left()
#
# Weights & Weights per neuron
weight_min = np.min(w)
weight_max = np.max(w)
for i in range(c_w):
ax = axes[1][i]
z = w[i]
im = ax.imshow(z, cmap="hot", interpolation='none', vmin=weight_min, vmax=weight_max)
ax.invert_yaxis()
ax.set_aspect('equal')
if i == 0:
ax.yaxis.set_major_locator(major_locator_w)
ax.yaxis.set_major_formatter(major_formatter_w)
ax.yaxis.set_minor_locator(minor_locator_w)
ax.xaxis.set_major_locator(major_locator_w)
ax.xaxis.set_major_formatter(major_formatter_w)
ax.xaxis.set_minor_locator(minor_locator_w)
ax.set_title("Weights: t = " + '% 4.2f' % times[i * stepsize])
else:
ax.set_axis_off()
ax.set_title("t = " + '% 4.2f' % times[i * stepsize])
ax = axes[3][i]
weight_per_neuron(ax, z, neurons.flatten())
if i != 0:
ax.set_axis_off()
else:
ax.spines['top'].set_color('none')
ax.spines['right'].set_color('none')
ax.set_title("Weight per neuron (colored: only active):")
ax = axes[1][-1]
ax.set_aspect(8)
fig.colorbar(im, orientation='vertical', cax=ax, extend='both')
fig.delaxes(axes[3][-1])
#
# dWeights
dweight_min = np.min(dw)
dweight_max = np.max(dw)
for i in range(c_dw):
ax = axes[2][i]
z = dw[i]
im = ax.imshow(z, cmap="hot", interpolation='none', vmin=dweight_min, vmax=dweight_max)
ax.invert_yaxis()
ax.set_aspect('equal')
if i == 0:
ax.yaxis.set_major_locator(major_locator_w)
ax.yaxis.set_major_formatter(major_formatter_w)
ax.yaxis.set_minor_locator(minor_locator_w)
ax.xaxis.set_major_locator(major_locator_w)
ax.xaxis.set_major_formatter(major_formatter_w)
ax.xaxis.set_minor_locator(minor_locator_w)
ax.set_title("Deviations:")
else:
ax.set_axis_off()
fig.delaxes(axes[2][-2])
ax = axes[2][-1]
ax.set_aspect(8)
fig.colorbar(im, orientation='vertical', cax=ax, extend='both')
#
# Finish
fig.tight_layout()
if not file:
plt.show()
else:
i = 0
while os.path.exists('{}_{:d}.png'.format(file, i)):
i += 1
file = '{}_{:d}.png'.format(file, i)
print("Saving results to: " + file)
plt.savefig(file, dpi=100)
plt.close()
def combined_learning_plot_patternwise(weights: list, times: list, dweights: list, neurons_t: list, neuralstates: list,
spp: int, rot: int, file: str = None):
    """
    Plot the learning progress pattern by pattern: for each stored pattern the
    active-neuron map, the weight matrix after training on it, and the weight
    sum per neuron are drawn side by side.
    """
    c_pat = len(neuralstates)
l_ax = c_pat + 2
w = weights[0::spp]
t = times[0::spp]
n = neurons_t[0::spp]
metadata = ""
#
# Prepare plot
fig, axes = plt.subplots(ncols=l_ax, nrows=3)
size = 5
fig.set_size_inches(l_ax * size, 3 * size)
#
# Title
ax = axes[0][0]
ax.set_title(metadata, fontsize=14, fontweight='bold')
ax.set_axis_off()
#
# Plots
state_0: NeuralState = neuralstates[0]
weight_min = np.min(w)
weight_max = np.max(w)
major_locator_w = tik.MultipleLocator(state_0.N // 2)
major_formatter_w = tik.FormatStrFormatter('%d')
minor_locator_w = tik.MultipleLocator(state_0.N // 4)
for i in range(l_ax - 1):
#
# Neuron Map
if 0 < i < len(n) + 1:
ax = axes[0][i]
state: NeuralState = n[i-1]
z = state.as_matrix()
if i == 1:
neural_map(ax, z, True)
ax.set_title("Active Neurons")
else:
neural_map(ax, z, False)
#
# Weights
ax_w = axes[1][i]
z = w[i]
im_w = ax_w.imshow(z, cmap="hot", interpolation='none', vmin=weight_min, vmax=weight_max)
ax_w.invert_yaxis()
ax_w.set_aspect('equal')
if i == 0:
ax_w.yaxis.set_major_locator(major_locator_w)
ax_w.yaxis.set_major_formatter(major_formatter_w)
ax_w.yaxis.set_minor_locator(minor_locator_w)
ax_w.xaxis.set_major_locator(major_locator_w)
ax_w.xaxis.set_major_formatter(major_formatter_w)
ax_w.xaxis.set_minor_locator(minor_locator_w)
ax_w.set_title("Weights: t = " + '% 4.2f' % 0)
else:
ax_w.set_axis_off()
ax_w.set_title("t = " + '% 4.2f' % t[i])
#
# Weights per neuron
ax = axes[2][i]
if i == 0:
ax.spines['top'].set_color('none')
ax.spines['right'].set_color('none')
ax.set_title("Weight per neuron (colored: only active):")
wpn_n = np.zeros(state_0.N)
else:
ax.set_axis_off()
wpn_n = state.vec
weight_per_neuron(ax, z, wpn_n)
#
# Colorbar
if i == l_ax - 2:
ax = axes[1][-1]
ax.set_aspect(8)
fig.colorbar(im_w, orientation='vertical', cax=ax, extend='both')
#
# Empty axes
ax = axes[0][-1]
fig.delaxes(ax)
ax = axes[2][-1]
fig.delaxes(ax)
#
# Finish
fig.tight_layout()
if not file:
plt.show()
else:
i = 0
while os.path.exists('{}_{:d}.png'.format(file, i)):
i += 1
file = '{}_{:d}.png'.format(file, i)
print("Saving results to: " + file)
plt.savefig(file, dpi=100)
plt.close()
def weight_per_neuron(ax: plt.Axes, w: np.ndarray, neurons: np.ndarray):
width = 0.7
num = w.shape[0]
w_n, w_n_a, x_n_a = [], [], []
x_n = np.arange(1, num + 1)
for i in range(num):
w_n.append(np.sum(w[i]))
if neurons[i] == 1:
sm = 0
for j in range(num):
sm += w[i][j] if neurons[j] == 1 else 0
w_n_a.append(sm)
x_n_a.append(x_n[i])
w_max = np.max(w_n)
# customize layout
step = (num // 10)
steps = x_n[0::max(1, step)]
steps = np.array(steps) - 1
steps[0] = 1
if steps[-1] != x_n[-1]:
steps = np.append(steps, x_n[-1])
major_locator_n = tik.FixedLocator(steps)
major_locator_n.view_limits(1, num)
minor_locator_n = tik.MultipleLocator(1)
ax.xaxis.set_major_locator(major_locator_n)
ax.xaxis.set_minor_locator(minor_locator_n)
ax.set_xlim(0, num + 1)
ax.set_ylim(0, max(2, w_max))
# colormap for active neurons:
y = np.array(w_n_a) - 1
sp = cm.get_cmap("spring").reversed()
atu = cm.get_cmap("autumn").reversed()
colors = [atu(abs(y_i) / 1) if y_i < 0 else sp(y_i / max(1, w_max - 1)) for y_i in y]
# red dash line:
ax.plot((0, num + 1), (1, 1), 'red', linestyle='--')
# gray bars for inactive neurons
ax.bar(x_n, w_n, width, color='gray')
# colored active neurons
ax.bar(x_n_a, w_n_a, width, color=colors)
def neural_map(ax: plt.Axes, neurons: np.ndarray, axes: bool):
l = neurons.shape[0]
if axes:
major_locator_n = tik.MultipleLocator(l // 2)
major_formatter_n = tik.FormatStrFormatter('%d')
minor_locator_n = tik.MultipleLocator(1)
ax.yaxis.set_major_locator(major_locator_n)
ax.yaxis.set_major_formatter(major_formatter_n)
ax.yaxis.set_minor_locator(minor_locator_n)
ax.xaxis.set_major_locator(major_locator_n)
ax.xaxis.set_major_formatter(major_formatter_n)
ax.xaxis.set_minor_locator(minor_locator_n)
else:
ax.xaxis.set_major_locator(tik.NullLocator())
ax.xaxis.set_minor_locator(tik.NullLocator())
ax.yaxis.set_major_locator(tik.NullLocator())
ax.yaxis.set_minor_locator(tik.NullLocator())
ax.imshow(neurons, cmap="hot", interpolation='none')
ax.set_aspect('equal')
ma = l - 0.5
mi = -0.5
ax.set_xlim(mi, ma)
ax.set_ylim(mi, ma)
for i in range(1, l):
xy = i - 0.5
ax.plot((mi, ma), (xy, xy), 'red', linestyle='-')
ax.plot((xy, xy), (mi, ma), 'red', linestyle='-')
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('lizard_efcis', '0052_opname_validation_state'),
]
operations = [
migrations.AddField(
model_name='locatie',
name='afvoergebied',
field=models.CharField(max_length=255, null=True, blank=True),
preserve_default=True,
),
migrations.AddField(
model_name='locatie',
name='grondsoort',
field=models.CharField(max_length=255, null=True, blank=True),
preserve_default=True,
),
migrations.AddField(
model_name='locatie',
name='landgebruik',
field=models.CharField(max_length=255, null=True, blank=True),
preserve_default=True,
),
migrations.AlterField(
model_name='activiteit',
name='act_oms',
field=models.TextField(null=True, verbose_name='omschrijving', blank=True),
preserve_default=True,
),
migrations.AlterField(
model_name='activiteit',
name='act_type',
field=models.CharField(default='Meting', max_length=10, verbose_name='type activiteit', choices=[('', ''), ('Meting', 'Meting'), ('Toetsing', 'Toetsing')]),
preserve_default=True,
),
migrations.AlterField(
model_name='activiteit',
name='met_fc',
field=models.TextField(null=True, verbose_name='methode fysisch-chemisch', blank=True),
preserve_default=True,
),
migrations.AlterField(
model_name='activiteit',
name='met_fyt',
field=models.TextField(null=True, verbose_name='methode fytoplankton', blank=True),
preserve_default=True,
),
migrations.AlterField(
model_name='activiteit',
name='met_mafa',
field=models.TextField(null=True, verbose_name='methode macrofauna', blank=True),
preserve_default=True,
),
migrations.AlterField(
model_name='activiteit',
name='met_mafy',
field=models.TextField(null=True, verbose_name='methode macrofyten', blank=True),
preserve_default=True,
),
migrations.AlterField(
model_name='activiteit',
name='met_toets',
field=models.TextField(null=True, verbose_name='methode toetsing', blank=True),
preserve_default=True,
),
migrations.AlterField(
model_name='activiteit',
name='met_vis',
field=models.TextField(null=True, verbose_name='methode vissen', blank=True),
preserve_default=True,
),
migrations.AlterField(
model_name='compartiment',
name='comp_oms',
field=models.TextField(null=True, verbose_name='omschrijving', blank=True),
preserve_default=True,
),
migrations.AlterField(
model_name='eenheid',
name='eenheid_oms',
field=models.TextField(null=True, verbose_name='omschrijving', blank=True),
preserve_default=True,
),
migrations.AlterField(
model_name='hoedanigheid',
name='hoed_oms',
field=models.TextField(null=True, verbose_name='omschriving', blank=True),
preserve_default=True,
),
migrations.AlterField(
model_name='locatie',
name='loc_id',
field=models.CharField(unique=True, max_length=50, verbose_name='code locatie'),
preserve_default=True,
),
migrations.AlterField(
model_name='locatie',
name='loc_oms',
field=models.TextField(null=True, verbose_name='omschrijving', blank=True),
preserve_default=True,
),
migrations.AlterField(
model_name='parameter',
name='casnummer',
field=models.CharField(max_length=30, null=True, verbose_name='CAS-nummer', blank=True),
preserve_default=True,
),
migrations.AlterField(
model_name='parameter',
name='par_code',
field=models.CharField(max_length=30, verbose_name='code'),
preserve_default=True,
),
migrations.AlterField(
model_name='parameter',
name='par_oms',
field=models.CharField(max_length=255, null=True, verbose_name='omschrijving', blank=True),
preserve_default=True,
),
migrations.AlterField(
model_name='parametergroep',
name='code',
field=models.CharField(unique=True, max_length=255, verbose_name='parametergroepnaam'),
preserve_default=True,
),
migrations.AlterField(
model_name='statuskrw',
name='code',
field=models.CharField(unique=True, max_length=50, verbose_name='status watertype'),
preserve_default=True,
),
migrations.AlterField(
model_name='waterlichaam',
name='wl_code',
field=models.CharField(max_length=20, verbose_name='code'),
preserve_default=True,
),
migrations.AlterField(
model_name='waterlichaam',
name='wl_naam',
field=models.CharField(max_length=255, null=True, verbose_name='naam', blank=True),
preserve_default=True,
),
migrations.AlterField(
model_name='waterlichaam',
name='wl_type',
field=models.CharField(max_length=10, null=True, verbose_name='type', blank=True),
preserve_default=True,
),
migrations.AlterField(
model_name='wns',
name='wns_code',
field=models.CharField(unique=True, max_length=30, verbose_name='code WNS'),
preserve_default=True,
),
migrations.AlterField(
model_name='wns',
name='wns_oms',
field=models.CharField(verbose_name='omschrijving', max_length=255, null=True, editable=False, blank=True),
preserve_default=True,
),
]
|
from django.contrib import admin
from django.utils.translation import ugettext_lazy as _
from models import *
class PersonAttributeInline(admin.TabularInline):
model = PersonAttribute
list_display = ('email', 'birthdate', 'height', 'weight')
class PlayerInline(admin.TabularInline):
model = Player
extra = 1
list_display = ('squad', 'person', 'number')
raw_id_fields = ('person',)
filter_horizontal = ('positions', )
class StaffInline(admin.TabularInline):
model = Staff
extra = 1
list_display = ('squad', 'person', 'function')
raw_id_fields = ('person',)
class ContactInline(admin.TabularInline):
model = Contact
extra = 1
list_display = ('person', 'value', 'sortorder')
raw_id_fields = ('person',)
class ResultInline(admin.TabularInline):
model = RemoteResult
extra = 1
list_display = ('name', )
class TeamAdmin(admin.ModelAdmin):
class Media:
js = ('/static/WYMEditor/jquery/jquery.js',
'/static/WYMEditor/wymeditor/jquery.wymeditor.pack.js',
'/static/WYMEditor/wymeditor/admin_textarea.js')
css = {
"all": ("/static/WYMEditor/wymeditor/skins/default/skin.css",)
}
fieldsets = (
(None, {
'fields': (
('name', 'slug'),
('sortorder'),
'lastsquad'
)
}),
)
prepopulated_fields = {'slug': ('name',)}
list_display = ('slug', 'name', 'sortorder')
admin.site.register(Team, TeamAdmin)
class SquadAdmin(admin.ModelAdmin):
class Media:
js = ('/static/WYMEditor/jquery/jquery.js',
'/static/WYMEditor/wymeditor/jquery.wymeditor.pack.js',
'/static/WYMEditor/wymeditor/admin_textarea.js')
css = {
"all": ("/static/WYMEditor/wymeditor/skins/default/skin.css",)
}
fieldsets = (
(None, {
'fields': (
('name', 'slug', 'team', 'season'),
('sortorder'),
('predecessor', 'successor'),
)
}),
)
inlines = (PlayerInline, StaffInline, ContactInline, ResultInline)
#filter_horizontal = ('images', 'calendars')
prepopulated_fields = {'slug': ('season', 'team', 'name')}
list_display = ('slug', 'name', 'team', 'season', 'sortorder')
admin.site.register(Squad, SquadAdmin)
class TransferUpdateAdmin(admin.ModelAdmin):
def has_change_permission(self, request, obj=None):
return False # To remove the 'Save and continue editing' button
admin.site.register(TransferUpdate, TransferUpdateAdmin)
class SquadCopyAdmin(admin.ModelAdmin):
def has_change_permission(self, request, obj=None):
return False # To remove the 'Save and continue editing' button
admin.site.register(SquadPlayerCopy, SquadCopyAdmin)
class PersonalSponsorAdmin(admin.ModelAdmin):
list_display = ('image', 'url', 'person')
admin.site.register(PersonalSponsor, PersonalSponsorAdmin)
class PersonAdmin(admin.ModelAdmin):
class Media:
js = ('/static/WYMEditor/jquery/jquery.js',
'/static/WYMEditor/wymeditor/jquery.wymeditor.pack.js',
'/static/WYMEditor/wymeditor/admin_textarea.js')
css = {
"all": ("/static/WYMEditor/wymeditor/skins/default/skin.css",)
}
prepopulated_fields = {'slug': ('first_name', 'last_name')}
fieldsets = (
(None, {
'fields': (
('first_name', 'last_name', 'slug'),
'sortorder',
)
}),
)
inlines = (PersonAttributeInline, PlayerInline, StaffInline)
search_fields = ('first_name', 'last_name')
list_display = ('slug', 'first_name', 'last_name', 'sortorder')
admin.site.register(Person, PersonAdmin)
class RemoteResultAdmin(admin.ModelAdmin):
list_display = ('name', )
prepopulated_fields = {'slug': ('name',)}
admin.site.register(RemoteResult, RemoteResultAdmin)
class DateAdmin(admin.ModelAdmin):
list_display = ('datum', 'name')
admin.site.register(Date, DateAdmin)
class TransferAdmin(admin.ModelAdmin):
raw_id_fields = ('person', )
list_display = ('person', 'old', 'oldextern', 'new', 'newextern')
admin.site.register(Transfer, TransferAdmin)
class ExternalTeamAdmin(admin.ModelAdmin):
list_display = ('name', 'slug', 'url')
prepopulated_fields = {'slug': ('name',)}
admin.site.register(ExternalTeam, ExternalTeamAdmin)
class PositionAdmin(admin.ModelAdmin):
list_display = ('name', 'slug')
prepopulated_fields = {'slug': ('name',)}
admin.site.register(Position, PositionAdmin)
class SeasonAdmin(admin.ModelAdmin):
list_display = ('name', 'slug')
prepopulated_fields = {'slug': ('name',)}
admin.site.register(Season, SeasonAdmin)
class TeamImageAdmin(admin.ModelAdmin):
list_display = ('team', 'image', 'sort')
admin.site.register(TeamImage, TeamImageAdmin)
class SquadImageAdmin(admin.ModelAdmin):
list_display = ('squad', 'image', 'sort')
admin.site.register(SquadImage, SquadImageAdmin)
class PersonImageAdmin(admin.ModelAdmin):
list_display = ('person', 'image', 'sort')
admin.site.register(PersonImage, PersonImageAdmin)
|
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Training-related utilities.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import copy
import math
import numpy as np
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.data.ops import iterator_ops
from tensorflow.python.eager import context
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import tensor_util
from tensorflow.python.keras import backend as K
from tensorflow.python.keras import losses
from tensorflow.python.keras import metrics as metrics_module
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import weights_broadcast_ops
def _map_nested(data, func):
"""Maps each nested element using func."""
if isinstance(data, list):
return [_map_nested(nested_data, func) for nested_data in data]
elif isinstance(data, tuple):
return tuple(_map_nested(nested_data, func) for nested_data in data)
elif isinstance(data, dict):
return {
k: _map_nested(nested_data, func) for k, nested_data in data.items()
}
else:
return func(data)
def _nested_all(data, cond_func):
"""Checks if all elements in a nested structure satisfy cond_func."""
if isinstance(data, (tuple, list)):
return all([_nested_all(nested_data, cond_func) for nested_data in data])
elif isinstance(data, dict):
return all(
[_nested_all(nested_data, cond_func) for nested_data in data.values()])
else:
return cond_func(data)
def _nested_any(data, cond_func):
"""Checks if any nested_elements in a nested structure satisfy cond_func."""
if isinstance(data, (tuple, list)):
return any([_nested_any(nested_data, cond_func) for nested_data in data])
elif isinstance(data, dict):
return any(
[_nested_any(nested_data, cond_func) for nested_data in data.values()])
else:
return cond_func(data)
def _convert_lists_to_tuples(data):
"""Converts all lists to tuples, since Datasets expect tuples."""
if isinstance(data, (tuple, list)):
return tuple(_convert_lists_to_tuples(nested_data) for nested_data in data)
elif isinstance(data, dict):
return {
k: _convert_lists_to_tuples(nested_data)
for k, nested_data in data.items()
}
else:
return data
def _get_batch_axis_size(data):
"""Returns batch axis shape for nested data."""
if isinstance(data, (tuple, list)):
return _get_batch_axis_size(data[0])
elif isinstance(data, dict):
return _get_batch_axis_size(list(data.values()))
else:
return int(data.shape[0])
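# Hedged illustration (not part of the original file) of how the private
# helpers above compose; the literal data is made up.
#     data = {'a': [np.zeros((4, 2)), np.ones((4, 3))]}
#     _nested_all(data, lambda d: d.shape[0] == 4)   # -> True
#     _nested_any(data, lambda d: d is None)         # -> False
#     _convert_lists_to_tuples(data)                 # -> {'a': (array, array)}
#     _get_batch_axis_size(data)                     # -> 4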
def convert_to_iterator(x=None,
y=None,
sample_weights=None,
batch_size=None,
steps_per_epoch=None,
epochs=1,
shuffle=False):
"""Converts NumPy arrays or EagerTensors to an EagerIterator.
Combines all provided data into a single EagerIterator.
Arguments:
x: NumPy array or EagerTensor, or list of Numpy arrays or EagerTensors
representing inputs to a model.
y: Optional. NumPy array or EagerTensor, or list of Numpy arrays or
EagerTensors representing targets of a model.
sample_weights: Optional NumPy array or EagerTensor representing sample
weights.
batch_size: Used to batch data and calculate how many steps EagerIterator
should take per epoch.
steps_per_epoch: If provided, how many steps EagerIterator should take per
epoch.
epochs: Epochs to repeat iterator for.
shuffle: Whether to shuffle data after each epoch.
Raises:
ValueError: if steps_per_epoch cannot be calculated from the data
provided.
Returns:
(Iterator, steps_per_epoch).
"""
if isinstance(x, iterator_ops.EagerIterator):
return x, steps_per_epoch
if not _nested_any(sample_weights, lambda x: x is None):
data = (x, y, sample_weights)
elif not _nested_any(y, lambda x: x is None):
data = (x, y)
else:
# always wrap in a tuple, so we know y, sample_weights weren't set
# even when x has multiple elements
data = (x,)
data = _convert_lists_to_tuples(data)
if steps_per_epoch is None and batch_size is not None:
num_samples = _get_batch_axis_size(data)
steps_per_epoch = int(math.ceil(num_samples / batch_size))
if steps_per_epoch is None:
    raise ValueError('Could not determine steps_per_epoch. '
                     'Please provide either batch_size or '
                     'steps_per_epoch.')
# TODO(omalleyt) for NumPy arrays in graph mode
# placeholder ops should be used
# this is only ideal for eager mode
dataset = dataset_ops.Dataset.from_tensor_slices(data)
if batch_size is not None:
dataset = dataset.batch(batch_size)
if shuffle:
dataset = dataset.shuffle(buffer_size=10000)
dataset = dataset.repeat(epochs)
iterator = dataset.make_one_shot_iterator()
return iterator, steps_per_epoch
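# Hedged usage sketch (assumes eager execution; the arrays are illustrative):
#     x = np.random.rand(10, 3).astype(np.float32)
#     y = np.random.rand(10, 1).astype(np.float32)
#     iterator, steps = convert_to_iterator(x=x, y=y, batch_size=4)
#     # steps == 3 (ceil(10 / 4)); each element of the iterator is an
#     # (x, y) batch tuple.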
def check_num_samples(ins,
batch_size=None,
steps=None,
steps_name='steps'):
"""Determine the number of samples provided for training and evaluation.
The number of samples is not defined when running with `steps`,
in which case the number of samples is set to `None`.
Arguments:
ins: List of tensors to be fed to the Keras function.
batch_size: Integer batch size or `None` if not defined.
steps: Total number of steps (batches of samples)
before declaring `_predict_loop` finished.
Ignored with the default value of `None`.
steps_name: The public API's parameter name for `steps`.
  Returns:
      When steps is `None`, returns the number of samples to be
      processed based on the size of the first dimension of the
      first input numpy array. When steps is not `None` and
      `batch_size` is `None`, returns `None`.
  Raises:
      ValueError: when `steps` is `None` and the attribute `ins.shape`
          does not exist, or when both `steps` and `batch_size` are set,
          because they are mutually exclusive.
  """
if steps is not None and batch_size is not None:
raise ValueError(
'If ' + steps_name + ' is set, the `batch_size` must be None.')
if check_steps_argument(ins, steps, steps_name):
return None
if hasattr(ins[0], 'shape'):
return int(ins[0].shape[0])
return None # Edge case where ins == [static_learning_phase]
def standardize_single_array(x):
if x is None:
return None
elif tensor_util.is_tensor(x):
return x
elif x.ndim == 1:
x = np.expand_dims(x, 1)
return x
def standardize_input_data(data,
names,
shapes=None,
check_batch_axis=True,
exception_prefix=''):
"""Normalizes inputs and targets provided by users.
Users may pass data as a list of arrays, dictionary of arrays,
or as a single array. We normalize this to an ordered list of
arrays (same order as `names`), while checking that the provided
arrays have shapes that match the network's expectations.
Arguments:
data: User-provided input data (polymorphic).
names: List of expected array names.
shapes: Optional list of expected array shapes.
check_batch_axis: Boolean; whether to check that
the batch axis of the arrays matches the expected
value found in `shapes`.
exception_prefix: String prefix used for exception formatting.
Returns:
List of standardized input arrays (one array per model input).
Raises:
ValueError: in case of improperly formatted user-provided data.
"""
if not names:
if data is not None and hasattr(data, '__len__') and len(data):
raise ValueError('Error when checking model ' + exception_prefix + ': '
'expected no data, but got:', data)
return []
if data is None:
return [None for _ in range(len(names))]
if isinstance(data, dict):
try:
data = [
data[x].values
if data[x].__class__.__name__ == 'DataFrame' else data[x]
for x in names
]
except KeyError as e:
raise ValueError('No data provided for "' + e.args[0] + '". Need data '
'for each key in: ' + str(names))
elif isinstance(data, (list, tuple)):
if isinstance(data[0], (list, tuple)):
data = [np.asarray(d) for d in data]
elif len(names) == 1 and isinstance(data[0], (float, int)):
data = [np.asarray(data)]
else:
data = [
x.values if x.__class__.__name__ == 'DataFrame' else x for x in data
]
else:
data = data.values if data.__class__.__name__ == 'DataFrame' else data
data = [data]
data = [standardize_single_array(x) for x in data]
if len(data) != len(names):
if data and hasattr(data[0], 'shape'):
raise ValueError('Error when checking model ' + exception_prefix +
': the list of Numpy arrays that you are passing to '
'your model is not the size the model expected. '
'Expected to see ' + str(len(names)) + ' array(s), '
'but instead got the following list of ' +
str(len(data)) + ' arrays: ' + str(data)[:200] + '...')
elif len(names) > 1:
raise ValueError(
'Error when checking model ' + exception_prefix +
': you are passing a list as input to your model, '
'but the model expects a list of ' + str(len(names)) +
' Numpy arrays instead. The list you passed was: ' + str(data)[:200])
elif len(data) == 1 and not hasattr(data[0], 'shape'):
raise TypeError('Error when checking model ' + exception_prefix +
': data should be a Numpy array, or list/dict of '
'Numpy arrays. Found: ' + str(data)[:200] + '...')
elif len(names) == 1:
data = [np.asarray(data)]
# Check shapes compatibility.
if shapes:
for i in range(len(names)):
if shapes[i] is not None:
if tensor_util.is_tensor(data[i]):
tensorshape = data[i].get_shape()
if not tensorshape:
continue
data_shape = tuple(tensorshape.as_list())
else:
data_shape = data[i].shape
shape = shapes[i]
if len(data_shape) != len(shape):
raise ValueError('Error when checking ' + exception_prefix +
': expected ' + names[i] + ' to have ' +
str(len(shape)) + ' dimensions, but got array '
'with shape ' + str(data_shape))
if not check_batch_axis:
data_shape = data_shape[1:]
shape = shape[1:]
for dim, ref_dim in zip(data_shape, shape):
if ref_dim != dim and ref_dim is not None and dim is not None:
raise ValueError(
'Error when checking ' + exception_prefix + ': expected ' +
names[i] + ' to have shape ' + str(shape) +
' but got array with shape ' + str(data_shape))
return data
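# Hedged usage sketch (the input name and shapes are illustrative only):
#     standardize_input_data({'img': np.zeros((2, 4))}, ['img'],
#                            shapes=[(None, 4)], exception_prefix='input')
#     # -> [array of shape (2, 4)], after mapping the dict onto the expected
#     #    name order and checking the non-batch dimensions.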
def standardize_sample_or_class_weights(x_weight, output_names, weight_type):
"""Maps `sample_weight` or `class_weight` to model outputs.
Arguments:
x_weight: User-provided `sample_weight` or `class_weight` argument.
output_names: List of output names (strings) in the model.
weight_type: A string used purely for exception printing.
Returns:
A list of `sample_weight` or `class_weight` where there are exactly
one element per model output.
Raises:
ValueError: In case of invalid user-provided argument.
"""
if x_weight is None or len(x_weight) == 0: # pylint: disable=g-explicit-length-test
return [None for _ in output_names]
if len(output_names) == 1:
if isinstance(x_weight, list) and len(x_weight) == 1:
return x_weight
if isinstance(x_weight, dict) and output_names[0] in x_weight:
return [x_weight[output_names[0]]]
else:
return [x_weight]
if isinstance(x_weight, list):
if len(x_weight) != len(output_names):
raise ValueError('Provided `' + weight_type + '` was a list of ' +
str(len(x_weight)) + ' elements, but the model has ' +
str(len(output_names)) + ' outputs. '
                       'You should provide one `' + weight_type + '` '
'array per model output.')
return x_weight
if isinstance(x_weight, dict):
x_weights = []
for name in output_names:
x_weights.append(x_weight.get(name))
return x_weights
else:
raise TypeError(
'The model has multiple outputs, so `' + weight_type + '` '
'should be either a list or a dict. '
'Provided `' + weight_type + '` type not understood: ' + str(x_weight))
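# Hedged illustration (w_main / w_aux are made-up arrays) of the mapping for a
# model with two named outputs:
#     standardize_sample_or_class_weights(
#         {'main': w_main}, ['main', 'aux'], 'sample_weight')
#     # -> [w_main, None]    (dict input: missing outputs map to None)
#     standardize_sample_or_class_weights(
#         [w_main, w_aux], ['main', 'aux'], 'sample_weight')
#     # -> [w_main, w_aux]   (list input: length must match the outputs)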
def standardize_class_weights(class_weight, output_names):
return standardize_sample_or_class_weights(class_weight, output_names,
'class_weight')
def standardize_sample_weights(sample_weight, output_names):
return standardize_sample_or_class_weights(sample_weight, output_names,
'sample_weight')
def check_array_lengths(inputs, targets, weights=None):
"""Does user input validation for numpy arrays.
Arguments:
inputs: list of Numpy arrays of inputs.
targets: list of Numpy arrays of targets.
weights: list of Numpy arrays of sample weights.
Raises:
ValueError: in case of incorrectly formatted data.
"""
def set_of_lengths(x):
# Returns a set with the variation between
# different shapes, with None => 0
if x is None:
return {}
else:
return set([y.shape[0] for y in x
if y is not None and not tensor_util.is_tensor(y)])
set_x = set_of_lengths(inputs)
set_y = set_of_lengths(targets)
set_w = set_of_lengths(weights)
if len(set_x) > 1:
raise ValueError('All input arrays (x) should have '
'the same number of samples. Got array shapes: ' +
str([x.shape for x in inputs]))
if len(set_y) > 1:
raise ValueError('All target arrays (y) should have '
'the same number of samples. Got array shapes: ' +
str([y.shape for y in targets]))
if set_x and set_y and list(set_x)[0] != list(set_y)[0]:
raise ValueError('Input arrays should have '
'the same number of samples as target arrays. '
'Found ' + str(list(set_x)[0]) + ' input samples '
'and ' + str(list(set_y)[0]) + ' target samples.')
if len(set_w) > 1:
raise ValueError('All sample_weight arrays should have '
'the same number of samples. Got array shapes: ' +
str([w.shape for w in weights]))
if set_y and set_w and list(set_y)[0] != list(set_w)[0]:
raise ValueError('Sample_weight arrays should have '
'the same number of samples as target arrays. Got ' +
str(list(set_y)[0]) + ' input samples and ' +
str(list(set_w)[0]) + ' target samples.')
def check_loss_and_target_compatibility(targets, loss_fns, output_shapes):
"""Does validation on the compatibility of targets and loss functions.
This helps prevent users from using loss functions incorrectly. This check
is purely for UX purposes.
Arguments:
targets: list of Numpy arrays of targets.
loss_fns: list of loss functions.
output_shapes: list of shapes of model outputs.
Raises:
ValueError: if a loss function or target array
is incompatible with an output.
"""
key_losses = {
losses.mean_squared_error, losses.binary_crossentropy,
losses.categorical_crossentropy
}
for y, loss, shape in zip(targets, loss_fns, output_shapes):
if y is None or loss is None or tensor_util.is_tensor(y):
continue
if loss is losses.categorical_crossentropy:
if y.shape[-1] == 1:
raise ValueError('You are passing a target array of shape ' + str(
y.shape) + ' while using as loss `categorical_crossentropy`. '
'`categorical_crossentropy` expects '
'targets to be binary matrices (1s and 0s) '
'of shape (samples, classes). '
'If your targets are integer classes, '
'you can convert them to the expected format via:\n'
'```\n'
'from keras.utils import to_categorical\n'
'y_binary = to_categorical(y_int)\n'
'```\n'
'\n'
'Alternatively, you can use the loss function '
'`sparse_categorical_crossentropy` instead, '
'which does expect integer targets.')
if loss in key_losses:
for target_dim, out_dim in zip(y.shape[1:], shape[1:]):
if out_dim is not None and target_dim != out_dim:
raise ValueError('A target array with shape ' + str(y.shape) +
' was passed for an output of shape ' + str(shape) +
' while using as loss `' + loss.__name__ + '`. '
'This loss expects '
'targets to have the same shape '
'as the output.')
def collect_metrics(metrics, output_names):
"""Maps metric functions to model outputs.
Arguments:
metrics: a list or dict of metric functions.
output_names: a list of the names (strings) of model outputs.
Returns:
A list (one entry per model output) of lists of metric functions.
For instance, if the model has 2 outputs, and for the first output
we want to compute "binary_accuracy" and "binary_crossentropy",
and just "binary_accuracy" for the second output,
the list would look like:
`[[binary_accuracy, binary_crossentropy], [binary_accuracy]]`
Raises:
TypeError: if an incorrect type is passed for the `metrics` argument.
"""
if not metrics:
return [[] for _ in output_names]
if isinstance(metrics, list):
# we then apply all metrics to all outputs.
return [copy.copy(metrics) for _ in output_names]
elif isinstance(metrics, dict):
nested_metrics = []
for name in output_names:
output_metrics = metrics.get(name, [])
if not isinstance(output_metrics, list):
output_metrics = [output_metrics]
nested_metrics.append(output_metrics)
return nested_metrics
else:
raise TypeError('Type of `metrics` argument not understood. '
'Expected a list or dictionary, found: ' + str(metrics))
def batch_shuffle(index_array, batch_size):
"""Shuffles an array in a batch-wise fashion.
Useful for shuffling HDF5 arrays
(where one cannot access arbitrary indices).
Arguments:
index_array: array of indices to be shuffled.
batch_size: integer.
Returns:
The `index_array` array, shuffled in a batch-wise fashion.
"""
batch_count = int(len(index_array) / batch_size)
# to reshape we need to be cleanly divisible by batch size
# we stash extra items and reappend them after shuffling
last_batch = index_array[batch_count * batch_size:]
index_array = index_array[:batch_count * batch_size]
index_array = index_array.reshape((batch_count, batch_size))
np.random.shuffle(index_array)
index_array = index_array.flatten()
return np.append(index_array, last_batch)
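# Illustrative sketch (not part of the original module): `batch_shuffle`
# permutes whole batches rather than individual indices, and re-appends the
# ragged tail unshuffled. The values below are hypothetical.
def _demo_batch_shuffle():
  shuffled = batch_shuffle(np.arange(10), batch_size=4)
  assert len(shuffled) == 10
  # The two full batches ([0..3] and [4..7]) may swap places, but the
  # trailing indices 8 and 9 always stay at the end.
  assert list(shuffled[-2:]) == [8, 9]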
def weighted_masked_objective(fn):
"""Adds support for masking and sample-weighting to an objective function.
It transforms an objective function `fn(y_true, y_pred)`
into a sample-weighted, cost-masked objective function
`fn(y_true, y_pred, weights, mask)`.
Arguments:
fn: The objective function to wrap,
with signature `fn(y_true, y_pred)`.
Returns:
A function with signature `fn(y_true, y_pred, weights, mask)`.
"""
if fn is None:
return None
def weighted(y_true, y_pred, weights, mask=None):
"""Wrapper function.
Arguments:
y_true: `y_true` argument of `fn`.
y_pred: `y_pred` argument of `fn`.
weights: Weights tensor.
mask: Mask tensor.
Returns:
Scalar tensor.
"""
# score_array has ndim >= 2
score_array = fn(y_true, y_pred)
if mask is not None:
# Cast the mask to floatX to avoid float64 upcasting in theano
mask = math_ops.cast(mask, K.floatx())
# mask should have the same shape as score_array
score_array *= mask
# the loss per batch should be proportional
# to the number of unmasked samples.
score_array /= K.mean(mask)
# Apply sample weighting.
if weights is not None:
# Update dimensions of weights to match with values if possible.
score_array, _, weights = metrics_module.squeeze_or_expand_dimensions(
score_array, None, weights)
try:
# Broadcast weights if possible.
weights = weights_broadcast_ops.broadcast_weights(weights, score_array)
except ValueError:
# Reduce values to same ndim as weight array.
ndim = K.ndim(score_array)
weight_ndim = K.ndim(weights)
score_array = K.mean(score_array, axis=list(range(weight_ndim, ndim)))
score_array = math_ops.multiply(score_array, weights)
score_array = math_ops.reduce_sum(score_array)
weights = math_ops.reduce_sum(weights)
score_array = metrics_module.safe_div(score_array, weights)
return K.mean(score_array)
return weighted
def standardize_weights(y,
sample_weight=None,
class_weight=None,
sample_weight_mode=None):
"""Performs sample weight validation and standardization.
Everything gets normalized to a single sample-wise (or timestep-wise)
weight array.
Arguments:
y: Numpy array of model targets to be weighted.
sample_weight: User-provided `sample_weight` argument.
class_weight: User-provided `class_weight` argument.
    sample_weight_mode: One of `None` or `"temporal"`.
        `"temporal"` indicates that we expect 2D weight data
that will be applied to the last 2 dimensions of
the targets (i.e. we are weighting timesteps, not samples).
Returns:
A numpy array of target weights, one entry per sample to weight.
Raises:
ValueError: In case of invalid user-provided arguments.
"""
# Iterator may return sample_weight as 1-tuple
if isinstance(sample_weight, tuple):
sample_weight = sample_weight[0]
if sample_weight_mode is not None:
if sample_weight_mode != 'temporal':
      raise ValueError('`sample_weight_mode` '
                       'should be None or "temporal". '
                       'Found: ' + str(sample_weight_mode))
if len(y.shape) < 3:
raise ValueError('Found a sample_weight array for '
'an input with shape ' + str(y.shape) + '. '
'Timestep-wise sample weighting (use of '
'sample_weight_mode="temporal") is restricted to '
'outputs that are at least 3D, i.e. that have '
'a time dimension.')
if sample_weight is not None and len(sample_weight.shape) != 2:
raise ValueError('Found a sample_weight array with shape ' +
str(sample_weight.shape) + '. '
'In order to use timestep-wise sample weighting, '
'you should pass a 2D sample_weight array.')
else:
if sample_weight is not None and len(sample_weight.shape) != 1:
raise ValueError('Found a sample_weight array with shape ' +
str(sample_weight.shape) + '. '
'In order to use timestep-wise sample weights, '
'you should specify '
'sample_weight_mode="temporal" '
'in compile(). If you just mean to use '
'sample-wise weights, make sure your '
'sample_weight array is 1D.')
if sample_weight is not None:
if len(sample_weight.shape) > len(y.shape):
raise ValueError(
          'Found a sample_weight with shape ' + str(sample_weight.shape) + '. '
'Expected sample_weight with rank '
'less than or equal to ' + str(len(y.shape)))
if y.shape[:sample_weight.ndim] != sample_weight.shape:
raise ValueError(
'Found a sample_weight array with shape ' + str(sample_weight.shape) +
' for an input with shape ' + str(y.shape) + '. '
'sample_weight cannot be broadcast.')
return sample_weight
elif isinstance(class_weight, dict):
if len(y.shape) > 2:
raise ValueError('`class_weight` not supported for '
'3+ dimensional targets.')
if y.shape[1] > 1:
y_classes = np.argmax(y, axis=1)
elif y.shape[1] == 1:
y_classes = np.reshape(y, y.shape[0])
else:
y_classes = y
weights = np.asarray(
[class_weight[cls] for cls in y_classes if cls in class_weight])
if len(weights) != len(y_classes):
# subtract the sets to pick all missing classes
existing_classes = set(y_classes)
existing_class_weight = set(class_weight.keys())
raise ValueError('`class_weight` must contain all classes in the data.'
' The classes %s exist in the data but not in '
'`class_weight`.' %
(existing_classes - existing_class_weight))
return weights
else:
return None
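# Illustrative sketch (not part of the original module): a `class_weight`
# dict is turned into one weight per sample by looking up each sample's
# class. Labels and weights below are hypothetical.
def _demo_standardize_class_weight_lookup():
  y = np.array([[0], [1], [1], [0]])  # binary labels, shape (4, 1)
  weights = standardize_weights(y, class_weight={0: 1.0, 1: 3.0})
  assert list(weights) == [1.0, 3.0, 3.0, 1.0]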
def has_symbolic_tensors(ls):
if context.executing_eagerly():
return False
return has_tensors(ls)
def has_tensors(ls):
if isinstance(ls, (list, tuple)):
return any(tensor_util.is_tensor(v) for v in ls)
return tensor_util.is_tensor(ls)
def populate_metric_names(model):
for i in range(len(model.outputs)):
metrics = model.nested_metrics[i]
for metric in metrics:
base_metric_name = get_metric_name(metric)
add_metric_name(model, base_metric_name, i)
def get_metric_name(metric, weighted=False):
"""Returns the metric name corresponding to the given metric input.
Arguments:
metric: Metric function name or reference.
weighted: Boolean indicating if the given metric is weighted.
Returns:
a metric name.
"""
metric_name_prefix = 'weighted_' if weighted else ''
if metric in ('accuracy', 'acc', 'crossentropy', 'ce'):
if metric in ('accuracy', 'acc'):
suffix = 'acc'
elif metric in ('crossentropy', 'ce'):
suffix = 'ce'
metric_name = metric_name_prefix + suffix
else:
metric_fn = metrics_module.get(metric)
# Get metric name as string
if hasattr(metric_fn, 'name'):
metric_name = metric_fn.name
else:
metric_name = metric_fn.__name__
metric_name = metric_name_prefix + metric_name
return metric_name
def get_metric_function(metric, output_shape=None, loss_fn=None):
"""Returns the metric function corresponding to the given metric input.
Arguments:
metric: Metric function name or reference.
output_shape: The shape of the output that this metric
will be calculated for.
loss_fn: The loss function used.
Returns:
The metric function.
"""
if metric in ['accuracy', 'acc']:
if output_shape[-1] == 1 or loss_fn == losses.binary_crossentropy:
return metrics_module.binary_accuracy # case: binary accuracy
elif loss_fn == losses.sparse_categorical_crossentropy:
# case: categorical accuracy with sparse targets
return metrics_module.sparse_categorical_accuracy
return metrics_module.categorical_accuracy # case: categorical accuracy
elif metric in ['crossentropy', 'ce']:
if output_shape[-1] == 1 or loss_fn == losses.binary_crossentropy:
return metrics_module.binary_crossentropy # case: binary cross-entropy
elif loss_fn == losses.sparse_categorical_crossentropy:
# case: categorical cross-entropy with sparse targets
return metrics_module.sparse_categorical_crossentropy
# case: categorical cross-entropy
return metrics_module.categorical_crossentropy
return metrics_module.get(metric)
def add_metric_name(model, metric_name, index):
"""Makes the metric name unique and adds it to the model's metric name list.
If there are multiple outputs for which the metrics are calculated, the
metric names have to be made unique by appending an integer.
Arguments:
model: Model to which we are adding metric names.
metric_name: Metric name that corresponds to the metric specified by the
user. For example: 'acc'
index: The index of the model output for which the metric name is being
added.
Returns:
string, name of the model's unique metric name
"""
if len(model.output_names) > 1:
metric_name = '%s_%s' % (model.output_names[index], metric_name)
j = 1
base_metric_name = metric_name
while metric_name in model.metrics_names:
metric_name = '%s_%d' % (base_metric_name, j)
j += 1
model.metrics_names.append(metric_name)
return metric_name
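# Illustrative sketch (not part of the original module): with multiple
# outputs, metric names are prefixed with the output name and suffixed with a
# counter when a name is already taken. The stand-in model below is
# hypothetical.
def _demo_add_metric_name():
  class _FakeModel(object):
    output_names = ['out_a', 'out_b']
    metrics_names = ['loss']
  model = _FakeModel()
  assert add_metric_name(model, 'acc', 0) == 'out_a_acc'
  assert add_metric_name(model, 'acc', 0) == 'out_a_acc_1'  # made unique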
def validate_iterator_input(x, y, sample_weight, validation_split=None):
"""Validates user input arguments when a dataset iterator is passed.
Arguments:
x: Input data. A `tf.data` dataset iterator.
y: Target data. It could be either Numpy array(s) or TensorFlow tensor(s).
Expected to be `None` when `x` is a dataset iterator.
sample_weight: An optional sample-weight array passed by the user to
weight the importance of each sample in `x`. Expected to be `None` when
`x` is a dataset iterator
validation_split: Float between 0 and 1. Fraction of the training data to
be used as validation data. Expected to be `None` when `x` is a dataset
iterator.
Raises:
ValueError: if argument `y` or `sample_weight` or `validation_split` are
provided by user.
"""
if y is not None:
raise ValueError('You passed a dataset or dataset iterator (%s) as '
'input `x` to your model. In that case, you should '
'not specify a target (`y`) argument, since the dataset '
'or dataset iterator generates both input data and '
'target data. '
'Received: %s' % (x, y))
if sample_weight is not None:
raise ValueError('`sample_weight` argument is not supported when input '
'`x` is a dataset or a dataset iterator. '
'Received: x=%s, sample_weight=%s' % (x, sample_weight))
if validation_split is not None and validation_split != 0.0:
raise ValueError(
'`validation_split` argument is not supported when '
'input `x` is a dataset or a dataset iterator. '
'Received: x=%s, validation_split=%f' % (x, validation_split))
def check_steps_argument(input_data, steps, steps_name):
"""Validates `steps` argument based on input data's type.
The cases when `steps` value must be provided are when
1. input data passed is an iterator.
2. model was built on top of symbolic tensors, input data is not
required and is `None`.
3. input data passed is a symbolic tensor.
Arguments:
input_data: Input data. Can be Numpy array(s) or TensorFlow tensor(s) or
tf.data.Dataset iterator or `None`.
steps: Integer or `None`. Total number of steps (batches of samples) to
execute.
steps_name: The public API's parameter name for `steps`.
Returns:
boolean, True if `steps` argument is required, else False.
Raises:
ValueError: if `steps` argument is required for given input data type
but not provided.
"""
is_x_iterator = (
isinstance(input_data, iterator_ops.Iterator) or
isinstance(input_data, iterator_ops.EagerIterator))
if (input_data is None or is_x_iterator or has_symbolic_tensors(input_data) or
(isinstance(input_data, list) and not input_data)):
if steps is None:
input_type_str = 'iterators' if is_x_iterator else 'data tensors'
raise ValueError('When using {input_type} as input to a model, you should'
' specify the `{steps_name}` argument.'.format(
input_type=input_type_str, steps_name=steps_name))
return True
return False
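# Illustrative sketch (not part of the original module): `steps` is only
# mandatory for inputs that carry no sample count (None, iterators, or
# symbolic tensors). The argument values below are hypothetical.
def _demo_check_steps_argument():
  # Numpy input has a known length, so `steps` may stay None.
  assert not check_steps_argument(np.zeros((4, 2)), steps=None,
                                  steps_name='steps_per_epoch')
  # With no input data at all, a step count must be given.
  assert check_steps_argument(None, steps=100, steps_name='steps')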
def cast_if_floating_dtype(x):
"""Casts the given data tensors to the default floating point type.
Casts only if the input is already a floating point type.
Args:
x: tensor or list/tuple of tensors.
Returns:
Converted input.
Raises:
RuntimeError: if data isn't tensors.
"""
if not has_tensors(x):
raise RuntimeError(
'Please provide tensors for casting, got: {x}'.format(x=x))
if isinstance(x, (list, tuple)):
return [
math_ops.cast(val, dtype=K.floatx())
if tensor_util.is_tensor(val) and val.dtype.is_floating else val
for val in x
]
return math_ops.cast(x, dtype=K.floatx()) if x.dtype.is_floating else x
def get_output_sample_weight_and_mode(skip_target_weighing_indices,
sample_weight_mode, output_name,
output_index):
"""Returns the sample weight and weight mode for a single output."""
if output_index in skip_target_weighing_indices:
return None, None
if sample_weight_mode == 'temporal':
default_value = [[1.]]
shape = [None, None]
mode = 'temporal'
else:
default_value = [1.]
shape = [None]
mode = None
if context.executing_eagerly():
weight = None
else:
weight = array_ops.placeholder_with_default(
constant_op.constant(default_value, dtype=K.floatx()),
shape=shape,
name=output_name + '_sample_weights')
return weight, mode
def prepare_sample_weights(output_names, sample_weight_mode,
skip_target_weighing_indices):
"""Prepares sample weights for the model.
Args:
output_names: List of model output names.
sample_weight_mode: sample weight mode user input passed from compile API.
skip_target_weighing_indices: Indices of output for which sample weights
should be skipped.
Returns:
A pair of list of sample weights and sample weight modes
(one for each output).
Raises:
ValueError: In case of invalid `sample_weight_mode` input.
"""
sample_weights = []
sample_weight_modes = []
if isinstance(sample_weight_mode, dict):
unknown_output = set(sample_weight_mode.keys()) - set(output_names)
if unknown_output:
raise ValueError('Unknown entry in '
                       'sample_weight_mode dictionary: ' + str(unknown_output) +
                       '. Only expected the following keys: ' +
str(output_names))
for i, name in enumerate(output_names):
if (i not in skip_target_weighing_indices and
name not in sample_weight_mode):
        raise ValueError('Output "' + name + '" missing from the '
                         'sample_weight_mode dictionary.')
weight, mode = get_output_sample_weight_and_mode(
skip_target_weighing_indices, sample_weight_mode.get(name), name, i)
sample_weights.append(weight)
sample_weight_modes.append(mode)
elif isinstance(sample_weight_mode, list):
if len(sample_weight_mode) != len(output_names):
raise ValueError('When passing a list as sample_weight_mode, '
'it should have one entry per model output. '
'The model has ' + str(len(output_names)) +
' outputs, but you passed ' +
                       str(len(sample_weight_mode)) + ' sample_weight_modes.')
for i, name in enumerate(output_names):
weight, mode = get_output_sample_weight_and_mode(
skip_target_weighing_indices, sample_weight_mode[i], name, i)
sample_weights.append(weight)
sample_weight_modes.append(mode)
else:
for i, name in enumerate(output_names):
weight, mode = get_output_sample_weight_and_mode(
skip_target_weighing_indices, sample_weight_mode, name, i)
sample_weights.append(weight)
sample_weight_modes.append(mode)
return sample_weights, sample_weight_modes
|
## Copyright (c) 2016-2017 Upstream Research, Inc. All Rights Reserved. ##
## Subject to an 'MIT' License. See LICENSE file in top-level directory ##
## #python-3.x
## python 2 does not work, mostly due to issues with unicode data in the csv and io modules
help_text = (
"CSV-PREPEND tool version 20170918\n"
"Insert a header row into a CSV stream\n"
"\n"
"csv-prepend [OPTIONS] ColumnValueList [InputFile]\n"
"\n"
"OPTIONS\n"
" -E {E} Input file text encoding (e.g. 'utf-8', 'windows-1252')\n"
" -e {E} Output file text encoding (e.g. 'utf-8', 'windows-1252')\n"
" -K {N} Number of rows to skip from the input (default=0)\n"
" -N {N} Maximum number of rows to read (default=ALL)\n"
" -n {N} Maximum number of rows to write (default=ALL)\n"
" -o {F} Output file name\n"
" -S {S} Input file field delimiter (default ',')\n"
" -s {S} Output file field delimiter (default ',')\n"
"\n"
"ColumnValueList is a comma separated list of values to be inserted as \n"
"the first row.\n"
"It is possible to replace the header row using the -K option.\n"
)
import sys
import csv
import io
from ._csv_helpers import (
decode_delimiter_name
,decode_charset_name
,decode_newline
)
def main(arg_list, stdin, stdout, stderr):
in_io = stdin
out_io = stdout
err_io = stderr
show_help = False
input_file_name = None
output_file_name = None
input_delimiter = ','
output_delimiter = ','
# 'std' will be translated to the standard line break decided by csv_helpers.decode_newline
input_row_terminator = 'std'
output_row_terminator = 'std'
input_charset_name = 'utf_8_sig'
output_charset_name = 'utf_8'
output_charset_error_mode = 'strict' # 'strict' | 'ignore' | 'replace' | 'backslashreplace'
input_charset_error_mode = 'strict' # 'strict' | 'ignore' | 'replace' | 'backslashreplace'
csv_cell_width_limit = 4*1024*1024 # python default is 131072 = 0x00020000
input_row_start_offset = 0
input_row_count_max = None
output_row_count_max = None
head_row_str = None
# [20160916 [db] I avoided using argparse in order to retain some flexibility for command syntax]
arg_count = len(arg_list)
arg_index = 1
while (arg_index < arg_count):
arg = arg_list[arg_index]
if (arg == "--help"
or arg == "-?"
):
show_help = True
elif (arg == "-o"
or arg == "--output"
):
if (arg_index < arg_count):
arg_index += 1
arg = arg_list[arg_index]
output_file_name = arg
elif (arg == "-E"
or arg == "--charset-in"
or arg == "--encoding-in"
):
if (arg_index < arg_count):
arg_index += 1
arg = arg_list[arg_index]
input_charset_name = arg
elif (arg == "-e"
or arg == "--charset-out"
or arg == "--encoding-out"
):
if (arg_index < arg_count):
arg_index += 1
arg = arg_list[arg_index]
output_charset_name = arg
elif (arg == "--charset-in-error-mode"
):
if (arg_index < arg_count):
arg_index += 1
arg = arg_list[arg_index]
input_charset_error_mode = arg
elif (arg == "--charset-out-error-mode"
):
if (arg_index < arg_count):
arg_index += 1
arg = arg_list[arg_index]
output_charset_error_mode = arg
elif (arg == "--charset-error-mode"
):
if (arg_index < arg_count):
arg_index += 1
arg = arg_list[arg_index]
input_charset_error_mode = arg
output_charset_error_mode = arg
elif (arg == "-S"
or arg == "--separator-in"
or arg == "--delimiter-in"
):
if (arg_index < arg_count):
arg_index += 1
arg = arg_list[arg_index]
input_delimiter = arg
elif (arg == "-s"
or arg == "--separator-out"
or arg == "--delimiter-out"
):
if (arg_index < arg_count):
arg_index += 1
arg = arg_list[arg_index]
output_delimiter = arg
elif (arg == "-W"
or arg == "--terminator-in"
or arg == "--newline-in"
or arg == "--endline-in"
):
if (arg_index < arg_count):
arg_index += 1
arg = arg_list[arg_index]
input_row_terminator = arg
elif (arg == "-w"
or arg == "--terminator-out"
or arg == "--newline-out"
or arg == "--endline-out"
):
if (arg_index < arg_count):
arg_index += 1
arg = arg_list[arg_index]
output_row_terminator = arg
elif (arg == "--cell-width-limit"
):
if (arg_index < arg_count):
arg_index += 1
arg = arg_list[arg_index]
csv_cell_width_limit = int(arg)
elif (arg == "-K"
or arg == "--row-offset-in"
or arg == "--offset"
or arg == "--skip"
):
if (arg_index < arg_count):
arg_index += 1
arg = arg_list[arg_index]
input_row_start_offset = int(arg)
elif (arg == "-N"
or arg == "--row-count-in"
):
if (arg_index < arg_count):
arg_index += 1
arg = arg_list[arg_index]
if ('ALL' == arg.upper()):
input_row_count_max = None
else:
input_row_count_max = int(arg)
elif (arg == "-n"
or arg == "--row-count-out"
):
if (arg_index < arg_count):
arg_index += 1
arg = arg_list[arg_index]
if ('ALL' == arg.upper()):
output_row_count_max = None
else:
output_row_count_max = int(arg)
elif (None != arg
and 0 < len(arg)
):
if (None == head_row_str):
head_row_str = arg
elif (None == input_file_name):
input_file_name = arg
arg_index += 1
head_row = None
if (None != head_row_str):
head_row = head_row_str.split(',')
if (None == head_row):
show_help = True
if (show_help):
out_io.write(help_text)
else:
input_charset_name = decode_charset_name(input_charset_name)
output_charset_name = decode_charset_name(output_charset_name)
input_row_terminator = decode_newline(input_row_terminator)
output_row_terminator = decode_newline(output_row_terminator)
input_delimiter = decode_delimiter_name(input_delimiter)
output_delimiter = decode_delimiter_name(output_delimiter)
in_file = None
out_file = None
try:
read_text_io_mode = 'rt'
#in_newline_mode = '' # don't translate newline chars
in_newline_mode = input_row_terminator
in_file_id = input_file_name
should_close_in_file = True
if (None == in_file_id):
in_file_id = in_io.fileno()
should_close_in_file = False
in_io = io.open(
in_file_id
,mode=read_text_io_mode
,encoding=input_charset_name
,newline=in_newline_mode
,errors=input_charset_error_mode
,closefd=should_close_in_file
)
if (should_close_in_file):
in_file = in_io
write_text_io_mode = 'wt'
out_newline_mode='' # don't translate newline chars
#out_newline_mode = output_row_terminator
out_file_id = output_file_name
should_close_out_file = True
if (None == out_file_id):
out_file_id = out_io.fileno()
should_close_out_file = False
out_io = io.open(
out_file_id
,mode=write_text_io_mode
,encoding=output_charset_name
,newline=out_newline_mode
,errors=output_charset_error_mode
,closefd=should_close_out_file
)
if (should_close_out_file):
out_file = out_io
in_csv = csv.reader(
in_io
,delimiter=input_delimiter
,lineterminator=input_row_terminator
)
out_csv = csv.writer(
out_io
,delimiter=output_delimiter
,lineterminator=output_row_terminator
)
execute(
in_csv
,out_csv
,input_row_terminator
,output_row_terminator
,input_row_start_offset
,input_row_count_max
,output_row_count_max
,head_row
)
except BrokenPipeError:
pass
finally:
if (None != in_file):
in_file.close()
in_file = None
if (None != out_file):
out_file.close()
out_file = None
def execute(
in_csv
,out_csv
,input_row_terminator
,output_row_terminator
,in_row_offset_start
,in_row_count_max
,out_row_count_max
,new_head_row
):
# first write the new row
out_csv.writerow(new_head_row)
# then write the output using the csv-translate code
# [20170918 [db] This is just a copy of the code from -csv-translate;
# it is a bit overkill to include all of this here]
end_row = None
cr_newline = '\r'
lf_newline = '\n'
crlf_newline = '\r\n'
out_newline = output_row_terminator
in_row_count = 0
out_row_count = 0
in_row = next(in_csv, end_row)
while (end_row != in_row
and (None == in_row_count_max or in_row_count < in_row_count_max)
and (None == out_row_count_max or out_row_count < out_row_count_max)
):
in_row_count += 1
if (in_row_offset_start < in_row_count):
out_row = list(in_row)
column_count = len(out_row)
column_position = 0
while (column_position < column_count):
cell_value = out_row[column_position]
# fix newline characters in the data
# (some tools - like postgres - can't handle mixed newline chars)
if (None != cell_value):
# replace crlf with lf, then we will replace lf's with the output newline,
# this prevents us from turning a crlf into a double newline
cell_value = cell_value.replace(crlf_newline, lf_newline)
cell_value = cell_value.replace(cr_newline, lf_newline)
cell_value = cell_value.replace(lf_newline, out_newline)
out_row[column_position] = cell_value
column_position += 1
out_csv.writerow(out_row)
out_row_count += 1
in_row = next(in_csv, end_row)
def console_main():
main(sys.argv, sys.stdin, sys.stdout, sys.stderr)
if __name__ == "__main__":
console_main()
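# Illustrative usage (not part of the original tool; file names are made up):
# the console entry point hands sys.argv to main(), where arg_list[0] is the
# program name, so an equivalent programmatic call is
#
#   main(["csv-prepend", "id,name,value", "input.csv"],
#        sys.stdin, sys.stdout, sys.stderr)
#
# which writes the header row "id,name,value" followed by the rows of
# input.csv to stdout. Passing "-K 1" before the column list skips the
# existing first row, i.e. replaces the header instead of prepending one.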
|
# coding: utf-8
from __future__ import unicode_literals
import re
from .adobepass import AdobePassIE
from ..utils import (
int_or_none,
determine_ext,
parse_age_limit,
urlencode_postdata,
ExtractorError,
)
class GoIE(AdobePassIE):
_SITE_INFO = {
'abc': {
'brand': '001',
'requestor_id': 'ABC',
},
'freeform': {
'brand': '002',
'requestor_id': 'ABCFamily',
},
'watchdisneychannel': {
'brand': '004',
'requestor_id': 'Disney',
},
'watchdisneyjunior': {
'brand': '008',
'requestor_id': 'DisneyJunior',
},
'watchdisneyxd': {
'brand': '009',
'requestor_id': 'DisneyXD',
}
}
_VALID_URL = r'https?://(?:(?P<sub_domain>%s)\.)?go\.com/(?:[^/]+/)*(?:vdka(?P<id>\w+)|season-\d+/\d+-(?P<display_id>[^/?#]+))' % '|'.join(_SITE_INFO.keys())
_TESTS = [{
'url': 'http://abc.go.com/shows/castle/video/most-recent/vdka0_g86w5onx',
'info_dict': {
'id': '0_g86w5onx',
'ext': 'mp4',
'title': 'Sneak Peek: Language Arts',
'description': 'md5:7dcdab3b2d17e5217c953256af964e9c',
},
'params': {
# m3u8 download
'skip_download': True,
},
}, {
'url': 'http://abc.go.com/shows/after-paradise/video/most-recent/vdka3335601',
'only_matching': True,
}]
def _real_extract(self, url):
sub_domain, video_id, display_id = re.match(self._VALID_URL, url).groups()
if not video_id:
webpage = self._download_webpage(url, display_id)
video_id = self._search_regex(
# There may be inner quotes, e.g. data-video-id="'VDKA3609139'"
# from http://freeform.go.com/shows/shadowhunters/episodes/season-2/1-this-guilty-blood
r'data-video-id=["\']*VDKA(\w+)', webpage, 'video id')
site_info = self._SITE_INFO[sub_domain]
brand = site_info['brand']
video_data = self._download_json(
'http://api.contents.watchabc.go.com/vp2/ws/contents/3000/videos/%s/001/-1/-1/-1/%s/-1/-1.json' % (brand, video_id),
video_id)['video'][0]
title = video_data['title']
formats = []
for asset in video_data.get('assets', {}).get('asset', []):
asset_url = asset.get('value')
if not asset_url:
continue
format_id = asset.get('format')
ext = determine_ext(asset_url)
if ext == 'm3u8':
video_type = video_data.get('type')
data = {
'video_id': video_data['id'],
'video_type': video_type,
'brand': brand,
'device': '001',
}
if video_data.get('accesslevel') == '1':
requestor_id = site_info['requestor_id']
resource = self._get_mvpd_resource(
requestor_id, title, video_id, None)
auth = self._extract_mvpd_auth(
url, video_id, requestor_id, resource)
data.update({
'token': auth,
'token_type': 'ap',
'adobe_requestor_id': requestor_id,
})
else:
self._initialize_geo_bypass(['US'])
entitlement = self._download_json(
'https://api.entitlement.watchabc.go.com/vp2/ws-secure/entitlement/2020/authorize.json',
video_id, data=urlencode_postdata(data), headers=self.geo_verification_headers())
errors = entitlement.get('errors', {}).get('errors', [])
if errors:
for error in errors:
if error.get('code') == 1002:
self.raise_geo_restricted(
error['message'], countries=['US'])
error_message = ', '.join([error['message'] for error in errors])
raise ExtractorError('%s said: %s' % (self.IE_NAME, error_message), expected=True)
asset_url += '?' + entitlement['uplynkData']['sessionKey']
formats.extend(self._extract_m3u8_formats(
asset_url, video_id, 'mp4', m3u8_id=format_id or 'hls', fatal=False))
else:
f = {
'format_id': format_id,
'url': asset_url,
'ext': ext,
}
if re.search(r'(?:/mp4/source/|_source\.mp4)', asset_url):
f.update({
'format_id': ('%s-' % format_id if format_id else '') + 'SOURCE',
'preference': 1,
})
else:
mobj = re.search(r'/(\d+)x(\d+)/', asset_url)
if mobj:
height = int(mobj.group(2))
f.update({
'format_id': ('%s-' % format_id if format_id else '') + '%dP' % height,
'width': int(mobj.group(1)),
'height': height,
})
formats.append(f)
self._sort_formats(formats)
subtitles = {}
for cc in video_data.get('closedcaption', {}).get('src', []):
cc_url = cc.get('value')
if not cc_url:
continue
ext = determine_ext(cc_url)
if ext == 'xml':
ext = 'ttml'
subtitles.setdefault(cc.get('lang'), []).append({
'url': cc_url,
'ext': ext,
})
thumbnails = []
for thumbnail in video_data.get('thumbnails', {}).get('thumbnail', []):
thumbnail_url = thumbnail.get('value')
if not thumbnail_url:
continue
thumbnails.append({
'url': thumbnail_url,
'width': int_or_none(thumbnail.get('width')),
'height': int_or_none(thumbnail.get('height')),
})
return {
'id': video_id,
'title': title,
'description': video_data.get('longdescription') or video_data.get('description'),
'duration': int_or_none(video_data.get('duration', {}).get('value'), 1000),
'age_limit': parse_age_limit(video_data.get('tvrating', {}).get('rating')),
'episode_number': int_or_none(video_data.get('episodenumber')),
'series': video_data.get('show', {}).get('title'),
'season_number': int_or_none(video_data.get('season', {}).get('num')),
'thumbnails': thumbnails,
'formats': formats,
'subtitles': subtitles,
}
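# Illustrative usage (not part of the original extractor; the URL comes from
# the test cases above and youtube-dl's public API is assumed):
#
#   import youtube_dl
#   with youtube_dl.YoutubeDL({'skip_download': True}) as ydl:
#       info = ydl.extract_info(
#           'http://abc.go.com/shows/castle/video/most-recent/vdka0_g86w5onx',
#           download=False)
#   # info['formats'] then holds the HLS and MP4 formats assembled above.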
|
# -*- coding: ISO-8859-15 -*-
# =============================================================================
# Copyright (c) 2010 Tom Kralidis
#
# Authors : Tom Kralidis <[email protected]>
#
# Contact email: [email protected]
# =============================================================================
""" FGDC metadata parser """
from __future__ import (absolute_import, division, print_function)
from owscapable.etree import etree
from owscapable import util
class Metadata(object):
""" Process metadata """
def __init__(self, md):
if hasattr(md, 'getroot'): # standalone document
self.xml = etree.tostring(md.getroot())
else: # part of a larger document
self.xml = etree.tostring(md)
self.idinfo = Idinfo(md)
self.eainfo = Eainfo(md)
self.distinfo = Distinfo(md)
self.metainfo = Metainfo(md)
if self.idinfo.datasetid:
self.identifier = self.idinfo.datasetid
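# Illustrative usage (not part of the original parser; the file name is
# hypothetical, and attributes such as spdom/bbox only exist when the
# corresponding FGDC elements are present):
#
#   from owscapable.etree import etree
#   md = Metadata(etree.parse('record.fgdc.xml'))
#   print(md.idinfo.citation.citeinfo['title'])
#   print(md.idinfo.spdom.bbox.minx, md.idinfo.spdom.bbox.maxy)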
class Idinfo(object):
""" Process idinfo """
def __init__(self, md):
val = md.find('idinfo/datasetid')
self.datasetid = util.testXMLValue(val)
val = md.find('idinfo/citation')
self.citation = Citation(val)
val = md.find('idinfo/descript')
if val is not None:
self.descript = Descript(val)
val = md.find('idinfo/timeperd')
self.timeperd = Timeperd(val)
val = md.find('idinfo/status')
if val is not None:
self.status = Status(val)
val = md.find('idinfo/spdom')
if val is not None:
self.spdom = Spdom(val)
val = md.find('idinfo/keywords')
if val is not None:
self.keywords = Keywords(val)
val = md.find('idinfo/accconst')
self.accconst = util.testXMLValue(val)
val = md.find('idinfo/useconst')
self.useconst = util.testXMLValue(val)
val = md.find('idinfo/ptcontac')
if val is not None:
self.ptcontac = Ptcontac(val)
val = md.find('idinfo/datacred')
self.datacred = util.testXMLValue(val)
val = md.find('idinfo/crossref')
self.crossref = Citation(val)
class Citation(object):
""" Process citation """
def __init__(self, md):
if md is not None:
self.citeinfo = {}
val = md.find('citeinfo/origin')
self.citeinfo['origin'] = util.testXMLValue(val)
val = md.find('citeinfo/pubdate')
self.citeinfo['pubdate'] = util.testXMLValue(val)
val = md.find('citeinfo/title')
self.citeinfo['title'] = util.testXMLValue(val)
val = md.find('citeinfo/geoform')
self.citeinfo['geoform'] = util.testXMLValue(val)
val = md.find('citeinfo/pubinfo/pubplace')
self.citeinfo['pubplace'] = util.testXMLValue(val)
val = md.find('citeinfo/pubinfo/publish')
self.citeinfo['publish'] = util.testXMLValue(val)
self.citeinfo['onlink'] = []
for link in md.findall('citeinfo/onlink'):
self.citeinfo['onlink'].append(util.testXMLValue(link))
class Descript(object):
""" Process descript """
def __init__(self, md):
val = md.find('abstract')
self.abstract = util.testXMLValue(val)
val = md.find('purpose')
self.purpose = util.testXMLValue(val)
val = md.find('supplinf')
self.supplinf = util.testXMLValue(val)
class Timeperd(object):
""" Process timeperd """
def __init__(self, md):
if md is not None:
val = md.find('current')
self.current = util.testXMLValue(val)
val = md.find('timeinfo')
if val is not None:
self.timeinfo = Timeinfo(val)
class Timeinfo(object):
""" Process timeinfo """
def __init__(self, md):
val = md.find('sngdate')
if val is not None:
self.sngdate = Sngdate(val)
val = md.find('rngdates')
if val is not None:
self.rngdates = Rngdates(val)
class Sngdate(object):
""" Process sngdate """
def __init__(self, md):
val = md.find('caldate')
self.caldate = util.testXMLValue(val)
val = md.find('time')
self.time = util.testXMLValue(val)
class Rngdates(object):
""" Process rngdates """
def __init__(self, md):
val = md.find('begdate')
self.begdate = util.testXMLValue(val)
val = md.find('begtime')
self.begtime = util.testXMLValue(val)
val = md.find('enddate')
self.enddate = util.testXMLValue(val)
val = md.find('endtime')
self.endtime = util.testXMLValue(val)
class Status(object):
""" Process status """
def __init__(self, md):
val = md.find('progress')
self.progress = util.testXMLValue(val)
val = md.find('update')
self.update = util.testXMLValue(val)
class Spdom(object):
""" Process spdom """
def __init__(self, md):
val = md.find('bounding/westbc')
self.westbc = util.testXMLValue(val)
val = md.find('bounding/eastbc')
self.eastbc = util.testXMLValue(val)
val = md.find('bounding/northbc')
self.northbc = util.testXMLValue(val)
val = md.find('bounding/southbc')
self.southbc = util.testXMLValue(val)
if (self.southbc is not None and self.northbc is not None and
self.eastbc is not None and self.westbc is not None):
self.bbox = Bbox(self)
class Bbox(object):
""" Generate bbox for spdom (convenience function) """
def __init__(self, spdom):
self.minx = spdom.westbc
self.miny = spdom.southbc
self.maxx = spdom.eastbc
self.maxy = spdom.northbc
class Keywords(object):
""" Process keywords """
def __init__(self, md):
self.theme = []
self.place = []
self.temporal = []
for i in md.findall('theme'):
theme = {}
val = i.find('themekt')
theme['themekt'] = util.testXMLValue(val)
theme['themekey'] = []
for j in i.findall('themekey'):
themekey = util.testXMLValue(j)
if themekey is not None:
theme['themekey'].append(themekey)
self.theme.append(theme)
        for i in md.findall('place'):
            place = {}
            val = i.find('placekt')
            place['placekt'] = util.testXMLValue(val)
            place['placekey'] = []
            for j in i.findall('placekey'):
                place['placekey'].append(util.testXMLValue(j))
            self.place.append(place)
        for i in md.findall('temporal'):
            temporal = {}
            val = i.find('tempkt')
            temporal['tempkt'] = util.testXMLValue(val)
            temporal['tempkey'] = []
            for j in i.findall('tempkey'):
                temporal['tempkey'].append(util.testXMLValue(j))
            self.temporal.append(temporal)
class Ptcontac(object):
""" Process ptcontac """
def __init__(self, md):
val = md.find('cntinfo/cntorgp/cntorg')
self.cntorg = util.testXMLValue(val)
val = md.find('cntinfo/cntorgp/cntper')
self.cntper = util.testXMLValue(val)
val = md.find('cntinfo/cntpos')
self.cntpos = util.testXMLValue(val)
val = md.find('cntinfo/cntaddr/addrtype')
self.addrtype = util.testXMLValue(val)
val = md.find('cntinfo/cntaddr/address')
self.address = util.testXMLValue(val)
val = md.find('cntinfo/cntaddr/city')
self.city = util.testXMLValue(val)
val = md.find('cntinfo/cntaddr/state')
self.state = util.testXMLValue(val)
val = md.find('cntinfo/cntaddr/postal')
self.postal = util.testXMLValue(val)
val = md.find('cntinfo/cntaddr/country')
self.country = util.testXMLValue(val)
val = md.find('cntinfo/cntvoice')
self.voice = util.testXMLValue(val)
val = md.find('cntinfo/cntemail')
self.email = util.testXMLValue(val)
class Eainfo(object):
""" Process eainfo """
def __init__(self, md):
val = md.find('eainfo/detailed/enttyp/enttypl')
self.enttypl = util.testXMLValue(val)
val = md.find('eainfo/detailed/enttyp/enttypd')
self.enttypd = util.testXMLValue(val)
val = md.find('eainfo/detailed/enttyp/enttypds')
self.enttypds = util.testXMLValue(val)
self.attr = []
for i in md.findall('eainfo/detailed/attr'):
attr = {}
val = i.find('attrlabl')
attr['attrlabl'] = util.testXMLValue(val)
val = i.find('attrdef')
attr['attrdef'] = util.testXMLValue(val)
val = i.find('attrdefs')
attr['attrdefs'] = util.testXMLValue(val)
val = i.find('attrdomv/udom')
attr['udom'] = util.testXMLValue(val)
self.attr.append(attr)
class Distinfo(object):
""" Process distinfo """
def __init__(self, md):
val = md.find('distinfo')
if val is not None:
val2 = val.find('stdorder')
if val2 is not None:
self.stdorder = {'digform': []}
for link in val2.findall('digform'):
digform = {}
digform['name'] = util.testXMLValue(link.find('digtinfo/formname'))
digform['url'] = util.testXMLValue(link.find('digtopt/onlinopt/computer/networka/networkr/'))
self.stdorder['digform'].append(digform)
class Metainfo(object):
""" Process metainfo """
def __init__(self, md):
val = md.find('metainfo/metd')
self.metd = util.testXMLValue(val)
val = md.find('metainfo/metrd')
self.metrd = util.testXMLValue(val)
val = md.find('metainfo/metc')
if val is not None:
self.metc = Ptcontac(val)
val = md.find('metainfo/metstdn')
self.metstdn = util.testXMLValue(val)
val = md.find('metainfo/metstdv')
self.metstdv = util.testXMLValue(val)
val = md.find('metainfo/metac')
self.metac = util.testXMLValue(val)
val = md.find('metainfo/metuc')
self.metuc = util.testXMLValue(val)
|
import sys
from nose2 import session
from nose2.tests._common import support_file, FunctionalTestCase
class SessionFunctionalTests(FunctionalTestCase):
def setUp(self):
self.s = session.Session()
self.s.loadConfigFiles(support_file('cfg', 'a.cfg'),
support_file('cfg', 'b.cfg'))
sys.path.insert(0, support_file('lib'))
def test_session_can_load_config_files(self):
assert self.s.config.has_section('a')
assert self.s.config.has_section('b')
def test_session_holds_plugin_config(self):
plug_config = self.s.get('a')
assert plug_config
def test_session_can_load_plugins_from_modules(self):
self.s.loadPlugins()
assert self.s.plugins
plug = self.s.plugins[0]
self.assertEqual(plug.a, 1)
    def test_session_config_cacheing(self):
        """Test that caching of config sections works"""
# Create new session (generic one likely already cached
# depending on test order)
cache_sess = session.Session()
cache_sess.loadConfigFiles(support_file('cfg', 'a.cfg'))
# First access to given section, should read from config file
firstaccess = cache_sess.get('a')
assert firstaccess.as_int("a") == 1
# Hack cached Config object internals to make the stored value
# something different
cache_sess.configCache["a"]._mvd["a"] = "0"
newitems = []
for item in cache_sess.configCache["a"]._items:
if item != ("a", "1"):
newitems.append(item)
else:
newitems.append(("a", "0"))
cache_sess.configCache["a"]._items = newitems
# Second access to given section, confirm returns cached value
# rather than parsing config file again
secondaccess = cache_sess.get("a")
assert secondaccess.as_int("a") == 0
|
# -*- coding: utf-8 -*-
"""
Created on Mon Jun 23 10:17:53 2014
@author: ibackus
"""
# External modules
import numpy as np
import pynbody
SimArray = pynbody.array.SimArray
# diskpy modules
from diskpy.pdmath import smoothstep
from diskpy.utils import match_units
def make_profile(ICobj):
"""
A wrapper for generating surface density profiles according to the IC object.
Settings for the profile are defined in ICobj.settings. Which profile gets
used is defined by ICobj.settings.sigma.kind
    Currently available kinds are:
    viscous
    powerlaw
    MQWS
    gaussring
**RETURNS**
r : SimArray
Radii at which sigma is calculated
sigma : SimArray
Surface density profile as a function of R
"""
kind = ICobj.settings.sigma.kind
if kind == 'powerlaw':
r, sigma = powerlaw(ICobj.settings, ICobj.T)
elif (kind == 'mqws') | (kind == 'MQWS'):
r, sigma = MQWS(ICobj.settings, ICobj.T)
elif (kind == 'viscous'):
r, sigma = viscous(ICobj.settings)
elif (kind == 'gaussring'):
r, sigma = gaussian_ring(ICobj.settings)
else:
        raise TypeError('Could not make profile for kind {0}'.format(kind))
if hasattr(ICobj.settings.sigma, 'innercut'):
sigma = _applycut(r, sigma, ICobj.settings.sigma.innercut, False)
if hasattr(ICobj.settings.sigma, 'outercut'):
sigma = _applycut(r, sigma, ICobj.settings.sigma.outercut, True)
return r, sigma
def _applycut(r, sigma, rcut, outer=True):
"""
Applies a hard cut to a surface density profile (sigma). If outer=True,
sigma = 0 at r > rcut. Otherwise, sigma = 0 at r < rcut. If rcut is
None, inf, or nan no cut is performed.
"""
if rcut is None:
return sigma
elif np.isnan(rcut) or np.isinf(rcut):
return sigma
if outer:
mask = r > rcut
else:
mask = r < rcut
if np.any(mask):
sigma[mask] = 0
return sigma
def gaussian_ring(settings):
"""
Generates a gaussian ring surface density profile according to:
    .. math:: \\Sigma = \\Sigma_0 \\exp(-(R-R_d)^2/(2 a^2))
    .. math:: \\Sigma_0 = M_d/((2\\pi)^{3/2} a R_d)
Here we call a the ringwidth.
The max radius is determined automatically
Parameters
----------
settings : IC settings
settings like those contained in an IC object (see ICgen_settings.py)
Returns
-------
R : SimArray
Radii at which sigma is calculated
sigma : SimArray
Surface density profile as a function of R
"""
Rd = settings.sigma.Rd
ringwidth = settings.sigma.ringwidth
n_points = settings.sigma.n_points
m_disk = settings.sigma.m_disk
Rmax = (Rd + 5*ringwidth).in_units(Rd.units)
Rmax = max(Rmax, Rd*2.0)
R = SimArray(np.linspace(0, Rmax, n_points), Rd.units)
sigma0 = m_disk / (ringwidth * Rd)
sigma0 *= (2*np.pi)**-1.5
expArg = -(R-Rd)**2 / (2*ringwidth**2)
expArg.convert_units('1')
sigma = sigma0 * np.exp(expArg)
return R, sigma
def viscous(settings):
"""
Generates a surface density profile derived from a self-similarity solution
for a viscous disk, according to:
sigma ~ r^-gamma exp(-r^(2-gamma))
Where r is a dimensionless radius and gamma is a constant less than 2.
Rd (disk radius) is defined as the radius containing 95% of the disk mass
**ARGUMENTS**
settings : IC settings
settings like those contained in an IC object (see ICgen_settings.py)
**RETURNS**
R : SimArray
Radii at which sigma is calculated
sigma : SimArray
Surface density profile as a function of R
"""
Rd = settings.sigma.Rd
rin = settings.sigma.rin
rmax = settings.sigma.rmax
n_points = settings.sigma.n_points
gamma = settings.sigma.gamma
m_disk = settings.sigma.m_disk
# Define the fraction of mass contained within Rd
A = 0.95
# Normalization for r
R1 = Rd / (np.log(1/(1-A))**(1/(2-gamma)))
Rmax = rmax * Rd
Rin = rin * Rd
R = np.linspace(0, Rmax, n_points)
r = (R/R1).in_units('1')
sigma = (r**-gamma) * np.exp(-r**(2-gamma)) * (m_disk/(2*np.pi*R1*R1)) * (2-gamma)
# Deal with infinities at the origin with a hard cut off
sigma[0] = sigma[1]
# Apply interior cutoff
cut_mask = R < Rin
if np.any(cut_mask):
sigma[cut_mask] *= smoothstep(r[cut_mask],degree=21,rescale=True)
return R, sigma
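# Derivation note (added for clarity, not in the original source): integrating
# sigma(R) = (2 - gamma) * m_disk / (2*pi*R1**2) * (R/R1)**-gamma
#            * exp(-(R/R1)**(2 - gamma))
# over 2*pi*R dR gives the enclosed mass
#     M(<R) = m_disk * (1 - exp(-(R/R1)**(2 - gamma))),
# so requiring M(<Rd) = A * m_disk with A = 0.95 yields
#     R1 = Rd / (log(1/(1 - A)))**(1/(2 - gamma)),
# which is exactly the normalization radius R1 computed above.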
def powerlaw(settings, T = None):
"""
Generates a surface density profile according to a powerlaw sigma ~ r^p
with a smooth interior cutoff and smooth exterior exponential cutoff.
**ARGUMENTS**
settings : IC settings
settings like those contained in an IC object (see ICgen_settings.py)
T : callable function
Function that returns temperature of the disk as a function of radius
IF none, a powerlaw temperature is assumed
**RETURNS**
R : SimArray
Radii at which sigma is calculated
sigma : SimArray
Surface density profile as a function of R
"""
# Parse settings
Rd = settings.sigma.Rd
rin = settings.sigma.rin
rmax = settings.sigma.rmax
cutlength = settings.sigma.cutlength
Mstar = settings.physical.M
Qmin = settings.sigma.Qmin
n_points = settings.sigma.n_points
m = settings.physical.m
power = settings.sigma.power
gamma = settings.physical.gamma_cs()
if T is None:
# If no callable object to calculate Temperature(R) is provided,
# default to a powerlaw T ~ R^-q
T0 = SimArray([129.0],'K') # Temperature at 1 AU
R0 = SimArray([1.0],'au')
q = 0.59
def T(x):
return T0 * np.power((x/R0).in_units('1'),-q)
Rd = match_units(pynbody.units.au, Rd)[1]
Mstar = match_units(pynbody.units.Msol, Mstar)[1]
# Molecular weight
m = match_units(m, pynbody.units.m_p)[0]
# Maximum R to calculate sigma at (needed for the exponential cutoff region)
Rmax = rmax*Rd
# Q calculation parameters:
G = SimArray([1.0],'G')
kB = SimArray([1.0],'k')
# Initialize stuff
A = SimArray(1.0,'Msol')/(2*np.pi*np.power(Rd,2))
# dflemin3 Nov. 4, 2015
# Made units more explicit via SimArrays
r_units = Rd.units
R = SimArray(np.linspace(0,Rmax,n_points),r_units)
r = R/Rd
# Calculate sigma
# Powerlaw
#dflemin3 edit 06/10/2015: Try powerlaw of the form sigma ~ r^power
sigma = A*np.power(r,power)
sigma[0] = 0.0
# Exterior cutoff
sigma[r>1] *= np.exp(-(r[r>1] - 1)**2 / (2*cutlength**2))
# Interior cutoff
sigma[r<rin] *= smoothstep(r[r<rin],degree=21,rescale=True)
# Calculate Q
Q = np.sqrt(Mstar*gamma*kB*T(R)/(G*m*R**3))/(np.pi*sigma)
Q.convert_units('1')
# Rescale sigma to meet the minimum Q requirement
sigma *= Q.min()/Qmin
# Calculate Q
Q = np.sqrt(Mstar*gamma*kB*T(R)/(G*m*R**3))/(np.pi*sigma)
Q.convert_units('1')
return R, sigma
def MQWS(settings, T):
"""
Generates a surface density profile as the per method used in Mayer, Quinn,
Wadsley, and Stadel 2004
** ARGUMENTS **
NOTE: if units are not supplied, assumed units are AU, Msol
settings : IC settings
settings like those contained in an IC object (see ICgen_settings.py)
T : callable
A function to calculate temperature as a function of radius
** RETURNS **
r : SimArray
Radii at which sigma is calculated
sigma : SimArray
Surface density profile as a function of R
"""
# Q calculation parameters:
G = SimArray([1.0],'G')
kB = SimArray([1.0],'k')
# Load in settings
n_points = settings.sigma.n_points
rin = settings.sigma.rin
rout = settings.sigma.rout
rmax = settings.sigma.rmax
Qmin = settings.sigma.Qmin
m = settings.physical.m
Mstar = settings.physical.M
#m_disk = settings.sigma.m_disk
rin = match_units(pynbody.units.au, rin)[1]
rout = match_units(pynbody.units.au, rout)[1]
#m_disk = match_units(pynbody.units.Msol, m_disk)[1]
if rmax is None:
rmax = 2.5 * rout
else:
rmax = match_units(pynbody.units.au, rmax)[1]
r = np.linspace(0, rmax, n_points)
a = (rin/r).in_units('1')
b = (r/rout).in_units('1')
sigma = (np.exp(-a**2 - b**2)/r) * Mstar.units/r.units
# Calculate Q
Q = np.sqrt(Mstar*kB*T(r)/(G*m*r**3))/(np.pi*sigma)
Q.convert_units('1')
sigma *= np.nanmin(Q)/Qmin
# Remove all nans
sigma[np.isnan(sigma)] = 0.0
return r, sigma
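# Note (added for clarity, not in the original source): both powerlaw() and
# MQWS() rescale sigma by Q.min()/Qmin (or nanmin(Q)/Qmin). Since the Toomre
# parameter Q is inversely proportional to sigma, multiplying sigma by
# Q.min()/Qmin makes the minimum of the rescaled Q profile equal to the
# requested Qmin.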
|
import sys
from setuptools import setup
tests_require = ["nose>=1.0"]
if sys.version_info < (3,0):
tests_require = ["nose>=1.0", "mock"]
setup(
name="unitils",
version="0.1.2",
author="iLoveTux",
author_email="[email protected]",
description="Cross platform utilities I have found to be incredibly useful",
license="GPLv3",
keywords="utility tools cli",
url="http://github.com/ilovetux/unitils",
packages=['unitils'],
install_requires=["colorama"],
entry_points={
"console_scripts": [
"cat.py=unitils.cli:cat",
"cp.py=unitils.cli:cp",
"find.py=unitils.cli:find",
"grep.py=unitils.cli:grep",
"head.py=unitils.cli:head",
"ls.py=unitils.cli:ls",
"mv.py=unitils.cli:mv",
"watch.py=unitils.cli:watch",
"wc.py=unitils.cli:wc",
"which.py=unitils.cli:which",
]
},
test_suite="nose.collector",
tests_require=tests_require,
classifiers=[
"Development Status :: 4 - Beta",
"Topic :: Utilities",
"License :: OSI Approved :: GNU General Public License v3 (GPLv3)",
],
)
|
'''
Created on Mar 10, 2015
Test Suite Created for the purpose of including a test suite inside the setup
file. This allows the user to run tests while setting up the module inside
python.
@author: harsnara
@change: 2015-03-10 First Draft.
'''
import unittest
from deadcheck.deadcheck import DeadcheckAPI
class TestDeadCheck(unittest.TestCase):
def setUp(self):
self.checker = DeadcheckAPI()
class TestDeadCheckAPIFailure(TestDeadCheck):
def runTest(self):
print "\nRunning Check For Failure Case\n"
self.urlObj = self.checker.amIDead('https://pypi.python.org/pypiy')
self.dead = self.urlObj.isBroken()
print self.urlObj.info()
self.failUnless(self.dead, 'Invalid URL Test Failed.')
def tearDown(self):
print "\nCleaning up Failure Test Case\n"
class TestDeadCheckAPIPass(TestDeadCheck):
def runTest(self):
print "\nRunning Check for Pass Case\n"
self.urlObj = self.checker.amIDead('https://pypi.python.org/pypi')
self.dead = self.urlObj.isBroken()
print self.urlObj.info()
self.failIf(self.dead, 'Valid URL Test Failed.')
def tearDown(self):
print "\nCleaning Up pass Test Case\n"
def suite():
suite = unittest.TestSuite()
suite.addTest(TestDeadCheckAPIFailure())
suite.addTest(TestDeadCheckAPIPass())
return suite
if __name__ == '__main__' :
runner = unittest.TextTestRunner()
test_suite = suite()
    runner.run(test_suite)
|
# -*- coding: utf-8 -*-
"""Module used to launch rating dialogues and send ratings to Trakt"""
import xbmc
import xbmcaddon
import xbmcgui
import utilities as utils
import globals
import logging
logger = logging.getLogger(__name__)
__addon__ = xbmcaddon.Addon("script.trakt")
def ratingCheck(media_type, summary_info, watched_time, total_time, playlist_length):
"""Check if a video should be rated and if so launches the rating dialog"""
logger.debug("Rating Check called for '%s'" % media_type)
if not utils.getSettingAsBool("rate_%s" % media_type):
logger.debug("'%s' is configured to not be rated." % media_type)
return
if summary_info is None:
logger.debug("Summary information is empty, aborting.")
return
watched = (watched_time / total_time) * 100
if watched >= utils.getSettingAsFloat("rate_min_view_time"):
if (playlist_length <= 1) or utils.getSettingAsBool("rate_each_playlist_item"):
rateMedia(media_type, summary_info)
else:
logger.debug("Rate each playlist item is disabled.")
else:
logger.debug("'%s' does not meet minimum view time for rating (watched: %0.2f%%, minimum: %0.2f%%)" % (media_type, watched, utils.getSettingAsFloat("rate_min_view_time")))
def rateMedia(media_type, itemsToRate, unrate=False, rating=None):
"""Launches the rating dialog"""
for summary_info in itemsToRate:
if not utils.isValidMediaType(media_type):
logger.debug("Not a valid media type")
return
elif 'user' not in summary_info:
logger.debug("No user data")
return
s = utils.getFormattedItemName(media_type, summary_info)
logger.debug("Summary Info %s" % summary_info)
if unrate:
rating = None
if summary_info['user']['ratings']['rating'] > 0:
rating = 0
            if rating is not None:
logger.debug("'%s' is being unrated." % s)
__rateOnTrakt(rating, media_type, summary_info, unrate=True)
else:
logger.debug("'%s' has not been rated, so not unrating." % s)
return
rerate = utils.getSettingAsBool('rate_rerate')
if rating is not None:
if summary_info['user']['ratings']['rating'] == 0:
logger.debug("Rating for '%s' is being set to '%d' manually." % (s, rating))
__rateOnTrakt(rating, media_type, summary_info)
else:
if rerate:
if not summary_info['user']['ratings']['rating'] == rating:
logger.debug("Rating for '%s' is being set to '%d' manually." % (s, rating))
__rateOnTrakt(rating, media_type, summary_info)
else:
utils.notification(utils.getString(32043), s)
logger.debug("'%s' already has a rating of '%d'." % (s, rating))
else:
utils.notification(utils.getString(32041), s)
logger.debug("'%s' is already rated." % s)
return
if summary_info['user']['ratings'] and summary_info['user']['ratings']['rating']:
if not rerate:
logger.debug("'%s' has already been rated." % s)
utils.notification(utils.getString(32041), s)
return
else:
logger.debug("'%s' is being re-rated." % s)
xbmc.executebuiltin('Dialog.Close(all, true)')
gui = RatingDialog(
"script-trakt-RatingDialog.xml",
__addon__.getAddonInfo('path'),
media_type=media_type,
media=summary_info,
rerate=rerate
)
gui.doModal()
if gui.rating:
rating = gui.rating
if rerate:
rating = gui.rating
if summary_info['user']['ratings'] and summary_info['user']['ratings']['rating'] > 0 and rating == summary_info['user']['ratings']['rating']:
rating = 0
if rating == 0 or rating == "unrate":
__rateOnTrakt(rating, gui.media_type, gui.media, unrate=True)
else:
__rateOnTrakt(rating, gui.media_type, gui.media)
else:
logger.debug("Rating dialog was closed with no rating.")
del gui
#Reset rating and unrate for multi part episodes
unrate=False
rating=None
def __rateOnTrakt(rating, media_type, media, unrate=False):
logger.debug("Sending rating (%s) to Trakt.tv" % rating)
params = media
if utils.isMovie(media_type):
key = 'movies'
params['rating'] = rating
elif utils.isShow(media_type):
key = 'shows'
params['rating'] = rating
elif utils.isSeason(media_type):
key = 'shows'
params['seasons'] = [{'rating': rating, 'number': media['season']}]
elif utils.isEpisode(media_type):
key = 'episodes'
params['rating'] = rating
else:
return
root = {key: [params]}
if not unrate:
data = globals.traktapi.addRating(root)
else:
data = globals.traktapi.removeRating(root)
if data:
s = utils.getFormattedItemName(media_type, media)
if 'not_found' in data and not data['not_found']['movies'] and not data['not_found']['episodes'] and not data['not_found']['shows']:
if not unrate:
utils.notification(utils.getString(32040), s)
else:
utils.notification(utils.getString(32042), s)
else:
utils.notification(utils.getString(32044), s)
class RatingDialog(xbmcgui.WindowXMLDialog):
buttons = {
11030: 1,
11031: 2,
11032: 3,
11033: 4,
11034: 5,
11035: 6,
11036: 7,
11037: 8,
11038: 9,
11039: 10
}
focus_labels = {
11030: 32028,
11031: 32029,
11032: 32030,
11033: 32031,
11034: 32032,
11035: 32033,
11036: 32034,
11037: 32035,
11038: 32036,
11039: 32027
}
def __init__(self, xmlFile, resourcePath, forceFallback=False, media_type=None, media=None, rerate=False):
self.media_type = media_type
self.media = media
self.rating = None
self.rerate = rerate
self.default_rating = utils.getSettingAsInt('rating_default')
def onInit(self):
s = utils.getFormattedItemName(self.media_type, self.media)
self.getControl(10012).setLabel(s)
rateID = 11029 + self.default_rating
if self.rerate and self.media['user']['ratings'] and int(self.media['user']['ratings']['rating']) > 0:
rateID = 11029 + int(self.media['user']['ratings']['rating'])
self.setFocus(self.getControl(rateID))
def onClick(self, controlID):
if controlID in self.buttons:
self.rating = self.buttons[controlID]
self.close()
def onFocus(self, controlID):
if controlID in self.focus_labels:
s = utils.getString(self.focus_labels[controlID])
if self.rerate:
if self.media['user']['ratings'] and self.media['user']['ratings']['rating'] == self.buttons[controlID]:
if utils.isMovie(self.media_type):
s = utils.getString(32037)
elif utils.isShow(self.media_type):
s = utils.getString(32038)
elif utils.isEpisode(self.media_type):
s = utils.getString(32039)
elif utils.isSeason(self.media_type):
s = utils.getString(32132)
else:
pass
self.getControl(10013).setLabel(s)
else:
self.getControl(10013).setLabel('')
|
# -*- coding: utf-8 -*-
#
# Flask-FS documentation build configuration file, created by
# sphinx-quickstart on Mon Oct 6 12:44:29 2014.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
import alabaster
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('..'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.doctest',
'sphinx.ext.intersphinx',
'sphinx.ext.viewcode',
'sphinx.ext.todo',
'alabaster',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Flask-FS'
copyright = u'2016, Axel Haustant'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The full version, including alpha/beta/rc tags.
release = __import__('flask_fs').__version__
# The short X.Y version.
version = '.'.join(release.split('.')[:2])
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'flaskfs'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
html_theme_options = {
# 'logo': 'logo-512.png',
# 'logo_name': True,
# 'touch_icon': 'apple-180.png',
'github_user': 'noirbizarre',
'github_repo': 'flask-fs',
'github_banner': True,
'show_related': True,
# 'page_width': '',
# 'sidebar_width': '260px',
'favicons': {
64: 'favicon-64.png',
128: 'favicon-128.png',
196: 'favicon-196.png',
},
'badges': [(
# Gitter.im
'https://badges.gitter.im/Join%20Chat.svg',
'https://gitter.im/noirbizarre/flask-fs',
'Join the chat at https://gitter.im/noirbizarre/flask-fs'
), (
# Github Fork
'https://img.shields.io/github/forks/noirbizarre/flask-fs.svg?style=social&label=Fork',
'https://github.com/noirbizarre/flask-fs',
'Github repository',
), (
# Github issues
'https://img.shields.io/github/issues-raw/noirbizarre/flask-fs.svg',
'https://github.com/noirbizarre/flask-fs/issues',
'Github repository',
), (
# License
'https://img.shields.io/github/license/noirbizarre/flask-fs.svg',
'https://github.com/noirbizarre/flask-fs',
'License',
), (
# PyPI
'https://img.shields.io/pypi/v/flask-fs.svg',
'https://pypi.python.org/pypi/flask-fs',
'Latest version on PyPI'
)]
}
# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = [alabaster.get_path(), '_themes']
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
html_sidebars = {
'**': [
'about.html',
'navigation.html',
'relations.html',
'searchbox.html',
'donate.html',
'badges.html',
]
}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'Flask-FSdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index', 'Flask-FS.tex', u'Flask-FS Documentation',
u'Axel Haustant', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'flask-storages', u'Flask-FS Documentation',
[u'Axel Haustant'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'Flask-FS', u'Flask-FS Documentation',
u'Axel Haustant', 'Flask-FS', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {
'flask': ('http://flask.pocoo.org/docs/', None),
'python': ('http://docs.python.org/', None),
'werkzeug': ('http://werkzeug.pocoo.org/docs/', None),
'boto': ('https://boto3.readthedocs.org/en/latest/', None),
'mongo': ('http://docs.mongoengine.org/', None),
}
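# -- Local build hint -------------------------------------------------------
# Not part of the generated configuration: assuming this conf.py lives in a
# docs/ directory, the HTML documentation can typically be built from the
# project root with
#
#     sphinx-build -b html docs docs/_build/html
#
# (or simply ``make html`` if the sphinx-quickstart Makefile is present).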
|
""" A parser for chord progressions in the Weimar Jazzomat CSV format """
import re
from itertools import chain
from typing import Tuple, Optional
from music import ABCNote, ChordType, Chord, ChordProgression, Note
_chordtype_mapping = {
'': ChordType.maj,
'6': ChordType.maj,
'j7': ChordType.maj,
'-7': ChordType.m7,
'-': ChordType.m7,
'-6': ChordType.mmaj,
'-j': ChordType.mmaj,
'+': ChordType.aug7,
'+7': ChordType.aug7,
'+j': ChordType.augmaj,
'sus7': ChordType(7),
'o': ChordType.dim,
'o7': ChordType.dim,
}
def _capitalize(s: str):
return s[0].upper() + s[1:]
# Reverse ordering of the items inside the big OR is necessary to match longer ones first
_sre_roots = '|'.join(sorted(map(_capitalize, ABCNote.mapping().keys()), reverse=True))
_sre_types = '|'.join(sorted(chain(ChordType.__members__, _chordtype_mapping.keys()), reverse=True)).replace('||', '|')
_sre_chord = "({})({})?[913b#]*(/({}))?".format(_sre_roots, _sre_types, _sre_roots).replace('+', r'\+')
_sre_optional_chord = r'({}|NC| )'.format(_sre_chord)
_sre_measure = r'\|{}{{4}}(?=\|)'.format(_sre_optional_chord)
_re_roots = re.compile(_sre_roots)
_re_chord = re.compile(_sre_chord)
_re_optional_chord = re.compile(_sre_optional_chord)
_re_measure = re.compile(_sre_measure)
re_invalid_measure = re.compile(r'\|(NC| )+\|')
def parse_key(s: str) -> ABCNote:
""" Parse a key signature. The Jazzomat format includes maj and min but we discard that. """
return ABCNote.from_string(_re_roots.match(s).group(0))
def parse_chordtype(s: str) -> ChordType:
""" Parse a chord type in the Weimar Jazzomat format """
if s in ChordType.__members__:
return ChordType[s]
elif s in _chordtype_mapping:
return _chordtype_mapping[s]
else:
raise KeyError(s + " chord unknown")
def parse_chord(s: str) -> Optional[Chord]:
"""
:return: None if the chord is invalid
"""
match = re.match(_re_chord, s)
if match:
return Chord(root=ABCNote.from_string(match.group(1)), typ=parse_chordtype(match.group(2)))
else:
return None
def parse_measure(s: str) -> Tuple[(Chord,) * Note.meter]:
""" Parse a measure.
:return: four chords. Spaces translate to the chord before the space. """
ret = []
for match in re.finditer(_re_optional_chord, s):
if match.group(0) in [' ', 'NC']:
ret.append(ret[-1])
else:
ret.append(Chord(root=ABCNote.from_string(match.group(2)), typ=parse_chordtype(match.group(3))))
assert len(ret) == Note.meter
return tuple(ret)
def parse_changes(changes: str, key: str) -> ChordProgression:
ret = ChordProgression(parse_key(key))
for m in re.finditer(_re_measure, changes):
ret += parse_measure(m.group(0))
return ret
class SongMetadata:
def __init__(self, name: str, chord_changes: str, changes: ChordProgression=None, **_):
self.name = name
self.changes_str = chord_changes
self.changes = changes
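# ---------------------------------------------------------------------------
# Illustrative usage (a sketch only; the exact note and chord-type spellings
# come from the `music` module, so the literals below are assumptions):
#
#     chord = parse_chord("C-7")      # e.g. Chord(root=<C>, typ=ChordType.m7)
#     prog = parse_changes("|C7   |F7   |", "C")
#
# Each measure between '|' bars holds four chord slots (Note.meter); a blank
# slot or "NC" repeats the previous chord, and measures consisting only of
# blanks/"NC" are matched by `re_invalid_measure`.
# ---------------------------------------------------------------------------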
|
#!/usr/bin/env python
# asciinator.py
#
# Copyright 2014 Christian Diener <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
#
#
from __future__ import print_function # for python2 compat
import sys
from PIL import Image
import numpy as np
# ascii chars sorted by "density"
chars = np.asarray(list(' .,:;irsXA253hMHGS#9B&@'))
# check command line arguments
if len(sys.argv) != 4:
print( 'Usage: asciinator.py image scale factor' )
sys.exit()
# set basic program parameters
# f = filename, SC = scale, GCF = gamma correction factor, WCF = width correction factor
f, SC, GCF, WCF = sys.argv[1], float(sys.argv[2]), float(sys.argv[3]), 7.0/4.0
# open, scale and normalize image by pixel intensities
img = Image.open(f)
S = (int(img.size[0]*SC*WCF), int(img.size[1]*SC))
img = np.sum( np.asarray(img.resize(S), dtype="float"), axis=2)
img -= img.min()
img = (1.0 - img/img.max())**GCF*(chars.size-1)
# Assemble and print ascii art
print( "\n".join(("".join(r) for r in chars[img.astype(int)])))
print()
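# Example invocation (the file name is a placeholder):
#     python asciinator.py photo.jpg 0.2 2.0
# where 0.2 scales the image to 20% of its size and 2.0 is the gamma
# correction applied before mapping intensities onto the character ramp.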
|
#
# Copyright (C) 2011 - 2013 Satoru SATOH <ssato @ redhat.com>
# License: MIT
#
"""Misc parsers"""
import re
INT_PATTERN = re.compile(r"^(\d|([1-9]\d+))$")
BOOL_PATTERN = re.compile(r"^(true|false)$", re.I)
STR_PATTERN = re.compile(r"^['\"](.*)['\"]$")
def parse_single(s):
"""
Very simple parser to parse expressions representing single values.
:param s: a string to parse
:return: Int | Bool | String
>>> parse_single(None)
''
>>> parse_single("0")
0
>>> parse_single("123")
123
>>> parse_single("True")
True
>>> parse_single("a string")
'a string'
>>> parse_single("0.1")
'0.1'
>>> parse_single(" a string contains extra whitespaces ")
'a string contains extra whitespaces'
"""
def matched(pat, s):
return pat.match(s) is not None
if s is None:
return ''
s = s.strip()
if not s:
return ''
if matched(BOOL_PATTERN, s):
return s.lower() == "true"
if matched(INT_PATTERN, s):
return int(s)
if matched(STR_PATTERN, s):
return s[1:-1]
return s
def parse_list(s, sep=","):
"""
Simple parser to parse expressions representing list values.
:param s: a string to parse
:param sep: Char to separate items of list
:return: [Int | Bool | String]
>>> parse_list("")
[]
>>> parse_list("1")
[1]
>>> parse_list("a,b")
['a', 'b']
>>> parse_list("1,2")
[1, 2]
>>> parse_list("a,b,")
['a', 'b']
"""
return [parse_single(x) for x in s.split(sep) if x]
def parse_attrlist_0(s, avs_sep=":", vs_sep=",", as_sep=";"):
"""
Simple parser to parse expressions in the form of
[ATTR1:VAL0,VAL1,...;ATTR2:VAL0,VAL2,..].
:param s: input string
:param avs_sep: char to separate attribute and values
:param vs_sep: char to separate values
:param as_sep: char to separate attributes
:return: a list of tuples of (key, value | [value])
where key = (Int | String | ...),
value = (Int | Bool | String | ...) | [Int | Bool | String | ...]
>>> parse_attrlist_0("a:1")
[('a', 1)]
>>> parse_attrlist_0("a:1;b:xyz")
[('a', 1), ('b', 'xyz')]
>>> parse_attrlist_0("requires:bash,zsh")
[('requires', ['bash', 'zsh'])]
>>> parse_attrlist_0("obsoletes:sysdata;conflicts:sysdata-old")
[('obsoletes', 'sysdata'), ('conflicts', 'sysdata-old')]
"""
def attr_and_values(s):
for rel in parse_list(s, as_sep):
if avs_sep not in rel or rel.endswith(avs_sep):
continue
(_attr, _values) = parse_list(rel, avs_sep)
if vs_sep in str(_values):
_values = parse_list(_values, vs_sep)
if _values:
yield (_attr, _values)
return [(a, vs) for a, vs in attr_and_values(s)]
def parse_attrlist(s, avs_sep=":", vs_sep=",", as_sep=";"):
"""
Simple parser to parse expressions in the form of
[ATTR1:VAL0,VAL1,...;ATTR2:VAL0,VAL2,..].
:param s: input string
:param avs_sep: char to separate attribute and values
:param vs_sep: char to separate values
:param as_sep: char to separate attributes
>>> parse_attrlist("requires:bash,zsh")
{'requires': ['bash', 'zsh']}
"""
return dict(parse_attrlist_0(s, avs_sep, vs_sep, as_sep))
def parse(s, lsep=",", avsep=":", vssep=",", avssep=";"):
"""Generic parser"""
if avsep in s:
return parse_attrlist(s, avsep, vssep, avssep)
elif lsep in s:
return parse_list(s, lsep)
else:
return parse_single(s)
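# Illustrative behaviour of the generic parser (derived from the helpers
# above):
#
#     parse("a:1;b:x,y")  -> {'a': 1, 'b': ['x', 'y']}
#     parse("1,2,3")      -> [1, 2, 3]
#     parse("xyz")        -> 'xyz'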
# vim:sw=4:ts=4:et:
|
#!/usr/bin/env python3
# Copyright (c) 2014-2018 The Energi Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
# Copyright (c) 2014-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Test REST interface
#
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
from struct import *
from io import BytesIO
from codecs import encode
import http.client
import urllib.parse
def deser_uint256(f):
r = 0
for i in range(8):
t = unpack(b"<I", f.read(4))[0]
r += t << (i * 32)
return r
#allows simple http get calls
def http_get_call(host, port, path, response_object = 0):
conn = http.client.HTTPConnection(host, port)
conn.request('GET', path)
if response_object:
return conn.getresponse()
return conn.getresponse().read().decode('utf-8')
#allows simple http post calls with a request body
def http_post_call(host, port, path, requestdata = '', response_object = 0):
conn = http.client.HTTPConnection(host, port)
conn.request('POST', path, requestdata)
if response_object:
return conn.getresponse()
return conn.getresponse().read()
class RESTTest (BitcoinTestFramework):
FORMAT_SEPARATOR = "."
def __init__(self):
super().__init__()
self.setup_clean_chain = True
self.num_nodes = 3
def setup_network(self, split=False):
self.nodes = start_nodes(self.num_nodes, self.options.tmpdir)
connect_nodes_bi(self.nodes,0,1)
connect_nodes_bi(self.nodes,1,2)
connect_nodes_bi(self.nodes,0,2)
self.is_network_split=False
self.sync_all()
def run_test(self):
url = urllib.parse.urlparse(self.nodes[0].url)
print("Mining blocks...")
self.nodes[0].generate(1)
self.sync_all()
self.nodes[2].generate(100)
self.sync_all()
assert_equal(self.nodes[0].getbalance(), 500)
txid = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 0.1)
self.sync_all()
self.nodes[2].generate(1)
self.sync_all()
bb_hash = self.nodes[0].getbestblockhash()
assert_equal(self.nodes[1].getbalance(), Decimal("0.1")) #balance now should be 0.1 on node 1
# load the latest 0.1 tx over the REST API
json_string = http_get_call(url.hostname, url.port, '/rest/tx/'+txid+self.FORMAT_SEPARATOR+"json")
json_obj = json.loads(json_string)
vintx = json_obj['vin'][0]['txid'] # get the vin to later check for utxo (should be spent by then)
# get n of 0.1 outpoint
n = 0
for vout in json_obj['vout']:
if vout['value'] == 0.1:
n = vout['n']
######################################
# GETUTXOS: query a unspent outpoint #
######################################
json_request = '/checkmempool/'+txid+'-'+str(n)
json_string = http_get_call(url.hostname, url.port, '/rest/getutxos'+json_request+self.FORMAT_SEPARATOR+'json')
json_obj = json.loads(json_string)
#check chainTip response
assert_equal(json_obj['chaintipHash'], bb_hash)
#make sure there is one utxo
assert_equal(len(json_obj['utxos']), 1)
assert_equal(json_obj['utxos'][0]['value'], 0.1)
################################################
# GETUTXOS: now query a already spent outpoint #
################################################
json_request = '/checkmempool/'+vintx+'-0'
json_string = http_get_call(url.hostname, url.port, '/rest/getutxos'+json_request+self.FORMAT_SEPARATOR+'json')
json_obj = json.loads(json_string)
#check chainTip response
assert_equal(json_obj['chaintipHash'], bb_hash)
#make sure there is no utxo in the response because this outpoint has been spent
assert_equal(len(json_obj['utxos']), 0)
#check bitmap
assert_equal(json_obj['bitmap'], "0")
##################################################
# GETUTXOS: now check both with the same request #
##################################################
json_request = '/checkmempool/'+txid+'-'+str(n)+'/'+vintx+'-0'
json_string = http_get_call(url.hostname, url.port, '/rest/getutxos'+json_request+self.FORMAT_SEPARATOR+'json')
json_obj = json.loads(json_string)
assert_equal(len(json_obj['utxos']), 1)
assert_equal(json_obj['bitmap'], "10")
#test binary response
bb_hash = self.nodes[0].getbestblockhash()
binaryRequest = b'\x01\x02'
binaryRequest += hex_str_to_bytes(txid)
binaryRequest += pack("i", n)
binaryRequest += hex_str_to_bytes(vintx)
binaryRequest += pack("i", 0)
bin_response = http_post_call(url.hostname, url.port, '/rest/getutxos'+self.FORMAT_SEPARATOR+'bin', binaryRequest)
output = BytesIO()
output.write(bin_response)
output.seek(0)
chainHeight = unpack("i", output.read(4))[0]
hashFromBinResponse = hex(deser_uint256(output))[2:].zfill(64)
assert_equal(bb_hash, hashFromBinResponse) #check if getutxo's chaintip during calculation was fine
assert_equal(chainHeight, 102) #chain height must be 102
############################
# GETUTXOS: mempool checks #
############################
# do a tx and don't sync
txid = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 0.1)
json_string = http_get_call(url.hostname, url.port, '/rest/tx/'+txid+self.FORMAT_SEPARATOR+"json")
json_obj = json.loads(json_string)
vintx = json_obj['vin'][0]['txid'] # get the vin to later check for utxo (should be spent by then)
# get n of 0.1 outpoint
n = 0
for vout in json_obj['vout']:
if vout['value'] == 0.1:
n = vout['n']
json_request = '/'+txid+'-'+str(n)
json_string = http_get_call(url.hostname, url.port, '/rest/getutxos'+json_request+self.FORMAT_SEPARATOR+'json')
json_obj = json.loads(json_string)
assert_equal(len(json_obj['utxos']), 0) #there should be no outpoint because the tx is still only in the mempool and /checkmempool was not used
json_request = '/checkmempool/'+txid+'-'+str(n)
json_string = http_get_call(url.hostname, url.port, '/rest/getutxos'+json_request+self.FORMAT_SEPARATOR+'json')
json_obj = json.loads(json_string)
assert_equal(len(json_obj['utxos']), 1) #there should be an outpoint because the tx was just added to the mempool and /checkmempool is used
#do some invalid requests
json_request = '{"checkmempool'
response = http_post_call(url.hostname, url.port, '/rest/getutxos'+self.FORMAT_SEPARATOR+'json', json_request, True)
assert_equal(response.status, 400) #must be a 400 because we send an invalid json request
json_request = '{"checkmempool'
response = http_post_call(url.hostname, url.port, '/rest/getutxos'+self.FORMAT_SEPARATOR+'bin', json_request, True)
assert_equal(response.status, 400) #must be a 400 because we send an invalid bin request
response = http_post_call(url.hostname, url.port, '/rest/getutxos/checkmempool'+self.FORMAT_SEPARATOR+'bin', '', True)
assert_equal(response.status, 400) #must be a 400 because we send an invalid bin request
#test limits
json_request = '/checkmempool/'
for x in range(0, 20):
json_request += txid+'-'+str(n)+'/'
json_request = json_request.rstrip("/")
response = http_post_call(url.hostname, url.port, '/rest/getutxos'+json_request+self.FORMAT_SEPARATOR+'json', '', True)
assert_equal(response.status, 400) #must be a 400 because we are exceeding the limits
json_request = '/checkmempool/'
for x in range(0, 15):
json_request += txid+'-'+str(n)+'/'
json_request = json_request.rstrip("/")
response = http_post_call(url.hostname, url.port, '/rest/getutxos'+json_request+self.FORMAT_SEPARATOR+'json', '', True)
assert_equal(response.status, 200) #must be a 200 because we are within the limits
self.nodes[0].generate(1) #generate block to not affect upcoming tests
self.sync_all()
################
# /rest/block/ #
################
# check binary format
response = http_get_call(url.hostname, url.port, '/rest/block/'+bb_hash+self.FORMAT_SEPARATOR+"bin", True)
assert_equal(response.status, 200)
assert_greater_than(int(response.getheader('content-length')), 80)
response_str = response.read()
# compare with block header
response_header = http_get_call(url.hostname, url.port, '/rest/headers/1/'+bb_hash+self.FORMAT_SEPARATOR+"bin", True)
assert_equal(response_header.status, 200)
assert_equal(int(response_header.getheader('content-length')), 80)
response_header_str = response_header.read()
assert_equal(response_str[0:80], response_header_str)
# check block hex format
response_hex = http_get_call(url.hostname, url.port, '/rest/block/'+bb_hash+self.FORMAT_SEPARATOR+"hex", True)
assert_equal(response_hex.status, 200)
assert_greater_than(int(response_hex.getheader('content-length')), 160)
response_hex_str = response_hex.read()
assert_equal(encode(response_str, "hex_codec")[0:160], response_hex_str[0:160])
# compare with hex block header
response_header_hex = http_get_call(url.hostname, url.port, '/rest/headers/1/'+bb_hash+self.FORMAT_SEPARATOR+"hex", True)
assert_equal(response_header_hex.status, 200)
assert_greater_than(int(response_header_hex.getheader('content-length')), 160)
response_header_hex_str = response_header_hex.read()
assert_equal(response_hex_str[0:160], response_header_hex_str[0:160])
assert_equal(encode(response_header_str, "hex_codec")[0:160], response_header_hex_str[0:160])
# check json format
block_json_string = http_get_call(url.hostname, url.port, '/rest/block/'+bb_hash+self.FORMAT_SEPARATOR+'json')
block_json_obj = json.loads(block_json_string)
assert_equal(block_json_obj['hash'], bb_hash)
# compare with json block header
response_header_json = http_get_call(url.hostname, url.port, '/rest/headers/1/'+bb_hash+self.FORMAT_SEPARATOR+"json", True)
assert_equal(response_header_json.status, 200)
response_header_json_str = response_header_json.read().decode('utf-8')
json_obj = json.loads(response_header_json_str, parse_float=Decimal)
assert_equal(len(json_obj), 1) #ensure that there is one header in the json response
assert_equal(json_obj[0]['hash'], bb_hash) #request/response hash should be the same
#compare with normal RPC block response
rpc_block_json = self.nodes[0].getblock(bb_hash)
assert_equal(json_obj[0]['hash'], rpc_block_json['hash'])
assert_equal(json_obj[0]['confirmations'], rpc_block_json['confirmations'])
assert_equal(json_obj[0]['height'], rpc_block_json['height'])
assert_equal(json_obj[0]['version'], rpc_block_json['version'])
assert_equal(json_obj[0]['merkleroot'], rpc_block_json['merkleroot'])
assert_equal(json_obj[0]['time'], rpc_block_json['time'])
assert_equal(json_obj[0]['nonce'], rpc_block_json['nonce'])
assert_equal(json_obj[0]['bits'], rpc_block_json['bits'])
assert_equal(json_obj[0]['difficulty'], rpc_block_json['difficulty'])
assert_equal(json_obj[0]['chainwork'], rpc_block_json['chainwork'])
assert_equal(json_obj[0]['previousblockhash'], rpc_block_json['previousblockhash'])
#see if we can get 5 headers in one response
self.nodes[1].generate(5)
self.sync_all()
response_header_json = http_get_call(url.hostname, url.port, '/rest/headers/5/'+bb_hash+self.FORMAT_SEPARATOR+"json", True)
assert_equal(response_header_json.status, 200)
response_header_json_str = response_header_json.read().decode('utf-8')
json_obj = json.loads(response_header_json_str)
assert_equal(len(json_obj), 5) #now we should have 5 header objects
# do tx test
tx_hash = block_json_obj['tx'][0]['txid']
json_string = http_get_call(url.hostname, url.port, '/rest/tx/'+tx_hash+self.FORMAT_SEPARATOR+"json")
json_obj = json.loads(json_string)
assert_equal(json_obj['txid'], tx_hash)
# check hex format response
hex_string = http_get_call(url.hostname, url.port, '/rest/tx/'+tx_hash+self.FORMAT_SEPARATOR+"hex", True)
assert_equal(hex_string.status, 200)
assert_greater_than(int(response.getheader('content-length')), 10)
# check block tx details
# let's make 3 tx and mine them on node 1
txs = []
txs.append(self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 11))
txs.append(self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 11))
txs.append(self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 11))
self.sync_all()
# check that there are exactly 3 transactions in the TX memory pool before generating the block
json_string = http_get_call(url.hostname, url.port, '/rest/mempool/info'+self.FORMAT_SEPARATOR+'json')
json_obj = json.loads(json_string)
assert_equal(json_obj['size'], 3)
# the size of the memory pool should be greater than 3x ~100 bytes
assert_greater_than(json_obj['bytes'], 300)
# check that our submitted transactions are in the TX memory pool
json_string = http_get_call(url.hostname, url.port, '/rest/mempool/contents'+self.FORMAT_SEPARATOR+'json')
json_obj = json.loads(json_string)
for tx in txs:
assert_equal(tx in json_obj, True)
# now mine the transactions
newblockhash = self.nodes[1].generate(1)
self.sync_all()
#check if the 3 tx show up in the new block
json_string = http_get_call(url.hostname, url.port, '/rest/block/'+newblockhash[0]+self.FORMAT_SEPARATOR+'json')
json_obj = json.loads(json_string)
for tx in json_obj['tx']:
if 'coinbase' not in tx['vin'][0]: #exclude coinbase
assert_equal(tx['txid'] in txs, True)
#check the same but without tx details
json_string = http_get_call(url.hostname, url.port, '/rest/block/notxdetails/'+newblockhash[0]+self.FORMAT_SEPARATOR+'json')
json_obj = json.loads(json_string)
for tx in txs:
assert_equal(tx in json_obj['tx'], True)
#test rest bestblock
bb_hash = self.nodes[0].getbestblockhash()
json_string = http_get_call(url.hostname, url.port, '/rest/chaininfo.json')
json_obj = json.loads(json_string)
assert_equal(json_obj['bestblockhash'], bb_hash)
if __name__ == '__main__':
RESTTest().main()
|
__author__ = 'Mirko Rossini'
import unittest
import shutil
from integrationtest_support import IntegrationTestSupport
from pybuilder.errors import BuildFailedException
from common import BUILD_FILE_TEMPLATE
class DjangoEnhancedPluginTest(IntegrationTestSupport):
def test_django_test(self):
# self.set_tmp_dir()
self.write_build_file(BUILD_FILE_TEMPLATE.format(apps=['testapp']))
shutil.copytree('src/integrationtest/resources/testproject/', self.full_path('src/main/python/testproject/'))
reactor = self.prepare_reactor()
try:
reactor.build()
raise self.failureException("Build should fail due to django_tests, but it's successful")
except BuildFailedException:
# We know tests are failing
pass
self.assert_directory_exists('target/reports')
self.assert_file_exists('target/reports/django_tests')
self.assert_file_exists('target/reports/django_tests.err')
self.assert_file_contains('target/reports/django_tests.err', 'FAIL')
self.assert_file_contains('target/reports/django_tests.err', 'AssertionError: 1 != 2')
if __name__ == "__main__":
unittest.main()
|
# Copyright 2019 Tecnativa - Victor M.M. Torres
# Copyright 2019 Tecnativa - Pedro M. Baeza
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl).
from odoo import api, fields, models
class BusinessRequirement(models.Model):
_inherit = 'business.requirement'
sale_order_ids = fields.One2many(
comodel_name='sale.order',
inverse_name='business_requirement_id',
string='Sales Orders',
)
sale_order_count = fields.Integer(
string='Sales Orders Count',
compute='_compute_sale_order_count',
)
@api.multi
@api.depends('sale_order_ids')
def _compute_sale_order_count(self):
groups = self.env['sale.order'].read_group(
domain=[('business_requirement_id', 'in', self.ids)],
fields=['business_requirement_id'],
groupby=['business_requirement_id'],
)
data = {
x['business_requirement_id'][0]: x['business_requirement_id_count']
for x in groups
}
for rec in self:
rec.sale_order_count = data.get(rec.id, 0)
@api.multi
def open_orders(self):
action = self.env.ref('sale.action_quotations').read()[0]
if len(self) == 1:
action['context'] = {
'search_default_business_requirement_id': self.id,
}
else:
action['domain'] = [('business_requirement_id', 'in', self.ids)]
return action
|
# Copyright 2008-2014 Nokia Solutions and Networks
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from .charwidth import get_char_width
from .misc import seq2str2
from .unic import unic
_MAX_ASSIGN_LENGTH = 200
_MAX_ERROR_LINES = 40
_MAX_ERROR_LINE_LENGTH = 78
_ERROR_CUT_EXPLN = ' [ Message content over the limit has been removed. ]'
def cut_long_message(msg):
lines = msg.splitlines()
lengths = _count_line_lengths(lines)
if sum(lengths) <= _MAX_ERROR_LINES:
return msg
start = _prune_excess_lines(lines, lengths)
end = _prune_excess_lines(lines, lengths, from_end=True)
return '\n'.join(start + [_ERROR_CUT_EXPLN] + end)
def _prune_excess_lines(lines, lengths, from_end=False):
if from_end:
lines.reverse()
lengths.reverse()
ret = []
total = 0
# Use // (explicit int div) for Python 3 compatibility:
limit = _MAX_ERROR_LINES//2
for line, length in zip(lines[:limit], lengths[:limit]):
if total + length >= limit:
ret.append(_cut_long_line(line, total, from_end))
break
total += length
ret.append(line)
if from_end:
ret.reverse()
return ret
def _cut_long_line(line, used, from_end):
# Use // (explicit int div) for Python 3 compatibility:
available_lines = _MAX_ERROR_LINES//2 - used
available_chars = available_lines * _MAX_ERROR_LINE_LENGTH - 3
if len(line) > available_chars:
if not from_end:
line = line[:available_chars] + '...'
else:
line = '...' + line[-available_chars:]
return line
def _count_line_lengths(lines):
return [ _count_virtual_line_length(line) for line in lines ]
def _count_virtual_line_length(line):
if not line:
return 1
lines, remainder = divmod(len(line), _MAX_ERROR_LINE_LENGTH)
return lines if not remainder else lines + 1
def format_assign_message(variable, value, cut_long=True):
value = unic(value) if variable.startswith('$') else seq2str2(value)
if cut_long and len(value) > _MAX_ASSIGN_LENGTH:
value = value[:_MAX_ASSIGN_LENGTH] + '...'
return '%s = %s' % (variable, value)
def get_console_length(text):
return sum(get_char_width(char) for char in text)
def pad_console_length(text, width):
if width < 5:
width = 5
diff = get_console_length(text) - width
if diff > 0:
text = _lose_width(text, diff+3) + '...'
return _pad_width(text, width)
def _pad_width(text, width):
more = width - get_console_length(text)
return text + ' ' * more
def _lose_width(text, diff):
lost = 0
while lost < diff:
lost += get_console_length(text[-1])
text = text[:-1]
return text
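# Illustrative behaviour (a sketch, not part of the original module):
#
#     format_assign_message('${x}', 'a' * 300) -> '${x} = aaa...' (the value
#     is cut at _MAX_ASSIGN_LENGTH characters), while cut_long_message()
#     keeps roughly the first and last _MAX_ERROR_LINES/2 virtual lines of a
#     long message and inserts _ERROR_CUT_EXPLN between them.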
|
# -*- coding: utf-8 -*-
"""This module contains tests that exercise control of evmserverd service."""
import pytest
from cfme.utils import version
from cfme.utils.wait import wait_for_decorator
@pytest.yield_fixture(scope="module")
def start_evmserverd_after_module(appliance):
appliance.start_evm_service()
appliance.wait_for_web_ui()
yield
appliance.restart_evm_service()
appliance.wait_for_web_ui()
pytestmark = [pytest.mark.usefixtures("start_evmserverd_after_module")]
@pytest.mark.tier(1)
def test_evmserverd_stop(appliance, request):
"""Tests whether stopping the evmserverd really stops the CFME server processes.
Steps:
* Remember all server names from the ``service evmserverd status`` command.
* On 5.5+ use ``bin/rake evm:status`` instead, since the systemd status does not show
the servers; this also applies to the later status checks.
* Issue a ``service evmserverd stop`` command.
* Periodically check output of ``service evmserverd status`` that all servers are stopped.
* For 5.5+: Really call ``service evmserverd status`` and check that the mentions of
stopping the service are present.
"""
server_name_key = 'Server'
server_names = {server[server_name_key] for server in appliance.ssh_client.status["servers"]}
request.addfinalizer(appliance.start_evm_service)
appliance.stop_evm_service()
@wait_for_decorator(timeout="2m", delay=5)
def servers_stopped():
status = {
server[server_name_key]: server for server in appliance.ssh_client.status["servers"]
}
for server_name in server_names:
if status[server_name]["Status"] != "stopped":
return False
return True
status = appliance.ssh_client.run_command("systemctl status evmserverd")
assert "Stopped EVM server daemon" in status.output
assert "code=exited" in status.output
|
""" Builds membrane protein systems """
__version__ = '2.7.12'
__author__ = 'Robin Betz'
import sys
import inspect
#=========================================================================
# Currently supported output formats and description
supported_formats = {
"amber": ".prmtop and .inpcrd Amber PARM7 and RST7 formats",
"charmm": ".psf and .pdb Protein Structure File and PDB coordinates",
"desmond": "",
"gromacs": ".top and .gro GROMACS topology and coordinate files",
"lammps": ".dat file suitable for input to LAMMPS",
"mae": ".mae structure file, no parameters or atom types",
"pdb": "Protein Data Bank PDB file. Will not contain explicit bonds."
}
#=========================================================================
class DabbleError(Exception):
"""
An error message aimed at users, without a really long traceback.
"""
def __init__(self, msg):
super(DabbleError, self).__init__()
try:
ln = sys.exc_info()[-1].tb_lineno
except AttributeError:
ln = inspect.currentframe().f_back.f_lineno
print("\n\n\n{0.__name__} (line {1}): {2}\n".format(type(self), ln, msg))
#=========================================================================
from dabble.builder import DabbleBuilder
from dabble.fileutils import *
from dabble.vmdsilencer import VmdSilencer
#=========================================================================
|
#!/usr/bin/python
from troposphere import (
Template,
If,
NoValue,
Equals,
Ref,
Output,
Parameter
)
from troposphere.dynamodb import (
KeySchema,
AttributeDefinition,
Projection,
ProvisionedThroughput,
Table,
GlobalSecondaryIndex
)
template = Template()
template.set_description("Create two dynamodb tables with "
"conditional on-demand billing. One "
"with global secondary index and one without")
on_demand = template.add_parameter(Parameter(
"BillOnDemand",
Type="String",
Default="true",
AllowedPattern="(false|true)"
))
readunits = template.add_parameter(Parameter(
"ReadCapacityUnits",
Description="Provisioned read throughput",
Type="Number",
Default="5",
MinValue="5",
MaxValue="10000",
ConstraintDescription="should be between 5 and 10000"
))
writeunits = template.add_parameter(Parameter(
"WriteCapacityUnits",
Description="Provisioned write throughput",
Type="Number",
Default="10",
MinValue="5",
MaxValue="10000",
ConstraintDescription="should be between 5 and 10000"
))
template.add_condition("OnDemand", Equals(Ref(on_demand), "true"))
hashkeyname = template.add_parameter(Parameter(
"HashKeyElementName",
Description="HashType PrimaryKey Name",
Type="String",
AllowedPattern="[a-zA-Z0-9]*",
MinLength="1",
MaxLength="2048",
ConstraintDescription="must contain only alphanumeric characters"
))
hashkeytype = template.add_parameter(Parameter(
"HashKeyElementType",
Description="HashType PrimaryKey Type",
Type="String",
Default="S",
AllowedPattern="[S|N]",
MinLength="1",
MaxLength="1",
ConstraintDescription="must be either S or N"
))
# N.B. If you remove the provisioning section this works for
# LocalSecondaryIndexes as well.
tableIndexName = template.add_parameter(Parameter(
"TableIndexName",
Description="Table: Primary Key Field",
Type="String",
Default="id",
AllowedPattern="[a-zA-Z0-9]*",
MinLength="1",
MaxLength="2048",
ConstraintDescription="must contain only alphanumeric characters"
))
tableIndexDataType = template.add_parameter(Parameter(
"TableIndexDataType",
Description=" Table: Primary Key Data Type",
Type="String",
Default="S",
AllowedPattern="[S|N|B]",
MinLength="1",
MaxLength="1",
ConstraintDescription="S for string data, N for numeric data, or B for "
"binary data"
))
secondaryIndexHashName = template.add_parameter(Parameter(
"SecondaryIndexHashName",
Description="Secondary Index: Primary Key Field",
Type="String",
Default="tokenType",
AllowedPattern="[a-zA-Z0-9]*",
MinLength="1",
MaxLength="2048",
ConstraintDescription="must contain only alphanumeric characters"
))
secondaryIndexHashDataType = template.add_parameter(Parameter(
"SecondaryIndexHashDataType",
Description="Secondary Index: Primary Key Data Type",
Type="String",
Default="S",
AllowedPattern="[S|N|B]",
MinLength="1",
MaxLength="1",
ConstraintDescription="S for string data, N for numeric data, or B for "
"binary data"
))
secondaryIndexRangeName = template.add_parameter(Parameter(
"refreshSecondaryIndexRangeName",
Description="Secondary Index: Range Key Field",
Type="String",
Default="tokenUpdatedTime",
AllowedPattern="[a-zA-Z0-9]*",
MinLength="1",
MaxLength="2048",
ConstraintDescription="must contain only alphanumeric characters"
))
secondaryIndexRangeDataType = template.add_parameter(Parameter(
"SecondaryIndexRangeDataType",
Description="Secondary Index: Range Key Data Type",
Type="String",
Default="S",
AllowedPattern="[S|N|B]",
MinLength="1",
MaxLength="1",
ConstraintDescription="S for string data, N for numeric data, or B for "
"binary data"
))
myDynamoDB = template.add_resource(Table(
"myDynamoDBTable",
AttributeDefinitions=[
AttributeDefinition(
AttributeName=Ref(hashkeyname),
AttributeType=Ref(hashkeytype)
),
],
BillingMode=If("OnDemand", "PAY_PER_REQUEST", "PROVISIONED"),
ProvisionedThroughput=If("OnDemand", NoValue, ProvisionedThroughput(
ReadCapacityUnits=Ref(readunits),
WriteCapacityUnits=Ref(writeunits)
)),
KeySchema=[
KeySchema(
AttributeName=Ref(hashkeyname),
KeyType="HASH"
)
]
))
GSITable = template.add_resource(Table(
"GSITable",
AttributeDefinitions=[
AttributeDefinition(
AttributeName=Ref(tableIndexName),
AttributeType=Ref(tableIndexDataType)
),
AttributeDefinition(
AttributeName=Ref(secondaryIndexHashName),
AttributeType=Ref(secondaryIndexHashDataType)
),
AttributeDefinition(
AttributeName=Ref(secondaryIndexRangeName),
AttributeType=Ref(secondaryIndexRangeDataType)
)
],
BillingMode=If("OnDemand", "PAY_PER_REQUEST", "PROVISIONED"),
KeySchema=[
KeySchema(
AttributeName=Ref(tableIndexName),
KeyType="HASH"
)
],
ProvisionedThroughput=If("OnDemand", NoValue, ProvisionedThroughput(
ReadCapacityUnits=Ref(readunits),
WriteCapacityUnits=Ref(writeunits)
)),
GlobalSecondaryIndexes=[
GlobalSecondaryIndex(
IndexName="SecondaryIndex",
KeySchema=[
KeySchema(
AttributeName=Ref(secondaryIndexHashName),
KeyType="HASH"
),
KeySchema(
AttributeName=Ref(secondaryIndexRangeName),
KeyType="RANGE"
)
],
Projection=Projection(ProjectionType="ALL"),
ProvisionedThroughput=If("OnDemand", NoValue,
ProvisionedThroughput(
ReadCapacityUnits=Ref(readunits),
WriteCapacityUnits=Ref(writeunits)
)
)
)
]
))
template.add_output(Output(
"GSITable",
Value=Ref(GSITable),
Description="Table with a Global Secondary Index",
))
print(template.to_json())
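# The generated template can be deployed with the AWS CLI, for example
# (the file name, stack name and parameter values below are placeholders):
#
#     python dynamodb_tables.py > tables.json
#     aws cloudformation create-stack --stack-name my-tables \
#         --template-body file://tables.json \
#         --parameters ParameterKey=HashKeyElementName,ParameterValue=id
#
# Note that HashKeyElementName has no default, so it must be supplied.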
|
""" Manage learning from training data and making predictions on test data. """
import logging
__author__ = 'smartschat'
def learn(training_corpus, instance_extractor, perceptron):
""" Learn a model for coreference resolution from training data.
In particular, apply an instance/feature extractor to a training corpus and
employ a machine learning model to learn a weight vector from these
instances.
Args:
training_corpus (Corpus): The corpus to learn from.
instance_extractor (InstanceExtractor): The instance extractor that
defines the features and the structure of instances that are
extracted during training.
perceptron (Perceptron): A perceptron (including a decoder) that
learns from the instances extracted by ``instance_extractor``.
Returns:
A tuple consisting of
- **priors** (*dict(str,float)*): A prior weight for each label
in the graphs representing the instances,
- **weights** (*dict(str, array)*): A mapping of labels to weight
vectors. For each label ``l``, ``weights[l]`` contains weights
for each feature seen during training (for representing the
features we employ *feature hashing*). If the graphs employed are
not labeled, ``l`` is set to "+".
"""
logging.info("Learning.")
logging.info("\tExtracting instances and features.")
substructures, arc_information = instance_extractor.extract(
training_corpus)
logging.info("\tFitting model parameters.")
perceptron.fit(substructures, arc_information)
return perceptron.get_model()
def predict(testing_corpus,
instance_extractor,
perceptron,
coref_extractor):
""" According to a learned model, predict coreference information.
Args:
testing_corpus (Corpus): The corpus to predict coreference on.
instance_extractor (InstanceExtractor): The instance extractor that
defines the features and the structure of instances that are
extracted during testing.
perceptron (Perceptron): A perceptron (including a decoder) learned
from training data.
coref_extractor (function): An extractor for consolidating pairwise
predictions into coreference clusters.
Returns:
A tuple containing two dicts. The components are
- **mention_entity_mapping** (*dict(Mention, int)*): A mapping of
mentions to entity identifiers.
- **antecedent_mapping** (*dict(Mention, Mention)*): A mapping of
mentions to their antecedent (as determined by the
``coref_extractor``).
"""
logging.info("Predicting.")
logging.info("\tRemoving coreference annotations from corpus.")
for doc in testing_corpus:
doc.antecedent_decisions = {}
for mention in doc.system_mentions:
mention.attributes["antecedent"] = None
mention.attributes["set_id"] = None
logging.info("\tExtracting instances and features.")
substructures, arc_information = instance_extractor.extract(testing_corpus)
logging.info("\tDoing predictions.")
arcs, labels, scores = perceptron.predict(substructures, arc_information)
logging.info("\tClustering results.")
return coref_extractor(arcs, labels, scores, perceptron.get_coref_labels())
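# A rough end-to-end sketch (the objects follow the docstrings above; their
# concrete construction and corpus loading are assumptions here):
#
#     priors, weights = learn(training_corpus, instance_extractor, perceptron)
#     mention_entity_mapping, antecedent_mapping = predict(
#         testing_corpus, instance_extractor, perceptron, coref_extractor)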
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import os
import sys
dir = os.path.split(os.path.split(os.path.realpath(__file__))[0])[0]
dir = os.path.join(dir, 'scripts')
sys.path.append(dir)
from setup.load import LoadConfig
from utilities.prompt_format import item
from utilities.database import CleanTable, StoreRecords
from scraper.scrape import ExctractTotalPages, ScrapeEndpoint
__version__ = 'v.0.1.1'
def Main():
'''Program wrapper.'''
config = LoadConfig('dev.json')
print '%s Version: %s' % (item('prompt_bullet'), __version__)
for endpoint in config['endpoints']:
data = ScrapeEndpoint(endpoint, verbose=config['verbose'])
#
# Clean table and store new records.
#
CleanTable(endpoint['name'])
StoreRecords(data, endpoint['name'])
#
# Loading configuration and
# running program.
#
if __name__ == '__main__':
try:
Main()
print '%s VDC scraped successfully.' % item('prompt_success')
except Exception as e:
print '%s VDC scraper failed.' % item('prompt_error')
print e
|
# coding=utf-8
#
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_utils import strutils
from oslo_utils import uuidutils
from iotronic.common import exception
from iotronic.common import states
from iotronic.db import api as db_api
from iotronic.objects import base
from iotronic.objects import utils as obj_utils
class Board(base.IotronicObject):
# Version 1.0: Initial version
VERSION = '1.0'
dbapi = db_api.get_instance()
fields = {
'id': int,
'uuid': obj_utils.str_or_none,
'code': obj_utils.str_or_none,
'status': obj_utils.str_or_none,
'name': obj_utils.str_or_none,
'type': obj_utils.str_or_none,
'agent': obj_utils.str_or_none,
'owner': obj_utils.str_or_none,
'project': obj_utils.str_or_none,
'mobile': bool,
'config': obj_utils.dict_or_none,
'extra': obj_utils.dict_or_none,
}
def check_if_online(self):
if self.status != states.ONLINE:
raise exception.BoardNotConnected(board=self.uuid)
def is_online(self):
if self.status == states.ONLINE:
return True
return False
@staticmethod
def _from_db_object(board, db_board):
"""Converts a database entity to a formal object."""
for field in board.fields:
board[field] = db_board[field]
board.obj_reset_changes()
return board
@base.remotable_classmethod
def get(cls, context, board_id):
"""Find a board based on its id or uuid and return a Board object.
:param board_id: the id *or* uuid of a board.
:returns: a :class:`Board` object.
"""
if strutils.is_int_like(board_id):
return cls.get_by_id(context, board_id)
elif uuidutils.is_uuid_like(board_id):
return cls.get_by_uuid(context, board_id)
else:
raise exception.InvalidIdentity(identity=board_id)
@base.remotable_classmethod
def get_by_id(cls, context, board_id):
"""Find a board based on its integer id and return a Board object.
:param board_id: the id of a board.
:returns: a :class:`Board` object.
"""
db_board = cls.dbapi.get_board_by_id(board_id)
board = Board._from_db_object(cls(context), db_board)
return board
@base.remotable_classmethod
def get_by_uuid(cls, context, uuid):
"""Find a board based on uuid and return a Board object.
:param uuid: the uuid of a board.
:returns: a :class:`Board` object.
"""
db_board = cls.dbapi.get_board_by_uuid(uuid)
board = Board._from_db_object(cls(context), db_board)
return board
@base.remotable_classmethod
def get_by_code(cls, context, code):
"""Find a board based on name and return a Board object.
:param name: the logical name of a board.
:returns: a :class:`Board` object.
"""
db_board = cls.dbapi.get_board_by_code(code)
board = Board._from_db_object(cls(context), db_board)
return board
@base.remotable_classmethod
def get_by_name(cls, context, name):
"""Find a board based on name and return a Board object.
:param name: the logical name of a board.
:returns: a :class:`Board` object.
"""
db_board = cls.dbapi.get_board_by_name(name)
board = Board._from_db_object(cls(context), db_board)
return board
@base.remotable_classmethod
def list(cls, context, limit=None, marker=None, sort_key=None,
sort_dir=None, filters=None):
"""Return a list of Board objects.
:param context: Security context.
:param limit: maximum number of resources to return in a single result.
:param marker: pagination marker for large data sets.
:param sort_key: column to sort results by.
:param sort_dir: direction to sort. "asc" or "desc".
:param filters: Filters to apply.
:returns: a list of :class:`Board` object.
"""
db_boards = cls.dbapi.get_board_list(filters=filters, limit=limit,
marker=marker, sort_key=sort_key,
sort_dir=sort_dir)
return [Board._from_db_object(cls(context), obj) for obj in db_boards]
@base.remotable_classmethod
def reserve(cls, context, tag, board_id):
"""Get and reserve a board.
To prevent other ManagerServices from manipulating the given
Board while a Task is performed, mark it reserved by this host.
:param context: Security context.
:param tag: A string uniquely identifying the reservation holder.
:param board_id: A board id or uuid.
:raises: BoardNotFound if the board is not found.
:returns: a :class:`Board` object.
"""
db_board = cls.dbapi.reserve_board(tag, board_id)
board = Board._from_db_object(cls(context), db_board)
return board
@base.remotable_classmethod
def release(cls, context, tag, board_id):
"""Release the reservation on a board.
:param context: Security context.
:param tag: A string uniquely identifying the reservation holder.
:param board_id: A board id or uuid.
:raises: BoardNotFound if the board is not found.
"""
cls.dbapi.release_board(tag, board_id)
@base.remotable
def create(self, context=None):
"""Create a Board record in the DB.
Column-wise updates will be made based on the result of
self.what_changed(). If target_power_state is provided,
it will be checked against the in-database copy of the
board before updates are made.
:param context: Security context. NOTE: This should only
be used internally by the indirection_api.
Unfortunately, RPC requires context as the first
argument, even though we don't use it.
A context should be set when instantiating the
object, e.g.: Board(context)
"""
values = self.obj_get_changes()
db_board = self.dbapi.create_board(values)
self._from_db_object(self, db_board)
@base.remotable
def destroy(self, context=None):
"""Delete the Board from the DB.
:param context: Security context. NOTE: This should only
be used internally by the indirection_api.
Unfortunately, RPC requires context as the first
argument, even though we don't use it.
A context should be set when instantiating the
object, e.g.: Board(context)
"""
self.dbapi.destroy_board(self.uuid)
self.obj_reset_changes()
@base.remotable
def save(self, context=None):
"""Save updates to this Board.
Column-wise updates will be made based on the result of
self.what_changed(). If target_power_state is provided,
it will be checked against the in-database copy of the
board before updates are made.
:param context: Security context. NOTE: This should only
be used internally by the indirection_api.
Unfortunately, RPC requires context as the first
argument, even though we don't use it.
A context should be set when instantiating the
object, e.g.: Board(context)
"""
updates = self.obj_get_changes()
self.dbapi.update_board(self.uuid, updates)
self.obj_reset_changes()
@base.remotable
def refresh(self, context=None):
"""Refresh the object by re-fetching from the DB.
:param context: Security context. NOTE: This should only
be used internally by the indirection_api.
Unfortunately, RPC requires context as the first
argument, even though we don't use it.
A context should be set when instantiating the
object, e.g.: Board(context)
"""
current = self.__class__.get_by_uuid(self._context, self.uuid)
for field in self.fields:
if (hasattr(
self, base.get_attrname(field))
and self[field] != current[field]):
self[field] = current[field]
|
# Copyright (C) 2011-2012 CRS4.
#
# This file is part of Seal.
#
# Seal is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Seal is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Seal. If not, see <http://www.gnu.org/licenses/>.
from seal.lib.mr.hit_processor_chain_link import HitProcessorChainLink
class FilterLink(HitProcessorChainLink):
def __init__(self, monitor, next_link = None):
super(FilterLink, self).__init__(next_link)
self.min_hit_quality = 1
self.remove_unmapped = True # if true, all unmapped are removed regardless of hit quality
self.event_monitor = monitor
def __remove_i(self, pair, i):
pair[i] = None
other_hit = pair[i^1]
if other_hit:
other_hit.remove_mate()
return pair
def process(self, pair):
if len(pair) != 2:
raise ValueError("pair length != 2 (it's %d)" % len(pair))
pair = list(pair) # tuples can't be modified
for i in 0,1:
if self.remove_unmapped and pair[i].is_unmapped():
pair = self.__remove_i(pair, i)
self.event_monitor.count("reads filtered: unmapped")
elif pair[i].qual < self.min_hit_quality:
pair = self.__remove_i(pair, i)
self.event_monitor.count("reads filtered: low quality")
if self.next_link and any(pair):
self.next_link.process(tuple(pair)) # forward pair to next element in chain
|
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
import datetime
from time import strptime
import re
import os
import json
class FileStatus(object):
def __init__(self, path, rights, nbFiles, owner, group, size, date, relpath = None):
self.path = path
self.rights = rights
self.nbFiles = nbFiles
self.owner = owner
self.group = group
self.size = size
self.date = date
self.relpath = relpath
def __eq__(self, other):
return (self.path == other.path and self.rights == other.rights and
self.nbFiles == other.nbFiles and self.owner == other.owner and self.group == other.group and
self.size == other.size and self.date == other.date)
def is_dir(self):
return self.rights.startswith("d")
def __str__(self):
return self.to_str(0, 0, 0, 0, 0, 0, 0)
    def to_str(self, rights_width, nbFiles_width, owner_width, group_width, size_width, date_width, path_width):
        if self.is_dir():
nb_files = "-"
else:
nb_files = str(self.nbFiles)
result = "%s %s %s %s %s %s %s" % (self.rights.ljust(rights_width),
nb_files.ljust(nbFiles_width),
self.owner.ljust(owner_width),
self.group.ljust(group_width),
str(self.size).ljust(size_width),
                                           self.date.strftime("%Y-%m-%d %H:%M").ljust(date_width),
                                           self.path.ljust(path_width))
return result.encode("utf-8")
def get_file_statuses_pretty_print(file_statuses):
rights_width = 0
nb_files_width = 0
owner_width = 0
group_width = 0
size_width = 0
date_width = 0
path_width = 0
if len(file_statuses) != 0:
rights_width = max([len(fs.rights) for fs in file_statuses])
nb_files_width = max([len(str(fs.nbFiles)) for fs in file_statuses])
owner_width = max([len(fs.owner) for fs in file_statuses])
group_width = max([len(fs.group) for fs in file_statuses])
size_width = max([len(str(fs.size)) for fs in file_statuses])
        date_width = max([len(fs.date.strftime("%Y-%m-%d %H:%M")) for fs in file_statuses])
path_width = max([len(fs.path) for fs in file_statuses])
result = []
for file_status in file_statuses:
result.append(file_status.to_str(rights_width, nb_files_width, owner_width, group_width, size_width, date_width, path_width))
return "\n".join(result)
class LsParser(object):
def __init__(self):
pass
def parse_line(self, line):
regex = "^([rwxd@+-]+)\s+(\d+)\s+(\w+)\s+(\w+)\s+(\d+)\s+(\d+)\s+(\w+)\s+([:\d]+)\s+(/.+)$"
m = re.match(regex, line, re.UNICODE)
if m is None:
return None
rights = m.group(1)
nbFiles = int(m.group(2))
owner = m.group(3)
group = m.group(4)
size = int(m.group(5))
day = int(m.group(6))
month = m.group(7)
try:
month = strptime(month, '%b').tm_mon
except:
month = [u"jan", u"fév", u"mar", u"avr", u"mai", u"jui", u"juil", u"aoû", u"sep", u"oct", u"nov", u"déc"].index(month) + 1
try:
year = int(m.group(8))
except:
year = datetime.datetime.now().year
filename = m.group(9)
date = datetime.date(year, month, day)
return FileStatus(filename, rights, nbFiles, owner, group, size, date)
def parse(self, output):
result = [self.parse_line(line) for line in output.split("\n")]
return [p for p in result if p is not None]
class WebHdfsParser(object):
def __init__(self, path):
self.path = path
def permissions_to_unix_name(self, is_dir, rights):
is_dir_prefix = 'd' if is_dir else '-'
sticky = False
if len(rights) == 4 and rights[0] == '1':
sticky = True
rights = rights[1:]
dic = {'7': 'rwx', '6': 'rw-', '5': 'r-x', '4': 'r--', '3': '-wx', '2': '-w-', '1': '--x', '0': '---'}
result = is_dir_prefix + ''.join(dic[x] for x in rights)
if sticky:
result = result[:-1] + "t"
return result
def parse_status(self, status):
relpath = status["pathSuffix"]
path = os.path.join(self.path, relpath)
nbFiles = 0
size = status["length"]
owner = status["owner"]
group = status["group"]
is_dir = status["type"] == "DIRECTORY"
right_digits = status["permission"]
rights = self.permissions_to_unix_name(is_dir, right_digits)
parsed_date = datetime.datetime.utcfromtimestamp(int(status["modificationTime"])/1000)
date = datetime.datetime(parsed_date.year, parsed_date.month, parsed_date.day, parsed_date.hour, parsed_date.minute)
return FileStatus(path, rights, nbFiles, owner, group, size, date, relpath)
def parse(self, output):
try:
j = json.loads(output)
except:
print output
return []
if "FileStatuses" not in j or "FileStatus" not in j["FileStatuses"]:
print j
return []
statuses = j["FileStatuses"]["FileStatus"]
result = []
for status in statuses:
result.append(self.parse_status(status))
return result
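# Example (for illustration): permissions_to_unix_name converts WebHDFS octal
# permission strings into ls-style strings, e.g.
#   permissions_to_unix_name(False, "644")  -> "-rw-r--r--"
#   permissions_to_unix_name(True, "1777")  -> "drwxrwxrwt"  (sticky bit)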
|
import numpy as np
import numpy.matlib
import scipy as sp
import scipy.io as sio
import inspect
import pdb
from numbers import Number
import warnings
singleton = None
class Result:
def __init__(this, name, passes=0, total=0):
this.name = name
this.passes = float(passes)
this.total = float(total)
def __iadd__(this, that):
this.passes = this.passes + that.passes
this.total = this.total + that.total
return this
def passed(this):
return this.passes == this.total
def __repr__(this):
fr = 0.0
if this.total > 0:
fr = this.passes / this.total
return "%s: %d%% pass (%d/%d)" % (this.name, round(fr*100.0), this.passes, this.total )
class Numnum:
def __init__(this):
this.idxn = 0
this.idxu = 0
this.ids = {}
this.ctx = []
this.gid = 0
this.state = {}
this.mode = 0
this.unit = 0
this.run = None
this.depth = 0
def push(this):
""" push new context onto stack """
name = caller(1)
if name in this.ids:
this.ids[name] = this.ids[name] + 1
else:
this.ids[name] = 1
ctx = {}
ctx["name"] = name
ctx["run"] = this.ids[name]
this.ctx.append(ctx)
def pop(this):
""" pop context off of stack """
ctx = this.ctx.pop()
if this.mode > 0:
if ctx["name"] not in this.state:
this.state[ctx["name"]] = []
runs = this.state[ctx["name"]]
if ctx["run"] == len(runs)+1:
runs.append(ctx)
else:
raise Exception("wtf: %d ~ %d" % (ctx["run"] , len(runs)))
# this.state[ctx.name] = runs
def validate(this, str, *args):
ctx = this.ctx[-1]
if this.mode > 0:
ctx[str] = args
else:
funs = this.state[ctx["name"]]
if type(funs) != list:
funs = [funs]
fun = funs[ ctx["run"] - 1 ]
vals = fun[str]
this._validate(vals, *args)
# this.ctx{end} = ctx;
def _validate(this, vals, *args):
if len(vals) != len(args):
warnings.warn("Unequal number of values: %d != %d" % (len(vals)/2, len(args)/2), stacklevel=3)
# Assume lost trailing arguments are optional
for i in range(0, min(len(args), len(vals)), 2):
key_a = args[i]
val_a = args[i+1]
key_b = vals[i]
val_b = vals[i+1]
equivalent(val_a, val_b, key_a, key_b)
def parse(obj):
ans = obj
if type(obj) == dict:
for key in ans:
ans[key] = parse(ans[key])
elif isinstance(obj, sio.matlab.mio5_params.mat_struct):
ans = {}
for key in obj._fieldnames:
ans[key] = parse(obj.__dict__[key])
elif isinstance(obj,np.ndarray):
if obj.dtype == np.dtype('O'):
# cell-array, otherwise leave alone. Assumes 1D.
ans = []
for item in obj:
ans.append(parse(item))
return ans
def str2func(name, offset=0):
scope = inspect.stack()[1+offset][0].f_globals
if name in scope:
return scope[name]
else:
for s in scope:
if inspect.ismodule(scope[s]):
# print("str2func recursing into '%s'" % s)
for m in inspect.getmembers(scope[s]):
if m[0] == name:
return m[1]
def get_instance():
global singleton
    if singleton is None:
singleton = Numnum()
return singleton
def named_args(kv):
v = []
for i in range(0, len(kv), 2):
v.append(kv[i+1])
return v
def unnamed_args(k):
v = []
if type(k) == np.ndarray or type(k) == list:
for i in range(0, len(k)):
            v.append(k[i])  # was k[i+1], which runs past the end on the final element
else:
v.append(k)
return v
def replay(filename, mode=0):
this = get_instance()
this.idxn = 0
this.idxu = 0
this.ids = {}
this.ctx = []
this.gid = 0
this.state = parse(sio.loadmat(filename, chars_as_strings=True, struct_as_record=False, squeeze_me=True))
this.mode = -1
this.unit = 1
this.run = None
this.depth = 0
testname = None
if type(mode) == str:
testname = mode
mode = -1
# print(filename)
test_results = {}
# run integration test
if mode == 0 or mode > 0:
f = str2func(this.state["numnum_function"], 1)
v = unnamed_args(this.state["numnum_varargin"])
f(*v)
print("integration %s: pass" % this.state["numnum_function"])
# run unit tests
if mode == 0 or mode < 0:
total_tests = 0
for key in this.state.keys():
if testname and (testname != key):
continue
if not( key.startswith("numnum_") or key.startswith("_") ):
runs = this.state[key]
f = str2func(key, 1)
                if f is None:
print('Skipping %s...\n' % key)
continue
if type(runs) != list:
runs = [runs]
passes = 0
for j in range(0, len(runs)):
run = runs[j]
arg = named_args(run["arg"])
ret = named_args(run["ret"])
this.mode = 0 # disable verification in functions...
this.run = run # ...except top-level
this.depth = 0
this.unit = 1 # keep random generation enabled
this.idxn = 0 # reset random numbers
this.idxu = 0
try:
# Invoke. Return values validated internally.
f( *arg )
passes = passes + 1
except Exception as e:
print(e.message)
print(filename)
pass
#raise
this.mode = -1
this.run = None
this.depth = 0
#total_tests = total_tests + 1
#try:
# if len(ret) == 1:
# equivalent( ret[0], results, run["ret"][0], run["ret"][0] )
# else:
# for k in range(0, len(ret)):
# equivalent( ret[k], results[k], run["ret"][2*k], run["ret"][2*k] )
# passes = passes + 1;
#except Exception as e:
# print(e.message)
# pass
#errstr= "%s: %d%% pass (%d/%d)" % (run["name"], round(float(passes)/float(len(runs))*100.0), passes, len(runs) )
#print(errstr)
#if passes != len(runs):
# raise Exception(errstr)
#assert passes == len(runs)
test_results[key] = Result( key, passes, len(runs) )
#if total_tests == 0:
# raise Exception("No unit tests found");
return test_results
def record(filename, f, *args):
this = get_instance()
this.idxn = 0
this.idxu = 0
this.ids = {}
this.ctx = []
this.gid = 0
this.state = {}
this.mode = 1
this.unit = 0
this.run = None
this.depth = 0
n = 10000
this.state["numnum_randn"] = np.random.standard_normal((1, n))
this.state["numnum_rand"] = np.random.random( (1, n) )
this.state["numnum_function"] = "" # FIXME
this.state["numnum_varargin"] = args
f(*args)
sio.savemat(filename, this.state)
def caller(offset=0):
return inspect.stack()[2+offset][3]
def arguments(*args):
this = get_instance()
this.depth = this.depth + 1
if this.mode:
this.push()
this.validate('arg', *args)
elif this.run and this.depth == 1:
this._validate(this.run['arg'], *args)
def returns(*args):
this = get_instance()
this.depth = this.depth - 1
if this.mode:
this.validate('ret', *args)
this.pop()
elif this.run and this.depth == 0:
this._validate(this.run['ret'], *args)
def values(*args):
this = get_instance()
if this.mode:
this.validate('val', *args)
elif this.run and this.depth == 1:
this._validate(this.run['val'], *args)
# Reproducible deterministic random number generation
def randn(r, c):
this = get_instance()
v = np.random.standard_normal((r, c))
if this.mode or this.unit:
idx = 0 # needs to be deterministic for unit tests
for i in range(0, r):
for j in range(0, c):
v[i,j] = this.state["numnum_randn"][ idx % this.state["numnum_randn"].shape[0] ]
idx = idx + 1
return v
# Reproducible deterministic random number generation
def rand(r, c):
this = get_instance()
v = np.random.random((r, c))
if this.mode or this.unit:
idx = 0 # needs to be deterministic for unit tests
for i in range(0, r):
for j in range(0, c):
v[i,j] = this.state["numnum_rand"][ idx % this.state["numnum_rand"].shape[0] ]
idx = idx + 1
return v
# Reproducible deterministic random number generation
def randperm(n):
    this = get_instance()
    v = np.random.permutation(n)  # the original called randperm(n) recursively, which never terminates
    if this.mode or this.unit:
        # FIXME: slow and dumb...
        raise Exception('Not implemented')
    return v
# Fix handling of 1d ndarrays
def insist(v, rows, cols):
if rows == 0 and cols == 0:
raise Exception("Both rows and cols connot be zero")
if type(v) == float:
v = np.ones(shape=(1,1), dtype=np.float64) * v
if type(v) == int:
v = np.ones(shape=(1,1), dtype=np.float64) * float(v)
    if rows == 0:
        rows = v.size // cols  # integer division keeps the reshape arguments integral
    if cols == 0:
        cols = v.size // rows
if v.ndim == 1:
v = v.reshape( ( rows , cols) )
# TODO: is this ever desirable?
elif (v.shape[0] != v.shape[1]) and v.shape[0] == cols and v.shape[1] == rows:
warnings.warn("Implicit use of transpose")
v = v.T
assert v.shape[1] == cols
assert v.shape[0] == rows
return v
def equivalent(a, b, A = "a", B = "b"):
olda = a
oldb = b
if type(a) == type(None):
warnings.warn("Ignoring null (return?) value for '%s'" % A)
return
if isinstance(a,np.bool_) and not isinstance(b,np.bool_):
if a:
a = 1
else:
a = 0
if isinstance(a,Number):
a = np.ones( (1,1) ).reshape((1,1)) * float(a)
if isinstance(b,Number):
b = np.ones( (1,1) ).reshape((1,1)) * float(b)
if type(a) != type(b):
# check if scalar before complaining
if type(a) == np.ndarray and len(a.shape):
if a.shape[0] == 1:
if len(a.shape) == 1:
a0 = a[0]
else:
a0 = a[0,0]
if float(a0) == float(b):
return
elif type(a) == list and type(b) == np.ndarray:
pass
elif isinstance(a,Number) and type(b) == np.ndarray:
# Compare a scalar with an array: start by converting
# a to a length-1 list
a = [a]
else:
raise Exception("class(%s) = %s and class(%s) = %s" % (A, type(a), B, type(b)))
if type(a) == np.ndarray:
# Meh. Fix up shapes
if len(a.shape) == 1 and len(b.shape) == 2:
if b.shape[0] == 1:
a = a.reshape( (1, a.shape[0]) )
elif b.shape[1] == 1:
a = a.reshape( (a.shape[0], 1) )
if len(b.shape) == 1 and len(a.shape) == 2:
if a.shape[0] == 1:
b = b.reshape( (1, b.shape[0]) )
elif a.shape[1] == 1:
b = b.reshape( (b.shape[0], 1) )
if len(a.shape) == 1 and len(b.shape) == 1:
a = a.reshape( (a.shape[0], 1) )
b = b.reshape( (b.shape[0], 1) )
if b.shape[1] == 0:
pdb.set_trace()
            b = np.ones((1,1)).reshape((1,1)) * float(b)  # resize() returns None; reshape keeps the array
if a.shape != b.shape:
raise Exception("size(%s) = %dx%d and size(%s) = %dx%d" % (A, a.shape[0], a.shape[1], B, b.shape[0], b.shape[1]))
delta = np.abs(a-b)
chk = delta > 1e-6
if chk.any():
errstr = "%s ~= %s\n%s\n%s" % (A, B, str(a), str(b))
raise Exception(errstr)
elif type(a) == dict:
for k in a.keys():
equivalent(a[k], b[k], A = "%s.%s" % (A, k), B = "%s.%s" % (B, k))
elif type(a) == list:
if len(a) != len(b):
raise Exception("len(%s) = %i and len(%s) = %i" % (A, len(a), B, len(b)))
for i in range(0, min(len(a), len(b))):
equivalent(a[i], b[i], A = "%s[%d]" % (A, i), B = "%s[%s]" % (B, i))
# raise Exception("Cannot check equivalence of %s (%s) and %s (%s)" % (A, type(a), B, type(b) ))
|
# -*- coding: utf-8 -*-
# Copyright 2020 Green Valley Belgium NV
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# @@license_version:1.7@@
import base64
import json
import logging
import os
import urllib
from google.appengine.ext import webapp, db
from google.appengine.ext.webapp import template
from mcfw.properties import azzert
from rogerthat.bizz import session
from rogerthat.bizz.job import hookup_with_default_services
from rogerthat.bizz.limit import clear_rate_login
from rogerthat.bizz.profile import update_password_hash, create_user_profile
from rogerthat.bizz.registration import get_headers_for_consent, save_tos_consent
from rogerthat.bizz.session import create_session
from rogerthat.bizz.user import calculate_secure_url_digest, update_user_profile_language_from_headers
from rogerthat.dal.profile import get_service_or_user_profile
from rogerthat.exceptions import ServiceExpiredException
from rogerthat.exceptions.login import AlreadyUsedUrlException, ExpiredUrlException, InvalidUrlException
from rogerthat.models import UserProfile, ServiceProfile
from rogerthat.pages.legal import get_legal_language, get_version_content, DOC_TERMS_SERVICE, \
get_current_document_version, DOC_TERMS
from rogerthat.rpc import users
from rogerthat.settings import get_server_settings
from rogerthat.templates import get_languages_from_header, JINJA_ENVIRONMENT
from rogerthat.utils import urlencode, now, channel
from rogerthat.utils.cookie import set_cookie
from rogerthat.utils.crypto import decrypt, sha256_hex
_BASE_DIR = os.path.dirname(__file__)
class SessionHandler(webapp.RequestHandler):
def redirect(self, url, permanent=False):
return super(SessionHandler, self).redirect(str(url), permanent)
def start_session(self, user, cont=None):
try:
secret, _ = create_session(user)
except ServiceExpiredException:
return self.redirect('/service_disabled')
server_settings = get_server_settings()
set_cookie(self.response, server_settings.cookieSessionName, secret)
if not cont:
cont = self.request.GET.get("continue", "/")
if cont:
self.redirect(cont)
else:
self.redirect("/")
def stop_session(self):
current_session = users.get_current_session()
session.drop_session(current_session)
server_settings = get_server_settings()
set_cookie(self.response, server_settings.cookieSessionName, current_session.parent_session_secret or "")
self.redirect("/")
class LoginHandler(webapp.RequestHandler):
def get(self):
self.redirect('/customers/signin')
class SetPasswordHandler(SessionHandler):
def return_error(self, reason="Invalid url received."):
path = os.path.join(_BASE_DIR, 'error.html')
self.response.out.write(template.render(path, {"reason": reason, "hide_header": True}))
def parse_data(self, email, data):
user = users.User(email)
data = base64.decodestring(data)
data = decrypt(user, data)
data = json.loads(data)
azzert(data["d"] == calculate_secure_url_digest(data))
return data, user
def parse_and_validate_data(self, email, data):
if not email or not data:
raise InvalidUrlException()
try:
data, user = self.parse_data(email, data)
except UnicodeEncodeError:
logging.warn("Could not decipher url!\ndata: %s\nemail: %s", data, email, exc_info=True)
raise InvalidUrlException()
except:
logging.exception("Could not decipher url!\ndata: %s\nemail: %s", data, email)
raise InvalidUrlException()
now_ = now()
timestamp = data["t"]
if not (now_ < timestamp < now_ + 5 * 24 * 3600):
raise ExpiredUrlException(action=data["a"])
profile = get_service_or_user_profile(user)
if profile and profile.lastUsedMgmtTimestamp + 5 * 24 * 3600 > timestamp:
raise AlreadyUsedUrlException(action=data["a"])
return data
def get(self):
email = self.request.get("email")
data = self.request.get("data")
try:
parsed_data = self.parse_and_validate_data(email, data)
except ExpiredUrlException as e:
return self.return_error("The %s link has expired." % e.action)
except AlreadyUsedUrlException as e:
return self.return_error("You cannot use the %s link more than once." % e.action)
except InvalidUrlException:
return self.return_error()
path = os.path.join(_BASE_DIR, 'setpassword.html')
self.response.out.write(template.render(path, {
'name': parsed_data['n'],
'hide_header': True,
'data': data,
'email': email,
'action': parsed_data['a']
}))
def post(self):
email = self.request.get("email", None)
password = self.request.get("password", None)
data = self.request.get("data", None)
if not (email and password and data):
return self.redirect("/")
try:
data, user = self.parse_data(email, data)
except:
logging.exception("Could not decypher url!")
return self.redirect("/")
now_ = now()
language_header = self.request.headers.get('Accept-Language', None)
language = get_languages_from_header(language_header)[0] if language_header else None
passwordHash = sha256_hex(password)
profile = get_service_or_user_profile(user)
if not profile:
profile = create_user_profile(user, data['n'], language) # todo communities set community_id
update_password_hash(profile, passwordHash, now_)
else:
def update():
p = db.get(profile.key())
if isinstance(profile, UserProfile) and not p.language:
p.language = language
p.passwordHash = passwordHash
p.lastUsedMgmtTimestamp = now_
p.put()
return p
profile = db.run_in_transaction(update)
if isinstance(profile, UserProfile):
hookup_with_default_services.schedule(user)
self.start_session(user, data["c"])
class ResetPasswordHandler(webapp.RequestHandler):
def get(self):
cont = self.request.GET.get("continue", "/")
email = self.request.GET.get("email", "")
path = os.path.join(_BASE_DIR, 'resetpassword.html')
self.response.out.write(template.render(path, {"continue": cont, "hide_header": True, "email": email}))
class AuthenticationRequiredHandler(webapp.RequestHandler):
def get(self):
path = "/login"
cont = self.request.GET.get("continue", None)
if cont:
path += "?" + urlencode((("continue", cont),))
self.redirect(path)
class TermsAndConditionsHandler(webapp.RequestHandler):
def get_doc_and_lang(self, user):
profile = get_service_or_user_profile(user)
if isinstance(profile, ServiceProfile):
if profile.solution:
return None, None
doc_type = DOC_TERMS_SERVICE
language = get_legal_language(profile.defaultLanguage)
else:
doc_type = DOC_TERMS
language = get_legal_language(profile.language)
return doc_type, language
def get(self):
user = users.get_current_user()
doc_type, language = self.get_doc_and_lang(user)
if not doc_type and not language:
self.redirect('/')
return
version = get_current_document_version(doc_type)
self.response.out.write(JINJA_ENVIRONMENT.get_template('terms_and_conditions.html').render({
'user': user,
'tac': get_version_content(language, doc_type, version),
'language': language,
'version': version,
'logout_url': users.create_logout_url('/'),
}))
def post(self):
user = users.get_current_user()
if not user:
self.redirect('/logout')
return
doc, lang = self.get_doc_and_lang(user)
if not doc and not lang:
self.redirect('/')
return
version = long(self.request.get('version')) or get_current_document_version(doc)
profile = get_service_or_user_profile(user)
profile.tos_version = version
profile.put()
save_tos_consent(user, get_headers_for_consent(self.request), version, None)
self.redirect('/')
class LogoutHandler(SessionHandler):
def get(self):
user = users.get_current_user()
self.stop_session()
channel.send_message(user, u'rogerthat.system.logout')
cont = self.request.get('continue')
if cont:
self.redirect('/%s' % cont)
class AutoLogin(webapp.RequestHandler):
def parse_data(self, email, data):
user = users.User(email)
data = base64.decodestring(data)
data = decrypt(user, data)
data = json.loads(data)
azzert(data["d"] == calculate_secure_url_digest(data))
return data, user
def get(self):
email = self.request.get("email", None)
data = self.request.get("data", None)
service_identity = self.request.get("si", None)
user = users.get_current_user()
if user:
users.clear_user()
channel.send_message(user, u'rogerthat.system.logout')
if not email or not data:
logging.warn("not al params received for email: %s and data: %s" % (email, data))
self.redirect("/")
return
try:
data, _ = self.parse_data(email, data)
except:
logging.warn("Could not decipher url! email: %s and data: %s" % (email, data), exc_info=True)
self.redirect("/")
return
user = users.User(email)
profile = get_service_or_user_profile(user)
if not profile:
logging.warn("profile not found for email: %s" % email)
self.redirect("/")
return
try:
secret, _ = create_session(user, service_identity=service_identity)
except ServiceExpiredException:
return self.redirect('/service_disabled')
server_settings = get_server_settings()
set_cookie(self.response, server_settings.cookieSessionName, secret)
clear_rate_login(user)
update_user_profile_language_from_headers(profile, self.response.headers)
params = self.request.GET
redirect_url = '/'
if params:
params = dict((k, v.decode('utf8')) for k, v in params.iteritems())
del params['email']
del params['data']
if "si" in params:
del params['si']
redirect_url = "%s?%s" % (redirect_url, urllib.urlencode(params))
logging.info("Redirecting to url: %s" % redirect_url)
self.redirect(redirect_url)
|
#!/usr/bin/env python
import zmq
import sys
import time
import binascii
import argparse
import csv
#from scapy.utils import wrpcap
sys.path.insert(0,'../../../Engine/libraries/netip/python/')
sys.path.insert(0,'../../../ryu/ryu/')
from netip import *
from ofproto import ofproto_parser
from ofproto import ofproto_common
from ofproto import ofproto_protocol
from ofproto import ofproto_v1_0_parser
from ofproto import ofproto_v1_2_parser
from ofproto import ofproto_v1_3_parser
from ofproto import ofproto_v1_4_parser
from ofproto import ofproto_v1_5_parser
###################### headers for pcap creation ####################################
#Global header for pcap 2.4
pcap_global_header = ('D4 C3 B2 A1'
'02 00' #File format major revision (i.e. pcap <2>.4)
'04 00' #File format minor revision (i.e. pcap 2.<4>)
'00 00 00 00'
'00 00 00 00'
'FF FF 00 00'
'93 00 00 00') #user_protocol selected, without Ip and tcp headers
#pcap packet header that must preface every packet
pcap_packet_header = ('AA 77 9F 47'
'90 A2 04 00'
'XX XX XX XX' #Frame Size (little endian)
'YY YY YY YY') #Frame Size (little endian)
#netide packet header that must preface every packet
netide_header = ('01' #netide protocol version 1.1
'11' #openflow type
'XX XX' #Frame Size (little endian)
'01 00 00 00' #xid
'00 00 00 00 00 00 00 06') #datapath_id
######################################################################################
###################### PCAP generation ########################################
def getByteLength(str1):
return len(''.join(str1.split())) / 2
# return len(str1)
def generatePCAP(message,i):
msg_len = getByteLength(message)
# netide = netide_header.replace('XX XX',"%04x"%msg_len)
# net_len = getByteLength(netide_header)
# pcap_len = net_len + msg_len
hex_str = "%08x"%msg_len
reverse_hex_str = hex_str[6:] + hex_str[4:6] + hex_str[2:4] + hex_str[:2]
pcaph = pcap_packet_header.replace('XX XX XX XX',reverse_hex_str)
pcaph = pcaph.replace('YY YY YY YY',reverse_hex_str)
if (i==0):
# bytestring = pcap_global_header + pcaph + eth_header + ip + tcp + message
# bytestring = pcap_global_header + pcaph + netide + message
bytestring = pcap_global_header + pcaph + message
else:
# bytestring = pcaph + eth_header + ip + tcp + message
# bytestring = pcaph + netide + message
bytestring = pcaph + message
return bytestring
# writeByteStringToFile(bytestring, pcapfile)
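# Worked example (derived from generatePCAP above): for a 22-byte message,
# "%08x" % 22 gives "00000016"; swapping the byte pairs yields "16000000",
# the little-endian frame length written into the pcap packet header.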
#Splits the string into a list of tokens every n characters
def splitN(str1,n):
return [str1[start:start+n] for start in range(0, len(str1), n)]
def sum_one(i):
return i + 1
##############################################################################
parser = argparse.ArgumentParser(description='Launch the NetIDE debugger')
parser.add_argument('-o', help='Output Folder', default=".")
args = parser.parse_args()
fo = open(args.o+"/results.txt", "w")
bitout = open(args.o+"/results.pcap", 'wb')
csvfile = open(args.o+"/results.card", "w")
fieldnames = ['timestamp', 'origin', 'destination', 'msg', 'length']
#fieldnames = ['timestamp', 'origin', 'destination', 'msg']
writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
writer.writeheader()
# Socket to talk to server
context = zmq.Context()
socket = context.socket(zmq.SUB)
socket.connect("tcp://localhost:5557")
socket.setsockopt(zmq.SUBSCRIBE, "")
i = 0
print('[*] Waiting for logs. To exit press CTRL+C')
while True:
dst_field, src_field, msg = socket.recv_multipart()
t=time.strftime("%H:%M:%S")
dst_field = str(dst_field)
msg_str = str(msg)
src_field = str(src_field)
msg_hexadecimal = binascii.hexlify(msg)
#print(src_field, dst_field)
if src_field.startswith("0_", 0, 2) == True:
origin = src_field[2:]
destination = "core"
elif src_field.startswith("1_", 0, 2) == True:
origin = src_field[2:]
destination = "core"
elif src_field.startswith("2_", 0, 2) == True:
origin = "core"
destination = src_field[2:]
elif src_field.startswith("3_", 0, 2) == True:
origin = "core"
destination = src_field[2:]
#msg_cap = binascii.hexlify(msg)
bytestring = generatePCAP(msg_hexadecimal,i)
i = sum_one(i)
bytelist = bytestring.split()
bytes = binascii.a2b_hex(''.join(bytelist))
bitout.write(bytes)
(netide_version, netide_msg_type, netide_msg_len, netide_xid, netide_mod_id, netide_datapath) = NetIDEOps.netIDE_decode_header(msg)
netide_msg_type_v2 = NetIDEOps.key_by_value(NetIDEOps.NetIDE_type, netide_msg_type)
message_data = msg[NetIDEOps.NetIDE_Header_Size:]
ret = bytearray(message_data)
writer.writerow({'timestamp':t, 'origin':origin, 'destination':destination, 'msg':msg_hexadecimal, 'length':len(ret)})
if len(ret) >= ofproto_common.OFP_HEADER_SIZE:
(version, msg_type, msg_len, xid) = ofproto_parser.header(ret)
msg_decoded = ofproto_parser.msg(netide_datapath, version, msg_type, msg_len, xid, ret)
elif len(ret) < ofproto_common.OFP_HEADER_SIZE:
(version, msg_type, msg_len, xid, msg_decoded) = ("", "", "", "", "")
#if dst_field[2:] == "shim":
#if 'msg_decoded' in locals() or 'msg_decoded' in globals():
print "New message from %r to %r at %r"%(origin, destination, t)
print "\033[1;32mNetIDE header: Version = %r, Type of msg = %r, Length = %r Bytes, XID = %r, Module ID = %r, Datapath = %r\033[1;m"% (netide_version, netide_msg_type_v2, netide_msg_len, netide_xid, netide_mod_id, netide_datapath)
print '\033[1;32mOpenFlow message header: Version = %r, Type of msg = %r, Length = %r Bytes, XID = %r\033[1;m'% (version, msg_type, msg_len, xid)
print '\033[1;32mOpenFlow message: %r \033[1;m'% (msg_decoded)
print "\n"
#writer.writerow({'timestamp':t, 'origin':dst_field, 'destination':src_field, 'msg':msg_hexadecimal, 'length':msg_len})
fo.write("[%r] [%r] [%r] %r \n"% (t, origin, destination, msg_decoded))
#else:
#if 'msg_decoded' in locals() or 'msg_decoded' in globals():
#print "New message from backend %r to %r at %r"%(dst_field, src_field, t)
#print "\033[1;36mNetIDE header: Version = %r, Type of msg = %r, Length = %r Bytes, XID = %r, Module ID = %r, Datapath = %r\033[1;m"% (netide_version, netide_msg_type_v2, netide_msg_len, netide_xid, netide_mod_id, netide_datapath)
#print '\033[1;36mOpenFlow message header: Version = %r, Type of msg = %r, Length = %r Bytes, XID = %r\033[1;m'% (version, msg_type, msg_len, xid)
#print '\033[1;36mOpenFlow message: %r \033[1;m'% (msg_decoded)
#print "\n"
#writer.writerow({'timestamp':t, 'origin':dst_field, 'destination':src_field, 'msg':msg_hexadecimal, 'length':msg_len})
#fo.write("[%r] [%r] %r \n"% (t, dst_field, msg_decoded))
fo.close()
bitout.close()
csvfile.close()  # csv.DictWriter has no close(); close the underlying file instead
|
# encoding: utf-8
# Extra Lib
from functools import wraps
from flask import session, render_template, flash
from flask.ext.babel import gettext as _
# Custom Tools
from .erp import openerp
from .web import redirect_url_for
def logout():
session.clear()
# Decorator called for pages that DON'T require authentication
def requires_connection(f):
@wraps(f)
def decorated(*args, **kwargs):
# Check OpenERP Connexion
if not openerp:
# Connexion Failed, redirect to unavailable service page
flash(_(
"Distant Service Unavailable. If you had a pending purchase,"
" you have not lost your Shopping Cart."
" Thank you connect again in a while."),
'danger')
return render_template('unavailable_service.html')
else:
# Connexion OK: return asked page
return f(*args, **kwargs)
return decorated
# Decorator called for pages that requires authentication
def requires_auth(f):
@wraps(f)
def decorated(*args, **kwargs):
# Check OpenERP Connexion
if not openerp:
# Connexion Failed, redirect to unavailable service page
flash(_(
"Distant Service Unavailable. If you had a pending purchase,"
" you have not lost your Shopping Cart."
" Thank you connect again in a while."),
'danger')
return render_template('unavailable_service.html')
else:
# Check OpenERP Authentication
if not session.get('partner_id', False):
# User no authenticated
return redirect_url_for('login_view')
else:
return f(*args, **kwargs)
return decorated
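# Illustrative usage sketch (the route decorator and view name are hypothetical):
#   @app.route('/account')
#   @requires_auth
#   def account_view():
#       ...  # only reached when openerp is reachable and the session is authenticated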
|
#!/usr/bin/env python
# Copyright 2016 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Definition of targets to build artifacts."""
import os.path
import random
import string
import sys
sys.path.insert(0, os.path.abspath('..'))
import python_utils.jobset as jobset
def create_docker_jobspec(name,
dockerfile_dir,
shell_command,
environ={},
flake_retries=0,
timeout_retries=0,
timeout_seconds=30 * 60,
docker_base_image=None,
extra_docker_args=None,
verbose_success=False):
"""Creates jobspec for a task running under docker."""
environ = environ.copy()
environ['RUN_COMMAND'] = shell_command
environ['ARTIFACTS_OUT'] = 'artifacts/%s' % name
docker_args = []
for k, v in environ.items():
docker_args += ['-e', '%s=%s' % (k, v)]
docker_env = {
'DOCKERFILE_DIR': dockerfile_dir,
'DOCKER_RUN_SCRIPT': 'tools/run_tests/dockerize/docker_run.sh',
'OUTPUT_DIR': 'artifacts'
}
if docker_base_image is not None:
docker_env['DOCKER_BASE_IMAGE'] = docker_base_image
if extra_docker_args is not None:
docker_env['EXTRA_DOCKER_ARGS'] = extra_docker_args
jobspec = jobset.JobSpec(
cmdline=['tools/run_tests/dockerize/build_and_run_docker.sh'] +
docker_args,
environ=docker_env,
shortname='build_artifact.%s' % (name),
timeout_seconds=timeout_seconds,
flake_retries=flake_retries,
timeout_retries=timeout_retries,
verbose_success=verbose_success)
return jobspec
def create_jobspec(name,
cmdline,
environ={},
shell=False,
flake_retries=0,
timeout_retries=0,
timeout_seconds=30 * 60,
use_workspace=False,
cpu_cost=1.0,
verbose_success=False):
"""Creates jobspec."""
environ = environ.copy()
if use_workspace:
environ['WORKSPACE_NAME'] = 'workspace_%s' % name
environ['ARTIFACTS_OUT'] = os.path.join('..', 'artifacts', name)
cmdline = ['bash', 'tools/run_tests/artifacts/run_in_workspace.sh'
] + cmdline
else:
environ['ARTIFACTS_OUT'] = os.path.join('artifacts', name)
jobspec = jobset.JobSpec(
cmdline=cmdline,
environ=environ,
shortname='build_artifact.%s' % (name),
timeout_seconds=timeout_seconds,
flake_retries=flake_retries,
timeout_retries=timeout_retries,
shell=shell,
cpu_cost=cpu_cost,
verbose_success=verbose_success)
return jobspec
_MACOS_COMPAT_FLAG = '-mmacosx-version-min=10.7'
_ARCH_FLAG_MAP = {'x86': '-m32', 'x64': '-m64'}
class PythonArtifact:
"""Builds Python artifacts."""
def __init__(self, platform, arch, py_version):
self.name = 'python_%s_%s_%s' % (platform, arch, py_version)
self.platform = platform
self.arch = arch
self.labels = ['artifact', 'python', platform, arch, py_version]
self.py_version = py_version
def pre_build_jobspecs(self):
return []
def build_jobspec(self):
environ = {}
if self.platform == 'linux_extra':
# Raspberry Pi build
environ['PYTHON'] = '/usr/local/bin/python{}'.format(
self.py_version)
environ['PIP'] = '/usr/local/bin/pip{}'.format(self.py_version)
# https://github.com/resin-io-projects/armv7hf-debian-qemu/issues/9
# A QEMU bug causes submodule update to hang, so we copy directly
environ['RELATIVE_COPY_PATH'] = '.'
extra_args = ' --entrypoint=/usr/bin/qemu-arm-static '
return create_docker_jobspec(
self.name,
'tools/dockerfile/grpc_artifact_linux_{}'.format(self.arch),
'tools/run_tests/artifacts/build_artifact_python.sh',
environ=environ,
timeout_seconds=60 * 60 * 5,
docker_base_image='quay.io/grpc/raspbian_{}'.format(self.arch),
extra_docker_args=extra_args)
elif self.platform == 'linux':
if self.arch == 'x86':
environ['SETARCH_CMD'] = 'linux32'
# Inside the manylinux container, the python installations are located in
# special places...
environ['PYTHON'] = '/opt/python/{}/bin/python'.format(
self.py_version)
environ['PIP'] = '/opt/python/{}/bin/pip'.format(self.py_version)
# Platform autodetection for the manylinux1 image breaks so we set the
# defines ourselves.
# TODO(atash) get better platform-detection support in core so we don't
# need to do this manually...
environ['CFLAGS'] = '-DGPR_MANYLINUX1=1'
environ['GRPC_BUILD_GRPCIO_TOOLS_DEPENDENTS'] = 'TRUE'
environ['GRPC_BUILD_MANYLINUX_WHEEL'] = 'TRUE'
return create_docker_jobspec(
self.name,
'tools/dockerfile/grpc_artifact_python_manylinux_%s' %
self.arch,
'tools/run_tests/artifacts/build_artifact_python.sh',
environ=environ,
timeout_seconds=60 * 60,
docker_base_image='quay.io/pypa/manylinux1_i686'
if self.arch == 'x86' else 'quay.io/pypa/manylinux1_x86_64')
elif self.platform == 'windows':
if 'Python27' in self.py_version or 'Python34' in self.py_version:
environ['EXT_COMPILER'] = 'mingw32'
else:
environ['EXT_COMPILER'] = 'msvc'
# For some reason, the batch script %random% always runs with the same
# seed. We create a random temp-dir here
dir = ''.join(
random.choice(string.ascii_uppercase) for _ in range(10))
return create_jobspec(
self.name, [
'tools\\run_tests\\artifacts\\build_artifact_python.bat',
self.py_version, '32' if self.arch == 'x86' else '64'
],
environ=environ,
timeout_seconds=45 * 60,
use_workspace=True)
else:
environ['PYTHON'] = self.py_version
environ['SKIP_PIP_INSTALL'] = 'TRUE'
return create_jobspec(
self.name,
['tools/run_tests/artifacts/build_artifact_python.sh'],
environ=environ,
timeout_seconds=60 * 60 * 2,
use_workspace=True)
def __str__(self):
return self.name
class RubyArtifact:
"""Builds ruby native gem."""
def __init__(self, platform, arch):
self.name = 'ruby_native_gem_%s_%s' % (platform, arch)
self.platform = platform
self.arch = arch
self.labels = ['artifact', 'ruby', platform, arch]
def pre_build_jobspecs(self):
return []
def build_jobspec(self):
# Ruby build uses docker internally and docker cannot be nested.
# We are using a custom workspace instead.
return create_jobspec(
self.name, ['tools/run_tests/artifacts/build_artifact_ruby.sh'],
use_workspace=True,
timeout_seconds=45 * 60)
class CSharpExtArtifact:
"""Builds C# native extension library"""
def __init__(self, platform, arch):
self.name = 'csharp_ext_%s_%s' % (platform, arch)
self.platform = platform
self.arch = arch
self.labels = ['artifact', 'csharp', platform, arch]
def pre_build_jobspecs(self):
return []
def build_jobspec(self):
if self.platform == 'windows':
cmake_arch_option = 'Win32' if self.arch == 'x86' else self.arch
return create_jobspec(
self.name, [
'tools\\run_tests\\artifacts\\build_artifact_csharp.bat',
cmake_arch_option
],
use_workspace=True)
else:
environ = {
'CONFIG': 'opt',
'EMBED_OPENSSL': 'true',
'EMBED_ZLIB': 'true',
'CFLAGS': '-DGPR_BACKWARDS_COMPATIBILITY_MODE',
'CXXFLAGS': '-DGPR_BACKWARDS_COMPATIBILITY_MODE',
'LDFLAGS': ''
}
if self.platform == 'linux':
return create_docker_jobspec(
self.name,
'tools/dockerfile/grpc_artifact_linux_%s' % self.arch,
'tools/run_tests/artifacts/build_artifact_csharp.sh',
environ=environ)
else:
archflag = _ARCH_FLAG_MAP[self.arch]
environ['CFLAGS'] += ' %s %s' % (archflag, _MACOS_COMPAT_FLAG)
environ['CXXFLAGS'] += ' %s %s' % (archflag, _MACOS_COMPAT_FLAG)
environ['LDFLAGS'] += ' %s' % archflag
return create_jobspec(
self.name,
['tools/run_tests/artifacts/build_artifact_csharp.sh'],
environ=environ,
use_workspace=True)
def __str__(self):
return self.name
class PHPArtifact:
"""Builds PHP PECL package"""
def __init__(self, platform, arch):
self.name = 'php_pecl_package_{0}_{1}'.format(platform, arch)
self.platform = platform
self.arch = arch
self.labels = ['artifact', 'php', platform, arch]
def pre_build_jobspecs(self):
return []
def build_jobspec(self):
if self.platform == 'linux':
return create_docker_jobspec(
self.name, 'tools/dockerfile/grpc_artifact_linux_{}'.format(
self.arch),
'tools/run_tests/artifacts/build_artifact_php.sh')
else:
return create_jobspec(
self.name, ['tools/run_tests/artifacts/build_artifact_php.sh'],
use_workspace=True)
class ProtocArtifact:
"""Builds protoc and protoc-plugin artifacts"""
def __init__(self, platform, arch):
self.name = 'protoc_%s_%s' % (platform, arch)
self.platform = platform
self.arch = arch
self.labels = ['artifact', 'protoc', platform, arch]
def pre_build_jobspecs(self):
return []
def build_jobspec(self):
if self.platform != 'windows':
cxxflags = '-DNDEBUG %s' % _ARCH_FLAG_MAP[self.arch]
ldflags = '%s' % _ARCH_FLAG_MAP[self.arch]
if self.platform != 'macos':
ldflags += ' -static-libgcc -static-libstdc++ -s'
environ = {
'CONFIG': 'opt',
'CXXFLAGS': cxxflags,
'LDFLAGS': ldflags,
'PROTOBUF_LDFLAGS_EXTRA': ldflags
}
if self.platform == 'linux':
return create_docker_jobspec(
self.name,
'tools/dockerfile/grpc_artifact_protoc',
'tools/run_tests/artifacts/build_artifact_protoc.sh',
environ=environ)
else:
environ[
'CXXFLAGS'] += ' -std=c++11 -stdlib=libc++ %s' % _MACOS_COMPAT_FLAG
return create_jobspec(
self.name,
['tools/run_tests/artifacts/build_artifact_protoc.sh'],
environ=environ,
timeout_seconds=60 * 60,
use_workspace=True)
else:
generator = 'Visual Studio 14 2015 Win64' if self.arch == 'x64' else 'Visual Studio 14 2015'
return create_jobspec(
self.name,
['tools\\run_tests\\artifacts\\build_artifact_protoc.bat'],
environ={'generator': generator},
use_workspace=True)
def __str__(self):
return self.name
def targets():
"""Gets list of supported targets"""
return ([
Cls(platform, arch)
for Cls in (CSharpExtArtifact, ProtocArtifact)
for platform in ('linux', 'macos', 'windows') for arch in ('x86', 'x64')
] + [
PythonArtifact('linux', 'x86', 'cp27-cp27m'),
PythonArtifact('linux', 'x86', 'cp27-cp27mu'),
PythonArtifact('linux', 'x86', 'cp34-cp34m'),
PythonArtifact('linux', 'x86', 'cp35-cp35m'),
PythonArtifact('linux', 'x86', 'cp36-cp36m'),
PythonArtifact('linux_extra', 'armv7', '2.7'),
PythonArtifact('linux_extra', 'armv7', '3.4'),
PythonArtifact('linux_extra', 'armv7', '3.5'),
PythonArtifact('linux_extra', 'armv7', '3.6'),
PythonArtifact('linux_extra', 'armv6', '2.7'),
PythonArtifact('linux_extra', 'armv6', '3.4'),
PythonArtifact('linux_extra', 'armv6', '3.5'),
PythonArtifact('linux_extra', 'armv6', '3.6'),
PythonArtifact('linux', 'x64', 'cp27-cp27m'),
PythonArtifact('linux', 'x64', 'cp27-cp27mu'),
PythonArtifact('linux', 'x64', 'cp34-cp34m'),
PythonArtifact('linux', 'x64', 'cp35-cp35m'),
PythonArtifact('linux', 'x64', 'cp36-cp36m'),
PythonArtifact('macos', 'x64', 'python2.7'),
PythonArtifact('macos', 'x64', 'python3.4'),
PythonArtifact('macos', 'x64', 'python3.5'),
PythonArtifact('macos', 'x64', 'python3.6'),
PythonArtifact('windows', 'x86', 'Python27_32bits'),
PythonArtifact('windows', 'x86', 'Python34_32bits'),
PythonArtifact('windows', 'x86', 'Python35_32bits'),
PythonArtifact('windows', 'x86', 'Python36_32bits'),
PythonArtifact('windows', 'x64', 'Python27'),
PythonArtifact('windows', 'x64', 'Python34'),
PythonArtifact('windows', 'x64', 'Python35'),
PythonArtifact('windows', 'x64', 'Python36'),
RubyArtifact('linux', 'x64'),
RubyArtifact('macos', 'x64'),
PHPArtifact('linux', 'x64'),
PHPArtifact('macos', 'x64')
])
|
'''
Created on 24 Apr 2017
@author: ernesto
'''
import subprocess
import tempfile
class BEDTools:
'''
Class used to perform different operations with the BEDTools package.
This is essentially a wrapper for the BEDTools package. The functionality is quite limited and
additional functions will be added as necessary
'''
def __init__(self, bedtools_folder=None):
"""
Constructor
Parameters
----------
bedtools_folder : str, optional
Path to folder with bedtools binary.
"""
self.bedtools_folder = bedtools_folder
def make_windows(self, w, g, s=None, subtract=None, lextend=None, rextend=None, verbose=False):
"""
This method will make windows from a genome file by using 'bedtools makewindows'
Parameters
----------
w : int
width of windows in bp.
g : str
Path to genome file.
s : int, optional
            step between window starts in bp, i.e. if -w 100 -s 80 will generate:
chr1 0 100
chr1 80 180
chr1 160 260
...
So, -s defines the offset in bp
Another example -w 1000 -s 200
chr1 0 1000
chr1 200 1200
chr1 400 1400
chr1 600 1600
lextend : int, optional
Extend each interval to the left by int bases.
rextend : int, optional
Extend each interval to the right by int bases.
subtract : str, optional
BED file containing the features that will be removed from the generated windows.
For example, if we have the following window:
chr20 1000 2000
And we have the following feature in the BED file: chr20 1100 1200
Then the resulting windows will be like:
chr20 1000 1100
chr20 1200 2000
verbose : bool, default = False
Returns
-------
coordlist : list
A list of lists. Each sublist is composed of ['chr','start','end']
            It will return an empty list if no elements are defined for a certain chr.
Raises
------
Exception
"""
command = ""
if self.bedtools_folder:
command += self.bedtools_folder+"/"
command += "bedtools makewindows -g {0} -w {1}".format(g, w)
if s is not None:
command += " -s {0}".format(s)
coordlist = []
if verbose is not False:
print(command)
try:
stdout = subprocess.check_output(command, shell=True)
coordlist = [l.split("\t") for l in stdout.decode("utf-8").strip().split("\n")]
except subprocess.CalledProcessError as exc:
raise Exception(exc.output)
if subtract is not None:
temp = tempfile.NamedTemporaryFile()
try:
ofile = open(temp.name, 'w')
for i in coordlist:
ofile.write("{0}\t{1}\t{2}\n".format(i[0], i[1], i[2]))
ofile.close()
command1 = "{0}/bedtools subtract -a {1} -b {2}".format(self.bedtools_folder,
temp.name, subtract)
coordlist = None
try:
stdout = subprocess.check_output(command1, shell=True)
coordlist = [l.split("\t") for l in stdout.decode("utf-8").strip().split("\n")]
except subprocess.CalledProcessError as exc:
raise Exception(exc.output)
finally:
temp.close()
if lextend is not None:
first_seen = False
for k, lon in enumerate(coordlist):
if first_seen is True:
lon[1] = str(int(lon[1])+lextend)
first_seen = True
coordlist[k] = lon
if rextend is not None:
for k, lon in enumerate(coordlist):
if k != len(coordlist)-1:
lon[2] = str(int(lon[2])+rextend)
coordlist[k] = lon
return coordlist
def __str__(self):
sab = []
for key in self.__dict__:
sab.append("{key}='{value}'".format(key=key, value=self.__dict__[key]))
return ', '.join(sab)
def __repr__(self):
return self.__str__()
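# Illustrative usage sketch (paths and file names are hypothetical):
#   bedtools = BEDTools(bedtools_folder="/usr/local/bin")
#   windows = bedtools.make_windows(w=100000, g="hg38.genome", s=50000)
#   for chrom, start, end in windows:
#       pass  # each element is a ['chr', 'start', 'end'] list of strings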
|
from zlib import crc32
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.core.mail import EmailMessage
from django.template import Context, loader
from django.urls import reverse as django_reverse
from django.utils.module_loading import import_string
from .compat import next, string_types
def collate(*iterables, **kwargs):
"""Return an iterable ordered collation of the already-sorted items
from each of ``iterables``, compared by kwarg ``key``.
If ``reverse=True`` is passed, iterables must return their results in
descending order rather than ascending.
"""
key = kwargs.pop('key', lambda a: a)
reverse = kwargs.pop('reverse', False)
min_or_max = max if reverse else min
rows = [iter(iterable) for iterable in iterables if iterable]
next_values = {}
by_key = []
def gather_next_value(row, index):
try:
next_value = next(row)
except StopIteration:
pass
else:
next_values[index] = next_value
by_key.append((key(next_value), index))
for index, row in enumerate(rows):
gather_next_value(row, index)
while by_key:
key_value, index = min_or_max(by_key)
by_key.remove((key_value, index))
next_value = next_values.pop(index)
yield next_value
gather_next_value(rows[index], index)
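# Example: collate merges already-sorted inputs lazily, e.g.
#   list(collate([1, 3, 5], [2, 4, 6])) == [1, 2, 3, 4, 5, 6]
# With reverse=True, the inputs must instead be sorted in descending order.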
def hash_to_unsigned(data):
"""If ``data`` is a string or unicode string, return an unsigned 4-byte int
hash of it. If ``data`` is already an int that fits those parameters,
return it verbatim.
If ``data`` is an int outside that range, behavior is undefined at the
moment. We rely on the ``PositiveIntegerField`` on
:class:`~tidings.models.WatchFilter` to scream if the int is too long for
the field.
We use CRC32 to do the hashing. Though CRC32 is not a good general-purpose
hash function, it has no collisions on a dictionary of 38,470 English
words, which should be fine for the small sets that :class:`WatchFilters
<tidings.models.WatchFilter>` are designed to enumerate. As a bonus, it is
fast and available as a built-in function in some DBs. If your set of
filter values is very large or has different CRC32 distribution properties
than English words, you might want to do your own hashing in your
:class:`~tidings.events.Event` subclass and pass ints when specifying
filter values.
"""
if isinstance(data, string_types):
# Return a CRC32 value identical across Python versions and platforms
# by stripping the sign bit as on
# http://docs.python.org/library/zlib.html.
return crc32(data.encode('utf-8')) & 0xffffffff
else:
return int(data)
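# Example: hash_to_unsigned("category:firefox") returns an unsigned 32-bit CRC32
# of the UTF-8 bytes, while hash_to_unsigned(42) is returned verbatim as int(42).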
def emails_with_users_and_watches(
subject, template_path, vars, users_and_watches,
from_email=settings.TIDINGS_FROM_ADDRESS, **extra_kwargs):
"""Return iterable of EmailMessages with user and watch values substituted.
A convenience function for generating emails by repeatedly rendering a
Django template with the given ``vars`` plus a ``user`` and ``watches`` key
for each pair in ``users_and_watches``
:arg template_path: path to template file
:arg vars: a map which becomes the Context passed in to the template
:arg extra_kwargs: additional kwargs to pass into EmailMessage constructor
"""
template = loader.get_template(template_path)
context = Context(vars)
for u, w in users_and_watches:
context['user'] = u
# Arbitrary single watch for compatibility with 0.1
# TODO: remove.
context['watch'] = w[0]
context['watches'] = w
yield EmailMessage(subject,
template.render(context),
from_email,
[u.email],
**extra_kwargs)
def import_from_setting(setting_name, fallback):
"""Return the resolution of an import path stored in a Django setting.
:arg setting_name: The name of the setting holding the import path
:arg fallback: An alternate object to use if the setting is empty or
doesn't exist
Raise ImproperlyConfigured if a path is given that can't be resolved.
"""
path = getattr(settings, setting_name, None)
if path:
try:
return import_string(path)
except ImportError:
raise ImproperlyConfigured('%s: No such path.' % path)
else:
return fallback
# Here to be imported by others:
reverse = import_from_setting('TIDINGS_REVERSE', django_reverse) # no QA
|
"""Copyright 2014 Cyrus Dasadia
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import re
import simplejson
def create_json_parameters(event_action, incident, message=None):
plugin_parameters = event_action.pluginParameters
plugin_parameters = re.sub('"__EVENTID__"', simplejson.dumps(unicode(incident.event.id)), plugin_parameters)
plugin_parameters = re.sub('"__INCIDENTID__"', simplejson.dumps(unicode(incident.id)), plugin_parameters)
plugin_parameters = re.sub('"__ELEMENT__"', simplejson.dumps(unicode(incident.element)), plugin_parameters)
plugin_parameters = re.sub('"__MESSAGE__"', simplejson.dumps(unicode(message)), plugin_parameters)
return '{"plugin": %s, "parameters": %s}' % (simplejson.dumps(unicode(event_action.plugin.name)), plugin_parameters)
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'mg_gui.ui'
#
# Created: Fri Jul 29 15:42:51 2011
# by: PyQt4 UI code generator 4.8.3
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
_fromUtf8 = lambda s: s
class Ui_Form(object):
def setupUi(self, Form):
Form.setObjectName(_fromUtf8("Form"))
Form.resize(501, 414)
self.gridLayout_2 = QtGui.QGridLayout(Form)
self.gridLayout_2.setObjectName(_fromUtf8("gridLayout_2"))
self.groupBox = QtGui.QGroupBox(Form)
self.groupBox.setObjectName(_fromUtf8("groupBox"))
self.gridLayout = QtGui.QGridLayout(self.groupBox)
self.gridLayout.setObjectName(_fromUtf8("gridLayout"))
self.taurusLabel = TaurusLabel(self.groupBox)
self.taurusLabel.setObjectName(_fromUtf8("taurusLabel"))
self.gridLayout.addWidget(self.taurusLabel, 0, 0, 1, 1)
self.taurusLabel_2 = TaurusLabel(self.groupBox)
self.taurusLabel_2.setObjectName(_fromUtf8("taurusLabel_2"))
self.gridLayout.addWidget(self.taurusLabel_2, 0, 2, 1, 1)
self.taurusLed_2 = TaurusLed(self.groupBox)
self.taurusLed_2.setObjectName(_fromUtf8("taurusLed_2"))
self.gridLayout.addWidget(self.taurusLed_2, 0, 3, 1, 1)
self.taurusLabel_3 = TaurusLabel(self.groupBox)
self.taurusLabel_3.setObjectName(_fromUtf8("taurusLabel_3"))
self.gridLayout.addWidget(self.taurusLabel_3, 1, 0, 1, 1)
self.taurusLabel_4 = TaurusLabel(self.groupBox)
self.taurusLabel_4.setObjectName(_fromUtf8("taurusLabel_4"))
self.gridLayout.addWidget(self.taurusLabel_4, 1, 2, 1, 1)
self.taurusLed_3 = TaurusLed(self.groupBox)
self.taurusLed_3.setObjectName(_fromUtf8("taurusLed_3"))
self.gridLayout.addWidget(self.taurusLed_3, 1, 3, 1, 1)
self.taurusLabel_5 = TaurusLabel(self.groupBox)
self.taurusLabel_5.setObjectName(_fromUtf8("taurusLabel_5"))
self.gridLayout.addWidget(self.taurusLabel_5, 2, 0, 1, 1)
self.taurusLabel_6 = TaurusLabel(self.groupBox)
self.taurusLabel_6.setObjectName(_fromUtf8("taurusLabel_6"))
self.gridLayout.addWidget(self.taurusLabel_6, 2, 2, 1, 1)
self.taurusLed_4 = TaurusLed(self.groupBox)
self.taurusLed_4.setObjectName(_fromUtf8("taurusLed_4"))
self.gridLayout.addWidget(self.taurusLed_4, 2, 3, 1, 1)
self.taurusLabel_7 = TaurusLabel(self.groupBox)
self.taurusLabel_7.setObjectName(_fromUtf8("taurusLabel_7"))
self.gridLayout.addWidget(self.taurusLabel_7, 3, 0, 1, 1)
self.taurusLabel_8 = TaurusLabel(self.groupBox)
self.taurusLabel_8.setObjectName(_fromUtf8("taurusLabel_8"))
self.gridLayout.addWidget(self.taurusLabel_8, 3, 2, 1, 1)
self.taurusLed_5 = TaurusLed(self.groupBox)
self.taurusLed_5.setObjectName(_fromUtf8("taurusLed_5"))
self.gridLayout.addWidget(self.taurusLed_5, 3, 3, 1, 1)
self.taurusLabel_9 = TaurusLabel(self.groupBox)
self.taurusLabel_9.setObjectName(_fromUtf8("taurusLabel_9"))
self.gridLayout.addWidget(self.taurusLabel_9, 4, 0, 1, 1)
self.taurusLabel_10 = TaurusLabel(self.groupBox)
self.taurusLabel_10.setObjectName(_fromUtf8("taurusLabel_10"))
self.gridLayout.addWidget(self.taurusLabel_10, 4, 2, 1, 1)
self.taurusLed_6 = TaurusLed(self.groupBox)
self.taurusLed_6.setObjectName(_fromUtf8("taurusLed_6"))
self.gridLayout.addWidget(self.taurusLed_6, 4, 3, 1, 1)
self.taurusLabel_11 = TaurusLabel(self.groupBox)
self.taurusLabel_11.setObjectName(_fromUtf8("taurusLabel_11"))
self.gridLayout.addWidget(self.taurusLabel_11, 5, 0, 1, 1)
self.taurusLabel_12 = TaurusLabel(self.groupBox)
self.taurusLabel_12.setObjectName(_fromUtf8("taurusLabel_12"))
self.gridLayout.addWidget(self.taurusLabel_12, 5, 2, 1, 1)
self.taurusLed_7 = TaurusLed(self.groupBox)
self.taurusLed_7.setObjectName(_fromUtf8("taurusLed_7"))
self.gridLayout.addWidget(self.taurusLed_7, 5, 3, 1, 1)
self.gridLayout_2.addWidget(self.groupBox, 0, 2, 1, 1)
self.groupBox_2 = QtGui.QGroupBox(Form)
self.groupBox_2.setObjectName(_fromUtf8("groupBox_2"))
self.gridLayout_3 = QtGui.QGridLayout(self.groupBox_2)
self.gridLayout_3.setObjectName(_fromUtf8("gridLayout_3"))
self.taurusLabel_13 = TaurusLabel(self.groupBox_2)
self.taurusLabel_13.setObjectName(_fromUtf8("taurusLabel_13"))
self.gridLayout_3.addWidget(self.taurusLabel_13, 0, 0, 1, 1)
self.taurusLabel_14 = TaurusLabel(self.groupBox_2)
self.taurusLabel_14.setObjectName(_fromUtf8("taurusLabel_14"))
self.gridLayout_3.addWidget(self.taurusLabel_14, 0, 2, 1, 1)
self.taurusLed_8 = TaurusLed(self.groupBox_2)
self.taurusLed_8.setObjectName(_fromUtf8("taurusLed_8"))
self.gridLayout_3.addWidget(self.taurusLed_8, 0, 3, 1, 1)
self.taurusLabel_15 = TaurusLabel(self.groupBox_2)
self.taurusLabel_15.setObjectName(_fromUtf8("taurusLabel_15"))
self.gridLayout_3.addWidget(self.taurusLabel_15, 1, 0, 1, 1)
self.taurusLabel_16 = TaurusLabel(self.groupBox_2)
self.taurusLabel_16.setObjectName(_fromUtf8("taurusLabel_16"))
self.gridLayout_3.addWidget(self.taurusLabel_16, 1, 2, 1, 1)
self.taurusLed_9 = TaurusLed(self.groupBox_2)
self.taurusLed_9.setObjectName(_fromUtf8("taurusLed_9"))
self.gridLayout_3.addWidget(self.taurusLed_9, 1, 3, 1, 1)
self.taurusLabel_17 = TaurusLabel(self.groupBox_2)
self.taurusLabel_17.setObjectName(_fromUtf8("taurusLabel_17"))
self.gridLayout_3.addWidget(self.taurusLabel_17, 2, 0, 1, 1)
self.taurusLabel_18 = TaurusLabel(self.groupBox_2)
self.taurusLabel_18.setObjectName(_fromUtf8("taurusLabel_18"))
self.gridLayout_3.addWidget(self.taurusLabel_18, 2, 2, 1, 1)
self.taurusLed_10 = TaurusLed(self.groupBox_2)
self.taurusLed_10.setObjectName(_fromUtf8("taurusLed_10"))
self.gridLayout_3.addWidget(self.taurusLed_10, 2, 3, 1, 1)
self.taurusLabel_19 = TaurusLabel(self.groupBox_2)
self.taurusLabel_19.setObjectName(_fromUtf8("taurusLabel_19"))
self.gridLayout_3.addWidget(self.taurusLabel_19, 3, 0, 1, 1)
self.taurusLabel_20 = TaurusLabel(self.groupBox_2)
self.taurusLabel_20.setObjectName(_fromUtf8("taurusLabel_20"))
self.gridLayout_3.addWidget(self.taurusLabel_20, 3, 2, 1, 1)
self.taurusLed_11 = TaurusLed(self.groupBox_2)
self.taurusLed_11.setObjectName(_fromUtf8("taurusLed_11"))
self.gridLayout_3.addWidget(self.taurusLed_11, 3, 3, 1, 1)
self.taurusLabel_21 = TaurusLabel(self.groupBox_2)
self.taurusLabel_21.setObjectName(_fromUtf8("taurusLabel_21"))
self.gridLayout_3.addWidget(self.taurusLabel_21, 4, 0, 1, 1)
self.taurusLabel_22 = TaurusLabel(self.groupBox_2)
self.taurusLabel_22.setObjectName(_fromUtf8("taurusLabel_22"))
self.gridLayout_3.addWidget(self.taurusLabel_22, 4, 2, 1, 1)
self.taurusLed_12 = TaurusLed(self.groupBox_2)
self.taurusLed_12.setObjectName(_fromUtf8("taurusLed_12"))
self.gridLayout_3.addWidget(self.taurusLed_12, 4, 3, 1, 1)
self.taurusLabel_23 = TaurusLabel(self.groupBox_2)
self.taurusLabel_23.setObjectName(_fromUtf8("taurusLabel_23"))
self.gridLayout_3.addWidget(self.taurusLabel_23, 5, 0, 1, 1)
self.taurusLabel_24 = TaurusLabel(self.groupBox_2)
self.taurusLabel_24.setObjectName(_fromUtf8("taurusLabel_24"))
self.gridLayout_3.addWidget(self.taurusLabel_24, 5, 2, 1, 1)
self.taurusLed_13 = TaurusLed(self.groupBox_2)
self.taurusLed_13.setObjectName(_fromUtf8("taurusLed_13"))
self.gridLayout_3.addWidget(self.taurusLed_13, 5, 3, 1, 1)
self.gridLayout_2.addWidget(self.groupBox_2, 0, 3, 1, 1)
self.groupBox_4 = QtGui.QGroupBox(Form)
self.groupBox_4.setObjectName(_fromUtf8("groupBox_4"))
self.gridLayout_5 = QtGui.QGridLayout(self.groupBox_4)
self.gridLayout_5.setObjectName(_fromUtf8("gridLayout_5"))
self.taurusLabel_26 = TaurusLabel(self.groupBox_4)
self.taurusLabel_26.setObjectName(_fromUtf8("taurusLabel_26"))
self.gridLayout_5.addWidget(self.taurusLabel_26, 0, 0, 1, 3)
self.taurusLed_14 = TaurusLed(self.groupBox_4)
self.taurusLed_14.setObjectName(_fromUtf8("taurusLed_14"))
self.gridLayout_5.addWidget(self.taurusLed_14, 1, 0, 1, 1)
self.taurusLabel_29 = TaurusLabel(self.groupBox_4)
self.taurusLabel_29.setObjectName(_fromUtf8("taurusLabel_29"))
self.gridLayout_5.addWidget(self.taurusLabel_29, 2, 0, 1, 1)
self.taurusLabel_30 = TaurusLabel(self.groupBox_4)
self.taurusLabel_30.setObjectName(_fromUtf8("taurusLabel_30"))
self.gridLayout_5.addWidget(self.taurusLabel_30, 2, 1, 1, 1)
self.taurusValueLineEdit_2 = TaurusValueLineEdit(self.groupBox_4)
self.taurusValueLineEdit_2.setObjectName(_fromUtf8("taurusValueLineEdit_2"))
self.gridLayout_5.addWidget(self.taurusValueLineEdit_2, 2, 2, 1, 1)
self.taurusLabel_33 = TaurusLabel(self.groupBox_4)
self.taurusLabel_33.setObjectName(_fromUtf8("taurusLabel_33"))
self.gridLayout_5.addWidget(self.taurusLabel_33, 3, 0, 1, 1)
self.taurusLabel_34 = TaurusLabel(self.groupBox_4)
self.taurusLabel_34.setObjectName(_fromUtf8("taurusLabel_34"))
self.gridLayout_5.addWidget(self.taurusLabel_34, 3, 1, 1, 1)
self.taurusValueLineEdit_4 = TaurusValueLineEdit(self.groupBox_4)
self.taurusValueLineEdit_4.setObjectName(_fromUtf8("taurusValueLineEdit_4"))
self.gridLayout_5.addWidget(self.taurusValueLineEdit_4, 3, 2, 1, 1)
self.taurusLabel_37 = TaurusLabel(self.groupBox_4)
self.taurusLabel_37.setObjectName(_fromUtf8("taurusLabel_37"))
self.gridLayout_5.addWidget(self.taurusLabel_37, 4, 0, 1, 1)
self.taurusLabel_38 = TaurusLabel(self.groupBox_4)
self.taurusLabel_38.setObjectName(_fromUtf8("taurusLabel_38"))
self.gridLayout_5.addWidget(self.taurusLabel_38, 4, 1, 1, 1)
self.taurusValueLineEdit_6 = TaurusValueLineEdit(self.groupBox_4)
self.taurusValueLineEdit_6.setObjectName(_fromUtf8("taurusValueLineEdit_6"))
self.gridLayout_5.addWidget(self.taurusValueLineEdit_6, 4, 2, 1, 1)
self.horizontalLayout_2 = QtGui.QHBoxLayout()
self.horizontalLayout_2.setObjectName(_fromUtf8("horizontalLayout_2"))
self.taurusCommandButton_2 = TaurusCommandButton(self.groupBox_4)
self.taurusCommandButton_2.setObjectName(_fromUtf8("taurusCommandButton_2"))
self.horizontalLayout_2.addWidget(self.taurusCommandButton_2)
self.cfgMg2 = QtGui.QToolButton(self.groupBox_4)
self.cfgMg2.setObjectName(_fromUtf8("cfgMg2"))
self.horizontalLayout_2.addWidget(self.cfgMg2)
self.horizontalLayout_2.setStretch(0, 1)
self.gridLayout_5.addLayout(self.horizontalLayout_2, 1, 1, 1, 2)
self.gridLayout_2.addWidget(self.groupBox_4, 1, 3, 1, 1)
self.groupBox_3 = QtGui.QGroupBox(Form)
self.groupBox_3.setObjectName(_fromUtf8("groupBox_3"))
self.gridLayout_4 = QtGui.QGridLayout(self.groupBox_3)
self.gridLayout_4.setObjectName(_fromUtf8("gridLayout_4"))
self.taurusLabel_25 = TaurusLabel(self.groupBox_3)
self.taurusLabel_25.setObjectName(_fromUtf8("taurusLabel_25"))
self.gridLayout_4.addWidget(self.taurusLabel_25, 0, 0, 1, 3)
self.taurusLabel_27 = TaurusLabel(self.groupBox_3)
self.taurusLabel_27.setObjectName(_fromUtf8("taurusLabel_27"))
self.gridLayout_4.addWidget(self.taurusLabel_27, 2, 1, 1, 1)
self.taurusLabel_28 = TaurusLabel(self.groupBox_3)
self.taurusLabel_28.setObjectName(_fromUtf8("taurusLabel_28"))
self.gridLayout_4.addWidget(self.taurusLabel_28, 2, 0, 1, 1)
self.taurusValueLineEdit = TaurusValueLineEdit(self.groupBox_3)
self.taurusValueLineEdit.setObjectName(_fromUtf8("taurusValueLineEdit"))
self.gridLayout_4.addWidget(self.taurusValueLineEdit, 2, 2, 1, 1)
self.taurusLed = TaurusLed(self.groupBox_3)
self.taurusLed.setObjectName(_fromUtf8("taurusLed"))
self.gridLayout_4.addWidget(self.taurusLed, 1, 0, 1, 1)
self.taurusLabel_31 = TaurusLabel(self.groupBox_3)
self.taurusLabel_31.setObjectName(_fromUtf8("taurusLabel_31"))
self.gridLayout_4.addWidget(self.taurusLabel_31, 3, 0, 1, 1)
self.taurusLabel_32 = TaurusLabel(self.groupBox_3)
self.taurusLabel_32.setObjectName(_fromUtf8("taurusLabel_32"))
self.gridLayout_4.addWidget(self.taurusLabel_32, 3, 1, 1, 1)
self.taurusValueLineEdit_3 = TaurusValueLineEdit(self.groupBox_3)
self.taurusValueLineEdit_3.setObjectName(_fromUtf8("taurusValueLineEdit_3"))
self.gridLayout_4.addWidget(self.taurusValueLineEdit_3, 3, 2, 1, 1)
self.taurusLabel_35 = TaurusLabel(self.groupBox_3)
self.taurusLabel_35.setObjectName(_fromUtf8("taurusLabel_35"))
self.gridLayout_4.addWidget(self.taurusLabel_35, 4, 0, 1, 1)
self.taurusLabel_36 = TaurusLabel(self.groupBox_3)
self.taurusLabel_36.setObjectName(_fromUtf8("taurusLabel_36"))
self.gridLayout_4.addWidget(self.taurusLabel_36, 4, 1, 1, 1)
self.taurusValueLineEdit_5 = TaurusValueLineEdit(self.groupBox_3)
self.taurusValueLineEdit_5.setObjectName(_fromUtf8("taurusValueLineEdit_5"))
self.gridLayout_4.addWidget(self.taurusValueLineEdit_5, 4, 2, 1, 1)
self.horizontalLayout_3 = QtGui.QHBoxLayout()
self.horizontalLayout_3.setObjectName(_fromUtf8("horizontalLayout_3"))
self.taurusCommandButton = TaurusCommandButton(self.groupBox_3)
self.taurusCommandButton.setObjectName(_fromUtf8("taurusCommandButton"))
self.horizontalLayout_3.addWidget(self.taurusCommandButton)
self.cfgMg1 = QtGui.QToolButton(self.groupBox_3)
self.cfgMg1.setObjectName(_fromUtf8("cfgMg1"))
self.horizontalLayout_3.addWidget(self.cfgMg1)
self.gridLayout_4.addLayout(self.horizontalLayout_3, 1, 1, 1, 2)
self.gridLayout_2.addWidget(self.groupBox_3, 1, 2, 1, 1)
self.retranslateUi(Form)
QtCore.QMetaObject.connectSlotsByName(Form)
def retranslateUi(self, Form):
Form.setWindowTitle(QtGui.QApplication.translate("Form", "Form", None, QtGui.QApplication.UnicodeUTF8))
self.groupBox.setTitle(QtGui.QApplication.translate("Form", "CTs of CTRL1", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel.setModel(QtGui.QApplication.translate("Form", "expchan/dummyctctrl1/1/value?configuration=dev_alias", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel.setBgRole(QtGui.QApplication.translate("Form", "none", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_2.setModel(QtGui.QApplication.translate("Form", "expchan/dummyctctrl1/1/value", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLed_2.setModel(QtGui.QApplication.translate("Form", "expchan/dummyctctrl1/1/state", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_3.setModel(QtGui.QApplication.translate("Form", "expchan/dummyctctrl1/2/value?configuration=dev_alias", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_3.setBgRole(QtGui.QApplication.translate("Form", "none", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_4.setModel(QtGui.QApplication.translate("Form", "expchan/dummyctctrl1/2/value", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLed_3.setModel(QtGui.QApplication.translate("Form", "expchan/dummyctctrl1/2/state", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_5.setModel(QtGui.QApplication.translate("Form", "expchan/dummyctctrl1/3/value?configuration=dev_alias", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_5.setBgRole(QtGui.QApplication.translate("Form", "none", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_6.setModel(QtGui.QApplication.translate("Form", "expchan/dummyctctrl1/3/value", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLed_4.setModel(QtGui.QApplication.translate("Form", "expchan/dummyctctrl1/3/state", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_7.setModel(QtGui.QApplication.translate("Form", "expchan/dummyctctrl1/4/value?configuration=dev_alias", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_7.setBgRole(QtGui.QApplication.translate("Form", "none", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_8.setModel(QtGui.QApplication.translate("Form", "expchan/dummyctctrl1/4/value", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLed_5.setModel(QtGui.QApplication.translate("Form", "expchan/dummyctctrl1/4/state", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_9.setModel(QtGui.QApplication.translate("Form", "expchan/dummyctctrl1/5/value?configuration=dev_alias", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_9.setBgRole(QtGui.QApplication.translate("Form", "none", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_10.setModel(QtGui.QApplication.translate("Form", "expchan/dummyctctrl1/5/value", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLed_6.setModel(QtGui.QApplication.translate("Form", "expchan/dummyctctrl1/5/state", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_11.setModel(QtGui.QApplication.translate("Form", "expchan/dummyctctrl1/6/value?configuration=dev_alias", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_11.setBgRole(QtGui.QApplication.translate("Form", "none", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_12.setModel(QtGui.QApplication.translate("Form", "expchan/dummyctctrl1/6/value", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLed_7.setModel(QtGui.QApplication.translate("Form", "expchan/dummyctctrl1/6/state", None, QtGui.QApplication.UnicodeUTF8))
self.groupBox_2.setTitle(QtGui.QApplication.translate("Form", "CTs of CTRL2", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_13.setModel(QtGui.QApplication.translate("Form", "expchan/dummyctctrl2/1/value?configuration=dev_alias", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_13.setBgRole(QtGui.QApplication.translate("Form", "none", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_14.setModel(QtGui.QApplication.translate("Form", "expchan/dummyctctrl2/1/value", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLed_8.setModel(QtGui.QApplication.translate("Form", "expchan/dummyctctrl2/1/state", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_15.setModel(QtGui.QApplication.translate("Form", "expchan/dummyctctrl2/2/value?configuration=dev_alias", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_15.setBgRole(QtGui.QApplication.translate("Form", "none", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_16.setModel(QtGui.QApplication.translate("Form", "expchan/dummyctctrl2/2/value", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLed_9.setModel(QtGui.QApplication.translate("Form", "expchan/dummyctctrl2/2/state", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_17.setModel(QtGui.QApplication.translate("Form", "expchan/dummyctctrl2/3/value?configuration=dev_alias", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_17.setBgRole(QtGui.QApplication.translate("Form", "none", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_18.setModel(QtGui.QApplication.translate("Form", "expchan/dummyctctrl2/3/value", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLed_10.setModel(QtGui.QApplication.translate("Form", "expchan/dummyctctrl2/3/state", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_19.setModel(QtGui.QApplication.translate("Form", "expchan/dummyctctrl2/4/value?configuration=dev_alias", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_19.setBgRole(QtGui.QApplication.translate("Form", "none", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_20.setModel(QtGui.QApplication.translate("Form", "expchan/dummyctctrl2/4/value", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLed_11.setModel(QtGui.QApplication.translate("Form", "expchan/dummyctctrl2/4/state", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_21.setModel(QtGui.QApplication.translate("Form", "expchan/dummyctctrl2/5/value?configuration=dev_alias", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_21.setBgRole(QtGui.QApplication.translate("Form", "none", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_22.setModel(QtGui.QApplication.translate("Form", "expchan/dummyctctrl2/5/value", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLed_12.setModel(QtGui.QApplication.translate("Form", "expchan/dummyctctrl2/5/state", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_23.setModel(QtGui.QApplication.translate("Form", "expchan/dummyctctrl2/6/value?configuration=dev_alias", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_23.setBgRole(QtGui.QApplication.translate("Form", "none", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_24.setModel(QtGui.QApplication.translate("Form", "expchan/dummyctctrl2/6/value", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLed_13.setModel(QtGui.QApplication.translate("Form", "expchan/dummyctctrl2/6/state", None, QtGui.QApplication.UnicodeUTF8))
self.groupBox_4.setTitle(QtGui.QApplication.translate("Form", "MG2", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_26.setModel(QtGui.QApplication.translate("Form", "mntgrp/v3/mg2/elementlist", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_26.setBgRole(QtGui.QApplication.translate("Form", "none", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLed_14.setModel(QtGui.QApplication.translate("Form", "mntgrp/v3/mg2/state", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_29.setModel(QtGui.QApplication.translate("Form", "mntgrp/v3/mg2/integrationtime?configuration=label", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_29.setBgRole(QtGui.QApplication.translate("Form", "none", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_30.setModel(QtGui.QApplication.translate("Form", "mntgrp/v3/mg2/integrationtime", None, QtGui.QApplication.UnicodeUTF8))
self.taurusValueLineEdit_2.setModel(QtGui.QApplication.translate("Form", "mntgrp/v3/mg2/integrationtime", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_33.setModel(QtGui.QApplication.translate("Form", "mntgrp/v3/mg2/monitorcount?configuration=label", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_33.setBgRole(QtGui.QApplication.translate("Form", "none", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_34.setModel(QtGui.QApplication.translate("Form", "mntgrp/v3/mg2/monitorcount", None, QtGui.QApplication.UnicodeUTF8))
self.taurusValueLineEdit_4.setModel(QtGui.QApplication.translate("Form", "mntgrp/v3/mg2/monitorcount", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_37.setModel(QtGui.QApplication.translate("Form", "mntgrp/v3/mg2/acquisitionmode?configuration=label", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_37.setBgRole(QtGui.QApplication.translate("Form", "none", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_38.setModel(QtGui.QApplication.translate("Form", "mntgrp/v3/mg2/acquisitionmode", None, QtGui.QApplication.UnicodeUTF8))
self.taurusValueLineEdit_6.setModel(QtGui.QApplication.translate("Form", "mntgrp/v3/mg2/acquisitionmode", None, QtGui.QApplication.UnicodeUTF8))
self.taurusCommandButton_2.setModel(QtGui.QApplication.translate("Form", "mntgrp/v3/mg2", None, QtGui.QApplication.UnicodeUTF8))
self.taurusCommandButton_2.setCommand(QtGui.QApplication.translate("Form", "start", None, QtGui.QApplication.UnicodeUTF8))
self.cfgMg2.setText(QtGui.QApplication.translate("Form", "...", None, QtGui.QApplication.UnicodeUTF8))
self.groupBox_3.setTitle(QtGui.QApplication.translate("Form", "MG1", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_25.setModel(QtGui.QApplication.translate("Form", "mntgrp/v3/mg1/elementlist", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_25.setBgRole(QtGui.QApplication.translate("Form", "none", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_27.setModel(QtGui.QApplication.translate("Form", "mntgrp/v3/mg1/integrationtime", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_28.setModel(QtGui.QApplication.translate("Form", "mntgrp/v3/mg1/integrationtime?configuration=label", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_28.setBgRole(QtGui.QApplication.translate("Form", "none", None, QtGui.QApplication.UnicodeUTF8))
self.taurusValueLineEdit.setModel(QtGui.QApplication.translate("Form", "mntgrp/v3/mg1/integrationtime", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLed.setModel(QtGui.QApplication.translate("Form", "mntgrp/v3/mg1/state", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_31.setModel(QtGui.QApplication.translate("Form", "mntgrp/v3/mg1/monitorcount?configuration=label", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_31.setBgRole(QtGui.QApplication.translate("Form", "none", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_32.setModel(QtGui.QApplication.translate("Form", "mntgrp/v3/mg1/monitorcount", None, QtGui.QApplication.UnicodeUTF8))
self.taurusValueLineEdit_3.setModel(QtGui.QApplication.translate("Form", "mntgrp/v3/mg1/monitorcount", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_35.setModel(QtGui.QApplication.translate("Form", "mntgrp/v3/mg1/acquisitionmode?configuration=label", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_35.setBgRole(QtGui.QApplication.translate("Form", "none", None, QtGui.QApplication.UnicodeUTF8))
self.taurusLabel_36.setModel(QtGui.QApplication.translate("Form", "mntgrp/v3/mg1/acquisitionmode", None, QtGui.QApplication.UnicodeUTF8))
self.taurusValueLineEdit_5.setModel(QtGui.QApplication.translate("Form", "mntgrp/v3/mg1/acquisitionmode", None, QtGui.QApplication.UnicodeUTF8))
self.taurusCommandButton.setModel(QtGui.QApplication.translate("Form", "mntgrp/v3/mg1", None, QtGui.QApplication.UnicodeUTF8))
self.taurusCommandButton.setCommand(QtGui.QApplication.translate("Form", "start", None, QtGui.QApplication.UnicodeUTF8))
self.cfgMg1.setText(QtGui.QApplication.translate("Form", "...", None, QtGui.QApplication.UnicodeUTF8))
from taurus.qt.qtgui.display import TaurusLabel, TaurusLed
from taurus.qt.qtgui.input import TaurusValueLineEdit
from taurus.qt.qtgui.button import TaurusCommandButton
if __name__ == "__main__":
import sys
app = QtGui.QApplication(sys.argv)
Form = QtGui.QWidget()
ui = Ui_Form()
ui.setupUi(Form)
Form.show()
sys.exit(app.exec_())
|
__author__ = 'Maria'
import constants
from watson_developer_cloud import ConversationV1
import pprint
#API: https://www.ibm.com/watson/developercloud/conversation/api/v1/
class Watson(object):
def __init__(self):
self.conversation = ConversationV1(
username=constants.WATSON['username'],
password=constants.WATSON['password'],
version=constants.WATSON['version'], )
self.context = None
def getAnswer(self, response):
return response['output']['text'][0] if response['output']['text'] else ''
def getIntent(self, response):
return response['intents'][0]['intent'] if response['intents'] else ''
def getInput(self, response):
return response['input']['text']
def setContext(self, response):
self.context = response['context']
def askWatson(self, text):
response = self.conversation.message(workspace_id=constants.WATSON['workspaceID'], input={'text': text}, context=self.context)
self.setContext(response)
return response
def askWatsonNoContext(self, text):
response = self.conversation.message(workspace_id=constants.WATSON['workspaceID'], input={'text': text})
self.setContext(response)
return response
if __name__ == "__main__":
watson = Watson()
response = watson.askWatson('hi')
print watson.getInput(response)
print watson.getAnswer(response)
print watson.getIntent(response)
pprint.pprint(response)
response = watson.askWatson("I'm so hungry")
print watson.getInput(response)
print watson.getAnswer(response)
print watson.getIntent(response)
pprint.pprint(response)
response = watson.askWatson('I would like some pasta')
print watson.getInput(response)
print watson.getAnswer(response)
print watson.getIntent(response)
pprint.pprint(response)
response = watson.askWatson('Catch you later')
print watson.getInput(response)
print watson.getAnswer(response)
print watson.getIntent(response)
pprint.pprint(response)
|
# -*- coding: utf-8 -*-
#
# MCN CC SDK documentation build configuration file, created by
# sphinx-quickstart on Fri Feb 14 14:01:16 2014.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'MCN CC SDK'
copyright = u'2013-2015, Intel Performance Learning Solutions Ltd.'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.1'
# The full version, including alpha/beta/rc tags.
release = '0.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'nature'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'MCNCCSDKdoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'MCNCCSDK.tex', u'MCN CC SDK Documentation',
u'Intel Performance Learning Solutions Ltd.', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'mcnccsdk', u'MCN CC SDK Documentation',
[u'Intel Performance Learning Solutions Ltd.'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'MCNCCSDK', u'MCN CC SDK Documentation',
u'Intel Performance Learning Solutions Ltd.', 'MCNCCSDK', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
|
from gnuradio import gr
class const_multi_cc(gr.hier_block2):
""" Constant channel model.
"""
def __init__(self, tx_id, rx_id,
k11=0.0, k12=1.0, k13=1.0,
k21=1.0, k22=0.0, k23=1.0,
k31=1.0, k32=1.0, k33=0.0):
gr.hier_block2.__init__(
self, "No HW model",
gr.io_signature(1, 1, gr.sizeof_gr_complex),
gr.io_signature(1, 1, gr.sizeof_gr_complex),
)
##################################################
# Parameters
##################################################
# Use Symmetric channels for this model
#k21 = k12
#k31 = k13
#k32 = k23
# No self-coupling
#k11 = k22 = k33 = 0
# Build the channel matrix
self.k = [[k11, k12, k13],
[k21, k22, k23],
[k31, k32, k33]]
##################################################
# Blocks
##################################################
self.multiply = gr.multiply_const_cc(self.k[tx_id - 1][rx_id - 1])
print "[INFO] WiNeLo - Channel model: Setting k = %s for clients %s "\
"and %s" % (self.k[tx_id - 1][rx_id - 1], tx_id, rx_id)
##################################################
# Connections
##################################################
self.connect((self, 0), (self.multiply, 0))
self.connect((self.multiply, 0), (self, 0))
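if __name__ == '__main__':
    # Minimal usage sketch (assumes a GNU Radio 3.6-style install where
    # gr.multiply_const_cc exists, just as the block above already does):
    # build the channel for the link from client 1 to client 2 with a
    # custom coupling factor and show the resulting channel matrix.
    channel = const_multi_cc(tx_id=1, rx_id=2, k12=0.5)
    print "[INFO] WiNeLo - Example channel matrix: %s" % channel.k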
|
from functools import wraps
from flask import Blueprint, request, redirect, render_template, url_for, g, flash
from flask.ext.login import LoginManager, login_user, logout_user, current_user
from flask_wtf import Form
from wtforms import StringField, PasswordField
import base64
from . import auth
login_manager = LoginManager()
login_manager.login_view = 'login.login'
bp = Blueprint('login', __name__)
def init(app):
login_manager.init_app(app)
auth.init(app)
class LoginForm(Form):
    # Note: no input validation, the submitted value will be handled in the auth module itself
# otherwise we'd have to fetch the full user list for every login
username = StringField('User')
password = PasswordField('Password')
@bp.route('/login', methods=["GET", "POST"])
def login():
"""
Presents the login page
If login data is POSTed, the credentials are validated and the user logged in if successful
"""
form = LoginForm()
if request.method == 'POST' and form.is_submitted():
usr = auth.auth_user(form.username.data, form.password.data)
if usr and usr.has_role('login') and usr.is_active:
login_user(usr)
return redirect(request.args.get('next') or url_for('items.overview'))
elif usr is None:
flash('invalid credentials', 'error')
elif not usr.is_active:
flash('login expired', 'error')
else:
flash('insufficient permissions', 'error')
return render_template('login.html', form=form)
@bp.route('/logout')
def logout():
"""
Performs a logout on the user
"""
logout_user()
return redirect(url_for('login.login'))
def role_required(roles):
"""
Decorator that ensures the current user has
- one of the specified roles (if a tuple)
- the specified role (otherwise)
"""
def real_role_required(f):
@wraps(f)
def wrapper(*args, **kwargs):
introles = roles
if not isinstance(introles, tuple):
introles = (introles,)
valid = False
if current_user.is_authenticated:
for role in introles:
if current_user.has_role(role):
valid = True
break
if not valid:
flash('insufficient privileges to access this page', 'danger')
return login_manager.unauthorized()
return f(*args, **kwargs)
return wrapper
return real_role_required
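# Usage sketch (hypothetical view and role names, not part of this module):
# role_required is stacked below the route decorator so the role check runs
# before the view function.
#
#   @bp.route('/admin')
#   @role_required(('admin', 'itemadmin'))
#   def admin_page():
#       return render_template('admin.html')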
@login_manager.user_loader
def load_user(username):
"""
Default user loader for the login plugin
"""
return auth.get_user(username)
@login_manager.request_loader
def load_from_request(request):
"""
User loader from basic authorization header
(i.e. for external API)
"""
try:
authinfo = request.headers.get('Authorization', '').replace('Basic ', '', 1)
username, password = base64.b64decode(authinfo).decode('utf-8').split(':')
except:
return None
usr = auth.auth_user(username, password)
if usr and usr.has_role('request_login'):
return usr
return None
|
# Copyright 2021 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Definitions for MoViNet structures.
Reference: "MoViNets: Mobile Video Networks for Efficient Video Recognition"
https://arxiv.org/pdf/2103.11511.pdf
MoViNets are efficient video classification networks that are part of a model
family, ranging from the smallest model, MoViNet-A0, to the largest model,
MoViNet-A6. Each model has various width, depth, input resolution, and input
frame-rate associated with them. See the main paper for more details.
"""
import dataclasses
from official.core import config_definitions as cfg
from official.core import exp_factory
from official.modeling import hyperparams
from official.vision.beta.configs import backbones_3d
from official.vision.beta.configs import common
from official.vision.beta.configs import video_classification
@dataclasses.dataclass
class Movinet(hyperparams.Config):
"""Backbone config for Base MoViNet."""
model_id: str = 'a0'
causal: bool = False
use_positional_encoding: bool = False
# Choose from ['3d', '2plus1d', '3d_2plus1d']
# 3d: default 3D convolution
# 2plus1d: (2+1)D convolution with Conv2D (2D reshaping)
# 3d_2plus1d: (2+1)D convolution with Conv3D (no 2D reshaping)
conv_type: str = '3d'
activation: str = 'swish'
gating_activation: str = 'sigmoid'
stochastic_depth_drop_rate: float = 0.2
use_external_states: bool = False
@dataclasses.dataclass
class MovinetA0(Movinet):
"""Backbone config for MoViNet-A0.
Represents the smallest base MoViNet searched by NAS.
Reference: https://arxiv.org/pdf/2103.11511.pdf
"""
model_id: str = 'a0'
@dataclasses.dataclass
class MovinetA1(Movinet):
"""Backbone config for MoViNet-A1."""
model_id: str = 'a1'
@dataclasses.dataclass
class MovinetA2(Movinet):
"""Backbone config for MoViNet-A2."""
model_id: str = 'a2'
@dataclasses.dataclass
class MovinetA3(Movinet):
"""Backbone config for MoViNet-A3."""
model_id: str = 'a3'
@dataclasses.dataclass
class MovinetA4(Movinet):
"""Backbone config for MoViNet-A4."""
model_id: str = 'a4'
@dataclasses.dataclass
class MovinetA5(Movinet):
"""Backbone config for MoViNet-A5.
Represents the largest base MoViNet searched by NAS.
"""
model_id: str = 'a5'
@dataclasses.dataclass
class MovinetT0(Movinet):
"""Backbone config for MoViNet-T0.
MoViNet-T0 is a smaller version of MoViNet-A0 for even faster processing.
"""
model_id: str = 't0'
@dataclasses.dataclass
class Backbone3D(backbones_3d.Backbone3D):
"""Configuration for backbones.
Attributes:
    type: 'str', type of backbone to be used, one of the fields below.
movinet: movinet backbone config.
"""
type: str = 'movinet'
movinet: Movinet = Movinet()
@dataclasses.dataclass
class MovinetModel(video_classification.VideoClassificationModel):
"""The MoViNet model config."""
model_type: str = 'movinet'
backbone: Backbone3D = Backbone3D()
norm_activation: common.NormActivation = common.NormActivation(
activation='swish',
norm_momentum=0.99,
norm_epsilon=1e-3,
use_sync_bn=True)
output_states: bool = False
@exp_factory.register_config_factory('movinet_kinetics600')
def movinet_kinetics600() -> cfg.ExperimentConfig:
"""Video classification on Videonet with MoViNet backbone."""
exp = video_classification.video_classification_kinetics600()
exp.task.train_data.dtype = 'bfloat16'
exp.task.validation_data.dtype = 'bfloat16'
model = MovinetModel()
exp.task.model = model
return exp
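if __name__ == '__main__':
  # Minimal sketch (assumes the TF Model Garden packages imported above are
  # installed): the @exp_factory.register_config_factory decorator makes the
  # experiment config retrievable by name.
  config = exp_factory.get_exp_config('movinet_kinetics600')
  print(config.task.model.backbone.movinet.model_id)  # expected: 'a0'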
|
# coding=utf-8
"""Overrides for Discord.py classes"""
import contextlib
import inspect
import io
import itertools
import re
import discord
from discord.ext.commands import HelpFormatter as HelpF, Paginator, Command
from bot.utils import polr, privatebin
from bot.utils.args import ArgParseConverter as ArgPC
def create_help(cmd, parser):
"""Creates an updated usage for the help command"""
default = cmd.params['args'].default
    if cmd.signature.split("[")[-1] == (f"args={default}]" if default else "args]"):
sio = io.StringIO()
with contextlib.redirect_stdout(sio):
parser.print_help()
sio.seek(0)
s = sio.read()
# Strip the filename and trailing newline from help text
arg_part = s[(len(str(s[7:]).split()[0]) + 8):-1]
k = cmd.qualified_name
spt = len(k.split())
# Remove a duplicate command name + leading arguments
split_sig = cmd.signature.split()[spt:]
return "[".join((" ".join(split_sig)).split("[")[:-1]) + arg_part
return cmd.usage
class HelpFormatter(HelpF):
"""Custom override for the default help command"""
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self._paginator = None
async def format(self):
"""Handles the actual behaviour involved with formatting.
To change the behaviour, this method should be overridden.
Returns
--------
list
A paginated output of the help command.
"""
self._paginator = Paginator()
# we need a padding of ~80 or so
description = self.command.description if not self.is_cog() else inspect.getdoc(self.command)
if description:
# <description> portion
self._paginator.add_line(description, empty=True)
if isinstance(self.command, Command):
# <signature portion>
if self.command.params.get("args", None) and type(self.command.params['args'].annotation) == ArgPC:
self.command.usage = create_help(self.command, self.command.params['args'].annotation.parser)
signature = self.get_command_signature()
self._paginator.add_line(signature, empty=True)
# <long doc> section
if self.command.help:
self._paginator.add_line(self.command.help, empty=True)
# end it here if it's just a regular command
if not self.has_subcommands():
self._paginator.close_page()
return self._paginator.pages
max_width = self.max_name_size
def category(tup):
"""Splits the help command into categories for easier readability"""
cog = tup[1].cog_name
# we insert the zero width space there to give it approximate
# last place sorting position.
return cog + ':' if cog is not None else '\u200bNo Category:'
filtered = await self.filter_command_list()
if self.is_bot():
data = sorted(filtered, key=category)
for category, commands in itertools.groupby(data, key=category):
# there simply is no prettier way of doing this.
commands = sorted(commands)
if len(commands) > 0:
self._paginator.add_line(category)
self._add_subcommands_to_page(max_width, commands)
else:
filtered = sorted(filtered)
if filtered:
self._paginator.add_line('Commands:')
self._add_subcommands_to_page(max_width, filtered)
# add the ending note
self._paginator.add_line()
ending_note = self.get_ending_note()
self._paginator.add_line(ending_note)
return self._paginator.pages
_mentions_transforms = {
'@everyone': '@\u200beveryone',
'@here': '@\u200bhere'
}
_mention_pattern = re.compile('|'.join(_mentions_transforms.keys()))
def _is_submodule(parent, child):
return parent == child or child.startswith(parent + ".")
async def _default_help_command(ctx, *commands: str):
"""Shows this message."""
bot = ctx.bot
destination = ctx.message.author if bot.pm_help else ctx.message.channel
def repl(obj):
return _mentions_transforms.get(obj.group(0), '')
# help by itself just lists our own commands.
if len(commands) == 0:
pages = await bot.formatter.format_help_for(ctx, bot)
elif len(commands) == 1:
# try to see if it is a cog name
name = _mention_pattern.sub(repl, commands[0])
command = None
if name in bot.cogs:
command = bot.cogs[name]
else:
command = bot.all_commands.get(name)
if command is None:
await destination.send(bot.command_not_found.format(name))
return
pages = await bot.formatter.format_help_for(ctx, command)
else:
name = _mention_pattern.sub(repl, commands[0])
command = bot.all_commands.get(name)
if command is None:
await destination.send(bot.command_not_found.format(name))
return
for key in commands[1:]:
try:
key = _mention_pattern.sub(repl, key)
command = command.all_commands.get(key)
if command is None:
await destination.send(bot.command_not_found.format(key))
return
except AttributeError:
await destination.send(bot.command_has_no_subcommands.format(command, key))
return
pages = await bot.formatter.format_help_for(ctx, command)
if bot.pm_help is None:
characters = sum(map(len, pages))
# modify destination based on length of pages.
if characters > 1000:
destination = ctx.message.author
for page in pages:
try:
await destination.send(page)
except discord.Forbidden:
destination = ctx.message.channel
await destination.send(page)
old_send = discord.abc.Messageable.send
async def send(self, content=None, **kwargs):
"""Overrides default send method in order to create a paste if the response is more than 2000 characters"""
if content is not None and any(x in str(content) for x in ["@everyone", "@here"]):
content = content.replace("@everyone", "@\u0435veryone").replace("@here", "@h\u0435re")
if content is not None and len(str(content)) > 2000:
if content.startswith("```py"):
content = "\n".join(content.split("\n")[1:-1])
paste = await privatebin.upload(content, expires="15min", server=self.bot.priv)
if self.bot.polr:
paste = await polr.shorten(paste, **self.bot.polr)
return await old_send(self, f"Hey, I couldn't handle all the text I was gonna send you, so I put it in a paste!"
f"\nThe link is **{paste}**, but it expires in 15 minutes, so get it quick!",
**kwargs)
else:
return await old_send(self, content, **kwargs)
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
"""Extract a closed coast line
Extracts a coast line from GSHHS using the
advanced polygon handling features in Basemap
The polygons are saved to a two-column
text file, using NaNs to separate the polygons.
An example of how to read back the data and
plot filled land is given in pcoast.py
"""
# ----------------------------------
# Bjørn Ådlandsvik <bjorn at imr.no>
# Institute of Marine Research
# 2014-10-12
# ----------------------------------
# ---------------
# Imports
# ---------------
import matplotlib.pyplot as plt
from mpl_toolkits.basemap import Basemap
# -----------------
# User settings
# -----------------
# Geographical extent (should include all land in domain)
lon0, lon1 = -12, 16 # Longitude range
lat0, lat1 = 47, 66 # Latitude range
# Choose GSHHS resolution
res = 'i' # intermediate resolution
# Output coast file
outfile = 'data/coast.dat'
# ------------------------------
# Set up Basemap map projection
# ------------------------------
# Use cylindrical equidistant projection
# i.e. x = lon, y = lat
m = Basemap(projection = 'cyl',
llcrnrlon = lon0,
llcrnrlat = lat0,
urcrnrlon = lon1,
urcrnrlat = lat1,
resolution = res)
# ----------------------------
# Get the coast polygon data
# ----------------------------
polygons = []
for i, p in enumerate(m.coastpolygons):
# Use only coast polygons (ignore lakes)
if m.coastpolygontypes[i] == 1:
polygons.append(p)
# --------------------
# Save the coast data
# --------------------
with open(outfile, 'w') as fid:
for p in polygons: # Loop over the polygons
for v in zip(*p): # Loop over the vertices
fid.write('{:7.3f}{:7.3f}\n'.format(*v))
fid.write(' Nan Nan\n') # Separate the polygons
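# --------------------------------------------------------------
# Read-back sketch (assumed; the real example lives in pcoast.py)
# --------------------------------------------------------------
# The file holds two whitespace-separated columns with a 'Nan Nan' row
# between polygons, so something like the following should reproduce
# filled land (np.loadtxt turns the 'Nan' rows into NaN vertices, which
# break the fill path between polygons):
#
#   import numpy as np
#   data = np.loadtxt(outfile)
#   plt.fill(data[:, 0], data[:, 1], facecolor='0.8', edgecolor='k')
#   plt.axis([lon0, lon1, lat0, lat1])
#   plt.show()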
|
# -*- coding: utf-8 -*-
"""
The classical toll station (turnpike) problem:
given the pairwise distances between all toll stations on a long road,
reconstruct the locations of the stations.
"""
import random
def points_generator(number, rangee):
lst = range(1,rangee+1)
random.shuffle(lst)
f =lst[0:number-1]
f.sort()
result = [0] + f
return result
lst = points_generator(7,100)
print 'the original points and their reverse are:'
print lst
def reverse(lst):
end = lst[-1]
result = []
for i in range(len(lst)):
result.append(end - lst[-i-1])
return result
print reverse(lst)
def distances(lst):
result = []
lenn = len(lst)
for i in range(lenn-1):
for j in range(i+1,lenn):
result.append(lst[j] - lst[i])
random.shuffle(result)
return result
dist = distances(lst)
dist.sort(reverse = True)
print 'the distances between these points are'
print dist
found = False
def locate(dist):
d = dist[:]
result = []
def position(puton, ds):
puton.sort()
rt = [puton[0]+ds, puton[-1] - ds]
for i in range(1,len(puton)-1):
if puton[i] - ds > 0:
rt.append(puton[i]-ds)
if puton[i] + ds < puton[-1]:
rt.append(puton[i]+ds)
print 'try the following positions'
print rt
return rt
def inloc(puton, dd):
global found
if dd == []:
puton.sort()
found = True
result.extend(puton)
print 'the tolls are:'
print puton
else:
for i in position(puton, dd[0]):
if found == True:
break
else:
j = 0
tempd = dd[:]
print 'try %d'%i
while j < len(puton) and abs(puton[j]-i) in tempd:
tempd.remove(abs(puton[j]-i))
j += 1
if j == len(puton):
tempd.sort(reverse =True)
print 'the new puton is:'
print puton + [i]
print 'the removed distances set is:'
print tempd
inloc(puton + [i], tempd)
else:
print '%d is not suitable'%i
else:
print 'there is no way. we have to go back'
inloc([0,d[0]],d[1:])
return result
print locate(dist)
|
import wx
class MyFrame(wx.Frame):
def __init__(self, parent, title):
wx.Frame.__init__(self, parent, title=title)
btn = wx.Button(self, label="SomeProcessing")
self.Bind(wx.EVT_BUTTON, self.SomeProcessing, btn)
def SomeProcessing(self,event):
self.dlg = Dlg_GetUserInput(self)
if self.dlg.ShowModal() == wx.ID_OK:
if self.dlg.sel1.GetValue():
print 'sel1 processing'
self.data_after_processing = 'bar'
if self.dlg.sel2.GetValue():
print 'sel2 processing'
self.data_after_processing = 'foo'
class Dlg_GetUserInput(wx.Dialog):
def __init__(self, parent):
wx.Dialog.__init__(self, parent)
self.sel1 = wx.CheckBox(self, label='Selection 1')
self.sel2 = wx.CheckBox(self, label='Selection 2')
self.OK = wx.Button(self, wx.ID_OK)
sizer = wx.BoxSizer(wx.VERTICAL)
sizer.Add(self.sel1)
sizer.Add(self.sel2)
sizer.Add(self.OK)
self.SetSizer(sizer)
def test():
app = wx.App()
mf = MyFrame(None, 'testgui')
for item in mf.GetChildren():
if item.GetLabel() == 'SomeProcessing':
btn = item
break
def clickOK():
dlg = wx.GetActiveWindow()
dlg.sel2.SetValue(True)
clickEvent = wx.CommandEvent(wx.wxEVT_COMMAND_BUTTON_CLICKED, wx.ID_OK)
dlg.ProcessEvent(clickEvent)
event = wx.CommandEvent(wx.wxEVT_COMMAND_BUTTON_CLICKED, btn.GetId())
wx.CallAfter(clickOK)
mf.GetEventHandler().ProcessEvent(event)
print 'data_after_processing:', mf.data_after_processing
mf.Destroy()
test()
|
import numpy as np
import matplotlib.mlab as mlab
import matplotlib.pyplot as plt
from scipy.ndimage.filters import maximum_filter
from scipy.ndimage.morphology import (generate_binary_structure,
iterate_structure, binary_erosion)
import hashlib
from operator import itemgetter
IDX_FREQ_I = 0
IDX_TIME_J = 1
######################################################################
# Sampling rate, related to the Nyquist conditions, which affects
# the range frequencies we can detect.
DEFAULT_FS = 44100
######################################################################
# Size of the FFT window, affects frequency granularity
DEFAULT_WINDOW_SIZE = 4096
######################################################################
# Ratio by which each sequential window overlaps the last and the
# next window. Higher overlap will allow a higher granularity of offset
# matching, but potentially more fingerprints.
DEFAULT_OVERLAP_RATIO = 0.5
######################################################################
# Degree to which a fingerprint can be paired with its neighbors --
# higher will cause more fingerprints, but potentially better accuracy.
DEFAULT_FAN_VALUE = 15
######################################################################
# Minimum amplitude in spectrogram in order to be considered a peak.
# This can be raised to reduce number of fingerprints, but can negatively
# affect accuracy.
DEFAULT_AMP_MIN = 10
######################################################################
# Number of cells around an amplitude peak in the spectrogram in order
# for Dejavu to consider it a spectral peak. Higher values mean less
# fingerprints and faster matching, but can potentially affect accuracy.
PEAK_NEIGHBORHOOD_SIZE = 20
######################################################################
# Thresholds on how close or far fingerprints can be in time in order
# to be paired as a fingerprint. If your max is too low, higher values of
# DEFAULT_FAN_VALUE may not perform as expected.
MIN_HASH_TIME_DELTA = 0
MAX_HASH_TIME_DELTA = 200
######################################################################
# If True, will sort peaks temporally for fingerprinting;
# not sorting will cut down number of fingerprints, but potentially
# affect performance.
PEAK_SORT = True
######################################################################
# Number of bits to throw away from the front of the SHA1 hash in the
# fingerprint calculation. The more you throw away, the less storage, but
# potentially higher collisions and misclassifications when identifying songs.
FINGERPRINT_REDUCTION = 20
def fingerprint(channel_samples, Fs=DEFAULT_FS,
wsize=DEFAULT_WINDOW_SIZE,
wratio=DEFAULT_OVERLAP_RATIO,
fan_value=DEFAULT_FAN_VALUE,
amp_min=DEFAULT_AMP_MIN):
"""
FFT the channel, log transform output, find local maxima, then return
locally sensitive hashes.
"""
# FFT the signal and extract frequency components
arr2D = mlab.specgram(
channel_samples,
NFFT=wsize,
Fs=Fs,
window=mlab.window_hanning,
noverlap=int(wsize * wratio))[0]
# apply log transform since specgram() returns linear array
arr2D = 10 * np.log10(arr2D)
arr2D[arr2D == -np.inf] = 0 # replace infs with zeros
# find local maxima
local_maxima = get_2D_peaks(arr2D, plot=False, amp_min=amp_min)
# return hashes
return generate_hashes(local_maxima, fan_value=fan_value)
def get_2D_peaks(arr2D, plot=False, amp_min=DEFAULT_AMP_MIN):
# http://docs.scipy.org/doc/scipy/reference/generated/scipy.ndimage.morphology.iterate_structure.html#scipy.ndimage.morphology.iterate_structure
struct = generate_binary_structure(2, 1)
neighborhood = iterate_structure(struct, PEAK_NEIGHBORHOOD_SIZE)
    # find local maxima using our filter shape
local_max = maximum_filter(arr2D, footprint=neighborhood) == arr2D
background = (arr2D == 0)
eroded_background = binary_erosion(background, structure=neighborhood,
border_value=1)
# Boolean mask of arr2D with True at peaks
    detected_peaks = local_max & ~eroded_background  # '-' is not defined for boolean arrays
# extract peaks
amps = arr2D[detected_peaks]
j, i = np.where(detected_peaks)
# filter peaks
amps = amps.flatten()
peaks = zip(i, j, amps)
peaks_filtered = [x for x in peaks if x[2] > amp_min] # freq, time, amp
# get indices for frequency and time
frequency_idx = [x[1] for x in peaks_filtered]
time_idx = [x[0] for x in peaks_filtered]
if plot:
# scatter of the peaks
fig, ax = plt.subplots()
ax.imshow(arr2D)
ax.scatter(time_idx, frequency_idx)
ax.set_xlabel('Time')
ax.set_ylabel('Frequency')
ax.set_title("Spectrogram")
plt.gca().invert_yaxis()
plt.show()
return zip(frequency_idx, time_idx)
def generate_hashes(peaks, fan_value=DEFAULT_FAN_VALUE):
"""
Hash list structure:
sha1_hash[0:20] time_offset
[(e05b341a9b77a51fd26, 32), ... ]
"""
fingerprinted = set() # to avoid rehashing same pairs
if PEAK_SORT:
peaks.sort(key=itemgetter(1))
for i in range(len(peaks)):
for j in range(1, fan_value):
if (i + j) < len(peaks) and not (i, i + j) in fingerprinted:
freq1 = peaks[i][IDX_FREQ_I]
freq2 = peaks[i + j][IDX_FREQ_I]
t1 = peaks[i][IDX_TIME_J]
t2 = peaks[i + j][IDX_TIME_J]
t_delta = t2 - t1
                if MIN_HASH_TIME_DELTA <= t_delta <= MAX_HASH_TIME_DELTA:
h = hashlib.sha1(
"%s|%s|%s" % (str(freq1), str(freq2), str(t_delta)))
yield (h.hexdigest()[0:FINGERPRINT_REDUCTION], t1)
# ensure we don't repeat hashing
fingerprinted.add((i, i + j))
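if __name__ == '__main__':
    # Minimal usage sketch (synthetic two-tone signal, not a real recording):
    # fingerprint one second of audio sampled at DEFAULT_FS and count the
    # resulting hashes. The exact count depends on the peak constants above.
    t = np.arange(DEFAULT_FS) / float(DEFAULT_FS)
    samples = 1000 * (np.sin(2 * np.pi * 440 * t) + np.sin(2 * np.pi * 880 * t))
    hashes = list(fingerprint(samples))
    print "generated %d fingerprint hashes" % len(hashes)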
|
##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, [email protected], All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class RLimma(RPackage):
"""Data analysis, linear models and differential expression
for microarray data."""
homepage = "https://www.bioconductor.org/packages/limma/"
url = "https://git.bioconductor.org/packages/limma"
list_url = homepage
version('3.32.10', git='https://git.bioconductor.org/packages/limma', commit='593edf28e21fe054d64137ae271b8a52ab05bc60')
version('3.32.6', 'df5dc2b85189a24e939efa3a8e6abc41')
depends_on('[email protected]:3.4.9', when='@3.32.10')
|
import siconos.numerics as SN
import numpy as np
import matplotlib.pyplot as plt
try:
from cffi import FFI
except:
import sys
print('no cffi module installed, exiting')
sys.exit(0)
withPlot = False
if __name__ == '__main__':
xk = np.array((1., 10.))
T = 10.0
t = 0.0
h = 1e-3
z = np.zeros((4,))
w = np.empty((4,))
kappa = 0.9
g = 9.81
theta = 1.0
gamma = 1.0
mcp = SN.MixedComplementarityProblem2(0, 4)
ffi = FFI()
ffi.cdef('void set_cstruct(uintptr_t p_env, void* p_struct);')
ffi.cdef('''typedef struct
{
int id;
double* xk;
double h;
double theta;
double gamma;
double g;
double kappa;
unsigned int f_eval;
unsigned int nabla_eval;
} data;
''')
data_struct = ffi.new('data*')
data_struct.id = -1 # to avoid freeing the data in the destructor
data_struct.xk = ffi.cast('double *', xk.ctypes.data)
data_struct.h = h
data_struct.theta = theta
data_struct.gamma = gamma
data_struct.g = g
data_struct.kappa = kappa
D = ffi.dlopen(SN._numerics.__file__)
D.set_cstruct(mcp.get_env_as_long(), ffi.cast('void*', data_struct))
mcp.set_compute_F_and_nabla_F_as_C_functions('ZhuravlevIvanov.so', 'compute_Fmcp', 'compute_nabla_Fmcp')
SO=SN.SolverOptions(mcp, SN.SICONOS_MCP_NEWTON_FBLSA)
SO.dparam[0] = 1.0e-24
SO.iparam[0] = 150
SO.iparam[3] = 2
SO.iparam[4] = 10
N = int(T/h + 10)
print(N)
lambdaPM = np.empty((N, 4))
signs = np.empty((N, 2))
sol = np.empty((N, 2))
sol[0, :] = xk
k = 0
while t <= T:
k += 1
info = SN.mcp_newton_minFBLSA(mcp, z, w, SO)
#info = SN.mcp_newton_FBLSA(mcp, z, w, SO)
#print('iter {:} ; solver iter = {:} ; prec = {:}'.format(k, SO.iparam[1], SO.dparam[1]))
if info > 0:
#zi_syst.compute_Fmcp(0, 4, z, w)
sol[k, 0] = w[0] - z[1]
sol[k, 1] = w[2] - z[3]
if sol[k, 0] < -1e-7 and np.abs(z[1]) < 1e-10:
z[1] = -sol[k, 0]
z[0] = 1.0
if xk[1] < -1e-7 and np.abs(z[3]) < 1e-10:
z[3] = -sol[k, 1]
z[2] = 1.0
if z[1] < -1e-7:
z[1] = 0.0
z[0] = 0.0
if z[3] < -1e-7:
z[3] = 0.0
z[2] = 0.0
if z[1] > 1e-7 and z[0] < 1.0 - 1e-7:
z[0] = 1.0
if z[3] > 1e-7 and z[2] < 1.0 - 1e-7:
z[2] = 1.0
info = SN.mcp_newton_minFBLSA(mcp, z, w, SO)
print('iter {:} ; solver iter = {:} ; prec = {:}'.format(k, SO.iparam[1], SO.dparam[1]))
if info >0:
print('MCP solver failed ! info = {:}'.format(info))
print(xk)
print(z)
print(w)
# else:
# print('iter {:} ; solver iter = {:} ; prec = {:}'.format(k, SO.iparam[1], SO.dparam[1]))
#zi_syst.compute_Fmcp(0 ,4, z, w)
sol[k, 0] = w[0] - z[1]
sol[k, 1] = w[2] - z[3]
xk[:] = sol[k, :]
signs[k, 0] = z[0] - w[1]
signs[k, 1] = z[2] - w[3]
t = k*h
#z[:] = 0.0
print('f_eval', data_struct.f_eval, 'nabla_eval', data_struct.nabla_eval)
# np.savetxt("dataZIsol.txt", sol)
# np.savetxt("dataZIlambdaPM.txt", lambdaPM)
# np.savetxt("dataZIsign.txt", signs)
if withPlot:
plt.figure()
plt.plot(sol[:, 0], sol[:, 1], 'b-*')
plt.xlabel('s')
plt.ylabel('v')
plt.figure()
plt.plot(sol[:, 0], label=r's')
plt.plot(sol[:, 1], label=r'v')
plt.legend(loc='best')
plt.figure()
plt.plot(signs[:, 0], label=r'$\lambda_1$')
plt.plot(signs[:, 1], label=r'$\lambda_2$')
plt.legend(loc='best')
plt.show()
pos = np.abs(sol[:, 0])
velocity = (1 - kappa*np.sign(sol[:, 0]*sol[:, 1]))*sol[:, 1]*np.sign(sol[:, 0])
plt.subplot(211)
plt.title('position')
plt.plot(pos)
plt.grid()
plt.subplot(212)
plt.title('velocity')
plt.plot(velocity)
plt.grid()
# plt.subplot(313)
# plt.title('control input')
# plt.plot(dataPlot[:,0], control)
# plt.grid()
plt.show()
# indx = np.nonzero(dataPlot[:, 0]>30)
# ttt = dataPlot[indx, 0].flatten()
#
# plt.subplot(311)
# plt.title('position')
# plt.plot(ttt, pos[indx])
# plt.grid()
# plt.subplot(312)
# plt.title('velocity')
# plt.plot(ttt, velocity[indx])
# plt.grid()
## plt.subplot(313)
## plt.title('control input')
## plt.plot(ttt, control[indx])
# plt.grid()
# plt.show()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
*****************************************
Author: zhlinh
Email: [email protected]
Version: 0.0.1
Created Time: 2016-03-11
Last_modify: 2016-03-11
******************************************
'''
'''
Given a 2D board containing 'X' and 'O',
capture all regions surrounded by 'X'.
A region is captured by flipping all 'O's
into 'X's in that surrounded region.
For example,
X X X X
X O O X
X X O X
X O X X
After running your function, the board should be:
X X X X
X X X X
X X X X
X O X X
'''
class Solution(object):
def solve(self, board):
"""
:type board: List[List[str]]
:rtype: void Do not return anything, modify board in-place instead.
"""
m = len(board)
if m < 2:
return
n = len(board[0])
for i in range(m):
self.helper(board, i, 0, m, n)
if n > 1:
self.helper(board, i, n - 1, m, n)
for j in range(n):
self.helper(board, 0, j, m, n)
if m > 1:
self.helper(board, m - 1, j, m, n)
for i in range(m):
for j in range(n):
if board[i][j] == 'O':
board[i][j] = 'X'
if board[i][j] == '1':
board[i][j] = 'O'
def helper(self, board, i, j, m, n):
if board[i][j] == 'O':
board[i][j] = '1'
            # Trick: the condition would normally be i >= 1 (and similarly for
            # the other bounds), but every boundary row and column is already
            # visited directly by the loops in solve(), so re-checking the
            # outermost cells here would only duplicate work.
if i > 1:
self.helper(board, i - 1, j, m, n)
if i < m - 2:
self.helper(board, i + 1, j, m, n)
if j > 1:
self.helper(board, i, j - 1, m, n)
if j < n - 2:
self.helper(board, i, j + 1, m, n)
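# Illustrative self-check (hypothetical usage, not required by the problem):
# runs solve() on the board from the docstring and prints the result, which
# should flip only the fully surrounded 'O' region.
if __name__ == "__main__":
    board = [list("XXXX"), list("XOOX"), list("XXOX"), list("XOXX")]
    Solution().solve(board)
    for row in board:
        print("".join(row))
    # Expected output:
    # XXXX
    # XXXX
    # XXXX
    # XOXX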
|
"""Use the isolation plugin with --with-isolation or the
NOSE_WITH_ISOLATION environment variable to clean sys.modules after
each test module is loaded and executed.
The isolation module is in effect similar to wrapping the following
functions around the import and execution of each test module::
def setup(module):
module._mods = sys.modules.copy()
def teardown(module):
to_del = [ m for m in sys.modules.keys() if m not in
module._mods ]
for mod in to_del:
del sys.modules[mod]
sys.modules.update(module._mods)
Isolation works only during lazy loading. In normal use, this is only
during discovery of modules within a directory, where the process of
importing, loading tests and running tests from each module is
encapsulated in a single loadTestsFromName call. This plugin
implements loadTestsFromNames to force the same lazy-loading there,
which allows isolation to work in directed mode as well as discovery,
at the cost of some efficiency: lazy-loading names forces full context
setup and teardown to run for each name, defeating the grouping that
is normally used to ensure that context setup and teardown are run the
fewest possible times for a given set of names.
PLEASE NOTE that this plugin should not be used in conjunction with
other plugins that assume that modules once imported will stay
imported; for instance, it may cause very odd results when used with
the coverage plugin.
"""
import logging
import sys
from nose.plugins import Plugin
log = logging.getLogger('nose.plugins.isolation')
class IsolationPlugin(Plugin):
"""
Activate the isolation plugin to isolate changes to external
modules to a single test module or package. The isolation plugin
resets the contents of sys.modules after each test module or
package runs to its state before the test. PLEASE NOTE that this
    plugin should not be used with the coverage plugin, or in any other
    case where module reloading may produce undesirable side-effects.
"""
score = 10 # I want to be last
name = 'isolation'
def configure(self, options, conf):
Plugin.configure(self, options, conf)
self._mod_stack = []
def beforeContext(self):
"""Copy sys.modules onto my mod stack
"""
mods = sys.modules.copy()
self._mod_stack.append(mods)
def afterContext(self):
"""Pop my mod stack and restore sys.modules to the state
it was in when mod stack was pushed.
"""
mods = self._mod_stack.pop()
to_del = [ m for m in sys.modules.keys() if m not in mods ]
if to_del:
log.debug('removing sys modules entries: %s', to_del)
for mod in to_del:
del sys.modules[mod]
sys.modules.update(mods)
def loadTestsFromNames(self, names, module=None):
"""Create a lazy suite that calls beforeContext and afterContext
around each name. The side-effect of this is that full context
fixtures will be set up and torn down around each test named.
"""
# Fast path for when we don't care
if not names or len(names) == 1:
return
loader = self.loader
plugins = self.conf.plugins
def lazy():
for name in names:
plugins.beforeContext()
yield loader.loadTestsFromName(name, module=module)
plugins.afterContext()
return (loader.suiteClass(lazy), [])
def prepareTestLoader(self, loader):
"""Get handle on test loader so we can use it in loadTestsFromNames.
"""
self.loader = loader
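# Typical invocation (sketch), per the module docstring above: enable the
# plugin from the command line with
#     nosetests --with-isolation
# or through the environment variable
#     NOSE_WITH_ISOLATION=1 nosetests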
|
# -*- coding: utf-8 -*-
from rest_framework import permissions
from rest_framework import exceptions
from addons.base.models import BaseAddonSettings
from osf.models import (
AbstractNode,
Contributor,
DraftRegistration,
Institution,
Node,
NodeRelation,
OSFUser,
PreprintService,
PrivateLink,
)
from osf.utils import permissions as osf_permissions
from website.project.metadata.utils import is_prereg_admin
from api.base.utils import get_user_auth, is_deprecated
class ContributorOrPublic(permissions.BasePermission):
def has_object_permission(self, request, view, obj):
from api.nodes.views import NodeProvider
if isinstance(obj, BaseAddonSettings):
obj = obj.owner
if isinstance(obj, (NodeProvider, PreprintService)):
obj = obj.node
        assert isinstance(obj, (AbstractNode, NodeRelation)), 'obj must be a Node, NodeProvider, NodeRelation, PreprintService, or AddonSettings; got {}'.format(obj)
auth = get_user_auth(request)
if request.method in permissions.SAFE_METHODS:
return obj.is_public or obj.can_view(auth)
else:
return obj.can_edit(auth)
class IsPublic(permissions.BasePermission):
def has_object_permission(self, request, view, obj):
        assert isinstance(obj, AbstractNode), 'obj must be a Node, got {}'.format(obj)
auth = get_user_auth(request)
return obj.is_public or obj.can_view(auth)
class IsAdmin(permissions.BasePermission):
def has_object_permission(self, request, view, obj):
        assert isinstance(obj, AbstractNode), 'obj must be a Node, got {}'.format(obj)
auth = get_user_auth(request)
return obj.has_permission(auth.user, osf_permissions.ADMIN)
class IsContributor(permissions.BasePermission):
def has_object_permission(self, request, view, obj):
        assert isinstance(obj, AbstractNode), 'obj must be a Node, got {}'.format(obj)
auth = get_user_auth(request)
if request.method in permissions.SAFE_METHODS:
return obj.is_contributor(auth.user)
else:
return obj.has_permission(auth.user, 'write')
class IsAdminOrReviewer(permissions.BasePermission):
"""
Prereg admins can update draft registrations.
"""
def has_object_permission(self, request, view, obj):
        assert isinstance(obj, (AbstractNode, DraftRegistration, PrivateLink)), 'obj must be a Node, Draft Registration, or PrivateLink, got {}'.format(obj)
auth = get_user_auth(request)
if request.method != 'DELETE' and is_prereg_admin(auth.user):
return True
return obj.has_permission(auth.user, osf_permissions.ADMIN)
class AdminOrPublic(permissions.BasePermission):
def has_object_permission(self, request, view, obj):
        assert isinstance(obj, (AbstractNode, OSFUser, Institution, BaseAddonSettings, DraftRegistration, PrivateLink)), 'obj must be a Node, User, Institution, Draft Registration, PrivateLink, or AddonSettings; got {}'.format(obj)
auth = get_user_auth(request)
if request.method in permissions.SAFE_METHODS:
return obj.is_public or obj.can_view(auth)
else:
return obj.has_permission(auth.user, osf_permissions.ADMIN)
class ExcludeWithdrawals(permissions.BasePermission):
def has_object_permission(self, request, view, obj):
if isinstance(obj, Node):
node = obj
else:
context = request.parser_context['kwargs']
node = AbstractNode.load(context[view.node_lookup_url_kwarg])
if node.is_retracted:
return False
return True
class ContributorDetailPermissions(permissions.BasePermission):
"""Permissions for contributor detail page."""
def has_object_permission(self, request, view, obj):
assert isinstance(obj, (AbstractNode, OSFUser, Contributor)), 'obj must be User, Contributor, or Node, got {}'.format(obj)
auth = get_user_auth(request)
context = request.parser_context['kwargs']
node = AbstractNode.load(context[view.node_lookup_url_kwarg])
user = OSFUser.load(context['user_id'])
if request.method in permissions.SAFE_METHODS:
return node.is_public or node.can_view(auth)
elif request.method == 'DELETE':
return node.has_permission(auth.user, osf_permissions.ADMIN) or auth.user == user
else:
return node.has_permission(auth.user, osf_permissions.ADMIN)
class ContributorOrPublicForPointers(permissions.BasePermission):
def has_object_permission(self, request, view, obj):
        assert isinstance(obj, (AbstractNode, NodeRelation)), 'obj must be a Node or NodeRelation, got {}'.format(obj)
auth = get_user_auth(request)
parent_node = AbstractNode.load(request.parser_context['kwargs']['node_id'])
pointer_node = NodeRelation.load(request.parser_context['kwargs']['node_link_id']).child
if request.method in permissions.SAFE_METHODS:
has_parent_auth = parent_node.can_view(auth)
has_pointer_auth = pointer_node.can_view(auth)
public = pointer_node.is_public
has_auth = public or (has_parent_auth and has_pointer_auth)
return has_auth
else:
has_auth = parent_node.can_edit(auth)
return has_auth
class ContributorOrPublicForRelationshipPointers(permissions.BasePermission):
def has_object_permission(self, request, view, obj):
assert isinstance(obj, dict)
auth = get_user_auth(request)
parent_node = obj['self']
if request.method in permissions.SAFE_METHODS:
return parent_node.can_view(auth)
elif request.method == 'DELETE':
return parent_node.can_edit(auth)
else:
has_parent_auth = parent_node.can_edit(auth)
if not has_parent_auth:
return False
pointer_nodes = []
for pointer in request.data.get('data', []):
node = AbstractNode.load(pointer['id'])
if not node or node.is_collection:
raise exceptions.NotFound(detail='Node with id "{}" was not found'.format(pointer['id']))
pointer_nodes.append(node)
has_pointer_auth = True
for pointer in pointer_nodes:
if not pointer.can_view(auth):
has_pointer_auth = False
break
return has_pointer_auth
class RegistrationAndPermissionCheckForPointers(permissions.BasePermission):
def has_object_permission(self, request, view, obj):
node_link = NodeRelation.load(request.parser_context['kwargs']['node_link_id'])
node = AbstractNode.load(request.parser_context['kwargs'][view.node_lookup_url_kwarg])
auth = get_user_auth(request)
        if request.method == 'DELETE' and node.is_registration:
raise exceptions.MethodNotAllowed(method=request.method)
if node.is_collection or node.is_registration:
raise exceptions.NotFound
if node != node_link.parent:
raise exceptions.NotFound
if request.method == 'DELETE' and not node.can_edit(auth):
return False
return True
class WriteOrPublicForRelationshipInstitutions(permissions.BasePermission):
def has_object_permission(self, request, view, obj):
assert isinstance(obj, dict)
auth = get_user_auth(request)
node = obj['self']
if request.method in permissions.SAFE_METHODS:
return node.is_public or node.can_view(auth)
else:
return node.has_permission(auth.user, osf_permissions.WRITE)
class ReadOnlyIfRegistration(permissions.BasePermission):
"""Makes PUT and POST forbidden for registrations."""
def has_object_permission(self, request, view, obj):
if not isinstance(obj, AbstractNode):
obj = AbstractNode.load(request.parser_context['kwargs'][view.node_lookup_url_kwarg])
        assert isinstance(obj, AbstractNode), 'obj must be a Node'
if obj.is_registration:
return request.method in permissions.SAFE_METHODS
return True
class ShowIfVersion(permissions.BasePermission):
def __init__(self, min_version, max_version, deprecated_message):
super(ShowIfVersion, self).__init__()
self.min_version = min_version
self.max_version = max_version
self.deprecated_message = deprecated_message
def has_object_permission(self, request, view, obj):
if is_deprecated(request.version, self.min_version, self.max_version):
raise exceptions.NotFound(detail=self.deprecated_message)
return True
class NodeLinksShowIfVersion(ShowIfVersion):
def __init__(self):
min_version = '2.0'
max_version = '2.0'
deprecated_message = 'This feature is deprecated as of version 2.1'
super(NodeLinksShowIfVersion, self).__init__(min_version, max_version, deprecated_message)
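# Usage sketch (the view class below is hypothetical): permission classes such
# as these are attached to a DRF view through `permission_classes`, e.g.
#
#     from rest_framework import generics
#
#     class NodeDetail(generics.RetrieveUpdateAPIView):
#         permission_classes = (ContributorOrPublic, ExcludeWithdrawals,)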
|
import time
def jd_now():
"""
Returns Julian Date at the current moment.
"""
return 2440587.5 + time.time() / 86400.0
def normalize_star_name(name):
"""
Normalize star name with GCVS names, for example: V339 -> V0339.
"""
digits = "123456789"
if name[0] == "V" and name[1] in digits and name[4] not in digits:
name = "V0" + name[1:]
return name
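# Illustrative examples (assumed typical GCVS-style names):
#     normalize_star_name("V339 Del")  -> "V0339 Del"
#     normalize_star_name("V0339 Del") -> "V0339 Del"  (already padded)
#     normalize_star_name("V1339 Cyg") -> "V1339 Cyg"  (four digits, unchanged)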
def dict_to_observation(row, observer):
from .models import Observation
from stars.models import Star
name = normalize_star_name(row["name"])
star = Star.objects.get(name=name)
fainter_than = "<" in row["magnitude"]
magnitude = float(row["magnitude"].replace("<", ""))
jd = float(row["date"])
# TODO: use get_or_create with defaults
try:
observation = Observation.objects.get(observer=observer, star=star, jd=jd,)
except Observation.DoesNotExist:
observation = Observation(observer=observer, star=star, jd=jd,)
observation.magnitude = magnitude
observation.fainter_than = fainter_than
observation.comp1 = row["comp1"]
observation.comp2 = row.get("comp2", "")
observation.chart = row["chart"]
observation.comment_code = row["comment_code"]
observation.notes = row["notes"]
return observation
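# Illustrative input for dict_to_observation; the keys mirror those read above,
# while the concrete values are hypothetical:
#
#     row = {
#         "name": "V339 Del", "magnitude": "<12.5", "date": "2456789.1234",
#         "comp1": "125", "comp2": "132", "chart": "X16161A",
#         "comment_code": "", "notes": "",
#     }
#     observation = dict_to_observation(row, observer)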
|
"""Verify AWS Lambda Function creation."""
import copy
from unittest import mock
from foremast.awslambda.awslambda import LambdaFunction
TEST_PROPERTIES = {
'pipeline': {
'lambda': {
'app_description': None,
'handler': None,
'runtime': None,
'vpc_enabled': None,
},
},
'app': {
'lambda_memory': 0,
'lambda_timeout': 0,
'lambda_environment': None,
'lambda_layers': None,
'lambda_dlq': None,
'lambda_tracing': None,
'lambda_destinations': None,
'lambda_subnet_count': None,
'lambda_filesystems': None,
}
}
GENERATED_IAM = {
'lambda_role': 'generated_role',
}
@mock.patch('foremast.awslambda.awslambda.boto3')
@mock.patch('foremast.awslambda.awslambda.get_details')
@mock.patch('foremast.awslambda.awslambda.get_properties')
@mock.patch('foremast.awslambda.awslambda.get_role_arn')
def test_role_arn(mock_get_role_arn, mock_get_properties, mock_get_details, mock_boto3):
"""Check Role ARN configuration."""
generated = copy.deepcopy(GENERATED_IAM)
properties = copy.deepcopy(TEST_PROPERTIES)
mock_get_details.return_value.iam.return_value = generated
mock_get_properties.return_value = properties
LambdaFunction(app='test_app', env='test_env', region='us-east-1', prop_path='other')
mock_get_role_arn.assert_called_with(generated['lambda_role'], mock.ANY, mock.ANY)
@mock.patch('foremast.awslambda.awslambda.boto3')
@mock.patch('foremast.awslambda.awslambda.get_details')
@mock.patch('foremast.awslambda.awslambda.get_properties')
@mock.patch('foremast.awslambda.awslambda.get_role_arn')
def test_role_arn_none(mock_get_role_arn, mock_get_properties, mock_get_details, mock_boto3):
"""Generated Role should be used for Lambda."""
generated = copy.deepcopy(GENERATED_IAM)
properties = copy.deepcopy(TEST_PROPERTIES)
properties['app']['lambda_role'] = None
mock_get_details.return_value.iam.return_value = generated
mock_get_properties.return_value = properties
LambdaFunction(app='test_app', env='test_env', region='us-east-1', prop_path='other')
mock_get_role_arn.assert_called_with(GENERATED_IAM['lambda_role'], mock.ANY, mock.ANY)
@mock.patch('foremast.awslambda.awslambda.boto3')
@mock.patch('foremast.awslambda.awslambda.get_details')
@mock.patch('foremast.awslambda.awslambda.get_properties')
@mock.patch('foremast.awslambda.awslambda.get_role_arn')
def test_role_arn_custom(mock_get_role_arn, mock_get_properties, mock_get_details, mock_boto3):
"""Custom Role should be used for Lambda."""
custom_role = 'custom_role'
generated = copy.deepcopy(GENERATED_IAM)
properties = copy.deepcopy(TEST_PROPERTIES)
properties['app']['lambda_role'] = custom_role
mock_get_details.return_value.iam.return_value = generated
mock_get_properties.return_value = properties
LambdaFunction(app='test_app', env='test_env', region='us-east-1', prop_path='other')
mock_get_role_arn.assert_called_with(custom_role, mock.ANY, mock.ANY)
|
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import warnings
from typing import Callable, Dict, Optional, Sequence, Tuple, Union
from google.api_core import grpc_helpers # type: ignore
from google.api_core import gapic_v1 # type: ignore
import google.auth # type: ignore
from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
import grpc # type: ignore
from google.cloud.talent_v4beta1.types import completion_service
from .base import CompletionTransport, DEFAULT_CLIENT_INFO
class CompletionGrpcTransport(CompletionTransport):
"""gRPC backend transport for Completion.
A service handles auto completion.
This class defines the same methods as the primary client, so the
primary client can load the underlying transport implementation
and call it.
It sends protocol buffers over the wire using gRPC (which is built on
top of HTTP/2); the ``grpcio`` package must be installed.
"""
_stubs: Dict[str, Callable]
def __init__(self, *,
host: str = 'jobs.googleapis.com',
credentials: ga_credentials.Credentials = None,
credentials_file: str = None,
scopes: Sequence[str] = None,
channel: grpc.Channel = None,
api_mtls_endpoint: str = None,
client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
ssl_channel_credentials: grpc.ChannelCredentials = None,
client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
quota_project_id: Optional[str] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
always_use_jwt_access: Optional[bool] = False,
) -> None:
"""Instantiate the transport.
Args:
host (Optional[str]):
The hostname to connect to.
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
This argument is ignored if ``channel`` is provided.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is ignored if ``channel`` is provided.
            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
ignored if ``channel`` is provided.
channel (Optional[grpc.Channel]): A ``Channel`` instance through
which to make calls.
api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
If provided, it overrides the ``host`` argument and tries to create
a mutual TLS channel with client SSL credentials from
                ``client_cert_source`` or application default SSL credentials.
client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
Deprecated. A callback to provide client SSL certificate bytes and
private key bytes, both in PEM format. It is ignored if
``api_mtls_endpoint`` is None.
ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
for grpc channel. It is ignored if ``channel`` is provided.
client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
A callback to provide client certificate bytes and private key bytes,
both in PEM format. It is used to configure mutual TLS channel. It is
ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
always_use_jwt_access (Optional[bool]): Whether self signed JWT should
be used for service account credentials.
Raises:
google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
creation failed for any reason.
google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
and ``credentials_file`` are passed.
"""
self._grpc_channel = None
self._ssl_channel_credentials = ssl_channel_credentials
self._stubs: Dict[str, Callable] = {}
if api_mtls_endpoint:
warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
if client_cert_source:
warnings.warn("client_cert_source is deprecated", DeprecationWarning)
if channel:
# Ignore credentials if a channel was passed.
credentials = False
# If a channel was explicitly provided, set it.
self._grpc_channel = channel
self._ssl_channel_credentials = None
else:
if api_mtls_endpoint:
host = api_mtls_endpoint
# Create SSL credentials with client_cert_source or application
# default SSL credentials.
if client_cert_source:
cert, key = client_cert_source()
self._ssl_channel_credentials = grpc.ssl_channel_credentials(
certificate_chain=cert, private_key=key
)
else:
self._ssl_channel_credentials = SslCredentials().ssl_credentials
else:
if client_cert_source_for_mtls and not ssl_channel_credentials:
cert, key = client_cert_source_for_mtls()
self._ssl_channel_credentials = grpc.ssl_channel_credentials(
certificate_chain=cert, private_key=key
)
# The base transport sets the host, credentials and scopes
super().__init__(
host=host,
credentials=credentials,
credentials_file=credentials_file,
scopes=scopes,
quota_project_id=quota_project_id,
client_info=client_info,
always_use_jwt_access=always_use_jwt_access,
)
if not self._grpc_channel:
self._grpc_channel = type(self).create_channel(
self._host,
credentials=self._credentials,
credentials_file=credentials_file,
scopes=self._scopes,
ssl_credentials=self._ssl_channel_credentials,
quota_project_id=quota_project_id,
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
],
)
# Wrap messages. This must be done after self._grpc_channel exists
self._prep_wrapped_messages(client_info)
@classmethod
def create_channel(cls,
host: str = 'jobs.googleapis.com',
credentials: ga_credentials.Credentials = None,
credentials_file: str = None,
scopes: Optional[Sequence[str]] = None,
quota_project_id: Optional[str] = None,
**kwargs) -> grpc.Channel:
"""Create and return a gRPC channel object.
Args:
host (Optional[str]): The host for the channel to use.
credentials (Optional[~.Credentials]): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If
none are specified, the client will attempt to ascertain
the credentials from the environment.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is mutually exclusive with credentials.
            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
service. These are only used when credentials are not specified and
are passed to :func:`google.auth.default`.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
kwargs (Optional[dict]): Keyword arguments, which are passed to the
channel creation.
Returns:
grpc.Channel: A gRPC channel object.
Raises:
google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
and ``credentials_file`` are passed.
"""
return grpc_helpers.create_channel(
host,
credentials=credentials,
credentials_file=credentials_file,
quota_project_id=quota_project_id,
default_scopes=cls.AUTH_SCOPES,
scopes=scopes,
default_host=cls.DEFAULT_HOST,
**kwargs
)
@property
def grpc_channel(self) -> grpc.Channel:
"""Return the channel designed to connect to this service.
"""
return self._grpc_channel
@property
def complete_query(self) -> Callable[
[completion_service.CompleteQueryRequest],
completion_service.CompleteQueryResponse]:
r"""Return a callable for the complete query method over gRPC.
Completes the specified prefix with keyword
suggestions. Intended for use by a job search auto-
complete search box.
Returns:
Callable[[~.CompleteQueryRequest],
~.CompleteQueryResponse]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'complete_query' not in self._stubs:
self._stubs['complete_query'] = self.grpc_channel.unary_unary(
'/google.cloud.talent.v4beta1.Completion/CompleteQuery',
request_serializer=completion_service.CompleteQueryRequest.serialize,
response_deserializer=completion_service.CompleteQueryResponse.deserialize,
)
return self._stubs['complete_query']
__all__ = (
'CompletionGrpcTransport',
)
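# Minimal usage sketch (assumes the generated CompletionClient in
# google.cloud.talent_v4beta1 and Application Default Credentials; adjust to
# the installed client library version):
#
#     from google.cloud import talent_v4beta1
#
#     transport = CompletionGrpcTransport(host='jobs.googleapis.com')
#     client = talent_v4beta1.CompletionClient(transport=transport)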
|
# -*- coding: utf-8 -*-
"""Add Keras Core Layer Operation Reshape
Revision ID: 1d7c21b6c7d2
Revises: 4a4b7df125b7
Create Date: 2018-11-01 10:26:22.659859
"""
from alembic import op
import sqlalchemy as sa
from alembic import context
from sqlalchemy import String, Integer, Text
from sqlalchemy.orm import sessionmaker
from sqlalchemy.sql import table, column, text
# revision identifiers, used by Alembic.
revision = '1d7c21b6c7d2'
down_revision = '4a4b7df125b7'
branch_labels = None
depends_on = None
KERAS_PLATAFORM_ID = 5
def _insert_operation_platform():
tb = table(
'operation_platform',
column('operation_id', Integer),
column('platform_id', Integer), )
columns = ('operation_id', 'platform_id')
data = [
(5015, KERAS_PLATAFORM_ID),# Reshape
]
rows = [dict(list(zip(columns, row))) for row in data]
op.bulk_insert(tb, rows)
def _insert_operation():
tb = table(
'operation',
column('id', Integer),
column('slug', String),
column('enabled', Integer),
column('type', String),
column('icon', Integer),)
columns = ('id', 'slug', 'enabled', 'type', 'icon')
data = [
(5015, "reshape", 1, 'ACTION', ''),
]
rows = [dict(list(zip(columns, row))) for row in data]
op.bulk_insert(tb, rows)
def _insert_operation_category():
tb = table(
'operation_category',
column('id', Integer),
column('type', String),
column('order', Integer),
column('default_order', Integer),
)
columns = ('id', 'type', 'order', 'default_order')
data = [
(5015, "subgroup", 8, 8),# Reshape
]
rows = [dict(list(zip(columns, row))) for row in data]
op.bulk_insert(tb, rows)
def _insert_operation_category_operation():
tb = table(
'operation_category_operation',
column('operation_id', Integer),
column('operation_category_id', Integer))
columns = ('operation_category_id', 'operation_id')
data = [
#Core Layers
(5010, 5015),# Reshape
]
rows = [dict(list(zip(columns, row))) for row in data]
op.bulk_insert(tb, rows)
def _insert_operation_translation():
tb = table(
'operation_translation',
column('id', Integer),
column('locale', String),
column('name', String),
column('description', String))
columns = ('id', 'locale', 'name', 'description')
data = [
(5015, "en", 'Reshape', ''),
]
rows = [dict(list(zip(columns, row))) for row in data]
op.bulk_insert(tb, rows)
def _insert_operation_port():
tb = table(
'operation_port',
column('id', Integer),
column('type', String),
column('tags', String),
column('order', Integer),
column('multiplicity', String),
column('operation_id', Integer),
column('slug', String),)
columns = ('id', 'type', 'tags', 'order', 'multiplicity', 'operation_id', 'slug')
data = [
#Reshape
(5115, 'INPUT', '', 1, 'ONE', 5015, 'input data'),
(5215, 'OUTPUT', '', 1, 'ONE', 5015, 'output data'),
]
rows = [dict(list(zip(columns, row))) for row in data]
op.bulk_insert(tb, rows)
def _insert_operation_port_interface_operation_port():
tb = table(
'operation_port_interface_operation_port',
column('operation_port_id', Integer),
column('operation_port_interface_id', Integer))
columns = ('operation_port_id', 'operation_port_interface_id')
data = [
#Reshape
(5115, 1),
(5215, 1),
]
rows = [dict(list(zip(columns, row))) for row in data]
op.bulk_insert(tb, rows)
def _insert_operation_port_translation():
tb = table(
'operation_port_translation',
column('id', Integer),
column('locale', String),
column('name', String),
column('description', String))
columns = ('id', 'locale', 'name', 'description')
data = [
#Reshape
(5115, "en", 'input data', 'Input data'),
(5215, "en", 'output data', 'Output data'),
]
rows = [dict(list(zip(columns, row))) for row in data]
op.bulk_insert(tb, rows)
def _insert_operation_form():
operation_form_table = table(
'operation_form',
column('id', Integer),
column('enabled', Integer),
column('order', Integer),
column('category', String), )
columns = ('id', 'enabled', 'order', 'category')
data = [
#Reshape - target_shape
(5132, 1, 1, 'execution'),
#Reshape - input_shape
#(5133, 1, 1, 'execution'),
]
rows = [dict(list(zip(columns, row))) for row in data]
op.bulk_insert(operation_form_table, rows)
def _insert_operation_form_translation():
tb = table(
'operation_form_translation',
column('id', Integer),
column('locale', String),
column('name', String))
columns = ('id', 'locale', 'name')
data = [
#Reshape - target_shape
(5132, 'en', 'Execution'),
(5132, 'pt', 'Execução'),
#Reshape - input_shape
#(5133, 'en', 'Execution'),
#(5133, 'pt', 'Execução'),
]
rows = [dict(list(zip(columns, row))) for row in data]
op.bulk_insert(tb, rows)
def _insert_operation_operation_form():
tb = table(
'operation_operation_form',
column('operation_id', Integer),
column('operation_form_id', Integer))
columns = ('operation_id', 'operation_form_id')
data = [
(5015, 41), #appearance
#Reshape - target_shape
(5015, 5132), # own execution form
#Reshape - input_shape
#(5015, 5133), # own execution form
]
rows = [dict(list(zip(columns, row))) for row in data]
op.bulk_insert(tb, rows)
def _insert_operation_form_field():
tb = table(
'operation_form_field',
column('id', Integer),
column('name', String),
column('type', String),
column('required', Integer),
column('order', Integer),
column('default', Text),
column('suggested_widget', String),
column('values_url', String),
column('values', String),
column('scope', String),
column('form_id', Integer), )
columns = ('id', 'name', 'type', 'required', 'order', 'default',
'suggested_widget', 'values_url', 'values', 'scope', 'form_id')
data = [
#Reshape - target_shape
(5132, 'target_shape', 'TEXT', 1, 1, None, 'text', None, None, 'EXECUTION', 5132),
#Reshape - input_shape
#(5133, 'input_shape', 'TEXT', 0, 2, None, 'text', None, None, 'EXECUTION', 5133),
]
rows = [dict(list(zip(columns, row))) for row in data]
op.bulk_insert(tb, rows)
def _insert_operation_form_field_translation():
tb = table(
'operation_form_field_translation',
column('id', Integer),
column('locale', String),
column('label', String),
column('help', String), )
columns = ('id', 'locale', 'label', 'help')
data = [
#Reshape - target_shape
(5132, 'en', 'Target shape', 'Tuple of integers. Does not include the batch axis. Ex.: (6,2)'),
#Reshape - input_shape
#(5133, 'en', 'input_shape', 'Arbitrary, although all dimensions in the input shaped must be fixed. '
# 'Use the keyword argument input_shape (tuple of integers, does not '
# 'include the batch axis) when using this layer as the first '
# 'layer in a model. Ex.: (12,)'),
]
rows = [dict(list(zip(columns, row))) for row in data]
op.bulk_insert(tb, rows)
all_commands = [
(_insert_operation,
'DELETE FROM operation WHERE id = 5015'),
(_insert_operation_category,
'DELETE FROM operation_category WHERE id = 5015'),
(_insert_operation_translation,
'DELETE FROM operation_translation WHERE id = 5015'),
(_insert_operation_category_operation,
'DELETE FROM operation_category_operation WHERE operation_id = 5015'),
(_insert_operation_platform,
'DELETE FROM operation_platform WHERE operation_id = 5015 AND platform_id = {}'.format(KERAS_PLATAFORM_ID)),
(_insert_operation_port,
'DELETE FROM operation_port WHERE id IN (5115, 5215)'),
(_insert_operation_port_interface_operation_port,
'DELETE FROM operation_port_interface_operation_port WHERE operation_port_id IN (5115, 5215)'),
(_insert_operation_port_translation,
'DELETE FROM operation_port_translation WHERE id IN (5115, 5215)'),
(_insert_operation_form,
'DELETE FROM operation_form WHERE id IN (5132, 5133)'),
(_insert_operation_form_field,
'DELETE FROM operation_form_field WHERE id IN (5132, 5133)'),
(_insert_operation_form_translation,
'DELETE FROM operation_form_translation WHERE id IN (5132, 5133)'),
(_insert_operation_form_field_translation,
'DELETE FROM operation_form_field_translation WHERE id IN (5132, 5133)'),
(_insert_operation_operation_form,
'DELETE FROM operation_operation_form WHERE operation_id = 5015'),
]
def upgrade():
ctx = context.get_context()
session = sessionmaker(bind=ctx.bind)()
connection = session.connection()
try:
for cmd in all_commands:
if isinstance(cmd[0], str):
connection.execute(cmd[0])
elif isinstance(cmd[0], list):
for row in cmd[0]:
connection.execute(row)
else:
cmd[0]()
    except Exception:
session.rollback()
raise
session.commit()
def downgrade():
ctx = context.get_context()
session = sessionmaker(bind=ctx.bind)()
connection = session.connection()
try:
for cmd in reversed(all_commands):
if isinstance(cmd[1], str):
connection.execute(cmd[1])
elif isinstance(cmd[1], list):
for row in cmd[1]:
connection.execute(row)
else:
cmd[1]()
    except Exception:
session.rollback()
raise
session.commit()
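# This revision is normally applied or reverted through the Alembic CLI, e.g.:
#     alembic upgrade 1d7c21b6c7d2
#     alembic downgrade 4a4b7df125b7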
|
# --------------------------------------------------------------------------
#
# Copyright (c) Microsoft Corporation. All rights reserved.
#
# The MIT License (MIT)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the ""Software""), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
# --------------------------------------------------------------------------
import os
import pytest
def pytest_configure(config):
# register an additional marker
config.addinivalue_line(
"markers", "live_test_only: mark test to be a live test only"
)
config.addinivalue_line(
"markers", "playback_test_only: mark test to be a playback test only"
)
def pytest_runtest_setup(item):
is_live_only_test_marked = bool([mark for mark in item.iter_markers(name="live_test_only")])
if is_live_only_test_marked:
from devtools_testutils import is_live
if not is_live():
pytest.skip("live test only")
is_playback_test_marked = bool([mark for mark in item.iter_markers(name="playback_test_only")])
if is_playback_test_marked:
from devtools_testutils import is_live
if is_live() and os.environ.get('AZURE_SKIP_LIVE_RECORDING', '').lower() == 'true':
pytest.skip("playback test only")
try:
from azure_devtools.scenario_tests import AbstractPreparer
@pytest.fixture(scope='session', autouse=True)
def clean_cached_resources():
yield
AbstractPreparer._perform_pending_deletes()
except ImportError:
pass
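# Usage sketch (test names are hypothetical): the markers registered above are
# applied per test function, e.g.
#
#     import pytest
#
#     @pytest.mark.live_test_only
#     def test_against_real_service():
#         ...
#
#     @pytest.mark.playback_test_only
#     def test_against_recordings():
#         ...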
|