repo_name | path | language | license | size | score | prefix | middle | suffix
---|---|---|---|---|---|---|---|---
stringlengths 5-100 | stringlengths 4-231 | stringclasses 1 value | stringclasses 15 values | int64 6-947k | float64 0-0.34 | stringlengths 0-8.16k | stringlengths 3-512 | stringlengths 0-8.17k
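The rows below are flattened fill-in-the-middle (FIM) samples: each record holds one Python file split into `prefix`, `middle`, and `suffix` at two cut points, and the stray `|` characters inside the code mark those column boundaries. A minimal sketch of consuming such rows, assuming they have been exported to a local JSONL file (the filename is hypothetical):

```python
import json

def reassemble(row):
    # A fill-in-the-middle sample is just the three text columns concatenated.
    return row["prefix"] + row["middle"] + row["suffix"]

# Hypothetical local export of the rows below, one JSON object per line.
with open("fim_samples.jsonl") as fh:
    for line in fh:
        row = json.loads(line)
        # The language column has a single class, so every sample is Python.
        source = reassemble(row)
        print(row["repo_name"], row["path"], row["license"], len(source))
```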
gf53520/kafka | kafka-merge-pr.py | Python | apache-2.0 | 19,703 | 0.004213 | #!/usr/bin/env python
#
# Licensed to t | he Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# | the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Utility for creating well-formed pull request merges and pushing them to Apache. This script is a modified version
# of the one created by the Spark project (https://github.com/apache/spark/blob/master/dev/merge_spark_pr.py).
#
# Usage: ./kafka-merge-pr.py (see config env vars below)
#
# This utility assumes you already have a local kafka git folder and that you
# have added remotes corresponding to both:
# (i) the github apache kafka mirror and
# (ii) the apache kafka git repo.
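# Example remote setup (remote names match the defaults below; the URLs are
# shown for illustration only):
# git remote add apache-github https://github.com/apache/kafka.git
# git remote add apache https://gitbox.apache.org/repos/asf/kafka.git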
import json
import os
import re
import subprocess
import sys
import urllib2
try:
import jira.client
JIRA_IMPORTED = True
except ImportError:
JIRA_IMPORTED = False
PROJECT_NAME = "kafka"
CAPITALIZED_PROJECT_NAME = PROJECT_NAME.upper()
# Location of the local git repository
REPO_HOME = os.environ.get("%s_HOME" % CAPITALIZED_PROJECT_NAME, os.getcwd())
# Remote name which points to the GitHub site
PR_REMOTE_NAME = os.environ.get("PR_REMOTE_NAME", "apache-github")
# Remote name where we want to push the changes to (GitHub by default, but Apache Git would work if GitHub is down)
PUSH_REMOTE_NAME = os.environ.get("PUSH_REMOTE_NAME", "apache-github")
# ASF JIRA username
JIRA_USERNAME = os.environ.get("JIRA_USERNAME", "")
# ASF JIRA password
JIRA_PASSWORD = os.environ.get("JIRA_PASSWORD", "")
# OAuth key used for issuing requests against the GitHub API. If this is not defined, then requests
# will be unauthenticated. You should only need to configure this if you find yourself regularly
# exceeding your IP's unauthenticated request rate limit. You can create an OAuth key at
# https://github.com/settings/tokens. This script only requires the "public_repo" scope.
GITHUB_OAUTH_KEY = os.environ.get("GITHUB_OAUTH_KEY")
GITHUB_USER = os.environ.get("GITHUB_USER", "apache")
GITHUB_BASE = "https://github.com/%s/%s/pull" % (GITHUB_USER, PROJECT_NAME)
GITHUB_API_BASE = "https://api.github.com/repos/%s/%s" % (GITHUB_USER, PROJECT_NAME)
JIRA_BASE = "https://issues.apache.org/jira/browse"
JIRA_API_BASE = "https://issues.apache.org/jira"
# Prefix added to temporary branches
TEMP_BRANCH_PREFIX = "PR_TOOL"
DEV_BRANCH_NAME = "trunk"
DEFAULT_FIX_VERSION = os.environ.get("DEFAULT_FIX_VERSION", "2.3.0")
def get_json(url):
try:
request = urllib2.Request(url)
if GITHUB_OAUTH_KEY:
request.add_header('Authorization', 'token %s' % GITHUB_OAUTH_KEY)
return json.load(urllib2.urlopen(request))
except urllib2.HTTPError as e:
if "X-RateLimit-Remaining" in e.headers and e.headers["X-RateLimit-Remaining"] == '0':
print "Exceeded the GitHub API rate limit; see the instructions in " + \
"kafka-merge-pr.py to configure an OAuth token for making authenticated " + \
"GitHub requests."
else:
print "Unable to fetch URL, exiting: %s" % url
sys.exit(-1)
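# Illustrative call (the PR number is hypothetical):
# pr = get_json("%s/pulls/1234" % GITHUB_API_BASE)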
def fail(msg):
print msg
clean_up()
sys.exit(-1)
def run_cmd(cmd):
print cmd
if isinstance(cmd, list):
return subprocess.check_output(cmd)
else:
return subprocess.check_output(cmd.split(" "))
def continue_maybe(prompt):
result = raw_input("\n%s (y/n): " % prompt)
if result.lower() != "y":
fail("Okay, exiting")
def clean_up():
if original_head != get_current_branch():
print "Restoring head pointer to %s" % original_head
run_cmd("git checkout %s" % original_head)
branches = run_cmd("git branch").replace(" ", "").split("\n")
for branch in filter(lambda x: x.startswith(TEMP_BRANCH_PREFIX), branches):
print "Deleting local branch %s" % branch
run_cmd("git branch -D %s" % branch)
def get_current_branch():
return run_cmd("git rev-parse --abbrev-ref HEAD").replace("\n", "")
# merge the requested PR and return the merge hash
def merge_pr(pr_num, target_ref, title, body, pr_repo_desc):
pr_branch_name = "%s_MERGE_PR_%s" % (TEMP_BRANCH_PREFIX, pr_num)
target_branch_name = "%s_MERGE_PR_%s_%s" % (TEMP_BRANCH_PREFIX, pr_num, target_ref.upper())
run_cmd("git fetch %s pull/%s/head:%s" % (PR_REMOTE_NAME, pr_num, pr_branch_name))
run_cmd("git fetch %s %s:%s" % (PUSH_REMOTE_NAME, target_ref, target_branch_name))
run_cmd("git checkout %s" % target_branch_name)
had_conflicts = False
try:
run_cmd(['git', 'merge', pr_branch_name, '--squash'])
except Exception as e:
msg = "Error merging: %s\nWould you like to manually fix-up this merge?" % e
continue_maybe(msg)
msg = "Okay, please fix any conflicts and 'git add' conflicting files... Finished?"
continue_maybe(msg)
had_conflicts = True
commit_authors = run_cmd(['git', 'log', 'HEAD..%s' % pr_branch_name,
'--pretty=format:%an <%ae>']).split("\n")
distinct_authors = sorted(set(commit_authors),
key=lambda x: commit_authors.count(x), reverse=True)
primary_author = raw_input(
"Enter primary author in the format of \"name <email>\" [%s]: " %
distinct_authors[0])
if primary_author == "":
primary_author = distinct_authors[0]
reviewers = raw_input(
"Enter reviewers in the format of \"name1 <email1>, name2 <email2>\": ").strip()
run_cmd(['git', 'log', 'HEAD..%s' % pr_branch_name, '--pretty=format:%h [%an] %s']).split("\n")
merge_message_flags = []
merge_message_flags += ["-m", title]
if body is not None:
# Remove "Committer Checklist" section
checklist_index = body.find("### Committer Checklist")
if checklist_index != -1:
body = body[:checklist_index].rstrip()
# Remove @ symbols from the body to avoid triggering e-mails to people every time someone creates a
# public fork of the project.
body = body.replace("@", "")
merge_message_flags += ["-m", body]
authors = "\n".join(["Author: %s" % a for a in distinct_authors])
merge_message_flags += ["-m", authors]
if reviewers != "":
merge_message_flags += ["-m", "Reviewers: %s" % reviewers]
if had_conflicts:
committer_name = run_cmd("git config --get user.name").strip()
committer_email = run_cmd("git config --get user.email").strip()
message = "This patch had conflicts when merged, resolved by\nCommitter: %s <%s>" % (
committer_name, committer_email)
merge_message_flags += ["-m", message]
# The string "Closes #%s" is required for GitHub to correctly close the PR
close_line = "Closes #%s from %s" % (pr_num, pr_repo_desc)
merge_message_flags += ["-m", close_line]
run_cmd(['git', 'commit', '--author="%s"' % primary_author] + merge_message_flags)
continue_maybe("Merge complete (local ref %s). Push to %s?" % (
target_branch_name, PUSH_REMOTE_NAME))
try:
run_cmd('git push %s %s:%s' % (PUSH_REMOTE_NAME, target_branch_name, target_ref))
except Exception as e:
clean_up()
fail("Exception while pushing: %s" % e)
merge_hash = run_cmd("git rev-parse %s" % target_branch_name)[:8]
clean_up()
print("Pull request #%s merged!" % pr_num)
print("Merge hash: %s" % merge_hash)
return merge_hash
def cherry_pick(pr_num, merge_hash, default_branch):
pick_ref = raw_input("Enter a branch name [%s]: " % default_branch)
if pic |
hnakamur/saklient.python | saklient/cloud/errors/dontcreateinsandboxexception.py | Python | mit | 899 | 0.009331 | # -*- coding:utf-8 -* | -
from ...errors.httpforbiddenexception import HttpForbiddenException
import saklient
# module saklient.cloud.errors.dontcreateinsandboxexception
class DontCreateInSandboxException(HttpForbiddenException):
## The requested operation is not permitted. Some resources that span zones are billable; please check the pricing and create them in another zone.
## @param {int} status
# @param {str} code=None
# @param {str} message=""
def __init__(self, status, code=None, message=""):
super(DontCreateInSandboxException, self).__init__( | status, code, "要求された操作は許可されていません。ゾーンをまたぐ一部のリソースは課金対象です。料金をご確認の上、他のゾーンで作成してください。" if message is None or message == "" else message)
|
ternaus/submission_merger | src/mean_log_merger_bimbo.py | Python | mit | 753 | 0.009296 | from __future__ import d | ivision
__author__ = 'Vladimir Iglovikov'
'''
Merges predictions for the https://www.kaggle.com/c/grupo-bimbo-inventory-demand competition
Expm1(Mean([Log1p | (x), Log1p(y)]))
'''
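# Equivalently, for two predictions x and y the merged value is
#   expm1((log1p(x) + log1p(y)) / 2)
# i.e. an arithmetic mean taken in log1p space, mapped back with expm1.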
import os
import numpy as np
import sys
import pandas as pd
import time
files = sys.argv[1:]
try:
files.remove('mean_log_merger_bimbo.py')
except ValueError:  # only the remove() miss should be swallowed here
pass
data = [pd.read_csv(fName).sort_values(by='id') for fName in files]
ids = data[0]['id']
result = pd.DataFrame()
submission = pd.DataFrame()
ind = 0
for df in data:
result[ind] = np.log1p(df['Demanda_uni_equil'])
ind += 1
submission['Demanda_uni_equil'] = np.expm1(result.mean(axis=1))
submission['id'] = ids
submission.to_csv('{timestamp}.csv'.format(timestamp=time.time()), index=False) |
looker/sentry | src/sentry/identity/providers/dummy.py | Python | bsd-3-clause | 857 | 0 | from __future__ import absolute_import, print_function
__all__ = ['DummyProvider']
from django.http import HttpResponse
from | sentry.identity.base import Provider
from sentry.pipeline impor | t PipelineView
class AskEmail(PipelineView):
def dispatch(self, request, pipeline):
if 'email' in request.POST:
pipeline.bind_state('email', request.POST.get('email'))
return pipeline.next_step()
return HttpResponse(DummyProvider.TEMPLATE)
class DummyProvider(Provider):
name = 'Dummy'
key = 'dummy'
TEMPLATE = '<form method="POST"><input type="email" name="email" /></form>'
def get_pipeline_views(self):
return [AskEmail()]
def build_identity(self, state):
return {
'id': state['email'],
'email': state['email'],
'name': 'Dummy',
}
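# Illustrative flow (the pipeline object is supplied by sentry's identity machinery):
#   AskEmail renders TEMPLATE, binds the POSTed email into pipeline state,
#   and build_identity() then turns that state into an identity dict.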
|
mohamed--abdel-maksoud/chromium.src | mojo/public/tools/bindings/generators/mojom_dart_generator.py | Python | bsd-3-clause | 11,104 | 0.007745 | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Generates Dart source files from a mojom.Module."""
import mojom.generate.generator as generator
import mojom.generate.module as mojom
import mojom.generate.pack as pack
from mojom.generate.template_expander import UseJinja
_kind_to_dart_default_value = {
mojom.BOOL: "false",
mojom.INT8: "0",
mojom.UINT8: "0",
mojom.INT16: "0",
mojom.UINT16: "0",
mojom.INT32: "0",
mojom.UINT32: "0",
mojom.FLOAT: "0.0",
mojom.HANDLE: "null",
mojom.DCPIPE: "null",
mojom.DPPIPE: "null",
mojom.MSGPIPE: "null",
mojom.SHAREDBUFFER: "null",
mojom.NULLABLE_HANDLE: "null",
mojom.NULLABLE_DCPIPE: "null",
mojom.NULLABLE_DPPIPE: "null",
mojom.NULLABLE_MSGPIPE: "null",
mojom.NULLABLE_SHAREDBUFFER: "null",
mojom.INT64: "0",
mojom.UINT64: "0",
mojom.DOUBLE: "0.0",
mojom.STRING: "null",
mojom.NULLABLE_STRING: "null"
}
_kind_to_dart_decl_type = {
mojom.BOOL: "bool",
mojom.INT8: "int",
mojom.UINT8: "int",
mojom.INT16: "int",
mojom.UINT16: "int",
mojom.INT32: "int",
mojom.UINT32: "int",
mojom.FLOAT: "double",
mojom.HANDLE: "core.RawMojoHandle",
mojom.DCPIPE: "core.RawMojoHandle",
mojom.DPPIPE: "core.RawMojoHandle",
mojom.MSGPIPE: "core.RawMojoHandle",
mojom.SHAREDBUFFER: "core.RawMojoHandle",
mojom.NULLABLE_HANDLE: "core.RawMojoHandle",
mojom.NULLABLE_DCPIPE: "core.RawMojoHandle",
mojom.NULLABLE_DPPIPE: "core.RawMojoHandle",
mojom.NULLABLE_MSGPIPE: "core.RawMojoHandle",
mojom.NULLABLE_SHAREDBUFFER: "core.RawMojoHandle",
mojom.INT64: "int",
mojom.UINT64: "int",
mojom.DOUBLE: "double",
mojom.STRING: "String",
mojom.NULLABLE_STRING: "String"
}
def DartType(kind):
if kind.imported_from:
return kind.imported_from["unique_name"] + "." + kind.name
return kind.name
def DartDefaultValue(field):
if field.default:
if mojom.IsStructKind(field.kind):
assert field.default == "default"
return "new %s()" % DartType(field.kind)
return ExpressionToText(field.default)
if field.kind in mojom.PRIMITIVES:
return _kind_to_dart_default_value[field.kind]
if mojom.IsStructKind(field.kind):
return "null"
if mojom.IsArrayKind(field.kind):
return "null"
if mojom.IsMapKind(field.kind):
return "null"
if mojom.IsInterfaceKind(field.kind) or \
mojom.IsInterfaceRequestKind(field.kind):
return _kind_to_dart_default_value[mojom.MSGPIPE]
if mojom.IsEnumKind(field.kind):
return "0"
def DartDeclType(kind):
if kind in mojom.PRIMITIVES:
return _kind_to_dart_decl_type[kind]
if mojom.IsStructKind(kind):
return DartType(kind)
if mojom.IsArrayKind(kind):
array_type = DartDeclType(kind.kind)
return "List<" + array_type + ">"
if mojom.IsMapKind(kind):
key_type = DartDeclType(kind.key_kind)
value_type = DartDeclType(kind.value_kind)
return "Map<"+ key_type + ", " + value_type + ">"
if mojom.IsInterfaceKind(kind) or \
mojom.IsInterfaceRequestKind(kind):
return _kind_to_dart_decl_type[mojom.MSGPIPE]
if mojom.IsEnumKind(kind):
return "int"
def DartPayloadSize(packed):
packed_fields = packed.packed_fields
if not packed_fields:
return 0
last_field = packed_fields[-1]
offset = last_field.offset + last_field.size
pad = pack.GetPad(offset, 8)
return offset + pad
_kind_to_codec_type = {
mojom.BOOL: "bindings.Uint8",
mojom.INT8: "bindings.Int8",
mojom.UINT8: "bindings.Uint8",
mojom.INT16: "bindings.Int16",
mojom.UINT16: "bindings.Uint16",
mojom.INT32: "bindings.Int32",
mojom.UINT32: "bindings.Uint32",
mojom.FLOAT: "bindings.Float",
mojom.HANDLE: "bindings.Handle",
mojom.DCPIPE: "bindings.Handle",
mojom.DPPIPE: "bindings.Handle",
mojom.MSGPIPE: "bindings.Handle",
mojom.SHAREDBUFFER: "bindings.Handle",
mojom.NULLABLE_HANDLE: "bindings.NullableHandle",
mojom.NULLABLE_DCPIPE: "bindings.NullableHandle",
mojom.NULLABLE_DPPIPE: "bindings.NullableHandle",
mojom.NULLABLE_MSGPIPE: "bindings.NullableHandle",
mojom.NULLABLE_SHAREDBUFFER: "bindings.NullableHandle",
mojom.INT64: "bindings.Int64",
mojom.UINT64: "bindings.Uint64",
mojom.DOUBLE: "bindings.Double",
mojom.STRING: "bindings.MojoString",
mojom.NULLABLE_STRING: "bindings.NullableMojoString",
}
def CodecType(kind):
if kind in mojom.PRIMITIVES:
return _kind_to_codec_type[kind]
if mojom.IsStructKind(kind):
pointer_type = "NullablePointerTo" if mojom.IsNullableKind(kind) \
else "PointerTo"
return "new bindings.%s(%s)" % (pointer_type, DartType(kind))
if mojom.IsArrayKind(kind):
array_type = "NullableArrayOf" if mojom.IsNullableKind(kind) else "ArrayOf"
array_length = "" if kind.length is None else ", %d" % kind.length
element_type = ElementCodecType(kind.kind)
return "new bindings.%s(%s%s)" % (array_type, element_type, array_length)
if mojom.IsInterfaceKind(kind) or mojom.IsInterfaceRequestKind(kind):
return CodecType(mojom.MSGPIPE)
if mojom.IsEnumKind(kind):
return _kind_to_codec_type[mojom.INT32]
if mojom.IsMapKind(kind):
map_type = "NullableMapOf" if mojom.IsNullableKind(kind) else "MapOf"
key_type = ElementCodecType(kind.key_kind)
value_type = ElementCodecType(kind.value_kind)
return "new bindings.%s(%s, %s)" % (map_type, key_type, value_type)
return kind
def ElementCodecType(kind):
return "bindings.PackedBool" if mojom.IsBoolKind(kind) else CodecType(kind)
def DartDecodeSnippet(kind):
if kind in mojom.PRIMITIVES:
return "decodeStruct(%s)" % CodecType(kind)
if mojom.IsStructKind(kind):
return "decodeStructPointer(%s)" % DartType(kind)
if mojom.IsMapKind(kind):
return "decodeMapPointer(%s, %s)" % \
(ElementCodecType(kind.key_kind), ElementCodecType(kind.value_kind))
if mojom.IsArrayKind(kind) and mojom.IsBoolKind(kind.kind):
return "decodeArrayPointer(bindings | .PackedBool)"
if mojom.IsArrayKind(kind):
return "decodeArrayPointer(%s)" % CodecType(kind.kind)
if mojom.IsInterfaceKind(kind) or mojom.IsInterfaceRequestKind(kind):
return DartDecodeSnippet(mojom.MSGPIPE)
if mojom.IsEnumKind(ki | nd):
return DartDecodeSnippet(mojom.INT32)
def DartEncodeSnippet(kind):
if kind in mojom.PRIMITIVES:
return "encodeStruct(%s, " % CodecType(kind)
if mojom.IsStructKind(kind):
return "encodeStructPointer(%s, " % DartType(kind)
if mojom.IsMapKind(kind):
return "encodeMapPointer(%s, %s, " % \
(ElementCodecType(kind.key_kind), ElementCodecType(kind.value_kind))
if mojom.IsArrayKind(kind) and mojom.IsBoolKind(kind.kind):
return "encodeArrayPointer(bindings.PackedBool, ";
if mojom.IsArrayKind(kind):
return "encodeArrayPointer(%s, " % CodecType(kind.kind)
if mojom.IsInterfaceKind(kind) or mojom.IsInterfaceRequestKind(kind):
return DartEncodeSnippet(mojom.MSGPIPE)
if mojom.IsEnumKind(kind):
return DartEncodeSnippet(mojom.INT32)
def TranslateConstants(token):
if isinstance(token, (mojom.EnumValue, mojom.NamedValue)):
# Both variable and enum constants are constructed like:
# NamespaceUid.Struct.Enum_CONSTANT_NAME
name = ""
if token.imported_from:
name = token.imported_from["unique_name"] + "."
if token.pa |
light940929/niagadsofinquery | testexample/simplePostwithPython.py | Python | mit | 3,247 | 0.00616 | """NIAGADSOFINQUERY API application.
simplePostwithPython.py get -n <titlename> -i <individualnum> -s <snpnum> -f <tfampath> -p <tpedpath> -a <apitoken>
Usage:
simplePostwithPython.py get -n <titlename> -i <individualnum> -s <snpnum> -f <tfampath> -p <tpedpath> -a <apitoken>
simplePostwithPython.py (-h | --help)
simplePostwithPython.py (-v | --version)
Options:
-n --titlename <titlename> input title
-i --individualnum <individualnum> input individual num
-s --snpnum <snpnum> input snp num
-f --tfampath <tfampath> input tfam path
-p --tpedpath <tpedpath> input tped path
-a --apitoken <apitoken> input api token
-h --help show this screen
-v --version show version and exit
"""
import os
import re
import json
import sys
import getopt
import argparse
from docopt import docopt
from urllib2 import urlopen, Request
import urllib
import urllib2
import requests
arguments = docopt(__doc__, version='0.0.1')
url_phenotypes = 'http://localhost:9000/api/phenotypes'
url_genotypes = 'http://localhost:9000/api/genotypes'
token = 'Bearer ' + arguments['--apitoken']
headers = {'Authorization': '%s' % token}
request_phenotypes = Request(url_phenotypes, headers=headers)
request_genotypes = Request(url_genotypes, headers=headers)
response_phenotypes = urlopen(request_phenotypes)
response_genotypes = urlopen(request_genotypes)
data_phenotypes = json.loads(response_phenotypes.read())
data_genotypes = json.loads(response_genotypes.read())
def postPhenotypes(url_phenotypes, token, headers):
# tfam columns: familyID, individualID, paternalID, maternalID, sex (1 or 2), phenotype (1=control, 2=case)
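# e.g. a (hypothetical) line: FAM001 IND001 0 0 1 2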
list = []
lines = [line.strip() for line in open(arguments['--tfampath'])]
for line in lines:
ids=line.split()
#print ids
print "{title:"+arguments['--titlename']+",family_id:"+ids[0]+",individual_id:"+ids[1]+",paternal_id:"+ids[2]+",maternal_id:"+ids[3]+",sex:"+ids[4]+",affection_status:"+ids[5]+"}"
values = {"title": arguments['--titlename'], "family_id": ids[0], "individual_id": ids[1], "paternal_id": ids[2], "maternal_id": ids[3], "sex": ids[4], "affection_status": ids[5]}
data = json.dumps(values)
print data
req = requests.post(url_phenotypes, data, headers=headers)
print req.status_code
def postGenotypes(url_genotypes, token, headers):
list = []
lines = [line.strip() for line in open(arguments['--tpedpath'])]
for line in lines:
ids=line.split()
indnum=int(arguments['--individualnum'])
snpnum=int(arguments['--snpnum'])
num = indnum*snpnum
#print ids
strina = ''.join(ids[4:num+4])
call = strina.strip(',')
print "{title:"+arguments['--titlename']+",chr:"+ids[0]+",variant_id:"+ids[1]+",location:"+ids[2]+",coordinate:"+ids[3]+",call:"+call+"}"
values = {"title": arguments[ | '--titlename'], "chr": ids[0], "variant_id": ids[1], "location": ids[2], "coordinate": ids[3], "call": call}
data = json.dumps(values)
print data
req = requests.post(url_genotypes, data, headers=headers)
print req.status_code
postPhenot | ypes(url_phenotypes, token, headers)
postGenotypes(url_genotypes, token, headers)
|
rahulunair/nova | nova/tests/functional/test_server_faults.py | Python | apache-2.0 | 5,378 | 0 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to | in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, eithe | r express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from nova import test
from nova.tests import fixtures as nova_fixtures
from nova.tests.functional import fixtures as func_fixtures
from nova.tests.functional import integrated_helpers
from nova.tests.unit.image import fake as fake_image
from nova.tests.unit import policy_fixture
class HypervisorError(Exception):
"""This is just used to make sure the exception type is in the fault."""
pass
class ServerFaultTestCase(test.TestCase,
integrated_helpers.InstanceHelperMixin):
"""Tests for the server faults reporting from the API."""
def setUp(self):
super(ServerFaultTestCase, self).setUp()
# Setup the standard fixtures.
fake_image.stub_out_image_service(self)
self.addCleanup(fake_image.FakeImageService_reset)
self.useFixture(nova_fixtures.NeutronFixture(self))
self.useFixture(func_fixtures.PlacementFixture())
self.useFixture(policy_fixture.RealPolicyFixture())
# Start the compute services.
self.start_service('conductor')
self.start_service('scheduler')
self.compute = self.start_service('compute')
api_fixture = self.useFixture(nova_fixtures.OSAPIFixture(
api_version='v2.1'))
self.api = api_fixture.api
self.admin_api = api_fixture.admin_api
def test_server_fault_non_nova_exception(self):
"""Creates a server using the non-admin user, then reboots it which
will generate a non-NovaException fault and put the instance into
ERROR status. Then checks that fault details are only visible to the
admin user.
"""
# Create the server with the non-admin user.
server = self._build_server(
networks=[{'port': nova_fixtures.NeutronFixture.port_1['id']}])
server = self.api.post_server({'server': server})
server = self._wait_for_state_change(server, 'ACTIVE')
# Stop the server before rebooting it so that after the driver.reboot
# method raises an exception, the fake driver does not report the
# instance power state as running - that will make the compute manager
# set the instance vm_state to error.
self.api.post_server_action(server['id'], {'os-stop': None})
server = self._wait_for_state_change(server, 'SHUTOFF')
# Stub out the compute driver reboot method to raise a non-nova
# exception to simulate some error from the underlying hypervisor
# which in this case we are going to say has sensitive content.
error_msg = 'sensitive info'
with mock.patch.object(
self.compute.manager.driver, 'reboot',
side_effect=HypervisorError(error_msg)) as mock_reboot:
reboot_request = {'reboot': {'type': 'HARD'}}
self.api.post_server_action(server['id'], reboot_request)
# In this case we wait for the status to change to ERROR using
# the non-admin user so we can assert the fault details. We also
# wait for the task_state to be None since the wrap_instance_fault
# decorator runs before the reverts_task_state decorator so we will
# be sure the fault is set on the server.
server = self._wait_for_server_parameter(
server, {'status': 'ERROR', 'OS-EXT-STS:task_state': None},
api=self.api)
mock_reboot.assert_called_once()
# The server fault from the non-admin user API response should not
# have details in it.
self.assertIn('fault', server)
fault = server['fault']
self.assertNotIn('details', fault)
# And the sensitive details from the non-nova exception should not be
# in the message.
self.assertIn('message', fault)
self.assertNotIn(error_msg, fault['message'])
# The exception type class name should be in the message.
self.assertIn('HypervisorError', fault['message'])
# Get the server fault details for the admin user.
server = self.admin_api.get_server(server['id'])
fault = server['fault']
# The admin can see the fault details which includes the traceback.
self.assertIn('details', fault)
# The details also contain the exception message (which is not in the
# fault message).
self.assertIn(error_msg, fault['details'])
# Make sure the traceback is there by looking for part of it.
self.assertIn('in reboot_instance', fault['details'])
# The exception type class name should be in the message for the admin
# user as well since the fault handling code cannot distinguish who
# is going to see the message so it only sets class name.
self.assertIn('HypervisorError', fault['message'])
|
skonefal/workloadsutils | ab_util.py | Python | apache-2.0 | 972 | 0.013374 | import os
import datetime
import time
IDLE_TIME = 2 * 60
STRESS_ITERATION_TIME = 10 * 60
STRESS_LEVELS = [2,10,25,50,75,100]
# ENDPOINTS = ["http://10.102.44.201/index.php/Special:Random", "http://10.102.44.202/index.php/Special:Random", "http://10.102.44.203/index.php/Special:Random"]
def d | o_stress():
print("{0}: Starting idle time for {1} seconds".format(datetime.datetime.now(), IDLE_TIME))
time.sleep(IDLE_TIME)
for stress_level in STRESS_LEVELS:
Timestamp = datetime.datetime.now()
print("{0}: Starting stress level {1} for {2} secs".format(
datetime.datetime.now(), stress_level, STRESS_I | TERATION_TIME))
os.system("ab -c {0} -n 500000 -l -r http://10.102.44.202/index.php/Special:Random".format(
stress_level))
pass
print("{0}: Stress finished after {1} iterations".format(
datetime.datetime.now(), len(STRESS_LEVELS)))
return
if __name__ == '__main__':
do_stress() |
ramineni/myironic | ironic/tests/drivers/test_pxe.py | Python | apache-2.0 | 44,025 | 0.000409 | # coding=utf-8
# Copyright 2013 Hewlett-Packard Development Company, L.P.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Test class for PXE driver."""
import os
import tempfile
import fixtures
import mock
from oslo_config import cfg
from oslo_serialization import jsonutils as json
from ironic.common import boot_devices
from ironic.common import dhcp_factory
from ironic.common import exception
from ironic.common.glance_service import base_image_service
from ironic.common import keystone
from ironic.common import pxe_utils
from ironic.common import states
from ironic.common import utils
from ironic.conductor import task_manager
from ironic.conductor import utils as manager_utils
from ironic.drivers.modules import deploy_utils
from ironic.drivers.modules import iscsi_deploy
from ironic.drivers.modules import pxe
from ironic.openstack.common import fileutils
from ironic.tests.conductor import utils as mgr_utils
from ironic.tests.db import base as db_base
from ironic.tests.db import utils as db_utils
from ironic.tests.objects import utils as obj_utils
CONF = cfg.CONF
INST_INFO_DICT = db_utils.get_test_pxe_instance_info()
DRV_INFO_DICT = db_utils.get_test_pxe_driver_info()
class PXEValidateParametersTestCase(db_base.DbTestCase):
def test__parse_deploy_info(self):
# make sure we get back the expected things
node = obj_utils.create_test_node(self.context,
driver='fake_pxe',
instance_info=INST_INFO_DICT,
driver_info=DRV_INFO_DICT)
info = pxe._parse_deploy_info(node)
self.assertIsNotNone(info.get('deploy_ramdisk'))
self.assertIsNotNone(info.get('deploy_kernel'))
self.assertIsNotNone(info.get('image_source'))
self.assertIsNotNone(info.get('root_gb'))
self.assertEqual(0, info.get('ephemeral_gb'))
def test__parse_driver_info_missing_deploy_kernel(self):
# make sure error is raised when info is missing
info = dict(DRV_INFO_DICT)
del info['pxe_deploy_kernel']
node = obj_utils.create_test_node(self.context, driver_info=info)
self.assertRaises(exception.MissingParameterValue,
pxe._parse_driver_info,
node)
def test__parse_driver_info_missing_deploy_ramdisk(self):
# make sure error is raised when info is missing
info = dict(DRV_INFO_DICT)
del info['pxe_deploy_ramdisk']
node = obj_utils.create_test_node(self.context, driver_info=info)
self.assertRaises(exception.MissingParameterValue,
pxe._parse_driver_info,
node)
def test__parse_driver_info_good(self):
# make sure we get back the expected things
node = obj_utils.create_test_node(self.context,
driver='fake_pxe',
driver_info=DRV_INFO_DICT)
info = pxe._parse_driver_info(node)
self.assertIsNotNone(info.get('deploy_ramdisk'))
self.assertIsNotNone(info.get('deploy_kernel'))
class PXEPrivateMethodsTestCase(db_base.DbTestCase):
def setUp(self):
super(PXEPrivateMethodsTestCase, self).setUp()
n = {
'driver': 'fake_pxe',
'instance_info': INST_INFO_DICT,
'driver_info': DRV_INFO_DICT,
}
mgr_utils.mock_the_extension_manager(driver="fake_pxe")
self.node = obj_utils.create_test_node(self.context, **n)
@mock.patch.object(base_image_service.BaseImageService, '_show')
def test__get_image_info(self, show_mock):
properties = {'properties': {u'kernel_id': u'instance_kernel_uuid',
u'ramdisk_id': u'instance_ramdisk_uuid'}}
expected_info = {'ramdisk':
('instance_ramdisk_uuid',
os.path.join(CONF.pxe.tftp_root,
self.node.uuid,
'ramdisk')),
'kernel':
('instance_kernel_uuid',
os.path.join(CONF.pxe.tftp_root,
self.node.uuid,
'kernel')),
'deploy_ramdisk':
(DRV_INFO_DICT['pxe_deploy_ramdisk'],
os.path.join(CONF.pxe.tftp_root,
self.node.uuid,
'deploy_ramdisk')),
'deploy_kernel':
(DRV_INFO_DICT['pxe_deploy_kernel'],
os.path.join(CONF.pxe.tftp_root,
self.node.uuid,
'deploy_kernel'))}
show_mock.return_value = properties
image_info = pxe._get_image_info(self.node, self.context)
show_mock.assert_called_once_with('glance://image_uuid',
method='get')
self.assertEqual(expected_info, image_info)
# test with saved info
show_mock.reset_mock()
image_info = pxe._get_image_info(self.node, self.context)
self.assertEqual(expected_info, image_info)
self.assertFalse(show_mock.called)
self.assertEqual('instance_kernel_uuid',
self.node.instance_info.get('kernel'))
self.assertEqual('instance_ramdisk_uuid',
self.node.instance_info.get('ramdisk'))
@mock.patch.object(iscsi_deploy, 'build_deploy_ramdisk_options')
@mock.patch.object(pxe_utils, '_build_pxe_config')
def _test_build_pxe_config_options(self, build_pxe_mock, deploy_opts_mock,
ipxe_enabled=False):
self.config(pxe_append_params='test_param', group='pxe')
# NOTE: right '/' should be removed from url string
self.config(api_url='http://192.168.122.184:6385/', group='conductor')
self.config(disk_devices='sda', group='pxe')
fake_deploy_opts = {'iscsi_target_iqn': 'fake-iqn',
'deployment_id': 'fake-deploy-id',
'deployment_key': 'fake-deploy-key',
'disk': 'fake-disk',
'ironic_api_url': 'fake-api-url',
'boot_option': 'netboot'}
deploy_opts_mock.return_value = fake_deploy_opts
tftp_server = CONF.pxe.tftp_server
if ipxe_enabled:
http_url = 'http://192.1.2.3:1234'
self.config(ipxe_enabled=True, group='pxe')
self.config(http_url=http_url, group='pxe')
deploy_kernel = os.path.join(http_url, self.node.uuid,
| 'deploy_kernel')
deploy_ramdisk = os.path.join(http_url, self.node.uuid,
'deploy_ramdisk')
kernel = os.path.join(http_url, self.node.uuid, 'kernel')
ramdisk = os.path.join(http_url, self.node.uuid, 'ramdisk')
root_dir = CONF.pxe.http_root
else:
deploy_kernel = os.path.join(CONF.pxe.tftp_root, self.node.uuid,
| 'deploy_kernel')
deploy_ramdisk = os.path.join(CONF.pxe.tftp_root, self.node.uuid,
'deploy_ramdisk')
kernel = os.path.join(CONF.pxe.tftp_root, self.node.uuid,
'kernel')
ramdisk = os. |
gglyptodon/marcook | markovchain.py | Python | gpl-3.0 | 1,417 | 0.015526 | import random
import sys
class MarkovChain(object):
def __init__(self, separator = None, corpus = None):
self.separator = separator
self.corpus = corpus
self.chain = self.setChain()
def setChain(self):
chain = {}
if self.separator is None:
allItems = self.corpus.split()
else:
allItems = self.corpus.split(self.separator)
mx = allItems[len(allItems)-1]
for i,x in enumerate(allItems):
if i == len(allItems) -1 :
pass
else:
try:
chain[x].append(allItems[i+1])
except KeyError as e:
chain[x] =[allItems[i+1]]
try:
| chain[mx].append("\n")
except KeyError as e:
chain[mx] = ["\n"]
return(chain)
def printSth(self,maxItems = 20):
res =""
t = random.choice(self.chain.keys())
| for i in range(0,maxItems):
try:
print(self.chain[t])
tmp = random.choice(self.chain[t])
res += " "+tmp
t= tmp
except KeyError as e:
return(res)
return(res)
def main():
mc = MarkovChain(corpus = open(sys.argv[1],'r').read(), separator = " ")
#print(mc.chain)
print(mc.printSth(int(sys.argv[2])))
if __name__ == "__main__":
main()
|
SU-ECE-17-7/ibeis | _broken/_old_qt_hs_matcher/automated_params.py | Python | apache-2.0 | 2,697 | 0.003708 | # -*- coding: utf-8 -*-
"""
module that specified how we choose paramaters based on current search database
properties
"""
from __future__ import absolute_import, division, print_function
#import six
import utool as ut
#import numpy as np
#import vtool as vt
#from ibeis.algo.hots import hstypes
#from ibeis.algo.hots import match_chips4 as mc4
#from ibeis.algo.hots import distinctiveness_normalizer
#from six.moves import filter
print, print_, printDBG, rrr, profile = ut.inject(__name__, '[autoparams]')
@profile
def choose_vsmany_K(num_names, qaids, daids):
"""
TODO: Should also scale up the number of checks
method for choosing K in the initial vsmany queries
Ignore:
>>> # DISABLE_DOCTEST
>>> # Shows plot for K vs number of names
>>> from ibeis.algo.hots.automated_params import * # NOQA
>>> import ibeis
>>> from ibeis import constants as const
>>> ibs = ibeis.opendb(defaultdb='testdb1')
>>> valid_aids = ibs.get_valid_aids(species=const.TEST_SPECIES.ZEB_PLAIN)
>>> num_names = np.arange(0, 1000)
>>> num_names_slope = .1
>>> K_max = 10
>>> K_min = 1
>>> K_list = np.floor(num_names_slope * num_names)
>>> K_list[K_list > K_max] = K_max
>>> K_list[K_list < K_min] = K_min
>>> clip_index_list = np.where(K_list >= K_max)[0]
>>> clip_index = clip_index_list[min(len(clip_index_list) - 1, 10)]
>>> K_list = K_list[0:clip_index]
>>> num_names = num_names[0:clip_index]
>>> pt.plot2(num_names, K_list, x_label='num_names', y_label='K',
... equal_aspect=False, marker='g-', pad=1, dark=True)
>>> pt.update()
"""
#K = ibs.cfg.query_cfg.nn_cfg.K
# TODO: parameterize in config
num_names_slope = .1  # increase K by one for every ten names
K_max = 10
K_min = 1
num_names_lower = K_min / num_names_slope
num_names_upper = K_max / num_names_slope
if num_names < num_names_lower:
K = K_min
elif num_names < num_names_upper:
K = num_names_slope * num_names
else:
K = K_max
with ut.embed_on_exception_context:
if len(ut.intersect_ordered(qaids, daids)) > 0:
# if self is in query bump k
K += 1
return K
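# Illustrative values (slope .1, K clamped to [K_min, K_max] = [1, 10],
# and no overlap between qaids and daids):
#   num_names = 5   -> K = 1     (below num_names_lower = 10)
#   num_names = 50  -> K = 5.0   (.1 * 50)
#   num_names = 500 -> K = 10    (above num_names_upper = 100)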
if __name__ == '__main__':
"""
CommandLin | e:
python -m ibeis.algo.hots.automated_params
python -m ibeis.algo.hots.automated_ | params --allexamples
python -m ibeis.algo.hots.automated_params --allexamples --noface --nosrc
"""
import multiprocessing
multiprocessing.freeze_support() # for win32
import utool as ut # NOQA
ut.doctest_funcs()
|
Dining-Engineers/left-luggage-detection | misc/demo/demo_mp_async.py | Python | gpl-2.0 | 1,051 | 0 | #!/usr/bin/env python
import freenect
import signal
import matplotlib.pyplot as mp
from misc.demo import frame_convert
mp.ion()
image_rgb = None
image_depth = None
keep_running = True
def display_depth(dev, data, timestamp):
global image_depth
data = frame_convert.pretty_depth(data)
mp.gray()
mp.figure(1)
if image_dep | th:
image_depth.set_data(data)
else:
image_depth = mp.imshow(data, interpolation='nearest', animated=True)
mp.draw()
def display_rgb(dev, data, timestamp):
global image_rgb
mp.figure(2)
if image_rgb:
image_rgb.set_data(data)
else:
image_rgb = mp.ims | how(data, interpolation='nearest', animated=True)
mp.draw()
def body(*args):
if not keep_running:
raise freenect.Kill
def handler(signum, frame):
global keep_running
keep_running = False
print('Press Ctrl-C in terminal to stop')
signal.signal(signal.SIGINT, handler)
freenect.runloop(depth=display_depth,
video=display_rgb,
body=body)
|
EricSchles/regulations-parser | regparser/notice/compiler.py | Python | cc0-1.0 | 21,565 | 0.000139 | """ Notices indicate how a regulation has changed since the last version. This
module contains code to compile a regulation from a notice's changes. """
from bisect import bisect
from collections import defaultdict
import copy
import itertools
import logging
from regparser.grammar.tokens import Verb
from regparser.tree.struct import Node, find
from regparser.tree.xml_parser import interpretations
from regparser.tree.xml_parser import tree | _utils
from regparser.utils import roman_nums
def get_parent_label(node):
""" Given a node, get the label of it's parent. """
if node.node_type == Node.SUBPART:
return node.label[0]
elif node.node_type == Node.INTERP:
marker_position = node.label.index(Node.INTERP_MARK)
interpreting = node.label[:marker_position] |
comment_pars = node.label[marker_position + 1:]
if comment_pars: # 111-3-a-Interp-4-i
return '-'.join(node.label[:-1])
elif len(interpreting) > 1: # 111-3-a-Interp
return '-'.join(interpreting[:-1] + [Node.INTERP_MARK])
else: # 111-Interp
return node.label[0]
else:
parent_label = node.label[:-1]
return '-'.join(parent_label)
def make_label_sortable(label, roman=False):
""" Make labels sortable, but converting them as appropriate.
Also, appendices have labels that look like 30(a), we make those
appropriately sortable. """
if label.isdigit():
return (int(label),)
if roman:
romans = list(itertools.islice(roman_nums(), 0, 50))
return (1 + romans.index(label),)
# segment the label piece into component parts
# e.g. 45Ai33b becomes (45, 'A', 'i', 33, 'b')
INT, UPPER, LOWER = 1, 2, 3
segments, segment, seg_type = [], "", None
for ch in label:
if ch.isdigit():
ch_type = INT
elif ch.isalpha() and ch == ch.upper():
ch_type = UPPER
elif ch.isalpha() and ch == ch.lower():
ch_type = LOWER
else:
# other character, e.g. parens, guarantee segmentation
ch_type = None
if ch_type != seg_type and segment: # new type of character
segments.append(segment)
segment = ""
seg_type = ch_type
if ch_type:
segment += ch
if segment: # ended with something other than a paren
segments.append(segment)
segments = [int(seg) if seg.isdigit() else seg for seg in segments]
return tuple(segments)
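# Illustrative results (the roman case assumes roman_nums() yields i, ii, iii, ...):
#   make_label_sortable("12")              -> (12,)
#   make_label_sortable("45Ai33b")         -> (45, 'A', 'i', 33, 'b')
#   make_label_sortable("iv", roman=True)  -> (4,)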
def make_root_sortable(label, node_type):
""" Child nodes of the root contain nodes of various types, these
need to be sorted correctly. This returns a tuple to help
sort these first level nodes. """
if node_type == Node.SUBPART or node_type == Node.EMPTYPART:
return (0, label[-1])
elif node_type == Node.APPENDIX:
return (1, label[-1])
elif node_type == Node.INTERP:
return (2,)
def replace_first_sentence(text, replacement):
""" Replace the first sentence in text with replacement. This makes
some incredibly simplifying assumptions - so buyer beware. """
no_periods_replacement = replacement.replace('.', '')
sentences = text.split('.', 1)
if len(sentences) > 1:
sentences[0] = no_periods_replacement
return '.'.join(sentences)
else:
return replacement
def overwrite_marker(origin, new_label):
""" The node passed in has a label, but we're going to give it a
new one (new_label). This is necessary during node moves. """
if origin.node_type == Node.REGTEXT:
marker_list = tree_utils.get_paragraph_markers(origin.text)
if len(marker_list) > 0:
marker = '(%s)' % marker_list[0]
new_marker = '(%s)' % new_label
origin.text = origin.text.replace(marker, new_marker, 1)
elif origin.node_type == Node.INTERP:
marker = interpretations.get_first_interp_marker(origin.text)
marker = marker + '.'
new_marker = new_label + '.'
origin.text = origin.text.replace(marker, new_marker, 1)
return origin
def is_reserved_node(node):
""" Return true if the node is reserved. """
reserved_title = node.title and '[Reserved]' in node.title
reserved_text = node.text and '[Reserved]' in node.text
return (reserved_title or reserved_text)
def is_interp_placeholder(node):
"""Interpretations may have nodes that exist purely to enforce
structure. Knowing if a node is such a placeholder makes it easier to
know if a POST should really just modify the existing placeholder."""
return (Node.INTERP_MARK in node.label
and not node.text and not node.title)
class RegulationTree(object):
""" This encapsulates a regulation tree, and methods to change that tree.
"""
def __init__(self, previous_tree):
self.tree = copy.deepcopy(previous_tree)
self._kept__by_parent = defaultdict(list)
def keep(self, labels):
"""The 'KEEP' verb tells us that a node should not be removed
(generally because it would be, had we dropped the children of its
parent). "Keeping" those nodes makes sure they do not disappear when
editing their parent"""
for label in labels:
node = self.find_node(label)
parent_label = get_parent_label(node)
self._kept__by_parent[parent_label].append(node)
def get_parent(self, node):
""" Get the parent of a node. Returns None if parent not found. """
parent_label_id = get_parent_label(node)
return find(self.tree, parent_label_id)
def add_to_root(self, node):
""" Add a child to the root of the tree. """
self.tree.children.append(node)
for c in self.tree.children:
c.sortable = make_root_sortable(c.label, c.node_type)
self.tree.children.sort(key=lambda x: x.sortable)
for c in self.tree.children:
del c.sortable
def add_child(self, children, node, order=None):
""" Add a child to the children, and sort appropriately. This is used
for non-root nodes. """
children = children + [node] # non-destructive
if order and set(order) == set(c.label_id() for c in children):
lookup = {}
for c in children:
lookup[c.label_id()] = c
return [lookup[label_id] for label_id in order]
else:
sort_order = []
for c in children:
if c.label[-1] == Node.INTERP_MARK:
sort_order.append((2,) + make_label_sortable(
c.label[-2], roman=(len(c.label) == 6)))
elif Node.INTERP_MARK in c.label:
marker_idx = c.label.index(Node.INTERP_MARK)
comment_pars = c.label[marker_idx + 1:]
sort_order.append((1,) + make_label_sortable(
comment_pars[-1], roman=(len(comment_pars) == 2)))
elif c.node_type == Node.APPENDIX:
sort_order.append(make_label_sortable(c.label[-1], False))
else:
sort_order.append(make_label_sortable(
c.label[-1], roman=(len(c.label) == 5)))
new_el_sort = sort_order[-1]
sort_order = sort_order[:-1]
# Use bisect so the whole list isn't resorted (the original list
# may not be strictly sorted)
insert_idx = bisect(sort_order, new_el_sort)
return children[:insert_idx] + [node] + children[insert_idx:-1]
def delete_from_parent(self, node):
""" Delete node from it's parent, effectively removing it from the
tree. """
parent = self.get_parent(node)
other_children = [c for c in parent.children if c.label != node.label]
parent.children = other_children
def delete(self, label_id):
""" Delete the node with label_id from the tree. """
node = find(self.tree, label_id)
if node is None:
logging.warning("Attempting to delete %s failed", |
pillmuncher/hornet | src/examples/parsing.py | Python | mit | 12,639 | 0.003561 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Copyright (C) 2014 Mick Krippendorf <[email protected]>
__version__ = '0.2.5a'
__date__ = '2014-09-27'
__author__ = 'Mick Krippendorf <[email protected]>'
__license__ = 'MIT'
import pprint
from hornet import *
from hornet.symbols import (
A, Adj, B, C, Case, D, Det, E, F, Gender, L, NP, Noun, Number, Rest, S, T,
Trans, VP, Verb, W, X, Y, Z, accusative, adj, dative, det, feminine,
genitive, intransitive, masculine, neuter, nominative, noun, noun_unknown,
np, np_unknown, plural, s, singular, transitive, verb, vp,
)
def grammar(db):
db.tell(
s(S) << s(S, []),
s >>
np(Number, nominative) &
| vp(Number, nominative, intransitive),
s >>
np(Number, Case) &
vp(Number, Case, transitive),
np(plural, Case) >>
noun(_, plural, Case),
np(Number, Case) >>
det(Gender, Number, Case) &
noun(Gender, Number, Case),
vp(Number, nominative, intransitive) >>
verb(Number, nominative, intransitive),
vp(Number, accusative, intransitive) >>
verb(Number, accusative, intransitive),
| vp(_, dative, transitive) >>
verb(Number, nominative, transitive) &
np(Number, nominative),
vp(Number, nominative, transitive) >>
verb(Number, nominative, transitive) &
np(_, dative),
vp(Number, nominative, transitive) >>
verb(Number, accusative, transitive) &
np(_, accusative),
det(masculine, singular, nominative) >> ['der'],
det(masculine, singular, genitive) >> ['des'],
det(masculine, singular, dative) >> ['dem'],
det(masculine, singular, accusative) >> ['den'],
det(masculine, plural, nominative) >> ['die'],
det(masculine, plural, genitive) >> ['der'],
det(masculine, plural, dative) >> ['den'],
det(masculine, plural, accusative) >> ['die'],
det(feminine, singular, nominative) >> ['die'],
det(feminine, singular, genitive) >> ['der'],
det(feminine, singular, dative) >> ['der'],
det(feminine, singular, accusative) >> ['die'],
det(feminine, plural, nominative) >> ['die'],
det(feminine, plural, genitive) >> ['der'],
det(feminine, plural, dative) >> ['den'],
det(feminine, plural, accusative) >> ['die'],
det(neuter, singular, nominative) >> ['das'],
det(neuter, singular, genitive) >> ['des'],
det(neuter, singular, dative) >> ['dem'],
det(neuter, singular, accusative) >> ['das'],
det(neuter, plural, nominative) >> ['die'],
det(neuter, plural, genitive) >> ['der'],
det(neuter, plural, dative) >> ['den'],
det(neuter, plural, accusative) >> ['die'],
det(masculine, singular, nominative) >> ['ein'],
det(masculine, singular, genitive) >> ['eines'],
det(masculine, singular, dative) >> ['einem'],
det(masculine, singular, accusative) >> ['einen'],
det(feminine, singular, nominative) >> ['eine'],
det(feminine, singular, genitive) >> ['einer'],
det(feminine, singular, dative) >> ['einer'],
det(feminine, singular, accusative) >> ['eine'],
det(_, plural, nominative) >> ['einige'],
det(_, plural, genitive) >> ['einiger'],
det(_, plural, dative) >> ['einigen'],
det(_, plural, accusative) >> ['einige'],
det(_, plural, nominative) >> ['viele'],
det(_, plural, genitive) >> ['vieler'],
det(_, plural, dative) >> ['vielen'],
det(_, plural, accusative) >> ['viele'],
det(_, plural, nominative) >> ['alle'],
det(_, plural, genitive) >> ['aller'],
det(_, plural, dative) >> ['allen'],
det(_, plural, accusative) >> ['alle'],
det(masculine, singular, nominative) >> ['kein'],
det(masculine, singular, genitive) >> ['keines'],
det(masculine, singular, dative) >> ['keinem'],
det(masculine, singular, accusative) >> ['keinen'],
det(masculine, plural, nominative) >> ['keine'],
det(masculine, plural, genitive) >> ['keiner'],
det(masculine, plural, dative) >> ['keinen'],
det(masculine, plural, accusative) >> ['keine'],
det(feminine, singular, nominative) >> ['keine'],
det(feminine, singular, genitive) >> ['keiner'],
det(feminine, singular, dative) >> ['keiner'],
det(feminine, singular, accusative) >> ['keine'],
det(feminine, plural, nominative) >> ['keine'],
det(feminine, plural, genitive) >> ['keiner'],
det(feminine, plural, dative) >> ['keinen'],
det(feminine, plural, accusative) >> ['keine'],
det(masculine, singular, nominative) >> ['mancher'],
det(masculine, singular, genitive) >> ['manches'],
det(masculine, singular, dative) >> ['manchem'],
det(masculine, singular, accusative) >> ['manchen'],
det(masculine, plural, nominative) >> ['manche'],
det(masculine, plural, genitive) >> ['mancher'],
det(masculine, plural, dative) >> ['manchen'],
det(masculine, plural, accusative) >> ['manchen'],
det(feminine, singular, nominative) >> ['manche'],
det(feminine, singular, genitive) >> ['mancher'],
det(feminine, singular, dative) >> ['mancher'],
det(feminine, singular, accusative) >> ['manche'],
det(feminine, plural, nominative) >> ['manche'],
det(feminine, plural, genitive) >> ['mancher'],
det(feminine, plural, dative) >> ['manchen'],
det(feminine, plural, accusative) >> ['manche'],
det(masculine, singular, nominative) >> ['jeder'],
det(masculine, singular, genitive) >> ['jedes'],
det(masculine, singular, dative) >> ['jedem'],
det(masculine, singular, accusative) >> ['jeden'],
det(feminine, singular, nominative) >> ['jede'],
det(feminine, singular, genitive) >> ['jeder'],
det(feminine, singular, dative) >> ['jeder'],
det(feminine, singular, accusative) >> ['jede'],
noun(masculine, singular, nominative) >> ['hund'],
noun(masculine, singular, genitive) >> ['hundes'],
noun(masculine, singular, dative) >> ['hund'],
noun(masculine, singular, accusative) >> ['hund'],
noun(masculine, plural, nominative) >> ['hunde'],
noun(masculine, plural, genitive) >> ['hunde'],
noun(masculine, plural, dative) >> ['hunden'],
noun(masculine, plural, accusative) >> ['hunde'],
noun(feminine, singular, nominative) >> ['katze'],
noun(feminine, singular, genitive) >> ['katze'],
noun(feminine, singular, dative) >> ['katze'],
noun(feminine, singular, accusative) >> ['katze'],
noun(feminine, plural, nominative) >> ['katzen'],
noun(feminine, plural, genitive) >> ['katzen'],
noun(feminine, plural, dative) >> ['katzen'],
noun(feminine, plural, accusative) >> ['katzen'],
noun(masculine, singular, nominative) >> ['kater'],
noun(masculine, singular, genitive) >> ['katers'],
noun(masculine, singular, dative) >> ['kater'],
noun(masculine, singular, accusative) >> ['kater'],
noun(masculine, plural, nominative) >> ['kater'],
noun(masculine, plural, genitive) >> ['kater'],
noun(masculine, plural, dative) >> ['katern'],
noun(masculine, plural, accusative) >> ['kater'],
noun(feminine, singular, nominative) >> ['maus'],
noun(feminine, singular, genitive) >> ['maus'],
noun(feminine, singular, dative) >> ['maus'],
noun(feminine, singular, accusative) >> ['maus'],
noun(feminine, plural, nominative) >> ['maeuse'],
noun(feminine, plural, genitive) >> ['maeuse'],
noun(feminine, plural, dative) >> ['maeusen'],
noun(feminine, plural, accusative) >> ['maeuse'],
noun(neuter, plural, nominative) >> ['leute'],
noun(neuter, plural, genitive) >> ['leute'],
noun(neuter, plural, da |
Fauxmoehawkeen/soundcloud-python-master | soundcloud/resource.py | Python | bsd-2-clause | 1,625 | 0 | try:
import json
except ImportError:
import simplejson as json
from UserList import UserList
class Resource(object):
"""Object wrapper for resources.
Provides an object interface to resources returned by the Soundcloud API.
"""
def __init__(self, obj):
self.obj = obj
def __getstate__(self):
return self.obj.items()
def __setstate__(self, items):
if not hasattr(self, 'obj'):
self.obj = {}
for key, val in items:
self.obj[key] = val
def __getattr__(self, name):
if name in self.obj:
return self.obj.get(name)
raise AttributeError
def fields(self):
return self.obj
def keys(self):
return self.obj.keys()
class ResourceList(UserList):
"""Object wrapper for lists of resources."""
def __init__(self, resources=[]):
data = [Resource(resource) for resource in resources]
super(ResourceList, self).__init__(data)
def wrapped_re | source(response):
"""Return a response wrapped in the appropriate wrapper type.
Lists will be returned as a ```ResourceList``` ins | tance,
dicts will be returned as a ```Resource``` instance.
"""
try:
content = json.loads(response.content)
except ValueError:
# not JSON
content = response.content
if isinstance(content, list):
result = ResourceList(content)
else:
result = Resource(content)
result.raw_data = response.content
for attr in ['url', 'status_code', 'error']:
setattr(result, attr, getattr(response, attr))
return result
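# Illustrative usage (the client and response objects are hypothetical):
#   result = wrapped_resource(client.get('/tracks/123'))
#   print(result.status_code, getattr(result, 'title', None))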
|
ecreall/dace | dace/objectofcollaboration/system.py | Python | agpl-3.0 | 2,093 | 0.000478 | # Copyrig | ht (c) 2014 by Ecreall under licence AGPL terms
# available on http://www.gnu.org/licenses/agpl.html
# licence: AGPL
# author: Vincent Fretin, Amen Souissi
import transaction
from substanced.util import get_oid
from dace.processinstance.event import DelayedCallback
from dace.util import (
find_catalog, getAllSystemActions, |
get_system_request, BaseJob)
from dace import log
last_transaction_by_machine = {}
def _call_action(action):
transaction.begin()
try:
context = action.get_potential_context()
if context is None:
return
request = get_system_request()
request.invalidate_cache = True
action.execute(context, request, {})
log.info("Execute action %s", action.title)
transaction.commit()
except Exception as e:
transaction.abort()
log.exception(e)
def _get_cache_key():
request = get_system_request()
return str(get_oid(request.user))
def run():
request = get_system_request()
if request.user is None:
# in test, db connection closed
return
catalog = find_catalog('dace')
global last_transaction
cache_key = _get_cache_key()
last_transaction = last_transaction_by_machine.setdefault(cache_key, '')
last_tid = catalog._p_jar.db().lastTransaction()
if last_transaction != last_tid:
last_transaction_by_machine[cache_key] = last_tid
transaction.begin()
try:
system_actions = [a for a in getAllSystemActions()
if getattr(a, 'process', None) or
a.isstart]
log.info("new zodb transactions, actions to check: %s",
len(system_actions))
for action in system_actions:
_call_action(action)
except Exception as e:
log.exception(e)
log.info("actions to check: done")
run_crawler()
def run_crawler():
"""Start loop."""
job = BaseJob('system')
job.callable = run
dc = DelayedCallback(job, 2000)
dc.start()
|
e-gob/plataforma-kioscos-autoatencion | scripts/ansible-play/.venv/lib/python2.7/site-packages/ansible/modules/cloud/azure/azure_rm_acs.py | Python | bsd-3-clause | 27,547 | 0.002868 | #!/usr/bin/python
#
# Copyright (c) 2017 Julien Stroheker, <[email protected]>
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: azure_rm_acs
version_added: "2.4"
short_description: Manage an Azure Container Service Instance (ACS).
description:
- Create, update and delete an Azure Container Service Instance.
options:
resource_group:
description:
- Name of a resource group where the Container Services exists or will be created.
required: true
name:
description:
- Name of the Container Services instance.
required: true
default: null
state:
description:
- Assert the state of the ACS. Use 'present' to create or update an ACS and 'absent' to delete it.
default: present
choices:
- absent
- present
required: false
location:
description:
- Valid azure location. Defaults to location of the resource group.
default: resource_group location
required: false
orchestration_platform:
description:
- Specifies the Container Orchestration Platform to use. Currently can be either DCOS, Kubernetes or Swarm.
required: true
master_profile:
description:
- Master profile suboptions.
required: true
default: null
suboptions:
count:
description:
- Number of masters (VMs) in the container service cluster. Allowed values are 1, 3, and 5.
required: true
choices:
- 1
- 3
- 5
dns_prefix:
description:
- The DNS Prefix to use for the Container Service master nodes.
required: true
linux_profile:
description:
- The linux profile suboptions.
required: true
default: null
suboptions:
admin_username:
description:
- The Admin Username for the Cluster.
required: true
default: azureuser
ssh_key:
description:
- The Public SSH Key used to access the cluster.
required: true
agent_pool_profiles:
description:
- The agent pool profile suboptions.
required: true
default: null
suboptions:
name:
description:
- Unique name of the agent pool profile in the context of the subscription and resource group.
required: true
count:
description:
- Number of agents (VMs) to host docker containers. Allowed values must be in the range of 1 to 100 (inclusive).
required: true
default: 1
dns_prefix:
description:
- The DNS Prefix given to Agents in this Agent Pool.
required: true
vm_size:
description:
- The VM Size of each of the Agent Pool VM's (e.g. Standard_F1 / Standard_D2v2).
required: true
default: Standard_D2v2
service_principal:
description:
- The service principal suboptions.
required: false
default: null
suboptions:
client_id:
description:
- The ID for the Service Principal.
required: false
client_secret:
description:
- The secret password associated with the service principal.
required: false
diagnostics_profile:
description:
            - Should VM Diagnostics be enabled for the Container Service VMs.
required: true
default: false
extends_documentation_fragment:
- azure
- azure_tags
author:
- "Julien Stroheker (@julienstroheker)"
'''
EXAMPLES = '''
- name: Create an azure container services instance running Kubernetes
azure_rm_acs:
name: acctestcontservice1
location: eastus
resource_group: Testing
orchestration_platform: Kubernetes
master_profile:
- count: 3
dns_prefix: acsk8smasterdns
linux_profile:
- admin_username: azureuser
ssh_key: ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAA...
service_principal:
- client_id: "cf72ca99-f6b9-4004-b0e0-bee10c521948"
client_secret: "mySPNp@ssw0rd!"
agent_pool_profiles:
- name: default
count: 5
dns_prefix: acsk8sagent
vm_size: Standard_D2_v2
diagnostics_profile: false
tags:
Environment: Production
- name: Create an azure container services instance running DCOS
azure_rm_acs:
name: acctestcontservice2
location: eastus
resource_group: Testing
orchestration_platform: DCOS
master_profile:
- count: 3
dns_prefix: acsdcosmasterdns
linux_profile:
- admin_username: azureuser
ssh_key: ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAA...
agent_pool_profiles:
- name: default
count: 5
dns_prefix: acscdcosagent
vm_size: Standard_D2_v2
diagnostics_profile: false
tags:
Environment: Production
- name: Create an azure container services instance running Swarm
azure_rm_acs:
name: acctestcontservice3
location: eastus
resource_group: Testing
orchestration_platform: Swarm
master_profile:
- count: 3
dns_prefix: acsswarmmasterdns
linux_profile:
- admin_username: azureuser
ssh_key: ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAA...
agent_pool_profiles:
- name: default
count: 5
dns_prefix: acsswarmagent
vm_size: Standard_D2_v2
diagnostics_profile: false
tags:
Environment: Production
# Deletes the specified container service in the specified subscription and resource group.
# The operation does not delete other resources created as part of creating a container service,
# including storage accounts, VMs, and availability sets. All the other resources created with the container
# service are part of the same resource group and can be deleted individually.
- name: Remove an azure container services instance
azure_rm_acs:
name: acctestcontservice3
location: eastus
resource_group: Testing
state: absent
orchestration_platform: Swarm
master_profile:
- count: 1
dns_prefix: acstestingmasterdns5
linux_profile:
- admin_username: azureuser
ssh_key: ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAA...
service_principal:
- client_id: 7fb4173c-3ca3-4d5b-87f8-1daac941207a
client_secret: MPNSuM1auUuITefiLGBrpZZnLMDKBLw2
agent_pool_profiles:
- name: default
count: 4
dns_prefix: acctestagent15
vm_size: Standard_A0
diagnostics_profile: false
tags:
Ansible: az | ure_rm_acs
'''
RETURN = '''
state:
description: Current state of the azure container service
returned: always
type: dict
'''
from ansible.module_utils.azure_rm_common | import AzureRMModuleBase
try:
from msrestazure.azure_exceptions import CloudError
from azure.mgmt.containerservice.models import (
ContainerService, ContainerServiceOrchestratorProfile, ContainerServiceCustomProfile,
ContainerServiceServi |
phildini/logtacts | invitations/consumers.py | Python | mit | 1,739 | 0.004025 | import logging
import requests
from django.conf import settings
from django.contrib.sites.models import Site
from django.core.mail import EmailMultiAlternatives
from django.template.loader import get_template
from django.utils import timezone
from invitations.models import Invitation
logger = logging.getLogger('email')
sentry = logging.getLogger('sentry')
def send_invite(message):
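    """Send the invitation e-mail for the Invitation referenced by ``message``.

    ``message`` is assumed to be a dict-like payload carrying the invitation
    primary key, e.g. ``{'id': 42}`` (inferred from ``message.get('id')`` below).
    """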
try:
invite = Invitation.objects.get(
id=message.get('id'),
status__in=[Invitation.PENDING, Invitation.ERROR],
)
except Invitation.DoesNotExist:
sentry.error("Invitation to send not found", exc_info=True, extra={'message': message})
return
invite.status = Invitation.PROCESSING
invite.save()
context = {
'invite': invite,
'domain': Site.objects.get_current().domain,
}
subject = "[ContactOtter] Invitation to join ContactOtter from %s" % (invite.sender)
if invite.book:
subject = "[ContactOtter] Invitation to share %s's contact book" % (invite.sender)
txt = get_template('email/invitation.txt').render(context)
html = get_template('email/invitation.html').render(context)
try:
message = EmailMultiAlternatives(
subject=subject,
body=txt,
from_email="ContactOtter <[email protected]>",
to=[invite.email,],
)
message.attach_alternative(html, "text/html")
mes | sage.send()
invite.status = Invitation.SENT
invite.sent = timezone.now()
invite.save()
    except Exception:
| sentry.exception('Problem sending invite', exc_info=True, extra={'invite_id': invite.id})
invite.status = Invitation.ERROR
invite.save()
|
jsymolon/ARMSim | TestBKPT.py | Python | gpl-2.0 | 1,495 | 0.002676 | import unittest
import armv6instrdecode
import globals
import utils
import logging
import ARMCPU
import pdb
# if ConditionPassed(cond) then
# Rd = Rn + shifter_operand
# if S == 1 and Rd == R15 then
# if CurrentModeHasSPSR() then
# CPSR = SPSR
# else UNPREDICTABLE
# else if S == 1 then
# N Flag = Rd[31]
# Z Flag = if Rd == 0 then 1 else 0
# | C Flag = CarryFrom(Rn + shifter_operand)
# V Flag = Ove | rflowFrom(Rn + shifter_operand)
logfile = "TestBKPT.log"
with open(logfile, 'w'):
pass
logging.basicConfig(filename=logfile,level=logging.DEBUG)
class TestBKPT(unittest.TestCase):
"""Instructions"""
# preparing to test
def setUp(self):
""" Setting up for the test """
self.addr = 0
# ending the test
def tearDown(self):
"""Cleaning up after the test"""
# E1210070 700021E1 BKPT #4096
# 33222222222211111111110000000000
# 10987654321098765432109876543210
# 0b11100001001000010000000001110000 - BKPT
# 0b11100001001100010000000000000000 - TEQ
def testBKPT(self):
logging.debug("------------------------------------------")
logging.debug("TestDecode:testBKPT")
code = 0xE1210070 # BKPT #4096
instrStr = armv6instrdecode.getInstructionFromCode(self, code, 0)
logging.debug("1:" + instrStr)
self.assertEqual(instrStr, " E1210070 BKPT AL BKPT #4096", instrStr)
if __name__ == "__main__":
unittest.main() |
partofthething/home-assistant | tests/components/abode/test_cover.py | Python | apache-2.0 | 2,112 | 0.000473 | """Tests for the Abode cover device."""
from unittest.mock import patch
from homeassistant.components.abode import ATTR_DEVICE_ID
from homeassistant.components.cover import DOMAIN as COVER_DOMAIN
from homeassistant.const import (
ATTR_ENTITY_ID,
ATTR_FRIENDLY_NAME,
SERVICE_CLOSE_COVER,
SERVICE_OPEN_COVER,
STATE_CLOSED,
)
from .common import setup_platform
DEVICE_ID = "cover.garage_door"
async def test_entity_registry(hass):
"""Tests that the devices are registered in the entity registry."""
await setup_platform(hass, COVER_DOMAIN)
entity_registry = await hass.helpers.entity_registry.async_get_registry()
entry = entity_registry.async_get(DEVICE_ | ID)
| assert entry.unique_id == "61cbz3b542d2o33ed2fz02721bda3324"
async def test_attributes(hass):
"""Test the cover attributes are correct."""
await setup_platform(hass, COVER_DOMAIN)
state = hass.states.get(DEVICE_ID)
assert state.state == STATE_CLOSED
assert state.attributes.get(ATTR_DEVICE_ID) == "ZW:00000007"
assert not state.attributes.get("battery_low")
assert not state.attributes.get("no_response")
assert state.attributes.get("device_type") == "Secure Barrier"
assert state.attributes.get(ATTR_FRIENDLY_NAME) == "Garage Door"
async def test_open(hass):
"""Test the cover can be opened."""
await setup_platform(hass, COVER_DOMAIN)
with patch("abodepy.AbodeCover.open_cover") as mock_open:
await hass.services.async_call(
COVER_DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: DEVICE_ID}, blocking=True
)
await hass.async_block_till_done()
mock_open.assert_called_once()
async def test_close(hass):
"""Test the cover can be closed."""
await setup_platform(hass, COVER_DOMAIN)
with patch("abodepy.AbodeCover.close_cover") as mock_close:
await hass.services.async_call(
COVER_DOMAIN,
SERVICE_CLOSE_COVER,
{ATTR_ENTITY_ID: DEVICE_ID},
blocking=True,
)
await hass.async_block_till_done()
mock_close.assert_called_once()
|
meerkat-code/meerkat_api | setup.py | Python | mit | 539 | 0 | #!/usr/bin/env python3
import uuid
from setuptools import setup, find_packages
import pathlib
import pkg_resour | ces
with pathlib.Path('requirements.txt').open() as requirements_txt:
reqs = [
str(requirement)
for requirement
in pkg_resources.parse_requirements(requirements_txt)
]
setup(
name='Meerkat API',
version='0.0.1',
long_description=__doc__,
packages=find_packages(),
include_package_data=True,
zip_safe=Fal | se,
install_requires=reqs,
test_suite='meerkat_api.test'
)
|
labordoc/labordoc-next | modules/webdeposit/lib/webdeposit_workflow.py | Python | gpl-2.0 | 7,407 | 0.00216 | # -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2012, 2013 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
from invenio.bibworkflow_engine import BibWorkflowEngine
from invenio.bibworkflow_object import BibWorkflowObject
from invenio.bibworkflow_model import Workflow, WfeObject
from invenio.bibworkflow_client import restart_workflow
from invenio.bibfield_jsonreader import JsonReader
from uuid import uuid1 as new_uuid
class DepositionWorkflow(object):
""" class for running webdeposit workflows using the BibWorkflow engine
The user_id and workflow must always be defined
If the workflow has been initialized before,
the appropriate uuid must be passed as a parameter.
Otherwise a new workflow will be created
The workflow functions must have the following structure:
def function_name(arg1, arg2):
def fun_name2(obj, eng):
# do stuff
return fun_name2
"""
def __init__(self, engine=None, workflow=[],
uuid=None, deposition_type=None, user_id=None):
self.obj = {}
self.set_user_id(user_id)
self.set_uuid(uuid)
self.deposition_type = deposition_type
self.current_step = 0
self.set_engine(engine)
self.set_workflow(workflow)
self.set_object()
def set_uuid(self, uuid=None):
""" Sets the uuid or obtains a new one """
if uuid is None:
uuid = new_uuid()
self.uuid = uuid
else:
self.uuid = uuid
def get_uuid(self):
return self.uuid
def set_engine(self, engine=None):
""" Initializes the BibWorkflow engine """
if engine is None:
engine = BibWorkflowEngine(name=self.get_deposition_type(),
uuid=self.get_uuid(),
user_id=self.get_user_id(),
module_name="webdeposit")
self.eng = engine
self.eng.save() |
def set_workflow(self, workflow):
""" Sets the workflow """
self.eng.setWorkflow(workflow)
self.workflow = workflow
self.steps_num = len(workflow)
self.obj['s | teps_num'] = self.steps_num
def set_object(self):
self.db_workflow_obj = \
WfeObject.query.filter(WfeObject.workflow_id == self.get_uuid()). \
first()
if self.db_workflow_obj is None:
self.bib_obj = BibWorkflowObject(data=self.obj,
workflow_id=self.get_uuid(),
user_id=self.get_user_id())
else:
self.bib_obj = BibWorkflowObject(wfobject_id=self.db_workflow_obj.id,
workflow_id=self.get_uuid(),
user_id=self.get_user_id())
def get_object(self):
return self.bib_obj
def set_deposition_type(self, deposition_type=None):
if deposition_type is not None:
self.obj['deposition_type'] = deposition_type
def get_deposition_type(self):
return self.obj['deposition_type']
deposition_type = property(get_deposition_type, set_deposition_type)
def set_user_id(self, user_id=None):
if user_id is not None:
self.user_id = user_id
else:
from invenio.webuser_flask import current_user
self.user_id = current_user.get_id()
self.obj['user_id'] = self.user_id
def get_user_id(self):
return self.user_id
def get_status(self):
""" Returns the status of the workflow
(check CFG_WORKFLOW_STATUS from bibworkflow_engine)
"""
status = \
Workflow.query. \
filter(Workflow.uuid == self.get_uuid()).\
one().status
return status
def get_output(self, form_validation=None):
""" Returns a representation of the current state of the workflow
(a dict with the variables to fill the jinja template)
"""
user_id = self.user_id
uuid = self.get_uuid()
from invenio.webdeposit_utils import get_form, \
draft_field_get_all
form = get_form(user_id, uuid)
deposition_type = self.obj['deposition_type']
drafts = draft_field_get_all(user_id, deposition_type)
if form_validation:
form.validate()
# Get the template from configuration for this form
template = form.config.get_template() or 'webdeposit_add.html'
return dict(template_name_or_list=template,
workflow=self,
deposition_type=deposition_type,
form=form,
drafts=drafts,
uuid=uuid)
def run(self):
""" Runs or resumes the workflow """
finished = self.eng.db_obj.counter_finished > 1
if finished:
# The workflow is finished, nothing to do
return
wfobjects = \
WfeObject.query. \
filter(WfeObject.workflow_id == self.get_uuid())
wfobject = max(wfobjects.all(), key=lambda w: w.modified)
starting_point = wfobject.task_counter
restart_workflow(self.eng, [self.bib_obj],
starting_point, stop_on_halt=True)
def run_next_step(self):
if self.current_step >= self.steps_num:
self.obj['break'] = True
return
function = self.workflow[self.current_step]
function(self.obj, self)
self.current_step += 1
self.obj['step'] = self.current_step
def jump_forward(self):
restart_workflow(self.eng, [self.bib_obj], 'next', stop_on_halt=True)
def jump_backwards(self, dummy_synchronize=False):
if self.current_step > 1:
self.current_step -= 1
else:
self.current_step = 1
def get_workflow_from_db(self):
return Workflow.query.filter(Workflow.uuid == self.get_uuid()).first()
def cook_json(self):
user_id = self.obj['user_id']
uuid = self.get_uuid()
from invenio.webdeposit_utils import get_form
json_reader = JsonReader()
for step in range(self.steps_num):
try:
form = get_form(user_id, uuid, step)
json_reader = form.cook_json(json_reader)
            except Exception:
# some steps don't have any form ...
pass
return json_reader
def get_data(self, key):
if key in self.bib_obj.data:
return self.bib_obj.data[key]
else:
return None
|
jobscry/vz-blog | __init__.py | Python | mit | 183 | 0.005464 | # -*- | mode: python; coding: utf-8; -*-
VERSION = (1, 3, 3)
__version__ = '.'.join(map(str, VERSION))
__author__ = 'Joe Vasquez'
__email__ = '[email protected] | '
__license__ = 'MIT' |
romanvm/romans_blog | blog/views.py | Python | gpl-3.0 | 4,713 | 0.001061 | from datetime import date
from django.views.generic.list import ListView
from django.views.generic.detail import DetailView
from django.conf import settings
from django.utils.translation import ugettext as _
from django.utils.dateformat import format as format_date
from django.shortcuts import get_object_or_404
from django.http import Http404
from haystack.generic_views import SearchView
from .models import Post, Category
class _PostsListView(ListView):
"""
Base class for displaying post lists
"""
template_name = '{0}/blog_posts_list.html'.format(settings.CURRENT_SKIN)
context_object_name = 'posts'
paginate_by = settings.BLOG_POSTS_PAGINATE_BY
def get_queryset(self):
return su | per().get_queryset().prefetch_related('categories')
class _PageTitleMixIn:
"""
Adds page_title to ListView's context
"""
page_title = None
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
context['page_title'] = self.page_title
return context
class BlogHomeView(_PostsListView):
"""
Di | splays the list of all published posts starting from the recent.
Template: ``blog_posts_list.html``
Specific context variable: ``posts``
"""
queryset = Post.objects.published()
class BlogFeaturedPostsView(_PageTitleMixIn, _PostsListView):
"""
Displays the list of featured posts
Template: ``blog_posts_list.html``
Specific context variables:
- ``posts``
- ``page_title``
"""
queryset = Post.objects.featured()
page_title = _('Featured Posts')
class BlogCategoryView(_PageTitleMixIn, _PostsListView):
"""
Displays the list of posts in a given category
Template: ``blog_posts_list.html``
Specific context variables:
- ``posts``
- ``page_title``
"""
def get_queryset(self):
category = get_object_or_404(Category, slug=self.kwargs['slug'])
        self.page_title = _('Posts in "{0}" category').format(category.name)
return Post.objects.published().filter(categories__pk=category.pk)
class BlogCategoriesListView(_PageTitleMixIn, ListView):
"""
Displays the list of categories that have posts in them
Template: ``blog_categories_list.html``
Specific context variables:
- ``categories``
- ``page_title``
"""
template_name = '{0}/blog_categories_list.html'.format(settings.CURRENT_SKIN)
queryset = Category.objects.non_empty()
page_title = _('Categories')
context_object_name = 'categories'
class BlogPostView(DetailView):
"""
Displays a blog post page
Template: ``blog_post.html``
Specific context variable: ``post``
"""
template_name = '{0}/blog_post.html'.format(settings.CURRENT_SKIN)
model = Post
context_object_name = 'post'
query_pk_and_slug = True
def dispatch(self, request, *args, **kwargs):
self.request = request
return super().dispatch(request, *args, **kwargs)
def get_object(self, queryset=None):
"""
Prevent non-authenticated users from viewing unpublished posts
"""
post = super().get_object(queryset)
if not(post.is_published or self.request.user.is_authenticated):
raise Http404
return post
class BlogArchiveView(_PageTitleMixIn, ListView):
"""
Displays the blog archive by years and months
Template: ``blog_archive.html``
Specific context variables:
- ``months`` -- the list of class:`datetime.data` objects representing months
- ``page_title``
"""
template_name = '{0}/blog_archive.html'.format(settings.CURRENT_SKIN)
queryset = Post.objects.published().dates('date_published', 'month', order='DESC')
context_object_name = 'months'
page_title = _('Blog Archive')
class BlogMonthArchiveView(_PageTitleMixIn, _PostsListView):
"""
Displays the list of posts by year and month
Template: ``blog_posts_list.html``
Specific context variables:
- ``posts``
- ``page_title``
"""
def get_queryset(self):
year = int(self.kwargs['year'])
month = int(self.kwargs['month'])
self.page_title = _('Blog Archive, {0}').format(format_date(date(year=year, month=month, day=1), 'F Y'))
return Post.objects.published().filter(date_published__year=year, date_published__month=month)
class BlogPostSearchView(SearchView):
"""
Displays the search page
Template: ``blog_search.html``
Specific context variables: none.
"""
template_name = '{0}/blog_search.html'.format(settings.CURRENT_SKIN)
paginate_by = 10
def get_queryset(self):
return super().get_queryset().highlight()
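
# Hypothetical URLconf wiring for the views above (patterns and route names
# are assumptions, not defined in this module):
#   urlpatterns = [
#       path('', BlogHomeView.as_view(), name='blog_home'),
#       path('<int:pk>-<slug:slug>.html', BlogPostView.as_view(), name='blog_post'),
#   ]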
|
deter-project/magi | magi/testbed/emulab.py | Python | gpl-2.0 | 7,564 | 0.006478 | # Copyright (C) 2012 University of Southern California
# This software is licensed under the GPLv3 license, included in
# ./GPLv3-LICENSE.txt in the source distribution
from collections import defaultdict
import itertools
import logging
import os
import shlex
import sys
from magi.util import helpers
from magi.util.execl import execAndRead, pipeIn
from base import Testbed, IFObj
log = logging.getLogger(__name__)
class EmulabTestbed(Testbed):
def __init__(self):
Testbed.__init__(self)
self._store = {}
""" Testbed Properties (readonly) """
@property
def experiment(self):
""" the experiment name """
return self.getExperiment()
@property
def project(self):
""" the project name """
return self.getProject()
@property
def eid(self):
""" the experiment 'id' string """
return self.getExperimentID()
def getExperiment(self):
if 'experiment' not in self._store:
self.loadEID()
return self._store['experiment']
def getProject(self):
if 'project' not in self._store:
self.loadEID()
return self._store['project']
def getExperimentID(self):
if 'eid' not in self._store:
self.loadEID()
return self._store['eid']
def getExperimentDir(self):
return os.path.join('/proj', self.getProject(), 'exp', self.getExperiment())
def toControlPlaneNodeName(self, nodename):
if nodename not in ['localhost', '127.0.0.1'] and '.' not in nodename:
nodename += '.%s.%s' % (self.getExperiment(), self.getProject())
return nodename
""" Queries for this Node """
def getLocalVirtualNodes(self):
""" Get all the virtual nodes hosted by this machine """
ret = list()
for l in self.readAllLines(pipeIn('/usr/local/etc/emulab/tmcc vnodelist')):
try:
ret.append(self.parseVarLine(l)['VNODEID'])
except:
pass
return ret
def parseVarLine(self, line):
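        """ Parse a tmcc-style line of whitespace-separated KEY=VALUE tokens
        into a dict; bare tokens (no '=') map to the value '1'.

        Hypothetical example:
            parseVarLine('INET=10.0.0.1 MASK=255.255.255.0 UP')
            => {'INET': '10.0.0.1', 'MASK': '255.255.255.0', 'UP': '1'}
        """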
args = {}
for x in shlex.split(line):
sp = x.split('=')
if sp[0] == '':
continue
if (len(sp) == 1):
args[sp[0]] = '1'
else:
args[sp[0]] = sp[1]
return args
def amAVirtualNode(self):
""" return true if I am a virtual node (i.e. not a physical node or virtual host) """
return len(execAndRead(["/usr/local/etc/emulab/tmcc", "jailconfig"])[0]) > 0
""" Functions that actually load the data into our _store """
def loadEID(self):
| """ Load the nickname file to get the node, experiment and project names """
try:
self._store.update(node='?', experiment='?', project='?', eid='?')
nickname = self.getNicknameData()
p = nickname.split('.')
self._store.update(node=p[0], experiment=p[1], project=p[2],
eid=p[2]+"/"+p[1])
except:
log.exception("Can't load my host info")
def setEID(self, node=None, experime | nt=None, project=None):
""" Set the node, experiment, and project name """
if node:
self._store.update(node=node)
if experiment:
self._store.update(experiment=experiment)
if project:
self._store.update(project=project)
self._store.update(eid=self.project+"/"+self.experiment)
def loadControlInfo(self):
""" Load the control IP address and IF name files """
try:
self._store.update(controlip='?', controlif='?')
nickname = self.getNicknameData()
self._store['controlip'] = self.getHostForName(nickname)
self._store['controlif'] = self.getControlIfData()
except:
log.exception("Can't load control interface info")
def loadIfConfig(self):
""" Load all of the interface info from emulab/boot/tmcc/ifconfig """
try:
iflist = []
# Split into lines, and parse the K=V pieces
for line in self.getIfconfigData():
args = self.parseVarLine(line)
inet = args.get('INET', '')
mask = args.get('MASK', '')
# virtual nodes have no MAC, instead they have a VMAC
mac = args.get('MAC', args.get('VMAC',''))
name = self.getIfFor(inet, mac)
if inet == '' or mac == '': continue
iflist.append(IFObj(inet, name, mac, mask))
self._store['iflist'] = iflist
except:
log.exception("Can't load interface config data")
def loadTopoGraph(self):
try:
import networkx as nx
nodelist = False
linkToNodeList = defaultdict(set)
graph = nx.Graph()
for e in self.getTopomap():
if not nodelist:
if "# nodes" in e:
nodelist = True
continue
if "# lans" in e:
break
node = e.split(",")[0]
links = e.split(",")[1].split()
linksInfo = dict()
for link in links:
linkName = link.split(":")[0]
ip = link.split(":")[1]
linkToNodeList[linkName].add(node)
linksInfo[linkName] = {'name':linkName, 'ip':ip}
graph.add_node(node, links=linksInfo)
for linkName in linkToNodeList.keys():
nodeSet = linkToNodeList[linkName]
for node in nodeSet:
graph.node[node]['links'][linkName]['peerNodes'] = list(nodeSet - set([node]))
graph.add_edges_from(list(itertools.combinations(nodeSet, 2)), linkName=linkName)
self._store['topograph'] = graph
except:
log.exception("Can't load topology graph")
""" Abstracted 'readers' of data from 'locations' """
def getSwapperData(self): return self.readFirstLine(pipeIn('/usr/local/etc/emulab/tmcc creator'))
def getNicknameData(self): return self.readFirstLine(open('/var/emulab/boot/nickname', 'r'))
def getControlIfData(self): return self.readFirstLine(open('/var/emulab/boot/controlif', 'r'))
def getIfconfigData(self): return self.readAllLines(pipeIn('/usr/local/etc/emulab/tmcc ifconfig'))
def getTopomap(self): return self.readAllLines(open('/var/emulab/boot/topomap'))
def getIfFor(self, inet, mac):
if (sys.platform == 'cygwin'):
return execAndRead("ip2pcapif %s" % (inet))[0].strip()
else:
return execAndRead("/usr/local/etc/emulab/findif %s" % (mac))[0].strip()
def getMulticastAddress(self):
return helpers.getMulticast(self.project, self.experiment, 0)
# Small test if running this file directly
if __name__ == "__main__":
logging.basicConfig()
x = EmulabTestbed()
print 'Node Name:', x.nodename
print 'FQDN:', x.fqdn
print 'Control IP:', x.controlip
print 'Control IF:', x.controlif
print 'Server Node:', x.getServer()
iplist = x.getLocalIPList()
print 'Exp. Addresses: %s' % iplist
print 'Exp. Interface info:'
for ip in iplist:
print '\t%s: %s' % (ip, x.getInterfaceInfo(ip))
|
GeoMop/PythonOCC_Examples | src/bspline_surface.py | Python | gpl-2.0 | 3,394 | 0.003536 | """
Small Modification of src/examples/Geometry/geometry_demos.py
"""
from OCC.gp import *
from OCC.Geom import *
from OCC.TColGeom import *
from OCC.TColgp import *
from OCC.GeomConvert import *
from OCC.BRepBuilderAPI import *
from OCC.TopoDS import *
from OCC.STEPControl import *
from OCC.Display.SimpleGui import init_display
display, start_display, add_menu, add_function_to_menu = init_display()
def bezier_surfaces(event=None):
display.EraseAll()
array1 = TColgp_Array2OfPnt(1, 3, 1, 3)
array2 = TColgp_Array2OfPnt(1, 3, 1, 3)
array3 = TColgp_Array2OfPnt(1, 3, 1, 3)
array4 = TColgp_Array2OfPnt(1, 3, 1, 3)
array1.SetValue(1, 1, gp_Pnt(1, 1, 1))
array1.SetValue(1, 2, gp_Pnt(2, 1, 2))
array1.SetValue(1, 3, gp_Pnt(3, 1, 1))
array1.SetValue(2, 1, gp_Pnt(1, 2, 1))
array1.SetValue(2, 2, gp_Pnt(2, 2, 2))
array1.SetValue(2, 3, gp_Pnt(3, 2, 0))
array1.SetValue(3, 1, gp_Pnt(1, 3, 2))
array1.SetValue(3, 2, gp_Pnt(2, 3, 1))
array1.SetValue(3, 3, gp_Pnt(3, 3, 0))
array2.SetValue(1, 1, gp_Pnt(3, 1, 1))
array2.SetValue(1, 2, gp_Pnt(4, 1, 1))
array2.SetValue(1, 3, gp_Pnt(5, 1, 2))
array2.SetValue(2, 1, gp_Pnt(3, 2, 0))
array2.SetValue(2, 2, gp_Pnt(4, 2, 1))
array2.SetValue(2, 3, gp_Pnt(5, 2, 2))
array2.SetValue(3, 1, gp_Pnt(3, 3, 0))
array2.SetValue(3, 2, gp_Pnt(4, 3, 0))
array2.SetValue(3, 3, gp_Pnt(5, 3, 1))
array3.SetValue(1, 1, gp_Pnt(1, 3, 2))
array3.SetValue(1, 2, gp_Pnt(2, 3, 1))
array3.SetValue(1, 3, gp_Pnt(3, 3, 0))
array3.SetValue(2, 1, gp_Pnt(1, 4, 1))
array3.SetValue(2, 2, gp_Pnt(2, 4, 0))
array3.SetValue(2, 3, gp_Pnt(3, 4, 1))
array3.SetValue(3, 1, gp_Pnt(1, 5, 1))
array3.SetValue(3, 2, gp_Pnt(2, 5, 1))
array3.SetValue(3, 3, gp_Pnt(3, 5, 2))
array4.SetValue(1, 1, gp_Pnt(3, 3, 0))
array4.SetValue(1, 2, gp_Pnt(4, 3, 0))
array | 4.SetValue(1, 3, gp_Pnt(5, 3, 1))
array4.SetValue(2, 1, gp_Pnt(3, 4, 1))
array4.SetValue(2, 2, gp_Pnt(4, 4, 1))
array4.SetValue(2, 3, gp_Pnt(5, 4, 1))
array4.SetValue(3, 1, gp_Pnt(3, 5, 2))
array4.SetValue(3, 2, gp_Pnt(4, 5, 2))
array4.SetValue(3, 3 | , gp_Pnt(5, 5, 1))
BZ1 = Geom_BezierSurface(array1)
BZ2 = Geom_BezierSurface(array2)
BZ3 = Geom_BezierSurface(array3)
BZ4 = Geom_BezierSurface(array4)
bezierarray = TColGeom_Array2OfBezierSurface(1, 2, 1, 2)
bezierarray.SetValue(1, 1, BZ1.GetHandle())
bezierarray.SetValue(1, 2, BZ2.GetHandle())
bezierarray.SetValue(2, 1, BZ3.GetHandle())
bezierarray.SetValue(2, 2, BZ4.GetHandle())
BB = GeomConvert_CompBezierSurfacesToBSplineSurface(bezierarray)
if BB.IsDone():
poles = BB.Poles().GetObject().Array2()
uknots = BB.UKnots().GetObject().Array1()
vknots = BB.VKnots().GetObject().Array1()
umult = BB.UMultiplicities().GetObject().Array1()
vmult = BB.VMultiplicities().GetObject().Array1()
udeg = BB.UDegree()
vdeg = BB.VDegree()
BSPLSURF = Geom_BSplineSurface( poles, uknots, vknots, umult, vmult, udeg, vdeg, 0, 0 )
BSPLSURF.Translate(gp_Vec(0,0,2))
display.DisplayShape(BSPLSURF.GetHandle(), update=True)
start_display()
if __name__ == '__main__':
bezier_surfaces() |
juan-cardelino/matlab_demos | ipol_demo-light-1025b85/app_available/blmv_nonlinear_cartoon_texture_decomposition/app.py | Python | gpl-2.0 | 11,626 | 0.006795 | """
Nonlinear cartoon+texture decomposition ipol demo web app
"""
from lib import base_app, build, http, image
from lib.misc import ctime
from lib.misc import prod
from lib.base_app import init_app
import shutil
import cherrypy
from cherrypy import TimeoutError
import os.path
import time
from math import ceil
class app(base_app):
""" nonlinear cartoon+texture decomposition """
title = "Cartoon+Texture Image Decomposition"
xlink_article = 'http://www.ipol.im/pub/art/2011/blmv_ct/'
input_nb = 1
input_max_pixels = 700 * 700 # max size (in pixels) of an input image
input_max_weight = 10 * 1024 * 1024 # max size (in bytes) of an input file
input_dtype = '3x8i' # input image expected data type
input_ext = '.png' # input image expected extension (ie file format)
is_test = False
def __init__(self):
"""
app setup
"""
# setup the parent class
base_dir = os.path.dirname(os.path.abspath(__file__))
base_app.__init__(self, base_dir)
# select the base_app steps to expose
# index() and input_xxx() are generic
base_app.index.im_func.exposed = True
base_app.input_select.im_func.exposed = True
base_app.input_upload.im_func.exposed = True
# params() is modified from the template
base_app.params.im_func.exposed = True
# result() is modified from the template
base_app.result.im_func.exposed = True
def build(self):
"""
program build/update
"""
# store common file path in variables
tgz_url = "http://www.ipol.im/pub/art/2011/blmv_ct/srcB.tar.gz"
tgz_file = self.dl_dir + "srcB.tar.gz"
progs = ["cartoonIpol"]
src_bin = dict([(self.src_dir + os.path.join("srcB", prog),
self.bin_dir + prog)
for prog in progs])
log_file = self.base_dir + "build.log"
# get the latest source archive
build.download(tgz_url, tgz_file)
# test if any dest file is missing, or too old
if all([(os.path.isfile(bin_file)
and ctime(tgz_file) < ctime(bin_file))
for bin_file in src_bin.values()]):
cherrypy.log("not rebuild needed",
context='BUILD', traceback=False)
else:
# extract the archive
build.extract(tgz_file, self.src_dir)
# build the programs
build.run("make -j4 -C %s %s"
% (self.src_dir + "srcB", " ".join(progs)),
stdout=log_file)
# save into bin dir
if os.path.isdir(self.bin_dir):
shutil.rmtree(self.bin_dir)
os.mkdir(self.bin_dir)
for (src, dst) in src_bin.items():
shutil.copy(src, dst)
# cleanup the source dir
shutil.rmtree(self.src_dir)
return
#
# PARAMETER HANDLIN | G
#
def select_subimage(self, x0, y0, x1, y1):
"""
cut subimage from original image
"""
# draw selected rectangle on the image
imgS = image(self.work_dir + 'input_0.png')
imgS.draw_line([(x0, y0), (x1, y0), (x1, y1), (x0, y1), (x0, y0)],
| color="red")
imgS.draw_line([(x0+1, y0+1), (x1-1, y0+1), (x1-1, y1-1), (x0+1, y1-1),
(x0+1, y0+1)], color="white")
imgS.save(self.work_dir + 'input_0s.png')
# crop the image
# try cropping from the original input image (if different from input_0)
im0 = image(self.work_dir + 'input_0.orig.png')
dx0 = im0.size[0]
img = image(self.work_dir + 'input_0.png')
dx = img.size[0]
if (dx != dx0) :
z = float(dx0)/float(dx)
im0.crop((int(x0*z), int(y0*z), int(x1*z), int(y1*z)))
# resize if cropped image is too big
if self.input_max_pixels and prod(im0.size) > self.input_max_pixels:
im0.resize(self.input_max_pixels, method="antialias")
img = im0
else :
img.crop((x0, y0, x1, y1))
# save result
img.save(self.work_dir + 'input_0.sel.png')
return
@cherrypy.expose
@init_app
def params(self, newrun=False, msg=None, x0=None, y0=None,
x1=None, y1=None, scale="3.0"):
"""
configure the algo execution
"""
if newrun:
self.clone_input()
if x0:
self.select_subimage(int(x0), int(y0), int(x1), int(y1))
return self.tmpl_out("params.html", msg=msg, x0=x0, y0=y0,
x1=x1, y1=y1, scale=scale)
@cherrypy.expose
@init_app
def rectangle(self, action=None, scale=None,
x=None, y=None, x0=None, y0=None):
"""
select a rectangle in the image
"""
if action == 'run':
            if x is None:
#save parameter
try:
self.cfg['param'] = {'scale' : scale}
except ValueError:
return self.error(errcode='badparams',
errmsg="Incorrect scale parameter.")
else:
#save parameters
try:
self.cfg['param'] = {'scale' : scale,
'x0' : int(x0),
'y0' : int(y0),
'x1' : int(x),
'y1' : int(y)}
except ValueError:
return self.error(errcode='badparams',
errmsg="Incorrect parameters.")
# use the whole image if no subimage is available
try:
img = image(self.work_dir + 'input_0.sel.png')
except IOError:
img = image(self.work_dir + 'input_0.png')
img.save(self.work_dir + 'input_0.sel.png')
# go to the wait page, with the key
http.redir_303(self.base_url + "wait?key=%s" % self.key)
return
else:
# use a part of the image
            if x0 is None:
# first corner selection
x = int(x)
y = int(y)
# draw a cross at the first corner
img = image(self.work_dir + 'input_0.png')
img.draw_cross((x, y), size=4, color="white")
img.draw_cross((x, y), size=2, color="red")
img.save(self.work_dir + 'input.png')
return self.tmpl_out("params.html", scale=scale, x0=x, y0=y)
else:
# second corner selection
x0 = int(x0)
y0 = int(y0)
x1 = int(x)
y1 = int(y)
# reorder the corners
(x0, x1) = (min(x0, x1), max(x0, x1))
(y0, y1) = (min(y0, y1), max(y0, y1))
assert (x1 - x0) > 0
assert (y1 - y0) > 0
#save parameters
try:
self.cfg['param'] = {'scale' : scale,
'x0' : x0,
'y0' : y0,
'x1' : x1,
'y1' : y1}
except ValueError:
return self.error(errcode='badparams',
errmsg="Incorrect parameters.")
#select subimage
self.select_subimage(x0, y0, x1, y1)
# go to the wait page, with the key
http.redir_303(self.base_url + "wait?key=%s" % self.key)
return
@cherrypy.expose
@init_app
def wait(self):
"""
run redirection
"""
http.refresh(self.base_url + 'run?key=%s' % self.key)
return self.tmpl_out("wait.html")
@cherrypy.expose
@init_app
def run(self):
"""
algorithm execution
"""
# read the parameters
scale = self.cfg['param']['scale']
# run the algorithm
stdout = open(self.work_dir + 'stdout.txt', 'w')
|
wdzhou/mantid | scripts/HFIR_4Circle_Reduction/mplgraphicsview3d.py | Python | gpl-3.0 | 9,817 | 0.001121 | #pylint: disable=R0901,R0902,R0904
from __future__ import (absolute_import, division, print_function)
from six.moves import range
import numpy as np
import os
from PyQt4.QtGui import QSizePolicy
from matplotlib.backends.backend_qt4agg import FigureCanvasQTAgg as FigureCanvas
from matplotlib.figure import Figure
from mpl_toolkits.mplot3d import Axes3D
class MplPlot3dCanvas(FigureCanvas):
"""
Matplotlib 3D canvas class
"""
def __init__(self, parent=None):
"""
Initialization
:return:
"""
#
self._myParentWindow = parent
# Initialize the figure
self._myFigure = Figure()
# Init canvas
FigureCanvas.__init__(self, self._myFigure)
FigureCanvas.setSizePolicy(self, QSizePolicy.Expanding, QSizePolicy.Expanding)
FigureCanvas.updateGeometry(self)
# Axes
self._myAxes = Axes3D(self._myFigure) # Canvas figure must be created for mouse rotation
self.format_coord_org = self._myAxes.format_coord
self._myAxes.format_coord = self.report_pixel
# color
self._colorMap = [0.5, 0.5, 0.5]
# Others
self._dataKey = 0
self._dataDict = dict()
# List of plots on canvas NOW
self._currPlotList = list()
self._currSurfaceList = list() # [{"xx":,"yy:","val:"}]
return
def clear_3d_plots(self):
"""
Clear all the figures from canvas
:return:
"""
for plt in self._currPlotList:
# del plt
self._myAxes.collections.remove(plt)
self._currPlotList = []
return
def get_data(self, data_key):
""" Get data by data key
:param data_key:
:return:
"""
assert data_key in self._dataDict, 'Data key %s does not exist in %s.' % (str(data_key),
str(self._dataDict.keys()))
return self._dataDict[data_key]
def import_3d_data(self, points, intensities):
"""
:param points:
:param intensities:
:return:
"""
# check
assert isinstance(points, np.ndarray) and points.shape[1] == 3, 'Shape is %s.' % str(points.shape)
assert isinstance(intensities, np.ndarray) and len(points) == len(intensities)
# set
self._dataDict[self._dataKey] = (points, intensities)
# update
r_value = self._dataKey
self._dataKey += 1
return r_value
def import_data_from_file(self, file_name):
""" File will have more than 4 columns, as X, Y, Z, Intensity, ...
:param file_name:
:return:
"""
# check
assert isinstance(file_name, str) and os.path.exists(file_name)
# parse
data_file = open(file_name, 'r')
raw_lines = data_file.readlines()
data_file.close()
# construct ND data array
xyz_points = np.zeros((len(raw_lines), 3))
intensities = np.zeros((len(raw_lines), ))
# parse
for i in range(len(raw_lines)):
line = raw_lines[i].strip()
# skip empty line
if len(line) == 0:
continue
# set value
terms = line.split(',')
for j in range(3):
xyz_points[i][j] = float(terms[j])
intensities[i] = float(terms[3])
# END-FOR
# Add to data structure for managing
self._dataDict[self._dataKey] = (xyz_points, intensities)
return_value = self._dataKey
self._dataKey += 1
return return_value
def plot_scatter(self, points, color_list):
"""
Plot points with colors in scatter mode
:param points:
:param color_list:
:return:
"""
        # TODO: the checks below need to be more thorough
assert isinstance(points, np.ndarray)
assert len(points) == len(color_list)
assert points.shape[1] == 3, '3D data %s.' % str(points.shape)
#
# plot scatters
plt = self._myAxes.scatter(points[:, 0], points[:, 1], points[:, 2],
zdir='z', c=color_list)
self._currPlotList.append(plt)
self.draw()
return
def plot_scatter_auto(self, data_key, base_color=None):
"""
Plot data in scatter plot in an automatic mode
| :param data_key: key to locate the data stored to this class
:param base_color: None or a list of 3 elements from 0 to 1 for RGB
:return:
"""
# Check
assert isinstance(data_key, int) and data_key >= 0
assert base_color is None or len(base_c | olor) == 3
# get data and check
points = self._dataDict[data_key][0]
intensities = self._dataDict[data_key][1]
assert isinstance(points, np.ndarray)
assert isinstance(points.shape, tuple)
assert points.shape[1] == 3, '3D data %s.' % str(points.shape)
if len(points) > 1:
# set x, y and z limit
x_min = min(points[:, 0])
x_max = max(points[:, 0])
d_x = x_max - x_min
# print(x_min, x_max)
y_min = min(points[:, 1])
y_max = max(points[:, 1])
d_y = y_max - y_min
# print(y_min, y_max)
z_min = min(points[:, 2])
z_max = max(points[:, 2])
d_z = z_max - z_min
            # print(z_min, z_max)
# use default setup
self._myAxes.set_xlim(x_min-d_x, x_max+d_x)
self._myAxes.set_ylim(y_min-d_y, y_max+d_y)
self._myAxes.set_zlim(z_min-d_z, z_max+d_z)
# END-IF
# color map for intensity
color_list = list()
if base_color is None:
color_r = self._colorMap[0]
color_g = self._colorMap[1]
else:
color_r = base_color[0]
color_g = base_color[1]
if len(intensities) > 1:
min_intensity = min(intensities)
max_intensity = max(intensities)
diff = max_intensity - min_intensity
b_list = intensities - min_intensity
b_list = b_list/diff
num_points = len(points[:, 2])
for index in range(num_points):
color_tup = (color_r, color_g, b_list[index])
color_list.append(color_tup)
else:
color_list.append((color_r, color_g, 0.5))
# plot scatters
self._myAxes.scatter(points[:, 0], points[:, 1], points[:, 2], zdir='z', c=color_list)
self.draw()
def plot_surface(self):
"""
Plot surface
:return:
"""
print('Number of surf = ', len(self._currSurfaceList))
for surf in self._currSurfaceList:
plt = self._myAxes.plot_surface(surf["xx"], surf["yy"], surf["val"],
rstride=5, cstride=5, # color map??? cmap=cm.jet,
linewidth=1, antialiased=True)
self._currPlotList.append(plt)
# END-FOR
return
def report_pixel(self, x_d, y_d):
report = self.format_coord_org(x_d, y_d)
report = report.replace(",", " ")
return report
def set_axes_labels(self, x_label, y_label, z_label):
"""
:return:
"""
if x_label is not None:
self._myAxes.set_xlabel(x_label)
if y_label is not None:
self._myAxes.set_ylabel(y_label)
if z_label is not None:
self._myAxes.set_zlabel(z_label)
return
def set_color_map(self, color_r, color_g, color_b):
"""
Set the base line of color map
:param color_r:
:param color_g:
:param color_b:
:return:
"""
# Set color map
        assert isinstance(color_r, float) and 0. <= color_r < 1., 'color_r must be a float in [0, 1)'
        assert isinstance(color_g, float) and 0. <= color_g < 1., 'color_g must be a float in [0, 1)'
        assert isinstance(color_b, float) and 0. <= color_b < 1., 'color_b must be a float in [0, 1)'
self._colorMap = [color_r, color_g, color_b]
def set_title |
BlakeTeam/VHDLCodeGenerator | blocks/Standard Library/Gate AND.py | Python | gpl-3.0 | 2,291 | 0.014841 | #-------------------------------------------------------------------------------
# PROJECT: VHDL Code Generator
# NAME: Dynamic AND Gate
#
# LICENSE: GNU-GPL V3
#-------------------------------------------------------------------------------
__isBlock__ = True
__className__ = "ANDGate"
__win__ = "ANDGateWindow"
from PyQt4.QtCore import *
from PyQt4.QtGui import *
from PyQt4 import uic
from lib.Block import *
class ANDGate(Block):
""" AND Gate
PORTS SPECIFICATIONS
"""
# TODO: Specifications of AND Gate (Documentation)
def __init__(self,system,numInput,sizeInput):
"""
:param name:
:param numInput: Number of input
:param size: Size of each input
:param system:
"""
self.numInput = numInput
self.name = "AND_GATE"
self.sizeInput = sizeInput
input_vector = [sizeInput]*self.numInput
output_vector = [sizeInput]
super().__init__(input_vector,output_vector,sy | stem,self.name)
def generate(self):
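        # Sketch of the emitted VHDL for the single-bit case (placeholder
        # signal names; the real names come from the connected signals):
        #   OUT <= IN0 and IN1;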
filetext = ""
if self.getOutputS | ignalSize(0) == 1:
filetext += "%s <= %s"%(self.getOutputSignalName(0),self.getInputSignalName(0))
for i in range(1,self.numInput):
filetext += " and %s"%(self.getInputSignalName(i))
else:
filetext += "%s <= "%self.getOutputSignalName(0)
for i in range (self.sizeInput):
filetext += "%s[%d]"%(self.getInputSignalName(0),self.sizeInput-i-1)
for j in range(1,self.numInput):
filetext += " and %s[%d]"%(self.getInputSignalName(j),self.sizeInput-i-1)
if i != self.sizeInput - 1:
filetext += " & "
filetext += ";\n"
return filetext
class ANDGateWindow(QWidget):
accept = pyqtSignal(list)
def __init__(self,parent = None):
super().__init__()
self.ui = uic.loadUi("blocks\\Standard Library\\Gate.ui",self)
self.ui.acceptButton.clicked.connect(self.accepted)
self.ui.setWindowTitle("AND GATE")
def accepted(self):
numInput = self.ui.numInput.value()
sizeInput = self.ui.sizeInput.value()
self.accept.emit([numInput,sizeInput])
self.close()
|
yehnan/python_book_yehnan | ch06/ch06_8queen_hettingers_gf.py | Python | gpl-2.0 | 315 | 0.003175 |
# Raymond Hettinger's recipe:
# http://code.activestate.com/recipes/ | 576647/
from itertools import permutations
def queen_gf(n):
cols = range(n)
for ans in permutations(cols):
if (n == len(set(ans[i]+i for i in cols)) ==
len(set(ans[i]-i for i in cols))):
| yield ans
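
# Example usage: each solution maps column index -> row index of that queen.
#   print(next(queen_gf(8)))   # first of the 92 solutions to 8-queens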
|
Alberto-Beralix/Beralix | i386-squashfs-root/usr/lib/python2.7/dist-packages/twisted/test/test_usage.py | Python | gpl-3.0 | 56 | 0.017857 | ../../../../.. | /share/pyshared/twisted/test/test_usage. | py |
nive/nive_cms | nive_cms/image.py | Python | gpl-3.0 | 2,927 | 0.015032 | # Copyright 2012, 2013 Arndt Droullier, Nive GmbH. All rights reserved.
# Released under GPL3. See license.txt
#
__doc__ = """
Image
-----
The image element inserts images into the web page.
Images uploaded as fullsize will be linked as pop-ups.
If the Python Imaging Library (PIL) is installed, automated image conversion on upload can be
activated by adding `nive_cms.extensions.images.ImageProcessor` to configuration.extensions.
::
ProfileImage = Conf(source="imagefull", dest="image", format="JPEG",
quality="85", width=360, height=0, extension="jpg",
condition=CheckDeafult)
configuration.imageProfiles = [ProfileImage]
The default image settings for conversions.
"""
from nive_cms.i18n import _
from nive.definitions import StagPageElement, ObjectConf, FieldConf, Conf
from nive_cms.baseobjects import PageElementFileBase
from nive_cms.extensions.images import PILloaded
class image(PageElementFileBase):
"""
"""
# bw 0.9.11
def Span(self):
# css class span for the css selection
if self.data.cssClass=="teaserl":
return u"span4"
elif self.data.cssClass=="teasers":
return u"span2"
return u" | span3"
# image type definition ------------------------------------------------------------------
#@nive_module
configuration = ObjectConf(
id = "image",
name = _(u"Image"),
dbparam = "images",
context = "nive_cms.image.image",
template = "image.pt",
selectTag = StagPageElement,
extensio | ns = [],
icon = "nive_cms.cmsview:static/images/types/image.png",
description = _(u"The image element inserts images into the web page.")
)
configuration.data = [
FieldConf(id="image", datatype="file", size=0, default=u"", name=_(u"Imagefile")),
FieldConf(id="imagefull", datatype="file", size=0, default=u"", name=_(u"Imagefile fullsize")),
FieldConf(id="textblock", datatype="htext",size=100000,default=u"", name=_(u"Text"), fulltext=1, required=0),
FieldConf(id="cssClass", datatype="list", size=10, default=u"", name=_(u"Styling"), listItems=()),
FieldConf(id="link", datatype="url", size=1000, default=u"", name=_(u"Link"))
]
if PILloaded and "nive_cms.extensions.images.ImageProcessor" in configuration.extensions:
fields = ["title", "imagefull", "textblock", "cssClass", "link", "pool_groups"]
else:
fields = ["title", "image", "imagefull", "textblock", "cssClass", "link", "pool_groups"]
configuration.forms = {"create": {"fields":fields}, "edit": {"fields":fields}}
configuration.toJson = ("title", "image", "imagefull", "textblock", "cssClass", "link", "pool_groups", "pool_type", "pool_filename")
configuration.views = []
ProfileImage = Conf(source="imagefull", dest="image", format="JPEG", quality="90", width=360, height=0, extension="jpg")
configuration.imageProfiles = [ProfileImage]
|
Loki88/RSA-Test | ui/Menu/__init__.py | Python | agpl-3.0 | 72 | 0.013889 | #!/usr/bin/env python
# -*- coding: utf | -8 -*-
fro | m .Menu import MenuBox |
clchiou/garage | py/g1/operations/databases/servers/tests/test_connections.py | Python | mit | 8,015 | 0 | import unittest
import unittest.mock
import functools
from g1.asyncs import kernels
from g1.operations.databases.bases import interfaces
from g1.operations.databases.servers import connections
# I am not sure why pylint cannot lint contextlib.asynccontextmanager
# correctly; let us disable this check for now.
#
# pylint: disable=not-async-context-manager
def synchronous(test_method):
@kernels.with_kernel
@functools.wraps(test_method)
def wrapper(self):
kernels.run(test_method(self))
return wrapper
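
# Usage sketch (this is exactly how the tests below use it):
#
#   @synchronous
#   async def test_something(self):
#       await do_async_things()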
class ConnectionsTest(unittest.TestCase):
def setUp(self):
super().setUp()
self.conn = unittest.mock.Mock()
self.tx = self.conn.begin.return_value
self.manager = connections.ConnectionManager(self.conn)
unittest.mock.patch.multiple(
connections,
_WAIT_FOR_READER=0.01,
_WAIT_FOR_WRITER=0.01,
).start()
def tearDown(self):
unittest.mock.patch.stopall()
super().tearDown()
def assert_manager(
self,
num_readers,
tx_id,
rollback_tx_ids,
commit_tx_ids,
timeout_tx_ids,
):
self.assertEqual(self.manager._num_readers, num_readers)
self.assertEqual(self.manager._tx_id, tx_id)
self.assertEqual(tuple(self.manager._rollback_tx_ids), rollback_tx_ids)
self.assertEqual(tuple(self.manager._commit_tx_ids), commit_tx_ids)
self.assertEqual(tuple(self.manager._timeout_tx_ids), timeout_tx_ids)
self.assertEqual(self.manager.tx_id, tx_id)
@synchronous
async def test_reading(self):
self.assert_manager(0, 0, (), (), ())
async with self.manager.reading() as conn_1:
self.assert_manager(1, 0, (), (), ())
self.assertIs(conn_1, self.conn)
async with self.manager.reading() as conn_2:
self.assert_manager(2, 0, (), (), ())
self.assertIs(conn_2, self.conn)
async with self.manager.reading() as conn_3:
self.assert_manager(3, 0, (), (), ())
self.assertIs(conn_3, self.conn)
self.assert_manager(2, 0, (), (), ())
self.assert_manager(1, 0, (), (), ())
self.assert_manager(0, 0, (), (), ())
self.conn.begin.assert_not_called()
@synchronous
async def test_reading_timeout(self):
self.assert_manager(0, 0, (), (), ())
async with self.manager.transacting():
tx_id = self.manager.tx_id
with self.assertRaises(interfaces.TransactionTimeoutError):
async with self.manager.reading():
pass
self.assert_manager(0, 0, (), (tx_id, ), ())
self.conn.begin.assert_called_once()
@synchronous
async def test_writing(self):
with self.assertRaises(interfaces.InvalidRequestError):
async with self.manager.writing(0):
pass
with self.assertRaises(interfaces.TransactionNotFoundError):
async with self.manager.writing(1):
pass
self.assert_manager(0, 0, (), (), ())
async with self.manager.transacting():
tx_id = self.manager.tx_id
self.assert_manager(0, tx_id, (), (), ())
async with self. | manager.writing(tx_id) as conn:
self.assert_manager(0, tx_id, (), (), ())
self.assertIs(conn, self.conn)
with self.assertRaises(interfaces.TransactionNotFoundError):
async with self.manager.writing(tx_id + 1):
pass
self.asse | rt_manager(0, 0, (), (tx_id, ), ())
self.conn.begin.assert_called_once()
@synchronous
async def test_transacting(self):
self.assert_manager(0, 0, (), (), ())
async with self.manager.transacting() as conn:
tx_id = self.manager.tx_id
self.assertNotEqual(tx_id, 0)
self.assert_manager(0, tx_id, (), (), ())
self.assertIs(conn, self.conn)
self.assert_manager(0, 0, (), (tx_id, ), ())
self.conn.begin.assert_called_once()
@synchronous
async def test_transacting_rollback(self):
self.assert_manager(0, 0, (), (), ())
with self.assertRaises(ValueError):
async with self.manager.transacting():
tx_id = self.manager.tx_id
raise ValueError
self.assert_manager(0, 0, (tx_id, ), (), ())
self.conn.begin.assert_called_once()
@synchronous
async def test_transacting_timeout_on_reader(self):
self.assert_manager(0, 0, (), (), ())
async with self.manager.reading():
with self.assertRaises(interfaces.TransactionTimeoutError):
async with self.manager.transacting():
pass
self.assert_manager(0, 0, (), (), ())
self.conn.begin.assert_not_called()
@synchronous
async def test_transacting_timeout_on_writer(self):
self.assert_manager(0, 0, (), (), ())
async with self.manager.transacting():
tx_id = self.manager.tx_id
with self.assertRaises(interfaces.TransactionTimeoutError):
async with self.manager.transacting():
pass
self.assert_manager(0, 0, (), (tx_id, ), ())
self.conn.begin.assert_called_once()
@synchronous
async def test_begin(self):
with self.assertRaises(interfaces.InvalidRequestError):
await self.manager.begin(0)
self.assert_manager(0, 0, (), (), ())
conn = await self.manager.begin(1)
for _ in range(3): # begin is idempotent.
self.assertIs(await self.manager.begin(1), conn)
self.assertIs(conn, self.conn)
self.assert_manager(0, 1, (), (), ())
with self.assertRaises(interfaces.TransactionTimeoutError):
await self.manager.begin(2)
self.conn.begin.assert_called_once()
@synchronous
async def test_end(self):
with self.assertRaises(interfaces.InvalidRequestError):
await self.manager.rollback(0)
with self.assertRaises(interfaces.InvalidRequestError):
await self.manager.commit(0)
with self.assertRaises(interfaces.TransactionNotFoundError):
await self.manager.rollback(1)
with self.assertRaisesRegex(AssertionError, r'expect x != 0'):
await self.manager.rollback_due_to_timeout()
with self.assertRaises(interfaces.TransactionNotFoundError):
await self.manager.commit(1)
self.assert_manager(0, 0, (), (), ())
await self.manager.begin(1)
self.assert_manager(0, 1, (), (), ())
with self.assertRaises(interfaces.TransactionNotFoundError):
self.manager.rollback(999)
with self.assertRaises(interfaces.TransactionNotFoundError):
self.manager.commit(999)
self.tx.rollback.assert_not_called()
for _ in range(3): # rollback is idempotent.
self.manager.rollback(1)
self.tx.rollback.assert_called_once()
self.assert_manager(0, 0, (1, ), (), ())
await self.manager.begin(2)
self.tx.commit.assert_not_called()
for _ in range(3): # commit is idempotent.
self.manager.commit(2)
self.tx.commit.assert_called_once()
self.assert_manager(0, 0, (1, ), (2, ), ())
self.tx.rollback.reset_mock()
await self.manager.begin(3)
self.manager.rollback_due_to_timeout()
self.tx.rollback.assert_called_once()
self.assert_manager(0, 0, (1, ), (2, ), (3, ))
await self.manager.begin(1)
with self.assertRaises(interfaces.TransactionTimeoutError):
async with self.manager.writing(3):
pass
with self.assertRaises(interfaces.TransactionNotFoundError):
async with self.manager.writing(4):
pass
if __name__ == '__main__':
unittest.main()
|
xuru/pyvisdk | pyvisdk/do/storage_iorm_config_option.py | Python | mit | 1,080 | 0.009259 |
import logging
from pyvisdk.exceptions import InvalidArgumentError
########################################
# Automatically generated, do not edit.
########################################
log = logging.getLogger(__name__)
def StorageIORMConfigOption(vim, *args, **kwargs):
''' | Configuration setting ranges for IORMConfigSpec object.'''
obj = vim.client.factory.create('ns0:StorageIORMConfigOption')
# do some validation checking...
if (len(args) + len(kwargs)) < 3:
        raise IndexError('Expected at least 3 arguments, got: %d' % (len(args) + len(kwargs)))
required = [ 'congestionThresholdOption', 'enabledOption', 'statsCollectionEnabledOption' ]
optional = [ 'dynamicProperty', 'dynamicType' ]
for name, arg in zip(required+optional, args):
| setattr(obj, name, arg)
for name, value in kwargs.items():
if name in required + optional:
setattr(obj, name, value)
else:
raise InvalidArgumentError("Invalid argument: %s. Expected one of %s" % (name, ", ".join(required + optional)))
return obj
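
# Hypothetical usage, assuming `vim` is a connected pyvisdk instance and the
# three required option objects were built via the same factory mechanism:
#   opt = StorageIORMConfigOption(vim, congestion_opt, enabled_opt, stats_opt)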
|
ustuehler/git-cvs | tests/test_cvs.py | Python | isc | 727 | 0.001376 | from os.path import dirname, join
import unittest
from cvsgit.cvs import CVS
from cvsgit.changeset import Change
class Test(unittest.TestCase):
def test_rcsfilename(self):
"""Find the RCS file for a w | orking copy path.
"""
cvs = CVS(join(dirname(__file__), 'data', 'zombie'), None)
c = Change(timestamp='',
author='',
log='',
filestatus='',
filename='patches/patch-Makefile',
revision='',
state='',
mode='')
expected = join(cvs.root, 'patches/Atti | c/patch-Makefile,v')
actual = cvs.rcsfilename(c)
self.assertEqual(expected, actual)
|
abetusk/www.meowcad.com | cgi/about.py | Python | agpl-3.0 | 1,664 | 0.020433 | #!/usr/bin/python
import re,cgi,cgitb,sys
import os
import urllib
import Cookie
import datetime
import meowaux as mew
cgitb.enable()
login_signup="""
<ul class='nav navbar-nav' style='float:right; margin-top:7px; margin-right:5px; ' >
<li>
<form action='/login' style='display:inline;' >
<button class='btn btn-success' type='submit'>Log in</button>
</form>
<form action='/register' style='display:inline;' >
<button class='btn btn-warning' type='submit'>Register!</button>
| </form>
</li>
</ul>
"""
cookie = Cookie.SimpleCookie()
cookie_hash = mew.getCookieHash( os.environ )
msg,msgType = mew.processCookieMessage( cookie, cookie_hash )
loggedInFlag = False
if ( ("userId" in coo | kie_hash) and ("sessionId" in cookie_hash) and
(mew.authenticateSession( cookie_hash["userId"], cookie_hash["sessionId"] ) != 0) ):
loggedInFlag = True
template = mew.slurp_file("template/about.html")
nav = mew.slurp_file("template/navbarflush_template.html")
footer = mew.slurp_file("template/footer_template.html")
analytics = mew.slurp_file("template/analytics_template.html")
tmp_str = mew.replaceTemplateMessage( template, msg, "nominal" )
tmp_str = tmp_str.replace( "<!--FOOTER-->", footer)
tmp_str = tmp_str.replace( "<!--ANALYTICS-->", analytics)
if loggedInFlag:
userData = mew.getUser( cookie_hash["userId"] )
nav = mew.processLoggedInNavTemplate( nav, userData["userName"], userData["type"] )
else:
nav = nav.replace( "<!--NAVBAR_USER_CONTEXT-->", login_signup )
tmp_str = tmp_str.replace( "<!--NAVBAR_FLUSH-->", nav)
print "Content-type: text/html; charset=utf-8;"
print cookie.output()
print
print tmp_str
|
martinjrobins/hobo | pints/_nested/__init__.py | Python | bsd-3-clause | 29,201 | 0 | #
# Sub-module containing nested samplers
#
# This file is part of PINTS (https://github.com/pints-team/pints/) which is
# released under the BSD 3-clause license. See accompanying LICENSE.md for
# copyright notice and full license details.
#
import pints
import numpy as np
try:
from scipy.special import logsumexp
except ImportError: # pragma: no cover
# Older versions
from scipy.misc import logsumexp
class NestedSampler(pints.TunableMethod):
"""
Abstract base class for nested sample | rs.
Parameters
----------
log_prior : pints.LogPrior
A logprior to draw proposal samples from.
"""
def __init__(self, log_prior):
# Store logprior
if not isinstance(log_prior, pints.LogPrior):
raise Value | Error('Given log_prior must extend pints.LogPrior')
# prior accessed by subclasses to do prior sampling in ask() step
self._log_prior = log_prior
# Current value of the threshold log-likelihood value
self._running_log_likelihood = -float('inf')
self._proposed = None
# Initialise active point containers
self._n_active_points = 400
self._n_parameters = self._log_prior.n_parameters()
self._m_active = np.zeros((self._n_active_points,
self._n_parameters + 1))
self._min_index = None
self._accept_count = 0
self._n_evals = 0
def active_points(self):
"""
Returns the active points from nested sampling run.
"""
return self._m_active
def ask(self):
"""
Proposes new point at which to evaluate log-likelihood.
"""
raise NotImplementedError
def _initialise_active_points(self, m_initial, v_fx):
"""
Sets initial active points matrix.
"""
for i, fx in enumerate(v_fx):
self._m_active[i, self._n_parameters] = fx
self._m_active[:, :-1] = m_initial
self._min_index = np.argmin(self._m_active[:, self._n_parameters])
self._set_running_log_likelihood(
self._m_active[self._min_index, self._n_parameters])
def in_initial_phase(self):
"""
For methods that need an initial phase (see
:meth:`needs_initial_phase()`), this method returns ``True`` if the
method is currently configured to be in its initial phase. For other
        methods a ``NotImplementedError`` is raised.
"""
raise NotImplementedError
def min_index(self):
""" Returns index of sample with lowest log-likelihood. """
return self._min_index
def n_active_points(self):
"""
Returns the number of active points that will be used in next run.
"""
return self._n_active_points
def n_hyper_parameters(self):
""" See :meth:`TunableMethod.n_hyper_parameters()`. """
raise NotImplementedError
def name(self):
""" Name of sampler """
raise NotImplementedError
def needs_sensitivities(self):
"""
Determines whether sampler uses sensitivities of the solution.
"""
return self._needs_sensitivities
def needs_initial_phase(self):
"""
Returns ``True`` if this method needs an initial phase, for example
ellipsoidal nested sampling has a period of running rejection
sampling before it starts to fit ellipsoids to points.
"""
return False
def running_log_likelihood(self):
"""
Returns current value of the threshold log-likelihood value.
"""
return self._running_log_likelihood
def set_n_active_points(self, active_points):
"""
Sets the number of active points for the next run.
"""
active_points = int(active_points)
if active_points <= 5:
raise ValueError('Number of active points must be greater than 5.')
self._n_active_points = active_points
self._m_active = np.zeros((self._n_active_points,
self._n_parameters + 1))
def set_hyper_parameters(self, x):
"""
See :meth:`TunableMethod.set_hyper_parameters()`.
"""
raise NotImplementedError
def set_initial_phase(self, in_initial_phase):
"""
For methods that need an initial phase (see
:meth:`needs_initial_phase()`), this method toggles the initial phase
        algorithm. For other methods a ``NotImplementedError`` is raised.
"""
raise NotImplementedError
def _set_running_log_likelihood(self, running_log_likelihood):
"""
Updates the current value of the threshold log-likelihood value.
"""
self._running_log_likelihood = running_log_likelihood
def tell(self, fx):
"""
If a single evaluation is provided as arguments, a single point is
accepted and returned if its likelihood exceeds the current threshold;
otherwise None is returned.
If multiple evaluations are provided as arguments (for example, if
running the algorithm in parallel), None is returned if no points
have likelihood exceeding threshold; if a single point passes the
threshold, it is returned; if multiple points pass, one is selected
uniformly at random and returned and the others are stored for later
use.
In all cases, two objects are returned: the proposed point (which may
be None) and an array of other points that also pass the threshold
(which is empty for single evaluation mode but may be non-empty for
multiple evaluation mode).
"""
# for serial evaluation just return point or None and an empty array
if np.isscalar(fx):
self._n_evals += 1
if np.isnan(fx) or fx < self._running_log_likelihood:
return None, np.array([[]])
else:
proposed = self._proposed
fx_temp = fx
winners = np.array([[]])
# if running in parallel, then fx will be a sequence
else:
a_len = len(fx)
self._n_evals += a_len
results = []
for i in range(a_len):
if np.isnan(fx[i]) or fx[i] < self._running_log_likelihood:
results.append(None)
else:
results.append(fx[i])
n_non_none = sum(x is not None for x in results)
# if none pass threshold return None and an empty array
if n_non_none == 0:
return None, np.array([[]])
# if one passes then return it and an empty array
elif n_non_none == 1:
fx_temp = next(item for item in results if item is not None)
index = results.index(fx_temp)
proposed = self._proposed[index]
winners = np.array([[]])
# if more than a single point passes select at random from multiple
# non-nones and return it and an array of the other points whose
# likelihood exceeds threshold
else:
                fx_short = [i for i in results if i is not None]
idex = [results.index(i) for i in fx_short]
proposed_short = [self._proposed[i] for i in idex]
fx_temp = np.random.choice(fx_short)
index_temp = results.index(fx_temp)
proposed = self._proposed[index_temp]
index1 = fx_short.index(fx_temp)
del proposed_short[index1]
fx_short.remove(fx_temp)
winners = np.transpose(
np.vstack([np.transpose(proposed_short), fx_short]))
self._m_active[self._min_index, :] = np.concatenate(
(proposed, np.array([fx_temp])))
self._min_index = np.argmin(
self._m_active[:, self._n_parameters])
self._set_running_log_likelihood(
np.min(self._m_active[:, self._n_parameters]))
self._accept_count += 1
return proposed, winners
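
    # A minimal sketch of the ask/tell loop a controller drives; the
    # sampler subclass and log_likelihood function below are hypothetical:
    #
    #     sampler = MyNestedSampler(log_prior)
    #     for _ in range(n_iterations):
    #         x = sampler.ask()
    #         proposed, extras = sampler.tell(log_likelihood(x))
    #         # proposed is None whenever x fell below the threshold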
class NestedController(object):
rossasa/server-tools | base_user_role/migrations/8.0.1.1.0/post-migration.py | Python | agpl-3.0 | 766 | 0 | # -*- coding: utf-8 -*-
# Copyright 2016 ABF OSIELL <http://osiell.com>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from openerp import api, SUPERUSER_ID
def migrate_res_users_role(env):
"""Migrate user roles database schema.
    ('res_users_role_user_rel' many2many table to 'res.users.role.line' model).
    """
role_line_model = env['res.users.role.line']
query = "SELECT role_id, user_id FROM res_users_role_user_rel;"
env.cr.execute(query)
rows = env.cr.fetchall()
for row in rows:
vals = {
'role_id': row[0],
'user_id': row[1],
}
role_line_model.create(vals)
def migrate(cr, version):
env = api.Environment(cr, SUPERUSER_ID, {})
migrate_res_users_role(env)
|
mph-/lcapy | doc/examples/netlists/circuit-VRLC2-vr.py | Python | lgpl-2.1 | 442 | 0.004525 | from lcapy import Circuit
cct = Circuit("""
V 1 0 step 10; down
L 1 2 1e-3; right, size=1.2
C 2 3 1e-4; right, size=1.2
R 3 0_1 1; down
W 0 0_1; right
""")
import numpy as np
t = np.linspace(0, 0.01, 1000)
vr = cct.R.v.evaluate(t)
from matplotlib.pyplot import subplots, savefig
fig, ax = subplots(1)
ax.plot(t, vr, linewidth=2)
ax.set_xlabel('Time (s)')
ax.set_ylabel('Resistor voltage (V)')
ax.grid(True)
savefig('circuit-VRLC2-vr.png')
|
valdergallo/django-chrono | example/models.py | Python | gpl-3.0 | 963 | 0 | # -*- coding: utf-8 -*-
from django.db import models
class Person(models.Model):
first_name = models.CharField(max_length=255)
last_name = models.CharField(max_length=255)
age = models.CharField(max_length=10)
def __unicode__(self):
return self.first_name
class PersonFile(models.Model):
filefield = models.FileField(upload_to='test')
def __unicode__(self):
        return self.filefield
class Mercado(models.Model):
item = models.CharField(max_length=50)
qtde = models.IntegerField(default=0)
def __unicode__(self):
return self.item
class Invoice(models.Model):
name = models.CharField(max_length=50)
sales_date = models.DateField()
price = models.FloatField()
def __unicode__(self):
return self.name
class ItemInvoice(models.Model):
    invoice = models.ForeignKey(Invoice)
name = models.CharField(max_length=50)
def __unicode__(self):
return self.name
|
bcare/roverpi | roverserver/enginectrl.py | Python | gpl-3.0 | 14,624 | 0.019147 |
from multiprocessing import Process, JoinableQueue, Manager, Lock, Value, Event
import wiringpi as wp
import RPi.GPIO as rpio
from slaveprocess import SlaveProcess
import time
rpio.setmode(rpio.BCM)
class PWMProcess(Process):
def __init__(self,**kwargs):
super(PWMProcess, self).__init__(**kwargs)
self.event_enable_pwm = kwargs['event_enable_pwm']
self.event_terminate = kwargs['event_terminate']
self.pwm_freq = kwargs['pwm_freq']
self.pwm_duty = kwargs['pwm_duty']
self.lock_freq = kwargs['lock_freq']
self.pin = kwargs['pin']
    def run(self):
        # Sketch completing the truncated original: a software-PWM loop that
        # holds the pin high for the duty fraction of each period and low for
        # the rest (duty assumed in [0, 1]; pin assumed set up for output).
        while self.event_enable_pwm.is_set() and not self.event_terminate.is_set():
            with self.lock_freq:
                pwm_freq = self.pwm_freq.value
                pwm_duty = self.pwm_duty.value
            period = 1. / pwm_freq
            rpio.output(self.pin, rpio.HIGH)
            time.sleep(period * pwm_duty)
            rpio.output(self.pin, rpio.LOW)
            time.sleep(period * (1. - pwm_duty))
class DriveCtrl():
def __init__(self, **kwargs):
self.cfg = kwargs['config']
self.queues = kwargs['queues']
## motor parameters :
self.speeds = (10,20,50,100)
if self.cfg.lookup('drive.speeds') is not None:
            self.speeds = tuple([min(100, x) for x in self.cfg.lookup('drive.speeds')])  # clamp each speed at 100
self.max_speed = max(self.speeds)
self.nb_speeds = len(self.speeds)
self.current_speed = self.speeds[0]
self.queues['log'].put('drive:nb speeds : %d'%(self.nb_speeds))
## pins :
self.power_pins={'L':0,'R':0}
self.direction_pins = {'L':0,'R':0}
self.monitor_pins={'LF':0,'LB':0,'RB':0,'RF':0}
self.pin_power_left = 0
self.pin_power_right = 0
self.pin_direction_left = 0
self.pin_direction_right = 0
## PWM options :
        if self.cfg.lookup('gpio.pwm_freq'):
self.pwm_freq = float(self.cfg.gpio.pwm_freq)
else:
self.pwm_freq = 50.0
        ###################### DEFAULT DRIVE VECTORS #######################
#################################
# COMMANDS
#################################
## Drive commands :
# North :
# _ _
# ^ | |_____| | ^ | |x| |
# | | | ^ | | | | | | |
# 1.0 | | |__^__| | | 1.0 | | | |
# | |_| |_| |
#
# North East :
# _ _
# ^ | |_ _ _| | | | |x|
# | | | ^ | | ^ | | | |
# 0.8 | | |__^__| | | 0.2 | | | |
# | |_| |_|
#
# East :
# _ _
# ^ | |_____| | | | | | |
# | | | ^ | | | | | |x|
# 1.0 | | |__^__| | | 1.0 | | | |
# | |_| |_| v
#
# South East :
# _ _
# | | |_____| | | | | |
# | | | ^ | | | | | | |
# 1.0 | | |__^__| | v 0.8 | | |x|
# v |_| |_|
#
# South :
# _ _
# | | |_____| | | | | | |
# | | | ^ | | | | | | |
# 1.0 | | |__^__| | | 1.0 | |x| |
# v |_| |_| v
#
# South West :
# _ _
# | |_____| | | | | | |
# | | ^ | | | | | | |
# 0.2 | | |__^__| | | 0.8 |x| | |
# v |_| |_| v
#
# West :
# _ _
# | | |_____| | ^ | | | |
# | | | ^ | | | |x| | |
# 1.0 | | |__^__| | | 1.0 | | | |
# v |_| |_| |
#
# North West :
# _ _
# ^ | |_____| | ^ |x| | |
# | | | ^ | | | | | | |
# 0.2 | |__^__| | | 0.8 | | | |
# |_| |_| |
#
# Full stop :
# _ _
# | |_____| | | | | |
# | | ^ | | | |x| |
# 0.0 | |__^__| | 0.0 | | | |
# |_| |_|
#
self.vec_north = (1.0,1.0,1,1,0,0)
self.vec_north_east = (0.8,0.2,1,1,0,0)
self.vec_east = (1.0,1.0,1,0,0,1)
self.vec_south_east = (0.8,0.2,0,0,1,1)
self.vec_south = (1.0,1.0,0,0,1,1)
self.vec_south_west = (0.2,0.8,0,0,1,1)
self.vec_west = (1.0,1.0,0,1,1,0)
self.vec_north_west = (0.2,0.8,1,1,0,0)
self.vec_full_stop = (0,0,0,0,0,0)
self.load_drive_vectors()
self.current_vector = self.vec_full_stop
## read the mapping of GPIO pins
self.read_gpio_map_from_config()
self.gpio_init()
self.dict_steer = {'8':self.vec_north, \
'9':self.vec_north_east, \
'6':self.vec_east,\
'3':self.vec_south_east,\
'2':self.vec_south,\
'1':self.vec_south_west,\
'4':self.vec_west,\
'7':self.vec_north_west,\
'5':self.vec_full_stop}
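        # dict_steer maps numeric-keypad digits to the drive vectors above:
        # '8' drives north (forward), '2' south (reverse), '5' is full stop,
        # and the diagonals follow the keypad layout shown in the diagrams.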
def load_drive_vectors(self):
for vecname in ['north','north_east','east','south_east','south','south_west','west','north_west']:
vecpath = 'drive.vectors.'+vecname
#self.queues['log'].put('drive: loading drive vector %s'%vecpath)
if self.cfg.lookup(vecpath) is not None:
vecarray = self.cfg.lookup(vecpath)
if len(vecarray) != 6:
self.queues['log'].put('drive:error: drive vector %s in config file'%(vecname))
setattr(self,'vec_'+vecname, tuple([x for x in vecarray]))
def read_gpio_map_from_config(self):
self.pin_power_left = self.cfg.gpio.pin_pwm_left
self.pin_power_right = self.cfg.gpio.pin_pwm_right
self.pin_direction_left_forward = self.cfg.gpio.pin_direction_left_forward
self.pin_direction_right_forward = self.cfg.gpio.pin_direction_right_forward
self.pin_direction_left_rear = self.cfg.gpio.pin_direction_left_rear
self.pin_direction_right_rear = self.cfg.gpio.pin_direction_right_rear
def gpio_init(self):
wp.wiringPiSetupSys()
# Set output for those pins :
wp.pinMode(self.pin_power_left, wp.OUTPUT)
wp.pinMode(self.pin_power_right, wp.OUTPUT)
wp.pinMode(self.pin_direction_left_forward, wp.OUTPUT)
wp.pinMode(self.pin_direction_right_forward, wp.OUTPUT)
wp.pinMode(self.pin_direction_left_rear, wp.OUTPUT)
wp.pinMode(self.pin_direction_right_rear, wp.OUTPUT)
## create the SoftPwm on power pins :
wp.softPwmCreate(self.pin_power_left, 0, self.max_speed)
wp.softPwmCreate(self.pin_power_right, 0, self.max_speed)
## reset everyone :
self.gpio_zero()
def rpio_init(self):
## open pins for output :
rpio.setup(self.pin_power_left, rpio.OUT)
rpio.setup(self.pin_power_right, rpio.OUT)
rpio.setup(self.pin_direction_left_forward, rpio.OUT)
rpio.setup(self.pin_direction_right_forward, rpio.OUT)
rpio.setup(self.pin_direction_left_rear, rpio.OUT)
rpio.setup(self.pin_direction_right_rear, rpio.OUT)
## open pins for input :
# disabled for now
## setup software pwm
self.pwm_left = rpio.PWM(self.pin_power_left, self.pwm_freq)
self.pwm_right = rpio.PWM(self.pin_power_right, self.pwm |
ponty/eagexp | eagexp/version.py | Python | bsd-2-clause | 775 | 0 | from easyprocess import EasyProcess
from entrypoint2 import entrypoint
from pyvirtualdisplay.display import Display
def extract_version(txt):
"""This function tries to extract the version from the help | text"""
words = txt.replace(",", " ").split()
version = None
    for x in reversed(words):
if len(x) > 2:
if x[0].lower() == "v":
x = x[1:]
if "." in x and x[0].isdigit():
version = x
break
return version
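
# e.g. extract_version("EAGLE Version 7.2.0 Copyright (c) ...") -> "7.2.0";
# a leading "v" (as in "v7.2.0") is stripped before the digit check.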
def version():
"""
return eagle version.
It does not work without X!
:rtype: string
"""
return extract_version(EasyProcess("eagle -?").call().stdout)
@entrypoint
def print_version():
with Display(visible=False):
print(version())
|
pythonprobr/oscon2014 | strategy/strategy_best2.py | Python | mit | 3,172 | 0.001261 | # strategy_best2.py
# Strategy pattern -- function-based implementation
# selecting best promotion from current module globals
"""
>>> joe = Customer('John Doe', 0)
>>> ann = Customer('Ann Smith', 1100)
>>> cart = [LineItem('banana', 4, .5),
... LineItem('apple', 10, 1.5),
... LineItem('watermellon', 5, 5.0)]
>>> Order(joe, cart, fidelity_promo)
<Order total: 42.00 due: 42.00>
>>> Order(ann, cart, fidelity_promo)
<Order total: 42.00 due: 39.90>
>>> banana_cart = [LineItem('banana', 30, .5),
... LineItem('apple', 10, 1.5)]
>>> Order(joe, banana_cart, bulk_item_promo)
<Order total: 30.00 due: 28.50>
>>> long_order = [LineItem(str(item_code), 1, 1.0)
... for item_code in range(10)]
>>> Order(joe, long_order, large_order_promo)
<Order total: 10.00 due: 9.30>
>>> Order(joe, cart, large_order_promo)
<Order total: 42.00 due: 42.00>
# BEGIN STRATEGY_BEST_TESTS
>>> Order(joe, long_order, best_promo)
<Order total: 10.00 due: 9.30>
>>> Order(joe, banana_cart, best_promo)
<Order total: 30.00 due: 28.50>
>>> Order(ann, cart, best_promo)
<Order total: 42.00 due: 39.90>
# END STRATEGY_BEST_TESTS
"""
from collections import namedtuple
Customer = namedtuple('Customer', 'name fidelity')
class LineItem:
def __init__(self, product, quantity, price):
self.product = product
self.quantity = quantity
self.price = price
def total(self):
return self.price * self.quantity
class Order: # the Context
def __init__(self, customer, cart, promotion=None):
self.customer = customer
self.cart = list(cart)
self.promotion = promotion
def total(self):
if not hasattr(self, '__total'):
self.__total = sum(item.total() for item in self.cart)
return self.__total
def due(self):
if self.promotion is None:
discount = 0
else:
discount = self.promotion(self)
return self.total() - discount
def __repr__(self):
fmt = '<Order total: {:.2f} due: {:.2f}>'
return fmt.format(self.total(), self.due())
def fidelity_promo(order):
"""5% discount | for customers with 1000 or more fidelity points"""
return order.total() * .05 if order.customer.fidelity >= 1000 else 0
def bulk_item_promo(order):
"""10% discount for each LineItem with 20 or more units"""
discount = 0
for item in order.cart:
if item.quantity >= 20:
discount += item.total() * .1
return discount
def large_order_promo(order):
"""7% discount for orders with 10 or more distinct items"""
    distinct_items = {item.product for item in order.cart}
if len(distinct_items) >= 10:
return order.total() * .07
return 0
# BEGIN STRATEGY_BEST2
promos = [globals()[name] for name in globals() # <1>
if name.endswith('_promo') # <2>
and name != 'best_promo'] # <3>
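
# With the functions defined above, the scan collects
#   promos == [fidelity_promo, bulk_item_promo, large_order_promo]
# (definition order on modern Pythons); any new *_promo function is
# picked up automatically.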
def best_promo(order):
"""Select best discount available
"""
return max(promo(order) for promo in promos) # <4>
# END STRATEGY_BEST2
|
ZEROFAIL/goblin | tests/test_properties.py | Python | agpl-3.0 | 7,949 | 0 | """Test model properties."""
import pytest
from gremlin_python.statics import long
from goblin import element, exception, manager, properties
def test_set_change_property(person, lives_in):
# vertex
assert not person.name
person.name = 'leif'
assert person.name == 'leif'
person.name = 'leifur'
assert person.name == 'leifur'
# edge
assert not lives_in.notes
lives_in.notes = 'notable'
assert lives_in.notes == 'notable'
lives_in.notes = 'more notable'
assert lives_in.notes == 'more notable'
def test_property_default(knows):
assert knows.notes == 'N/A'
knows.notes = 'notable'
assert knows.notes == 'notable'
def test_false_bool_default(place):
assert place.incorporated.value is False
def test_validation(person):
person.age = 10
with pytest.raises(Exception):
person.age = 'hello'
def test_setattr_validation(person):
setattr(person, 'age', 10)
assert person.age == 10
with pytest.raises(Exception):
setattr(person, 'age', 'hello')
def test_set_id_long(person):
person.id = 1
assert isinstance(person.id, long)
def test_id_class_attr_throws(person_class):
with pytest.raises(exception.ElementError):
person_class.id
# Vertex properties
def test_set_change_vertex_property(person):
assert not person.birthplace
person.birthplace = 'Iowa City'
assert isinstance(person.birthplace, element.VertexProperty)
assert person.birthplace.value == 'Iowa City'
person.birthplace = 'U of I Hospital'
assert person.birthplace.value == 'U of I Hospital'
def test_vertex_property_default():
"""Makes sure that a brand new VertexProperty (i.e., with no value set) is
still representable. Addresses issue #52.
"""
vp = element.VertexProperty(int)
assert repr(vp) == "<VertexProperty(type=0, value=None)"
def test_validate_vertex_prop(person):
assert not person.birthplace
person.birthplace = 1
assert person.birthplace.value == '1'
def test_set_change_list_card_vertex_property(person):
assert not person.nicknames
person.nicknames = 'sly'
assert isinstance(person.nicknames, list)
assert isinstance(person.nicknames, manager.ListVertexPropertyManager)
assert isinstance(person.nicknames[0], element.VertexProperty)
assert person.nicknames[0].value == 'sly'
assert person.nicknames('sly') == person.nicknames[0]
person.nicknames = set(['sly', 'guy'])
assert isinstance(person.nicknames, list)
assert person.nicknames('sly').value == 'sly'
assert person.nicknames('guy').value == 'guy'
person.nicknames = ('sly', 'big', 'guy')
assert isinstance(person.nicknames, list)
assert [v.value for v in person.nicknames] == ['sly', 'big', 'guy']
person.nicknames = ['sly', 'big', 'guy', 'guy']
assert isinstance(person.nicknames, list)
assert len(person.nicknames('guy')) == 2
assert [v.value for v in person.nicknames] == ['sly', 'big', 'guy', 'guy']
person.nicknames.append(1)
assert person.nicknames('1').value == '1'
def test_list_card_vertex_property_validation(person):
person.nicknames = [1, 1.5, 2]
assert [v.value for v in person.nicknames] == ['1', '1.5', '2']
def test_set_change_set_card_vertex_property(place):
assert not place.important_numbers
place.important_numbers = 1
assert isinstance(place.important_numbers, set)
assert isinstance(place.important_numbers,
manager.SetVertexPropertyManager)
number_one, = place.important_numbers
assert isinstance(number_one, element.VertexProperty)
assert number_one.value == 1
assert place.important_numbers(1) == number_one
place.important_numbers = [1, 2]
assert isinstance(place.important_numbers, set)
assert {v.value for v in place.important_numbers} == set([1, 2])
place.important_numbers.add(3)
    assert {v.value for v in place.important_numbers} == set([1, 2, 3])
place.important_numbers = (1, 2, 3, 4)
assert isinstance(place.important_numbers, set)
assert {v.value for v in place.important_numbers} == set([1, 2, 3, 4])
place.important_numbers = set([1, 2, 3])
assert isinstance(place.important_numbers, set)
assert {v.value for v in place.important_numbers} == set([1, 2, 3])
    with pytest.raises(exception.ValidationError):
place.important_numbers.add('dude')
def test_set_card_union(place):
place.important_numbers = set([1, 2, 3])
place.important_numbers = place.important_numbers.union({3, 4, 5})
def test_set_card_64bit_integer(place):
place.important_numbers = set([long(1), long(2), long(3)])
assert all(isinstance(i.value, long) for i in place.important_numbers)
def test_set_card_validation_vertex_property(place):
with pytest.raises(exception.ValidationError):
place.important_numbers = set(['hello', 2, 3])
def test_cant_set_vertex_prop_on_edge():
with pytest.raises(exception.MappingError):
class MyEdge(element.Edge):
vert_prop = element.VertexProperty(properties.String)
def test_meta_property_set_update(place):
assert not place.historical_name
place.historical_name = ['hispania', 'al-andalus']
place.historical_name('hispania').notes = 'roman rule'
assert place.historical_name('hispania').notes == 'roman rule'
place.historical_name('hispania').year = 300
assert place.historical_name('hispania').year == 300
place.historical_name('al-andalus').notes = 'muslim rule'
assert place.historical_name('al-andalus').notes == 'muslim rule'
place.historical_name('al-andalus').year = 700
assert place.historical_name('al-andalus').year == 700
def test_meta_property_validation(place):
assert not place.historical_name
place.historical_name = ['spain']
with pytest.raises(exception.ValidationError):
place.historical_name('spain').year = 'hello'
class TestString:
def test_validation(self, string):
assert string.validate(1) == '1'
def test_to_db(self, string):
assert string.to_db('hello') == 'hello'
def test_to_ogm(self, string):
assert string.to_ogm('hello') == 'hello'
def test_initval_to_db(self, string_class):
string = string_class('hello')
assert string.to_db() == 'hello'
class TestInteger:
def test_validation(self, integer):
assert integer.validate('1') == 1
with pytest.raises(Exception):
integer.validate('hello')
def test_to_db(self, integer):
assert integer.to_db(1) == 1
def test_to_ogm(self, integer):
assert integer.to_db(1) == 1
def test_initval_to_db(self, integer_class):
integer = integer_class(1)
assert integer.to_db() == 1
class TestFloat:
def test_validation(self, flt):
assert flt.validate(1.2) == 1.2
with pytest.raises(Exception):
flt.validate('hello')
def test_to_db(self, flt):
assert flt.to_db(1.2) == 1.2
def test_to_ogm(self, flt):
assert flt.to_db(1.2) == 1.2
def test_initval_to_db(self, flt_class):
flt = flt_class(1.2)
assert flt.to_db() == 1.2
class TestBoolean:
def test_validation_true(self, boolean):
assert boolean.validate(True)
def test_validation_false(self, boolean):
assert not boolean.validate(False)
def test_to_db_true(self, boolean):
assert boolean.to_db(True)
def test_to_db_false(self, boolean):
assert not boolean.to_db(False)
def test_to_ogm_true(self, boolean):
assert boolean.to_ogm(True)
def test_to_ogm_false(self, boolean):
assert not boolean.to_ogm(False)
def test_initval_to_db_true(self, boolean_class):
boolean = boolean_class(True)
assert boolean.to_db()
    def test_initval_to_db_false(self, boolean_class):
boolean = boolean_class(False)
assert not boolean.to_db()
|
eriknw/eqpy | setup.py | Python | bsd-3-clause | 1,281 | 0 | #!/usr/bin/env python
from os.path import exists
from setuptools import setup
import eqpy
setup(
name='eqpy',
version=eqpy.__version__,
    description='Solve systems of equations and assumptions, linear and '
'non-linear, numerically and symbolically.',
    url='http://github.com/eriknw/eqpy/',
author='https://raw.github.com/eriknw/eqpy/master/AUTHORS.md',
maintainer='Erik Welch',
maintainer_email='[email protected]',
license='BSD',
keywords='math CAS equations symbolic sympy',
packages=[
'eqpy',
],
classifiers=[
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Topic :: Scientific/Engineering',
'Topic :: Scientific/Engineering :: Mathematics',
'Topic :: Scientific/Engineering :: Physics',
],
long_description=open('README.md').read() if exists("README.md") else "",
zip_safe=False,
)
|
ZelphirKaltstahl/rst-internal-links-to-raw-latex | RSTInternalLinks/HeadingsParser.py | Python | gpl-3.0 | 13,092 | 0.004987 | import re
class HeadingsParser():
"""
    The HeadingsParser parses the document for headings.
    NOT YET: converts headings to raw latex headings in the correct way, so that they can be referenced later
see https://www.sharelatex.com/learn/Sections_and_chapters for info about the levels"""
def __init__(self):
super().__init__()
self.title = None
self.subtitle = None
self.heading = []
# regexes
self.title_start_marker_regex = re.compile(r'[=]{3,}')
self.title_end_marker_regex = re.compile(r'[=]{3,}')
self.title_content_regex = re.compile(
r'''
^ # beginning of line
[ ] # one whitespace
[A-Za-z0-9äöüÄÖÜ]+ # alphanumerical string, no whitespace
(?P<title>[A-Za-z0-9äöüÄÖÜ ]+) # alphanumerical string, whitespace ok
[A-Za-z0-9äöüÄÖÜ]+ # alphanumerical string, no whitespace
[ ] # one whitespace
$ # end of line
''', re.VERBOSE|re.UNICODE
)
        self.subtitle_start_marker_regex = re.compile(r'[-]{3,}')
self.subtitle_end_marker_regex = re.compile(r'[-]{3,}')
self.subtitle_content_regex = re.compile(
r'''
^ # beginning of line
            [ ]                               # one whitespace
[A-Za-z0-9äöüÄÖÜ]+ # alphanumerical string, no whitespace
(?P<subtitle>[A-Za-z0-9äöüÄÖÜ ]+) # alphanumerical string, whitespace ok
[A-Za-z0-9äöüÄÖÜ]+ # alphanumerical string, no whitespace
[ ] # one whitespace
$ # end of line
''', re.VERBOSE|re.UNICODE
)
# Headings cannot begin with whitespace
self.h_content_regex = re.compile(
r'''
^ # beginning of line
[A-Za-z0-9äöüÄÖÜß(] # alphanum
[A-Za-z0-9äöüÄÖÜß,() -]* # alphanum or space
[A-Za-z0-9äöüÄÖÜß)] # alphanum
$ # end of line
''', re.VERBOSE|re.UNICODE
)
# chapter
self.h1_underlining_regex = re.compile(r'[=]{3,}')
# section
self.h2_underlining_regex = re.compile(r'[-]{3,}')
# subsection
self.h3_underlining_regex = re.compile(r'[~]{3,}')
# subsubsection
self.h4_underlining_regex = re.compile(r'[\^]{3,}')
# paragraph
self.h5_underlining_regex = re.compile(r'[*]{3,}')
# subparagraph
self.h6_underlining_regex = re.compile(r'[.]{3,}')
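        # Summary of the adornment-to-LaTeX mapping defined above:
        #   ===  chapter         ---  section      ~~~  subsection
        #   ^^^  subsubsection   ***  paragraph    ...  subparagraph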
def parse(self, rst_file_content):
self.title = self.find_title(rst_file_content)
        self.subtitle = self.find_subtitle(rst_file_content)
return self.find_heading_labels(rst_file_content)
def find_title(self, rst_file_content):
print('looking for title ...')
title = None
for lineno, line in enumerate(rst_file_content):
previous_line = ""
if lineno > 0:
previous_line = rst_file_content[lineno - 1]
next_line = ""
if lineno < len(rst_file_content) - 1:
next_line = rst_file_content[lineno + 1]
# title
if (
self.title_start_marker_regex.match(previous_line) and
self.title_end_marker_regex.match(next_line) and
(
len(self.title_start_marker_regex.match(previous_line).group()) ==
len(self.title_end_marker_regex.match(next_line).group())
) and
self.title_content_regex.match(line) and
not title
):
title = self.title_content_regex.match(line).group('title')
print('title is:|', title, '|', sep='')
break
if not title: print('Could not find title in document.')
return title
def find_subtitle(self, rst_file_content):
print('looking for subtitle ...')
subtitle = None
for lineno, line in enumerate(rst_file_content):
previous_line = ""
if lineno > 0:
previous_line = rst_file_content[lineno - 1]
next_line = ""
if lineno < len(rst_file_content) - 1:
next_line = rst_file_content[lineno + 1]
if (
self.subtitle_start_marker_regex.match(previous_line) and
self.subtitle_end_marker_regex.match(next_line) and
(
len(self.subtitle_start_marker_regex.match(previous_line).group()) ==
len(self.subtitle_end_marker_regex.match(next_line).group())
) and
self.subtitle_content_regex.match(line) and
not subtitle
):
subtitle = self.subtitle_content_regex.match(line).group('subtitle')
print('subtitle is:|', subtitle, '|', sep='')
break
if not subtitle: print('Could not find subtitle in document.')
return subtitle
def find_heading_labels(self, rst_file_content):
print('looking for headings ...')
headings_dict = {}
# heading_labels = []
for lineno, line in enumerate(rst_file_content):
# print('current line:', lineno)
# print('current line:', line)
# if line.startswith("Schlussfolgerungen"):
# print('current line:', line)
previous_line = ""
if lineno > 0:
previous_line = rst_file_content[lineno - 1]
next_line = ""
if lineno < len(rst_file_content) - 1:
next_line = rst_file_content[lineno + 1]
# headings level 1
# print('looking for h1 ...')
if (
(previous_line.isspace() or previous_line == '') and
self.h_content_regex.match(line) and
self.h1_underlining_regex.match(next_line) and
len(self.h_content_regex.match(line).group()) == len(self.h1_underlining_regex.match(next_line).group())
):
print('found a h1:', line)
print('replacing chapter heading')
headings_dict[line] = self.heading_to_label(line, 'chapter')
# heading_labels.append(self.heading_to_label(line, 'chapter'))
rst_file_content[lineno] = ':raw-latex:`\chapter{' + line + '}`'
rst_file_content[lineno + 1] = ':raw-latex:`\label{' + self.heading_to_label(line, 'chapter') + '}`'
# headings level 2
# print('looking for h2 ...')
if (
(previous_line.isspace() or previous_line == '') and
self.h_content_regex.match(line) and
self.h2_underlining_regex.match(next_line) and
len(self.h_content_regex.match(line).group()) == len(self.h2_underlining_regex.match(next_line).group())
):
print('found a h2:', line)
headings_dict[line] = self.heading_to_label(line, 'section')
# heading_labels.append(self.heading_to_label(line, 'section'))
rst_file_content[lineno] = ':raw-latex:`\section{' + line + '}`'
rst_file_content[lineno + 1] = ':raw-latex:`\label{' + self.heading_to_label(line, 'section') + '}`'
# headings level 3
# print('looking for h3 ...')
if (
(previous_line.isspace() or previous_line == '') and
self.h_content_regex.match(line) and
self.h3_underlining_regex.match(next_line) and
len(self.h_content_regex.match(line).group()) == len(self.h3_underlining_regex.match(next_line).group())
):
                print('found a h3:', line)
                # Completed by analogy with the h1/h2 branches above (the
                # original text breaks off at this point):
                headings_dict[line] = self.heading_to_label(line, 'subsection')
                rst_file_content[lineno] = ':raw-latex:`\subsection{' + line + '}`'
                rst_file_content[lineno + 1] = ':raw-latex:`\label{' + self.heading_to_label(line, 'subsection') + '}`'
|
yuyuyu101/VirtualBox-NetBSD | src/libs/xpcom18a4/python/tools/tracer_demo.py | Python | gpl-2.0 | 4,360 | 0.003899 | # ***** BEGIN LICENSE BLOCK *****
# Version: MPL 1.1/GPL 2.0/LGPL 2.1
#
# The contents of this file are subject to the Mozilla Public License Version
# 1.1 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
# http://www.mozilla.org/MPL/
#
# Software distributed under the License is distributed on an "AS IS" basis,
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
# for the specific language governing rights and limitations under the
# License.
#
# The Original Code is Python XPCOM language bindings.
#
# The Initial Developer of the Original Code is
# ActiveState Tool Corp.
# Portions created by the Initial Developer are Copyright (C) 2000, 2001
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Hammond <[email protected]> (original author)
#
# Alternatively, the contents of this file may be used under the terms of
# either the GNU General Public License Version 2 or later (the "GPL"), or
# the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
# in which case the provisions of the GPL or the LGPL are applicable instead
# of those above. If you wish to allow use of your version of this file only
# under the terms of either the GPL or the LGPL, and not to allow others to
# use your version of this file under the terms of the MPL, indicate your
# decision by deleting the provisions above and replace them with the notice
# and other provisions required by the GPL or the LGPL. If you do not delete
# the provisions above, a recipient may use your version of this file under
# the terms of any one of the MPL, the GPL or the LGPL.
#
# ***** END LICENSE BLOCK *****
# This is a demo is how to use the xpcom.server "tracer" facility.
#
# This demo installs a tracer that uses the Python profiler. It then
# creates the Python test component, and references some methods
# and properties. It then dumps the profile statistics.
# This same technique could also be used for debugging, for example.
import profile
p = profile.Profile()
getters = {}
setters = {}
# A wrapper around a function - looks like a function,
# but actually profiles the delegate.
class TracerDelegate:
def __init__(self, callme):
self.callme = callme
def __call__(self, *args):
return p.runcall(self.callme, *args)
# A wrapper around each of our XPCOM objects. All PyXPCOM calls
# in are made on this object, which creates a TracerDelagate around
# every function. As the function is called, it collects profile info.
class Tracer:
def __init__(self, ob):
self.__dict__['_ob'] = ob
def __repr__(self):
return "<Tracer around %r>" % (self._ob,)
def __str__(self):
return "<Tracer around %r>" % (self._ob,)
def __getattr__(self, attr):
ret = getattr(self._ob, attr) # Attribute error just goes up
if callable(ret):
return TracerDelegate(ret)
else:
if not attr.startswith("_com_") and not attr.startswith("_reg_"):
getters[attr] = getters.setdefault(attr,0) + 1
return ret
def __setattr__(self, attr, val):
if self.__dict__.has_key(attr):
self.__dict__[attr] = val
return
        setters[attr] = setters.setdefault(attr,0) + 1
setattr(self._ob, attr, val)
# Installed as a global XPCOM function that if exists, will be called
# to wrap each XPCOM object created.
def MakeTracer(ob):
    # In some cases we may be asked to wrap ourself, so handle that.
if isinstance(ob, Tracer):
return ob
return Tracer(ob)
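
# Assigning MakeTracer to xpcom.server.tracer (as test() does below) makes
# the server route every wrapped object through Tracer, so all component
# calls and attribute accesses are profiled or counted.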
def test():
import xpcom.server, xpcom.components
xpcom.server.tracer = MakeTracer
contractid = "Python.TestComponent"
for i in range(100):
c = xpcom.components.classes[contractid].createInstance().queryInterface(xpcom.components.interfaces.nsIPythonTestInterface)
c.boolean_value = 0
a = c.boolean_value
c.do_boolean(0,1)
print "Finshed"
p.print_stats()
print "%-30s%s" % ("Attribute Gets", "Number")
print "-" * 36
for name, num in getters.items():
print "%-30s%d" % (name, num)
print "%-30s%s" % ("Attribute Sets", "Number")
print "-" * 36
for name, num in setters.items():
print "%-30s%d" % (name, num)
test()
|
googleapis/python-resource-manager | google/cloud/resourcemanager_v3/services/projects/async_client.py | Python | apache-2.0 | 70,063 | 0.001313 | # -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
import functools
import re
from typing import Dict, Optional, Sequence, Tuple, Type, Union
import pkg_resources
from google.api_core.client_options import ClientOptions
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import retry as retries
from google.auth import credentials as ga_credentials # type: ignore
from google.oauth2 import service_account # type: ignore
try:
OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
except AttributeError: # pragma: NO COVER
OptionalRetry = Union[retries.Retry, object] # type: ignore
from google.api_core import operation # type: ignore
from google.api_core import operation_async # type: ignore
from google.cloud.resourcemanager_v3.services.projects import pagers
from google.cloud.resourcemanager_v3.types import projects
from google.iam.v1 import iam_policy_pb2 # type: ignore
from google.iam.v1 import policy_pb2 # type: ignore
from google.protobuf import field_mask_pb2 # type: ignore
from google.protobuf import timestamp_pb2 # type: ignore
from .transports.base import ProjectsTransport, DEFAULT_CLIENT_INFO
from .transports.grpc_asyncio import ProjectsGrpcAsyncIOTransport
from .client import ProjectsClient
class ProjectsAsyncClient:
"""Manages Google Cloud Projects."""
_client: ProjectsClient
DEFAULT_ENDPOINT = ProjectsClient.DEFAULT_ENDPOINT
DEFAULT_MTLS_ENDPOINT = ProjectsClient.DEFAULT_MTLS_ENDPOINT
project_path = staticmethod(ProjectsClient.project_path)
parse_project_path = staticmethod(ProjectsClient.parse_project_path)
common_billing_account_path = staticmethod(
ProjectsClient.common_billing_account_path
)
parse_common_billing_account_path = staticmethod(
ProjectsClient.parse_common_billing_account_path
)
common_folder_path = staticmethod(ProjectsClient.common_folder_path)
parse_common_folder_path = staticmethod(ProjectsClient.parse_common_folder_path)
common_organization_path = staticmethod(ProjectsClient.common_organization_path)
parse_common_organization_path = staticmethod(
ProjectsClient.parse_common_organization_path
)
common_project_path = staticmethod(ProjectsClient.common_project_path)
parse_common_project_path = staticmethod(ProjectsClient.parse_common_project_path)
common_location_path = staticmethod(ProjectsClient.common_location_path)
parse_common_location_path = staticmethod(ProjectsClient.parse_common_location_path)
@classmethod
    def from_service_account_info(cls, info: dict, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
info.
Args:
info (dict): The service account private key info.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
            ProjectsAsyncClient: The constructed client.
"""
return ProjectsClient.from_service_account_info.__func__(ProjectsAsyncClient, info, *args, **kwargs) # type: ignore
@classmethod
def from_service_account_file(cls, filename: str, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
file.
Args:
filename (str): The path to the service account private key json
file.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
ProjectsAsyncClient: The constructed client.
"""
return ProjectsClient.from_service_account_file.__func__(ProjectsAsyncClient, filename, *args, **kwargs) # type: ignore
from_service_account_json = from_service_account_file
@classmethod
def get_mtls_endpoint_and_cert_source(
cls, client_options: Optional[ClientOptions] = None
):
"""Return the API endpoint and client cert source for mutual TLS.
The client cert source is determined in the following order:
(1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
client cert source is None.
(2) if `client_options.client_cert_source` is provided, use the provided one; if the
default client cert source exists, use the default one; otherwise the client cert
source is None.
The API endpoint is determined in the following order:
(1) if `client_options.api_endpoint` if provided, use the provided one.
(2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the
default mTLS endpoint; if the environment variabel is "never", use the default API
endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
use the default API endpoint.
More details can be found at https://google.aip.dev/auth/4114.
Args:
client_options (google.api_core.client_options.ClientOptions): Custom options for the
client. Only the `api_endpoint` and `client_cert_source` properties may be used
in this method.
Returns:
Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
client cert source to use.
Raises:
google.auth.exceptions.MutualTLSChannelError: If any errors happen.
"""
return ProjectsClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore
@property
def transport(self) -> ProjectsTransport:
"""Returns the transport used by the client instance.
Returns:
ProjectsTransport: The transport used by the client instance.
"""
return self._client.transport
get_transport_class = functools.partial(
type(ProjectsClient).get_transport_class, type(ProjectsClient)
)
def __init__(
self,
*,
credentials: ga_credentials.Credentials = None,
transport: Union[str, ProjectsTransport] = "grpc_asyncio",
client_options: ClientOptions = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiates the projects client.
Args:
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
transport (Union[str, ~.ProjectsTransport]): The
transport to use. If set to None, a transport is chosen
automatically.
client_options (ClientOptions): Custom options for the client. It
won't take effect if a ``transport`` instance is provided.
(1) The ``api_endpoint`` property can be used to override the
default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
environment variable can also be used to override the endpoint:
"always" (always use the default mTLS endpoint), "never" (always
use the default regular endpoint) and "auto" (auto switch to the
default mTLS endpoint if client certificate is present, this is
the default value). However, the ``api_endp |
adrianschroeter/kiwi | test/unit/privileges_test.py | Python | gpl-3.0 | 568 | 0 | from mock import patch
from .test_helper import raises
from kiwi.exceptions import KiwiPrivilegesError
from kiwi.privileges import Privileges
class TestPrivileges(object):
@raises(KiwiPrivilegesError)
@patch('os.geteuid')
    def test_check_for_root_permission_false(self, mock_euid):
mock_euid.return_value = 1
Privileges.check_for_root_permissions()
@patch('os.geteuid')
    def test_check_for_root_permission_true(self, mock_euid):
mock_euid.return_value = 0
assert Privileges.check_for_root_permissions() is True
|
IQSS/gentb-site | apps/predict/migrations/0003_auto_20160525_1521.py | Python | agpl-3.0 | 853 | 0.002345 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
        ('predict', '0002_auto_20160524_0947'),
]
operations = [
migrations.RemoveField(
model_name='predictdataset',
name='dropbox_url',
),
migrations.AlterField(
model_name='predictdataset',
name='file_type',
            field=models.CharField(max_length=25, choices=[(b'vcf', b'Variant Call Format (VCF)'), (b'fastq', b'FastQ Nucleotide Sequence'), (b'manual', b'Mutations Manual Entry')]),
),
migrations.AlterField(
model_name='predictdataset',
name='title',
field=models.CharField(max_length=255, verbose_name=b'Dataset Title'),
),
]
|
nielsbuwen/ilastik | ilastik/applets/autocontextClassification/opBatchIoSelective.py | Python | gpl-3.0 | 11,464 | 0.012648 | ###############################################################################
# ilastik: interactive learning and segmentation toolkit
#
# Copyright (C) 2011-2014, the ilastik developers
# <[email protected]>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# In addition, as a special exception, the copyright holders of
# ilastik give you permission to combine ilastik with applets,
# workflows and plugins which are not covered under the GNU
# General Public License.
#
# See the LICENSE file for details. License information is also available
# on the ilastik web site at:
# http://ilastik.org/license.html
###############################################################################
import os
import h5py
import traceback
import threading
import logging
from lazyflow.graph import Operator, InputSlot, OutputSlot, OrderedSignal
from lazyflow.operators import OpBlockedArrayCache
from lazyflow.operators.ioOperators import OpH5WriterBigDataset
from lazyflow.utility.pathHelpers import PathComponents
from lazyflow.rtype import SubRegion
logger = logging.getLogger(__name__)
class ExportFormat():
H5 = 0
Npy = 1
Tiff = 2 # 3d only, up to 3 channels
def __init__(self, name, extension):
self.name = name
self.extension = extension
SupportedFormats = { ExportFormat.H5 : ExportFormat("Hdf5", '.h5') }
#SupportedFormats = { ExportFormat.H5 : ExportFormat("Hdf5", '.h5'),
# ExportFormat.Npy : ExportFormat("Numpy", '.npy'),
# ExportFormat.Tiff : ExportFormat("Tiff", '.tiff') }
class OpBatchIoSelective(Operator):
"""
The top-level operator for the Batch IO applet.
"""
name = "OpBatchIo"
category = "Top-level"
ExportDirectory = InputSlot(stype='filestring') # A separate directory to export to. If '', then exports to the input data's directory
Format = InputSlot(stype='int') # The export format
Suffix = InputSlot(stype='string') # Appended to the file name (before the extension)
InternalPath = InputSlot(stype='string', optional=True) # Hdf5 internal path
DatasetPath = InputSlot(stype='string') # The path to the original the dataset we're saving
ImageToExport = InputSlot() # The image that needs to be saved
SelectedSlices = InputSlot(stype='list')
OutputFileNameBase = InputSlot(stype='string', optional=True) # Override for the file name base. (Input filename is used by default.)
Dirty = OutputSlot(stype='bool') # Whether or not the result currently matches what's on disk
OutputDataPath = OutputSlot(stype='string')
ExportResult = OutputSlot(stype='string') # When requested, attempts to store the data to disk. Returns the path that the data was saved to.
ProgressSignal = OutputSlot(stype='object')
def __init__(self, *args, **kwargs):
super(OpBatchIoSelective, self).__init__(*args, **kwargs)
self.Dirty.meta.shape = (1,)
self.Dirty.meta.dtype = bool
self.OutputDataPath.meta.shape = (1,)
self.OutputDataPath.meta.dtype = object
self.ExportResult.meta.shape = (1,)
self.ExportResult.meta.dtype = object
# Provide default values
self.ExportDirectory.setValue( '' )
self.Format.setValue( ExportFormat.H5 )
self.Suffix.setValue( '_results' )
self.Dirty.setValue(True)
self.progressSignal = OrderedSignal()
self.ProgressSignal.setValue( self.progressSignal )
self._createDirLock = threading.Lock()
#make a cache of the input image not to request too much
self.ImageCache = OpBlockedArrayCache(parent=self)
self.ImageCache.fixAtCurrent.setValue(False)
self.ImageCache.Input.connect(self.ImageToExport)
def setupOutputs(self):
# Create the output data path
formatId = self.Format.value
ext = SupportedFormats[formatId].extension
inputPathComponents = PathComponents(self.DatasetPath.value)
# If no export directory was given, use the original input data's directory
if self.ExportDirectory.value == '':
outputPath = inputPathComponents.externalDirectory
else:
outputPath = self.ExportDirectory.value
if self.OutputFileNameBase.ready():
filenameBase = PathComponents(self.OutputFileNameBase.value).filenameBase
else:
filenameBase = inputPathComponents.filenameBase
outputPath = os.path.join(outputPath, filenameBase + self.Suffix.value + ext).replace('\\', '/')
# Set up the path for H5 export
if formatId == ExportFormat.H5:
if self.InternalPath.ready() and self.InternalPath.value != '':
# User-specified internal path
self._internalPath = self.InternalPath.value
if self._internalPath[0] != '/':
self._internalPath = "/" + self._internalPath
elif inputPathComponents.internalPath is not None:
# Mirror the input data internal path
self._internalPath = inputPathComponents.internalPath
else:
self._internalPath = '/volume/data'
self.OutputDataPath.setValue( outputPath + self._internalPath )
elif formatId == ExportFormat.Npy:
self.OutputDataPath.setValue( outputPath )
elif formatId == ExportFormat.Tiff:
self.OutputDataPath.setValue( outputPath )
self.setupCaches()
def setupCaches(self):
# Set the blockshapes for each input image separately, depending on which axistags it has.
axisOrder = [ tag.key for tag in self.ImageToExport.meta.axistags ]
## Pixel Cache blocks
blockDimsX = { 't' : (1,1),
'z' : (128,256),
'y' : (128,256),
'x' : (1,1),
'c' : (100, 100) }
blockDimsY = { 't' : (1,1),
'z' : (128,256),
'y' : (1,1),
'x' : (128,256),
'c' : (100,100) }
blockDimsZ = { 't' : (1,1),
'z' : (1,1),
'y' : (128,256),
'x' : (128,256),
'c' : (100,100) }
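        # Each dict maps an axis key to (inner, outer) cache block extents;
        # the shape tuples below re-order those extents to the image's axes.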
innerBlockShapeX = tuple( blockDimsX[k][0] for k in axisOrder )
outerBlockShapeX = tuple( blockDimsX[k][1] for k in axisOrder )
innerBlockShapeY = tuple( blockDimsY[k][0] for k in axisOrder )
outerBlockShapeY = tuple( blockDimsY[k][1] for k in axisOrder )
innerBlockShapeZ = tuple( blockDimsZ[k][0] for k in axisOrder )
outerBlockShapeZ = tuple( blockDimsZ[k][1] for k in axisOrder )
self.ImageCache.inputs["innerBlockShape"].setValue( innerBlockShapeZ )
self.ImageCache.inputs["outerBlockShape"].setValue( outerBlockShapeZ )
def propagateDirty(self, slot, subindex, roi):
        # Our input data changed, so we have work to do when we get executed.
self.Dirty.setValue(True)
def execute(self, slot, subindex, roi, result):
if slot == self.Dirty:
assert False # Shouldn't get to this line because the dirty output is given a value directly
        if slot == self.OutputDataPath:
assert False # This slot is already set via setupOutputs
if slot == self.ExportResult:
# We can stop now if the output isn't dirty
if not self.Dirty.value:
result[0] = True
return
exportFormat = self.Format.value
# Export H5
if exportFormat == ExportFormat.H5:
pathComp = PathComponents(self.OutputDataPath.value)
# |
ties/py-sonic | libsonic/errors.py | Python | gpl-3.0 | 1,413 | 0.012739 | """
This file is part of py-sonic.
py-sonic is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
py-sonic is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with py-sonic. If not, see <http://www.gnu.org/licenses/>
"""
class SonicError(Exception):
pass
class ParameterError(SonicError):
pass
class VersionError(SonicError):
pass
class CredentialError(SonicError):
pass
class AuthError(SonicError):
pass
class LicenseError(SonicError):
pass
class DataNotFoundError(SonicError):
pass
class ArgumentError(SonicError):
pass
# This maps the error code numbers from the Subsonic server to their
# appropriate Exceptions
ERR_CODE_MAP = {
0: SonicError ,
10: ParameterError ,
20: VersionError ,
30: VersionError ,
40: CredentialError ,
50: AuthError ,
60: LicenseError ,
70: DataNotFoundError ,
}
def getExcByCode(code):
code = int(code)
if code in ERR_CODE_MAP:
        return ERR_CODE_MAP[code]
return SonicError
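
# e.g. getExcByCode(40) is CredentialError and getExcByCode(50) is AuthError,
# while any unrecognised code falls back to the generic SonicError.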
|
dorvaljulien/StarFiddle | anim_plot.py | Python | mit | 1,736 | 0.009793 | import time
import numpy as np
import cPickle as pk
import matplotlib.pyplot as plt
import matplotlib.animation as animation
from matplotlib.widgets import Slider
import mpl_toolkits.mplot3d.axes3d as p3
from matplotlib.widgets import Button
class PlotAnimation:
"""
    Takes a list of PySnap objects and launches an interactive animation.
    """
def __init__(self, Anim, DataControl, **kwargs):
self.data = DataControl
self.previous_n = 0
self.Anim= Anim
def timer_update(self):
if self.Anim.n != self.previous_n:
self.data.Update(self.Anim.n)
self.previous_n = self.Anim.n
def launch(self):
self.data.Initialize()
self.timer=self.data.fig.canvas.new_timer(interval=self.Anim.delay)
args=[]
        # We tell the timer to call the update function every self.Anim.delay ms
self.timer.add_callback(self.timer_update,*args)
self.timer.start()
if __name__ is "__main__":
from quick import *
R = ReadRun("/home/dorval/work/amuse/clump_finding/p10k_fragmentation/")
R.Animation()
A = R.Anim
class Data():
def __init__(self,):
self.X = [ np.random.random(20) for i in range(len(R))]
self.Y = [ np.random.random(20) for i in range(len(R))]
def Initialize(self):
X, Y = self.X[0], self.Y[0]
self.fig = plt.figure()
self.ax = self.fig.add_subplot(111)
self.line, = plt.plot(X, Y, "b")
self.canvas=self.ax.figure.canvas
        def Update(self, n):
            X, Y = self.X[n], self.Y[n]
self.line.set_data(X, Y)
self.canvas.draw()
D= Data()
P = PlotAnimation(A,D)
P.launch()
plt.show()
|
hofmannedv/training-python | text-analysis/character-statistics.py | Python | gpl-2.0 | 2,207 | 0.017218 | # -----------------------------------------------------------
# reads the text from the given file, and outputs its
# character statistics
#
# (C) 2015 Frank Hofmann, Berlin, Germany
# Released under GNU Public License (GPL)
# email [email protected]
# -----------------------------------------------------------
# call the program this way:
# python character-statistics.py inputfile.txt > statistics.csv
# import required python standard modules
import sys,csv
import chardet  # used below to detect the input file's encoding
import codecs
import os
# define character count function
def charStat (text):
# set default value
stat = {}
# go through the characters one by one
for character in text:
#print (character)
# retrieve current value for a character,
# and 0 if still not in list
# update the list
stat[character] = stat.get(character,0) + 1
# return statistics array
return stat
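
# e.g. charStat("abca") returns {'a': 2, 'b': 1, 'c': 1}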
# count number of program parameters
numPara = len(sys.argv)
if numPara < 2:
print ("invalid number of parameters: 1 filename required.")
print ("call for output on-screen: python %s " % sys.argv[0])
print ("call for file output: python %s > statistics.csv" % sys.argv[0])
print ("Exiting.")
sys.exit(2)
# read name of the datafile
textfileName = sys.argv[1]
# print ("reading text from", textfileName, "...")
bytes = min(32, os.path.getsize(textfileName))
raw = open(textfileName, 'rb').read(bytes)
if raw.startswith(codecs.BOM_UTF8):
encoding = 'utf-8-sig'
else:
result = chardet.detect(raw)
encoding = result['encoding']
# open file for reading
fileHandle = open(textfileName, "r", encoding=encoding)
# read content
data = fileHandle.read()
# close file
fileHandle.close()
# calculate the character statistics
statistics = charStat(data)
# retrieve the single items
items = statistics.items()
# print ("sorting by character ...")
# sort the items
sortedItems = sorted(items)
lines = []
# output sorted list as CSV data
for singleItem in sortedItems:
lines.append(str(singleItem[0]) + "," + singleItem[1])
#print ("%s,%i" % (singleItem[0], singleItem[1]))
# open file for writing
fileHandle = open("s.txt", "w", encoding=encoding)
# write content
data = fileHandle.writelines(lines)
# close file
fileHandle.close()
|
mmechelke/bayesian_xfel | bxfel/core/structure_factor.py | Python | mit | 18,608 | 0.010963 |
import numpy as np
import scipy
import re
import os
import hashlib
import csb
from csb.bio.io.wwpdb import StructureParser
def chunks(l, n):
""" Yield successive n-sized chunks from l.
"""
for i in xrange(0, len(l), n):
yield l[i:i+n]
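
# e.g. list(chunks(range(5), 2)) -> [[0, 1], [2, 3], [4]]
# (this module targets Python 2, where range returns a list)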
class ScatteringFactor(object):
"""
Cacluates the density in reciprocal space as
F(s) = sum_m f_m(s) exp(-B_m s**2 / 4) exp(i*2pi*s*r)
where f_m(s) is approximated by four Gaussian distributions
and exp(-B_m s**2 / 4) are the thermal fluctuations
g_m(s) = f_m(s) * exp(-B_m s**2 / 4) are precomputed
"""
def __init__(self, structure=None):
if structure is None:
self._atoms = list()
self._bfactor = list()
self._seq = list()
self._elements = list()
else:
self._structure = structure
# For now only non hydrogen atoms
# TODO use hydrogens as well
self._atoms = []
for chain in structure:
for residue in structure[chain]:
for atom in residue:
a = residue[atom]
if not a.name.startswith("H"):
self._atoms.append(residue[atom])
self._seq = []
self._bfactor = []
self._elements = []
for atom in self._atoms:
self._seq.append(atom.element.name)
self._elements.append(atom.element.name)
if atom._bfactor is None:
self._bfactor.append(1.)
else:
self._bfactor.append(atom._bfactor)
self._seq = np.array(self._seq)
self._elements = set(self._elements)
self._bfactor = np.clip(self._bfactor, 1., 100.)
self._atom_type_params = {}
self._read_sf(fn=os.path.expanduser("~/projects/xfel/py/xfel/core/atomsf.lib"))
@classmethod
def from_isd(cls, universe):
obj = cls()
atoms = universe.atoms
for atom in atoms:
element = str(atom.properties['element'].name)
obj._elements.append(element)
obj._atoms.append(atom)
obj._seq.append(element)
try:
obj._bfactor.append(max(1.,atom.properties['bfactor']))
except KeyError:
obj._bfactor.append(1.)
obj._seq = np.array(obj._seq)
obj._bfactor = np.array(obj._bfactor)
obj._elements = set(obj._elements)
obj._bfactor = np.clip(obj._bfactor, 1., 100.)
return obj
def _read_sf(self, fn):
"""
        Reads the coefficients for the analytical approximation
        to scattering factors from the CCP4 database
"""
        float_pattern = r'[-+]?[0-9]*\.?[0-9]+(?:[eE][-+]?[0-9]+)?'
        atom_pattern = r'[A-Za-z0-9-+]+'
line_pattern = ("({0})\s+({1})"
"\s+({1})\s+({1})"
"\s+({1})\s+({1})"
"\s+({1})\s+({1})"
"\s+({1})\s+({1})").format(atom_pattern,float_pattern)
regex = re.compile(line_pattern)
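        # Each data line is expected to carry an atom label followed by nine
        # Cromer-Mann style coefficients a1 b1 a2 b2 a3 b3 a4 b4 c, e.g.
        # (values illustrative only):
        #   C  2.3100 20.8439  1.0200 10.2075  1.5886  0.5687  0.8650 51.6512  0.2156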
with open(fn) as file_handle:
for line in file_handle:
if line.startswith("#"):
continue
m = regex.match(line)
atom_name = m.groups()[0]
a1, a2, a3, a4 = m.groups()[1], m.groups()[3], m.groups()[5], m.groups()[7]
b1, b2, b3, b4 = m.groups()[2], m.groups()[4], m.groups()[6], m.groups()[8]
c = m.groups()[9]
a = np.array([a1,a2,a3,a4],np.double)
b = np.array([b1,b2,b3,b4],np.double)
self._atom_type_params[atom_name] = (a,b,float(c))
def _calculate_gm(self, hkl):
"""
        calculates the product of the scattering factor and
debye-waller factors
"""
f = np.zeros((len(self._atoms), hkl.shape[0]))
seq = self._seq
bfactor = self._bfactor
s_tols = 0.25 * (hkl**2).sum(-1)
for atom_type in self._elements:
a,b,c = self._atom_type_params[atom_type]
indices = np.where(seq==atom_type)[0]
fx = c + np.dot(np.exp(np.outer(-s_tols,b)),a)
f[indices,:] = fx[:]
f *= np.exp(np.outer(-bfactor,s_tols))
return f
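    # Illustrative assembly of F(s) from g_m (a sketch; X is assumed to be an
    # (n_atoms, 3) array of positions in units matching hkl):
    #   g = self._calculate_gm(hkl)                        # (n_atoms, n_hkl)
    #   F = (g * np.exp(2j * np.pi * np.dot(X, hkl.T))).sum(0)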
def _calculate_gm_grad(self, hkl):
"""
calculate the gradien of the scattering factor and
debye-waller factor
"""
seq = np.array([a.element.name for a in self._atoms])
f = np.zeros((len(self._atoms), hkl.shape[0]))
dfg = np.zeros((len(self._atoms), hkl.shape[0], 3))
bfactors = np.array([a.bfactor for a in self._atoms])
bfactors = np.clip(bfactors, 1., 100.)
s_tols = 0.25 * (hkl**2).sum(-1)
for atom_type in self._elements:
a,b,c = self._atom_type_params[atom_type]
indices = np.where(seq==atom_type)[0]
bfactor = bfactors[indices]
g = np.exp(np.outer(-s_tols,b))
sf = np.dot(g, a) + c
gsf = np.sum(g * a[np.newaxis,:] * b[np.newaxis,:] * -0.5, -1)
dwf = np.exp(-np.outer(bfactor, s_tols))
gdwf = dwf * (bfactor * - 0.5)[:,np.newaxis]
grad = sf * gdwf + gsf * dwf
f[indices,:] = dwf * sf
dfg[indices,:,:] = grad[:,:,np.newaxis] * hkl
return dfg, f
def _calculate_scattering_factors(self, hkl):
"""
        creates an approximation of the density in reciprocal space with
        four Gaussians and returns the scattering factors
"""
seq = self._seq
bfactor = self._bfactor
f = np.zeros((len(self._atoms), hkl.shape[0]))
s_tols = 0.25 * (hkl**2).sum(-1)
for atom_typ | e in self._elements:
a,b,c = self._atom_type_params[atom_type]
indices = np.where(seq==a | tom_type)[0]
fx = c + np.dot(np.exp(np.outer(-s_tols,b)),a)
f[indices,:] = fx[:]
return f
def _calculate_debyewaller_factors(self, hkl):
"""
"""
b = np.array(self._bfactor)
s_tols = 0.25 * (hkl**2).sum(-1)
t = np.exp(np.outer(-b,s_tols))
return t
def grad_s(self, X, hkl):
"""
Gradient with respect to the reciprocal space coordinates
@param X: atomic positions
@param hkl: reciprocal space positions
"""
seq = np.array([atom.element.name for atom in self._atoms])
bfactor = np.array([atom.bfactor for atom in self._atoms])
bfactor = np.clip(bfactor, 1., 100.)
s_tols = 0.25 * (hkl**2).sum(-1)
dw_factors = np.exp(np.outer(-bfactor, s_tols))
def grad_hkl(self, X, hkl):
seq = self._seq
bfactor = self._bfactor
bfactor = np.clip(bfactor, 1., 100.)
dg = np.zeros((len(self._atoms), hkl.shape[0], hkl.shape[1]))
g = np.zeros((len(self._atoms), hkl.shape[0]))
s_tols = 0.25 * (hkl**2).sum(-1)
dw_factors = np.exp(np.outer(-bfactor, s_tols))
ddw_factors = bfactor[:,np.newaxis] * dw_factors
for atom_type in self._elements:
a,b,c = self._atom_type_params[atom_type]
indices = np.where(seq==atom_type)[0]
inner_exp = np.exp(np.outer(-s_tols,b))
sf = np.dot(inner_exp, a) + c
dsf = np.dot(inner_exp, a*b)
gx = dsf * dw_factors[indices] + sf * ddw_factors[indices]
g[indices,:] = sf[:] * dw_factors[indices]
a = np.einsum('ab,bc->abc',gx, -0.5*hkl)
dg[indices,:,:] = a
phase = np.dot((2 * np.pi * X),hkl.T)
fx= np.sum(g * np.exp(1j * phase),0)
g2 = np.einsum('ba,bc->bac',g , 2 * np.pi * 1j *X)
dfx = np.einsum("abc,ab->bc",dg + g2,np.exp(1j * phase))
return dfx, fx
def calculate_structure_factors(self, X, hkl):
"""
TODO do this calculation in chunks to save space
|
cloudControl/pycclib | pycclib/cclib.py | Python | apache-2.0 | 33,766 | 0.000829 | # -*- coding: utf-8 -*-
"""
pycclib
library for accessing the cloudControl API using Python
Copyright 2010 cloudControl UG (haftungsbeschraenkt)
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
### basic usage example
# from pycclib.cclib import *
#
# api = API()
# api.create_token(email='[email protected]', password='secretpassword')
#
# apps = api.read_apps()
"""
import base64
from urlparse import urlparse
import calendar
import urllib
# python versions below 2.6 do not have json included we need simplejson then
try:
import json
except ImportError:
import simplejson as json
import time
from urllib import urlencode
import socket
from decimal import Decimal
import certifi
import httplib2
from pycclib.version import __version__
# We avoid the potential risk of somebody relying on the deprecated apiurl.py
# by raising an exception to make sure nobody talks to the wrong API due to
# our backwards incompatible change.
try:
from pycclib import apiurl
except ImportError:
pass
else:
raise Exception('Use of apiurl.py is deprecated. Set pycclib.API_URL instead.')
__all__ = ['API', 'UnauthorizedError', 'ConnectionException',
'TokenRequiredError', 'BadRequestError', 'ForbiddenError',
'ConflictDuplicateError', 'GoneError', 'InternalServerError',
'NotImplementedError', 'ThrottledError']
API_URL = 'https://api.cloudcontrolled.com'
DISABLE_SSL_CHECK = False
CA_CERTS = None
CACHE = None
# Set debug to 1 to enable debugging
DEBUG = 0
VERSION = __version__
class API():
"""
The API class contains all methods to access the cloudControl RESTful
API.
It wraps the HTTP requests to resources in convenient methods and also
takes care of authenticating each request with a token, if needed.
The create_token, check_token, get_token and set_token methods can be
used to work with the token from outside the API class. This might be
useful when it is not intended to ask users for their email and
password for new instances of the API class.
To instantiate API with a predefined token use something like:
# token = json.loads('{"token": "A2wY7qgUNM5eTRM3Lz6D4RZHuGmYPP"}')
# api = API(token=token)
"""
_token = None
request = None
cache = None
def __init__(self, token=None, url=None, token_source_url=None, register_addon_url=None, encode_email=False):
self.set_token(token)
api_url = url or API_URL
self.request = _Request(url=api_url)
self.token_source_url = token_source_url or api_url + '/token/'
self.ssh_token_source_url = api_url + '/token/'
if token:
self.request.set_token_authorization_header(token)
self.register_addon_url = register_addon_url or api_url
self.encode_email = encode_email
def check_versions(self):
version_request = _Request(url=self.request.url)
content = version_request.get('/.meta/version/')
return json.loads(content)
def requires_token(self):
"""
requires_token checks that methods that require
a token can't be called without a token.
If check_token doesn't return True a TokenRequiredError exception
is raised telling the caller to use the create_token method to get
a valid token.
"""
if not self.check_token():
raise TokenRequiredError
def create_token(self, email, password):
"""
Sends token creation request to API using basic auth - for backwards compatibility
"""
return self.create_token_basic_auth(email, password)
def create_token_basic_auth(self, email, password):
"""
Sends token creation request to API using basic auth
"""
token_request = _Request(url=self.token_source_url)
token_request.set_basic_authorization_header(email, password, self.encode_email)
return self.token_request(token_request)
def create_token_ssh_auth(self, email, ssh_token, signature, fingerprint):
"""
Sends token creation request to API using ssh auth
"""
token_request = _Request(url=self.ssh_token_source_url)
token_request.set_sshtoken_authorization_header(email, ssh_token, signature, fingerprint)
return self.token_request(token_request)
def token_request(self, token_request):
content = token_request.request('', 'POST')
token = json.loads(content)
self.set_token(token)
self.request.set_token_authorization_header(token)
return True
def create_ssh_token(self):
try:
token_request = _Request(url=self.ssh_token_source_url)
token_request.request('', 'POST')
raise APIException('Expected UnauthorizedError has not been raised')
except UnauthorizedError as e:
result = httplib2._parse_www_authenticate(e.response)
try:
ssh_token = result['ccssh']['sshtoken']
            except (KeyError, TypeError):
raise APIException('SSH token was not created')
if not ssh_token:
raise APIException('Empty SSH token.')
return ssh_token
def check_token(self):
"""
This method checks if there's a token.
"""
if self.request.token:
return True
return False
def set_token(self, token):
"""
We use set_token to set the token.
"""
self._token = token
def get_token(self):
"""
W | e use get_token to get the token.
"""
return self._token
def create_app(self, app_name, type, repository_type, buildpack_url=None):
"""
Create a new application and return it.
"""
self.re | quires_token()
resource = '/app/'
data = {'name': app_name,
'type': type,
'repository_type': repository_type}
if buildpack_url:
data['buildpack_url'] = buildpack_url
content = self.request.post(resource, data)
return json.loads(content)
def read_apps(self):
"""
Returns a list of applications.
"""
self.requires_token()
resource = '/app/'
content = self.request.get(resource)
return json.loads(content)
def read_app(self, app_name):
"""
Returns all application details.
"""
self.requires_token()
resource = '/app/%s/' % app_name
content = self.request.get(resource)
return json.loads(content)
def delete_app(self, app_name):
"""
Delete a application.
"""
self.requires_token()
resource = '/app/%s/' % app_name
self.request.delete(resource)
return True
def create_deployment(self, app_name, deployment_name='', stack=None):
"""
Create a new deployment.
deployment_name is optional
"""
self.requires_token()
resource = '/app/%s/deployment/' % app_name
data = {}
if deployment_name:
data['name'] = deployment_name
if stack:
data['stack'] = stack
content = self.request.post(resource, data)
return json.loads(content)
def read_deployment(self, app_name, deployment_name):
"""
Returns all deployment details.
"""
self.requires_token()
resource = '/app/%s/deployment/%s/' % (app_name, deployment_name)
content = self.request.get(resource)
|
alevnyaa/restfulstorch | rstorch/migrations/0002_auto_20170302_1722.py | Python | gpl-3.0 | 602 | 0 | # -* | - coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-03-02 14:22
from __future__ i | mport unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('rstorch', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='category',
name='is_active',
field=models.BooleanField(default=False),
),
migrations.AddField(
model_name='store',
name='is_active',
field=models.BooleanField(default=False),
),
]
|
ibc/MediaSoup | worker/deps/gyp/test/configurations/target_platform/gyptest-target_platform.py | Python | isc | 1,114 | 0.002693 | #!/usr/bin/env python
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Tests the msvs specific msvs_target_platform option. |
"""
import TestGyp
import TestCommon
def RunX64(exe, stdout):
try:
test.run_built_executable(exe, stdout=stdout)
except WindowsError as e:
# Assume the exe is 64-bit if it can't load on 32-bit systems.
# Both versions of the error are required because different versions
# of python seem to return different | errors for invalid exe type.
if e.errno != 193 and '[Error 193]' not in str(e):
raise
test = TestGyp.TestGyp(formats=['msvs'])
test.run_gyp('configurations.gyp')
test.set_configuration('Debug|x64')
test.build('configurations.gyp', rebuild=True)
RunX64('front_left', stdout=('left\n'))
RunX64('front_right', stdout=('right\n'))
test.set_configuration('Debug|Win32')
test.build('configurations.gyp', rebuild=True)
RunX64('front_left', stdout=('left\n'))
test.run_built_executable('front_right', stdout=('right\n'))
test.pass_test()
|
googleads/google-ads-python | google/ads/googleads/v9/services/types/customer_service.py | Python | apache-2.0 | 7,422 | 0.000404 | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
from google.ads.googleads.v9.enums.types import access_role as gage_access_role
from google.ads.googleads.v9.enums.types import (
response_content_type as gage_response_content_type,
)
from google.ads.googleads.v9.resources.types import customer as gagr_customer
from google.protobuf import field_mask_pb2 # type: ignore
__protobuf__ = proto.module(
package="google.ads.googleads.v9.services",
marshal="google.ads.googleads.v9",
manifest={
"GetCustomerRequest",
"MutateCustomerRequest",
"CreateCustomerClientRequest",
"CustomerOperation",
"CreateCustomerClientResponse",
"MutateCustomerResponse",
"MutateCustomerResult",
"ListAccessibleCustomersRequest",
"ListAccessibleCustomersResponse",
},
)
class GetCustomerRequest(proto.Message):
r"""Request message for
[CustomerService.GetCustomer][google.ads.googleads.v9.services.CustomerService.GetCustomer].
Attributes:
resource_name (str):
Required. The resource name of the customer
to fetch.
"""
resource_name = proto.Field(proto.STRING, number=1,)
class MutateCustomerRequest(proto.Message):
r"""Request message for
[CustomerService.MutateCustomer][google.ads.googleads.v9.services.CustomerService.MutateCustomer].
Attributes:
customer_id (str):
Required. The ID of the customer being
modified.
operation (google.ads.googleads.v9.services.types.CustomerOperation):
Required. The operation to perform on the
customer
validate_only (bool):
If true, the request is validated but not
executed. Only errors are returned, not results.
respons | e_content_type (google.ads.googleads.v9.enums.types.ResponseContentTypeEnum.ResponseContentType):
The response cont | ent type setting. Determines
whether the mutable resource or just the
resource name should be returned post mutation.
"""
customer_id = proto.Field(proto.STRING, number=1,)
operation = proto.Field(
proto.MESSAGE, number=4, message="CustomerOperation",
)
validate_only = proto.Field(proto.BOOL, number=5,)
response_content_type = proto.Field(
proto.ENUM,
number=6,
enum=gage_response_content_type.ResponseContentTypeEnum.ResponseContentType,
)
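# Illustrative construction (a sketch; the identifier values are hypothetical):
#   request = MutateCustomerRequest(
#       customer_id="1234567890",
#       operation=CustomerOperation(update=customer, update_mask=mask),
#       validate_only=True,
#   )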
class CreateCustomerClientRequest(proto.Message):
r"""Request message for
[CustomerService.CreateCustomerClient][google.ads.googleads.v9.services.CustomerService.CreateCustomerClient].
Attributes:
customer_id (str):
Required. The ID of the Manager under whom
client customer is being created.
customer_client (google.ads.googleads.v9.resources.types.Customer):
Required. The new client customer to create.
The resource name on this customer will be
ignored.
email_address (str):
Email address of the user who should be
invited on the created client customer.
Accessible only to customers on the allow-list.
This field is a member of `oneof`_ ``_email_address``.
access_role (google.ads.googleads.v9.enums.types.AccessRoleEnum.AccessRole):
The proposed role of user on the created
client customer. Accessible only to customers on
the allow-list.
validate_only (bool):
If true, the request is validated but not
executed. Only errors are returned, not results.
"""
customer_id = proto.Field(proto.STRING, number=1,)
customer_client = proto.Field(
proto.MESSAGE, number=2, message=gagr_customer.Customer,
)
email_address = proto.Field(proto.STRING, number=5, optional=True,)
access_role = proto.Field(
proto.ENUM, number=4, enum=gage_access_role.AccessRoleEnum.AccessRole,
)
validate_only = proto.Field(proto.BOOL, number=6,)
class CustomerOperation(proto.Message):
r"""A single update on a customer.
Attributes:
update (google.ads.googleads.v9.resources.types.Customer):
Mutate operation. Only updates are supported
for customer.
update_mask (google.protobuf.field_mask_pb2.FieldMask):
FieldMask that determines which resource
fields are modified in an update.
"""
update = proto.Field(
proto.MESSAGE, number=1, message=gagr_customer.Customer,
)
update_mask = proto.Field(
proto.MESSAGE, number=2, message=field_mask_pb2.FieldMask,
)
class CreateCustomerClientResponse(proto.Message):
r"""Response message for CreateCustomerClient mutate.
Attributes:
resource_name (str):
The resource name of the newly created
customer client.
invitation_link (str):
Link for inviting user to access the created
customer. Accessible to allowlisted customers
only.
"""
resource_name = proto.Field(proto.STRING, number=2,)
invitation_link = proto.Field(proto.STRING, number=3,)
class MutateCustomerResponse(proto.Message):
r"""Response message for customer mutate.
Attributes:
result (google.ads.googleads.v9.services.types.MutateCustomerResult):
Result for the mutate.
"""
result = proto.Field(
proto.MESSAGE, number=2, message="MutateCustomerResult",
)
class MutateCustomerResult(proto.Message):
r"""The result for the customer mutate.
Attributes:
resource_name (str):
Returned for successful operations.
customer (google.ads.googleads.v9.resources.types.Customer):
The mutated customer with only mutable fields after mutate.
The fields will only be returned when response_content_type
is set to "MUTABLE_RESOURCE".
"""
resource_name = proto.Field(proto.STRING, number=1,)
customer = proto.Field(
proto.MESSAGE, number=2, message=gagr_customer.Customer,
)
class ListAccessibleCustomersRequest(proto.Message):
r"""Request message for
[CustomerService.ListAccessibleCustomers][google.ads.googleads.v9.services.CustomerService.ListAccessibleCustomers].
"""
class ListAccessibleCustomersResponse(proto.Message):
r"""Response message for
[CustomerService.ListAccessibleCustomers][google.ads.googleads.v9.services.CustomerService.ListAccessibleCustomers].
Attributes:
resource_names (Sequence[str]):
Resource name of customers directly
accessible by the user authenticating the call.
"""
resource_names = proto.RepeatedField(proto.STRING, number=1,)
__all__ = tuple(sorted(__protobuf__.manifest))
|
pavels/pootle | pootle/apps/pootle_app/project_tree.py | Python | gpl-3.0 | 16,304 | 0.000184 | # -*- coding: utf-8 -*-
#
# Copyright (C) Pootle contributors.
#
# This file is a part of the Pootle project. It is distributed under the GPL3
# or later license. See the LICENSE file for a copy of the license and the
# AUTHORS file for copyright and authorship information.
import errno
import logging
import os
import re
from django.conf import settings
from pootle.core.log import STORE_RESURRECTED, store_log
from pootle.core.utils.timezone import datetime_min
from pootle_app.models.directory import Directory
from pootle_language.models import Language
from pootle_store.models import Store
from pootle_store.util import absolute_real_path, relative_real_path
#: Case insensitive match for language codes
LANGCODE_RE = re.compile('^[a-z]{2,3}([_-][a-z]{2,3})?(@[a-z0-9]+)?$',
re.IGNORECASE)
#: Case insensitive match for language codes as postfix
LANGCODE_POSTFIX_RE = re.compile(
'^.*?[-_.]([a-z]{2,3}([_-][a-z]{2,3})?(@[a-z0-9]+)?)$', re.IGNORECASE)
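# Examples: 'pt', 'pt_BR' and 'sr@latin' match LANGCODE_RE; the postfix
# variant also extracts the code from names such as 'project-pt_BR'.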
def direct_language_match_filename(language_code, path_name):
name, ext = os.path.splitext(os.path.basename(path_name))
if name == language_code or name.lower() == language_code.lower():
return True
# Check file doesn't match another language.
if Language.objects.filter(code__iexact=name).count():
return False
detect = LANGCODE_POSTFIX_RE.split(name)
return (len(detect) > 1 and
(detect[1] == language_code or
detect[1].lower() == language_code.lower()))
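# Example: direct_language_match_filename('pt_BR', 'po/foo-pt_BR.po') matches
# via the postfix rule, provided no language is registered as 'foo-pt_br'.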
def match_template_filename(project, filename):
"""Test if :param:`filename` might point at a template file for a given
:param:`project`.
"""
name, ext = os.path.splitext(os.path.basename(filename))
# FIXME: is the test for matching extension redundant?
if ext == os.path.extsep + project.get_template_filetype():
if ext != os.path.extsep + project.localfiletype:
# Template extension is distinct, surely file is a template.
return True
elif not find_lang_postfix(filename):
# File name can't possibly match any language, assume it is a
# template.
return True
return False
def get_matching_language_dirs(project_dir, language):
return [lang_dir for lang_dir in os.listdir(project_dir)
if language.code == lang_dir]
def get_non_existant_language_dir(project_dir, language, file_style,
make_dirs):
if file_style == "gnu":
return project_dir
elif make_dirs:
language_dir = os.path.join(project_dir, language.code)
os.mkdir(language_dir)
return language_dir
else:
raise IndexError("Directory not found for language %s, project %s" %
(language.code, project_dir))
def get_or_make_language_dir(project_dir, language, file_style, make_dirs):
matching_language_dirs = get_matching_language_dirs(project_dir, language)
if len(matching_language_dirs) == 0:
# If no matching directories can be found, check if it is a GNU-style
# project.
return get_non_existant_language_dir(project_dir, language, file_style,
make_dirs)
else:
return os.path.join(project_dir, matching_language_dirs[0])
def get_language_dir(project_dir, language, file_style, make_dirs):
language_dir = os.path.join(project_dir, language.code)
if not os.path.exists(language_dir):
return get_or_make_language_dir(project_dir, language, file_style,
make_dirs)
else:
return language_dir
def get_translation_project_dir(language, project_dir, file_style,
make_dirs=False):
"""Returns the base directory containing translations files for the
project.
:param make_dirs: if ``True``, project and language directories will be
created as necessary.
"""
if file_style == 'gnu':
return project_dir
else:
return get_language_dir(project_dir, language, file_style, make_dirs)
def is_hidden_file(path):
return path[0] == '.'
def split_files_and_dirs(ignored_files, ext, real_dir, file_filter):
files = []
dirs = []
for child_path in [child_path for child_path in os.listdir(real_dir)
if child_path not in ignored_files and
not is_hidden_file(child_path)]:
full_child_path = os.path.join(real_dir, child_path)
if (os.path.isfile(full_child_path) and
full_child_path.endswith(ext) and file_filter(full_child_path)):
files.append(child_path)
elif os.path.isdir(full_child_path):
dirs.append(child_path)
return files, dirs
def add_items(fs_items_set, db_items, create_or_resurrect_db_item, parent):
"""Add/make obsolete the database items to correspond to the filesystem.
:param fs_items_set: items (dirs, files) currently in the filesystem
:param db_items: dict (name, item) of items (dirs, stores) currently in the
database
:create_or_resurrect_db_item: callable that will create a new db item
or resurrect an obsolete db item with a given name and parent.
:parent: parent db directory for the items
:return: list of all items, list of newly added items
:rtype: tuple
"""
items = []
new_items = []
db_items_set = set(db_items)
items_to_delete = db_items_set - fs_items_set
items_to_create = fs_items_set - db_items_set
for name in items_to_delete:
db_items[name].makeobsolete()
if len(items_to_delete) > 0:
parent.update_all_cache()
for vfolder_treeitem in parent.vfolder_treeitems:
vfolder_treeitem.update_all_cache()
for name in db_items_set - items_to_delete:
items.append(db_items[name])
for name in items_to_create:
item = create_or_resurrect_db_item(name)
items.append(item)
new_items.append(item)
try:
item.save()
except Exception:
logging.exception('Error while adding %s', item)
return items, new_items
def create_or_resurrect_store(file, parent, name, translation_project):
"""Create or resurrect a store db item with given name and parent."""
try:
store = Store.objects.get(parent=parent, name=name)
store.obsolete = False
store.file_mtime = datetime_min
if store.last_sync_revision is None:
store.last_sync_revision = store.get_max_unit_revision()
store_log(user='system', action=STORE_RESURRECTED,
path=store.pootle_path, store=store.id)
except Store.DoesNotExist:
store = Store(file=file, parent=parent,
name=name, translation_project=translation_project)
store.mark_all_dirty()
return store
def create_or_resurrect_dir(name, parent):
"""Create or resurrect a directory db item with given name and parent."""
try:
dir = Directory.objects.get(parent=parent, name=name)
dir.obsolete = False
except Directory.DoesNotExist:
dir = Directory(name=name, parent=parent)
dir.mark_all_dirty()
return dir
# TODO: rename function or even rewrite it
def add_files(translation_project, ignored_files, ext, relative_dir, db_dir,
file_filter=lambda _x: True):
podir_path = to_podir_ | path(relative_dir)
files, dirs = split_files_and_dirs(ignored_files, ext, podir_path,
file_filter)
file_set = set(files)
dir_set = set(dirs)
existing_stores = dict((store.name, store) for store in
db_dir.child_stores.live().exclude(file='')
| .iterator())
existing_dirs = dict((dir.name, dir) for dir in
db_dir.child_dirs.live().iterator())
files, new_files = add_items(
file_set,
existing_stores,
lambda name: create_or_resurrect_store(
file=os.path.join(relative_dir, name),
parent=db_dir,
|
mdkennedy3/Scarab_DCT_Control | catkin_ws/build/Scarabs/scarab/scarab_quad/catkin_generated/pkg.installspace.context.pc.py | Python | gpl-2.0 | 422 | 0.00237 | # generated from catkin/cmake/template/pkg.context.pc.in
CATKIN_PACKAGE_PREFIX = ""
PROJECT_PKG_CONFIG_INCLUDE_DIRS = "".split(';') if "" != "" else []
P | ROJECT_CATKIN_DEPENDS = "roscpp;rospy;std_msgs".replace(';', ' ')
PKG_CONFIG_LIBRARIES_WITH_PREFIX = "-lscarab_quad".split(';') if "-lscarab_quad" != "" else []
PROJECT_N | AME = "scarab_quad"
PROJECT_SPACE_DIR = "/home/monroe/catkin_ws/install"
PROJECT_VERSION = "0.0.0"
|
dominikkowalski/django-powerdns-dnssec | dnsaas/api/urls.py | Python | bsd-2-clause | 246 | 0 | from django.conf.urls import include, url
from powerdns.utils import patterns
urlpatterns = patterns(
'',
url(r'', include('dnsaas.api.v1.urls', nam | espace='default')) | ,
url(r'^v2/', include('dnsaas.api.v2.urls', namespace='v2')),
)
|
jiasir/playback | playback/cli/nova_compute.py | Python | mit | 4,590 | 0.003922 | import sys
import logging
from playback.api import NovaCompute
from cliff.command import Command
def make_target(args):
try:
target = NovaCompute(user=args.user, hosts=args.hosts.split(','), key_filename=args.key_filename,
password=args.password)
except AttributeError:
        sys.stderr.write('No hosts found. Please use the --hosts param.')
sys.exit(1)
return target
def install(args):
target = make_target(args)
target.install(args.my_ip, args.rabbit_hosts, args.rabbit_user, args.rabbit_pass,
args.auth_uri, args.auth_url, args.nova_pass, args.novncproxy_base_url,
args.glance_api_servers, args.neutron_endpoint, args.neutron_pass, args.rbd_secret_uuid,
args.memcached_servers)
class Install(Command):
"""install nova compute"""
log = logging.getLogger(__name__)
def get_parser(self, prog_name):
parser = super(Install, self).get_parser(prog_name)
parser.add_argument('--user',
help='the username to connect to the remote host',
action='store', default='ubuntu', dest='user')
parser.add_argument('--hosts',
help='the remote host to connect to ',
action='store', default=None, dest='hosts')
parser.add_argument('-i', '--key-filename',
help='referencing file paths to SSH key files to try when connecting',
action='store', dest='key_filename', default=None)
parser.add_argument('--password',
help='the password used by the SSH layer when connecting to remote hosts',
action='store', dest='password', default=None)
parser.add_argument('--my-ip',
help='the host management ip',
action='store', default=None, dest='my_ip')
parser.add_argument('- | -rabbit-hosts',
help='rabbit hosts e.g. CONTROLLER1,CONTROLLER2',
acti | on='store', default=None, dest='rabbit_hosts')
parser.add_argument('--rabbit-user',
help='the user for rabbit, default openstack',
action='store', default='openstack', dest='rabbit_user')
parser.add_argument('--rabbit-pass',
help='the password for rabbit openstack user', action='store',
default=None, dest='rabbit_pass')
parser.add_argument('--auth-uri',
help='keystone internal endpoint e.g. http://CONTROLLER_VIP:5000',
action='store', default=None, dest='auth_uri')
parser.add_argument('--auth-url',
help='keystone admin endpoint e.g. http://CONTROLLER_VIP:35357',
action='store', default=None, dest='auth_url')
parser.add_argument('--nova-pass',
help='passowrd for nova user',
action='store', default=None, dest='nova_pass')
parser.add_argument('--novncproxy-base-url',
help='nova vnc proxy base url e.g. http://CONTROLLER_VIP:6080/vnc_auto.html',
action='store', default=None, dest='novncproxy_base_url')
parser.add_argument('--glance-api-servers',
help='glance host e.g. http://CONTROLLER_VIP:9292',
action='store', default=None, dest='glance_api_servers')
parser.add_argument('--neutron-endpoint',
help='neutron endpoint e.g. http://CONTROLLER_VIP:9696',
action='store', default=None, dest='neutron_endpoint')
parser.add_argument('--neutron-pass',
help='the password for neutron user',
action='store', default=None, dest='neutron_pass')
parser.add_argument('--rbd-secret-uuid',
help='ceph rbd secret for nova libvirt',
action='store', default=None, dest='rbd_secret_uuid')
parser.add_argument('--memcached-servers',
help='memcached servers e.g. CONTROLLER1:11211,CONTROLLER2:11211',
action='store', default=None, dest='memcached_servers')
return parser
def take_action(self, parsed_args):
install(parsed_args)
|
fretsonfire/fof-python | src/GameEngine.py | Python | mit | 14,664 | 0.018344 | #####################################################################
# -*- coding: iso-8859-1 -*- #
# #
# Frets on Fire #
# Copyright (C) 2006 Sami Kyöstilä #
# #
# This program is free software; you can redistribute it and/or #
# modify it under the terms of the GNU General Public License #
# as published by the Free Software Foundation; either version 2 #
# of the License, or (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with this program; if not, write to the Free Software #
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, #
# MA 02110-1301, USA. #
#####################################################################
from OpenGL.GL import *
import pygame
import os
import sys
from Engine import Engine, Task
from Video import Video
from Audio import Audio
from View import View
from Input import Input, KeyListener, SystemEventListener
from Resource import Resource
from Data import Data
from Server import Server
from Session import ClientSession
from Svg import SvgContext, SvgDrawing, LOW_QUALITY, NORMAL_QUALITY, HIGH_QUALITY
from Debug import DebugLayer
from Language import _
import Network
import Log
import Config
import Dialogs
import Theme
import Version
import Mod
# define configuration keys
Config.define("engine", "tickrate", float, 1.0)
Config.define("engine", "highpriority", bool, True)
Config.define("game", "uploadscores", bool, False, text = _("Upload Highscores"), options = {False: _("No"), True: _("Yes")})
Config.define("game", "uploadurl", str, "http://fretsonfire.sourceforge.net/play")
Config.define("game", "leftymode", bool, False, text = _("Lefty mode"), options = {False: _("No"), True: _("Yes")})
Config.define("game", "tapping", bool, True, text = _("Tappable notes"), options = {False: _("No"), True: _("Yes")})
Config.define("game", "compactlist", bool, False, text = _("Compact song list"), options = {False: _("No"), True: _("Yes")})
Config.define("game", "autopreview", bool, True, text = _("Song auto preview"), options = {False: _("No"), True: _("Yes")})
Config.define("game", "artistsort", bool, False, text = _("Sort by artist"), options = {False: _("No"), True: _("Yes")})
Config.define("video", "fullscreen", bool, False, text = _("Fullscreen Mode"), options = {False: _("No"), True: _("Yes")})
Config.define("video", "multisamples", int, 4, text = _("Antialiasing Quality"), options = {0: _("None"), 2: _("2x"), 4: _("4x"), 6: _("6x"), 8: _("8x")})
Config.define("video", "resolution", str, "640x480")
Config.define("video", "fps", int, 80, text = _("Frames per Second"), options = dict([(n, n) for n in range(1, 120)]))
#Config.define("opengl", "svgquality", int, NORMAL_QUALITY, text = _("SVG Quality"), options = {LOW_QUALITY: _("Low"), NORMAL_QUALITY: _("Normal"), HIGH_QUALITY: _("High")})
Config.define("audio", "frequency", int, 44100, text = _("Sample Frequency"), options = [8000, 11025, 22050, 32000, | 44100, 48000])
Config.define("aud | io", "bits", int, 16, text = _("Sample Bits"), options = [16, 8])
Config.define("audio", "stereo", bool, True)
Config.define("audio", "buffersize", int, 2048, text = _("Buffer Size"), options = [256, 512, 1024, 2048, 4096, 8192, 16384, 32768, 65536])
Config.define("audio", "delay", int, 100, text = _("A/V delay"), options = dict([(n, n) for n in range(0, 301)]))
Config.define("audio", "screwupvol", float, 0.25, text = _("Screw Up Sounds"), options = {0.0: _("Off"), .25: _("Quiet"), .5: _("Loud"), 1.0: _("Painful")})
Config.define("audio", "guitarvol", float, 1.0, text = _("Guitar Volume"), options = dict([(n / 100.0, "%02d/10" % (n / 9)) for n in range(0, 110, 10)]))
Config.define("audio", "songvol", float, 1.0, text = _("Song Volume"), options = dict([(n / 100.0, "%02d/10" % (n / 9)) for n in range(0, 110, 10)]))
Config.define("audio", "rhythmvol", float, 1.0, text = _("Rhythm Volume"), options = dict([(n / 100.0, "%02d/10" % (n / 9)) for n in range(0, 110, 10)]))
Config.define("video", "fontscale", float, 1.0, text = _("Text scale"), options = dict([(n / 100.0, "%3d%%" % n) for n in range(50, 260, 10)]))
class FullScreenSwitcher(KeyListener):
"""
A keyboard listener that looks for special built-in key combinations,
such as the fullscreen toggle (Alt-Enter).
"""
def __init__(self, engine):
self.engine = engine
self.altStatus = False
def keyPressed(self, key, unicode):
if key == pygame.K_LALT:
self.altStatus = True
elif key == pygame.K_RETURN and self.altStatus:
if not self.engine.toggleFullscreen():
Log.error("Unable to toggle fullscreen mode.")
return True
elif key == pygame.K_d and self.altStatus:
self.engine.setDebugModeEnabled(not self.engine.isDebugModeEnabled())
return True
elif key == pygame.K_g and self.altStatus and self.engine.isDebugModeEnabled():
self.engine.debugLayer.gcDump()
return True
def keyReleased(self, key):
if key == pygame.K_LALT:
self.altStatus = False
class SystemEventHandler(SystemEventListener):
"""
A system event listener that takes care of restarting the game when needed
and reacting to screen resize events.
"""
def __init__(self, engine):
self.engine = engine
def screenResized(self, size):
self.engine.resizeScreen(size[0], size[1])
def restartRequested(self):
self.engine.restart()
def quit(self):
self.engine.quit()
class GameEngine(Engine):
"""The main game engine."""
def __init__(self, config = None):
"""
Constructor.
@param config: L{Config} instance for settings
"""
if not config:
config = Config.load()
self.config = config
fps = self.config.get("video", "fps")
tickrate = self.config.get("engine", "tickrate")
Engine.__init__(self, fps = fps, tickrate = tickrate)
pygame.init()
self.title = _("Frets on Fire")
self.restartRequested = False
self.handlingException = False
self.video = Video(self.title)
self.audio = Audio()
Log.debug("Initializing audio.")
frequency = self.config.get("audio", "frequency")
bits = self.config.get("audio", "bits")
stereo = self.config.get("audio", "stereo")
bufferSize = self.config.get("audio", "buffersize")
self.audio.pre_open(frequency = frequency, bits = bits, stereo = stereo, bufferSize = bufferSize)
pygame.init()
self.audio.open(frequency = frequency, bits = bits, stereo = stereo, bufferSize = bufferSize)
Log.debug("Initializing video.")
width, height = [int(s) for s in self.config.get("video", "resolution").split("x")]
fullscreen = self.config.get("video", "fullscreen")
multisamples = self.config.get("video", "multisamples")
self.video.setMode((width, height), fullscreen = fullscreen, multisamples = multisamples)
# Enable the high priority timer if configured
if self.config.get("engine", "highpriority"):
Log.debug("Enabling high priority timer.")
self.timer.highPriority = True
viewport = glGetIntegerv(GL_VIEWPORT)
h = viewport[3] - viewport[1]
w = viewport[2] - viewport[0]
geo |
saintdragon2/python-3-lecture-2015 | homework_checker/civil_hw_personal_list/hw_civil_list_15030011.py | Python | mit | 441 | 0.026005 | def square_of_list(some_list, n | um):
    # some_list[num] is only valid when len(some_list) > num
    if len(some_list) <= num:
        result = 0
    else:
        result = some_list[num]**num
    return result
def gap(some_list):
    if any(isinstance(x, str) for x in some_list):
        print('there is a string in the list')
    filtered_list = [x for x in some_list if isinstance(x, (int, float))]
return max(filtered_list) | - min(filtered_list)
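# Example (a sketch): gap([3, 'a', 10, 1.5]) prints the warning and returns
# 10 - 1.5 == 8.5.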
|
batxes/4Cin | SHH_WT_models/SHH_WT_models_final_output_0.1_-0.1_11000/SHH_WT_models11702.py | Python | gpl-3.0 | 17,574 | 0.025094 | import _surface
import chimera
try:
import chimera.runCommand
except:
pass
from VolumePath import markerset as ms
try:
from VolumePath import Marker_Set, Link
new_marker_set=Marker_Set
except:
from VolumePath import volume_path_dialog
d= volume_path_dialog(True)
new_marker_set= d.new_marker_set
marker_sets={}
surf_sets={}
if "particle_0 geometry" not in marker_sets:
s=new_m | arker_set('particle_0 geometry')
marker_sets["particle_0 geometry"]=s
s= marker_sets["particle_0 geome | try"]
mark=s.place_marker((11373, 1728.13, 2526.72), (0.7, 0.7, 0.7), 890.203)
if "particle_1 geometry" not in marker_sets:
s=new_marker_set('particle_1 geometry')
marker_sets["particle_1 geometry"]=s
s= marker_sets["particle_1 geometry"]
mark=s.place_marker((10259.4, 2429.11, 3723.81), (0.7, 0.7, 0.7), 792.956)
if "particle_2 geometry" not in marker_sets:
s=new_marker_set('particle_2 geometry')
marker_sets["particle_2 geometry"]=s
s= marker_sets["particle_2 geometry"]
mark=s.place_marker((8532.69, 2758.2, 2960.41), (0.7, 0.7, 0.7), 856.786)
if "particle_3 geometry" not in marker_sets:
s=new_marker_set('particle_3 geometry')
marker_sets["particle_3 geometry"]=s
s= marker_sets["particle_3 geometry"]
mark=s.place_marker((9825.94, 1135.07, 1915.57), (0.7, 0.7, 0.7), 963.679)
if "particle_4 geometry" not in marker_sets:
s=new_marker_set('particle_4 geometry')
marker_sets["particle_4 geometry"]=s
s= marker_sets["particle_4 geometry"]
mark=s.place_marker((8864.47, 1070.22, 618.553), (0.7, 0.7, 0.7), 761.442)
if "particle_5 geometry" not in marker_sets:
s=new_marker_set('particle_5 geometry')
marker_sets["particle_5 geometry"]=s
s= marker_sets["particle_5 geometry"]
mark=s.place_marker((6980.33, 2671, 607.184), (0.7, 0.7, 0.7), 961.183)
if "particle_6 geometry" not in marker_sets:
s=new_marker_set('particle_6 geometry')
marker_sets["particle_6 geometry"]=s
s= marker_sets["particle_6 geometry"]
mark=s.place_marker((6195.69, 4157.38, 303.329), (0.7, 0.7, 0.7), 753.151)
if "particle_7 geometry" not in marker_sets:
s=new_marker_set('particle_7 geometry')
marker_sets["particle_7 geometry"]=s
s= marker_sets["particle_7 geometry"]
mark=s.place_marker((6609.67, 3545.04, -210.293), (1, 0.7, 0), 1098.07)
if "particle_8 geometry" not in marker_sets:
s=new_marker_set('particle_8 geometry')
marker_sets["particle_8 geometry"]=s
s= marker_sets["particle_8 geometry"]
mark=s.place_marker((5721.24, 5834.87, 871.259), (0.7, 0.7, 0.7), 1010.42)
if "particle_9 geometry" not in marker_sets:
s=new_marker_set('particle_9 geometry')
marker_sets["particle_9 geometry"]=s
s= marker_sets["particle_9 geometry"]
mark=s.place_marker((6201.03, 7254.05, 117.531), (1, 0.7, 0), 821.043)
if "particle_10 geometry" not in marker_sets:
s=new_marker_set('particle_10 geometry')
marker_sets["particle_10 geometry"]=s
s= marker_sets["particle_10 geometry"]
mark=s.place_marker((5645.43, 8268.96, 1609.79), (0.7, 0.7, 0.7), 873.876)
if "particle_11 geometry" not in marker_sets:
s=new_marker_set('particle_11 geometry')
marker_sets["particle_11 geometry"]=s
s= marker_sets["particle_11 geometry"]
mark=s.place_marker((6394.9, 7902.04, 2465.86), (0.7, 0.7, 0.7), 625.532)
if "particle_12 geometry" not in marker_sets:
s=new_marker_set('particle_12 geometry')
marker_sets["particle_12 geometry"]=s
s= marker_sets["particle_12 geometry"]
mark=s.place_marker((7011.82, 8106.65, 3867.87), (0.7, 0.7, 0.7), 880.474)
if "particle_13 geometry" not in marker_sets:
s=new_marker_set('particle_13 geometry')
marker_sets["particle_13 geometry"]=s
s= marker_sets["particle_13 geometry"]
mark=s.place_marker((8277.34, 7554.86, 3180), (0.7, 0.7, 0.7), 659.161)
if "particle_14 geometry" not in marker_sets:
s=new_marker_set('particle_14 geometry')
marker_sets["particle_14 geometry"]=s
s= marker_sets["particle_14 geometry"]
mark=s.place_marker((10001.2, 8459.06, 4337.94), (0.7, 0.7, 0.7), 831.745)
if "particle_15 geometry" not in marker_sets:
s=new_marker_set('particle_15 geometry')
marker_sets["particle_15 geometry"]=s
s= marker_sets["particle_15 geometry"]
mark=s.place_marker((11030.8, 7840.88, 7191.3), (0.7, 0.7, 0.7), 803.065)
if "particle_16 geometry" not in marker_sets:
s=new_marker_set('particle_16 geometry')
marker_sets["particle_16 geometry"]=s
s= marker_sets["particle_16 geometry"]
mark=s.place_marker((9790.37, 6419.19, 7692.39), (0.7, 0.7, 0.7), 610.262)
if "particle_17 geometry" not in marker_sets:
s=new_marker_set('particle_17 geometry')
marker_sets["particle_17 geometry"]=s
s= marker_sets["particle_17 geometry"]
mark=s.place_marker((8981.52, 7521.64, 7751.38), (0.7, 0.7, 0.7), 741.265)
if "particle_18 geometry" not in marker_sets:
s=new_marker_set('particle_18 geometry')
marker_sets["particle_18 geometry"]=s
s= marker_sets["particle_18 geometry"]
mark=s.place_marker((8220.56, 7937.02, 6324.34), (0.7, 0.7, 0.7), 748.625)
if "particle_19 geometry" not in marker_sets:
s=new_marker_set('particle_19 geometry')
marker_sets["particle_19 geometry"]=s
s= marker_sets["particle_19 geometry"]
mark=s.place_marker((7969.62, 9244.24, 5681.34), (0.7, 0.7, 0.7), 677.181)
if "particle_20 geometry" not in marker_sets:
s=new_marker_set('particle_20 geometry')
marker_sets["particle_20 geometry"]=s
s= marker_sets["particle_20 geometry"]
mark=s.place_marker((6422.64, 7890.12, 4392.43), (0.7, 0.7, 0.7), 616.015)
if "particle_21 geometry" not in marker_sets:
s=new_marker_set('particle_21 geometry')
marker_sets["particle_21 geometry"]=s
s= marker_sets["particle_21 geometry"]
mark=s.place_marker((7520.07, 8550.61, 5993.54), (0.7, 0.7, 0.7), 653.154)
if "particle_22 geometry" not in marker_sets:
s=new_marker_set('particle_22 geometry')
marker_sets["particle_22 geometry"]=s
s= marker_sets["particle_22 geometry"]
mark=s.place_marker((7047.03, 8185.55, 6463.58), (0.7, 0.7, 0.7), 595.33)
if "particle_23 geometry" not in marker_sets:
s=new_marker_set('particle_23 geometry')
marker_sets["particle_23 geometry"]=s
s= marker_sets["particle_23 geometry"]
mark=s.place_marker((7378.79, 7775.83, 7641.96), (0.7, 0.7, 0.7), 627.901)
if "particle_24 geometry" not in marker_sets:
s=new_marker_set('particle_24 geometry')
marker_sets["particle_24 geometry"]=s
s= marker_sets["particle_24 geometry"]
mark=s.place_marker((8530.81, 7008.83, 7696.77), (0.7, 0.7, 0.7), 663.941)
if "particle_25 geometry" not in marker_sets:
s=new_marker_set('particle_25 geometry')
marker_sets["particle_25 geometry"]=s
s= marker_sets["particle_25 geometry"]
mark=s.place_marker((9913.91, 6961.43, 8384.84), (0.7, 0.7, 0.7), 663.899)
if "particle_26 geometry" not in marker_sets:
s=new_marker_set('particle_26 geometry')
marker_sets["particle_26 geometry"]=s
s= marker_sets["particle_26 geometry"]
mark=s.place_marker((8860.24, 7431.45, 7335.88), (0.7, 0.7, 0.7), 644.694)
if "particle_27 geometry" not in marker_sets:
s=new_marker_set('particle_27 geometry')
marker_sets["particle_27 geometry"]=s
s= marker_sets["particle_27 geometry"]
mark=s.place_marker((8090.35, 6968.45, 5332.62), (0.7, 0.7, 0.7), 896.802)
if "particle_28 geometry" not in marker_sets:
s=new_marker_set('particle_28 geometry')
marker_sets["particle_28 geometry"]=s
s= marker_sets["particle_28 geometry"]
mark=s.place_marker((6851.21, 7706.77, 5771.66), (0.7, 0.7, 0.7), 576.38)
if "particle_29 geometry" not in marker_sets:
s=new_marker_set('particle_29 geometry')
marker_sets["particle_29 geometry"]=s
s= marker_sets["particle_29 geometry"]
mark=s.place_marker((5952.06, 8094.26, 4905.62), (0.7, 0.7, 0.7), 635.092)
if "particle_30 geometry" not in marker_sets:
s=new_marker_set('particle_30 geometry')
marker_sets["particle_30 geometry"]=s
s= marker_sets["particle_30 geometry"]
mark=s.place_marker((5799.93, 7527.38, 5476.16), (0.7, 0.7, 0.7), 651.505)
if "particle_31 geometry" not in marker_sets:
s=new_marker_set('particle_31 geometry')
marker_sets["particle_31 geometry"]=s
s= marker_sets["particle_31 geometry"]
mark=s.place_marker((6126, 7861.56, 3801.22), (0.7, 0.7, 0.7), 718.042)
if "particle_32 geometry" not in marker_sets:
s=new_marker_set('particle_32 geometry')
marker_sets["particle_32 geometry"]=s
|
martinribelotta/micropython | tests/float/float_divmod_relaxed.py | Python | mit | 705 | 0.001418 | # test floating point floor divide and modulus
# it has some tricky corner cases
# pyboard has 32-bit floating point and gives different (but still
# correct) answers for c | ertain combinations of divmod arguments.
def test(x, y):
div, mod = divmod(x, y)
print(div == x // y, mod == x % y, abs(div * y + mod - x) < 1e-6)
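# e.g. divmod(1.23456, 0.7) == (1.0, 0.53456); 1.0 * 0.7 + 0.53456 recovers
# 1.23456 to within the 1e-6 tolerance used above.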
test(1.23456, 0.7)
test(-1.23456, 0.7)
test(1.23456, -0.7)
test(-1.23456, -0.7)
a = 1.23456
b = 0.7
test(a, b)
test(a, -b)
test(-a, b)
test(-a, -b)
for i in range(25):
x = (i - 12.5) / 6
for j in range(25):
y = (j - 12.5) / 6
test(x, y)
# test division by zer | o error
try:
divmod(1.0, 0)
except ZeroDivisionError:
print('ZeroDivisionError')
|
Rav3nPL/p2pool-rav | p2pool/networks/joulecoin.py | Python | gpl-3.0 | 568 | 0.012324 | from p2pool.bitcoin import networks
PARENT = networks.nets['joulecoin']
SHARE_PERIOD = 20 # seconds
CHAIN_LENGTH = 12* | 60*60//10 # shares
REAL_CHAIN_LENGTH = 12*60*60//10 # shares
TARGET_LOOKBEHIND = 20 # shares
SPREAD = 10 # blocks
IDENTIFIER = 'ac556af4e900ca61'.decode('hex')
PREFIX = '16ac009e4fa655ac'.decode('hex')
P2P_PO | RT = 7844
MIN_TARGET = 0
MAX_TARGET = 2**256//2**32 - 1
PERSIST = False
WORKER_PORT = 9844
BOOTSTRAP_ADDRS = 'rav3n.dtdns.net pool.hostv.pl p2pool.org solidpool.org'.split(' ')
ANNOUNCE_CHANNEL = '#p2pool-alt'
VERSION_CHECK = lambda v: True
|
mjkmoynihan/ReleaseRadar | setup.py | Python | apache-2.0 | 15 | 0.066667 | # TO | DO: | Setup
|
ehartsuyker/securedrop | securedrop/journalist_app/api.py | Python | agpl-3.0 | 13,022 | 0.000077 | import json
from datetime import datetime, timedelta
from flask import abort, Blueprint, current_app, jsonify, request
from functools import wraps
from sqlalchemy.exc import IntegrityError
from os import path
from uuid import UUID
from werkzeug.exceptions import default_exceptions # type: ignore
from db import db
from journalist_app import utils
from models import (Journalist, Reply, Source, Submission, RevokedToken,
LoginThrottledException, InvalidUsernameException,
BadTokenException, WrongPasswordException)
from store import NotEncrypted
TOKEN_EXPIRATION_MINS = 60 * 8
def get_user_object(request):
"""Helper function to use in token_required views that need a user
object
"""
auth_token = request.headers.get('Authorization').split(" ")[1]
user = Journalist.validate_api_token_and_get_user(auth_token)
return user
def token_required(f):
@wraps(f)
def decorated_function(*args, **kwargs):
try:
auth_header = request.headers['Authorization']
except KeyError:
return abort(403, 'API token not found in Authorization header.')
if auth_header:
split = auth_header.split(" ")
if len(split) != 2 or split[0] != 'Token':
abort(403, 'Malformed authorization header.')
auth_token = split[1]
else:
auth_token = ''
if not Journalist.validate_api_token_and_get_user(auth_token):
return abort(403, 'API token is invalid or expired.')
return f(*args, **kwargs)
return decorated_function
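# A request authenticates with a header of the form
#   Authorization: Token <api_token>
# (exactly two space-separated parts); anything else is rejected with a 403
# before the view runs.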
def get_ | or_404(model, object_id, column=''):
if column:
result = model.query.filter(column == object_id).one_or_none()
else:
result = model.query.get(object_id)
if result is None:
abort(404)
return result
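# e.g. get_or_404(Source, source_uuid, column=Source.uuid) returns the matching
# source, or aborts the request with a 404 if none exists.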
def make_blueprint(config):
api = Blu | eprint('api', __name__)
@api.route('/')
def get_endpoints():
endpoints = {'sources_url': '/api/v1/sources',
'current_user_url': '/api/v1/user',
'submissions_url': '/api/v1/submissions',
'replies_url': '/api/v1/replies',
'auth_token_url': '/api/v1/token'}
return jsonify(endpoints), 200
# Before every post, we validate the payload before processing the request
@api.before_request
def validate_data():
if request.method == 'POST':
# flag, star, and logout can have empty payloads
if not request.data:
dataless_endpoints = [
'add_star',
'remove_star',
'flag',
'logout',
]
for endpoint in dataless_endpoints:
if request.endpoint == 'api.' + endpoint:
return
return abort(400, 'malformed request')
# other requests must have valid JSON payload
else:
try:
json.loads(request.data.decode('utf-8'))
except (ValueError):
return abort(400, 'malformed request')
@api.route('/token', methods=['POST'])
def get_token():
creds = json.loads(request.data.decode('utf-8'))
username = creds.get('username', None)
passphrase = creds.get('passphrase', None)
one_time_code = creds.get('one_time_code', None)
if username is None:
return abort(400, 'username field is missing')
if passphrase is None:
return abort(400, 'passphrase field is missing')
if one_time_code is None:
return abort(400, 'one_time_code field is missing')
try:
journalist = Journalist.login(username, passphrase, one_time_code)
token_expiry = datetime.utcnow() + timedelta(
seconds=TOKEN_EXPIRATION_MINS * 60)
response = jsonify({
'token': journalist.generate_api_token(expiration=TOKEN_EXPIRATION_MINS * 60),
'expiration': token_expiry.isoformat() + 'Z',
'journalist_uuid': journalist.uuid,
})
# Update access metadata
journalist.last_access = datetime.utcnow()
db.session.add(journalist)
db.session.commit()
return response, 200
except (LoginThrottledException, InvalidUsernameException,
BadTokenException, WrongPasswordException):
return abort(403, 'Token authentication failed.')
@api.route('/sources', methods=['GET'])
@token_required
def get_all_sources():
sources = Source.query.filter_by(pending=False).all()
return jsonify(
{'sources': [source.to_json() for source in sources]}), 200
@api.route('/sources/<source_uuid>', methods=['GET', 'DELETE'])
@token_required
def single_source(source_uuid):
if request.method == 'GET':
source = get_or_404(Source, source_uuid, column=Source.uuid)
return jsonify(source.to_json()), 200
elif request.method == 'DELETE':
source = get_or_404(Source, source_uuid, column=Source.uuid)
utils.delete_collection(source.filesystem_id)
return jsonify({'message': 'Source and submissions deleted'}), 200
@api.route('/sources/<source_uuid>/add_star', methods=['POST'])
@token_required
def add_star(source_uuid):
source = get_or_404(Source, source_uuid, column=Source.uuid)
utils.make_star_true(source.filesystem_id)
db.session.commit()
return jsonify({'message': 'Star added'}), 201
@api.route('/sources/<source_uuid>/remove_star', methods=['DELETE'])
@token_required
def remove_star(source_uuid):
source = get_or_404(Source, source_uuid, column=Source.uuid)
utils.make_star_false(source.filesystem_id)
db.session.commit()
return jsonify({'message': 'Star removed'}), 200
@api.route('/sources/<source_uuid>/flag', methods=['POST'])
@token_required
def flag(source_uuid):
source = get_or_404(Source, source_uuid,
column=Source.uuid)
source.flagged = True
db.session.commit()
return jsonify({'message': 'Source flagged for reply'}), 200
@api.route('/sources/<source_uuid>/submissions', methods=['GET'])
@token_required
def all_source_submissions(source_uuid):
source = get_or_404(Source, source_uuid, column=Source.uuid)
return jsonify(
{'submissions': [submission.to_json() for
submission in source.submissions]}), 200
@api.route('/sources/<source_uuid>/submissions/<submission_uuid>/download', # noqa
methods=['GET'])
@token_required
def download_submission(source_uuid, submission_uuid):
get_or_404(Source, source_uuid, column=Source.uuid)
submission = get_or_404(Submission, submission_uuid,
column=Submission.uuid)
# Mark as downloaded
submission.downloaded = True
db.session.commit()
return utils.serve_file_with_etag(submission)
@api.route('/sources/<source_uuid>/replies/<reply_uuid>/download',
methods=['GET'])
@token_required
def download_reply(source_uuid, reply_uuid):
get_or_404(Source, source_uuid, column=Source.uuid)
reply = get_or_404(Reply, reply_uuid, column=Reply.uuid)
return utils.serve_file_with_etag(reply)
@api.route('/sources/<source_uuid>/submissions/<submission_uuid>',
methods=['GET', 'DELETE'])
@token_required
def single_submission(source_uuid, submission_uuid):
if request.method == 'GET':
source = get_or_404(Source, source_uuid, column=Source.uuid)
submission = get_or_404(Submission, submission_uuid,
column=Submission.uuid)
return jsonify(submission.to_json()), 200
elif request.method == 'DELETE':
submission = get_or_404(Submission, submi |
lukaszo/rpitips-examples | RPi.GPIO/servo.py | Python | apache-2.0 | 288 | 0.010417 | import time
import RPi.GPIO as GPIO
# use B | roadcom pin numbers
GPIO.setmode(GPIO.BCM)
SERVO_PIN = 3
GPIO.setup(SERVO_PIN, GPIO.OUT)
# setup PWM
pwm = GPIO.PWM | (SERVO_PIN, 100)
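# At 100 Hz the PWM period is 10 ms, so the duty-cycle sweep of 5-25 below
# gives pulse widths of roughly 0.5-2.5 ms, bracketing the usual 1-2 ms
# hobby-servo control range.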
pwm.start(5)
for i in range(5, 25):
pwm.ChangeDutyCycle(i)
time.sleep(0.5)
pwm.stop()
GPIO.cleanup()
|
skearnes/color-features | oe_utils/shape/tests/test_color.py | Python | bsd-3-clause | 1,693 | 0 | """
Tests for OEShape color utilities.
"""
import numpy as np
import unittest
from openeye.oechem import *
from openeye.oeshape import *
from ..color import ColorForceField
class TestColorForceField(unittest.TestCase):
"""
Tests for ColorForceField.
"""
def setUp(self):
"""
Set up tests.
"""
self.color_ff = ColorForceField()
self.color_ff.Init(OEColorFFType_ImplicitMillsDean)
def test_get_interactions(self):
"""
Test ColorForceField.get_interactions.
"""
interactions = self.color_ff.get_interactions()
assert len(interactions) == 6
for (a_type, b_type, decay, weight, radius) in interactions:
assert a_type == b_type
assert decay == 'gaussian'
assert weight < 0
assert radius > 0
def test_get_string(self):
"""
Test ColorForceField.get_string.
"""
ifs = oeisstream(self.color_ff.get_string())
color_ff = ColorForceField()
color_ff.Init(ifs)
f | or a_interaction, b_interaction in zip(
color_ff.get_interactions(), self.color_ff.get_interactions()):
assert np.array_equal(a_interaction, b_interaction)
def test_isolate_interactions(self):
"""
Test ColorForceField.isolate_interactions.
"""
interactions = set()
for color_ff in self.color_ff.isolate_interactions():
| assert len(color_ff.get_interactions()) == 1
for interaction in color_ff.get_interactions():
interactions.add(interaction)
assert interactions == set(self.color_ff.get_interactions())
|
osu-cass/working-waterfronts-api | working_waterfronts/working_waterfronts_api/tests/views/entry/test_edit_video.py | Python | apache-2.0 | 2,965 | 0 | from django.test import TestCase
from django.core.urlresolvers import reverse
from working_waterfronts.working_waterfronts_api.models import Video
from django.contrib.auth.models import User
class EditVideoTestCase(TestCase):
"""
Test that the Edit Video page works as expected.
Things tested:
URLs reverse correctly
The outputted page has the correct form fields
POSTing "correct" data will result in the update of the video
object with the specified ID
"""
fixtures = ['test_fixtures']
def setUp(self):
user = User.objects.create_user(
'temporary', '[email protected]', 'temporary')
user.save()
response = self.client.login(
username='temporary', password='temporary')
self.assertEqual(response, True)
def test_not_logged_in(self):
self.client.logout()
response = self.client.get(
reverse('edit-video', kwargs={'id': '1'}))
self.assertRedirects(response, '/login?next=/entry/videos/1')
def test_url_endpoint(self):
url = reverse('edit-video', kwargs={'id': '1'})
self.assertEqual(url, '/entry/videos/1')
    def test_successful_video_update(self):
"""
POST a proper "update video" command to the server, and see if
the update appears in the database
"""
# Data that we'll post to the server to get the new video created
new_video = {
'caption': "A thrilling display of utmost might",
'name': "You won't believe number 3!",
            'video': 'http://www.youtube.com/watch?v=dQw4w9WgXcQ'}
self.client.post(
reverse('edit-video', kwargs={'id': '1'}),
new_video)
video = Video.objects.get(id=1)
for field in new_video:
self.assertEqual(
getattr(video, field), new_video[field])
def test_form_fields(self):
"""
Tests to see if the form contains all of the right fields
"""
response = self.client.get(
reverse('edit-video', kwargs={'id': '1'}))
fields = {
'name': 'A Starship',
'caption': "Traveling at the speed of light!",
'video': 'http://www.youtube.com/watch?v=efgDdSWDg0g'
}
form = response.context['video_form']
for field in fields:
self.assertEqual(fields[field], form[field].value())
def test_delete_video(self):
"""
Tests that DELETing entry/videos/<id> deletes the item
"""
response = self.client.delete(
reverse('edit-video', kwargs={'id': '2'}))
self.assertEqual(response.status_code, 200)
with self.assertRaises(Video.DoesNotExist):
Video.objects.get(id=2)
response = self.client.delete(
reverse('edit-video', kwargs={'id': '2'}))
self.assertEqual(response.status_code, 404)
|
Tanych/CodeTracking | distance.py | Python | mit | 1,572 | 0.027354 | class WallsGate(object):
def dfs(self, rooms):
queue = [(i, j, 0) for i, rows in enumerate(rooms) for j, v in enumerate(rows) if not v]
while queue:
i, j, step = queue.pop()
if rooms[i][j] > step:
rooms[i][j] = step
for newi, newj in ((i + 1, j), (i - 1, j), (i, j + 1), (i, j - 1)):
if 0 <= newi < len(rooms) and 0 <= newj < len(rooms[0]) and step < rooms[newi][newj]:
queue.append((newi, newj, step + 1))
def bfs(self, rooms):
row=len(rooms)
col=len(rooms[0])
queue=[]
for i in xrange(row):
for j in xrange(col):
if rooms[i][j]==0:
                    queue.append(i*col+j)
while queue:
x=queue.pop(0)
i,j=x/col,x%col
for newi,newj in (i+1,j),(i-1,j),(i,j+1),(i,j-1):
if 0 <= newi < len(rooms) and 0 <= newj < len(rooms[0]) and rooms[newi][newj]==INF:
                    rooms[newi][newj]=rooms[i][j]+1
queue.append(newi*col+newj)
def naivedfs(self, rooms):
for i in xrange(len(rooms)):
for j in xrange(len(rooms[0])):
if rooms[i][j]==0:
self._dfsrev(rooms,i,j)
def _dfsrev(self,rooms,i,j):
for newi,newj in (i+1,j),(i-1,j),(i,j+1),(i,j-1):
            # spread only when it strictly improves the neighbour: the original
            # "<" comparison overwrote closer cells with larger distances
            if 0 <= newi < len(rooms) and 0 <= newj < len(rooms[0]) and rooms[newi][newj] > rooms[i][j]+1:
                rooms[newi][newj] = rooms[i][j]+1
                self._dfsrev(rooms, newi, newj)  # was (rooms, newi, newi), a typo
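# Added notes, not part of the original snippet:
# bfs() compares against INF, which was never defined; LeetCode's
# "Walls and Gates" uses 2**31 - 1 as the empty-room sentinel, assumed here
# (defining it after the class is fine -- it is only looked up when bfs runs).
INF = 2147483647
# Minimal usage sketch: 0 is a gate, -1 a wall, INF an empty room; each solver
# fills every room in place with the distance to its nearest gate.
if __name__ == '__main__':
    grid = [[INF, -1, 0, INF],
            [INF, INF, INF, -1],
            [INF, -1, INF, -1],
            [0, -1, INF, INF]]
    WallsGate().bfs(grid)
    print grid  # [[3, -1, 0, 1], [2, 2, 1, -1], [1, -1, 2, -1], [0, -1, 3, 4]]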
|
GoogleCloudPlatform/professional-services | examples/kubeflow-pipelines-sentiment-analysis/pipeline/build_sentiment_analysis.py | Python | apache-2.0 | 3,173 | 0.003467 | # Copyright 2019 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Builds Kubeflow Pipelines component and pipeline for sentiment analysis."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import datetime
import kfp.compiler
import kfp.dsl
import kfp.gcp
class SentimentAnalysisOp(kfp.dsl.ContainerOp):
"""Defines the operation."""
def __init__(self,
name,
project,
gcp_temp_location,
input_path,
output_path,
window,
period):
super(SentimentAnalysisOp, self).__init__(
name=name,
image='gcr.io/rostam-193618/sentiment-analysis:latest',
command=[
'mvn', 'compile', 'exec:java',
'-Dexec.mainClass=com.google.cloud.pso.pipelines.SentimentAnalysis',
'-Dexec.cleanupDaemonThreads=false'
],
# file_outputs={'blobs': '/blobs.txt'},
arguments=[
'-Dexec.args=--project={} \
--runner=DataflowRunner \
                --gcpTempLocation={} \
--inputPath={} \
                --outputPath={} \
--windowDuration={} \
--windowPeriod={}'.format(
str(project),
str(gcp_temp_location),
str(input_path),
str(output_path),
str(window),
str(period)),
]
)
@kfp.dsl.pipeline(
name='Sentiment analysis',
description='Analyzes the sentiments of NYTimes front page headlines.'
)
def pipeline_func(
project=kfp.dsl.PipelineParam('project', value='<PROJECT_ID>'),
gcp_temp_location=kfp.dsl.PipelineParam(
'runner', value='gs://<BUCKET_NAME>/tmp'),
input_path=kfp.dsl.PipelineParam(
'path', value='gs://<BUCKET_NAME>/<NYTIMES-ARCHIVE-API-JSON-FILE(S)>'),
output_path=kfp.dsl.PipelineParam(
'path', value='gs://<BUCKET_NAME>/output/output'),
window=kfp.dsl.PipelineParam('window', value=280),
period=kfp.dsl.PipelineParam('period', value=1)):
"""Defines the pipeline."""
sentiment_analysis_task = SentimentAnalysisOp(
'SentimentAnalysis',
project, # To authenticate.
gcp_temp_location,
input_path,
output_path,
window, period).apply(
kfp.gcp.use_gcp_secret()) # To apply gcp service account secret.
if __name__ == '__main__':
"""Compiles the pipeline to a file."""
filename = 'sentiment_analysis{dt:%Y%m%d_%H%M%S}.pipeline.tar.gz'.format(
dt=datetime.datetime.now())
filepath = './{}'.format(filename)
kfp.compiler.Compiler().compile(pipeline_func, filepath)
|
timj/scons | bin/linecount.py | Python | mit | 4,164 | 0.010327 | #!/usr/bin/env python
#
# __COPYRIGHT__
#
# Count statistics about SCons test and source files. This must be run
# against a fully-populated tree (for example, one that's been freshly
# checked out).
#
# A test file is anything under the src/ directory that begins with
# 'test_' or ends in 'Tests.py', or anything under the test/ directory
# that ends in '.py'. Note that runtest.py script does *not*, by default,
# consider the files that begin with 'test_' to be tests, because they're
# tests of SCons packaging and installation, not functional tests of
# SCons code.
#
# A source file is anything under the src/engine/ or src/script/
# directories that ends in '.py' but does NOT begin with 'test_'
# or end in 'Tests.py'.
#
# We report the number of tests and sources, the total number of lines
# in each category, the number of non-blank lines, and the number of
# non-comment lines. The last figure (non-comment) lines is the most
# interesting one for most purposes.
from __future__ import division, print_function
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
import os.path
fmt = "%-16s %5s %7s %9s %11s %11s"
class Collection(object):
def __init__(self, name, files=None, pred=None):
self._name = name
if files is None:
files = []
self.files = files
if pred is None:
pred = lambda x: True
self.pred = pred
def __call__(self, fname):
return self.pred(fname)
def __len__(self):
return len(self.files)
def collect(self, directory):
for dirpath, dirnames, filenames in os.walk(directory):
try: dirnames.remove('.svn')
except ValueError: pass
self.files.extend([ os.path.join(dirpath, f)
for f in filenames if self.pred(f) ])
def lines(self):
try:
return self._lines
except AttributeError:
self._lines = lines = []
for file in self.files:
file_lines = open(file).readlines()
lines.extend([s.lstrip() for s in file_lines])
return lines
def non_blank(self):
return [s for s in self.lines() if s != '']
def non_comment(self):
return [s for s in self.lines() if s == '' or s[0] != '#']
def non_blank_non_comment(self):
return [s for s in self.lines() if s != '' and s[0] != '#']
def printables(self):
return (self._name + ':',
len(self.files),
len(self.lines()),
len(self.non_blank()),
len(self.non_comment()),
len(self.non_blank_non_comment()))
def is_Tests_py(x):
return x[-8:] == 'Tests.py'
def is_test_(x):
return x[:5] == 'test_'
def is_python(x):
return x[-3:] == '.py'
def is_source(x):
return is_python(x) and not is_Tests_py(x) and not is_test_(x)
src_Tests_py_tests = Collection('src/ *Tests.py', pred=is_Tests_py)
src_test_tests = Collection('src/ test_*.py', pred=is_test_)
test_tests = Collection('test/ tests', pred=is_python)
sources = Collection('sources', pred=is_source)
src_Tests_py_tests.collect('src')
src_test_tests.collect('src')
test_tests.collect('test')
sources.collect('src/engine')
sources.collect('src/script')
src_tests = Collection('src/ tests', src_Tests_py_tests.files
+ src_test_tests.files)
all_tests = Collection('all tests', src_tests.files + test_tests.files)
def ratio(over, under):
return "%.2f" % (float(len(over)) / float(len(under)))
print(fmt % ('', '', '', '', '', 'non-blank'))
print(fmt % ('', 'files', 'lines', 'non-blank', 'non-comment', 'non-comment'))
print()
print(fmt % src_Tests_py_tests.printables())
print(fmt % src_test_tests.printables())
print()
print(fmt % src_tests.printables())
print(fmt % test_tests.printables())
print()
print(fmt % all_tests.printables())
print(fmt % sources.printables())
print()
print(fmt % ('ratio:',
ratio(all_tests, sources),
ratio(all_tests.lines(), sources.lines()),
ratio(all_tests.non_blank(), sources.non_blank()),
ratio(all_tests.non_comment(), sources.non_comment()),
ratio(all_tests.non_blank_non_comment(),
sources.non_blank_non_comment())
))
|
all-umass/metric-learn | metric_learn/base_metric.py | Python | mit | 15,825 | 0.002528 | from numpy.linalg import cholesky
from scipy.spatial.distance import euclidean
from sklearn.base import BaseEstimator
from sklearn.utils.validation import _is_arraylike
from sklearn.metrics import roc_auc_score
import numpy as np
from abc import ABCMeta, abstractmethod
import six
from ._util import ArrayIndexer, check_input, validate_vector
import warnings
class BaseMetricLearner(six.with_metaclass(ABCMeta, BaseEstimator)):
def __init__(self, preprocessor=None):
"""
Parameters
----------
preprocessor : array-like, shape=(n_samples, n_features) or callable
The preprocessor to call to get tuples from indices. If array-like,
tuples will be gotten like this: X[indices].
"""
self.preprocessor = preprocessor
@abstractmethod
def score_pairs(self, pairs):
"""Returns the score between pairs
(can be a similarity, or a distance/metric depending on the algorithm)
Parameters
----------
pairs : `numpy.ndarray`, shape=(n_samples, 2, n_features)
3D array of pairs.
Returns
-------
scores: `numpy.ndarray` of shape=(n_pairs,)
The score of every pair.
See Also
--------
get_metric : a method that returns a function to compute the metric between
two points. The difference with `score_pairs` is that it works on two 1D
arrays and cannot use a preprocessor. Besides, the returned function is
independent of the metric learner and hence is not modified if the metric
learner is.
"""
def check_preprocessor(self):
"""Initializes the preprocessor"""
if _is_arraylike(self.preprocessor):
self.preprocessor_ = ArrayIndexer(self.preprocessor)
elif callable(self.preprocessor) or self.preprocessor is None:
self.preprocessor_ = self.preprocessor
else:
raise ValueError("Invalid type for the preprocessor: {}. You should "
"provide either None, an array-like object, "
"or a callable.".format(type(self.preprocessor)))
def _prepare_inputs(self, X, y=None, type_of_inputs='classic',
**kwargs):
"""Initializes the preprocessor and processes inputs. See `check_input`
for more details.
Parameters
----------
input: array-like
The input data array to check.
y : array-like
The input labels array to check.
type_of_inputs: `str` {'classic', 'tuples'}
The type of inputs to check. If 'classic', the input should be
a 2D array-like of points or a 1D array like of indicators of points. If
'tuples', the input should be a 3D array-like of tuples or a 2D
array-like of indicators of tuples.
**kwargs: dict
Arguments to pass to check_input.
Returns
-------
X : `numpy.ndarray`
The checked input data array.
y: `numpy.ndarray` (optional)
The checked input labels array.
"""
self.check_preprocessor()
return check_input(X, y,
type_of_inputs=type_of_inputs,
preprocessor=self.preprocessor_,
estimator=self,
tuple_size=getattr(self, '_tuple_size', None),
**kwargs)
@abstractmethod
def get_metric(self):
"""Returns a function that takes as input two 1D arrays and outputs the
learned metric score on these two points.
This function will be independent from the metric learner that learned it
(it will not be modified if the initial metric learner is modified),
and it can be directly plugged into the `metric` argument of
scikit-learn's estimators.
Returns
-------
metric_fun : function
The function described above.
Examples
--------
.. doctest::
>>> from metric_learn import NCA
>>> from sklearn.datasets import make_classification
>>> from sklearn.neighbors import KNeighborsClassifier
>>> nca = NCA()
>>> X, y = make_classification()
>>> nca.fit(X, y)
>>> knn = KNeighborsClassifier(metric=nca.get_metric())
>>> knn.fit(X, y) # doctest: +NORMALIZE_WHITESPACE
KNeighborsClassifier(algorithm='auto', leaf_size=30,
metric=<function MahalanobisMixin.get_metric.<locals>.metric_fun
at 0x...>,
metric_params=None, n_jobs=None, n_neighbors=5, p=2,
weights='uniform')
See Also
--------
score_pairs : a method that returns the metric score between several pairs
of points. Unlike `get_metric`, this is a method of the metric learner
and therefore can change if the metric learner changes. Besides, it can
use the metric learner's preprocessor, and works on concatenated arrays.
"""
class MetricTransformer(six.with_metaclass(ABCMeta)):
@abstractmethod
def transform(self, X):
"""Applies the metric transformation.
Parameters
----------
X : (n x d) matrix
Data to transform.
Returns
-------
transformed : (n x d) matrix
Input data transformed to the metric space by :math:`XL^{\\top}`
"""
class MahalanobisMixin(six.with_metaclass(ABCMeta, BaseMetricLearner,
MetricTransformer)):
"""Mahalanobis metric learning algorithms.
Algorithm that learns a Mahalanobis (pseudo) distance :math:`d_M(x, x')`,
defined between two column vectors :math:`x` and :math:`x'` by: :math:`d_M(x,
x') = \sqrt{(x-x')^T M (x-x')}`, where :math:`M` is a learned symmetric
positive semi-definite (PSD) matrix. The metric between points can then be
expressed as the euclidean distance between points embedded in a new space
through a linear transformation. Indeed, the above matrix can be decomposed
into the product of two transpose matrices (through SVD or Cholesky
decomposition): :math:`d_M(x, x')^2 = (x-x')^T M (x-x') = (x-x')^T L^T L
(x-x') = (L x - L x')^T (L x- L x')`
Attributes
----------
transformer_ : `numpy.ndarray`, shape=(num_dims, n_features)
The learned linear transformation ``L``.
"""
def score_pairs(self, pairs):
"""Returns the learned Mahalanobis distance between pairs.
This distance is defined as: :math:`d_M(x, x') = \sqrt{(x-x')^T M (x-x')}`
where ``M`` is the learned Mahalanobis matrix, for every pair of points
``x`` and ``x'``. This corresponds to the euclidean distance between
embeddings of the points in a new space, obtained through a linear
transformation. Indeed, we have also: :math:`d_M(x, x') = \sqrt{(x_e -
x_e')^T (x_e- x_e')}`, with :math:`x_e = L x` (See
:class:`MahalanobisMixin`).
Parameters
----------
pairs : array-like, shape=(n_pairs, 2, n_features) or (n_pairs, 2)
3D Array of pairs to score, with each row corresponding to two points,
for 2D array of indices of pairs if the metric learner uses a
preprocessor.
Returns
-------
scores: `numpy.ndarray` of shape=(n_pairs,)
The learned Mahalanobis distance for every pair.
See Also
--------
get_metric : a method that returns a function to compute the metric between
two points. The difference with `score_pairs` is that it works on two 1D
arrays and cannot use a preprocessor. Besides, the returned function is
    independent of the metric learner and hence is not modified if the metric
learner is.
    :ref:`mahalanobis_distances` : The section of the project documentation
that describes Mahalanobis Distances.
"""
pairs = check_input(pairs, type_of_inputs='tuples',
preprocessor=self.preprocessor_,
estimator=self, tuple_size=2)
pairwise_diffs = self.transform(pairs[:, 1, :] - pairs[:, 0, :])
# (for MahalanobisMixin, the embedding is linear so we can just embed the
# difference)
return np.sqrt(np.sum(pairwise_diffs**2, axis=-1))
def transform(self, X):
"""Embeds data points in the learned linear embedding space.
Transforms samples in ``X`` into ``X_embedded``, samples inside a new
embedding space such that: ``X_embedded = X.dot(L.T)``, where ``L`` is
    the learned linear transformation (See :class:`MahalanobisMixin`).
    """
radio-astro-tools/spectral-cube | spectral_cube/tests/test_dask.py | Python | bsd-3-clause | 10,242 | 0.002831 | # Tests specific to the dask class
import os
from numpy.core.shape_base import block
import pytest
import numpy as np
from mock import patch
from numpy.testing import assert_allclose
from astropy.tests.helper import assert_quantity_allclose
from astropy import units as u
from astropy.utils import data
try:
from distributed.utils_test import client, loop, cluster_fixture # noqa
DISTRIBUTED_INSTALLED = True
except ImportError:
DISTRIBUTED_INSTALLED = False
from spectral_cube import DaskSpectralCube, SpectralCube, DaskVaryingResolutionSpectralCube
from .test_casafuncs import make_casa_testimage
try:
import casatools
from casatools import image
CASA_INSTALLED = True
except ImportError:
try:
from taskinit import ia as image
CASA_INSTALLED = True
except ImportError:
CASA_INSTALLED = False
DATA = os.path.join(os.path.dirname(__file__), 'data')
class Array:
args = None
kwargs = None
def compute(self, *args, **kwargs):
self.args = args
self.kwargs = kwargs
def test_scheduler(data_adv):
cube = DaskSpectralCube.read(data_adv)
fake_array = Array()
cube._compute(fake_array)
assert fake_array.kwargs == {'scheduler': 'synchronous'}
with cube.use_dask_scheduler('threads'):
cube._compute(fake_array)
assert fake_array.kwargs == {'scheduler': 'threads'}
cube._compute(fake_array)
assert fake_array.kwargs == {'scheduler': 'synchronous'}
cube.use_dask_scheduler('threads')
cube._compute(fake_array)
assert fake_array.kwargs == {'scheduler': 'threads'}
with cube.use_dask_scheduler('processes', num_workers=4):
cube._compute(fake_array)
assert fake_array.kwargs == {'scheduler': 'processes', 'num_workers': 4}
cube._compute(fake_array)
assert fake_array.kwargs == {'scheduler': 'threads'}
def test_save_to_tmp_dir(data_adv):
pytest.importorskip('zarr')
cube = DaskSpectralCube.read(data_adv)
cube_new = cube.sigma_clip_spectrally(3, save_to_tmp_dir=True)
# The following test won't necessarily always work in future since the name
# is not really guaranteed, but this is pragmatic enough for now
assert cube_new._data.name.startswith('from-zarr')
def test_rechunk(data_adv):
cube = DaskSpectralCube.read(data_adv)
assert cube._data.chunksize == (4, 3, 2)
cube_new = cube.rechunk(chunks=(1, 2, 3))
# note last element is 2 because the chunk size we asked for
# is larger than cube - this is fine and deliberate in this test
assert cube_new._data.chunksize == (1, 2, 2)
def test_statistics(data_adv):
cube = DaskSpectralCube.read(data_adv).rechunk(chunks=(1, 2, 3))
stats = cube.statistics()
assert_quantity_allclose(stats['npts'], 24)
assert_quantity_allclose(stats['mean'], 0.4941651776136591 * u.K)
assert_quantity_allclose(stats['sigma'], 0.3021908870982011 * u.K)
assert_quantity_allclose(stats['sum'], 11.85996426272782 * u.K)
assert_quantity_allclose(stats['sumsq'], 7.961125988022091 * u.K ** 2)
assert_quantity_allclose(stats['min'], 0.0363300285196364 * u.K)
assert_quantity_allclose(stats['max'], 0.9662900439556562 * u.K)
assert_quantity_allclose(stats['rms'], 0.5759458158839716 * u.K)
@pytest.mark.skipif(not CASA_INSTALLED, reason='Requires CASA to be installed')
def test_statistics_consistency_casa(data_adv, tmp_path):
# Similar to test_statistics but compares to CASA directly.
cube = DaskSpectralCube.read(data_adv)
stats = cube.statistics()
make_casa_testimage(data_adv, tmp_path / 'casa.image')
ia = casatools.image()
ia.open(str(tmp_path / 'casa.image'))
stats_casa = ia.statistics()
ia.close()
for key in stats:
if isinstance(stats[key], u.Quantity):
value = stats[key].value
else:
value = stats[key]
assert_allclose(value, stats_casa[key])
def test_apply_function_parallel_spectral_noncube(data_adv):
    '''
Testing returning a non-SpectralCube object with a user-defined
function for spectral operations.
'''
chunk_size = (-1, 1, 2)
cube = DaskSpectralCube.read(data_adv).rechunk(chunks=chunk_size)
def sum_blocks_spectral(data_chunk):
return data_chunk.sum(0)
    # Tell dask.map_blocks that we expect the zeroth axis to be (1,)
output_chunk_size = (1, 2)
test = cube.apply_function_parallel_spectral(sum_blocks_spectral,
return_new_cube=False,
accepts_chunks=True,
drop_axis=[0], # The output will no longer contain the spectral axis
chunks=output_chunk_size)
# The total shape of test should be the (1,) + cube.shape[1:]
assert test.shape == cube.shape[1:]
# Test we get the same output as the builtin sum
assert_allclose(test.compute(), cube.sum(axis=0).unitless_filled_data[:])
def test_apply_function_parallel_spectral_noncube_withblockinfo(data_adv):
'''
Test receiving block_info information from da.map_blocks so we can place
the chunk's location in the whole cube when needed.
https://docs.dask.org/en/latest/array-api.html#dask.array.map_blocks
'''
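    # Orientation note (added): for a 3-D cube, block_info[0]['array-location']
    # is a list of (start, stop) index pairs, one per axis, locating the current
    # chunk inside the full array -- e.g. [(0, 4), (1, 2), (0, 2)].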
chunk_size = (-1, 1, 2)
cube = DaskSpectralCube.read(data_adv).rechunk(chunks=chunk_size)
sum_spectral_plane = cube.sum(axis=0).unitless_filled_data[:]
# Each value should be different. This is important to check the right positions being used
# for the check in sums_block_spectral
assert np.unique(sum_spectral_plane).size == sum_spectral_plane.size
def sum_blocks_spectral(data_chunk, block_info=None, comparison_array=None):
chunk_sum = data_chunk.sum(0)
# When the block_info kwarg is defined, it should not be None
assert block_info is not None
# Check the block location compared to `comparison_array`
# Get the lower corner location in the whole cube.
loc = [block_range[0] for block_range in block_info[0]['array-location']]
# Should have 3 dimensions for the corner.
assert len(loc) == 3
# Slice comparison array to compare with this data chunk
thisslice = (slice(loc[1], loc[1] + chunk_sum.shape[0]),
slice(loc[2], loc[2] + chunk_sum.shape[1]),)
return chunk_sum == comparison_array[thisslice]
# Tell dask.map_blocks that we expect the zeroth axis to be (1,)
output_chunk_size = (1, 2)
test = cube.apply_function_parallel_spectral(sum_blocks_spectral,
return_new_cube=False,
accepts_chunks=True,
drop_axis=[0], # The output will no longer contain the spectral axis
chunks=output_chunk_size,
comparison_array=sum_spectral_plane) # Passed to `sum_blocks_spectral`
# The total shape of test should be the (1,) + cube.shape[1:]
assert test.shape == cube.shape[1:]
# Test all True
assert np.all(test.compute())
@pytest.mark.parametrize(('accepts_chunks'),
((True, False)))
def test_apply_function_parallel_shape(accepts_chunks):
# regression test for #772
def func(x, add=None):
if add is not None:
y = x + add
else:
raise ValueError("This test is supposed to have add=1")
return y
fn = data.get_pkg_data_filename('tests/data/example_cube.fits', 'spectral_cube')
cube = SpectralCube.read(fn, use_dask=True)
cube2 = SpectralCube.read(fn, use_dask=False)
# Check dask w/both threaded and unthreaded
rslt3 = cube.apply_function_parallel_spectral(func, add=1,
accepts_chunks=accepts_chunks)
with cube.use_dask_scheduler('threads', num_workers=4):
rslt = cube.apply_function_parallel_spectral(func, add=1,
                                                     accepts_chunks=accepts_chunks)
sargas/scipy | scipy/linalg/benchmarks/bench_basic.py | Python | bsd-3-clause | 4,065 | 0.013284 | from __future__ import division, print_function, absolute_import
import sys
from numpy.testing import *
import numpy.linalg as linalg
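# Note (added): `rand` and `measure` used below come from the wildcard import
# of numpy.testing above; both are legacy numpy.testing helpers that recent
# numpy releases have removed.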
def random(size):
return rand(*size)
class TestSolve(TestCase):
def bench_random(self):
basic_solve = linalg.solve
print()
print(' Solving system of linear equations')
print(' ==================================')
print(' | contiguous | non-contiguous ')
print('----------------------------------------------')
print(' size | scipy | basic | scipy | basic ')
for size,repeat in [(20,1000),(100,150),(500,2),(1000,1)][:-1]:
repeat *= 2
print('%5s' % size, end=' ')
sys.stdout.flush()
a = random([size,size])
# larger diagonal ensures non-singularity:
for i in range(size): a[i,i] = 10*(.1+a[i,i])
b = random([size])
print('| %6.2f ' % measure('solve(a,b)',repeat), end=' ')
sys.stdout.flush()
print('| %6.2f ' % measure('basic_solve(a,b)',repeat), end=' ')
sys.stdout.flush()
a = a[-1::-1,-1::-1] # turn into a non-contiguous array
assert_(not a.flags['CONTIGUOUS'])
print('| %6.2f ' % measure('solve(a,b)',repeat), end=' ')
sys.stdout.flush()
print('| %6.2f ' % measure('basic_solve(a,b)',repeat), end=' ')
sys.stdout.flush()
print(' (secs for %s calls)' % (repeat))
class TestInv(TestCase):
def bench_random(self):
basic_inv = linalg.inv
print()
print(' Finding matrix inverse')
print(' ==================================')
print(' | contiguous | non-contiguous ')
print('----------------------------------------------')
        print(' size | scipy | basic | scipy | basic ')
for size,repeat in [(20,1000),(100,150),(500,2),(1000,1)][:-1]:
            repeat *= 2
print('%5s' % size, end=' ')
sys.stdout.flush()
a = random([size,size])
# large diagonal ensures non-singularity:
for i in range(size): a[i,i] = 10*(.1+a[i,i])
print('| %6.2f ' % measure('inv(a)',repeat), end=' ')
sys.stdout.flush()
print('| %6.2f ' % measure('basic_inv(a)',repeat), end=' ')
sys.stdout.flush()
a = a[-1::-1,-1::-1] # turn into a non-contiguous array
assert_(not a.flags['CONTIGUOUS'])
print('| %6.2f ' % measure('inv(a)',repeat), end=' ')
sys.stdout.flush()
print('| %6.2f ' % measure('basic_inv(a)',repeat), end=' ')
sys.stdout.flush()
print(' (secs for %s calls)' % (repeat))
class TestDet(TestCase):
def bench_random(self):
basic_det = linalg.det
print()
print(' Finding matrix determinant')
print(' ==================================')
print(' | contiguous | non-contiguous ')
print('----------------------------------------------')
print(' size | scipy | basic | scipy | basic ')
for size,repeat in [(20,1000),(100,150),(500,2),(1000,1)][:-1]:
repeat *= 2
print('%5s' % size, end=' ')
sys.stdout.flush()
a = random([size,size])
print('| %6.2f ' % measure('det(a)',repeat), end=' ')
sys.stdout.flush()
print('| %6.2f ' % measure('basic_det(a)',repeat), end=' ')
sys.stdout.flush()
a = a[-1::-1,-1::-1] # turn into a non-contiguous array
assert_(not a.flags['CONTIGUOUS'])
print('| %6.2f ' % measure('det(a)',repeat), end=' ')
sys.stdout.flush()
print('| %6.2f ' % measure('basic_det(a)',repeat), end=' ')
sys.stdout.flush()
print(' (secs for %s calls)' % (repeat))
if __name__ == "__main__":
run_module_suite()
|
wfxiang08/django178 | django/__init__.py | Python | bsd-3-clause | 876 | 0.003425 | # -*- coding:utf-8 -*-
VERSION = (1, 7, 8, 'final', 0)
def get_version(*args, **kwargs):
    # Don't litter django/__init__.py with all the get_version stuff.
# Only import if it's actually called.
from django.utils.version import get_version
return get_version(*args, **kwargs)
def setup():
"""
Configure the settings (this happens as a side effect of accessing the
first setting), configure logging and populate the app registry.
"""
from django.apps import apps
from django.conf import settings
from django.utils.log import configure_logging
    configure_logging(settings.LOGGING_CONFIG, settings.LOGGING)
apps.populate(settings.INSTALLED_APPS)
_gevent = None
def using_gevent():
global _gevent
if _gevent is None:
from django.conf import settings
_gevent = settings.USING_GEVENT
return _gevent |
magnunor/camera_aperture_position | camera_position_tools.py | Python | gpl-3.0 | 4,017 | 0.005726 | from matplotlib.path import Path
import matplotlib.patches as patches
from math import cos, sin, radians
class RectangleObject:
def __init__(self, centerPosition, sideSize1, sideSize2, label, rotation=0.0, color='black'):
self.sideSize1 = sideSize1
self.sideSize2 = sideSize2
self.label = label
self.rotation = rotation
self.centerPosition = centerPosition
self.color = color
self.calculate_object_position(
sideSize1, sideSize2, centerPosition, rotation)
self.patch = self.make_patch()
def get_object_dimensions(self):
side1 = ((self.x0-self.x1)**2+(self.y0-self.y1)**2)**0.5
side2 = ((self.x1-self.x2)**2+(self.y1-self.y2)**2)**0.5
side3 = ((self.x2-self.x3)**2+(self.y2-self.y3)**2)**0.5
side4 = ((self.x3-self.x0)**2+(self.y3-self.y0)**2)**0.5
return(side1,side2,side3,side4)
def get_center_position(self):
return((self.x0+self.x2)*0.5, (self.y0+self.y2)*0.5)
def make_patch(self):
verts = [
(self.x0,self.y0),
(self.x1,self.y1),
(self.x2,self.y2),
(self.x3,self.y3),
(self.x0,self.y0)]
codes = [Path.MOVETO,
Path.LINETO,
Path.LINETO,
Path.LINETO,
Path.CLOSEPOLY]
label = [
self.label,
". Center: ",
str(self.centerPosition),
". Side length: ",
str(self.sideSize1),
"x",
str(self.sideSize2),
". Rotation: ",
str(self.rotation)]
label = ''.join(label)
path = Path(verts, codes)
patch = patches.PathPatch(
path,
lw=2,
fill=False,
color=self.color,
label=label)
return(patch)
def calculate_object_position(self, sideSize1, sideSize2, centerPosition, rotation):
#This can probably be done in a much more elegant way...
temp_x0 = -sideSize1/2.
temp_y0 = -sideSize2/2.
temp_x1 = -sideSize1/2.
temp_y1 = sideSize2/2.
temp_x2 = sideSize1/2.
temp_y2 = sideSize2/2.
temp_x3 = sideSize1/2.
temp_y3 = -sideSize2/2.
x0 = temp_x0*cos(radians(rotation))-temp_y0*sin(radians(rotation))
y0 = temp_x0*sin(radians(rotation))+temp_y0*cos(radians(rotation))
x1 = temp_x1*cos(radians(rotation))-temp_y1*sin(radians(rotation))
y1 = temp_x1*sin(radians(rotation))+temp_y1*cos(radians(rotation))
x2 = temp_x2*cos(radians(rotation))-temp_y2*sin(radians(rotation))
y2 = temp_x2*sin(radians(rotation))+temp_y2*cos(radians(rotation))
x3 = temp_x3*cos(radians(rotation))-temp_y3*sin(radians(rotation))
y3 = temp_x3*sin(radians(rotation))+temp_y3*cos(radians(rotation))
x_center_pos = centerPosition[0]
y_center_pos = centerPosition[1]
        self.x0 = x0 + x_center_pos
self.y0 = y0 + y_center_pos
self.x1 = x1 + x_center_pos
self.y1 = y1 + y_center_pos
self.x2 = x2 + x_center_pos
        self.y2 = y2 + y_center_pos
self.x3 = x3 + x_center_pos
self.y3 = y3 + y_center_pos
class CircleObject:
def __init__(self, centerPosition, radius, label, color='black'):
self.centerPosition = centerPosition
self.radius = radius
self.label = label
self.color = color
self.patch = self.make_patch()
def make_patch(self):
label = [
self.label,
". Center: ",
str(self.centerPosition),
". Radius: ",
str(self.radius)]
label = ''.join(label)
circle = patches.Circle(
self.centerPosition,
self.radius,
fill=False,
edgecolor=self.color,
label=label)
return(circle)
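# Minimal usage sketch (added; not part of the original module), assuming only
# matplotlib, which the imports above already require:
#
#   import matplotlib.pyplot as plt
#   fig, ax = plt.subplots()
#   rect = RectangleObject((0, 0), 4, 2, 'aperture', rotation=30, color='red')
#   circ = CircleObject((0, 0), 3, 'camera')
#   ax.add_patch(rect.patch)
#   ax.add_patch(circ.patch)
#   ax.set_xlim(-5, 5)
#   ax.set_ylim(-5, 5)
#   ax.legend()
#   plt.show()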
|
psychopy/versions | psychopy/iohub/devices/keyboard/darwinkey.py | Python | gpl-3.0 | 1,860 | 0 | # -*- coding: utf-8 -*-
# Part of the psychopy.iohub library.
# Copyright (C) 2012-2016 iSolver Software Solutions
# Distributed under the terms of the GNU General Public License (GPL).
# /System/Library/Frameworks/Carbon.framework/Versions/A/Frameworks/
# HIToolbox.framework/Headers/Events.h
QZ_ESCAPE = 0x35
QZ_F1 = 0x7A
QZ_F2 = 0x78
QZ_F3 = 0x63
QZ_F4 = 0x76
QZ_F5 = 0x60
QZ_F6 = 0x61
QZ_F7 = 0x62
QZ_F8 = 0x64
QZ_F9 = 0x65
QZ_F10 = 0x6D
QZ_F11 = 0x67
QZ_F12 = 0x6F
QZ_F13 = 0x69
QZ_F14 = 0x6B
QZ_F15 = 0x71
QZ_F16 = 0x6A
QZ_F17 = 0x40
QZ_F18 = 0x4F
QZ_F19 = 0x50
QZ_F20 = 0x5A
QZ_BACKQUOTE = 0x32
QZ_MINUS = 0x1B
QZ_EQUALS = 0x18
QZ_BACKSPACE = 0x33
QZ_INSERT = 0x72
QZ_HOME = 0x73
QZ_PAGEUP = 0x74
QZ_NUMLOCK = 0x47
QZ_KP_EQUALS = 0x51
QZ_KP_DIVIDE = 0x4B
QZ_KP_MULTIPLY = 0x43
QZ_TAB = 0x30
QZ_LEFTBRACKET = 0x21
QZ_RIGHTBRACKET = 0x1E
QZ_BACKSLASH = 0x2A
QZ_DELETE = 0x75
QZ_END = 0x77
QZ_PAGEDOWN = 0x79
QZ_KP7 = 0x59
QZ_KP8 = 0x5B
QZ_KP9 = 0x5C
QZ_KP_MINUS = 0x4E
QZ_CAPSLOCK = 0x39
QZ_SEMICOLON = 0x29
QZ_QUOTE = 0x27
QZ_RETURN = 0x24
QZ_KP4 = 0x56
QZ_KP5 = 0x57
QZ_KP6 = 0x58
QZ_KP_PLUS = 0x45
QZ_LSHIFT = 0x38
QZ_COMMA = 0x2B
QZ_PERIOD = 0x2F
QZ_SLASH = 0x2C
QZ_RSHIFT = 0x3C
QZ_UP = 0x7E
QZ_KP1 = 0x53
QZ_KP2 = 0x54
QZ_KP3 = 0x55
QZ_NUM_ENTER = 0x4C
QZ_LCTRL = 0x3B
QZ_LALT = 0x3A
QZ_LCMD = 0x37
QZ_SPACE = 0x31
QZ_RCMD = 0x36
QZ_RALT = 0x3D
QZ_RCTRL = 0x3E
QZ_FUNCTION = 0x3F
QZ_LEFT = 0x7B
QZ_DOWN = 0x7D
QZ_RIGHT = 0x7C
QZ_KP0 = 0x52
QZ_KP_PERIOD = 0x41
QZ_F1 = 145 # Keycode on Apple wireless kb
QZ_F2 = 144 # Keycode on Apple wireless kb
QZ_F3 = 160 # Keycode on Apple wireless kb
QZ_F4 = 131 # Keycode on Apple wireless kb
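# Note (added): the four assignments above overwrite the QZ_F1..QZ_F4 values
# defined earlier, so the code2label mapping built below ends up with the
# Apple-wireless-keyboard codes for those keys.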
code2label = {}
# need tp copy locals for py3
for k, v in locals().copy().items():
if k.startswith('QZ_'):
klabel = u'' + k[3:].lower()
code2label[klabel] = v
code2label[v] = klabel
|
vit1-irk/ii-db-utils | sqlite-export.py | Python | cc0-1.0 | 1,050 | 0.013333 | #!/usr/bin/env python3
# forked from spline1986's versions
import os, sys, sqlite3
from ii_functions import *
args=sys.argv[1:]
if len(args)==0:
print("Usage: sqlite-export.py <db_file>")
sys.exit(1)
check_dirs()
conn = sqlite3.connect(args[0])
c = conn.cursor()
c.execute("""CREATE TABLE IF NOT EXISTS msg(
id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
msgid TEXT,
kludges TEXT,
echoarea TEXT,
timestump INTEGER,
from_name TEXT,
address TEXT,
to_name TEXT,
subject TEXT,
body TEXT,
UNIQUE (id, msgid));""")
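# Example (added; "ii.db" is a hypothetical file name): once populated, the
# archive can be inspected with the sqlite3 CLI, e.g.
#   sqlite3 ii.db "SELECT echoarea, COUNT(*) FROM msg GROUP BY echoarea;"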
echoareas = sorted(os.listdir(indexdir))
for echoarea in echoareas:
print("Echoarea: " + echoarea)
msgids = getMsgList(echoarea)
for msgid in msgids[:-1]:
print("MSGID: " + msgid)
msg = getMsg(msgid)
c.execute("INSERT OR IGNORE INTO msg (msgid, kludges, echoarea, timestump, from_name, address, to_name, subject, body) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?);", (msgid, "/".join(msg["tags"]), msg["echo"], msg["time"], msg["sender"], msg["addr"], msg["to"], msg["subj"], msg["msg"]))
print("OK")
conn.commit()
conn.close()
|
scottsappen/PayMySitter | main.py | Python | apache-2.0 | 10,136 | 0.016772 | import os
import sys
import logging
import uuid
import traceback
import datetime
import cgi
import MySQLdb
import stripe
import re
import requests
import urllib
import time
import pmsconstants
from flask import Flask, render_template, request, jsonify, redirect, url_for, Markup, session, Response
from werkzeug import parse_options_header, generate_password_hash, check_password_hash
from google.appengine.datastore.datastore_query import Cursor
from google.appengine.api import mail, users, memcache, images
from google.appengine.ext import ndb, blobstore
from google.appengine.ext.webapp import blobstore_handlers
from datetime import date, datetime, timedelta
from webapp2_extras import security
from pmsdatamodel import Members.........
from pmsmemberinfo import MemberInfo
from pmsemailutility import EmailerUtility
from pmstextutility import TextUtility
app = Flask(__name__)
app.secret_key = ...
#Custom template filters
@app.template_filter('format_cents_as_currency')
def format_cents_as_currency_filter(value):
return "${:,.2f}".format(float(value) / 100.0)
#:: SIGNING IN AUTHENTICATION ::
#Someone is trying to login
@app.route('/signinauthenticate', methods=['POST'])
def signinauthenticate():
#grab the request data
try:
#or use a email parsing library, you get the idea and do something...
inputEmailAddress = request.form.get("inputEmailAddress")
if not re.match(r"[^@]+@[^@]+\.[^@]+", inputEmailAddress):
if not re.match(r"^[A-Za-z0-9\.\+_-]+@[A-Za-z0-9\._-]+\.[a-zA-Z]*$", inputEmailAddress):
inputPassword = request.form.get("inputPassword")
#Query NoSQL and find out if this member already exists by email, if so, show the error
member = MemberInfo()
member = member.getMemberInfoByEmail(inputEmailAddress)
#Make sure the password is correct
if not check_password_hash(member.passwordhash, inputPassword):
return render_template('index.html', inputEmailAddress=inputEmailAddress, alertmessage='It appears that is not quite right.')
#Save the session and cookie values (do more than just email, but again, you get the idea)
session[_SESSION_COOKIE_EMAIL] = member.emailaddress
return redirect(url_for('landingpage'))
except:
return render_template('index.html', inputEmailAddress='', alertmessage='Oops!')
#:: SAVE USER PROFILE PHOTO ::
#This route only gets used when a user saves updates to their profile photo
@app.route('/userprofilephotoconfirm', methods=['POST'])
def userprofilephotoconfirm():
member = MemberInfo()
#this will cause an ugly key error if we don't handle it properly
try:
inputUploadedPictureFile = request.files['inputProfilepicture']
if inputUploadedPictureFile:
header = inputUploadedPictureFile.headers['Content-Type']
parsed_header = parse_options_header(header)
blob_key = parsed_header[1]['blob-key']
except:
#no need to log this error output
dummyvariable = ""
#a user is uploading a picture, either new if they did not have one prior, or uploaded a new one which would delete the old one
if inputUploadedPictureFile:
if member.pictureblobstorekey:
blobstore.delete(member.pictureblobstorekey)
images.delete_serving_url(member.pictureblobstorekey)
member.pictureservingurl = images.get_serving_url(blob_key)
member.pictureblobstorekey = blob_key
member.put()
return render_template('userprofilephotosaved.html', member=member)
except:
try:
#If you couldn't complete the user save, be sure to delete the photo from the blobstore or re-use it later (to avoid a lost child hanging around)
inputUploadedPictureFile = request.files['inputProfilepicture']
if inputUploadedPictureFile:
header = inputUploadedPictureFile.headers['Content-Type']
parsed_header = parse_options_header(header)
blob_key = parsed_header[1]['blob-key']
blobstore.delete(blob_key)
except:
#no need to log this error output
dummyvariable = ""
#Create a new form POST URL for the blobstore
userprofilephoto_form_url = blobstore.create_upload_url('/userprofilephotoconfirm')
return render_template('userprofilephoto.html', member=member, userprofilephoto_form_url=userprofilephoto_form_url, user_profilepicturesrc=user_profilepicturesrc, alertmessage='Oops!', userprofilephoto_form_url=userprofilephoto_form_url, user_profilepicturesrc=user_profilepicturesrc)
#:: SUBSCRIPTION SIGN UP CONFIRMATION ::
#This route only gets used when a parent signs up for a plan
@app.route('/subscriptionsignupconfirm', methods=['POST'])
def subscriptionsignupconfirm():
member = MemberInfo()
try:
#Set the required stripe API key that is going to be used
stripe.api_key = _STRIPE_SECRET_KEY
#If this person has a stripecustomerid (they are a Stripe customer object), then just update the plan!
if stripeprofile.stripecustomerid:
#Retrieve the customer from Stripe
try:
stripeCustomer = stripe.Customer.retrieve(stripeprofile.stripecustomerid)
except:
# The card has been declined
logging.error(':: Error | subscriptionsignupconfirm | 1 -- Error creating a new subscription ... ::')
raise Exception
else:
#If this person does not have a stripecustomerid (they are not a Stripe customer object), then they MUST have a token, otherwise we bomb
inputStripeToken = request.form.get("inputStripeToken")
if not inputStripeToken:
logging.error(':: Error | subscriptionsignupconfirm | 1 -- inputStripeToken was None ... ::')
raise Exception
#Create a new Stripe customer for this member
try:
stripeCustomer = stripe.Customer.create(
source=inputStripeToken,
email=member.emailaddress
)
#Save that payment profile object
stripeprofile.stripecustomerid = stripeCustomer.id
except:
# The card has been declined
                logging.error(':: Error | subscriptionsignupconfirm | 1 -- Error creating a new subscription ... ::')
raise Exception
#This customer update call will update the customer subscription
try:
            #Save the plan on the customer record at Stripe
#planType could be any plan you set up at Stripe, like a yearly or monthly plans perhaps
subscription = stripeCustomer.subscriptions.create(plan=planType)
#Save the plan type for the user in NoSQL
stripeprofile.stripe_subscription_plan = planType
stripeprofile.stripe_subscription_id = subscription.id
#You could even use gift codes in your app very easily too
#if inputGiftCode:
# stripeprofile.subscriptiongiftcode = inputGiftCode
#else:
# stripeprofile.subscriptiongiftcode = None
#stripeprofile.put()
except:
# The card has been declined
logging.error(':: Error | subscriptionsignupconfirm | 1 -- Error creating a new subscription ... ::')
raise Exception
return redirect(url_for('subscriptionsignupsuccess'))
except:
logging.error(':: Error | subscriptionsignupconfirm | An error occurred trying ... ::')
logging.error(traceback.format_exc())
return render_template('subscriptionsignupfailure.html', member=member)
#:: STRIPE ACCOUNT WEBHOOK ::
#Used by Stripe to contact us programmatically telling us about certain back-end events, like an account that has become unverified due to incorrect information
@app.route('/stripewebhookbdfjkl4378hsfk43jkasdkl', methods=['POST'])
def stripewebhookbdfjkl4378hsfk43jkasdkl():
webhookJSON = request.get_json()
#Get the type of event
eventType = webhookJSON.get('type')
#Get the live mode of event
eventMode = webhookJSON.get('livemode')
if not eventMode:
return Response(status=200)
#Get the event ID and Account ID
eventID = webhookJSON.get('id')
eventAccountID = webhookJSON.get('user_id')
#Check if this |
lisogallo/bitcharts-core-legacy | bitcharts.py | Python | agpl-3.0 | 24,568 | 0.00057 | #!/usr/bin/env python
#-*- coding: utf-8 -*-
"""
Bitcharts - ORM classes and functions
Copyright(c) 2014 - Lisandro Gallo (lisogallo)
[email protected]
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as
published by the Free Software Foundation, either version 3 of the
License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import os
import re
import sys
import smtplib
import requests
import argparse
import simplejson as json
from collections import OrderedDict
from sqlalchemy.engine import Engine
from BeautifulSoup import BeautifulSoup
from ConfigParser import SafeConfigParser
from datetime import date, datetime, timedelta
from sqlalchemy import exc, event, create_engine, ForeignKey, Sequence
from sqlalchemy import Column, Date, Time, Integer, String, Boolean, Float
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship, backref, aliased, sessionmaker
@event.listens_for(Engine, 'connect')
def set_sqlite_pragma(dbapi_connection, connection_record):
"""
Decorator to force the support of foreign keys on SQLite.
:param dbapi_connection: Engine object.
:param connection_record: Connection string.
"""
cursor = dbapi_connection.cursor()
cursor.execute("PRAGMA foreign_keys=ON")
cursor.close()
def open_session(engine):
"""
Open session on current connection and return session object.
:param engine: Engine object for current connection.
"""
Session = sessionmaker(bind=engine)
session = Session()
return session
def connect_database(database_url):
"""
Create connection to engine and return engine object.
:param database_url: Full path URL to SQLite database.
"""
# Set 'echo' to True to get verbose output.
engine = create_engine(database_url, echo=False)
return engine
def create_tables(database_url):
"""
Create database schema.
:param database_url: Full path URL to SQLite database.
"""
engine = connect_database(database_url)
Base.metadata.create_all(engine)
def config_parser(config_file):
"""
Parse data from configuration files.
:param config_file: Configuration file with currencies or exchanges data.
"""
res = []
# Parse and read configuration file.
cparser = SafeConfigParser()
cparser.read(config_file)
for section in cparser.sections():
tup = ()
for option in cparser.options(section):
value = cparser.get(section, option)
# String 'True' or 'False' values to boolean
if value == 'True':
value = True
elif value == 'False':
value = False
tup += (value, )
res.append(tup)
return res
def send_email(sender, receiver, subject, body):
"""
Auxiliar function to inform by mail about any unexpected exception.
:param sender: From mail address.
:param receiver: Destination mail address.
:param subject: Subject.
:param body: Content body.
"""
try:
msg = ("From: %s\r\nTo: %s\r\nSubject: %s\r\n\r\n%s"
%(sender, receiver, subject, body))
smtp = smtplib.SMTP('localhost')
smtp.sendmail(sender, [receiver], msg)
smtp.quit()
except Exception as exception:
print 'Error %s:' % exception.args[0]
def get_json(url):
"""
Get JSON resource from remote URL.
:param url: Full URL to JSON resource over HTTP protocol.
"""
try:
req = requests.get(url,
headers={'Accept': 'application/json'},
timeout=5)
res = req.json()
return res
except Exception as exception:
print 'Error %s:' % exception.args[0]
send_email(
'[email protected]',
'[email protected]',
'ERROR',
            exception.args[0]
)
res = {}
return res
def is_dict(something):
"""
Check if input object is a dictionary or contains a dictionary.
    Return the dictionary found.
:param something: Input object to check.
"""
if type(something) is dict:
for values in something.itervalues():
if type(values) is dict:
return is_dict(values)
return something
def parse_values(dictionary):
"""
Search for common keys in exchange's APIs which contains currencies values.
:param dictionary: Dictionary previously obtained from JSON APIs.
"""
# Check if input is or contains a dictionary and returns it.
res = is_dict(dictionary)
# Search for common keys used on APIs and store its values
if 'last' in res.iterkeys():
try:
last = float(res.get('last'))
return last
except TypeError:
return None
elif 'blue' in res.iterkeys():
try:
blue = float(res.get('blue'))
return blue
except TypeError:
return None
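# Illustration (added; hypothetical payload): for a ticker response such as
# {"btc": {"last": "512.3", "high": 520.0}}, is_dict() recurses down to the
# innermost dict and parse_values() returns 512.3 as a float; payloads with a
# "blue" key (unofficial exchange rates) are read the same way.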
def write_object(database_url, new_object):
"""
Write new currency, exchange or association object to database through ORM.
:param database_url: Full path URL to SQLite database.
:param new_object: Object variable.
"""
try:
engine = connect_database(database_url)
session = open_session(engine)
session.add(new_object)
session.commit()
except exc.SQLAlchemyError, exception:
if session:
session.rollback()
print 'Error %s:' % exception.args[0]
sys.exit(1)
finally:
if session:
session.close()
def initialize_database(database_url, currencies_file, exchanges_file):
"""
Initialize Bitcharts database with exchanges and currencies data
parsed from configuration files.
:param database_url: Full path URL to SQLite database.
:param currencies_file: Configuration file with currencies information.
:param exchanges_file: Configuration file with exchanges information.
"""
currencies = config_parser(currencies_file)
# From data in configuration file create each currency ORM object.
for currency in currencies:
name, description, cryptocurrency, active = currency
new_currency = Currency(name,
description,
cryptocurrency,
active)
write_object(database_url, new_currency)
try:
engine = connect_database(database_url)
session = open_session(engine)
# From data in configuration file create each currency ORM object.
exchanges = config_parser(exchanges_file)
# Open a session and query the associated currency id from the
# currency name (unique) in the configuration file.
for exchange in exchanges:
name, country, url, api, currency_name, active = exchange
query = session.query(Currency.id).filter(
Currency.name == currency_name).first()
currency_id = query[0]
# From data in configuration file create each currency ORM object.
new_exchange = Exchange(name,
country,
url,
api,
currency_id,
active)
write_object(database_url, new_exchange)
except exc.SQLAlchemyError, exception:
if session:
session.rollback()
print 'Error %s:' % exception.args[0]
sys.exit(1)
finally:
if session:
session.close()
def clean_da |
opentrials/collectors | collectors/fdadl/collector.py | Python | mit | 2,443 | 0.000409 | # -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import io
import ijson
import shutil
import logging
import zipfile
import tempfile
import requests
from .. import base
from .record import Record
logger = logging.getLogger(__name__)
# Module API
def collect(conf, conn):
"""Collect FDA Drug Labels.
"""
# For more information see:
# https://open.fda.gov/api/reference/
URL = 'http://download.open.fda.gov/drug/label/{file}.zip'
FILES = [
'drug-label-0001-of-0005.json',
'drug-label-0002-of-0005.json',
'drug-label-0003-of-0005.json',
'drug-label-0004-of-0005.json',
'drug-label-0005-of-0005.json',
]
# Create temp directory
dirpath = tempfile.mkdtemp()
success = 0
for file in FILES:
# Download json
url = URL.format(file=file)
arch = zipfile.ZipFile(io.BytesIO(requests.get(url).content))
path = arch.extract(file, dirpath)
file = io.open(path, encoding='utf-8')
# Get last updated
last_updated = list(ijson.items(file, 'meta.last_updated'))[0]
# Get items iterator
file.seek(0)
items = ijson.items(file, 'results.item')
for item in items:
meta = item['openfda']
base.config.SENTRY.extra_context({
'url': url,
'item': meta,
})
# Skip if no NDC code
            if 'product_ndc' not in meta:
continue
# Get data
data = {
'product_ndc': meta['product_ndc'][0],
'product_type': meta['product_type'][0],
'generic_name': meta['generic_name'][0],
'brand_name': meta['brand_name'][0],
'last_updated': last_updated,
}
if meta.get('application_number'):
                data['fda_application_number'] = meta['application_number'][0]
# Create record
record = Record.create(url, data)
# Write record
record.write(conf, conn)
# Log info
success += 1
if not success % 100:
logger.info('Collected %s "%s" interventions',
success, record.table)
# Remove temp directory
shutil.rmtree(dirpath)
|
3dfxsoftware/cbss-addons | duplicated_tasks/wizard/__init__.py | Python | gpl-2.0 | 30 | 0 | import search_duplicated_task
|
sdwebster/learn-python-the-hard-way-solutions | 08/ex8.py | Python | mit | 352 | 0.011364 | # -*- coding: utf-8 -*-
formatter = "%r %r %r %r"
print formatter % (1,2,3,4)
print formatter % ("one", "two", "three", "four")
print formatter % (True, False, False, True)
print formatter % (formatter, formatter, formatter, formatter)
print formatter % (
"Whose woods these are",
"I think I know",
"His house is in",
"Th | e village though"
)
|
bleedingwolf/Spyglass | spyglass/__init__.py | Python | bsd-3-clause | 68 | 0.014706 |
import backend # hopefully fixes issues with Celery finding tasks?
|
pkesist/buildpal | Python/server_svc_handler.py | Python | gpl-3.0 | 1,123 | 0.002671 | import threading
from multiprocessing import cpu_count
class Terminator:
def __init__(self):
self._should_stop = False
def stop(self):
        self._should_stop = True
def should_stop(self):
return self._should_stop
class Handler(object):
# no parameters are permitted; all configuration should be placed in the
# configuration file and handled in the Initialize() method
def __init__(self):
pass
    # called when the service is starting
def Initialize(self, configFileName):
self.terminator = Terminator()
# called when the service is starting immediately after Initialize()
# use this to perform the work of the service; don't forget to set or check
# for the stop event or the service GUI will not respond to requests to
# stop the service
def Run(self):
from buildpal.server.runner import ServerRunner
ServerRunner(0, cpu_count()).run(self.terminator)
# called when the service is being stopped by the service manager GUI
def Stop(self):
self.terminator.stop()
|
fucxy/fucxy-node | gateway/modules.py | Python | gpl-3.0 | 1,672 | 0.031699 | #!/usr/bin/env python
import socket,sys,gateway_cfg,select,socketserver,http.server,urllib
from threading import *
class WebHandler(http.server.BaseHTTPRequestHandler):
def do_GET(self):
parseParams = urllib.parse.urlparse(self.path)
if parseParams.path=="/t" :
self.send_error(404,"You can't pass!!")
else:
self.send_response(200)
            self.send_header('Content-Type', 'text/html')  # was 'application/html', not a valid MIME type
self.end_headers()
            # py3 http.server writes to a binary stream, so send bytes, not str;
            # the handler closes wfile itself, so the original explicit close() was dropped
            self.wfile.write(b"Hello World!!")
class webserver (Thread):
def __init__(self,condition):
#init
Thread.__init__(self)
self.con = condition
def run(self):
#run
print("web server start!!")
Handler = WebHandler
httpd = http.server.HTTPServer(("", 8080), Handler)
httpd.serve_forever()
class msgcenter (Thread):
def __init__(self,condition):
#init server setting
Thread.__init__(self)
self.con = condition
try:
print("start conf | ig")
self.server = socket.socket(socket.AF_INET,socket.SOCK_STREAM)
self.server.bind((gateway_cfg.address['host'],gateway_cfg.address['port']))
            self.server.listen(gateway_cfg.max_user)
self.break_out = False
except socket.error as msg:
print("[ERROR] %s\n" % msg)
self.break_out = True
def run(self):
#start
if self.break_out == False:
print("msgcenter start!!")
while True:
try:
connection,address = self.server.accept()
connection.setblocking(0)
connection.close()
except IOError as e:
if e.errno == 11:
raise
else:
print("socket error")
exit(-1)
|
dlutxx/cement | main.py | Python | mit | 3,137 | 0.048659 | #!python
#-*- encoding=utf-8 -*-
import sys, sqlite3, logging, os, os.path
import wx, time, re, copy, webbrowser
import wx.grid, wx.html
import json, math
from cfg import config
from util import *
from model import *
from view import *
class App(wx.App):
instance = None
def __init__(self, conf, *args):
wx.App.__init__(self, *args) # do not redirect for now
self.user = None
self.cfg = conf
self.printer = None
def OnInit(self):
return True
@staticmethod
def GetInstance():
return App.instance
def Quit(self):
wx.Exit()
sys.exit()
def Run(self):
if not os.path.isfile( self.cfg.datapath ):
self.setup()
else:
self.bootstrap()
self.checkExpiration()
AuthFrame(self).Show()
def bootstrap(self):
self.dbconn = DB.getInstance().conn
self.modelUser = ModelUser( self.dbconn )
self.modelSheet= ModelSheet( self.dbconn )
self.logger = XLog.getDefaultLogger()
def setup(self):
try:
os.makedirs( os.path.join(self.cfg.rootdir, r'data') )
os.makedirs( os.path.join(self.cfg.rootdir, r'log') )
os.makedirs( os.path.join(self.cfg.rootdir, r'cache') )
except:
            alert( u'Program initialization failed, exiting' )
self.Quit()
self.bootstrap()
self.modelUser.initTable()
self.modelSheet.initTable()
def checkExpiration(self):
self.expirationTip = ''
return True # skip expiration check
self.appExpireInDays = self.cfg.expiration
time0 = self.modelUser.getEarliestDate()
daysElapsed = 0
if time0>0:
daysElapsed = int( (time.time()-time0)/86400 )
if daysElapsed > -1:
if self.appExpireInDays < daysElapsed:
self.expire()
self.appExpireInDays -= daysElapsed
daysElapsed
            self.expirationTip = u'Trial version, expires in {} days'.format(self.appExpireInDays)
else:
            self.expire(u'×System clock is inconsistent×\nExiting')
    def expire(self, message=None):
        # accept the message this method is called with above; the original
        # signature took no argument, which would have raised a TypeError
        alert(message or u'This software has expired\nand can no longer be used', u'Trial expired~')
        self.Quit()
def authOk(self, user):
self.user = user
self.cfg.user = user
        mf = MainFrame(parent=None, title=self.user['name'] + u' hello, welcome to this software') #( {} )'.format(self.expirationTip) )
mf.app = self
if self.user.isAdmin():
ManagerPanel(mf, self)
else:
OperatorPanel(mf)
mf.maxWindow()
def getPrinter(self):
if not self.printer:
self.printer = wx.html.HtmlEasyPrinting()
return self.printer
def printSheet(self, sheet):
# self.printViaHtml( sheet )
self.getPrinter().GetPrintData().SetPaperSize( wx.Size(-1, 400) )
self.getPrinter().GetPrintData().PaperSize = wx.Size(-1, 400)
self.getPrinter().PrintText( self.getSheetHtml(sheet) )
def getSheetHtml(self, sheet):
data = sheet.getDict()
data['bigamount'] = cnNumber( data['amount'] )
return getPrintTpl().format( **data )
def printViaHtml(self, sheet):
        filepath = os.path.join(self.cfg.cachepath, "{}.html".format(sheet['id']) )
        with open(filepath, 'wb') as f:
            f.write( self.getSheetHtml(sheet).encode('utf-8') )
webbrowser.open(filepath)
if '__main__' == __name__:
    app = App(config, False) # True, os.path.join(ctx.dir, 'run.dat') )
    App.instance = app
    config.app = app
    app.Run()
    app.MainLoop()
|
plguhur/random-sets | simuFindNMin.py | Python | apache-2.0 | 1,243 | 0.015286 | # run simulations for different numbers of voters
import multiprocessing
import os, sys
import shutil
import time
import numpy as np
from randomSets import *
def worker((Ncandidats, q, Nwinners, Ntests)):
"""worker function"""
    sys.stdout.write('\nSTART -- %i candidates -- \n' % Ncandidats)
    sys.stdout.flush()
    time.sleep(0.01) # make sure each simulation is initialized differently
minNvoters = findMinNvoters(Ncandidats, q =q, Nwinners = Nwinners, Ntests = Ntests)
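    # Several worker processes append to this file concurrently, so lines
    # from different workers may interleave in the output file.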
with open('nmin-candidates-10-to-100-by-2.txt','a') as f_handle:
f_handle.write("%i " % Ncandidats)
np.savetxt(f_handle,minNvoters)
return
if __name__ == '__main__':
    print "This time, it's the right one!"
print (time.strftime("%H:%M:%S"))
root = "simulations/"
try:
os.mkdir(root)
except OSError:
pass
candidates = range(10,110, 2)
Nwinners = 1
minNvoters = np.zeros((len(candidates), Nwinners))
args = []
for i in range(len(candidates)):
arg = [candidates[i],200,1,100]
args.append(arg)
    if not args:
        print "Nothing to do!"
pool = multiprocessing.Pool(processes=20)
pool.map(worker, args)
    print "So, does it work? :)"
|
mrricearoni/iTunesSearch | printRecentAlbums.py | Python | mit | 357 | 0.002801 | import json
from pprint import pprint
from sys import argv
jsonFile = argv[1]
with open(jsonFile) as data_file:
data = json.load(data_file)
for i in range(0, data['resultCount']):
if data['results'][i]['trackCount'] != 1:
        print(data['results'][i]['collectionName'], data['results'][i]['releaseDate'])
# sort by release date
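# A minimal sketch of that sort (not in the original script): iTunes
# releaseDate values are ISO-8601 strings, so sorting them as plain
# strings is chronological.
albums = [r for r in data['results'] if r.get('trackCount') != 1]
for album in sorted(albums, key=lambda r: r['releaseDate']):
    print(album['collectionName'], album['releaseDate'])
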
pprint(data)
|
ioram7/keystone-federado-pgid2013 | build/greenlet/run-tests.py | Python | apache-2.0 | 1,321 | 0.003785 | #! /usr/bin/env python
import sys, os, getopt, struct, unittest
from distutils.spawn import spawn
build = True
verbosity = 2
here = os.path.dirname(os.path.abspath(__file__))
os.chdir(here)
def bits():
"""determine if running on a 32 bit or 64 bit platform
"""
return struct.calcsize("P") * 8
# -- parse options
try:
opts, args = getopt.getopt(sys.argv[1:], "nq")
if args:
raise getopt.GetoptError("too many arguments")
except getopt.GetoptError:
sys.exit("run-tests.py: error: %s" % sys.exc_info()[1])
for o, a in opts:
if o == "-q":
verbosity = 0
elif o == "-n":
build = False
# -- build greenlet
if build:
if verbosity == 0:
cmd = [sys.executable, "setup.py", "-q", "build_ext", "-q"]
else:
cmd = [sys.executable, "setup.py", "build_ext"]
    spawn(cmd, search_path=0)
# -- find greenlet but skip the one in "."
if not build:
oldpath = sys.path[:]
sys.path.remove(here)
import greenlet
if not build:
sys.path[:] = oldpath
sys.stdout.write("python %s (%s bit) using greenlet %s from %s\n" %
(sys.version.split()[0], bits(), greenlet.__version__, greenlet.__file__))
# -- run tests
from tests import test_collector
suite = test_collector()
unittest.TextTestRunner(verbosity=verbosity).run(suite)
|
tensorflow/tensorboard | tensorboard/plugins/mesh/summary_v2_test.py | Python | apache-2.0 | 5,916 | 0.000676 | # Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tensorboard.plugins.mesh.summary."""
import glob
import json
import os
import tensorflow as tf
from tensorboard.compat import tf2
from tensorboard.plugins.mesh import summary
from tensorboard.plugins.mesh import metadata
from tensorboard.plugins.mesh import plugin_data_pb2
from tensorboard.plugins.mesh import test_utils
try:
tf2.__version__ # Force lazy import to resolve
except ImportError:
tf2 = None
try:
tf.compat.v1.enable_eager_execution()
except AttributeError:
# TF 2.0 doesn't have this symbol because eager is the default.
pass
class MeshSummaryV2Test(tf.test.TestCase):
def setUp(self):
super(MeshSummaryV2Test, self).setUp()
if tf2 is None:
self.skipTest("v2 summary API not available")
def mesh_events(self, *args, **kwargs):
self.write_mesh_event(*args, **kwargs)
event_files = sorted(glob.glob(os.path.join(self.get_temp_dir(), "*")))
self.assertEqual(len(event_files), 1)
events = list(tf.compat.v1.train.summary_iterator(event_files[0]))
# Expect a boilerplate event for the file_version, then the vertices
# summary one.
num_events = 2
# All additional tensors (i.e. colors or faces) will be stored as separate
# events, so account for them as well.
num_events += len(frozenset(["colors", "faces"]).intersection(kwargs))
self.assertEqual(len(events), num_events)
# Delete the event file to reset to an empty directory for later calls.
os.remove(event_files[0])
return events[1:]
def write_mesh_event(self, *args, **kwargs):
kwargs.setdefault("step", 1)
writer = tf2.summary.create_file_writer(self.get_temp_dir())
with writer.as_default():
summary.mesh(*args, **kwargs)
writer.close()
def get_metadata(self, event):
return metadata.parse_plugin_metadata(
event.summary.value[0].metadata.plugin_data.content
)
def test_step(self):
"""Tests that different components of mesh summary share the same
step."""
tensor_data = test_utils.get_random_mesh(
100, add_faces=True, add_colors=True
)
config_dict = {"foo": 1}
events = self.mesh_events(
"a",
tensor_data.vertices,
faces=tensor_data.faces,
colors=tensor_data.colors,
config_dict=config_dict,
step=333,
)
self.assertEqual(333, events[0].step)
self.assertEqual(333, events[1].step)
self.assertEqual(333, events[2].step)
def test_tags(self):
"""Tests proper tags for each event/tensor."""
tensor_data = test_utils.get_random_mesh(
100, add_faces=True, add_colors=True
)
config_dict = {"foo": 1}
name = "foo"
events = self.mesh_events(
name,
tensor_data.vertices,
faces=tensor_data.faces,
colors=tensor_data.colors,
config_dict=config_dict,
step=333,
)
expected_names_set = frozenset(
name_tpl % name for name_tpl in ["%s_VERTEX", "%s_FACE", "%s_COLOR"]
)
actual_names_set = frozenset(
[event.summary.value[0].tag for event in events]
)
self.assertEqual(expected_names_set, actual_names_set)
        expected_bitmask = metadata.get_components_bitmask(
[
plugin_data_pb2.MeshPluginData.VERTEX,
plugin_data_pb2.MeshPluginData.FACE,
plugin_data_pb2.MeshPluginData.COLOR,
]
)
for event in events:
self.assertEqual(
                expected_bitmask, self.get_metadata(event).components
)
def test_pb(self):
"""Tests ProtoBuf interface."""
name = "my_mesh"
tensor_data = test_utils.get_random_mesh(
100, add_faces=True, add_colors=True
)
config_dict = {"foo": 1}
proto = summary.mesh_pb(
name,
tensor_data.vertices,
faces=tensor_data.faces,
colors=tensor_data.colors,
config_dict=config_dict,
)
plugin_metadata = metadata.parse_plugin_metadata(
proto.value[0].metadata.plugin_data.content
)
self.assertEqual(
json.dumps(config_dict, sort_keys=True), plugin_metadata.json_config
)
class MeshSummaryV2GraphTest(MeshSummaryV2Test, tf.test.TestCase):
def write_mesh_event(self, *args, **kwargs):
kwargs.setdefault("step", 1)
# Hack to extract current scope since there's no direct API for it.
with tf.name_scope("_") as temp_scope:
scope = temp_scope.rstrip("/_")
@tf2.function
def graph_fn():
# Recreate the active scope inside the defun since it won't propagate.
with tf.name_scope(scope):
summary.mesh(*args, **kwargs)
writer = tf2.summary.create_file_writer(self.get_temp_dir())
with writer.as_default():
graph_fn()
writer.close()
if __name__ == "__main__":
tf.test.main()
|
jackTheRipper/iotrussia | web_server/lib/werkzeug-master/examples/i18nurls/__init__.py | Python | gpl-2.0 | 57 | 0 | from i18nurls.application import Application as make_app
|
effluxsystems/pyefflux | tests/unit/test_base.py | Python | mit | 1,403 | 0 | """
Tests for efflux.telemetry.endpoint
"""
import unittest
import mock
from efflux.telemetry.endpoint import Telemetry
class Dummy(Telemetry):
def _set_route(self):
self.base_route = None
class TelemetryTests(unittest.TestCase):
'''Tests for EffluxEndpoint'''
MOCKS = [
]
def setUp(self):
for target in self.MOCKS:
patcher = mock.patch(target)
patcher.start()
self.addCleanup(patcher.stop)
self.domain = 'boom.efflux.io'
self.token = 12345
self.efflux = Dummy(
self.domain,
self.token
)
def test_check_fields(self):
l1 = ['foo', 'bar', 'baz']
l2 = ['foo', 'bar', 'baz', 'bob']
l3 = ['foo', 'wut']
# subset OK
self.assertTrue(
            self.efflux.check_required_fields(l1, l2)
)
# equal OK
self.assertTrue(
self.efflux.check_required_fields(l1, l1)
)
# not subset
self.assertFalse(
self.efflux.check_required_fields(l3, l2)
)
def test_set_namespace(self):
orig = {
'foo': 'bar',
'baz': 'ball'
        }
check = {
'ns_foo': 'bar',
'ns_baz': 'ball'
}
self.assertEqual(
check,
self.efflux.set_namespace('ns', orig)
)
|
sdispater/pendulum | tests/test_main.py | Python | mit | 268 | 0 | import pytz
from pendulum import _safe_timezone
from pendulum.tz.timezone import Timezone
def test_safe_timezone_with_tzinfo_objects():
tz = _safe_timezone(pytz.timezone("Europe/Paris"))
assert isinstance(tz, Timezone)
    assert "Europe/Paris" == tz.name
|