repo_name stringlengths 5..92 | path stringlengths 4..221 | copies stringclasses 19 values | size stringlengths 4..6 | content stringlengths 766..896k | license stringclasses 15 values | hash int64 -9,223,277,421,539,062,000..9,223,102,107B | line_mean float64 6.51..99.9 | line_max int64 32..997 | alpha_frac float64 0.25..0.96 | autogenerated bool 1 class | ratio float64 1.5..13.6 | config_test bool 2 classes | has_no_keywords bool 2 classes | few_assignments bool 1 class |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
Gaia3D/QGIS | python/plugins/processing/algs/qgis/DeleteHoles.py | 1 | 2935 | # -*- coding: utf-8 -*-
"""
***************************************************************************
DeleteHoles.py
---------------------
Date : April 2015
Copyright : (C) 2015 by Etienne Trimaille
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Etienne Trimaille'
__date__ = 'April 2015'
__copyright__ = '(C) 2015, Etienne Trimaille'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
from qgis.core import QgsFeature, QgsGeometry
from processing.core.GeoAlgorithm import GeoAlgorithm
from processing.core.parameters import ParameterVector
from processing.core.outputs import OutputVector
from processing.tools import dataobjects, vector
class DeleteHoles(GeoAlgorithm):
INPUT = 'INPUT'
OUTPUT = 'OUTPUT'
def defineCharacteristics(self):
self.name = 'Delete holes'
self.group = 'Vector geometry tools'
self.addParameter(ParameterVector(self.INPUT,
self.tr('Input layer'), [ParameterVector.VECTOR_TYPE_POLYGON]))
self.addOutput(OutputVector(self.OUTPUT, self.tr('Output')))
def processAlgorithm(self, progress):
layer = dataobjects.getObjectFromUri(
self.getParameterValue(self.INPUT))
writer = self.getOutputFromName(self.OUTPUT).getVectorWriter(
layer.pendingFields(),
layer.wkbType(),
layer.crs())
features = vector.features(layer)
count = len(features)
total = 100.0 / float(count)
feat = QgsFeature()
        for current, f in enumerate(features):
geometry = f.geometry()
if geometry.isMultipart():
multi_polygon = geometry.asMultiPolygon()
for polygon in multi_polygon:
                    del polygon[1:]  # drop the interior rings (holes), keep only the exterior
geometry = QgsGeometry.fromMultiPolygon(multi_polygon)
else:
polygon = geometry.asPolygon()
                del polygon[1:]  # drop the interior rings (holes), keep only the exterior
geometry = QgsGeometry.fromPolygon(polygon)
feat.setGeometry(geometry)
feat.setAttributes(f.attributes())
writer.addFeature(feat)
            progress.setPercentage(int(current * total))
del writer
| gpl-2.0 | -8,316,699,685,901,509,000 | 35.246914 | 75 | 0.527768 | false | 4.899833 | false | false | false
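The hole-removal logic above is easy to check outside QGIS. Below is a minimal, dependency-free sketch of the same idea, assuming (as QGIS does) that a polygon is a list of rings where ring 0 is the exterior boundary and any further rings are holes; the function names are illustrative, not part of the QGIS API.

```python
# A polygon is modeled as a list of rings; each ring is a list of (x, y) points.
# Ring 0 is the exterior boundary, rings 1..n are interior holes.
def delete_holes(polygon):
    """Return a copy of the polygon that keeps only the exterior ring."""
    return polygon[:1]

def delete_holes_multi(multi_polygon):
    """Apply delete_holes to every part of a multipolygon."""
    return [delete_holes(part) for part in multi_polygon]

square = [(0, 0), (10, 0), (10, 10), (0, 10), (0, 0)]
hole = [(4, 4), (6, 4), (6, 6), (4, 6), (4, 4)]
assert delete_holes([square, hole]) == [square]
assert delete_holes_multi([[square, hole], [square]]) == [[square], [square]]
```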
lowitty/eeep | insertdb/insertmodeldata.py | 1 | 6340 | #encoding=utf-8
import MySQLdb, os
def insertDomain(db):
cursor = db.cursor()
sql = "insert into %(table)s (%(para)s) values ('%(value)s')"
insertValues = {'table' : 'quotation_domain', 'para' : 'name', 'value' : 'CD'}
f = open(os.path.dirname(os.path.abspath(__file__)) + os.path.sep + 'domain', 'r+')
for line in f.readlines():
line = line.strip()
if('' != line):
insertValues['value'] = line
exesql = sql % insertValues
cursor.execute(exesql)
db.commit()
db.close()
def insertSubDomain(db):
cursor = db.cursor()
sql = "insert into %(table)s (%(para)s) values ('%(value)s')"
insertValues = {'table' : 'quotation_domain', 'para' : 'name', 'value' : 'CD'}
insertValues['table'] = 'quotation_subdomain'
f = open(os.path.dirname(os.path.abspath(__file__)) + os.path.sep + 'subdomain', 'r+')
for line in f.readlines():
line = line.strip()
if('' != line):
insertValues['value'] = line
exesql = sql % insertValues
cursor.execute(exesql)
db.commit()
db.close()
def insertRegion(db, tableName, valueTag, fileName):
cursor = db.cursor()
sql = "insert into %(table)s (%(valueTag)s) values ('%(value)s')"
insertValues = {'table' : tableName, 'valueTag' : valueTag, 'value' : 'xxxxx'}
#print sql % insertValues
f = open(os.path.dirname(os.path.abspath(__file__)) + os.path.sep + fileName, 'r+')
for line in f.readlines():
line = line.strip()
if('' != line):
para = line.split('**')
if(len(para) > 1):
insertValues['value'] = para[0].strip()
cursor.execute(sql % insertValues)
db.commit()
db.close()
def insertValuesWithForeignKey(db, table, tagValue, tagForKey, f_table, f_tagvalue, fileName = 'unitid'):
cursor = db.cursor()
sql = "insert into %(table)s (" + tagValue + "," + tagForKey + ") values ('%(" + tagValue + ")s', %(" + tagForKey + ")s)"
insertValues = {'table' : table, tagValue : 'OMS CD', tagForKey : 1}
f = open(os.path.dirname(os.path.abspath(__file__)) + os.path.sep + fileName, 'r+')
f_id = -1
exeTimes = 0
for line in f.readlines():
exeTimes += 1
line = line.strip()
if('' != line):
para = line.split('**')
if(len(para) > 1):
f_name = para[0].strip()
cursor.execute("select id from %s where %s='%s'" % (f_table, f_tagvalue, f_name))
f_id = cursor.fetchone()[0]
insertValues[tagValue] = para[1].strip().replace('\'', "\\'")
insertValues[tagForKey] = f_id
print sql % insertValues
else:
insertValues[tagValue] = para[0].strip().replace('\'', "\\'")
insertValues[tagForKey] = f_id
print sql % insertValues
cursor.execute(sql % insertValues)
if(exeTimes % 10 == 0):
db.commit()
#pass
db.commit()
db.close()
def insertWorkcenter(db, tableName, fileName, *tags):
if(4 != len(tags)):
return False
else:
cursor = db.cursor()
sql = "insert into %(tableName)s (" + tags[0] + "," + tags[1] + "," + tags[2] + "," + tags[3] + ") values ('%(" + tags[0] + ")s','%(" + tags[1] + ")s','%(" + tags[2] + ")s','%("+ tags[3] +")s')".encode('utf-8')
insertDatas = {
'tableName' : tableName,
tags[0] : '',
tags[1] : '',
tags[2] : '',
tags[3] : ''
}
f = open(os.path.dirname(os.path.abspath(__file__)) + os.path.sep + fileName, 'r+')
cIndex = 0
for line in f.readlines():
cIndex += 1
if('' != line):
para = line.split('**')
if(len(para) > 3):
insertDatas[tags[0]] = para[0].strip().replace("\'", "\\'").encode('utf-8')
insertDatas[tags[1]] = para[1].strip().replace("\'", "\\'").encode('utf-8')
insertDatas[tags[2]] = para[2].strip().replace("\'", "\\'").encode('utf-8')
insertDatas[tags[3]] = para[3].strip().replace("\'", "\\'").encode('utf-8')
#print (sql % insertDatas).encode('utf-8')
cursor.execute((sql % insertDatas).encode('utf-8'))
if(cIndex % 10 == 0):
db.commit()
db.commit()
db.close()
def insertPostatus(db, fileName):
cursor = db.cursor()
sql = "insert into quotation_postatus (name) values ('%s')"
f = open(os.path.dirname(os.path.abspath(__file__)) + os.path.sep + fileName, 'r+')
for line in f.readlines():
line = line.strip()
if('' != line):
exesql = sql % line
cursor.execute(exesql)
db.commit()
db.close()
def insertOrderingcompany(db, fileName):
cursor = db.cursor()
sql = "insert into quotation_orderingcompany (name) values ('%s')"
f = open(os.path.dirname(os.path.abspath(__file__)) + os.path.sep + fileName, 'r+')
cIndex = 0
for line in f.readlines():
cIndex += 1
line = line.strip()
if('' != line):
exesql = sql % line
#print exesql
cursor.execute(exesql)
if( 0 == cIndex % 10):
db.commit()
db.commit()
db.close()
if __name__ == '__main__':
host = "localhost"
passwd = "tatool"
user = "tatool"
dbname = "eeep"
db = MySQLdb.connect(host=host, user=user, passwd=passwd, db=dbname)
#insertDomain(db)
#insertSubDomain(db)
    #insertValuesWithForeignKey(db, 'quotation_unitid', 'name', 'domain_id', "quotation_domain", "name")
#insertRegion(db, 'quotation_region', 'name', 'regionandcountry')
    #insertValuesWithForeignKey(db, 'quotation_country', 'name', 'region_id', "quotation_region", "name", 'regionandcountry')
#insertWorkcenter(db, 'quotation_workcenter', 'workcenter', 'number', 'descworkcenter', 'icrrbactivitytype', 'intracompanyactivitytyoe')
#insertPostatus(db, 'postatus')
insertOrderingcompany(db, 'orderingcompany')
| mit | -8,395,665,122,548,171,000 | 37.90184 | 218 | 0.519085 | false | 3.506637 | false | false | false |
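The script above builds SQL by string interpolation and escapes quotes by hand, which is fragile and injection-prone. A safer variant of the same insert pattern, sketched below, lets the MySQLdb driver escape the values itself; note that table and column names cannot be parameterized, so they are still interpolated and must come from trusted code (the names here are placeholders).

```python
import MySQLdb

def insert_names(db, table, column, filename):
    # %s placeholders are filled in by the driver, which handles quoting and
    # escaping, so no manual .replace("'", "\\'") is needed for the values.
    sql = "insert into " + table + " (" + column + ") values (%s)"
    cursor = db.cursor()
    with open(filename) as f:
        rows = [(line.strip(),) for line in f if line.strip()]
    cursor.executemany(sql, rows)  # one batched round of inserts
    db.commit()

# insert_names(db, 'quotation_postatus', 'name', 'postatus')
```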
googleapis/googleapis-gen | google/cloud/dialogflow/cx/v3beta1/dialogflow-cx-v3beta1-py/google/cloud/dialogflowcx_v3beta1/services/entity_types/client.py | 1 | 38725 | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
from distutils import util
import os
import re
from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union
import pkg_resources
from google.api_core import client_options as client_options_lib # type: ignore
from google.api_core import exceptions as core_exceptions # type: ignore
from google.api_core import gapic_v1 # type: ignore
from google.api_core import retry as retries # type: ignore
from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport import mtls # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
from google.auth.exceptions import MutualTLSChannelError # type: ignore
from google.oauth2 import service_account # type: ignore
from google.cloud.dialogflowcx_v3beta1.services.entity_types import pagers
from google.cloud.dialogflowcx_v3beta1.types import entity_type
from google.cloud.dialogflowcx_v3beta1.types import entity_type as gcdc_entity_type
from google.protobuf import field_mask_pb2 # type: ignore
from .transports.base import EntityTypesTransport, DEFAULT_CLIENT_INFO
from .transports.grpc import EntityTypesGrpcTransport
from .transports.grpc_asyncio import EntityTypesGrpcAsyncIOTransport
class EntityTypesClientMeta(type):
"""Metaclass for the EntityTypes client.
This provides class-level methods for building and retrieving
support objects (e.g. transport) without polluting the client instance
objects.
"""
_transport_registry = OrderedDict() # type: Dict[str, Type[EntityTypesTransport]]
_transport_registry["grpc"] = EntityTypesGrpcTransport
_transport_registry["grpc_asyncio"] = EntityTypesGrpcAsyncIOTransport
def get_transport_class(cls,
label: str = None,
) -> Type[EntityTypesTransport]:
"""Returns an appropriate transport class.
Args:
label: The name of the desired transport. If none is
provided, then the first transport in the registry is used.
Returns:
The transport class to use.
"""
# If a specific transport is requested, return that one.
if label:
return cls._transport_registry[label]
# No transport is requested; return the default (that is, the first one
# in the dictionary).
return next(iter(cls._transport_registry.values()))
class EntityTypesClient(metaclass=EntityTypesClientMeta):
"""Service for managing
[EntityTypes][google.cloud.dialogflow.cx.v3beta1.EntityType].
"""
@staticmethod
def _get_default_mtls_endpoint(api_endpoint):
"""Converts api endpoint to mTLS endpoint.
Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
"*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
Args:
api_endpoint (Optional[str]): the api endpoint to convert.
Returns:
str: converted mTLS api endpoint.
"""
if not api_endpoint:
return api_endpoint
mtls_endpoint_re = re.compile(
r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
)
m = mtls_endpoint_re.match(api_endpoint)
name, mtls, sandbox, googledomain = m.groups()
if mtls or not googledomain:
return api_endpoint
if sandbox:
return api_endpoint.replace(
"sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
)
return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
DEFAULT_ENDPOINT = "dialogflow.googleapis.com"
DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore
DEFAULT_ENDPOINT
)
@classmethod
def from_service_account_info(cls, info: dict, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
info.
Args:
info (dict): The service account private key info.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
EntityTypesClient: The constructed client.
"""
credentials = service_account.Credentials.from_service_account_info(info)
kwargs["credentials"] = credentials
return cls(*args, **kwargs)
@classmethod
def from_service_account_file(cls, filename: str, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
file.
Args:
filename (str): The path to the service account private key json
file.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
EntityTypesClient: The constructed client.
"""
credentials = service_account.Credentials.from_service_account_file(
filename)
kwargs["credentials"] = credentials
return cls(*args, **kwargs)
from_service_account_json = from_service_account_file
@property
def transport(self) -> EntityTypesTransport:
"""Returns the transport used by the client instance.
Returns:
EntityTypesTransport: The transport used by the client
instance.
"""
return self._transport
@staticmethod
def entity_type_path(project: str,location: str,agent: str,entity_type: str,) -> str:
"""Returns a fully-qualified entity_type string."""
return "projects/{project}/locations/{location}/agents/{agent}/entityTypes/{entity_type}".format(project=project, location=location, agent=agent, entity_type=entity_type, )
@staticmethod
def parse_entity_type_path(path: str) -> Dict[str,str]:
"""Parses a entity_type path into its component segments."""
m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/agents/(?P<agent>.+?)/entityTypes/(?P<entity_type>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_billing_account_path(billing_account: str, ) -> str:
"""Returns a fully-qualified billing_account string."""
return "billingAccounts/{billing_account}".format(billing_account=billing_account, )
@staticmethod
def parse_common_billing_account_path(path: str) -> Dict[str,str]:
"""Parse a billing_account path into its component segments."""
m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_folder_path(folder: str, ) -> str:
"""Returns a fully-qualified folder string."""
return "folders/{folder}".format(folder=folder, )
@staticmethod
def parse_common_folder_path(path: str) -> Dict[str,str]:
"""Parse a folder path into its component segments."""
m = re.match(r"^folders/(?P<folder>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_organization_path(organization: str, ) -> str:
"""Returns a fully-qualified organization string."""
return "organizations/{organization}".format(organization=organization, )
@staticmethod
def parse_common_organization_path(path: str) -> Dict[str,str]:
"""Parse a organization path into its component segments."""
m = re.match(r"^organizations/(?P<organization>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_project_path(project: str, ) -> str:
"""Returns a fully-qualified project string."""
return "projects/{project}".format(project=project, )
@staticmethod
def parse_common_project_path(path: str) -> Dict[str,str]:
"""Parse a project path into its component segments."""
m = re.match(r"^projects/(?P<project>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_location_path(project: str, location: str, ) -> str:
"""Returns a fully-qualified location string."""
return "projects/{project}/locations/{location}".format(project=project, location=location, )
@staticmethod
def parse_common_location_path(path: str) -> Dict[str,str]:
"""Parse a location path into its component segments."""
m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
return m.groupdict() if m else {}
def __init__(self, *,
credentials: Optional[ga_credentials.Credentials] = None,
transport: Union[str, EntityTypesTransport, None] = None,
client_options: Optional[client_options_lib.ClientOptions] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiates the entity types client.
Args:
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
transport (Union[str, EntityTypesTransport]): The
transport to use. If set to None, a transport is chosen
automatically.
client_options (google.api_core.client_options.ClientOptions): Custom options for the
client. It won't take effect if a ``transport`` instance is provided.
(1) The ``api_endpoint`` property can be used to override the
default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
environment variable can also be used to override the endpoint:
"always" (always use the default mTLS endpoint), "never" (always
use the default regular endpoint) and "auto" (auto switch to the
default mTLS endpoint if client certificate is present, this is
the default value). However, the ``api_endpoint`` property takes
precedence if provided.
(2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
is "true", then the ``client_cert_source`` property can be used
to provide client certificate for mutual TLS transport. If
not provided, the default SSL client certificate will be used if
present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
set, no client certificate will be used.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
Raises:
google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
creation failed for any reason.
"""
if isinstance(client_options, dict):
client_options = client_options_lib.from_dict(client_options)
if client_options is None:
client_options = client_options_lib.ClientOptions()
# Create SSL credentials for mutual TLS if needed.
use_client_cert = bool(util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")))
client_cert_source_func = None
is_mtls = False
if use_client_cert:
if client_options.client_cert_source:
is_mtls = True
client_cert_source_func = client_options.client_cert_source
else:
is_mtls = mtls.has_default_client_cert_source()
if is_mtls:
client_cert_source_func = mtls.default_client_cert_source()
else:
client_cert_source_func = None
# Figure out which api endpoint to use.
if client_options.api_endpoint is not None:
api_endpoint = client_options.api_endpoint
else:
use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
if use_mtls_env == "never":
api_endpoint = self.DEFAULT_ENDPOINT
elif use_mtls_env == "always":
api_endpoint = self.DEFAULT_MTLS_ENDPOINT
elif use_mtls_env == "auto":
if is_mtls:
api_endpoint = self.DEFAULT_MTLS_ENDPOINT
else:
api_endpoint = self.DEFAULT_ENDPOINT
else:
raise MutualTLSChannelError(
"Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted "
"values: never, auto, always"
)
# Save or instantiate the transport.
# Ordinarily, we provide the transport, but allowing a custom transport
# instance provides an extensibility point for unusual situations.
if isinstance(transport, EntityTypesTransport):
# transport is a EntityTypesTransport instance.
if credentials or client_options.credentials_file:
raise ValueError("When providing a transport instance, "
"provide its credentials directly.")
if client_options.scopes:
raise ValueError(
"When providing a transport instance, provide its scopes "
"directly."
)
self._transport = transport
else:
Transport = type(self).get_transport_class(transport)
self._transport = Transport(
credentials=credentials,
credentials_file=client_options.credentials_file,
host=api_endpoint,
scopes=client_options.scopes,
client_cert_source_for_mtls=client_cert_source_func,
quota_project_id=client_options.quota_project_id,
client_info=client_info,
)
def list_entity_types(self,
request: entity_type.ListEntityTypesRequest = None,
*,
parent: str = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> pagers.ListEntityTypesPager:
r"""Returns the list of all entity types in the specified
agent.
Args:
request (google.cloud.dialogflowcx_v3beta1.types.ListEntityTypesRequest):
The request object. The request message for
[EntityTypes.ListEntityTypes][google.cloud.dialogflow.cx.v3beta1.EntityTypes.ListEntityTypes].
parent (str):
Required. The agent to list all entity types for.
Format:
``projects/<Project ID>/locations/<Location ID>/agents/<Agent ID>``.
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.dialogflowcx_v3beta1.services.entity_types.pagers.ListEntityTypesPager:
The response message for
[EntityTypes.ListEntityTypes][google.cloud.dialogflow.cx.v3beta1.EntityTypes.ListEntityTypes].
Iterating over this object will yield results and
resolve additional pages automatically.
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([parent])
if request is not None and has_flattened_params:
raise ValueError('If the `request` argument is set, then none of '
'the individual field arguments should be set.')
# Minor optimization to avoid making a copy if the user passes
# in a entity_type.ListEntityTypesRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
if not isinstance(request, entity_type.ListEntityTypesRequest):
request = entity_type.ListEntityTypesRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if parent is not None:
request.parent = parent
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[self._transport.list_entity_types]
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((
("parent", request.parent),
)),
)
# Send the request.
response = rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# This method is paged; wrap the response in a pager, which provides
# an `__iter__` convenience method.
response = pagers.ListEntityTypesPager(
method=rpc,
request=request,
response=response,
metadata=metadata,
)
# Done; return the response.
return response
def get_entity_type(self,
request: entity_type.GetEntityTypeRequest = None,
*,
name: str = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> entity_type.EntityType:
r"""Retrieves the specified entity type.
Args:
request (google.cloud.dialogflowcx_v3beta1.types.GetEntityTypeRequest):
The request object. The request message for
[EntityTypes.GetEntityType][google.cloud.dialogflow.cx.v3beta1.EntityTypes.GetEntityType].
name (str):
Required. The name of the entity type. Format:
``projects/<Project ID>/locations/<Location ID>/agents/<Agent ID>/entityTypes/<Entity Type ID>``.
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.dialogflowcx_v3beta1.types.EntityType:
Entities are extracted from user input and represent parameters that are
meaningful to your application. For example, a date
range, a proper name such as a geographic location or
landmark, and so on. Entities represent actionable
data for your application.
When you define an entity, you can also include
synonyms that all map to that entity. For example,
"soft drink", "soda", "pop", and so on.
There are three types of entities:
- **System** - entities that are defined by the
Dialogflow API for common data types such as date,
time, currency, and so on. A system entity is
represented by the EntityType type.
- **Custom** - entities that are defined by you that
represent actionable data that is meaningful to
your application. For example, you could define a
pizza.sauce entity for red or white pizza sauce, a
pizza.cheese entity for the different types of
cheese on a pizza, a pizza.topping entity for
different toppings, and so on. A custom entity is
represented by the EntityType type.
- **User** - entities that are built for an
individual user such as favorites, preferences,
playlists, and so on. A user entity is represented
by the
[SessionEntityType][google.cloud.dialogflow.cx.v3beta1.SessionEntityType]
type.
For more information about entity types, see the
[Dialogflow
documentation](\ https://cloud.google.com/dialogflow/docs/entities-overview).
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([name])
if request is not None and has_flattened_params:
raise ValueError('If the `request` argument is set, then none of '
'the individual field arguments should be set.')
# Minor optimization to avoid making a copy if the user passes
# in a entity_type.GetEntityTypeRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
if not isinstance(request, entity_type.GetEntityTypeRequest):
request = entity_type.GetEntityTypeRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if name is not None:
request.name = name
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[self._transport.get_entity_type]
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((
("name", request.name),
)),
)
# Send the request.
response = rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Done; return the response.
return response
def create_entity_type(self,
request: gcdc_entity_type.CreateEntityTypeRequest = None,
*,
parent: str = None,
entity_type: gcdc_entity_type.EntityType = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> gcdc_entity_type.EntityType:
r"""Creates an entity type in the specified agent.
Args:
request (google.cloud.dialogflowcx_v3beta1.types.CreateEntityTypeRequest):
The request object. The request message for
[EntityTypes.CreateEntityType][google.cloud.dialogflow.cx.v3beta1.EntityTypes.CreateEntityType].
parent (str):
                Required. The agent to create an entity type for. Format:
``projects/<Project ID>/locations/<Location ID>/agents/<Agent ID>``.
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
entity_type (google.cloud.dialogflowcx_v3beta1.types.EntityType):
Required. The entity type to create.
This corresponds to the ``entity_type`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.dialogflowcx_v3beta1.types.EntityType:
Entities are extracted from user input and represent parameters that are
meaningful to your application. For example, a date
range, a proper name such as a geographic location or
landmark, and so on. Entities represent actionable
data for your application.
When you define an entity, you can also include
synonyms that all map to that entity. For example,
"soft drink", "soda", "pop", and so on.
There are three types of entities:
- **System** - entities that are defined by the
Dialogflow API for common data types such as date,
time, currency, and so on. A system entity is
represented by the EntityType type.
- **Custom** - entities that are defined by you that
represent actionable data that is meaningful to
your application. For example, you could define a
pizza.sauce entity for red or white pizza sauce, a
pizza.cheese entity for the different types of
cheese on a pizza, a pizza.topping entity for
different toppings, and so on. A custom entity is
represented by the EntityType type.
- **User** - entities that are built for an
individual user such as favorites, preferences,
playlists, and so on. A user entity is represented
by the
[SessionEntityType][google.cloud.dialogflow.cx.v3beta1.SessionEntityType]
type.
For more information about entity types, see the
[Dialogflow
documentation](\ https://cloud.google.com/dialogflow/docs/entities-overview).
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([parent, entity_type])
if request is not None and has_flattened_params:
raise ValueError('If the `request` argument is set, then none of '
'the individual field arguments should be set.')
# Minor optimization to avoid making a copy if the user passes
# in a gcdc_entity_type.CreateEntityTypeRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
if not isinstance(request, gcdc_entity_type.CreateEntityTypeRequest):
request = gcdc_entity_type.CreateEntityTypeRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if parent is not None:
request.parent = parent
if entity_type is not None:
request.entity_type = entity_type
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[self._transport.create_entity_type]
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((
("parent", request.parent),
)),
)
# Send the request.
response = rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Done; return the response.
return response
def update_entity_type(self,
request: gcdc_entity_type.UpdateEntityTypeRequest = None,
*,
entity_type: gcdc_entity_type.EntityType = None,
update_mask: field_mask_pb2.FieldMask = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> gcdc_entity_type.EntityType:
r"""Updates the specified entity type.
Note: You should always train a flow prior to sending it
queries. See the `training
documentation <https://cloud.google.com/dialogflow/cx/docs/concept/training>`__.
Args:
request (google.cloud.dialogflowcx_v3beta1.types.UpdateEntityTypeRequest):
The request object. The request message for
[EntityTypes.UpdateEntityType][google.cloud.dialogflow.cx.v3beta1.EntityTypes.UpdateEntityType].
entity_type (google.cloud.dialogflowcx_v3beta1.types.EntityType):
Required. The entity type to update.
This corresponds to the ``entity_type`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
update_mask (google.protobuf.field_mask_pb2.FieldMask):
The mask to control which fields get
updated.
This corresponds to the ``update_mask`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.dialogflowcx_v3beta1.types.EntityType:
Entities are extracted from user input and represent parameters that are
meaningful to your application. For example, a date
range, a proper name such as a geographic location or
landmark, and so on. Entities represent actionable
data for your application.
When you define an entity, you can also include
synonyms that all map to that entity. For example,
"soft drink", "soda", "pop", and so on.
There are three types of entities:
- **System** - entities that are defined by the
Dialogflow API for common data types such as date,
time, currency, and so on. A system entity is
represented by the EntityType type.
- **Custom** - entities that are defined by you that
represent actionable data that is meaningful to
your application. For example, you could define a
pizza.sauce entity for red or white pizza sauce, a
pizza.cheese entity for the different types of
cheese on a pizza, a pizza.topping entity for
different toppings, and so on. A custom entity is
represented by the EntityType type.
- **User** - entities that are built for an
individual user such as favorites, preferences,
playlists, and so on. A user entity is represented
by the
[SessionEntityType][google.cloud.dialogflow.cx.v3beta1.SessionEntityType]
type.
For more information about entity types, see the
[Dialogflow
documentation](\ https://cloud.google.com/dialogflow/docs/entities-overview).
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([entity_type, update_mask])
if request is not None and has_flattened_params:
raise ValueError('If the `request` argument is set, then none of '
'the individual field arguments should be set.')
# Minor optimization to avoid making a copy if the user passes
# in a gcdc_entity_type.UpdateEntityTypeRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
if not isinstance(request, gcdc_entity_type.UpdateEntityTypeRequest):
request = gcdc_entity_type.UpdateEntityTypeRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if entity_type is not None:
request.entity_type = entity_type
if update_mask is not None:
request.update_mask = update_mask
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[self._transport.update_entity_type]
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((
("entity_type.name", request.entity_type.name),
)),
)
# Send the request.
response = rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Done; return the response.
return response
def delete_entity_type(self,
request: entity_type.DeleteEntityTypeRequest = None,
*,
name: str = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> None:
r"""Deletes the specified entity type.
Note: You should always train a flow prior to sending it
queries. See the `training
documentation <https://cloud.google.com/dialogflow/cx/docs/concept/training>`__.
Args:
request (google.cloud.dialogflowcx_v3beta1.types.DeleteEntityTypeRequest):
The request object. The request message for
[EntityTypes.DeleteEntityType][google.cloud.dialogflow.cx.v3beta1.EntityTypes.DeleteEntityType].
name (str):
Required. The name of the entity type to delete. Format:
``projects/<Project ID>/locations/<Location ID>/agents/<Agent ID>/entityTypes/<Entity Type ID>``.
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([name])
if request is not None and has_flattened_params:
raise ValueError('If the `request` argument is set, then none of '
'the individual field arguments should be set.')
# Minor optimization to avoid making a copy if the user passes
# in a entity_type.DeleteEntityTypeRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
if not isinstance(request, entity_type.DeleteEntityTypeRequest):
request = entity_type.DeleteEntityTypeRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if name is not None:
request.name = name
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[self._transport.delete_entity_type]
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((
("name", request.name),
)),
)
# Send the request.
rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
try:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
gapic_version=pkg_resources.get_distribution(
"google-cloud-dialogflowcx",
).version,
)
except pkg_resources.DistributionNotFound:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
__all__ = (
"EntityTypesClient",
)
| apache-2.0 | 7,160,798,180,582,101,000 | 44.345433 | 180 | 0.601704 | false | 4.701918 | false | false | false |
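A minimal usage sketch for the generated client above. The project, location and agent IDs are placeholders, and credentials are assumed to come from Application Default Credentials in the environment.

```python
from google.cloud.dialogflowcx_v3beta1.services.entity_types import EntityTypesClient

client = EntityTypesClient()  # picks up Application Default Credentials
parent = "projects/my-project/locations/global/agents/my-agent-id"

# list_entity_types returns a pager that transparently fetches further pages.
for entity_type in client.list_entity_types(parent=parent):
    print(entity_type.name, entity_type.display_name)
```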
CloudVE/djcloudbridge | djcloudbridge/drf_routers.py | 1 | 1991 | from django.conf.urls import url
from rest_framework import routers, viewsets
from rest_framework_nested import routers as nested_routers
class HybridRoutingMixin(object):
"""
    Extends the functionality of DefaultRouter, adding the ability to
    register plain API views, not just ViewSets.
Based on:
http://stackoverflow.com/questions/18818179/routing-api-views-in-django-rest-framework
http://stackoverflow.com/questions/18817988/using-django-rest-frameworks-browsable-api-with-apiviews
"""
def get_routes(self, viewset):
"""
        Checks whether the viewset is a subclass of ViewSetMixin; otherwise it
        is assumed to be a simple view and the original `get_routes` code is skipped.
"""
if issubclass(viewset, viewsets.ViewSetMixin):
return super(HybridRoutingMixin, self).get_routes(viewset)
return []
def get_urls(self):
"""
Append non-viewset views to the urls generated by the original
`get_urls` method.
"""
# URLs for viewsets
ret = super(HybridRoutingMixin, self).get_urls()
# URLs for simple views
for prefix, viewset, basename in self.registry:
# Skip viewsets
if issubclass(viewset, viewsets.ViewSetMixin):
continue
# URL regex
regex = '{prefix}{trailing_slash}$'.format(
prefix=prefix,
trailing_slash=self.trailing_slash
)
# The view name has to have suffix "-list" due to specifics
# of the DefaultRouter implementation.
ret.append(url(regex, viewset.as_view(),
name='{0}-list'.format(basename)))
return ret
class HybridDefaultRouter(HybridRoutingMixin, routers.DefaultRouter):
pass
class HybridSimpleRouter(HybridRoutingMixin, routers.SimpleRouter):
pass
class HybridNestedRouter(HybridRoutingMixin, nested_routers.NestedSimpleRouter):
pass
| mit | 7,499,356,688,837,228,000 | 29.630769 | 104 | 0.64892 | false | 4.328261 | false | false | false |
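A short sketch of how the hybrid router above is meant to be used: plain APIViews register alongside ViewSets and are served under a `<basename>-list` URL name. The view class and prefix are hypothetical.

```python
from rest_framework.views import APIView
from rest_framework.response import Response

class StatusView(APIView):
    def get(self, request):
        return Response({'status': 'ok'})

router = HybridDefaultRouter()
router.register(r'status', StatusView, 'status')  # plain APIView, not a ViewSet
urlpatterns = router.urls
```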
Disiok/poetry-seq2seq | model.py | 1 | 22939 | #! /usr/bin/env python
#-*- coding:utf-8 -*-
# standard
import os
from IPython import embed
# framework
import tensorflow as tf
from tensorflow.contrib import seq2seq, rnn
from tensorflow.python.layers.core import Dense
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'
class Seq2SeqModel:
"""
Seq2Seq model based on tensorflow.contrib.seq2seq
"""
def __init__(self, config, mode):
assert mode.lower() in ['train', 'predict']
self.config = config
self.mode = mode.lower()
self.cell_type = config['cell_type']
self.hidden_units = config['hidden_units']
self.bidirectional = config['bidirectional']
self.decoder_hidden_units = self.hidden_units * (2 if self.bidirectional else 1)
self.depth = config['depth']
self.attention_type = config['attention_type']
self.embedding_size = config['embedding_size']
self.vocab_size = config['vocab_size']
self.num_encoder_symbols = config['num_encoder_symbols']
self.num_decoder_symbols = config['num_decoder_symbols']
self.use_residual = config['use_residual']
self.attn_input_feeding = config['attn_input_feeding']
self.use_dropout = config['use_dropout']
self.keep_prob = 1.0 - config['dropout_rate']
self.optimizer = config['optimizer']
self.learning_rate = config['learning_rate']
self.max_gradient_norm = config['max_gradient_norm']
self.global_step = tf.Variable(0, trainable=False, name='global_step')
self.global_epoch_step = tf.Variable(0, trainable=False, name='global_epoch_step')
self.increment_global_epoch_step_op = tf.assign(self.global_epoch_step, self.global_epoch_step + 1)
self.dtype = tf.float16 if config['use_fp16'] else tf.float32
self.keep_prob_placeholder = tf.placeholder(self.dtype, shape=[], name='keep_prob')
self.use_beamsearch_decode=False
if self.mode == 'predict':
self.beam_width = config['beam_width']
self.use_beamsearch_decode = True if self.beam_width > 1 else False
self.max_decode_step = config['max_decode_step']
self.predict_mode = config['predict_mode']
elif self.mode == 'train':
self.train_mode = config['train_mode']
self.sampling_probability = config['sampling_probability']
self.start_token = config['start_token']
self.end_token = config['end_token']
self.build_model()
def build_model(self):
print 'Building model...'
# Build encoder and decoder networks
self.init_placeholders()
self.build_encoder()
self.build_decoder()
# Merge all the training summaries
self.summary_op = tf.summary.merge_all()
def init_placeholders(self):
# TODO(sdsuo): Understand dropout
self.keep_prob_placeholder = tf.placeholder(self.dtype, shape=[], name='keep_prob')
# embedding_placeholder: [vocab_size, hidden_units]
self.embedding_placeholder = tf.placeholder(
name='embedding_placeholder',
shape=[self.vocab_size, self.hidden_units],
dtype=self.dtype
)
self.embedding = tf.get_variable(
name='embedding',
shape=[self.vocab_size, self.hidden_units],
trainable=False,
)
self.assign_embedding_op = self.embedding.assign(self.embedding_placeholder)
# encode_inputs: [batch_size, time_steps]
self.encoder_inputs = tf.placeholder(
name='encoder_inputs',
shape=(None, None),
dtype=tf.int32
)
# encoder_inputs_length: [batch_size]
self.encoder_inputs_length = tf.placeholder(
name='encoder_inputs_length',
shape=(None,),
dtype=tf.int32
)
# use dynamic batch_size based on input
self.batch_size = tf.shape(self.encoder_inputs)[0]
if self.mode == 'train':
# decoder_inputs: [batch_size, max_time_steps]
self.decoder_inputs = tf.placeholder(
dtype=tf.int32,
shape=(None, None),
name='decoder_inputs'
)
# decoder_inputs_length: [batch_size]
self.decoder_inputs_length = tf.placeholder(
dtype=tf.int32,
shape=(None,),
name='decoder_inputs_length'
)
decoder_start_token = tf.ones(
shape=[self.batch_size, 1],
dtype=tf.int32
) * self.start_token
decoder_end_token = tf.ones(
shape=[self.batch_size, 1],
dtype=tf.int32
) * self.end_token
# decoder_inputs_train: [batch_size , max_time_steps + 1]
# insert _GO symbol in front of each decoder input
self.decoder_inputs_train = tf.concat([decoder_start_token,
self.decoder_inputs], axis=1)
# decoder_inputs_length_train: [batch_size]
self.decoder_inputs_length_train = self.decoder_inputs_length + 1
# decoder_targets_train: [batch_size, max_time_steps + 1]
# insert EOS symbol at the end of each decoder input
self.decoder_targets_train = tf.concat([self.decoder_inputs,
decoder_end_token], axis=1)
def build_single_cell(self, hidden_units):
if self.cell_type == 'gru':
cell_type = rnn.GRUCell
elif self.cell_type == 'lstm':
cell_type = rnn.LSTMCell
else:
raise RuntimeError('Unknown cell type!')
cell = cell_type(hidden_units)
return cell
def build_encoder_cell(self):
multi_cell = rnn.MultiRNNCell([self.build_single_cell(self.hidden_units) for _ in range(self.depth)])
return multi_cell
def build_encoder(self):
print 'Building encoder...'
with tf.variable_scope('encoder'):
# embedded inputs: [batch_size, time_step, embedding_size]
self.encoder_inputs_embedded = tf.nn.embedding_lookup(
params=self.embedding,
ids=self.encoder_inputs
)
# TODO(sdsuo): Decide if we need a Dense input layer here
if self.bidirectional:
# Build encoder cell
self.encoder_cell_fw = self.build_encoder_cell()
self.encoder_cell_bw = self.build_encoder_cell()
# Encode input sequences into context vectors
# encoder_outputs: [batch_size, time_step, cell_output_size]
# encoder_last_state: [batch_size, cell_output_size]
self.encoder_outputs_fw_bw, self.encoder_last_state_fw_bw = tf.nn.bidirectional_dynamic_rnn(
cell_fw=self.encoder_cell_fw,
cell_bw=self.encoder_cell_bw,
inputs=self.encoder_inputs_embedded,
sequence_length=self.encoder_inputs_length,
dtype=self.dtype,
time_major=False
)
self.encoder_outputs_fw, self.encoder_outputs_bw = self.encoder_outputs_fw_bw
self.encoder_outputs = tf.concat([self.encoder_outputs_fw, self.encoder_outputs_bw], 2)
self.encoder_last_state_fw, self.encoder_last_state_bw = self.encoder_last_state_fw_bw
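                # Merge the forward and backward LSTM states layer by layer so
                # each decoder layer receives a state of size 2 * hidden_units.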
encoder_last_state_zipped = zip(self.encoder_last_state_fw, self.encoder_last_state_bw)
encoder_last_state_list = [rnn.LSTMStateTuple(c=tf.concat([fw.c, bw.c], 1), h=tf.concat([fw.h, bw.h], 1))
for fw, bw in encoder_last_state_zipped]
self.encoder_last_state = tuple(encoder_last_state_list)
else:
self.encoder_cell = self.build_encoder_cell()
# Encode input sequences into context vectors
# encoder_outputs: [batch_size, time_step, cell_output_size]
# encoder_last_state: [batch_size, cell_output_size]
self.encoder_outputs, self.encoder_last_state = tf.nn.dynamic_rnn(
cell=self.encoder_cell,
inputs=self.encoder_inputs_embedded,
sequence_length=self.encoder_inputs_length,
dtype=self.dtype,
time_major=False
)
def build_decoder_cell(self):
# TODO(sdsuo): Read up and decide whether to use beam search
self.attention_mechanism = seq2seq.BahdanauAttention(
num_units=self.decoder_hidden_units,
memory=self.encoder_outputs,
memory_sequence_length=self.encoder_inputs_length
)
self.decoder_cell_list = [
self.build_single_cell(self.decoder_hidden_units) for _ in range(self.depth)
]
# NOTE(sdsuo): Not sure what this does yet
def attn_decoder_input_fn(inputs, attention):
if not self.attn_input_feeding:
return inputs
# Essential when use_residual=True
_input_layer = Dense(self.decoder_hidden_units, dtype=self.dtype,
name='attn_input_feeding')
return _input_layer(rnn.array_ops.concat([inputs, attention], -1))
# NOTE(sdsuo): Attention mechanism is implemented only on the top decoder layer
self.decoder_cell_list[-1] = seq2seq.AttentionWrapper(
cell=self.decoder_cell_list[-1],
attention_mechanism=self.attention_mechanism,
attention_layer_size=self.decoder_hidden_units,
cell_input_fn=attn_decoder_input_fn,
initial_cell_state=self.encoder_last_state[-1],
alignment_history=False,
name='attention_wrapper'
)
# NOTE(sdsuo): Not sure why this is necessary
# To be compatible with AttentionWrapper, the encoder last state
# of the top layer should be converted into the AttentionWrapperState form
# We can easily do this by calling AttentionWrapper.zero_state
# Also if beamsearch decoding is used, the batch_size argument in .zero_state
        # should be ${decoder_beam_width} times the original batch_size
if self.use_beamsearch_decode:
batch_size = self.batch_size * self.beam_width
else:
batch_size = self.batch_size
# NOTE(vera): important dimension here
# embed()
initial_state = [state for state in self.encoder_last_state]
initial_state[-1] = self.decoder_cell_list[-1].zero_state(
batch_size=batch_size,
dtype=self.dtype
)
decoder_initial_state = tuple(initial_state)
return rnn.MultiRNNCell(self.decoder_cell_list), decoder_initial_state
def build_train_decoder(self):
self.decoder_inputs_embedded = tf.nn.embedding_lookup(
params=self.embedding,
ids=self.decoder_inputs_train
)
if self.train_mode == 'ground_truth':
training_helper = seq2seq.TrainingHelper(
inputs=self.decoder_inputs_embedded,
sequence_length=self.decoder_inputs_length_train,
time_major=False,
name='training_helper'
)
elif self.train_mode == 'scheduled_sampling':
training_helper = seq2seq.ScheduledEmbeddingTrainingHelper(
inputs=self.decoder_inputs_embedded,
sequence_length=self.decoder_inputs_length_train,
embedding=lambda inputs: tf.nn.embedding_lookup(self.embedding, inputs),
sampling_probability=self.sampling_probability,
name='scheduled_embedding_training_helper'
)
else:
raise NotImplementedError('Train mode: {} is not yet implemented'.format(self.train_mode))
training_decoder = seq2seq.BasicDecoder(
cell=self.decoder_cell,
helper=training_helper,
initial_state=self.decoder_initial_state,
output_layer=self.output_layer
)
max_decoder_length = tf.reduce_max(self.decoder_inputs_length_train)
self.decoder_outputs_train, self.decoder_last_state_train, self.decoder_outputs_length_train = seq2seq.dynamic_decode(
decoder=training_decoder,
output_time_major=False,
impute_finished=True,
maximum_iterations=max_decoder_length
)
# NOTE(sdsuo): Not sure why this is necessary
self.decoder_logits_train = tf.identity(self.decoder_outputs_train.rnn_output)
# Use argmax to extract decoder symbols to emit
self.decoder_pred_train = tf.argmax(
self.decoder_logits_train,
axis=-1,
name='decoder_pred_train'
)
# masks: masking for valid and padded time steps, [batch_size, max_time_step + 1]
masks = tf.sequence_mask(
lengths=self.decoder_inputs_length_train,
maxlen=max_decoder_length,
dtype=self.dtype,
name='masks'
)
# Computes per word average cross-entropy over a batch
# Internally calls 'nn_ops.sparse_softmax_cross_entropy_with_logits' by default
self.loss = seq2seq.sequence_loss(
logits=self.decoder_logits_train,
targets=self.decoder_targets_train,
weights=masks,
average_across_timesteps=True,
average_across_batch=True
)
# Training summary for the current batch_loss
tf.summary.scalar('loss', self.loss)
        # Construct graphs for minimizing loss
self.init_optimizer()
def build_predict_decoder(self):
# start_tokens: [batch_size,]
start_tokens = tf.ones([self.batch_size,], tf.int32) * self.start_token
end_token =self.end_token
if not self.use_beamsearch_decode:
# Helper to feed inputs for greedy decoding: use the argmax of the output
if self.predict_mode == 'sample':
print 'Building sample decoder...'
decoding_helper = seq2seq.SampleEmbeddingHelper(
start_tokens=start_tokens,
end_token=end_token,
embedding=lambda inputs: tf.nn.embedding_lookup(self.embedding, inputs)
)
elif self.predict_mode == 'greedy':
print 'Building greedy decoder...'
decoding_helper = seq2seq.GreedyEmbeddingHelper(
start_tokens=start_tokens,
end_token=end_token,
embedding=lambda inputs: tf.nn.embedding_lookup(self.embedding, inputs)
)
else:
raise NotImplementedError('Predict mode: {} is not yet implemented'.format(self.predict_mode))
inference_decoder = seq2seq.BasicDecoder(
cell=self.decoder_cell,
helper=decoding_helper,
initial_state=self.decoder_initial_state,
output_layer=self.output_layer
)
else:
raise NotImplementedError('Beamsearch decode is not yet implemented.')
        self.decoder_outputs_decode, self.decoder_last_state_decode, self.decoder_outputs_length_decode = seq2seq.dynamic_decode(
decoder=inference_decoder,
output_time_major=False,
maximum_iterations=self.max_decode_step
)
if not self.use_beamsearch_decode:
self.decoder_pred_decode = tf.expand_dims(self.decoder_outputs_decode.sample_id, -1)
else:
raise NotImplementedError('{} mode is not recognized.'.format(self.mode))
def build_decoder(self):
print 'Building decoder...'
with tf.variable_scope('decoder'):
# Building decoder_cell and decoder_initial_state
self.decoder_cell, self.decoder_initial_state = self.build_decoder_cell()
# Output projection layer to convert cell_outputs to logits
self.output_layer = Dense(self.vocab_size, name='output_projection')
if self.mode == 'train':
self.build_train_decoder()
elif self.mode == 'predict':
self.build_predict_decoder()
else:
raise RuntimeError
def init_optimizer(self):
print("Setting optimizer..")
# Gradients and SGD update operation for training the model
trainable_params = tf.trainable_variables()
if self.optimizer.lower() == 'adadelta':
self.opt = tf.train.AdadeltaOptimizer(learning_rate=self.learning_rate)
elif self.optimizer.lower() == 'adam':
self.opt = tf.train.AdamOptimizer(learning_rate=self.learning_rate)
elif self.optimizer.lower() == 'rmsprop':
self.opt = tf.train.RMSPropOptimizer(learning_rate=self.learning_rate)
else:
self.opt = tf.train.GradientDescentOptimizer(learning_rate=self.learning_rate)
# Compute gradients of loss w.r.t. all trainable variables
gradients = tf.gradients(self.loss, trainable_params)
# Clip gradients by a given maximum_gradient_norm
clip_gradients, _ = tf.clip_by_global_norm(gradients, self.max_gradient_norm)
# Update the model
self.updates = self.opt.apply_gradients(
zip(clip_gradients, trainable_params), global_step=self.global_step)
def save(self, sess, saver, path, global_step=None):
"""
Args:
sess:
path:
var_list:
global_step:
Returns:
"""
save_path = saver.save(sess, save_path=path, global_step=global_step)
print 'Model saved at {}'.format(save_path)
def restore(self, sess, saver, path):
"""
Args:
sess:
path:
var_list:
Returns:
"""
saver.restore(sess, save_path=path)
print 'Model restored from {}'.format(path)
def train(self, sess, encoder_inputs, encoder_inputs_length,
decoder_inputs, decoder_inputs_length):
"""Run a train step of the model feeding the given inputs.
Args:
session: tensorflow session to use.
encoder_inputs: a numpy int matrix of [batch_size, max_source_time_steps]
to feed as encoder inputs
encoder_inputs_length: a numpy int vector of [batch_size]
to feed as sequence lengths for each element in the given batch
decoder_inputs: a numpy int matrix of [batch_size, max_target_time_steps]
to feed as decoder inputs
decoder_inputs_length: a numpy int vector of [batch_size]
to feed as sequence lengths for each element in the given batch
Returns:
            A pair (loss, summary): the cross-entropy loss for the batch and
            the merged training summary.
"""
# Check if the model is in training mode
if self.mode != 'train':
raise ValueError('Train step can only be operated in train mode')
input_feed = self.check_feeds(encoder_inputs, encoder_inputs_length,
decoder_inputs, decoder_inputs_length, False)
# TODO(sdsuo): Understand keep prob
input_feed[self.keep_prob_placeholder.name] = self.keep_prob
output_feed = [
self.updates, # Update Op that does optimization
self.loss, # Loss for current batch
self.summary_op # Training summary
]
outputs = sess.run(output_feed, input_feed)
return outputs[1], outputs[2] # loss, summary
def predict(self, sess, encoder_inputs, encoder_inputs_length):
input_feed = self.check_feeds(encoder_inputs, encoder_inputs_length,
decoder_inputs=None, decoder_inputs_length=None,
predict=True)
# Input feeds for dropout
input_feed[self.keep_prob_placeholder.name] = 1.0
output_feed = [self.decoder_pred_decode]
outputs = sess.run(output_feed, input_feed)
# GreedyDecoder: [batch_size, max_time_step]
# BeamSearchDecoder: [batch_size, max_time_step, beam_width]
return outputs[0]
def init_vars(self, sess, embedding):
sess.run([self.assign_embedding_op], feed_dict={
self.embedding_placeholder: embedding
})
def check_feeds(self, encoder_inputs, encoder_inputs_length,
decoder_inputs, decoder_inputs_length, predict):
"""
Args:
encoder_inputs: a numpy int matrix of [batch_size, max_source_time_steps]
to feed as encoder inputs
encoder_inputs_length: a numpy int vector of [batch_size]
to feed as sequence lengths for each element in the given batch
decoder_inputs: a numpy int matrix of [batch_size, max_target_time_steps]
to feed as decoder inputs
decoder_inputs_length: a numpy int vector of [batch_size]
to feed as sequence lengths for each element in the given batch
predict: a scalar boolean that indicates predict mode
Returns:
A feed for the model that consists of encoder_inputs, encoder_inputs_length,
decoder_inputs, decoder_inputs_length
"""
input_batch_size = encoder_inputs.shape[0]
if input_batch_size != encoder_inputs_length.shape[0]:
raise ValueError("Encoder inputs and their lengths must be equal in their "
"batch_size, %d != %d" % (input_batch_size, encoder_inputs_length.shape[0]))
if not predict:
target_batch_size = decoder_inputs.shape[0]
if target_batch_size != input_batch_size:
raise ValueError("Encoder inputs and Decoder inputs must be equal in their "
"batch_size, %d != %d" % (input_batch_size, target_batch_size))
if target_batch_size != decoder_inputs_length.shape[0]:
raise ValueError("Decoder targets and their lengths must be equal in their "
"batch_size, %d != %d" % (target_batch_size, decoder_inputs_length.shape[0]))
input_feed = {}
input_feed[self.encoder_inputs.name] = encoder_inputs
input_feed[self.encoder_inputs_length.name] = encoder_inputs_length
if not predict:
input_feed[self.decoder_inputs.name] = decoder_inputs
input_feed[self.decoder_inputs_length.name] = decoder_inputs_length
return input_feed
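# Minimal usage sketch (illustrative; `enc`, `enc_len`, `dec`, `dec_len` are
# placeholder numpy batches, not part of this module):
#   sess = tf.Session()
#   sess.run(tf.global_variables_initializer())
#   loss, summary = model.train(sess, enc, enc_len, dec, dec_len)
#   preds = model.predict(sess, enc, enc_len)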
if __name__ == '__main__':
    from IPython import embed  # assumption: `embed` comes from IPython
    model = Seq2SeqModel()
    embed()
| mit | -8,786,661,998,776,761,000 | 39.314587 | 128 | 0.594097 | false | 4.148101 | true | false | false |
examachine/pisi | tests/constantstests.py | 1 | 2098 | # Copyright (C) 2005, TUBITAK/UEKAE
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free
# Software Foundation; either version 3 of the License, or (at your option)
# any later version.
#
# Please read the COPYING file.
#
# Author: Eray Ozkural <[email protected]>
import unittest
import pisi.context as ctx
class ContextTestCase(unittest.TestCase):
def testConstness(self):
const = ctx.const
# test if we can get a const attribute?
try:
test = const.package_suffix
self.assertNotEqual(test, "")
except AttributeError:
self.fail("Couldn't get const attribute")
# test binding a new constant
const.test = "test binding"
# test re-binding (which is illegal)
try:
const.test = "test rebinding"
# we shouldn't reach here
self.fail("Rebinding a constant works. Something is wrong!")
except:
            # we achieved our goal with this error. In fact, this is a
            # ConstError, but we can't catch it directly here
pass
# test unbinding (which is also illegal)
try:
del const.test
# we shouldn't reach here
self.fail("Unbinding a constant works. Something is wrong!")
except:
            # we achieved our goal with this error. In fact, this is a
            # ConstError, but we can't catch it directly here
pass
def testConstValues(self):
const = ctx.const
constDict = {
"actions_file": "actions.py",
"setup_func": "setup",
"metadata_xml": "metadata.xml"
}
for k in constDict.keys():
if hasattr(const, k):
value = getattr(const, k)
self.assertEqual(value, constDict[k])
else:
self.fail("Constants does not have an attribute named %s" % k)
suite = unittest.makeSuite(ContextTestCase)
| gpl-3.0 | -8,041,903,705,030,690,000 | 29.852941 | 79 | 0.585796 | false | 4.316872 | true | false | false |
promediacorp/flask-blog | post.py | 1 | 6890 | import datetime
import cgi
import re
from bson.objectid import ObjectId
from helper_functions import *
class Post:
def __init__(self, default_config):
self.collection = default_config['POSTS_COLLECTION']
self.response = {'error': None, 'data': None}
self.debug_mode = default_config['DEBUG']
def get_posts(self, limit, skip, tag=None, search=None):
self.response['error'] = None
cond = {}
if tag is not None:
cond = {'tags': tag}
elif search is not None:
cond = {'$or': [
{'title': {'$regex': search, '$options': 'i'}},
{'body': {'$regex': search, '$options': 'i'}},
{'preview': {'$regex': search, '$options': 'i'}}]}
try:
cursor = self.collection.find(cond).sort(
'date', direction=-1).skip(skip).limit(limit)
self.response['data'] = []
for post in cursor:
if 'tags' not in post:
post['tags'] = []
if 'comments' not in post:
post['comments'] = []
if 'preview' not in post:
post['preview'] = ''
self.response['data'].append({'id': post['_id'],
'title': post['title'],
'body': post['body'],
'preview': post['preview'],
'date': post['date'],
'permalink': post['permalink'],
'tags': post['tags'],
'author': post['author'],
'comments': post['comments']})
except Exception, e:
self.print_debug_info(e, self.debug_mode)
self.response['error'] = 'Posts not found..'
return self.response
def get_post_by_permalink(self, permalink):
self.response['error'] = None
try:
self.response['data'] = self.collection.find_one(
{'permalink': permalink})
except Exception, e:
self.print_debug_info(e, self.debug_mode)
self.response['error'] = 'Post not found..'
return self.response
def get_post_by_id(self, post_id):
self.response['error'] = None
try:
self.response['data'] = self.collection.find_one(
{'_id': ObjectId(post_id)})
if self.response['data']:
if 'tags' not in self.response['data']:
self.response['data']['tags'] = ''
else:
self.response['data']['tags'] = ','.join(
self.response['data']['tags'])
if 'preview' not in self.response['data']:
self.response['data']['preview'] = ''
except Exception, e:
self.print_debug_info(e, self.debug_mode)
self.response['error'] = 'Post not found..'
return self.response
def get_total_count(self, tag=None, search=None):
cond = {}
if tag is not None:
cond = {'tags': tag}
elif search is not None:
cond = {'$or': [
{'title': {'$regex': search, '$options': 'i'}},
{'body': {'$regex': search, '$options': 'i'}},
{'preview': {'$regex': search, '$options': 'i'}}]}
return self.collection.find(cond).count()
def get_tags(self):
self.response['error'] = None
try:
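            # Aggregation pipeline: unwind tags, count posts per tag,
            # sort by count, and keep the ten most-used tags.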
self.response['data'] = self.collection.aggregate([
{'$unwind': '$tags'},
{'$group': {'_id': '$tags', 'count': {'$sum': 1}}},
{'$sort': {'count': -1}},
{'$limit': 10},
{'$project': {'title': '$_id', 'count': 1, '_id': 0}}
])
if self.response['data']['result']:
self.response['data'] = self.response['data']['result']
else:
self.response['data'] = []
except Exception, e:
self.print_debug_info(e, self.debug_mode)
self.response['error'] = 'Get tags error..'
return self.response
def create_new_post(self, post_data):
self.response['error'] = None
try:
self.response['data'] = self.collection.insert(post_data)
except Exception, e:
self.print_debug_info(e, self.debug_mode)
self.response['error'] = 'Adding post error..'
return self.response
def edit_post(self, post_id, post_data):
self.response['error'] = None
del post_data['date']
del post_data['permalink']
try:
self.collection.update(
{'_id': ObjectId(post_id)}, {"$set": post_data}, upsert=False)
self.response['data'] = True
except Exception, e:
self.print_debug_info(e, self.debug_mode)
self.response['error'] = 'Post update error..'
return self.response
def delete_post(self, post_id):
self.response['error'] = None
try:
if self.get_post_by_id(post_id) and self.collection.remove({'_id': ObjectId(post_id)}):
self.response['data'] = True
else:
self.response['data'] = False
except Exception, e:
self.print_debug_info(e, self.debug_mode)
self.response['error'] = 'Deleting post error..'
return self.response
@staticmethod
def validate_post_data(post_data):
# permalink = random_string(12)
        whitespace = re.compile(r'\s')
permalink = whitespace.sub("-", post_data['title']).lower()
post_data['title'] = cgi.escape(post_data['title'])
post_data['preview'] = cgi.escape(post_data['preview'], quote=True)
post_data['body'] = cgi.escape(post_data['body'], quote=True)
post_data['date'] = datetime.datetime.utcnow()
post_data['permalink'] = permalink
return post_data
@staticmethod
def print_debug_info(msg, show=False):
if show:
import sys
import os
error_color = '\033[32m'
error_end = '\033[0m'
error = {'type': sys.exc_info()[0].__name__,
'file': os.path.basename(sys.exc_info()[2].tb_frame.f_code.co_filename),
'line': sys.exc_info()[2].tb_lineno,
'details': str(msg)}
print error_color
print '\n\n---\nError type: %s in file: %s on line: %s\nError details: %s\n---\n\n'\
% (error['type'], error['file'], error['line'], error['details'])
print error_end
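# Illustrative usage (assumes a PyMongo collection; the names below are
# examples only, not part of this module):
#   post = Post({'POSTS_COLLECTION': db.posts, 'DEBUG': True})
#   data = Post.validate_post_data({'title': 'Hello', 'preview': '', 'body': '...'})
#   result = post.create_new_post(data)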
| mit | -2,047,660,337,586,939,400 | 36.650273 | 99 | 0.475907 | false | 4.193548 | false | false | false |
andrewharvey/asgs-stylesheets | configure.py | 1 | 4492 | #!/usr/bin/env python
# Copyright (c) 2011, Development Seed, Inc.
# 2011, Andrew Harvey <[email protected]>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# * Neither the name of the Development Seed, Inc. nor the names of
# its contributors may be used to endorse or promote products
# derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
# TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
# PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
# OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import json
from sys import path
from os.path import join
import argparse
#################################
## argparse
parser = argparse.ArgumentParser(description='Configure an MML file with datasource settings')
parser.add_argument('--host', default='localhost')
parser.add_argument('--port', default='5432')
parser.add_argument('--dbname', default='abs')
parser.add_argument('--user', default='abs')
parser.add_argument('--password', default='abs')
parser.add_argument('--srs', default='+proj=merc +a=6378137 +b=6378137 +lat_ts=0.0 +lon_0=0.0 +x_0=0.0 +y_0=0.0 +k=1.0 +units=m +nadgrids=@null +wktext +no_defs +over')
parser.add_argument('--shapedir', default='./layers/')
# Increase performance if you are only rendering a particular area by
# specifying a bounding box to restrict queries. Format is "XMIN,YMIN,XMAX,YMAX" in the
# same units as the database (probably spherical mercator meters). The
# whole world is "-20037508.34,-20037508.34,20037508.34,20037508.34".
# Leave blank to let Mapnik estimate.
parser.add_argument('--extent', default='12570320.00,-5403474.50,17711958.00,-1636391.88')
parser.add_argument('--mml', required=True)
args = parser.parse_args()
#################################
## configure mml
mml = join(path[0], args.mml + '/' + args.mml + '.mml')
shoreline_300 = args.shapedir.rstrip('/') + '/shoreline_300.shp'
processed_p = args.shapedir.rstrip('/') + '/processed_p.shp'
with open(mml, 'r') as f:
newf = json.loads(f.read())
with open(mml, 'w') as f:
for layer in newf["Layer"]:
if "Datasource" in layer:
ds_type = layer["Datasource"].get("type")
if ds_type and ds_type == "postgis":
layer["Datasource"]["host"] = args.host
layer["Datasource"]["port"] = args.port
layer["Datasource"]["dbname"] = args.dbname
layer["Datasource"]["user"] = args.user
layer["Datasource"]["password"] = args.password
layer["Datasource"]["extent"] = args.extent
layer["srs"] = args.srs
else:
if layer["id"] == "shoreline_300":
layer["Datasource"] = dict();
layer["Datasource"]["file"] = shoreline_300
layer["Datasource"]["type"] = 'shape'
layer["geometry"] = 'polygon'
layer["srs"] = '+proj=merc +a=6378137 +b=6378137 +lat_ts=0.0 +lon_0=0.0 +x_0=0.0 +y_0=0 +k=1.0 +units=m +nadgrids=@null +wktext +no_defs +over'
elif layer["id"] == "processed_p":
layer["Datasource"] = dict();
layer["Datasource"]["file"] = processed_p
layer["Datasource"]["type"] = 'shape'
layer["geometry"] = 'polygon'
layer["srs"] = '+proj=merc +a=6378137 +b=6378137 +lat_ts=0.0 +lon_0=0.0 +x_0=0.0 +y_0=0 +k=1.0 +units=m +nadgrids=@null +wktext +no_defs +over'
f.write(json.dumps(newf, indent=2))
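# Example invocation (values are illustrative):
#   python configure.py --mml <style-name> --host localhost --dbname abs --shapedir ./layers/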
| cc0-1.0 | 5,764,988,682,530,616,000 | 42.61165 | 168 | 0.672752 | false | 3.434251 | false | false | false |
locationlabs/confab | confab/options.py | 1 | 4182 | """
Options for managing Confab.
"""
from os import getcwd
from os.path import basename
from fabric.api import env, task
from fabric.utils import _AttributeDict
from difflib import unified_diff
from magic import Magic
from re import match
def _should_render(mime_type):
"""
Return whether a template file of with a particular mime type
should be rendered.
Some files may need to be excluded from template rendering;
such files will be copied verbatim.
"""
return next((True for pattern in ['text/', 'application/xml'] if match(pattern, mime_type)),
False)
def _is_empty(mime_type):
"""
Return whether a template file is an empty file.
"""
return mime_type == 'inode/x-empty'
def _is_not_temporary(file_name):
"""
Return whether a file name does not represent a temporary file.
When listing configuration files, we usually want temporary
files to be ignored.
"""
return not file_name.endswith('~')
def _is_not_internal(file_name):
"""
Return whether a file name does not represent internal usage.
When listing configuration files, we want to omit internal
files, especially if they are used as Jinja includes
"""
return not basename(file_name).startswith('_')
def _filter_func(file_name):
"""
Return the default filter func, which excludes temporary and internal files.
"""
return _is_not_temporary(file_name) and _is_not_internal(file_name)
def _get_mime_type(file_name):
"""
Return the mime type of a file.
The mime_type will be used to determine if a configuration file is text.
"""
return Magic(mime=True).from_file(file_name)
def _diff(a, b, fromfile=None, tofile=None):
"""
Return a diff using '---', '+++', and '@@' control lines.
By default, uses unified_diff.
"""
return unified_diff(a, b, fromfile=fromfile, tofile=tofile)
def _as_dict(module):
"""
    Returns publicly named values in the module's __dict__.
"""
try:
return {k: v for k, v in module.__dict__.iteritems() if not k[0:1] == '_'}
except AttributeError:
return {}
def _get_base_dir():
"""
Returns the base directory for user's template and data directories.
"""
return env.environmentdef.directory or getcwd()
# Options that control how confab runs.
#
# These are in opposition to options likely to changed
# between different runs of confab, such as directories,
# environments, roles, hosts, etc.
options = _AttributeDict({
# Should yes be assumed for interactive prompts?
'assume_yes': False,
# How to compute a file's mime_type?
'get_mime_type': _get_mime_type,
# How to determine if a template should be rendered?
'should_render': _should_render,
# How to determine if a template is an empty file?
'is_empty': _is_empty,
# How do filter available templates within the jinja environment?
'filter_func': _filter_func,
# How to determine diffs?
'diff': _diff,
# How to get dictionary configuration from module data?
'module_as_dict': _as_dict,
# Base directory for template and data directories.
'get_base_dir': _get_base_dir,
# What is the name of the template directory?
'get_templates_dir': lambda: 'templates',
# What is the name of the data directory?
'get_data_dir': lambda: 'data',
# What is the name of the generated directory?
'get_generated_dir': lambda: 'generated',
# What is the name of the remotes directory?
'get_remotes_dir': lambda: 'remotes',
})
class Options(object):
"""
Context manager to temporarily set options.
"""
def __init__(self, **kwargs):
self.kwargs = kwargs
self.previous = {}
def __enter__(self):
for (k, v) in self.kwargs.iteritems():
self.previous[k] = options.get(k)
options[k] = v
return self
def __exit__(self, exc_type, value, traceback):
for k in self.kwargs.keys():
options[k] = self.previous[k]
@task
def assume_yes():
"""
Set the option to ``assume_yes`` in other tasks.
"""
options.assume_yes = True
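# Example: temporarily override an option for a block of work (illustrative):
#   with Options(assume_yes=True):
#       ...  # code here sees options.assume_yes == True; the old value is restored on exit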
| apache-2.0 | -6,501,699,333,116,751,000 | 24.345455 | 96 | 0.646341 | false | 3.836697 | false | false | false |
jpartogi/django-job-board | job_board/urls.py | 1 | 1230 | from django.conf.urls.defaults import *
from django.contrib.sitemaps import FlatPageSitemap, GenericSitemap
from job_board.models import Job
from job_board.feeds import JobFeed
from job_board.forms import JobForm
from job_board.views import JobFormPreview, job_list, job_detail, job_search
feeds = {
'jobs': JobFeed,
}
info_dict = {
    'queryset': Job.objects.all(),
'date_field': 'posted'
}
sitemaps = {
'flatpages': FlatPageSitemap,
'jobs': GenericSitemap(info_dict, priority=0.6),
}
urlpatterns = patterns('',
url(r'^feed/(?P<url>.*)/$',
'django.contrib.syndication.views.feed',
{'feed_dict': feeds},
name='job-feeds'),
url(r'^sitemap.xml$',
'django.contrib.sitemaps.views.sitemap',
{'sitemaps': sitemaps},
name='job-sitemap'),
url(r'^new/$',
JobFormPreview(JobForm),
name='job-form'),
url(r'^(?P<object_id>\d+)/(?P<slug>[\w-]+)/$',
job_detail,
name='job-detail'),
url(r'^wmd/', include('wmd.urls')),
url(r'^search/$',
job_search,
name='job-search'),
url(r'^$',
job_list,
        name='job-list'), # keep this catch-all pattern last so the more specific URLs above match first
) | bsd-3-clause | 4,233,101,760,313,957,000 | 23.137255 | 86 | 0.604065 | false | 3.435754 | false | true | false |
coco-project/coco | coco/core/auth/authentication_backends.py | 1 | 3945 | from coco.contract.errors import AuthenticationError, ConnectionError, \
UserNotFoundError
from coco.core.helpers import get_internal_ldap_connected, get_user_backend_connected
from coco.core.models import BackendGroup, BackendUser, \
CollaborationGroup
from django.contrib.auth.models import User
from django.core.exceptions import PermissionDenied
import logging
logger = logging.getLogger(__name__)
class BackendProxyAuthentication(object):
"""
Class used to authenticate with the user backends.
more info: https://docs.djangoproject.com/en/1.8/topics/auth/default/#django.contrib.auth.authenticate
"""
def authenticate(self, username=None, password=None):
"""
:inherit.
"""
# check if the user already exists in our system
# if so, use the defined backend_pk for validating the credentials on the backend
# if its a Django only user, disallow the login
user = None
if User.objects.filter(username=username).exists():
user = User.objects.get(username=username)
if hasattr(user, 'backend_user'):
username = user.backend_user.backend_pk
else:
return None # not allowed, Django only user
try:
internal_ldap = get_internal_ldap_connected()
user_backend = get_user_backend_connected()
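            # auth_user raises AuthenticationError when the credentials are rejected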
user_backend.auth_user(username, password)
if user is not None: # existing user
if not user.check_password(password):
user.set_password(password) # XXX: not needed. should we leave it empty?
internal_ldap.set_user_password(username, password)
user.save()
else: # new user
uid = BackendUser.generate_internal_uid()
group = self.create_user_groups(username, uid)
user = self.create_users(username, password, uid, group.backend_group)
group.add_user(user.backend_user)
if user.is_active:
return user
else:
return None
except AuthenticationError:
raise PermissionDenied
except UserNotFoundError:
if user is not None: # exists locally but not on backend
user.delete()
except ConnectionError as ex:
logger.exception(ex)
return None
finally:
try:
internal_ldap.disconnect()
user_backend.disconnect()
except:
pass
def create_user_groups(self, name, gid):
"""
Create the groups for the logging-in user.
:param name: The name of the group to create.
:param gid: The group's ID (on the backend).
"""
collaboration_group = CollaborationGroup(
name=name,
is_single_user_group=True
)
collaboration_group.save()
backend_group = BackendGroup(
django_group=collaboration_group,
backend_id=gid,
backend_pk=name
)
backend_group.save()
return collaboration_group
def create_users(self, username, password, uid, primary_group):
"""
Create the Django users for the logging-in user.
        :param username: The user's username.
        :param password: The user's password.
        :param uid: The user's internal ID (on the backend).
        :param primary_group: The user's primary group.
"""
user = User(username=username, password=password)
user.save()
backend_user = BackendUser(
django_user=user,
backend_id=uid,
backend_pk=username,
primary_group=primary_group
)
backend_user.save()
return user
def get_user(self, user_id):
"""
:inherit.
"""
try:
return User.objects.get(pk=user_id)
except User.DoesNotExist:
return None
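# To activate this backend, reference it from the Django settings; the module
# path below is illustrative and must match the actual project layout:
#   AUTHENTICATION_BACKENDS = (
#       'coco.core.auth.authentication_backends.BackendProxyAuthentication',
#   )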
| bsd-3-clause | -1,043,595,838,817,111,200 | 33.008621 | 106 | 0.586565 | false | 4.587209 | false | false | false |
Harmon758/discord.py | discord/client.py | 1 | 48861 | """
The MIT License (MIT)
Copyright (c) 2015-present Rapptz
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
"""
from __future__ import annotations
import asyncio
import logging
import signal
import sys
import traceback
from typing import Any, Generator, List, Optional, Sequence, TYPE_CHECKING, TypeVar, Union
import aiohttp
from .user import User
from .invite import Invite
from .template import Template
from .widget import Widget
from .guild import Guild
from .channel import _channel_factory
from .enums import ChannelType
from .mentions import AllowedMentions
from .errors import *
from .enums import Status, VoiceRegion
from .flags import ApplicationFlags
from .gateway import *
from .activity import BaseActivity, create_activity
from .voice_client import VoiceClient
from .http import HTTPClient
from .state import ConnectionState
from . import utils
from .object import Object
from .backoff import ExponentialBackoff
from .webhook import Webhook
from .iterators import GuildIterator
from .appinfo import AppInfo
from .ui.view import View
from .stage_instance import StageInstance
__all__ = (
'Client',
)
if TYPE_CHECKING:
from .abc import SnowflakeTime
log = logging.getLogger(__name__)
def _cancel_tasks(loop):
tasks = {t for t in asyncio.all_tasks(loop=loop) if not t.done()}
if not tasks:
return
log.info('Cleaning up after %d tasks.', len(tasks))
for task in tasks:
task.cancel()
loop.run_until_complete(asyncio.gather(*tasks, return_exceptions=True))
log.info('All tasks finished cancelling.')
for task in tasks:
if task.cancelled():
continue
if task.exception() is not None:
loop.call_exception_handler({
'message': 'Unhandled exception during Client.run shutdown.',
'exception': task.exception(),
'task': task
})
def _cleanup_loop(loop):
try:
_cancel_tasks(loop)
loop.run_until_complete(loop.shutdown_asyncgens())
finally:
log.info('Closing the event loop.')
loop.close()
class Client:
r"""Represents a client connection that connects to Discord.
This class is used to interact with the Discord WebSocket and API.
A number of options can be passed to the :class:`Client`.
Parameters
-----------
max_messages: Optional[:class:`int`]
The maximum number of messages to store in the internal message cache.
This defaults to ``1000``. Passing in ``None`` disables the message cache.
.. versionchanged:: 1.3
Allow disabling the message cache and change the default size to ``1000``.
loop: Optional[:class:`asyncio.AbstractEventLoop`]
The :class:`asyncio.AbstractEventLoop` to use for asynchronous operations.
Defaults to ``None``, in which case the default event loop is used via
:func:`asyncio.get_event_loop()`.
connector: :class:`aiohttp.BaseConnector`
The connector to use for connection pooling.
proxy: Optional[:class:`str`]
Proxy URL.
proxy_auth: Optional[:class:`aiohttp.BasicAuth`]
An object that represents proxy HTTP Basic Authorization.
shard_id: Optional[:class:`int`]
Integer starting at ``0`` and less than :attr:`.shard_count`.
shard_count: Optional[:class:`int`]
The total number of shards.
application_id: :class:`int`
The client's application ID.
intents: :class:`Intents`
The intents that you want to enable for the session. This is a way of
disabling and enabling certain gateway events from triggering and being sent.
If not given, defaults to a regularly constructed :class:`Intents` class.
.. versionadded:: 1.5
member_cache_flags: :class:`MemberCacheFlags`
Allows for finer control over how the library caches members.
If not given, defaults to cache as much as possible with the
currently selected intents.
.. versionadded:: 1.5
chunk_guilds_at_startup: :class:`bool`
Indicates if :func:`.on_ready` should be delayed to chunk all guilds
at start-up if necessary. This operation is incredibly slow for large
amounts of guilds. The default is ``True`` if :attr:`Intents.members`
is ``True``.
.. versionadded:: 1.5
status: Optional[:class:`.Status`]
A status to start your presence with upon logging on to Discord.
activity: Optional[:class:`.BaseActivity`]
An activity to start your presence with upon logging on to Discord.
allowed_mentions: Optional[:class:`AllowedMentions`]
Control how the client handles mentions by default on every message sent.
.. versionadded:: 1.4
heartbeat_timeout: :class:`float`
        The maximum number of seconds before timing out and restarting the
WebSocket in the case of not receiving a HEARTBEAT_ACK. Useful if
processing the initial packets take too long to the point of disconnecting
you. The default timeout is 60 seconds.
guild_ready_timeout: :class:`float`
The maximum number of seconds to wait for the GUILD_CREATE stream to end before
preparing the member cache and firing READY. The default timeout is 2 seconds.
.. versionadded:: 1.4
assume_unsync_clock: :class:`bool`
Whether to assume the system clock is unsynced. This applies to the ratelimit handling
code. If this is set to ``True``, the default, then the library uses the time to reset
a rate limit bucket given by Discord. If this is ``False`` then your system clock is
used to calculate how long to sleep for. If this is set to ``False`` it is recommended to
sync your system clock to Google's NTP server.
.. versionadded:: 1.3
Attributes
-----------
ws
The websocket gateway the client is currently connected to. Could be ``None``.
loop: :class:`asyncio.AbstractEventLoop`
The event loop that the client uses for asynchronous operations.
"""
def __init__(self, *, loop=None, **options):
self.ws = None
self.loop = asyncio.get_event_loop() if loop is None else loop
self._listeners = {}
self.shard_id = options.get('shard_id')
self.shard_count = options.get('shard_count')
connector = options.pop('connector', None)
proxy = options.pop('proxy', None)
proxy_auth = options.pop('proxy_auth', None)
unsync_clock = options.pop('assume_unsync_clock', True)
self.http = HTTPClient(connector, proxy=proxy, proxy_auth=proxy_auth, unsync_clock=unsync_clock, loop=self.loop)
self._handlers = {
'ready': self._handle_ready
}
self._hooks = {
'before_identify': self._call_before_identify_hook
}
self._connection = self._get_state(**options)
self._connection.shard_count = self.shard_count
self._closed = False
self._ready = asyncio.Event()
self._connection._get_websocket = self._get_websocket
self._connection._get_client = lambda: self
if VoiceClient.warn_nacl:
VoiceClient.warn_nacl = False
log.warning("PyNaCl is not installed, voice will NOT be supported")
# internals
def _get_websocket(self, guild_id=None, *, shard_id=None):
return self.ws
def _get_state(self, **options):
return ConnectionState(dispatch=self.dispatch, handlers=self._handlers,
hooks=self._hooks, http=self.http, loop=self.loop, **options)
def _handle_ready(self):
self._ready.set()
@property
def latency(self):
""":class:`float`: Measures latency between a HEARTBEAT and a HEARTBEAT_ACK in seconds.
This could be referred to as the Discord WebSocket protocol latency.
"""
ws = self.ws
return float('nan') if not ws else ws.latency
def is_ws_ratelimited(self):
""":class:`bool`: Whether the websocket is currently rate limited.
This can be useful to know when deciding whether you should query members
using HTTP or via the gateway.
.. versionadded:: 1.6
"""
if self.ws:
return self.ws.is_ratelimited()
return False
@property
def user(self):
"""Optional[:class:`.ClientUser`]: Represents the connected client. ``None`` if not logged in."""
return self._connection.user
@property
def guilds(self):
"""List[:class:`.Guild`]: The guilds that the connected client is a member of."""
return self._connection.guilds
@property
def emojis(self):
"""List[:class:`.Emoji`]: The emojis that the connected client has."""
return self._connection.emojis
@property
def cached_messages(self):
"""Sequence[:class:`.Message`]: Read-only list of messages the connected client has cached.
.. versionadded:: 1.1
"""
return utils.SequenceProxy(self._connection._messages or [])
@property
def private_channels(self):
"""List[:class:`.abc.PrivateChannel`]: The private channels that the connected client is participating on.
.. note::
            This returns only up to the 128 most recent private channels due to
            an internal detail of how Discord deals with private channels.
"""
return self._connection.private_channels
@property
def voice_clients(self):
"""List[:class:`.VoiceProtocol`]: Represents a list of voice connections.
These are usually :class:`.VoiceClient` instances.
"""
return self._connection.voice_clients
@property
def application_id(self):
"""Optional[:class:`int`]: The client's application ID.
If this is not passed via ``__init__`` then this is retrieved
through the gateway when an event contains the data. Usually
after :func:`~discord.on_connect` is called.
"""
return self._connection.application_id
@property
def application_flags(self) -> ApplicationFlags:
""":class:`~discord.ApplicationFlags`: The client's application flags.
        .. versionadded:: 2.0
"""
return self._connection.application_flags # type: ignore
def is_ready(self):
""":class:`bool`: Specifies if the client's internal cache is ready for use."""
return self._ready.is_set()
async def _run_event(self, coro, event_name, *args, **kwargs):
try:
await coro(*args, **kwargs)
except asyncio.CancelledError:
pass
except Exception:
try:
await self.on_error(event_name, *args, **kwargs)
except asyncio.CancelledError:
pass
def _schedule_event(self, coro, event_name, *args, **kwargs):
wrapped = self._run_event(coro, event_name, *args, **kwargs)
# Schedules the task
return asyncio.create_task(wrapped, name=f'discord.py: {event_name}')
def dispatch(self, event, *args, **kwargs):
log.debug('Dispatching event %s', event)
method = 'on_' + event
listeners = self._listeners.get(event)
if listeners:
removed = []
for i, (future, condition) in enumerate(listeners):
if future.cancelled():
removed.append(i)
continue
try:
result = condition(*args)
except Exception as exc:
future.set_exception(exc)
removed.append(i)
else:
if result:
if len(args) == 0:
future.set_result(None)
elif len(args) == 1:
future.set_result(args[0])
else:
future.set_result(args)
removed.append(i)
if len(removed) == len(listeners):
self._listeners.pop(event)
else:
for idx in reversed(removed):
del listeners[idx]
try:
coro = getattr(self, method)
except AttributeError:
pass
else:
self._schedule_event(coro, method, *args, **kwargs)
async def on_error(self, event_method, *args, **kwargs):
"""|coro|
The default error handler provided by the client.
By default this prints to :data:`sys.stderr` however it could be
overridden to have a different implementation.
Check :func:`~discord.on_error` for more details.
"""
print(f'Ignoring exception in {event_method}', file=sys.stderr)
traceback.print_exc()
# hooks
async def _call_before_identify_hook(self, shard_id, *, initial=False):
# This hook is an internal hook that actually calls the public one.
# It allows the library to have its own hook without stepping on the
# toes of those who need to override their own hook.
await self.before_identify_hook(shard_id, initial=initial)
async def before_identify_hook(self, shard_id, *, initial=False):
"""|coro|
A hook that is called before IDENTIFYing a session. This is useful
if you wish to have more control over the synchronization of multiple
IDENTIFYing clients.
The default implementation sleeps for 5 seconds.
.. versionadded:: 1.4
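        Example
        ---------
        A sketch of overriding this hook to stagger IDENTIFYs per shard;
        ``MyClient`` is an illustrative subclass:
        .. code-block:: python3
            class MyClient(discord.Client):
                async def before_identify_hook(self, shard_id, *, initial=False):
                    await asyncio.sleep((shard_id or 0) * 5.0)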
Parameters
------------
shard_id: :class:`int`
The shard ID that requested being IDENTIFY'd
initial: :class:`bool`
Whether this IDENTIFY is the first initial IDENTIFY.
"""
if not initial:
await asyncio.sleep(5.0)
# login state management
async def login(self, token):
"""|coro|
Logs in the client with the specified credentials.
Parameters
-----------
token: :class:`str`
The authentication token. Do not prefix this token with
anything as the library will do it for you.
Raises
------
:exc:`.LoginFailure`
The wrong credentials are passed.
:exc:`.HTTPException`
An unknown HTTP related error occurred,
usually when it isn't 200 or the known incorrect credentials
passing status code.
"""
log.info('logging in using static token')
await self.http.static_login(token.strip())
async def connect(self, *, reconnect=True):
"""|coro|
Creates a websocket connection and lets the websocket listen
to messages from Discord. This is a loop that runs the entire
event system and miscellaneous aspects of the library. Control
is not resumed until the WebSocket connection is terminated.
Parameters
-----------
reconnect: :class:`bool`
If we should attempt reconnecting, either due to internet
failure or a specific failure on Discord's part. Certain
disconnects that lead to bad state will not be handled (such as
invalid sharding payloads or bad tokens).
Raises
-------
:exc:`.GatewayNotFound`
If the gateway to connect to Discord is not found. Usually if this
is thrown then there is a Discord API outage.
:exc:`.ConnectionClosed`
The websocket connection has been terminated.
"""
backoff = ExponentialBackoff()
ws_params = {
'initial': True,
'shard_id': self.shard_id,
}
while not self.is_closed():
try:
coro = DiscordWebSocket.from_client(self, **ws_params)
self.ws = await asyncio.wait_for(coro, timeout=60.0)
ws_params['initial'] = False
while True:
await self.ws.poll_event()
except ReconnectWebSocket as e:
log.info('Got a request to %s the websocket.', e.op)
self.dispatch('disconnect')
ws_params.update(sequence=self.ws.sequence, resume=e.resume, session=self.ws.session_id)
continue
except (OSError,
HTTPException,
GatewayNotFound,
ConnectionClosed,
aiohttp.ClientError,
asyncio.TimeoutError) as exc:
self.dispatch('disconnect')
if not reconnect:
await self.close()
if isinstance(exc, ConnectionClosed) and exc.code == 1000:
# clean close, don't re-raise this
return
raise
if self.is_closed():
return
# If we get connection reset by peer then try to RESUME
if isinstance(exc, OSError) and exc.errno in (54, 10054):
ws_params.update(sequence=self.ws.sequence, initial=False, resume=True, session=self.ws.session_id)
continue
# We should only get this when an unhandled close code happens,
# such as a clean disconnect (1000) or a bad state (bad token, no sharding, etc)
# sometimes, discord sends us 1000 for unknown reasons so we should reconnect
# regardless and rely on is_closed instead
if isinstance(exc, ConnectionClosed):
if exc.code == 4014:
raise PrivilegedIntentsRequired(exc.shard_id) from None
if exc.code != 1000:
await self.close()
raise
retry = backoff.delay()
log.exception("Attempting a reconnect in %.2fs", retry)
await asyncio.sleep(retry)
# Always try to RESUME the connection
# If the connection is not RESUME-able then the gateway will invalidate the session.
# This is apparently what the official Discord client does.
ws_params.update(sequence=self.ws.sequence, resume=True, session=self.ws.session_id)
async def close(self):
"""|coro|
Closes the connection to Discord.
"""
if self._closed:
return
self._closed = True
for voice in self.voice_clients:
try:
await voice.disconnect()
except Exception:
# if an error happens during disconnects, disregard it.
pass
if self.ws is not None and self.ws.open:
await self.ws.close(code=1000)
await self.http.close()
self._ready.clear()
def clear(self):
"""Clears the internal state of the bot.
After this, the bot can be considered "re-opened", i.e. :meth:`is_closed`
and :meth:`is_ready` both return ``False`` along with the bot's internal
cache cleared.
"""
self._closed = False
self._ready.clear()
self._connection.clear()
self.http.recreate()
async def start(self, token, *, reconnect=True):
"""|coro|
A shorthand coroutine for :meth:`login` + :meth:`connect`.
Raises
-------
TypeError
An unexpected keyword argument was received.
"""
await self.login(token)
await self.connect(reconnect=reconnect)
def run(self, *args, **kwargs):
"""A blocking call that abstracts away the event loop
initialisation from you.
If you want more control over the event loop then this
function should not be used. Use :meth:`start` coroutine
or :meth:`connect` + :meth:`login`.
Roughly Equivalent to: ::
try:
loop.run_until_complete(start(*args, **kwargs))
except KeyboardInterrupt:
loop.run_until_complete(close())
# cancel all tasks lingering
finally:
loop.close()
.. warning::
This function must be the last function to call due to the fact that it
is blocking. That means that registration of events or anything being
called after this function call will not execute until it returns.
"""
loop = self.loop
try:
loop.add_signal_handler(signal.SIGINT, lambda: loop.stop())
loop.add_signal_handler(signal.SIGTERM, lambda: loop.stop())
except NotImplementedError:
pass
async def runner():
try:
await self.start(*args, **kwargs)
finally:
if not self.is_closed():
await self.close()
def stop_loop_on_completion(f):
loop.stop()
future = asyncio.ensure_future(runner(), loop=loop)
future.add_done_callback(stop_loop_on_completion)
try:
loop.run_forever()
except KeyboardInterrupt:
log.info('Received signal to terminate bot and event loop.')
finally:
future.remove_done_callback(stop_loop_on_completion)
log.info('Cleaning up tasks.')
_cleanup_loop(loop)
if not future.cancelled():
try:
return future.result()
except KeyboardInterrupt:
# I am unsure why this gets raised here but suppress it anyway
return None
# properties
def is_closed(self):
""":class:`bool`: Indicates if the websocket connection is closed."""
return self._closed
@property
def activity(self):
"""Optional[:class:`.BaseActivity`]: The activity being used upon
logging in.
"""
return create_activity(self._connection._activity)
@activity.setter
def activity(self, value):
if value is None:
self._connection._activity = None
elif isinstance(value, BaseActivity):
self._connection._activity = value.to_dict()
else:
raise TypeError('activity must derive from BaseActivity.')
@property
def allowed_mentions(self):
"""Optional[:class:`~discord.AllowedMentions`]: The allowed mention configuration.
.. versionadded:: 1.4
"""
return self._connection.allowed_mentions
@allowed_mentions.setter
def allowed_mentions(self, value):
if value is None or isinstance(value, AllowedMentions):
self._connection.allowed_mentions = value
else:
raise TypeError(f'allowed_mentions must be AllowedMentions not {value.__class__!r}')
@property
def intents(self):
""":class:`~discord.Intents`: The intents configured for this connection.
.. versionadded:: 1.5
"""
return self._connection.intents
# helpers/getters
@property
def users(self):
"""List[:class:`~discord.User`]: Returns a list of all the users the bot can see."""
return list(self._connection._users.values())
def get_channel(self, id):
"""Returns a channel with the given ID.
Parameters
-----------
id: :class:`int`
The ID to search for.
Returns
--------
Optional[Union[:class:`.abc.GuildChannel`, :class:`.abc.PrivateChannel`]]
The returned channel or ``None`` if not found.
"""
return self._connection.get_channel(id)
def get_stage_instance(self, id) -> Optional[StageInstance]:
"""Returns a stage instance with the given stage channel ID.
.. versionadded:: 2.0
Parameters
-----------
id: :class:`int`
The ID to search for.
Returns
--------
Optional[:class:`StageInstance`]
            The stage instance or ``None`` if not found.
"""
from .channel import StageChannel
channel = self._connection.get_channel(id)
if isinstance(channel, StageChannel):
return channel.instance
def get_guild(self, id):
"""Returns a guild with the given ID.
Parameters
-----------
id: :class:`int`
The ID to search for.
Returns
--------
Optional[:class:`.Guild`]
The guild or ``None`` if not found.
"""
return self._connection._get_guild(id)
def get_user(self, id):
"""Returns a user with the given ID.
Parameters
-----------
id: :class:`int`
The ID to search for.
Returns
--------
Optional[:class:`~discord.User`]
The user or ``None`` if not found.
"""
return self._connection.get_user(id)
def get_emoji(self, id):
"""Returns an emoji with the given ID.
Parameters
-----------
id: :class:`int`
The ID to search for.
Returns
--------
Optional[:class:`.Emoji`]
The custom emoji or ``None`` if not found.
"""
return self._connection.get_emoji(id)
def get_all_channels(self):
"""A generator that retrieves every :class:`.abc.GuildChannel` the client can 'access'.
This is equivalent to: ::
for guild in client.guilds:
for channel in guild.channels:
yield channel
.. note::
Just because you receive a :class:`.abc.GuildChannel` does not mean that
you can communicate in said channel. :meth:`.abc.GuildChannel.permissions_for` should
be used for that.
Yields
------
:class:`.abc.GuildChannel`
A channel the client can 'access'.
"""
for guild in self.guilds:
yield from guild.channels
def get_all_members(self):
"""Returns a generator with every :class:`.Member` the client can see.
This is equivalent to: ::
for guild in client.guilds:
for member in guild.members:
yield member
Yields
------
:class:`.Member`
A member the client can see.
"""
for guild in self.guilds:
yield from guild.members
# listeners/waiters
async def wait_until_ready(self):
"""|coro|
Waits until the client's internal cache is all ready.
"""
await self._ready.wait()
def wait_for(self, event, *, check=None, timeout=None):
"""|coro|
Waits for a WebSocket event to be dispatched.
This could be used to wait for a user to reply to a message,
or to react to a message, or to edit a message in a self-contained
way.
The ``timeout`` parameter is passed onto :func:`asyncio.wait_for`. By default,
it does not timeout. Note that this does propagate the
:exc:`asyncio.TimeoutError` for you in case of timeout and is provided for
ease of use.
In case the event returns multiple arguments, a :class:`tuple` containing those
arguments is returned instead. Please check the
:ref:`documentation <discord-api-events>` for a list of events and their
parameters.
This function returns the **first event that meets the requirements**.
Examples
---------
Waiting for a user reply: ::
@client.event
async def on_message(message):
if message.content.startswith('$greet'):
channel = message.channel
await channel.send('Say hello!')
def check(m):
return m.content == 'hello' and m.channel == channel
msg = await client.wait_for('message', check=check)
await channel.send(f'Hello {msg.author}!')
Waiting for a thumbs up reaction from the message author: ::
@client.event
async def on_message(message):
if message.content.startswith('$thumb'):
channel = message.channel
await channel.send('Send me that \N{THUMBS UP SIGN} reaction, mate')
def check(reaction, user):
return user == message.author and str(reaction.emoji) == '\N{THUMBS UP SIGN}'
try:
reaction, user = await client.wait_for('reaction_add', timeout=60.0, check=check)
except asyncio.TimeoutError:
await channel.send('\N{THUMBS DOWN SIGN}')
else:
await channel.send('\N{THUMBS UP SIGN}')
Parameters
------------
event: :class:`str`
The event name, similar to the :ref:`event reference <discord-api-events>`,
but without the ``on_`` prefix, to wait for.
check: Optional[Callable[..., :class:`bool`]]
A predicate to check what to wait for. The arguments must meet the
parameters of the event being waited for.
timeout: Optional[:class:`float`]
The number of seconds to wait before timing out and raising
:exc:`asyncio.TimeoutError`.
Raises
-------
asyncio.TimeoutError
If a timeout is provided and it was reached.
Returns
--------
Any
Returns no arguments, a single argument, or a :class:`tuple` of multiple
arguments that mirrors the parameters passed in the
:ref:`event reference <discord-api-events>`.
"""
future = self.loop.create_future()
if check is None:
def _check(*args):
return True
check = _check
ev = event.lower()
try:
listeners = self._listeners[ev]
except KeyError:
listeners = []
self._listeners[ev] = listeners
listeners.append((future, check))
return asyncio.wait_for(future, timeout)
# event registration
def event(self, coro):
"""A decorator that registers an event to listen to.
You can find more info about the events on the :ref:`documentation below <discord-api-events>`.
The events must be a :ref:`coroutine <coroutine>`, if not, :exc:`TypeError` is raised.
Example
---------
.. code-block:: python3
@client.event
async def on_ready():
print('Ready!')
Raises
--------
TypeError
The coroutine passed is not actually a coroutine.
"""
if not asyncio.iscoroutinefunction(coro):
raise TypeError('event registered must be a coroutine function')
setattr(self, coro.__name__, coro)
log.debug('%s has successfully been registered as an event', coro.__name__)
return coro
async def change_presence(self, *, activity=None, status=None, afk=False):
"""|coro|
Changes the client's presence.
Example
---------
.. code-block:: python3
game = discord.Game("with the API")
await client.change_presence(status=discord.Status.idle, activity=game)
Parameters
----------
activity: Optional[:class:`.BaseActivity`]
The activity being done. ``None`` if no currently active activity is done.
status: Optional[:class:`.Status`]
Indicates what status to change to. If ``None``, then
:attr:`.Status.online` is used.
afk: Optional[:class:`bool`]
Indicates if you are going AFK. This allows the discord
client to know how to handle push notifications better
for you in case you are actually idle and not lying.
Raises
------
:exc:`.InvalidArgument`
If the ``activity`` parameter is not the proper type.
"""
if status is None:
status = 'online'
status_enum = Status.online
elif status is Status.offline:
status = 'invisible'
status_enum = Status.offline
else:
status_enum = status
status = str(status)
await self.ws.change_presence(activity=activity, status=status, afk=afk)
for guild in self._connection.guilds:
me = guild.me
if me is None:
continue
if activity is not None:
me.activities = (activity,)
else:
me.activities = ()
me.status = status_enum
# Guild stuff
    def fetch_guilds(self, *, limit: int = 100, before: SnowflakeTime = None, after: SnowflakeTime = None) -> GuildIterator:
"""Retrieves an :class:`.AsyncIterator` that enables receiving your guilds.
.. note::
Using this, you will only receive :attr:`.Guild.owner`, :attr:`.Guild.icon`,
:attr:`.Guild.id`, and :attr:`.Guild.name` per :class:`.Guild`.
.. note::
This method is an API call. For general usage, consider :attr:`guilds` instead.
Examples
---------
Usage ::
async for guild in client.fetch_guilds(limit=150):
print(guild.name)
Flattening into a list ::
guilds = await client.fetch_guilds(limit=150).flatten()
# guilds is now a list of Guild...
All parameters are optional.
Parameters
-----------
limit: Optional[:class:`int`]
The number of guilds to retrieve.
If ``None``, it retrieves every guild you have access to. Note, however,
that this would make it a slow operation.
Defaults to ``100``.
before: Union[:class:`.abc.Snowflake`, :class:`datetime.datetime`]
Retrieves guilds before this date or object.
If a datetime is provided, it is recommended to use a UTC aware datetime.
If the datetime is naive, it is assumed to be local time.
after: Union[:class:`.abc.Snowflake`, :class:`datetime.datetime`]
Retrieve guilds after this date or object.
If a datetime is provided, it is recommended to use a UTC aware datetime.
If the datetime is naive, it is assumed to be local time.
Raises
------
:exc:`.HTTPException`
Getting the guilds failed.
Yields
--------
:class:`.Guild`
The guild with the guild data parsed.
"""
return GuildIterator(self, limit=limit, before=before, after=after)
async def fetch_template(self, code):
"""|coro|
Gets a :class:`.Template` from a discord.new URL or code.
Parameters
-----------
code: Union[:class:`.Template`, :class:`str`]
The Discord Template Code or URL (must be a discord.new URL).
Raises
-------
:exc:`.NotFound`
The template is invalid.
:exc:`.HTTPException`
Getting the template failed.
Returns
--------
:class:`.Template`
The template from the URL/code.
"""
code = utils.resolve_template(code)
data = await self.http.get_template(code)
return Template(data=data, state=self._connection) # type: ignore
async def fetch_guild(self, guild_id):
"""|coro|
Retrieves a :class:`.Guild` from an ID.
.. note::
Using this, you will **not** receive :attr:`.Guild.channels`, :attr:`.Guild.members`,
:attr:`.Member.activity` and :attr:`.Member.voice` per :class:`.Member`.
.. note::
This method is an API call. For general usage, consider :meth:`get_guild` instead.
Parameters
-----------
guild_id: :class:`int`
The guild's ID to fetch from.
Raises
------
:exc:`.Forbidden`
You do not have access to the guild.
:exc:`.HTTPException`
Getting the guild failed.
Returns
--------
:class:`.Guild`
The guild from the ID.
"""
data = await self.http.get_guild(guild_id)
return Guild(data=data, state=self._connection)
async def create_guild(self, name: str, region: Optional[VoiceRegion] = None, icon: Any = None, *, code: str = None):
"""|coro|
Creates a :class:`.Guild`.
Bot accounts in more than 10 guilds are not allowed to create guilds.
Parameters
----------
name: :class:`str`
The name of the guild.
region: :class:`.VoiceRegion`
The region for the voice communication server.
Defaults to :attr:`.VoiceRegion.us_west`.
icon: :class:`bytes`
The :term:`py:bytes-like object` representing the icon. See :meth:`.ClientUser.edit`
for more details on what is expected.
code: Optional[:class:`str`]
The code for a template to create the guild with.
.. versionadded:: 1.4
Raises
------
:exc:`.HTTPException`
Guild creation failed.
:exc:`.InvalidArgument`
Invalid icon image format given. Must be PNG or JPG.
Returns
-------
:class:`.Guild`
The guild created. This is not the same guild that is
added to cache.
"""
if icon is not None:
icon = utils._bytes_to_base64_data(icon)
region = region or VoiceRegion.us_west
region_value = region.value
if code:
data = await self.http.create_from_template(code, name, region_value, icon)
else:
data = await self.http.create_guild(name, region_value, icon)
return Guild(data=data, state=self._connection)
async def fetch_stage_instance(self, channel_id: int) -> StageInstance:
"""|coro|
Gets a :class:`StageInstance` for a stage channel id.
.. versionadded:: 2.0
Parameters
-----------
channel_id: :class:`int`
The stage channel ID.
Raises
-------
:exc:`.NotFound`
The stage instance or channel could not be found.
:exc:`.HTTPException`
Getting the stage instance failed.
Returns
--------
:class:`StageInstance`
The stage instance from the stage channel ID.
"""
data = await self.http.get_stage_instance(channel_id)
guild = self.get_guild(int(data['guild_id']))
return StageInstance(guild=guild, state=self._connection, data=data) # type: ignore
# Invite management
async def fetch_invite(self, url: Union[Invite, str], *, with_counts: bool = True, with_expiration: bool = True) -> Invite:
"""|coro|
Gets an :class:`.Invite` from a discord.gg URL or ID.
.. note::
If the invite is for a guild you have not joined, the guild and channel
attributes of the returned :class:`.Invite` will be :class:`.PartialInviteGuild` and
:class:`.PartialInviteChannel` respectively.
Parameters
-----------
url: Union[:class:`.Invite`, :class:`str`]
The Discord invite ID or URL (must be a discord.gg URL).
with_counts: :class:`bool`
Whether to include count information in the invite. This fills the
:attr:`.Invite.approximate_member_count` and :attr:`.Invite.approximate_presence_count`
fields.
with_expiration: :class:`bool`
Whether to include the expiration date of the invite. This fills the
:attr:`.Invite.expires_at` field.
.. versionadded:: 2.0
Raises
-------
:exc:`.NotFound`
The invite has expired or is invalid.
:exc:`.HTTPException`
Getting the invite failed.
Returns
--------
:class:`.Invite`
The invite from the URL/ID.
"""
invite_id = utils.resolve_invite(url)
data = await self.http.get_invite(invite_id, with_counts=with_counts, with_expiration=with_expiration)
return Invite.from_incomplete(state=self._connection, data=data)
async def delete_invite(self, invite: Union[Invite, str]) -> None:
"""|coro|
Revokes an :class:`.Invite`, URL, or ID to an invite.
You must have the :attr:`~.Permissions.manage_channels` permission in
the associated guild to do this.
Parameters
----------
invite: Union[:class:`.Invite`, :class:`str`]
The invite to revoke.
Raises
-------
:exc:`.Forbidden`
You do not have permissions to revoke invites.
:exc:`.NotFound`
The invite is invalid or expired.
:exc:`.HTTPException`
Revoking the invite failed.
"""
invite_id = utils.resolve_invite(invite)
await self.http.delete_invite(invite_id)
# Miscellaneous stuff
async def fetch_widget(self, guild_id):
"""|coro|
Gets a :class:`.Widget` from a guild ID.
.. note::
The guild must have the widget enabled to get this information.
Parameters
-----------
guild_id: :class:`int`
The ID of the guild.
Raises
-------
:exc:`.Forbidden`
The widget for this guild is disabled.
:exc:`.HTTPException`
Retrieving the widget failed.
Returns
--------
:class:`.Widget`
The guild's widget.
"""
data = await self.http.get_widget(guild_id)
return Widget(state=self._connection, data=data)
async def application_info(self):
"""|coro|
Retrieves the bot's application information.
Raises
-------
:exc:`.HTTPException`
Retrieving the information failed somehow.
Returns
--------
:class:`.AppInfo`
The bot's application information.
"""
data = await self.http.application_info()
if 'rpc_origins' not in data:
data['rpc_origins'] = None
return AppInfo(self._connection, data)
async def fetch_user(self, user_id):
"""|coro|
Retrieves a :class:`~discord.User` based on their ID.
You do not have to share any guilds with the user to get this information,
however many operations do require that you do.
.. note::
This method is an API call. If you have :attr:`discord.Intents.members` and member cache enabled, consider :meth:`get_user` instead.
Parameters
-----------
user_id: :class:`int`
The user's ID to fetch from.
Raises
-------
:exc:`.NotFound`
A user with this ID does not exist.
:exc:`.HTTPException`
Fetching the user failed.
Returns
--------
:class:`~discord.User`
The user you requested.
"""
data = await self.http.get_user(user_id)
return User(state=self._connection, data=data)
async def fetch_channel(self, channel_id):
"""|coro|
Retrieves a :class:`.abc.GuildChannel` or :class:`.abc.PrivateChannel` with the specified ID.
.. note::
This method is an API call. For general usage, consider :meth:`get_channel` instead.
.. versionadded:: 1.2
Raises
-------
:exc:`.InvalidData`
An unknown channel type was received from Discord.
:exc:`.HTTPException`
Retrieving the channel failed.
:exc:`.NotFound`
Invalid Channel ID.
:exc:`.Forbidden`
You do not have permission to fetch this channel.
Returns
--------
Union[:class:`.abc.GuildChannel`, :class:`.abc.PrivateChannel`]
The channel from the ID.
"""
data = await self.http.get_channel(channel_id)
factory, ch_type = _channel_factory(data['type'])
if factory is None:
raise InvalidData('Unknown channel type {type} for channel ID {id}.'.format_map(data))
if ch_type in (ChannelType.group, ChannelType.private):
channel = factory(me=self.user, data=data, state=self._connection)
else:
guild_id = int(data['guild_id'])
guild = self.get_guild(guild_id) or Object(id=guild_id)
channel = factory(guild=guild, state=self._connection, data=data)
return channel
async def fetch_webhook(self, webhook_id):
"""|coro|
Retrieves a :class:`.Webhook` with the specified ID.
Raises
--------
:exc:`.HTTPException`
Retrieving the webhook failed.
:exc:`.NotFound`
Invalid webhook ID.
:exc:`.Forbidden`
You do not have permission to fetch this webhook.
Returns
---------
:class:`.Webhook`
The webhook you requested.
"""
data = await self.http.get_webhook(webhook_id)
return Webhook.from_state(data, state=self._connection)
async def create_dm(self, user):
"""|coro|
Creates a :class:`.DMChannel` with this user.
This should be rarely called, as this is done transparently for most
people.
.. versionadded:: 2.0
Parameters
-----------
user: :class:`~discord.abc.Snowflake`
The user to create a DM with.
Returns
-------
:class:`.DMChannel`
The channel that was created.
"""
state = self._connection
found = state._get_private_channel_by_user(user.id)
if found:
return found
data = await state.http.start_private_message(user.id)
return state.add_dm_channel(data)
def add_view(self, view: View, *, message_id: Optional[int] = None) -> None:
"""Registers a :class:`~discord.ui.View` for persistent listening.
This method should be used for when a view is comprised of components
that last longer than the lifecycle of the program.
Parameters
------------
view: :class:`discord.ui.View`
The view to register for dispatching.
message_id: Optional[:class:`int`]
The message ID that the view is attached to. This is currently used to
refresh the view's state during message update events. If not given
then message update events are not propagated for the view.
Raises
-------
TypeError
A view was not passed.
ValueError
The view is not persistent. A persistent view has no timeout
and all their components have an explicitly provided custom_id.
"""
if not isinstance(view, View):
raise TypeError(f'expected an instance of View not {view.__class__!r}')
if not view.is_persistent():
raise ValueError('View is not persistent. Items need to have a custom_id set and View must have no timeout')
self._connection.store_view(view, message_id)
@property
def persistent_views(self) -> Sequence[View]:
"""Sequence[:class:`View`]: A sequence of persistent views added to the client."""
return self._connection.persistent_views
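# --- Illustrative sketch (not part of the original source) ---
# Hypothetical startup registration of a persistent view, following the
# add_view() contract above: timeout=None and an explicit custom_id on every
# component. The names below (MyPersistentView, client) are assumptions, and
# the exact button-callback signature depends on the library version.
#
# class MyPersistentView(discord.ui.View):
#     def __init__(self):
#         super().__init__(timeout=None)
#
#     @discord.ui.button(label='Click me', custom_id='my-persistent-button')
#     async def click(self, button, interaction):
#         await interaction.response.send_message('Clicked!')
#
# # somewhere during startup, before connecting:
# client.add_view(MyPersistentView())  # no message_id: listen on all messages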
| mit | -2,710,249,609,055,593,000 | 32.238776 | 144 | 0.579951 | false | 4.513301 | false | false | false |
twoodford/audiovisualizer | build-movie.py | 1 | 1391 | import os
import os.path
import subprocess
import sys
from PIL import Image
LISTF = "_list.txt"
def get_dimensions(fpath):
#print(fpath)
return Image.open(fpath).size
def run(folder, outfile, framerate=30, outres=(1920,1080)):
    # sort so the concat list sees frames in temporal order
    jpglist = sorted(os.path.join(folder, f) for f in os.listdir(folder) if f.startswith("frame_"))
dimen = get_dimensions(jpglist[0])
ratio = float(outres[1])/outres[0]
if dimen[0]*ratio < dimen[1]:
crop = (dimen[0], int(dimen[0]*ratio))
else:
crop = (int(dimen[1]/ratio), dimen[1])
with open(LISTF, "w") as ltxt:
for f in jpglist:
ltxt.write("file '"+f+"'\n")
fsel_args = ["-f", "concat", "-i", LISTF]
rs_str = "".join(("crop=", str(crop[0]), ":", str(crop[1]),":0:0,scale=",str(outres[0]),":",str(outres[1])))
enc_flags = ["-pix_fmt", "yuv420p", "-preset", "veryslow", "-crf", "18"]
args_final = ["ffmpeg", "-r", str(framerate)] + fsel_args + ["-vf", rs_str] + enc_flags + [outfile]
print(" ".join(args_final))
subprocess.call(args_final)
os.remove(LISTF)
if __name__=="__main__":
jpglist = [os.path.join(sys.argv[1], f) for f in os.listdir(sys.argv[1]) if f.startswith("frame_")]
dimen = get_dimensions(jpglist[0])
dimen = (dimen[0] if dimen[0]%2==0 else dimen[0]-1, dimen[1] if dimen[1]%2==0 else dimen[1]-1)
run(sys.argv[1], sys.argv[2], outres=dimen)
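# Illustrative invocation (assumes a folder of frame_0001.jpg ... frames):
#   python build-movie.py render_frames/ output.mp4
# Note: if absolute paths end up in the concat list, newer ffmpeg builds may
# additionally need "-safe 0" ahead of "-i" for the concat demuxer.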
| apache-2.0 | -3,590,627,063,179,411,500 | 37.638889 | 112 | 0.594536 | false | 2.759921 | false | false | false |
dsweet04/rekall | rekall-lib/rekall_lib/registry.py | 1 | 5206 | # Rekall Memory Forensics
# Copyright (C) 2011
#
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Michael Cohen <[email protected]>
#
# ******************************************************
#
# * This program is free software; you can redistribute it and/or
# * modify it under the terms of the GNU General Public License
# * as published by the Free Software Foundation; either version 2
# * of the License, or (at your option) any later version.
# *
# * This program is distributed in the hope that it will be useful,
# * but WITHOUT ANY WARRANTY; without even the implied warranty of
# * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# * GNU General Public License for more details.
# *
# * You should have received a copy of the GNU General Public License
# * along with this program; if not, write to the Free Software
# * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
# *****************************************************
""" This module implements a class registry.
We scan the memory_plugins directory for all python files and add those classes
which should be registered into their own lookup tables. These are then ordered
as required. The rest of Rekall Memory Forensics will then call onto the
registered classes when needed.
The MetaclassRegistry automatically adds any derived class to the base
class. This means that we do not need to go through a special initialization
step; as soon as a module is imported, the plugin is registered.
"""
__author__ = "Michael Cohen <[email protected]>"
class classproperty(property):
"""A property that can be called on classes."""
def __get__(self, cls, owner):
return self.fget(owner)
def memoize(f):
cache = {}
def helper(*args):
cached = cache.get(args, memoize)
if cached is not memoize:
return cached
cached = f(*args)
cache[args] = cached
return cached
return helper
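# Minimal usage sketch for memoize (illustrative, not part of the original):
#
# @memoize
# def fib(n):
#     return n if n < 2 else fib(n - 1) + fib(n - 2)
#
# fib(30)  # repeated calls with the same args are served from the cache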
class UniqueObjectIdMetaclass(type):
"""Give each object a unique ID.
    Unlike id(), this number will not be reused when the objects are destroyed,
hence it can be used to identify identical objects without keeping these
around.
"""
ID = 0
def __call__(cls, *args, **kwargs):
res = super(UniqueObjectIdMetaclass, cls).__call__(*args, **kwargs)
res._object_id = UniqueObjectIdMetaclass.ID # pylint: disable=protected-access
UniqueObjectIdMetaclass.ID += 1
return res
class UniqueObjectIdMixin(object):
__metaclass__ = UniqueObjectIdMetaclass
class MetaclassRegistry(UniqueObjectIdMetaclass):
"""Automatic Plugin Registration through metaclasses."""
def __init__(cls, name, bases, env_dict):
super(MetaclassRegistry, cls).__init__(name, bases, env_dict)
cls._install_constructors(cls)
# Attach the classes dict to the baseclass and have all derived classes
# use the same one:
for base in bases:
try:
cls.classes = base.classes
cls.classes_by_name = base.classes_by_name
cls.plugin_feature = base.plugin_feature
cls.top_level_class = base.top_level_class
break
except AttributeError:
cls.classes = {}
cls.classes_by_name = {}
cls.plugin_feature = cls.__name__
# Keep a reference to the top level class
cls.top_level_class = cls
        # The following should not be registered as they are abstract. Classes
        # are abstract if they have the __abstract attribute (note this is not
        # inheritable, so each abstract class must be explicitly marked).
abstract_attribute = "_%s__abstract" % name
if getattr(cls, abstract_attribute, None):
return
if not cls.__name__.startswith("Abstract"):
if cls.__name__ in cls.classes:
raise RuntimeError(
"Multiple definitions for class %s (%s)" % (
cls, cls.classes[cls.__name__]))
cls.classes[cls.__name__] = cls
name = getattr(cls, "name", None)
# We expect that classes by name will collide, which is why each
# value is a list of classes with that name.
cls.classes_by_name.setdefault(name, []).append(cls)
try:
if cls.top_level_class.include_plugins_as_attributes:
setattr(cls.top_level_class, cls.__name__, cls)
except AttributeError:
pass
# Allow the class itself to initialize itself.
cls_initializer = getattr(cls, "_class_init", None)
if cls_initializer:
cls_initializer()
@classmethod
def _install_constructors(mcs, cls):
def ByName(self, name):
for impl in self.classes.values():
if getattr(impl, "name", None) == name:
return impl
cls.ImplementationByName = classmethod(ByName)
def ByClass(self, name):
return self.classes.get(name)
cls.ImplementationByClass = classmethod(ByClass)
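# Illustrative sketch of typical registry usage (assumed class names). A base
# class adopts the metaclass; every concrete subclass is then registered as a
# side effect of being defined, with "Abstract"-prefixed names skipped:
#
# class AbstractPlugin(object):
#     __metaclass__ = MetaclassRegistry  # Python 2 style, as in this module
#
# class FooPlugin(AbstractPlugin):
#     name = "foo"
#
# AbstractPlugin.classes                      # {'FooPlugin': <class FooPlugin>}
# AbstractPlugin.ImplementationByName("foo")  # <class FooPlugin>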
| gpl-2.0 | -1,171,829,265,467,452,700 | 33.939597 | 87 | 0.619478 | false | 4.400676 | false | false | false |
AutorestCI/azure-sdk-for-python | azure-mgmt-network/azure/mgmt/network/v2017_11_01/models/available_providers_list_country.py | 1 | 1381 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class AvailableProvidersListCountry(Model):
"""Country details.
:param country_name: The country name.
:type country_name: str
:param providers: A list of Internet service providers.
:type providers: list[str]
:param states: List of available states in the country.
:type states:
list[~azure.mgmt.network.v2017_11_01.models.AvailableProvidersListState]
"""
_attribute_map = {
'country_name': {'key': 'countryName', 'type': 'str'},
'providers': {'key': 'providers', 'type': '[str]'},
'states': {'key': 'states', 'type': '[AvailableProvidersListState]'},
}
def __init__(self, country_name=None, providers=None, states=None):
super(AvailableProvidersListCountry, self).__init__()
self.country_name = country_name
self.providers = providers
self.states = states
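# Illustrative construction (assumed values; not part of the generated file):
#
# country = AvailableProvidersListCountry(
#     country_name='United States',
#     providers=['Provider A', 'Provider B'],
#     states=[],  # list of AvailableProvidersListState
# )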
| mit | -3,422,688,781,120,634,400 | 36.324324 | 77 | 0.604634 | false | 4.426282 | false | false | false |
utam0k/c3os | c3os/client.py | 1 | 1344 | import socket
import json
import time
import multiprocessing as mp
from c3os import utils
from c3os import conf
from c3os import db
from c3os.api.type import APITYPE
CONF = conf.CONF
def start():
""" Start client service """
mp.Process(target=client).start()
def client():
""" client main routine """
db_pool = db.generate_pool()
while True:
send_db(db_pool)
time.sleep(3.0)
def send_db(db_pool):
"""Information on its DB is sent to other c3os.
Args:
db_pool (DBPool): DBPool class.
Returns:
None:
"""
all_instance_info = utils.to_dict(db_pool)
    all_region_names = list(all_instance_info.keys())
for dest_region_name, dest in CONF['dests'].items():
host, port = dest.split(',')
for region_name in all_region_names:
if dest_region_name.lower() == region_name.lower():
all_instance_info.pop(region_name)
break
db_info = json.dumps(all_instance_info)
data = json.dumps({'type': APITYPE.ADD_DB, 'body': db_info})
try:
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
sock.connect((host, int(port)))
sock.sendall(bytes(data, "utf-8"))
except:
print("Error: Connected", host, port)
| mit | -5,227,906,240,231,535,000 | 23.888889 | 75 | 0.594494 | false | 3.536842 | false | false | false |
plotly/python-api | packages/python/plotly/plotly/figure_factory/_annotated_heatmap.py | 1 | 10195 | from __future__ import absolute_import, division
from plotly import exceptions, optional_imports
import plotly.colors as clrs
from plotly.figure_factory import utils
from plotly.graph_objs import graph_objs
from plotly.validators.heatmap import ColorscaleValidator
# Optional imports, may be None for users that only use our core functionality.
np = optional_imports.get_module("numpy")
def validate_annotated_heatmap(z, x, y, annotation_text):
"""
Annotated-heatmap-specific validations
Check that if a text matrix is supplied, it has the same
dimensions as the z matrix.
See FigureFactory.create_annotated_heatmap() for params
:raises: (PlotlyError) If z and text matrices do not have the same
dimensions.
"""
if annotation_text is not None and isinstance(annotation_text, list):
utils.validate_equal_length(z, annotation_text)
for lst in range(len(z)):
if len(z[lst]) != len(annotation_text[lst]):
raise exceptions.PlotlyError(
"z and text should have the " "same dimensions"
)
if x:
if len(x) != len(z[0]):
raise exceptions.PlotlyError(
"oops, the x list that you "
"provided does not match the "
"width of your z matrix "
)
if y:
if len(y) != len(z):
raise exceptions.PlotlyError(
"oops, the y list that you "
"provided does not match the "
"length of your z matrix "
)
def create_annotated_heatmap(
z,
x=None,
y=None,
annotation_text=None,
colorscale="Plasma",
font_colors=None,
showscale=False,
reversescale=False,
**kwargs
):
"""
Function that creates annotated heatmaps
This function adds annotations to each cell of the heatmap.
:param (list[list]|ndarray) z: z matrix to create heatmap.
:param (list) x: x axis labels.
:param (list) y: y axis labels.
:param (list[list]|ndarray) annotation_text: Text strings for
annotations. Should have the same dimensions as the z matrix. If no
text is added, the values of the z matrix are annotated. Default =
z matrix values.
:param (list|str) colorscale: heatmap colorscale.
:param (list) font_colors: List of two color strings: [min_text_color,
max_text_color] where min_text_color is applied to annotations for
heatmap values < (max_value - min_value)/2. If font_colors is not
defined, the colors are defined logically as black or white
depending on the heatmap's colorscale.
:param (bool) showscale: Display colorscale. Default = False
:param (bool) reversescale: Reverse colorscale. Default = False
:param kwargs: kwargs passed through plotly.graph_objs.Heatmap.
These kwargs describe other attributes about the annotated Heatmap
trace such as the colorscale. For more information on valid kwargs
call help(plotly.graph_objs.Heatmap)
Example 1: Simple annotated heatmap with default configuration
>>> import plotly.figure_factory as ff
>>> z = [[0.300000, 0.00000, 0.65, 0.300000],
... [1, 0.100005, 0.45, 0.4300],
... [0.300000, 0.00000, 0.65, 0.300000],
... [1, 0.100005, 0.45, 0.00000]]
>>> fig = ff.create_annotated_heatmap(z)
>>> fig.show()
"""
# Avoiding mutables in the call signature
font_colors = font_colors if font_colors is not None else []
validate_annotated_heatmap(z, x, y, annotation_text)
# validate colorscale
colorscale_validator = ColorscaleValidator()
colorscale = colorscale_validator.validate_coerce(colorscale)
annotations = _AnnotatedHeatmap(
z, x, y, annotation_text, colorscale, font_colors, reversescale, **kwargs
).make_annotations()
if x or y:
trace = dict(
type="heatmap",
z=z,
x=x,
y=y,
colorscale=colorscale,
showscale=showscale,
reversescale=reversescale,
**kwargs
)
layout = dict(
annotations=annotations,
xaxis=dict(ticks="", dtick=1, side="top", gridcolor="rgb(0, 0, 0)"),
yaxis=dict(ticks="", dtick=1, ticksuffix=" "),
)
else:
trace = dict(
type="heatmap",
z=z,
colorscale=colorscale,
showscale=showscale,
reversescale=reversescale,
**kwargs
)
layout = dict(
annotations=annotations,
xaxis=dict(
ticks="", side="top", gridcolor="rgb(0, 0, 0)", showticklabels=False
),
yaxis=dict(ticks="", ticksuffix=" ", showticklabels=False),
)
data = [trace]
return graph_objs.Figure(data=data, layout=layout)
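# A second, illustrative example with axis labels and custom cell text
# (assumed data, mirroring the docstring example above):
#
# z = [[1, 2], [3, 4]]
# text = [['a', 'b'], ['c', 'd']]
# fig = create_annotated_heatmap(z, x=['c1', 'c2'], y=['r1', 'r2'],
#                                annotation_text=text, colorscale='Viridis')
# fig.show()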
def to_rgb_color_list(color_str, default):
if "rgb" in color_str:
return [int(v) for v in color_str.strip("rgb()").split(",")]
elif "#" in color_str:
return clrs.hex_to_rgb(color_str)
else:
return default
def should_use_black_text(background_color):
return (
background_color[0] * 0.299
+ background_color[1] * 0.587
+ background_color[2] * 0.114
) > 186
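# The 0.299/0.587/0.114 weights above are the Rec. 601 luma coefficients; 186
# is a commonly used perceived-brightness cutoff for choosing black vs. white
# text over a colored background.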
class _AnnotatedHeatmap(object):
"""
Refer to TraceFactory.create_annotated_heatmap() for docstring
"""
def __init__(
self, z, x, y, annotation_text, colorscale, font_colors, reversescale, **kwargs
):
self.z = z
if x:
self.x = x
else:
self.x = range(len(z[0]))
if y:
self.y = y
else:
self.y = range(len(z))
if annotation_text is not None:
self.annotation_text = annotation_text
else:
self.annotation_text = self.z
self.colorscale = colorscale
self.reversescale = reversescale
self.font_colors = font_colors
def get_text_color(self):
"""
Get font color for annotations.
The annotated heatmap can feature two text colors: min_text_color and
max_text_color. The min_text_color is applied to annotations for
heatmap values < (max_value - min_value)/2. The user can define these
two colors. Otherwise the colors are defined logically as black or
white depending on the heatmap's colorscale.
:rtype (string, string) min_text_color, max_text_color: text
color for annotations for heatmap values <
(max_value - min_value)/2 and text color for annotations for
heatmap values >= (max_value - min_value)/2
"""
# Plotly colorscales ranging from a lighter shade to a darker shade
colorscales = [
"Greys",
"Greens",
"Blues",
"YIGnBu",
"YIOrRd",
"RdBu",
"Picnic",
"Jet",
"Hot",
"Blackbody",
"Earth",
"Electric",
"Viridis",
"Cividis",
]
# Plotly colorscales ranging from a darker shade to a lighter shade
colorscales_reverse = ["Reds"]
white = "#FFFFFF"
black = "#000000"
if self.font_colors:
min_text_color = self.font_colors[0]
max_text_color = self.font_colors[-1]
elif self.colorscale in colorscales and self.reversescale:
min_text_color = black
max_text_color = white
elif self.colorscale in colorscales:
min_text_color = white
max_text_color = black
elif self.colorscale in colorscales_reverse and self.reversescale:
min_text_color = white
max_text_color = black
elif self.colorscale in colorscales_reverse:
min_text_color = black
max_text_color = white
elif isinstance(self.colorscale, list):
min_col = to_rgb_color_list(self.colorscale[0][1], [255, 255, 255])
max_col = to_rgb_color_list(self.colorscale[-1][1], [255, 255, 255])
# swap min/max colors if reverse scale
if self.reversescale:
min_col, max_col = max_col, min_col
if should_use_black_text(min_col):
min_text_color = black
else:
min_text_color = white
if should_use_black_text(max_col):
max_text_color = black
else:
max_text_color = white
else:
min_text_color = black
max_text_color = black
return min_text_color, max_text_color
def get_z_mid(self):
"""
Get the mid value of z matrix
:rtype (float) z_avg: average val from z matrix
"""
if np and isinstance(self.z, np.ndarray):
z_min = np.amin(self.z)
z_max = np.amax(self.z)
else:
z_min = min([v for row in self.z for v in row])
z_max = max([v for row in self.z for v in row])
z_mid = (z_max + z_min) / 2
return z_mid
def make_annotations(self):
"""
Get annotations for each cell of the heatmap with graph_objs.Annotation
:rtype (list[dict]) annotations: list of annotations for each cell of
the heatmap
"""
min_text_color, max_text_color = _AnnotatedHeatmap.get_text_color(self)
z_mid = _AnnotatedHeatmap.get_z_mid(self)
annotations = []
for n, row in enumerate(self.z):
for m, val in enumerate(row):
font_color = min_text_color if val < z_mid else max_text_color
annotations.append(
graph_objs.layout.Annotation(
text=str(self.annotation_text[n][m]),
x=self.x[m],
y=self.y[n],
xref="x1",
yref="y1",
font=dict(color=font_color),
showarrow=False,
)
)
return annotations
| mit | -5,518,445,393,110,075,000 | 32.316993 | 87 | 0.567043 | false | 3.934774 | false | false | false |
gangadhar-kadam/sapphire_app | accounts/report/budget_variance_report/budget_variance_report.py | 1 | 4626 | # Copyright (c) 2013, Web Notes Technologies Pvt. Ltd.
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import webnotes
from webnotes import _, msgprint
from webnotes.utils import flt
import time
from accounts.utils import get_fiscal_year
from controllers.trends import get_period_date_ranges, get_period_month_ranges
def execute(filters=None):
if not filters: filters = {}
columns = get_columns(filters)
period_month_ranges = get_period_month_ranges(filters["period"], filters["fiscal_year"])
cam_map = get_costcenter_account_month_map(filters)
data = []
for cost_center, cost_center_items in cam_map.items():
for account, monthwise_data in cost_center_items.items():
row = [cost_center, account]
totals = [0, 0, 0]
for relevant_months in period_month_ranges:
period_data = [0, 0, 0]
for month in relevant_months:
month_data = monthwise_data.get(month, {})
for i, fieldname in enumerate(["target", "actual", "variance"]):
value = flt(month_data.get(fieldname))
period_data[i] += value
totals[i] += value
period_data[2] = period_data[0] - period_data[1]
row += period_data
totals[2] = totals[0] - totals[1]
row += totals
data.append(row)
return columns, sorted(data, key=lambda x: (x[0], x[1]))
def get_columns(filters):
for fieldname in ["fiscal_year", "period", "company"]:
if not filters.get(fieldname):
label = (" ".join(fieldname.split("_"))).title()
msgprint(_("Please specify") + ": " + label,
raise_exception=True)
columns = ["Cost Center:Link/Cost Center:120", "Account:Link/Account:120"]
group_months = False if filters["period"] == "Monthly" else True
for from_date, to_date in get_period_date_ranges(filters["period"], filters["fiscal_year"]):
for label in ["Target (%s)", "Actual (%s)", "Variance (%s)"]:
if group_months:
label = label % (from_date.strftime("%b") + " - " + to_date.strftime("%b"))
else:
label = label % from_date.strftime("%b")
columns.append(label+":Float:120")
return columns + ["Total Target:Float:120", "Total Actual:Float:120",
"Total Variance:Float:120"]
# Get cost center & target details
def get_costcenter_target_details(filters):
return webnotes.conn.sql("""select cc.name, cc.distribution_id,
cc.parent_cost_center, bd.account, bd.budget_allocated
from `tabCost Center` cc, `tabBudget Detail` bd
where bd.parent=cc.name and bd.fiscal_year=%s and
cc.company=%s order by cc.name""" % ('%s', '%s'),
(filters.get("fiscal_year"), filters.get("company")), as_dict=1)
# Get target distribution details of accounts of cost center
def get_target_distribution_details(filters):
target_details = {}
for d in webnotes.conn.sql("""select bd.name, bdd.month, bdd.percentage_allocation
from `tabBudget Distribution Detail` bdd, `tabBudget Distribution` bd
where bdd.parent=bd.name and bd.fiscal_year=%s""", (filters["fiscal_year"]), as_dict=1):
target_details.setdefault(d.name, {}).setdefault(d.month, flt(d.percentage_allocation))
return target_details
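# Shape of the returned mapping (values illustrative):
# {"<Budget Distribution name>": {"January": 8.33, "February": 8.33, ...}}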
# Get actual details from GL entry
def get_actual_details(filters):
ac_details = webnotes.conn.sql("""select gl.account, gl.debit, gl.credit,
gl.cost_center, MONTHNAME(gl.posting_date) as month_name
from `tabGL Entry` gl, `tabBudget Detail` bd
where gl.fiscal_year=%s and company=%s
and bd.account=gl.account and bd.parent=gl.cost_center""" % ('%s', '%s'),
(filters.get("fiscal_year"), filters.get("company")), as_dict=1)
cc_actual_details = {}
for d in ac_details:
cc_actual_details.setdefault(d.cost_center, {}).setdefault(d.account, []).append(d)
return cc_actual_details
def get_costcenter_account_month_map(filters):
import datetime
costcenter_target_details = get_costcenter_target_details(filters)
tdd = get_target_distribution_details(filters)
actual_details = get_actual_details(filters)
cam_map = {}
for ccd in costcenter_target_details:
for month_id in range(1, 13):
month = datetime.date(2013, month_id, 1).strftime('%B')
cam_map.setdefault(ccd.name, {}).setdefault(ccd.account, {})\
.setdefault(month, webnotes._dict({
"target": 0.0, "actual": 0.0
}))
tav_dict = cam_map[ccd.name][ccd.account][month]
month_percentage = tdd.get(ccd.distribution_id, {}).get(month, 0) \
if ccd.distribution_id else 100.0/12
tav_dict.target = flt(ccd.budget_allocated) * month_percentage / 100
for ad in actual_details.get(ccd.name, {}).get(ccd.account, []):
if ad.month_name == month:
tav_dict.actual += ad.debit - ad.credit
return cam_map | agpl-3.0 | 6,781,928,573,172,004,000 | 35.722222 | 93 | 0.68396 | false | 3.119353 | false | false | false |
Hammer2900/SunflowerX | application/tools/find_files.py | 1 | 13799 | import os
import gtk
import user
import pango
import gobject
from threading import Thread, Event
class Column:
ICON = 0
NAME = 1
DIRECTORY = 2
class FindFiles(gobject.GObject):
"""Find files tool"""
__gtype_name__ = 'Sunflower_FindFiles'
__gsignals__ = {
'notify-start': (gobject.SIGNAL_RUN_LAST, None, ()),
'notify-stop': (gobject.SIGNAL_RUN_LAST, None, ())
}
def __init__(self, parent, application):
gobject.GObject.__init__(self)
# store parameters
self._parent = parent
self._application = application
self._extensions = []
self._path = self._parent.path
self._provider = None
self._running = False
# thread control object
self._abort = Event()
if hasattr(self._parent, 'get_provider'):
self._provider = self._parent.get_provider()
# configure window
self.window = gtk.Window(type=gtk.WINDOW_TOPLEVEL)
self.window.set_title(_('Find files'))
self.window.set_default_size(550, 500)
self.window.set_position(gtk.WIN_POS_CENTER_ON_PARENT)
self.window.set_transient_for(application)
self.window.set_border_width(7)
self.window.set_type_hint(gtk.gdk.WINDOW_TYPE_HINT_DIALOG)
self.window.set_wmclass('Sunflower', 'Sunflower')
self.window.connect('key-press-event', self._handle_key_press)
# create interface
vbox = gtk.VBox(False, 7)
# create path and basic options
self._table_basic = gtk.Table(3, 2, False)
self._table_basic.set_col_spacings(5)
self._table_basic.set_row_spacings(2)
label_path = gtk.Label(_('Search in:'))
label_path.set_alignment(0, 0.5)
self._entry_path = gtk.Entry()
self._entry_path.connect('activate', self.find_files)
if hasattr(self._parent, 'path'):
# get path from the parent
self._entry_path.set_text(self._parent.path)
else:
# parent has no path, set user home directory
self._entry_path.set_text(os.path.expanduser(user.home))
button_browse = gtk.Button(label=_('Browse'))
button_browse.connect('clicked', self._choose_directory)
self._checkbox_recursive = gtk.CheckButton(label=_('Search recursively'))
self._checkbox_recursive.set_active(True)
# create extensions notebook
self._extension_list = gtk.Notebook()
# create list
self._list = gtk.ListStore(str, str, str)
self._names = gtk.TreeView(model=self._list)
cell_icon = gtk.CellRendererPixbuf()
cell_name = gtk.CellRendererText()
cell_directory = gtk.CellRendererText()
col_name = gtk.TreeViewColumn(_('Name'))
col_name.set_expand(True)
col_directory = gtk.TreeViewColumn(_('Location'))
col_directory.set_expand(True)
# pack renderer
col_name.pack_start(cell_icon, False)
col_name.pack_start(cell_name, True)
col_directory.pack_start(cell_directory, True)
# connect renderer attributes
col_name.add_attribute(cell_icon, 'icon-name', Column.ICON)
col_name.add_attribute(cell_name, 'text', Column.NAME)
col_directory.add_attribute(cell_directory, 'text', Column.DIRECTORY)
self._names.append_column(col_name)
self._names.append_column(col_directory)
self._names.connect('row-activated', self.__handle_row_activated)
container = gtk.ScrolledWindow()
container.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_ALWAYS)
container.set_shadow_type(gtk.SHADOW_IN)
# create status label
self._status = gtk.Label()
self._status.set_alignment(0, 0.5)
self._status.set_ellipsize(pango.ELLIPSIZE_MIDDLE)
self._status.set_property('no-show-all', True)
# create controls
hbox_controls = gtk.HBox(False, 5)
self._image_find = gtk.Image()
self._image_find.set_from_stock(gtk.STOCK_MEDIA_PLAY, gtk.ICON_SIZE_BUTTON)
self._button_find = gtk.Button()
self._button_find.set_label(_('Start'))
self._button_find.set_image(self._image_find)
self._button_find.connect('clicked', self.find_files)
button_close = gtk.Button(stock=gtk.STOCK_CLOSE)
button_close.connect('clicked', self._close_window)
# pack interface
self._table_basic.attach(label_path, 0, 1, 0, 1, xoptions=gtk.SHRINK | gtk.FILL)
self._table_basic.attach(self._entry_path, 1, 2, 0, 1, xoptions=gtk.EXPAND | gtk.FILL)
self._table_basic.attach(button_browse, 2, 3, 0, 1, xoptions=gtk.SHRINK | gtk.FILL)
self._table_basic.attach(self._checkbox_recursive, 1, 2, 1, 2)
container.add(self._names)
hbox_controls.pack_end(self._button_find, False, False, 0)
hbox_controls.pack_end(button_close, False, False, 0)
vbox.pack_start(self._table_basic, False, False, 0)
vbox.pack_start(self._extension_list, False, False, 0)
vbox.pack_end(hbox_controls, False, False, 0)
vbox.pack_end(self._status, False, False, 0)
vbox.pack_end(container, True, True, 0)
self.window.add(vbox)
# create extensions
self.__create_extensions()
# show all widgets
self.window.show_all()
def __handle_row_activated(self, treeview, path, view_column, data=None):
"""Handle actions on list"""
# get list selection
selection = treeview.get_selection()
list_, iter_ = selection.get_selected()
# we need selection for this
if iter_ is None: return
name = list_.get_value(iter_, Column.NAME)
path = list_.get_value(iter_, Column.DIRECTORY)
# get active object
active_object = self._application.get_active_object()
if hasattr(active_object, 'change_path'):
# change path
active_object.change_path(path, name)
# close window
self._close_window()
else:
# notify user about active object
dialog = gtk.MessageDialog(
self.window,
gtk.DIALOG_DESTROY_WITH_PARENT,
gtk.MESSAGE_INFO,
gtk.BUTTONS_OK,
_(
'Active object doesn\'t support changing '
'path. Set focus on a different object, '
'preferably file list, and try again.'
)
)
dialog.run()
dialog.destroy()
def __create_extensions(self):
"""Create rename extensions"""
for ExtensionClass in self._application.find_extension_classes.values():
extension = ExtensionClass(self)
title = extension.get_title()
# add tab
self._extension_list.append_page(extension.get_container(), gtk.Label(title))
# store extension for later use
self._extensions.append(extension)
def __update_status_label(self, path):
"""Update status label with current scanning path"""
self._status.set_text(path)
def __update_status(self, running=True):
"""Update button status"""
self._running = running
if running:
# disable interface to prevent changes during search
self._table_basic.set_sensitive(False)
self._extension_list.set_sensitive(False)
# show status bar
self._status.show()
# update find button
self._image_find.set_from_stock(gtk.STOCK_MEDIA_STOP, gtk.ICON_SIZE_BUTTON)
self._button_find.set_label(_('Stop'))
else:
			# re-enable interface now that the search has finished
self._table_basic.set_sensitive(True)
self._extension_list.set_sensitive(True)
# hide status bar
self._status.hide()
# update find button
self._image_find.set_from_stock(gtk.STOCK_MEDIA_PLAY, gtk.ICON_SIZE_BUTTON)
self._button_find.set_label(_('Start'))
def __find_files(self, path, children, scan_recursively):
"""Threaded find files method"""
scan_queue = []
extension_list = []
# prepare extension objects for operation
for child in children:
extension_list.append(child.get_data('extension'))
# tell extensions search is starting
self.emit('notify-start')
# update thread status
gobject.idle_add(self.__update_status, True)
gobject.idle_add(self.__update_status_label, path)
# add current path to scan queue
try:
item_list = self._provider.list_dir(path)
item_list = map(lambda new_item: os.path.join(path, new_item), item_list)
scan_queue.extend(item_list)
except:
pass
# traverse through directories
while not self._abort.is_set() and len(scan_queue) > 0:
# get next item in queue
item = scan_queue.pop(0)
if self._provider.is_dir(item) and scan_recursively:
# extend scan queue with directory content
gobject.idle_add(self.__update_status_label, item)
try:
item_list = self._provider.list_dir(item)
item_list = map(lambda new_item: os.path.join(item, new_item), item_list)
scan_queue.extend(item_list)
except:
pass
			# check if the item fits the criteria
match = True
for extension in extension_list:
if not extension.is_path_ok(item):
match = False
break
# add item if score is right
if match:
name = os.path.basename(item)
path = os.path.dirname(item)
icon = self._application.icon_manager.get_icon_for_file(item)
self._list.append((icon, name, path))
# update thread status
gobject.idle_add(self.__update_status, False)
# tell extensions search has been stopped
self.emit('notify-stop')
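		# Note: the scan above is an iterative breadth-first traversal driven
		# by scan_queue, so deep directory trees are handled without recursion.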
def _close_window(self, widget=None, data=None):
"""Close window"""
self._abort.set() # notify search thread we are terminating
self.window.destroy()
def _choose_directory(self, widget=None, data=None):
"""Show 'FileChooser' dialog"""
dialog = gtk.FileChooserDialog(
title=_('Find files'),
parent=self._application,
action=gtk.FILE_CHOOSER_ACTION_SELECT_FOLDER,
buttons=(
gtk.STOCK_CANCEL,
gtk.RESPONSE_REJECT,
gtk.STOCK_OK,
gtk.RESPONSE_ACCEPT
)
)
dialog.set_filename(self._entry_path.get_text())
response = dialog.run()
if response == gtk.RESPONSE_ACCEPT:
self._entry_path.set_text(dialog.get_filename())
dialog.destroy()
def _handle_key_press(self, widget, event, data=None):
"""Handle pressing keys"""
if event.keyval == gtk.keysyms.Escape:
self._close_window()
def stop_search(self, widget=None, data=None):
"""Stop searching for files"""
pass
def find_files(self, widget=None, data=None):
"""Start searching for files"""
if not self._running:
# thread is not running, start it
path = self._entry_path.get_text()
# make sure we have a valid provider
if self._provider is None:
ProviderClass = self._application.get_provider_by_protocol('file')
self._provider = ProviderClass(self._parent)
# check if specified path exists
if not self._provider.is_dir(path):
dialog = gtk.MessageDialog(
self.window,
gtk.DIALOG_DESTROY_WITH_PARENT,
gtk.MESSAGE_ERROR,
gtk.BUTTONS_OK,
_(
'Specified path is not valid or doesn\'t '
'exist anymore. Please check your selection '
'and try again.'
)
)
dialog.run()
dialog.destroy()
return
# get list of active extensions
active_children = filter(
lambda child: child.get_data('extension').is_active(),
self._extension_list.get_children()
)
if len(active_children) == 0:
dialog = gtk.MessageDialog(
self.window,
gtk.DIALOG_DESTROY_WITH_PARENT,
gtk.MESSAGE_WARNING,
gtk.BUTTONS_OK,
_(
'You need to enable at least one extension '
'in order to find files and directories!'
)
)
dialog.run()
dialog.destroy()
return
# set thread control objects
self._abort.clear()
# clear existing list
self._list.clear()
# start the thread
params = {
'path': path,
'children': active_children,
'scan_recursively': self._checkbox_recursive.get_active()
}
thread = Thread(target=self.__find_files, kwargs=params)
thread.start()
else:
# thread is running, set abort event
self._abort.set()
| gpl-3.0 | -399,886,151,447,045,440 | 32.250602 | 94 | 0.560838 | false | 4.158831 | false | false | false |
bdang2012/taiga-back-casting | tests/unit/test_timeline.py | 1 | 3510 | # Copyright (C) 2014-2015 Andrey Antukh <[email protected]>
# Copyright (C) 2014-2015 Jesús Espino <[email protected]>
# Copyright (C) 2014-2015 David Barragán <[email protected]>
# Copyright (C) 2014-2015 Anler Hernández <[email protected]>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from unittest.mock import patch, call
from django.core.exceptions import ValidationError
from taiga.timeline import service
from taiga.timeline.models import Timeline
from taiga.projects.models import Project
from taiga.users.models import User
import pytest
def test_push_to_timeline_many_objects():
with patch("taiga.timeline.service._add_to_object_timeline") as mock:
users = [User(), User(), User()]
project = Project()
service.push_to_timeline(users, project, "test", project.created_date)
assert mock.call_count == 3
assert mock.mock_calls == [
call(users[0], project, "test", project.created_date, "default", {}),
call(users[1], project, "test", project.created_date, "default", {}),
call(users[2], project, "test", project.created_date, "default", {}),
]
with pytest.raises(Exception):
service.push_to_timeline(None, project, "test")
def test_add_to_objects_timeline():
with patch("taiga.timeline.service._add_to_object_timeline") as mock:
users = [User(), User(), User()]
project = Project()
service._add_to_objects_timeline(users, project, "test", project.created_date)
assert mock.call_count == 3
assert mock.mock_calls == [
call(users[0], project, "test", project.created_date, "default", {}),
call(users[1], project, "test", project.created_date, "default", {}),
call(users[2], project, "test", project.created_date, "default", {}),
]
with pytest.raises(Exception):
service.push_to_timeline(None, project, "test")
def test_get_impl_key_from_model():
assert service._get_impl_key_from_model(Timeline, "test") == "timeline.timeline.test"
with pytest.raises(Exception):
service._get_impl_key(None)
def test_get_impl_key_from_typename():
assert service._get_impl_key_from_typename("timeline.timeline", "test") == "timeline.timeline.test"
with pytest.raises(Exception):
service._get_impl_key(None)
def test_register_timeline_implementation():
test_func = lambda x: "test-func-result"
service.register_timeline_implementation("timeline.timeline", "test", test_func)
assert service._timeline_impl_map["timeline.timeline.test"](None) == "test-func-result"
@service.register_timeline_implementation("timeline.timeline", "test-decorator")
def decorated_test_function(x):
return "test-decorated-func-result"
assert service._timeline_impl_map["timeline.timeline.test-decorator"](None) == "test-decorated-func-result"
| agpl-3.0 | -5,399,271,881,389,798,000 | 42.296296 | 111 | 0.687482 | false | 3.679958 | true | false | false |
trnewman/VT-USRP-daughterboard-drivers_python | gr-wxgui/src/python/numbersink.py | 1 | 25273 | #!/usr/bin/env python
#
# Copyright 2003,2004,2005,2006,2007 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 59 Temple Place - Suite 330,
# Boston, MA 02111-1307, USA.
#
from gnuradio import gr, gru, window
from gnuradio.wxgui import stdgui
import wx
#from wx import StaticText
import gnuradio.wxgui.plot as plot
import numpy
import threading
import math
default_numbersink_size = (640,240)
default_number_rate = gr.prefs().get_long('wxgui', 'number_rate', 15)
class number_sink_base(object):
def __init__(self, input_is_real=False, unit='',base_value=0, minval=-100.0,maxval=100.0,factor=1.0,decimal_places=10, ref_level=50,
sample_rate=1,
number_rate=default_number_rate,
average=False, avg_alpha=None, label='', peak_hold=False):
# initialize common attributes
self.unit=unit
self.base_value = base_value
self.minval=minval
self.maxval=maxval
self.factor=factor
self.y_divs = 8
self.decimal_places=decimal_places
self.ref_level = ref_level
self.sample_rate = sample_rate
number_size=1
self.number_size = number_size
self.number_rate = number_rate
self.average = average
if avg_alpha is None:
self.avg_alpha = 2.0 / number_rate
else:
self.avg_alpha = avg_alpha
self.label = label
self.peak_hold = peak_hold
self.show_gauge = True
self.input_is_real = input_is_real
self.msgq = gr.msg_queue(2) # queue that holds a maximum of 2 messages
def set_decimal_places(self, decimal_places):
self.decimal_places = decimal_places
def set_ref_level(self, ref_level):
self.ref_level = ref_level
def print_current_value(self, comment):
print comment,self.win.current_value
def set_average(self, average):
self.average = average
if average:
self.avg.set_taps(self.avg_alpha)
self.set_peak_hold(False)
else:
self.avg.set_taps(1.0)
def set_peak_hold(self, enable):
self.peak_hold = enable
if enable:
self.set_average(False)
self.win.set_peak_hold(enable)
def set_show_gauge(self, enable):
self.show_gauge = enable
self.win.set_show_gauge(enable)
def set_avg_alpha(self, avg_alpha):
self.avg_alpha = avg_alpha
def set_base_value(self, base_value):
self.base_value = base_value
class number_sink_f(gr.hier_block, number_sink_base):
def __init__(self, fg, parent, unit='',base_value=0,minval=-100.0,maxval=100.0,factor=1.0,
decimal_places=10, ref_level=50, sample_rate=1, #number_size=512,
number_rate=default_number_rate, average=False, avg_alpha=None,
label='', size=default_numbersink_size, peak_hold=False):
number_sink_base.__init__(self, unit=unit, input_is_real=True, base_value=base_value,
minval=minval,maxval=maxval,factor=factor,
decimal_places=decimal_places, ref_level=ref_level,
sample_rate=sample_rate, #number_size=number_size,
number_rate=number_rate,
average=average, avg_alpha=avg_alpha, label=label,
peak_hold=peak_hold)
number_size=1
#s2p = gr.stream_to_vector(gr.sizeof_float, number_size)
one_in_n = gr.keep_one_in_n(gr.sizeof_float,
max(1, int(sample_rate/number_rate)))
#c2mag = gr.complex_to_mag(number_size)
self.avg = gr.single_pole_iir_filter_ff(1.0, number_size)
# FIXME We need to add 3dB to all bins but the DC bin
#log = gr.nlog10_ff(20, number_size,
# -20*math.log10(number_size)-10*math.log10(power/number_size))
sink = gr.message_sink(gr.sizeof_float , self.msgq, True)
#fg.connect (s2p, one_in_n, fft, c2mag, self.avg, log, sink)
fg.connect(self.avg,one_in_n,sink)
gr.hier_block.__init__(self, fg, self.avg, sink)
self.win = number_window(self, parent, size=size,label=label)
self.set_average(self.average)
class number_sink_c(gr.hier_block, number_sink_base):
def __init__(self, fg, parent, unit='',base_value=0,minval=-100.0,maxval=100.0,factor=1.0,
decimal_places=10, ref_level=50, sample_rate=1, #number_size=512,
number_rate=default_number_rate, average=False, avg_alpha=None,
label='', size=default_numbersink_size, peak_hold=False):
number_sink_base.__init__(self, unit=unit, input_is_real=False, base_value=base_value,factor=factor,
minval=minval,maxval=maxval,decimal_places=decimal_places, ref_level=ref_level,
sample_rate=sample_rate, #number_size=number_size,
number_rate=number_rate,
average=average, avg_alpha=avg_alpha, label=label,
peak_hold=peak_hold)
number_size=1
one_in_n = gr.keep_one_in_n(gr.sizeof_gr_complex,
max(1, int(sample_rate/number_rate)))
#c2mag = gr.complex_to_mag(number_size)
self.avg = gr.single_pole_iir_filter_cc(1.0, number_size)
# FIXME We need to add 3dB to all bins but the DC bin
#log = gr.nlog10_ff(20, number_size,
# -20*math.log10(number_size)-10*math.log10(power/number_size))
sink = gr.message_sink(gr.sizeof_gr_complex , self.msgq, True)
#fg.connect (s2p, one_in_n, fft, c2mag, self.avg, log, sink)
fg.connect(self.avg,one_in_n,sink)
gr.hier_block.__init__(self, fg, self.avg, sink)
self.win = number_window(self, parent, size=size,label=label)
self.set_average(self.average)
# ------------------------------------------------------------------------
myDATA_EVENT = wx.NewEventType()
EVT_DATA_EVENT = wx.PyEventBinder (myDATA_EVENT, 0)
class DataEvent(wx.PyEvent):
def __init__(self, data):
wx.PyEvent.__init__(self)
self.SetEventType (myDATA_EVENT)
self.data = data
def Clone (self):
self.__class__ (self.GetId())
class input_watcher (threading.Thread):
def __init__ (self, msgq, number_size, event_receiver, **kwds):
threading.Thread.__init__ (self, **kwds)
self.setDaemon (1)
self.msgq = msgq
self.number_size = number_size
self.event_receiver = event_receiver
self.keep_running = True
self.start ()
def run (self):
while (self.keep_running):
msg = self.msgq.delete_head() # blocking read of message queue
itemsize = int(msg.arg1())
nitems = int(msg.arg2())
s = msg.to_string() # get the body of the msg as a string
# There may be more than one number in the message.
# If so, we take only the last one
if nitems > 1:
start = itemsize * (nitems - 1)
s = s[start:start+itemsize]
complex_data = numpy.fromstring (s, numpy.float32)
de = DataEvent (complex_data)
wx.PostEvent (self.event_receiver, de)
del de
#========================================================================================
class static_text_window (wx.StaticText): #plot.PlotCanvas):
def __init__ (self, parent, numbersink,id = -1,label="number",
pos = wx.DefaultPosition, size = wx.DefaultSize,
style = wx.DEFAULT_FRAME_STYLE, name = ""):
#plot.PlotCanvas.__init__ (self, parent, id, pos, size, style, name)
wx.StaticText.__init__(self, parent, id, label, pos, size, style, name)
#self.static_text=wx.StaticText( parent, id, label, pos, (size[0]/2,size[1]/2), style, name)
#gauge_style = wx.GA_HORIZONTAL
#self.gauge=wx.Gauge( parent, id, range=1000, pos=(pos[0],pos[1]+size[1]/2),size=(size[0]/2,size[1]/2), style=gauge_style, name = "gauge")
#wx.BoxSizer.__init__ (self,wx.VERTICAL)
#self.Add (self.static_text, 0, wx.EXPAND)
#self.Add (self.gauge, 1, wx.EXPAND)
self.parent=parent
self.label=label
#self.y_range = None
self.numbersink = numbersink
self.peak_hold = False
self.peak_vals = None
#self.SetEnableGrid (True)
# self.SetEnableZoom (True)
# self.SetBackgroundColour ('black')
self.build_popup_menu()
#EVT_DATA_EVENT (self, self.set_data)
#wx.EVT_CLOSE (self, self.on_close_window)
#self.Bind(wx.EVT_RIGHT_UP, self.on_right_click)
self.Bind(wx.EVT_RIGHT_UP, self.on_right_click)
#self.input_watcher = input_watcher(numbersink.msgq, numbersink.number_size, self)
def on_close_window (self, event):
print "number_window:on_close_window"
self.keep_running = False
def set_peak_hold(self, enable):
self.peak_hold = enable
self.peak_vals = None
def update_y_range (self):
ymax = self.numbersink.ref_level
ymin = self.numbersink.ref_level - self.numbersink.decimal_places * self.numbersink.y_divs
self.y_range = self._axisInterval ('min', ymin, ymax)
def on_average(self, evt):
# print "on_average"
self.numbersink.set_average(evt.IsChecked())
def on_peak_hold(self, evt):
# print "on_peak_hold"
self.numbersink.set_peak_hold(evt.IsChecked())
def on_show_gauge(self, evt):
# print "on_show_gauge"
#if evt.IsChecked():
self.numbersink.set_show_gauge(evt.IsChecked())
print evt.IsChecked()
# print "show gauge"
#else:
# self.parent.gauge.Hide()
# print "hide gauge"
def on_incr_ref_level(self, evt):
# print "on_incr_ref_level"
self.numbersink.set_ref_level(self.numbersink.ref_level
+ self.numbersink.decimal_places)
def on_decr_ref_level(self, evt):
# print "on_decr_ref_level"
self.numbersink.set_ref_level(self.numbersink.ref_level
- self.numbersink.decimal_places)
def on_incr_decimal_places(self, evt):
# print "on_incr_decimal_places"
self.numbersink.set_decimal_places(self.numbersink.decimal_places+1) #next_up(self.numbersink.decimal_places, (1,2,5,10,20)))
def on_decr_decimal_places(self, evt):
# print "on_decr_decimal_places"
self.numbersink.set_decimal_places(max(self.numbersink.decimal_places-1,0)) #next_down(self.numbersink.decimal_places, (1,2,5,10,20)))
def on_decimal_places(self, evt):
# print "on_decimal_places"
Id = evt.GetId()
if Id == self.id_decimal_places_0:
self.numbersink.set_decimal_places(0)
elif Id == self.id_decimal_places_1:
self.numbersink.set_decimal_places(1)
elif Id == self.id_decimal_places_2:
self.numbersink.set_decimal_places(2)
elif Id == self.id_decimal_places_3:
self.numbersink.set_decimal_places(3)
elif Id == self.id_decimal_places_6:
self.numbersink.set_decimal_places(6)
elif Id == self.id_decimal_places_9:
self.numbersink.set_decimal_places(9)
def on_right_click(self, event):
menu = self.popup_menu
for id, pred in self.checkmarks.items():
item = menu.FindItemById(id)
item.Check(pred())
self.PopupMenu(menu, event.GetPosition())
def build_popup_menu(self):
#self.id_hide_gauge = wx.NewId()
self.id_show_gauge = wx.NewId()
self.id_incr_ref_level = wx.NewId()
self.id_decr_ref_level = wx.NewId()
self.id_incr_decimal_places = wx.NewId()
self.id_decr_decimal_places = wx.NewId()
self.id_decimal_places_0 = wx.NewId()
self.id_decimal_places_1 = wx.NewId()
self.id_decimal_places_2 = wx.NewId()
self.id_decimal_places_3 = wx.NewId()
self.id_decimal_places_6 = wx.NewId()
self.id_decimal_places_9 = wx.NewId()
self.id_average = wx.NewId()
self.id_peak_hold = wx.NewId()
self.Bind(wx.EVT_MENU, self.on_average, id=self.id_average)
self.Bind(wx.EVT_MENU, self.on_peak_hold, id=self.id_peak_hold)
#self.Bind(wx.EVT_MENU, self.on_hide_gauge, id=self.id_hide_gauge)
self.Bind(wx.EVT_MENU, self.on_show_gauge, id=self.id_show_gauge)
self.Bind(wx.EVT_MENU, self.on_incr_ref_level, id=self.id_incr_ref_level)
self.Bind(wx.EVT_MENU, self.on_decr_ref_level, id=self.id_decr_ref_level)
self.Bind(wx.EVT_MENU, self.on_incr_decimal_places, id=self.id_incr_decimal_places)
self.Bind(wx.EVT_MENU, self.on_decr_decimal_places, id=self.id_decr_decimal_places)
self.Bind(wx.EVT_MENU, self.on_decimal_places, id=self.id_decimal_places_0)
self.Bind(wx.EVT_MENU, self.on_decimal_places, id=self.id_decimal_places_1)
self.Bind(wx.EVT_MENU, self.on_decimal_places, id=self.id_decimal_places_2)
self.Bind(wx.EVT_MENU, self.on_decimal_places, id=self.id_decimal_places_3)
self.Bind(wx.EVT_MENU, self.on_decimal_places, id=self.id_decimal_places_6)
self.Bind(wx.EVT_MENU, self.on_decimal_places, id=self.id_decimal_places_9)
# make a menu
menu = wx.Menu()
self.popup_menu = menu
menu.AppendCheckItem(self.id_average, "Average")
menu.AppendCheckItem(self.id_peak_hold, "Peak Hold")
#menu.Append(self.id_hide_gauge, "Hide gauge")
menu.AppendCheckItem(self.id_show_gauge, "Show gauge")
menu.Append(self.id_incr_ref_level, "Incr Ref Level")
menu.Append(self.id_decr_ref_level, "Decr Ref Level")
menu.Append(self.id_incr_decimal_places, "Incr decimal places")
menu.Append(self.id_decr_decimal_places, "Decr decimal places")
menu.AppendSeparator()
# we'd use RadioItems for these, but they're not supported on Mac
menu.AppendCheckItem(self.id_decimal_places_0, "0 decimal places")
menu.AppendCheckItem(self.id_decimal_places_1, "1 decimal places")
menu.AppendCheckItem(self.id_decimal_places_2, "2 decimal places")
menu.AppendCheckItem(self.id_decimal_places_3, "3 decimal places")
menu.AppendCheckItem(self.id_decimal_places_6, "6 decimal places")
menu.AppendCheckItem(self.id_decimal_places_9, "9 decimal places")
self.checkmarks = {
self.id_average : lambda : self.numbersink.average,
self.id_peak_hold : lambda : self.numbersink.peak_hold,# self.id_hide_gauge : lambda : self.numbersink.hide_gauge,
self.id_show_gauge : lambda : self.numbersink.show_gauge,
self.id_decimal_places_0 : lambda : self.numbersink.decimal_places == 0,
self.id_decimal_places_1 : lambda : self.numbersink.decimal_places == 1,
self.id_decimal_places_2 : lambda : self.numbersink.decimal_places == 2,
self.id_decimal_places_3 : lambda : self.numbersink.decimal_places == 3,
self.id_decimal_places_6 : lambda : self.numbersink.decimal_places == 6,
self.id_decimal_places_9 : lambda : self.numbersink.decimal_places == 9,
}
def next_up(v, seq):
"""
Return the first item in seq that is > v.
"""
for s in seq:
if s > v:
return s
return v
def next_down(v, seq):
"""
Return the last item in seq that is < v.
"""
rseq = list(seq[:])
rseq.reverse()
for s in rseq:
if s < v:
return s
return v
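# Illustrative: next_up(5, (1, 2, 5, 10, 20)) -> 10
#               next_down(5, (1, 2, 5, 10, 20)) -> 2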
#========================================================================================
class number_window (plot.PlotCanvas):
def __init__ (self, numbersink, parent, id = -1,label="number",
pos = wx.DefaultPosition, size = wx.DefaultSize,
style = wx.DEFAULT_FRAME_STYLE, name = ""):
plot.PlotCanvas.__init__ (self, parent, id, pos, size, style, name)
#wx.StaticText.__init__(self, parent, id, label, pos, (size[0]/2,size[1]/2), style, name)
#print 'parent',parent
self.static_text=static_text_window( self, numbersink,id, label, pos, (size[0]/2,size[1]/2), style, name)
gauge_style = wx.GA_HORIZONTAL
vbox=wx.BoxSizer(wx.VERTICAL)
vbox.Add (self.static_text, 0, wx.EXPAND)
self.current_value=None
if numbersink.input_is_real:
self.gauge=wx.Gauge( self, id, range=1000, pos=(pos[0],pos[1]+size[1]/2),size=(size[0]/2,size[1]/2), style=gauge_style, name = "gauge")
vbox.Add (self.gauge, 1, wx.EXPAND)
else:
self.gauge=wx.Gauge( self, id, range=1000, pos=(pos[0],pos[1]+size[1]/3),size=(size[0]/2,size[1]/3), style=gauge_style, name = "gauge")
#hbox=wx.BoxSizer(wx.HORIZONTAL)
self.gauge_imag=wx.Gauge( self, id, range=1000, pos=(pos[0],pos[1]+size[1]*2/3),size=(size[0]/2,size[1]/3), style=gauge_style, name = "gauge_imag")
vbox.Add (self.gauge, 1, wx.EXPAND)
vbox.Add (self.gauge_imag, 1, wx.EXPAND)
#vbox.Add (hbox, 1, wx.EXPAND)
self.sizer = vbox
self.SetSizer (self.sizer)
self.SetAutoLayout (True)
self.sizer.Fit (self)
self.label=label
#self.y_range = None
self.numbersink = numbersink
self.peak_hold = False
self.peak_vals = None
#self.SetEnableGrid (True)
# self.SetEnableZoom (True)
# self.SetBackgroundColour ('black')
#self.build_popup_menu()
EVT_DATA_EVENT (self, self.set_data)
wx.EVT_CLOSE (self, self.on_close_window)
#self.Bind(wx.EVT_RIGHT_UP, self.on_right_click)
#self.Bind(wx.EVT_RIGHT_UP, self.on_right_click)
self.input_watcher = input_watcher(numbersink.msgq, numbersink.number_size, self)
def on_close_window (self, event):
print "number_window:on_close_window"
self.keep_running = False
def set_show_gauge(self, enable):
self.show_gauge = enable
if enable:
self.gauge.Show()
if not self.numbersink.input_is_real:
self.gauge_imag.Show()
#print 'show'
else:
self.gauge.Hide()
if not self.numbersink.input_is_real:
self.gauge_imag.Hide()
#print 'hide'
def set_data (self, evt):
numbers = evt.data
L = len (numbers)
if self.peak_hold:
if self.peak_vals is None:
self.peak_vals = numbers
else:
self.peak_vals = numpy.maximum(numbers, self.peak_vals)
numbers = self.peak_vals
if self.numbersink.input_is_real:
real_value=numbers[0]*self.numbersink.factor + self.numbersink.base_value
imag_value=0.0
self.current_value=real_value
else:
real_value=numbers[0]*self.numbersink.factor + self.numbersink.base_value
imag_value=numbers[1]*self.numbersink.factor + self.numbersink.base_value
self.current_value=complex(real_value,imag_value)
#x = max(abs(self.numbersink.sample_rate), abs(self.numbersink.base_value))
        x = max(abs(real_value), abs(imag_value))  # magnitudes, so SI prefixes also apply to negative values
if x >= 1e9:
sf = 1e-9
unit_prefix = "G"
elif x >= 1e6:
sf = 1e-6
unit_prefix = "M"
        elif x >= 1e3:
sf = 1e-3
unit_prefix = "k"
else :
sf = 1
unit_prefix = ""
#self.update_y_range ()
if self.numbersink.input_is_real:
showtext = "%s: %.*f %s%s" % (self.label, self.numbersink.decimal_places,real_value*sf,unit_prefix,self.numbersink.unit)
else:
showtext = "%s: %.*f,%.*f %s%s" % (self.label, self.numbersink.decimal_places,real_value*sf,
self.numbersink.decimal_places,imag_value*sf,unit_prefix,self.numbersink.unit)
self.static_text.SetLabel(showtext)
#print (int(float((real_value-self.numbersink.base_value)*1000.0/(self.numbersink.maxval-self.numbersink.minval)))+500)
self.gauge.SetValue(int(float((real_value-self.numbersink.base_value)*1000.0/(self.numbersink.maxval-self.numbersink.minval)))+500)
if not self.numbersink.input_is_real:
            # drive the second (imaginary-part) gauge, not the real-part one
            self.gauge_imag.SetValue(int(float((imag_value-self.numbersink.base_value)*1000.0/(self.numbersink.maxval-self.numbersink.minval)))+500)
def set_peak_hold(self, enable):
self.peak_hold = enable
self.peak_vals = None
def update_y_range (self):
ymax = self.numbersink.ref_level
ymin = self.numbersink.ref_level - self.numbersink.decimal_places * self.numbersink.y_divs
self.y_range = self._axisInterval ('min', ymin, ymax)
def on_average(self, evt):
# print "on_average"
self.numbersink.set_average(evt.IsChecked())
def on_peak_hold(self, evt):
# print "on_peak_hold"
self.numbersink.set_peak_hold(evt.IsChecked())
# ----------------------------------------------------------------
# Deprecated interfaces
# ----------------------------------------------------------------
# returns (block, win).
# block requires a single input stream of float
# win is a subclass of wxWindow
def make_number_sink_f(fg, parent, label, number_size, input_rate, ymin = 0, ymax=50):
block = number_sink_f(fg, parent, label=label, number_size=number_size, sample_rate=input_rate,
decimal_places=(ymax - ymin)/8, ref_level=ymax)
return (block, block.win)
# returns (block, win).
# block requires a single input stream of gr_complex
# win is a subclass of wxWindow
def make_number_sink_c(fg, parent, label, number_size, input_rate, ymin=0, ymax=50):
block = number_sink_c(fg, parent, label=label, number_size=number_size, sample_rate=input_rate,
decimal_places=(ymax - ymin)/8, ref_level=ymax)
return (block, block.win)
# ----------------------------------------------------------------
# Standalone test app
# ----------------------------------------------------------------
class test_app_flow_graph (stdgui.gui_flow_graph):
def __init__(self, frame, panel, vbox, argv):
stdgui.gui_flow_graph.__init__ (self, frame, panel, vbox, argv)
#number_size = 256
# build our flow graph
input_rate = 20.48e3
# Generate a complex sinusoid
src1 = gr.sig_source_c (input_rate, gr.GR_SIN_WAVE, 2e3, 1)
#src1 = gr.sig_source_c (input_rate, gr.GR_CONST_WAVE, 5.75e3, 1)
# We add these throttle blocks so that this demo doesn't
# suck down all the CPU available. Normally you wouldn't use these.
thr1 = gr.throttle(gr.sizeof_gr_complex, input_rate)
#sink1 = number_sink_c (self, panel, label="Complex Data", number_size=number_size,
# sample_rate=input_rate, base_value=100e3,
# ref_level=0, decimal_places=3)
#vbox.Add (sink1.win, 1, wx.EXPAND)
#self.connect (src1, thr1, sink1)
src2 = gr.sig_source_f (input_rate, gr.GR_SIN_WAVE, 2e3, 1)
#src2 = gr.sig_source_f (input_rate, gr.GR_CONST_WAVE, 5.75e3, 1)
thr2 = gr.throttle(gr.sizeof_float, input_rate)
sink2 = number_sink_f (self, panel, unit='Hz',label="Real Data", avg_alpha=0.001,#number_size=number_size*2,
sample_rate=input_rate, base_value=100e3,
ref_level=0, decimal_places=3)
vbox.Add (sink2.win, 1, wx.EXPAND)
sink3 = number_sink_c (self, panel, unit='V',label="Complex Data", avg_alpha=0.001,#number_size=number_size*2,
sample_rate=input_rate, base_value=0,
ref_level=0, decimal_places=3)
vbox.Add (sink3.win, 1, wx.EXPAND)
self.connect (src2, thr2, sink2)
self.connect (src1, thr1, sink3)
def main ():
app = stdgui.stdapp (test_app_flow_graph,
"Number Sink Test App")
app.MainLoop ()
if __name__ == '__main__':
main ()
| gpl-3.0 | 1,420,374,950,807,376,000 | 40.161238 | 158 | 0.585328 | false | 3.38644 | false | false | false |
icebreaker/pyGLox | demo/particlesystem.py | 1 | 1664 | """
Copyright (c) 2011, Mihail Szabolcs
All rights reserved.
See LICENSE for more information.
"""
import random
import math
from pyglet.gl import *
class Particle(object):
def __init__(self):
self.p = [0,0,0]
self.a = 1
self.dx = (random.random() - 0.5)
self.dy = (random.random() - 0.5)
def update(self, dt):
self.p[0] += self.dx * dt
self.p[1] += math.fabs(self.dy / 3 * dt)
self.a -= math.fabs(self.dx * 4) * dt
self.a -= math.fabs(self.dy / 2) * dt
if self.a <= 0:
self.p = [0,0,0]
self.a = 1
self.dx = (random.random() - 0.5)
self.dy = (random.random() - 0.5)
def draw(self):
#glColor4f(1, 0.6, 0.0, self.a)
glColor4f(0.65, 0.0, 0.15, self.a)
glVertex3f(self.p[0], self.p[1], self.p[2])
class ParticleSystem(object):
def __init__(self, texture, n=512, p=Particle):
self.texture = texture
self.n = n
self.particles = []
for i in range(n):
self.particles.append(p())
def update(self, dt):
for i in range(self.n):
self.particles[i].update(dt)
def draw(self):
self.texture.bind()
glEnable(GL_BLEND)
glBlendFunc(GL_SRC_ALPHA, GL_ONE)
glEnable(GL_POINT_SPRITE)
glPointSize(60.0)
glTexEnvi(GL_POINT_SPRITE, GL_COORD_REPLACE, GL_TRUE);
#glPointParameterf(GL_POINT_FADE_THRESHOLD_SIZE, 60.0);
#glPointParameterf(GL_POINT_SIZE_MIN, 30.0);
#glPointParameterf(GL_POINT_SIZE_MAX, 40.0);
glDisable(GL_DEPTH_TEST)
glDepthMask(GL_FALSE)
for i in range(self.n):
glBegin(GL_POINTS)
self.particles[i].draw()
glEnd()
glDepthMask(GL_TRUE)
glEnable(GL_DEPTH_TEST)
glDisable(GL_POINT_SPRITE)
glDisable(GL_BLEND)
self.texture.release()
glColor4f(1,1,1,1)
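# Example usage (sketch, not from the original file): drive the system from a
# render loop. `texture` is assumed to be any object exposing bind()/release(),
# which is all that ParticleSystem.draw() requires.
#
#   ps = ParticleSystem(texture, n=256)
#   def on_frame(dt):
#       ps.update(dt)
#       ps.draw()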
| mit | 7,980,364,552,582,119,000 | 21.794521 | 57 | 0.640625 | false | 2.340366 | false | false | false |
andrewsosa/hackfsu_com | api/api/views/hackathon/get/stats.py | 2 | 4468 | """
Get public statistics for current hackathon
"""
from django import forms
from django.http.request import HttpRequest
from hackfsu_com.views.generic import ApiView
from hackfsu_com.util import acl
from api.models import Hackathon, HackerInfo, MentorInfo, JudgeInfo, OrganizerInfo, AttendeeStatus
from django.utils import timezone
class ResponseForm(forms.Form):
hackathon_name = forms.CharField()
hackathon_start = forms.DateField()
hackathon_end = forms.DateField()
hackers_registered = forms.IntegerField()
hackers_approved = forms.IntegerField()
hackers_rsvp = forms.IntegerField()
hackers_checked_in = forms.IntegerField()
mentors_registered = forms.IntegerField()
mentors_approved = forms.IntegerField()
mentors_rsvp = forms.IntegerField()
mentors_checked_in = forms.IntegerField()
judges_registered = forms.IntegerField()
judges_approved = forms.IntegerField()
judges_rsvp = forms.IntegerField()
judges_checked_in = forms.IntegerField()
organizers_registered = forms.IntegerField()
organizers_approved = forms.IntegerField()
organizers_rsvp = forms.IntegerField()
organizers_checked_in = forms.IntegerField()
class StatsView(ApiView):
response_form_class = ResponseForm
http_method_names = ['get']
access_manager = acl.AccessManager(acl_accept=[acl.group_user])
def work(self, request, req: dict, res: dict):
ch = Hackathon.objects.current()
res['hackathon_name'] = ch.name
res['hackathon_start'] = ch.start_date
res['hackathon_end'] = ch.end_date
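        # Detailed counts below are only exposed once the hackathon has started,
        # or to approved organizers; otherwise every count is reported as -1.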
if (timezone.now().date() - ch.start_date).days >= 0 or \
OrganizerInfo.objects.filter(hackathon=ch, user=request.user, approved=True).exists():
res['hackers_registered'] = HackerInfo.objects.filter(hackathon=ch).count()
res['hackers_approved'] = HackerInfo.objects.filter(hackathon=ch, approved=True).count()
res['hackers_rsvp'] = \
HackerInfo.objects.filter(hackathon=ch, approved=True, attendee_status__rsvp_result=1).count()
res['hackers_checked_in'] = HackerInfo.objects.filter(
hackathon=ch, approved=True, attendee_status__checked_in_at__isnull=False).count()
res['mentors_registered'] = MentorInfo.objects.filter(hackathon=ch).count()
res['mentors_approved'] = MentorInfo.objects.filter(hackathon=ch, approved=True).count()
res['mentors_rsvp'] = \
MentorInfo.objects.filter(hackathon=ch, approved=True, attendee_status__rsvp_result=1).count()
res['mentors_checked_in'] = MentorInfo.objects.filter(
hackathon=ch, approved=True, attendee_status__checked_in_at__isnull=False).count()
res['judges_registered'] = JudgeInfo.objects.filter(hackathon=ch).count()
res['judges_approved'] = JudgeInfo.objects.filter(hackathon=ch, approved=True).count()
res['judges_rsvp'] = \
JudgeInfo.objects.filter(hackathon=ch, approved=True, attendee_status__rsvp_result=1).count()
res['judges_checked_in'] = JudgeInfo.objects.filter(
hackathon=ch, approved=True, attendee_status__checked_in_at__isnull=False).count()
res['organizers_registered'] = OrganizerInfo.objects.filter(hackathon=ch).count()
res['organizers_approved'] = OrganizerInfo.objects.filter(hackathon=ch, approved=True).count()
res['organizers_rsvp'] = \
OrganizerInfo.objects.filter(hackathon=ch, approved=True, attendee_status__rsvp_result=1).count()
res['organizers_checked_in'] = OrganizerInfo.objects.filter(
hackathon=ch, approved=True, attendee_status__checked_in_at__isnull=False).count()
else:
res['hackers_registered'] = -1
res['hackers_approved'] = -1
res['hackers_rsvp'] = -1
res['hackers_checked_in'] = -1
res['mentors_registered'] = -1
res['mentors_approved'] = -1
res['mentors_rsvp'] = -1
res['mentors_checked_in'] = -1
res['judges_registered'] = -1
res['judges_approved'] = -1
res['judges_rsvp'] = -1
res['judges_checked_in'] = -1
res['organizers_registered'] = -1
res['organizers_approved'] = -1
res['organizers_rsvp'] = -1
res['organizers_checked_in'] = -1
| apache-2.0 | 7,364,637,496,740,350,000 | 47.043011 | 113 | 0.643912 | false | 3.597424 | false | false | false |
sevein/archivematica | src/dashboard/src/contrib/utils.py | 1 | 2104 | # This file is part of Archivematica.
#
# Copyright 2010-2013 Artefactual Systems Inc. <http://artefactual.com>
#
# Archivematica is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Archivematica is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Archivematica. If not, see <http://www.gnu.org/licenses/>.
import os
def get_directory_size(path='.'):
total_size = 0
for dirpath, dirnames, filenames in os.walk(path):
for f in filenames:
fp = os.path.join(dirpath, f)
total_size += os.path.getsize(fp)
return total_size
def get_directory_name(directory, default=None):
"""
Attempts to extract a directory name given a transfer or SIP path. Expected format:
%sharedPath%watchedDirectories/workFlowDecisions/createDip/ImagesSIP-69826e50-87a2-4370-b7bd-406fc8aad94f/
Given this example, this function would return 'ImagesSIP'.
If the optional `default` keyword argument is passed in, the provided value will be used if no name can be extracted.
"""
import re
try:
return re.search(r'^.*/(?P<directory>.*)-[\w]{8}(-[\w]{4}){3}-[\w]{12}[/]{0,1}$', directory).group('directory')
    except AttributeError:  # re.search returned None (no match)
pass
try:
return re.search(r'^.*/(?P<directory>.*)/$', directory).group('directory')
    except AttributeError:  # re.search returned None (no match)
pass
if directory:
return directory
else:
return default
def get_directory_name_from_job(jobs):
try:
job = jobs[0]
# No jobs yet, e.g. not started; there will be no directory name yet
except IndexError:
return "(Unnamed)"
return get_directory_name(job.directory, default=job.sipuuid)
| agpl-3.0 | 4,646,505,071,088,285,000 | 32.396825 | 123 | 0.681559 | false | 3.710758 | false | false | false |
basak/netkeyscript | netkeyscript-send.py | 1 | 2002 | #!/usr/bin/python
# Copyright 2012 Robie Basak
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
# ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
NETKEYSCRIPT_PROTO_PASSPHRASE = 0
import argparse
import struct
import sys
from scapy.all import (
Ether,
IPv6,
UDP,
sendp
)
def send(dst, sport, dport, payload, iface):
ether = Ether()
ip = IPv6(dst=dst)
udp = UDP(sport=sport, dport=dport)
sendp(ether / ip / udp / payload, iface=iface)
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--iface', default='eth0')
parser.add_argument('--sport', default=30621, type=int)
parser.add_argument('--dport', default=30621, type=int)
parser.add_argument('--dest', default='ff02::1')
args = parser.parse_args()
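    # Payload wire format (as constructed below): a single command byte
    # (NETKEYSCRIPT_PROTO_PASSPHRASE = 0) followed by the raw passphrase bytes from stdin.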
payload_command = struct.pack('b', NETKEYSCRIPT_PROTO_PASSPHRASE)
payload = payload_command + sys.stdin.read()
send(dst=args.dest, sport=args.sport, dport=args.dport,
payload=payload, iface=args.iface)
if __name__ == '__main__':
main()
| mit | -4,701,440,638,697,542,000 | 32.932203 | 78 | 0.720779 | false | 3.763158 | false | false | false |
algolia/algoliasearch-django | tests/settings.py | 1 | 2390 | """
Django settings for core project.
Generated by 'django-admin startproject' using Django 1.8.2.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
import os
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
SECRET_KEY = 'MillisecondsMatter'
DEBUG = False
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'algoliasearch_django',
'tests'
)
MIDDLEWARE = [
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.security.SecurityMiddleware',
]
TEMPLATES = [
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"DIRS": [],
"APP_DIRS": True,
"OPTIONS": {
"context_processors": [
"django.template.context_processors.debug",
"django.template.context_processors.request",
"django.contrib.auth.context_processors.auth",
"django.contrib.messages.context_processors.messages",
],
},
},
]
ROOT_URLCONF = 'tests.urls'
# Database
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
def safe_index_name(name):
if 'TRAVIS' not in os.environ:
return name
job = os.environ['TRAVIS_JOB_NUMBER']
return '{}_travis-{}'.format(name, job)
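# e.g. with TRAVIS_JOB_NUMBER=1234.5 (hypothetical value), safe_index_name('django')
# yields 'django_travis-1234.5'; outside Travis the name is returned unchanged.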
# AlgoliaSearch settings
ALGOLIA = {
'APPLICATION_ID': os.getenv('ALGOLIA_APPLICATION_ID'),
'API_KEY': os.getenv('ALGOLIA_API_KEY'),
'INDEX_PREFIX': 'test',
'INDEX_SUFFIX': safe_index_name('django'),
'RAISE_EXCEPTIONS': True
}
| mit | 176,922,860,896,906,560 | 25.555556 | 70 | 0.667364 | false | 3.556548 | false | false | false |
all-of-us/raw-data-repository | rdr_service/tools/update_release_tracker.py | 1 | 2190 | #!/usr/bin/env python
"""Updates JIRA release notes when deploying to an environment.
This requires the
JIRA_API_USER_PASSWORD and
JIRA_API_USER_NAME
environment variables to be set, and the --version and --comment flags to be provided.
"""
import logging
import os
import sys
import jira
from rdr_service.main_util import configure_logging, get_parser
_JIRA_INSTANCE_URL = "https://precisionmedicineinitiative.atlassian.net/"
# Release tickets are moved from our usual project, DA, to the PD project
# for change approval, so for stable/prod releases look for tickets there.
_JIRA_PROJECT_ID = "PD"
def _connect_to_jira(jira_username, jira_password):
return jira.JIRA(_JIRA_INSTANCE_URL, basic_auth=(jira_username, jira_password))
def main(args):
jira_username = os.getenv("JIRA_API_USER_NAME")
jira_password = os.getenv("JIRA_API_USER_PASSWORD")
if not jira_username or not jira_password:
logging.error("JIRA_API_USER_NAME and JIRA_API_USER_PASSWORD variables must be set. Exiting.")
sys.exit(-1)
jira_connection = _connect_to_jira(jira_username, jira_password)
summary = "Release tracker for %s" % args.version
issues = jira_connection.search_issues(
'project = "%s" AND summary ~ "%s" ORDER BY created DESC' % (_JIRA_PROJECT_ID, summary)
)
if issues:
if len(issues) > 1:
logging.warning(
"Found multiple release tracker matches, using newest. %s",
", ".join("[%s] %s" % (issue.key, issue.fields().summary) for issue in issues),
)
issue = issues[0]
jira_connection.add_comment(issue, args.comment)
logging.info("Updated issue %s", issue.key)
sys.exit(0)
else:
logging.error("No issue found with summary %r in project %r; exiting.", summary, _JIRA_PROJECT_ID)
sys.exit(-1)
if __name__ == "__main__":
configure_logging()
parser = get_parser()
parser.add_argument("--version", help="The version of the app being deployed (e.g. v0-1-rc21", required=True)
parser.add_argument("--comment", type=str, help="The comment to add to the issue", required=True)
main(parser.parse_args())
| bsd-3-clause | -2,994,243,296,558,638,600 | 36.118644 | 113 | 0.66895 | false | 3.416537 | false | false | false |
AdaptivePELE/AdaptivePELE | AdaptivePELE/analysis/crossValidationClustering.py | 1 | 3872 | from __future__ import absolute_import, division, print_function, unicode_literals
import os
import argparse
import matplotlib.pyplot as plt
import numpy as np
import pyemma.coordinates as coor
import pyemma.msm as msm
from AdaptivePELE.freeEnergies import cluster
plt.switch_backend("pdf")
plt.style.use("ggplot")
def parse_arguments():
"""
Create command-line interface
"""
desc = "Plot information related to an MSM"
parser = argparse.ArgumentParser(description=desc)
parser.add_argument("-l", "--lagtimes", type=int, nargs="*", help="Lagtimes to analyse")
parser.add_argument("-c", "--clusters", type=int, nargs="*", help="Number of clusters to analyse")
parser.add_argument("-m", type=int, default=6, help="Number of eigenvalues to sum in the GMRQ")
parser.add_argument("--tica", action="store_true", help="Whether to use TICA before clustering")
parser.add_argument("--tica_lag", type=int, default=30, help="Lagtime for the TICA estimation")
parser.add_argument("--out_path", type=str, default="", help="Path to store the output")
args = parser.parse_args()
return args.lagtimes, args.m, args.tica, args.tica_lag, args.out_path, args.clusters
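# Example invocation (hypothetical values):
#   python crossValidationClustering.py -l 25 50 100 -c 100 200 400 --tica --tica_lag 30 --out_path results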
def main(lagtimes, clusters, m, tica_lag, tica, output_path):
trajectoryFolder = "allTrajs"
trajectoryBasename = "traj*"
stride = 1
if output_path and not os.path.exists(output_path):
os.makedirs(output_path)
scores_path = os.path.join(output_path, "scores")
if not os.path.exists(scores_path):
os.makedirs(scores_path)
data, _ = cluster.loadTrajFiles(trajectoryFolder, trajectoryBasename)
if tica:
tica_obj = coor.tica(data, lag=tica_lag, var_cutoff=0.9, kinetic_map=True)
print('TICA dimension ', tica_obj.dimension())
data = tica_obj.get_output()
for tau in lagtimes:
scores = []
scores_cv = []
print("Estimating MSM with %d lagtime" % tau)
for k in clusters:
print("Calculating scores with %d clusters" % k)
# cluster data
cl = coor.cluster_kmeans(data=data, k=k, max_iter=500, stride=stride)
try:
MSM = msm.estimate_markov_model(cl.dtrajs, tau)
print("MSM estimated on %d states" % MSM.nstates)
except Exception:
print("Estimation error in %d clusters, %d lagtime" % (k, tau))
scores.append(0)
scores_cv.append(np.array([0, 0, 0, 0, 0]))
continue
try:
scores.append(MSM.score(MSM.dtrajs_full, score_k=m))
except Exception:
print("Estimation error in %d clusters, %d lagtime" % (k, tau))
scores.append(0)
scores_cv.append(np.array([0, 0, 0, 0, 0]))
continue
try:
scores_cv.append(MSM.score_cv(MSM.dtrajs_full, score_k=m, n=5))
except Exception:
print("Estimation error in %d clusters, %d lagtime" % (k, tau))
scores_cv.append(np.array([0, 0, 0, 0, 0]))
np.save(os.path.join(scores_path, "scores_lag_%d.npy" % tau), scores)
np.save(os.path.join(scores_path, "scores_cv_lag_%d.npy" % tau), scores_cv)
mean_scores = [sc.mean() for sc in scores_cv]
std_scores = [sc.std() for sc in scores_cv]
plt.figure()
plt.plot(clusters, scores, label="Training")
plt.errorbar(clusters, mean_scores, yerr=std_scores, fmt='k', label="Testing")
plt.xlabel("Number of states")
plt.ylabel("Score")
plt.legend()
plt.savefig(os.path.join(output_path, "scores_cv_lag_%d.png" % tau))
if __name__ == "__main__":
lags, GMRQ, use_tica, lag_tica, out_path, cluster_list = parse_arguments()
main(lags, cluster_list, GMRQ, lag_tica, use_tica, out_path)
| mit | -8,509,077,382,591,035,000 | 44.552941 | 102 | 0.612603 | false | 3.396491 | false | false | false |
cloew/KaoJson | kao_json/Test/test_conversion_config.py | 1 | 1361 | from .class_hierarchy import A, AChild, AGrandchild
from ..conversion_config import ConversionConfig
import unittest
class find(unittest.TestCase):
""" Test cases of find """
def test_matchingClassFound(self):
""" Test that a matching class is found properly """
expected = "Dummy Config..."
config = ConversionConfig([{A:expected}])
actual = config.find(A)
self.assertEqual(expected, actual)
def test_descendantClassFound(self):
""" Test that a matching descendant class is found properly """
expected = "Dummy Config..."
config = ConversionConfig([{A:expected}])
actual = config.find(AGrandchild)
self.assertEqual(expected, actual)
def test_noMatchFound(self):
""" Test that when no match is found, None is returned """
config = ConversionConfig([])
self.assertIsNone(config.find(A))
class newConverter(unittest.TestCase):
""" Test cases of newConverter """
def test_converterBuilt(self):
""" Test that the Converter was built properly """
value = "Dummy Value..."
config = ConversionConfig([])
converter = config.newConverter(value)
self.assertEqual(converter.value, value)
self.assertEqual(converter.config, config) | mit | -7,443,373,079,876,724,000 | 33.05 | 71 | 0.62601 | false | 4.725694 | true | false | false |
agx/git-buildpackage | gbp/deb/rollbackgit.py | 1 | 5051 | # vim: set fileencoding=utf-8 :
#
# (C) 2018 Guido Günther <[email protected]>
"""A git repository for Debian packages that can roll back operations"""
from .. import log
from .. git import GitRepositoryError
from . git import DebianGitRepository
class RollbackError(GitRepositoryError):
"""
Error raised if the rollback failed
"""
def __init__(self, errors):
self.msg = "Automatic rollback failed"
super(RollbackError, self).__init__(self.msg)
self.errors = errors
def __str__(self):
return "%s %s" % (self.msg, self.errors)
class RollbackDebianGitRepository(DebianGitRepository):
"""
Like a DebianGitRepository but can also perform rollbacks and knows
about some of the inner workings upstream vcs_tag, …
"""
def __init__(self, *args, **kwargs):
self.rollbacks = []
self.rollback_errors = []
DebianGitRepository.__init__(self, *args, **kwargs)
def has_rollbacks(self):
return len(self.rollbacks) > 0
def rrr(self, refname, action, reftype):
"""
Remember ref for rollback
@param refname: ref to roll back
@param action: the rollback action (delete, reset, ...)
@param reftype: the reference type (tag, branch, ...)
"""
sha = None
if action == 'reset':
try:
sha = self.rev_parse(refname)
except GitRepositoryError as err:
log.warn("Failed to rev-parse '%s': %s" % (refname, err))
elif action == 'delete':
pass
elif action == 'abortmerge':
pass
else:
raise GitRepositoryError("Unknown action '%s' for %s '%s'" % (action, reftype, refname))
self.rollbacks.append((refname, reftype, action, sha))
def rrr_branch(self, branchname, action='reset-or-delete'):
if action == 'reset-or-delete':
if self.has_branch(branchname):
return self.rrr(branchname, 'reset', 'branch')
else:
return self.rrr(branchname, 'delete', 'branch')
else:
return self.rrr(branchname, action, 'branch')
def rrr_tag(self, tagname, action='delete'):
return self.rrr(tagname, action, 'tag')
def rrr_merge(self, commit, action='abortmerge'):
return self.rrr(commit, action, 'commit')
def rollback(self):
"""
Perform a complete rollback
Try to roll back as much as possible and remember what failed.
"""
for (name, reftype, action, sha) in self.rollbacks:
try:
if action == 'delete':
log.info("Rolling back %s '%s' by deleting it" % (reftype, name))
if reftype == 'tag':
self.delete_tag(name)
elif reftype == 'branch':
self.delete_branch(name)
else:
raise GitRepositoryError("Don't know how to delete %s '%s'" % (reftype, name))
elif action == 'reset' and reftype == 'branch':
log.info('Rolling back branch %s by resetting it to %s' % (name, sha))
self.update_ref("refs/heads/%s" % name, sha, msg="gbp import-orig: failure rollback of %s" % name)
elif action == 'abortmerge':
if self.is_in_merge():
log.info('Rolling back failed merge of %s' % name)
self.abort_merge()
else:
log.info("Nothing to rollback for merge of '%s'" % name)
else:
raise GitRepositoryError("Don't know how to %s %s '%s'" % (action, reftype, name))
except GitRepositoryError as e:
self.rollback_errors.append((name, reftype, action, sha, e))
if self.rollback_errors:
raise RollbackError(self.rollback_errors)
# Wrapped methods for rollbacks
def create_tag(self, *args, **kwargs):
name = kwargs['name']
ret = super(RollbackDebianGitRepository, self).create_tag(*args, **kwargs)
self.rrr_tag(name)
return ret
def commit_dir(self, *args, **kwargs):
import_branch = kwargs['branch']
self.rrr_branch(import_branch)
return super(RollbackDebianGitRepository, self).commit_dir(*args, **kwargs)
def create_branch(self, *args, **kwargs):
branch = kwargs['branch']
ret = super(RollbackDebianGitRepository, self).create_branch(*args, **kwargs)
self.rrr_branch(branch, 'delete')
return ret
def merge(self, *args, **kwargs):
commit = args[0] if args else kwargs['commit']
try:
return super(RollbackDebianGitRepository, self).merge(*args, **kwargs)
except GitRepositoryError:
# Only cleanup in the error case to undo working copy
# changes. Resetting the refs handles the other cases.
self.rrr_merge(commit)
raise
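# Example usage (sketch, not part of the original module): wrap a risky sequence
# of ref updates so that a failure rolls everything back.
#
#   repo = RollbackDebianGitRepository('.')
#   try:
#       repo.rrr_branch('debian/sid')
#       ...  # tags, merges and commits performed here register their rollbacks
#   except GitRepositoryError:
#       repo.rollback()  # raises RollbackError if the rollback itself fails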
| gpl-2.0 | 3,977,356,638,823,929,300 | 36.671642 | 118 | 0.565571 | false | 4.054618 | false | false | false |
hcrlab/access_teleop | limb_manipulation/src/limb_pbd_server_with_moveit_commander.py | 1 | 43291 | #! /usr/bin/env python
import rospy
import math
from pprint import pprint
import numpy as np
import fetch_api
from std_msgs.msg import String, Header, ColorRGBA, Bool
from std_srvs.srv import Empty
from image_geometry import PinholeCameraModel
import tf
import tf.transformations as tft
# from tf import TransformBroadcaster
from geometry_msgs.msg import Pose, PoseStamped, Quaternion, Point, Vector3
from interactive_markers.interactive_marker_server import InteractiveMarkerServer
from visualization_msgs.msg import Marker, MarkerArray, InteractiveMarker, InteractiveMarkerControl, InteractiveMarkerFeedback
from sensor_msgs.msg import PointCloud2, JointState
from ar_track_alvar_msgs.msg import AlvarMarkers
from limb_manipulation_msgs.msg import EzgripperAccess, WebAppRequest, WebAppResponse
import moveit_commander
from moveit_python import PlanningSceneInterface
from moveit_msgs.msg import OrientationConstraint
import subprocess
from robot_controllers_msgs.msg import QueryControllerStatesAction, QueryControllerStatesGoal, ControllerState
from database import Database
import actionlib
import rosbag
import os
from colour import Color
import sys
from shared_teleop_functions_and_vars import dpx_to_distance, delta_modified_stamped_pose
import copy
# maximum times to retry if a transform lookup fails
TRANSFROM_LOOKUP_RETRY = 10
# colors for trajectory visualization
START_COLOR = Color("Orange")
END_COLOR = Color("Blue")
TRAJ_HIGHLIGHT_SCALE = Vector3(0.05, 0.008, 0.008)
WAYPOINT_HIGHLIGHT_SCALE = Vector3(0.05, 0.01, 0.01) # Vector3(0.055, 0.009, 0.009)
WAYPOINT_HIGHLIGHT_COLOR = ColorRGBA(1.0, 0.0, 0.0, 0.8)
# body parts and their corresponding ID# and actions
BODY_PARTS = {0: "right wrist", 1: "lower right leg",
2: "left wrist", 3: "lower left leg"}
ACTIONS = {0: ["right arm elbow extension", "right arm elbow flexion"],
1: ["right leg abduction and adduction"], # , "right leg medial rotation", "right leg lateral rotation"],
2: ["left arm elbow extension", "left arm elbow flexion"],
3: ["left leg abduction and adduction"] #, "left leg medial rotation", "left leg lateral rotation"]
}
ABBR = {"right leg abduction and adduction": "RLAA",
# "right leg medial rotation": "RLMR", "right leg lateral rotation": "RLLR",
"left abduction and adduction": "LLAA",
# "left leg medial rotation": "LLMR", "left leg lateral rotation": "LLLR",
"right arm elbow extension": "RAEE", "right arm elbow flexion": "RAEF",
"left arm elbow extension": "LAEE", "left arm elbow flexion": "LAEF"
}
# leg medial rotation, leg lateral rotation
# shoulder flexion, shoulder abduction, shoulder adduction, shoulder medial rotation, shoulder lateral rotation,
# forearm pronation, forearm supination,
# knee flexion, knee extension (seems to be performed when seated?)
def wait_for_time():
"""
Wait for simulated time to begin.
"""
while rospy.Time().now().to_sec() == 0:
pass
class ArTagReader(object):
def __init__(self):
self.markers = [] # list of markers (update in real time)
self.saved_markers = [] # list of markers saved (update only if update() is called)
def callback(self, msg):
self.markers = msg.markers
def update(self):
self.saved_markers = self.markers
def get_tag(self, tag):
""" Returns the marker with id# == tag """
for marker in self.saved_markers:
if marker.id == int(tag):
result = PoseStamped()
result.pose = marker.pose
result.header = marker.header
return result
return None
def get_list(self):
""" Returns the list of saved markers """
return self.saved_markers
class PbdServer():
""" Server for PBD """
def __init__(self):
# controls of Fetch
self._arm = fetch_api.Arm()
self._arm_joints = fetch_api.ArmJoints()
self._torso = fetch_api.Torso()
self._head = fetch_api.Head()
self._base = fetch_api.Base()
self._fetch_gripper = fetch_api.Gripper()
# status of the arm: relax or freeze
self._arm_relaxed = False
# transformation
self._tf_listener = tf.TransformListener()
rospy.sleep(0.1)
# AR tag reader
self._reader = ArTagReader()
self._ar_sub = rospy.Subscriber("ar_pose_marker", AlvarMarkers, callback=self._reader.callback)
# database of actions
self._db = Database()
self._db.load()
# publisher and subscriber for controls of SAKE gripper
self._sake_gripper_pub = rospy.Publisher('/ezgripper_access', EzgripperAccess, queue_size=1)
self._sake_gripper_sub = rospy.Subscriber('/ezgripper_access_status', EzgripperAccess, callback=self._set_sake_gripper_action_status)
# moveit: query controller
self._controller_client = actionlib.SimpleActionClient('/query_controller_states', QueryControllerStatesAction)
# moveit: move group commander
moveit_commander.roscpp_initialize(sys.argv)
moveit_robot = moveit_commander.RobotCommander()
self._moveit_group = moveit_commander.MoveGroupCommander('arm')
# motion planning scene
self._planning_scene = PlanningSceneInterface('base_link')
self._planning_scene.clear()
# visualization
self._viz_pub = rospy.Publisher('visualization_marker', Marker, queue_size=5)
self._viz_markers_pub = rospy.Publisher('visualization_marker_array', MarkerArray, queue_size=5)
# initial position of robot arm
self._arm_initial_poses = [
("shoulder_pan_joint", 1.296), ("shoulder_lift_joint", 1.480), ("upperarm_roll_joint", -0.904), ("elbow_flex_joint", 2.251),
("forearm_roll_joint", -2.021), ("wrist_flex_joint", -1.113), ("wrist_roll_joint", -0.864)]
# orientation constraint (unused)
self._gripper_oc = OrientationConstraint()
self._gripper_oc.header.frame_id = 'base_link'
self._gripper_oc.link_name = 'wrist_roll_link'
self._gripper_oc.orientation.z = -0.707
self._gripper_oc.orientation.w = 0.707
self._gripper_oc.absolute_x_axis_tolerance = 0.1
self._gripper_oc.absolute_y_axis_tolerance = 0.1
self._gripper_oc.absolute_z_axis_tolerance = 3.14
self._gripper_oc.weight = 1.0
# moveit args
self._kwargs = {
'allowed_planning_time': 30,
'execution_timeout': 15,
'group_name': 'arm',
'num_planning_attempts': 10,
# 'orientation_constraint': self._gripper_oc,
'replan': True,
'replan_attempts': 5,
'tolerance': 0.01
}
# current pose of gripper (used in performing actions)
self._current_pose = None
# bag file directory
script_path = os.path.abspath(__file__)
self._bag_file_dir = os.path.split(script_path)[0][:-4] + '/bags'
# subscriber and publisher for frontend
self._web_app_request_sub = rospy.Subscriber("web_app_request", WebAppRequest, callback=self.web_app_request_callback)
self._web_app_response_pub = rospy.Publisher('web_app_response', WebAppResponse, queue_size=5)
# variables representing the program state
self._sake_gripper_attached = False
self._sake_gripper_action_finished = False
self._sake_gripper_effort = "100"
self._robot_stopped = False
self._grasp_position_ready = False
self._grasp_type = "h_close"
self._do_position_ready = False
self._do_position_id = -1
self._preview_action_abbr = ""
self._preview_traj = [] # the trajectory being previewed currently
self._current_waypoint_id = -1
rospy.sleep(0.5)
def setup(self):
""" Handler for robot set up """
print("\nSetting up everything, please wait...\n")
# set robot's initial state
self._torso.set_height(0)
self._head.pan_tilt(0, 0.8)
# move arm to the initial position (with collision detection)
self._arm.move_to_joint_goal(self._arm_initial_poses, replan=True)
print("\nThe program is ready to use :-)\n")
def shutdown(self):
""" Handler for robot shutdown """
print("\nShutting down... Bye :-)\n")
# clear display
self._viz_markers_pub.publish(MarkerArray(markers=[]))
# moveit
self.freeze_arm()
self._planning_scene.clear()
self._arm.cancel_all_goals()
# save the database
self._db.save()
# moveit: move group commander
self._moveit_group.stop()
moveit_commander.roscpp_shutdown()
def attach_sake_gripper(self):
"""
Attaches SAKE gripper to Fetch's gripper, and updates the planning scene.
"""
self.freeze_arm()
# attach SAKE gripper to Fetch's gripper
self._fetch_gripper.open() # make sure Fetch's gripper is open
self._fetch_gripper.close()
self._sake_gripper_attached = True
# add SAKE gripper to the planning scene
frame_attached_to = 'gripper_link'
frames_okay_to_collide_with = ['gripper_link', 'l_gripper_finger_link', 'r_gripper_finger_link']
package_path = subprocess.check_output("rospack find ezgripper_driver", shell=True).replace('\n','')
if rospy.get_param("use_urdf"): # use real sake gripper mesh
# palm
sake_palm_pose = Pose(Point(-0.01, 0, 0.05), Quaternion(-0.7, 0, 0.7, 0))
sake_palm_mesh_file = package_path + "/meshes/visual/SAKE_Palm_Dual.stl"
self._planning_scene.attachMesh('sake_palm', sake_palm_pose, sake_palm_mesh_file,
frame_attached_to, touch_links=frames_okay_to_collide_with)
# fingers
sake_finger_1_pose = Pose(Point(0, -0.03, -0.055), Quaternion(0.5, -0.5, 0.5, 0.5))
sake_finger_1_mesh_file = package_path + "/meshes/visual/SAKE_Finger.stl"
self._planning_scene.attachMesh('sake_finger_1', sake_finger_1_pose, sake_finger_1_mesh_file,
frame_attached_to, touch_links=frames_okay_to_collide_with)
sake_finger_2_pose = Pose(Point(0, 0.03, -0.055), Quaternion(-0.5, -0.5, -0.5, 0.5))
sake_finger_2_mesh_file = package_path + "/meshes/visual/SAKE_Finger.stl"
self._planning_scene.attachMesh('sake_finger_2', sake_finger_2_pose, sake_finger_2_mesh_file,
frame_attached_to, touch_links=frames_okay_to_collide_with)
else: # use a box to represent the sake gripper
self._planning_scene.attachBox('sake', 0.03, 0.09, 0.15, 0, 0, -0.05,
frame_attached_to,
frames_okay_to_collide_with)
self._planning_scene.setColor('sake', 1, 0, 1)
self._planning_scene.sendColors()
# calibrate SAKE gripper
self.do_sake_gripper_action("calibrate")
def remove_sake_gripper(self):
"""
Removes SAKE gripper from Fetch's gripper, and updates the planning scene.
"""
self.freeze_arm()
# remove SAKE gripper from Fetch's gripper
        self._fetch_gripper.close()  # make sure Fetch's gripper is closed
self._fetch_gripper.open()
self._sake_gripper_attached = False
# remove SAKE gripper from the planning scene
if rospy.get_param("use_urdf"):
self._planning_scene.removeAttachedObject('sake_palm')
self._planning_scene.removeAttachedObject('sake_finger_1')
self._planning_scene.removeAttachedObject('sake_finger_2')
else:
self._planning_scene.removeAttachedObject('sake')
def update_env(self, update_octo=True):
"""
        Updates the list of markers and scans the surroundings to build an octomap.
Returns false if the update fails, true otherwise.
"""
# update markers
self._reader.update()
# update octomap
if update_octo:
# clear previous octomap
if not self._clear_octomap():
return False
# scan the range: pan -0.75~0.75, tilt 0~0.7
for i in range(6):
pan = -0.75 + 0.25 * i
self._head.pan_tilt(pan, 0)
rospy.sleep(2)
self._head.pan_tilt(pan, 0.7)
rospy.sleep(2)
self._head.pan_tilt(pan, 0)
# move the head back to initial position
self._head.pan_tilt(0, 0.7)
return True
def do_sake_gripper_action(self, command):
# publish response message
if command == "calibrate":
self._publish_server_response(msg="Calibrating SAKE gripper, please wait...")
elif command == "h_close":
self._publish_server_response(msg="Hard closing SAKE gripper, please wait...")
elif command == "s_close":
self._publish_server_response(msg="Soft closing SAKE gripper, please wait...")
elif command == "open":
self._publish_server_response(msg="Opening SAKE gripper, please wait...")
else: # [percentage open, effort]
self._publish_server_response(msg="Closing SAKE gripper, please wait...")
# publish command
self._sake_gripper_pub.publish(EzgripperAccess(type=command))
# wait for the action to finish if in real
if not rospy.get_param("use_sim"):
while not self._sake_gripper_action_finished:
continue
# finished, reset
self._sake_gripper_action_finished = False
def reset(self):
""" Moves arm to its initial position and calibrates gripper """
self.freeze_arm()
self._arm.move_to_joint_goal(self._arm_initial_poses, replan=True)
self.do_sake_gripper_action("calibrate")
self._robot_stopped = False
self._reset_program_state()
def estop(self):
""" Emergency stop. """
self.relax_arm()
self._robot_stopped = True
self._reset_program_state()
def preview_body_part_with_id(self, id_num):
"""
Publishes visualization markers to mark the body part with given id.
"""
raw_pose = self._get_tag_with_id(id_num)
if raw_pose is not None:
# visualize goal pose
marker = Marker(type=Marker.CUBE,
id=0,
pose=raw_pose.pose.pose,
scale=Vector3(0.06, 0.06, 0.06),
header=raw_pose.header,
color=ColorRGBA(1.0, 0.75, 0.3, 0.8))
self._viz_pub.publish(marker)
def goto_part_with_id(self, id_num):
"""
Moves arm above a body part specified by id_num.
        Returns true if it succeeds, false otherwise.
"""
self.freeze_arm()
raw_pose = self._get_tag_with_id(id_num)
if raw_pose is not None:
# found marker, move towards it
self.do_sake_gripper_action("40 " + self._sake_gripper_effort)
# OPTION 1: pregrasp ---> grasp
# highlight and move to the pre-grasp pose
pre_grasp_offset = self._db.get("PREGRASP")
pre_grasp_pose = self._move_arm_relative(raw_pose.pose.pose, raw_pose.header, offset=pre_grasp_offset, preview_only=True)
self.highlight_waypoint(pre_grasp_pose, WAYPOINT_HIGHLIGHT_COLOR)
if self._move_arm(pre_grasp_pose, final_state=False):
# highlight and move to the grasp pose, clear octomap to ignore collision only at this point
if self._clear_octomap():
return self._move_arm(self._get_goto_pose(raw_pose), final_state=False, seed_state=self._get_seed_state())
# # OPTION 2: grasp
# return self._move_arm(self._get_goto_pose(raw_pose), final_state=False)
# marker with id_num is not found, or some error occured
return False
def preview_action_with_abbr(self, abbr, id_num):
"""
        Publishes visualization markers to preview waypoints on the trajectory with the given abbr.
        Saves waypoints extracted from the bag file to the database if the entry "abbr" doesn't exist.
        Returns the colors of the waypoints and records their positions, both with
        respect to the AR tag.
"""
# clear previous markers
self._viz_markers_pub.publish(MarkerArray(markers=[]))
# check the database for the action
waypoints = self._db.get(abbr)
if waypoints == None or len(waypoints) == 0:
waypoints = self._save_traj_to_db(abbr, id_num)
self._preview_action_abbr = ""
self._preview_traj = []
waypoints_with_respect_to_tag = []
if waypoints:
raw_pose = self._get_tag_with_id(id_num)
if raw_pose is not None:
prev_pose = self._get_goto_pose(raw_pose)
# markers
marker_arr = []
# marker color gradient
colors = list(START_COLOR.range_to(END_COLOR, len(waypoints)))
# visualize the trajectory
for i in range(len(waypoints)):
# visualize the current waypoint
marker = Marker(type=Marker.ARROW,
id=i,
pose=prev_pose.pose,
scale=TRAJ_HIGHLIGHT_SCALE,
header=prev_pose.header,
color=ColorRGBA(colors[i].red, colors[i].green, colors[i].blue, 0.8))
marker_arr.append(marker)
# record the waypoint
waypoints_with_respect_to_tag.append(str(colors[i].hex))
self._preview_traj.append(prev_pose)
if i < len(waypoints) - 1:
# calculate offset between the current point on the trajectory and the next point
r_pos = waypoints[i].pose # current point
r_mat = self._pose_to_transform(r_pos)
w_mat = self._pose_to_transform(waypoints[i + 1].pose)
offset = np.dot(np.linalg.inv(r_mat), w_mat)
prev_pose = self._move_arm_relative(prev_pose.pose, prev_pose.header, offset=offset, preview_only=True)
# publish new markers
self._viz_markers_pub.publish(MarkerArray(markers=marker_arr))
# record the action name
self._preview_action_abbr = abbr
return waypoints_with_respect_to_tag
def highlight_waypoint(self, highlight_pose, color):
""" Publishes a marker at the specified location. """
marker = Marker(type=Marker.ARROW,
id=0,
pose=highlight_pose.pose,
scale=WAYPOINT_HIGHLIGHT_SCALE,
header=highlight_pose.header,
color=color)
self._viz_pub.publish(marker)
def edit_waypoint(self, waypoint_id, delta_x, delta_y, camera):
""" Temporarily saves the changes to the specified waypoint, and highlights the resulting pose. """
# calculate the resulting pose
new_pose = self._compute_pose_by_delta(self._preview_traj[waypoint_id], delta_x, delta_y, camera)
# save the new pose
self._preview_traj[waypoint_id] = new_pose
# preview the new trajectory
marker_arr = []
# marker color gradient
colors = list(START_COLOR.range_to(END_COLOR, len(self._preview_traj)))
# visualize the trajectory
for i in range(len(self._preview_traj)):
# highlight the waypoint that is being editing
color = WAYPOINT_HIGHLIGHT_COLOR if i == waypoint_id else ColorRGBA(colors[i].red, colors[i].green, colors[i].blue, 0.8)
marker = Marker(type=Marker.ARROW,
id=i,
pose=self._preview_traj[i].pose,
scale=TRAJ_HIGHLIGHT_SCALE,
header=self._preview_traj[i].header,
color=color)
marker_arr.append(marker)
# clear previous markers
self._viz_markers_pub.publish(MarkerArray(markers=[]))
# publish new markers
self._viz_markers_pub.publish(MarkerArray(markers=marker_arr))
def modify_traj_in_db(self, cancel_change=True):
""" Overwrites the previous trajectory in database. """
if not cancel_change:
self._db.delete(self._preview_action_abbr)
self._db.add(self._preview_action_abbr, self._preview_traj)
self._preview_action_abbr = ""
self._preview_traj = []
def do_action_with_abbr(self, abbr, id_num):
"""
        Moves the arm to perform the action specified by abbr, saving the trajectory to the database if necessary.
        Returns true if it succeeds, false otherwise.
"""
action_result = False
if self._prepare_action(abbr, id_num):
action_result = self._follow_traj_step_by_step(0)
return action_result
def do_action_with_abbr_smooth(self, abbr, id_num):
"""
        Moves the arm to perform the action specified by abbr smoothly.
        Returns true if it succeeds, false otherwise.
"""
action_result = False
if self._prepare_action(abbr, id_num):
# calculate a smooth trajectory passing through all the waypoints and move the arm
action_result = self._move_arm(None, trajectory_waypoint=self._preview_traj, final_state=True, seed_state=self._get_seed_state())
if not action_result:
# smooth action fails, do the action step by step instead
action_result = self._follow_traj_step_by_step(0)
return action_result
def pause_action(self):
""" Pause the current action. """
# self.relax_arm()
print(self._current_waypoint_id)
def continue_action(self):
""" Continue the current action. """
# self.freeze_arm()
print(self._current_waypoint_id)
# if self._current_waypoint_id > -1:
# # continue going to the next waypoint
# self._follow_traj_step_by_step(self._current_waypoint_id + 1)
def record_action_with_abbr(self, abbr, id_num):
"""
Records the pose offset named abbr relative to tag, always overwrites the previous entry (if exists).
Returns true if succeeds, false otherwise.
"""
# get tag pose
tag_pose = self._get_tag_with_id(id_num)
if tag_pose == None:
return False
# get the pose to be recorded: transformation lookup
(position, quaternion) = self._tf_lookup()
if (position, quaternion) == (None, None):
return False
# get the transformation, record it
record_pose = Pose()
record_pose.position.x = position[0]
record_pose.position.y = position[1]
record_pose.position.z = position[2]
record_pose.orientation.x = quaternion[0]
record_pose.orientation.y = quaternion[1]
record_pose.orientation.z = quaternion[2]
record_pose.orientation.w = quaternion[3]
# get the offset between the tag pose and the pose to be recorded
t_pos = tag_pose.pose.pose
t_mat = self._pose_to_transform(t_pos)
w_mat = self._pose_to_transform(record_pose)
offset = np.dot(np.linalg.inv(t_mat), w_mat)
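        # offset is T_tag^-1 * T_record, i.e. the recorded gripper pose expressed in the
        # tag's frame; applying it to a future tag pose reproduces this gripper pose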
# add the offset to database
self._db.add(abbr, offset)
self._db.save()
return True
def relax_arm(self):
""" Relax the robot arm, if the program is running on the real robot """
if not rospy.get_param("use_sim") and not self._arm_relaxed:
goal = QueryControllerStatesGoal()
state = ControllerState()
state.name = 'arm_controller/follow_joint_trajectory'
state.state = ControllerState.STOPPED
goal.updates.append(state)
self._controller_client.send_goal(goal)
self._controller_client.wait_for_result()
self._arm_relaxed = True
def freeze_arm(self):
""" Freeze the robot arm, if the program is running on the real robot """
if not rospy.get_param("use_sim") and self._arm_relaxed:
goal = QueryControllerStatesGoal()
state = ControllerState()
state.name = 'arm_controller/follow_joint_trajectory'
state.state = ControllerState.RUNNING
goal.updates.append(state)
self._controller_client.send_goal(goal)
self._controller_client.wait_for_result()
self._arm_relaxed = False
def get_list(self):
""" Returns a list of AR tags recognized by the robot. """
return self._reader.get_list()
def get_db_list(self):
""" Returns list of entries in the database. """
return self._db.list()
def get_db_entry(self, entry):
""" Returns values associated with the given entry in the database. """
return self._db.get(entry)
def delete_db_entry(self, name):
""" Delete the database entry with the given name """
self._db.delete(name)
self._db.save()
def web_app_request_callback(self, msg):
"""
        Parses the request given by the web application, and calls the corresponding functions.
"""
request_type, request_args = msg.type, msg.args
print("type: " + request_type)
print("args: " + str(request_args) + "\n")
if request_type == "attach":
return_msg = "SAKE gripper has already attached!"
if not self._sake_gripper_attached:
self._publish_server_response(msg="Attaching SAKE gripper...")
self.attach_sake_gripper()
return_msg = "SAKE gripper attached"
self._publish_server_response(status=True, msg=return_msg)
elif request_type == "remove":
return_msg = "SAKE gripper has already removed!"
if self._sake_gripper_attached:
self._publish_server_response(msg="Removing SAKE gripper...")
self.remove_sake_gripper()
return_msg = "SAKE gripper removed"
self._publish_server_response(status=True, msg=return_msg)
elif not self._sake_gripper_attached:
# need to attach SAKE gripper first
self._publish_server_response(status=True, msg="Please attach SAKE gripper first!")
else:
# SAKE gripper has already attached
if request_type == "record":
self._publish_server_response(msg="Recording the current scene...")
if self.update_env(update_octo=bool(request_args[0])):
# get the list of body parts and actions
parts = self.get_list()
parts_info, actions_info = [], []
if len(parts):
for part in parts:
if part.id in BODY_PARTS and part.id in ACTIONS:
parts_info.append(str(part.id) + ":" + BODY_PARTS[part.id])
for action in ACTIONS[part.id]:
actions_info.append(str(part.id) + ":" + action + ":" + ABBR[action])
self._publish_server_response(type="parts", args=parts_info)
self._publish_server_response(type="actions", status=True, args=actions_info, msg="Scene recorded")
else:
self._publish_server_response(status=True, msg="Failed to record the current scene!")
elif request_type == "prev_id" and len(request_args) == 1:
id_num = int(request_args[0]) # convert from string to int
self._publish_server_response(status=True, msg="Previewing " + BODY_PARTS[id_num] + "...")
self.preview_body_part_with_id(id_num)
elif request_type == "prev" and len(request_args) == 2:
abbr, id_num = request_args[0], int(request_args[1])
waypoints_with_respect_to_tag = self.preview_action_with_abbr(abbr, id_num)
self._publish_server_response(type=request_type, status=True, args=waypoints_with_respect_to_tag,
msg="Previewing the action with respect to body part " + BODY_PARTS[id_num] + "...")
elif request_type == "highlight" and len(request_args) == 1:
waypoint_id = int(request_args[0])
self.highlight_waypoint(self._preview_traj[waypoint_id], WAYPOINT_HIGHLIGHT_COLOR)
self._publish_server_response(status=True)
elif request_type == "edit" and len(request_args) == 4:
waypoint_id, delta_x, delta_y, camera = int(request_args[0]), int(request_args[1]), int(request_args[2]), request_args[3]
self.edit_waypoint(waypoint_id, delta_x, delta_y, camera)
self._publish_server_response(status=True)
elif request_type == "save_edit" or request_type == "cancel_edit":
if request_type == "save_edit":
self.modify_traj_in_db(cancel_change=False)
else:
self.modify_traj_in_db() # cancel all the changes
self._publish_server_response(status=True)
elif request_type == "reset":
self._publish_server_response(msg="Resetting...")
self.reset()
self._publish_server_response(status=True, msg="Done")
elif not self._robot_stopped:
# moveit controller is running
if request_type == "go" and len(request_args) == 1:
self._do_position_ready = False
id_num = int(request_args[0])
self._publish_server_response(msg="Moving towards body part " + BODY_PARTS[id_num] + "...")
if self.goto_part_with_id(id_num):
self._grasp_position_ready = True
self._do_position_id = id_num
self._publish_server_response(status=True, msg="Done, ready to grasp")
else:
self._publish_server_response(status=True, msg="Fail to move!")
elif request_type == "grasp" and self._grasp_position_ready and len(request_args) == 1:
self._publish_server_response(msg="Grasping...")
self._grasp_type = "h_close" if request_args[0] == "h" else "s_close"
self.do_sake_gripper_action(self._grasp_type)
self._grasp_position_ready = False
self._do_position_ready = True
self._publish_server_response(status=True, msg="Grasped")
elif request_type == "relax":
self._publish_server_response(msg="Relaxing arm...")
self.relax_arm()
self._publish_server_response(status=True, msg="Arm relaxed")
elif request_type == "freeze":
self._publish_server_response(msg="Freezing arm...")
self.freeze_arm()
self._publish_server_response(status=True, msg="Arm froze")
elif (request_type == "do" or request_type == "do_s") and len(request_args) > 0:
action_abbr = request_args[0]
return_msg = "Action failed!"
if self._do_position_ready:
# performing mode
self._publish_server_response(msg="Performing " + action_abbr + "...")
result = False
if request_type == "do": # step by step
result = self.do_action_with_abbr(action_abbr, self._do_position_id)
else: # "do_s": smooth
result = self.do_action_with_abbr_smooth(action_abbr, self._do_position_id)
if result:
return_msg = "Action succeeded"
else:
return_msg = "Unknown action for body part with ID: " + str(self._do_position_id)
# always release gripper
self._publish_server_response(msg="Releasing the gripper...")
self.do_sake_gripper_action("40 " + self._sake_gripper_effort)
self._do_position_ready = False
self._publish_server_response(status=True, msg=return_msg)
elif request_type == "open":
self._publish_server_response(msg="Opening the gripper...")
self.do_sake_gripper_action("open")
self._do_position_ready = False
self._publish_server_response(status=True, msg="Gripper opened")
elif request_type == "stop":
self._publish_server_response(msg="Stopping the robot...")
self.estop()
self._publish_server_response(status=True, msg="Robot stopped, please \"RESET\" if you want to continue using it")
elif request_type == "run" and len(request_args) == 3:
#######################################################################################
############## todo
# start execution from the currect step in the action trajectory
# start execution from the very beginning
self.web_app_request_callback(WebAppRequest(type="go", args=[request_args[0]]))
self.web_app_request_callback(WebAppRequest(type="grasp", args=[request_args[1]]))
self.web_app_request_callback(WebAppRequest(type="do_s", args=[request_args[2]]))
self._publish_server_response(type=request_type, status=True, msg="DONE")
elif request_type == "step":
return_msg = "Ready!"
waypoint_id = int(request_args[0])
if waypoint_id == -1: # goto tag and grasp
self.web_app_request_callback(WebAppRequest(type="go", args=[request_args[1]]))
self.web_app_request_callback(WebAppRequest(type="grasp", args=[request_args[2]]))
else: # move along trajectory
self._grasp_type = "h_close" if request_args[1] == "h" else "s_close"
return_msg = "Fail to reach waypoint #" + request_args[0]
result = self._goto_waypoint_on_traj_with_id(waypoint_id)
if waypoint_id == len(self._preview_traj) - 1: # last point
self._publish_server_response(msg="Releasing the gripper...")
self.do_sake_gripper_action("40 " + self._sake_gripper_effort)
self._do_position_ready = False
if result:
return_msg = "Reached waypoint #" + request_args[0]
self._publish_server_response(type=request_type, status=True, args=[request_args[0]], msg=return_msg)
elif request_type == "pause":
self._publish_server_response(msg="Pausing...")
self.pause_action()
self._publish_server_response(type=request_type, status=True, msg="PAUSED")
elif request_type == "continue":
self._publish_server_response(msg="Continuing...")
self.continue_action()
self._publish_server_response(type=request_type, status=True, msg="CONTINUED")
else:
self._publish_server_response(status=True, msg="Invalid command :)")
def _reset_program_state(self):
""" Resets program state. """
self._grasp_position_ready = False
self._do_position_ready = False
self._do_position_id = -1
def _clear_octomap(self):
""" Clears the octomap. Returns true if succeeds, false otherwise. """
rospy.wait_for_service('clear_octomap')
try:
clear_octo = rospy.ServiceProxy('clear_octomap', Empty)
clear_octo()
except rospy.ServiceException, e:
            rospy.logerr('Failed to clear octomap: {}'.format(e))
return False
return True
def _move_arm_relative(self, ref_pose, ref_header, offset=None, preview_only=False, seed_state=None):
"""
Calculates the coordinate of the goal by adding the offset to the given reference pose,
and moves the arm to the goal. If it's only for previewing, returns the goal pose,
else returns the result of the movement.
"""
goal_pose = PoseStamped()
goal_pose.header = ref_header
if offset is not None:
# current pose is valid, perform action
t_mat = self._pose_to_transform(ref_pose)
# compute the new coordinate
new_trans = np.dot(t_mat, offset)
pose = self._transform_to_pose(new_trans)
goal_pose.pose = pose
else:
goal_pose.pose = ref_pose
if preview_only:
return goal_pose
else:
# move to the goal position while avoiding unreasonable trajectories!
# close SAKE gripper again to ensure the limb is grasped
self.do_sake_gripper_action(self._grasp_type)
# visualize goal pose
self.highlight_waypoint(goal_pose, ColorRGBA(1.0, 1.0, 0.0, 0.8))
return goal_pose if self._move_arm(goal_pose, final_state=True, seed_state=seed_state) else None
def _move_arm(self, goal_pose, trajectory_waypoint=[], final_state=False, seed_state=None):
"""
        Moves the arm to the specified goal_pose. Returns true if it succeeds, false otherwise.
"""
error = None
if not final_state:
# simply go to the goal_pose
error = self._arm.move_to_pose_with_seed(goal_pose, seed_state, [], **self._kwargs)
# record the current pose because we still have the "do action" step to do
self._current_pose = goal_pose
else:
# go to goal_pose while avoiding unreasonable trajectories!
if trajectory_waypoint:
# create an array of waypoints
waypoints = []
for tw in trajectory_waypoint:
waypoints.append(tw.pose)
# using trajectory waypoints to perform a smooth motion
plan = self._arm.get_cartesian_path(self._moveit_group, seed_state, waypoints)
if plan:
error = self._arm.execute_trajectory(self._moveit_group, plan)
else:
error = 'PLANNING_FAILED'
else:
# using seed
error = self._arm.move_to_pose_with_seed(goal_pose, seed_state, [], **self._kwargs)
if error is not None:
# planning with seed failed, try without seed
# moveit: move group commander
# check if the pose can be reached in a straight line motion
plan = self._arm.straight_move_to_pose_check(self._moveit_group, goal_pose)
if plan:
error = self._arm.straight_move_to_pose(self._moveit_group, plan)
else:
error = 'PLANNING_FAILED'
# reset current pose to none
self._current_pose = None
if error is not None:
self._arm.cancel_all_goals()
rospy.logerr("Fail to move: {}".format(error))
return False
# succeed
return True
def _transform_to_pose(self, matrix):
""" Matrix to pose """
pose = Pose()
trans_vector = tft.translation_from_matrix(matrix)
pose.position = Point(trans_vector[0], trans_vector[1], trans_vector[2])
quartern = tft.quaternion_from_matrix(matrix)
pose.orientation = Quaternion(quartern[0], quartern[1], quartern[2], quartern[3])
return pose
def _pose_to_transform(self, pose):
""" Pose to matrix """
q = pose.orientation
matrix = tft.quaternion_matrix([q.x, q.y, q.z, q.w])
matrix[0, 3] = pose.position.x
matrix[1, 3] = pose.position.y
matrix[2, 3] = pose.position.z
return matrix
def _get_tag_with_id(self, id_num):
""" Returns the AR tag with the given id, returns None if id not found """
tag_pose = self._reader.get_tag(id_num)
if tag_pose == None:
rospy.logerr("AR tag lookup error: Invalid ID# " + str(id_num))
return tag_pose
def _tf_lookup(self):
"""
Lookups the transformation between "base_link" and "wrist_roll_link" (retry up to TRANSFROM_LOOKUP_RETRY times),
and returns the result
"""
(position, quaternion) = (None, None)
count = 0
while True:
if (position, quaternion) != (None, None): # lookup succeeds
return (position, quaternion)
elif count >= TRANSFROM_LOOKUP_RETRY: # exceeds maximum retry times
rospy.logerr("Fail to lookup transfrom information between 'base_link' and 'wrist_roll_link'")
return (None, None)
else: # try to lookup transform information
try:
(position, quaternion) = self._tf_listener.lookupTransform('base_link', 'wrist_roll_link', rospy.Time(0))
except (tf.LookupException, tf.ConnectivityException, tf.ExtrapolationException):
count += 1
continue
def _get_goto_pose(self, ar_pose):
"""
Calculates the grasp pose of gripper given the AR tag pose, returns the gripper pose.
"""
grasp_offset = self._db.get("GRASP")
goal_pose = self._move_arm_relative(ar_pose.pose.pose, ar_pose.header, offset=grasp_offset, preview_only=True)
self.highlight_waypoint(goal_pose, WAYPOINT_HIGHLIGHT_COLOR)
return goal_pose
def _prepare_action(self, abbr, id_num):
"""
Previews the action trajectory, moves robot arm to the starting position of the action and
grasps the limb. Returns true if succeeds, false otherwise.
"""
# preview action
self.preview_action_with_abbr(abbr, id_num)
# freeze arm for moveit
self.freeze_arm()
# get AR tag information
tag_pose = self._get_tag_with_id(id_num)
if tag_pose == None or self._current_pose == None:
return False
# move arm to the starting position relative to AR tag
if not self._preview_traj or not self.goto_part_with_id(id_num):
rospy.logerr("Fail to move to the starting position for action: " + abbr)
return False
return True
def _follow_traj_step_by_step(self, start_id):
"""
Follows the current trajectory step by step. Returns true if at least one waypoint is reached, false otherwise.
"""
succeed = False
for i in range(start_id, len(self._preview_traj) - 1):
goal_pose = self._preview_traj[i + 1]
# move arm relative to the previous pose (use seed), skip the current waypoint if the current action fails
action_result = self._move_arm_relative(goal_pose.pose, goal_pose.header, seed_state=self._get_seed_state())
# record the current waypoint id
self._current_waypoint_id = i + 1
# check action result
if action_result is not None:
succeed = True # the whole action succeeds if at least one pose is reached
else:
rospy.logerr("Fail to reach waypoint " + str(i + 1))
# action finished, reset the current waypoint
self._current_waypoint_id = -1
return succeed
def _goto_waypoint_on_traj_with_id(self, waypoint_id):
"""
Go to the specific waypoint on the trajectory. Returns true if succeeds, false otherwise.
"""
succeed = False
if -1 < waypoint_id < len(self._preview_traj):
goal_pose = self._preview_traj[waypoint_id]
action_result = self._move_arm_relative(goal_pose.pose, goal_pose.header, seed_state=self._get_seed_state())
if action_result is not None:
succeed = True
else:
rospy.logerr("Fail to reach waypoint " + str(i + 1))
return succeed
def _save_traj_to_db(self, abbr, id_num):
"""
Checks bag file for the trajectory with given abbr, calculate waypoints and save those points to the database.
Returns the calculated waypoints if succeed, none otherwise.
"""
# check bag files for the trajectory
bag_file_path = os.path.join(self._bag_file_dir, abbr.lower() + '.bag')
bag = rosbag.Bag(bag_file_path)
waypoints = []
prev_msg = []
# get the trajectory from bag file
for topic, msg, t in bag.read_messages(topics=['/joint_states']):
joint_state = list(msg.position[6:13])
if len(joint_state) != 0 and (len(prev_msg) == 0 or np.abs(np.sum(np.subtract(joint_state, prev_msg))) > rospy.get_param("arm_traj_threshold")):
prev_msg = joint_state
# use forward kinematics to find the wrist position
point = self._arm.compute_fk(msg)
if point:
# add the result position
waypoints.append(point[0])
bag.close()
if len(waypoints) < 2:
# this trajectory is empty because it only contains the starting point
rospy.logerr("Empty trajectory for action: " + abbr)
return None
# add the result position to database
self._db.add(abbr, waypoints)
self._db.save()
return waypoints
def _publish_server_response(self, type="", status=False, args=[], msg=""):
""" Publishes the server response message, and prints the message in console if needed. """
if rospy.get_param("console_output"):
print(msg)
self._web_app_response_pub.publish(WebAppResponse(type=type, status=status, args=args, msg=msg))
def _compute_pose_by_delta(self, current_pose, delta_x, delta_y, camera):
"""
Computes and returns the new pose with respect to base_link after applying
delta_x and delta_y to the current pose in the specified camera view.
"""
x_distance, y_distance = dpx_to_distance(delta_x, delta_y, camera, current_pose, True)
return delta_modified_stamped_pose(x_distance, y_distance, camera, current_pose)
def _get_seed_state(self):
""" Returns the current arm joint state as the seed used in motion planning. """
seed_state = JointState()
seed_state.name = self._arm_joints.names()
seed_state.position = self._arm_joints.values()
return seed_state
def _set_sake_gripper_action_status(self, msg):
""" This is the callback of sake gripper status. """
self._sake_gripper_action_finished = True
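
# A minimal standalone sketch (not part of the original class) of the offset
# composition used by _move_arm_relative above: a pose is lifted to a 4x4
# homogeneous matrix, composed with an offset matrix, and converted back.
# It assumes the module-level imports this fragment already relies on
# (np, tft, Pose, Point, Quaternion); the function name is illustrative.
def _demo_compose_offset(ref_pose, offset_matrix):
    # pose -> 4x4 homogeneous transform
    q = ref_pose.orientation
    t_mat = tft.quaternion_matrix([q.x, q.y, q.z, q.w])
    t_mat[0, 3] = ref_pose.position.x
    t_mat[1, 3] = ref_pose.position.y
    t_mat[2, 3] = ref_pose.position.z
    # compose with the offset (e.g. a grasp offset retrieved from the database)
    new_mat = np.dot(t_mat, offset_matrix)
    # transform -> pose
    pose = Pose()
    trans = tft.translation_from_matrix(new_mat)
    pose.position = Point(trans[0], trans[1], trans[2])
    quat = tft.quaternion_from_matrix(new_mat)
    pose.orientation = Quaternion(quat[0], quat[1], quat[2], quat[3])
    return pose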
| mit | -8,208,984,794,664,362,000 | 40.827053 | 150 | 0.642281 | false | 3.493182 | false | false | false |
mmktomato/zenmai-bts | test/test_zenmai.py | 1 | 15025 | """Unit test"""
import unittest
import re
import io
from datetime import datetime, timedelta
from flask import request, session
from . import ctx
from .zenmai_test_utils import create_issue, create_comment, create_attached_file, create_user, \
    login, logout, delete_all_issues
from web.models.issue import Issue
from web.models.user import User


class ZenmaiTestCase(unittest.TestCase):
    """TestCase class"""

    def _assert_403(self, data):
        """Helper method of 403 assertion."""
        self.assertIn('403', data)
        self.assertIn('Forbidden', data)

    def _assert_issue_detail(self, data, subject, body, pub_date, state_name, attached_file_name):
        """Helper method of issue detail page assertion.

        Args:
            data (string): HTTP Response body.
            subject (string): Regex string of subject.
            body (string): Regex string of body.
            pub_date (datetime): pub_date.
            state_name (string): Regex string of state name.
            attached_file_name (string): Regex string of attached file name.
        """
        # subject
        subject_regex = re.compile('<h1>{}</h1>'.format(subject))
        self.assertRegex(data, subject_regex)

        # body
        body_regex = re.compile(r'<div class="panel-body">.*<p class="zen-comment-body">{}</p>.*</div>'.format(body), re.DOTALL)
        self.assertRegex(data, body_regex)

        # pub_date
        pub_date_regex = re.compile('<div class="panel-heading">.*{0:%Y-%m-%d %H:%M:%S}.*</div>'.format(pub_date), re.DOTALL)
        self.assertRegex(data, pub_date_regex)

        # state_name
        state_name_regex = re.compile('<span class="label.*">{}</span>'.format(state_name))
        self.assertRegex(data, state_name_regex)

        # attached_file_name
        attached_file_name_regex = re.compile('<div class="panel-footer">.*download: <a href="/download/\d+/">{}</a>.*</div>'.format(attached_file_name), re.DOTALL)
        self.assertRegex(data, attached_file_name_regex)

    def test_get_issue_list(self):
        """Test case of issue list. (HTTP GET)"""
        issue = create_issue(subject='test subject.test_get_issue_list.')
        issue.add()

        res = ctx['TEST_APP'].get('/')
        data = res.data.decode('utf-8')

        subject_regex = re.compile(r'<a href="/{}/">.*test subject\.test_get_issue_list\..*</a>'.format(issue.id))
        self.assertRegex(data, subject_regex)

    def test_get_empty_issue_list(self):
        """Test case of no issues. (HTTP GET)"""
        delete_all_issues()

        res = ctx['TEST_APP'].get('/')
        data = res.data.decode('utf-8')

        self.assertIn('No issues.', data)

    def test_get_issue_detail(self):
        """Test case of issue detail. (HTTP GET)"""
        pub_date = datetime.utcnow() + timedelta(days=1)  # tomorrow
        attached_file = create_attached_file(name='test.txt')
        comment = create_comment(body='test body.test_get_issue_detail.', pub_date=pub_date, attached_files=[attached_file])
        issue = create_issue(subject='test subject.test_get_issue_detail.', comments=[comment], state_id=2)
        issue.add()

        res = ctx['TEST_APP'].get('/{}/'.format(issue.id))
        data = res.data.decode('utf-8')

        self._assert_issue_detail(
            data=data,
            subject='test subject\.test_get_issue_detail\.',
            body='test body\.test_get_issue_detail\.',
            pub_date=pub_date,
            state_name=issue.state.name,
            attached_file_name='test\.txt')

    def test_post_issue_detail(self):
        """Test case of issue detail. (HTTP POST)"""
        issue = create_issue()
        issue.add()

        # without authentication.
        res = ctx['TEST_APP'].post('/{}/'.format(issue.id), data={
            'csrf_token': ctx['CSRF_TOKEN']
        }, follow_redirects=True)
        self._assert_403(res.data.decode('utf-8'))

        # with authentication.
        with login() as (user, _):
            res = ctx['TEST_APP'].post('/{}/'.format(issue.id), data={
                'csrf_token': ctx['CSRF_TOKEN'],
                'new_body': 'test body.test_post_issue_detail',
                'new_state': 1,
                'file': (io.BytesIO(b'test attached file content.test_post_issue_detail.'), 'test.txt')
            }, follow_redirects=True)
            data = res.data.decode('utf-8')

            self._assert_issue_detail(
                data=data,
                subject=issue.subject,
                body=issue.comments[0].body,
                pub_date=issue.comments[0].pub_date,
                state_name=issue.state.name,
                attached_file_name='test\.txt')

    def test_get_no_issue_detail(self):
        """Test case of no issue detail. (HTTP GET)"""
        issue = create_issue()
        issue.add()

        res = ctx['TEST_APP'].get('/{}/'.format(issue.id + 1))
        self.assertEqual(res.status_code, 404)

    def test_get_new_issue(self):
        """Test case of new issue page. (HTTP GET)"""
        # without authentication.
        res = ctx['TEST_APP'].get('/new/', follow_redirects=True)
        data = res.data.decode('utf-8')
        self.assertIn('<title>Zenmai - login</title>', data)
        self.assertIn('you need to login.', data)

        # with authentication.
        with login() as (_, _):
            res = ctx['TEST_APP'].get('/new/')
            data = res.data.decode('utf-8')
            self.assertIn('<title>Zenmai - new issue</title>', data)
            self.assertIn('Add new issue', data)

    def test_post_new_issue(self):
        """Test case of new issue page. (HTTP POST)"""
        # without authentication.
        res = ctx['TEST_APP'].post('/new/', data={
            'csrf_token': ctx['CSRF_TOKEN']
        }, follow_redirects=True)
        self._assert_403(res.data.decode('utf-8'))

        # with authentication.
        with login() as (user, _):
            res = ctx['TEST_APP'].post('/new/', data={
                'csrf_token': ctx['CSRF_TOKEN'],
                'new_subject': 'test subject.test_post_new_issue.',
                'new_body': 'test body.test_post_new_issue.',
                'new_state': 1,
                'file': (io.BytesIO(b'test attached file content.test_post_new_issue.'), 'test.txt')
            }, follow_redirects=True)
            data = res.data.decode('utf-8')

            issue = Issue.query \
                .filter_by(subject='test subject.test_post_new_issue.') \
                .first()

            self._assert_issue_detail(
                data=data,
                subject='test subject\.test_post_new_issue\.',
                body='test body\.test_post_new_issue\.',
                pub_date=issue.comments[0].pub_date,
                state_name='Open',
                attached_file_name='test\.txt')

    def test_post_large_attached_file(self):
        """Test case of a POST request with a too-large attached file."""
        large_buf = bytes(ctx['APP'].config['MAX_CONTENT_LENGTH'] + 1)
        res = ctx['TEST_APP'].post('/new/', data={
            'new_subject': 'test subject.test_post_new_issue.',
            'new_body': 'test body.test_post_new_issue.',
            'new_state': 1,
            'file': (io.BytesIO(large_buf), 'test.txt')
        }, follow_redirects=True)
        self.assertEqual(res.status_code, 413)

    def test_get_download_attached_file(self):
        """Test case of downloading attached file. (HTTP GET)"""
        attached_file = create_attached_file(data=b'test content of attached file.test_get_download_attached_file.')
        comment = create_comment(attached_files=[attached_file])
        issue = create_issue(comments=[comment])
        issue.add()

        res = ctx['TEST_APP'].get('/download/{}/'.format(attached_file.id))
        data = res.data.decode('utf-8')

        self.assertEqual(data, 'test content of attached file.test_get_download_attached_file.')

    def test_get_login_page(self):
        """Test case of login page. (HTTP GET)"""
        res = ctx['TEST_APP'].get('/user/login/')
        data = res.data.decode('utf-8')

        self.assertIn('<title>Zenmai - login</title>', data)
        self.assertEqual(res.status_code, 200)

    def test_post_login_page(self):
        """Test case of login. (HTTP POST)"""
        user = create_user( \
            id='testid.test_post_login_page', \
            name='testname.test_post_login_page', \
            password='testpassword.test_post_login_page')

        with login(user, 'testpassword.test_post_login_page') as (_, res):
            data = res.data.decode('utf-8')

            self.assertEqual(res.status_code, 200)
            self.assertIn('<li><a href="/user/">{}(id:{})</a></li>'.format(user.name, user.id), data)

    def test_get_logout_page(self):
        """Test case of logout. (HTTP GET)"""
        user = create_user( \
            id='testid.test_get_logout_page', \
            name='testname.test_post_logout_page', \
            password='testpassword.test_post_logout_page')

        with login(user, 'testpassword.test_post_logout_page', do_logout=False):
            pass
        res = logout()
        data = res.data.decode('utf-8')

        self.assertEqual(res.status_code, 200)
        self.assertIn('<title>Zenmai - issues</title>', data)

    def test_get_user_register_page(self):
        """Test case of user register page. (HTTP GET)"""
        res = ctx['TEST_APP'].get('/user/new/')
        data = res.data.decode('utf-8')

        self.assertIn('<title>Zenmai - register</title>', data)
        self.assertEqual(res.status_code, 200)

    def test_post_register_valid_user(self):
        """Test case of valid user registration. (HTTP POST)"""
        res = ctx['TEST_APP'].post('/user/new/', data={
            'csrf_token': ctx['CSRF_TOKEN'],
            'user_id': 'testid.test_post_register_valid_user',
            'user_name': 'testname.test_post_register_valid_user',
            'password': 'testpassword.test_post_register_valid_user',
            'password_retype': 'testpassword.test_post_register_valid_user'
        }, follow_redirects=True)
        data = res.data.decode('utf-8')

        self.assertIn('<title>Zenmai - login</title>', data)
        self.assertEqual(res.status_code, 200)

    def test_post_register_invalid_user(self):
        """Test case of invalid user registration. (HTTP POST)"""
        # password is not matched.
        res = ctx['TEST_APP'].post('/user/new/', data={
            'csrf_token': ctx['CSRF_TOKEN'],
            'user_id': 'testid.test_post_register_invalid_user',
            'user_name': 'testname.test_post_register_invalid_user',
            'password': 'testpassword.test_post_register_invalid_user',
            'password_retype': 'invalid password'
        }, follow_redirects=True)
        data = res.data.decode('utf-8')

        self.assertIn('<title>Zenmai - register</title>', data)
        self.assertIn('password is not matched.', data)
        self.assertEqual(res.status_code, 200)

        # already exist.
        ctx['TEST_APP'].post('/user/new/', data={
            'csrf_token': ctx['CSRF_TOKEN'],
            'user_id': 'testid.test_post_register_invalid_user',
            'user_name': 'testname.test_post_register_invalid_user',
            'password': 'testpassword.test_post_register_invalid_user',
            'password_retype': 'testpassword.test_post_register_invalid_user'
        }, follow_redirects=True)
        res = ctx['TEST_APP'].post('/user/new/', data={
            'csrf_token': ctx['CSRF_TOKEN'],
            'user_id': 'testid.test_post_register_invalid_user',
            'user_name': 'testname.test_post_register_invalid_user',
            'password': 'testpassword.test_post_register_invalid_user',
            'password_retype': 'testpassword.test_post_register_invalid_user'
        }, follow_redirects=True)
        data = res.data.decode('utf-8')

        self.assertIn('<title>Zenmai - register</title>', data)
        self.assertIn("id '{}' is already exists.".format('testid.test_post_register_invalid_user'), data)
        self.assertEqual(res.status_code, 200)

    def test_get_user_page(self):
        """Test case of user page. (HTTP GET)"""
        # without authentication.
        res = ctx['TEST_APP'].get('/user/', follow_redirects=True)
        data = res.data.decode('utf-8')
        self.assertIn('<title>Zenmai - login</title>', data)
        self.assertEqual(res.status_code, 200)

        # with authentication.
        with login() as (user, _):
            res = ctx['TEST_APP'].get('/user/')
            data = res.data.decode('utf-8')
            self.assertIn('<td>{}</td>'.format(user.id), data)
            self.assertIn('<td>{}</td>'.format(user.name), data)
            self.assertEqual(res.status_code, 200)

    def test_get_user_edit_page(self):
        """Test case of user edit page. (HTTP GET)"""
        # without authentication.
        res = ctx['TEST_APP'].get('/user/edit/')
        data = res.data.decode('utf-8')
        self._assert_403(res.data.decode('utf-8'))

        # with authentication.
        with login() as (user, _):
            res = ctx['TEST_APP'].get('/user/edit/')
            data = res.data.decode('utf-8')
            self.assertIn('<title>Zenmai - edit user</title>', data)
            self.assertIn('value="{}" readonly'.format(user.id), data)
            self.assertIn('value="{}"'.format(user.name), data)
            self.assertIn("leave empty if you don't want to change password.", data)
            self.assertEqual(res.status_code, 200)

    def test_post_user_edit_page(self):
        """Test case of edit user. (HTTP POST)"""
        # without authentication.
        res = ctx['TEST_APP'].post('/user/edit/', data={
            'csrf_token': ctx['CSRF_TOKEN'],
            'user_id': 'testid.test_post_user_edit_page',
            'user_name': 'testname.test_post_user_edit_page',
            'password': 'testpassword.test_post_user_edit_page',
            'password_retype': 'testpassword.test_post_user_edit_page'
        }, follow_redirects=True)
        self._assert_403(res.data.decode('utf-8'))

        # with authentication.
        with login() as (user, _):
            res = ctx['TEST_APP'].post('/user/edit/', data={
                'csrf_token': ctx['CSRF_TOKEN'],
                'user_id': user.id,
                'user_name': 'new testname.test_post_user_edit_page',
                'password': 'new testpassword.test_post_user_edit_page',
                'password_retype': 'new testpassword.test_post_user_edit_page'
            }, follow_redirects=True)
            data = res.data.decode('utf-8')

            self.assertIn('<td>{}</td>'.format(user.id), data)
            self.assertIn('<td>{}</td>'.format('new testname.test_post_user_edit_page'), data)
            self.assertEqual(res.status_code, 200)
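
# Assumed convenience entry point (not in the original file): lets the suite be run
# directly with `python -m test.test_zenmai` instead of an external test runner.
if __name__ == '__main__':
    unittest.main()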
| mit | 1,231,649,006,724,835,000 | 40.969274 | 164 | 0.568053 | false | 3.71998 | true | false | false |
MrBhendel/Cirrus | ModulusList.py | 1 | 1374 | import sys
from abc import ABCMeta, abstractmethod


class ModulusList:
    '''
    Maintains a list of (host, modulus, e) tuples.
    '''
    __metaclass__ = ABCMeta

    def __init__(self):
        self._modulusList = []

    def addModulusList(self, other):
        for i in range(0, other.length()):
            item = other[i]
            self.add(item[0], item[1], item[2])

    @abstractmethod
    def add(self, host, modulus, e):
        pass

    @abstractmethod
    def length(self):
        pass

    @abstractmethod
    def __getitem__(self, index):
        pass

    @abstractmethod
    def saveListToFile(self, fileName):
        pass

    @abstractmethod
    def loadListFromFile(self, fileName):
        pass


class ModulusListImpl(ModulusList):

    def add(self, host, modulus, e):
        self._modulusList.append((host, modulus, e))

    def length(self):
        return len(self._modulusList)

    def __getitem__(self, index):
        return self._modulusList[index]

    def saveListToFile(self, fileName):
        saveFile = open(fileName, 'w')
        for record in self._modulusList:
            saveFile.write(str(record[0]) + '\n')
            saveFile.write(str(record[1]) + '\n')
            saveFile.write(str(record[2]) + '\n')
        saveFile.close()

    def loadListFromFile(self, fileName):
        loadFile = open(fileName, 'r')
        while True:
            host = loadFile.readline().rstrip()
            n = loadFile.readline().rstrip()
            e = loadFile.readline().rstrip()
            if not e: break
            self._modulusList.append((host, long(n), long(e)))
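

# A minimal usage sketch (not part of the original module) showing the intended
# save/load round trip. The file name 'moduli.txt' and the toy modulus are
# illustrative assumptions; real moduli come from scanned hosts.
if __name__ == '__main__':
    modlist = ModulusListImpl()
    modlist.add('example.com:443', long(0xC0FFEE), long(65537))
    modlist.saveListToFile('moduli.txt')

    restored = ModulusListImpl()
    restored.loadListFromFile('moduli.txt')
    assert restored.length() == 1
    assert restored[0][1] == long(0xC0FFEE)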
| mit | 1,942,632,575,379,380,700 | 19.818182 | 53 | 0.6754 | false | 2.942184 | false | false | false |
pf4d/dolfin-adjoint | dolfin_adjoint/adjrhs.py | 1 | 10667 | import libadjoint
import backend
import ufl
import ufl.algorithms
import hashlib

import adjglobals
import adjlinalg
import utils


def find_previous_variable(var):
    ''' Returns the previous instance of the given variable. '''

    for timestep in range(var.timestep, -1, -1):
        prev_var = libadjoint.Variable(var.name, timestep, 0)

        if adjglobals.adjointer.variable_known(prev_var):
            prev_var.var.iteration = prev_var.iteration_count(adjglobals.adjointer) - 1
            return prev_var

    raise libadjoint.exceptions.LibadjointErrorInvalidInputs, 'No previous variable found'


def _extract_function_coeffs(form):
    for c in ufl.algorithms.extract_coefficients(form):
        if isinstance(c, backend.Function):
            yield c


class RHS(libadjoint.RHS):
    '''This class implements the libadjoint.RHS abstract base class for the Dolfin adjoint.
    It takes in a form, and implements the necessary routines such as calling the right-hand side
    and taking its derivative.'''

    def __init__(self, form):
        self.form = form

        if isinstance(self.form, ufl.form.Form):
            self.deps = [adjglobals.adj_variables[coeff] for coeff in _extract_function_coeffs(self.form)]
        else:
            self.deps = []

        if isinstance(self.form, ufl.form.Form):
            self.coeffs = [coeff for coeff in _extract_function_coeffs(self.form)]
        else:
            self.coeffs = []

    def __call__(self, dependencies, values):
        if isinstance(self.form, ufl.form.Form):
            dolfin_dependencies = [dep for dep in _extract_function_coeffs(self.form)]
            dolfin_values = [val.data for val in values]
            return adjlinalg.Vector(backend.replace(self.form, dict(zip(dolfin_dependencies, dolfin_values))))
        else:
            # RHS is an adjlinalg.Vector.
            assert isinstance(self.form, adjlinalg.Vector)
            return self.form

    def derivative_action(self, dependencies, values, variable, contraction_vector, hermitian):
        if contraction_vector.data is None:
            return adjlinalg.Vector(None)

        if isinstance(self.form, ufl.form.Form):
            # Find the dolfin Function corresponding to variable.
            dolfin_variable = values[dependencies.index(variable)].data

            dolfin_dependencies = [dep for dep in _extract_function_coeffs(self.form)]
            dolfin_values = [val.data for val in values]

            current_form = backend.replace(self.form, dict(zip(dolfin_dependencies, dolfin_values)))
            trial = backend.TrialFunction(dolfin_variable.function_space())

            d_rhs = backend.derivative(current_form, dolfin_variable, trial)

            if hermitian:
                action = backend.action(backend.adjoint(d_rhs), contraction_vector.data)
            else:
                action = backend.action(d_rhs, contraction_vector.data)

            return adjlinalg.Vector(action)
        else:
            # RHS is an adjlinalg.Vector. Its derivative is therefore zero.
            return adjlinalg.Vector(None)

    def second_derivative_action(self, dependencies, values, inner_variable, inner_contraction_vector, outer_variable, hermitian, action_vector):
        if isinstance(self.form, ufl.form.Form):
            # Find the dolfin Function corresponding to variable.
            dolfin_inner_variable = values[dependencies.index(inner_variable)].data
            dolfin_outer_variable = values[dependencies.index(outer_variable)].data

            dolfin_dependencies = [dep for dep in _extract_function_coeffs(self.form)]
            dolfin_values = [val.data for val in values]

            current_form = backend.replace(self.form, dict(zip(dolfin_dependencies, dolfin_values)))
            trial = backend.TrialFunction(dolfin_outer_variable.function_space())

            d_rhs = backend.derivative(current_form, dolfin_inner_variable, inner_contraction_vector.data)
            d_rhs = ufl.algorithms.expand_derivatives(d_rhs)
            if len(d_rhs.integrals()) == 0:
                return None

            d_rhs = backend.derivative(d_rhs, dolfin_outer_variable, trial)
            d_rhs = ufl.algorithms.expand_derivatives(d_rhs)

            if len(d_rhs.integrals()) == 0:
                return None

            if hermitian:
                action = backend.action(backend.adjoint(d_rhs), action_vector.data)
            else:
                action = backend.action(d_rhs, action_vector.data)

            return adjlinalg.Vector(action)
        else:
            # RHS is an adjlinalg.Vector. Its derivative is therefore zero.
            raise libadjoint.exceptions.LibadjointErrorNotImplemented("No derivative method for constant RHS.")

    def dependencies(self):
        return self.deps

    def coefficients(self):
        return self.coeffs

    def __str__(self):
        return hashlib.md5(str(self.form)).hexdigest()


class NonlinearRHS(RHS):
    '''For nonlinear problems, the source term isn't assembled in the usual way.
    If the nonlinear problem is given as
      F(u) = 0,
    we annotate it as
      M.u = M.u - F(u) .
    So in order to actually assemble the right-hand side term,
    we first need to solve F(u) = 0 to find the specific u,
    and then multiply that by the mass matrix.'''

    def __init__(self, form, F, u, bcs, mass, solver_parameters, J):
        '''form is M.u - F(u). F is the nonlinear equation, F(u) := 0.'''
        RHS.__init__(self, form)
        self.F = F
        self.u = u
        self.bcs = bcs
        self.mass = mass
        self.solver_parameters = solver_parameters
        self.J = J or backend.derivative(F, u)

        # We want to mark that the RHS term /also/ depends on
        # the previous value of u, as that's what we need to initialise
        # the nonlinear solver.
        var = adjglobals.adj_variables[self.u]
        self.ic_var = None

        if backend.parameters["adjoint"]["fussy_replay"]:
            can_depend = True
            try:
                prev_var = find_previous_variable(var)
            except:
                can_depend = False

            if can_depend:
                self.ic_var = prev_var
                self.deps += [self.ic_var]
                self.coeffs += [u]
            else:
                self.ic_copy = backend.Function(u)
                self.ic_var = None

    def __call__(self, dependencies, values):
        assert isinstance(self.form, ufl.form.Form)

        ic = self.u.function_space()  # by default, initialise with a blank function in the solution FunctionSpace
        if hasattr(self, "ic_copy"):
            ic = self.ic_copy

        replace_map = {}

        for i in range(len(self.deps)):
            if self.deps[i] in dependencies:
                j = dependencies.index(self.deps[i])
                if self.deps[i] == self.ic_var:
                    ic = values[j].data  # ahah, we have found an initial condition!
                else:
                    replace_map[self.coeffs[i]] = values[j].data

        current_F = backend.replace(self.F, replace_map)
        current_J = backend.replace(self.J, replace_map)
        u = backend.Function(ic)
        current_F = backend.replace(current_F, {self.u: u})
        current_J = backend.replace(current_J, {self.u: u})

        vec = adjlinalg.Vector(None)
        vec.nonlinear_form = current_F
        vec.nonlinear_u = u
        vec.nonlinear_bcs = self.bcs
        vec.nonlinear_J = current_J

        return vec

    def derivative_action(self, dependencies, values, variable, contraction_vector, hermitian):
        '''If variable is the variable for the initial condition, we want to ignore it,
        and set the derivative to zero. Assuming the solver converges, the sensitivity of
        the solution to the initial condition should be extremely small, and computing it
        is very difficult (one would have to do a little adjoint solve to compute it).
        Even I'm not that fussy.'''

        if variable == self.ic_var:
            deriv_value = values[dependencies.index(variable)].data
            return adjlinalg.Vector(None, fn_space=deriv_value.function_space())
        else:
            return RHS.derivative_action(self, dependencies, values, variable, contraction_vector, hermitian)

    def second_derivative_action(self, dependencies, values, inner_variable, inner_contraction_vector, outer_variable, hermitian, action):
        '''If variable is the variable for the initial condition, we want to ignore it,
        and set the derivative to zero. Assuming the solver converges, the sensitivity of
        the solution to the initial condition should be extremely small, and computing it
        is very difficult (one would have to do a little adjoint solve to compute it).
        Even I'm not that fussy.'''

        if inner_variable == self.ic_var or outer_variable == self.ic_var:
            deriv_value = values[dependencies.index(outer_variable)].data
            return adjlinalg.Vector(None, fn_space=deriv_value.function_space())
        else:
            return RHS.second_derivative_action(self, dependencies, values, inner_variable, inner_contraction_vector, outer_variable, hermitian, action)

    def derivative_assembly(self, dependencies, values, variable, hermitian):
        replace_map = {}

        for i in range(len(self.deps)):
            if self.deps[i] == self.ic_var:
                continue
            j = dependencies.index(self.deps[i])
            replace_map[self.coeffs[i]] = values[j].data

        diff_var = values[dependencies.index(variable)].data

        current_form = backend.replace(self.form, replace_map)
        deriv = backend.derivative(current_form, diff_var)

        if hermitian:
            deriv = backend.adjoint(deriv)
            bcs = [utils.homogenize(bc) for bc in self.bcs if isinstance(bc, backend.DirichletBC)] + [bc for bc in self.bcs if not isinstance(bc, backend.DirichletBC)]
        else:
            bcs = self.bcs

        return adjlinalg.Matrix(deriv, bcs=bcs)


def adj_get_forward_equation(i):
    (fwd_var, lhs, rhs) = adjglobals.adjointer.get_forward_equation(i)

    # We needed to cheat the annotation when we registered a nonlinear solve.
    # However, if we want to actually differentiate the form (e.g. to compute
    # the dependency of the form on a ScalarParameter) we're going to need
    # the real F(u) = 0 back again. So let's fetch it here:
    if hasattr(rhs, 'nonlinear_form'):
        lhs = rhs.nonlinear_form
        fwd_var.nonlinear_u = rhs.nonlinear_u
        rhs = 0
    else:
        lhs = lhs.data
        rhs = rhs.data

    return (fwd_var, lhs, rhs)
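

# A minimal sketch (kept as comments; assumed plain-dolfin API, not part of this
# module) of what RHS.derivative_action computes for a concrete form: the action
# of dF/du, or of its adjoint, on a given contraction function.
#
#   import dolfin
#   mesh = dolfin.UnitSquareMesh(4, 4)
#   V = dolfin.FunctionSpace(mesh, "CG", 1)
#   u = dolfin.Function(V)
#   v = dolfin.TestFunction(V)
#   F = u**2 * v * dolfin.dx                           # a simple nonlinear form F(u)
#   du = dolfin.TrialFunction(V)
#   dF = dolfin.derivative(F, u, du)                   # as in derivative_action above
#   w = dolfin.Function(V)                             # the contraction vector
#   action = dolfin.action(dF, w)                      # hermitian=False branch
#   adj_action = dolfin.action(dolfin.adjoint(dF), w)  # hermitian=True branch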
| lgpl-3.0 | -4,020,333,220,749,036,500 | 38.507407 | 167 | 0.630355 | false | 3.926021 | false | false | false |
loadimpact/http2-test | fabfile.py | 1 | 11989 | """
We need two special hostnames to connect to:
StationA
and
StationB
"""


from __future__ import with_statement, print_function
from fabric.api import local, settings, abort, run, sudo, cd, hosts, env, execute
from fabric.contrib.console import confirm
from fabric.operations import put, get
from fabric.contrib.project import rsync_project

import re
import subprocess as sp
import os.path
from StringIO import StringIO


current_dir = os.path.dirname(os.path.realpath(__file__))


VNC_LICENSE = [
    "xxxxx-xxxxx-xxxxx-xxxxx-xxxxx"
]


# TODO: Put a proper deployment mechanism here.
env.key_filename = '/home/alcides/.ssh/zunzun_ec2_keypair_0.pem'

StationA_H = '[email protected]'
StationB_H = '[email protected]'
Beefy_H = '[email protected]'
# TODO: Make the IP number below deployment-specific...
Beefy_InternalIP = '192.168.112.131'
StationA_InternalIP = '192.168.112.129'
StationB_InternalIP = '192.168.112.130'
HomeDir_Name = "ubuntu"


@hosts(StationA_H)
def StationA():
    """
    Copies code to StationA
    """
    rsync_project(
        local_dir = "scripts/StationA",
        remote_dir = ("/home/{HomeDir_Name}/".format(HomeDir_Name=HomeDir_Name))
    )
    run("ln -sf /home/{HomeDir_Name}/StationA/onstartup.py /home/{HomeDir_Name}/onstartup.py".format(HomeDir_Name=HomeDir_Name))


@hosts(StationB_H)
def setup_dns_masq():
    sudo("apt-get install -y dnsmasq")
    put(
        local_path = StringIO("addn-hosts=/home/{HomeDir_Name}/dnsmasq_more.conf\n".format(HomeDir_Name=HomeDir_Name)),
        remote_path = "/etc/dnsmasq.conf",
        use_sudo=True)


@hosts(StationB_H)
def StationB():
    """
    Copies both the chrome plugin and the DNSMasq watcher
    """
    rsync_project(
        local_dir = "scripts/StationB",
        remote_dir = ("/home/{HomeDir_Name}/".format(HomeDir_Name=HomeDir_Name))
    )
    rsync_project(
        local_dir = "scripts/StationA/chrome_captures_hars",
        remote_dir = (("/home/{HomeDir_Name}/StationB/".format(HomeDir_Name=HomeDir_Name)).format(HomeDir_Name=HomeDir_Name))
    )
    run("ln -sf /home/{HomeDir_Name}/StationB/onstartup.py /home/{HomeDir_Name}/onstartup.py".format(HomeDir_Name=HomeDir_Name))


@hosts(StationB_H)
def install_updatednsmasq_service():
    with settings(warn_only=True):
        sudo("service updatednsmasq stop")
    put(
        local_path = "scripts/StationB/configure_dnsmasq.py",
        remote_path = "/home/{HomeDir_Name}/StationB/configure_dnsmasq.py".format(HomeDir_Name=HomeDir_Name),
        use_sudo = True
    )
    put(
        local_path = StringIO("""
description "Update dnsmasq"
start on runlevel [2345]
stop on runlevel [!2345]
umask 022
console log
env PATH=/opt/openssl-1.0.2/bin/:/usr/bin:/usr/local/bin:/usr/sbin:/bin
export PATH
env LD_LIBRARY_PATH=/opt/openssl-1.0.2/lib
export LD_LIBRARY_PATH
env USER={HomeDir_Name}
export USER
script
exec /usr/bin/python /home/{HomeDir_Name}/StationB/configure_dnsmasq.py
end script
""".format(HomeDir_Name=HomeDir_Name)),
        remote_path = "/etc/init/updatednsmasq.conf",
        use_sudo=True )
    sudo("service updatednsmasq start")


@hosts(Beefy_H)
def Beefy():
    sudo("apt-get update")
    sudo("apt-get -y install libgmp-dev")


@hosts(Beefy_H)
def BeefyRehMimic():
    with settings(warn_only=True):
        sudo("service mimic stop")
    put(
        local_path = "dist/build/reh-mimic/reh-mimic",
        remote_path = "/home/{HomeDir_Name}/reh-mimic".format(HomeDir_Name=HomeDir_Name)
    )
    run("chmod ugo+x /home/{HomeDir_Name}/reh-mimic".format(HomeDir_Name=HomeDir_Name))
    sudo("rm /home/{HomeDir_Name}/mimic -rf".format(HomeDir_Name=HomeDir_Name) )
    rsync_project(
        local_dir = "mimic",
        remote_dir = "/home/{HomeDir_Name}/".format(HomeDir_Name=HomeDir_Name),
    )
    put(
        local_path = "scripts/mimic.conf",
        remote_path = "/etc/init/mimic.conf",
        use_sudo = True
    )
    sudo("touch /root/.rnd")
    sudo("service mimic start")


@hosts(Beefy_H, StationA_H, StationB_H )
def configure_logging():
    if env.host_string == StationA_H:
        put(
            local_path = StringIO("""$template Logentries,"199fb2e1-8227-4f73-9150-70a34a5d5e0c %HOSTNAME% %syslogtag%%msg%\\n"
*.* @@api.logentries.com:10000;Logentries"""),
            remote_path = "/etc/rsyslog.d/70-logentries.conf",
            use_sudo = True )
    elif env.host_string == StationB_H:
        put(
            local_path = StringIO("""$template Logentries,"3d2fd756-407a-4764-b130-1dd6f22a1b62 %HOSTNAME% %syslogtag%%msg%\\n"
*.* @@api.logentries.com:10000;Logentries"""),
            remote_path = "/etc/rsyslog.d/70-logentries.conf",
            use_sudo = True )
    else:
        put(
            local_path = StringIO("""$template Logentries,"7551d4e0-fa76-466f-8547-8c9a347a9363 %HOSTNAME% %syslogtag%%msg%\\n"
*.* @@api.logentries.com:10000;Logentries"""),
            remote_path = "/etc/rsyslog.d/70-logentries.conf",
            use_sudo = True )

    sudo("service rsyslog restart")
    # Check logging works...
    sudo("logger -t test Hello there Logentries")


@hosts (StationA_H, StationB_H)
def deploy_specific():
    if env.host_string == StationA_H:
        print("StationA deploy")
        StationA()
    elif env.host_string == StationB_H:
        print("StationB deploy")
        StationB()
    else:
        print("Beefy station deploy")
        Beefy()


@hosts(StationA_H, StationB_H)
def apt_stations():
    sudo("apt-get update")
    sudo("apt-get install -y xutils xbase-clients xfonts-base xfonts-75dpi xfonts-100dpi")
    sudo("apt-get install -y python-pip")
    sudo("apt-get install -y xdotool")
    sudo("apt-get install -y xfwm4")


@hosts(StationA_H, StationB_H)
def pythonlibs():
    sudo("pip install python-daemon>=2.0")
    sudo("pip install raven")


@hosts(Beefy_H, StationA_H, StationB_H)
def ssl():
    """
    Copies Openssl and curl to the target hosts...
    """
    sudo("mkdir -p /opt/openssl-1.0.2/")
    sudo(("chown {HomeDir_Name} /opt/openssl-1.0.2/".format(HomeDir_Name=HomeDir_Name)))
    rsync_project(
        local_dir = "/opt/openssl-1.0.2",
        remote_dir = "/opt/",
        extra_opts="-avz"
    )
    put(
        local_path = "scripts/ca-certificates.crt",
        remote_path = "/etc/ssl/certs/ca-certificates.crt",
        use_sudo = True
    )


@hosts(Beefy_H, StationA_H, StationB_H)
def ca():
    """
    Copies the ca certificate to the home...
    """
    put(
        local_path = "mimic-here/config/ca/cacert.pem",
        remote_path = ("/home/{HomeDir_Name}/cacert.pem".format(HomeDir_Name=HomeDir_Name)),
        use_sudo = True
    )


@hosts(StationA_H, StationB_H)
def install_vnc():
    """
    """
    # run("curl -L -o VNC.tar.gz https://www.realvnc.com/download/binary/1720/")
    # run("tar xvf VNC-5.2.3-Linux-x64-ANY.tar.gz")
    put(
        local_path = "scripts/VNC-5.2.3-Linux-x64-ANY.tar.gz",
        remote_path = ("/home/{HomeDir_Name}/VNC-5.2.3-Linux-x64-ANY.tar.gz".format(HomeDir_Name=HomeDir_Name)),
        use_sudo = True
    )
    run(("tar -xzf /home/{HomeDir_Name}/VNC-5.2.3-Linux-x64-ANY.tar.gz".format(HomeDir_Name=HomeDir_Name)))
    # Get a handier name....
    run("rm -rf vnc")
    run(("mv /home/{HomeDir_Name}/VNC-5.2.3-Linux-x64 /home/{HomeDir_Name}/vnc".format(HomeDir_Name=HomeDir_Name)))
    sudo(("/home/{HomeDir_Name}/vnc/vnclicense -add {VncLicense}".format(
        HomeDir_Name= HomeDir_Name,
        VncLicense = VNC_LICENSE[0]
    )))
    # Will demand some form of interactive input...
    run(("mkdir -p /home/{HomeDir_Name}/.vnc/".format(HomeDir_Name=HomeDir_Name)))
    run(("mkdir -p /home/{HomeDir_Name}/.vnc/config.d/".format(HomeDir_Name=HomeDir_Name)))
    sudo(("/home/{HomeDir_Name}/vnc/vncpasswd /home/{HomeDir_Name}/.vnc/config.d/Xvnc".format(HomeDir_Name=HomeDir_Name)))
    vnc_fix_permissions()


@hosts(StationA_H, StationB_H)
def vnc_fix_permissions():
    sudo(("chown {HomeDir_Name} /home/{HomeDir_Name}/.vnc -R").format(HomeDir_Name=HomeDir_Name))


@hosts(StationA_H, StationB_H)
def install_vnc_xstartup():
    run(("mkdir -p /home/{HomeDir_Name}/.vnc/".format(HomeDir_Name=HomeDir_Name)))
    run(("mkdir -p /home/{HomeDir_Name}/.vnc/config.d/".format(HomeDir_Name=HomeDir_Name)))
    put(
        local_path = "scripts/vnc-xstartup",
        remote_path = ("/home/{HomeDir_Name}/.vnc/xstartup".format(HomeDir_Name=HomeDir_Name))
    )
    run("chmod ugo+x /home/{HomeDir_Name}/.vnc/xstartup".format(HomeDir_Name=HomeDir_Name))
    put(
        local_path = "scripts/xvncfontpath",
        remote_path = ("/home/{HomeDir_Name}/.vnc/config.d/xvncfontpath".format(HomeDir_Name=HomeDir_Name))
    )


@hosts(StationA_H, StationB_H)
def setup_google_chrome():
    put(
        local_path = "scripts/google-chrome-stable_current_amd64.deb",
        remote_path = ("/home/{HomeDir_Name}/google-chrome-stable_current_amd64.deb".format(HomeDir_Name=HomeDir_Name)),
        use_sudo = True
    )
    really_setup_google_chrome()


@hosts(Beefy_H, StationA_H, StationB_H)
def ensure_local_hosts():
    # Get the contents of /etc/hosts
    local_file = StringIO()
    get(
        local_path = local_file,
        remote_path = "/etc/hosts",
        use_sudo = True
    )
    hosts_file = local_file.getvalue()
    snippet = """# DO NOT EDIT BELOW BY HAND
{Beefy_InternalIP} instr.httpdos.com
192.168.112.129 ip-192-168-112-129
192.168.112.130 ip-192-168-112-130
192.168.112.131 ip-192-168-112-131
# END DO NOT EDIT BELOW""".format(
        StationA_InternalIP = StationA_InternalIP,
        Beefy_InternalIP = Beefy_InternalIP
    )
    mo = re.search(r"# DO NOT EDIT BELOW BY HAND\n(.*?)\n# END DO NOT EDIT BELOW", hosts_file, re.DOTALL)
    if mo:
        part_before = hosts_file[:mo.start(0)]
        part_after = hosts_file[mo.end(0):]
        hosts_file = part_before + snippet + part_after
    else:
        hosts_file += "\n" + snippet
    put(
        local_path = StringIO(hosts_file),
        remote_path = "/etc/hosts",
        use_sudo = True
    )


@hosts(StationA_H, StationB_H)
def really_setup_google_chrome():
    sudo("apt-get update")
    sudo(("apt-get -f install -y".format(HomeDir_Name=HomeDir_Name)))
    sudo("apt-get install -y --fix-missing xdg-utils")
    sudo(("dpkg -i --force-depends /home/{HomeDir_Name}/google-chrome-stable_current_amd64.deb".format(HomeDir_Name=HomeDir_Name)))
    sudo(("apt-get -f install -y".format(HomeDir_Name=HomeDir_Name)))


@hosts(StationA_H, StationB_H)
def setup_vnc_service():
    put(
        local_path = "scripts/vncserv-{HomeDir_Name}.conf".format(HomeDir_Name=HomeDir_Name),
        remote_path = "/etc/init/vncserv.conf",
        use_sudo = True
    )
    put(
        local_path = "scripts/undaemon.py",
        remote_path = "/home/{HomeDir_Name}/undaemon.py".format(HomeDir_Name=HomeDir_Name)
    )
    run("chmod ugo+x /home/{HomeDir_Name}/undaemon.py".format(HomeDir_Name=HomeDir_Name))
    with settings(warn_only=True):
        sudo(
            "service vncserv start"
        )


@hosts(StationA_H, StationB_H)
def disable_lightdm():
    contents = StringIO("manual")
    put(
        local_path = contents,
        remote_path = "/etc/init/lightdm.override",
        use_sudo=True
    )


@hosts(StationA_H, StationB_H)
def touch_xauthority():
    run("touch $HOME/.Xauthority")


@hosts(StationA_H, StationB_H)
def deploy():
    execute(apt_stations)
    execute(setup_dns_masq)
    execute(setup_google_chrome)
    execute(deploy_specific)
    execute(touch_xauthority)
    execute(disable_lightdm)
    execute(StationA)
    execute(StationB)
    execute(Beefy)
    execute(ca)
    execute(ssl)
    execute(install_vnc)
    execute(install_vnc_xstartup)
    execute(ensure_local_hosts)
    execute(setup_vnc_service)
    execute(pythonlibs)
    execute(BeefyRehMimic)
    execute(install_updatednsmasq_service)
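

# A small convenience task (an assumed addition, not in the original fabfile)
# showing how these host groups are typically exercised; run it with e.g.
#   fab -f fabfile.py uptime_all
@hosts(Beefy_H, StationA_H, StationB_H)
def uptime_all():
    """Print uptime on every station, as a quick connectivity check."""
    run("uptime")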
| bsd-3-clause | -2,658,566,617,386,937,000 | 29.820051 | 131 | 0.63967 | false | 2.970515 | true | false | false |
iceflow/aws-demo | s3/s3-bucket-copy/existing-bucket-copy/solutions/2-different-account-same-region/check_sqs_list.py | 1 | 1564 | #!/usr/bin/python
# -*- coding: utf8 -*-

from pprint import pprint
import sys, os
import random
import json
import gzip
import boto3

s3 = boto3.resource('s3')
client = boto3.client('sqs')

QUEUE_ENDPOINT = 'https://eu-west-1.queue.amazonaws.com/888250974927/s3-copy-list'
DST_BUCKET = 'ireland-leo-test'


def check_queue_status(qurl):
    #print('check_queue_status(%s)'%(qurl))
    #return {'number':0}
    response = client.get_queue_attributes(
        QueueUrl=qurl,
        AttributeNames=[
            'All'
        ]
    )
    #pprint(response)
    #{u'Attributes': {'ApproximateNumberOfMessages': '1',
    message_number = 0
    if 'Attributes' in response:
        if 'ApproximateNumberOfMessages' in response['Attributes'] and 'ApproximateNumberOfMessagesNotVisible' in response['Attributes']:
            message_number = int(response['Attributes']['ApproximateNumberOfMessages'])
            not_visible_message_number = int(response['Attributes']['ApproximateNumberOfMessagesNotVisible'])
            if message_number > 0 or not_visible_message_number > 0:
                #print('%04d/%04d : %s'%(message_number, not_visible_message_number, qurl))
                pass

    return {'number': message_number}


if __name__ == '__main__':
    qurl_endpoint = sys.argv[1]
    q_number = int(sys.argv[2])

    total_number = 0
    for pos in xrange(q_number):
        response = check_queue_status('{0}-{1}'.format(qurl_endpoint, pos + 1))
        total_number += response['number']

    print total_number * 100
    sys.exit(0)
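
# Example invocation (illustrative; assumes q_number suffixed queues named
# '<endpoint>-1' ... '<endpoint>-N', as built by the loop above):
#   python check_sqs_list.py https://eu-west-1.queue.amazonaws.com/<account>/s3-copy-list 4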
| gpl-3.0 | 2,473,053,559,471,726,000 | 25.066667 | 137 | 0.641944 | false | 3.491071 | false | false | false |
deculler/DataScienceTableDemos | CalGrads/timetable.py | 1 | 7684 | from datascience import Table
import numpy as np
from scipy.interpolate import UnivariateSpline


class TimeTable(Table):
    """Table with a designated column as a sequence of times in the first column."""

    def __init__(self, *args, time_column='Year'):
        Table.__init__(self, *args)
        self.time_column = time_column

    def clone_bare(self):
        return TimeTable(time_column=self.time_column)

    def clone_time(self):
        return self.clone_bare().with_column(self.time_column, self[self.time_column])

    @classmethod
    def from_table(cls, tbl, time_col):
        ttbl = cls(time_column=time_col)
        for label in tbl.labels:
            ttbl[label] = tbl[label]
        return ttbl

    def __getattr__(self, name):
        def wrapper(*args, **kwargs):
            # Wrap superclass method to coerce result back to TimeTable
            tbl = getattr(Table, name)(self, *args, **kwargs)
            if isinstance(tbl, Table) and self.time_column in tbl.labels:
                return TimeTable.from_table(tbl, self.time_column)
            else:
                return tbl
        if hasattr(Table, name):
            return wrapper
        else:
            raise AttributeError(name)

    @classmethod
    def by_time(cls, tbl, time_col, category_col, collect_col, collect=sum):
        """Construct a time table by aggregating rows of each category by time."""
        tbl_by_year = tbl.select([category_col, time_col, collect_col]).pivot(category_col, time_col,
                                                                              collect_col, collect=collect)
        return cls(tbl_by_year.labels, time_column=time_col).append(tbl_by_year)

    @property
    def categories(self):
        return [label for label in self.labels if label != self.time_column]

    # TimeTable methods utilizing time_column

    def order_cols(self):
        """Create a TimeTable with categories ordered by the values in the last row."""
        def col_key(label):
            return self.row(self.num_rows - 1)[self.labels.index(label)]
        order = sorted(self.categories, key=col_key, reverse=True)
        tbl = self.copy()
        for label in order:
            tbl.move_to_end(label)
        return tbl

    def oplot(self, **kwargs):
        return self.order_cols().plot(self.time_column, **kwargs)

    def top(self, n):
        """Create a new TimeTable containing the n largest columns."""
        ttbl = self.order_cols()
        return ttbl.select(ttbl.labels[0:n + 1])

    def after(self, timeval):
        return self.where(self[self.time_column] >= timeval)

    def sum_rows(self):
        """Sum the rows in a TimeTable besides the time column."""
        tbl = self.drop(self.time_column)
        return [sum(row) for row in tbl.rows]

    def apply_cols(self, fun):
        """Apply a function to the non-time columns of a TimeTable."""
        return Table().with_columns([(lbl, fun(self[lbl])) for lbl in self.categories])

    def apply_all(self, fun):
        ttbl = TimeTable(time_column=self.time_column)
        for lbl in self.labels:
            if lbl == self.time_column:
                ttbl[lbl] = self[self.time_column]
            else:
                ttbl[lbl] = self.apply(fun, lbl)
        return ttbl

    def ratio(self, tbl_denom):
        """Create the ratio of a TimeTable to a matching one."""
        rtbl = TimeTable(time_column=self.time_column).with_column(self.time_column, self[self.time_column])
        for label in self.categories:
            rtbl[label] = self[label] / tbl_denom[label]
        return rtbl

    def normalize(self, col_label):
        """Normalize each column of a TimeTable by a particular one."""
        rtbl = TimeTable(time_column=self.time_column).with_column(self.time_column, self[self.time_column])
        for label in self.categories:
            rtbl[label] = self[label] / self[col_label]
        return rtbl

    def delta(self):
        """Construct a TimeTable of successive differences down each non-time column."""
        delta_tbl = self.clone_bare()
        delta_tbl[self.time_column] = self[self.time_column][1:]
        for col in self.categories:
            delta_tbl[col] = self[col][1:] - self[col][:-1]
        return delta_tbl

    def fill(self, interval=1):
        times = [t for t in np.arange(self[self.time_column][0], self[self.time_column][-1] + interval, interval)]
        ftbl = TimeTable(time_column=self.time_column).with_column(self.time_column, times)
        for col in self.categories:
            spl = UnivariateSpline(self[self.time_column], self[col])
            ftbl[col] = spl(times)
        return ftbl

    def interp(self, interval=1):
        times = [t for t in np.arange(self[self.time_column][0], self[self.time_column][-1] + interval, interval)]
        ftbl = TimeTable(time_column=self.time_column).with_column(self.time_column, times)
        for col in self.categories:
            ftbl[col] = np.interp(times, self[self.time_column], self[col])
        return ftbl

    def rel_delta(self):
        """Construct a TimeTable of successive relative differences down each non-time column."""
        delta_tbl = self.clone_bare()
        delta_tbl[self.time_column] = self[self.time_column][1:]
        time_delta = self[self.time_column][1:] - self[self.time_column][:-1]
        for col in self.categories:
            delta_tbl[col] = (1 + (self[col][1:] - self[col][:-1]) / self[col][:-1]) / time_delta
        return delta_tbl

    def norm_by_row(self, base_row=0):
        """Normalize the columns of a TimeTable by a row."""
        normed_tbl = self.clone_time()
        for label in self.categories:
            normed_tbl[label] = self[label] / self[label][base_row]
        return normed_tbl

    def norm_by_time(self, time):
        return self.norm_by_row(np.where(self[self.time_column] == time)[0][0])

    def sum_cols(self):
        """Sum the columns of a TimeTable."""
        csum = 0
        for c in self.categories:
            csum += self[c]
        return csum

    def fraction_cols(self):
        """Convert each column to a fraction by row."""
        total = self.sum_cols()
        ftbl = self.clone_time()
        for lbl in self.categories:
            ftbl[lbl] = self[lbl] / total
        return ftbl

    def forecast_table(self, past, ahead, inc=1):
        """Project a TimeTable forward. inc must match the interval."""
        last_time = self[self.time_column][-1]
        past_times = self[self.time_column][-past - 1:-1]
        fore_time = np.arange(last_time + inc, last_time + inc + ahead, inc)
        def project(lbl):
            m, b = np.polyfit(past_times, self[lbl][-past - 1:-1], 1)
            return [m * time + b for time in fore_time]
        xtbl = Table().with_columns([(self.time_column, fore_time)] + [(label, project(label)) for label in self.categories])
        return self.copy().append(xtbl)

    def extend_table(self, ahead, inc=1):
        """Project a TimeTable forward from the last interval. inc must match the interval."""
        last_time = self[self.time_column][-1]
        fore_time = np.arange(last_time + inc, last_time + inc + ahead, inc)
        def project(lbl):
            b = self[lbl][-1]
            m = self[lbl][-1] - self[lbl][-2]
            return [m * (time + 1) * inc + b for time in range(ahead)]
        xtbl = Table().with_columns([(self.time_column, fore_time)] + [(label, project(label)) for label in self.categories])
        return self.copy().append(xtbl)
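

# A minimal usage sketch (toy data; an assumed addition, not part of the original module).
if __name__ == '__main__':
    tt = TimeTable(time_column='Year')
    tt['Year'] = [2010, 2011, 2012, 2013]
    tt['Grads'] = [100, 120, 150, 160]
    print(tt.delta())                           # successive differences per column
    print(tt.norm_by_row())                     # normalize each column by its first row
    print(tt.forecast_table(past=3, ahead=2))   # linear extrapolation, two steps out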
| bsd-2-clause | 1,163,413,192,845,707,000 | 40.989071 | 125 | 0.587585 | false | 3.735537 | false | false | false |
elliotthill/django-oscar | oscar/apps/order/migrations/0019_auto__chg_field_order_billing_address__chg_field_order_user__chg_field.py | 1 | 37418 | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models

from oscar.core.compat import AUTH_USER_MODEL, AUTH_USER_MODEL_NAME


class Migration(SchemaMigration):

    def forwards(self, orm):

        # Changing field 'Order.billing_address'
        db.alter_column(u'order_order', 'billing_address_id', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['order.BillingAddress'], null=True, on_delete=models.SET_NULL))

        # Changing field 'Order.user'
        db.alter_column(u'order_order', 'user_id', self.gf('django.db.models.fields.related.ForeignKey')(null=True, on_delete=models.SET_NULL, to=orm[AUTH_USER_MODEL]))

        # Changing field 'Order.shipping_address'
        db.alter_column(u'order_order', 'shipping_address_id', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['order.ShippingAddress'], null=True, on_delete=models.SET_NULL))

    def backwards(self, orm):

        # Changing field 'Order.billing_address'
        db.alter_column(u'order_order', 'billing_address_id', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['order.BillingAddress'], null=True))

        # Changing field 'Order.user'
        db.alter_column(u'order_order', 'user_id', self.gf('django.db.models.fields.related.ForeignKey')(null=True, to=orm[AUTH_USER_MODEL]))

        # Changing field 'Order.shipping_address'
        db.alter_column(u'order_order', 'shipping_address_id', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['order.ShippingAddress'], null=True))

    models = {
        u'address.country': {
            'Meta': {'ordering': "('-display_order', 'name')", 'object_name': 'Country'},
            'display_order': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0', 'db_index': 'True'}),
            'is_shipping_country': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
            'iso_3166_1_a2': ('django.db.models.fields.CharField', [], {'max_length': '2', 'primary_key': 'True'}),
            'iso_3166_1_a3': ('django.db.models.fields.CharField', [], {'max_length': '3', 'null': 'True', 'db_index': 'True'}),
            'iso_3166_1_numeric': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'db_index': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'printable_name': ('django.db.models.fields.CharField', [], {'max_length': '128'})
        },
        u'auth.group': {
            'Meta': {'object_name': 'Group'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        u'auth.permission': {
            'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        AUTH_USER_MODEL: {
            'Meta': {'object_name': AUTH_USER_MODEL_NAME},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        u'catalogue.attributeentity': {
            'Meta': {'object_name': 'AttributeEntity'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255', 'blank': 'True'}),
            'type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'entities'", 'to': u"orm['catalogue.AttributeEntityType']"})
        },
        u'catalogue.attributeentitytype': {
            'Meta': {'object_name': 'AttributeEntityType'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255', 'blank': 'True'})
        },
        u'catalogue.attributeoption': {
            'Meta': {'object_name': 'AttributeOption'},
            'group': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'options'", 'to': u"orm['catalogue.AttributeOptionGroup']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'option': ('django.db.models.fields.CharField', [], {'max_length': '255'})
        },
        u'catalogue.attributeoptiongroup': {
            'Meta': {'object_name': 'AttributeOptionGroup'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '128'})
        },
        u'catalogue.category': {
            'Meta': {'ordering': "['full_name']", 'object_name': 'Category'},
            'depth': ('django.db.models.fields.PositiveIntegerField', [], {}),
            'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'full_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
            'numchild': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
            'path': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
            'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255'})
        },
        u'catalogue.option': {
            'Meta': {'object_name': 'Option'},
            'code': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '128'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'type': ('django.db.models.fields.CharField', [], {'default': "'Required'", 'max_length': '128'})
        },
        u'catalogue.product': {
            'Meta': {'ordering': "['-date_created']", 'object_name': 'Product'},
            'attributes': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['catalogue.ProductAttribute']", 'through': u"orm['catalogue.ProductAttributeValue']", 'symmetrical': 'False'}),
            'categories': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['catalogue.Category']", 'through': u"orm['catalogue.ProductCategory']", 'symmetrical': 'False'}),
            'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'date_updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),
            'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_discountable': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'variants'", 'null': 'True', 'to': u"orm['catalogue.Product']"}),
            'product_class': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'products'", 'null': 'True', 'to': u"orm['catalogue.ProductClass']"}),
            'product_options': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['catalogue.Option']", 'symmetrical': 'False', 'blank': 'True'}),
            'rating': ('django.db.models.fields.FloatField', [], {'null': 'True'}),
            'recommended_products': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['catalogue.Product']", 'symmetrical': 'False', 'through': u"orm['catalogue.ProductRecommendation']", 'blank': 'True'}),
            'related_products': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'relations'", 'blank': 'True', 'to': u"orm['catalogue.Product']"}),
            'score': ('django.db.models.fields.FloatField', [], {'default': '0.0', 'db_index': 'True'}),
            'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255'}),
            'status': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '128', 'null': 'True', 'blank': 'True'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'upc': ('django.db.models.fields.CharField', [], {'max_length': '64', 'unique': 'True', 'null': 'True', 'blank': 'True'})
        },
        u'catalogue.productattribute': {
            'Meta': {'ordering': "['code']", 'object_name': 'ProductAttribute'},
            'code': ('django.db.models.fields.SlugField', [], {'max_length': '128'}),
            'entity_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['catalogue.AttributeEntityType']", 'null': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'option_group': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['catalogue.AttributeOptionGroup']", 'null': 'True', 'blank': 'True'}),
            'product_class': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'attributes'", 'null': 'True', 'to': u"orm['catalogue.ProductClass']"}),
            'required': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'type': ('django.db.models.fields.CharField', [], {'default': "'text'", 'max_length': '20'})
        },
        u'catalogue.productattributevalue': {
            'Meta': {'object_name': 'ProductAttributeValue'},
            'attribute': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['catalogue.ProductAttribute']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'product': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'attribute_values'", 'to': u"orm['catalogue.Product']"}),
            'value_boolean': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
            'value_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
            'value_entity': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['catalogue.AttributeEntity']", 'null': 'True', 'blank': 'True'}),
            'value_file': ('django.db.models.fields.files.FileField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'value_float': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
            'value_image': ('django.db.models.fields.files.ImageField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'value_integer': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
            'value_option': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['catalogue.AttributeOption']", 'null': 'True', 'blank': 'True'}),
            'value_richtext': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'value_text': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'})
        },
        u'catalogue.productcategory': {
            'Meta': {'ordering': "['-is_canonical']", 'object_name': 'ProductCategory'},
            'category': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['catalogue.Category']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_canonical': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
            'product': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['catalogue.Product']"})
        },
        u'catalogue.productclass': {
            'Meta': {'ordering': "['name']", 'object_name': 'ProductClass'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'options': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['catalogue.Option']", 'symmetrical': 'False', 'blank': 'True'}),
            'requires_shipping': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '128'}),
            'track_stock': ('django.db.models.fields.BooleanField', [], {'default': 'True'})
        },
        u'catalogue.productrecommendation': {
            'Meta': {'object_name': 'ProductRecommendation'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'primary': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'primary_recommendations'", 'to': u"orm['catalogue.Product']"}),
'ranking': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'recommendation': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['catalogue.Product']"})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'customer.communicationeventtype': {
'Meta': {'object_name': 'CommunicationEventType'},
'category': ('django.db.models.fields.CharField', [], {'default': "u'Order related'", 'max_length': '255'}),
'code': ('django.db.models.fields.SlugField', [], {'max_length': '128'}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'date_updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'email_body_html_template': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'email_body_template': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'email_subject_template': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'sms_template': ('django.db.models.fields.CharField', [], {'max_length': '170', 'null': 'True', 'blank': 'True'})
},
u'order.billingaddress': {
'Meta': {'object_name': 'BillingAddress'},
'country': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['address.Country']"}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'line1': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'line2': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'line3': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'line4': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'postcode': ('oscar.models.fields.UppercaseCharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
'search_text': ('django.db.models.fields.CharField', [], {'max_length': '1000'}),
'state': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'})
},
u'order.communicationevent': {
'Meta': {'ordering': "['-date_created']", 'object_name': 'CommunicationEvent'},
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'event_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['customer.CommunicationEventType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'order': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'communication_events'", 'to': u"orm['order.Order']"})
},
u'order.line': {
'Meta': {'object_name': 'Line'},
'est_dispatch_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'line_price_before_discounts_excl_tax': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),
'line_price_before_discounts_incl_tax': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),
'line_price_excl_tax': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),
'line_price_incl_tax': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),
'order': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'lines'", 'to': u"orm['order.Order']"}),
'partner': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'order_lines'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['partner.Partner']"}),
'partner_line_notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'partner_line_reference': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'partner_name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'partner_sku': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['catalogue.Product']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'quantity': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'status': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'stockrecord': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['partner.StockRecord']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'unit_cost_price': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '12', 'decimal_places': '2', 'blank': 'True'}),
'unit_price_excl_tax': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '12', 'decimal_places': '2', 'blank': 'True'}),
'unit_price_incl_tax': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '12', 'decimal_places': '2', 'blank': 'True'}),
'unit_retail_price': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '12', 'decimal_places': '2', 'blank': 'True'}),
'upc': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'})
},
u'order.lineattribute': {
'Meta': {'object_name': 'LineAttribute'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'line': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'attributes'", 'to': u"orm['order.Line']"}),
'option': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'line_attributes'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['catalogue.Option']"}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
u'order.lineprice': {
'Meta': {'ordering': "('id',)", 'object_name': 'LinePrice'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'line': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'prices'", 'to': u"orm['order.Line']"}),
'order': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'line_prices'", 'to': u"orm['order.Order']"}),
'price_excl_tax': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),
'price_incl_tax': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),
'quantity': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'shipping_excl_tax': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '12', 'decimal_places': '2'}),
'shipping_incl_tax': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '12', 'decimal_places': '2'})
},
u'order.order': {
'Meta': {'ordering': "['-date_placed']", 'object_name': 'Order'},
'basket_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'billing_address': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['order.BillingAddress']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'currency': ('django.db.models.fields.CharField', [], {'default': "'GBP'", 'max_length': '12'}),
'date_placed': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}),
'guest_email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'number': ('django.db.models.fields.CharField', [], {'max_length': '128', 'db_index': 'True'}),
'shipping_address': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['order.ShippingAddress']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'shipping_code': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '128', 'blank': 'True'}),
'shipping_excl_tax': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '12', 'decimal_places': '2'}),
'shipping_incl_tax': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '12', 'decimal_places': '2'}),
'shipping_method': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'site': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['sites.Site']"}),
'status': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'total_excl_tax': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),
'total_incl_tax': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'orders'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['{}']".format(AUTH_USER_MODEL)})
},
u'order.orderdiscount': {
'Meta': {'object_name': 'OrderDiscount'},
'amount': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '12', 'decimal_places': '2'}),
'category': ('django.db.models.fields.CharField', [], {'default': "'Basket'", 'max_length': '64'}),
'frequency': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'message': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'offer_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'offer_name': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'db_index': 'True'}),
'order': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'discounts'", 'to': u"orm['order.Order']"}),
'voucher_code': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'db_index': 'True'}),
'voucher_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'})
},
u'order.ordernote': {
'Meta': {'object_name': 'OrderNote'},
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'date_updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'message': ('django.db.models.fields.TextField', [], {}),
'note_type': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True'}),
'order': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'notes'", 'to': u"orm['order.Order']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['{}']".format(AUTH_USER_MODEL), 'null': 'True'})
},
u'order.paymentevent': {
'Meta': {'ordering': "['-date_created']", 'object_name': 'PaymentEvent'},
'amount': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'event_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['order.PaymentEventType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'lines': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['order.Line']", 'through': u"orm['order.PaymentEventQuantity']", 'symmetrical': 'False'}),
'order': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'payment_events'", 'to': u"orm['order.Order']"}),
'reference': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
'shipping_event': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'payment_events'", 'null': 'True', 'to': u"orm['order.ShippingEvent']"})
},
u'order.paymenteventquantity': {
'Meta': {'object_name': 'PaymentEventQuantity'},
'event': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'line_quantities'", 'to': u"orm['order.PaymentEvent']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'line': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'payment_event_quantities'", 'to': u"orm['order.Line']"}),
'quantity': ('django.db.models.fields.PositiveIntegerField', [], {})
},
u'order.paymenteventtype': {
'Meta': {'ordering': "('name',)", 'object_name': 'PaymentEventType'},
'code': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '128'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '128'})
},
u'order.shippingaddress': {
'Meta': {'object_name': 'ShippingAddress'},
'country': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['address.Country']"}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'line1': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'line2': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'line3': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'line4': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'phone_number': ('oscar.models.fields.PhoneNumberField', [], {'max_length': '128', 'blank': 'True'}),
'postcode': ('oscar.models.fields.UppercaseCharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
'search_text': ('django.db.models.fields.CharField', [], {'max_length': '1000'}),
'state': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'})
},
u'order.shippingevent': {
'Meta': {'ordering': "['-date_created']", 'object_name': 'ShippingEvent'},
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'event_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['order.ShippingEventType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'lines': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'shipping_events'", 'symmetrical': 'False', 'through': u"orm['order.ShippingEventQuantity']", 'to': u"orm['order.Line']"}),
'notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'order': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'shipping_events'", 'to': u"orm['order.Order']"})
},
u'order.shippingeventquantity': {
'Meta': {'object_name': 'ShippingEventQuantity'},
'event': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'line_quantities'", 'to': u"orm['order.ShippingEvent']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'line': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'shipping_event_quantities'", 'to': u"orm['order.Line']"}),
'quantity': ('django.db.models.fields.PositiveIntegerField', [], {})
},
u'order.shippingeventtype': {
'Meta': {'ordering': "('name',)", 'object_name': 'ShippingEventType'},
'code': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '128'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'})
},
u'partner.partner': {
'Meta': {'object_name': 'Partner'},
'code': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '128'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'users': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'partners'", 'null': 'True', 'symmetrical': 'False', 'to': u"orm['{}']".format(AUTH_USER_MODEL)})
},
u'partner.stockrecord': {
'Meta': {'unique_together': "(('partner', 'partner_sku'),)", 'object_name': 'StockRecord'},
'cost_price': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '12', 'decimal_places': '2', 'blank': 'True'}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'date_updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'low_stock_threshold': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'num_allocated': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'num_in_stock': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'partner': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'stockrecords'", 'to': u"orm['partner.Partner']"}),
'partner_sku': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'price_currency': ('django.db.models.fields.CharField', [], {'default': "'GBP'", 'max_length': '12'}),
'price_excl_tax': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '12', 'decimal_places': '2', 'blank': 'True'}),
'price_retail': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '12', 'decimal_places': '2', 'blank': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'stockrecords'", 'to': u"orm['catalogue.Product']"})
},
u'sites.site': {
'Meta': {'ordering': "(u'domain',)", 'object_name': 'Site', 'db_table': "u'django_site'"},
'domain': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
}
}
complete_apps = ['order'] | bsd-3-clause | -2,684,955,528,462,074,000 | 90.043796 | 224 | 0.562163 | false | 3.565657 | false | false | false |
danger89/stanford-parser-clientserver | stanford_server.py | 1 | 3076 | #!/usr/bin/env jython
# -*- coding: utf-8 -*-
# Copyright 2014 by Melroy van den Berg
"""
Stanford Parser Server running on localhost
Requirements
------------
- Jython >= 2.7 (http://www.jython.org/downloads.html)
- Pyro4 (https://github.com/irmen/Pyro4)
"""
__author__ = "Melroy van den Berg <[email protected]>"
__version__ = "0.1"
import socket
from select import cpython_compatible_select as select
import sys
import Pyro4.core
import Pyro4.naming
from stanford_interface import StanfordParser
PYRO_NAME = 'stanford.server'
Pyro4.config.SERVERTYPE="thread" # Thread pool based
#Pyro4.config.SERVERTYPE="multiplex" # Select/poll based
hostname="localhost" #socket.gethostname()
class StanfordHelpParser(object):
"""
Helper class around the StanfordParser class
"""
def __init__(self):
"""
Setup the Stanford Parser
"""
# Jar file should be set inside the stanford_lib.py or add it manually to the class path
self.parser = StanfordParser(parser_file='./englishPCFG.ser.gz')
def parse(self, wordList):
"""
Parse the word list
"""
sentenceObject = self.parser.parse_wordlist(wordList)
return str(sentenceObject.get_parse())
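# Hedged client sketch (not executed here; assumes Pyro4 on the client side and
# that the name server started below is reachable from it):
#
#   import Pyro4
#   parser = Pyro4.Proxy("PYRONAME:stanford.server")
#   print(parser.parse(["The", "quick", "brown", "fox", "jumps", "."]))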
print("initializing services... servertype=%s" % Pyro4.config.SERVERTYPE)
# start a name server (only with a broadcast server when NOT running on localhost)
nameserverUri, nameserverDaemon, broadcastServer = Pyro4.naming.startNS(host=hostname)
print("got a Nameserver, uri=%s" % nameserverUri)
print("ns daemon location string=%s" % nameserverDaemon.locationStr)
print("ns daemon sockets=%s" % nameserverDaemon.sockets)
if broadcastServer:
print("bc server socket=%s (fileno %d)" % (broadcastServer.sock, broadcastServer.fileno()))
# create a Pyro daemon
pyrodaemon=Pyro4.core.Daemon(host=hostname)
print("daemon location string=%s" % pyrodaemon.locationStr)
print("daemon sockets=%s" % pyrodaemon.sockets)
# register a server object with the daemon
serveruri=pyrodaemon.register(StanfordHelpParser())
print("server uri=%s" % serveruri)
# register it with the embedded nameserver directly
nameserverDaemon.nameserver.register(PYRO_NAME,serveruri)
print("Stanford Server is running...")
# below is our custom event loop.
while True:
# create sets of the socket objects we will be waiting on
# (a set provides fast lookup compared to a list)
nameserverSockets = set(nameserverDaemon.sockets)
pyroSockets = set(pyrodaemon.sockets)
rs=[]
if broadcastServer:
rs=[broadcastServer] # only the broadcast server is directly usable as a select() object
rs.extend(nameserverSockets)
rs.extend(pyroSockets)
    rs, _, _ = select(rs, [], [], 2)
eventsForNameserver=[]
eventsForDaemon=[]
for s in rs:
if s is broadcastServer:
broadcastServer.processRequest()
elif s in nameserverSockets:
eventsForNameserver.append(s)
elif s in pyroSockets:
eventsForDaemon.append(s)
if eventsForNameserver:
nameserverDaemon.events(eventsForNameserver)
if eventsForDaemon:
pyrodaemon.events(eventsForDaemon)
nameserverDaemon.close()
if broadcastServer:
broadcastServer.close()
pyrodaemon.close()
| apache-2.0 | 5,812,613,648,402,874,000 | 29.156863 | 92 | 0.75065 | false | 3.248152 | false | false | false |
qPCR4vir/orange3 | Orange/tests/test_ada_boost.py | 1 | 3357 | # Test methods with long descriptive names can omit docstrings
# pylint: disable=missing-docstring
import unittest
import numpy as np
from Orange.data import Table
from Orange.classification import TreeLearner
from Orange.regression import TreeRegressionLearner
from Orange.ensembles import SklAdaBoostLearner, SklAdaBoostRegressionLearner
from Orange.evaluation import CrossValidation, CA, RMSE
class TestSklAdaBoostLearner(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.iris = Table("iris")
cls.housing = Table("housing")
def test_adaboost(self):
learn = SklAdaBoostLearner()
results = CrossValidation(self.iris, [learn], k=3)
ca = CA(results)
self.assertGreater(ca, 0.9)
self.assertLess(ca, 0.99)
def test_adaboost_base_estimator(self):
np.random.seed(0)
stump_estimator = TreeLearner(max_depth=1)
tree_estimator = TreeLearner()
stump = SklAdaBoostLearner(base_estimator=stump_estimator)
tree = SklAdaBoostLearner(base_estimator=tree_estimator)
results = CrossValidation(self.iris, [stump, tree], k=3)
ca = CA(results)
self.assertLess(ca[0], ca[1])
def test_predict_single_instance(self):
learn = SklAdaBoostLearner()
m = learn(self.iris)
ins = self.iris[0]
m(ins)
_, _ = m(ins, m.ValueProbs)
def test_predict_table(self):
learn = SklAdaBoostLearner()
m = learn(self.iris)
m(self.iris)
_, _ = m(self.iris, m.ValueProbs)
def test_predict_numpy(self):
learn = SklAdaBoostLearner()
m = learn(self.iris)
_, _ = m(self.iris.X, m.ValueProbs)
def test_adaboost_adequacy(self):
learner = SklAdaBoostLearner()
self.assertRaises(ValueError, learner, self.housing)
def test_adaboost_reg(self):
learn = SklAdaBoostRegressionLearner()
results = CrossValidation(self.housing, [learn], k=3)
_ = RMSE(results)
def test_adaboost_reg_base_estimator(self):
np.random.seed(0)
stump_estimator = TreeRegressionLearner(max_depth=1)
tree_estimator = TreeRegressionLearner()
stump = SklAdaBoostRegressionLearner(base_estimator=stump_estimator)
tree = SklAdaBoostRegressionLearner(base_estimator=tree_estimator)
results = CrossValidation(self.housing, [stump, tree], k=3)
rmse = RMSE(results)
self.assertGreaterEqual(rmse[0], rmse[1])
def test_predict_single_instance_reg(self):
learn = SklAdaBoostRegressionLearner()
m = learn(self.housing)
ins = self.housing[0]
pred = m(ins)
self.assertGreaterEqual(pred, 0)
def test_predict_table_reg(self):
learn = SklAdaBoostRegressionLearner()
m = learn(self.housing)
pred = m(self.housing)
self.assertEqual(len(self.housing), len(pred))
self.assertGreater(all(pred), 0)
def test_predict_numpy_reg(self):
learn = SklAdaBoostRegressionLearner()
m = learn(self.housing)
pred = m(self.housing.X)
self.assertEqual(len(self.housing), len(pred))
self.assertGreater(all(pred), 0)
def test_adaboost_adequacy_reg(self):
learner = SklAdaBoostRegressionLearner()
self.assertRaises(ValueError, learner, self.iris)
| bsd-2-clause | 61,752,926,166,089,920 | 33.96875 | 77 | 0.657432 | false | 3.443077 | true | false | false |
kernsuite-debian/obit | python/OTWindow.py | 1 | 7357 | """ OTWindow allows running wxPython widgets
Widgets using wxPython must all be created and run in the same thread.
This class creates a wxPython App in a separate thread and allows starting
new widgets in this same thread.
New widgets can be created using the functions
newMsgWin(tw) create message windiow and execute TaskWindow tw
"""
# $Id: OTWindow.py 2 2008-06-10 15:32:27Z bill.cotton $
#-----------------------------------------------------------------------
# Copyright (C) 2006
# Associated Universities, Inc. Washington DC, USA.
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of
# the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public
# License along with this program; if not, write to the Free
# Software Foundation, Inc., 675 Massachusetts Ave, Cambridge,
# MA 02139, USA.
#
# Correspondence concerning this software should be addressed as follows:
# Internet email: [email protected].
# Postal address: William Cotton
# National Radio Astronomy Observatory
# 520 Edgemont Road
# Charlottesville, VA 22903-2475 USA
#-----------------------------------------------------------------------
class OTWindow:
def start(self):
""" start the GUI thread
"""
import thread
thread.start_new_thread(self.run, ())
def run(self):
"""
Note that OTWindow2 is first imported ***here***.
This is the second thread.
OTWindow2 imports wxPython, if we imported it at
the module level instead of in this function,
the import would occur in the main thread and
wxPython would not run correctly in the second thread.
The wxPython GUI MainLoop is run here (i.e. no return)
"""
################################################################
try:
import OTWindow2
self.app = OTWindow2.OTGUIApp()
self.started = True
self.app.MainLoop()
except TypeError:
self.app = None
except Exception, e:
self.app = None
#print "DEBUG: oh bugger untrapped exception in OTWindow.run"
#print e
def add_MsgWin(self, tw):
"""
New Task message widget
Send an event to the catcher window in the
other thread and tell it to create a MsgWin window.
tw = TaskWindow of task to be run
"""
################################################################
import OTWindow2, MsgWin
if self.app:
evt = OTWindow2.MsgWinEvt()
evt.evt_type = OTWindow2.EVT_NEW # Set event type
evt.evt_tw = tw # add task window
            self.app.catcher.AddPendingEvent(evt)
else:
OTWindow2.add_MsgWin(tw)
# end add_MsgWin
def SetLabel(self, Id, label):
"""
Set Widget label
Send an event to the catcher window in the other thread
and tell it to Setlabel on window Id to label
Id = widget Id
label = New text to label
"""
################################################################
import OTWindow2
evt = OTWindow2.MsgWinEvt()
evt.evt_type = OTWindow2.EVT_LABEL # Set event type
evt.evt_Id = Id # Set widget Id
evt.evt_label = label # add new label text
        self.app.catcher.AddPendingEvent(evt)
# end SetLabel
def Bind(self, Id, handler):
"""
Set Button event handler
Send an event to the catcher window in the other thread
and tell it to rebind the event handler on button Id
Id = widget Id
handler = new event handler
"""
################################################################
import OTWindow2
evt = OTWindow2.MsgWinEvt()
evt.evt_type = OTWindow2.EVT_BIND # Set event type
evt.evt_Id = Id # Set widget Id
evt.evt_handler = handler # add task window
        self.app.catcher.AddPendingEvent(evt)
# end Bind
def Update(self, Id):
"""
Update Widget Id
Send an event to the catcher window in the other thread
and tell it to refresh the display of widget Id
Id = widget Id
"""
################################################################
import OTWindow2
evt = OTWindow2.MsgWinEvt()
evt.evt_type = OTWindow2.EVT_UPDATE # Set event type
evt.evt_Id = Id # Set widget Id
        self.app.catcher.AddPendingEvent(evt)
# end Update
def Message(self, Id, message):
"""
Write messages in TextCtrl
Send an event to the catcher window in the other thread
and tell it to append message(s) in widget Id
Id = widget Id (a TextCtrl)
message = either a single string or an array of strings
"""
################################################################
import OTWindow2
evt = OTWindow2.MsgWinEvt()
evt.evt_type = OTWindow2.EVT_MESS # Set event type
evt.evt_Id = Id # Set widget Id
evt.evt_mess = message # add task message(s)
        self.app.catcher.AddPendingEvent(evt)
# end Message
# end class OTWindow
# Startup wxPython windowing
gui = OTWindow()
gui.started = False
gui.start()
# Externally callable routine to create a MsgWin (task message window)
def newMsgWin(tw):
"""
New task message window
Create a new task message window, run the task displaying messages
and handling communications
tw = TaskWindow for task to be executed.
"""
################################################################
# Be sure gui thread started
import time
while (not gui.started):
time.sleep(0.2)
gui.add_MsgWin(tw)
# end newMsgWin
def CallSetLabel (Id, label):
"""
Set label on widget Id
Id = widget Id
label = New text to label
"""
gui.SetLabel (Id, label)
# end CallSetLabel
def CallBind (Id, handler):
"""
Set Button event handler
Id = widget Id
handler = new event handler
"""
gui.Bind (Id, handler)
# end CallBind
def CallUpdate(Id):
"""
Update Widget Id
Id = widget Id
"""
gui.Update (Id)
# end CallUpdate
def CallMessage (Id, message):
"""
Set label on widget Id
Id = widget Id
message = either a single string or an array of strings
"""
gui.Message (Id, message)
# end CallMessage
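# Hedged usage sketch (tw, textId and buttonId are hypothetical names; a real
# TaskWindow instance comes from the accompanying TaskWindow module):
#
#   newMsgWin(tw)                                 # run the task in the GUI thread
#   CallMessage(textId, ["task started", "..."])  # append messages to a TextCtrl
#   CallSetLabel(buttonId, "Abort")               # relabel a button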
| gpl-2.0 | 7,236,081,742,020,023,000 | 31.991031 | 74 | 0.537447 | false | 4.34554 | false | false | false |
guh/guh-cli | nymea/logs.py | 1 | 20827 | # -*- coding: UTF-8 -*-
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# #
# Copyright (C) 2015 - 2018 Simon Stuerz <[email protected]> #
# #
# This file is part of nymea-cli. #
# #
# nymea-cli is free software: you can redistribute it and/or modify #
# it under the terms of the GNU General Public License as published by #
# the Free Software Foundation, version 2 of the License. #
# #
# nymea-cli is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with nymea-cli. If not, see <http://www.gnu.org/licenses/>. #
# #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
import datetime
import curses
import sys
import socket
import json
import select
import telnetlib
import string
import time
import nymea
import states
import devices
import actions
import events
import rules
global stateTypeIdCache
global actionTypeIdCache
global eventTypeIdCache
global deviceIdCache
global ruleIdCache
global logFilter
def log_window(nymeaHost, nymeaPort, params = None):
global screen
global screenHeight
global allLines
global topLineNum
global highlightLineNum
global up
global down
global commandId
global stateTypeIdCache
global actionTypeIdCache
global eventTypeIdCache
global deviceIdCache
global ruleIdCache
global logFilter
stateTypeIdCache = {}
actionTypeIdCache = {}
eventTypeIdCache = {}
deviceIdCache = {}
ruleIdCache = {}
logFilter = params
commandId = 0
# Create notification handler
print "Connecting notification handler..."
try:
tn = telnetlib.Telnet(nymeaHost, nymeaPort)
    except Exception:
        print "ERROR: notification socket could not connect to the nymea-server. \n"
return None
print "...OK \n"
#enable_notification(notificationSocket)
enable_notification(tn.get_socket())
create_log_window()
try:
x = None
        while x != ord('\n') and x != 27:
socket_list = [sys.stdin, tn.get_socket()]
read_sockets, write_sockets, error_sockets = select.select(socket_list , [], [])
for sock in read_sockets:
# notification messages:
if sock == tn.get_socket():
packet = tn.read_until("}\n")
packet = json.loads(packet)
if 'notification' in packet:
if packet['notification'] == "Logging.LogEntryAdded":
entry = packet['params']['logEntry']
line = get_log_entry_line(entry, True)
                            # scroll to bottom if the cursor was at the bottom
if topLineNum + highlightLineNum == len(allLines) - 1:
if line != None:
allLines.append(line)
scroll_to_bottom()
else:
if line != None:
allLines.append(line)
                                # flash to indicate that a new entry arrived
curses.flash()
draw_screen()
else:
                    x = screen.getch() # timeout of 50 ms (screen.timeout(50))
if x == curses.KEY_UP:
moveUpDown(up)
draw_screen()
elif x == curses.KEY_DOWN:
moveUpDown(down)
draw_screen()
elif x == ord(' '):
scroll_to_bottom()
draw_screen()
finally:
curses.endwin()
print "Log window closed."
tn.close()
print "Notification socket closed."
def create_log_window():
global screen
global screenHeight
global allLines
global topLineNum
global highlightLineNum
global up
global down
# init
up = -1
down = 1
screen = curses.initscr()
curses.start_color()
curses.init_pair(1,curses.COLOR_BLACK, curses.COLOR_GREEN)
curses.noecho()
curses.cbreak()
screen.keypad(1)
screen.timeout(50)
screen.clear()
screenHeight = curses.LINES - 2
#screen.addstr(1, 2, "Loading...", curses.COLOR_GREEN)
#draw_screen()
allLines = get_log_entry_lines()
scroll_to_bottom()
def scroll_to_bottom():
global screenHeight
global allLines
global topLineNum
global highlightLineNum
# scroll to bottom
if len(allLines) <= screenHeight:
topLineNum = 0
highlightLineNum = len(allLines) - 1
else:
topLineNum = len(allLines) - screenHeight
highlightLineNum = screenHeight - 1
def enable_notification(notifySocket):
global commandId
params = {}
commandObj = {}
commandObj['id'] = commandId
commandObj['method'] = "JSONRPC.SetNotificationStatus"
params['enabled'] = "true"
commandObj['params'] = params
command = json.dumps(commandObj) + '\n'
commandId = commandId + 1
notifySocket.send(command)
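# For reference, the command serialized above is one newline-terminated JSON
# object (commandId increments with every call):
#
#   {"id": 0, "method": "JSONRPC.SetNotificationStatus", "params": {"enabled": "true"}}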
def draw_screen():
global screen
global topLineNum
global screenHeight
global allLines
global highlightLineNum
hilightColors = curses.color_pair(1)
normalColors = curses.A_NORMAL
screen.erase()
screen.border(0)
curses.curs_set(1)
curses.curs_set(0)
top = topLineNum
bottom = topLineNum + screenHeight
for (index,line,) in enumerate(allLines[top:bottom]):
linenum = topLineNum + index
# highlight current line
if index != highlightLineNum:
screen.addstr(index + 1, 2, line, normalColors)
else:
screen.addstr(index + 1, 2, line, hilightColors)
screen.refresh()
def moveUpDown(direction):
global screenHeight
global allLines
global topLineNum
global highlightLineNum
global up
global down
nextLineNum = highlightLineNum + direction
# paging
if direction == up and highlightLineNum == 0 and topLineNum != 0:
topLineNum += up
return
elif direction == down and nextLineNum == screenHeight and (topLineNum + screenHeight) != len(allLines):
topLineNum += down
return
# scroll highlight line
if direction == up and (topLineNum != 0 or highlightLineNum != 0):
highlightLineNum = nextLineNum
elif direction == down and (topLineNum + highlightLineNum + 1) != len(allLines) and highlightLineNum != screenHeight:
highlightLineNum = nextLineNum
def list_logEntries():
    params = {}
response = nymea.send_command("Logging.GetLogEntries", params)
for i in range(len(response['params']['logEntries'])):
line = get_log_entry_line(response['params']['logEntries'][i])
print line
def get_log_entry_lines():
global logFilter
lines = []
response = nymea.send_command("Logging.GetLogEntries", logFilter)
for i in range(len(response['params']['logEntries'])):
line = get_log_entry_line(response['params']['logEntries'][i])
lines.append(line)
return lines
def get_log_entry_line(entry, checkFilter = False):
global stateTypeIdCache
global actionTypeIdCache
global eventTypeIdCache
global deviceIdCache
global ruleIdCache
global logFilter
if checkFilter:
if not verify_filter(entry):
return None
if entry['loggingLevel'] == "LoggingLevelInfo":
levelString = "(I)"
error = "-"
else:
levelString = "(A)"
error = entry['errorCode']
if entry['source'] == "LoggingSourceSystem":
deviceName = "nymea server"
sourceType = "System"
symbolString = "->"
sourceName = "Active changed"
if entry['active'] == True:
value = "active"
else:
value = "inactive"
if entry['source'] == "LoggingSourceStates":
typeId = entry['typeId']
sourceType = "State Changed"
symbolString = "->"
if typeId in stateTypeIdCache:
sourceName = stateTypeIdCache[typeId]
else:
stateType = states.get_stateType(typeId)
if stateType is not None:
sourceName = stateType["displayName"]
stateTypeIdCache[typeId] = sourceName
else:
sourceName = typeId
value = entry['value']
deviceName = get_device_name(entry)
if entry['source'] == "LoggingSourceActions":
typeId = entry['typeId']
sourceType = "Action executed"
symbolString = "()"
if typeId in actionTypeIdCache:
sourceName = actionTypeIdCache[typeId]
else:
actionType = actions.get_actionType(typeId)
if actionType is not None:
sourceName = actionType['displayName']
else:
sourceName = typeId
actionTypeIdCache[typeId] = sourceName
value = entry['value']
deviceName = get_device_name(entry)
if entry['source'] == "LoggingSourceEvents":
typeId = entry['typeId']
sourceType = "Event triggered"
symbolString = "()"
if typeId in eventTypeIdCache:
sourceName = eventTypeIdCache[typeId]
else:
eventType = events.get_eventType(typeId)
sourceName = eventType['displayName']
eventTypeIdCache[typeId] = sourceName
value = entry['value']
deviceName = get_device_name(entry)
if entry['source'] == "LoggingSourceRules":
typeId = entry['typeId']
if entry['eventType'] == "LoggingEventTypeTrigger":
sourceType = "Rule triggered"
sourceName = "triggered"
symbolString = "()"
value = ""
elif entry['eventType'] == "LoggingEventTypeActionsExecuted":
sourceType = "Rule executed"
sourceName = "actions"
symbolString = "()"
value = ""
elif entry['eventType'] == "LoggingEventTypeExitActionsExecuted":
sourceType = "Rule executed"
sourceName = "exit actions"
symbolString = "()"
value = ""
elif entry['eventType'] == "LoggingEventTypeEnabledChange":
sourceType = "Rule changed"
sourceName = "enabled"
symbolString = "->"
if entry['active']:
value = "true"
else:
value = "false"
else:
sourceType = "Rule changed"
symbolString = "()"
sourceName = "active"
if entry['active']:
value = "active"
else:
value = "inactive"
if typeId in ruleIdCache:
deviceName = ruleIdCache[typeId]
else:
rule = rules.get_rule_description(typeId)
if rule is not None and 'name' in rule:
deviceName = rule['name']
else:
deviceName = typeId
ruleIdCache[typeId] = deviceName
timestamp = datetime.datetime.fromtimestamp(entry['timestamp']/1000)
line = "%s %s | %19s | %38s | %20s %3s %20s | %10s" %(levelString.encode('utf-8'), timestamp, sourceType.encode('utf-8'), deviceName.encode('utf-8'), sourceName.encode('utf-8'), symbolString.encode('utf-8'), value.encode('utf-8'), error.encode('utf-8'))
return line
def create_device_logfilter():
params = {}
deviceIds = []
deviceId = devices.select_configured_device()
if not deviceId:
return None
deviceIds.append(deviceId)
params['deviceIds'] = deviceIds
return params
def create_device_state_logfilter():
params = {}
deviceIds = []
typeIds = []
loggingSources = []
loggingSources.append("LoggingSourceStates")
params['loggingSources'] = loggingSources
deviceId = devices.select_configured_device()
if not deviceId:
return None
deviceIds.append(deviceId)
params['deviceIds'] = deviceIds
device = devices.get_device(deviceId)
stateType = states.select_stateType(device['deviceClassId'])
if not stateType:
return None
typeIds.append(stateType['id'])
params['typeIds'] = typeIds
return params
def create_rule_logfilter():
params = {}
sources = []
ruleIds = []
rule = rules.select_rule()
if not rule:
return None
ruleIds.append(rule['id'])
sources.append("LoggingSourceRules")
params['loggingSources'] = sources
params['typeIds'] = ruleIds
return params
def create_last_time_logfilter(minutes):
    offsetSeconds = 60 * minutes
params = {}
timeFilters = []
timeFilter = {}
timeFilter['startDate'] = int(time.time()) - offsetSeconds
timeFilters.append(timeFilter)
params['timeFilters'] = timeFilters
return params
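# Sketch of the structure returned above for minutes=60 (epoch seconds, so the
# actual number depends on the current time):
#
#   {"timeFilters": [{"startDate": 1514764800}]}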
def create_logfilter():
params = {}
boolTypes = ["yes","no"]
# Devices
selection = nymea.get_selection("Do you want to filter for \"Devices\"? ", boolTypes)
if boolTypes[selection] == "yes":
deviceIds = []
deviceId = devices.select_configured_device()
deviceIds.append(deviceId)
finished = False
while not finished:
selection = nymea.get_selection("Do you want to add an other \"Device\"? ", boolTypes)
if boolTypes[selection] == "no":
finished = True
break
deviceId = devices.select_configured_device()
if not deviceId:
params['deviceIds'] = deviceIds
return params
deviceIds.append(deviceId)
params['deviceIds'] = deviceIds
# LoggingSources
selection = nymea.get_selection("Do you want to filter for \"LoggingSource\"? ", boolTypes)
if boolTypes[selection] == "yes":
sources = []
finished = False
loggingSources = ["LoggingSourceSystem", "LoggingSourceEvents", "LoggingSourceActions", "LoggingSourceStates", "LoggingSourceRules"]
selection = nymea.get_selection("Please select a \"LoggingSource\": ", loggingSources)
if selection:
sources.append(loggingSources[selection])
else:
finished = True
while not finished:
selection = nymea.get_selection("Do you want to add an other \"LoggingSource\"? ", boolTypes)
if boolTypes[selection] == "no":
finished = True
break
            selection = nymea.get_selection("Please select a \"LoggingSource\": ", loggingSources)
if selection:
sources.append(loggingSources[selection])
else:
finished = True
break
params['loggingSources'] = sources
# LoggingLevel
selection = nymea.get_selection("Do you want to filter for \"LoggingLevel\"? ", boolTypes)
if boolTypes[selection] == "yes":
levels = []
loggingLevels = ["LoggingLevelInfo", "LoggingLevelAlert"]
selection = nymea.get_selection("Please select a \"LoggingLevel\": ", loggingLevels)
if selection:
levels.append(loggingLevels[selection])
params['loggingLevels'] = levels
# LoggingEventType
selection = nymea.get_selection("Do you want to filter for \"LoggingEventType\"? ", boolTypes)
if boolTypes[selection] == "yes":
types = []
loggingEventTypes = ["LoggingEventTypeTrigger", "LoggingEventTypeActiveChange", "LoggingEventTypeEnabledChange", "LoggingEventTypeActionsExecuted", "LoggingEventTypeExitActionsExecuted"]
selection = nymea.get_selection("Please select a \"LoggingEventType\": ", loggingEventTypes)
if selection:
types.append(loggingEventTypes[selection])
params['eventTypes'] = types
# Value
selection = nymea.get_selection("Do you want to filter for certain log \"Values\"? ", boolTypes)
if boolTypes[selection] == "yes":
values = []
finished = False
value = raw_input("Please enter value which should be filtered out: ")
values.append(value)
while not finished:
selection = nymea.get_selection("Do you want to add an other \"Value\"? ", boolTypes)
if boolTypes[selection] == "no":
finished = True
break
value = raw_input("Please enter value which should be filtered out: ")
values.append(value)
params['values'] = values
# Times
selection = nymea.get_selection("Do you want to add a \"TimeFilter\"? ", boolTypes)
if boolTypes[selection] == "yes":
timeFilters = []
finished = False
timeFilters.append(create_time_filter())
while not finished:
selection = nymea.get_selection("Do you want to add an other \"TimeFilter\"? ", boolTypes)
if boolTypes[selection] == "no":
finished = True
break
timeFilters.append(create_time_filter())
params['timeFilters'] = timeFilters
nymea.print_json_format(params)
nymea.debug_stop()
return params
def create_time_filter():
timeFilter = {}
boolTypes = ["yes","no"]
selection = nymea.get_selection("Do you want to define a \"Start date\"?", boolTypes)
if boolTypes[selection] == "yes":
timeFilter['startDate'] = raw_input("Please enter the \"Start date\": ")
selection = nymea.get_selection("Do you want to define a \"End date\"?", boolTypes)
if boolTypes[selection] == "yes":
timeFilter['endDate'] = raw_input("Please enter the \"End date\": ")
return timeFilter
def get_device_name(entry):
global deviceIdCache
    deviceName = None
if entry['deviceId'] in deviceIdCache:
deviceName = deviceIdCache[entry['deviceId']]
else:
device = devices.get_device(entry['deviceId'])
deviceName = device['name']
deviceIdCache[entry['deviceId']] = deviceName
return deviceName
def verify_filter(entry):
global logFilter
if not logFilter:
return True
# check if we should filter for deviceIds
if 'deviceIds' in logFilter:
found = False
for deviceId in logFilter['deviceIds']:
if deviceId == entry['deviceId']:
found = True
break
if not found:
return False
# check if we should filter for ruleId
if 'typeIds' in logFilter:
found = False
for ruleId in logFilter['typeIds']:
if ruleId == entry['typeId']:
found = True
break
if not found:
return False
# check if we should filter for loggingSource
if 'loggingSources' in logFilter:
found = False
for loggingSource in logFilter['loggingSources']:
if loggingSource == entry['source']:
found = True
break
if not found:
return False
# check if we should filter for values
if 'values' in logFilter:
found = False
for value in logFilter['values']:
if value == entry['value']:
found = True
break
if not found:
return False
# check if we should filter for loggingLevels
if 'loggingLevels' in logFilter:
found = False
for loggingLevel in logFilter['loggingLevels']:
if loggingLevel == entry['loggingLevel']:
found = True
break
if not found:
return False
return True
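# Hedged usage sketch (assumes the nymea module already holds a connection;
# the host and port below are assumptions, not fixed values):
#
#   params = create_device_logfilter()
#   if params is not None:
#       log_window("127.0.0.1", 2222, params)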
| gpl-2.0 | 4,584,651,468,606,077,000 | 31.798425 | 257 | 0.561867 | false | 4.444516 | false | false | false |
pgjones/nusoft | nusoft/credentials.py | 1 | 1609 | #!/usr/bin/env python
#
# Credentials
#
# Collates and stores in memory user credentials needed for downloads
#
# Author P G Jones - 2014-03-23 <[email protected]> : New file.
####################################################################################################
import getpass
import logging
logger = logging.getLogger(__name__)
class Credentials(object):
""" Receives and stores credentials.
    :ivar _username: download username
    :ivar _password: download password that goes with the username
    :ivar _token: token used instead of a username and password
"""
def __init__(self, token=None):
""" Initialise the credentials.
:param token: token to use
"""
self._token = token
self._username = None
self._password = None
if token is not None:
logger.debug("Using a token")
def authenticate(self):
""" Returns either a token or the username and password.
:return: token or username password tuple.
"""
if self._token is not None:
return self._token
elif self._username is not None:
return (self._username, self._password)
else:
self._username = raw_input("Username:").replace('\n', '')
self._password = getpass.getpass("Password:").replace('\n', '')
return (self._username, self._password)
def reset(self):
""" Reset the known username and password."""
self._username = None
self._password = None
logger.warning("Username/password has been reset")
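# Minimal usage sketch: authenticate() prompts only when no token was given,
# and reset() lets a caller retry after the server rejects the credentials:
#
#   creds = Credentials()
#   auth = creds.authenticate()   # a token string or a (username, password) tuple
#   creds.reset()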
| mit | -1,743,196,074,267,851,000 | 33.234043 | 100 | 0.567433 | false | 4.623563 | false | false | false |
IuryAlves/code-challenge | app/load_data.py | 1 | 1329 | #!/usr/bin/env python
# coding: utf-8
from __future__ import (
print_function,
unicode_literals,
absolute_import
)
import argparse
import json
import os
def get_path():
return unicode(os.path.abspath('.'))
def parse_args():
_parser = argparse.ArgumentParser()
_parser.add_argument('--fixture', type=str, help='fixture file to load', default='properties.json')
_parser.add_argument('--fixture_folder', type=str,
default='models/fixtures',
help='where fixtures are stored.'
)
return _parser.parse_args()
def main(base_path):
properties_to_save = []
args = parse_args()
path = os.path.sep.join([base_path,
'app',
args.fixture_folder,
args.fixture])
with open(path) as file_:
data = json.load(file_)
properties = data['properties']
for property_ in properties:
property_.pop('id')
properties_to_save.append(Property(**property_))
Property.objects.insert(properties_to_save)
return len(properties_to_save)
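# Expected fixture shape, inferred from main() above ("id" is stripped before
# the bulk insert; the remaining field names are assumptions):
#
#   {
#       "properties": [
#           {"id": 1, "title": "Example property", "price": 100000}
#       ]
#   }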
if __name__ == '__main__':
from app.models.properties import Property
base_path = get_path()
out = main(base_path)
print("{} objects saved".format(out)) | mit | -2,716,428,965,792,732,700 | 25.6 | 103 | 0.576373 | false | 4.076687 | false | false | false |
LMSlay/wiper | modules/radare.py | 1 | 2920 | # -*- coding: utf-8 -*-
# This file is part of Viper - https://github.com/botherder/viper
# See the file 'LICENSE' for copying permission.
import os
import sys
import getopt
from viper.common.out import *
from viper.common.abstracts import Module
from viper.core.session import __sessions__
ext = ".bin"
run_radare = {'linux2': 'r2', 'darwin': 'r2',
'win32': 'r2'}
class Radare(Module):
cmd = 'r2'
description = 'Start Radare2'
authors = ['dukebarman']
def __init__(self):
self.is_64b = False
self.ext = ''
self.server = ''
def open_radare(self, filename):
directory = filename + ".dir"
if not os.path.exists(directory):
os.makedirs(directory)
destination = directory + "/executable" + self.ext
if not os.path.lexists(destination):
os.link(filename, destination)
command_line = '{} {}{}'.format(run_radare[sys.platform], self.server, destination)
os.system(command_line)
def run(self):
if not __sessions__.is_set():
self.log('error', "No session opened")
return
def usage():
self.log('', "usage: r2 [-h] [-s]")
def help():
usage()
self.log('', "")
self.log('', "Options:")
self.log('', "\t--help (-h)\tShow this help message")
self.log('', "\t--webserver (-w)\tStart web-frontend for radare2")
self.log('', "")
try:
opts, argv = getopt.getopt(self.args[0:], 'hw', ['help', 'webserver'])
except getopt.GetoptError as e:
self.log('', e)
return
for opt, value in opts:
if opt in ('-h', '--help'):
help()
return
elif opt in ('-w', '--webserver'):
self.server = "-c=H "
filetype = __sessions__.current.file.type
if 'x86-64' in filetype:
self.is_64b = True
arch = '64' if self.is_64b else '32'
if 'DLL' in filetype:
self.ext = '.dll'
to_print = [arch, 'bit DLL (Windows)']
if "native" in filetype:
to_print.append('perhaps a driver (.sys)')
self.log('info', ' '.join(to_print))
elif 'PE32' in filetype:
self.ext = '.exe'
self.log('info', ' '.join([arch, 'bit executable (Windows)']))
elif 'shared object' in filetype:
self.ext = '.so'
self.log('info', ' '.join([arch, 'bit shared object (linux)']))
elif 'ELF' in filetype:
self.ext = ''
self.log('info', ' '.join([arch, 'bit executable (linux)']))
else:
self.log('error', "Unknown binary")
try:
self.open_radare(__sessions__.current.file.path)
        except Exception:
self.log('error', "Unable to start Radare2")
| bsd-3-clause | 5,533,463,339,960,898,000 | 28.795918 | 91 | 0.510616 | false | 3.738796 | false | false | false |
ghetzel/webfriend | webfriend/rpc/base.py | 1 | 2491 | """
Implementation of the Chrome Remote DevTools debugging protocol.
See: https://chromedevtools.github.io/devtools-protocol
"""
from __future__ import absolute_import
from webfriend.rpc.event import Event
from uuid import uuid4
from collections import OrderedDict
import logging
class Base(object):
supports_events = True
domain = None
def __init__(self, tab):
if self.domain is None:
raise ValueError("Cannot instantiate an RPC proxy without a domain class property.")
self.tab = tab
self.callbacks = {}
def initialize(self):
pass
def call(self, method, expect_reply=True, reply_timeout=None, **params):
return self.tab.rpc(
'{}.{}'.format(self.domain, method),
expect_reply=expect_reply,
reply_timeout=reply_timeout,
**params
)
def enable(self):
if self.supports_events:
self.call('enable')
def disable(self):
if self.supports_events:
self.call('disable')
def call_boolean_response(self, method, field='result', **kwargs):
if self.call(method, **kwargs).get(field) is True:
return True
return False
def on(self, method, callback):
# normalize method name
if not method.startswith(self.domain + '.'):
method = '{}.{}'.format(self.domain, method)
# create handler dict if we need to
if method not in self.callbacks:
self.callbacks[method] = OrderedDict()
callback_id = '{}.event_{}'.format(self.domain, uuid4())
self.callbacks[method][callback_id] = callback
logging.debug('Registered event handler {} for event {}'.format(
callback_id,
method
))
return callback_id
def remove_handler(self, callback_id):
for _, callbacks in self.callbacks.items():
for id, _ in callbacks.items():
if callback_id == id:
del callbacks[callback_id]
return True
return False
def trigger(self, method, payload=None):
event = Event(self, method, payload)
if str(event) in self.callbacks:
for callback_id, callback in self.callbacks[str(event)].items():
if callable(callback):
response = callback(event)
if response is False:
break
return event
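
# Minimal usage sketch (added for illustration; Page and `tab` are
# hypothetical, not part of this file):
#
#   class Page(Base):
#       domain = 'Page'
#
#   page = Page(tab)
#   handler_id = page.on('loadEventFired', lambda event: print(event))
#   page.trigger('Page.loadEventFired', payload={})
#   page.remove_handler(handler_id)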
| bsd-2-clause | -6,752,092,870,117,730,000 | 27.306818 | 96 | 0.579285 | false | 4.562271 | false | false | false |
lgarren/spack | lib/spack/spack/test/spec_syntax.py | 1 | 20244 | ##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, [email protected], All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import pytest
import shlex
import spack
import spack.spec as sp
from spack.parse import Token
from spack.spec import Spec, parse, parse_anonymous_spec
from spack.spec import SpecParseError, RedundantSpecError
from spack.spec import AmbiguousHashError, InvalidHashError, NoSuchHashError
from spack.spec import DuplicateArchitectureError, DuplicateVariantError
from spack.spec import DuplicateDependencyError, DuplicateCompilerSpecError
# Sample output for a complex lexing.
complex_lex = [Token(sp.ID, 'mvapich_foo'),
Token(sp.DEP),
Token(sp.ID, '_openmpi'),
Token(sp.AT),
Token(sp.ID, '1.2'),
Token(sp.COLON),
Token(sp.ID, '1.4'),
Token(sp.COMMA),
Token(sp.ID, '1.6'),
Token(sp.PCT),
Token(sp.ID, 'intel'),
Token(sp.AT),
Token(sp.ID, '12.1'),
Token(sp.COLON),
Token(sp.ID, '12.6'),
Token(sp.ON),
Token(sp.ID, 'debug'),
Token(sp.OFF),
Token(sp.ID, 'qt_4'),
Token(sp.DEP),
Token(sp.ID, 'stackwalker'),
Token(sp.AT),
Token(sp.ID, '8.1_1e')]
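
# For reference (added): complex_lex corresponds to a spec string such as
# "mvapich_foo^[email protected]:1.4,1.6%[email protected]:12.6+debug~qt_4^[email protected]_1e",
# the form exercised by the lexing tests below.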
# Another sample lexer output with a kv pair.
kv_lex = [Token(sp.ID, 'mvapich_foo'),
Token(sp.ID, 'debug'),
Token(sp.EQ),
Token(sp.VAL, '4'),
Token(sp.DEP),
Token(sp.ID, '_openmpi'),
Token(sp.AT),
Token(sp.ID, '1.2'),
Token(sp.COLON),
Token(sp.ID, '1.4'),
Token(sp.COMMA),
Token(sp.ID, '1.6'),
Token(sp.PCT),
Token(sp.ID, 'intel'),
Token(sp.AT),
Token(sp.ID, '12.1'),
Token(sp.COLON),
Token(sp.ID, '12.6'),
Token(sp.ON),
Token(sp.ID, 'debug'),
Token(sp.OFF),
Token(sp.ID, 'qt_4'),
Token(sp.DEP),
Token(sp.ID, 'stackwalker'),
Token(sp.AT),
Token(sp.ID, '8.1_1e')]
class TestSpecSyntax(object):
# ========================================================================
# Parse checks
# ========================================================================
def check_parse(self, expected, spec=None, remove_arch=True):
"""Assert that the provided spec is able to be parsed.
If this is called with one argument, it assumes that the
string is canonical (i.e., no spaces and ~ instead of - for
variants) and that it will convert back to the string it came
from.
If this is called with two arguments, the first argument is
the expected canonical form and the second is a non-canonical
input to be parsed.
"""
if spec is None:
spec = expected
output = sp.parse(spec)
parsed = (" ".join(str(spec) for spec in output))
assert expected == parsed
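
    # Illustrative calls (added; the specs are made up): check_parse(
    # "[email protected]+debug") asserts the string round-trips unchanged, while
    # check_parse("[email protected]+debug", "x @1.2 +debug") asserts the second,
    # non-canonical form parses back to the first.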
def check_lex(self, tokens, spec):
"""Check that the provided spec parses to the provided token list."""
spec = shlex.split(spec)
lex_output = sp.SpecLexer().lex(spec)
for tok, spec_tok in zip(tokens, lex_output):
if tok.type == sp.ID or tok.type == sp.VAL:
assert tok == spec_tok
else:
# Only check the type for non-identifiers.
assert tok.type == spec_tok.type
def _check_raises(self, exc_type, items):
for item in items:
with pytest.raises(exc_type):
Spec(item)
# ========================================================================
# Parse checks
# ========================================================================
def test_package_names(self):
self.check_parse("mvapich")
self.check_parse("mvapich_foo")
self.check_parse("_mvapich_foo")
def test_anonymous_specs(self):
self.check_parse("%intel")
self.check_parse("@2.7")
self.check_parse("^zlib")
self.check_parse("+foo")
self.check_parse("arch=test-None-None", "platform=test")
self.check_parse('@2.7:')
def test_anonymous_specs_with_multiple_parts(self):
# Parse anonymous spec with multiple tokens
self.check_parse('@4.2: languages=go', 'languages=go @4.2:')
self.check_parse('@4.2: languages=go')
def test_simple_dependence(self):
self.check_parse("openmpi^hwloc")
self.check_parse("openmpi^hwloc^libunwind")
def test_dependencies_with_versions(self):
self.check_parse("openmpi^[email protected]")
self.check_parse("openmpi^[email protected]:")
self.check_parse("openmpi^hwloc@:1.4b7-rc3")
self.check_parse("openmpi^[email protected]:1.4b7-rc3")
def test_multiple_specs(self):
self.check_parse("mvapich emacs")
def test_multiple_specs_after_kv(self):
self.check_parse('mvapich cppflags="-O3 -fPIC" emacs')
self.check_parse('mvapich cflags="-O3" emacs',
'mvapich cflags=-O3 emacs')
def test_multiple_specs_long_second(self):
self.check_parse('mvapich [email protected]%intel cflags="-O3"',
'mvapich emacs @1.1.1 %intel cflags=-O3')
self.check_parse('mvapich cflags="-O3 -fPIC" emacs^ncurses%intel')
def test_full_specs(self):
self.check_parse(
"mvapich_foo"
"^[email protected]:1.4,1.6%[email protected]+debug~qt_4"
"^[email protected]_1e")
self.check_parse(
"mvapich_foo"
"^[email protected]:1.4,1.6%[email protected] debug=2 ~qt_4"
"^[email protected]_1e")
self.check_parse(
'mvapich_foo'
'^[email protected]:1.4,1.6%[email protected] cppflags="-O3" +debug~qt_4'
'^[email protected]_1e')
self.check_parse(
"mvapich_foo"
"^[email protected]:1.4,1.6%[email protected] debug=2 ~qt_4"
"^[email protected]_1e arch=test-redhat6-x86_32")
def test_canonicalize(self):
self.check_parse(
"mvapich_foo"
"^[email protected]:1.4,1.6%[email protected]:12.6+debug~qt_4"
"^[email protected]_1e",
"mvapich_foo "
"^[email protected],1.2:1.4%[email protected]:12.6+debug~qt_4 "
"^[email protected]_1e")
self.check_parse(
"mvapich_foo"
"^[email protected]:1.4,1.6%[email protected]:12.6+debug~qt_4"
"^[email protected]_1e",
"mvapich_foo "
"^[email protected]_1e "
"^[email protected],1.2:1.4%[email protected]:12.6~qt_4+debug")
self.check_parse(
"x^y@1,2:3,4%intel@1,2,3,4+a~b+c~d+e~f",
"x ^y~f+e~d+c~b+a@4,2:3,1%intel@4,3,2,1")
self.check_parse(
"x arch=test-redhat6-None "
"^y arch=test-None-x86_64 "
"^z arch=linux-None-None",
"x os=fe "
"^y target=be "
"^z platform=linux")
self.check_parse(
"x arch=test-debian6-x86_64 "
"^y arch=test-debian6-x86_64",
"x os=default_os target=default_target "
"^y os=default_os target=default_target")
self.check_parse("x^y", "x@: ^y@:")
def test_parse_errors(self):
errors = ['x@@1.2', 'x ^y@@1.2', '[email protected]::', 'x::']
self._check_raises(SpecParseError, errors)
def _check_hash_parse(self, spec):
"""Check several ways to specify a spec by hash."""
# full hash
self.check_parse(str(spec), '/' + spec.dag_hash())
# partial hash
self.check_parse(str(spec), '/ ' + spec.dag_hash()[:5])
# name + hash
self.check_parse(str(spec), spec.name + '/' + spec.dag_hash())
# name + version + space + partial hash
self.check_parse(
str(spec), spec.name + '@' + str(spec.version) +
' /' + spec.dag_hash()[:6])
def test_spec_by_hash(self, database):
specs = database.mock.db.query()
assert len(specs) # make sure something's in the DB
for spec in specs:
self._check_hash_parse(spec)
def test_dep_spec_by_hash(self, database):
mpileaks_zmpi = database.mock.db.query_one('mpileaks ^zmpi')
zmpi = database.mock.db.query_one('zmpi')
fake = database.mock.db.query_one('fake')
assert 'fake' in mpileaks_zmpi
assert 'zmpi' in mpileaks_zmpi
mpileaks_hash_fake = sp.Spec('mpileaks ^/' + fake.dag_hash())
assert 'fake' in mpileaks_hash_fake
assert mpileaks_hash_fake['fake'] == fake
mpileaks_hash_zmpi = sp.Spec(
'mpileaks %' + str(mpileaks_zmpi.compiler) +
' ^ / ' + zmpi.dag_hash())
assert 'zmpi' in mpileaks_hash_zmpi
assert mpileaks_hash_zmpi['zmpi'] == zmpi
assert mpileaks_hash_zmpi.compiler == mpileaks_zmpi.compiler
mpileaks_hash_fake_and_zmpi = sp.Spec(
'mpileaks ^/' + fake.dag_hash()[:4] + '^ / ' + zmpi.dag_hash()[:5])
assert 'zmpi' in mpileaks_hash_fake_and_zmpi
assert mpileaks_hash_fake_and_zmpi['zmpi'] == zmpi
assert 'fake' in mpileaks_hash_fake_and_zmpi
assert mpileaks_hash_fake_and_zmpi['fake'] == fake
def test_multiple_specs_with_hash(self, database):
mpileaks_zmpi = database.mock.db.query_one('mpileaks ^zmpi')
callpath_mpich2 = database.mock.db.query_one('callpath ^mpich2')
# name + hash + separate hash
specs = sp.parse('mpileaks /' + mpileaks_zmpi.dag_hash() +
'/' + callpath_mpich2.dag_hash())
assert len(specs) == 2
# 2 separate hashes
specs = sp.parse('/' + mpileaks_zmpi.dag_hash() +
'/' + callpath_mpich2.dag_hash())
assert len(specs) == 2
# 2 separate hashes + name
specs = sp.parse('/' + mpileaks_zmpi.dag_hash() +
'/' + callpath_mpich2.dag_hash() +
' callpath')
assert len(specs) == 3
# hash + 2 names
specs = sp.parse('/' + mpileaks_zmpi.dag_hash() +
' callpath' +
' callpath')
assert len(specs) == 3
# hash + name + hash
specs = sp.parse('/' + mpileaks_zmpi.dag_hash() +
' callpath' +
' / ' + callpath_mpich2.dag_hash())
assert len(specs) == 2
def test_ambiguous_hash(self, database):
x1 = Spec('a')
x1._hash = 'xy'
x1._concrete = True
x2 = Spec('a')
x2._hash = 'xx'
x2._concrete = True
database.mock.db.add(x1, spack.store.layout)
database.mock.db.add(x2, spack.store.layout)
# ambiguity in first hash character
self._check_raises(AmbiguousHashError, ['/x'])
# ambiguity in first hash character AND spec name
self._check_raises(AmbiguousHashError, ['a/x'])
def test_invalid_hash(self, database):
mpileaks_zmpi = database.mock.db.query_one('mpileaks ^zmpi')
zmpi = database.mock.db.query_one('zmpi')
mpileaks_mpich = database.mock.db.query_one('mpileaks ^mpich')
mpich = database.mock.db.query_one('mpich')
# name + incompatible hash
self._check_raises(InvalidHashError, [
'zmpi /' + mpich.dag_hash(),
'mpich /' + zmpi.dag_hash()])
# name + dep + incompatible hash
self._check_raises(InvalidHashError, [
'mpileaks ^mpich /' + mpileaks_zmpi.dag_hash(),
'mpileaks ^zmpi /' + mpileaks_mpich.dag_hash()])
def test_nonexistent_hash(self, database):
"""Ensure we get errors for nonexistant hashes."""
specs = database.mock.db.query()
# This hash shouldn't be in the test DB. What are the odds :)
no_such_hash = 'aaaaaaaaaaaaaaa'
hashes = [s._hash for s in specs]
assert no_such_hash not in [h[:len(no_such_hash)] for h in hashes]
self._check_raises(NoSuchHashError, [
'/' + no_such_hash,
'mpileaks /' + no_such_hash])
def test_redundant_spec(self, database):
"""Check that redundant spec constraints raise errors.
TODO (TG): does this need to be an error? Or should concrete
specs only raise errors if constraints cause a contradiction?
"""
mpileaks_zmpi = database.mock.db.query_one('mpileaks ^zmpi')
callpath_zmpi = database.mock.db.query_one('callpath ^zmpi')
dyninst = database.mock.db.query_one('dyninst')
mpileaks_mpich2 = database.mock.db.query_one('mpileaks ^mpich2')
redundant_specs = [
            # redundant compiler
'/' + mpileaks_zmpi.dag_hash() + '%' + str(mpileaks_zmpi.compiler),
            # redundant version
'mpileaks/' + mpileaks_mpich2.dag_hash() +
'@' + str(mpileaks_mpich2.version),
# redundant dependency
'callpath /' + callpath_zmpi.dag_hash() + '^ libelf',
# redundant flags
'/' + dyninst.dag_hash() + ' cflags="-O3 -fPIC"']
self._check_raises(RedundantSpecError, redundant_specs)
def test_duplicate_variant(self):
duplicates = [
'[email protected]+debug+debug',
'x ^[email protected]+debug debug=true',
'x ^[email protected] debug=false debug=true',
'x ^[email protected] debug=false ~debug'
]
self._check_raises(DuplicateVariantError, duplicates)
def test_duplicate_dependency(self):
self._check_raises(DuplicateDependencyError, ["x ^y ^y"])
def test_duplicate_compiler(self):
duplicates = [
"x%intel%intel",
"x%intel%gcc",
"x%gcc%intel",
"x ^y%intel%intel",
"x ^y%intel%gcc",
"x ^y%gcc%intel"
]
self._check_raises(DuplicateCompilerSpecError, duplicates)
def test_duplicate_architecture(self):
duplicates = [
"x arch=linux-rhel7-x86_64 arch=linux-rhel7-x86_64",
"x arch=linux-rhel7-x86_64 arch=linux-rhel7-ppc64le",
"x arch=linux-rhel7-ppc64le arch=linux-rhel7-x86_64",
"y ^x arch=linux-rhel7-x86_64 arch=linux-rhel7-x86_64",
"y ^x arch=linux-rhel7-x86_64 arch=linux-rhel7-ppc64le"
]
self._check_raises(DuplicateArchitectureError, duplicates)
def test_duplicate_architecture_component(self):
duplicates = [
"x os=fe os=fe",
"x os=fe os=be",
"x target=fe target=fe",
"x target=fe target=be",
"x platform=test platform=test",
"x os=fe platform=test target=fe os=fe",
"x target=be platform=test os=be os=fe"
]
self._check_raises(DuplicateArchitectureError, duplicates)
# ========================================================================
# Lex checks
# ========================================================================
def test_ambiguous(self):
# This first one is ambiguous because - can be in an identifier AND
# indicate disabling an option.
with pytest.raises(AssertionError):
self.check_lex(
complex_lex,
"mvapich_foo"
"^[email protected]:1.4,1.6%[email protected]:12.6+debug-qt_4"
"^[email protected]_1e"
)
# The following lexes are non-ambiguous (add a space before -qt_4)
# and should all result in the tokens in complex_lex
def test_minimal_spaces(self):
self.check_lex(
complex_lex,
"mvapich_foo"
"^[email protected]:1.4,1.6%[email protected]:12.6+debug -qt_4"
"^[email protected]_1e")
self.check_lex(
complex_lex,
"mvapich_foo"
"^[email protected]:1.4,1.6%[email protected]:12.6+debug~qt_4"
"^[email protected]_1e")
def test_spaces_between_dependences(self):
self.check_lex(
complex_lex,
"mvapich_foo "
"^[email protected]:1.4,1.6%[email protected]:12.6+debug -qt_4 "
"^stackwalker @ 8.1_1e")
self.check_lex(
complex_lex,
"mvapich_foo "
"^[email protected]:1.4,1.6%[email protected]:12.6+debug~qt_4 "
"^stackwalker @ 8.1_1e")
def test_spaces_between_options(self):
self.check_lex(
complex_lex,
"mvapich_foo "
"^_openmpi @1.2:1.4,1.6 %intel @12.1:12.6 +debug -qt_4 "
"^stackwalker @8.1_1e")
def test_way_too_many_spaces(self):
self.check_lex(
complex_lex,
"mvapich_foo "
"^ _openmpi @1.2 : 1.4 , 1.6 % intel @ 12.1 : 12.6 + debug - qt_4 "
"^ stackwalker @ 8.1_1e")
self.check_lex(
complex_lex,
"mvapich_foo "
"^ _openmpi @1.2 : 1.4 , 1.6 % intel @ 12.1 : 12.6 + debug ~ qt_4 "
"^ stackwalker @ 8.1_1e")
def test_kv_with_quotes(self):
self.check_lex(
kv_lex,
"mvapich_foo debug='4' "
"^ _openmpi @1.2 : 1.4 , 1.6 % intel @ 12.1 : 12.6 + debug - qt_4 "
"^ stackwalker @ 8.1_1e")
self.check_lex(
kv_lex,
'mvapich_foo debug="4" '
"^ _openmpi @1.2 : 1.4 , 1.6 % intel @ 12.1 : 12.6 + debug - qt_4 "
"^ stackwalker @ 8.1_1e")
self.check_lex(
kv_lex,
"mvapich_foo 'debug = 4' "
"^ _openmpi @1.2 : 1.4 , 1.6 % intel @ 12.1 : 12.6 + debug - qt_4 "
"^ stackwalker @ 8.1_1e")
def test_kv_without_quotes(self):
self.check_lex(
kv_lex,
"mvapich_foo debug=4 "
"^ _openmpi @1.2 : 1.4 , 1.6 % intel @ 12.1 : 12.6 + debug - qt_4 "
"^ stackwalker @ 8.1_1e")
def test_kv_with_spaces(self):
self.check_lex(
kv_lex,
"mvapich_foo debug = 4 "
"^ _openmpi @1.2 : 1.4 , 1.6 % intel @ 12.1 : 12.6 + debug - qt_4 "
"^ stackwalker @ 8.1_1e")
self.check_lex(
kv_lex,
"mvapich_foo debug =4 "
"^ _openmpi @1.2 : 1.4 , 1.6 % intel @ 12.1 : 12.6 + debug - qt_4 "
"^ stackwalker @ 8.1_1e")
self.check_lex(
kv_lex,
"mvapich_foo debug= 4 "
"^ _openmpi @1.2 : 1.4 , 1.6 % intel @ 12.1 : 12.6 + debug - qt_4 "
"^ stackwalker @ 8.1_1e")
@pytest.mark.parametrize('spec,anon_spec,spec_name', [
('openmpi languages=go', 'languages=go', 'openmpi'),
('openmpi @4.6:', '@4.6:', 'openmpi'),
('openmpi languages=go @4.6:', 'languages=go @4.6:', 'openmpi'),
('openmpi @4.6: languages=go', '@4.6: languages=go', 'openmpi'),
])
def test_parse_anonymous_specs(spec, anon_spec, spec_name):
expected = parse(spec)
spec = parse_anonymous_spec(anon_spec, spec_name)
assert len(expected) == 1
assert spec in expected
| lgpl-2.1 | -8,615,010,693,867,667,000 | 35.475676 | 79 | 0.528453 | false | 3.331797 | true | false | false |
kennydo/rename-archive-extension | Nautilus/rename_archive.py | 1 | 8817 | from gi.repository import Nautilus, GObject, Gtk
import functools
import os
import os.path
import urllib
import urlparse
import zipfile
try:
import rarfile
except ImportError:
rarfile = None
if rarfile:
# The default separator is '\\', which is different from what zipfile uses
rarfile.PATH_SEP = '/'
# I put these in a tuple so that they don't accidentally get mutated.
ZIP_MIME_TYPES = tuple(['application/zip',
'application/x-zip',
'application/zip-compressed'])
RAR_MIME_TYPES = tuple(['application/rar',
'application/x-rar',
'application/x-rar-compressed'])
def get_file_path(file_info):
"""Returns the simple file path from a Nautilus.FileInfo.
Gets the "/path/to/file" part from "file:///path/to/file".
Args:
file_info: a Nautilus.FileInfo instance
Returns:
A string representing a Unix path
"""
uri = file_info.get_uri()
return urllib.unquote(urlparse.urlparse(uri).path)
def get_new_file_path(archive_path, directory_name):
"""Gets the proposed new path for an archive if it's renamed
Creates the full path of an archive if it is renamed after a directory.
It keeps the path of directories leading up to the base name, as well as
the file extension.
Calling this function with "/path/to/file.zip" and "dir-name" would return:
"/path/to/dir-name.zip".
Args:
archive_path: A string representing the full path of the archive
directory_name: String value of the directory we want to rename this
archive after.
Returns:
A string of the proposed file path after the archive has been renamed
after the given directory name.
"""
if '.' in archive_path:
extension = archive_path.rsplit('.', 1)[1]
base_name = directory_name + '.' + extension
else:
base_name = directory_name
return os.path.join(os.path.dirname(archive_path), base_name)
def lru_cache(size):
"""Simple LRU cache"""
def outer(f):
prev_inputs = list()
prev_outputs = dict()
@functools.wraps(f)
def wrapper(function_input):
if function_input in prev_inputs:
return prev_outputs[function_input]
function_output = f(function_input)
if len(prev_inputs) >= size:
dead_path = prev_inputs[0]
del prev_inputs[0]
del prev_outputs[dead_path]
prev_inputs.append(function_input)
prev_outputs[function_input] = function_output
return function_output
return wrapper
return outer
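
# Behaviour note for the decorator above (added): with @lru_cache(2), calls
# f('a'), f('b') are cached and a later f('c') evicts the oldest entry ('a').
# Hits do not reorder prev_inputs, so eviction is first-in-first-out rather
# than strictly least-recently-used.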
@lru_cache(32)
def get_zip_directory_names(filename):
"""Gets the list of directories inside a ZIP archive
Reads the directory names inside of a ZIP archive, and returns a list of
each directory name (without its parent directories).
Args:
filename: A string that can be a relative filename or file path (it
doesn't matter as long as this script can read it) of a ZIP file
Returns:
A list of directory name strings.
"""
names = list()
try:
with zipfile.ZipFile(filename, 'r') as zip_file:
names = [fname for fname in zip_file.namelist()
if fname.endswith('/')]
except zipfile.BadZipfile as e:
print(e)
directory_names = [os.path.basename(dir_name[:-1]) for dir_name in names]
return directory_names
@lru_cache(32)
def get_rar_directory_names(filename):
"""Gets the list of directories inside a RAR archive
Reads the directory names inside of a RAR archive, and returns a list of
each directory name (without its parent directories).
Args:
filename: A string that can be a relative filename or file path (it
            doesn't matter as long as this script can read it) of a RAR file
Returns:
A list of directory name strings.
"""
names = list()
try:
with rarfile.RarFile(filename, 'r') as rar_file:
names = [info.filename for info in rar_file.infolist() if info.isdir()]
except rarfile.Error as e:
print(e)
directory_names = [os.path.basename(dir_name) for dir_name in names]
return directory_names
class RenameDialog(GObject.GObject):
"""Wrapped Gtk Message Dialog class"""
def __init__(self, window, original_name, new_name):
self.dialog = Gtk.MessageDialog(window, 0, Gtk.MessageType.QUESTION,
Gtk.ButtonsType.YES_NO,
"Rename Archive?")
self.dialog.format_secondary_text(
"Do you want to rename\n\"{0}\" to\n\"{1}\"".format(
original_name, new_name))
def run(self):
self.response = self.dialog.run()
def destroy(self):
self.dialog.destroy()
class RenameArchiveProvider(GObject.GObject, Nautilus.MenuProvider):
"""Creates a submenu to rename archives after the name of a directory
within the archive.
"""
def __init__(self):
self.supported_mime_types = list(ZIP_MIME_TYPES)
if rarfile:
self.supported_mime_types += list(RAR_MIME_TYPES)
def rename_directory_menuitem_cb(self, menu, cb_parameters):
"""Callback for when the user clicks on a directory name
to rename an archive after.
This displays a dialog that the user responds to with a Yes or No.
If the user clicks Yes, then this attempts to rename the file.
Args:
menu: the Nautilus.Menu that was the source of the click
cb_parameters: a tuple of type (Nautilus.FileInfo,
Gtk.Window,
string)
Returns:
Nothing.
"""
file_info, window, directory_name = cb_parameters
if file_info.is_gone() or not file_info.can_write():
return
old_path = get_file_path(file_info)
old_name = os.path.basename(old_path)
new_path = get_new_file_path(old_path, directory_name)
new_name = os.path.basename(new_path)
dialog = RenameDialog(window, old_name, new_name)
dialog.run()
dialog.destroy()
if dialog.response == Gtk.ResponseType.YES:
try:
os.rename(old_path, new_path)
            except OSError as e:
print(e)
def get_file_items(self, window, files):
if len(files) != 1:
return
selected_file = files[0]
if selected_file.get_uri_scheme() != 'file':
# Not sure which URIs zipfile supports reading from
return
mime_type = selected_file.get_mime_type()
if mime_type in self.supported_mime_types:
top_menuitem = Nautilus.MenuItem(
name='RenameArchiveProvider::Rename Archive',
label='Rename Archive',
tip='Rename archive based on its directory names',
icon='')
names_menu = Nautilus.Menu()
top_menuitem.set_submenu(names_menu)
# create the submenu items
file_path = get_file_path(selected_file)
if mime_type in ZIP_MIME_TYPES:
directory_names = get_zip_directory_names(file_path)
elif mime_type in RAR_MIME_TYPES:
directory_names = get_rar_directory_names(file_path)
else:
directory_names = None
if not directory_names:
no_directories_menuitem = Nautilus.MenuItem(
name='RenameArchiveProvider::No Directories',
label='No directory names found',
tip='',
icon='')
names_menu.append_item(no_directories_menuitem)
else:
for directory_name in directory_names:
name = 'RenameArchiveProvider::Directory::' + \
directory_name
label = 'Rename to "' + \
directory_name.replace('_', '__') + '"'
# we have to perform the underscore replacement in the label to get it to show up
dir_menuitem = Nautilus.MenuItem(
name=name,
label=label,
tip=label,
icon='')
dir_menuitem.connect(
'activate', self.rename_directory_menuitem_cb,
(selected_file, window, directory_name))
names_menu.append_item(dir_menuitem)
return [top_menuitem]
else:
return
| mit | -8,911,151,886,611,844,000 | 32.524715 | 101 | 0.58047 | false | 4.26354 | false | false | false |
YangWanjun/areaparking | utils/constants.py | 1 | 11566 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
SYSTEM_NAME = "エリアパーキング"
END_DATE = '9999-12-31'
DATABASE_DEFAULT = "default"
DATABASE_REVOLUTION = "fk5dtsql"
MIME_TYPE_EXCEL = 'application/excel'
MIME_TYPE_PDF = 'application/pdf'
MIME_TYPE_ZIP = 'application/zip'
MIME_TYPE_HTML = 'text/html'
CONFIG_GROUP_SYSTEM = 'system'
CONFIG_GROUP_GOOGLE = 'google'
CONFIG_GROUP_YAHOO = 'yahoo'
CONFIG_GROUP_EMAIL = 'email'
CONFIG_GROUP_ADJUST_SIZE = 'size'
CONFIG_EMAIL_ADDRESS = 'email_address'
CONFIG_EMAIL_SMTP_HOST = 'email_smtp_host'
CONFIG_EMAIL_SMTP_PORT = 'email_smtp_port'
CONFIG_EMAIL_PASSWORD = 'email_password'
CONFIG_CIRCLE_RADIUS = 'circle_radius'
CONFIG_DOMAIN_NAME = 'domain_name'
CONFIG_PAGE_SIZE = 'page_size'
CONFIG_DECIMAL_TYPE = 'decimal_type'
CONFIG_CONSUMPTION_TAX_RATE = 'consumption_tax_rate'
CONFIG_CAR_LENGTH_ADJUST = 'car_length_adjust'
CONFIG_CAR_WIDTH_ADJUST = 'car_width_adjust'
CONFIG_CAR_HEIGHT_ADJUST = 'car_height_adjust'
CONFIG_CAR_WEIGHT_ADJUST = 'car_weight_adjust'
CONFIG_URL_TIMEOUT = 'url_timeout'
CONFIG_GCM_URL = 'gcm_url'
CONFIG_FIREBASE_SERVERKEY = 'firebase_serverkey'
CONFIG_GOOGLE_MAP_KEY = 'google_map_key'
CONFIG_YAHOO_APP_KEY = 'yahoo_app_id'
CONFIG_FURIGANA_SERVICE_URL = 'furigana_service_url'
CONFIG_PARKING_LOT_KEY_ALERT_PERCENT = 'parking_lot_key_alert_percent'
CONFIG_SIMPLE_SUBSCRIPTION_PERSIST_TIME = 'simple_subscription_persist_time'
REG_TEL = r'^\d+[0-9-]+\d+$'
REG_POST_CODE = r"\d{3}[-]?\d{4}"
REG_NUMBER = r'^[0-9]+$'
REG_MULTI_POSITIONS = r"^[0-9]+$|^[0-9]+-[0-9]+$|^\d+(?:,\d+)*\Z"
REG_CONTINUED_POSITIONS = r"^([0-9]+)-([0-9]+)$"
REPORT_SUBSCRIPTION_CONFIRM = "申込確認書"
REPORT_SUBSCRIPTION = "申込書"
CHOICE_CONTRACTOR_TYPE = (
('1', '個人'),
('2', '法人'),
)
CHOICE_GENDER = (
('1', '男'),
('2', '女'),
)
CHOICE_MARRIED = (
('0', "未婚"),
('1', "既婚"),
)
CHOICE_PAPER_DELIVERY_TYPE = (
('01', '基本情報の住所'),
('02', '勤務先'),
('03', '連絡先'),
('04', '保証人'),
('99', 'その他'),
)
CHOICE_HONORIFIC = (
('1', '様'),
('2', '御中'),
)
CHOICE_BANK_ACCOUNT_TYPE = (
('1', "普通預金"),
('2', "定期預金"),
('3', "総合口座"),
('4', "当座預金"),
('5', "貯蓄預金"),
('6', "大口定期預金"),
('7', "積立定期預金")
)
CHOICE_BANK_DEPOSIT_TYPE = (
('1', "普通"),
('2', "当座"),
('4', "貯蓄"),
('9', "その他"),
)
CHOICE_BANK_POST_KBN = (
(1, "銀行"),
(2, "郵便局"),
)
CHOICE_MANAGEMENT_TYPE = (
('01', '管理委託'),
('02', '一括借上'),
('03', '一般物件'),
('04', '自社物件'),
)
CHOICE_KEY_CATEGORY = (
('01', '鍵'),
('02', 'カード'),
('03', 'リモコン'),
('04', 'パスワード'),
('05', 'その他の鍵'),
)
CHOICE_PAY_TIMING = (
('10', '契約時'),
('11', '契約開始月'),
('20', '更新時'),
('30', '翌月以降'),
('40', '一時'),
('41', '保管場所承諾証明書発行手数料'),
('42', '繰越')
)
CHOICE_TAX_KBN = (
('1', '税抜'),
('2', '税込'),
)
CHOICE_DECIMAL_TYPE = (
('0', '切り捨て'),
('1', '四捨五入'),
('2', '切り上げ'),
)
CHOICE_PRICE_KBN = (
('01', 'チラシ価格'),
('02', 'ホームページ価格'),
)
CHOICE_PARKING_STATUS = (
('01', '空き'),
('02', '手続中'),
('03', '空無'),
('04', '仮押'),
('05', '貸止'),
)
CHOICE_MAIL_GROUP = (
('010', '申込み用フォーム送付'),
('011', '申込み用フォーム入力完了'),
('012', '審査用フォーム送付'),
('013', '審査用フォーム入力完了'),
('040', '契約フォーム送付'),
('041', '契約フォーム入力完了'),
('042', '契約書送付'),
('060', '鍵類、操作説明書、配置図送付'),
('310', '一般解約書類送付'),
('322', '物件解約書類送付'),
('800', 'バッチ:鍵残件数アラート'),
)
CHOICE_REPORT_KBN = (
('001', REPORT_SUBSCRIPTION),
('002', REPORT_SUBSCRIPTION_CONFIRM),
# ('01', '申込書'),
# ('01', '申込書'),
# ('01', '申込書'),
# ('01', '申込書'),
# ('01', '申込書'),
# ('01', '申込書'),
# ('01', '申込書'),
# ('01', '申込書'),
)
CHOICE_PROCESS = (
('01', '申込みから成約まで'),
('20', '支払方法変更'),
# ('21', '名義変更'),
('22', '車室変更'),
('23', '車両変更'),
('24', '鍵紛失'),
('25', '保管場所使用承諾証明書発行'),
('26', '値上げ更新'),
('27', '任意保険・自賠責保険更新'),
('28', '返金'),
('31', '解約'),
('32', '物件解約(承継なし)'),
('33', '物件解約(承継あり)'),
)
CHOICE_TASK_SUBSCRIPTION_CATEGORY = [
# 01 申込
('010', '申込み用フォーム送付'),
('011', '申込み情報確認'),
('012', '審査用フォーム送付'),
# 03 審査
# ('030', '住所・電話番号 審査・確認'),
('031', '勤め先審査'),
('032', '車両サイズ審査'),
# ('033', '申込ルート元審査'),
('034', '書類審査'),
# 契約
('040', '契約フォーム送付'),
('041', '契約情報確認'),
('042', '契約書送付'),
# 入金
('050', '入金確認'),
('060', '鍵類、操作説明書、配置図送付'),
]
CHOICE_TASK_CREDIT_CATEGORY = [
('200', '決済申込書発行'),
('201', '決済申込書確認'),
]
# CHOICE_TASK_NAME_CATEGORY = [
# ('210', '契約書及び請求書の発行'),
# ('211', '入金確認'),
# ('212', '新契約書・請求書の送付'),
# ('213', '結果確認'),
# ]
CHOICE_TASK_CHANGE_POSITION = [
('220', '契約書等送付'),
('221', '書類確認'),
]
CHOICE_TASK_CHANGE_CAR = [
('230', '書類発行'),
]
CHOICE_TASK_KEY_LOST = [
('240', '「落し物」の有無確認'),
('241', '書類発行'),
('242', '入金確認'),
('243', '必要書類一式と操作鍵類の送付'),
('244', '操作鍵類の見積り依頼(オーナー側)'),
('245', '操作鍵類の発注/入金'),
]
CHOICE_TASK_PRICE_RAISE = [
('260', '更新書類の発行'),
('261', '更新書類の確認'),
]
CHOICE_TASK_CONTRACT_CANCELLATION = [
('310', '退出届送付'),
('311', '解約処理'),
('312', '鍵返送案内'),
('313', '鍵回収'),
]
CHOICE_TASK_POSITION_CANCELLATION_WITHOUT_CONTINUE = [
('320', '代替駐車場の調査'),
('321', 'ユーザーへ連絡'),
('322', '強制解約書類送付'),
('323', '滞納金確認'),
('324', '返金確認'),
('325', '鍵返送案内'),
('326', '鍵回収'),
]
CHOICE_TASK_POSITION_CANCELLATION_WITH_CONTINUE = [
('330', 'ユーザーへ連絡'),
('331', '承継承諾書送付'),
('332', '滞納金確認'),
('333', '返金確認'),
('334', '予備分の操作鍵類と契約時書類オーナー側へ送付'),
]
CHOICE_TASK_CATEGORY = CHOICE_TASK_SUBSCRIPTION_CATEGORY + \
CHOICE_TASK_CREDIT_CATEGORY + \
CHOICE_TASK_CHANGE_POSITION + \
CHOICE_TASK_CHANGE_CAR + \
CHOICE_TASK_KEY_LOST + \
CHOICE_TASK_PRICE_RAISE + \
CHOICE_TASK_CONTRACT_CANCELLATION + \
CHOICE_TASK_POSITION_CANCELLATION_WITHOUT_CONTINUE + \
CHOICE_TASK_POSITION_CANCELLATION_WITH_CONTINUE
CHOICE_TASK_STATUS = (
('01', '未実施'),
('02', '実施中'),
('10', 'スキップ'),
('20', '見送る'),
('99', '完了'),
)
CHOICE_CONTRACT_STATUS = (
('01', '仮契約'),
('11', '本契約'),
('21', '破棄'),
)
CHOICE_SUBSCRIPTION_STATUS = (
('01', '新規申込'),
('02', '申込フォーム送付済'),
('03', '申込フォーム入力完了'),
('04', '審査フォーム送付済'),
('05', '審査フォーム入力完了'),
('06', '契約フォーム送付済'),
('07', '契約フォーム入力完了'),
('08', '契約書送付済'),
('09', '鍵類、操作説明書、配置図送付済'),
('11', '成約'),
('12', '破棄'),
)
CHOICE_INSURANCE_JOIN_STATUS = (
('within', '加入中'),
('without', '加入なし'),
('plans', '加入予定'),
)
CHOICE_CONTRACT_PERIOD = (
('long', '1年間(その後自動更新)'),
('short', '1・2ヶ月契約'),
)
CHOICE_IS_REQUIRED = (
('yes', '必要'),
('no', '不要'),
)
CHOICE_TRANSFER_STATUS = (
('00', '請求なし'),
('01', '金額不一致'),
('02', '名義不一致'),
('03', '繰越'),
('11', '完全一致'),
('99', 'その他'),
)
CHOICE_PAYMENT_KBN = (
('01', '振込'),
('02', '振替'),
('03', 'クレジット'),
)
CHOICE_WAITING_STATUS = (
('01', '新規'),
('10', '成約'),
('90', 'キャンセル'),
)
CHOICE_BANK_ACCOUNT_STATUS = (
('0', '使用なし'),
('1', '使用中'),
)
CHOICE_TROUBLE_STATUS = (
('01', '新規'),
('02', '対応中'),
('03', '完了'),
)
CHOICE_SUBSCRIPTION_LIST_SEND_TYPE = (
('01', '両方'),
('02', '賃貸管理会社'),
('03', '建物管理会社'),
)
ERROR_SETTING_NO_SUBSCRIPTION = "申込書の出力書式が設定されていません、管理サイトで「出力書式」->「申込書一覧」にて設定してください。"
ERROR_SETTING_NO_SUBSCRIPTION_CONFIRM = "申込確認書の出力書式が設定されていません、管理サイトで「出力書式」->「申込確認書一覧」にて設定してください。"
ERROR_REQUEST_SIGNATURE = "サインしてください。"
ERROR_PREV_TASK_UNFINISHED = '前のタスクは処理していないので、完了できません!'
ERROR_SUBSCRIPTION_NO_CAR = '車情報がありません。'
ERROR_SUBSCRIPTION_LOCKED = '貸止めになっているため、申込みはできません。'
ERROR_SUBSCRIPTION_CONTRACTED = "既に契約中なので、申込みはできません。"
ERROR_SUBSCRIPTION_PROCESS_NOT_FINISHED = "契約手続きはまだ完了されていません。"
ERROR_SUBSCRIPTION_EMAIL_CONFIRM = "メールアドレスとメールアドレス(確認)は不一致です。"
ERROR_SUBSCRIPTION_PRIVACY_AGREEMENT = "プライバシーポリシーおよび利用規約に承諾してください。"
ERROR_CONTRACT_WRONG_RETIRE_DATE = "退居予定日は解約日の前に選択してください。"
ERROR_CONTRACT_RETIRE_DATE_RANGE = "退居予定日は契約期間内に選択してください。"
ERROR_CONTRACT_CANCELLATION_DATE_RANGE = "解約日は契約期間内に選択してください。"
ERROR_PARKING_LOT_NOT_EXISTS = "駐車場は存在しません。"
ERROR_PARKING_LOT_INVALID_STAFF_START_DATE = "担当者の担当開始日は間違っている、履歴の最終日以降に設定してください。"
ERROR_PARKING_LOT_CANCELLATION_NO_POSITIONS = "物件解約の場合全体解約または車室を選択してください。"
ERROR_PARKING_POSITION_NAME_NUMBER = "車室番号は数字だけを入力してください。"
ERROR_PARKING_POSITION_RANGE = "範囲指定は間違っています。"
ERROR_FORMAT_BANK_TRANSFER = "全銀フォーマットエラー。"
ERROR_FORMAT_BANK_TRANSFER_CANNOT_IMPORT = "ファイル読み込みできません。"
ERROR_REQUIRE_TRANSFER_DATA = "入金データを選択してください。"
ERROR_REQUIRED_FIELD = "%s は必須項目です。"
| apache-2.0 | 438,389,379,480,348,300 | 24.060519 | 97 | 0.542778 | false | 1.639517 | true | true | false |
lig/picket_classic | apps/picket/__init__.py | 1 | 1783 | """
Copyright 2008 Serge Matveenko
This file is part of Picket.
Picket is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Picket is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Picket. If not, see <http://www.gnu.org/licenses/>.
"""
"""
dictionary for storing copyrights and other project stuff
"""
COPYING = {}
"""
Picket home page url.
"""
COPYING['URL'] = 'http://picket.nophp.ru/'
"""
Picket version. Possibly with branch name
"""
COPYING['PICKET_VERSION'] = '0.3-master'
"""
List of Picket authors in order of their code appearence
"""
COPYING['AUTHORS'] = ['Serge Matveenko', 'TrashNRoll']
"""
List of years of project development
"""
COPYING['YEARS'] = [2008, 2009, 2010,]
"""
GPL warning text as of 2008-10-10
"""
COPYING['WARNING'] = \
"""Picket is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Picket is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Picket. If not, see <http://www.gnu.org/licenses/>."""
| gpl-3.0 | -7,424,519,642,878,921,000 | 28.716667 | 68 | 0.748738 | false | 3.737945 | false | false | false |
bchareyre/trial | examples/test/periodic-triax.py | 1 | 1557 | # coding: utf-8
# 2009 © Václav Šmilauer <[email protected]>
"Test and demonstrate use of PeriTriaxController."
from yade import *
from yade import pack,qt
O.periodic=True
O.cell.hSize=Matrix3(0.1, 0, 0,
0 ,0.1, 0,
0, 0, 0.1)
sp=pack.SpherePack()
radius=5e-3
num=sp.makeCloud(Vector3().Zero,O.cell.refSize,radius,.2,500,periodic=True) # min,max,radius,rRelFuzz,spheresInCell,periodic
O.bodies.append([sphere(s[0],s[1]) for s in sp])
O.engines=[
ForceResetter(),
InsertionSortCollider([Bo1_Sphere_Aabb()],verletDist=.05*radius),
InteractionLoop(
[Ig2_Sphere_Sphere_ScGeom()],
[Ip2_FrictMat_FrictMat_FrictPhys()],
[Law2_ScGeom_FrictPhys_CundallStrack()]
),
#PeriTriaxController(maxUnbalanced=0.01,relStressTol=0.02,goal=[-1e4,-1e4,0],stressMask=3,globUpdate=5,maxStrainRate=[1.,1.,1.],doneHook='triaxDone()',label='triax'),
#using cell inertia
PeriTriaxController(dynCell=True,mass=0.2,maxUnbalanced=0.01,relStressTol=0.02,goal=(-1e4,-1e4,0),stressMask=3,globUpdate=5,maxStrainRate=(1.,1.,1.),doneHook='triaxDone()',label='triax'),
NewtonIntegrator(damping=.2),
]
O.dt=PWaveTimeStep()
O.run();
qt.View()
phase=0
def triaxDone():
global phase
if phase==0:
print 'Here we are: stress',triax.stress,'strain',triax.strain,'stiffness',triax.stiff
print 'Now εz will go from 0 to .2 while σx and σy will be kept the same.'
triax.goal=(-1e4,-1e4,-0.2)
phase+=1
elif phase==1:
print 'Here we are: stress',triax.stress,'strain',triax.strain,'stiffness',triax.stiff
print 'Done, pausing now.'
O.pause()
| gpl-2.0 | 248,881,559,573,020,220 | 30.653061 | 188 | 0.710509 | false | 2.367939 | false | false | false |
KangHsi/youtube-8m | model_utils.py | 1 | 4302 | # Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Contains a collection of util functions for model construction.
"""
import numpy
import tensorflow as tf
from tensorflow import logging
from tensorflow import flags
import tensorflow.contrib.slim as slim
def SampleRandomSequence(model_input, num_frames, num_samples):
"""Samples a random sequence of frames of size num_samples.
Args:
model_input: A tensor of size batch_size x max_frames x feature_size
num_frames: A tensor of size batch_size x 1
num_samples: A scalar
Returns:
`model_input`: A tensor of size batch_size x num_samples x feature_size
"""
batch_size = tf.shape(model_input)[0]
frame_index_offset = tf.tile(
tf.expand_dims(tf.range(num_samples), 0), [batch_size, 1])
max_start_frame_index = tf.maximum(num_frames - num_samples, 0)
start_frame_index = tf.cast(
tf.multiply(
tf.random_uniform([batch_size, 1]),
tf.cast(max_start_frame_index + 1, tf.float32)), tf.int32)
frame_index = tf.minimum(start_frame_index + frame_index_offset,
tf.cast(num_frames - 1, tf.int32))
batch_index = tf.tile(
tf.expand_dims(tf.range(batch_size), 1), [1, num_samples])
index = tf.stack([batch_index, frame_index], 2)
return tf.gather_nd(model_input, index)
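
# Shape sketch for the sampler above (illustrative numbers): given model_input
# of shape [8, 300, 1024], num_frames of shape [8, 1] and num_samples = 30,
# the result is [8, 30, 1024] -- a contiguous window of 30 frames starting at
# a random valid offset within each video.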
def SampleRandomFrames(model_input, num_frames, num_samples):
"""Samples a random set of frames of size num_samples.
Args:
model_input: A tensor of size batch_size x max_frames x feature_size
num_frames: A tensor of size batch_size x 1
num_samples: A scalar
Returns:
`model_input`: A tensor of size batch_size x num_samples x feature_size
"""
batch_size = tf.shape(model_input)[0]
frame_index = tf.cast(
tf.multiply(
tf.random_uniform([batch_size, num_samples]),
tf.tile(tf.cast(num_frames, tf.float32), [1, num_samples])), tf.int32)
batch_index = tf.tile(
tf.expand_dims(tf.range(batch_size), 1), [1, num_samples])
index = tf.stack([batch_index, frame_index], 2)
return tf.gather_nd(model_input, index)
def SampleFramesOrdered(model_input, num_frames, num_samples):
"""Samples a random set of frames of size num_samples.
Args:
model_input: A tensor of size batch_size x max_frames x feature_size
num_frames: A tensor of size batch_size x 1
num_samples: A scalar
Returns:
`model_input`: A tensor of size batch_size x num_samples x feature_size
"""
batch_size = tf.shape(model_input)[0]
  tmp = tf.tile(tf.range(0.0, 1.0, 1.0 / num_samples), [batch_size])
frame_index = tf.cast(
tf.multiply(
tf.reshape(tmp,[batch_size,num_samples]),
tf.tile(tf.cast(num_frames, tf.float32), [1, num_samples])), tf.int32)
batch_index = tf.tile(
tf.expand_dims(tf.range(batch_size), 1), [1, num_samples])
index = tf.stack([batch_index, frame_index], 2)
return tf.gather_nd(model_input, index)
def FramePooling(frames, method, **unused_params):
"""Pools over the frames of a video.
Args:
frames: A tensor with shape [batch_size, num_frames, feature_size].
method: "average", "max", "attention", or "none".
Returns:
A tensor with shape [batch_size, feature_size] for average, max, or
attention pooling. A tensor with shape [batch_size*num_frames, feature_size]
for none pooling.
Raises:
ValueError: if method is other than "average", "max", "attention", or
"none".
"""
if method == "average":
return tf.reduce_mean(frames, 1)
elif method == "max":
return tf.reduce_max(frames, 1)
elif method == "none":
feature_size = frames.shape_as_list()[2]
return tf.reshape(frames, [-1, feature_size])
else:
raise ValueError("Unrecognized pooling method: %s" % method)
| apache-2.0 | 4,521,161,258,369,963,000 | 34.85 | 80 | 0.682938 | false | 3.408875 | false | false | false |
andhit-r/opnsynid-accounting-report | opnsynid_asset_account/models/res_company.py | 1 | 1348 | # -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (c) 2015 Andhitia Rama. All rights reserved.
# @author Andhitia Rama
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, fields
class res_company(models.Model):
"""override company to add asset account"""
_inherit = 'res.company'
_name = 'res.company'
asset_ids = fields.Many2many(
string='Assets',
comodel_name='account.account',
relation='rel_company_2_asset_acc',
column1='company_id',
column2='account_id',
)
| agpl-3.0 | 5,641,121,555,060,802,000 | 36.444444 | 78 | 0.602374 | false | 4.279365 | false | false | false |
peterwilletts24/Python-Scripts | plot_scripts/Rain/Diurnal/sea_diurnal_rain_plot_domain_constrain_bit_above western_ghats.py | 1 | 10333 | """
Load npy xy, plot and save
"""
import os, sys
import matplotlib
matplotlib.use('Agg') # Must be before importing matplotlib.pyplot or pylab!
import matplotlib.pyplot as plt
import matplotlib.cm as mpl_cm
from matplotlib import rc
from matplotlib.font_manager import FontProperties
from matplotlib import rcParams
from matplotlib import cm
rc('text', usetex=True)
rcParams['text.usetex']=True
rcParams['text.latex.unicode']=True
rc('font', family = 'serif', serif = 'cmr10')
import numpy as np
from datetime import timedelta
import datetime
import imp
import re
from textwrap import wrap
model_name_convert_legend = imp.load_source('util', '/nfs/see-fs-01_users/eepdw/python_scripts/modules/model_name_convert_legend.py')
#unrotate = imp.load_source('util', '/home/pwille/python_scripts/modules/unrotate_pole.py')
###############
# Things to change
top_dir='/nfs/a90/eepdw/Data/Rain_Land_Sea_Diurnal'
pp_file = 'avg.5216'
lon_max = 71
lon_min = 67
lat_max = 28
lat_min = 20
trmm_dir = '/nfs/a90/eepdw/Data/Observations/Satellite/TRMM/Diurnal/'
trmm_file = "trmm_diurnal_average_lat_%s_%s_lon_%s_%s_bit_above_western_ghats.npz" % (lat_min,lat_max, lon_min, lon_max)
#############
# Make own time x-axis
d = matplotlib.dates.drange(datetime.datetime(2011, 8, 21, 6,30), datetime.datetime(2011, 8, 22, 6, 30), timedelta(hours=1))
formatter = matplotlib.dates.DateFormatter('%H:%M')
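
# The hand-built axis above spans 06:30 on 2011-08-21 to 06:30 the next day in
# one-hour steps, so model and TRMM diurnal cycles can be drawn on a common
# 24 h clock; `formatter` renders the ticks as HH:MM.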
def main():
#experiment_ids = ['djznw', 'djzny', 'djznq', 'djzns', 'dkjxq', 'dklyu', 'dkmbq', 'dklwu', 'dklzq', 'dkbhu', 'djznu', 'dkhgu' ] # All 12
experiment_ids_p = ['djznw', 'djzny', 'djznq', 'dklzq', 'dkmbq', 'dkjxq' ] # Most of Params
experiment_ids_e = ['dklwu', 'dklyu', 'djzns', 'dkbhu', 'djznu', 'dkhgu'] # Most of Explicit
#experiment_ids = ['djzny', 'djznq', 'djzns', 'djznw', 'dkjxq', 'dklyu', 'dkmbq', 'dklwu', 'dklzq' ]
#plt.ion()
NUM_COLOURS = 15
cmap=cm.get_cmap(cm.Set1, NUM_COLOURS)
#cgen = (cmap(1.*i/NUM_COLORS) for i in range(NUM_COLORS))
for ls in ['land','sea', 'total']:
fig = plt.figure(figsize=(12,6))
ax = fig.add_subplot(111)
legendEntries=[]
legendtext=[]
plot_trmm = np.load('%s%s_%s' % (trmm_dir, ls, trmm_file))
dates_trmm=[]
p=[]
for dp in plot_trmm['hour']:
print dp
if ((int(dp)<23) & (int(dp)>=6)):
dates_trmm.append(datetime.datetime(2011, 8, 21, int(dp), 0))
p.append(plot_trmm['mean'][plot_trmm['hour']==dp])
if ((int(dp)>=0) & (int(dp)<=6)):
dates_trmm.append(datetime.datetime(2011, 8, 22, int(dp), 0))
p.append(plot_trmm['mean'][plot_trmm['hour']==dp])
#print dates_trmm
a = np.argsort(dates_trmm,axis=0)
d_trmm = np.array(dates_trmm)[a]
pl = (np.array(p)[a])
#pl=np.sort(pl,axis=1)
l, = plt.plot_date(d_trmm, pl, label='TRMM', linewidth=2, linestyle='-', marker='', markersize=2, fmt='', color='#262626')
legendEntries.append(l)
legendtext.append('TRMM')
l0=plt.legend(legendEntries, legendtext,title='', frameon=False, prop={'size':8}, loc=9, bbox_to_anchor=(0.21, 0,1, 1))
# Change the legend label colors to almost black
texts = l0.texts
for t in texts:
t.set_color('#262626')
legendEntries=[]
legendtext=[]
for c, experiment_id in enumerate(experiment_ids_p):
expmin1 = experiment_id[:-1]
if (experiment_id=='djznw'):
print experiment_id
colour = cmap(1.*1/NUM_COLOURS)
linewidth=0.2
linestylez='--'
if (experiment_id=='djzny'):
print experiment_id
colour = cmap(1.*3/NUM_COLOURS)
linewidth=0.5
linestylez='--'
if ((experiment_id=='djznq') or (experiment_id=='dkjxq')):
print experiment_id
colour = cmap(1.*5/NUM_COLOURS)
linewidth=0.8
if (experiment_id=='djznq'):
linestylez='--'
if (experiment_id=='dkjxq'):
linestylez=':'
if ((experiment_id=='dklzq') or (experiment_id=='dklwu')):
print experiment_id
colour = cmap(1.*7/NUM_COLOURS)
linewidth=1
if (experiment_id=='dklzq'):
linestylez='--'
if (experiment_id=='dklwu'):
linestylez='-'
if ((experiment_id=='dklyu') or (experiment_id=='dkmbq')):
print experiment_id
colour = cmap(1.*9/NUM_COLOURS)
linewidth=1.3
if (experiment_id=='dkmbq'):
linestylez='--'
if (experiment_id=='dklyu'):
linestylez='-'
if (experiment_id=='djzns'):
print experiment_id
colour = cmap(1.*11/NUM_COLOURS)
linewidth=1.6
linestylez='-'
if ((experiment_id=='dkbhu')or (experiment_id=='dkhgu')):
print experiment_id
colour = cmap(1.*13/NUM_COLOURS)
linewidth=1.9
if (experiment_id=='dkbhu'):
linestylez='-'
if (experiment_id=='dkhgu'):
linestylez=':'
if (experiment_id=='djznu'):
print experiment_id
colour = cmap(1.*15/NUM_COLOURS)
linewidth=2.
linestylez='-'
try:
plotnp = np.load('%s/%s/%s/%s_%s_rainfall_diurnal_np_domain_constrain_lat_%s-%s_lon-%s-%s.npy' % (top_dir, expmin1, experiment_id, pp_file, ls, lat_min, lat_max, lon_min, lon_max))
l, = plt.plot_date(d, plotnp[0]*3600, label='%s' % (model_name_convert_legend.main(experiment_id)), linewidth=linewidth, linestyle=linestylez, marker='', markersize=2, fmt='', color=colour)
legendEntries.append(l)
legendtext.append('%s' % (model_name_convert_legend.main(experiment_id)))
except Exception, e:
print e
pass
l1=plt.legend(legendEntries, legendtext, title='Parametrised', loc=9, frameon=False, prop={'size':8}, bbox_to_anchor=(0, 0,1, 1))
# Change the legend label colors to almost black
texts = l1.texts
for t in texts:
t.set_color('#262626')
legendEntries=[]
legendtext=[]
c1=0
for c, experiment_id in enumerate(experiment_ids_e):
if (experiment_id=='djznw'):
print experiment_id
colour = cmap(1.*1/NUM_COLOURS)
linewidth=0.2
linestylez='--'
if (experiment_id=='djzny'):
print experiment_id
colour = cmap(1.*3/NUM_COLOURS)
linewidth=0.5
linestylez='--'
if ((experiment_id=='djznq') or (experiment_id=='dkjxq')):
print experiment_id
colour = cmap(1.*5/NUM_COLOURS)
linewidth=0.8
if (experiment_id=='djznq'):
linestylez='--'
if (experiment_id=='dkjxq'):
linestylez=':'
if ((experiment_id=='dklzq') or (experiment_id=='dklwu')):
print experiment_id
colour = cmap(1.*7/NUM_COLOURS)
linewidth=1
if (experiment_id=='dklzq'):
linestylez='--'
if (experiment_id=='dklwu'):
linestylez='-'
if ((experiment_id=='dklyu') or (experiment_id=='dkmbq')):
print experiment_id
colour = cmap(1.*9/NUM_COLOURS)
linewidth=1.3
if (experiment_id=='dkmbq'):
linestylez='--'
if (experiment_id=='dklyu'):
linestylez='-'
if (experiment_id=='djzns'):
print experiment_id
colour = cmap(1.*11/NUM_COLOURS)
linewidth=1.6
linestylez='-'
if ((experiment_id=='dkbhu')or (experiment_id=='dkhgu')):
print experiment_id
colour = cmap(1.*13/NUM_COLOURS)
linewidth=1.9
if (experiment_id=='dkbhu'):
linestylez='-'
if (experiment_id=='dkhgu'):
linestylez=':'
if (experiment_id=='djznu'):
print experiment_id
colour = cmap(1.*15/NUM_COLOURS)
linewidth=2.
linestylez='-'
expmin1 = experiment_id[:-1]
try:
plotnp = np.load('%s/%s/%s/%s_%s_rainfall_diurnal_np_domain_constrain_lat_%s-%s_lon-%s-%s.npy' % (top_dir, expmin1, experiment_id, pp_file, ls, lat_min, lat_max, lon_min, lon_max))
l, = plt.plot_date(d, plotnp[0]*3600, label='%s' % (model_name_convert_legend.main(experiment_id)), linewidth=linewidth, linestyle=linestylez, marker='', markersize=2, fmt='', color=colour)
legendEntries.append(l)
legendtext.append('%s' % (model_name_convert_legend.main(experiment_id)))
except Exception, e:
print e
pass
l2=plt.legend(legendEntries, legendtext, title='Explicit', loc=9, frameon=False, bbox_to_anchor=(0.11, 0,1, 1), prop={'size':8})
plt.gca().add_artist(l1)
plt.gca().add_artist(l0)
plt.gca().xaxis.set_major_formatter(formatter)
# Change the legend label colors to almost black
texts = l2.texts
for t in texts:
t.set_color('#262626')
plt.xlabel('Time (UTC)')
plt.ylabel('mm/h')
title="Domain Averaged Rainfall - %s" % ls
t=re.sub('(.{68} )', '\\1\n', str(title), 0, re.DOTALL)
t = re.sub(r'[(\']', ' ', t)
t = re.sub(r'[\',)]', ' ', t)
pp_filenodot= pp_file.replace(".", "")
# Bit of formatting
# Set colour of axis lines
spines_to_keep = ['bottom', 'left']
for spine in spines_to_keep:
ax.spines[spine].set_linewidth(0.5)
ax.spines[spine].set_color('#262626')
# Remove top and right axes lines ("spines")
spines_to_remove = ['top', 'right']
for spine in spines_to_remove:
ax.spines[spine].set_visible(False)
# Get rid of ticks. The position of the numbers is informative enough of
# the position of the value.
ax.xaxis.set_ticks_position('none')
ax.yaxis.set_ticks_position('none')
# Change the labels to the off-black
ax.xaxis.label.set_color('#262626')
ax.yaxis.label.set_color('#262626')
if not os.path.exists('/nfs/a90/eepdw/Figures/EMBRACE/Diurnal/'): os.makedirs('/nfs/a90/eepdw/Figures/EMBRACE/Diurnal/')
plt.savefig('/nfs/a90/eepdw/Figures/EMBRACE/Diurnal/%s_%s_latlon_bit_above_western_ghats_notitle.png' % (pp_filenodot, ls), format='png', bbox_inches='tight')
plt.title('\n'.join(wrap('%s' % (t.title()), 1000,replace_whitespace=False)), fontsize=16, color='#262626')
#plt.show()
plt.savefig('/nfs/a90/eepdw/Figures/EMBRACE/Diurnal/%s_%s_latlon_bit_above_western_ghats.png' % (pp_filenodot, ls), format='png', bbox_inches='tight')
plt.close()
if __name__ == '__main__':
main()
| mit | 4,530,510,237,832,121,300 | 30.123494 | 195 | 0.594987 | false | 3.037331 | false | false | false |
dcalacci/Interactive_estimation | game/round/models.py | 1 | 2767 | from decimal import Decimal
from django.db import models
from django.conf import settings
from django.utils import timezone
# Create your models here.
from game.contrib.calculate import calculate_score
from game.control.models import Control
class Plot(models.Model):
plot = models.URLField()
answer = models.DecimalField(max_digits=3, decimal_places=2)
duration = models.TimeField(null=True)
def __str__(self):
return self.plot
class SliderValue(models.Model):
user = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)
timestamp = models.DateTimeField(default=timezone.now)
this_round = models.ForeignKey("Round", null=True)
round_order = models.PositiveSmallIntegerField(null=True)
value = models.DecimalField(decimal_places=2, max_digits=3, null=True)
def __str__(self):
return "Slider {}".format(self.id)
class Round(models.Model):
user = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)
plot = models.ForeignKey(Plot, on_delete=models.CASCADE)
round_order = models.PositiveSmallIntegerField()
guess = models.DecimalField(max_digits=3, decimal_places=2, null=True)
# start time of the round
# end time of the round
start_time = models.DateTimeField(auto_now=True, null=True)
end_time = models.DateTimeField(null=True)
# todo: change control treatment
score = models.DecimalField(max_digits=3, decimal_places=2, default=Decimal(0))
def __str__(self):
return self.user.username
def round_data(self):
played_rounds = self.__class__.objects.filter(user=self.user, round_order__lte=self.round_order,
guess__gte=Decimal(0.0))
score = calculate_score(played_rounds)
this_round = self.__class__.objects.filter(user=self.user, round_order=self.round_order,
guess__gte=Decimal(0.0))
round_score = calculate_score(this_round)
data = {'username': self.user.username, 'cumulative_score': score,
'avatar': self.user.avatar, 'task_path': self.plot.plot, 'correct_answer': self.plot.answer,
'independent_guess': self.guess, 'round_id': self.round_order, 'score': round_score,
'game_id': None, 'condition': None, 'following': None, 'revised_guess': None
}
if self.user.game_type == 'c':
game = Control.objects.get(user=self.user)
data['game_id'] = game.id
data['condition'] = 'control'
return data
def get_guess(self):
return float(self.guess) if self.guess else -1
class Meta:
unique_together = (('user', 'round_order',),)
| mit | -4,080,133,973,543,304,000 | 36.391892 | 108 | 0.644742 | false | 3.859135 | false | false | false |
pdl30/pychiptools | pychiptools/utilities/alignment.py | 1 | 3065 | #!/usr/bin/python
########################################################################
# 20 Oct 2014
# Patrick Lombard, Centre for Stem Stem Research
# Core Bioinformatics Group
# University of Cambridge
# All right reserved.
########################################################################
import argparse
import subprocess
import sys, re, os
def paired_bowtie(fastq1, fastq2, name, index, outdir):
sam1 = outdir + "/" + "tmp.sam"
sam1_o = open(sam1, "wb")
report = outdir+'/'+name+'_report.txt'
report1_o = open(report, "wb")
uniq = "bowtie -m 2 -v 1 --best --strata --seed 0 --sam {0} -1 {1} -2 {2}".format(index, fastq1, fastq2)
p = subprocess.Popen(uniq.split(), stdout = sam1_o, stderr=report1_o)
p.communicate()
sam2 = outdir+"/"+name+".sam"
grep_paired_unique(sam1, sam2)
os.remove(sam1)
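
# Note on the bowtie flags used above (added; my reading of the options):
# -m 2 suppresses reads with more than two reportable alignments, -v 1 allows
# at most one mismatch, and --best --strata restricts reporting to the best
# alignment stratum. grep_paired_unique then keeps only pairs that lack a
# secondary-alignment XS tag.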
def single_bowtie(fastq, name, index, outdir):
sam1 = outdir + "/" + "tmp.sam"
sam1_o = open(sam1, "wb")
report = outdir+'/'+name+'_report.txt'
report1_o = open(report, "wb")
uniq = "bowtie -m 2 -v 1 --best --strata --seed 0 --sam {0} {1}".format(index, fastq)
p = subprocess.Popen(uniq.split(), stdout = sam1_o, stderr=report1_o)
p.communicate()
sam2 = outdir+"/"+name+".sam"
grep_single_unique(sam1, sam2)
os.remove(sam1)
def grep_paired_unique(samfile, outfile):
output= open(outfile, "w")
with open(samfile) as f:
for line in f:
line = line.rstrip()
word = line.split("\t")
if line.startswith("@"):
output.write("{}\n".format(line)),
continue
if len(word) > 12:
m = re.match("XS:i:", word[12])
if not m:
if int(word[1]) == 147 or int(word[1]) == 83 or int(word[1]) == 99 or int(word[1]) == 163 or int(word[1]) == 81 or int(word[1]) == 97 or int(word[1]) == 145 or int(word[1]) == 161:
output.write("{}\n".format(line)),
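
# The FLAG whitelist above corresponds, as far as I can tell, to paired reads
# in consistent forward/reverse orientation: 99/147 and 83/163 are the two
# mates of a properly paired alignment, while 81/97/145/161 are the same
# orientations without the proper-pair bit set.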
def grep_single_unique(samfile, outfile):
output= open(outfile, "w")
with open(samfile) as f:
for line in f:
line = line.rstrip()
word = line.split("\t")
if line.startswith("@"):
output.write("{}\n".format(line)),
continue
if len(word) > 12:
m = re.match("XS:i:", word[12])
if not m:
if int(word[1]) == 0 or int(word[1]) == 16:
output.write("{}\n".format(line)),
def paired_bowtie2(fastq1, fastq2, name, index, outdir, threads):
report = outdir+'/'+name+'_report.txt'
report1_o = open(report, "wb")
uniq = "bowtie2 -p {4} -k 2 -N 1 --mm --no-mixed --no-discordant -x {0} -1 {1} -2 {2} -S {3}/tmp.sam".format(index, fastq1, fastq2, outdir, threads)
p = subprocess.Popen(uniq.split(), stderr=report1_o)
p.communicate()
grep_paired_unique(outdir+"/tmp.sam", outdir+'/'+name+'.sam')
os.remove(outdir+"/tmp.sam")
def single_bowtie2(fastq, name, index, outdir, threads):
report = outdir+'/'+name+'_report.txt'
report1_o = open(report, "wb")
uniq = "bowtie2 -p {3} -k 2 -N 1 --mm -x {0} -U {1} -S {2}/tmp.sam".format(index, fastq, outdir, threads)
p = subprocess.Popen(uniq.split(), stderr=report1_o)
p.communicate()
grep_single_unique(outdir+"/tmp.sam", outdir+'/'+name+'.sam')
os.remove(outdir+"/tmp.sam")
| gpl-2.0 | 3,248,898,922,289,293,000 | 34.639535 | 185 | 0.596411 | false | 2.702822 | false | false | false |
shawncaojob/LC | PY/140_word_break_ii.py | 1 | 4564 | # 140. Word Break II QuestionEditorial Solution My Submissions
# Total Accepted: 68516
# Total Submissions: 321390
# Difficulty: Hard
# Given a string s and a dictionary of words dict, add spaces in s to construct a sentence where each word is a valid dictionary word.
#
# Return all such possible sentences.
#
# For example, given
# s = "catsanddog",
# dict = ["cat", "cats", "and", "sand", "dog"].
#
# A solution is ["cats and dog", "cat sand dog"].
#
# Subscribe to see which companies asked this question
# Notes:
# Forward DP or Backward DP?
# WB1 is forward DP. DP[i] means s[:i+1] is breakable
# WB2 is backward. DP[i] means s[i:] is breakable
# Since DFS checks whether the remaining suffix s[i:] is breakable, the backward DP table lines up with the recursion.
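# Worked example (backward DP): for s = "catsanddog" with the dict above,
# dp[i] == True for i in {0, 3, 4, 7, 10}, i.e. the suffixes "catsanddog",
# "sanddog", "anddog", "dog" and "" are all breakable.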
# 12.08.2016 Rewrite. DP + DFS
class Solution(object):
dp = []
def wordBreak(self, s, wordDict):
"""
:type s: str
:type wordDict: Set[str]
:rtype: List[str]
"""
n = len(s)
self.dp = [ False for x in xrange(len(s)+1) ]
self.dp[0] = True
for i in xrange(n):
for j in xrange(i+1):
tmp = s[j:i+1]
if tmp in wordDict and self.dp[j]:
self.dp[i+1] = True
break
if not self.dp[-1]: return []
res = []
self.dfs(res, "", s, n-1, wordDict)
return res
def dfs(self, res, line, s, end, wordDict):
if end == -1:
res.append(line[:-1])
return
for start in xrange(end, -1, -1):
tmp = s[start:end+1]
if tmp in wordDict and self.dp[start]:
self.dfs(res, tmp + " " + line, s, start - 1, wordDict)
# DP + DFS can get rid of TLE
class Solution(object):
def wordBreak(self, s, wordDict):
"""
:type s: str
:type wordDict: Set[str]
:rtype: List[str]
"""
n = len(s)
print(s)
print(wordDict)
res = []
dp = [False for x in xrange(n+1)]
dp[n] = True
for i in xrange(n-1, -1, -1):
for j in xrange(n-1, i-1, -1): # Better loop. i start index. j end index
if dp[j+1] and s[i:j+1] in wordDict:
dp[i] = True
break
# for i in xrange(n-1, -1, -1):
# for j in xrange(i, -1, -1):
# if dp[i+1] and s[j:i+1] in wordDict:
# dp[j] = True
# continue
def dfs(start, line):
if not dp[start]:
return
if start == len(s):
res.append(line[1:])
return
for i in xrange(start, len(s)):
if dp[i+1] and s[start:i+1] in wordDict:
dfs(i+1, line + " " + s[start:i+1])
dfs(0, "")
return res
if __name__ == "__main__":
# s = "catsanddog"
# d = ["cat","cats","and","sand","dog"]
# Solution().wordBreak(s, d)
# s = "goalspecial"
# d = ["go","goal","goals","special"]
s = "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaabaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
d = ["a","aa","aaa","aaaa","aaaaa","aaaaaa","aaaaaaa","aaaaaaaa","aaaaaaaaa","aaaaaaaaaa"]
print(Solution().wordBreak(s, d))
# Solution().wordBreak(s, d)
# s1 = "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaabaabaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
# d1 = ["aa","aaa","aaaa","aaaaa","aaaaaa","aaaaaaa","aaaaaaaa","aaaaaaaaa","aaaaaaaaaa","ba"]
# Solution().wordBreak(s1, d1)
    exit()
# If DFS only. get TLE
class Solution2(object):
def wordBreak(self, s, wordDict):
"""
:type s: str
:type wordDict: Set[str]
:rtype: List[str]
"""
res = []
# Precheck to get rid of TLE
set_s = set(s)
set_dict = set("".join(wordDict))
for char in set_s:
            if char not in set_dict:
return []
self.dfs(s, wordDict, res, "")
return res
def dfs(self, s, wordDict, res, line):
if not s:
print(line)
res.append(line)
return
for i in xrange(1, len(s)+1):
if s[:i] in wordDict:
if not line:
self.dfs(s[i:], wordDict, res, s[:i])
else:
self.dfs(s[i:], wordDict, res, line + " " + s[:i])
| gpl-3.0 | 5,375,061,984,906,493,000 | 29.837838 | 163 | 0.510517 | false | 3.44713 | false | false | false |
abhigd/bigboy | app/views/link.py | 1 | 17235 | import time, uuid, json, calendar
from app import app
from app.lib import upload, auth, files
from app.lib import distribution, link
from app.lib import geo
from app.forms import *
from flask import request, redirect
from flask import session, render_template
from flask import make_response, abort
from flask import jsonify, Response, current_app
from werkzeug.datastructures import MultiDict
from rfc6266 import build_header
import mimetypes
from boto.sqs.message import Message as SQSMessage
from flask.ext.login import login_user, current_user
from flask.ext.login import login_required, login_url
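# Redis key layout used throughout this module:
#   links:<link_id>            -> JSON blob describing a share link
#   user_links:<owner>         -> set of link ids owned by a user
#   link_targets:<link_id>     -> sorted set of file ids attached to a link
#   files:<file_id>            -> JSON blob for a permanent file
#   temp_files:<file_id>       -> JSON blob for a pending (unapproved) upload
#   link_uploads:<link_id>     -> set of temp upload ids awaiting approval
#   target_download_counter:<link_id>:<file_id> -> list of download timestamps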
@app.route('/link/', methods=['GET'])
@login_required
def render_links():
link_list, links = [], []
owner = current_user.user_id
start = 0
end = 10
page_num = start/10+1
target = request.args.get("target", None)
if target:
link_ids = current_app.redis_client.zrange("target_links:%s" %target, 0, -1)
link_count = current_app.redis_client.zcard("target_links:%s" %target)
else:
link_ids = current_app.redis_client.smembers("user_links:%s" %owner)
link_count = current_app.redis_client.scard("user_links:%s" %owner)
if link_ids:
link_list = current_app.redis_client.mget(["links:%s" %link_id \
for link_id in link_ids])
link_data = [json.loads(x) for x in link_list if x]
for link in link_data:
link["linkUrl"] = "%s/link/%s" %(app.config.get('HOSTNAME'),
link["id"])
if link["expires_in"] < time.time():
link["expired"] = True
else:
link["expired"] = False
links.append(link)
if request.is_xhr:
return jsonify({'data': links,
'total': link_count,
'perPage': 10,
'page': page_num
})
else:
return render_template('link.html', links=[])
@app.route('/link/<link_id>', methods=['GET'])
def render_link(link_id):
link_data = current_app.redis_client.get("links:%s" %link_id)
link_targets = current_app.redis_client.zrange("link_targets:%s" %link_id, 0, -1)
target_render_data = []
if link_data is None:
abort(404)
# TODO: Check if target's ACL still allows the owner of this
# link to access
link_info = json.loads(link_data)
expires_in = link_info["expires_in"]
if link_info["expires_in"] < time.time():
link_info["expired"] = True
else:
link_info["expired"] = False
if link_info["expired"]:
if not current_user.is_authenticated():
abort(404, "Link has expired")
    # Check for temp uploads and move them to link_targets if complete.
    # Files will not be moved to the main bucket until the user approves
    # them; until then they are served from the temporary S3 bucket.
owner_info = json.loads(current_app.redis_client.get("user:%s" %link_info["owner"]))
del owner_info["idp"]
del owner_info["idp_id"]
link_info["owner"] = owner_info
if link_targets:
target_ids = link_targets
target_data = current_app.redis_client.mget(["files:%s" %x for x in target_ids])
target_info = [json.loads(x) for x in target_data if x]
else:
target_info = []
for idx, target in enumerate(target_info):
target_url = "%s/link/%s/target/%s/%s" \
%(app.config.get('HOSTNAME'), link_id,
target["id"], target["title"])
target["url"] = target_url
target_download_count = current_app.redis_client.llen(
"target_download_counter:%s:%s" \
%(link_id, target["id"]))
target["count"] = int(target_download_count)
del target["acl"]
del target["source"]
# del target["owner"]
target["approved"] = True
target_render_data.append(target)
if current_user.is_authenticated():
temp_target_info = []
temp_uploads = current_app.redis_client.smembers("link_uploads:%s" %(link_id))
if temp_uploads:
temp_target_data = current_app.redis_client.mget(["temp_files:%s" %x \
for x in temp_uploads])
temp_target_info = [json.loads(x) for x in temp_target_data if x]
for idx, target in enumerate(temp_target_info):
# Check if this file really exists in S3 else delete from index
target_url = "%s/link/%s/upload/%s/%s" \
%(app.config.get('HOSTNAME'), link_id,
target["id"], target["title"])
target["url"] = target_url
target["approved"] = False
target["count"] = 0
target_render_data.append(target)
del target["bucket"]
if request.headers["Accept"].startswith("text/html"):
return render_template('link.html',
link_data=link_info,
targets=target_render_data)
elif request.headers["Accept"].startswith("application/json"):
link_data = dict(**link_info)
link_data.update(targets=target_render_data)
return jsonify(link_data)
else:
render_data = ["%s %s" %(target["title"][:18].ljust(20), target["url"]) \
for target in target_render_data]
resp = make_response("\n".join(render_data)+"\n")
resp.headers['content-type'] = "text/plain"
return resp
@app.route('/link/', methods=['POST'])
@login_required
def create_link():
created = int(time.time())
owner = current_user.user_id
acl = {}
form = LinkForm(MultiDict(request.json))
if not form.validate():
abort(400, form.errors)
link_id = uuid.uuid4().hex
link_targets = form.target.data.split(",")
link_expires_in = time.gmtime((form.expires_in.data))
link_data = {"id": link_id,
"owner": owner,
"expires_in": form.expires_in.data,
"created": created,
"acl": acl,
"max_uploads": 10,
"max_upload_size": 104857600,
"max_target_downloads": 0,
"allow_downloads": True,
"allow_uploads": False}
current_app.redis_client.set("links:%s" %link_id, json.dumps(link_data))
current_app.redis_client.sadd("user_links:%s" %owner, link_id)
if link_targets:
target_ids = link_targets
target_data = current_app.redis_client.mget(["files:%s" %x for x in target_ids])
targets = [json.loads(x) for x in target_data if x]
for target in targets:
link.create_link_target(link_data, target)
link_data["linkUrl"] = "%s/link/%s" % (app.config.get('HOSTNAME'), link_id)
link_data["expired"] = False
return jsonify(link_data)
@app.route('/link/<link_id>', methods=['PUT'])
@login_required
def edit_link(link_id):
link_data = current_app.redis_client.get("links:%s" %link_id)
if not link_data:
abort(404)
form = LinkForm(MultiDict(request.json))
if not form.validate():
abort(400, form.errors)
link_info = json.loads(link_data)
expires_in = form.expires_in.data
link_data = {"id": link_id,
"owner": link_info["owner"],
"expires_in": expires_in,
"created": link_info["created"],
"acl": link_info["acl"],
"max_uploads": form.max_uploads.data or 10,
"max_upload_size": form.max_upload_size.data or 1024*1024*100,
"max_target_downloads": form.max_target_downloads.data or 0,
"allow_downloads": form.allow_downloads.data,
"allow_uploads": form.allow_uploads.data}
current_app.redis_client.set("links:%s" %link_id, json.dumps(link_data))
return jsonify(link_data)
@app.route('/link/<link_id>', methods=["DELETE"])
@login_required
def delete_link(link_id):
link_data = current_app.redis_client.get("links:%s" %link_id)
if not link_data:
abort(404)
target = request.args.get("target", None)
if target:
target_id = target.split("/", 2)[-1]
target_data = current_app.redis_client.get("files:%s" % target_id)
if not target_data:
abort(400, "Specified File does not exist")
link_info = json.loads(link_data)
owner = link_info["owner"]
    # TODO: This is ambiguous. This method does two things:
# 1. Remove a link or
# 2. Remove a specific target from a link
if target is None:
current_app.redis_client.delete("links:%s" %link_id)
current_app.redis_client.srem("user_links:%s" %owner, link_id)
current_app.redis_client.delete("link_uploads:%s" %link_id)
else:
link.delete_link_target(link_info, json.loads(target_data))
response = Response(status=204)
return response
# @app.route('/link/<link_id>/target/', methods=["GET"])
# def get_link_targets(link_id):
# pass
# # Get last 100 ids from source uploads
@app.route('/link/<link_id>/search', methods=["GET"])
@login_required
def search_link_targets(link_id):
link_data = current_app.redis_client.get("links:%s" %link_id)
if link_data is None:
abort(404)
last_100_files = current_app.redis_client.zrevrange("local_files", 0, 100)
link_targets = current_app.redis_client.zrevrange("link_targets:%s" %link_id, 0, -1)
interesting_files = set(last_100_files) - set(link_targets)
data = files.get_file_data(interesting_files)
return jsonify({'data': data})
@app.route('/link/<link_id>/target/<target_id>/<file_name>', methods=["GET"])
def get_link_target(link_id, target_id, file_name):
link_data = current_app.redis_client.get("links:%s" %link_id)
target_data = current_app.redis_client.get("files:%s" % target_id)
if link_data is None or target_data is None:
abort(404)
link_info = json.loads(link_data)
if link_info["expires_in"] < time.time():
abort(404, "Link has expired")
if link_info["max_target_downloads"] > 0:
target_d_count = current_app.redis_client.llen("target_download_counter:%s:%s" \
%(link_id, target_id))
if target_d_count >= link_info["max_target_downloads"]:
abort(404, "Limit reached")
target_exists = current_app.redis_client.zrank("link_targets:%s" %link_id, target_id)
if target_exists is None:
abort(404, "No such file exists")
target_info = json.loads(target_data)
signed_url = distribution.get_signed_url(target_info)
current_app.redis_client.lpush("target_download_counter:%s:%s" \
%(link_id, target_id),
time.time())
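    # A 307 redirect preserves the request method, so the client simply
    # re-issues its GET against the time-limited signed URL.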
print signed_url
return redirect(signed_url, code=307)
@app.route('/link/<link_id>/target/<target_id>', methods=["PUT"])
@login_required
def edit_link_target(link_id, target_id):
form = EditLinkTargetForm(MultiDict(request.json))
link_data = current_app.redis_client.get("links:%s" %link_id)
if not link_data:
abort(404)
if not form.validate():
abort(400, form.errors)
approved = form.approved.data
description = form.description.data
if approved:
temp_file_data = current_app.redis_client.get("temp_files:%s" % target_id)
if not temp_file_data:
abort(404)
link_info = json.loads(link_data)
temp_file_info = json.loads(temp_file_data)
target = link.approve_link_target(link_info, temp_file_info)
return jsonify(target)
response = Response(status=204)
return response
@app.route('/link/<link_id>/target/', methods=["POST"])
def create_link_target(link_id):
form = LinkTargetForm(MultiDict(request.json))
link_data = current_app.redis_client.get("links:%s" %link_id)
if not link_data:
abort(404)
if not form.validate():
abort(400, form.errors)
link_info = json.loads(link_data)
if not current_user.is_authenticated():
if link_info["expires_in"] < time.time():
abort(404, "Link has expired")
if not link_info["allow_uploads"]:
abort(403, "Link does not allow anonymous uploads")
target_id = form.target_id.data
if current_user.is_authenticated():
target_data = current_app.redis_client.get("files:%s" %target_id)
else:
target_data = current_app.redis_client.get("temp_files:%s" %target_id)
if not target_data:
abort(400, form.errors)
target_info = json.loads(target_data)
if current_user.is_authenticated():
target = link.create_link_target(link_info, target_info)
else:
target_info["url"] = "#"
target_info["count"] = 0
target_info["approved"] = False
target_info["created"] = time.time()
return jsonify(target_info)
@app.route('/link/<link_id>/target/<target_id>', methods=["DELETE"])
@login_required
def delete_link_target(link_id, target_id):
link_data = current_app.redis_client.get("links:%s" %link_id)
link_target = target_id
if link_data is None:
abort(404)
target_data = current_app.redis_client.get("files:%s" %target_id)
if not target_data:
abort(400)
link.delete_link_target(json.loads(link_data), json.loads(target_data))
response = Response(status=204)
return response
@app.route('/link/<link_id>/upload/<file_id>/<file_name>', methods=["GET"])
@login_required
def get_temp_link_upload(link_id, file_id, file_name):
link_data = current_app.redis_client.get("links:%s" %link_id)
temp_file = current_app.redis_client.get("temp_files:%s" %file_id)
temp_file_exists = current_app.redis_client.sismember("link_uploads:%s" %(link_id),
file_id)
if link_data is None or temp_file is None or temp_file_exists is False:
abort(404)
file_info = json.loads(temp_file)
bucket = file_info["bucket"]
file_type = file_info["type"]
file_name = file_info["title"]
file_content_disposition_header = build_header(file_name).encode('ascii')
response_headers = {"response-content-disposition":
file_content_disposition_header,
"response-content-type": file_type}
url = default_s3_conn.generate_url(600, "GET", bucket=bucket, key=file_id,
response_headers=response_headers)
return redirect(url, 307)
@app.route('/link/<link_id>/upload/', methods=["POST"])
def link_target_upload_manage(link_id):
# TODO: Check if link allows uploads. Set limits
phase = request.args.get("phase", "init")
link_data = current_app.redis_client.get("links:%s" %link_id)
if link_data is None:
abort(404)
link_info = json.loads(link_data)
if not current_user.is_authenticated():
if link_info["expires_in"] < time.time():
abort(404, "Link has expired")
if not link_info["allow_uploads"]:
abort(403)
if phase in ["form", "init"]:
# TODO: Add to a queue to track failed uploads later
# TODO: If s3_key exists, then check if the owner is the same
form = NewFileForm(MultiDict(request.json))
if not form.validate():
abort(400, form.errors)
is_anonymous = not current_user.is_authenticated()
response_data = upload.upload_init(phase, form, is_anonymous)
elif phase == "complete":
s3_key = request.json.get("s3_key", None)
multipart_id = request.json.get("mp_id", None)
if multipart_id is None or s3_key is None:
abort(400)
response_data = upload.upload_complete(phase, s3_key, multipart_id)
if phase in ["form", "complete"]:
if not current_user.is_authenticated():
s3_key = response_data["key"]
current_app.redis_client.expire("temp_files:%s" %(s3_key), 600)
current_app.redis_client.sadd("link_uploads:%s" %(link_id), s3_key)
return jsonify(response_data)
@app.route('/link/<link_id>/upload/<file_name>', methods=["PUT"])
def link_target_upload(link_id, file_name):
    # Check if the link allows anonymous uploads.
    # Set size limits for the file.
    # Set storage type to reduced redundancy.
link_data = current_app.redis_client.get("links:%s" %link_id)
if link_data is None:
abort(404)
link_info = json.loads(link_data)
if link_info["expires_in"] < time.time():
abort(404, "Link has expired")
if not link_info["allow_uploads"]:
abort(403)
content_length = request.headers["content-length"]
content_type = mimetypes.guess_type(file_name)[0] or \
"application/octet-stream"
if int(content_length) > link_info["max_upload_size"]:
abort(400)
url = upload.upload_curl(file_name, content_length, content_type)
# curl -Lv --upload-file ~/Downloads/xxx.pdf http://celery.meer.io:5000/link/xxx/upload/
print url
return redirect(url, 307)
| apache-2.0 | 907,547,245,753,685,200 | 34.68323 | 92 | 0.59942 | false | 3.552876 | false | false | false |
david-mateo/marabunta | marabunta/models/PerimeterDefenseRobot.py | 1 | 7197 | from marabunta import BaseRobot
from math import *
class PerimeterDefenseRobot(BaseRobot):
"""Robot model for perimeter defense.
By iteratively calling the update() method,
this robot will communicate with the rest
of the swarm and move away from the others
as far as possible. Takes a *threshold*
parameter to determine when it has gone
far enough and reached consensus. Can be
set to 0.
Obstacle avoidance (implemented in BaseRobot)
will take precence over consensus reaching.
"""
def __init__(self, body, network, threshold):
BaseRobot.__init__(self, body, network)
self.threshold = threshold
self.rendezvous_point = None
self.path = []
self.known_lights = []
self.num_lights = 0
return
def set_path(self, path):
self.path = path[:]
return self.path
def spread_target(self):
"""Get the other agent's state and
compute the direction of motion that
will maximize distance with them.
This is computed as a linear combination
of the positions of each neighbor
relative to the agent, where each
position is weighted by the inverse
of the distance**2 to that robot,
t_i = sum_j (r_j - r_i)/|r_j - r_i|^2 ,
so that higher priority is given to
move away from the closer agents, but
still taking all into account and
allowing for neighbors to "cancel each
other out."
Returns a vector pointing to the
mean heading. If no agents are
detected, returns None.
"""
neis = self.get_agents().values()
pos = self.body.get_position()
if neis:
target = [0.,0.]
for nei in neis:
d2 = (nei[0]-pos[0])**2 + (nei[1]-pos[1])**2
if d2>0:
target[0] += (pos[0] - nei[0])/d2
target[1] += (pos[1] - nei[1])/d2
norm2 = target[0]*target[0] + target[1]*target[1]
if norm2 < self.threshold:
target = None
else:
target = None
return target
def rendezvous_target(self):
"""Compute the target direction of movement
that allows the robot to reach the rendezvous point
(stored in self.rendezvous_point).
When the robot is close enough to the point this
sets self.rendezvous_point to None and also returns
None as the target.
"""
if self.rendezvous_point:
pos = self.body.get_position()
target = [ self.rendezvous_point[0]-pos[0] , self.rendezvous_point[1]-pos[1] ]
distance = sqrt(target[0]*target[0]+target[1]*target[1])
if distance < 0.10: # rendezvous point reached
try:
self.rendezvous_point = self.path.pop(0)
target = self.rendezvous_target()
except:
target = [0., 0.]
self.rendezvous_point = None
else:
try:
self.rendezvous_point = self.path.pop(0)
target = self.rendezvous_target()
except:
target = None
self.rendezvous_point = None
return target
def move_to_target(self, target, deltat, v, block=False):
"""If the norm2 of *target* is is larger
than *threshold*, align the robot to
*target* and move forward for *deltat*
at a speed *v*.
Else, stop for *deltat*.
"""
if target[0]**2 + target[1]**2 > self.threshold*self.threshold:
# Some robots allow for a block argument in
# the align method.
try:
self.body.align(target, block)
except (TypeError,AttributeError):
self.align(target)
self.move_forward(deltat, v)
else:
self.move_forward(deltat, 0)
return
def light_detected(self):
"""If light is detected and is a
new light, broadcast its positon
and add it to the list of known
light sources.
"""
try:
light = self.body.light_detected()
except AttributeError:
light = False
if light:
x, y = self.body.get_position()
self.add_light(x,y)
return light
def process_messages(self):
messages = self.network.get_messages()
for message in messages:
if len(message)>3:
mesdata = message.split()
if mesdata[0]=="stop":
raise Exception("Stop!")
elif mesdata[0]=="goto":
try:
self.rendezvous_point = (float(mesdata[1]), float(mesdata[2]))
except:
print("#PerimenterDefenseRobot: Strange message received: ",message)
elif mesdata[0]=="light":
try:
x, y = float(mesdata[1]), float(mesdata[2])
except:
x, y = None, None
print("#PerimenterDefenseRobot: Strange message received: ",message)
self.add_light(x,y)
return messages
def add_light(self, x, y):
"""Only add light to the list of known lights if
this new one is at least 0.8 from any other
previously known light.
"""
if all( (x-light[0])**2 + (y-light[1])**2 > 0.8 * 0.8 for light in self.known_lights):
self.known_lights.append( (x,y) )
self.num_lights += 1
self.network.send_message("light\t%.2f\t%.2f\n"%(x,y))
return
def update(self, deltat, v=None):
"""Perform one step of the consensus
protocol. This is the main "behavior"
of the robot. It consists of 4 steps:
1. Broadcast its state.
2. Perform swarming. In practice,
this means computing the desired
target direction of motion.
(in this case, perform perimeter
defense)
3. Correct the desired target
in order to avoid obstacles.
4. Move in the desired target direction.
"""
self.broadcast_state()
self.process_messages()
# If goto message received, go there
target = self.rendezvous_target()
# check if rendezvous point has been reached
if target and target[0]==0 and target[1]==0:
return False, True # STOP HERE!
if not target:
# Perform swarming
target = self.spread_target()
if not target:
            h = self.body.get_heading()
target = [10.*sqrt(self.threshold)*cos(h) ,10.*sqrt(self.threshold)*sin(h)]
# Avoid obstacles
target = self.correct_target(target)
obstacle = self.obstacle_near()
if obstacle and v:
v *= 0.6
self.move_to_target(target, deltat, v, obstacle)
light = self.light_detected()
return light, False
| gpl-3.0 | -524,198,928,009,362,900 | 35.348485 | 94 | 0.539669 | false | 4.066102 | false | false | false |
demisto/content | Packs/PANOSPolicyOptimizer/Integrations/PANOSPolicyOptimizer/PANOSPolicyOptimizer.py | 1 | 18763 | import hashlib
from CommonServerPython import *
class Client:
"""
Client to use in the APN-OS Policy Optimizer integration.
"""
def __init__(self, url: str, username: str, password: str, vsys: str, device_group: str, verify: bool, tid: int):
        # The TID is used to track individual commands sent to the firewall/Panorama during a PHP session, and
# is also used to generate the security token (Data String) that is used to validate each command.
# Setting tid as a global variable with an arbitrary value of 50
self.session_metadata: Dict[str, Any] = {'panorama': url, 'base_url': url, 'username': username,
'password': password, 'tid': tid}
if device_group and vsys:
raise DemistoException(
'Cannot configure both vsys and Device group. Set vsys for firewall, set Device group for Panorama.')
if not device_group and not vsys:
raise DemistoException('Set vsys for firewall or Device group for Panorama.')
self.machine = vsys if vsys else device_group
self.verify = verify
handle_proxy()
# Use Session() in order to maintain cookies for persisting the login PHP session cookie
self.session = requests.Session()
def session_post(self, url: str, json_cmd: dict) -> dict:
response = self.session.post(url=url, json=json_cmd, verify=self.verify)
json_response = json.loads(response.text)
if 'type' in json_response and json_response['type'] == 'exception':
if 'message' in json_response:
raise Exception(f'Operation to PAN-OS failed. with: {str(json_response["message"])}')
raise Exception(f'Operation to PAN-OS failed. with: {str(json_response)}')
return json_response
def login(self) -> str:
# This is the data sent to Panorama from the Login screen to complete the login and get a PHPSESSID cookie
login_data = {
'prot': 'https:',
'server': self.session_metadata['panorama'],
'authType': 'init',
'challengeCookie': '',
'user': self.session_metadata['username'],
'passwd': self.session_metadata['password'],
'challengePwd': '',
'ok': 'Log In'
}
try:
# Use a POST command to login to Panorama and create an initial session
self.session.post(url=f'{self.session_metadata["base_url"]}/php/login.php?', data=login_data,
verify=self.verify)
# Use a GET command to the base URL to get the ServerToken which looks like this:
# window.Pan.st.st.st539091 = "8PR8ML4A67PUMD3NU00L3G67M4958B996F61Q97T"
response = self.session.post(url=f'{self.session_metadata["base_url"]}/', verify=self.verify)
except Exception as err:
raise Exception(f'Failed to login. Please double-check the credentials and the server URL. {str(err)}')
# Use RegEx to parse the ServerToken string from the JavaScript variable
match = re.search(r'(?:window\.Pan\.st\.st\.st[0-9]+\s=\s\")(\w+)(?:\")', response.text)
# The JavaScript calls the ServerToken a "cookie" so we will use that variable name
# The "data" field is the MD5 calculation of "cookie" + "TID"
if not match:
raise Exception('Failed to login. Please double-check the credentials and the server URL.')
return match.group(1)
def logout(self):
self.session.post(url=f'{self.session_metadata["base_url"]}/php/logout.php?', verify=False)
def token_generator(self) -> str:
"""
The PHP Security Token (Data String) is generated with the TID (counter) and a special session "cookie"
:return: hash token
"""
data_code = f'{self.session_metadata["cookie"]}{str(self.session_metadata["tid"])}'
data_hash = hashlib.md5(data_code.encode()) # Use the hashlib library function to calculate the MD5
data_string = data_hash.hexdigest() # Convert the hash to a proper hex string
return data_string
def get_policy_optimizer_statistics(self) -> dict:
self.session_metadata['tid'] += 1 # Increment TID
json_cmd = {
"action": "PanDirect", "method": "run", "data": [
self.token_generator(),
"PoliciesDirect.getRuleCountInRuleUsage",
[{"type": "security", "position": "main", "vsysName": self.machine}]
],
"type": "rpc", "tid": self.session_metadata['tid']
}
return self.session_post(
url=f'{self.session_metadata["base_url"]}/php/utils/router.php/PoliciesDirect.getRuleCountInRuleUsage',
json_cmd=json_cmd)
def policy_optimizer_no_apps(self) -> dict:
self.session_metadata['tid'] += 1 # Increment TID
json_cmd = {
"action": "PanDirect", "method": "run",
"data": [
self.token_generator(),
"PoliciesDirect.getPoliciesByUsage", [
{
"type": "security",
"position": "main",
"vsysName": self.machine,
"isCmsSelected": False,
"isMultiVsys": False,
"showGrouped": False,
"usageAttributes": {
"timeframeTag": "30",
"application/member": "any",
"apps-seen-count": "geq \'1\'",
"action": "allow"
},
"pageContext": "app_usage",
"field": "$.bytes",
"direction": "DESC"
}
]
],
"type": "rpc",
"tid": self.session_metadata['tid']}
return self.session_post(
url=f'{self.session_metadata["base_url"]}/php/utils/router.php/PoliciesDirect.getPoliciesByUsage',
json_cmd=json_cmd)
def policy_optimizer_get_unused_apps(self) -> dict:
self.session_metadata['tid'] += 1 # Increment TID
json_cmd = {
"action": "PanDirect", "method": "run",
"data": [
self.token_generator(),
"PoliciesDirect.getPoliciesByUsage",
[
{
"type": "security",
"position": "main",
"vsysName": self.machine,
"serialNumber": "",
"isCmsSelected": False,
"isMultiVsys": False,
"showGrouped": False,
"usageAttributes": {
"timeframeTag": "30",
"application/member": "unused",
"action": "allow"
},
"pageContext": "app_usage",
"field": "$.bytes",
"direction": "DESC"
}
]
],
"type": "rpc",
"tid": self.session_metadata['tid']}
return self.session_post(
url=f'{self.session_metadata["base_url"]}/php/utils/router.php/PoliciesDirect.getPoliciesByUsage',
json_cmd=json_cmd)
def policy_optimizer_get_rules(self, timeframe: str, usage: str, exclude: bool) -> dict:
self.session_metadata['tid'] += 1 # Increment TID
json_cmd = {
"action": "PanDirect", "method": "run",
"data": [
self.token_generator(),
"PoliciesDirect.getPoliciesByUsage",
[
{
"type": "security",
"position": "main",
"vsysName": self.machine,
"isCmsSelected": False,
"isMultiVsys": False,
"showGrouped": False,
"usageAttributes": {
"timeframe": timeframe,
"usage": usage, "exclude": exclude,
"exclude-reset-text": "90"
},
"pageContext": "rule_usage"
}
]
], "type": "rpc",
"tid": self.session_metadata['tid']}
return self.session_post(
url=f'{self.session_metadata["base_url"]}/php/utils/router.php/PoliciesDirect.getPoliciesByUsage',
json_cmd=json_cmd)
def policy_optimizer_app_and_usage(self, rule_uuid: str) -> dict:
self.session_metadata['tid'] += 1 # Increment TID
json_cmd = {"action": "PanDirect", "method": "run",
"data": [
self.token_generator(),
"PoliciesDirect.getAppDetails",
[
{
"type": "security",
"vsysName": self.machine,
"position": "main",
"ruleUuidList": [rule_uuid],
"summary": "no",
"resultfields":
"<member>apps-seen</member>"
"<member>last-app-seen-since-count"
"</member><member>days-no-new-app-count</member>",
"appsSeenTimeframe": "any",
"trafficTimeframe": 30
}
]
],
"type": "rpc",
"tid": self.session_metadata['tid']}
return self.session_post(
url=f'{self.session_metadata["base_url"]}/php/utils/router.php/PoliciesDirect.getAppDetails',
json_cmd=json_cmd)
def policy_optimizer_get_dag(self, dag: str) -> dict:
self.session_metadata['tid'] += 1 # Increment TID
json_cmd = {
"action": "PanDirect",
"method": "execute",
"data": [
self.token_generator(),
"AddressGroup.showDynamicAddressGroup", {
"id": dag,
"vsysName": self.machine
}
],
"type": "rpc",
"tid": self.session_metadata['tid']}
return self.session_post(
url=f'{self.session_metadata["base_url"]}/php/utils/router.php/AddressGroup.showDynamicAddressGroup',
json_cmd=json_cmd)
def get_policy_optimizer_statistics_command(client: Client) -> CommandResults:
"""
Gets the Policy Optimizer Statistics as seen from the User Interface
"""
outputs_stats = {}
raw_response = client.get_policy_optimizer_statistics()
stats = raw_response['result']
if '@status' in stats and stats['@status'] == 'error':
raise Exception(f'Operation Failed with: {str(stats)}')
stats = stats['result']
    # pivot the entries into a dict keyed by '@name' so they'll look better in the context
for i in stats['entry']:
outputs_stats[i['@name']] = i['text']
return CommandResults(
outputs_prefix='PanOS.PolicyOptimizer.Stats',
outputs=outputs_stats,
readable_output=tableToMarkdown(name='Policy Optimizer Statistics:', t=stats['entry'], removeNull=True),
raw_response=raw_response
)
def policy_optimizer_no_apps_command(client: Client) -> CommandResults:
"""
Gets the Policy Optimizer Statistics as seen from the User Interface
"""
raw_response = client.policy_optimizer_no_apps()
stats = raw_response['result']
if '@status' in stats and stats['@status'] == 'error':
raise Exception(f'Operation Failed with: {str(stats)}')
stats = stats['result']
if '@count' in stats and stats['@count'] == '0':
return CommandResults(readable_output='No Rules without apps were found.', raw_response=raw_response)
rules_no_apps = stats['entry']
if not isinstance(rules_no_apps, list):
        rules_no_apps = [rules_no_apps]
headers = ['@name', '@uuid', 'action', 'description', 'source', 'destination']
return CommandResults(
outputs_prefix='PanOS.PolicyOptimizer.NoApps',
outputs_key_field='@uuid',
outputs=rules_no_apps,
readable_output=tableToMarkdown(name='Policy Optimizer No App Specified:', t=rules_no_apps, headers=headers,
removeNull=True),
raw_response=raw_response
)
def policy_optimizer_get_unused_apps_command(client: Client) -> CommandResults:
"""
Gets the Policy Optimizer Statistics as seen from the User Interface
"""
raw_response = client.policy_optimizer_get_unused_apps()
stats = raw_response['result']
if '@status' in stats and stats['@status'] == 'error':
raise Exception(f'Operation Failed with: {str(stats)}')
stats = stats['result']
if '@count' in stats and stats['@count'] == '0':
return CommandResults(readable_output='No Rules with unused apps were found.', raw_response=raw_response)
return CommandResults(
outputs_prefix='PanOS.PolicyOptimizer.UnusedApps',
outputs_key_field='Stats',
outputs=stats,
readable_output=tableToMarkdown(name='Policy Optimizer Unused Apps:', t=stats['entry'], removeNull=True),
raw_response=raw_response
)
def policy_optimizer_get_rules_command(client: Client, args: dict) -> CommandResults:
"""
Gets the unused rules Statistics as seen from the User Interface
"""
timeframe = str(args.get('timeframe'))
usage = str(args.get('usage'))
exclude = argToBoolean(args.get('exclude'))
raw_response = client.policy_optimizer_get_rules(timeframe, usage, exclude)
stats = raw_response['result']
if '@status' in stats and stats['@status'] == 'error':
raise Exception(f'Operation Failed with: {str(stats)}')
stats = stats['result']
if '@count' in stats and stats['@count'] == '0':
return CommandResults(readable_output=f'No {usage} rules where found.', raw_response=raw_response)
rules = stats['entry']
if not isinstance(rules, list):
        rules = [rules]
headers = ['@name', '@uuid', 'action', 'description', 'source', 'destination']
return CommandResults(
outputs_prefix=f'PanOS.PolicyOptimizer.{usage}Rules',
outputs_key_field='@uuid',
outputs=rules,
readable_output=tableToMarkdown(name=f'PolicyOptimizer {usage}Rules:', t=rules, headers=headers,
removeNull=True),
raw_response=raw_response
)
def policy_optimizer_app_and_usage_command(client: Client, args: dict) -> CommandResults:
"""
Gets the Policy Optimizer Statistics as seen from the User Interface
"""
rule_uuid = str(args.get('rule_uuid'))
raw_response = client.policy_optimizer_app_and_usage(rule_uuid)
stats = raw_response['result']
if '@status' in stats and stats['@status'] == 'error':
raise Exception(f'Operation Failed with: {str(stats)}')
stats = stats['result']
if '@count' in stats and stats['@count'] == '0':
return CommandResults(readable_output=f'Rule with UUID:{rule_uuid} does not use apps.', raw_response=raw_response)
rule_stats = stats['rules']['entry'][0]
return CommandResults(
outputs_prefix='PanOS.PolicyOptimizer.AppsAndUsage',
outputs_key_field='@uuid',
outputs=rule_stats,
readable_output=tableToMarkdown(name='Policy Optimizer Apps and Usage:', t=rule_stats, removeNull=True),
raw_response=raw_response
)
def policy_optimizer_get_dag_command(client: Client, args: dict) -> CommandResults:
"""
Gets the DAG
"""
dag = str(args.get('dag'))
raw_response = client.policy_optimizer_get_dag(dag)
result = raw_response['result']
if '@status' in result and result['@status'] == 'error':
raise Exception(f'Operation Failed with: {str(result)}')
try:
result = result['result']['dyn-addr-grp']['entry'][0]['member-list']['entry']
except KeyError:
raise Exception(f'Dynamic Address Group: {dag} was not found.')
return CommandResults(
outputs_prefix='PanOS.PolicyOptimizer.DAG',
outputs_key_field='Stats',
outputs=result,
readable_output=tableToMarkdown(name='Policy Optimizer Dynamic Address Group:', t=result, removeNull=True),
raw_response=raw_response
)
def main():
command = demisto.command()
params = demisto.params()
args = demisto.args()
demisto.debug(f'Command being called is: {command}')
client: Client = None # type: ignore
try:
client = Client(url=params.get('server_url'), username=params['credentials']['identifier'],
password=params['credentials']['password'], vsys=params.get('vsys'),
device_group=params.get('device_group'), verify=not params.get('insecure'), tid=50)
client.session_metadata['cookie'] = client.login() # Login to PAN-OS and return the GUI cookie value
if command == 'test-module':
return_results('ok') # if login was successful, instance configuration is ok.
elif command == 'pan-os-po-get-stats':
return_results(get_policy_optimizer_statistics_command(client))
elif command == 'pan-os-po-no-apps':
return_results(policy_optimizer_no_apps_command(client))
elif command == 'pan-os-po-unused-apps':
return_results(policy_optimizer_get_unused_apps_command(client))
elif command == 'pan-os-po-get-rules':
return_results(policy_optimizer_get_rules_command(client, args))
elif command == 'pan-os-po-app-and-usage':
return_results(policy_optimizer_app_and_usage_command(client, args))
elif command == 'pan-os-get-dag':
return_results(policy_optimizer_get_dag_command(client, args))
else:
raise NotImplementedError(f'Command {command} was not implemented.')
except Exception as err:
return_error(f'{str(err)}.\n Trace:{traceback.format_exc()}')
finally:
try:
client.logout() # Logout of PAN-OS
except Exception as err:
return_error(f'{str(err)}.\n Trace:{traceback.format_exc()}')
if __name__ in ("__builtin__", "builtins", '__main__'):
main()
| mit | -3,208,916,145,890,129,000 | 41.450226 | 122 | 0.557054 | false | 4.183501 | false | false | false |
PyBossa/pybossa | test/factories/taskrun_factory.py | 1 | 1933 | # -*- coding: utf8 -*-
# This file is part of PYBOSSA.
#
# Copyright (C) 2015 Scifabric LTD.
#
# PYBOSSA is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PYBOSSA is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with PYBOSSA. If not, see <http://www.gnu.org/licenses/>.
from yacryptopan import CryptoPAn
from pybossa.model.task_run import TaskRun
from . import BaseFactory, factory, task_repo
import settings_test
cp = CryptoPAn(settings_test.CRYPTOPAN_KEY)
class TaskRunFactory(BaseFactory):
class Meta:
model = TaskRun
@classmethod
def _create(cls, model_class, *args, **kwargs):
taskrun = model_class(*args, **kwargs)
task_repo.save(taskrun)
return taskrun
id = factory.Sequence(lambda n: n)
task = factory.SubFactory('factories.TaskFactory')
task_id = factory.LazyAttribute(lambda task_run: task_run.task.id)
project = factory.SelfAttribute('task.project')
project_id = factory.LazyAttribute(lambda task_run: task_run.project.id)
user = factory.SubFactory('factories.UserFactory')
user_id = factory.LazyAttribute(lambda task_run: task_run.user.id)
info = dict(answer='yes')
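# Typical factory_boy usage (illustrative): TaskRunFactory() builds a TaskRun and
# persists it through task_repo.save; field overrides such as
# TaskRunFactory(info=dict(answer='no')) work as with any factory.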
class AnonymousTaskRunFactory(TaskRunFactory):
user = None
user_id = None
user_ip = cp.anonymize('127.0.0.1')
info = 'yes'
class ExternalUidTaskRunFactory(TaskRunFactory):
user = None
user_id = None
user_ip = cp.anonymize('127.0.0.1')
external_uid = '1xa'
info = 'yes'
| agpl-3.0 | 9,141,680,664,959,028,000 | 32.327586 | 77 | 0.713399 | false | 3.508167 | false | false | false |
SurveyMan/SMPy | example_survey.py | 1 | 1248 | from survey.objects import *
import json
"""
This module provides an example of how to construct a questionnaire in Python.
Questionnaires can be saved by calling jsonize and dumping their contents.
Jsonized surveys can be reused, manipulated, and sent via RPC to another service.
"""
q1 = Question("What is your age?"
, ["< 18", "18-34", "35-64", "> 65"]
, qtypes["radio"])
q2 = Question("What is your political affiliation?"
, ["Democrat", "Republican", "Indepedent"]
, qtypes["radio"]
, shuffle=True)
q3 = Question("Which issues do you care about the most?"
, ["Gun control", "Reproductive Rights", "The Economy", "Foreign Relations"]
, qtypes["check"]
,shuffle=True)
q4 = Question("What is your year of birth?"
, [x+1910 for x in range(90)]
, qtypes["dropdown"])
survey = Survey([q1, q2, q3, q4])
filename = 'jsonized_survey.txt'
f = open(filename, 'w')
f.write(json.dumps(survey.jsonize, sort_keys = True, indent = 4))
f.close()
if __name__ == '__main__':
import sys
if len(sys.argv) > 1:
filename = sys.argv[1]
print "See "+filename+" for a jsonzied survey."
| apache-2.0 | -5,831,616,676,094,554,000 | 30.2 | 90 | 0.591346 | false | 3.447514 | false | false | false |
FEniCS/ufl | ufl/finiteelement/mixedelement.py | 1 | 19622 | # -*- coding: utf-8 -*-
"This module defines the UFL finite element classes."
# Copyright (C) 2008-2016 Martin Sandve Alnæs
#
# This file is part of UFL (https://www.fenicsproject.org)
#
# SPDX-License-Identifier: LGPL-3.0-or-later
#
# Modified by Kristian B. Oelgaard
# Modified by Marie E. Rognes 2010, 2012
# Modified by Anders Logg 2014
# Modified by Massimiliano Leoni, 2016
from ufl.log import error
from ufl.permutation import compute_indices
from ufl.utils.sequences import product, max_degree
from ufl.utils.dicts import EmptyDict
from ufl.utils.indexflattening import flatten_multiindex, unflatten_index, shape_to_strides
from ufl.cell import as_cell
from ufl.finiteelement.finiteelementbase import FiniteElementBase
from ufl.finiteelement.finiteelement import FiniteElement
class MixedElement(FiniteElementBase):
"""A finite element composed of a nested hierarchy of mixed or simple
elements."""
__slots__ = ("_sub_elements", "_cells")
def __init__(self, *elements, **kwargs):
"Create mixed finite element from given list of elements"
if type(self) is MixedElement:
if kwargs:
error("Not expecting keyword arguments to MixedElement constructor.")
# Un-nest arguments if we get a single argument with a list of elements
if len(elements) == 1 and isinstance(elements[0], (tuple, list)):
elements = elements[0]
# Interpret nested tuples as sub-mixedelements recursively
elements = [MixedElement(e) if isinstance(e, (tuple, list)) else e
for e in elements]
self._sub_elements = elements
# Pick the first cell, for now all should be equal
cells = tuple(sorted(set(element.cell() for element in elements) - set([None])))
self._cells = cells
if cells:
cell = cells[0]
# Require that all elements are defined on the same cell
if not all(c == cell for c in cells[1:]):
error("Sub elements must live on the same cell.")
else:
cell = None
# Check that all elements use the same quadrature scheme TODO:
# We can allow the scheme not to be defined.
if len(elements) == 0:
quad_scheme = None
else:
quad_scheme = elements[0].quadrature_scheme()
if not all(e.quadrature_scheme() == quad_scheme for e in elements):
error("Quadrature scheme mismatch for sub elements of mixed element.")
# Compute value sizes in global and reference configurations
value_size_sum = sum(product(s.value_shape()) for s in self._sub_elements)
reference_value_size_sum = sum(product(s.reference_value_shape()) for s in self._sub_elements)
# Default value shape: Treated simply as all subelement values
# unpacked in a vector.
value_shape = kwargs.get('value_shape', (value_size_sum,))
# Default reference value shape: Treated simply as all
# subelement reference values unpacked in a vector.
reference_value_shape = kwargs.get('reference_value_shape', (reference_value_size_sum,))
# Validate value_shape (deliberately not for subclasses
# VectorElement and TensorElement)
if type(self) is MixedElement:
# This is not valid for tensor elements with symmetries,
# assume subclasses deal with their own validation
if product(value_shape) != value_size_sum:
error("Provided value_shape doesn't match the "
"total value size of all subelements.")
# Initialize element data
degrees = {e.degree() for e in self._sub_elements} - {None}
degree = max_degree(degrees) if degrees else None
FiniteElementBase.__init__(self, "Mixed", cell, degree, quad_scheme,
value_shape, reference_value_shape)
# Cache repr string
if type(self) is MixedElement:
self._repr = "MixedElement(%s)" % (
", ".join(repr(e) for e in self._sub_elements),)
def reconstruct_from_elements(self, *elements):
"Reconstruct a mixed element from new subelements."
if all(a == b for (a, b) in zip(elements, self._sub_elements)):
return self
return MixedElement(*elements)
def symmetry(self):
"""Return the symmetry dict, which is a mapping :math:`c_0 \\to c_1`
meaning that component :math:`c_0` is represented by component
:math:`c_1`.
A component is a tuple of one or more ints."""
# Build symmetry map from symmetries of subelements
sm = {}
# Base index of the current subelement into mixed value
j = 0
for e in self._sub_elements:
sh = e.value_shape()
st = shape_to_strides(sh)
# Map symmetries of subelement into index space of this
# element
for c0, c1 in e.symmetry().items():
j0 = flatten_multiindex(c0, st) + j
j1 = flatten_multiindex(c1, st) + j
sm[(j0,)] = (j1,)
# Update base index for next element
j += product(sh)
if j != product(self.value_shape()):
error("Size mismatch in symmetry algorithm.")
return sm or EmptyDict
def mapping(self):
if all(e.mapping() == "identity" for e in self._sub_elements):
return "identity"
else:
return "undefined"
def num_sub_elements(self):
"Return number of sub elements."
return len(self._sub_elements)
def sub_elements(self):
"Return list of sub elements."
return self._sub_elements
def extract_subelement_component(self, i):
"""Extract direct subelement index and subelement relative
component index for a given component index."""
if isinstance(i, int):
i = (i,)
self._check_component(i)
# Select between indexing modes
if len(self.value_shape()) == 1:
# Indexing into a long vector of flattened subelement
# shapes
j, = i
# Find subelement for this index
for sub_element_index, e in enumerate(self._sub_elements):
sh = e.value_shape()
si = product(sh)
if j < si:
break
j -= si
if j < 0:
error("Moved past last value component!")
# Convert index into a shape tuple
st = shape_to_strides(sh)
component = unflatten_index(j, st)
else:
# Indexing into a multidimensional tensor where subelement
# index is first axis
sub_element_index = i[0]
if sub_element_index >= len(self._sub_elements):
error("Illegal component index (dimension %d)." % sub_element_index)
component = i[1:]
return (sub_element_index, component)
def extract_component(self, i):
"""Recursively extract component index relative to a (simple) element
and that element for given value component index."""
sub_element_index, component = self.extract_subelement_component(i)
return self._sub_elements[sub_element_index].extract_component(component)
def extract_subelement_reference_component(self, i):
"""Extract direct subelement index and subelement relative
reference_component index for a given reference_component index."""
if isinstance(i, int):
i = (i,)
self._check_reference_component(i)
# Select between indexing modes
assert len(self.reference_value_shape()) == 1
# Indexing into a long vector of flattened subelement shapes
j, = i
# Find subelement for this index
for sub_element_index, e in enumerate(self._sub_elements):
sh = e.reference_value_shape()
si = product(sh)
if j < si:
break
j -= si
if j < 0:
error("Moved past last value reference_component!")
# Convert index into a shape tuple
st = shape_to_strides(sh)
reference_component = unflatten_index(j, st)
return (sub_element_index, reference_component)
def extract_reference_component(self, i):
"""Recursively extract reference_component index relative to a (simple) element
and that element for given value reference_component index."""
sub_element_index, reference_component = self.extract_subelement_reference_component(i)
return self._sub_elements[sub_element_index].extract_reference_component(reference_component)
def is_cellwise_constant(self, component=None):
"""Return whether the basis functions of this
element is spatially constant over each cell."""
if component is None:
return all(e.is_cellwise_constant() for e in self.sub_elements())
else:
i, e = self.extract_component(component)
return e.is_cellwise_constant()
def degree(self, component=None):
"Return polynomial degree of finite element."
if component is None:
return self._degree # from FiniteElementBase, computed as max of subelements in __init__
else:
i, e = self.extract_component(component)
return e.degree()
def reconstruct(self, **kwargs):
return MixedElement(*[e.reconstruct(**kwargs) for e in self.sub_elements()])
def __str__(self):
"Format as string for pretty printing."
tmp = ", ".join(str(element) for element in self._sub_elements)
return "<Mixed element: (" + tmp + ")>"
def shortstr(self):
"Format as string for pretty printing."
tmp = ", ".join(element.shortstr() for element in self._sub_elements)
return "Mixed<" + tmp + ">"
class VectorElement(MixedElement):
"A special case of a mixed finite element where all elements are equal."
def __init__(self, family, cell=None, degree=None, dim=None,
form_degree=None, quad_scheme=None):
"""
Create vector element (repeated mixed element)
*Arguments*
family (string)
The finite element family (or an existing FiniteElement)
cell
The geometric cell, ignored if family is a FiniteElement
degree (int)
The polynomial degree, ignored if family is a FiniteElement
dim (int)
The value dimension of the element (optional)
form_degree (int)
The form degree (FEEC notation, used when field is
viewed as k-form), ignored if family is a FiniteElement
quad_scheme
The quadrature scheme (optional), ignored if family is a FiniteElement
"""
if isinstance(family, FiniteElementBase):
sub_element = family
cell = sub_element.cell()
else:
if cell is not None:
cell = as_cell(cell)
# Create sub element
sub_element = FiniteElement(family, cell, degree,
form_degree=form_degree,
quad_scheme=quad_scheme)
# Set default size if not specified
if dim is None:
if cell is None:
error("Cannot infer vector dimension without a cell.")
dim = cell.geometric_dimension()
# Create list of sub elements for mixed element constructor
sub_elements = [sub_element] * dim
# Compute value shapes
value_shape = (dim,) + sub_element.value_shape()
reference_value_shape = (dim,) + sub_element.reference_value_shape()
# Initialize element data
MixedElement.__init__(self, sub_elements, value_shape=value_shape,
reference_value_shape=reference_value_shape)
FiniteElementBase.__init__(self, sub_element.family(), cell, sub_element.degree(), quad_scheme,
value_shape, reference_value_shape)
self._sub_element = sub_element
# Cache repr string
self._repr = "VectorElement(%s, dim=%d)" % (
repr(sub_element), len(self._sub_elements))
def reconstruct(self, **kwargs):
sub_element = self._sub_element.reconstruct(**kwargs)
return VectorElement(sub_element, dim=len(self.sub_elements()))
def __str__(self):
"Format as string for pretty printing."
return ("<vector element with %d components of %s>" %
(len(self._sub_elements), self._sub_element))
def shortstr(self):
"Format as string for pretty printing."
return "Vector<%d x %s>" % (len(self._sub_elements),
self._sub_element.shortstr())
class TensorElement(MixedElement):
"""A special case of a mixed finite element where all elements are
equal.
"""
__slots__ = ("_sub_element", "_shape", "_symmetry",
"_sub_element_mapping",
"_flattened_sub_element_mapping",
"_mapping")
def __init__(self, family, cell=None, degree=None, shape=None,
symmetry=None, quad_scheme=None):
"""Create tensor element (repeated mixed element with optional symmetries).
:arg family: The family string, or an existing FiniteElement.
:arg cell: The geometric cell (ignored if family is a FiniteElement).
:arg degree: The polynomial degree (ignored if family is a FiniteElement).
:arg shape: The shape of the element (defaults to a square
tensor given by the geometric dimension of the cell).
:arg symmetry: Optional symmetries.
:arg quad_scheme: Optional quadrature scheme (ignored if
family is a FiniteElement)."""
if isinstance(family, FiniteElementBase):
sub_element = family
cell = sub_element.cell()
else:
if cell is not None:
cell = as_cell(cell)
# Create scalar sub element
sub_element = FiniteElement(family, cell, degree, quad_scheme=quad_scheme)
# Set default shape if not specified
if shape is None:
if cell is None:
error("Cannot infer tensor shape without a cell.")
dim = cell.geometric_dimension()
shape = (dim, dim)
if symmetry is None:
symmetry = EmptyDict
elif symmetry is True:
# Construct default symmetry dict for matrix elements
if not (len(shape) == 2 and shape[0] == shape[1]):
error("Cannot set automatic symmetry for non-square tensor.")
symmetry = dict(((i, j), (j, i)) for i in range(shape[0])
for j in range(shape[1]) if i > j)
else:
if not isinstance(symmetry, dict):
error("Expecting symmetry to be None (unset), True, or dict.")
# Validate indices in symmetry dict
for i, j in symmetry.items():
if len(i) != len(j):
error("Non-matching length of symmetry index tuples.")
for k in range(len(i)):
if not (i[k] >= 0 and j[k] >= 0 and i[k] < shape[k] and j[k] < shape[k]):
error("Symmetry dimensions out of bounds.")
# Compute all index combinations for given shape
indices = compute_indices(shape)
# Compute mapping from indices to sub element number,
# accounting for symmetry
sub_elements = []
sub_element_mapping = {}
for index in indices:
if index in symmetry:
continue
sub_element_mapping[index] = len(sub_elements)
sub_elements += [sub_element]
# Update mapping for symmetry
for index in indices:
if index in symmetry:
sub_element_mapping[index] = sub_element_mapping[symmetry[index]]
flattened_sub_element_mapping = [sub_element_mapping[index] for i,
index in enumerate(indices)]
# Compute value shape
value_shape = shape
# Compute reference value shape based on symmetries
if symmetry:
# Flatten and subtract symmetries
reference_value_shape = (product(shape) - len(symmetry),)
self._mapping = "symmetries"
else:
# Do not flatten if there are no symmetries
reference_value_shape = shape
self._mapping = "identity"
value_shape = value_shape + sub_element.value_shape()
reference_value_shape = reference_value_shape + sub_element.reference_value_shape()
# Initialize element data
MixedElement.__init__(self, sub_elements, value_shape=value_shape,
reference_value_shape=reference_value_shape)
self._family = sub_element.family()
self._degree = sub_element.degree()
self._sub_element = sub_element
self._shape = shape
self._symmetry = symmetry
self._sub_element_mapping = sub_element_mapping
self._flattened_sub_element_mapping = flattened_sub_element_mapping
# Cache repr string
self._repr = "TensorElement(%s, shape=%s, symmetry=%s)" % (
repr(sub_element), repr(self._shape), repr(self._symmetry))
def mapping(self):
if self._symmetry:
return "symmetries"
else:
return "identity"
def flattened_sub_element_mapping(self):
return self._flattened_sub_element_mapping
def extract_subelement_component(self, i):
"""Extract direct subelement index and subelement relative
component index for a given component index."""
if isinstance(i, int):
i = (i,)
self._check_component(i)
i = self.symmetry().get(i, i)
l = len(self._shape) # noqa: E741
ii = i[:l]
jj = i[l:]
if ii not in self._sub_element_mapping:
error("Illegal component index %s." % (i,))
k = self._sub_element_mapping[ii]
return (k, jj)
def symmetry(self):
"""Return the symmetry dict, which is a mapping :math:`c_0 \\to c_1`
meaning that component :math:`c_0` is represented by component
:math:`c_1`.
A component is a tuple of one or more ints."""
return self._symmetry
def reconstruct(self, **kwargs):
sub_element = self._sub_element.reconstruct(**kwargs)
return TensorElement(sub_element, shape=self._shape, symmetry=self._symmetry)
def __str__(self):
"Format as string for pretty printing."
if self._symmetry:
tmp = ", ".join("%s -> %s" % (a, b) for (a, b) in self._symmetry.items())
sym = " with symmetries (%s)" % tmp
else:
sym = ""
return ("<tensor element with shape %s of %s%s>" %
(self.value_shape(), self._sub_element, sym))
def shortstr(self):
"Format as string for pretty printing."
if self._symmetry:
tmp = ", ".join("%s -> %s" % (a, b) for (a, b) in self._symmetry.items())
sym = " with symmetries (%s)" % tmp
else:
sym = ""
return "Tensor<%s x %s%s>" % (self.value_shape(),
self._sub_element.shortstr(), sym)
| lgpl-3.0 | -8,729,777,980,419,253,000 | 39.539256 | 103 | 0.588859 | false | 4.32848 | false | false | false |
oudalab/phyllo | phyllo/extractors/johannesDB.py | 1 | 2507 | import sqlite3
import urllib
import re
from urllib.request import urlopen
from bs4 import BeautifulSoup, NavigableString
import nltk
nltk.download('punkt')
from nltk import sent_tokenize
def parseRes2(soup, title, url, cur, author, date, collectiontitle):
chapter = 0
sen = ""
num = 1
[e.extract() for e in soup.find_all('br')]
[e.extract() for e in soup.find_all('table')]
[e.extract() for e in soup.find_all('span')]
[e.extract() for e in soup.find_all('a')]
for x in soup.find_all():
if len(x.text) == 0:
x.extract()
getp = soup.find_all('p')
#print(getp)
for p in getp:
# make sure it's not a paragraph without the main text
try:
            if p['class'][0].lower() in ['border', 'pagehead', 'shortborder', 'smallboarder', 'margin',
                                         'internal_navigation']:  # these are not part of the main text
continue
except:
pass
if p.b:
chapter = p.b.text
chapter = chapter.strip()
else:
sen = p.text
sen = sen.strip()
if sen != '':
num = 0
for s in sent_tokenize(sen):
sentn = s.strip()
num += 1
cur.execute("INSERT INTO texts VALUES (?,?,?,?,?,?,?, ?, ?, ?, ?)",
(None, collectiontitle, title, 'Latin', author, date, chapter,
num, sentn, url, 'prose'))
def main():
# get proper URLs
siteURL = 'http://www.thelatinlibrary.com'
biggsURL = 'http://www.thelatinlibrary.com/johannes.html'
biggsOPEN = urllib.request.urlopen(biggsURL)
biggsSOUP = BeautifulSoup(biggsOPEN, 'html5lib')
title = 'Johannes de Plano Carpini'
author = title
collectiontitle = 'JOHANNES DE PLANO CARPINI LIBELLUS HISTORICUS IOANNIS DE PLANO CARPINI'
date = '1246 A.D.'
with sqlite3.connect('texts.db') as db:
c = db.cursor()
c.execute(
'CREATE TABLE IF NOT EXISTS texts (id INTEGER PRIMARY KEY, title TEXT, book TEXT,'
' language TEXT, author TEXT, date TEXT, chapter TEXT, verse TEXT, passage TEXT,'
' link TEXT, documentType TEXT)')
c.execute("DELETE FROM texts WHERE author = 'Johannes de Plano Carpini'")
parseRes2(biggsSOUP, title, biggsURL, c, author, date, collectiontitle)
if __name__ == '__main__':
main()
| apache-2.0 | -3,149,571,058,116,537,300 | 31.986842 | 103 | 0.55724 | false | 3.643895 | false | false | false |
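# --- Usage sketch (not part of the original file) ---
# After main() runs, the scraped sentences can be read back from texts.db;
# the column names follow the CREATE TABLE statement above.
import sqlite3

with sqlite3.connect('texts.db') as db:
    c = db.cursor()
    c.execute("SELECT chapter, verse, passage FROM texts "
              "WHERE author = 'Johannes de Plano Carpini' LIMIT 5")
    for chapter, verse, passage in c.fetchall():
        print(chapter, verse, passage)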
sketchyfish/ypp-price-calculator | strip_automation.py | 1 | 5368
#!/usr/bin/python
"""
This code uses .strip formatting once to remove the \n and another time to remove the \t from the below lists.
For readability, the script uses a print("\n") to add a new line between the two lists
"""
island_list = ['Armstrong Island', 'Atchafalaya Island', 'Immokalee Island', 'Moultrie Island', 'Sho-ke Island', 'Sirius Island',
'Tumult Island', 'The Beaufort Islands', "Messier's Crown", 'Nunataq Island', 'Paollu Island', 'Qaniit Island',
'Ancoraggio Island', 'Fluke Island', 'Kakraphoon Island', 'Eagle Archipelago', 'Cambium Island', "Hubble's Eye",
'Ilha da Aguia', 'Ix Chel', 'Manu Island', 'Admiral Island', 'Basset Island', 'Bryher Island', 'Cromwell Island',
'Hook Shelf', 'Isle of Kent', 'Lincoln Island', 'Wensleydale', 'Anegada Island', 'Barnard Island', 'The Lowland Hundred',
'Lyonesse Island', 'Myvatn Island', 'Arakoua Island', 'Aten Island', 'Barbary Island', 'Caravanserai Island',
'Kasidim Island', 'Kiwara Island', 'Terjit Island', 'Tichka Plateau', 'Aimuari Island', 'Chachapoya Island',
'Matariki Island', 'Pukru Island', 'Quetzal Island', 'Saiph Island', 'Toba Island', 'Albatross Island', 'Ambush Island',
'Deadlight Dunes', 'Gauntlet Island', "Jack's Last Gift", 'Mirage Island', 'Scurvy Reef', 'Blackthorpe Island', 'Cook Island',
'Descartes Isle', 'Fowler Island', 'Greenwich Island', 'Halley Island', 'Spaniel Island', 'Starfish Island', 'Ventress Island',
'Accompong Island', 'Gallows Island', 'Iocane Island', 'Maia Island', 'Morgana Island', 'Paihia Island', 'Umbarten Island',
'Auk Island', 'Cryo Island', 'Hoarfrost Island', 'Amity Island', 'Bowditch Island', 'Hinga Island', 'Penobscot Island', 'Rowes Island',
'Scrimshaw Island', 'Squibnocket Island', 'Wissahickon Island', 'Ashkelon Arch', 'Kashgar Island', 'Morannon Island', 'Alkaid Island',
'Doyle Island', "Edgar's Choice", 'Isle of Keris', 'Marlowe Island', "McGuffin's Isle", 'Sayers Rock']
commodity_list = [['Hemp', 'Hemp oil', 'Iron', "Kraken's ink", 'Lacquer', 'Stone', 'Sugar cane', 'Varnish', 'Wood', '', 'Broom flower', 'Butterfly weed',
'Cowslip', 'Elderberries', 'Indigo', 'Iris root', 'Lily of the valley', 'Lobelia', 'Madder', 'Nettle', "Old man's beard", 'Pokeweed berries',
'Sassafras', 'Weld', 'Yarrow', '', 'Chalcocite', 'Cubanite', 'Gold nugget', 'Lorandite', 'Leushite', 'Masuyite', 'Papagoite',
'Serandite', 'Sincosite', 'Tellurium', 'Thorianite', '', 'Bananas', 'Carambolas', 'Coconuts', 'Durians', 'Limes', 'Mangos',
'Passion fruit', 'Pineapples', 'Pomegranates', 'Rambutan', 'Amber gems', 'Amethyst gems', 'Beryl gems', 'Coral gems',
'Diamonds', 'Emeralds', 'Jade gems', 'Jasper gems', 'Jet gems', 'Lapis lazuli gems', ' ', 'Moonstones', 'Opals', 'Pearls',
'Quartz gems', 'Rubies', 'Sapphires', 'Tigereye gems', 'Topaz gems', 'Gold nuggets (mineral)', '', 'Swill', 'Grog', 'Fine rum',
'Small, medium, and large cannon balls', 'Lifeboats', '', 'Aqua cloth', 'Black cloth', 'Blue cloth', 'Brown cloth', 'Gold cloth',
'Green cloth', 'Grey cloth', 'Lavender cloth', 'Light green cloth', 'Lime cloth', 'Magenta cloth', 'Maroon cloth', 'Mint cloth',
'Navy cloth', 'Orange cloth', 'Pink cloth', 'Purple cloth', 'Red cloth', 'Rose cloth', 'Tan cloth', 'Violet cloth', 'White cloth',
'Yellow cloth', 'Fine aqua cloth', 'Fine black cloth', 'Fine blue cloth', 'Fine brown cloth', 'Fine gold cloth', 'Fine green cloth',
'Fine grey cloth', 'Fine lavender cloth', 'Fine light green cloth', 'Fine lime cloth', 'Fine magenta cloth', 'Fine maroon cloth',
'Fine mint cloth', ' ', 'Fine navy cloth', 'Fine orange cloth', 'Fine pink cloth', 'Fine purple cloth', 'Fine red cloth', 'Fine rose cloth',
'Fine tan cloth', 'Fine violet cloth', 'Fine white cloth', 'Fine yellow cloth', 'Sail cloth', '', 'Blue dye', 'Green dye',
"Kraken's blood", 'Red dye', 'Yellow dye', '', 'Aqua enamel', 'Black enamel', 'Blue enamel', 'Brown enamel', 'Gold enamel',
'Green enamel', 'Grey enamel', 'Lavender enamel', 'Light green enamel', 'Lime enamel', 'Magenta enamel', 'Maroon enamel', 'Mint enamel',
'Navy enamel', 'Orange enamel', 'Pink enamel', 'Purple enamel', 'Red enamel', 'Rose enamel', 'Tan enamel', 'Violet enamel', 'White enamel',
'Yellow enamel', '', 'Aqua paint', 'Black paint', 'Blue paint', 'Brown paint', 'Gold paint', 'Green paint', 'Grey paint', 'Lavender paint',
'Light green paint', 'Lime paint', 'Magenta paint', 'Maroon paint', 'Mint paint', 'Navy paint', 'Orange paint', 'Pink paint',
'Purple paint', 'Red paint', 'Rose paint', 'Tan paint', 'Violet paint', 'White paint', 'Yellow paint']]
newi_list = []
newc_list = []
for each_item in island_list:
b = each_item.strip("\n")
c = b.strip("\t")
newi_list.append(c)
# commodity_list is a list containing a single inner list of strings, so
# iterate over commodity_list[0]; iterating over commodity_list itself
# yields the inner list and .strip() would raise AttributeError.
for each_item in commodity_list[0]:
    b = each_item.strip("\n")
    c = b.strip("\t")
    newc_list.append(c)
print(newi_list)
print("\n")
print(newc_list)
| gpl-2.0 | -3,883,570,465,021,505,500 | 91.551724 | 162 | 0.607116 | false | 2.689379 | false | false | false |
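# --- Equivalent sketch (not part of the original file) ---
# str.strip("\n\t") handles both characters in a single pass, and list
# comprehensions replace the temporary variables; for the data above (which
# contains neither character) the result is identical.
newi_list2 = [item.strip("\n\t") for item in island_list]
newc_list2 = [item.strip("\n\t") for item in commodity_list[0]]
assert newi_list2 == newi_list and newc_list2 == newc_list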
aangert/PiParty | color_tests/color_combo_test.py | 2 | 1075
from enum import Enum
class Colors(Enum):
Pink = (255,96,96)
Magenta = (255,0,192)
Orange = (255,64,0)
Yellow = (255,255,0)
Green = (0,255,0)
Turquoise = (0,255,255)
Blue = (0,0,255)
Purple = (96,0,255)
color_list = [x for x in Colors]
quad_teams_banned = {
Colors.Pink : [Colors.Magenta,Colors.Purple],
Colors.Magenta : [Colors.Pink,Colors.Purple],
Colors.Orange : [Colors.Yellow],
Colors.Yellow : [Colors.Orange],
Colors.Green : [Colors.Turquoise],
Colors.Turquoise : [Colors.Green,Colors.Blue],
Colors.Blue : [Colors.Turquoise],
Colors.Purple : [Colors.Magenta,Colors.Pink]
}
for a,b,c,d in [(a,b,c,d) for a in range(8) for b in range(a+1,8) for c in range(b+1,8) for d in range(c+1,8)]:
quad = [color_list[x] for x in (a,b,c,d)]
quad_banned = [quad_teams_banned[i] for i in quad]
quad_banned = list(set([i for sublist in quad_banned for i in sublist]))
bad = False
for color in quad:
if color in quad_banned:
bad = True
if not bad:
print(quad)
| mit | -6,680,612,122,141,936,000 | 28.054054 | 111 | 0.606512 | false | 2.596618 | false | false | false |
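# --- Equivalent sketch (not part of the original file) ---
# itertools.combinations yields the same 8-choose-4 selections as the nested
# comprehension above, without manual index bookkeeping.
from itertools import combinations

for quad2 in combinations(color_list, 4):
    banned = {b for color in quad2 for b in quad_teams_banned[color]}
    if not any(color in banned for color in quad2):
        print(list(quad2))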
DrYerzinia/Cat-Finder | src/KittyTracker/kittyTracker.py | 1 | 1581
from netaddr import *
from datetime import datetime
import blescan
import time
import sys
import bluetooth._bluetooth as bluez
from Kitty import Kitty
from CheckKittys import CheckKittys
from BLESerialScanner import BLESerialScanner
import SendMail
import config
def process(mac, rssi):
found = False
for k in config.kittys:
if mac == k.mac:
k.lastHeard = datetime.now()
print 'Heard ' , k.name , ' at ' + str(rssi) + 'dBm!'
if k.ttw != 180:
SendMail.sendMail(k.name + ' reacquired')
k.ttw = 180
found = True
break
if not found:
        print 'Unknown mac: ', mac
sys.stdout.flush()
def main():
running = True
kittyChecker = CheckKittys()
scanner = BLESerialScanner(process)
# dev_id = 0
# try:
# sock = bluez.hci_open_dev(dev_id)
# print "ble thread started"
# except:
# print "error accessing bluetooth device..."
# sys.exit(1)
# blescan.hci_le_set_scan_parameters(sock)
# blescan.hci_enable_le_scan(sock)
kittyChecker.daemon = True
kittyChecker.kittys = config.kittys
kittyChecker.running = True
kittyChecker.start()
scanner.start()
message = "Kitty Tracker Active! Now tracking " + ", ".join(str(k.name) for k in config.kittys)
print message
SendMail.sendMail(message)
try:
while running:
time.sleep(1)
except KeyboardInterrupt:
running = False
kittyChecker.running = False
scanner.running = False
print "Terminating..."
# returnedList = blescan.parse_events(sock, 1)
# for beacon in returnedList:
# mac, a, b, c, d, rssi = beacon.split(',')
# mac = EUI(mac)
if __name__ == '__main__':
main()
| unlicense | -1,341,239,824,988,486,700 | 19.532468 | 96 | 0.688805 | false | 2.744792 | false | false | false |
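# --- Hypothetical config sketch (the real config module is not shown) ---
# The code above expects config.kittys to hold Kitty objects exposing at
# least mac, name, lastHeard and ttw attributes; a plausible setup could be:
#
#   kittys = [Kitty(mac='aa:bb:cc:dd:ee:ff', name='Whiskers'), ...]
#
# The Kitty constructor signature here is an assumption for illustration.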
anaran/olympia | apps/bandwagon/tasks.py | 1 | 4144
import logging
import math
from django.core.files.storage import default_storage as storage
from django.db.models import Count
import elasticutils.contrib.django as elasticutils
from celeryutils import task
import amo
from amo.decorators import set_modified_on
from amo.helpers import user_media_path
from amo.utils import attach_trans_dict, resize_image
from tags.models import Tag
from lib.es.utils import index_objects
from . import search
from .models import (Collection, CollectionAddon, CollectionVote,
CollectionWatcher)
log = logging.getLogger('z.task')
@task
def collection_votes(*ids, **kw):
log.info('[%s@%s] Updating collection votes.' %
(len(ids), collection_votes.rate_limit))
using = kw.get('using')
for collection in ids:
v = CollectionVote.objects.filter(collection=collection).using(using)
votes = dict(v.values_list('vote').annotate(Count('vote')))
c = Collection.objects.get(id=collection)
c.upvotes = up = votes.get(1, 0)
c.downvotes = down = votes.get(-1, 0)
try:
# Use log to limit the effect of the multiplier.
c.rating = (up - down) * math.log(up + down)
except ValueError:
c.rating = 0
c.save()
@task
@set_modified_on
def resize_icon(src, dst, locally=False, **kw):
"""Resizes collection icons to 32x32"""
log.info('[1@None] Resizing icon: %s' % dst)
try:
resize_image(src, dst, (32, 32), locally=locally)
return True
except Exception, e:
log.error("Error saving collection icon: %s" % e)
@task
def delete_icon(dst, **kw):
log.info('[1@None] Deleting icon: %s.' % dst)
if not dst.startswith(user_media_path('collection_icons')):
log.error("Someone tried deleting something they shouldn't: %s" % dst)
return
try:
storage.delete(dst)
except Exception, e:
log.error("Error deleting icon: %s" % e)
@task
def collection_meta(*ids, **kw):
log.info('[%s@%s] Updating collection metadata.' %
(len(ids), collection_meta.rate_limit))
using = kw.get('using')
qs = (CollectionAddon.objects.filter(collection__in=ids)
.using(using).values_list('collection'))
counts = dict(qs.annotate(Count('id')))
persona_counts = dict(qs.filter(addon__type=amo.ADDON_PERSONA)
.annotate(Count('id')))
tags = (Tag.objects.not_blacklisted().values_list('id')
.annotate(cnt=Count('id')).filter(cnt__gt=1).order_by('-cnt'))
for c in Collection.objects.no_cache().filter(id__in=ids):
addon_count = counts.get(c.id, 0)
all_personas = addon_count == persona_counts.get(c.id, None)
addons = list(c.addons.values_list('id', flat=True))
c.top_tags = [t for t, _ in tags.filter(addons__in=addons)[:5]]
Collection.objects.filter(id=c.id).update(addon_count=addon_count,
all_personas=all_personas)
@task
def collection_watchers(*ids, **kw):
log.info('[%s@%s] Updating collection watchers.' %
(len(ids), collection_watchers.rate_limit))
using = kw.get('using')
for pk in ids:
try:
watchers = (CollectionWatcher.objects.filter(collection=pk)
.using(using).count())
Collection.objects.filter(pk=pk).update(subscribers=watchers)
log.info('Updated collection watchers: %s' % pk)
except Exception, e:
log.error('Updating collection watchers failed: %s, %s' % (pk, e))
@task
def index_collections(ids, **kw):
log.debug('Indexing collections %s-%s [%s].' % (ids[0], ids[-1], len(ids)))
index = kw.pop('index', None)
index_objects(ids, Collection, search, index, [attach_translations])
def attach_translations(collections):
"""Put all translations into a translations dict."""
attach_trans_dict(Collection, collections)
@task
def unindex_collections(ids, **kw):
for id in ids:
log.debug('Removing collection [%s] from search index.' % id)
Collection.unindex(id)
| bsd-3-clause | 8,352,693,703,532,291,000 | 33.247934 | 79 | 0.625 | false | 3.566265 | false | false | false |
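# --- Usage sketch (assumes a configured Celery app, not shown here) ---
# Tasks decorated with @task are normally queued asynchronously, e.g.:
#
#   collection_votes.delay(1, 2, using='default')
#   index_collections.delay([1, 2, 3], index='collections')
#
# .delay() is standard Celery; the ids and index name are illustrative.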
FATSLiM/fatslim | docs/sphinx-src/documentation/tutorials/show_apl_map.py | 1 | 1525
#!/usr/bin/env python
# -*- coding: utf8 -*-
import numpy as np
import matplotlib.pyplot as plt
from scipy.interpolate import griddata
CSV_FILENAME = "bilayer_prot_apl_frame_00000.csv"
GRO_FILENAME = "bilayer_prot.gro"
PNG_FILENAME = "bilayer_prot_apl_frame_00000.png"
# Get Box vectors
last_line = ""
with open(GRO_FILENAME) as fp:
for line in fp:
line = line.strip()
if len(line) == 0:
continue
last_line = line
    # Use the last non-empty line, which carries the box vectors in a .gro file
    box_x, box_y = [float(val) for val in last_line.split()[:2]]
# Get values
membrane_property = "Area per lipid"
x_values = []
y_values = []
z_values = []
property_values = []
with open(CSV_FILENAME) as fp:
for lino, line in enumerate(fp):
if lino == 0:
membrane_property = line.split(",")[-1].strip()
else:
line = line.strip()
if len(line) == 0:
continue
resid, leaflet, x, y, z, value = line.split(",")
x_values.append(float(x))
y_values.append(float(y))
property_values.append(float(value))
# Building data from plotting
grid_x, grid_y = np.mgrid[0:box_x:50j, 0:box_y:50j]
points = np.stack((np.array(x_values).T, np.array(y_values).T), axis=-1)
values = np.array(property_values)
grid = griddata(points, values, (grid_x, grid_y), method='cubic')
# Plot map
plt.contourf(grid_x, grid_y, grid)
cbar = plt.colorbar()
plt.title(membrane_property)
plt.xlabel("Box X (nm)")
plt.ylabel("Box Y (nm)")
plt.tight_layout()
plt.savefig(PNG_FILENAME)
| gpl-3.0 | 6,198,686,079,688,072,000 | 22.828125 | 72 | 0.61377 | false | 2.938343 | false | false | false |
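# --- Input format sketch (inferred from the parsing code above) ---
# The CSV is expected to carry a header whose last column names the plotted
# property, followed by one row per lipid:
#
#   resid,leaflet,X,Y,Z,Area per lipid
#   1,lower leaflet,2.23,1.17,4.88,0.64
#   2,upper leaflet,0.55,3.31,7.12,0.61
#
# The exact header wording is an assumption; only the column order matters.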
andree182/podx3 | getrawaudio.py | 1 | 1079
#!/usr/bin/env python
"""
Reads audio data from the device through isochronous transfer.
NOTE: Due to alignment or whatever, the data is not correct. Data size of
the input endpoint is 170B, but the the actual data size is 6 * (3*2*4),
the rest bytes are filled with zero.
"""
import usb.core  # usb.core.find() is used below and is not pulled in by usb.util alone
import usb.util
import time
ID_VENDOR = 0x0e41
ID_PRODUCT = 0x414a
emptyData = chr(0) * (7 * 6)
d = usb.core.find(idVendor = ID_VENDOR, idProduct = ID_PRODUCT)
if d is None:
raise ValueError("not connected")
d.set_interface_altsetting(0,1)
x = []
def chunks(l, n):
"""Yield successive n-sized chunks from l."""
for i in xrange(0, len(l), n):
yield l[i:i+n]
for i in range(0, 100):
nx = d.read(0x86, 16384, 1000)
print len(nx)
if len(nx) == 0:
# d.write(0x02, emptyData) # attempt to revive device after input stream freezes
time.sleep(0.001)
continue
raw = []
for i in chunks(nx, 170):
raw += i[:144]
d.write(0x02, nx[:len(raw)/4])
x += [raw]
f = file("test.raw", "w")
for i in x:
f.write(''.join(map(chr,i)))
| gpl-2.0 | -2,545,959,085,404,143,600 | 20.58 | 88 | 0.624652 | false | 2.824607 | false | false | false |
CPedrini/TateTRES | erapi.py | 1 | 11009
#-*- encoding: utf-8 -*-
import csv, math, time, re, threading, sys
try:
from urllib.request import urlopen
except ImportError:
from urllib import urlopen
class ErAPI():
    # Constructor: basic configuration setup; instantiates helper objects
def __init__(self):
self.data = {}
# Data format: {'XXCiro|BNC': {'id': 123456, 'nick': 'XXCiro', 'level': 49, 'strength': 532.5, 'rank_points': 1233354, 'citizenship': 'Argentina'}}
        # Dictionary mapping rank -> required points
self.rank_required_points = {
"Recruit": 0,
"Private": 15,
"Private*": 45,
"Private**": 80,
"Private***": 120,
"Corporal": 170,
"Corporal*": 250,
"Corporal**": 350,
"Corporal***": 450,
"Sergeant": 600,
"Sergeant*": 800,
"Sergeant**": 1000,
"Sergeant***": 1400,
"Lieutenant": 1850,
"Lieutenant*": 2350,
"Lieutenant**": 3000,
"Lieutenant***": 3750,
"Captain": 5000,
"Captain*": 6500,
"Captain**": 9000,
"Captain***": 12000,
"Major": 15500,
"Major*": 20000,
"Major**": 25000,
"Major***": 31000,
"Commander": 40000,
"Commander*": 52000,
"Commander**": 67000,
"Commander***": 85000,
"Lt Colonel": 110000,
"Lt Colonel*": 140000,
"Lt Colonel**": 180000,
"Lt Colonel***": 225000,
"Colonel": 285000,
"Colonel*": 355000,
"Colonel**": 435000,
"Colonel***": 540000,
"General": 660000,
"General*": 800000,
"General**": 950000,
"General***": 1140000,
"Field Marshal": 1350000,
"Field Marshal*": 1600000,
"Field Marshal**": 1875000,
"Field Marshal***": 2185000,
"Supreme Marshal": 2550000,
"Supreme Marshal*": 3000000,
"Supreme Marshal**": 3500000,
"Supreme Marshal***": 4150000,
"National Force": 4900000,
"National Force*": 5800000,
"National Force**": 7000000,
"National Force***": 9000000,
"World Class Force": 11500000,
"World Class Force*": 14500000,
"World Class Force**": 18000000,
"World Class Force***": 22000000,
"Legendary Force": 26500000,
"Legendary Force*": 31500000,
"Legendary Force**": 37000000,
"Legendary Force***": 42000000,
"God of War": 50000000,
"God of War*": 100000000 ,
"God of War**": 200000000,
"God of War***": 500000000,
"Titan": 1000000000,
"Titan*": 2000000000,
"Titan**": 4000000000,
"Titan***": 10000000000}
        # List of ranks ordered by importance
self.rank_to_pos = [
"Recruit",
"Private",
"Private*",
"Private**",
"Private***",
"Corporal",
"Corporal*",
"Corporal**",
"Corporal***",
"Sergeant",
"Sergeant*",
"Sergeant**",
"Sergeant***",
"Lieutenant",
"Lieutenant*",
"Lieutenant**",
"Lieutenant***",
"Captain",
"Captain*",
"Captain**",
"Captain***",
"Major",
"Major*",
"Major**",
"Major***",
"Commander",
"Commander*",
"Commander**",
"Commander***",
"Lt Colonel",
"Lt Colonel*",
"Lt Colonel**",
"Lt Colonel***",
"Colonel",
"Colonel*",
"Colonel**",
"Colonel***",
"General",
"General*",
"General**",
"General***",
"Field Marshal",
"Field Marshal*",
"Field Marshal**",
"Field Marshal***",
"Supreme Marshal",
"Supreme Marshal*",
"Supreme Marshal**",
"Supreme Marshal***",
"National Force",
"National Force*",
"National Force**",
"National Force***",
"World Class Force",
"World Class Force*",
"World Class Force**",
"World Class Force***",
"Legendary Force",
"Legendary Force*",
"Legendary Force**",
"Legendary Force***",
"God of War",
"God of War*",
"God of War**",
"God of War***",
"Titan",
"Titan*",
"Titan**",
"Titan***",]
        # Run flag, useful if the update/save threads ever need to be killed manually
        self.run = True
        # Data loading runs in a new thread, daemonized so it dies with its invoker
th = threading.Thread(target=self.data_loader)
th.daemon = True
th.start()
    # Loader method: loads data and spawns the saver/updater threads; only called from the constructor
def data_loader(self):
self.load_data()
self.data_saver_th = threading.Thread(target=self.data_saver)
self.data_saver_th.daemon = True
self.data_saver_th.start()
self.data_updater_th = threading.Thread(target=self.data_updater)
self.data_updater_th.daemon = True
self.data_updater_th.start()
    # Dumps the data to a file on disk; only called from data_loader
def data_saver(self):
while self.run:
self.save_data()
time.sleep(60)
    # Periodically refreshes the data; only called from data_loader
def data_updater(self):
while self.run:
for irc_nick in self.data:
self.update_data(irc_nick)
time.sleep(30)
time.sleep(600)
# ---------------------------------------------------------------------------------- #
# @ PUBLIC METHODS #
# ---------------------------------------------------------------------------------- #
    # Updates the object's local data from file
def load_data(self):
try:
f = open('data/er_nick-data.csv', 'rt')
reader = csv.reader(f)
for nick_irc,id,nick_er,level,strength,rank_points,citizenship in reader:
self.data[nick_irc] = {'id': int(id), 'nick': nick_er, 'level': int(level), 'strength': float(strength), 'rank_points': int(rank_points), 'citizenship': citizenship}
f.close()
except:
pass
    # Saves the object's local data to file
def save_data(self):
try:
f = open('data/er_nick-data.csv', 'wt')
writer = csv.writer(f)
for u in self.data:
writer.writerow([u, self.data[u]['id'], self.data[u]['nick'], self.data[u]['level'], self.data[u]['strength'], self.data[u]['rank_points'], self.data[u]['citizenship']])
f.close()
except:
pass
    # Scraper method: refreshes the local data for the given IRC nick
def update_data(self, irc_nick):
try:
id = self.data[irc_nick]['id']
c = urlopen('http://www.erepublik.com/es/citizen/profile/%d' % id)
page = c.read()
c.close()
self.data[irc_nick]['nick'] = re.search('<meta name="title" content="(.+?) - Ciudadano del Nuevo Mundo" \/>', page.decode('utf-8')).group(1)
self.data[irc_nick]['level'] = int(re.search('<strong class="citizen_level">(.+?)<\/strong>', page.decode('utf-8'), re.DOTALL).group(1))
self.data[irc_nick]['strength'] = float(re.search('<span class="military_box_info mb_bottom">(.+?)</span>', page.decode('utf-8'), re.DOTALL).group(1).strip('\r\n\t ').replace(',',''))
self.data[irc_nick]['rank_points'] = int(re.search('<span class="rank_numbers">(.+?) \/', page.decode('utf-8'), re.DOTALL).group(1).replace(',',''))
self.data[irc_nick]['citizenship'] = re.search('<a href="http\:\/\/www.erepublik.com\/es\/country\/society\/([^ \t\n\x0B\f\r]+?)">', page.decode('utf-8')).group(1)
except:
pass
    # Updates local data for the given IRC nick and ID, forcing a refresh
def reg_nick_write(self, nick, id):
if(nick.lower() in self.data.keys()):
self.data[nick.lower()]['id'] = int(id)
else:
self.data[nick.lower()] = {'id': int(id), 'nick': nick, 'level': 1, 'strength': 0, 'rank_points': 0, 'citizenship': ''}
self.update_data(nick.lower())
    # Returns the ID of the given IRC nick
def get_id(self, nick):
return self.data[nick.lower()]['id']
    # Returns the LEVEL of the given IRC nick
def get_level(self, nick):
return self.data[nick.lower()]['level']
    # Returns the STRENGTH of the given IRC nick
def get_strength(self, nick):
return self.data[nick.lower()]['strength']
    # Returns the RANK POINTS of the given IRC nick
def get_rank_points(self, nick):
return self.data[nick.lower()]['rank_points']
    # Returns the CITIZENSHIP of the given IRC nick
def get_citizenship(self, nick):
return self.data[nick.lower()]['citizenship']
    # Returns the IN-GAME NICK of the given IRC nick
def get_nick(self, nick):
return self.data[nick.lower()]['nick']
    # Returns the RANK NAME for the given rank points
def calculate_rank_name(self, rank_points):
index = 0
for k in [key for key in self.rank_required_points.keys() if self.rank_required_points[key] < rank_points]:
if(self.rank_to_pos.index(k) > index):
index = self.rank_to_pos.index(k)
return self.rank_to_pos[index]
    # Computes DAMAGE from rank points, strength, weapon power, level and bonus
def calculate_damage(self, rank_points, strength, weapon_power, level, bonus):
index = 0
for k in [key for key in self.rank_required_points.keys() if self.rank_required_points[key] < rank_points]:
if(self.rank_to_pos.index(k) > index):
index = self.rank_to_pos.index(k)
        return(math.trunc(((index / 20) + 0.3) * ((strength / 10) + 40) * (1 + (weapon_power / 100)) * (1.1 if level > 99 else 1) * bonus))
| apache-2.0 | -1,751,381,920,954,348,000 | 36.962069 | 195 | 0.511446 | false | 3.602094 | false | false | false
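# --- Worked example (illustrative values, not from the original file) ---
# With 600000 rank points, the highest rank whose requirement is below that
# is "Colonel***" (540000), at index 36 in rank_to_pos. For strength=1000,
# weapon_power=100, level=50 and bonus=1 the formula gives, under Python 3
# division:
#   trunc((36/20 + 0.3) * (1000/10 + 40) * (1 + 100/100) * 1 * 1)
#   = trunc(2.1 * 140 * 2) = 588
# Under Python 2, 36/20 truncates to 1 and the result is 364 instead -- one
# reason to prefer writing index / 20.0 if Python 2 support matters.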
heromod/migrid | mig/reST/html_writer.py | 1 | 1966
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# --- BEGIN_HEADER ---
#
# html_writer - [insert a few words of module description on this line]
# Copyright (C) 2003-2009 The MiG Project lead by Brian Vinter
#
# This file is part of MiG.
#
# MiG is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# MiG is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# -- END_HEADER ---
#
from docutils.writers.html4css1 import Writer, HTMLTranslator
from docutils.core import publish_string
# Setup a translator writer
html_writer = Writer()
html_writer.translator_class = HTMLTranslator
# Setup a restructured text example
reST = \
"""
Example of reST:
================
This is a small example of the way reST can be used as a base for generating HTML formatted text that:
- looks nice
- is standards compliant
- is flexible
We *may* decide to start using this as text formatting tool in MiG__ later on.
__ http://mig-1.imada.sdu.dk/
We can also use it for creating tables if we want to:
===== ===== ======
Input Output
----- ----- ------
A B A or B
===== ===== ======
False False False
True False True
False True True
True True True
===== ===== ======
Have fun!
----
Cheers, Jonas
"""
# Translate reST to html
html = publish_string(reST, settings_overrides={'output_encoding'
: 'unicode'}, writer=html_writer)
print html
| gpl-2.0 | 4,386,083,187,276,027,400 | 23.575 | 101 | 0.677009 | false | 3.561594 | false | false | false |
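# --- Usage sketch (not in the original) ---
# With output_encoding 'unicode', publish_string returns text rather than
# bytes, so the rendered page can be written with a plain text-mode handle:
#
#   fd = open('example.html', 'w')
#   fd.write(html)
#   fd.close()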
davhenriksen/bringhomethebacon | web/web.py | 1 | 27835
#!/usr/bin/env python
# web.py
# bring home the bacon Copyright (C) 2012 David Ormbakken Henriksen ([email protected])
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
import os
import sqlite3
import re
import sys
import subprocess
import tornado.httpserver
import tornado.ioloop
import tornado.web
import tornado.options
import tornado.autoreload
import simplejson as json
from tornado.options import define, options
define("port", default=8080, help="run on the given port", type=int)
class Application(tornado.web.Application):
def __init__(self):
handlers = [
(r"/", MainHandler),
(r"/rulesets", RulesetsHandler),
(r"/rules", RulesHandler),
(r"/sensors", SensorsHandler),
(r"/get_rulesets", GetRulesetsHandler),
(r"/get_rules", GetRulesHandler),
(r"/get_sensors", GetSensorsHandler),
(r"/add_sensor", AddSensorHandler),
(r"/remove_sensor", RemoveSensorHandler),
(r"/open_rule", OpenRuleHandler),
(r"/getsensorname", GetSensorNameHandler),
(r"/tuning_rules", TuningRulesHandler),
(r"/tuning_rulesets", TuningRulesetsHandler),
(r"/update_sensor", UpdateSensorHandler),
(r"/update", UpdateHandler),
(r"/atuninghelp", ATuningHelpHandler),
(r"/suppress", SuppressHandler),
(r"/threshold", ThresholdHandler),
(r"/atuning", ATuningHandler),
(r"/get_atuning", GetATuningHandler),
(r"/remove_atuning", RemoveATuningHandler),
(r"/distribute", DistributeHandler),
]
settings = dict(
#login_url="/auth/login",
template_path=os.path.join(os.path.dirname(__file__), "templates"),
static_path=os.path.join(os.path.dirname(__file__), "static"),
autoescape=None)
tornado.web.Application.__init__(self, handlers, **settings)
class RemoveATuningHandler(tornado.web.RequestHandler):
def post(self):
syntax = self.request.arguments.get("atuningid")
db = sqlite3.connect('../DB.db')
cursor = db.cursor()
try:
cursor.execute('SELECT sname FROM sensors')
all_sensors = cursor.fetchall()
for hit in all_sensors:
table = hit[0]+'_threshold'
sql = 'DELETE FROM %s WHERE syntax="%s"' % (table,syntax[0])
cursor.execute(sql)
db.commit()
except StandardError,e:
FILE = open('weberrorlog.txt','a')
FILE.write('RemoveATuningHandler ERROR: '+str(e)+'\n')
FILE.close()
cursor.close()
db.close()
class GetATuningHandler(tornado.web.RequestHandler):
def get(self):
db = sqlite3.connect('../DB.db')
cursor = db.cursor()
atuning = []
try:
cursor.execute('SELECT sname FROM sensors')
all_sensors = cursor.fetchall()
for hit in all_sensors:
table = hit[0]+'_threshold'
sql = 'SELECT * FROM '+table
cursor.execute(sql)
for row in cursor:
idnr,sid,typ,syntax,comment,sensor = row
check = "<center><input type='checkbox' name='atuningid' value='%s'></center>" % (syntax)
tmp = (check,sid,typ,syntax,comment,sensor)
if tmp not in atuning:
atuning.append(tmp)
except StandardError,e:
FILE = open('weberrorlog.txt','a')
FILE.write('GetATuningHandler ERROR: '+str(e)+'\n')
FILE.close()
cursor.close()
db.close()
self.write(json.dumps({"aaData":atuning},sort_keys=True,indent=4))
class ThresholdHandler(tornado.web.RequestHandler):
def post(self):
db = sqlite3.connect('../DB.db')
cursor = db.cursor()
if 'sigid' not in self.request.arguments:
self.write('Input missing. Try again.')
elif 'count' not in self.request.arguments:
self.write('Input missing. Try again.')
elif 'sec' not in self.request.arguments:
self.write('Input missing. Try again.')
else:
genid = self.request.arguments.get("genid")
sigid = self.request.arguments.get("sigid")
typ = self.request.arguments.get("type")
track = self.request.arguments.get("track")
count = self.request.arguments.get("count")
sec = self.request.arguments.get("sec")
sensor = self.request.arguments.get("select")
comment = ''
if 'comment' in self.request.arguments:
tmp = self.request.arguments.get("comment")
comment = tmp[0]
syntax = 'event_filter gen_id '+genid[0]+',sig_id '+sigid[0]+',type '+typ[0]+',track '+track[0]+',count '+count[0]+',seconds '+sec[0]
try:
def insert_t(table,x):
sql = 'INSERT OR IGNORE INTO '+table+' (id,sid,type,syntax,comment,sensor) VALUES (null,'+sigid[0]+',"threshold","'+syntax+'","'+comment+'","'+x+'")'
cursor.execute(sql)
if not (sensor[0] == "all"):
table = sensor[0]+'_threshold'
insert_t(table,sensor[0])
else:
cursor.execute('SELECT sname FROM sensors')
all_sensors = cursor.fetchall()
for hit in all_sensors:
table = hit[0]+'_threshold'
insert_t(table,'ALL')
db.commit()
self.write('threshold rule for sid: '+sigid[0]+' has been added!')
except StandardError,e:
FILE = open('weberrorlog.txt','a')
FILE.write('ThresholdHandler ERROR: '+str(e)+'\n')
FILE.close()
self.write(str(e))
cursor.close()
db.close()
class SuppressHandler(tornado.web.RequestHandler):
def post(self):
db = sqlite3.connect('../DB.db')
cursor = db.cursor()
if 'sigid' not in self.request.arguments:
self.write('Input missing. Try again.')
elif 'ip' not in self.request.arguments:
self.write('Input missing. Try again.')
else:
genid = self.request.arguments.get("genid")
sigid = self.request.arguments.get("sigid")
track = self.request.arguments.get("track")
ip = self.request.arguments.get("ip")
sensor = self.request.arguments.get("select")
comment = ''
if 'comment' in self.request.arguments:
tmp = self.request.arguments.get("comment")
comment = tmp[0]
syntax = 'suppress gen_id '+genid[0]+',sig_id '+sigid[0]+',track '+track[0]+',ip '+ip[0]
try:
def insert_t(table,x):
sql = 'INSERT OR IGNORE INTO '+table+' (id,sid,type,syntax,comment,sensor) VALUES (NULL,'+sigid[0]+',"suppress","'+syntax+'","'+comment+'","'+x+'")'
cursor.execute(sql)
if not (sensor[0] == "all"):
table = sensor[0]+'_threshold'
insert_t(table,sensor[0])
else:
cursor.execute('SELECT sname FROM sensors')
all_sensors = cursor.fetchall()
for hit in all_sensors:
table = hit[0]+'_threshold'
insert_t(table,'ALL')
db.commit()
self.write('suppress rule for sid: '+sigid[0]+' has been added!')
except StandardError,e:
FILE = open('weberrorlog.txt','a')
FILE.write('ThresholdHandler ERROR: '+str(e)+'\n')
FILE.close()
self.write(str(e))
cursor.close()
db.close()
class DistributeHandler(tornado.web.RequestHandler):
def get(self):
self.write('''<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<title>Distribute report</title>
<link type="text/css" rel="stylesheet" href="../static/css/custom.css"/>
<link type="text/css" rel="stylesheet" href="../static/css/demo_page.css"/>
</head>
<body>
 <b>Distribute report</b></br>''')
try:
p = subprocess.Popen(["python","../distribute.py"], stdout=subprocess.PIPE)
for line in iter(p.stdout.readline, ''):
self.write(' ')
self.write(line)
                self.write('<br/>')
p.stdout.close()
except StandardError,e:
FILE = open('weberrorlog.txt','a')
FILE.write('DistributeHandler ERROR: '+str(e)+'\n')
FILE.close()
self.write('''</body>
</html>''')
class UpdateHandler(tornado.web.RequestHandler):
def get(self):
self.write('''<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<title>Update report</title>
<link type="text/css" rel="stylesheet" href="../static/css/custom.css"/>
<link type="text/css" rel="stylesheet" href="../static/css/demo_page.css"/>
</head>
<body>
 <b>Update Report</b></br>''')
try:
p = subprocess.Popen(["python","../update.py"], stdout=subprocess.PIPE)
for line in iter(p.stdout.readline, ''):
self.write(' ')
self.write(line)
                self.write('<br/>')
p.stdout.close()
except StandardError,e:
FILE = open('weberrorlog.txt','a')
FILE.write('UpdateHandler ERROR: '+str(e)+'\n')
FILE.close()
self.write('''</body>
</html>''')
class UpdateSensorHandler(tornado.web.RequestHandler):
def post(self):
db = sqlite3.connect('../DB.db')
cursor = db.cursor()
sensor = self.request.arguments.get("select")
try:
            if sensor[0] == 'all':
cursor.execute('SELECT sname FROM sensors')
all_sensors = cursor.fetchall()
def update(f,v,s):
sql = 'UPDATE sensors SET '+f+'="'+v+'" WHERE sname="'+s+'"'
cursor.execute(sql)
if "ip" in self.request.arguments:
ip = self.request.arguments.get("ip")
if not (sensor[0] == 'all'):
update("ip",ip[0],sensor[0])
else:
for hit in all_sensors:
update("ip",ip[0],hit[0])
if "path" in self.request.arguments:
path = self.request.arguments.get("path")
if not (sensor[0] == 'all'):
update("path",path[0],sensor[0])
else:
for hit in all_sensors:
update("path",path[0],hit[0])
if "uname" in self.request.arguments:
uname = self.request.arguments.get("uname")
if not (sensor[0] == 'all'):
update("uname",uname[0],sensor[0])
else:
for hit in all_sensors:
update("uname",uname[0],hit[0])
if "cmd" in self.request.arguments:
pw = self.request.arguments.get("cmd")
if not (sensor[0] == 'all'):
update("cmd",cmd[0],sensor[0])
else:
for hit in all_sensors:
update("cmd",cmd[0],hit[0])
db.commit()
self.write('Sensor updated! Refresh page to see changes.')
except StandardError,e:
FILE = open('weberrorlog.txt','a')
FILE.write('UpdateSensorHandler ERROR: '+str(e)+'\n')
FILE.close()
self.write(str(e))
cursor.close()
db.close()
class TuningRulesetsHandler(tornado.web.RequestHandler):
def post(self):
source_ruleset = self.request.arguments.get("rulesetid")
sensor = self.request.arguments.get("sensor")
action = self.request.arguments.get("action")
db = sqlite3.connect('../DB.db')
cursor = db.cursor()
sids = ''
try:
def disable_sid(table,sid):
value = sid.split(',')
for entry in value:
sql = 'INSERT OR IGNORE INTO '+table+' (sid) VALUES ('+entry+')'
cursor.execute(sql)
def enable_sid(table,sid):
sql = 'DELETE FROM '+table+' WHERE sid IN ('+sid+')'
cursor.execute(sql)
length = len(source_ruleset)
counter = 1
for hit in source_ruleset:
split = hit.split('.')
sql = 'SELECT sidnr from rules WHERE source_name="'+split[0]+'" AND ruleset_name="'+split[1]+'"'
cursor.execute(sql)
tmp = cursor.fetchall()
sids = sids+(",".join(str(x[0]) for x in tmp))
if not (counter == length):
sids = sids+","
counter += 1
if not (sensor[0] == 'all'):
table = sensor[0]+'_disabled'
if not (action[0] == "enable"):
disable_sid(table,sids)
else:
enable_sid(table,sids)
else:
cursor.execute('SELECT sname FROM sensors')
all_sensors = cursor.fetchall()
for hit in all_sensors:
table = hit[0]+'_disabled'
if not (action[0] == "enable"):
disable_sid(table,sids)
else:
enable_sid(table,sids)
db.commit()
except StandardError,e:
FILE = open('weberrorlog.txt','a')
FILE.write('TuningRulesetsHandler ERROR: '+str(e)+'\n')
FILE.close()
cursor.close()
db.close()
class TuningRulesHandler(tornado.web.RequestHandler):
def post(self):
sids = self.request.arguments.get('sidnr')
sensor = self.request.arguments.get('sensor')
action = self.request.arguments.get('action')
db = sqlite3.connect('../DB.db')
cursor = db.cursor()
def disable_sid(table,sid):
sql = 'INSERT OR IGNORE INTO '+table+' (sid) VALUES ('+sid+')'
cursor.execute(sql)
def enable_sid(table,sid):
sql = 'DELETE FROM '+table+' WHERE sid='+sid
cursor.execute(sql)
try:
if not (sensor[0] == "all"):
table = sensor[0]+'_disabled'
for sid in sids:
if not (action[0] == "enable"):
disable_sid(table,sid)
else:
enable_sid(table,sid)
else:
cursor.execute('SELECT sname FROM sensors')
all_sensors = cursor.fetchall()
for hit in all_sensors:
table = hit[0]+'_disabled'
for sid in sids:
if not (action[0] == "enable"):
disable_sid(table,sid)
else:
enable_sid(table,sid)
db.commit()
except StandardError,e:
FILE = open('weberrorlog.txt','a')
FILE.write('TuningRulesHandler ERROR: '+str(e)+'\n')
FILE.close()
cursor.close()
db.close()
class GetSensorNameHandler(tornado.web.RequestHandler):
def get(self):
db = sqlite3.connect('../DB.db')
cursor = db.cursor()
try:
cursor.execute('SELECT sname FROM sensors')
selectbox = '<select name="select" id="select"><option value="all">all sensors</option>'
for sensor in cursor:
selectbox = selectbox+'<option value="'+sensor[0]+'">'+sensor[0]+'</option>'
selectbox = selectbox+'</select>'
self.write(selectbox)
except StandardError,e:
FILE = open("weberrorlog.txt","a")
FILE.write("GetSensorNameHandler ERROR: "+str(e)+"\n")
FILE.close()
self.write('<select><option>ERROR</option></select>')
cursor.close()
db.close()
class OpenRuleHandler(tornado.web.RequestHandler):
def get(self):
sid = self.get_argument("sid")
db = sqlite3.connect('../DB.db')
cursor = db.cursor()
try:
cursor.execute('SELECT rule_syntax FROM rules WHERE sidnr = (?)', [sid])
rulesyntax = cursor.fetchone()
self.render("open_rules.html",rulesyntax=rulesyntax[0])
except StandardError,e:
FILE = open('weberrorlog.txt','a')
FILE.write('OpenRuleHandler ERROR: '+str(e)+'\n')
FILE.close()
cursor.close()
db.close()
class RemoveSensorHandler(tornado.web.RequestHandler):
def post(self):
snames = self.request.arguments.get("sensorid")
db = sqlite3.connect('../DB.db')
cursor = db.cursor()
try:
for sensor in snames:
sql = 'DELETE FROM sensors WHERE sname="%s"' % (sensor)
cursor.execute(sql)
sql = 'DROP TABLE %s_disabled' % (sensor)
cursor.execute(sql)
sql = 'DROP TABLE %s_threshold' % (sensor)
cursor.execute(sql)
db.commit()
except StandardError,e:
FILE = open('weberrorlog.txt','a')
FILE.write('RemoveSensorHandler ERROR: '+str(e)+'\n')
FILE.close()
cursor.close()
db.close()
class AddSensorHandler(tornado.web.RequestHandler):
def post(self):
db = sqlite3.connect('../DB.db')
cursor = db.cursor()
if 'sname' not in self.request.arguments:
self.write('Sensor NOT added. Input missing. Try again.')
elif 'ip' not in self.request.arguments:
self.write('Sensor NOT added. Input missing. Try again.')
elif 'path' not in self.request.arguments:
self.write('Sensor NOT added. Input missing. Try again.')
elif 'uname' not in self.request.arguments:
self.write('Sensor NOT added. Input missing. Try again.')
elif 'cmd' not in self.request.arguments:
self.write('Sensor NOT added. Input missing. Try again.')
else:
sname = self.request.arguments.get("sname")
sname = sname[0]
ip = self.request.arguments.get("ip")
ip = ip[0]
path = self.request.arguments.get("path")
path = path[0]
uname = self.request.arguments.get("uname")
uname = uname[0]
cmd = self.request.arguments.get("cmd")
cmd = cmd[0]
            try:
cursor.execute('''INSERT INTO sensors (sname,ip,path,uname,cmd)
VALUES(?,?,?,?,?)''',(sname,ip,path,uname,cmd))
sql = 'CREATE TABLE '+sname+'_disabled (sid INTEGER PRIMARY KEY)'
cursor.execute(sql)
sql = 'CREATE TABLE '+sname+'_threshold (id INTEGER PRIMARY KEY, sid INTEGER, type TEXT, syntax TEXT, comment TEXT, sensor TEXT)'
cursor.execute(sql)
self.write(sname+' added! Refresh page to see changes.')
db.commit()
except StandardError,e:
FILE = open('weberrorlog.txt','a')
FILE.write('AddSensorHandler ERROR: '+str(e)+'\n')
FILE.close()
self.write(str(e))
cursor.close()
db.close()
class GetSensorsHandler(tornado.web.RequestHandler):
def get(self):
db = sqlite3.connect('../DB.db')
cursor = db.cursor()
sensors = []
try:
cursor.execute('SELECT * FROM sensors')
for row in cursor:
sname,ip,path,uname,cmd = row
check = "<center><input type='checkbox' name='sensorid' value='%s'></center>" % (sname)
sensor = (check,sname,ip,path,uname,cmd)
sensors.append(sensor)
except StandardError,e:
FILE = open('weberrorlog.txt','a')
FILE.write('GetSensorsHandler ERROR: '+str(e)+'\n')
FILE.close()
cursor.close()
db.close()
self.write(json.dumps({"aaData":sensors},sort_keys=True,indent=4))
class GetRulesHandler(tornado.web.RequestHandler):
def get(self):
db = sqlite3.connect('../DB.db')
cursor = db.cursor()
details = '<img class="sig" src="static/images/open.png">'
sigs = []
try:
cursor.execute('SELECT * FROM rules')
all_rules = cursor.fetchall()
cursor.execute('SELECT sname FROM sensors')
all_sensors = cursor.fetchall()
for row in all_rules:
sidnr,revnr,source,ruleset,name,ref,date,rule = row
status =''
for hit in all_sensors:
sql = 'SELECT sid FROM '+hit[0]+'_disabled WHERE sid='+str(sidnr)
cursor.execute(sql)
res = cursor.fetchone()
sql = 'SELECT sid FROM %s_threshold WHERE sid="%s"' % (hit[0],sidnr)
cursor.execute(sql)
tmp2 = cursor.fetchone()
if not (res is None):
if not (tmp2 is None):
status = status+'<font class="red">'+hit[0]+'</font><font class="yellow"><b>!</b></font> ' #red/yellow
else:
status = status+'<font class="red">'+hit[0]+'</font> ' #red
else:
if not (tmp2 is None):
status = status+'<font class="green">'+hit[0]+'</font><font class="yellow"><b>!</b></font> ' #green/yellow
else:
status = status+'<font class="green">'+hit[0]+'</font> ' #green
check = '<input type="checkbox" name="sidnr" value="%i">' % (sidnr)
source_ruleset = '%s.%s' % (source,ruleset)
sig = (check, sidnr, revnr, date, name, source_ruleset, ref, status, details)
sigs.append(sig)
except StandardError,e:
FILE = open('weberrorlog.txt','a')
FILE.write('GetRulesetsHandler ERROR: '+str(e)+'\n')
FILE.close()
cursor.close()
db.close()
self.write(json.dumps({"aaData":sigs},sort_keys=True,indent=4))
class GetRulesetsHandler(tornado.web.RequestHandler):
def get(self):
db = sqlite3.connect('../DB.db')
cursor = db.cursor()
rulesets = []
try:
cursor.execute("SELECT DISTINCT ruleset_name, source_name FROM rules")
query = cursor.fetchall()
for row in query:
ruleset,source = row
source_ruleset = '%s.%s' % (source,ruleset)
check = '<center><input type="checkbox" name="rulesetid" value="%s"></center>' % (source_ruleset)
sql = 'SELECT sidnr from rules WHERE source_name="%s" AND ruleset_name="%s"' % (source,ruleset)
cursor.execute(sql)
tmp = cursor.fetchall()
count = len(tmp)
sids = ','.join(str(x[0]) for x in tmp)
cursor.execute('SELECT sname FROM sensors')
all_sensors = cursor.fetchall()
sql = 'SELECT MAX(date) FROM rules WHERE source_name="%s" AND ruleset_name="%s"' % (source,ruleset)
cursor.execute(sql)
max_date = cursor.fetchone()
status = ''
for x in all_sensors:
sensor = x[0]
sql = 'SELECT sid FROM %s_disabled WHERE sid IN ( %s )' % (sensor,sids)
cursor.execute(sql)
tmp2 = cursor.fetchall()
scount = len(tmp2)
if not (scount == count):
if not (scount == 0):
status = status+'<font class="green">%s</font><font class="red">%s</font> ' % (sensor,scount)
else:
status = status+'<font class="green">%s</font> ' % sensor
else:
status = status+'<font class="red">%s</font> ' % sensor
rset = (check,source_ruleset,max_date,count,status)
rulesets.append(rset)
except StandardError,e:
FILE = open('weberrorlog.txt','a')
FILE.write('GetRulesetsHandler ERROR: '+str(e)+'\n')
FILE.close()
cursor.close()
db.close()
self.write(json.dumps({"aaData":rulesets},sort_keys=True,indent=4))
class ATuningHandler(tornado.web.RequestHandler):
def get(self):
self.render("atuning.html")
class ATuningHelpHandler(tornado.web.RequestHandler):
def get(self):
self.render("atuninghelp.html")
class SensorsHandler(tornado.web.RequestHandler):
def get(self):
self.render("sensors.html")
class RulesHandler(tornado.web.RequestHandler):
def get(self):
self.render("rules.html")
class RulesetsHandler(tornado.web.RequestHandler):
def get(self):
self.render("rulesets.html")
class MainHandler(tornado.web.RequestHandler):
def get(self):
self.render("index.html")
def main():
tornado.options.parse_command_line()
http_server = tornado.httpserver.HTTPServer(Application())
http_server.listen(options.port)
tornado.autoreload.start()
tornado.ioloop.IOLoop.instance().start()
if __name__ == "__main__":
main()
| gpl-3.0 | -5,249,010,472,040,336,000 | 36.870748 | 188 | 0.496102 | false | 4.065284 | false | false | false |
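# --- Client sketch (assumptions: server on the default port 8080 and the
# third-party 'requests' package installed; values are illustrative) ---
# Field names follow AddSensorHandler above:
#
#   import requests
#   requests.post('http://localhost:8080/add_sensor', data={
#       'sname': 'sensor1', 'ip': '10.0.0.5', 'path': '/etc/snort/rules',
#       'uname': 'snort', 'cmd': 'service snort restart'})
#   print(requests.get('http://localhost:8080/get_sensors').json())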
elidaian/sudoku | src/server/users.py | 1 | 3651
"""
users.py
Created on: Aug 17 2013
Author: eli
"""
class UserPermission(object):
"""
Describes a permission for an operation for an user.
"""
PERMISSIONS = []
_curr_permission_bit = 1
def __init__(self, name, description, is_default):
"""
Construct a permission given its description.
"""
super(UserPermission, self).__init__()
self.name = name
self.description = description
self.flag = UserPermission._curr_permission_bit
self.is_default = is_default
UserPermission.PERMISSIONS.append(self)
UserPermission._curr_permission_bit <<= 1
@staticmethod
def get_mask(permissions):
"""
Returns a mask containing the given permissions.
"""
res = 0
for permission in permissions:
res |= permission.flag
return res
@staticmethod
def parse_mask(mask):
"""
Return a list of permissions given the mask.
"""
res = []
for permission in UserPermission.PERMISSIONS:
if permission.flag & mask:
res.append(permission)
return res
def __eq__(self, other):
"""
Checks the equality of this object to other object.
"""
return self.flag == other.flag
def permissions_to_mask(permissions):
"""
Create a mask of permissions given the permissions list.
"""
res = 0
for permission in permissions:
res |= permission.flag
return res
# Define the permissions
PERM_CREATE_BOARD = UserPermission("CREATE_BOARD", "Create boards", True)
PERM_MANAGE_USERS = UserPermission("MANAGE_USERS", "Manage users", False)
PERM_SHOW_OTHER_USER_BOARDS = UserPermission("SHOW_OTHER_USERS_BOARDS",
"Show other user\'s boards", False)
class User(object):
"""
Represents a logged in user.
"""
def __init__(self, id, username, display, permissions):
"""
Initialize a user given its ID, username, display name and permissions.
"""
super(User, self).__init__()
self.id = id
self.username = username
if not display:
self.display = username
else:
self.display = display
self.permissions = UserPermission.parse_mask(permissions)
def has_permission(self, permission):
"""
Returns True if this user has the requested permission.
"""
return permission in self.permissions
def allow_create_board(self):
"""
Returns True if this user is allowed to create boards.
"""
return self.has_permission(PERM_CREATE_BOARD)
def allow_manage_users(self):
"""
Returns True if this user is allowed to manage other users.
"""
return self.has_permission(PERM_MANAGE_USERS)
def allow_other_user_boards(self):
"""
Returns True if this user is allowed to see other users boards.
"""
return self.has_permission(PERM_SHOW_OTHER_USER_BOARDS)
def to_json(self):
"""
Returns a jsonable object with the same data as this user.
"""
return {"id" : self.id,
"username" : self.username,
"display" : self.display,
"permisions": permissions_to_mask(self.permissions)}
def user_from_json(json):
"""
Create a User object from its representing json.
"""
return User(json["id"], json["username"], json["display"], json["permissions"])
| gpl-3.0 | 2,345,246,301,992,049,000 | 27.748031 | 83 | 0.576828 | false | 4.518564 | false | false | false |
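# --- Usage sketch (not part of the original module) ---
# Building a permission mask and round-tripping a user through to_json:
mask = permissions_to_mask([PERM_CREATE_BOARD, PERM_MANAGE_USERS])
user = User(1, "alice", None, mask)
assert user.allow_create_board()
assert user.allow_manage_users()
assert not user.allow_other_user_boards()
assert user_from_json(user.to_json()).username == "alice"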
tedlaz/pyted | misthodosia/m13/f_fmy_old.py | 1 | 1057
# -*- coding: utf-8 -*-
'''
Created on 18 Nov 2012
@author: tedlaz
'''
from PyQt4 import QtGui, QtCore
from gui import ui_fmy
from utils.fmy_etoys import makeFMYFile
class dlg(QtGui.QDialog):
def __init__(self, args=None, parent=None):
super(dlg, self).__init__(parent)
self.ui = ui_fmy.Ui_Dialog()
self.ui.setupUi(self)
self.makeConnections()
if parent:
self.db = parent.db
else:
self.db = ''
def makeConnections(self):
QtCore.QObject.connect(self.ui.b_makeFile, QtCore.SIGNAL("clicked()"),self.makeFile)
def makeFile(self):
defaultPdfName = 'JL10'
fName = QtGui.QFileDialog.getSaveFileName(self,u"Ονομα αρχείου",defaultPdfName)
makeFMYFile(fName,self.ui.t_xrisi.text(),self.db)
self.accept()
if __name__ == '__main__':
import sys
app = QtGui.QApplication(sys.argv)
form = dlg(sys.argv)
form.show()
    app.exec_()
| gpl-3.0 | 1,785,956,018,139,974,700 | 28.054054 | 111 | 0.606512 | false | 2.596618 | false | false | false
schoolie/bokeh | bokeh/models/plots.py | 1 | 20026
''' Models for representing top-level plot objects.
'''
from __future__ import absolute_import
from six import string_types
from ..core.enums import Location
from ..core.properties import Auto, Bool, Dict, Either, Enum, Include, Instance, Int, List, Override, String
from ..core.property_mixins import LineProps, FillProps
from ..core.query import find
from ..core.validation import error, warning
from ..core.validation.errors import REQUIRED_RANGE
from ..core.validation.warnings import (MISSING_RENDERERS, NO_DATA_RENDERERS,
MALFORMED_CATEGORY_LABEL, SNAPPED_TOOLBAR_ANNOTATIONS)
from ..util.plot_utils import _list_attr_splat, _select_helper
from ..util.string import nice_join
from .annotations import Legend, Title
from .axes import Axis
from .glyphs import Glyph
from .grids import Grid
from .layouts import LayoutDOM
from .ranges import Range, FactorRange
from .renderers import DataRenderer, DynamicImageRenderer, GlyphRenderer, Renderer, TileRenderer
from .sources import DataSource, ColumnDataSource
from .tools import Tool, Toolbar, ToolEvents
class Plot(LayoutDOM):
''' Model representing a plot, containing glyphs, guides, annotations.
'''
def __init__(self, **kwargs):
if "tool_events" not in kwargs:
kwargs["tool_events"] = ToolEvents()
if "toolbar" in kwargs and "logo" in kwargs:
raise ValueError("Conflicing properties set on plot: toolbar, logo.")
if "toolbar" in kwargs and "tools" in kwargs:
raise ValueError("Conflicing properties set on plot: toolbar, tools.")
if "toolbar" not in kwargs:
tools = kwargs.pop('tools', [])
logo = kwargs.pop('logo', 'normal')
kwargs["toolbar"] = Toolbar(tools=tools, logo=logo)
super(LayoutDOM, self).__init__(**kwargs)
def select(self, *args, **kwargs):
''' Query this object and all of its references for objects that
match the given selector.
There are a few different ways to call the ``select`` method.
The most general is to supply a JSON-like query dictionary as the
single argument or as keyword arguments:
Args:
            selector (JSON-like) : a query selector, given as a JSON-like dict
Keyword Arguments:
kwargs : query dict key/values as keyword arguments
For convenience, queries on just names can be made by supplying
the ``name`` string as the single parameter:
Args:
name (str) : the name to query on
Also queries on just type can be made simply by supplying the
``Model`` subclass as the single parameter:
Args:
type (Model) : the type to query on
Returns:
seq[Model]
Examples:
.. code-block:: python
# These two are equivalent
p.select({"type": HoverTool})
p.select(HoverTool)
# These two are also equivalent
p.select({"name": "mycircle"})
p.select("mycircle")
# Keyword arguments can be supplied in place of selector dict
p.select({"name": "foo", "type": HoverTool})
p.select(name="foo", type=HoverTool)
'''
selector = _select_helper(args, kwargs)
# Want to pass selector that is a dictionary
return _list_attr_splat(find(self.references(), selector, {'plot': self}))
def row(self, row, gridplot):
''' Return whether this plot is in a given row of a GridPlot.
Args:
row (int) : index of the row to test
gridplot (GridPlot) : the GridPlot to check
Returns:
bool
'''
return self in gridplot.row(row)
def column(self, col, gridplot):
''' Return whether this plot is in a given column of a GridPlot.
Args:
col (int) : index of the column to test
gridplot (GridPlot) : the GridPlot to check
Returns:
bool
'''
return self in gridplot.column(col)
def _axis(self, *sides):
objs = []
for s in sides:
objs.extend(getattr(self, s, []))
axis = [obj for obj in objs if isinstance(obj, Axis)]
return _list_attr_splat(axis)
@property
def xaxis(self):
''' Splattable list of :class:`~bokeh.models.axes.Axis` objects for the x dimension.
'''
return self._axis("above", "below")
@property
def yaxis(self):
''' Splattable list of :class:`~bokeh.models.axes.Axis` objects for the y dimension.
'''
return self._axis("left", "right")
@property
def axis(self):
''' Splattable list of :class:`~bokeh.models.axes.Axis` objects.
'''
return _list_attr_splat(self.xaxis + self.yaxis)
@property
def legend(self):
''' Splattable list of :class:`~bokeh.models.annotations.Legend` objects.
'''
legends = [obj for obj in self.renderers if isinstance(obj, Legend)]
return _list_attr_splat(legends)
def _grid(self, dimension):
grid = [obj for obj in self.renderers if isinstance(obj, Grid) and obj.dimension==dimension]
return _list_attr_splat(grid)
@property
def xgrid(self):
''' Splattable list of :class:`~bokeh.models.grids.Grid` objects for the x dimension.
'''
return self._grid(0)
@property
def ygrid(self):
''' Splattable list of :class:`~bokeh.models.grids.Grid` objects for the y dimension.
'''
return self._grid(1)
@property
def grid(self):
''' Splattable list of :class:`~bokeh.models.grids.Grid` objects.
'''
return _list_attr_splat(self.xgrid + self.ygrid)
@property
def tools(self):
return self.toolbar.tools
@tools.setter
def tools(self, tools):
self.toolbar.tools = tools
def add_layout(self, obj, place='center'):
''' Adds an object to the plot in a specified place.
Args:
obj (Renderer) : the object to add to the Plot
place (str, optional) : where to add the object (default: 'center')
Valid places are: 'left', 'right', 'above', 'below', 'center'.
Returns:
None
'''
valid_places = ['left', 'right', 'above', 'below', 'center']
if place not in valid_places:
raise ValueError(
"Invalid place '%s' specified. Valid place values are: %s" % (place, nice_join(valid_places))
)
if hasattr(obj, 'plot'):
if obj.plot is not None:
raise ValueError("object to be added already has 'plot' attribute set")
obj.plot = self
self.renderers.append(obj)
        if place != 'center':
getattr(self, place).append(obj)
def add_tools(self, *tools):
''' Adds tools to the plot.
Args:
*tools (Tool) : the tools to add to the Plot
Returns:
None
'''
if not all(isinstance(tool, Tool) for tool in tools):
raise ValueError("All arguments to add_tool must be Tool subclasses.")
for tool in tools:
if tool.plot is not None:
raise ValueError("tool %s to be added already has 'plot' attribute set" % tool)
tool.plot = self
if hasattr(tool, 'overlay'):
self.renderers.append(tool.overlay)
self.toolbar.tools.append(tool)
def add_glyph(self, source_or_glyph, glyph=None, **kw):
''' Adds a glyph to the plot with associated data sources and ranges.
This function will take care of creating and configuring a Glyph object,
and then add it to the plot's list of renderers.
Args:
source (DataSource) : a data source for the glyphs to all use
glyph (Glyph) : the glyph to add to the Plot
Keyword Arguments:
Any additional keyword arguments are passed on as-is to the
Glyph initializer.
Returns:
GlyphRenderer
'''
if glyph is not None:
source = source_or_glyph
else:
source, glyph = ColumnDataSource(), source_or_glyph
if not isinstance(source, DataSource):
raise ValueError("'source' argument to add_glyph() must be DataSource subclass")
if not isinstance(glyph, Glyph):
raise ValueError("'glyph' argument to add_glyph() must be Glyph subclass")
g = GlyphRenderer(data_source=source, glyph=glyph, **kw)
self.renderers.append(g)
return g
def add_tile(self, tile_source, **kw):
''' Adds new TileRenderer into the Plot.renderers
Args:
tile_source (TileSource) : a tile source instance which contain tileset configuration
Keyword Arguments:
Additional keyword arguments are passed on as-is to the tile renderer
Returns:
TileRenderer : TileRenderer
'''
tile_renderer = TileRenderer(tile_source=tile_source, **kw)
self.renderers.append(tile_renderer)
return tile_renderer
def add_dynamic_image(self, image_source, **kw):
''' Adds new DynamicImageRenderer into the Plot.renderers
Args:
image_source (ImageSource) : a image source instance which contain image configuration
Keyword Arguments:
Additional keyword arguments are passed on as-is to the dynamic image renderer
Returns:
DynamicImageRenderer : DynamicImageRenderer
'''
image_renderer = DynamicImageRenderer(image_source=image_source, **kw)
self.renderers.append(image_renderer)
return image_renderer
@error(REQUIRED_RANGE)
def _check_required_range(self):
missing = []
if not self.x_range: missing.append('x_range')
if not self.y_range: missing.append('y_range')
if missing:
return ", ".join(missing) + " [%s]" % self
@warning(MISSING_RENDERERS)
def _check_missing_renderers(self):
if len(self.renderers) == 0:
return str(self)
@warning(NO_DATA_RENDERERS)
def _check_no_data_renderers(self):
if len(self.select(DataRenderer)) == 0:
return str(self)
@warning(MALFORMED_CATEGORY_LABEL)
def _check_colon_in_category_label(self):
if not self.x_range: return
if not self.y_range: return
broken = []
for range_name in ['x_range', 'y_range']:
category_range = getattr(self, range_name)
if not isinstance(category_range, FactorRange): continue
for value in category_range.factors:
if not isinstance(value, string_types): break
if ':' in value:
broken.append((range_name, value))
break
if broken:
field_msg = ' '.join('[range:%s] [first_value: %s]' % (field, value)
for field, value in broken)
return '%s [renderer: %s]' % (field_msg, self)
@warning(SNAPPED_TOOLBAR_ANNOTATIONS)
def _check_snapped_toolbar_and_axis(self):
if not self.toolbar_sticky: return
if self.toolbar_location is None: return
objs = getattr(self, self.toolbar_location)
if len(objs) > 0:
return str(self)
x_range = Instance(Range, help="""
The (default) data range of the horizontal dimension of the plot.
""")
y_range = Instance(Range, help="""
The (default) data range of the vertical dimension of the plot.
""")
x_mapper_type = Either(Auto, String, help="""
What kind of mapper to use to convert x-coordinates in data space
into x-coordinates in screen space.
Typically this can be determined automatically, but this property
can be useful to, e.g., show datetime values as floating point
"seconds since epoch" instead of formatted dates.
""")
y_mapper_type = Either(Auto, String, help="""
What kind of mapper to use to convert y-coordinates in data space
into y-coordinates in screen space.
Typically this can be determined automatically, but this property
can be useful to, e.g., show datetime values as floating point
"seconds since epoch" instead of formatted dates
""")
extra_x_ranges = Dict(String, Instance(Range), help="""
Additional named ranges to make available for mapping x-coordinates.
This is useful for adding additional axes.
""")
extra_y_ranges = Dict(String, Instance(Range), help="""
Additional named ranges to make available for mapping y-coordinates.
This is useful for adding additional axes.
""")
hidpi = Bool(default=True, help="""
Whether to use HiDPI mode when available.
""")
title = Instance(Title, default=lambda: Title(text=""), help="""
A title for the plot. Can be a text string or a Title annotation.
""")
title_location = Enum(Location, default="above", help="""
Where the title will be located. Titles on the left or right side
will be rotated.
""")
outline_props = Include(LineProps, help="""
The %s for the plot border outline.
""")
outline_line_color = Override(default="#e5e5e5")
renderers = List(Instance(Renderer), help="""
A list of all renderers for this plot, including guides and annotations
in addition to glyphs and markers.
This property can be manipulated by hand, but the ``add_glyph`` and
``add_layout`` methods are recommended to help make sure all necessary
setup is performed.
""")
toolbar = Instance(Toolbar, help="""
The toolbar associated with this plot which holds all the tools.
The toolbar is automatically created with the plot.
""")
toolbar_location = Enum(Location, default="right", help="""
Where the toolbar will be located. If set to None, no toolbar
will be attached to the plot.
""")
toolbar_sticky = Bool(default=True, help="""
Stick the toolbar to the edge of the plot. Default: True. If False,
the toolbar will be outside of the axes, titles etc.
""")
tool_events = Instance(ToolEvents, help="""
A ToolEvents object to share and report tool events.
""")
left = List(Instance(Renderer), help="""
A list of renderers to occupy the area to the left of the plot.
""")
right = List(Instance(Renderer), help="""
A list of renderers to occupy the area to the right of the plot.
""")
above = List(Instance(Renderer), help="""
A list of renderers to occupy the area above of the plot.
""")
below = List(Instance(Renderer), help="""
A list of renderers to occupy the area below of the plot.
""")
plot_height = Int(600, help="""
Total height of the entire plot (including any axes, titles,
border padding, etc.)
.. note::
This corresponds directly to the height of the HTML
canvas that will be used.
""")
plot_width = Int(600, help="""
Total width of the entire plot (including any axes, titles,
border padding, etc.)
.. note::
This corresponds directly to the width of the HTML
canvas that will be used.
""")
inner_width = Int(readonly=True, help="""
This is the exact width of the plotting canvas, i.e. the width of
the actual plot, without toolbars etc. Note this is computed in a
web browser, so this property will work only in backends capable of
bidirectional communication (server, notebook).
.. note::
This is an experimental feature and the API may change in near future.
""")
inner_height = Int(readonly=True, help="""
This is the exact height of the plotting canvas, i.e. the height of
the actual plot, without toolbars etc. Note this is computed in a
web browser, so this property will work only in backends capable of
bidirectional communication (server, notebook).
.. note::
This is an experimental feature and the API may change in near future.
""")
layout_width = Int(readonly=True, help="""
This is the exact width of the layout, i.e. the height of
the actual plot, with toolbars etc. Note this is computed in a
web browser, so this property will work only in backends capable of
bidirectional communication (server, notebook).
.. note::
This is an experimental feature and the API may change in near future.
""")
layout_height = Int(readonly=True, help="""
This is the exact height of the layout, i.e. the height of
the actual plot, with toolbars etc. Note this is computed in a
web browser, so this property will work only in backends capable of
bidirectional communication (server, notebook).
.. note::
This is an experimental feature and the API may change in near future.
""")
background_props = Include(FillProps, help="""
The %s for the plot background style.
""")
background_fill_color = Override(default='#ffffff')
border_props = Include(FillProps, help="""
The %s for the plot border style.
""")
border_fill_color = Override(default='#ffffff')
min_border_top = Int(help="""
Minimum size in pixels of the padding region above the top of the
central plot region.
.. note::
This is a *minimum*. The padding region may expand as needed to
accommodate titles or axes, etc.
""")
min_border_bottom = Int(help="""
Minimum size in pixels of the padding region below the bottom of
the central plot region.
.. note::
This is a *minimum*. The padding region may expand as needed to
accommodate titles or axes, etc.
""")
min_border_left = Int(help="""
Minimum size in pixels of the padding region to the left of
the central plot region.
.. note::
This is a *minimum*. The padding region may expand as needed to
accommodate titles or axes, etc.
""")
min_border_right = Int(help="""
Minimum size in pixels of the padding region to the right of
the central plot region.
.. note::
This is a *minimum*. The padding region may expand as needed to
accommodate titles or axes, etc.
""")
min_border = Int(5, help="""
A convenience property to set all all the ``min_border_X`` properties
to the same value. If an individual border property is explicitly set,
it will override ``min_border``.
""")
h_symmetry = Bool(True, help="""
Whether the total horizontal padding on both sides of the plot will
be made equal (the left or right padding amount, whichever is larger).
""")
v_symmetry = Bool(False, help="""
Whether the total vertical padding on both sides of the plot will
be made equal (the top or bottom padding amount, whichever is larger).
""")
lod_factor = Int(10, help="""
Decimation factor to use when applying level-of-detail decimation.
""")
lod_threshold = Int(2000, help="""
A number of data points, above which level-of-detail downsampling may
be performed by glyph renderers. Set to ``None`` to disable any
level-of-detail downsampling.
""")
lod_interval = Int(300, help="""
Interval (in ms) during which an interactive tool event will enable
level-of-detail downsampling.
""")
lod_timeout = Int(500, help="""
Timeout (in ms) for checking whether interactive tool events are still
occurring. Once level-of-detail mode is enabled, a check is made every
``lod_timeout`` ms. If no interactive tool events have happened,
level-of-detail mode is disabled.
""")
webgl = Bool(False, help="""
Whether WebGL is enabled for this plot. If True, the glyphs that
support this will render via WebGL instead of the 2D canvas.
""")
| bsd-3-clause | -4,859,454,633,804,849,000 | 31.0416 | 109 | 0.624688 | false | 4.270847 | false | false | false |
expertanalytics/fagkveld | worldmap/src/worldmap/model/location.py | 1 | 1192 |
from typing import Dict, List, Tuple, Set, Optional
from abc import abstractmethod
import numpy
class Location:
name: str = ""
long_name: str = ""
border_x: List[numpy.ndarray]
border_hull_x: List[numpy.ndarray]
border_y: List[numpy.ndarray] = []
border_hull_y: List[numpy.ndarray] = []
neighbors: "Locations" = {}
parent: "Optional[Location]" = None
children: "Locations" = {}
level: int = 0
alpha3code: str = ""
color: str = ""
def __init__(
self,
name: str,
long_name: Optional[str] = None,
parent: "Optional[Location]" = None,
):
self.name = name
self.long_name = long_name if long_name else name
self.parent = parent
self.border_x = []
self.border_hull_x = []
self.border_y = []
self.border_hull_y = []
@property
def location_x(self) -> numpy.ndarray:
pass
# TODO: implement here
@property
def location_y(self) -> numpy.ndarray:
pass
# TODO: implement here
def __str__(self):
return "Location('{}')".format(self.long_name)
Locations = Dict[str, Location]
| bsd-2-clause | -622,663,362,204,864,100 | 21.923077 | 57 | 0.557886 | false | 3.64526 | false | false | false |
dhp-denero/LibrERP | dt_product_brand/product_brand.py | 1 | 3987 | # -*- encoding: utf-8 -*-
#################################################################################
# #
# product_brand for OpenERP #
# Copyright (C) 2009 NetAndCo (<http://www.netandco.net>). #
# Authors, Mathieu Lemercier, [email protected], #
# Franck Bret, [email protected] #
# Copyright (C) 2011 Akretion Benoît Guillot <[email protected]> #
# #
# This program is free software: you can redistribute it and/or modify #
# it under the terms of the GNU Affero General Public License as #
# published by the Free Software Foundation, either version 3 of the #
# License, or (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU Affero General Public License for more details. #
# #
# You should have received a copy of the GNU Affero General Public License #
# along with this program. If not, see <http://www.gnu.org/licenses/>. #
# #
#################################################################################
from openerp.osv.orm import Model
from openerp.osv import fields
class product_brand(Model):
_name = 'product.brand'
_columns = {
'name': fields.char('Brand Name', size=32),
'description': fields.text('Description', translate=True),
'partner_id': fields.many2one('res.partner', 'partner', help='Select a partner for this brand if it exist'),
'logo': fields.binary('Logo File')
}
_order = 'name'
_sql_constraints = [
('name_uniq', 'unique (name)', 'The name of the Brand must be unique !')
]
class product_template(Model):
_name = 'product.template'
_inherit = 'product.template'
_columns = {
'product_brand_id': fields.many2one('product.brand', 'Brand', help='Select a brand for this product'),
}
class product_product(Model):
_name = 'product.product'
_inherit = 'product.product'
def onchange_product_brand_id(self, cr, uid, ids, product_brand_id, context=None):
"""
When category changes, we search for taxes, UOM and product type
"""
if context is None:
context = self.pool['res.users'].context_get(cr, uid, context=context)
res = {}
if not product_brand_id:
res = {
'manufacturer': False,
}
else:
brand_data = self.pool['product.brand'].read(cr, uid, product_brand_id, [], context=context)
if brand_data['partner_id']:
res['manufacturer'] = brand_data['partner_id']
return {'value': res, }
def search(self, cr, uid, args, offset=0, limit=0, order=None, context=None, count=False):
if context and context.get('product_brand_id', False):
product_ids = self.pool['product.product'].search(cr, uid, [('product_brand_id', '=', context['product_brand_id'])])
if product_ids:
product_ids = list(set(product_ids))
args.append(['id', 'in', product_ids])
return super(product_product, self).search(cr, uid, args, offset=offset, limit=limit, order=order, context=context, count=count)
| agpl-3.0 | 8,626,166,335,862,885,000 | 47.609756 | 136 | 0.493979 | false | 4.438753 | false | false | false |
jithinbp/pslab-desktop-apps | psl_res/GUI/D_PHYSICS/B_physics/B_SpeedOfSound.py | 1 | 3745 | #!/usr/bin/python
"""
::
This experiment is used to study non-inverting amplifiers
"""
from __future__ import print_function
from PSL_Apps.utilitiesClass import utilitiesClass
from PSL_Apps.templates import ui_template_graph_nofft
import numpy as np
from PyQt4 import QtGui,QtCore
import pyqtgraph as pg
import sys,functools,time
params = {
'image' : 'halfwave.png',
'name':'Speed of\nSound',
'hint':'''
Measure speed of sound using a 40KHz transmit piezo and receiver.<br>
'''
}
class AppWindow(QtGui.QMainWindow, ui_template_graph_nofft.Ui_MainWindow,utilitiesClass):
def __init__(self, parent=None,**kwargs):
super(AppWindow, self).__init__(parent)
self.setupUi(self)
self.I=kwargs.get('I',None)
self.setWindowTitle(self.I.H.version_string+' : '+params.get('name','').replace('\n',' ') )
from PSL.analyticsClass import analyticsClass
self.math = analyticsClass()
self.prescalerValue=0
self.plot=self.add2DPlot(self.plot_area,enableMenu=False)
#self.enableCrossHairs(self.plot,[])
labelStyle = {'color': 'rgb(255,255,255)', 'font-size': '11pt'}
self.plot.setLabel('left','V (CH1)', units='V',**labelStyle)
self.plot.setLabel('bottom','Time', units='S',**labelStyle)
self.plot.setYRange(-8.5,8.5)
self.tg=0.5
self.max_samples=10000
self.samples = self.max_samples
self.timer = QtCore.QTimer()
self.legend = self.plot.addLegend(offset=(-10,30))
self.curveCH1 = self.addCurve(self.plot,'RAMP In(CH1)')
self.autoRange()
self.WidgetLayout.setAlignment(QtCore.Qt.AlignLeft)
self.ControlsLayout.setAlignment(QtCore.Qt.AlignRight)
a1={'TITLE':'Acquire Data','FUNC':self.run,'TOOLTIP':'Sets SQR1 to HIGH, and immediately records the ramp'}
self.ampGain = self.buttonIcon(**a1)
self.WidgetLayout.addWidget(self.ampGain)
self.WidgetLayout.addWidget(self.addSQR1(self.I))
#Control widgets
a1={'TITLE':'TIMEBASE','MIN':0,'MAX':9,'FUNC':self.set_timebase,'UNITS':'S','TOOLTIP':'Set Timebase of the oscilloscope'}
self.ControlsLayout.addWidget(self.dialIcon(**a1))
G = self.gainIcon(FUNC=self.I.set_gain,LINK=self.gainChanged)
self.ControlsLayout.addWidget(G)
G.g1.setCurrentIndex(1);G.g2.setEnabled(False)
self.running=True
self.fit = False
def gainChanged(self,g):
self.autoRange()
def set_timebase(self,g):
timebases = [0.5,1,2,4,8,32,128,256,512,1024]
self.prescalerValue=[0,0,0,0,1,1,2,2,3,3,3][g]
samplescaling=[1,1,1,1,1,0.5,0.4,0.3,0.2,0.2,0.1]
self.tg=timebases[g]
self.samples = int(self.max_samples*samplescaling[g])
return self.autoRange()
def autoRange(self):
xlen = self.tg*self.samples*1e-6
self.plot.autoRange();
chan = self.I.analogInputSources['CH1']
R = [chan.calPoly10(0),chan.calPoly10(1023)]
R[0]=R[0]*.9;R[1]=R[1]*.9
self.plot.setLimits(yMax=max(R),yMin=min(R),xMin=0,xMax=xlen)
self.plot.setYRange(min(R),max(R))
self.plot.setXRange(0,xlen)
return self.samples*self.tg*1e-6
def run(self):
try:
self.ampGain.value.setText('reading...')
x,y = self.I.capture_fullspeed('CH3',self.samples,self.tg,'FIRE_PULSES',interval=50)
self.curveCH1.setData(x*1e-6,y)
#self.displayCrossHairData(self.plot,False,self.samples,self.I.timebase,[y],[(0,255,0)])
self.I.set_state(SQR1=False) #Set SQR1 to 0
return 'Done'
except Exception,e:
print (e)
return 'Error'
def saveData(self):
self.saveDataWindow([self.curveCH1],self.plot)
def closeEvent(self, event):
self.running=False
self.timer.stop()
self.finished=True
def __del__(self):
self.timer.stop()
print('bye')
if __name__ == "__main__":
from PSL import sciencelab
app = QtGui.QApplication(sys.argv)
myapp = AppWindow(I=sciencelab.connect())
myapp.show()
sys.exit(app.exec_())
| gpl-3.0 | 8,507,347,280,602,509,000 | 26.335766 | 123 | 0.695861 | false | 2.631764 | false | false | false |
massimovassalli/SingleCellForceSpectroscopy | sifork/qt/qtView.py | 1 | 7581 | from PyQt4 import QtCore, QtGui
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
_fromUtf8 = lambda s: s
import os as os
import sys
import pyqtgraph as pg
import numpy as np
import Ui_qtView as qtView_face
from sifork import experiment
pg.setConfigOption('background', 'w')
pg.setConfigOption('foreground', 'k')
htmlpre = '<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.0//EN" "http://www.w3.org/TR/REC-html40/strict.dtd">\n<html><head><meta name="qrichtext" content="1" /><style type="text/css">\np, li { white-space: pre-wrap; }\n</style></head><body style=" font-family:"Ubuntu"; font-size:11pt; font-weight:400; font-style:normal;">\n<p style=" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;"><span style=" font-size:8pt;">'
htmlpost = '</span></p></body></html>'
class curveWindow ( QtGui.QMainWindow ):
iter = 0
prev = 0
cRosso = QtGui.QColor(255,0,0)
cVerde = QtGui.QColor(50,255,50)
cNero = QtGui.QColor(0,0,0)
def __init__ ( self, parent = None ):
QtGui.QMainWindow.__init__( self, parent )
self.setWindowTitle( 'qtView' )
self.ui = qtView_face.Ui_facewindow()
self.ui.setupUi( self )
self.setConnections()
self.exp = experiment.experiment()
def addFiles(self, fnames = None):
if fnames == None:
fnames = QtGui.QFileDialog.getOpenFileNames(self, 'Select files', './')
QtCore.QCoreApplication.processEvents()
pmax = len(fnames)
QtGui.QApplication.setOverrideCursor(QtGui.QCursor(QtCore.Qt.WaitCursor))
progress = QtGui.QProgressDialog("Opening files...", "Cancel opening", 0, pmax);
i=0
for fname in fnames:
QtCore.QCoreApplication.processEvents()
self.exp.addFiles([str(fname)])
progress.setValue(i)
i=i+1
if (progress.wasCanceled()):
break
progress.setValue(pmax)
QtGui.QApplication.restoreOverrideCursor()
self.refillList()
def addDirectory(self,dirname=None):
if dirname == None:
dirname = QtGui.QFileDialog.getExistingDirectory(self, 'Select a directory', './')
if not os.path.isdir(dirname):
return
QtCore.QCoreApplication.processEvents()
pmax = len(os.listdir(dirname))
QtGui.QApplication.setOverrideCursor(QtGui.QCursor(QtCore.Qt.WaitCursor))
progress = QtGui.QProgressDialog("Opening files...", "Cancel opening", 0, pmax);
i=0
for fnamealone in os.listdir(dirname):
#if i % 100 == 0:
QtCore.QCoreApplication.processEvents()
fname = os.path.join(str(dirname), fnamealone)
self.exp.addFiles([str(fname)])
progress.setValue(i)
i=i+1
if (progress.wasCanceled()):
break
progress.setValue(pmax)
QtGui.QApplication.restoreOverrideCursor()
self.refillList()
def refillList(self):
scena = QtGui.QGraphicsScene()
width = self.ui.griglia.width()
height = self.ui.griglia.height()
N = len(self.exp)
self.ui.slide1.setMaximum(N)
self.ui.slide2.setMaximum(N)
self.ui.slide3.setMaximum(N)
gNx = np.sqrt(N*width/height)
Nx = int(np.ceil(gNx))
if int(gNx) == Nx:
Nx+=1
L = int(width/Nx)
i = 0
j = 0
k=0
if L<=3:
L=3
while i*Nx+j<N:
h = L-2
w = L-2
rect = QtCore.QRectF(j*(L)+1, i*(L)+1, h, w)
idrect = scena.addRect(rect, pen = QtGui.QPen(self. cVerde,0) ,brush = self. cVerde )
j+=1
k+=1
if j == Nx:
j=0
i+=1
scena.wheelEvent = self.scorri
self.ui.griglia.setScene(scena)
self.ui.slide1.setValue(1)
# og = self.ui.griglia.items()
# for i in range(len(og)):
# if self.curves[-i-1].inconsistency:
# og[i].setBrush(self.cRosso)
# og[i].setPen(self.cRosso)
self.ui.griglia.invalidateScene()
return True
def scorri(self,ev=None):
delta = ev.delta()/120
self.ui.slide2.setSliderPosition(self.ui.slide2.sliderPosition()-delta)
def sqSwitch(self,i,n):
og = self.ui.griglia.items()
if n:
c = self.cNero
else:
c = og[-i].brush().color()
og[-i].setPen(c)
def goToCurve(self,dove):
self.ui.labFilename.setText(htmlpre + self.exp[dove-1].basename + htmlpost)
if self.prev != 0:
self.sqSwitch(self.prev,False)
self.sqSwitch(dove,True)
self.prev = dove
self.viewCurve(dove)
def updateCurve(self):
self.viewCurve(self.ui.slide1.value(),autorange=False)
def refreshCurve(self):
self.viewCurve(self.ui.slide1.value(),autorange=True)
def viewCurve(self,dove = 1,autorange=True):
dove -= 1
self.ui.grafo.clear()
for p in self.exp[dove]:
if p == self.exp[dove][-1]:
self.ui.grafo.plot(p.z,p.f,pen='b')
else:
self.ui.grafo.plot(p.z,p.f)
if autorange:
self.ui.grafo.autoRange()
def setConnections(self):
# QtCore.QObject.connect(self.ui.slide1, QtCore.SIGNAL(_fromUtf8("actionTriggered(int)")), self.moveJumping)
QtCore.QObject.connect(self.ui.slide1, QtCore.SIGNAL(_fromUtf8("valueChanged(int)")), self.ui.slide2.setValue)
QtCore.QObject.connect(self.ui.slide1, QtCore.SIGNAL(_fromUtf8("valueChanged(int)")), self.ui.slide3.setValue)
QtCore.QObject.connect(self.ui.slide2, QtCore.SIGNAL(_fromUtf8("valueChanged(int)")), self.ui.slide1.setValue)
QtCore.QObject.connect(self.ui.slide3, QtCore.SIGNAL(_fromUtf8("valueChanged(int)")), self.ui.slide1.setValue)
QtCore.QObject.connect(self.ui.slide1, QtCore.SIGNAL(_fromUtf8("valueChanged(int)")), self.goToCurve )
# QtCore.QObject.connect(self.ui.slide2, QtCore.SIGNAL(_fromUtf8("actionTriggered(int)")), self.moveJumping)
# QtCore.QObject.connect(self.ui.slide2, QtCore.SIGNAL(_fromUtf8("sliderReleased()")), self.moveJumping)
#QtCore.QObject.connect(self.ui.slide1, QtCore.SIGNAL(_fromUtf8("valueChanged(int)")), self.goToCurve)
QtCore.QObject.connect(self.ui.bAddDir, QtCore.SIGNAL(_fromUtf8("clicked()")), self.addDirectory)
QtCore.QObject.connect(self.ui.bAddFiles, QtCore.SIGNAL(_fromUtf8("clicked()")), self.addFiles)
#QtCore.QObject.connect(self.ui.pushButton_3, QtCore.SIGNAL(_fromUtf8("clicked()")), self.switchColor)
#QtCore.QObject.connect(self.ui.pushButton, QtCore.SIGNAL(_fromUtf8("clicked()")), self.saveCurves)
#QtCore.QObject.connect(self.ui.pushButton_2, QtCore.SIGNAL(_fromUtf8("clicked()")), self.processNext)
#QtCore.QObject.connect(self.ui.spButton, QtCore.SIGNAL(_fromUtf8("clicked()")), facewindow.savePeaks)
#QtCore.QObject.connect(self.ui.pThreshold, QtCore.SIGNAL(_fromUtf8("editingFinished()")), self.refreshCurve)
QtCore.QMetaObject.connectSlotsByName(self)
if __name__ == "__main__":
import sys
app = QtGui.QApplication(sys.argv)
app.setApplicationName( 'qtView' )
canale = curveWindow()
canale.show()
QtCore.QObject.connect( app, QtCore.SIGNAL( 'lastWindowClosed()' ), app, QtCore.SLOT( 'quit()' ) )
sys.exit(app.exec_()) | mit | 3,310,249,543,625,385,500 | 38.284974 | 471 | 0.611661 | false | 3.367837 | false | false | false |
vacancy/TensorArtist | examples/generative-model/desc_vae_mnist_mlp_bernoulli_adam.py | 1 | 4242 | # -*- coding:utf8 -*-
# File : desc_vae_mnist_mlp_bernoulli_adam.py
# Author : Jiayuan Mao
# Email : [email protected]
# Date : 3/17/17
#
# This file is part of TensorArtist.
from tartist.core import get_env, get_logger
from tartist.core.utils.naming import get_dump_directory, get_data_directory
from tartist.nn import opr as O, optimizer, summary
logger = get_logger(__file__)
__envs__ = {
'dir': {
'root': get_dump_directory(__file__),
'data': get_data_directory('WellKnown/mnist')
},
'trainer': {
'learning_rate': 0.001,
'batch_size': 100,
'epoch_size': 500,
'nr_epochs': 100,
},
'inference': {
'batch_size': 256,
'epoch_size': 40
},
'demo': {
'is_reconstruct': False
}
}
def make_network(env):
with env.create_network() as net:
code_length = 20
h, w, c = 28, 28, 1
is_reconstruct = get_env('demo.is_reconstruct', False)
dpc = env.create_dpcontroller()
with dpc.activate():
def inputs():
img = O.placeholder('img', shape=(None, h, w, c))
return [img]
def forward(x):
if is_reconstruct or env.phase is env.Phase.TRAIN:
with env.variable_scope('encoder'):
_ = x
_ = O.fc('fc1', _, 500, nonlin=O.tanh)
_ = O.fc('fc2', _, 500, nonlin=O.tanh)
mu = O.fc('fc3_mu', _, code_length)
log_var = O.fc('fc3_sigma', _, code_length)
var = O.exp(log_var)
std = O.sqrt(var)
epsilon = O.random_normal([x.shape[0], code_length])
z_given_x = mu + std * epsilon
else:
z_given_x = O.random_normal([1, code_length])
with env.variable_scope('decoder'):
_ = z_given_x
_ = O.fc('fc1', _, 500, nonlin=O.tanh)
_ = O.fc('fc2', _, 500, nonlin=O.tanh)
_ = O.fc('fc3', _, 784, nonlin=O.sigmoid)
_ = _.reshape(-1, h, w, c)
x_given_z = _
if env.phase is env.Phase.TRAIN:
with env.variable_scope('loss'):
content_loss = O.raw_cross_entropy_prob('raw_content', x_given_z.flatten2(), x.flatten2())
content_loss = content_loss.sum(axis=1).mean(name='content')
# distrib_loss = 0.5 * (O.sqr(mu) + O.sqr(std) - 2. * O.log(std + 1e-8) - 1.0).sum(axis=1)
distrib_loss = -0.5 * (1. + log_var - O.sqr(mu) - var).sum(axis=1)
distrib_loss = distrib_loss.mean(name='distrib')
loss = content_loss + distrib_loss
dpc.add_output(loss, name='loss', reduce_method='sum')
dpc.add_output(x_given_z, name='output')
dpc.set_input_maker(inputs).set_forward_func(forward)
net.add_all_dpc_outputs(dpc, loss_name='loss')
if env.phase is env.Phase.TRAIN:
summary.inference.scalar('loss', net.loss)
def make_optimizer(env):
wrapper = optimizer.OptimizerWrapper()
wrapper.set_base_optimizer(optimizer.base.AdamOptimizer(get_env('trainer.learning_rate')))
wrapper.append_grad_modifier(optimizer.grad_modifier.LearningRateMultiplier([
('*/b', 2.0),
]))
# wrapper.append_grad_modifier(optimizer.grad_modifier.WeightDecay([
# ('*/W', 0.0005)
# ]))
env.set_optimizer(wrapper)
from data_provider_vae_mnist import *
def main_train(trainer):
from tartist.plugins.trainer_enhancer import summary
summary.enable_summary_history(trainer)
summary.enable_echo_summary_scalar(trainer)
from tartist.plugins.trainer_enhancer import progress
progress.enable_epoch_progress(trainer)
from tartist.plugins.trainer_enhancer import snapshot
snapshot.enable_snapshot_saver(trainer)
from tartist.plugins.trainer_enhancer import inference
inference.enable_inference_runner(trainer, make_dataflow_inference)
trainer.train()
| mit | 2,794,771,510,245,185,000 | 33.487805 | 114 | 0.537247 | false | 3.479902 | false | false | false |
CLVsol/clvsol_odoo_addons | clv_person_aux/models/address_aux.py | 1 | 3795 | # -*- coding: utf-8 -*-
# Copyright (C) 2013-Today Carlos Eduardo Vercelino - CLVsol
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
import logging
from odoo import api, fields, models
_logger = logging.getLogger(__name__)
class AddressAux(models.Model):
_inherit = 'clv.address_aux'
person_aux_ids = fields.One2many(
comodel_name='clv.person_aux',
inverse_name='ref_address_aux_id',
string='Persons (Aux)'
)
count_persons_aux = fields.Integer(
string='Persons (Aux) (count)',
compute='_compute_count_persons_aux',
# store=True
)
@api.depends('person_aux_ids')
def _compute_count_persons_aux(self):
for r in self:
r.count_persons_aux = len(r.person_aux_ids)
class PersonAux(models.Model):
_inherit = 'clv.person_aux'
ref_address_aux_is_unavailable = fields.Boolean(
string='Address (Aux) is unavailable',
default=False,
)
ref_address_aux_id = fields.Many2one(comodel_name='clv.address_aux', string='Address (Aux)', ondelete='restrict')
ref_address_aux_code = fields.Char(string='Address (Aux) Code', related='ref_address_aux_id.code', store=False)
ref_address_aux_phone = fields.Char(string='Address (Aux) Phone', related='ref_address_aux_id.phone')
ref_address_aux_mobile_phone = fields.Char(string='Address (Aux) Mobile', related='ref_address_aux_id.mobile')
ref_address_aux_email = fields.Char(string='Address (Aux) Email', related='ref_address_aux_id.email')
ref_address_aux_category_names = fields.Char(
string='Address (Aux) Category Names',
related='ref_address_aux_id.category_ids.name',
store=True
)
ref_address_aux_category_ids = fields.Many2many(
comodel_name='clv.address.category',
string='Address (Aux) Categories',
related='ref_address_aux_id.category_ids'
)
@api.multi
def do_person_aux_get_ref_address_aux_data(self):
for person_aux in self:
_logger.info(u'>>>>> %s', person_aux.ref_address_aux_id)
if (person_aux.reg_state in ['draft', 'revised']) and \
(person_aux.ref_address_aux_id.id is not False):
data_values = {}
if person_aux.ref_address_aux_id.id is not False:
data_values['ref_address_aux_id'] = person_aux.ref_address_aux_id.id
data_values['street'] = person_aux.ref_address_aux_id.street
data_values['street2'] = person_aux.ref_address_aux_id.street2
data_values['zip'] = person_aux.ref_address_aux_id.zip
data_values['city'] = person_aux.ref_address_aux_id.city
data_values['state_id'] = person_aux.ref_address_aux_id.state_id.id
data_values['country_id'] = person_aux.ref_address_aux_id.country_id.id
# data_values['phone'] = person_aux.ref_address_aux_id.phone
# data_values['mobile'] = person_aux.ref_address_aux_id.mobile
_logger.info(u'>>>>>>>>>> %s', data_values)
person_aux.write(data_values)
return True
@api.multi
def do_person_aux_remove_ref_address_aux(self):
for person_aux in self:
_logger.info(u'>>>>> %s', person_aux.ref_address_aux_id)
if (person_aux.reg_state in ['draft', 'revised']) and \
(person_aux.ref_address_aux_id.id is not False):
data_values = {}
if person_aux.ref_address_aux_id.id is not False:
data_values['ref_address_aux_id'] = False
_logger.info(u'>>>>>>>>>> %s', data_values)
person_aux.write(data_values)
return True
| agpl-3.0 | -649,032,661,184,708,100 | 34.138889 | 117 | 0.594203 | false | 3.418919 | false | false | false |
mzdu/2048gae | src/main.py | 1 | 1335 | import webapp2
import jinja2
import os
import logging
jinja_environment = jinja2.Environment(loader = jinja2.FileSystemLoader(os.path.dirname(__file__) + '/templates'))
def doRender(handler, tname = 'index.html', values = {}):
temp = jinja_environment.get_template(tname)
handler.response.out.write(temp.render(values))
return True
class MainPageHandler(webapp2.RequestHandler):
def get(self):
values = dict()
values['css'] = ['/static/css/main.css']
values['javascript'] = ['/static/js/bind_polyfill.js',
'/static/js/classlist_polyfill.js',
'/static/js/animframe_polyfill.js',
'/static/js/keyboard_input_manager.js',
'/static/js/html_actuator.js',
'/static/js/grid.js',
'/static/js/tile.js',
'/static/js/local_storage_manager.js',
'/static/js/game_manager.js',
'/static/js/application.js',
]
doRender(self, 'index.html', values)
app = webapp2.WSGIApplication([('/.*', MainPageHandler)],debug = True)
| mit | 6,896,633,148,561,898,000 | 35.083333 | 114 | 0.495131 | false | 4.405941 | false | false | false |
tylerlaberge/Jasper | jasper/steps.py | 1 | 1413 | """
The steps module.
"""
from functools import wraps
import asyncio
class Step(object):
"""
The Step class is used as a wrapper around functions for testing behaviours.
"""
def __init__(self, function, **kwargs):
"""
Initialize a new Step object.
:param function: The function this step will call when this step is run.
:param kwargs: Kwargs to call the given function with.
"""
self.function = function
self.kwargs = kwargs
self.ran = False
self.passed = False
async def run(self, context):
"""
Run this step and record the results.
:param context: A context object too pass into this steps function.
"""
try:
if asyncio.iscoroutinefunction(self.function):
await self.function(context, **self.kwargs)
else:
self.function(context, **self.kwargs)
except Exception:
raise
else:
self.passed = True
finally:
self.ran = True
def step(func):
"""
A decorator for wrapping a function into a Step object.
:param func: The function to create a step out of.
:return: A function which when called will return a new instance of a Step object.
"""
@wraps(func)
def wrapper(**kwargs):
return Step(func, **kwargs)
return wrapper
| mit | 7,253,816,304,372,000,000 | 24.232143 | 86 | 0.587403 | false | 4.587662 | false | false | false |
tsl143/addons-server | src/olympia/users/cron.py | 1 | 1443 | from django.db import connections
import multidb
from celery import group
import olympia.core.logger
from olympia.amo import VALID_ADDON_STATUSES
from olympia.amo.utils import chunked
from .tasks import update_user_ratings_task
task_log = olympia.core.logger.getLogger('z.task')
def update_user_ratings():
"""Update add-on author's ratings."""
cursor = connections[multidb.get_slave()].cursor()
# We build this query ahead of time because the cursor complains about data
# truncation if it does the parameters. Also, this query is surprisingly
# quick, <1sec for 6100 rows returned
q = """ SELECT
addons_users.user_id as user_id,
AVG(rating) as avg_rating
FROM reviews
INNER JOIN versions
INNER JOIN addons_users
INNER JOIN addons
ON reviews.version_id = versions.id
AND addons.id = versions.addon_id
AND addons_users.addon_id = addons.id
WHERE reviews.reply_to IS NULL
AND reviews.rating > 0
AND addons.status IN (%s)
GROUP BY addons_users.user_id
""" % (",".join(map(str, VALID_ADDON_STATUSES)))
cursor.execute(q)
d = cursor.fetchall()
cursor.close()
ts = [update_user_ratings_task.subtask(args=[chunk])
for chunk in chunked(d, 1000)]
group(ts).apply_async()
| bsd-3-clause | 5,285,070,275,735,279,000 | 30.369565 | 79 | 0.615385 | false | 3.879032 | false | false | false |
IncidentNormal/TestApps | ALE/Platform_nodes_Draft3b.py | 1 | 7673 | from SimPy.Simulation import *
import visual as v
import math
from random import seed, uniform, randint
class Global():
NUMNODES = 4
NUMCHANNELS = 1
Node_L_List = []
Node_A_List = []
Node_S_List = []
ChannelList = [] #stores
NodeSendQueueList = [] #stores
maxTime = 10
class Mon():
NumListenCollisions = 0
NumSendingCollisions = 0
class Packet():
def __init__(self, a_to, a_from, tx, p_type):
self.addr_to = a_to #should be int
self.addr_from = a_from
self.tx = tx
self.p_type = p_type #0=data, 1=confirm
class NodeListen(Process):
def __init__(self,i):
Process.__init__(self,name='NodeL'+str(i))
self.ID = i #shared between Listen and Send processes
def execute(self):
while True:
yield hold, self, 0.01
for chn in G.ChannelList: #potential to randomise this order to prevent all Nodes searching iteratively
if chn.nrBuffered > 0:
for pkt in chn.theBuffer: #this is a bit magic atm: checking packet without 'grabbing' it
if pkt.addr_to == self.ID and pkt.p_type == 0:
yield (get,self,chn,1,1),(hold,self,0.0001) #renege after very short time: if item's not there immediately then move on
if len(self.got)>0:
print 'Node',self.ID, 'got packet from Node',self.got[0].addr_from
#yield get,self,chn,1,1 #priority 1 (low)
conf_pkt = Packet(self.got[0].addr_from,self.ID,now(),1)
yield put,self,G.NodeSendQueueList[self.ID],[conf_pkt],5 #priority 5 (high)
print 'Node',self.ID, 'put CONF packet on NodeSendQueue'
else:
Mon.NumListenCollisions += 1
print 'Listen Collision'
yield get,self,chn,1,100 #priority 100 (v high) - getting colliding packet from channel
print self.got
elif pkt.addr_to == self.ID and pkt.p_type == 1:
print 'Node',self.ID,' received CONF packet from', pkt.addr_from, now()
yield get,self,chn,1,1
self.interrupt(G.Node_S_List[pkt.addr_from])
class NodePacketAdder(Process):
def __init__(self,i):
Process.__init__(self,name='NodeA'+str(i))
self.ID = i #shared between Listen and Send and Adding processes
def execute(self):
while True:
yield hold, self, uniform(1,5)
nodeToSend = randint(0,G.NUMNODES-1)
while nodeToSend == self.ID: #make sure not sending to itself
nodeToSend = randint(0,G.NUMNODES-1)
pkt = Packet(nodeToSend,self.ID,now(),0)
yield put,self,G.NodeSendQueueList[self.ID],[pkt],1 #priority 1 (low)
class NodeSend(Process):
def __init__(self,i):
Process.__init__(self,name='NodeS'+str(i))
self.ID = i
def execute(self):
yield hold, self, uniform(0,1) #so don't all start at same time
while True:
sent = False
choice = -1
while sent==False :
if G.NodeSendQueueList[self.ID].nrBuffered > 0:
for i in range(G.NUMCHANNELS):
if G.ChannelList[i].nrBuffered==0:
choice = i
break
if choice != -1:
yield hold, self, 0.001 #very short wait to represent slight delay
if G.ChannelList[choice].nrBuffered==0:
if G.NodeSendQueueList[self.ID].nrBuffered > 0:
yield get,self,G.NodeSendQueueList[self.ID],1,1 #priority 1 (low)
print 'Node',self.ID, 'read from NodeSendQueue, sending packet to:', self.got[0].addr_to, 'type:', self.got[0].p_type, 'on', chn.name
else:
print 'Something bad happened'
yield put,self,chn,self.got, 1 #priority 1 (low)
sent=True
if self.got[0].p_type==1:
yield hold,self,0.1 #time to recieve packet before resending
if self.interrupted():
yield get,self,G.NodeSendQueueList[self.ID],1,100 #pop off first entry in list, else it remains on list for next loop, priority 100 (v high)
self.interruptReset()
print 'Interrupt success: Conf packet received'
else:
print 'Node',self.ID, 'did not receieve conf, resending'
else:
yield hold,self,0.01
else:
Mon.NumSendingCollisions += 1
print 'Sending Collision'
yield get,self,chn,1,100 #priority 100 (v high) - getting colliding packet from channel
print self.got
yield hold, self, uniform(0,1) #backoff
choice = -1
else:
yield hold,self,0.01 #if no free channels
else:
yield hold,self,0.01 #if nothing in buffer
class visualising():
def __init__(self):
self.sphereList = [] #sphere for each node
self.rodList = [] #unused
self.manageRodList = [] #rods connecting nodes to centre management node
r = 1.0 #radius of circle that nodes are in
delta_theta = (2.0*math.pi) / G.NUMNODES #angle between nodes
theta = 0
self.management = v.sphere(pos=v.vector(0,0,0), radius=0.1, colour=v.color.blue) #management node in centre
self.label = v.label(pos=(1,1,0), text= '0') #label for amount of disparities at that point in time
self.label_cum = v.label(pos=(-1,1,0), text= '0') #cumulative total number of above
for i in range(0,G.NUMNODES):
circ = v.sphere(pos=v.vector(r*math.cos(theta),r*math.sin(theta),0), radius=0.1, color=v.color.green)
self.sphereList.append(circ)
print 'circle no. ', i, ' coords ', r*math.cos(theta), ' ', r*math.sin(theta)
theta += delta_theta
rod = v.cylinder(pos=(0,0,0),axis=(self.sphereList[i].pos), radius=0.005, color=v.color.white)
self.manageRodList.append(rod)
initialize()
G = Global()
Vis=visualising()
for i in range(G.NUMCHANNELS):
chn = Store(name='Channel'+str(i),unitName='packet',capacity=1,putQType=PriorityQ,getQType=PriorityQ)
G.ChannelList.append(chn)
for i in range(G.NUMNODES):
nodeQueue = Store(name='NodeQueue'+str(i),unitName='packet',capacity=1,putQType=PriorityQ,getQType=PriorityQ)
G.NodeSendQueueList.append(nodeQueue)
node_l = NodeListen(i)
node_a = NodePacketAdder(i)
node_s = NodeSend(i)
G.Node_L_List.append(node_l)
G.Node_A_List.append(node_a)
G.Node_S_List.append(node_s)
activate(G.Node_L_List[i],G.Node_L_List[i].execute(),at=0.0)
activate(G.Node_A_List[i],G.Node_A_List[i].execute(),at=0.0)
activate(G.Node_S_List[i],G.Node_S_List[i].execute(),at=0.0)
simulate(until=G.maxTime)
| gpl-2.0 | -7,459,447,548,906,195,000 | 47.563291 | 176 | 0.524567 | false | 3.828842 | false | false | false |
thinkingmachines/deeplearningworkshop | codelab_5_simple_cnn.py | 1 | 1964 | # SIMPLE MNIST CNN
# Source: https://www.tensorflow.org/tutorials/layers
def cnn_model_fn(features, labels, mode):
"""Model function for CNN."""
# Input Layer
input_layer = tf.reshape(features, [-1, 28, 28, 1])
# Convolutional Layer #1
conv1 = tf.layers.conv2d(
inputs=input_layer,
filters=32,
kernel_size=[5, 5],
padding="same",
activation=tf.nn.relu)
# Pooling Layer #1
pool1 = tf.layers.max_pooling2d(inputs=conv1, pool_size=[2, 2], strides=2)
# Convolutional Layer #2 and Pooling Layer #2
conv2 = tf.layers.conv2d(
inputs=pool1,
filters=64,
kernel_size=[5, 5],
padding="same",
activation=tf.nn.relu)
pool2 = tf.layers.max_pooling2d(inputs=conv2, pool_size=[2, 2], strides=2)
# Dense Layer
pool2_flat = tf.reshape(pool2, [-1, 7 * 7 * 64])
dense = tf.layers.dense(inputs=pool2_flat, units=1024, activation=tf.nn.relu)
dropout = tf.layers.dropout(
inputs=dense, rate=0.4, training=mode == learn.ModeKeys.TRAIN)
# Logits Layer
logits = tf.layers.dense(inputs=dropout, units=10)
loss = None
train_op = None
# Calculate Loss (for both TRAIN and EVAL modes)
if mode != learn.ModeKeys.INFER:
onehot_labels = tf.one_hot(indices=tf.cast(labels, tf.int32), depth=10)
loss = tf.losses.softmax_cross_entropy(
onehot_labels=onehot_labels, logits=logits)
# Configure the Training Op (for TRAIN mode)
if mode == learn.ModeKeys.TRAIN:
train_op = tf.contrib.layers.optimize_loss(
loss=loss,
global_step=tf.contrib.framework.get_global_step(),
learning_rate=0.001,
optimizer="SGD")
# Generate Predictions
predictions = {
"classes": tf.argmax(
input=logits, axis=1),
"probabilities": tf.nn.softmax(
logits, name="softmax_tensor")
}
# Return a ModelFnOps object
return model_fn_lib.ModelFnOps(
mode=mode, predictions=predictions, loss=loss, train_op=train_op)
| mit | 78,197,066,181,890,620 | 29.215385 | 79 | 0.653768 | false | 3.177994 | false | false | false |
Penaz91/Glitch_Heaven | Game/loadSaves.py | 1 | 2299 | # Load Game Menu Component
# Part of the Glitch_Heaven project
# Copyright 2015-2016 Penaz <[email protected]>
from components.UI.loadmenu import loadMenu
from os import listdir
from os.path import join as pjoin
from os import remove
from game import Game
from components.UI.textMenuItem import textMenuItem
from components.UI.textinput import textInput
class loadSaveMenu(loadMenu):
def __init__(self, screen, keys, config, sounds, log):
self.logSectionName = "loadGameMenu"
self.dirlist = sorted(listdir(pjoin("savegames")))
super().__init__(screen, keys, config, sounds, log)
def loadGame(self, savegame):
print(pjoin("savegames", savegame))
Game().main(self.screen,
self.keys,
"load",
pjoin("savegames", savegame),
self.config,
self.sounds,
None,
self.mainLogger)
self.running = False
def eraseSave(self, savegame):
confirm = textInput(self.screen, self.font, "Type 'Yes' to confirm deletion").get_input()
if (confirm.upper() == "YES"):
remove(pjoin("savegames", savegame))
self.running = False
def makeLoadItem(self):
self.loadgame = textMenuItem("Load", (250, 560),
lambda: self.editDesc(
"Load the selected savegame"),
lambda: self.loadGame(
self.dirlist[self.id]),
self.config, self.sounds, self.font)
self.activeItems.append(self.loadgame)
self.items.append(self.loadgame)
def makeEraseItem(self):
self.erase = textMenuItem("Erase", (400, 560),
lambda: self.editDesc(
"Delete the Selected SaveGame"),
lambda: self.eraseSave(self.dirlist[self.id]),
self.config, self.sounds, self.font)
self.activeItems.append(self.erase)
self.items.append(self.erase)
def makeMenuItems(self):
super().makeMenuItems()
self.makeEraseItem() | mit | 7,232,331,274,901,255,000 | 37.983051 | 97 | 0.545455 | false | 4.387405 | false | false | false |
djrrb/OldGlory | oldGlory.py | 1 | 7067 | """
OLD GLORY
By David Jonathan Ross <http://www.djr.com>
This drawbot script will draw the American Flag.
It's also responsive! I made this to experiment with Drawbot Variables.
For the most part, it follows the rules here:
http://en.wikipedia.org/wiki/Flag_of_the_United_States#Specifications
It does make some small allowances in order to get better results when the
variables are customized.
Wouldn't it be cool if the stars followed the historical patterns, starting with the ring of 13? Maybe next time.
"""
import random
from AppKit import NSColor
######
# SETTING GLOBAL VARIABLES
######
# define some of our key variables as special DrawBot variables, which can be manipulated with a simple UI
Variable([
dict(
name='flagSize',
ui='Slider',
args=dict(
minValue=1,
maxValue=10,
value=5,
tickMarkCount=10,
stopOnTickMarks=True
)
),
dict(
name='proportion',
ui='Slider',
args=dict(
minValue=1,
maxValue=3,
value=1.9,
tickMarkCount=21,
stopOnTickMarks=True
)
),
dict(
name='stripeCount',
ui='Slider',
args=dict(
minValue=1,
maxValue=21,
value=13,
tickMarkCount=11,
stopOnTickMarks=True
)
),
dict(
name='starRows',
ui='Slider',
args=dict(
minValue=1,
maxValue=21,
value=9,
tickMarkCount=11,
stopOnTickMarks=True
)
),
dict(
name='starCols',
ui='Slider',
args=dict(
minValue=1,
maxValue=21,
value=11,
tickMarkCount=11,
stopOnTickMarks=True
)
),
dict(
name='oddStripeColor',
ui='ColorWell',
args=dict(color=NSColor.redColor())
),
dict(
name='evenStripeColor',
ui='ColorWell',
args=dict(color=NSColor.whiteColor())
),
dict(
name='cantonColor',
ui='ColorWell',
args=dict(color=NSColor.blueColor())
),
dict(
name='starColor',
ui='ColorWell',
args=dict(color=NSColor.whiteColor())
),
dict(
name='jasperize',
ui='Slider',
args=dict(
minValue=1,
maxValue=6,
value=1,
tickMarkCount=6,
stopOnTickMarks=True
)
),
], globals())
# here are some other variables that will help us draw the flag
inch = 72
# our base unit, the height of the flag
unit = flagSize * inch
# some of the variables come out of the UI as floats, but I need them as ints
# since I intend to use them with the range() function
jasperize = int(round(jasperize))
stripeCount = int(round(stripeCount))
starRows = int(round(starRows))
starCols = int(round(starCols))
# flag dimensions
#proportion = 1.9 ###### this is now an adjustable variable
pageWidth = unit * proportion
pageHeight = unit
# stripes
stripeHeight = pageHeight / int(round(stripeCount))
# canton
cantonHeight = stripeHeight * ( int(round(stripeCount)/2) + 1)
cantonWidth = (2 / 5) * pageWidth
# stars
starColWidth = cantonWidth / (starCols+1)
starRowWidth = cantonHeight / (starRows+1)
# starDiameter should be defined as (4 / 5) * stripeHeight, but this rule
# allows decent star sizing regardless of the number of starCols or starRows
starDiameter = min(starColWidth, starRowWidth)
# let's define the drawing of the star as a function, since we will be using it a lot
def star(x, y, d, b=None):
# this is a hacky, non-mathematically correct star made from two polygons
# if I were good at math, I would have drawn this a smarter way
fill(starColor)
r = d/2
# an upside down triangle
newPath()
moveTo((x-r/1.1, y+r/3.5))
lineTo((x+r/1.1, y+r/3.5))
lineTo((x, y-r/2.6))
closePath()
drawPath()
# a right side up triangle with a divet in the bottom
newPath()
moveTo((x, y+r))
lineTo((x-r/1.6, y-r/1.3))
lineTo((x, y-r/2.6))
lineTo((x+r/1.6, y-r/1.3))
closePath()
drawPath()
######
# BUILD THE FLAG
######
# set page size
size(pageWidth, pageHeight)
# Loop through all the times we are going to draw the flag
for flag in range(jasperize):
# Stripes
# build the stripes up from the origin
y = 0
for stripe in range(stripeCount):
if stripe % 2:
fill(evenStripeColor)
else:
fill(oddStripeColor)
rect(0, y, pageWidth, stripeHeight)
# increment the y value so we travel up the page
y += pageHeight/stripeCount
# CANTON (that's the blue thing)
# make a rectangle from the top left corner
fill(cantonColor)
rect(0, pageHeight-cantonHeight, cantonWidth, cantonHeight)
# STARS
# the american flag does not contain an even grid of stars
# some rows have 6 stars, others have 5
# some columns have 5 stars, others have 4
# but if we think of the canton as a checkerboard, there is a 9x11 grid
# where each position can have either a star or a gap.
# let's define the position where we will start drawing the stars
starOriginX = starColWidth
starOriginY = pageHeight - cantonHeight + starRowWidth
# now let's define some variables that we will change as we loop through
starX = starOriginX
starY = starOriginY
# loop through all of the rows
for y in range(starRows):
# loop through all of the columns
for x in range(starCols):
# if both row and column are odd, draw the star
if not x % 2 and not y % 2:
star(starX, starY, starDiameter)
# if both row and column are even, also draw the star:
elif x % 2 and y % 2:
star(starX, starY, starDiameter)
# if the row is odd and the column is even, or vice versa
# we should draw nothing
# increment the x value to continue across the row
starX += starColWidth
# when we are done with the row, reset the x value and increment the y
starX = starOriginX
starY += starRowWidth
# Draw the shadow as two rectangles
shadowLength = height() / 30
fill(0, 0, 0, .5)
rect(shadowLength, -shadowLength*2, width()+shadowLength, shadowLength*2)
rect(width(), 0, shadowLength*2, height()-shadowLength)
# now that we are done drawing the flag
# scale the canvas, and relocate our canvas's position to the center
# this way, all future drawing will happen at a new scale, for jasperization
scaleFactor = .78
widthDiff = width()-width()*scaleFactor
heightDiff = height()-height()*scaleFactor
translate(widthDiff/2, heightDiff/2)
scale(scaleFactor)
# keep your eye on that grand old flag!
| mit | 6,339,230,936,733,769,000 | 27.963115 | 113 | 0.600538 | false | 3.669263 | false | false | false |
SU-ECE-17-7/hotspotter | hscom/fileio.py | 1 | 12148 | from __future__ import division, print_function
import __common__
(print, print_, print_on, print_off,
rrr, profile) = __common__.init(__name__, '[io]')
# Python
import os
import fnmatch
import pickle
import cPickle
from os.path import normpath, exists, realpath, join, expanduser, dirname
import datetime
import time
# Science
import numpy as np
import cv2
from PIL import Image
from PIL.ExifTags import TAGS
# Hotspotter
import helpers
#import skimage
#import shelve
#import datetime
#import timeit
VERBOSE_IO = 0 # 2
# --- Saving ---
def save_npy(fpath, data):
with open(fpath, 'wb') as file:
np.save(file, data)
def save_npz(fpath, data):
with open(fpath, 'wb') as file:
np.savez(file, data)
def save_cPkl(fpath, data):
with open(fpath, 'wb') as file:
cPickle.dump(data, file, cPickle.HIGHEST_PROTOCOL)
def save_pkl(fpath, data):
with open(fpath, 'wb') as file:
pickle.dump(data, file, pickle.HIGHEST_PROTOCOL)
# --- Loading ---
def load_npz_memmap(fpath):
with open(fpath, 'rb') as file:
npz = np.load(file, mmap_mode='r')
data = npz['arr_0']
npz.close()
return data
def load_npz(fpath):
with open(fpath, 'rb') as file:
npz = np.load(file, mmap_mode=None)
data = npz['arr_0']
npz.close()
return data
def load_npy(fpath):
with open(fpath, 'rb') as file:
data = np.load(file)
return data
def load_cPkl(fpath):
with open(fpath, 'rb') as file:
data = cPickle.load(file)
return data
def load_pkl(fpath):
with open(fpath, 'rb') as file:
data = pickle.load(file)
return data
ext2_load_func = {
'.npy': load_npy,
'.npz': load_npz,
'.cPkl': load_cPkl,
'.pkl': load_pkl}
ext2_save_func = {
'.npy': save_npy,
'.npz': save_npz,
'.cPkl': save_cPkl,
'.pkl': save_pkl}
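# Extension-based dispatch: smart_save/smart_load look up the (de)serializer
# for a file's extension in these tables, so supporting a new format only
# requires adding a load/save pair above and an entry here.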
def debug_smart_load(dpath='', fname='*', uid='*', ext='*'):
pattern = fname + uid + ext
print('[io] debug_smart_load(): dpath=%r' % (dpath))
for fname_ in os.listdir(dpath):
if fnmatch.fnmatch(fname_, pattern):
#fpath = join(dpath, fname_)
print(fname_)
# --- Smart Load/Save ---
def __args2_fpath(dpath, fname, uid, ext):
if len(ext) > 0 and ext[0] != '.':
raise Exception('Fatal Error: Please be explicit and use a dot in ext')
fname_uid = fname + uid
if len(fname_uid) > 128:
fname_uid = helpers.hashstr(fname_uid)
fpath = join(dpath, fname_uid + ext)
fpath = realpath(fpath)
fpath = normpath(fpath)
return fpath
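# Worked example (hypothetical values, for illustration only):
#   __args2_fpath('/tmp/cache', 'feats', '_HESAFF', '.npy')
# appends the uid to the base name and resolves to the normalized path
# '/tmp/cache/feats_HESAFF.npy'; combined names longer than 128 characters
# are replaced by a hash via helpers.hashstr.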
@profile
def smart_save(data, dpath='', fname='', uid='', ext='', verbose=VERBOSE_IO):
    ''' Saves data to the specified directory '''
helpers.ensuredir(dpath)
fpath = __args2_fpath(dpath, fname, uid, ext)
if verbose:
if verbose > 1:
print('[io]')
print(('[io] smart_save(dpath=%r,\n' + (' ' * 11) + 'fname=%r, uid=%r, ext=%r)')
% (dpath, fname, uid, ext))
ret = __smart_save(data, fpath, verbose)
if verbose > 1:
print('[io]')
return ret
@profile
def smart_load(dpath='', fname='', uid='', ext='', verbose=VERBOSE_IO, **kwargs):
    ''' Loads data from the specified directory '''
fpath = __args2_fpath(dpath, fname, uid, ext)
if verbose:
if verbose > 1:
print('[io]')
print(('[io] smart_load(dpath=%r,\n' + (' ' * 11) + 'fname=%r, uid=%r, ext=%r)')
% (dpath, fname, uid, ext))
data = __smart_load(fpath, verbose, **kwargs)
if verbose > 1:
print('[io]')
return data
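# Minimal usage sketch (hypothetical cache directory; assumes it is writable).
# A save/load pair must use the same dpath/fname/uid/ext so both calls resolve
# to the same cache file:
#   >>> arr = np.arange(10)
#   >>> smart_save(arr, dpath='/tmp/cache', fname='arr', uid='_v1', ext='.npy')
#   >>> arr2 = smart_load(dpath='/tmp/cache', fname='arr', uid='_v1', ext='.npy')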
@profile
def __smart_save(data, fpath, verbose):
' helper '
dpath, fname = os.path.split(fpath)
fname_noext, ext_ = os.path.splitext(fname)
save_func = ext2_save_func[ext_]
if verbose > 1:
print('[io] saving: %r' % (type(data),))
try:
save_func(fpath, data)
if verbose > 1:
print('[io] saved %s ' % (filesize_str(fpath),))
except Exception as ex:
        print('[io] ! Exception while saving %r' % fpath)
print(helpers.indent(repr(ex), '[io] '))
raise
@profile
def __smart_load(fpath, verbose, allow_alternative=False, can_fail=True, **kwargs):
' helper '
# Get components of the filesname
dpath, fname = os.path.split(fpath)
fname_noext, ext_ = os.path.splitext(fname)
# If exact path doesnt exist
if not exists(fpath):
print('[io] fname=%r does not exist' % fname)
if allow_alternative:
# allows alternative extension
convert_alternative(fpath, verbose, can_fail=can_fail, **kwargs)
# Ensure a valid extension
if ext_ == '':
        raise NotImplementedError('no file extension given; cannot pick a loader')
else:
load_func = ext2_load_func[ext_]
# Do actual data loading
try:
if verbose > 1:
print('[io] loading ' + filesize_str(fpath))
data = load_func(fpath)
if verbose:
print('[io]... loaded data')
except Exception as ex:
if verbose:
print('[io] ! Exception while loading %r' % fpath)
print('[io] caught ex=%r' % (ex,))
data = None
if not can_fail:
raise
if data is None:
if verbose:
print('[io]... did not load %r' % fpath)
return data
#----
# --- Util ---
def convert_alternative(fpath, verbose, can_fail, **kwargs):
# check for an alternative (maybe old style or ext) file
alternatives = find_alternatives(fpath, verbose)
dpath, fname = os.path.split(fpath)
if len(alternatives) == 0:
fail_msg = '[io] ...no alternatives to %r' % fname
if verbose:
print(fail_msg)
if can_fail:
return None
else:
raise IOError(fail_msg)
else:
#load and convert alternative
alt_fpath = alternatives[0]
if verbose > 1:
print('[io] ...converting %r' % alt_fpath)
data = __smart_load(alt_fpath, verbose, allow_alternative=False)
__smart_save(data, fpath, verbose)
return data
def find_alternatives(fpath, verbose):
# Check if file is in another format
dpath, fname = os.path.split(fpath)
fname_noext, ext_ = os.path.splitext(fname)
fpath_noext = join(dpath, fname_noext)
alternatives = []
# Find files with a different
for alt_ext in list(['.npy', '.npz', '.cPkl', '.pkl']):
alt_fpath = fpath_noext + alt_ext
if exists(alt_fpath):
alternatives.append(alt_fpath)
if verbose > 1:
# Print num alternatives / filesizes
print('[io] Found %d alternate(s)' % len(alternatives))
for alt_fpath in iter(alternatives):
print('[io] ' + filesize_str(alt_fpath))
return alternatives
def sanatize_fpath(fpath, ext=None): # UNUSED!
    'Ensures a filepath has the correct extension'
dpath, fname = os.path.split(fpath)
fname_noext, ext_ = os.path.splitext(fname)
    if ext is not None and ext_ != ext:
fname = fname_noext + ext
fpath = normpath(join(dpath, fname))
return fpath
def print_filesize(fpath):
print(filesize_str(fpath))
@profile
def filesize_str(fpath):
_, fname = os.path.split(fpath)
mb_str = helpers.file_megabytes_str(fpath)
return 'filesize(%r)=%s' % (fname, mb_str)
@profile
def exiftime_to_unixtime(datetime_str):
try:
dt = datetime.datetime.strptime(datetime_str, '%Y:%m:%d %H:%M:%S')
return time.mktime(dt.timetuple())
except TypeError:
#if datetime_str is None:
#return -1
return -1
except ValueError as ex:
if isinstance(datetime_str, str):
if datetime_str.find('No EXIF Data') == 0:
return -1
if datetime_str.find('Invalid') == 0:
return -1
print('!!!!!!!!!!!!!!!!!!')
print('Caught Error: ' + repr(ex))
print('datetime_str = %r' % datetime_str)
raise
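# Illustrative example (added; the timestamp value is an assumption):
def _demo_exiftime_to_unixtime():
    # EXIF encodes datetimes as 'YYYY:MM:DD HH:MM:SS'
    print(exiftime_to_unixtime('2012:01:15 13:45:30'))
    # Missing or malformed EXIF data degrades to -1 instead of raising
    assert exiftime_to_unixtime(None) == -1
    assert exiftime_to_unixtime('No EXIF Data') == -1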
@profile
def check_exif_keys(pil_image):
info_ = pil_image._getexif()
valid_keys = []
invalid_keys = []
for key, val in info_.iteritems():
try:
exif_keyval = TAGS[key]
valid_keys.append((key, exif_keyval))
except KeyError:
invalid_keys.append(key)
print('[io] valid_keys = ' + '\n'.join(valid_keys))
print('-----------')
#import draw_func2 as df2
#exec(df2.present())
@profile
def read_all_exif_tags(pil_image):
info_ = pil_image._getexif()
    tag_ = lambda key: TAGS.get(key, key)
    # _getexif() may return None, so only iterate when EXIF data is present
    exif = {} if info_ is None else {tag_(k): v for k, v in info_.iteritems()}
return exif
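# Usage sketch (added; the image path is an assumption):
#     from PIL import Image
#     exif = read_all_exif_tags(Image.open('photo.jpg'))
#     print(exif.get('DateTime'))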
@profile
def read_one_exif_tag(pil_image, tag):
try:
exif_key = TAGS.keys()[TAGS.values().index(tag)]
except ValueError:
return 'Invalid EXIF Tag'
info_ = pil_image._getexif()
if info_ is None:
return None
else:
invalid_str = 'Invalid EXIF Key: exif_key=%r, tag=%r' % (exif_key, tag)
exif_val = info_.get(exif_key, invalid_str)
return exif_val
#try:
#exif_val = info_[exif_key]
#except KeyError:
#exif_val = 'Invalid EXIF Key: exif_key=%r, tag=%r' % (exif_key, tag)
#print('')
#print(exif_val)
#check_exif_keys(pil_image)
@profile
def read_exif(fpath, tag=None):
try:
pil_image = Image.open(fpath)
if not hasattr(pil_image, '_getexif'):
return 'No EXIF Data'
except IOError as ex:
import argparse2
print('Caught IOError: %r' % (ex,))
print_image_checks(fpath)
if argparse2.ARGS_.strict:
raise
return {} if tag is None else None
if tag is None:
exif = read_all_exif_tags(pil_image)
else:
exif = read_one_exif_tag(pil_image, tag)
del pil_image
return exif
@profile
def print_image_checks(img_fpath):
hasimg = helpers.checkpath(img_fpath, verbose=True)
if hasimg:
_tup = (img_fpath, filesize_str(img_fpath))
print('[io] Image %r (%s) exists. Is it corrupted?' % _tup)
else:
        print('[io] Image %r does not exist' % (img_fpath,))
return hasimg
@profile
def read_exif_list(fpath_list, **kwargs):
def _gen(fpath_list):
# Exif generator
nGname = len(fpath_list)
lbl = '[io] Load Image EXIF'
mark_progress, end_progress = helpers.progress_func(nGname, lbl, 16)
for count, fpath in enumerate(fpath_list):
mark_progress(count)
yield read_exif(fpath, **kwargs)
end_progress()
exif_list = [exif for exif in _gen(fpath_list)]
return exif_list
@profile
def imread(img_fpath):
try:
imgBGR = cv2.imread(img_fpath, flags=cv2.CV_LOAD_IMAGE_COLOR)
return imgBGR
except Exception as ex:
print('[io] Caught Exception: %r' % ex)
print('[io] ERROR reading: %r' % (img_fpath,))
raise
# --- Standard Images ---
def splash_img_fpath():
hsdir = dirname(__file__)
splash_fpath = realpath(join(hsdir, '../hsgui/_frontend/splash.png'))
return splash_fpath
# --- Global Cache ---
# TODO: This doesnt belong here
HOME = expanduser('~')
#GLOBAL_CACHE_DIR = realpath('.hotspotter/global_cache')
GLOBAL_CACHE_DIR = join(HOME, '.hotspotter/global_cache')
helpers.ensuredir(GLOBAL_CACHE_DIR)
def global_cache_read(cache_id, default='.'):
cache_fname = join(GLOBAL_CACHE_DIR, 'cached_dir_%s.txt' % cache_id)
return helpers.read_from(cache_fname) if exists(cache_fname) else default
def global_cache_write(cache_id, newdir):
cache_fname = join(GLOBAL_CACHE_DIR, 'cached_dir_%s.txt' % cache_id)
helpers.write_to(cache_fname, newdir)
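# Usage sketch (added; the cache id and path are hypothetical examples):
#     global_cache_write('db_dir', '/data/work')
#     assert global_cache_read('db_dir', default='.') == '/data/work'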
def delete_global_cache():
global_cache_dir = GLOBAL_CACHE_DIR
helpers.remove_files_in_dir(global_cache_dir, recursive=True, verbose=True,
dryrun=False)
# --- Shelve Caching ---
#def read_cache(fpath):
#pass
#def write_cache(fpath):
#with open(fpath, 'wa') as file_
#shelf = shelve.open(file_)
#def cached_keys(fpath):
#pass
| apache-2.0 | -9,184,094,975,685,221,000 | 26.862385 | 88 | 0.583388 | false | 3.320033 | false | false | false |
Ghost-script/TaskMan | wsgi/taskman/taskman/views.py | 1 | 4067 | from django.shortcuts import render, HttpResponse, redirect
from forms import LoginForm, RegistrationForm
from django.contrib.auth import login, logout, authenticate
from django.contrib.auth.decorators import login_required
from django.contrib.auth.models import User
from taskManager.forms import TaskCreate, MultipleSelect
from taskManager.views import show_task, show_logs
from django.contrib.auth.hashers import make_password,is_password_usable
def index(request):
"""
Handles user login
"""
if request.method == 'POST':
form = LoginForm(request.POST)
if form.is_valid():
email = form.cleaned_data['email']
password = form.cleaned_data['password']
user = authenticate(email=email, password=password)
if user is not None:
if user.error is None:
login(request, user)
return redirect('home')
else:
form.message = "Email/Password Mismatch"
return render(request, 'index.html', {'form': form})
form.message = "Email not found"
return render(request, 'index.html',
{'form': form,
'page': 'index'})
else:
form.message = "Invalid Email"
return render(request, 'index.html',
{'form': form,
'page': 'index'})
else:
form = LoginForm()
return render(request, 'index.html', {'form': form, 'page': 'index'})
def register_user(request):
"""
Handles user Registration
"""
form = RegistrationForm(request.POST)
if request.method == 'POST':
if form.is_valid():
username = form.cleaned_data['username']
email = form.cleaned_data['email']
password = form.cleaned_data['password']
confirm = form.cleaned_data['confirm']
try:
user = User.objects.get(email=email)
form.error = "Email already registered!"
return render(request, 'registration.html', {'form': form})
except User.DoesNotExist:
if password == confirm:
password = make_password(password)
if is_password_usable(password):
user = User(username=username,
email=email,
password=password)
user.save()
form = RegistrationForm()
form.message = "Success"
else:
form.message = "Password cannot be used"
else:
form.message = "Comfirm and Password field do not match"
return render(request, 'registration.html',
{'form': form,
'page': 'reg'})
except Exception as e:
                # logging to be implemented here
print e
else:
form.error = "Invalid form feild Values"
return render(request, 'registration.html',
{'form': form,
'page': 'reg'})
else:
form = RegistrationForm()
return render(request, 'registration.html', {'form': form, 'page': 'reg'})
@login_required(login_url="/")
def dashboard(request):
"""
Handles dashboard tasklist request
functions: Sorting the tasks , Showing TrackerLogs
"""
col = request.GET.get('sortby', 'id')
order = request.GET.get('order', 'asc')
task = show_task(request, col=col, order=order)
logs = show_logs(request)
form = MultipleSelect()
return render(request, 'dashboard.html',
{'tasks': task,
'logs': logs,
'form': form,
'page': 'home'})
def logout_user(request):
"""
Logs user Out
"""
logout(request)
return redirect('/')
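# --- Illustrative URL wiring (added; module path and URL patterns are
# assumptions, not from the original project):
#     from django.conf.urls import url
#     from taskman import views
#     urlpatterns = [
#         url(r'^$', views.index, name='index'),
#         url(r'^register/$', views.register_user, name='register'),
#         url(r'^home/$', views.dashboard, name='home'),
#         url(r'^logout/$', views.logout_user, name='logout'),
#     ]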
| gpl-2.0 | 6,536,155,798,609,383,000 | 34.365217 | 78 | 0.522252 | false | 4.81872 | false | false | false |
erikrose/sphinx-js | sphinx_js/typedoc.py | 1 | 13554 | """Converter from typedoc output to jsdoc doclet format"""
import os
import sys
import json
from six import iteritems
# JSDoc entries used in sphinx-js:
# - optional access of [ public, private, protected ]
# - optional classdesc
# - optional comment (controls doclet inclusion)
# - optional description
# - optional exceptions
# - optional exclude-members
# - kind of [ function, typedef, <other> ]
# - longname
# - optional memberof
# - meta.filename
# - meta.lineno
# - meta.code.paramnames
# - meta.code.path
# - name
# - optional params
# - optional properties
# - optional returns
# - type.names
# - optional undocumented
class TypeDoc(object):
"""
Encapsulation of the Typedoc to JSDoc conversion process.
Upon construction this class will convert the typedoc JSON
object to a list of JSDoc doclets in :py:attr:`jsdoc`.
This class holds all the state used during the conversion making
it easy to do multiple (sequential) conversions.
:ivar jsdoc: the list of generated doclets
:ivar nodelist: the flattened typedoc entries indexed by 'id'
JSDoc JSON schema: https://github.com/jsdoc3/jsdoc/blob/master/lib/jsdoc/schema.js
"""
def __init__(self, root):
"""
Construct a list of jsdoc entries from the typedoc JSON object.
:param root: a JSON object from a typedoc JSON file
"""
self.jsdoc = []
self.nodelist = {}
self.make_node_list(root)
self.convert_node(root)
def get_parent(self, node):
"""
Get the parent of a node.
:param node: A Typedoc node
:return: The parent Typedoc node, or None if node was the root.
"""
parentId = node.get('__parentId')
return self.nodelist[parentId] if parentId is not None else None
def extend_doclet(self, result, **kwargs):
"""
Extend a jsdoc entry.
.. note::
            Filters out keywords with value None. This is used
            explicitly, for example in :py:func:`simple_doclet`, and
            implicitly when typedoc may lack an entry, for example in
            the description field in :py:func:`make_result`.
"""
result.update(**kwargs)
return {k: v for k, v in iteritems(result) if v is not None}
def make_doclet(self, **kwargs):
"""Create a new jsdoc entry"""
return self.extend_doclet({}, **kwargs)
def make_longname(self, node):
"""Construct the jsdoc longname entry for a typedoc node"""
parent = self.get_parent(node)
longname = self.make_longname(parent) if parent is not None else ''
kindString = node.get('kindString')
if kindString in [None, 'Function', 'Constructor', 'Method']:
return longname
if longname != '':
flags = node.get('flags')
if (parent.get('kindString') in ['Class', 'Interface'] and
flags.get('isStatic') is not True):
longname += '#'
elif parent.get('kindString') in ['Function', 'Method']:
longname += '.'
else:
longname += '~'
if kindString == 'Module':
return longname + 'module:' + node.get('name')[1:-1]
elif kindString == 'External module':
return longname + 'external:' + node.get('name')[1:-1]
else:
return longname + node.get('name')
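    # Example (added for illustration; the member names are assumptions): a
    # non-static property ``baz`` of class ``Foo`` in module ``"bar"`` gets
    # the longname ``module:bar~Foo#baz``.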
def make_meta(self, node):
"""Construct the jsdoc meta entry for a typedoc node"""
source = node.get('sources')[0]
return {
'path': os.path.dirname(source.get('fileName')) or './',
'filename': os.path.basename(source.get('fileName')),
'lineno': source.get('line'),
'code': {}
}
def make_type_name(self, type):
"""Construct the name of a type from a Typedoc type entry"""
names = []
if type.get('type') == 'reference' and type.get('id'):
node = self.nodelist[type.get('id')]
# Should be: names = [ self.make_longname(node)]
parent = self.nodelist[node.get('__parentId')]
if parent.get('kindString') == 'External module':
names = [parent['name'][1:-1] + '.' + node['name']]
else:
names = [node['name']]
elif type.get('type') in ['intrinsic', 'reference']:
names = [type.get('name')]
elif type.get('type') == 'stringLiteral':
names = ['"' + type.get('value') + '"']
elif type.get('type') == 'array':
names = [self.make_type_name(type.get('elementType')) + '[]']
elif type.get('type') == 'tuple':
types = [self.make_type_name(t) for t in type.get('elements')]
names = ['[' + ','.join(types) + ']']
elif type.get('type') == 'union':
types = [self.make_type_name(t) for t in type.get('types')]
names = [' | '.join(types)]
elif type.get('type') == 'typeOperator':
target_name = self.make_type_name(type.get('target'))
names = [type.get('operator'), target_name]
elif type.get('type') == 'typeParameter':
names = [type.get('name')]
constraint = type.get('constraint')
if constraint is not None:
names.extend(['extends', self.make_type_name(constraint)])
elif type.get('type') == 'reflection':
names = ['<TODO>']
return ' '.join(names)
def make_type(self, type):
"""Construct a jsdoc type entry"""
return {
'names': [self.make_type_name(type)]
}
def make_description(self, comment):
"""Construct a jsdoc description entry"""
if not comment:
return ''
else:
return '\n\n'.join([
comment.get('shortText', ''),
comment.get('text', '')
])
def make_param(self, param):
"""Construct a jsdoc parameter entry"""
typeEntry = param.get('type')
if typeEntry is None:
return self.make_doclet(
name=param.get('name'),
description=self.make_description(param.get('comment'))
)
else:
return self.make_doclet(
name=param.get('name'),
type=self.make_type(typeEntry),
description=self.make_description(param.get('comment'))
)
def make_result(self, param):
"""Construct a jsdoc function result entry"""
type = param.get('type')
if type is None or type.get('name') == 'void':
return []
return [self.make_doclet(
name=param.get('name'),
type=self.make_type(type),
description=param.get('comment', {}).get('returns')
)]
def simple_doclet(self, kind, node):
"""Construct a jsdoc entry with some frequently used fields."""
memberof = self.make_longname(self.get_parent(node))
if memberof == '':
memberof = None
if node.get('flags').get('isPrivate'):
access = 'private'
elif node.get('flags').get('isProtected'):
access = 'protected'
else:
access = None
comment = node.get('comment')
return self.make_doclet(
kind=kind,
access=access,
comment=node.get('comment', {}).get('text', '<empty>'),
meta=self.make_meta(node),
name=node.get('name'),
longname=self.make_longname(node),
memberof=memberof,
description=self.make_description(comment)
)
def convert_node(self, node):
"""
Convert a typedoc entry to a jsdoc entry. Typedoc entries are
hierarchical, so this function will recurse.
New entries are added to :py:attr:`self.jsdoc`.
.. rubric:: To do
Some entries generate restructured text. Preferably this
information should be captured in the jsdoc entries and
used in the templates.
"""
if node.get('inheritedFrom'):
return
if node.get('sources'):
# Ignore nodes with a reference to absolute paths (like /usr/lib)
source = node.get('sources')[0]
if source.get('fileName', '.')[0] == '/':
return
kindString = node.get('kindString')
if kindString == 'External module':
doclet = self.simple_doclet('external', node)
elif kindString == 'Module':
doclet = self.simple_doclet('module', node)
elif kindString in ['Class', 'Interface']:
specifiers = []
if kindString == 'Interface':
doclet = self.simple_doclet('interface', node)
specifiers.append('*interface*')
else:
doclet = self.simple_doclet('class', node)
doclet['classdesc'] = ''
if node.get('flags', {}).get('isAbstract'):
specifiers.append('*abstract*')
if node.get('flags', {}).get('isExported'):
module_name = self.get_parent(node).get('name')[1:-1]
specifiers.append('*exported from* :js:mod:`' + module_name + '`')
doclet['classdesc'] += ', '.join(specifiers)
if node.get('extendedTypes'):
doclet['classdesc'] += '\n\n**Extends:**\n'
for type in node.get('extendedTypes', []):
type_name = self.make_type_name(type)
doclet['classdesc'] += ' * :js:class:`' + type_name + '`\n'
if node.get('implementedTypes'):
doclet['classdesc'] += '\n\n**Implements:**\n'
for type in node.get('implementedTypes', []):
type_name = self.make_type_name(type)
doclet['classdesc'] += ' * :js:class:`' + type_name + '`\n'
doclet['params'] = []
for param in node.get('typeParameter', []):
doclet['params'].append(self.make_param(param))
self.extend_doclet(
doclet,
extends=[e['name'] for e in node.get('extendedTypes', [])]
)
elif kindString == 'Property':
doclet = self.simple_doclet('member', node)
if node.get('flags', {}).get('isAbstract'):
doclet['description'] = '*abstract*\n\n' + doclet['description']
self.extend_doclet(
doclet,
type=self.make_type(node.get('type'))
)
elif kindString == 'Accessor':
doclet = self.simple_doclet('member', node)
if node.get('getSignature'):
type = self.make_type(node['getSignature']['type'])
else:
type_name = node['setSignature']['parameters'][0]['type']
type = self.make_type(type_name)
self.extend_doclet(doclet, type=type)
elif kindString in ['Function', 'Constructor', 'Method']:
for sig in node.get('signatures'):
sig['sources'] = node['sources']
self.convert_node(sig)
return
elif kindString in ['Constructor signature', 'Call signature']:
parent = self.get_parent(node)
doclet = self.simple_doclet('function', node)
if parent.get('flags', {}).get('isAbstract'):
doclet['description'] = '*abstract*\n\n' + doclet['description']
if parent.get('flags', {}).get('isOptional'):
doclet['description'] = '*optional*\n\n' + doclet['description']
self.extend_doclet(
doclet,
params=[],
returns=self.make_result(node)
)
doclet['meta']['code']['paramnames'] = []
for param in node.get('parameters', []):
doclet['params'].append(self.make_param(param))
doclet['meta']['code']['paramnames'].append(param.get('name'))
else:
doclet = None
if doclet:
self.jsdoc.append(doclet)
for child in node.get('children', []):
self.convert_node(child)
def make_node_list(self, node, parent=None):
"""Flatten the tree of Typedoc entries to a list indexed by 'id'"""
if node is None:
return
if node.get('id') is not None:
node['__parentId'] = parent
self.nodelist[node['id']] = node
for tag in ['children', 'signatures', 'parameters']:
for child in node.get(tag, []):
self.make_node_list(child, node.get('id'))
typetag = node.get('type')
if isinstance(typetag, dict) and typetag['type'] != 'reference':
self.make_node_list(typetag, parent)
self.make_node_list(node.get('declaration'), None)
def parse_typedoc(inputfile):
"""Parse and convert the typedoc JSON file to a list jsdoc entries"""
typedoc = TypeDoc(json.load(inputfile))
return typedoc.jsdoc
def typedoc(inputname):
"""
Read a typedoc file and print the resulting jsdoc list.
.. note::
This function only exists to test this module in isolation.
"""
with open(inputname, 'r') as inputfile:
json.dump(parse_typedoc(inputfile), sys.stdout, indent=2)
if __name__ == '__main__':
typedoc(sys.argv[1])
| mit | 7,926,526,332,986,313,000 | 36.545706 | 86 | 0.539324 | false | 4.09734 | false | false | false |
openstack/networking-bgpvpn | bgpvpn_dashboard/test/admin/test_views.py | 1 | 3209 | # Copyright (c) 2017 Orange.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from unittest import mock
from collections import namedtuple
from bgpvpn_dashboard.api import bgpvpn as bgpvpn_api
from bgpvpn_dashboard.dashboards.admin.bgpvpn import tables as bgpvpn_tables
from bgpvpn_dashboard.dashboards.admin.bgpvpn import views as bgpvpn_views
from openstack_dashboard.test import helpers
VIEWS = "bgpvpn_dashboard.dashboards.admin.bgpvpn.views"
class TestIndexView(helpers.APITestCase):
def setUp(self):
super(TestIndexView, self).setUp()
mock_request = mock.Mock(horizon={'async_messages': []})
self.bgpvpn_view = bgpvpn_views.IndexView(request=mock_request)
self.assertEqual(bgpvpn_tables.BgpvpnTable,
self.bgpvpn_view.table_class)
def _get_mock_bgpvpn(self, prefix):
bgpvpn_info = {}
if prefix:
bgpvpn_info = {
"name": "%s_name" % prefix,
"route_targets": [],
"import_targets": [],
"export_targets": [],
"networks": [],
"routers": [],
"tenant_id": "tenant_id",
"type": "l3"
}
return bgpvpn_api.Bgpvpn(bgpvpn_info)
@mock.patch.object(bgpvpn_views.api, 'keystone', autospec=True)
def test_get_tenant_name(self, mock_api):
Tenant = namedtuple("Tenant", ["id", "name"])
tenant = Tenant("tenant_id", "tenant_name")
mock_api.tenant_get.return_value = tenant
result = self.bgpvpn_view._get_tenant_name("tenant_id")
mock_api.tenant_get.assert_called_once_with(
self.bgpvpn_view.request, "tenant_id")
self.assertEqual(result, "tenant_name")
@mock.patch('%s.IndexView._get_tenant_name' % VIEWS,
return_value={"tenant_id": "tenant_name"})
@mock.patch.object(bgpvpn_views, 'api', autospec=True)
@mock.patch.object(bgpvpn_views, 'bgpvpn_api', autospec=True)
def test_get_data(self, mock_bgpvpn_api, mock_api, mock_get_tenant_name):
bgpvpn_foo = self._get_mock_bgpvpn("foo")
bgpvpn_bar = self._get_mock_bgpvpn("bar")
mock_neutron_client = mock_api.neutron.neutronclient(mock.Mock())
mock_bgpvpn_api.bgpvpns_list.return_value = [bgpvpn_foo, bgpvpn_bar]
mock_neutron_client.list_networks.return_value = []
mock_neutron_client.list_routers.return_value = []
expected_bgpvpns = [bgpvpn_foo, bgpvpn_bar]
result = self.bgpvpn_view.get_data()
calls = [mock.call("tenant_id"), mock.call("tenant_id")]
mock_get_tenant_name.assert_has_calls(calls)
self.assertEqual(result, expected_bgpvpns)
| apache-2.0 | 7,976,155,279,535,758,000 | 38.617284 | 77 | 0.648177 | false | 3.417465 | true | false | false |
lpfann/fri | fri/model/lupi_ordinal_regression.py | 1 | 12804 | from itertools import product
import cvxpy as cvx
import numpy as np
from sklearn.metrics import make_scorer
from sklearn.utils import check_X_y
from fri.model.base_lupi import (
LUPI_Relevance_CVXProblem,
split_dataset,
is_lupi_feature,
)
from fri.model.ordinal_regression import (
OrdinalRegression_Relevance_Bound,
ordinal_scores,
)
from .base_initmodel import LUPI_InitModel
from .base_type import ProblemType
class LUPI_OrdinalRegression(ProblemType):
def __init__(self, **kwargs):
super().__init__(**kwargs)
self._lupi_features = None
@property
def lupi_features(self):
return self._lupi_features
@classmethod
def parameters(cls):
return ["C", "scaling_lupi_w"]
@property
def get_initmodel_template(cls):
return LUPI_OrdinalRegression_SVM
@property
def get_cvxproblem_template(cls):
return LUPI_OrdinalRegression_Relevance_Bound
def relax_factors(cls):
return ["loss_slack", "w_l1_slack"]
def preprocessing(self, data, lupi_features=None):
X, y = data
d = X.shape[1]
if lupi_features is None:
raise ValueError("Argument 'lupi_features' missing in fit() call.")
if not isinstance(lupi_features, int):
raise ValueError("Argument 'lupi_features' is not type int.")
if not 0 < lupi_features < d:
raise ValueError(
"Argument 'lupi_features' looks wrong. We need at least 1 priviliged feature (>0) or at least one normal feature."
)
self._lupi_features = lupi_features
# Check that X and y have correct shape
X, y = check_X_y(X, y)
if np.min(y) > 0:
print("First ordinal class has index > 0. Shifting index...")
y = y - np.min(y)
return X, y
class LUPI_OrdinalRegression_SVM(LUPI_InitModel):
HYPERPARAMETER = ["C", "scaling_lupi_w"]
def __init__(self, C=1, scaling_lupi_w=1, lupi_features=None):
super().__init__()
self.scaling_lupi_w = scaling_lupi_w
self.C = C
self.lupi_features = lupi_features
def fit(self, X_combined, y, lupi_features=None):
"""
Parameters
----------
lupi_features : int
Number of features in dataset which are considered privileged information (PI).
PI features are expected to be the last features in the dataset.
"""
if lupi_features is None:
try:
lupi_features = self.lupi_features
self.lupi_features = lupi_features
except:
raise ValueError("No amount of lupi features given.")
X, X_priv = split_dataset(X_combined, self.lupi_features)
(n, d) = X.shape
self.classes_ = np.unique(y)
        # Get parameters from CV model without any feature constraints
C = self.get_params()["C"]
scaling_lupi_w = self.get_params()["scaling_lupi_w"]
get_original_bin_name, n_bins = get_bin_mapping(y)
n_boundaries = n_bins - 1
        # Initialize Variables in cvxpy
w = cvx.Variable(shape=(d), name="w")
b_s = cvx.Variable(shape=(n_boundaries), name="bias")
w_priv = cvx.Variable(shape=(self.lupi_features, 2), name="w_priv")
d_priv = cvx.Variable(shape=(2), name="bias_priv")
def priv_function(bin, sign):
indices = np.where(y == get_original_bin_name[bin])
return X_priv[indices] @ w_priv[:, sign] + d_priv[sign]
# L1 norm regularization of both functions with 1 scaling constant
priv_l1_1 = cvx.norm(w_priv[:, 0], 1)
priv_l1_2 = cvx.norm(w_priv[:, 1], 1)
w_priv_l1 = priv_l1_1 + priv_l1_2
w_l1 = cvx.norm(w, 1)
weight_regularization = 0.5 * (w_l1 + scaling_lupi_w * w_priv_l1)
constraints = []
loss = 0
for left_bin in range(0, n_bins - 1):
indices = np.where(y == get_original_bin_name[left_bin])
constraints.append(
X[indices] @ w - b_s[left_bin] <= -1 + priv_function(left_bin, 0)
)
constraints.append(priv_function(left_bin, 0) >= 0)
loss += cvx.sum(priv_function(left_bin, 0))
# Add constraints for slack into right neighboring bins
for right_bin in range(1, n_bins):
indices = np.where(y == get_original_bin_name[right_bin])
constraints.append(
X[indices] @ w - b_s[right_bin - 1] >= +1 - priv_function(right_bin, 1)
)
constraints.append(priv_function(right_bin, 1) >= 0)
loss += cvx.sum(priv_function(right_bin, 1))
for i_boundary in range(0, n_boundaries - 1):
constraints.append(b_s[i_boundary] <= b_s[i_boundary + 1])
objective = cvx.Minimize(C * loss + weight_regularization)
# Solve problem.
problem = cvx.Problem(objective, constraints)
problem.solve(**self.SOLVER_PARAMS)
w = w.value
b_s = b_s.value
self.model_state = {
"w": w,
"b_s": b_s,
"w_priv": w_priv.value,
"d_priv": d_priv.value,
"lupi_features": lupi_features, # Number of lupi features in the dataset TODO: Move this somewhere else
"bin_boundaries": n_boundaries,
}
self.constraints = {
"loss": loss.value,
"w_l1": w_l1.value,
"w_priv_l1": w_priv_l1.value,
}
return self
def predict(self, X):
X, X_priv = split_dataset(X, self.lupi_features)
w = self.model_state["w"]
b_s = self.model_state["b_s"]
scores = np.dot(X, w.T)[np.newaxis]
bin_thresholds = np.append(b_s, np.inf)
        # If a threshold is smaller than the score, the value belongs to a
        # higher bin; after subtracting we count the positive entries
indices = np.sum(scores.T - bin_thresholds >= 0, -1)
return self.classes_[indices]
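    # Illustrative sketch of the lookup above (numbers are assumptions): with
    # boundaries b_s = [-1.0, 2.0] a score of 0.5 exceeds one boundary, so
    # np.sum(...) yields index 1 and the second ordinal class is returned.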
def score(self, X, y, error_type="mmae", return_error=False, **kwargs):
X, y = check_X_y(X, y)
prediction = self.predict(X)
score = ordinal_scores(y, prediction, error_type, return_error=return_error)
return score
def make_scorer(self):
# Use multiple scores for ordinal regression
mze = make_scorer(ordinal_scores, error_type="mze")
mae = make_scorer(ordinal_scores, error_type="mae")
mmae = make_scorer(ordinal_scores, error_type="mmae")
scorer = {"mze": mze, "mae": mae, "mmae": mmae}
return scorer, "mmae"
def get_bin_mapping(y):
"""
Get ordered unique classes and corresponding mapping from old names
Parameters
----------
y: array of discrete values (int, str)
Returns
-------
"""
classes_ = np.unique(y)
original_bins = sorted(classes_)
n_bins = len(original_bins)
bins = np.arange(n_bins)
get_old_bin = dict(zip(bins, original_bins))
return get_old_bin, n_bins
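# Example (added for illustration):
#     get_old_bin, n_bins = get_bin_mapping(np.array([3, 5, 5, 9]))
#     # n_bins == 3 and get_old_bin == {0: 3, 1: 5, 2: 9}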
class LUPI_OrdinalRegression_Relevance_Bound(
LUPI_Relevance_CVXProblem, OrdinalRegression_Relevance_Bound
):
@classmethod
def generate_lower_bound_problem(
cls,
best_hyperparameters,
init_constraints,
best_model_state,
data,
di,
preset_model,
probeID=-1,
):
is_priv = is_lupi_feature(
di, data, best_model_state
) # Is it a lupi feature where we need additional candidate problems?
if not is_priv:
yield from super().generate_lower_bound_problem(
best_hyperparameters,
init_constraints,
best_model_state,
data,
di,
preset_model,
probeID=probeID,
)
else:
for sign in [1, -1]:
problem = cls(
di,
data,
best_hyperparameters,
init_constraints,
preset_model=preset_model,
best_model_state=best_model_state,
probeID=probeID,
)
problem.init_objective_LB(sign=sign)
problem.isLowerBound = True
yield problem
@classmethod
def generate_upper_bound_problem(
cls,
best_hyperparameters,
init_constraints,
best_model_state,
data,
di,
preset_model,
probeID=-1,
):
is_priv = is_lupi_feature(
di, data, best_model_state
) # Is it a lupi feature where we need additional candidate problems?
if not is_priv:
yield from super().generate_upper_bound_problem(
best_hyperparameters,
init_constraints,
best_model_state,
data,
di,
preset_model,
probeID=probeID,
)
else:
for sign, pos in product([1, -1], [0, 1]):
problem = cls(
di,
data,
best_hyperparameters,
init_constraints,
preset_model=preset_model,
best_model_state=best_model_state,
probeID=probeID,
)
problem.init_objective_UB(sign=sign, pos=pos)
yield problem
@classmethod
def aggregate_min_candidates(cls, min_problems_candidates):
vals = [candidate.solved_relevance for candidate in min_problems_candidates]
# We take the max of mins because we need the necessary contribution over all functions
min_value = max(vals)
return min_value
def _init_objective_LB_LUPI(self, sign=None, bin_index=None, **kwargs):
self.add_constraint(
sign * self.w_priv[self.lupi_index, :] <= self.feature_relevance
)
self._objective = cvx.Minimize(self.feature_relevance)
def _init_objective_UB_LUPI(self, sign=None, pos=None, **kwargs):
self.add_constraint(
self.feature_relevance <= sign * self.w_priv[self.lupi_index, pos]
)
self._objective = cvx.Maximize(self.feature_relevance)
def _init_constraints(self, parameters, init_model_constraints):
# Upper constraints from initial model
init_w_l1 = init_model_constraints["w_l1"]
init_w_priv_l1 = init_model_constraints["w_priv_l1"]
init_loss = init_model_constraints["loss"]
scaling_lupi_w = parameters["scaling_lupi_w"]
get_original_bin_name, n_bins = get_bin_mapping(self.y)
n_boundaries = n_bins - 1
        # Initialize Variables in cvxpy
w = cvx.Variable(shape=(self.d), name="w")
b_s = cvx.Variable(shape=(n_boundaries), name="bias")
w_priv = cvx.Variable(shape=(self.d_priv, 2), name="w_priv")
d_priv = cvx.Variable(shape=(2), name="bias_priv")
def priv_function(bin, sign):
indices = np.where(self.y == get_original_bin_name[bin])
return self.X_priv[indices] @ w_priv[:, sign] + d_priv[sign]
# L1 norm regularization of both functions with 1 scaling constant
priv_l1_1 = cvx.norm(w_priv[:, 0], 1)
priv_l1_2 = cvx.norm(w_priv[:, 1], 1)
w_priv_l1 = priv_l1_1 + priv_l1_2
w_l1 = cvx.norm(w, 1)
loss = 0
for left_bin in range(0, n_bins - 1):
indices = np.where(self.y == get_original_bin_name[left_bin])
self.add_constraint(
self.X[indices] @ w - b_s[left_bin] <= -1 + priv_function(left_bin, 0)
)
self.add_constraint(priv_function(left_bin, 0) >= 0)
loss += cvx.sum(priv_function(left_bin, 0))
# Add constraints for slack into right neighboring bins
for right_bin in range(1, n_bins):
indices = np.where(self.y == get_original_bin_name[right_bin])
self.add_constraint(
self.X[indices] @ w - b_s[right_bin - 1]
>= +1 - priv_function(right_bin, 1)
)
self.add_constraint(priv_function(right_bin, 1) >= 0)
loss += cvx.sum(priv_function(right_bin, 1))
for i_boundary in range(0, n_boundaries - 1):
self.add_constraint(b_s[i_boundary] <= b_s[i_boundary + 1])
self.add_constraint(
w_l1 + scaling_lupi_w * w_priv_l1
<= init_w_l1 + scaling_lupi_w * init_w_priv_l1
)
self.add_constraint(loss <= init_loss)
self.w = w
self.w_priv = w_priv
self.feature_relevance = cvx.Variable(nonneg=True, name="Feature Relevance")
| mit | 1,933,810,736,430,988,500 | 32.257143 | 130 | 0.559513 | false | 3.538972 | false | false | false |
bapakode/OmMongo | examples/advanced_modeling.py | 1 | 1048 | '''
This example walks through some more advanced modeling techniques
using forward references and self-references.
'''
from ommongo.document import Document
from ommongo.fields import *
from datetime import datetime
from pprint import pprint
class Event(Document):
name = StringField()
children = ListField(DocumentField('Event'))
begin = DateTimeField()
end = DateTimeField()
def __init__(self, name, parent=None):
Document.__init__(self, name=name)
self.children = []
if parent is not None:
parent.children.append(self)
def __enter__(self):
self.begin = datetime.utcnow()
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.end = datetime.utcnow()
with Event('request') as root:
with Event('main_func', root) as br:
with Event('setup', br):
pass
with Event('handle', br):
pass
with Event('teardown', br):
pass
with Event('cleanup', root):
pass
pprint(root.wrap())
| mit | 491,704,710,223,334,700 | 25.2 | 72 | 0.611641 | false | 4.142292 | false | false | false |
mrachinskiy/jewelcraft | ops_gem/gem_select_ops.py | 1 | 6067 | # ##### BEGIN GPL LICENSE BLOCK #####
#
# JewelCraft jewelry design toolkit for Blender.
# Copyright (C) 2015-2021 Mikhail Rachinskiy
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
# ##### END GPL LICENSE BLOCK #####
from bpy.props import EnumProperty, FloatProperty, BoolProperty
from bpy.types import Operator
from bpy.app.translations import pgettext_tip as _
from mathutils import Matrix
from ..lib import dynamic_list
class OBJECT_OT_gem_select_by_trait(Operator):
bl_label = "Select Gems by Trait"
bl_description = "Select gems by trait"
bl_idname = "object.jewelcraft_gem_select_by_trait"
bl_options = {"REGISTER", "UNDO"}
filter_size: BoolProperty(name="Size", options={"SKIP_SAVE"})
filter_stone: BoolProperty(name="Stone", options={"SKIP_SAVE"})
filter_cut: BoolProperty(name="Cut", options={"SKIP_SAVE"})
filter_similar: BoolProperty(options={"SKIP_SAVE", "HIDDEN"})
size: FloatProperty(
name="Size",
default=1.0,
min=0.0,
step=10,
precision=2,
unit="LENGTH",
)
stone: EnumProperty(name="Stone", items=dynamic_list.stones)
cut: EnumProperty(name="Cut", items=dynamic_list.cuts)
use_extend: BoolProperty(name="Extend", description="Extend selection")
use_select_children: BoolProperty(name="Select Children")
def draw(self, context):
layout = self.layout
layout.use_property_split = True
layout.use_property_decorate = False
row = layout.row(heading="Size")
row.prop(self, "filter_size", text="")
row.prop(self, "size", text="")
row = layout.row(heading="Stone")
row.prop(self, "filter_stone", text="")
row.prop(self, "stone", text="")
row = layout.row(heading="Cut", heading_ctxt="Jewelry")
row.prop(self, "filter_cut", text="")
row.template_icon_view(self, "cut", show_labels=True)
layout.separator()
layout.prop(self, "use_extend")
layout.prop(self, "use_select_children")
def execute(self, context):
size = round(self.size, 2)
check_size = check_stone = check_cut = lambda x: True
if self.filter_size:
check_size = lambda ob: round(ob.dimensions.y, 2) == size
if self.filter_stone:
check_stone = lambda ob: ob["gem"]["stone"] == self.stone
if self.filter_cut:
check_cut = lambda ob: ob["gem"]["cut"] == self.cut
selected = None
for ob in context.visible_objects:
if "gem" in ob and check_size(ob) and check_stone(ob) and check_cut(ob):
selected = ob
ob.select_set(True)
if self.use_select_children and ob.children:
for child in ob.children:
child.select_set(True)
elif not self.use_extend:
ob.select_set(False)
if context.object is None or not context.object.select_get():
context.view_layer.objects.active = selected
return {"FINISHED"}
def invoke(self, context, event):
ob = context.object
if ob and "gem" in ob:
self.size = ob.dimensions.y
self.stone = ob["gem"]["stone"]
self.cut = ob["gem"]["cut"]
if self.filter_similar:
self.filter_size = True
self.filter_stone = True
self.filter_cut = True
return self.execute(context)
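# Usage sketch (added; not part of the original file). From Blender's Python
# console the operator above could be invoked via its bl_idname, e.g.:
#     bpy.ops.object.jewelcraft_gem_select_by_trait(
#         filter_size=True, size=4.0, use_extend=True)
# The keyword names mirror the operator properties; the values are examples.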
class OBJECT_OT_gem_select_overlapping(Operator):
bl_label = "Select Overlapping"
bl_description = "Select gems that are less than 0.1 mm distance from each other or overlapping"
bl_idname = "object.jewelcraft_gem_select_overlapping"
bl_options = {"REGISTER", "UNDO"}
threshold: FloatProperty(
name="Threshold",
default=0.1,
soft_min=0.0,
step=1,
precision=2,
unit="LENGTH",
)
def execute(self, context):
from ..lib import asset
obs = []
ob_data = []
depsgraph = context.evaluated_depsgraph_get()
for dup in depsgraph.object_instances:
if dup.is_instance:
ob = dup.instance_object.original
else:
ob = dup.object.original
ob.select_set(False)
if "gem" in ob:
loc = dup.matrix_world.to_translation()
rad = max(ob.dimensions[:2]) / 2
if dup.is_instance:
mat = dup.matrix_world.copy()
if ob.parent and ob.parent.is_instancer:
sel = ob.parent
else:
sel = None
else:
mat_loc = Matrix.Translation(loc)
mat_rot = dup.matrix_world.to_quaternion().to_matrix().to_4x4()
mat = mat_loc @ mat_rot
sel = ob
loc.freeze()
mat.freeze()
obs.append(sel)
ob_data.append((loc, rad, mat))
overlaps = asset.gem_overlap(context, ob_data, self.threshold)
if overlaps:
for i in overlaps:
ob = obs[i]
if ob:
ob.select_set(True)
self.report({"WARNING"}, _("{} overlaps found").format(len(overlaps)))
else:
self.report({"INFO"}, _("{} overlaps found").format(0))
return {"FINISHED"}
| gpl-3.0 | 5,417,416,596,498,840,000 | 31.100529 | 100 | 0.577056 | false | 3.881638 | false | false | false |
vprime/puuuu | env/lib/python2.7/site-packages/paramiko/client.py | 1 | 21100 | # Copyright (C) 2006-2007 Robey Pointer <[email protected]>
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA.
"""
SSH client & key policies
"""
from binascii import hexlify
import getpass
import os
import socket
import warnings
from paramiko.agent import Agent
from paramiko.common import *
from paramiko.config import SSH_PORT
from paramiko.dsskey import DSSKey
from paramiko.hostkeys import HostKeys
from paramiko.resource import ResourceManager
from paramiko.rsakey import RSAKey
from paramiko.ssh_exception import SSHException, BadHostKeyException
from paramiko.transport import Transport
from paramiko.util import retry_on_signal
class SSHClient (object):
"""
A high-level representation of a session with an SSH server. This class
wraps `.Transport`, `.Channel`, and `.SFTPClient` to take care of most
aspects of authenticating and opening channels. A typical use case is::
client = SSHClient()
client.load_system_host_keys()
client.connect('ssh.example.com')
stdin, stdout, stderr = client.exec_command('ls -l')
You may pass in explicit overrides for authentication and server host key
checking. The default mechanism is to try to use local key files or an
SSH agent (if one is running).
.. versionadded:: 1.6
"""
def __init__(self):
"""
Create a new SSHClient.
"""
self._system_host_keys = HostKeys()
self._host_keys = HostKeys()
self._host_keys_filename = None
self._log_channel = None
self._policy = RejectPolicy()
self._transport = None
self._agent = None
def load_system_host_keys(self, filename=None):
"""
Load host keys from a system (read-only) file. Host keys read with
this method will not be saved back by `save_host_keys`.
This method can be called multiple times. Each new set of host keys
will be merged with the existing set (new replacing old if there are
conflicts).
If ``filename`` is left as ``None``, an attempt will be made to read
keys from the user's local "known hosts" file, as used by OpenSSH,
and no exception will be raised if the file can't be read. This is
probably only useful on posix.
:param str filename: the filename to read, or ``None``
:raises IOError:
if a filename was provided and the file could not be read
"""
if filename is None:
# try the user's .ssh key file, and mask exceptions
filename = os.path.expanduser('~/.ssh/known_hosts')
try:
self._system_host_keys.load(filename)
except IOError:
pass
return
self._system_host_keys.load(filename)
def load_host_keys(self, filename):
"""
Load host keys from a local host-key file. Host keys read with this
method will be checked after keys loaded via `load_system_host_keys`,
but will be saved back by `save_host_keys` (so they can be modified).
The missing host key policy `.AutoAddPolicy` adds keys to this set and
saves them, when connecting to a previously-unknown server.
This method can be called multiple times. Each new set of host keys
will be merged with the existing set (new replacing old if there are
conflicts). When automatically saving, the last hostname is used.
:param str filename: the filename to read
:raises IOError: if the filename could not be read
"""
self._host_keys_filename = filename
self._host_keys.load(filename)
def save_host_keys(self, filename):
"""
Save the host keys back to a file. Only the host keys loaded with
`load_host_keys` (plus any added directly) will be saved -- not any
host keys loaded with `load_system_host_keys`.
:param str filename: the filename to save to
:raises IOError: if the file could not be written
"""
# update local host keys from file (in case other SSH clients
        # have written to the known_hosts file in the meantime).
if self._host_keys_filename is not None:
self.load_host_keys(self._host_keys_filename)
f = open(filename, 'w')
for hostname, keys in self._host_keys.iteritems():
for keytype, key in keys.iteritems():
f.write('%s %s %s\n' % (hostname, keytype, key.get_base64()))
f.close()
def get_host_keys(self):
"""
Get the local `.HostKeys` object. This can be used to examine the
local host keys or change them.
:return: the local host keys as a `.HostKeys` object.
"""
return self._host_keys
def set_log_channel(self, name):
"""
Set the channel for logging. The default is ``"paramiko.transport"``
but it can be set to anything you want.
:param str name: new channel name for logging
"""
self._log_channel = name
def set_missing_host_key_policy(self, policy):
"""
Set the policy to use when connecting to a server that doesn't have a
host key in either the system or local `.HostKeys` objects. The
default policy is to reject all unknown servers (using `.RejectPolicy`).
You may substitute `.AutoAddPolicy` or write your own policy class.
:param .MissingHostKeyPolicy policy:
the policy to use when receiving a host key from a
previously-unknown server
"""
self._policy = policy
def connect(self, hostname, port=SSH_PORT, username=None, password=None, pkey=None,
key_filename=None, timeout=None, allow_agent=True, look_for_keys=True,
compress=False, sock=None):
"""
Connect to an SSH server and authenticate to it. The server's host key
is checked against the system host keys (see `load_system_host_keys`)
and any local host keys (`load_host_keys`). If the server's hostname
is not found in either set of host keys, the missing host key policy
is used (see `set_missing_host_key_policy`). The default policy is
to reject the key and raise an `.SSHException`.
Authentication is attempted in the following order of priority:
- The ``pkey`` or ``key_filename`` passed in (if any)
- Any key we can find through an SSH agent
- Any "id_rsa" or "id_dsa" key discoverable in ``~/.ssh/``
- Plain username/password auth, if a password was given
If a private key requires a password to unlock it, and a password is
passed in, that password will be used to attempt to unlock the key.
:param str hostname: the server to connect to
:param int port: the server port to connect to
:param str username:
the username to authenticate as (defaults to the current local
username)
:param str password:
a password to use for authentication or for unlocking a private key
:param .PKey pkey: an optional private key to use for authentication
:param str key_filename:
the filename, or list of filenames, of optional private key(s) to
try for authentication
:param float timeout: an optional timeout (in seconds) for the TCP connect
:param bool allow_agent: set to False to disable connecting to the SSH agent
:param bool look_for_keys:
set to False to disable searching for discoverable private key
files in ``~/.ssh/``
:param bool compress: set to True to turn on compression
:param socket sock:
an open socket or socket-like object (such as a `.Channel`) to use
for communication to the target host
:raises BadHostKeyException: if the server's host key could not be
verified
:raises AuthenticationException: if authentication failed
:raises SSHException: if there was any other error connecting or
establishing an SSH session
:raises socket.error: if a socket error occurred while connecting
"""
if not sock:
for (family, socktype, proto, canonname, sockaddr) in socket.getaddrinfo(hostname, port, socket.AF_UNSPEC, socket.SOCK_STREAM):
if socktype == socket.SOCK_STREAM:
af = family
addr = sockaddr
break
else:
# some OS like AIX don't indicate SOCK_STREAM support, so just guess. :(
af, _, _, _, addr = socket.getaddrinfo(hostname, port, socket.AF_UNSPEC, socket.SOCK_STREAM)
sock = socket.socket(af, socket.SOCK_STREAM)
if timeout is not None:
try:
sock.settimeout(timeout)
except:
pass
retry_on_signal(lambda: sock.connect(addr))
t = self._transport = Transport(sock)
t.use_compression(compress=compress)
if self._log_channel is not None:
t.set_log_channel(self._log_channel)
t.start_client()
ResourceManager.register(self, t)
server_key = t.get_remote_server_key()
keytype = server_key.get_name()
if port == SSH_PORT:
server_hostkey_name = hostname
else:
server_hostkey_name = "[%s]:%d" % (hostname, port)
our_server_key = self._system_host_keys.get(server_hostkey_name, {}).get(keytype, None)
if our_server_key is None:
our_server_key = self._host_keys.get(server_hostkey_name, {}).get(keytype, None)
if our_server_key is None:
# will raise exception if the key is rejected; let that fall out
self._policy.missing_host_key(self, server_hostkey_name, server_key)
# if the callback returns, assume the key is ok
our_server_key = server_key
if server_key != our_server_key:
raise BadHostKeyException(hostname, server_key, our_server_key)
if username is None:
username = getpass.getuser()
if key_filename is None:
key_filenames = []
elif isinstance(key_filename, (str, unicode)):
key_filenames = [ key_filename ]
else:
key_filenames = key_filename
self._auth(username, password, pkey, key_filenames, allow_agent, look_for_keys)
def close(self):
"""
Close this SSHClient and its underlying `.Transport`.
"""
if self._transport is None:
return
self._transport.close()
self._transport = None
        if self._agent is not None:
self._agent.close()
self._agent = None
def exec_command(self, command, bufsize=-1, timeout=None, get_pty=False):
"""
Execute a command on the SSH server. A new `.Channel` is opened and
the requested command is executed. The command's input and output
streams are returned as Python ``file``-like objects representing
stdin, stdout, and stderr.
:param str command: the command to execute
:param int bufsize:
interpreted the same way as by the built-in ``file()`` function in
Python
        :param int timeout:
            set command's channel timeout. See `Channel.settimeout`
:return:
the stdin, stdout, and stderr of the executing command, as a
3-tuple
:raises SSHException: if the server fails to execute the command
"""
chan = self._transport.open_session()
        if get_pty:
chan.get_pty()
chan.settimeout(timeout)
chan.exec_command(command)
stdin = chan.makefile('wb', bufsize)
stdout = chan.makefile('rb', bufsize)
stderr = chan.makefile_stderr('rb', bufsize)
return stdin, stdout, stderr
def invoke_shell(self, term='vt100', width=80, height=24, width_pixels=0,
height_pixels=0):
"""
Start an interactive shell session on the SSH server. A new `.Channel`
is opened and connected to a pseudo-terminal using the requested
terminal type and size.
:param str term:
the terminal type to emulate (for example, ``"vt100"``)
:param int width: the width (in characters) of the terminal window
:param int height: the height (in characters) of the terminal window
:param int width_pixels: the width (in pixels) of the terminal window
:param int height_pixels: the height (in pixels) of the terminal window
:return: a new `.Channel` connected to the remote shell
:raises SSHException: if the server fails to invoke a shell
"""
chan = self._transport.open_session()
chan.get_pty(term, width, height, width_pixels, height_pixels)
chan.invoke_shell()
return chan
def open_sftp(self):
"""
Open an SFTP session on the SSH server.
:return: a new `.SFTPClient` session object
"""
return self._transport.open_sftp_client()
def get_transport(self):
"""
Return the underlying `.Transport` object for this SSH connection.
This can be used to perform lower-level tasks, like opening specific
kinds of channels.
:return: the `.Transport` for this connection
"""
return self._transport
def _auth(self, username, password, pkey, key_filenames, allow_agent, look_for_keys):
"""
Try, in order:
- The key passed in, if one was passed in.
- Any key we can find through an SSH agent (if allowed).
- Any "id_rsa" or "id_dsa" key discoverable in ~/.ssh/ (if allowed).
- Plain username/password auth, if a password was given.
(The password might be needed to unlock a private key, or for
two-factor authentication [for which it is required].)
"""
saved_exception = None
two_factor = False
allowed_types = []
if pkey is not None:
try:
self._log(DEBUG, 'Trying SSH key %s' % hexlify(pkey.get_fingerprint()))
allowed_types = self._transport.auth_publickey(username, pkey)
two_factor = (allowed_types == ['password'])
if not two_factor:
return
except SSHException, e:
saved_exception = e
if not two_factor:
for key_filename in key_filenames:
for pkey_class in (RSAKey, DSSKey):
try:
key = pkey_class.from_private_key_file(key_filename, password)
self._log(DEBUG, 'Trying key %s from %s' % (hexlify(key.get_fingerprint()), key_filename))
self._transport.auth_publickey(username, key)
two_factor = (allowed_types == ['password'])
if not two_factor:
return
break
except SSHException, e:
saved_exception = e
if not two_factor and allow_agent:
            if self._agent is None:
self._agent = Agent()
for key in self._agent.get_keys():
try:
self._log(DEBUG, 'Trying SSH agent key %s' % hexlify(key.get_fingerprint()))
# for 2-factor auth a successfully auth'd key will result in ['password']
allowed_types = self._transport.auth_publickey(username, key)
two_factor = (allowed_types == ['password'])
if not two_factor:
return
break
except SSHException, e:
saved_exception = e
if not two_factor:
keyfiles = []
rsa_key = os.path.expanduser('~/.ssh/id_rsa')
dsa_key = os.path.expanduser('~/.ssh/id_dsa')
if os.path.isfile(rsa_key):
keyfiles.append((RSAKey, rsa_key))
if os.path.isfile(dsa_key):
keyfiles.append((DSSKey, dsa_key))
# look in ~/ssh/ for windows users:
rsa_key = os.path.expanduser('~/ssh/id_rsa')
dsa_key = os.path.expanduser('~/ssh/id_dsa')
if os.path.isfile(rsa_key):
keyfiles.append((RSAKey, rsa_key))
if os.path.isfile(dsa_key):
keyfiles.append((DSSKey, dsa_key))
if not look_for_keys:
keyfiles = []
for pkey_class, filename in keyfiles:
try:
key = pkey_class.from_private_key_file(filename, password)
self._log(DEBUG, 'Trying discovered key %s in %s' % (hexlify(key.get_fingerprint()), filename))
# for 2-factor auth a successfully auth'd key will result in ['password']
allowed_types = self._transport.auth_publickey(username, key)
two_factor = (allowed_types == ['password'])
if not two_factor:
return
break
except SSHException, e:
saved_exception = e
except IOError, e:
saved_exception = e
if password is not None:
try:
self._transport.auth_password(username, password)
return
except SSHException, e:
saved_exception = e
elif two_factor:
raise SSHException('Two-factor authentication requires a password')
# if we got an auth-failed exception earlier, re-raise it
if saved_exception is not None:
raise saved_exception
raise SSHException('No authentication methods available')
def _log(self, level, msg):
self._transport._log(level, msg)
class MissingHostKeyPolicy (object):
"""
Interface for defining the policy that `.SSHClient` should use when the
SSH server's hostname is not in either the system host keys or the
application's keys. Pre-made classes implement policies for automatically
adding the key to the application's `.HostKeys` object (`.AutoAddPolicy`),
and for automatically rejecting the key (`.RejectPolicy`).
This function may be used to ask the user to verify the key, for example.
"""
def missing_host_key(self, client, hostname, key):
"""
Called when an `.SSHClient` receives a server key for a server that
isn't in either the system or local `.HostKeys` object. To accept
the key, simply return. To reject, raised an exception (which will
be passed to the calling application).
"""
pass
class AutoAddPolicy (MissingHostKeyPolicy):
"""
Policy for automatically adding the hostname and new host key to the
local `.HostKeys` object, and saving it. This is used by `.SSHClient`.
"""
def missing_host_key(self, client, hostname, key):
client._host_keys.add(hostname, key.get_name(), key)
if client._host_keys_filename is not None:
client.save_host_keys(client._host_keys_filename)
client._log(DEBUG, 'Adding %s host key for %s: %s' %
(key.get_name(), hostname, hexlify(key.get_fingerprint())))
class RejectPolicy (MissingHostKeyPolicy):
"""
Policy for automatically rejecting the unknown hostname & key. This is
used by `.SSHClient`.
"""
def missing_host_key(self, client, hostname, key):
client._log(DEBUG, 'Rejecting %s host key for %s: %s' %
(key.get_name(), hostname, hexlify(key.get_fingerprint())))
raise SSHException('Server %r not found in known_hosts' % hostname)
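# Example policy selection (added for illustration; the host is the same
# example host used in the SSHClient docstring):
#     client = SSHClient()
#     client.load_system_host_keys()
#     client.set_missing_host_key_policy(WarningPolicy())
#     client.connect('ssh.example.com')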
class WarningPolicy (MissingHostKeyPolicy):
"""
Policy for logging a Python-style warning for an unknown host key, but
accepting it. This is used by `.SSHClient`.
"""
def missing_host_key(self, client, hostname, key):
warnings.warn('Unknown %s host key for %s: %s' %
(key.get_name(), hostname, hexlify(key.get_fingerprint())))
| mit | -2,178,805,618,934,213,600 | 39.655106 | 139 | 0.603839 | false | 4.386694 | false | false | false |
BlackHole/enigma2-obh10 | lib/python/Screens/EpgSelection.py | 2 | 4137 | from __future__ import print_function
from Screens.InfoBar import InfoBar
from enigma import eServiceReference
from Components.ActionMap import HelpableActionMap
from Screens.EpgSelectionChannel import EPGSelectionChannel
from Screens.EpgSelectionBase import EPGServiceZap
from Screens.TimerEntry import addTimerFromEventSilent
# Keep for backwards compatibility with plugins, including the parameter naming.
# This class assumes that EPGSelection is only used in the SingleEPG sense.
class EPGSelection(EPGSelectionChannel, EPGServiceZap):
def __init__(self, session, service=None, zapFunc=None, eventid=None, bouquetChangeCB=None, serviceChangeCB=None, EPGtype="similar", StartBouquet=None, StartRef=None, bouquets=None):
if EPGtype not in ("similar", "single"):
print("[EPGSelection] Warning: EPGSelection does not support type '%s'" % EPGtype)
print(" Attempting to continue in single EPG mode")
EPGSelectionChannel.__init__(self, session, eServiceReference(service))
EPGServiceZap.__init__(self, zapFunc or InfoBar.instance.zapToService)
# Rewrite the EPG actions to invoke the compatibility functions.
helpDescription = _("EPG Commands")
self["epgactions"] = HelpableActionMap(self, "EPGSelectActions", {
"info": (self.Info, _("Show detailed event info")),
"epg": (self.epgButtonPressed, _("Show detailed event info")),
"menu": (self.createSetup, _("Setup menu"))
}, prio=-1, description=helpDescription)
self["colouractions"] = HelpableActionMap(self, "ColorActions", {
"red": (self.redButtonPressed, _("IMDB search for current event")),
"redlong": (self.redButtonPressedLong, _("Sort EPG list")),
"green": (self.greenButtonPressed, _("Add/Remove timer for current event")),
"greenlong": (self.greenButtonPressedLong, _("Show timer list")),
"yellow": (self.yellowButtonPressed, _("Search for similar events")),
"blue": (self.blueButtonPressed, _("Add an autotimer for current event")),
"bluelong": (self.blueButtonPressedLong, _("Show autotimer list"))
}, prio=-1, description=helpDescription)
# EPGSearch bypasses base class initialisation
	# try to limit the scope of its quirkiness by providing a limited
# initialisation path
def EPGSearch_init(self, session):
EPGServiceZap.__init__(self, InfoBar.instance.zapToService)
# Backwards compatibility properties for plugins.
@property
def ChoiceBoxDialog(self):
return self.choiceBoxDialog
@ChoiceBoxDialog.setter
def ChoiceBoxDialog(self, value):
self.choiceBoxDialog = value
# Backwards compatibility functions for plugins.
# Button names.
def redButtonPressed(self):
self.openIMDb()
def redButtonPressedLong(self):
self.sortEpg()
def greenButtonPressed(self):
self.addEditTimer()
def greenButtonPressedLong(self):
self.showTimerList()
def yellowButtonPressed(self):
self.openEPGSearch()
def blueButtonPressed(self):
self.addAutoTimer()
def blueButtonPressedLong(self):
self.showAutoTimerList()
def Info(self):
self.infoKeyPressed()
def InfoLong(self):
self.OpenSingleEPG()
def infoKeyPressed(self):
self.openEventView()
def eventSelected(self): # used by EPG Search plugin
self.openEventView()
def epgButtonPressed(self):
self.openEventView()
# Actions
def showTimerList(self):
self.openTimerList()
def showAutoTimerList(self):
self.openAutoTimerList()
def OpenSingleEPG(self):
self.openSingleEPG()
def sortEpg(self):
		self.sortEPG()
def timerAdd(self):
self.addEditTimerMenu()
def doRecordTimer(self):
self.doInstantTimer(0)
def doZapTimer(self):
self.doInstantTimer(1)
def RecordTimerQuestion(self, manual=False):
if manual:
self.addEditTimer()
else:
self.addEditTimerMenu()
def doInstantTimer(self, zap=0):
event, service = self["list"].getCurrent()[:2]
addTimerFromEventSilent(self.session, self.refreshTimerActionButton, event, service, zap)
	# Methods that must remain overridable by plugins.
def refreshList(self):
try:
# Allow plugins to override using the old all lowercase method name.
self.refreshlist()
except AttributeError:
EPGSelectionChannel.refreshList(self)
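
# Example plugin usage (a minimal sketch; 'session' and 'serviceRef' come from the
# calling screen and are assumptions here, not part of this module):
#     from Screens.EpgSelection import EPGSelection
#     session.open(EPGSelection, serviceRef.toString())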
| gpl-2.0 | -7,913,178,780,223,782,000 | 31.320313 | 183 | 0.751511 | false | 3.379902 | false | false | false |
cpatrickalves/simprev | util/carrega_parametros.py | 1 | 1280 | # -*- coding: utf-8 -*-
"""
@author: Patrick Alves
"""
# File with the projection parameters
arquivo_parametros = "parametros.txt"
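
# Expected layout of the parameters file (a hypothetical sketch inferred from the
# parser below; names and values are illustrative only):
#     # lines starting with '#' are comments
#     modelo = LDO
#     taxa_urbanizacao = 0.85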
def obter_parametros():
    # Dictionary that stores the parameters
parametros = {}
with open(arquivo_parametros, 'r' ,encoding='utf-8') as arquivo:
for linha in arquivo:
linha = linha.strip()
            if not linha:  # skip blank lines
continue
            if linha.startswith("#"):  # skip comments
continue
            # Take the string before the '=' and strip all whitespace
variavel = linha.split('=')[0].replace(" ", "")
            # Take the string after the '=' and strip all whitespace
valor = linha.split('=')[1].replace(" ", "")
            # Store each variable and its value in the dictionary;
            # 'modelo' is the only string-typed parameter, the rest are int or float
if variavel == 'modelo':
parametros[variavel] = valor
else:
                try:
                    parametros[variavel] = int(valor)
                except ValueError:
                    parametros[variavel] = float(valor)
return parametros | gpl-3.0 | 8,191,328,923,808,139,000 | 34.166667 | 84 | 0.528063 | false | 3.418919 | false | false | false |
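
# Example usage (a minimal sketch; assumes 'parametros.txt' is present in the
# current working directory):
#     parametros = obter_parametros()
#     modelo = parametros.get('modelo')  # the only string-valued parameter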
viktorTarasov/PyKMIP | kmip/tests/unit/core/messages/contents/test_protocol_version.py | 1 | 9321 | # Copyright (c) 2014 The Johns Hopkins University/Applied Physics Laboratory
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from testtools import TestCase
from kmip.core.messages.contents import ProtocolVersion
from kmip.core.utils import BytearrayStream
class TestProtocolVersion(TestCase):
def setUp(self):
super(TestProtocolVersion, self).setUp()
self.major_default = ProtocolVersion.ProtocolVersionMajor()
self.minor_default = ProtocolVersion.ProtocolVersionMinor()
self.major = ProtocolVersion.ProtocolVersionMajor(1)
self.minor = ProtocolVersion.ProtocolVersionMinor(1)
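        # TTLV encodings of ProtocolVersion(0, 0) and ProtocolVersion(1, 1),
        # used by the read/write round-trip tests below.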
self.encoding_default = BytearrayStream((
b'\x42\x00\x69\x01\x00\x00\x00\x20\x42\x00\x6A\x02\x00\x00\x00\x04'
b'\x00\x00\x00\x00\x00\x00\x00\x00\x42\x00\x6B\x02\x00\x00\x00\x04'
b'\x00\x00\x00\x00\x00\x00\x00\x00'))
self.encoding = BytearrayStream((
b'\x42\x00\x69\x01\x00\x00\x00\x20\x42\x00\x6A\x02\x00\x00\x00\x04'
b'\x00\x00\x00\x01\x00\x00\x00\x00\x42\x00\x6B\x02\x00\x00\x00\x04'
b'\x00\x00\x00\x01\x00\x00\x00\x00'))
def tearDown(self):
super(TestProtocolVersion, self).tearDown()
def _test_init(self, protocol_version_major, protocol_version_minor):
protocol_version = ProtocolVersion(
protocol_version_major, protocol_version_minor)
if protocol_version_major is None:
self.assertEqual(ProtocolVersion.ProtocolVersionMajor(),
protocol_version.protocol_version_major)
else:
self.assertEqual(protocol_version_major,
protocol_version.protocol_version_major)
if protocol_version_minor is None:
self.assertEqual(ProtocolVersion.ProtocolVersionMinor(),
protocol_version.protocol_version_minor)
else:
self.assertEqual(protocol_version_minor,
protocol_version.protocol_version_minor)
def test_init_with_none(self):
self._test_init(None, None)
def test_init_with_args(self):
major = ProtocolVersion.ProtocolVersionMajor(1)
minor = ProtocolVersion.ProtocolVersionMinor(0)
self._test_init(major, minor)
def test_validate_on_invalid_protocol_version_major(self):
major = "invalid"
minor = ProtocolVersion.ProtocolVersionMinor(0)
args = [major, minor]
self.assertRaisesRegexp(
TypeError, "invalid protocol version major", self._test_init,
*args)
def test_validate_on_invalid_protocol_version_minor(self):
major = ProtocolVersion.ProtocolVersionMajor(1)
minor = "invalid"
args = [major, minor]
self.assertRaisesRegexp(
TypeError, "invalid protocol version minor", self._test_init,
*args)
def _test_read(self, stream, major, minor):
protocol_version = ProtocolVersion()
protocol_version.read(stream)
msg = "protocol version major decoding mismatch"
msg += "; expected {0}, received {1}".format(
major, protocol_version.protocol_version_major)
self.assertEqual(major, protocol_version.protocol_version_major, msg)
msg = "protocol version minor decoding mismatch"
msg += "; expected {0}, received {1}".format(
minor, protocol_version.protocol_version_minor)
self.assertEqual(minor, protocol_version.protocol_version_minor, msg)
def test_read_with_none(self):
self._test_read(self.encoding_default, self.major_default,
self.minor_default)
def test_read_with_args(self):
self._test_read(self.encoding, self.major, self.minor)
def _test_write(self, stream_expected, major, minor):
stream_observed = BytearrayStream()
protocol_version = ProtocolVersion(major, minor)
protocol_version.write(stream_observed)
length_expected = len(stream_expected)
length_observed = len(stream_observed)
msg = "encoding lengths not equal"
msg += "; expected {0}, received {1}".format(
length_expected, length_observed)
self.assertEqual(length_expected, length_observed, msg)
msg = "encoding mismatch"
msg += ";\nexpected:\n{0}\nreceived:\n{1}".format(
stream_expected, stream_observed)
self.assertEqual(stream_expected, stream_observed, msg)
def test_write_with_none(self):
self._test_write(self.encoding_default, self.major_default,
self.minor_default)
def test_write_with_args(self):
self._test_write(self.encoding, self.major, self.minor)
def test_equal_on_equal(self):
a = ProtocolVersion.create(1, 0)
b = ProtocolVersion.create(1, 0)
self.assertTrue(a == b)
def test_equal_on_not_equal(self):
a = ProtocolVersion.create(1, 0)
b = ProtocolVersion.create(0, 1)
self.assertFalse(a == b)
def test_equal_on_type_mismatch(self):
a = ProtocolVersion.create(1, 0)
b = "invalid"
self.assertFalse(a == b)
def test_not_equal_on_equal(self):
a = ProtocolVersion.create(1, 0)
b = ProtocolVersion.create(1, 0)
self.assertFalse(a != b)
def test_not_equal_on_not_equal(self):
a = ProtocolVersion.create(1, 0)
b = ProtocolVersion.create(0, 1)
self.assertTrue(a != b)
def test_not_equal_on_type_mismatch(self):
a = ProtocolVersion.create(1, 0)
b = "invalid"
self.assertTrue(a != b)
def test_less_than(self):
"""
Test that the less than operator returns True/False when comparing
two different ProtocolVersions.
"""
a = ProtocolVersion.create(1, 0)
b = ProtocolVersion.create(1, 1)
c = ProtocolVersion.create(2, 0)
d = ProtocolVersion.create(0, 2)
self.assertTrue(a < b)
self.assertFalse(b < a)
self.assertFalse(a < a)
self.assertTrue(a < c)
self.assertFalse(c < a)
self.assertFalse(c < d)
self.assertTrue(d < c)
def test_greater_than(self):
"""
Test that the greater than operator returns True/False when
comparing two different ProtocolVersions.
"""
a = ProtocolVersion.create(1, 0)
b = ProtocolVersion.create(1, 1)
c = ProtocolVersion.create(2, 0)
d = ProtocolVersion.create(0, 2)
self.assertFalse(a > b)
self.assertTrue(b > a)
self.assertFalse(a > a)
self.assertFalse(a > c)
self.assertTrue(c > a)
self.assertTrue(c > d)
self.assertFalse(d > c)
def test_less_than_or_equal(self):
"""
Test that the less than or equal operator returns True/False when
comparing two different ProtocolVersions.
"""
a = ProtocolVersion.create(1, 0)
b = ProtocolVersion.create(1, 1)
c = ProtocolVersion.create(2, 0)
d = ProtocolVersion.create(0, 2)
self.assertTrue(a <= b)
self.assertFalse(b <= a)
self.assertTrue(a <= a)
self.assertTrue(a <= c)
self.assertFalse(c <= a)
self.assertFalse(c <= d)
self.assertTrue(d <= c)
def test_greater_than_or_equal(self):
"""
Test that the greater than or equal operator returns True/False when
comparing two different ProtocolVersions.
"""
a = ProtocolVersion.create(1, 0)
b = ProtocolVersion.create(1, 1)
c = ProtocolVersion.create(2, 0)
d = ProtocolVersion.create(0, 2)
self.assertFalse(a >= b)
self.assertTrue(b >= a)
self.assertTrue(a >= a)
self.assertFalse(a >= c)
self.assertTrue(c >= a)
self.assertTrue(c >= d)
self.assertFalse(d >= c)
def test_repr(self):
a = ProtocolVersion.create(1, 0)
self.assertEqual("1.0", "{0}".format(a))
def _test_create(self, major, minor):
protocol_version = ProtocolVersion.create(major, minor)
if major is None:
expected = ProtocolVersion.ProtocolVersionMajor()
else:
expected = ProtocolVersion.ProtocolVersionMajor(major)
self.assertEqual(expected, protocol_version.protocol_version_major)
if minor is None:
expected = ProtocolVersion.ProtocolVersionMinor()
else:
expected = ProtocolVersion.ProtocolVersionMinor(minor)
self.assertEqual(expected, protocol_version.protocol_version_minor)
def test_create_with_none(self):
self._test_create(None, None)
def test_create_with_args(self):
self._test_create(1, 0)
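
# Example usage (a minimal sketch mirroring the behaviour exercised by the tests above):
#     v10 = ProtocolVersion.create(1, 0)
#     v11 = ProtocolVersion.create(1, 1)
#     assert v10 < v11
#     assert "{0}".format(v10) == "1.0"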
| apache-2.0 | -7,470,747,633,934,984,000 | 33.522222 | 79 | 0.62622 | false | 3.801387 | true | false | false |