import math
import itertools
from collections import defaultdict
from PyQt4 import QtGui
import numpy
import Orange
from Orange.widgets import widget
from Orange.widgets import gui
from Orange.widgets.utils import itemmodels
from Orange.widgets.utils.sql import check_sql_input
INSTANCEID = "Same source"
INDEX = "Index"
class OWMergeData(widget.OWWidget):
name = "Merge Data"
description = "Merge data sets based on the values of selected data features."
icon = "icons/MergeData.svg"
priority = 1110
inputs = [("Data A", Orange.data.Table, "setDataA", widget.Default),
("Data B", Orange.data.Table, "setDataB")]
outputs = [("Merged Data A+B", Orange.data.Table, ),
("Merged Data B+A", Orange.data.Table, )]
want_main_area = False
def __init__(self):
super().__init__()
# data
self.dataA = None
self.dataB = None
# GUI
w = QtGui.QWidget(self)
self.controlArea.layout().addWidget(w)
grid = QtGui.QGridLayout()
grid.setContentsMargins(0, 0, 0, 0)
w.setLayout(grid)
# attribute A selection
boxAttrA = gui.vBox(self, self.tr("Attribute A"), addToLayout=False)
grid.addWidget(boxAttrA, 0, 0)
self.attrViewA = QtGui.QListView(
selectionMode=QtGui.QListView.SingleSelection
)
self.attrModelA = itemmodels.VariableListModel()
self.attrViewA.setModel(self.attrModelA)
self.attrViewA.selectionModel().selectionChanged.connect(
self._selectedAttrAChanged)
boxAttrA.layout().addWidget(self.attrViewA)
# attribute B selection
boxAttrB = gui.vBox(self, self.tr("Attribute B"), addToLayout=False)
grid.addWidget(boxAttrB, 0, 1)
self.attrViewB = QtGui.QListView(
selectionMode=QtGui.QListView.SingleSelection
)
self.attrModelB = itemmodels.VariableListModel()
self.attrViewB.setModel(self.attrModelB)
self.attrViewB.selectionModel().selectionChanged.connect(
self._selectedAttrBChanged)
boxAttrB.layout().addWidget(self.attrViewB)
# info A
boxDataA = gui.vBox(self, self.tr("Data A Input"), addToLayout=False)
grid.addWidget(boxDataA, 1, 0)
self.infoBoxDataA = gui.widgetLabel(boxDataA, self.dataInfoText(None))
# info B
boxDataB = gui.vBox(self, self.tr("Data B Input"), addToLayout=False)
grid.addWidget(boxDataB, 1, 1)
self.infoBoxDataB = gui.widgetLabel(boxDataB, self.dataInfoText(None))
gui.rubber(self.buttonsArea)
# resize
self.resize(400, 500)
def setAttrs(self):
add = ()
if self.dataA is not None and self.dataB is not None \
and len(numpy.intersect1d(self.dataA.ids, self.dataB.ids)):
add = (INSTANCEID,)
if self.dataA is not None:
self.attrModelA[:] = add + allvars(self.dataA)
else:
self.attrModelA[:] = []
if self.dataB is not None:
self.attrModelB[:] = add + allvars(self.dataB)
else:
self.attrModelB[:] = []
@check_sql_input
def setDataA(self, data):
#self.closeContext()
self.dataA = data
self.setAttrs()
self.infoBoxDataA.setText(self.dataInfoText(data))
@check_sql_input
def setDataB(self, data):
#self.closeContext()
self.dataB = data
self.setAttrs()
self.infoBoxDataB.setText(self.dataInfoText(data))
def handleNewSignals(self):
self._invalidate()
def dataInfoText(self, data):
ninstances = 0
nvariables = 0
if data is not None:
ninstances = len(data)
nvariables = len(data.domain)
instances = self.tr("%n instance(s)", None, ninstances)
attributes = self.tr("%n variable(s)", None, nvariables)
return "\n".join([instances, attributes])
def selectedIndexA(self):
return selected_row(self.attrViewA)
def selectedIndexB(self):
return selected_row(self.attrViewB)
def commit(self):
indexA = self.selectedIndexA()
indexB = self.selectedIndexB()
AB, BA = None, None
if indexA is not None and indexB is not None:
varA = self.attrModelA[indexA]
varB = self.attrModelB[indexB]
AB = merge(self.dataA, varA, self.dataB, varB)
BA = merge(self.dataB, varB, self.dataA, varA)
self.send("Merged Data A+B", AB)
self.send("Merged Data B+A", BA)
def _selectedAttrAChanged(self, *args):
self._invalidate()
def _selectedAttrBChanged(self, *args):
self._invalidate()
def _invalidate(self):
self.commit()
def send_report(self):
attr_a = self.selectedIndexA()
attr_b = self.selectedIndexB()
self.report_items((
("Attribute A", attr_a and self.attrModelA[attr_a]),
("Attribute B", attr_b and self.attrModelB[attr_b])
))
def selected_row(view):
rows = view.selectionModel().selectedRows()
if rows:
return rows[0].row()
else:
return None
def allvars(data):
return (INDEX,) + data.domain.attributes + data.domain.class_vars + data.domain.metas
def merge(A, varA, B, varB):
join_indices = left_join_indices(A, B, (varA,), (varB,))
seen_set = set()
def seen(val):
return val in seen_set or bool(seen_set.add(val))
merge_indices = [(i, j) for i, j in join_indices if not seen(i)]
all_vars_A = set(A.domain.variables + A.domain.metas)
iter_vars_B = itertools.chain(
enumerate(B.domain.variables),
((-i, m) for i, m in enumerate(B.domain.metas, start=1))
)
reduced_indices_B = [i for i, var in iter_vars_B if var not in all_vars_A]
reduced_B = B[:, list(reduced_indices_B)]
return join_table_by_indices(A, reduced_B, merge_indices)
def group_table_indices(table, key_vars, exclude_unknown=False):
"""
Group table indices based on values of selected columns (`key_vars`).
Return a dictionary mapping all unique value combinations (keys)
into a list of indices in the table where they are present.
:param Orange.data.Table table:
:param list-of-Orange.data.FeatureDescriptor] key_vars:
:param bool exclude_unknown:
"""
groups = defaultdict(list)
for i, inst in enumerate(table):
key = [inst.id if a == INSTANCEID else
i if a == INDEX else inst[a]
for a in key_vars]
if exclude_unknown and any(math.isnan(k) for k in key):
continue
key = tuple([str(k) for k in key])
groups[key].append(i)
return groups
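# A minimal, self-contained sketch (plain Python lists instead of an
# Orange.data.Table) of the grouping idea implemented above: each key value
# maps to the list of row indices where it occurs, which is the mapping that
# left_join_indices() below builds through this helper. The function name and
# data are illustrative only.
def _group_plain_rows_example():
    rows = ["a", "b", "a", "c"]
    groups = defaultdict(list)
    for i, key in enumerate(rows):
        groups[(str(key),)].append(i)
    return dict(groups)  # {('a',): [0, 2], ('b',): [1], ('c',): [3]}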
def left_join_indices(table1, table2, vars1, vars2):
key_map1 = group_table_indices(table1, vars1)
key_map2 = group_table_indices(table2, vars2)
indices = []
for i, inst in enumerate(table1):
key = tuple([str(inst.id if v == INSTANCEID else
i if v == INDEX else inst[v])
for v in vars1])
if key in key_map1 and key in key_map2:
for j in key_map2[key]:
indices.append((i, j))
else:
indices.append((i, None))
return indices
def right_join_indices(table1, table2, vars1, vars2):
indices = left_join_indices(table2, table1, vars2, vars1)
return [(j, i) for i, j in indices]
def inner_join_indices(table1, table2, vars1, vars2):
indices = left_join_indices(table1, table2, vars1, vars2)
return [(i, j) for i, j in indices if j is not None]
def left_join(left, right, left_vars, right_vars):
"""
Left join `left` and `right` on values of `left/right_vars`.
"""
indices = left_join_indices(left, right, left_vars, right_vars)
return join_table_by_indices(left, right, indices)
def right_join(left, right, left_vars, right_vars):
"""
Right join left and right on attributes attr1 and attr2
"""
indices = right_join_indices(left, right, left_vars, right_vars)
return join_table_by_indices(left, right, indices)
def inner_join(left, right, left_vars, right_vars):
indices = inner_join_indices(left, right, left_vars, right_vars)
return join_table_by_indices(left, right, indices)
def join_table_by_indices(left, right, indices):
domain = Orange.data.Domain(
left.domain.attributes + right.domain.attributes,
left.domain.class_vars + right.domain.class_vars,
left.domain.metas + right.domain.metas
)
X = join_array_by_indices(left.X, right.X, indices)
Y = join_array_by_indices(numpy.c_[left.Y], numpy.c_[right.Y], indices)
metas = join_array_by_indices(left.metas, right.metas, indices)
return Orange.data.Table.from_numpy(domain, X, Y, metas)
def join_array_by_indices(left, right, indices, masked=float("nan")):
left_masked = [masked] * left.shape[1]
right_masked = [masked] * right.shape[1]
leftparts = []
rightparts = []
for i, j in indices:
if i is not None:
leftparts.append(left[i])
else:
leftparts.append(left_masked)
if j is not None:
rightparts.append(right[j])
else:
rightparts.append(right_masked)
def hstack_blocks(blocks):
return numpy.hstack(list(map(numpy.vstack, blocks)))
return hstack_blocks((leftparts, rightparts))
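# A tiny worked example of join_array_by_indices (a sketch, values are
# illustrative): rows of `left` and `right` are lined up according to the
# (i, j) index pairs, with a row of NaNs substituted where an index is None.
#
#   left = numpy.array([[1.0], [2.0]])
#   right = numpy.array([[10.0], [20.0]])
#   join_array_by_indices(left, right, [(0, 0), (1, None)])
#   # -> [[ 1., 10.],
#   #     [ 2., nan]]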
def test():
app = QtGui.QApplication([])
w = OWMergeData()
zoo = Orange.data.Table("zoo")
A = zoo[:, [0, 1, 2, "type", -1]]
B = zoo[:, [3, 4, 5, "type", -1]]
w.setDataA(A)
w.setDataB(B)
w.handleNewSignals()
w.show()
app.exec_()
if __name__ == "__main__":
test()
|
''' projectile_motion.py
projectile motion equations:
height = y(t) = hs + (t * v * sin(a)) - (g * t*t)/2
distance = x(t) = v * cos(a) * t
where:
t is the time in seconds
v is the muzzle velocity of the projectile (meters/second)
a is the firing angle with respect to ground (radians)
hs is starting height with respect to ground (meters)
g is the gravitational pull (meters/second_square)
tested with Python27/Python33 by vegaseat 20mar2013
'''
import math
import matplotlib.pyplot as plt
def projectile_xy(speed, angle, starting_height=0.0, gravity=9.8):
'''
returns a list of (x, y) projectile motion data points
where:
x axis is distance (or range) in meters
y axis is height in meters
'''
data_xy = []
t = 0.0
while True:
# now calculate the height y
y = starting_height + (t * speed * math.sin(angle)) - (gravity * t * t)/2
# projectile has hit ground level
if y < 0:
break
# calculate the distance x
x = speed * math.cos(angle) * t
# append the (x, y) tuple to the list
data_xy.append((x, y))
# use the time in increments of 0.1 seconds
t += 0.1
return data_xy
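# Quick analytic sanity check (a sketch, not part of the original script):
# for a launch from ground level the closed-form apex height is
# (v*sin(a))**2 / (2*g) and the total flight time is 2*v*sin(a)/g, so the
# sampled trajectory above can be compared against these values.
def projectile_apex_and_flight_time(speed, angle, gravity=9.8):
    """Return (max_height, flight_time) for a launch from ground level."""
    vy = speed * math.sin(angle)
    return (vy * vy) / (2.0 * gravity), 2.0 * vy / gravity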
# firing angle with respect to the ground, in degrees
d = 80
a = math.radians(d) # radians
# muzzle velocity of the projectile (meters/second)
v = 200
data_45 = projectile_xy(v, a)
print "\nDATA: ", data_45, "\n"
# find maximum height ...
point_height_max = max(data_45, key=lambda q: q[1])
xm, ym = point_height_max
print('''
Projectile Motion ...
Using a firing angle of {} degrees
and a muzzle velocity of {} meters/second
the maximum height is {:0.1f} meters
at a distance of {:0.1f} meters,'''.format(d, v, ym, xm))
# find maximum distance ...
x_max = max(data_45)[0]
print("the maximum distance is {:0.1f} meters.".format(x_max))
''' sample result (obtained with a 45 degree angle and v = 100 m/s) ...
Projectile Motion ...
Using a firing angle of 45 degrees
and a muzzle velocity of 100 meters/second
the maximum height is 255.1 meters
at a distance of 509.1 meters,
the maximum distance is 1018.2 meters.
'''
x = [row[0] for row in data_45]
y = [row[1] for row in data_45]
plt.plot(x, y)
plt.axis([0, 2000, 0, 1500])
plt.xlabel('distance')
plt.ylabel('altitude')
plt.savefig('trajectory.png')
|
# encoding:utf-8
import logging
from django.contrib.auth.models import User
from django.db import models
log = logging.getLogger('root')
class Tag(models.Model):
"""
Tag: users attach tags to music so it can be located quickly.
"""
name = models.CharField(max_length=20, help_text=u'Tag name')
user = models.ForeignKey(User, related_name='tags')
def __str__(self):
return self.name
def limit_choices_owner_tag():
return {"id":1}
class MusicList(models.Model):
"""
Playlist information model.
"""
name = models.CharField(max_length=100, help_text=u'Playlist name', default="")
user = models.ForeignKey(User, related_name='music_lists', help_text=u"Playlist owner")
create_at = models.DateTimeField(auto_now_add=True, help_text=u'Creation time')
listen_count = models.IntegerField(default=0, help_text=u'Listen count')
tags = models.ManyToManyField(Tag, related_name='music_lists', help_text=u'Tag list',)
introduction = models.CharField(max_length=200, help_text=u'Playlist introduction')
thumbnail = models.ImageField(upload_to='musiclists/thumbnail', help_text=u'Playlist thumbnail', default=None)
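# Usage sketch (assumes an existing User instance `user`; the field values
# below are illustrative only):
#
#   rock = Tag.objects.create(name='rock', user=user)
#   playlist = MusicList.objects.create(name='Road trip', user=user,
#                                       introduction='Driving songs')
#   playlist.tags.add(rock)
#   user.music_lists.count()   # reverse lookup via related_name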
|
#!/usr/bin/env python2.7
# -*- coding: utf-8 -*-
from copy import deepcopy
import re
import threading
from hecatoncheir import DbDriverBase, logger as log
from hecatoncheir.QueryResult import QueryResult
from hecatoncheir.exception import (DriverError, InternalError, QueryError,
QueryTimeout)
from hecatoncheir.logger import to_unicode
class MSSQLDriver(DbDriverBase.DbDriverBase):
host = None
port = None
dbname = None
dbuser = None
dbpass = None
conn = None
driver = None
def __init__(self, host, dbname, dbuser, dbpass):
self.host = host
self.dbname = dbname
self.dbuser = dbuser
self.dbpass = dbpass
name = "pymssql"
try:
self.driver = __import__(name, fromlist=[''])
except Exception as e:
raise DriverError(
u"Could not load the driver module: %s" % name, source=e)
def connect(self):
try:
self.conn = self.driver.connect(self.host, self.dbuser,
self.dbpass, self.dbname)
except Exception as e:
# assuming OperationalError
msg = to_unicode(e[0][1]).replace('\n', ' ')
msg = re.sub(r'DB-Lib.*', '', msg)
raise DriverError(
u"Could not connect to the server: %s" % msg, source=e)
return True
def __get_spid(self):
cur = self.conn.cursor()
cur.execute("SELECT @@SPID")
r = cur.fetchone()
print(r)
cur.close()
return r[0]
def cancel_callback(self):
log.info("cancel_callback start")
try:
conn = self.driver.connect(self.host, self.dbuser, self.dbpass,
self.dbname)
cur = conn.cursor()
log.info("KILL %d" % self.__spid)
cur.execute("KILL %d" % self.__spid)
cur.close()
conn.close()
except Exception as e:
print(e)
log.info("cancel_callback failed")
raise e
log.info("cancel_callback end")
def query_to_resultset(self, query, max_rows=10000, timeout=None):
"""Build a QueryResult object from the query
Args:
query (str): a query string to be executed.
max_rows (int): max rows which can be kept in a QueryResult object.
Returns:
QueryResult: an object holding query, column names and result set.
"""
assert query
assert isinstance(query, unicode)
log.trace('query_to_resultset: start query=%s' % query)
# FIXME: Query timeout is not supported on SQL Server.
#
# KILL <spid> does not work as expected so far.
# See below for more information:
# http://stackoverflow.com/questions/43529410/run-sql-kill-from-python
if timeout:
raise NotImplementedError(
'Query timeout is not implemented on SQL Server')
res = QueryResult(query)
monitor = None
try:
if self.conn is None:
self.connect()
if timeout and int(timeout) > 0:
self.__spid = self.__get_spid()
monitor = threading.Timer(timeout, self.cancel_callback)
monitor.start()
cur = self.conn.cursor()
cur.execute(res.query)
desc = []
if cur.description:
for d in cur.description:
desc.append(d[0])
res.column_names = deepcopy(tuple(desc))
for i, r in enumerate(cur.fetchall()):
# let's consider the memory size.
if i > max_rows:
raise InternalError(
(u'Exceeded the record limit (%d) '
u'for QueryResult.' % max_rows),
query=query)
res.resultset.append(deepcopy(r))
cur.close()
except InternalError as e:
raise e
except DriverError as e:
raise e
except Exception as e:
print(e)
# assuming ProgrammingError
msg = to_unicode(e[1]).replace('\n', ' ')
msg = re.sub(r'DB-Lib.*', '', msg)
raise QueryError(
"Could not execute a query: %s" % msg,
query=query, source=e)
finally:
if monitor:
monitor.cancel()
if self.conn:
self.conn.rollback()
log.trace('query_to_resultset: end')
return res
def disconnect(self):
if self.conn is None:
return False
try:
self.conn.close()
except Exception as e:
raise DriverError(
u"Could not disconnect from the server: %s" % to_unicode(e),
source=e)
self.conn = None
return True
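# Usage sketch (host, database and credentials below are placeholders):
#
#   drv = MSSQLDriver('localhost', 'testdb', 'sa', 'secret')
#   drv.connect()
#   rs = drv.query_to_resultset(u'SELECT 1 AS one')
#   print(rs.column_names)   # column names taken from the cursor description
#   print(rs.resultset)      # list of fetched rows (at most max_rows)
#   drv.disconnect()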
|
from __future__ import absolute_import
import datetime
import logging
import os
import sys
import socket
from socket import error as SocketError, timeout as SocketTimeout
import warnings
from .packages import six
from .packages.six.moves.http_client import HTTPConnection as _HTTPConnection
from .packages.six.moves.http_client import HTTPException # noqa: F401
try: # Compiled with SSL?
import ssl
BaseSSLError = ssl.SSLError
except (ImportError, AttributeError): # Platform-specific: No SSL.
ssl = None
class BaseSSLError(BaseException):
pass
try: # Python 3:
# Not a no-op, we're adding this to the namespace so it can be imported.
ConnectionError = ConnectionError
except NameError: # Python 2:
class ConnectionError(Exception):
pass
from .exceptions import (
NewConnectionError,
ConnectTimeoutError,
SubjectAltNameWarning,
SystemTimeWarning,
)
from .packages.ssl_match_hostname import match_hostname, CertificateError
from .util.ssl_ import (
resolve_cert_reqs,
resolve_ssl_version,
assert_fingerprint,
create_urllib3_context,
ssl_wrap_socket
)
from .util import connection
from ._collections import HTTPHeaderDict
log = logging.getLogger(__name__)
port_by_scheme = {
'http': 80,
'https': 443,
}
RECENT_DATE = datetime.date(2014, 1, 1)
class DummyConnection(object):
"""Used to detect a failed ConnectionCls import."""
pass
class HTTPConnection(_HTTPConnection, object):
"""
Based on httplib.HTTPConnection but provides an extra constructor
backwards-compatibility layer between older and newer Pythons.
Additional keyword parameters are used to configure attributes of the connection.
Accepted parameters include:
- ``strict``: See the documentation on :class:`urllib3.connectionpool.HTTPConnectionPool`
- ``source_address``: Set the source address for the current connection.
.. note:: This is ignored for Python 2.6. It is only applied for 2.7 and 3.x
- ``socket_options``: Set specific options on the underlying socket. If not specified, then
defaults are loaded from ``HTTPConnection.default_socket_options`` which includes disabling
Nagle's algorithm (sets TCP_NODELAY to 1) unless the connection is behind a proxy.
For example, if you wish to enable TCP Keep Alive in addition to the defaults,
you might pass::
HTTPConnection.default_socket_options + [
(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1),
]
Or you may want to disable the defaults by passing an empty list (e.g., ``[]``).
"""
default_port = port_by_scheme['http']
#: Disable Nagle's algorithm by default.
#: ``[(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)]``
default_socket_options = [(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)]
#: Whether this connection verifies the host's certificate.
is_verified = False
def __init__(self, *args, **kw):
if six.PY3: # Python 3
kw.pop('strict', None)
# Pre-set source_address in case we have an older Python like 2.6.
self.source_address = kw.get('source_address')
if sys.version_info < (2, 7): # Python 2.6
# _HTTPConnection on Python 2.6 will balk at this keyword arg, but
# not newer versions. We can still use it when creating a
# connection though, so we pop it *after* we have saved it as
# self.source_address.
kw.pop('source_address', None)
#: The socket options provided by the user. If no options are
#: provided, we use the default options.
self.socket_options = kw.pop('socket_options', self.default_socket_options)
# Superclass also sets self.source_address in Python 2.7+.
_HTTPConnection.__init__(self, *args, **kw)
def _new_conn(self):
""" Establish a socket connection and set nodelay settings on it.
:return: New socket connection.
"""
extra_kw = {}
if self.source_address:
extra_kw['source_address'] = self.source_address
if self.socket_options:
extra_kw['socket_options'] = self.socket_options
try:
conn = connection.create_connection(
(self.host, self.port), self.timeout, **extra_kw)
except SocketTimeout as e:
raise ConnectTimeoutError(
self, "Connection to %s timed out. (connect timeout=%s)" %
(self.host, self.timeout))
except SocketError as e:
raise NewConnectionError(
self, "Failed to establish a new connection: %s" % e)
return conn
def _prepare_conn(self, conn):
self.sock = conn
# the _tunnel_host attribute was added in python 2.6.3 (via
# http://hg.python.org/cpython/rev/0f57b30a152f) so pythons 2.6(0-2) do
# not have them.
if getattr(self, '_tunnel_host', None):
# TODO: Fix tunnel so it doesn't depend on self.sock state.
self._tunnel()
# Mark this connection as not reusable
self.auto_open = 0
def connect(self):
conn = self._new_conn()
self._prepare_conn(conn)
def request_chunked(self, method, url, body=None, headers=None):
"""
Alternative to the common request method, which sends the
body with chunked encoding and not as one block
"""
headers = HTTPHeaderDict(headers if headers is not None else {})
skip_accept_encoding = 'accept-encoding' in headers
self.putrequest(method, url, skip_accept_encoding=skip_accept_encoding)
for header, value in headers.items():
self.putheader(header, value)
if 'transfer-encoding' not in headers:
self.putheader('Transfer-Encoding', 'chunked')
self.endheaders()
if body is not None:
stringish_types = six.string_types + (six.binary_type,)
if isinstance(body, stringish_types):
body = (body,)
for chunk in body:
if not chunk:
continue
if not isinstance(chunk, six.binary_type):
chunk = chunk.encode('utf8')
len_str = hex(len(chunk))[2:]
self.send(len_str.encode('utf-8'))
self.send(b'\r\n')
self.send(chunk)
self.send(b'\r\n')
# After the if clause, to always have a closed body
self.send(b'0\r\n\r\n')
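    # Usage sketch for request_chunked (host and path are placeholders):
    #
    #   conn = HTTPConnection('localhost', 8000)
    #   conn.request_chunked('POST', '/upload', body=(b'part one', b'part two'))
    #   resp = conn.getresponse()
    #
    # Each element of `body` is sent as its own chunk, followed by the
    # terminating zero-length chunk.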
class HTTPSConnection(HTTPConnection):
default_port = port_by_scheme['https']
ssl_version = None
def __init__(self, host, port=None, key_file=None, cert_file=None,
strict=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
ssl_context=None, **kw):
HTTPConnection.__init__(self, host, port, strict=strict,
timeout=timeout, **kw)
self.key_file = key_file
self.cert_file = cert_file
self.ssl_context = ssl_context
# Required property for Google AppEngine 1.9.0 which otherwise causes
# HTTPS requests to go out as HTTP. (See Issue #356)
self._protocol = 'https'
def connect(self):
conn = self._new_conn()
self._prepare_conn(conn)
if self.ssl_context is None:
self.ssl_context = create_urllib3_context(
ssl_version=resolve_ssl_version(None),
cert_reqs=resolve_cert_reqs(None),
)
self.sock = ssl_wrap_socket(
sock=conn,
keyfile=self.key_file,
certfile=self.cert_file,
ssl_context=self.ssl_context,
)
class VerifiedHTTPSConnection(HTTPSConnection):
"""
Based on httplib.HTTPSConnection but wraps the socket with
SSL certification.
"""
cert_reqs = None
ca_certs = None
ca_cert_dir = None
ssl_version = None
assert_fingerprint = None
def set_cert(self, key_file=None, cert_file=None,
cert_reqs=None, ca_certs=None,
assert_hostname=None, assert_fingerprint=None,
ca_cert_dir=None):
"""
This method should only be called once, before the connection is used.
"""
# If cert_reqs is not provided, we can try to guess. If the user gave
# us a cert database, we assume they want to use it: otherwise, if
# they gave us an SSL Context object we should use whatever is set for
# it.
if cert_reqs is None:
if ca_certs or ca_cert_dir:
cert_reqs = 'CERT_REQUIRED'
elif self.ssl_context is not None:
cert_reqs = self.ssl_context.verify_mode
self.key_file = key_file
self.cert_file = cert_file
self.cert_reqs = cert_reqs
self.assert_hostname = assert_hostname
self.assert_fingerprint = assert_fingerprint
self.ca_certs = ca_certs and os.path.expanduser(ca_certs)
self.ca_cert_dir = ca_cert_dir and os.path.expanduser(ca_cert_dir)
def connect(self):
# Add certificate verification
conn = self._new_conn()
hostname = self.host
if getattr(self, '_tunnel_host', None):
# _tunnel_host was added in Python 2.6.3
# (See: http://hg.python.org/cpython/rev/0f57b30a152f)
self.sock = conn
# Calls self._set_hostport(), so self.host is
# self._tunnel_host below.
self._tunnel()
# Mark this connection as not reusable
self.auto_open = 0
# Override the host with the one we're requesting data from.
hostname = self._tunnel_host
is_time_off = datetime.date.today() < RECENT_DATE
if is_time_off:
warnings.warn((
'System time is way off (before {0}). This will probably '
'lead to SSL verification errors').format(RECENT_DATE),
SystemTimeWarning
)
# Wrap socket using verification with the root certs in
# trusted_root_certs
if self.ssl_context is None:
self.ssl_context = create_urllib3_context(
ssl_version=resolve_ssl_version(self.ssl_version),
cert_reqs=resolve_cert_reqs(self.cert_reqs),
)
context = self.ssl_context
context.verify_mode = resolve_cert_reqs(self.cert_reqs)
self.sock = ssl_wrap_socket(
sock=conn,
keyfile=self.key_file,
certfile=self.cert_file,
ca_certs=self.ca_certs,
ca_cert_dir=self.ca_cert_dir,
server_hostname=hostname,
ssl_context=context)
if self.assert_fingerprint:
assert_fingerprint(self.sock.getpeercert(binary_form=True),
self.assert_fingerprint)
elif context.verify_mode != ssl.CERT_NONE \
and self.assert_hostname is not False:
cert = self.sock.getpeercert()
if not cert.get('subjectAltName', ()):
warnings.warn((
'Certificate for {0} has no `subjectAltName`, falling back to check for a '
'`commonName` for now. This feature is being removed by major browsers and '
'deprecated by RFC 2818. (See https://github.com/shazow/urllib3/issues/497 '
'for details.)'.format(hostname)),
SubjectAltNameWarning
)
_match_hostname(cert, self.assert_hostname or hostname)
self.is_verified = (
context.verify_mode == ssl.CERT_REQUIRED or
self.assert_fingerprint is not None
)
def _match_hostname(cert, asserted_hostname):
try:
match_hostname(cert, asserted_hostname)
except CertificateError as e:
log.error(
'Certificate did not match expected hostname: %s. '
'Certificate: %s', asserted_hostname, cert
)
# Add cert to exception and reraise so client code can inspect
# the cert when catching the exception, if they want to
e._peer_cert = cert
raise
if ssl:
# Make a copy for testing.
UnverifiedHTTPSConnection = HTTPSConnection
HTTPSConnection = VerifiedHTTPSConnection
else:
HTTPSConnection = DummyConnection
|
"""Prepares a distribution for installation
"""
# The following comment should be removed at some point in the future.
# mypy: strict-optional=False
import logging
import mimetypes
import os
import shutil
from pip._vendor import requests
from pip._vendor.six import PY2
from pip._internal.distributions import (
make_distribution_for_install_requirement,
)
from pip._internal.distributions.installed import InstalledDistribution
from pip._internal.exceptions import (
DirectoryUrlHashUnsupported,
HashMismatch,
HashUnpinned,
InstallationError,
PreviousBuildDirError,
VcsHashUnsupported,
)
from pip._internal.utils.hashes import MissingHashes
from pip._internal.utils.logging import indent_log
from pip._internal.utils.misc import display_path, hide_url
from pip._internal.utils.temp_dir import TempDirectory
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
from pip._internal.utils.unpacking import unpack_file
from pip._internal.vcs import vcs
if MYPY_CHECK_RUNNING:
from typing import (
Callable, List, Optional, Tuple,
)
from mypy_extensions import TypedDict
from pip._internal.distributions import AbstractDistribution
from pip._internal.index.package_finder import PackageFinder
from pip._internal.models.link import Link
from pip._internal.network.download import Downloader
from pip._internal.req.req_install import InstallRequirement
from pip._internal.req.req_tracker import RequirementTracker
from pip._internal.utils.hashes import Hashes
if PY2:
CopytreeKwargs = TypedDict(
'CopytreeKwargs',
{
'ignore': Callable[[str, List[str]], List[str]],
'symlinks': bool,
},
total=False,
)
else:
CopytreeKwargs = TypedDict(
'CopytreeKwargs',
{
'copy_function': Callable[[str, str], None],
'ignore': Callable[[str, List[str]], List[str]],
'ignore_dangling_symlinks': bool,
'symlinks': bool,
},
total=False,
)
logger = logging.getLogger(__name__)
def _get_prepared_distribution(
req, # type: InstallRequirement
req_tracker, # type: RequirementTracker
finder, # type: PackageFinder
build_isolation # type: bool
):
# type: (...) -> AbstractDistribution
"""Prepare a distribution for installation.
"""
abstract_dist = make_distribution_for_install_requirement(req)
with req_tracker.track(req):
abstract_dist.prepare_distribution_metadata(finder, build_isolation)
return abstract_dist
def unpack_vcs_link(link, location):
# type: (Link, str) -> None
vcs_backend = vcs.get_backend_for_scheme(link.scheme)
assert vcs_backend is not None
vcs_backend.unpack(location, url=hide_url(link.url))
class File(object):
def __init__(self, path, content_type):
# type: (str, str) -> None
self.path = path
self.content_type = content_type
def get_http_url(
link, # type: Link
downloader, # type: Downloader
download_dir=None, # type: Optional[str]
hashes=None, # type: Optional[Hashes]
):
# type: (...) -> File
temp_dir = TempDirectory(kind="unpack", globally_managed=True)
# If a download dir is specified, is the file already downloaded there?
already_downloaded_path = None
if download_dir:
already_downloaded_path = _check_download_dir(
link, download_dir, hashes
)
if already_downloaded_path:
from_path = already_downloaded_path
content_type = mimetypes.guess_type(from_path)[0]
else:
# let's download to a tmp dir
from_path, content_type = _download_http_url(
link, downloader, temp_dir.path, hashes
)
return File(from_path, content_type)
def get_file_url(
link, # type: Link
download_dir=None, # type: Optional[str]
hashes=None # type: Optional[Hashes]
):
# type: (...) -> File
"""Get file and optionally check its hash.
"""
# If a download dir is specified, is the file already there and valid?
already_downloaded_path = None
if download_dir:
already_downloaded_path = _check_download_dir(
link, download_dir, hashes
)
if already_downloaded_path:
from_path = already_downloaded_path
else:
from_path = link.file_path
# If --require-hashes is off, `hashes` is either empty, the
# link's embedded hash, or MissingHashes; it is required to
# match. If --require-hashes is on, we are satisfied by any
# hash in `hashes` matching: a URL-based or an option-based
# one; no internet-sourced hash will be in `hashes`.
if hashes:
hashes.check_against_path(from_path)
content_type = mimetypes.guess_type(from_path)[0]
return File(from_path, content_type)
def unpack_url(
link, # type: Link
location, # type: str
downloader, # type: Downloader
download_dir=None, # type: Optional[str]
hashes=None, # type: Optional[Hashes]
):
# type: (...) -> Optional[File]
"""Unpack link into location, downloading if required.
:param hashes: A Hashes object, one of whose embedded hashes must match,
or HashMismatch will be raised. If the Hashes is empty, no matches are
required, and unhashable types of requirements (like VCS ones, which
would ordinarily raise HashUnsupported) are allowed.
"""
# non-editable vcs urls
if link.is_vcs:
unpack_vcs_link(link, location)
return None
# If it's a url to a local directory, we build in-place.
# There is nothing to be done here.
if link.is_existing_dir():
return None
# file urls
if link.is_file:
file = get_file_url(link, download_dir, hashes=hashes)
# http urls
else:
file = get_http_url(
link,
downloader,
download_dir,
hashes=hashes,
)
# unpack the archive to the build dir location. even when only downloading
# archives, they have to be unpacked to parse dependencies
unpack_file(file.path, location, file.content_type)
return file
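# Rough call sketch (the objects involved are pip internals; the variable
# names below are illustrative, not taken from this module):
#
#   file = unpack_url(req.link, build_location, downloader,
#                     download_dir=None, hashes=req.hashes())
#   # `file` is None for VCS links and existing local directories,
#   # otherwise a File whose path points at the fetched archive.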
def _download_http_url(
link, # type: Link
downloader, # type: Downloader
temp_dir, # type: str
hashes, # type: Optional[Hashes]
):
# type: (...) -> Tuple[str, str]
"""Download link url into temp_dir using provided session"""
download = downloader(link)
file_path = os.path.join(temp_dir, download.filename)
with open(file_path, 'wb') as content_file:
for chunk in download.chunks:
content_file.write(chunk)
if hashes:
hashes.check_against_path(file_path)
return file_path, download.response.headers.get('content-type', '')
def _check_download_dir(link, download_dir, hashes):
# type: (Link, str, Optional[Hashes]) -> Optional[str]
""" Check download_dir for previously downloaded file with correct hash
If a correct file is found return its path else None
"""
download_path = os.path.join(download_dir, link.filename)
if not os.path.exists(download_path):
return None
# If already downloaded, does its hash match?
logger.info('File was already downloaded %s', download_path)
if hashes:
try:
hashes.check_against_path(download_path)
except HashMismatch:
logger.warning(
'Previously-downloaded file %s has bad hash. '
'Re-downloading.',
download_path
)
os.unlink(download_path)
return None
return download_path
class RequirementPreparer(object):
"""Prepares a Requirement
"""
def __init__(
self,
build_dir, # type: str
download_dir, # type: Optional[str]
src_dir, # type: str
wheel_download_dir, # type: Optional[str]
build_isolation, # type: bool
req_tracker, # type: RequirementTracker
downloader, # type: Downloader
finder, # type: PackageFinder
require_hashes, # type: bool
use_user_site, # type: bool
):
# type: (...) -> None
super(RequirementPreparer, self).__init__()
self.src_dir = src_dir
self.build_dir = build_dir
self.req_tracker = req_tracker
self.downloader = downloader
self.finder = finder
# Where still-packed archives should be written to. If None, they are
# not saved, and are deleted immediately after unpacking.
self.download_dir = download_dir
# Where still-packed .whl files should be written to. If None, they are
# written to the download_dir parameter. Separate to download_dir to
# permit only keeping wheel archives for pip wheel.
self.wheel_download_dir = wheel_download_dir
# NOTE
# download_dir and wheel_download_dir overlap semantically and may
# be combined if we're willing to have non-wheel archives present in
# the wheelhouse output by 'pip wheel'.
# Is build isolation allowed?
self.build_isolation = build_isolation
# Should hash-checking be required?
self.require_hashes = require_hashes
# Should install in user site-packages?
self.use_user_site = use_user_site
@property
def _download_should_save(self):
# type: () -> bool
if not self.download_dir:
return False
if os.path.exists(self.download_dir):
return True
logger.critical('Could not find download directory')
raise InstallationError(
"Could not find or access download directory '{}'"
.format(self.download_dir))
def prepare_linked_requirement(
self,
req, # type: InstallRequirement
):
# type: (...) -> AbstractDistribution
"""Prepare a requirement that would be obtained from req.link
"""
assert req.link
link = req.link
# TODO: Breakup into smaller functions
if link.scheme == 'file':
path = link.file_path
logger.info('Processing %s', display_path(path))
else:
logger.info('Collecting %s', req.req or req)
download_dir = self.download_dir
if link.is_wheel and self.wheel_download_dir:
# when doing 'pip wheel` we download wheels to a
# dedicated dir.
download_dir = self.wheel_download_dir
if link.is_wheel:
if download_dir:
# When downloading, we only unpack wheels to get
# metadata.
autodelete_unpacked = True
else:
# When installing a wheel, we use the unpacked
# wheel.
autodelete_unpacked = False
else:
# We always delete unpacked sdists after pip runs.
autodelete_unpacked = True
with indent_log():
# Since source_dir is only set for editable requirements.
assert req.source_dir is None
if link.is_existing_dir():
# Build local directories in place.
req.source_dir = link.file_path
else:
req.ensure_has_source_dir(self.build_dir, autodelete_unpacked)
# If a checkout exists, it's unwise to keep going. version
# inconsistencies are logged later, but do not fail the
# installation.
# FIXME: this won't upgrade when there's an existing
# package unpacked in `req.source_dir`
if os.path.exists(os.path.join(req.source_dir, 'setup.py')):
raise PreviousBuildDirError(
"pip can't proceed with requirements '{}' due to a"
" pre-existing build directory ({}). This is "
"likely due to a previous installation that failed"
". pip is being responsible and not assuming it "
"can delete this. Please delete it and try again."
.format(req, req.source_dir)
)
# Now that we have the real link, we can tell what kind of
# requirements we have and raise some more informative errors
# than otherwise. (For example, we can raise VcsHashUnsupported
# for a VCS URL rather than HashMissing.)
if self.require_hashes:
# We could check these first 2 conditions inside
# unpack_url and save repetition of conditions, but then
# we would report less-useful error messages for
# unhashable requirements, complaining that there's no
# hash provided.
if link.is_vcs:
raise VcsHashUnsupported()
elif link.is_existing_dir():
raise DirectoryUrlHashUnsupported()
if not req.original_link and not req.is_pinned:
# Unpinned packages are asking for trouble when a new
# version is uploaded. This isn't a security check, but
# it saves users a surprising hash mismatch in the
# future.
#
# file:/// URLs aren't pinnable, so don't complain
# about them not being pinned.
raise HashUnpinned()
hashes = req.hashes(trust_internet=not self.require_hashes)
if self.require_hashes and not hashes:
# Known-good hashes are missing for this requirement, so
# shim it with a facade object that will provoke hash
# computation and then raise a HashMissing exception
# showing the user what the hash should be.
hashes = MissingHashes()
try:
local_file = unpack_url(
link, req.source_dir, self.downloader, download_dir,
hashes=hashes,
)
except requests.HTTPError as exc:
logger.critical(
'Could not install requirement %s because of error %s',
req,
exc,
)
raise InstallationError(
'Could not install requirement {} because of HTTP '
'error {} for URL {}'.format(req, exc, link)
)
# For use in later processing, preserve the file path on the
# requirement.
if local_file:
req.local_file_path = local_file.path
abstract_dist = _get_prepared_distribution(
req, self.req_tracker, self.finder, self.build_isolation,
)
if download_dir:
if link.is_existing_dir():
logger.info('Link is a directory, ignoring download_dir')
elif local_file:
download_location = os.path.join(
download_dir, link.filename
)
if not os.path.exists(download_location):
shutil.copy(local_file.path, download_location)
logger.info(
'Saved %s', display_path(download_location)
)
if self._download_should_save:
# Make a .zip of the source_dir we already created.
if link.is_vcs:
req.archive(self.download_dir)
return abstract_dist
def prepare_editable_requirement(
self,
req, # type: InstallRequirement
):
# type: (...) -> AbstractDistribution
"""Prepare an editable requirement
"""
assert req.editable, "cannot prepare a non-editable req as editable"
logger.info('Obtaining %s', req)
with indent_log():
if self.require_hashes:
raise InstallationError(
'The editable requirement {} cannot be installed when '
'requiring hashes, because there is no single file to '
'hash.'.format(req)
)
req.ensure_has_source_dir(self.src_dir)
req.update_editable(not self._download_should_save)
abstract_dist = _get_prepared_distribution(
req, self.req_tracker, self.finder, self.build_isolation,
)
if self._download_should_save:
req.archive(self.download_dir)
req.check_if_exists(self.use_user_site)
return abstract_dist
def prepare_installed_requirement(
self,
req, # type: InstallRequirement
skip_reason # type: str
):
# type: (...) -> AbstractDistribution
"""Prepare an already-installed requirement
"""
assert req.satisfied_by, "req should have been satisfied but isn't"
assert skip_reason is not None, (
"did not get skip reason skipped but req.satisfied_by "
"is set to {}".format(req.satisfied_by)
)
logger.info(
'Requirement %s: %s (%s)',
skip_reason, req, req.satisfied_by.version
)
with indent_log():
if self.require_hashes:
logger.debug(
'Since it is already installed, we are trusting this '
'package without checking its hash. To ensure a '
'completely repeatable environment, install into an '
'empty virtualenv.'
)
abstract_dist = InstalledDistribution(req)
return abstract_dist
|
from flask import Blueprint, request, Response
from flask.ext.security import login_required
from arda import mongo
from bson import json_util, SON
from datetime import datetime
from slugify import slugify
mod_api = Blueprint('api', __name__, url_prefix='/api')
@mod_api.route('/service-fee', methods=['GET'])
@login_required
def date_fee_chart():
if(len(request.args) > 0):
from_date = request.args.get('from')
to_date = request.args.get('to')
match = {}
if from_date and to_date:
match = {
"$match": {
'provided_services.service_date': {
'$gte': datetime.strptime(from_date, "%d-%m-%Y"),
'$lte': datetime.strptime(to_date, "%d-%m-%Y")
}
}
}
unwind = {
"$unwind": "$provided_services"
}
group = {
"$group": {
"_id": {
"serviceType": "$provided_services.provided_service.value"
},
'sumOfService': {
"$sum": '$provided_services.service_fee'
},
'countServices': {
"$sum": 1
}
}
}
project = {
"$project": {
"_id": 0,
"serviceType": "$_id.serviceType",
"valueOfService": "$sumOfService",
'countServices': '$countServices'
}
}
# skip the $match stage when no date range was supplied; an empty stage
# document would be rejected by MongoDB
pipeline = [unwind, match, group, project] if match else [unwind, group, project]
json_obj = mongo.db.customers.aggregate(pipeline)
resp = Response(
response=json_util.dumps(json_obj['result']),
mimetype='application/json'
)
return resp
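# Example request for this endpoint (dates are illustrative):
#
#   GET /api/service-fee?from=01-01-2016&to=31-12-2016
#
# returns one JSON object per service type, e.g.
#   [{"serviceType": "...", "valueOfService": 1200.0, "countServices": 3}]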
@mod_api.route('/search/customers', methods=['GET'])
@login_required
def search():
if len(request.args) > 0:
north = request.args.get('north')
center = request.args.get('center')
south = request.args.get('south')
west = request.args.get('west')
east = request.args.get('east')
size = request.args.get('size')
region = request.args.get('region')
company = request.args.get('company')
customer_type = request.args.get('customer_type')
follow_up = request.args.get('followUp')
match_field = {}
if company:
match_field['company.slug'] = slugify(company)
if follow_up:
match_field['future_demand.follow_up'] = {
'$gte': datetime.strptime(follow_up, "%d/%m/%Y")
}
if north != "All" and north != "undefined":
match_field['municipality_region'] = north
if center != "All" and center != "undefined":
match_field['municipality_region'] = center
if south != "All" and south != "undefined":
match_field['municipality_region'] = south
if west != "All" and west != "undefined":
match_field['municipality_region'] = west
if east != "All" and east != "undefined":
match_field['municipality_region'] = east
if size:
if size != "All":
match_field['customer_type.size_category'] = size
if region:
if region != "All":
match_field['region'] = region
if customer_type:
if customer_type != "All":
match_field['customer_type.target_group'] = customer_type
match = {
"$match": match_field
}
group = {
"$group": {
"_id": {
"_id": "$_id",
"target_group": "$costumer_type.target_group",
"first_name": "$first_name.value",
"last_name": "$last_name.value",
"target_group": "$customer_type.target_group",
"company": {
"name": "$company.name",
"slug": "$company.slug",
},
"phone": {
"main_phone": "$phone.main_phone",
"mobile": "$phone.mobile",
},
"email": "$email",
}
}
}
project = {
"$project": {
"_id": 0,
'target_group': '$_id.target_group',
"_id": "$_id._id",
"first_name": "$_id.first_name",
"last_name": "$_id.last_name",
"target_group": "$_id.target_group",
"company": {
"name": "$_id.company.name",
"slug": "$_id.company.slug"
},
'phone': {
"main_phone": "$_id.phone.main_phone",
"mobile": "$_id.phone.mobile",
},
"email": "$_id.email",
}
}
pipeline = [match, group, project]
json_obj = mongo.db.customers.aggregate(pipeline)
resp = Response(
response=json_util.dumps(json_obj['result']),
mimetype='application/json'
)
return resp
@mod_api.route('/search/service', methods=['GET'])
@login_required
def search_service():
if len(request.args) > 0:
service_type = request.args.get('serviceType')
from_dt = request.args.get('from')
to_dt = request.args.get('to')
contactVia = request.args.get('contactVia')
match_fields = {}
if contactVia:
match_fields['provided_services.contactVia'] = contactVia
if service_type:
match_fields['provided_services.provided_service.slug'] = slugify(service_type)
if from_dt and to_dt:
match_fields['provided_services.service_date'] = {
'$gte': datetime.strptime(from_dt, "%d/%m/%Y"),
'$lte': datetime.strptime(to_dt, "%d/%m/%Y")
}
match = {
"$match": match_fields
}
unwind = {
"$unwind": "$provided_services"
}
group = {
"$group": {
"_id": {
'_id': '$_id',
"company": {
'name': '$company.name',
'slug': '$company.slug'
},
"firstName": "$first_name.value",
"lastName": "$last_name.value",
"serviceType": "$provided_services.provided_service.value",
'serviceId': '$provided_services.serviceId',
'contactVia': '$provided_services.contactVia',
"description": "$provided_services.description",
"fee": "$provided_services.service_fee",
"date": "$provided_services.service_date",
"unit_parameter": "$provided_services.unit_param",
"unit_amount": "$provided_services.unit_amount",
}
}
}
project = {
"$project": {
"_id": 0,
'_id': '$_id._id',
'company': {
"name": "$_id.company.name",
"slug": "$_id.company.slug"
},
"first_name": "$_id.firstName",
"last_name": "$_id.lastName",
"serviceType": "$_id.serviceType",
"serviceId": "$_id.serviceId",
"contactVia": "$_id.contactVia",
"description": "$_id.description",
"fee": "$_id.fee",
"date": "$_id.date",
"unit_parameter": "$_id.unit_parameter",
"unit_amount": "$_id.unit_amount",
}
}
pipeline = [unwind, match, group, project]
json_obj = mongo.db.customers.aggregate(pipeline)
resp = Response(
response=json_util.dumps(json_obj['result']),
mimetype='application/json'
)
return resp
@mod_api.route('/services-search', methods=['GET'])
@login_required
def search_service_analytics():
if len(request.args) > 0:
unit_parameter = request.args.get('quantityParameter')
region = request.args.get('region')
from_dt = request.args.get('from')
to_dt = request.args.get('to')
f_name = request.args.get('customerFname')
l_name = request.args.get('customerLname')
company = request.args.get('company')
match_fields = {}
if region:
if region != "All":
match_fields['region'] = region
if f_name:
match_fields['first_name.slug'] = slugify(f_name)
if l_name:
match_fields['last_name.slug'] = slugify(l_name)
if company:
match_fields['company.slug'] = slugify(company)
if from_dt and to_dt:
match_fields['provided_services.service_date'] = {
'$gte': datetime.strptime(from_dt, "%d-%m-%Y"),
'$lte': datetime.strptime(to_dt, "%d-%m-%Y")
}
if unit_parameter:
if unit_parameter != "All":
match_fields['provided_services.unit_param'] = unit_parameter
match = {
"$match": match_fields
}
unwind = {
"$unwind": "$provided_services"
}
group = {
"$group": {
"_id": {
"serviceType": "$provided_services.provided_service.value"
},
'sumOfService': {
"$sum": '$provided_services.service_fee'
},
'countServices': {
"$sum": 1
}
}
}
project = {
"$project": {
"_id": 0,
"serviceType": "$_id.serviceType",
"valueOfService": "$sumOfService",
'countServices': '$countServices'
}
}
pipeline = [unwind, match, group, project]
json_obj = mongo.db.customers.aggregate(pipeline)
resp = Response(
response=json_util.dumps(json_obj['result']),
mimetype='application/json'
)
return resp
@mod_api.route('/services-LineChart', methods=['GET'])
@login_required
def search_service_analytics_linechart():
if len(request.args) > 0:
region = request.args.get('region')
f_name = request.args.get('customerFname')
l_name = request.args.get('customerLname')
company = request.args.get('company')
year = request.args.get('year')
unit_parameter = request.args.get('quantityParameter')
match_fields = {}
if region:
if region != "All":
match_fields['region'] = region
if company:
match_fields['company.slug'] = slugify(company)
if f_name:
match_fields['first_name.slug'] = slugify(f_name)
if l_name:
match_fields['last_name.slug'] = slugify(l_name)
if unit_parameter:
if unit_parameter != "All":
match_fields['provided_services.unit_param'] = unit_parameter
if year:
start_date = "01-01-" + year
end_date = "31-12-" + year
match_fields['provided_services.service_date'] = {
'$gte': datetime.strptime(start_date, "%d-%m-%Y"),
'$lte': datetime.strptime(end_date, "%d-%m-%Y")
}
match = {
"$match": match_fields
}
unwind = {
"$unwind": "$provided_services"
}
group = {
"$group": {
'_id': {
'serviceType': "$provided_services.provided_service.value",
'month': {
'$month': "$provided_services.service_date"
}
},
"sumOfService": {
"$sum": "$provided_services.service_fee"
},
"countServices": {
"$sum": 1
}
},
}
project = {
"$project": {
"serviceType": "$_id.serviceType",
"month": "$_id.month",
"sumOfService": "$sumOfService",
"countServices": "$countServices",
"_id": 0
}
}
sort = {
'$sort':
SON([
('serviceType', 1),
('month', 1)])
}
pipeline = [unwind, match, group, project, sort]
json_obj = mongo.db.customers.aggregate(pipeline)
resp = Response(
response=json_util.dumps(json_obj['result']),
mimetype='application/json'
)
return resp
@mod_api.route('/services-month-linechart', methods=['GET'])
@login_required
def services_month_linechart():
if len(request.args) > 0:
region = request.args.get('region')
f_name = request.args.get('customerFname')
l_name = request.args.get('customerLname')
company = request.args.get('company')
year = request.args.get('year')
month = request.args.get('month')
match_fields = {}
if region:
if region != "All":
match_fields['region'] = region
if company:
match_fields['company.slug'] = slugify(company)
if f_name:
match_fields['first_name.slug'] = slugify(f_name)
if l_name:
match_fields['last_name.slug'] = slugify(l_name)
if month:
# month and year arrive as strings from the query string; convert month
# so the numeric comparisons below work (February is treated as 28 days)
month = int(month)
start_date = "01-%s-%s" % (month, year)
months_with_31 = [1, 3, 5, 7, 8, 10, 12]
if month == 2:
end_date = "28-%s-%s" % (month, year)
elif month in months_with_31:
end_date = "31-%s-%s" % (month, year)
else:
end_date = "30-%s-%s" % (month, year)
match_fields['provided_services.service_date'] = {
'$gte': datetime.strptime(start_date, "%d-%m-%Y"),
'$lte': datetime.strptime(end_date, "%d-%m-%Y")
}
match = {
"$match": match_fields
}
unwind = {
"$unwind": "$provided_services"
}
group = {
"$group": {
'_id': {
'serviceType': "$provided_services.provided_service.value",
'day': {
'$dayOfMonth': "$provided_services.service_date"
}
},
"sumOfService": {
"$sum": "$provided_services.service_fee"
},
"countServices": {
"$sum": 1
}
},
}
project = {
"$project": {
"serviceType": "$_id.serviceType",
"day": "$_id.day",
"sumOfService": "$sumOfService",
"countServices": "$countServices",
"_id": 0
}
}
sort = {
'$sort':
SON([
('serviceType', 1),
('day', 1)])
}
pipeline = [unwind, match, group, project, sort]
json_obj = mongo.db.customers.aggregate(pipeline)
resp = Response(
response=json_util.dumps(json_obj['result']),
mimetype='application/json'
)
return resp
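# Example request for the month line chart (parameters are illustrative):
#
#   GET /api/services-month-linechart?year=2016&month=2&region=All
#
# returns per-day sums and counts of provided services for that month,
# sorted by service type and day of month.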
|
#
# Copyright (C) University College London, 2007-2012, all rights reserved.
#
# This file is part of HemeLB and is provided to you under the terms of
# the GNU LGPL. Please see LICENSE in the top level directory for full
# details.
#
import numpy as np
import xdrlib
import warnings
from .. import HemeLbMagicNumber
SnapshotMagicNumber = 0x736e7004
def HemeLbSnapshot(filename):
"""Guess which file format we were given and use the correct class
to open it.
We have to handle a number of cases:
- the original text format;
- the XDR copy thereof, and
- the updated (August 2011) version with format magic and version
numbers and more metadata.
"""
start = file(filename).read(8)
reader = xdrlib.Unpacker(start)
firstInt = reader.unpack_uint()
if firstInt == HemeLbMagicNumber:
assert reader.unpack_uint() == SnapshotMagicNumber
cls = VersionedXdrSnapshot
elif firstInt == 0 or firstInt == 1 or firstInt == 2:
# It is the basic Xdr format that starts with the stablity flag
cls = XdrSnapshotVersionOne
# Maybe text? If so, the first character should be a '0', '1' or '2', followed by a newline
elif (start[0] == '0' or start[0] == '1' or start[0] == '2') and start[1] == '\n':
cls = TextSnapshot
else:
raise ValueError('Cannot determine version of snapshot file "%s"' % filename)
return cls(filename)
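# Usage sketch (the filename is a placeholder):
#
#   snap = HemeLbSnapshot('snapshot_00001000.dat')
#   print(snap.voxel_count, snap.voxel_size)
#   print(snap.pressure.mean(), snap.velocity.shape)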
class BaseSnapshot(np.recarray):
"""Base class wrapping a HemeLB snapshot.
Snap is basically a numpy record array with the following fields:
- id (int) -- an id number (basically the index of the point in the
file
- position (3x float) -- the position in input space (m)
- grid (3x int) -- the (x, y, z) coordinates in lattice units
- pressure (float) -- the pressure in physical units (mmHg)
- velocity (3x float) -- (x,y,z) components of the velocity field
in physical units (m/s)
- stress (float) -- the von Mises stress in physical units (Pa)
It has a number of additional properties (see __readHeader for full details)
"""
_raw_row = [('id', int),
('position', float, (3,)),
('grid', int, (3,)),
('pressure', float),
('velocity', float, (3,)),
('stress', float)]
_readable_row = np.dtype(_raw_row[2:])
row = np.dtype(_raw_row)
_attrs = {'stable': None,
'voxel_size': None,
'origin': np.array([np.nan, np.nan, np.nan]),
'bb_min': None,
'bb_max': None,
'bb_len': None,
'voxel_count': None}
# header = len(_attrs)
def __new__(cls, filename):
"""Create a new instance. Numpy array subclasses use this
method instead of __init__ for initialization.
"""
headerDict = cls._readHeader(filename)
noindex = cls._load(filename, headerDict)
index = np.recarray(shape=noindex.shape, dtype=cls.row)
for el in cls._raw_row[2:]:
key = el[0]
index.__setattr__(key, noindex.__getattribute__(key))
continue
index.id = np.arange(len(noindex))
try:
index.position = cls._computePosition(index.grid, headerDict)
except:
index.position = np.nan
pass
obj = index.view(cls)
# Set the attributes on the snapshot
for headerField in headerDict:
setattr(obj, headerField, headerDict[headerField])
continue
return obj
def __array_finalize__(self, parent):
"""Numpy special method."""
if parent is None:
return
for a in self._attrs:
setattr(self, a, getattr(parent, a, self._attrs[a]))
continue
return
pass
class PositionlessSnapshot(BaseSnapshot):
"""Base class for the original text snapshots and the XDR
equivalent. These lack the data required to compute the positions
of grid points. It is supplied through the coords.asc file
generated by the old setuptool.
"""
def computePosition(self, coordsFile):
"""Given the coordinate file from the segtool, calculate all
the lattice positions' coordinates.
"""
from os.path import exists
if exists (coordsFile):
from ...coordinates import Transformer
trans = Transformer(coordsFile)
self.position = 1e-3 * trans.siteToStl(self.grid + self.bb_min)
return
else:
# The coords file is missing!
warnings.warn('Missing coordinates file "%s", assuming origin at [0,0,0]' % coordsFile, stacklevel=2)
self.position = (self.grid + self.bb_min) * self.voxel_size # + origin, but we'll just assume it's zero here.
pass
class TextSnapshot(PositionlessSnapshot):
"""Read a text snapshot.
"""
nHeaderLines = 6
@classmethod
def _readHeader(cls, filename):
"""Read the header lines, according to:
0- Flag for simulation stability, 0 or 1
1- Voxel size in physical units (units of m)
2- vertex coords of the minimum bounding box with minimum values (x, y and z values)
3- vertex coords of the minimum bounding box with maximum values (x, y and z values)
4- #voxels within the minimum bounding box along the x, y, z axes (3 values)
5- total number of fluid voxels
"""
f = file(filename)
stable = int(f.readline())
voxel_size = float(f.readline())
bb_min = np.array([int(x) for x in f.readline().split()])
bb_max = np.array([int(x) for x in f.readline().split()])
bb_len = np.array([int(x) for x in f.readline().split()])
voxel_count = int(f.readline())
return {'stable': stable,
'voxel_size': voxel_size,
'bb_min': bb_min,
'bb_max': bb_max,
'bb_len': bb_len,
'voxel_count': voxel_count}
@classmethod
def _load(cls, filename, header):
return np.loadtxt(filename,
skiprows=cls.nHeaderLines,
dtype=cls._readable_row).view(np.recarray)
pass
class XdrVoxelFormatOneSnapshot(object):
@classmethod
def _load(cls, filename, header):
# Skip past the header, slurp data, create XDR object
f = file(filename)
f.seek(cls._headerLengthBytes)
reader = xdrlib.Unpacker(f.read())
ans = np.recarray((header['voxel_count'],), dtype=cls._readable_row)
# Read all the voxels.
for i in xrange(header['voxel_count']):
ans[i] = ((reader.unpack_int(),
reader.unpack_int(),
reader.unpack_int()),
reader.unpack_float(),
(reader.unpack_float(),
reader.unpack_float(),
reader.unpack_float()),
reader.unpack_float())
continue
reader.done()
return ans
pass
class XdrSnapshotVersionOne(PositionlessSnapshot, XdrVoxelFormatOneSnapshot):
"""Read an old-style XDR snapshot.
"""
# int float 3x int 3x int 3x int int
_headerLengthBytes = 4 + 8 + 3*4 + 3*4 + 3*4 + 4
@classmethod
def _readHeader(cls, filename):
"""Read the header lines, according to:
0- Flag for simulation stability, 0 or 1
1- Voxel size in physical units (units of m)
2- vertex coords of the minimum bounding box with minimum values (x, y and z values)
3- vertex coords of the minimum bounding box with maximum values (x, y and z values)
4- #voxels within the minimum bounding box along the x, y, z axes (3 values)
5- total number of fluid voxels
"""
reader = xdrlib.Unpacker(file(filename).read(cls._headerLengthBytes))
header = {}
header['stable'] = reader.unpack_int()
header['voxel_size'] = reader.unpack_double()
header['bb_min'] = np.array((reader.unpack_int(),
reader.unpack_int(),
reader.unpack_int()))
header['bb_max'] = np.array((reader.unpack_int(),
reader.unpack_int(),
reader.unpack_int()))
header['bb_len'] = np.array((reader.unpack_int(),
reader.unpack_int(),
reader.unpack_int()))
header['voxel_count'] = reader.unpack_int()
return header
pass
class XdrSnapshotVersionTwo(BaseSnapshot, XdrVoxelFormatOneSnapshot):
"""Read snapshots for the updated format as for August 2011.
"""
_headerLengthBytes = 80
VersionNumber = 2
@classmethod
def _readHeader(cls, filename):
"""Read the header lines, according to description in Code/io/formats/snapshot.h
"""
reader = xdrlib.Unpacker(file(filename).read(cls._headerLengthBytes))
header = {}
assert reader.unpack_uint() == HemeLbMagicNumber
assert reader.unpack_uint() == SnapshotMagicNumber
assert reader.unpack_uint() == cls.VersionNumber
bodyStart = reader.unpack_uint()
assert bodyStart == cls._headerLengthBytes
header['stable'] = reader.unpack_int()
header['voxel_size'] = reader.unpack_double()
header['origin'] = np.array((reader.unpack_double(),
reader.unpack_double(),
reader.unpack_double()))
header['bb_min'] = np.array((reader.unpack_int(),
reader.unpack_int(),
reader.unpack_int()))
header['bb_max'] = np.array((reader.unpack_int(),
reader.unpack_int(),
reader.unpack_int()))
header['bb_len'] = header['bb_max'] - header['bb_min'] + 1
        header['voxel_count'] = reader.unpack_int()
return header
@classmethod
def _computePosition(cls, grid, header):
return (grid + header['bb_min']) * header['voxel_size'] + header['origin']
pass
def VersionedXdrSnapshot(filename):
"""Examine the file and dispatch to the appropriate constructor.
"""
# Need the two magic numbers and the version number, i.e. 12 bytes
reader = xdrlib.Unpacker(file(filename).read(12))
assert reader.unpack_uint() == HemeLbMagicNumber
assert reader.unpack_uint() == SnapshotMagicNumber
version = reader.unpack_uint()
if version == 2:
return XdrSnapshotVersionTwo(filename)
raise ValueError('Unknown version number (%d) in file "%s"' % (version, filename))
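# Illustrative use of the dispatcher (a sketch; the filename is hypothetical):
#
#     snap = VersionedXdrSnapshot('snapshot.dat')   # returns an XdrSnapshotVersionTwo
#
# Old-style (version 1) files carry no magic numbers in their header (see
# XdrSnapshotVersionOne above), so they have to be opened with
# XdrSnapshotVersionOne directly rather than through this dispatcher.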
|
import unittest
import sys
from libcloud.compute.base import Node
from libcloud.compute.drivers.onapp import OnAppNodeDriver
from libcloud.test import MockHttp, LibcloudTestCase
from libcloud.test.secrets import ONAPP_PARAMS
from libcloud.test.file_fixtures import ComputeFileFixtures
from libcloud.utils.py3 import httplib
class OnAppNodeTestCase(LibcloudTestCase):
driver_klass = OnAppNodeDriver
def setUp(self):
self.driver_klass.connectionCls.conn_class = OnAppMockHttp
self.driver = OnAppNodeDriver(*ONAPP_PARAMS)
def test_create_node(self):
node = self.driver.create_node(
name='onapp-new-fred',
ex_memory=512,
ex_cpus=4,
ex_cpu_shares=4,
ex_hostname='onapp-new-fred',
ex_template_id='template_id',
ex_primary_disk_size=100,
ex_swap_disk_size=1,
ex_required_virtual_machine_build=0,
ex_required_ip_address_assignment=0
)
extra = node.extra
self.assertEqual('onapp-new-fred', node.name)
        self.assertEqual('456789', node.id)
self.assertEqual('delivered', node.state)
self.assertEqual(True, extra['booted'])
self.assertEqual('passwd', extra['initial_root_password'])
self.assertEqual('8.8.8.8', extra['local_remote_access_ip_address'])
self.assertEqual(['192.168.15.73'], node.private_ips)
self.assertEqual([], node.public_ips)
def test_destroy_node(self):
node = Node('identABC', 'testnode',
['123.123.123.123'], [],
{'state': 'test', 'template_id': 88}, None)
res = self.driver.destroy_node(node=node)
self.assertTrue(res)
def test_list_nodes(self):
nodes = self.driver.list_nodes()
extra = nodes[0].extra
private_ips = nodes[0].private_ips
self.assertEqual(1, len(nodes))
self.assertEqual('onapp-fred', nodes[0].name)
self.assertEqual('123456', nodes[0].id)
self.assertEqual(True, extra['booted'])
self.assertEqual('passwd', extra['initial_root_password'])
self.assertEqual('9.9.9.9', extra['local_remote_access_ip_address'])
self.assertEqual(1, len(private_ips))
self.assertEqual('192.168.15.72', private_ips[0])
def test_list_images(self):
images = self.driver.list_images()
extra = images[0].extra
self.assertEqual(1, len(images))
self.assertEqual('CentOS 5.11 x64', images[0].name)
self.assertEqual('123456', images[0].id)
self.assertEqual(True, extra['allowed_swap'])
self.assertEqual(256, extra['min_memory_size'])
self.assertEqual('rhel', extra['distribution'])
def test_list_key_pairs(self):
keys = self.driver.list_key_pairs()
self.assertEqual(2, len(keys))
self.assertEqual(1, keys[0].name)
self.assertIsNotNone(keys[0].public_key)
self.assertIsNotNone(keys[1].public_key)
def test_get_key_pair(self):
key = self.driver.get_key_pair(1)
self.assertEqual(1, key.name)
self.assertIsNotNone(key.public_key)
def test_import_key_pair_from_string(self):
key = self.driver.import_key_pair_from_string(
'name',
'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQC8uuUq')
self.assertEqual(3, key.name)
self.assertEqual(
'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQC8uuUq',
key.public_key)
def test_delete_key_pair(self):
key = self.driver.get_key_pair(1)
response = self.driver.delete_key_pair(key)
self.assertTrue(response)
class OnAppMockHttp(MockHttp):
fixtures = ComputeFileFixtures('onapp')
def _virtual_machines_json(self, method, url, body, headers):
if method == 'GET':
body = self.fixtures.load('list_nodes.json')
else:
body = self.fixtures.load('create_node.json')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _virtual_machines_identABC_json(self, method, url, body, headers):
return (
httplib.NO_CONTENT,
'',
{},
httplib.responses[httplib.NO_CONTENT]
)
def _templates_json(self, method, url, body, headers):
body = self.fixtures.load('list_images.json')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _profile_json(self, method, url, body, headers):
body = self.fixtures.load('profile.json')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _users_123_ssh_keys_json(self, method, url, body, headers):
if method == 'GET':
body = self.fixtures.load('list_key_pairs.json')
else:
body = self.fixtures.load('import_key_pair.json')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _users_123_ssh_keys_1_json(self, method, url, body, headers):
body = self.fixtures.load('get_key_pair.json')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _settings_ssh_keys_1_json(self, method, url, body, headers):
return (httplib.NO_CONTENT, '', {},
httplib.responses[httplib.NO_CONTENT])
if __name__ == '__main__':
sys.exit(unittest.main())
|
import _plotly_utils.basevalidators
class LightingValidator(_plotly_utils.basevalidators.CompoundValidator):
def __init__(self, plotly_name="lighting", parent_name="volume", **kwargs):
super(LightingValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
data_class_str=kwargs.pop("data_class_str", "Lighting"),
data_docs=kwargs.pop(
"data_docs",
"""
ambient
Ambient light increases overall color
visibility but can wash out the image.
diffuse
Represents the extent that incident rays are
reflected in a range of angles.
facenormalsepsilon
Epsilon for face normals calculation avoids
math issues arising from degenerate geometry.
fresnel
Represents the reflectance as a dependency of
the viewing angle; e.g. paper is reflective
when viewing it from the edge of the paper
(almost 90 degrees), causing shine.
roughness
Alters specular reflection; the rougher the
surface, the wider and less contrasty the
shine.
specular
Represents the level that incident rays are
reflected in a single direction, causing shine.
vertexnormalsepsilon
Epsilon for vertex normals calculation avoids
math issues arising from degenerate geometry.
""",
),
**kwargs
)
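# Illustrative use of the corresponding "lighting" property on a volume trace
# (a sketch, not part of this generated validator module; the parameter values
# are hypothetical):
#
#     import plotly.graph_objects as go
#     fig = go.Figure(go.Volume(lighting=dict(ambient=0.6, diffuse=0.8, specular=0.3)))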
|
import logging
from .reaction_scaffold import ReactionScaffold
from cliff.command import Command
class Reaction(Command):
"Generates a new Reaction scaffold using model file."
log = logging.getLogger(__name__)
def get_parser(self, prog_name):
parser = super(Reaction, self).get_parser(prog_name)
parser.add_argument('--force-overwrite', '-f', action='store_true')
parser.add_argument('model', nargs='?', default='')
return parser
def take_action(self, parsed_args):
model = parsed_args.model
force_overwrite = parsed_args.force_overwrite
if model == '':
raise RuntimeError('Cannot create a new reaction without a reaction model')
self.log.debug('reaction processing started...\n')
scaf = ReactionScaffold(self.log, model, force_overwrite)
ret = scaf.generate()
self.log.debug('reaction processing done.\n')
        if ret:
self.log.info('successfully created scaffolding for reaction templates from model "' + model + '"')
else:
self.log.info('scaffolding for reaction from model "' + model + '" failed!')
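# Illustrative invocation from a cliff-based command line (the program name and
# model path are hypothetical; the real entry point depends on how this command
# is registered):
#
#     $ myapp reaction path/to/model --force-overwrite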
|
"""Support for August sensors."""
import logging
from august.activity import ActivityType
from homeassistant.components.sensor import DEVICE_CLASS_BATTERY
from homeassistant.const import ATTR_ENTITY_PICTURE, UNIT_PERCENTAGE
from homeassistant.core import callback
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.entity_registry import async_get_registry
from homeassistant.helpers.restore_state import RestoreEntity
from .const import (
ATTR_OPERATION_AUTORELOCK,
ATTR_OPERATION_KEYPAD,
ATTR_OPERATION_METHOD,
ATTR_OPERATION_REMOTE,
DATA_AUGUST,
DOMAIN,
OPERATION_METHOD_AUTORELOCK,
OPERATION_METHOD_KEYPAD,
OPERATION_METHOD_MOBILE_DEVICE,
OPERATION_METHOD_REMOTE,
)
from .entity import AugustEntityMixin
_LOGGER = logging.getLogger(__name__)
def _retrieve_device_battery_state(detail):
"""Get the latest state of the sensor."""
return detail.battery_level
def _retrieve_linked_keypad_battery_state(detail):
"""Get the latest state of the sensor."""
return detail.battery_percentage
SENSOR_TYPES_BATTERY = {
"device_battery": {"state_provider": _retrieve_device_battery_state},
"linked_keypad_battery": {"state_provider": _retrieve_linked_keypad_battery_state},
}
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the August sensors."""
data = hass.data[DOMAIN][config_entry.entry_id][DATA_AUGUST]
devices = []
migrate_unique_id_devices = []
operation_sensors = []
batteries = {
"device_battery": [],
"linked_keypad_battery": [],
}
for device in data.doorbells:
batteries["device_battery"].append(device)
for device in data.locks:
batteries["device_battery"].append(device)
batteries["linked_keypad_battery"].append(device)
operation_sensors.append(device)
for device in batteries["device_battery"]:
state_provider = SENSOR_TYPES_BATTERY["device_battery"]["state_provider"]
detail = data.get_device_detail(device.device_id)
if detail is None or state_provider(detail) is None:
_LOGGER.debug(
"Not adding battery sensor for %s because it is not present",
device.device_name,
)
continue
_LOGGER.debug(
"Adding battery sensor for %s", device.device_name,
)
devices.append(AugustBatterySensor(data, "device_battery", device, device))
for device in batteries["linked_keypad_battery"]:
detail = data.get_device_detail(device.device_id)
if detail.keypad is None:
_LOGGER.debug(
"Not adding keypad battery sensor for %s because it is not present",
device.device_name,
)
continue
_LOGGER.debug(
"Adding keypad battery sensor for %s", device.device_name,
)
keypad_battery_sensor = AugustBatterySensor(
data, "linked_keypad_battery", detail.keypad, device
)
devices.append(keypad_battery_sensor)
migrate_unique_id_devices.append(keypad_battery_sensor)
for device in operation_sensors:
devices.append(AugustOperatorSensor(data, device))
await _async_migrate_old_unique_ids(hass, migrate_unique_id_devices)
async_add_entities(devices, True)
async def _async_migrate_old_unique_ids(hass, devices):
"""Keypads now have their own serial number."""
registry = await async_get_registry(hass)
for device in devices:
old_entity_id = registry.async_get_entity_id(
"sensor", DOMAIN, device.old_unique_id
)
if old_entity_id is not None:
_LOGGER.debug(
"Migrating unique_id from [%s] to [%s]",
device.old_unique_id,
device.unique_id,
)
registry.async_update_entity(old_entity_id, new_unique_id=device.unique_id)
class AugustOperatorSensor(AugustEntityMixin, RestoreEntity, Entity):
"""Representation of an August lock operation sensor."""
def __init__(self, data, device):
"""Initialize the sensor."""
super().__init__(data, device)
self._data = data
self._device = device
self._state = None
self._operated_remote = None
self._operated_keypad = None
self._operated_autorelock = None
self._operated_time = None
self._available = False
self._entity_picture = None
self._update_from_data()
@property
def available(self):
"""Return the availability of this sensor."""
return self._available
@property
def state(self):
"""Return the state of the sensor."""
return self._state
@property
def name(self):
"""Return the name of the sensor."""
return f"{self._device.device_name} Operator"
@callback
def _update_from_data(self):
"""Get the latest state of the sensor and update activity."""
lock_activity = self._data.activity_stream.get_latest_device_activity(
self._device_id, [ActivityType.LOCK_OPERATION]
)
if lock_activity is not None:
self._available = True
self._state = lock_activity.operated_by
self._operated_remote = lock_activity.operated_remote
self._operated_keypad = lock_activity.operated_keypad
self._operated_autorelock = lock_activity.operated_autorelock
self._entity_picture = lock_activity.operator_thumbnail_url
@property
def device_state_attributes(self):
"""Return the device specific state attributes."""
attributes = {}
if self._operated_remote is not None:
attributes[ATTR_OPERATION_REMOTE] = self._operated_remote
if self._operated_keypad is not None:
attributes[ATTR_OPERATION_KEYPAD] = self._operated_keypad
if self._operated_autorelock is not None:
attributes[ATTR_OPERATION_AUTORELOCK] = self._operated_autorelock
if self._operated_remote:
attributes[ATTR_OPERATION_METHOD] = OPERATION_METHOD_REMOTE
elif self._operated_keypad:
attributes[ATTR_OPERATION_METHOD] = OPERATION_METHOD_KEYPAD
elif self._operated_autorelock:
attributes[ATTR_OPERATION_METHOD] = OPERATION_METHOD_AUTORELOCK
else:
attributes[ATTR_OPERATION_METHOD] = OPERATION_METHOD_MOBILE_DEVICE
return attributes
async def async_added_to_hass(self):
"""Restore ATTR_CHANGED_BY on startup since it is likely no longer in the activity log."""
await super().async_added_to_hass()
last_state = await self.async_get_last_state()
if not last_state:
return
self._state = last_state.state
if ATTR_ENTITY_PICTURE in last_state.attributes:
self._entity_picture = last_state.attributes[ATTR_ENTITY_PICTURE]
if ATTR_OPERATION_REMOTE in last_state.attributes:
self._operated_remote = last_state.attributes[ATTR_OPERATION_REMOTE]
if ATTR_OPERATION_KEYPAD in last_state.attributes:
self._operated_keypad = last_state.attributes[ATTR_OPERATION_KEYPAD]
if ATTR_OPERATION_AUTORELOCK in last_state.attributes:
self._operated_autorelock = last_state.attributes[ATTR_OPERATION_AUTORELOCK]
@property
def entity_picture(self):
"""Return the entity picture to use in the frontend, if any."""
return self._entity_picture
@property
def unique_id(self) -> str:
"""Get the unique id of the device sensor."""
return f"{self._device_id}_lock_operator"
class AugustBatterySensor(AugustEntityMixin, Entity):
"""Representation of an August sensor."""
def __init__(self, data, sensor_type, device, old_device):
"""Initialize the sensor."""
super().__init__(data, device)
self._data = data
self._sensor_type = sensor_type
self._device = device
self._old_device = old_device
self._state = None
self._available = False
self._update_from_data()
@property
def available(self):
"""Return the availability of this sensor."""
return self._available
@property
def state(self):
"""Return the state of the sensor."""
return self._state
@property
def unit_of_measurement(self):
"""Return the unit of measurement."""
return UNIT_PERCENTAGE
@property
def device_class(self):
"""Return the class of this device, from component DEVICE_CLASSES."""
return DEVICE_CLASS_BATTERY
@property
def name(self):
"""Return the name of the sensor."""
device_name = self._device.device_name
return f"{device_name} Battery"
@callback
def _update_from_data(self):
"""Get the latest state of the sensor."""
state_provider = SENSOR_TYPES_BATTERY[self._sensor_type]["state_provider"]
self._state = state_provider(self._detail)
self._available = self._state is not None
@property
def unique_id(self) -> str:
"""Get the unique id of the device sensor."""
return f"{self._device_id}_{self._sensor_type}"
@property
def old_unique_id(self) -> str:
"""Get the old unique id of the device sensor."""
return f"{self._old_device.device_id}_{self._sensor_type}"
|
## This file is part of Scapy
## See http://www.secdev.org/projects/scapy for more informations
## Copyright (C) Philippe Biondi <[email protected]>
## This program is published under a GPLv2 license
"""
Linux specific functions.
"""
from __future__ import with_statement
import sys,os,struct,socket,time
from select import select
from fcntl import ioctl
import scapy.utils
import scapy.utils6
from scapy.packet import Packet, Padding
from scapy.config import conf
from scapy.data import *
from scapy.supersocket import SuperSocket
import scapy.arch
from scapy.error import warning, Scapy_Exception, log_loading, log_interactive
from scapy.arch.common import get_if
# From bits/ioctls.h
SIOCGIFHWADDR = 0x8927 # Get hardware address
SIOCGIFADDR = 0x8915 # get PA address
SIOCGIFNETMASK = 0x891b # get network PA mask
SIOCGIFNAME = 0x8910 # get iface name
SIOCSIFLINK = 0x8911 # set iface channel
SIOCGIFCONF = 0x8912 # get iface list
SIOCGIFFLAGS = 0x8913 # get flags
SIOCSIFFLAGS = 0x8914 # set flags
SIOCGIFINDEX = 0x8933 # name -> if_index mapping
SIOCGIFCOUNT = 0x8938 # get number of devices
SIOCGSTAMP = 0x8906 # get packet timestamp (as a timeval)
# From if.h
IFF_UP = 0x1 # Interface is up.
IFF_BROADCAST = 0x2 # Broadcast address valid.
IFF_DEBUG = 0x4 # Turn on debugging.
IFF_LOOPBACK = 0x8 # Is a loopback net.
IFF_POINTOPOINT = 0x10 # Interface is point-to-point link.
IFF_NOTRAILERS = 0x20 # Avoid use of trailers.
IFF_RUNNING = 0x40 # Resources allocated.
IFF_NOARP = 0x80 # No address resolution protocol.
IFF_PROMISC = 0x100 # Receive all packets.
# From netpacket/packet.h
PACKET_ADD_MEMBERSHIP = 1
PACKET_DROP_MEMBERSHIP = 2
PACKET_RECV_OUTPUT = 3
PACKET_RX_RING = 5
PACKET_STATISTICS = 6
PACKET_MR_MULTICAST = 0
PACKET_MR_PROMISC = 1
PACKET_MR_ALLMULTI = 2
# From bits/socket.h
SOL_PACKET = 263
# From asm/socket.h
SO_ATTACH_FILTER = 26
SOL_SOCKET = 1
# From net/route.h
RTF_UP = 0x0001 # Route usable
RTF_REJECT = 0x0200
# From if_packet.h
PACKET_HOST = 0 # To us
PACKET_BROADCAST = 1 # To all
PACKET_MULTICAST = 2 # To group
PACKET_OTHERHOST = 3 # To someone else
PACKET_OUTGOING = 4 # Outgoing of any type
PACKET_LOOPBACK = 5 # MC/BRD frame looped back
PACKET_USER = 6 # To user space
PACKET_KERNEL = 7 # To kernel space
PACKET_FASTROUTE = 6 # Fastrouted frame
# Unused, PACKET_FASTROUTE and PACKET_LOOPBACK are invisible to user space
LOOPBACK_NAME="lo"
with os.popen("%s -V 2> /dev/null" % conf.prog.tcpdump) as _f:
if _f.close() >> 8 == 0x7f:
log_loading.warning("Failed to execute tcpdump. Check it is installed and in the PATH")
TCPDUMP=0
else:
TCPDUMP=1
del(_f)
def get_if_raw_hwaddr(iff):
return struct.unpack("16xh6s8x",get_if(iff,SIOCGIFHWADDR))
def get_if_raw_addr(iff):
try:
return get_if(iff, SIOCGIFADDR)[20:24]
except IOError:
return "\0\0\0\0"
def get_if_list():
try:
f=open("/proc/net/dev","r")
except IOError:
warning("Can't open /proc/net/dev !")
return []
lst = []
f.readline()
f.readline()
for l in f:
lst.append(l.split(":")[0].strip())
return lst
def get_working_if():
for i in get_if_list():
if i == LOOPBACK_NAME:
continue
ifflags = struct.unpack("16xH14x",get_if(i,SIOCGIFFLAGS))[0]
if ifflags & IFF_UP:
return i
return LOOPBACK_NAME
def attach_filter(s, bpf_filter, iface):
    # XXX We generate the filter on the interface conf.iface
    # because tcpdump opens the "any" interface and ppp interfaces
    # in cooked mode. As we use them in raw mode, the filter will not
    # work... one solution could be to use the "any" interface and translate
    # the filter from cooked mode to raw mode
if not TCPDUMP:
return
try:
f = os.popen("%s -i %s -ddd -s 1600 '%s'" % (
conf.prog.tcpdump,
conf.iface if iface is None else iface,
bpf_filter,
))
except OSError,msg:
        log_interactive.warning("Failed to execute tcpdump: (%s)" % msg)
return
lines = f.readlines()
if f.close():
raise Scapy_Exception("Filter parse error")
nb = int(lines[0])
bpf = ""
for l in lines[1:]:
bpf += struct.pack("HBBI",*map(long,l.split()))
# XXX. Argl! We need to give the kernel a pointer on the BPF,
# python object header seems to be 20 bytes. 36 bytes for x86 64bits arch.
if scapy.arch.X86_64 or scapy.arch.ARM_64:
bpfh = struct.pack("HL", nb, id(bpf)+36)
else:
bpfh = struct.pack("HI", nb, id(bpf)+20)
s.setsockopt(SOL_SOCKET, SO_ATTACH_FILTER, bpfh)
def set_promisc(s,iff,val=1):
mreq = struct.pack("IHH8s", get_if_index(iff), PACKET_MR_PROMISC, 0, "")
if val:
cmd = PACKET_ADD_MEMBERSHIP
else:
cmd = PACKET_DROP_MEMBERSHIP
s.setsockopt(SOL_PACKET, cmd, mreq)
def read_routes():
try:
f=open("/proc/net/route","r")
except IOError:
warning("Can't open /proc/net/route !")
return []
routes = []
s=socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
ifreq = ioctl(s, SIOCGIFADDR,struct.pack("16s16x",LOOPBACK_NAME))
addrfamily = struct.unpack("h",ifreq[16:18])[0]
if addrfamily == socket.AF_INET:
ifreq2 = ioctl(s, SIOCGIFNETMASK,struct.pack("16s16x",LOOPBACK_NAME))
msk = socket.ntohl(struct.unpack("I",ifreq2[20:24])[0])
dst = socket.ntohl(struct.unpack("I",ifreq[20:24])[0]) & msk
ifaddr = scapy.utils.inet_ntoa(ifreq[20:24])
routes.append((dst, msk, "0.0.0.0", LOOPBACK_NAME, ifaddr))
else:
warning("Interface lo: unkown address family (%i)"% addrfamily)
for l in f.readlines()[1:]:
iff,dst,gw,flags,x,x,x,msk,x,x,x = l.split()
flags = int(flags,16)
if flags & RTF_UP == 0:
continue
if flags & RTF_REJECT:
continue
try:
ifreq = ioctl(s, SIOCGIFADDR,struct.pack("16s16x",iff))
except IOError: # interface is present in routing tables but does not have any assigned IP
ifaddr="0.0.0.0"
else:
addrfamily = struct.unpack("h",ifreq[16:18])[0]
if addrfamily == socket.AF_INET:
ifaddr = scapy.utils.inet_ntoa(ifreq[20:24])
else:
warning("Interface %s: unkown address family (%i)"%(iff, addrfamily))
continue
routes.append((socket.htonl(long(dst,16))&0xffffffffL,
socket.htonl(long(msk,16))&0xffffffffL,
scapy.utils.inet_ntoa(struct.pack("I",long(gw,16))),
iff, ifaddr))
f.close()
return routes
############
### IPv6 ###
############
def in6_getifaddr():
"""
Returns a list of 3-tuples of the form (addr, scope, iface) where
    'addr' is the address of scope 'scope' associated with the interface
    'iface'.
This is the list of all addresses of all interfaces available on
the system.
"""
ret = []
try:
f = open("/proc/net/if_inet6","r")
except IOError, err:
return ret
l = f.readlines()
for i in l:
# addr, index, plen, scope, flags, ifname
tmp = i.split()
addr = struct.unpack('4s4s4s4s4s4s4s4s', tmp[0])
addr = scapy.utils6.in6_ptop(':'.join(addr))
ret.append((addr, int(tmp[3], 16), tmp[5])) # (addr, scope, iface)
return ret
def read_routes6():
try:
f = open("/proc/net/ipv6_route","r")
except IOError, err:
return []
# 1. destination network
# 2. destination prefix length
# 3. source network displayed
# 4. source prefix length
# 5. next hop
# 6. metric
# 7. reference counter (?!?)
# 8. use counter (?!?)
# 9. flags
# 10. device name
routes = []
def proc2r(p):
ret = struct.unpack('4s4s4s4s4s4s4s4s', p)
ret = ':'.join(ret)
return scapy.utils6.in6_ptop(ret)
lifaddr = in6_getifaddr()
for l in f.readlines():
d,dp,s,sp,nh,m,rc,us,fl,dev = l.split()
fl = int(fl, 16)
if fl & RTF_UP == 0:
continue
if fl & RTF_REJECT:
continue
d = proc2r(d) ; dp = int(dp, 16)
s = proc2r(s) ; sp = int(sp, 16)
nh = proc2r(nh)
cset = [] # candidate set (possible source addresses)
if dev == LOOPBACK_NAME:
if d == '::':
continue
cset = ['::1']
else:
devaddrs = filter(lambda x: x[2] == dev, lifaddr)
cset = scapy.utils6.construct_source_candidate_set(d, dp, devaddrs, LOOPBACK_NAME)
if len(cset) != 0:
routes.append((d, dp, nh, dev, cset))
f.close()
return routes
def get_if_index(iff):
return int(struct.unpack("I",get_if(iff, SIOCGIFINDEX)[16:20])[0])
if os.uname()[4] in [ 'x86_64', 'aarch64' ]:
def get_last_packet_timestamp(sock):
ts = ioctl(sock, SIOCGSTAMP, "1234567890123456")
s,us = struct.unpack("QQ",ts)
return s+us/1000000.0
else:
def get_last_packet_timestamp(sock):
ts = ioctl(sock, SIOCGSTAMP, "12345678")
s,us = struct.unpack("II",ts)
return s+us/1000000.0
def _flush_fd(fd):
if type(fd) is not int:
fd = fd.fileno()
while 1:
r,w,e = select([fd],[],[],0)
if r:
os.read(fd,MTU)
else:
break
class L3PacketSocket(SuperSocket):
desc = "read/write packets at layer 3 using Linux PF_PACKET sockets"
def __init__(self, type = ETH_P_ALL, filter=None, promisc=None, iface=None, nofilter=0):
self.type = type
self.ins = socket.socket(socket.AF_PACKET, socket.SOCK_RAW, socket.htons(type))
self.ins.setsockopt(socket.SOL_SOCKET, socket.SO_RCVBUF, 0)
if iface:
self.ins.bind((iface, type))
if not nofilter:
if conf.except_filter:
if filter:
filter = "(%s) and not (%s)" % (filter, conf.except_filter)
else:
filter = "not (%s)" % conf.except_filter
if filter is not None:
attach_filter(self.ins, filter, iface)
_flush_fd(self.ins)
self.ins.setsockopt(socket.SOL_SOCKET, socket.SO_RCVBUF, 2**30)
self.outs = socket.socket(socket.AF_PACKET, socket.SOCK_RAW, socket.htons(type))
self.outs.setsockopt(socket.SOL_SOCKET, socket.SO_SNDBUF, 2**30)
self.promisc = conf.promisc if promisc is None else promisc
if self.promisc:
if iface is None:
self.iff = get_if_list()
else:
if iface.__class__ is list:
self.iff = iface
else:
self.iff = [iface]
for i in self.iff:
set_promisc(self.ins, i)
def close(self):
if self.closed:
return
self.closed = 1
if self.promisc:
for i in self.iff:
set_promisc(self.ins, i, 0)
SuperSocket.close(self)
def recv(self, x=MTU):
pkt, sa_ll = self.ins.recvfrom(x)
if sa_ll[2] == socket.PACKET_OUTGOING:
return None
if sa_ll[3] in conf.l2types:
cls = conf.l2types[sa_ll[3]]
lvl = 2
elif sa_ll[1] in conf.l3types:
cls = conf.l3types[sa_ll[1]]
lvl = 3
else:
cls = conf.default_l2
warning("Unable to guess type (interface=%s protocol=%#x family=%i). Using %s" % (sa_ll[0],sa_ll[1],sa_ll[3],cls.name))
lvl = 2
try:
pkt = cls(pkt)
except KeyboardInterrupt:
raise
except:
if conf.debug_dissector:
raise
pkt = conf.raw_layer(pkt)
if lvl == 2:
pkt = pkt.payload
if pkt is not None:
pkt.time = get_last_packet_timestamp(self.ins)
return pkt
def send(self, x):
iff,a,gw = x.route()
if iff is None:
iff = conf.iface
sdto = (iff, self.type)
self.outs.bind(sdto)
sn = self.outs.getsockname()
ll = lambda x:x
if type(x) in conf.l3types:
sdto = (iff, conf.l3types[type(x)])
if sn[3] in conf.l2types:
ll = lambda x:conf.l2types[sn[3]]()/x
sx = str(ll(x))
x.sent_time = time.time()
try:
self.outs.sendto(sx, sdto)
except socket.error, msg:
if msg[0] == 22 and len(sx) < conf.min_pkt_size:
self.outs.send(sx + "\x00" * (conf.min_pkt_size - len(sx)))
elif conf.auto_fragment and msg[0] == 90:
for p in x.fragment():
self.outs.sendto(str(ll(p)), sdto)
else:
raise
class L2Socket(SuperSocket):
desc = "read/write packets at layer 2 using Linux PF_PACKET sockets"
def __init__(self, iface=None, type=ETH_P_ALL, promisc=None, filter=None, nofilter=0):
self.iface = conf.iface if iface is None else iface
self.ins = socket.socket(socket.AF_PACKET, socket.SOCK_RAW, socket.htons(type))
self.ins.setsockopt(socket.SOL_SOCKET, socket.SO_RCVBUF, 0)
if not nofilter:
if conf.except_filter:
if filter:
filter = "(%s) and not (%s)" % (filter, conf.except_filter)
else:
filter = "not (%s)" % conf.except_filter
if filter is not None:
attach_filter(self.ins, filter, iface)
self.promisc = conf.sniff_promisc if promisc is None else promisc
if self.promisc:
set_promisc(self.ins, self.iface)
self.ins.bind((self.iface, type))
_flush_fd(self.ins)
self.ins.setsockopt(socket.SOL_SOCKET, socket.SO_RCVBUF, 2**30)
self.outs = self.ins
self.outs.setsockopt(socket.SOL_SOCKET, socket.SO_SNDBUF, 2**30)
sa_ll = self.outs.getsockname()
if sa_ll[3] in conf.l2types:
self.LL = conf.l2types[sa_ll[3]]
elif sa_ll[1] in conf.l3types:
self.LL = conf.l3types[sa_ll[1]]
else:
self.LL = conf.default_l2
warning("Unable to guess type (interface=%s protocol=%#x family=%i). Using %s" % (sa_ll[0],sa_ll[1],sa_ll[3],self.LL.name))
def close(self):
if self.closed:
return
self.closed = 1
if self.promisc:
set_promisc(self.ins, self.iface, 0)
SuperSocket.close(self)
def recv(self, x=MTU):
pkt, sa_ll = self.ins.recvfrom(x)
if sa_ll[2] == socket.PACKET_OUTGOING:
return None
try:
q = self.LL(pkt)
except KeyboardInterrupt:
raise
except:
if conf.debug_dissector:
raise
q = conf.raw_layer(pkt)
q.time = get_last_packet_timestamp(self.ins)
return q
def send(self, x):
try:
return SuperSocket.send(self, x)
except socket.error, msg:
if msg[0] == 22 and len(x) < conf.min_pkt_size:
padding = "\x00" * (conf.min_pkt_size - len(x))
if isinstance(x, Packet):
return SuperSocket.send(self, x / Padding(load=padding))
else:
return SuperSocket.send(self, str(x) + padding)
raise
class L2ListenSocket(SuperSocket):
desc = "read packets at layer 2 using Linux PF_PACKET sockets"
def __init__(self, iface = None, type = ETH_P_ALL, promisc=None, filter=None, nofilter=0):
self.type = type
self.outs = None
self.ins = socket.socket(socket.AF_PACKET, socket.SOCK_RAW, socket.htons(type))
self.ins.setsockopt(socket.SOL_SOCKET, socket.SO_RCVBUF, 0)
if iface is not None:
self.ins.bind((iface, type))
if not nofilter:
if conf.except_filter:
if filter:
filter = "(%s) and not (%s)" % (filter, conf.except_filter)
else:
filter = "not (%s)" % conf.except_filter
if filter is not None:
attach_filter(self.ins, filter, iface)
if promisc is None:
promisc = conf.sniff_promisc
self.promisc = promisc
if iface is None:
self.iff = get_if_list()
else:
if iface.__class__ is list:
self.iff = iface
else:
self.iff = [iface]
if self.promisc:
for i in self.iff:
set_promisc(self.ins, i)
_flush_fd(self.ins)
self.ins.setsockopt(socket.SOL_SOCKET, socket.SO_RCVBUF, 2**30)
def close(self):
if self.promisc:
for i in self.iff:
set_promisc(self.ins, i, 0)
SuperSocket.close(self)
def recv(self, x=MTU):
pkt, sa_ll = self.ins.recvfrom(x)
if sa_ll[3] in conf.l2types :
cls = conf.l2types[sa_ll[3]]
elif sa_ll[1] in conf.l3types:
cls = conf.l3types[sa_ll[1]]
else:
cls = conf.default_l2
warning("Unable to guess type (interface=%s protocol=%#x "
"family=%i). Using %s" % (sa_ll[0], sa_ll[1], sa_ll[3],
cls.name))
try:
pkt = cls(pkt)
except KeyboardInterrupt:
raise
except:
if conf.debug_dissector:
raise
pkt = conf.raw_layer(pkt)
pkt.time = get_last_packet_timestamp(self.ins)
pkt.direction = sa_ll[2]
return pkt
def send(self, x):
raise Scapy_Exception("Can't send anything with L2ListenSocket")
conf.L3socket = L3PacketSocket
conf.L2socket = L2Socket
conf.L2listen = L2ListenSocket
conf.iface = get_working_if()
|
#!/usr/bin/env python
#############################################################################
##
## Copyright (C) 2012 Hans-Peter Jansen <[email protected]>.
## Copyright (C) 2011 Nokia Corporation and/or its subsidiary(-ies).
## All rights reserved.
## Contact: Nokia Corporation ([email protected])
##
## This file is part of the examples of PyQt.
##
## $QT_BEGIN_LICENSE:LGPL$
## GNU Lesser General Public License Usage
## This file may be used under the terms of the GNU Lesser General Public
## License version 2.1 as published by the Free Software Foundation and
## appearing in the file LICENSE.LGPL included in the packaging of this
## file. Please review the following information to ensure the GNU Lesser
## General Public License version 2.1 requirements will be met:
## http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
##
## In addition, as a special exception, Nokia gives you certain additional
## rights. These rights are described in the Nokia Qt LGPL Exception
## version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
##
## GNU General Public License Usage
## Alternatively, this file may be used under the terms of the GNU General
## Public License version 3.0 as published by the Free Software Foundation
## and appearing in the file LICENSE.GPL included in the packaging of this
## file. Please review the following information to ensure the GNU General
## Public License version 3.0 requirements will be met:
## http://www.gnu.org/copyleft/gpl.html.
##
## Other Usage
## Alternatively, this file may be used in accordance with the terms and
## conditions contained in a signed written agreement between you and Nokia.
## $QT_END_LICENSE$
##
#############################################################################
# These are only needed for Python v2 but are harmless for Python v3.
import sip
sip.setapi('QString', 2)
sip.setapi('QVariant', 2)
from PyQt4 import QtCore, QtGui
try:
import spreadsheet_rc3
except ImportError:
import spreadsheet_rc2
from spreadsheetdelegate import SpreadSheetDelegate
from spreadsheetitem import SpreadSheetItem
from printview import PrintView
from util import decode_pos, encode_pos
class SpreadSheet(QtGui.QMainWindow):
dateFormats = ["dd/M/yyyy", "yyyy/M/dd", "dd.MM.yyyy"]
currentDateFormat = dateFormats[0]
def __init__(self, rows, cols, parent = None):
super(SpreadSheet, self).__init__(parent)
self.toolBar = QtGui.QToolBar()
self.addToolBar(self.toolBar)
self.formulaInput = QtGui.QLineEdit()
self.cellLabel = QtGui.QLabel(self.toolBar)
self.cellLabel.setMinimumSize(80, 0)
self.toolBar.addWidget(self.cellLabel)
self.toolBar.addWidget(self.formulaInput)
self.table = QtGui.QTableWidget(rows, cols, self)
for c in range(cols):
character = chr(ord('A') + c)
self.table.setHorizontalHeaderItem(c, QtGui.QTableWidgetItem(character))
self.table.setItemPrototype(self.table.item(rows - 1, cols - 1))
self.table.setItemDelegate(SpreadSheetDelegate(self))
self.createActions()
self.updateColor(0)
self.setupMenuBar()
self.setupContents()
self.setupContextMenu()
self.setCentralWidget(self.table)
self.statusBar()
self.table.currentItemChanged.connect(self.updateStatus)
self.table.currentItemChanged.connect(self.updateColor)
self.table.currentItemChanged.connect(self.updateLineEdit)
self.table.itemChanged.connect(self.updateStatus)
self.formulaInput.returnPressed.connect(self.returnPressed)
self.table.itemChanged.connect(self.updateLineEdit)
self.setWindowTitle(self.tr("Spreadsheet"))
def createActions(self):
self.cell_sumAction = QtGui.QAction(self.tr("Sum"), self)
self.cell_sumAction.triggered.connect(self.actionSum)
self.cell_addAction = QtGui.QAction(self.tr("&Add"), self)
self.cell_addAction.setShortcut(QtCore.Qt.CTRL | QtCore.Qt.Key_Plus)
self.cell_addAction.triggered.connect(self.actionAdd)
self.cell_subAction = QtGui.QAction(self.tr("&Subtract"), self)
self.cell_subAction.setShortcut(QtCore.Qt.CTRL | QtCore.Qt.Key_Minus)
self.cell_subAction.triggered.connect(self.actionSubtract)
self.cell_mulAction = QtGui.QAction(self.tr("&Multiply"), self)
self.cell_mulAction.setShortcut(QtCore.Qt.CTRL | QtCore.Qt.Key_multiply)
self.cell_mulAction.triggered.connect(self.actionMultiply)
self.cell_divAction = QtGui.QAction(self.tr("&Divide"), self)
self.cell_divAction.setShortcut(QtCore.Qt.CTRL | QtCore.Qt.Key_division)
self.cell_divAction.triggered.connect(self.actionDivide)
self.fontAction = QtGui.QAction(self.tr("Font..."), self)
self.fontAction.setShortcut(QtCore.Qt.CTRL | QtCore.Qt.Key_F)
self.fontAction.triggered.connect(self.selectFont)
self.colorAction = QtGui.QAction(QtGui.QIcon(QtGui.QPixmap(16, 16)), self.tr("Background &Color..."), self)
self.colorAction.triggered.connect(self.selectColor)
self.clearAction = QtGui.QAction(self.tr("Clear"), self)
self.clearAction.setShortcut(QtCore.Qt.Key_Delete)
self.clearAction.triggered.connect(self.clear)
self.aboutSpreadSheet = QtGui.QAction(self.tr("About Spreadsheet"), self)
self.aboutSpreadSheet.triggered.connect(self.showAbout)
self.exitAction = QtGui.QAction(self.tr("E&xit"), self)
self.exitAction.setShortcut(QtGui.QKeySequence.Quit)
self.exitAction.triggered.connect(QtGui.qApp.quit)
self.printAction = QtGui.QAction(self.tr("&Print"), self)
self.printAction.setShortcut(QtGui.QKeySequence.Print)
self.printAction.triggered.connect(self.print_)
self.firstSeparator = QtGui.QAction(self)
self.firstSeparator.setSeparator(True)
self.secondSeparator = QtGui.QAction(self)
self.secondSeparator.setSeparator(True)
def setupMenuBar(self):
self.fileMenu = self.menuBar().addMenu(self.tr("&File"))
self.dateFormatMenu = self.fileMenu.addMenu(self.tr("&Date format"))
self.dateFormatGroup = QtGui.QActionGroup(self)
for f in self.dateFormats:
action = QtGui.QAction(f, self, checkable = True, triggered = self.changeDateFormat)
self.dateFormatGroup.addAction(action)
self.dateFormatMenu.addAction(action)
if f == self.currentDateFormat:
action.setChecked(True)
self.fileMenu.addAction(self.printAction)
self.fileMenu.addAction(self.exitAction)
self.cellMenu = self.menuBar().addMenu(self.tr("&Cell"))
self.cellMenu.addAction(self.cell_addAction)
self.cellMenu.addAction(self.cell_subAction)
self.cellMenu.addAction(self.cell_mulAction)
self.cellMenu.addAction(self.cell_divAction)
self.cellMenu.addAction(self.cell_sumAction)
self.cellMenu.addSeparator()
self.cellMenu.addAction(self.colorAction)
self.cellMenu.addAction(self.fontAction)
self.menuBar().addSeparator()
self.aboutMenu = self.menuBar().addMenu(self.tr("&Help"))
self.aboutMenu.addAction(self.aboutSpreadSheet)
def changeDateFormat(self):
action = self.sender()
oldFormat = self.currentDateFormat
newFormat = self.currentDateFormat = action.text()
for row in range(self.table.rowCount()):
item = self.table.item(row, 1)
date = QtCore.QDate.fromString(item.text(), oldFormat)
item.setText(date.toString(newFormat))
def updateStatus(self, item):
if item and item == self.table.currentItem():
self.statusBar().showMessage(item.data(QtCore.Qt.StatusTipRole), 1000)
self.cellLabel.setText(self.tr("Cell: (%s)" % encode_pos(self.table.row(item),
self.table.column(item))))
def updateColor(self, item):
pixmap = QtGui.QPixmap(16, 16)
color = QtGui.QColor()
if item:
color = item.backgroundColor()
if not color.isValid():
color = self.palette().base().color()
painter = QtGui.QPainter(pixmap)
painter.fillRect(0, 0, 16, 16, color)
lighter = color.light()
painter.setPen(lighter)
# light frame
painter.drawPolyline(QtCore.QPoint(0, 15), QtCore.QPoint(0, 0), QtCore.QPoint(15, 0))
painter.setPen(color.dark())
# dark frame
painter.drawPolyline(QtCore.QPoint(1, 15), QtCore.QPoint(15, 15), QtCore.QPoint(15, 1))
painter.end()
self.colorAction.setIcon(QtGui.QIcon(pixmap))
def updateLineEdit(self, item):
if item != self.table.currentItem():
return
if item:
self.formulaInput.setText(item.data(QtCore.Qt.EditRole))
else:
self.formulaInput.clear()
def returnPressed(self):
text = self.formulaInput.text()
row = self.table.currentRow()
col = self.table.currentColumn()
item = self.table.item(row, col)
if not item:
self.table.setItem(row, col, SpreadSheetItem(text))
else:
item.setData(QtCore.Qt.EditRole, text)
self.table.viewport().update()
def selectColor(self):
item = self.table.currentItem()
        color = item and item.background().color() or self.table.palette().base().color()
color = QtGui.QColorDialog.getColor(color, self)
if not color.isValid():
return
selected = self.table.selectedItems()
if not selected:
return
for i in selected:
i and i.setBackground(color)
self.updateColor(self.table.currentItem())
def selectFont(self):
selected = self.table.selectedItems()
if not selected:
return
font, ok = QtGui.QFontDialog.getFont(self.font(), self)
if not ok:
return
for i in selected:
i and i.setFont(font)
def runInputDialog(self, title, c1Text, c2Text, opText,
outText, cell1, cell2, outCell):
rows = []
cols = []
for r in range(self.table.rowCount()):
rows.append(str(r + 1))
for c in range(self.table.columnCount()):
cols.append(chr(ord('A') + c))
addDialog = QtGui.QDialog(self)
addDialog.setWindowTitle(title)
group = QtGui.QGroupBox(title, addDialog)
group.setMinimumSize(250, 100)
cell1Label = QtGui.QLabel(c1Text, group)
cell1RowInput = QtGui.QComboBox(group)
c1Row, c1Col = decode_pos(cell1)
cell1RowInput.addItems(rows)
cell1RowInput.setCurrentIndex(c1Row)
cell1ColInput = QtGui.QComboBox(group)
cell1ColInput.addItems(cols)
cell1ColInput.setCurrentIndex(c1Col)
operatorLabel = QtGui.QLabel(opText, group)
operatorLabel.setAlignment(QtCore.Qt.AlignHCenter)
cell2Label = QtGui.QLabel(c2Text, group)
cell2RowInput = QtGui.QComboBox(group)
c2Row, c2Col = decode_pos(cell2)
cell2RowInput.addItems(rows)
cell2RowInput.setCurrentIndex(c2Row)
cell2ColInput = QtGui.QComboBox(group)
cell2ColInput.addItems(cols)
cell2ColInput.setCurrentIndex(c2Col)
equalsLabel = QtGui.QLabel("=", group)
equalsLabel.setAlignment(QtCore.Qt.AlignHCenter)
outLabel = QtGui.QLabel(outText, group)
outRowInput = QtGui.QComboBox(group)
outRow, outCol = decode_pos(outCell)
outRowInput.addItems(rows)
outRowInput.setCurrentIndex(outRow)
outColInput = QtGui.QComboBox(group)
outColInput.addItems(cols)
outColInput.setCurrentIndex(outCol)
cancelButton = QtGui.QPushButton(self.tr("Cancel"), addDialog)
cancelButton.clicked.connect(addDialog.reject)
okButton = QtGui.QPushButton(self.tr("OK"), addDialog)
okButton.setDefault(True)
okButton.clicked.connect(addDialog.accept)
buttonsLayout = QtGui.QHBoxLayout()
buttonsLayout.addStretch(1)
buttonsLayout.addWidget(okButton)
buttonsLayout.addSpacing(10)
buttonsLayout.addWidget(cancelButton)
dialogLayout = QtGui.QVBoxLayout(addDialog)
dialogLayout.addWidget(group)
dialogLayout.addStretch(1)
dialogLayout.addItem(buttonsLayout)
cell1Layout = QtGui.QHBoxLayout()
cell1Layout.addWidget(cell1Label)
cell1Layout.addSpacing(10)
cell1Layout.addWidget(cell1ColInput)
cell1Layout.addSpacing(10)
cell1Layout.addWidget(cell1RowInput)
cell2Layout = QtGui.QHBoxLayout()
cell2Layout.addWidget(cell2Label)
cell2Layout.addSpacing(10)
cell2Layout.addWidget(cell2ColInput)
cell2Layout.addSpacing(10)
cell2Layout.addWidget(cell2RowInput)
outLayout = QtGui.QHBoxLayout()
outLayout.addWidget(outLabel)
outLayout.addSpacing(10)
outLayout.addWidget(outColInput)
outLayout.addSpacing(10)
outLayout.addWidget(outRowInput)
vLayout = QtGui.QVBoxLayout(group)
vLayout.addItem(cell1Layout)
vLayout.addWidget(operatorLabel)
vLayout.addItem(cell2Layout)
vLayout.addWidget(equalsLabel)
vLayout.addStretch(1)
vLayout.addItem(outLayout)
if addDialog.exec_():
cell1 = cell1ColInput.currentText() + cell1RowInput.currentText()
cell2 = cell2ColInput.currentText() + cell2RowInput.currentText()
outCell = outColInput.currentText() + outRowInput.currentText()
return True, cell1, cell2, outCell
return False, None, None, None
def actionSum(self):
row_first = 0
row_last = 0
row_cur = 0
col_first = 0
col_last = 0
col_cur = 0
selected = self.table.selectedItems()
if selected:
first = selected[0]
last = selected[-1]
row_first = self.table.row(first)
row_last = self.table.row(last)
col_first = self.table.column(first)
col_last = self.table.column(last)
current = self.table.currentItem()
if current:
row_cur = self.table.row(current)
col_cur = self.table.column(current)
cell1 = encode_pos(row_first, col_first)
cell2 = encode_pos(row_last, col_last)
out = encode_pos(row_cur, col_cur)
ok, cell1, cell2, out = self.runInputDialog(
self.tr("Sum cells"), self.tr("First cell:"), self.tr("Last cell:"),
unichr(0x03a3), self.tr("Output to:"), cell1, cell2, out)
if ok:
row, col = decode_pos(out)
self.table.item(row, col).setText(self.tr("sum %s %s" % (cell1, cell2)))
def actionMath_helper(self, title, op):
cell1 = "C1"
cell2 = "C2"
out = "C3"
current = self.table.currentItem()
if current:
out = encode_pos(self.table.currentRow(), self.table.currentColumn())
ok, cell1, cell2, out = self.runInputDialog(
title, self.tr("Cell 1"), self.tr("Cell 2"), op, self.tr("Output to:"),
cell1, cell2, out)
if ok:
row, col = decode_pos(out)
self.table.item(row, col).setText(self.tr("%s %s %s" % (op, cell1, cell2)))
def actionAdd(self):
self.actionMath_helper(self.tr("Addition"), "+")
def actionSubtract(self):
self.actionMath_helper(self.tr("Subtraction"), "-")
def actionMultiply(self):
self.actionMath_helper(self.tr("Multiplication"), "*")
def actionDivide(self):
self.actionMath_helper(self.tr("Division"), "/")
def clear(self):
for i in self.table.selectedItems():
i.setText("")
def setupContextMenu(self):
self.addAction(self.cell_addAction)
self.addAction(self.cell_subAction)
self.addAction(self.cell_mulAction)
self.addAction(self.cell_divAction)
self.addAction(self.cell_sumAction)
self.addAction(self.firstSeparator)
self.addAction(self.colorAction)
self.addAction(self.fontAction)
self.addAction(self.secondSeparator)
self.addAction(self.clearAction)
self.setContextMenuPolicy(QtCore.Qt.ActionsContextMenu)
def setupContents(self):
titleBackground = QtGui.QColor(QtCore.Qt.lightGray)
titleFont = self.table.font()
titleFont.setBold(True)
# column 0
self.table.setItem(0, 0, SpreadSheetItem("Item"))
self.table.item(0, 0).setBackground(titleBackground)
self.table.item(0, 0).setToolTip("This column shows the purchased item/service")
self.table.item(0, 0).setFont(titleFont)
self.table.setItem(1, 0, SpreadSheetItem("AirportBus"))
self.table.setItem(2, 0, SpreadSheetItem("Flight (Munich)"))
self.table.setItem(3, 0, SpreadSheetItem("Lunch"))
self.table.setItem(4, 0, SpreadSheetItem("Flight (LA)"))
self.table.setItem(5, 0, SpreadSheetItem("Taxi"))
self.table.setItem(6, 0, SpreadSheetItem("Dinner"))
self.table.setItem(7, 0, SpreadSheetItem("Hotel"))
self.table.setItem(8, 0, SpreadSheetItem("Flight (Oslo)"))
self.table.setItem(9, 0, SpreadSheetItem("Total:"))
self.table.item(9, 0).setFont(titleFont)
self.table.item(9, 0).setBackground(QtCore.Qt.lightGray)
# column 1
self.table.setItem(0, 1, SpreadSheetItem("Date"))
self.table.item(0, 1).setBackground(titleBackground)
self.table.item(0, 1).setToolTip("This column shows the purchase date, double click to change")
self.table.item(0, 1).setFont(titleFont)
self.table.setItem(1, 1, SpreadSheetItem("15/6/2006"))
self.table.setItem(2, 1, SpreadSheetItem("15/6/2006"))
self.table.setItem(3, 1, SpreadSheetItem("15/6/2006"))
self.table.setItem(4, 1, SpreadSheetItem("21/5/2006"))
self.table.setItem(5, 1, SpreadSheetItem("16/6/2006"))
self.table.setItem(6, 1, SpreadSheetItem("16/6/2006"))
self.table.setItem(7, 1, SpreadSheetItem("16/6/2006"))
self.table.setItem(8, 1, SpreadSheetItem("18/6/2006"))
self.table.setItem(9, 1, SpreadSheetItem())
self.table.item(9, 1).setBackground(QtCore.Qt.lightGray)
# column 2
self.table.setItem(0, 2, SpreadSheetItem("Price"))
self.table.item(0, 2).setBackground(titleBackground)
self.table.item(0, 2).setToolTip("This column shows the price of the purchase")
self.table.item(0, 2).setFont(titleFont)
self.table.setItem(1, 2, SpreadSheetItem("150"))
self.table.setItem(2, 2, SpreadSheetItem("2350"))
self.table.setItem(3, 2, SpreadSheetItem("-14"))
self.table.setItem(4, 2, SpreadSheetItem("980"))
self.table.setItem(5, 2, SpreadSheetItem("5"))
self.table.setItem(6, 2, SpreadSheetItem("120"))
self.table.setItem(7, 2, SpreadSheetItem("300"))
self.table.setItem(8, 2, SpreadSheetItem("1240"))
self.table.setItem(9, 2, SpreadSheetItem())
self.table.item(9, 2).setBackground(QtCore.Qt.lightGray)
# column 3
self.table.setItem(0, 3, SpreadSheetItem("Currency"))
        self.table.item(0, 3).setBackground(titleBackground)
self.table.item(0, 3).setToolTip("This column shows the currency")
self.table.item(0, 3).setFont(titleFont)
self.table.setItem(1, 3, SpreadSheetItem("NOK"))
self.table.setItem(2, 3, SpreadSheetItem("NOK"))
self.table.setItem(3, 3, SpreadSheetItem("EUR"))
self.table.setItem(4, 3, SpreadSheetItem("EUR"))
self.table.setItem(5, 3, SpreadSheetItem("USD"))
self.table.setItem(6, 3, SpreadSheetItem("USD"))
self.table.setItem(7, 3, SpreadSheetItem("USD"))
self.table.setItem(8, 3, SpreadSheetItem("USD"))
self.table.setItem(9, 3, SpreadSheetItem())
self.table.item(9,3).setBackground(QtCore.Qt.lightGray)
# column 4
self.table.setItem(0, 4, SpreadSheetItem("Ex. Rate"))
self.table.item(0, 4).setBackground(titleBackground)
self.table.item(0, 4).setToolTip("This column shows the exchange rate to NOK")
self.table.item(0, 4).setFont(titleFont)
self.table.setItem(1, 4, SpreadSheetItem("1"))
self.table.setItem(2, 4, SpreadSheetItem("1"))
self.table.setItem(3, 4, SpreadSheetItem("8"))
self.table.setItem(4, 4, SpreadSheetItem("8"))
self.table.setItem(5, 4, SpreadSheetItem("7"))
self.table.setItem(6, 4, SpreadSheetItem("7"))
self.table.setItem(7, 4, SpreadSheetItem("7"))
self.table.setItem(8, 4, SpreadSheetItem("7"))
self.table.setItem(9, 4, SpreadSheetItem())
self.table.item(9,4).setBackground(QtCore.Qt.lightGray)
# column 5
self.table.setItem(0, 5, SpreadSheetItem("NOK"))
self.table.item(0, 5).setBackground(titleBackground)
self.table.item(0, 5).setToolTip("This column shows the expenses in NOK")
self.table.item(0, 5).setFont(titleFont)
self.table.setItem(1, 5, SpreadSheetItem("* C2 E2"))
self.table.setItem(2, 5, SpreadSheetItem("* C3 E3"))
self.table.setItem(3, 5, SpreadSheetItem("* C4 E4"))
self.table.setItem(4, 5, SpreadSheetItem("* C5 E5"))
self.table.setItem(5, 5, SpreadSheetItem("* C6 E6"))
self.table.setItem(6, 5, SpreadSheetItem("* C7 E7"))
self.table.setItem(7, 5, SpreadSheetItem("* C8 E8"))
self.table.setItem(8, 5, SpreadSheetItem("* C9 E9"))
self.table.setItem(9, 5, SpreadSheetItem("sum F2 F9"))
self.table.item(9,5).setBackground(QtCore.Qt.lightGray)
def showAbout(self):
QtGui.QMessageBox.about(self, "About Spreadsheet", """
<HTML>
<p><b>This demo shows use of <c>QTableWidget</c> with custom handling for
individual cells.</b></p>
<p>Using a customized table item we make it possible to have dynamic
output in different cells. The content that is implemented for this
particular demo is:
<ul>
<li>Adding two cells.</li>
<li>Subtracting one cell from another.</li>
<li>Multiplying two cells.</li>
<li>Dividing one cell with another.</li>
<li>Summing the contents of an arbitrary number of cells.</li>
</HTML>
""")
def print_(self):
printer = QtGui.QPrinter(QtGui.QPrinter.ScreenResolution)
dlg = QtGui.QPrintPreviewDialog(printer)
view = PrintView()
view.setModel(self.table.model())
dlg.paintRequested.connect(view.print_)
dlg.exec_()
if __name__ == '__main__':
import sys
app = QtGui.QApplication(sys.argv)
sheet = SpreadSheet(10, 6)
sheet.setWindowIcon(QtGui.QIcon(QtGui.QPixmap(":/images/interview.png")))
sheet.resize(640, 420)
sheet.show()
sys.exit(app.exec_())
|
import logging, time, argparse
from numpy import dot, zeros, kron, array, eye, ones, savetxt, loadtxt, matrix
from numpy.linalg import qr, pinv, norm, inv
from numpy.random import rand
from scipy import sparse
from scipy.sparse import coo_matrix, lil_matrix, dok_matrix
from scipy.sparse.linalg import eigsh
import numpy as np
import os
import fnmatch
from commonFunctions import squareFrobeniusNormOfSparse, fitNormWithoutNormX
from extrescalFunctions import updateA, updateV, matrixFitNormWithoutNormD
__DEF_MAXITER = 50
__DEF_PREHEATNUM = 1
__DEF_INIT = 'nvecs'
__DEF_PROJ = True
__DEF_CONV = 1e-5
__DEF_LMBDA = 0
def rescal(X, D, rank, **kwargs):
"""
RESCAL
Factors a three-way tensor X such that each frontal slice
X_k = A * R_k * A.T. The frontal slices of a tensor are
N x N matrices that correspond to the adjacency matrices
of the relational graph for a particular relation.
For a full description of the algorithm see:
    Maximilian Nickel, Volker Tresp, Hans-Peter Kriegel,
"A Three-Way Model for Collective Learning on Multi-Relational Data",
ICML 2011, Bellevue, WA, USA
Parameters
----------
X : list
List of frontal slices X_k of the tensor X. The shape of each X_k is ('N', 'N')
D : matrix
A sparse matrix involved in the tensor factorization (aims to incorporate
the entity-term matrix aka document-term matrix)
rank : int
Rank of the factorization
lmbda : float, optional
Regularization parameter for A and R_k factor matrices. 0 by default
init : string, optional
Initialization method of the factor matrices. 'nvecs' (default)
initializes A based on the eigenvectors of X. 'random' initializes
the factor matrices randomly.
proj : boolean, optional
Whether or not to use the QR decomposition when computing R_k.
True by default
maxIter : int, optional
        Maximum number of iterations of the ALS algorithm. 50 by default.
conv : float, optional
Stop when residual of factorization is less than conv. 1e-5 by default
Returns
-------
A : ndarray
matrix of latent embeddings for entities A
R : list
list of 'M' arrays of shape ('rank', 'rank') corresponding to the factor matrices R_k
f : float
function value of the factorization
iter : int
number of iterations until convergence
exectimes : ndarray
execution times to compute the updates in each iteration
V : ndarray
matrix of latent embeddings for words V
"""
# init options
ainit = kwargs.pop('init', __DEF_INIT)
proj = kwargs.pop('proj', __DEF_PROJ)
maxIter = kwargs.pop('maxIter', __DEF_MAXITER)
conv = kwargs.pop('conv', __DEF_CONV)
lmbda = kwargs.pop('lmbda', __DEF_LMBDA)
preheatnum = kwargs.pop('preheatnum', __DEF_PREHEATNUM)
if not len(kwargs) == 0:
raise ValueError( 'Unknown keywords (%s)' % (kwargs.keys()) )
sz = X[0].shape
dtype = X[0].dtype
n = sz[0]
_log.debug('[Config] rank: %d | maxIter: %d | conv: %7.1e | lmbda: %7.1e' % (rank,
maxIter, conv, lmbda))
# precompute norms of X
normX = [squareFrobeniusNormOfSparse(M) for M in X]
sumNormX = sum(normX)
normD = squareFrobeniusNormOfSparse(D)
_log.debug('[Algorithm] The tensor norm: %.5f' % sumNormX)
_log.debug('[Algorithm] The extended matrix norm: %.5f' % normD)
# initialize A
if ainit == 'random':
_log.debug('[Algorithm] The random initialization will be performed.')
A = array(rand(n, rank), dtype=np.float64)
elif ainit == 'nvecs':
_log.debug('[Algorithm] The eigenvector based initialization will be performed.')
avgX = lil_matrix((n, n))
for i in range(len(X)):
avgX += (X[i] + X[i].T)
eigvalsX, A = eigsh(avgX, rank)
else :
        raise ValueError('Unknown init option ("%s")' % ainit)
# initialize R
if proj:
Q, A2 = qr(A)
X2 = __projectSlices(X, Q)
R = __updateR(X2, A2, lmbda)
else :
        raise ValueError('Projection via QR decomposition is required; pass proj=True')
_log.debug('[Algorithm] Finished initialization.')
# compute factorization
fit = fitchange = fitold = 0
exectimes = []
for iterNum in xrange(maxIter):
tic = time.clock()
V = updateV(A, D, lmbda)
A = updateA(X, A, R, V, D, lmbda)
if proj:
Q, A2 = qr(A)
X2 = __projectSlices(X, Q)
R = __updateR(X2, A2, lmbda)
else :
            raise ValueError('Projection via QR decomposition is required; pass proj=True')
# compute fit values
fit = 0
tensorFit = 0
regularizedFit = 0
extRegularizedFit = 0
regRFit = 0
fitDAV = 0
if iterNum >= preheatnum:
if lmbda != 0:
for i in xrange(len(R)):
regRFit += norm(R[i])**2
regularizedFit = lmbda*(norm(A)**2) + lmbda*regRFit
if lmbda != 0:
extRegularizedFit = lmbda*(norm(V)**2)
fitDAV = normD + matrixFitNormWithoutNormD(D, A, V)
for i in xrange(len(R)):
tensorFit += (normX[i] + fitNormWithoutNormX(X[i], A, R[i]))
fit = 0.5*tensorFit
fit += regularizedFit
fit /= sumNormX
fit += (0.5*fitDAV + extRegularizedFit)/normD
else :
_log.debug('[Algorithm] Preheating is going on.')
toc = time.clock()
exectimes.append( toc - tic )
fitchange = abs(fitold - fit)
_log.debug('[%3d] total fit: %.10f | tensor fit: %.10f | matrix fit: %.10f | delta: %.10f | secs: %.5f' % (iterNum,
fit, tensorFit, fitDAV, fitchange, exectimes[-1]))
fitold = fit
if iterNum > preheatnum and fitchange < conv:
break
return A, R, fit, iterNum+1, array(exectimes), V
def __updateR(X, A, lmbda):
r = A.shape[1]
R = []
At = A.T
if lmbda == 0:
ainv = dot(pinv(dot(At, A)), At)
for i in xrange(len(X)):
R.append( dot(ainv, X[i].dot(ainv.T)) )
else :
AtA = dot(At, A)
tmp = inv(kron(AtA, AtA) + lmbda * eye(r**2))
for i in xrange(len(X)):
AtXA = dot(At, X[i].dot(A))
R.append( dot(AtXA.flatten(), tmp).reshape(r, r) )
return R
def __projectSlices(X, Q):
X2 = []
for i in xrange(len(X)):
X2.append( dot(Q.T, X[i].dot(Q)) )
return X2
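# A minimal programmatic sketch of calling rescal() directly, apart from the CLI
# driver below (illustrative only; the toy shapes and parameter values are
# hypothetical):
#
#     from scipy.sparse import lil_matrix
#     toyX = [lil_matrix((3, 3))]                 # one 3x3 relation slice
#     toyX[0][0, 1] = 1
#     toyD = lil_matrix((3, 2))                   # entity-term matrix
#     toyD[0, 0] = 1.0
#     A, R, f, nIter, times, V = rescal(toyX, toyD.tocsr(), 2,
#                                       lmbda=0.1, init='random', maxIter=10)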
parser = argparse.ArgumentParser()
parser.add_argument("--latent", type=int, help="number of latent components", required=True)
parser.add_argument("--lmbda", type=float, help="regularization parameter", required=True)
parser.add_argument("--input", type=str, help="the directory, where the input data are stored", required=True)
parser.add_argument("--outputentities", type=str, help="the file, where the latent embeddings for entities will be output", required=True)
parser.add_argument("--outputterms", type=str, help="the file, where the inverted matrix of latent embeddings for terms will be output", required=True)
parser.add_argument("--outputfactors", type=str, help="the file, where the latent factors will be output", required=True)
parser.add_argument("--log", type=str, help="log file", required=True)
args = parser.parse_args()
numLatentComponents = args.latent
inputDir = args.input
regularizationParam = args.lmbda
outputEntities = args.outputentities
outputTerms = args.outputterms
outputFactors = args.outputfactors
logFile = args.log
logging.basicConfig(filename=logFile, filemode='w', level=logging.DEBUG)
_log = logging.getLogger('EXT-RESCAL')
dim = 0
with open('./%s/entity-ids' % inputDir) as entityIds:
for line in entityIds:
dim += 1
print 'The number of entities: %d' % dim
numSlices = 0
numNonzeroTensorEntries = 0
X = []
for inputFile in os.listdir('./%s' % inputDir):
if fnmatch.fnmatch(inputFile, '[0-9]*-rows'):
numSlices += 1
row = loadtxt('./%s/%s' % (inputDir, inputFile), dtype=np.int32)
if row.size == 1:
row = np.atleast_1d(row)
col = loadtxt('./%s/%s' % (inputDir, inputFile.replace("rows", "cols")), dtype=np.int32)
if col.size == 1:
col = np.atleast_1d(col)
Xi = coo_matrix((ones(row.size),(row,col)), shape=(dim,dim), dtype=np.uint8).tolil()
numNonzeroTensorEntries += row.size
X.append(Xi)
print 'The number of tensor slices: %d' % numSlices
print 'The number of non-zero values in the tensor: %d' % numNonzeroTensorEntries
extDim = 0
with open('./%s/words' % inputDir) as words:
for line in words:
extDim += 1
print 'The number of words: %d' % extDim
extRow = loadtxt('./%s/ext-matrix-rows' % inputDir, dtype=np.int32)
if extRow.size == 1:
extRow = np.atleast_1d(extRow)
extCol = loadtxt('./%s/ext-matrix-cols' % inputDir, dtype=np.int32)
if extCol.size == 1:
extCol = np.atleast_1d(extCol)
extVal = loadtxt('./%s/ext-matrix-elements' % inputDir, dtype=np.float64)
if extVal.size == 1:
extVal = np.atleast_1d(extVal)
D = dok_matrix((dim,extDim), dtype=np.float64)
for i in xrange(extVal.size):
D[extRow[i], extCol[i]] = extVal[i]
D = D.tocsr()
print 'The number of non-zero values in the additional matrix: %d' % extRow.size
result = rescal(X, D, numLatentComponents, lmbda=regularizationParam)
print 'Objective function value: %.30f' % result[2]
print '# of iterations: %d' % result[3]
#print the matrices of latent embeddings
A = result[0]
savetxt(outputEntities, A)
V = result[5]
savetxt(outputTerms, V.T)
R = result[1]
with file(outputFactors, 'w') as outfile:
for i in xrange(len(R)):
savetxt(outfile, R[i])
|
"""
Django settings for {{ project_name }} project.
Generated by 'django-admin startproject' using Django {{ django_version }}.
For more information on this file, see
https://docs.djangoproject.com/en/{{ docs_version }}/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/{{ docs_version }}/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.realpath(os.path.join(os.path.dirname(__file__), '..', '..'))
BASE_URL = ''
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/{{ docs_version }}/howto/deployment/checklist/
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
THIRD_PARTY_APPS_IMPORT_FIRST = []
DJANGO_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.sites',
'django.contrib.gis',
]
THIRD_PARTY_APPS = [
'authtools',
'dbbackup',
'django_extensions',
]
# Apps specific for this project go here.
LOCAL_APPS = [
'users', # custom users app
# Your stuff: custom apps go here
]
# See: https://docs.djangoproject.com/en/dev/ref/settings/#installed-apps
INSTALLED_APPS = THIRD_PARTY_APPS_IMPORT_FIRST + DJANGO_APPS + \
THIRD_PARTY_APPS + LOCAL_APPS
MIDDLEWARE = [
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.security.SecurityMiddleware',
]
ROOT_URLCONF = '{{ project_name }}.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [
os.path.join(BASE_DIR, 'templates'),
],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = '{{ project_name }}.wsgi.application'
SITE_ID = 1
# Password validation
# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/{{ docs_version }}/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/{{ docs_version }}/howto/static-files/
STATICFILES_FINDERS = [
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
]
STATIC_ROOT = os.path.join(BASE_DIR, 'staticfiles')
STATIC_URL = '/static/'
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
MEDIA_URL = '/media/'
STATICFILES_DIRS = (
os.path.join(BASE_DIR, 'static'),
)
AUTHENTICATION_BACKENDS = (
'django.contrib.auth.backends.ModelBackend', # default
)
REDIS_SETTINGS = {
'host': '{{ project_name }}_redis',
'port': '6379',
'db': '0'
}
REDIS_URL = 'redis://{0}:{1}/{2}'.format(
REDIS_SETTINGS['host'], REDIS_SETTINGS['port'], REDIS_SETTINGS['db']
)
# https://docs.djangoproject.com/en/1.8/topics/http/sessions/#using-cached-sessions
SESSION_ENGINE = 'django.contrib.sessions.backends.cached_db'
CACHES = {
'default': {
'BACKEND': 'redis_cache.RedisCache',
'LOCATION': REDIS_URL,
},
}
# django-authtools settings
AUTH_USER_MODEL = 'authtools.User'
logging_folder_name = 'logs/'
logging_nginx_path = logging_folder_name + 'nginx/'
logging_gunicorn_path = logging_folder_name + 'gunicorn/'
try:
if not os.path.exists(os.path.join(BASE_DIR, logging_folder_name)):
os.makedirs(os.path.join(BASE_DIR, logging_folder_name))
if not os.path.exists(os.path.join(BASE_DIR, logging_nginx_path)):
os.makedirs(os.path.join(BASE_DIR, logging_nginx_path))
if not os.path.exists(os.path.join(BASE_DIR, logging_gunicorn_path)):
os.makedirs(os.path.join(BASE_DIR, logging_gunicorn_path))
except FileExistsError:
pass
LOGGING = {
'version': 1,
'disable_existing_loggers': True,
'root': {
'level': 'WARNING',
'handlers': ['sentry'],
},
'formatters': {
'verbose': {
'format': '%(levelname)s %(asctime)s %(module)s %(process)d %(thread)d %(message)s'
},
},
'handlers': {
'sentry': {
'level': 'ERROR',
'class': 'raven.contrib.django.raven_compat.handlers.SentryHandler',
},
'console': {
'level': 'DEBUG',
'class': 'logging.StreamHandler',
'formatter': 'verbose'
},
'{{ project_name }}_file': {
'level': 'DEBUG',
'class': 'logging.handlers.RotatingFileHandler',
'maxBytes': 1024 * 1024 * 20, # 20 MB
'backupCount': 10,
'filename': os.path.join(BASE_DIR, logging_folder_name + '{{ project_name }}.log'),
'formatter': 'verbose'
},
},
'loggers': {
'django.db.backends': {
'level': 'ERROR',
'handlers': ['console'],
'propagate': False,
},
'raven': {
'level': 'DEBUG',
'handlers': ['console'],
'propagate': False,
},
'sentry.errors': {
'level': 'DEBUG',
'handlers': ['console'],
'propagate': False,
},
'{{ project_name }}_console': {
'level': 'DEBUG',
'handlers': ['console', '{{ project_name }}_file'],
'propagate': False,
},
},
}
# Django DBBackup Setting
DBBACKUP_BACKUP_DIRECTORY = os.path.join(BASE_DIR, '{{ project_name }}_management/backups/')
DBBACKUP_CLEANUP_KEEP = 2
DBBACKUP_CLEANUP_KEEP_MEDIA = 2
|
# Copyright 2014-2015 Ivan Kravets <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Base for ARM microcontrollers.
"""
from SCons.Script import Builder, DefaultEnvironment
env = DefaultEnvironment()
env.Replace(
AR="arm-none-eabi-ar",
AS="arm-none-eabi-as",
CC="arm-none-eabi-gcc",
CXX="arm-none-eabi-g++",
OBJCOPY="arm-none-eabi-objcopy",
RANLIB="arm-none-eabi-ranlib",
SIZETOOL="arm-none-eabi-size",
ARFLAGS=["rcs"],
ASPPFLAGS=["-x", "assembler-with-cpp"],
CPPFLAGS=[
"-g", # include debugging info (so errors include line numbers)
"-Os", # optimize for size
"-ffunction-sections", # place each function in its own section
"-fdata-sections",
"-Wall",
"-mthumb",
"-mcpu=${BOARD_OPTIONS['build']['cpu']}",
"-nostdlib",
"-MMD" # output dependancy info
],
CXXFLAGS=[
"-fno-rtti",
"-fno-exceptions"
],
CPPDEFINES=[
"F_CPU=$BOARD_F_CPU"
],
LINKFLAGS=[
"-Os",
"-Wl,--gc-sections,--relax",
"-mthumb",
"-mcpu=${BOARD_OPTIONS['build']['cpu']}"
],
LIBS=["c", "gcc", "m"],
SIZEPRINTCMD='"$SIZETOOL" -B -d $SOURCES',
PROGNAME="firmware",
PROGSUFFIX=".elf"
)
env.Append(
BUILDERS=dict(
ElfToBin=Builder(
action=" ".join([
"$OBJCOPY",
"-O",
"binary",
"$SOURCES",
"$TARGET"]),
suffix=".bin"
),
ElfToHex=Builder(
action=" ".join([
"$OBJCOPY",
"-O",
"ihex",
"-R",
".eeprom",
"$SOURCES",
"$TARGET"]),
suffix=".hex"
)
)
)
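# With the builders registered above, a downstream PlatformIO build script can
# turn the linked ELF into flashable images, for example (illustrative only;
# target_elf and join come from that calling script, not from this file):
#   target_bin = env.ElfToBin(join("$BUILD_DIR", "firmware"), target_elf)
#   target_hex = env.ElfToHex(join("$BUILD_DIR", "firmware"), target_elf)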
|
"""
This module defines a component with a complex register file, loosely
based on the "gemac_simple" core [1].
[1]: @todo: background on the core
"""
from __future__ import print_function, division
import myhdl
from myhdl import Signal, intbv, always_comb, concat
from rhea import Global, Clock, Reset
from rhea.system import Register, RegisterFile
from rhea.system import Barebone, Wishbone, AvalonMM, AXI4Lite
def build_regfile():
""" Build a register file definition.
    This register file definition is loosely based on the gemac_simple ...
"""
regfile = RegisterFile()
for ii in range(2):
reg = Register(name='macaddr{}'.format(ii), width=32, access='rw', default=0)
regfile.add_register(reg)
for ii, dd in enumerate((0xFFFFFFFF, 0xFFFFFFFF)):
reg = Register(name='ucastaddr{}'.format(ii), width=32, access='rw', default=dd)
regfile.add_register(reg)
for ii, dd in enumerate((0xFFFFFFFF, 0xFFFFFFFF)):
reg = Register(name='mcastaddr{}'.format(ii), width=32, access='rw', default=dd)
regfile.add_register(reg)
reg = Register(name='control', width=32, access='rw', default=0)
# @todo: add the named bits
regfile.add_register(reg)
return regfile
@myhdl.block
def memmap_component(glbl, csrbus, cio, user_regfile=None):
"""
Ports
-----
:param glbl: global signals, clock, reset, enable, etc.
:param csrbus: memory-mapped bus
:param cio: component IO
    :param user_regfile: optional user-supplied RegisterFile; if None, a
        default register file is built with build_regfile()
"""
if user_regfile is None:
regfile = build_regfile()
else:
regfile = user_regfile
regfile_inst = csrbus.add(glbl, regfile, peripheral_name='TESTREG')
@always_comb
def beh_assign():
s = concat(regfile.macaddr0[:2], regfile.control[6:])
cio.next = s
return regfile_inst, beh_assign
@myhdl.block
def regfilesys(clock, reset):
"""
"""
glbl = Global(clock, reset)
csrbus = AXI4Lite(glbl, data_width=32, address_width=32)
cio = Signal(intbv(0)[8:])
mminst = memmap_component(glbl, csrbus, cio)
return mminst
|
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import warnings
from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
from google.api_core import gapic_v1 # type: ignore
from google.api_core import grpc_helpers_async # type: ignore
from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
import packaging.version
import grpc # type: ignore
from grpc.experimental import aio # type: ignore
from google.cloud.retail_v2.types import prediction_service
from .base import PredictionServiceTransport, DEFAULT_CLIENT_INFO
from .grpc import PredictionServiceGrpcTransport
class PredictionServiceGrpcAsyncIOTransport(PredictionServiceTransport):
"""gRPC AsyncIO backend transport for PredictionService.
Service for making recommendation prediction.
This class defines the same methods as the primary client, so the
primary client can load the underlying transport implementation
and call it.
It sends protocol buffers over the wire using gRPC (which is built on
top of HTTP/2); the ``grpcio`` package must be installed.
"""
_grpc_channel: aio.Channel
_stubs: Dict[str, Callable] = {}
@classmethod
def create_channel(
cls,
host: str = "retail.googleapis.com",
credentials: ga_credentials.Credentials = None,
credentials_file: Optional[str] = None,
scopes: Optional[Sequence[str]] = None,
quota_project_id: Optional[str] = None,
**kwargs,
) -> aio.Channel:
"""Create and return a gRPC AsyncIO channel object.
Args:
host (Optional[str]): The host for the channel to use.
credentials (Optional[~.Credentials]): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If
none are specified, the client will attempt to ascertain
the credentials from the environment.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is ignored if ``channel`` is provided.
            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
service. These are only used when credentials are not specified and
are passed to :func:`google.auth.default`.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
kwargs (Optional[dict]): Keyword arguments, which are passed to the
channel creation.
Returns:
aio.Channel: A gRPC AsyncIO channel object.
"""
return grpc_helpers_async.create_channel(
host,
credentials=credentials,
credentials_file=credentials_file,
quota_project_id=quota_project_id,
default_scopes=cls.AUTH_SCOPES,
scopes=scopes,
default_host=cls.DEFAULT_HOST,
**kwargs,
)
def __init__(
self,
*,
host: str = "retail.googleapis.com",
credentials: ga_credentials.Credentials = None,
credentials_file: Optional[str] = None,
scopes: Optional[Sequence[str]] = None,
channel: aio.Channel = None,
api_mtls_endpoint: str = None,
client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
ssl_channel_credentials: grpc.ChannelCredentials = None,
client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
quota_project_id=None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
always_use_jwt_access: Optional[bool] = False,
) -> None:
"""Instantiate the transport.
Args:
host (Optional[str]):
The hostname to connect to.
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
This argument is ignored if ``channel`` is provided.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is ignored if ``channel`` is provided.
            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
service. These are only used when credentials are not specified and
are passed to :func:`google.auth.default`.
channel (Optional[aio.Channel]): A ``Channel`` instance through
which to make calls.
api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
If provided, it overrides the ``host`` argument and tries to create
a mutual TLS channel with client SSL credentials from
                ``client_cert_source`` or application default SSL credentials.
client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
Deprecated. A callback to provide client SSL certificate bytes and
private key bytes, both in PEM format. It is ignored if
``api_mtls_endpoint`` is None.
ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
for grpc channel. It is ignored if ``channel`` is provided.
client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
A callback to provide client certificate bytes and private key bytes,
both in PEM format. It is used to configure mutual TLS channel. It is
ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
always_use_jwt_access (Optional[bool]): Whether self signed JWT should
be used for service account credentials.
Raises:
google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
creation failed for any reason.
google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
and ``credentials_file`` are passed.
"""
self._grpc_channel = None
self._ssl_channel_credentials = ssl_channel_credentials
self._stubs: Dict[str, Callable] = {}
if api_mtls_endpoint:
warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
if client_cert_source:
warnings.warn("client_cert_source is deprecated", DeprecationWarning)
if channel:
# Ignore credentials if a channel was passed.
credentials = False
# If a channel was explicitly provided, set it.
self._grpc_channel = channel
self._ssl_channel_credentials = None
else:
if api_mtls_endpoint:
host = api_mtls_endpoint
# Create SSL credentials with client_cert_source or application
# default SSL credentials.
if client_cert_source:
cert, key = client_cert_source()
self._ssl_channel_credentials = grpc.ssl_channel_credentials(
certificate_chain=cert, private_key=key
)
else:
self._ssl_channel_credentials = SslCredentials().ssl_credentials
else:
if client_cert_source_for_mtls and not ssl_channel_credentials:
cert, key = client_cert_source_for_mtls()
self._ssl_channel_credentials = grpc.ssl_channel_credentials(
certificate_chain=cert, private_key=key
)
# The base transport sets the host, credentials and scopes
super().__init__(
host=host,
credentials=credentials,
credentials_file=credentials_file,
scopes=scopes,
quota_project_id=quota_project_id,
client_info=client_info,
always_use_jwt_access=always_use_jwt_access,
)
if not self._grpc_channel:
self._grpc_channel = type(self).create_channel(
self._host,
credentials=self._credentials,
credentials_file=credentials_file,
scopes=self._scopes,
ssl_credentials=self._ssl_channel_credentials,
quota_project_id=quota_project_id,
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
],
)
# Wrap messages. This must be done after self._grpc_channel exists
self._prep_wrapped_messages(client_info)
@property
def grpc_channel(self) -> aio.Channel:
"""Create the channel designed to connect to this service.
This property caches on the instance; repeated calls return
the same channel.
"""
# Return the channel from cache.
return self._grpc_channel
@property
def predict(
self,
) -> Callable[
[prediction_service.PredictRequest],
Awaitable[prediction_service.PredictResponse],
]:
r"""Return a callable for the predict method over gRPC.
Makes a recommendation prediction.
Returns:
Callable[[~.PredictRequest],
Awaitable[~.PredictResponse]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "predict" not in self._stubs:
self._stubs["predict"] = self.grpc_channel.unary_unary(
"/google.cloud.retail.v2.PredictionService/Predict",
request_serializer=prediction_service.PredictRequest.serialize,
response_deserializer=prediction_service.PredictResponse.deserialize,
)
return self._stubs["predict"]
__all__ = ("PredictionServiceGrpcAsyncIOTransport",)
|
#!/usr/bin/python
#
# litlint
#
# Ensure RUN commands in lit tests are free of common errors.
#
# If any errors are detected, litlint returns a nonzero exit code.
#
import optparse
import re
import sys
# Compile regex once for all files
runRegex = re.compile(r'(?<!-o)(?<!%run) %t\s')
def LintLine(s):
""" Validate a line
Args:
s: str, the line to validate
Returns:
Returns an error message and a 1-based column number if an error was
detected, otherwise (None, None).
"""
# Check that RUN command can be executed with an emulator
m = runRegex.search(s)
if m:
start, end = m.span()
return ('missing %run before %t', start + 2)
# No errors
return (None, None)
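# Illustrative behaviour of LintLine: for a lit test line such as
#   // RUN: %clang %s -o %t && %t arg
# the bare " %t " after "&&" is preceded by neither "-o" nor "%run", so the
# regex matches and LintLine returns ('missing %run before %t', <column>);
# rewriting the invocation as "%run %t arg" makes the line pass.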
def LintFile(p):
""" Check that each RUN command can be executed with an emulator
Args:
p: str, valid path to a file
Returns:
The number of errors detected.
"""
errs = 0
with open(p, 'r') as f:
for i, s in enumerate(f.readlines(), start=1):
msg, col = LintLine(s)
if msg != None:
errs += 1
errorMsg = 'litlint: {}:{}:{}: error: {}.\n{}{}\n'
arrow = (col-1) * ' ' + '^'
sys.stderr.write(errorMsg.format(p, i, col, msg, s, arrow))
return errs
if __name__ == "__main__":
# Parse args
parser = optparse.OptionParser()
parser.add_option('--filter') # ignored
(options, filenames) = parser.parse_args()
# Lint each file
errs = 0
for p in filenames:
errs += LintFile(p)
# If errors, return nonzero
if errs > 0:
sys.exit(1)
|
from __future__ import unicode_literals
from __future__ import absolute_import
import os
from .. import unittest
import docker
import mock
from compose.cli.docopt_command import NoSuchCommand
from compose.cli.errors import UserError
from compose.cli.main import TopLevelCommand
from compose.service import Service
class CLITestCase(unittest.TestCase):
def test_default_project_name(self):
cwd = os.getcwd()
try:
os.chdir('tests/fixtures/simple-composefile')
command = TopLevelCommand()
project_name = command.get_project_name('.')
self.assertEquals('simplecomposefile', project_name)
finally:
os.chdir(cwd)
def test_project_name_with_explicit_base_dir(self):
command = TopLevelCommand()
command.base_dir = 'tests/fixtures/simple-composefile'
project_name = command.get_project_name(command.base_dir)
self.assertEquals('simplecomposefile', project_name)
def test_project_name_with_explicit_uppercase_base_dir(self):
command = TopLevelCommand()
command.base_dir = 'tests/fixtures/UpperCaseDir'
project_name = command.get_project_name(command.base_dir)
self.assertEquals('uppercasedir', project_name)
def test_project_name_with_explicit_project_name(self):
command = TopLevelCommand()
name = 'explicit-project-name'
project_name = command.get_project_name(None, project_name=name)
self.assertEquals('explicitprojectname', project_name)
def test_project_name_from_environment_old_var(self):
command = TopLevelCommand()
name = 'namefromenv'
with mock.patch.dict(os.environ):
os.environ['FIG_PROJECT_NAME'] = name
project_name = command.get_project_name(None)
self.assertEquals(project_name, name)
def test_project_name_from_environment_new_var(self):
command = TopLevelCommand()
name = 'namefromenv'
with mock.patch.dict(os.environ):
os.environ['COMPOSE_PROJECT_NAME'] = name
project_name = command.get_project_name(None)
self.assertEquals(project_name, name)
def test_get_project(self):
command = TopLevelCommand()
command.base_dir = 'tests/fixtures/longer-filename-composefile'
project = command.get_project()
self.assertEqual(project.name, 'longerfilenamecomposefile')
self.assertTrue(project.client)
self.assertTrue(project.services)
def test_help(self):
command = TopLevelCommand()
with self.assertRaises(SystemExit):
command.dispatch(['-h'], None)
def test_command_help(self):
with self.assertRaises(SystemExit) as ctx:
TopLevelCommand().dispatch(['help', 'up'], None)
self.assertIn('Usage: up', str(ctx.exception))
def test_command_help_dashes(self):
with self.assertRaises(SystemExit) as ctx:
TopLevelCommand().dispatch(['help', 'migrate-to-labels'], None)
self.assertIn('Usage: migrate-to-labels', str(ctx.exception))
def test_command_help_nonexistent(self):
with self.assertRaises(NoSuchCommand):
TopLevelCommand().dispatch(['help', 'nonexistent'], None)
@mock.patch('compose.cli.main.dockerpty', autospec=True)
def test_run_with_environment_merged_with_options_list(self, mock_dockerpty):
command = TopLevelCommand()
mock_client = mock.create_autospec(docker.Client)
mock_project = mock.Mock(client=mock_client)
mock_project.get_service.return_value = Service(
'service',
client=mock_client,
environment=['FOO=ONE', 'BAR=TWO'],
image='someimage')
command.run(mock_project, {
'SERVICE': 'service',
'COMMAND': None,
'-e': ['BAR=NEW', 'OTHER=THREE'],
'--user': None,
'--no-deps': None,
'--allow-insecure-ssl': None,
'-d': True,
'-T': None,
'--entrypoint': None,
'--service-ports': None,
'--publish': [],
'--rm': None,
})
_, _, call_kwargs = mock_client.create_container.mock_calls[0]
self.assertEqual(
call_kwargs['environment'],
{'FOO': 'ONE', 'BAR': 'NEW', 'OTHER': 'THREE'})
def test_run_service_with_restart_always(self):
command = TopLevelCommand()
mock_client = mock.create_autospec(docker.Client)
mock_project = mock.Mock(client=mock_client)
mock_project.get_service.return_value = Service(
'service',
client=mock_client,
restart='always',
image='someimage')
command.run(mock_project, {
'SERVICE': 'service',
'COMMAND': None,
'-e': [],
'--user': None,
'--no-deps': None,
'--allow-insecure-ssl': None,
'-d': True,
'-T': None,
'--entrypoint': None,
'--service-ports': None,
'--publish': [],
'--rm': None,
})
_, _, call_kwargs = mock_client.create_container.mock_calls[0]
self.assertEquals(call_kwargs['host_config']['RestartPolicy']['Name'], 'always')
command = TopLevelCommand()
mock_client = mock.create_autospec(docker.Client)
mock_project = mock.Mock(client=mock_client)
mock_project.get_service.return_value = Service(
'service',
client=mock_client,
restart='always',
image='someimage')
command.run(mock_project, {
'SERVICE': 'service',
'COMMAND': None,
'-e': [],
'--user': None,
'--no-deps': None,
'--allow-insecure-ssl': None,
'-d': True,
'-T': None,
'--entrypoint': None,
'--service-ports': None,
'--publish': [],
'--rm': True,
})
_, _, call_kwargs = mock_client.create_container.mock_calls[0]
self.assertFalse('RestartPolicy' in call_kwargs['host_config'])
    def test_command_manual_and_service_ports_together(self):
command = TopLevelCommand()
mock_client = mock.create_autospec(docker.Client)
mock_project = mock.Mock(client=mock_client)
mock_project.get_service.return_value = Service(
'service',
client=mock_client,
restart='always',
image='someimage',
)
with self.assertRaises(UserError):
command.run(mock_project, {
'SERVICE': 'service',
'COMMAND': None,
'-e': [],
'--user': None,
'--no-deps': None,
'--allow-insecure-ssl': None,
'-d': True,
'-T': None,
'--entrypoint': None,
'--service-ports': True,
'--publish': ['80:80'],
'--rm': None,
})
|
# -*- coding: utf-8 -*-
import json
from django.db.models import Count
from django.contrib.auth.decorators import login_required
from django.shortcuts import render
from django.urls import reverse
from django.utils.translation import ugettext as _
from ...server.models import Device, DeviceModel
def devices_by_connection():
total = Device.objects.count()
link = '{}?_REPLACE_'.format(
reverse('admin:server_device_changelist')
)
data = []
for item in Device.objects.values(
'connection__name',
'connection__id',
).annotate(
count=Count('id')
).order_by('-count'):
percent = float(item['count']) / total * 100
data.append({
'name': item['connection__name'],
'value': item['count'],
'y': float('{:.2f}'.format(percent)),
'url': link.replace(
'_REPLACE_',
'connection__id__exact={}'.format(item['connection__id'])
),
})
return {
'title': _('Devices / Connection'),
'total': total,
'data': json.dumps(data),
'url': link.replace('?_REPLACE_', ''),
}
def devices_by_model():
total = Device.objects.count()
link = '{}?_REPLACE_'.format(
reverse('admin:server_device_changelist')
)
data = []
for item in Device.objects.values(
'model__name',
'model__id',
).annotate(
count=Count('id')
).order_by('-count'):
percent = float(item['count']) / total * 100
data.append({
'name': item['model__name'],
'value': item['count'],
'y': float('{:.2f}'.format(percent)),
'url': link.replace(
'_REPLACE_',
'model__id__exact={}'.format(item['model__id'])
),
})
return {
'title': _('Devices / Model'),
'total': total,
'data': json.dumps(data),
'url': link.replace('?_REPLACE_', ''),
}
def devices_by_manufacturer():
total = Device.objects.count()
link = '{}?_REPLACE_'.format(
reverse('admin:server_device_changelist')
)
data = []
for item in Device.objects.values(
'model__manufacturer__name',
'model__manufacturer__id',
).annotate(
count=Count('id')
).order_by('-count'):
percent = float(item['count']) / total * 100
data.append({
'name': item['model__manufacturer__name'],
'value': item['count'],
'y': float('{:.2f}'.format(percent)),
'url': link.replace(
'_REPLACE_',
'model__manufacturer__id__exact={}'.format(item['model__manufacturer__id'])
),
})
return {
'title': _('Devices / Manufacturer'),
'total': total,
'data': json.dumps(data),
'url': link.replace('?_REPLACE_', ''),
}
def models_by_manufacturer():
total = DeviceModel.objects.count()
link = '{}?_REPLACE_'.format(
reverse('admin:server_devicemodel_changelist')
)
data = []
for item in DeviceModel.objects.values(
'manufacturer__name',
'manufacturer__id',
).annotate(
count=Count('id')
).order_by('-count'):
percent = float(item['count']) / total * 100
data.append({
'name': item['manufacturer__name'],
'value': item['count'],
'y': float('{:.2f}'.format(percent)),
'url': link.replace(
'_REPLACE_',
'manufacturer__id__exact={}'.format(item['manufacturer__id'])
),
})
return {
'title': _('Models / Manufacturer'),
'total': total,
'data': json.dumps(data),
'url': link.replace('?_REPLACE_', ''),
}
@login_required
def devices_summary(request):
return render(
request,
'devices_summary.html',
{
'title': _('Devices'),
'chart_options': {
'no_data': _('There are no data to show'),
'reset_zoom': _('Reset Zoom'),
},
'devices_by_connection': devices_by_connection(),
'devices_by_model': devices_by_model(),
'devices_by_manufacturer': devices_by_manufacturer(),
'opts': Device._meta,
}
)
@login_required
def device_models_summary(request):
return render(
request,
'device_models_summary.html',
{
'title': _('Models'),
'chart_options': {
'no_data': _('There are no data to show'),
'reset_zoom': _('Reset Zoom'),
},
'models_by_manufacturer': models_by_manufacturer(),
'opts': DeviceModel._meta,
}
)
|
#!/usr/bin/env python
'''
Created on Oct 4, 2016
@author: fu
'''
import os
import numpy as np
import xml.sax as sax
from Log import Log
# process the vasprun.xml file, extracting the lattices, forces and energy of every ionic step
class SaxHandler(sax.ContentHandler):
def __init__(self, log=None):
self.isComplete = True # whether data of a structure in vasprun.xml file is complete
self.isAtominfo = False # <atominfo>
self.isAtomNum = False # <atoms>
self.isAtoms = False # <array name="atoms">
self.isAtomSet = False # <set>
self.isAtomName = False # <c>
        self.counter2AtomName = 0  # picks the element-name <c> entries and skips the type entries in the atoms set
self.isCalculation = False
self.isStructure = False
self.isBasis = False
self.isPositions = False
self.isForces = False
self.isEnergy = False
self.isScstep = False
# element set
self.elementNum = 0
self.element = []
# save every ionic step
self.tmpL = []
self.tmpF = []
self.tmpE = []
# save all ionic step
        self.lattice = []  # [structure, basis+position, xyz]
self.forces = [] # [structure, forceOfatom, xyz]
self.energy = [] # [structure, energy, e_fr/e_wo/e_0]
self.counter = -1 # counter of structure in vasprun.xml
# log object
if log == None:
self.log=Log()
else:
self.log=log
def startElement(self, name, attrs):
if name == "atominfo":
self.isAtominfo = True
if self.isAtominfo and (name == "atoms"):
self.isAtomNum = True
if self.isAtominfo and (name == "array") and (attrs.getValueByQName("name") == "atoms"):
self.isAtoms = True
if self.isAtoms and (name == "set"):
self.isAtomSet = True
if self.isAtomSet and (name == "c"):
if self.counter2AtomName == 0:
self.isAtomName = True
if name == "calculation":
self.isCalculation = True
self.counter += 1
if self.isCalculation:
if name == "structure":
self.isStructure = True
elif name == "scstep":
self.isScstep = True
elif (name == "varray") and (attrs.getValueByQName("name") == "forces"):
self.isForces = True
elif(name == "scstep"):
self.isScstep = True
elif (not self.isScstep) and (name == "energy"): # exclude the energy of scf
self.isEnergy = True
if self.isStructure:
if (name == "varray") and (attrs.getValueByQName("name") == "basis"):
self.isBasis = True
elif (name == "varray") and (attrs.getValueByQName("name") == "positions"):
self.isPositions = True
def endElement(self, name):
if name == "atominfo":
self.isAtominfo = False
if self.isAtominfo and (name == "atoms"):
self.isAtomNum = False
if self.isAtominfo and (name == "array"):
self.isAtoms = False
if self.isAtoms and (name == "set"):
self.isAtomSet = False
if self.isAtomSet and (name == "c"):
if self.counter2AtomName == 0:
self.isAtomName = False
self.counter2AtomName=self.counter2AtomName+1
if self.counter2AtomName == 2:
self.counter2AtomName = 0
if name == "calculation":
self.isCalculation = False
# check data integrity
# 1. pop illegal data
if not(self.isComplete) or ():
self.lattice.pop(-1)
self.forces.pop(-1)
self.energy.pop(-1)
if self.isCalculation:
if name == "structure":
self.isStructure = False
elif name == "scstep":
self.isScstep = False
elif self.isForces and (name == "varray"):
self.isForces = False
self.forces.append(self.tmpF)
self.tmpF = []
elif name == "scstep":
self.isScstep = False
elif (not self.isScstep) and (name == "energy"): # exclude the energy of scf
self.isEnergy = False
self.energy.append(self.tmpE)
self.tmpE = []
if self.isStructure:
if self.isBasis and (name == "varray"):
self.isBasis = False
elif self.isPositions and (name == "varray"):
self.isPositions = False
self.lattice.append(self.tmpL)
self.tmpL = []
def characters(self, content):
if self.isAtomNum:
self.elementNum = int(content)
if self.isAtomName:
self.element.append(content)
if self.isBasis:
tmp = [float(s0) for s0 in content.split()]
if (len(tmp) != 0):
self.tmpL.append(tmp)
elif self.isPositions:
try:
tmp = [float(s0) for s0 in content.split()]
except ValueError:
                tmp = []
                for s0 in content.split():
if (s0 == "-"):
tmp.append(0.0)
string = "Warning! value of position will be set to 0.0"
print string
self.log.write(string)
self.isComplete = False
else:
tmp.append(float(s0))
if (len(tmp) != 0):
self.tmpL.append(tmp)
elif self.isForces:
try:
tmp = [float(s0) for s0 in content.split()]
except ValueError:
# log information
string = "Force isn't a digit! '%s' -> 0.0; skipping" %content.strip()
print string
self.log.write(string)
self.isComplete = False
tmp = content.split()
for i in xrange(0, len(tmp)):
try:
tmp[i] = float(tmp[i])
except ValueError:
self.isComplete = False
tmp[i] = 0.0
if len(tmp) != 0:
self.tmpF.append(tmp)
elif self.isEnergy:
try:
tmp = [float(s0) for s0 in content.split()]
except ValueError:
# log information
string = "Energy isn't a digit! %s" %content
print string
self.log.write(string)
self.isComplete = False
                tmp = [0.0]  # keep a list so the append below does not fail
if len(tmp) != 0:
self.tmpE.append(tmp[-1])
def getElement(self):
if len(self.element) != self.elementNum:
self.log.write("Error! number of elements isn't consistent.")
return np.array(self.element)
def getLattice(self):
return np.array(self.lattice)
def getForces(self):
return np.array(self.forces)
def getEnergy(self):
return np.array(self.energy)
class XMLProcessor():
    def __init__(self, filename, log=None):
self.filename = filename
# log object
if log == None:
self.log=Log()
else:
self.log=log
def read(self):
parser = sax.make_parser()
handler = SaxHandler(self.log)
parser.setContentHandler(handler)
parser.parse(open(self.filename, "r"))
element = handler.getElement()
lattice = handler.getLattice()
forces = handler.getForces()
energy = handler.getEnergy()
return element, lattice, forces, energy
# --------------------test--------------------
#x = XMLProcessor("/home/fu/workspace/nnp/Te/trainStructure/001/vasprun.xml")
#element, lattice, forces, energy = x.read()
#print element
#print lattice.shape
#print forces.shape
#print '%4.10f %4.10f %4.10f' %(forces[-1][-1][0], forces[-1][-1][1], forces[-1][-1][2])
#print energy.shape
|
# Copyright 2014, Doug Wiegley (dougwig), A10 Networks
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
from a10_neutron_lbaas import a10_common
import acos_client.errors as acos_errors
import handler_base_v2
import v2_context as a10
LOG = logging.getLogger(__name__)
class LoadbalancerHandler(handler_base_v2.HandlerBaseV2):
def _set(self, set_method, c, context, lb):
status = c.client.slb.UP
if not lb.admin_state_up:
status = c.client.slb.DOWN
try:
vip_meta = self.meta(lb, 'virtual_server', {})
a10_common._set_vrid_parameter(vip_meta, c.device_cfg)
vip_args = {
'virtual_server': vip_meta
}
set_method(
self._meta_name(lb),
lb.vip_address,
status,
axapi_args=vip_args)
except acos_errors.Exists:
pass
def _create(self, c, context, lb):
self._set(c.client.slb.virtual_server.create, c, context, lb)
self.hooks.after_vip_create(c, context, lb)
def create(self, context, lb):
with a10.A10WriteStatusContext(self, context, lb) as c:
self._create(c, context, lb)
def update(self, context, old_lb, lb):
with a10.A10WriteStatusContext(self, context, lb) as c:
self._set(c.client.slb.virtual_server.update, c, context, lb)
self.hooks.after_vip_update(c, context, lb)
def _delete(self, c, context, lb):
c.client.slb.virtual_server.delete(self._meta_name(lb))
def delete(self, context, lb):
with a10.A10DeleteContext(self, context, lb) as c:
self._delete(c, context, lb)
self.hooks.after_vip_delete(c, context, lb)
def stats(self, context, lb):
pass
def refresh(self, context, lb):
pass
|
# -*- coding: utf-8 -*-
import requests
import json
from requests_toolbelt.utils import dump
from PySide import QtCore, QtGui, QtWebKit
from utils import *
from VccReqResBase import *
class VccRemotePostUserCameraPhycam(VccReqResBase):
"""
    VCS remote > [POST] Camera list
"""
def __init__(self, parent, grid):
"""
        Set up the UI.
"""
super(VccRemotePostUserCameraPhycam, self).__init__(parent, grid)
###############################################################
(label, param) = self.set_defaultUI_request_Param()
self.grid.addWidget(label, 0, 0)
self.grid.addWidget(param, 1, 0)
###############################################################
button = self.set_defaultUI_request_Button(self.on_click)
self.grid.addWidget(button, 2, 0)
###############################################################
(label, view) = self.set_defaultUI_response_TreeView()
self.grid.addWidget(label, 3, 0)
self.grid.addWidget(view, 4, 0)
###############################################################
(label, raw) = self.set_defaultUI_raw_TextView()
self.grid.addWidget(label, 5, 0)
self.grid.addWidget(raw, 6, 0)
def set_defaultUI_request_Param(self):
"""
        Set the request parameters for the default UI.
"""
text = u"リクエスト"
label = QtGui.QLabel(text)
self.grid_inside['request_Param.label'] = label
style = "QTreeWidget {background-color: rgb(220, 220, 220);}"
param = QtGui.QTreeWidget(self)
param.header().setResizeMode(QtGui.QHeaderView.ResizeToContents)
param.header().setStretchLastSection(False)
param.setColumnCount(2)
param.setHeaderLabels([u"パラメータ", u"値"])
item = QtGui.QTreeWidgetItem(param)
item.setText(0, "Target")
item.setExpanded(True)
item2 = QtGui.QTreeWidgetItem(item)
item2.setText(0, "hostname")
item2.setText(1, confv("hostname"))
item2.setFlags(QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsEditable)
item2.setExpanded(True)
item = QtGui.QTreeWidgetItem(param)
item.setText(0, "Parameter")
item.setExpanded(True)
item2 = QtGui.QTreeWidgetItem(item)
item2.setText(0, "token")
item2.setText(1, confv("token"))
item2.setFlags(QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsEditable)
item2.setExpanded(True)
self.grid_inside['request_Param.param'] = param
return (label, param)
def communicate(self):
"""
        Send the request and return the response.
"""
url = '%s/remote/' % (confv("HOST"))
payload = {
"Request": "UserCameraPhycam"
}
items = treeitem_dict(self.inside('request_Param.param'))
payload.update(items)
headers = {
'X-VCC-API-TOKEN' : confv("API_TOKEN")
}
r = requests.post(url, data=json.dumps(payload, indent=4), headers=headers)
return r
def on_click(self):
"""
        Handle a click on the request button.
"""
r = self.communicate()
rawstr = dump.dump_all(r)
self.inside('raw_TextView.raw').setPlainText(rawstr.decode('utf-8'))
self.inside('response_TreeView.view').clear()
if r.status_code == 200:
path = save_history(rawstr, r)
data = r.json()
widget = self.inside('response_TreeView.view')
self.set_response_TreeView_columnset(widget, "root", data)
|
# Copyright 2015 Internap.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from functools import wraps
import json
import logging
from os.path import join
import re
from netaddr import IPNetwork, AddrFormatError, IPAddress
from flask import request
from netman.api.api_utils import BadRequest, MultiContext
from netman.core.objects.access_groups import IN, OUT
from netman.core.objects.exceptions import UnknownResource, BadVlanNumber,\
BadVlanName, BadBondNumber, BadBondLinkSpeed, MalformedSwitchSessionRequest, \
BadVrrpGroupNumber
def resource(*validators):
def resource_decorator(fn):
@wraps(fn)
def wrapper(self, **kwargs):
with MultiContext(self, kwargs, *validators) as ctxs:
return fn(self, *ctxs, **kwargs)
return wrapper
return resource_decorator
def content(validator_fn):
def content_decorator(fn):
@wraps(fn)
def wrapper(*args, **kwargs):
kwargs.update(validator_fn(request.data))
return fn(*args, **kwargs)
return wrapper
return content_decorator
class Vlan:
def __init__(self, switch_api):
self.switch_api = switch_api
self.vlan = None
def process(self, parameters):
self.vlan = is_vlan_number(parameters.pop('vlan_number'))['vlan_number']
def __enter__(self):
return self.vlan
def __exit__(self, *_):
pass
class Bond:
def __init__(self, switch_api):
self.switch_api = switch_api
self.bond = None
def process(self, parameters):
self.bond = is_bond_number(parameters.pop('bond_number'))['bond_number']
def __enter__(self):
return self.bond
def __exit__(self, *_):
pass
class IPNetworkResource:
def __init__(self, switch_api):
self.switch_api = switch_api
self.ip_network = None
def process(self, parameters):
try:
self.ip_network = is_ip_network(parameters.pop('ip_network'))['validated_ip_network']
except BadRequest:
raise BadRequest('Malformed IP, should be : x.x.x.x/xx')
def __enter__(self):
return self.ip_network
def __exit__(self, *_):
pass
class Switch:
def __init__(self, switch_api):
self.switch_api = switch_api
self.is_session = False
self.switch = None
def process(self, parameters):
hostname = parameters.pop('hostname')
try:
self.switch = self.switch_api.resolve_session(hostname)
self.is_session = True
except UnknownResource:
self.switch = self.switch_api.resolve_switch(hostname)
def __enter__(self):
if not self.is_session:
self.switch.connect()
return self.switch
def __exit__(self, *_):
if not self.is_session:
self.switch.disconnect()
class Session:
def __init__(self, switch_api):
self.switch_api = switch_api
self.session = None
def process(self, parameters):
self.session = parameters.pop('session_id')
self.switch_api.resolve_session(self.session)
def __enter__(self):
return self.session
def __exit__(self, *_):
pass
class Interface:
def __init__(self, switch_api):
self.switch_api = switch_api
self.interface = None
def process(self, parameters):
self.interface = parameters.pop('interface_id')
def __enter__(self):
return self.interface
def __exit__(self, *_):
pass
class Resource:
def __init__(self, switch_api):
self.switch_api = switch_api
self.resource = None
def process(self, parameters):
self.resource = parameters.pop('resource')
def __enter__(self):
return self.resource
def __exit__(self, *_):
pass
class Direction:
def __init__(self, switch_api):
self.switch_api = switch_api
self.direction = None
def process(self, parameters):
direction = parameters.pop('direction')
if direction.lower() == 'in':
self.direction = IN
elif direction.lower() == 'out':
self.direction = OUT
else:
raise UnknownResource("Unknown direction : {}".format(direction))
def __enter__(self):
return self.direction
def __exit__(self, *_):
pass
class VrrpGroup:
def __init__(self, switch_api):
self.switch_api = switch_api
self.vrrp_group_id = None
def process(self, parameters):
try:
self.vrrp_group_id = int(parameters.pop('vrrp_group_id'))
if not 1 <= self.vrrp_group_id <= 255:
raise BadVrrpGroupNumber()
except (ValueError, KeyError):
raise BadVrrpGroupNumber()
def __enter__(self):
return self.vrrp_group_id
def __exit__(self, *_):
pass
def is_session(data, **_):
try:
json_data = json.loads(data)
except ValueError:
raise BadRequest("Malformed content, should be a JSON object")
if "hostname" not in json_data:
raise MalformedSwitchSessionRequest()
return {
'hostname': json_data["hostname"]
}
def is_vlan(data, **_):
try:
json_data = json.loads(data)
except ValueError:
raise BadRequest("Malformed content, should be a JSON object")
if "number" not in json_data:
raise BadVlanNumber()
name = json_data["name"] if "name" in json_data and len(json_data["name"]) > 0 else None
if name and " " in name:
raise BadVlanName()
return {
'number': is_vlan_number(json_data["number"])['vlan_number'],
'name': name
}
def is_vlan_number(vlan_number, **_):
try:
vlan_int = int(vlan_number)
except ValueError:
logging.getLogger("netman.api").info("Rejected vlan content : {}".format(repr(vlan_number)))
raise BadVlanNumber()
if not 1 <= vlan_int <= 4094:
logging.getLogger("netman.api").info("Rejected vlan number : {}".format(vlan_number))
raise BadVlanNumber()
return {'vlan_number': vlan_int}
def is_ip_network(data, **_):
try:
try:
json_addr = json.loads(data)
ip = IPNetwork("{}/{}".format(json_addr["address"], json_addr["mask"]))
except ValueError:
ip = IPNetwork(data)
except (KeyError, AddrFormatError):
raise BadRequest('Malformed content, should be : x.x.x.x/xx or {"address": "x.x.x.x", "mask": "xx"}')
return {'validated_ip_network': ip}
def is_vrrp_group(data, **_):
try:
data = json.loads(data)
except ValueError:
raise BadRequest("Malformed content, should be a JSON object")
if data.get('id') is None:
raise BadRequest("VRRP group id is mandatory")
return dict(
group_id=data.pop('id'),
ips=[validate_ip_address(i) for i in data.pop('ips', [])],
**data
)
def is_int(number, **_):
try:
value = int(number)
except ValueError:
raise BadRequest('Expected integer content, got "{}"'.format(number))
return {'value': value}
def is_boolean(option, **_):
option = option.lower()
if option not in ['true', 'false']:
raise BadRequest('Unreadable content "{}". Should be either "true" or "false"'.format(option))
return {'state': option == 'true'}
def is_access_group_name(data, **_):
if data == "" or " " in data:
raise BadRequest('Malformed access group name')
return {'access_group_name': data}
def is_vrf_name(data, **_):
if data == "" or " " in data:
raise BadRequest('Malformed VRF name')
return {'vrf_name': data}
def is_bond_number(bond_number, **_):
try:
bond_number_int = int(bond_number)
except ValueError:
logging.getLogger("netman.api").info("Rejected number content : {}".format(repr(bond_number)))
raise BadBondNumber()
return {'bond_number': bond_number_int}
def is_bond(data, **_):
try:
json_data = json.loads(data)
except ValueError:
raise BadRequest("Malformed content, should be a JSON object")
if "number" not in json_data:
raise BadBondNumber()
return {
'bond_number': is_bond_number(json_data["number"])['bond_number'],
}
def is_bond_link_speed(data, **_):
if re.match(r'^\d+[mg]$', data):
return {'bond_link_speed': data}
raise BadBondLinkSpeed()
def is_description(description, **_):
return {'description': description}
def is_dict_with(**fields):
def m(data, **_):
try:
result = json.loads(data)
except ValueError:
raise BadRequest("Malformed JSON request")
for field, validator in fields.iteritems():
validator(result, field)
for field, validator in result.iteritems():
if field not in fields:
raise BadRequest("Unknown key: {}".format(field))
return result
return m
def validate_ip_address(data):
try:
return IPAddress(data)
except:
raise BadRequest("Incorrect IP Address: \"{}\", should be x.x.x.x".format(data))
def optional(sub_validator):
def m(params, key):
if key in params:
sub_validator(params, key)
return m
def is_type(obj_type):
def m(params, key):
if not isinstance(params[key], obj_type):
raise BadRequest('Expected "{}" type for key {}, got "{}"'.format(obj_type.__name__, key, type(params[key]).__name__))
return m
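# Illustrative composition of the validators above (the "priority" field is a
# hypothetical example, not part of netman's API): a request body such as
# '{"priority": 110}' run through
#   is_dict_with(priority=optional(is_type(int)))
# is JSON-decoded, "priority" is type-checked because it is present, and any
# key that is not declared is rejected with BadRequest("Unknown key: ...").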
|
import copy
import os.path
from collections import OrderedDict
from pykwalify.core import Core
import yaml
def read_config(file_):
"""Reads a configuration from YAML file.
Resolves parent links in the configuration.
"""
config = yaml.load(file_)
if 'parent' in config:
with open(os.path.expandvars(config['parent'])) as src:
changes = dict(config)
config = read_config(src)
merge_recursively(config, changes)
return config
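# Illustrative parent resolution (file names are hypothetical): if child.yaml
# contains "parent: $BASE/base.yaml" plus a few overriding keys, read_config
# loads base.yaml first (recursively, through this same function) and then
# merges the child's keys on top of it with merge_recursively, so nested
# dictionaries are merged key by key instead of being replaced wholesale.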
def merge_recursively(config, changes):
"""Merge hierarchy of changes into a configuration."""
for key, value in changes.items():
if isinstance(value, dict) and isinstance(config.get(key), dict):
merge_recursively(config[key], value)
else:
config[key] = value
def make_config_changes(config, changes):
"""Apply changes to a configuration.
Parameters
----------
config : dict
The configuration.
changes : dict
        A dict of (hierarchical path as string, new value) pairs.
"""
for path, value in changes:
parts = path.split('.')
assign_to = config
for part in parts[:-1]:
assign_to = assign_to[part]
assign_to[parts[-1]] = yaml.load(value)
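# Illustrative change specification (the key is hypothetical): passing
#   [("training.learning_rate", "0.001")]
# walks into config["training"] and sets its "learning_rate" entry to the
# YAML-parsed value 0.001, so numbers and booleans keep their proper types.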
class Configuration(dict):
"""Convenient access to a multi-stage configuration.
Attributes
----------
multi_stage : bool
``True`` if the configuration describes multiple training stages
ordered_stages : OrderedDict, optional
Configurations for all the training stages in the order of
their numbers.
"""
def __init__(self, config_path, schema_path, config_changes):
with open(config_path, 'rt') as src:
config = read_config(src)
make_config_changes(config, config_changes)
self.multi_stage = 'stages' in config
if self.multi_stage:
ordered_changes = OrderedDict(
sorted(config['stages'].items(),
key=lambda (k, v): v['number'],))
self.ordered_stages = OrderedDict()
for name, changes in ordered_changes.items():
current_config = copy.deepcopy(config)
del current_config['stages']
del changes['number']
merge_recursively(current_config, changes)
self.ordered_stages[name] = current_config
# Validate the configuration and the training stages
with open(os.path.expandvars(schema_path)) as schema_file:
schema = yaml.safe_load(schema_file)
core = Core(source_data=config, schema_data=schema)
core.validate(raise_exception=True)
if self.multi_stage:
for stage in self.ordered_stages.values():
                core = Core(source_data=stage, schema_data=schema)
core.validate(raise_exception=True)
super(Configuration, self).__init__(config)
|
"""
Classes to represent the default SQL aggregate functions
"""
import copy
import warnings
from django.db.models.fields import IntegerField, FloatField
from django.db.models.lookups import RegisterLookupMixin
from django.utils.deprecation import RemovedInDjango20Warning
from django.utils.functional import cached_property
__all__ = ['Aggregate', 'Avg', 'Count', 'Max', 'Min', 'StdDev', 'Sum', 'Variance']
warnings.warn(
"django.db.models.sql.aggregates is deprecated. Use "
"django.db.models.aggregates instead.",
RemovedInDjango20Warning, stacklevel=2)
class Aggregate(RegisterLookupMixin):
"""
Default SQL Aggregate.
"""
is_ordinal = False
is_computed = False
sql_template = '%(function)s(%(field)s)'
def __init__(self, col, source=None, is_summary=False, **extra):
"""Instantiate an SQL aggregate
* col is a column reference describing the subject field
of the aggregate. It can be an alias, or a tuple describing
a table and column name.
* source is the underlying field or aggregate definition for
the column reference. If the aggregate is not an ordinal or
computed type, this reference is used to determine the coerced
output type of the aggregate.
* extra is a dictionary of additional data to provide for the
aggregate definition
Also utilizes the class variables:
* sql_function, the name of the SQL function that implements the
aggregate.
* sql_template, a template string that is used to render the
aggregate into SQL.
* is_ordinal, a boolean indicating if the output of this aggregate
is an integer (e.g., a count)
        * is_computed, a boolean indicating if the output of this aggregate
is a computed float (e.g., an average), regardless of the input
type.
"""
self.col = col
self.source = source
self.is_summary = is_summary
self.extra = extra
# Follow the chain of aggregate sources back until you find an
# actual field, or an aggregate that forces a particular output
        # type. The type of this field will be used to coerce values
# retrieved from the database.
tmp = self
while tmp and isinstance(tmp, Aggregate):
if getattr(tmp, 'is_ordinal', False):
tmp = self._ordinal_aggregate_field
elif getattr(tmp, 'is_computed', False):
tmp = self._computed_aggregate_field
else:
tmp = tmp.source
self.field = tmp
# Two fake fields used to identify aggregate types in data-conversion operations.
@cached_property
def _ordinal_aggregate_field(self):
return IntegerField()
@cached_property
def _computed_aggregate_field(self):
return FloatField()
def relabeled_clone(self, change_map):
clone = copy.copy(self)
if isinstance(self.col, (list, tuple)):
clone.col = (change_map.get(self.col[0], self.col[0]), self.col[1])
return clone
def as_sql(self, qn, connection):
"Return the aggregate, rendered as SQL with parameters."
params = []
if hasattr(self.col, 'as_sql'):
field_name, params = self.col.as_sql(qn, connection)
elif isinstance(self.col, (list, tuple)):
field_name = '.'.join(qn(c) for c in self.col)
else:
field_name = qn(self.col)
substitutions = {
'function': self.sql_function,
'field': field_name
}
substitutions.update(self.extra)
return self.sql_template % substitutions, params
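    # Illustrative rendering: for Count(col, distinct=True) the template
    # '%(function)s(%(distinct)s%(field)s)' and the substitutions built above
    # yield SQL along the lines of COUNT(DISTINCT "books"."id"), with the
    # column reference already quoted by qn().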
def get_group_by_cols(self):
return []
@property
def output_field(self):
return self.field
class Avg(Aggregate):
is_computed = True
sql_function = 'AVG'
class Count(Aggregate):
is_ordinal = True
sql_function = 'COUNT'
sql_template = '%(function)s(%(distinct)s%(field)s)'
def __init__(self, col, distinct=False, **extra):
super(Count, self).__init__(col, distinct='DISTINCT ' if distinct else '', **extra)
class Max(Aggregate):
sql_function = 'MAX'
class Min(Aggregate):
sql_function = 'MIN'
class StdDev(Aggregate):
is_computed = True
def __init__(self, col, sample=False, **extra):
super(StdDev, self).__init__(col, **extra)
self.sql_function = 'STDDEV_SAMP' if sample else 'STDDEV_POP'
class Sum(Aggregate):
sql_function = 'SUM'
class Variance(Aggregate):
is_computed = True
def __init__(self, col, sample=False, **extra):
super(Variance, self).__init__(col, **extra)
self.sql_function = 'VAR_SAMP' if sample else 'VAR_POP'
|
'''
update:
2014/09/03:
softmax in the last layer
special:
plot, plot_interval
'''
import theano
import theano.tensor as T
import gzip
import cPickle
import numpy
import time
class HiddenLayer(object):
def __init__(self, rng, input, n_in, n_out, W=None, b=None,
activation=T.tanh):
self.input = input
if W is None:
W_values = numpy.asarray(rng.uniform(
low=-numpy.sqrt(6. / (n_in + n_out)),
high=numpy.sqrt(6. / (n_in + n_out)),
size=(n_in, n_out)), dtype=theano.config.floatX)
if activation == theano.tensor.nnet.sigmoid:
W_values *= 4
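            # The uniform range above is the Glorot-style
            # sqrt(6 / (n_in + n_out)) heuristic; the 4x scaling is the
            # adjustment commonly suggested for sigmoid units.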
W = theano.shared(value=W_values, name='W', borrow=True)
if b is None:
b_values = numpy.zeros((n_out,), dtype=theano.config.floatX)
b = theano.shared(value=b_values, name='b', borrow=True)
self.W = W
self.b = b
lin_output = T.dot(input, self.W) + self.b
self.output = (lin_output if activation is None
else activation(lin_output))
# parameters of the model
self.params = [self.W, self.b]
class ANN(object):
def __init__(self, n_in, n_out, lmbd = 0.01, hiddens = [10]):
x = T.matrix('x')
y = T.ivector('y')
lr = T.scalar('lr')
rng = numpy.random.RandomState(numpy.random.randint(2 ** 30))
params = []
hid_layers = []
L2 = .0
n_hid = hiddens + [n_out]
        # Build the stack of layers without rebinding `n_in`, so that
        # self.n_in (set below) keeps the network's true input dimension.
        for ind, ele in enumerate(n_hid):
            if ind == 0:
                layer_input = x
                layer_n_in = n_in
            else:
                layer_input = hid_layers[-1].output
                layer_n_in = n_hid[ind - 1]
            if ind == len(n_hid) - 1:
                activation = T.nnet.softmax
            else:
                activation = T.nnet.sigmoid
            layer = HiddenLayer(rng, input=layer_input, n_in=layer_n_in,
                                n_out=ele, activation=activation)
            hid_layers.append(layer)
L2 += T.sum(layer.W ** 2)
params.extend([layer.W, layer.b])
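        # mean negative log-likelihood of the target class under the softmax output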
nl = -T.mean(T.log(hid_layers[-1].output)[T.arange(y.shape[0]), y])
cost = nl + L2 * lmbd
grads = T.grad(cost, params)
updates = []
for param_i, grad_i in zip(params, grads):
updates.append((param_i, param_i - lr * grad_i))
y_pred = T.argmax(hid_layers[-1].output, 1)
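        # mean misclassification rate of the arg-max prediction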
errors = T.mean(T.neq(y_pred, y))
self.n_in = n_in
self.n_out = n_out
self.hiddens = hiddens
self.x = x
self.y = y
self.lr = lr
self.cost = cost
self.errors = errors
self.updates = updates
#self.pred = y_pred
self.time = []
self.hid_layers = hid_layers
def fit(self, datasets, batch_size = 500, n_epochs = 200, lr = 0.01,
plot = None, plot_interval = None):
''' without validation'''
index = T.lscalar()
train_set_x, train_set_y = datasets[0]
test_set_x, test_set_y = datasets[1]
try:
n_train_batches = train_set_x.get_value(borrow=True).shape[0]
n_test_batches = test_set_x.get_value(borrow=True).shape[0]
        except AttributeError:
n_train_batches = train_set_x.shape[0]
n_test_batches = test_set_x.shape[0]
n_train_batches /= batch_size
n_test_batches /= batch_size
train_model = theano.function([index], self.cost,
updates = self.updates,
givens = {
self.x: train_set_x[index * batch_size: (index + 1) * batch_size],
self.y: train_set_y[index * batch_size: (index + 1) * batch_size],
self.lr: lr})
test_model = theano.function([], self.errors,
givens = {
self.x: test_set_x,
self.y: test_set_y})
debug_f = theano.function([index], self.errors,
givens = {
self.x: test_set_x[index * batch_size : (index+1) * batch_size],
self.y: test_set_y[index * batch_size : (index+1) * batch_size]})
# print numpy.mean([debug_f(i) for i in xrange(n_test_batches)])
print(test_model())
print '...training'
maxiter = n_epochs
iteration = 0
while iteration < maxiter:
start_time = time.time()
iteration += 1
print 'iteration %d' % iteration
for minibatch_index in xrange(n_train_batches):
print '\tL of (%03d/%03d) = %f\r' % (minibatch_index, n_train_batches, train_model(minibatch_index)),
print ''
print 'error = %f (size=%d)' % (test_model(), test_set_y.shape[0].eval())
self.time.append(time.time()-start_time)
if plot:
if iteration % plot_interval == 0:
plot(self, iteration)
def pred(self, x):
return theano.function([], T.argmax(self.hid_layers[-1].output, 1),
givens = {self.x: x})()
def prob(self, x):
return theano.function([], self.hid_layers[-1].output,
givens = {self.x: x})()
def __repr__(self):
return '<ANN:%r-%r-%r>' % (self.n_in, self.hiddens, self.n_out)
def get_neg_log(self, x, y):
return theano.function([], -T.log(self.hid_layers[-1].output)[T.arange(self.y.shape[0]), self.y],
givens={self.x:x, self.y:y})()
def load_data(dataset, num = None):
print '... loading data'
f = gzip.open(dataset, 'rb')
train_set, valid_set, test_set = cPickle.load(f)
train_set = (numpy.concatenate([train_set[0], valid_set[0]], 0), numpy.concatenate([train_set[1], valid_set[1]], 0))
f.close()
def shared_dataset(data_xy, borrow=True, num = None):
data_x, data_y = data_xy
if num:
data_x = data_x[:num]
data_y = data_y[:num]
# data_y = boarden(10, data_y)
size = int(data_x.shape[1]**.5)
# data_x = data_x.reshape(data_x.shape[0], -1)
print data_x.shape, data_y.shape
shared_x = theano.shared(numpy.asarray(data_x,
dtype=theano.config.floatX),
borrow=borrow)
shared_y = theano.shared(numpy.asarray(data_y,
dtype=theano.config.floatX),
borrow=borrow)
return shared_x, T.cast(shared_y, 'int32')
test_set_x, test_set_y = shared_dataset(test_set, num = num)
# valid_set_x, valid_set_y = shared_dataset(valid_set, num = num)
train_set_x, train_set_y = shared_dataset(train_set, num = num)
rval = [(train_set_x, train_set_y), #(valid_set_x, valid_set_y),
(test_set_x, test_set_y)]
return rval
if __name__ == '__main__':
theano.config.exception_verbosity='high'
theano.config.on_unused_input='ignore'
datasets = load_data('../../Data/mnist/mnist.pkl.gz')
cl = ANN(28 * 28, 10, hiddens = [1])
cl.fit(datasets, lr = 0.1)
|
#!/usr/bin/env python
import os
import subprocess
import sys
from setuptools import setup, find_packages
NAME = 'Orange3-Text'
MAJOR = 0
MINOR = 2
MICRO = 3
IS_RELEASED = False
VERSION = '%d.%d.%d' % (MAJOR, MINOR, MICRO)
FULL_VERSION = VERSION
DESCRIPTION = 'Orange3 TextMining add-on.'
README_FILE = os.path.join(os.path.dirname(__file__), 'README.pypi')
LONG_DESCRIPTION = open(README_FILE).read()
AUTHOR = 'Bioinformatics Laboratory, FRI UL'
AUTHOR_EMAIL = '[email protected]'
URL = "https://github.com/biolab/orange3-text"
DOWNLOAD_URL = "https://github.com/biolab/orange3-text/tarball/{}".format(VERSION)
KEYWORDS = [
# [PyPi](https://pypi.python.org) packages with keyword "orange3 add-on"
# can be installed using the Orange Add-on Manager
'orange3-text',
'data mining',
'orange3 add-on',
]
ENTRY_POINTS = {
'orange3.addon': (
'text = orangecontrib.text',
),
# Entry point used to specify packages containing tutorials accessible
# from welcome screen. Tutorials are saved Orange Workflows (.ows files).
'orange.widgets.tutorials': (
# Syntax: any_text = path.to.package.containing.tutorials
'exampletutorials = orangecontrib.text.tutorials',
),
# Entry point used to specify packages containing widgets.
'orange.widgets': (
# Syntax: category name = path.to.package.containing.widgets
# Widget category specification can be seen in
# orangecontrib/text/widgets/__init__.py
'Text Mining = orangecontrib.text.widgets',
),
# Register widget help
"orange.canvas.help": (
'html-index = orangecontrib.text.widgets:WIDGET_HELP_PATH',),
}
def git_version():
""" Return the git revision as a string. Copied from numpy setup.py. """
def _minimal_ext_cmd(cmd):
# construct minimal environment
env = {}
for k in ['SYSTEMROOT', 'PATH']:
v = os.environ.get(k)
if v is not None:
env[k] = v
# LANGUAGE is used on win32
env['LANGUAGE'] = 'C'
env['LANG'] = 'C'
env['LC_ALL'] = 'C'
out = subprocess.Popen(cmd, stdout=subprocess.PIPE, env=env).communicate()[0]
return out
try:
out = _minimal_ext_cmd(['git', 'rev-parse', 'HEAD'])
GIT_REVISION = out.strip().decode('ascii')
except OSError:
GIT_REVISION = "Unknown"
return GIT_REVISION
def get_version_info():
""" Copied from numpy setup.py. """
global FULL_VERSION
FULL_VERSION = VERSION
if os.path.exists('.git'):
GIT_REVISION = git_version()
elif os.path.exists('orangecontrib/text/version.py'):
# must be a source distribution, use existing version file
# load it as a separate module to not load orangecontrib/text/__init__.py
from importlib.machinery import SourceFileLoader
version = SourceFileLoader('orangecontrib.text.version',
'orangecontrib/text/version.py').load_module()
GIT_REVISION = version.git_revision
else:
GIT_REVISION = "Unknown"
if not IS_RELEASED:
FULL_VERSION += '.dev0+' + GIT_REVISION[:7]
return FULL_VERSION, GIT_REVISION
def write_version_py(filename='orangecontrib/text/version.py'):
""" Copied from numpy setup.py. """
cnt = """
# THIS FILE IS GENERATED FROM ORANGE3-TEXT SETUP.PY
short_version = '%(version)s'
version = '%(version)s'
full_version = '%(full_version)s'
git_revision = '%(git_revision)s'
release = %(isrelease)s
if not release:
    version = full_version
    short_version += ".dev"
"""
FULL_VERSION, GIT_REVISION = get_version_info()
a = open(filename, 'w')
try:
a.write(cnt % {'version': VERSION,
'full_version': FULL_VERSION,
'git_revision': GIT_REVISION,
'isrelease': str(IS_RELEASED)})
finally:
a.close()
INSTALL_REQUIRES = sorted(set(
line.partition('#')[0].strip()
for line in open(os.path.join(os.path.dirname(__file__), 'requirements.txt'))
) - {''})
if 'test' in sys.argv:
extra_setuptools_args = dict(
test_suite='orangecontrib.text.tests',
)
else:
extra_setuptools_args = dict()
if __name__ == '__main__':
write_version_py()
setup(
name=NAME,
description=DESCRIPTION,
long_description=LONG_DESCRIPTION,
version=FULL_VERSION,
author=AUTHOR,
author_email=AUTHOR_EMAIL,
url=URL,
download_url=DOWNLOAD_URL,
packages=find_packages(),
include_package_data=True,
install_requires=INSTALL_REQUIRES,
entry_points=ENTRY_POINTS,
keywords=KEYWORDS,
namespace_packages=['orangecontrib'],
zip_safe=False,
**extra_setuptools_args
)
|
from numpy import exp, log
from optimix import Function, Scalar
from .._util import format_function
class LinearCov(Function):
"""
Linear covariance function, K = s⋅XXᵀ.
The mathematical representation is s⋅XXᵀ, for a n×r matrix X provided by the user
and a scaling parameter s.
Example
-------
.. doctest::
>>> from glimix_core.cov import LinearCov
>>> from numpy import dot
>>> from numpy.random import RandomState
>>>
>>> X = RandomState(0).randn(2, 3)
>>> cov = LinearCov(X)
>>> cov.scale = 1.3
>>> cov.name = "K"
>>> print(cov)
LinearCov(): K
scale: 1.3
"""
def __init__(self, X):
"""
Constructor.
Parameters
----------
X : array_like
Matrix X from K = s⋅XXᵀ.
"""
self._logscale = Scalar(0.0)
self._X = X
Function.__init__(self, "LinearCov", logscale=self._logscale)
self._logscale.bounds = (-20.0, +10)
@property
def X(self):
"""
Matrix X from K = s⋅XXᵀ.
"""
return self._X
def fix(self):
"""
Prevent s update during optimization.
"""
self._fix("logscale")
def unfix(self):
"""
Enable s update during optimization.
"""
self._unfix("logscale")
@property
def scale(self):
"""
Scale parameter.
"""
return exp(self._logscale.value)
@scale.setter
def scale(self, scale):
from numpy_sugar import epsilon
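        # clamp to the smallest positive float so log(scale) below stays finite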
scale = max(scale, epsilon.tiny)
self._logscale.value = log(scale)
def value(self):
"""
Covariance matrix.
Returns
-------
K : ndarray
s⋅XXᵀ.
"""
X = self.X
return self.scale * (X @ X.T)
def gradient(self):
"""
Derivative of the covariance matrix over log(s).
Returns
-------
logscale : ndarray
s⋅XXᵀ.
"""
return dict(logscale=self.value())
def __str__(self):
return format_function(self, {}, [("scale", self.scale)])
|
# Copyright (c) 2001-2004 Twisted Matrix Laboratories.
# See LICENSE for details.
#
"""Twisted inetd.
Stability: semi-stable
Maintainer: U{Andrew Bennetts<mailto:[email protected]>}
Future Plans: Bugfixes. Specifically for UDP and Sun-RPC, which don't work
correctly yet.
"""
import os
from twisted.internet import process, reactor, fdesc
from twisted.internet.protocol import Protocol, ServerFactory
from twisted.protocols import wire
# A dict of known 'internal' services (i.e. those that don't involve spawning
# another process).
internalProtocols = {
'echo': wire.Echo,
'chargen': wire.Chargen,
'discard': wire.Discard,
'daytime': wire.Daytime,
'time': wire.Time,
}
class InetdProtocol(Protocol):
"""Forks a child process on connectionMade, passing the socket as fd 0."""
def connectionMade(self):
sockFD = self.transport.fileno()
childFDs = {0: sockFD, 1: sockFD}
if self.factory.stderrFile:
childFDs[2] = self.factory.stderrFile.fileno()
# processes run by inetd expect blocking sockets
# FIXME: maybe this should be done in process.py? are other uses of
# Process possibly affected by this?
fdesc.setBlocking(sockFD)
        if 2 in childFDs:
fdesc.setBlocking(childFDs[2])
service = self.factory.service
uid = service.user
gid = service.group
# don't tell Process to change our UID/GID if it's what we
# already are
if uid == os.getuid():
uid = None
if gid == os.getgid():
gid = None
process.Process(None, service.program, service.programArgs, os.environ,
None, None, uid, gid, childFDs)
reactor.removeReader(self.transport)
reactor.removeWriter(self.transport)
class InetdFactory(ServerFactory):
protocol = InetdProtocol
stderrFile = None
def __init__(self, service):
self.service = service
|
import asyncpg
import os
import jinja2
from logging import getLogger
from random import randint
from operator import itemgetter
import multiprocessing
from wsgiref.handlers import format_date_time
import sanic
from sanic import response
logger = getLogger(__name__)
READ_ROW_SQL = 'SELECT "randomnumber", "id" FROM "world" WHERE id = $1'
READ_ROW_SQL_TO_UPDATE = 'SELECT "id", "randomnumber" FROM "world" WHERE id = $1'
WRITE_ROW_SQL = 'UPDATE "world" SET "randomnumber"=$1 WHERE id=$2'
ADDITIONAL_ROW = [0, 'Additional fortune added at request time.']
def load_fortunes_template():
path = os.path.join('templates', 'fortune.html')
with open(path, 'r') as template_file:
template_text = template_file.read()
return jinja2.Template(template_text)
def get_num_queries(queries):
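    # clamp the requested query count to the 1-500 range used by the benchmark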
try:
query_count = int(queries)
except (ValueError, TypeError):
return 1
if query_count < 1:
return 1
if query_count > 500:
return 500
return query_count
connection_pool = None
sort_fortunes_key = itemgetter(1)
template = load_fortunes_template()
app = sanic.Sanic()
@app.listener('before_server_start')
async def setup_database(app, loop):
global connection_pool
connection_pool = await asyncpg.create_pool(
user=os.getenv('PGUSER', 'benchmarkdbuser'),
password=os.getenv('PGPASS', 'benchmarkdbpass'),
database='hello_world',
host='tfb-database',
port=5432
)
@app.get('/json')
def json_view(request):
return response.json({'message': 'Hello, world!'}, headers=get_headers())
@app.get('/db')
async def single_database_query_view(request):
row_id = randint(1, 10000)
async with connection_pool.acquire() as connection:
number = await connection.fetchval(READ_ROW_SQL, row_id)
return response.json(
{'id': row_id, 'randomNumber': number},
headers=get_headers()
)
@app.get('/queries')
async def multiple_database_queries_view(request):
num_queries = get_num_queries(request.args.get('queries', 1))
row_ids = [randint(1, 10000) for _ in range(num_queries)]
worlds = []
async with connection_pool.acquire() as connection:
statement = await connection.prepare(READ_ROW_SQL)
for row_id in row_ids:
number = await statement.fetchval(row_id)
worlds.append(
dict(
id=row_id,
randomNumber=number
)
)
return response.json(worlds, headers=get_headers())
@app.get('/fortunes')
async def fortunes_view(request):
async with connection_pool.acquire() as connection:
fortunes = await connection.fetch('SELECT * FROM Fortune')
fortunes.append(ADDITIONAL_ROW)
fortunes.sort(key=sort_fortunes_key)
content = template.render(fortunes=fortunes)
return response.html(content, headers=get_headers())
@app.get('/updates')
async def database_updates_view(request):
worlds = []
updates = set()
queries = request.args.get('queries', 1)
async with connection_pool.acquire() as connection:
statement = await connection.prepare(READ_ROW_SQL_TO_UPDATE)
for _ in range(get_num_queries(queries)):
record = await statement.fetchrow(randint(1, 10000))
world = dict(
id=record['id'], randomNumber=record['randomnumber']
)
world['randomNumber'] = randint(1, 10000)
worlds.append(world)
updates.add((world['id'], world['randomNumber']))
await connection.executemany(WRITE_ROW_SQL, updates)
return response.json(worlds, headers=get_headers())
@app.get('/plaintext')
def plaintext_view(request):
return response.text('Hello, world!', headers=get_headers())
def get_headers(server='Sanic/{}'.format(sanic.__version__)):
return {
'Server': server,
'Date': format_date_time(None),
}
if __name__ == '__main__':
app.run('0.0.0.0', 8080, access_log=False,
workers=multiprocessing.cpu_count())
|
# -*- python -*-
# Copyright (C) 2009, 2010 Free Software Foundation, Inc.
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import sys
import gdb
import os
import os.path
pythondir = '/home/a0273864/yagarto/install/share/gcc-4.7.1/python'
libdir = '/home/a0273864/yagarto/install/arm-none-eabi/lib/thumb/armv7e-m/fpu/fpv4-sp-d16'
# This file might be loaded when there is no current objfile. This
# can happen if the user loads it manually. In this case we don't
# update sys.path; instead we just hope the user managed to do that
# beforehand.
if gdb.current_objfile () is not None:
# Update module path. We want to find the relative path from libdir
# to pythondir, and then we want to apply that relative path to the
# directory holding the objfile with which this file is associated.
# This preserves relocatability of the gcc tree.
# Do a simple normalization that removes duplicate separators.
pythondir = os.path.normpath (pythondir)
libdir = os.path.normpath (libdir)
prefix = os.path.commonprefix ([libdir, pythondir])
# In some bizarre configuration we might have found a match in the
# middle of a directory name.
if prefix[-1] != '/':
prefix = os.path.dirname (prefix) + '/'
# Strip off the prefix.
pythondir = pythondir[len (prefix):]
libdir = libdir[len (prefix):]
# Compute the ".."s needed to get from libdir to the prefix.
dotdots = ('..' + os.sep) * len (libdir.split (os.sep))
objfile = gdb.current_objfile ().filename
dir_ = os.path.join (os.path.dirname (objfile), dotdots, pythondir)
if not dir_ in sys.path:
sys.path.insert(0, dir_)
# Load the pretty-printers.
from libstdcxx.v6.printers import register_libstdcxx_printers
register_libstdcxx_printers (gdb.current_objfile ())
|
#! /usr/bin/env python
# Copyright (C) 2014-2019 The BET Development Team
# import necessary modules
import numpy as np
import bet.sampling.adaptiveSampling as asam
import bet.postProcess.plotDomains as pDom
import scipy.io as sio
from scipy.interpolate import griddata
sample_save_file = 'sandbox2d'
# Select only the stations I care about; this will lead to better sampling
station_nums = [0, 5] # 1, 6
# Read in Q_ref and Q to create the appropriate rho_D
mdat = sio.loadmat('../matfiles/Q_2D')
Q = mdat['Q']
Q = Q[:, station_nums]
Q_ref = mdat['Q_true']
Q_ref = Q_ref[15, station_nums] # 16th/20
bin_ratio = 0.15
bin_size = (np.max(Q, 0) - np.min(Q, 0)) * bin_ratio
# Create experiment model
points = mdat['points']
def model(inputs):
interp_values = np.empty((inputs.shape[0], Q.shape[1]))
for i in range(Q.shape[1]):
interp_values[:, i] = griddata(points.transpose(), Q[:, i],
inputs)
return interp_values
# Create Transition Kernel
transition_set = asam.transition_set(.5, .5**5, 1.0)
# Create kernel
maximum = 1 / np.product(bin_size)
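# rho_D is the normalized indicator of the box of width bin_size centered at
# Q_ref; `maximum` = 1/volume makes it integrate to one over that box.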
def rho_D(outputs):
rho_left = np.repeat([Q_ref - .5 * bin_size], outputs.shape[0], 0)
rho_right = np.repeat([Q_ref + .5 * bin_size], outputs.shape[0], 0)
rho_left = np.all(np.greater_equal(outputs, rho_left), axis=1)
rho_right = np.all(np.less_equal(outputs, rho_right), axis=1)
inside = np.logical_and(rho_left, rho_right)
max_values = np.repeat(maximum, outputs.shape[0], 0)
return inside.astype('float64') * max_values
kernel_rD = asam.rhoD_kernel(maximum, rho_D)
# Create sampler
chain_length = 125
num_chains = 80
num_samples = chain_length * num_chains
sampler = asam.sampler(num_samples, chain_length, model)
# Set minima and maxima
lam_domain = np.array([[.07, .15], [.1, .2]])
# Get samples
initial_sample_type = "lhs"
(my_disc, all_step_ratios) = sampler.generalized_chains(lam_domain,
    transition_set, kernel_rD, sample_save_file, initial_sample_type)
# Read in points_ref and plot results
ref_sample = mdat['points_true']
ref_sample = ref_sample[5:7, 15]
# Show the samples in the parameter space
pDom.scatter_rhoD(my_disc, rho_D=rho_D, ref_sample=ref_sample, io_flag='input')
# Show the corresponding samples in the data space
pDom.scatter_rhoD(my_disc, rho_D=rho_D, ref_sample=Q_ref, io_flag='output')
# Show the data domain that corresponds with the convex hull of samples in the
# parameter space
pDom.show_data_domain_2D(my_disc, Q_ref=Q_ref)
# Show multiple data domains that correspond with the convex hull of samples in
# the parameter space
pDom.show_data_domain_multi(my_disc, Q_ref=Q_ref, showdim='all')
|
from datadog_checks.base.checks import AgentCheck
from datadog_checks.base.errors import CheckException
class Neo4jCheck(AgentCheck):
SERVICE_CHECK_NAME = 'neo4j.can_connect'
HTTP_CONFIG_REMAPPER = {
'user': {
'name': 'username',
},
'default_timeout': {
'name': 'timeout',
},
}
# Neo4j metrics to send
keys = set(
[
'storecreationdate',
'storelogversion',
'kernelstarttime',
'lastcommittedtxid',
'peaknumberofconcurrenttransactions',
'numberofrolledbacktransactions',
'numberofopentransactions',
'numberofopenedtransactions',
'numberofcommittedtransactions',
'logicallogsize',
'propertystoresize',
'arraystoresize',
'totalstoresize',
'relationshipstoresize',
'stringstoresize',
'nodestoresize',
'locks',
'numberofaverteddeadlocks',
'numberofrelationshipidsinuse',
'numberofpropertyidsinuse',
'numberofnodeidsinuse',
'numberofrelationshiptypeidsinuse',
'memorypools',
'pins',
'evictions',
'byteswritten',
'filemappings',
'fileunmappings',
'bytesread',
'flushes',
'evictionexceptions',
'faults',
'ha.pull_interval',
'dbms.memory.pagecache.size',
]
)
display = {
'storecreationdate': 'neo4j.store.creationdate',
'storelogversion': 'neo4j.store.log.version',
'kernelstarttime': 'neo4j.kernel.starttime',
'lastcommittedtxid': 'neo4j.last.committed.transaction.id',
'peaknumberofconcurrenttransactions': 'neo4j.peak.concurrent.transactions',
'numberofrolledbacktransactions': 'neo4j.peak.rolledback.transactions',
'numberofopentransactions': 'neo4j.open.transactions',
'numberofopenedtransactions': 'neo4j.opened.transactions',
'numberofcommittedtransactions': 'neo4j.committed.transactions',
'logicallogsize': 'neo4j.logicallog.size',
'propertystoresize': 'neo4j.property.store.size',
'arraystoresize': 'neo4j.array.store.size',
'totalstoresize': 'neo4j.total.store.size',
'relationshipstoresize': 'neo4j.relationship.store.size',
'stringstoresize': 'neo4j.string.store.size',
'nodestoresize': 'neo4j.node.store.size',
'locks': 'neo4j.locks',
'numberofaverteddeadlocks': 'neo4j.adverted.locks',
'numberofrelationshipidsinuse': 'neo4j.relationship.ids.inuse',
'numberofpropertyidsinuse': 'neo4j.property.ids.inuse',
'numberofnodeidsinuse': 'neo4j.node.ids.inuse',
'numberofrelationshiptypeidsinuse': 'neo4j.relationshiptype.ids.inuse',
'memorypools': 'neo4j.memory.pools',
'pins': 'neo4j.page.cache.pins',
'evictions': 'neo4j.page.cache.evictions',
'byteswritten': 'neo4j.bytes.written',
'filemappings': 'neo4j.page.cache.file.mappings',
'fileunmappings': 'neo4j.page.cache.file.unmappings',
'bytesread': 'neo4j.bytes.read',
'flushes': 'neo4j.page.cache.flushes',
'evictionexceptions': 'neo4j.page.cache.eviction.exceptions',
'faults': 'neo4j.page.cache.faults',
'ha.pull_interval': 'neo4j.ha.pull_interval',
'dbms.memory.pagecache.size': 'neo4j.dbms.memory.pagecache.size',
}
def check(self, _):
host, port, server_name = self._get_config(self.instance)
tags = self.instance.get('tags', [])
tags.append('server_name:{}'.format(server_name))
service_check_tags = tags + ['url:{}'.format(host)]
# Neo specific
# Create payload using built-in Neo4j queryJmx stored procedure
payload = {
"statements": [
{
"statement": "CALL dbms.queryJmx('org.neo4j:*') yield attributes with "
"keys(attributes) as k, attributes unwind k as "
"row return row, attributes[row]['value'];"
}
]
}
try:
version = self._get_version(host, port, service_check_tags)
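            # Neo4j 3.x+ is queried via the transactional Cypher endpoint;
            # older 2.x servers fall back to the legacy metrics endpoint.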
if version > 2:
check_url = "{}:{}/db/data/transaction/commit".format(host, port)
else:
check_url = "{}:{}/v1/service/metrics".format(host, port)
r = self.http.post(check_url, json=payload)
except Exception as e:
msg = "Unable to fetch Neo4j stats: {}".format(e)
self._critical_service_check(service_check_tags, msg)
raise CheckException(msg)
if r.status_code != 200:
msg = "Unexpected status of {0} when fetching Neo4j stats, response: {1}"
msg = msg.format(r.status_code, r.text)
self._critical_service_check(service_check_tags, msg)
r.raise_for_status()
stats = r.json()
self.service_check(self.SERVICE_CHECK_NAME, AgentCheck.OK, tags=service_check_tags)
for doc in stats['results'][0]['data']:
name = doc['row'][0].lower()
if name in self.keys:
try:
self.gauge(self.display.get(name, ""), doc['row'][1], tags=tags)
except TypeError:
continue
except ValueError:
continue
def _get_config(self, instance):
host = instance.get('neo4j_url', '')
port = int(instance.get('port', 7474))
server_name = instance.get('server_name', '')
return host, port, server_name
def _get_version(self, host, port, service_check_tags):
version_url = '{}:{}/db/data/'.format(host, port)
r = self.http.get(version_url)
if r.status_code != 200:
msg = "unexpected status of {0} when fetching Neo4j stats, response: {1}"
msg = msg.format(r.status_code, r.text)
self._critical_service_check(service_check_tags, msg)
r.raise_for_status()
stats = r.json()
version = stats.get('neo4j_version')
self.log.debug("Neo4j version: %s", version)
version = version.split('.')
if version:
return int(version[0])
return 0
def _critical_service_check(self, service_check_tags, message):
self.service_check(self.SERVICE_CHECK_NAME, AgentCheck.CRITICAL, tags=service_check_tags, message=message)
|
import pytest
from ..gitignore import fnmatch_translate, PathFilter
match_data = [
("foo", False, ["a/foo", "foo"]),
("*.a", False, ["foo.a", "a/foo.a", "a/b/foo.a", "a.a/foo.a"]),
("*.py[co]", False, ["a.pyc", "a.pyo", "a/b/c.pyc"]),
("\\#*", False, ["#a", "a/#b"]),
("*#", False, ["a#", "a/b#", "#a#"]),
("/*.c", False, ["a.c", ".c"]),
("**/b", False, ["a/b", "a/c/b"]),
("*b", True, ["ab"]),
("**/b", True, ["a/b"]),
("a/", True, ["a", "a/b", "a/b/c"])
]
mismatch_data = [
("foo", False, ["foob", "afoo"]),
("*.a", False, ["a", "foo:a", "a.a/foo"]),
("*.py[co]", False, ["a.pyd", "pyo"]),
("/*.c", False, ["a/b.c"]),
("*b", True, ["a/b"]),
("**b", True, ["a/b"]),
("a[/]b", True, ["a/b"]),
("**/b", True, ["a/c/b"]),
("a", True, ["ab"])
]
invalid_data = [
"[a",
"***/foo",
"a\\",
]
filter_data = [
("foo", True),
("a", False),
("a/b", False),
("a/c", True),
("a/c/", False),
("c/b", True)
]
def expand_data(compact_data):
for pattern, path_name, inputs in compact_data:
for input in inputs:
yield pattern, input, path_name
@pytest.mark.parametrize("pattern, input, path_name", expand_data(match_data))
def tests_match(pattern, input, path_name):
regexp = fnmatch_translate(pattern, path_name)
assert regexp.match(input) is not None
@pytest.mark.parametrize("pattern, input, path_name", expand_data(mismatch_data))
def tests_no_match(pattern, input, path_name):
regexp = fnmatch_translate(pattern, path_name)
assert regexp.match(input) is None
@pytest.mark.parametrize("pattern", invalid_data)
def tests_invalid(pattern):
with pytest.raises(ValueError):
fnmatch_translate(pattern, False)
with pytest.raises(ValueError):
fnmatch_translate(pattern, True)
@pytest.mark.parametrize("path, expected", filter_data)
def test_path_filter(path, expected):
extras = [
"#foo",
"a ",
"**/b",
"a/c/",
"!c/b",
]
f = PathFilter(None, extras)
assert f(path) == expected
|
"""Wrapper around collections.namedtuple with some added features.
The 'collections.namedtuple' is very useful as a data transfer object,
when communicating with Phabricator it's important to consider that the
schema for the objects can change on the server side before we get a chance
to update our client.
This namedtuple aims to build a layer of tolerance on top of namedtuple such
that the client can continue to function when the schema changes within defined
parameters.
"""
# =============================================================================
# CONTENTS
# -----------------------------------------------------------------------------
# phlsys_namedtuple
#
# Public Classes:
# Error
#
# Public Functions:
# make_named_tuple
#
# -----------------------------------------------------------------------------
# (this contents block is generated, edits will be lost)
# =============================================================================
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import warnings
class Error(Exception):
pass
def make_named_tuple(name, required, defaults, ignored):
"""Return a factory function for collection.namedtuple objects which uses
the supplied 'required', 'defaults' and 'ignored' parameters to determine
what the attributes of the namedtuple should be.
All items from the supplied 'required' list must be provided in each
call to the factory function.
Keys from the specified 'defaults' dict may or may not be provided
in calls to the returned factory function, for those that are not
provided then the values are taken from the corresponding keys in the
'defaults' dict.
Items from the specified 'ignored' list are removed from the keyword
arguments provided to the factory function prior to constructing the
namedtuple.
If items are encountered which are not mentioned by 'required', 'defaults'
or 'ignored' then they are automatically ignored and a warnings.warn is
emitted.
Usage Examples:
Create a factory which requires 'number' and returns 'MyTuple's:
>>> factory = make_named_tuple("MyTuple", ['number'], {}, [])
>>> factory(number=1)
MyTuple(number=1)
Create a factory which ignores 'number' returns 'MyTuple's:
>>> factory = make_named_tuple("MyTuple", [], {}, ['number'])
>>> factory(number=1)
MyTuple()
Create a factory which defaults 'number' to 2 and returns 'MyTuple's:
>>> factory = make_named_tuple("MyTuple", [], {'number': 2}, [])
>>> factory()
MyTuple(number=2)
:name: string name of the collections.namedtuple
:required: list of keys required to be supplied to the factory function
:defaults: dict of default values to be filled in by the factory function
:ignored: list of keys for the factory function to ignore
:returns: factory function which returns a collections.namedtuple
"""
required_attr = set(required)
default_attr = dict(defaults)
default_attr_keys = default_attr.viewkeys()
ignored_attr = set(ignored)
expected_attr = required_attr | default_attr_keys
assert not (default_attr_keys & required_attr)
assert not (default_attr_keys & ignored_attr)
assert not (ignored_attr & required_attr)
NamedTuple = collections.namedtuple(name, required + defaults.keys())
# define the factory function
def make_instance(**kwargs):
keys = kwargs.viewkeys()
# remove all ignored_attr from kwargs
ignored_keys = keys & ignored_attr
for key in ignored_keys:
del kwargs[key]
        # emit warnings and proceed if we encounter unexpected attributes
unexpected = keys - expected_attr
if unexpected:
warnings.warn("ignoring unexpected args: " + str(unexpected))
for key in unexpected:
del kwargs[key]
missing_attr = required_attr - keys
if missing_attr:
raise Error("missing attributes: " + str(missing_attr))
auto_attr = default_attr_keys - keys
for a in auto_attr:
kwargs[a] = default_attr[a]
return NamedTuple(**kwargs)
return make_instance
# -----------------------------------------------------------------------------
# Copyright (C) 2013-2014 Bloomberg Finance L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ------------------------------ END-OF-FILE ----------------------------------
|
"""Tests the configuration class."""
import os
import unittest
from helot.configuration import ConfigurationError
from helot.configuration import DataHolderObject
from helot.configuration import configuration
_CURRENT_DIR = os.path.dirname(os.path.realpath(__file__))
_RESOURCES_DIR = os.path.join(_CURRENT_DIR, 'resources')
_YAML_CONIFIGURATION_FILENAME = os.path.join(_RESOURCES_DIR, 'sample.yaml')
_JSON_CONIFIGURATION_FILENAME = os.path.join(_RESOURCES_DIR, 'sample.json')
_BAD_JSON_CONIFIGURATION_FILENAME = os.path.join(_RESOURCES_DIR, 'bad.json')
_BAD_YAML_CONIFIGURATION_FILENAME = os.path.join(_RESOURCES_DIR, 'invalid.yaml')
class TestConfiguration(unittest.TestCase):
def setUp(self):
configuration.reset()
def test_testing_mode(self):
configuration.initialize(_YAML_CONIFIGURATION_FILENAME)
self.assertEqual(configuration.name, "Martin D'vloper")
self.assertEqual(configuration.job, "Developer")
self.assertEqual(configuration.skill, "Elite")
self.assertEqual(configuration.employed, True)
self.assertEqual(configuration.age, 24)
self.assertEqual(configuration.foods, ['Apple', 'Mango', 1234])
self.assertEqual(configuration.languages.perl, 'Elite')
self.assertEqual(configuration.languages.object_oriented.best,
['C++', 'C#'])
self.assertEqual(configuration.languages.object_oriented.great, 'Java')
def test_instantiation_of_configuration(self):
with self.assertRaises(TypeError):
_ = configuration()
def test_non_existing_attribute(self):
x = configuration.junk
self.assertTrue(isinstance(x, DataHolderObject))
configuration.junk = 'junk'
self.assertTrue(isinstance(configuration.junk, str))
self.assertEqual(configuration.junk, 'junk')
configuration.junk1.junk = 'junk'
self.assertTrue(isinstance(configuration.junk1, DataHolderObject))
self.assertEqual(configuration.junk1.junk, 'junk')
self.assertTrue(isinstance(configuration.junk1.junk, str))
configuration.j1.j2.j3.j4.j5 = 'junk'
self.assertEqual(configuration.j1.j2.j3.j4.j5, 'junk')
def test_setting_new_configuration_attribute(self):
configuration.junk = 'this is junk'
self.assertEqual(configuration.junk, 'this is junk')
def test_data_holder_object(self):
x = DataHolderObject()
x.t1.t2.t3 = [1, 2]
self.assertListEqual(x.t1.t2.t3, [1, 2])
def test_assignments(self):
configuration.host = 'localhost'
self.assertTrue(isinstance(configuration.host, str))
configuration.reset()
configuration.host = 'localhost'
self.assertTrue(isinstance(configuration.host, str))
def test_json_initialization(self):
configuration.initialize(_JSON_CONIFIGURATION_FILENAME)
self.assertEqual(configuration.name, "Martin D'vloper")
self.assertEqual(configuration.job, "Developer")
self.assertEqual(configuration.skill, "Elite")
self.assertEqual(configuration.employed, True)
self.assertEqual(configuration.age, 24)
self.assertEqual(configuration.foods, ['Apple', 'Mango', 1234])
self.assertEqual(configuration.languages.perl, 'Elite')
self.assertEqual(configuration.languages.object_oriented.best,
['C++', 'C#'])
self.assertEqual(configuration.languages.object_oriented.great, 'Java')
def test_bad_yaml(self):
with self.assertRaises(ConfigurationError):
configuration.initialize(_BAD_YAML_CONIFIGURATION_FILENAME)
def test_bad_json(self):
with self.assertRaises(ConfigurationError):
configuration.initialize(_BAD_JSON_CONIFIGURATION_FILENAME)
def test_non_existing_filename(self):
with self.assertRaises(ConfigurationError):
configuration.initialize("invalid.nonexisting")
def test_dict_initialization(self):
values_as_dict = {
"name": "Martin D'vloper",
"job": "Developer",
"skill": "Elite",
"employed": True,
"age": 24,
"foods": [
"Apple",
"Mango",
1234
],
"languages": {
"perl": "Elite",
"python": "Elite",
"pascal": "Lame",
"object_oriented": {
"best": [
"C++",
"C#"
],
"great": "Java"
}
},
"mysql": {
"host": "localhost",
"user": "root",
"passwd": "vagrant",
"db": "test"
}
}
configuration.initialize(values_as_dict)
self.assertEqual(configuration.name, "Martin D'vloper")
self.assertEqual(configuration.job, "Developer")
self.assertEqual(configuration.skill, "Elite")
self.assertEqual(configuration.employed, True)
self.assertEqual(configuration.age, 24)
self.assertEqual(configuration.foods, ['Apple', 'Mango', 1234])
self.assertEqual(configuration.languages.perl, 'Elite')
self.assertEqual(configuration.languages.object_oriented.best,
['C++', 'C#'])
self.assertEqual(configuration.languages.object_oriented.great, 'Java')
    def test_direct_initialization(self):
languages = {
"perl": "Elite",
"python": "Elite",
"pascal": "Lame",
"object_oriented": {
"best": [
"C++",
"C#"
],
"great": "Java"
}
}
configuration.initialize(name="Martin D'vloper", job="Developer",
skill="Elite", employed=True,
foods=["Apple", "Mango", 1234],
languages=languages)
self.assertEqual(configuration.name, "Martin D'vloper")
self.assertEqual(configuration.job, "Developer")
self.assertEqual(configuration.skill, "Elite")
self.assertEqual(configuration.employed, True)
self.assertEqual(configuration.foods, ['Apple', 'Mango', 1234])
self.assertEqual(configuration.languages.object_oriented.best,
['C++', 'C#'])
def test_mixed_json_initialization(self):
configuration.initialize(_JSON_CONIFIGURATION_FILENAME,
junk='some junk')
self.assertEqual(configuration.name, "Martin D'vloper")
self.assertEqual(configuration.foods, ['Apple', 'Mango', 1234])
self.assertEqual(configuration.languages.perl, 'Elite')
self.assertEqual(configuration.languages.object_oriented.best,
['C++', 'C#'])
self.assertEqual(configuration.languages.object_oriented.great, 'Java')
self.assertEqual(configuration.junk, 'some junk')
def test_mixed_yaml_initialization(self):
configuration.initialize(_YAML_CONIFIGURATION_FILENAME,
junk='some junk')
self.assertEqual(configuration.name, "Martin D'vloper")
self.assertEqual(configuration.foods, ['Apple', 'Mango', 1234])
self.assertEqual(configuration.languages.perl, 'Elite')
self.assertEqual(configuration.languages.object_oriented.best,
['C++', 'C#'])
self.assertEqual(configuration.languages.object_oriented.great, 'Java')
self.assertEqual(configuration.junk, 'some junk')
|
# -*- coding: utf-8 -*-
"""
Test case for owm.py module.
Here we don't use mock objects because we don't want to rely on external
mocking libraries; we use monkey patching instead.
Monkey patching pattern:
1. Keep a reference to the original function to be patched
2. Replace the original function with the mock version
3. Call function and get results
4. Restore the original function (if possible, before unittest assertions
because they might fail)
"""
import unittest
import time
from tests.unit.webapi25.json_test_responses import (OBSERVATION_JSON,
SEARCH_RESULTS_JSON, THREE_HOURS_FORECAST_JSON, DAILY_FORECAST_JSON,
THREE_HOURS_FORECAST_AT_COORDS_JSON, DAILY_FORECAST_AT_COORDS_JSON,
THREE_HOURS_FORECAST_AT_ID_JSON, DAILY_FORECAST_AT_ID_JSON,
CITY_WEATHER_HISTORY_JSON, STATION_TICK_WEATHER_HISTORY_JSON,
STATION_WEATHER_HISTORY_JSON, THREE_HOURS_FORECAST_NOT_FOUND_JSON,
DAILY_FORECAST_NOT_FOUND_JSON, STATION_HISTORY_NO_ITEMS_JSON,
STATION_OBSERVATION_JSON, STATION_AT_COORDS_JSON,
WEATHER_AT_STATION_IN_BBOX_JSON)
from pyowm.webapi25.owm25 import OWM25
from pyowm.constants import PYOWM_VERSION
from pyowm.commons.owmhttpclient import OWMHTTPClient
from pyowm.webapi25.forecast import Forecast
from pyowm.webapi25.observation import Observation
from pyowm.webapi25.weather import Weather
from pyowm.webapi25.location import Location
from pyowm.webapi25.forecaster import Forecaster
from pyowm.webapi25.station import Station
from pyowm.webapi25.stationhistory import StationHistory
from pyowm.webapi25.historian import Historian
from pyowm.webapi25.forecastparser import ForecastParser
from pyowm.webapi25.observationparser import ObservationParser
from pyowm.webapi25.observationlistparser import ObservationListParser
from pyowm.webapi25.stationparser import StationParser
from pyowm.webapi25.stationlistparser import StationListParser
from pyowm.webapi25.stationhistoryparser import StationHistoryParser
from pyowm.webapi25.weatherhistoryparser import WeatherHistoryParser
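# --- illustrative sketch (not part of the original test-suite) ---
# A tiny, self-contained rendition of the monkey-patching pattern described in
# the module docstring, using a made-up FakeClient class rather than the real
# OWMHTTPClient.call_API that the tests below patch and restore.
def _demo_monkey_patching_pattern():
    class FakeClient(object):
        def call_API(self, url, params):
            return 'payload fetched over the network'
    def mock_call_API(self, url, params):
        return 'canned payload'
    ref_to_original_call_API = FakeClient.call_API   # 1. keep a reference to the original
    FakeClient.call_API = mock_call_API              # 2. replace it with the mock version
    result = FakeClient().call_API('/some/url', {})  # 3. call the function and get results
    FakeClient.call_API = ref_to_original_call_API   # 4. restore the original
    assert result == 'canned payload'                # assertions come after the restore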
class TestOWM25(unittest.TestCase):
__test_parsers = {
'observation': ObservationParser(),
'observation_list': ObservationListParser(),
'forecast': ForecastParser(),
'weather_history': WeatherHistoryParser(),
'station_history': StationHistoryParser(),
'station': StationParser(),
'station_list': StationListParser(),
}
__test_instance = OWM25(__test_parsers, 'test_API_key')
# Mock functions
def mock_httputils_call_API_returning_single_obs(self, API_subset_URL,
params_dict):
return OBSERVATION_JSON
def mock_httputils_call_API_returning_single_station_obs(self,
API_subset_URL,
params_dict):
return STATION_OBSERVATION_JSON
def mock_httputils_call_API_ping(self, API_subset_URL, params_dict,
API_timeout):
return OBSERVATION_JSON
def mock_httputils_call_API_failing_ping(self, API_subset_URL, params_dict,
API_timeout):
return None
def mock_httputils_call_API_returning_multiple_obs(self, API_subset_URL,
params_dict):
return SEARCH_RESULTS_JSON
def mock_httputils_call_API_returning_3h_forecast(self, API_subset_URL,
params_dict):
return THREE_HOURS_FORECAST_JSON
def mock_httputils_call_API_returning_3h_forecast_with_no_items(self,
API_subset_URL, params_dict):
return THREE_HOURS_FORECAST_NOT_FOUND_JSON
def mock_httputils_call_API_returning_daily_forecast_with_no_items(self,
API_subset_URL, params_dict):
return DAILY_FORECAST_NOT_FOUND_JSON
def mock_httputils_call_API_returning_3h_forecast_at_coords(self,
API_subset_URL,
params_dict):
return THREE_HOURS_FORECAST_AT_COORDS_JSON
def mock_httputils_call_API_returning_3h_forecast_at_id(self,
API_subset_URL,
params_dict):
return THREE_HOURS_FORECAST_AT_ID_JSON
def mock_httputils_call_API_returning_daily_forecast(self, API_subset_URL,
params_dict):
return DAILY_FORECAST_JSON
def mock_httputils_call_API_returning_daily_forecast_at_coords(self,
API_subset_URL,
params_dict):
return DAILY_FORECAST_AT_COORDS_JSON
def mock_httputils_call_API_returning_daily_forecast_at_id(self,
API_subset_URL,
params_dict):
return DAILY_FORECAST_AT_ID_JSON
def mock_httputils_call_API_returning_city_weather_history(self,
API_subset_URL,
params_dict):
return CITY_WEATHER_HISTORY_JSON
def mock_httputils_call_API_returning_station_tick_weather_history(self,
API_subset_URL,
params_dict):
return STATION_TICK_WEATHER_HISTORY_JSON
def mock_httputils_call_API_returning_station_hour_weather_history(self,
API_subset_URL,
params_dict):
return STATION_WEATHER_HISTORY_JSON
def mock_httputils_call_API_returning_station_day_weather_history(self,
API_subset_URL,
params_dict):
return STATION_WEATHER_HISTORY_JSON
def mock_httputils_call_API_returning_station_history_with_no_items(self,
API_subset_URL,
params_dict):
return STATION_HISTORY_NO_ITEMS_JSON
def mock_httputils_call_API_returning_weather_at_stations_in_bbox(self,
API_subset_URL,
params_dict):
return WEATHER_AT_STATION_IN_BBOX_JSON
def mock_httputils_call_API_returning_station_at_coords(self,
API_subset_URL,
params_dict):
return STATION_AT_COORDS_JSON
# Tests
def test_wrong_API_key(self):
try:
OWM25(self.__test_parsers, 1234)
self.fail("Didn't raise AssertionError")
except AssertionError:
pass
def test_API_key_accessors(self):
test_API_key = 'G097IueS-9xN712E'
owm = OWM25({})
self.assertFalse(owm.get_API_key())
owm.set_API_key(test_API_key)
self.assertEqual(owm.get_API_key(), test_API_key)
def test_is_API_online(self):
ref_to_original_call_API = OWMHTTPClient.call_API
OWMHTTPClient.call_API = \
self.mock_httputils_call_API_ping
result = self.__test_instance.is_API_online()
OWMHTTPClient.call_API = ref_to_original_call_API
self.assertTrue(result)
def test_is_API_online_failure(self):
ref_to_original_call_API = OWMHTTPClient.call_API
OWMHTTPClient.call_API = \
self.mock_httputils_call_API_failing_ping
result = self.__test_instance.is_API_online()
OWMHTTPClient.call_API = ref_to_original_call_API
self.assertFalse(result)
def test_city_id_registry(self):
result = self.__test_instance.city_id_registry()
self.assertTrue(result is not None)
def test_get_API_version(self):
self.assertEqual("2.5", self.__test_instance.get_API_version())
def test_get_version(self):
self.assertEqual(PYOWM_VERSION, self.__test_instance.get_version())
def test_language_accessors(self):
self.assertEqual("en", self.__test_instance.get_language())
self.__test_instance.set_language("ru")
self.assertEqual("ru", self.__test_instance.get_language())
def test_weather_at_place(self):
ref_to_original_call_API = OWMHTTPClient.call_API
OWMHTTPClient.call_API = \
self.mock_httputils_call_API_returning_single_obs
result = self.__test_instance.weather_at_place("London,uk")
OWMHTTPClient.call_API = ref_to_original_call_API
self.assertTrue(isinstance(result, Observation))
self.assertTrue(result.get_reception_time() is not None)
loc = result.get_location()
self.assertTrue(loc is not None)
self.assertTrue(all(v is not None for v in loc.__dict__.values()))
weat = result.get_weather()
self.assertTrue(weat is not None)
def test_weather_at_place_fails_with_wrong_parameters(self):
self.assertRaises(AssertionError, OWM25.weather_at_place, \
self.__test_instance, 3)
def test_weather_at_coords(self):
ref_to_original_call_API = OWMHTTPClient.call_API
OWMHTTPClient.call_API = \
self.mock_httputils_call_API_returning_single_obs
result = self.__test_instance.weather_at_coords(57.0, -2.15)
OWMHTTPClient.call_API = ref_to_original_call_API
self.assertTrue(isinstance(result, Observation))
self.assertTrue(result.get_reception_time() is not None)
loc = result.get_location()
self.assertTrue(loc is not None)
self.assertTrue(all(v is not None for v in loc.__dict__.values()))
weat = result.get_weather()
self.assertTrue(weat is not None)
def test_weather_at_coords_fails_when_coordinates_out_of_bounds(self):
"""
Test failure when providing: lon < -180, lon > 180, lat < -90, lat > 90
"""
self.assertRaises(ValueError, OWM25.weather_at_coords, \
self.__test_instance, 43.7, -200.0)
self.assertRaises(ValueError, OWM25.weather_at_coords, \
self.__test_instance, 43.7, 200.0)
self.assertRaises(ValueError, OWM25.weather_at_coords, \
self.__test_instance, -200, 2.5)
self.assertRaises(ValueError, OWM25.weather_at_coords, \
self.__test_instance, 200, 2.5)
def test_weather_at_id(self):
ref_to_original_call_API = OWMHTTPClient.call_API
OWMHTTPClient.call_API = \
self.mock_httputils_call_API_returning_single_obs
result = self.__test_instance.weather_at_id(5128581) # New York city, US
OWMHTTPClient.call_API = ref_to_original_call_API
self.assertTrue(isinstance(result, Observation))
self.assertTrue(result.get_reception_time() is not None)
loc = result.get_location()
self.assertTrue(loc is not None)
self.assertTrue(all(v is not None for v in loc.__dict__.values()))
weat = result.get_weather()
self.assertTrue(weat is not None)
def test_weather_at_id_fails_when_id_negative(self):
self.assertRaises(ValueError, OWM25.weather_at_id, \
self.__test_instance, -156667)
def test_weather_at_station(self):
ref_to_original_call_API = OWMHTTPClient.call_API
OWMHTTPClient.call_API = \
self.mock_httputils_call_API_returning_single_station_obs
result = self.__test_instance.weather_at_station(1000) # station: PAKP
OWMHTTPClient.call_API = ref_to_original_call_API
self.assertTrue(isinstance(result, Observation))
self.assertTrue(result.get_reception_time() is not None)
loc = result.get_location()
self.assertTrue(loc is not None)
weat = result.get_weather()
self.assertTrue(weat is not None)
def test_weather_at_station_fails_when_id_negative(self):
self.assertRaises(ValueError, OWM25.weather_at_station, \
self.__test_instance, -156667)
def test_weather_at_places(self):
ref_to_original_call_API = OWMHTTPClient.call_API
OWMHTTPClient.call_API = \
self.mock_httputils_call_API_returning_multiple_obs
result = \
self.__test_instance.weather_at_places("London", "accurate")
OWMHTTPClient.call_API = ref_to_original_call_API
self.assertTrue(isinstance(result, list))
self.assertEqual(2, len(result))
for item in result:
self.assertTrue(item is not None)
self.assertTrue(item.get_reception_time())
loc = item.get_location()
self.assertTrue(loc is not None)
self.assertTrue(all(v is not None for v in loc.__dict__.values()))
weat = item.get_weather()
self.assertTrue(weat is not None)
def test_weather_at_places_fails_with_wrong_params(self):
self.assertRaises(ValueError, OWM25.weather_at_places, \
self.__test_instance, "London", "x")
self.assertRaises(ValueError, OWM25.weather_at_places, \
self.__test_instance, "London", "accurate", -5)
def test_weather_around_coords(self):
ref_to_original_call_API = OWMHTTPClient.call_API
OWMHTTPClient.call_API = \
self.mock_httputils_call_API_returning_multiple_obs
result = self.__test_instance.weather_around_coords(57.0, -2.15)
OWMHTTPClient.call_API = ref_to_original_call_API
self.assertTrue(isinstance(result, list))
for item in result:
self.assertTrue(item is not None)
self.assertTrue(item.get_reception_time() is not None)
loc = item.get_location()
self.assertTrue(loc is not None)
self.assertTrue(all(v is not None for v in loc.__dict__.values()))
weat = item.get_weather()
self.assertTrue(weat is not None)
def test_weather_around_coords_fails_when_coordinates_out_of_bounds(self):
"""
Test failure when providing: lon < -180, lon > 180, lat < -90, lat > 90
"""
self.assertRaises(ValueError, OWM25.weather_around_coords, \
self.__test_instance, 43.7, -200.0)
self.assertRaises(ValueError, OWM25.weather_around_coords, \
self.__test_instance, 43.7, 200.0)
self.assertRaises(ValueError, OWM25.weather_around_coords, \
self.__test_instance, -200, 2.5)
self.assertRaises(ValueError, OWM25.weather_around_coords, \
self.__test_instance, 200, 2.5)
def test_weather_around_coords_fails_with_wrong_params(self):
self.assertRaises(ValueError, OWM25.weather_around_coords, \
self.__test_instance, 43.7, 20.0, -3)
def test_three_hours_forecast(self):
ref_to_original_call_API = OWMHTTPClient.call_API
OWMHTTPClient.call_API = \
self.mock_httputils_call_API_returning_3h_forecast
result = self.__test_instance.three_hours_forecast("London,uk")
OWMHTTPClient.call_API = ref_to_original_call_API
self.assertTrue(isinstance(result, Forecaster))
forecast = result.get_forecast()
self.assertTrue(isinstance(forecast, Forecast))
self.assertTrue(forecast.get_interval() is not None)
self.assertTrue(forecast.get_reception_time() is not None)
self.assertTrue(isinstance(forecast.get_location(), Location))
self.assertEqual(1, len(forecast))
for weather in forecast:
self.assertTrue(isinstance(weather, Weather))
def test_three_hours_forecast_when_forecast_not_found(self):
ref_to_original_call_API = OWMHTTPClient.call_API
OWMHTTPClient.call_API = \
self.mock_httputils_call_API_returning_3h_forecast_with_no_items
result = self.__test_instance.three_hours_forecast("London,uk")
OWMHTTPClient.call_API = ref_to_original_call_API
self.assertIsNone(result)
def test_three_hours_forecast_at_coords(self):
ref_to_original_call_API = OWMHTTPClient.call_API
OWMHTTPClient.call_API = \
self.mock_httputils_call_API_returning_3h_forecast_at_coords
result = \
self.__test_instance\
.three_hours_forecast_at_coords(51.50853, -0.12574)
OWMHTTPClient.call_API = ref_to_original_call_API
self.assertTrue(isinstance(result, Forecaster))
forecast = result.get_forecast()
self.assertTrue(isinstance(forecast, Forecast))
self.assertTrue(forecast.get_interval() is not None)
self.assertTrue(forecast.get_reception_time() is not None)
self.assertTrue(isinstance(forecast.get_location(), Location))
self.assertEqual(1, len(forecast))
for weather in forecast:
self.assertTrue(isinstance(weather, Weather))
def test_three_hours_forecast_at_coords_when_forecast_not_found(self):
ref_to_original_call_API = OWMHTTPClient.call_API
OWMHTTPClient.call_API = \
self.mock_httputils_call_API_returning_3h_forecast_with_no_items
result = self.__test_instance.three_hours_forecast_at_coords(51.50853,
-0.12574)
OWMHTTPClient.call_API = ref_to_original_call_API
self.assertIsNone(result)
def test_three_hours_forecast_at_coords_fails_with_wrong_params(self):
self.assertRaises(ValueError, OWM25.three_hours_forecast_at_coords,
self.__test_instance, -100.0, 0.0)
self.assertRaises(ValueError, OWM25.three_hours_forecast_at_coords,
self.__test_instance, 100.0, 0.0)
self.assertRaises(ValueError, OWM25.three_hours_forecast_at_coords,
self.__test_instance, 0.0, -200.0)
self.assertRaises(ValueError, OWM25.three_hours_forecast_at_coords,
self.__test_instance, 0.0, 200.0)
def test_three_hours_forecast_at_id(self):
ref_to_original_call_API = OWMHTTPClient.call_API
OWMHTTPClient.call_API = \
self.mock_httputils_call_API_returning_3h_forecast_at_id
result = self.__test_instance.three_hours_forecast_at_id(2643743)
OWMHTTPClient.call_API = ref_to_original_call_API
self.assertTrue(isinstance(result, Forecaster))
forecast = result.get_forecast()
self.assertTrue(isinstance(forecast, Forecast))
self.assertTrue(forecast.get_interval() is not None)
self.assertTrue(forecast.get_reception_time() is not None)
self.assertTrue(isinstance(forecast.get_location(), Location))
self.assertEqual(1, len(forecast))
for weather in forecast:
self.assertTrue(isinstance(weather, Weather))
def test_three_hours_forecast_at_id_when_forecast_not_found(self):
ref_to_original_call_API = OWMHTTPClient.call_API
OWMHTTPClient.call_API = \
self.mock_httputils_call_API_returning_3h_forecast_with_no_items
result = self.__test_instance.three_hours_forecast_at_id(2643743)
OWMHTTPClient.call_API = ref_to_original_call_API
self.assertIsNone(result)
def test_three_hours_forecast_at_id_fails_with_wrong_params(self):
self.assertRaises(ValueError, OWM25.three_hours_forecast_at_id,
self.__test_instance, -1234)
def test_daily_forecast(self):
ref_to_original_call_API = OWMHTTPClient.call_API
OWMHTTPClient.call_API = \
self.mock_httputils_call_API_returning_daily_forecast
result = self.__test_instance.daily_forecast("London,uk", 2)
OWMHTTPClient.call_API = ref_to_original_call_API
self.assertTrue(isinstance(result, Forecaster))
forecast = result.get_forecast()
self.assertTrue(isinstance(forecast, Forecast))
self.assertTrue(forecast.get_interval() is not None)
self.assertTrue(forecast.get_reception_time() is not None)
self.assertTrue(isinstance(forecast.get_location(), Location))
self.assertEqual(1, len(forecast))
for weather in forecast:
self.assertTrue(isinstance(weather, Weather))
def test_daily_forecast_fails_with_wrong_params(self):
self.assertRaises(AssertionError, OWM25.daily_forecast,
self.__test_instance, 2, 3)
self.assertRaises(ValueError, OWM25.daily_forecast,
self.__test_instance, "London,uk", -3)
def test_daily_forecast_when_forecast_not_found(self):
ref_to_original_call_API = OWMHTTPClient.call_API
OWMHTTPClient.call_API = \
self.mock_httputils_call_API_returning_daily_forecast_with_no_items
result = self.__test_instance.daily_forecast('London,uk')
OWMHTTPClient.call_API = ref_to_original_call_API
self.assertIsNone(result)
def test_daily_forecast_at_coords(self):
ref_to_original_call_API = OWMHTTPClient.call_API
OWMHTTPClient.call_API = \
self.mock_httputils_call_API_returning_daily_forecast_at_coords
result = \
self.__test_instance.daily_forecast_at_coords(51.50853, -0.12574, 2)
OWMHTTPClient.call_API = ref_to_original_call_API
self.assertTrue(isinstance(result, Forecaster))
forecast = result.get_forecast()
self.assertTrue(isinstance(forecast, Forecast))
self.assertTrue(forecast.get_interval() is not None)
self.assertTrue(forecast.get_reception_time() is not None)
self.assertTrue(isinstance(forecast.get_location(), Location))
self.assertEqual(1, len(forecast))
for weather in forecast:
self.assertTrue(isinstance(weather, Weather))
def test_daily_forecast_at_coords_fails_with_wrong_parameters(self):
self.assertRaises(ValueError, OWM25.daily_forecast_at_coords,
self.__test_instance, 51.50853, -0.12574, -3)
self.assertRaises(ValueError, OWM25.daily_forecast_at_coords,
self.__test_instance, -100.0, 0.0)
self.assertRaises(ValueError, OWM25.daily_forecast_at_coords,
self.__test_instance, 100.0, 0.0)
self.assertRaises(ValueError, OWM25.daily_forecast_at_coords,
self.__test_instance, 0.0, -200.0)
self.assertRaises(ValueError, OWM25.daily_forecast_at_coords,
self.__test_instance, 0.0, 200.0)
def test_daily_forecast_at_coords_when_forecast_not_found(self):
ref_to_original_call_API = OWMHTTPClient.call_API
OWMHTTPClient.call_API = \
self.mock_httputils_call_API_returning_daily_forecast_with_no_items
result = self.__test_instance.daily_forecast_at_coords(51.50853, -0.12574)
OWMHTTPClient.call_API = ref_to_original_call_API
self.assertIsNone(result)
def test_daily_forecast_at_id(self):
ref_to_original_call_API = OWMHTTPClient.call_API
OWMHTTPClient.call_API = \
self.mock_httputils_call_API_returning_daily_forecast_at_id
result = \
self.__test_instance.daily_forecast_at_id(2643743, 2)
OWMHTTPClient.call_API = ref_to_original_call_API
self.assertTrue(isinstance(result, Forecaster))
forecast = result.get_forecast()
self.assertTrue(isinstance(forecast, Forecast))
self.assertTrue(forecast.get_interval() is not None)
self.assertTrue(forecast.get_reception_time() is not None)
self.assertTrue(isinstance(forecast.get_location(), Location))
self.assertEqual(1, len(forecast))
for weather in forecast:
self.assertTrue(isinstance(weather, Weather))
def test_daily_forecast_at_id_fails_with_wrong_parameters(self):
self.assertRaises(ValueError, OWM25.daily_forecast_at_id,
self.__test_instance, -123456, 3)
self.assertRaises(ValueError, OWM25.daily_forecast_at_id,
self.__test_instance, 123456, -3)
def test_daily_forecast_at_id_when_forecast_not_found(self):
ref_to_original_call_API = OWMHTTPClient.call_API
OWMHTTPClient.call_API = \
self.mock_httputils_call_API_returning_daily_forecast_with_no_items
result = self.__test_instance.daily_forecast_at_id(123456)
OWMHTTPClient.call_API = ref_to_original_call_API
self.assertIsNone(result)
def test_weather_history_at_place(self):
ref_to_original_call_API = OWMHTTPClient.call_API
OWMHTTPClient.call_API = \
self.mock_httputils_call_API_returning_city_weather_history
result = self.__test_instance.weather_history_at_place("London,uk")
OWMHTTPClient.call_API = ref_to_original_call_API
self.assertTrue(isinstance(result, list))
for weather in result:
self.assertTrue(isinstance(weather, Weather))
def test_weather_history_at_place_fails_with_unordered_time_boundaries(self):
self.assertRaises(ValueError, OWM25.weather_history_at_place,
self.__test_instance, "London,uk",
"2013-09-06 20:26:40+00", "2013-09-06 09:20:00+00")
def test_weather_history_at_place_fails_with_time_boundaries_in_the_future(self):
current_time = int(time.time())
self.assertRaises(ValueError, OWM25.weather_history_at_place,
self.__test_instance, "London,uk",
current_time + 1000, current_time + 2000)
def test_weather_history_at_place_fails_with_wrong_time_boundaries(self):
self.assertRaises(ValueError, OWM25.weather_history_at_place,
self.__test_instance, "London,uk", None, 1234567)
self.assertRaises(ValueError, OWM25.weather_history_at_place,
self.__test_instance, "London,uk", 1234567, None)
self.assertRaises(ValueError, OWM25.weather_history_at_place,
self.__test_instance, "London,uk", 1234567, None)
self.assertRaises(ValueError, OWM25.weather_history_at_place,
self.__test_instance, "London,uk", -1234567, None)
self.assertRaises(ValueError, OWM25.weather_history_at_place,
self.__test_instance, "London,uk", None, -1234567)
self.assertRaises(ValueError, OWM25.weather_history_at_place,
self.__test_instance, "London,uk", -999, -888)
self.assertRaises(ValueError, OWM25.weather_history_at_place,
self.__test_instance, "London,uk", "test", 1234567)
self.assertRaises(ValueError, OWM25.weather_history_at_place,
self.__test_instance, "London,uk", 1234567, "test")
def test_weather_history_at_place_fails_with_wrong_name(self):
self.assertRaises(AssertionError, OWM25.weather_history_at_place,
self.__test_instance, 1, "test", 1234567)
def test_weather_history_at_id(self):
ref_to_original_call_API = OWMHTTPClient.call_API
OWMHTTPClient.call_API = \
self.mock_httputils_call_API_returning_city_weather_history
result = self.__test_instance.weather_history_at_id(12345)
OWMHTTPClient.call_API = ref_to_original_call_API
self.assertTrue(isinstance(result, list))
for weather in result:
self.assertTrue(isinstance(weather, Weather))
def test_weather_history_at_id_fails_with_negative_id(self):
self.assertRaises(ValueError, OWM25.weather_history_at_id,
self.__test_instance, -12345,
"2013-09-06 20:26:40+00", "2013-09-06 09:20:00+00")
def test_weather_history_at_id_fails_with_unordered_time_boundaries(self):
self.assertRaises(ValueError, OWM25.weather_history_at_id,
self.__test_instance, 12345,
"2013-09-06 20:26:40+00", "2013-09-06 09:20:00+00")
def test_weather_history_at_id_fails_with_time_boundaries_in_the_future(self):
current_time = int(time.time())
self.assertRaises(ValueError, OWM25.weather_history_at_id,
self.__test_instance, 12345,
current_time + 1000, current_time + 2000)
def test_weather_history_at_id_fails_with_wrong_time_boundaries(self):
self.assertRaises(ValueError, OWM25.weather_history_at_id,
self.__test_instance, 12345, None, 1234567)
self.assertRaises(ValueError, OWM25.weather_history_at_id,
self.__test_instance, 12345, 1234567, None)
self.assertRaises(ValueError, OWM25.weather_history_at_id,
self.__test_instance, 12345, 1234567, None)
self.assertRaises(ValueError, OWM25.weather_history_at_id,
self.__test_instance, 12345, -1234567, None)
self.assertRaises(ValueError, OWM25.weather_history_at_id,
self.__test_instance, 12345, None, -1234567)
self.assertRaises(ValueError, OWM25.weather_history_at_id,
self.__test_instance, 12345, -999, -888)
self.assertRaises(ValueError, OWM25.weather_history_at_id,
self.__test_instance, 12345, "test", 1234567)
self.assertRaises(ValueError, OWM25.weather_history_at_id,
self.__test_instance, 12345, 1234567, "test")
def test_weather_at_stations_in_bbox(self):
ref_to_original_call_API = OWMHTTPClient.call_API
OWMHTTPClient.call_API = \
self.mock_httputils_call_API_returning_weather_at_stations_in_bbox
results = self.__test_instance\
.weather_at_stations_in_bbox(49.07,8.87,61.26,65.21)
OWMHTTPClient.call_API = ref_to_original_call_API
self.assertTrue(isinstance(results, list))
for result in results:
self.assertTrue(isinstance(result, Observation))
self.assertTrue(isinstance(result.get_weather(), Weather))
self.assertTrue(isinstance(result.get_location(), Location))
self.assertTrue(result.get_reception_time() is not None)
def test_station_tick_history(self):
ref_to_original_call_API = OWMHTTPClient.call_API
OWMHTTPClient.call_API = \
self.mock_httputils_call_API_returning_station_tick_weather_history
result = self.__test_instance.station_tick_history(1234, limit=4)
OWMHTTPClient.call_API = ref_to_original_call_API
self.assertTrue(isinstance(result, Historian))
station_history = result.get_station_history()
self.assertTrue(isinstance(station_history, StationHistory))
self.assertTrue(isinstance(station_history.get_measurements(), dict))
def test_station_tick_history_fails_with_wrong_params(self):
self.assertRaises(ValueError, OWM25.station_tick_history,
self.__test_instance, 1234, -3)
def test_station_tick_history_when_forecast_not_found(self):
ref_to_original_call_API = OWMHTTPClient.call_API
OWMHTTPClient.call_API = \
self.mock_httputils_call_API_returning_station_history_with_no_items
result = self.__test_instance.station_tick_history(1234, limit=4)
OWMHTTPClient.call_API = ref_to_original_call_API
self.assertIsNone(result)
def test_station_hour_history(self):
ref_to_original_call_API = OWMHTTPClient.call_API
OWMHTTPClient.call_API = \
self.mock_httputils_call_API_returning_station_hour_weather_history
result = self.__test_instance.station_hour_history(1234, limit=4)
OWMHTTPClient.call_API = ref_to_original_call_API
self.assertTrue(isinstance(result, Historian))
station_history = result.get_station_history()
self.assertTrue(isinstance(station_history, StationHistory))
self.assertTrue(isinstance(station_history.get_measurements(), dict))
def test_station_hour_history_fails_with_wrong_params(self):
self.assertRaises(ValueError, OWM25.station_hour_history,
self.__test_instance, 1234, -3)
def test_station_hour_history_when_forecast_not_found(self):
ref_to_original_call_API = OWMHTTPClient.call_API
OWMHTTPClient.call_API = \
self.mock_httputils_call_API_returning_station_history_with_no_items
result = self.__test_instance.station_hour_history(1234, limit=4)
OWMHTTPClient.call_API = ref_to_original_call_API
self.assertIsNone(result)
def test_station_day_history(self):
ref_to_original_call_API = OWMHTTPClient.call_API
OWMHTTPClient.call_API = \
self.mock_httputils_call_API_returning_station_day_weather_history
result = self.__test_instance.station_day_history(1234, limit=4)
OWMHTTPClient.call_API = ref_to_original_call_API
self.assertTrue(isinstance(result, Historian))
station_history = result.get_station_history()
self.assertTrue(isinstance(station_history, StationHistory))
self.assertTrue(isinstance(station_history.get_measurements(), dict))
def test_station_day_history_fails_with_wrong_params(self):
self.assertRaises(ValueError, OWM25.station_day_history,
self.__test_instance, 1234, -3)
def test_station_day_history_when_forecast_not_found(self):
ref_to_original_call_API = OWMHTTPClient.call_API
OWMHTTPClient.call_API = \
self.mock_httputils_call_API_returning_station_history_with_no_items
result = self.__test_instance.station_day_history(1234, limit=4)
OWMHTTPClient.call_API = ref_to_original_call_API
self.assertIsNone(result)
def test_station_at_coords(self):
ref_to_original_call_API = OWMHTTPClient.call_API
OWMHTTPClient.call_API = \
self.mock_httputils_call_API_returning_station_at_coords
results = self.__test_instance.station_at_coords(51.5073509,
-0.1277583, 2)
OWMHTTPClient.call_API = ref_to_original_call_API
self.assertTrue(isinstance(results, list))
for result in results:
self.assertTrue(isinstance(result, Station))
self.assertTrue(isinstance(result.get_lon(), float))
self.assertTrue(isinstance(result.get_lat(), float))
self.assertTrue(isinstance(result.get_distance(), float))
self.assertTrue(result.get_name())
self.assertTrue(isinstance(result.get_last_weather(), Weather))
self.assertTrue(isinstance(result.get_station_ID(), int))
self.assertTrue(isinstance(result.get_station_type(), int))
self.assertTrue(isinstance(result.get_status(), int))
|
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Coder implementations.
The actual encode/decode implementations are split off from coders to
allow conditional (compiled/pure) implementations, which can be used to
encode many elements with minimal overhead.
This module may be optionally compiled with Cython, using the corresponding
coder_impl.pxd file for type hints.
"""
from types import NoneType
from apache_beam.coders import observable
from apache_beam.utils.timestamp import Timestamp
from apache_beam.utils.timestamp import MAX_TIMESTAMP
from apache_beam.utils.timestamp import MIN_TIMESTAMP
from apache_beam.utils import windowed_value
# pylint: disable=wrong-import-order, wrong-import-position, ungrouped-imports
try:
from stream import InputStream as create_InputStream
from stream import OutputStream as create_OutputStream
from stream import ByteCountingOutputStream
from stream import get_varint_size
globals()['create_InputStream'] = create_InputStream
globals()['create_OutputStream'] = create_OutputStream
globals()['ByteCountingOutputStream'] = ByteCountingOutputStream
except ImportError:
from slow_stream import InputStream as create_InputStream
from slow_stream import OutputStream as create_OutputStream
from slow_stream import ByteCountingOutputStream
from slow_stream import get_varint_size
# pylint: enable=wrong-import-order, wrong-import-position, ungrouped-imports
class CoderImpl(object):
def encode_to_stream(self, value, stream, nested):
"""Reads object from potentially-nested encoding in stream."""
raise NotImplementedError
def decode_from_stream(self, stream, nested):
"""Reads object from potentially-nested encoding in stream."""
raise NotImplementedError
def encode(self, value):
"""Encodes an object to an unnested string."""
raise NotImplementedError
def decode(self, encoded):
"""Decodes an object to an unnested string."""
raise NotImplementedError
def estimate_size(self, value, nested=False):
"""Estimates the encoded size of the given value, in bytes."""
return self._get_nested_size(len(self.encode(value)), nested)
def _get_nested_size(self, inner_size, nested):
if not nested:
return inner_size
varint_size = get_varint_size(inner_size)
return varint_size + inner_size
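# Worked example (illustrative): for an inner encoding of 300 bytes, the
# varint length prefix needs 2 bytes (any size >= 128 does), so the nested
# size reported here would be 302, while the unnested size stays 300.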
def get_estimated_size_and_observables(self, value, nested=False):
"""Returns estimated size of value along with any nested observables.
The list of nested observables is returned as a list of 2-tuples of
(obj, coder_impl), where obj is an instance of observable.ObservableMixin,
and coder_impl is the CoderImpl that can be used to encode elements sent by
obj to its observers.
Arguments:
value: the value whose encoded size is to be estimated.
nested: whether the value is nested.
Returns:
The estimated encoded size of the given value and a list of observables
whose elements are 2-tuples of (obj, coder_impl) as described above.
"""
return self.estimate_size(value, nested), []
class SimpleCoderImpl(CoderImpl):
"""Subclass of CoderImpl implementing stream methods using encode/decode."""
def encode_to_stream(self, value, stream, nested):
"""Reads object from potentially-nested encoding in stream."""
stream.write(self.encode(value), nested)
def decode_from_stream(self, stream, nested):
"""Reads object from potentially-nested encoding in stream."""
return self.decode(stream.read_all(nested))
class StreamCoderImpl(CoderImpl):
"""Subclass of CoderImpl implementing encode/decode using stream methods."""
def encode(self, value):
out = create_OutputStream()
self.encode_to_stream(value, out, False)
return out.get()
def decode(self, encoded):
return self.decode_from_stream(create_InputStream(encoded), False)
def estimate_size(self, value, nested=False):
"""Estimates the encoded size of the given value, in bytes."""
out = ByteCountingOutputStream()
self.encode_to_stream(value, out, nested)
return out.get_count()
class CallbackCoderImpl(CoderImpl):
"""A CoderImpl that calls back to the _impl methods on the Coder itself.
This is the default implementation used if Coder._get_impl()
is not overwritten.
"""
def __init__(self, encoder, decoder, size_estimator=None):
self._encoder = encoder
self._decoder = decoder
self._size_estimator = size_estimator or self._default_size_estimator
def _default_size_estimator(self, value):
return len(self.encode(value))
def encode_to_stream(self, value, stream, nested):
return stream.write(self._encoder(value), nested)
def decode_from_stream(self, stream, nested):
return self._decoder(stream.read_all(nested))
def encode(self, value):
return self._encoder(value)
def decode(self, encoded):
return self._decoder(encoded)
def estimate_size(self, value, nested=False):
return self._get_nested_size(self._size_estimator(value), nested)
def get_estimated_size_and_observables(self, value, nested=False):
# TODO(robertwb): Remove this once all coders are correct.
if isinstance(value, observable.ObservableMixin):
# CallbackCoderImpl can presumably encode the elements too.
return 1, [(value, self)]
else:
return self.estimate_size(value, nested), []
class DeterministicFastPrimitivesCoderImpl(CoderImpl):
def __init__(self, coder, step_label):
self._underlying_coder = coder
self._step_label = step_label
def _check_safe(self, value):
if isinstance(value, (str, unicode, long, int, float)):
pass
elif value is None:
pass
elif isinstance(value, (tuple, list)):
for x in value:
self._check_safe(x)
else:
raise TypeError(
"Unable to deterministically code '%s' of type '%s', "
"please provide a type hint for the input of '%s'" % (
value, type(value), self._step_label))
def encode_to_stream(self, value, stream, nested):
self._check_safe(value)
return self._underlying_coder.encode_to_stream(value, stream, nested)
def decode_from_stream(self, stream, nested):
return self._underlying_coder.decode_from_stream(stream, nested)
def encode(self, value):
self._check_safe(value)
return self._underlying_coder.encode(value)
def decode(self, encoded):
return self._underlying_coder.decode(encoded)
def estimate_size(self, value, nested=False):
return self._underlying_coder.estimate_size(value, nested)
def get_estimated_size_and_observables(self, value, nested=False):
return self._underlying_coder.get_estimated_size_and_observables(
value, nested)
class ProtoCoderImpl(SimpleCoderImpl):
def __init__(self, proto_message_type):
self.proto_message_type = proto_message_type
def encode(self, value):
return value.SerializeToString()
def decode(self, encoded):
proto_message = self.proto_message_type()
proto_message.ParseFromString(encoded)
return proto_message
UNKNOWN_TYPE = 0xFF
NONE_TYPE = 0
INT_TYPE = 1
FLOAT_TYPE = 2
STR_TYPE = 3
UNICODE_TYPE = 4
BOOL_TYPE = 9
LIST_TYPE = 5
TUPLE_TYPE = 6
DICT_TYPE = 7
SET_TYPE = 8
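# Illustrative wire format (a sketch, not normative): FastPrimitivesCoderImpl
# writes a one-byte tag followed by the payload, e.g.
#
#     True    -> BOOL_TYPE byte, then a 0x01 byte
#     7       -> INT_TYPE byte, then the varint 0x07
#     [1, 2]  -> LIST_TYPE byte, varint length 2, then each element encoded
#                recursively with nested=True
#
# Values without a dedicated tag fall through to UNKNOWN_TYPE and the
# fallback coder.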
class FastPrimitivesCoderImpl(StreamCoderImpl):
def __init__(self, fallback_coder_impl):
self.fallback_coder_impl = fallback_coder_impl
def get_estimated_size_and_observables(self, value, nested=False):
if isinstance(value, observable.ObservableMixin):
# FastPrimitivesCoderImpl can presumably encode the elements too.
return 1, [(value, self)]
else:
out = ByteCountingOutputStream()
self.encode_to_stream(value, out, nested)
return out.get_count(), []
def encode_to_stream(self, value, stream, nested):
t = type(value)
if t is NoneType:
stream.write_byte(NONE_TYPE)
elif t is int:
stream.write_byte(INT_TYPE)
stream.write_var_int64(value)
elif t is float:
stream.write_byte(FLOAT_TYPE)
stream.write_bigendian_double(value)
elif t is str:
stream.write_byte(STR_TYPE)
stream.write(value, nested)
elif t is unicode:
unicode_value = value # for typing
stream.write_byte(UNICODE_TYPE)
stream.write(unicode_value.encode('utf-8'), nested)
elif t is list or t is tuple or t is set:
stream.write_byte(
LIST_TYPE if t is list else TUPLE_TYPE if t is tuple else SET_TYPE)
stream.write_var_int64(len(value))
for e in value:
self.encode_to_stream(e, stream, True)
elif t is dict:
dict_value = value # for typing
stream.write_byte(DICT_TYPE)
stream.write_var_int64(len(dict_value))
for k, v in dict_value.iteritems():
self.encode_to_stream(k, stream, True)
self.encode_to_stream(v, stream, True)
elif t is bool:
stream.write_byte(BOOL_TYPE)
stream.write_byte(value)
else:
stream.write_byte(UNKNOWN_TYPE)
self.fallback_coder_impl.encode_to_stream(value, stream, nested)
def decode_from_stream(self, stream, nested):
t = stream.read_byte()
if t == NONE_TYPE:
return None
elif t == INT_TYPE:
return stream.read_var_int64()
elif t == FLOAT_TYPE:
return stream.read_bigendian_double()
elif t == STR_TYPE:
return stream.read_all(nested)
elif t == UNICODE_TYPE:
return stream.read_all(nested).decode('utf-8')
elif t == LIST_TYPE or t == TUPLE_TYPE or t == SET_TYPE:
vlen = stream.read_var_int64()
vlist = [self.decode_from_stream(stream, True) for _ in range(vlen)]
if t == LIST_TYPE:
return vlist
elif t == TUPLE_TYPE:
return tuple(vlist)
else:
return set(vlist)
elif t == DICT_TYPE:
vlen = stream.read_var_int64()
v = {}
for _ in range(vlen):
k = self.decode_from_stream(stream, True)
v[k] = self.decode_from_stream(stream, True)
return v
elif t == BOOL_TYPE:
return not not stream.read_byte()
else:
return self.fallback_coder_impl.decode_from_stream(stream, nested)
class BytesCoderImpl(CoderImpl):
"""A coder for bytes/str objects."""
def encode_to_stream(self, value, out, nested):
out.write(value, nested)
def decode_from_stream(self, in_stream, nested):
return in_stream.read_all(nested)
def encode(self, value):
assert isinstance(value, bytes), (value, type(value))
return value
def decode(self, encoded):
return encoded
class FloatCoderImpl(StreamCoderImpl):
def encode_to_stream(self, value, out, nested):
out.write_bigendian_double(value)
def decode_from_stream(self, in_stream, nested):
return in_stream.read_bigendian_double()
def estimate_size(self, unused_value, nested=False):
# A double is encoded as 8 bytes, regardless of nesting.
return 8
class IntervalWindowCoderImpl(StreamCoderImpl):
# TODO: Fn Harness only supports millis. Is this important enough to fix?
def _to_normal_time(self, value):
"""Convert "lexicographically ordered unsigned" to signed."""
return value - (1 << 63)
def _from_normal_time(self, value):
"""Convert signed to "lexicographically ordered unsigned"."""
return value + (1 << 63)
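# Worked example (illustrative): with this offset, a millisecond value of 0
# maps to 1 << 63, -1 maps to (1 << 63) - 1 and +1 maps to (1 << 63) + 1, so
# the big-endian unsigned bytes sort in the same order as the signed
# timestamps they represent.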
def encode_to_stream(self, value, out, nested):
span_micros = value.end.micros - value.start.micros
out.write_bigendian_uint64(self._from_normal_time(value.end.micros / 1000))
out.write_var_int64(span_micros / 1000)
def decode_from_stream(self, in_, nested):
end_millis = self._to_normal_time(in_.read_bigendian_uint64())
start_millis = end_millis - in_.read_var_int64()
from apache_beam.transforms.window import IntervalWindow
ret = IntervalWindow(start=Timestamp(micros=start_millis * 1000),
end=Timestamp(micros=end_millis * 1000))
return ret
def estimate_size(self, value, nested=False):
# An IntervalWindow is context-insensitive, with a timestamp (8 bytes)
# and a varint timespan.
span = value.end.micros - value.start.micros
return 8 + get_varint_size(span / 1000)
class TimestampCoderImpl(StreamCoderImpl):
def encode_to_stream(self, value, out, nested):
out.write_bigendian_int64(value.micros)
def decode_from_stream(self, in_stream, nested):
return Timestamp(micros=in_stream.read_bigendian_int64())
def estimate_size(self, unused_value, nested=False):
# A Timestamp is encoded as a 64-bit integer in 8 bytes, regardless of
# nesting.
return 8
small_ints = [chr(_) for _ in range(128)]
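# Precomputed single-byte encodings: the varint of any value in [0, 128) is a
# single byte equal to the value itself, so (illustratively)
# VarIntCoderImpl().encode(5) can simply return small_ints[5] == '\x05'
# without going through a stream.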
class VarIntCoderImpl(StreamCoderImpl):
"""A coder for long/int objects."""
def encode_to_stream(self, value, out, nested):
out.write_var_int64(value)
def decode_from_stream(self, in_stream, nested):
return in_stream.read_var_int64()
def encode(self, value):
ivalue = value # type cast
if 0 <= ivalue < len(small_ints):
return small_ints[ivalue]
else:
return StreamCoderImpl.encode(self, value)
def decode(self, encoded):
if len(encoded) == 1:
i = ord(encoded)
if 0 <= i < 128:
return i
return StreamCoderImpl.decode(self, encoded)
def estimate_size(self, value, nested=False):
# Note that VarInts are encoded the same way regardless of nesting.
return get_varint_size(value)
class SingletonCoderImpl(CoderImpl):
"""A coder that always encodes exactly one value."""
def __init__(self, value):
self._value = value
def encode_to_stream(self, value, stream, nested):
pass
def decode_from_stream(self, stream, nested):
return self._value
def encode(self, value):
b = '' # avoid byte vs str vs unicode error
return b
def decode(self, encoded):
return self._value
def estimate_size(self, value, nested=False):
return 0
class AbstractComponentCoderImpl(StreamCoderImpl):
"""CoderImpl for coders that are comprised of several component coders."""
def __init__(self, coder_impls):
for c in coder_impls:
assert isinstance(c, CoderImpl), c
self._coder_impls = tuple(coder_impls)
def _extract_components(self, value):
raise NotImplementedError
def _construct_from_components(self, components):
raise NotImplementedError
def encode_to_stream(self, value, out, nested):
values = self._extract_components(value)
if len(self._coder_impls) != len(values):
raise ValueError(
'Number of components does not match number of coders.')
for i in range(0, len(self._coder_impls)):
c = self._coder_impls[i] # type cast
c.encode_to_stream(values[i], out,
nested or i + 1 < len(self._coder_impls))
def decode_from_stream(self, in_stream, nested):
return self._construct_from_components(
[c.decode_from_stream(in_stream,
nested or i + 1 < len(self._coder_impls))
for i, c in enumerate(self._coder_impls)])
def estimate_size(self, value, nested=False):
"""Estimates the encoded size of the given value, in bytes."""
# TODO(ccy): This ignores sizes of observable components.
estimated_size, _ = (
self.get_estimated_size_and_observables(value))
return estimated_size
def get_estimated_size_and_observables(self, value, nested=False):
"""Returns estimated size of value along with any nested observables."""
values = self._extract_components(value)
estimated_size = 0
observables = []
for i in range(0, len(self._coder_impls)):
c = self._coder_impls[i] # type cast
child_size, child_observables = (
c.get_estimated_size_and_observables(
values[i], nested=nested or i + 1 < len(self._coder_impls)))
estimated_size += child_size
observables += child_observables
return estimated_size, observables
class TupleCoderImpl(AbstractComponentCoderImpl):
"""A coder for tuple objects."""
def _extract_components(self, value):
return value
def _construct_from_components(self, components):
return tuple(components)
class SequenceCoderImpl(StreamCoderImpl):
"""A coder for sequences.
If the length of the sequence is known, we encode the length as a 32-bit
``int`` followed by the encoded bytes.
If the length of the sequence is unknown, we encode the length as ``-1``,
then buffer the encoded elements up to 64K bytes at a time and write each
buffer out prefixed by its element count. A ``0`` count is encoded at the
end to indicate the end of the stream.
The resulting encoding would look like this::
-1
countA element(0) element(1) ... element(countA - 1)
countB element(0) element(1) ... element(countB - 1)
...
countX element(0) element(1) ... element(countX - 1)
0
"""
# Default buffer size of 64kB for handling iterables of unknown length.
_DEFAULT_BUFFER_SIZE = 64 * 1024
def __init__(self, elem_coder):
self._elem_coder = elem_coder
def _construct_from_sequence(self, values):
raise NotImplementedError
def encode_to_stream(self, value, out, nested):
# Compatible with Java's IterableLikeCoder.
if hasattr(value, '__len__'):
out.write_bigendian_int32(len(value))
for elem in value:
self._elem_coder.encode_to_stream(elem, out, True)
else:
# We don't know the size without traversing it so use a fixed size buffer
# and encode as many elements as possible into it before outputting
# the size followed by the elements.
# -1 to indicate that the length is not known.
out.write_bigendian_int32(-1)
buffer = create_OutputStream()
prev_index = index = -1
for index, elem in enumerate(value):
self._elem_coder.encode_to_stream(elem, buffer, True)
if buffer.size() > self._DEFAULT_BUFFER_SIZE:
out.write_var_int64(index - prev_index)
out.write(buffer.get())
prev_index = index
buffer = create_OutputStream()
if index > prev_index:
out.write_var_int64(index - prev_index)
out.write(buffer.get())
out.write_var_int64(0)
def decode_from_stream(self, in_stream, nested):
size = in_stream.read_bigendian_int32()
if size >= 0:
elements = [self._elem_coder.decode_from_stream(in_stream, True)
for _ in range(size)]
else:
elements = []
count = in_stream.read_var_int64()
while count > 0:
for _ in range(count):
elements.append(self._elem_coder.decode_from_stream(in_stream, True))
count = in_stream.read_var_int64()
return self._construct_from_sequence(elements)
def estimate_size(self, value, nested=False):
"""Estimates the encoded size of the given value, in bytes."""
# TODO(ccy): This ignores element sizes.
estimated_size, _ = (
self.get_estimated_size_and_observables(value))
return estimated_size
def get_estimated_size_and_observables(self, value, nested=False):
"""Returns estimated size of value along with any nested observables."""
estimated_size = 0
# Size of 32-bit integer storing number of elements.
estimated_size += 4
if isinstance(value, observable.ObservableMixin):
return estimated_size, [(value, self._elem_coder)]
else:
observables = []
for elem in value:
child_size, child_observables = (
self._elem_coder.get_estimated_size_and_observables(
elem, nested=True))
estimated_size += child_size
observables += child_observables
# TODO: (BEAM-1537) Update to use an accurate count depending on size and
# count, currently we are underestimating the size by up to 10 bytes
# per block of data since we are not including the count prefix which
# occurs at most once per 64k of data and is up to 10 bytes long. The upper
# bound of the underestimate is 10 / 65536 ~= 0.0153% of the actual size.
return estimated_size, observables
class TupleSequenceCoderImpl(SequenceCoderImpl):
"""A coder for homogeneous tuple objects."""
def _construct_from_sequence(self, components):
return tuple(components)
class IterableCoderImpl(SequenceCoderImpl):
"""A coder for homogeneous iterable objects."""
def _construct_from_sequence(self, components):
return components
class WindowedValueCoderImpl(StreamCoderImpl):
"""A coder for windowed values."""
# Ensure that lexicographic ordering of the bytes corresponds to
# chronological order of timestamps.
# TODO(BEAM-1524): Clean this up once we have a BEAM wide consensus on
# byte representation of timestamps.
def _to_normal_time(self, value):
"""Convert "lexicographically ordered unsigned" to signed."""
return value - (1 << 63)
def _from_normal_time(self, value):
"""Convert signed to "lexicographically ordered unsigned"."""
return value + (1 << 63)
def __init__(self, value_coder, timestamp_coder, window_coder):
# TODO(lcwik): Remove the timestamp coder field
self._value_coder = value_coder
self._timestamp_coder = timestamp_coder
self._windows_coder = TupleSequenceCoderImpl(window_coder)
def encode_to_stream(self, value, out, nested):
wv = value # type cast
# Avoid creation of Timestamp object.
restore_sign = -1 if wv.timestamp_micros < 0 else 1
out.write_bigendian_uint64(
# Convert to positive number and divide, since python rounds off to the
# lower negative number. For ex: -3 / 2 = -2, but we expect it to be -1,
# to be consistent across SDKs.
# TODO(BEAM-1524): Clean this up once we have a BEAM wide consensus on
# precision of timestamps.
self._from_normal_time(
restore_sign * (abs(wv.timestamp_micros) / 1000)))
self._windows_coder.encode_to_stream(wv.windows, out, True)
# Default PaneInfo encoded byte representing NO_FIRING.
# TODO(BEAM-1522): Remove the hard coding here once PaneInfo is supported.
out.write_byte(0xF)
self._value_coder.encode_to_stream(wv.value, out, nested)
def decode_from_stream(self, in_stream, nested):
timestamp = self._to_normal_time(in_stream.read_bigendian_uint64())
# Restore MIN/MAX timestamps to their actual values as encoding incurs loss
# of precision while converting to millis.
# Note: This is only a best effort here as there is no way to know if these
# were indeed MIN/MAX timestamps.
# TODO(BEAM-1524): Clean this up once we have a BEAM wide consensus on
# precision of timestamps.
if timestamp == -(abs(MIN_TIMESTAMP.micros) / 1000):
timestamp = MIN_TIMESTAMP.micros
elif timestamp == (MAX_TIMESTAMP.micros / 1000):
timestamp = MAX_TIMESTAMP.micros
else:
timestamp *= 1000
windows = self._windows_coder.decode_from_stream(in_stream, True)
# Read PaneInfo encoded byte.
# TODO(BEAM-1522): Ignored for now but should be converted to pane info once
# it is supported.
in_stream.read_byte()
value = self._value_coder.decode_from_stream(in_stream, nested)
return windowed_value.create(
value,
# Avoid creation of Timestamp object.
timestamp,
windows)
def get_estimated_size_and_observables(self, value, nested=False):
"""Returns estimated size of value along with any nested observables."""
if isinstance(value, observable.ObservableMixin):
# Should never be here.
# TODO(robertwb): Remove when coders are set correctly.
return 0, [(value, self._value_coder)]
estimated_size = 0
observables = []
value_estimated_size, value_observables = (
self._value_coder.get_estimated_size_and_observables(
value.value, nested=nested))
estimated_size += value_estimated_size
observables += value_observables
estimated_size += (
self._timestamp_coder.estimate_size(value.timestamp, nested=True))
estimated_size += (
self._windows_coder.estimate_size(value.windows, nested=True))
# for pane info
estimated_size += 1
return estimated_size, observables
class LengthPrefixCoderImpl(StreamCoderImpl):
"""Coder which prefixes the length of the encoded object in the stream."""
def __init__(self, value_coder):
self._value_coder = value_coder
def encode_to_stream(self, value, out, nested):
encoded_value = self._value_coder.encode(value)
out.write_var_int64(len(encoded_value))
out.write(encoded_value)
def decode_from_stream(self, in_stream, nested):
value_length = in_stream.read_var_int64()
return self._value_coder.decode(in_stream.read(value_length))
def estimate_size(self, value, nested=False):
value_size = self._value_coder.estimate_size(value)
return get_varint_size(value_size) + value_size
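# Illustrative example (assuming the wrapped coder is BytesCoderImpl): the
# value 'abc' is written as varint(3) followed by the three payload bytes,
# and estimate_size reports 1 + 3 = 4 bytes.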
|
# Copyright 2015-present MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Operation class definitions."""
from pymongo import helpers
from pymongo.common import validate_boolean, validate_is_mapping, validate_list
from pymongo.collation import validate_collation_or_none
from pymongo.helpers import _gen_index_name, _index_document, _index_list
class InsertOne(object):
"""Represents an insert_one operation."""
__slots__ = ("_doc",)
def __init__(self, document):
"""Create an InsertOne instance.
For use with :meth:`~pymongo.collection.Collection.bulk_write`.
:Parameters:
- `document`: The document to insert. If the document is missing an
_id field one will be added.
"""
self._doc = document
def _add_to_bulk(self, bulkobj):
"""Add this operation to the _Bulk instance `bulkobj`."""
bulkobj.add_insert(self._doc)
def __repr__(self):
return "InsertOne(%r)" % (self._doc,)
def __eq__(self, other):
if type(other) == type(self):
return other._doc == self._doc
return NotImplemented
def __ne__(self, other):
return not self == other
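# Illustrative use with bulk_write (a minimal sketch; the client, database and
# collection names below are assumptions, not part of this module):
#
#     from pymongo import MongoClient, InsertOne
#     coll = MongoClient().test_db.test_coll
#     coll.bulk_write([InsertOne({"x": 1}), InsertOne({"x": 2})])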
class DeleteOne(object):
"""Represents a delete_one operation."""
__slots__ = ("_filter", "_collation", "_hint")
def __init__(self, filter, collation=None, hint=None):
"""Create a DeleteOne instance.
For use with :meth:`~pymongo.collection.Collection.bulk_write`.
:Parameters:
- `filter`: A query that matches the document to delete.
- `collation` (optional): An instance of
:class:`~pymongo.collation.Collation`. This option is only
supported on MongoDB 3.4 and above.
- `hint` (optional): An index to use to support the query
predicate specified either by its string name, or in the same
format as passed to
:meth:`~pymongo.collection.Collection.create_index` (e.g.
``[('field', ASCENDING)]``). This option is only supported on
MongoDB 4.4 and above.
.. versionchanged:: 3.11
Added the ``hint`` option.
.. versionchanged:: 3.5
Added the `collation` option.
"""
if filter is not None:
validate_is_mapping("filter", filter)
if hint is not None:
if not isinstance(hint, str):
hint = helpers._index_document(hint)
self._filter = filter
self._collation = collation
self._hint = hint
def _add_to_bulk(self, bulkobj):
"""Add this operation to the _Bulk instance `bulkobj`."""
bulkobj.add_delete(self._filter, 1, collation=self._collation,
hint=self._hint)
def __repr__(self):
return "DeleteOne(%r, %r)" % (self._filter, self._collation)
def __eq__(self, other):
if type(other) == type(self):
return ((other._filter, other._collation) ==
(self._filter, self._collation))
return NotImplemented
def __ne__(self, other):
return not self == other
class DeleteMany(object):
"""Represents a delete_many operation."""
__slots__ = ("_filter", "_collation", "_hint")
def __init__(self, filter, collation=None, hint=None):
"""Create a DeleteMany instance.
For use with :meth:`~pymongo.collection.Collection.bulk_write`.
:Parameters:
- `filter`: A query that matches the documents to delete.
- `collation` (optional): An instance of
:class:`~pymongo.collation.Collation`. This option is only
supported on MongoDB 3.4 and above.
- `hint` (optional): An index to use to support the query
predicate specified either by its string name, or in the same
format as passed to
:meth:`~pymongo.collection.Collection.create_index` (e.g.
``[('field', ASCENDING)]``). This option is only supported on
MongoDB 4.4 and above.
.. versionchanged:: 3.11
Added the ``hint`` option.
.. versionchanged:: 3.5
Added the `collation` option.
"""
if filter is not None:
validate_is_mapping("filter", filter)
if hint is not None:
if not isinstance(hint, str):
hint = helpers._index_document(hint)
self._filter = filter
self._collation = collation
self._hint = hint
def _add_to_bulk(self, bulkobj):
"""Add this operation to the _Bulk instance `bulkobj`."""
bulkobj.add_delete(self._filter, 0, collation=self._collation,
hint=self._hint)
def __repr__(self):
return "DeleteMany(%r, %r)" % (self._filter, self._collation)
def __eq__(self, other):
if type(other) == type(self):
return ((other._filter, other._collation) ==
(self._filter, self._collation))
return NotImplemented
def __ne__(self, other):
return not self == other
class ReplaceOne(object):
"""Represents a replace_one operation."""
__slots__ = ("_filter", "_doc", "_upsert", "_collation", "_hint")
def __init__(self, filter, replacement, upsert=False, collation=None,
hint=None):
"""Create a ReplaceOne instance.
For use with :meth:`~pymongo.collection.Collection.bulk_write`.
:Parameters:
- `filter`: A query that matches the document to replace.
- `replacement`: The new document.
- `upsert` (optional): If ``True``, perform an insert if no documents
match the filter.
- `collation` (optional): An instance of
:class:`~pymongo.collation.Collation`. This option is only
supported on MongoDB 3.4 and above.
- `hint` (optional): An index to use to support the query
predicate specified either by its string name, or in the same
format as passed to
:meth:`~pymongo.collection.Collection.create_index` (e.g.
``[('field', ASCENDING)]``). This option is only supported on
MongoDB 4.2 and above.
.. versionchanged:: 3.11
Added the ``hint`` option.
.. versionchanged:: 3.5
Added the ``collation`` option.
"""
if filter is not None:
validate_is_mapping("filter", filter)
if upsert is not None:
validate_boolean("upsert", upsert)
if hint is not None:
if not isinstance(hint, str):
hint = helpers._index_document(hint)
self._filter = filter
self._doc = replacement
self._upsert = upsert
self._collation = collation
self._hint = hint
def _add_to_bulk(self, bulkobj):
"""Add this operation to the _Bulk instance `bulkobj`."""
bulkobj.add_replace(self._filter, self._doc, self._upsert,
collation=self._collation, hint=self._hint)
def __eq__(self, other):
if type(other) == type(self):
return (
(other._filter, other._doc, other._upsert, other._collation,
other._hint) == (self._filter, self._doc, self._upsert,
self._collation, self._hint))
return NotImplemented
def __ne__(self, other):
return not self == other
def __repr__(self):
return "%s(%r, %r, %r, %r, %r)" % (
self.__class__.__name__, self._filter, self._doc, self._upsert,
self._collation, self._hint)
class _UpdateOp(object):
"""Private base class for update operations."""
__slots__ = ("_filter", "_doc", "_upsert", "_collation", "_array_filters",
"_hint")
def __init__(self, filter, doc, upsert, collation, array_filters, hint):
if filter is not None:
validate_is_mapping("filter", filter)
if upsert is not None:
validate_boolean("upsert", upsert)
if array_filters is not None:
validate_list("array_filters", array_filters)
if hint is not None:
if not isinstance(hint, str):
hint = helpers._index_document(hint)
self._filter = filter
self._doc = doc
self._upsert = upsert
self._collation = collation
self._array_filters = array_filters
self._hint = hint
def __eq__(self, other):
if type(other) == type(self):
return (
(other._filter, other._doc, other._upsert, other._collation,
other._array_filters, other._hint) ==
(self._filter, self._doc, self._upsert, self._collation,
self._array_filters, self._hint))
return NotImplemented
def __ne__(self, other):
return not self == other
def __repr__(self):
return "%s(%r, %r, %r, %r, %r, %r)" % (
self.__class__.__name__, self._filter, self._doc, self._upsert,
self._collation, self._array_filters, self._hint)
class UpdateOne(_UpdateOp):
"""Represents an update_one operation."""
__slots__ = ()
def __init__(self, filter, update, upsert=False, collation=None,
array_filters=None, hint=None):
"""Represents an update_one operation.
For use with :meth:`~pymongo.collection.Collection.bulk_write`.
:Parameters:
- `filter`: A query that matches the document to update.
- `update`: The modifications to apply.
- `upsert` (optional): If ``True``, perform an insert if no documents
match the filter.
- `collation` (optional): An instance of
:class:`~pymongo.collation.Collation`. This option is only
supported on MongoDB 3.4 and above.
- `array_filters` (optional): A list of filters specifying which
array elements an update should apply. Requires MongoDB 3.6+.
- `hint` (optional): An index to use to support the query
predicate specified either by its string name, or in the same
format as passed to
:meth:`~pymongo.collection.Collection.create_index` (e.g.
``[('field', ASCENDING)]``). This option is only supported on
MongoDB 4.2 and above.
.. versionchanged:: 3.11
Added the `hint` option.
.. versionchanged:: 3.9
Added the ability to accept a pipeline as the `update`.
.. versionchanged:: 3.6
Added the `array_filters` option.
.. versionchanged:: 3.5
Added the `collation` option.
"""
super(UpdateOne, self).__init__(filter, update, upsert, collation,
array_filters, hint)
def _add_to_bulk(self, bulkobj):
"""Add this operation to the _Bulk instance `bulkobj`."""
bulkobj.add_update(self._filter, self._doc, False, self._upsert,
collation=self._collation,
array_filters=self._array_filters,
hint=self._hint)
class UpdateMany(_UpdateOp):
"""Represents an update_many operation."""
__slots__ = ()
def __init__(self, filter, update, upsert=False, collation=None,
array_filters=None, hint=None):
"""Create an UpdateMany instance.
For use with :meth:`~pymongo.collection.Collection.bulk_write`.
:Parameters:
- `filter`: A query that matches the documents to update.
- `update`: The modifications to apply.
- `upsert` (optional): If ``True``, perform an insert if no documents
match the filter.
- `collation` (optional): An instance of
:class:`~pymongo.collation.Collation`. This option is only
supported on MongoDB 3.4 and above.
- `array_filters` (optional): A list of filters specifying which
array elements an update should apply. Requires MongoDB 3.6+.
- `hint` (optional): An index to use to support the query
predicate specified either by its string name, or in the same
format as passed to
:meth:`~pymongo.collection.Collection.create_index` (e.g.
``[('field', ASCENDING)]``). This option is only supported on
MongoDB 4.2 and above.
.. versionchanged:: 3.11
Added the `hint` option.
.. versionchanged:: 3.9
Added the ability to accept a pipeline as the `update`.
.. versionchanged:: 3.6
Added the `array_filters` option.
.. versionchanged:: 3.5
Added the `collation` option.
"""
super(UpdateMany, self).__init__(filter, update, upsert, collation,
array_filters, hint)
def _add_to_bulk(self, bulkobj):
"""Add this operation to the _Bulk instance `bulkobj`."""
bulkobj.add_update(self._filter, self._doc, True, self._upsert,
collation=self._collation,
array_filters=self._array_filters,
hint=self._hint)
class IndexModel(object):
"""Represents an index to create."""
__slots__ = ("__document",)
def __init__(self, keys, **kwargs):
"""Create an Index instance.
For use with :meth:`~pymongo.collection.Collection.create_indexes`.
Takes either a single key or a list of (key, direction) pairs.
The key(s) must be an instance of :class:`basestring`
(:class:`str` in python 3), and the direction(s) must be one of
(:data:`~pymongo.ASCENDING`, :data:`~pymongo.DESCENDING`,
:data:`~pymongo.GEO2D`, :data:`~pymongo.GEOHAYSTACK`,
:data:`~pymongo.GEOSPHERE`, :data:`~pymongo.HASHED`,
:data:`~pymongo.TEXT`).
Valid options include, but are not limited to:
- `name`: custom name to use for this index - if none is
given, a name will be generated.
- `unique`: if ``True``, creates a uniqueness constraint on the index.
- `background`: if ``True``, this index should be created in the
background.
- `sparse`: if ``True``, omit from the index any documents that lack
the indexed field.
- `bucketSize`: for use with geoHaystack indexes.
Number of documents to group together within a certain proximity
to a given longitude and latitude.
- `min`: minimum value for keys in a :data:`~pymongo.GEO2D`
index.
- `max`: maximum value for keys in a :data:`~pymongo.GEO2D`
index.
- `expireAfterSeconds`: <int> Used to create an expiring (TTL)
collection. MongoDB will automatically delete documents from
this collection after <int> seconds. The indexed field must
be a UTC datetime or the data will not expire.
- `partialFilterExpression`: A document that specifies a filter for
a partial index. Requires MongoDB >= 3.2.
- `collation`: An instance of :class:`~pymongo.collation.Collation`
that specifies the collation to use in MongoDB >= 3.4.
- `wildcardProjection`: Allows users to include or exclude specific
field paths from a `wildcard index`_ using the { "$**" : 1} key
pattern. Requires MongoDB >= 4.2.
- `hidden`: if ``True``, this index will be hidden from the query
planner and will not be evaluated as part of query plan
selection. Requires MongoDB >= 4.4.
See the MongoDB documentation for a full list of supported options by
server version.
:Parameters:
- `keys`: a single key or a list of (key, direction)
pairs specifying the index to create
- `**kwargs` (optional): any additional index creation
options (see the above list) should be passed as keyword
arguments
.. versionchanged:: 3.11
Added the ``hidden`` option.
.. versionchanged:: 3.2
Added the ``partialFilterExpression`` option to support partial
indexes.
.. _wildcard index: https://docs.mongodb.com/master/core/index-wildcard/#wildcard-index-core
"""
keys = _index_list(keys)
if "name" not in kwargs:
kwargs["name"] = _gen_index_name(keys)
kwargs["key"] = _index_document(keys)
collation = validate_collation_or_none(kwargs.pop('collation', None))
self.__document = kwargs
if collation is not None:
self.__document['collation'] = collation
@property
def document(self):
"""An index document suitable for passing to the createIndexes
command.
"""
return self.__document
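# Illustrative use with create_indexes (a minimal sketch; the collection
# object is an assumption, not part of this module):
#
#     from pymongo import ASCENDING, DESCENDING, IndexModel
#     index1 = IndexModel([("hello", DESCENDING), ("world", ASCENDING)],
#                         name="hello_world")
#     index2 = IndexModel([("goodbye", DESCENDING)])
#     collection.create_indexes([index1, index2])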
|
#!/usr/bin/env python
#encoding: utf-8
"""
Fabric decorators mess up all the autoloading of the functions, so to generate
the doc we must read the source files...
"""
import os
import sys
from importlib import import_module
from subprocess import call
from itertools import chain
PATH = os.path.abspath(os.path.join(os.path.dirname(__file__), '../..'))
sys.path.append(PATH)
def get_iter_tasks_from_file(task_fd):
fun_path = task_fd.name[len(PATH) + 1:].replace('/', '.')
for line in task_fd:
if line.startswith('def '):
name, sig = line[4:].split(':')[0].split('(')
yield fun_path[:-3] + '.' + name, '(' + sig, 'autofunction'
elif line.startswith('class '):
name, sig = line[6:].split(':')[0].split('(')
yield fun_path[:-3] + '.' + name, '', 'autoclass'
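# Worked example (illustrative; the file name is hypothetical): for a file
# <PATH>/fabric_ci/do/foo.py containing the line "def deploy(env):", this
# yields ('fabric_ci.do.foo.deploy', '(env)', 'autofunction'); a line
# "class Deployer(object):" yields ('fabric_ci.do.foo.Deployer', '',
# 'autoclass').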
def get_task_files(file, flatten=True):
if os.path.isdir(file):
if file.split('/')[-1] == 'doc':
return []
res = [get_task_files(file + '/' + x) for x in os.listdir(file)
if get_task_files(file + '/' + x)]
if flatten:
return list(chain.from_iterable(res))
else:
return res
else:
if file.endswith('.py') and not file.split('/')[-1].startswith('__'):
return [open(file, 'r')]
else:
return []
def footer(doc):
doc.write("""
Indices and tables
============================
* :ref:`genindex`
* :ref:`modindex`
* :ref:`search`
""")
doc.close()
def header(doc):
doc.write("""
Welcome to fabric_ci's documentation!
=====================================
Here is a list of the available tasks for CI's fabric module
.. toctree::
:titlesonly:
""")
def document_dir(directory, doc):
module_docs = {}
for task_file in get_task_files(PATH + '/fabric_ci/' + directory):
module = task_file.name[len(PATH) + 1:][:-3].replace('/', '.')
task_mod = import_module(module)
if module not in module_docs:
module_docs[module] = open('{0}.rst'.format(module[10:]), 'w')
module_docs[module].write("""
{0}
=====================
.. currentmodule:: {0}
{1}
""".format(module[10:], task_mod.__doc__ or ''))
doc.write(' {0}\n'.format(module[10:]))
try:
for task_func_name, task_sig, doc_cmd \
in get_iter_tasks_from_file(task_file):
module_docs[module].write("""
.. {2}:: {0}{1}
""".format(task_func_name, task_sig, doc_cmd))
except Exception as e:
print "Exception: {0}".format(e)
raise
return module_docs
def main():
## delete old rst files
for x in os.listdir('.'):
if x.endswith('.rst'):
os.remove(x)
## main index file
doc = open('index.rst', 'w')
header(doc)
sys.path.insert(0, PATH)
module_docs = {}
for directory in ('lib', 'do', 'out', 'on'):
module_docs.update(document_dir(directory, doc))
for open_mod in module_docs.itervalues():
open_mod.close()
footer(doc)
## generate the html files
os.environ['PYTHONPATH'] = ':'.join(sys.path)
call(['make', 'html'])
if __name__ == '__main__':
main()
|
"""Config flow for the MELCloud platform."""
import asyncio
import logging
from typing import Optional
from aiohttp import ClientError, ClientResponseError
from async_timeout import timeout
import pymelcloud
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.const import CONF_PASSWORD, CONF_TOKEN, CONF_USERNAME, HTTP_FORBIDDEN
from .const import DOMAIN # pylint: disable=unused-import
_LOGGER = logging.getLogger(__name__)
class FlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
"""Handle a config flow."""
VERSION = 1
CONNECTION_CLASS = config_entries.CONN_CLASS_CLOUD_POLL
async def _create_entry(self, username: str, token: str):
"""Register new entry."""
await self.async_set_unique_id(username)
self._abort_if_unique_id_configured({CONF_TOKEN: token})
return self.async_create_entry(
title=username, data={CONF_USERNAME: username, CONF_TOKEN: token}
)
async def _create_client(
self,
username: str,
*,
password: Optional[str] = None,
token: Optional[str] = None,
):
"""Create client."""
if password is None and token is None:
raise ValueError(
"Invalid internal state. Called without either password or token"
)
try:
with timeout(10):
acquired_token = token
if acquired_token is None:
acquired_token = await pymelcloud.login(
username,
password,
self.hass.helpers.aiohttp_client.async_get_clientsession(),
)
await pymelcloud.get_devices(
acquired_token,
self.hass.helpers.aiohttp_client.async_get_clientsession(),
)
except ClientResponseError as err:
if err.status == 401 or err.status == HTTP_FORBIDDEN:
return self.async_abort(reason="invalid_auth")
return self.async_abort(reason="cannot_connect")
except (asyncio.TimeoutError, ClientError):
return self.async_abort(reason="cannot_connect")
return await self._create_entry(username, acquired_token)
async def async_step_user(self, user_input=None):
"""User initiated config flow."""
if user_input is None:
return self.async_show_form(
step_id="user",
data_schema=vol.Schema(
{vol.Required(CONF_USERNAME): str, vol.Required(CONF_PASSWORD): str}
),
)
username = user_input[CONF_USERNAME]
return await self._create_client(username, password=user_input[CONF_PASSWORD])
async def async_step_import(self, user_input):
"""Import a config entry."""
return await self._create_client(
user_input[CONF_USERNAME], token=user_input[CONF_TOKEN]
)
|
'''
This file is part of Resus.
Resus is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Resus is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Resus. If not, see <http://www.gnu.org/licenses/>.
'''
from libresus.util.contract import log
class Document(object):
def __init__(self, filename=None):
self.filename = filename
self.days = []
@log('hello', 'bye bye')
def add_day(self, day):
for d in self.days:
if d.date == day.date:
return False
if d.date > day.date:
break
self.days += [day]
self.days.sort(key=lambda d: d.date)
return True
def remove_day(self, date):
for i, d in enumerate(self.days):
if d.date == date:
del self.days[i]
return True
return False
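# Illustrative behaviour (a sketch; Day is assumed to be a simple object with
# a comparable .date attribute, not part of this module):
#
#     doc = Document('report.resus')
#     doc.add_day(Day(date(2015, 1, 2)))   # True, days kept sorted by date
#     doc.add_day(Day(date(2015, 1, 1)))   # True, inserted before the other
#     doc.add_day(Day(date(2015, 1, 1)))   # False, duplicate date rejected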
|
from os.path import join
from modl.datasets import get_data_dirs
from nilearn.datasets.utils import _fetch_file
from sklearn.datasets.base import Bunch
from nilearn.datasets import fetch_adhd as nilearn_fetch_adhd
import pandas as pd
import os
def fetch_adhd(n_subjects=40, data_dir=None,
url=None, resume=True,
modl_data_dir=None,
mask_url=None,
verbose=1):
dataset = nilearn_fetch_adhd(n_subjects=n_subjects,
data_dir=data_dir, url=url, resume=resume,
verbose=verbose)
root_dir = dataset.func[0]
tail_dir = ''
while tail_dir != 'adhd':
root_dir, tail_dir = os.path.split(root_dir)
root_dir = os.path.join(root_dir, tail_dir)
modl_data_dir = get_data_dirs(modl_data_dir)[0]
mask_data_dir = join(modl_data_dir, 'adhd')
if mask_url is None:
mask_url = 'http://amensch.fr/data/cogspaces/mask/mask_img.nii.gz'
_fetch_file(mask_url, mask_data_dir, resume=resume)
mask_img = join(mask_data_dir, 'mask_img.nii.gz')
behavioral = pd.DataFrame(dataset.phenotypic)
behavioral.loc[:, 'Subject'] = pd.to_numeric(behavioral.loc[:, 'Subject'])
behavioral.set_index('Subject', inplace=True)
behavioral.index.names = ['subject']
rest = pd.DataFrame(data=list(zip(dataset.func, dataset.confounds)),
columns=['filename', 'confounds'],
index=behavioral.index)
return Bunch(rest=rest,
behavioral=behavioral, description=dataset.description,
mask=mask_img, root=root_dir)
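# Illustrative call (a sketch; the first use downloads data, so the returned
# paths depend on the local data directories):
#
#     data = fetch_adhd(n_subjects=2)
#     data.rest['filename']   # one functional image path per subject
#     data.behavioral         # phenotypic DataFrame indexed by subject
#     data.mask               # path to the downloaded mask_img.nii.gz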
|
from __future__ import print_function
from django.test import TestCase
from django.test import Client
import mock
import os
os.environ["NUM_BUDGET_YEARS"] = '2'
from ...taxbrain.models import TaxSaveInputs
from ...taxbrain.models import convert_to_floats
from ...taxbrain.helpers import (expand_1D, expand_2D, expand_list, package_up_vars,
format_csv, arrange_totals_by_row, default_taxcalc_data)
from ...taxbrain.compute import DropqCompute, MockCompute, ElasticMockCompute
import taxcalc
from taxcalc import Policy
START_YEAR = u'2016'
class DynamicViewsTests(TestCase):
''' Test the views of this app. '''
def setUp(self):
# Every test needs a client.
self.client = Client()
def test_taxbrain_get(self):
# Issue a GET request.
response = self.client.get('/taxbrain/')
# Check that the response is 200 OK.
self.assertEqual(response.status_code, 200)
def test_behavioral_post(self):
#Monkey patch to mock out running of compute jobs
import sys
from webapp.apps.taxbrain import views
webapp_views = sys.modules['webapp.apps.taxbrain.views']
webapp_views.dropq_compute = MockCompute()
from webapp.apps.dynamic import views
dynamic_views = sys.modules['webapp.apps.dynamic.views']
dynamic_views.dropq_compute = MockCompute(num_times_to_wait=2)
# Do the microsim
data = {u'ID_BenefitSurtax_Switch_1': [u'True'],
u'ID_BenefitSurtax_Switch_0': [u'True'],
u'ID_BenefitSurtax_Switch_3': [u'True'],
u'ID_BenefitSurtax_Switch_2': [u'True'],
u'ID_BenefitSurtax_Switch_5': [u'True'],
u'ID_BenefitSurtax_Switch_4': [u'True'],
u'ID_BenefitSurtax_Switch_6': [u'True'],
u'has_errors': [u'False'], u'II_em': [u'4333'],
u'start_year': u'2016', 'csrfmiddlewaretoken': 'abc123'}
response = self.client.post('/taxbrain/', data)
# Check that redirect happens
self.assertEqual(response.status_code, 302)
# Go to results page
link_idx = response.url[:-1].rfind('/')
self.failUnless(response.url[:link_idx+1].endswith("taxbrain/"))
# Link to dynamic simulation
model_num = response.url[link_idx+1:-1]
dynamic_landing = '/dynamic/{0}/?start_year={1}'.format(model_num, START_YEAR)
response = self.client.get(dynamic_landing)
self.assertEqual(response.status_code, 200)
# Go to behavioral input page
dynamic_behavior = '/dynamic/behavioral/{0}/?start_year={1}'.format(model_num, START_YEAR)
response = self.client.get(dynamic_behavior)
self.assertEqual(response.status_code, 200)
# Do the partial equilibrium job submission
pe_data = {u'BE_inc': [u'0.4']}
response = self.client.post(dynamic_behavior, pe_data)
self.assertEqual(response.status_code, 302)
print(response)
self.failUnless(response.url[:-2].endswith("processing/"))
#Check that we are not done processing
not_ready_page = self.client.get(response.url)
self.assertEqual(not_ready_page.status_code, 200)
#Check should get a redirect this time
response = self.client.get(response.url)
self.assertEqual(response.status_code, 302)
link_idx = response.url[:-1].rfind('/')
self.failUnless(response.url[:link_idx+1].endswith("behavior_results/"))
def test_elastic_post(self):
#Monkey patch to mock out running of compute jobs
import sys
from webapp.apps.taxbrain import views
webapp_views = sys.modules['webapp.apps.taxbrain.views']
webapp_views.dropq_compute = MockCompute()
from webapp.apps.dynamic import views
dynamic_views = sys.modules['webapp.apps.dynamic.views']
dynamic_views.dropq_compute = ElasticMockCompute(num_times_to_wait=1)
# Do the microsim
data = {u'ID_BenefitSurtax_Switch_1': [u'True'],
u'ID_BenefitSurtax_Switch_0': [u'True'],
u'ID_BenefitSurtax_Switch_3': [u'True'],
u'ID_BenefitSurtax_Switch_2': [u'True'],
u'ID_BenefitSurtax_Switch_5': [u'True'],
u'ID_BenefitSurtax_Switch_4': [u'True'],
u'ID_BenefitSurtax_Switch_6': [u'True'],
u'has_errors': [u'False'], u'II_em': [u'4333'],
u'start_year': u'2016', 'csrfmiddlewaretoken': 'abc123'}
response = self.client.post('/taxbrain/', data)
# Check that redirect happens
self.assertEqual(response.status_code, 302)
# Go to results page
link_idx = response.url[:-1].rfind('/')
self.failUnless(response.url[:link_idx+1].endswith("taxbrain/"))
# Link to dynamic simulation
model_num = response.url[link_idx+1:-1]
dynamic_landing = '/dynamic/{0}/?start_year={1}'.format(model_num, START_YEAR)
response = self.client.get(dynamic_landing)
self.assertEqual(response.status_code, 200)
# Go to macro input page
dynamic_egdp = '/dynamic/macro/{0}/?start_year={1}'.format(model_num, START_YEAR)
response = self.client.get(dynamic_egdp)
self.assertEqual(response.status_code, 200)
# Do the elasticity job submission
el_data = {'elastic_gdp': [u'0.55']}
response = self.client.post(dynamic_egdp, el_data)
self.assertEqual(response.status_code, 302)
print(response)
self.failUnless(response.url[:-2].endswith("macro_processing/"))
#Check that we are not done processing
not_ready_page = self.client.get(response.url)
self.assertEqual(not_ready_page.status_code, 200)
        # This time we should get a redirect
response = self.client.get(response.url)
self.assertEqual(response.status_code, 302)
self.failUnless(response.url[:-2].endswith("macro_results/"))
|
#-------------------------------------------------------------------------
# Copyright (c) Microsoft. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#--------------------------------------------------------------------------
import os
import sys
import requests
from .constants import (
AZURE_STORAGE_ACCOUNT,
AZURE_STORAGE_ACCESS_KEY,
DEFAULT_HTTP_TIMEOUT,
DEV_ACCOUNT_NAME,
DEV_ACCOUNT_KEY,
EMULATED,
_USER_AGENT_STRING,
)
from ._http import HTTPError
from ._http.httpclient import _HTTPClient
from ._serialization import _storage_error_handler
from ._common_error import (
_ERROR_STORAGE_MISSING_INFO,
)
class _StorageClient(object):
'''
    This is the base class for BlobService, TableService and QueueService.
'''
def __init__(self, account_name=None, account_key=None, protocol='https',
host_base='', dev_host='', timeout=DEFAULT_HTTP_TIMEOUT,
sas_token=None, request_session=None):
'''
account_name:
your storage account name, required for all operations.
account_key:
your storage account key, required for all operations.
protocol:
            Optional. Protocol. Defaults to https.
host_base:
Optional. Live host base url. Defaults to Azure url. Override this
for on-premise.
dev_host:
Optional. Dev host url. Defaults to localhost.
timeout:
Optional. Timeout for the http request, in seconds.
sas_token:
Optional. Token to use to authenticate with shared access signature.
request_session:
Optional. Session object to use for http requests.
'''
self.account_name = account_name
self.account_key = account_key
self.requestid = None
self.protocol = protocol.lower()
self.host_base = host_base
self.dev_host = dev_host
self.sas_token = sas_token
        # Whether to use the local development storage account; set to True
        # below when the app is running in the Azure storage emulator.
self.use_local_storage = False
# check whether it is run in emulator.
if EMULATED in os.environ:
self.is_emulated = os.environ[EMULATED].lower() != 'false'
else:
self.is_emulated = False
# get account_name and account key. If they are not set when
# constructing, get the account and key from environment variables if
# the app is not run in azure emulator or use default development
# storage account and key if app is run in emulator.
if not self.account_name and not self.account_key:
if self.is_emulated:
self.account_name = DEV_ACCOUNT_NAME
self.account_key = DEV_ACCOUNT_KEY
self.protocol = 'http'
self.use_local_storage = True
else:
self.account_name = os.environ.get(AZURE_STORAGE_ACCOUNT)
self.account_key = os.environ.get(AZURE_STORAGE_ACCESS_KEY)
if not self.account_name:
raise ValueError(_ERROR_STORAGE_MISSING_INFO)
self._httpclient = _HTTPClient(
service_instance=self,
protocol=self.protocol,
timeout=timeout,
request_session=request_session or requests.Session(),
user_agent=_USER_AGENT_STRING,
)
self._batchclient = None
self._filter = self._perform_request_worker
def with_filter(self, filter):
'''
Returns a new service which will process requests with the specified
filter. Filtering operations can include logging, automatic retrying,
etc... The filter is a lambda which receives the HTTPRequest and
another lambda. The filter can perform any pre-processing on the
request, pass it off to the next lambda, and then perform any
post-processing on the response.
'''
res = type(self)(self.account_name, self.account_key, self.protocol,
self.host_base, self.dev_host,
self._httpclient.timeout)
old_filter = self._filter
def new_filter(request):
return filter(request, old_filter)
res._filter = new_filter
return res
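    # A minimal usage sketch (not part of the SDK): ``with_filter`` lets callers
    # wrap every request, e.g. for logging. ``blob_service`` is a hypothetical
    # instance of a concrete subclass; the request/response attribute names
    # used below (``method``, ``path``, ``status``) are assumptions about the
    # internal _http classes.
    #
    #   def logging_filter(request, next_filter):
    #       print('>> {0} {1}'.format(request.method, request.path))
    #       response = next_filter(request)
    #       print('<< {0}'.format(response.status))
    #       return response
    #
    #   logged_service = blob_service.with_filter(logging_filter)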
def set_proxy(self, host, port, user=None, password=None):
'''
Sets the proxy server host and port for the HTTP CONNECT Tunnelling.
host:
Address of the proxy. Ex: '192.168.0.100'
port:
Port of the proxy. Ex: 6000
user:
User for proxy authorization.
password:
Password for proxy authorization.
'''
self._httpclient.set_proxy(host, port, user, password)
@property
def timeout(self):
return self._httpclient.timeout
@timeout.setter
def timeout(self, value):
self._httpclient.timeout = value
def _get_host(self):
if self.use_local_storage:
return self.dev_host
else:
return self.account_name + self.host_base
def _perform_request_worker(self, request):
return self._httpclient.perform_request(request)
def _perform_request(self, request, text_encoding='utf-8'):
'''
        Sends the request and returns the response. Catches HTTPError and
        hands it to the error handler.
'''
try:
if self._batchclient is not None:
return self._batchclient.insert_request_to_batch(request)
else:
resp = self._filter(request)
if sys.version_info >= (3,) and isinstance(resp, bytes) and \
text_encoding:
resp = resp.decode(text_encoding)
except HTTPError as ex:
_storage_error_handler(ex)
return resp
|
from django.db.backends import BaseDatabaseClient
from django.conf import settings
import os
class DatabaseClient(BaseDatabaseClient):
def runshell(self):
args = ['']
db = settings.DATABASE_OPTIONS.get('db', settings.DATABASE_NAME)
user = settings.DATABASE_OPTIONS.get('user', settings.DATABASE_USER)
passwd = settings.DATABASE_OPTIONS.get('passwd', settings.DATABASE_PASSWORD)
host = settings.DATABASE_OPTIONS.get('host', settings.DATABASE_HOST)
port = settings.DATABASE_OPTIONS.get('port', settings.DATABASE_PORT)
defaults_file = settings.DATABASE_OPTIONS.get('read_default_file')
# Seems to be no good way to set sql_mode with CLI
if defaults_file:
args += ["--defaults-file=%s" % defaults_file]
if user:
args += ["--user=%s" % user]
if passwd:
args += ["--password=%s" % passwd]
if host:
args += ["--host=%s" % host]
if port:
args += ["--port=%s" % port]
if db:
args += [db]
os.execvp('mysql', args)
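# A minimal sketch of the resulting invocation (the setting names are Django's,
# the values are examples only): with DATABASE_USER = 'web', DATABASE_NAME =
# 'cms' and the remaining options empty, runshell() execs
#   mysql --user=web cms
# (args[0] stays '' and is only used as argv[0] of the spawned process).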
|
"""Experimental SDFILE Writer
No support for fields like
M CHG
M STY
and the like
"""
import os
# float format
#xxxxx.xxxx
class SDFloat:
"""Class to print out a float in the format the sd file
expects"""
def __init__(self, f):
self.f = f
def __str__(self):
f = self.f
if f>=0:
sign=""
else:
sign="-"
i = int(f)
r = int(abs(f-i) * 10000)
out = "%s%s.%04d"%(sign,abs(i),r)
padding = len("xxxxx.xxxx") - len(out)
assert padding >=0
out = " "*padding + out
return out
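# Example of the fixed-width output (computed from the code above):
#   str(SDFloat(1.5))   -> "    1.5000"
#   str(SDFloat(-0.25)) -> "   -0.2500"
# i.e. every value is right-justified in a ten character xxxxx.xxxx field.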
def writeAtom(atom):
"""atom->return the string that represents the atom in an sdfile"""
return atom._line.rstrip()
## symbol = atom.symbol
## try:
## x,y,z = map(SDFloat, (atom.x, atom.y, atom.z))
## except AttributeError:
## x,y,z = map(SDFloat,(0.0, 0.0, 0.0))
## weight = atom.weight
## charge = atom.charge
## stereo = 0
## hcount = atom.hcount
## massdiff = 0 # XXX FIX ME
## # don't know how to compute yet
## if charge:
## charge = 4-charge
## hcount = atom.hcount + 1
## return "%s%s%s%3s%2i%3i%3i%3i%3i%3i"%\
## (x,y,z,symbol,massdiff,charge,0,hcount,0,0)
def writeBond(bond, a1, a2):
return "%3i%3i%3i%s"%(a1,a2,bond.bondorder,bond._line.rstrip())
def writeMolecule(m, atoms=None, bonds=None, fields=1, index=1):
if atoms is None:
atoms = m.atoms
if bonds is None:
bonds = m.bonds
# first write a header
result = [""]
result.append(" -ISIS- 03150009252D ")
result.append("")
numatoms = len(atoms)
numbonds = len(bonds)
result.append("%3i%3i 0 0 0 0 0 0 0 0999 V2000"%(numatoms, numbonds))
atomOutput = []
for atom in atoms:
result.append(writeAtom(atom))
for bond in bonds:
atom1, atom2 = bond.atoms
a1, a2 = atoms.index(atom1), atoms.index(atom2)
result.append(writeBond(bond, a1+1, a2+1))
charges = []
for atom in atoms:
if atom.charge != 0:
charges.append("%4s%4s"%(atoms.index(atom)+1, atom.charge))
if charges:
result.append("M CHG%3s%s"%(len(charges), "".join(charges)))
result.append("M END")
if fields and hasattr(m, "fields") and not m.fields.has_key("ID"):
result.append("> <ID> (%s)"%index)
result.append(m.name)
result.append("")
if fields and hasattr(m, "fields"):
for key, val in m.fields.items():
result.append("> <%s> (%s)"%(key, index))
result.append(val)
result.append("")
else:
result.append("")
result.append("$$$$")
return "\n".join(result)
if __name__ == "__main__":
from frowns import MDL
reader = MDL.mdlin(open("../test/bad.sdf"))
m = reader.next()
#print writeMolecule(m)
print writeMolecule(m, m.atoms[0:1], [])
|
from itertools import chain
import multiprocessing
import os
import platform
import queue
import subprocess
from coalib.collecting.Collectors import collect_files
from coala_utils.string_processing.StringConverter import StringConverter
from coalib.output.printers.LOG_LEVEL import LOG_LEVEL
from coalib.processes.BearRunning import run
from coalib.processes.CONTROL_ELEMENT import CONTROL_ELEMENT
from coalib.processes.LogPrinterThread import LogPrinterThread
from coalib.results.Result import Result
from coalib.results.result_actions.ApplyPatchAction import ApplyPatchAction
from coalib.results.result_actions.IgnoreResultAction import IgnoreResultAction
from coalib.results.result_actions.PrintDebugMessageAction import (
PrintDebugMessageAction)
from coalib.results.result_actions.ShowPatchAction import ShowPatchAction
from coalib.results.RESULT_SEVERITY import RESULT_SEVERITY
from coalib.results.SourceRange import SourceRange
from coalib.settings.Setting import glob_list
from coalib.parsing.Globbing import fnmatch
ACTIONS = [ApplyPatchAction,
PrintDebugMessageAction,
ShowPatchAction,
IgnoreResultAction]
def get_cpu_count():
try:
return multiprocessing.cpu_count()
# cpu_count is not implemented for some CPU architectures/OSes
except NotImplementedError: # pragma: no cover
return 2
def fill_queue(queue_fill, any_list):
"""
Takes element from a list and populates a queue with those elements.
:param queue_fill: The queue to be filled.
:param any_list: List containing the elements.
"""
for elem in any_list:
queue_fill.put(elem)
def get_running_processes(processes):
return sum((1 if process.is_alive() else 0) for process in processes)
def create_process_group(command_array, **kwargs):
if platform.system() == 'Windows': # pragma posix: no cover
proc = subprocess.Popen(
command_array,
creationflags=subprocess.CREATE_NEW_PROCESS_GROUP,
**kwargs)
else: # pragma nt: no cover
proc = subprocess.Popen(command_array,
preexec_fn=os.setsid,
**kwargs)
return proc
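# Usage sketch (not coala API): a process created this way can later be
# terminated together with its children, e.g. on POSIX with
# ``os.killpg(proc.pid, signal.SIGTERM)`` (the child is its own group leader
# because of ``preexec_fn=os.setsid``) and on Windows with
# ``proc.send_signal(signal.CTRL_BREAK_EVENT)``.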
def get_default_actions(section):
"""
Parses the key ``default_actions`` in the given section.
:param section: The section where to parse from.
:return: A dict with the bearname as keys and their default
actions as values and another dict that contains bears
and invalid action names.
"""
try:
default_actions = dict(section['default_actions'])
except IndexError:
return {}, {}
action_dict = {action.get_metadata().name: action for action in ACTIONS}
invalid_action_set = default_actions.values() - action_dict.keys()
invalid_actions = {}
if len(invalid_action_set) != 0:
invalid_actions = {
bear: action
for bear, action in default_actions.items()
if action in invalid_action_set}
for invalid in invalid_actions.keys():
del default_actions[invalid]
actions = {bearname: action_dict[action_name]
for bearname, action_name in default_actions.items()}
return actions, invalid_actions
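# Example (sketch, assuming coala's usual ``key: value`` setting syntax): a
# section containing
#   default_actions = PEP8Bear: ApplyPatchAction, *: ShowPatchAction
# would yield ({'PEP8Bear': ApplyPatchAction, '*': ShowPatchAction}, {});
# an unknown action name ends up in the second (invalid actions) dict instead.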
def autoapply_actions(results,
file_dict,
file_diff_dict,
section,
log_printer):
"""
Auto-applies actions like defined in the given section.
:param results: A list of results.
:param file_dict: A dictionary containing the name of files and its
contents.
:param file_diff_dict: A dictionary that contains filenames as keys and
diff objects as values.
:param section: The section.
:param log_printer: A log printer instance to log messages on.
:return: A list of unprocessed results.
"""
default_actions, invalid_actions = get_default_actions(section)
no_autoapply_warn = bool(section.get('no_autoapply_warn', False))
for bearname, actionname in invalid_actions.items():
log_printer.warn('Selected default action {!r} for bear {!r} does '
'not exist. Ignoring action.'.format(actionname,
bearname))
if len(default_actions) == 0:
# There's nothing to auto-apply.
return results
not_processed_results = []
for result in results:
try:
# Match full bear names deterministically, prioritized!
action = default_actions[result.origin]
except KeyError:
for bear_glob in default_actions:
if fnmatch(result.origin, bear_glob):
action = default_actions[bear_glob]
break
else:
not_processed_results.append(result)
continue
applicable = action.is_applicable(result, file_dict, file_diff_dict)
if applicable is not True:
if not no_autoapply_warn:
log_printer.warn('{}: {}'.format(result.origin, applicable))
not_processed_results.append(result)
continue
try:
action().apply_from_section(result,
file_dict,
file_diff_dict,
section)
log_printer.info('Applied {!r} on {} from {!r}.'.format(
action.get_metadata().name,
result.location_repr(),
result.origin))
except Exception as ex:
not_processed_results.append(result)
log_printer.log_exception(
'Failed to execute action {!r} with error: {}.'.format(
action.get_metadata().name, ex),
ex)
log_printer.debug('-> for result ' + repr(result) + '.')
return not_processed_results
def check_result_ignore(result, ignore_ranges):
"""
Determines if the result has to be ignored.
Any result will be ignored if its origin matches any bear names and its
SourceRange overlaps with the ignore range.
Note that everything after a space in the origin will be cut away, so the
user can ignore results with an origin like `CSecurityBear (buffer)` with
just `# Ignore CSecurityBear`.
:param result: The result that needs to be checked.
:param ignore_ranges: A list of tuples, each containing a list of lower
cased affected bearnames and a SourceRange to
ignore. If any of the bearname lists is empty, it
is considered an ignore range for all bears.
This may be a list of globbed bear wildcards.
:return: True if the result has to be ignored.
"""
for bears, range in ignore_ranges:
orig = result.origin.lower().split(' ')[0]
if (result.overlaps(range) and
(len(bears) == 0 or orig in bears or fnmatch(orig, bears))):
return True
return False
def print_result(results,
file_dict,
retval,
print_results,
section,
log_printer,
file_diff_dict,
ignore_ranges,
console_printer):
"""
Takes the results produced by each bear and gives them to the print_results
method to present to the user.
:param results: A list of results.
:param file_dict: A dictionary containing the name of files and its
contents.
:param retval: It is True if no results were yielded ever before.
If it is False this function will return False no
matter what happens. Else it depends on if this
invocation yields results.
:param print_results: A function that prints all given results appropriate
to the output medium.
:param file_diff_dict: A dictionary that contains filenames as keys and
diff objects as values.
:param ignore_ranges: A list of SourceRanges. Results that affect code in
any of those ranges will be ignored.
:param console_printer: Object to print messages on the console.
:return: Returns False if any results were yielded. Else
True.
"""
min_severity_str = str(section.get('min_severity', 'INFO')).upper()
    min_severity = RESULT_SEVERITY.str_dict.get(min_severity_str,
                                                RESULT_SEVERITY.INFO)
results = list(filter(lambda result:
type(result) is Result and
result.severity >= min_severity and
not check_result_ignore(result, ignore_ranges),
results))
patched_results = autoapply_actions(results,
file_dict,
file_diff_dict,
section,
log_printer)
print_results(log_printer,
section,
patched_results,
file_dict,
file_diff_dict,
console_printer)
return retval or len(results) > 0, patched_results
def get_file_dict(filename_list, log_printer):
"""
Reads all files into a dictionary.
:param filename_list: List of names of paths to files to get contents of.
:param log_printer: The logger which logs errors.
:return: Reads the content of each file into a dictionary
with filenames as keys.
"""
file_dict = {}
for filename in filename_list:
try:
with open(filename, 'r', encoding='utf-8') as _file:
file_dict[filename] = tuple(_file.readlines())
except UnicodeDecodeError:
log_printer.warn("Failed to read file '{}'. It seems to contain "
'non-unicode characters. Leaving it '
'out.'.format(filename))
except OSError as exception: # pragma: no cover
log_printer.log_exception("Failed to read file '{}' because of "
'an unknown error. Leaving it '
'out.'.format(filename),
exception,
log_level=LOG_LEVEL.WARNING)
log_printer.debug('Files that will be checked:\n' +
'\n'.join(file_dict.keys()))
return file_dict
def filter_raising_callables(it, exception, *args, **kwargs):
"""
Filters all callable items inside the given iterator that raise the
given exceptions.
:param it: The iterator to filter.
:param exception: The (tuple of) exception(s) to filter for.
:param args: Positional arguments to pass to the callable.
:param kwargs: Keyword arguments to pass to the callable.
"""
for elem in it:
try:
yield elem(*args, **kwargs)
except exception:
pass
def instantiate_bears(section,
local_bear_list,
global_bear_list,
file_dict,
message_queue,
console_printer):
"""
Instantiates each bear with the arguments it needs.
:param section: The section the bears belong to.
:param local_bear_list: List of local bear classes to instantiate.
:param global_bear_list: List of global bear classes to instantiate.
:param file_dict: Dictionary containing filenames and their
contents.
:param message_queue: Queue responsible to maintain the messages
delivered by the bears.
:param console_printer: Object to print messages on the console.
:return: The local and global bear instance lists.
"""
local_bear_list = [bear
for bear in filter_raising_callables(
local_bear_list,
RuntimeError,
section,
message_queue,
timeout=0.1)]
global_bear_list = [bear
for bear in filter_raising_callables(
global_bear_list,
RuntimeError,
file_dict,
section,
message_queue,
timeout=0.1)]
return local_bear_list, global_bear_list
def instantiate_processes(section,
local_bear_list,
global_bear_list,
job_count,
cache,
log_printer,
console_printer):
"""
Instantiate the number of processes that will run bears which will be
responsible for running bears in a multiprocessing environment.
:param section: The section the bears belong to.
:param local_bear_list: List of local bears belonging to the section.
:param global_bear_list: List of global bears belonging to the section.
:param job_count: Max number of processes to create.
:param cache: An instance of ``misc.Caching.FileCache`` to use as
a file cache buffer.
:param log_printer: The log printer to warn to.
:param console_printer: Object to print messages on the console.
:return: A tuple containing a list of processes,
and the arguments passed to each process which are
the same for each object.
"""
filename_list = collect_files(
glob_list(section.get('files', '')),
log_printer,
ignored_file_paths=glob_list(section.get('ignore', '')),
limit_file_paths=glob_list(section.get('limit_files', '')))
# This stores all matched files irrespective of whether coala is run
# only on changed files or not. Global bears require all the files
complete_filename_list = filename_list
complete_file_dict = get_file_dict(complete_filename_list, log_printer)
manager = multiprocessing.Manager()
global_bear_queue = multiprocessing.Queue()
filename_queue = multiprocessing.Queue()
local_result_dict = manager.dict()
global_result_dict = manager.dict()
message_queue = multiprocessing.Queue()
control_queue = multiprocessing.Queue()
loaded_local_bears_count = len(local_bear_list)
local_bear_list[:], global_bear_list[:] = instantiate_bears(
section,
local_bear_list,
global_bear_list,
complete_file_dict,
message_queue,
console_printer=console_printer)
loaded_valid_local_bears_count = len(local_bear_list)
# Start tracking all the files
if cache and loaded_valid_local_bears_count == loaded_local_bears_count:
cache.track_files(set(complete_filename_list))
changed_files = cache.get_uncached_files(
set(filename_list)) if cache else filename_list
# If caching is enabled then the local bears should process only the
# changed files.
log_printer.debug("coala is run only on changed files, bears' log "
'messages from previous runs may not appear. You may '
'use the `--flush-cache` flag to see them.')
filename_list = changed_files
# Note: the complete file dict is given as the file dict to bears and
# the whole project is accessible to every bear. However, local bears are
# run only for the changed files if caching is enabled.
file_dict = {filename: complete_file_dict[filename]
for filename in filename_list
if filename in complete_file_dict}
bear_runner_args = {'file_name_queue': filename_queue,
'local_bear_list': local_bear_list,
'global_bear_list': global_bear_list,
'global_bear_queue': global_bear_queue,
'file_dict': file_dict,
'local_result_dict': local_result_dict,
'global_result_dict': global_result_dict,
'message_queue': message_queue,
'control_queue': control_queue,
'timeout': 0.1}
fill_queue(filename_queue, file_dict.keys())
fill_queue(global_bear_queue, range(len(global_bear_list)))
return ([multiprocessing.Process(target=run, kwargs=bear_runner_args)
for i in range(job_count)],
bear_runner_args)
def get_ignore_scope(line, keyword):
"""
Retrieves the bears that are to be ignored defined in the given line.
:param line: The line containing the ignore declaration.
:param keyword: The keyword that was found. Everything after the rightmost
occurrence of it will be considered for the scope.
:return: A list of lower cased bearnames or an empty list (-> "all")
"""
toignore = line[line.rfind(keyword) + len(keyword):]
if toignore.startswith('all'):
return []
else:
return list(StringConverter(toignore, list_delimiters=', '))
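# Worked examples (lines are already lower-cased by ``yield_ignore_ranges``):
#   get_ignore_scope('# start ignoring pep8bear, linelengthbear',
#                    'start ignoring ')  -> ['pep8bear', 'linelengthbear']
#   get_ignore_scope('# ignore all', 'ignore ')  -> []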
def yield_ignore_ranges(file_dict):
"""
Yields tuples of affected bears and a SourceRange that shall be ignored for
those.
:param file_dict: The file dictionary.
"""
for filename, file in file_dict.items():
start = None
bears = []
stop_ignoring = False
for line_number, line in enumerate(file, start=1):
# Before lowering all lines ever read, first look for the biggest
# common substring, case sensitive: I*gnor*e, start i*gnor*ing,
# N*oqa*.
if 'gnor' in line or 'oqa' in line:
line = line.lower()
if 'start ignoring ' in line:
start = line_number
bears = get_ignore_scope(line, 'start ignoring ')
elif 'stop ignoring' in line:
stop_ignoring = True
if start:
yield (bears,
SourceRange.from_values(
filename,
start,
1,
line_number,
len(file[line_number-1])))
else:
for ignore_stmt in ['ignore ', 'noqa ', 'noqa']:
if ignore_stmt in line:
end_line = min(line_number + 1, len(file))
yield (get_ignore_scope(line, ignore_stmt),
SourceRange.from_values(
filename,
line_number, 1,
end_line, len(file[end_line-1])))
break
if stop_ignoring is False and start is not None:
yield (bears,
SourceRange.from_values(filename,
start,
1,
len(file),
len(file[-1])))
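# Example (sketch): a file containing
#   line 1: x = 1  # Start ignoring PEP8Bear
#   line 2: y = 2
#   line 3: # Stop ignoring
# yields (['pep8bear'], SourceRange covering lines 1-3), so PEP8Bear results
# overlapping those lines are dropped later by ``check_result_ignore``.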
def get_file_list(results):
"""
Get the set of files that are affected in the given results.
:param results: A list of results from which the list of files is to be
extracted.
:return: A set of file paths containing the mentioned list of
files.
"""
return {code.file for result in results for code in result.affected_code}
def process_queues(processes,
control_queue,
local_result_dict,
global_result_dict,
file_dict,
print_results,
section,
cache,
log_printer,
console_printer):
"""
Iterate the control queue and send the results received to the print_result
method so that they can be presented to the user.
:param processes: List of processes which can be used to run
Bears.
:param control_queue: Containing control elements that indicate
whether there is a result available and which
bear it belongs to.
:param local_result_dict: Dictionary containing results respective to
local bears. It is modified by the processes
i.e. results are added to it by multiple
processes.
:param global_result_dict: Dictionary containing results respective to
global bears. It is modified by the processes
i.e. results are added to it by multiple
processes.
:param file_dict: Dictionary containing file contents with
filename as keys.
:param print_results: Prints all given results appropriate to the
output medium.
:param cache: An instance of ``misc.Caching.FileCache`` to use
as a file cache buffer.
:return: Return True if all bears execute successfully and
Results were delivered to the user. Else False.
"""
file_diff_dict = {}
retval = False
# Number of processes working on local/global bears. They are count down
# when the last queue element of that process is processed which may be
# *after* the process has ended!
local_processes = len(processes)
global_processes = len(processes)
global_result_buffer = []
result_files = set()
ignore_ranges = list(yield_ignore_ranges(file_dict))
# One process is the logger thread
while local_processes > 1:
try:
control_elem, index = control_queue.get(timeout=0.1)
if control_elem == CONTROL_ELEMENT.LOCAL_FINISHED:
local_processes -= 1
elif control_elem == CONTROL_ELEMENT.GLOBAL_FINISHED:
global_processes -= 1
elif control_elem == CONTROL_ELEMENT.LOCAL:
assert local_processes != 0
result_files.update(get_file_list(local_result_dict[index]))
retval, res = print_result(local_result_dict[index],
file_dict,
retval,
print_results,
section,
log_printer,
file_diff_dict,
ignore_ranges,
console_printer=console_printer)
local_result_dict[index] = res
else:
assert control_elem == CONTROL_ELEMENT.GLOBAL
global_result_buffer.append(index)
except queue.Empty:
if get_running_processes(processes) < 2: # pragma: no cover
# Recover silently, those branches are only
# nondeterministically covered.
break
# Flush global result buffer
for elem in global_result_buffer:
result_files.update(get_file_list(global_result_dict[elem]))
retval, res = print_result(global_result_dict[elem],
file_dict,
retval,
print_results,
section,
log_printer,
file_diff_dict,
ignore_ranges,
console_printer=console_printer)
global_result_dict[elem] = res
# One process is the logger thread
while global_processes > 1:
try:
control_elem, index = control_queue.get(timeout=0.1)
if control_elem == CONTROL_ELEMENT.GLOBAL:
result_files.update(get_file_list(global_result_dict[index]))
retval, res = print_result(global_result_dict[index],
file_dict,
retval,
print_results,
section,
log_printer,
file_diff_dict,
ignore_ranges,
console_printer)
global_result_dict[index] = res
else:
assert control_elem == CONTROL_ELEMENT.GLOBAL_FINISHED
global_processes -= 1
except queue.Empty:
if get_running_processes(processes) < 2: # pragma: no cover
# Recover silently, those branches are only
# nondeterministically covered.
break
if cache:
cache.untrack_files(result_files)
return retval
def simplify_section_result(section_result):
"""
Takes in a section's result from ``execute_section`` and simplifies it
for easy usage in other functions.
:param section_result: The result of a section which was executed.
:return: Tuple containing:
- bool - True if results were yielded
- bool - True if unfixed results were yielded
- list - Results from all bears (local and global)
"""
section_yielded_result = section_result[0]
results_for_section = []
for value in chain(section_result[1].values(),
section_result[2].values()):
if value is None:
continue
for result in value:
results_for_section.append(result)
section_yielded_unfixed_results = len(results_for_section) > 0
return (section_yielded_result,
section_yielded_unfixed_results,
results_for_section)
def execute_section(section,
global_bear_list,
local_bear_list,
print_results,
cache,
log_printer,
console_printer):
"""
Executes the section with the given bears.
The execute_section method does the following things:
1. Prepare a Process
- Load files
- Create queues
2. Spawn up one or more Processes
3. Output results from the Processes
4. Join all processes
:param section: The section to execute.
:param global_bear_list: List of global bears belonging to the section.
Dependencies are already resolved.
:param local_bear_list: List of local bears belonging to the section.
Dependencies are already resolved.
:param print_results: Prints all given results appropriate to the
output medium.
:param cache: An instance of ``misc.Caching.FileCache`` to use as
a file cache buffer.
:param log_printer: The log_printer to warn to.
:param console_printer: Object to print messages on the console.
:return: Tuple containing a bool (True if results were
yielded, False otherwise), a Manager.dict
containing all local results(filenames are key)
and a Manager.dict containing all global bear
results (bear names are key) as well as the
file dictionary.
"""
try:
running_processes = int(section['jobs'])
except ValueError:
log_printer.warn("Unable to convert setting 'jobs' into a number. "
'Falling back to CPU count.')
running_processes = get_cpu_count()
except IndexError:
running_processes = get_cpu_count()
processes, arg_dict = instantiate_processes(section,
local_bear_list,
global_bear_list,
running_processes,
cache,
log_printer,
console_printer=console_printer)
logger_thread = LogPrinterThread(arg_dict['message_queue'],
log_printer)
# Start and join the logger thread along with the processes to run bears
processes.append(logger_thread)
for runner in processes:
runner.start()
try:
return (process_queues(processes,
arg_dict['control_queue'],
arg_dict['local_result_dict'],
arg_dict['global_result_dict'],
arg_dict['file_dict'],
print_results,
section,
cache,
log_printer,
console_printer=console_printer),
arg_dict['local_result_dict'],
arg_dict['global_result_dict'],
arg_dict['file_dict'])
finally:
logger_thread.running = False
for runner in processes:
runner.join()
|
import csv
import re
import time
import calendar
import os
# 3rd party
import requests
# local
import config
REQUEST_METHODS = ['GET', 'POST', 'PUT', 'HEAD']
CSV_FIELDS = ['timestamp', 'response_time', 'status_code', 'down']
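# The local ``config`` module is site specific. A minimal sketch of what this
# script expects from it (names inferred from the code below, values are
# examples only):
#
#   GLOBAL_TIMEOUT = 10                              # default timeout, seconds
#   MONITOR_FILE = '/var/log/monitor/{name}.csv'     # formatted with each site dict
#   SITES = [
#       {'name': 'example', 'url': 'http://example.com',
#        'http_method': 'GET', 'body_string': 'Example Domain'},
#   ]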
for site in config.SITES:
if 'url' not in site:
print 'No URL for that site: %s' % site
continue
if 'http_method' not in site or site['http_method'] not in REQUEST_METHODS:
site['http_method'] = 'GET'
if 'timeout' not in site:
site['timeout'] = config.GLOBAL_TIMEOUT
# Do request
r = requests.request(site['http_method'], site['url'], timeout=site['timeout'])
# Check response body lines
string_match = None
if r.status_code == 200 and 'body_string' in site:
string_match = False
for line in r.iter_lines():
if not re.search(site['body_string'], line):
continue
string_match = True
break
# Create CSV line
csv_line = {}
csv_line['timestamp'] = calendar.timegm( time.gmtime() )
csv_line['url'] = r.url
csv_line['status_code'] = r.status_code
csv_line['response_time'] = float(r.elapsed.seconds) + float(r.elapsed.microseconds)/1000000.0
if string_match is not None and not string_match:
csv_line['down'] = 'Y'
    elif r.status_code != 200:
csv_line['down'] = 'Y'
else:
csv_line['down'] = 'N'
monitor_filename = config.MONITOR_FILE.format(**site)
monitor_new = False
try:
file_stat = os.stat(monitor_filename)
if file_stat.st_size < 5:
monitor_new = True
except OSError:
monitor_new = True
    with open(monitor_filename, '%sb' % ('w' if monitor_new else 'a')) as csv_file:
csv_writer = csv.DictWriter(csv_file, CSV_FIELDS, extrasaction='ignore')
if monitor_new:
csv_writer.writeheader()
csv_writer.writerow(csv_line)
|
from threading import Lock
_None = type('_None', (object, ), {})()
class LDict(object):
def __init__(self, compare=False, default=_None, key_func=None):
self._lk = Lock()
self.compare = compare
self.values = {}
self.default = default
self.key_func = key_func
def __getitem__(self, key):
if self.key_func is not None:
key = self.key_func(key)
with self._lk:
if self.default is not _None:
return self.values.get(key, self.default)
return self.values[key]
def get(self, key, d):
if self.key_func is not None:
key = self.key_func(key)
with self._lk:
return self.values.get(key, d)
def __setitem__(self, key, value):
if self.key_func is not None:
key = self.key_func(key)
with self._lk:
if self.compare:
old = self.values.get(key, 0)
if old >= value:
return
self.values[key] = value
def __delitem__(self, key):
if self.key_func is not None:
key = self.key_func(key)
with self._lk:
del self.values[key]
def pop(self, key, d=None):
if self.key_func is not None:
key = self.key_func(key)
with self._lk:
return self.values.pop(key, d)
def __contains__(self, key):
if self.key_func is not None:
key = self.key_func(key)
return key in self.values
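if __name__ == '__main__':
    # Minimal usage sketch (not part of the module's public contract): with
    # compare=True only the largest value ever stored for a key is kept.
    latest = LDict(compare=True, default=0)
    latest['build'] = 5
    latest['build'] = 3          # ignored because 3 < 5
    assert latest['build'] == 5
    # key_func normalizes keys before every operation.
    by_lower = LDict(key_func=lambda k: k.lower(), default=None)
    by_lower['Foo'] = 1
    assert by_lower['foo'] == 1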
|
from pandac.PandaModules import *
from DNAError import DNAError
from DNASuitPoint import DNASuitPoint
from DNASuitPath import DNASuitPath
from DNASuitEdge import DNASuitEdge
class DNAStorage:
def __init__(self):
self.suitPoints = []
self.suitPointMap = {}
self.DNAGroups = {}
self.DNAVisGroups = []
self.suitEdges = {}
self.battleCells = []
self.nodes = {}
self.hoodNodes = {}
self.placeNodes = {}
self.fonts = {}
self.blockTitles = {}
self.blockArticles = {}
self.blockBuildingTypes = {}
self.blockDoors = {}
self.blockNumbers = []
self.blockZones = {}
self.textures = {}
self.catalogCodes = {}
def getSuitPath(self, startPoint, endPoint, minPathLen=40, maxPathLen=300):
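        # Walk the suit edge graph from startPoint, at each step following the
        # first outgoing edge whose end point is not a front/side door point,
        # until endPoint is reached (once at least minPathLen points have been
        # collected) or the path grows to maxPathLen points.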
path = DNASuitPath()
path.addPoint(startPoint)
while path.getNumPoints() < maxPathLen:
startPointIndex = startPoint.getIndex()
if startPointIndex == endPoint.getIndex():
if path.getNumPoints() >= minPathLen:
break
if startPointIndex not in self.suitEdges:
raise DNAError('Could not find DNASuitPath.')
edges = self.suitEdges[startPointIndex]
for edge in edges:
startPoint = edge.getEndPoint()
startPointType = startPoint.getPointType()
if startPointType != DNASuitPoint.FRONT_DOOR_POINT:
if startPointType != DNASuitPoint.SIDE_DOOR_POINT:
break
else:
raise DNAError('Could not find DNASuitPath.')
path.addPoint(startPoint)
return path
def getSuitEdgeTravelTime(self, startIndex, endIndex, suitWalkSpeed):
startPoint = self.suitPointMap.get(startIndex)
endPoint = self.suitPointMap.get(endIndex)
if (not startPoint) or (not endPoint):
return 0.0
distance = (endPoint.getPos()-startPoint.getPos()).length()
return distance / suitWalkSpeed
def getSuitEdgeZone(self, startIndex, endIndex):
return self.getSuitEdge(startIndex, endIndex).getZoneId()
def getAdjacentPoints(self, point):
path = DNASuitPath()
startIndex = point.getIndex()
if startIndex not in self.suitEdges:
return path
for edge in self.suitEdges[startIndex]:
path.addPoint(edge.getEndPoint())
return path
def storeSuitPoint(self, suitPoint):
if not isinstance(suitPoint, DNASuitPoint):
raise TypeError('suitPoint must be an instance of DNASuitPoint')
self.suitPoints.append(suitPoint)
self.suitPointMap[suitPoint.getIndex()] = suitPoint
def getSuitPointAtIndex(self, index):
return self.suitPoints[index]
def getSuitPointWithIndex(self, index):
return self.suitPointMap.get(index)
def resetSuitPoints(self):
self.suitPoints = []
self.suitPointMap = {}
self.suitEdges = {}
def resetTextures(self):
self.textures = {}
def resetHood(self):
self.resetBlockNumbers()
def findDNAGroup(self, node):
return self.DNAGroups[node]
def removeDNAGroup(self, dnagroup):
for node, group in self.DNAGroups.items():
if group == dnagroup:
del self.DNAGroups[node]
def resetDNAGroups(self):
self.DNAGroups = {}
def getNumDNAVisGroups(self):
return len(self.DNAVisGroups)
def getDNAVisGroupName(self, i):
return self.DNAVisGroups[i].getName()
def storeDNAVisGroup(self, group):
self.DNAVisGroups.append(group)
def storeSuitEdge(self, startIndex, endIndex, zoneId):
startPoint = self.getSuitPointWithIndex(startIndex)
endPoint = self.getSuitPointWithIndex(endIndex)
edge = DNASuitEdge(startPoint, endPoint, zoneId)
self.suitEdges.setdefault(startIndex, []).append(edge)
return edge
def getSuitEdge(self, startIndex, endIndex):
edges = self.suitEdges[startIndex]
for edge in edges:
if edge.getEndPoint().getIndex() == endIndex:
return edge
def removeBattleCell(self, cell):
self.battleCells.remove(cell)
def storeBattleCell(self, cell):
self.battleCells.append(cell)
def resetBattleCells(self):
self.battleCells = []
def findNode(self, code):
if code in self.nodes:
return self.nodes[code]
if code in self.hoodNodes:
return self.hoodNodes[code]
if code in self.placeNodes:
return self.placeNodes[code]
def resetNodes(self):
for node in self.nodes:
self.nodes[node].removeNode()
self.nodes = {}
def resetHoodNodes(self):
for node in self.hoodNodes:
self.hoodNodes[node].removeNode()
self.hoodNodes = {}
def resetPlaceNodes(self):
for node in self.placeNodes:
self.placeNodes[node].removeNode()
self.placeNodes = {}
def storeNode(self, node, code):
self.nodes[code] = node
def storeHoodNode(self, node, code):
self.hoodNodes[code] = node
def storePlaceNode(self, node, code):
self.placeNodes[code] = node
def findFont(self, code):
if code in self.fonts:
return self.fonts[code]
def resetFonts(self):
self.fonts = {}
def storeFont(self, font, code):
self.fonts[code] = font
def getBlock(self, name):
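        # Extract the one or two digit block number that immediately precedes
        # the first ':' in a node name, e.g. a (hypothetical) name like
        # 'tb12:random_DNARoot' yields '12'.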
block = name[name.find(':')-2:name.find(':')]
if not block[0].isdigit():
block = block[1:]
return block
def getBlockBuildingType(self, blockNumber):
if blockNumber in self.blockBuildingTypes:
return self.blockBuildingTypes[blockNumber]
def getTitleFromBlockNumber(self, blockNumber):
if blockNumber in self.blockTitles:
return self.blockTitles[blockNumber]
return ''
def getDoorPosHprFromBlockNumber(self, blockNumber):
key = str(blockNumber)
if key in self.blockDoors:
return self.blockDoors[key]
def storeBlockDoor(self, blockNumber, door):
self.blockDoors[str(blockNumber)] = door
def storeBlockTitle(self, blockNumber, title):
self.blockTitles[blockNumber] = title
def storeBlockArticle(self, blockNumber, article):
self.blockArticles[blockNumber] = article
def storeBlockBuildingType(self, blockNumber, buildingType):
self.blockBuildingTypes[blockNumber] = buildingType
def storeBlock(self, blockNumber, title, article, bldgType, zoneId):
self.storeBlockNumber(blockNumber)
self.storeBlockTitle(blockNumber, title)
self.storeBlockArticle(blockNumber, article)
self.storeBlockBuildingType(blockNumber, bldgType)
self.storeBlockZone(blockNumber, zoneId)
def storeTexture(self, name, texture):
self.textures[name] = texture
def resetDNAVisGroups(self):
self.DNAVisGroups = []
def resetDNAVisGroupsAI(self):
self.resetDNAVisGroups()
def getNumDNAVisGroupsAI(self):
return self.getNumDNAVisGroups()
def getNumSuitPoints(self):
return len(self.suitPoints)
def getNumVisiblesInDNAVisGroup(self, i):
return self.DNAVisGroups[i].getNumVisibles()
def getVisibleName(self, i, j):
return self.DNAVisGroups[i].getVisibleName(j)
def getDNAVisGroupAI(self, i):
return self.DNAVisGroups[i]
def storeCatalogCode(self, category, code):
if not category in self.catalogCodes:
self.catalogCodes[category] = []
self.catalogCodes[category].append(code)
def getNumCatalogCodes(self, category):
if category not in self.catalogCodes:
return -1
return len(self.catalogCodes[category])
def resetCatalogCodes(self):
self.catalogCodes = {}
def getCatalogCode(self, category, index):
return self.catalogCodes[category][index]
def findTexture(self, name):
if name in self.textures:
return self.textures[name]
def discoverContinuity(self):
return 1 # TODO
def resetBlockNumbers(self):
self.blockNumbers = []
self.blockZones = {}
self.blockArticles = {}
self.resetBlockDoors()
self.blockTitles = {}
self.blockBuildingTypes = {}
def getNumBlockNumbers(self):
return len(self.blockNumbers)
def storeBlockNumber(self, blockNumber):
self.blockNumbers.append(blockNumber)
def getBlockNumberAt(self, index):
return self.blockNumbers[index]
def getZoneFromBlockNumber(self, blockNumber):
if blockNumber in self.blockZones:
return self.blockZones[blockNumber]
def storeBlockZone(self, blockNumber, zoneId):
self.blockZones[blockNumber] = zoneId
def resetBlockZones(self):
self.blockZones = {}
def resetBlockDoors(self):
self.blockDoors = {}
def cleanup(self):
self.resetBattleCells()
self.resetBlockNumbers()
self.resetDNAGroups()
self.resetDNAVisGroups()
self.resetDNAVisGroupsAI()
self.resetFonts()
self.resetHood()
self.resetHoodNodes()
self.resetNodes()
self.resetPlaceNodes()
self.resetSuitPoints()
self.resetTextures()
self.resetCatalogCodes()
ModelPool.garbageCollect()
TexturePool.garbageCollect()
|
import numpy as n
import glob
import h5py
import os
import time
import sys
h5_files = n.array(glob.glob(os.path.join(os.environ['MD10'], "h5", "hlist_?.?????_emerge.hdf5")))
h5_files.sort()
bins = n.arange(6,13,0.1)
xb = (bins[1:] + bins[:-1]) / 2.
hh = 0.6777
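# measureSMF: histogram the log10 stellar masses of one snapshot into ``bins``
# and write the counts and dN/(dV dlogM) [Mpc^-3 dex^-1] into the HDF5 file
# under /stellar_mass_function. The default volume is assumed to be a
# (1000 Mpc/h)^3 simulation box expressed in Mpc^3.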
def measureSMF(h5_file, volume=1000.**3./hh**3., update=True):
f1 = h5py.File(h5_file, "r+")
mass = f1['/emerge_data/stellar_mass'].value - n.log10(hh)
sel = (mass>0) & (mass!=n.inf) & (n.isnan(mass)==False)
print( h5_file, len(mass), len(mass[sel]), len(mass[sel])>0 )
if len(mass[sel])>0:
counts, bb = n.histogram(n.log10(mass[sel]), bins=bins)
dN_dVdlogM = counts/(bins[1:]-bins[:-1])/volume/n.log(10)
if update:
print('updates')
#print(f1['/stellar_mass_function/stellar_mass_low'].value)
f1['/stellar_mass_function/stellar_mass_low'][:] = bins[:-1]
f1['/stellar_mass_function/stellar_mass_up'][:] = bins[1:]
f1['/stellar_mass_function/counts'][:] = counts
f1['/stellar_mass_function/dN_dVdlogM'][:] = dN_dVdlogM
else:
print('creates')
stellar_mass_function_data = f1.create_group('stellar_mass_function')
ds = stellar_mass_function_data.create_dataset('stellar_mass_low', data = bins[:-1] )
ds.attrs['units'] = r'$M_\odot$'
ds.attrs['long_name'] = r'$M_\odot$'
ds = stellar_mass_function_data.create_dataset('stellar_mass_up', data = bins[1:] )
ds.attrs['units'] = r'$M_\odot$'
ds.attrs['long_name'] = r'$M_\odot$'
ds = stellar_mass_function_data.create_dataset('dN_dVdlogM', data = dN_dVdlogM )
ds.attrs['units'] = r'$ Mpc^{-3} dex^{-1}$'
ds.attrs['long_name'] = r'$dN / (dV/, dlogM) $'
ds = stellar_mass_function_data.create_dataset('counts', data = counts )
ds.attrs['units'] = r'count'
ds.attrs['long_name'] = r'galaxy counts'
f1.close()
for h5_file in h5_files:
#try:
measureSMF(h5_file, update=True)
#except( ValueError ):
#pass
|
"""Tests for student register task"""
import ddt
import json
from mock import patch
from django.contrib.auth.models import User
from django.core.exceptions import ObjectDoesNotExist
from django.test.utils import override_settings
from bulk_email.models import Optout
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory
from biz.djangoapps.ga_contract_operation.models import StudentRegisterTaskTarget
from biz.djangoapps.ga_contract_operation.tasks import student_register
from biz.djangoapps.ga_contract_operation.tests.factories import ContractTaskHistoryFactory,\
StudentRegisterTaskTargetFactory
from biz.djangoapps.ga_invitation.models import ContractRegister, INPUT_INVITATION_CODE, REGISTER_INVITATION_CODE
from biz.djangoapps.ga_invitation.tests.factories import ContractRegisterFactory
from biz.djangoapps.ga_login.models import BizUser
from biz.djangoapps.ga_login.tests.factories import BizUserFactory
from biz.djangoapps.util.tests.testcase import BizViewTestBase
from openedx.core.djangoapps.course_global.tests.factories import CourseGlobalSettingFactory
from openedx.core.djangoapps.ga_task.tests.test_task import TaskTestMixin
from student.models import CourseEnrollment
@ddt.ddt
class StudentRegisterTaskTest(BizViewTestBase, ModuleStoreTestCase, TaskTestMixin):
def setUp(self):
super(StudentRegisterTaskTest, self).setUp()
self._create_contract_mail_default()
def _create_targets(self, history, students, completed=False):
for student in students:
StudentRegisterTaskTargetFactory.create(history=history, student=student, completed=completed)
def _create_input_entry(self, contract=None, history=None):
task_input = {}
if contract is not None:
task_input['contract_id'] = contract.id
if history is not None:
task_input['history_id'] = history.id
task_input['sendmail_flg'] = 'on'
return TaskTestMixin._create_input_entry(self, task_input=task_input)
def _create_input_entry_not_sendmail(self, contract=None, history=None):
task_input = {}
if contract is not None:
task_input['contract_id'] = contract.id
if history is not None:
task_input['history_id'] = history.id
task_input['sendmail_flg'] = ''
return TaskTestMixin._create_input_entry(self, task_input=task_input)
def setup_user(self, login_code=None):
super(StudentRegisterTaskTest, self).setup_user()
self.login_code = login_code
if login_code:
BizUserFactory.create(user=self.user, login_code=login_code)
def test_missing_required_input_history(self):
entry = self._create_input_entry(contract=self._create_contract())
with self.assertRaises(ValueError) as cm:
self._run_task_with_mock_celery(student_register, entry.id, entry.task_id)
self.assertEqual("Task {}: Missing required value {}".format(
entry.task_id, json.loads(entry.task_input)), cm.exception.message)
self._assert_task_failure(entry.id)
def test_missing_required_input_contract(self):
entry = self._create_input_entry(history=self._create_task_history(self._create_contract()))
with self.assertRaises(ValueError) as cm:
self._run_task_with_mock_celery(student_register, entry.id, entry.task_id)
self.assertEqual("Task {}: Missing required value {}".format(
entry.task_id, json.loads(entry.task_input)), cm.exception.message)
self._assert_task_failure(entry.id)
def test_history_does_not_exists(self):
contract = self._create_contract()
history = self._create_task_history(contract)
entry = self._create_input_entry(contract=contract, history=history)
history.delete()
with self.assertRaises(ObjectDoesNotExist):
self._run_task_with_mock_celery(student_register, entry.id, entry.task_id)
self._assert_task_failure(entry.id)
def test_conflict_contract(self):
contract = self._create_contract()
# Create history with other contract
history = self._create_task_history(self._create_contract())
entry = self._create_input_entry(contract=contract, history=history)
with self.assertRaises(ValueError) as cm:
self._run_task_with_mock_celery(student_register, entry.id, entry.task_id)
self.assertEqual("Contract id conflict: submitted value {} does not match {}".format(
history.contract_id, contract.id), cm.exception.message)
self._assert_task_failure(entry.id)
@ddt.data(
(None, ["Input,[email protected],t,t"]),
('contract-url-code', ["Input,[email protected],t,t,Test_Student_1,TestStudent1"]),
)
@ddt.unpack
def test_register_validation(self, url_code, students):
# ----------------------------------------------------------
# Setup test data
# ----------------------------------------------------------
contract = self._create_contract(url_code=url_code)
history = self._create_task_history(contract=contract)
self._create_targets(history, students)
# ----------------------------------------------------------
# Execute task
# ----------------------------------------------------------
self._test_run_with_task(
student_register,
'student_register',
task_entry=self._create_input_entry(contract=contract, history=history),
expected_attempted=1,
expected_num_failed=1,
expected_total=1,
)
# ----------------------------------------------------------
# Assertion
# ----------------------------------------------------------
self.assertEqual(0, StudentRegisterTaskTarget.objects.filter(history=history, completed=False).count())
self.assertEqual(1, StudentRegisterTaskTarget.objects.filter(history=history, completed=True).count())
self.assertEqual(
"Line 1:" + ' '.join(["Username must be minimum of two characters long", "Your legal name must be a minimum of two characters long"]),
StudentRegisterTaskTarget.objects.get(history=history, student=students[0]).message,
)
self.assertFalse(ContractRegister.objects.filter(contract=contract).exists())
@ddt.data('t', 'Test@Student_1', 'Test_Student_1Test_Student_1Test_Student_1')
def test_register_validation_login_code(self, login_code):
# ----------------------------------------------------------
# Setup test data
# ----------------------------------------------------------
students = ["Input,[email protected],test_student_1,tester1,{login_code},TestStudent1".format(login_code=login_code)]
contract = self._create_contract(url_code='contract-url-code')
history = self._create_task_history(contract=contract)
self._create_targets(history, students)
# ----------------------------------------------------------
# Execute task
# ----------------------------------------------------------
self._test_run_with_task(
student_register,
'student_register',
task_entry=self._create_input_entry(contract=contract, history=history),
expected_attempted=1,
expected_num_failed=1,
expected_total=1,
)
# ----------------------------------------------------------
# Assertion
# ----------------------------------------------------------
self.assertEqual(0, StudentRegisterTaskTarget.objects.filter(history=history, completed=False).count())
self.assertEqual(1, StudentRegisterTaskTarget.objects.filter(history=history, completed=True).count())
self.assertEqual(
"Line 1:Invalid login code {login_code}.".format(login_code=login_code),
StudentRegisterTaskTarget.objects.get(history=history, student=students[0]).message,
)
self.assertFalse(ContractRegister.objects.filter(contract=contract).exists())
@override_settings(
PASSWORD_MIN_LENGTH=7,
PASSWORD_COMPLEXITY={
'DIGITS': 1,
'LOWER': 1,
'UPPER': 1,
}
)
@ddt.data('abAB12', 'abcdABCD', 'abcd1234', 'ABCD1234')
def test_register_validation_password(self, password):
# ----------------------------------------------------------
# Setup test data
# ----------------------------------------------------------
students = ["Input,[email protected],test_student_1,tester1,Test_Student_1,{password}".format(password=password)]
contract = self._create_contract(url_code='contract-url-code')
history = self._create_task_history(contract=contract)
self._create_targets(history, students)
# ----------------------------------------------------------
# Execute task
# ----------------------------------------------------------
self._test_run_with_task(
student_register,
'student_register',
task_entry=self._create_input_entry(contract=contract, history=history),
expected_attempted=1,
expected_num_failed=1,
expected_total=1,
)
# ----------------------------------------------------------
# Assertion
# ----------------------------------------------------------
self.assertEqual(0, StudentRegisterTaskTarget.objects.filter(history=history, completed=False).count())
self.assertEqual(1, StudentRegisterTaskTarget.objects.filter(history=history, completed=True).count())
self.assertEqual(
"Line 1:Invalid password {password}.".format(password=password),
StudentRegisterTaskTarget.objects.get(history=history, student=students[0]).message,
)
self.assertFalse(ContractRegister.objects.filter(contract=contract).exists())
@ddt.data(
(None, ["Input,[email protected],test_student_1,tester1"]),
('contract-url-code', ["Input,[email protected],test_student_1,tester1,Test_Student_1,TestStudent1"]),
)
@ddt.unpack
def test_register_account_creation(self, url_code, students):
# ----------------------------------------------------------
# Setup test data
# ----------------------------------------------------------
global_course_id = CourseFactory.create(org='global', course='course1', run='run').id
contract = self._create_contract(url_code=url_code)
history = self._create_task_history(contract=contract)
self._create_targets(history, students)
# ----------------------------------------------------------
# Execute task
# ----------------------------------------------------------
self._test_run_with_task(
student_register,
'student_register',
task_entry=self._create_input_entry(contract=contract, history=history),
expected_attempted=1,
expected_num_succeeded=1,
expected_total=1,
)
# ----------------------------------------------------------
# Assertion
# ----------------------------------------------------------
self.assertEqual(0, StudentRegisterTaskTarget.objects.filter(history=history, completed=False).count())
self.assertEqual(1, StudentRegisterTaskTarget.objects.filter(history=history, completed=True).count())
self.assertIsNone(StudentRegisterTaskTarget.objects.get(history=history, student=students[0]).message)
user = User.objects.get(email='[email protected]')
self.assertTrue(user.is_active)
self.assertEqual(ContractRegister.objects.get(user__email='[email protected]', contract=contract).status, INPUT_INVITATION_CODE)
self.assertFalse(Optout.objects.filter(user=user, course_id=global_course_id).exists())
if url_code:
self.assertEqual('Test_Student_1', BizUser.objects.get(user=user).login_code)
@ddt.data(
(None, ["Input,[email protected],test_student_1,tester1"]),
('contract-url-code', ["Input,[email protected],test_student_1,tester1,Test_Student_1,TestStudent1"]),
)
@ddt.unpack
def test_register_account_creation_with_global_course(self, url_code, students):
# ----------------------------------------------------------
# Setup test data
# ----------------------------------------------------------
global_course_id = CourseFactory.create(org='global', course='course1', run='run').id
CourseGlobalSettingFactory.create(course_id=global_course_id)
contract = self._create_contract(url_code=url_code)
history = self._create_task_history(contract=contract)
self._create_targets(history, students)
# ----------------------------------------------------------
# Execute task
# ----------------------------------------------------------
self._test_run_with_task(
student_register,
'student_register',
task_entry=self._create_input_entry(contract=contract, history=history),
expected_attempted=1,
expected_num_succeeded=1,
expected_total=1,
)
# ----------------------------------------------------------
# Assertion
# ----------------------------------------------------------
self.assertEqual(0, StudentRegisterTaskTarget.objects.filter(history=history, completed=False).count())
self.assertEqual(1, StudentRegisterTaskTarget.objects.filter(history=history, completed=True).count())
self.assertIsNone(StudentRegisterTaskTarget.objects.get(history=history, student=students[0]).message)
user = User.objects.get(email='[email protected]')
self.assertTrue(user.is_active)
self.assertEqual(ContractRegister.objects.get(user__email='[email protected]', contract=contract).status, INPUT_INVITATION_CODE)
self.assertTrue(Optout.objects.filter(user=user, course_id=global_course_id).exists())
if url_code:
self.assertEqual('Test_Student_1', BizUser.objects.get(user=user).login_code)
@ddt.data(
(None, ["Input,", "Input,[email protected],test_student_1,tester1", "Register,", "Input,"]),
('contract-url-code', ["Input,", "Input,[email protected],test_student_1,tester1,Test_Student_1,TestStudent1", "Register,", "Input,"]),
)
@ddt.unpack
def test_register_account_creation_with_blank_lines(self, url_code, students):
# ----------------------------------------------------------
# Setup test data
# ----------------------------------------------------------
contract = self._create_contract(url_code=url_code)
history = self._create_task_history(contract=contract)
self._create_targets(history, students)
# ----------------------------------------------------------
# Execute task
# ----------------------------------------------------------
self._test_run_with_task(
student_register,
'student_register',
task_entry=self._create_input_entry(contract=contract, history=history),
expected_attempted=4,
expected_num_succeeded=1,
expected_num_skipped=3,
expected_total=4,
)
# ----------------------------------------------------------
# Assertion
# ----------------------------------------------------------
self.assertEqual(0, StudentRegisterTaskTarget.objects.filter(history=history, completed=False).count())
self.assertEqual(4, StudentRegisterTaskTarget.objects.filter(history=history, completed=True).count())
self.assertEqual(4, StudentRegisterTaskTarget.objects.filter(history=history, message__isnull=True).count())
user = User.objects.get(email='[email protected]')
self.assertTrue(user.is_active)
self.assertEqual(ContractRegister.objects.get(user__email='[email protected]', contract=contract).status, INPUT_INVITATION_CODE)
if url_code:
self.assertEqual('Test_Student_1', BizUser.objects.get(user=user).login_code)
@ddt.data(
(None, [
"Input,[email protected],test_student_1,tester1",
"Input,[email protected],test_student_1,tester2",
]),
('contract-url-code', [
"Input,[email protected],test_student_1,tester1,Test_Student_1,TestStudent1",
"Input,[email protected],test_student_1,tester2,Test_Student_1,TestStudent1",
]),
)
@ddt.unpack
def test_register_email_and_username_already_exist(self, url_code, students):
# ----------------------------------------------------------
# Setup test data
# ----------------------------------------------------------
contract = self._create_contract(url_code=url_code)
history = self._create_task_history(contract=contract)
self._create_targets(history, students)
# ----------------------------------------------------------
# Execute task
# ----------------------------------------------------------
self._test_run_with_task(
student_register,
'student_register',
task_entry=self._create_input_entry(contract=contract, history=history),
expected_attempted=2,
expected_num_succeeded=2,
expected_total=2,
)
# ----------------------------------------------------------
# Assertion
# ----------------------------------------------------------
self.assertEqual(0, StudentRegisterTaskTarget.objects.filter(history=history, completed=False).count())
self.assertEqual(2, StudentRegisterTaskTarget.objects.filter(history=history, completed=True).count())
self.assertEqual(2, StudentRegisterTaskTarget.objects.filter(history=history, message__isnull=True).count())
user = User.objects.get(email='[email protected]')
self.assertTrue(user.is_active)
self.assertEqual(ContractRegister.objects.get(user__email='[email protected]', contract=contract).status, INPUT_INVITATION_CODE)
if url_code:
self.assertEqual('Test_Student_1', BizUser.objects.get(user=user).login_code)
@ddt.data(
(
None,
["Input,[email protected],test_student_1,tester1,Test_Student_1,TestStudent1", "Input,"],
"Line 1:Data must have exactly three columns: email, username, and full name."
),
(
'contract-url-code',
["Input,[email protected],test_student_1,tester1", "Input,"],
"Line 1:Data must have exactly five columns: email, username, full name, login code and password."
),
)
@ddt.unpack
def test_register_insufficient_data(self, url_code, students, message):
# ----------------------------------------------------------
# Setup test data
# ----------------------------------------------------------
contract = self._create_contract(url_code=url_code)
history = self._create_task_history(contract=contract)
self._create_targets(history, students)
# ----------------------------------------------------------
# Execute task
# ----------------------------------------------------------
self._test_run_with_task(
student_register,
'student_register',
task_entry=self._create_input_entry(contract=contract, history=history),
expected_attempted=2,
expected_num_failed=1,
expected_num_skipped=1,
expected_total=2,
)
# ----------------------------------------------------------
# Assertion
# ----------------------------------------------------------
self.assertEqual(0, StudentRegisterTaskTarget.objects.filter(history=history, completed=False).count())
self.assertEqual(2, StudentRegisterTaskTarget.objects.filter(history=history, completed=True).count())
self.assertEqual(
message,
StudentRegisterTaskTarget.objects.get(history=history, student=students[0]).message
)
self.assertIsNone(StudentRegisterTaskTarget.objects.get(history=history, student=students[1]).message)
self.assertFalse(ContractRegister.objects.filter(contract=contract).exists())
@ddt.data(
(None, ["Input,test_student.example.com,test_student_1,tester1"]),
('contract-url-code', ["Input,test_student.example.com,test_student_1,tester1,Test_Student_1,TestStudent1"]),
)
@ddt.unpack
def test_register_invalid_email(self, url_code, students):
# ----------------------------------------------------------
# Setup test data
# ----------------------------------------------------------
contract = self._create_contract(url_code=url_code)
history = self._create_task_history(contract=contract)
self._create_targets(history, students)
# ----------------------------------------------------------
# Execute task
# ----------------------------------------------------------
self._test_run_with_task(
student_register,
'student_register',
task_entry=self._create_input_entry(contract=contract, history=history),
expected_attempted=1,
expected_num_failed=1,
expected_total=1,
)
# ----------------------------------------------------------
# Assertion
# ----------------------------------------------------------
self.assertEqual(0, StudentRegisterTaskTarget.objects.filter(history=history, completed=False).count())
self.assertEqual(1, StudentRegisterTaskTarget.objects.filter(history=history, completed=True).count())
self.assertEqual(
"Line 1:Invalid email test_student.example.com.",
StudentRegisterTaskTarget.objects.get(history=history, student=students[0]).message
)
self.assertFalse(ContractRegister.objects.filter(contract=contract).exists())
@ddt.data(
(None, ["Input,{email},test_student_1,tester1"]),
('contract-url-code', ["Input,{email},test_student_1,tester1,Test_Student_1,TestStudent1"]),
)
@ddt.unpack
def test_register_user_with_already_existing_email(self, url_code, students):
# ----------------------------------------------------------
# Setup test data
# ----------------------------------------------------------
global_course_id = CourseFactory.create(org='global', course='course1', run='run').id
CourseGlobalSettingFactory.create(course_id=global_course_id)
self.setup_user()
students = [s.format(email=self.email) for s in students]
contract = self._create_contract(url_code=url_code)
history = self._create_task_history(contract=contract)
self._create_targets(history, students)
# ----------------------------------------------------------
# Execute task
# ----------------------------------------------------------
self._test_run_with_task(
student_register,
'student_register',
task_entry=self._create_input_entry(contract=contract, history=history),
expected_attempted=1,
expected_num_succeeded=1,
expected_total=1,
)
# ----------------------------------------------------------
# Assertion
# ----------------------------------------------------------
self.assertEqual(0, StudentRegisterTaskTarget.objects.filter(history=history, completed=False).count())
self.assertEqual(1, StudentRegisterTaskTarget.objects.filter(history=history, completed=True).count())
self.assertIn(
"Warning, an account with the e-mail {email} exists but the registered username {username} is different.".format(email=self.email, username=self.username),
StudentRegisterTaskTarget.objects.get(history=history, student=students[0]).message
)
if url_code:
self.assertIn(
"Warning, an account with the e-mail {email} exists but the registered password is different.".format(email=self.email),
StudentRegisterTaskTarget.objects.get(history=history, student=students[0]).message
)
self.assertEqual(ContractRegister.objects.get(user__email=self.email, contract=contract).status, INPUT_INVITATION_CODE)
self.assertFalse(Optout.objects.filter(user=self.user, course_id=global_course_id).exists())
if url_code:
self.assertEqual('Test_Student_1', BizUser.objects.get(user=self.user).login_code)
@ddt.data(
(None, ["Input,{email},test_student_1,tester1"]),
('contract-url-code', ["Input,{email},test_student_1,tester1,Test_Student_1,TestStudent1"]),
)
@ddt.unpack
def test_register_user_with_already_existing_contract_register_input(self, url_code, students):
# ----------------------------------------------------------
# Setup test data
# ----------------------------------------------------------
global_course_id = CourseFactory.create(org='global', course='course1', run='run').id
CourseGlobalSettingFactory.create(course_id=global_course_id)
self.setup_user()
students = [s.format(email=self.email) for s in students]
contract = self._create_contract(url_code=url_code)
ContractRegisterFactory.create(user=self.user, contract=contract, status=INPUT_INVITATION_CODE)
history = self._create_task_history(contract=contract)
self._create_targets(history, students)
# ----------------------------------------------------------
# Execute task
# ----------------------------------------------------------
self._test_run_with_task(
student_register,
'student_register',
task_entry=self._create_input_entry(contract=contract, history=history),
expected_attempted=1,
expected_num_succeeded=1,
expected_total=1,
)
# ----------------------------------------------------------
# Assertion
# ----------------------------------------------------------
self.assertEqual(0, StudentRegisterTaskTarget.objects.filter(history=history, completed=False).count())
self.assertEqual(1, StudentRegisterTaskTarget.objects.filter(history=history, completed=True).count())
self.assertIn(
"Warning, an account with the e-mail {email} exists but the registered username {username} is different.".format(email=self.email, username=self.username),
StudentRegisterTaskTarget.objects.get(history=history, student=students[0]).message
)
if url_code:
self.assertIn(
"Warning, an account with the e-mail {email} exists but the registered password is different.".format(email=self.email),
StudentRegisterTaskTarget.objects.get(history=history, student=students[0]).message
)
self.assertEqual(ContractRegister.objects.get(user__email=self.email, contract=contract).status, INPUT_INVITATION_CODE)
self.assertFalse(Optout.objects.filter(user=self.user, course_id=global_course_id).exists())
if url_code:
self.assertEqual('Test_Student_1', BizUser.objects.get(user=self.user).login_code)
@ddt.data(
(None, ["Input,{email},test_student_1,tester1"]),
('contract-url-code', ["Input,{email},test_student_1,tester1,Test_Student_1,TestStudent1"]),
)
@ddt.unpack
def test_register_user_with_already_existing_contract_register_register(self, url_code, students):
# ----------------------------------------------------------
# Setup test data
# ----------------------------------------------------------
global_course_id = CourseFactory.create(org='global', course='course1', run='run').id
CourseGlobalSettingFactory.create(course_id=global_course_id)
self.setup_user()
students = [s.format(email=self.email) for s in students]
contract = self._create_contract(url_code=url_code)
ContractRegisterFactory.create(user=self.user, contract=contract, status=REGISTER_INVITATION_CODE)
history = self._create_task_history(contract=contract)
self._create_targets(history, students)
# ----------------------------------------------------------
# Execute task
# ----------------------------------------------------------
self._test_run_with_task(
student_register,
'student_register',
task_entry=self._create_input_entry(contract=contract, history=history),
expected_attempted=1,
expected_num_succeeded=1,
expected_total=1,
)
# ----------------------------------------------------------
# Assertion
# ----------------------------------------------------------
self.assertEqual(0, StudentRegisterTaskTarget.objects.filter(history=history, completed=False).count())
self.assertEqual(1, StudentRegisterTaskTarget.objects.filter(history=history, completed=True).count())
self.assertIn(
"Warning, an account with the e-mail {email} exists but the registered username {username} is different.".format(email=self.email, username=self.username),
StudentRegisterTaskTarget.objects.get(history=history, student=students[0]).message
)
if url_code:
self.assertIn(
"Warning, an account with the e-mail {email} exists but the registered password is different.".format(email=self.email),
StudentRegisterTaskTarget.objects.get(history=history, student=students[0]).message
)
self.assertEqual(ContractRegister.objects.get(user__email=self.email, contract=contract).status, REGISTER_INVITATION_CODE)
self.assertFalse(Optout.objects.filter(user=self.user, course_id=global_course_id).exists())
if url_code:
self.assertEqual('Test_Student_1', BizUser.objects.get(user=self.user).login_code)
def test_register_user_with_already_existing_all_same(self):
# ----------------------------------------------------------
# Setup test data
# ----------------------------------------------------------
self.setup_user('Test_Student_1')
students = ["Input,{email},{username},username,{login_code},{password}".format(
email=self.email,
username=self.username,
login_code=self.login_code,
password=self.password,
)]
contract = self._create_contract(url_code='contract-url-code')
history = self._create_task_history(contract=contract)
self._create_targets(history, students)
# ----------------------------------------------------------
# Execute task
# ----------------------------------------------------------
self._test_run_with_task(
student_register,
'student_register',
task_entry=self._create_input_entry(contract=contract, history=history),
expected_attempted=1,
expected_num_succeeded=1,
expected_total=1,
)
# ----------------------------------------------------------
# Assertion
# ----------------------------------------------------------
self.assertEqual(0, StudentRegisterTaskTarget.objects.filter(history=history, completed=False).count())
self.assertEqual(1, StudentRegisterTaskTarget.objects.filter(history=history, completed=True).count())
self.assertEqual(self.login_code, BizUser.objects.get(user=self.user).login_code)
def test_register_user_with_already_existing_diff_login_code(self):
# ----------------------------------------------------------
# Setup test data
# ----------------------------------------------------------
self.setup_user('Test_Student_1')
students = ["Input,{email},{username},username,{login_code},{password}".format(
email=self.email,
username=self.username,
login_code='Test_Student_12',
password=self.password,
)]
contract = self._create_contract(url_code='contract-url-code')
history = self._create_task_history(contract=contract)
self._create_targets(history, students)
# ----------------------------------------------------------
# Execute task
# ----------------------------------------------------------
self._test_run_with_task(
student_register,
'student_register',
task_entry=self._create_input_entry(contract=contract, history=history),
expected_attempted=1,
expected_num_succeeded=1,
expected_total=1,
)
# ----------------------------------------------------------
# Assertion
# ----------------------------------------------------------
self.assertEqual(0, StudentRegisterTaskTarget.objects.filter(history=history, completed=False).count())
self.assertEqual(1, StudentRegisterTaskTarget.objects.filter(history=history, completed=True).count())
self.assertIn(
"Warning, an account with the e-mail {email} exists but the registered login code {login_code} is different.".format(email=self.email, login_code=self.login_code),
StudentRegisterTaskTarget.objects.get(history=history, student=students[0]).message
)
self.assertEqual(self.login_code, BizUser.objects.get(user=self.user).login_code)
def test_register_user_with_already_existing_diff_password(self):
# ----------------------------------------------------------
# Setup test data
# ----------------------------------------------------------
self.setup_user('Test_Student_1')
students = ["Input,{email},{username},username,{login_code},{password}".format(
email=self.email,
username=self.username,
login_code=self.login_code,
password='Password123',
)]
contract = self._create_contract(url_code='contract-url-code')
history = self._create_task_history(contract=contract)
self._create_targets(history, students)
# ----------------------------------------------------------
# Execute task
# ----------------------------------------------------------
self._test_run_with_task(
student_register,
'student_register',
task_entry=self._create_input_entry(contract=contract, history=history),
expected_attempted=1,
expected_num_succeeded=1,
expected_total=1,
)
# ----------------------------------------------------------
# Assertion
# ----------------------------------------------------------
self.assertEqual(0, StudentRegisterTaskTarget.objects.filter(history=history, completed=False).count())
self.assertEqual(1, StudentRegisterTaskTarget.objects.filter(history=history, completed=True).count())
self.assertIn(
"Warning, an account with the e-mail {email} exists but the registered password is different.".format(email=self.email),
StudentRegisterTaskTarget.objects.get(history=history, student=students[0]).message
)
self.assertEqual(self.login_code, BizUser.objects.get(user=self.user).login_code)
def test_register_user_with_already_existing_diff_login_code_password(self):
# ----------------------------------------------------------
# Setup test data
# ----------------------------------------------------------
self.setup_user('Test_Student_1')
students = ["Input,{email},{username},username,{login_code},{password}".format(
email=self.email,
username=self.username,
login_code='Test_Student_12',
password='Password123',
)]
contract = self._create_contract(url_code='contract-url-code')
history = self._create_task_history(contract=contract)
self._create_targets(history, students)
# ----------------------------------------------------------
# Execute task
# ----------------------------------------------------------
self._test_run_with_task(
student_register,
'student_register',
task_entry=self._create_input_entry(contract=contract, history=history),
expected_attempted=1,
expected_num_succeeded=1,
expected_total=1,
)
# ----------------------------------------------------------
# Assertion
# ----------------------------------------------------------
self.assertEqual(0, StudentRegisterTaskTarget.objects.filter(history=history, completed=False).count())
self.assertEqual(1, StudentRegisterTaskTarget.objects.filter(history=history, completed=True).count())
self.assertIn(
"Warning, an account with the e-mail {email} exists but the registered login code {login_code} is different.".format(email=self.email, login_code=self.login_code),
StudentRegisterTaskTarget.objects.get(history=history, student=students[0]).message
)
self.assertIn(
"Warning, an account with the e-mail {email} exists but the registered password is different.".format(email=self.email),
StudentRegisterTaskTarget.objects.get(history=history, student=students[0]).message
)
self.assertEqual(self.login_code, BizUser.objects.get(user=self.user).login_code)
def test_register_user_same_login_code(self):
# ----------------------------------------------------------
# Setup test data
# ----------------------------------------------------------
self.setup_user('Test_Student_1')
students = [
"Input,[email protected],test_student_1,tester1,Test_Student_1,TestStudent1",
"Input,{email},{username},tester2,Test_Student_1,{password}".format(email=self.email, username=self.username, password=self.password),
]
contract = self._create_contract(url_code='contract-url-code')
history = self._create_task_history(contract=contract)
self._create_targets(history, students)
# ----------------------------------------------------------
# Execute task
# ----------------------------------------------------------
self._test_run_with_task(
student_register,
'student_register',
task_entry=self._create_input_entry(contract=contract, history=history),
expected_attempted=2,
expected_num_succeeded=1,
expected_num_failed=1,
expected_total=2,
)
# ----------------------------------------------------------
# Assertion
# ----------------------------------------------------------
self.assertEqual(0, StudentRegisterTaskTarget.objects.filter(history=history, completed=False).count())
self.assertEqual(2, StudentRegisterTaskTarget.objects.filter(history=history, completed=True).count())
self.assertEqual(
"Line 2:Login code Test_Student_1 already exists.",
StudentRegisterTaskTarget.objects.get(history=history, student=students[1]).message
)
self.assertEqual(2, BizUser.objects.filter(login_code=self.login_code).count())
self.assertEqual(ContractRegister.objects.get(user__email='[email protected]', contract=contract).status, INPUT_INVITATION_CODE)
self.assertFalse(ContractRegister.objects.filter(user__email='[email protected]', contract=contract).exists())
self.assertFalse(ContractRegister.objects.filter(user__email=self.email, contract=contract).exists())
@ddt.data(
(None, [
"Input,[email protected],test_student_1,tester1",
"Input,[email protected],test_student_1,tester2",
]),
('contract-url-code', [
"Input,[email protected],test_student_1,tester1,Test_Student_1,TestStudent1",
"Input,[email protected],test_student_1,tester2,Test_Student_2,TestStudent2",
]),
)
@ddt.unpack
def test_register_user_with_already_existing_username(self, url_code, students):
# ----------------------------------------------------------
# Setup test data
# ----------------------------------------------------------
contract = self._create_contract(url_code=url_code)
history = self._create_task_history(contract=contract)
self._create_targets(history, students)
# ----------------------------------------------------------
# Execute task
# ----------------------------------------------------------
self._test_run_with_task(
student_register,
'student_register',
task_entry=self._create_input_entry(contract=contract, history=history),
expected_attempted=2,
expected_num_succeeded=1,
expected_num_failed=1,
expected_total=2,
)
# ----------------------------------------------------------
# Assertion
# ----------------------------------------------------------
self.assertEqual(0, StudentRegisterTaskTarget.objects.filter(history=history, completed=False).count())
self.assertEqual(2, StudentRegisterTaskTarget.objects.filter(history=history, completed=True).count())
self.assertIsNone(StudentRegisterTaskTarget.objects.get(history=history, student=students[0]).message)
self.assertEqual(
"Line 2:Username test_student_1 already exists.",
StudentRegisterTaskTarget.objects.get(history=history, student=students[1]).message
)
self.assertEqual(ContractRegister.objects.get(user__email='[email protected]', contract=contract).status, INPUT_INVITATION_CODE)
self.assertFalse(ContractRegister.objects.filter(user__email='[email protected]', contract=contract).exists())
def test_register_user_with_already_existing_login_code(self):
# ----------------------------------------------------------
# Setup test data
# ----------------------------------------------------------
students = [
"Input,[email protected],test_student_1,tester1,Test_Student_1,TestStudent1",
"Input,[email protected],test_student_2,tester2,Test_Student_1,TestStudent2",
]
contract = self._create_contract(url_code='contract-url-code')
history = self._create_task_history(contract=contract)
self._create_targets(history, students)
# ----------------------------------------------------------
# Execute task
# ----------------------------------------------------------
self._test_run_with_task(
student_register,
'student_register',
task_entry=self._create_input_entry(contract=contract, history=history),
expected_attempted=2,
expected_num_succeeded=1,
expected_num_failed=1,
expected_total=2,
)
# ----------------------------------------------------------
# Assertion
# ----------------------------------------------------------
self.assertEqual(0, StudentRegisterTaskTarget.objects.filter(history=history, completed=False).count())
self.assertEqual(2, StudentRegisterTaskTarget.objects.filter(history=history, completed=True).count())
self.assertIsNone(StudentRegisterTaskTarget.objects.get(history=history, student=students[0]).message)
self.assertEqual(
"Line 2:Login code Test_Student_1 already exists.",
StudentRegisterTaskTarget.objects.get(history=history, student=students[1]).message
)
self.assertEqual(ContractRegister.objects.get(user__email='[email protected]', contract=contract).status, INPUT_INVITATION_CODE)
self.assertFalse(ContractRegister.objects.filter(user__email='[email protected]', contract=contract).exists())
@ddt.data(
(None, [
"Input,[email protected],test_student_1,tester1",
"Input,[email protected],test_student_1,tester2",
]),
('contract-url-code', [
"Input,[email protected],test_student_1,tester1,Test_Student_1,TestStudent1",
"Input,[email protected],test_student_1,tester2,Test_Student_2,TestStudent2",
]),
)
@ddt.unpack
def test_register_raising_exception_in_auto_registration_case(self, url_code, students):
# ----------------------------------------------------------
# Setup test data
# ----------------------------------------------------------
contract = self._create_contract(url_code=url_code)
history = self._create_task_history(contract=contract)
self._create_targets(history, students)
# ----------------------------------------------------------
# Execute task
# ----------------------------------------------------------
with patch('biz.djangoapps.ga_contract_operation.student_register.validate_email', side_effect=[None, Exception]):
self._test_run_with_task(
student_register,
'student_register',
task_entry=self._create_input_entry(contract=contract, history=history),
expected_attempted=2,
expected_num_succeeded=1,
expected_num_failed=1,
expected_total=2,
)
# ----------------------------------------------------------
# Assertion
# ----------------------------------------------------------
self.assertEqual(1, StudentRegisterTaskTarget.objects.filter(history=history, completed=False).count())
self.assertEqual(1, StudentRegisterTaskTarget.objects.filter(history=history, completed=True).count())
self.assertIsNone(StudentRegisterTaskTarget.objects.get(history=history, student=students[0]).message)
self.assertEqual(
"Line 2:Failed to register. Please operation again after a time delay.",
StudentRegisterTaskTarget.objects.get(history=history, student=students[1]).message
)
self.assertEqual(ContractRegister.objects.get(user__email='[email protected]', contract=contract).status, INPUT_INVITATION_CODE)
self.assertFalse(ContractRegister.objects.filter(user__email='[email protected]', contract=contract).exists())
@ddt.data(
(None, [
"Input,[email protected],test_student_1,tester1",
"Input,[email protected],test_student_1,tester3",
"Input,[email protected],test_student_2,tester2",
]),
('contract-url-code', [
"Input,[email protected],test_student_1,tester1,Test_Student_1,TestStudent1",
"Input,[email protected],test_student_1,tester3,Test_Student_3,TestStudent3",
"Input,[email protected],test_student_2,tester2,Test_Student_2,TestStudent2",
]),
)
@ddt.unpack
def test_register_users_created_successfully_if_others_fail(self, url_code, students):
# ----------------------------------------------------------
# Setup test data
# ----------------------------------------------------------
contract = self._create_contract(url_code=url_code)
history = self._create_task_history(contract=contract)
self._create_targets(history, students)
# ----------------------------------------------------------
# Execute task
# ----------------------------------------------------------
self._test_run_with_task(
student_register,
'student_register',
task_entry=self._create_input_entry(contract=contract, history=history),
expected_attempted=3,
expected_num_succeeded=2,
expected_num_failed=1,
expected_total=3,
)
# ----------------------------------------------------------
# Assertion
# ----------------------------------------------------------
self.assertEqual(0, StudentRegisterTaskTarget.objects.filter(history=history, completed=False).count())
self.assertEqual(3, StudentRegisterTaskTarget.objects.filter(history=history, completed=True).count())
self.assertIsNone(StudentRegisterTaskTarget.objects.get(history=history, student=students[0]).message)
self.assertEqual(
"Line 2:Username test_student_1 already exists.",
StudentRegisterTaskTarget.objects.get(history=history, student=students[1]).message
)
self.assertIsNone(StudentRegisterTaskTarget.objects.get(history=history, student=students[2]).message)
self.assertEqual(ContractRegister.objects.get(user__email='[email protected]', contract=contract).status, INPUT_INVITATION_CODE)
self.assertEqual(ContractRegister.objects.get(user__email='[email protected]', contract=contract).status, INPUT_INVITATION_CODE)
self.assertFalse(ContractRegister.objects.filter(user__email='[email protected]', contract=contract).exists())
@patch('biz.djangoapps.ga_contract_operation.student_register.log.error')
@ddt.data(
(None, [
"Register,[email protected],test_student_1,tester1",
"Unregister,[email protected],test_student_3,tester3",
"Register,[email protected],test_student_2,tester2",
]),
('contract-url-code', [
"Register,[email protected],test_student_1,tester1,Test_Student_1,TestStudent1",
"Unregister,[email protected],test_student_3,tester3,Test_Student_3,TestStudent3",
"Register,[email protected],test_student_2,tester2,Test_Student_2,TestStudent2",
]),
)
@ddt.unpack
def test_register_users_created_successfully_if_others_fail_register(self, url_code, students, error_log):
# ----------------------------------------------------------
# Setup test data
# ----------------------------------------------------------
course = CourseFactory.create()
contract = self._create_contract(url_code=url_code, detail_courses=[course])
history = self._create_task_history(contract=contract)
self._create_targets(history, students)
# ----------------------------------------------------------
# Execute task
# ----------------------------------------------------------
self._test_run_with_task(
student_register,
'student_register',
task_entry=self._create_input_entry(contract=contract, history=history),
expected_attempted=3,
expected_num_succeeded=2,
expected_num_failed=1,
expected_total=3,
)
# ----------------------------------------------------------
# Assertion
# ----------------------------------------------------------
self.assertEqual(0, StudentRegisterTaskTarget.objects.filter(history=history, completed=False).count())
self.assertEqual(3, StudentRegisterTaskTarget.objects.filter(history=history, completed=True).count())
self.assertIsNone(StudentRegisterTaskTarget.objects.get(history=history, student=students[0]).message)
error_log.assert_any_call('Invalid status: Unregister.')
self.assertEqual(
"Line 2:Failed to register. Please operation again after a time delay.",
StudentRegisterTaskTarget.objects.get(history=history, student=students[1]).message
)
self.assertIsNone(StudentRegisterTaskTarget.objects.get(history=history, student=students[2]).message)
self.assertEqual(ContractRegister.objects.get(user__email='[email protected]', contract=contract).status, REGISTER_INVITATION_CODE)
self.assertEqual(ContractRegister.objects.get(user__email='[email protected]', contract=contract).status, REGISTER_INVITATION_CODE)
self.assertFalse(ContractRegister.objects.filter(user__email='[email protected]', contract=contract).exists())
self.assertTrue(CourseEnrollment.objects.get(user__email='[email protected]', course_id=course.id).is_active)
self.assertTrue(CourseEnrollment.objects.get(user__email='[email protected]', course_id=course.id).is_active)
self.assertFalse(CourseEnrollment.objects.filter(user__email='[email protected]', course_id=course.id).exists())
@ddt.data(
(None, [
"Input,test_student1test_student1test_student1test_student1test_student@example.com,test_student_1,tester1",
"Input,[email protected],test_student_1test_student_1test_stu,tester3",
"Input,[email protected],test_student_2,tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2test",
]),
('contract-url-code', [
"Input,test_student1test_student1test_student1test_student1test_student@example.com,test_student_1,tester1,Test_Student_1,TestStudent1",
"Input,[email protected],test_student_1test_student_1test_stu,tester3,Test_Student_3,TestStudent3",
"Input,[email protected],test_student_2,tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2test,Test_Student_2,TestStudent2",
]),
)
@ddt.unpack
def test_register_over_max_char_length(self, url_code, students):
# ----------------------------------------------------------
# Setup test data
# ----------------------------------------------------------
contract = self._create_contract(url_code=url_code)
history = self._create_task_history(contract=contract)
self._create_targets(history, students)
# ----------------------------------------------------------
# Execute task
# ----------------------------------------------------------
self._test_run_with_task(
student_register,
'student_register',
task_entry=self._create_input_entry(contract=contract, history=history),
expected_attempted=3,
expected_num_failed=3,
expected_total=3,
)
# ----------------------------------------------------------
# Assertion
# ----------------------------------------------------------
self.assertEqual(3, StudentRegisterTaskTarget.objects.filter(history=history, completed=True).count())
self.assertEqual(
"Line 1:Email cannot be more than 75 characters long",
StudentRegisterTaskTarget.objects.get(history=history, student=students[0]).message
)
self.assertEqual(
"Line 2:Username cannot be more than 30 characters long",
StudentRegisterTaskTarget.objects.get(history=history, student=students[1]).message
)
self.assertEqual(
"Line 3:Name cannot be more than 255 characters long",
StudentRegisterTaskTarget.objects.get(history=history, student=students[2]).message
)
self.assertFalse(ContractRegister.objects.filter(user__email='[email protected]', contract=contract).exists())
self.assertFalse(ContractRegister.objects.filter(user__email='[email protected]', contract=contract).exists())
self.assertFalse(ContractRegister.objects.filter(user__email='[email protected]', contract=contract).exists())
@ddt.data(
(None, None, [
"Input,[email protected],test_student_1,tester1",
], 1),
("contract-url-code", True, [
"Input,[email protected],test_student_1,tester1,Test_Student_1,TestStudent1",
], 1),
("contract-url-code", False, [
"Input,[email protected],test_student_1,tester1,Test_Student_1,TestStudent1",
], 0),
)
@ddt.unpack
@patch('biz.djangoapps.ga_contract_operation.student_register.django_send_mail')
def test_register_send_mail(self, url_code, send_mail, students, send_mail_call_count, send_mail_to_student):
# ----------------------------------------------------------
# Setup test data
# ----------------------------------------------------------
contract = self._create_contract(url_code=url_code, send_mail=send_mail)
history = self._create_task_history(contract=contract)
self._create_targets(history, students)
# ----------------------------------------------------------
# Execute task
# ----------------------------------------------------------
self._test_run_with_task(
student_register,
'student_register',
task_entry=self._create_input_entry(contract=contract, history=history),
expected_attempted=1,
expected_num_succeeded=1,
expected_total=1,
)
# ----------------------------------------------------------
# Assertion
# ----------------------------------------------------------
self.assertEqual(0, StudentRegisterTaskTarget.objects.filter(history=history, completed=False).count())
self.assertEqual(1, StudentRegisterTaskTarget.objects.filter(history=history, completed=True).count())
self.assertIsNone(StudentRegisterTaskTarget.objects.get(history=history, student=students[0]).message)
user = User.objects.get(email='[email protected]')
self.assertTrue(user.is_active)
self.assertEqual(ContractRegister.objects.get(user__email='[email protected]', contract=contract).status, INPUT_INVITATION_CODE)
if url_code:
self.assertEqual('Test_Student_1', BizUser.objects.get(user=user).login_code)
self.assertEqual(send_mail_call_count, send_mail_to_student.call_count)
@ddt.data(
(None, None, [
"Input,[email protected],test_student_2,tester2",
], 0),
("contract-url-code", True, [
"Input,[email protected],test_student_2,tester2,Test_Student_2,TestStudent1",
], 0),
("contract-url-code", False, [
"Input,[email protected],test_student_2,tester2,Test_Student_2,TestStudent2",
], 0),
)
@ddt.unpack
@patch('biz.djangoapps.ga_contract_operation.student_register.django_send_mail')
def test_register_not_send_mail(self, url_code, send_mail, students, send_mail_call_count, send_mail_to_student):
# ----------------------------------------------------------
# Setup test data
# ----------------------------------------------------------
contract = self._create_contract(url_code=url_code, send_mail=send_mail)
history = self._create_task_history(contract=contract)
self._create_targets(history, students)
# ----------------------------------------------------------
# Execute task
# ----------------------------------------------------------
self._test_run_with_task(
student_register,
'student_register',
task_entry=self._create_input_entry_not_sendmail(contract=contract, history=history),
expected_attempted=1,
expected_num_succeeded=1,
expected_total=1,
)
# ----------------------------------------------------------
# Assertion
# ----------------------------------------------------------
self.assertEqual(0, StudentRegisterTaskTarget.objects.filter(history=history, completed=False).count())
self.assertEqual(1, StudentRegisterTaskTarget.objects.filter(history=history, completed=True).count())
self.assertIsNone(StudentRegisterTaskTarget.objects.get(history=history, student=students[0]).message)
user = User.objects.get(email='[email protected]')
self.assertTrue(user.is_active)
self.assertEqual(ContractRegister.objects.get(user__email='[email protected]', contract=contract).status, INPUT_INVITATION_CODE)
if url_code:
self.assertEqual('Test_Student_2', BizUser.objects.get(user=user).login_code)
self.assertEqual(send_mail_call_count, send_mail_to_student.call_count)
|
import unittest
from lxml import etree
from openerp.exceptions import AccessError
from openerp.tools.misc import mute_logger
from openerp.tests import common
# test group that demo user should not have
GROUP_SYSTEM = 'base.group_system'
GROUP_ERP_MANAGER = 'base.group_erp_manager'
class TestACL(common.TransactionCase):
def setUp(self):
super(TestACL, self).setUp()
self.res_currency = self.registry('res.currency')
self.res_partner = self.registry('res.partner')
self.res_users = self.registry('res.users')
self.res_company = self.registry('res.company')
self.demo_uid = self.env.ref('base.user_demo').id
self.erp_system_group = self.env.ref(GROUP_SYSTEM)
self.erp_manager_group = self.env.ref(GROUP_ERP_MANAGER)
def _set_field_groups(self, model, field_name, groups):
field = model._fields[field_name]
column = model._columns[field_name]
old_groups = field.groups
old_prefetch = column._prefetch
field.groups = groups
        column.groups = groups
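        # disable prefetching so that reading unrestricted fields does not pull in the now-restricted column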
column._prefetch = False
@self.addCleanup
def cleanup():
field.groups = old_groups
column.groups = old_groups
column._prefetch = old_prefetch
def test_field_visibility_restriction(self):
"""Check that model-level ``groups`` parameter effectively restricts access to that
field for users who do not belong to one of the explicitly allowed groups"""
# Verify the test environment first
original_fields = self.res_currency.fields_get(self.cr, self.demo_uid, [])
form_view = self.res_currency.fields_view_get(self.cr, self.demo_uid, False, 'form')
view_arch = etree.fromstring(form_view.get('arch'))
has_group_system = self.res_users.has_group(self.cr, self.demo_uid, GROUP_SYSTEM)
self.assertFalse(has_group_system, "`demo` user should not belong to the restricted group before the test")
self.assertTrue('decimal_places' in original_fields, "'decimal_places' field must be properly visible before the test")
self.assertNotEquals(view_arch.xpath("//field[@name='decimal_places']"), [],
"Field 'decimal_places' must be found in view definition before the test")
# restrict access to the field and check it's gone
self._set_field_groups(self.res_currency, 'decimal_places', GROUP_SYSTEM)
fields = self.res_currency.fields_get(self.cr, self.demo_uid, [])
form_view = self.res_currency.fields_view_get(self.cr, self.demo_uid, False, 'form')
view_arch = etree.fromstring(form_view.get('arch'))
self.assertFalse('decimal_places' in fields, "'decimal_places' field should be gone")
self.assertEquals(view_arch.xpath("//field[@name='decimal_places']"), [],
"Field 'decimal_places' must not be found in view definition")
# Make demo user a member of the restricted group and check that the field is back
self.erp_system_group.write({'users': [(4, self.demo_uid)]})
has_group_system = self.res_users.has_group(self.cr, self.demo_uid, GROUP_SYSTEM)
fields = self.res_currency.fields_get(self.cr, self.demo_uid, [])
form_view = self.res_currency.fields_view_get(self.cr, self.demo_uid, False, 'form')
view_arch = etree.fromstring(form_view.get('arch'))
#import pprint; pprint.pprint(fields); pprint.pprint(form_view)
self.assertTrue(has_group_system, "`demo` user should now belong to the restricted group")
self.assertTrue('decimal_places' in fields, "'decimal_places' field must be properly visible again")
self.assertNotEquals(view_arch.xpath("//field[@name='decimal_places']"), [],
"Field 'decimal_places' must be found in view definition again")
#cleanup
self.erp_system_group.write({'users': [(3, self.demo_uid)]})
@mute_logger('openerp.models')
def test_field_crud_restriction(self):
"Read/Write RPC access to restricted field should be forbidden"
# Verify the test environment first
has_group_system = self.res_users.has_group(self.cr, self.demo_uid, GROUP_SYSTEM)
self.assertFalse(has_group_system, "`demo` user should not belong to the restricted group")
self.assert_(self.res_partner.read(self.cr, self.demo_uid, [1], ['bank_ids']))
self.assert_(self.res_partner.write(self.cr, self.demo_uid, [1], {'bank_ids': []}))
# Now restrict access to the field and check it's forbidden
self._set_field_groups(self.res_partner, 'bank_ids', GROUP_SYSTEM)
with self.assertRaises(AccessError):
self.res_partner.read(self.cr, self.demo_uid, [1], ['bank_ids'])
with self.assertRaises(AccessError):
self.res_partner.write(self.cr, self.demo_uid, [1], {'bank_ids': []})
# Add the restricted group, and check that it works again
self.erp_system_group.write({'users': [(4, self.demo_uid)]})
has_group_system = self.res_users.has_group(self.cr, self.demo_uid, GROUP_SYSTEM)
self.assertTrue(has_group_system, "`demo` user should now belong to the restricted group")
self.assert_(self.res_partner.read(self.cr, self.demo_uid, [1], ['bank_ids']))
self.assert_(self.res_partner.write(self.cr, self.demo_uid, [1], {'bank_ids': []}))
#cleanup
self.erp_system_group.write({'users': [(3, self.demo_uid)]})
@mute_logger('openerp.models')
def test_fields_browse_restriction(self):
"""Test access to records having restricted fields"""
self._set_field_groups(self.res_partner, 'email', GROUP_SYSTEM)
pid = self.res_partner.search(self.cr, self.demo_uid, [], limit=1)[0]
part = self.res_partner.browse(self.cr, self.demo_uid, pid)
        # accessing fields must not raise exceptions...
part.name
# ... except if they are restricted
with self.assertRaises(AccessError) as cm:
with mute_logger('openerp.models'):
part.email
        # Useless because the title is now set on the client side, in the chrome.js file (map_title)
#self.assertEqual(cm.exception.args[0], 'Access Error')
def test_view_create_edit_button_invisibility(self):
""" Test form view Create, Edit, Delete button visibility based on access right of model"""
methods = ['create', 'edit', 'delete']
company_view = self.res_company.fields_view_get(self.cr, self.demo_uid, False, 'form')
view_arch = etree.fromstring(company_view['arch'])
for method in methods:
self.assertEqual(view_arch.get(method), 'false')
def test_view_create_edit_button_visibility(self):
""" Test form view Create, Edit, Delete button visibility based on access right of model"""
methods = ['create', 'edit', 'delete']
self.erp_manager_group.write({'users': [(4, self.demo_uid)]})
company_view = self.res_company.fields_view_get(self.cr, self.demo_uid, False, 'form')
view_arch = etree.fromstring(company_view['arch'])
for method in methods:
self.assertIsNone(view_arch.get(method))
# cleanup
self.erp_manager_group.write({'users': [(3, self.demo_uid)]})
def test_m2o_field_create_edit_invisibility(self):
""" Test many2one field Create and Edit option visibility based on access rights of relation field"""
methods = ['create', 'write']
company_view = self.res_company.fields_view_get(self.cr, self.demo_uid, False, 'form')
view_arch = etree.fromstring(company_view['arch'])
field_node = view_arch.xpath("//field[@name='currency_id']")
        self.assertTrue(len(field_node), "currency_id field should be in company form view")
for method in methods:
self.assertEqual(field_node[0].get('can_' + method), 'false')
def test_m2o_field_create_edit_visibility(self):
""" Test many2one field Create and Edit option visibility based on access rights of relation field"""
methods = ['create', 'write']
self.erp_system_group.write({'users': [(4, self.demo_uid)]})
company_view = self.res_company.fields_view_get(self.cr, self.demo_uid, False, 'form')
view_arch = etree.fromstring(company_view['arch'])
field_node = view_arch.xpath("//field[@name='currency_id']")
        self.assertTrue(len(field_node), "currency_id field should be in company form view")
for method in methods:
self.assertEqual(field_node[0].get('can_' + method), 'true')
# cleanup
        self.erp_system_group.write({'users': [(3, self.demo_uid)]})
if __name__ == '__main__':
unittest.main()
|
import os
import re
from resources.lib.model.inputdevice import InputDevice
class DeviceWrapper:
def __init__(self):
self.devices = []
self.init_devices()
def init_devices(self):
input_bus = '/proc/bus/input/devices'
if not os.path.exists(input_bus):
message = 'Input bus (%s) could not be accessed.' % input_bus
raise OSError(message)
with open(input_bus) as f:
device = None
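            # /proc/bus/input/devices holds one record per device:
            #   "I:" starts a new record (bus/vendor/product identity)
            #   "N: Name=..."     the human-readable device name
            #   "H: Handlers=..." the handlers (e.g. event0 js0)
            # A blank line terminates the record.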
for line in f.readlines():
if line.startswith('I:'):
device = InputDevice()
if line.startswith('N:'):
print line[9:-2]
name = line[9:-2]
for _dev in self.devices:
if _dev.name == name:
print 'found duplicate entry'
name += ' #2'
device.name = name
if line.startswith('H:'):
handlers = line[12:].split()
for handler in handlers:
device.handlers.append(handler)
if re.match('\n', line):
if device:
self.devices.append(device)
        # iterate over a copy so that removing an entry does not skip devices
        for _dev in list(self.devices):
if _dev.name == 'lircd':
self.devices.remove(_dev)
def find_device_by_name(self, name):
for device in self.devices:
if device.name == name:
return device
return None
def find_device_by_js(self, js):
for device in self.devices:
if js in device.handlers:
return device
return None
|
# -*- coding: utf-8 -*-
'''
Provide the service module for the proxy-minion SSH sample
.. versionadded:: 2015.8.2
'''
# Import python libs
from __future__ import absolute_import
import logging
import salt.utils
log = logging.getLogger(__name__)
__func_alias__ = {
'list_': 'list'
}
# Define the module's virtual name
__virtualname__ = 'service'
def __virtual__():
'''
Only work on systems that are a proxy minion
'''
try:
if salt.utils.is_proxy() and __opts__['proxy']['proxytype'] == 'ssh_sample':
return __virtualname__
except KeyError:
return (False, 'The ssh_service execution module failed to load. Check the proxy key in pillar.')
return (False, 'The ssh_service execution module failed to load: only works on an ssh_sample proxy minion.')
def get_all():
'''
Return a list of all available services
CLI Example:
.. code-block:: bash
salt '*' service.get_all
'''
proxy_fn = 'ssh_sample.service_list'
return __proxy__[proxy_fn]()
def list_():
'''
Return a list of all available services.
CLI Example:
.. code-block:: bash
salt '*' service.list
'''
return get_all()
def start(name, sig=None):
'''
    Start the specified service on the ssh_sample proxy
CLI Example:
.. code-block:: bash
salt '*' service.start <service name>
'''
proxy_fn = 'ssh_sample.service_start'
return __proxy__[proxy_fn](name)
def stop(name, sig=None):
'''
    Stop the specified service on the ssh_sample proxy
CLI Example:
.. code-block:: bash
salt '*' service.stop <service name>
'''
proxy_fn = 'ssh_sample.service_stop'
return __proxy__[proxy_fn](name)
def restart(name, sig=None):
'''
    Restart the specified service via the ssh_sample proxy
CLI Example:
.. code-block:: bash
salt '*' service.restart <service name>
'''
proxy_fn = 'ssh_sample.service_restart'
return __proxy__[proxy_fn](name)
def status(name, sig=None):
'''
    Return the status for a service via the ssh_sample proxy; returns a bool
    indicating whether the service is running.
CLI Example:
.. code-block:: bash
salt '*' service.status <service name>
'''
proxy_fn = 'ssh_sample.service_status'
resp = __proxy__[proxy_fn](name)
if resp['comment'] == 'stopped':
return False
if resp['comment'] == 'running':
return True
def running(name, sig=None):
'''
Return whether this service is running.
'''
    # status() already returns a bool for this proxy, so no dict lookup is needed
    return status(name)
def enabled(name, sig=None):
'''
Only the 'redbull' service is 'enabled' in the test
'''
return name == 'redbull'
|
import logging
import testUtils as utils
import time
import threading
from conftest import IPADDRESS1, \
RESOURCE, \
DUMMYVAL, \
OSCORECLIENTCONTEXT
from coap import coapDefines as d, \
coapOption as o, \
coapObjectSecurity as oscore
#============================ logging ===============================
log = logging.getLogger(utils.getMyLoggerName())
log.addHandler(utils.NullHandler())
#============================ tests ===========================================
def test_GET(logFixture,snoopyDispatcher,twoEndPoints):
(coap1,coap2,securityEnabled) = twoEndPoints
options = []
if securityEnabled:
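        # build an OSCORE security context from the preconfigured client parameters
        # and attach it to the request as an Object-Security option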
context = oscore.SecurityContext(OSCORECLIENTCONTEXT)
options = [o.ObjectSecurity(context=context)]
# have coap2 do a get
for _ in range(20):
reply = coap2.GET(
uri = 'coap://[{0}]:{1}/{2}/'.format(IPADDRESS1,d.DEFAULT_UDP_PORT,RESOURCE),
confirmable = False,
options = options
)
assert reply==DUMMYVAL
|
# -*- coding: utf-8 -*-
# Copyright (c) 2015 Ericsson AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from calvin.runtime.south.async import server_connection
from calvin.runtime.south.calvinsys import base_calvinsys_object
from calvin.utilities.calvinlogger import get_logger
_log = get_logger(__name__)
class UDPListener(base_calvinsys_object.BaseCalvinsysObject):
"""
UDPListener - A UDP socket listener
"""
init_schema = {
"type": "object",
"properties": {
"host": {
"description": "Host",
"type": "string"
},
"port": {
"description": "Port",
"type": "number"
}
},
"description": "Setup a UDP socket listener",
"required": ["host", "port"]
}
can_read_schema = {
"description": "Returns True if pending data",
"type": "boolean"
}
read_schema = {
"description": "Read data"
}
def init(self, host, port):
self._listener = server_connection.UDPServerProtocol(self.calvinsys._node.sched.schedule_calvinsys, self.actor.id)
self._listener.start(host, port)
def can_read(self):
return self._listener.have_data()
def read(self):
return self._listener.data_get()
def close(self):
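        # stop the underlying listener; errors (e.g. if it was never started) are deliberately swallowed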
try:
self._listener.stop()
except:
pass
|
# Copyright (c) 2012 Hesky Fisher
# See LICENSE.txt for details.
"""Platform dependent configuration."""
import appdirs
import os
from os.path import realpath, join, dirname, abspath, isfile, pardir
import sys
# If plover is run from a pyinstaller binary.
if hasattr(sys, 'frozen') and hasattr(sys, '_MEIPASS'):
ASSETS_DIR = sys._MEIPASS
PROGRAM_DIR = dirname(sys.executable)
# If plover is run from an app bundle on Mac.
elif (sys.platform.startswith('darwin') and '.app' in realpath(__file__)):
ASSETS_DIR = os.getcwd()
PROGRAM_DIR = abspath(join(dirname(sys.executable), *[pardir] * 3))
else:
ASSETS_DIR = join(dirname(dirname(realpath(__file__))), 'assets')
PROGRAM_DIR = os.getcwd()
# If the program's directory has a plover.cfg file then run in "portable mode",
# i.e. store all data in the same directory. This allows keeping all Plover
# files in a portable drive.
if isfile(join(PROGRAM_DIR, 'plover.cfg')):
CONFIG_DIR = PROGRAM_DIR
else:
CONFIG_DIR = appdirs.user_data_dir('plover', 'plover')
|
# Adapted PyOpenGL-Demo/GLUT/shader_test.py
from OpenGL.GL.ARB.shader_objects import *
from OpenGL.GL.ARB.vertex_shader import *
from OpenGL.GL.ARB.fragment_shader import *
def compileShader(source, shaderType):
"""Compile shader source of given type"""
shader = glCreateShaderObjectARB(shaderType)
print "glShaderSourceARB:", bool(glShaderSourceARB)
glShaderSourceARB(shader, source)
glCompileShaderARB(shader)
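    # NOTE: compile status is not checked here; glGetObjectParameterivARB(shader,
    # GL_OBJECT_COMPILE_STATUS_ARB) would report compilation failures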
return shader
def compileProgram(vertexSource=None, fragmentSource=None):
    program = glCreateProgramObjectARB()
    vertexShader = fragmentShader = None
if vertexSource:
vertexShader = compileShader(vertexSource, GL_VERTEX_SHADER_ARB)
glAttachObjectARB(program, vertexShader)
if fragmentSource:
fragmentShader = compileShader(fragmentSource, GL_FRAGMENT_SHADER_ARB)
glAttachObjectARB(program, fragmentShader)
glBindAttribLocationARB(program, 0, 'in_position')
glBindAttribLocationARB(program, 1, 'in_color')
glValidateProgramARB(program)
glLinkProgramARB(program)
print 'shader_if: attrib loc of in_position: %s' % glGetAttribLocationARB(program, 'in_position')
print 'shader_if: attrib loc of in_color: %s' % glGetAttribLocationARB(program, 'in_color')
if vertexShader:
glDeleteObjectARB(vertexShader)
if fragmentShader:
glDeleteObjectARB(fragmentShader)
return program
#### osx is confined to glsl=120 !!
VERTEX_SHADER = """
#version 120
varying vec3 normal;
attribute vec3 in_position;
attribute vec3 in_color;
//http://stackoverflow.com/questions/13039439/transforming-glsl-150-to-120
//in vec4 position; //failing, not for 120
//in vec4 color; //failing, not for 120
void main() {
//normal = gl_NormalMatrix * gl_Normal;
//gl_Position = gl_ModelViewProjectionMatrix * gl_Vertex;
gl_Position = gl_ModelViewProjectionMatrix * vec4(in_position, 1.);
gl_FrontColor = vec4(in_color, 1.);
}
"""
#### fixme:
# VERTEX_SHADER = """
# #version 120
# in vec4 position;
# void main() {
# gl_Position = position;
# //gl_FrontColor = vec4(0, 0, 1, 1);
# }
# """
FRAGMENT_SHADER = """
#version 120
varying vec3 normal;
void main() {
/*
float intensity;
vec4 color;
vec3 n = normalize(normal);
vec3 l = normalize(gl_LightSource[0].position).xyz;
// quantize to 5 steps (0, .25, .5, .75 and 1)
intensity = (floor(dot(l, n) * 4.0) + 1.0)/4.0;
color = vec4(intensity*1.0, intensity*0.5, intensity*0.5,
intensity*1.0);
gl_FragColor = color;
*/
//gl_FragColor = vec4(1, 0, 0, 1);
gl_FragColor = gl_Color; //fixme: this will not change non-attribute object colors!!!
}
"""
|
# Copyright 2018 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Classify a single token in a sentence using a trained classifier."""
from absl import app
from absl import flags
import torch
import sys
sys.path.insert(1, 'helpers')
import classifier_helper
import inference_helper
import setup_helper
import tokenization_helper
# Command Line Arguments
FLAGS = flags.FLAGS
flags.DEFINE_string('sentence', u'he was a doctor',
'the sentence to start with')
flags.DEFINE_string('trained_variables_dir', None, 'the location where the '
                    'classifier variables are stored')
flags.DEFINE_string('model_config', 'bert-base-uncased', 'the name of the model'
'configuration to load')
flags.DEFINE_integer('layer_id', 6, 'layer to optimize activation for')
flags.DEFINE_integer('word_id', 1, 'word to feed into the classification head')
def classify_token(device, tokenizer, model):
"""Classifies a token using a trained classifier on top of BERT.
Args:
device: Where to do the calculations and store variables.
tokenizer: Converts the input sentence into tokens.
model: Used to retrieve the activations from.
"""
tokens = tokenization_helper.tokenize_input_sentence(tokenizer,
FLAGS.sentence, '')
tokens_tensor, segments_tensor = tokenization_helper.tensors_from_tokens(
tokenizer, tokens, device)
layers_act = inference_helper.run_inference_vanilla(tokens_tensor,
segments_tensor, model)
token_act = layers_act[0][FLAGS.layer_id][FLAGS.word_id]
classification_head = classifier_helper.get_classification_head(
device, FLAGS.layer_id, FLAGS.trained_variables_dir)
y = token_act.matmul(classification_head)
y = torch.sigmoid(y)
print('Prediction: {}'.format(y.item()))
def main(_):
tokenizer, model, device = setup_helper.setup_bert_vanilla(
FLAGS.model_config)
# Start the run
classify_token(device, tokenizer, model)
if __name__ == '__main__':
flags.mark_flag_as_required('trained_variables_dir')
app.run(main)
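# Hedged usage note (the script filename below is assumed, not stated in the source):
# --trained_variables_dir is marked required, so a typical invocation looks like
#   python classify_token.py --trained_variables_dir=/path/to/variables \
#       --sentence="he was a doctor" --layer_id=6 --word_id=1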
|
"""
raven.contrib.django.models
~~~~~~~~~~~~~~~~~~~~~~~~~~~
Acts as an implicit hook for Django installs.
:copyright: (c) 2010-2012 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import
from __future__ import unicode_literals
from hashlib import md5
import logging
import sys
import warnings
from raven.utils import six
from django.conf import settings as django_settings
logger = logging.getLogger('sentry.errors.client')
def get_installed_apps():
"""
Modules in settings.INSTALLED_APPS as a set.
"""
return set(django_settings.INSTALLED_APPS)
_client = (None, None)
class ProxyClient(object):
"""
    A proxy which represents the current client at all times.
"""
# introspection support:
__members__ = property(lambda x: x.__dir__())
# Need to pretend to be the wrapped class, for the sake of objects that care
# about this (especially in equality tests)
__class__ = property(lambda x: get_client().__class__)
__dict__ = property(lambda o: get_client().__dict__)
__repr__ = lambda x: repr(get_client())
__getattr__ = lambda x, o: getattr(get_client(), o)
__setattr__ = lambda x, o, v: setattr(get_client(), o, v)
__delattr__ = lambda x, o: delattr(get_client(), o)
__lt__ = lambda x, o: get_client() < o
__le__ = lambda x, o: get_client() <= o
__eq__ = lambda x, o: get_client() == o
__ne__ = lambda x, o: get_client() != o
__gt__ = lambda x, o: get_client() > o
__ge__ = lambda x, o: get_client() >= o
if not six.PY3:
__cmp__ = lambda x, o: cmp(get_client(), o) # NOQA
__hash__ = lambda x: hash(get_client())
# attributes are currently not callable
# __call__ = lambda x, *a, **kw: get_client()(*a, **kw)
__nonzero__ = lambda x: bool(get_client())
__len__ = lambda x: len(get_client())
__getitem__ = lambda x, i: get_client()[i]
__iter__ = lambda x: iter(get_client())
__contains__ = lambda x, i: i in get_client()
__getslice__ = lambda x, i, j: get_client()[i:j]
__add__ = lambda x, o: get_client() + o
__sub__ = lambda x, o: get_client() - o
__mul__ = lambda x, o: get_client() * o
__floordiv__ = lambda x, o: get_client() // o
__mod__ = lambda x, o: get_client() % o
__divmod__ = lambda x, o: get_client().__divmod__(o)
__pow__ = lambda x, o: get_client() ** o
__lshift__ = lambda x, o: get_client() << o
__rshift__ = lambda x, o: get_client() >> o
__and__ = lambda x, o: get_client() & o
__xor__ = lambda x, o: get_client() ^ o
__or__ = lambda x, o: get_client() | o
__div__ = lambda x, o: get_client().__div__(o)
__truediv__ = lambda x, o: get_client().__truediv__(o)
__neg__ = lambda x: -(get_client())
__pos__ = lambda x: +(get_client())
__abs__ = lambda x: abs(get_client())
__invert__ = lambda x: ~(get_client())
__complex__ = lambda x: complex(get_client())
__int__ = lambda x: int(get_client())
if not six.PY3:
__long__ = lambda x: long(get_client()) # NOQA
__float__ = lambda x: float(get_client())
__str__ = lambda x: six.binary_type(get_client())
__unicode__ = lambda x: six.text_type(get_client())
__oct__ = lambda x: oct(get_client())
__hex__ = lambda x: hex(get_client())
__index__ = lambda x: get_client().__index__()
    __coerce__ = lambda x, o: get_client().__coerce__(o)
    __enter__ = lambda x: get_client().__enter__()
    __exit__ = lambda x, *a, **kw: get_client().__exit__(*a, **kw)
client = ProxyClient()
def get_option(x, d=None):
options = getattr(django_settings, 'RAVEN_CONFIG', {})
return getattr(django_settings, 'SENTRY_%s' % x, options.get(x, d))
def get_client(client=None):
global _client
tmp_client = client is not None
if not tmp_client:
client = getattr(django_settings, 'SENTRY_CLIENT', 'raven.contrib.django.DjangoClient')
if _client[0] != client:
module, class_name = client.rsplit('.', 1)
ga = lambda x, d=None: getattr(django_settings, 'SENTRY_%s' % x, d)
options = getattr(django_settings, 'RAVEN_CONFIG', {})
options.setdefault('servers', ga('SERVERS'))
options.setdefault('include_paths', ga('INCLUDE_PATHS', []))
options['include_paths'] = set(options['include_paths']) | get_installed_apps()
options.setdefault('exclude_paths', ga('EXCLUDE_PATHS'))
options.setdefault('timeout', ga('TIMEOUT'))
options.setdefault('name', ga('NAME'))
options.setdefault('auto_log_stacks', ga('AUTO_LOG_STACKS'))
options.setdefault('key', ga('KEY', md5(django_settings.SECRET_KEY.encode('utf8')).hexdigest()))
options.setdefault('string_max_length', ga('MAX_LENGTH_STRING'))
options.setdefault('list_max_length', ga('MAX_LENGTH_LIST'))
options.setdefault('site', ga('SITE'))
options.setdefault('public_key', ga('PUBLIC_KEY'))
options.setdefault('secret_key', ga('SECRET_KEY'))
options.setdefault('project', ga('PROJECT'))
options.setdefault('processors', ga('PROCESSORS'))
options.setdefault('dsn', ga('DSN'))
options.setdefault('context', ga('CONTEXT'))
class_name = str(class_name)
instance = getattr(__import__(module, {}, {}, class_name), class_name)(**options)
if not tmp_client:
_client = (client, instance)
return instance
return _client[1]
def sentry_exception_handler(request=None, **kwargs):
exc_type = sys.exc_info()[0]
exclusions = set(get_option('IGNORE_EXCEPTIONS', ()))
exc_name = '%s.%s' % (exc_type.__module__, exc_type.__name__)
if exc_type.__name__ in exclusions or exc_name in exclusions or any(exc_name.startswith(e[:-1]) for e in exclusions if e.endswith('*')):
logger.info(
'Not capturing exception due to filters: %s', exc_type,
exc_info=sys.exc_info())
return
try:
client.captureException(exc_info=sys.exc_info(), request=request)
except Exception as exc:
try:
logger.exception('Unable to process log entry: %s' % (exc,))
except Exception as exc:
warnings.warn('Unable to process log entry: %s' % (exc,))
def register_handlers():
from django.core.signals import got_request_exception
# HACK: support Sentry's internal communication
if 'sentry' in django_settings.INSTALLED_APPS:
from django.db import transaction
# Django 1.6
if hasattr(transaction, 'atomic'):
commit_on_success = transaction.atomic
else:
commit_on_success = transaction.commit_on_success
@commit_on_success
def wrap_sentry(request, **kwargs):
if transaction.is_dirty():
transaction.rollback()
return sentry_exception_handler(request, **kwargs)
exception_handler = wrap_sentry
else:
exception_handler = sentry_exception_handler
# Connect to Django's internal signal handler
got_request_exception.connect(exception_handler, weak=False)
# If Celery is installed, register a signal handler
if 'djcelery' in django_settings.INSTALLED_APPS:
try:
# Celery < 2.5? is not supported
from raven.contrib.celery import (
register_signal, register_logger_signal)
except ImportError:
logger.exception('Failed to install Celery error handler')
else:
try:
register_signal(client)
except Exception:
logger.exception('Failed to install Celery error handler')
try:
register_logger_signal(client)
except Exception:
logger.exception('Failed to install Celery error handler')
def register_serializers():
# force import so serializers can call register
import raven.contrib.django.serializers # NOQA
if ('raven.contrib.django' in django_settings.INSTALLED_APPS
or 'raven.contrib.django.raven_compat' in django_settings.INSTALLED_APPS):
register_handlers()
register_serializers()
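# Hedged usage sketch (illustrative only): after Django settings are loaded, the
# module-level ``client`` proxy lazily resolves the class named by SENTRY_CLIENT and
# forwards attribute access to it.
#   from raven.contrib.django.models import client, get_client
#   client.captureMessage('something happened')  # proxied to get_client().captureMessage
#   real_client = get_client()                   # the concrete client instance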
|
import logging
from unittest.mock import patch
import requests
from django.conf import settings
from django.test import TestCase, override_settings
from eventkit_cloud.utils.geocoding.geocode_auth import (
get_auth_headers,
CACHE_COOKIE_KEY,
CACHE_TOKEN_KEY,
CACHE_TOKEN_TIMEOUT,
update_session_cookies,
update_auth_headers,
get_session_cookies,
authenticate,
)
logger = logging.getLogger(__name__)
class TestGeoCodeAuth(TestCase):
@override_settings(GEOCODING_AUTH_URL="http://fake.url")
@patch("eventkit_cloud.utils.geocoding.geocode_auth.update_auth_headers")
@patch("eventkit_cloud.utils.geocoding.geocode_auth.cache")
@patch("eventkit_cloud.utils.geocoding.geocode_auth.authenticate")
def test_get_auth_headers(self, mock_authenticate, mock_cache, mock_update_auth_headers):
mock_cache.get.return_value = settings.GEOCODING_AUTH_URL
self.assertIsNone(get_auth_headers())
mock_cache.reset_mock()
mock_cache.get.return_value = {}
example_token = "test_token"
mock_authenticate.return_value = example_token
expected_header = {"Authorization": "Bearer " + str(example_token)}
        self.assertEqual(expected_header, get_auth_headers())
mock_update_auth_headers.assert_called_once_with(expected_header)
mock_cache.get.assert_called_once_with(CACHE_TOKEN_KEY, {})
@patch("eventkit_cloud.utils.geocoding.geocode_auth.cache")
def test_update_auth_headers(self, mock_cache):
example_headers = "test_headers"
update_auth_headers(example_headers)
mock_cache.set.assert_called_once_with(CACHE_TOKEN_KEY, example_headers, CACHE_TOKEN_TIMEOUT)
@patch("eventkit_cloud.utils.geocoding.geocode_auth.cache")
def test_update_session_cookies(self, mock_cache):
example_cookies = "test_cookies"
update_session_cookies(example_cookies)
mock_cache.set.assert_called_once_with(CACHE_COOKIE_KEY, example_cookies, CACHE_TOKEN_TIMEOUT)
@patch("eventkit_cloud.utils.geocoding.geocode_auth.cache")
def test_get_session_cookies(self, mock_cache):
example_cookies = "test_cookies"
mock_cache.get.return_value = example_cookies
        self.assertEqual(example_cookies, get_session_cookies())
mock_cache.get.assert_called_once_with(CACHE_COOKIE_KEY)
@patch("eventkit_cloud.utils.geocoding.geocode_auth.cache")
@patch("eventkit_cloud.utils.geocoding.geocode_auth.auth_requests")
def test_authenticate(self, mock_auth_requests, mock_cache):
with self.settings(GEOCODING_AUTH_URL="http://test.test"):
example_token = "test_token"
example_response = {"token": example_token}
mock_auth_requests.get().json.return_value = example_response
            self.assertEqual(example_token, authenticate())
mock_cache.set.assert_called_once_with(CACHE_TOKEN_KEY, example_token, CACHE_TOKEN_TIMEOUT)
mock_cache.reset_mock()
with self.settings(GEOCODING_AUTH_URL="http://test.test"):
example_response = {}
mock_auth_requests.get().json.return_value = example_response
self.assertIsNone(authenticate())
mock_cache.set.assert_called_once_with(CACHE_TOKEN_KEY, settings.GEOCODING_AUTH_URL, CACHE_TOKEN_TIMEOUT)
mock_cache.reset_mock()
with self.settings(GEOCODING_AUTH_URL=None):
self.assertIsNone(authenticate())
with self.settings(GEOCODING_AUTH_URL="http://test.test"):
mock_auth_requests.get().json.side_effect = requests.exceptions.RequestException()
self.assertIsNone(authenticate())
mock_cache.delete.assert_called_once_with(CACHE_TOKEN_KEY)
|
# -*- coding: utf-8 -*-
import regex as re
import six
from dateparser.utils import get_logger
class LanguageValidator(object):
logger = None
VALID_KEYS = [
'name', 'skip', 'pertain', 'simplifications', 'no_word_spacing', 'ago', 'in',
'monday', 'tuesday', 'wednesday', 'thursday', 'friday', 'saturday', 'sunday',
'january', 'february', 'march', 'april', 'may', 'june', 'july', 'august',
'september', 'october', 'november', 'december', 'year', 'month', 'week', 'day',
'hour', 'minute', 'second', 'dateorder'
]
@classmethod
def get_logger(cls):
if cls.logger is None:
cls.logger = get_logger()
return cls.logger
@classmethod
def validate_info(cls, language_id, info):
result = True
result &= cls._validate_type(language_id, info)
if not result:
return False
result &= cls._validate_name(language_id, info)
result &= cls._validate_word_spacing(language_id, info)
result &= cls._validate_skip_list(language_id, info)
result &= cls._validate_pertain_list(language_id, info)
result &= cls._validate_weekdays(language_id, info)
result &= cls._validate_months(language_id, info)
result &= cls._validate_units(language_id, info)
result &= cls._validate_other_words(language_id, info)
result &= cls._validate_simplifications(language_id, info)
result &= cls._validate_extra_keys(language_id, info)
return result
@classmethod
def _validate_type(cls, language_id, info):
result = True
if not isinstance(info, dict):
cls.get_logger().error(
"Language '%(id)s' info expected to be dict, but have got %(type)s",
{'id': language_id, 'type': type(info).__name__})
result = False
return result
@classmethod
def _validate_name(cls, language_id, info):
result = True
if 'name' not in info or not isinstance(info['name'], six.string_types) or not info['name']:
cls.get_logger().error("Language '%(id)s' does not have a name", {'id': language_id})
result = False
return result
@classmethod
def _validate_word_spacing(cls, language_id, info):
if 'no_word_spacing' not in info:
return True # Optional key
result = True
value = info['no_word_spacing']
if value not in [True, False]:
cls.get_logger().error(
"Invalid 'no_word_spacing' value %(value)r for '%(id)s' language: "
"expected boolean", {'value': value, 'id': language_id})
result = False
return result
@classmethod
def _validate_skip_list(cls, language_id, info):
if 'skip' not in info:
return True # Optional key
result = True
skip_tokens_list = info['skip']
if isinstance(skip_tokens_list, list):
for token in skip_tokens_list:
if not isinstance(token, six.string_types) or not token:
cls.get_logger().error(
"Invalid 'skip' token %(token)r for '%(id)s' language: "
"expected not empty string",
{'token': token, 'id': language_id})
result = False
else:
cls.get_logger().error(
"Invalid 'skip' list for '%(id)s' language: "
"expected list type but have got %(type)s",
{'id': language_id, 'type': type(skip_tokens_list).__name__})
result = False
return result
@classmethod
def _validate_pertain_list(cls, language_id, info):
if 'pertain' not in info:
return True # Optional key
result = True
        pertain_tokens_list = info['pertain']
if isinstance(pertain_tokens_list, list):
for token in pertain_tokens_list:
if not isinstance(token, six.string_types) or not token:
cls.get_logger().error(
"Invalid 'pertain' token %(token)r for '%(id)s' language: "
"expected not empty string",
{'token': token, 'id': language_id})
result = False
else:
cls.get_logger().error(
"Invalid 'pertain' list for '%(id)s' language: "
"expected list type but have got %(type)s",
{'id': language_id, 'type': type(pertain_tokens_list).__name__})
result = False
return result
@classmethod
def _validate_weekdays(cls, language_id, info):
result = True
for weekday in 'monday', 'tuesday', 'wednesday', 'thursday', 'friday', 'saturday', 'sunday':
if weekday not in info or not info[weekday]:
cls.get_logger().error(
"No translations for '%(weekday)s' provided for '%(id)s' language",
{'weekday': weekday, 'id': language_id})
result = False
continue
translations_list = info[weekday]
if isinstance(translations_list, list):
for token in translations_list:
if not isinstance(token, six.string_types) or not token:
cls.get_logger().error(
"Invalid '%(weekday)s' translation %(token)r for '%(id)s' language: "
"expected not empty string",
{'weekday': weekday, 'token': token, 'id': language_id})
result = False
else:
cls.get_logger().error(
"Invalid '%(weekday)s' translations list for '%(id)s' language: "
"expected list type but have got %(type)s",
{'weekday': weekday,
'id': language_id,
'type': type(translations_list).__name__})
result = False
return result
@classmethod
def _validate_months(cls, language_id, info):
result = True
for month in ('january', 'february', 'march', 'april', 'may', 'june', 'july',
'august', 'september', 'october', 'november', 'december'):
if month not in info or not info[month]:
cls.get_logger().error(
"No translations for '%(month)s' provided for '%(id)s' language",
{'month': month, 'id': language_id})
result = False
continue
translations_list = info[month]
if isinstance(translations_list, list):
for token in translations_list:
if not isinstance(token, six.string_types) or not token:
cls.get_logger().error(
"Invalid '%(month)s' translation %(token)r for '%(id)s' language: "
"expected not empty string",
{'month': month, 'token': token, 'id': language_id})
result = False
else:
cls.get_logger().error(
"Invalid '%(month)s' translations list for '%(id)s' language: "
"expected list type but have got %(type)s",
{'month': month, 'id': language_id, 'type': type(translations_list).__name__})
result = False
return result
@classmethod
def _validate_units(cls, language_id, info):
result = True
for unit in 'year', 'month', 'week', 'day', 'hour', 'minute', 'second':
if unit not in info or not info[unit]:
cls.get_logger().error(
"No translations for '%(unit)s' provided for '%(id)s' language",
{'unit': unit, 'id': language_id})
result = False
continue
translations_list = info[unit]
if isinstance(translations_list, list):
for token in translations_list:
if not isinstance(token, six.string_types) or not token:
cls.get_logger().error(
"Invalid '%(unit)s' translation %(token)r for '%(id)s' language: "
"expected not empty string",
{'unit': unit, 'token': token, 'id': language_id})
result = False
else:
cls.get_logger().error(
"Invalid '%(unit)s' translations list for '%(id)s' language: "
"expected list type but have got %(type)s",
{'unit': unit, 'id': language_id, 'type': type(translations_list).__name__})
result = False
return result
@classmethod
def _validate_other_words(cls, language_id, info):
result = True
        for word in ('ago',):
if word not in info or not info[word]:
cls.get_logger().error(
"No translations for '%(word)s' provided for '%(id)s' language",
{'word': word, 'id': language_id})
result = False
continue
translations_list = info[word]
if isinstance(translations_list, list):
for token in translations_list:
if not isinstance(token, six.string_types) or not token:
cls.get_logger().error(
"Invalid '%(word)s' translation %(token)r for '%(id)s' language: "
"expected not empty string",
{'word': word, 'token': token, 'id': language_id})
result = False
else:
cls.get_logger().error(
"Invalid '%(word)s' translations list for '%(id)s' language: "
"expected list type but have got %(type)s",
{'word': word, 'id': language_id, 'type': type(translations_list).__name__})
result = False
return result
@classmethod
def _validate_simplifications(cls, language_id, info):
if 'simplifications' not in info:
return True # Optional key
result = True
simplifications_list = info['simplifications']
if isinstance(simplifications_list, list):
for simplification in simplifications_list:
if not isinstance(simplification, dict) or len(simplification) != 1:
                    cls.get_logger().error(
                        "Invalid simplification %(simplification)r for '%(id)s' language: "
                        "each simplification is supposed to be a one-to-one mapping",
                        {'simplification': simplification, 'id': language_id})
result = False
continue
key, value = list(simplification.items())[0]
if not isinstance(key, six.string_types) or not isinstance(value, (six.string_types, int)):
                    cls.get_logger().error(
                        "Invalid simplification %(simplification)r for '%(id)s' language: "
                        "each simplification is supposed to be a string-to-string-or-int mapping",
                        {'simplification': simplification, 'id': language_id})
result = False
continue
compiled_key = re.compile(key)
value = six.text_type(value)
replacements = re.findall(r'\\(\d+)', value)
replacements.extend(re.findall(r'\\g<(.+?)>', value))
groups = []
for group in replacements:
if group.isdigit():
groups.append(int(group))
elif group in compiled_key.groupindex:
groups.append(compiled_key.groupindex[group])
else:
cls.get_logger().error(
"Invalid simplification %(simplification)r for '%(id)s' language: "
"unknown group %(group)s",
{'simplification': simplification,
'id': language_id,
'group': group})
result = False
used_groups = set(map(int, groups))
expected_groups = set(range(0, compiled_key.groups + 1))
extra_groups = used_groups - expected_groups
not_used_groups = expected_groups - used_groups
not_used_groups -= {0} # Entire substring is not required to be used
if extra_groups:
cls.get_logger().error(
"Invalid simplification %(simplification)r for '%(id)s' language: "
"unknown groups %(groups)s",
{'simplification': simplification,
'id': language_id,
'groups': ", ".join(map(six.text_type, sorted(extra_groups)))})
result = False
if not_used_groups:
cls.get_logger().error(
"Invalid simplification %(simplification)r for '%(id)s' language: "
"groups %(groups)s were not used",
{'simplification': simplification,
'id': language_id,
'groups': ", ".join(map(six.text_type, sorted(not_used_groups)))})
result = False
else:
cls.get_logger().error(
"Invalid 'simplifications' list for '%(id)s' language: "
"expected list type but have got %(type)s",
{'id': language_id, 'type': type(simplifications_list).__name__})
result = False
return result
@classmethod
def _validate_extra_keys(cls, language_id, info):
result = True
extra_keys = set(info.keys()) - set(cls.VALID_KEYS)
if extra_keys:
cls.get_logger().error(
"Extra keys found for '%(id)s' language: %(keys)s",
{'id': language_id, 'keys': ", ".join(map(repr, extra_keys))})
result = False
return result
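# Hedged usage sketch (the dict below is a made-up minimal language definition, not a real
# dateparser one): validate_info returns True only when the name, every weekday, month and
# time unit, and the 'ago' word all have non-empty translation lists.
if __name__ == '__main__':
    _minimal_info = {
        'name': 'example',
        'monday': ['monday'], 'tuesday': ['tuesday'], 'wednesday': ['wednesday'],
        'thursday': ['thursday'], 'friday': ['friday'], 'saturday': ['saturday'],
        'sunday': ['sunday'],
        'january': ['january'], 'february': ['february'], 'march': ['march'],
        'april': ['april'], 'may': ['may'], 'june': ['june'], 'july': ['july'],
        'august': ['august'], 'september': ['september'], 'october': ['october'],
        'november': ['november'], 'december': ['december'],
        'year': ['year'], 'month': ['month'], 'week': ['week'], 'day': ['day'],
        'hour': ['hour'], 'minute': ['minute'], 'second': ['second'],
        'ago': ['ago'],
    }
    print(LanguageValidator.validate_info('example', _minimal_info))  # expected: True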
|
from sklearn.base import BaseEstimator
import copy
import random
import numpy as np
class VLS(BaseEstimator):
    def __init__(self, relief_object, num_feature_subset=40, size_feature_subset=5, random_state=None):
'''
:param relief_object: Must be an object that implements the standard sklearn fit function, and after fit, has attribute feature_importances_
that can be accessed. Scores must be a 1D np.ndarray of length # of features. The fit function must also be able to
take in an optional 1D np.ndarray 'weights' parameter of length num_features.
:param num_feature_subset: Number of feature subsets generated at random
:param size_feature_subset: Number of features in each subset. Cannot exceed number of features.
:param random_state: random seed
'''
if not self.check_is_int(num_feature_subset) or num_feature_subset <= 0:
raise Exception('num_feature_subset must be a positive integer')
if not self.check_is_int(size_feature_subset) or size_feature_subset <= 0:
raise Exception('size_feature_subset must be a positive integer')
if random_state != None and not self.check_is_int(random_state):
raise Exception('random_state must be None or integer')
self.relief_object = relief_object
self.num_feature_subset = num_feature_subset
self.size_feature_subset = size_feature_subset
self.random_state = random_state
self.rank_absolute = self.relief_object.rank_absolute
def fit(self, X, y,weights=None):
"""Scikit-learn required: Computes the feature importance scores from the training data.
Parameters
----------
X: array-like {n_samples, n_features} Training instances to compute the feature importance scores from
y: array-like {n_samples} Training labels
Returns
-------
self
"""
#random_state
if self.random_state != None:
np.random.seed(self.random_state)
random.seed(self.random_state)
#Make subsets with all the features
num_features = X.shape[1]
self.size_feature_subset = min(self.size_feature_subset,num_features)
subsets = self.make_subsets(list(range(num_features)),self.num_feature_subset,self.size_feature_subset)
#Fit each subset
scores = []
for subset in subsets:
new_X = self.custom_transform(X,subset)
copy_relief_object = copy.deepcopy(self.relief_object)
if not isinstance(weights,np.ndarray):
copy_relief_object.fit(new_X,y)
else:
copy_relief_object.fit(new_X,y,weights=weights[subset])
raw_score = copy_relief_object.feature_importances_
score = np.empty(num_features)
if self.rank_absolute:
score.fill(0)
else:
score.fill(np.NINF)
counter = 0
for index in subset:
score[index] = raw_score[counter]
counter+=1
scores.append(score)
#DEBUGGING
#print(score)
scores = np.array(scores)
#Merge results by selecting largest found weight for each feature
max_scores = []
for score in scores.T:
if self.rank_absolute:
max = np.max(np.absolute(score))
if max in score:
max_scores.append(max)
else:
max_scores.append(-max)
else:
max_scores.append(np.max(score))
max_scores = np.array(max_scores)
#Save FI as feature_importances_
self.feature_importances_ = max_scores
if self.rank_absolute:
self.top_features_ = np.argsort(np.absolute(self.feature_importances_))[::-1]
else:
self.top_features_ = np.argsort(self.feature_importances_)[::-1]
return self
def custom_transform(self,X,indices_to_preserve):
return X[:,indices_to_preserve]
def make_subsets(self,possible_indices,num_feature_subset,size_feature_subset):
if num_feature_subset * size_feature_subset < len(possible_indices):
raise Exception('num_feature_subset * size_feature_subset must be >= number of total features')
if size_feature_subset > len(possible_indices):
raise Exception('size_feature_subset cannot be > number of total features')
random.shuffle(possible_indices)
remaining_indices = copy.deepcopy(possible_indices)
subsets = []
while True:
subset = []
while len(remaining_indices) > 0 and len(subset) < size_feature_subset:
subset.append(remaining_indices.pop(0))
subsets.append(subset)
if len(remaining_indices) < size_feature_subset:
break
if len(remaining_indices) != 0:
while len(remaining_indices) < size_feature_subset:
index_bad = True
while index_bad:
potential_index = random.choice(possible_indices)
if not (potential_index in remaining_indices):
remaining_indices.append(potential_index)
break
subsets.append(remaining_indices)
subsets_left = num_feature_subset - len(subsets)
for i in range(subsets_left):
subsets.append(random.sample(possible_indices,size_feature_subset))
return subsets
def check_is_int(self, num):
try:
n = float(num)
if num - int(num) == 0:
return True
else:
return False
except:
return False
def check_is_float(self, num):
try:
n = float(num)
return True
except:
return False
def transform(self, X):
if X.shape[1] < self.relief_object.n_features_to_select:
raise ValueError('Number of features to select is larger than the number of features in the dataset.')
return X[:, self.top_features_[:self.relief_object.n_features_to_select]]
def fit_transform(self, X, y, weights=None):
self.fit(X, y, weights)
return self.transform(X)
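# Hedged usage sketch (DummyRelief is a stand-in, not a real skrebate estimator): VLS only
# needs relief_object to expose fit(X, y[, weights]), feature_importances_, rank_absolute
# and n_features_to_select.
if __name__ == '__main__':
    class DummyRelief(BaseEstimator):
        def __init__(self, n_features_to_select=2):
            self.n_features_to_select = n_features_to_select
            self.rank_absolute = False
        def fit(self, X, y, weights=None):
            # toy scoring: per-feature variance stands in for a real relief score
            self.feature_importances_ = np.var(X, axis=0)
            return self
    X = np.random.rand(20, 10)
    y = np.random.randint(0, 2, 20)
    vls = VLS(DummyRelief(), num_feature_subset=5, size_feature_subset=4, random_state=0)
    print(vls.fit_transform(X, y).shape)  # expected: (20, 2)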
|
# Copyright (c) 2017, The MITRE Corporation. All rights reserved.
# See LICENSE.txt for complete terms.
from mixbox import entities
from mixbox import fields
import cybox.bindings.cybox_common as common_binding
import cybox.objects
from .properties import String
class Property(String):
_binding = common_binding
_binding_class = _binding.PropertyType
name = fields.TypedField("name")
description = fields.TypedField("description")
def is_plain(self):
"""Whether the Property can be represented as a single value."""
return (
self.name is None and
self.description is None and
super(Property, self).is_plain()
)
class CustomProperties(entities.EntityList):
_binding = common_binding
_binding_class = common_binding.CustomPropertiesType
_namespace = 'http://cybox.mitre.org/common-2'
property_ = fields.TypedField("Property", Property, multiple=True)
class ObjectPropertiesFactory(entities.EntityFactory):
@classmethod
def objkey(cls, obj):
xsi_type = obj.xsi_type
if not xsi_type:
raise ValueError("No xsi:type found on ObjectProperties instance.")
return xsi_type.split(":")[1]
@classmethod
def entity_class(cls, key):
if not key:
raise ValueError("Must provide an xsi:type key for ObjectProperties.")
return cybox.objects.get_class_for_object_type(key)
class ObjectProperties(entities.Entity):
"""The Cybox ObjectProperties base class."""
_XSI_TYPE = None
_XSI_NS = None
_binding = common_binding
_binding_class = _binding.ObjectPropertiesType
object_reference = fields.TypedField("object_reference")
custom_properties = fields.TypedField("Custom_Properties", CustomProperties)
def __init__(self):
super(ObjectProperties, self).__init__()
self.parent = None
@property
def parent(self):
import cybox.core
if not self._parent:
self._parent = cybox.core.Object(self)
return self._parent
@parent.setter
def parent(self, value):
import cybox.core
if value and not isinstance(value, cybox.core.Object):
raise ValueError("Must be an Object")
self._parent = value
def add_related(self, related, relationship, inline=True):
self.parent.add_related(related, relationship, inline)
def to_obj(self, ns_info=None):
obj = super(ObjectProperties, self).to_obj(ns_info=ns_info)
if self._XSI_TYPE and self._XSI_NS:
obj.xsi_type = "%s:%s" % (self._XSI_NS, self._XSI_TYPE)
return obj
def to_dict(self):
d = super(ObjectProperties, self).to_dict()
if self._XSI_TYPE:
d['xsi:type'] = self._XSI_TYPE
return d
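# Hedged usage sketch (kept as comments because the exact list-handling API can vary with
# the mixbox version in use; values are illustrative):
#   prop = Property("some value")
#   prop.name = "my_property"
#   custom = CustomProperties()
#   custom.append(prop)
#   # a concrete ObjectProperties subclass would then set
#   # obj.custom_properties = custom before calling to_obj() / to_dict()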
|
__author__ = 'Joe Linn'
from .index import *
from .shard import *
class Health(object):
def __init__(self, client):
"""
@param client:
@type client: pylastica.client.Client
"""
super(Health, self).__init__()
self._client = client
        self._data = None
        self.refresh()
def _retrieve_health_data(self):
"""
Retrieves health data from the cluster
@return:
@rtype: dict
"""
return self._client.request('_cluster/health', query={'level': 'shards'}).data
@property
def data(self):
"""
Get the health data
@return:
@rtype: dict
"""
return self._data
def refresh(self):
"""
Refresh the health data of the cluster
@return:
@rtype: self
"""
self._data = self._retrieve_health_data()
return self
@property
def cluster_name(self):
"""
Get the name of the cluster
@return:
@rtype: str
"""
return self._data['cluster_name']
@property
def status(self):
"""
Get the status of the cluster
@return: green, yellow, or red
@rtype: str
"""
return self._data['status']
@property
def timed_out(self):
"""
@return:
@rtype: bool
"""
return bool(self._data['timed_out'])
@property
def number_of_nodes(self):
"""
Get the number of nodes in the cluster
@return:
@rtype: int
"""
return int(self._data['number_of_nodes'])
@property
def number_of_data_nodes(self):
"""
Get the number of data nodes in the cluster
@return:
@rtype: int
"""
return int(self._data['number_of_data_nodes'])
@property
def active_primary_shards(self):
"""
Get the number of active primary shards
@return:
@rtype: int
"""
return int(self._data['active_primary_shards'])
@property
def active_shards(self):
"""
Get the number of active shards
@return:
@rtype: int
"""
return int(self._data['active_shards'])
@property
def relocating_shards(self):
"""
Get the number of relocating shards
@return:
@rtype: int
"""
return int(self._data['relocating_shards'])
@property
def initializing_shards(self):
"""
Get the number of initializing shards
@return:
@rtype: int
"""
return int(self._data['initializing_shards'])
@property
def unassigned_shards(self):
"""
Get the number of unassigned shards
@return:
@rtype: int
"""
return int(self._data['unassigned_shards'])
@property
    def indices(self):
        """
        Get the status of the indices
        @return:
        @rtype: list of pylastica.cluster.health.index.Index
        """
        return [Index(name, index) for name, index in self._data['indices'].items()]
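# Hedged usage sketch (client construction is assumed, not shown):
#   client = pylastica.client.Client()
#   health = Health(client)          # queries _cluster/health at level=shards
#   print(health.status, health.number_of_nodes)
#   health.refresh()                 # re-query the cluster when needed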
|
"""engine.SCons.Tool.icc
Tool-specific initialization for the OS/2 icc compiler.
There normally shouldn't be any need to import this module directly.
It will usually be imported through the generic SCons.Tool.Tool()
selection method.
"""
#
# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Tool/icc.py 4369 2009/09/19 15:58:29 scons"
import cc
def generate(env):
"""Add Builders and construction variables for the OS/2 to an Environment."""
cc.generate(env)
env['CC'] = 'icc'
env['CCCOM'] = '$CC $CFLAGS $CCFLAGS $CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS /c $SOURCES /Fo$TARGET'
env['CXXCOM'] = '$CXX $CXXFLAGS $CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS /c $SOURCES /Fo$TARGET'
env['CPPDEFPREFIX'] = '/D'
env['CPPDEFSUFFIX'] = ''
env['INCPREFIX'] = '/I'
env['INCSUFFIX'] = ''
env['CFILESUFFIX'] = '.c'
env['CXXFILESUFFIX'] = '.cc'
def exists(env):
return env.Detect('icc')
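# Hedged usage note (illustrative): a SConstruct would normally pull this tool in with
#   env = Environment(tools=['icc'])
# which invokes generate(env) above and installs the OS/2 icc command lines.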
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
|
import os
COV = None
if os.environ.get('FLASK_COVERAGE'):
import coverage
COV = coverage.coverage(branch=True, include='app/*')
COV.start()
# if os.path.exists('.env'):
# print('Importing environment from .env...')
# for line in open('.env'):
# var = line.strip().split('=')
# if len(var) == 2:
# os.environ[var[0]] = var[1]
from app import create_app, db
from app.models import User, Role, AccessToken, WechatUser
from flask_script import Manager, Shell
from flask_migrate import Migrate, MigrateCommand
import logging
logging.basicConfig(level=logging.INFO)
app = create_app(os.getenv("FLASK_CONFIG") or 'default')
manager = Manager(app)
migrate = Migrate(app, db)
def make_shell_context():
return dict(app=app, db=db, User=User, Role=Role, AccessToken=AccessToken, WechatUser=WechatUser)
manager.add_command("shell", Shell(make_context=make_shell_context))
manager.add_command('db', MigrateCommand)
@manager.command
def deploy():
"""Run deployment tasks."""
from flask_migrate import upgrade
from app.models import Role, User
# migrate database to latest revision
upgrade()
# create user roles
Role.insert_roles()
User.add_self_follows()
@manager.command
def test(coverage=False):
"""Run the unit tests."""
if coverage and not os.environ.get('FLASK_COVERAGE'):
import sys
os.environ['FLASK_COVERAGE'] = '1'
os.execvp(sys.executable, [sys.executable] + sys.argv)
import unittest
tests = unittest.TestLoader().discover('tests')
unittest.TextTestRunner(verbosity=2).run(tests)
if COV:
COV.stop()
COV.save()
print('Coverage Summary:')
COV.report()
basedir = os.path.abspath(os.path.dirname(__file__))
covdir = os.path.join(basedir, 'tmp/coverage')
COV.html_report(directory=covdir)
print('HTML version: file://%s/index.html' % covdir)
COV.erase()
if __name__ == '__main__':
manager.run()
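# Hedged usage note (the script name manage.py is assumed): typical Flask-Script invocations
#   python manage.py shell              # shell preloaded with app, db and the models
#   python manage.py db upgrade         # Flask-Migrate migrations
#   python manage.py test --coverage    # unit tests with an HTML coverage report
#   python manage.py deploy             # run the deployment tasks defined above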
|
# Copyright 2015 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""OpenSSL Crypto-related routines for oauth2client."""
from OpenSSL import crypto
from oauth2client._helpers import _parse_pem_key
from oauth2client._helpers import _to_bytes
class OpenSSLVerifier(object):
"""Verifies the signature on a message."""
def __init__(self, pubkey):
"""Constructor.
Args:
pubkey: OpenSSL.crypto.PKey, The public key to verify with.
"""
self._pubkey = pubkey
def verify(self, message, signature):
"""Verifies a message against a signature.
Args:
message: string or bytes, The message to verify. If string, will be
encoded to bytes as utf-8.
signature: string or bytes, The signature on the message. If string,
will be encoded to bytes as utf-8.
Returns:
True if message was signed by the private key associated with the
public key that this object was constructed with.
"""
message = _to_bytes(message, encoding='utf-8')
signature = _to_bytes(signature, encoding='utf-8')
try:
crypto.verify(self._pubkey, signature, message, 'sha256')
return True
except crypto.Error:
return False
@staticmethod
    def from_string(key_pem, is_x509_cert):
        """Construct a Verifier instance from a string.
Args:
key_pem: string, public key in PEM format.
is_x509_cert: bool, True if key_pem is an X509 cert, otherwise it
is expected to be an RSA key in PEM format.
Returns:
Verifier instance.
Raises:
OpenSSL.crypto.Error: if the key_pem can't be parsed.
"""
key_pem = _to_bytes(key_pem)
if is_x509_cert:
pubkey = crypto.load_certificate(crypto.FILETYPE_PEM, key_pem)
else:
pubkey = crypto.load_privatekey(crypto.FILETYPE_PEM, key_pem)
return OpenSSLVerifier(pubkey)
class OpenSSLSigner(object):
"""Signs messages with a private key."""
def __init__(self, pkey):
"""Constructor.
Args:
pkey: OpenSSL.crypto.PKey (or equiv), The private key to sign with.
"""
self._key = pkey
def sign(self, message):
"""Signs a message.
Args:
message: bytes, Message to be signed.
Returns:
string, The signature of the message for the given key.
"""
message = _to_bytes(message, encoding='utf-8')
return crypto.sign(self._key, message, 'sha256')
@staticmethod
def from_string(key, password=b'notasecret'):
"""Construct a Signer instance from a string.
Args:
key: string, private key in PKCS12 or PEM format.
password: string, password for the private key file.
Returns:
Signer instance.
Raises:
OpenSSL.crypto.Error if the key can't be parsed.
"""
key = _to_bytes(key)
parsed_pem_key = _parse_pem_key(key)
if parsed_pem_key:
pkey = crypto.load_privatekey(crypto.FILETYPE_PEM, parsed_pem_key)
else:
password = _to_bytes(password, encoding='utf-8')
pkey = crypto.load_pkcs12(key, password).get_privatekey()
return OpenSSLSigner(pkey)
def pkcs12_key_as_pem(private_key_bytes, private_key_password):
"""Convert the contents of a PKCS#12 key to PEM using pyOpenSSL.
Args:
private_key_bytes: Bytes. PKCS#12 key in DER format.
private_key_password: String. Password for PKCS#12 key.
Returns:
String. PEM contents of ``private_key_bytes``.
"""
private_key_password = _to_bytes(private_key_password)
pkcs12 = crypto.load_pkcs12(private_key_bytes, private_key_password)
return crypto.dump_privatekey(crypto.FILETYPE_PEM,
pkcs12.get_privatekey())
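# Hedged usage sketch (PRIVATE_KEY_PEM / PUBLIC_CERT_PEM are placeholders you must supply,
# so this is kept as comments): sign with the private key, verify with the matching cert.
#   signer = OpenSSLSigner.from_string(PRIVATE_KEY_PEM)
#   signature = signer.sign(b'some message')
#   verifier = OpenSSLVerifier.from_string(PUBLIC_CERT_PEM, is_x509_cert=True)
#   assert verifier.verify(b'some message', signature)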
|
from collections import defaultdict
import networkx
import numpy
from gtfspy.routing.connection import Connection
from gtfspy.routing.abstract_routing_algorithm import AbstractRoutingAlgorithm
from gtfspy.routing.node_profile_multiobjective import NodeProfileMultiObjective
from gtfspy.routing.label import merge_pareto_frontiers, LabelTimeWithBoardingsCount, LabelTime, compute_pareto_front, \
LabelVehLegCount, LabelTimeBoardingsAndRoute, LabelTimeAndRoute
from gtfspy.util import timeit, graph_has_node
class MultiObjectivePseudoCSAProfiler(AbstractRoutingAlgorithm):
"""
Implementation of the profile connection scan algorithm presented in
http://i11www.iti.uni-karlsruhe.de/extra/publications/dpsw-isftr-13.pdf
"""
def __init__(self,
transit_events,
targets,
start_time_ut=None,
end_time_ut=None,
transfer_margin=0,
walk_network=None,
walk_speed=1.5,
verbose=False,
track_vehicle_legs=True,
track_time=True,
track_route=False):
"""
Parameters
----------
transit_events: list[Connection]
events are assumed to be ordered in DECREASING departure_time (!)
targets: int, list
index of the target stop
start_time_ut : int, optional
start time in unixtime seconds
end_time_ut: int, optional
end time in unixtime seconds (no connections will be scanned after this time)
transfer_margin: int, optional
required extra margin required for transfers in seconds
walk_speed: float, optional
walking speed between stops in meters / second.
walk_network: networkx.Graph, optional
each edge should have the walking distance as a data attribute ("distance_shape") expressed in meters
verbose: boolean, optional
whether to print out progress
track_vehicle_legs: boolean, optional
whether to consider the number of vehicle legs
track_time: boolean, optional
whether to consider time in the set of pareto_optimal
"""
AbstractRoutingAlgorithm.__init__(self)
assert (len(transit_events) == len(set(transit_events))), "Duplicate transit events spotted!"
self._transit_connections = transit_events
if start_time_ut is None:
start_time_ut = transit_events[-1].departure_time
if end_time_ut is None:
end_time_ut = transit_events[0].departure_time
self._start_time = start_time_ut
self._end_time = end_time_ut
self._transfer_margin = transfer_margin
if walk_network is None:
walk_network = networkx.Graph()
self._walk_network = walk_network
self._walk_speed = walk_speed
self._verbose = verbose
# algorithm internals
# initialize stop_profiles
self._count_vehicle_legs = track_vehicle_legs
self._consider_time = track_time
assert(track_time or track_vehicle_legs)
if track_vehicle_legs:
if track_time:
if track_route:
self._label_class = LabelTimeBoardingsAndRoute
else:
self._label_class = LabelTimeWithBoardingsCount
else:
self._label_class = LabelVehLegCount
else:
if track_route:
self._label_class = LabelTimeAndRoute
else:
self._label_class = LabelTime
print("using label:", str(self._label_class))
self._stop_departure_times, self._stop_arrival_times = self.__compute_stop_dep_and_arrival_times()
self._all_nodes = set.union(set(self._stop_departure_times.keys()),
set(self._stop_arrival_times.keys()),
set(self._walk_network.nodes()))
self._pseudo_connections = self.__compute_pseudo_connections()
self._add_pseudo_connection_departures_to_stop_departure_times()
self._all_connections = self._pseudo_connections + self._transit_connections
self._all_connections.sort(key=lambda connection: (-connection.departure_time, -connection.seq))
self._augment_all_connections_with_arrival_stop_next_dep_time()
if isinstance(targets, list):
self._targets = targets
else:
self._targets = [targets]
self.reset(self._targets)
@timeit
def _add_pseudo_connection_departures_to_stop_departure_times(self):
self._stop_departure_times_with_pseudo_connections = dict(self._stop_departure_times)
for node in self._all_nodes:
if node not in self._stop_departure_times_with_pseudo_connections:
self._stop_departure_times_with_pseudo_connections[node] = list()
for key, value in self._stop_departure_times_with_pseudo_connections.items():
self._stop_departure_times_with_pseudo_connections[key] = list(value)
for pseudo_connection in self._pseudo_connections:
assert(isinstance(pseudo_connection, Connection))
self._stop_departure_times_with_pseudo_connections[pseudo_connection.departure_stop]\
.append(pseudo_connection.departure_time)
for stop, dep_times in self._stop_departure_times_with_pseudo_connections.items():
self._stop_departure_times_with_pseudo_connections[stop] = numpy.array(list(sorted(set(dep_times))))
@timeit
def __initialize_node_profiles(self):
self._stop_profiles = dict()
for node in self._all_nodes:
walk_duration_to_target = float('inf')
closest_target = None
if node in self._targets:
walk_duration_to_target = 0
closest_target = node
else:
for target in self._targets:
if self._walk_network.has_edge(target, node):
edge_data = self._walk_network.get_edge_data(target, node)
walk_duration = int(edge_data["d_walk"] / float(self._walk_speed))
if walk_duration_to_target > walk_duration:
walk_duration_to_target = walk_duration
closest_target = target
self._stop_profiles[node] = NodeProfileMultiObjective(dep_times=self._stop_departure_times_with_pseudo_connections[node],
label_class=self._label_class,
walk_to_target_duration=walk_duration_to_target,
transit_connection_dep_times=self._stop_departure_times[node],
closest_target=closest_target,
node_id=node)
@timeit
def __compute_stop_dep_and_arrival_times(self):
stop_departure_times = defaultdict(lambda: list())
stop_arrival_times = defaultdict(lambda: list())
for connection in self._transit_connections:
stop_arrival_times[connection.arrival_stop].append(connection.arrival_time)
stop_departure_times[connection.departure_stop].append(connection.departure_time)
for stop in stop_departure_times:
stop_departure_times[stop] = numpy.array(sorted(list(set(stop_departure_times[stop]))))
for stop in stop_arrival_times:
stop_arrival_times[stop] = numpy.array(sorted(list(set(stop_arrival_times[stop]))))
return stop_departure_times, stop_arrival_times
@timeit
def __compute_pseudo_connections(self):
print("Started computing pseudoconnections")
pseudo_connections = []
# DiGraph makes things iterate both ways (!)
for u, v, data in networkx.DiGraph(self._walk_network).edges(data=True):
walk_duration = int(data["d_walk"] / float(self._walk_speed)) # round to one second accuracy
total_walk_time_with_transfer = walk_duration + self._transfer_margin
in_times = self._stop_arrival_times[u]
out_times = self._stop_departure_times[v]
j = 0
n_in_times = len(in_times)
n_out_times = len(out_times)
if n_in_times == 0 or n_out_times == 0:
continue
i = 0
while i < n_in_times and j < n_out_times:
if in_times[i] + total_walk_time_with_transfer > out_times[j]:
j += 1 # connection j cannot be reached -> need to check next j -> increase out_time
else:
# if next element still satisfies the wanted condition, go on and increase i!
while i + 1 < n_in_times and in_times[i + 1] + total_walk_time_with_transfer < out_times[j]:
i += 1
dep_time = in_times[i]
arr_time = out_times[j]
from_stop = u
to_stop = v
waiting_time = arr_time - dep_time - total_walk_time_with_transfer
assert(waiting_time >= 0)
pseudo = Connection(from_stop, to_stop, arr_time - walk_duration, arr_time,
Connection.WALK_TRIP_ID,
Connection.WALK_SEQ,
is_walk=True)
pseudo_connections.append(pseudo)
i += 1
print("Computed pseudoconnections")
return pseudo_connections
@timeit
def _augment_all_connections_with_arrival_stop_next_dep_time(self):
for connection in self._all_connections:
assert(isinstance(connection, Connection))
to_stop = connection.arrival_stop
arr_stop_dep_times = self._stop_departure_times_with_pseudo_connections[to_stop]
arr_stop_next_dep_time = float('inf')
if len(arr_stop_dep_times) > 0:
if connection.is_walk:
index = numpy.searchsorted(arr_stop_dep_times, connection.arrival_time)
else:
index = numpy.searchsorted(arr_stop_dep_times, connection.arrival_time + self._transfer_margin)
if 0 <= index < len(arr_stop_dep_times):
arr_stop_next_dep_time = arr_stop_dep_times[index]
if connection.is_walk and not (arr_stop_next_dep_time < float('inf')):
assert (arr_stop_next_dep_time < float('inf'))
connection.arrival_stop_next_departure_time = arr_stop_next_dep_time
def _get_modified_arrival_node_labels(self, connection):
# get all different "accessible" / arrival times (Pareto-optimal sets)
arrival_profile = self._stop_profiles[connection.arrival_stop] # NodeProfileMultiObjective
assert (isinstance(arrival_profile, NodeProfileMultiObjective))
arrival_node_labels_orig = arrival_profile.evaluate(connection.arrival_stop_next_departure_time,
first_leg_can_be_walk=not connection.is_walk,
connection_arrival_time=connection.arrival_time)
increment_vehicle_count = (self._count_vehicle_legs and not connection.is_walk)
# TODO: (?) this copying / modification logic should be moved to the Label / ForwardJourney class ?
arrival_node_labels_modified = self._copy_and_modify_labels(
arrival_node_labels_orig,
connection,
increment_vehicle_count=increment_vehicle_count,
first_leg_is_walk=connection.is_walk
)
if connection.is_walk:
connection.is_walk = True
arrival_node_labels_modified = compute_pareto_front(arrival_node_labels_modified)
return arrival_node_labels_modified
def _get_trip_labels(self, connection):
# best labels from this current trip
if not connection.is_walk:
trip_labels = self._copy_and_modify_labels(self.__trip_labels[connection.trip_id],
connection,
increment_vehicle_count=False,
first_leg_is_walk=False)
else:
trip_labels = list()
return trip_labels
@timeit
def _run(self):
previous_departure_time = float("inf")
n_connections_tot = len(self._all_connections)
for i, connection in enumerate(self._all_connections):
# basic checking + printing progress:
if self._verbose and i % 1000 == 0:
print("\r", i, "/", n_connections_tot, " : ", "%.2f" % round(float(i) / n_connections_tot, 3), end='', flush=True)
assert (isinstance(connection, Connection))
assert (connection.departure_time <= previous_departure_time)
previous_departure_time = connection.departure_time
# Get labels from the stop (possibly subject to buffer time)
arrival_node_labels = self._get_modified_arrival_node_labels(connection)
# This is for the labels staying "in the vehicle"
trip_labels = self._get_trip_labels(connection)
# Then, compute Pareto-frontier of these alternatives:
all_pareto_optimal_labels = merge_pareto_frontiers(arrival_node_labels, trip_labels)
# Update labels for this trip
if not connection.is_walk:
self.__trip_labels[connection.trip_id] = all_pareto_optimal_labels
# Update labels for the departure stop profile (later: with the sets of pareto-optimal labels)
self._stop_profiles[connection.departure_stop].update(all_pareto_optimal_labels,
connection.departure_time)
print("finalizing profiles!")
self._finalize_profiles()
def _finalize_profiles(self):
"""
Deal with the first walks by joining profiles to other stops within walking distance.
"""
for stop, stop_profile in self._stop_profiles.items():
assert (isinstance(stop_profile, NodeProfileMultiObjective))
neighbor_label_bags = []
walk_durations_to_neighbors = []
departure_arrival_stop_pairs = []
if stop_profile.get_walk_to_target_duration() != 0 and graph_has_node(self._walk_network, stop):
neighbors = networkx.all_neighbors(self._walk_network, stop)
for neighbor in neighbors:
neighbor_profile = self._stop_profiles[neighbor]
assert (isinstance(neighbor_profile, NodeProfileMultiObjective))
neighbor_real_connection_labels = neighbor_profile.get_labels_for_real_connections()
neighbor_label_bags.append(neighbor_real_connection_labels)
walk_durations_to_neighbors.append(int(self._walk_network.get_edge_data(stop, neighbor)["d_walk"] /
self._walk_speed))
departure_arrival_stop_pairs.append((stop, neighbor))
stop_profile.finalize(neighbor_label_bags, walk_durations_to_neighbors, departure_arrival_stop_pairs)
@property
def stop_profiles(self):
"""
Returns
-------
_stop_profiles : dict[int, NodeProfileMultiObjective]
The pareto tuples necessary.
"""
assert self._has_run
return self._stop_profiles
def _copy_and_modify_labels(self, labels, connection, increment_vehicle_count=False, first_leg_is_walk=False):
if self._label_class == LabelTimeBoardingsAndRoute or self._label_class == LabelTimeAndRoute:
labels_copy = [label.get_label_with_connection_added(connection) for label in labels]
else:
labels_copy = [label.get_copy() for label in labels]
for label in labels_copy:
label.departure_time = connection.departure_time
if self._label_class == LabelTimeAndRoute or self._label_class == LabelTimeBoardingsAndRoute:
label.movement_duration += connection.duration()
if increment_vehicle_count:
label.n_boardings += 1
label.first_leg_is_walk = first_leg_is_walk
return labels_copy
def reset(self, targets):
if isinstance(targets, list):
self._targets = targets
else:
self._targets = [targets]
for target in targets:
assert(target in self._all_nodes)
self.__initialize_node_profiles()
self.__trip_labels = defaultdict(lambda: list())
self._has_run = False
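# Hedged usage sketch (event list and walk network construction are assumed; in real
# gtfspy pipelines they come from a GTFS database): the profiler expects transit events
# sorted by DECREASING departure time plus one or more target stop ids.
#   profiler = MultiObjectivePseudoCSAProfiler(transit_events, targets=[target_stop],
#                                              transfer_margin=120, walk_network=walk_net,
#                                              walk_speed=1.5, track_vehicle_legs=True)
#   profiler.run()
#   profiles = profiler.stop_profiles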
|
#! /usr/bin/python
#Copyright ReportLab Europe Ltd. 2000-2012
#see license.txt for license details
#history http://www.reportlab.co.uk/cgi-bin/viewcvs.cgi/public/reportlab/trunk/reportlab/lib/formatters.py
__all__=('Formatter','DecimalFormatter')
__version__=''' $Id$ '''
__doc__="""
These help format numbers and dates in a user friendly way.
Used by the graphics framework.
"""
import string, sys, os, re
class Formatter:
"Base formatter - simply applies python format strings"
def __init__(self, pattern):
self.pattern = pattern
def format(self, obj):
return self.pattern % obj
def __repr__(self):
return "%s('%s')" % (self.__class__.__name__, self.pattern)
def __call__(self, x):
return self.format(x)
_ld_re=re.compile(r'^\d*\.')
_tz_re=re.compile('0+$')
class DecimalFormatter(Formatter):
"""lets you specify how to build a decimal.
A future NumberFormatter class will take Microsoft-style patterns
instead - "$#,##0.00" is WAY easier than this."""
def __init__(self, places=2, decimalSep='.', thousandSep=None, prefix=None, suffix=None):
if places=='auto':
self.calcPlaces = self._calcPlaces
else:
self.places = places
self.dot = decimalSep
self.comma = thousandSep
self.prefix = prefix
self.suffix = suffix
def _calcPlaces(self,V):
'''called with the full set of values to be formatted so we can calculate places'''
self.places = max([len(_tz_re.sub('',_ld_re.sub('',str(v)))) for v in V])
def format(self, num):
# positivize the numbers
sign=num<0
if sign:
num = -num
places, sep = self.places, self.dot
strip = places<=0
if places and strip: places = -places
strInt = ('%.' + str(places) + 'f') % num
if places:
strInt, strFrac = strInt.split('.')
strFrac = sep + strFrac
if strip:
while strFrac and strFrac[-1] in ['0',sep]: strFrac = strFrac[:-1]
else:
strFrac = ''
if self.comma is not None:
strNew = ''
while strInt:
left, right = strInt[0:-3], strInt[-3:]
if left == '':
#strNew = self.comma + right + strNew
strNew = right + strNew
else:
strNew = self.comma + right + strNew
strInt = left
strInt = strNew
strBody = strInt + strFrac
if sign: strBody = '-' + strBody
if self.prefix:
strBody = self.prefix + strBody
if self.suffix:
strBody = strBody + self.suffix
return strBody
def __repr__(self):
return "%s(places=%d, decimalSep=%s, thousandSep=%s, prefix=%s, suffix=%s)" % (
self.__class__.__name__,
self.places,
repr(self.dot),
repr(self.comma),
repr(self.prefix),
repr(self.suffix)
)
if __name__=='__main__':
def t(n, s, places=2, decimalSep='.', thousandSep=None, prefix=None, suffix=None):
f=DecimalFormatter(places,decimalSep,thousandSep,prefix,suffix)
r = f(n)
print("places=%2d dot=%-4s comma=%-4s prefix=%-4s suffix=%-4s result=%10s %s" %(f.places, f.dot, f.comma, f.prefix, f.suffix,r, r==s and 'OK' or 'BAD'))
t(1000.9,'1,000.9',1,thousandSep=',')
t(1000.95,'1,001.0',1,thousandSep=',')
t(1000.95,'1,001',-1,thousandSep=',')
t(1000.9,'1,001',0,thousandSep=',')
t(1000.9,'1000.9',1)
t(1000.95,'1001.0',1)
t(1000.95,'1001',-1)
t(1000.9,'1001',0)
t(1000.1,'1000.1',1)
t(1000.55,'1000.6',1)
t(1000.449,'1000.4',-1)
t(1000.45,'1000',0)
|
# coding=utf-8
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
from binascii import hexlify
from os.path import join
from pants.base.project_tree import Dir, File
from pants.engine.addressable import Collection
from pants.engine.rules import RootRule, rule
from pants.engine.selectors import Select
from pants.util.objects import datatype
class FileContent(datatype('FileContent', ['path', 'content'])):
"""The content of a file."""
def __repr__(self):
return 'FileContent(path={}, content=(len:{}))'.format(self.path, len(self.content))
def __str__(self):
return repr(self)
class Path(datatype('Path', ['path', 'stat'])):
"""A filesystem path, holding both its symbolic path name, and underlying canonical Stat.
Both values are relative to the ProjectTree's buildroot.
"""
class PathGlobs(datatype('PathGlobs', ['include', 'exclude'])):
"""A wrapper around sets of filespecs to include and exclude.
The syntax supported is roughly git's glob syntax.
"""
@staticmethod
def create(relative_to, include, exclude=tuple()):
"""Given various file patterns create a PathGlobs object (without using filesystem operations).
:param relative_to: The path that all patterns are relative to (which will itself be relative
to the buildroot).
    :param include: A list of filespecs to include.
    :param exclude: A list of filespecs to exclude.
:rtype: :class:`PathGlobs`
"""
return PathGlobs(tuple(join(relative_to, f) for f in include),
tuple(join(relative_to, f) for f in exclude))
class Snapshot(datatype('Snapshot', ['fingerprint', 'path_stats'])):
"""A Snapshot is a collection of Files and Dirs fingerprinted by their names/content.
Snapshots are used to make it easier to isolate process execution by fixing the contents
of the files being operated on and easing their movement to and from isolated execution
sandboxes.
"""
@property
def dirs(self):
return [p for p in self.path_stats if type(p.stat) == Dir]
@property
def dir_stats(self):
return [p.stat for p in self.dirs]
@property
def files(self):
return [p for p in self.path_stats if type(p.stat) == File]
@property
def file_stats(self):
return [p.stat for p in self.files]
def __repr__(self):
return '''Snapshot(fingerprint='{}', entries={})'''.format(hexlify(self.fingerprint)[:8], len(self.path_stats))
def __str__(self):
return repr(self)
FilesContent = Collection.of(FileContent)
@rule(Snapshot, [Select(PathGlobs)])
def snapshot_noop(*args):
raise Exception('This task is replaced intrinsically, and should never run.')
@rule(FilesContent, [Select(Snapshot)])
def files_content_noop(*args):
raise Exception('This task is replaced intrinsically, and should never run.')
def create_fs_rules():
"""Creates rules that consume the intrinsic filesystem types."""
return [
files_content_noop,
snapshot_noop,
RootRule(PathGlobs),
]
|
#!/usr/bin/env python
# Copyright 2016 Battelle Energy Alliance, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import unicode_literals, absolute_import
import os, sys, argparse
# Need to add parent directory to the path so that imports work
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.realpath(__file__))))
import socket
from client import INLClient
from DaemonLite import DaemonLite
def commandline_client(args):
parser = argparse.ArgumentParser()
parser.add_argument('--client', dest='client', type=int, help='The number of the client.', required=True)
parser.add_argument('--daemon',
dest='daemon',
choices=['start', 'stop', 'restart', 'none'],
help="Start a UNIX daemon.",
required=True)
parsed = parser.parse_args(args)
home = os.environ.get("CIVET_HOME", os.path.join(os.environ["HOME"], "civet"))
build_root = '{}/build_{}'.format(home, parsed.client)
# The only place the client uses BUILD_ROOT is in JobRunner, which replaces environment
# variable values that start with BUILD_ROOT with this value.
os.environ['BUILD_ROOT'] = build_root
log_dir = '{}/logs'.format(home)
client_name = '{}_{}'.format(socket.gethostname(), parsed.client)
client_info = {"url": "",
"client_name": client_name,
"server": "",
"servers": [],
"configs": [],
"ssl_verify": False,
"ssl_cert": "",
"log_file": "",
"log_dir": log_dir,
"build_key": "",
"single_shot": False,
"poll": 30,
"daemon_cmd": parsed.daemon,
"request_timeout": 120,
"update_step_time": 20,
"server_update_timeout": 5,
# This needs to be bigger than update_step_time so that
# the ping message doesn't become the default message
"server_update_interval": 40,
"max_output_size": 5*1024*1024,
}
c = INLClient.INLClient(client_info)
return c, parsed.daemon
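# A hedged usage sketch of commandline_client above (argument values are illustrative):
#   client, daemon_cmd = commandline_client(['--client', '0', '--daemon', 'none'])
# builds an INLClient whose BUILD_ROOT is $CIVET_HOME/build_0 and whose logs go to
# $CIVET_HOME/logs; daemon_cmd is then dispatched by call_daemon() below.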
class ClientDaemon(DaemonLite):
def run(self):
self.client.run()
def set_client(self, client):
self.client = client
def call_daemon(client, cmd):
home = os.environ.get("CIVET_HOME", os.path.join(os.environ["HOME"], "civet"))
pfile = os.path.join(home, 'civet_client_%s.pid' % client.client_info["client_name"])
client_daemon = ClientDaemon(pfile, stdout=client.client_info["log_file"], stderr=client.client_info["log_file"])
client_daemon.set_client(client)
if cmd == 'restart':
client_daemon.restart()
elif cmd == 'stop':
client_daemon.stop()
elif cmd == 'start':
client_daemon.start()
print('started')
elif cmd == 'none':
client.run()
def main(args):
client, daemon_cmd = commandline_client(args)
call_daemon(client, daemon_cmd)
if __name__ == "__main__":
main(sys.argv[1:])
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, print_function, unicode_literals
from datetime import time
from dateutil import parser
from elastalert import alerts
from kombu import Connection, Exchange
from kombu.pools import producers
from os import environ, path
from pytz import timezone
utc = timezone('UTC')
def parse_time(s):
if not s:
return None
return time(*tuple(map(int, s.split(':'))))
class InTimeframe(object):
def __init__(self, timestamp_field, schedule):
self.timestamp_field = timestamp_field
self.tz = timezone(schedule.get('timezone', 'UTC'))
self.from_ = parse_time(schedule.get('from', None))
self.to = parse_time(schedule.get('to', None))
def __call__(self, entry):
dt = parser.parse(entry[self.timestamp_field])
if not dt.tzinfo:
dt = utc.localize(dt)
t = dt.astimezone(self.tz).time()
if self.from_ and t < self.from_:
return False
if self.to and t > self.to:
return False
return True
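# A minimal sketch of how InTimeframe is applied (hypothetical rule values):
#   in_tf = InTimeframe('@timestamp', {'timezone': 'UTC', 'from': '09:00', 'to': '17:00'})
#   in_tf({'@timestamp': '2016-01-01T10:30:00Z'})   # True, inside the window
#   in_tf({'@timestamp': '2016-01-01T20:30:00Z'})   # False, later than 'to'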
class ScheduledAlerter(object):
def __init__(self, rule):
self.alerter = next((x for x in self.__class__.__bases__
if issubclass(x, alerts.Alerter)),
None)
if self.alerter:
self.alerter.__init__(self, rule)
self.in_timeframe = InTimeframe(
timestamp_field=rule['timestamp_field'],
schedule=rule.get('schedule', {}))
def alert(self, matches):
matches = filter(self.in_timeframe, matches)
if matches:
self.alerter.alert(self, matches)
def get_info(self):
if self.alerter:
info = self.alerter.get_info(self)
else:
info = {'type': ''}
info['type'] = 'scheduled_{}'.format(info['type'])
return info
class ScheduledDebugAlerter(ScheduledAlerter, alerts.DebugAlerter):
pass
class AmqpAlerter(alerts.Alerter):
""" The amqp alerter publishes alerts via amqp to a broker. """
def __init__(self, rule):
super(AmqpAlerter, self).__init__(rule)
params = {
'host': self.get_param('amqp_host', 'mq'),
'port': int(self.get_param('amqp_port', '5672')),
'vhost': self.get_param('amqp_vhost', '/'),
'username': self.get_param('amqp_username', 'guest'),
'password': self.get_param('amqp_password', None),
}
if not params['password']:
with open(path.join('/', 'config', params['username']), 'r') as pwd_file:
params['password'] = pwd_file.read().strip()
self._url = (
'amqp://{username}:{password}@{host}:{port}/{vhost}'
.format(**params)
)
exchange = self.get_param('amqp_exchange', 'alert')
self._exchange = Exchange(exchange, type='fanout')
self._routing_key = self.get_param('amqp_routing_key', 'alert')
self._conn = None
def get_param(self, name, default):
environ_name = name.upper()
return self.rule.get(name, environ.get(environ_name, default))
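# Lookup precedence sketch for get_param above (illustrative values): the rule entry wins
# over the environment variable, which wins over the default.
#   rule = {'amqp_host': 'broker1'}   -> get_param('amqp_host', 'mq')   == 'broker1'
#   environ['AMQP_PORT'] = '5673'     -> get_param('amqp_port', '5672') == '5673'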
def alert(self, matches):
body = {
'rule': self.rule['name'],
'matches': matches,
}
with producers[self.conn()].acquire(block=True) as producer:
for match in matches:
body = {
'rule': self.rule['name'],
'match': match,
}
producer.publish(body,
serializer='json',
exchange=self._exchange,
routing_key=self._routing_key)
def conn(self):
if not self._conn:
self._conn = Connection(self._url)
return self._conn
def get_info(self):
return {'type': 'amqp'}
|
from django import template
from django.core.urlresolvers import reverse
from sophie.utils import multiblog_enabled
register = template.Library()
@register.inclusion_tag('sophie/templatetags/lists_category_of.tag')
def sophie_lists_category_of(blog):
return {'category_list': blog.get_categories()}
@register.inclusion_tag('sophie/templatetags/shows_feed_of.tag')
def sophie_shows_feed_of(blog):
return { 'blog': blog }
@register.inclusion_tag('sophie/templatetags/links_siblings_of.tag')
def sophie_links_siblings_of(page, blog, urlname, part_slug=None):
# conditional operator hack: xx and yy or zz == xx ? yy : zz
url_bits = multiblog_enabled and { 'blog_slug': blog.slug } or {}
# urls are named following this convention:
# sophie_[part name]_[page type]_url
# which is taken advantage of here:
urlparts = urlname.split('_')
# if the type is 'details', then its url contains a slug of the part
if urlparts[2] == 'details':
# django.core.urlresolvers.reverse() does not accept unicode keywords
# which is why this part needs encoding
url_bits[('%s_slug' % urlparts[1]).encode('utf8')] = part_slug
# Note that previous_page_number() is dumb, it returns the number
# regardless of whether that page exists, same with next_page_number.
# So, this needs to be guarded in the template
url_bits['page_num'] = page.previous_page_number()
previous_link = reverse( urlname, kwargs=url_bits )
url_bits['page_num'] = page.next_page_number()
next_link = reverse( urlname, kwargs=url_bits )
return {
'previous_link': previous_link,
'next_link': next_link,
'page': page,
}
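# Illustrative sketch of the URL naming convention assumed above (hypothetical name):
#   urlname = 'sophie_category_details_url' -> urlparts == ['sophie', 'category', 'details', 'url']
# so urlparts[2] == 'details' and url_bits gains a 'category_slug' entry before reverse() runs.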
@register.inclusion_tag('sophie/templatetags/lists_entries_in.tag')
def sophie_lists_entries_in(entries, blog, empty_msg = ''):
return {
'entries': entries,
'blog': blog,
'empty_msg': empty_msg,
}
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2017 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <[email protected]>
#
# The licence is in the file __manifest__.py
#
##############################################################################
from odoo import api, models, _
from odoo.exceptions import UserError
class PartnerMergeWizard(models.TransientModel):
_inherit = 'base.partner.merge.automatic.wizard'
@api.multi
def action_merge(self):
"""
- Allow anybody to perform the merge of partners
- Prevent geoengine bugs
- Prevent to merge sponsors
- Save other e-mail addresses in linked partners
"""
removing = self.partner_ids - self.dst_partner_id
geo_point = self.dst_partner_id.geo_point
self.partner_ids.write({'geo_point': False})
sponsorships = self.env['recurring.contract'].search([
('correspondent_id', 'in', removing.ids),
('state', '=', 'active'),
('type', 'like', 'S')
])
if sponsorships:
raise UserError(_(
"The selected partners are sponsors! "
"Please first modify the sponsorship and don't forget "
"to send new labels to them."
))
old_emails = removing.filtered('email').mapped('email')
new_email = self.dst_partner_id.email
for email in old_emails:
if new_email and email != new_email:
self.dst_partner_id.copy({
'contact_id': self.dst_partner_id.id,
'email': email,
'type': 'email_alias'
})
res = super(PartnerMergeWizard, self.sudo()).action_merge()
self.dst_partner_id.geo_point = geo_point
return res
|
# coding: utf-8
from attest import Tests, assert_hook, raises
import camxes
parse = Tests()
@parse.context
def parse_tree():
yield camxes.parse("coi rodo mi cipra loka na cfila la camxes")
@parse.test
def ast(pt):
assert pt.free[0].CMAVO[0].COI[0][0] == "coi"
assert pt.sentence[0].bridiTail3[0].BRIVLA[0].gismu[0][0] == "cipra"
@parse.test
def index(pt):
assert pt[0][0][0][0] == "coi"
assert pt[1][1][0][0][0] == "cipra"
@parse.test
def node_names(pt):
assert pt[0].name == 'free'
assert pt[1].name == 'sentence'
@parse.test
def filter(pt):
nodes = pt.filter(lambda node: getattr(node, 'name', None) == 'cmene')
node = list(nodes)[0][0]
assert node == "camxes"
@parse.test
def find(pt):
assert pt.find('cmene')[0][0] == "camxes"
assert pt.find('COI', 'PA') == pt.find('COI') + pt.find('PA')
assert pt['cmene'] is pt.find('cmene')[0]
with raises(KeyError):
pt['']
@parse.test
def leafs(pt):
assert ' '.join(pt.find('sumti6')[0].leafs) == "lo ka na cfila la camxes"
@parse.test
def branches(pt):
assert pt.branches("lo") == pt.find('sumti6')
assert pt.branches("ro", "do") == pt.find('free')
@parse.test
def primitive(pt):
node = pt.find('sumti5')[0]
assert node.primitive == \
('sumti5',
[('CMAVO', [('PA', ["ro"])]),
('CMAVO', [('KOhA', ["do"])])])
@parse.test
def brackets(pt):
assert pt['sumti6'].brackets() == u'(lo [ka <{na cfila} «la camxes»>])'
@parse.test
def node_repr(pt):
assert repr(pt.find('cmene')[0]) == "<cmene {camxes}>"
spaces = Tests()
@spaces.context
def parse_trees_from_outer_space():
yield camxes.parse("coi rodo!"), camxes.parse("coi rodo!", spaces=True)
@spaces.test
def space_leafs(nospaces, withspaces):
assert nospaces.leafs == ["coi", "ro", "do"]
assert withspaces.leafs == ["coi", " ", "ro", "do", "!"]
@spaces.test
def lojban(nospaces, withspaces):
assert nospaces.lojban == "coi ro do"
assert withspaces.lojban == "coi rodo!"
morphology = Tests()
@morphology.test
def non_lojban():
assert camxes.morphology("jbo")[0].name == 'nonLojbanWord'
@morphology.test
def affixes():
compounds = {
"ba'argau": ("ba'a", "r", "gau"),
"ba'armo'a": ("ba'a", "r", "mo'a"),
"ba'ostu": ("ba'o", "stu"),
"ba'urtadji": ("ba'u", "r", "tadj"),
"backemselrerkru": ("bac", "kem", "sel", "rer", "kru"),
"backla": ("bac", "kla"),
"bacycripu": ("bac", "y", "crip"),
}
for compound, affixes in compounds.iteritems():
assert camxes.decompose(compound) == affixes
not_compounds = ("camxes", "coi", "donri", "sfe'ero")
for noncompound in not_compounds:
with raises(ValueError):
camxes.decompose(noncompound)
grammar = Tests()
@grammar.test
def grammatical():
assert camxes.isgrammatical("coi rodo")
@grammar.test
def ungrammatical():
assert not camxes.isgrammatical("coi '")
all = Tests([parse, spaces, morphology, grammar])
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.internal.actions import page_action
from telemetry.page import shared_page_state
from page_sets.rendering import rendering_story
from page_sets.rendering import story_tags
class ToughFastScrollingPage(rendering_story.RenderingStory):
ABSTRACT_STORY = True
SPEED_IN_PIXELS_PER_SECOND = None
SYNTHETIC_GESTURE_SOURCE = page_action.GESTURE_SOURCE_DEFAULT
TAGS = [story_tags.GPU_RASTERIZATION, story_tags.TOUGH_SCROLLING]
def __init__(self,
page_set,
shared_page_state_class=shared_page_state.SharedPageState,
name_suffix='',
extra_browser_args=None):
super(ToughFastScrollingPage, self).__init__(
page_set=page_set,
shared_page_state_class=shared_page_state_class,
name_suffix=name_suffix,
extra_browser_args=extra_browser_args)
def RunPageInteractions(self, action_runner):
with action_runner.CreateGestureInteraction('ScrollAction'):
action_runner.ScrollPage(
direction='down',
speed_in_pixels_per_second=self.SPEED_IN_PIXELS_PER_SECOND,
synthetic_gesture_source=self.SYNTHETIC_GESTURE_SOURCE)
class ScrollingText5000Page(ToughFastScrollingPage):
BASE_NAME = 'text_05000_pixels_per_second'
URL = 'file://../tough_scrolling_cases/text.html'
SPEED_IN_PIXELS_PER_SECOND = 5000
class ScrollingText10000Page(ToughFastScrollingPage):
BASE_NAME = 'text_10000_pixels_per_second'
URL = 'file://../tough_scrolling_cases/text.html'
SPEED_IN_PIXELS_PER_SECOND = 10000
TAGS = ToughFastScrollingPage.TAGS + [story_tags.REPRESENTATIVE_MOBILE]
class ScrollingText20000Page(ToughFastScrollingPage):
BASE_NAME = 'text_20000_pixels_per_second'
URL = 'file://../tough_scrolling_cases/text.html'
SPEED_IN_PIXELS_PER_SECOND = 20000
class ScrollingText40000Page(ToughFastScrollingPage):
BASE_NAME = 'text_40000_pixels_per_second'
URL = 'file://../tough_scrolling_cases/text.html'
SPEED_IN_PIXELS_PER_SECOND = 40000
class ScrollingText60000Page(ToughFastScrollingPage):
BASE_NAME = 'text_60000_pixels_per_second'
URL = 'file://../tough_scrolling_cases/text.html'
SPEED_IN_PIXELS_PER_SECOND = 60000
class ScrollingText75000Page(ToughFastScrollingPage):
BASE_NAME = 'text_75000_pixels_per_second'
URL = 'file://../tough_scrolling_cases/text.html'
SPEED_IN_PIXELS_PER_SECOND = 75000
class ScrollingText90000Page(ToughFastScrollingPage):
BASE_NAME = 'text_90000_pixels_per_second'
URL = 'file://../tough_scrolling_cases/text.html'
SPEED_IN_PIXELS_PER_SECOND = 90000
class ScrollingTextHover5000Page(ToughFastScrollingPage):
BASE_NAME = 'text_hover_05000_pixels_per_second'
URL = 'file://../tough_scrolling_cases/text_hover.html'
SPEED_IN_PIXELS_PER_SECOND = 5000
SYNTHETIC_GESTURE_SOURCE = page_action.GESTURE_SOURCE_MOUSE
class ScrollingTextHover10000Page(ToughFastScrollingPage):
BASE_NAME = 'text_hover_10000_pixels_per_second'
URL = 'file://../tough_scrolling_cases/text_hover.html'
SPEED_IN_PIXELS_PER_SECOND = 10000
SYNTHETIC_GESTURE_SOURCE = page_action.GESTURE_SOURCE_MOUSE
class ScrollingTextHover20000Page(ToughFastScrollingPage):
BASE_NAME = 'text_hover_20000_pixels_per_second'
URL = 'file://../tough_scrolling_cases/text_hover.html'
SPEED_IN_PIXELS_PER_SECOND = 20000
SYNTHETIC_GESTURE_SOURCE = page_action.GESTURE_SOURCE_MOUSE
class ScrollingTextHover40000Page(ToughFastScrollingPage):
BASE_NAME = 'text_hover_40000_pixels_per_second'
URL = 'file://../tough_scrolling_cases/text_hover.html'
SPEED_IN_PIXELS_PER_SECOND = 40000
SYNTHETIC_GESTURE_SOURCE = page_action.GESTURE_SOURCE_MOUSE
class ScrollingTextHover60000Page(ToughFastScrollingPage):
BASE_NAME = 'text_hover_60000_pixels_per_second'
URL = 'file://../tough_scrolling_cases/text_hover.html'
SPEED_IN_PIXELS_PER_SECOND = 60000
SYNTHETIC_GESTURE_SOURCE = page_action.GESTURE_SOURCE_MOUSE
class ScrollingTextHover75000Page(ToughFastScrollingPage):
BASE_NAME = 'text_hover_75000_pixels_per_second'
URL = 'file://../tough_scrolling_cases/text_hover.html'
SPEED_IN_PIXELS_PER_SECOND = 75000
SYNTHETIC_GESTURE_SOURCE = page_action.GESTURE_SOURCE_MOUSE
class ScrollingTextHover90000Page(ToughFastScrollingPage):
BASE_NAME = 'text_hover_90000_pixels_per_second'
URL = 'file://../tough_scrolling_cases/text_hover.html'
SPEED_IN_PIXELS_PER_SECOND = 90000
SYNTHETIC_GESTURE_SOURCE = page_action.GESTURE_SOURCE_MOUSE
class ScrollingTextRaster5000Page(ToughFastScrollingPage):
BASE_NAME = 'text_constant_full_page_raster_05000_pixels_per_second'
URL = 'file://../tough_scrolling_cases/text_constant_full_page_raster.html'
SPEED_IN_PIXELS_PER_SECOND = 5000
class ScrollingTextRaster10000Page(ToughFastScrollingPage):
BASE_NAME = 'text_constant_full_page_raster_10000_pixels_per_second'
URL = 'file://../tough_scrolling_cases/text_constant_full_page_raster.html'
SPEED_IN_PIXELS_PER_SECOND = 10000
class ScrollingTextRaster20000Page(ToughFastScrollingPage):
BASE_NAME = 'text_constant_full_page_raster_20000_pixels_per_second'
URL = 'file://../tough_scrolling_cases/text_constant_full_page_raster.html'
SPEED_IN_PIXELS_PER_SECOND = 20000
class ScrollingTextRaster40000Page(ToughFastScrollingPage):
BASE_NAME = 'text_constant_full_page_raster_40000_pixels_per_second'
URL = 'file://../tough_scrolling_cases/text_constant_full_page_raster.html'
SPEED_IN_PIXELS_PER_SECOND = 40000
class ScrollingTextRaster60000Page(ToughFastScrollingPage):
BASE_NAME = 'text_constant_full_page_raster_60000_pixels_per_second'
URL = 'file://../tough_scrolling_cases/text_constant_full_page_raster.html'
SPEED_IN_PIXELS_PER_SECOND = 60000
class ScrollingTextRaster75000Page(ToughFastScrollingPage):
BASE_NAME = 'text_constant_full_page_raster_75000_pixels_per_second'
URL = 'file://../tough_scrolling_cases/text_constant_full_page_raster.html'
SPEED_IN_PIXELS_PER_SECOND = 75000
class ScrollingTextRaster90000Page(ToughFastScrollingPage):
BASE_NAME = 'text_constant_full_page_raster_90000_pixels_per_second'
URL = 'file://../tough_scrolling_cases/text_constant_full_page_raster.html'
SPEED_IN_PIXELS_PER_SECOND = 90000
class ScrollingCanvas5000Page(ToughFastScrollingPage):
BASE_NAME = 'canvas_05000_pixels_per_second'
URL = 'file://../tough_scrolling_cases/canvas.html'
SPEED_IN_PIXELS_PER_SECOND = 5000
TAGS = ToughFastScrollingPage.TAGS + [
story_tags.REPRESENTATIVE_MOBILE,
story_tags.REPRESENTATIVE_MAC_DESKTOP
]
class ScrollingCanvas10000Page(ToughFastScrollingPage):
BASE_NAME = 'canvas_10000_pixels_per_second'
URL = 'file://../tough_scrolling_cases/canvas.html'
SPEED_IN_PIXELS_PER_SECOND = 10000
class ScrollingCanvas20000Page(ToughFastScrollingPage):
BASE_NAME = 'canvas_20000_pixels_per_second'
URL = 'file://../tough_scrolling_cases/canvas.html'
SPEED_IN_PIXELS_PER_SECOND = 20000
class ScrollingCanvas40000Page(ToughFastScrollingPage):
BASE_NAME = 'canvas_40000_pixels_per_second'
URL = 'file://../tough_scrolling_cases/canvas.html'
SPEED_IN_PIXELS_PER_SECOND = 40000
class ScrollingCanvas60000Page(ToughFastScrollingPage):
BASE_NAME = 'canvas_60000_pixels_per_second'
URL = 'file://../tough_scrolling_cases/canvas.html'
SPEED_IN_PIXELS_PER_SECOND = 60000
class ScrollingCanvas75000Page(ToughFastScrollingPage):
BASE_NAME = 'canvas_75000_pixels_per_second'
URL = 'file://../tough_scrolling_cases/canvas.html'
SPEED_IN_PIXELS_PER_SECOND = 75000
class ScrollingCanvas90000Page(ToughFastScrollingPage):
BASE_NAME = 'canvas_90000_pixels_per_second'
URL = 'file://../tough_scrolling_cases/canvas.html'
SPEED_IN_PIXELS_PER_SECOND = 90000
|
import copy
import types
import sys
import os
from itertools import izip
try:
set
except NameError:
from sets import Set as set # Python 2.3 fallback.
import django.db.models.manager # Imported to register signal handler.
from django.core.exceptions import ObjectDoesNotExist, MultipleObjectsReturned, FieldError
from django.db.models.fields import AutoField, FieldDoesNotExist
from django.db.models.fields.related import OneToOneRel, ManyToOneRel, OneToOneField
from django.db.models.query import delete_objects, Q, CollectedObjects
from django.db.models.options import Options
from django.db import connection, transaction, DatabaseError
from django.db.models import signals
from django.db.models.loading import register_models, get_model
from django.utils.functional import curry
from django.utils.encoding import smart_str, force_unicode, smart_unicode
from django.conf import settings
class ModelBase(type):
"""
Metaclass for all models.
"""
def __new__(cls, name, bases, attrs):
super_new = super(ModelBase, cls).__new__
parents = [b for b in bases if isinstance(b, ModelBase)]
if not parents:
# If this isn't a subclass of Model, don't do anything special.
return super_new(cls, name, bases, attrs)
# Create the class.
module = attrs.pop('__module__')
new_class = super_new(cls, name, bases, {'__module__': module})
attr_meta = attrs.pop('Meta', None)
abstract = getattr(attr_meta, 'abstract', False)
if not attr_meta:
meta = getattr(new_class, 'Meta', None)
else:
meta = attr_meta
base_meta = getattr(new_class, '_meta', None)
if getattr(meta, 'app_label', None) is None:
# Figure out the app_label by looking one level up.
# For 'django.contrib.sites.models', this would be 'sites'.
model_module = sys.modules[new_class.__module__]
kwargs = {"app_label": model_module.__name__.split('.')[-2]}
else:
kwargs = {}
new_class.add_to_class('_meta', Options(meta, **kwargs))
if not abstract:
new_class.add_to_class('DoesNotExist',
subclass_exception('DoesNotExist', ObjectDoesNotExist, module))
new_class.add_to_class('MultipleObjectsReturned',
subclass_exception('MultipleObjectsReturned', MultipleObjectsReturned, module))
if base_meta and not base_meta.abstract:
# Non-abstract child classes inherit some attributes from their
# non-abstract parent (unless an ABC comes before it in the
# method resolution order).
if not hasattr(meta, 'ordering'):
new_class._meta.ordering = base_meta.ordering
if not hasattr(meta, 'get_latest_by'):
new_class._meta.get_latest_by = base_meta.get_latest_by
if getattr(new_class, '_default_manager', None):
new_class._default_manager = None
# Bail out early if we have already created this class.
m = get_model(new_class._meta.app_label, name, False)
if m is not None:
return m
# Add all attributes to the class.
for obj_name, obj in attrs.items():
new_class.add_to_class(obj_name, obj)
# Do the appropriate setup for any model parents.
o2o_map = dict([(f.rel.to, f) for f in new_class._meta.local_fields
if isinstance(f, OneToOneField)])
for base in parents:
if not hasattr(base, '_meta'):
# Things without _meta aren't functional models, so they're
# uninteresting parents.
continue
# All the fields of any type declared on this model
new_fields = new_class._meta.local_fields + \
new_class._meta.local_many_to_many + \
new_class._meta.virtual_fields
field_names = set([f.name for f in new_fields])
if not base._meta.abstract:
# Concrete classes...
if base in o2o_map:
field = o2o_map[base]
field.primary_key = True
new_class._meta.setup_pk(field)
else:
attr_name = '%s_ptr' % base._meta.module_name
field = OneToOneField(base, name=attr_name,
auto_created=True, parent_link=True)
new_class.add_to_class(attr_name, field)
new_class._meta.parents[base] = field
else:
# .. and abstract ones.
# Check for clashes between locally declared fields and those
# on the ABC.
parent_fields = base._meta.local_fields + base._meta.local_many_to_many
for field in parent_fields:
if field.name in field_names:
raise FieldError('Local field %r in class %r clashes '\
'with field of similar name from '\
'abstract base class %r' % \
(field.name, name, base.__name__))
new_class.add_to_class(field.name, copy.deepcopy(field))
# Pass any non-abstract parent classes onto child.
new_class._meta.parents.update(base._meta.parents)
# Inherit managers from the abstract base classes.
base_managers = base._meta.abstract_managers
base_managers.sort()
for _, mgr_name, manager in base_managers:
val = getattr(new_class, mgr_name, None)
if not val or val is manager:
new_manager = manager._copy_to_model(new_class)
new_class.add_to_class(mgr_name, new_manager)
# Inherit virtual fields (like GenericForeignKey) from the parent class
for field in base._meta.virtual_fields:
if base._meta.abstract and field.name in field_names:
raise FieldError('Local field %r in class %r clashes '\
'with field of similar name from '\
'abstract base class %r' % \
(field.name, name, base.__name__))
new_class.add_to_class(field.name, copy.deepcopy(field))
if abstract:
# Abstract base models can't be instantiated and don't appear in
# the list of models for an app. We do the final setup for them a
# little differently from normal models.
attr_meta.abstract = False
new_class.Meta = attr_meta
return new_class
new_class._prepare()
register_models(new_class._meta.app_label, new_class)
# Because of the way imports happen (recursively), we may or may not be
# the first time this model tries to register with the framework. There
# should only be one class for each model, so we always return the
# registered version.
return get_model(new_class._meta.app_label, name, False)
def add_to_class(cls, name, value):
if hasattr(value, 'contribute_to_class'):
value.contribute_to_class(cls, name)
else:
setattr(cls, name, value)
def _prepare(cls):
"""
Creates some methods once self._meta has been populated.
"""
opts = cls._meta
opts._prepare(cls)
if opts.order_with_respect_to:
cls.get_next_in_order = curry(cls._get_next_or_previous_in_order, is_next=True)
cls.get_previous_in_order = curry(cls._get_next_or_previous_in_order, is_next=False)
setattr(opts.order_with_respect_to.rel.to, 'get_%s_order' % cls.__name__.lower(), curry(method_get_order, cls))
setattr(opts.order_with_respect_to.rel.to, 'set_%s_order' % cls.__name__.lower(), curry(method_set_order, cls))
# Give the class a docstring -- its definition.
if cls.__doc__ is None:
cls.__doc__ = "%s(%s)" % (cls.__name__, ", ".join([f.attname for f in opts.fields]))
if hasattr(cls, 'get_absolute_url'):
cls.get_absolute_url = curry(get_absolute_url, opts, cls.get_absolute_url)
signals.class_prepared.send(sender=cls)
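# A minimal sketch (illustrative names only) of what the metaclass above wires up:
#
#   class Animal(Model):
#       name = models.CharField(max_length=50)
#       class Meta:
#           abstract = True      # local fields get copied into concrete children
#
#   class Dog(Animal):
#       pass                     # inherits 'name', plus DoesNotExist, _meta, managers, etc.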
class Model(object):
__metaclass__ = ModelBase
def __init__(self, *args, **kwargs):
signals.pre_init.send(sender=self.__class__, args=args, kwargs=kwargs)
# There is a rather weird disparity here; if kwargs, it's set, then args
# overrides it. It should be one or the other; don't duplicate the work
# The reason for the kwargs check is that standard iterator passes in by
# args, and instantiation for iteration is 33% faster.
args_len = len(args)
if args_len > len(self._meta.fields):
# Daft, but matches old exception sans the err msg.
raise IndexError("Number of args exceeds number of fields")
fields_iter = iter(self._meta.fields)
if not kwargs:
# The ordering of the izip calls matter - izip throws StopIteration
# when an iter throws it. So if the first iter throws it, the second
# is *not* consumed. We rely on this, so don't change the order
# without changing the logic.
for val, field in izip(args, fields_iter):
setattr(self, field.attname, val)
else:
# Slower, kwargs-ready version.
for val, field in izip(args, fields_iter):
setattr(self, field.attname, val)
kwargs.pop(field.name, None)
# Maintain compatibility with existing calls.
if isinstance(field.rel, ManyToOneRel):
kwargs.pop(field.attname, None)
# Now we're left with the unprocessed fields that *must* come from
# keywords, or default.
for field in fields_iter:
rel_obj = None
if kwargs:
if isinstance(field.rel, ManyToOneRel):
try:
# Assume object instance was passed in.
rel_obj = kwargs.pop(field.name)
except KeyError:
try:
# Object instance wasn't passed in -- must be an ID.
val = kwargs.pop(field.attname)
except KeyError:
val = field.get_default()
else:
# Object instance was passed in. Special case: You can
# pass in "None" for related objects if it's allowed.
if rel_obj is None and field.null:
val = None
else:
val = kwargs.pop(field.attname, field.get_default())
else:
val = field.get_default()
# If we got passed a related instance, set it using the field.name
# instead of field.attname (e.g. "user" instead of "user_id") so
# that the object gets properly cached (and type checked) by the
# RelatedObjectDescriptor.
if rel_obj:
setattr(self, field.name, rel_obj)
else:
setattr(self, field.attname, val)
if kwargs:
for prop in kwargs.keys():
try:
if isinstance(getattr(self.__class__, prop), property):
setattr(self, prop, kwargs.pop(prop))
except AttributeError:
pass
if kwargs:
raise TypeError, "'%s' is an invalid keyword argument for this function" % kwargs.keys()[0]
signals.post_init.send(sender=self.__class__, instance=self)
def __repr__(self):
try:
u = unicode(self)
except (UnicodeEncodeError, UnicodeDecodeError):
u = '[Bad Unicode data]'
return smart_str(u'<%s: %s>' % (self.__class__.__name__, u))
def __str__(self):
if hasattr(self, '__unicode__'):
return force_unicode(self).encode('utf-8')
return '%s object' % self.__class__.__name__
def __eq__(self, other):
return isinstance(other, self.__class__) and self._get_pk_val() == other._get_pk_val()
def __ne__(self, other):
return not self.__eq__(other)
def __hash__(self):
return hash(self._get_pk_val())
def _get_pk_val(self, meta=None):
if not meta:
meta = self._meta
return getattr(self, meta.pk.attname)
def _set_pk_val(self, value):
return setattr(self, self._meta.pk.attname, value)
pk = property(_get_pk_val, _set_pk_val)
def serializable_value(self, field_name):
"""
Returns the value of the field name for this instance. If the field is
a foreign key, returns the id value, instead of the object. If there's
no Field object with this name on the model, the model attribute's
value is returned directly.
Used to serialize a field's value (in the serializer, or form output,
for example). Normally, you would just access the attribute directly
and not use this method.
"""
try:
field = self._meta.get_field_by_name(field_name)[0]
except FieldDoesNotExist:
return getattr(self, field_name)
return getattr(self, field.attname)
def save(self, force_insert=False, force_update=False, using=None):
"""
Saves the current instance. Override this in a subclass if you want to
control the saving process.
The 'force_insert' and 'force_update' parameters can be used to insist
that the "save" must be an SQL insert or update (or equivalent for
non-SQL backends), respectively. Normally, they should not be set.
"""
if force_insert and force_update:
raise ValueError("Cannot force both insert and updating in "
"model saving.")
self.save_base(force_insert=force_insert, force_update=force_update, using=using)
save.alters_data = True
def save_base(self, raw=False, cls=None, force_insert=False,
force_update=False, using=None):
"""
Does the heavy-lifting involved in saving. Subclasses shouldn't need to
override this method. It's separate from save() in order to hide the
need for overrides of save() to pass around internal-only parameters
('raw' and 'cls').
"""
assert not (force_insert and force_update)
if not cls:
cls = self.__class__
meta = self._meta
signal = True
signals.pre_save.send(sender=self.__class__, instance=self, raw=raw)
else:
meta = cls._meta
signal = False
# get model's connection
from django.db import get_current_connection, get_connection
if using:
conn = get_connection(using)
else:
if hasattr(self,'__connection__'):
conn = self.__connection__
elif meta.using:
conn = get_connection(meta.using)
else:
conn = get_current_connection()
# If we are in a raw save, save the object exactly as presented.
# That means that we don't try to be smart about saving attributes
# that might have come from the parent class - we just save the
# attributes we have been given to the class we have been given.
if not raw:
for parent, field in meta.parents.items():
# At this point, parent's primary key field may be unknown
# (for example, from administration form which doesn't fill
# this field). If so, fill it.
if getattr(self, parent._meta.pk.attname) is None and getattr(self, field.attname) is not None:
setattr(self, parent._meta.pk.attname, getattr(self, field.attname))
self.save_base(raw, parent)
setattr(self, field.attname, self._get_pk_val(parent._meta))
non_pks = [f for f in meta.local_fields if not f.primary_key]
# First, try an UPDATE. If that doesn't update anything, do an INSERT.
pk_val = self._get_pk_val(meta)
pk_set = pk_val is not None
record_exists = True
manager = cls._default_manager
if pk_set:
# Determine whether a record with the primary key already exists.
if (force_update or (not force_insert and
manager.get_query_set_by_connection(conn).filter(pk=pk_val).extra(select={'a': 1}).values('a').order_by())):
# It does already exist, so do an UPDATE.
if force_update or non_pks:
values = [(f, None, f.get_db_prep_save(raw and getattr(self, f.attname) or f.pre_save(self, False), conn)) for f in non_pks]
rows = manager.get_query_set_by_connection(conn).filter(pk=pk_val)._update(values)
if force_update and not rows:
raise DatabaseError("Forced update did not affect any rows.")
else:
record_exists = False
if not pk_set or not record_exists:
if not pk_set:
if force_update:
raise ValueError("Cannot force an update in save() with no primary key.")
values = [(f, f.get_db_prep_save(raw and getattr(self, f.attname) or f.pre_save(self, True), conn)) for f in meta.local_fields if not isinstance(f, AutoField)]
else:
values = [(f, f.get_db_prep_save(raw and getattr(self, f.attname) or f.pre_save(self, True), conn)) for f in meta.local_fields]
if meta.order_with_respect_to:
field = meta.order_with_respect_to
values.append((meta.get_field_by_name('_order')[0], manager.filter(**{field.name: getattr(self, field.attname)}).count()))
record_exists = False
update_pk = bool(meta.has_auto_field and not pk_set)
if values:
# Create a new record.
result = manager._insert(values, return_id=update_pk, connection=conn)
else:
# Create a new record with defaults for everything.
result = manager._insert([(meta.pk, conn.ops.pk_default_value())], return_id=update_pk, raw_values=True, connection=conn)
if update_pk:
setattr(self, meta.pk.attname, result)
transaction.commit_unless_managed(conn) # need by connection
if signal:
signals.post_save.send(sender=self.__class__, instance=self,
created=(not record_exists), raw=raw)
save_base.alters_data = True
def _collect_sub_objects(self, seen_objs, parent=None, nullable=False):
"""
Recursively populates seen_objs with all objects related to this
object.
When done, seen_objs.items() will be in the format:
[(model_class, {pk_val: obj, pk_val: obj, ...}),
(model_class, {pk_val: obj, pk_val: obj, ...}), ...]
"""
pk_val = self._get_pk_val()
if seen_objs.add(self.__class__, pk_val, self, parent, nullable):
return
for related in self._meta.get_all_related_objects():
rel_opts_name = related.get_accessor_name()
if isinstance(related.field.rel, OneToOneRel):
try:
sub_obj = getattr(self, rel_opts_name)
except ObjectDoesNotExist:
pass
else:
sub_obj._collect_sub_objects(seen_objs, self.__class__, related.field.null)
else:
for sub_obj in getattr(self, rel_opts_name).all():
sub_obj._collect_sub_objects(seen_objs, self.__class__, related.field.null)
# Handle any ancestors (for the model-inheritance case). We do this by
# traversing to the most remote parent classes -- those with no parents
# themselves -- and then adding those instances to the collection. That
# will include all the child instances down to "self".
parent_stack = self._meta.parents.values()
while parent_stack:
link = parent_stack.pop()
parent_obj = getattr(self, link.name)
if parent_obj._meta.parents:
parent_stack.extend(parent_obj._meta.parents.values())
continue
# At this point, parent_obj is base class (no ancestor models). So
# delete it and all its descendents.
parent_obj._collect_sub_objects(seen_objs)
def delete(self):
assert self._get_pk_val() is not None, "%s object can't be deleted because its %s attribute is set to None." % (self._meta.object_name, self._meta.pk.attname)
# Find all the objects than need to be deleted.
seen_objs = CollectedObjects()
self._collect_sub_objects(seen_objs)
# Actually delete the objects.
delete_objects(seen_objs)
delete.alters_data = True
def _get_FIELD_display(self, field):
value = getattr(self, field.attname)
return force_unicode(dict(field.flatchoices).get(value, value), strings_only=True)
def _get_next_or_previous_by_FIELD(self, field, is_next, **kwargs):
op = is_next and 'gt' or 'lt'
order = not is_next and '-' or ''
param = smart_str(getattr(self, field.attname))
q = Q(**{'%s__%s' % (field.name, op): param})
q = q|Q(**{field.name: param, 'pk__%s' % op: self.pk})
qs = self.__class__._default_manager.filter(**kwargs).filter(q).order_by('%s%s' % (order, field.name), '%spk' % order)
try:
return qs[0]
except IndexError:
raise self.DoesNotExist, "%s matching query does not exist." % self.__class__._meta.object_name
def _get_next_or_previous_in_order(self, is_next):
cachename = "__%s_order_cache" % is_next
if not hasattr(self, cachename):
qn = connection.ops.quote_name
op = is_next and '>' or '<'
order = not is_next and '-_order' or '_order'
order_field = self._meta.order_with_respect_to
# FIXME: When querysets support nested queries, this can be turned
# into a pure queryset operation.
where = ['%s %s (SELECT %s FROM %s WHERE %s=%%s)' % \
(qn('_order'), op, qn('_order'),
qn(self._meta.db_table), qn(self._meta.pk.column))]
params = [self.pk]
obj = self._default_manager.filter(**{order_field.name: getattr(self, order_field.attname)}).extra(where=where, params=params).order_by(order)[:1].get()
setattr(self, cachename, obj)
return getattr(self, cachename)
############################################
# HELPER FUNCTIONS (CURRIED MODEL METHODS) #
############################################
# ORDERING METHODS #########################
def method_set_order(ordered_obj, self, id_list):
rel_val = getattr(self, ordered_obj._meta.order_with_respect_to.rel.field_name)
order_name = ordered_obj._meta.order_with_respect_to.name
# FIXME: It would be nice if there was an "update many" version of update
# for situations like this.
for i, j in enumerate(id_list):
ordered_obj.objects.filter(**{'pk': j, order_name: rel_val}).update(_order=i)
transaction.commit_unless_managed()
def method_get_order(ordered_obj, self):
rel_val = getattr(self, ordered_obj._meta.order_with_respect_to.rel.field_name)
order_name = ordered_obj._meta.order_with_respect_to.name
pk_name = ordered_obj._meta.pk.name
return [r[pk_name] for r in
ordered_obj.objects.filter(**{order_name: rel_val}).values(pk_name)]
##############################################
# HELPER FUNCTIONS (CURRIED MODEL FUNCTIONS) #
##############################################
def get_absolute_url(opts, func, self, *args, **kwargs):
return settings.ABSOLUTE_URL_OVERRIDES.get('%s.%s' % (opts.app_label, opts.module_name), func)(self, *args, **kwargs)
########
# MISC #
########
class Empty(object):
pass
if sys.version_info < (2, 5):
# Prior to Python 2.5, Exception was an old-style class
def subclass_exception(name, parent, unused):
return types.ClassType(name, (parent,), {})
else:
def subclass_exception(name, parent, module):
return type(name, (parent,), {'__module__': module})
|
import cherrypy
# This is the definition of the MAN class
'''
# Import the submodule in the application
import programs.cdag30.man as cdag30_man
# Register man.py under the cdag30 module and map the submodule man to its MAN() class
root.cdag30.man = cdag30_man.MAN()
# Once configured, the URL
/cdag30/man/assembly
# calls the assembly method of the MAN class in man.py
'''
class MAN(object):
# Each group uses index to drive the subsequent program flow
@cherrypy.expose
def index(self):
outstring = '''
This is the MAN class under the cdag30 module of the 2014CDA collaborative project.<br /><br />
<!-- Relative links are used here instead of absolute URLs (this is an HTML comment) -->
<a href="assembly">Run the assembly method of the MAN class</a><br /><br />
Make sure the parts listed below are in the V:/home/lego/man directory and that an empty Creo assembly file is open.<br />
<a href="/static/lego_man.7z">lego_man.7z</a> (right-click to save the .7z file)<br />
'''
return outstring
@cherrypy.expose
def assembly(self, *rightarm):
outstring = '''
<!DOCTYPE html>
<html>
<head>
<meta http-equiv="content-type" content="text/html;charset=utf-8">
<script type="text/javascript" src="/static/weblink/pfcUtils.js"></script>
<script type="text/javascript" src="/static/weblink/wl_header.js"></script>
</head>
<body>
</script><script language="JavaScript">
/* Define a part assembly function */
// featID is the ID of the first component assembled into the assembly
// inc is the assembly-order offset for part1; the first component added gets featID+0
// part2 is the file name of the additional part
////////////////////////////////////////////////
// axis_plane_assembly assembly function
////////////////////////////////////////////////
function axis_plane_assembly(session, assembly, transf, featID, inc, part2, axis1, plane1, axis2, plane2){
var descr = pfcCreate("pfcModelDescriptor").CreateFromFileName ("v:/home/lego/man/"+part2);
var componentModel = session.GetModelFromDescr(descr);
var componentModel = session.RetrieveModel(descr);
if (componentModel != void null)
{
var asmcomp = assembly.AssembleComponent (componentModel, transf);
}
var ids = pfcCreate("intseq");
ids.Append(featID+inc);
var subPath = pfcCreate("MpfcAssembly").CreateComponentPath(assembly, ids);
subassembly = subPath.Leaf;
var asmDatums = new Array(axis1, plane1);
var compDatums = new Array(axis2, plane2);
var relation = new Array (pfcCreate("pfcComponentConstraintType").ASM_CONSTRAINT_ALIGN, pfcCreate("pfcComponentConstraintType").ASM_CONSTRAINT_MATE);
var relationItem = new Array(pfcCreate("pfcModelItemType").ITEM_AXIS, pfcCreate("pfcModelItemType").ITEM_SURFACE);
var constrs = pfcCreate("pfcComponentConstraints");
for (var i = 0; i < 2; i++)
{
var asmItem = subassembly.GetItemByName (relationItem[i], asmDatums [i]);
if (asmItem == void null)
{
interactFlag = true;
continue;
}
var compItem = componentModel.GetItemByName (relationItem[i], compDatums [i]);
if (compItem == void null)
{
interactFlag = true;
continue;
}
var MpfcSelect = pfcCreate ("MpfcSelect");
var asmSel = MpfcSelect.CreateModelItemSelection (asmItem, subPath);
var compSel = MpfcSelect.CreateModelItemSelection (compItem, void null);
var constr = pfcCreate("pfcComponentConstraint").Create (relation[i]);
constr.AssemblyReference = asmSel;
constr.ComponentReference = compSel;
constr.Attributes = pfcCreate("pfcConstraintAttributes").Create (true, false);
constrs.Append(constr);
}
asmcomp.SetConstraints(constrs, void null);
}
// End of the axis_plane_assembly() function
///////////////////////////////////////////////////////////////////////////////////////////////////////////
// three_plane_assembly assembles with ALIGN constraints; a featID of 0 means an empty assembly file
///////////////////////////////////////////////////////////////////////////////////////////////////////////
function three_plane_assembly(session, assembly, transf, featID, inc, part2, plane1, plane2, plane3, plane4, plane5, plane6){
var descr = pfcCreate("pfcModelDescriptor").CreateFromFileName ("v:/home/lego/man/"+part2);
var componentModel = session.GetModelFromDescr(descr);
var componentModel = session.RetrieveModel(descr);
if (componentModel != void null)
{
var asmcomp = assembly.AssembleComponent (componentModel, transf);
}
var ids = pfcCreate("intseq");
// A featID of 0 means an empty assembly file
if (featID != 0){
ids.Append(featID+inc);
var subPath = pfcCreate("MpfcAssembly").CreateComponentPath(assembly, ids);
subassembly = subPath.Leaf;
}else{
var subPath = pfcCreate("MpfcAssembly").CreateComponentPath(assembly, ids);
subassembly = assembly;
// Obtain the first assembled component, first_featID
// Get the component ids under the assembly; since there is only one part, take its featID at index 0
var components = assembly.ListFeaturesByType(true, pfcCreate ("pfcFeatureType").FEATTYPE_COMPONENT);
// This featID is the ID of the first part in the assembly, i.e. the LEGO minifigure body
var first_featID = components.Item(0).Id;
}
var constrs = pfcCreate("pfcComponentConstraints");
var asmDatums = new Array(plane1, plane2, plane3);
var compDatums = new Array(plane4, plane5, plane6);
var MpfcSelect = pfcCreate("MpfcSelect");
for (var i = 0; i < 3; i++)
{
var asmItem = subassembly.GetItemByName(pfcCreate("pfcModelItemType").ITEM_SURFACE, asmDatums[i]);
if (asmItem == void null)
{
interactFlag = true;
continue;
}
var compItem = componentModel.GetItemByName(pfcCreate("pfcModelItemType").ITEM_SURFACE, compDatums[i]);
if (compItem == void null)
{
interactFlag = true;
continue;
}
var asmSel = MpfcSelect.CreateModelItemSelection(asmItem, subPath);
var compSel = MpfcSelect.CreateModelItemSelection(compItem, void null);
var constr = pfcCreate("pfcComponentConstraint").Create(pfcCreate("pfcComponentConstraintType").ASM_CONSTRAINT_ALIGN);
constr.AssemblyReference = asmSel;
constr.ComponentReference = compSel;
constr.Attributes = pfcCreate("pfcConstraintAttributes").Create (false, false);
constrs.Append(constr);
}
asmcomp.SetConstraints(constrs, void null);
// If featID == 0, return first_featID
if (featID == 0)
return first_featID;
}
// End of the three_plane_assembly() function
///////////////////////////////////////////////////////////////////////////////////////////////////////////
// three_plane_assembly2 assembles with MATE constraints; a featID of 0 means an empty assembly file
///////////////////////////////////////////////////////////////////////////////////////////////////////////
function three_plane_assembly2(session, assembly, transf, featID, inc, part2, plane1, plane2, plane3, plane4, plane5, plane6){
var descr = pfcCreate("pfcModelDescriptor").CreateFromFileName ("v:/home/lego/man/"+part2);
var componentModel = session.GetModelFromDescr(descr);
var componentModel = session.RetrieveModel(descr);
if (componentModel != void null)
{
var asmcomp = assembly.AssembleComponent (componentModel, transf);
}
var ids = pfcCreate("intseq");
// A featID of 0 means an empty assembly file
if (featID != 0){
ids.Append(featID+inc);
var subPath = pfcCreate("MpfcAssembly").CreateComponentPath(assembly, ids);
subassembly = subPath.Leaf;
}else{
var subPath = pfcCreate("MpfcAssembly").CreateComponentPath(assembly, ids);
subassembly = assembly;
// Obtain the first assembled component, first_featID
// Get the component ids under the assembly; since there is only one part, take its featID at index 0
var components = assembly.ListFeaturesByType(true, pfcCreate ("pfcFeatureType").FEATTYPE_COMPONENT);
// This featID is the ID of the first part in the assembly, i.e. the LEGO minifigure body
var first_featID = components.Item(0).Id;
}
var constrs = pfcCreate("pfcComponentConstraints");
var asmDatums = new Array(plane1, plane2, plane3);
var compDatums = new Array(plane4, plane5, plane6);
var MpfcSelect = pfcCreate("MpfcSelect");
for (var i = 0; i < 3; i++)
{
var asmItem = subassembly.GetItemByName(pfcCreate("pfcModelItemType").ITEM_SURFACE, asmDatums[i]);
if (asmItem == void null)
{
interactFlag = true;
continue;
}
var compItem = componentModel.GetItemByName(pfcCreate("pfcModelItemType").ITEM_SURFACE, compDatums[i]);
if (compItem == void null)
{
interactFlag = true;
continue;
}
var asmSel = MpfcSelect.CreateModelItemSelection(asmItem, subPath);
var compSel = MpfcSelect.CreateModelItemSelection(compItem, void null);
var constr = pfcCreate("pfcComponentConstraint").Create(pfcCreate("pfcComponentConstraintType").ASM_CONSTRAINT_MATE);
constr.AssemblyReference = asmSel;
constr.ComponentReference = compSel;
constr.Attributes = pfcCreate("pfcConstraintAttributes").Create (false, false);
constrs.Append(constr);
}
asmcomp.SetConstraints(constrs, void null);
// If featID == 0, return first_featID
if (featID == 0)
return first_featID;
}
// End of the three_plane_assembly2() function, which mainly uses three-plane MATE assembly
//
// If the operating system hosting Creo is not Windows
if (!pfcIsWindows())
// enable the corresponding UniversalXPConnect privilege (the equivalent of ActiveX on Windows)
netscape.security.PrivilegeManager.enablePrivilege("UniversalXPConnect");
// pfcGetProESession() is a function in pfcUtils.js; it ensures this JavaScript runs inside the embedded browser
var session = pfcGetProESession();
// Set the config option so the built-in placement assumptions of the component assembly flow are not used
session.SetConfigOption("comp_placement_assumptions","no");
// Create the placement matrix for the part; Pro/Web.Link variables cannot be created directly and must go through pfcCreate()
var identityMatrix = pfcCreate("pfcMatrix3D");
// Build the identity placement matrix
for (var x = 0; x < 4; x++)
for (var y = 0; y < 4; y++)
{
if (x == y)
identityMatrix.Set(x, y, 1.0);
else
identityMatrix.Set(x, y, 0.0);
}
// Use identityMatrix to build the transf coordinate transformation matrix
var transf = pfcCreate("pfcTransform3D").Create(identityMatrix);
// Get the current working directory
var currentDir = session.getCurrentDirectory();
// Use the currently opened empty assembly file as the model
var model = session.CurrentModel;
// Check that a model exists and that it is an assembly; otherwise throw an error
if (model == void null || model.Type != pfcCreate("pfcModelType").MDL_ASSEMBLY)
throw new Error (0, "Current model is not an assembly.");
// Treat this model as the assembly object
var assembly = model;
/////////////////////////////////////////////////////////////////
// Start the assembly; every step is done through function calls
/////////////////////////////////////////////////////////////////
// The Body and the empty assembly file are assembled with three plane constraints
// The empty assembly planes are ASM_TOP, ASM_FRONT, ASM_RIGHT
// The Body planes are TOP, FRONT, RIGHT
// featID=0 means an empty assembly file, and the function returns the featID of the first component
// Assemble the right-hand ARM through the function call, with assembly increment order 1
axis_plane_assembly(session, assembly, transf, 40,0,
"LEGO_ARM_RT.prt", "A_9", "DTM2", "A_4", "DTM1");
// Regenerate and repaint the assembly file
assembly.Regenerate (void null);
session.GetModelWindow (assembly).Repaint();
</script>
</body>
</html>
'''
return outstring
|
# -*- coding: utf-8 -*-
from odoo import models, fields, api, release
import logging
_logger = logging.getLogger(__name__)
class res_company(models.Model):
_inherit = "res.company"
contract_reference = fields.Char(
'Contract Reference',
required=True
)
date_start = fields.Date(
'Valid from',
required=True
)
date_to = fields.Date(
'Valid to',
required=True,
)
nbr_page_requests = fields.Integer(
'Active HTTP Page Requests',
)
nbr_users_contracted = fields.Integer(
'Active Users allowed',
required=True
)
nbr_apps_contracted = fields.Integer(
'Apps allowed',
)
nbr_users = fields.Integer(
'Total Users',
compute='_get_warranty_info',
readonly=True
)
nbr_active_users = fields.Integer(
'Active Users',
compute='_get_warranty_info',
readonly=True
)
nbr_share_users = fields.Integer(
'Share Users',
compute='_get_warranty_info',
readonly=True
)
nbr_active_share_users = fields.Integer(
'Active Share Users',
compute='_get_warranty_info',
readonly=True
)
version = fields.Char(
'Current Version',
compute='_get_warranty_info',
)
apps = fields.Integer(
'Installed Apps',
compute='_get_warranty_info',
readonly=True
)
def _get_warranty_info(self):
msg = self.env['publisher_warranty.contract']._get_message()
self.version = release.version
self.nbr_users = msg['nbr_users']
self.nbr_active_users = msg['nbr_active_users']
self.nbr_share_users = msg['nbr_share_users']
self.nbr_active_share_users = msg['nbr_active_share_users']
self.apps = len(msg['apps'])
|
import urllib,xbmcplugin,xbmcgui,xbmcaddon,xbmc,os
ADDON = xbmcaddon.Addon(id='plugin.video.kodi_maintenance')
DIR=os.path.join(ADDON.getAddonInfo('profile'))
THEHTML = xbmc.translatePath(os.path.join(ADDON.getAddonInfo('path'),'theemail.html'))
def CATEGORIES():
if ADDON.getSetting('email')=='':
Show_Dialog('','You Need To Enter Your Email Details','')
ADDON.openSettings()
addDir('Email Me My Log','ME',2,'','')
addDir('Email Someone Else My Log','',2,'','')
def search_entered():
favs = ADDON.getSetting('favs').split(',')
keyboard = xbmc.Keyboard('', 'Email')
keyboard.doModal()
if keyboard.isConfirmed():
search_entered = keyboard.getText()
if not search_entered in favs:
favs.append(search_entered)
ADDON.setSetting('favs', ','.join(favs))
return search_entered
def getOther():
NAME=['[COLOR red]Cancel[/COLOR]','[COLOR green]New Email Address[/COLOR]']
if ADDON.getSetting('favs') =='':
return search_entered()
favs = ADDON.getSetting('favs').split(',')
for title in favs:
if len(title)>1:
NAME.append(title)
EMAIL=NAME[xbmcgui.Dialog().select('Please Select Email', NAME)]
if EMAIL =='[COLOR green]New Email Address[/COLOR]':
return search_entered()
else:
return EMAIL
def getMessage():
a='''Seems you are using Gmail and haven't enabled less secure apps on your Google account\n\nSimply log into your account online; once logged in, visit:\n\n[COLOR royalblue]https://www.google.com/settings/security/lesssecureapps[/COLOR]\n\nAnd "Turn On" access for less secure apps\n\n\nThen this emailer will work :)\n\nThanks\nTeam [COLOR royalblue]X[/COLOR]unity[COLOR royalblue]T[/COLOR]alk'''
return a
def send_email(TOWHO,LOG):
dp = xbmcgui.DialogProgress()
dp.create(".Kodi Log Emailer",'Logging Into Your Email')
dp.update(0)
THESMTP ,THEPORT = Servers()
fromaddr=ADDON.getSetting('email')
if TOWHO =='ME':
toaddr=fromaddr
else:
toaddr=getOther()
if toaddr =='[COLOR red]Cancel[/COLOR]':
Show_Dialog('No Email Sent','','Email Cancelled')
else:
import datetime
TODAY=datetime.datetime.today().strftime('[%d-%m-%Y %H:%M]')
from email.MIMEMultipart import MIMEMultipart
from email.MIMEText import MIMEText
fromaddr = '"Hi Message From Yourself" <%s>'% (fromaddr)
msg = MIMEMultipart()
msg['From'] = fromaddr
msg['To'] = toaddr
msg['Subject'] = "Your Kodi Log "+str(TODAY)
body = open(THEHTML).read()
content = MIMEText(body, 'html')
msg.attach(content)
try:filename = LOG.rsplit('\\', 1)[1]
except:filename = LOG.rsplit('/', 1)[1]
f = file(LOG)
attachment = MIMEText(f.read())
attachment.add_header('Content-Disposition', 'attachment', filename=filename.replace('log','txt'))
msg.attach(attachment)
import smtplib
server = smtplib.SMTP(str(THESMTP), int(THEPORT))
dp.update(50, 'Attaching Your Email',filename.replace('log','txt'))
server.ehlo()
server.starttls()
server.ehlo()
try:server.login(ADDON.getSetting('email').encode('UTF-8'),ADDON.getSetting('password').encode('UTF-8'))
except Exception as e:
if 'gmail' in THESMTP:
if '/answer/787' in str(e):
e=getMessage()
return showText('[COLOR red]ERROR !![/COLOR]',str(e).replace('\\n','[CR]'))
text = msg.as_string()
dp.update(75, 'Sending........',filename.replace('log','txt'))
server.sendmail(fromaddr, toaddr, text)
dp.close()
Show_Dialog('Email Sent To','[COLOR green]'+toaddr+'[/COLOR]','Also Check Junk Folder')
def Servers():
SERVER = ADDON.getSetting('server')
APPENDED=[]
server_list =[('Gmail','smtp.gmail.com','587'),
('Outlook/Hotmail','smtp-mail.outlook.com','587'),
('Office365','smtp.office365.com','587'),
('Yahoo Mail','smtp.mail.yahoo.com','465'),
('Yahoo Mail Plus','smtp.mail.yahoo.co.uk','465'),
('Yahoo Mail Deutschland','smtp.mail.yahoo.com','465'),
('Yahoo Mail AU/NZ','smtp.mail.yahoo.au','465'),
('AOL','smtp.att.yahoo.com','465'),
('NTL @ntlworld','smtp.ntlworld.com','465'),
('BT Connect','smtp.btconnect.com','25'),
('O2 Deutschland','smtp.1and1.com','587'),
('1&1 Deutschland','smtp.1und1.de','587'),
('Verizon','smtp.zoho.com','465'),
('Mail','smtp.mail.com','587'),
('GMX','smtp.gmx.com','465'),
('Custom',ADDON.getSetting('custom_server'),ADDON.getSetting('custom_port'))]
for server , smtp ,port in server_list:
if SERVER ==server:
APPENDED.append([smtp ,port])
return APPENDED[0][0],APPENDED[0][1]
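# Sketch of Servers() above: with the addon 'server' setting equal to 'Gmail', the function
# returns ('smtp.gmail.com', '587'); a 'Custom' setting falls back to the user-supplied
# custom_server/custom_port values.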
def EmailLog(TOWHO):
nameSelect=[]
logSelect=[]
import glob
folder = xbmc.translatePath('special://logpath')
xbmc.log(folder)
for file in glob.glob(folder+'/*.log'):
try:nameSelect.append(file.rsplit('\\', 1)[1].upper())
except:nameSelect.append(file.rsplit('/', 1)[1].upper())
logSelect.append(file)
LOG = logSelect[xbmcgui.Dialog().select('Please Select Log', nameSelect)]
send_email(TOWHO,LOG)
def showText(heading, text):
id = 10147
xbmc.executebuiltin('ActivateWindow(%d)' % id)
xbmc.sleep(100)
win = xbmcgui.Window(id)
retry = 50
while (retry > 0):
try:
xbmc.sleep(10)
retry -= 1
win.getControl(1).setLabel(heading)
win.getControl(5).setText(text)
return
except:
pass
def Show_Dialog(line1,line2,line3):
dialog = xbmcgui.Dialog()
dialog.ok('.Kodi Log Emailer', line1,line2,line3)
|
# -*- coding:utf-8 -*-
#
#
# Copyright (C) 2013 Michael Telahun Makonnen <[email protected]>.
# Copyright (c) 2005-2006 Axelor SARL. (http://www.axelor.com)
# and 2004-2010 Tiny SPRL (<http://tiny.be>).
# All Rights Reserved.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#
from datetime import datetime, timedelta
from pytz import timezone, utc
from openerp.osv import fields, orm
from openerp.tools import DEFAULT_SERVER_DATETIME_FORMAT as OE_DTFORMAT
from openerp.tools import DEFAULT_SERVER_DATE_FORMAT as OE_DFORMAT
from openerp.tools.translate import _
class hr_holidays_status(orm.Model):
_inherit = 'hr.holidays.status'
_columns = {
'ex_rest_days': fields.boolean(
'Exclude Rest Days',
help="If enabled, the employee's day off is skipped in leave days "
"calculation.",
),
'ex_public_holidays': fields.boolean(
'Exclude Public Holidays',
help="If enabled, public holidays are skipped in leave days "
"calculation.",
),
}
class hr_holidays(orm.Model):
_name = 'hr.holidays'
_inherit = ['hr.holidays', 'ir.needaction_mixin']
_columns = {
'real_days': fields.float(
'Total Days',
digits=(16, 1),
),
'rest_days': fields.float(
'Rest Days',
digits=(16, 1),
),
'public_holiday_days': fields.float(
'Public Holidays',
digits=(16, 1),
),
'return_date': fields.char(
'Return Date',
size=32,
),
}
def _employee_get(self, cr, uid, context=None):
if context is None:
context = {}
# If the user didn't enter from "My Leaves" don't pre-populate Employee
# field
import logging
_l = logging.getLogger(__name__)
_l.warning('context: %s', context)
if not context.get('search_default_my_leaves', False):
return False
ids = self.pool.get('hr.employee').search(
cr, uid, [('user_id', '=', uid)], context=context)
if ids:
return ids[0]
return False
def _days_get(self, cr, uid, context=None):
if context is None:
context = {}
date_from = context.get('default_date_from')
date_to = context.get('default_date_to')
if date_from and date_to:
delta = datetime.strptime(date_to, OE_DTFORMAT) - \
datetime.strptime(date_from, OE_DTFORMAT)
return delta.days or 1
return False
_defaults = {
'employee_id': _employee_get,
'number_of_days_temp': _days_get,
}
_order = 'date_from asc, type desc'
def _needaction_domain_get(self, cr, uid, context=None):
users_obj = self.pool.get('res.users')
domain = []
if users_obj.has_group(cr, uid, 'base.group_hr_manager'):
domain = [('state', 'in', ['draft', 'confirm'])]
return domain
elif users_obj.has_group(
cr, uid, 'hr_holidays_extension.group_hr_leave'):
domain = [('state', 'in', ['confirm']), (
'employee_id.user_id', '!=', uid)]
return domain
return False
def onchange_bynumber(
self, cr, uid, ids, no_days, date_from, employee_id,
holiday_status_id, context=None):
"""
Update the dates based on the number of days requested.
"""
ee_obj = self.pool['hr.employee']
status_obj = self.pool['hr.holidays.status']
holiday_obj = self.pool['hr.holidays.public']
sched_tpl_obj = self.pool['hr.schedule.template']
sched_detail_obj = self.pool['hr.schedule.detail']
result = {'value': {}}
if not no_days or not date_from or not employee_id:
return result
user = self.pool.get('res.users').browse(cr, uid, uid, context=context)
if user and user.tz:
local_tz = timezone(user.tz)
else:
local_tz = timezone('Africa/Addis_Ababa')
dt = datetime.strptime(date_from, OE_DTFORMAT)
employee = ee_obj.browse(cr, uid, employee_id, context=context)
if holiday_status_id:
hs_data = status_obj.read(
cr, uid, holiday_status_id,
['ex_rest_days', 'ex_public_holidays'],
context=context
)
else:
hs_data = {}
ex_rd = hs_data.get('ex_rest_days', False)
ex_ph = hs_data.get('ex_public_holidays', False)
# Get rest day and the schedule start time on the date the leave begins
#
rest_days = []
times = tuple()
if (ex_rd and employee.contract_id and
employee.contract_id.schedule_template_id):
rest_days = sched_tpl_obj.get_rest_days(
cr, uid, employee.contract_id.schedule_template_id.id,
context=context
)
times = sched_detail_obj.scheduled_begin_end_times(
cr, uid, employee.id,
employee.contract_id.id, dt,
context=context)
if len(times) > 0:
utcdtStart = times[0][0]
else:
dtStart = local_tz.localize(
datetime.strptime(dt.strftime(OE_DFORMAT) + ' 00:00:00',
OE_DTFORMAT),
is_dst=False
)
utcdtStart = dtStart.astimezone(utc)
count_days = no_days
real_days = 1
ph_days = 0
r_days = 0
next_dt = dt
while count_days > 1:
public_holiday = holiday_obj.is_public_holiday(
cr, uid, next_dt.date(), context=context)
public_holiday = (public_holiday and ex_ph)
rest_day = (next_dt.weekday() in rest_days and ex_rd)
next_dt += timedelta(days=+1)
if public_holiday or rest_day:
if public_holiday:
ph_days += 1
elif rest_day:
r_days += 1
real_days += 1
continue
else:
count_days -= 1
real_days += 1
while ((next_dt.weekday() in rest_days and ex_rd)
or (holiday_obj.is_public_holiday(
cr, uid, next_dt.date(), context=context
) and ex_ph)):
if holiday_obj.is_public_holiday(
cr, uid, next_dt.date(), context=context):
ph_days += 1
elif next_dt.weekday() in rest_days:
r_days += 1
next_dt += timedelta(days=1)
real_days += 1
# Set end time based on schedule
#
times = sched_detail_obj.scheduled_begin_end_times(
cr, uid, employee.id,
employee.contract_id.id, next_dt,
context=context)
if len(times) > 0:
utcdtEnd = times[-1][1]
else:
dtEnd = local_tz.localize(
datetime.strptime(next_dt.strftime(OE_DFORMAT) + ' 23:59:59',
OE_DTFORMAT), is_dst=False)
utcdtEnd = dtEnd.astimezone(utc)
result['value'].update({'department_id': employee.department_id.id,
'date_from': utcdtStart.strftime(OE_DTFORMAT),
'date_to': utcdtEnd.strftime(OE_DTFORMAT),
'rest_days': r_days,
'public_holiday_days': ph_days,
'real_days': real_days})
return result
def onchange_enddate(
self, cr, uid, ids, employee_id, date_to, holiday_status_id,
context=None):
ee_obj = self.pool['hr.employee']
status_obj = self.pool['hr.holidays.status']
holiday_obj = self.pool['hr.holidays.public']
sched_tpl_obj = self.pool['hr.schedule.template']
res = {'value': {'return_date': False}}
if not employee_id or not date_to:
return res
if holiday_status_id:
hs_data = status_obj.read(
cr, uid, holiday_status_id,
['ex_rest_days', 'ex_public_holidays'],
context=context
)
else:
hs_data = {}
ex_rd = hs_data.get('ex_rest_days', False)
ex_ph = hs_data.get('ex_public_holidays', False)
rest_days = []
if ex_rd:
ee = ee_obj.browse(cr, uid, employee_id, context=context)
if ee.contract_id and ee.contract_id.schedule_template_id:
rest_days = sched_tpl_obj.get_rest_days(
cr, uid, ee.contract_id.schedule_template_id.id,
context=context
)
dt = datetime.strptime(date_to, OE_DTFORMAT)
return_date = dt + timedelta(days=+1)
while ((return_date.weekday() in rest_days and ex_rd)
or (holiday_obj.is_public_holiday(
cr, uid, return_date.date(), context=context
) and ex_ph)):
return_date += timedelta(days=1)
res['value']['return_date'] = return_date.strftime('%B %d, %Y')
return res
def create(self, cr, uid, vals, context=None):
att_obj = self.pool.get('hr.attendance')
if (vals.get('date_from') and vals.get('date_to')
and vals.get('type') == 'remove'
and vals.get('holiday_type') == 'employee'):
att_ids = att_obj.search(
cr, uid, [
('employee_id', '=', vals['employee_id']),
('name', '>=', vals['date_from']),
('name', '<=', vals['date_to'])
], context=context)
if len(att_ids) > 0:
raise orm.except_orm(
_('Warning'),
_('There is already one or more attendance records for '
'the date you have chosen.')
)
return super(hr_holidays, self).create(cr, uid, vals, context=context)
def holidays_first_validate(self, cr, uid, ids, context=None):
self._check_validate(cr, uid, ids, context=context)
return super(hr_holidays, self).holidays_first_validate(
cr, uid, ids, context=context
)
def holidays_validate(self, cr, uid, ids, context=None):
self._check_validate(cr, uid, ids, context=context)
return super(hr_holidays, self).holidays_validate(
cr, uid, ids, context=context
)
def _check_validate(self, cr, uid, ids, context=None):
users_obj = self.pool.get('res.users')
if not users_obj.has_group(cr, uid, 'base.group_hr_manager'):
for leave in self.browse(cr, uid, ids, context=context):
if leave.employee_id.user_id.id == uid:
raise orm.except_orm(
_('Warning!'),
_('You cannot approve your own leave:\nHoliday Type: '
'%s\nEmployee: %s') % (leave.holiday_status_id.name,
leave.employee_id.name)
)
return
class hr_attendance(orm.Model):
_name = 'hr.attendance'
_inherit = 'hr.attendance'
def create(self, cr, uid, vals, context=None):
if vals.get('name', False):
lv_ids = self.pool.get('hr.holidays').search(
cr, uid, [
('employee_id', '=', vals['employee_id']),
('type', '=', 'remove'),
('date_from', '<=', vals['name']),
('date_to', '>=', vals['name']),
('state', 'not in', ['cancel', 'refuse'])
], context=context)
if len(lv_ids) > 0:
ee_data = self.pool.get('hr.employee').read(
cr, uid, vals['employee_id'], ['name'], context=context
)
raise orm.except_orm(
_('Warning'),
_("There is already one or more leaves recorded for the "
"date you have chosen:\n"
"Employee: %s\n"
"Date: %s" % (ee_data['name'], vals['name'])))
return super(hr_attendance, self).create(
cr, uid, vals, context=context
)
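
if __name__ == '__main__':
    # Hedged, self-contained sketch of the day-counting idea in
    # hr_holidays.onchange_bynumber(): walk forward from a start date and count
    # how many calendar days are needed to cover the requested number of leave
    # days when rest days are excluded. Public-holiday handling is omitted and
    # the Saturday/Sunday rest-day set is an assumption for illustration only.
    from datetime import datetime as _datetime, timedelta as _timedelta

    def _calendar_days_for_leave(start, no_days, rest_days=(5, 6)):
        real_days, remaining, day = 1, no_days, start
        while remaining > 1:
            day += _timedelta(days=1)
            real_days += 1
            if day.weekday() in rest_days:
                continue  # rest day: consumes a calendar day, not a leave day
            remaining -= 1
        return real_days

    # Thursday 2014-01-09, 3 leave days -> Thu, Fri, (Sat, Sun skipped), Mon = 5
    print(_calendar_days_for_leave(_datetime(2014, 1, 9), 3))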
|
# Copyright (c) 2020 Jeff Irion and contributors
#
# This file originated from the `graphslam` package:
#
# https://github.com/JeffLIrion/python-graphslam
r"""A class for odometry edges.
"""
import numpy as np
import matplotlib.pyplot as plt
#: The difference that will be used for numerical differentiation
EPSILON = 1e-6
class EdgeOdometry:
r"""A class for representing odometry edges in Graph SLAM.
Parameters
----------
vertices : list[graphslam.vertex.Vertex]
A list of the vertices constrained by the edge
information : np.ndarray
The information matrix :math:`\Omega_j` associated with the edge
estimate : graphslam.pose.se2.PoseSE2
The expected measurement :math:`\mathbf{z}_j`
Attributes
----------
vertices : list[graphslam.vertex.Vertex]
A list of the vertices constrained by the edge
information : np.ndarray
The information matrix :math:`\Omega_j` associated with the edge
estimate : PoseSE2
The expected measurement :math:`\mathbf{z}_j`
"""
def __init__(self, vertex_ids, information, estimate, vertices=None):
self.vertex_ids = vertex_ids
self.information = information
self.estimate = estimate
self.vertices = vertices
def calc_error(self):
r"""Calculate the error for the edge: :math:`\mathbf{e}_j \in \mathbb{R}^\bullet`.
.. math::
\mathbf{e}_j = \mathbf{z}_j - (p_2 \ominus p_1)
Returns
-------
np.ndarray
The error for the edge
"""
return (self.estimate - (self.vertices[1].pose - self.vertices[0].pose)).to_compact()
def calc_chi2(self):
r"""Calculate the :math:`\chi^2` error for the edge.
.. math::
\mathbf{e}_j^T \Omega_j \mathbf{e}_j
Returns
-------
float
The :math:`\chi^2` error for the edge
"""
err = self.calc_error()
return np.dot(np.dot(np.transpose(err), self.information), err)
def calc_chi2_gradient_hessian(self):
r"""Calculate the edge's contributions to the graph's :math:`\chi^2` error, gradient (:math:`\mathbf{b}`), and Hessian (:math:`H`).
Returns
-------
float
The :math:`\chi^2` error for the edge
dict
The edge's contribution(s) to the gradient
dict
The edge's contribution(s) to the Hessian
"""
        chi2 = self.calc_chi2()
        err = self.calc_error()
        jacobians = self.calc_jacobians()
        gradient_contrib = {v.index: np.dot(np.dot(np.transpose(err), self.information), jacobian)
                            for v, jacobian in zip(self.vertices, jacobians)}
        hessian_contrib = {(self.vertices[i].index, self.vertices[j].index):
                           np.dot(np.dot(np.transpose(jacobians[i]), self.information), jacobians[j])
                           for i in range(len(jacobians))
                           for j in range(i, len(jacobians))}
        return chi2, gradient_contrib, hessian_contrib
def calc_jacobians(self):
r"""Calculate the Jacobian of the edge's error with respect to each constrained pose.
.. math::
\frac{\partial}{\partial \Delta \mathbf{x}^k} \left[ \mathbf{e}_j(\mathbf{x}^k \boxplus \Delta \mathbf{x}^k) \right]
Returns
-------
list[np.ndarray]
The Jacobian matrices for the edge with respect to each constrained pose
"""
err = self.calc_error()
# The dimensionality of the compact pose representation
dim = len(self.vertices[0].pose.to_compact())
return [self._calc_jacobian(err, dim, i) for i in range(len(self.vertices))]
def _calc_jacobian(self, err, dim, vertex_index):
r"""Calculate the Jacobian of the edge with respect to the specified vertex's pose.
Parameters
----------
err : np.ndarray
The current error for the edge (see :meth:`EdgeOdometry.calc_error`)
dim : int
The dimensionality of the compact pose representation
vertex_index : int
The index of the vertex (pose) for which we are computing the Jacobian
Returns
-------
np.ndarray
The Jacobian of the edge with respect to the specified vertex's pose
"""
jacobian = np.zeros(err.shape + (dim,))
p0 = self.vertices[vertex_index].pose.copy()
for d in range(dim):
# update the pose
delta_pose = np.zeros(dim)
delta_pose[d] = EPSILON
self.vertices[vertex_index].pose += delta_pose
# compute the numerical derivative
jacobian[:, d] = (self.calc_error() - err) / EPSILON
# restore the pose
self.vertices[vertex_index].pose = p0.copy()
return jacobian
def to_g2o(self):
"""Export the edge to the .g2o format.
Returns
-------
str
The edge in .g2o format
"""
return "EDGE_SE2 {} {} {} {} {} ".format(self.vertex_ids[0], self.vertex_ids[1], self.estimate[0], self.estimate[1], self.estimate[2]) + " ".join([str(x) for x in self.information[np.triu_indices(3, 0)]]) + "\n"
def plot(self, color='b'):
"""Plot the edge.
Parameters
----------
color : str
The color that will be used to plot the edge
"""
xy = np.array([v.pose.position for v in self.vertices])
plt.plot(xy[:, 0], xy[:, 1], color=color)
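
if __name__ == '__main__':
    # Hedged, self-contained sketch of the forward-difference scheme used in
    # EdgeOdometry._calc_jacobian(): perturb each input dimension by EPSILON and
    # divide the change in the output by EPSILON. The function below is made up
    # for illustration; it is not part of the graphslam package.
    def _numerical_jacobian(f, x, eps=EPSILON):
        fx = f(x)
        jac = np.zeros((len(fx), len(x)))
        for d in range(len(x)):
            dx = np.zeros(len(x))
            dx[d] = eps
            jac[:, d] = (f(x + dx) - fx) / eps
        return jac

    _f = lambda v: np.array([v[0] ** 2, v[0] * v[1]])
    print(_numerical_jacobian(_f, np.array([1.0, 2.0])))  # approx [[2, 0], [2, 1]]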
|
# This file is part of formunculous.
#
# formunculous is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# formunculous is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with formunculous. If not, see <http://www.gnu.org/licenses/>.
# Copyright 2009-2011 Carson Gee
from django import forms
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext as _
from django.conf import settings
from django.contrib.localflavor.us.us_states import STATE_CHOICES
from django.forms.fields import Select
class FileWidget(forms.FileInput):
def __init__(self, attrs={}):
super(FileWidget, self).__init__(attrs)
def render(self, name, value, attrs=None):
output = []
output.append(super(FileWidget, self).render(name, value, attrs))
if value and hasattr(value, "url"):
output.append('<div class="apply_file_url">%s <br /><a target="_blank" href="%s">%s</a></div> ' % \
(_('Currently:'), value.url, value, ))
return mark_safe(u''.join(output))
class DateWidget(forms.TextInput):
class Media:
js = (
settings.MEDIA_URL + "formunculous/js/jquery-1.3.2.min.js",
settings.MEDIA_URL + "formunculous/js/jquery-ui-1.7.2.custom.min.js",
settings.MEDIA_URL + "formunculous/js/datepick.js",
)
css = {
'all': (settings.MEDIA_URL + "formunculous/css/smoothness/jquery-ui-1.7.2.custom.css",),
}
def __init__(self, attrs={}):
super(DateWidget, self).__init__(attrs={'class': 'vDateField', 'size': '8'})
class HoneypotWidget(forms.TextInput):
"""
Creates a hidden text input field, that when validated, if the
field has a different value in it than when initialized, the form
is invalid. This is used to stop simple SPAM bots.
"""
is_hidden = True
def __init__(self, attrs=None, *args, **kwargs):
super(HoneypotWidget, self).__init__(attrs, *args, **kwargs)
        if 'class' not in self.attrs:
self.attrs['style'] = 'display:none'
def render(self, *args, **kwargs):
value = super(HoneypotWidget, self).render(*args, **kwargs)
return value
class OptionalStateSelect(Select):
"""
A Select widget that uses a list of U.S. states/territories as its choices.
From the django project but a null option is prepended to the list.
"""
def __init__(self, attrs=None):
states_with_blank = tuple([('', '-----------')] + list(STATE_CHOICES))
super(OptionalStateSelect, self).__init__(attrs, choices=states_with_blank)
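
if __name__ == '__main__':
    # Hedged illustration of the honeypot mechanism described in HoneypotWidget:
    # the hidden field is rendered with a known initial value, and a submission
    # is treated as spam if that value changed. The helper below is made up and
    # is not part of formunculous.
    def _is_probably_spam(submitted_value, initial_value=''):
        return submitted_value != initial_value

    print(_is_probably_spam(''))        # False: a human browser leaves it alone
    print(_is_probably_spam('viagra'))  # True: a naive bot filled the hidden field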
|
# (c) 2017, Brian Coca <[email protected]>
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import optparse
from operator import attrgetter
from ansible import constants as C
from ansible.cli import CLI
from ansible.errors import AnsibleError, AnsibleOptionsError
from ansible.inventory.host import Host
from ansible.plugins.loader import vars_loader
from ansible.parsing.dataloader import DataLoader
from ansible.utils.vars import combine_vars
try:
from __main__ import display
except ImportError:
from ansible.utils.display import Display
display = Display()
INTERNAL_VARS = frozenset(['ansible_diff_mode',
'ansible_facts',
'ansible_forks',
'ansible_inventory_sources',
'ansible_limit',
'ansible_playbook_python',
'ansible_run_tags',
'ansible_skip_tags',
'ansible_version',
'inventory_dir',
'inventory_file',
'inventory_hostname',
'inventory_hostname_short',
'groups',
'group_names',
'omit',
'playbook_dir', ])
class InventoryCLI(CLI):
''' used to display or dump the configured inventory as Ansible sees it '''
ARGUMENTS = {'host': 'The name of a host to match in the inventory, relevant when using --list',
'group': 'The name of a group in the inventory, relevant when using --graph', }
def __init__(self, args):
super(InventoryCLI, self).__init__(args)
self.vm = None
self.loader = None
self.inventory = None
self._new_api = True
def parse(self):
self.parser = CLI.base_parser(
usage='usage: %prog [options] [host|group]',
epilog='Show Ansible inventory information, by default it uses the inventory script JSON format',
inventory_opts=True,
vault_opts=True,
basedir_opts=True,
)
# remove unused default options
self.parser.remove_option('--limit')
self.parser.remove_option('--list-hosts')
# Actions
        action_group = optparse.OptionGroup(self.parser, "Actions", "One of the following must be used on invocation, ONLY ONE!")
action_group.add_option("--list", action="store_true", default=False, dest='list', help='Output all hosts info, works as inventory script')
action_group.add_option("--host", action="store", default=None, dest='host', help='Output specific host info, works as inventory script')
action_group.add_option("--graph", action="store_true", default=False, dest='graph',
help='create inventory graph, if supplying pattern it must be a valid group name')
self.parser.add_option_group(action_group)
# graph
self.parser.add_option("-y", "--yaml", action="store_true", default=False, dest='yaml',
help='Use YAML format instead of default JSON, ignored for --graph')
self.parser.add_option("--vars", action="store_true", default=False, dest='show_vars',
help='Add vars to graph display, ignored unless used with --graph')
# list
self.parser.add_option("--export", action="store_true", default=C.INVENTORY_EXPORT, dest='export',
help="When doing an --list, represent in a way that is optimized for export,"
"not as an accurate representation of how Ansible has processed it")
# self.parser.add_option("--ignore-vars-plugins", action="store_true", default=False, dest='ignore_vars_plugins',
# help="When doing an --list, skip vars data from vars plugins, by default, this would include group_vars/ and host_vars/")
super(InventoryCLI, self).parse()
display.verbosity = self.options.verbosity
self.validate_conflicts(vault_opts=True)
# there can be only one! and, at least, one!
used = 0
for opt in (self.options.list, self.options.host, self.options.graph):
if opt:
used += 1
if used == 0:
raise AnsibleOptionsError("No action selected, at least one of --host, --graph or --list needs to be specified.")
elif used > 1:
raise AnsibleOptionsError("Conflicting options used, only one of --host, --graph or --list can be used at the same time.")
# set host pattern to default if not supplied
if len(self.args) > 0:
self.options.pattern = self.args[0]
else:
self.options.pattern = 'all'
def run(self):
results = None
super(InventoryCLI, self).run()
# Initialize needed objects
if getattr(self, '_play_prereqs', False):
self.loader, self.inventory, self.vm = self._play_prereqs(self.options)
else:
            # fallback to pre 2.4 way of initializing
from ansible.vars import VariableManager
from ansible.inventory import Inventory
self._new_api = False
self.loader = DataLoader()
self.vm = VariableManager()
# use vault if needed
if self.options.vault_password_file:
vault_pass = CLI.read_vault_password_file(self.options.vault_password_file, loader=self.loader)
elif self.options.ask_vault_pass:
vault_pass = self.ask_vault_passwords()
else:
vault_pass = None
if vault_pass:
self.loader.set_vault_password(vault_pass)
# actually get inventory and vars
self.inventory = Inventory(loader=self.loader, variable_manager=self.vm, host_list=self.options.inventory)
self.vm.set_inventory(self.inventory)
if self.options.host:
hosts = self.inventory.get_hosts(self.options.host)
if len(hosts) != 1:
raise AnsibleOptionsError("You must pass a single valid host to --hosts parameter")
myvars = self._get_host_variables(host=hosts[0])
self._remove_internal(myvars)
# FIXME: should we template first?
results = self.dump(myvars)
elif self.options.graph:
results = self.inventory_graph()
elif self.options.list:
top = self._get_group('all')
if self.options.yaml:
results = self.yaml_inventory(top)
else:
results = self.json_inventory(top)
results = self.dump(results)
if results:
# FIXME: pager?
display.display(results)
exit(0)
exit(1)
def dump(self, stuff):
if self.options.yaml:
import yaml
from ansible.parsing.yaml.dumper import AnsibleDumper
results = yaml.dump(stuff, Dumper=AnsibleDumper, default_flow_style=False)
else:
import json
from ansible.parsing.ajson import AnsibleJSONEncoder
results = json.dumps(stuff, cls=AnsibleJSONEncoder, sort_keys=True, indent=4)
return results
# FIXME: refactor to use same for VM
def get_plugin_vars(self, path, entity):
data = {}
def _get_plugin_vars(plugin, path, entities):
data = {}
try:
data = plugin.get_vars(self.loader, path, entity)
except AttributeError:
try:
if isinstance(entity, Host):
data = combine_vars(data, plugin.get_host_vars(entity.name))
else:
data = combine_vars(data, plugin.get_group_vars(entity.name))
except AttributeError:
if hasattr(plugin, 'run'):
raise AnsibleError("Cannot use v1 type vars plugin %s from %s" % (plugin._load_name, plugin._original_path))
else:
raise AnsibleError("Invalid vars plugin %s from %s" % (plugin._load_name, plugin._original_path))
return data
for plugin in vars_loader.all():
data = combine_vars(data, _get_plugin_vars(plugin, path, entity))
return data
def _get_group_variables(self, group):
# get info from inventory source
res = group.get_vars()
# FIXME: add switch to skip vars plugins, add vars plugin info
for inventory_dir in self.inventory._sources:
res = combine_vars(res, self.get_plugin_vars(inventory_dir, group))
if group.priority != 1:
res['ansible_group_priority'] = group.priority
return res
def _get_host_variables(self, host):
if self.options.export:
hostvars = host.get_vars()
# FIXME: add switch to skip vars plugins
# add vars plugin info
for inventory_dir in self.inventory._sources:
hostvars = combine_vars(hostvars, self.get_plugin_vars(inventory_dir, host))
else:
if self._new_api:
hostvars = self.vm.get_vars(host=host, include_hostvars=False)
else:
hostvars = self.vm.get_vars(self.loader, host=host, include_hostvars=False)
return hostvars
def _get_group(self, gname):
if self._new_api:
group = self.inventory.groups.get(gname)
else:
group = self.inventory.get_group(gname)
return group
def _remove_internal(self, dump):
for internal in INTERNAL_VARS:
if internal in dump:
del dump[internal]
def _remove_empty(self, dump):
# remove empty keys
for x in ('hosts', 'vars', 'children'):
if x in dump and not dump[x]:
del dump[x]
def _show_vars(self, dump, depth):
result = []
self._remove_internal(dump)
if self.options.show_vars:
for (name, val) in sorted(dump.items()):
result.append(self._graph_name('{%s = %s}' % (name, val), depth))
return result
def _graph_name(self, name, depth=0):
if depth:
name = " |" * (depth) + "--%s" % name
return name
def _graph_group(self, group, depth=0):
result = [self._graph_name('@%s:' % group.name, depth)]
depth = depth + 1
for kid in sorted(group.child_groups, key=attrgetter('name')):
result.extend(self._graph_group(kid, depth))
if group.name != 'all':
for host in sorted(group.hosts, key=attrgetter('name')):
result.append(self._graph_name(host.name, depth))
result.extend(self._show_vars(host.get_vars(), depth + 1))
result.extend(self._show_vars(self._get_group_variables(group), depth))
return result
def inventory_graph(self):
start_at = self._get_group(self.options.pattern)
if start_at:
return '\n'.join(self._graph_group(start_at))
else:
raise AnsibleOptionsError("Pattern must be valid group name when using --graph")
def json_inventory(self, top):
def format_group(group):
results = {}
results[group.name] = {}
if group.name != 'all':
results[group.name]['hosts'] = [h.name for h in sorted(group.hosts, key=attrgetter('name'))]
results[group.name]['children'] = []
for subgroup in sorted(group.child_groups, key=attrgetter('name')):
results[group.name]['children'].append(subgroup.name)
results.update(format_group(subgroup))
if self.options.export:
results[group.name]['vars'] = self._get_group_variables(group)
self._remove_empty(results[group.name])
return results
results = format_group(top)
# populate meta
results['_meta'] = {'hostvars': {}}
hosts = self.inventory.get_hosts()
for host in hosts:
hvars = self._get_host_variables(host)
if hvars:
self._remove_internal(hvars)
results['_meta']['hostvars'][host.name] = hvars
return results
def yaml_inventory(self, top):
seen = []
def format_group(group):
results = {}
# initialize group + vars
results[group.name] = {}
# subgroups
results[group.name]['children'] = {}
for subgroup in sorted(group.child_groups, key=attrgetter('name')):
if subgroup.name != 'all':
results[group.name]['children'].update(format_group(subgroup))
# hosts for group
results[group.name]['hosts'] = {}
if group.name != 'all':
for h in sorted(group.hosts, key=attrgetter('name')):
myvars = {}
if h.name not in seen: # avoid defining host vars more than once
seen.append(h.name)
myvars = self._get_host_variables(host=h)
self._remove_internal(myvars)
results[group.name]['hosts'][h.name] = myvars
if self.options.export:
gvars = self._get_group_variables(group)
if gvars:
results[group.name]['vars'] = gvars
self._remove_empty(results[group.name])
return results
return format_group(top)
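
if __name__ == '__main__':
    # Hedged, self-contained sketch of the indentation scheme produced by
    # InventoryCLI._graph_name()/_graph_group() for --graph output. The group
    # tree below is made up; real runs walk Ansible's inventory objects instead.
    def _graph_name(name, depth=0):
        return (" |" * depth + "--%s" % name) if depth else name

    def _graph(group, depth=0):
        lines = [_graph_name('@%s:' % group['name'], depth)]
        for child in group.get('children', []):
            lines.extend(_graph(child, depth + 1))
        for hostname in group.get('hosts', []):
            lines.append(_graph_name(hostname, depth + 1))
        return lines

    _tree = {'name': 'all', 'children': [
        {'name': 'web', 'hosts': ['web1', 'web2']},
        {'name': 'db', 'hosts': ['db1']},
    ]}
    print('\n'.join(_graph(_tree)))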
|
##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, [email protected], All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class PyDxchange(PythonPackage):
"""DXchange provides an interface with tomoPy and raw tomographic data
collected at different synchrotron facilities."""
homepage = "https://github.com/data-exchange/dxchange"
url = "https://github.com/data-exchange/dxchange/archive/v0.1.2.tar.gz"
import_modules = ['dxchange']
version('0.1.2', '36633bb67a1e7d1fb60c2300adbcbab3')
depends_on('py-setuptools', type='build')
depends_on('py-numpy', type=('build', 'run'))
depends_on('py-scipy', type=('build', 'run'))
depends_on('py-h5py', type=('build', 'run'))
depends_on('py-six', type=('build', 'run'))
depends_on('py-netcdf4', type=('build', 'run'))
depends_on('py-spefile', type=('build', 'run'))
depends_on('py-edffile', type=('build', 'run'))
depends_on('py-tifffile', type=('build', 'run'))
depends_on('py-dxfile', type=('build', 'run'))
depends_on('py-olefile', type=('build', 'run'))
depends_on('py-astropy', type=('build', 'run'))
|
# Python module for thefuckingweather.com, version 3.0
# Copyright (C) 2013 Red Hat, Inc., and others.
# https://github.com/ianweller/python-thefuckingweather
#
# This program is free software. It comes without any warranty, to
# the extent permitted by applicable law. You can redistribute it
# and/or modify it under the terms of the Do What The Fuck You Want
# To Public License, Version 2, as published by Sam Hocevar. See
# http://sam.zoy.org/wtfpl/COPYING for more details.
#
# Credits:
# - Colin Rice for fixing the (no-longer used) regexps to allow for
# negative temperatures
"""Scrapes data from www.thefuckingweather.com for a given location."""
from cloudbot import hook
from bs4 import BeautifulSoup
from optparse import OptionParser
import urllib.parse
import urllib.request
DEGREE_SYMBOL = "F"
class LocationError(Exception):
"""
    The website reported an "I CAN'T FIND THAT SHIT" error, which could mean
either the server has no clue what to do with your location or that it
messed up.
"""
def __init__(self):
Exception.__init__(self, ("I CAN'T FIND THAT SHIT returned "
"from website"))
class ParseError(Exception):
"""
Something is wrong with the code or the site owner updated his template.
"""
def __init__(self, lookup):
Exception.__init__(
self, """Couldn't parse the website: lookup {0} failed
Please report what you did to get this error and this full Python traceback
to [email protected]. Thanks!""".format(lookup))
@hook.command("tfw", autohelp=False)
def get_weather(text):
"""
Retrieves weather and forecast data for a given location.
Data is presented in a dict with three main elements: "location" (the
location presented by TFW), "current" (current weather data) and "forecast"
(a forecast of the next two days, with highs, lows, and what the weather
will be like).
"current" is a dictionary with three elements: "temperature" (an integer),
"weather" (a list of descriptive elements about the weather, e.g., "ITS
FUCKING HOT", which may be coupled with something such as "AND THUNDERING")
and "remark" (a string printed by the server which is meant to be witty but
is sometimes not. each to their own, I guess).
"forecast" is a list of dictionaries, which each contain the keys "day" (a
three-letter string consisting of the day of week), "high" and "low"
(integers representing the relative extreme temperature of the day), and
"weather" (a basic description of the weather, such as "Scattered
Thunderstorms").
    Temperatures are reported in Fahrenheit (the Celsius query option is
    currently disabled in this plugin).
If you need a degree symbol, you can use thefuckingweather.DEGREE_SYMBOL.
"""
# Generate query string
query = {"where": text}
# if celsius:
# query["unit"] = "c"
query_string = urllib.parse.urlencode(query)
# Fetch HTML
url = "http://www.thefuckingweather.com/?" + query_string
data = urllib.request.urlopen(url).read()
soup = BeautifulSoup(data)
# Check for an error report
try:
large = soup.find("p", {"class": "large"})
if not large:
raise ParseError("p.large")
if large.text == "I CAN'T FIND THAT SHIT":
raise LocationError()
except:
return("RESPONSE FROM THEFUCKINGWEATHER.COM: I CAN'T FIND THAT SHIT!")
# No error, so parse current weather data
return_val = {"current": {}, "forecast": []}
location_span = soup.find(id="locationDisplaySpan")
if not location_span:
raise ParseError("#locationDisplaySpan")
return_val["location"] = location_span.text
temp = soup.find("span", {"class": "temperature"})
if not temp:
raise ParseError("span.temperature")
try:
return_val["current"]["temperature"] = int(temp.text)
except ValueError:
raise ParseError("span.temperature is not an int")
# we called the "flavor" the remark before the website updated so now this
# is just plain confusing
remark = soup.find("p", {"class": "remark"})
if not remark:
raise ParseError("p.remark")
special_cond = soup.find("p", {"class": "specialCondition"})
if special_cond:
return_val["current"]["weather"] = (remark.text, special_cond.text)
else:
return_val["current"]["weather"] = (remark.text,)
flavor = soup.find("p", {"class": "flavor"})
if not flavor:
raise ParseError("p.flavor")
return_val["current"]["remark"] = flavor.text
# the fucking forecast
return_val["forecast"] = list()
forecast = soup.find("div", {"class": "forecastBody"})
if not forecast:
raise ParseError("div.forecastBody")
try:
day_row, high_row, low_row, forecast_row = forecast.findAll("tr")
except ValueError:
raise ParseError("div.forecastBody tr count is not 4")
days = [x.text for x in day_row.findAll("th")[1:]]
highs = [int(x.text) for x in high_row.findAll("td")]
lows = [int(x.text) for x in low_row.findAll("td")]
forecasts = [x.text for x in forecast_row.findAll("td")]
if not (len(days) == len(highs) == len(lows) == len(forecasts)):
raise ParseError("forecast counts don't match up")
for i in range(len(days)):
return_val["forecast"].append({"day": days[i],
"high": highs[i],
"low": lows[i],
"weather": forecasts[i]})
tfw = ("The Fucking Weather for " "({0})".format(return_val["location"])) + ("{0}{1}?! {2}".format(return_val["current"]["temperature"],
DEGREE_SYMBOL,
return_val["current"]["weather"][0])) + " " + (return_val["current"]["remark"])
return tfw
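
if __name__ == '__main__':
    # Hedged, offline sketch of the scraping approach in get_weather(): the HTML
    # fragment below is made up and only mirrors the selectors the parser relies
    # on (no request is made to thefuckingweather.com).
    _html = ('<p id="locationDisplaySpan">Anywhere, USA</p>'
             '<span class="temperature">72</span>'
             '<p class="remark">ITS NICE OUT</p>'
             '<p class="flavor">Enjoy it while it lasts.</p>')
    _soup = BeautifulSoup(_html, "html.parser")
    print(_soup.find(id="locationDisplaySpan").text,
          int(_soup.find("span", {"class": "temperature"}).text),
          _soup.find("p", {"class": "remark"}).text,
          _soup.find("p", {"class": "flavor"}).text)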
|
# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Utility for eagerly executing operations in parallel on multiple devices."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import threading
from tensorflow.python import _pywrap_parallel_device
from tensorflow.python.distribute import device_util
from tensorflow.python.distribute.parallel_device import saving
from tensorflow.python.eager import context
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.tpu.ops import tpu_ops
_next_device_number = 0
_next_device_number_lock = threading.Lock()
# TODO(allenl): Expand this docstring once things like getting components on and
# off the device are stable.
#
# TODO(allenl): Make multi-client work; we need an offset for device IDs, and an
# indication of how many other devices there are total for collectives which
# don't have a number of participants hard-coded in their attributes.
class ParallelDevice(object):
"""A device which executes operations in parallel."""
def __init__(self, components):
"""Creates a device which executes operations in parallel on `components`.
Args:
components: A list of device names. Each operation executed on the
returned device executes on these component devices.
"""
global _next_device_number, _next_device_number_lock
self.components = tuple(device_util.canonicalize(d) for d in components)
ctx = context.context()
with _next_device_number_lock:
# TODO(allenl): Better names for parallel devices (right now "CUSTOM" is
# special-cased).
self._name = "{}/device:CUSTOM:{}".format(ctx.host_address_space(),
_next_device_number)
_next_device_number += 1
device, device_info = _pywrap_parallel_device.GetParallelDeviceCapsules(
self._name, self.components)
context.register_custom_device(device, self._name, device_info)
self._device_ids = None
self._device_scope = None
self._saving_scope = None
def pack(self, tensors):
"""Create a tensor on the parallel device from a sequence of tensors.
Args:
tensors: A flat list of tensors, one per device in `self.components`.
Returns:
A single tensor placed on the ParallelDevice.
"""
self._assert_eager()
with ops.device(self._name):
return tpu_ops.tpu_replicated_input(inputs=tensors)
def unpack(self, parallel_tensor):
"""Unpack a parallel tensor into its components.
Args:
parallel_tensor: A tensor placed on the ParallelDevice.
Returns:
A flat list of tensors, one per `self.components`.
"""
self._assert_eager()
with ops.device(self._name):
return tpu_ops.tpu_replicated_output(
parallel_tensor, num_replicas=len(self.components))
@property
def device_ids(self):
"""A parallel tensor with scalar integers numbering component devices.
Each device ID is placed on its corresponding device, in the same order as
the `components` constructor argument.
Returns:
A parallel tensor containing 0 on the first device, 1 on the second, etc.
"""
if self._device_ids is None:
# device_ids may be called from inside a tf.function, in which case the
# function captures the eager tensor. We can't pack tensors in a function
# at the moment, and even if we could we don't want to hold on to a
# symbolic tensor, so we need to init_scope out of the function
# temporarily.
with ops.init_scope():
# TODO(allenl): Functions which capture eager device ID tensors won't be
# saveable in SavedModels. Ideally we'd run a DeviceID op every time
# device IDs are required, with functions using the op in their bodies
# but not hard-coding a fixed number of devices (so they can be re-used
# with a different replica count).
device_ids_list = []
for index, device in enumerate(self.components):
with ops.device(device):
# The identity op ensures each device ID tensor is placed on its
# device.
device_ids_list.append(
array_ops.identity(constant_op.constant(index)))
self._device_ids = self.pack(device_ids_list)
return self._device_ids
def _assert_eager(self):
"""Verifies that tracing is not active."""
if not context.executing_eagerly():
raise NotImplementedError(
"ParallelDevice is currently not supported inside `tf.function`. It "
"can however run calls to a `tf.function` in parallel:\n\n"
"with ParallelDevice() as p:\n f()")
def __enter__(self):
"""Runs ops in parallel, makes variables which save independent buffers."""
if (self._device_scope is not None or self._saving_scope is not None):
raise AssertionError(
"Re-entered a ParallelDevice scope without first exiting it.")
self._assert_eager()
self._device_scope = ops.device(self._name)
self._saving_scope = saving.independent_buffers(self)
self._device_scope.__enter__()
# TODO(allenl): Fixing saving in Python is a bit odd. One alternative would
# be to provide a hook for the custom device to create save specs/etc., then
# call that hook from the default variable implementation if the variable is
# on a custom device. We'll likely want similar hooks for repr() and such.
self._saving_scope.__enter__()
return self
def __exit__(self, typ, exc, tb):
self._device_scope.__exit__(typ, exc, tb)
self._saving_scope.__exit__(typ, exc, tb)
self._device_scope = None
self._saving_scope = None
|
import win32com.axscript.axscript
import winerror
from win32com.axscript import axscript
from win32com.server import exception, util
import pythoncom
class AXEngine:
def __init__(self, site, engine):
self.eScript = self.eParse = self.eSafety = None
if type(engine) == type(''):
engine = pythoncom.CoCreateInstance(engine,
None,
pythoncom.CLSCTX_SERVER,
pythoncom.IID_IUnknown)
self.eScript = engine.QueryInterface(axscript.IID_IActiveScript)
self.eParse = engine.QueryInterface(axscript.IID_IActiveScriptParse)
self.eSafety = engine.QueryInterface(axscript.IID_IObjectSafety)
self.eScript.SetScriptSite(site)
self.eParse.InitNew()
def __del__(self):
self.Close()
def GetScriptDispatch(self, name = None):
return self.eScript.GetScriptDispatch(name)
def AddNamedItem(self, item, flags):
return self.eScript.AddNamedItem(item, flags)
# Some helpers.
def AddCode(self, code, flags=0):
self.eParse.ParseScriptText(code, None, None, None, 0, 0, flags)
def EvalCode(self, code):
return self.eParse.ParseScriptText(code, None, None, None, 0, 0, axscript.SCRIPTTEXT_ISEXPRESSION)
def Start(self):
# Should maybe check state?
# Do I need to transition through?
self.eScript.SetScriptState(axscript.SCRIPTSTATE_STARTED)
# self.eScript.SetScriptState(axscript.SCRIPTSTATE_CONNECTED)
def Close(self):
if self.eScript:
self.eScript.Close()
self.eScript = self.eParse = self.eSafety = None
def SetScriptState(self, state):
self.eScript.SetScriptState(state)
IActiveScriptSite_methods = [
'GetLCID',
'GetItemInfo',
'GetDocVersionString',
'OnScriptTerminate',
'OnStateChange',
'OnScriptError',
'OnEnterScript',
'OnLeaveScript',
]
class AXSite:
"""An Active Scripting site. A Site can have exactly one engine.
"""
_public_methods_ = IActiveScriptSite_methods
_com_interfaces_ = [ axscript.IID_IActiveScriptSite ]
def __init__(self, objModel={}, engine = None, lcid=0):
self.lcid = lcid
self.objModel = { }
        for name, object in objModel.items():
            # Greg's code lower-cased the name here; that is left to the caller
            # if desired.
            self.objModel[name] = object
self.engine = None
if engine:
self._AddEngine(engine)
def AddEngine(self, engine):
"""Adds a new engine to the site.
engine can be a string, or a fully wrapped engine object.
"""
if type(engine)==type(''):
newEngine = AXEngine(util.wrap(self), engine)
else:
newEngine = engine
self.engine = newEngine
flags = axscript.SCRIPTITEM_ISVISIBLE | axscript.SCRIPTITEM_NOCODE | axscript.SCRIPTITEM_GLOBALMEMBERS | axscript.SCRIPTITEM_ISPERSISTENT
        for name in self.objModel:
newEngine.AddNamedItem(name, flags)
newEngine.SetScriptState(axscript.SCRIPTSTATE_INITIALIZED)
return newEngine
# B/W compat
_AddEngine = AddEngine
def _Close(self):
self.engine.Close()
self.objModel = {}
def GetLCID(self):
return self.lcid
def GetItemInfo(self, name, returnMask):
if name not in self.objModel:
raise exception.Exception(scode=winerror.TYPE_E_ELEMENTNOTFOUND, desc='item not found')
### for now, we don't have any type information
if returnMask & axscript.SCRIPTINFO_IUNKNOWN:
return (self.objModel[name], None)
return (None, None)
def GetDocVersionString(self):
return 'Python AXHost version 1.0'
def OnScriptTerminate(self, result, excepInfo):
pass
def OnStateChange(self, state):
pass
def OnScriptError(self, errorInterface):
return winerror.S_FALSE
def OnEnterScript(self):
pass
def OnLeaveScript(self):
pass
|
from __future__ import print_function, division, absolute_import
import bz2
import sys
import zlib
from toolz import identity
from ..compatibility import gzip_compress, gzip_decompress, GzipFile
from ..utils import ignoring
def noop_file(file, **kwargs):
return file
compress = {'gzip': gzip_compress,
'zlib': zlib.compress,
'bz2': bz2.compress,
None: identity}
decompress = {'gzip': gzip_decompress,
'zlib': zlib.decompress,
'bz2': bz2.decompress,
None: identity}
files = {'gzip': lambda f, **kwargs: GzipFile(fileobj=f, **kwargs),
None: noop_file}
seekable_files = {None: noop_file}
with ignoring(ImportError):
import snappy
compress['snappy'] = snappy.compress
decompress['snappy'] = snappy.decompress
with ignoring(ImportError):
import lz4
compress['lz4'] = lz4.LZ4_compress
decompress['lz4'] = lz4.LZ4_uncompress
with ignoring(ImportError):
from ..compatibility import LZMAFile, lzma_compress, lzma_decompress
compress['xz'] = lzma_compress
decompress['xz'] = lzma_decompress
files['xz'] = LZMAFile
# Seekable xz files actually tend to scan whole file - see `get_xz_blocks`
# with ignoring(ImportError):
# import lzma
# seekable_files['xz'] = lzma.LZMAFile
#
# with ignoring(ImportError):
# import lzmaffi
# seekable_files['xz'] = lzmaffi.LZMAFile
if sys.version_info[0] >= 3:
import bz2
files['bz2'] = bz2.BZ2File
def get_xz_blocks(fp):
from lzmaffi import (STREAM_HEADER_SIZE, decode_stream_footer,
decode_index, LZMAError)
fp.seek(0, 2)
def _peek(f, size):
data = f.read(size)
f.seek(-size, 1)
return data
if fp.tell() < 2 * STREAM_HEADER_SIZE:
raise LZMAError("file too small")
# read stream paddings (4 bytes each)
fp.seek(-4, 1)
padding = 0
while _peek(fp, 4) == b'\x00\x00\x00\x00':
fp.seek(-4, 1)
padding += 4
fp.seek(-STREAM_HEADER_SIZE + 4, 1)
stream_flags = decode_stream_footer(_peek(fp, STREAM_HEADER_SIZE))
fp.seek(-stream_flags.backward_size, 1)
index = decode_index(_peek(fp, stream_flags.backward_size), padding)
return {'offsets': [b.compressed_file_offset for i, b in index],
'lengths': [b.unpadded_size for i, b in index],
'check': stream_flags.check}
def xz_decompress(data, check):
from lzmaffi import decode_block_header_size, LZMADecompressor, FORMAT_BLOCK
hsize = decode_block_header_size(data[:1])
header = data[:hsize]
dc = LZMADecompressor(format=FORMAT_BLOCK, header=header,
unpadded_size=len(data), check=check)
return dc.decompress(data[len(header):])
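
if __name__ == '__main__':
    # A minimal, self-contained sketch of the codec-registry pattern used above:
    # look up compress/decompress callables by scheme name. Only stdlib codecs
    # are used here, so the optional snappy/lz4/xz branches are not exercised.
    import bz2 as _bz2
    import zlib as _zlib
    _compress = {'zlib': _zlib.compress, 'bz2': _bz2.compress, None: lambda b: b}
    _decompress = {'zlib': _zlib.decompress, 'bz2': _bz2.decompress, None: lambda b: b}
    _payload = b'banana' * 100
    for _scheme in ('zlib', 'bz2', None):
        assert _decompress[_scheme](_compress[_scheme](_payload)) == _payload
    print('round-trips ok')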
|
# Copyright (C) 2013-2014 SignalFuse, Inc.
# Copyright (C) 2015-2018 SignalFx, Inc.
#
# Docker container orchestration utility.
from __future__ import print_function
import collections
try:
from docker.errors import APIError
except ImportError:
# Fall back to <= 0.3.1 location
from docker.client import APIError
import json
import time
try:
import urlparse
except ImportError:
# Try for Python3
from urllib import parse as urlparse
from docker import auth
from .. import audit
from .. import exceptions
from ..termoutput import green, blue, red, time_ago
CONTAINER_STATUS_FMT = '{:<25s} '
TASK_RESULT_FMT = '{:<10s}'
_DEFAULT_RETRY_ATTEMPTS = 3
_DEFAULT_RETRY_SPEC = {'attempts': _DEFAULT_RETRY_ATTEMPTS, 'when': set([])}
class Task:
"""Base class for tasks acting on containers."""
def __init__(self, action, o, container):
"""Initialize the base task parameters.
Args:
o (termoutput.OutputFormatter): the output formatter used for task
output.
container (entities.Container): the container the task operates on.
"""
self.action = action
self.o = o
self.container = container
def _wait_for_status(self, cond, retries=10):
"""Wait for the container's status to comply to the given condition."""
while retries >= 0:
if cond():
return True
retries -= 1
if retries >= 0:
time.sleep(0.5)
return False
def _check_for_state(self, state, cond):
"""Wait for the container to reach the given lifecycle state by executing
the corresponding, configured lifecycle checks, taking into account the
container state (through _wait_for_status) while the checks wait for
the target status to be reached.
Args:
state (string): the target lifecycle state.
cond (lambda): a function that should return True if the container
reaches the desired lifecycle state.
"""
checks = self.container.start_lifecycle_checks(state)
if not checks:
return self._wait_for_status(cond)
# Wait for all checks to complete
while not checks.ready():
checks.wait(1)
if not self._wait_for_status(cond, retries=1):
return False
# Check results
for check in checks.get():
if not check:
return False
return True
def run(self, auditor=None):
if auditor:
auditor.action(action=self.action, level=audit.DEBUG,
what=self.container)
try:
self._run()
if auditor:
auditor.success(action=self.action, level=audit.DEBUG,
what=self.container)
except Exception as e:
if auditor:
auditor.error(action=self.action, what=self.container,
message=e)
exceptions.raise_with_tb()
def _run(self):
raise NotImplementedError
class StatusTask(Task):
"""Check for and display a container's status."""
def __init__(self, o, container):
Task.__init__(self, 'status', o, container)
def _run(self):
self.o.reset()
self.o.pending('checking...')
try:
if self.container.is_running():
self.o.commit(green(CONTAINER_STATUS_FMT.format(
self.container.shortid_and_tag)))
self.o.commit(green('running{}'.format(
time_ago(self.container.started_at))))
else:
self.o.commit(CONTAINER_STATUS_FMT.format(
self.container.shortid_and_tag))
self.o.commit(red('down{}'.format(
time_ago(self.container.finished_at))))
except Exception:
self.o.commit(CONTAINER_STATUS_FMT.format('-'))
self.o.commit(red(TASK_RESULT_FMT.format('host down')))
return
class StartTask(Task):
"""Start a container, refreshing the image if requested.
If reuse is True, the container will not be removed and re-created
if it exists."""
def __init__(self, o, container, registries={}, refresh=False,
reuse=False):
Task.__init__(self, 'start', o, container)
self._registries = registries
self._refresh = refresh
self._reuse = reuse
def _run(self):
self.o.reset()
error = None
try:
# TODO: None is used to indicate that no action was performed
# because the container and its application were already
# running. This makes the following code not very nice and this
# could be improved.
result = self._create_and_start_container()
if result is None:
self.o.commit(blue('up{}'.format(
time_ago(self.container.started_at))))
elif result:
self.o.commit(green('started'))
else:
self.o.commit(red('container did not start!'))
except Exception:
self.o.commit(red('error starting container!'))
raise
if result is False:
log = self.container.ship.backend.logs(self.container.id)
error = (
'Halting start sequence because {} failed to start!\n{}'
).format(self.container, log)
raise exceptions.ContainerOrchestrationException(
self.container, error.strip())
def _create_and_start_container(self):
"""Start the container.
If the container and its application are already running, no action is
performed and the function returns None to indicate that. Otherwise, a
new container must be created and started. To achieve this, any
existing container of the same name is first removed. Then, if
necessary or if requested, the container image is pulled from its
registry. Finally, the container is created and started, configured as
necessary. We then wait for the application to start and return True or
False depending on whether the start was successful."""
self.o.pending('checking service...')
if self.container.is_running():
self.o.commit(blue(CONTAINER_STATUS_FMT.format(
self.container.shortid_and_tag)))
# We use None as a special marker showing the container and the
# application were already running.
return None
if not self._check_for_state('pre-start', self.container.is_down):
raise Exception('failed pre-start lifecycle checks')
# Otherwise we need to start it.
if (not self._reuse) or (not self.container.status()):
CleanTask(self.o, self.container, standalone=False).run()
# Check if the image is available, or if we need to pull it down.
image = self.container.get_image_details()
if self._refresh or \
not list(filter(
lambda i: self.container.image in (i['RepoTags'] or []),
self.container.ship.backend.images(image['repository']))):
PullTask(self.o, self.container, self._registries,
standalone=False).run()
# Create and start the container.
ports = self.container.ports \
and list(map(lambda p: tuple(p['exposed'].split('/')),
self.container.ports.values())) \
or None
self.o.pending('creating container from {}...'.format(
self.container.short_image))
self.container.ship.backend.create_container(
image=self.container.image,
name=self.container.name,
hostname=self.container.hostname,
user=self.container.username,
environment=self.container.env,
volumes=list(self.container.get_volumes()),
cpu_shares=self.container.cpu_shares,
host_config=self.container.host_config,
ports=ports,
detach=True,
working_dir=self.container.workdir,
labels=self.container.labels,
command=self.container.command)
self.o.pending('waiting for container...')
if not self._wait_for_status(
lambda: self.container.status(refresh=True)):
raise exceptions.ContainerOrchestrationException(
self.container,
'Container status could not be obtained after creation!')
self.o.commit(green(CONTAINER_STATUS_FMT.format(
self.container.shortid_and_tag)))
ports = collections.defaultdict(list) if self.container.ports else None
if ports is not None:
for port in self.container.ports.values():
ports[port['exposed']].append(
(port['external'][0], port['external'][1].split('/')[0]))
self.o.pending('starting container {}...'
.format(self.container.id[:7]))
self.container.ship.backend.start(
self.container.id)
# Waiting one second and checking container state again to make sure
# initialization didn't fail.
self.o.pending('waiting for initialization...')
if not self._wait_for_status(self.container.is_running):
raise exceptions.ContainerOrchestrationException(
self.container,
'Container status could not be obtained after start!')
# Wait up for the container's application to come online.
self.o.pending('waiting for service...')
return self._check_for_state('running', self.container.is_running)
class StopTask(Task):
"""Stop a container."""
def __init__(self, o, container):
Task.__init__(self, 'stop', o, container)
def _run(self):
self.o.reset()
self.o.pending('checking container...')
try:
if not self.container.is_running():
self.o.commit(CONTAINER_STATUS_FMT.format(
self.container.shortid_and_tag))
self.o.commit(blue(TASK_RESULT_FMT.format('down')))
return
except Exception:
self.o.commit(CONTAINER_STATUS_FMT.format('-'))
self.o.commit(red(TASK_RESULT_FMT.format('host down')))
return
self.o.commit(green(CONTAINER_STATUS_FMT.format(
self.container.shortid_and_tag)))
try:
if not self._check_for_state(
'pre-stop', self.container.is_running):
raise Exception('failed pre-stop lifecycle checks')
self.o.pending('stopping service...')
self.container.ship.backend.stop(
self.container.id, timeout=self.container.stop_timeout)
if not self._check_for_state('stopped', self.container.is_down):
raise Exception('failed stopped lifecycle checks')
self.o.commit(green(TASK_RESULT_FMT.format('stopped')))
except Exception as e:
            # Stop failures are non-fatal, usually it's just the container
# taking more time to stop than the timeout allows.
self.o.commit(red('failed: {}'.format(e)))
class KillTask(Task):
"""Kill a container."""
def __init__(self, o, container):
Task.__init__(self, 'kill', o, container)
def _run(self):
self.o.reset()
self.o.pending('checking container...')
try:
if not self.container.is_running():
self.o.commit(CONTAINER_STATUS_FMT.format(
self.container.shortid_and_tag))
self.o.commit(blue(TASK_RESULT_FMT.format('down')))
return
except Exception:
self.o.commit(CONTAINER_STATUS_FMT.format('-'))
self.o.commit(red(TASK_RESULT_FMT.format('host down')))
return
self.o.commit(green(CONTAINER_STATUS_FMT.format(
self.container.shortid_and_tag)))
try:
self.o.pending('killing the service...')
self.container.ship.backend.kill(
self.container.id)
if not self._check_for_state('stopped', self.container.is_down):
raise Exception('failed killed lifecycle checks')
self.o.commit(green(TASK_RESULT_FMT.format('killed')))
except Exception as e:
# Stop failures are non-fatal, usually it's just the container
# taking more time to stop than the timeout allows.
self.o.commit(red('failed: {}'.format(e)))
class RestartTask(Task):
"""Task that restarts a container."""
def __init__(self, o, container, registries={}, refresh=False,
step_delay=0, stop_start_delay=0, reuse=False,
only_if_changed=False):
Task.__init__(self, 'restart', o, container)
self._registries = registries
self._refresh = refresh
self._step_delay = step_delay
self._stop_start_delay = stop_start_delay
self._reuse = reuse
self._only_if_changed = only_if_changed
def _run(self):
self.o.reset()
if self._refresh:
PullTask(self.o, self.container, self._registries,
standalone=False).run()
if self._only_if_changed:
if self.container.is_running():
self.o.pending('checking image...')
images = self.container.ship.get_image_ids()
if images.get(self.container.image) == \
self.container.status()['Image']:
self.o.commit(CONTAINER_STATUS_FMT.format(
self.container.shortid_and_tag))
self.o.commit(blue('up to date'))
return
if self._step_delay:
self.o.pending('waiting {}s before restart...'
.format(self._step_delay))
time.sleep(self._step_delay)
StopTask(self.o, self.container).run()
self.o.reset()
if self._stop_start_delay:
self.o.pending('waiting {}s before starting...'
.format(self._stop_start_delay))
time.sleep(self._stop_start_delay)
StartTask(self.o, self.container, self._registries,
False, self._reuse).run()
class LoginTask(Task):
"""Log in with the registry hosting the image a container is based on.
Extracts the registry name from the image needed for the container, and if
authentication data is provided for that registry, login to it so a
subsequent pull operation can be performed.
"""
def __init__(self, o, container, registries={}):
Task.__init__(self, 'login', o, container)
self._registries = registries
def _run(self):
registry = LoginTask.registry_for_container(self.container,
self._registries)
if not registry:
# No registry found, or no registry login needed.
return
if not registry.get('username'):
registry_auth_config = auth.load_config().\
get(urlparse.urlparse(registry['registry']).netloc)
registry['username'] = registry_auth_config.get('username') \
if registry_auth_config else None
if not registry.get('username'):
# Still no username found; bail out.
return
retry_spec = LoginTask.get_registry_retry_spec(registry)
args = dict((k, registry[k]) for k in
['username', 'password', 'email', 'registry'])
self.o.reset()
self.o.pending('logging in to {}...'.format(registry['registry']))
attempts = retry_spec['attempts']
while attempts > 0:
try:
self.container.ship.backend.login(**args)
break
except APIError as e:
status = e.response.status_code
if status in retry_spec['when']:
self.o.pending(red('... got {}; retrying in 1s'
.format(status)))
attempts -= 1
time.sleep(1)
continue
raise exceptions.ContainerOrchestrationException(
self.container,
'Login to {} as {} failed: {}'
.format(registry['registry'], registry['username'], e))
@staticmethod
def registry_for_container(container, registries={}):
image = container.get_image_details()
if image['repository'].find('/') <= 0:
return None
registry, repo_name = image['repository'].split('/', 1)
if registry not in registries:
            # If the name from the image doesn't match a configured registry
            # name, try to find a matching registry by its FQDN.
for name, info in registries.items():
fqdn = urlparse.urlparse(info['registry']).netloc
if registry == fqdn or registry == fqdn.split(':')[0]:
registry = name
break
return registries.get(registry)
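    # A hedged illustration of the lookup above (the names are hypothetical,
    # not part of any real configuration): an image repository such as
    # 'registry.example.com:5000/team/app' is split on the first '/', and the
    # 'registry.example.com:5000' prefix is matched either against a configured
    # registry name or against the FQDN of its 'registry' URL, e.g.:
    #
    #   registries = {'internal': {'registry': 'https://registry.example.com:5000'}}
    #   # registry_for_container(container, registries) -> registries['internal']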
@staticmethod
def get_registry_retry_spec(registry):
"""Get a retry spec for a registry.
The retry spec is an object that defines how and when to retry image
pulls from a registry. It contains a maximum number of retries
('attempts') and a list of returned status codes to retry on ('when').
When nothing is configured, no retries are attempted (by virtue of the
'when' list being empty)."""
if not registry:
return _DEFAULT_RETRY_SPEC
spec = registry.get('retry', {})
spec['attempts'] = int(spec.get('attempts', _DEFAULT_RETRY_ATTEMPTS))
spec['when'] = set(spec.get('when', []))
return spec
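    # A minimal sketch of how a retry spec resolves, assuming a registry entry
    # shaped like the ones handled above (values are hypothetical):
    #
    #   registry = {'registry': 'https://registry.example.com',
    #               'retry': {'attempts': 5, 'when': [500, 502]}}
    #   spec = LoginTask.get_registry_retry_spec(registry)
    #   # spec == {'attempts': 5, 'when': {500, 502}}
    #
    # Without a 'retry' section, 'when' stays empty, so no retries happen even
    # though 'attempts' defaults to _DEFAULT_RETRY_ATTEMPTS.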
class PullTask(Task):
"""Pull (download) the image a container is based on."""
def __init__(self, o, container, registries={}, standalone=True):
Task.__init__(self, 'pull', o, container)
self._registries = registries
self._standalone = standalone
self._progress = {}
def _run(self):
self.o.reset()
        # First, attempt to log in if we can/need to.
LoginTask(self.o, self.container, self._registries).run()
self.o.pending('pulling image {}...'
.format(self.container.short_image))
registry = LoginTask.registry_for_container(self.container,
self._registries)
insecure = (urlparse.urlparse(registry['registry']).scheme == 'http'
if registry else False)
image = self.container.get_image_details()
# Pull the image (this may be a no-op, but that's fine).
retry_spec = LoginTask.get_registry_retry_spec(registry)
attempts = retry_spec['attempts']
while attempts > 0:
try:
for dlstatus in self.container.ship.backend.pull(
stream=True, insecure_registry=insecure, **image):
if dlstatus:
percentage = self._update_pull_progress(dlstatus)
self.o.pending('... {:.1f}%'.format(percentage))
break
except APIError as e:
status = e.response.status_code
if status in retry_spec['when']:
self.o.pending(red('... got {}; retrying in 1s'
.format(status)))
attempts -= 1
time.sleep(1)
continue
raise
if self._standalone:
self.o.commit(CONTAINER_STATUS_FMT.format(''))
self.o.commit(green(TASK_RESULT_FMT.format('done')))
def _update_pull_progress(self, last):
"""Update an image pull progress map with latest download progress
information for one of the image layers, and return the average of the
download progress of all layers as an indication of the overall
progress of the pull."""
last = json.loads(last.decode('utf-8'))
if 'error' in last:
raise exceptions.ContainerOrchestrationException(
self.container,
'Pull of image {} failed: {}'.format(
self.container.image,
last['errorDetail']['message'].encode('utf-8')))
try:
self._progress[last['id']] = (
100 if last['status'] == 'Download complete' else
(100.0 * last['progressDetail']['current'] /
last['progressDetail']['total']))
except Exception:
pass
        total = 0
        if self._progress:
            total = sum(self._progress.values()) / len(self._progress)
return total
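    # A rough sketch of the averaging above, with hypothetical layer ids (the
    # real keys come from the Docker pull status stream):
    #
    #   self._progress = {'layer-a': 100, 'layer-b': 50.0, 'layer-c': 0}
    #   # -> overall progress reported as 50.0%
    #
    # Every layer is weighted equally regardless of its size, so the number is
    # an indication of progress rather than an exact byte-level percentage.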
class CleanTask(Task):
"""Remove a container from Docker if it exists."""
def __init__(self, o, container, standalone=True):
Task.__init__(self, 'clean', o, container)
self._standalone = standalone
def _run(self):
self.o.reset()
status = self.container.status()
if not status:
if self._standalone:
self.o.commit(CONTAINER_STATUS_FMT.format('-'))
self.o.commit(blue(TASK_RESULT_FMT.format('absent')))
return
if status['State']['Running']:
self.o.commit(CONTAINER_STATUS_FMT.format(
self.container.shortid_and_tag))
self.o.commit(red(TASK_RESULT_FMT.format('skipped')))
return
self.o.pending('removing container {}...'.format(
self.container.shortid))
self.container.ship.backend.remove_container(self.container.id, v=True)
if self._standalone:
self.o.commit(CONTAINER_STATUS_FMT.format(
self.container.shortid))
self.o.commit(green(TASK_RESULT_FMT.format('removed')))
|
# -*- encoding: utf-8 -*-
from abjad import *
def test_markuptools_MarkupCommand___format___01():
a = markuptools.MarkupCommand('draw-circle', 1, 0.1, False)
b = markuptools.MarkupCommand('line', ['one', 'two', 'three'])
c = markuptools.MarkupCommand('rounded-box', b)
d = markuptools.MarkupCommand('combine', a, c)
e = markuptools.MarkupCommand('rotate', 45, d)
f = markuptools.MarkupCommand('triangle', False)
g = markuptools.MarkupCommand('concat', [e, f])
assert systemtools.TestManager.compare(
format(g, 'lilypond'),
r'''
\concat
{
\rotate
#45
\combine
\draw-circle
#1
#0.1
##f
\rounded-box
\line
{
one
two
three
}
\triangle
##f
}
''',
)
|
from fontTools.misc.transform import Transform
from defcon.objects.base import BaseObject
_defaultTransformation = (1, 0, 0, 1, 0, 0)
class Component(BaseObject):
"""
This object represents a reference to another glyph.
**This object posts the following notifications:**
========================== ====
Name Note
========================== ====
Component.Changed Posted when the *dirty* attribute is set.
Component.BaseGlyphChanged Posted when the *baseGlyph* attribute is set.
========================== ====
"""
changeNotificationName = "Component.Changed"
def __init__(self):
super(Component, self).__init__()
self._dirty = False
self._baseGlyph = None
self._transformation = tuple(_defaultTransformation)
self._boundsCache = None
self._controlPointBoundsCache = None
# ----------
# Attributes
# ----------
def _getBounds(self, boundsPen):
glyph = self.getParent()
if glyph is None:
return None
font = glyph.getParent()
if font is None:
return None
if self.baseGlyph not in font:
return None
pen = boundsPen(font)
self.draw(pen)
return pen.bounds
def _get_bounds(self):
from robofab.pens.boundsPen import BoundsPen
if self._boundsCache is None:
self._boundsCache = self._getBounds(BoundsPen)
return self._boundsCache
    bounds = property(_get_bounds, doc="The bounds of the component's outline expressed as a tuple of form (xMin, yMin, xMax, yMax).")
def _get_controlPointBounds(self):
from fontTools.pens.boundsPen import ControlBoundsPen
if self._controlPointBoundsCache is None:
self._controlPointBoundsCache = self._getBounds(ControlBoundsPen)
return self._controlPointBoundsCache
    controlPointBounds = property(_get_controlPointBounds, doc="The control bounds of all points in the component. This measures only the point positions; it does not measure curves, so curves without points at the extrema will not be properly measured.")
def _set_baseGlyph(self, value):
oldValue = self._baseGlyph
self._baseGlyph = value
self._destroyBoundsCache()
self.dirty = True
dispatcher = self.dispatcher
if dispatcher is not None:
dispatcher.postNotification(notification="Component.BaseGlyphChanged", observable=self, data=(oldValue, value))
def _get_baseGlyph(self):
return self._baseGlyph
    baseGlyph = property(_get_baseGlyph, _set_baseGlyph, doc="The glyph that the component references. Setting this will post *Component.BaseGlyphChanged* and *Component.Changed* notifications.")
def _set_transformation(self, value):
self._transformation = value
self._destroyBoundsCache()
self.dirty = True
def _get_transformation(self):
return self._transformation
    transformation = property(_get_transformation, _set_transformation, doc="The transformation matrix for the component. Setting this will post a *Component.Changed* notification.")
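    # A minimal sketch of the 6-tuple affine transformation stored above, in
    # (xScale, xyScale, yxScale, yScale, xOffset, yOffset) order as unpacked in
    # move() below; the values are hypothetical:
    #
    #   component.transformation = (1, 0, 0, 1, 100, 0)     # translate 100 units right
    #   component.transformation = (0.5, 0, 0, 0.5, 0, 0)   # scale to 50%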
# -----------
# Pen Methods
# -----------
def draw(self, pen):
"""
Draw the component with **pen**.
"""
from robofab.pens.adapterPens import PointToSegmentPen
pointPen = PointToSegmentPen(pen)
self.drawPoints(pointPen)
def drawPoints(self, pointPen):
"""
Draw the component with **pointPen**.
"""
pointPen.addComponent(self._baseGlyph, self._transformation)
# -------
# Methods
# -------
    def move(self, pt):
        """
        Move the component by **pt**, an (x, y) tuple.
        This posts a *Component.Changed* notification.
        """
        x, y = pt
        xScale, xyScale, yxScale, yScale, xOffset, yOffset = self._transformation
        xOffset += x
        yOffset += y
        self.transformation = (xScale, xyScale, yxScale, yScale, xOffset, yOffset)
    def pointInside(self, coordinates, evenOdd=False):
        """
        Returns a boolean indicating if **coordinates**, an (x, y) tuple,
        is in the "black" area of the component.
        """
        from fontTools.pens.pointInsidePen import PointInsidePen
        x, y = coordinates
        glyph = self.getParent()
        if glyph is None:
            return False
        font = glyph.getParent()
        if font is None:
            return False
        pen = PointInsidePen(glyphSet=font, testPoint=(x, y), evenOdd=evenOdd)
        self.draw(pen)
        return pen.getResult()
# ----
# Undo
# ----
def getDataToSerializeForUndo(self):
data = dict(
baseGlyph=self.baseGlyph,
transformation=self.transformation
)
return data
def loadDeserializedDataFromUndo(self, data):
self.baseGlyph = data["baseGlyph"]
self.transformation = data["transformation"]
def _destroyBoundsCache(self):
self._boundsCache = None
self._controlPointBoundsCache = None
if __name__ == "__main__":
import doctest
doctest.testmod()
|
#!/usr/bin/env python3
from functions import *
# ChangelogArticle
class ChangelogArticle:
__slots__ = ['version', 'date', 'list']
    def __init__(self, version='', date='', list=None):
        self.version = version
        self.date = date
        self.list = list if list is not None else []
# Changelog
class Changelog:
def __init__(self, title=''):
#self.app = 'Davinci'
self.title = title
self.last_article = ChangelogArticle()
self.all_articles = []
self.idx = 0
self.add_articles()
def __iter__(self):
return self
def __next__(self):
self.idx += 1
try:
return self.all_articles[self.idx-1]
except IndexError:
self.idx = 0
raise StopIteration
def __getitem__(self, idx):
return self.all_articles[idx]
def version(self):
return self.last_article.version
def date(self):
return self.last_article.date
#return ConvertDate(self.last_article.date, '%Y-%m-%d', '%d %b %Y')
def add(self, version, date, list):
date = ConvertDate(date, '%Y-%m-%d', '%d %b %Y')
self.last_article = ChangelogArticle(version, date, list)
self.all_articles.insert(0, self.last_article)
#self.all_articles.append(self.last_article)
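    # A hedged usage sketch (the title is hypothetical; ConvertDate comes from
    # the functions module, which is not shown here):
    #
    #   log = Changelog('Example changelog')
    #   log.version(), log.date()      # latest entry, e.g. ('1.0.7', '19 Aug 2018')
    #   for article in log:            # newest to oldest, because of insert(0, ...)
    #       print(article.version, article.date)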
def add_articles(self):
self.add(version="1.0.0",
date="2017-12-09",
list=[ 'First release' ])
self.add(version="1.0.1",
date="2018-01-11",
list=[ 'Adds support of the new input format: HEiDi at MLZ instrument log data',
'Fixes an issue with automatic determination of the peak and background parameters',
'Fixes a bug with cursor position change in the input data viewer when go through the scans',
'Fixes a crash when open a new file',
'Fixes a sidebar issue with the tabs width' ])
self.add(version="1.0.2",
date="2018-01-27",
                 list=[ 'Fixes a bug with the update function of the installer program' ])
self.add(version="1.0.3",
date="2018-03-13",
list=[ 'Fixes issue #2, Not reading new files with Oxford magnet on POLI' ])
self.add(version="1.0.4",
date="2018-03-28",
list=[ 'Adds support of the new input format for POLI: polarized NICOS data with adet',
'Adds user manual and report issue links to the Help menu',
'Adds a program icon and desktop file for the Linux version' ])
self.add(version="1.0.5",
date="2018-05-30",
list=[ 'Adds calculation of the direction cosines (Issue #3)', # update manual
'Adds reading of Psi angle from the HEiDi instrument data collection files', # update manual
'Adds natural sort order and sort by date and time to the table widgets', # update manual
'Adds ShelX output with real (non-integer) Miller indices hkl',
'Fixes calculation of the Phi angle in the 4-circle geometry',
'Fixes an issue with Tbar/D9 output (theta + temperature, psi, fwhm)',
'Fixes some minor issues' ])
self.add(version="1.0.6",
date="2018-07-09",
list=[ 'Adds reading of Psi angle from the NICOS instrument data collection files (Issue #5)', # update manual
'Adds UMWEG output for the calculation of multiple-diffraction patterns',
                        'Adds some multi-threading features in order to speed up the data treatment' ]) # update manual
self.add(version="1.0.7",
date="2018-08-19",
list=[ 'Adds progress bar to give an indication of how long the data processing is going to take',
                        'Adds more multi-threading features in order to speed up the data processing',
'Fixes an issue with ShelX output format in the console version of the program',
'Fixes multiple other issues' ]) # update manual
|
#!/usr/bin/env python3
'''Create YAML for dashboard page by querying GitHub repositories.'''
import sys
import time
import yaml
CONTROLS = (
('swcarpentry/shell-novice', 'Unix Shell'),
('swcarpentry/git-novice', 'Git'),
('swcarpentry/hg-novice', 'Mercurial'),
('swcarpentry/sql-novice-survey', 'SQL'),
('swcarpentry/python-novice-inflammation', 'Python'),
('swcarpentry/r-novice-inflammation', 'R'),
('swcarpentry/matlab-novice-inflammation', 'MATLAB'),
('swcarpentry/make-novice', 'Make'),
('swcarpentry/capstone-novice-spreadsheet-biblio', 'From Excel to a Database via Python'),
('katyhuff/python-testing', 'Testing and Continuous Integration with Python'),
('DamienIrving/capstone-oceanography', 'Data Management in the Ocean, Weather and Climate Sciences'),
('swcarpentry/matlab-novice-capstone-biomed', 'Controlling a Quadcoptor With Your Mind'),
('swcarpentry/web-data-python', 'Working With Data on the Web'),
('swcarpentry/amy', 'Workshop administration tool'),
('swcarpentry/website', 'Software Carpentry website'),
)
def get_connection(token_file):
'''Get a connection to GitHub if the library and token file are available.'''
try:
from github import Github
with open(token_file, 'r') as reader:
token = reader.read().strip()
cnx = Github(token)
    except Exception:
cnx = None
return cnx
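# A minimal sketch of the expected input (path and token value are
# hypothetical): the token file holds a single GitHub personal access token,
# which is stripped and passed to github.Github().
#
#   cnx = get_connection('token.txt')   # a github.Github client, or None if
#                                       # PyGithub is missing or the read fails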
def process(cnx):
'''Gather information.'''
if not cnx:
return []
all_records = []
dashboard = {
'records' : all_records,
'num_repos' : 0,
'num_issues' : 0,
'timestamp' : time.strftime('%Y-%m-%dT%H:%M:%SZ', time.gmtime())
}
for (ident, description) in CONTROLS:
print('+', ident)
dashboard['num_repos'] += 1
r = cnx.get_repo(ident)
record = {'ident' : ident,
'description' : description,
'url' : str(r.html_url),
'issues' : []}
all_records.append(record)
for i in r.get_issues(state='open'):
try:
record['issues'].append({'number' : i.number,
'title' : str(i.title),
'url' : str(i.html_url),
'updated' : i.updated_at.strftime('%Y-%m-%d')})
except Exception as e:
print('failed with', i.number, i.title, i.html_url, i.updated_at, file=sys.stderr)
dashboard['num_issues'] += 1
record['issues'].sort(key=lambda x: x['updated'])
return dashboard
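# A rough sketch of the structure yaml.dump() receives from process() (key
# order, counts and dates are illustrative only; the repository shown is the
# first entry in CONTROLS):
#
#   num_repos: 15
#   num_issues: 42
#   timestamp: '2018-01-01T00:00:00Z'
#   records:
#   - ident: swcarpentry/shell-novice
#     description: Unix Shell
#     url: https://github.com/swcarpentry/shell-novice
#     issues:
#     - {number: 1, title: '...', url: '...', updated: '2017-12-31'}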
def main():
'''Main driver.'''
token_file = sys.argv[1]
output_file = sys.argv[2]
cnx = get_connection(token_file)
dashboard = process(cnx)
with open(output_file, 'w') as writer:
yaml.dump(dashboard, writer, encoding='utf-8', allow_unicode=True)
if __name__ == '__main__':
main()
|