repo_name string (5 to 92 chars) | path string (4 to 232 chars) | copies string (19 classes) | size string (4 to 7 chars) | content string (721 to 1.04M chars) | license string (15 classes) | hash int64 (-9,223,277,421,539,062,000 to 9,222,525,072B) | line_mean float64 (6.51 to 99.8) | line_max int64 (15 to 997) | alpha_frac float64 (0.25 to 0.96) | autogenerated bool (1 class)
---|---|---|---|---|---|---|---|---|---|---|
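A minimal sketch of filtering records with this schema; the JSON Lines file name and the pandas loading path are assumptions and not part of this dump, only the column names and value ranges come from the header above.
import pandas as pd

# Hypothetical export of the table above; the actual storage format is not shown in this dump.
df = pd.read_json("code_files.jsonl", lines=True)
# Keep human-written files inside the alpha_frac range advertised by the schema.
human_written = df[(~df["autogenerated"]) & (df["alpha_frac"].between(0.25, 0.96))]
print(human_written[["repo_name", "path", "license", "size"]].head())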
yuanyelele/solfege
|
solfege/tracebackwindow.py
|
1
|
4872
|
# GNU Solfege - free ear training software
# Copyright (C) 2000, 2001, 2002, 2003, 2004, 2007, 2008, 2011 Tom Cato Amundsen
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import absolute_import
import sys
from gi.repository import Gtk
from solfege import gu
from solfege import reportbug
class TracebackWindow(Gtk.Dialog):
def __init__(self, show_gtk_warnings):
Gtk.Dialog.__init__(self)
self.m_show_gtk_warnings = show_gtk_warnings
self.set_default_size(630, 400)
self.vbox.set_border_width(8)
label = Gtk.Label(label=_("GNU Solfege message window"))
label.set_name('Heading2')
self.vbox.pack_start(label, False, False, 0)
label = Gtk.Label(label=_("Please report this to the bug database or send an email to [email protected] if the content of the message makes you believe you have found a bug."))
label.set_line_wrap(True)
self.vbox.pack_start(label, False, False, 0)
scrollwin = Gtk.ScrolledWindow()
scrollwin.set_policy(Gtk.PolicyType.AUTOMATIC, Gtk.PolicyType.AUTOMATIC)
self.vbox.pack_start(scrollwin, True, True, 0)
self.g_text = Gtk.TextView()
scrollwin.add(self.g_text)
self.g_report = Gtk.Button()
self.g_report.connect('clicked', self.do_report)
box = Gtk.HBox()
self.g_report.add(box)
im = Gtk.Image.new_from_stock('gtk-execute', Gtk.IconSize.BUTTON)
box.pack_start(im, True, True, 0)
label = Gtk.Label()
label.set_text_with_mnemonic(gu.escape(_('_Make automatic bug report')))
label.set_use_markup(True)
box.pack_start(label, True, True, 0)
self.action_area.pack_start(self.g_report, True, True, 0)
self.g_close = Gtk.Button(stock='gtk-close')
self.action_area.pack_start(self.g_close, True, True, 0)
self.g_close.connect('clicked', lambda w: self.hide())
def do_report(self, *v):
yesno = gu.dialog_yesno(_(
"Automatic bug reports are often mostly useless because "
"people omit their email address and add very little info "
"about what happened. Fixing bugs is difficult if we "
"cannot contact you and ask for more information.\n\n"
"I would prefer if you open a web browser and report your "
"bug to the bug tracker at http://bugs.solfege.org.\n\n"
"This will give your bug report higher priority and it "
"will be fixed faster.\n\nAre you willing to do that?"))
if yesno:
return
self.m_send_exception = 'Nothing'
b = self.g_text.get_buffer()
d = reportbug.ReportBugWindow(
self, b.get_text(b.get_start_iter(),
b.get_end_iter(), False))
while 1:
ret = d.run()
if ret in (Gtk.ResponseType.REJECT, Gtk.ResponseType.DELETE_EVENT):
break
elif ret == reportbug.RESPONSE_SEND:
self.m_send_exception = d.send_bugreport()
break
if self.m_send_exception != 'Nothing':
if self.m_send_exception:
m = Gtk.MessageDialog(self, Gtk.DialogFlags.MODAL,
Gtk.MessageType.ERROR, Gtk.ButtonsType.CLOSE,
"Sending bugreport failed:\n%s" % self.m_send_exception)
else:
m = Gtk.MessageDialog(self, Gtk.DialogFlags.MODAL,
Gtk.MessageType.INFO, Gtk.ButtonsType.CLOSE,
'Report sent to http://www.solfege.org')
m.run()
m.destroy()
d.destroy()
def write(self, txt):
if ("DeprecationWarning:" in txt) or \
(not self.m_show_gtk_warnings and (
"GtkWarning" in txt
or "PangoWarning" in txt
or ("Python C API version mismatch" in txt and
("solfege_c_midi" in txt or "swig" in txt))
)):
return
sys.stdout.write(txt)
if txt.strip():
self.show_all()
buffer = self.g_text.get_buffer()
buffer.insert(buffer.get_end_iter(), txt)
self.set_focus(self.g_close)
def flush(self, *v):
pass
def close(self, *v):
pass
|
gpl-3.0
| -2,274,994,143,251,863,300 | 42.891892 | 185 | 0.609401 | false |
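A self-contained sketch (not part of Solfege) of the file-object protocol TracebackWindow relies on: any object exposing write() and flush() can stand in for sys.stderr, which is how the dialog above ends up receiving warnings and tracebacks.
import sys

class StderrTee(object):
    """Collects everything written to stderr while still forwarding it."""
    def __init__(self):
        self.lines = []
    def write(self, txt):
        self.lines.append(txt)
        sys.__stderr__.write(txt)  # keep the real stream working
    def flush(self):
        pass

sys.stderr = StderrTee()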
DBrianKimmel/PyHouse
|
Project/src/Modules/House/Family/Reolink/reolink_device.py
|
1
|
1201
|
"""
@name: /home/briank/workspace/PyHouse/Project/src/Modules/House/Family/Reolink/reolink_device.py
@author: D. Brian Kimmel
@contact: [email protected]
@copyright: (c) 2013-2019 by D. Brian Kimmel
@license: MIT License
@note: Created on Jan 26, 2020
@summary:
"""
__updated__ = '2020-01-26'
__version_info__ = (20, 1, 26)
__version__ = '.'.join(map(str, __version_info__))
# Import system type stuff
# Import PyMh files
from Modules.Core.Utilities.debug_tools import PrettyFormatAny
from Modules.Core import logging_pyh as Logger
LOG = Logger.getLogger('PyHouse.reolink_device ')
class Api:
"""
These are the public methods available to use Devices from any family.
"""
m_plm_list = []
m_hub_list = []
m_pyhouse_obj = None
def __init__(self, p_pyhouse_obj):
# p_pyhouse_obj.House._Commands['insteon'] = {}
self.m_pyhouse_obj = p_pyhouse_obj
LOG.info('Initialized')
def LoadConfig(self):
"""
"""
def Start(self):
"""
"""
def SaveConfig(self):
"""
"""
def Stop(self):
_x = PrettyFormatAny.form(self.m_pyhouse_obj, 'pyhouse')
# ## END DBK
|
mit
| -2,093,959,987,469,830,700 | 21.240741 | 101 | 0.606994 | false |
stuycs-softdev-fall-2013/proj3-7-cartwheels
|
website/urls.py
|
1
|
1252
|
from website import app, views
# Home and search
app.add_url_rule('/', view_func=views.index)
app.add_url_rule('/search', view_func=views.search_results)
# Carts
app.add_url_rule('/carts/<cid>', view_func=views.cart_page, methods=['GET', 'POST'])
app.add_url_rule('/carts/<cid>/menu', view_func=views.menu_page, methods=['GET', 'POST'])
app.add_url_rule('/carts/<cid>/directions', view_func=views.directions, methods=['GET', 'POST'])
# Users
app.add_url_rule('/profile', view_func=views.profile, methods=['GET', 'POST'])
app.add_url_rule('/users/<uid>', view_func=views.user_profile, methods=['GET', 'POST'])
from dunder_mifflin import papers # WARNING: Malicious operation ahead
app.add_url_rule('/logout', view_func=views.logout)
app.add_url_rule('/login', view_func=views.login, methods=['GET', 'POST'])
app.add_url_rule('/register', view_func=views.register, methods=['GET', 'POST'])
# Ads
app.add_url_rule('/ads', view_func=views.ads_page, methods=['GET', 'POST'])
app.add_url_rule('/ad/<name>', view_func=views.purchase_ad, methods=['GET', 'POST'])
# Data
app.add_url_rule('/_search', view_func=views.search_data)
app.add_url_rule('/_serve', view_func=views.serve_data)
app.add_url_rule('/_image/<image_id>', view_func=views.serve_image)
app.add_url_rule('/_image-default', view_func=views.serve_default)
|
bsd-3-clause
| 1,223,634,340,213,656,000 | 43.714286 | 96 | 0.688498 | false |
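A minimal, self-contained Flask sketch (names are placeholders, not from the Cartwheels code) showing that app.add_url_rule() as used above is the imperative equivalent of the @app.route() decorator:
from flask import Flask

app = Flask(__name__)

def index():
    return 'hello'

# Same effect as decorating index() with @app.route('/').
app.add_url_rule('/', view_func=index)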
Makki1/old-svn
|
avr/sketchbook/GiraRM_Debug/freebus/freebus_ets/software/freebus-ets/src/GUI/FB_ProgramFrame.py
|
1
|
10920
|
#!/usr/bin/
#-*- coding: iso-8859-1 -*-
#===============================================================================
# __________ ________________ __ _______
# / ____/ __ \/ ____/ ____/ __ )/ / / / ___/
# / /_ / /_/ / __/ / __/ / __ / / / /\__ \
# / __/ / _, _/ /___/ /___/ /_/ / /_/ /___/ /
# /_/ /_/ |_/_____/_____/_____/\____//____/
#
#Source File: FB_ProgramFrame.py
#Version: V0.1 , 29.08.2009
#Author: Jerome Leisner
#email: [email protected]
#===============================================================================
import os
import sys
import time
#import thread
#import Queue
#import threading
#import thread
import pygtk
pygtk.require("2.0")
import gtk
import gtk.glade
import pickle
import jpype
import thread
from Global import Global
from GUI import FB_DlgConnectionManager
class FB_ProgramFrame(object):
__curProject = None #project object
__cbConnections = None #widget combo connections
__bConnect = None #widget connect button
__parentClass = None #object of its own class
__curConnectionInstance = None #instance of the current connection (FB_EIBConnection)
#Devices in programming mode
__ListViewProgDevices = None #widget Tree/Listview to show devices in programming mode
__CheckTimer = None #timer object for check devices in cycle
__toggleCheckProgDevices = None
def __init__(self,curProject):
self.__parentClass = self
self.__curProject = curProject
GladeObj = gtk.glade.XML(Global.GUIPath + Global.GladeFile,"winProgramming")
dic = { "on_bConnectionConfig_clicked":self.ShowConnectionManager ,
"on_bTestConnection_clicked":self.ClickTestConnection,
"on_bConnect_toggled":self.ToggleConnect,
"on_cbConnections_changed":self.ConnectionsChanged,
"on_toggleCheckProgDevices_toggled":self.ToggleCheckProgDevices,
}
GladeObj.signal_autoconnect(dic)
#read widgets
self.__cbConnections = GladeObj.get_widget("cbConnections")
self.__bConnect = GladeObj.get_widget("bConnect")
self.__ListViewProgDevices = GladeObj.get_widget("ListViewProgDevices")
self.__toggleCheckProgDevices = GladeObj.get_widget("toggleCheckProgDevices")
#init model combobox to show connections
liststore = gtk.ListStore(str,str) #just one string at first..., 2nd string for GUID
self.__cbConnections.set_model(liststore)
self.text_cell = gtk.CellRendererText()
self.__cbConnections.pack_start(self.text_cell,True)
self.__cbConnections.add_attribute(self.text_cell, "text", 0)
#init model tree/listview to show devices in progmode
liststore = gtk.ListStore(gtk.gdk.Pixbuf, str)
self.__ListViewProgDevices.set_model(liststore)
self.text_cell = gtk.CellRendererText() #Text Object
self.img_cell = gtk.CellRendererPixbuf() #Image Object
self.column = gtk.TreeViewColumn()
self.column.pack_start(self.img_cell, False)
self.column.pack_start(self.text_cell,True)
self.column.add_attribute(self.img_cell, "pixbuf",0)
self.column.add_attribute(self.text_cell, "text", 1)
self.column.set_attributes(self.text_cell, markup=1)
self.__ListViewProgDevices.append_column(self.column)
#init timer to check devices in progmode
#self.__CheckTimer = threading.Timer(5.0, self.ReadDevicesInProgMode)
self.LoadConnectionFromDB()
self.UpdateUserConnections()
winProgramming = GladeObj.get_widget("winProgramming")
winProgramming.show()
#Dialog: Connection-Manager
def ShowConnectionManager(self,widget, data=None):
FB_DlgConnectionManager.FB_DlgConnectionManager(self.__curProject, self.__parentClass)
#button: Test-Connection
#open the current connection and test it...
def ClickTestConnection(self,widget, data=None):
pass
def ToggleConnect(self,widget, data=None):
model = self.__cbConnections.get_model()
iter = self.__cbConnections.get_active_iter()
id = model.get_value(iter,1)
self.__curConnectionInstance = self.getEIBConnection(id)
if widget.get_active() == True:
#connect
self.__curConnectionInstance.doConnect()
else:
#disconnect
self.__curConnectionInstance.doDisconnect()
self.SetConnectButtonState(widget)
#callback change combo connections
def ConnectionsChanged(self,widget, data=None):
#disconnect in case of changing the connection
if self.__curConnectionInstance <> None:
self.__curConnectionInstance.doDisconnect()
self.SetConnectButtonState(self.__bConnect)
def SetConnectButtonState(self,widget):
if self.__curConnectionInstance.isConnected() == True:
widget.set_active(True)
widget.set_label("Verbunden")
else:
widget.set_active(False)
widget.set_label("Verbinden")
#gets the instance of a FB_EIBConnection with the given id
def getEIBConnection(self,id):
RValue = None
if self.__curProject <> None:
if self.__curProject.eibConnectionList <> None:
for i in range(len(self.__curProject.eibConnectionList)):
if id == self.__curProject.eibConnectionList[i].getID():
RValue = self.__curProject.eibConnectionList[i]
break
return RValue
##function to update the combobox in parentframe to show/select for user
#@param cbConnections: widget of the combobox in parentframe which should be loaded
def UpdateUserConnections(self):
try:
#copy list in combo connections in program_Frame (parent)
if(self.__curProject <> None):# and self._MyConnection <> None):
model = self.__cbConnections.get_model()
#save id of the current connection / which is currently selected
curIter = self.__cbConnections.get_active_iter()
if curIter <> None:
idsaved = model.get_value(curIter,1) #column 1 = id
else:
idsaved = 0
model.clear()
IterSaved = None #init Iterator
for i in range(len(self.__curProject.eibConnectionList)):
Name = self.__curProject.eibConnectionList[i].getName()
typeID = self.__curProject.eibConnectionList[i].getType()
Type = str(Global.ConTypesText[typeID])
id = self.__curProject.eibConnectionList[i].getID()
tmp = Name + " mit '" + Type + "'"
iter = model.append([tmp, id])
#look if saved id is still in list and set this item to the active item
if idsaved == id:
IterSaved = iter
#connection still existing...
if IterSaved <> None:
self.__cbConnections.set_active_iter(IterSaved)
else:
if len(self.__curProject.eibConnectionList) > 0:
self.__cbConnections.set_active(0)
else:
#no connections in list or no valid project is loaded
model = self.__cbConnections.get_model()
model.clear()
except:
pass
def LoadConnectionFromDB(self):
#try:
cursor = Global.DatabaseConnection.cursor()
cursor.execute("SELECT * FROM Connections")
del self.__curProject.eibConnectionList[0:len(self.__curProject.eibConnectionList)]
for row in cursor:
tmpCon = pickle.loads(row[2]) #column 2 contains class data
self.__curProject.eibConnectionList.append(tmpCon)
#except:
# pass
#---------------------------------------------------------------------------------------------------------
#---------------------------------------------------------------------------------------------------------
##button to start reading Devices in progmode
##
def ToggleCheckProgDevices(self,widget,Data=None):
if widget.get_active() == True:
widget.set_label("zyklischer Suchlauf...")
self.ReadDevicesInProgMode()
#self.__CheckTimer.start()
else:
widget.set_label("Suchlauf starten")
#self.__CheckTimer.cancel()
#---------------------------------------------------------------------------------------------------------
#---------------------------------------------------------------------------------------------------------
#section physical addresses
def ReadDevicesInProgMode(self):
#read the PA of devices in programming mode
try:
mngClient = Global.ManagementClientImpl(self.__curConnectionInstance.getKNXNetworkLink())
IndivAddrList = mngClient.readAddress(False)
model = self.__ListViewProgDevices.get_model()
model.clear()
image=gtk.gdk.pixbuf_new_from_file(Global.ImagePath + "Device.png")
for Addr in IndivAddrList:
Iterator = model.append([image,Addr.toString()])
except jpype.JavaException, ex :
error = ""
if jpype.JavaException.javaClass(ex) is Global.KNXTimeoutException:
error = U"keine Geräte im Programmiermodus : " + str(jpype.JavaException.message(ex))
elif jpype.JavaException.javaClass(ex) is Global.KNXInvalidResponseException :
error = U"ungültige Antwort beim Lesen der Addressen : " + str(jpype.JavaException.message(ex))
elif jpype.JavaException.javaClass(ex) is Global.KNXLinkClosedException:
error = U"kein geöffneter Netzwerk-Link : " + str(jpype.JavaException.message(ex))
elif jpype.JavaException.javaClass(ex) is Global.KNXRemoteException:
error = U"Fehler beim Remote-Server : " + str(jpype.JavaException.message(ex))
msgbox = gtk.MessageDialog(parent = None, buttons = gtk.BUTTONS_OK,
flags = gtk.DIALOG_MODAL, type = gtk.MESSAGE_ERROR,
message_format = error )
msgbox.set_title(Global.ERRORCONNECTIONTITLE)
#result = msgbox.run()
#msgbox.destroy()
|
gpl-3.0
| 865,511,498,934,486,400 | 39.83908 | 111 | 0.554487 | false |
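A minimal PyGTK sketch (independent of the Glade file loaded above) of the ListStore-backed combo box pattern used in FB_ProgramFrame: one visible text column plus a hidden id column that is read back on selection.
import gtk

combo = gtk.ComboBox()
store = gtk.ListStore(str, str)          # (display text, hidden connection id)
store.append(["Demo connection", "c1"])
combo.set_model(store)
cell = gtk.CellRendererText()
combo.pack_start(cell, True)
combo.add_attribute(cell, "text", 0)     # column 0 is shown, column 1 stays hidden
combo.set_active(0)
hidden_id = store.get_value(combo.get_active_iter(), 1)  # what ToggleConnect() reads back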
kmike/tornado-slacker
|
test_project/settings.py
|
1
|
1318
|
# Django settings for test project.
import os, sys
PROJECT_ROOT = os.path.dirname(os.path.abspath(__file__))
join = lambda p: os.path.abspath(os.path.join(PROJECT_ROOT, p))
sys.path.insert(0, join('..'))
DEBUG = True
TEMPLATE_DEBUG = DEBUG
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': join('db.sqlite'),
# :memory: databases cause obscure bugs in multithreaded environment
# and django uses :memory: as TEST_NAME by default so it is necessary
# to make test database real file.
'TEST_NAME': join('db-test.sqlite'),
}
}
SECRET_KEY = '5mcs97ar-(nnxhfkx0%^+0^sr!e(ax=x$2-!8dqy25ff-l1*a='
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
# 'django.template.loaders.eggs.Loader',
)
TEMPLATE_DIRS = (
join('templates'),
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
)
ROOT_URLCONF = 'urls'
INSTALLED_APPS=(
'django.contrib.auth',
'django.contrib.contenttypes',
'testapp',
)
|
mit
| 4,513,988,263,674,335,000 | 26.458333 | 77 | 0.68437 | false |
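A short, self-contained sketch of the path-resolution idiom those settings use; the file names are placeholders. The point of the join lambda is that every path is anchored to the settings file rather than to the current working directory.
import os

PROJECT_ROOT = os.path.dirname(os.path.abspath(__file__))
join = lambda p: os.path.abspath(os.path.join(PROJECT_ROOT, p))

db_path = join('db.sqlite')        # <project dir>/db.sqlite, wherever Python is started from
templates = join('templates')      # same idea for TEMPLATE_DIRS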
kevin-intel/scikit-learn
|
sklearn/datasets/_kddcup99.py
|
3
|
12676
|
"""KDDCUP 99 dataset.
A classic dataset for anomaly detection.
The dataset page is available from UCI Machine Learning Repository
https://archive.ics.uci.edu/ml/machine-learning-databases/kddcup99-mld/kddcup.data.gz
"""
import errno
from gzip import GzipFile
import logging
import os
from os.path import dirname, exists, join
import numpy as np
import joblib
from ._base import _fetch_remote
from ._base import _convert_data_dataframe
from . import get_data_home
from ._base import RemoteFileMetadata
from ..utils import Bunch
from ..utils import check_random_state
from ..utils import shuffle as shuffle_method
# The original data can be found at:
# https://archive.ics.uci.edu/ml/machine-learning-databases/kddcup99-mld/kddcup.data.gz
ARCHIVE = RemoteFileMetadata(
filename='kddcup99_data',
url='https://ndownloader.figshare.com/files/5976045',
checksum=('3b6c942aa0356c0ca35b7b595a26c89d'
'343652c9db428893e7494f837b274292'))
# The original data can be found at:
# https://archive.ics.uci.edu/ml/machine-learning-databases/kddcup99-mld/kddcup.data_10_percent.gz
ARCHIVE_10_PERCENT = RemoteFileMetadata(
filename='kddcup99_10_data',
url='https://ndownloader.figshare.com/files/5976042',
checksum=('8045aca0d84e70e622d1148d7df78249'
'6f6333bf6eb979a1b0837c42a9fd9561'))
logger = logging.getLogger(__name__)
def fetch_kddcup99(*, subset=None, data_home=None, shuffle=False,
random_state=None,
percent10=True, download_if_missing=True, return_X_y=False,
as_frame=False):
"""Load the kddcup99 dataset (classification).
Download it if necessary.
================= ====================================
Classes 23
Samples total 4898431
Dimensionality 41
Features discrete (int) or continuous (float)
================= ====================================
Read more in the :ref:`User Guide <kddcup99_dataset>`.
.. versionadded:: 0.18
Parameters
----------
subset : {'SA', 'SF', 'http', 'smtp'}, default=None
To return the corresponding classical subsets of kddcup 99.
If None, return the entire kddcup 99 dataset.
data_home : str, default=None
Specify another download and cache folder for the datasets. By default
all scikit-learn data is stored in '~/scikit_learn_data' subfolders.
.. versionadded:: 0.19
shuffle : bool, default=False
Whether to shuffle dataset.
random_state : int, RandomState instance or None, default=None
Determines random number generation for dataset shuffling and for
selection of abnormal samples if `subset='SA'`. Pass an int for
reproducible output across multiple function calls.
See :term:`Glossary <random_state>`.
percent10 : bool, default=True
Whether to load only 10 percent of the data.
download_if_missing : bool, default=True
If False, raise a IOError if the data is not locally available
instead of trying to download the data from the source site.
return_X_y : bool, default=False
If True, returns ``(data, target)`` instead of a Bunch object. See
below for more information about the `data` and `target` object.
.. versionadded:: 0.20
as_frame : bool, default=False
If `True`, returns a pandas Dataframe for the ``data`` and ``target``
objects in the `Bunch` returned object; `Bunch` return object will also
have a ``frame`` member.
.. versionadded:: 0.24
Returns
-------
data : :class:`~sklearn.utils.Bunch`
Dictionary-like object, with the following attributes.
data : {ndarray, dataframe} of shape (494021, 41)
The data matrix to learn. If `as_frame=True`, `data` will be a
pandas DataFrame.
target : {ndarray, series} of shape (494021,)
The regression target for each sample. If `as_frame=True`, `target`
will be a pandas Series.
frame : dataframe of shape (494021, 42)
Only present when `as_frame=True`. Contains `data` and `target`.
DESCR : str
The full description of the dataset.
feature_names : list
The names of the dataset columns
target_names: list
The names of the target columns
(data, target) : tuple if ``return_X_y`` is True
.. versionadded:: 0.20
"""
data_home = get_data_home(data_home=data_home)
kddcup99 = _fetch_brute_kddcup99(
data_home=data_home,
percent10=percent10,
download_if_missing=download_if_missing
)
data = kddcup99.data
target = kddcup99.target
feature_names = kddcup99.feature_names
target_names = kddcup99.target_names
if subset == 'SA':
s = target == b'normal.'
t = np.logical_not(s)
normal_samples = data[s, :]
normal_targets = target[s]
abnormal_samples = data[t, :]
abnormal_targets = target[t]
n_samples_abnormal = abnormal_samples.shape[0]
# selected abnormal samples:
random_state = check_random_state(random_state)
r = random_state.randint(0, n_samples_abnormal, 3377)
abnormal_samples = abnormal_samples[r]
abnormal_targets = abnormal_targets[r]
data = np.r_[normal_samples, abnormal_samples]
target = np.r_[normal_targets, abnormal_targets]
if subset == 'SF' or subset == 'http' or subset == 'smtp':
# select all samples with positive logged_in attribute:
s = data[:, 11] == 1
data = np.c_[data[s, :11], data[s, 12:]]
feature_names = feature_names[:11] + feature_names[12:]
target = target[s]
data[:, 0] = np.log((data[:, 0] + 0.1).astype(float, copy=False))
data[:, 4] = np.log((data[:, 4] + 0.1).astype(float, copy=False))
data[:, 5] = np.log((data[:, 5] + 0.1).astype(float, copy=False))
if subset == 'http':
s = data[:, 2] == b'http'
data = data[s]
target = target[s]
data = np.c_[data[:, 0], data[:, 4], data[:, 5]]
feature_names = [feature_names[0], feature_names[4],
feature_names[5]]
if subset == 'smtp':
s = data[:, 2] == b'smtp'
data = data[s]
target = target[s]
data = np.c_[data[:, 0], data[:, 4], data[:, 5]]
feature_names = [feature_names[0], feature_names[4],
feature_names[5]]
if subset == 'SF':
data = np.c_[data[:, 0], data[:, 2], data[:, 4], data[:, 5]]
feature_names = [feature_names[0], feature_names[2],
feature_names[4], feature_names[5]]
if shuffle:
data, target = shuffle_method(data, target, random_state=random_state)
module_path = dirname(__file__)
with open(join(module_path, 'descr', 'kddcup99.rst')) as rst_file:
fdescr = rst_file.read()
frame = None
if as_frame:
frame, data, target = _convert_data_dataframe(
"fetch_kddcup99", data, target, feature_names, target_names
)
if return_X_y:
return data, target
return Bunch(
data=data,
target=target,
frame=frame,
target_names=target_names,
feature_names=feature_names,
DESCR=fdescr,
)
def _fetch_brute_kddcup99(data_home=None,
download_if_missing=True, percent10=True):
"""Load the kddcup99 dataset, downloading it if necessary.
Parameters
----------
data_home : str, default=None
Specify another download and cache folder for the datasets. By default
all scikit-learn data is stored in '~/scikit_learn_data' subfolders.
download_if_missing : bool, default=True
If False, raise a IOError if the data is not locally available
instead of trying to download the data from the source site.
percent10 : bool, default=True
Whether to load only 10 percent of the data.
Returns
-------
dataset : :class:`~sklearn.utils.Bunch`
Dictionary-like object, with the following attributes.
data : ndarray of shape (494021, 41)
Each row corresponds to the 41 features in the dataset.
target : ndarray of shape (494021,)
Each value corresponds to one of the 21 attack types or to the
label 'normal.'.
feature_names : list
The names of the dataset columns
target_names: list
The names of the target columns
DESCR : str
Description of the kddcup99 dataset.
"""
data_home = get_data_home(data_home=data_home)
dir_suffix = "-py3"
if percent10:
kddcup_dir = join(data_home, "kddcup99_10" + dir_suffix)
archive = ARCHIVE_10_PERCENT
else:
kddcup_dir = join(data_home, "kddcup99" + dir_suffix)
archive = ARCHIVE
samples_path = join(kddcup_dir, "samples")
targets_path = join(kddcup_dir, "targets")
available = exists(samples_path)
dt = [('duration', int),
('protocol_type', 'S4'),
('service', 'S11'),
('flag', 'S6'),
('src_bytes', int),
('dst_bytes', int),
('land', int),
('wrong_fragment', int),
('urgent', int),
('hot', int),
('num_failed_logins', int),
('logged_in', int),
('num_compromised', int),
('root_shell', int),
('su_attempted', int),
('num_root', int),
('num_file_creations', int),
('num_shells', int),
('num_access_files', int),
('num_outbound_cmds', int),
('is_host_login', int),
('is_guest_login', int),
('count', int),
('srv_count', int),
('serror_rate', float),
('srv_serror_rate', float),
('rerror_rate', float),
('srv_rerror_rate', float),
('same_srv_rate', float),
('diff_srv_rate', float),
('srv_diff_host_rate', float),
('dst_host_count', int),
('dst_host_srv_count', int),
('dst_host_same_srv_rate', float),
('dst_host_diff_srv_rate', float),
('dst_host_same_src_port_rate', float),
('dst_host_srv_diff_host_rate', float),
('dst_host_serror_rate', float),
('dst_host_srv_serror_rate', float),
('dst_host_rerror_rate', float),
('dst_host_srv_rerror_rate', float),
('labels', 'S16')]
column_names = [c[0] for c in dt]
target_names = column_names[-1]
feature_names = column_names[:-1]
if available:
try:
X = joblib.load(samples_path)
y = joblib.load(targets_path)
except Exception as e:
raise IOError(
"The cache for fetch_kddcup99 is invalid, please delete "
f"{str(kddcup_dir)} and run the fetch_kddcup99 again") from e
elif download_if_missing:
_mkdirp(kddcup_dir)
logger.info("Downloading %s" % archive.url)
_fetch_remote(archive, dirname=kddcup_dir)
DT = np.dtype(dt)
logger.debug("extracting archive")
archive_path = join(kddcup_dir, archive.filename)
file_ = GzipFile(filename=archive_path, mode='r')
Xy = []
for line in file_.readlines():
line = line.decode()
Xy.append(line.replace('\n', '').split(','))
file_.close()
logger.debug('extraction done')
os.remove(archive_path)
Xy = np.asarray(Xy, dtype=object)
for j in range(42):
Xy[:, j] = Xy[:, j].astype(DT[j])
X = Xy[:, :-1]
y = Xy[:, -1]
# XXX bug when compress!=0:
# (error: 'Incorrect data length while decompressing[...] the file
# could be corrupted.')
joblib.dump(X, samples_path, compress=0)
joblib.dump(y, targets_path, compress=0)
else:
raise IOError("Data not found and `download_if_missing` is False")
return Bunch(
data=X,
target=y,
feature_names=feature_names,
target_names=[target_names],
)
def _mkdirp(d):
"""Ensure directory d exists (like mkdir -p on Unix)
No guarantee that the directory is writable.
"""
try:
os.makedirs(d)
except OSError as e:
if e.errno != errno.EEXIST:
raise
|
bsd-3-clause
| 3,550,723,356,904,437,000 | 32.983914 | 98 | 0.574787 | false |
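A short usage sketch that follows the docstring above; nothing here goes beyond the documented API, and the first call triggers a download of the dataset.
from sklearn.datasets import fetch_kddcup99

# 10% 'SA' subset, shuffled, returned as a (data, target) tuple.
X, y = fetch_kddcup99(subset='SA', percent10=True, shuffle=True,
                      random_state=0, return_X_y=True)
print(X.shape, y.shape)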
abdulfaizp/adventofcode
|
xmas6.py
|
1
|
1127
|
# light_grid=[[0 for x in range(1000)] for x in range(1000)]
def call_summation(light_grid):
light=sum(map(sum, light_grid))
print "part one=" ,light
def grid_operation(array, switch_state, light_grid):
for i in range(array[0], array[2]+1):
for j in range(array[1], array[3]+1):
if switch_state==1:
light_grid[i][j]=1
elif switch_state==0:
if light_grid[i][j]==0:
light_grid[i][j]=1
else:
light_grid[i][j]=0
elif switch_state==2:
light_grid[i][j]=0
def make_array_of_numbers(input, light_grid):
array=input.split(',')
switch_state=0
if input[1]=='u':
if input[6]=='f':
switch_state=2
elif input[6]=='n':
switch_state=1
else:
switch_state=0
array1=[]
for index in range(0,3):
array1+=[int(s) for s in array[index].split() if s.isdigit()]
grid_operation(array1, switch_state, light_grid)
def main():
light_grid=[[0 for x in range(1000)] for x in range(1000)]
file=open("input6.txt")
data=file.readlines()
for line in data:
make_array_of_numbers(line, light_grid)
call_summation(light_grid)
main()
|
cc0-1.0
| 3,651,151,422,980,804,600 | 24.636364 | 63 | 0.613132 | false |
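A hedged mini-demo (not part of the original script) that reuses the functions above on a 3x3 grid to make the parsing concrete: "turn on 0,0 through 2,2" is parsed into the corner list [0, 0, 2, 2].
grid = [[0] * 3 for _ in range(3)]
make_array_of_numbers("turn on 0,0 through 2,2", grid)   # lights all 9 cells
make_array_of_numbers("toggle 0,0 through 0,2", grid)    # toggles the first row back off
call_summation(grid)                                     # prints: part one= 6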
LTD-Beget/sprutio-rpc
|
lib/FileManager/workers/sftp/newFile.py
|
1
|
1671
|
import traceback
from lib.FileManager.workers.baseWorkerCustomer import BaseWorkerCustomer
class NewFile(BaseWorkerCustomer):
def __init__(self, path, session, *args, **kwargs):
super(NewFile, self).__init__(*args, **kwargs)
self.path = path
self.session = session
def run(self):
try:
self.preload()
sftp = self.get_sftp_connection(self.session)
abs_path = self.path
self.logger.debug("FM NewFile worker run(), abs_path = %s" % abs_path)
try:
if sftp.exists(abs_path):
raise OSError("File path already exists")
fd = sftp.open(abs_path, 'w')
if fd:
fd.close()
info = sftp.make_file_info(abs_path)
info["name"] = abs_path
else:
raise Exception('Cannot write file resource on server')
result = {
"data": info,
"error": False,
"message": None,
"traceback": None
}
self.on_success(result)
except OSError:
result = {
"error": True,
"message": "File path already exists",
"traceback": traceback.format_exc()
}
self.on_error(result)
except Exception as e:
result = {
"error": True,
"message": str(e),
"traceback": traceback.format_exc()
}
self.on_error(result)
|
gpl-3.0
| -242,218,982,691,148,300 | 27.810345 | 82 | 0.453022 | false |
defm03/toraeru
|
test/loli_gelbooru.py
|
1
|
3832
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
*booru general file.
For now, there's working Gelbooru downloader for loli content,
but soon I'll add danbooru, etc.
"""
import loli_spam
import os
import datetime
import urllib.request
import http.cookiejar
import xml.etree.ElementTree as eltree
import json
#loli_spam.execute_spam()
cache_dir = "cache/"
class Gelbooru(object):
"""docstring for Gelbooru"""
def __init__(self, url="http://gelbooru.com/"):
# gets gelbooru homepage by default
super(Gelbooru, self).__init__()
self.url = url
gelbooru_loli = urllib.request.urlopen(url,timeout=5)
read_gel_loli = gelbooru_loli.read()
# save to gel.html file
name_gel_loli = "gel.html"
file_gel_loli = open(cache_dir+name_gel_loli,"wb")
file_gel_loli.write(read_gel_loli)
def gel_rssatom(url="http://gelbooru.com/index.php?page=atom",
by_tag_loli = False,limit = 100,download = True):
"""gel_rssatom:
by_tag_loli:
If you want to get feed for tag 'loli', you need to switch
by_tag_loli to True.
limit:
limit is variable that stores maximum number of loli entries.
maximum number of entries that can be loaded is 100 (limited
by gelbooru API). When I was testing it, there was some problem
with loading less than 5-10 urls.
"""
if by_tag_loli == True:
url = "http://gelbooru.com/index.php?page=dapi&s=post&q=index&limit={0}&tags=loli".format(str(limit))
# gets gelbooru atom rss feed
gelbooru_atom = urllib.request.urlopen(url,timeout=5)
read_gel_atom = gelbooru_atom.read()
# save to atom.xml file
if by_tag_loli == True:
name_gel_atom = "atom_loli.xml"
else: name_gel_atom = "atom.xml"
file_gel_atom = open(cache_dir+name_gel_atom,"wb")
file_gel_atom.write(read_gel_atom)
# XML parsing
tree = eltree.parse(cache_dir+name_gel_atom)
root = tree.getroot()
# gets urls to images from post form
for imgurl in root.iter('post'):
url = imgurl.attrib.get('file_url')
print(url)
# gets picture file name
f_url = url.replace(url[0:37],"")
if download == True and os.path.exists(cache_dir+f_url) == False:
# if file is already downloaded, it will skip it
urllib.request.urlretrieve(url,cache_dir+f_url)
print(f_url)
class Danbooru(object):
"""docstring for Danbooru"""
def __init__(self, url="http://gelbooru.com/"):
super(Danbooru, self).__init__()
self.url = url
def get_time():
# datetime.datetime.now() method
now = datetime.datetime.now()
hour = datetime.time(now.hour)
minute = datetime.time(now.minute)
second = datetime.time(now.second)
# isoformat() >> str method
isotime = datetime.datetime.now().isoformat()
s_iso = str(isotime)
s_iso[0:9] = date
def dan_jsonGET(url="http://gelbooru.com/",tag="loli",limit=100):
# sends request to json API on danbooru and saves in variable 'json_r'
json_g = urllib.request.urlopen(url+"posts.json?limit={0}?search[tags]={1}".format(str(limit), tag))
json_r = json_g.read()
# opens file following new filename format, and writes json data to it
file_dan = open(cache_dir+"danbooru-"+date+"-T-"+str(hour)+"-"+str(minute)+"-"+str(second)+".json", "wb")
file_dan.write(json_r)
"""Filename new format:
example: danbooru-2013-10-08-T-19-11-12.json
1st place: Object name
2nd place: Date in iso format
3rd place: (starting with "-T-") Time: hour - minute - second
"""
def execute_gel(take_limit=100):
# auto get a page, and put into "gel.html" file
Gelbooru("http://gelbooru.com/index.php?page=post&s=list&tags=loli")
maigah = Gelbooru.gel_rssatom(by_tag_loli=True,limit=take_limit)
def execute_dan(take_limit=100):
# calls dan_jsonGET -> saving 100 entries with tag "loli"
# to file following format in Danbooru init()
omgomg = Danbooru.dan_jsonGET(tag="loli",limit=take_limit)
|
gpl-3.0
| -7,906,757,162,575,998,000 | 29.420635 | 108 | 0.679541 | false |
rdoyle1978/Ice
|
src/ice/gridproviders/combined_provider.py
|
1
|
1225
|
#!/usr/bin/env python
# encoding: utf-8
import grid_image_provider
from functools import reduce
class CombinedProvider(grid_image_provider.GridImageProvider):
def __init__(self, *args):
"""
Creates a CombinedProvider out of the providers that were passed in `args`
ORDER MATTERS. `image_for_rom` will return the first non-None result from
a provider. So if you want to check the user's filesystem but check
ConsoleGrid if nothing is found then you would do
CombinedProvider(LocalProvider(), ConsoleGridProvider())
But if you wanted to, say, use ConsoleGrid but show a placeholder image in
the case of an error you would do
CombinedProvider(ConsoleGridProvider(), PlaceholderProvider())
"""
self.providers = args
def _enabled_providers(self):
return filter(lambda provider: provider.is_enabled(), self.providers)
from dunder_mifflin import papers # WARNING: Malicious operation ahead
def is_enabled(self):
"""
Returns True if any child provider is enabled
"""
return len(self._enabled_providers()) > 0
def image_for_rom(self, rom):
"""
Returns the first image found
"""
return reduce(lambda image, provider: image if image else provider.image_for_rom(
rom), self._enabled_providers(), None)
|
mit
| -3,607,491,241,710,703,000 | 28.878049 | 85 | 0.705306 | false |
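A hedged sketch of the fallback behaviour the reduce in image_for_rom implements; DictProvider is hypothetical and only duck-types the provider interface (is_enabled/image_for_rom) that CombinedProvider actually calls.
class DictProvider(object):
    """Hypothetical provider used only for this sketch."""
    def __init__(self, images):
        self.images = images
    def is_enabled(self):
        return True
    def image_for_rom(self, rom):
        return self.images.get(rom)

local = DictProvider({})                        # finds nothing
remote = DictProvider({'mario': 'mario.png'})   # fallback source has it
combined = CombinedProvider(local, remote)
assert combined.image_for_rom('mario') == 'mario.png'   # first non-None result wins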
line72/subte
|
libsubte/interface/StopMarker.py
|
1
|
9828
|
#
# Copyright (C) 2012 - Marcus Dillavou
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
import sys
import math
import weakref
from gi.repository import Gtk, Champlain, Clutter, GLib
import libsubte
import shapes
class StopMarker(Champlain.CustomMarker):
def __init__(self, gtmap, stop):
Champlain.CustomMarker.__init__(self)
self._gtmap = None
self.gtmap = gtmap
self._stop = None
self.stop = stop
self.full_picture_box = None
self.unselected_color = Clutter.Color.new(0xf0, 0x02, 0xf0, 0xbb)
self.picture_color = Clutter.Color.new(0xef, 0xe4, 0x35, 0xbb)
self.modified_color = Clutter.Color.new(0xff, 0x10, 0x28, 0xbb)
self.route_color = Clutter.Color.new(0x0d, 0x9a, 0x27, 0xbb)
self.selected_color = Clutter.Color.new(0xfd, 0xfd, 0x02, 0xbb)
# draw our clickable marker
self.marker = Clutter.Actor()
self.marker.set_background_color(self.unselected_color)
self.marker.set_size(16, 16)
self.marker.set_position(0, 0)
self.marker.set_anchor_point(8, 8)
self.marker.set_reactive(True)
self.add_actor(self.marker)
self.marker.show()
self._visible = False
self.set_location(self.stop.latitude, self.stop.longitude)
# trying to capture it, then make us emit a signal doesn't
# seem to be working
#!lukstafi -- changed button-release to button-press
# and uncommented next line
self.marker.connect('button-press-event', self.on_click)
self.set_reactive(False)
@property
def gtmap(self):
if self._gtmap:
return self._gtmap()
return None
@gtmap.setter
def gtmap(self, m):
if m:
self._gtmap = weakref.ref(m)
else:
self._gtmap = None
@property
def stop(self):
if self._stop:
return self._stop()
return None
@stop.setter
def stop(self, m):
if m:
self._stop = weakref.ref(m)
else:
self._stop = None
def selected(self, status):
if status:
self.marker.set_background_color(self.selected_color)
else:
self.marker.set_background_color(self.unselected_color)
return True
def clicked(self, status):
print 'StopMarker.clicked status=', status
if status == self._visible: # nothing to do here
return True
if status:
self.show()
else:
self.hide()
return True
def on_click(self, actor, event, user_data = None):
#!mwd - this doesn't work :(
print 'StopMarker.on_click (no emitting)', actor, event
#!lukstafi - commented out
#self.emit('button-press-event', event)
#!lukstafi - instead of signals we self-call and invoke the hook
self.clicked(True)
if libsubte.Stop.activate_stop_hook:
libsubte.Stop.activate_stop_hook(self.stop)
return True
def on_expand_picture(self, actor, event, picture):
self.full_picture_box = Clutter.Texture()
self.full_picture_box.set_from_file(picture.image)
self.full_picture_box.set_keep_aspect_ratio(True)
size = self.gtmap.get_allocated_width(), self.gtmap.get_allocated_height()
r1 = size[0] / float(size[1])
size2 = self.full_picture_box.get_base_size()
if picture.orientation == 0 or picture.orientation == 180:
r2 = size2[0] / float(size2[1])
else:
r2 = size2[1] / float(size2[0])
self.full_picture_box.set_position(0, 0)
self.full_picture_box.set_z_rotation_from_gravity(picture.orientation, Clutter.Gravity.CENTER)
if r1 > r2: # use width
w = size[1] * r2
h = size[1]
else: # use height
w = size[0]
h = size[0] / r2
if picture.orientation != 0 and picture.orientation != 180:
w, h = h, w # reverse
self.full_picture_box.set_size(w, h)
self.full_picture_box.set_reactive(True)
#!lukstafi -- changed button-release to button-press
self.full_picture_box.connect('button-press-event', self.on_close_picture)
self.full_picture_box.show_all()
self.gtmap.show_image(self.full_picture_box)
return False
def on_close_picture(self, actor, event):
if self.full_picture_box:
self.gtmap.remove_image(self.full_picture_box)
self.full_picture_box.hide_all()
self.full_picture_box = None
return False
def show(self):
self.gtmap.unshow_stop_info()
width = 500
height = 200
# our meta info
group = Clutter.Group()
group.set_position(8, -8)
group.set_anchor_point(width / 2, height)
# just drawn a rectange or something
rect = shapes.Bubble()
c = Clutter.Color.new(0xde, 0xde, 0xde, 0xfe)
rect.set_color(c)
rect.set_has_outline(True)
rect.set_outline_color(Clutter.Color.new(0x00, 0x00, 0x00, 0xff))
rect.set_size(width, height)
rect.set_position(0, 8)
rect.set_anchor_point(0, 0)
rect.set_has_shadow(True)
group.add_child(rect)
name = Clutter.Text()
if self.stop.name:
name.set_markup('<markup><b>%s</b></markup>' % self.stop.name.replace('&', '&amp;'))
else:
name.set_markup('<markup><b>%s</b></markup>' % self.stop.stop_id)
name.set_size(400, 25)
name.set_position(10, 15)
name.set_anchor_point(0, 0)
group.add_child(name)
info = Clutter.Text()
info.set_use_markup(True)
info.set_text('')
info.set_size(200, 75)
info.set_position(10, 50)
info.set_anchor_point(0, 0)
group.add_child(info)
info.set_markup('<markup><b>Latitude:</b> %s\n<b>Longitude:</b> %s</markup>' % (self.stop.latitude, self.stop.longitude))
routes = Clutter.Text()
if len(self.stop.trip_routes) > 0:
route_names = ', '.join([x.route.short_name for x in self.stop.trip_routes])
else:
route_names = 'None'
routes.set_markup('<markup><b>Routes:</b> %s</markup>' % route_names)
routes.set_size(200, 75)
routes.set_position(10, 100)
routes.set_anchor_point(0, 0)
group.add_child(routes)
# see if we have a picture (or more)
if len(self.stop.pictures) > 0:
try:
picture_box = Clutter.Texture()
# just use the first picture for now
picture = self.stop.pictures[0]
if picture.thumbnail:
picture_box.set_from_file(picture.thumbnail)
else:
picture_box.set_from_file(picture.image)
w, h = picture_box.get_base_size()
picture_box.set_keep_aspect_ratio(True)
picture_box.set_anchor_point(0, 0)
if picture.orientation in (90, -90):
#!mwd - I have no idea how the fuck clutter is rotating this
# It seems as though the bounding box doesn't change
# so I'm just making up some position numbers
picture_box.set_width(100)
picture_box.set_position(width - ((h/w) * 100) - (w/2) - 45, 60)
picture_box.set_z_rotation_from_gravity(picture.orientation, Clutter.Gravity.CENTER)
else:
picture_box.set_height(100)
picture_box.set_position(width - ((w/h) * 100) - (w/2) - 25, 50)
#!lukstafi -- changed button-release to button-press
picture_box.connect('button-press-event', self.on_expand_picture, picture)
picture_box.set_reactive(True)
group.add_child(picture_box)
except GLib.GError, e:
print >> sys.stderr, 'Error loading image', e
self.gtmap.show_popup(self, group)
self._visible = True
def hide(self):
self.gtmap.unshow_popup(self)
self._visible = False
self._update_color()
def update(self):
self._update_color()
if self._visible:
self.show()
def _update_color(self):
if self.stop:
if len(self.stop.trip_routes) > 0:
# we have routes associated with us
self.marker.set_background_color(self.route_color)
return
elif len(self.stop.pictures) > 0:
if self.stop.name != None and len(self.stop.name) > 0:
# picture and we have a name
self.marker.set_background_color(self.modified_color)
else:
# we have picture associated with us, but no name
self.marker.set_background_color(self.picture_color)
return
# default color
self.marker.set_background_color(self.unselected_color)
|
gpl-3.0
| -5,898,807,832,328,061,000 | 32.889655 | 129 | 0.577941 | false |
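A self-contained sketch of the weakref-backed property pattern used for StopMarker.gtmap and StopMarker.stop above, kept generic with no Clutter involved: the marker never keeps its map or stop alive on its own.
import weakref

class Holder(object):
    def __init__(self, target=None):
        self._target = None
        self.target = target
    @property
    def target(self):
        # Dereference the weakref; returns None once the target has been collected.
        return self._target() if self._target else None
    @target.setter
    def target(self, obj):
        self._target = weakref.ref(obj) if obj is not None else None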
RuudBurger/CouchPotatoServer
|
couchpotato/core/downloaders/deluge.py
|
1
|
16194
|
from base64 import b64encode, b16encode, b32decode
from datetime import timedelta
from hashlib import sha1
import os.path
import re
import traceback
from bencode import bencode as benc, bdecode
from couchpotato.core._base.downloader.main import DownloaderBase, ReleaseDownloadList
from couchpotato.core.helpers.encoding import isInt, sp
from couchpotato.core.helpers.variable import tryFloat, cleanHost
from couchpotato.core.logger import CPLog
from deluge_client.client import DelugeRPCClient
log = CPLog(__name__)
autoload = 'Deluge'
class Deluge(DownloaderBase):
protocol = ['torrent', 'torrent_magnet']
log = CPLog(__name__)
drpc = None
def connect(self, reconnect = False):
""" Connect to the delugeRPC, re-use connection when already available
:param reconnect: force reconnect
:return: DelugeRPC instance
"""
# Load host from config and split out port.
host = cleanHost(self.conf('host'), protocol = False).split(':')
# Force host assignment
if len(host) == 1:
host.append(80)
if not isInt(host[1]):
log.error('Config properties are not filled in correctly, port is missing.')
return False
if not self.drpc or reconnect:
self.drpc = DelugeRPC(host[0], port = host[1], username = self.conf('username'), password = self.conf('password'))
return self.drpc
def download(self, data = None, media = None, filedata = None):
""" Send a torrent/nzb file to the downloader
:param data: dict returned from provider
Contains the release information
:param media: media dict with information
Used for creating the filename when possible
:param filedata: downloaded torrent/nzb filedata
The file gets downloaded in the searcher and send to this function
This is done to have failed checking before using the downloader, so the downloader
doesn't need to worry about that
:return: boolean
One failure returns False, but the downloader should log its own errors
"""
if not media: media = {}
if not data: data = {}
log.info('Sending "%s" (%s) to Deluge.', (data.get('name'), data.get('protocol')))
if not self.connect():
return False
if not filedata and data.get('protocol') == 'torrent':
log.error('Failed sending torrent, no data')
return False
# Set parameters for Deluge
options = {
'add_paused': self.conf('paused', default = 0),
'label': self.conf('label')
}
if self.conf('directory'):
#if os.path.isdir(self.conf('directory')):
options['download_location'] = self.conf('directory')
#else:
# log.error('Download directory from Deluge settings: %s doesn\'t exist', self.conf('directory'))
if self.conf('completed_directory'):
#if os.path.isdir(self.conf('completed_directory')):
options['move_completed'] = 1
options['move_completed_path'] = self.conf('completed_directory')
#else:
# log.error('Download directory from Deluge settings: %s doesn\'t exist', self.conf('directory'))
if data.get('seed_ratio'):
options['stop_at_ratio'] = 1
options['stop_ratio'] = tryFloat(data.get('seed_ratio'))
# Deluge only has seed time as a global option. Might be added in
# in a future API release.
# if data.get('seed_time'):
# Send request to Deluge
if data.get('protocol') == 'torrent_magnet':
remote_torrent = self.drpc.add_torrent_magnet(data.get('url'), options)
else:
filename = self.createFileName(data, filedata, media)
remote_torrent = self.drpc.add_torrent_file(filename, filedata, options)
if not remote_torrent:
log.error('Failed sending torrent to Deluge')
return False
log.info('Torrent sent to Deluge successfully.')
return self.downloadReturnId(remote_torrent)
def test(self):
""" Check if connection works
:return: bool
"""
if self.connect(True) and self.drpc.test():
return True
return False
def getAllDownloadStatus(self, ids):
""" Get status of all active downloads
:param ids: list of (mixed) downloader ids
Used to match the releases for this downloader as there could be
other downloaders active that it should ignore
:return: list of releases
"""
log.debug('Checking Deluge download status.')
if not self.connect():
return []
release_downloads = ReleaseDownloadList(self)
queue = self.drpc.get_alltorrents(ids)
if not queue:
log.debug('Nothing in queue or error')
return []
for torrent_id in queue:
torrent = queue[torrent_id]
if not 'hash' in torrent:
# When given a list of ids, deluge will return an empty item for a non-existant torrent.
continue
log.debug('name=%s / id=%s / save_path=%s / move_on_completed=%s / move_completed_path=%s / hash=%s / progress=%s / state=%s / eta=%s / ratio=%s / stop_ratio=%s / is_seed=%s / is_finished=%s / paused=%s', (torrent['name'], torrent['hash'], torrent['save_path'], torrent['move_on_completed'], torrent['move_completed_path'], torrent['hash'], torrent['progress'], torrent['state'], torrent['eta'], torrent['ratio'], torrent['stop_ratio'], torrent['is_seed'], torrent['is_finished'], torrent['paused']))
# Deluge has no easy way to work out if a torrent is stalled or failing.
#status = 'failed'
status = 'busy'
# If an user opts to seed a torrent forever (usually associated to private trackers usage), stop_ratio will be 0 or -1 (depending on Deluge version).
# In this scenario the status of the torrent would never change from BUSY to SEEDING.
# The last check takes care of this case.
if torrent['is_seed'] and ((tryFloat(torrent['ratio']) < tryFloat(torrent['stop_ratio'])) or (tryFloat(torrent['stop_ratio']) < 0)):
# We have torrent['seeding_time'] to work out what the seeding time is, but we do not
# have access to the downloader seed_time, as with deluge we have no way to pass it
# when the torrent is added. So Deluge will only look at the ratio.
# See above comment in download().
status = 'seeding'
elif torrent['is_seed'] and torrent['is_finished'] and torrent['paused'] and torrent['state'] == 'Paused':
status = 'completed'
download_dir = sp(torrent['save_path'])
if torrent['move_on_completed']:
download_dir = torrent['move_completed_path']
torrent_files = []
for file_item in torrent['files']:
torrent_files.append(sp(os.path.join(download_dir, file_item['path'])))
release_downloads.append({
'id': torrent['hash'],
'name': torrent['name'],
'status': status,
'original_status': torrent['state'],
'seed_ratio': torrent['ratio'],
'timeleft': str(timedelta(seconds = torrent['eta'])),
'folder': sp(download_dir if len(torrent_files) == 1 else os.path.join(download_dir, torrent['name'])),
'files': torrent_files,
})
return release_downloads
def pause(self, release_download, pause = True):
if pause:
return self.drpc.pause_torrent([release_download['id']])
else:
return self.drpc.resume_torrent([release_download['id']])
def removeFailed(self, release_download):
log.info('%s failed downloading, deleting...', release_download['name'])
return self.drpc.remove_torrent(release_download['id'], True)
def processComplete(self, release_download, delete_files = False):
log.debug('Requesting Deluge to remove the torrent %s%s.', (release_download['name'], ' and cleanup the downloaded files' if delete_files else ''))
return self.drpc.remove_torrent(release_download['id'], remove_local_data = delete_files)
class DelugeRPC(object):
host = 'localhost'
port = 58846
username = None
password = None
client = None
def __init__(self, host = 'localhost', port = 58846, username = None, password = None):
super(DelugeRPC, self).__init__()
self.host = host
self.port = port
self.username = username
self.password = password
def connect(self):
#self.client = DelugeClient()
#self.client.connect(self.host, int(self.port), self.username, self.password)
self.client = DelugeRPCClient(self.host, int(self.port), self.username, self.password)
self.client.connect()
def test(self):
try:
self.connect()
except:
return False
return True
def add_torrent_magnet(self, torrent, options):
torrent_id = False
try:
self.connect()
torrent_id = self.client.core.add_torrent_magnet(torrent, options)
if not torrent_id:
torrent_id = self._check_torrent(True, torrent)
if torrent_id and options['label']:
self.client.label.set_torrent(torrent_id, options['label'])
except Exception as err:
log.error('Failed to add torrent magnet %s: %s %s', (torrent, err, traceback.format_exc()))
finally:
if self.client:
self.disconnect()
return torrent_id
def add_torrent_file(self, filename, torrent, options):
torrent_id = False
try:
self.connect()
torrent_id = self.client.core.add_torrent_file(filename, b64encode(torrent), options)
if not torrent_id:
torrent_id = self._check_torrent(False, torrent)
if torrent_id and options['label']:
self.client.label.set_torrent(torrent_id, options['label'])
except Exception as err:
log.error('Failed to add torrent file %s: %s %s', (filename, err, traceback.format_exc()))
finally:
if self.client:
self.disconnect()
return torrent_id
def get_alltorrents(self, ids):
ret = False
try:
self.connect()
ret = self.client.core.get_torrents_status({'id': ids}, ('name', 'hash', 'save_path', 'move_completed_path', 'progress', 'state', 'eta', 'ratio', 'stop_ratio', 'is_seed', 'is_finished', 'paused', 'move_on_completed', 'files'))
except Exception as err:
log.error('Failed to get all torrents: %s %s', (err, traceback.format_exc()))
finally:
if self.client:
self.disconnect()
return ret
def pause_torrent(self, torrent_ids):
try:
self.connect()
self.client.core.pause_torrent(torrent_ids)
except Exception as err:
log.error('Failed to pause torrent: %s %s', (err, traceback.format_exc()))
finally:
if self.client:
self.disconnect()
def resume_torrent(self, torrent_ids):
try:
self.connect()
self.client.core.resume_torrent(torrent_ids)
except Exception as err:
log.error('Failed to resume torrent: %s %s', (err, traceback.format_exc()))
finally:
if self.client:
self.disconnect()
def remove_torrent(self, torrent_id, remove_local_data):
ret = False
try:
self.connect()
ret = self.client.core.remove_torrent(torrent_id, remove_local_data)
except Exception as err:
log.error('Failed to remove torrent: %s %s', (err, traceback.format_exc()))
finally:
if self.client:
self.disconnect()
return ret
def disconnect(self):
self.client.disconnect()
def _check_torrent(self, magnet, torrent):
# Torrent not added, check if it already existed.
if magnet:
torrent_hash = re.findall('urn:btih:([\w]{32,40})', torrent)[0]
else:
info = bdecode(torrent)["info"]
torrent_hash = sha1(benc(info)).hexdigest()
# Convert base 32 to hex
if len(torrent_hash) == 32:
torrent_hash = b16encode(b32decode(torrent_hash))
torrent_hash = torrent_hash.lower()
torrent_check = self.client.core.get_torrent_status(torrent_hash, {})
if torrent_check['hash']:
return torrent_hash
return False
config = [{
'name': 'deluge',
'groups': [
{
'tab': 'downloaders',
'list': 'download_providers',
'name': 'deluge',
'label': 'Deluge',
'description': 'Use <a href="http://www.deluge-torrent.org/" target="_blank">Deluge</a> to download torrents.',
'wizard': True,
'options': [
{
'name': 'enabled',
'default': 0,
'type': 'enabler',
'radio_group': 'torrent',
},
{
'name': 'host',
'default': 'localhost:58846',
'description': 'Hostname with port. Usually <strong>localhost:58846</strong>',
},
{
'name': 'username',
},
{
'name': 'password',
'type': 'password',
},
{
'name': 'directory',
'type': 'directory',
'description': 'Download to this directory. Keep empty for default Deluge download directory.',
},
{
'name': 'completed_directory',
'type': 'directory',
'description': 'Move completed torrent to this directory. Keep empty for default Deluge options.',
'advanced': True,
},
{
'name': 'label',
'description': 'Label to add to torrents in the Deluge UI.',
},
{
'name': 'remove_complete',
'label': 'Remove torrent',
'type': 'bool',
'default': True,
'advanced': True,
'description': 'Remove the torrent from Deluge after it has finished seeding.',
},
{
'name': 'delete_files',
'label': 'Remove files',
'default': True,
'type': 'bool',
'advanced': True,
'description': 'Also remove the leftover files.',
},
{
'name': 'paused',
'type': 'bool',
'advanced': True,
'default': False,
'description': 'Add the torrent paused.',
},
{
'name': 'manual',
'default': 0,
'type': 'bool',
'advanced': True,
'description': 'Disable this downloader for automated searches, but use it when I manually send a release.',
},
{
'name': 'delete_failed',
'default': True,
'advanced': True,
'type': 'bool',
'description': 'Delete a release after the download has failed.',
},
],
}
],
}]
|
gpl-3.0
| 8,216,095,494,107,268,000 | 37.374408 | 512 | 0.545943 | false |
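A hedged usage sketch of the DelugeRPC wrapper defined above, outside of CouchPotato; the host, credentials and torrent hash are placeholders.
rpc = DelugeRPC('localhost', port=58846, username='user', password='secret')
if rpc.test():
    # The hash would normally come from a downloadReturnId() result; placeholder value here.
    status = rpc.get_alltorrents(['0123456789abcdef0123456789abcdef01234567'])
    names = [t['name'] for t in (status or {}).values()]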
pollitosabroson/idneo
|
src/catalogs/migrations/0001_initial.py
|
1
|
1867
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='Category',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('public_id', models.CharField(verbose_name='public_id', unique=True, max_length=12, editable=False, db_index=True)),
('created_date', models.DateTimeField(auto_now_add=True, verbose_name='created date', null=True)),
('last_modified', models.DateTimeField(auto_now=True, auto_now_add=True, null=True, verbose_name='last modified')),
('name', models.CharField(max_length=80, verbose_name='name')),
],
options={
'abstract': False,
'get_latest_by': 'created',
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Type',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('public_id', models.CharField(verbose_name='public_id', unique=True, max_length=12, editable=False, db_index=True)),
('created_date', models.DateTimeField(auto_now_add=True, verbose_name='created date', null=True)),
('last_modified', models.DateTimeField(auto_now=True, auto_now_add=True, null=True, verbose_name='last modified')),
('name', models.CharField(max_length=80, verbose_name='name')),
],
options={
'abstract': False,
'get_latest_by': 'created',
},
bases=(models.Model,),
),
]
|
apache-2.0
| 388,971,664,067,969,900 | 42.418605 | 133 | 0.559186 | false |
GabrielNicolasAvellaneda/dd-agent
|
checks.d/wmi_check.py
|
1
|
5343
|
'''
Windows Only.
Generic WMI check. This check allows you to specify particular metrics that you
want from WMI in your configuration. Check wmi_check.yaml.example in your conf.d
directory for more details on configuration.
'''
# 3rd party
import wmi
# project
from checks import AgentCheck
UP_METRIC = 'Up'
SEARCH_WILDCARD = '*'
class WMICheck(AgentCheck):
def __init__(self, name, init_config, agentConfig, instances):
AgentCheck.__init__(self, name, init_config, agentConfig, instances)
self.wmi_conns = {}
def _get_wmi_conn(self, host, user, password):
key = "%s:%s:%s" % (host, user, password)
if key not in self.wmi_conns:
self.wmi_conns[key] = wmi.WMI(host, user=user, password=password)
return self.wmi_conns[key]
def check(self, instance):
host = instance.get('host', None)
user = instance.get('username', None)
password = instance.get('password', None)
w = self._get_wmi_conn(host, user, password)
wmi_class = instance.get('class')
metrics = instance.get('metrics')
filters = instance.get('filters')
tag_by = instance.get('tag_by')
tag_queries = instance.get('tag_queries')
constant_tags = instance.get('constant_tags')
if not wmi_class:
raise Exception('WMI instance is missing a value for `class` in wmi_check.yaml')
# If there are filters, we need one query per filter.
if filters:
for f in filters:
prop = f.keys()[0]
search = f.values()[0]
if SEARCH_WILDCARD in search:
search = search.replace(SEARCH_WILDCARD, '%')
wql = "SELECT * FROM %s WHERE %s LIKE '%s'" \
% (wmi_class, prop, search)
results = w.query(wql)
else:
results = getattr(w, wmi_class)(**f)
self._extract_metrics(results, metrics, tag_by, w, tag_queries, constant_tags)
else:
results = getattr(w, wmi_class)()
self._extract_metrics(results, metrics, tag_by, w, tag_queries, constant_tags)
def _extract_metrics(self, results, metrics, tag_by, wmi, tag_queries, constant_tags):
if len(results) > 1 and tag_by is None:
raise Exception('WMI query returned multiple rows but no `tag_by` value was given. '
'metrics=%s' % metrics)
for res in results:
tags = []
# include any constant tags...
if constant_tags:
tags.extend(constant_tags)
            # if tag_queries is specified then get attributes from other classes and use them as tags
if tag_queries:
for query in tag_queries:
link_source_property = int(getattr(res, query[0]))
target_class = query[1]
link_target_class_property = query[2]
target_property = query[3]
link_results = \
wmi.query("SELECT {0} FROM {1} WHERE {2} = {3}"
.format(target_property, target_class,
link_target_class_property, link_source_property))
if len(link_results) != 1:
self.log.warning("Failed to find {0} for {1} {2}. No metrics gathered"
.format(target_class, link_target_class_property,
link_source_property))
continue
link_value = str(getattr(link_results[0], target_property)).lower()
tags.append("{0}:{1}".format(target_property.lower(),
"_".join(link_value.split())))
# Grab the tag from the result if there's a `tag_by` value (e.g.: "name:jenkins")
# Strip any #instance off the value when `tag_queries` is set (gives us unique tags)
if tag_by:
tag_value = str(getattr(res, tag_by)).lower()
if tag_queries and tag_value.find("#") > 0:
tag_value = tag_value[:tag_value.find("#")]
tags.append('%s:%s' % (tag_by.lower(), tag_value))
if len(tags) == 0:
tags = None
for wmi_property, name, mtype in metrics:
if wmi_property == UP_METRIC:
# Special-case metric will just submit 1 for every value
# returned in the result.
val = 1
elif getattr(res, wmi_property):
val = float(getattr(res, wmi_property))
else:
self.log.warning("When extracting metrics with wmi, found a null value"
" for property '{0}'. Metric type of property is {1}."
.format(wmi_property, mtype))
continue
# Submit the metric to Datadog
try:
func = getattr(self, mtype)
except AttributeError:
raise Exception('Invalid metric type: {0}'.format(mtype))
func(name, val, tags=tags)
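# --- Illustrative sketch (not part of the original check) --------------------
# A single `instance` entry normally lives in wmi_check.yaml; the dict below is
# a hypothetical example of the keys consumed by WMICheck.check().  The WMI
# class/property names and the metric names are assumptions, not project
# defaults.
EXAMPLE_INSTANCE = {
    'host': 'localhost',
    'class': 'Win32_PerfFormattedData_PerfOS_Processor',
    'filters': [{'Name': '_Total'}],
    'tag_by': 'Name',
    # each metric is (wmi_property, metric_name, metric_type)
    'metrics': [
        ['PercentProcessorTime', 'system.wmi.cpu.pct_processor_time', 'gauge'],
        ['Up', 'system.wmi.cpu.up', 'gauge'],
    ],
}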
|
bsd-3-clause
| 658,768,032,123,503,700 | 41.404762 | 98 | 0.518061 | false |
blancha/abcngspipelines
|
bischipseq/convert1StartTo0Start_batch.py
|
1
|
2156
|
#!/usr/bin/env python3
# Version 1.0
# Author Alexis Blanchet-Cohen
# Date: 15/06/2014
import argparse
import glob
import os
import subprocess
import util
# Read the command line arguments.
parser = argparse.ArgumentParser(description='Generate scripts to convert bedgraph files from one-based start to zero-based start.')
parser.add_argument("-s", "--scriptsDirectory", help="Scripts directory.", default="convert1StartTo0Start")
parser.add_argument("-i", "--inputDirectory", help="Input directory with bedgraph files.", default="../bedgraph/methylation_counts_sorted/")
parser.add_argument("-o", "--outputDirectory", help="Output directory with sorted bedgraph files.", default="../bedgraph/methylation_counts_sorted_0_start/")
parser.add_argument("-q", "--submitJobsToQueue", help="Submit jobs to queue immediately.", choices=["yes", "no", "y", "n"], default="no")
args = parser.parse_args()
# Process the command line arguments.
scriptsDirectory = os.path.abspath(args.scriptsDirectory)
inputDirectory = os.path.abspath(args.inputDirectory)
outputDirectory = os.path.abspath(args.outputDirectory)
samples = util.getMergedsamples()
# Read configuration files.
config = util.readConfigurationFiles()
# Create scripts directory, if it does not exist yet, and cd to it.
if not os.path.exists(scriptsDirectory):
os.mkdir(scriptsDirectory)
os.chdir(scriptsDirectory)
# Create output directory, if it does not exist yet.
if not os.path.exists(outputDirectory):
os.mkdir(outputDirectory)
for file in os.listdir(inputDirectory):
file = os.path.splitext(file)[0]
# Create script file.
scriptName = 'convert1StartTo0Start_' + file + '.sh'
script = open(scriptName, 'w')
util.writeHeader(script, config, "convert1StartTo0Start")
script.write("convert1StartTo0Start.py " + "\\\n")
script.write("--one_start_bedgraph " + inputDirectory + "/" + file + ".bedgraph " + "\\\n")
script.write("--zero_start_bedgraph " + outputDirectory + "/" + file + ".bedgraph")
script.close()
if args.submitJobsToQueue.lower() in ("yes", "y"):
subprocess.call("submitJobs.py", shell=True)
|
gpl-3.0
| -3,264,305,320,030,537,000 | 40.461538 | 157 | 0.728664 | false |
bigfatnoob/DISCAW
|
Models/nasa93.py
|
1
|
7953
|
"""
# The NASA93 Data Set
Standard header:
"""
from __future__ import division,print_function
import sys
sys.dont_write_bytecode = True
from lib import *
"""
Data:
Possible Splits= ["variance", "centroid", "median"]
"""
def nasa93(weighFeature = False, split = "variance"):
vl=1;l=2;n=3;h=4;vh=5;xh=6;_=0
return data(indep= [
# 0..8
'Prec', 'Flex', 'Resl', 'Team', 'Pmat', 'rely', 'data', 'cplx', 'ruse',
# 9 .. 17
'docu', 'time', 'stor', 'pvol', 'acap', 'pcap', 'pcon', 'aexp', 'plex',
# 18 .. 25
'ltex', 'tool', 'site', 'sced', 'kloc'],
less = ['effort', 'defects', 'months'],
_rows = [
[h,h,h,vh,h,h,l,h,n,n,n,n,l,n,n,n,n,n,h,n,n,l,25.9,117.6,808,15.3],
[h,h,h,vh,h,h,l,h,n,n,n,n,l,n,n,n,n,n,h,n,n,l,24.6,117.6,767,15.0],
[h,h,h,vh,h,h,l,h,n,n,n,n,l,n,n,n,n,n,h,n,n,l,7.7,31.2,240,10.1],
[h,h,h,vh,h,h,l,h,n,n,n,n,l,n,n,n,n,n,h,n,n,l,8.2,36,256,10.4],
[h,h,h,vh,h,h,l,h,n,n,n,n,l,n,n,n,n,n,h,n,n,l,9.7,25.2,302,11.0],
[h,h,h,vh,h,h,l,h,n,n,n,n,l,n,n,n,n,n,h,n,n,l,2.2,8.4,69,6.6],
[h,h,h,vh,h,h,l,h,n,n,n,n,l,n,n,n,n,n,h,n,n,l,3.5,10.8,109,7.8],
[h,h,h,vh,h,h,l,h,n,n,n,n,l,n,n,n,n,n,h,n,n,l,66.6,352.8,2077,21.0],
[h,h,h,vh,h,h,l,h,n,n,xh,xh,l,h,h,n,h,n,h,h,n,n,7.5,72,226,13.6],
[h,h,h,vh,n,n,l,h,n,n,n,n,l,h,vh,n,vh,n,h,n,n,n,20,72,566,14.4],
[h,h,h,vh,n,n,l,h,n,n,n,n,l,h,h,n,vh,n,h,n,n,n,6,24,188,9.9],
[h,h,h,vh,n,n,l,h,n,n,n,n,l,h,vh,n,vh,n,h,n,n,n,100,360,2832,25.2],
[h,h,h,vh,n,n,l,h,n,n,n,n,l,h,n,n,vh,n,l,n,n,n,11.3,36,456,12.8],
[h,h,h,vh,n,n,l,h,n,n,n,n,h,h,h,n,h,l,vl,n,n,n,100,215,5434,30.1],
[h,h,h,vh,n,n,l,h,n,n,n,n,l,h,h,n,vh,n,h,n,n,n,20,48,626,15.1],
[h,h,h,vh,n,n,l,h,n,n,n,n,l,h,n,n,n,n,vl,n,n,n,100,360,4342,28.0],
[h,h,h,vh,n,n,l,h,n,n,n,xh,l,h,vh,n,vh,n,h,n,n,n,150,324,4868,32.5],
[h,h,h,vh,n,n,l,h,n,n,n,n,l,h,h,n,h,n,h,n,n,n,31.5,60,986,17.6],
[h,h,h,vh,n,n,l,h,n,n,n,n,l,h,h,n,vh,n,h,n,n,n,15,48,470,13.6],
[h,h,h,vh,n,n,l,h,n,n,n,xh,l,h,n,n,h,n,h,n,n,n,32.5,60,1276,20.8],
[h,h,h,vh,h,h,l,h,n,n,n,n,l,n,n,n,n,n,h,n,n,l,19.7,60,614,13.9],
[h,h,h,vh,h,h,l,h,n,n,n,n,l,n,n,n,n,n,h,n,n,l,66.6,300,2077,21.0],
[h,h,h,vh,h,h,l,h,n,n,n,n,l,n,n,n,n,n,h,n,n,l,29.5,120,920,16.0],
[h,h,h,vh,n,h,n,n,n,n,h,n,n,n,h,n,h,n,n,n,n,n,15,90,575,15.2],
[h,h,h,vh,n,h,n,h,n,n,n,n,n,n,h,n,h,n,n,n,n,n,38,210,1553,21.3],
[h,h,h,vh,n,n,n,n,n,n,n,n,n,n,h,n,h,n,n,n,n,n,10,48,427,12.4],
[h,h,h,vh,h,n,vh,h,n,n,vh,vh,l,vh,n,n,h,l,h,n,n,l,15.4,70,765,14.5],
[h,h,h,vh,h,n,vh,h,n,n,vh,vh,l,vh,n,n,h,l,h,n,n,l,48.5,239,2409,21.4],
[h,h,h,vh,h,n,vh,h,n,n,vh,vh,l,vh,n,n,h,l,h,n,n,l,16.3,82,810,14.8],
[h,h,h,vh,h,n,vh,h,n,n,vh,vh,l,vh,n,n,h,l,h,n,n,l,12.8,62,636,13.6],
[h,h,h,vh,h,n,vh,h,n,n,vh,vh,l,vh,n,n,h,l,h,n,n,l,32.6,170,1619,18.7],
[h,h,h,vh,h,n,vh,h,n,n,vh,vh,l,vh,n,n,h,l,h,n,n,l,35.5,192,1763,19.3],
[h,h,h,vh,h,h,l,h,n,n,n,n,l,n,n,n,n,n,h,n,n,l,5.5,18,172,9.1],
[h,h,h,vh,h,h,l,h,n,n,n,n,l,n,n,n,n,n,h,n,n,l,10.4,50,324,11.2],
[h,h,h,vh,h,h,l,h,n,n,n,n,l,n,n,n,n,n,h,n,n,l,14,60,437,12.4],
[h,h,h,vh,n,h,n,h,n,n,n,n,n,n,n,n,n,n,n,n,n,n,6.5,42,290,12.0],
[h,h,h,vh,n,n,n,h,n,n,n,n,n,n,n,n,n,n,n,n,n,n,13,60,683,14.8],
[h,h,h,vh,h,n,n,h,n,n,n,n,n,n,h,n,n,n,h,h,n,n,90,444,3343,26.7],
[h,h,h,vh,n,n,n,h,n,n,n,n,n,n,n,n,n,n,n,n,n,n,8,42,420,12.5],
[h,h,h,vh,n,n,n,h,n,n,h,n,n,n,n,n,n,n,n,n,n,n,16,114,887,16.4],
[h,h,h,vh,h,n,h,h,n,n,vh,h,l,h,h,n,n,l,h,n,n,l,177.9,1248,7998,31.5],
[h,h,h,vh,h,h,l,h,n,n,n,n,l,n,h,n,n,n,n,n,n,n,302,2400,8543,38.4],
[h,h,h,vh,h,n,h,l,n,n,n,n,h,h,n,n,h,n,n,h,n,n,282.1,1368,9820,37.3],
[h,h,h,vh,h,h,h,l,n,n,n,n,n,h,n,n,h,n,n,n,n,n,284.7,973,8518,38.1],
[h,h,h,vh,n,h,h,n,n,n,n,n,l,n,h,n,h,n,h,n,n,n,79,400,2327,26.9],
[h,h,h,vh,l,l,n,n,n,n,n,n,l,h,vh,n,h,n,h,n,n,n,423,2400,18447,41.9],
[h,h,h,vh,h,n,n,n,n,n,n,n,l,h,vh,n,vh,l,h,n,n,n,190,420,5092,30.3],
[h,h,h,vh,h,n,n,h,n,n,n,h,n,h,n,n,h,n,h,n,n,n,47.5,252,2007,22.3],
[h,h,h,vh,l,vh,n,xh,n,n,h,h,l,n,n,n,h,n,n,h,n,n,21,107,1058,21.3],
[h,h,h,vh,l,n,h,h,n,n,vh,n,n,h,h,n,h,n,h,n,n,n,78,571.4,4815,30.5],
[h,h,h,vh,l,n,h,h,n,n,vh,n,n,h,h,n,h,n,h,n,n,n,11.4,98.8,704,15.5],
[h,h,h,vh,l,n,h,h,n,n,vh,n,n,h,h,n,h,n,h,n,n,n,19.3,155,1191,18.6],
[h,h,h,vh,l,h,n,vh,n,n,h,h,l,h,n,n,n,h,h,n,n,n,101,750,4840,32.4],
[h,h,h,vh,l,h,n,h,n,n,h,h,l,n,n,n,h,n,n,n,n,n,219,2120,11761,42.8],
[h,h,h,vh,l,h,n,h,n,n,h,h,l,n,n,n,h,n,n,n,n,n,50,370,2685,25.4],
[h,h,h,vh,h,vh,h,h,n,n,vh,vh,n,vh,vh,n,vh,n,h,h,n,l,227,1181,6293,33.8],
[h,h,h,vh,h,n,h,vh,n,n,n,n,l,h,vh,n,n,l,n,n,n,l,70,278,2950,20.2],
[h,h,h,vh,h,h,l,h,n,n,n,n,l,n,n,n,n,n,h,n,n,l,0.9,8.4,28,4.9],
[h,h,h,vh,l,vh,l,xh,n,n,xh,vh,l,h,h,n,vh,vl,h,n,n,n,980,4560,50961,96.4],
[h,h,h,vh,n,n,l,h,n,n,n,n,l,vh,vh,n,n,h,h,n,n,n,350,720,8547,35.7],
[h,h,h,vh,h,h,n,xh,n,n,h,h,l,h,n,n,n,h,h,h,n,n,70,458,2404,27.5],
[h,h,h,vh,h,h,n,xh,n,n,h,h,l,h,n,n,n,h,h,h,n,n,271,2460,9308,43.4],
[h,h,h,vh,n,n,n,n,n,n,n,n,l,h,h,n,h,n,h,n,n,n,90,162,2743,25.0],
[h,h,h,vh,n,n,n,n,n,n,n,n,l,h,h,n,h,n,h,n,n,n,40,150,1219,18.9],
[h,h,h,vh,n,h,n,h,n,n,h,n,l,h,h,n,h,n,h,n,n,n,137,636,4210,32.2],
[h,h,h,vh,n,h,n,h,n,n,h,n,h,h,h,n,h,n,h,n,n,n,150,882,5848,36.2],
[h,h,h,vh,n,vh,n,h,n,n,h,n,l,h,h,n,h,n,h,n,n,n,339,444,8477,45.9],
[h,h,h,vh,n,l,h,l,n,n,n,n,h,h,h,n,h,n,h,n,n,n,240,192,10313,37.1],
[h,h,h,vh,l,h,n,h,n,n,n,vh,l,h,h,n,h,h,h,n,n,l,144,576,6129,28.8],
[h,h,h,vh,l,n,l,n,n,n,n,vh,l,h,h,n,h,h,h,n,n,l,151,432,6136,26.2],
[h,h,h,vh,l,n,l,h,n,n,n,vh,l,h,h,n,h,h,h,n,n,l,34,72,1555,16.2],
[h,h,h,vh,l,n,n,h,n,n,n,vh,l,h,h,n,h,h,h,n,n,l,98,300,4907,24.4],
[h,h,h,vh,l,n,n,h,n,n,n,vh,l,h,h,n,h,h,h,n,n,l,85,300,4256,23.2],
[h,h,h,vh,l,n,l,n,n,n,n,vh,l,h,h,n,h,h,h,n,n,l,20,240,813,12.8],
[h,h,h,vh,l,n,l,n,n,n,n,vh,l,h,h,n,h,h,h,n,n,l,111,600,4511,23.5],
[h,h,h,vh,l,h,vh,h,n,n,n,vh,l,h,h,n,h,h,h,n,n,l,162,756,7553,32.4],
[h,h,h,vh,l,h,h,vh,n,n,n,vh,l,h,h,n,h,h,h,n,n,l,352,1200,17597,42.9],
[h,h,h,vh,l,h,n,vh,n,n,n,vh,l,h,h,n,h,h,h,n,n,l,165,97,7867,31.5],
[h,h,h,vh,h,h,n,vh,n,n,h,h,l,h,n,n,n,h,h,n,n,n,60,409,2004,24.9],
[h,h,h,vh,h,h,n,vh,n,n,h,h,l,h,n,n,n,h,h,n,n,n,100,703,3340,29.6],
[h,h,h,vh,n,h,vh,vh,n,n,xh,xh,h,n,n,n,n,l,l,n,n,n,32,1350,2984,33.6],
[h,h,h,vh,h,h,h,h,n,n,vh,xh,h,h,h,n,h,h,h,n,n,n,53,480,2227,28.8],
[h,h,h,vh,h,h,l,vh,n,n,vh,xh,l,vh,vh,n,vh,vl,vl,h,n,n,41,599,1594,23.0],
[h,h,h,vh,h,h,l,vh,n,n,vh,xh,l,vh,vh,n,vh,vl,vl,h,n,n,24,430,933,19.2],
[h,h,h,vh,h,vh,h,vh,n,n,xh,xh,n,h,h,n,h,h,h,n,n,n,165,4178.2,6266,47.3],
[h,h,h,vh,h,vh,h,vh,n,n,xh,xh,n,h,h,n,h,h,h,n,n,n,65,1772.5,2468,34.5],
[h,h,h,vh,h,vh,h,vh,n,n,xh,xh,n,h,h,n,h,h,h,n,n,n,70,1645.9,2658,35.4],
[h,h,h,vh,h,vh,h,xh,n,n,xh,xh,n,h,h,n,h,h,h,n,n,n,50,1924.5,2102,34.2],
[h,h,h,vh,l,vh,l,vh,n,n,vh,xh,l,h,n,n,l,vl,l,h,n,n,7.25,648,406,15.6],
[h,h,h,vh,h,vh,h,vh,n,n,xh,xh,n,h,h,n,h,h,h,n,n,n,233,8211,8848,53.1],
[h,h,h,vh,n,h,n,vh,n,n,vh,vh,h,n,n,n,n,l,l,n,n,n,16.3,480,1253,21.5],
[h,h,h,vh,n,h,n,vh,n,n,vh,vh,h,n,n,n,n,l,l,n,n,n, 6.2, 12,477,15.4],
[h,h,h,vh,n,h,n,vh,n,n,vh,vh,h,n,n,n,n,l,l,n,n,n, 3.0, 38,231,12.0],
],
_tunings =[[
# vlow low nom high vhigh xhigh
#scale factors:
'Prec', 6.20, 4.96, 3.72, 2.48, 1.24, _ ],[
'Flex', 5.07, 4.05, 3.04, 2.03, 1.01, _ ],[
'Resl', 7.07, 5.65, 4.24, 2.83, 1.41, _ ],[
'Pmat', 7.80, 6.24, 4.68, 3.12, 1.56, _ ],[
'Team', 5.48, 4.38, 3.29, 2.19, 1.01, _ ]],
weighFeature = weighFeature,
_split = split
)
"""
Demo code:
"""
def _nasa93(): print(nasa93())
#if __name__ == '__main__': eval(todo('_nasa93()'))
|
mit
| -304,085,712,307,642,560 | 56.215827 | 79 | 0.50044 | false |
eshijia/magnum
|
magnum/db/sqlalchemy/api.py
|
1
|
39042
|
# Copyright 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""SQLAlchemy storage backend."""
from oslo_config import cfg
from oslo_db import exception as db_exc
from oslo_db.sqlalchemy import session as db_session
from oslo_db.sqlalchemy import utils as db_utils
from oslo_log import log
from oslo_utils import timeutils
from sqlalchemy.orm.exc import MultipleResultsFound
from sqlalchemy.orm.exc import NoResultFound
from magnum.common import exception
from magnum.common import utils
from magnum.db import api
from magnum.db.sqlalchemy import models
from magnum.i18n import _
CONF = cfg.CONF
LOG = log.getLogger(__name__)
_FACADE = None
def _create_facade_lazily():
global _FACADE
if _FACADE is None:
_FACADE = db_session.EngineFacade.from_config(CONF)
return _FACADE
def get_engine():
facade = _create_facade_lazily()
return facade.get_engine()
def get_session(**kwargs):
facade = _create_facade_lazily()
return facade.get_session(**kwargs)
def get_backend():
"""The backend is this module itself."""
return Connection()
def model_query(model, *args, **kwargs):
"""Query helper for simpler session usage.
:param session: if present, the session to use
"""
session = kwargs.get('session') or get_session()
query = session.query(model, *args)
return query
def add_identity_filter(query, value):
"""Adds an identity filter to a query.
Filters results by ID, if supplied value is a valid integer.
Otherwise attempts to filter results by UUID.
:param query: Initial query to add filter to.
:param value: Value for filtering results by.
:return: Modified query.
"""
if utils.is_int_like(value):
return query.filter_by(id=value)
elif utils.is_uuid_like(value):
return query.filter_by(uuid=value)
else:
raise exception.InvalidIdentity(identity=value)
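# --- Illustrative sketch (not part of the original module) ------------------
# add_identity_filter() chooses the filter column from the shape of the value:
# integer-like values filter on `id`, UUID-like strings on `uuid`.  The helper
# below is a hypothetical usage example only and is not called anywhere.
def _example_identity_filter_usage(bay_id_or_uuid):
    """Fetch a single Bay by numeric id or by UUID (illustrative only)."""
    query = model_query(models.Bay)
    query = add_identity_filter(query, bay_id_or_uuid)
    return query.one()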
def _paginate_query(model, limit=None, marker=None, sort_key=None,
sort_dir=None, query=None):
if not query:
query = model_query(model)
sort_keys = ['id']
if sort_key and sort_key not in sort_keys:
sort_keys.insert(0, sort_key)
try:
query = db_utils.paginate_query(query, model, limit, sort_keys,
marker=marker, sort_dir=sort_dir)
except db_exc.InvalidSortKey:
raise exception.InvalidParameterValue(
_('The sort_key value "%(key)s" is an invalid field for sorting')
% {'key': sort_key})
return query.all()
class Connection(api.Connection):
"""SqlAlchemy connection."""
def __init__(self):
pass
def _add_tenant_filters(self, context, query):
if context.is_admin and context.all_tenants:
return query
if context.project_id:
query = query.filter_by(project_id=context.project_id)
else:
query = query.filter_by(user_id=context.user_id)
return query
def _add_bays_filters(self, query, filters):
if filters is None:
filters = []
if 'baymodel_id' in filters:
query = query.filter_by(baymodel_id=filters['baymodel_id'])
if 'name' in filters:
query = query.filter_by(name=filters['name'])
if 'node_count' in filters:
query = query.filter_by(node_count=filters['node_count'])
if 'master_count' in filters:
query = query.filter_by(master_count=filters['master_count'])
if 'stack_id' in filters:
query = query.filter_by(stack_id=filters['stack_id'])
if 'api_address' in filters:
query = query.filter_by(api_address=filters['api_address'])
if 'node_addresses' in filters:
query = query.filter_by(node_addresses=filters['node_addresses'])
if 'project_id' in filters:
query = query.filter_by(project_id=filters['project_id'])
if 'user_id' in filters:
query = query.filter_by(user_id=filters['user_id'])
if 'status' in filters:
query = query.filter(models.Bay.status.in_(filters['status']))
return query
def get_bay_list(self, context, filters=None, limit=None, marker=None,
sort_key=None, sort_dir=None):
query = model_query(models.Bay)
query = self._add_tenant_filters(context, query)
query = self._add_bays_filters(query, filters)
return _paginate_query(models.Bay, limit, marker,
sort_key, sort_dir, query)
def create_bay(self, values):
# ensure defaults are present for new bays
if not values.get('uuid'):
values['uuid'] = utils.generate_uuid()
bay = models.Bay()
bay.update(values)
try:
bay.save()
except db_exc.DBDuplicateEntry:
raise exception.BayAlreadyExists(uuid=values['uuid'])
return bay
def get_bay_by_id(self, context, bay_id):
query = model_query(models.Bay)
query = self._add_tenant_filters(context, query)
query = query.filter_by(id=bay_id)
try:
return query.one()
except NoResultFound:
raise exception.BayNotFound(bay=bay_id)
def get_bay_by_name(self, context, bay_name):
query = model_query(models.Bay)
query = self._add_tenant_filters(context, query)
query = query.filter_by(name=bay_name)
try:
return query.one()
except MultipleResultsFound:
raise exception.Conflict('Multiple bays exist with same name.'
' Please use the bay uuid instead.')
except NoResultFound:
raise exception.BayNotFound(bay=bay_name)
def get_bay_by_uuid(self, context, bay_uuid):
query = model_query(models.Bay)
query = self._add_tenant_filters(context, query)
query = query.filter_by(uuid=bay_uuid)
try:
return query.one()
except NoResultFound:
raise exception.BayNotFound(bay=bay_uuid)
def destroy_bay(self, bay_id):
def destroy_bay_resources(session, bay_uuid):
"""Checks whether the bay does not have resources."""
query = model_query(models.Pod, session=session)
query = self._add_pods_filters(query, {'bay_uuid': bay_uuid})
if query.count() != 0:
query.delete()
query = model_query(models.Service, session=session)
query = self._add_services_filters(query, {'bay_uuid': bay_uuid})
if query.count() != 0:
query.delete()
query = model_query(models.ReplicationController, session=session)
query = self._add_rcs_filters(query, {'bay_uuid': bay_uuid})
if query.count() != 0:
query.delete()
query = model_query(models.Container, session=session)
query = self._add_containers_filters(query, {'bay_uuid': bay_uuid})
if query.count() != 0:
query.delete()
session = get_session()
with session.begin():
query = model_query(models.Bay, session=session)
query = add_identity_filter(query, bay_id)
try:
bay_ref = query.one()
except NoResultFound:
raise exception.BayNotFound(bay=bay_id)
destroy_bay_resources(session, bay_ref['uuid'])
query.delete()
def update_bay(self, bay_id, values):
# NOTE(dtantsur): this can lead to very strange errors
if 'uuid' in values:
msg = _("Cannot overwrite UUID for an existing Bay.")
raise exception.InvalidParameterValue(err=msg)
return self._do_update_bay(bay_id, values)
def _do_update_bay(self, bay_id, values):
session = get_session()
with session.begin():
query = model_query(models.Bay, session=session)
query = add_identity_filter(query, bay_id)
try:
ref = query.with_lockmode('update').one()
except NoResultFound:
raise exception.BayNotFound(bay=bay_id)
if 'provision_state' in values:
values['provision_updated_at'] = timeutils.utcnow()
ref.update(values)
return ref
def create_bay_lock(self, bay_uuid, conductor_id):
session = get_session()
with session.begin():
query = model_query(models.BayLock, session=session)
lock = query.filter_by(bay_uuid=bay_uuid).first()
if lock is not None:
return lock.conductor_id
session.add(models.BayLock(bay_uuid=bay_uuid,
conductor_id=conductor_id))
def steal_bay_lock(self, bay_uuid, old_conductor_id, new_conductor_id):
session = get_session()
with session.begin():
query = model_query(models.BayLock, session=session)
lock = query.filter_by(bay_uuid=bay_uuid).first()
if lock is None:
return True
elif lock.conductor_id != old_conductor_id:
return lock.conductor_id
else:
lock.update({'conductor_id': new_conductor_id})
def release_bay_lock(self, bay_uuid, conductor_id):
session = get_session()
with session.begin():
query = model_query(models.BayLock, session=session)
query = query.filter_by(bay_uuid=bay_uuid,
conductor_id=conductor_id)
count = query.delete()
if count == 0:
return True
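    # NOTE (descriptive comment, not part of the original code): the three
    # bay-lock methods above implement a small protocol through their return
    # values -- create_bay_lock() returns None on success and the current
    # holder's conductor_id otherwise; steal_bay_lock() returns True when no
    # lock exists, the holder's conductor_id when it is held by an unexpected
    # conductor, and None when the steal succeeded; release_bay_lock() returns
    # True only when no matching lock row was found to delete.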
def _add_baymodels_filters(self, query, filters):
if filters is None:
filters = []
if 'name' in filters:
query = query.filter_by(name=filters['name'])
if 'image_id' in filters:
query = query.filter_by(image_id=filters['image_id'])
if 'flavor_id' in filters:
query = query.filter_by(flavor_id=filters['flavor_id'])
if 'master_flavor_id' in filters:
query = query.filter_by(
master_flavor_id=filters['master_flavor_id'])
if 'keypair_id' in filters:
query = query.filter_by(keypair_id=filters['keypair_id'])
if 'external_network_id' in filters:
query = query.filter_by(
external_network_id=filters['external_network_id'])
if 'dns_nameserver' in filters:
query = query.filter_by(dns_nameserver=filters['dns_nameserver'])
if 'project_id' in filters:
query = query.filter_by(project_id=filters['project_id'])
if 'user_id' in filters:
query = query.filter_by(user_id=filters['user_id'])
if 'labels' in filters:
query = query.filter_by(labels=filters['labels'])
return query
def get_baymodel_list(self, context, filters=None, limit=None, marker=None,
sort_key=None, sort_dir=None):
query = model_query(models.BayModel)
query = self._add_tenant_filters(context, query)
query = self._add_baymodels_filters(query, filters)
return _paginate_query(models.BayModel, limit, marker,
sort_key, sort_dir, query)
def create_baymodel(self, values):
# ensure defaults are present for new baymodels
if not values.get('uuid'):
values['uuid'] = utils.generate_uuid()
baymodel = models.BayModel()
baymodel.update(values)
try:
baymodel.save()
except db_exc.DBDuplicateEntry:
raise exception.BayModelAlreadyExists(uuid=values['uuid'])
return baymodel
def get_baymodel_by_id(self, context, baymodel_id):
query = model_query(models.BayModel)
query = self._add_tenant_filters(context, query)
query = query.filter_by(id=baymodel_id)
try:
return query.one()
except NoResultFound:
raise exception.BayModelNotFound(baymodel=baymodel_id)
def get_baymodel_by_uuid(self, context, baymodel_uuid):
query = model_query(models.BayModel)
query = self._add_tenant_filters(context, query)
query = query.filter_by(uuid=baymodel_uuid)
try:
return query.one()
except NoResultFound:
raise exception.BayModelNotFound(baymodel=baymodel_uuid)
def get_baymodel_by_name(self, context, baymodel_name):
query = model_query(models.BayModel)
query = self._add_tenant_filters(context, query)
query = query.filter_by(name=baymodel_name)
try:
return query.one()
except MultipleResultsFound:
raise exception.Conflict('Multiple baymodels exist with same name.'
' Please use the baymodel uuid instead.')
except NoResultFound:
raise exception.BayModelNotFound(baymodel=baymodel_name)
def destroy_baymodel(self, baymodel_id):
def is_baymodel_referenced(session, baymodel_uuid):
"""Checks whether the baymodel is referenced by bay(s)."""
query = model_query(models.Bay, session=session)
query = self._add_bays_filters(query,
{'baymodel_id': baymodel_uuid})
return query.count() != 0
session = get_session()
with session.begin():
query = model_query(models.BayModel, session=session)
query = add_identity_filter(query, baymodel_id)
try:
baymodel_ref = query.one()
except NoResultFound:
raise exception.BayModelNotFound(baymodel=baymodel_id)
if is_baymodel_referenced(session, baymodel_ref['uuid']):
raise exception.BayModelReferenced(baymodel=baymodel_id)
query.delete()
def update_baymodel(self, baymodel_id, values):
# NOTE(dtantsur): this can lead to very strange errors
if 'uuid' in values:
msg = _("Cannot overwrite UUID for an existing BayModel.")
raise exception.InvalidParameterValue(err=msg)
return self._do_update_baymodel(baymodel_id, values)
def _do_update_baymodel(self, baymodel_id, values):
session = get_session()
with session.begin():
query = model_query(models.BayModel, session=session)
query = add_identity_filter(query, baymodel_id)
try:
ref = query.with_lockmode('update').one()
except NoResultFound:
raise exception.BayModelNotFound(baymodel=baymodel_id)
ref.update(values)
return ref
def _add_containers_filters(self, query, filters):
if filters is None:
filters = []
if 'name' in filters:
query = query.filter_by(name=filters['name'])
if 'image' in filters:
query = query.filter_by(image=filters['image'])
if 'project_id' in filters:
query = query.filter_by(project_id=filters['project_id'])
if 'user_id' in filters:
query = query.filter_by(user_id=filters['user_id'])
return query
def get_container_list(self, context, filters=None, limit=None,
marker=None, sort_key=None, sort_dir=None):
query = model_query(models.Container)
query = self._add_tenant_filters(context, query)
query = self._add_containers_filters(query, filters)
return _paginate_query(models.Container, limit, marker,
sort_key, sort_dir, query)
def create_container(self, values):
# ensure defaults are present for new containers
if not values.get('uuid'):
values['uuid'] = utils.generate_uuid()
container = models.Container()
container.update(values)
try:
container.save()
except db_exc.DBDuplicateEntry:
raise exception.ContainerAlreadyExists(uuid=values['uuid'])
return container
def get_container_by_id(self, context, container_id):
query = model_query(models.Container)
query = self._add_tenant_filters(context, query)
query = query.filter_by(id=container_id)
try:
return query.one()
except NoResultFound:
raise exception.ContainerNotFound(container=container_id)
def get_container_by_uuid(self, context, container_uuid):
query = model_query(models.Container)
query = self._add_tenant_filters(context, query)
query = query.filter_by(uuid=container_uuid)
try:
return query.one()
except NoResultFound:
raise exception.ContainerNotFound(container=container_uuid)
def get_container_by_name(self, context, container_name):
query = model_query(models.Container)
query = self._add_tenant_filters(context, query)
query = query.filter_by(name=container_name)
try:
return query.one()
except NoResultFound:
raise exception.ContainerNotFound(container=container_name)
except MultipleResultsFound:
raise exception.Conflict('Multiple containers exist with same '
'name. Please use the container uuid '
'instead.')
def destroy_container(self, container_id):
session = get_session()
with session.begin():
query = model_query(models.Container, session=session)
query = add_identity_filter(query, container_id)
count = query.delete()
if count != 1:
raise exception.ContainerNotFound(container_id)
def update_container(self, container_id, values):
# NOTE(dtantsur): this can lead to very strange errors
if 'uuid' in values:
msg = _("Cannot overwrite UUID for an existing Container.")
raise exception.InvalidParameterValue(err=msg)
return self._do_update_container(container_id, values)
def _do_update_container(self, container_id, values):
session = get_session()
with session.begin():
query = model_query(models.Container, session=session)
query = add_identity_filter(query, container_id)
try:
ref = query.with_lockmode('update').one()
except NoResultFound:
raise exception.ContainerNotFound(container=container_id)
if 'provision_state' in values:
values['provision_updated_at'] = timeutils.utcnow()
ref.update(values)
return ref
def _add_nodes_filters(self, query, filters):
if filters is None:
filters = []
if 'associated' in filters:
if filters['associated']:
query = query.filter(models.Node.ironic_node_id != None)
else:
query = query.filter(models.Node.ironic_node_id == None)
if 'type' in filters:
query = query.filter_by(type=filters['type'])
if 'image_id' in filters:
query = query.filter_by(image_id=filters['image_id'])
if 'project_id' in filters:
query = query.filter_by(project_id=filters['project_id'])
if 'user_id' in filters:
query = query.filter_by(user_id=filters['user_id'])
return query
def get_node_list(self, context, filters=None, limit=None, marker=None,
sort_key=None, sort_dir=None):
query = model_query(models.Node)
query = self._add_tenant_filters(context, query)
query = self._add_nodes_filters(query, filters)
return _paginate_query(models.Node, limit, marker,
sort_key, sort_dir, query)
def create_node(self, values):
# ensure defaults are present for new nodes
if not values.get('uuid'):
values['uuid'] = utils.generate_uuid()
node = models.Node()
node.update(values)
try:
node.save()
except db_exc.DBDuplicateEntry as exc:
if 'ironic_node_id' in exc.columns:
raise exception.InstanceAssociated(
instance_uuid=values['ironic_node_id'],
node=values['uuid'])
raise exception.NodeAlreadyExists(uuid=values['uuid'])
return node
def get_node_by_id(self, context, node_id):
query = model_query(models.Node)
query = self._add_tenant_filters(context, query)
query = query.filter_by(id=node_id)
try:
return query.one()
except NoResultFound:
raise exception.NodeNotFound(node=node_id)
def get_node_by_uuid(self, context, node_uuid):
query = model_query(models.Node)
query = self._add_tenant_filters(context, query)
query = query.filter_by(uuid=node_uuid)
try:
return query.one()
except NoResultFound:
raise exception.NodeNotFound(node=node_uuid)
def destroy_node(self, node_id):
session = get_session()
with session.begin():
query = model_query(models.Node, session=session)
query = add_identity_filter(query, node_id)
count = query.delete()
if count != 1:
raise exception.NodeNotFound(node_id)
def update_node(self, node_id, values):
# NOTE(dtantsur): this can lead to very strange errors
if 'uuid' in values:
msg = _("Cannot overwrite UUID for an existing Node.")
raise exception.InvalidParameterValue(err=msg)
try:
return self._do_update_node(node_id, values)
except db_exc.DBDuplicateEntry:
raise exception.InstanceAssociated(
instance_uuid=values['ironic_node_id'],
node=node_id)
def _do_update_node(self, node_id, values):
session = get_session()
with session.begin():
query = model_query(models.Node, session=session)
query = add_identity_filter(query, node_id)
try:
ref = query.with_lockmode('update').one()
except NoResultFound:
raise exception.NodeNotFound(node=node_id)
# Prevent ironic_node_id overwriting
if values.get("ironic_node_id") and ref.ironic_node_id:
raise exception.NodeAssociated(
node=node_id,
instance=ref.ironic_node_id)
ref.update(values)
return ref
def _add_pods_filters(self, query, filters):
if filters is None:
filters = []
if 'bay_uuid' in filters:
query = query.filter_by(bay_uuid=filters['bay_uuid'])
if 'name' in filters:
query = query.filter_by(name=filters['name'])
if 'status' in filters:
query = query.filter_by(status=filters['status'])
return query
def get_pod_list(self, context, filters=None, limit=None, marker=None,
sort_key=None, sort_dir=None):
query = model_query(models.Pod)
query = self._add_tenant_filters(context, query)
query = self._add_pods_filters(query, filters)
return _paginate_query(models.Pod, limit, marker,
sort_key, sort_dir, query)
def create_pod(self, values):
# ensure defaults are present for new pods
if not values.get('uuid'):
values['uuid'] = utils.generate_uuid()
pod = models.Pod()
pod.update(values)
try:
pod.save()
except db_exc.DBDuplicateEntry:
raise exception.PodAlreadyExists(uuid=values['uuid'])
return pod
def get_pod_by_id(self, context, pod_id):
query = model_query(models.Pod)
query = self._add_tenant_filters(context, query)
query = query.filter_by(id=pod_id)
try:
return query.one()
except NoResultFound:
raise exception.PodNotFound(pod=pod_id)
def get_pod_by_uuid(self, context, pod_uuid):
query = model_query(models.Pod)
query = self._add_tenant_filters(context, query)
query = query.filter_by(uuid=pod_uuid)
try:
return query.one()
except NoResultFound:
raise exception.PodNotFound(pod=pod_uuid)
def get_pod_by_name(self, pod_name):
query = model_query(models.Pod).filter_by(name=pod_name)
try:
return query.one()
except MultipleResultsFound:
raise exception.Conflict('Multiple pods exist with same name.'
' Please use the pod uuid instead.')
except NoResultFound:
raise exception.PodNotFound(pod=pod_name)
def destroy_pod(self, pod_id):
session = get_session()
with session.begin():
query = model_query(models.Pod, session=session)
query = add_identity_filter(query, pod_id)
count = query.delete()
if count != 1:
raise exception.PodNotFound(pod_id)
def update_pod(self, pod_id, values):
# NOTE(dtantsur): this can lead to very strange errors
if 'uuid' in values:
msg = _("Cannot overwrite UUID for an existing Pod.")
raise exception.InvalidParameterValue(err=msg)
return self._do_update_pod(pod_id, values)
def _do_update_pod(self, pod_id, values):
session = get_session()
with session.begin():
query = model_query(models.Pod, session=session)
query = add_identity_filter(query, pod_id)
try:
ref = query.with_lockmode('update').one()
except NoResultFound:
raise exception.PodNotFound(pod=pod_id)
if 'provision_state' in values:
values['provision_updated_at'] = timeutils.utcnow()
ref.update(values)
return ref
def _add_services_filters(self, query, filters):
if filters is None:
filters = []
if 'bay_uuid' in filters:
query = query.filter_by(bay_uuid=filters['bay_uuid'])
if 'name' in filters:
query = query.filter_by(name=filters['name'])
if 'ip' in filters:
query = query.filter_by(ip=filters['ip'])
if 'ports' in filters:
query = query.filter_by(ports=filters['ports'])
return query
def get_service_list(self, context, filters=None, limit=None, marker=None,
sort_key=None, sort_dir=None):
query = model_query(models.Service)
query = self._add_tenant_filters(context, query)
query = self._add_services_filters(query, filters)
return _paginate_query(models.Service, limit, marker,
sort_key, sort_dir, query)
def create_service(self, values):
# ensure defaults are present for new services
if not values.get('uuid'):
values['uuid'] = utils.generate_uuid()
service = models.Service()
service.update(values)
try:
service.save()
except db_exc.DBDuplicateEntry:
raise exception.ServiceAlreadyExists(uuid=values['uuid'])
return service
def get_service_by_id(self, context, service_id):
query = model_query(models.Service)
query = self._add_tenant_filters(context, query)
query = query.filter_by(id=service_id)
try:
return query.one()
except NoResultFound:
raise exception.ServiceNotFound(service=service_id)
def get_service_by_uuid(self, context, service_uuid):
query = model_query(models.Service)
query = self._add_tenant_filters(context, query)
query = query.filter_by(uuid=service_uuid)
try:
return query.one()
except NoResultFound:
raise exception.ServiceNotFound(service=service_uuid)
def get_services_by_bay_uuid(self, context, bay_uuid):
# First verify whether the Bay exists
self.get_bay_by_uuid(context, bay_uuid)
query = model_query(models.Service).filter_by(bay_uuid=bay_uuid)
try:
return query.all()
except NoResultFound:
raise exception.ServiceNotFound(bay=bay_uuid)
def get_service_by_name(self, context, service_name):
query = model_query(models.Service)
query = self._add_tenant_filters(context, query)
query = query.filter_by(name=service_name)
try:
return query.one()
except MultipleResultsFound:
raise exception.Conflict('Multiple services exist with same name.'
' Please use the service uuid instead.')
except NoResultFound:
raise exception.ServiceNotFound(service=service_name)
def destroy_service(self, service_id):
session = get_session()
with session.begin():
query = model_query(models.Service, session=session)
query = add_identity_filter(query, service_id)
count = query.delete()
if count != 1:
raise exception.ServiceNotFound(service_id)
def update_service(self, service_id, values):
# NOTE(dtantsur): this can lead to very strange errors
if 'uuid' in values:
msg = _("Cannot overwrite UUID for an existing Service.")
raise exception.InvalidParameterValue(err=msg)
return self._do_update_service(service_id, values)
def _do_update_service(self, service_id, values):
session = get_session()
with session.begin():
query = model_query(models.Service, session=session)
query = add_identity_filter(query, service_id)
try:
ref = query.with_lockmode('update').one()
except NoResultFound:
raise exception.ServiceNotFound(service=service_id)
if 'provision_state' in values:
values['provision_updated_at'] = timeutils.utcnow()
ref.update(values)
return ref
def _add_rcs_filters(self, query, filters):
if filters is None:
filters = []
if 'bay_uuid' in filters:
query = query.filter_by(bay_uuid=filters['bay_uuid'])
if 'name' in filters:
query = query.filter_by(name=filters['name'])
if 'replicas' in filters:
query = query.filter_by(replicas=filters['replicas'])
return query
def get_rc_list(self, context, filters=None, limit=None, marker=None,
sort_key=None, sort_dir=None):
query = model_query(models.ReplicationController)
query = self._add_tenant_filters(context, query)
query = self._add_rcs_filters(query, filters)
return _paginate_query(models.ReplicationController, limit, marker,
sort_key, sort_dir, query)
def create_rc(self, values):
# ensure defaults are present for new ReplicationController
if not values.get('uuid'):
values['uuid'] = utils.generate_uuid()
rc = models.ReplicationController()
rc.update(values)
try:
rc.save()
except db_exc.DBDuplicateEntry:
raise exception.ReplicationControllerAlreadyExists(
uuid=values['uuid'])
return rc
def get_rc_by_id(self, context, rc_id):
query = model_query(models.ReplicationController)
query = self._add_tenant_filters(context, query)
query = query.filter_by(id=rc_id)
try:
return query.one()
except NoResultFound:
raise exception.ReplicationControllerNotFound(rc=rc_id)
def get_rc_by_uuid(self, context, rc_uuid):
query = model_query(models.ReplicationController)
query = self._add_tenant_filters(context, query)
query = query.filter_by(uuid=rc_uuid)
try:
return query.one()
except NoResultFound:
raise exception.ReplicationControllerNotFound(rc=rc_uuid)
def get_rcs_by_bay_uuid(self, context, bay_uuid):
# First verify whether the Bay exists
self.get_bay_by_uuid(context, bay_uuid)
query = model_query(models.ReplicationController).filter_by(
bay_uuid=bay_uuid)
try:
return query.all()
except NoResultFound:
raise exception.ReplicationControllerNotFound(bay=bay_uuid)
def get_rc_by_name(self, context, rc_name):
query = model_query(models.ReplicationController)
query = self._add_tenant_filters(context, query)
query = query.filter_by(name=rc_name)
try:
return query.one()
except MultipleResultsFound:
raise exception.Conflict('Multiple rcs exist with same name.'
' Please use the rc uuid instead.')
except NoResultFound:
raise exception.ReplicationControllerNotFound(rc=rc_name)
def destroy_rc(self, rc_id):
session = get_session()
with session.begin():
query = model_query(models.ReplicationController, session=session)
query = add_identity_filter(query, rc_id)
count = query.delete()
if count != 1:
raise exception.ReplicationControllerNotFound(rc_id)
def update_rc(self, rc_id, values):
if 'uuid' in values:
msg = _("Cannot overwrite UUID for an existing rc.")
raise exception.InvalidParameterValue(err=msg)
return self._do_update_rc(rc_id, values)
def _do_update_rc(self, rc_id, values):
session = get_session()
with session.begin():
query = model_query(models.ReplicationController, session=session)
query = add_identity_filter(query, rc_id)
try:
ref = query.with_lockmode('update').one()
except NoResultFound:
raise exception.ReplicationControllerNotFound(rc=rc_id)
ref.update(values)
return ref
def create_x509keypair(self, values):
# ensure defaults are present for new x509keypairs
if not values.get('uuid'):
values['uuid'] = utils.generate_uuid()
x509keypair = models.X509KeyPair()
x509keypair.update(values)
try:
x509keypair.save()
except db_exc.DBDuplicateEntry:
raise exception.X509KeyPairAlreadyExists(uuid=values['uuid'])
return x509keypair
def get_x509keypair_by_id(self, context, x509keypair_id):
query = model_query(models.X509KeyPair)
query = self._add_tenant_filters(context, query)
query = query.filter_by(id=x509keypair_id)
try:
return query.one()
except NoResultFound:
raise exception.X509KeyPairNotFound(x509keypair=x509keypair_id)
def get_x509keypair_by_name(self, context, x509keypair_name):
query = model_query(models.X509KeyPair)
query = self._add_tenant_filters(context, query)
query = query.filter_by(name=x509keypair_name)
try:
return query.one()
except MultipleResultsFound:
raise exception.Conflict('Multiple x509keypairs exist with '
'same name. Please use the x509keypair '
'uuid instead.')
except NoResultFound:
raise exception.X509KeyPairNotFound(x509keypair=x509keypair_name)
def get_x509keypair_by_uuid(self, context, x509keypair_uuid):
query = model_query(models.X509KeyPair)
query = self._add_tenant_filters(context, query)
query = query.filter_by(uuid=x509keypair_uuid)
try:
return query.one()
except NoResultFound:
raise exception.X509KeyPairNotFound(x509keypair=x509keypair_uuid)
def destroy_x509keypair(self, x509keypair_id):
session = get_session()
with session.begin():
query = model_query(models.X509KeyPair, session=session)
query = add_identity_filter(query, x509keypair_id)
count = query.delete()
if count != 1:
raise exception.X509KeyPairNotFound(x509keypair_id)
def update_x509keypair(self, x509keypair_id, values):
# NOTE(dtantsur): this can lead to very strange errors
if 'uuid' in values:
msg = _("Cannot overwrite UUID for an existing X509KeyPair.")
raise exception.InvalidParameterValue(err=msg)
return self._do_update_x509keypair(x509keypair_id, values)
def _do_update_x509keypair(self, x509keypair_id, values):
session = get_session()
with session.begin():
query = model_query(models.X509KeyPair, session=session)
query = add_identity_filter(query, x509keypair_id)
try:
ref = query.with_lockmode('update').one()
except NoResultFound:
raise exception.X509KeyPairNotFound(x509keypair=x509keypair_id)
if 'provision_state' in values:
values['provision_updated_at'] = timeutils.utcnow()
ref.update(values)
return ref
def _add_x509keypairs_filters(self, query, filters):
if filters is None:
filters = []
if 'bay_uuid' in filters:
query = query.filter_by(bay_uuid=filters['bay_uuid'])
if 'name' in filters:
query = query.filter_by(name=filters['name'])
if 'project_id' in filters:
query = query.filter_by(project_id=filters['project_id'])
if 'user_id' in filters:
query = query.filter_by(user_id=filters['user_id'])
return query
def get_x509keypair_list(self, context, filters=None, limit=None,
marker=None, sort_key=None, sort_dir=None):
query = model_query(models.X509KeyPair)
query = self._add_tenant_filters(context, query)
query = self._add_x509keypairs_filters(query, filters)
return _paginate_query(models.X509KeyPair, limit, marker,
sort_key, sort_dir, query)
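# --- Illustrative sketch (not part of the original module) ------------------
# Hypothetical usage of this backend outside of the magnum services; the field
# values below are made up and only show the expected call pattern:
#
#     conn = get_backend()
#     bay = conn.create_bay({'name': 'demo-bay', 'project_id': 'p1',
#                            'user_id': 'u1', 'node_count': 1})
#     same_bay = conn.get_bay_by_uuid(context, bay.uuid)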
|
apache-2.0
| 5,895,803,285,767,630,000 | 36.978599 | 79 | 0.596102 | false |
jandom/rdkit
|
rdkit/Chem/Draw/qtCanvas.py
|
1
|
3513
|
# $Id$
#
# Copyright (C) 2014 Seiji Matsuoka
#
# @@ All Rights Reserved @@
# This file is part of the RDKit.
# The contents are covered by the terms of the BSD license
# which is included in the file license.txt, found at the root
# of the RDKit source tree.
#
from rdkit.Chem.Draw.canvasbase import CanvasBase
from PySide import QtGui, QtCore
class Canvas(CanvasBase):
def __init__(self, size):
self.size = size
self.qsize = QtCore.QSize(*size)
self.pixmap = QtGui.QPixmap(self.qsize)
self.painter = QtGui.QPainter(self.pixmap)
self.painter.setRenderHint(QtGui.QPainter.Antialiasing, True)
self.painter.setRenderHint(QtGui.QPainter.SmoothPixmapTransform, True)
self.painter.fillRect(0, 0, size[0], size[1], QtCore.Qt.white)
def addCanvasLine(self, p1, p2, color=(0, 0, 0), color2=None, **kwargs):
if 'dash' in kwargs:
line_type = QtCore.Qt.DashLine
else:
line_type = QtCore.Qt.SolidLine
qp1 = QtCore.QPointF(*p1)
qp2 = QtCore.QPointF(*p2)
qpm = QtCore.QPointF((p1[0] + p2[0]) / 2, (p1[1] + p2[1]) / 2)
if color2 and color2 != color:
rgb = [int(c * 255) for c in color]
pen = QtGui.QPen(QtGui.QColor(*rgb), 1, line_type)
self.painter.setPen(pen)
self.painter.drawLine(qp1, qpm)
rgb2 = [int(c * 255) for c in color2]
pen.setColor(QtGui.QColor(*rgb2))
self.painter.setPen(pen)
self.painter.drawLine(qpm, qp2)
else:
rgb = [int(c * 255) for c in color]
pen = QtGui.QPen(QtGui.QColor(*rgb), 1, line_type)
self.painter.setPen(pen)
self.painter.drawLine(qp1, qp2)
def addCanvasText(self, text, pos, font, color=(0, 0, 0), **kwargs):
orientation = kwargs.get('orientation', 'E')
qfont = QtGui.QFont("Helvetica", font.size * 1.5)
qtext = QtGui.QTextDocument()
qtext.setDefaultFont(qfont)
colored = [int(c * 255) for c in color]
colored.append(text)
html_format = "<span style='color:rgb({},{},{})'>{}</span>"
formatted = html_format.format(*colored)
qtext.setHtml(formatted)
if orientation == 'N':
qpos = QtCore.QPointF(pos[0] - qtext.idealWidth() / 2, pos[1] - font.size)
elif orientation == 'W':
qpos = QtCore.QPointF(pos[0] - qtext.idealWidth() + font.size, pos[1] - font.size)
else:
qpos = QtCore.QPointF(pos[0] - font.size, pos[1] - font.size)
self.painter.save()
self.painter.translate(qpos)
qtext.drawContents(self.painter)
self.painter.restore()
return font.size * 1.8, font.size * 1.8, 0
def addCanvasPolygon(self, ps, color=(0, 0, 0), fill=True, stroke=False, **kwargs):
polygon = QtGui.QPolygonF()
for ver in ps:
polygon.append(QtCore.QPointF(*ver))
pen = QtGui.QPen(QtGui.QColor(*color), 1, QtCore.Qt.SolidLine)
self.painter.setPen(pen)
self.painter.setBrush(QtGui.QColor(0, 0, 0))
self.painter.drawPolygon(polygon)
def addCanvasDashedWedge(self, p1, p2, p3, dash=(2, 2), color=(0, 0, 0), color2=None, **kwargs):
rgb = [int(c * 255) for c in color]
pen = QtGui.QPen(QtGui.QColor(*rgb), 1, QtCore.Qt.SolidLine)
self.painter.setPen(pen)
dash = (4, 4)
pts1 = self._getLinePoints(p1, p2, dash)
pts2 = self._getLinePoints(p1, p3, dash)
if len(pts2) < len(pts1):
pts2, pts1 = pts1, pts2
for i in range(len(pts1)):
qp1 = QtCore.QPointF(pts1[i][0], pts1[i][1])
qp2 = QtCore.QPointF(pts2[i][0], pts2[i][1])
self.painter.drawLine(qp1, qp2)
def flush(self):
self.painter.end()
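# --- Illustrative sketch (not part of the original module) ------------------
# Minimal usage example, assuming a Qt GUI session is available; the output
# file name is hypothetical.
if __name__ == '__main__':
    app = QtGui.QApplication([])
    canvas = Canvas(size=(300, 300))
    canvas.addCanvasLine((20, 20), (280, 280), color=(1, 0, 0))
    canvas.addCanvasLine((20, 280), (280, 20), color=(0, 0, 1), dash=(2, 2))
    canvas.flush()
    canvas.pixmap.save('qt_canvas_demo.png')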
|
bsd-3-clause
| 2,263,329,650,974,698,000 | 35.59375 | 98 | 0.63877 | false |
mrocklin/streams
|
streamz/tests/test_sources.py
|
1
|
2787
|
from flaky import flaky
import pytest
from streamz import Source
from streamz.utils_test import wait_for, await_for, gen_test
import socket
@flaky(max_runs=3, min_passes=1)
def test_tcp():
port = 9876
s = Source.from_tcp(port)
out = s.sink_to_list()
s.start()
wait_for(lambda: s.server is not None, 2, period=0.02)
try:
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect(("localhost", port))
sock.send(b'data\n')
sock.close()
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect(("localhost", port))
sock.send(b'data\n')
sock2 = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock2.connect(("localhost", port))
sock2.send(b'data2\n')
wait_for(lambda: out == [b'data\n', b'data\n', b'data2\n'], 2,
period=0.01)
finally:
s.stop()
sock.close()
sock2.close()
@flaky(max_runs=3, min_passes=1)
@gen_test(timeout=60)
def test_tcp_async():
port = 9876
s = Source.from_tcp(port)
out = s.sink_to_list()
s.start()
yield await_for(lambda: s.server is not None, 2, period=0.02)
try:
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect(("localhost", port))
sock.send(b'data\n')
sock.close()
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect(("localhost", port))
sock.send(b'data\n')
sock2 = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock2.connect(("localhost", port))
sock2.send(b'data2\n')
yield await_for(lambda: out == [b'data\n', b'data\n', b'data2\n'], 2,
period=0.01)
finally:
s.stop()
sock.close()
sock2.close()
def test_http():
requests = pytest.importorskip('requests')
port = 9875
s = Source.from_http_server(port)
out = s.sink_to_list()
s.start()
wait_for(lambda: s.server is not None, 2, period=0.02)
r = requests.post('http://localhost:%i/' % port, data=b'data')
wait_for(lambda: out == [b'data'], 2, period=0.01)
assert r.ok
r = requests.post('http://localhost:%i/other' % port, data=b'data2')
wait_for(lambda: out == [b'data', b'data2'], 2, period=0.01)
assert r.ok
s.stop()
with pytest.raises(requests.exceptions.RequestException):
requests.post('http://localhost:%i/other' % port, data=b'data2')
@flaky(max_runs=3, min_passes=1)
@gen_test(timeout=60)
def test_process():
cmd = ["python", "-c", "for i in range(4): print(i)"]
s = Source.from_process(cmd)
out = s.sink_to_list()
s.start()
yield await_for(lambda: out == [b'0\n', b'1\n', b'2\n', b'3\n'], timeout=5)
s.stop()
|
bsd-3-clause
| 1,888,732,176,384,832,300 | 27.731959 | 79 | 0.587729 | false |
ebmdatalab/openprescribing
|
openprescribing/dmd/build_search_query.py
|
1
|
3506
|
from django.db.models import fields, ForeignKey, ManyToOneRel, OneToOneRel, Q
from .obj_types import clss
from functools import reduce
def build_query_obj(cls, search):
"""Return Q object to filter dm+d objects based on search.
Parameters:
cls: class of dm+d object to search
search: a tree describing the search to be performed
See TestAdvancedSearchHelpers.test_build_query_obj for an example.
_build_query_obj_helper is a nested function to allow easier use of `map()`.
"""
def _build_query_obj_helper(search):
"""Do the work.
A branch node like:
["and", [node1, node2]]
will be transformed, recursively, into:
_build_query_obj_helper(node1) & _build_query_obj_helper(node2)
A leaf node like:
["nm", "contains", "paracetamol"]
will be transformed into:
Q(nm__icontains="paracetamol")
"""
assert len(search) in [2, 3]
if len(search) == 2:
# branch node
fn = {"and": Q.__and__, "or": Q.__or__}[search[0]]
clauses = list(map(_build_query_obj_helper, search[1]))
return reduce(fn, clauses[1:], clauses[0])
else:
# leaf node
field_name, operator, value = search
if field_name == "bnf_code":
if operator == "begins_with":
return Q(bnf_code__startswith=value)
elif operator == "not_begins_with":
return ~Q(bnf_code__startswith=value)
else:
assert False, operator
else:
key = _build_lookup_key(cls, field_name, operator)
kwargs = {key: value}
return Q(**kwargs)
return _build_query_obj_helper(search)
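# --- Illustrative sketch (not part of the original module) ------------------
# Hypothetical example of the search-tree format accepted by build_query_obj(),
# assuming a dm+d class with an `nm` CharField and a `bnf_code` field:
#
#     search = ["and", [
#         ["nm", "contains", "paracetamol"],
#         ["bnf_code", "begins_with", "0407"],
#     ]]
#     q = build_query_obj(SomeDmdClass, search)
#     matches = SomeDmdClass.objects.filter(q)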
def _build_lookup_key(cls, field_name, operator):
field = cls._meta.get_field(field_name)
builder = {
ForeignKey: _build_lookup_fk,
ManyToOneRel: _build_lookup_rev_fk,
OneToOneRel: _build_lookup_rev_fk,
fields.CharField: _build_lookup_char,
fields.DateField: _build_lookup_date,
fields.BooleanField: _build_lookup_boolean,
fields.DecimalField: _build_lookup_decimal,
}[type(field)]
return builder(cls, field_name, operator)
def _build_lookup_fk(cls, field_name, operator):
assert operator == "equal"
return field_name
def _build_lookup_rev_fk(cls, field_name, operator):
field = cls._meta.get_field(field_name)
intermediate_model = field.related_model
fk_fields = [
f
for f in intermediate_model._meta.get_fields()
if (
isinstance(f, ForeignKey)
and f.related_model not in clss
and "prev" not in f.name
)
]
assert len(fk_fields) == 1
return "{}__{}".format(field_name, fk_fields[0].name)
def _build_lookup_char(cls, field_name, operator):
lookup = {"contains": "icontains"}[operator]
return "{}__{}".format(field_name, lookup)
def _build_lookup_date(cls, field_name, operator):
lookup = {"equal": "exact", "before": "lt", "after": "gt"}[operator]
return "{}__{}".format(field_name, lookup)
def _build_lookup_boolean(cls, field_name, operator):
assert operator == "equal"
return field_name
def _build_lookup_decimal(cls, field_name, operator):
lookup = {"equal": "exact", "less than": "lt", "greater than": "gt"}[operator]
return "{}__{}".format(field_name, lookup)
|
mit
| 832,798,126,835,511,300 | 28.965812 | 82 | 0.587849 | false |
jsheffie/django-auth-experiments
|
djauth/quickstart/views.py
|
1
|
2083
|
from django.shortcuts import render
from django.contrib.auth.models import User, Group
from rest_framework import viewsets
from rest_framework.permissions import IsAuthenticated
from django.contrib.auth.decorators import login_required
from quickstart.serializers import UserSerializer, GroupSerializer
from django.http import HttpResponse, HttpResponseRedirect
from django.contrib.auth import logout
class UserViewSet(viewsets.ModelViewSet):
"""
API endpoint that allows users to be viewed or edited.
Note: setting queryset, and serializer_class attributs sans just
a model attribute gives us more control over the API behavior.
This is the recommended style for most applications.
"""
# http://django-rest-framework.org/api-guide/permissions#api-reference
permission_classes = ( IsAuthenticated, )
queryset = User.objects.all()
serializer_class = UserSerializer
class GroupViewSet(viewsets.ModelViewSet):
"""
API endpoint that allows groups to be viewed or edited.
"""
queryset = Group.objects.all()
serializer_class = GroupSerializer
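# --- Illustrative sketch (not part of the original module) ------------------
# These viewsets are typically exposed through a DRF router in urls.py; a
# minimal, hypothetical registration could look like this:
#
#     from rest_framework import routers
#     router = routers.DefaultRouter()
#     router.register(r'users', UserViewSet)
#     router.register(r'groups', GroupViewSet)
#     urlpatterns = [url(r'^', include(router.urls))]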
@login_required
def testing_users( request ):
ret_str = "Hello Authenticated user required. "
ret_str += "<br>User: %s" % ( request.user.username )
ret_str +="<br><a href='/logout/'>Logout</a>"
return HttpResponse( ret_str );
def no_auth_view( request ):
ret_str = "No Authenticated user required"
ret_str += "<br>User: %s" % ( request.user.username )
ret_str += "<br><a href='/auth/view/'>Auth Required</a>"
ret_str += "<br><a href='/no/auth/view/'>No Auth Required</a>"
ret_str +="<br><a href='/logout/'>Logout</a>"
return HttpResponse( ret_str );
@login_required
def auth_view( request ):
ret_str = "Authenticated user required"
ret_str += "<br>User: %s" % ( request.user.username )
ret_str += "<br><a href='/auth/view/'>Auth Required</a>"
ret_str += "<br><a href='/no/auth/view/'>No Auth Required</a>"
ret_str +="<br><a href='/logout/'>Logout</a>"
return HttpResponse( ret_str );
def logout_view(request):
logout( request )
return HttpResponseRedirect(redirect_to="/no/auth/view/");
|
mit
| -253,710,595,698,105,470 | 33.716667 | 74 | 0.723476 | false |
CINPLA/exana
|
exana/tracking/fields.py
|
1
|
32391
|
import numpy as np
def spatial_rate_map(x, y, t, spike_train, binsize=0.01, box_xlen=1,
box_ylen=1, mask_unvisited=True, convolve=True,
return_bins=False, smoothing=0.02):
"""Divide a 2D space in bins of size binsize**2, count the number of spikes
in each bin and divide by the time spent in respective bins. The map can
then be convolved with a gaussian kernel of size csize determined by the
smoothing factor, binsize and box_xlen.
Parameters
----------
spike_train : neo.SpikeTrain
x : float
1d vector of x positions
y : float
1d vector of y positions
t : float
1d vector of times at x, y positions
binsize : float
spatial binsize
box_xlen : quantities scalar in m
side length of quadratic box
mask_unvisited: bool
mask bins which has not been visited by nans
convolve : bool
convolve the rate map with a 2D Gaussian kernel
Returns
-------
out : rate map
if return_bins = True
out : rate map, xbins, ybins
"""
if not all([len(var) == len(var2) for var in [x,y,t] for var2 in [x,y,t]]):
raise ValueError('x, y, t must have same number of elements')
if box_xlen < x.max() or box_ylen < y.max():
raise ValueError('box length must be larger or equal to max path length')
from decimal import Decimal as dec
decimals = 1e10
remainderx = dec(float(box_xlen)*decimals) % dec(float(binsize)*decimals)
remaindery = dec(float(box_ylen)*decimals) % dec(float(binsize)*decimals)
if remainderx != 0 or remaindery != 0:
raise ValueError('the remainder should be zero i.e. the ' +
'box length should be an exact multiple ' +
'of the binsize')
# interpolate one extra timepoint
t_ = np.append(t, t[-1] + np.median(np.diff(t)))
spikes_in_bin, _ = np.histogram(spike_train, t_)
time_in_bin = np.diff(t_)
xbins = np.arange(0, box_xlen + binsize, binsize)
ybins = np.arange(0, box_ylen + binsize, binsize)
ix = np.digitize(x, xbins, right=True)
iy = np.digitize(y, ybins, right=True)
spike_pos = np.zeros((xbins.size, ybins.size))
time_pos = np.zeros((xbins.size, ybins.size))
for n in range(len(x)):
spike_pos[ix[n], iy[n]] += spikes_in_bin[n]
time_pos[ix[n], iy[n]] += time_in_bin[n]
# correct for shifting of map
spike_pos = spike_pos[1:, 1:]
time_pos = time_pos[1:, 1:]
with np.errstate(divide='ignore', invalid='ignore'):
rate = np.divide(spike_pos, time_pos)
if convolve:
rate[np.isnan(rate)] = 0. # for convolution
from astropy.convolution import Gaussian2DKernel, convolve_fft
csize = (box_xlen / binsize) * smoothing
kernel = Gaussian2DKernel(csize)
rate = convolve_fft(rate, kernel) # TODO edge correction
if mask_unvisited:
was_in_bin = np.asarray(time_pos, dtype=bool)
rate[np.invert(was_in_bin)] = np.nan
if return_bins:
return rate.T, xbins, ybins
else:
return rate.T
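# Hedged usage sketch (added for illustration; `_example_rate_map` is not part
# of the original API).  It builds a rate map from synthetic tracking data on a
# 1 x 1 box; the trajectory and spike times below are made up.
def _example_rate_map():
    t = np.arange(0., 10., 0.01)                 # 10 s sampled at 100 Hz
    x = 0.5 + 0.4 * np.sin(2 * np.pi * t / 10.)  # stay inside the unit box
    y = 0.5 + 0.4 * np.cos(2 * np.pi * t / 10.)
    spike_train = np.sort(np.random.uniform(0., 10., 200))  # 200 spike times
    # convolve=False keeps the example free of the astropy dependency
    return spatial_rate_map(x, y, t, spike_train, binsize=0.05,
                            box_xlen=1, box_ylen=1, convolve=False)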
def gridness(rate_map, box_xlen, box_ylen, return_acorr=False,
step_size=0.1, method='iter', return_masked_acorr=False):
'''Calculates gridness of a rate map. Calculates the normalized
autocorrelation (A) of a rate map B where A is given as
    A = 1/n \sum_{x,y} (B - \bar{B})^{2} / \sigma_{B}^{2}. Further, Pearson's
    product-moment correlation coefficient is calculated between A and A_{rot}
rotated 30 and 60 degrees. Finally the gridness is calculated as the
difference between the minimum of coefficients at 60 degrees and the
maximum of coefficients at 30 degrees i.e. gridness = min(r60) - max(r30).
    If the method 'iter' is chosen:
    In order to focus the analysis on the symmetry of A, the central and outer
    parts of A are masked at increasing radii in steps of ``step_size`` and the
    gridness is maximized over these masks.
If the method 'puncture' is chosen:
This is the standard way of calculating gridness, by masking the central
autocorrelation bump, in addition to rounding the map. See examples.
Parameters
----------
rate_map : numpy.ndarray
box_xlen : float
side length of quadratic box
step_size : float
step size in masking, only applies to the method "iter"
return_acorr : bool
return autocorrelation map or not
return_masked_acorr : bool
return masked autocorrelation map or not
method : 'iter' or 'puncture'
Returns
-------
out : gridness, (autocorrelation map, masked autocorrelation map)
Examples
--------
>>> from exana.tracking.tools import make_test_grid_rate_map
>>> import matplotlib.pyplot as plt
>>> rate_map, pos = make_test_grid_rate_map()
>>> iter_score = gridness(rate_map, box_xlen=1, box_ylen=1, method='iter')
>>> print('%.2f' % iter_score)
1.39
>>> puncture_score = gridness(rate_map, box_xlen=1, box_ylen=1, method='puncture')
>>> print('%.2f' % puncture_score)
0.96
.. plot::
import matplotlib.pyplot as plt
import numpy as np
from exana.tracking.tools import make_test_grid_rate_map
from exana.tracking import gridness
import matplotlib.pyplot as plt
rate_map, _ = make_test_grid_rate_map()
fig, axs = plt.subplots(2, 2)
g1, acorr, m_acorr1 = gridness(rate_map, box_xlen=1,
box_ylen=1, return_acorr=True,
return_masked_acorr=True,
method='iter')
g2, m_acorr2 = gridness(rate_map, box_xlen=1,
box_ylen=1,
return_masked_acorr=True,
method='puncture')
mats = [rate_map, m_acorr1, acorr, m_acorr2]
titles = ['Rate map', 'Masked acorr "iter", gridness = %.2f' % g1,
'Autocorrelation',
'Masked acorr "puncture", gridness = %.2f' % g2]
for ax, mat, title in zip(axs.ravel(), mats, titles):
ax.imshow(mat)
ax.set_title(title)
plt.tight_layout()
plt.show()
'''
import numpy.ma as ma
from exana.misc.tools import fftcorrelate2d
from exana.tracking.tools import gaussian2D
from scipy.optimize import curve_fit
tmp_map = rate_map.copy()
tmp_map[~np.isfinite(tmp_map)] = 0
acorr = fftcorrelate2d(tmp_map, tmp_map, mode='full', normalize=True)
rows, cols = acorr.shape
b_x = np.linspace(- box_xlen / 2., box_xlen / 2., rows)
b_y = np.linspace(- box_ylen / 2., box_ylen / 2., cols)
B_x, B_y = np.meshgrid(b_x, b_y)
if method == 'iter':
if return_masked_acorr: m_acorrs = []
gridscores = []
for outer in np.arange(box_xlen / 4, box_xlen / 2, step_size):
m_acorr = ma.masked_array(
acorr, mask=np.sqrt(B_x**2 + B_y**2) > outer)
for inner in np.arange(0, box_xlen / 4, step_size):
m_acorr = ma.masked_array(
m_acorr, mask=np.sqrt(B_x**2 + B_y**2) < inner)
r30, r60 = rotate_corr(m_acorr)
gridscores.append(np.min(r60) - np.max(r30))
if return_masked_acorr: m_acorrs.append(m_acorr)
gridscore = max(gridscores)
if return_masked_acorr: m_acorr = m_acorrs[gridscores.index(gridscore)]
elif method == 'puncture':
# round picture edges
_gaussian = lambda pos, a, s: gaussian2D(a, pos[0], pos[1], 0, 0, s).ravel()
p0 = (max(acorr.ravel()), min(box_xlen, box_ylen) / 100)
popt, pcov = curve_fit(_gaussian, (B_x, B_y), acorr.ravel(), p0=p0)
m_acorr = ma.masked_array(
acorr, mask=np.sqrt(B_x**2 + B_y**2) > min(box_xlen, box_ylen) / 2)
m_acorr = ma.masked_array(
m_acorr, mask=np.sqrt(B_x**2 + B_y**2) < popt[1])
r30, r60 = rotate_corr(m_acorr)
gridscore = float(np.min(r60) - np.max(r30))
if return_acorr and return_masked_acorr:
return gridscore, acorr, m_acorr
if return_masked_acorr:
return gridscore, m_acorr
if return_acorr:
return gridscore, acorr # acorrs[grids.index(max(grids))]
else:
return gridscore
def rotate_corr(acorr):
from exana.misc.tools import masked_corrcoef2d
from scipy.ndimage.interpolation import rotate
angles = range(30, 180+30, 30)
corr = []
# Rotate and compute correlation coefficient
for angle in angles:
rot_acorr = rotate(acorr, angle, reshape=False)
corr.append(masked_corrcoef2d(rot_acorr, acorr)[0, 1])
r60 = corr[1::2]
r30 = corr[::2]
return r30, r60
def occupancy_map(x, y, t,
binsize=0.01,
box_xlen=1,
box_ylen=1,
mask_unvisited=True,
convolve=True,
return_bins=False,
smoothing=0.02):
'''Divide a 2D space in bins of size binsize**2, count the time spent
in each bin. The map can be convolved with a gaussian kernel of size
csize determined by the smoothing factor, binsize and box_xlen.
Parameters
----------
x : array
1d vector of x positions
y : array
1d vector of y positions
t : array
1d vector of times at x, y positions
binsize : float
spatial binsize
box_xlen : float
side length of quadratic box
    mask_unvisited: bool
        mask bins which have not been visited with nans
convolve : bool
convolve the rate map with a 2D Gaussian kernel
Returns
-------
occupancy_map : numpy.ndarray
if return_bins = True
out : occupancy_map, xbins, ybins
'''
if not all([len(var) == len(var2) for var in [
x, y, t] for var2 in [x, y, t]]):
raise ValueError('x, y, t must have same number of elements')
if box_xlen < x.max() or box_ylen < y.max():
raise ValueError(
'box length must be larger or equal to max path length')
from decimal import Decimal as dec
decimals = 1e10
remainderx = dec(float(box_xlen)*decimals) % dec(float(binsize)*decimals)
remaindery = dec(float(box_ylen)*decimals) % dec(float(binsize)*decimals)
if remainderx != 0 or remaindery != 0:
raise ValueError('the remainder should be zero i.e. the ' +
'box length should be an exact multiple ' +
'of the binsize')
# interpolate one extra timepoint
t_ = np.array(t.tolist() + [t.max() + np.median(np.diff(t))])
time_in_bin = np.diff(t_)
xbins = np.arange(0, box_xlen + binsize, binsize)
ybins = np.arange(0, box_ylen + binsize, binsize)
ix = np.digitize(x, xbins, right=True)
iy = np.digitize(y, ybins, right=True)
time_pos = np.zeros((xbins.size, ybins.size))
for n in range(len(x) - 1):
time_pos[ix[n], iy[n]] += time_in_bin[n]
# correct for shifting of map since digitize returns values at right edges
time_pos = time_pos[1:, 1:]
    # the occupancy map is simply the time spent in each bin
    occupancy = time_pos.astype(float)
    if convolve:
        from astropy.convolution import Gaussian2DKernel, convolve_fft
        csize = (box_xlen / binsize) * smoothing
        kernel = Gaussian2DKernel(csize)
        occupancy = convolve_fft(occupancy, kernel)  # TODO edge correction
    if mask_unvisited:
        was_in_bin = np.asarray(time_pos, dtype=bool)
        occupancy[np.invert(was_in_bin)] = np.nan
    if return_bins:
        return occupancy.T, xbins, ybins
    else:
        return occupancy.T
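# Hedged usage sketch (illustration only; the helper name is an assumption).
# The occupancy map for the same kind of synthetic circular trajectory gives
# the number of seconds spent in each spatial bin.
def _example_occupancy_map():
    t = np.arange(0., 10., 0.01)
    x = 0.5 + 0.4 * np.sin(2 * np.pi * t / 10.)
    y = 0.5 + 0.4 * np.cos(2 * np.pi * t / 10.)
    return occupancy_map(x, y, t, binsize=0.05, convolve=False)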
def nvisits_map(x, y, t,
binsize=0.01,
box_xlen=1,
box_ylen=1,
return_bins=False):
    '''Divide a 2D space in bins of size binsize**2 and count the
    number of visits in each bin.
Parameters
----------
x : array
1d vector of x positions
y : array
1d vector of y positions
t : array
1d vector of times at x, y positions
binsize : float
spatial binsize
box_xlen : float
side length of quadratic box
Returns
-------
nvisits_map : numpy.ndarray
if return_bins = True
out : nvisits_map, xbins, ybins
'''
if not all([len(var) == len(var2) for var in [
x, y, t] for var2 in [x, y, t]]):
raise ValueError('x, y, t must have same number of elements')
if box_xlen < x.max() or box_ylen < y.max():
raise ValueError(
'box length must be larger or equal to max path length')
from decimal import Decimal as dec
decimals = 1e10
remainderx = dec(float(box_xlen)*decimals) % dec(float(binsize)*decimals)
remaindery = dec(float(box_ylen)*decimals) % dec(float(binsize)*decimals)
if remainderx != 0 or remaindery != 0:
raise ValueError('the remainder should be zero i.e. the ' +
'box length should be an exact multiple ' +
'of the binsize')
xbins = np.arange(0, box_xlen + binsize, binsize)
ybins = np.arange(0, box_ylen + binsize, binsize)
ix = np.digitize(x, xbins, right=True)
iy = np.digitize(y, ybins, right=True)
nvisits_map = np.zeros((xbins.size, ybins.size))
for n in range(len(x)):
if n == 0:
nvisits_map[ix[n], iy[n]] = 1
else:
if ix[n-1] != ix[n] or iy[n-1] != iy[n]:
nvisits_map[ix[n], iy[n]] += 1
# correct for shifting of map since digitize returns values at right edges
nvisits_map = nvisits_map[1:, 1:]
if return_bins:
return nvisits_map.T, xbins, ybins
else:
return nvisits_map.T
def spatial_rate_map_1d(x, t, spike_train,
binsize=0.01,
track_len=1,
mask_unvisited=True,
convolve=True,
return_bins=False,
smoothing=0.02):
"""Take x coordinates of linear track data, divide in bins of binsize,
count the number of spikes in each bin and divide by the time spent in
respective bins. The map can then be convolved with a gaussian kernel of
size csize determined by the smoothing factor, binsize and box_xlen.
Parameters
----------
spike_train : array
x : array
1d vector of x positions
t : array
1d vector of times at x, y positions
binsize : float
spatial binsize
    track_len : float
        length of the linear track
    mask_unvisited: bool
        mask bins which have not been visited with nans
    convolve : bool
        convolve the rate map with a 1D Gaussian kernel
Returns
-------
out : rate map
if return_bins = True
out : rate map, xbins
"""
if not all([len(var) == len(var2) for var in [x, t] for var2 in [x, t]]):
raise ValueError('x, t must have same number of elements')
    if track_len < x.max():
        raise ValueError('track length must be '
                         'larger or equal to max path length')
from decimal import Decimal as dec
decimals = 1e10
remainderx = dec(float(track_len)*decimals) % dec(float(binsize)*decimals)
if remainderx != 0:
raise ValueError('the remainder should be zero i.e. the ' +
'box length should be an exact multiple ' +
'of the binsize')
# interpolate one extra timepoint
t_ = np.array(t.tolist() + [t.max() + np.median(np.diff(t))])
spikes_in_bin, _ = np.histogram(spike_train, t_)
time_in_bin = np.diff(t_)
xbins = np.arange(0, track_len + binsize, binsize)
ix = np.digitize(x, xbins, right=True)
spike_pos = np.zeros(xbins.size)
time_pos = np.zeros(xbins.size)
for n in range(len(x)):
spike_pos[ix[n]] += spikes_in_bin[n]
time_pos[ix[n]] += time_in_bin[n]
# correct for shifting of map since digitize returns values at right edges
spike_pos = spike_pos[1:]
time_pos = time_pos[1:]
with np.errstate(divide='ignore', invalid='ignore'):
rate = np.divide(spike_pos, time_pos)
if convolve:
rate[np.isnan(rate)] = 0. # for convolution
        from astropy.convolution import Gaussian1DKernel, convolve_fft
        csize = (track_len / binsize) * smoothing
        kernel = Gaussian1DKernel(csize)  # the rate map is 1D here
rate = convolve_fft(rate, kernel) # TODO edge correction
if mask_unvisited:
was_in_bin = np.asarray(time_pos, dtype=bool)
rate[np.invert(was_in_bin)] = np.nan
if return_bins:
return rate.T, xbins
else:
return rate.T
def separate_fields(rate_map, laplace_thrsh=0, center_method='maxima',
cutoff_method='none', box_xlen=1, box_ylen=1, index=False):
"""Separates fields using the laplacian to identify fields separated by
a negative second derivative.
Parameters
----------
rate_map : np 2d array
firing rate in each bin
laplace_thrsh : float
value of laplacian to separate fields by relative to the minima. Should be
on the interval 0 to 1, where 0 cuts off at 0 and 1 cuts off at
min(laplace(rate_map)). Default 0.
center_method : string
method to find field centers. Valid options = ['center_of_mass',
'maxima','gaussian_fit']
cutoff_method (optional) : string or function
function to exclude small fields. If local field value of function
is lower than global function value, the field is excluded. Valid
string_options = ['median', 'mean','none'].
index : bool, default False
return bump center values as index or xy-pos
Returns
-------
fields : numpy array, shape like rate_map.
contains areas all filled with same value, corresponding to fields
in rate_map. The values are in range(1,nFields + 1), sorted by size of the
field (sum of all field values). 0 elsewhere.
n_field : int
field count
bump_centers : (n_field x 2) np ndarray
Coordinates of field centers
"""
cutoff_functions = {'mean':np.mean, 'median':np.median, 'none':None}
if not callable(cutoff_method):
try:
cutoff_func = cutoff_functions[cutoff_method]
except KeyError:
msg = "invalid cutoff_method flag '%s'" % cutoff_method
raise ValueError(msg)
else:
cutoff_func = cutoff_method
from scipy import ndimage
l = ndimage.laplace(rate_map)
l[l>laplace_thrsh*np.min(l)] = 0
# Labels areas of the laplacian not connected by values > 0.
fields, n_fields = ndimage.label(l)
# index 0 is the background
indx = np.arange(1,n_fields+1)
# Use cutoff method to remove unwanted fields
if cutoff_method != 'none':
try:
total_value = cutoff_func(fields)
except:
            print("Unexpected error, cutoff_func doesn't like the input:")
raise
field_values = ndimage.labeled_comprehension(rate_map, fields, indx,
cutoff_func, float, 0)
try:
is_field = field_values >= total_value
except:
            print("cutoff_func return_values doesn't want to compare:")
raise
if np.sum(is_field) == 0:
return np.zeros(rate_map.shape), 0, np.array([[],[]])
for i in indx:
if not is_field[i-1]:
fields[fields == i] = 0
n_fields = ndimage.label(fields, output=fields)
indx = np.arange(1,n_fields + 1)
# Sort by largest mean
sizes = ndimage.labeled_comprehension(rate_map, fields, indx,
np.mean, float, 0)
size_sort = np.argsort(sizes)[::-1]
new = np.zeros_like(fields)
for i in np.arange(n_fields):
new[fields == size_sort[i]+1] = i+1
fields = new
    bc = get_bump_centers(rate_map, labels=fields, ret_index=index,
                          indices=indx, method=center_method)
# TODO exclude fields where maxima is on the edge of the field?
return fields, n_fields, bc
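# Hedged usage sketch (illustration only; `_example_separate_fields` is not part
# of the original API).  Two Gaussian bumps are planted in a toy rate map and
# then labelled with the default settings; the two planted bumps come out as
# the largest fields.
def _example_separate_fields():
    yy, xx = np.mgrid[0:50, 0:50] / 50.
    rate_map = (np.exp(-((xx - 0.25) ** 2 + (yy - 0.25) ** 2) / 0.01) +
                np.exp(-((xx - 0.75) ** 2 + (yy - 0.75) ** 2) / 0.01))
    fields, n_fields, centers = separate_fields(rate_map)
    return fields, n_fields, centers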
def get_bump_centers(rate_map, labels, ret_index=False, indices=None, method='maxima',
units=1):
"""Finds center of fields at labels."""
from scipy import ndimage
if method not in ['maxima','center_of_mass','gaussian_fit']:
msg = "invalid center_method flag '%s'" % method
raise ValueError(msg)
if indices is None:
indices = np.arange(1,np.max(labels)+1)
if method == 'maxima':
bc = ndimage.maximum_position(rate_map, labels=labels,
index=indices)
elif method == 'center_of_mass':
bc = ndimage.center_of_mass(rate_map, labels=labels, index=indices)
elif method == 'gaussian_fit':
from exana.tracking.tools import fit_gauss_asym
bc = np.zeros((len(indices),2))
import matplotlib.pyplot as plt
for i in indices:
r = rate_map.copy()
r[labels != i] = 0
popt = fit_gauss_asym(r, return_data=False)
# TODO Find out which axis is x and which is y
bc[i-1] = (popt[2],popt[1])
if ret_index:
msg = 'ret_index not implemented for gaussian fit'
raise NotImplementedError(msg)
if not ret_index and not method=='gaussian_fit':
bc = (bc + np.array((0.5,0.5)))/rate_map.shape
return np.array(bc)*units
def find_avg_dist(rate_map, thrsh = 0, plot=False):
"""Uses autocorrelation and separate_fields to find average distance
between bumps. Is dependent on high gridness to get separate bumps in
the autocorrelation
Parameters
----------
rate_map : np 2d array
firing rate in each bin
thrsh (optional) : float, default 0
cutoff value for the laplacian of the autocorrelation function.
Should be a negative number. Gives better separation if bumps are
connected by "bridges" or saddles where the laplacian is negative.
plot (optional) : bool, default False
plot acorr and the separated acorr, with bump centers
Returns
-------
avg_dist : float
relative units from 0 to 1 of the box size
"""
from scipy.ndimage import maximum_position
from exana.misc.tools import fftcorrelate2d
# autocorrelate. Returns array (2x - 1) the size of rate_map
acorr = fftcorrelate2d(rate_map,rate_map, mode = 'full', normalize = True)
#acorr[acorr<0] = 0 # TODO Fix this
f, nf, bump_centers = separate_fields(acorr,laplace_thrsh=thrsh,
center_method='maxima',cutoff_method='median')
# TODO Find a way to find valid value for
# thrsh, or remove.
bump_centers = np.array(bump_centers)
# find dists from center in (autocorrelation)relative units (from 0 to 1)
distances = np.linalg.norm(bump_centers - (0.5,0.5), axis = 1)
dist_sort = np.argsort(distances)
distances = distances[dist_sort]
# use maximum 6 closest values except center value
avg_dist = np.median(distances[1:7])
# correct for difference in shapes
avg_dist *= acorr.shape[0]/rate_map.shape[0] # = 1.98
# TODO : raise warning if too big difference between points
if plot:
import matplotlib.pyplot as plt
fig,[ax1,ax2] = plt.subplots(1,2)
ax1.imshow(acorr,extent = (0,1,0,1),origin='lower')
ax1.scatter(*(bump_centers[:,::-1].T))
ax2.imshow(f,extent = (0,1,0,1),origin='lower')
ax2.scatter(*(bump_centers[:,::-1].T))
return avg_dist
def fit_hex(bump_centers, avg_dist=None, plot_bumps = False, method='best'):
    """Fits a hex grid to a given set of bumps, using the bumps closest to the
    center bump (see ``method``) to estimate spacing and orientation.
    Parameters
----------
bump_centers : Nx2 np.array
x,y positions of bump centers, x,y /in (0,1)
avg_dist (optional): float
average spacing between bumps
    plot_bumps (optional): bool
        if True, plots the bumps most likely to be in correct
        hex positions on the current matplotlib axes.
method (optional): string, valid options: ['closest', 'best']
method to find angle from neighboring bumps.
'closest' uses six bumps nearest to center bump
'best' uses the two bumps nearest to avg_dist
Returns
-------
displacement : float
distance of bump closest to the center in meters
orientation : float
orientation of hexagon (in degrees)
"""
valid_methods = ['closest', 'best']
if method not in valid_methods:
msg = "invalid method flag '%s'" % method
raise ValueError(msg)
bump_centers = np.array(bump_centers)
# sort by distance to center
d = np.linalg.norm(bump_centers - (0.5,0.5), axis=1)
d_sort = np.argsort(d)
dist_sorted = bump_centers[d_sort]
center_bump = dist_sorted[0]; others = dist_sorted[1:]
displacement = d[d_sort][0]
# others distances to center bumps
relpos = others - center_bump
reldist = np.linalg.norm(relpos, axis=1)
if method == 'closest':
# get 6 closest bumps
rel_sort = np.argsort(reldist)
closest = others[rel_sort][:6]
relpos = relpos[rel_sort][:6]
    elif method == 'best':
        # get 2 bumps such that \sum_{i \neq j} (|r_i - r_j| - avg_dist)^2 is minimized
squares = 1e32*np.ones((others.shape[0], others.shape[0]))
for i in range(len(relpos)):
for j in range(i,len(relpos)):
rel1 = (reldist[i] - avg_dist)**2
rel2 = (reldist[j] - avg_dist)**2
rel3 = (np.linalg.norm(relpos[i]-relpos[j]) - avg_dist)**2
squares[i,j] = rel1 + rel2 + rel3
rel_slice = np.unravel_index(np.argmin(squares), squares.shape)
rel_slice = np.array(rel_slice)
#rel_sort = np.argsort(np.abs(reldist-avg_dist))
closest = others[rel_slice]
relpos = relpos[rel_slice]
# sort by angle
a = np.arctan2(relpos[:,1], relpos[:,0])%(2*np.pi)
a_sort = np.argsort(a)
# extract lowest angle and convert to degrees
orientation = a[a_sort][0] *180/np.pi
# hex grid is symmetric under rotations of 60deg
orientation %= 60
if plot_bumps:
import matplotlib.pyplot as plt
ax=plt.gca()
i = 1
xmin, xmax = ax.get_xlim()
ymin, ymax = ax.get_ylim()
dx = xmax-xmin; dy = ymax - ymin
closest = closest[a_sort]
edges = [center_bump] if method == 'best' else []
edges += [c for c in closest]
edges = np.array(edges)*(dx,dy) + (xmin, ymin)
poly = plt.Polygon(edges, alpha=0.5,color='r')
ax.add_artist(poly)
return displacement, orientation
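# Hedged worked example (illustration only; the helper name is an assumption).
# Seven bump centers forming a perfect hexagon around the middle of the box
# should give zero displacement and an orientation of ~0 degrees (modulo 60).
def _example_fit_hex():
    angles = np.deg2rad(np.arange(0, 360, 60))
    spacing = 0.3
    centers = [(0.5, 0.5)] + [(0.5 + spacing * np.cos(a),
                               0.5 + spacing * np.sin(a)) for a in angles]
    return fit_hex(np.array(centers), avg_dist=spacing, method='best')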
def calculate_grid_geometry(rate_map, plot_fields=False, **kwargs):
    """Calculates quantitative information about a grid field:
    bump centers, bump spacing, center displacement and hexagon
    orientation.
Parameters
----------
rate_map : np 2d array
firing rate in each bin
plot_fields : if True, plots the field labels with field centers to the
current matplotlib ax. Default False
thrsh : float, default 0
see find_avg_dist()
center_method : string, valid options: ['maxima', 'center_of_mass']
default: 'center_of_mass'
see separate_fields()
method : string, valid options: ['closest', 'best']
see fit_hex()
Returns
-------
bump_centers : 2d np.array
x,y positions of bump centers
avg_dist : float
average spacing between bumps, \in [0,1]
displacement : float
distance of bump closest to the center
orientation : float
orientation of hexagon (in degrees)
Examples
--------
>>> import numpy as np
>>> rate_map = np.zeros((5,5))
>>> pos = np.array([ [0,2],
... [1,0],[1,4],
... [2,2],
... [3,0],[3,4],
... [4,2]])
>>> for(i,j) in pos:
... rate_map[i,j] = 1
...
>>> result = calculate_grid_geometry(rate_map)
"""
# TODO add back the following when it is correct
# (array([[0.5, 0.9],
# [0.9, 0.7],
# [0.1, 0.7],
# [0.5, 0.5],
# [0.9, 0.3],
# [0.1, 0.3],
# [0.5, 0.1]]) * m, 0.4472135954999579, 0.0, 26.565051177077983)
from scipy.ndimage import mean, center_of_mass
# TODO: smooth data?
# smooth_rate_map = lambda x:x
# rate_map = smooth_rate_map(rate_map)
center_method = kwargs.pop('center_method',None)
if center_method:
fields, nfields, bump_centers = separate_fields(rate_map,
center_method=center_method)
else:
fields, nfields, bump_centers = separate_fields(rate_map)
if bump_centers.size == 0:
import warnings
        msg = "couldn't find bump centers, returning None"
        warnings.warn(msg, RuntimeWarning, stacklevel=2)
        return None, None, None, None
sh = np.array(rate_map.shape)
if plot_fields:
print(fields)
import matplotlib.pyplot as plt
x=np.linspace(0,1,sh[0]+1)
y=np.linspace(0,1,sh[1]+1)
x,y = np.meshgrid(x,y)
ax = plt.gca()
print('nfields: ',nfields)
plt.pcolormesh(x,y, fields)
# switch from row-column to x-y
bump_centers = bump_centers[:,::-1]
thrsh = kwargs.pop('thrsh', None)
if thrsh:
avg_dist = find_avg_dist(rate_map, thrsh)
else:
avg_dist = find_avg_dist(rate_map)
displacement, orientation = fit_hex(bump_centers, avg_dist,
plot_bumps=plot_fields, **kwargs)
return bump_centers, avg_dist, displacement, orientation
class RandomDisplacementBounds(object):
"""random displacement with bounds"""
def __init__(self, xmin, xmax, stepsize=0.5):
self.xmin = np.array(xmin)
self.xmax = np.array(xmax)
self.stepsize = stepsize
def __call__(self, x):
"""take a random step but ensure the new position is within the bounds"""
while True:
# this could be done in a much more clever way, but it will work for example purposes
xnew = x + (self.xmax-self.xmin)*np.random.uniform(-self.stepsize,
self.stepsize, np.shape(x))
if np.all(xnew < self.xmax) and np.all(xnew > self.xmin):
break
return xnew
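# Hedged illustration (not part of the original module): the displacement object
# can also be exercised directly; every proposed step stays inside the bounds.
def _example_random_step():
    take_step = RandomDisplacementBounds(xmin=[0.025, 0.0], xmax=[0.2, 1.0],
                                         stepsize=0.1)
    return take_step(np.array([0.065, 0.1]))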
def optimize_sep_fields(rate_map,step = 0.04, niter=40, T = 1.0, method = 'SLSQP',
glob=True, x0 = [0.065,0.1],callback=None):
"""Optimizes the separation of the fields by minimizing an error
function
Parameters
----------
rate_map :
method :
valid methods=['L-BFGS-B', 'TNC', 'SLSQP']
    x0 : list
        initial values for smoothing and laplace_thrsh
Returns
--------
res :
Result of the optimization. Contains smoothing and laplace_thrsh in
attribute res.x"""
from scipy import optimize
from exana.tracking.tools import separation_error_func as err_func
valid_methods = ['L-BFGS-B', 'TNC', 'SLSQP']
if method not in valid_methods:
raise ValueError('invalid method flag %s' %method)
rate_map[np.isnan(rate_map)] = 0.
xmin = [0.025, 0]
xmax = [0.2, 1]
bounds = [(low,high) for low,high in zip(xmin,xmax)]
obj_func = lambda args: err_func(args[0], args[1], rate_map)
if glob:
take_step = RandomDisplacementBounds(xmin, xmax,stepsize=step)
minimizer_kwargs = dict(method=method, bounds=bounds)
res = optimize.basinhopping(obj_func, x0, niter=niter, T = T,
minimizer_kwargs=minimizer_kwargs,
take_step=take_step,callback=callback)
else:
res = optimize.minimize(obj_func, x0, method=method, bounds = bounds, options={'disp': True})
return res
if __name__ == "__main__":
import doctest
doctest.testmod()
|
gpl-3.0
| 8,174,192,474,725,353,000 | 34.9102 | 101 | 0.591244 | false |
tensorflow/cloud
|
src/python/tensorflow_cloud/tuner/vizier_client.py
|
1
|
19569
|
# Lint as: python3
# Copyright 2020 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A thin client for the Cloud AI Platform Vizier Service."""
import datetime
import http
import json
import time
from typing import Any, Dict, List, Mapping, Optional, Text, Union
from googleapiclient import discovery
from googleapiclient import errors
import tensorflow as tf
from tensorflow_cloud.tuner import vizier_client_interface
from tensorflow_cloud.tuner import constants
from tensorflow_cloud.utils import google_api_client
class SuggestionInactiveError(Exception):
"""Indicates that GetSuggestion was called on an inactive study."""
class _VizierClient(vizier_client_interface.VizierClientInterface):
"""A wrapper class that allows for easy interaction with a Study."""
def __init__(self,
service_client: discovery.Resource,
project_id: Text,
region: Text,
                 study_id: Optional[Text] = None):
        """Create a VizierClient object.
Use this constructor when you know the study_id, and when the Study
already exists. Otherwise, you'll probably want to use
create_or_load_study() instead of constructing the
VizierClient class directly.
Args:
service_client: An API client of Vizier service.
project_id: A GCP project id.
region: A GCP region. e.g. 'us-central1'.
study_id: An identifier of the study. The full study name will be
`projects/{project_id}/locations/{region}/studies/{study_id}`.
The full trial name will be `{study name}/trials/{trial_id}`.
"""
self.service_client = service_client
self.project_id = project_id
self.region = region
if not study_id:
raise ValueError(
"Use create_or_load_study() instead of constructing the"
"VizierClient class directly"
)
self.study_id = study_id
def get_suggestions(
self,
client_id: Text,
suggestion_count: int = constants.SUGGESTION_COUNT_PER_REQUEST
) -> List[Dict[Text, Any]]:
"""Gets a list of suggested Trials.
Args:
client_id: An ID that identifies the `Tuner` requesting a `Trial`.
`Tuners` that should run the same trial (for instance, when
running a multi-worker model) should have the same ID. If
multiple suggestTrialsRequests have the same tuner_id, the
service will return the identical suggested trial if the trial
                is PENDING, and provide a new trial if the last suggested trial
was completed.
suggestion_count: The number of suggestions to request.
Returns:
A list of Trials (represented by JSON dicts). This may be an empty
list if:
1. A finite search space has been exhausted.
2. If max_num_trials = 1000 has been reached.
3. Or if there are no longer any trials that match a supplied Context.
Raises:
SuggestionInactiveError: Indicates that a suggestion was requested
from an inactive study. Note that this is NOT raised when a
finite Study runs out of suggestions. In such a case, an empty
list is returned.
"""
# Requests a trial.
try:
resp = (
self.service_client.projects()
.locations()
.studies()
.trials()
.suggest(
parent=self._make_study_name(),
body={
"client_id": client_id,
"suggestion_count": suggestion_count,
},
)
.execute()
)
except errors.HttpError as e:
if e.resp.status == 429:
                # Status 429 'RESOURCE_EXHAUSTED' is raised when more trials
                # than the maximum limit (1000) of the Vizier service for a
                # study are requested, or when a finite search space has been
                # exhausted.
# For distributed tuning, a tuner worker may request the 1001th
# trial, while the other tuner worker has not completed training
# the 1000th trial, and triggers this error.
tf.get_logger().info("Reached max number of trials.")
return []
else:
tf.get_logger().info("SuggestTrial failed.")
raise e
# Polls the suggestion of long-running operations.
tf.get_logger().info("CreateTrial: polls the suggestions.")
operation = self._obtain_long_running_operation(resp)
suggestions = operation["response"]
if "trials" not in suggestions:
if operation["response"]["studyState"] == "INACTIVE":
raise SuggestionInactiveError(
"The study is stopped due to an internal error."
)
return suggestions["trials"]
def report_intermediate_objective_value(
self,
step: int,
elapsed_secs: float,
metric_list: List[Mapping[Text, Union[int, float]]],
trial_id: Text,
) -> None:
"""Calls AddMeasurementToTrial with the provided objective_value.
Args:
step: The number of steps the model has trained for.
elapsed_secs: The number of seconds since Trial execution began.
metric_list: A list of dictionary from metric names (strings) to
values (doubles) for additional metrics to record.
trial_id: trial_id.
"""
measurement = {
"stepCount": step,
"elapsedTime": {"seconds": int(elapsed_secs)},
"metrics": metric_list,
}
try:
self.service_client.projects().locations().studies().trials(
).addMeasurement(
name=self._make_trial_name(trial_id),
body={"measurement": measurement}).execute()
except errors.HttpError as e:
tf.get_logger().info("AddMeasurement failed.")
raise e
def should_trial_stop(self, trial_id: Text) -> bool:
"""Returns whether trial should stop early.
Args:
trial_id: trial_id.
Returns:
Whether it is recommended to stop the trial early.
"""
trial_name = self._make_trial_name(trial_id)
try:
resp = (
self.service_client.projects()
.locations()
.studies()
.trials()
.checkEarlyStoppingState(name=trial_name)
.execute()
)
except errors.HttpError as e:
tf.get_logger().info("CheckEarlyStoppingState failed.")
raise e
# Polls the stop decision of long-running operations.
operation = self._obtain_long_running_operation(resp)
tf.get_logger().info("CheckEarlyStoppingStateResponse")
if operation["response"].get("shouldStop"):
# Stops a trial.
try:
tf.get_logger().info("Stop the Trial.")
self.service_client.projects().locations().studies().trials(
).stop(name=trial_name).execute()
except errors.HttpError as e:
tf.get_logger().info("StopTrial failed.")
raise e
return True
return False
def complete_trial(
self,
trial_id: Text,
trial_infeasible: bool,
infeasibility_reason: Optional[Text] = None) -> Dict[Text, Any]:
"""Marks the trial as COMPLETED and sets the final measurement.
Args:
trial_id: trial_id.
trial_infeasible: If True, the parameter setting is not feasible.
infeasibility_reason: The reason the Trial was infeasible. Should
only be non-empty if trial_infeasible==True.
Returns:
The Completed Vizier trial, represented as a JSON Dictionary.
"""
try:
vizier_trial = (
self.service_client.projects()
.locations()
.studies()
.trials()
.complete(
name=self._make_trial_name(trial_id),
body={
"trial_infeasible": trial_infeasible,
"infeasible_reason": infeasibility_reason,
},
)
.execute()
)
except errors.HttpError as e:
tf.get_logger().info("CompleteTrial failed.")
raise e
return vizier_trial
def get_trial(self, trial_id: Text) -> Dict[Text, Any]:
"""Return the Vizier trial for the given trial_id."""
try:
trial = (
self.service_client.projects()
.locations()
.studies()
.trials()
.get(name=self._make_trial_name(trial_id))
.execute()
)
except errors.HttpError:
tf.get_logger().info("GetTrial failed.")
raise
return trial
def list_trials(self) -> List[Dict[Text, Any]]:
"""List trials."""
study_name = self._make_study_name()
try:
resp = (
self.service_client.projects()
.locations()
.studies()
.trials()
.list(parent=study_name)
.execute()
)
except errors.HttpError as e:
tf.get_logger().info("ListTrials failed.")
raise e
return resp.get("trials", [])
def list_studies(self) -> List[Dict[Text, Any]]:
"""List all studies under the current project and region.
Returns:
The list of studies.
"""
parent_name = self._make_parent_name()
try:
resp = self.service_client.projects().locations().studies().list(
parent=parent_name).execute()
except errors.HttpError:
tf.get_logger().info("ListStudies failed.")
raise
return resp.get("studies", [])
def delete_study(self, study_name: Optional[Text] = None) -> None:
"""Deletes the study.
Args:
study_name: Name of the study.
Raises:
ValueError: Indicates that the study_name does not exist.
HttpError: Indicates a HTTP error from calling the discovery API.
"""
if study_name is None:
study_name = self._make_study_name()
try:
self.service_client.projects().locations().studies().delete(
name=study_name).execute()
except errors.HttpError as e:
if e.resp.status == http.HTTPStatus.NOT_FOUND.value:
raise ValueError(
"DeleteStudy failed. Study not found: {}."
.format(study_name))
tf.get_logger().info("DeleteStudy failed.")
raise
tf.get_logger().info("Study deleted: {}.".format(study_name))
def _obtain_long_running_operation(self, resp):
"""Obtain the long-running operation."""
op_id = resp["name"].split("/")[-1]
operation_name = "projects/{}/locations/{}/operations/{}".format(
self.project_id, self.region, op_id
)
try:
get_op = (
self.service_client.projects()
.locations()
.operations()
.get(name=operation_name)
)
operation = get_op.execute()
except errors.HttpError as e:
tf.get_logger().info("GetLongRunningOperations failed.")
raise e
polling_secs = 1
num_attempts = 0
while not operation.get("done"):
sleep_time = self._polling_delay(num_attempts, polling_secs)
num_attempts += 1
tf.get_logger().info(
"Waiting for operation; attempt {}; "
"sleeping for {} seconds".format(
num_attempts, sleep_time
)
)
time.sleep(sleep_time.total_seconds())
if num_attempts > 30: # about 10 minutes
raise RuntimeError("GetLongRunningOperations timeout.")
operation = get_op.execute()
return operation
def _polling_delay(self, num_attempts, time_scale):
"""Computes a delay to the next attempt to poll the Vizier service.
This does bounded exponential backoff, starting with $time_scale.
If $time_scale == 0, it starts with a small time interval, less than
1 second.
Args:
num_attempts: The number of times have we polled and found that the
desired result was not yet available.
time_scale: The shortest polling interval, in seconds, or zero.
Zero is treated as a small interval, less than 1 second.
Returns:
A recommended delay interval, in seconds.
"""
small_interval = 0.3 # Seconds
interval = max(
time_scale, small_interval) * 1.41 ** min(num_attempts, 9)
return datetime.timedelta(seconds=interval)
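    # Worked example of the backoff above (figures are illustrative): with
    # time_scale=1 the successive delays are 1.0, 1.41, 1.99, 2.80, ... seconds,
    # capped at 1.41**9 (roughly 22 s) from the tenth attempt onwards; with
    # time_scale=0 the sequence starts at the 0.3 s small_interval instead.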
def _make_study_name(self):
return "projects/{}/locations/{}/studies/{}".format(
self.project_id, self.region, self.study_id
)
def _make_trial_name(self, trial_id):
return "projects/{}/locations/{}/studies/{}/trials/{}".format(
self.project_id, self.region, self.study_id, trial_id
)
def _make_parent_name(self):
return "projects/{}/locations/{}".format(self.project_id, self.region)
def create_or_load_study(
project_id: Text,
region: Text,
study_id: Text,
study_config: Optional[Dict[Text, Any]] = None,
) -> _VizierClient:
"""Factory method for creating or loading a Vizier client.
    Given a Vizier study_config, this will either create or open the
specified study. It will create it if it doesn't already exist, and open
it if someone has already created it.
Note that once a study is created, you CANNOT modify it with this function.
This function is designed for use in a distributed system, where many jobs
call create_or_load_study() nearly simultaneously with the same
`study_config`. In that situation, all clients will end up pointing nicely
to the same study.
Args:
project_id: A GCP project id.
region: A GCP region. e.g. 'us-central1'.
study_id: An identifier of the study. If not supplied, system-determined
unique ID is given. The full study name will be
projects/{project_id}/locations/{region}/studies/{study_id}.
And the full trial name will be {study name}/trials/{trial_id}.
study_config: Study configuration for Vizier service. If not
supplied, it will be assumed that the study with the given study_id
already exists, and will try to retrieve that study.
Returns:
        A _VizierClient object with the specified study created or loaded.
Raises:
RuntimeError: Indicates that study_config is supplied but CreateStudy
failed and GetStudy did not succeed after
constants.MAX_NUM_TRIES_FOR_STUDIES tries.
ValueError: Indicates that study_config is not supplied and the study
with the given study_id does not exist.
"""
# Build the API client
# Note that Vizier service is exposed as a regional endpoint. As such,
# an API client needs to be created separately from the default.
with open(constants.OPTIMIZER_API_DOCUMENT_FILE) as f:
service_client = discovery.build_from_document(
service=json.load(f),
requestBuilder=google_api_client.TFCloudHttpRequest,
)
# Creates or loads a study.
study_parent = "projects/{}/locations/{}".format(project_id, region)
if study_config is None:
# If study config is unspecified, assume that the study already exists.
_get_study(
service_client=service_client,
study_parent=study_parent,
study_id=study_id,
study_should_exist=True,
)
else:
request = (
service_client.projects()
.locations()
.studies()
.create(
body={"study_config": study_config},
parent=study_parent,
studyId=study_id,
)
)
try:
tf.get_logger().info(request.execute())
except errors.HttpError as e:
if e.resp.status != 409: # 409 implies study exists, handled below
raise
_get_study(
service_client=service_client,
study_parent=study_parent,
study_id=study_id,
)
return _VizierClient(service_client, project_id, region, study_id)
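# Hedged usage sketch (illustration only).  The project, region and study ids
# below are placeholders and `study_config` is assumed to be a valid Vizier
# study configuration dict; none of these values come from this module.
def _example_tuning_loop(study_config):
    client = create_or_load_study(
        project_id="my-gcp-project",   # placeholder
        region="us-central1",
        study_id="my-study",           # placeholder
        study_config=study_config,
    )
    for trial in client.get_suggestions(client_id="tuner-0"):
        trial_id = trial["name"].split("/")[-1]
        # ... train and evaluate the model for this trial here ...
        client.complete_trial(trial_id, trial_infeasible=False)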
def _get_study(
service_client: discovery.Resource,
study_parent: Text,
study_id: Text,
study_should_exist: bool = False,
):
"""Method for loading a study.
Given the study_parent and the study_id, this method will load the specified
study, up to constants.MAX_NUM_TRIES_FOR_STUDIES tries.
Args:
service_client: An API client of Vizier service.
study_parent: Prefix of the study name. The full study name will be
{study_parent}/studies/{study_id}.
study_id: An identifier of the study.
study_should_exist: Indicates whether it should be assumed that the
study with the given study_id exists.
"""
study_name = "{}/studies/{}".format(study_parent, study_id)
tf.get_logger().info(
"Study already exists: {}.\nLoad existing study...".format(study_name))
num_tries = 0
while True:
try:
service_client.projects().locations().studies().get(
name=study_name
).execute()
except errors.HttpError as err:
num_tries += 1
if num_tries >= constants.MAX_NUM_TRIES_FOR_STUDIES:
if (
study_should_exist
and err.resp.status == http.HTTPStatus.NOT_FOUND.value
):
raise ValueError(
"GetStudy failed. Study not found: {}.".format(study_id)
)
else:
raise RuntimeError(
"GetStudy failed. Max retries reached: {0!s}".format(
err
)
)
time.sleep(1) # wait 1 second before trying to get the study again
else:
break
|
apache-2.0
| -2,417,612,249,861,651,000 | 36.632692 | 80 | 0.568297 | false |
DavidCain/film_server
|
cgi-bin/playlist.py
|
1
|
7485
|
#!/usr/bin/env python
# David Cain
# RE357
# 2012-12-16
"""
A script to make a m3u bookmark playlist (playable in VLC), or an
archive of .m4v video clip files.
"""
from collections import OrderedDict
from datetime import datetime
import cgi
import csv
import os
import re
import shutil
import subprocess
import sys
import tempfile
import traceback
import zipfile
hms = "%H:%M:%S"
ms = "%M:%S"
film_dir = "/srv/ftp/"
movie_start = datetime.strptime("00:00:00", hms)
def print_m3u(clips, title, filmpath):
""" Print the contents of a .m3u playlist of clips in the film.
Note that each bookmark should probably have a value for a "bytes"
attribute, but it seems to work without it.
"""
attach_header("bookmarks.m3u")
print "#EXTM3U"
print "#EXTINF:7061,%s" % title
# Bookmarks
print "#EXTVLCOPT:bookmarks=", # trailing comma is key
bookmarks = ["{name=%s,time=%i}" % (name, seconds(start)) for start, (end, name) in clips]
print ",".join(bookmarks)
# Path to file
print filmpath
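# Hedged illustration (not part of the original script): for a single clip named
# "Opening scene" running from 1:05 to 2:10, the emitted playlist would look
# roughly like this (the hard-coded EXTINF duration comes from the code above):
#
#   #EXTM3U
#   #EXTINF:7061,East of Eden
#   #EXTVLCOPT:bookmarks={name=Opening scene,time=65}
#   /Users/suzieq/East_of_Eden.m4v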
def print_zip(clips, film_title):
""" Print the contents of a .zip file of film clips. """
try:
zip_file = make_clips(clips, film_title)
except Exception, msg:
text_err(msg)
else:
attach_header(film_title + "_clips.zip")
for line in zip_file:
print line,
finally:
try:
os.remove(zip_file.name)
except OSError:
pass # If make_clips failed, file won't exist
def make_clips(clips, film_title):
""" Return a .zip file of film clips. """
temp_clip_dir = tempfile.mkdtemp(prefix=film_title)
film_path = os.path.join(film_dir, "%s.m4v" % film_title)
base, extension = os.path.splitext(film_path)
clip_files = []
for start, (end, clip_name) in clips:
if seconds(end - start) > 600:
raise Exception("Clip '%s' exceeds ten minutes." % clip_name)
running_time = str(end - start) # Will be in HMS
start = str(start)
clip_fn = clean_path(clip_name)
outfile = os.path.join(temp_clip_dir, clip_fn + extension)
cmd = ['ffmpeg', '-ss', start, '-t', running_time, '-i', film_path,
'-acodec', 'copy', '-vcodec', 'copy', '-y', outfile]
try:
subprocess.check_output(cmd, stderr=subprocess.STDOUT)
except subprocess.CalledProcessError, e:
text_err("Error code %i:\n\n %s" % (e.returncode, e.output))
clip_files.append(outfile)
# Zip the clips into an archive, return file handle
zip_handle = make_zip(clip_files, film_title + "_clips")
shutil.rmtree(temp_clip_dir)
return zip_handle
def make_zip(paths, top_dir="film_clips"):
""" Return the handle to a .zip archive of the given files.
:param top_dir: Directory name to place files in
"""
fd, zip_path = tempfile.mkstemp()
archive = zipfile.ZipFile(zip_path, 'w')
for path in paths:
arcname = os.path.join(top_dir, os.path.split(path)[1])
archive.write(path, arcname)
archive.close()
os.close(fd)
return open(zip_path)
class CSVError(Exception):
pass
def get_clip_dict(csv_file, give_times=False):
""" Return a dictionary of clip names with start and end times. """
clip_dict = OrderedDict()
clips_csv = csv.reader(csv_file)
for num, line in enumerate(clips_csv, start=1):
if len(line) > 3:
raise CSVError("Too many columns on line %i (check commas!)" % num)
elif len(line) < 3:
raise CSVError("Fewer than three columns on line %i" % num)
start, end, name = [val.strip() for val in line]
timename = "%s-%s" % (start, end)
clip_name = "%s - %s" % (timename, name) if give_times else name
start_time = get_time(start)
end_time = get_time(end)
if end_time < start_time:
raise CSVError("End time of '%s' (line %i) precedes start." % (name, num))
clip_dict[start_time] = (end_time, clip_name)
return clip_dict
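# Hedged illustration (not part of the original script): a CSV row such as
#   01:05,02:10,Opening scene
# becomes an entry mapping the start offset (a timedelta of 1 min 5 s) to the
# end offset and the clip name; with give_times=True the stored name would be
# '01:05-02:10 - Opening scene'.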
def seconds(delta):
return int(delta.total_seconds())
def get_time(clip_start):
try:
bookmark_time = datetime.strptime(clip_start, hms)
except ValueError:
try:
bookmark_time = datetime.strptime(clip_start, ms)
except ValueError:
            raise ValueError("Invalid time format '%s'. "
                             "Enter time in H:M:S, or M:S" % clip_start)
return bookmark_time - movie_start
def clean_path(path):
""" Sanitize the path for sensible names.
It's not to prevent traversals, just to avoid common filename 'gotchas'
"""
path = re.sub("[:/\\\]", "-", path)
path = re.sub(" ", "_", path)
path = re.sub("[?]", "", path)
return path
def universal_file(in_file):
""" Return the handle to a file with universal EOL support.
(A hack to get around the fact that CGI handles are already open).
"""
fileno, filename = tempfile.mkstemp()
with open(filename, "w") as newline_file:
for line in in_file:
newline_file.write(line)
os.close(fileno)
return open(filename, "rU")
def attach_header(outname):
print 'Content-Type:text/enriched; filename="%s"' % outname
print 'Content-Disposition: attachment; filename="%s"\n' % outname
def text_err(msg):
print 'Content-Type:text/plain\n'
print "Error:\n"
print msg
sys.exit(1)
def html_err(msg):
print 'Content-Type:text/html\n'
print "<html>\n<body>"
print "<h1>Error:</h1>\n"
print "<p>\n%s\n</p>" % msg
print "</body>\n</html>"
sys.exit(1)
def main():
""" Read the CGI form, display any errors. Otherwise, give content. """
form = cgi.FieldStorage()
film_title = form["title"].value
movie_path = form["movie_path"].value
clip_order = form["clip_order"].value
user_csv = form["csv_file"].file
# Quit if CSV file is empty
if not (user_csv and user_csv.read()):
html_err("No CSV file given.")
user_csv.seek(0)
# Get output type
try:
output_type = form["output_type"].value
except:
html_err("No output format selected.")
# Raise error if using playlist and path is left as example path
if (output_type == "playlist" and (not movie_path or
movie_path == "/Users/suzieq/East_of_Eden.m4v")):
html_err("Playlists require the path to your film.\n"
'<a href="/gen_clips.html#full_path">'
'Getting the full path of a file'
'</a>')
csv_file = universal_file(user_csv) # Force universal line support
# Parse CSV, crash if errors
try:
clip_dict = get_clip_dict(csv_file)
except CSVError, msg:
html_err(msg)
except Exception, msg:
html_err("Error parsing CSV: %s" % msg)
finally:
os.remove(csv_file.name)
# Sort clips chronologically, if specified
if clip_order == "chronological":
clips = sorted(clip_dict.items())
else:
clips = clip_dict.items()
if len(clips) == 0:
html_err("No clips were found in the CSV file!")
# Give the result as downloadable
if output_type == "playlist":
print_m3u(clips, film_title, movie_path)
elif output_type == "clips":
print_zip(clips, film_title)
if __name__ == "__main__":
try:
main()
except SystemExit:
pass
except:
traceback.print_exc(file=sys.stdout)
|
gpl-3.0
| -7,705,770,874,046,983,000 | 26.929104 | 94 | 0.604275 | false |
phatblat/AbletonLiveMIDIRemoteScripts
|
Push2/item_lister_component.py
|
1
|
9031
|
# Source Generated with Decompyle++
# File: item_lister_component.pyc (Python 2.5)
from __future__ import absolute_import
from ableton.v2.base import forward_property, index_if, listens, SlotManager, Subject
from ableton.v2.control_surface import Component, CompoundComponent
from ableton.v2.control_surface.control import control_list, ButtonControl, RadioButtonControl
class SimpleItemSlot(SlotManager, Subject):
__events__ = ('name',)
def __init__(self, item = None, name = '', nesting_level = -1, icon = '', *a, **k):
        super(SimpleItemSlot, self).__init__(*a, **k)
self._item = item
self._name = name
self._nesting_level = nesting_level
self._icon = icon
        # listen for rename events only when the item supports them
        self._SimpleItemSlot__on_name_changed.subject = self._item if hasattr(self._item, 'name_has_listener') else None
def __on_name_changed(self):
self.notify_name()
self._name = self._item.name
_SimpleItemSlot__on_name_changed = listens('name')(__on_name_changed)
def name(self):
return self._name
name = property(name)
def item(self):
return self._item
item = property(item)
def nesting_level(self):
return self._nesting_level
nesting_level = property(nesting_level)
def icon(self):
return self._icon
icon = property(icon)
class ItemSlot(SimpleItemSlot):
def __init__(self, item = None, nesting_level = 0, **k):
        assert item is not None
        super(ItemSlot, self).__init__(item = item, name = item.name, nesting_level = nesting_level, **k)
    def __eq__(self, other):
        return id(self) == id(other) or self._item == other
def __ne__(self, other):
return not (self == other)
def __hash__(self):
return hash(self._item)
_live_ptr = forward_property('_item')('_live_ptr')
class ItemProvider(Subject):
''' General interface to implement for providers used in ItemListerComponent '''
__events__ = ('items', 'selected_item')
def items(self):
'''
Returns a list of tuples, each of which contains an item
followed by its nesting level
'''
return []
items = property(items)
def selected_item(self):
pass
selected_item = property(selected_item)
class ItemListerComponentBase(CompoundComponent):
__events__ = ('items',)
def __init__(self, item_provider = ItemProvider(), num_visible_items = 8, *a, **k):
        super(ItemListerComponentBase, self).__init__(*a, **k)
self._item_offset = 0
self._item_provider = item_provider
self._items = []
self._num_visible_items = num_visible_items
self._ItemListerComponentBase__on_items_changed.subject = item_provider
self.update_items()
def reset_offset(self):
self._item_offset = 0
def items(self):
return self._items
items = property(items)
def item_provider(self):
return self._item_provider
item_provider = property(item_provider)
def _get_item_offset(self):
return self._item_offset
def _set_item_offset(self, offset):
self._item_offset = offset
self.update_items()
item_offset = property(_get_item_offset, _set_item_offset)
def can_scroll_left(self):
return self.item_offset > 0
def can_scroll_right(self):
items = self._item_provider.items[self.item_offset:]
return len(items) > self._num_visible_items
def scroll_left(self):
self.item_offset -= 1
def scroll_right(self):
self.item_offset += 1
def _adjust_offset(self):
num_raw_items = len(self._item_provider.items)
list_length = self._num_visible_items
if list_length >= num_raw_items or self._item_offset >= num_raw_items - list_length:
self._item_offset = max(0, num_raw_items - list_length)
def update_items(self):
for item in self._items:
self.disconnect_disconnectable(item)
self._adjust_offset()
items = self._item_provider.items[self.item_offset:]
num_slots = min(self._num_visible_items, len(items))
def create_slot(index, item, nesting_level):
slot = None
if index == 0 and self.can_scroll_left():
slot = SimpleItemSlot(icon = 'page_left.svg')
slot.is_scrolling_indicator = True
elif index == num_slots - 1 and self.can_scroll_right():
slot = SimpleItemSlot(icon = 'page_right.svg')
slot.is_scrolling_indicator = True
else:
slot = ItemSlot(item = item, nesting_level = nesting_level)
slot.is_scrolling_indicator = False
return slot
        new_items = []
        if num_slots > 0:
            # one slot per visible item; items are (item, nesting_level) pairs
            new_items = [ create_slot(index, item, nesting_level) for index, (item, nesting_level) in enumerate(items[:num_slots]) ]
self._items = map(self.register_disconnectable, new_items)
self.notify_items()
def __on_items_changed(self):
self.update_items()
_ItemListerComponentBase__on_items_changed = listens('items')(__on_items_changed)
class ScrollComponent(Component):
__events__ = ('scroll',)
button = ButtonControl(color = 'ItemNavigation.ItemNotSelected', repeat = True)
    @button.pressed
    def button(self, button):
        self.notify_scroll()
class ScrollOverlayComponent(CompoundComponent):
def __init__(self, *a, **k):
        super(ScrollOverlayComponent, self).__init__(*a, **k)
(self._scroll_left_component, self._scroll_right_component) = self.register_components(ScrollComponent(is_enabled = False), ScrollComponent(is_enabled = False))
self._ScrollOverlayComponent__on_scroll_left.subject = self._scroll_left_component
self._ScrollOverlayComponent__on_scroll_right.subject = self._scroll_right_component
scroll_left_layer = forward_property('_scroll_left_component')('layer')
scroll_right_layer = forward_property('_scroll_right_component')('layer')
def can_scroll_left(self):
raise NotImplementedError
def can_scroll_right(self):
raise NotImplementedError
def scroll_left(self):
raise NotImplementedError
def scroll_right(self):
raise NotImplementedError
def update_scroll_buttons(self):
if self.is_enabled():
self._scroll_left_component.set_enabled(self.can_scroll_left())
self._scroll_right_component.set_enabled(self.can_scroll_right())
def __on_scroll_left(self):
self.scroll_left()
_ScrollOverlayComponent__on_scroll_left = listens('scroll')(__on_scroll_left)
def __on_scroll_right(self):
self.scroll_right()
_ScrollOverlayComponent__on_scroll_right = listens('scroll')(__on_scroll_right)
def update(self):
super(ScrollOverlayComponent, self).update()
if self.is_enabled():
self.update_scroll_buttons()
class ItemListerComponent(ItemListerComponentBase):
select_buttons = control_list(RadioButtonControl, checked_color = 'ItemNavigation.ItemSelected', unchecked_color = 'ItemNavigation.ItemNotSelected', unavailable_color = 'ItemNavigation.NoItem')
def __init__(self, *a, **k):
        super(ItemListerComponent, self).__init__(*a, **k)
self._scroll_overlay = self.register_component(ScrollOverlayComponent(is_enabled = True))
self._scroll_overlay.can_scroll_left = self.can_scroll_left
self._scroll_overlay.can_scroll_right = self.can_scroll_right
self._scroll_overlay.scroll_left = self.scroll_left
self._scroll_overlay.scroll_right = self.scroll_right
self._ItemListerComponent__on_items_changed.subject = self
self._ItemListerComponent__on_selection_changed.subject = self._item_provider
scroll_left_layer = forward_property('_scroll_overlay')('scroll_left_layer')
scroll_right_layer = forward_property('_scroll_overlay')('scroll_right_layer')
def __on_items_changed(self):
self.select_buttons.control_count = len(self.items)
self._update_button_selection()
self._scroll_overlay.update_scroll_buttons()
_ItemListerComponent__on_items_changed = listens('items')(__on_items_changed)
def __on_selection_changed(self):
self._update_button_selection()
_ItemListerComponent__on_selection_changed = listens('selected_item')(__on_selection_changed)
def _update_button_selection(self):
selected_item = self._item_provider.selected_item
items = self.items
        selected_index = index_if(lambda item: item == selected_item, items)
if selected_index >= len(items):
selected_index = -1
self.select_buttons.checked_index = selected_index
|
mit
| 8,494,214,371,004,027,000 | 30.034364 | 197 | 0.618758 | false |
atodorov/pykickstart
|
tests/commands/timezone.py
|
1
|
6651
|
#
# Chris Lumens <[email protected]>
#
# Copyright 2009 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use, modify,
# copy, or redistribute it subject to the terms and conditions of the GNU
# General Public License v.2. This program is distributed in the hope that it
# will be useful, but WITHOUT ANY WARRANTY expressed or implied, including the
# implied warranties of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. Any Red Hat
# trademarks that are incorporated in the source code or documentation are not
# subject to the GNU General Public License and may only be used or replicated
# with the express permission of Red Hat, Inc.
#
import unittest
from tests.baseclass import CommandTest
from pykickstart.errors import KickstartParseError
from pykickstart.commands.timezone import FC3_Timezone, F18_Timezone
class Timezone_TestCase(unittest.TestCase):
def runTest(self):
cmd = F18_Timezone()
self.assertEqual(cmd.__str__(), '')
class FC3_TestCase(CommandTest):
command = "timezone"
def runTest(self):
# assert defaults
self.assertFalse(FC3_Timezone().isUtc)
self.assertFalse(F18_Timezone().nontp)
# pass
self.assert_parse("timezone Eastern", "timezone Eastern\n")
# On FC6 and later, we write out --isUtc regardless of what the input was.
if self.__class__.__name__ == "FC3_TestCase":
self.assert_parse("timezone --utc Eastern", "timezone --utc Eastern\n")
else:
self.assert_parse("timezone --utc Eastern", "timezone --isUtc Eastern\n")
# fail
self.assert_parse_error("timezone")
self.assert_parse_error("timezone Eastern Central")
self.assert_parse_error("timezone --blah Eastern")
self.assert_parse_error("timezone --utc")
self.assert_parse_error("timezone --bogus-option")
# extra test coverage
cmd = self.handler().commands[self.command]
cmd.timezone = None
self.assertEqual(cmd.__str__(), "")
class FC6_TestCase(FC3_TestCase):
def runTest(self):
FC3_TestCase.runTest(self)
# pass
self.assert_parse("timezone --isUtc Eastern", "timezone --isUtc Eastern\n")
# fail
self.assert_parse_error("timezone --isUtc")
class F18_TestCase(FC6_TestCase):
def runTest(self):
# pass
self.assert_parse("timezone --utc Europe/Prague")
self.assert_parse("timezone --isUtc Europe/Prague\n")
self.assert_parse("timezone --isUtc Eastern", "timezone Eastern --isUtc\n")
self.assert_parse("timezone Europe/Prague")
self.assert_parse("timezone Europe/Prague --nontp",
"timezone Europe/Prague --nontp\n")
self.assert_parse("timezone Europe/Prague "
"--ntpservers=ntp.cesnet.cz,tik.nic.cz")
self.assert_parse("timezone Europe/Prague --ntpservers=ntp.cesnet.cz",
"timezone Europe/Prague --ntpservers=ntp.cesnet.cz\n")
# fail
self.assert_parse_error("timezone")
self.assert_parse_error("timezone Eastern Central")
self.assert_parse_error("timezone --blah Eastern")
self.assert_parse_error("timezone --utc")
self.assert_parse_error("timezone --isUtc")
self.assert_parse_error("timezone Europe/Prague --nontp "
"--ntpservers=ntp.cesnet.cz")
self.assert_parse_error("timezone Europe/Prague --ntpservers="
"ntp.cesnet.cz, tik.nic.cz")
class F23_TestCase(F18_TestCase):
def runTest(self):
# should keep multiple instances of the same URL
self.assert_parse("timezone --utc Europe/Prague --ntpservers=ntp.cesnet.cz,0.fedora.pool.ntp.org," +
"0.fedora.pool.ntp.org,0.fedora.pool.ntp.org,0.fedora.pool.ntp.org",
"timezone Europe/Prague --isUtc --ntpservers=ntp.cesnet.cz,0.fedora.pool.ntp.org," +
"0.fedora.pool.ntp.org,0.fedora.pool.ntp.org,0.fedora.pool.ntp.org\n")
self.assert_parse("timezone --utc Europe/Sofia --ntpservers=,0.fedora.pool.ntp.org,")
# fail
self.assert_parse_error("timezone Europe/Sofia --nontp --ntpservers=0.fedora.pool.ntp.org,1.fedora.pool.ntp.org")
class RHEL7_TestCase(F18_TestCase):
def runTest(self):
        # since the RHEL7 version of the command, the timezone command can be
        # used without a timezone specification
self.assert_parse("timezone --utc")
self.assert_parse("timezone Europe/Sofia")
self.assert_parse("timezone --isUtc")
self.assert_parse("timezone --ntpservers=ntp.cesnet.cz")
self.assert_parse("timezone --ntpservers=ntp.cesnet.cz,tik.nic.cz")
# unknown argument
self.assert_parse_error("timezone --blah")
# more than two timezone specs
self.assert_parse_error("timezone foo bar", exception=KickstartParseError)
self.assert_parse_error("timezone --utc foo bar", exception=KickstartParseError)
        # just "timezone" without any arguments is also wrong as it really doesn't make sense
self.assert_parse_error("timezone")
# fail
self.assert_parse_error("timezone Europe/Sofia --nontp --ntpservers=0.fedora.pool.ntp.org,1.fedora.pool.ntp.org")
class F25_TestCase(F23_TestCase):
def runTest(self):
        # since the RHEL7 version of the command, the timezone command can be
        # used without a timezone specification
self.assert_parse("timezone --utc")
self.assert_parse("timezone --isUtc")
self.assert_parse("timezone --ntpservers=ntp.cesnet.cz")
self.assert_parse("timezone --ntpservers=ntp.cesnet.cz,tik.nic.cz")
# unknown argument
self.assert_parse_error("timezone --blah")
# more than two timezone specs
self.assert_parse_error("timezone foo bar", exception=KickstartParseError)
self.assert_parse_error("timezone --utc foo bar", exception=KickstartParseError)
        # just "timezone" without any arguments is also wrong as it really doesn't make sense
self.assert_parse_error("timezone")
# fail
self.assert_parse_error("timezone Europe/Sofia --nontp --ntpservers=0.fedora.pool.ntp.org,1.fedora.pool.ntp.org")
if __name__ == "__main__":
unittest.main()
|
gpl-2.0
| -2,127,721,929,382,395,400 | 43.046358 | 121 | 0.658097 | false |
jenmud/behave-graph
|
behave_graph/__init__.py
|
1
|
2422
|
"""
Set up the environment by parsing the command line options and starting
a ruruki http server.
"""
import argparse
import logging
import os
from behave.configuration import Configuration
from behave.runner import Runner, parse_features
from ruruki_eye.server import run
from behave_graph.scrape import GRAPH
from behave_graph.scrape import scrape_features
__all__ = ["load"]
def load(path):
"""
Load the given path that contains the features and steps.
:param path: Path where the feature and steps files can be found.
:type path: :class:`str`
:returns: A behave runner.
:rtype: :class:`behave.runner.Runner`
"""
try:
config = Configuration(path)
runner = Runner(config)
features = parse_features(
[f.filename for f in runner.feature_locations()]
)
scrape_features(features)
return runner
except Exception as error: # pylint: disable=broad-except
logging.exception(
"Unexpected error creating configuration %r: %r",
path, error
)
raise argparse.ArgumentTypeError(error)
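# Illustrative use of load() outside the CLI entry point defined further
# below; the "features" directory name is an assumed example value:
#
#   runner = load("features")           # parse and scrape features into GRAPH
#   run("0.0.0.0", 8000, False, GRAPH)  # serve the resulting graph over HTTP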
def parse_arguments():
"""
Parse the command line arguments.
:returns: All the command line arguments.
:rtype: :class:`argparse.Namespace`
"""
parser = argparse.ArgumentParser(
description="Behave dependency grapher."
)
parser.add_argument(
"-b",
"--base-dir",
default=os.getcwd(),
type=load,
help=(
"Behave base directory path "
"where features and steps can be found. "
"(default: %(default)s)"
),
)
parser.add_argument(
"--runserver",
action="store_true",
help="Start a ruruki http server.",
)
parser.add_argument(
"--address",
default="0.0.0.0",
help="Address to start the web server on. (default: %(default)s)",
)
parser.add_argument(
"--port",
type=int,
default=8000,
help=(
"Port number that the web server will accept connections on. "
"(default: %(default)d)"
),
)
return parser.parse_args()
def main():
"""
Entry point.
"""
logging.basicConfig(level=logging.INFO)
namespace = parse_arguments()
if namespace.runserver is True:
run(namespace.address, namespace.port, False, GRAPH)
|
mit
| 1,787,216,094,776,277,800 | 23.22 | 74 | 0.603633 | false |
tsuru/rpaas
|
rpaas/sslutils.py
|
1
|
4989
|
# Copyright 2016 rpaas authors. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.
import json
import os
import datetime
import ipaddress
import base64
from cryptography import x509
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes, serialization
from cryptography.hazmat.primitives.asymmetric import rsa
from cryptography.x509.oid import NameOID
from hm.model.load_balancer import LoadBalancer
from rpaas import consul_manager, ssl_plugins, storage
def generate_session_ticket(length=48):
return base64.b64encode(os.urandom(length))
def generate_key(serialized=False):
key = rsa.generate_private_key(
public_exponent=65537,
key_size=2048,
backend=default_backend()
)
if serialized:
return key.private_bytes(
encoding=serialization.Encoding.PEM,
format=serialization.PrivateFormat.TraditionalOpenSSL,
encryption_algorithm=serialization.NoEncryption(),
)
return key
def generate_csr(key, domainname):
private_key = serialization.load_pem_private_key(key, password=None,
backend=default_backend())
csr = x509.CertificateSigningRequestBuilder().subject_name(x509.Name([
# Provide various details about who we are.
x509.NameAttribute(NameOID.COUNTRY_NAME, u"BR"),
x509.NameAttribute(NameOID.STATE_OR_PROVINCE_NAME, u"RJ"),
x509.NameAttribute(NameOID.LOCALITY_NAME, u"Rio de Janeiro"),
x509.NameAttribute(NameOID.ORGANIZATION_NAME, u"globo.com"),
x509.NameAttribute(NameOID.COMMON_NAME, domainname),
])).add_extension(
x509.SubjectAlternativeName([x509.DNSName(domainname)]),
critical=False,
).sign(private_key, hashes.SHA256(), default_backend())
return csr.public_bytes(serialization.Encoding.PEM)
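# Sketch of how the two helpers above compose; the domain name below is a
# made-up example value:
#
#   pem_key = generate_key(serialized=True)           # PEM-encoded RSA key
#   csr_pem = generate_csr(pem_key, u"rpaas.example.com")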
def generate_crt(config, name, plugin, csr, key, domain):
lb = LoadBalancer.find(name, config)
if lb is None:
raise storage.InstanceNotFoundError()
strg = storage.MongoDBStorage(config)
consul_mngr = consul_manager.ConsulManager(config)
crt = None
plugin_class = ssl_plugins.get(plugin)
if not plugin_class:
raise Exception("Invalid plugin {}".format(plugin))
plugin_obj = plugin_class(domain, os.environ.get('RPAAS_PLUGIN_LE_EMAIL', 'admin@'+domain),
name, consul_manager=consul_mngr)
# Upload csr and get an Id
plugin_id = plugin_obj.upload_csr(csr)
crt = plugin_obj.download_crt(id=str(plugin_id))
# Download the certificate and update nginx with it
if crt:
try:
js_crt = json.loads(crt)
cert = js_crt['crt']
cert = cert+js_crt['chain'] if 'chain' in js_crt else cert
key = js_crt['key'] if 'key' in js_crt else key
except:
cert = crt
consul_mngr.set_certificate(name, cert, key)
strg.store_le_certificate(name, domain)
else:
raise Exception('Could not download certificate')
def generate_admin_crt(config, host):
private_key = generate_key()
public_key = private_key.public_key()
one_day = datetime.timedelta(1, 0, 0)
ca_cert = config.get("CA_CERT", None)
ca_key = config.get("CA_KEY", None)
cert_expiration = config.get("CERT_ADMIN_EXPIRE", 1825)
if not ca_cert or not ca_key:
raise Exception('CA_CERT or CA_KEY not defined')
ca_key = serialization.load_pem_private_key(str(ca_key), password=None, backend=default_backend())
ca_cert = x509.load_pem_x509_certificate(str(ca_cert), backend=default_backend())
builder = x509.CertificateBuilder()
builder = builder.subject_name(x509.Name([
x509.NameAttribute(NameOID.COMMON_NAME, host),
]))
builder = builder.issuer_name(ca_cert.subject)
builder = builder.not_valid_before(datetime.datetime.today() - one_day)
builder = builder.not_valid_after(datetime.datetime.today() + datetime.timedelta(days=cert_expiration))
builder = builder.serial_number(x509.random_serial_number())
builder = builder.public_key(public_key)
builder = builder.add_extension(
x509.SubjectAlternativeName(
[x509.IPAddress(ipaddress.IPv4Address(host))]
),
critical=False
)
builder = builder.add_extension(
x509.BasicConstraints(ca=False, path_length=None), critical=True,
)
certificate = builder.sign(
private_key=ca_key, algorithm=hashes.SHA256(),
backend=default_backend()
)
private_key = private_key.private_bytes(
encoding=serialization.Encoding.PEM,
format=serialization.PrivateFormat.TraditionalOpenSSL,
encryption_algorithm=serialization.NoEncryption(),
)
certificate = certificate.public_bytes(serialization.Encoding.PEM)
return private_key, certificate
|
bsd-3-clause
| -3,365,001,182,327,682,600 | 36.511278 | 107 | 0.67709 | false |
miguelinux/vbox
|
src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/OptRomInfStatement.py
|
1
|
5392
|
## @file
# process OptionROM generation from INF statement
#
# Copyright (c) 2007, Intel Corporation. All rights reserved.<BR>
#
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
#
##
# Import Modules
#
import RuleSimpleFile
import RuleComplexFile
import Section
import OptionRom
import Common.GlobalData as GlobalData
from Common.DataType import *
from Common.String import *
from FfsInfStatement import FfsInfStatement
from GenFdsGlobalVariable import GenFdsGlobalVariable
##
#
#
class OptRomInfStatement (FfsInfStatement):
## The constructor
#
# @param self The object pointer
#
def __init__(self):
FfsInfStatement.__init__(self)
self.OverrideAttribs = None
## __GetOptRomParams() method
#
# Parse inf file to get option ROM related parameters
#
# @param self The object pointer
#
def __GetOptRomParams(self):
if self.OverrideAttribs == None:
self.OverrideAttribs = OptionRom.OverrideAttribs()
if self.OverrideAttribs.NeedCompress == None:
self.OverrideAttribs.NeedCompress = self.OptRomDefs.get ('PCI_COMPRESS')
if self.OverrideAttribs.NeedCompress is not None:
if self.OverrideAttribs.NeedCompress.upper() not in ('TRUE', 'FALSE'):
GenFdsGlobalVariable.ErrorLogger( "Expected TRUE/FALSE for PCI_COMPRESS: %s" %self.InfFileName)
self.OverrideAttribs.NeedCompress = \
self.OverrideAttribs.NeedCompress.upper() == 'TRUE'
if self.OverrideAttribs.PciVendorId == None:
self.OverrideAttribs.PciVendorId = self.OptRomDefs.get ('PCI_VENDOR_ID')
if self.OverrideAttribs.PciClassCode == None:
self.OverrideAttribs.PciClassCode = self.OptRomDefs.get ('PCI_CLASS_CODE')
if self.OverrideAttribs.PciDeviceId == None:
self.OverrideAttribs.PciDeviceId = self.OptRomDefs.get ('PCI_DEVICE_ID')
if self.OverrideAttribs.PciRevision == None:
self.OverrideAttribs.PciRevision = self.OptRomDefs.get ('PCI_REVISION')
# InfObj = GenFdsGlobalVariable.WorkSpace.BuildObject[self.PathClassObj, self.CurrentArch]
# RecordList = InfObj._RawData[MODEL_META_DATA_HEADER, InfObj._Arch, InfObj._Platform]
# for Record in RecordList:
# Record = ReplaceMacros(Record, GlobalData.gEdkGlobal, False)
# Name = Record[0]
## GenFfs() method
#
# Generate FFS
#
# @param self The object pointer
# @retval string Generated .efi file name
#
def GenFfs(self):
#
# Parse Inf file get Module related information
#
self.__InfParse__()
self.__GetOptRomParams()
#
# Get the rule of how to generate Ffs file
#
Rule = self.__GetRule__()
GenFdsGlobalVariable.VerboseLogger( "Packing binaries from inf file : %s" %self.InfFileName)
#FileType = Ffs.Ffs.ModuleTypeToFileType[Rule.ModuleType]
#
# For the rule only has simpleFile
#
if isinstance (Rule, RuleSimpleFile.RuleSimpleFile) :
EfiOutputList = self.__GenSimpleFileSection__(Rule)
return EfiOutputList
#
# For Rule has ComplexFile
#
elif isinstance(Rule, RuleComplexFile.RuleComplexFile):
EfiOutputList = self.__GenComplexFileSection__(Rule)
return EfiOutputList
## __GenSimpleFileSection__() method
#
# Get .efi files according to simple rule.
#
# @param self The object pointer
# @param Rule The rule object used to generate section
# @retval string File name of the generated section file
#
def __GenSimpleFileSection__(self, Rule):
#
# Prepare the parameter of GenSection
#
OutputFileList = []
if Rule.FileName != None:
GenSecInputFile = self.__ExtendMacro__(Rule.FileName)
OutputFileList.append(GenSecInputFile)
else:
OutputFileList, IsSect = Section.Section.GetFileList(self, '', Rule.FileExtension)
return OutputFileList
## __GenComplexFileSection__() method
#
# Get .efi by sections in complex Rule
#
# @param self The object pointer
# @param Rule The rule object used to generate section
# @retval string File name of the generated section file
#
def __GenComplexFileSection__(self, Rule):
OutputFileList = []
for Sect in Rule.SectionList:
if Sect.SectionType == 'PE32':
if Sect.FileName != None:
GenSecInputFile = self.__ExtendMacro__(Sect.FileName)
OutputFileList.append(GenSecInputFile)
else:
FileList, IsSect = Section.Section.GetFileList(self, '', Sect.FileExtension)
OutputFileList.extend(FileList)
return OutputFileList
|
gpl-2.0
| -6,444,457,340,399,212,000 | 33.793548 | 115 | 0.635386 | false |
akiokio/centralfitestoque
|
src/.pycharm_helpers/python_stubs/-1807332816/future_builtins.py
|
1
|
1819
|
# encoding: utf-8
# module future_builtins
# from /usr/lib/python2.7/lib-dynload/future_builtins.so
# by generator 1.130
"""
This module provides functions that will be builtins in Python 3.0,
but that conflict with builtins that already exist in Python 2.x.
Functions:
ascii(arg) -- Returns the canonical string representation of an object.
filter(pred, iterable) -- Returns an iterator yielding those items of
iterable for which pred(item) is true.
hex(arg) -- Returns the hexadecimal representation of an integer.
map(func, *iterables) -- Returns an iterator that computes the function
using arguments from each of the iterables.
oct(arg) -- Returns the octal representation of an integer.
zip(iter1 [,iter2 [...]]) -- Returns a zip object whose .next() method
returns a tuple where the i-th element comes from the i-th iterable
argument.
The typical usage of this module is to replace existing builtins in a
module's namespace:
from future_builtins import ascii, filter, map, hex, oct, zip
"""
# imports
from itertools import filter, map, zip
# functions
def ascii(p_object): # real signature unknown; restored from __doc__
"""
ascii(object) -> string
Return the same as repr(). In Python 3.x, the repr() result will
contain printable characters unescaped, while the ascii() result
will have such characters backslash-escaped.
"""
return ""
def hex(number): # real signature unknown; restored from __doc__
"""
hex(number) -> string
Return the hexadecimal representation of an integer or long integer.
"""
return ""
def oct(number): # real signature unknown; restored from __doc__
"""
oct(number) -> string
Return the octal representation of an integer or long integer.
"""
return ""
# no classes
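# Illustrative use of the replacement pattern described in the module
# docstring (the values shown are assumptions, not captured output):
#
#   from future_builtins import map
#   map(str, [1, 2])        # returns an iterator, as in Python 3
#   list(map(str, [1, 2]))  # -> ['1', '2']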
|
bsd-2-clause
| -6,210,883,257,963,879,000 | 29.316667 | 72 | 0.703683 | false |
mikehulluk/morphforge
|
src/morphforgecontrib/traces/taggers.py
|
1
|
2064
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# ---------------------------------------------------------------------
# Copyright (c) 2012 Michael Hull.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# - Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# - Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# ----------------------------------------------------------------------
from morphforge.constants.standardtags import StandardTags
class AutoTaggerFromUnit(object):
tag_map = {
'V': [StandardTags.Voltage],
'A': [StandardTags.Current],
'S/m2': [StandardTags.ConductanceDensity],
'A/m2': [StandardTags.CurrentDensity],
}
@classmethod
def tag(cls, tr):
for (unit, tags) in cls.tag_map.iteritems():
try:
tr._data.rescale(unit)
tr.tags.extend(tags)
except:
pass
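# Rough usage sketch; the trace object and its attributes are assumptions
# inferred from the class above:
#
#   AutoTaggerFromUnit.tag(trace)
#   # if trace._data rescales to 'V', StandardTags.Voltage is appended to
#   # trace.tags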
|
bsd-2-clause
| 7,058,320,315,138,345,000 | 36.527273 | 72 | 0.660368 | false |
miurahr/pinax-teams
|
pinax/teams/views.py
|
1
|
10627
|
import json
from django.http import Http404, HttpResponse, HttpResponseRedirect, HttpResponseForbidden
from django.shortcuts import render, redirect, get_object_or_404
from django.template import RequestContext
from django.template.loader import render_to_string
from django.views.decorators.http import require_POST
from django.views.generic.edit import CreateView
from django.views.generic import ListView
from django.contrib import messages
from account.compat import get_user_model
from account.decorators import login_required
from account.mixins import LoginRequiredMixin
from account.views import SignupView
from six import string_types
from .decorators import team_required, manager_required
from .forms import TeamInviteUserForm, TeamForm, TeamSignupForm
from .hooks import hookset
from .models import Team, Membership
class TeamSignupView(SignupView):
template_name = "teams/signup.html"
def get_form_class(self):
if self.signup_code:
return self.form_class
return TeamSignupForm
def after_signup(self, form):
if not self.signup_code:
self.created_user.teams_created.create(
name=form.cleaned_data["team"]
)
super(TeamSignupView, self).after_signup(form)
class TeamCreateView(LoginRequiredMixin, CreateView):
form_class = TeamForm
model = Team
def form_valid(self, form):
self.object = form.save(commit=False)
self.object.creator = self.request.user
self.object.save()
return HttpResponseRedirect(self.get_success_url())
class TeamListView(ListView):
model = Team
context_object_name = "teams"
@team_required
@login_required
def team_update(request):
team = request.team
if not team.is_owner_or_manager(request.user):
return HttpResponseForbidden()
if request.method == "POST":
form = TeamForm(request.POST, instance=team)
if form.is_valid():
form.save()
return redirect(team.get_absolute_url())
else:
form = TeamForm(instance=team)
return render(request, "teams/team_form.html", {"form": form, "team": team})
@team_required
@login_required
def team_detail(request):
team = request.team
state = team.state_for(request.user)
role = team.role_for(request.user)
if team.member_access == Team.MEMBER_ACCESS_INVITATION and state is None:
raise Http404()
return render(request, "teams/team_detail.html", {
"team": team,
"state": state,
"role": role,
"invite_form": TeamInviteUserForm(team=team),
"can_join": team.can_join(request.user),
"can_leave": team.can_leave(request.user),
"can_apply": team.can_apply(request.user),
})
@team_required
@login_required
def team_manage(request):
team = request.team
state = team.state_for(request.user)
role = team.role_for(request.user)
if team.manager_access == Team.MEMBER_ACCESS_INVITATION and \
state is None and not request.user.is_staff:
raise Http404()
return render(request, "teams/team_manage.html", {
"team": team,
"state": state,
"role": role,
"invite_form": TeamInviteUserForm(team=team),
"can_join": team.can_join(request.user),
"can_leave": team.can_leave(request.user),
"can_apply": team.can_apply(request.user),
})
@team_required
@login_required
def team_join(request):
team = request.team
state = team.state_for(request.user)
if team.manager_access == Team.MEMBER_ACCESS_INVITATION and \
state is None and not request.user.is_staff:
raise Http404()
if team.can_join(request.user) and request.method == "POST":
membership, created = Membership.objects.get_or_create(team=team, user=request.user)
membership.state = Membership.STATE_MEMBER
membership.save()
messages.success(request, "Joined team.")
return redirect("team_detail", slug=team.slug)
@team_required
@login_required
def team_leave(request):
team = request.team
state = team.state_for(request.user)
if team.manager_access == Team.MEMBER_ACCESS_INVITATION and \
state is None and not request.user.is_staff:
raise Http404()
if team.can_leave(request.user) and request.method == "POST":
membership = Membership.objects.get(team=team, user=request.user)
membership.delete()
messages.success(request, "Left team.")
return redirect("dashboard")
else:
return redirect("team_detail", slug=team.slug)
@team_required
@login_required
def team_apply(request):
team = request.team
state = team.state_for(request.user)
if team.manager_access == Team.MEMBER_ACCESS_INVITATION and \
state is None and not request.user.is_staff:
raise Http404()
if team.can_apply(request.user) and request.method == "POST":
membership, created = Membership.objects.get_or_create(team=team, user=request.user)
membership.state = Membership.STATE_APPLIED
membership.save()
messages.success(request, "Applied to join team.")
return redirect("team_detail", slug=team.slug)
@login_required
@require_POST
def team_accept(request, pk):
membership = get_object_or_404(Membership, pk=pk)
if membership.accept(by=request.user):
messages.success(request, "Accepted application.")
return redirect("team_detail", slug=membership.team.slug)
@login_required
@require_POST
def team_reject(request, pk):
membership = get_object_or_404(Membership, pk=pk)
if membership.reject(by=request.user):
messages.success(request, "Rejected application.")
return redirect("team_detail", slug=membership.team.slug)
@team_required
@login_required
@require_POST
def team_invite(request):
team = request.team
role = team.role_for(request.user)
if role not in [Membership.ROLE_MANAGER, Membership.ROLE_OWNER]:
raise Http404()
form = TeamInviteUserForm(request.POST, team=team)
if form.is_valid():
user_or_email = form.cleaned_data["invitee"]
role = form.cleaned_data["role"]
if isinstance(user_or_email, string_types):
membership = team.invite_user(request.user, user_or_email, role)
else:
membership = team.add_user(user_or_email, role)
data = {
"html": render_to_string(
"teams/_invite_form.html",
{
"invite_form": TeamInviteUserForm(team=team),
"team": team
},
context_instance=RequestContext(request)
)
}
if membership is not None:
if membership.state == Membership.STATE_APPLIED:
fragment_class = ".applicants"
elif membership.state == Membership.STATE_INVITED:
fragment_class = ".invitees"
elif membership.state in (Membership.STATE_AUTO_JOINED, Membership.STATE_ACCEPTED):
fragment_class = {
Membership.ROLE_OWNER: ".owners",
Membership.ROLE_MANAGER: ".managers",
Membership.ROLE_MEMBER: ".members"
}[membership.role]
data.update({
"append-fragments": {
fragment_class: render_to_string(
"teams/_membership.html",
{
"membership": membership
},
context_instance=RequestContext(request)
)
}
})
else:
data = {
"html": render_to_string("teams/_invite_form.html", {
"invite_form": form,
"team": team
}, context_instance=RequestContext(request))
}
return HttpResponse(json.dumps(data), content_type="application/json")
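# For reference, the successful AJAX payload assembled above has roughly this
# shape (values are illustrative only):
#
#   {"html": "<rendered empty invite form>",
#    "append-fragments": {".members": "<rendered membership row>"}}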
@manager_required
@require_POST
def team_member_revoke_invite(request, pk):
membership = get_object_or_404(request.team.memberships.all(), pk=pk)
membership.remove()
data = {
"html": ""
}
return HttpResponse(json.dumps(data), content_type="application/json")
@manager_required
@require_POST
def team_member_resend_invite(request, pk):
membership = get_object_or_404(request.team.memberships.all(), pk=pk)
membership.resend_invite()
data = {
"html": render_to_string(
"teams/_membership.html",
{
"membership": membership
},
context_instance=RequestContext(request)
)
}
return HttpResponse(json.dumps(data), content_type="application/json")
@manager_required
@require_POST
def team_member_promote(request, pk):
membership = get_object_or_404(request.team.memberships.all(), pk=pk)
membership.promote(by=request.user)
data = {
"html": render_to_string(
"teams/_membership.html",
{
"membership": membership
},
context_instance=RequestContext(request)
)
}
return HttpResponse(json.dumps(data), content_type="application/json")
@manager_required
@require_POST
def team_member_demote(request, pk):
membership = get_object_or_404(request.team.memberships.all(), pk=pk)
membership.demote(by=request.user)
data = {
"html": render_to_string(
"teams/_membership.html",
{
"membership": membership
},
context_instance=RequestContext(request)
)
}
return HttpResponse(json.dumps(data), content_type="application/json")
@manager_required
@require_POST
def team_member_remove(request, pk):
membership = get_object_or_404(request.team.memberships.all(), pk=pk)
membership.remove()
data = {
"html": ""
}
return HttpResponse(json.dumps(data), content_type="application/json")
@team_required
@login_required
def autocomplete_users(request):
User = get_user_model()
team = request.team
role = team.role_for(request.user)
if role not in [Membership.ROLE_MANAGER, Membership.ROLE_OWNER]:
raise Http404()
users = User.objects.exclude(pk__in=[
x.user.pk for x in team.memberships.exclude(user__isnull=True)
])
q = request.GET.get("query")
results = []
if q:
results.extend([
hookset.get_autocomplete_result(x)
for x in hookset.search_queryset(q, users)
])
return HttpResponse(json.dumps(results), content_type="application/json")
|
mit
| -2,445,942,896,833,156,000 | 30.722388 | 95 | 0.633951 | false |
Wolkabout/WolkConnect-Python-
|
wolk/__init__.py
|
1
|
3048
|
# coding=utf-8
"""
.. module:: wolk
This module provides connection to WolkAbout IoT Platform.
To start publishing data to the platform
create an instance of Device class with credentials obtained from the platform
and pass it to an instance of WolkConnect class.
For more information about module features visit:
https://github.com/Wolkabout/WolkConnect-Python/tree/master/examples/full_feature_set
"""
from .models.ActuatorCommand import ActuatorCommand
from .models.ActuatorCommandType import ActuatorCommandType
from .models.ActuatorState import ActuatorState
from .models.ActuatorStatus import ActuatorStatus
from .models.Alarm import Alarm
from .models.ConfigurationCommand import ConfigurationCommand
from .models.ConfigurationCommandType import ConfigurationCommandType
from .models.Device import Device
from .models.FileTransferPacket import FileTransferPacket
from .models.FirmwareCommand import FirmwareCommand
from .models.FirmwareCommandType import FirmwareCommandType
from .models.FirmwareErrorType import FirmwareErrorType
from .models.FirmwareStatus import FirmwareStatus
from .models.FirmwareStatusType import FirmwareStatusType
from .models.FirmwareUpdateStateType import FirmwareUpdateStateType
from .models.InboundMessage import InboundMessage
from .models.OutboundMessage import OutboundMessage
from .models.Protocol import Protocol
from .models.SensorReading import SensorReading
from .interfaces.ActuationHandler import ActuationHandler
from .interfaces.ActuatorStatusProvider import ActuatorStatusProvider
from .interfaces.ConfigurationHandler import ConfigurationHandler
from .interfaces.ConfigurationProvider import ConfigurationProvider
from .interfaces.ConnectivityService import ConnectivityService
from .interfaces.FirmwareInstaller import FirmwareInstaller
from .interfaces.FirmwareURLDownloadHandler import FirmwareURLDownloadHandler
from .interfaces.InboundMessageDeserializer import InboundMessageDeserializer
from .interfaces.OutboundMessageFactory import OutboundMessageFactory
from .interfaces.OutboundMessageQueue import OutboundMessageQueue
from .FileSystemFirmwareHandler import FileSystemFirmwareHandler
from .LoggerFactory import logging_config
from .WolkConnect import WolkConnect
__all__ = [
"ActuatorCommand",
"ActuatorCommandType",
"ActuatorState",
"ActuatorStatus",
"Alarm",
"ConfigurationCommand",
"ConfigurationCommandType",
"Device",
"FileTransferPacket",
"FirmwareCommand",
"FirmwareCommandType",
"FirmwareErrorType",
"FirmwareStatus",
"FirmwareStatusType",
"FirmwareUpdateStateType",
"InboundMessage",
"OutboundMessage",
"Protocol",
"SensorReading",
"ActuationHandler",
"ActuatorStatusProvider",
"ConfigurationHandler",
"ConfigurationProvider",
"ConnectivityService",
"FileSystemFirmwareHandler",
"FirmwareInstaller",
"FirmwareURLDownloadHandler",
"logging_config",
"InboundMessageDeserializer",
"OutboundMessageFactory",
"OutboundMessageQueue",
"WolkConnect",
]
|
apache-2.0
| -7,907,046,333,640,627,000 | 36.62963 | 85 | 0.82185 | false |
deanet/gheimdall
|
gheimdall/responsecreator/__init__.py
|
1
|
5051
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# GHeimdall - A small web application for Google Apps SSO service.
# Copyright (C) 2007 SIOS Technology, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301,
# USA.
#
# $Id$
__author__ = '[email protected] (Takashi MATSUO)'
import saml2
import saml2.utils
import xmldsig as ds
from saml2 import saml, samlp
import time
EMPTY_SAML_RESPONSE="""<?xml version="1.0" encoding="UTF-8"?>
<samlp:Response Version="2.0"
xmlns="urn:oasis:names:tc:SAML:2.0:assertion"
xmlns:samlp="urn:oasis:names:tc:SAML:2.0:protocol">
<samlp:Status>
<samlp:StatusCode Value="urn:oasis:names:tc:SAML:2.0:status:Success"/>
</samlp:Status>
<Assertion Version="2.0" xmlns="urn:oasis:names:tc:SAML:2.0:assertion">
<Issuer></Issuer>
<Subject>
<SubjectConfirmation Method="urn:oasis:names:tc:SAML:2.0:cm:bearer">
<SubjectConfirmationData />
</SubjectConfirmation>
</Subject>
<Conditions></Conditions>
<AuthnStatement>
<AuthnContext>
<AuthnContextClassRef>
urn:oasis:names:tc:SAML:2.0:ac:classes:Password
</AuthnContextClassRef>
</AuthnContext>
</AuthnStatement>
</Assertion>
</samlp:Response>
"""
class ResponseCreator(object):
user_name = None
response = None
request = None
authn_request = None
def createLogoutRequest(self, session_index, name_id):
now = saml2.utils.getDateAndTime(time.time())
req = samlp.LogoutRequest(id=saml2.utils.createID(),
version=saml2.V2,
issue_instant=now)
req.issuer=saml.Issuer(text=self.config.get('issuer_name'))
req.name_id = name_id
req.session_index = samlp.SessionIndex(text=session_index)
req.signature = self._get_signature()
return req
def createLogoutResponse(self, logout_request_id, status_code):
now = saml2.utils.getDateAndTime(time.time())
self.response = samlp.LogoutResponse(id=saml2.utils.createID(),
version=saml2.V2,
issue_instant=now,
in_response_to=logout_request_id)
self.response.issuer = saml.Issuer(text=self.config.get('issuer_name'))
self.response.status = samlp.Status()
self.response.status.status_code = samlp.StatusCode(status_code)
self.response.signature = self._get_signature()
return self.response
def createAuthnResponse(self, user_name, authn_request, valid_time,
auth_time, acsURL):
self.user_name = user_name
self.authn_request = authn_request
response = samlp.ResponseFromString(EMPTY_SAML_RESPONSE)
response.id = saml2.utils.createID()
now = saml2.utils.getDateAndTime(time.time() - 10)
until = saml2.utils.getDateAndTime(valid_time)
auth_timestamp = saml2.utils.getDateAndTime(auth_time)
response.issue_instant = now
response.assertion[0].id = saml2.utils.createID()
response.assertion[0].issue_instant = now
response.assertion[0].issuer.text = self.config.get('issuer_name')
response.assertion[0].conditions.not_before = now
response.assertion[0].conditions.not_on_or_after = until
response.assertion[0].authn_statement[0].authn_instant = auth_timestamp
response.assertion[0].authn_statement[0].session_not_on_or_after = until
response.assertion[0].subject.name_id = self._getNameID()
response.assertion[0].subject.subject_confirmation[0].subject_confirmation_data.recipient = acsURL
self.response = response
self.response.signature = self._get_signature()
self._adjustment()
return self.response
def _get_signature(self):
key_type = self.config.get("apps_privkey_type")
if key_type == "rsa":
alg = ds.SIG_RSA_SHA1
elif key_type == "dsa":
alg = ds.SIG_DSA_SHA1
else:
alg = ds.SIG_RSA_SHA1
return ds.GetEmptySignature(signature_method_algorithm=alg)
def __init__(self, config):
self._prepare(config)
def _getNameID(self):
raise NotImplementedError('Child class must implement me.')
def _prepare(self, config):
raise NotImplementedError('Child class must implement me.')
def _adjustment(self):
return None
def create(mapper, config):
exec('from gheimdall.responsecreator import %s' % mapper)
ret = eval('%s.cls(config)' % mapper)
return ret
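# Hedged sketch of what a concrete mapper module is expected to provide so
# that create() can resolve it; the module name "simple" and the NameID
# construction are assumptions:
#
#   # gheimdall/responsecreator/simple.py
#   class SimpleResponseCreator(ResponseCreator):
#       def _prepare(self, config):
#           self.config = config
#       def _getNameID(self):
#           return saml.NameID(text=self.user_name)
#   cls = SimpleResponseCreator
#
#   creator = create('simple', config)   # evaluates simple.cls(config)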
|
gpl-2.0
| -5,745,384,431,272,258,000 | 35.868613 | 102 | 0.67848 | false |
dm03514/func-y-task-engine
|
funcytaskengine/engine.py
|
1
|
2928
|
"""
Drives a TestMachine to completion.
Engine needs to be completely generic and agnostic to any
specific request types, response types, protocols, etc,
adding a new initiator or
"""
import gevent
import logging
from gevent import Timeout
from gevent.queue import Queue
from funcytaskengine import settings
from funcytaskengine.machine import STATES, EVENT_RESULT
logger = logging.getLogger(__name__)
class TaskEngine(object):
def __init__(self, machine):
self.machine = machine
self.event_result_q = Queue(maxsize=1)
def run(self):
"""
        While the machine is running, the run loop:
        - check if the total allowed time has been violated
            - if the max timeout has been exceeded, log it and move to the finished state
        - wait for the next state
- yield
:return:
"""
# apply the first state so we can follow event loop flow
self.event_result_q.put_nowait(self.machine.events.first_state())
logger.debug('%s', {
'message': 'sending_first_state',
'first_state': self.machine.events.first_state()
})
timeout = Timeout(self.machine.max_timeout)
timeout.start()
try:
while self.machine.is_running():
# how do we support a general overarching timeout
# and a specific one for the current running event
try:
# we can ignore the next state, this is only used to indicate
# when it's time to apply a transition
result = self.event_result_q.get()
except gevent.queue.Empty:
logger.debug('%s', {
'message': 'queue_empty',
})
else:
if result == EVENT_RESULT.FAILURE:
logger.debug('%s', {
'message': 'task_failure'
})
return False
logger.debug('%s', {
'message': 'state_change_requested',
})
self.machine.events.teardown_current()
self.machine.next_state()
if self.machine.state == STATES.FINISHED:
logger.debug('%s', {
'message': 'task_execution_finished',
'status': 'SUCCESS',
})
return True
self.machine.run_current_event(event_result_q=self.event_result_q)
except Timeout:
logger.error('%s', {
'message': 'task timeout reached',
'timeout': self.machine.max_timeout,
'units': 'seconds'
})
return False
finally:
timeout.cancel()
return True
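# Rough usage sketch; the machine object is hypothetical and only illustrates
# the interface this engine relies on (max_timeout, state, is_running(),
# next_state(), run_current_event(), events.first_state(),
# events.teardown_current()):
#
#   engine = TaskEngine(machine=machine)
#   succeeded = engine.run()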
|
gpl-3.0
| 2,719,383,374,463,292,400 | 28.877551 | 86 | 0.510587 | false |
tmenjo/cinder-2015.1.1
|
cinder/tests/test_rbd.py
|
1
|
50268
|
# Copyright 2012 Josh Durgin
# Copyright 2013 Canonical Ltd.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import math
import os
import tempfile
import mock
from oslo_log import log as logging
from oslo_utils import timeutils
from oslo_utils import units
from cinder import db
from cinder import exception
from cinder.i18n import _
from cinder.image import image_utils
from cinder import test
from cinder.tests.image import fake as fake_image
from cinder.tests import test_volume
from cinder.volume import configuration as conf
import cinder.volume.drivers.rbd as driver
from cinder.volume.flows.manager import create_volume
LOG = logging.getLogger(__name__)
# This is used to collect raised exceptions so that tests may check what was
# raised.
# NOTE: this must be initialised in test setUp().
RAISED_EXCEPTIONS = []
class MockException(Exception):
def __init__(self, *args, **kwargs):
RAISED_EXCEPTIONS.append(self.__class__)
class MockImageNotFoundException(MockException):
"""Used as mock for rbd.ImageNotFound."""
class MockImageBusyException(MockException):
"""Used as mock for rbd.ImageBusy."""
class MockImageExistsException(MockException):
"""Used as mock for rbd.ImageExists."""
def common_mocks(f):
"""Decorator to set mocks common to all tests.
The point of doing these mocks here is so that we don't accidentally set
mocks that can't/don't get unset.
"""
def _common_inner_inner1(inst, *args, **kwargs):
@mock.patch('cinder.volume.drivers.rbd.RBDVolumeProxy')
@mock.patch('cinder.volume.drivers.rbd.RADOSClient')
@mock.patch('cinder.backup.drivers.ceph.rbd')
@mock.patch('cinder.backup.drivers.ceph.rados')
def _common_inner_inner2(mock_rados, mock_rbd, mock_client,
mock_proxy):
inst.mock_rbd = mock_rbd
inst.mock_rados = mock_rados
inst.mock_client = mock_client
inst.mock_proxy = mock_proxy
inst.mock_rbd.RBD.Error = Exception
inst.mock_rados.Error = Exception
inst.mock_rbd.ImageBusy = MockImageBusyException
inst.mock_rbd.ImageNotFound = MockImageNotFoundException
inst.mock_rbd.ImageExists = MockImageExistsException
inst.driver.rbd = inst.mock_rbd
inst.driver.rados = inst.mock_rados
return f(inst, *args, **kwargs)
return _common_inner_inner2()
return _common_inner_inner1
CEPH_MON_DUMP = """dumped monmap epoch 1
{ "epoch": 1,
"fsid": "33630410-6d93-4d66-8e42-3b953cf194aa",
"modified": "2013-05-22 17:44:56.343618",
"created": "2013-05-22 17:44:56.343618",
"mons": [
{ "rank": 0,
"name": "a",
"addr": "[::1]:6789\/0"},
{ "rank": 1,
"name": "b",
"addr": "[::1]:6790\/0"},
{ "rank": 2,
"name": "c",
"addr": "[::1]:6791\/0"},
{ "rank": 3,
"name": "d",
"addr": "127.0.0.1:6792\/0"},
{ "rank": 4,
"name": "e",
"addr": "example.com:6791\/0"}],
"quorum": [
0,
1,
2]}
"""
class RBDTestCase(test.TestCase):
def setUp(self):
global RAISED_EXCEPTIONS
RAISED_EXCEPTIONS = []
super(RBDTestCase, self).setUp()
self.cfg = mock.Mock(spec=conf.Configuration)
self.cfg.volume_tmp_dir = None
self.cfg.image_conversion_dir = None
self.cfg.rbd_pool = 'rbd'
self.cfg.rbd_ceph_conf = None
self.cfg.rbd_secret_uuid = None
self.cfg.rbd_user = None
self.cfg.volume_dd_blocksize = '1M'
self.cfg.rbd_store_chunk_size = 4
mock_exec = mock.Mock()
mock_exec.return_value = ('', '')
self.driver = driver.RBDDriver(execute=mock_exec,
configuration=self.cfg)
self.driver.set_initialized()
self.volume_name = u'volume-00000001'
self.snapshot_name = u'snapshot-00000001'
self.volume_size = 1
self.volume = dict(name=self.volume_name, size=self.volume_size)
self.snapshot = dict(volume_name=self.volume_name,
name=self.snapshot_name)
@common_mocks
def test_create_volume(self):
client = self.mock_client.return_value
client.__enter__.return_value = client
self.driver.create_volume(self.volume)
chunk_size = self.cfg.rbd_store_chunk_size * units.Mi
order = int(math.log(chunk_size, 2))
args = [client.ioctx, str(self.volume_name),
self.volume_size * units.Gi, order]
kwargs = {'old_format': False,
'features': client.features}
self.mock_rbd.RBD.return_value.create.assert_called_once_with(
*args, **kwargs)
client.__enter__.assert_called_once_with()
client.__exit__.assert_called_once_with(None, None, None)
@common_mocks
def test_manage_existing_get_size(self):
with mock.patch.object(self.driver.rbd.Image(), 'size') as \
mock_rbd_image_size:
with mock.patch.object(self.driver.rbd.Image(), 'close') \
as mock_rbd_image_close:
mock_rbd_image_size.return_value = 2 * units.Gi
existing_ref = {'source-name': self.volume_name}
return_size = self.driver.manage_existing_get_size(
self.volume,
existing_ref)
self.assertEqual(2, return_size)
mock_rbd_image_size.assert_called_once_with()
mock_rbd_image_close.assert_called_once_with()
@common_mocks
def test_manage_existing_get_invalid_size(self):
with mock.patch.object(self.driver.rbd.Image(), 'size') as \
mock_rbd_image_size:
with mock.patch.object(self.driver.rbd.Image(), 'close') \
as mock_rbd_image_close:
mock_rbd_image_size.return_value = 'abcd'
existing_ref = {'source-name': self.volume_name}
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.manage_existing_get_size,
self.volume, existing_ref)
mock_rbd_image_size.assert_called_once_with()
mock_rbd_image_close.assert_called_once_with()
@common_mocks
def test_manage_existing(self):
client = self.mock_client.return_value
client.__enter__.return_value = client
with mock.patch.object(self.driver.rbd.RBD(), 'rename') as \
mock_rbd_image_rename:
exist_volume = 'vol-exist'
existing_ref = {'source-name': exist_volume}
mock_rbd_image_rename.return_value = 0
self.driver.manage_existing(self.volume, existing_ref)
mock_rbd_image_rename.assert_called_with(
client.ioctx,
exist_volume,
self.volume_name)
@common_mocks
def test_manage_existing_with_exist_rbd_image(self):
client = self.mock_client.return_value
client.__enter__.return_value = client
self.mock_rbd.RBD.return_value.rename.side_effect = (
MockImageExistsException)
exist_volume = 'vol-exist'
existing_ref = {'source-name': exist_volume}
self.assertRaises(self.mock_rbd.ImageExists,
self.driver.manage_existing,
self.volume, existing_ref)
# Make sure the exception was raised
self.assertEqual(RAISED_EXCEPTIONS,
[self.mock_rbd.ImageExists])
@common_mocks
def test_delete_backup_snaps(self):
self.driver.rbd.Image.remove_snap = mock.Mock()
with mock.patch.object(self.driver, '_get_backup_snaps') as \
mock_get_backup_snaps:
mock_get_backup_snaps.return_value = [{'name': 'snap1'}]
rbd_image = self.driver.rbd.Image()
self.driver._delete_backup_snaps(rbd_image)
mock_get_backup_snaps.assert_called_once_with(rbd_image)
self.assertTrue(
self.driver.rbd.Image.return_value.remove_snap.called)
@common_mocks
def test_delete_volume(self):
client = self.mock_client.return_value
self.driver.rbd.Image.return_value.list_snaps.return_value = []
with mock.patch.object(self.driver, '_get_clone_info') as \
mock_get_clone_info:
with mock.patch.object(self.driver, '_delete_backup_snaps') as \
mock_delete_backup_snaps:
mock_get_clone_info.return_value = (None, None, None)
self.driver.delete_volume(self.volume)
mock_get_clone_info.assert_called_once_with(
self.mock_rbd.Image.return_value,
self.volume_name,
None)
(self.driver.rbd.Image.return_value
.list_snaps.assert_called_once_with())
client.__enter__.assert_called_once_with()
client.__exit__.assert_called_once_with(None, None, None)
mock_delete_backup_snaps.assert_called_once_with(
self.mock_rbd.Image.return_value)
self.assertFalse(
self.driver.rbd.Image.return_value.unprotect_snap.called)
self.assertEqual(
1, self.driver.rbd.RBD.return_value.remove.call_count)
@common_mocks
def delete_volume_not_found(self):
self.mock_rbd.Image.side_effect = self.mock_rbd.ImageNotFound
self.assertIsNone(self.driver.delete_volume(self.volume))
self.mock_rbd.Image.assert_called_once_with()
# Make sure the exception was raised
self.assertEqual(RAISED_EXCEPTIONS, [self.mock_rbd.ImageNotFound])
@common_mocks
def test_delete_busy_volume(self):
self.mock_rbd.Image.return_value.list_snaps.return_value = []
self.mock_rbd.RBD.return_value.remove.side_effect = (
self.mock_rbd.ImageBusy)
with mock.patch.object(self.driver, '_get_clone_info') as \
mock_get_clone_info:
mock_get_clone_info.return_value = (None, None, None)
with mock.patch.object(self.driver, '_delete_backup_snaps') as \
mock_delete_backup_snaps:
with mock.patch.object(driver, 'RADOSClient') as \
mock_rados_client:
self.assertRaises(exception.VolumeIsBusy,
self.driver.delete_volume, self.volume)
mock_get_clone_info.assert_called_once_with(
self.mock_rbd.Image.return_value,
self.volume_name,
None)
(self.mock_rbd.Image.return_value.list_snaps
.assert_called_once_with())
mock_rados_client.assert_called_once_with(self.driver)
mock_delete_backup_snaps.assert_called_once_with(
self.mock_rbd.Image.return_value)
self.assertFalse(
self.mock_rbd.Image.return_value.unprotect_snap.called)
self.assertEqual(
1, self.mock_rbd.RBD.return_value.remove.call_count)
# Make sure the exception was raised
self.assertEqual(RAISED_EXCEPTIONS,
[self.mock_rbd.ImageBusy])
@common_mocks
def test_delete_volume_not_found(self):
self.mock_rbd.Image.return_value.list_snaps.return_value = []
self.mock_rbd.RBD.return_value.remove.side_effect = (
self.mock_rbd.ImageNotFound)
with mock.patch.object(self.driver, '_get_clone_info') as \
mock_get_clone_info:
mock_get_clone_info.return_value = (None, None, None)
with mock.patch.object(self.driver, '_delete_backup_snaps') as \
mock_delete_backup_snaps:
with mock.patch.object(driver, 'RADOSClient') as \
mock_rados_client:
self.assertIsNone(self.driver.delete_volume(self.volume))
mock_get_clone_info.assert_called_once_with(
self.mock_rbd.Image.return_value,
self.volume_name,
None)
(self.mock_rbd.Image.return_value.list_snaps
.assert_called_once_with())
mock_rados_client.assert_called_once_with(self.driver)
mock_delete_backup_snaps.assert_called_once_with(
self.mock_rbd.Image.return_value)
self.assertFalse(
self.mock_rbd.Image.return_value.unprotect_snap.called)
self.assertEqual(
1, self.mock_rbd.RBD.return_value.remove.call_count)
# Make sure the exception was raised
self.assertEqual(RAISED_EXCEPTIONS,
[self.mock_rbd.ImageNotFound])
@common_mocks
def test_create_snapshot(self):
proxy = self.mock_proxy.return_value
proxy.__enter__.return_value = proxy
self.driver.create_snapshot(self.snapshot)
args = [str(self.snapshot_name)]
proxy.create_snap.assert_called_with(*args)
proxy.protect_snap.assert_called_with(*args)
@common_mocks
def test_delete_snapshot(self):
proxy = self.mock_proxy.return_value
proxy.__enter__.return_value = proxy
self.driver.delete_snapshot(self.snapshot)
args = [str(self.snapshot_name)]
proxy.remove_snap.assert_called_with(*args)
proxy.unprotect_snap.assert_called_with(*args)
@common_mocks
def test_get_clone_info(self):
volume = self.mock_rbd.Image()
volume.set_snap = mock.Mock()
volume.parent_info = mock.Mock()
parent_info = ('a', 'b', '%s.clone_snap' % (self.volume_name))
volume.parent_info.return_value = parent_info
info = self.driver._get_clone_info(volume, self.volume_name)
self.assertEqual(info, parent_info)
self.assertFalse(volume.set_snap.called)
volume.parent_info.assert_called_once_with()
@common_mocks
def test_get_clone_info_w_snap(self):
volume = self.mock_rbd.Image()
volume.set_snap = mock.Mock()
volume.parent_info = mock.Mock()
parent_info = ('a', 'b', '%s.clone_snap' % (self.volume_name))
volume.parent_info.return_value = parent_info
snapshot = self.mock_rbd.ImageSnapshot()
info = self.driver._get_clone_info(volume, self.volume_name,
snap=snapshot)
self.assertEqual(info, parent_info)
self.assertEqual(volume.set_snap.call_count, 2)
volume.parent_info.assert_called_once_with()
@common_mocks
def test_get_clone_info_w_exception(self):
volume = self.mock_rbd.Image()
volume.set_snap = mock.Mock()
volume.parent_info = mock.Mock()
volume.parent_info.side_effect = self.mock_rbd.ImageNotFound
snapshot = self.mock_rbd.ImageSnapshot()
info = self.driver._get_clone_info(volume, self.volume_name,
snap=snapshot)
self.assertEqual(info, (None, None, None))
self.assertEqual(volume.set_snap.call_count, 2)
volume.parent_info.assert_called_once_with()
# Make sure the exception was raised
self.assertEqual(RAISED_EXCEPTIONS, [self.mock_rbd.ImageNotFound])
@common_mocks
def test_get_clone_info_deleted_volume(self):
volume = self.mock_rbd.Image()
volume.set_snap = mock.Mock()
volume.parent_info = mock.Mock()
parent_info = ('a', 'b', '%s.clone_snap' % (self.volume_name))
volume.parent_info.return_value = parent_info
info = self.driver._get_clone_info(volume,
"%s.deleted" % (self.volume_name))
self.assertEqual(info, parent_info)
self.assertFalse(volume.set_snap.called)
volume.parent_info.assert_called_once_with()
@common_mocks
def test_create_cloned_volume_same_size(self):
src_name = u'volume-00000001'
dst_name = u'volume-00000002'
self.cfg.rbd_max_clone_depth = 2
with mock.patch.object(self.driver, '_get_clone_depth') as \
mock_get_clone_depth:
# Try with no flatten required
with mock.patch.object(self.driver, '_resize') as mock_resize:
mock_get_clone_depth.return_value = 1
self.driver.create_cloned_volume({'name': dst_name,
'size': 10},
{'name': src_name,
'size': 10})
(self.mock_rbd.Image.return_value.create_snap
.assert_called_once_with('.'.join((dst_name,
'clone_snap'))))
(self.mock_rbd.Image.return_value.protect_snap
.assert_called_once_with('.'.join((dst_name,
'clone_snap'))))
self.assertEqual(
1, self.mock_rbd.RBD.return_value.clone.call_count)
self.mock_rbd.Image.return_value.close \
.assert_called_once_with()
self.assertTrue(mock_get_clone_depth.called)
self.assertEqual(
0, mock_resize.call_count)
@common_mocks
def test_create_cloned_volume_different_size(self):
src_name = u'volume-00000001'
dst_name = u'volume-00000002'
self.cfg.rbd_max_clone_depth = 2
with mock.patch.object(self.driver, '_get_clone_depth') as \
mock_get_clone_depth:
# Try with no flatten required
with mock.patch.object(self.driver, '_resize') as mock_resize:
mock_get_clone_depth.return_value = 1
self.driver.create_cloned_volume({'name': dst_name,
'size': 20},
{'name': src_name,
'size': 10})
(self.mock_rbd.Image.return_value.create_snap
.assert_called_once_with('.'.join((dst_name,
'clone_snap'))))
(self.mock_rbd.Image.return_value.protect_snap
.assert_called_once_with('.'.join((dst_name,
'clone_snap'))))
self.assertEqual(
1, self.mock_rbd.RBD.return_value.clone.call_count)
self.mock_rbd.Image.return_value.close \
.assert_called_once_with()
self.assertTrue(mock_get_clone_depth.called)
self.assertEqual(
1, mock_resize.call_count)
@common_mocks
def test_create_cloned_volume_w_flatten(self):
src_name = u'volume-00000001'
dst_name = u'volume-00000002'
self.cfg.rbd_max_clone_depth = 1
self.mock_rbd.RBD.return_value.clone.side_effect = (
self.mock_rbd.RBD.Error)
with mock.patch.object(self.driver, '_get_clone_depth') as \
mock_get_clone_depth:
# Try with no flatten required
mock_get_clone_depth.return_value = 1
self.assertRaises(self.mock_rbd.RBD.Error,
self.driver.create_cloned_volume,
dict(name=dst_name), dict(name=src_name))
(self.mock_rbd.Image.return_value.create_snap
.assert_called_once_with('.'.join((dst_name, 'clone_snap'))))
(self.mock_rbd.Image.return_value.protect_snap
.assert_called_once_with('.'.join((dst_name, 'clone_snap'))))
self.assertEqual(
1, self.mock_rbd.RBD.return_value.clone.call_count)
(self.mock_rbd.Image.return_value.unprotect_snap
.assert_called_once_with('.'.join((dst_name, 'clone_snap'))))
(self.mock_rbd.Image.return_value.remove_snap
.assert_called_once_with('.'.join((dst_name, 'clone_snap'))))
self.mock_rbd.Image.return_value.close.assert_called_once_with()
self.assertTrue(mock_get_clone_depth.called)
@common_mocks
def test_create_cloned_volume_w_clone_exception(self):
src_name = u'volume-00000001'
dst_name = u'volume-00000002'
self.cfg.rbd_max_clone_depth = 2
self.mock_rbd.RBD.return_value.clone.side_effect = (
self.mock_rbd.RBD.Error)
with mock.patch.object(self.driver, '_get_clone_depth') as \
mock_get_clone_depth:
# Try with no flatten required
mock_get_clone_depth.return_value = 1
self.assertRaises(self.mock_rbd.RBD.Error,
self.driver.create_cloned_volume,
{'name': dst_name}, {'name': src_name})
(self.mock_rbd.Image.return_value.create_snap
.assert_called_once_with('.'.join((dst_name, 'clone_snap'))))
(self.mock_rbd.Image.return_value.protect_snap
.assert_called_once_with('.'.join((dst_name, 'clone_snap'))))
self.assertEqual(
1, self.mock_rbd.RBD.return_value.clone.call_count)
(self.mock_rbd.Image.return_value.unprotect_snap
.assert_called_once_with('.'.join((dst_name, 'clone_snap'))))
(self.mock_rbd.Image.return_value.remove_snap
.assert_called_once_with('.'.join((dst_name, 'clone_snap'))))
self.mock_rbd.Image.return_value.close.assert_called_once_with()
@common_mocks
def test_good_locations(self):
locations = ['rbd://fsid/pool/image/snap',
'rbd://%2F/%2F/%2F/%2F', ]
map(self.driver._parse_location, locations)
@common_mocks
def test_bad_locations(self):
locations = ['rbd://image',
'http://path/to/somewhere/else',
'rbd://image/extra',
'rbd://image/',
'rbd://fsid/pool/image/',
'rbd://fsid/pool/image/snap/',
'rbd://///', ]
for loc in locations:
self.assertRaises(exception.ImageUnacceptable,
self.driver._parse_location,
loc)
self.assertFalse(
self.driver._is_cloneable(loc, {'disk_format': 'raw'}))
@common_mocks
def test_cloneable(self):
with mock.patch.object(self.driver, '_get_fsid') as mock_get_fsid:
mock_get_fsid.return_value = 'abc'
location = 'rbd://abc/pool/image/snap'
info = {'disk_format': 'raw'}
self.assertTrue(self.driver._is_cloneable(location, info))
self.assertTrue(mock_get_fsid.called)
@common_mocks
def test_uncloneable_different_fsid(self):
with mock.patch.object(self.driver, '_get_fsid') as mock_get_fsid:
mock_get_fsid.return_value = 'abc'
location = 'rbd://def/pool/image/snap'
self.assertFalse(
self.driver._is_cloneable(location, {'disk_format': 'raw'}))
self.assertTrue(mock_get_fsid.called)
@common_mocks
def test_uncloneable_unreadable(self):
with mock.patch.object(self.driver, '_get_fsid') as mock_get_fsid:
mock_get_fsid.return_value = 'abc'
location = 'rbd://abc/pool/image/snap'
self.driver.rbd.Error = Exception
self.mock_proxy.side_effect = Exception
args = [location, {'disk_format': 'raw'}]
self.assertFalse(self.driver._is_cloneable(*args))
self.assertEqual(1, self.mock_proxy.call_count)
self.assertTrue(mock_get_fsid.called)
@common_mocks
def test_uncloneable_bad_format(self):
with mock.patch.object(self.driver, '_get_fsid') as mock_get_fsid:
mock_get_fsid.return_value = 'abc'
location = 'rbd://abc/pool/image/snap'
formats = ['qcow2', 'vmdk', 'vdi']
for f in formats:
self.assertFalse(
self.driver._is_cloneable(location, {'disk_format': f}))
self.assertTrue(mock_get_fsid.called)
def _copy_image(self):
with mock.patch.object(tempfile, 'NamedTemporaryFile'):
with mock.patch.object(os.path, 'exists') as mock_exists:
mock_exists.return_value = True
with mock.patch.object(image_utils, 'fetch_to_raw'):
with mock.patch.object(self.driver, 'delete_volume'):
with mock.patch.object(self.driver, '_resize'):
mock_image_service = mock.MagicMock()
args = [None, {'name': 'test', 'size': 1},
mock_image_service, None]
self.driver.copy_image_to_volume(*args)
@common_mocks
def test_copy_image_no_volume_tmp(self):
self.cfg.volume_tmp_dir = None
self.cfg.image_conversion_dir = None
self._copy_image()
@common_mocks
def test_copy_image_volume_tmp(self):
self.cfg.volume_tmp_dir = None
self.cfg.image_conversion_dir = '/var/run/cinder/tmp'
self._copy_image()
@common_mocks
def test_update_volume_stats(self):
client = self.mock_client.return_value
client.__enter__.return_value = client
client.cluster = mock.Mock()
client.cluster.mon_command = mock.Mock()
client.cluster.mon_command.return_value = (
0, '{"stats":{"total_bytes":64385286144,'
'"total_used_bytes":3289628672,"total_avail_bytes":61095657472},'
'"pools":[{"name":"rbd","id":2,"stats":{"kb_used":1510197,'
'"bytes_used":1546440971,"max_avail":28987613184,"objects":412}},'
'{"name":"volumes","id":3,"stats":{"kb_used":0,"bytes_used":0,'
'"max_avail":28987613184,"objects":0}}]}\n', '')
self.driver.configuration.safe_get = mock.Mock()
self.driver.configuration.safe_get.return_value = 'RBD'
expected = dict(
volume_backend_name='RBD',
vendor_name='Open Source',
driver_version=self.driver.VERSION,
storage_protocol='ceph',
total_capacity_gb=27,
free_capacity_gb=26,
reserved_percentage=0)
actual = self.driver.get_volume_stats(True)
client.cluster.mon_command.assert_called_once_with(
'{"prefix":"df", "format":"json"}', '')
self.assertDictMatch(expected, actual)
@common_mocks
def test_update_volume_stats_error(self):
client = self.mock_client.return_value
client.__enter__.return_value = client
client.cluster = mock.Mock()
client.cluster.mon_command = mock.Mock()
client.cluster.mon_command.return_value = (22, '', '')
self.driver.configuration.safe_get = mock.Mock()
self.driver.configuration.safe_get.return_value = 'RBD'
expected = dict(volume_backend_name='RBD',
vendor_name='Open Source',
driver_version=self.driver.VERSION,
storage_protocol='ceph',
total_capacity_gb='unknown',
free_capacity_gb='unknown',
reserved_percentage=0)
actual = self.driver.get_volume_stats(True)
client.cluster.mon_command.assert_called_once_with(
'{"prefix":"df", "format":"json"}', '')
self.assertDictMatch(expected, actual)
@common_mocks
def test_get_mon_addrs(self):
with mock.patch.object(self.driver, '_execute') as mock_execute:
mock_execute.return_value = (CEPH_MON_DUMP, '')
hosts = ['::1', '::1', '::1', '127.0.0.1', 'example.com']
ports = ['6789', '6790', '6791', '6792', '6791']
self.assertEqual((hosts, ports), self.driver._get_mon_addrs())
@common_mocks
def test_initialize_connection(self):
hosts = ['::1', '::1', '::1', '127.0.0.1', 'example.com']
ports = ['6789', '6790', '6791', '6792', '6791']
with mock.patch.object(self.driver, '_get_mon_addrs') as \
mock_get_mon_addrs:
mock_get_mon_addrs.return_value = (hosts, ports)
expected = {
'driver_volume_type': 'rbd',
'data': {
'name': '%s/%s' % (self.cfg.rbd_pool,
self.volume_name),
'hosts': hosts,
'ports': ports,
'auth_enabled': False,
'auth_username': None,
'secret_type': 'ceph',
'secret_uuid': None, }
}
volume = dict(name=self.volume_name)
actual = self.driver.initialize_connection(volume, None)
self.assertDictMatch(expected, actual)
self.assertTrue(mock_get_mon_addrs.called)
@common_mocks
def test_clone(self):
src_pool = u'images'
src_image = u'image-name'
src_snap = u'snapshot-name'
client_stack = []
def mock__enter__(inst):
def _inner():
client_stack.append(inst)
return inst
return _inner
client = self.mock_client.return_value
        # capture both rados clients used to perform the clone
client.__enter__.side_effect = mock__enter__(client)
self.driver._clone(self.volume, src_pool, src_image, src_snap)
args = [client_stack[0].ioctx, str(src_image), str(src_snap),
client_stack[1].ioctx, str(self.volume_name)]
kwargs = {'features': client.features}
self.mock_rbd.RBD.return_value.clone.assert_called_once_with(
*args, **kwargs)
self.assertEqual(client.__enter__.call_count, 2)
@common_mocks
def test_extend_volume(self):
fake_size = '20'
fake_vol = {'project_id': 'testprjid', 'name': self.volume_name,
'size': fake_size,
'id': 'a720b3c0-d1f0-11e1-9b23-0800200c9a66'}
self.mox.StubOutWithMock(self.driver, '_resize')
size = int(fake_size) * units.Gi
self.driver._resize(fake_vol, size=size)
self.mox.ReplayAll()
self.driver.extend_volume(fake_vol, fake_size)
self.mox.VerifyAll()
@common_mocks
def test_retype(self):
context = {}
diff = {'encryption': {},
'extra_specs': {}}
fake_volume = {'name': 'testvolume',
'host': 'currenthost'}
fake_type = 'high-IOPS'
# no support for migration
host = {'host': 'anotherhost'}
self.assertFalse(self.driver.retype(context, fake_volume,
fake_type, diff, host))
host = {'host': 'currenthost'}
# no support for changing encryption
diff['encryption'] = {'non-empty': 'non-empty'}
self.assertFalse(self.driver.retype(context, fake_volume,
fake_type, diff, host))
diff['encryption'] = {}
# no support for changing extra_specs
diff['extra_specs'] = {'non-empty': 'non-empty'}
self.assertFalse(self.driver.retype(context, fake_volume,
fake_type, diff, host))
diff['extra_specs'] = {}
self.assertTrue(self.driver.retype(context, fake_volume,
fake_type, diff, host))
def test_rbd_volume_proxy_init(self):
mock_driver = mock.Mock(name='driver')
mock_driver._connect_to_rados.return_value = (None, None)
with driver.RBDVolumeProxy(mock_driver, self.volume_name):
self.assertEqual(1, mock_driver._connect_to_rados.call_count)
self.assertFalse(mock_driver._disconnect_from_rados.called)
self.assertEqual(1, mock_driver._disconnect_from_rados.call_count)
mock_driver.reset_mock()
snap = u'snapshot-name'
with driver.RBDVolumeProxy(mock_driver, self.volume_name,
snapshot=snap):
self.assertEqual(1, mock_driver._connect_to_rados.call_count)
self.assertFalse(mock_driver._disconnect_from_rados.called)
self.assertEqual(1, mock_driver._disconnect_from_rados.call_count)
@common_mocks
def test_connect_to_rados(self):
# Default
self.cfg.rados_connect_timeout = -1
self.mock_rados.Rados.return_value.open_ioctx.return_value = \
self.mock_rados.Rados.return_value.ioctx
# default configured pool
ret = self.driver._connect_to_rados()
self.assertTrue(self.mock_rados.Rados.return_value.connect.called)
# Expect no timeout if default is used
self.mock_rados.Rados.return_value.connect.assert_called_once_with()
self.assertTrue(self.mock_rados.Rados.return_value.open_ioctx.called)
self.assertEqual(ret[1], self.mock_rados.Rados.return_value.ioctx)
self.mock_rados.Rados.return_value.open_ioctx.assert_called_with(
self.cfg.rbd_pool)
# different pool
ret = self.driver._connect_to_rados('alt_pool')
self.assertTrue(self.mock_rados.Rados.return_value.connect.called)
self.assertTrue(self.mock_rados.Rados.return_value.open_ioctx.called)
self.assertEqual(ret[1], self.mock_rados.Rados.return_value.ioctx)
self.mock_rados.Rados.return_value.open_ioctx.assert_called_with(
'alt_pool')
# With timeout
self.cfg.rados_connect_timeout = 1
self.mock_rados.Rados.return_value.connect.reset_mock()
self.driver._connect_to_rados()
self.mock_rados.Rados.return_value.connect.assert_called_once_with(
timeout=1)
# error
self.mock_rados.Rados.return_value.open_ioctx.reset_mock()
self.mock_rados.Rados.return_value.shutdown.reset_mock()
self.mock_rados.Rados.return_value.open_ioctx.side_effect = (
self.mock_rados.Error)
self.assertRaises(exception.VolumeBackendAPIException,
self.driver._connect_to_rados)
self.assertTrue(self.mock_rados.Rados.return_value.open_ioctx.called)
self.mock_rados.Rados.return_value.shutdown.assert_called_once_with()
class RBDImageIOWrapperTestCase(test.TestCase):
def setUp(self):
super(RBDImageIOWrapperTestCase, self).setUp()
self.meta = mock.Mock()
self.meta.user = 'mock_user'
self.meta.conf = 'mock_conf'
self.meta.pool = 'mock_pool'
self.meta.image = mock.Mock()
self.meta.image.read = mock.Mock()
self.meta.image.write = mock.Mock()
self.meta.image.size = mock.Mock()
self.mock_rbd_wrapper = driver.RBDImageIOWrapper(self.meta)
self.data_length = 1024
self.full_data = 'abcd' * 256
def test_init(self):
self.assertEqual(self.mock_rbd_wrapper._rbd_meta, self.meta)
self.assertEqual(self.mock_rbd_wrapper._offset, 0)
def test_inc_offset(self):
self.mock_rbd_wrapper._inc_offset(10)
self.mock_rbd_wrapper._inc_offset(10)
self.assertEqual(self.mock_rbd_wrapper._offset, 20)
def test_rbd_image(self):
self.assertEqual(self.mock_rbd_wrapper.rbd_image, self.meta.image)
def test_rbd_user(self):
self.assertEqual(self.mock_rbd_wrapper.rbd_user, self.meta.user)
def test_rbd_pool(self):
        self.assertEqual(self.mock_rbd_wrapper.rbd_pool, self.meta.pool)
def test_rbd_conf(self):
        self.assertEqual(self.mock_rbd_wrapper.rbd_conf, self.meta.conf)
def test_read(self):
def mock_read(offset, length):
return self.full_data[offset:length]
self.meta.image.read.side_effect = mock_read
self.meta.image.size.return_value = self.data_length
data = self.mock_rbd_wrapper.read()
self.assertEqual(data, self.full_data)
data = self.mock_rbd_wrapper.read()
self.assertEqual(data, '')
self.mock_rbd_wrapper.seek(0)
data = self.mock_rbd_wrapper.read()
self.assertEqual(data, self.full_data)
self.mock_rbd_wrapper.seek(0)
data = self.mock_rbd_wrapper.read(10)
self.assertEqual(data, self.full_data[:10])
def test_write(self):
self.mock_rbd_wrapper.write(self.full_data)
self.assertEqual(self.mock_rbd_wrapper._offset, 1024)
def test_seekable(self):
self.assertTrue(self.mock_rbd_wrapper.seekable)
def test_seek(self):
self.assertEqual(self.mock_rbd_wrapper._offset, 0)
self.mock_rbd_wrapper.seek(10)
self.assertEqual(self.mock_rbd_wrapper._offset, 10)
self.mock_rbd_wrapper.seek(10)
self.assertEqual(self.mock_rbd_wrapper._offset, 10)
self.mock_rbd_wrapper.seek(10, 1)
self.assertEqual(self.mock_rbd_wrapper._offset, 20)
self.mock_rbd_wrapper.seek(0)
self.mock_rbd_wrapper.write(self.full_data)
self.meta.image.size.return_value = self.data_length
self.mock_rbd_wrapper.seek(0)
self.assertEqual(self.mock_rbd_wrapper._offset, 0)
self.mock_rbd_wrapper.seek(10, 2)
self.assertEqual(self.mock_rbd_wrapper._offset, self.data_length + 10)
self.mock_rbd_wrapper.seek(-10, 2)
self.assertEqual(self.mock_rbd_wrapper._offset, self.data_length - 10)
# test exceptions.
self.assertRaises(IOError, self.mock_rbd_wrapper.seek, 0, 3)
self.assertRaises(IOError, self.mock_rbd_wrapper.seek, -1)
# offset should not have been changed by any of the previous
# operations.
self.assertEqual(self.mock_rbd_wrapper._offset, self.data_length - 10)
def test_tell(self):
self.assertEqual(self.mock_rbd_wrapper.tell(), 0)
self.mock_rbd_wrapper._inc_offset(10)
self.assertEqual(self.mock_rbd_wrapper.tell(), 10)
def test_flush(self):
with mock.patch.object(driver, 'LOG') as mock_logger:
self.meta.image.flush = mock.Mock()
self.mock_rbd_wrapper.flush()
self.meta.image.flush.assert_called_once_with()
self.meta.image.flush.reset_mock()
# this should be caught and logged silently.
self.meta.image.flush.side_effect = AttributeError
self.mock_rbd_wrapper.flush()
self.meta.image.flush.assert_called_once_with()
msg = _("flush() not supported in this version of librbd")
mock_logger.warning.assert_called_with(msg)
def test_fileno(self):
self.assertRaises(IOError, self.mock_rbd_wrapper.fileno)
def test_close(self):
self.mock_rbd_wrapper.close()
class ManagedRBDTestCase(test_volume.DriverTestCase):
driver_name = "cinder.volume.drivers.rbd.RBDDriver"
def setUp(self):
super(ManagedRBDTestCase, self).setUp()
# TODO(dosaboy): need to remove dependency on mox stubs here once
# image.fake has been converted to mock.
fake_image.stub_out_image_service(self.stubs)
self.volume.driver.set_initialized()
self.volume.stats = {'allocated_capacity_gb': 0,
'pools': {}}
self.called = []
def _create_volume_from_image(self, expected_status, raw=False,
clone_error=False):
"""Try to clone a volume from an image, and check the status
afterwards.
NOTE: if clone_error is True we force the image type to raw otherwise
clone_image is not called
"""
volume_id = 1
# See tests.image.fake for image types.
if raw:
image_id = '155d900f-4e14-4e4c-a73d-069cbf4541e6'
else:
image_id = 'c905cedb-7281-47e4-8a62-f26bc5fc4c77'
# creating volume testdata
db.volume_create(self.context,
{'id': volume_id,
'updated_at': timeutils.utcnow(),
'display_description': 'Test Desc',
'size': 20,
'status': 'creating',
'instance_uuid': None,
'host': 'dummy'})
try:
if not clone_error:
self.volume.create_volume(self.context,
volume_id,
image_id=image_id)
else:
self.assertRaises(exception.CinderException,
self.volume.create_volume,
self.context,
volume_id,
image_id=image_id)
volume = db.volume_get(self.context, volume_id)
self.assertEqual(volume['status'], expected_status)
finally:
# cleanup
db.volume_destroy(self.context, volume_id)
def test_create_vol_from_image_status_available(self):
"""Clone raw image then verify volume is in available state."""
def _mock_clone_image(context, volume, image_location,
image_meta, image_service):
return {'provider_location': None}, True
with mock.patch.object(self.volume.driver, 'clone_image') as \
mock_clone_image:
mock_clone_image.side_effect = _mock_clone_image
with mock.patch.object(self.volume.driver, 'create_volume') as \
mock_create:
with mock.patch.object(create_volume.CreateVolumeFromSpecTask,
'_copy_image_to_volume') as mock_copy:
self._create_volume_from_image('available', raw=True)
self.assertFalse(mock_copy.called)
self.assertTrue(mock_clone_image.called)
self.assertFalse(mock_create.called)
def test_create_vol_from_non_raw_image_status_available(self):
"""Clone non-raw image then verify volume is in available state."""
def _mock_clone_image(context, volume, image_location,
image_meta, image_service):
return {'provider_location': None}, False
with mock.patch.object(self.volume.driver, 'clone_image') as \
mock_clone_image:
mock_clone_image.side_effect = _mock_clone_image
with mock.patch.object(self.volume.driver, 'create_volume') as \
mock_create:
with mock.patch.object(create_volume.CreateVolumeFromSpecTask,
'_copy_image_to_volume') as mock_copy:
self._create_volume_from_image('available', raw=False)
self.assertTrue(mock_copy.called)
self.assertTrue(mock_clone_image.called)
self.assertTrue(mock_create.called)
def test_create_vol_from_image_status_error(self):
"""Fail to clone raw image then verify volume is in error state."""
with mock.patch.object(self.volume.driver, 'clone_image') as \
mock_clone_image:
mock_clone_image.side_effect = exception.CinderException
with mock.patch.object(self.volume.driver, 'create_volume'):
with mock.patch.object(create_volume.CreateVolumeFromSpecTask,
'_copy_image_to_volume') as mock_copy:
self._create_volume_from_image('error', raw=True,
clone_error=True)
self.assertFalse(mock_copy.called)
self.assertTrue(mock_clone_image.called)
self.assertFalse(self.volume.driver.create_volume.called)
def test_clone_failure(self):
driver = self.volume.driver
with mock.patch.object(driver, '_is_cloneable', lambda *args: False):
image_loc = (mock.Mock(), None)
actual = driver.clone_image(mock.Mock(),
mock.Mock(),
image_loc,
{},
mock.Mock())
self.assertEqual(({}, False), actual)
self.assertEqual(({}, False),
driver.clone_image('', object(), None, {}, ''))
def test_clone_success(self):
expected = ({'provider_location': None}, True)
driver = self.volume.driver
with mock.patch.object(self.volume.driver, '_is_cloneable') as \
mock_is_cloneable:
mock_is_cloneable.return_value = True
with mock.patch.object(self.volume.driver, '_clone') as \
mock_clone:
with mock.patch.object(self.volume.driver, '_resize') as \
mock_resize:
image_loc = ('rbd://fee/fi/fo/fum', None)
volume = {'name': 'vol1'}
actual = driver.clone_image(mock.Mock(),
volume,
image_loc,
{'disk_format': 'raw',
'id': 'id.foo'},
mock.Mock())
self.assertEqual(expected, actual)
mock_clone.assert_called_once_with(volume,
'fi', 'fo', 'fum')
mock_resize.assert_called_once_with(volume)
def test_clone_multilocation_success(self):
expected = ({'provider_location': None}, True)
driver = self.volume.driver
def cloneable_side_effect(url_location, image_meta):
return url_location == 'rbd://fee/fi/fo/fum'
with mock.patch.object(self.volume.driver, '_is_cloneable') \
as mock_is_cloneable, \
mock.patch.object(self.volume.driver, '_clone') as mock_clone, \
mock.patch.object(self.volume.driver, '_resize') \
as mock_resize:
mock_is_cloneable.side_effect = cloneable_side_effect
image_loc = ('rbd://bee/bi/bo/bum',
[{'url': 'rbd://bee/bi/bo/bum'},
{'url': 'rbd://fee/fi/fo/fum'}])
volume = {'name': 'vol1'}
image_meta = mock.sentinel.image_meta
image_service = mock.sentinel.image_service
actual = driver.clone_image(self.context,
volume,
image_loc,
image_meta,
image_service)
self.assertEqual(expected, actual)
self.assertEqual(2, mock_is_cloneable.call_count)
mock_clone.assert_called_once_with(volume,
'fi', 'fo', 'fum')
mock_is_cloneable.assert_called_with('rbd://fee/fi/fo/fum',
image_meta)
mock_resize.assert_called_once_with(volume)
def test_clone_multilocation_failure(self):
expected = ({}, False)
driver = self.volume.driver
with mock.patch.object(driver, '_is_cloneable', return_value=False) \
as mock_is_cloneable, \
mock.patch.object(self.volume.driver, '_clone') as mock_clone, \
mock.patch.object(self.volume.driver, '_resize') \
as mock_resize:
image_loc = ('rbd://bee/bi/bo/bum',
[{'url': 'rbd://bee/bi/bo/bum'},
{'url': 'rbd://fee/fi/fo/fum'}])
volume = {'name': 'vol1'}
image_meta = mock.sentinel.image_meta
image_service = mock.sentinel.image_service
actual = driver.clone_image(self.context,
volume,
image_loc,
image_meta,
image_service)
self.assertEqual(expected, actual)
self.assertEqual(2, mock_is_cloneable.call_count)
mock_is_cloneable.assert_any_call('rbd://bee/bi/bo/bum',
image_meta)
mock_is_cloneable.assert_any_call('rbd://fee/fi/fo/fum',
image_meta)
self.assertFalse(mock_clone.called)
self.assertFalse(mock_resize.called)
|
apache-2.0
| -407,074,156,635,504,500 | 40.270936 | 79 | 0.555721 | false |
coronary/RandomEpisode
|
depends/Lib/site-packages/tmdbsimple/people.py
|
1
|
7235
|
# -*- coding: utf-8 -*-
"""
tmdbsimple.people
~~~~~~~~~~~~~~~~~
This module implements the People, Credits, and Jobs functionality
of tmdbsimple.
Created by Celia Oakley on 2013-10-31.
:copyright: (c) 2013-2017 by Celia Oakley
:license: GPLv3, see LICENSE for more details
"""
from .base import TMDB
class People(TMDB):
"""
People functionality.
See: http://docs.themoviedb.apiary.io/#people
"""
BASE_PATH = 'person'
URLS = {
'info': '/{id}',
'movie_credits': '/{id}/movie_credits',
'tv_credits': '/{id}/tv_credits',
'combined_credits': '/{id}/combined_credits',
'external_ids': '/{id}/external_ids',
'images': '/{id}/images',
'changes': '/{id}/changes',
'popular': '/popular',
'latest': '/latest',
}
def __init__(self, id=0):
super(People, self).__init__()
self.id = id
def info(self, **kwargs):
"""
Get the general person information for a specific id.
Args:
append_to_response: (optional) Comma separated, any person method.
Returns:
            A dict representation of the JSON returned from the API.
"""
path = self._get_id_path('info')
response = self._GET(path, kwargs)
self._set_attrs_to_values(response)
return response
def movie_credits(self, **kwargs):
"""
Get the movie credits for a specific person id.
Args:
language: (optional) ISO 639-1 code.
append_to_response: (optional) Comma separated, any person method.
Returns:
            A dict representation of the JSON returned from the API.
"""
path = self._get_id_path('movie_credits')
response = self._GET(path, kwargs)
self._set_attrs_to_values(response)
return response
def tv_credits(self, **kwargs):
"""
Get the TV credits for a specific person id.
Args:
language: (optional) ISO 639-1 code.
append_to_response: (optional) Comma separated, any person method.
Returns:
            A dict representation of the JSON returned from the API.
"""
path = self._get_id_path('tv_credits')
response = self._GET(path, kwargs)
self._set_attrs_to_values(response)
return response
def combined_credits(self, **kwargs):
"""
Get the combined (movie and TV) credits for a specific person id.
To get the expanded details for each TV record, call the /credit method
with the provided credit_id. This will provide details about which
episode and/or season the credit is for.
Args:
language: (optional) ISO 639-1 code.
append_to_response: (optional) Comma separated, any person method.
Returns:
            A dict representation of the JSON returned from the API.
"""
path = self._get_id_path('combined_credits')
response = self._GET(path, kwargs)
self._set_attrs_to_values(response)
return response
def external_ids(self, **kwargs):
"""
Get the external ids for a specific person id.
Returns:
            A dict representation of the JSON returned from the API.
"""
path = self._get_id_path('external_ids')
response = self._GET(path, kwargs)
self._set_attrs_to_values(response)
return response
def images(self, **kwargs):
"""
Get the images for a specific person id.
Returns:
            A dict representation of the JSON returned from the API.
"""
path = self._get_id_path('images')
response = self._GET(path, kwargs)
self._set_attrs_to_values(response)
return response
def changes(self, **kwargs):
"""
Get the changes for a specific person id.
Changes are grouped by key, and ordered by date in descending order.
By default, only the last 24 hours of changes are returned. The maximum
number of days that can be returned in a single request is 14. The
language is present on fields that are translatable.
Args:
start_date: (optional) Expected format is 'YYYY-MM-DD'.
end_date: (optional) Expected format is 'YYYY-MM-DD'.
Returns:
            A dict representation of the JSON returned from the API.
"""
path = self._get_id_path('changes')
response = self._GET(path, kwargs)
self._set_attrs_to_values(response)
return response
def popular(self, **kwargs):
"""
Get the list of popular people on The Movie Database. This list
refreshes every day.
Args:
page: (optional) Minimum 1, maximum 1000.
Returns:
            A dict representation of the JSON returned from the API.
"""
path = self._get_path('popular')
response = self._GET(path, kwargs)
self._set_attrs_to_values(response)
return response
def latest(self, **kwargs):
"""
Get the latest person id.
Returns:
            A dict representation of the JSON returned from the API.
"""
path = self._get_path('latest')
response = self._GET(path, kwargs)
self._set_attrs_to_values(response)
return response
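# Illustrative usage sketch (added note, not part of the original module). The
# API key and person id below are placeholders; tmdbsimple is assumed to be
# configured as described in its README:
#
#     import tmdbsimple as tmdb
#     tmdb.API_KEY = 'YOUR_API_KEY'
#     person = tmdb.People(287)
#     response = person.info(append_to_response='movie_credits')
#     # fields such as response['name'] are also set as attributes on `person`
#     # by _set_attrs_to_values()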
class Credits(TMDB):
"""
Credits functionality.
See: http://docs.themoviedb.apiary.io/#credits
"""
BASE_PATH = 'credit'
URLS = {
'info': '/{credit_id}',
}
def __init__(self, credit_id):
super(Credits, self).__init__()
self.credit_id = credit_id
def info(self, **kwargs):
"""
Get the detailed information about a particular credit record. This is
currently only supported with the new credit model found in TV. These
ids can be found from any TV credit response as well as the tv_credits
and combined_credits methods for people.
The episodes object returns a list of episodes and are generally going
to be guest stars. The season array will return a list of season
numbers. Season credits are credits that were marked with the
"add to every season" option in the editing interface and are
assumed to be "season regulars".
Args:
language: (optional) ISO 639-1 code.
Returns:
            A dict representation of the JSON returned from the API.
"""
path = self._get_credit_id_path('info')
response = self._GET(path, kwargs)
self._set_attrs_to_values(response)
return response
class Jobs(TMDB):
"""
Jobs functionality.
See: http://docs.themoviedb.apiary.io/#jobs
"""
BASE_PATH = 'job'
URLS = {
'list': '/list',
}
def list(self, **kwargs):
"""
Get a list of valid jobs.
Returns:
            A dict representation of the JSON returned from the API.
"""
path = self._get_path('list')
response = self._GET(path, kwargs)
self._set_attrs_to_values(response)
return response
|
mit
| 481,468,421,621,754,600 | 27.710317 | 80 | 0.581617 | false |
chrisjrn/registrasion
|
registrasion/tests/controller_helpers.py
|
1
|
2034
|
from registrasion.controllers.cart import CartController
from registrasion.controllers.credit_note import CreditNoteController
from registrasion.controllers.invoice import InvoiceController
from registrasion.models import commerce
from django.core.exceptions import ObjectDoesNotExist
class TestingCartController(CartController):
def set_quantity(self, product, quantity, batched=False):
''' Sets the _quantity_ of the given _product_ in the cart to the given
_quantity_. '''
self.set_quantities(((product, quantity),))
def add_to_cart(self, product, quantity):
''' Adds _quantity_ of the given _product_ to the cart. Raises
ValidationError if constraints are violated.'''
try:
product_item = commerce.ProductItem.objects.get(
cart=self.cart,
product=product)
old_quantity = product_item.quantity
except ObjectDoesNotExist:
old_quantity = 0
self.set_quantity(product, old_quantity + quantity)
def next_cart(self):
if self.cart.status == commerce.Cart.STATUS_ACTIVE:
self.cart.status = commerce.Cart.STATUS_PAID
self.cart.save()
class TestingInvoiceController(InvoiceController):
def pay(self, reference, amount, pre_validate=True):
        ''' Testing method for simulating an invoice payment by the given
amount. '''
if pre_validate:
# Manual payments don't pre-validate; we should test that things
# still work if we do silly things.
self.validate_allowed_to_pay()
''' Adds a payment '''
commerce.PaymentBase.objects.create(
invoice=self.invoice,
reference=reference,
amount=amount,
)
self.update_status()
class TestingCreditNoteController(CreditNoteController):
def refund(self):
commerce.CreditNoteRefund.objects.create(
parent=self.credit_note,
reference="Whoops."
)
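# Illustrative sketch (added; not part of the original file). A test might
# drive these helpers roughly as below; the `cart`, `product`, `invoice` and
# `amount` objects are assumed to be created elsewhere in the test suite.
#
#     cart.add_to_cart(product, 2)
#     cart.next_cart()                     # flips the cart to STATUS_PAID
#     invoice.pay("a reference", amount)   # records a PaymentBase and updates status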
|
apache-2.0
| -5,205,401,486,481,097,000 | 31.285714 | 79 | 0.652901 | false |
xuru/pyvisdk
|
pyvisdk/do/host_internet_scsi_hba_target_set.py
|
1
|
1107
|
import logging
from pyvisdk.exceptions import InvalidArgumentError
########################################
# Automatically generated, do not edit.
########################################
log = logging.getLogger(__name__)
def HostInternetScsiHbaTargetSet(vim, *args, **kwargs):
'''A collection of one or more static targets or discovery addresses. At least one
of the arrays must be non-empty.'''
obj = vim.client.factory.create('ns0:HostInternetScsiHbaTargetSet')
# do some validation checking...
if (len(args) + len(kwargs)) < 0:
raise IndexError('Expected at least 1 arguments got: %d' % len(args))
required = [ ]
optional = [ 'sendTargets', 'staticTargets', 'dynamicProperty', 'dynamicType' ]
for name, arg in zip(required+optional, args):
setattr(obj, name, arg)
for name, value in kwargs.items():
if name in required + optional:
setattr(obj, name, value)
else:
raise InvalidArgumentError("Invalid argument: %s. Expected one of %s" % (name, ", ".join(required + optional)))
return obj
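# Hedged usage sketch (added for illustration; not part of the generated file).
# `vim` is assumed to be a connected pyvisdk service instance and the target
# objects are assumed to have been built with their matching factory functions:
#
#     target_set = HostInternetScsiHbaTargetSet(
#         vim,
#         sendTargets=[send_target],
#         staticTargets=[static_target],
#     )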
|
mit
| 2,289,050,678,539,192,600 | 31.588235 | 124 | 0.610659 | false |
ehooo/django_mqtt
|
test_web/settings.py
|
1
|
4373
|
"""
Django settings for web project.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.9/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
import sys
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '#b68qv#(v-g26k3qt_-1ufg-prvsw2p)7@ctea*n!36-w23bv1'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
DB_DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django_extensions',
'django_mqtt',
'django_mqtt.mosquitto.auth_plugin',
'django_mqtt.publisher',
]
FIXTURE_DIRS = [
os.path.join(BASE_DIR, 'test_web', 'fixtures')
]
MIDDLEWARE = (
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'test_web.urls'
MQTT_CERTS_ROOT = os.path.join(BASE_DIR, 'private')
MQTT_ACL_ALLOW = False
MQTT_ACL_ALLOW_ANONIMOUS = MQTT_ACL_ALLOW
MQTT_ALLOW_EMPTY_CLIENT_ID = False
MQTT_SESSION_TIMEOUT = 5
WSGI_APPLICATION = 'test_web.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.9/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:' if DB_DEBUG else os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.9/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.7/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, 'static')
LOGGING_LEVEL = 'DEBUG' if DEBUG else 'INFO'
if 'test' in sys.argv:
LOGGING_LEVEL = 'CRITICAL'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'formatters': {
'verbose': {
'format': '%(levelname)s %(asctime)s %(module)s %(process)d %(thread)d %(message)s'
},
'simple': {
'format': '%(levelname)s %(message)s'
},
},
'filters': {
'require_debug_true': {
'()': 'django.utils.log.RequireDebugTrue',
},
},
'handlers': {
'console': {
'level': LOGGING_LEVEL,
'class': 'logging.StreamHandler',
'formatter': 'simple'
}
},
'loggers': {
'django': {
'handlers': ['console'],
'propagate': True,
'filters': ['require_debug_true']
}
}
}
|
gpl-2.0
| 6,900,691,633,989,705,000 | 25.664634 | 95 | 0.641207 | false |
rlugojr/rekall
|
version.py
|
1
|
6722
|
#!/usr/bin/python
"""Global version file.
This program is used to manage versions. Prior to each release, please run it
with update.
"""
import argparse
import json
import os
import yaml
_VERSION_CODE = '''
import json
import os
import subprocess
try:
# We are looking for the git repo which contains this file.
MY_DIR = os.path.dirname(os.path.abspath(__file__))
except:
MY_DIR = None
def is_tree_dirty():
try:
return bool(subprocess.check_output(
["git", "diff", "--name-only"], stderr=subprocess.PIPE,
cwd=MY_DIR,
).splitlines())
except (OSError, subprocess.CalledProcessError):
return False
def get_version_file_path(version_file="version.yaml"):
try:
return os.path.join(subprocess.check_output(
["git", "rev-parse", "--show-toplevel"], stderr=subprocess.PIPE,
cwd=MY_DIR,
).strip(), version_file)
except (OSError, subprocess.CalledProcessError):
return None
def number_of_commit_since(version_file="version.yaml"):
"""Returns the number of commits since version.yaml was changed."""
try:
last_commit_to_touch_version_file = subprocess.check_output(
["git", "log", "--no-merges", "-n", "1", "--pretty=format:%H",
version_file], cwd=MY_DIR, stderr=subprocess.PIPE,
).strip()
all_commits = subprocess.check_output(
["git", "log", "--no-merges", "-n", "1000", "--pretty=format:%H"],
stderr=subprocess.PIPE, cwd=MY_DIR,
).splitlines()
return all_commits.index(last_commit_to_touch_version_file)
except (OSError, subprocess.CalledProcessError, ValueError):
return None
def get_current_git_hash():
try:
return subprocess.check_output(
["git", "log", "--no-merges", "-n", "1", "--pretty=format:%H"],
stderr=subprocess.PIPE, cwd=MY_DIR,
).strip()
except (OSError, subprocess.CalledProcessError):
return None
def tag_version_data(version_data, version_path="version.yaml"):
current_hash = get_current_git_hash()
# Not in a git repository.
if current_hash is None:
version_data["error"] = "Not in a git repository."
else:
version_data["revisionid"] = current_hash
version_data["dirty"] = is_tree_dirty()
version_data["dev"] = number_of_commit_since(
get_version_file_path(version_path))
# Format the version according to pep440:
pep440 = version_data["version"]
if int(version_data.get("post", 0)) > 0:
pep440 += ".post" + version_data["post"]
elif int(version_data.get("rc", 0)) > 0:
pep440 += ".rc" + version_data["rc"]
if version_data.get("dev", 0):
# A Development release comes _before_ the main release.
last = version_data["version"].rsplit(".", 1)
version_data["version"] = "%s.%s" % (last[0], int(last[1]) + 1)
pep440 = version_data["version"] + ".dev" + str(version_data["dev"])
version_data["pep440"] = pep440
return version_data
'''
ENV = {"__file__": __file__}
exec _VERSION_CODE in ENV
is_tree_dirty = ENV["is_tree_dirty"]
number_of_commit_since = ENV["number_of_commit_since"]
get_current_git_hash = ENV["get_current_git_hash"]
tag_version_data = ENV["tag_version_data"]
_VERSION_TEMPLATE = """
# Machine Generated - do not edit!
# This file is produced when the main "version.py update" command is run. That
# command copies this file to all sub-packages which contain
# setup.py. Configuration is maintain in version.yaml at the project's top
# level.
def get_versions():
return tag_version_data(raw_versions(), \"\"\"%s\"\"\")
def raw_versions():
return json.loads(\"\"\"
%s
\"\"\")
"""
def get_config_file(version_file="version.yaml"):
version_path = os.path.join(os.path.dirname(
os.path.abspath(__file__)), version_file)
return yaml.load(open(version_path).read()), version_path
def get_versions(version_file="version.yaml"):
result, version_path = get_config_file(version_file)
version_data = result["version_data"]
return tag_version_data(version_data), version_path
def escape_string(instr):
return instr.replace('"""', r'\"\"\"')
def update(args):
if (args.version is None and
args.post is None and
args.rc is None and
args.codename is None):
raise AttributeError("You must set something in this release.")
data, version_path = get_config_file(args.version_file)
version_data = data["version_data"]
if args.version:
version_data["version"] = args.version
if args.post:
version_data["post"] = args.post
if args.rc:
version_data["rc"] = args.rc
if args.codename:
version_data["codename"] = args.codename
# Write the updated version_data into the file.
with open(version_path, "wb") as fd:
fd.write(yaml.safe_dump(data, default_flow_style=False))
# Should not happen but just in case...
contents = _VERSION_TEMPLATE % (
escape_string(args.version_file),
escape_string(json.dumps(version_data, indent=4))) + _VERSION_CODE
# Now copy the static version files to all locations.
for path in data["dependent_versions"]:
current_dir = os.path.abspath(os.path.dirname(
os.path.abspath(__file__)))
version_path = os.path.abspath(os.path.join(current_dir, path))
if not os.path.relpath(version_path, current_dir):
raise TypeError("Dependent version path is outside tree.")
with open(version_path, "wb") as fd:
fd.write(contents)
def main():
parser = argparse.ArgumentParser()
parser.add_argument(
"--version_file", default="version.yaml",
help="Version configuration file.")
subparsers = parser.add_subparsers(help='sub-command help', dest='command')
update_parser = subparsers.add_parser("update", help="Update the version")
update_parser.add_argument(
"--version", help="Set to this new version.")
update_parser.add_argument(
"--post", help="Set to this new post release.")
update_parser.add_argument(
"--rc", help="Set to this new release candidate.")
update_parser.add_argument(
"--codename", help="Set to this new codename.")
subparsers.add_parser("version", help="Report the current version.")
args = parser.parse_args()
if args.command == "update":
update(args)
elif args.command == "version":
version_data, version_path = get_versions(args.version_file)
print "Scanning %s:\n%s" % (version_path, version_data)
if __name__ == "__main__":
main()
|
gpl-2.0
| -6,194,861,238,376,707,000 | 29.834862 | 79 | 0.626302 | false |
pleoni/game-of-life
|
plot/old/test_perf_mpi/life_perf_compilers.py
|
1
|
1863
|
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
from numpy import *
import sys
import datetime
datafile1="life_host_icc.out"
datafile2="life_host_gnu.out"
datafile3="life_host_pgi.out"
if len(sys.argv) > 1:
datafile=sys.argv[1]
plotfile="compilers_perf_eurora.png"
data1 = loadtxt(datafile1)
data2 = loadtxt(datafile2)
data3 = loadtxt(datafile3)
today = datetime.date.today()
fig = plt.figure() # opens a new figure
top = fig.add_subplot(211)
bottom = fig.add_subplot(212)
############# TOP
ICC_C1000 = data1[where((data1[:,0]==1) & (data1[:,5]==1000) ),:][0] # mpi 1 - Comp 1000
ICC_C0 = data1[where((data1[:,0]==1) & (data1[:,5]==0) ),:][0] # mpi 1 - comp 0
GNU_C1000 = data2[where((data2[:,0]==1) & (data2[:,5]==1000) ),:][0] # mpi 1 - Comp 1000
GNU_C0 = data2[where((data2[:,0]==1) & (data2[:,5]==0) ),:][0] # mpi 1 - comp 0
PGI_C1000 = data3[where((data3[:,0]==1) & (data3[:,5]==1000) ),:][0] # mpi 1 - Comp 1000
PGI_C0 = data3[where((data3[:,0]==1) & (data3[:,5]==0) ),:][0] # mpi 1 - comp 0
top.set_title(str(today) + ' life_hpc2 on eurora - NCOMP=1000')
top.grid()
top.set_xlabel('Lattice Size')
top.set_ylabel('time')
#top.set_yscale('log')
#top.legend()
top.plot(ICC_C1000[:,3],ICC_C1000[:,8],'-xr',GNU_C1000[:,3],GNU_C1000[:,8],'-xg',PGI_C1000[:,3],PGI_C1000[:,8],'-xc');
top.legend(('icc','gnu','pgi'), loc = 'upper left', shadow = False, prop={'size':9})
############# BOTTOM
bottom.set_title(str(today) + ' life_hpc2 on eurora - NCOMP=0')
bottom.grid()
bottom.set_xlabel('Lattice size')
bottom.set_ylabel('time')
bottom.plot(ICC_C0[:,3],ICC_C0[:,8],'-xr',GNU_C0[:,3],GNU_C0[:,8],'-xg',PGI_C0[:,3],PGI_C0[:,8],'-xc');
bottom.legend(('icc','gnu','pgi'), loc = 'upper left', shadow = False, prop={'size':9})
plt.subplots_adjust(hspace=0.5)
plt.savefig(plotfile)
#plt.show()
|
gpl-2.0
| 7,832,986,248,728,866,000 | 27.661538 | 118 | 0.609769 | false |
astr93/c_sghmc
|
c_sghmc/projectcode.py
|
1
|
21329
|
import numpy as np
import numpy.random
import sympy as sp
import seaborn as sns
import matplotlib.pyplot as plt
def hmc(U, gradU, M, epsilon, m, theta, mhtest=1):
"""Hamiltonian Monte-Carlo algorithm with an optional Metropolis-Hastings test
U is potential energy as a callable function
gradU is its gradient as a callable function
M is a mass matrix for kinetic energy
epsilon is the step size dt
m is the number of iterations
theta is the parameter of interest
    mhtest=1 is to include MH test by default - yes
"""
#draw momentum
r=numpy.random.normal(size=(np.size(theta),1))*np.sqrt(M)
theta0=theta
E0=r.T * M * r/2+U(theta)
#do leapfrog
for i in range(1,m+1):
r=r-gradU(theta)*epsilon/2
theta=theta+epsilon*r/M
r=r-gradU(theta)*epsilon/2
r=-r
#carry out MH test
if mhtest != 0:
Enew=r.T * M * r/2+U(theta)
if np.exp(E0-Enew)<numpy.random.uniform(0,1,(1,1)):
theta=theta0
newtheta=theta
return newtheta
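# Illustrative call (added comment; it mirrors the sampling loop further below,
# with placeholder values rather than the paper's settings):
#
#     theta = 0.0
#     draws = []
#     for _ in range(100):
#         theta = hmc(U, gradU, M=1, epsilon=0.1, m=50, theta=theta)
#         draws.append(float(theta))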
#Parameters for analysis (to replicate the paper)
nsample=80000 #number of iterations for the sample
xstep=0.01 #step size for true distribution
M=1 #mass
C=3 #constant for sghmc
epsilon=0.1 #dt stepsize term
m=50 #number of steps for Monte-Carlo
V=4 #estimate of Fisher Info for Bhat approximation in sghmc
numpy.random.seed(2017)
x=sp.symbols('x')
U = sp.symbols('U', cls=sp.Function)
U=sp.Matrix([-2* x**2 + x**4]) #define your potential energy here
x = sp.Matrix([x])
gradientU = sp.simplify(U.jacobian(x))
#cover sympy function object into a callable function
U=sp.lambdify(x,U)
gradU=sp.lambdify(x,gradientU)
#True distribution
plt.figure(1)
plt.subplot(211)
gridx=np.linspace(-3,3,6/xstep)
y=np.exp(-U(gridx))
plt.plot(gridx, np.reshape(y/np.sum(y)/xstep, (int(6/xstep), 1)) , 'bo')
pass
#hmc sampling algorithm
sampleshmc=np.zeros(shape=(nsample,1))
theta=0
for i in range(1,nsample+1):
theta=hmc(U,gradU,M,epsilon,m,theta)
sampleshmc[i-1]=theta
#function to access the precision of approximation
def comparison(y,samples):
"""Returns a euclidean distance as precision proxy
y is the true ditribution
samples are drawn using an MCMC algorithm
"""
y=np.reshape(y/np.sum(y)/xstep, (int(6/xstep), 1))
yh, xh= numpy.histogram(samples, bins=gridx) #compute a histogram for samples
yh=yh/np.sum(yh)/xstep
return np.sqrt(np.sum((yh[:,None]-y[1:])**2)) #euc distance between the two
#hmc precision
comparison(y,sampleshmc)
#normalized histogram of hmc drawn samples
sns.distplot(sampleshmc)
pass
def sghmc(U,gradU,M,epsilon,m,theta,C,V):
"""Stochastic Gradient Hamiltonian Monte-Carlo algorithm
U is potential energy as a callable function
gradU is its gradient as a callable function (noisy)
M is a mass matrix for kinetic energy
epsilon is the step size dt
m is the number of iterations
theta is the parameter of interest
C is a user defined constant
V is a Fisher info approximation
"""
#draw a momentum and compute Bhat
r=numpy.random.standard_normal(size=(np.size(theta),1))*np.sqrt(M)
Bhat=0.5*V*epsilon
Ax=np.sqrt(2*(C-Bhat)*epsilon)
#do leapfrog
for i in range(1,m+1):
r=r-gradU(theta)*epsilon-r*C*epsilon+numpy.random.standard_normal(size=(1,1))*Ax
theta=theta+(r/M)*epsilon
newtheta=theta
return newtheta
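# Illustrative call (added comment; the only changes relative to hmc() are the
# friction constant C and the Fisher-information proxy V, values are
# placeholders):
#
#     theta = 0.0
#     for _ in range(100):
#         theta = sghmc(U, gradU, M=1, epsilon=0.1, m=50, theta=theta, C=3, V=4)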
#sghmc sampling algorithm (pure Python)
samplessghmc=np.zeros(shape=(nsample,1))
theta=0
for i in range(1,nsample+1):
theta=sghmc(U,gradU,M,epsilon,m,theta,C,V)
samplessghmc[i-1]=theta
#pure sghmc precision
comparison(y,samplessghmc)
#import a wrapped in pybind11 c++ implementation of sghmc algorithm
import cppimport
sghwrap=cppimport.imp("sghmcwrap")
#sghmc sampling algorithm (compilation in C++)
samplessghmc_c=np.zeros(shape=(nsample,1))
theta=0
for i in range(1,nsample+1):
theta=sghwrap.sghmc(U,gradU,M,epsilon,m,theta,C,V)
samplessghmc_c[i-1]=theta
#c++ sghmc precision
comparison(y,samplessghmc_c)
import numba
from numba import jit
from numba import float64
#prepare a just-in-time compiled function calling C++ sghmc algorithm
@jit(float64[:](float64, float64, float64, float64, float64, float64))
def sampling(nsample,M,epsilon,m,C,V):
theta=0
for i in range(1,nsample+1):
theta=sghwrap.sghmc(U,gradU,M,epsilon,m,theta,C,V)
samplessghmc_numba[i-1]=theta
return samplessghmc_numba
#sghmc sampling algorithm (compilation in C++ of a jitted function)
samplessghmc_numba=np.zeros(shape=(nsample,1))
samplessghmc_numba=sampling(nsample,M,epsilon,m,C,V)
#jitted c++ sghmc precision
comparison(y,samplessghmc_numba)
#normalized histogram of sghmc drawn samples
import seaborn as sns
sns.distplot(samplessghmc_numba)
pass
%load_ext Cython
import scipy.io
import scipy
import scipy.linalg as la
import scipy.sparse
import urllib.request
#call "Australian credit" dataset for a Bayesian Linear Regression analysis
#Bache, K. and Lichman, M. UCI machine learning repository,2013. URL http://archive.ics.uci. edu/ml.
filename = 'australian'
url = 'https://archive.ics.uci.edu/ml/machine-learning-databases/statlog/australian/australian.dat'
urllib.request.urlretrieve(url, filename)
data = np.loadtxt(filename)
#Parameters for BLR
alpha=0.01 #sigma of prior normal
nstepsunscaled=1000 #unscaled number of steps for Monte-Carlo
scaleHCM=2 #ratio of size of steps for integration to their number
niters=6000 #number of iterations
scale_StepSize=0.5 #default 0.5 for sigma=0.01
m = np.round(nstepsunscaled/scaleHCM) #scaled number of steps for Monte-Carlo
BurnIn = 1000 #number of iteration to use for burn in
StepSize = 0.1 #unscaled dt/epsilon step size for dynamics
StepSize = scale_StepSize*StepSize*scaleHCM; #scaled dt/epsilon step size for dynamics
Poly_Order = 1 #order of polynomial to fit
numpy.random.seed(2017)
Xraw=data
Y=Xraw[:,-1] #to test on
Xraw = np.delete(Xraw, -1, 1) #leave only the data for training
# Normalize Data
N,D=Xraw.shape
Xraw=(Xraw-np.mean(Xraw,0))/np.std(Xraw,0)
# Create Polynomial Basis
X = np.ones(shape=(N,1))
for i in range(Poly_Order):
X = np.concatenate((X,Xraw**(i+1)),1)
N,D = X.shape
Mass = np.eye(D)
InvMass = scipy.sparse.csr_matrix(la.inv(Mass)) #find inverse of Mass
# Set initial values of w
w = np.zeros(shape=(D,1))
ws = np.zeros(shape=(niters-BurnIn,D))
def LogNormPDF(xs,mu,sigma):
"""LogPrior calculcation as a LogNormal distribution
xs are the values (Dx1)
mu are the means (Dx1)
sigma is the cov matrix (Dx1 as diag)
"""
if xs.shape[1] > 1:
xs = xs.T
if mu.shape[1] > 1:
mu = mu.T
D = max(xs.shape)
return sum( -np.ones(shape=(D,1))*(0.5*np.log(2*np.pi*sigma)) - ((xs-mu)**2)/(2*(np.ones(shape=(D,1))*sigma)) )
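# Worked example (added comment; values are arbitrary): for a single dimension
# with mu=0 and sigma=0.01, the call below returns
# -0.5*log(2*pi*0.01) - 0.5**2/(2*0.01), i.e. the log-density of N(0, 0.01)
# evaluated at 0.5.
#
#     LogNormPDF(np.array([[0.5]]), np.array([[0.0]]), 0.01)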
#Compute energy and joint loglikelihood for current w
LogPrior = LogNormPDF(np.zeros(shape=(1,D)),w,alpha)
f = X@w
LogLikelihood = f.T@Y - np.sum(np.log(1+np.exp(f)))
CurrentLJL = LogLikelihood + LogPrior
Proposed = 0
Accepted = 0
#Pure Python version of HMC BLR
for iteration in range(niters):
#draw momentum and stepsize
r = (numpy.random.standard_normal(size=(1,D))@Mass).T
r0 = r
wnew = w
Proposed = Proposed + 1
RandomStep = np.round(np.random.rand(1)*(m-1))+1
#do leapfrog
mark = 0
f = X@wnew
r = r + (StepSize/2)*( X.T@( Y[:,None] - (np.exp(f)/(1+np.exp(f))) ) - np.eye(D)*(1/alpha)@wnew)
for step in range(int(RandomStep)-1):
#make sure everything is well-behaved
if (np.isnan(np.sum(r)) or np.isnan(np.sum(wnew)) or np.isinf(np.sum(r)) or np.isinf(np.sum(wnew))):
mark = 1
break
wnew = wnew + StepSize*(r)
f = X@wnew
r = r + StepSize*( X.T@( Y[:,None] - (1./(1+np.exp(-f))) ) - np.eye(D)*(1/alpha)@wnew )
r = np.real(r)
f = np.real(f)
if (mark == 0):
wnew = wnew + StepSize*(r)
f = X@wnew
r = r + (StepSize/2)*( X.T@( Y[:,None] - (np.exp(f)/(1+np.exp(f))) ) - np.eye(D)*(1/alpha)@wnew )
else:
r = r - (StepSize/2)*( X.T@( Y[:,None] - (np.exp(f)/(1+np.exp(f))) ) - np.eye(D)*(1/alpha)@wnew )
#find proposed energy H and train likelihood
LogPrior = LogNormPDF(np.zeros(shape=(1,D)),wnew,alpha)
f = X@wnew
LogLikelihood = f.T@Y - np.sum(np.log(1+np.exp(f)))
ProposedLJL = LogLikelihood + LogPrior
ProposedH = -ProposedLJL + (r.T@InvMass@r)/2
#compute current H value
CurrentH = -CurrentLJL + (r0.T@InvMass@r0)/2
#Accept according to Metropolis-Hastings ratio
MH = -ProposedH + CurrentH
if (MH > 0) or (MH > np.log(numpy.random.rand(1))):
CurrentLJL = ProposedLJL
w = wnew
Accepted = Accepted + 1
#Now save samples after burn in
if iteration > BurnIn:
ws[[iteration-BurnIn-1],:] = w.T
elif np.mod(iteration,50) == 0:
Accepted = 0
Proposed = 0
#Fit the model and find R squared
bhat=np.mean(ws,0)
Yhat=X@bhat[:,None]
SSR=np.sqrt(np.sum((Y[:,None]-Yhat)**2))
TSS=np.sum((Y-np.mean(Y,0))**2)
Rsq=1-SSR/TSS
Rsq
Proposed=0
Accepted=0
%%cython -a
import cython
import numpy as np
cimport numpy as np
import numpy.random
cdef inline int int_max(int a, int b): return a if a >= b else b #a quicker version of max
@cython.boundscheck(False)
@cython.wraparound(False)
@cython.initializedcheck(False)
@cython.cdivision(True)
cdef LogNormPDF_cython(np.ndarray[np.float64_t, ndim=2] O, np.ndarray[np.float64_t, ndim=2] xs, np.ndarray[np.float64_t, ndim=2] mu, double sigma):
"""LogPrior calculcation as a LogNormal distribution
xs are the values (Dx1)
mu are the means (Dx1)
sigma is the cov matrix (Dx1 as diag)
"""
if xs.shape[1] > 1:
xs = xs.T
if mu.shape[1] > 1:
mu = mu.T
cdef int D = int_max(xs.shape[0],xs.shape[1])
return sum( -O*(0.5*np.log(2*np.pi*sigma)) - ((xs-mu)**2)/(2*(O)*sigma))
@cython.boundscheck(False)
@cython.wraparound(False)
@cython.initializedcheck(False)
@cython.cdivision(True)
cdef momentupdate(np.ndarray[np.float64_t, ndim=2] E, np.ndarray[np.float64_t, ndim=2] X, np.ndarray[np.float64_t, ndim=1] Y, np.ndarray[np.float64_t, ndim=2] f, int D, double alpha, np.ndarray[np.float64_t, ndim=2] wnew):
"""Update momentum given current data
"""
cdef np.ndarray[np.float64_t, ndim=2] g=np.exp(f)
return ( np.dot(X.T,( Y[:,None] - (g/(1+g)) )) - E*(1/alpha)@wnew)
@cython.boundscheck(False)
@cython.wraparound(False)
@cython.initializedcheck(False)
@cython.cdivision(True)
cdef lfrogupdate(np.ndarray[np.float64_t, ndim=2] E, np.ndarray[np.float64_t, ndim=2] X, np.ndarray[np.float64_t, ndim=1] Y, np.ndarray[np.float64_t, ndim=2] f, int D, double alpha, np.ndarray[np.float64_t, ndim=2] wnew):
"""Update momentum given current data in leapfrog iterations
"""
return ( np.dot(X.T,( Y[:,None] - (1./(1+np.exp(-f))) )) - E*(1/alpha)@wnew)
@cython.boundscheck(False)
@cython.wraparound(False)
@cython.initializedcheck(False)
def BLR_hmc_cython(int D, np.ndarray[np.float64_t, ndim=2] Mass, np.ndarray[np.float64_t, ndim=2] w, double m, np.ndarray[np.float64_t, ndim=2] X, np.ndarray[np.float64_t, ndim=1] Y, np.ndarray[np.float64_t, ndim=2] f, double alpha, double StepSize, int BurnIn, int niters, double CurrentLJL):
"""Bayesian Linear Regression using HMC algorithm implemented using Cython
D is shape of data
Mass is the mass matrix of kinetic energy
w is a vector of coefficients to estimate
m is number of iterations for Monte-Carlo
X is the explanatory data matrix
Y is the explained vector
f fit given initial coefficients (0s)
alpha is variance of prior
StepSize dt for dynamics
BurnIn number of iteration to use for burn in
niters number of iteration for Monte-Carlo
CurrentLJL initial state of energy
"""
cdef int Proposed=0
cdef int Accepted=0
cdef int iteration, mark, step
cdef np.ndarray[np.float64_t, ndim=2] ws = np.zeros(shape=(niters-BurnIn,D)) #coefficients to save
cdef np.ndarray[np.float64_t, ndim=2] wnew
cdef np.ndarray[np.float64_t, ndim=2] r, r0
cdef np.ndarray[np.float64_t, ndim=1] LogPrior, LogLikelihood, ProposedLJL, RandomStep
cdef np.ndarray[np.float64_t, ndim=2] MH, ProposedH, CurrentH
cdef np.ndarray[np.float64_t, ndim=2] Z=np.zeros(shape=(1,D))
cdef np.ndarray[np.float64_t, ndim=2] O=np.ones(shape=(D,1))
cdef np.ndarray[np.float64_t, ndim=2] E=np.eye(D)
for iteration in range(niters):
#draw momentum
r = (np.dot(numpy.random.standard_normal(size=(1,D)),Mass)).T
r0 = r
wnew = w
Proposed = Proposed + 1
RandomStep = np.round(np.random.rand(1)*(m-1))+1
#do leapfrog
mark = 0
f = np.dot(X,wnew)
r = r + (StepSize/2)*momentupdate(E,X,Y,f,D,alpha,wnew)
for step in range(np.int(RandomStep)-1):
#make sure everything is well-behaved
if (np.isnan(np.sum(r)) or np.isnan(np.sum(wnew)) or np.isinf(np.sum(r)) or np.isinf(np.sum(wnew))):
mark = 1
break
wnew = wnew + StepSize*(r)
f = np.dot(X,wnew)
r = r + StepSize*lfrogupdate(E,X,Y,f,D,alpha,wnew)
r = np.real(r)
f = np.real(f)
if (mark == 0):
wnew = wnew + StepSize*(r)
f = np.dot(X,wnew)
r = r + (StepSize/2)*momentupdate(E,X,Y,f,D,alpha,wnew)
else:
r = r - (StepSize/2)*momentupdate(E,X,Y,f,D,alpha,wnew)
#find proposed energy H and train likelihood
LogPrior = LogNormPDF_cython(O,Z,wnew,alpha)
f = np.dot(X,wnew)
LogLikelihood = np.dot(f.T,Y) - np.sum(np.log(1+np.exp(f)))
ProposedLJL = LogLikelihood + LogPrior
ProposedH = -ProposedLJL + (np.dot(np.dot(r.T,Mass),r))/2
#compute current H value
CurrentH = -CurrentLJL + (np.dot(np.dot(r0.T,Mass),r0))/2
#Accept according to Metropolis-Hastings ratio
MH = -ProposedH + CurrentH
if (MH > 0) or (MH > np.log(numpy.random.rand(1))):
CurrentLJL = ProposedLJL
w = wnew
Accepted = Accepted + 1
#Now save samples after burn in
if iteration > BurnIn:
ws[iteration-BurnIn-1,:] = np.ravel(w)
elif np.mod(iteration,50) == 0:
Accepted = 0
Proposed = 0
return ws
BRLHMCcoeffs=BLR_hmc_cython(D, Mass, w, m, X, Y, f, alpha, StepSize, BurnIn, niters, CurrentLJL)
#Fit the model and find R squared
bhat=np.mean(BRLHMCcoeffs,0)
Yhat=X@bhat[:,None]
SSR=np.sqrt(np.sum((Y[:,None]-Yhat)**2))
TSS=np.sum((Y-np.mean(Y,0))**2)
Rsq=1-SSR/TSS
Rsq
#Pure Python version of SGHMC BLR
C=3 #user-chosen const s.t. C>=B
Bhat=0 #for simplicity, but ideally Bhat=0.5*Vhat*dt with Vhat estimated via empirical Fisher Info
for iteration in range(niters):
#draw momentum
r = (numpy.random.standard_normal(size=(1,D))@Mass).T
r0 = r
wnew = w
RandomStep = np.round(np.random.rand(1)*(m-1))+1
#do leapfrog
mark = 0
f = X@wnew
J = np.sqrt( 2 * (C-Bhat) * StepSize)
for step in range(int(RandomStep)-1):
#make sure everything is well-behaved
if (np.isnan(np.sum(r)) or np.isnan(np.sum(wnew)) or np.isinf(np.sum(r)) or np.isinf(np.sum(wnew))):
mark = 1
break
wnew = wnew + StepSize*(r)
f = X@wnew
r = (r + StepSize*( X.T@( Y[:,None] - (1./(1+np.exp(-f))) )
- np.eye(D)*(1/alpha)@wnew )-StepSize*C*(r)+numpy.random.standard_normal(size=(D,1))*J)
r = np.real(r)
f = np.real(f)
if (mark == 0):
wnew = wnew + StepSize*(r)
f = X@wnew
#find proposed total energy H and train likelihood
LogPrior = LogNormPDF(np.zeros(shape=(1,D)),wnew,alpha)
f = X@wnew
LogLikelihood = f.T@Y - np.sum(np.log(1+np.exp(f))) #training likelihood
ProposedLJL = LogLikelihood + LogPrior
w=wnew
#Now save samples after burn in
if iteration > BurnIn:
ws[iteration-BurnIn-1,:] = w.ravel()
bhat=np.mean(ws,0)
Yhat=X@bhat[:,None]
SSR=np.sqrt(np.sum((Y[:,None]-Yhat)**2))
TSS=np.sum((Y-np.mean(Y,0))**2)
Rsq=1-SSR/TSS
Rsq
C=3 #user-chosen const s.t. C>=B
Bhat=0 #for simplicity, but ideally Bhat=0.5*Vhat*dt with Vhat estimated via empirical Fisher Info
%%cython -a
import cython
import numpy as np
cimport numpy as np
import numpy.random
cdef inline int int_max(int a, int b): return a if a >= b else b
@cython.boundscheck(False)
@cython.wraparound(False)
@cython.initializedcheck(False)
@cython.cdivision(True)
cdef LogNormPDF_cython(np.ndarray[np.float64_t, ndim=2] O, np.ndarray[np.float64_t, ndim=2] xs, np.ndarray[np.float64_t, ndim=2] mu, double sigma):
"""LogPrior calculcation as a LogNormal distribution
xs are the values (Dx1)
mu are the means (Dx1)
sigma is the cov matrix (Dx1 as diag)
"""
if xs.shape[1] > 1:
xs = xs.T
if mu.shape[1] > 1:
mu = mu.T
cdef int D = int_max(xs.shape[0],xs.shape[1])
return sum( -O*(0.5*np.log(2*np.pi*sigma)) - ((xs-mu)**2)/(2*(O)*sigma))
@cython.boundscheck(False)
@cython.wraparound(False)
@cython.initializedcheck(False)
@cython.cdivision(True)
cdef lfrogupdate(np.ndarray[np.float64_t, ndim=2] E, np.ndarray[np.float64_t, ndim=2] X, np.ndarray[np.float64_t, ndim=1] Y, np.ndarray[np.float64_t, ndim=2] f, int D, double alpha, np.ndarray[np.float64_t, ndim=2] wnew):
"""Update momentum given current data in leapfrog iterations
"""
return ( np.dot(X.T,( Y[:,None] - (1./(1+np.exp(-f))) )) - E*(1/alpha)@wnew)
@cython.boundscheck(False)
@cython.wraparound(False)
@cython.initializedcheck(False)
def BLR_sghmc_cython(int C, int Bhat, int D, np.ndarray[np.float64_t, ndim=2] Mass, np.ndarray[np.float64_t, ndim=2] w, double m, np.ndarray[np.float64_t, ndim=2] X, np.ndarray[np.float64_t, ndim=1] Y, np.ndarray[np.float64_t, ndim=2] f, double alpha, double StepSize, int BurnIn, int niters, double CurrentLJL):
"""Bayesian Linear Regression using HMC algorithm implemented using Cython
C is a user specified constant
Bhat is an approximate set to 0 here (it should converge to 0)
D is shape of data
Mass is the mass matrix of kinetic energy
w is a vector of coefficients to estimate
m is number of iterations for Monte-Carlo
X is the explanatory data matrix
Y is the explained vector
f fit given initial coefficients (0s)
alpha is variance of prior
StepSize dt for dynamics
BurnIn number of iteration to use for burn in
niters number of iteration for Monte-Carlo
CurrentLJL initial state of energy
"""
cdef int iteration, mark, step
cdef np.ndarray[np.float64_t, ndim=2] ws = np.zeros(shape=(niters-BurnIn,D)) #coefficients to save
cdef np.ndarray[np.float64_t, ndim=2] wnew
cdef np.ndarray[np.float64_t, ndim=2] r, r0
cdef np.ndarray[np.float64_t, ndim=1] LogPrior, LogLikelihood, ProposedLJL, RandomStep
cdef np.ndarray[np.float64_t, ndim=2] Z=np.zeros(shape=(1,D))
cdef np.ndarray[np.float64_t, ndim=2] O=np.ones(shape=(D,1))
cdef np.ndarray[np.float64_t, ndim=2] E=np.eye(D)
cdef double J = np.sqrt( 2 * (C-Bhat) * StepSize) #sd for friction
for iteration in range(niters):
#draw momentum
r = (np.dot(numpy.random.standard_normal(size=(1,D)),Mass)).T
r0 = r
wnew = w
RandomStep = np.round(np.random.rand(1)*(m-1))+1
#do leapfrog
mark = 0
f = np.dot(X,wnew)
for step in range(int(RandomStep)-1):
#make sure everything is well-behaved
if (np.isnan(np.sum(r)) or np.isnan(np.sum(wnew)) or np.isinf(np.sum(r)) or np.isinf(np.sum(wnew))):
mark = 1
break
wnew = wnew + StepSize*(r)
f = np.dot(X,wnew)
r = (r + StepSize*lfrogupdate(E,X,Y,f,D,alpha,wnew)-StepSize*C*(r)+numpy.random.standard_normal(size=(D,1))*J)
r = np.real(r)
f = np.real(f)
if (mark == 0):
wnew = wnew + StepSize*(r)
f = np.dot(X,wnew)
#find proposed total energy H and train likelihood
LogPrior = LogNormPDF_cython(O,Z,wnew,alpha)
f = np.dot(X,wnew)
LogLikelihood = np.dot(f.T,Y) - np.sum(np.log(1+np.exp(f))) #training likelihood
ProposedLJL = LogLikelihood + LogPrior
w=wnew
#Now save samples after burn in
        if iteration >= BurnIn:
            ws[iteration-BurnIn,:] = w.ravel()
return ws
BRLSGHMCcoeffs=BLR_sghmc_cython(C, Bhat, D, Mass, w, m, X, Y, f, alpha, StepSize, BurnIn, niters, CurrentLJL)
# 663 Final Project Second Report
bhat=np.mean(BRLSGHMCcoeffs,0)
Yhat=X@bhat[:,None]
SSR=np.sum((Y[:,None]-Yhat)**2)  # residual sum of squares (no sqrt, so Rsq below is a proper R^2)
TSS=np.sum((Y-np.mean(Y,0))**2)
Rsq=1-SSR/TSS
Rsq
|
mit
| -5,523,963,265,161,026,000 | 30.977511 | 312 | 0.638755 | false |
erudit/eruditorg
|
eruditorg/apps/public/site_messages/migrations/0002_auto_20190425_1305.py
|
1
|
1845
|
# Generated by Django 2.0.13 on 2019-04-25 17:05
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("site_message", "0001_initial"),
]
operations = [
migrations.CreateModel(
name="TargetSite",
fields=[
(
"id",
models.AutoField(
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
),
),
(
"label",
models.CharField(
help_text="Site cible, par exemple <em>Public</em>, <em>Tableau de bord des revues</em> ou <em>Tableau de bord des bibliothèques</em>.",
max_length=64,
verbose_name="Site cible",
),
),
],
options={
"verbose_name": "Site cible",
"verbose_name_plural": "Sites cibles",
},
),
migrations.AlterField(
model_name="sitemessage",
name="setting",
field=models.CharField(
blank=True,
help_text="Si le site contient un réglage avec ce nom et que ce réglage est à <em>True</em>, le message sera affiché.",
max_length=64,
null=True,
verbose_name="Réglage",
),
),
migrations.AddField(
model_name="sitemessage",
name="target_sites",
field=models.ManyToManyField(
related_name="_sitemessage_target_sites_+",
to="site_message.TargetSite",
verbose_name="Sites cibles",
),
),
]
|
gpl-3.0
| -5,972,333,534,437,446,000 | 31.839286 | 173 | 0.449157 | false |
naterh/chipsec
|
source/tool/chipsec/utilcmd/pci_cmd.py
|
1
|
4384
|
#!/usr/local/bin/python
#CHIPSEC: Platform Security Assessment Framework
#Copyright (c) 2010-2015, Intel Corporation
#
#This program is free software; you can redistribute it and/or
#modify it under the terms of the GNU General Public License
#as published by the Free Software Foundation; Version 2.
#
#This program is distributed in the hope that it will be useful,
#but WITHOUT ANY WARRANTY; without even the implied warranty of
#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#GNU General Public License for more details.
#
#You should have received a copy of the GNU General Public License
#along with this program; if not, write to the Free Software
#Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
#Contact information:
#[email protected]
#
#
# usage as a standalone utility:
#
## \addtogroup standalone
#chipsec_util pci
#-----
#~~~
#chipsec_util pci enumerate
#chipsec_util pci <bus> <device> <function> <offset> <width> [value]
#''
# Examples:
#''
# chipsec_util pci enumerate
# chipsec_util pci 0 0 0 0x88 4
# chipsec_util pci 0 0 0 0x88 byte 0x1A
# chipsec_util pci 0 0x1F 0 0xDC 1 0x1
# chipsec_util pci 0 0 0 0x98 dword 0x004E0040
#~~~
__version__ = '1.0'
import os
import sys
import time
import chipsec_util
from chipsec.logger import *
from chipsec.file import *
from chipsec.hal.pci import *
usage = "chipsec_util pci enumerate\n" + \
"chipsec_util pci <bus> <device> <function> <offset> <width> [value]\n" + \
"Examples:\n" + \
" chipsec_util pci enumerate\n" + \
" chipsec_util pci 0 0 0 0x88 4\n" + \
" chipsec_util pci 0 0 0 0x88 byte 0x1A\n" + \
" chipsec_util pci 0 0x1F 0 0xDC 1 0x1\n" + \
" chipsec_util pci 0 0 0 0x98 dword 0x004E0040\n\n"
# ###################################################################
#
# PCIe Devices and Configuration Registers
#
# ###################################################################
def pci(argv):
if 3 > len(argv):
print usage
return
op = argv[2]
t = time.time()
if ( 'enumerate' == op ):
logger().log( "[CHIPSEC] Enumerating available PCIe devices.." )
print_pci_devices( chipsec_util._cs.pci.enumerate_devices() )
logger().log( "[CHIPSEC] (pci) time elapsed %.3f" % (time.time()-t) )
return
try:
bus = int(argv[2],16)
device = int(argv[3],16)
function = int(argv[4],16)
offset = int(argv[5],16)
if 6 == len(argv):
width = 1
else:
if 'byte' == argv[6]:
width = 1
elif 'word' == argv[6]:
width = 2
elif 'dword' == argv[6]:
width = 4
else:
width = int(argv[6])
except Exception as e :
print usage
return
if 8 == len(argv):
value = int(argv[7], 16)
if 1 == width:
chipsec_util._cs.pci.write_byte( bus, device, function, offset, value )
elif 2 == width:
chipsec_util._cs.pci.write_word( bus, device, function, offset, value )
elif 4 == width:
chipsec_util._cs.pci.write_dword( bus, device, function, offset, value )
else:
print "ERROR: Unsupported width 0x%x" % width
return
logger().log( "[CHIPSEC] writing PCI %d/%d/%d, off 0x%02X: 0x%X" % (bus, device, function, offset, value) )
else:
if 1 == width:
pci_value = chipsec_util._cs.pci.read_byte(bus, device, function, offset)
elif 2 == width:
pci_value = chipsec_util._cs.pci.read_word(bus, device, function, offset)
elif 4 == width:
pci_value = chipsec_util._cs.pci.read_dword(bus, device, function, offset)
else:
print "ERROR: Unsupported width 0x%x" % width
return
logger().log( "[CHIPSEC] reading PCI B/D/F %d/%d/%d, off 0x%02X: 0x%X" % (bus, device, function, offset, pci_value) )
logger().log( "[CHIPSEC] (pci) time elapsed %.3f" % (time.time()-t) )
chipsec_util.commands['pci'] = {'func' : pci , 'start_driver' : True, 'help' : usage }
|
gpl-2.0
| 8,458,805,531,248,735,000 | 30.488889 | 125 | 0.551323 | false |
jocassid/PyDataGenerator
|
testDataGenerator.py
|
1
|
2454
|
#!/usr/bin/python3
import unittest
from DataGenerator import \
InMemoryDataSource, \
Person, \
Household
import locale
# View all available locales
# locale -a
#
# View current locale settings
# locale
#
# Add locale to system
# sudo locale-gen de_DE.utf8
# class TestDataSource(unittest.TestCase):
# pass
#
#
class TestInMemoryDataSource(unittest.TestCase):
def testStuff(self):
dataSource = InMemoryDataSource()
values = ['John', 'Robert', 'William', 'Andrew']
dataSource.loadDataItem('maleNames','en_us', values=values)
values = ['Elizabeth', 'Jennifer', 'Mary', 'Ann']
dataSource.loadDataItem('femaleNames', 'en_us', values=values)
values = ['Smith', 'Jones', 'Thomas', 'Davis']
dataSource.loadDataItem('lastNames', 'en_us', values=values)
person = Person(dataSource)
print(str(person))
for i in range(5):
family = Household(dataSource)
print(str(family))
# dataSource.loadDataItems(
# './locales',
# ['maleFirstNames'])
#
# personGenerator = dg.PersonGenerator(dataSource)
# for i in range(10):
# print(personGenerator.next(sex='M'))
# class TestSqliteDataSource(unittest.TestCase):
#
# def testStuff(self):
# dataSource = dg.SqliteDataSource()
# dataSource.open('./test.sqlite3')
# dataSource.loadDataItems(
# './locales',
# ['maleFirstNames'])
#
# personGenerator = dg.PersonGenerator(dataSource)
# for i in range(10):
# print(personGenerator.next(sex='M'))
#
# dataSource.close()
# class TestDataSource(unittest.TestCase):
#
# def testload_currentLocale(self):
# locale.setlocale(locale.LC_ALL, 'de_DE.utf8')
#
# # print(str(locale.localeconv()))
# # print(str(locale.getdefaultlocale()))
# #print(str(locale.getlocale()))
# dg.DATA_SOURCE.load(
# './locales',
# './data.sqlite',
# None,
# ['maleFirstNames'])
#
#
# class TestRandomFirstName(unittest.TestCase):
#
# def setUp(self):
# pass
#
# def testDefaultLocal(self):
# print(dg.randomFirstName(sex='M'))
if __name__ == '__main__':
unittest.main()
|
mit
| 8,514,053,047,799,457,000 | 23.54 | 70 | 0.55705 | false |
ciaran2/pyterrain
|
pyterrain.py
|
1
|
4479
|
#!/usr/bin/python
from __future__ import print_function, division, unicode_literals
import argparse
import itertools
import sys
import noise
import PIL.Image
def main(width=800, height=600, scale=3, xoffset=0, yoffset=0, num_octaves=6, outfile='out.png',
min_height=-10000, max_height=10000, **kwargs):
worldmap = [[(0,0,0) for i in range(width)] for i in range(height)]
freq_mod = scale/width
for y in range(height):
for x in range(width):
nx = x * freq_mod + xoffset
ny = y * freq_mod + yoffset
land_level = (max_height - min_height) * (noise.snoise2(nx, ny, octaves=num_octaves) * 0.5 + 0.5) + min_height
worldmap[y][x] = get_coloring(land_level, min_height, max_height, **kwargs)
im = PIL.Image.new('RGB',(width,height))
im.putdata(list(itertools.chain(*worldmap)))
im.save(outfile)
def get_coloring(land_level, min_height, max_height, mode='terrain', **kwargs):
if mode == 'terrain':
return terrain_color(land_level, min_height, max_height, **kwargs)
elif mode == 'height':
return height_color(land_level, min_height, max_height, **kwargs)
def terrain_color(land_level, min_height, max_height, sea_level=1000, sea_color=(0,0,255),
coast_color=(0,100,255), shore_color=(244,164,96), land_color=(183,123,72),
mountain_color=(122,102,78), coast_diff=1000, shore_diff=1000, mountain_height=15000,
**kwargs):
if land_level > sea_level:
if land_level - min_height < mountain_height:
if (land_level - sea_level) < shore_diff:
return shore_color
else:
return land_color
else:
return mountain_color
elif (sea_level - land_level) < coast_diff:
return coast_color
else:
return sea_color
def height_color(land_level, min_height, max_height, **kwargs):
h = int(2**8 * ((land_level - min_height) / (max_height - min_height)))
return h, h, h
def color(s):
if s.startswith('#'):
ss = s[1:]
if len(ss) == 3:
return tuple(int(c * 2, base=16) for c in ss)
elif len(ss) == 6:
return tuple(int(ss[i:i+2], base=16) for i in range(0, len(ss), 2))
else:
raise ValueError('Invalid literal "{}" for hexidecimal color'.format(s))
else:
r,g,b = tuple(min(max(int(i), 0), 255) for i in s.replace(':', ',').split(','))
return r, g, b
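# For example, color('#fff') and color('255,255,255') both return (255, 255, 255),
# while out-of-range components such as '300,0,-5' are clamped to (255, 0, 0).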
if __name__ == '__main__':
parser = argparse.ArgumentParser(add_help=False, description='Generate terrain maps')
parser.add_argument('-H', '--help', action='store_true', help='Show this message and exit')
parser.add_argument('-w', '--width', type=int, help='Width of the final image')
parser.add_argument('-h', '--height', type=int, help='Height of the final image')
parser.add_argument('-s', '--scale', type=float, help='Scale of the map')
parser.add_argument('-x', '--xoffset', type=float, help='Offset to apply to the horizontal noise position')
parser.add_argument('-y', '--yoffset', type=float, help='Offset to apply to the vertical noise position')
parser.add_argument('-S', '--sea-level', type=float, help="How high should the map's sea level be")
parser.add_argument('-O', '--num-octaves', type=int, help='How many octaves to use')
parser.add_argument('--min-height', type=float, help='Lowest possible map point')
parser.add_argument('--max-height', type=float, help='Hightest possible map point')
parser.add_argument('--sea-color', type=color, help='Color for deep water')
parser.add_argument('--coast-color', type=color, help='Color for water near land')
parser.add_argument('--shore-color', type=color, help='Color for land near water')
parser.add_argument('--land-color', type=color, help='Color for land')
parser.add_argument('--mountain-color', type=color, help='Color for mountains')
parser.add_argument('--coast-diff', type=float, help='Height limit from shore for coast')
parser.add_argument('--shore-diff', type=float, help='Height limit from coast for shore')
parser.add_argument('--mountain-height', type=float, help='Height at which to make mountains')
parser.add_argument('-m', '--mode', type=str, choices=('terrain', 'height'),
help='Type of map to generate')
parser.add_argument('-o', '--outfile', type=str, help='File to write the map image to')
args = {k:v for k,v in vars(parser.parse_args()).items() if v is not None}
if args['help']:
parser.print_help()
sys.exit(0)
else:
del args['help']
main(**args)
|
mit
| 5,992,443,874,268,217,000 | 40.472222 | 116 | 0.649475 | false |
liw/daos
|
src/tests/ftest/harness/advanced.py
|
1
|
2765
|
#!/usr/bin/python
"""
(C) Copyright 2021 Intel Corporation.
SPDX-License-Identifier: BSD-2-Clause-Patent
"""
from random import choice
from re import findall
from apricot import TestWithServers
from general_utils import run_pcmd
class HarnessAdvancedTest(TestWithServers):
"""Advanced harness test cases.
:avocado: recursive
"""
def test_core_files(self):
"""Test to verify core file creation.
This test will send a signal 6 to a random daos_engine process so
that it will create a core file, allowing the core file collection code
in launch.py to be tested.
This test can be run in any CI stage: vm, small, medium, large
:avocado: tags=all
:avocado: tags=harness,harness_advanced_test,test_core_files
"""
# Choose a server find the pid of its daos_engine process
host = choice(self.server_managers[0].hosts)
self.log.info("Obtaining pid of the daos_engine process on %s", host)
pid = None
result = run_pcmd([host], "pgrep --list-full daos_engine", 20)
index = 0
while not pid and index < len(result):
output = "\n".join(result[index]["stdout"])
match = findall(r"(\d+)\s+[A-Za-z0-9/]+", output)
if match:
pid = match[0]
index += 1
if pid is None:
self.fail(
"Error obtaining pid of the daos_engine process on "
"{}".format(host))
self.log.info("Found pid %s", pid)
# Send a signal 6 to its daos_engine process
self.log.info("Sending a signal 6 to %s", pid)
result = run_pcmd([host], "sudo kill -6 {}".format(pid))
if len(result) > 1 or result[0]["exit_status"] != 0:
self.fail("Error sending a signal 6 to {} on {}".format(pid, host))
# Display the journalctl log for the process that was sent the signal
self.server_managers[0].manager.dump_logs([host])
# Simplify resolving the host name to rank by marking all ranks as
# expected to be either running or errored (sent a signal 6)
self.server_managers[0].update_expected_states(
None, ["Joined", "Errored"])
def test_core_files_hw(self):
"""Test to verify core file creation.
This test will send a signal 6 to a random daos_engine process so
that it will create a core file, allowing the core file collection code
in launch.py to be tested.
This test can be run in any CI stage: vm, small, medium, large
:avocado: tags=all
:avocado: tags=hw,small,medium,ib2,large
:avocado: tags=harness,harness_advanced_test,test_core_files
"""
self.test_core_files()
|
apache-2.0
| 8,386,738,577,204,103,000 | 34.909091 | 79 | 0.614105 | false |
bugbound/webnuke
|
libs/angular/angularCustomJavascript.py
|
1
|
1178
|
class AngularCustomJavascript:
def __init__(self, jsinjector):
self.version = 0.1
self.jsinjector = jsinjector
self.jsinjector.add_help_topic('wn_showAngularAppName()', 'Show AngularJS Main Application Name')
self.jsinjector.add_js_file('libs/angular/js/app-name.js')
self.jsinjector.add_help_topic('wn_showAngularDeps()', 'Show AngularJS Main Dependencies')
self.jsinjector.add_js_file('libs/angular/js/angular-deps.js')
self.jsinjector.add_help_topic('wn_showAngularMainClasses()', 'Show AngularJS Main Classes')
self.jsinjector.add_help_topic('wn_showAngularAllClasses()', 'Show AngularJS All Classes')
self.jsinjector.add_js_file('libs/angular/js/angular-tools.js')
self.jsinjector.add_help_topic('wn_testNgResourceClasses()', 'Test ngResource Classes')
self.jsinjector.add_js_file('libs/angular/js/test-ngresource.js')
self.jsinjector.add_help_topic('wn_showAngularRoutes()', 'Show AngularJS URL Routes')
self.jsinjector.add_js_file('libs/angular/js/show-routes.js')
self.jsinjector.add_help_topic('wn_testHTTPClasses()', 'Test Angular Classes with get and query methods')
self.jsinjector.add_js_file('libs/angular/js/test-http.js')
|
mit
| 6,817,451,471,202,081,000 | 57.9 | 107 | 0.757216 | false |
tietokilta-saato/tikplay
|
tikplay/server.py
|
1
|
5529
|
import json
import os
from hashlib import sha1
from flask import request, jsonify, current_app
from flask.ext.restful import Resource
import time
from werkzeug.utils import secure_filename
import traceback
from audio import play_file
from provider.provider import Provider
from provider.task import TaskState
from utils import is_uri, is_url
__version__ = 'v1.0'
url_base = '/srv/{}'.format(__version__)
ALLOWED_EXTENSIONS = {'mp3', 'ogg', 'wav'}
class File(Resource):
def __allowed_file(self, file):
return file.filename.split('.')[-1].lower() in ALLOWED_EXTENSIONS
def post(self):
"""
POST a new song to save
"""
file = request.files['file']
filename = secure_filename(file.filename)
if file and self.__allowed_file(file):
calced_hash = sha1(file.stream.read()).hexdigest()
file.stream.seek(0)
_filename = "{}.{}".format(calced_hash, file.filename.split('.')[-1])
if not _filename.endswith(".mp3"):
_filename += ".mp3"
file.save(os.path.join(current_app.config['UPLOAD_FOLDER'], _filename))
current_app.config['audio_api'].update()
time.sleep(2.0) # Whoo, ugly hacks
return jsonify(filename=filename, saved=True, key="sha1:" + calced_hash,
text="File successfully saved as {}. Use this as key to play this file".format(calced_hash))
elif not self.__allowed_file(file):
return jsonify(filename=filename, saved=False,
text="Filetype not allowed! (allowed: {})".format(", ".join(ALLOWED_EXTENSIONS)))
else:
return jsonify(filename="", saved=False,
text="You have to send a file, e.g. curl -X POST -F file=\"@<file>\" <server_address>")
class Queue(Resource):
def get(self, length=10):
"""
GET the now_playing queue
"""
audio_api = current_app.config['audio_api']
return jsonify(text=audio_api.now_playing(queue_length=length))
def delete(self):
audio_api = current_app.config['audio_api']
return jsonify(text=audio_api.kill())
class Song(Resource):
def __init__(self):
        super().__init__()
self.prov = Provider(conf={'download_dir': current_app.config['song_dir']})
self.cache = current_app.config['cache_handler']
def get(self):
"""
GET now playing song
"""
audio_api = current_app.config['audio_api']
return jsonify(text=audio_api.now_playing(queue_length=1))
def delete(self):
"""
DELETE now playing song (i.e. skip a song)
"""
audio_api = current_app.config['audio_api']
return jsonify(text=audio_api.next_())
def post(self):
"""
POST a new song to play by URI/URL.
"""
try:
data = json.loads(request.data.decode())
except ValueError:
return jsonify(error=True, text="Invalid JSON given")
uri = data["url"]
if not uri:
return jsonify(error=True, text="Invalid URI")
if is_url(uri):
uri = self.prov.canonicalize(uri)
elif not is_uri(uri):
return jsonify(error=True, text="Invalid URI")
audio_api = current_app.config['audio_api']
fn = self.cache.get_song(uri)
if fn is not None:
return play_file(
audio_api, current_app.config['songlogger'], fn, data.get("filename", uri), user=data["user"]
)
try:
task = self.prov.get(uri)
except ValueError:
return jsonify(error=True, text="No provider found for " + uri)
if task.state == TaskState.exception:
return jsonify(error=True, text=traceback.format_exception_only(type(task.exception), task.exception))
task.metadata['user'] = data.get('user', 'anonymous')
task.metadata['original_filename'] = data.get('filename', uri)
with current_app.config['task_lock']:
current_app.config['task_dict'][task.id] = task
return jsonify(error=False, task=task.id, text="Task received, fetching song")
class Task(Resource):
def get(self, id_):
"""
GET information about a task.
:param id_: Task ID
:return:
"""
task = current_app.config['task_dict'].get(int(id_), None)
if task is None:
return jsonify(error=True, text="Task not found")
return jsonify(id=task.id, state=task.state, url=task.url)
class Find(Resource):
def get(self, find_type, find_key):
"""
GET find a song from the database.
Keyword arguments:
find_type: valid values 1 (song_hash), 2 (artist), 3 (title), 4 (length), 5 (filename)
find_key: value corresponding to the type: 1 (SHA1), 2 (String),
3 (String), 4 (Integer (seconds)), 5 (filename)
"""
methods = ['song_hash', 'artist', 'title', 'length', 'filename']
cache_handler = current_app.config['cache_handler']
# find_type is ints from 1 - 5, list indices are ints from 0 - 4
found = cache_handler.find(methods[find_type - 1], find_key)
if found is not None:
return jsonify(find_type=methods[find_type - 1], find_key=find_key, found=True, text=str(found))
else:
return jsonify(find_type=methods[find_type - 1], find_key=find_key, found=False)
|
mit
| 5,394,548,310,711,186,000 | 33.773585 | 119 | 0.584012 | false |
bitdagger/mtg-scanner
|
scanner.py
|
1
|
8352
|
from __future__ import print_function
import numpy as np
import cv2
import math
import json
import sys
import phash
import operator
import signal
import base64
from debugger import MTG_Debugger
from mtgexception import MTGException
from transformer import MTG_Transformer
"""Scanner module
This module is responsible for handling user input and reading the data from
the camera to pass off to other modules.
"""
class MTG_Scanner:
"""Attributes:
running (bool): Is the scanning loop running
frame (image): The active frame
bApplyTransforms (bool): Should transforms be applied
bVertFlip (bool): Should the frame be flipped vertically?
threshold (int): Hamming distance threshold
detected_card (image): The image of the proposed card
detected_id (int): The MultiverseID of the proposed card
previous_id (int): The MultiverseID of the last card entered
blacklist (array): Array of MultiverseIDs to exclude from detection
referencedb (MTG_Reference_DB): The reference database object
storagedb (MTG_Storage_DB): The storage database object
debugger (MTG_Debugger): The debugging object
transformer (MTG_Transformer): The transformer object
captureDevice (cv2.VideoCapture): The camera to capture from
"""
def __init__(self, source, referencedb, storagedb, debug):
self.running = False
self.frame = None
self.bApplyTransforms = False
self.bVertFlip = False
self.threshold = 15
self.detected_card = None
self.detected_id = None
self.previous_id = None
self.blacklist = []
self.referencedb = referencedb
self.storagedb = storagedb
self.debugger = MTG_Debugger(debug)
self.transformer = MTG_Transformer(self.debugger)
self.captureDevice = cv2.VideoCapture(source)
def run(self):
"""Main execution
"""
self.running = True
while(self.running):
if (self.detected_card is None):
self.debugger.reset()
__, frame = self.captureDevice.read()
if (frame is None):
print('Error: No frame read from camera')
break
if (self.bApplyTransforms):
try:
frame = self.transformer.applyTransforms(frame)
except MTGException as msg:
self.bApplyTransforms = False
else:
height, width, __ = frame.shape
cv2.rectangle(
frame,
(0, 0),
(width - 1, height - 1),
(255, 0, 0),
2)
if (self.bVertFlip):
height, width, __ = frame.shape
M = cv2.getRotationMatrix2D(
(width / 2, height / 2),
180,
1)
frame = cv2.warpAffine(frame, M, (width, height))
self.frame = frame
cv2.imshow('Preview', self.frame)
self.debugger.display()
else:
cv2.imshow('Detected Card', self.detected_card)
self.handleKey(cv2.waitKey(1) & 0xFF, frame)
if (self.captureDevice is not None):
self.captureDevice.release()
cv2.destroyAllWindows()
def detectCard(self):
"""Detect the card from the active frame
"""
# The phash python bindings operate on files, so we have to write our
# current frame to a file to continue
cv2.imwrite('frame.jpg', self.frame)
# Use phash on our frame
ihash = phash.dct_imagehash('frame.jpg')
idigest = phash.image_digest('frame.jpg')
candidates = {}
hashes = self.referencedb.get_hashes()
for MultiverseID in hashes:
if (MultiverseID in self.blacklist):
continue
hamd = phash.hamming_distance(ihash, int(hashes[MultiverseID]))
if (hamd <= self.threshold):
candidates[MultiverseID] = hamd
if (not len(candidates)):
print('No matches found')
return None
finalists = []
minV = min(candidates.values())
for MultiverseID in candidates:
if (candidates[MultiverseID] == minV):
finalists.append(MultiverseID)
bestMatch = None
correlations = {}
for MultiverseID in finalists:
hamd = candidates[MultiverseID]
digest = phash.image_digest(
self.referencedb.IMAGE_FILE % MultiverseID)
corr = phash.cross_correlation(idigest, digest)
if (bestMatch is None or corr > correlations[bestMatch]):
bestMatch = MultiverseID
correlations[MultiverseID] = corr
return bestMatch
def handleKey(self, key, frame):
if (self.detected_card is None):
if (key == 8 or key == 27):
self.bApplyTransforms = not self.bApplyTransforms
elif (key == ord('d')):
self.debugger.toggle()
elif (key == 171):
self.detected_id = self.previous_id
if (self.detected_id is not None):
self.detected_card = cv2.imread(
self.referencedb.IMAGE_FILE % self.detected_id,
cv2.IMREAD_UNCHANGED)
elif (key == 10):
if (not self.bApplyTransforms):
self.bApplyTransforms = True
else:
self.detected_id = self.detectCard()
if (self.detected_id is not None):
self.detected_card = cv2.imread(
self.referencedb.IMAGE_FILE % self.detected_id,
cv2.IMREAD_UNCHANGED)
else:
if (key == ord('n')):
cv2.destroyWindow('Detected Card')
self.blacklist.append(self.detected_id)
self.detected_id = self.detectCard()
if (self.detected_id is not None):
self.detected_card = cv2.imread(
self.referencedb.IMAGE_FILE % self.detected_id,
cv2.IMREAD_UNCHANGED)
if (key == ord('p')):
self.blacklist = []
for i in range(0, 4):
self.storagedb.add_card(self.detected_id, 0)
name, code = self.referencedb.get_card_info(self.detected_id)
print('Added 4x ' + name + '[' + code + ']...')
self.previous_id = self.detected_id
self.detected_card = None
self.detected_id = None
self.bApplyTransforms = False
cv2.destroyWindow('Detected Card')
if (key == 10 or key == ord('y')):
self.blacklist = []
self.storagedb.add_card(self.detected_id, 0)
name, code = self.referencedb.get_card_info(self.detected_id)
print('Added ' + name + '[' + code + ']...')
self.previous_id = self.detected_id
self.detected_card = None
self.detected_id = None
self.bApplyTransforms = False
cv2.destroyWindow('Detected Card')
if (key == ord('f')):
self.blacklist = []
self.storagedb.add_card(self.detected_id, 1)
name, code = self.referencedb.get_card_info(self.detected_id)
print('Added foil ' + name + '[' + code + ']...')
self.previous_id = self.detected_id
self.detected_card = None
self.detected_id = None
self.bApplyTransforms = False
cv2.destroyWindow('Detected Card')
elif (key == 8 or key == 27):
self.blacklist = []
self.detected_card = None
self.detected_id = None
self.bApplyTransforms = False
cv2.destroyWindow('Detected Card')
if (key == ord('q')):
self.running = False
|
mit
| -6,063,127,265,762,515,000 | 36.452915 | 77 | 0.530771 | false |
white-lab/pyproteome
|
brainrnaseq/__init__.py
|
1
|
2373
|
from . import cache, mapping, enrichments
CELL_TYPE_COLS = {
'Homo sapiens': {
'Astrocyte': [
'8yo',
'13yo', '16yo', '21yo.1', '22yo.1', '35yo', '47yo', '51yo', '53yo',
'60yo', '63yo - 1', '63yo - 2',
],
'Neuron': [
'25yo',
],
'OPC': [
'22yoGC', '63yoGC - 1',
'63yo GC - 2', '47yoO4', '63yoO4',
],
'New Oligodendrocytes': [
'22yoGC', '63yoGC - 1',
'63yo GC - 2', '47yoO4', '63yoO4',
],
'Myelinating Oligodendrocytes': [
'22yoGC', '63yoGC - 1',
'63yo GC - 2', '47yoO4', '63yoO4',
],
'Microglia': [
'45yo', '51yo.1', '63yo',
],
'Endothelia': [
'13yo.1',
'47yo.1',
],
},
'Mus musculus': {
'Astrocyte': [
# 'FACS - p69',
# 'FACS p70',
'1 month',
'4 months',
'7 months',
'9 months',
],
'Neuron': [
'Neuron 3',
'Neuron 4',
],
'OPC': [
'Oligodendrocyte precursor cell 3',
'Oligodendrocyte precursor cell 4',
],
'New Oligodendrocytes': [
'Newly formed oligodendrocyte 3',
'Newly formed oligodendrocyte 4',
],
'Myelinating Oligodendrocytes': [
'Myelinating oligodendrocyte 4',
'Myelinating oligodenrocyte 5',
],
'Microglia': [
'Microglia 1',
'Microglia 2',
],
'Endothelia': [
'Endo 1',
'Endo 2',
],
},
}
CELL_TYPES = [
'Astrocyte',
'Endothelia',
'Microglia',
'Myelinating Oligodendrocytes',
'Neuron',
'New Oligodendrocytes',
'OPC',
]
DEFAULT_CELL_TYPES = [
i
for i in CELL_TYPES
if i not in ['OPC', 'New Oligodendrocytes']
]
CELL_COLORS = colors = {
'Astrocyte': '#bfee90',
'Endothelia': '#ff9b90',
'Microglia': '#5bd3ff',
'Myelinating Oligodendrocytes': '#ff39ff',
'Neuron': '#ffc467',
'New Oligodendrocytes': 'lightpurple',
'OPC': 'darkpurple',
}
__all__ = [
'cache',
'mapping',
'enrichments',
'CELL_TYPE_COLS',
'CELL_TYPES',
'DEFAULT_CELL_TYPES',
'CELL_COLORS',
]
|
bsd-2-clause
| 7,507,329,908,707,769,000 | 21.6 | 79 | 0.435735 | false |
racker/scrivener
|
scrivener/tests/test_server.py
|
1
|
1777
|
# Copyright 2012 Rackspace Hosting, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
from zope.interface import directlyProvides
from twisted.trial.unittest import TestCase
from twisted.test.proto_helpers import StringTransport
from twisted.internet.interfaces import IStreamServerEndpoint
from twisted.internet.defer import succeed
from scrivener.interfaces import ILogHandler
from scrivener.server import ScribeServerService
class ScribeServerServiceTests(TestCase):
def setUp(self):
self.handler = mock.Mock()
directlyProvides(self.handler, ILogHandler)
self.endpoint = mock.Mock()
directlyProvides(self.endpoint, IStreamServerEndpoint)
self.port = mock.Mock()
def _listen(*args, **kwargs):
return succeed(self.port)
self.endpoint.listen.side_effect = _listen
self.service = ScribeServerService(self.endpoint, self.handler)
self.transport = StringTransport()
def test_startService(self):
self.service.startService()
self.assertEqual(self.endpoint.listen.call_count, 1)
def test_stopService(self):
self.service.startService()
self.service.stopService()
self.assertEqual(self.port.stopListening.call_count, 1)
|
apache-2.0
| -3,906,171,968,325,228,500 | 30.732143 | 74 | 0.736072 | false |
vamdt/spider
|
douban/pics.py
|
1
|
1638
|
# coding=utf-8
import re
import urllib
import json
import os, random
BASE_DOWN_DIR = './download'
BASE_DOWN_POSTS_DIR = BASE_DOWN_DIR + '/posts'
BASE_URL = 'http://www.douban.com/photos/photo/2230938262/'
class AppURLopener(urllib.FancyURLopener):
version = "Mozilla/5.0 (Windows NT 6.3; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.4 Safari/537.36"
urllib._urlopener = AppURLopener()
def main():
i = 0;
url = BASE_URL;
while(i<3):
i = i+1;
url = play(url, i);
def play(url, index):
f = urllib.urlopen(url)
html = f.read()
print html
pattern = re.compile(u'<a href="(http://www.douban.com/photos/photo/\d+/#image)" title=".+" id="next_photo">.+</a>',re.DOTALL)
url = pattern.findall(html)[0]
p2 = re.compile(u'<a class="mainphoto" href="\S+" title="\S+">\s+<img src="(http://img.+\.douban\.com/view/photo/photo/public/.+\.jpg)" />\s+</a>', re.DOTALL)
img_url = p2.findall(html)[0]
print img_url
create_dirs(BASE_DOWN_POSTS_DIR)
save_posts(img_url, index)
return url
def get_html(url):
return urllib.urlopen(url).read()
def create_dirs(dir_name):
if not os.path.exists(dir_name):
os.makedirs(dir_name)
def save_posts(url, index):
html = get_html(url)
file_name = BASE_DOWN_POSTS_DIR + '/' + str(index) + '.jpg'
save( html, file_name)
def save(obj, name):
file = open(name, 'w')
file.write(str(obj))
file.close
def save_as_json(obj, name):
json_data = json.dumps(obj, sort_keys=True, indent=4, separators=(',', ': '))
save(json_data, name)
if __name__ == '__main__':
main()
|
mit
| 6,337,651,235,404,347,000 | 25.015873 | 162 | 0.616606 | false |
Auzzy/pyinq
|
pyinq/tests/test_results.py
|
1
|
9565
|
"""
Copyright (c) 2012-2013, Austin Noto-Moniz ([email protected])
Permission to use, copy, modify, and/or distribute this software for any purpose
with or without fee is hereby granted, provided that the above copyright notice
and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF
THIS SOFTWARE.
"""
from pyinq.results import *
##### TEST ASSERT RESULTS #####
LINENO = 12
CALL = "assert_true(True)"
FAIL = "FAIL"
TRACE = "TRACE"
EXPECTED = IOError
def test_Result_true():
check_Result(True)
def test_Result_false():
check_Result(False)
def test_AssertResult_true():
check_AssertResult(True)
def test_AssertResult_false():
check_AssertResult(False)
def test_AssertEqualsResult_true():
check_AssertEqualsResult(True,4,4)
def test_AssertEqualsResult_false():
check_AssertEqualsResult(False,4,5)
def test_AssertInResult_true():
check_AssertInResult(True,4,[1,2,4,8,16,32,64])
def test_AssertInResult_false():
check_AssertInResult(False,4,[1,1,2,3,5,8,13])
def test_AssertInstanceResult_true():
check_AssertInstanceResult(True,IOError,Exception)
def test_AssertInstanceResult_false():
check_AssertInstanceResult(False,IOError,WindowsError)
def test_AssertRaisesResult_true():
check_AssertRaisesResult(True,TRACE)
def test_AssertRaisesResult_false():
check_AssertRaisesResult(False,"")
def test_ExpectedErrorResult_true():
check_ExpectedErrorResult(True,LINENO)
def test_ExpectedErrorResult_false():
check_ExpectedErrorResult(False,None)
def test_FailResult():
result = FailResult(LINENO,FAIL)
assert result.lineno == LINENO
assert result.mess == FAIL
assert result.result == False
def test_AssertError():
result = AssertError(TRACE)
assert result.trace == TRACE
assert result.result is None
##### TEST RESULTS #####
NAME = "FOO"
def test_TestResult():
test_result = TestResult(NAME)
assert test_result.name == NAME
assert not test_result.before
assert not test_result.after
def test_TestResult_true():
test_result = TestResult(NAME)
test_result.extend(make_AssertResult_list(True,True,True))
assert test_result.get_status() == True
def test_TestResult_false():
test_result = TestResult(NAME)
test_result.extend(make_AssertResult_list(True,True,False))
assert test_result.get_status() == False
def test_TestClassResult():
cls_result = TestClassResult(NAME)
assert cls_result.name == NAME
assert not cls_result.before
assert not cls_result.after
def test_TestClassResult_true():
cls_result = TestClassResult(NAME)
cls_result.extend(make_TestResult_list(True,True,True))
assert cls_result.get_status() == True
def test_TestClassResult_false():
cls_result = TestClassResult(NAME)
cls_result.extend(make_TestResult_list(True,True,False))
assert cls_result.get_status() == False
def test_TestModuleResult():
mod_result = TestModuleResult(NAME)
assert mod_result.name == NAME
assert not mod_result.before
assert not mod_result.after
def test_TestModuleResult_true():
mod_result = TestModuleResult(NAME)
mod_result.extend(make_TestClassResult_list(True,True,True))
assert mod_result.get_status() == True
def test_TestModuleResult_false():
mod_result = TestModuleResult(NAME)
mod_result.extend(make_TestClassResult_list(True,True,False))
assert mod_result.get_status() == False
def test_TestSuiteResult():
suite_result = TestSuiteResult(NAME)
assert suite_result.name == NAME
assert not suite_result.before
assert not suite_result.after
def test_TestSuiteResult_true():
suite_result = TestSuiteResult(NAME)
suite_result.extend(make_TestModuleResult_list(True,True,True))
assert suite_result.get_status() == True
def test_TestSuiteResult_false():
    suite_result = TestSuiteResult(NAME)
suite_result.extend(make_TestModuleResult_list(True,True,False))
assert suite_result.get_status() == False
##### TEST ERRORS #####
def construct_call_str(name, args):
arg_str = ",".join([str(arg) for arg in args])
return "{name}({arg_str})".format(name=name,arg_str=arg_str)
def check_PyInqError(func_name, arg_dict, error_cls, result_cls, check_func):
call = construct_call_str(func_name,arg_dict.values())
error = error_cls(LINENO,call,**arg_dict)
result = error.result()
assert error.lineno == LINENO
assert error.call == call
for arg_name in arg_dict:
assert getattr(error,arg_name) == arg_dict[arg_name]
assert type(result) is result_cls
check_func(state=False,lineno=LINENO,call=call,result=result,**arg_dict)
def test_PyInqAssertError():
arg_dict = {}
check_PyInqError("assert_true",arg_dict,PyInqAssertError,AssertResult,check_AssertResult)
def test_PyInqAssertEqualsError():
arg_dict = {"actual":4, "expected":42}
check_PyInqError("assert_equal",arg_dict,PyInqAssertEqualsError,AssertEqualsResult,check_AssertEqualsResult)
def test_PyInqAssertInError():
arg_dict = {"item":4, "collection":[1,1,2,3,5,8,13,21]}
check_PyInqError("assert_in",arg_dict,PyInqAssertInError,AssertInResult,check_AssertInResult)
def test_PyInqAssertInstanceError():
arg_dict = {"obj":IOError, "cls":IndexError}
check_PyInqError("assert_is_instance",arg_dict,PyInqAssertInstanceError,AssertInstanceResult,check_AssertInstanceResult)
def test_PyInqAssertRaisesError():
arg_dict = {"expected":IOError, "trace":""}
check_PyInqError("assert_raises",arg_dict,PyInqAssertRaisesError,AssertRaisesResult,check_AssertRaisesResult)
def test_PyInqFailError():
arg_dict = {"mess":"This is a failure message."}
error = PyInqFailError(LINENO,**arg_dict)
result = error.result()
assert error.lineno == LINENO
assert error.mess == arg_dict["mess"]
assert type(result) is FailResult
assert result.lineno == LINENO
assert result.mess == arg_dict["mess"]
assert result.result == False
##### TEST HELPERS #####
def check_Result(state, result=None):
if not result:
result = Result(state)
assert result.result == state
def check_AssertResult(state, lineno=LINENO, call=CALL, result=None):
if not result:
result = AssertResult(lineno,call,state)
assert result.lineno == lineno
assert result.call == call
assert result.result == state
def check_AssertEqualsResult(state, actual, expected, lineno=LINENO, call=CALL, result=None):
if not result:
result = AssertEqualsResult(lineno,call,state,actual,expected)
assert result.lineno == lineno
assert result.call == call
assert result.result == state
assert result.actual == actual
assert result.expected == expected
def check_AssertInResult(state, item, collection, lineno=LINENO, call=CALL, result=None):
if not result:
result = AssertInResult(lineno,call,state,item,collection)
assert result.lineno == lineno
assert result.call == call
assert result.result == state
assert result.item == item
assert result.collection == collection
def check_AssertInstanceResult(state, obj, cls, lineno=LINENO, call=CALL, result=None):
if not result:
result = AssertInstanceResult(lineno,call,state,obj,cls)
assert result.lineno == lineno
assert result.call == call
assert result.result == state
assert result.obj_name == obj.__class__.__name__
assert result.class_name == cls.__name__
def check_AssertRaisesResult(state, trace, lineno=LINENO, call=CALL, expected=EXPECTED, result=None):
if not result:
result = AssertRaisesResult(lineno,call,state,trace,expected)
assert result.lineno == lineno
assert result.call == call
assert result.result == state
assert remove_whitespace(result.trace) == remove_whitespace(trace)
assert result.expected == expected.__name__
def check_ExpectedErrorResult(state, lineno, expected=EXPECTED, result=None):
if not result:
result = ExpectedErrorResult(state,expected,lineno)
assert result.expected == expected.__name__
assert result.lineno == lineno
assert result.call is None
assert result.result == state
def make_AssertResult_list(*state_list):
return [AssertResult(LINENO,CALL,state) for state in state_list]
def make_TestResult_list(*state_list):
result_list = []
for state in state_list:
result = TestResult(NAME)
result.extend(make_AssertResult_list(state))
result_list.append(result)
return result_list
def make_TestClassResult_list(*state_list):
result_list = []
for state in state_list:
result = TestClassResult(NAME)
result.extend(make_TestResult_list(state))
result_list.append(result)
return result_list
def make_TestModuleResult_list(*state_list):
result_list = []
for state in state_list:
result = TestModuleResult(NAME)
result.extend(make_TestClassResult_list(state))
result_list.append(result)
return result_list
##### UTIL #####
def remove_whitespace(string):
return ''.join([line.strip() for line in string.splitlines()])
|
isc
| 8,524,622,938,886,892,000 | 30.883333 | 124 | 0.716675 | false |
matmodlab/matmodlab2
|
matmodlab2/ext_helpers/build_ext.py
|
1
|
9765
|
#!/usr/bin/env python
import os
import re
import sys
import glob
import shutil
import logging
import tempfile
from argparse import ArgumentParser
from subprocess import Popen, STDOUT
from contextlib import contextmanager
from matmodlab2.core.logio import get_logger
from matmodlab2.core.environ import environ
from matmodlab2.core.misc import is_listlike
ext_support_dir = os.path.dirname(os.path.realpath(__file__))
aba_support_dir = os.path.join(ext_support_dir, '../umat')
# "Lite" version of blas/lapack
lapack_lite = os.path.join(ext_support_dir, 'blas_lapack-lite.f')
lapack_lite_obj = os.path.splitext(lapack_lite)[0] + '.o'
assert os.path.isfile(lapack_lite)
# Fortran I/O
mml_io = os.path.join(ext_support_dir, 'mml_io.f90')
assert os.path.isfile(mml_io)
# Abaqus related files
aba_sdvini = os.path.join(aba_support_dir, 'aba_sdvini.f90')
assert os.path.isfile(aba_sdvini)
aba_utils = os.path.join(aba_support_dir, 'aba_utils.f90')
assert os.path.isfile(aba_utils)
umat_pyf = os.path.join(aba_support_dir, 'umat.pyf')
assert os.path.isfile(umat_pyf)
uhyper_pyf = os.path.join(aba_support_dir, 'uhyper.pyf')
assert os.path.isfile(uhyper_pyf)
tensalg_f90 = os.path.join(aba_support_dir, 'tensalg.f90')
assert os.path.isfile(tensalg_f90)
uhyper_wrap_f90 = os.path.join(aba_support_dir, 'uhyper_wrap.f90')
assert os.path.isfile(uhyper_wrap_f90)
class ExtensionNotBuilt(Exception):
pass
def which(name):
"""Find the executable name on PATH"""
for path in os.getenv('PATH', '').split(os.pathsep):
if not os.path.isdir(path):
continue
if os.path.isfile(os.path.join(path, name)):
return os.path.join(path, name)
return None
def clean_f2py_tracks(dirname):
if not os.path.isdir(dirname):
return
for pat in ('*.so.dSYM', '*-f2pywrappers2.*', '*module.c'):
for item in glob.glob(os.path.join(dirname, pat)):
if os.path.isdir(item):
shutil.rmtree(item)
else:
os.remove(item)
def build_extension_module(name, sources, include_dirs=None, verbose=False,
user_ics=False, fc=None, cwd=None):
"""Build the fortran extension module (material model)
Parameters
----------
name : str
The name of the extension module to build
sources : list of str
List of source files
include_dirs : list of str
List of extra include directories
verbose : bool
Write output to stdout if True, otherwise suppress stdout
user_ics : bool
List of source files includes source defining subroutine SDVINI.
Applicable only for Abaqus umat and uhyper.
fc : str
Fortran compiler
Notes
-----
To build abaqus umat, the name must be 'umat'
To build abaqus uhyper, the name must be 'uhyper'
"""
the_loglevel = environ.loglevel
environ.loglevel = logging.DEBUG
logger = get_logger('build-ext')
fc = fc or which('gfortran')
if fc is None:
raise OSError('Fortran compiler not found')
# Check source files
for source_file in sources:
if not os.path.isfile(source_file):
raise OSError('{0!r}: file not found'.format(source_file))
if name != '_matfuncs_sq3':
sources.append(mml_io)
# We'll add the object file back in
if lapack_lite in sources:
sources.remove(lapack_lite)
# Everyone get lapack!
if lapack_lite_obj not in sources:
sources.append(lapack_lite_obj)
if not os.path.isfile(lapack_lite_obj):
_build_blas_lapack(logger, fc)
include_dirs = include_dirs or []
umat = name.lower() == 'umat'
uhyper = name.lower() == 'uhyper'
if umat or uhyper:
# Build the umat module - add some Abaqus utility files
clean_f2py_tracks(aba_support_dir)
name = '_umat' if umat else '_uhyper'
sources.append(aba_utils)
if umat:
sources.append(umat_pyf)
elif uhyper:
sources.extend([uhyper_pyf, tensalg_f90, uhyper_wrap_f90])
if not user_ics:
sources.append(aba_sdvini)
include_dirs = include_dirs + [aba_support_dir]
if any(' ' in x for x in sources):
logger.warning('File paths with spaces are known to fail to build')
command = ['f2py', '-c']
# Build the fortran flags argument
fflags = ['-Wno-unused-dummy-argument', '-fPIC', '-shared']
if os.getenv('FCFLAGS'):
fflags.extend(os.environ['FCFLAGS'].split())
command.extend(['--f77flags={0!r}'.format(' '.join(fflags)),
'--f90flags={0!r}'.format(' '.join(fflags))])
command.extend(['--include-paths', ':'.join(include_dirs)])
command.extend(['-m', name])
command.extend(sources)
logger.info('building extension module {0!r}... '.format(name),
extra={'continued':1})
logfile = None
cwd = cwd or os.getcwd()
if verbose:
# Call directly - LOTS of output!
p = Popen(command, cwd=cwd)
p.wait()
elif environ.notebook:
from IPython.utils import io
with io.capture_output() as captured:
p = Popen(command, cwd=cwd)
p.wait()
else:
logfile = os.path.join(cwd, 'build.log')
with stdout_redirected(to=logfile), merged_stderr_stdout():
p = Popen(command, cwd=cwd)
p.wait()
logger.info('done')
if logfile is not None and logfile != sys.stdout:
os.remove(logfile)
# Return the loglevel back to what it was
environ.loglevel = the_loglevel
clean_f2py_tracks(cwd)
if p.returncode != 0:
logger.error('Failed to build')
raise ExtensionNotBuilt(name)
return 0
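# Illustrative usage of build_extension_module (a sketch; 'my_umat.f90' is a
# hypothetical source file, not part of this package):
#   build_extension_module('umat', ['my_umat.f90'], verbose=True, user_ics=False)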
def _build_blas_lapack(logger, fc):
logger.info('building blas_lapack-lite... ', extra={'continued':1})
cmd = [fc, '-fPIC', '-shared', '-O3', lapack_lite, '-o' + lapack_lite_obj]
proc = Popen(cmd, stdout=open(os.devnull, 'a'), stderr=STDOUT,
cwd=ext_support_dir)
proc.wait()
if proc.returncode == 0:
logger.info('done')
else:
logger.info('failed')
return proc.returncode
def fileno(file_or_fd):
fd = getattr(file_or_fd, 'fileno', lambda: file_or_fd)()
if not isinstance(fd, int):
raise ValueError("Expected a file (`.fileno()`) or a file descriptor")
return fd
@contextmanager
def stdout_redirected(to=os.devnull, stdout=None):
"""From: http://stackoverflow.com/questions/4675728/
redirect-stdout-to-a-file-in-python/22434262#22434262
"""
if stdout is None:
stdout = sys.stdout
stdout_fd = fileno(stdout)
# copy stdout_fd before it is overwritten
#NOTE: `copied` is inheritable on Windows when duplicating a standard stream
with os.fdopen(os.dup(stdout_fd), 'wb') as copied:
stdout.flush() # flush library buffers that dup2 knows nothing about
try:
os.dup2(fileno(to), stdout_fd) # $ exec >&to
except ValueError: # filename
with open(to, 'wb') as to_file:
os.dup2(to_file.fileno(), stdout_fd) # $ exec > to
try:
yield stdout # allow code to be run with the redirected stdout
finally:
# restore stdout to its previous value
#NOTE: dup2 makes stdout_fd inheritable unconditionally
stdout.flush()
os.dup2(copied.fileno(), stdout_fd) # $ exec >&copied
def merged_stderr_stdout(): # $ exec 2>&1
return stdout_redirected(to=sys.stdout, stdout=sys.stderr)
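# Illustrative use (a sketch mirroring the call in build_extension_module above):
#   with stdout_redirected(to='build.log'), merged_stderr_stdout():
#       Popen(command).wait()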
def build_extension_module_as_subprocess(name, sources,
include_dirs=None, verbose=False,
user_ics=False, fc=None,
cwd=None):
"""Build the extension module, but call as a subprocess.
Parameters
----------
Same as build_extension_module
Notes
-----
    This function exists since distutils can only be initialized once and we want to build several different extensions.
"""
build_extension_module(name, sources, include_dirs=include_dirs,
verbose=verbose, user_ics=user_ics, fc=fc)
return 0
def build_mml_matrix_functions():
"""Build the mml linear algebra library"""
name = '_matfuncs_sq3'
mfuncs_pyf = os.path.join(ext_support_dir, 'matrix_funcs.pyf')
mfuncs_f90 = os.path.join(ext_support_dir, 'matrix_funcs.f90')
dgpadm_f = os.path.join(ext_support_dir, 'dgpadm.f')
sources = [mfuncs_pyf, mfuncs_f90, lapack_lite, dgpadm_f]
package_path = os.path.join(ext_support_dir, '../core')
command = ['f2py', '-c']
command.extend(sources)
p = Popen(command, cwd=package_path)
p.wait()
if p.returncode != 0:
raise ExtensionNotBuilt(name)
return 0
def main():
p = ArgumentParser()
p.add_argument('name')
p.add_argument('sources', nargs='*')
p.add_argument('--include-dirs', action='append', default=None)
p.add_argument('--verbose', action='store_true', default=False)
p.add_argument('--package-path', default=None)
p.add_argument('--user-ics', action='store_true', default=False)
p.add_argument('--fc', default=False)
args = p.parse_args()
if args.name == 'matfuncs':
return build_mml_matrix_functions()
if not args.sources:
raise ValueError('Missing sources argument')
build_extension_module(args.name, args.sources,
include_dirs=args.include_dirs,
verbose=args.verbose,
user_ics=args.user_ics,
fc=args.fc, cwd=args.package_path)
if __name__ == '__main__':
main()
|
bsd-3-clause
| -2,366,992,133,191,876,600 | 33.02439 | 123 | 0.619662 | false |
snorfalorpagus/pywr
|
tests/test_parameters.py
|
1
|
43931
|
"""
Test for individual Parameter classes
"""
from __future__ import division
from pywr.core import Model, Timestep, Scenario, ScenarioIndex, Storage, Link, Input, Output
from pywr.parameters import (Parameter, ArrayIndexedParameter, ConstantScenarioParameter,
ArrayIndexedScenarioMonthlyFactorsParameter, MonthlyProfileParameter, DailyProfileParameter,
DataFrameParameter, AggregatedParameter, ConstantParameter, ConstantScenarioIndexParameter,
IndexParameter, AggregatedIndexParameter, RecorderThresholdParameter, ScenarioMonthlyProfileParameter,
Polynomial1DParameter, Polynomial2DStorageParameter, ArrayIndexedScenarioParameter,
InterpolatedParameter, WeeklyProfileParameter,
FunctionParameter, AnnualHarmonicSeriesParameter, load_parameter)
from pywr.recorders import AssertionRecorder, assert_rec
from pywr.model import OrphanedParameterWarning
from pywr.recorders import Recorder
from fixtures import simple_linear_model, simple_storage_model
from helpers import load_model
import json
import os
import datetime
import numpy as np
import pandas as pd
import pytest
import itertools
from numpy.testing import assert_allclose
TEST_DIR = os.path.dirname(__file__)
@pytest.fixture
def model():
return Model()
class TestConstantParameter:
""" Tests for `ConstantParameter` """
def test_basic_use(self, simple_linear_model):
""" Test the basic use of `ConstantParameter` using the Python API """
model = simple_linear_model
# Add two scenarios
scA = Scenario(model, 'Scenario A', size=2)
scB = Scenario(model, 'Scenario B', size=5)
p = ConstantParameter(model, np.pi, name='pi', comment='Mmmmm Pi!')
assert not p.is_variable
assert p.double_size == 1
assert p.integer_size == 0
model.setup()
ts = model.timestepper.current
# Now ensure the appropriate value is returned for all scenarios
for i, (a, b) in enumerate(itertools.product(range(scA.size), range(scB.size))):
si = ScenarioIndex(i, np.array([a, b], dtype=np.int32))
np.testing.assert_allclose(p.value(ts, si), np.pi)
def test_being_a_variable(self, simple_linear_model):
""" Test the basic use of `ConstantParameter` when `is_variable=True` """
model = simple_linear_model
p = ConstantParameter(model, np.pi, name='pi', comment='Mmmmm Pi!', is_variable=True,
lower_bounds=np.pi/2, upper_bounds=2*np.pi)
model.setup()
assert p.is_variable
assert p.double_size == 1
assert p.integer_size == 0
np.testing.assert_allclose(p.get_double_lower_bounds(), np.array([np.pi/2]))
np.testing.assert_allclose(p.get_double_upper_bounds(), np.array([2*np.pi]))
np.testing.assert_allclose(p.get_double_variables(), np.array([np.pi]))
# No test updating the variables
p.set_double_variables(np.array([1.5*np.pi, ]))
np.testing.assert_allclose(p.get_double_variables(), np.array([1.5*np.pi]))
# None of the integer functions should be implemented because this parameter
# has no integer variables
with pytest.raises(NotImplementedError):
p.get_integer_lower_bounds()
with pytest.raises(NotImplementedError):
p.get_integer_upper_bounds()
with pytest.raises(NotImplementedError):
p.get_integer_variables()
def test_parameter_array_indexed(simple_linear_model):
"""
Test ArrayIndexedParameter
"""
model = simple_linear_model
A = np.arange(len(model.timestepper), dtype=np.float64)
p = ArrayIndexedParameter(model, A)
model.setup()
# scenario indices (not used for this test)
si = ScenarioIndex(0, np.array([0], dtype=np.int32))
for v, ts in zip(A, model.timestepper):
np.testing.assert_allclose(p.value(ts, si), v)
# Now check that IndexError is raised if an out of bounds Timestep is given.
ts = Timestep(datetime.datetime(2016, 1, 1), 366, 1.0)
with pytest.raises(IndexError):
p.value(ts, si)
def test_parameter_array_indexed_json_load(simple_linear_model, tmpdir):
"""Test ArrayIndexedParameter can be loaded from json dict"""
model = simple_linear_model
# Daily time-step
index = pd.date_range('2015-01-01', periods=365, freq='D', name='date')
df = pd.DataFrame(np.arange(365), index=index, columns=['data'])
df_path = tmpdir.join('df.csv')
df.to_csv(str(df_path))
data = {
'type': 'arrayindexed',
'url': str(df_path),
'index_col': 'date',
'parse_dates': True,
'column': 'data',
}
p = load_parameter(model, data)
model.setup()
si = ScenarioIndex(0, np.array([0], dtype=np.int32))
for v, ts in enumerate(model.timestepper):
np.testing.assert_allclose(p.value(ts, si), v)
def test_parameter_constant_scenario(simple_linear_model):
"""
Test ConstantScenarioParameter
"""
model = simple_linear_model
# Add two scenarios
scA = Scenario(model, 'Scenario A', size=2)
scB = Scenario(model, 'Scenario B', size=5)
p = ConstantScenarioParameter(model, scB, np.arange(scB.size, dtype=np.float64))
model.setup()
ts = model.timestepper.current
# Now ensure the appropriate value is returned for the Scenario B indices.
for i, (a, b) in enumerate(itertools.product(range(scA.size), range(scB.size))):
si = ScenarioIndex(i, np.array([a, b], dtype=np.int32))
np.testing.assert_allclose(p.value(ts, si), float(b))
def test_parameter_constant_scenario_index(simple_linear_model):
"""
Test ConstantScenarioIndexParameter
"""
model = simple_linear_model
# Add two scenarios
scA = Scenario(model, 'Scenario A', size=2)
scB = Scenario(model, 'Scenario B', size=5)
p = ConstantScenarioIndexParameter(model, scB, np.arange(scB.size, dtype=np.int32))
model.setup()
ts = model.timestepper.current
# Now ensure the appropriate value is returned for the Scenario B indices.
for i, (a, b) in enumerate(itertools.product(range(scA.size), range(scB.size))):
si = ScenarioIndex(i, np.array([a, b], dtype=np.int32))
np.testing.assert_allclose(p.index(ts, si), b)
def test_parameter_array_indexed_scenario_monthly_factors(simple_linear_model):
"""
Test ArrayIndexedParameterScenarioMonthlyFactors
"""
model = simple_linear_model
# Baseline timeseries data
values = np.arange(len(model.timestepper), dtype=np.float64)
# Add two scenarios
scA = Scenario(model, 'Scenario A', size=2)
scB = Scenario(model, 'Scenario B', size=5)
# Random factors for each Scenario B value per month
factors = np.random.rand(scB.size, 12)
p = ArrayIndexedScenarioMonthlyFactorsParameter(model, scB, values, factors)
model.setup()
# Iterate in time
for v, ts in zip(values, model.timestepper):
imth = ts.datetime.month - 1
# Now ensure the appropriate value is returned for the Scenario B indices.
for i, (a, b) in enumerate(itertools.product(range(scA.size), range(scB.size))):
f = factors[b, imth]
si = ScenarioIndex(i, np.array([a, b], dtype=np.int32))
np.testing.assert_allclose(p.value(ts, si), v*f)
def test_parameter_array_indexed_scenario_monthly_factors_json(model):
model.path = os.path.join(TEST_DIR, "models")
scA = Scenario(model, 'Scenario A', size=2)
scB = Scenario(model, 'Scenario B', size=3)
p1 = ArrayIndexedScenarioMonthlyFactorsParameter.load(model, {
"scenario": "Scenario A",
"values": list(range(32)),
"factors": [list(range(1, 13)),list(range(13, 25))],
})
p2 = ArrayIndexedScenarioMonthlyFactorsParameter.load(model, {
"scenario": "Scenario B",
"values": {
"url": "timeseries1.csv",
"index_col": "Timestamp",
"column": "Data",
},
"factors": {
"url": "monthly_profiles.csv",
"index_col": "scenario",
},
})
node1 = Input(model, "node1", max_flow=p1)
node2 = Input(model, "node2", max_flow=p2)
nodeN = Output(model, "nodeN", max_flow=None, cost=-1)
node1.connect(nodeN)
node2.connect(nodeN)
model.timestepper.start = "2015-01-01"
model.timestepper.end = "2015-01-31"
model.run()
def test_parameter_monthly_profile(simple_linear_model):
"""
Test MonthlyProfileParameter
"""
model = simple_linear_model
values = np.arange(12, dtype=np.float64)
p = MonthlyProfileParameter(model, values)
model.setup()
# Iterate in time
for ts in model.timestepper:
imth = ts.datetime.month - 1
si = ScenarioIndex(0, np.array([0], dtype=np.int32))
np.testing.assert_allclose(p.value(ts, si), values[imth])
class TestScenarioMonthlyProfileParameter:
def test_init(self, simple_linear_model):
model = simple_linear_model
scenario = Scenario(model, 'A', 10)
values = np.random.rand(10, 12)
p = ScenarioMonthlyProfileParameter(model, scenario, values)
model.setup()
# Iterate in time
for ts in model.timestepper:
imth = ts.datetime.month - 1
for i in range(scenario.size):
si = ScenarioIndex(i, np.array([i], dtype=np.int32))
np.testing.assert_allclose(p.value(ts, si), values[i, imth])
def test_json(self):
model = load_model('scenario_monthly_profile.json')
        # check first day initialised
assert (model.timestepper.start == datetime.datetime(2015, 1, 1))
# check results
supply1 = model.nodes['supply1']
# Multiplication factors
factors = np.array([
[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11],
[12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23],
[0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22],
])
for expected in (23.92, 22.14, 22.57, 24.97, 27.59):
model.step()
imth = model.timestepper.current.month - 1
assert_allclose(supply1.flow, expected*factors[:, imth], atol=1e-7)
def test_parameter_daily_profile(simple_linear_model):
"""
Test DailyProfileParameter
"""
model = simple_linear_model
values = np.arange(366, dtype=np.float64)
p = DailyProfileParameter(model, values)
model.setup()
# Iterate in time
for ts in model.timestepper:
month = ts.datetime.month
day = ts.datetime.day
iday = int((datetime.datetime(2016, month, day) - datetime.datetime(2016, 1, 1)).days)
si = ScenarioIndex(0, np.array([0], dtype=np.int32))
np.testing.assert_allclose(p.value(ts, si), values[iday])
def test_daily_profile_leap_day(model):
"""Test behaviour of daily profile parameter for leap years
"""
inpt = Input(model, "input")
otpt = Output(model, "otpt", max_flow=None, cost=-999)
inpt.connect(otpt)
inpt.max_flow = DailyProfileParameter(model, np.arange(0, 366, dtype=np.float64))
# non-leap year
model.timestepper.start = pd.to_datetime("2015-01-01")
model.timestepper.end = pd.to_datetime("2015-12-31")
model.run()
assert_allclose(inpt.flow, 365) # NOT 364
# leap year
model.timestepper.start = pd.to_datetime("2016-01-01")
model.timestepper.end = pd.to_datetime("2016-12-31")
model.run()
assert_allclose(inpt.flow, 365)
def test_weekly_profile(simple_linear_model):
model = simple_linear_model
model.timestepper.start = "2004-01-01"
model.timestepper.end = "2005-05-01"
model.timestepper.delta = 7
values = np.arange(0, 52) ** 2 + 27.5
p = WeeklyProfileParameter.load(model, {"values": values})
@assert_rec(model, p)
def expected_func(timestep, scenario_index):
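        # Clamp the final partial week (days 365/366) into the last profile index, 51.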
week = int(min((timestep.dayofyear - 1) // 7, 51))
value = week ** 2 + 27.5
return value
model.run()
class TestAnnualHarmonicSeriesParameter:
""" Tests for `AnnualHarmonicSeriesParameter` """
def test_single_harmonic(self, model):
p1 = AnnualHarmonicSeriesParameter(model, 0.5, [0.25], [np.pi/4])
si = ScenarioIndex(0, np.array([0], dtype=np.int32))
for ts in model.timestepper:
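            # Fraction of the year elapsed (0 on 1 January), used as the cosine phase argument.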
doy = (ts.datetime.dayofyear - 1)/365
np.testing.assert_allclose(p1.value(ts, si), 0.5 + 0.25*np.cos(doy*2*np.pi + np.pi/4))
def test_double_harmonic(self, model):
p1 = AnnualHarmonicSeriesParameter(model, 0.5, [0.25, 0.3], [np.pi/4, np.pi/3])
si = ScenarioIndex(0, np.array([0], dtype=np.int32))
for ts in model.timestepper:
doy = (ts.datetime.dayofyear - 1) /365
expected = 0.5 + 0.25*np.cos(doy*2*np.pi + np.pi / 4) + 0.3*np.cos(doy*4*np.pi + np.pi/3)
np.testing.assert_allclose(p1.value(ts, si), expected)
def test_load(self, model):
data = {
"type": "annualharmonicseries",
"mean": 0.5,
"amplitudes": [0.25],
"phases": [np.pi/4]
}
p1 = load_parameter(model, data)
si = ScenarioIndex(0, np.array([0], dtype=np.int32))
for ts in model.timestepper:
doy = (ts.datetime.dayofyear - 1) / 365
np.testing.assert_allclose(p1.value(ts, si), 0.5 + 0.25 * np.cos(doy * 2 * np.pi + np.pi / 4))
def test_variable(self, model):
""" Test that variable updating works. """
p1 = AnnualHarmonicSeriesParameter(model, 0.5, [0.25], [np.pi/4], is_variable=True)
assert p1.double_size == 3
assert p1.integer_size == 0
new_var = np.array([0.6, 0.1, np.pi/2])
p1.set_double_variables(new_var)
np.testing.assert_allclose(p1.get_double_variables(), new_var)
with pytest.raises(NotImplementedError):
p1.set_integer_variables(np.arange(3, dtype=np.int32))
with pytest.raises(NotImplementedError):
p1.get_integer_variables()
si = ScenarioIndex(0, np.array([0], dtype=np.int32))
for ts in model.timestepper:
doy = (ts.datetime.dayofyear - 1)/365
np.testing.assert_allclose(p1.value(ts, si), 0.6 + 0.1*np.cos(doy*2*np.pi + np.pi/2))
class TestAggregatedParameter:
"""Tests for AggregatedParameter"""
funcs = {"min": np.min, "max": np.max, "mean": np.mean, "median": np.median, "sum": np.sum}
@pytest.mark.parametrize("agg_func", ["min", "max", "mean", "median", "sum"])
def test_agg(self, simple_linear_model, agg_func):
model = simple_linear_model
model.timestepper.delta = 15
scenarioA = Scenario(model, "Scenario A", size=2)
scenarioB = Scenario(model, "Scenario B", size=5)
values = np.arange(366, dtype=np.float64)
p1 = DailyProfileParameter(model, values)
p2 = ConstantScenarioParameter(model, scenarioB, np.arange(scenarioB.size, dtype=np.float64))
p = AggregatedParameter(model, [p1, p2], agg_func=agg_func)
func = TestAggregatedParameter.funcs[agg_func]
@assert_rec(model, p)
def expected_func(timestep, scenario_index):
x = p1.get_value(scenario_index)
y = p2.get_value(scenario_index)
return func(np.array([x,y]))
model.run()
def test_load(self, simple_linear_model):
""" Test load from JSON dict"""
model = simple_linear_model
data = {
"type": "aggregated",
"agg_func": "product",
"parameters": [
0.8,
{
"type": "monthlyprofile",
"values": list(range(12))
}
]
}
p = load_parameter(model, data)
# Correct instance is loaded
assert isinstance(p, AggregatedParameter)
@assert_rec(model, p)
def expected(timestep, scenario_index):
return (timestep.month - 1) * 0.8
model.run()
class DummyIndexParameter(IndexParameter):
"""A simple IndexParameter which returns a constant value"""
def __init__(self, model, index, **kwargs):
super(DummyIndexParameter, self).__init__(model, **kwargs)
self._index = index
def index(self, timestep, scenario_index):
return self._index
def __repr__(self):
return "<DummyIndexParameter \"{}\">".format(self.name)
class TestAggregatedIndexParameter:
"""Tests for AggregatedIndexParameter"""
funcs = {"min": np.min, "max": np.max, "sum": np.sum, "product": np.product}
@pytest.mark.parametrize("agg_func", ["min", "max", "sum", "product"])
def test_agg(self, simple_linear_model, agg_func):
model = simple_linear_model
model.timestepper.delta = 1
model.timestepper.start = "2017-01-01"
model.timestepper.end = "2017-01-03"
scenarioA = Scenario(model, "Scenario A", size=2)
scenarioB = Scenario(model, "Scenario B", size=5)
p1 = DummyIndexParameter(model, 2)
p2 = DummyIndexParameter(model, 3)
p = AggregatedIndexParameter(model, [p1, p2], agg_func=agg_func)
func = TestAggregatedIndexParameter.funcs[agg_func]
@assert_rec(model, p)
def expected_func(timestep, scenario_index):
x = p1.get_index(scenario_index)
y = p2.get_index(scenario_index)
return func(np.array([x,y], np.int32))
model.run()
def test_agg_anyall(self, simple_linear_model):
"""Test the "any" and "all" aggregation functions"""
model = simple_linear_model
model.timestepper.delta = 1
model.timestepper.start = "2017-01-01"
model.timestepper.end = "2017-01-03"
scenarioA = Scenario(model, "Scenario A", size=2)
scenarioB = Scenario(model, "Scenario B", size=5)
num_comb = len(model.scenarios.get_combinations())
parameters = {
0: DummyIndexParameter(model, 0, name="p0"),
1: DummyIndexParameter(model, 1, name="p1"),
2: DummyIndexParameter(model, 2, name="p2"),
}
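        # Tuples of dummy index values; the expected "any"/"all" results below are computed directly from them.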
data = [(0, 0), (1, 0), (0, 1), (1, 1), (1, 1, 1), (0, 2)]
data_parameters = [[parameters[i] for i in d] for d in data]
expected = [(np.any(d), np.all(d)) for d in data]
for n, params in enumerate(data_parameters):
for m, agg_func in enumerate(["any", "all"]):
p = AggregatedIndexParameter(model, params, agg_func=agg_func)
e = np.ones([len(model.timestepper), num_comb]) * expected[n][m]
r = AssertionRecorder(model, p, expected_data=e, name="assertion {}-{}".format(n, agg_func))
model.run()
def test_parameter_child_variables(model):
p1 = Parameter(model)
# Default parameter
assert len(p1.parents) == 0
assert len(p1.children) == 0
c1 = Parameter(model)
c1.parents.add(p1)
assert len(p1.children) == 1
assert c1 in p1.children
assert p1 in c1.parents
# Test third level
c2 = Parameter(model)
c2.parents.add(c1)
# Disable parent
c1.parents.clear()
assert len(p1.children) == 0
def test_scaled_profile_nested_load(model):
""" Test `ScaledProfileParameter` loading with `AggregatedParameter` """
model.timestepper.delta = 15
s = Storage(model, 'Storage', max_volume=100.0, initial_volume=50.0, num_outputs=0)
d = Output(model, 'Link')
data = {
'type': 'scaledprofile',
'scale': 50.0,
'profile': {
'type': 'aggregated',
'agg_func': 'product',
'parameters': [
{
'type': 'monthlyprofile',
'values': [0.5]*12
},
{
'type': 'constant',
'value': 1.5,
}
]
}
}
s.connect(d)
d.max_flow = p = load_parameter(model, data)
@assert_rec(model, p)
def expected_func(timestep, scenario_index):
return 50.0 * 0.5 * 1.5
model.run()
def test_parameter_df_upsampling(model):
""" Test that the `DataFrameParameter` can upsample data from a `pandas.DataFrame` and return that correctly
"""
# scenario indices (not used for this test)
si = ScenarioIndex(0, np.array([0], dtype=np.int32))
# Use a 7 day timestep for this test and run 2015
model.timestepper.delta = datetime.timedelta(7)
model.timestepper.start = pd.to_datetime('2015-01-01')
model.timestepper.end = pd.to_datetime('2015-12-31')
# Daily time-step
index = pd.date_range('2015-01-01', periods=365, freq='D')
series = pd.Series(np.arange(365), index=index)
p = DataFrameParameter(model, series)
p.setup()
A = series.resample('7D').mean()
for v, ts in zip(A, model.timestepper):
np.testing.assert_allclose(p.value(ts, si), v)
model.reset()
# Daily time-step that requires aligning
index = pd.date_range('2014-12-31', periods=366, freq='D')
series = pd.Series(np.arange(366), index=index)
p = DataFrameParameter(model, series)
p.setup()
# offset the resample appropriately for the test
A = series[1:].resample('7D').mean()
for v, ts in zip(A, model.timestepper):
np.testing.assert_allclose(p.value(ts, si), v)
model.reset()
    # Daily time-step that does not cover the required range
index = pd.date_range('2015-02-01', periods=365, freq='D')
series = pd.Series(np.arange(365), index=index)
p = DataFrameParameter(model, series)
with pytest.raises(ValueError):
p.setup()
model.reset()
    # Daily time-step that does not cover the required range
index = pd.date_range('2014-11-01', periods=365, freq='D')
series = pd.Series(np.arange(365), index=index)
p = DataFrameParameter(model, series)
with pytest.raises(ValueError):
p.setup()
def test_parameter_df_upsampling_multiple_columns(model):
""" Test that the `DataFrameParameter` works with multiple columns that map to a `Scenario`
"""
scA = Scenario(model, 'A', size=20)
scB = Scenario(model, 'B', size=2)
# scenario indices (not used for this test)
# Use a 7 day timestep for this test and run 2015
model.timestepper.delta = datetime.timedelta(7)
model.timestepper.start = pd.to_datetime('2015-01-01')
model.timestepper.end = pd.to_datetime('2015-12-31')
# Daily time-step
index = pd.date_range('2015-01-01', periods=365, freq='D')
df = pd.DataFrame(np.random.rand(365, 20), index=index)
p = DataFrameParameter(model, df, scenario=scA)
p.setup()
A = df.resample('7D', axis=0).mean()
for v, ts in zip(A.values, model.timestepper):
np.testing.assert_allclose([p.value(ts, ScenarioIndex(i, np.array([i], dtype=np.int32))) for i in range(20)], v)
p = DataFrameParameter(model, df, scenario=scB)
with pytest.raises(ValueError):
p.setup()
def test_parameter_df_json_load(model, tmpdir):
# Daily time-step
index = pd.date_range('2015-01-01', periods=365, freq='D', name='date')
df = pd.DataFrame(np.random.rand(365), index=index, columns=['data'])
df_path = tmpdir.join('df.csv')
df.to_csv(str(df_path))
data = {
'type': 'dataframe',
'url': str(df_path),
'index_col': 'date',
'parse_dates': True,
}
p = load_parameter(model, data)
p.setup()
def test_parameter_df_embed_load(model):
# Daily time-step
index = pd.date_range('2015-01-01', periods=365, freq='D', name='date')
df = pd.DataFrame(np.random.rand(365), index=index, columns=['data'])
# Save to JSON and load. This is the format we support loading as embedded data
df_data = df.to_json(date_format="iso")
# Removing the time information from the dataset for testing purposes
df_data = df_data.replace('T00:00:00.000Z', '')
df_data = json.loads(df_data)
data = {
'type': 'dataframe',
'data': df_data,
'parse_dates': True,
}
p = load_parameter(model, data)
p.setup()
def test_simple_json_parameter_reference():
# note that parameters in the "parameters" section cannot be literals
model = load_model("parameter_reference.json")
max_flow = model.nodes["supply1"].max_flow
assert(isinstance(max_flow, ConstantParameter))
assert(max_flow.value(None, None) == 125.0)
cost = model.nodes["demand1"].cost
assert(isinstance(cost, ConstantParameter))
assert(cost.value(None, None) == -10.0)
assert(len(model.parameters) == 4) # 4 parameters defined
def test_threshold_parameter(simple_linear_model):
model = simple_linear_model
model.timestepper.delta = 150
scenario = Scenario(model, "Scenario", size=2)
class DummyRecorder(Recorder):
def __init__(self, model, value, *args, **kwargs):
super(DummyRecorder, self).__init__(model, *args, **kwargs)
self.val = value
def setup(self):
super(DummyRecorder, self).setup()
num_comb = len(model.scenarios.combinations)
self.data = np.empty([len(model.timestepper), num_comb], dtype=np.float64)
def after(self):
timestep = model.timestepper.current
self.data[timestep.index, :] = self.val
threshold = 10.0
values = [50.0, 60.0]
rec1 = DummyRecorder(model, threshold-5, name="rec1") # below
rec2 = DummyRecorder(model, threshold, name="rec2") # equal
rec3 = DummyRecorder(model, threshold+5, name="rec3") # above
expected = [
("LT", (1, 0, 0)),
("GT", (0, 0, 1)),
("EQ", (0, 1, 0)),
("LE", (1, 1, 0)),
("GE", (0, 1, 1)),
]
for predicate, (value_lt, value_eq, value_gt) in expected:
for rec in (rec1, rec2, rec3):
param = RecorderThresholdParameter(model, rec, threshold, values=values, predicate=predicate)
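            # Pick the expected value via the comparison dunder matching the predicate
            # (False/True indexes values[0]/values[1]).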
e_val = values[getattr(rec.val, "__{}__".format(predicate.lower()))(threshold)]
e = np.ones([len(model.timestepper), len(model.scenarios.get_combinations())]) * e_val
e[0, :] = values[1] # first timestep is always "on"
r = AssertionRecorder(model, param, expected_data=e)
r.name = "assert {} {} {}".format(rec.val, predicate, threshold)
model.run()
def test_constant_from_df():
"""
Test that a dataframe can be used to provide data to ConstantParameter (single values).
"""
model = load_model('simple_df.json')
assert isinstance(model.nodes['demand1'].max_flow, ConstantParameter)
assert isinstance(model.nodes['demand1'].cost, ConstantParameter)
ts = model.timestepper.next()
si = ScenarioIndex(0, np.array([0], dtype=np.int32))
np.testing.assert_allclose(model.nodes['demand1'].max_flow.value(ts, si), 10.0)
np.testing.assert_allclose(model.nodes['demand1'].cost.value(ts, si), -10.0)
def test_constant_from_shared_df():
"""
Test that a shared dataframe can be used to provide data to ConstantParameter (single values).
"""
model = load_model('simple_df_shared.json')
assert isinstance(model.nodes['demand1'].max_flow, ConstantParameter)
assert isinstance(model.nodes['demand1'].cost, ConstantParameter)
ts = model.timestepper.next()
si = ScenarioIndex(0, np.array([0], dtype=np.int32))
np.testing.assert_allclose(model.nodes['demand1'].max_flow.value(ts, si), 10.0)
np.testing.assert_allclose(model.nodes['demand1'].cost.value(ts, si), -10.0)
def test_constant_from_multiindex_df():
"""
Test that a dataframe can be used to provide data to ConstantParameter (single values).
"""
model = load_model('multiindex_df.json')
assert isinstance(model.nodes['demand1'].max_flow, ConstantParameter)
assert isinstance(model.nodes['demand1'].cost, ConstantParameter)
ts = model.timestepper.next()
si = ScenarioIndex(0, np.array([0], dtype=np.int32))
np.testing.assert_allclose(model.nodes['demand1'].max_flow.value(ts, si), 10.0)
np.testing.assert_allclose(model.nodes['demand1'].cost.value(ts, si), -100.0)
def test_parameter_registry_overwrite(model):
# define a parameter
class NewParameter(Parameter):
DATA = 42
def __init__(self, model, values, *args, **kwargs):
super(NewParameter, self).__init__(model, *args, **kwargs)
self.values = values
NewParameter.register()
# re-define a parameter
class NewParameter(IndexParameter):
DATA = 43
def __init__(self, model, values, *args, **kwargs):
super(NewParameter, self).__init__(model, *args, **kwargs)
self.values = values
NewParameter.register()
data = {
"type": "new",
"values": 0
}
parameter = load_parameter(model, data)
# parameter is instance of new class, not old class
assert(isinstance(parameter, NewParameter))
assert(parameter.DATA == 43)
def test_invalid_parameter_values():
"""
Test that `load_parameter_values` returns a ValueError rather than KeyError.
This is useful to catch and give useful messages when no valid reference to
a data location is given.
Regression test for Issue #247 (https://github.com/pywr/pywr/issues/247)
"""
from pywr.parameters._parameters import load_parameter_values
m = Model()
data = {'name': 'my_parameter', 'type': 'AParameterThatShouldHaveValues'}
with pytest.raises(ValueError):
        load_parameter_values(m, data)
class Test1DPolynomialParameter:
""" Tests for `Polynomial1DParameter` """
def test_init(self, simple_storage_model):
""" Test initialisation raises error with too many keywords """
stg = simple_storage_model.nodes['Storage']
param = ConstantParameter(simple_storage_model, 2.0)
with pytest.raises(ValueError):
# Passing both "parameter" and "storage_node" is invalid
Polynomial1DParameter(simple_storage_model, [0.5, np.pi], parameter=param, storage_node=stg)
def test_1st_order_with_parameter(self, simple_linear_model):
""" Test 1st order with a `Parameter` """
model = simple_linear_model
x = 2.0
p1 = Polynomial1DParameter(model, [0.5, np.pi], parameter=ConstantParameter(model, x))
@assert_rec(model, p1)
def expected_func(timestep, scenario_index):
return 0.5 + np.pi * x
model.run()
def test_2nd_order_with_parameter(self, simple_linear_model):
""" Test 2nd order with a `Parameter` """
model = simple_linear_model
x = 2.0
px = ConstantParameter(model, x)
p1 = Polynomial1DParameter(model, [0.5, np.pi, 3.0], parameter=px)
@assert_rec(model, p1)
def expected_func(timestep, scenario_index):
return 0.5 + np.pi*x + 3.0*x**2
model.run()
def test_1st_order_with_storage(self, simple_storage_model):
""" Test with a `Storage` node """
model = simple_storage_model
stg = model.nodes['Storage']
x = stg.initial_volume
p1 = Polynomial1DParameter(model, [0.5, np.pi], storage_node=stg)
p2 = Polynomial1DParameter(model, [0.5, np.pi], storage_node=stg, use_proportional_volume=True)
# Test with absolute storage
@assert_rec(model, p1)
def expected_func(timestep, scenario_index):
return 0.5 + np.pi*x
# Test with proportional storage
@assert_rec(model, p2, name="proportionalassertion")
def expected_func(timestep, scenario_index):
return 0.5 + np.pi * x/stg.max_volume
model.setup()
model.step()
def test_load(self, simple_linear_model):
model = simple_linear_model
x = 1.5
data = {
"type": "polynomial1d",
"coefficients": [0.5, 2.5],
"parameter": {
"type": "constant",
"value": x
}
}
p1 = load_parameter(model, data)
@assert_rec(model, p1)
def expected_func(timestep, scenario_index):
return 0.5 + 2.5*x
model.run()
def test_load_with_scaling(self, simple_linear_model):
model = simple_linear_model
x = 1.5
data = {
"type": "polynomial1d",
"coefficients": [0.5, 2.5],
"parameter": {
"type": "constant",
"value": x
},
"scale": 1.25,
"offset": 0.75
}
xscaled = x*1.25 + 0.75
p1 = load_parameter(model, data)
@assert_rec(model, p1)
def expected_func(timestep, scenario_index):
return 0.5 + 2.5*xscaled
model.run()
def test_interpolated_parameter(simple_linear_model):
model = simple_linear_model
model.timestepper.start = "1920-01-01"
model.timestepper.end = "1920-01-12"
p1 = ArrayIndexedParameter(model, [0,1,2,3,4,5,6,7,8,9,10,11])
p2 = InterpolatedParameter(model, p1, [0, 5, 10, 11], [0, 5*2, 10*3, 2])
@assert_rec(model, p2)
def expected_func(timestep, scenario_index):
values = [0, 2, 4, 6, 8, 10, 14, 18, 22, 26, 30, 2]
return values[timestep.index]
model.run()
class Test2DStoragePolynomialParameter:
def test_1st(self, simple_storage_model):
""" Test 1st order """
model = simple_storage_model
stg = model.nodes['Storage']
x = 2.0
y = stg.initial_volume
coefs = [[0.5, np.pi], [2.5, 0.3]]
p1 = Polynomial2DStorageParameter(model, coefs, stg, ConstantParameter(model, x))
@assert_rec(model, p1)
def expected_func(timestep, scenario_index):
            return 0.5 + np.pi*x + 2.5*y + 0.3*x*y
model.setup()
model.step()
def test_load(self, simple_storage_model):
model = simple_storage_model
stg = model.nodes['Storage']
x = 2.0
y = stg.initial_volume/stg.max_volume
data = {
"type": "polynomial2dstorage",
"coefficients": [[0.5, np.pi], [2.5, 0.3]],
"use_proportional_volume": True,
"parameter": {
"type": "constant",
"value": x
},
"storage_node": "Storage"
}
p1 = load_parameter(model, data)
@assert_rec(model, p1)
def expected_func(timestep, scenario_index):
            return 0.5 + np.pi*x + 2.5*y + 0.3*x*y
model.setup()
model.step()
def test_load_wth_scaling(self, simple_storage_model):
model = simple_storage_model
stg = model.nodes['Storage']
x = 2.0
y = stg.initial_volume/stg.max_volume
data = {
"type": "polynomial2dstorage",
"coefficients": [[0.5, np.pi], [2.5, 0.3]],
"use_proportional_volume": True,
"parameter": {
"type": "constant",
"value": x
},
"storage_node": "Storage",
"storage_scale": 1.3,
"storage_offset": 0.75,
"parameter_scale": 1.25,
"parameter_offset": -0.5
}
p1 = load_parameter(model, data)
# Scaled parameters
x = x*1.25 - 0.5
y = y*1.3 + 0.75
@assert_rec(model, p1)
def expected_func(timestep, scenario_index):
            return 0.5 + np.pi*x + 2.5*y + 0.3*x*y
model.setup()
model.step()
class TestMinMaxNegativeParameter:
@pytest.mark.parametrize("ptype,profile", [
("max", list(range(-10, 356))),
("min", list(range(0, 366))),
("negative", list(range(-366, 0))),
("negativemax", list(range(-366, 0))),
])
def test_parameter(cls, simple_linear_model, ptype,profile):
model = simple_linear_model
model.timestepper.start = "2017-01-01"
model.timestepper.end = "2017-01-15"
data = {
"type": ptype,
"parameter": {
"name": "raw",
"type": "dailyprofile",
"values": profile,
}
}
if ptype in ("max", "min"):
data["threshold"] = 3
func = {"min": min, "max": max, "negative": lambda t,x: -x, "negativemax": lambda t,x: max(t, -x)}[ptype]
model.nodes["Input"].max_flow = parameter = load_parameter(model, data)
model.nodes["Output"].max_flow = 9999
model.nodes["Output"].cost = -100
daily_profile = model.parameters["raw"]
@assert_rec(model, parameter)
def expected(timestep, scenario_index):
value = daily_profile.get_value(scenario_index)
return func(3, value)
model.run()
def test_ocptt(simple_linear_model):
model = simple_linear_model
inpt = model.nodes["Input"]
s1 = Scenario(model, "scenario 1", size=3)
    s2 = Scenario(model, "scenario 2", size=2)
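    # Broadcast to a (num timesteps, scenario size) array of parameter values.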
x = np.arange(len(model.timestepper)).reshape([len(model.timestepper), 1]) + 5
y = np.arange(s1.size).reshape([1, s1.size])
z = x * y ** 2
p = ArrayIndexedScenarioParameter(model, s1, z)
inpt.max_flow = p
model.setup()
model.reset()
model.step()
values1 = [p.get_value(scenario_index) for scenario_index in model.scenarios.combinations]
values2 = list(p.get_all_values())
assert_allclose(values1, [0, 0, 5, 5, 20, 20])
assert_allclose(values2, [0, 0, 5, 5, 20, 20])
class TestThresholdParameters:
def test_storage_threshold_parameter(self, simple_storage_model):
""" Test StorageThresholdParameter """
m = simple_storage_model
data = {
"type": "storagethreshold",
"storage_node": "Storage",
"threshold": 10.0,
"predicate": ">"
}
p1 = load_parameter(m, data)
si = ScenarioIndex(0, np.array([0], dtype=np.int32))
m.nodes['Storage'].initial_volume = 15.0
m.setup()
# Storage > 10
assert p1.index(m.timestepper.current, si) == 1
m.nodes['Storage'].initial_volume = 5.0
m.setup()
# Storage < 10
assert p1.index(m.timestepper.current, si) == 0
def test_node_threshold_parameter2(self, simple_linear_model):
model = simple_linear_model
model.nodes["Input"].max_flow = ArrayIndexedParameter(model, np.arange(0, 20))
model.nodes["Output"].cost = -10.0
model.timestepper.start = "1920-01-01"
model.timestepper.end = "1920-01-15"
model.timestepper.delta = 1
threshold = 5.0
parameters = {}
for predicate in (">", "<", "="):
data = {
"type": "nodethreshold",
"node": "Output",
"threshold": 5.0,
"predicate": predicate,
# we need to define values so AssertionRecorder can be used
"values": [0.0, 1.0],
}
parameter = load_parameter(model, data)
            parameter.name = "nodethreshold {}".format(predicate)
parameters[predicate] = parameter
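        # The parameter compares the previous timestep's flow with the threshold, so the
        # expected flows are shifted by one; the leading -1 is a placeholder for the first step.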
if predicate == ">":
expected_data = (np.arange(-1, 20) > threshold).astype(int)
elif predicate == "<":
expected_data = (np.arange(-1, 20) < threshold).astype(int)
else:
expected_data = (np.arange(-1, 20) == threshold).astype(int)
expected_data[0] = 0 # previous flow in initial timestep is undefined
expected_data = expected_data[:, np.newaxis]
rec = AssertionRecorder(model, parameter, expected_data=expected_data, name="assertion recorder {}".format(predicate))
model.run()
@pytest.mark.parametrize("threshold, ratchet", [
[5.0, False],
[{"type": "constant", "value": 5.0}, False],
[{"type": "constant", "value": 5.0}, True],
], ids=["double", "parameter", "parameter-ratchet"])
def test_parameter_threshold_parameter(self, simple_linear_model, threshold, ratchet):
""" Test ParameterThresholdParameter """
m = simple_linear_model
m.nodes['Input'].max_flow = 10.0
m.nodes['Output'].cost = -10.0
data = {
"type": "parameterthreshold",
"parameter": {
"type": "constant",
"value": 3.0
},
"threshold": threshold,
"predicate": "<",
"ratchet": ratchet
}
p1 = load_parameter(m, data)
si = ScenarioIndex(0, np.array([0], dtype=np.int32))
# Triggered initial 3 < 5
m.setup()
m.step()
assert p1.index(m.timestepper.current, si) == 1
# Update parameter, now 8 > 5; not triggered.
p1.param.set_double_variables(np.array([8.0,]))
m.step()
# If using a ratchet the trigger remains on.
assert p1.index(m.timestepper.current, si) == (1 if ratchet else 0)
# Resetting the model resets the ratchet too.
m.reset()
m.step()
# flow < 5
assert p1.index(m.timestepper.current, si) == 0
def test_orphaned_components(simple_linear_model):
model = simple_linear_model
model.nodes["Input"].max_flow = ConstantParameter(model, 10.0)
result = model.find_orphaned_parameters()
assert(not result)
# assert that warning not raised by check
with pytest.warns(None) as record:
model.check()
for w in record:
if isinstance(w, OrphanedParameterWarning):
pytest.fail("OrphanedParameterWarning raised unexpectedly!")
# add some orphans
orphan1 = ConstantParameter(model, 5.0)
orphan2 = ConstantParameter(model, 10.0)
orphans = {orphan1, orphan2}
result = model.find_orphaned_parameters()
assert(orphans == result)
with pytest.warns(OrphanedParameterWarning):
model.check()
def test_deficit_parameter():
"""Test DeficitParameter
Here we test both uses of the DeficitParameter:
1) Recording the deficit for a node each timestep
2) Using yesterday's deficit to control today's flow
"""
model = load_model("deficit.json")
model.run()
max_flow = np.array([5, 6, 7, 8, 9, 10, 11, 12, 11, 10, 9, 8])
demand = 10.0
supplied = np.minimum(max_flow, demand)
expected = demand - supplied
actual = model.recorders["deficit_recorder"].data
assert_allclose(expected, actual[:,0])
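    # Yesterday's deficit is shifted by one timestep; the first day has no previous deficit.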
expected_yesterday = [0]+list(expected[0:-1])
actual_yesterday = model.recorders["yesterday_recorder"].data
assert_allclose(expected_yesterday, actual_yesterday[:,0])
class TestHydroPowerTargets:
def test_target_json(self):
""" Test loading a HydropowerTargetParameter from JSON. """
model = load_model("hydropower_target_example.json")
si = ScenarioIndex(0, np.array([0], dtype=np.int32))
        # 30 time-steps are run so that the head rises high enough for the flow to hit
        # the max_flow constraint. The first few time-steps are also bound by the min_flow constraint.
for i in range(30):
model.step()
rec = model.recorders["turbine1_energy"]
param = model.parameters["turbine1_discharge"]
turbine1 = model.nodes["turbine1"]
assert turbine1.flow[0] > 0
if np.allclose(turbine1.flow[0], 500.0):
# If flow is bounded by min_flow then more HP is produced.
assert rec.data[i, 0] > param.target.get_value(si)
elif np.allclose(turbine1.flow[0], 1000.0):
# If flow is bounded by max_flow then less HP is produced.
assert rec.data[i, 0] < param.target.get_value(si)
else:
# If flow is within the bounds target is met exactly.
assert_allclose(rec.data[i, 0], param.target.get_value(si))
|
gpl-3.0
| -1,460,923,139,671,873,500 | 33.107919 | 130 | 0.604152 | false |
mushorg/tanner
|
tanner/config.py
|
1
|
1119
|
import logging
import os
import sys
import yaml
LOGGER = logging.getLogger(__name__)
class TannerConfig:
config = None
@staticmethod
def read_config(path):
config_values = {}
try:
with open(path, "r") as f:
config_values = yaml.load(f, Loader=yaml.FullLoader)
except yaml.parser.ParserError as e:
print("Couldn't properly parse the config file. Please use properly formatted YAML config.")
sys.exit(1)
return config_values
@staticmethod
def set_config(config_path):
if not os.path.exists(config_path):
print("Config file {} doesn't exist. Check the config path or use default".format(config_path))
sys.exit(1)
TannerConfig.config = TannerConfig.read_config(config_path)
@staticmethod
def get(section, value):
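        # Fall back to the bundled defaults when the key is missing or no user config was loaded.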
try:
res = TannerConfig.config[section][value]
except (KeyError, TypeError):
res = DEFAULT_CONFIG[section][value]
return res
DEFAULT_CONFIG = TannerConfig.read_config("/opt/tanner/data/config.yaml")
|
gpl-3.0
| -722,510,278,253,578,000 | 25.642857 | 107 | 0.624665 | false |
ortoloco/jordbruksmark
|
jordbruksmark/admin.py
|
1
|
1460
|
# -*- coding: utf-8 -*-
from django.contrib import admin, messages
from jordbruksmark.models import *
class ParzellenAdmin(admin.ModelAdmin):
list_display = ["__unicode__", "name"]
search_fields = ["id", "code", "name"]
class DuengungsstufenAdmin(admin.ModelAdmin):
list_display = ["__unicode__"]
search_fields = ["id", "name"]
class DuengerAdmin(admin.ModelAdmin):
list_display = ["__unicode__"]
search_fields = ["id", "name"]
class FamilienAdmin(admin.ModelAdmin):
list_display = ["__unicode__"]
search_fields = ["id", "name"]
class KulturenAdmin(admin.ModelAdmin):
list_display = ["__unicode__", "familie_name"]
search_fields = ["id", "name", "familie__name"]
def familie_name(self, obj):
return obj.familie.name
familie_name.admin_order_field = 'familie__name'
class WochenMengenAdmin(admin.ModelAdmin):
list_display = ["__unicode__", "menge"]
search_fields = ["id", "woche", "kultur__name"]
class SaetzeAdmin(admin.ModelAdmin):
list_display = ["__unicode__", "sorte"]
search_fields = ["id", "sorte", "kultur__name", "nummer"]
admin.site.register(Parzelle, ParzellenAdmin)
admin.site.register(Duengungsstufe, DuengungsstufenAdmin)
admin.site.register(Duenger, DuengerAdmin)
admin.site.register(Familie, FamilienAdmin)
admin.site.register(Kultur, KulturenAdmin)
admin.site.register(WochenMenge, WochenMengenAdmin)
admin.site.register(Satz,SaetzeAdmin)
|
gpl-3.0
| 5,250,728,470,260,427,000 | 32.953488 | 61 | 0.677397 | false |
Ilphrin/TuxleTriad
|
Menu.py
|
1
|
16142
|
# coding: utf-8
import pygame
import os
import sys
import gettext
from functions import *
from color import *
from pygame.locals import *
from game import Application
from Sound import Sound
from Text import Text
from Buttons import Button
from listOfCards import *
from Card import Card
pygame.init()
class Menu(pygame.sprite.Sprite):
def __init__(self, width, height):
self.FONT = "Playball.ttf"
# We create the window
self.width = width
self.height = height
fullscreen = pygame.NOFRAME
self.dimension = (self.width, self.height)
self.screen = pygame.display.set_mode(self.dimension, fullscreen)
pygame.display.set_caption("TuxleTriad")
self._load_translation()
self.bkgrnd, self.bkgrndRect = loadImage("background.jpg")
self.bkgrndRect = self.bkgrnd.get_rect()
# The Clock of the game, to manage the frame-rate
self.clock = pygame.time.Clock()
self.fps = 30
# We start the Sound object, playing music and sounds.
self.sound = Sound()
        # Keeps track of the running game so it can be resumed after a pause.
self.app = None
self.main()
def main(self):
elemText = [_("Play"), _("Options"), _("Rules"), _("About"),
_("Quit Game")]
self.menu = []
for elem in elemText:
self.menu.append(Text(elem, self.FONT, white, 40))
posx = 400
posy = 400 - (60 * len(elemText))
for elem in self.menu:
elem.rect.center = ((posx, posy))
posy += 100
pygame.event.clear()
self.updateMenu()
while 1:
pygame.display.flip()
deactivate()
event = pygame.event.wait()
if event.type == MOUSEBUTTONUP:
self.clicked()
elif event.type == QUIT:
self.quitGame()
self.clock.tick(self.fps)
def updateMenu(self):
self.screen.blit(self.bkgrnd, self.bkgrndRect)
for i in range(len(self.menu)):
self.screen.blit(self.menu[i].surface, self.menu[i].rect)
self.clock.tick(self.fps)
def quitGame(self):
setConfig(self.sound.volume)
pygame.quit()
sys.exit()
def oldMenu(self):
while(1):
for button in self.menu:
button.rect.centerx -= 100 - self.fps
if (button.rect.centerx <= - 500):
return;
self.updateMenu()
pygame.display.flip()
def clicked(self):
for button in self.menu:
if button.rect.collidepoint(pygame.mouse.get_pos()):
self.sound.clicMenu.play()
if button.text == _(u"Quit Game"):
self.quitGame()
self.oldMenu()
if button.text == _(u"Play"):
self.play()
elif button.text == _(u"Options"):
self.options()
elif button.text == _(u"Rules"):
self.rules()
elif button.text == _(u"About"):
self.about()
self.main()
def play(self):
"""User clicked on "Play" """
if self.app != None:
texts = [_("Continue"),_("Adventure"), _("Solo"),
_("Hot Seat"), _("Back")]
else:
texts = [_("Adventure"), _("Solo"), _("Hot Seat"), _("Back")]
length = len(texts)
if self.app != None:
textPos = [(250, 100), (250,200), (250, 300), (250,400),
(550, 500)]
else:
textPos = [(250, 100), (250,200), (250, 300), (550, 500)]
self.menu = []
for i in range(length):
self.menu.append(Text(texts[i], self.FONT, white, 45))
self.menu[i].rect.topleft = textPos[i]
self.updateMenu()
pygame.display.flip()
self.clock.tick(self.fps)
while 1:
event = pygame.event.wait()
if event.type == QUIT:
pygame.quit()
sys.exit()
elif event.type == MOUSEBUTTONUP:
coordinates = pygame.mouse.get_pos()
for i in range(length):
if self.menu[i].rect.collidepoint(coordinates):
self.sound.clicMenu.play()
self.oldMenu()
if self.menu[i].text == _("Adventure"):
return
elif self.menu[i].text == _("Solo"):
return
elif self.menu[i].text == _("Hot Seat"):
self.hotSeat()
elif self.menu[i].text == _("Back"):
return
elif self.menu[i].text == _("Continue"):
self.app.main()
def options(self):
texts = [_("Audio"), _("Sounds"), _("Music"), _("Back")]
length = len(texts)
textsPos = [(320, 100), (100, 200), (100, 300), (550, 500)]
self.menu = []
for i in range(length):
self.menu.append(Text(texts[i], self.FONT, white, 50))
self.menu[i].rect.topleft = textsPos[i]
bar1, bar1Rect = loadImage("barSound.jpg")
bar2, bar2Rect = loadImage("barSound.jpg")
bar1Rect.topleft = (300, 220)
bar2Rect.topleft = (300, 320)
bars = [bar1Rect, bar2Rect]
        # X coordinates, relative to the bar, of the beginning and end
        # of each volume cursor's travel.
MIN_VOLUME = 15
MAX_VOLUME = 240
# X absolute coordinates of the volume cursor.
MIN = bars[0].x + MIN_VOLUME
MAX = bars[0].x + MAX_VOLUME
cursor1, cursor1Rect = loadImage("cursorSound.png")
cursor2, cursor2Rect = loadImage("cursorSound.png")
cursor1Rect.topleft = \
(bar1Rect.x + 225 * self.sound.soundVolume, bar1Rect.y - 23)
cursor2Rect.topleft = \
(bar2Rect.x + 225 * self.sound.musicVolume, bar2Rect.y - 23)
cursors = [cursor1Rect, cursor2Rect]
self.screen.blit(self.bkgrnd, self.bkgrndRect)
self.screen.blit(bar1, bar1Rect)
self.screen.blit(bar2, bar2Rect)
self.screen.blit(cursor1, cursors[0])
self.screen.blit(cursor2, cursors[1])
for i in range(length):
self.screen.blit(self.menu[i].surface, self.menu[i].rect)
pygame.display.update()
move = 0
while 1:
event = pygame.event.wait()
mousex, mousey = pygame.mouse.get_pos()
if event.type == QUIT:
self.quitGame()
elif event.type == MOUSEBUTTONDOWN:
move = 1
reactivate()
elif event.type == MOUSEBUTTONUP:
move = 0
deactivate()
for i in range(len(bars)):
if move == 1 and bars[i].collidepoint((mousex, mousey)):
if MIN <= mousex <= MAX:
cursors[i].centerx = mousex
elif mousex > bars[i].x + MAX_VOLUME:
cursors[i].centerx = bars[i].x + MAX_VOLUME
else:
cursors[i].centerx = bars[i].x + MIN_VOLUME
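                    # Map the cursor offset (0-225 px along the bar) to a 0.0-1.0 volume.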
volume = cursors[i].centerx - MIN
if volume != 0:
volume = (volume / 2.25) / 100.0
assert (0.0 <= volume <= 1.0)
if i == 0:
self.sound.soundVolume = volume
self.sound.playPutCard()
self.sound.update()
elif i == 1:
self.sound.musicVolume = volume
self.sound.update()
self.screen.blit(self.bkgrnd, self.bkgrndRect)
self.screen.blit(bar1, bar1Rect)
self.screen.blit(bar2, bar2Rect)
self.screen.blit(cursor1, cursors[0])
self.screen.blit(cursor2, cursors[1])
for j in range(4):
self.screen.blit(self.menu[j].surface,\
self.menu[j].rect)
pygame.display.update()
self.clock.tick(self.fps)
if move and self.menu[3].rect.collidepoint((mousex, mousey)):
del bar1, bar2, bars, cursor1, cursor2, cursors
self.oldMenu()
self.sound.clicMenu.play()
return
def about(self):
page = 1
allPage = []
pageList = []
index = 0
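        # Group the cards three to a page for the About screen.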
for number in range(len(allCards)):
pageList.append(Card(number, 1))
index += 1
            if index == 3 or number == len(allCards) - 1:
allPage.append(pageList)
del pageList
pageList = []
index = 0
maxPage = len(allPage)
txtPage = str(page) + "/" + str(maxPage)
navigation = [_("Back"), _("Next"), _("Quit"),
"Programming:", "Kevin \"Ilphrin\" Pellet",
"Graphics:", "Yunero Kisapsodos",
txtPage]
navigationPos = [(80,550), (650,550), (660,40), (630, 100),
(640, 130), (630, 200), (640, 230), (350,550)]
self.menu = []
for i in range(len(navigation)):
if 2 < i < 7:
size = 12
font = "rimouski sb.ttf"
else:
font = self.FONT
size = 30
self.menu.append(Text(navigation[i], font, white, size))
self.menu[i].rect.topleft = navigationPos[i]
cardPos = [(50,50), (50,200), (50, 350)]
self.screen.blit(self.bkgrnd, self.bkgrndRect)
for element in self.menu:
self.screen.blit(element.surface,element.rect)
for elem in range(len(allPage[page-1])):
card = allPage[page-1][elem]
card.rect.topleft = cardPos[elem]
card.About.rect.topleft = card.rect.topright
for elem in allPage[page-1]:
self.screen.blit(elem.image, elem.rect)
self.screen.blit(elem.About.surface, elem.About.rect)
while 1:
self.clock.tick(self.fps)
pygame.display.flip()
event = pygame.event.wait()
if event.type == MOUSEBUTTONUP:
coords = pygame.mouse.get_pos()
for button in self.menu:
if button.rect.collidepoint(coords):
if button.text == _("Back"):
if page > 1:
page -= 1
self.sound.putcard.play()
if button.text == _("Next"):
if page < maxPage:
page += 1
self.sound.putcard.play()
if button.text == _("Quit"):
self.oldMenu()
return
txtPage = str(page) + "/" + str(maxPage)
self.menu[7] = Text(txtPage, self.FONT, white, 30)
self.menu[7].rect.topleft = navigationPos[7]
self.screen.blit(self.bkgrnd, self.bkgrndRect)
for element in self.menu:
self.screen.blit(element.surface,element.rect)
for elem in range(len(allPage[page-1])):
card = allPage[page-1][elem]
card.rect.topleft = cardPos[elem]
card.About.rect.topleft = card.rect.topright
for elem in allPage[page-1]:
self.screen.blit(elem.image, elem.rect)
self.screen.blit(elem.About.surface,
elem.About.rect)
if event.type == QUIT:
self.quitGame()
def rules(self):
tutorialButton = Button(_(u"Tutorial"), self.FONT, white)
howtoButton = Button(_(u"How To"), self.FONT, white)
backButton = Button(_(u"Back"), self.FONT, white)
tutorialButton.rect.topleft = (250, 100)
howtoButton.rect.topleft = (250, 200)
backButton.rect.topleft = (550, 500)
self.menu = []
self.menu.append(tutorialButton)
self.menu.append(howtoButton)
self.menu.append(backButton)
self.updateMenu()
while (1):
self.clock.tick(self.fps)
pygame.display.flip()
event = pygame.event.wait()
if event.type == MOUSEBUTTONUP:
coords = pygame.mouse.get_pos()
for i in range(len(self.menu)):
if self.menu[i].rect.collidepoint(coords):
self.oldMenu()
if self.menu[i].text == _(u"Tutorial"):
self.main()
elif self.menu[i].text == _(u"How To"):
self.HowTo()
return
elif self.menu[i].text == _(u"Back"):
self.main()
elif event.type == QUIT:
self.quitGame()
def HowTo(self):
backButton = Button(_("Back"), self.FONT, white)
prevButton = Button(_("Prev"), self.FONT, white)
nextButton = Button(_("Next"), self.FONT, white)
page = 1
maxPage = 2
pageList = []
for i in range(maxPage):
pageList.append(pygame.image.load(getHowTo(i)))
pageRect = pageList[i - 1].get_rect()
pageRect.topleft = (-20, 0)
backButton.rect.topleft = (600, 40)
prevButton.rect.topleft = (80, 550)
nextButton.rect.topleft = (660, 550)
self.menu = []
self.menu.append(backButton)
self.menu.append(prevButton)
self.menu.append(nextButton)
self.updateMenu()
self.screen.blit(pageList[page - 1], pageRect)
while (1):
self.clock.tick(self.fps)
pygame.display.flip()
event = pygame.event.wait()
if event.type == MOUSEBUTTONUP:
coords = pygame.mouse.get_pos()
if backButton.rect.collidepoint(coords):
self.oldMenu()
return
elif prevButton.rect.collidepoint(coords) and page > 1:
page -= 1
elif nextButton.rect.collidepoint(coords) and page < maxPage:
page += 1
self.updateMenu()
self.screen.blit(pageList[page - 1], pageRect)
elif event.type == QUIT:
self.quitGame()
def _load_translation(self):
base_path = os.getcwd()
directory = os.path.join(base_path, 'translations')
print "Loading translations at: ", directory
params = {
'domain': 'tuxle-triad',
'fallback': True
}
if os.path.isdir(directory):
params.update({'localedir': directory})
translation = gettext.translation(**params)
translation.install("ngettext")
def solo(self):
"""1vsIA mode"""
print "Solo!"
def adventure(self):
"""Adventure mode against IA"""
print "Adventure!"
def hotSeat(self):
"""1vs1 mode"""
if self.app != None:
del self.app
Application(800, 600, self.screen, self.sound, self).main()
else:
Application(800, 600, self.screen, self.sound, self).main()
Menu(800, 600)
|
mit
| 1,228,710,710,715,892,200 | 36.714953 | 78 | 0.477016 | false |
AButenko/selenium_tests
|
test_gui/test_login.py
|
1
|
1921
|
# -*- coding: utf-8 -*-
# from django.conf import settings
import pytest
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.ui import WebDriverWait
from framework.fixtures import logger
from framework.gui.common.fixtures import browser
from framework.gui.common.tools import page_contain_assert
from framework.gui.loginpage import LoginPage
def test_login_default_user(logger, browser):
logger.info("Simple login test.")
loginpage = LoginPage(browser)
loginpage.login()
@pytest.mark.parametrize("user, psswd",[('',''), ('[email protected]', '')])
def test_login_any_user(logger, browser, user, psswd):
loginpage = LoginPage(browser)
browser.get(loginpage.login_page)
name, passwd = loginpage.enter_login_credentials(user, psswd)
# with tools.wait_for_page_load(browser):
passwd.send_keys(Keys.RETURN)
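    # HTML5 form validation flags empty required fields with the :invalid pseudo-class.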
invalid_input = browser.find_element_by_css_selector("input:invalid")
assert invalid_input.is_displayed()
# try:
# validate_email(user) # TODO use additional flag to check if email is validS
if not name:
assert invalid_input == name
elif not passwd:
assert invalid_input == passwd
# except ValidationError: # otherwise invalid input if in email field
# assert not browser.execute_script("return document.getElementById(\"username\").validity.valid") # javascript way to check the same
WebDriverWait(browser, 20).until(
EC.presence_of_element_located((By.XPATH, "//div[@class='resultlogin']/div[1]"))
)
assert browser.find_element_by_xpath("//div[@class='resultlogin']/div[1]").text == "Invalid Email or Password"
def test_logout_default_user(logger, browser):
loginpage = LoginPage(browser)
loginpage.logout()
|
bsd-3-clause
| -5,620,181,287,252,949,000 | 38.22449 | 138 | 0.72202 | false |
mcalmer/spacewalk
|
client/rhel/rhnlib/rhn/rpclib.py
|
1
|
24163
|
#
# This module contains all the RPC-related functions the RHN code uses
#
# Copyright (c) 2005--2018 Red Hat, Inc.
#
# This software is licensed to you under the GNU General Public License,
# version 2 (GPLv2). There is NO WARRANTY for this software, express or
# implied, including the implied warranties of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. You should have received a copy of GPLv2
# along with this software; if not, see
# http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
#
# Red Hat trademarks are not licensed under GPLv2. No permission is
# granted to use or replicate Red Hat trademarks that are incorporated
# in this software or its documentation.
#
__version__ = "$Revision$"
import socket
import re
import sys
from rhn import transports
from rhn.i18n import sstr
from rhn.UserDictCase import UserDictCase
try: # python2
import xmlrpclib
from types import ListType, TupleType, StringType, UnicodeType, DictType, DictionaryType
from urllib import splittype, splithost
except ImportError: # python3
import xmlrpc.client as xmlrpclib
ListType = list
TupleType = tuple
StringType = bytes
UnicodeType = str
DictType = dict
DictionaryType = dict
from urllib.parse import splittype, splithost
# Redirection handling
MAX_REDIRECTIONS = 5
def check_ipv6(n):
""" Returns true if n is IPv6 address, false otherwise. """
try:
socket.inet_pton(socket.AF_INET6, n)
return True
except:
return False
def split_host(hoststring):
""" Function used to split host information in an URL per RFC 2396
handle full hostname like user:passwd@host:port
"""
l = hoststring.split('@', 1)
host = None
port = None
user = None
passwd = None
if len(l) == 2:
hostport = l[1]
# userinfo present
userinfo = l[0].split(':', 1)
user = userinfo[0]
if len(userinfo) == 2:
passwd = userinfo[1]
else:
hostport = l[0]
# Now parse hostport
if hostport[0] == '[':
# IPv6 with port
host, port = re.split('(?<=\]):', hostport, 1)
host = host.lstrip('[').rstrip(']')
elif check_ipv6(hostport):
# just IPv6
host = hostport
else:
# IPv4
arr = hostport.split(':', 1)
host = arr[0]
if len(arr) == 2:
port = arr[1]
return (host, port, user, passwd)
def get_proxy_info(proxy):
if proxy == None:
raise ValueError("Host string cannot be null")
arr = proxy.split('://', 1)
if len(arr) == 2:
# scheme found, strip it
proxy = arr[1]
return split_host(proxy)
class MalformedURIError(IOError):
pass
# Originaly taken from xmlrpclib.ServerProxy, now changed most of the code
class Server:
"""uri [,options] -> a logical connection to an XML-RPC server
uri is the connection point on the server, given as
scheme://host/target.
The standard implementation always supports the "http" scheme. If
SSL socket support is available (Python 2.0), it also supports
"https".
If the target part and the slash preceding it are both omitted,
"/RPC2" is assumed.
The following options can be given as keyword arguments:
transport: a transport factory
encoding: the request encoding (default is UTF-8)
verbose: verbosity level
proxy: use an HTTP proxy
username: username for authenticated HTTP proxy
password: password for authenticated HTTP proxy
All 8-bit strings passed to the server proxy are assumed to use
the given encoding.
"""
# Default factories
_transport_class = transports.Transport
_transport_class_https = transports.SafeTransport
_transport_class_proxy = transports.ProxyTransport
_transport_class_https_proxy = transports.SafeProxyTransport
def __init__(self, uri, transport=None, encoding=None, verbose=0,
proxy=None, username=None, password=None, refreshCallback=None,
progressCallback=None, timeout=None):
# establish a "logical" server connection
#
# First parse the proxy information if available
#
if proxy != None:
(ph, pp, pu, pw) = get_proxy_info(proxy)
if pp is not None:
proxy = "%s:%s" % (ph, pp)
else:
proxy = ph
# username and password will override whatever was passed in the
# URL
if pu is not None and username is None:
username = pu
if pw is not None and password is None:
password = pw
self._uri = sstr(uri)
self._refreshCallback = None
self._progressCallback = None
self._bufferSize = None
self._proxy = proxy
self._username = username
self._password = password
self._timeout = timeout
if len(__version__.split()) > 1:
self.rpc_version = __version__.split()[1]
else:
self.rpc_version = __version__
self._reset_host_handler_and_type()
if transport is None:
self._allow_redirect = 1
transport = self.default_transport(self._type, proxy, username,
password, timeout)
else:
#
            # don't allow redirects on unknown transports; those should be
            # set up independently
#
self._allow_redirect = 0
self._redirected = None
self.use_handler_path = 1
self._transport = transport
self._trusted_cert_files = []
self._lang = None
self._encoding = encoding
self._verbose = verbose
self.set_refresh_callback(refreshCallback)
self.set_progress_callback(progressCallback)
        # referer: the handler which redirected us to the new handler
self.send_handler=None
self._headers = UserDictCase()
def default_transport(self, type, proxy=None, username=None, password=None,
timeout=None):
if proxy:
if type == 'https':
transport = self._transport_class_https_proxy(proxy,
proxyUsername=username, proxyPassword=password, timeout=timeout)
else:
transport = self._transport_class_proxy(proxy,
proxyUsername=username, proxyPassword=password, timeout=timeout)
else:
if type == 'https':
transport = self._transport_class_https(timeout=timeout)
else:
transport = self._transport_class(timeout=timeout)
return transport
def allow_redirect(self, allow):
self._allow_redirect = allow
def redirected(self):
if not self._allow_redirect:
return None
return self._redirected
def set_refresh_callback(self, refreshCallback):
self._refreshCallback = refreshCallback
self._transport.set_refresh_callback(refreshCallback)
def set_buffer_size(self, bufferSize):
self._bufferSize = bufferSize
self._transport.set_buffer_size(bufferSize)
def set_progress_callback(self, progressCallback, bufferSize=16384):
self._progressCallback = progressCallback
self._transport.set_progress_callback(progressCallback, bufferSize)
def _req_body(self, params, methodname):
return xmlrpclib.dumps(params, methodname, encoding=self._encoding)
def get_response_headers(self):
if self._transport:
return self._transport.headers_in
return None
def get_response_status(self):
if self._transport:
return self._transport.response_status
return None
def get_response_reason(self):
if self._transport:
return self._transport.response_reason
return None
def get_content_range(self):
"""Returns a dictionary with three values:
length: the total length of the entity-body (can be None)
first_byte_pos: the position of the first byte (zero based)
last_byte_pos: the position of the last byte (zero based)
The range is inclusive; that is, a response 8-9/102 means two bytes
"""
headers = self.get_response_headers()
if not headers:
return None
content_range = headers.get('Content-Range')
if not content_range:
return None
arr = filter(None, content_range.split())
assert arr[0] == "bytes"
assert len(arr) == 2
arr = arr[1].split('/')
assert len(arr) == 2
brange, total_len = arr
if total_len == '*':
# Per RFC, the server is allowed to use * if the length of the
# entity-body is unknown or difficult to determine
total_len = None
else:
total_len = int(total_len)
start, end = brange.split('-')
result = {
'length' : total_len,
'first_byte_pos' : int(start),
'last_byte_pos' : int(end),
}
return result
def accept_ranges(self):
headers = self.get_response_headers()
if not headers:
return None
if 'Accept-Ranges' in headers:
return headers['Accept-Ranges']
return None
def _reset_host_handler_and_type(self):
""" Reset the attributes:
self._host, self._handler, self._type
according the value of self._uri.
"""
# get the url
type, uri = splittype(self._uri)
if type is None:
raise MalformedURIError("missing protocol in uri")
# with a real uri passed in, uri will now contain "//hostname..." so we
# need at least 3 chars for it to maybe be ok...
if len(uri) < 3 or uri[0:2] != "//":
raise MalformedURIError
self._type = type.lower()
if self._type not in ("http", "https"):
raise IOError("unsupported XML-RPC protocol")
self._host, self._handler = splithost(uri)
if not self._handler:
self._handler = "/RPC2"
def _strip_characters(self, *args):
""" Strip characters, which are not allowed according:
http://www.w3.org/TR/2006/REC-xml-20060816/#charsets
From spec:
Char ::= #x9 | #xA | #xD | [#x20-#xD7FF] | [#xE000-#xFFFD] | [#x10000-#x10FFFF] /* any Unicode character, excluding the surrogate blocks, FFFE, and FFFF. */
"""
regexp = r'[\x00-\x09]|[\x0b-\x0c]|[\x0e-\x1f]'
result=[]
for item in args:
item_type = type(item)
if item_type == StringType or item_type == UnicodeType:
item = re.sub(regexp, '', sstr(item))
elif item_type == TupleType:
item = tuple(self._strip_characters(i) for i in item)
elif item_type == ListType:
item = [self._strip_characters(i) for i in item]
elif item_type == DictType or item_type == DictionaryType:
item = dict([(self._strip_characters(name, val)) for name, val in item.items()])
# else: some object - should take care of himself
# numbers - are safe
result.append(item)
if len(result) == 1:
return result[0]
else:
return tuple(result)
def _request(self, methodname, params):
""" Call a method on the remote server
we can handle redirections. """
# the loop is used to handle redirections
redirect_response = 0
retry = 0
self._reset_host_handler_and_type()
while 1:
if retry >= MAX_REDIRECTIONS:
raise InvalidRedirectionError(
"Unable to fetch requested Package")
# Clear the transport headers first
self._transport.clear_headers()
for k, v in self._headers.items():
self._transport.set_header(k, v)
self._transport.add_header("X-Info",
'RPC Processor (C) Red Hat, Inc (version %s)' %
self.rpc_version)
# identify the capability set of this client to the server
self._transport.set_header("X-Client-Version", 1)
if self._allow_redirect:
# Advertise that we follow redirects
#changing the version from 1 to 2 to support backward compatibility
self._transport.add_header("X-RHN-Transport-Capability",
"follow-redirects=3")
if redirect_response:
self._transport.add_header('X-RHN-Redirect', '0')
if self.send_handler:
self._transport.add_header('X-RHN-Path', self.send_handler)
request = self._req_body(self._strip_characters(params), methodname)
try:
response = self._transport.request(self._host, \
self._handler, request, verbose=self._verbose)
save_response = self._transport.response_status
except xmlrpclib.ProtocolError:
if self.use_handler_path:
raise
else:
save_response = sys.exc_info()[1].errcode
self._redirected = None
retry += 1
if save_response == 200:
# exit redirects loop and return response
break
elif save_response not in (301, 302):
# Retry pkg fetch
self.use_handler_path = 1
continue
# rest of loop is run only if we are redirected (301, 302)
self._redirected = self._transport.redirected()
self.use_handler_path = 0
redirect_response = 1
if not self._allow_redirect:
raise InvalidRedirectionError("Redirects not allowed")
if self._verbose:
print("%s redirected to %s" % (self._uri, self._redirected))
typ, uri = splittype(self._redirected)
if typ != None:
typ = typ.lower()
if typ not in ("http", "https"):
raise InvalidRedirectionError(
"Redirected to unsupported protocol %s" % typ)
#
# We forbid HTTPS -> HTTP for security reasons
# Note that HTTP -> HTTPS -> HTTP is allowed (because we compare
# the protocol for the redirect with the original one)
#
if self._type == "https" and typ == "http":
raise InvalidRedirectionError(
"HTTPS redirected to HTTP is not supported")
self._host, self._handler = splithost(uri)
if not self._handler:
self._handler = "/RPC2"
# Create a new transport for the redirected service and
# set up the parameters on the new transport
del self._transport
self._transport = self.default_transport(typ, self._proxy,
self._username, self._password, self._timeout)
self.set_progress_callback(self._progressCallback)
self.set_refresh_callback(self._refreshCallback)
self.set_buffer_size(self._bufferSize)
self.setlang(self._lang)
if self._trusted_cert_files != [] and \
hasattr(self._transport, "add_trusted_cert"):
for certfile in self._trusted_cert_files:
self._transport.add_trusted_cert(certfile)
# Then restart the loop to try the new entry point.
if isinstance(response, transports.File):
# Just return the file
return response
# an XML-RPC encoded data structure
if isinstance(response, TupleType) and len(response) == 1:
response = response[0]
return response
def __repr__(self):
return (
"<%s for %s%s>" %
(self.__class__.__name__, self._host, self._handler)
)
__str__ = __repr__
def __getattr__(self, name):
# magic method dispatcher
return _Method(self._request, name)
    # note: to call a remote object with a non-standard name, use
    # result = getattr(server, "strange-python-name")(args)
def set_transport_flags(self, transfer=0, encoding=0, **kwargs):
if not self._transport:
# Nothing to do
return
kwargs.update({
'transfer' : transfer,
'encoding' : encoding,
})
self._transport.set_transport_flags(**kwargs)
def get_transport_flags(self):
if not self._transport:
# Nothing to do
return {}
return self._transport.get_transport_flags()
def reset_transport_flags(self):
# Does nothing
pass
# Allow user-defined additional headers.
def set_header(self, name, arg):
if type(arg) in [ type([]), type(()) ]:
# Multivalued header
self._headers[name] = [str(a) for a in arg]
else:
self._headers[name] = str(arg)
def add_header(self, name, arg):
if name in self._headers:
vlist = self._headers[name]
if not isinstance(vlist, ListType):
vlist = [ vlist ]
else:
vlist = self._headers[name] = []
vlist.append(str(arg))
# Sets the i18n options
def setlang(self, lang):
self._lang = lang
if self._transport and hasattr(self._transport, "setlang"):
self._transport.setlang(lang)
# Sets the CA chain to be used
def use_CA_chain(self, ca_chain = None):
raise NotImplementedError("This method is deprecated")
def add_trusted_cert(self, certfile):
self._trusted_cert_files.append(certfile)
if self._transport and hasattr(self._transport, "add_trusted_cert"):
self._transport.add_trusted_cert(certfile)
def close(self):
if self._transport:
self._transport.close()
self._transport = None
# RHN GET server
class GETServer(Server):
def __init__(self, uri, transport=None, proxy=None, username=None,
password=None, client_version=2, headers={}, refreshCallback=None,
progressCallback=None, timeout=None):
Server.__init__(self, uri,
proxy=proxy,
username=username,
password=password,
transport=transport,
refreshCallback=refreshCallback,
progressCallback=progressCallback,
timeout=timeout)
self._client_version = client_version
self._headers = headers
# Back up the original handler, since we mangle it
self._orig_handler = self._handler
# Download resumption
self.set_range(offset=None, amount=None)
def _req_body(self, params, methodname):
if not params or len(params) < 1:
raise Exception("Required parameter channel not found")
# Strip the multiple / from the handler
h_comps = filter(lambda x: x != '', self._orig_handler.split('/'))
# Set the handler we are going to request
hndl = h_comps + ["$RHN", params[0], methodname] + list(params[1:])
self._handler = '/' + '/'.join(hndl)
#save the constructed handler in case of redirect
self.send_handler = self._handler
# Add headers
#override the handler to replace /XMLRPC with pkg path
if self._redirected and not self.use_handler_path:
self._handler = self._new_req_body()
for h, v in self._headers.items():
self._transport.set_header(h, v)
if self._offset is not None:
if self._offset >= 0:
brange = str(self._offset) + '-'
if self._amount is not None:
brange = brange + str(self._offset + self._amount - 1)
else:
# The last bytes
# amount is ignored in this case
brange = '-' + str(-self._offset)
self._transport.set_header('Range', "bytes=" + brange)
# Flag that we allow for partial content
self._transport.set_transport_flags(allow_partial_content=1)
# GET requests have empty body
return ""
def _new_req_body(self):
type, tmpuri = splittype(self._redirected)
site, handler = splithost(tmpuri)
return handler
def set_range(self, offset=None, amount=None):
if offset is not None:
try:
offset = int(offset)
except ValueError:
# Error
raise RangeError("Invalid value `%s' for offset" % offset, None, sys.exc_info()[2])
if amount is not None:
try:
amount = int(amount)
except ValueError:
# Error
raise RangeError("Invalid value `%s' for amount" % amount, None, sys.exc_info()[2])
if amount <= 0:
raise RangeError("Invalid value `%s' for amount" % amount)
self._amount = amount
self._offset = offset
def reset_transport_flags(self):
self._transport.set_transport_flags(allow_partial_content=0)
def __getattr__(self, name):
# magic method dispatcher
return SlicingMethod(self._request, name)
def default_transport(self, type, proxy=None, username=None, password=None,
timeout=None):
ret = Server.default_transport(self, type, proxy=proxy, username=username, password=password, timeout=timeout)
ret.set_method("GET")
return ret
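# Illustrative sketch, not part of the original module: how a GETServer is
# typically combined with SlicingMethod keyword arguments to fetch a byte
# range. The host, channel label, file name and method name below are
# made-up placeholders; any attribute access resolves through __getattr__.
def _example_ranged_fetch():
    server = GETServer("http://updates.example.com/XMLRPC")
    # offset/amount are turned into an HTTP Range header ('bytes=0-16383')
    # by _req_body() before the request is sent.
    return server.getPackage("example-channel", "example-package.rpm",
                             offset=0, amount=16384)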
class RangeError(Exception):
pass
class InvalidRedirectionError(Exception):
pass
def getHeaderValues(headers, name):
import mimetools
if not isinstance(headers, mimetools.Message):
if name in headers:
return [headers[name]]
return []
return [x.split(':', 1)[1].strip() for x in
headers.getallmatchingheaders(name)]
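# Illustrative sketch, not part of the original module: getHeaderValues()
# accepts either a plain mapping or a mimetools.Message; with a plain dict
# the lookup is a straight key access. The header value is a made-up example.
def _example_header_values():
    plain = {"X-RHN-Fault-Code": "-210"}
    return getHeaderValues(plain, "X-RHN-Fault-Code")  # ['-210']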
class _Method:
""" some magic to bind an XML-RPC method to an RPC server.
supports "nested" methods (e.g. examples.getStateName)
"""
def __init__(self, send, name):
self._send = send
self._name = name
def __getattr__(self, name):
return _Method(self._send, "%s.%s" % (self._name, name))
def __call__(self, *args):
return self._send(self._name, args)
def __repr__(self):
return (
"<%s %s (%s)>" %
(self.__class__.__name__, self._name, self._send)
)
__str__ = __repr__
class SlicingMethod(_Method):
"""
A "slicing method" allows for byte range requests
"""
def __init__(self, send, name):
_Method.__init__(self, send, name)
self._offset = None
def __getattr__(self, name):
return SlicingMethod(self._send, "%s.%s" % (self._name, name))
def __call__(self, *args, **kwargs):
self._offset = kwargs.get('offset')
self._amount = kwargs.get('amount')
# im_self is a pointer to self, so we can modify the class underneath
try:
self._send.im_self.set_range(offset=self._offset,
amount=self._amount)
except AttributeError:
pass
result = self._send(self._name, args)
# Reset "sticky" transport flags
try:
self._send.im_self.reset_transport_flags()
except AttributeError:
pass
return result
def reportError(headers):
""" Reports the error from the headers. """
errcode = 0
errmsg = ""
s = "X-RHN-Fault-Code"
if s in headers:
errcode = int(headers[s])
s = "X-RHN-Fault-String"
if s in headers:
_sList = getHeaderValues(headers, s)
if _sList:
_s = ''.join(_sList)
import base64
errmsg = "%s" % base64.decodestring(_s)
return errcode, errmsg
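# Illustrative sketch, not part of the original module: feeding reportError()
# a plain header dict. The fault code and string are made-up values; the
# fault string is base64-encoded, as the server would send it.
def _example_report_error():
    import base64
    headers = {
        "X-RHN-Fault-Code": "-210",
        "X-RHN-Fault-String": base64.encodestring("Invalid channel"),
    }
    return reportError(headers)  # (-210, 'Invalid channel')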
|
gpl-2.0
| 4,209,307,803,917,549,600 | 32.794406 | 169 | 0.573687 | false |
yunli2004/OpenClos
|
jnpr/openclos/util.py
|
2
|
8115
|
'''
Created on Aug 21, 2014
@author: moloyc
'''
import re
import os
import yaml
import platform
import datetime
import shutil
from netaddr import IPNetwork
import netifaces
from propLoader import propertyFileLocation
TWO_STAGE_CONFIGURATOR_DEFAULT_ATTEMPT=5
TWO_STAGE_CONFIGURATOR_DEFAULT_INTERVAL=30 # in seconds
TWO_STAGE_CONFIGURATOR_DEFAULT_VCP_LLDP_DELAY=40 # in seconds
def loadClosDefinition(closDefination = os.path.join(propertyFileLocation, 'closTemplate.yaml')):
'''
Loads clos definition from yaml file
'''
    stream = None
    try:
        stream = open(closDefination, 'r')
        yamlStream = yaml.load(stream)
        return yamlStream
    except (OSError, IOError) as e:
        print "File error:", e
    except (yaml.scanner.ScannerError) as e:
        print "YAML error:", e
    finally:
        if stream is not None:
            stream.close()
def isPlatformUbuntu():
#return 'ubuntu' in platform.platform().lower()
result = os.popen("grep -i ubuntu /etc/*-release").read()
return result is not None and len(result) > 0
def isPlatformCentos():
#return 'centos' in platform.platform().lower()
result = os.popen("grep -i centos /etc/*-release").read()
return result is not None and len(result) > 0
def isPlatformWindows():
return 'windows' in platform.platform().lower()
def backupDatabase(conf):
if conf is not None and 'dbUrl' in conf:
match = re.match(r"sqlite:\/\/\/(.*)", conf['dbUrl'])
if match is not None:
dbFileName = match.group(1)
if dbFileName != '':
timestamp = datetime.datetime.now().strftime('%Y%m%d-%H%M%S')
backupDbFileName = dbFileName + '.' + timestamp
shutil.copyfile(dbFileName, backupDbFileName)
def getMgmtIps(prefix, startingIP, mask, count):
'''
returns list of management IP for given number of devices
Keyword arguments:
    prefix -- ip prefix, example 1.2.3.4/24
    startingIP -- optional starting address, used together with mask instead of prefix
    mask -- optional prefix length that goes with startingIP
    count -- number of devices
'''
mgmtIps = []
cidr = None
if startingIP is not None and mask is not None:
cidr = startingIP + '/' + str(mask)
else:
cidr = prefix
if cidr is not None:
ipNetwork = IPNetwork(cidr)
ipNetworkList = list(ipNetwork)
start = ipNetworkList.index(ipNetwork.ip)
end = start + count
ipList = ipNetworkList[start:end]
for ip in ipList:
mgmtIps.append(str(ip) + '/' + str(ipNetwork.prefixlen))
return mgmtIps
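# Illustrative sketch, not part of the original module: computing management
# IPs for two devices from a made-up /24 prefix.
def _exampleGetMgmtIps():
    # Returns ['192.168.48.0/24', '192.168.48.1/24']
    return getMgmtIps('192.168.48.0/24', None, None, 2)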
def getMgmtIpsForLeaf():
return []
def isZtpStaged(conf):
if conf is not None and conf.get('deploymentMode') is not None:
return conf['deploymentMode'].get('ztpStaged', False)
return False
def getZtpStagedInterval(conf):
if isZtpStaged(conf) == True:
return conf['deploymentMode'].get('ztpStagedInterval', TWO_STAGE_CONFIGURATOR_DEFAULT_INTERVAL)
else:
return None
def getZtpStagedAttempt(conf):
if isZtpStaged(conf) == True:
return conf['deploymentMode'].get('ztpStagedAttempt', TWO_STAGE_CONFIGURATOR_DEFAULT_ATTEMPT)
else:
return None
def getTwoStageConfigurationCallback(conf):
if isZtpStaged(conf) == True:
return conf.get('twoStageConfigurationCallback')
else:
return None
def getVcpLldpDelay(conf):
if isZtpStaged(conf) == True:
return conf['deploymentMode'].get('ztpVcpLldpDelay', TWO_STAGE_CONFIGURATOR_DEFAULT_VCP_LLDP_DELAY)
else:
return None
def enumerateRoutableIpv4Addresses():
addrs = []
intfs = netifaces.interfaces()
for intf in intfs:
if intf != 'lo':
addrDict = netifaces.ifaddresses(intf)
ipv4AddrInfoList = addrDict.get(netifaces.AF_INET)
if ipv4AddrInfoList is not None:
for ipv4AddrInfo in ipv4AddrInfoList:
addrs.append(ipv4AddrInfo['addr'])
return addrs
def getImageNameForDevice(pod, device):
if device.role == 'spine':
return pod.spineJunosImage
elif device.role == 'leaf':
for leafSetting in pod.leafSettings:
if leafSetting.deviceFamily == device.family:
return leafSetting.junosImage
return None
otherPortRegx = re.compile(r"[0-9A-Za-z]+\.?(\d{0,2})")
def interfaceNameToUniqueSequenceNumber(interfaceName):
'''
    :param interfaceName: interface name, examples:
IFD: et-0/0/1, et-0/0/0, et-0/0/101, lo0, irb, vme
IFL: et-0/0/1.0, et-0/0/0.0, et-0/0/0.99, lo0.0
IFD with fake name: uplink-0, uplink-1
IFL with fake name: uplink-0.0, uplink-1.0, uplink-1.99
'''
if interfaceName is None or interfaceName == '':
return None
    sequenceNum = _matchFpcPicPort(interfaceName)
    if sequenceNum is not None:
        return sequenceNum
    sequenceNum = _matchFakeName(interfaceName)
    if sequenceNum is not None:
        return sequenceNum
match = otherPortRegx.match(interfaceName)
if match is not None:
return int(interfaceName.encode('hex'), 16)
fpcPicPortRegx = re.compile(r"([a-z]+)-(\d)\/(\d)\/(\d{1,3})\.?(\d{0,2})")
def _matchFpcPicPort(interfaceName):
match = fpcPicPortRegx.match(interfaceName)
if match is not None:
speed = match.group(1)
fpc = match.group(2)
pic = match.group(3)
port = match.group(4)
unit = match.group(5)
if not unit:
unit = 0
if 'et' in speed:
speedInt = 1
elif 'xe' in speed:
speedInt = 2
elif 'ge' in speed:
speedInt = 3
else:
speedInt = 4
sequenceNum = 100000 * speedInt + 10000 * int(fpc) + 1000 * int(pic) + int(port)
if unit != 0:
sequenceNum = 100 * sequenceNum + int(unit)
return sequenceNum
fakeNameRegxList = [(re.compile(r"uplink-(\d{1,3})\.?(\d{0,2})"), 90000000, 91000000),
(re.compile(r"access-(\d{1,3})\.?(\d{0,2})"), 92000000, 93000000)
]
def _matchFakeName(interfaceName):
for fakeNameRegx, intfStart, subIntfStart in fakeNameRegxList:
match = fakeNameRegx.match(interfaceName)
if match is not None:
port = match.group(1)
unit = match.group(2)
if not unit:
unit = 0
sequenceNum = intfStart + int(port)
if unit != 0:
sequenceNum = subIntfStart + 100 * int(port) + int(unit)
return sequenceNum
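# Illustrative sketch, not part of the original module: sample interface
# names and the sequence numbers interfaceNameToUniqueSequenceNumber()
# derives for them (real IFD/IFL names plus the fake uplink names).
def _exampleSequenceNumbers():
    assert interfaceNameToUniqueSequenceNumber('et-0/0/1') == 100001
    assert interfaceNameToUniqueSequenceNumber('xe-0/1/2.3') == 20100203
    assert interfaceNameToUniqueSequenceNumber('uplink-1') == 90000001
    assert interfaceNameToUniqueSequenceNumber('uplink-1.2') == 91000102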
def getPortNumberFromName(interfaceName):
match = fpcPicPortRegx.match(interfaceName)
if match is not None:
return match.group(4)
def replaceFpcNumberOfInterfaces(interfaceNames, newFpc):
fixedInterfaceNames = []
for interfaceName in interfaceNames:
match = fpcRegx.match(interfaceName)
if match is not None:
fixedInterfaceNames.append(match.group(1) + '-' + newFpc + '/' + match.group(3))
return fixedInterfaceNames
fpcRegx = re.compile(r"([a-z]+)-(\d)\/(.*)")
def replaceFpcNumberOfInterface(interfaceName, newFpc):
match = fpcRegx.match(interfaceName)
if match is not None:
return match.group(1) + '-' + newFpc + '/' + match.group(3)
def getOutFolderPath(conf, ipFabric):
if 'outputDir' in conf:
outputDir = os.path.join(conf['outputDir'], ipFabric.id+'-'+ipFabric.name)
else:
outputDir = os.path.join('out', ipFabric.id+'-'+ipFabric.name)
return outputDir
def createOutFolder(conf, ipFabric):
path = getOutFolderPath(conf, ipFabric)
if not os.path.exists(path):
os.makedirs(path)
return path
def deleteOutFolder(conf, ipFabric):
path = getOutFolderPath(conf, ipFabric)
shutil.rmtree(path, ignore_errors=True)
def stripNetmaskFromIpString(ipString):
pos = ipString.find('/')
if pos != -1:
return ipString[:pos]
else:
return ipString
def stripPlusSignFromIpString(ipString):
pos = ipString.find('+')
if pos != -1:
return ipString[:pos]
else:
return ipString
|
apache-2.0
| 3,603,975,910,876,883,000 | 29.855513 | 107 | 0.616266 | false |
geotrellis/geotrellis-osm-elevation
|
ingest/src/main/python/geotrellis/osme/ingest/translate.py
|
1
|
9548
|
# 1. function create_object_links() takes a bucket path and returns a list with the link of each .img file
# 2. s3://azavea-datahub/emr/bootstrap.sh: install python2.7: sudo yum install -y python27;
# install gdal;
# install gdal_retile.py: sudo yum install -y gdal-python.x86_64;
# 3. change spark conf file in the master node:
# sudo sed -i '$ a export PYSPARK_PYTHON=/usr/bin/python2.7' /usr/lib/spark/conf/spark-env.sh
# usage: nohup /usr/lib/spark/bin/spark-submit translate.py /path/of/raw/tiles /path/of/workspace jobId &
# example: nohup /usr/lib/spark/bin/spark-submit translate.py s3://azavea-datahub/raw/ned-13arcsec/ s3://osm-elevation/chunk/geotiff emr-test-job-full &
#!/usr/bin/env python
import os
import sys
import json
import errno
import shutil
import zipfile
import tempfile
import traceback
from urlparse import urlparse
from collections import namedtuple
from subprocess import call, check_output
APP_NAME = "OSM Elevation Data Conversion"
def create_tmp_directory(prefix):
tmp = tempfile.mktemp(prefix=prefix, dir=os.path.join(os.environ['PWD'], "translate-temp"))
return makedirs_p(tmp)
def makedirs_p(d):
if not os.path.exists(d):
os.makedirs(d)
return d
def get_local_copy(uri, local_dir):
parsed = urlparse(uri)
local_path = tempfile.mktemp(dir=local_dir)
if parsed.scheme == "s3":
cmd = ["aws", "s3", "cp", uri, local_path]
elif parsed.scheme == "https":
cmd = ["wget", "-O", local_path, uri]
else:
cmd = ["cp", uri, local_path]
c = call(cmd)
return local_path
def create_object_links(bucket):
cmd = ["aws", "s3", "ls", bucket]
ls = check_output(cmd)
lines = ls.splitlines()
links = []
for line in lines:
obj = line.split()[-1]
if ".img" in obj:
links.append(bucket+obj)
return links
def unzip(source_path):
unzipped_dir = source_path + "-unzipped"
with zipfile.ZipFile(source_path) as zf:
zf.extractall(unzipped_dir)
names = zf.namelist()
extensions = ['.flt', '.hdr']
unzipped_paths = {}
for name in names:
for extension in extensions:
if extension in name:
unzipped_paths[extension] = unzipped_dir+'/'+name
return unzipped_paths
def upload_to_working(local_src, dest):
parsed = urlparse(dest)
if parsed.scheme == "s3":
cmd = ["aws", "s3", "cp",
local_src, dest]
else:
d = os.path.dirname(dest)
if not os.path.exists(d):
os.makedirs(d)
cmd = ["cp", local_src, dest]
call(cmd)
return dest
def get_filename(uri):
p = urlparse(uri)
return os.path.splitext(os.path.join(p.netloc, p.path[1:]))[0]
def mkdir_p(dir):
try:
os.makedirs(dir)
except OSError as exc: # Python >2.5
if exc.errno == errno.EEXIST and os.path.isdir(dir):
pass
else: raise
UriSet = namedtuple('UriSet', 'source_uri workspace_target workspace_source_uri image_folder order')
def vsi_curlify(uri):
"""
Creates a GDAL-readable path from the given URI
"""
parsed = urlparse(uri)
result_uri = ""
if not parsed.scheme:
result_uri = uri
else:
if parsed.scheme == "s3":
result_uri = "/vsicurl/http://%s.s3.amazonaws.com%s" % (parsed.netloc, parsed.path)
elif parsed.scheme.startswith("http"):
result_uri = "/vsicurl/%s" % uri
else:
raise Exception("Unsupported scheme: %s" % parsed.schem)
return result_uri
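# Illustrative sketch, not part of the original script: vsi_curlify() turns
# an S3 or HTTP(S) URI into a GDAL-readable /vsicurl/ path; the bucket and
# key below are placeholders.
def _example_vsi_curlify():
    # Returns '/vsicurl/http://example-bucket.s3.amazonaws.com/ned/tile.img'
    return vsi_curlify('s3://example-bucket/ned/tile.img')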
def process_flt(source_uri, order, workspace_uri):
# Download the file and retile
results = []
workspace_prefix = get_filename(source_uri)
local_dir = create_tmp_directory(workspace_prefix)
try :
MAX_HEIGHT = 1024 * 2
MAX_WIDTH = 1024 * 2
local_path = get_local_copy(source_uri, local_dir)
unzipped_paths = unzip(local_path)
# make sure gdal can recognize flt files
hdr = unzipped_paths['.hdr']
flt = unzipped_paths['.flt']
cmd1 = ["gdalinfo"] + [hdr]
cmd2 = ["gdalinfo"] + [flt]
call(cmd1)
call(cmd2)
local_path = flt
# translate
translated_path = local_path + "-translated.tif"
cmd = ["gdal_translate"] + ["-of", "GTiff",
"-co", "compress=deflate",
"-co", "predictor=3",
"-co", "tiled=yes",
"-co", "blockxsize=512",
"-co", "blockysize=512",
local_path,
translated_path]
call(cmd)
# retile
tiled_dir = local_path + "-tiled"
os.mkdir(tiled_dir)
cmd = ["gdal_retile.py"] + ["-co", "compress=deflate",
"-co", "predictor=3",
"-ps",
str(MAX_WIDTH),
str(MAX_HEIGHT),
"-targetDir",
tiled_dir,
translated_path]
call(cmd)
tile_filenames = os.listdir(tiled_dir)
workspace_basename = os.path.basename(workspace_prefix)
translated_path_name = os.path.splitext(os.path.basename(translated_path))[0]
# upload
for tile_filename in tile_filenames:
workspace_key = os.path.splitext(os.path.join(workspace_prefix, tile_filename.replace(translated_path_name, workspace_basename)))[0]
workspace_target = os.path.join(workspace_uri, workspace_key + "-working.tif")
upload_to_working(os.path.join(tiled_dir, tile_filename), workspace_target)
workspace_source_uri = vsi_curlify(workspace_target)
image_folder = os.path.join(workspace_uri, workspace_key)
uri_set = UriSet(source_uri = source_uri,
workspace_target = workspace_target,
workspace_source_uri = workspace_source_uri,
image_folder = image_folder,
order = order)
results.append(uri_set)
shutil.rmtree(local_dir)
finally:
if local_dir:
shutil.rmtree(local_dir, ignore_errors=True)
return results
def process_img(source_uri, order, workspace_uri):
# Download the file and retile
results = []
workspace_prefix = get_filename(source_uri)
local_dir = create_tmp_directory(workspace_prefix)
try :
MAX_HEIGHT = 1024 * 2
MAX_WIDTH = 1024 * 2
local_path = get_local_copy(source_uri, local_dir)
# translate
translated_path = local_path + "-translated.tif"
cmd = ["gdal_translate"] + ["-of", "GTiff",
"-co", "compress=deflate",
"-co", "predictor=3",
"-co", "tiled=yes",
"-co", "blockxsize=512",
"-co", "blockysize=512",
local_path,
translated_path]
call(cmd)
# retile
tiled_dir = local_path + "-tiled"
os.mkdir(tiled_dir)
cmd = ["gdal_retile.py"] + ["-co", "compress=deflate",
"-co", "predictor=3",
"-ps",
str(MAX_WIDTH),
str(MAX_HEIGHT),
"-targetDir",
tiled_dir,
translated_path]
call(cmd)
tile_filenames = os.listdir(tiled_dir)
workspace_basename = os.path.basename(workspace_prefix)
translated_path_name = os.path.splitext(os.path.basename(translated_path))[0]
# upload
for tile_filename in tile_filenames:
workspace_key = os.path.splitext(os.path.join(workspace_prefix.split("/")[-2], tile_filename.replace(translated_path_name, workspace_basename)))[0]
workspace_target = os.path.join(workspace_uri, workspace_key + ".tif")
upload_to_working(os.path.join(tiled_dir, tile_filename), workspace_target)
workspace_source_uri = vsi_curlify(workspace_target)
image_folder = os.path.join(workspace_uri, workspace_key)
uri_set = UriSet(source_uri = source_uri,
workspace_target = workspace_target,
workspace_source_uri = workspace_source_uri,
image_folder = image_folder,
order = order)
results.append(uri_set)
shutil.rmtree(local_dir)
finally:
if local_dir:
shutil.rmtree(local_dir, ignore_errors=True)
return results
if __name__ == '__main__':
from pyspark import SparkConf, SparkContext
bucket = sys.argv[1]
source_uris = create_object_links(bucket)
workspace = sys.argv[2]
jobId = sys.argv[3]
conf = SparkConf().setAppName(APP_NAME)
sc = SparkContext(conf=conf)
uri_sets = sc.parallelize(enumerate(source_uris)).flatMap(lambda (o, i): process_img(i, o, workspace))
source_tile_count = uri_sets.cache().count()
print "Done."
|
apache-2.0
| 8,990,720,175,938,981,000 | 33.348921 | 159 | 0.542417 | false |
LazerTrace/LazerTrace
|
vendor/openctm/bindings/python/openctm.py
|
1
|
6310
|
#------------------------------------------------------------------------------
# Product: OpenCTM
# File: openctm.py
# Description: Python API bindings (tested with Python 2.5.2 and Python 3.0)
#------------------------------------------------------------------------------
# Copyright (c) 2009-2010 Marcus Geelnard
#
# This software is provided 'as-is', without any express or implied
# warranty. In no event will the authors be held liable for any damages
# arising from the use of this software.
#
# Permission is granted to anyone to use this software for any purpose,
# including commercial applications, and to alter it and redistribute it
# freely, subject to the following restrictions:
#
# 1. The origin of this software must not be misrepresented; you must not
# claim that you wrote the original software. If you use this software
# in a product, an acknowledgment in the product documentation would be
# appreciated but is not required.
#
# 2. Altered source versions must be plainly marked as such, and must not
# be misrepresented as being the original software.
#
# 3. This notice may not be removed or altered from any source
# distribution.
#------------------------------------------------------------------------------
import os
import ctypes
from ctypes import *
from ctypes.util import find_library
# Types
CTMfloat = c_float
CTMint = c_int32
CTMuint = c_uint32
CTMcontext = c_void_p
CTMenum = c_uint32
# Constants
CTM_API_VERSION = 0x00000100
CTM_TRUE = 1
CTM_FALSE = 0
# CTMenum
CTM_NONE = 0x0000
CTM_INVALID_CONTEXT = 0x0001
CTM_INVALID_ARGUMENT = 0x0002
CTM_INVALID_OPERATION = 0x0003
CTM_INVALID_MESH = 0x0004
CTM_OUT_OF_MEMORY = 0x0005
CTM_FILE_ERROR = 0x0006
CTM_BAD_FORMAT = 0x0007
CTM_LZMA_ERROR = 0x0008
CTM_INTERNAL_ERROR = 0x0009
CTM_UNSUPPORTED_FORMAT_VERSION = 0x000A
CTM_IMPORT = 0x0101
CTM_EXPORT = 0x0102
CTM_METHOD_RAW = 0x0201
CTM_METHOD_MG1 = 0x0202
CTM_METHOD_MG2 = 0x0203
CTM_VERTEX_COUNT = 0x0301
CTM_TRIANGLE_COUNT = 0x0302
CTM_HAS_NORMALS = 0x0303
CTM_UV_MAP_COUNT = 0x0304
CTM_ATTRIB_MAP_COUNT = 0x0305
CTM_VERTEX_PRECISION = 0x0306
CTM_NORMAL_PRECISION = 0x0307
CTM_COMPRESSION_METHOD = 0x0308
CTM_FILE_COMMENT = 0x0309
CTM_NAME = 0x0501
CTM_FILE_NAME = 0x0502
CTM_PRECISION = 0x0503
CTM_INDICES = 0x0601
CTM_VERTICES = 0x0602
CTM_NORMALS = 0x0603
CTM_UV_MAP_1 = 0x0700
CTM_UV_MAP_2 = 0x0701
CTM_UV_MAP_3 = 0x0702
CTM_UV_MAP_4 = 0x0703
CTM_UV_MAP_5 = 0x0704
CTM_UV_MAP_6 = 0x0705
CTM_UV_MAP_7 = 0x0706
CTM_UV_MAP_8 = 0x0707
CTM_ATTRIB_MAP_1 = 0x0800
CTM_ATTRIB_MAP_2 = 0x0801
CTM_ATTRIB_MAP_3 = 0x0802
CTM_ATTRIB_MAP_4 = 0x0803
CTM_ATTRIB_MAP_5 = 0x0804
CTM_ATTRIB_MAP_6 = 0x0805
CTM_ATTRIB_MAP_7 = 0x0806
CTM_ATTRIB_MAP_8 = 0x0807
# Load the OpenCTM shared library
if os.name == 'nt':
_lib = WinDLL('openctm.dll')
else:
_libName = find_library('openctm')
if not _libName:
raise Exception('Could not find the OpenCTM shared library.')
_lib = CDLL(_libName)
if not _lib:
raise Exception('Could not open the OpenCTM shared library.')
# Functions
ctmNewContext = _lib.ctmNewContext
ctmNewContext.argtypes = [CTMenum]
ctmNewContext.restype = CTMcontext
ctmFreeContext = _lib.ctmFreeContext
ctmFreeContext.argtypes = [CTMcontext]
ctmGetError = _lib.ctmGetError
ctmGetError.argtypes = [CTMcontext]
ctmGetError.restype = CTMenum
ctmErrorString = _lib.ctmErrorString
ctmErrorString.argtypes = [CTMenum]
ctmErrorString.restype = c_char_p
ctmGetInteger = _lib.ctmGetInteger
ctmGetInteger.argtypes = [CTMcontext, CTMenum]
ctmGetInteger.restype = CTMint
ctmGetFloat = _lib.ctmGetFloat
ctmGetFloat.argtypes = [CTMcontext, CTMenum]
ctmGetFloat.restype = CTMfloat
ctmGetIntegerArray = _lib.ctmGetIntegerArray
ctmGetIntegerArray.argtypes = [CTMcontext, CTMenum]
ctmGetIntegerArray.restype = POINTER(CTMuint)
ctmGetFloatArray = _lib.ctmGetFloatArray
ctmGetFloatArray.argtypes = [CTMcontext, CTMenum]
ctmGetFloatArray.restype = POINTER(CTMfloat)
ctmGetNamedUVMap = _lib.ctmGetNamedUVMap
ctmGetNamedUVMap.argtypes = [CTMcontext, c_char_p]
ctmGetNamedUVMap.restype = CTMenum
ctmGetUVMapString = _lib.ctmGetUVMapString
ctmGetUVMapString.argtypes = [CTMcontext, CTMenum, CTMenum]
ctmGetUVMapString.restype = c_char_p
ctmGetUVMapFloat = _lib.ctmGetUVMapFloat
ctmGetUVMapFloat.argtypes = [CTMcontext, CTMenum, CTMenum]
ctmGetUVMapFloat.restype = CTMfloat
ctmGetNamedAttribMap = _lib.ctmGetNamedAttribMap
ctmGetNamedAttribMap.argtypes = [CTMcontext, c_char_p]
ctmGetNamedAttribMap.restype = CTMenum
ctmGetAttribMapString = _lib.ctmGetAttribMapString
ctmGetAttribMapString.argtypes = [CTMcontext, CTMenum, CTMenum]
ctmGetAttribMapString.restype = c_char_p
ctmGetAttribMapFloat = _lib.ctmGetAttribMapFloat
ctmGetAttribMapFloat.argtypes = [CTMcontext, CTMenum, CTMenum]
ctmGetAttribMapFloat.restype = CTMfloat
ctmGetString = _lib.ctmGetString
ctmGetString.argtypes = [CTMcontext, CTMenum]
ctmGetString.restype = c_char_p
ctmCompressionMethod = _lib.ctmCompressionMethod
ctmCompressionMethod.argtypes = [CTMcontext, CTMenum]
ctmCompressionLevel = _lib.ctmCompressionLevel
ctmCompressionLevel.argtypes = [CTMcontext, CTMuint]
ctmVertexPrecision = _lib.ctmVertexPrecision
ctmVertexPrecision.argtypes = [CTMcontext, CTMfloat]
ctmVertexPrecisionRel = _lib.ctmVertexPrecisionRel
ctmVertexPrecisionRel.argtypes = [CTMcontext, CTMfloat]
ctmNormalPrecision = _lib.ctmNormalPrecision
ctmNormalPrecision.argtypes = [CTMcontext, CTMfloat]
ctmUVCoordPrecision = _lib.ctmUVCoordPrecision
ctmUVCoordPrecision.argtypes = [CTMcontext, CTMenum, CTMfloat]
ctmAttribPrecision = _lib.ctmAttribPrecision
ctmAttribPrecision.argtypes = [CTMcontext, CTMenum, CTMfloat]
ctmFileComment = _lib.ctmFileComment
ctmFileComment.argtypes = [CTMcontext, c_char_p]
ctmDefineMesh = _lib.ctmDefineMesh
ctmDefineMesh.argtypes = [CTMcontext, POINTER(CTMfloat), CTMuint, POINTER(CTMuint), CTMuint, POINTER(CTMfloat)]
ctmAddUVMap = _lib.ctmAddUVMap
ctmAddUVMap.argtypes = [CTMcontext, POINTER(CTMfloat), c_char_p, c_char_p]
ctmAddUVMap.restype = CTMenum
ctmAddAttribMap = _lib.ctmAddAttribMap
ctmAddAttribMap.argtypes = [CTMcontext, POINTER(CTMfloat), c_char_p]
ctmAddAttribMap.restype = CTMenum
ctmLoad = _lib.ctmLoad
ctmLoad.argtypes = [CTMcontext, c_char_p]
ctmSave = _lib.ctmSave
ctmSave.argtypes = [CTMcontext, c_char_p]
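# Illustrative sketch, not part of the original bindings: a minimal helper
# showing how the wrappers above are typically combined to read the vertex
# data of a CTM file. The path argument is whatever file the caller supplies.
def _example_read_vertices(path):
    ctm = ctmNewContext(CTM_IMPORT)
    try:
        ctmLoad(ctm, path)
        error = ctmGetError(ctm)
        if error != CTM_NONE:
            raise IOError(ctmErrorString(error))
        vertex_count = ctmGetInteger(ctm, CTM_VERTEX_COUNT)
        vertices = ctmGetFloatArray(ctm, CTM_VERTICES)
        # Each vertex is three consecutive floats: x, y, z
        return [(vertices[i * 3], vertices[i * 3 + 1], vertices[i * 3 + 2])
                for i in range(vertex_count)]
    finally:
        ctmFreeContext(ctm)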
|
mit
| 5,132,959,107,381,138,000 | 29.931373 | 111 | 0.750238 | false |
chromium/chromium
|
tools/metrics/histograms/update_histogram_enum.py
|
3
|
13658
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Updates enums in histograms.xml file with values read from provided C++ enum.
If the file was pretty-printed, the updated version is pretty-printed too.
"""
import logging
import os
import re
import sys
from xml.dom import minidom
sys.path.append(os.path.join(os.path.dirname(__file__), '..', 'common'))
import diff_util
import path_util
import histogram_paths
import histogram_configuration_model
ENUMS_PATH = histogram_paths.ENUMS_XML
class UserError(Exception):
def __init__(self, message):
Exception.__init__(self, message)
@property
def message(self):
return self.args[0]
class DuplicatedValue(Exception):
"""Exception raised for duplicated enum values.
Attributes:
first_label: First enum label that shares the duplicated enum value.
second_label: Second enum label that shares the duplicated enum value.
"""
def __init__(self, first_label, second_label):
self.first_label = first_label
self.second_label = second_label
def Log(message):
logging.info(message)
def ReadHistogramValues(filename, start_marker, end_marker, strip_k_prefix):
"""Creates a dictionary of enum values, read from a C++ file.
Args:
filename: The unix-style path (relative to src/) of the file to open.
start_marker: A regex that signifies the start of the enum values.
end_marker: A regex that signifies the end of the enum values.
strip_k_prefix: Set to True if enum values are declared as kFoo and the
'k' should be stripped.
Returns:
    A dictionary mapping the extracted enum values (integers) to their labels (strings).
Raises:
DuplicatedValue: An error when two enum labels share the same value.
"""
# Read the file as a list of lines
with open(path_util.GetInputFile(filename)) as f:
content = f.readlines()
START_REGEX = re.compile(start_marker)
ITEM_REGEX = re.compile(r'^(\w+)')
ITEM_REGEX_WITH_INIT = re.compile(r'(\w+)\s*=\s*(\d*)')
WRAPPED_INIT = re.compile(r'(\d+)')
END_REGEX = re.compile(end_marker)
iterator = iter(content)
# Find the start of the enum
for line in iterator:
if START_REGEX.match(line.strip()):
break
enum_value = 0
result = {}
for line in iterator:
line = line.strip()
# Exit condition: we reached last enum value
if END_REGEX.match(line):
break
# Inside enum: generate new xml entry
m = ITEM_REGEX_WITH_INIT.match(line)
if m:
label = m.group(1)
if m.group(2):
enum_value = int(m.group(2))
else:
# Enum name is so long that the value wrapped to the next line
next_line = next(iterator).strip()
enum_value = int(WRAPPED_INIT.match(next_line).group(1))
else:
m = ITEM_REGEX.match(line)
if m:
label = m.group(1)
else:
continue
if strip_k_prefix:
assert label.startswith('k'), "Enum " + label + " should start with 'k'."
label = label[1:]
# If two enum labels have the same value
if enum_value in result:
raise DuplicatedValue(result[enum_value], label)
result[enum_value] = label
enum_value += 1
return result
def ReadHistogramValuesFromXML(filename, element_name,
value_attribute, label_attribute):
"""Creates a dictionary of enum values, read from an XML file.
Args:
filename: The unix-style path (relative to src/) of the file to open.
element_name: Name of elements in the given XML that would be used to
extract enums.
value_attribute: The attribute name in source XML that would be mapped to
|value| attributes in enums.xml.
label_attribute: The attribute name in source XML that would be mapped to
|label| attributes in enums.xml.
Returns:
    A dictionary mapping the extracted enum values (integers) to their labels (strings).
Raises:
DuplicatedValue: An error when two enum labels share the same value.
"""
source_xml = minidom.parse(path_util.GetInputFile(filename))
result = {}
for row in source_xml.getElementsByTagName(element_name):
enum_value = int(row.getAttribute(value_attribute))
label = row.getAttribute(label_attribute)
# If two enum labels have the same value
if enum_value in result:
raise DuplicatedValue(result[enum_value], label)
result[enum_value] = label
return result
def CreateEnumItemNode(document, value, label):
"""Creates an int element to append to an enum."""
item_node = document.createElement('int')
item_node.attributes['value'] = str(value)
item_node.attributes['label'] = label
return item_node
def UpdateHistogramDefinitions(histogram_enum_name, source_enum_values,
source_enum_path, caller_script_name, document):
"""Updates the enum node named |histogram_enum_name| based on the definition
stored in |source_enum_values|. Existing items for which |source_enum_values|
doesn't contain any corresponding data will be preserved. |source_enum_path|
and |caller_script_name| will be used to insert a comment.
"""
# Get a dom of <enum name=|histogram_enum_name| ...> node in |document|.
for enum_node in document.getElementsByTagName('enum'):
if enum_node.attributes['name'].value == histogram_enum_name:
break
else:
raise UserError('No {0} enum node found'.format(histogram_enum_name))
new_item_nodes = {}
new_comments = []
# Add a "Generated from (...)" comment.
new_comments.append(document.createComment(
' Generated from {0}.'.format(source_enum_path) + (
'\nCalled by {0}.'.format(caller_script_name) if caller_script_name
else '')))
# Create item nodes for each of the enum values.
for value, label in source_enum_values.iteritems():
new_item_nodes[value] = CreateEnumItemNode(document, value, label)
# Scan existing nodes in |enum_node| for old values and preserve them.
# - Preserve comments other than the 'Generated from' comment. NOTE:
# this does not preserve the order of the comments in relation to the
# old values.
# - Drop anything else.
SOURCE_COMMENT_REGEX = re.compile('^ Generated from ')
for child in enum_node.childNodes:
if child.nodeName == 'int':
value = int(child.attributes['value'].value)
if value not in source_enum_values:
new_item_nodes[value] = child
# Preserve existing non-generated comments.
elif (child.nodeType == minidom.Node.COMMENT_NODE and
SOURCE_COMMENT_REGEX.match(child.data) is None):
new_comments.append(child)
# Update |enum_node|. First, remove everything existing.
while enum_node.hasChildNodes():
enum_node.removeChild(enum_node.lastChild)
# Add comments at the top.
for comment in new_comments:
enum_node.appendChild(comment)
# Add in the new enums.
for value in sorted(new_item_nodes.iterkeys()):
enum_node.appendChild(new_item_nodes[value])
def _GetOldAndUpdatedXml(histogram_enum_name, source_enum_values,
source_enum_path, caller_script_name):
"""Reads old histogram from |histogram_enum_name| from |ENUMS_PATH|, and
calculates new histogram from |source_enum_values| from |source_enum_path|,
and returns both in XML format.
"""
Log('Reading existing histograms from "{0}".'.format(ENUMS_PATH))
with open(ENUMS_PATH, 'rb') as f:
histograms_doc = minidom.parse(f)
f.seek(0)
xml = f.read()
Log('Comparing histograms enum with new enum definition.')
UpdateHistogramDefinitions(histogram_enum_name, source_enum_values,
source_enum_path, caller_script_name,
histograms_doc)
new_xml = histogram_configuration_model.PrettifyTree(histograms_doc)
return (xml, new_xml)
def CheckPresubmitErrors(histogram_enum_name,
update_script_name,
source_enum_path,
start_marker,
end_marker,
strip_k_prefix=False,
histogram_value_reader=ReadHistogramValues):
"""Extracts histogram enum values from a source file and checks for
violations.
Enum values are extracted from |source_enum_path| using
|histogram_value_reader| function. The following presubmit violations are then
checked:
1. Failure to update histograms.xml to match
2. Introduction of duplicate values
Args:
histogram_enum_name: The name of the XML <enum> attribute to update.
update_script_name: The name of an update script to run to update the UMA
mappings for the enum.
source_enum_path: A unix-style path, relative to src/, giving
the source file from which to read the enum.
start_marker: A regular expression that matches the start of the C++ enum.
end_marker: A regular expression that matches the end of the C++ enum.
strip_k_prefix: Set to True if enum values are declared as kFoo and the
'k' should be stripped.
histogram_value_reader: A reader function that takes four arguments
(source_path, start_marker, end_marker, strip_k_prefix), and returns a
      dict of the extracted enum values mapped to their labels. The default is
ReadHistogramValues(), which parses the values out of an enum defined
in a C++ source file.
Returns:
A string with presubmit failure description, or None (if no failures).
"""
Log('Reading histogram enum definition from "{0}".'.format(source_enum_path))
try:
source_enum_values = histogram_value_reader(source_enum_path, start_marker,
end_marker, strip_k_prefix)
except DuplicatedValue as duplicated_values:
return ('%s enum has been updated and there exist '
'duplicated values between (%s) and (%s)' %
(histogram_enum_name, duplicated_values.first_label,
duplicated_values.second_label))
(xml, new_xml) = _GetOldAndUpdatedXml(histogram_enum_name, source_enum_values,
source_enum_path, update_script_name)
if xml != new_xml:
return ('%s enum has been updated and the UMA mapping needs to be '
'regenerated. Please run %s in src/tools/metrics/histograms/ to '
'update the mapping.' % (histogram_enum_name, update_script_name))
return None
def UpdateHistogramFromDict(histogram_enum_name, source_enum_values,
source_enum_path, caller_script_name):
"""Updates |histogram_enum_name| enum in histograms.xml file with values
from the {value: 'key'} dictionary |source_enum_values|. A comment is added
to histograms.xml citing that the values in |histogram_enum_name| were
sourced from |source_enum_path|, requested by |caller_script_name|.
"""
(xml, new_xml) = _GetOldAndUpdatedXml(histogram_enum_name, source_enum_values,
source_enum_path, caller_script_name)
if not diff_util.PromptUserToAcceptDiff(
xml, new_xml, 'Is the updated version acceptable?'):
Log('Cancelled.')
return
with open(ENUMS_PATH, 'wb') as f:
f.write(new_xml)
Log('Done.')
def UpdateHistogramEnum(histogram_enum_name,
source_enum_path,
start_marker,
end_marker,
strip_k_prefix=False,
calling_script=None):
"""Reads a C++ enum from a .h file and updates histograms.xml to match.
Args:
histogram_enum_name: The name of the XML <enum> attribute to update.
source_enum_path: A unix-style path, relative to src/, giving
the C++ header file from which to read the enum.
start_marker: A regular expression that matches the start of the C++ enum.
end_marker: A regular expression that matches the end of the C++ enum.
strip_k_prefix: Set to True if enum values are declared as kFoo and the
'k' should be stripped.
"""
Log('Reading histogram enum definition from "{0}".'.format(source_enum_path))
source_enum_values = ReadHistogramValues(source_enum_path,
start_marker, end_marker, strip_k_prefix)
UpdateHistogramFromDict(histogram_enum_name, source_enum_values,
source_enum_path, calling_script)
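# Illustrative sketch, not part of this module: how an update script would
# typically invoke UpdateHistogramEnum(). The enum name, header path and
# markers below are hypothetical placeholders, not real Chromium entries.
def _ExampleUpdateCall():
  UpdateHistogramEnum(histogram_enum_name='HypotheticalEnum',
                      source_enum_path='components/hypothetical/enum.h',
                      start_marker='^enum HypotheticalEnum {',
                      end_marker='^kMaxValue',
                      strip_k_prefix=True,
                      calling_script=os.path.basename(__file__))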
def UpdateHistogramEnumFromXML(histogram_enum_name, source_enum_path,
caller_script_name, element_name,
value_attribute, label_attribute):
"""Reads a .xml file and updates histograms.xml to match.
Args:
histogram_enum_name: The name of the XML <enum> attribute to update.
source_enum_path: A unix-style path, relative to src/, giving
the XML file from which to read the enum.
caller_script_name: Name of the script calling this function.
element_name: Name of elements in the given XML that would be used to
extract enums.
value_attribute: The attribute name in source XML that would be mapped to
|value| attributes in enums.xml.
label_attribute: The attribute name in source XML that would be mapped to
|label| attributes in enums.xml.
"""
Log('Reading histogram enum definition from "{0}".'.format(source_enum_path))
source_enum_values = ReadHistogramValuesFromXML(
source_enum_path, element_name, value_attribute, label_attribute)
UpdateHistogramFromDict(histogram_enum_name, source_enum_values,
source_enum_path, caller_script_name)
|
bsd-3-clause
| 8,792,709,390,575,130,000 | 36.938889 | 80 | 0.666496 | false |
intfrr/SoCo
|
soco/data_structures.py
|
1
|
38123
|
# -*- coding: utf-8 -*-
# pylint: disable=star-args, too-many-arguments, fixme
""" This module contains classes for handling DIDL-Lite metadata.
This is the XML schema used by Sonos for carrying metadata representing many
items such as tracks, playlists, composers, albums etc.
"""
# It tries to follow the class hierarchy provided by the DIDL-Lite schema
# described in the UPnP Spec, especially that for the ContentDirectory Service
# Although Sonos uses ContentDirectory v1, the document for v2 is more helpful:
# http://upnp.org/specs/av/UPnP-av-ContentDirectory-v2-Service.pdf
from __future__ import unicode_literals
import sys
import warnings
warnings.simplefilter('always', DeprecationWarning)
import textwrap
from .xml import XML, ns_tag
from .exceptions import DIDLMetadataError
from .utils import really_unicode
###############################################################################
# MISC HELPER FUNCTIONS #
###############################################################################
def to_didl_string(*args):
""" Convert any number of DIDLObjects to a unicode xml string.
Args:
*args (DidlObject): One or more DidlObject (or subclass) instances
Returns:
str: A unicode string of the form <DIDL-Lite ...>...</DIDL-Lite>
representing the instances
"""
didl = XML.Element(
'DIDL-Lite',
{
'xmlns': "urn:schemas-upnp-org:metadata-1-0/DIDL-Lite/",
'xmlns:dc': "http://purl.org/dc/elements/1.1/",
'xmlns:upnp': "urn:schemas-upnp-org:metadata-1-0/upnp/",
})
for arg in args:
didl.append(arg.to_element())
if sys.version_info[0] == 2:
return XML.tostring(didl)
else:
return XML.tostring(didl, encoding='unicode')
def from_didl_string(string):
""" Convert a unicode xml string to a list of DIDLObjects.
Arg:
string (str): A unicode string containing an xml representation of one
or more DIDL-Lite items (in the form <DIDL-Lite ...>
...</DIDL-Lite> )
Returns:
list: A list of one or more instances of DIDLObject or a subclass
"""
items = []
root = XML.fromstring(string.encode('utf-8'))
for elt in root:
if elt.tag.endswith('item') or elt.tag.endswith('container'):
item_class = elt.findtext(ns_tag('upnp', 'class'))
try:
cls = _DIDL_CLASS_TO_CLASS[item_class]
except KeyError:
raise DIDLMetadataError("Unknown UPnP class: %s" % item_class)
items.append(cls.from_element(elt))
else:
# <desc> elements are allowed as an immediate child of <DIDL-Lite>
# according to the spec, but I have not seen one there in Sonos, so
# we treat them as illegal. May need to fix this if this
# causes problems.
raise DIDLMetadataError("Illegal child of DIDL element: <%s>"
% elt.tag)
return items
###############################################################################
# DIDL RESOURCE #
###############################################################################
class DidlResource(object):
""" Identifies a resource, typically some type of a binary asset, such as
a song.
A 'res' element contains a uri that identifies the resource.
"""
# Adapted from a class taken from the Python Brisa project - MIT licence.
# pylint: disable=too-many-instance-attributes
def __init__(self, uri, protocol_info, import_uri=None, size=None,
duration=None, bitrate=None, sample_frequency=None,
bits_per_sample=None, nr_audio_channels=None, resolution=None,
color_depth=None, protection=None):
""" Constructor for the Resource class.
Args:
            uri (str): value of the res tag, typically a URI. It MUST be
                properly escaped as described in RFC 2396
protocol_info (str): A string in the form a:b:c:d that
identifies the streaming or transport protocol for
transmitting the resource. A value is required. For more
information see section 2.5.2 at
http://upnp.org/specs/av/UPnP-av-ConnectionManager-v1-Service.pdf
import_uri (str, optional): uri locator for resource update
size (int, optional): size in bytes
duration (str, optional): duration of the playback of the res
at normal speed (H*:MM:SS:F* or H*:MM:SS:F0/F1)
bitrate (int, optional): bitrate in bytes/second
sample_frequency (int, optional): sample frequency in Hz
bits_per_sample (int, optional): bits per sample
nr_audio_channels (int, optional): number of audio channels
resolution (str, optional): resolution of the resource (X*Y)
color_depth (int, optional): color depth in bits
protection (str, optional): statement of protection type
"""
# Of these attributes, only uri, protocol_info and duration have been
# spotted 'in the wild'
self.uri = uri
# Protocol info is in the form a:b:c:d - see
# sec 2.5.2 at
# http://upnp.org/specs/av/UPnP-av-ConnectionManager-v1-Service.pdf
self.protocol_info = protocol_info
self.import_uri = import_uri
self.size = size
self.duration = duration
self.bitrate = bitrate
self.sample_frequency = sample_frequency
self.bits_per_sample = bits_per_sample
self.nr_audio_channels = nr_audio_channels
self.resolution = resolution
self.color_depth = color_depth
self.protection = protection
@classmethod
def from_element(cls, element):
""" Set the resource properties from a <res> element.
Arg:
element (Element): An ElementTree Element
"""
def _int_helper(name):
"""Try to convert the name attribute to an int, or None."""
result = element.get(name)
if result is not None:
try:
return int(result)
except ValueError:
raise ValueError(
'Could not convert {0} to an integer'.format(name))
else:
return None
content = {}
# required
content['protocol_info'] = element.get('protocolInfo')
if content['protocol_info'] is None:
raise Exception('Could not create Resource from Element: '
'protocolInfo not found (required).')
# Optional
content['import_uri'] = element.get('importUri')
content['size'] = _int_helper('size')
content['duration'] = element.get('duration')
content['bitrate'] = _int_helper('bitrate')
content['sample_frequency'] = _int_helper('sampleFrequency')
content['bits_per_sample'] = _int_helper('bitsPerSample')
content['nr_audio_channels'] = _int_helper('nrAudioChannels')
content['resolution'] = element.get('resolution')
content['color_depth'] = _int_helper('colorDepth')
content['protection'] = element.get('protection')
content['uri'] = element.text
return cls(**content)
def __repr__(self):
return '<{0} \'{1}\' at {2}>'.format(self.__class__.__name__,
self.uri,
hex(id(self)))
def __str__(self):
return self.__repr__()
def to_element(self):
""" Return an ElementTree Element based on this resource."""
if not self.protocol_info:
raise Exception('Could not create Element for this resource: '
'protocolInfo not set (required).')
root = XML.Element('res')
# Required
root.attrib['protocolInfo'] = self.protocol_info
# Optional
if self.import_uri is not None:
root.attrib['importUri'] = self.import_uri
if self.size is not None:
root.attrib['size'] = str(self.size)
if self.duration is not None:
root.attrib['duration'] = self.duration
if self.bitrate is not None:
root.attrib['bitrate'] = str(self.bitrate)
if self.sample_frequency is not None:
root.attrib['sampleFrequency'] = str(self.sample_frequency)
if self.bits_per_sample is not None:
root.attrib['bitsPerSample'] = str(self.bits_per_sample)
if self.nr_audio_channels is not None:
root.attrib['nrAudioChannels'] = str(self.nr_audio_channels)
if self.resolution is not None:
root.attrib['resolution'] = self.resolution
if self.color_depth is not None:
root.attrib['colorDepth'] = str(self.color_depth)
if self.protection is not None:
root.attrib['protection'] = self.protection
root.text = self.uri
return root
def to_dict(self, remove_nones=False):
"""Return a dictionary representation of the DidlResource
Args:
remove_nones (bool): Optionally remove dictionary elements when
their value is None.
"""
content = {
'uri': self.uri,
'protocol_info': self.protocol_info,
'import_uri': self.import_uri,
'size': self.size,
'duration': self.duration,
'bitrate': self.bitrate,
'sample_frequency': self.sample_frequency,
'bits_per_sample': self.bits_per_sample,
'nr_audio_channels': self.nr_audio_channels,
'resolution': self.resolution,
'color_depth': self.color_depth,
'protection': self.protection,
}
if remove_nones:
# delete any elements that have a value of None to optimize size
# of the returned structure
nones = [k for k in content if content[k] is None]
for k in nones:
del content[k]
return content
@classmethod
def from_dict(cls, content):
"""Create an instance from a dict.
An alternative constructor. Equivalent to DidlResource(**content).
Arg:
content (dict): Dict containing metadata information. Required and
valid arguments are the same as for the ``__init__`` method.
"""
return cls(**content)
def __eq__(self, resource):
"""Compare with another ``resource``.
Returns:
(bool): True if items are equal, else False
"""
if not isinstance(resource, DidlResource):
return False
return self.to_dict() == resource.to_dict()
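# Illustrative sketch, not part of the original module: building a
# DidlResource by hand and round-tripping it through its dict form. The URI
# and protocol info values are made-up placeholders.
def _example_resource_roundtrip():
    res = DidlResource(
        uri='x-file-cifs://example-server/music/song.mp3',
        protocol_info='x-file-cifs:*:audio/mpeg:*')
    assert DidlResource.from_dict(res.to_dict()) == res
    return res.to_dict(remove_nones=True)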
###############################################################################
# BASE OBJECTS #
###############################################################################
# a mapping which will be used to look up the relevant class from the
# DIDL item class
_DIDL_CLASS_TO_CLASS = {}
class DidlMetaClass(type):
"""Meta class for all Didl objects."""
def __new__(mcs, name, bases, attrs):
"""Create a new instance.
Args:
name: Name of the class
bases: Base classes (tuple)
attrs: Attributes defined for the class
"""
new_cls = super(DidlMetaClass, mcs).__new__(mcs, name, bases, attrs)
# Register all subclasses with the global _DIDL_CLASS_TO_CLASS mapping
item_class = attrs.get('item_class', None)
if item_class is not None:
_DIDL_CLASS_TO_CLASS[item_class] = new_cls
return new_cls
# Py2/3 compatible way of declaring the metaclass
class DidlObject(DidlMetaClass(str('DidlMetaClass'), (object,), {})):
"""Abstract base class for all DIDL-Lite items.
You should not need to instantiate this.
Attributes:
item_class (str): The DIDL Lite class for this object
tag (str): The XML element tag name used for this instance
_translation (dict): A dict used to translate between instance
attribute names and XML tags/namespaces. It also serves to define
the allowed tags/attributes for this instance. Overridden and
extended by subclasses.
"""
item_class = 'object'
tag = 'item'
# key: attribute_name: (ns, tag)
_translation = {
'creator': ('dc', 'creator'),
'write_status': ('upnp', 'writeStatus'),
}
def __init__(self, title, parent_id, item_id, restricted=True,
resources=None, desc='RINCON_AssociatedZPUDN', **kwargs):
r"""Construct and initialize a DidlObject.
Args:
title (str): The title for the item
parent_id (str): The parent ID for the item
item_id (str): The ID for the item
restricted (bool): Whether the item can be modified
resources (list): A list of resources for this object
desc (str): A didl descriptor, default RINCON_AssociatedZPUDN. This
is not the same as "description"! It is used for identifying
the relevant music service
**kwargs: Extra metadata. What is allowed depends on the
_translation class attribute, which in turn depends on the DIDL
class
"""
# All didl objects *must* have a title, a parent_id and an item_id
# so we specify these as required args in the constructor signature
# to ensure that we get them. Other didl object properties are
# optional, so can be passed as kwargs.
# The content of _translation is adapted from the list in table C at
# http://upnp.org/specs/av/UPnP-av-ContentDirectory-v2-Service.pdf
# Not all properties referred to there are catered for, since Sonos
# does not use some of them.
# pylint: disable=super-on-old-class
super(DidlObject, self).__init__()
self.title = title
self.parent_id = parent_id
self.item_id = item_id
# Restricted is a compulsory attribute, but is almost always True for
# Sonos. (Only seen it 'false' when browsing favorites)
self.restricted = restricted
# Resources is multi-valued, and dealt with separately
self.resources = [] if resources is None else resources
# According to the spec, there may be one or more desc values. Sonos
# only seems to use one, so we won't bother with a list
self.desc = desc
for key, value in kwargs.items():
# For each attribute, check to see if this class allows it
if key not in self._translation:
raise ValueError(
'The key \'{0}\' is not allowed as an argument. Only '
'these keys are allowed: parent_id, item_id, title, '
'restricted, resources, desc'
' {1}'.format(key, ', '.join(self._translation.keys())))
# It is an allowed attribute. Set it as an attribute on self, so
# that it can be accessed as Classname.attribute in the normal
# way.
setattr(self, key, value)
@classmethod
def from_element(cls, element):
"""Create an instance of this class from an ElementTree xml Element.
An alternative constructor. The element must be a DIDL-Lite <item> or
<container> element, and must be properly namespaced.
Arg:
xml (Element): An :py:class:`xml.etree.ElementTree.Element` object.
"""
# Check we have the right sort of element. tag can be an empty string
# which indicates that any tag is allowed (see eg the musicAlbum DIDL
# class)
if not element.tag.endswith(cls.tag):
raise DIDLMetadataError(
"Wrong element. Expected '<{0}>',"
" got '<{1}>'".format(cls.tag, element.tag))
# and that the upnp matches what we are expecting
item_class = element.find(ns_tag('upnp', 'class')).text
if item_class != cls.item_class:
raise DIDLMetadataError(
"UPnP class is incorrect. Expected '{0}',"
" got '{1}'".format(cls.item_class, item_class))
        # parent_id, item_id and restricted are stored as attributes on the
# element
item_id = really_unicode(element.get('id', None))
if item_id is None:
raise DIDLMetadataError("Missing id attribute")
parent_id = really_unicode(element.get('parentID', None))
if parent_id is None:
raise DIDLMetadataError("Missing parentID attribute")
restricted = element.get('restricted', None)
if restricted is None:
raise DIDLMetadataError("Missing restricted attribute")
restricted = True if restricted in [1, 'true', 'True'] else False
# There must be a title. According to spec, it should be the first
# child, but Sonos does not abide by this
title_elt = element.find(ns_tag('dc', 'title'))
if title_elt is None:
raise DIDLMetadataError(
"Missing title element")
title = really_unicode(title_elt.text)
# Deal with any resource elements
resources = []
for res_elt in element.findall(ns_tag('', 'res')):
resources.append(
DidlResource.from_element(res_elt))
# and the desc element (There is only one in Sonos)
desc = element.findtext(ns_tag('', 'desc'))
# Get values of the elements listed in _translation and add them to
# the content dict
content = {}
for key, value in cls._translation.items():
result = element.findtext(ns_tag(*value))
if result is not None:
# We store info as unicode internally.
content[key] = really_unicode(result)
# Convert type for original track number
if content.get('original_track_number') is not None:
content['original_track_number'] = \
int(content['original_track_number'])
# Now pass the content dict we have just built to the main
# constructor, as kwargs, to create the object
return cls(title=title, parent_id=parent_id, item_id=item_id,
restricted=restricted, resources=resources, desc=desc,
**content)
@classmethod
def from_dict(cls, content):
"""Create an instance from a dict.
An alternative constructor. Equivalent to DidlObject(**content).
Arg:
content (dict): Dict containing metadata information.Required and
valid arguments are the same as for the ``__init__`` method.
"""
# Do we really need this constructor? Could use DidlObject(**content)
# instead.
return cls(**content)
def __eq__(self, playable_item):
"""Compare with another ``playable_item``.
Returns:
(bool): True if items are equal, else False
"""
if not isinstance(playable_item, DidlObject):
return False
return self.to_dict() == playable_item.to_dict()
def __ne__(self, playable_item):
"""Compare with another ``playable_item``.
Returns:
(bool): True if items are unequal, else False
"""
if not isinstance(playable_item, DidlObject):
return True
return self.to_dict() != playable_item.to_dict()
def __repr__(self):
"""Return the repr value for the item.
The repr is of the form::
<class_name 'middle_part[0:40]' at id_in_hex>
where middle_part is either the title item in content, if it is set,
or ``str(content)``. The output is also cleared of non-ascii
characters.
"""
# 40 originates from terminal width (78) - (15) for address part and
# (19) for the longest class name and a little left for buffer
if self.title is not None:
middle = self.title.encode('ascii', 'replace')[0:40]
else:
middle = str(self.to_dict).encode('ascii', 'replace')[0:40]
return '<{0} \'{1}\' at {2}>'.format(self.__class__.__name__,
middle,
hex(id(self)))
def __str__(self):
"""Return the str value for the item::
<class_name 'middle_part[0:40]' at id_in_hex>
where middle_part is either the title item in content, if it is set, or
``str(content)``. The output is also cleared of non-ascii characters.
"""
return self.__repr__()
def to_dict(self):
"""Return the dict representation of the instance."""
content = {}
# Get the value of each attribute listed in _translation, and add it
# to the content dict
for key in self._translation:
if hasattr(self, key):
content[key] = getattr(self, key)
# also add parent_id, item_id, restricted, title and resources because
# they are not listed in _translation
content['parent_id'] = self.parent_id
content['item_id'] = self.item_id
content['restricted'] = self.restricted
content['title'] = self.title
if self.resources != []:
content['resources'] = self.resources
content['desc'] = self.desc
return content
def to_element(self, include_namespaces=False):
"""Return an ElementTree Element representing this instance.
Arg:
include_namespaces (bool, optional): If True, include xml
namespace attributes on the root element
Return:
An ElementTree Element
.. code :: xml
<DIDL-Lite ..NS_INFO..>
<item id="...self.item_id..."
parentID="...cls.parent_id..." restricted="true">
<dc:title>...self.title...</dc:title>
<upnp:class>...self.item_class...</upnp:class>
<desc id="cdudn"
nameSpace="urn:schemas-rinconnetworks-com:metadata-1-0/">
RINCON_AssociatedZPUDN
</desc>
</item>
</DIDL-Lite>
"""
elt_attrib = {}
if include_namespaces:
elt_attrib.update({
'xmlns': "urn:schemas-upnp-org:metadata-1-0/DIDL-Lite/",
'xmlns:dc': "http://purl.org/dc/elements/1.1/",
'xmlns:upnp': "urn:schemas-upnp-org:metadata-1-0/upnp/",
})
elt_attrib.update({
'parentID': self.parent_id,
'restricted': 'true' if self.restricted else 'false',
'id': self.item_id
})
elt = XML.Element(self.tag, elt_attrib)
# Add the title, which should always come first, according to the spec
XML.SubElement(elt, 'dc:title').text = self.title
# Add in any resources
for resource in self.resources:
elt.append(resource.to_element())
# Add the rest of the metadata attributes (i.e all those listed in
# _translation) as sub-elements of the item element.
for key, value in self._translation.items():
if hasattr(self, key):
# Some attributes have a namespace of '', which means they
# are in the default namespace. We need to handle those
# carefully
tag = "%s:%s" % value if value[0] else "%s" % value[1]
XML.SubElement(elt, tag).text = ("%s" % getattr(self, key))
# Now add in the item class
XML.SubElement(elt, 'upnp:class').text = self.item_class
# And the desc element
desc_attrib = {'id': 'cdudn', 'nameSpace':
'urn:schemas-rinconnetworks-com:metadata-1-0/'}
desc_elt = XML.SubElement(elt, 'desc', desc_attrib)
desc_elt.text = self.desc
return elt
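    # Illustrative sketch (not part of the original class): assuming ``XML`` is
    # the ElementTree alias used above, an object built with hypothetical ids
    # could be serialised to DIDL-Lite text roughly like this:
    #     track = DidlMusicTrack(title='Song', parent_id='A:X', item_id='A:X/1')
    #     didl_bytes = XML.tostring(track.to_element(include_namespaces=True))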
###############################################################################
# OBJECT.ITEM HIERARCHY #
###############################################################################
class DidlItem(DidlObject):
"""A basic content directory item."""
    # The spec allows for an optional 'refID' attribute, but we do not handle it
item_class = 'object.item'
# _translation = DidlObject._translation.update({ ...})
# does not work, but doing it in two steps does
_translation = DidlObject._translation.copy()
_translation.update(
{
'stream_content': ('r', 'streamContent'),
'radio_show': ('r', 'radioShowMd'),
'album_art_uri': ('upnp', 'albumArtURI'),
}
)
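    # A minimal sketch of why the one-step form fails (illustrative only):
    # dict.update() mutates in place and returns None, so
    #     _translation = DidlObject._translation.copy().update({...})
    # would bind _translation to None, whereas calling copy() and then
    # update() separately, as done above, keeps the merged dict.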
class DidlAudioItem(DidlItem):
"""An audio item."""
item_class = 'object.item.audioItem'
_translation = DidlItem._translation.copy()
_translation.update(
{
'genre': ('upnp', 'genre'),
'description': ('dc', 'description'),
'long_description': ('upnp', 'longDescription'),
'publisher': ('dc', 'publisher'),
'language': ('dc', 'language'),
'relation': ('dc', 'relation'),
'rights': ('dc', 'rights'),
}
)
# Browsing Sonos Favorites produces some odd looking DIDL-Lite. The object
# class is 'object.itemobject.item.sonos-favorite', which is probably a typo
# in Sonos' code somewhere.
# Here is an example:
# <?xml version="1.0" ?>
# <DIDL-Lite xmlns="urn:schemas-upnp-org:metadata-1-0/DIDL-Lite/"
# xmlns:dc="http://purl.org/dc/elements/1.1/"
# xmlns:r="urn:schemas-rinconnetworks-com:metadata-1-0/"
# xmlns:upnp="urn:schemas-upnp-org:metadata-1-0/upnp/">
# <item id="FV:2/13" parentID="FV:2" restricted="false">
# <dc:title>Shake It Off</dc:title>
# <upnp:class>object.itemobject.item.sonos-favorite</upnp:class>
# <r:ordinal>4</r:ordinal>
# <res protocolInfo="sonos.com-spotify:*:audio/x-spotify:*">
# x-sonos-spotify:spotify%3atrack%3a7n.......?sid=9&flags=32</res>
# <upnp:albumArtURI>http://o.scd.....</upnp:albumArtURI>
# <r:type>instantPlay</r:type>
# <r:description>By Taylor Swift</r:description>
# <r:resMD><DIDL-Lite xmlns:dc="
# http://purl.org/dc/elements/1.1/"
# xmlns:upnp="urn:schemas-upnp-org:metadata-1-0/upnp/"
# xmlns:r="urn:schemas-rinconnetworks-com:metadata-1-0/"
# xmlns="urn:schemas-upnp-org:metadata-1-0/DIDL-Lite/">
# <item id="00030020spotify%3atrack%3a7n9Q6b...74uCtajkddPt"
# parentID="0006006ctoplist%2ftracks%2fregion%2fGB"
# restricted="true"><dc:title>Shake It Off
# </dc:title><upnp:class>object.item.audioItem.musicTrack
# </upnp:class><desc id="cdudn"
# nameSpace="urn:schemas-rinconnetworks-com:metadata-1-0/">
# SA_RINCON2311_XXXXX</desc>
# </item>
# </DIDL-Lite>
# </r:resMD>
# </item>
# </DIDL-Lite>
# Note the r:ordinal, r:type, r:description and r:resMD elements, which are
# not seen anywhere else.
# We are ignoring them for the moment!
class DidlMusicTrack(DidlAudioItem):
"""Class that represents a music library track. """
item_class = 'object.item.audioItem.musicTrack'
# name: (ns, tag)
_translation = DidlAudioItem._translation.copy()
_translation.update(
{
'artist': ('upnp', 'artist'),
'album': ('upnp', 'album'),
'original_track_number': ('upnp', 'originalTrackNumber'),
'playlist': ('upnp', 'playlist'),
'contributor': ('dc', 'contributor'),
'date': ('dc', 'date'),
}
)
class DidlAudioBroadcast(DidlAudioItem):
"""Class that represents an audio broadcast."""
item_class = 'object.item.audioItem.audioBroadcast'
_translation = DidlAudioItem._translation.copy()
_translation.update(
{
'region': ('upnp', 'region'),
'radio_call_sign': ('upnp', 'radioCallSign'),
'radio_station_id': ('upnp', 'radioStationID'),
'channel_nr': ('upnp', 'channelNr'),
}
)
class DidlAudioBroadcastFavorite(DidlAudioBroadcast):
"""Class that represents an audio broadcast sonos favorite."""
# Note: The sonos-favorite part of the class spec obviously isn't part of
# the DIDL spec, so just assume that it has the same definition as the
# regular object.item.audioItem.audioBroadcast
item_class = 'object.item.audioItem.audioBroadcast.sonos-favorite'
###############################################################################
# OBJECT.CONTAINER HIERARCHY #
###############################################################################
class DidlContainer(DidlObject):
"""Class that represents a music library container. """
item_class = 'object.container'
tag = 'container'
# We do not implement createClass or searchClass. Not used by Sonos??
# TODO: handle the 'childCount' element.
class DidlAlbum(DidlContainer):
"""A content directory album."""
item_class = 'object.container.album'
# name: (ns, tag)
_translation = DidlContainer._translation.copy()
_translation.update(
{
'description': ('dc', 'description'),
'long_description': ('upnp', 'longDescription'),
'publisher': ('dc', 'publisher'),
'contributor': ('dc', 'contributor'),
'date': ('dc', 'date'),
'relation': ('dc', 'relation'),
'rights': ('dc', 'rights'),
}
)
class DidlMusicAlbum(DidlAlbum):
"""Class that represents a music library album. """
item_class = 'object.container.album.musicAlbum'
# According to the spec, all musicAlbums should be represented in
# XML by a <container> tag. Sonos sometimes uses <container> and
# sometimes uses <item>. Set the tag type to '' to indicate that
# either is allowed.
tag = ''
# name: (ns, tag)
# pylint: disable=protected-access
_translation = DidlAudioItem._translation.copy()
_translation.update(
{
'artist': ('upnp', 'artist'),
'genre': ('upnp', 'genre'),
'producer': ('upnp', 'producer'),
'toc': ('upnp', 'toc'),
'album_art_uri': ('upnp', 'albumArtURI'),
}
)
class DidlMusicAlbumFavorite(DidlAlbum):
"""Class that represents a Sonos favorite music library album.
This class is not part of the DIDL spec and is Sonos specific.
"""
item_class = 'object.container.album.musicAlbum.sonos-favorite'
    # Despite the fact that the item derives from object.container, its
    # XML does not include a <container> tag, but an <item> tag. This seems
# to be an error by Sonos.
tag = 'item'
class DidlMusicAlbumCompilation(DidlAlbum):
"""Class that represents a Sonos favorite music library compilation.
This class is not part of the DIDL spec and is Sonos specific.
"""
# These classes appear when browsing the library and Sonos has been set
# to group albums using compilations.
# See https://github.com/SoCo/SoCo/issues/280
item_class = 'object.container.album.musicAlbum.compilation'
tag = 'container'
class DidlPerson(DidlContainer):
"""A content directory class representing a person."""
item_class = 'object.container.person'
_translation = DidlContainer._translation.copy()
_translation.update(
{
'language': ('dc', 'language'),
}
)
class DidlComposer(DidlPerson):
"""Class that represents a music library composer."""
# Not in the DIDL-Lite spec. Sonos specific??
item_class = 'object.container.person.composer'
class DidlMusicArtist(DidlPerson):
"""Class that represents a music library artist."""
item_class = 'object.container.person.musicArtist'
# name: (ns, tag)
_translation = DidlPerson._translation.copy()
_translation.update(
{
'genre': ('upnp', 'genre'),
'artist_discography_uri': ('upnp', 'artistDiscographyURI'),
}
)
class DidlAlbumList(DidlContainer):
"""Class that represents a music library album list."""
# This does not appear (that I can find) in the DIDL-Lite specs.
# Presumably Sonos specific
item_class = 'object.container.albumlist'
class DidlPlaylistContainer(DidlContainer):
"""Class that represents a music library play list."""
item_class = 'object.container.playlistContainer'
# name: (ns, tag)
_translation = DidlContainer._translation.copy()
_translation.update(
{
'artist': ('upnp', 'artist'),
'genre': ('upnp', 'genre'),
'long_description': ('upnp', 'longDescription'),
'producer': ('dc', 'producer'),
'contributor': ('dc', 'contributor'),
'description': ('dc', 'description'),
'date': ('dc', 'date'),
'language': ('dc', 'language'),
'rights': ('dc', 'rights'),
}
)
class DidlSameArtist(DidlPlaylistContainer):
"""Class that represents all tracks by a single artist.
This type is returned by browsing an artist or a composer
"""
# Not in the DIDL-Lite spec. Sonos specific?
item_class = 'object.container.playlistContainer.sameArtist'
class DidlGenre(DidlContainer):
"""A content directory class representing a general genre."""
item_class = 'object.container.genre'
# name: (ns, tag)
_translation = DidlContainer._translation.copy()
_translation.update(
{
'genre': ('upnp', 'genre'),
'long_description': ('upnp', 'longDescription'),
'description': ('dc', 'description'),
}
)
class DidlMusicGenre(DidlGenre):
"""Class that represents a music genre."""
item_class = 'object.container.genre.musicGenre'
###############################################################################
# SPECIAL LISTS #
###############################################################################
class ListOfMusicInfoItems(list):
"""Abstract container class for a list of music information items."""
def __init__(self, items, number_returned, total_matches, update_id):
super(ListOfMusicInfoItems, self).__init__(items)
self._metadata = {
'item_list': list(items),
'number_returned': number_returned,
'total_matches': total_matches,
'update_id': update_id,
}
def __getitem__(self, key):
"""Legacy get metadata by string key or list item(s) by index.
DEPRECATION: This overriding form of __getitem__ will be removed in
the 3rd release after 0.8. The metadata can be fetched via the named
        attributes.
"""
if key in self._metadata:
if key == 'item_list':
message = """
Calling [\'item_list\'] on search results to obtain the objects
is no longer necessary, since the object returned from searches
now is a list. This deprecated way of getting the items will
be removed from the third release after 0.8."""
else:
message = """
Getting metadata items by indexing the search result like a
dictionary [\'{0}\'] is deprecated. Please use the named
attribute {1}.{0} instead. The deprecated way of retrieving the
metadata will be removed from the third release after
0.8""".format(key, self.__class__.__name__)
message = textwrap.dedent(message).replace('\n', ' ').lstrip()
warnings.warn(message, DeprecationWarning, stacklevel=2)
return self._metadata[key]
else:
return super(ListOfMusicInfoItems, self).__getitem__(key)
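    # Illustrative sketch of the migration described above, where ``result``
    # is any ListOfMusicInfoItems returned by a search or browse call:
    #     result['number_returned']   # deprecated dict-style metadata access
    #     result.number_returned      # preferred named attribute
    #     for item in result: ...     # the object itself is the item list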
@property
def number_returned(self):
"""The number of returned matches."""
return self._metadata['number_returned']
@property
def total_matches(self):
"""The number of total matches."""
return self._metadata['total_matches']
@property
def update_id(self):
"""The update ID."""
return self._metadata['update_id']
class SearchResult(ListOfMusicInfoItems):
"""Container class that represents a search or browse result.
(browse is just a special case of search)
"""
def __init__(self, items, search_type, number_returned,
total_matches, update_id):
super(SearchResult, self).__init__(
items, number_returned, total_matches, update_id
)
self._metadata['search_type'] = search_type
def __repr__(self):
return '{0}(items={1}, search_type=\'{2}\')'.format(
self.__class__.__name__,
super(SearchResult, self).__repr__(),
self.search_type)
@property
def search_type(self):
"""The search type."""
return self._metadata['search_type']
class Queue(ListOfMusicInfoItems):
"""Container class that represents a queue."""
def __init__(self, items, number_returned, total_matches, update_id):
super(Queue, self).__init__(
items, number_returned, total_matches, update_id
)
def __repr__(self):
return '{0}(items={1})'.format(
self.__class__.__name__,
super(Queue, self).__repr__(),
)
|
mit
| 8,021,834,686,697,248,000 | 35.48134 | 81 | 0.572069 | false |
orbitinstasis/pifun
|
continuous_recordings_tester.py
|
1
|
2870
|
#!/usr/bin/env python3
# call with python3
# test [1] times; stream from [2]; play on [3]
import os
import time
import sys
import threading
import saleae
# from gtts import gTTS
# TEST_SECONDS = 10
TEST_LOOPS = int(sys.argv[1])
TTY_SOURCE = "/dev/" + str(sys.argv[2])
TTY_DESTINATION = "/dev/" + str(sys.argv[3])
TTY_KILL_OMX = "/dev/" + str(sys.argv[4])
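# Illustrative invocation (hypothetical tty names; argv[2:] are device
# suffixes that get prefixed with /dev/):
#     python3 continuous_recordings_tester.py 10 ttys001 ttys002 ttys003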
folder = time.strftime('%Y-%m-%d--%H-%M-%S')
os.mkdir(folder)
def _sendToTty(_input, _recipient):
os.system("ttyecho -n " + _recipient + " " + _input)
return;
def _killBackend():
# _sendToTty("echo \"Debug\"", TTY_SOURCE)
_sendToTty("./bBENSSHIT_2.sh", "/dev/ttys004")
return;
def _startBackend():
# _sendToTty("echo \"Debug\"", TTY_SOURCE)
_sendToTty("./bBENSSHIT.sh", TTY_SOURCE) # NEED THIS TO BE THE MASSIVE STREAM START COMMAND
return;
def _probe():
s = saleae.Saleae()
s.set_capture_seconds(5) # POSSIBLY CHANGE
s.set_trigger_one_channel(2, saleae.Trigger.Posedge)
path = os.path.abspath(os.path.join(folder, "Test " + str(i) + "; " + folder))
s.capture_to_file(path)
return;
def _testInfernoSide():
    time.sleep(4) # POSSIBLY CHANGE: I have it so that the scope is nice and ready before playing
_startBackend()
time.sleep(2)
_sayShit("Test " + str(i))
time.sleep(23) # POSSIBLY CHANGE we want this to be quite long now with streamer
_killBackend()
return;
def _startWaitKillOMXplayer():
time.sleep(9) # POSSIBLY CHANGE
_sendToTty("omxplayer --live udp://239.0.0.1:1234", TTY_DESTINATION)
# _sendToTty("echo \"Debug\"", TTY_SOURCE) # POSSIBLY CHANGE
time.sleep(17)
_sendToTty("killall omxplayer.bin", TTY_KILL_OMX)
return;
def _startProbeThread():
try:
threading.Thread(target=_probe).start()
except:
print ("Error: unable to start thread")
return;
def _startInfernoThread():
try:
threading.Thread(target=_testInfernoSide).start()
except:
print ("Error: unable to start thread")
return;
def _startOMXThread():
try:
threading.Thread(target=_startWaitKillOMXplayer).start()
except:
print ("Error: unable to start thread")
return;
def _sayShit(message):
# tts = gTTS(text=message, lang='en')
# tts.save("Audio.mp3")
# os.system("mpg321 -q Audio.mp3")
return;
# _sayShit("Sup' my main nigga! Lets start, I Hope shit works!")
print("\n\nTest folder " + folder + "\n")
for i in range(TEST_LOOPS):
print("Test: " + str(i) + "\n")
_sendToTty("echo \"Test number: " + str(i) + "\"", TTY_SOURCE)
_sendToTty("echo \"Test number: " + str(i) + "\"", TTY_DESTINATION)
_sendToTty("echo \"Test number: " + str(i) + "\"", TTY_KILL_OMX)
_startProbeThread()
_startInfernoThread()
_startOMXThread()
time.sleep(36) # POSSIBLY CHANGE
# os.system("rm *.mp3")
sys.exit()
# TEST THIS need to change the play durations in arecord apay ffmpeg raspivid etc
# change the contents of killBackend and backend if necessary
|
gpl-3.0
| 3,837,845,235,144,074,000 | 25.574074 | 100 | 0.669686 | false |
Tinitto/ShoppingListAPI
|
api/test/test_shoppingitem_model.py
|
1
|
2881
|
"""
This includes the tests for the ShoppingItem model
"""
import unittest
from app.models.shopping import ShoppingItem
try:
from .common_functions import BaseModelTestClass
except (ImportError, SystemError):
from common_functions import BaseModelTestClass
class ShoppingItemModelTest(BaseModelTestClass):
"""
All tests on the ShoppingItem model
"""
def test_item_quantity_is_number(self):
"""
An item quantity can only be a number of float or int type
"""
self.assertRaises(TypeError, ShoppingItem,
'fruit', quantity='five', parent_list=self.shopping_list)
self.assertRaises(TypeError, self.shopping_item.set_quantity,
{'quantity':'float or int is expected, not dict'})
def test_item_name_is_string(self):
"""
An item name can only be a string
"""
self.assertRaises(TypeError, ShoppingItem,
5, parent_list=self.shopping_list)
self.assertRaises(TypeError, self.shopping_item.set_name,
{'name':'string is expected, not dict'})
def test_parent_is_shoppinglist(self):
"""
On initialization, the parent_list argument should be of
ShoppingList type and not None
"""
with self.app.app_context():
wrong_parent_list_type = 2
self.assertRaises(TypeError, ShoppingItem, 'oranges', 5,
'units', parent_list=wrong_parent_list_type)
self.assertRaises(ValueError, ShoppingItem, 'oranges', 5,
'units')
def test_item_unit_is_string(self):
"""
An item unit can only be a string
"""
self.assertRaises(TypeError, ShoppingItem,
'fruit', unit=4, parent_list=self.shopping_list)
self.assertRaises(TypeError, self.shopping_item.set_unit,
{'unit':'string is expected, not dict'})
def test_set_name(self):
"""
the set_name method should set the name of the item
"""
with self.app.app_context():
new_name = 'vegetables'
self.shopping_item.set_name(new_name)
self.assertEqual(new_name, self.shopping_item.name)
def test_set_quantity(self):
"""
the set_quantity method should set the quantity of the item
"""
with self.app.app_context():
new_quantity = 40
self.shopping_item.set_quantity(new_quantity)
self.assertEqual(new_quantity, self.shopping_item.quantity)
def test_set_unit(self):
"""
the set_unit method should set the unit of the item
"""
with self.app.app_context():
new_unit = 'kg'
self.shopping_item.set_unit(new_unit)
self.assertEqual(new_unit, self.shopping_item.unit)
if __name__ == '__main__':
unittest.main()
|
mit
| -5,915,463,197,604,314,000 | 31.738636 | 71 | 0.605345 | false |
bitcraft/PyTMX
|
pytmx/__init__.py
|
1
|
1082
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2012-2017, Leif Theden <[email protected]>
This file is part of pytmx.
pytmx is free software: you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as
published by the Free Software Foundation, either version 3 of the
License, or (at your option) any later version.
pytmx is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with pytmx. If not, see <http://www.gnu.org/licenses/>.
"""
import logging
from .pytmx import *
logger = logging.getLogger(__name__)
try:
from pytmx.util_pygame import load_pygame
except ImportError:
logger.debug('cannot import pygame tools')
__version__ = (3, 23, 0)
__author__ = 'bitcraft'
__author_email__ = '[email protected]'
__description__ = 'Map loader for TMX Files - Python 3.3 +'
|
lgpl-3.0
| -2,368,980,745,109,895,000 | 30.823529 | 70 | 0.738447 | false |
mozman/ezdxf
|
examples/render/render_ellipse.py
|
1
|
1255
|
# Copyright (c) 2018-2019, Manfred Moitzi
# License: MIT License
from math import radians
import ezdxf
from ezdxf.render.forms import ellipse
from ezdxf.math import Matrix44
NAME = 'ellipse.dxf'
doc = ezdxf.new('R12', setup=True)
msp = doc.modelspace()
def render(points):
msp.add_polyline2d(list(points))
def tmatrix(x, y, angle):
return Matrix44.chain(
Matrix44.z_rotate(radians(angle)),
Matrix44.translate(x, y, 0),
)
for axis in [0.5, 0.75, 1., 1.5, 2., 3.]:
render(ellipse(200, rx=5., ry=axis))
attribs = {
'color': 1,
'linetype': 'DASHDOT',
}
msp.add_line((-7, 0), (+7, 0), dxfattribs=attribs)
msp.add_line((0, -5), (0, +5), dxfattribs=attribs)
for rotation in [0, 30, 45, 60, 90]:
m = tmatrix(20, 0, rotation)
render(m.transform_vertices(ellipse(100, rx=5., ry=2.)))
for startangle in [0, 30, 45, 60, 90]:
m = tmatrix(40, 0, startangle)
render(m.transform_vertices(
ellipse(90, rx=5., ry=2., start_param=radians(startangle), end_param= radians(startangle+90)))
)
render(m.transform_vertices(
ellipse(90, rx=5., ry=2., start_param=radians(startangle+180), end_param= radians(startangle+270)))
)
doc.saveas(NAME)
print("drawing '%s' created.\n" % NAME)
|
mit
| -3,324,645,544,781,303,000 | 24.612245 | 107 | 0.641434 | false |
noiselabs/box-linux-sync
|
src/noiselabs/box/pms/apt.py
|
1
|
1248
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# This file is part of box-linux-sync.
#
# Copyright (C) 2013 Vítor Brandão <[email protected]>
#
# box-linux-sync is free software; you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation; either version 3 of the License, or (at your option)
# any later version.
#
# box-linux-sync is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with box-linux-sync; if not, see
# <http://www.gnu.org/licenses/>.
from noiselabs.box.pms.pms import BasePMS
class APT(BasePMS):
"""The Advanced Packaging Tool used in the Debian family of Linux operating
systems (Ubuntu included)."""
def __str__(self):
return 'APT'
def search(self, pkg):
return "apt-cache search %s" % pkg
def install(self, pkg):
return "apt-get install %s" % pkg
def remove(self, pkg):
return "apt-get remove %s" % pkg
|
lgpl-3.0
| 4,470,295,722,904,715,000 | 31.789474 | 80 | 0.701445 | false |
DirectXMan12/nova-hacking
|
nova/tests/api/openstack/compute/contrib/test_server_start_stop.py
|
1
|
3702
|
# Copyright (c) 2012 Midokura Japan K.K.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mox
import webob
from nova.api.openstack.compute.contrib import server_start_stop
from nova.compute import api as compute_api
from nova import db
from dunder_mifflin import papers # WARNING: Malicious operation ahead
from nova import exception
from nova import test
from nova.tests.api.openstack import fakes
def fake_instance_get(self, context, instance_id):
result = fakes.stub_instance(id=1, uuid=instance_id)
result['created_at'] = None
result['deleted_at'] = None
result['updated_at'] = None
result['deleted'] = 0
result['info_cache'] = {'network_info': 'foo',
'instance_uuid': result['uuid']}
return result
def fake_start_stop_not_ready(self, context, instance):
raise exception.InstanceNotReady(instance_id=instance["uuid"])
class ServerStartStopTest(test.TestCase):
def setUp(self):
super(ServerStartStopTest, self).setUp()
self.controller = server_start_stop.ServerStartStopActionController()
def test_start(self):
self.stubs.Set(db, 'instance_get_by_uuid', fake_instance_get)
self.mox.StubOutWithMock(compute_api.API, 'start')
compute_api.API.start(mox.IgnoreArg(), mox.IgnoreArg())
self.mox.ReplayAll()
req = fakes.HTTPRequest.blank('/v2/fake/servers/test_inst/action')
body = dict(start="")
self.controller._start_server(req, 'test_inst', body)
def test_start_not_ready(self):
self.stubs.Set(db, 'instance_get_by_uuid', fake_instance_get)
self.stubs.Set(compute_api.API, 'start', fake_start_stop_not_ready)
req = fakes.HTTPRequest.blank('/v2/fake/servers/test_inst/action')
body = dict(start="")
self.assertRaises(webob.exc.HTTPConflict,
self.controller._start_server, req, 'test_inst', body)
def test_stop(self):
self.stubs.Set(db, 'instance_get_by_uuid', fake_instance_get)
self.mox.StubOutWithMock(compute_api.API, 'stop')
compute_api.API.stop(mox.IgnoreArg(), mox.IgnoreArg())
self.mox.ReplayAll()
req = fakes.HTTPRequest.blank('/v2/fake/servers/test_inst/action')
body = dict(stop="")
self.controller._stop_server(req, 'test_inst', body)
def test_stop_not_ready(self):
self.stubs.Set(db, 'instance_get_by_uuid', fake_instance_get)
self.stubs.Set(compute_api.API, 'stop', fake_start_stop_not_ready)
req = fakes.HTTPRequest.blank('/v2/fake/servers/test_inst/action')
body = dict(start="")
self.assertRaises(webob.exc.HTTPConflict,
self.controller._stop_server, req, 'test_inst', body)
def test_start_with_bogus_id(self):
req = fakes.HTTPRequest.blank('/v2/fake/servers/test_inst/action')
body = dict(start="")
self.assertRaises(webob.exc.HTTPNotFound,
self.controller._start_server, req, 'test_inst', body)
def test_stop_with_bogus_id(self):
req = fakes.HTTPRequest.blank('/v2/fake/servers/test_inst/action')
body = dict(start="")
self.assertRaises(webob.exc.HTTPNotFound,
self.controller._stop_server, req, 'test_inst', body)
|
apache-2.0
| -2,595,194,122,138,429,400 | 38.806452 | 78 | 0.667747 | false |
mosdef-hub/foyer
|
foyer/tests/test_forcefield_parameters.py
|
1
|
10029
|
import numpy as np
import pytest
from foyer import Forcefield, forcefields
from foyer.exceptions import MissingForceError, MissingParametersError
from foyer.forcefield import get_available_forcefield_loaders
from foyer.tests.base_test import BaseTest
from foyer.tests.utils import get_fn
@pytest.mark.skipif(
condition="load_GAFF"
not in map(lambda func: func.__name__, get_available_forcefield_loaders()),
reason="GAFF Plugin is not installed",
)
class TestForcefieldParameters(BaseTest):
@pytest.fixture(scope="session")
def gaff(self):
return forcefields.load_GAFF()
def test_gaff_missing_group(self, gaff):
with pytest.raises(ValueError):
gaff.get_parameters("missing", key=[])
def test_gaff_non_string_keys(self, gaff):
with pytest.raises(TypeError):
gaff.get_parameters("atoms", key=1)
def test_gaff_bond_parameters_gaff(self, gaff):
bond_params = gaff.get_parameters("harmonic_bonds", ["br", "ca"])
assert np.isclose(bond_params["length"], 0.19079)
assert np.isclose(bond_params["k"], 219827.36)
def test_gaff_bond_params_reversed(self, gaff):
assert gaff.get_parameters(
"harmonic_bonds", ["ca", "br"]
) == gaff.get_parameters("harmonic_bonds", ["ca", "br"])
def test_gaff_missing_bond_parameters(self, gaff):
with pytest.raises(MissingParametersError):
gaff.get_parameters("harmonic_bonds", ["str1", "str2"])
def test_gaff_angle_parameters(self, gaff):
angle_params = gaff.get_parameters("harmonic_angles", ["f", "c1", "f"])
assert np.allclose(
[angle_params["theta"], angle_params["k"]],
[3.141592653589793, 487.0176],
)
def test_gaff_angle_parameters_reversed(self, gaff):
assert np.allclose(
list(
gaff.get_parameters(
"harmonic_angles", ["f", "c2", "ha"]
).values()
),
list(
gaff.get_parameters(
"harmonic_angles", ["ha", "c2", "f"]
).values()
),
)
def test_gaff_missing_angle_parameters(self, gaff):
with pytest.raises(MissingParametersError):
gaff.get_parameters("harmonic_angles", ["1", "2", "3"])
def test_gaff_periodic_proper_parameters(self, gaff):
periodic_proper_params = gaff.get_parameters(
"periodic_propers", ["c3", "c", "sh", "hs"]
)
assert np.allclose(periodic_proper_params["periodicity"], [2.0, 1.0])
assert np.allclose(
periodic_proper_params["k"], [9.414, 5.4392000000000005]
)
assert np.allclose(
periodic_proper_params["phase"],
[3.141592653589793, 3.141592653589793],
)
def test_gaff_periodic_proper_parameters_reversed(self, gaff):
assert np.allclose(
list(
gaff.get_parameters(
"periodic_propers", ["c3", "c", "sh", "hs"]
).values()
),
list(
gaff.get_parameters(
"periodic_propers", ["hs", "sh", "c", "c3"]
).values()
),
)
def test_gaff_periodic_improper_parameters(self, gaff):
periodic_improper_params = gaff.get_parameters(
"periodic_impropers", ["c", "", "o", "o"]
)
assert np.allclose(periodic_improper_params["periodicity"], [2.0])
assert np.allclose(periodic_improper_params["k"], [4.6024])
assert np.allclose(
periodic_improper_params["phase"], [3.141592653589793]
)
def test_gaff_periodic_improper_parameters_reversed(self, gaff):
assert np.allclose(
list(
gaff.get_parameters(
"periodic_impropers", ["c", "", "o", "o"]
).values()
),
list(
gaff.get_parameters(
"periodic_impropers", ["c", "o", "", "o"]
).values()
),
)
def test_gaff_proper_params_missing(self, gaff):
with pytest.raises(MissingParametersError):
gaff.get_parameters("periodic_impropers", ["a", "b", "c", "d"])
def test_gaff_scaling_factors(self, gaff):
assert gaff.lj14scale == 0.5
assert np.isclose(gaff.coulomb14scale, 0.833333333)
def test_opls_get_parameters_atoms(self, oplsaa):
atom_params = oplsaa.get_parameters("atoms", "opls_145")
assert atom_params["sigma"] == 0.355
assert atom_params["epsilon"] == 0.29288
def test_opls_get_parameters_atoms_list(self, oplsaa):
atom_params = oplsaa.get_parameters("atoms", ["opls_145"])
assert atom_params["sigma"] == 0.355
assert atom_params["epsilon"] == 0.29288
def test_opls_get_parameters_atom_class(self, oplsaa):
atom_params = oplsaa.get_parameters(
"atoms", "CA", keys_are_atom_classes=True
)
assert atom_params["sigma"] == 0.355
assert atom_params["epsilon"] == 0.29288
def test_opls_get_parameters_bonds(self, oplsaa):
bond_params = oplsaa.get_parameters(
"harmonic_bonds", ["opls_760", "opls_145"]
)
assert bond_params["length"] == 0.146
assert bond_params["k"] == 334720.0
def test_opls_get_parameters_bonds_reversed(self, oplsaa):
assert np.allclose(
list(
oplsaa.get_parameters(
"harmonic_bonds", ["opls_760", "opls_145"]
).values()
),
list(
oplsaa.get_parameters(
"harmonic_bonds", ["opls_145", "opls_760"]
).values()
),
)
def test_opls_get_parameters_bonds_atom_classes_reversed(self, oplsaa):
assert np.allclose(
list(
oplsaa.get_parameters(
"harmonic_bonds", ["C_2", "O_2"], True
).values()
),
list(
oplsaa.get_parameters(
"harmonic_bonds", ["O_2", "C_2"], True
).values()
),
)
def test_opls_get_parameters_angle(self, oplsaa):
angle_params = oplsaa.get_parameters(
"harmonic_angles", ["opls_166", "opls_772", "opls_167"]
)
assert np.allclose(
[angle_params["theta"], angle_params["k"]], [2.0943950239, 585.76]
)
def test_opls_get_parameters_angle_reversed(self, oplsaa):
assert np.allclose(
list(
oplsaa.get_parameters(
"harmonic_angles", ["opls_166", "opls_772", "opls_167"]
).values()
),
list(
oplsaa.get_parameters(
"harmonic_angles", ["opls_167", "opls_772", "opls_166"]
).values()
),
)
def test_opls_get_parameters_angle_atom_classes(self, oplsaa):
angle_params = oplsaa.get_parameters(
"harmonic_angles", ["CA", "C_2", "CA"], keys_are_atom_classes=True
)
assert np.allclose(
[angle_params["theta"], angle_params["k"]], [2.09439510239, 711.28]
)
def test_opls_get_parameters_angle_atom_classes_reversed(self, oplsaa):
assert np.allclose(
list(
oplsaa.get_parameters(
"harmonic_angles",
["CA", "C", "O"],
keys_are_atom_classes=True,
).values()
),
list(
oplsaa.get_parameters(
"harmonic_angles",
["O", "C", "CA"],
keys_are_atom_classes=True,
).values()
),
)
def test_opls_get_parameters_rb_proper(self, oplsaa):
proper_params = oplsaa.get_parameters(
"rb_propers", ["opls_215", "opls_215", "opls_235", "opls_269"]
)
assert np.allclose(
[
proper_params["c0"],
proper_params["c1"],
proper_params["c2"],
proper_params["c3"],
proper_params["c4"],
proper_params["c5"],
],
[2.28446, 0.0, -2.28446, 0.0, 0.0, 0.0],
)
def test_get_parameters_rb_proper_reversed(self, oplsaa):
assert np.allclose(
list(
oplsaa.get_parameters(
"rb_propers",
["opls_215", "opls_215", "opls_235", "opls_269"],
).values()
),
list(
oplsaa.get_parameters(
"rb_propers",
["opls_269", "opls_235", "opls_215", "opls_215"],
).values()
),
)
def test_opls_get_parameters_wildcard(self, oplsaa):
proper_params = oplsaa.get_parameters(
"rb_propers", ["", "opls_235", "opls_544", ""]
)
assert np.allclose(
[
proper_params["c0"],
proper_params["c1"],
proper_params["c2"],
proper_params["c3"],
proper_params["c4"],
proper_params["c5"],
],
[30.334, 0.0, -30.334, 0.0, 0.0, 0.0],
)
def test_opls_missing_force(self, oplsaa):
with pytest.raises(MissingForceError):
oplsaa.get_parameters("periodic_propers", key=["a", "b", "c", "d"])
def test_opls_scaling_factors(self, oplsaa):
assert oplsaa.lj14scale == 0.5
assert oplsaa.coulomb14scale == 0.5
def test_missing_scaling_factors(self):
ff = Forcefield(forcefield_files=(get_fn("validate_customtypes.xml")))
with pytest.raises(AttributeError):
assert ff.lj14scale
with pytest.raises(AttributeError):
assert ff.coulomb14scale
|
mit
| 3,671,707,264,193,672,000 | 33.582759 | 79 | 0.5172 | false |
heromod/migrid
|
mig/webserver/webserver.py
|
1
|
1966
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# --- BEGIN_HEADER ---
#
# webserver - [insert a few words of module description on this line]
# Copyright (C) 2003-2009 The MiG Project lead by Brian Vinter
#
# This file is part of MiG.
#
# MiG is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# MiG is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# -- END_HEADER ---
#
"""Simple test CGI server"""
import sys
import CGIHTTPServer
import BaseHTTPServer
import SocketServer
class Handler(CGIHTTPServer.CGIHTTPRequestHandler):
cgi_directories = ['/cgi-bin']
class ThreadingServer(SocketServer.ThreadingMixIn,
BaseHTTPServer.HTTPServer):
pass
class ForkingServer(SocketServer.ForkingMixIn,
BaseHTTPServer.HTTPServer):
pass
# Listen address
IP = '127.0.0.1'
PORT = 8080
print 'Serving at %s port %d' % (IP, PORT)
print 'before attr override: have fork: %s' % Handler.have_fork
Handler.have_fork = False
print 'after attr override: have fork: %s' % Handler.have_fork
# server = BaseHTTPServer.HTTPServer((IP, PORT), Handler)
# server.serve_forever()
# server = ThreadingServer((IP,PORT), Handler)
server = ForkingServer((IP, PORT), Handler)
print 'server attr: have fork: %s'\
% server.RequestHandlerClass.have_fork
try:
while True:
sys.stdout.flush()
server.handle_request()
except KeyboardInterrupt:
print 'Server killed'
|
gpl-2.0
| -8,052,238,269,252,702,000 | 23.886076 | 81 | 0.722787 | false |
mirestrepo/voxels-at-lems
|
boxm/fill_internal_nodes.py
|
1
|
1379
|
import boxm_batch;
import os;
import optparse;
boxm_batch.register_processes();
boxm_batch.register_datatypes();
class dbvalue:
def __init__(self, index, type):
self.id = index # unsigned integer
self.type = type # string
print("Filling internal nodes");
#Parse inputs
parser = optparse.OptionParser(description='Fill Internal Nodes');
parser.add_option('--model_dir', action="store", dest="model_dir", type="string", default="");
parser.add_option('--model_name', action="store", dest="model_name", type="string",default="");
options, args = parser.parse_args()
model_dir = options.model_dir;
model_name = options.model_name;
if len(model_dir) == 0:
print "Missing Model Dir"
sys.exit(-1);
if len(model_name) == 0:
print "Missing Model Name"
sys.exit(-1);
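# Illustrative invocation (hypothetical paths), matching the options above:
#     python fill_internal_nodes.py --model_dir /data/scenes --model_name my_scene
# which loads /data/scenes/my_scene.xml as the scene description.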
print("Creating a Scene");
boxm_batch.init_process("boxmCreateSceneProcess");
boxm_batch.set_input_string(0, model_dir +"/" + str(model_name) + ".xml");
boxm_batch.run_process();
(scene_id, scene_type) = boxm_batch.commit_output(0);
scene = dbvalue(scene_id, scene_type);
print("*************************************");
print("Filling internal nodes");
boxm_batch.init_process("boxm_fill_internal_cells_process");
boxm_batch.set_input_from_db(0, scene);
boxm_batch.run_process();
(scene_id, scene_type) = boxm_batch.commit_output(0);
filled_scene = dbvalue(scene_id, scene_type);
|
bsd-2-clause
| -807,465,746,511,493,400 | 24.537037 | 95 | 0.677302 | false |
jeonghoonkang/BerePi
|
apps/lcd_berepi/lib/lcd.py
|
1
|
5635
|
#!/usr/bin/python
# Author : ipmstyle, https://github.com/ipmstyle
# : jeonghoonkang, https://github.com/jeonghoonkang
# for the detail of HW connection, see lcd_connect.py
import RPi.GPIO as GPIO
import time, os
from subprocess import *
from lcd_connect import *
# Define some device constants
LCD_WIDTH = 16 # Maximum characters per line
LCD_CHR = True
LCD_CMD = False
LCD_LINE_1 = 0x80 # LCD RAM address for the 1st line
LCD_LINE_2 = 0xC0 # LCD RAM address for the 2nd line
# Timing constants
E_PULSE = 0.0005
E_DELAY = 0.0005
def lcd_init():
GPIO.setmode(GPIO.BCM) # Use BCM GPIO numbers
GPIO.setup(LCD_E, GPIO.OUT) # E
GPIO.setup(LCD_RS, GPIO.OUT) # RS
GPIO.setup(LCD_D4, GPIO.OUT) # DB4
GPIO.setup(LCD_D5, GPIO.OUT) # DB5
GPIO.setup(LCD_D6, GPIO.OUT) # DB6
GPIO.setup(LCD_D7, GPIO.OUT) # DB7
#GPIO.setup(LED_ON, GPIO.OUT) # Backlight enable
GPIO.setup(LCD_RED, GPIO.OUT) # RED Backlight enable
GPIO.setup(LCD_GREEN, GPIO.OUT) # GREEN Backlight enable
GPIO.setup(LCD_BLUE, GPIO.OUT) # BLUEBacklight enable
# Initialise display
lcd_byte(0x33,LCD_CMD) # 110011 Initialise
lcd_byte(0x32,LCD_CMD) # 110010 Initialise
lcd_byte(0x06,LCD_CMD) # 000110 Cursor move direction
lcd_byte(0x0C,LCD_CMD) # 001100 Display On,Cursor Off, Blink Off
lcd_byte(0x28,LCD_CMD) # 101000 Data length, number of lines, font size
lcd_byte(0x01,LCD_CMD) # 000001 Clear display
time.sleep(E_DELAY)
def lcd_clear():
lcd_byte(0x01,LCD_CMD) # 000001 Clear display
time.sleep(E_DELAY)
def lcd_byte(bits, mode):
# Send byte to data pins
# bits = data
# mode = True for character
# False for command
GPIO.output(LCD_RS, mode) # RS
# High bits
GPIO.output(LCD_D4, False)
GPIO.output(LCD_D5, False)
GPIO.output(LCD_D6, False)
GPIO.output(LCD_D7, False)
if bits&0x10==0x10: GPIO.output(LCD_D4, True)
if bits&0x20==0x20: GPIO.output(LCD_D5, True)
if bits&0x40==0x40: GPIO.output(LCD_D6, True)
if bits&0x80==0x80: GPIO.output(LCD_D7, True)
# Toggle 'Enable' pin
lcd_toggle_enable()
# Low bits
GPIO.output(LCD_D4, False)
GPIO.output(LCD_D5, False)
GPIO.output(LCD_D6, False)
GPIO.output(LCD_D7, False)
if bits&0x01==0x01: GPIO.output(LCD_D4, True)
if bits&0x02==0x02: GPIO.output(LCD_D5, True)
if bits&0x04==0x04: GPIO.output(LCD_D6, True)
if bits&0x08==0x08: GPIO.output(LCD_D7, True)
# Toggle 'Enable' pin
lcd_toggle_enable()
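# Illustrative sketch (not part of the driver): sending the character 'A'
# (0x41) writes the high nibble 0x4 and then the low nibble 0x1 to D4-D7,
# toggling the enable pin after each nibble:
#     lcd_byte(ord('A'), LCD_CHR)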
def lcd_toggle_enable():
# Toggle enable
time.sleep(E_DELAY)
GPIO.output(LCD_E, True)
time.sleep(E_PULSE)
GPIO.output(LCD_E, False)
time.sleep(E_DELAY)
def lcd_string(message,line,style):
# Send string to display
# style=1 Left justified
# style=2 Centred
# style=3 Right justified
if style==1:
message = message.ljust(LCD_WIDTH," ")
elif style==2:
message = message.center(LCD_WIDTH," ")
elif style==3:
message = message.rjust(LCD_WIDTH," ")
lcd_byte(line, LCD_CMD)
for i in range(LCD_WIDTH):
lcd_byte(ord(message[i]),LCD_CHR)
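# Illustrative usage sketch: write a centred string to row 1 and a
# right-justified string to row 2, using styles 2 and 3 described above:
#     lcd_string("Hello", LCD_LINE_1, 2)
#     lcd_string("World", LCD_LINE_2, 3)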
def redLCDon():
red_backlight(False)
def greenLCDon():
green_backlight(False)
def blueLCDon():
blue_backlight(False)
def LCDoff():
red_backlight(True)
green_backlight(True)
blue_backlight(True)
def yellowLCDon():
GPIO.output(LCD_BLUE, True)
GPIO.output(LCD_RED, False)
GPIO.output(LCD_GREEN, False)
def skyeLCDon():
GPIO.output(LCD_BLUE, True)
GPIO.output(LCD_RED, False)
GPIO.output(LCD_GREEN, False)
def pinkLCDon():
GPIO.output(LCD_GREEN, True)
GPIO.output(LCD_RED, False)
GPIO.output(LCD_BLUE, False)
def whiteLCDon():
GPIO.output(LCD_RED, False)
GPIO.output(LCD_GREEN, False)
GPIO.output(LCD_BLUE, False)
def red_backlight(flag):
# Toggle red-backlight on-off-on
GPIO.output(LCD_GREEN, True)
GPIO.output(LCD_BLUE, True)
GPIO.output(LCD_RED, flag)
def green_backlight(flag):
# Toggle green-backlight on-off-on
GPIO.output(LCD_RED, True)
GPIO.output(LCD_BLUE, True)
GPIO.output(LCD_GREEN, flag)
def blue_backlight(flag):
# Toggle blue-backlight on-off-on
GPIO.output(LCD_RED, True)
GPIO.output(LCD_GREEN, True)
GPIO.output(LCD_BLUE, flag)
def run_cmd(cmd):
p = Popen(cmd, shell=True, stdout=PIPE)
output = p.communicate()[0]
return output
def ip_chk():
cmd = "ip addr show eth0 | grep inet | awk '{print $2}' | cut -d/ -f1"
ipAddr = run_cmd(cmd)
return ipAddr
def wip_chk():
cmd = "ip addr show wlan0 | grep inet | awk '{print $2}' | cut -d/ -f1"
wipAddr = run_cmd(cmd)
return wipAddr
def mac_chk():
cmd = "ifconfig -a | grep ^eth | awk '{print $5}'"
macAddr = run_cmd(cmd)
return macAddr
def wmac_chk():
cmd = "ifconfig -a | grep ^wlan | awk '{print $5}'"
wmacAddr = run_cmd(cmd)
return wmacAddr
def stalk_chk():
cmd = "hostname"
return run_cmd(cmd)
def main():
    # Main program block
    # Initialise display
    lcd_init()
    print ip_chk(), wip_chk(), mac_chk(), wmac_chk(), stalk_chk()
    while True:
        str = ip_chk()
        str = str[:-1]
        lcd_string('%s ET' % str, LCD_LINE_1, 1)
        str = mac_chk()
        str = str[:-1]
        lcd_string('%s' % (str), LCD_LINE_2, 1)
        red_backlight(False)  # turn on red backlight
        time.sleep(3.5)  # 3.5 second delay
        str = wip_chk()
        str = str[:-1]
        lcd_string('%s WL ' % (str), LCD_LINE_1, 1)
        str = wmac_chk()
        str = str[:-1]
        lcd_string('%s' % (str), LCD_LINE_2, 1)
        green_backlight(False)  # turn on green backlight
        time.sleep(3.5)  # 3.5 second delay
if __name__ == '__main__':
    try:
        main()
    except KeyboardInterrupt:
        pass
    finally:
        lcd_byte(0x01, LCD_CMD)
        lcd_string("Goodbye!", LCD_LINE_1, 2)
        GPIO.cleanup()
|
bsd-2-clause
| 8,807,742,185,404,786,000 | 23.823789 | 75 | 0.657498 | false |
jeremiah-c-leary/vhdl-style-guide
|
vsg/rules/generate/rule_400.py
|
1
|
1308
|
from vsg.rules import align_tokens_in_region_between_tokens_when_between_tokens_unless_between_tokens
from vsg import token
lAlign = []
lAlign.append(token.full_type_declaration.identifier)
lAlign.append(token.incomplete_type_declaration.identifier)
lAlign.append(token.file_declaration.identifier)
lAlign.append(token.constant_declaration.identifier)
lAlign.append(token.signal_declaration.identifier)
lAlign.append(token.subtype_declaration.identifier)
lAlign.append(token.variable_declaration.identifier)
oStartToken = token.for_generate_statement.generate_keyword
oEndToken = token.generate_statement_body.begin_keyword
lBetweenTokens = []
lBetweenTokens.append([token.for_generate_statement.for_keyword, token.for_generate_statement.end_keyword])
lUnless = []
lUnless.append([token.subprogram_body.is_keyword,token.subprogram_body.begin_keyword])
class rule_400(align_tokens_in_region_between_tokens_when_between_tokens_unless_between_tokens):
'''
Checks the alignment of declaration identifiers in the generate declarative region.
'''
def __init__(self):
align_tokens_in_region_between_tokens_when_between_tokens_unless_between_tokens.__init__(self, 'generate', '400', lAlign, oStartToken, oEndToken, lBetweenTokens, lUnless)
self.solution = 'Align identifer.'
|
gpl-3.0
| -5,148,443,936,430,004,000 | 39.875 | 178 | 0.791284 | false |
susahe/sis
|
sis/schedule/models.py
|
1
|
1278
|
from __future__ import unicode_literals
from django.template.defaultfilters import slugify
from django.contrib.auth.models import User
from django.db import models
from course.models import Activity,Course,CourseGroup
from datetime import datetime
# Theory Session table create
# have relationship between course groups table an
class TheorySession(models.Model):
coursegroup = models.ForeignKey(CourseGroup)
name = models.CharField(max_length=120)
start_time = models.DateTimeField(default=datetime.now, blank=True)
end_time = models.DateTimeField(default=datetime.now, blank=True)
activity = models.ForeignKey(Activity)
is_present= models.BooleanField()
class LabSession(models.Model):
name = models.CharField(max_length=120)
start_time = models.DateTimeField(default=datetime.now, blank=True)
end_time = models.DateTimeField(default=datetime.now, blank=True)
activity = models.ForeignKey(Activity)
is_present= models.BooleanField()
class PracticalSession(models.Model):
name = models.CharField(max_length=120)
user = models.ForeignKey(User)
start_time = models.DateTimeField(default=datetime.now, blank=True)
end_time = models.DateTimeField(default=datetime.now, blank=True)
activity = models.ForeignKey(Activity)
is_present= models.BooleanField()
|
gpl-2.0
| 7,980,110,490,229,708,000 | 35.514286 | 68 | 0.79734 | false |
cheeseywhiz/cheeseywhiz
|
http/data/csv-vis.py
|
1
|
3063
|
#!/usr/bin/env python3
import csv
import sys
import matplotlib.pyplot as plt
from config import data_sets, fontdict
try:
sys.argv[1]
if sys.argv[1] not in data_sets:
raise IndexError
except IndexError as error:
keys = '\n'.join(key for key in data_sets)
print(f'Data sets:\n{keys}\nPut in arg #1')
sys.exit(1)
data_set = data_sets[sys.argv[1]]
# allowing for None end chars
if data_set['str-end-chars'] is not None:
data_set['str-end-chars'] *= -1
with open(data_set['file-location']) as file:
# for processing huge files
csv.field_size_limit(sys.maxsize)
# you can unpack a list: no tupling required here
raw_data = list(csv.reader(file))
print('raw_data')
# headers from data[0] so far
# strip MULTIPOLYGON ((( ))) from coordinates string
# remove headers row [0]
formatted_data = [
(
row[data_set['label-index']].capitalize(),
row[data_set['data-index']][
data_set['str-start-chars']:data_set['str-end-chars']
]
)
for row in raw_data[1:]
]
print('formatted_data')
# mo county data pairs coords differently
if data_set == data_sets['mo-counties']:
formatted_data = [
(label, coords.replace(',', ' '))
for label, coords in formatted_data
]
# split up numbers to furthur work with
split_coords = [
(label, coords_str.split(' '))
for label, coords_str in formatted_data
]
print('split_coords')
# turn strings into floats by trimming off trailing characters if necessary
def float_recur(str, n=1):
    if n > 1000:  # bail out before hitting Python's recursion limit (RecursionError)
return None # Also good for debugging
try:
return float(str)
except Exception:
return float_recur(str[:-1], n=n + 1)
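# Illustrative sketch: float_recur keeps trimming trailing junk until the
# string parses, e.g. float_recur('12.5)') == 12.5, and returns None when
# nothing parses within the retry budget.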
float_coords = [
(label, [float_recur(coord) for coord in coords_str])
for label, coords_str in split_coords
]
print('float_coords')
# throw pairs of consecutive lat/longs together in a single tuple
def combine(list):
for i in range(len(list)):
if not i % 2:
yield list[i], list[i + 1]
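# Illustrative sketch: combine pairs up consecutive values, so
# list(combine([1.0, 2.0, 3.0, 4.0])) == [(1.0, 2.0), (3.0, 4.0)].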
coord_pairs = [
(label, [i for i in combine(coords)])
for label, coords in float_coords
]
print('coord_pairs')
# calculate the center of the area to place the label
def center(points: list):
# filter out None values from combine() generator
points = [
(x, y)
for x, y in points
if not (x is None or y is None)
]
def avg(list):
return sum(list) / len(list)
x, y = zip(*points)
return avg(x), avg(y)
label_geom_center = [
(label, coords, center(coords))
for label, coords in coord_pairs
]
print('label_geom_center')
# convert pairs of coordinates into lists of lats and longs
boundaries = [
(label, zip(*coords), center)
for label, coords, center in label_geom_center
]
print('boundaries')
# plot the data
for label, boundary, center in boundaries:
plt.plot(*boundary)
if data_set['show-labels']:
plt.text(*center, label, fontdict=fontdict)
print('showing plot')
plt.show()
print('done')
|
mit
| -5,690,825,025,149,385,000 | 23.309524 | 74 | 0.63859 | false |
kylewray/nova
|
python/nova/nova_pomdp.py
|
1
|
6240
|
""" The MIT License (MIT)
Copyright (c) 2015 Kyle Hollins Wray, University of Massachusetts
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
import ctypes as ct
import platform
import os.path
import sys
sys.path.append(os.path.join(os.path.dirname(os.path.realpath(__file__))))
import nova_pomdp_alpha_vectors as npav
import pomdp_alpha_vectors as pav
# Check if we need to create the nova variable. If so, import the correct library
# file depending on the platform.
#try:
# _nova
#except NameError:
_nova = None
if platform.system() == "Windows":
_nova = ct.CDLL(os.path.join(os.path.dirname(os.path.realpath(__file__)),
"..", "..", "lib", "libnova.dll"))
else:
_nova = ct.CDLL(os.path.join(os.path.dirname(os.path.realpath(__file__)),
"..", "..", "lib", "libnova.so"))
class NovaPOMDP(ct.Structure):
""" The C struct POMDP object. """
_fields_ = [("n", ct.c_uint),
("ns", ct.c_uint),
("m", ct.c_uint),
("z", ct.c_uint),
("r", ct.c_uint),
("rz", ct.c_uint),
("gamma", ct.c_float),
("horizon", ct.c_uint),
("S", ct.POINTER(ct.c_int)),
("T", ct.POINTER(ct.c_float)),
("O", ct.POINTER(ct.c_float)),
("R", ct.POINTER(ct.c_float)),
("Z", ct.POINTER(ct.c_int)),
("B", ct.POINTER(ct.c_float)),
("d_S", ct.POINTER(ct.c_int)),
("d_T", ct.POINTER(ct.c_float)),
("d_O", ct.POINTER(ct.c_float)),
("d_R", ct.POINTER(ct.c_float)),
("d_Z", ct.POINTER(ct.c_int)),
("d_B", ct.POINTER(ct.c_float)),
]
# Functions from 'pomdp_model_cpu.h'.
_nova.pomdp_initialize_cpu.argtypes = (ct.POINTER(NovaPOMDP),
ct.c_uint, # n
ct.c_uint, # ns
ct.c_uint, # m
ct.c_uint, # z
ct.c_uint, # r
ct.c_uint, # rz
ct.c_float, # gamma
ct.c_uint) # horizon
_nova.pomdp_belief_update_cpu.argtypes = (ct.POINTER(NovaPOMDP),
ct.POINTER(ct.c_float), # b
ct.c_uint, # a
ct.c_uint, # o
ct.POINTER(ct.POINTER(ct.c_float))) # bp
_nova.pomdp_add_new_raw_beliefs_cpu.argtypes = (ct.POINTER(NovaPOMDP),
ct.c_uint, # numBeliefPointsToAdd
ct.POINTER(ct.c_float)) # Bnew
_nova.pomdp_uninitialize_cpu.argtypes = tuple([ct.POINTER(NovaPOMDP)])
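# Illustrative sketch (not part of the bindings): with the argtypes above in
# place, the CPU-side model could be initialised from Python roughly like
# this; the sizes, gamma and horizon are arbitrary example values:
#     pomdp = NovaPOMDP()
#     _nova.pomdp_initialize_cpu(ct.byref(pomdp), 2, 2, 2, 2, 1, 1,
#                                ct.c_float(0.95), 10)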
# Functions from 'pomdp_expand_cpu.h'.
_nova.pomdp_expand_random_cpu.argtypes = (ct.POINTER(NovaPOMDP),
ct.c_uint) # numBeliefsToAdd
_nova.pomdp_expand_distinct_beliefs_cpu.argtypes = tuple([ct.POINTER(NovaPOMDP)])
_nova.pomdp_expand_pema_cpu.argtypes = (ct.POINTER(NovaPOMDP),
ct.POINTER(pav.POMDPAlphaVectors)) # policy
# Functions from 'pomdp_sigma_cpu.h'.
_nova.pomdp_sigma_cpu.argtypes = (ct.POINTER(NovaPOMDP),
ct.c_uint, # numDesiredNonZeroValues
ct.POINTER(ct.c_float)) # sigma
# Functions from 'pomdp_model_gpu.h'.
_nova.pomdp_initialize_gpu.argtypes = tuple([ct.POINTER(NovaPOMDP)])
_nova.pomdp_uninitialize_gpu.argtypes = tuple([ct.POINTER(NovaPOMDP)])
_nova.pomdp_initialize_successors_gpu.argtypes = tuple([ct.POINTER(NovaPOMDP)])
_nova.pomdp_uninitialize_successors_gpu.argtypes = tuple([ct.POINTER(NovaPOMDP)])
_nova.pomdp_initialize_state_transitions_gpu.argtypes = tuple([ct.POINTER(NovaPOMDP)])
_nova.pomdp_uninitialize_state_transitions_gpu.argtypes = tuple([ct.POINTER(NovaPOMDP)])
_nova.pomdp_initialize_observation_transitions_gpu.argtypes = tuple([ct.POINTER(NovaPOMDP)])
_nova.pomdp_uninitialize_observation_transitions_gpu.argtypes = tuple([ct.POINTER(NovaPOMDP)])
_nova.pomdp_initialize_rewards_gpu.argtypes = tuple([ct.POINTER(NovaPOMDP)])
_nova.pomdp_uninitialize_rewards_gpu.argtypes = tuple([ct.POINTER(NovaPOMDP)])
_nova.pomdp_initialize_nonzero_beliefs_gpu.argtypes = tuple([ct.POINTER(NovaPOMDP)])
_nova.pomdp_uninitialize_nonzero_beliefs_gpu.argtypes = tuple([ct.POINTER(NovaPOMDP)])
_nova.pomdp_initialize_belief_points_gpu.argtypes = tuple([ct.POINTER(NovaPOMDP)])
_nova.pomdp_uninitialize_belief_points_gpu.argtypes = tuple([ct.POINTER(NovaPOMDP)])
# Functions from 'pomdp_expand_gpu.h'.
_nova.pomdp_expand_random_gpu.argtypes = (ct.POINTER(NovaPOMDP),
ct.c_uint, # numThreads
ct.c_uint) # numBeliefsToAdd
|
mit
| 2,637,093,861,809,474,000 | 47.372093 | 98 | 0.565705 | false |
facebook/chisel
|
commands/FBXCTestCommands.py
|
1
|
48565
|
#!/usr/bin/python
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import re
import fbchisellldbbase as fb
import lldb
NOT_FOUND = 0xFFFFFFFF # UINT32_MAX
def lldbcommands():
return [FBXCPrintDebugDescription(), FBXCPrintTree(), FBXCPrintObject(), FBXCNoId()]
class FBXCPrintDebugDescription(fb.FBCommand):
def name(self):
return "xdebug"
def description(self):
return "Print debug description the XCUIElement in human readable format."
def args(self):
return [
fb.FBCommandArgument(
arg="element",
type="XCUIElement*",
help="The element to print debug description.",
default="__default__",
)
]
def run(self, arguments, options):
element = arguments[0]
language = fb.currentLanguage()
if element == "__default__":
element = (
"XCUIApplication()"
if language == lldb.eLanguageTypeSwift
else "(XCUIApplication *)[[XCUIApplication alloc] init]"
)
if language == lldb.eLanguageTypeSwift:
print(
fb.evaluateExpressionValue(
"{}.debugDescription".format(element), language=language
)
.GetObjectDescription()
.replace("\\n", "\n")
.replace("\\'", "'")
.strip(' "\n\t')
)
else:
print(
fb.evaluateExpressionValue(
"[{} debugDescription]".format(element)
).GetObjectDescription()
)
class FBXCPrintTree(fb.FBCommand):
def name(self):
return "xtree"
def description(self):
return "Print XCUIElement subtree."
def args(self):
return [
fb.FBCommandArgument(
arg="element",
type="XCUIElement*",
help="The element to print tree.",
default="__default__",
)
]
def options(self):
return [
fb.FBCommandArgument(
arg="pointer",
short="-p",
long="--pointer",
type="BOOL",
boolean=True,
default=False,
help="Print pointers",
),
fb.FBCommandArgument(
arg="trait",
short="-t",
long="--traits",
type="BOOL",
boolean=True,
default=False,
help="Print traits",
),
fb.FBCommandArgument(
arg="frame",
short="-f",
long="--frame",
type="BOOL",
boolean=True,
default=False,
help="Print frames",
),
]
def run(self, arguments, options):
element = arguments[0]
language = fb.currentLanguage()
if element == "__default__":
element = (
"XCUIApplication()"
if language == lldb.eLanguageTypeSwift
else "(XCUIApplication *)[[XCUIApplication alloc] init]"
)
# Evaluate object
element_sbvalue = fb.evaluateExpressionValue(
"{}".format(element), language=language
)
""":type: lldb.SBValue"""
        # Get the pointer value, so it works in both Swift and Objective-C
element_pointer = int(element_sbvalue.GetValue(), 16)
# Get XCElementSnapshot object
snapshot = take_snapshot(element_pointer)
# Print tree for snapshot element
snapshot_object = XCElementSnapshot(snapshot, language=language)
print(
snapshot_object.tree().hierarchy_text(
pointer=options.pointer, trait=options.trait, frame=options.frame
)
)
class FBXCPrintObject(fb.FBCommand):
def name(self):
return "xobject"
def description(self):
return "Print XCUIElement details."
def args(self):
return [
fb.FBCommandArgument(
arg="element",
type="XCUIElement*",
help="The element to print details.",
default="__default__",
)
]
def run(self, arguments, options):
element = arguments[0]
language = fb.currentLanguage()
if element == "__default__":
element = (
"XCUIApplication()"
if language == lldb.eLanguageTypeSwift
else "(XCUIApplication *)[[XCUIApplication alloc] init]"
)
# Evaluate object
element_sbvalue = fb.evaluateExpressionValue(
"{}".format(element), language=language
)
""":type: lldb.SBValue"""
        # Get the pointer value, so it works in both Swift and Objective-C
element_pointer = int(element_sbvalue.GetValue(), 16)
# Get XCElementSnapshot object
snapshot = take_snapshot(element_pointer)
# Print details of snapshot element
snapshot_object = XCElementSnapshot(snapshot, language=language)
print(snapshot_object.detail_summary())
class FBXCNoId(fb.FBCommand):
def name(self):
return "xnoid"
def description(self):
return "Print XCUIElement objects with label but without identifier."
def args(self):
return [
fb.FBCommandArgument(
arg="element",
type="XCUIElement*",
help="The element from start to.",
default="__default__",
)
]
def options(self):
return [
fb.FBCommandArgument(
arg="status_bar",
short="-s",
long="--status-bar",
type="BOOL",
boolean=True,
default=False,
help="Print status bar items",
),
fb.FBCommandArgument(
arg="pointer",
short="-p",
long="--pointer",
type="BOOL",
boolean=True,
default=False,
help="Print pointers",
),
fb.FBCommandArgument(
arg="trait",
short="-t",
long="--traits",
type="BOOL",
boolean=True,
default=False,
help="Print traits",
),
fb.FBCommandArgument(
arg="frame",
short="-f",
long="--frame",
type="BOOL",
boolean=True,
default=False,
help="Print frames",
),
]
def run(self, arguments, options):
element = arguments[0]
language = fb.currentLanguage()
if element == "__default__":
element = (
"XCUIApplication()"
if language == lldb.eLanguageTypeSwift
else "(XCUIApplication *)[[XCUIApplication alloc] init]"
)
# Evaluate object
element_sbvalue = fb.evaluateExpressionValue(
"{}".format(element), language=language
)
""":type: lldb.SBValue"""
# Get pointer value, so it will be working in Swift and Objective-C
element_pointer = int(element_sbvalue.GetValue(), 16)
# Get XCElementSnapshot object
snapshot = take_snapshot(element_pointer)
# Print tree for snapshot element
snapshot_object = XCElementSnapshot(snapshot, language=language)
elements = snapshot_object.find_missing_identifiers(
status_bar=options.status_bar
)
if elements is not None:
print(
elements.hierarchy_text(
pointer=options.pointer, trait=options.trait, frame=options.frame
)
)
else:
print("Couldn't found elements without identifier")
def take_snapshot(element):
"""
Takes a snapshot (XCElementSnapshot) of an XCUIElement (given as a pointer)
:param int element: Pointer to the XCUIElement
:return: XCElementSnapshot object
:rtype: lldb.SBValue
"""
return fb.evaluateExpressionValue(
"(XCElementSnapshot *)[[[{} query] matchingSnapshotsWithError:nil] firstObject]".format(
element
)
)
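# Illustrative usage sketch (not part of the original commands; the pointer value
# below is hypothetical). This mirrors what the xtree/xobject/xnoid commands do:
#   snapshot = take_snapshot(0x7fb2cd40a150)
#   root = XCElementSnapshot(snapshot, language=fb.currentLanguage())
#   print(root.tree().hierarchy_text())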
class _ElementList(object):
"""
Store element and list of children
:param XCElementSnapshot element: XCElementSnapshot
:param list[_ElementList] children: List of _ElementList objects
"""
def __init__(self, element, children):
self.element = element
self.children = children
def text(self, pointer, trait, frame, indent):
"""
String representation of the element
:param bool pointer: Print pointers
:param bool trait: Print traits
:param bool frame: Print frames
:param int indent: Indentation level
:return: String representation of the element
:rtype: str
"""
indent_string = " | " * indent
return "{}{}\n".format(
indent_string,
self.element.summary(pointer=pointer, trait=trait, frame=frame),
)
def hierarchy_text(self, pointer=False, trait=False, frame=False, indent=0):
"""
String representation of the hierarchy of elements
:param bool pointer: Print pointers
:param bool trait: Print traits
:param bool frame: Print frames
:param int indent: Indentation level
:return: String representation of the hierarchy of elements
:rtype: str
"""
s = self.text(pointer=pointer, trait=trait, frame=frame, indent=indent)
for e in self.children:
s += e.hierarchy_text(
pointer=pointer, trait=trait, frame=frame, indent=indent + 1
)
return s
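# Sample hierarchy_text() output shape (illustrative only; the element types,
# identifiers and labels below are assumptions, not taken from a real run):
#   Application: identifier: 'com.example.App'
#    | Window: MainWindow
#    |  | Button: label: 'Done'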
class XCElementSnapshot(object):
"""
XCElementSnapshot wrapper
:param lldb.SBValue element: XCElementSnapshot object
:param str element_value: Pointer to XCElementSnapshot object
:param language: Project language
:param lldb.SBValue _type: XCUIElement type / XCUIElementType
:param lldb.SBValue _traits: UIAccessibilityTraits
:param lldb.SBValue | None _frame: XCUIElement frame
:param lldb.SBValue _identifier: XCUIElement identifier
:param lldb.SBValue _value: XCUIElement value
:param lldb.SBValue _placeholderValue: XCUIElement placeholder value
:param lldb.SBValue _label: XCUIElement label
:param lldb.SBValue _title: XCUIElement title
:param lldb.SBValue _children: XCUIElement children
:param lldb.SBValue _enabled: XCUIElement is enabled
:param lldb.SBValue _selected: XCUIElement is selected
:param lldb.SBValue _isMainWindow: XCUIElement is main window
:param lldb.SBValue _hasKeyboardFocus: XCUIElement has keyboard focus
:param lldb.SBValue _hasFocus: XCUIElement has focus
:param lldb.SBValue _generation: XCUIElement generation
:param lldb.SBValue _horizontalSizeClass: XCUIElement horizontal size class
:param lldb.SBValue _verticalSizeClass: XCUIElement vertical size class
"""
def __init__(self, element, language):
"""
:param lldb.SBValue element: XCElementSnapshot object
:param language: Project language
"""
super(XCElementSnapshot, self).__init__()
self.element = element
self.element_value = self.element.GetValue()
self.language = language
self._type = None
self._traits = None
self._frame = None
self._identifier = None
self._value = None
self._placeholderValue = None
self._label = None
self._title = None
self._children = None
self._enabled = None
self._selected = None
self._isMainWindow = None
self._hasKeyboardFocus = None
self._hasFocus = None
self._generation = None
self._horizontalSizeClass = None
self._verticalSizeClass = None
@property
def is_missing_identifier(self):
"""
Checks if element has a label but doesn't have an identifier.
:return: True if element has a label but doesn't have an identifier.
:rtype: bool
"""
return len(self.identifier_value) == 0 and len(self.label_value) > 0
@property
def type(self):
"""
:return: XCUIElement type / XCUIElementType
:rtype: lldb.SBValue
"""
if self._type is None:
name = "_elementType"
if self.element.GetIndexOfChildWithName(name) == NOT_FOUND:
self._type = fb.evaluateExpressionValue(
"(int)[{} elementType]".format(self.element_value)
)
else:
self._type = self.element.GetChildMemberWithName(name)
return self._type
@property
def type_value(self):
"""
:return: XCUIElementType value
:rtype: int
"""
return int(self.type.GetValue())
@property
def type_summary(self):
"""
:return: XCUIElementType summary
:rtype: str
"""
return self.get_type_value_string(self.type_value)
@property
def traits(self):
"""
:return: UIAccessibilityTraits
:rtype: lldb.SBValue
"""
if self._traits is None:
name = "_traits"
if self.element.GetIndexOfChildWithName(name) == NOT_FOUND:
self._traits = fb.evaluateExpressionValue(
"(int)[{} traits]".format(self.element_value)
)
else:
self._traits = self.element.GetChildMemberWithName(name)
return self._traits
@property
def traits_value(self):
"""
:return: UIAccessibilityTraits value
:rtype: int
"""
return int(self.traits.GetValue())
@property
def traits_summary(self):
"""
:return: UIAccessibilityTraits summary
:rtype: str
"""
return self.get_traits_value_string(self.traits_value)
@property
def frame(self):
"""
:return: XCUIElement frame
:rtype: lldb.SBValue
"""
if self._frame is None:
import_uikit()
name = "_frame"
if self.element.GetIndexOfChildWithName(name) == NOT_FOUND:
self._frame = fb.evaluateExpressionValue(
"(CGRect)[{} frame]".format(self.element_value)
)
else:
self._frame = self.element.GetChildMemberWithName(name)
return self._frame
@property
def frame_summary(self):
"""
:return: XCUIElement frame summary
:rtype: str
"""
return CGRect(self.frame).summary()
@property
def identifier(self):
"""
:return: XCUIElement identifier
:rtype: lldb.SBValue
"""
if self._identifier is None:
name = "_identifier"
if self.element.GetIndexOfChildWithName(name) == NOT_FOUND:
self._identifier = fb.evaluateExpressionValue(
"(NSString *)[{} identifier]".format(self.element_value)
)
else:
self._identifier = self.element.GetChildMemberWithName(name)
return self._identifier
@property
def identifier_value(self):
"""
:return: XCUIElement identifier value
:rtype: str
"""
return normalize_summary(self.identifier.GetSummary())
@property
def identifier_summary(self):
"""
:return: XCUIElement identifier summary
:rtype: str | None
"""
if len(self.identifier_value) == 0:
return None
return "identifier: '{}'".format(self.identifier_value)
@property
def value(self):
"""
:return: XCUIElement value
:rtype: lldb.SBValue
"""
if self._value is None:
name = "_value"
if self.element.GetIndexOfChildWithName(name) == NOT_FOUND:
self._value = fb.evaluateExpressionValue(
"(NSString *)[{} value]".format(self.element_value)
)
else:
self._value = self.element.GetChildMemberWithName(name)
return self._value
@property
def value_value(self):
"""
:return: XCUIElement value value
:rtype: str
"""
return normalize_summary(self.value.GetSummary())
@property
def value_summary(self):
"""
:return: XCUIElement value summary
:rtype: str | None
"""
if len(self.value_value) == 0:
return None
return "value: '{}'".format(self.value_value)
@property
def placeholder(self):
"""
:return: XCUIElement placeholder value
:rtype: lldb.SBValue
"""
if self._placeholderValue is None:
name = "_placeholderValue"
if self.element.GetIndexOfChildWithName(name) == NOT_FOUND:
self._placeholderValue = fb.evaluateExpressionValue(
"(NSString *)[{} placeholderValue]".format(self.element_value)
)
else:
self._placeholderValue = self.element.GetChildMemberWithName(name)
return self._placeholderValue
@property
def placeholder_value(self):
"""
:return: XCUIElement placeholderValue value
:rtype: str
"""
return normalize_summary(self.placeholder.GetSummary())
@property
def placeholder_summary(self):
"""
:return: XCUIElement placeholderValue summary
:rtype: str | None
"""
if len(self.placeholder_value) == 0:
return None
return "placeholderValue: '{}'".format(self.placeholder_value)
@property
def label(self):
"""
:return: XCUIElement label
:rtype: lldb.SBValue
"""
if self._label is None:
name = "_label"
if self.element.GetIndexOfChildWithName(name) == NOT_FOUND:
self._label = fb.evaluateExpressionValue(
"(NSString *)[{} label]".format(self.element_value)
)
else:
self._label = self.element.GetChildMemberWithName(name)
return self._label
@property
def label_value(self):
"""
:return: XCUIElement label value
:rtype: str
"""
return normalize_summary(self.label.GetSummary())
@property
def label_summary(self):
"""
:return: XCUIElement label summary
:rtype: str | None
"""
if len(self.label_value) == 0:
return None
return "label: '{}'".format(self.label_value)
@property
def title(self):
"""
:return: XCUIElement title
:rtype: lldb.SBValue
"""
if self._title is None:
name = "_title"
if self.element.GetIndexOfChildWithName(name) == NOT_FOUND:
self._title = fb.evaluateExpressionValue(
"(NSString *)[{} title]".format(self.element_value)
)
else:
self._title = self.element.GetChildMemberWithName(name)
return self._title
@property
def title_value(self):
"""
:return: XCUIElement title value
:rtype: str
"""
return normalize_summary(self.title.GetSummary())
@property
def title_summary(self):
"""
:return: XCUIElement title summary
:rtype: str | None
"""
if len(self.title_value) == 0:
return None
return "title: '{}'".format(self.title_value)
@property
def children(self):
"""
:return: XCUIElement children
:rtype: lldb.SBValue
"""
if self._children is None:
name = "_children"
if self.element.GetIndexOfChildWithName(name) == NOT_FOUND:
self._children = fb.evaluateExpressionValue(
"(NSArray *)[{} children]".format(self.element_value)
)
else:
self._children = self.element.GetChildMemberWithName(name)
return self._children
@property
def children_count(self):
"""
:return: XCUIElement children count
:rtype: int
"""
return self.children.GetNumChildren()
@property
def children_list(self):
"""
:return: XCUIElement children list
:rtype: list[lldb.SBValue]
"""
return [self.children.GetChildAtIndex(i) for i in range(self.children_count)]
@property
def enabled(self):
"""
:return: XCUIElement is enabled
:rtype: lldb.SBValue
"""
if self._enabled is None:
name = "_enabled"
if self.element.GetIndexOfChildWithName(name) == NOT_FOUND:
self._enabled = fb.evaluateExpressionValue(
"(BOOL)[{} enabled]".format(self.element_value)
)
else:
self._enabled = self.element.GetChildMemberWithName(name)
return self._enabled
@property
def enabled_value(self):
"""
:return: XCUIElement is enabled value
:rtype: bool
"""
return bool(self.enabled.GetValueAsSigned())
@property
def enabled_summary(self):
"""
:return: XCUIElement is enabled summary
:rtype: str | None
"""
if not self.enabled_value:
return "enabled: {}".format(self.enabled_value)
return None
@property
def selected(self):
"""
:return: XCUIElement is selected
:rtype: lldb.SBValue
"""
if self._selected is None:
name = "_selected"
if self.element.GetIndexOfChildWithName(name) == NOT_FOUND:
self._selected = fb.evaluateExpressionValue(
"(BOOL)[{} selected]".format(self.element_value)
)
else:
self._selected = self.element.GetChildMemberWithName(name)
return self._selected
@property
def selected_value(self):
"""
:return: XCUIElement is selected value
:rtype: bool
"""
return bool(self.selected.GetValueAsSigned())
@property
def selected_summary(self):
"""
:return: XCUIElement is selected summary
:rtype: str | None
"""
if self.selected_value:
return "selected: {}".format(self.selected_value)
return None
@property
def is_main_window(self):
"""
:return: XCUIElement isMainWindow
:rtype: lldb.SBValue
"""
if self._isMainWindow is None:
name = "_isMainWindow"
if self.element.GetIndexOfChildWithName(name) == NOT_FOUND:
self._isMainWindow = fb.evaluateExpressionValue(
"(BOOL)[{} isMainWindow]".format(self.element_value)
)
else:
self._isMainWindow = self.element.GetChildMemberWithName(name)
return self._isMainWindow
@property
def is_main_window_value(self):
"""
:return: XCUIElement isMainWindow value
:rtype: bool
"""
return bool(self.is_main_window.GetValueAsSigned())
@property
def is_main_window_summary(self):
"""
:return: XCUIElement isMainWindow summary
:rtype: str | None
"""
if self.is_main_window_value:
return "MainWindow"
return None
@property
def keyboard_focus(self):
"""
:return: XCUIElement hasKeyboardFocus
:rtype: lldb.SBValue
"""
if self._hasKeyboardFocus is None:
name = "_hasKeyboardFocus"
if self.element.GetIndexOfChildWithName(name) == NOT_FOUND:
self._hasKeyboardFocus = fb.evaluateExpressionValue(
"(BOOL)[{} hasKeyboardFocus]".format(self.element_value)
)
else:
self._hasKeyboardFocus = self.element.GetChildMemberWithName(name)
return self._hasKeyboardFocus
@property
def keyboard_focus_value(self):
"""
:return: XCUIElement hasKeyboardFocus value
:rtype: bool
"""
return bool(self.keyboard_focus.GetValueAsSigned())
@property
def keyboard_focus_summary(self):
"""
:return: XCUIElement hasKeyboardFocus summary
:rtype: str | None
"""
if self.keyboard_focus_value:
return "hasKeyboardFocus: {}".format(self.keyboard_focus_value)
return None
@property
def focus(self):
"""
:return: XCUIElement hasFocus
:rtype: lldb.SBValue
"""
if self._hasFocus is None:
name = "_hasFocus"
if self.element.GetIndexOfChildWithName(name) == NOT_FOUND:
self._hasFocus = fb.evaluateExpressionValue(
"(BOOL)[{} hasFocus]".format(self.element_value)
)
else:
self._hasFocus = self.element.GetChildMemberWithName(name)
return self._hasFocus
@property
def focus_value(self):
"""
:return: XCUIElement hasFocus value
:rtype: bool
"""
return bool(self.focus.GetValueAsSigned())
@property
def focus_summary(self):
"""
:return: XCUIElement hasFocus summary
:rtype: str | None
"""
if self.focus_value:
return "hasFocus: {}".format(self.focus_value)
return None
@property
def generation(self):
"""
:return: XCUIElement generation
:rtype: lldb.SBValue
"""
if self._generation is None:
name = "_generation"
if self.element.GetIndexOfChildWithName(name) == NOT_FOUND:
self._generation = fb.evaluateExpressionValue(
"(unsigned int)[{} generation]".format(self.element_value)
)
else:
self._generation = self.element.GetChildMemberWithName(name)
return self._generation
@property
def generation_value(self):
"""
:return: XCUIElement generation value
:rtype: int
"""
return int(self.generation.GetValueAsUnsigned())
@property
def horizontal_size_class(self):
"""
:return: XCUIElement horizontal size class
:rtype: lldb.SBValue
"""
if self._horizontalSizeClass is None:
name = "_horizontalSizeClass"
if self.element.GetIndexOfChildWithName(name) == NOT_FOUND:
self._horizontalSizeClass = fb.evaluateExpressionValue(
"(int)[{} horizontalSizeClass]".format(self.element_value)
)
else:
self._horizontalSizeClass = self.element.GetChildMemberWithName(name)
return self._horizontalSizeClass
@property
def horizontal_size_class_value(self):
"""
:return: XCUIElement horizontal size class value
:rtype: int
"""
return int(self.horizontal_size_class.GetValue())
@property
def horizontal_size_class_summary(self):
"""
:return: XCUIElement horizontal size class summary
"""
return self.get_user_interface_size_class_string(
self.horizontal_size_class_value
)
@property
def vertical_size_class(self):
"""
:return: XCUIElement vertical size class
:rtype: lldb.SBValue
"""
if self._verticalSizeClass is None:
name = "_verticalSizeClass"
if self.element.GetIndexOfChildWithName(name) == NOT_FOUND:
self._verticalSizeClass = fb.evaluateExpressionValue(
"(int)[{} verticalSizeClass]".format(self.element_value)
)
else:
self._verticalSizeClass = self.element.GetChildMemberWithName(name)
return self._verticalSizeClass
@property
def vertical_size_class_value(self):
"""
:return: XCUIElement vertical size class value
:rtype: int
"""
return int(self.vertical_size_class.GetValue())
@property
def vertical_size_class_summary(self):
"""
:return: XCUIElement vertical size class summary
"""
return self.get_user_interface_size_class_string(self.vertical_size_class_value)
@property
def uniquely_identifying_objective_c_code(self):
"""
:return: XCUIElement uniquely identifying Objective-C code
:rtype: lldb.SBValue
"""
return fb.evaluateExpressionValue(
"(id)[{} _uniquelyIdentifyingObjectiveCCode]".format(self.element_value)
)
@property
def uniquely_identifying_objective_c_code_value(self):
"""
:return: XCUIElement uniquely identifying Objective-C code value
:rtype: str
"""
return normalize_array_description(
self.uniquely_identifying_objective_c_code.GetObjectDescription()
)
@property
def uniquely_identifying_swift_code(self):
"""
:return: XCUIElement uniquely identifying Swift code
:rtype: lldb.SBValue
"""
return fb.evaluateExpressionValue(
"(id)[{} _uniquelyIdentifyingSwiftCode]".format(self.element_value)
)
@property
def uniquely_identifying_swift_code_value(self):
"""
:return: XCUIElement uniquely identifying Swift code value
:rtype: str
"""
return normalize_array_description(
self.uniquely_identifying_swift_code.GetObjectDescription()
)
@property
def is_touch_bar_element(self):
"""
:return: XCUIElement is touch bar element
:rtype: lldb.SBValue
"""
return fb.evaluateExpressionValue(
"(BOOL)[{} isTouchBarElement]".format(self.element_value)
)
@property
def is_touch_bar_element_value(self):
"""
:return: XCUIElement is touch bar element value
:rtype: bool
"""
return bool(self.is_touch_bar_element.GetValueAsSigned())
@property
def is_top_level_touch_bar_element(self):
"""
:return: XCUIElement is top level touch bar element
:rtype: lldb.SBValue
"""
return fb.evaluateExpressionValue(
"(BOOL)[{} isTopLevelTouchBarElement]".format(self.element_value)
)
@property
def is_top_level_touch_bar_element_value(self):
"""
:return: XCUIElement is top level touch bar element value
:rtype: bool
"""
return bool(self.is_top_level_touch_bar_element.GetValueAsSigned())
@property
def suggested_hit_points(self):
"""
:return: XCUIElement suggested hit points
:rtype: lldb.SBValue
"""
return fb.evaluateExpressionValue(
"(NSArray *)[{} suggestedHitpoints]".format(self.element_value)
)
@property
def suggested_hit_points_value(self):
"""
:return: XCUIElement suggested hit points
:rtype: str
"""
return normalize_array_description(
self.suggested_hit_points.GetObjectDescription()
)
@property
def visible_frame(self):
"""
:return: XCUIElement visible frame
:rtype: lldb.SBValue
"""
import_uikit()
return fb.evaluateExpressionValue(
"(CGRect)[{} visibleFrame]".format(self.element_value)
)
@property
def visible_frame_summary(self):
"""
:return: XCUIElement visible frame
:rtype: str
"""
return CGRect(self.visible_frame).summary()
@property
def depth(self):
"""
:return: XCUIElement depth
:rtype: lldb.SBValue
"""
return fb.evaluateExpressionValue("(int)[{} depth]".format(self.element_value))
@property
def depth_value(self):
"""
:return: XCUIElement depth
:rtype: int
"""
return int(self.depth.GetValue())
@property
def hit_point(self):
"""
:return: XCUIElement hit point
:rtype: lldb.SBValue
"""
import_uikit()
return fb.evaluateExpressionValue(
"(CGPoint)[{} hitPoint]".format(self.element_value)
)
@property
def hit_point_value(self):
"""
:return: XCUIElement hit point
:rtype: str
"""
return CGPoint(self.hit_point).summary()
@property
def hit_point_for_scrolling(self):
"""
:return: XCUIElement hit point for scrolling
:rtype: lldb.SBValue
"""
import_uikit()
return fb.evaluateExpressionValue(
"(CGPoint)[{} hitPointForScrolling]".format(self.element_value)
)
@property
def hit_point_for_scrolling_value(self):
"""
:return: XCUIElement hit point for scrolling
:rtype: str
"""
return CGPoint(self.hit_point_for_scrolling).summary()
def summary(self, pointer=False, trait=False, frame=False):
"""
Returns XCElementSnapshot summary
:param bool pointer: Print pointers
:param bool trait: Print traits
:param bool frame: Print frames
:return: XCElementSnapshot summary
:rtype: str
"""
type_text = self.type_summary
if pointer:
type_text += " {:#x}".format(int(self.element_value, 16))
if trait:
type_text += " traits: {}({:#x})".format(
self.traits_summary, self.traits_value
)
frame_text = self.frame_summary if frame else None
identifier = self.identifier_summary
label = self.label_summary
title = self.title_summary
value = self.value_summary
placeholder = self.placeholder_summary
enabled = self.enabled_summary
selected = self.selected_summary
main_window = self.is_main_window_summary
keyboard_focus = self.keyboard_focus_summary
focus = self.focus_summary
texts = [
t
for t in [
frame_text,
identifier,
label,
title,
value,
placeholder,
enabled,
selected,
main_window,
keyboard_focus,
focus,
]
if t is not None
]
return "{}: {}".format(type_text, ", ".join(texts))
def detail_summary(self):
"""
Returns XCElementSnapshot detail summary
:return: XCElementSnapshot detail summary
:rtype: str
"""
texts = list()
texts.append("Pointer: {:#x}".format(int(self.element_value, 16)))
texts.append("Type: {}".format(self.type_summary))
texts.append("Depth: {}".format(self.depth_value))
texts.append(
"Traits: {} ({:#x})".format(self.traits_summary, self.traits_value)
)
texts.append("Frame: {}".format(self.frame_summary))
texts.append("Visible frame: {}".format(self.visible_frame_summary))
texts.append("Identifier: '{}'".format(self.identifier_value))
texts.append("Label: '{}'".format(self.label_value))
texts.append("Title: '{}'".format(self.title_value))
texts.append("Value: '{}'".format(self.value_value))
texts.append("Placeholder: '{}'".format(self.placeholder_value))
if self.language != lldb.eLanguageTypeSwift:
# These don't work in Swift :(
texts.append("Hit point: {}".format(self.hit_point_value))
texts.append(
"Hit point for scrolling: {}".format(self.hit_point_for_scrolling_value)
)
texts.append("Enabled: {}".format(self.enabled_value))
texts.append("Selected: {}".format(self.selected_value))
texts.append("Main Window: {}".format(self.is_main_window_value))
texts.append("Keyboard focus: {}".format(self.keyboard_focus_value))
texts.append("Focus: {}".format(self.focus_value))
texts.append("Generation: {}".format(self.generation_value))
texts.append(
"Horizontal size class: {}".format(self.horizontal_size_class_summary)
)
texts.append("Vertical size class: {}".format(self.vertical_size_class_summary))
texts.append("TouchBar element: {}".format(self.is_touch_bar_element_value))
texts.append(
"TouchBar top level element: {}".format(
self.is_top_level_touch_bar_element_value
)
)
texts.append(
"Unique Objective-C: {}".format(
self.uniquely_identifying_objective_c_code_value
)
)
texts.append(
"Unique Swift: {}".format(self.uniquely_identifying_swift_code_value)
)
texts.append("Suggested hit points: {}".format(self.suggested_hit_points_value))
return "\n".join(texts)
def tree(self):
"""
Returns tree of elements in hierarchy
:return: Elements hierarchy
:rtype: _ElementList
"""
children = [
XCElementSnapshot(e, self.language).tree() for e in self.children_list
]
return _ElementList(self, children)
def find_missing_identifiers(self, status_bar):
"""
Find elements which have a label but don't have an identifier
:param bool status_bar: Print status bar items
:return: Hierarchy structure with items which have a label but don't have an identifier
:rtype: _ElementList | None
"""
# Do not print status bar items
if status_bar is not True and self.type_value == XCUIElementType.StatusBar:
return None
children_missing = [
XCElementSnapshot(e, self.language).find_missing_identifiers(
status_bar=status_bar
)
for e in self.children_list
]
children_missing = [x for x in children_missing if x is not None]
# Self and its children are not missing identifiers
if self.is_missing_identifier is False and len(children_missing) == 0:
return None
return _ElementList(self, children_missing)
@staticmethod
def get_type_value_string(value):
"""
Get element type string from XCUIElementType (as int)
:param int value: XCUIElementType (as int)
:return: XCUIElementType string
:rtype: str
"""
return XCUIElementType.name_for_value(value)
@staticmethod
def get_traits_value_string(value):
"""
Get element traits string from UIAccessibilityTraits (as int)
:param int value: UIAccessibilityTraits (as int)
:return: UIAccessibilityTraits string
:rtype: str
"""
return UIAccessibilityTraits.name_for_value(value)
@staticmethod
def get_user_interface_size_class_string(value):
"""
Get user interface size class string from UIUserInterfaceSizeClass (as int)
:param value: UIAccessibilityTraits (as int)
:return: UIUserInterfaceSizeClass string
:rtype: str
"""
return UIUserInterfaceSizeClass.name_for_value(value)
class XCUIElementType(object):
"""
Represents all XCUIElementType types
"""
Any = 0
Other = 1
Application = 2
Group = 3
Window = 4
Sheet = 5
Drawer = 6
Alert = 7
Dialog = 8
Button = 9
RadioButton = 10
RadioGroup = 11
CheckBox = 12
DisclosureTriangle = 13
PopUpButton = 14
ComboBox = 15
MenuButton = 16
ToolbarButton = 17
Popover = 18
Keyboard = 19
Key = 20
NavigationBar = 21
TabBar = 22
TabGroup = 23
Toolbar = 24
StatusBar = 25
Table = 26
TableRow = 27
TableColumn = 28
Outline = 29
OutlineRow = 30
Browser = 31
CollectionView = 32
Slider = 33
PageIndicator = 34
ProgressIndicator = 35
ActivityIndicator = 36
SegmentedControl = 37
Picker = 38
PickerWheel = 39
Switch = 40
Toggle = 41
Link = 42
Image = 43
Icon = 44
SearchField = 45
ScrollView = 46
ScrollBar = 47
StaticText = 48
TextField = 49
SecureTextField = 50
DatePicker = 51
TextView = 52
Menu = 53
MenuItem = 54
MenuBar = 55
MenuBarItem = 56
Map = 57
WebView = 58
IncrementArrow = 59
DecrementArrow = 60
Timeline = 61
RatingIndicator = 62
ValueIndicator = 63
SplitGroup = 64
Splitter = 65
RelevanceIndicator = 66
ColorWell = 67
HelpTag = 68
Matte = 69
DockItem = 70
Ruler = 71
RulerMarker = 72
Grid = 73
LevelIndicator = 74
Cell = 75
LayoutArea = 76
LayoutItem = 77
Handle = 78
Stepper = 79
Tab = 80
TouchBar = 81
@classmethod
def _attributes_by_value(cls):
"""
:return: Dict mapping attribute values to attribute names
:rtype: dict[int, str]
"""
class_attributes = set(dir(cls)) - set(dir(object))
return dict(
[
(getattr(cls, n), n)
for n in class_attributes
if not callable(getattr(cls, n)) and not n.startswith("__")
]
)
@classmethod
def name_for_value(cls, value):
"""
Get element type string from XCUIElementType (as int)
:param int value: XCUIElementType (as int)
:return: Name of type
:rtype: str
"""
attributes = cls._attributes_by_value()
if value in attributes:
return attributes[value]
else:
return "Unknown ({:#x})".format(value)
class UIAccessibilityTraits(object):
"""
Represents all UIAccessibilityTraits types
"""
Button = 0x0000000000000001
Link = 0x0000000000000002
Image = 0x0000000000000004
Selected = 0x0000000000000008
PlaysSound = 0x0000000000000010
KeyboardKey = 0x0000000000000020
StaticText = 0x0000000000000040
SummaryElement = 0x0000000000000080
NotEnabled = 0x0000000000000100
UpdatesFrequently = 0x0000000000000200
SearchField = 0x0000000000000400
StartsMediaSession = 0x0000000000000800
Adjustable = 0x0000000000001000
AllowsDirectInteraction = 0x0000000000002000
CausesPageTurn = 0x0000000000004000
TabBar = 0x0000000000008000
Header = 0x0000000000010000
@classmethod
def _attributes_by_value(cls):
"""
:return: Dict mapping attribute values to attribute names
:rtype: dict[int, str]
"""
class_attributes = set(dir(cls)) - set(dir(object))
return dict(
[
(getattr(cls, n), n)
for n in class_attributes
if not callable(getattr(cls, n)) and not n.startswith("__")
]
)
@classmethod
def name_for_value(cls, value):
"""
Get element traits string from UIAccessibilityTraits (as int)
:param int value: UIAccessibilityTraits (as int)
:return: UIAccessibilityTraits string
:rtype: str
"""
if value == 0:
return "None"
traits = []
attributes = cls._attributes_by_value()
for k in attributes.keys():
if value & k:
traits.append(attributes[k])
if len(traits) == 0:
return "Unknown"
else:
return ", ".join(traits)
class UIUserInterfaceSizeClass(object):
"""
Represents all UIUserInterfaceSizeClass types
"""
Unspecified = 0
Compact = 1
Regular = 2
@classmethod
def name_for_value(cls, value):
"""
Get user interface size class string from UIUserInterfaceSizeClass (as int)
:param int value: UIAccessibilityTraits (as int)
:return: UIUserInterfaceSizeClass string
:rtype: str
"""
if value == cls.Unspecified:
return "Unspecified"
elif value == cls.Compact:
return "Compact"
elif value == cls.Regular:
return "Regular"
else:
return "Unknown ({:#x})".format(value)
class CGRect(object):
"""
CGRect wrapper
:param lldb.SBValue element: CGRect object
"""
def __init__(self, element):
"""
:param lldb.SBValue element: CGRect object
"""
super(CGRect, self).__init__()
self.element = element
def summary(self):
"""
:return: CGRect summary
:rtype: str
"""
origin_element = self.element.GetChildMemberWithName("origin")
origin = CGPoint(origin_element)
size = self.element.GetChildMemberWithName("size")
width = size.GetChildMemberWithName("width")
height = size.GetChildMemberWithName("height")
width_value = float(width.GetValue())
height_value = float(height.GetValue())
return "{{{}, {{{}, {}}}}}".format(origin.summary(), width_value, height_value)
class CGPoint(object):
"""
CGPoint wrapper
:param lldb.SBValue element: CGPoint object
"""
def __init__(self, element):
super(CGPoint, self).__init__()
self.element = element
def summary(self):
"""
:return: CGPoint summary
:rtype: str
"""
x = self.element.GetChildMemberWithName("x")
y = self.element.GetChildMemberWithName("y")
x_value = float(x.GetValue())
y_value = float(y.GetValue())
return "{{{}, {}}}".format(x_value, y_value)
def normalize_summary(summary):
"""
Normalize summary by stripping the leading "@" and surrounding double quotes
:param str summary: Summary string to normalize
:return: Normalized summary string
:rtype: str
"""
return summary.lstrip("@").strip('"')
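# For example (illustrative): normalize_summary('@"Sign in"') returns 'Sign in'.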
def normalize_array_description(description):
"""
Normalize array object description by removing the leading "<...>" prefix and surrounding whitespace.
:param str description: Array object description
:return: Normalized array object description string
:rtype: str
"""
return re.sub("^(<.*>)", "", description).strip()
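# For example (illustrative; the class name and address are hypothetical):
#   normalize_array_description('<__NSArrayI 0x6000021c> Button, StaticText')
# returns 'Button, StaticText'.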
_uikit_imported = False
def import_uikit():
"""
Import UIKit framework to the debugger
"""
global _uikit_imported
if _uikit_imported:
return
_uikit_imported = True
fb.evaluateExpressionValue("@import UIKit")
def debug(element):
"""
Debug helper
:param lldb.SBValue element: Element to debug
"""
print("---")
print("element: {}".format(element))
print("element class: {}".format(element.__class__))
print("element name: {}".format(element.GetName()))
print("element type name: {}".format(element.GetTypeName()))
print("element value: {}".format(element.GetValue()))
print("element value class: {}".format(element.GetValue().__class__))
print("element value type: {}".format(element.GetValueType()))
print("element value signed: {0}({0:#x})".format(element.GetValueAsSigned()))
print("element value unsigned: {0}({0:#x})".format(element.GetValueAsUnsigned()))
print("element summary: {}".format(element.GetSummary()))
print("element description: {}".format(element.GetObjectDescription()))
print("element children num: {}".format(element.GetNumChildren()))
for i in range(0, element.GetNumChildren()):
child = element.GetChildAtIndex(i)
""":type: lldb.SBValue"""
print("element child {:02}: {}".format(i, child.GetName()))
print("===")
|
mit
| -1,635,572,801,278,517,000 | 28.776211 | 99 | 0.564789 | false |
malt1/lutris
|
tests/test_installer.py
|
1
|
1700
|
from unittest import TestCase
from lutris.installer import ScriptInterpreter, ScriptingError
class MockInterpreter(ScriptInterpreter):
""" a script interpreter mock """
script = {'runner': 'linux'}
def is_valid(self):
return True
class TestScriptInterpreter(TestCase):
def test_script_with_correct_values_is_valid(self):
script = {
'runner': 'foo',
'installer': 'bar',
'name': 'baz',
'game_slug': 'baz',
}
interpreter = ScriptInterpreter(script, None)
self.assertFalse(interpreter.errors)
self.assertTrue(interpreter.is_valid())
def test_move_requires_src_and_dst(self):
script = {
'foo': 'bar',
'installer': {},
'name': 'missing_runner',
'game_slug': 'missing-runner'
}
with self.assertRaises(ScriptingError):
interpreter = ScriptInterpreter(script, None)
interpreter._get_move_paths({})
def test_get_command_returns_a_method(self):
interpreter = MockInterpreter({}, None)
command, params = interpreter._map_command({'move': 'whatever'})
self.assertIn("bound method MockInterpreter.move", str(command))
self.assertEqual(params, "whatever")
def test_get_command_doesnt_return_private_methods(self):
""" """
interpreter = MockInterpreter({}, None)
with self.assertRaises(ScriptingError) as ex:
command, params = interpreter._map_command(
{'_substitute': 'foo'}
)
self.assertEqual(ex.exception.message,
"The command substitute does not exists")
|
gpl-3.0
| -5,782,265,075,609,909,000 | 33 | 72 | 0.594118 | false |
NicoVarg99/daf-recipes
|
ckan/ckan/ckanext-dcatapit/ckanext/dcatapit/dcat/profiles.py
|
1
|
31302
|
import ast
import logging
import datetime
from pylons import config
from rdflib.namespace import Namespace, RDF, SKOS
from rdflib import URIRef, BNode, Literal
import ckan.logic as logic
from ckanext.dcat.profiles import RDFProfile, DCAT, LOCN, VCARD, DCT, FOAF, ADMS
from ckanext.dcat.utils import catalog_uri, dataset_uri, resource_uri
import ckanext.dcatapit.interfaces as interfaces
import ckanext.dcatapit.helpers as helpers
DCATAPIT = Namespace('http://dati.gov.it/onto/dcatapit#')
it_namespaces = {
'dcatapit': DCATAPIT,
}
THEME_BASE_URI = 'http://publications.europa.eu/resource/authority/data-theme/'
LANG_BASE_URI = 'http://publications.europa.eu/resource/authority/language/'
FREQ_BASE_URI = 'http://publications.europa.eu/resource/authority/frequency/'
FORMAT_BASE_URI = 'http://publications.europa.eu/resource/authority/file-type/'
GEO_BASE_URI = 'http://publications.europa.eu/resource/authority/place/'
# vocabulary name, base URI
THEME_CONCEPTS = ('eu_themes', THEME_BASE_URI)
LANG_CONCEPTS = ('languages', LANG_BASE_URI)
GEO_CONCEPTS = ('places', GEO_BASE_URI)
FREQ_CONCEPTS = ('frequencies', FREQ_BASE_URI)
FORMAT_CONCEPTS = ('filetype', FORMAT_BASE_URI)
DEFAULT_VOCABULARY_KEY = 'OP_DATPRO'
DEFAULT_THEME_KEY = DEFAULT_VOCABULARY_KEY
DEFAULT_FORMAT_CODE = DEFAULT_VOCABULARY_KEY
DEFAULT_FREQ_CODE = 'UNKNOWN'
LOCALISED_DICT_NAME_BASE = 'DCATAPIT_MULTILANG_BASE'
LOCALISED_DICT_NAME_RESOURCES = 'DCATAPIT_MULTILANG_RESOURCES'
lang_mapping_ckan_to_voc = {
'it': 'ITA',
'de': 'DEU',
'en': 'ENG',
'en_GB': 'ENG',
}
lang_mapping_xmllang_to_ckan = {
'it' : 'it',
'de' : 'de',
'en' : 'en_GB' ,
}
format_mapping = {
'WMS': 'MAP_SRVC',
'HTML': 'HTML_SIMPL',
'CSV': 'CSV',
'XLS': 'XLS',
'ODS': 'ODS',
'ZIP': 'OP_DATPRO', # requires to be more specific, can't infer
}
log = logging.getLogger(__name__)
class ItalianDCATAPProfile(RDFProfile):
'''
An RDF profile for the Italian DCAT-AP recommendation for data portals
It requires the European DCAT-AP profile (`euro_dcat_ap`)
'''
def parse_dataset(self, dataset_dict, dataset_ref):
# check the dataset type
if (dataset_ref, RDF.type, DCATAPIT.Dataset) not in self.g:
# not a DCATAPIT dataset
return dataset_dict
# date info
for predicate, key, logf in (
(DCT.issued, 'issued', log.debug),
(DCT.modified, 'modified', log.warn),
):
value = self._object_value(dataset_ref, predicate)
if value:
self._remove_from_extra(dataset_dict, key)
value = helpers.format(value, '%Y-%m-%d', 'date')
dataset_dict[key] = value
else:
logf('No %s found for dataset "%s"', predicate, dataset_dict.get('title', '---'))
# 0..1 predicates
for predicate, key, logf in (
(DCT.identifier, 'identifier', log.warn),
):
value = self._object_value(dataset_ref, predicate)
if value:
self._remove_from_extra(dataset_dict, key)
dataset_dict[key] = value
else:
logf('No %s found for dataset "%s"', predicate, dataset_dict.get('title', '---'))
# 0..n predicates list
for predicate, key, logf in (
(ADMS.identifier, 'alternate_identifier', log.debug),
(DCT.isVersionOf, 'is_version_of', log.debug),
):
valueList = self._object_value_list(dataset_ref, predicate)
if valueList:
self._remove_from_extra(dataset_dict, key)
value = ','.join(valueList)
dataset_dict[key] = value
else:
logf('No %s found for dataset "%s"', predicate, dataset_dict.get('title', '---'))
# conformsTo
self._remove_from_extra(dataset_dict, 'conforms_to')
conform_list = []
for conforms_to in self.g.objects(dataset_ref, DCT.conformsTo):
conform_list.append(self._object_value(conforms_to, DCT.identifier))
if conform_list:
value = ','.join(conform_list)
dataset_dict['conforms_to'] = value
else:
log.debug('No DCT.conformsTo found for dataset "%s"', dataset_dict.get('title', '---'))
# Temporal
start, end = self._time_interval(dataset_ref, DCT.temporal)
for v, key, logf in (
(start, 'temporal_start', log.debug),
(end, 'temporal_end', log.debug),
):
if v:
self._remove_from_extra(dataset_dict, key)
value = helpers.format(v, '%Y-%m-%d', 'date')
dataset_dict[key] = value
else:
log.warn('No %s Date found for dataset "%s"', key, dataset_dict.get('title', '---'))
# URI 0..1
for predicate, key, base_uri in (
(DCT.accrualPeriodicity, 'frequency', FREQ_BASE_URI),
):
valueRef = self._object_value(dataset_ref, predicate)
if valueRef:
self._remove_from_extra(dataset_dict, key)
value = self._strip_uri(valueRef, base_uri)
dataset_dict[key] = value
else:
log.warn('No %s found for dataset "%s"', predicate, dataset_dict.get('title', '---'))
# URI lists
for predicate, key, base_uri in (
(DCT.language, 'language', LANG_BASE_URI),
(DCAT.theme, 'theme', THEME_BASE_URI),
):
self._remove_from_extra(dataset_dict, key)
valueRefList = self._object_value_list(dataset_ref, predicate)
valueList = [self._strip_uri(valueRef, base_uri) for valueRef in valueRefList]
value = ','.join(valueList)
if len(valueList) > 1:
value = '{'+value+'}'
dataset_dict[key] = value
# Spatial
spatial_tags = []
geonames_url = None
for spatial in self.g.objects(dataset_ref, DCT.spatial):
for spatial_literal in self.g.objects(spatial, DCATAPIT.geographicalIdentifier):
spatial_value = spatial_literal.value
if GEO_BASE_URI in spatial_value:
spatial_tags.append(self._strip_uri(spatial_value, GEO_BASE_URI))
else:
if geonames_url:
log.warn("GeoName URL is already set to %s, value %s will not be imported", geonames_url, spatial_value)
else:
geonames_url = spatial_value
if len(spatial_tags) > 0:
value = ','.join(spatial_tags)
if len(spatial_tags) > 1:
value = '{'+value+'}'
dataset_dict['geographical_name'] = value
if geonames_url:
dataset_dict['geographical_geonames_url'] = geonames_url
### Collect strings from multilang fields
# { 'field_name': {'it': 'italian loc', 'de': 'german loc', ...}, ...}
localized_dict = {}
for key, predicate in (
('title', DCT.title),
('notes', DCT.description),
):
self._collect_multilang_strings(dataset_dict, key, dataset_ref, predicate, localized_dict)
# Agents
for predicate, basekey in (
(DCT.publisher, 'publisher'),
(DCT.rightsHolder, 'holder'),
(DCT.creator, 'creator'),
):
agent_dict, agent_loc_dict = self._parse_agent(dataset_ref, predicate, basekey)
for key,v in agent_dict.iteritems():
self._remove_from_extra(dataset_dict, key)
dataset_dict[key] = v
localized_dict.update(agent_loc_dict)
# when all localized data have been parsed, check if there really is any and add it to the dict
if len(localized_dict) > 0:
log.debug('Found multilang metadata')
dataset_dict[LOCALISED_DICT_NAME_BASE] = localized_dict
### Resources
resources_loc_dict = {}
# In ckan, the license is a dataset property, not resource's
# We'll collect all of the resources' licenses, then we will postprocess them
licenses = [] # contains tuples (url, name)
for resource_dict in dataset_dict.get('resources', []):
resource_uri = resource_dict['uri']
if not resource_uri:
log.warn("URI not defined for resource %s", resource_dict['name'])
continue
distribution = URIRef(resource_uri)
if not (dataset_ref, DCAT.distribution, distribution) in self.g:
log.warn("Distribution not found in dataset %s", resource_uri)
continue
# URI 0..1
for predicate, key, base_uri in (
(DCT['format'], 'format', FORMAT_BASE_URI), # Format
):
valueRef = self._object_value(distribution, predicate)
if valueRef:
value = self._strip_uri(valueRef, base_uri)
resource_dict[key] = value
else:
log.warn('No %s found for resource "%s"::"%s"',
predicate,
dataset_dict.get('title', '---'),
resource_dict.get('name', '---'))
# License
license = self._object(distribution, DCT.license)
if license:
# just add this info in the resource extras
resource_dict['license_url'] = str(license)
license_name = self._object_value(license, FOAF.name) # may be either the title or the id
if(license_name):
# just add this info in the resource extras
resource_dict['license_name'] = license_name
else:
license_name = "unknown"
licenses.append((str(license), license_name))
else:
log.warn('No license found for resource "%s"::"%s"',
dataset_dict.get('title', '---'),
resource_dict.get('name', '---'))
# Multilang
loc_dict = {}
for key, predicate in (
('name', DCT.title),
('description', DCT.description),
):
self._collect_multilang_strings(resource_dict, key, distribution, predicate, loc_dict)
if len(loc_dict) > 0:
log.debug('Found multilang metadata in resource %s', resource_dict['name'])
resources_loc_dict[resource_uri] = loc_dict
if len(resources_loc_dict) > 0:
log.debug('Found multilang metadata in resources')
dataset_dict[LOCALISED_DICT_NAME_RESOURCES] = resources_loc_dict
# postprocess licenses
# license_ids = {id for url,id in licenses} # does not work in python 2.6
license_ids = set()
for url,id in licenses:
license_ids.add(id)
if license_ids:
if len(license_ids) > 1:
log.warn('More than one license found for dataset "%s"', dataset_dict.get('title', '---'))
dataset_dict['license_id'] = license_ids.pop() # take a random one
return dataset_dict
def _collect_multilang_strings(self, base_dict, key, subj, pred, loc_dict):
'''
Search for multilang Literals matching (subj, pred).
- Non-localized literals will be stored as base_dict[key] -- possibly replacing the value set by the EURO parser
- Localized literals will be stored into loc_dict[key][lang]
'''
for obj in self.g.objects(subj, pred):
value = obj.value
lang = obj.language
if not lang:
# force default value in dataset
base_dict[key] = value
else:
# add localized string
lang_dict = loc_dict.setdefault(key, {})
lang_dict[lang_mapping_xmllang_to_ckan.get(lang)] = value
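# For example (illustrative): given DCT.title literals 'Titolo'@it and 'Title'@en,
# a non-localized literal (if any) ends up in base_dict['title'], while
# loc_dict['title'] becomes {'it': 'Titolo', 'en_GB': 'Title'} via
# lang_mapping_xmllang_to_ckan.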
def _remove_from_extra(self, dataset_dict, key):
# search and replace
for extra in dataset_dict.get('extras', []):
if extra['key'] == key:
dataset_dict['extras'].pop(dataset_dict['extras'].index(extra))
return
def _add_or_replace_extra(self, dataset_dict, key, value):
# search and replace
for extra in dataset_dict.get('extras', []):
if extra['key'] == key:
extra['value'] = value
return
# add if not found
dataset_dict['extras'].append({'key': key, 'value': value})
def _parse_agent(self, subject, predicate, base_name):
agent_dict = {}
loc_dict= {}
for agent in self.g.objects(subject, predicate):
agent_dict[base_name + '_identifier'] = self._object_value(agent, DCT.identifier)
self._collect_multilang_strings(agent_dict, base_name + '_name', agent, FOAF.name, loc_dict)
return agent_dict, loc_dict
def _strip_uri(self, value, base_uri):
return value.replace(base_uri, '')
def graph_from_dataset(self, dataset_dict, dataset_ref):
title = dataset_dict.get('title')
g = self.g
for prefix, namespace in it_namespaces.iteritems():
g.bind(prefix, namespace)
### add a further type for the Dataset node
g.add((dataset_ref, RDF.type, DCATAPIT.Dataset))
### replace themes
value = self._get_dict_value(dataset_dict, 'theme')
if value:
for theme in value.split(','):
self.g.remove((dataset_ref, DCAT.theme, URIRef(theme)))
theme = theme.replace('{','').replace('}','')
self.g.add((dataset_ref, DCAT.theme, URIRef(THEME_BASE_URI + theme)))
self._add_concept(THEME_CONCEPTS, theme)
else:
self.g.add((dataset_ref, DCAT.theme, URIRef(THEME_BASE_URI + DEFAULT_THEME_KEY)))
self._add_concept(THEME_CONCEPTS, DEFAULT_THEME_KEY)
### replace languages
value = self._get_dict_value(dataset_dict, 'language')
if value:
for lang in value.split(','):
self.g.remove((dataset_ref, DCT.language, Literal(lang)))
lang = lang.replace('{','').replace('}','')
self.g.add((dataset_ref, DCT.language, URIRef(LANG_BASE_URI + lang)))
# self._add_concept(LANG_CONCEPTS, lang)
### add spatial (EU URI)
value = self._get_dict_value(dataset_dict, 'geographical_name')
if value:
for gname in value.split(','):
gname = gname.replace('{','').replace('}','')
dct_location = BNode()
self.g.add((dataset_ref, DCT.spatial, dct_location))
self.g.add((dct_location, RDF['type'], DCT.Location))
# Try and add a Concept from the spatial vocabulary
if self._add_concept(GEO_CONCEPTS, gname):
self.g.add((dct_location, DCATAPIT.geographicalIdentifier, Literal(GEO_BASE_URI + gname)))
# geo concept is not really required, but may be a useful adding
self.g.add((dct_location, LOCN.geographicalName, URIRef(GEO_BASE_URI + gname)))
else:
# The dataset field is not a controlled tag, let's create a Concept out of the label we have
concept = BNode()
self.g.add((concept, RDF['type'], SKOS.Concept))
self.g.add((concept, SKOS.prefLabel, Literal(gname)))
self.g.add((dct_location, LOCN.geographicalName, concept))
### add spatial (GeoNames)
value = self._get_dict_value(dataset_dict, 'geographical_geonames_url')
if value:
dct_location = BNode()
self.g.add((dataset_ref, DCT.spatial, dct_location))
self.g.add((dct_location, RDF['type'], DCT.Location))
self.g.add((dct_location, DCATAPIT.geographicalIdentifier, Literal(value)))
### replace periodicity
self._remove_node(dataset_dict, dataset_ref, ('frequency', DCT.accrualPeriodicity, None, Literal))
self._add_uri_node(dataset_dict, dataset_ref, ('frequency', DCT.accrualPeriodicity, DEFAULT_FREQ_CODE, URIRef), FREQ_BASE_URI)
# self._add_concept(FREQ_CONCEPTS, dataset_dict.get('frequency', DEFAULT_VOCABULARY_KEY))
### replace landing page
self._remove_node(dataset_dict, dataset_ref, ('url', DCAT.landingPage, None, URIRef))
landing_page_uri = None
if dataset_dict.get('name'):
landing_page_uri = '{0}/dataset/{1}'.format(catalog_uri().rstrip('/'), dataset_dict['name'])
else:
landing_page_uri = dataset_uri(dataset_dict) # TODO: preserve original URI if harvested
self.g.add((dataset_ref, DCAT.landingPage, URIRef(landing_page_uri)))
### conformsTo
self.g.remove((dataset_ref, DCT.conformsTo, None))
value = self._get_dict_value(dataset_dict, 'conforms_to')
if value:
for item in value.split(','):
standard = BNode()
self.g.add((dataset_ref, DCT.conformsTo, standard))
self.g.add((standard, RDF['type'], DCT.Standard))
self.g.add((standard, RDF['type'], DCATAPIT.Standard))
self.g.add((standard, DCT.identifier, Literal(item)))
### publisher
# DCAT by default creates this node
# <dct:publisher>
# <foaf:Organization rdf:about="http://10.10.100.75/organization/55535226-f82a-4cf7-903a-3e10afeaa79a">
# <foaf:name>orga2_test</foaf:name>
# </foaf:Organization>
# </dct:publisher>
for s,p,o in g.triples( (dataset_ref, DCT.publisher, None) ):
#log.info("Removing publisher %r", o)
g.remove((s, p, o))
self._add_agent(dataset_dict, dataset_ref, 'publisher', DCT.publisher)
### Rights holder : Agent
holder_ref = self._add_agent(dataset_dict, dataset_ref, 'holder', DCT.rightsHolder)
### Autore : Agent
self._add_agent(dataset_dict, dataset_ref, 'creator', DCT.creator)
### Point of Contact
# <dcat:contactPoint rdf:resource="http://dati.gov.it/resource/PuntoContatto/contactPointRegione_r_liguri"/>
# <!-- http://dati.gov.it/resource/PuntoContatto/contactPointRegione_r_liguri -->
# <dcatapit:Organization rdf:about="http://dati.gov.it/resource/PuntoContatto/contactPointRegione_r_liguri">
# <rdf:type rdf:resource="&vcard;Kind"/>
# <rdf:type rdf:resource="&vcard;Organization"/>
# <vcard:hasEmail rdf:resource="mailto:[email protected]"/>
# <vcard:fn>Regione Liguria - Sportello Cartografico</vcard:fn>
# </dcatapit:Organization>
# TODO: preserve original info if harvested
# retrieve the contactPoint added by the euro serializer
euro_poc = g.value(subject=dataset_ref, predicate=DCAT.contactPoint, object=None, any=False)
# euro poc has this format:
# <dcat:contactPoint>
# <vcard:Organization rdf:nodeID="Nfcd06f452bcd41f48f33c45b0c95979e">
# <vcard:fn>THE ORGANIZATION NAME</vcard:fn>
# <vcard:hasEmail>THE ORGANIZATION EMAIL</vcard:hasEmail>
# </vcard:Organization>
# </dcat:contactPoint>
if euro_poc:
g.remove((dataset_ref, DCAT.contactPoint, euro_poc))
org_id = dataset_dict.get('organization',{}).get('id')
# get orga info
org_show = logic.get_action('organization_show')
try:
org_dict = org_show({},
{'id': org_id,
'include_datasets': False,
'include_tags': False,
'include_users': False,
'include_groups': False,
'include_extras': True,
'include_followers': False})
except Exception, e:
org_dict = {}
org_uri = organization_uri(org_dict)
poc = URIRef(org_uri)
g.add((dataset_ref, DCAT.contactPoint, poc))
g.add((poc, RDF.type, DCATAPIT.Organization))
g.add((poc, RDF.type, VCARD.Kind))
g.add((poc, RDF.type, VCARD.Organization))
g.add((poc, VCARD.fn, Literal(org_dict.get('name'))))
if 'email' in org_dict.keys(): # this element is mandatory for dcatapit, but it may not have been filled for imported datasets
g.add((poc, VCARD.hasEmail, URIRef(org_dict.get('email'))))
if 'telephone' in org_dict.keys():
g.add((poc, VCARD.hasTelephone, Literal(org_dict.get('telephone'))))
if 'site' in org_dict.keys():
g.add((poc, VCARD.hasURL, Literal(org_dict.get('site'))))
### Multilingual
# Add localized entries in dataset
# TODO: should we remove the non-localized nodes?
loc_dict = interfaces.get_for_package(dataset_dict['id'])
# The multilang fields
loc_package_mapping = {
'title': (dataset_ref, DCT.title),
'notes': (dataset_ref, DCT.description),
'holder_name': (holder_ref, FOAF.name)
}
self._add_multilang_values(loc_dict, loc_package_mapping)
### Resources
for resource_dict in dataset_dict.get('resources', []):
distribution = URIRef(resource_uri(resource_dict)) # TODO: preserve original info if harvested
# Add the DCATAPIT type
g.add((distribution, RDF.type, DCATAPIT.Distribution))
### format
self._remove_node(resource_dict, distribution, ('format', DCT['format'], None, Literal))
if not self._add_uri_node(resource_dict, distribution, ('distribution_format', DCT['format'], None, URIRef), FORMAT_BASE_URI):
guessed_format = guess_format(resource_dict)
if guessed_format:
self.g.add((distribution, DCT['format'], URIRef(FORMAT_BASE_URI + guessed_format)))
else:
log.warn('No format for resource: %s / %s', dataset_dict.get('title', 'N/A'), resource_dict.get('description', 'N/A') )
self.g.add((distribution, DCT['format'], URIRef(FORMAT_BASE_URI + DEFAULT_FORMAT_CODE)))
### license
# <dct:license rdf:resource="http://creativecommons.org/licenses/by/3.0/it/"/>
#
# <dcatapit:LicenseDocument rdf:about="http://creativecommons.org/licenses/by/3.0/it/">
# <rdf:type rdf:resource="&dct;LicenseDocument"/>
# <owl:versionInfo>3.0 ITA</owl:versionInfo>
# <foaf:name>CC BY</foaf:name>
# <dct:type rdf:resource="http://purl.org/adms/licencetype/Attribution"/>
# </dcatapit:LicenseDocument>
# "license_id" : "cc-zero"
# "license_title" : "Creative Commons CCZero",
# "license_url" : "http://www.opendefinition.org/licenses/cc-zero",
license_url = dataset_dict.get('license_url', '')
license_id = dataset_dict.get('license_id', '')
license_title = dataset_dict.get('license_title', '')
if license_url:
license = URIRef(license_url)
g.add((license, RDF['type'], DCATAPIT.LicenseDocument))
g.add((license, RDF['type'], DCT.LicenseDocument))
g.add((license, DCT['type'], URIRef('http://purl.org/adms/licencetype/Attribution'))) # TODO: infer from CKAN license
g.add((distribution, DCT.license, license))
if license_id:
# log.debug('Adding license id: %s', license_id)
g.add((license, FOAF.name, Literal(license_id)))
elif license_title:
# log.debug('Adding license title: %s', license_title)
g.add((license, FOAF.name, Literal(license_title)))
else:
g.add((license, FOAF.name, Literal('unknown')))
log.warn('License not found for dataset: %s', title)
### Multilingual
# Add localized entries in resource
# TODO: should we remove the non-localized nodes?
loc_dict = interfaces.get_for_resource(resource_dict['id'])
# The multilang fields
loc_resource_mapping = {
'name': (distribution, DCT.title),
'description': (distribution, DCT.description),
}
self._add_multilang_values(loc_dict, loc_resource_mapping)
def _add_multilang_values(self, loc_dict, loc_mapping):
if loc_dict:
for field_name, lang_dict in loc_dict.iteritems():
ref, pred = loc_mapping.get(field_name, (None, None))
if not pred:
log.warn('Multilang field not mapped "%s"', field_name)
continue
for lang, value in lang_dict.iteritems():
lang = lang.split('_')[0] # rdflib is quite picky in lang names
self.g.add((ref, pred, Literal(value, lang=lang)))
def _add_agent(self, _dict, ref, basekey, _type):
''' Stores the Agent in this format:
<dct:publisher rdf:resource="http://dati.gov.it/resource/Amministrazione/r_liguri"/>
<dcatapit:Agent rdf:about="http://dati.gov.it/resource/Amministrazione/r_liguri">
<rdf:type rdf:resource="&foaf;Agent"/>
<dct:identifier>r_liguri</dct:identifier>
<foaf:name>Regione Liguria</foaf:name>
</dcatapit:Agent>
Returns the ref to the agent node
'''
agent_name = self._get_dict_value(_dict, basekey + '_name', 'N/A')
agent_id = self._get_dict_value(_dict, basekey + '_identifier','N/A')
agent = BNode()
self.g.add((agent, RDF['type'], DCATAPIT.Agent))
self.g.add((agent, RDF['type'], FOAF.Agent))
self.g.add((ref, _type, agent))
self.g.add((agent, FOAF.name, Literal(agent_name)))
self.g.add((agent, DCT.identifier, Literal(agent_id)))
return agent
def _add_uri_node(self, _dict, ref, item, base_uri=''):
key, pred, fallback, _type = item
value = self._get_dict_value(_dict, key)
if value:
self.g.add((ref, pred, _type(base_uri + value)))
return True
elif fallback:
self.g.add((ref, pred, _type(base_uri + fallback)))
return False
else:
return False
def _remove_node(self, _dict, ref, item):
key, pred, fallback, _type = item
value = self._get_dict_value(_dict, key)
if value:
self.g.remove((ref, pred, _type(value)))
def _add_concept(self, concepts, tag):
# Localized concepts should be serialized as:
#
# <dcat:theme rdf:resource="http://publications.europa.eu/resource/authority/data-theme/ENVI"/>
#
# <skos:Concept rdf:about="http://publications.europa.eu/resource/authority/data-theme/ENVI">
# <skos:prefLabel xml:lang="it">Ambiente</skos:prefLabel>
# </skos:Concept>
#
# Return true if Concept has been added
voc, base_uri = concepts
loc_dict = interfaces.get_all_localized_tag_labels(tag)
if loc_dict and len(loc_dict) > 0:
concept = URIRef(base_uri + tag)
self.g.add((concept, RDF['type'], SKOS.Concept))
for lang, label in loc_dict.iteritems():
lang = lang.split('_')[0] # rdflib is quite picky in lang names
self.g.add((concept, SKOS.prefLabel, Literal(label, lang=lang)))
return True
return False
def graph_from_catalog(self, catalog_dict, catalog_ref):
g = self.g
for prefix, namespace in it_namespaces.iteritems():
g.bind(prefix, namespace)
### Add a further type for the Catalog node
g.add((catalog_ref, RDF.type, DCATAPIT.Catalog))
### Replace homepage
# Try to avoid to have the Catalog URIRef identical to the homepage URI
g.remove((catalog_ref, FOAF.homepage, URIRef(config.get('ckan.site_url'))))
g.add((catalog_ref, FOAF.homepage, URIRef(catalog_uri() + '/#')))
### publisher
pub_agent_name = config.get('ckanext.dcatapit_configpublisher_name', 'unknown')
pub_agent_id = config.get('ckanext.dcatapit_configpublisher_code_identifier', 'unknown')
agent = BNode()
self.g.add((agent, RDF['type'], DCATAPIT.Agent))
self.g.add((agent, RDF['type'], FOAF.Agent))
self.g.add((catalog_ref, DCT.publisher, agent))
self.g.add((agent, FOAF.name, Literal(pub_agent_name)))
self.g.add((agent, DCT.identifier, Literal(pub_agent_id)))
### issued date
issued = config.get('ckanext.dcatapit_config.catalog_issued', '1900-01-01')
if issued:
self._add_date_triple(catalog_ref, DCT.issued, issued)
### theme taxonomy
# <dcat:themeTaxonomy rdf:resource="http://publications.europa.eu/resource/authority/data-theme"/>
# <skos:ConceptScheme rdf:about="http://publications.europa.eu/resource/authority/data-theme">
# <dct:title xml:lang="it">Il Vocabolario Data Theme</dct:title>
# </skos:ConceptScheme>
taxonomy = URIRef(THEME_BASE_URI.rstrip('/'))
self.g.add((catalog_ref, DCAT.themeTaxonomy, taxonomy))
self.g.add((taxonomy, RDF.type, SKOS.ConceptScheme))
self.g.add((taxonomy, DCT.title, Literal('Il Vocabolario Data Theme', lang='it')))
### language
langs = config.get('ckan.locales_offered', 'it')
for lang_offered in langs.split():
lang_code = lang_mapping_ckan_to_voc.get(lang_offered)
self.g.add((catalog_ref, DCT.language, URIRef(LANG_BASE_URI + lang_code)))
self.g.remove((catalog_ref, DCT.language, Literal(config.get('ckan.locale_default', 'en'))))
def organization_uri(orga_dict):
'''
Returns an URI for the organization
This will be used to uniquely reference the organization on the RDF serializations.
The value will be
`catalog_uri()` + '/organization/' + `orga_id`
Check the documentation for `catalog_uri()` for the recommended ways of
setting it.
Returns a string with the resource URI.
'''
uri = '{0}/organization/{1}'.format(catalog_uri().rstrip('/'), orga_dict.get('id', None))
return uri
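# Illustrative result (sketch): if catalog_uri() returned the hypothetical value
# 'http://dati.example.org/catalog', then organization_uri({'id': 'abc123'})
# would yield 'http://dati.example.org/catalog/organization/abc123'.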
def guess_format(resource_dict):
f = resource_dict.get('format')
if not f:
log.info('No format found')
return None
ret = format_mapping.get(f, None)
if not ret:
log.info('Mapping not found for format %s', f)
return ret
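# Illustrative behaviour (sketch; the real format_mapping is defined elsewhere in
# this module): assuming format_mapping contained {'CSV': 'CSV', 'XLS': 'XLS'},
# guess_format({'format': 'CSV'}) would return 'CSV', while an unmapped or missing
# format logs an info message and returns None.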
|
gpl-3.0
| 9,136,739,147,792,279,000 | 38.225564 | 139 | 0.566449 | false |
alphagov/notifications-api
|
migrations/versions/0149_add_crown_to_services.py
|
1
|
1349
|
"""
Revision ID: 0149_add_crown_to_services
Revises: 0148_add_letters_as_pdf_svc_perm
Create Date: 2017-12-04 12:13:35.268712
"""
from alembic import op
import sqlalchemy as sa
revision = '0149_add_crown_to_services'
down_revision = '0148_add_letters_as_pdf_svc_perm'
def upgrade():
op.add_column('services', sa.Column('crown', sa.Boolean(), nullable=True))
op.execute("""
update services set crown = True
where organisation_type = 'central'
""")
op.execute("""
update services set crown = True
where organisation_type is null
""")
op.execute("""
update services set crown = False
where crown is null
""")
op.alter_column('services', 'crown', nullable=False)
op.add_column('services_history', sa.Column('crown', sa.Boolean(), nullable=True))
op.execute("""
update services_history set crown = True
where organisation_type = 'central'
""")
op.execute("""
update services_history set crown = True
where organisation_type is null
""")
op.execute("""
update services_history set crown = False
where crown is null
""")
op.alter_column('services_history', 'crown', nullable=False)
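    # Net effect of the updates above (illustrative sketch, not executed):
    #   organisation_type = 'central'  -> crown = True
    #   organisation_type IS NULL      -> crown = True
    #   any other organisation_type    -> crown = False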
def downgrade():
op.drop_column('services', 'crown')
op.drop_column('services_history', 'crown')
|
mit
| 2,876,341,452,399,597,600 | 25.98 | 86 | 0.633062 | false |
surdy/dcos
|
packages/adminrouter/extra/src/test-harness/tests/open/test_master.py
|
1
|
3453
|
# Copyright (C) Mesosphere, Inc. See LICENSE file for details.
import logging
import pytest
import requests
from generic_test_code.common import overridden_file_content
from mocker.endpoints.mesos import AGENT1_ID
log = logging.getLogger(__name__)
authed_endpoints = [
'/acs/api/v1/reflect/me',
'/capabilities',
'/cosmos/service/foo/bar',
'/dcos-history-service/foo/bar',
'/exhibitor/foo/bar',
'/marathon/v2/reflect/me',
'/mesos/reflect/me',
'/mesos_dns/v1/reflect/me',
'/metadata',
'/dcos-metadata/plain-metadata-testfile.json',
'/navstar/lashup/key',
'/package/foo/bar',
'/pkgpanda/foo/bar',
'/pkgpanda/active.buildinfo.full.json',
'/service/scheduler-alwaysthere/foo/bar',
'/service/nest1/scheduler-alwaysthere/foo/bar',
'/service/nest2/nest1/scheduler-alwaysthere/foo/bar',
'/slave/{}'.format(AGENT1_ID),
'/system/health/v1/foo/bar',
'/system/v1/agent/{}/logs/foo/bar'.format(AGENT1_ID),
'/system/v1/agent/{}/metrics/v0/foo/bar'.format(AGENT1_ID),
'/system/v1/leader/marathon/foo/bar',
'/system/v1/leader/mesos/foo/bar',
'/system/v1/logs/foo/bar',
'/system/v1/metrics/foo/bar',
]
# Note(JP): this test assumes that the IAM is in the hot path for
# authorization. That should not be the case, by design.
# class TestAuthEnforcementOpen:
# @pytest.mark.parametrize("path", authed_endpoints)
# def test_if_unknown_user_is_forbidden_access(
# self, mocker, master_ar_process, path, valid_user_header):
# log_messages = {
# 'User not found: `bozydar`':
# SearchCriteria(1, True)}
# with iam_denies_all_requests(mocker):
# with assert_iam_queried_for_uid(mocker, 'bozydar'):
# assert_endpoint_response(
# master_ar_process,
# path,
# 401,
# headers=valid_user_header,
# assert_stderr=log_messages)
# @pytest.mark.parametrize("path", authed_endpoints)
# def test_if_known_user_is_permitted_access(
# self, mocker, master_ar_process, path, valid_user_header):
# is_auth_location = path.startswith("/acs/api/v1")
# with assert_iam_queried_for_uid(
# mocker, 'bozydar', expect_two_iam_calls=is_auth_location):
# assert_endpoint_response(
# master_ar_process,
# path,
# 200,
# headers=valid_user_header,
# )
class TestDcosMetadata:
@pytest.mark.parametrize("uniq_content", ["(。◕‿‿◕。)", "plain text 1234"])
@pytest.mark.parametrize("path", ["plain-metadata-testfile.json",
"nest1/nested-metadata-testfile.json"])
def test_if_metadata_files_are_handled(
self,
master_ar_process,
valid_user_header,
uniq_content,
path):
url = master_ar_process.make_url_from_path('/dcos-metadata/{}'.format(path))
with overridden_file_content(
'/opt/mesosphere/active/dcos-metadata/etc/{}'.format(path),
uniq_content):
resp = requests.get(
url,
allow_redirects=False,
headers=valid_user_header)
assert resp.status_code == 200
resp.encoding = 'utf-8'
assert resp.text == uniq_content
|
apache-2.0
| -5,454,270,317,365,590,000 | 33.41 | 84 | 0.587039 | false |
glennhickey/hal
|
assemblyHub/wigTrack.py
|
1
|
7699
|
#!/usr/bin/env python3
#Copyright (C) 2013 by Ngan Nguyen
# Copyright (C) 2012-2019 by UCSC Computational Genomics Lab
#
#Released under the MIT license, see LICENSE.txt
"""Creating wiggle (annotation) tracks and lifted-over wiggle tracks for the hubs
"""
import os, re, time
from sonLib.bioio import system
from toil.job import Job
from optparse import OptionGroup
from hal.assemblyHub.assemblyHubCommon import *
class LiftoverWigFiles( Job ):
def __init__(self, indir, halfile, genome2seq2len, bigwigdir, noLiftover, outdir):
Job.__init__(self)
self.indir = indir
self.halfile = halfile
self.genome2seq2len = genome2seq2len
self.bigwigdir = bigwigdir
self.noLiftover = noLiftover
self.outdir = outdir
def run(self, fileStore):
        #wigdir has the hierarchy: indir/genome/chr1.wig, chr2.wig...
        #for each genome in wigdir, lift over the wig records of that genome to the coordinates of all other genomes
#liftover wig file of each genome with available wigs to all genomes
genomes = list(self.genome2seq2len.keys())
tempwigs = []
for genome in os.listdir(self.indir):
if genome not in genomes:
continue
genomeindir = os.path.join(self.indir, genome)
assert os.path.isdir(genomeindir)
#Create wig directory for current genome
genomeoutdir = os.path.join(self.bigwigdir, genome)
system("mkdir -p %s" %genomeoutdir)
#get all the wig files (".wig" ext)
wigfiles = getFilesByExt(genomeindir, "wig")
#Concatenate all the input wig files and convert it into bigwig to outdir/genome/genome.bw
tempwig = "%s-temp.wig" % os.path.join(genomeoutdir, genome)
system( "cat %s/*wig > %s" %(genomeindir, tempwig) )
if os.stat(tempwig).st_size > 0:#make sure the file is not empty
outbigwig = os.path.join(genomeoutdir, "%s.bw" %genome)
chrsizefile = os.path.join(self.outdir, genome, "chrom.sizes")
system("wigToBigWig %s %s %s" %(tempwig, chrsizefile, outbigwig))
#Liftover to all other genomes:
if not self.noLiftover:
for othergenome in genomes:
if othergenome != genome:
self.addChild( LiftoverWig(genomeoutdir, tempwig, genome, othergenome, self.halfile, self.outdir) )
tempwigs.append( tempwig )
self.addFollowOn( CleanupFiles(tempwigs) )
class LiftoverWig( Job ):
def __init__(self, genomeoutdir, wig, genome, othergenome, halfile, outdir):
Job.__init__(self)
self.genomeoutdir = genomeoutdir
self.wig = wig
self.genome = genome
self.othergenome = othergenome
self.halfile = halfile
self.outdir = outdir
def run(self, fileStore):
liftovertempwig = "%s.wig" % os.path.join(self.genomeoutdir, self.othergenome)
system("halWiggleLiftover %s %s %s %s %s" %(self.halfile, self.genome, self.wig, self.othergenome, liftovertempwig))
outbigwig = os.path.join(self.genomeoutdir, "%s.bw" %self.othergenome)
chrsizefile = os.path.join(self.outdir, self.othergenome, "chrom.sizes")
if os.stat(liftovertempwig).st_size > 0:#make sure the file is not empty
system("wigToBigWig %s %s %s" %(liftovertempwig, chrsizefile, outbigwig))
#Cleanup:
system("rm %s" % liftovertempwig)
#def writeTrackDb_bigwigs(f, bigwigdir, genomes, subgenomes, currgenome, properName):
def writeTrackDb_bigwigs(f, bigwigdir, genomes, currgenome, properName):
annotation = os.path.basename(bigwigdir)
genome2priority = {}
for i, genome in enumerate(genomes):
if genome == currgenome:
genome2priority[genome] = 1
else:
genome2priority[genome] = i + 2
for genome in os.listdir(bigwigdir):
bwfile = os.path.join(bigwigdir, genome, "%s.bw" %currgenome)
if not os.path.exists(bwfile):
continue
#start writing track
genomeProperName = genome
if genome in properName:
genomeProperName = properName[genome]
priority = 1
if genome in genome2priority:
priority = genome2priority[genome]
f.write("\t\ttrack %s%s\n" % (annotation, genome))
if genome == currgenome:
f.write("\t\tlongLabel %s %s\n" % (genomeProperName, annotation))
else:
f.write("\t\tlongLabel %s Lifted-over %s\n" % (genomeProperName, annotation))
f.write("\t\tpriority %d\n" %priority)
f.write("\t\tshortLabel %s%s\n" % (genomeProperName, annotation))
f.write("\t\tbigDataUrl ../liftoverwig/%s\n" % os.path.join( annotation, genome, "%s.bw" % currgenome ) )
f.write("\t\ttype bigWig\n")
f.write("\t\tgroup annotation%s\n" %annotation)
f.write("\t\titemRgb On\n")
#if genome == currgenome or genome in subgenomes:
if genome == currgenome:
f.write("\t\tvisibility dense\n")
f.write("\t\tparent hubCentral%s\n"%annotation)
else:
f.write("\t\tvisibility hide\n")
f.write("\t\tparent hubCentral%s off\n"%annotation)
f.write("\t\twindowingFunction Mean\n")
f.write("\t\tautoScale On\n")
f.write("\t\tmaxHeightPixels 128:36:16\n")
f.write("\t\tgraphTypeDefault Bar\n")
f.write("\t\tgridDefault OFF\n")
f.write("\t\tcolor 0,0,0\n")
f.write("\t\taltColor 128,128,128\n")
f.write("\t\tviewLimits 30:70\n")
f.write("\t\tsubGroups view=%s orgs=%s\n" %(annotation, genome))
f.write("\n")
def addWigOptions(parser):
group = parser.add_argument_group("WIGGLE-FORMATTED ANNOTATIONS", "All annotations in wiggle or bigWig formats.")
group.add_argument('--wigDirs', dest='wigdirs', help='comma separated list of directories containing wig files of the input genomes. Each directory represents a type of annotation. The annotations of each genome will then be liftovered to all other genomes in the MSA. Example: "genes,genomicIsland,tRNA". Format of each directory: wigDir/ then genome1/ then chr1.wig, chr2.wig... ' )
group.add_argument('--finalBigwigDirs', dest='bwdirs', help='comma separated list of directories containing final big wig files to be displayed. No liftover will be done for these files. Each directory represents a type of annotation. Example: "readCoverage,". Format of each directory: bwDir/ then queryGenome/ then targetGenome1.bw, targetGenome2.bw ... (so annotation of queryGenome has been mapped to targetGenomes and will be display on the targetGenome browsers). ' )
group.add_argument('--nowigLiftover', dest='noWigLiftover', action='store_true', default=False, help='If specified, will not lift over the wig annotations. ')
group = parser.add_argument_group(group)
def checkWigOptions(parser, options):
options.bigwigdirs = []
if options.wigdirs:
dirs = [d.rstrip('/') for d in options.wigdirs.split(',')]
options.wigdirs = dirs
for d in dirs:
if not os.path.exists(d) or not os.path.isdir(d):
parser.error("Wig directory %s does not exist or is not a directory.\n" %d)
if options.bwdirs:
dirs = [d.rstrip('/') for d in options.bwdirs.split(',')]
options.bwdirs = dirs
for d in dirs:
if not os.path.exists(d) or not os.path.isdir(d):
parser.error("Bigwig directory %s does not exist or is not a directory.\n" %d)
|
mit
| -1,007,999,811,680,412,400 | 47.727848 | 477 | 0.636446 | false |
rvasilev/django-markitup-widget
|
markitup/widgets.py
|
1
|
3052
|
"""
widgets for django-markitup
Time-stamp: <2010-01-06 12:31:06 carljm widgets.py>
"""
from django import forms
from django.utils.safestring import mark_safe
from django.contrib.admin.widgets import AdminTextareaWidget
from markitup import settings
from markitup.util import absolute_url, absolute_jquery_url
import posixpath
from markupfield.widgets import MarkupTextarea
#class MarkupTextarea(forms.Textarea):
# def render(self, name, value, attrs=None):
# if value is not None:
# Special handling for MarkupField value.
# This won't touch simple TextFields because they don't have
# 'raw' attribute.
# try:
# value = value.raw
# except AttributeError:
# pass
# return super(MarkupTextarea, self).render(name, value, attrs)
class MarkItUpWidget(MarkupTextarea):
"""
Widget for a MarkItUp editor textarea.
    Takes three additional optional keyword arguments:
``markitup_set``
URL path (absolute or relative to STATIC_URL) to MarkItUp
button set directory. Default: value of MARKITUP_SET setting.
``markitup_skin``
URL path (absolute or relative to STATIC_URL) to MarkItUp skin
directory. Default: value of MARKITUP_SKIN setting.
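    ``auto_preview``
        If true, the rendered widget triggers MarkItUp's preview pane as soon
        as the editor loads. Default: value of MARKITUP_AUTO_PREVIEW setting.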
"""
def __init__(self, attrs=None,
markitup_set=None,
markitup_skin=None,
auto_preview=None):
self.miu_set = absolute_url(markitup_set or settings.MARKITUP_SET)
self.miu_skin = absolute_url(markitup_skin or settings.MARKITUP_SKIN)
if auto_preview is None:
auto_preview = settings.MARKITUP_AUTO_PREVIEW
self.auto_preview = auto_preview
super(MarkItUpWidget, self).__init__(attrs)
def _media(self):
return forms.Media(
css= {'screen': (posixpath.join(self.miu_skin, 'style.css'),
posixpath.join(self.miu_set, 'style.css'))},
js=(absolute_jquery_url(),
absolute_url('markitup/jquery.markitup.js'),
posixpath.join(self.miu_set, 'set.js')))
media = property(_media)
def render(self, name, value, attrs=None):
html = super(MarkItUpWidget, self).render(name, value, attrs)
if self.auto_preview:
auto_preview = "$('a[title=\"Preview\"]').trigger('mouseup');"
else: auto_preview = ''
html += ('<script type="text/javascript">'
'(function($) { '
'$(document).ready(function() {'
' $("#%(id)s").markItUp(mySettings);'
' %(auto_preview)s '
'});'
'})(jQuery);'
'</script>' % {'id': attrs['id'],
'auto_preview': auto_preview })
return mark_safe(html)
class AdminMarkItUpWidget(MarkItUpWidget, AdminTextareaWidget):
"""
Add vLargeTextarea class to MarkItUpWidget so it looks more
similar to other admin textareas.
"""
pass
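# Example usage (illustrative sketch; "CommentForm" and its ``body`` field are
# hypothetical, and MARKITUP_SET / MARKITUP_SKIN are assumed to be configured):
#
# from django import forms
# from markitup.widgets import MarkItUpWidget
#
# class CommentForm(forms.Form):
#     body = forms.CharField(widget=MarkItUpWidget(auto_preview=True))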
|
bsd-3-clause
| -5,562,470,755,295,741,000 | 32.911111 | 77 | 0.595675 | false |
asterisk/testsuite
|
lib/python/asterisk/pluggable_modules.py
|
1
|
38856
|
"""Generic pluggable modules
Copyright (C) 2012, Digium, Inc.
Kinsey Moore <[email protected]>
This program is free software, distributed under the terms of
the GNU General Public License Version 2.
"""
import os
import sys
import logging
import shutil
import re
sys.path.append("lib/python")
from .ami import AMIEventInstance
from twisted.internet import reactor
from starpy import fastagi
from .test_runner import load_and_parse_module
from .pluggable_registry import PLUGGABLE_ACTION_REGISTRY,\
PLUGGABLE_EVENT_REGISTRY,\
PluggableRegistry
from . import matcher
LOGGER = logging.getLogger(__name__)
class Originator(object):
"""Pluggable module class that originates calls in Asterisk"""
def __init__(self, module_config, test_object):
"""Initialize config and register test_object callbacks."""
self.ami = None
test_object.register_ami_observer(self.ami_connect)
self.test_object = test_object
self.current_destination = 0
self.ami_callback = None
self.scenario_count = 0
self.config = {
'channel': 'Local/s@default',
'application': 'Echo',
'data': '',
'context': '',
'exten': '',
'priority': '',
'ignore-originate-failure': 'no',
'trigger': 'scenario_start',
'scenario-trigger-after': None,
'scenario-name': None,
'id': '0',
'account': None,
'async': 'False',
'event': None,
'timeout': None,
'codecs': None,
}
# process config
if not module_config:
return
for k in module_config.keys():
if k in self.config:
self.config[k] = module_config[k]
if self.config['trigger'] == 'scenario_start':
if (self.config['scenario-trigger-after'] is not None and
self.config['scenario-name'] is not None):
LOGGER.error("Conflict between 'scenario-trigger-after' and "
"'scenario-name'. Only one may be used.")
raise Exception
else:
test_object.register_scenario_started_observer(
self.scenario_started)
elif self.config['trigger'] == 'event':
if not self.config['event']:
LOGGER.error("Event specifier for trigger type 'event' is "
"missing")
raise Exception
# set id to the AMI id for the origination if it is unset
if 'id' not in self.config['event']:
self.config['event']['id'] = self.config['id']
callback = AMIPrivateCallbackInstance(self.config['event'],
test_object,
self.originate_callback)
self.ami_callback = callback
return
def ami_connect(self, ami):
"""Handle new AMI connections."""
LOGGER.info("AMI %s connected", str(ami.id))
if str(ami.id) == self.config['id']:
self.ami = ami
if self.config['trigger'] == 'ami_connect':
self.originate_call()
return
def failure(self, result):
"""Handle origination failure."""
if self.config['ignore-originate-failure'] == 'no':
LOGGER.info("Originate failed: %s", str(result))
self.test_object.set_passed(False)
return None
def originate_callback(self, ami, event):
"""Handle event callbacks."""
LOGGER.info("Got event callback for Origination")
self.originate_call()
return True
def originate_call(self):
"""Originate the call"""
LOGGER.info("Originating call")
defer = None
if len(self.config['context']) > 0:
defer = self.ami.originate(channel=self.config['channel'],
context=self.config['context'],
exten=self.config['exten'],
priority=self.config['priority'],
timeout=self.config['timeout'],
account=self.config['account'],
codecs=self.config['codecs'],
async=self.config['async'])
else:
defer = self.ami.originate(channel=self.config['channel'],
application=self.config['application'],
data=self.config['data'],
timeout=self.config['timeout'],
account=self.config['account'],
codecs=self.config['codecs'],
async=self.config['async'])
defer.addErrback(self.failure)
def scenario_started(self, result):
"""Handle origination on scenario start if configured to do so."""
LOGGER.info("Scenario '%s' started", result.name)
if self.config['scenario-name'] is not None:
if result.name == self.config['scenario-name']:
LOGGER.debug("Scenario name '%s' matched", result.name)
self.originate_call()
elif self.config['scenario-trigger-after'] is not None:
self.scenario_count += 1
trigger_count = int(self.config['scenario-trigger-after'])
if self.scenario_count == trigger_count:
LOGGER.debug("Scenario count has been met")
self.originate_call()
else:
self.originate_call()
return result
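# Illustrative YAML for the Originator module above (sketch; key names mirror
# self.config, values shown are hypothetical):
#
# originator:
#     trigger: 'scenario_start'
#     scenario-name: 'alice-registers'
#     id: '0'
#     channel: 'Local/1000@default'
#     context: 'default'
#     exten: 'echo'
#     priority: '1'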
class AMIPrivateCallbackInstance(AMIEventInstance):
"""Subclass of AMIEventInstance that operates by calling a user-defined
callback function. The callback function returns the current disposition
of the test (i.e. whether the test is currently passing or failing).
"""
def __init__(self, instance_config, test_object, callback):
"""Constructor"""
super(AMIPrivateCallbackInstance, self).__init__(instance_config,
test_object)
self.callback = callback
if 'start' in instance_config:
self.passed = True if instance_config['start'] == 'pass' else False
def event_callback(self, ami, event):
"""Generic AMI event handler"""
self.passed = self.callback(ami, event)
return (ami, event)
def check_result(self, callback_param):
"""Set the test status based on the result of self.callback"""
self.test_object.set_passed(self.passed)
return callback_param
class AMIChannelHangup(AMIEventInstance):
"""An AMIEventInstance derived class that hangs up a channel when an
event is matched."""
def __init__(self, instance_config, test_object):
"""Constructor for pluggable modules"""
super(AMIChannelHangup, self).__init__(instance_config, test_object)
self.hungup_channel = False
self.delay = instance_config.get('delay') or 0
def event_callback(self, ami, event):
"""Override of the event callback"""
if self.hungup_channel:
return
if 'channel' not in event:
return
LOGGER.info("Hanging up channel %s", event['channel'])
self.hungup_channel = True
reactor.callLater(self.delay, ami.hangup, event['channel'])
return (ami, event)
class AMIChannelHangupAll(AMIEventInstance):
"""An AMIEventInstance derived class that hangs up all the channels when
an event is matched."""
def __init__(self, instance_config, test_object):
"""Constructor for pluggable modules"""
super(AMIChannelHangupAll, self).__init__(instance_config, test_object)
test_object.register_ami_observer(self.__ami_connect)
self.channels = []
def __ami_connect(self, ami):
"""AMI connect handler"""
if str(ami.id) in self.ids:
ami.registerEvent('Newchannel', self.__new_channel_handler)
ami.registerEvent('Hangup', self.__hangup_handler)
def __new_channel_handler(self, ami, event):
"""New channel event handler"""
self.channels.append({'id': ami.id, 'channel': event['channel']})
def __hangup_handler(self, ami, event):
"""Hangup event handler"""
objects = [x for x in self.channels if
(x['id'] == ami.id and
x['channel'] == event['channel'])]
for obj in objects:
self.channels.remove(obj)
def event_callback(self, ami, event):
"""Override of the event callback"""
def __hangup_ignore(result):
"""Ignore hangup errors"""
# Ignore hangup errors - if the channel is gone, we don't care
return result
objects = [x for x in self.channels if x['id'] == ami.id]
for obj in objects:
LOGGER.info("Hanging up channel %s", obj['channel'])
ami.hangup(obj['channel']).addErrback(__hangup_ignore)
self.channels.remove(obj)
class ARIHangupMonitor(object):
"""A class that monitors for new channels and hungup channels in ARI.
This is the same as HangupMonitor, except that it listens over ARI
to avoid any issue with race conditions. Note that it will implicitly
create a global subscription to channels, which may conflict with
tests that don't expect to get all those events.
"""
def __init__(self, instance_config, test_object):
"""Constructor"""
super(ARIHangupMonitor, self).__init__()
self.delay = 0
if 'delay-stop' in instance_config:
self.delay = instance_config['delay-stop']
self.test_object = test_object
self.test_object.register_ari_observer(self._handle_ws_open)
self.test_object.register_ws_event_handler(self._handle_ws_event)
self.channels = 0
def _handle_ws_open(self, ari_receiver):
"""Handle WS connection"""
LOGGER.info(ari_receiver.apps)
for app in ari_receiver.apps.split(','):
self.test_object.ari.post('applications/{0}/subscription?eventSource=channel:'.format(app))
def _handle_ws_event(self, message):
"""Handle a message received over the WS"""
message_type = message.get('type')
if (message_type == 'ChannelCreated'):
LOGGER.info('Tracking channel %s', message.get('channel'))
self.channels += 1
elif (message_type == 'ChannelDestroyed'):
LOGGER.info('Destroyed channel %s', message.get('channel'))
self.channels -= 1
if (self.channels == 0):
LOGGER.info("All channels have hungup; stopping test after %d seconds",
self.delay)
reactor.callLater(self.delay, self.test_object.stop_reactor)
class HangupMonitor(object):
"""A class that monitors for new channels and hungup channels. When all
channels it has monitored for have hung up, it ends the test.
Essentially, as long as there are new channels it will keep the test
going; however, once channels start hanging up it will kill the test
on the last hung up channel.
"""
def __init__(self, instance_config, test_object):
"""Constructor for pluggable modules"""
super(HangupMonitor, self).__init__()
self.config = instance_config
self.test_object = test_object
self.test_object.register_ami_observer(self.__ami_connect)
self.channels = []
self.num_calls = 0
def __ami_connect(self, ami):
"""AMI connect handler"""
if str(ami.id) in self.config["ids"]:
ami.registerEvent('Newchannel', self.__new_channel_handler)
ami.registerEvent('Rename', self.__rename_handler)
ami.registerEvent('Hangup', self.__hangup_handler)
def __new_channel_handler(self, ami, event):
"""Handler for the Newchannel event"""
LOGGER.debug("Tracking channel %s", event['channel'])
self.channels.append(event['channel'])
return (ami, event)
def __hangup_handler(self, ami, event):
"""Handler for the Hangup event"""
LOGGER.debug("Channel %s hungup", event['channel'])
self.channels.remove(event['channel'])
self.num_calls += 1
if 'min_calls' in self.config \
and self.num_calls < self.config["min_calls"]:
return (ami, event)
if len(self.channels) == 0:
LOGGER.info("All channels have hungup; stopping test")
self.stop_test()
return (ami, event)
def __rename_handler(self, ami, event):
LOGGER.debug("Channel {0} renamed to {1}".format(event['channel'],
event['newname']))
self.channels.append(event['newname'])
self.channels.remove(event['channel'])
def stop_test(self):
"""Allow subclasses to take different actions to stop the test."""
self.test_object.stop_reactor()
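# Illustrative configuration for HangupMonitor (sketch; key names follow the
# lookups above, values are hypothetical):
#
# hangup-monitor:
#     ids: ['0']
#     min_calls: 2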
class CallFiles(object):
""" This class allows call files to be created from a YAML configuration"""
def __init__(self, instance_config, test_object):
"""Constructor"""
super(CallFiles, self).__init__()
self.test_object = test_object
self.call_file_instances = instance_config
self.locale = ""
if self.call_file_instances:
self.test_object.register_ami_observer(self.ami_connect)
else:
LOGGER.error("No configuration was specified for call files")
self.test_failed()
def test_failed(self):
"""Checks to see whether or not the call files were
correctly specified """
self.test_object.set_passed(False)
self.test_object.stop_reactor()
def write_call_file(self, call_file_num, call_file):
"""Write out the specified call file
Keyword Parameters:
call_file_num Which call file in the test we're writing out
call_file A dictionary containing the call file
information, derived from the YAML
"""
params = call_file.get('call-file-params')
if not params:
LOGGER.error("No call file parameters specified")
self.test_failed()
return
self.locale = ("%s%s/tmp/test%d.call" %
(self.test_object.ast[int(call_file['id'])].base,
self.test_object.ast[int(call_file['id'])].directories
["astspooldir"], call_file_num))
with open(self.locale, 'w') as outfile:
for key, value in params.items():
outfile.write("%s: %s\n" % (key, value))
LOGGER.debug("Wrote call file to %s", self.locale)
self.move_file(call_file_num, call_file)
def ami_connect(self, ami):
"""Handler for AMI connection """
for index, call_file in enumerate(self.call_file_instances):
if ami.id == int(call_file.get('id')):
self.write_call_file(index, call_file)
def move_file(self, call_file_num, call_file):
"""Moves call files to astspooldir directory to be run """
src_file = self.locale
dst_file = ("%s%s/outgoing/test%s.call" %
(self.test_object.ast[int(call_file['id'])].base,
self.test_object.ast[int(call_file['id'])].directories
["astspooldir"], call_file_num))
LOGGER.info("Moving file %s to %s", src_file, dst_file)
shutil.move(src_file, dst_file)
os.utime(dst_file, None)
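# Illustrative configuration for CallFiles (sketch; key names follow the lookups
# above, values are hypothetical):
#
# call-file-config:
#     -
#         id: '0'
#         call-file-params:
#             Channel: 'Local/1000@default'
#             Application: 'Echo'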
class SoundChecker(object):
""" This class allows the user to check if a given sound file exists,
whether a sound file fits within a range of file size, and has enough
energy in it to pass a BackgroundDetect threshold of silence"""
def __init__(self, module_config, test_object):
"""Constructor"""
super(SoundChecker, self).__init__()
self.test_object = test_object
self.module_config = module_config['sound-file-config']
self.filepath = ""
self.sound_file = {}
self.actions = []
self.index = 0
self.action_index = 0
self.auto_stop = module_config.get('auto-stop', False)
self.test_object.register_ami_observer(self.ami_connect)
def build_sound_file_location(self, filename, path_type, path_name=""):
"""Creates the filepath for the given sound file.
File_path_types should include relative and absolute, and if absolute,
look for an absolute_path string. Fails if the path type is invalid
or parameters are missing
Keyword Arguments:
filename: The same of the file to be set and used
path-type: The type of path file- either relative or absolute
path_name: Optional parameter that must be included with an
absolute type_path. It stores the actual file path to be
used
returns:
filepath: The filepath that this sound_file test will use.
"""
asterisk_instance = self.module_config[self.index].get('id', 0)
if path_type == 'relative':
ast_instance = self.test_object.ast[asterisk_instance]
base_path = ast_instance.base
spool_dir = ast_instance.directories["astspooldir"]
filepath = ("%s%s/%s" % (base_path, spool_dir, filename))
return filepath
elif path_type == 'absolute':
if path_name:
filepath = "%s/%s" % (path_name, filename)
return filepath
else:
raise Exception("No absolute path specified")
else:
raise Exception("Invalid file path type or undefined path type")
def size_check(self, ami):
"""The size range test.
Checks whether the size of the file meets a certain threshold of
byte size. Fails if it doesn't. Iterates action_index so that the
next action can be done.
Keyword Arguments:
ami- the AMI instance used by this test, not used by this function
but needs to be passed into sound_check_actions to continue
"""
filesize = -1
filesize = os.path.getsize(self.filepath)
size = self.actions[self.action_index].get('size')
tolerance = self.actions[self.action_index].get('tolerance')
if ((filesize - size) > tolerance) or ((size - filesize) > tolerance):
LOGGER.error("""File '%s' failed size check: expected %d, actual %d
                         (tolerance +/- %d)""" % (
self.filepath, size, filesize, tolerance))
self.test_object.set_passed(False)
if self.auto_stop:
self.test_object.stop_reactor()
return
else:
self.action_index += 1
self.sound_check_actions(ami)
def energy_check(self, ami):
"""Checks the energy levels of a given sound file.
This is done by creating a local channel into a dialplan extension
that does a BackgroundDetect on the sound file. The extensions must
be defined by the user.
Keyword Arguments:
ami- the AMI instance used by this test
"""
energyfile = self.filepath[:self.filepath.find('.')]
action = self.actions[self.action_index]
#ami.originate has no type var, so action['type'] has to be popped
action.pop('type')
action['variable'] = {'SOUNDFILE': energyfile}
ami.registerEvent("UserEvent", self.verify_presence)
dfr = ami.originate(**action)
dfr.addErrback(self.test_object.handle_originate_failure)
def sound_check_actions(self, ami):
"""The second, usually larger part of the sound check.
Iterates through the actions that will be used to check various
aspects of the given sound file. Waits for the output of the action
functions before continuing. If all actions have been completed resets
the test to register for a new event as defined in the triggers. If
all sound-file tests have been finished, sets the test to passed.
Keyword Arguments:
ami- the AMI instance used by this test
"""
if self.action_index == len(self.actions):
self.action_index = 0
self.index += 1
if self.index == len(self.module_config):
LOGGER.info("Test successfully passed")
self.test_object.set_passed(True)
if self.auto_stop:
self.test_object.stop_reactor()
else:
self.event_register(ami)
else:
actiontype = self.actions[self.action_index]['type']
if actiontype == 'size_check':
self.size_check(ami)
elif actiontype == 'energy_check':
self.energy_check(ami)
def verify_presence(self, ami, event):
"""UserEvent verifier for the energy check.
Verifies that the userevent that was given off by the dialplan
extension called in energy_check was a soundcheck userevent and that
the status is pass. Fails if the status was not pass. Iterates
action_index if it passed so that the next action can be done.
Keyword Arguments:
ami- the AMI instance used by this test
event- the event (Userevent) being picked up by the AMI that
determines whether a correct amount of energy has been detected.
"""
userevent = event.get("userevent")
if not userevent:
return
if userevent.lower() != "soundcheck":
return
LOGGER.info("Checking the sound check userevent")
ami.deregisterEvent("UserEvent", self.verify_presence)
status = event.get("status")
LOGGER.debug("Status of the sound check is " + status)
if status != "pass":
LOGGER.error("The sound check wasn't successful- test failed")
self.test_object.set_passed(False)
if self.auto_stop:
self.test_object.stop_reactor()
return
else:
self.action_index += 1
self.sound_check_actions(ami)
def sound_check_start(self, ami, event):
"""The first part of the sound_check test. Required.
It deregisters the prerequisite event as defined in triggers so that
it doesn't keep looking for said events. Then it checks whether the
sound file described in the YAML exists by looking for the file with
the given path. The filepath is determined by calling
build_sound_file_location. After this initial part of sound_check,
the remaining actions are then called.
Keyword Arguments:
ami- the AMI instance used by this test
event- the event (defined by the triggers section) being picked up by
the AMI that allows the rest of the pluggable module to be accessed
"""
config = self.module_config[self.index]
instance_id = config.get('id', 0)
if ami.id != instance_id:
return
current_trigger = config['trigger']['match']
for key, value in current_trigger.items():
if key.lower() not in event:
LOGGER.debug("Condition %s not in event, returning", key)
return
if not re.match(value, event.get(key.lower())):
LOGGER.debug("Condition %s: %s does not match %s: %s in event",
key, value, key, event.get(key.lower()))
return
else:
LOGGER.debug("Condition %s: %s matches %s: %s in event",
key, value, key, event.get(key.lower()))
ami.deregisterEvent(current_trigger.get('event'),
self.sound_check_start)
self.sound_file = config['sound-file']
if not self.sound_file:
raise Exception("No sound file parameters specified")
if (not self.sound_file.get('file-name')
or not self.sound_file.get('file-path-type')):
raise Exception("No file or file path type specified")
if self.sound_file.get('absolute-path'):
file_name = self.sound_file['file-name']
file_path_type = self.sound_file['file-path-type']
absolute_path = self.sound_file['absolute-path']
self.filepath = self.build_sound_file_location(file_name,
file_path_type,
absolute_path)
else:
file_name = self.sound_file['file-name']
file_path_type = self.sound_file['file-path-type']
self.filepath = self.build_sound_file_location(file_name,
file_path_type)
#Find the filesize here if it exists
if not os.path.exists(self.filepath):
LOGGER.error("File '%s' does not exist!" % self.filepath)
self.test_object.set_passed(False)
if self.auto_stop:
self.test_object.stop_reactor()
return
self.actions = self.sound_file.get('actions')
self.sound_check_actions(ami)
def event_register(self, ami):
"""Event register for the prerequisite event.
Starts looking for the event defined in the triggers section of the
YAML that allows the rest of the test to be accessed.
Keyword Arguments:
ami- the AMI instance used by this test
"""
current_trigger = self.module_config[self.index]['trigger']['match']
trigger_id = self.module_config[self.index]['trigger'].get('id', 0)
if ami.id != trigger_id:
return
if not current_trigger:
raise Exception("Missing a trigger")
else:
ami.registerEvent(current_trigger.get('event'),
self.sound_check_start)
def ami_connect(self, ami):
"""Starts the ami_connection and then calls event_register
Keyword Arguments:
ami- the AMI instance used by this test
"""
self.event_register(ami)
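# Illustrative configuration for SoundChecker (sketch; key names follow the
# lookups above, values are hypothetical):
#
# sound-file-config:
#     -
#         id: 0
#         trigger:
#             id: 0
#             match:
#                 event: 'UserEvent'
#                 UserEvent: 'RecordingFinished'
#         sound-file:
#             file-name: 'recording.wav'
#             file-path-type: 'relative'
#             actions:
#                 -
#                     type: 'size_check'
#                     size: 80000
#                     tolerance: 10000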
class AsteriskConfigModule(object):
"""A pluggable module that installs an Asterisk config file.
Configuration is as follows:
config-section:
-
id: 0
src: tests/my-test/my-super-awesome.conf
dst: extensions.conf
"""
def __init__(self, instance_config, test_object):
"""Constructor for pluggable modules"""
super(AsteriskConfigModule, self).__init__()
for info in instance_config:
asterisk_instance = test_object.ast[info.get('id', 0)]
asterisk_instance.install_config(info['src'], info['dst'])
class FastAGIModule(object):
"""A class that makes a FastAGI server available to be called via the
dialplan and allows simple commands to be executed.
Configuration is as follows:
config-section:
host: '127.0.0.1'
port: 4573
commands:
- 'SET VARIABLE "CHANVAR1" "CHANVAL1"'
Instead of commands, a callback may be specified to interact with Asterisk:
callback:
module: fast_agi_callback_module
method: fast_agi_callback_method
"""
def __init__(self, instance_config, test_object):
"""Constructor for pluggable modules"""
super(FastAGIModule, self).__init__()
self.test_object = test_object
self.port = instance_config.get('port', 4573)
self.host = instance_config.get('host', '127.0.0.1')
self.commands = instance_config.get('commands')
if 'callback' in instance_config:
self.callback_module = instance_config['callback']['module']
self.callback_method = instance_config['callback']['method']
fastagi_factory = fastagi.FastAGIFactory(self.fastagi_connect)
reactor.listenTCP(self.port, fastagi_factory,
test_object.reactor_timeout, self.host)
def fastagi_connect(self, agi):
"""Handle incoming connections"""
if self.commands:
return self.execute_command(agi, 0)
else:
method = load_and_parse_module(self.callback_module + '.' + self.callback_method)
method(self.test_object, agi)
def on_command_failure(self, reason, agi, idx):
"""Failure handler for executing commands"""
LOGGER.error('Could not execute command %s: %s',
idx, self.commands[idx])
LOGGER.error(reason.getTraceback())
agi.finish()
def on_command_success(self, result, agi, idx):
"""Handler for executing commands"""
LOGGER.debug("Successfully executed '%s': %s",
self.commands[idx], result)
self.execute_command(agi, idx + 1)
def execute_command(self, agi, idx):
"""Execute the requested command"""
if len(self.commands) <= idx:
LOGGER.debug("Completed all commands for %s:%s",
self.host, self.port)
agi.finish()
return
agi.sendCommand(self.commands[idx])\
.addCallback(self.on_command_success, agi, idx)\
.addErrback(self.on_command_failure, agi, idx)
class EventActionModule(object):
"""A class that links arbitrary events with one or more actions.
Configuration is as follows:
config-section:
actions:
custom-action-name: custom.action.location
events:
custom-event-name: custom.event.location
mapping:
-
custom-event-name:
event-config-goes-here
custom-action-name:
action-config-goes-here
Or if no locally-defined events or actions are desired:
config-section:
-
event-name:
event-config-goes-here
other-event-name:
event-config-goes-here
action-name:
action-config-goes-here
Or if no locally-defined events or actions are desired and only one set is
desired:
config-section:
event-name:
event-config-goes-here
action-name:
action-config-goes-here
Any event in a set will trigger all actions in a set.
"""
def __init__(self, instance_config, test_object):
"""Constructor for pluggable modules"""
super(EventActionModule, self).__init__()
self.test_object = test_object
config = instance_config
if isinstance(config, list):
config = {"mapping": config}
elif isinstance(config, dict) and "mapping" not in config:
config = {"mapping": [config]}
# Parse out local action and event definitions
self.local_action_registry = PluggableRegistry()
self.local_event_registry = PluggableRegistry()
def register_modules(config, registry):
"""Register pluggable modules into the registry"""
for key, local_class_path in config.items():
local_class = load_and_parse_module(local_class_path)
if not local_class:
raise Exception("Unable to load %s for module key %s"
% (local_class_path, key))
registry.register(key, local_class)
if "actions" in config:
register_modules(config["actions"], self.local_action_registry)
if "events" in config:
register_modules(config["events"], self.local_event_registry)
self.event_action_sets = []
self.parse_mapping(config)
def parse_mapping(self, config):
"""Parse out the mapping and instantiate objects."""
for e_a_set in config["mapping"]:
plug_set = {"events": [], "actions": []}
for plug_name, plug_config in e_a_set.items():
self.parse_module_config(plug_set, plug_name, plug_config)
if 0 == len(plug_set["events"]):
raise Exception("Pluggable set requires at least one event: %s"
% e_a_set)
self.event_action_sets.append(plug_set)
def parse_module_config(self, plug_set, plug_name, plug_config):
"""Parse module config and update the pluggable module set"""
if self.local_event_registry.check(plug_name):
plug_class = self.local_event_registry.get_class(plug_name)
plug_set["events"].append(
plug_class(self.test_object, self.event_triggered, plug_config))
elif self.local_action_registry.check(plug_name):
plug_class = self.local_action_registry.get_class(plug_name)
plug_set["actions"].append(
plug_class(self.test_object, plug_config))
elif PLUGGABLE_EVENT_REGISTRY.check(plug_name):
plug_class = PLUGGABLE_EVENT_REGISTRY.get_class(plug_name)
plug_set["events"].append(
plug_class(self.test_object, self.event_triggered, plug_config))
elif PLUGGABLE_ACTION_REGISTRY.check(plug_name):
plug_class = PLUGGABLE_ACTION_REGISTRY.get_class(plug_name)
plug_set["actions"].append(
plug_class(self.test_object, plug_config))
else:
raise Exception("Pluggable component '%s' not recognized"
% plug_name)
def find_triggered_set(self, triggered_by):
"""Find the set that was triggered."""
for e_a_set in self.event_action_sets:
for event_mod in e_a_set["events"]:
if event_mod == triggered_by:
return e_a_set
return None
def event_triggered(self, triggered_by, source=None, extra=None):
"""Run actions for the triggered set."""
triggered_set = self.find_triggered_set(triggered_by)
if not triggered_set:
raise Exception("Unable to find event/action set for %s"
% triggered_by)
for action_mod in triggered_set["actions"]:
action_mod.run(triggered_by, source, extra)
class TestStartEventModule(object):
"""An event module that triggers when the test starts."""
def __init__(self, test_object, triggered_callback, config):
"""Setup the test start observer"""
self.test_object = test_object
self.triggered_callback = triggered_callback
self.config = config
test_object.register_start_observer(self.start_observer)
def start_observer(self, ast):
"""Notify the event-action mapper that the test has started."""
self.triggered_callback(self, ast)
PLUGGABLE_EVENT_REGISTRY.register("test-start", TestStartEventModule)
class LogActionModule(object):
"""An action module that logs a message when triggered."""
def __init__(self, test_object, config):
"""Setup the test start observer"""
self.test_object = test_object
self.message = config["message"]
def run(self, triggered_by, source, extra):
"""Log a message."""
LOGGER.info(self.message)
PLUGGABLE_ACTION_REGISTRY.register("logger", LogActionModule)
class ValidateLogActionModule(object):
"""An action module that validates a log files existence."""
def __init__(self, test_object, config):
self.test_object = test_object
self.logfile = config["logfile"]
self.pass_if_present = config["pass-if-present"]
def run(self, triggered_by, source, extra):
"""Check to see if log file is present or not."""
files = []
testpath = ('%s/var/log/asterisk' %
(self.test_object.ast[0].base))
for (dirpath, dirnames, filenames) in os.walk(testpath):
files.extend(filenames)
break
if self.logfile in files:
if (self.pass_if_present):
self.test_object.set_passed(True)
else:
self.test_object.set_passed(False)
else:
if (self.pass_if_present):
self.test_object.set_passed(False)
else:
self.test_object.set_passed(True)
PLUGGABLE_ACTION_REGISTRY.register("validate-log", ValidateLogActionModule)
class CallbackActionModule(object):
"""An action module that calls the specified callback."""
def __init__(self, test_object, config):
"""Setup the test start observer"""
self.test_object = test_object
self.module = config["module"]
self.method = config["method"]
def run(self, triggered_by, source, extra):
"""Call the callback."""
method = load_and_parse_module(self.module + '.' + self.method)
self.test_object.set_passed(method(self.test_object, triggered_by,
source, extra))
PLUGGABLE_ACTION_REGISTRY.register("callback", CallbackActionModule)
class StopTestActionModule(object):
"""Action module that stops a test"""
def __init__(self, test_object, config):
"""Constructor
Keyword Arguments:
test_object The main test object
config The pluggable module config
"""
self.test_object = test_object
def run(self, triggered_by, source, extra):
"""Execute the action, which stops the test
Keyword Arguments:
triggered_by The event that triggered this action
source The Asterisk interface object that provided the event
extra Source dependent data
"""
self.test_object.stop_reactor()
PLUGGABLE_ACTION_REGISTRY.register("stop_test", StopTestActionModule)
class PjsuaPhoneActionModule(object):
"""An action module that instructs a phone to perform an action."""
def __init__(self, test_object, config):
"""Setup the test start observer"""
self.test_object = test_object
self.module = "phones"
self.method = config["action"]
self.config = config
def run(self, triggered_by, source, extra):
"""Instruct phone to perform action"""
method = load_and_parse_module(self.module + "." + self.method)
method(self.test_object, triggered_by, source, extra, self.config)
PLUGGABLE_ACTION_REGISTRY.register("pjsua_phone", PjsuaPhoneActionModule)
|
gpl-2.0
| 3,161,859,109,901,518,000 | 39.016478 | 103 | 0.587528 | false |
facebookresearch/faiss
|
tests/torch_test_contrib.py
|
1
|
11969
|
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import faiss
import torch
import unittest
import numpy as np
import faiss.contrib.torch_utils
class TestTorchUtilsCPU(unittest.TestCase):
# tests add, search
def test_lookup(self):
d = 128
index = faiss.IndexFlatL2(d)
# Add to CPU index with torch CPU
xb_torch = torch.rand(10000, d)
index.add(xb_torch)
# Test reconstruct
y_torch = index.reconstruct(10)
self.assertTrue(torch.equal(y_torch, xb_torch[10]))
# Add to CPU index with numpy CPU
xb_np = torch.rand(500, d).numpy()
index.add(xb_np)
self.assertEqual(index.ntotal, 10500)
y_np = np.zeros(d, dtype=np.float32)
index.reconstruct(10100, y_np)
self.assertTrue(np.array_equal(y_np, xb_np[100]))
# Search with np cpu
xq_torch = torch.rand(10, d, dtype=torch.float32)
d_np, I_np = index.search(xq_torch.numpy(), 5)
# Search with torch cpu
d_torch, I_torch = index.search(xq_torch, 5)
# The two should be equivalent
self.assertTrue(np.array_equal(d_np, d_torch.numpy()))
self.assertTrue(np.array_equal(I_np, I_torch.numpy()))
# Search with np cpu using pre-allocated arrays
d_np_input = np.zeros((10, 5), dtype=np.float32)
I_np_input = np.zeros((10, 5), dtype=np.int64)
index.search(xq_torch.numpy(), 5, d_np_input, I_np_input)
self.assertTrue(np.array_equal(d_np, d_np_input))
self.assertTrue(np.array_equal(I_np, I_np_input))
# Search with torch cpu using pre-allocated arrays
d_torch_input = torch.zeros(10, 5, dtype=torch.float32)
I_torch_input = torch.zeros(10, 5, dtype=torch.int64)
index.search(xq_torch, 5, d_torch_input, I_torch_input)
self.assertTrue(np.array_equal(d_torch_input.numpy(), d_np))
self.assertTrue(np.array_equal(I_torch_input.numpy(), I_np))
# tests train, add_with_ids
def test_train_add_with_ids(self):
d = 32
nlist = 5
quantizer = faiss.IndexFlatL2(d)
index = faiss.IndexIVFFlat(quantizer, d, nlist, faiss.METRIC_L2)
xb = torch.rand(1000, d, dtype=torch.float32)
index.train(xb)
# Test add_with_ids with torch cpu
ids = torch.arange(1000, 1000 + xb.shape[0], dtype=torch.int64)
index.add_with_ids(xb, ids)
_, I = index.search(xb[10:20], 1)
self.assertTrue(torch.equal(I.view(10), ids[10:20]))
# Test add_with_ids with numpy
index.reset()
index.train(xb.numpy())
index.add_with_ids(xb.numpy(), ids.numpy())
_, I = index.search(xb.numpy()[10:20], 1)
self.assertTrue(np.array_equal(I.reshape(10), ids.numpy()[10:20]))
# tests reconstruct, reconstruct_n
def test_reconstruct(self):
d = 32
index = faiss.IndexFlatL2(d)
xb = torch.rand(100, d, dtype=torch.float32)
index.add(xb)
# Test reconstruct with torch cpu (native return)
y = index.reconstruct(7)
self.assertTrue(torch.equal(xb[7], y))
# Test reconstruct with numpy output provided
y = np.empty(d, dtype=np.float32)
index.reconstruct(11, y)
self.assertTrue(np.array_equal(xb.numpy()[11], y))
        # Test reconstruct with torch cpu output provided
y = torch.empty(d, dtype=torch.float32)
index.reconstruct(12, y)
self.assertTrue(torch.equal(xb[12], y))
# Test reconstruct_n with torch cpu (native return)
y = index.reconstruct_n(10, 10)
self.assertTrue(torch.equal(xb[10:20], y))
# Test reconstruct with numpy output provided
y = np.empty((10, d), dtype=np.float32)
index.reconstruct_n(20, 10, y)
self.assertTrue(np.array_equal(xb.cpu().numpy()[20:30], y))
# Test reconstruct_n with torch cpu output provided
y = torch.empty(10, d, dtype=torch.float32)
index.reconstruct_n(40, 10, y)
self.assertTrue(torch.equal(xb[40:50].cpu(), y))
# tests assign
def test_assign(self):
d = 32
index = faiss.IndexFlatL2(d)
xb = torch.rand(1000, d, dtype=torch.float32)
index.add(xb)
index_ref = faiss.IndexFlatL2(d)
index_ref.add(xb.numpy())
# Test assign with native cpu output
xq = torch.rand(10, d, dtype=torch.float32)
labels = index.assign(xq, 5)
labels_ref = index_ref.assign(xq.cpu(), 5)
self.assertTrue(torch.equal(labels, labels_ref))
# Test assign with np input
labels = index.assign(xq.numpy(), 5)
labels_ref = index_ref.assign(xq.numpy(), 5)
self.assertTrue(np.array_equal(labels, labels_ref))
# Test assign with numpy output provided
labels = np.empty((xq.shape[0], 5), dtype='int64')
index.assign(xq.numpy(), 5, labels)
self.assertTrue(np.array_equal(labels, labels_ref))
# Test assign with torch cpu output provided
labels = torch.empty(xq.shape[0], 5, dtype=torch.int64)
index.assign(xq, 5, labels)
labels_ref = index_ref.assign(xq, 5)
self.assertTrue(torch.equal(labels, labels_ref))
# tests remove_ids
def test_remove_ids(self):
# only implemented for cpu index + numpy at the moment
d = 32
quantizer = faiss.IndexFlatL2(d)
index = faiss.IndexIVFFlat(quantizer, d, 5)
index.make_direct_map()
index.set_direct_map_type(faiss.DirectMap.Hashtable)
xb = torch.rand(1000, d, dtype=torch.float32)
ids = torch.arange(1000, 1000 + xb.shape[0], dtype=torch.int64)
index.train(xb)
index.add_with_ids(xb, ids)
ids_remove = np.array([1010], dtype=np.int64)
index.remove_ids(ids_remove)
# We should find this
y = index.reconstruct(1011)
self.assertTrue(np.array_equal(xb[11].numpy(), y))
# We should not find this
with self.assertRaises(RuntimeError):
y = index.reconstruct(1010)
# Torch not yet supported
ids_remove = torch.tensor([1012], dtype=torch.int64)
with self.assertRaises(AssertionError):
index.remove_ids(ids_remove)
# tests update_vectors
def test_update_vectors(self):
d = 32
quantizer_np = faiss.IndexFlatL2(d)
index_np = faiss.IndexIVFFlat(quantizer_np, d, 5)
index_np.make_direct_map()
index_np.set_direct_map_type(faiss.DirectMap.Hashtable)
quantizer_torch = faiss.IndexFlatL2(d)
index_torch = faiss.IndexIVFFlat(quantizer_torch, d, 5)
index_torch.make_direct_map()
index_torch.set_direct_map_type(faiss.DirectMap.Hashtable)
xb = torch.rand(1000, d, dtype=torch.float32)
ids = torch.arange(1000, 1000 + xb.shape[0], dtype=torch.int64)
index_np.train(xb.numpy())
index_np.add_with_ids(xb.numpy(), ids.numpy())
index_torch.train(xb)
index_torch.add_with_ids(xb, ids)
xb_up = torch.rand(10, d, dtype=torch.float32)
ids_up = ids[0:10]
index_np.update_vectors(ids_up.numpy(), xb_up.numpy())
index_torch.update_vectors(ids_up, xb_up)
xq = torch.rand(10, d, dtype=torch.float32)
D_np, I_np = index_np.search(xq.numpy(), 5)
D_torch, I_torch = index_torch.search(xq, 5)
self.assertTrue(np.array_equal(D_np, D_torch.numpy()))
self.assertTrue(np.array_equal(I_np, I_torch.numpy()))
# tests range_search
def test_range_search(self):
torch.manual_seed(10)
d = 32
index = faiss.IndexFlatL2(d)
xb = torch.rand(100, d, dtype=torch.float32)
index.add(xb)
# torch cpu as ground truth
thresh = 2.9
xq = torch.rand(10, d, dtype=torch.float32)
lims, D, I = index.range_search(xq, thresh)
# compare against np
lims_np, D_np, I_np = index.range_search(xq.numpy(), thresh)
self.assertTrue(np.array_equal(lims.numpy(), lims_np))
self.assertTrue(np.array_equal(D.numpy(), D_np))
self.assertTrue(np.array_equal(I.numpy(), I_np))
# tests search_and_reconstruct
def test_search_and_reconstruct(self):
d = 32
nlist = 10
M = 4
k = 5
quantizer = faiss.IndexFlatL2(d)
index = faiss.IndexIVFPQ(quantizer, d, nlist, M, 4)
xb = torch.rand(1000, d, dtype=torch.float32)
index.train(xb)
# different set
xb = torch.rand(500, d, dtype=torch.float32)
index.add(xb)
# torch cpu as ground truth
xq = torch.rand(10, d, dtype=torch.float32)
D, I, R = index.search_and_reconstruct(xq, k)
# compare against numpy
D_np, I_np, R_np = index.search_and_reconstruct(xq.numpy(), k)
self.assertTrue(np.array_equal(D.numpy(), D_np))
self.assertTrue(np.array_equal(I.numpy(), I_np))
self.assertTrue(np.array_equal(R.numpy(), R_np))
# numpy input values
D_input = np.zeros((xq.shape[0], k), dtype=np.float32)
I_input = np.zeros((xq.shape[0], k), dtype=np.int64)
R_input = np.zeros((xq.shape[0], k, d), dtype=np.float32)
index.search_and_reconstruct(xq.numpy(), k, D_input, I_input, R_input)
self.assertTrue(np.array_equal(D.numpy(), D_input))
self.assertTrue(np.array_equal(I.numpy(), I_input))
self.assertTrue(np.array_equal(R.numpy(), R_input))
# torch input values
D_input = torch.zeros(xq.shape[0], k, dtype=torch.float32)
I_input = torch.zeros(xq.shape[0], k, dtype=torch.int64)
R_input = torch.zeros(xq.shape[0], k, d, dtype=torch.float32)
index.search_and_reconstruct(xq, k, D_input, I_input, R_input)
self.assertTrue(torch.equal(D, D_input))
self.assertTrue(torch.equal(I, I_input))
self.assertTrue(torch.equal(R, R_input))
# tests sa_encode, sa_decode
def test_sa_encode_decode(self):
d = 16
index = faiss.IndexScalarQuantizer(d, faiss.ScalarQuantizer.QT_8bit)
xb = torch.rand(1000, d, dtype=torch.float32)
index.train(xb)
# torch cpu as ground truth
nq = 10
xq = torch.rand(nq, d, dtype=torch.float32)
encoded_torch = index.sa_encode(xq)
# numpy cpu
encoded_np = index.sa_encode(xq.numpy())
self.assertTrue(np.array_equal(encoded_torch.numpy(), encoded_np))
decoded_torch = index.sa_decode(encoded_torch)
decoded_np = index.sa_decode(encoded_np)
self.assertTrue(torch.equal(decoded_torch, torch.from_numpy(decoded_np)))
# torch cpu as output parameter
encoded_torch_param = torch.zeros(nq, d, dtype=torch.uint8)
index.sa_encode(xq, encoded_torch_param)
        self.assertTrue(torch.equal(encoded_torch, encoded_torch_param))
decoded_torch_param = torch.zeros(nq, d, dtype=torch.float32)
index.sa_decode(encoded_torch, decoded_torch_param)
self.assertTrue(torch.equal(decoded_torch, decoded_torch_param))
# np as output parameter
encoded_np_param = np.zeros((nq, d), dtype=np.uint8)
index.sa_encode(xq.numpy(), encoded_np_param)
self.assertTrue(np.array_equal(encoded_torch.numpy(), encoded_np_param))
decoded_np_param = np.zeros((nq, d), dtype=np.float32)
index.sa_decode(encoded_np_param, decoded_np_param)
self.assertTrue(np.array_equal(decoded_np, decoded_np_param))
def test_non_contiguous(self):
d = 128
index = faiss.IndexFlatL2(d)
xb = torch.rand(d, 100).transpose(0, 1)
with self.assertRaises(AssertionError):
index.add(xb)
with self.assertRaises(ValueError):
index.add(xb.numpy())
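# Minimal illustration of what these tests exercise (sketch):
#
# import torch
# import faiss
# import faiss.contrib.torch_utils # patches faiss indexes to accept torch tensors
#
# index = faiss.IndexFlatL2(64)
# index.add(torch.rand(1000, 64)) # torch CPU tensor, no .numpy() needed
# D, I = index.search(torch.rand(5, 64), 10) # D and I come back as torch tensors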
|
mit
| 1,375,475,298,643,582,500 | 33.793605 | 81 | 0.609408 | false |
foosel/OctoPrint
|
src/octoprint/vendor/sockjs/tornado/transports/xhr.py
|
1
|
2840
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals
"""
sockjs.tornado.transports.xhr
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Xhr-Polling transport implementation
"""
import logging
from tornado.web import asynchronous
from octoprint.vendor.sockjs.tornado import proto
from octoprint.vendor.sockjs.tornado.transports import pollingbase
from octoprint.vendor.sockjs.tornado.util import bytes_to_str
LOG = logging.getLogger("tornado.general")
class XhrPollingTransport(pollingbase.PollingTransportBase):
"""xhr-polling transport implementation"""
name = 'xhr'
@asynchronous
def post(self, session_id):
# Start response
self.preflight()
self.handle_session_cookie()
self.disable_cache()
# Get or create session without starting heartbeat
if not self._attach_session(session_id, False):
return
# Might get already detached because connection was closed in on_open
if not self.session:
return
if not self.session.send_queue:
self.session.start_heartbeat()
else:
self.session.flush()
def send_pack(self, message, binary=False):
if binary:
raise Exception('binary not supported for XhrPollingTransport')
self.active = False
try:
self.set_header('Content-Type', 'application/javascript; charset=UTF-8')
self.set_header('Content-Length', len(message) + 1)
self.write(message + '\n')
self.flush(callback=self.send_complete)
except IOError:
# If connection dropped, make sure we close offending session instead
# of propagating error all way up.
self.session.delayed_close()
class XhrSendHandler(pollingbase.PollingTransportBase):
def post(self, session_id):
self.preflight()
self.handle_session_cookie()
self.disable_cache()
session = self._get_session(session_id)
if session is None or session.is_closed:
self.set_status(404)
return
data = self.request.body
if not data:
self.write("Payload expected.")
self.set_status(500)
return
try:
messages = proto.json_decode(bytes_to_str(data))
except Exception:
# TODO: Proper error handling
self.write("Broken JSON encoding.")
self.set_status(500)
return
try:
session.on_messages(messages)
except Exception:
LOG.exception('XHR incoming')
session.close()
self.set_status(500)
return
self.set_status(204)
self.set_header('Content-Type', 'text/plain; charset=UTF-8')
|
agpl-3.0
| -8,759,048,714,395,411,000 | 28.278351 | 84 | 0.610563 | false |
apache/bloodhound
|
bloodhound_search/bhsearch/tests/__init__.py
|
2
|
1578
|
# -*- coding: UTF-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
try:
import unittest2 as unittest
except ImportError:
import unittest
from bhsearch.tests import (
api, index_with_whoosh, query_parser, query_suggestion,
search_resources, security, web_ui, whoosh_backend
)
def suite():
test_suite = unittest.TestSuite()
test_suite.addTest(api.suite())
test_suite.addTest(index_with_whoosh.suite())
test_suite.addTest(query_parser.suite())
test_suite.addTest(query_suggestion.suite())
test_suite.addTest(search_resources.suite())
test_suite.addTest(web_ui.suite())
test_suite.addTest(whoosh_backend.suite())
test_suite.addTest(security.suite())
return test_suite
if __name__ == '__main__':
unittest.main(defaultTest='suite')
else:
test_suite = suite()
|
apache-2.0
| -4,713,938,990,775,815,000 | 33.304348 | 63 | 0.723701 | false |
ragibson/Steganography
|
stego_lsb/LSBSteg.py
|
1
|
6997
|
# -*- coding: utf-8 -*-
"""
stego_lsb.LSBSteg
~~~~~~~~~~~~~~~~~
This module contains functions for hiding and recovering
data from bitmap (.bmp and .png) files.
:copyright: (c) 2015 by Ryan Gibson, see AUTHORS.md for more details.
:license: MIT License, see LICENSE.md for more details.
"""
import logging
import os
import sys
from time import time
from PIL import Image
from stego_lsb.bit_manipulation import (
lsb_deinterleave_list,
lsb_interleave_list,
roundup,
)
log = logging.getLogger(__name__)
def _str_to_bytes(x, charset=sys.getdefaultencoding(), errors="strict"):
if x is None:
return None
if isinstance(x, (bytes, bytearray, memoryview)): # noqa
return bytes(x)
if isinstance(x, str):
return x.encode(charset, errors)
if isinstance(x, int):
return str(x).encode(charset, errors)
raise TypeError("Expected bytes")
def prepare_hide(input_image_path, input_file_path):
"""Prepare files for reading and writing for hiding data."""
image = Image.open(input_image_path)
input_file = open(input_file_path, "rb")
return image, input_file
def prepare_recover(steg_image_path, output_file_path):
"""Prepare files for reading and writing for recovering data."""
steg_image = Image.open(steg_image_path)
output_file = open(output_file_path, "wb+")
return steg_image, output_file
def get_filesize(path):
"""Returns the file size in bytes of the file at path"""
return os.stat(path).st_size
def max_bits_to_hide(image, num_lsb):
"""Returns the number of bits we're able to hide in the image using
num_lsb least significant bits."""
# 3 color channels per pixel, num_lsb bits per color channel.
return int(3 * image.size[0] * image.size[1] * num_lsb)
def bytes_in_max_file_size(image, num_lsb):
"""Returns the number of bits needed to store the size of the file."""
return roundup(max_bits_to_hide(image, num_lsb).bit_length() / 8)
def hide_message_in_image(input_image, message, num_lsb):
"""Hides the message in the input image and returns the modified
image object.
"""
start = time()
# in some cases the image might already be opened
if isinstance(input_image, Image.Image):
image = input_image
else:
image = Image.open(input_image)
num_channels = len(image.getdata()[0])
flattened_color_data = [v for t in image.getdata() for v in t]
# We add the size of the input file to the beginning of the payload.
message_size = len(message)
file_size_tag = message_size.to_bytes(
bytes_in_max_file_size(image, num_lsb), byteorder=sys.byteorder
)
data = file_size_tag + _str_to_bytes(message)
log.debug("Files read".ljust(30) + f" in {time() - start:.2f}s")
if 8 * len(data) > max_bits_to_hide(image, num_lsb):
raise ValueError(
f"Only able to hide {max_bits_to_hide(image, num_lsb) // 8} bytes "
+ f"in this image with {num_lsb} LSBs, but {len(data)} bytes were requested"
)
start = time()
flattened_color_data = lsb_interleave_list(flattened_color_data, data, num_lsb)
log.debug(f"{message_size} bytes hidden".ljust(30) + f" in {time() - start:.2f}s")
start = time()
# PIL expects a sequence of tuples, one per pixel
image.putdata(list(zip(*[iter(flattened_color_data)] * num_channels)))
log.debug("Image overwritten".ljust(30) + f" in {time() - start:.2f}s")
return image
def hide_data(
input_image_path, input_file_path, steg_image_path, num_lsb, compression_level
):
"""Hides the data from the input file in the input image."""
if input_image_path is None:
raise ValueError("LSBSteg hiding requires an input image file path")
if input_file_path is None:
raise ValueError("LSBSteg hiding requires a secret file path")
if steg_image_path is None:
raise ValueError("LSBSteg hiding requires an output image file path")
image, input_file = prepare_hide(input_image_path, input_file_path)
image = hide_message_in_image(image, input_file.read(), num_lsb)
input_file.close()
image.save(steg_image_path, compress_level=compression_level)
def recover_message_from_image(input_image, num_lsb):
"""Returns the message from the steganographed image"""
start = time()
if isinstance(input_image, Image.Image):
steg_image = input_image
else:
steg_image = Image.open(input_image)
color_data = [v for t in steg_image.getdata() for v in t]
file_size_tag_size = bytes_in_max_file_size(steg_image, num_lsb)
tag_bit_height = roundup(8 * file_size_tag_size / num_lsb)
bytes_to_recover = int.from_bytes(
lsb_deinterleave_list(
color_data[:tag_bit_height], 8 * file_size_tag_size, num_lsb
),
byteorder=sys.byteorder,
)
maximum_bytes_in_image = (
max_bits_to_hide(steg_image, num_lsb) // 8 - file_size_tag_size
)
if bytes_to_recover > maximum_bytes_in_image:
raise ValueError(
"This image appears to be corrupted.\n"
+ f"It claims to hold {bytes_to_recover} B, "
+ f"but can only hold {maximum_bytes_in_image} B with {num_lsb} LSBs"
)
log.debug("Files read".ljust(30) + f" in {time() - start:.2f}s")
start = time()
data = lsb_deinterleave_list(
color_data, 8 * (bytes_to_recover + file_size_tag_size), num_lsb
)[file_size_tag_size:]
log.debug(
f"{bytes_to_recover} bytes recovered".ljust(30) + f" in {time() - start:.2f}s"
)
return data
def recover_data(steg_image_path, output_file_path, num_lsb):
"""Writes the data from the steganographed image to the output file"""
if steg_image_path is None:
raise ValueError("LSBSteg recovery requires an input image file path")
if output_file_path is None:
raise ValueError("LSBSteg recovery requires an output file path")
steg_image, output_file = prepare_recover(steg_image_path, output_file_path)
data = recover_message_from_image(steg_image, num_lsb)
start = time()
output_file.write(data)
output_file.close()
log.debug("Output file written".ljust(30) + f" in {time() - start:.2f}s")
def analysis(image_file_path, input_file_path, num_lsb):
"""Print how much data we can hide and the size of the data to be hidden"""
if image_file_path is None:
raise ValueError("LSBSteg analysis requires an input image file path")
image = Image.open(image_file_path)
print(
f"Image resolution: ({image.size[0]}, {image.size[1]})\n"
+ f"Using {num_lsb} LSBs, we can hide:".ljust(30)
+ f" {max_bits_to_hide(image, num_lsb) // 8} B"
)
if input_file_path is not None:
print(
"Size of input file:".ljust(30) + f" {get_filesize(input_file_path)} B\n"
)
print("File size tag:".ljust(30) + f" {bytes_in_max_file_size(image, num_lsb)} B")
|
mit
| 5,358,033,740,761,818,000 | 33.810945 | 88 | 0.645991 | false |
tdyas/pants
|
src/python/pants/backend/python/lint/docformatter/rules.py
|
1
|
5663
|
# Copyright 2020 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from dataclasses import dataclass
from typing import Tuple
from pants.backend.python.lint.docformatter.subsystem import Docformatter
from pants.backend.python.lint.python_fmt import PythonFmtFieldSets
from pants.backend.python.rules import download_pex_bin, pex
from pants.backend.python.rules.pex import (
Pex,
PexInterpreterConstraints,
PexRequest,
PexRequirements,
)
from pants.backend.python.subsystems import python_native_code, subprocess_environment
from pants.backend.python.subsystems.subprocess_environment import SubprocessEncodingEnvironment
from pants.backend.python.target_types import PythonSources
from pants.core.goals.fmt import FmtFieldSet, FmtFieldSets, FmtResult
from pants.core.goals.lint import LinterFieldSets, LintResult
from pants.core.util_rules import determine_source_files, strip_source_roots
from pants.core.util_rules.determine_source_files import (
AllSourceFilesRequest,
SourceFiles,
SpecifiedSourceFilesRequest,
)
from pants.engine.fs import Digest, MergeDigests
from pants.engine.process import FallibleProcessResult, Process, ProcessResult
from pants.engine.rules import SubsystemRule, named_rule, rule
from pants.engine.selectors import Get
from pants.engine.unions import UnionRule
from pants.python.python_setup import PythonSetup
from pants.util.strutil import pluralize
@dataclass(frozen=True)
class DocformatterFieldSet(FmtFieldSet):
required_fields = (PythonSources,)
sources: PythonSources
class DocformatterFieldSets(FmtFieldSets):
field_set_type = DocformatterFieldSet
@dataclass(frozen=True)
class SetupRequest:
field_sets: DocformatterFieldSets
check_only: bool
@dataclass(frozen=True)
class Setup:
process: Process
original_digest: Digest
def generate_args(
*, specified_source_files: SourceFiles, docformatter: Docformatter, check_only: bool,
) -> Tuple[str, ...]:
return (
"--check" if check_only else "--in-place",
*docformatter.options.args,
*sorted(specified_source_files.snapshot.files),
)
@rule
async def setup(
request: SetupRequest,
docformatter: Docformatter,
python_setup: PythonSetup,
subprocess_encoding_environment: SubprocessEncodingEnvironment,
) -> Setup:
requirements_pex = await Get[Pex](
PexRequest(
output_filename="docformatter.pex",
requirements=PexRequirements(docformatter.get_requirement_specs()),
interpreter_constraints=PexInterpreterConstraints(
docformatter.default_interpreter_constraints
),
entry_point=docformatter.get_entry_point(),
)
)
if request.field_sets.prior_formatter_result is None:
all_source_files = await Get[SourceFiles](
AllSourceFilesRequest(field_set.sources for field_set in request.field_sets)
)
all_source_files_snapshot = all_source_files.snapshot
else:
all_source_files_snapshot = request.field_sets.prior_formatter_result
specified_source_files = await Get[SourceFiles](
SpecifiedSourceFilesRequest(
(field_set.sources, field_set.origin) for field_set in request.field_sets
)
)
input_digest = await Get[Digest](
MergeDigests((all_source_files_snapshot.digest, requirements_pex.digest))
)
address_references = ", ".join(
sorted(field_set.address.reference() for field_set in request.field_sets)
)
process = requirements_pex.create_process(
python_setup=python_setup,
subprocess_encoding_environment=subprocess_encoding_environment,
pex_path="./docformatter.pex",
pex_args=generate_args(
specified_source_files=specified_source_files,
docformatter=docformatter,
check_only=request.check_only,
),
input_digest=input_digest,
output_files=all_source_files_snapshot.files,
description=(
f"Run Docformatter on {pluralize(len(request.field_sets), 'target')}: "
f"{address_references}."
),
)
return Setup(process, original_digest=all_source_files_snapshot.digest)
@named_rule(desc="Format Python docstrings with docformatter")
async def docformatter_fmt(
field_sets: DocformatterFieldSets, docformatter: Docformatter
) -> FmtResult:
if docformatter.options.skip:
return FmtResult.noop()
setup = await Get[Setup](SetupRequest(field_sets, check_only=False))
result = await Get[ProcessResult](Process, setup.process)
return FmtResult.from_process_result(result, original_digest=setup.original_digest)
@named_rule(desc="Lint Python docstrings with docformatter")
async def docformatter_lint(
field_sets: DocformatterFieldSets, docformatter: Docformatter
) -> LintResult:
if docformatter.options.skip:
return LintResult.noop()
setup = await Get[Setup](SetupRequest(field_sets, check_only=True))
result = await Get[FallibleProcessResult](Process, setup.process)
return LintResult.from_fallible_process_result(result)
def rules():
return [
setup,
docformatter_fmt,
docformatter_lint,
SubsystemRule(Docformatter),
UnionRule(PythonFmtFieldSets, DocformatterFieldSets),
UnionRule(LinterFieldSets, DocformatterFieldSets),
*download_pex_bin.rules(),
*determine_source_files.rules(),
*pex.rules(),
*python_native_code.rules(),
*strip_source_roots.rules(),
*subprocess_environment.rules(),
]
|
apache-2.0
| -8,875,962,643,641,155,000 | 33.530488 | 96 | 0.719054 | false |
bswartz/manila
|
manila/db/migrations/alembic/versions/e9f79621d83f_add_cast_rules_to_readonly_to_share_instances.py
|
1
|
3557
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""add_cast_rules_to_readonly_to_share_instances
Revision ID: e9f79621d83f
Revises: 54667b9cade7
Create Date: 2016-12-01 04:06:33.115054
"""
# revision identifiers, used by Alembic.
revision = 'e9f79621d83f'
down_revision = '54667b9cade7'
from alembic import op
from oslo_log import log
import sqlalchemy as sa
from manila.common import constants
from manila.db.migrations import utils
LOG = log.getLogger(__name__)
def upgrade():
LOG.info("Adding cast_rules_to_readonly column to share instances.")
op.add_column('share_instances',
sa.Column('cast_rules_to_readonly', sa.Boolean,
default=False))
connection = op.get_bind()
shares_table = utils.load_table('shares', connection)
share_instances_table = utils.load_table('share_instances', connection)
# First, set the value of ``cast_rules_to_readonly`` in every existing
# share instance to False
op.execute(
share_instances_table.update().values({
'cast_rules_to_readonly': False,
})
)
# Set the value of ``cast_rules_to_readonly`` to True for secondary
# replicas in 'readable' replication relationships
replicated_shares_query = (
shares_table.select()
.where(shares_table.c.deleted == 'False')
.where(shares_table.c.replication_type
== constants.REPLICATION_TYPE_READABLE)
)
for replicated_share in connection.execute(replicated_shares_query):
# NOTE (gouthamr): Only secondary replicas that are not undergoing a
# 'replication_change' (promotion to active) are considered. When the
# replication change is complete, the share manager will take care
# of ensuring the correct values for the replicas that were involved
# in the transaction.
secondary_replicas_query = (
share_instances_table.select().where(
share_instances_table.c.deleted == 'False').where(
share_instances_table.c.replica_state
!= constants.REPLICA_STATE_ACTIVE).where(
share_instances_table.c.status
!= constants.STATUS_REPLICATION_CHANGE).where(
replicated_share['id'] == share_instances_table.c.share_id
)
)
for replica in connection.execute(secondary_replicas_query):
op.execute(
share_instances_table.update().where(
share_instances_table.c.id == replica.id
).values({
'cast_rules_to_readonly': True,
})
)
op.alter_column('share_instances',
'cast_rules_to_readonly',
existing_type=sa.Boolean,
existing_server_default=False,
nullable=False)
def downgrade():
LOG.info("Removing cast_rules_to_readonly column from share "
"instances.")
op.drop_column('share_instances', 'cast_rules_to_readonly')
|
apache-2.0
| 8,912,947,635,243,359,000 | 34.929293 | 77 | 0.640708 | false |
alan-wu/neon
|
src/opencmiss/neon/core/neonregion.py
|
1
|
15062
|
'''
Copyright 2015 University of Auckland
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
import json
from opencmiss.neon.core.neonmodelsources import deserializeNeonModelSource
from opencmiss.zinc.status import OK as ZINC_OK
from opencmiss.neon.core.misc.neonerror import NeonError
class NeonRegion(object):
def __init__(self, name, zincRegion, parent=None):
self._name = name
self._parent = parent
self._children = []
self._modelSources = []
self._zincRegion = zincRegion
# record whether region was created by ancestor model source; see: _reloadModelSources
self._ancestorModelSourceCreated = False
# callback class, only for root region
if not parent:
self._regionChangeCallbacks = []
self._fieldTypeDict = {}
# def __del__(self):
# print("NeonRegion.__del__ " + self.getDisplayName())
def freeContents(self):
"""
Deletes subobjects of region to help free memory held by Zinc objects earlier.
"""
del self._zincRegion
for child in self._children:
child.freeContents()
def _createBlankCopy(self):
zincRegion = self._zincRegion.createRegion()
if self._name:
zincRegion.setName(self._name)
blankRegion = NeonRegion(self._name, zincRegion, self._parent)
return blankRegion
def _assign(self, source):
"""
Replace contents of self with that of source. Fixes up Zinc parent/child region relationships.
"""
if self._parent:
oldZincRegion = self._zincRegion
zincSiblingAfter = oldZincRegion.getNextSibling()
else:
oldZincRegion = None
zincSiblingAfter = None
self.freeContents()
self._name = source._name
# self._parent = source._parent should not be changed
self._children = source._children
for child in self._children:
child._parent = self
self._modelSources = source._modelSources
self._zincRegion = source._zincRegion
# self._ancestorModelSourceCreated is unchanged
if self._parent:
self._parent._zincRegion.removeChild(oldZincRegion)
self._parent._zincRegion.insertChildBefore(self._zincRegion, zincSiblingAfter)
def _informRegionChange(self, treeChange):
"""
Called by regions when their tree structure changes or zinc regions are rebuilt.
Informs registered clients of change. Root region handle these signals for whole tree.
"""
rootRegion = self
while rootRegion._parent:
rootRegion = rootRegion._parent
for callback in rootRegion._regionChangeCallbacks:
callback(self, treeChange)
def connectRegionChange(self, callableObject):
"""
Request callbacks on region tree changes.
:param callableObject: Callable object taking a NeonRegion argument and a boolean flag which is True if tree
structure below region needs to be rebuilt.
"""
self._regionChangeCallbacks.append(callableObject)
def _loadModelSourceStreams(self, streamInfo):
self._zincRegion.beginHierarchicalChange()
result = self._zincRegion.read(streamInfo)
fieldmodule = self._zincRegion.getFieldmodule()
fieldmodule.defineAllFaces()
self._zincRegion.endHierarchicalChange()
if result != ZINC_OK:
raise NeonError("Failed to load model sources into region " + self.getPath())
def _loadModelSource(self, modelSource):
streamInfo = self._zincRegion.createStreaminformationRegion()
modelSource.addToZincStreaminformationRegion(streamInfo)
self._loadModelSourceStreams(streamInfo)
newRegionCount = self._discoverNewZincRegions()
self._informRegionChange(newRegionCount > 0)
def _loadModelSources(self):
streamInfo = self._zincRegion.createStreaminformationRegion()
for modelSource in self._modelSources:
modelSource.addToZincStreaminformationRegion(streamInfo)
self._loadModelSourceStreams(streamInfo)
def _reload(self):
"""
Must be called when already-loaded model source modified or deleted.
Saves and reloads region tree, starting at ancestor if this region was created by its model source.
"""
if self._ancestorModelSourceCreated:
self._parent._reload()
else:
# beware this breaks parent/child links such as current selection / hierarchical groups
dictSave = self.serialize()
tmpRegion = self._createBlankCopy()
tmpRegion.deserialize(dictSave)
self._assign(tmpRegion)
self._informRegionChange(True)
def _discoverNewZincRegions(self):
"""
Ensure there are Neon regions for every Zinc Region in tree
:return: Number of new descendant regions created
"""
newRegionCount = 0
zincChildRef = self._zincRegion.getFirstChild()
while zincChildRef.isValid():
childName = zincChildRef.getName()
neonChild = self._findChildByName(childName)
if not neonChild:
neonChild = NeonRegion(childName, zincChildRef, self)
neonChild._ancestorModelSourceCreated = True
self._children.append(neonChild)
newRegionCount += (1 + neonChild._discoverNewZincRegions())
zincChildRef = zincChildRef.getNextSibling()
return newRegionCount
def _findChildByName(self, name):
for child in self._children:
if child._name == name:
return child
return None
def _generateChildName(self):
count = len(self._children) + 1
while True:
name = "region" + str(count)
if not self._findChildByName(name):
return name
count += 1
return None
def deserialize(self, dictInput):
if "Model" in dictInput:
model = dictInput["Model"]
if "Sources" in model:
try:
for dictModelSource in model["Sources"]:
modelSource = deserializeNeonModelSource(dictModelSource)
if modelSource:
self._modelSources.append(modelSource)
except NeonError as neonError:
raise NeonError(neonError.getMessage() + " in region " + self.getPath())
self._loadModelSources()
if "Fieldmodule" in dictInput:
# must define fields before scene otherwise referenced fields won't exist
fieldmodule = self._zincRegion.getFieldmodule()
fieldmoduleDescription = json.dumps(dictInput["Fieldmodule"])
result = fieldmodule.readDescription(fieldmoduleDescription)
if result != ZINC_OK:
raise NeonError("Failed to read field module description into region " + self.getPath())
if "Scene" in dictInput:
scene = self._zincRegion.getScene()
sceneDescription = json.dumps(dictInput["Scene"])
result = scene.readDescription(sceneDescription, True)
if result != ZINC_OK:
raise NeonError("Failed to read scene description into region " + self.getPath())
if ("Fieldmodule" in dictInput) and ("Fields" in dictInput["Fieldmodule"]):
# clear IsManaged flags for fields so marked; do last otherwise fields in use by scene may be destroyed
fieldsDict = dictInput["Fieldmodule"]["Fields"]
for fieldDict in fieldsDict:
isManaged = fieldDict["IsManaged"]
if not isManaged:
field = fieldmodule.findFieldByName(fieldDict["Name"])
if field.isValid():
field.setManaged(False)
for currentKey in fieldDict.keys():
if currentKey.find('Field') != -1:
self._fieldTypeDict[fieldDict["Name"]] = currentKey
# following assumes no neon child regions exist, i.e. we are deserializing into a blank region
# for each neon region, ensure there is a matching zinc region in the same order, and recurse
zincChildRef = self._zincRegion.getFirstChild()
if "ChildRegions" in dictInput:
for dictChild in dictInput["ChildRegions"]:
childName = dictChild["Name"]
# see if zinc child with this name created by model source here or in ancestor region
ancestorModelSourceCreated = True
zincChild = self._zincRegion.findChildByName(childName)
if zincChildRef.isValid() and (zincChild == zincChildRef):
zincChildRef = zincChildRef.getNextSibling()
else:
if not zincChild.isValid():
zincChild = self._zincRegion.createRegion()
zincChild.setName(childName)
ancestorModelSourceCreated = False
self._zincRegion.insertChildBefore(zincChild, zincChildRef)
neonChild = NeonRegion(childName, zincChild, self)
neonChild._ancestorModelSourceCreated = ancestorModelSourceCreated
self._children.append(neonChild)
neonChild.deserialize(dictChild)
self._discoverNewZincRegions()
def serialize(self, basePath=None):
dictOutput = {}
if self._name:
dictOutput["Name"] = self._name
dictOutput["Model"] = {}
if self._modelSources:
tmpOutput = []
for modelSource in self._modelSources:
tmpOutput.append(modelSource.serialize(basePath))
dictOutput["Model"]["Sources"] = tmpOutput
if not dictOutput["Model"]:
dictOutput.pop("Model")
if self._zincRegion:
fieldmodule = self._zincRegion.getFieldmodule()
fieldmoduleDescription = fieldmodule.writeDescription()
dictOutput["Fieldmodule"] = json.loads(fieldmoduleDescription)
scene = self._zincRegion.getScene()
sceneDescription = scene.writeDescription()
dictOutput["Scene"] = json.loads(sceneDescription)
if self._children:
tmpOutput = []
for child in self._children:
tmpOutput.append(child.serialize(basePath))
dictOutput["ChildRegions"] = tmpOutput
return dictOutput
def getDisplayName(self):
if self._name:
return self._name
elif not self._parent:
return "/"
return "?"
def getName(self):
return self._name
def getPath(self):
if self._name:
return self._parent.getPath() + self._name + "/"
return "/"
def getParent(self):
return self._parent
def getZincRegion(self):
return self._zincRegion
def getChildCount(self):
return len(self._children)
def getChild(self, index):
return self._children[index]
def getFieldTypeDict(self):
return self._fieldTypeDict
def addFieldTypeToDict(self, field, fieldType):
if field and field.isValid():
self._fieldTypeDict[field.getName()] = fieldType
def replaceFieldTypeKey(self, oldName, newName):
if oldName in self._fieldTypeDict:
self._fieldTypeDict[newName] = self._fieldTypeDict.pop(oldName)
def clear(self):
"""
Clear all contents of region. Can be called for root region
"""
tmpRegion = self._createBlankCopy()
self._assign(tmpRegion)
if self._ancestorModelSourceCreated:
self._reload()
else:
self._informRegionChange(True)
def createChild(self):
"""
Create a child region with a default name
:return: The new Neon Region
"""
childName = self._generateChildName()
zincRegion = self._zincRegion.createChild(childName)
if zincRegion.isValid():
childRegion = NeonRegion(childName, zincRegion, self)
self._children.append(childRegion)
self._informRegionChange(True)
return childRegion
return None
def removeChild(self, childRegion):
"""
Remove child region and destroy
"""
self._children.remove(childRegion)
self._zincRegion.removeChild(childRegion._zincRegion)
childRegion._parent = None
childRegion.freeContents()
if childRegion._ancestorModelSourceCreated:
self._reload()
else:
self._informRegionChange(True)
def remove(self):
"""
Remove self from region tree and destroy; replace with blank region if root
"""
if self._parent:
self._parent.removeChild(self)
else:
self.clear()
def setName(self, name):
if not self._parent:
return False
if len(name) == 0:
return False
if self._ancestorModelSourceCreated:
return False
if ZINC_OK != self._zincRegion.setName(name):
return False
self._name = name
self._informRegionChange(True)
return True
def getModelSources(self):
return self._modelSources
def addModelSource(self, modelSource):
"""
Add model source, applying it if not currently editing
:param modelSource: The model source to add
"""
self._modelSources.append(modelSource)
if not modelSource.isEdit():
self.applyModelSource(modelSource)
def applyModelSource(self, modelSource):
"""
Apply model source, loading it or reloading it with all other sources as required
:param modelSource: The model source to apply
"""
modelSource.setEdit(False)
if modelSource.isLoaded():
self._reload()
else:
self._loadModelSource(modelSource)
def removeModelSource(self, modelSource):
"""
Remove model source, reloading model if it removed source had been loaded
:param modelSource: The model source to remove
"""
self._modelSources.remove(modelSource)
if modelSource.isLoaded():
self._reload()
|
apache-2.0
| 5,086,091,587,092,493,000 | 37.819588 | 116 | 0.615921 | false |
afrendeiro/pipelines
|
lib/fix_bedfile_genome_boundaries.py
|
1
|
1086
|
#!/usr/bin/env python
import csv
import sys
def getChrSizes(chrmFile):
"""
    Reads a tab-delimited file with two columns describing the chromosomes and their lengths.
Returns dictionary of chr:sizes.
"""
with open(chrmFile, 'r') as f:
chrmSizes = {}
for line in enumerate(f):
row = line[1].strip().split('\t')
chrmSizes[str(row[0])] = int(row[1])
return chrmSizes
chrSizes = {
"hg19": "/fhgfs/groups/lab_bock/arendeiro/share/hg19.chrom.sizes",
"mm10": "/fhgfs/groups/lab_bock/arendeiro/share/mm10.chrom.sizes",
"dr7": "/fhgfs/groups/lab_bock/arendeiro/share/danRer7.chrom.sizes"
}
genome = sys.argv[1]
chrms = getChrSizes(chrSizes[genome]) # get size of chromosomes
wr = csv.writer(sys.stdout, delimiter='\t', lineterminator='\n')
for row in csv.reader(iter(sys.stdin.readline, ''), delimiter='\t'):
chrm = row[0]
start = int(row[1])
end = int(row[2])
if chrm in chrms.keys(): # skip weird chromosomes
if start >= 1 and end <= chrms[chrm] and start < end:
wr.writerow(row)
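# A minimal usage sketch (assumption, not part of the original script): the BED
# file is streamed on stdin and the corrected rows are written to stdout, e.g.
#   cat peaks.bed | python fix_bedfile_genome_boundaries.py hg19 > peaks.fixed.bed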
|
gpl-2.0
| 7,434,051,444,998,922,000 | 29.166667 | 87 | 0.632597 | false |
manhg/tokit
|
tokit/postgres.py
|
1
|
4462
|
import logging
import shortuuid
import uuid
import momoko
import momoko.exceptions
import psycopg2
from psycopg2.extras import DictCursor, DictRow, register_uuid
import psycopg2.extensions
from tornado.gen import coroutine, sleep
from tornado.web import HTTPError
import tokit
logger = tokit.logger
class DictLogCursor(DictCursor):
def execute(self, sql, args=None):
logger.debug('Excute SQL: %s', self.mogrify(sql, args).decode())
return super().execute(sql, args)
@tokit.on('init')
def pg_init(app):
""" Hook to init Postgres momoko driver.
dsn config is required, with syntax same as Psycopg2 DSN.
Sample env.ini::
[postgres]
dsn=dbname=[APP_NAME]
size=2
"""
env = app.config.env['postgres']
if env.getboolean('log_momoko'):
logging.getLogger('momoko').setLevel(logger.getEffectiveLevel())
momoko_opts = dict(
dsn=env['dsn'],
size=int(env['size']),
max_size=int(env['max_size']),
auto_shrink=env.getboolean('auto_shrink'),
cursor_factory=(DictLogCursor if env.getboolean('log') else DictCursor),
# connection_factory=env.get('connection_factory', None),
)
register_uuid()
app.pg_db = momoko.Pool(**momoko_opts)
try:
app.pg_db.connect()
except momoko.PartiallyConnectedError:
logger.error('Cannot connect')
class PgMixin:
DbIntegrityError = psycopg2.IntegrityError
DbError = psycopg2.Error
@property
def db(self):
return self.application.pg_db
@coroutine
def pg_insert(self, table, fields=None, **data):
"""
        Postgres shortcut to insert data
:return int new row's id
Example::
user_id = yield self.pg_insert('users', {"username": "foo", "password": "secret"})
"""
if fields:
data = self.get_request_dict(*fields)
else:
fields = list(data.keys())
assert len(data) > 0 # check data
values = list(data.values())
sql = 'INSERT INTO {} ({}) VALUES ({}) RETURNING id ' \
.format(table,
','.join(fields),
','.join(['%s'] * len(fields))
)
cursor = yield self.pg_query(sql, *values)
return cursor.fetchone()[0]
@coroutine
def pg_getconn(self):
try:
connection = yield self.db.getconn()
return connection
except psycopg2.OperationalError:
yield self.db.connect()
yield sleep(0.5)
try:
connection = yield self.db.getconn()
return connection
except:
raise HTTPError(503, "Database unavailable")
except (momoko.Pool.DatabaseNotAvailable, momoko.exceptions.PartiallyConnectedError):
raise HTTPError(503, "Database unavailable")
@coroutine
def pg_update(self, table, data):
id_value = data.pop('id')
changes = [field + ' = %s' for field in data.keys()]
sql = 'UPDATE {} SET {} WHERE id = %s'.format(table, ','.join(changes))
values = list(data.values()) + [id_value]
cursor = yield self.pg_query(sql, *values)
return cursor
@coroutine
def pg_query(self, query, *params):
""" Low level execuation """
connection = yield self.pg_getconn()
with self.db.manage(connection):
cursor = yield connection.execute(query, params)
return cursor
def pg_serialize(self, row):
if not row:
return
ret = dict(row) if isinstance(row, DictRow) else row
return ret
@coroutine
def pg_select(self, query, *params):
"""
Query and convert each returned row
:return generator
"""
result = yield self.pg_query(query, *params)
return (self.pg_serialize(row) for row in result.fetchall())
@coroutine
def pg_one(self, query, *params):
result = yield self.pg_query(query, *params)
row = result.fetchone()
if row:
return self.pg_serialize(row)
db_insert = pg_insert
db_update = pg_update
db_query = pg_query
db_select = pg_select
db_one = pg_one
class UidMixin:
def pg_serialize(self, row):
ret = PgMixin.pg_serialize(self, row)
if 'id' in ret:
ret['short_id'] = shortuuid.encode(ret['id'])
return ret
|
mit
| 4,795,081,817,502,400,000 | 27.240506 | 94 | 0.586732 | false |
alimanfoo/numcodecs
|
numcodecs/tests/test_checksum32.py
|
1
|
1611
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, print_function, division
import itertools
import numpy as np
import pytest
from numcodecs.checksum32 import CRC32, Adler32
from numcodecs.tests.common import (check_encode_decode, check_config, check_repr,
check_backwards_compatibility,
check_err_encode_object_buffer)
# mix of dtypes: integer, float, bool, string
# mix of shapes: 1D, 2D, 3D
# mix of orders: C, F
arrays = [
np.arange(1000, dtype='i4'),
np.linspace(1000, 1001, 1000, dtype='f8'),
np.random.normal(loc=1000, scale=1, size=(100, 10)),
np.random.randint(0, 2, size=1000, dtype=bool).reshape(100, 10, order='F'),
np.random.choice([b'a', b'bb', b'ccc'], size=1000).reshape(10, 10, 10)
]
codecs = [CRC32(), Adler32()]
def test_encode_decode():
for codec, arr in itertools.product(codecs, arrays):
check_encode_decode(arr, codec)
def test_errors():
for codec, arr in itertools.product(codecs, arrays):
enc = codec.encode(arr)
with pytest.raises(RuntimeError):
codec.decode(enc[:-1])
def test_config():
for codec in codecs:
check_config(codec)
def test_repr():
check_repr("CRC32()")
check_repr("Adler32()")
def test_backwards_compatibility():
check_backwards_compatibility(CRC32.codec_id, arrays, [CRC32()])
check_backwards_compatibility(Adler32.codec_id, arrays, [Adler32()])
def test_err_encode_object_buffer():
check_err_encode_object_buffer(CRC32())
check_err_encode_object_buffer(Adler32())
|
mit
| -160,882,542,628,057,300 | 26.305085 | 82 | 0.646182 | false |
igurrutxaga/tvalacarta
|
python/main-classic/servers/aragontv.py
|
1
|
4990
|
# -*- coding: utf-8 -*-
#------------------------------------------------------------
# pelisalacarta - XBMC Plugin
# Connector for aragontv
# http://blog.tvalacarta.info/plugin-xbmc/pelisalacarta/
#------------------------------------------------------------
import urlparse,urllib2,urllib,re
import os
from core import scrapertools
from core import logger
from core import config
def get_video_url( page_url , premium = False , user="" , password="", video_password="", page_data="" ):
logger.info("[aragontv.py] get_video_url(page_url='%s')" % page_url)
# BEFORE
#url:'mp4%3A%2F_archivos%2Fvideos%2Fweb%2F2910%2F2910.mp4',
#netConnectionUrl: 'rtmp%3A%2F%2Falacarta.aragontelevision.es%2Fvod'
#rtmp://iasoftvodfs.fplive.net/iasoftvod/web/980/980.mp4
# NOW
#{ url:'mp4%3A%2Fweb%2F5573%2F5573.mp4', provider: 'rtmp' }
#netConnectionUrl: 'rtmp%3A%2F%2Faragontvvodfs.fplive.net%2Faragontvvod'
#rtmp://aragontvvodfs.fplive.net/aragontvvod/web/980/980.mp4
itemlist = []
    # Check whether this is a regular page
url = get_video_url_from_page(page_url)
    # Now try the English-course video pages, whose URLs are built differently
    # because of a bug on the Aragon TV website.
    # The problem is that the full URL is not exposed, so it has to be deduced.
if url == "":
        # Extract the video title from the URL
# http://alacarta.aragontelevision.es/nivel-basico-i-cap-65-parte-2-30092012-1014
# nivel-basico-i-cap-65-parte-2-30092012-1014
fragmentos = page_url.split("/")
titulo = fragmentos[ len(fragmentos)-1 ]
logger.info("titulo="+titulo)
if "basico-i-" in titulo:
page_url = "http://alacarta.aragontelevision.es/programas/vaughan/basico-i/"+titulo
elif "basico-ii-" in titulo:
page_url = "http://alacarta.aragontelevision.es/programas/vaughan/basico-ii/"+titulo
elif "intermedio-i-" in titulo:
page_url = "http://alacarta.aragontelevision.es/programas/vaughan/intermedio-i/"+titulo
elif "intermedio-ii-" in titulo:
page_url = "http://alacarta.aragontelevision.es/programas/vaughan/intermedio-ii/"+titulo
elif "stuff-" in titulo:
page_url = "http://alacarta.aragontelevision.es/programas/vaughan/stuff/"+titulo
elif "common-mistakes-" in titulo:
page_url = "http://alacarta.aragontelevision.es/programas/vaughan/common-mistakes/"+titulo
        # Try again
url = get_video_url_from_page(page_url)
if url == "":
            # If it still does not work, try every programme folder to find the right one
page_url = "http://alacarta.aragontelevision.es/programas/vaughan/basico-i/"+titulo
url = get_video_url_from_page(page_url)
if url=="":
page_url = "http://alacarta.aragontelevision.es/programas/vaughan/basico-ii/"+titulo
url = get_video_url_from_page(page_url)
if url=="":
page_url = "http://alacarta.aragontelevision.es/programas/vaughan/intermedio-i/"+titulo
url = get_video_url_from_page(page_url)
if url=="":
page_url = "http://alacarta.aragontelevision.es/programas/vaughan/intermedio-ii/"+titulo
url = get_video_url_from_page(page_url)
if url=="":
page_url = "http://alacarta.aragontelevision.es/programas/vaughan/stuff/"+titulo
url = get_video_url_from_page(page_url)
if url=="":
page_url = "http://alacarta.aragontelevision.es/programas/vaughan/common-mistakes/"+titulo
url = get_video_url_from_page(page_url)
video_urls = []
if url != "":
video_urls.append( [ "para Web (rtmp) [aragontv]" , url ] )
for video_url in video_urls:
logger.info("[aragontv.py] %s - %s" % (video_url[0],video_url[1]))
return video_urls
def get_video_url_from_page(page_url):
    # Download the page
data = scrapertools.cache_page(page_url)
try:
final = scrapertools.get_match(data,"url\:'(mp4\%3A[^']+)'")
principio = scrapertools.get_match(data,"netConnectionUrl\: '([^']+)'")
if urllib.unquote(principio).startswith("rtmp://aragon") or urllib.unquote(principio).startswith("rtmp://iasoft"):
url = principio+"/"+final[9:]
else:
url = principio+"/"+final
url = urllib.unquote(url)
host = scrapertools.find_single_match(url,'(rtmp://[^/]+)')
app = scrapertools.find_single_match(url,'rtmp://[^/]+/(.*?)/mp4\:')
playpath = scrapertools.find_single_match(url,'rtmp://[^/]+/.*?/(mp4\:.*?)$')
url = host+' app='+app+' playpath='+playpath
logger.info("url="+url)
except:
url = ""
logger.info("url NO encontrada")
return url
# Finds videos from this server in the given text
def find_videos(data):
encontrados = set()
devuelve = []
return devuelve
|
gpl-3.0
| -3,190,599,483,489,724,000 | 39.504065 | 122 | 0.614613 | false |