# toontown.safezone.GameTutorials
from panda3d.core import Vec4
from direct.gui.DirectGui import *
from direct.fsm import FSM
from direct.directnotify import DirectNotifyGlobal
from toontown.toonbase import ToontownGlobals
from toontown.toonbase import TTLocalizer
from direct.interval.IntervalGlobal import *
class ChineseTutorial(DirectFrame, FSM.FSM):
def __init__(self, doneFunction, doneEvent = None, callback = None):
FSM.FSM.__init__(self, 'ChineseTutorial')
self.doneFunction = doneFunction
base.localAvatar.startSleepWatch(self.handleQuit)
self.doneEvent = doneEvent
self.callback = callback
self.setStateArray(['Page1', 'Page2', 'Quit'])
DirectFrame.__init__(self, pos=(-0.7, 0.0, 0.0), image_color=ToontownGlobals.GlobalDialogColor, image_scale=(1.0, 1.5, 1.0), text='', text_scale=0.06)
self.accept('stoppedAsleep', self.handleQuit)
self['image'] = DGG.getDefaultDialogGeom()
self.title = DirectLabel(self, relief=None, text='', text_pos=(0.0, 0.4), text_fg=(1, 0, 0, 1), text_scale=0.13, text_font=ToontownGlobals.getSignFont())
images = loader.loadModel('phase_6/models/golf/checker_tutorial')
images.setTransparency(1)
self.iPage1 = images.find('**/tutorialPage1*')
self.iPage1.reparentTo(aspect2d)
self.iPage1.setPos(0.43, -0.1, 0.0)
self.iPage1.setScale(13.95)
self.iPage1.setTransparency(1)
self.iPage1.hide()
self.iPage1.getChildren()[1].hide()
self.iPage2 = images.find('**/tutorialPage3*')
self.iPage2.reparentTo(aspect2d)
self.iPage2.setPos(0.43, -0.1, 0.5)
self.iPage2.setScale(13.95)
self.iPage2.setTransparency(1)
self.iPage2.hide()
self.iPage3 = images.find('**/tutorialPage2*')
self.iPage3.reparentTo(aspect2d)
self.iPage3.setPos(0.43, -0.1, -0.5)
self.iPage3.setScale(13.95)
self.iPage3.setTransparency(1)
self.iPage3.hide()
buttons = loader.loadModel('phase_3/models/gui/dialog_box_buttons_gui')
gui = loader.loadModel('phase_3.5/models/gui/friendslist_gui')
self.bNext = DirectButton(self, image=(gui.find('**/Horiz_Arrow_UP'),
gui.find('**/Horiz_Arrow_DN'),
gui.find('**/Horiz_Arrow_Rllvr'),
gui.find('**/Horiz_Arrow_UP')), image3_color=Vec4(1, 1, 1, 0.5), relief=None, text=TTLocalizer.ChineseTutorialNext, text3_fg=Vec4(0, 0, 0, 0.5), text_scale=0.05, text_pos=(0.0, -0.1), pos=(0.35, -0.3, -0.33), command=self.requestNext)
self.bPrev = DirectButton(self, image=(gui.find('**/Horiz_Arrow_UP'),
gui.find('**/Horiz_Arrow_DN'),
gui.find('**/Horiz_Arrow_Rllvr'),
gui.find('**/Horiz_Arrow_UP')), image3_color=Vec4(1, 1, 1, 0.5), image_scale=(-1.0, 1.0, 1.0), relief=None, text=TTLocalizer.ChineseTutorialPrev, text3_fg=Vec4(0, 0, 0, 0.5), text_scale=0.05, text_pos=(0.0, -0.1), pos=(-0.35, -0.3, -0.33), command=self.requestPrev)
self.bQuit = DirectButton(self, image=(buttons.find('**/ChtBx_OKBtn_UP'), buttons.find('**/ChtBx_OKBtn_DN'), buttons.find('**/ChtBx_OKBtn_Rllvr')), relief=None, text=TTLocalizer.ChineseTutorialDone, text_scale=0.05, text_pos=(0.0, -0.1), pos=(0.0, -0.3, -0.33), command=self.handleQuit)
self.bQuit.hide()
buttons.removeNode()
gui.removeNode()
self.request('Page1')
return
def __del__(self):
self.cleanup()
def enterPage1(self, *args):
self.bNext.show()
self.title['text'] = (TTLocalizer.ChineseTutorialTitle1,)
self['text'] = TTLocalizer.ChinesePage1
self['text_pos'] = (0.0, 0.23)
self['text_wordwrap'] = 13.5
self.bPrev['state'] = DGG.DISABLED
self.bPrev.hide()
self.bNext['state'] = DGG.NORMAL
self.iPage1.show()
self.blinker = Sequence()
obj = self.iPage1.getChildren()[1]
self.iPage1.getChildren()[1].show()
self.blinker.append(LerpColorInterval(obj, 0.5, Vec4(0.5, 0.5, 0, 0.0), Vec4(0.2, 0.2, 0.2, 1)))
self.blinker.append(LerpColorInterval(obj, 0.5, Vec4(0.2, 0.2, 0.2, 1), Vec4(0.5, 0.5, 0, 0.0)))
self.blinker.loop()
def exitPage1(self, *args):
self.bPrev['state'] = DGG.NORMAL
self.iPage1.hide()
self.iPage1.getChildren()[1].hide()
self.blinker.finish()
def enterPage2(self, *args):
self.bPrev.show()
self.title['text'] = (TTLocalizer.ChineseTutorialTitle2,)
self['text'] = TTLocalizer.ChinesePage2
self['text_pos'] = (0.0, 0.28)
self['text_wordwrap'] = 12.5
self.bNext['state'] = DGG.DISABLED
self.bNext.hide()
self.iPage2.show()
self.iPage3.show()
self.bQuit.show()
def exitPage2(self, *args):
self.iPage2.hide()
self.bQuit.hide()
self.iPage3.hide()
def enterQuit(self, *args):
self.iPage1.removeNode()
self.iPage2.removeNode()
self.iPage3.removeNode()
self.bNext.destroy()
self.bPrev.destroy()
self.bQuit.destroy()
DirectFrame.destroy(self)
def exitQuit(self, *args):
pass
def handleQuit(self, task = None):
base.cr.playGame.getPlace().setState('walk')
self.forceTransition('Quit')
self.doneFunction()
if task is not None:
    return task.done
return
class CheckersTutorial(DirectFrame, FSM.FSM):
def __init__(self, doneFunction, doneEvent = None, callback = None):
FSM.FSM.__init__(self, 'CheckersTutorial')
self.doneFunction = doneFunction
base.localAvatar.startSleepWatch(self.handleQuit)
self.doneEvent = doneEvent
self.callback = callback
self.setStateArray(['Page1',
'Page2',
'Page3',
'Quit'])
DirectFrame.__init__(self, pos=(-0.7, 0.0, 0.0), image_color=ToontownGlobals.GlobalDialogColor, image_scale=(1.0, 1.5, 1.0), text='', text_scale=0.06)
self.accept('stoppedAsleep', self.handleQuit)
self['image'] = DGG.getDefaultDialogGeom()
self.title = DirectLabel(self, relief=None, text='', text_pos=(0.0, 0.4), text_fg=(1, 0, 0, 1), text_scale=0.13, text_font=ToontownGlobals.getSignFont())
images = loader.loadModel('phase_6/models/golf/regularchecker_tutorial')
images.setTransparency(1)
self.iPage1 = images.find('**/tutorialPage1*')
self.iPage1.reparentTo(aspect2d)
self.iPage1.setPos(0.43, -0.1, 0.0)
self.iPage1.setScale(0.4)
self.iPage1.setTransparency(1)
self.iPage1.hide()
self.iPage2 = images.find('**/tutorialPage2*')
self.iPage2.reparentTo(aspect2d)
self.iPage2.setPos(0.43, -0.1, 0.0)
self.iPage2.setScale(0.4)
self.iPage2.setTransparency(1)
self.iPage2.hide()
self.iPage3 = images.find('**/tutorialPage3*')
self.iPage3.reparentTo(aspect2d)
self.iPage3.setPos(0.6, -0.1, 0.5)
self.iPage3.setScale(0.4)
self.iPage3.setTransparency(1)
self.obj = self.iPage3.find('**/king*')
self.iPage3.hide()
self.iPage4 = images.find('**/tutorialPage4*')
self.iPage4.reparentTo(aspect2d)
self.iPage4.setPos(0.6, -0.1, -0.5)
self.iPage4.setScale(0.4)
self.iPage4.setTransparency(1)
self.iPage4.hide()
buttons = loader.loadModel('phase_3/models/gui/dialog_box_buttons_gui')
gui = loader.loadModel('phase_3.5/models/gui/friendslist_gui')
self.bNext = DirectButton(self, image=(gui.find('**/Horiz_Arrow_UP'),
gui.find('**/Horiz_Arrow_DN'),
gui.find('**/Horiz_Arrow_Rllvr'),
gui.find('**/Horiz_Arrow_UP')), image3_color=Vec4(1, 1, 1, 0.5), relief=None, text=TTLocalizer.ChineseTutorialNext, text3_fg=Vec4(0, 0, 0, 0.5), text_scale=0.05, text_pos=(0.0, -0.08), pos=(0.35, -0.3, -0.38), command=self.requestNext)
self.bPrev = DirectButton(self, image=(gui.find('**/Horiz_Arrow_UP'),
gui.find('**/Horiz_Arrow_DN'),
gui.find('**/Horiz_Arrow_Rllvr'),
gui.find('**/Horiz_Arrow_UP')), image3_color=Vec4(1, 1, 1, 0.5), image_scale=(-1.0, 1.0, 1.0), relief=None, text=TTLocalizer.ChineseTutorialPrev, text3_fg=Vec4(0, 0, 0, 0.5), text_scale=0.05, text_pos=(0.0, -0.08), pos=(-0.35, -0.3, -0.38), command=self.requestPrev)
self.bQuit = DirectButton(self, image=(buttons.find('**/ChtBx_OKBtn_UP'), buttons.find('**/ChtBx_OKBtn_DN'), buttons.find('**/ChtBx_OKBtn_Rllvr')), relief=None, text=TTLocalizer.ChineseTutorialDone, text_scale=0.05, text_pos=(0.0, -0.1), pos=(0.0, -0.3, -0.38), command=self.handleQuit)
self.bQuit.hide()
buttons.removeNode()
gui.removeNode()
self.request('Page1')
return
def __del__(self):
self.cleanup()
def enterPage1(self, *args):
self.bNext.show()
self.title['text'] = (TTLocalizer.ChineseTutorialTitle1,)
self['text'] = TTLocalizer.CheckersPage1
self['text_pos'] = (0.0, 0.23)
self['text_wordwrap'] = 13.5
self['text_scale'] = 0.06
self.bPrev['state'] = DGG.DISABLED
self.bPrev.hide()
self.bNext['state'] = DGG.NORMAL
self.iPage1.show()
def exitPage1(self, *args):
self.bPrev['state'] = DGG.NORMAL
self.iPage1.hide()
def enterPage2(self, *args):
self.bPrev.show()
self.bNext.show()
self.title['text'] = (TTLocalizer.ChineseTutorialTitle2,)
self['text'] = TTLocalizer.CheckersPage2
self['text_pos'] = (0.0, 0.28)
self['text_wordwrap'] = 12.5
self['text_scale'] = 0.06
self.bNext['state'] = DGG.NORMAL
self.iPage2.show()
def exitPage2(self, *args):
self.iPage2.hide()
def enterPage3(self, *args):
self.bPrev.show()
self.title['text'] = (TTLocalizer.ChineseTutorialTitle2,)
self['text'] = TTLocalizer.CheckersPage3 + '\n\n' + TTLocalizer.CheckersPage4
self['text_pos'] = (0.0, 0.32)
self['text_wordwrap'] = 19
self['text_scale'] = 0.05
self.bNext['state'] = DGG.DISABLED
self.blinker = Sequence()
self.blinker.append(LerpColorInterval(self.obj, 0.5, Vec4(0.5, 0.5, 0, 0.0), Vec4(0.9, 0.9, 0, 1)))
self.blinker.append(LerpColorInterval(self.obj, 0.5, Vec4(0.9, 0.9, 0, 1), Vec4(0.5, 0.5, 0, 0.0)))
self.blinker.loop()
self.bNext.hide()
self.iPage3.show()
self.iPage4.show()
self.bQuit.show()
def exitPage3(self, *args):
self.blinker.finish()
self.iPage3.hide()
self.bQuit.hide()
self.iPage4.hide()
def enterQuit(self, *args):
self.iPage1.removeNode()
self.iPage2.removeNode()
self.iPage3.removeNode()
self.bNext.destroy()
self.bPrev.destroy()
self.bQuit.destroy()
DirectFrame.destroy(self)
def exitQuit(self, *args):
pass
def handleQuit(self, task = None):
self.forceTransition('Quit')
base.cr.playGame.getPlace().setState('walk')
self.doneFunction()
if task is not None:
    return task.done
return
class FindFourTutorial(DirectFrame, FSM.FSM):
def __init__(self, doneFunction, doneEvent = None, callback = None):
FSM.FSM.__init__(self, 'FindFourTutorial')
self.doneFunction = doneFunction
base.localAvatar.startSleepWatch(self.handleQuit)
self.doneEvent = doneEvent
self.callback = callback
self.setStateArray(['Page1', 'Page2', 'Quit'])
DirectFrame.__init__(self, pos=(-0.7, 0.0, 0.0), image_color=ToontownGlobals.GlobalDialogColor, image_scale=(1.0, 1.5, 1.0), text='', text_scale=0.06)
self.accept('stoppedAsleep', self.handleQuit)
self['image'] = DGG.getDefaultDialogGeom()
self.title = DirectLabel(self, relief=None, text='', text_pos=(0.0, 0.4), text_fg=(1, 0, 0, 1), text_scale=0.13, text_font=ToontownGlobals.getSignFont())
buttons = loader.loadModel('phase_3/models/gui/dialog_box_buttons_gui')
gui = loader.loadModel('phase_3.5/models/gui/friendslist_gui')
self.bNext = DirectButton(self, image=(gui.find('**/Horiz_Arrow_UP'),
gui.find('**/Horiz_Arrow_DN'),
gui.find('**/Horiz_Arrow_Rllvr'),
gui.find('**/Horiz_Arrow_UP')), image3_color=Vec4(1, 1, 1, 0.5), relief=None, text=TTLocalizer.ChineseTutorialNext, text3_fg=Vec4(0, 0, 0, 0.5), text_scale=0.05, text_pos=(0.0, -0.1), pos=(0.35, -0.3, -0.33), command=self.requestNext)
self.bPrev = DirectButton(self, image=(gui.find('**/Horiz_Arrow_UP'),
gui.find('**/Horiz_Arrow_DN'),
gui.find('**/Horiz_Arrow_Rllvr'),
gui.find('**/Horiz_Arrow_UP')), image3_color=Vec4(1, 1, 1, 0.5), image_scale=(-1.0, 1.0, 1.0), relief=None, text=TTLocalizer.ChineseTutorialPrev, text3_fg=Vec4(0, 0, 0, 0.5), text_scale=0.05, text_pos=(0.0, -0.1), pos=(-0.35, -0.3, -0.33), command=self.requestPrev)
self.bQuit = DirectButton(self, image=(buttons.find('**/ChtBx_OKBtn_UP'), buttons.find('**/ChtBx_OKBtn_DN'), buttons.find('**/ChtBx_OKBtn_Rllvr')), relief=None, text=TTLocalizer.ChineseTutorialDone, text_scale=0.05, text_pos=(0.0, -0.1), pos=(0.0, -0.3, -0.33), command=self.handleQuit)
self.bQuit.hide()
buttons.removeNode()
gui.removeNode()
self.request('Page1')
return
def __del__(self):
self.cleanup()
def enterPage1(self, *args):
self.bNext.show()
self.title['text'] = (TTLocalizer.ChineseTutorialTitle1,)
self['text'] = TTLocalizer.FindFourPage1
self['text_pos'] = (0.0, 0.23)
self['text_wordwrap'] = 13.5
self.bPrev['state'] = DGG.DISABLED
self.bPrev.hide()
self.bNext['state'] = DGG.NORMAL
def exitPage1(self, *args):
self.bPrev['state'] = DGG.NORMAL
def enterPage2(self, *args):
self.bPrev.show()
self.title['text'] = (TTLocalizer.ChineseTutorialTitle2,)
self['text'] = TTLocalizer.FindFourPage2
self['text_pos'] = (0.0, 0.28)
self['text_wordwrap'] = 12.5
self.bNext['state'] = DGG.DISABLED
self.bNext.hide()
self.bQuit.show()
def exitPage2(self, *args):
self.bQuit.hide()
def enterQuit(self, *args):
self.bNext.destroy()
self.bPrev.destroy()
self.bQuit.destroy()
DirectFrame.destroy(self)
def exitQuit(self, *args):
pass
def handleQuit(self, task = None):
base.cr.playGame.getPlace().setState('walk')
self.forceTransition('Quit')
self.doneFunction()
if task is not None:
    return task.done
return
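# Hedged usage sketch (assumes a running Toontown client where the `base` and
# `loader` builtins exist; the callback name below is illustrative):
#
#     def _onTutorialDone():
#         pass  # hypothetical callback; resume the minigame here
#
#     tutorial = ChineseTutorial(_onTutorialDone)
#
# Each tutorial drives itself through its FSM pages; `handleQuit` restores the
# avatar's 'walk' state and invokes the done function.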
|
# -*- coding=utf-8 -*-
import datetime
import os
import shutil
import subprocess
from jinja2 import ChoiceLoader
import pkg_resources
from six import u
from starterpyth.cliforms import BaseForm
from starterpyth.utils import binary_path, walk
from starterpyth.log import display, GREEN, CYAN, RED
from starterpyth.translation import ugettext as _
__author__ = 'flanker'
class Model(object):
name = None
template_includes = [('starterpyth', 'templates/includes')]
include_suffix = '_inc'
template_suffix = '_tpl'
class ExtraForm(BaseForm):
pass
def __init__(self, base_context):
"""
:param base_context: dictionary with the following keys:
string values
* project_name: explicit name of the project ( [a-zA-Z_\-]\w* )
* module_name: Python base module ( [a-z][\-_a-z0-9]* )
some boolean values:
* use_py2, use_py3: use Python 2 or Python 3
* use_py26, use_py27, use_py30, use_py31, use_py32, use_py33, use_py34, use_py35
* use_six, use_2to3: use six or 2to3 for Python 2&3 compatibility
"""
self.global_context = base_context
self.file_context = None
@property
def template_roots(self):
result = []
return result
def run(self, interactive=True):
project_root = self.global_context['project_root']
if os.path.exists(project_root):
if self.global_context['overwrite']:
if os.path.isdir(project_root):
shutil.rmtree(project_root)
else:
os.remove(project_root)
else:
display(_('Destination path already exists!'), color=RED, bold=True)
return
context = self.get_context()
self.global_context.update(context)
extra_form = self.get_extraform(interactive=interactive)
self.global_context.update(extra_form)
extra_context = self.get_extracontext()
self.global_context.update(extra_context)
filters = self.get_template_filters()
self.set_virtualenvs()
for modname, dirname in self.template_roots:
display('dirname %s' % dirname, color=CYAN)
env = self.get_environment(modname, dirname, filters)
self.write_files(modname, dirname, env)
def set_virtualenvs(self):
virtualenv_path = None
virtualenv_version = None
for k in ('26', '27', '30', '31', '32', '33', '34', '35'):
v = '%s.%s' % (k[0], k[1])
if self.global_context['create_venv%s' % k]:
if self.global_context['virtualenv_present']:
virtualenv_path = ('~/.virtualenvs/%s%s' % (self.global_context['module_name'], k))
python_path = binary_path('python%s' % v)
subprocess.check_call(['virtualenv', os.path.expanduser(virtualenv_path), '-p', python_path])
cmd_list = [os.path.join(os.path.expanduser(virtualenv_path), 'bin', 'python'), '--version']
p = subprocess.Popen(cmd_list, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
content = p.communicate()
if content[0]: # Python 3 prints version on stdout
# noinspection PyUnresolvedReferences
virtualenv_version = content[0].decode('utf-8').strip()
else: # Python 2 prints version on stderr
# noinspection PyUnresolvedReferences
virtualenv_version = content[1].decode('utf-8').strip()
self.global_context['virtualenv'] = (virtualenv_path, virtualenv_version)
# noinspection PyMethodMayBeStatic
def get_context(self):
values = {'encoding': 'utf-8', 'entry_points': {}, 'cmdclass': {}, 'ext_modules': [],
'install_requires': [], 'setup_requires': [], 'classifiers': []}
if self.global_context['use_six']:
values['install_requires'] += ['six', 'setuptools>=1.0', ]
values['setup_requires'] += ['six', 'setuptools>=1.0', ]
license_fd = pkg_resources.resource_stream('starterpyth',
'data/licenses/%s.txt' % self.global_context['license'])
values['license_content'] = license_fd.read().decode('utf-8')
values['copyright'] = u('%d, %s') % (datetime.date.today().year, self.global_context['author'])
self.global_context['used_python_versions'] = []
values['classifiers'] += ['Development Status :: 3 - Alpha',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX :: BSD',
'Operating System :: POSIX :: Linux',
'Operating System :: Unix',
]
lic = {'CeCILL-A': 'License :: OSI Approved :: CEA CNRS Inria Logiciel Libre License, version 2.1 (CeCILL-2.1)',
'CeCILL-B': 'License :: OSI Approved :: CEA CNRS Inria Logiciel Libre License, version 2.1 (CeCILL-2.1)',
'BSD-2-clauses': 'License :: OSI Approved :: BSD License',
'Apache-2': 'License :: OSI Approved :: Apache Software License',
'CeCILL-C': 'License :: OSI Approved :: CEA CNRS Inria Logiciel Libre License, version 2.1 (CeCILL-2.1)',
'GPL-2': 'License :: OSI Approved :: GNU General Public License v2 (GPLv2)',
'GPL-3': 'License :: OSI Approved :: GNU General Public License v3 (GPLv3)',
'LGPL-2': 'License :: OSI Approved :: GNU Lesser General Public License v2 (LGPLv2)',
'LGPL-3': 'License :: OSI Approved :: GNU Lesser General Public License v3 (LGPLv3)',
'MIT': 'License :: OSI Approved :: MIT License',
'APSL': 'License :: OSI Approved :: Apple Public Source License',
'PSFL': 'License :: OSI Approved :: Python Software Foundation License',
}
values['classifiers'] += [lic[self.global_context['license']]]
for k in ('26', '27', '30', '31', '32', '33', '34', '35'):
v = '%s.%s' % (k[0], k[1])
if self.global_context['use_py%s' % k]:
values['classifiers'] += ['Programming Language :: Python :: %s' % v]
self.global_context['used_python_versions'].append(v)
if not self.global_context['use_py2']:
values['classifiers'] += ['Programming Language :: Python :: 3 :: Only']
elif not self.global_context['use_py3']:
values['classifiers'] += ['Programming Language :: Python :: 2 :: Only']
values['tox_used_python_versions'] = [('py' + x[0] + x[-1]) for x in
self.global_context['used_python_versions']]
return values
# noinspection PyMethodMayBeStatic
def get_extracontext(self):
return {}
def get_extraform(self, interactive=True):
form = self.ExtraForm(extra_env=self.global_context)
values = form.read(interactive=interactive)
return values
# noinspection PyMethodMayBeStatic,PyUnusedLocal
def process_directory_or_file(self, src_path, dst_path, name, is_directory):
"""
:param src_path: source path, relative to python module
:param dst_path: absolute destination path
:param name: basename of the file or directory to be processed
:return:
"""
if name in ['.svn', '.git', '.hg', 'CVS'] or name[-len(self.include_suffix):] == self.include_suffix:
return False
return True
# noinspection PyMethodMayBeStatic
def get_environment(self, modname, dirname, filters):
"""
Return a valid Jinja2 environment (with filters)
:param modname:
:param dirname:
:param filters: dictionary of extra filters for jinja2
:return:
"""
from jinja2 import Environment, PackageLoader
loaders = [PackageLoader(modname, dirname)]
for modname, dirname in self.template_includes:
loaders.append(PackageLoader(modname, dirname))
loader = ChoiceLoader(loaders)
env = Environment(loader=loader)
env.filters.update(filters)
return env
def write_files(self, modname, dirname, env):
"""
Write all templated or raw files to the new project. All templates are rendered twice.
This behaviour makes it possible to determine which functions must be imported at the beginning of Python files
:param modname: module containing template files
:param dirname: dirname containing template files in the module `modname`
:param env: Jinja2 environment
:return:
"""
from jinja2 import Template
project_root = self.global_context['project_root']
# creation of the project directory if needed
if not os.path.isdir(project_root):
os.makedirs(project_root)
display(_('Directory %(f)s created.') % {'f': project_root}, color=GREEN)
# noinspection PyTypeChecker
prefix_len = len(dirname) + 1
def get_path(root_, name):
"""return relative source path (to template dir) and absolute destination path"""
src_path_ = (root_ + '/' + name)[prefix_len:]
dst_path_ = src_path_
if os.sep != '/':
dst_path_ = dst_path_.replace('/', os.sep)
if dst_path_.find('{') > -1: # the name of the file is templated
dst_path_ = Template(dst_path_).render(**self.global_context)
if dst_path_[-len(self.template_suffix):] == self.template_suffix:
dst_path_ = dst_path_[:-len(self.template_suffix)]
return src_path_, os.path.join(project_root, dst_path_)
# walk through all files (raw and templates) in modname/dirname and write them to destination
for root, dirnames, filenames in walk(modname, dirname):
for dirname in dirnames:
src_path, dst_path = get_path(root, dirname)
if not self.process_directory_or_file(src_path, dst_path, dirname, True):
continue
if not os.path.isdir(dst_path):
os.makedirs(dst_path)
display(_('Directory %(f)s created.') % {'f': dst_path}, color=GREEN)
for filename in filenames:
src_path, dst_path = get_path(root, filename)
if not self.process_directory_or_file(src_path, dst_path, filename, False):
continue
if not os.path.isdir(os.path.dirname(dst_path)):
continue
if filename[-len(self.template_suffix):] == self.template_suffix:
self.file_context = {'render_pass': 1}
template = env.get_template(src_path)
f_out = open(dst_path, 'wb')
self.file_context.update(self.global_context)
template.render(**self.file_context)
self.file_context['render_pass'] = 2
template_content = template.render(**self.file_context).encode('utf-8')
f_out.write(template_content)
f_out.close()
display(_('Template %(f)s written.') % {'f': dst_path}, color=GREEN)
else:
f_out = open(dst_path, 'wb')
f_in = pkg_resources.resource_stream(modname, root + '/' + filename)
data = f_in.read(10240)
while data:
f_out.write(data)
data = f_in.read(10240)
f_in.close()
f_out.close()
display(_('File %(f)s written.') % {'f': dst_path}, color=GREEN)
def increment(self, key):
self.file_context[key] = self.file_context.get(key, 0) + 1
def text(self, value):
return self.raw_text(value)
def raw_text(self, value):
if '\n' in value:
prefix = '"""'
elif "'" not in value:
prefix = "'"
elif '"' not in value:
prefix = '"'
else:
value = value.replace("'", "\\'")
prefix = "'"
self.increment('counter_unicode_literals')
return '%s%s%s' % (prefix, value, prefix)
def docstring(self, value):
self.increment('counter_unicode_literals')
return '"""%s"""' % value
def translate(self, value):
if not self.global_context['use_i18n']:
return self.text(value)
self.increment('counter_i18n')
return "_(%s)" % self.text(value)
def binary(self, value):
return 'b' + self.raw_text(value)
def get_template_filters(self):
return {'text': self.text, 'binary': self.binary, 'repr': lambda x: repr(x), 'translate': self.translate,
'docstring': self.docstring, 'raw_text': self.raw_text}
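# A minimal sketch (not part of the original module) exercising the quoting logic
# of the text/raw_text filters outside of a full project-generation run.
def _demo_raw_text_filter():
    model = Model({'use_i18n': False})
    model.file_context = {'render_pass': 1}
    # single-quoted when the value contains no single quote
    assert model.raw_text('hello') == "'hello'"
    # double-quoted when the value contains a single quote but no double quote
    assert model.raw_text("it's") == '"it\'s"'
    # triple-quoted when the value spans several lines
    assert model.raw_text('a\nb') == '"""a\nb"""'
    # each call bumps the counter consulted by the second render pass
    assert model.file_context['counter_unicode_literals'] == 3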
if __name__ == '__main__':
import doctest
doctest.testmod()
|
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
setup(
name="gitrecipe",
version='0.0.2',
description='Simple buildout recipe for downloading git repositories. It uses the system git command and its syntax',
author='Ivan Gromov',
author_email='[email protected]',
url='http://github.com/summerisgone/gitrecipe',
download_url='http://github.com/summerisgone/gitrecipe/zipball/0.1',
classifiers=[
'Development Status :: 3 - Alpha',
'Framework :: Buildout',
'Intended Audience :: Developers',
'License :: Freely Distributable',
'Natural Language :: Russian',
'Operating System :: Microsoft :: Windows',
'Operating System :: Unix',
'Programming Language :: Python :: 2.5',
'Topic :: Software Development :: Version Control',
],
namespace_packages=['recipe'],
packages=find_packages(),
install_requires=['setuptools', 'zc.recipe.egg'],
entry_points={'zc.buildout': ['default = recipe.git:GitRecipe']},
zip_safe=False,
long_description=open('README.rst').read(),
)
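# Hedged sketch of how the zc.buildout entry point above is typically consumed
# from a buildout.cfg (the `repository` option name is illustrative, not
# verified against this recipe's implementation):
#
#     [buildout]
#     parts = myrepo
#
#     [myrepo]
#     recipe = gitrecipe
#     repository = git://example.com/project.git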
|
#! /usr/bin/env python
"""A suite to test file_exist."""
import os
import pytest
import luigi
from luigi.interface import build
from piret.counts import featurecounts as fc
from plumbum.cmd import rm, cp
def test_featurecount():
"""Test star index creation and mapping."""
map_dir = os.path.join("tests/test_count", "processes", "mapping", "samp5")
if os.path.exists(map_dir) is False:
os.makedirs(map_dir)
cp_cmd = ["tests/data/test_prok/processes/mapping/samp5/samp5_srt.bam", map_dir]
cp[cp_cmd]()
build([fc.FeatureCounts(fastq_dic={'samp5':''},
kingdom="prokarya",
gff_file="tests/data/test_prok.gff",
workdir="tests/test_count",
indexfile="",
num_cpus=2,
ref_file="tests/data/test_prok.fna",
fid="ID",
stranded=0)],
local_scheduler=True)
assert os.path.exists("tests/test_count/processes/featureCounts/prokarya/gene_count.tsv") is True
rm_cmd = rm["-rf", "tests/test_count"]
rm_cmd()
|
#! /usr/bin/env python
'''Our code to connect to the HBase backend. It uses the happybase
package, which depends on the Thrift service that (for now) is
part of HBase.'''
from gevent import monkey
monkey.patch_all()
import struct
import happybase
import Hbase_thrift
from . import BaseClient
def column_name(integer):
'''Convert an integer to a column name.'''
return 'f%02d:c' % integer
class Client(BaseClient):
'''Our HBase backend client'''
def __init__(self, name, num_blocks, num_bits, *args, **kwargs):
BaseClient.__init__(self, name, num_blocks, num_bits)
# Time to live in seconds
ttl = kwargs.pop('ttl', None)
if ttl is None:
raise ValueError('a ttl (time to live, in seconds) kwarg must be provided')
self.connection = happybase.Connection(**kwargs)
families = {column_name(i): dict(time_to_live=ttl)
for i in range(self.num_tables)}
try:
self.connection.create_table(name, families)
except Hbase_thrift.AlreadyExists:
pass
self.table = self.connection.table(name)
def delete(self):
'''Delete this database of simhashes'''
if self.table is not None:
self.connection.delete_table(self.name, disable=True)
self.table = None
def insert(self, hash_or_hashes):
'''Insert one (or many) hashes into the database'''
if self.table is None:
return
hashes = hash_or_hashes
if not hasattr(hash_or_hashes, '__iter__'):
hashes = [hash_or_hashes]
for hsh in hashes:
for i in range(self.num_tables):
row_key = struct.pack('!Q',
long(self.corpus.tables[i].permute(hsh)))
self.table.put(row_key, {column_name(i): None})
def find_in_table(self, hsh, table_num, ranges):
'''Return all the results found in this particular table'''
low = struct.pack('!Q', ranges[table_num][0])
high = struct.pack('!Q', ranges[table_num][1])
pairs = self.table.scan(row_start=low, row_stop=high,
columns=[column_name(table_num)])
results = [struct.unpack('!Q', k)[0] for k, v in pairs]
results = [self.corpus.tables[table_num].unpermute(d)
for d in results]
return [h for h in results if
self.corpus.distance(h, hsh) <= self.num_bits]
def find_one(self, hash_or_hashes):
'''Find one near-duplicate for the provided query (or queries)'''
if self.table is None:
return None
hashes = hash_or_hashes
if not hasattr(hash_or_hashes, '__iter__'):
hashes = [hash_or_hashes]
results = []
for hsh in hashes:
ranges = self.ranges(hsh)
found = []
for i in range(self.num_tables):
found = self.find_in_table(hsh, i, ranges)
if found:
results.append(found[0])
break
if not found:
results.append(None)
if not hasattr(hash_or_hashes, '__iter__'):
return results[0]
return results
def find_all(self, hash_or_hashes):
'''Find all near-duplicates for the provided query (or queries)'''
if self.table is None:
return None
hashes = hash_or_hashes
if not hasattr(hash_or_hashes, '__iter__'):
hashes = [hash_or_hashes]
results = []
for hsh in hashes:
ranges = self.ranges(hsh)
found = []
for i in range(self.num_tables):
found.extend(self.find_in_table(hsh, i, ranges))
found = list(set(found))
results.append(found)
if not hasattr(hash_or_hashes, '__iter__'):
return results[0]
return results
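# Hedged usage sketch (assumes an HBase Thrift server reachable with happybase
# defaults, and a BaseClient that derives `num_tables` and `corpus` from the
# num_blocks/num_bits arguments):
#
#     client = Client('simhashes', 6, 3, ttl=86400, host='localhost')
#     client.insert(0x1234567890ABCDEF)
#     match = client.find_one(0x1234567890ABCDEF)    # nearest duplicate or None
#     matches = client.find_all([0x1234567890ABCDEF])  # list of result lists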
|
# Copyright 2016 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
import uuid
from cloudferry import model
from tests.lib.utils import test_local_db
class ExampleReferenced(model.Model):
object_id = model.PrimaryKey()
qux = model.Integer(required=True)
def equals(self, other):
if super(ExampleReferenced, self).equals(other):
return True
return self.qux == other.qux
@classmethod
def create_object(cls, cloud, cloud_obj_id):
with model.Session() as session:
session.store(ExampleReferenced.load({
'object_id': {
'cloud': cloud,
'id': cloud_obj_id,
'type': cls.get_class_qualname(),
},
'qux': 1337,
}))
class ExampleNested(model.Model):
foo = model.String(required=True)
ref = model.Dependency(ExampleReferenced, required=True)
refs = model.Dependency(ExampleReferenced, required=True, many=True)
ref_none = model.Dependency(ExampleReferenced, missing=None,
allow_none=True)
refs_none = model.Dependency(ExampleReferenced, missing=None,
many=True, allow_none=True)
class Simple(model.Model):
foo = model.String(required=True)
class Example(model.Model):
object_id = model.PrimaryKey()
bar = model.String(required=True)
baz = model.Nested(ExampleNested)
ref = model.Dependency(ExampleReferenced, required=True)
refs = model.Dependency(ExampleReferenced, required=True, many=True)
ref_none = model.Dependency(ExampleReferenced, missing=None,
allow_none=True)
refs_none = model.Dependency(ExampleReferenced, missing=None,
many=True, allow_none=True)
count = 0
@classmethod
def generate_data(cls, object_id=None, cloud='test_cloud'):
cls.count += 1
if object_id is None:
object_id = uuid.uuid5(uuid.NAMESPACE_DNS, 'test%d' % cls.count)
ref1 = uuid.uuid5(uuid.NAMESPACE_DNS, 'ref1_%d' % cls.count)
ref2 = uuid.uuid5(uuid.NAMESPACE_DNS, 'ref2_%d' % cls.count)
ExampleReferenced.create_object(cloud, str(ref1))
ExampleReferenced.create_object(cloud, str(ref2))
return {
'object_id': {
'cloud': cloud,
'id': str(object_id),
'type': Example.get_class_qualname(),
},
'bar': 'some non-random string',
'baz': {
'foo': 'other non-random string',
'ref': {
'cloud': cloud,
'id': str(ref1),
'type': ExampleReferenced.get_class_qualname(),
},
'refs': [{
'cloud': cloud,
'id': str(ref2),
'type': ExampleReferenced.get_class_qualname(),
}],
},
'ref': {
'cloud': cloud,
'id': str(ref1),
'type': ExampleReferenced.get_class_qualname(),
},
'refs': [{
'cloud': cloud,
'id': str(ref2),
'type': ExampleReferenced.get_class_qualname(),
}],
}
class ExampleRef(model.Model):
object_id = model.PrimaryKey()
ref = model.Reference(ExampleReferenced, allow_none=True)
def equals(self, other):
# pylint: disable=no-member
if super(ExampleRef, self).equals(other):
return True
if self.ref is None:
return other.ref is None
return self.ref.equals(other.ref)
@classmethod
def create_object(cls, cloud, unique_id, ref_unique_id):
data = {
'object_id': {
'cloud': cloud,
'id': unique_id,
'type': cls.get_class_qualname(),
},
}
if ref_unique_id is not None:
ref = {
'cloud': cloud,
'id': ref_unique_id,
'type': ExampleReferenced.get_class_qualname(),
}
else:
ref = None
data['ref'] = ref
return cls.load(data)
class ModelTestCase(test_local_db.DatabaseMockingTestCase):
def setUp(self):
super(ModelTestCase, self).setUp()
self.cloud = mock.MagicMock()
self.cloud.name = 'test_cloud'
self.cloud2 = mock.MagicMock()
self.cloud2.name = 'test_cloud2'
def _validate_example_obj(self, object_id, obj, validate_refs=True,
bar_value='some non-random string'):
self.assertEqual(object_id, obj.object_id)
self.assertEqual(bar_value, obj.bar)
self.assertEqual('other non-random string', obj.baz.foo)
if validate_refs:
self.assertEqual(1337, obj.ref.qux)
self.assertEqual(1337, obj.refs[0].qux)
@staticmethod
def _make_id(model_class, cloud_obj_id, cloud='test_cloud'):
return {
'id': cloud_obj_id,
'cloud': cloud,
'type': model_class.get_class_qualname(),
}
def test_load(self):
data = Example.generate_data()
obj = Example.load(data)
self._validate_example_obj(
model.ObjectId(data['object_id']['id'], 'test_cloud'), obj, False)
def test_non_dirty(self):
obj = Example.load(Example.generate_data())
self.assertTrue(obj.is_dirty('objects'))
def test_simple_dirty(self):
obj = Example.load(Example.generate_data())
obj.bar = 'value is changed'
self.assertTrue(obj.is_dirty('objects'))
def test_nested_dirty(self):
obj = Example.load(Example.generate_data())
obj.baz.foo = 'value is changed'
self.assertTrue(obj.is_dirty('objects'))
def test_ref_dirty(self):
obj = Example.load(Example.generate_data())
ref_obj = ExampleReferenced.load({
'object_id': self._make_id(ExampleReferenced, 'hello'),
'qux': 313373,
})
obj.ref = ref_obj
self.assertTrue(obj.is_dirty('objects'))
def test_refs_dirty(self):
obj = Example.load(Example.generate_data())
ref_obj = ExampleReferenced.load({
'object_id': self._make_id(ExampleReferenced, 'hello'),
'qux': 313373,
})
obj.refs.append(ref_obj)
self.assertTrue(obj.is_dirty('objects'))
def test_nested_ref_dirty(self):
obj = Example.load(Example.generate_data())
ref_obj = ExampleReferenced.load({
'object_id': self._make_id(ExampleReferenced, 'hello'),
'qux': 313373,
})
obj.baz.ref = ref_obj
self.assertTrue(obj.is_dirty('objects'))
def test_nested_refs_dirty(self):
obj = Example.load(Example.generate_data())
ref_obj = ExampleReferenced.load({
'object_id': self._make_id(ExampleReferenced, 'hello'),
'qux': 313373,
})
obj.baz.refs.append(ref_obj)
self.assertTrue(obj.is_dirty('objects'))
def test_store_retrieve(self):
orig_obj = Example.load(Example.generate_data())
object_id = orig_obj.object_id
with model.Session() as session:
session.store(orig_obj)
# Validate retrieve working before commit
self._validate_example_obj(
object_id, session.retrieve(Example, object_id))
with model.Session() as session:
# Validate retrieve working after commit
self._validate_example_obj(
object_id, session.retrieve(Example, object_id))
def test_store_list(self):
orig_obj = Example.load(Example.generate_data())
object_id = orig_obj.object_id
with model.Session() as session:
session.store(orig_obj)
# Validate retrieve working before commit
self._validate_example_obj(object_id, session.list(Example)[0])
with model.Session() as session:
# Validate retrieve working after commit
self._validate_example_obj(object_id, session.list(Example)[0])
def test_store_list_cloud(self):
orig_obj1 = Example.load(Example.generate_data(cloud=self.cloud.name))
object1_id = orig_obj1.object_id
orig_obj2 = Example.load(Example.generate_data(cloud=self.cloud2.name))
object2_id = orig_obj2.object_id
with model.Session() as session:
session.store(orig_obj1)
session.store(orig_obj2)
# Validate retrieve working before commit
self._validate_example_obj(object1_id,
session.list(Example, self.cloud)[0])
self._validate_example_obj(object2_id,
session.list(Example, self.cloud2)[0])
# Validate retrieve working after commit
with model.Session() as session:
self._validate_example_obj(object1_id,
session.list(Example, self.cloud)[0])
with model.Session() as session:
self._validate_example_obj(object2_id,
session.list(Example, self.cloud2)[0])
def test_load_store(self):
orig_obj = Example.load(Example.generate_data())
object_id = orig_obj.object_id
with model.Session() as session:
session.store(orig_obj)
with model.Session() as session:
obj = session.retrieve(Example, object_id)
self._validate_example_obj(object_id, obj)
obj.baz.foo = 'changed'
obj.bar = 'changed too'
with model.Session() as session:
loaded_obj = session.retrieve(Example, object_id)
self.assertEqual('changed', loaded_obj.baz.foo)
self.assertEqual('changed too', loaded_obj.bar)
def test_many_nested(self):
class ExampleMany(model.Model):
object_id = model.PrimaryKey()
many = model.Nested(Simple, many=True)
many = ExampleMany.load({
'object_id': self._make_id(ExampleMany, 'foo'),
'many': [
{'foo': 'foo'},
{'foo': 'bar'},
{'foo': 'baz'},
],
})
self.assertEqual('foo', many.many[0].foo)
self.assertEqual('bar', many.many[1].foo)
self.assertEqual('baz', many.many[2].foo)
with model.Session() as session:
session.store(many)
with model.Session() as session:
obj = session.retrieve(
ExampleMany, model.ObjectId('foo', 'test_cloud'))
self.assertEqual('foo', obj.many[0].foo)
self.assertEqual('bar', obj.many[1].foo)
self.assertEqual('baz', obj.many[2].foo)
def test_example_name_ref(self):
class ExampleNameRef(model.Model):
object_id = model.PrimaryKey()
ref = model.Dependency(Example.get_class_qualname())
with model.Session() as session:
example = Example.load(Example.generate_data('foo-bar-baz'))
session.store(example)
obj = ExampleNameRef.load({
'object_id': self._make_id(ExampleNameRef, 'ExampleNameRef-1'),
'ref': self._make_id(Example, 'foo-bar-baz'),
})
self.assertIs(Example, obj.ref.get_class())
def test_nested_sessions(self):
orig_obj1 = Example.load(Example.generate_data(cloud=self.cloud.name))
object1_id = orig_obj1.object_id
orig_obj2 = Example.load(Example.generate_data(cloud=self.cloud2.name))
object2_id = orig_obj2.object_id
with model.Session() as s1:
s1.store(orig_obj1)
with model.Session() as s2:
s2.store(orig_obj2)
self._validate_example_obj(
object1_id, s2.retrieve(Example, object1_id))
self._validate_example_obj(
object2_id, s2.retrieve(Example, object2_id))
with model.Session() as s:
self._validate_example_obj(
object1_id, s.retrieve(Example, object1_id))
self._validate_example_obj(
object2_id, s2.retrieve(Example, object2_id))
def test_nested_sessions_save_updates_after_nested(self):
orig_obj1 = Example.load(Example.generate_data(cloud=self.cloud.name))
object1_id = orig_obj1.object_id
orig_obj2 = Example.load(Example.generate_data(cloud=self.cloud2.name))
object2_id = orig_obj2.object_id
with model.Session() as s1:
s1.store(orig_obj1)
with model.Session() as s2:
s2.store(orig_obj2)
self._validate_example_obj(
object1_id, s2.retrieve(Example, object1_id))
self._validate_example_obj(
object2_id, s2.retrieve(Example, object2_id))
orig_obj1.bar = 'some other non-random string'
with model.Session() as s:
self._validate_example_obj(
object1_id, s.retrieve(Example, object1_id),
bar_value='some other non-random string')
self._validate_example_obj(
object2_id, s2.retrieve(Example, object2_id))
def test_absent_reference_equals1(self):
object1 = ExampleRef.create_object(
'test_cloud1', 'example_ref_id', 'example_referenced_id')
object2 = ExampleRef.create_object(
'test_cloud2', 'example_ref_id', 'example_referenced_id')
self.assertTrue(object1.equals(object2))
def test_absent_reference_equals2(self):
object1 = ExampleRef.create_object(
'test_cloud1', 'example_ref_id', 'example_referenced_id')
object2 = ExampleRef.create_object(
'test_cloud2', 'example_ref_id', 'other_referenced_id')
self.assertFalse(object1.equals(object2))
def test_absent_reference_equals3(self):
object1 = ExampleRef.create_object(
'test_cloud1', 'example_ref_id', None)
object2 = ExampleRef.create_object(
'test_cloud2', 'example_ref_id', None)
self.assertTrue(object1.equals(object2))
def test_absent_reference_equals4(self):
with model.Session():
ExampleReferenced.create_object(
'test_cloud1', 'example_referenced_id')
ExampleReferenced.create_object(
'test_cloud2', 'other_referenced_id')
object1 = ExampleRef.create_object(
'test_cloud1', 'example_ref_id', 'example_referenced_id')
object2 = ExampleRef.create_object(
'test_cloud2', 'example_ref_id', 'other_referenced_id')
# We have equivalent objects referenced by example_referenced_id and
# other_referenced_id this time
self.assertTrue(object1.equals(object2))
|
import json
from django.test import TestCase
from django.core import management
from django.contrib.auth.models import User
from app.services.models import Event
from app.achievement.utils import find_nested_json
from app.achievement.hooks import check_for_unlocked_achievements
from app.achievement.models import UserProfile, ValueCondition, AttributeCondition
def setUpModule():
"""
Sets up all the test cases in the current test file. Only runs once. Use it to
load data that will be used here.
"""
pass
class AchievementTestCase_01(TestCase):
"""
Generic Achievement Test Suite to ensure they work. Since all tests run in a transaction we
don't have to worry about deleting the database objects created here.
"""
def setUp(self):
management.call_command('loaddata', 'app/achievement/tests/test_data_01.json', verbosity=0)
def test_profile_exists(self):
u = User.objects.get(username="doug")
self.assertTrue(len(UserProfile.objects.filter(user=u)) == 1, "Profile was created on user creation.")
def test_condition_satisfied(self):
"""
Tests that a condition can be properly
satisfied.
"""
condition = ValueCondition.objects.get(pk=1)
passed = condition({"action": "forced"})
self.assertTrue(passed, 'Condition should be satisfied.')
def test_condition_not_satisfied(self):
"""
Tests that a condition is not improperly
satisfied.
"""
condition = ValueCondition.objects.get(pk=1)
passed = condition({"action": ""})
self.assertTrue(not passed, 'Condition should not be satisfied.')
def test_user_unlocks_achievement(self):
"""
Tests that a user can unlock an achievement.
"""
payload = {"action": "forced"}
unlocked = check_for_unlocked_achievements('push', payload)
self.assertTrue(len(unlocked) == 1, 'Achievement should be unlocked.')
def test_user_does_not_unlock_achievement(self):
"""
Tests that an achievement is not improperly
unlocked.
"""
payload = {"action": "forced"}
unlocked = check_for_unlocked_achievements('pull_request', payload)
self.assertTrue(len(unlocked) == 0, 'Achievement should not be unlocked for non-matching event.')
payload = {"action": ""}
unlocked = check_for_unlocked_achievements('push', payload)
self.assertTrue(len(unlocked) == 0, 'Achievement should not be unlocked for non-matching value.')
def test_quantifier_01(self):
"""
Tests that quantifiers actually work.
"""
payload = {
'download': {
'files': [
{
'name': "a.txt"
},
{
'name': "b.txtp"
}
]
}
}
self.assertEqual(["a.txt", "b.txtp"], find_nested_json(payload, "download.files.name".split('.')),
"Nested json results should match the values in the list.")
unlocked = check_for_unlocked_achievements('download', payload)
self.assertTrue(len(unlocked) == 1, 'Achievement was unlocked via quantifier and __or__')
def test_qualifiers_01(self):
"""
Tests that qualifiers actually work.
"""
payload = {
'download': {
'url': "http",
'html_url': "hfss"
}
}
unlocked = check_for_unlocked_achievements('download', payload)
self.assertTrue(len(unlocked) == 2, 'Achievement should be unlocked based on qualifier.')
payload = {
'download': {
'url': "Http",
'html_url': "https"
}
}
unlocked = check_for_unlocked_achievements('download', payload)
self.assertTrue(len(unlocked) == 0, 'Achievement should not be unlocked based on qualifier.')
class CustomConditionTestCase_01(TestCase):
"""
Tests custom conditions. For any custom condition added, it should have an equivalent test.
"""
def setUp(self):
pass
|
class clsLocation:
#this class provides the support for enumerated locations
ROOM=0
DOOR=1
WALL=2
#some more changes went here before the start of the service
#
#
#this branch is called development 01
#this is another branch here
class clsPlayerState:
#this class provides the functions to support the player state
#define class based variables; common to all instances
__playerCount=0 #create a common variable; use '__' to hide the variable
def __init__(self, startState):
#this function is automatically executed when a new class instance is created
clsPlayerState.__playerCount+=1 #increase the hidden player count
#define instance variables, specific to single instance
self.location=startState #initialise the starting location
def fnUpdate(self):
#this function updates the players state
if self.location==clsLocation.ROOM: #at the room
self.fnROOM() #create options for room
elif self.location==clsLocation.DOOR: #at the door
self.fnDOOR() #create options for door
elif self.location==clsLocation.WALL: #at the wall
self.fnWALL() #create options for wall
def fnROOM(self):
#describe the location
print("You are at the room")
def fnDOOR(self):
#describe the location
print("You are at the door")
def fnWALL(self):
#describe the location
print("You are at the wall")
#begin the main code
insPlayer=clsPlayerState(clsLocation.ROOM) #initialise the player instance using the class
insPlayer.fnUpdate()
|
# -*- coding: utf-8 -*-
# Copyright 2020 Green Valley Belgium NV
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# @@license_version:1.7@@
# Do not touch the indentation here
from rogerthat.rpc.service import register_service_api_calls
from rogerthat.service.api import app, communities, friends, messaging, qr, system, news, payments
def register_all_service_api_calls():
register_service_api_calls(app)
register_service_api_calls(communities)
register_service_api_calls(friends)
register_service_api_calls(messaging)
register_service_api_calls(qr)
register_service_api_calls(system)
register_service_api_calls(news)
register_service_api_calls(payments)
|
"""
Experiment Diary 2016-05-31
"""
import sys
import math
import matplotlib.pyplot as plt
from scipy import io
import numpy as np
from scipy.sparse.linalg import *
sys.path.append("../src/")
from worker import Worker
from native_conjugate_gradient import NativeConjugateGradient
from native_conjugate_gradient import NativeBlockConjugateGradient
from gerschgorin_circle_theorem import GerschgorinCircleTheoremEigenvalueEstimator
from chebyshev_polynomial import ChebyshevPolynomial
from chebyshev_basis_cacg import CBCG
from legendre_basis_cacg import LBCG
from legendre_basis_cacg import BLBCG
from chebyshev_basis_cacg import BCBCG
from presenter import Presenter
from power_iteration import PowerIteration
class WorkerIterativeLinearSystemSolverCG_Exp_160531(Worker):
""" Description: Experiment A
Numerical Method: Naive Conjugate Gradient
tol:
max_iteration:
matrix:
Reference:
1.
"""
def __init__(self, mat_path):
""" """
#print ("WorkerIterativeLinearSystemSolver works good")
Worker.__init__(self)
self._hist_list = []
if mat_path == "":
""" Need to generatre matrix """
print("calling self._matrix_generation")
#self._mat = self._matrix_generation()
else:
self._mat_coo = io.mmread(mat_path)
self._mat = self._mat_coo.tocsr()
self._mat_info = io.mminfo(mat_path)
print("Done reading matrix {}, Row:{}, Col:{}".format( mat_path, self._mat.shape[0], self._mat.shape[1]))
print("mminfo:{}".format(self._mat_info))
if self._mat.getformat() == "csr":
print("Yeah, it is CSR")
def _matrix_generator(self):
""" generation of matrix """
print("_matrix_generator")
def _setup_testbed(self, block_size):
""" this can considered as a basic experiment input descripting """
self._SB = np.random.random( ( self._mat.shape[0],1) )
self._BB = np.random.random( ( self._mat.shape[0],block_size) )
#np.savetxt("/home/scl/tmp/rhs.csv",self._B, delimiter=",")
#self._B = np.ones( ( self._mat.shape[0],6) )
self._SX = np.ones ( (self._mat.shape[1],1) )
self._BX = np.ones ( (self._mat.shape[1],block_size) )
#self._X = np.zeros ( (self._mat.shape[1],1) )
def _setup_numerical_algorithm(self,tol, maxiter, step_val):
""" After a linear solver or other numerical methods loaded
we need to setup the basic prarm for the algorithm
"""
self._tol = tol
self._maxiter = maxiter
self._step_val = step_val
def conduct_experiments(self, block_size, tol, maxiter, step_val):
""" function to condution the experiment """
print("to conduct the experient")
self._setup_testbed(block_size)
self._setup_numerical_algorithm(tol,maxiter,step_val)
#print ("before:{}".format(np.inner(self._X[:,0], self._X[:,0])))
#self._bcbcg_exp()
#self._db_presenter_a()
#self._db_power_iteration()
#self._db_lbcg_exp()
#self._db_blbcg_exp()
#self. _numpy_lstsq_test()
#self._db_cbcg_lstsq()
#self._db_bcbcg_lstsq()
#self._lbcg_least_square_exp()
self._blbcg_least_square_exp()
print("Experiments done")
def _bcbcg_exp(self):
bcbcg_solver_obj = BCBCG()
step_val_a = 3
step_val_b = 5
self._final_X_a, self._final_R_a, self._residual_hist_a = \
bcbcg_solver_obj.bcbcg_solver(self._mat, self._B, self._X, step_val_a, self._tol, self._maxiter,0)
self._final_X_b, self._final_R_b, self._residual_hist_b = \
bcbcg_solver_obj.bcbcg_solver(self._mat, self._B, self._X, step_val_b, self._tol, self._maxiter,0)
def _db_presenter_a(self):
plot_worker = Presenter()
residual_list = [self._residual_hist_a]
residual_list.append(self._residual_hist_b)
legend_list = ["bcbcg_s3", "bcbcg_s5"]
color_list = ["r", "k"]
# latex style notation
#plot_worker.instant_plot_y_log10(residual_list, "crystm01 $x_1$")
#plot_worker.instant_plot_y_log10(residual_list, "crystm01", "#iteration", "$\\frac{||x_1||}{||b_1||}$", legend_list, color_list)
plot_worker.instant_plot_y(residual_list, "crystm01", "#iteration", "$\\frac{||x_1||}{||b_1||}$", legend_list, color_list)
def legendre_poly_exp_a(self, order_lo, order_hi):
""" """
x= np.linspace(-1.1,1.1,41)
order_controller = np.zeros(order_hi+1)
y_list = []
plot_worker = Presenter()
legend_list = []
color_list = []
for order_idx in range(order_lo, order_hi+1):
order_controller[order_idx] = 1
legp = np.polynomial.legendre.Legendre( order_controller )
legcoef = np.polynomial.legendre.leg2poly(legp.coef )
poly = np.polynomial.Polynomial(legcoef)
y_list.append( poly(x) )
print(order_idx, " ", poly(x))
legend_list.append( "order_"+str(order_idx) )
color_list.append("k")
order_controller[order_idx] = 0
plot_worker.instant_plot_unified_x_axis(x, y_list, "Legendre Poly" , "x", "y", legend_list, color_list)
def _db_lbcg_exp (self):
""" """
lbcg_solver_obj = LBCG()
self._final_x_a, self._final_r_a, self._residual_hist_a = \
lbcg_solver_obj.lbcg_solver(self._mat, self._B, self._X, 8, self._tol, self._maxiter)
self._final_x_b, self._final_r_b, self._residual_hist_b = \
lbcg_solver_obj.lbcg_solver(self._mat, self._B, self._X, 16, self._tol, self._maxiter)
cbcg_solver_obj = CBCG()
self._final_x_c, self._final_r_c, self._residual_hist_c = \
cbcg_solver_obj.cbcg_solver(self._mat, self._B, self._X, 16, self._tol, self._maxiter)
plot_worker = Presenter()
residual_list = [self._residual_hist_a, self._residual_hist_b, self._residual_hist_c]
legend_list = ["lbcg_s8","lbcg_s16", "cbcg_s16"]
color_list = ["r","k", "b"]
#plot_worker.instant_plot_y_log10(residual_list, "crystm01", "#iteration", "$\\frac{||x_1||}{||b_1||}$", legend_list, color_list)
plot_worker.instant_plot_y_log10(residual_list, "wathen100", "#iteration", "$\\frac{||x_1||}{||b_1||}$", legend_list, color_list)
def _db_blbcg_exp(self):
""" """
lbcg_solver_obj = LBCG()
self._final_x_a, self._final_r_a, self._residual_hist_a = \
lbcg_solver_obj.lbcg_solver(self._mat, self._SB, self._SX, 8, self._tol, self._maxiter)
blbcg_solver_obj = BLBCG()
self._final_x_b, self._final_r_b, self._residual_hist_b = \
blbcg_solver_obj.blbcg_solver(self._mat, self._BB, self._BX, 8, self._tol, self._maxiter, 0)
bcbcg_solver_obj = BCBCG()
self._final_x_c, self._final_r_c, self._residual_hist_c = \
bcbcg_solver_obj.bcbcg_solver(self._mat, self._BB, self._BX, 8, self._tol, self._maxiter, 0)
plot_worker = Presenter()
residual_list = [self._residual_hist_a, self._residual_hist_b, self._residual_hist_c]
legend_list = ["lbcg_s8","blbcg_s8b10", "bcbcg_s8b10"]
color_list = ["r","k", "b"]
plot_worker.instant_plot_y_log10(residual_list, "bodyy6", "#iteration", "$\\frac{||x_1||}{||b_1||}$", legend_list, color_list)
def _numpy_lstsq_test (self):
""" """
self._small_mat = np.random.random( ( 5,5) )
self._small_rhs = np.random.random( ( 5,3) )
self._lstsq_res = np.linalg.lstsq(self._small_mat, self._small_rhs)
print (self._small_mat)
print("")
print(self._small_rhs)
print("")
print(self._lstsq_res)
print("")
print(np.matmul(self._small_mat, self._lstsq_res[0]))
#print(type(self._small_mat), "", type(self._lstsq_res))
def _db_cbcg_lstsq (self):
cbcg_solver_obj = CBCG()
self._final_x_a, self._final_r_a, self._residual_hist_a = \
cbcg_solver_obj.cbcg_solver_least_square(self._mat, self._SB, self._SX, self._step_val, self._tol, self._maxiter)
self._final_x_b, self._final_r_b, self._residual_hist_b = \
cbcg_solver_obj.cbcg_solver_least_square(self._mat, self._SB, self._SX, self._step_val, self._tol, self._maxiter)
plot_worker = Presenter()
residual_list = [self._residual_hist_a, self._residual_hist_b]
legend_list = ["cbcg_s2_lstsq","blbcg_s2"]
color_list = ["r","k"]
plot_worker.instant_plot_y_log10(residual_list, "bodyy6", "#iteration", "$\\frac{||x_1||}{||b_1||}$", legend_list, color_list)
def _db_bcbcg_lstsq (self):
""" """
bcbcg_solver_obj = BCBCG()
self._final_X_a, self._final_R_a, self._residual_hist_a = \
bcbcg_solver_obj.bcbcg_solver_least_square(self._mat, self._BB, self._BX, self._step_val, self._tol, self._maxiter,0)
self._final_X_b, self._final_R_b, self._residual_hist_b = \
bcbcg_solver_obj.bcbcg_solver(self._mat, self._BB, self._BX, self._step_val, self._tol, self._maxiter,0)
plot_worker = Presenter()
residual_list = [self._residual_hist_a, self._residual_hist_b]
legend_list = ["bcbcg_s20b4_lstsq","bcbcg_s20b4"]
color_list = ["r","k"]
plot_worker.instant_plot_y_log10(residual_list, "crystm02", "#iteration", "$\\frac{||x_1||}{||b_1||}$", legend_list, color_list)
def _lbcg_least_square_exp (self):
""" """
lbcg_solver_obj = LBCG()
self._final_x_a, self._final_r_a, self._residual_hist_a = \
lbcg_solver_obj.lbcg_solver_least_square(self._mat, self._SB, self._SX, 8, self._tol, self._maxiter)
self._final_x_b, self._final_r_b, self._residual_hist_b = \
lbcg_solver_obj.lbcg_solver_least_square(self._mat, self._SB, self._SX, 18, self._tol, self._maxiter)
cbcg_solver_obj = CBCG()
self._final_x_c, self._final_r_c, self._residual_hist_c = \
cbcg_solver_obj.cbcg_solver_least_square(self._mat, self._SB, self._SX, 8, self._tol, self._maxiter)
self._final_x_d, self._final_r_d, self._residual_hist_d = \
cbcg_solver_obj.cbcg_solver_least_square(self._mat, self._SB, self._SX, 18, self._tol, self._maxiter)
plot_worker = Presenter()
residual_list = [self._residual_hist_a, self._residual_hist_b, self._residual_hist_c, self._residual_hist_d ]
legend_list = ["lbcg_lstsq_s8","lbcg_lstsq_s18" ,"cbcg_lstsq_s8", "cbcg_lstsq_s18" ]
color_list = ["r","k", "b","y"]
#plot_worker.instant_plot_y_log10(residual_list, "crystm01", "#iteration", "$\\frac{||x_1||}{||b_1||}$", legend_list, color_list)
#plot_worker.instant_plot_y_log10(residual_list, "wathen100", "#iteration", "$\\frac{||x_1||}{||b_1||}$", legend_list, color_list)
plot_worker.instant_plot_y_log10(residual_list, "bodyy06", "#iteration", "$\\frac{||x_1||}{||b_1||}$", legend_list, color_list)
def _blbcg_least_square_exp(self):
""" """
blbcg_solver_obj = BLBCG()
self._final_x_a, self._final_r_a, self._residual_hist_a = \
blbcg_solver_obj.blbcg_solver_least_square(self._mat, self._BB, self._BX, self._step_val, self._tol, self._maxiter, 0)
bcbcg_solver_obj = BCBCG()
self._final_x_b, self._final_r_b, self._residual_hist_b = \
bcbcg_solver_obj.bcbcg_solver_least_square(self._mat, self._BB, self._BX, self._step_val, self._tol, self._maxiter, 0)
plot_worker = Presenter()
residual_list = [self._residual_hist_a, self._residual_hist_b]
legend_list = ["blbcg_s64b4_lstsq","bcbcg_s64b4_lstsq"]
color_list = ["r","k"]
#plot_worker.instant_plot_y_log10(residual_list, "crystm01", "#iteration", "$\\frac{||x_1||}{||b_1||}$", legend_list, color_list)
plot_worker.instant_plot_y_log10(residual_list, "bodyy6", "#iteration", "$\\frac{||x_1||}{||b_1||}$", legend_list, color_list)
def main ():
# main function for today's experiments
#bad
#mat_path = "/home/scl/MStore/vanbody/vanbody.mtx"
#mat_path = "/home/scl/MStore/olafu/olafu.mtx"
#mat_path = "/home/scl/MStore/raefsky4/raefsky4.mtx"
#mat_path = "/home/scl/MStore/smt/smt.mtx"
#mat_path = "/home/scl/MStore/bcsstk36/bcsstk36.mtx"
#mat_path = "/home/scl/MStore/pdb1HYS/pdb1HYS.mtx"
#mat_path = "/home/scl/MStore/ship_001/ship_001.mtx"
# not so good
#mat_path = "/home/scl/MStore/Dubcova1/Dubcova1.mtx"
#mat_path = "/home/scl/MStore/bcsstk17/bcsstk17.mtx"
#mat_path = "/home/scl/MStore/wathen100/wathen100.mtx"
#mat_path = "/home/scl/MStore/nasa2146/nasa2146.mtx"
#mat_path = "/home/scl/MStore/crystm01/crystm01.mtx"
#mat_path = "/home/scl/MStore/ex13/ex13.mtx"
#mat_path = "/home/scl/MStore/LFAT5/LFAT5.mtx"
#good
mat_path = "/home/scl/MStore/bodyy6/bodyy6.mtx"
#mat_path = "/home/scl/MStore/crystm02/crystm02.mtx"
    block_size = 4
    tol = 1e-12
    maxiter = 800
    step_val = 64
    linear_system_solver_worker_test = WorkerIterativeLinearSystemSolverCG_Exp_160531(mat_path)
    linear_system_solver_worker_test.conduct_experiments(block_size, tol, maxiter, step_val)
#linear_system_solver_worker_test.chebyshev_poly_exp_a(0,6)
#linear_system_solver_worker_test.legendre_poly_exp_a(0,6)
#linear_system_solver_worker_test.debug_NativeConjugateGradient()
if __name__ == "__main__":
""" call main funtion for testing """
main()
|
# This file is part of Korman.
#
# Korman is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Korman is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Korman. If not, see <http://www.gnu.org/licenses/>.
import os.path
import sys
class ExportAnalysis:
"""This is used to collect artist action items from the export process. You can warn about
portability issues, possible oversights, etc. The benefit here is that the user doesn't have
to look through all of the gobbledygook in the export log.
"""
_porting = []
_warnings = []
def save(self):
# TODO
pass
def port(self, message, indent=0):
self._porting.append(message)
print(" " * indent, end="")
print("PORTING: {}".format(message))
def warn(self, message, indent=0):
self._warnings.append(message)
print(" " * indent, end="")
print("WARNING: {}".format(message))
class ExportLogger:
"""Yet Another Logger(TM)"""
def __init__(self, ageFile):
# Make the log file name from the age file path -- this ensures we're not trying to write
# the log file to the same directory Blender.exe is in, which might be a permission error
path, ageFile = os.path.split(ageFile)
ageName, _crap = os.path.splitext(ageFile)
fn = os.path.join(path, "{}_export.log".format(ageName))
self._file = open(fn, "w")
for i in dir(self._file):
if not hasattr(self, i):
setattr(self, i, getattr(self._file, i))
def __enter__(self):
self._stdout, sys.stdout = sys.stdout, self._file
self._stderr, sys.stderr = sys.stderr, self._file
def __exit__(self, type, value, traceback):
sys.stdout = self._stdout
sys.stderr = self._stderr
def flush(self):
self._file.flush()
self._stdout.flush()
self._stderr.flush()
def write(self, str):
self._file.write(str)
self._stdout.write(str)
def writelines(self, seq):
self._file.writelines(seq)
self._stdout.writelines(seq)
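# Minimal usage sketch (not part of Korman; "Riverside.age" is a hypothetical
# path). Inside the 'with' block sys.stdout/sys.stderr are redirected into
# <age>_export.log; calling write() on the logger directly tees output to
# both the log file and the original stdout.
if __name__ == "__main__":
    logger = ExportLogger("Riverside.age")
    with logger:
        print("exporting...")          # captured in Riverside_export.log only
    logger.write("export finished\n")  # goes to the log and to stdout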
|
# -*- coding: utf-8 -*-
# Copyright 2009-2021 Joshua Bronson. All Rights Reserved.
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
"""Provide :class:`_DelegatingBidict`."""
import typing as _t
from ._base import BidictBase
from ._typing import KT, VT
class _DelegatingBidict(BidictBase[KT, VT]):
"""Provide optimized implementations of several methods by delegating to backing dicts.
Used to override less efficient implementations inherited by :class:`~collections.abc.Mapping`.
"""
__slots__ = ()
def __iter__(self) -> _t.Iterator[KT]:
"""Iterator over the contained keys."""
return iter(self._fwdm)
def keys(self) -> _t.KeysView[KT]:
"""A set-like object providing a view on the contained keys."""
return self._fwdm.keys()
def values(self) -> _t.KeysView[VT]: # type: ignore # https://github.com/python/typeshed/issues/4435
"""A set-like object providing a view on the contained values."""
return self._invm.keys()
def items(self) -> _t.ItemsView[KT, VT]:
"""A set-like object providing a view on the contained items."""
return self._fwdm.items()
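# Illustrative, standalone sketch (a toy, not bidict's real API): the same
# delegation idea, where values() is just the inverse dict's keys(), gives
# dict-speed views instead of the generic Mapping fallbacks.
class _ToyBidict:
    def __init__(self, *items):
        self._fwdm = dict(items)                            # key -> value
        self._invm = {v: k for k, v in self._fwdm.items()}  # value -> key
    def keys(self):
        return self._fwdm.keys()    # delegate to the forward dict
    def values(self):
        return self._invm.keys()    # values are the inverse dict's keys
if __name__ == "__main__":
    toy = _ToyBidict(("one", 1), ("two", 2))
    assert list(toy.values()) == [1, 2]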
|
# -*- coding: utf-8 -*-
"""
/***************************************************************************
DsgTools
A QGIS plugin
Brazilian Army Cartographic Production Tools
-------------------
begin : 2019-07-04
git sha : $Format:%H$
copyright : (C) 2019 by João P. Esperidião - Cartographic Engineer @ Brazilian Army
email : [email protected]
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
"""
Script designed to test each validation algorithm from DSGTools 4.X.
It is supposed to be run through QGIS with DSGTools installed.
* This is merely a prototype for our unit test suite. *
"""
import os
import sys
import warnings
import yaml
import shutil
from osgeo import ogr
import processing
from qgis.utils import iface
from qgis.core import QgsDataSourceUri, QgsVectorLayer, QgsProcessingFeedback,\
QgsProcessingContext, QgsLayerTreeLayer, QgsProject
from qgis.PyQt.QtSql import QSqlDatabase
from DsgTools.core.dsgEnums import DsgEnums
from DsgTools.core.Factories.DbFactory.dbFactory import DbFactory
from DsgTools.core.Factories.LayerLoaderFactory.layerLoaderFactory import LayerLoaderFactory
from qgis.testing import unittest
from DsgTools.tests.algorithmsTestBase import AlgorithmsTest, GenericAlgorithmsTest
class Tester(GenericAlgorithmsTest, AlgorithmsTest):
@classmethod
def setUpClass(cls):
cls.cleanup_paths = []
@classmethod
def tearDownClass(cls):
QgsProject.instance().clear()
for path in cls.cleanup_paths:
shutil.rmtree(path)
def get_definition_file(self):
return 'otherAlgorithms.yaml'
def run_all(filterString=None):
"""Default function that is called by the runner if nothing else is specified"""
filterString = 'test_' if filterString is None else filterString
suite = unittest.TestSuite()
suite.addTests(unittest.makeSuite(Tester, filterString))
unittest.TextTestRunner(verbosity=3, stream=sys.stdout).run(suite)
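if __name__ == "__main__":
    # Sketch of a direct invocation (hedged: requires QGIS with DSGTools
    # installed); the QGIS test runner normally calls run_all() itself.
    run_all()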
|
#!/usr/bin/env python
import socket
import threading
import sbs
TCP_IP = '10.0.0.184'
TCP_PORT = 10001
BUFFER_SIZE = 1024
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect((TCP_IP, TCP_PORT))
pipeaway = {'kern.alert': None, 'daemon.warn': None, 'CMS MDM': None}
for key in pipeaway:
pipeaway[key] = open('pipes' + key, 'w')
def listen(arg1, stop_event):
mesg = ''
while (not stop_event.is_set()):
data = s.recv(BUFFER_SIZE)
mesg += str(data, encoding='utf-8')
if '\r\n' in mesg:
msg = mesg.rstrip()
mesg = ''
printto = None
for key in pipeaway:
if key in msg:
printto = pipeaway[key]
print(msg, file=printto)
s.close()
def send(msg):
s.send(str(msg + '\r\n').encode())
dust = {}
users = ["supervisor", "support", "user", "nobody", "zyuser", "root", "wittrup"]
if __name__ == "__main__":
thread_stop = threading.Event()
t = threading.Thread(target=listen, args=(2, thread_stop))
    t.daemon = True
t.start()
try:
while True:
# TODO:: Please write your application code
for user in users:
for i in range(2):
send(user)
            input()  # pause until the operator presses enter between rounds
except KeyboardInterrupt:
pass
finally:
thread_stop.set()
|
# -*- test-case-name: buildbot.test.test_util -*-
from twisted.trial import unittest
from buildbot import util
class Foo(util.ComparableMixin):
compare_attrs = ["a", "b"]
def __init__(self, a, b, c):
        self.a, self.b, self.c = a, b, c
class Bar(Foo, util.ComparableMixin):
compare_attrs = ["b", "c"]
class Compare(unittest.TestCase):
def testCompare(self):
f1 = Foo(1, 2, 3)
f2 = Foo(1, 2, 4)
f3 = Foo(1, 3, 4)
b1 = Bar(1, 2, 3)
self.failUnless(f1 == f2)
self.failIf(f1 == f3)
self.failIf(f1 == b1)
class test_checkRepoURL(unittest.TestCase):
def assertUrl(self, real_url, expected_url):
new_url = util.remove_userpassword(real_url)
self.assertEqual(expected_url, new_url)
def test_url_with_no_user_and_password(self):
self.assertUrl('http://myurl.com/myrepo', 'http://myurl.com/myrepo')
def test_url_with_user_and_password(self):
self.assertUrl('http://myuser:[email protected]/myrepo', 'http://myurl.com/myrepo')
def test_another_url_with_no_user_and_password(self):
self.assertUrl('http://myurl2.com/myrepo2', 'http://myurl2.com/myrepo2')
def test_another_url_with_user_and_password(self):
self.assertUrl('http://myuser2:[email protected]/myrepo2', 'http://myurl2.com/myrepo2')
def test_with_different_protocol_without_user_and_password(self):
self.assertUrl('ssh://myurl3.com/myrepo3', 'ssh://myurl3.com/myrepo3')
def test_with_different_protocol_with_user_and_password(self):
self.assertUrl('ssh://myuser3:[email protected]/myrepo3', 'ssh://myurl3.com/myrepo3')
def test_file_path(self):
self.assertUrl('/home/me/repos/my-repo', '/home/me/repos/my-repo')
def test_win32file_path(self):
self.assertUrl('c:\\repos\\my-repo', 'c:\\repos\\my-repo')
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2001 by Object Craft P/L, Melbourne, Australia.
#
# LICENCE - see LICENCE file distributed with this software for details.
#
# To use:
# python setup.py install
#
"""Sybase module for Python
The Sybase module provides a Python interface to the Sybase relational
database system. The Sybase package supports all of the Python
Database API, version 2.0 with extensions.
"""
classifiers = """\
Development Status :: 5 - Production/Stable
Intended Audience :: Developers
License :: OSI Approved :: BSD License
Programming Language :: Python
Topic :: Database
Topic :: Software Development :: Libraries :: Python Modules
Operating System :: Microsoft :: Windows
Operating System :: Unix
"""
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages, Extension
import distutils
import os
import sys
import string
import re
from distutils.command.sdist import sdist
if sys.version_info < (2, 3):
_setup = setup
def setup(**kwargs):
if kwargs.has_key("classifiers"):
del kwargs["classifiers"]
_setup(**kwargs)
doclines = __doc__.split("\n")
def api_exists(func, filename):
try:
text = open(filename).read()
except:
return 0
    # group the alternation so we match 'CS_PUBLIC func' or 'CS_RETCODE func'
    if re.search(r'(?:CS_PUBLIC|CS_RETCODE) %s' % func, text):
return 1
sybase = None
if os.environ.has_key('SYBASE'):
sybase = os.environ['SYBASE']
if os.environ.has_key('SYBASE_OCS'):
ocs = os.environ['SYBASE_OCS']
sybase = os.path.join(sybase, ocs)
have64bit = False
if sys.maxint > 2147483647:
have64bit = True
if os.name == 'posix': # unix
# Most people will define the location of their Sybase
# installation in their environment.
if sybase is None:
# Not in environment - assume /opt/sybase
sybase = '/opt/sybase'
if not os.access(sybase, os.F_OK):
sys.stderr.write(
'Please define the Sybase installation directory in'
' the SYBASE environment variable.\n')
sys.exit(1)
# On Linux the Sybase tcl library is distributed as sybtcl
syb_libs = []
if os.uname()[0] == 'Linux':
lib_names = ['blk', 'ct', 'cs', 'sybtcl', 'insck', 'comn', 'intl']
elif os.uname()[0] == 'AIX':
lib_names = ['blk', 'ct', 'cs', 'comn', 'tcl', 'intl', 'insck']
else:
lib_names = ['blk', 'ct', 'cs', 'tcl', 'comn', 'intl']
# for Sybase 15.0
lib_names += ['sybblk', 'sybct', 'sybcs', 'sybtcl', 'sybinsck', 'sybcomn', 'sybintl', 'sybunic']
for name in lib_names:
for lib in (have64bit and ('lib64', 'lib') or ('lib',)):
extensions = [('', 'a'), ('', 'so'), ('_r', 'a'), ('_r', 'so')]
if have64bit and sys.platform not in ['osf1V5']:
extensions = [('_r64', 'a'), ('_r64', 'so'), ('64', 'a'), ('64', 'so')] + extensions
for (ext1, ext2) in extensions:
lib_name = "%s%s" % (name, ext1)
lib_path = os.path.join(sybase, lib, 'lib%s.%s' % (lib_name, ext2))
if os.access(lib_path, os.R_OK):
syb_libs.append(lib_name)
break
elif os.name == 'nt': # win32
# Not sure how the installation location is specified under NT
if sybase is None:
sybase = r'i:\sybase\sql11.5'
if not os.access(sybase, os.F_OK):
sys.stderr.write(
'Please define the Sybase installation directory in'
            ' the SYBASE environment variable.\n')
sys.exit(1)
syb_libs = ['libblk', 'libct', 'libcs']
# This seems a bit sloppy to me, but is no worse than what's above.
if sybase.find('15') > 0:
syb_libs = ['libsybblk', 'libsybct', 'libsybcs']
else: # unknown
sys.stderr.write(
'Sorry, I do not know how to build on this platform.\n'
'\n'
'Please edit setup.py and add platform specific settings. If you\n'
'figure out how to get it working for your platform, please send\n'
'mail to [email protected] so you can help other people.\n')
sys.exit(1)
syb_incdir = os.path.join(sybase, 'include')
syb_libdir = os.path.join(sybase, 'lib')
for dir in (syb_incdir, syb_libdir):
if not os.access(dir, os.F_OK):
sys.stderr.write('Directory %s does not exist - cannot build.\n' % dir)
sys.exit(1)
extra_objects = None
runtime_library_dirs = None
try:
if os.uname()[0] == 'SunOS':
if have64bit:
syb_libs.append('sybdb64')
else:
syb_libs.append('sybdb')
syb_libs.remove('comn')
extra_objects = [os.path.join(syb_libdir, 'libcomn.a')]
runtime_library_dirs = [syb_libdir]
except:
pass
syb_macros = [('WANT_BULKCOPY', None)]
if have64bit:
syb_macros.append(('SYB_LP64', None))
# the C API to datetime only exists since python 2.4
if sys.version_info >= (2, 4):
try:
import datetime
except ImportError:
pass
else:
syb_macros.append(('HAVE_DATETIME', None))
try:
from decimal import Decimal
except ImportError:
pass
else:
syb_macros.append(('HAVE_DECIMAL', None))
# distutils does not allow -D HAVE_FREETDS=60 so I have to find this
# argument myself and remove it from sys.argv and set the macro via
# the define_macros argument to the extension module.
for i in range(1, len(sys.argv)):
# Find arguments like '-DHAVE_FREETDS=60' and variants
parts = string.split(sys.argv[i], 'HAVE_FREETDS')
if len(parts) == 1:
continue
prefix, suffix = parts[:2]
# Ignore -DHAVE_FREETDS which does not set a value (=blah)
if not suffix or suffix[0] != '=':
continue
# Remove this argument from sys.argv
del sys.argv[i]
# If -D was in previous argument then remove that as well
if not prefix and sys.argv[i - 1] == '-D':
del sys.argv[i - 1]
    # Now set the TDS level the other way.
syb_macros.append(('HAVE_FREETDS', suffix[1:]))
if prefix:
# Handle -D WANT_X,HAVE_FREETDS=60 case
if prefix[-1] == ',':
prefix = prefix[:-1]
sys.argv[i:i] = [prefix]
break
for api in ('blk_alloc', 'blk_describe', 'blk_drop', 'blk_rowxfer_mult',
'blk_textxfer',):
if api_exists(api, os.path.join(syb_incdir, 'bkpublic.h')):
syb_macros.append(('HAVE_' + string.upper(api), None))
for api in ('ct_cursor', 'ct_data_info', 'ct_dynamic', 'ct_send_data',
'ct_setparam',):
if api_exists(api, os.path.join(syb_incdir, 'ctpublic.h')):
syb_macros.append(('HAVE_' + string.upper(api), None))
for api in ('cs_calc', 'cs_cmp',):
if api_exists(api, os.path.join(syb_incdir, 'cspublic.h')):
syb_macros.append(('HAVE_' + string.upper(api), None))
class PreReleaseCheck:
def __init__(self, distribution):
self.distribution = distribution
self.check_rev('doc/sybase.tex', r'^\\release{(.*)}')
self.check_rev('Sybase.py', r'__version__ = \'(.*)\'')
self.check_rev('sybasect.c', r'rev = PyString_FromString\("(.*)"\)')
def _extract_rev(self, filename, pattern):
regexp = re.compile(pattern)
match = None
revs = []
line_num = 0
f = open(filename)
try:
for line in f.readlines():
line_num += 1
match = regexp.search(line)
if match:
revs.append((line_num, match.group(1)))
finally:
f.close()
return revs
def check_rev(self, filename, pattern):
file_revs = self._extract_rev(filename, pattern)
if not file_revs:
sys.exit("Could not locate version in %s" % filename)
line_num, file_rev = file_revs[0]
for num, rev in file_revs[1:]:
if rev != file_rev:
sys.exit("%s:%d inconsistent version on line %d" % \
(filename, line_num, num))
setup_rev = self.distribution.get_version()
if file_rev != setup_rev:
sys.exit("%s:%d version %s does not match setup.py version %s" % \
(filename, line_num, file_rev, setup_rev))
class my_sdist(sdist):
def run(self):
PreReleaseCheck(self.distribution)
self.announce("Pre-release checks pass!")
sdist.run(self)
setup(name="python-sybase",
version="0.40pre2",
maintainer=u"Sebastien Sable",
maintainer_email="[email protected]",
description=doclines[0],
url="http://python-sybase.sourceforge.net/",
license="http://www.opensource.org/licenses/bsd-license.html",
platforms = ["any"],
classifiers = filter(None, classifiers.split("\n")),
long_description = "\n".join(doclines[2:]),
py_modules=['Sybase'],
include_dirs=[syb_incdir],
ext_modules=[
Extension('sybasect',
['blk.c', 'databuf.c', 'cmd.c', 'conn.c', 'ctx.c',
'datafmt.c', 'iodesc.c', 'locale.c', 'msgs.c',
'numeric.c', 'money.c', 'datetime.c', 'date.c',
'sybasect.c'],
define_macros=syb_macros,
libraries=syb_libs,
library_dirs=[syb_libdir],
runtime_library_dirs=runtime_library_dirs,
extra_objects=extra_objects
)
],
cmdclass={'sdist': my_sdist},
# test_suite = 'nose.collector' # easy_setup only
)
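# Example command lines the HAVE_FREETDS handling above is meant to accept
# (a sketch; the TDS level value depends on your FreeTDS build):
#   python setup.py -D HAVE_FREETDS=60 install
#   python setup.py -DHAVE_FREETDS=60 install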
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
import mango
import user_docs
class IndexSelectionTests(mango.UserDocsTests):
@classmethod
def setUpClass(klass):
super(IndexSelectionTests, klass).setUpClass()
user_docs.add_text_indexes(klass.db, {})
def test_basic(self):
resp = self.db.find({"name.last": "A last name"}, explain=True)
assert resp["index"]["type"] == "json"
def test_with_and(self):
resp = self.db.find({
"name.first": "Stephanie",
"name.last": "This doesn't have to match anything."
}, explain=True)
assert resp["index"]["type"] == "json"
def test_with_text(self):
resp = self.db.find({
"$text" : "Stephanie",
"name.first": "Stephanie",
"name.last": "This doesn't have to match anything."
}, explain=True)
assert resp["index"]["type"] == "text"
def test_no_view_index(self):
resp = self.db.find({"name.first": "Ohai!"}, explain=True)
assert resp["index"]["type"] == "text"
def test_with_or(self):
resp = self.db.find({
"$or": [
{"name.first": "Stephanie"},
{"name.last": "This doesn't have to match anything."}
]
}, explain=True)
assert resp["index"]["type"] == "text"
def test_use_most_columns(self):
# ddoc id for the age index
ddocid = "_design/ad3d537c03cd7c6a43cf8dff66ef70ea54c2b40f"
resp = self.db.find({
"name.first": "Stephanie",
"name.last": "Something or other",
"age": {"$gt": 1}
}, explain=True)
assert resp["index"]["ddoc"] != "_design/" + ddocid
resp = self.db.find({
"name.first": "Stephanie",
"name.last": "Something or other",
"age": {"$gt": 1}
}, use_index=ddocid, explain=True)
assert resp["index"]["ddoc"] == ddocid
class MultiTextIndexSelectionTests(mango.UserDocsTests):
@classmethod
def setUpClass(klass):
super(MultiTextIndexSelectionTests, klass).setUpClass()
klass.db.create_text_index(ddoc="foo", analyzer="keyword")
klass.db.create_text_index(ddoc="bar", analyzer="email")
def test_view_ok_with_multi_text(self):
resp = self.db.find({"name.last": "A last name"}, explain=True)
assert resp["index"]["type"] == "json"
def test_multi_text_index_is_error(self):
try:
self.db.find({"$text": "a query"}, explain=True)
except Exception, e:
assert e.response.status_code == 400
def test_use_index_works(self):
resp = self.db.find({"$text": "a query"}, use_index="foo", explain=True)
assert resp["index"]["ddoc"] == "_design/foo"
|
#
# ----------------------------------------------------------------------------------------------------
#
# Copyright (c) 2007, 2015, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License version 2 only, as
# published by the Free Software Foundation.
#
# This code is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
# version 2 for more details (a copy is included in the LICENSE file that
# accompanied this code).
#
# You should have received a copy of the GNU General Public License version
# 2 along with this work; if not, write to the Free Software Foundation,
# Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
# or visit www.oracle.com if you need additional information or have any
# questions.
#
# ----------------------------------------------------------------------------------------------------
import os, shutil, zipfile, re, time, sys, datetime, platform
from os.path import join, exists, dirname, isdir
from argparse import ArgumentParser, REMAINDER
import StringIO
import xml.dom.minidom
import subprocess
import mx
import mx_gate
import mx_unittest
from mx_gate import Task
from mx_unittest import unittest
_suite = mx.suite('jvmci')
JVMCI_VERSION = 9
"""
Top level directory of the JDK source workspace.
"""
_jdkSourceRoot = dirname(_suite.dir)
_JVMCI_JDK_TAG = 'jvmci'
_minVersion = mx.VersionSpec('1.9')
# max version (first _unsupported_ version)
_untilVersion = None
_jvmciModes = {
'hosted' : ['-XX:+UnlockExperimentalVMOptions', '-XX:+EnableJVMCI'],
'jit' : ['-XX:+UnlockExperimentalVMOptions', '-XX:+EnableJVMCI', '-XX:+UseJVMCICompiler'],
'disabled' : []
}
# TODO: can optimized be built without overriding release build?
_jdkDebugLevels = ['release', 'fastdebug', 'slowdebug']
# TODO: add client once/if it can be built on 64-bit platforms
_jdkJvmVariants = ['server', 'client']
"""
Translation table from mx_jvmci:8 --vmbuild values to mx_jvmci:9 --jdk-debug-level values.
"""
_legacyVmbuilds = {
'product' : 'release',
'debug' : 'slowdebug'
}
"""
Translates a mx_jvmci:8 --vmbuild value to a mx_jvmci:9 --jdk-debug-level value.
"""
def _translateLegacyDebugLevel(debugLevel):
return _legacyVmbuilds.get(debugLevel, debugLevel)
"""
Translation table from mx_jvmci:8 --vm values to mx_jvmci:9 (--jdk-jvm-variant, --jvmci-mode) tuples.
"""
_legacyVms = {
'jvmci' : ('server', 'jit')
}
"""
A VM configuration composed of a JDK debug level, JVM variant and a JVMCI mode.
This is also a context manager that can be used with the 'with' statement to set/change
a VM configuration within a dynamic scope. For example:
    with VM(debugLevel='fastdebug'):
        dacapo(['pmd'])
"""
class VM:
def __init__(self, jvmVariant=None, debugLevel=None, jvmciMode=None):
self.update(jvmVariant, debugLevel, jvmciMode)
def update(self, jvmVariant=None, debugLevel=None, jvmciMode=None):
if jvmVariant in _legacyVms:
# Backwards compatibility for mx_jvmci:8 API
jvmVariant, newJvmciMode = _legacyVms[jvmVariant]
if jvmciMode is not None and jvmciMode != newJvmciMode:
mx.abort('JVM variant "' + jvmVariant + '" implies JVMCI mode "' + newJvmciMode +
'" which conflicts with explicitly specified JVMCI mode of "' + jvmciMode + '"')
jvmciMode = newJvmciMode
debugLevel = _translateLegacyDebugLevel(debugLevel)
assert jvmVariant is None or jvmVariant in _jdkJvmVariants, jvmVariant
assert debugLevel is None or debugLevel in _jdkDebugLevels, debugLevel
assert jvmciMode is None or jvmciMode in _jvmciModes, jvmciMode
self.jvmVariant = jvmVariant or _vm.jvmVariant
self.debugLevel = debugLevel or _vm.debugLevel
self.jvmciMode = jvmciMode or _vm.jvmciMode
def __enter__(self):
global _vm
self.previousVm = _vm
_vm = self
def __exit__(self, exc_type, exc_value, traceback):
global _vm
_vm = self.previousVm
_vm = VM(jvmVariant=_jdkJvmVariants[0], debugLevel=_jdkDebugLevels[0], jvmciMode='hosted')
def get_vm():
"""
Gets the configured VM.
"""
return _vm
def relativeVmLibDirInJdk():
mxos = mx.get_os()
if mxos == 'darwin':
return join('lib')
if mxos == 'windows' or mxos == 'cygwin':
return join('bin')
return join('lib', mx.get_arch())
def isJVMCIEnabled(vm):
assert vm in _jdkJvmVariants
return True
def _makehelp():
return subprocess.check_output([mx.gmake_cmd(), 'help'], cwd=_jdkSourceRoot)
def _runmake(args):
"""run the JDK make process
To build hotspot and import it into the JDK: "mx make hotspot import-hotspot"
{0}"""
jdkBuildDir = _get_jdk_build_dir()
if not exists(jdkBuildDir):
# JDK9 must be bootstrapped with a JDK8
compliance = mx.JavaCompliance('8')
jdk8 = mx.get_jdk(compliance.exactMatch, versionDescription=compliance.value)
        cmd = ['sh', 'configure', '--with-debug-level=' + _vm.debugLevel, '--with-native-debug-symbols=external', '--disable-precompiled-headers', '--with-jvm-features=graal',
               '--with-jvm-variants=' + _vm.jvmVariant, '--disable-warnings-as-errors', '--with-boot-jdk=' + jdk8.home]
mx.run(cmd, cwd=_jdkSourceRoot)
cmd = [mx.gmake_cmd(), 'CONF=' + _vm.debugLevel]
if mx.get_opts().verbose:
cmd.append('LOG=debug')
cmd.extend(args)
if mx.get_opts().use_jdk_image and 'images' not in args:
cmd.append('images')
if not mx.get_opts().verbose:
mx.log('--------------- make execution ----------------------')
mx.log('Working directory: ' + _jdkSourceRoot)
mx.log('Command line: ' + ' '.join(cmd))
mx.log('-----------------------------------------------------')
mx.run(cmd, cwd=_jdkSourceRoot)
def _runmultimake(args):
"""run the JDK make process for one or more configurations"""
jvmVariantsDefault = ','.join(_jdkJvmVariants)
debugLevelsDefault = ','.join(_jdkDebugLevels)
parser = ArgumentParser(prog='mx multimake')
parser.add_argument('--jdk-jvm-variants', '--vms', help='a comma separated list of VMs to build (default: ' + jvmVariantsDefault + ')', metavar='<args>', default=jvmVariantsDefault)
parser.add_argument('--jdk-debug-levels', '--builds', help='a comma separated list of JDK debug levels (default: ' + debugLevelsDefault + ')', metavar='<args>', default=debugLevelsDefault)
parser.add_argument('-n', '--no-check', action='store_true', help='omit running "java -version" after each build')
select = parser.add_mutually_exclusive_group()
select.add_argument('-c', '--console', action='store_true', help='send build output to console instead of log files')
select.add_argument('-d', '--output-dir', help='directory for log files instead of current working directory', default=os.getcwd(), metavar='<dir>')
args = parser.parse_args(args)
jvmVariants = args.jdk_jvm_variants.split(',')
debugLevels = [_translateLegacyDebugLevel(dl) for dl in args.jdk_debug_levels.split(',')]
allStart = time.time()
for jvmVariant in jvmVariants:
for debugLevel in debugLevels:
if not args.console:
logFile = join(mx.ensure_dir_exists(args.output_dir), jvmVariant + '-' + debugLevel + '.log')
log = open(logFile, 'wb')
start = time.time()
mx.log('BEGIN: ' + jvmVariant + '-' + debugLevel + '\t(see: ' + logFile + ')')
verbose = ['-v'] if mx.get_opts().verbose else []
# Run as subprocess so that output can be directed to a file
cmd = [sys.executable, '-u', mx.__file__] + verbose + ['--jdk-jvm-variant=' + jvmVariant, '--jdk-debug-level=' + debugLevel, 'make']
mx.logv("executing command: " + str(cmd))
subprocess.check_call(cmd, cwd=_suite.dir, stdout=log, stderr=subprocess.STDOUT)
duration = datetime.timedelta(seconds=time.time() - start)
mx.log('END: ' + jvmVariant + '-' + debugLevel + '\t[' + str(duration) + ']')
else:
with VM(jvmVariant=jvmVariant, debugLevel=debugLevel):
_runmake([])
if not args.no_check:
with VM(jvmciMode='jit'):
run_vm(['-XX:-BootstrapJVMCI', '-version'])
allDuration = datetime.timedelta(seconds=time.time() - allStart)
mx.log('TOTAL TIME: ' + '[' + str(allDuration) + ']')
class HotSpotProject(mx.NativeProject):
"""
Defines a NativeProject representing the HotSpot binaries built via make.
"""
def __init__(self, suite, name, deps, workingSets, **args):
assert name == 'hotspot'
mx.NativeProject.__init__(self, suite, name, "", [], deps, workingSets, None, None, join(suite.mxDir, name))
def eclipse_config_up_to_date(self, configZip):
# Assume that any change to this module might imply changes to the generated IDE files
if configZip.isOlderThan(__file__):
return False
for _, source in self._get_eclipse_settings_sources().iteritems():
if configZip.isOlderThan(source):
return False
return True
def _get_eclipse_settings_sources(self):
"""
Gets a dictionary from the name of an Eclipse settings file to
the file providing its generated content.
"""
if not hasattr(self, '_eclipse_settings'):
esdict = {}
templateSettingsDir = join(self.dir, 'templates', 'eclipse', 'settings')
if exists(templateSettingsDir):
for name in os.listdir(templateSettingsDir):
source = join(templateSettingsDir, name)
esdict[name] = source
self._eclipse_settings = esdict
return self._eclipse_settings
def _eclipseinit(self, files=None, libFiles=None):
"""
Generates an Eclipse project for each HotSpot build configuration.
"""
roots = [
'ASSEMBLY_EXCEPTION',
'LICENSE',
'README',
'THIRD_PARTY_README',
'agent',
'make',
'src',
'test'
]
for jvmVariant in _jdkJvmVariants:
for debugLevel in _jdkDebugLevels:
name = jvmVariant + '-' + debugLevel
eclProjectDir = join(self.dir, 'eclipse', name)
mx.ensure_dir_exists(eclProjectDir)
out = mx.XMLDoc()
out.open('projectDescription')
out.element('name', data='hotspot:' + name)
out.element('comment', data='')
out.element('projects', data='')
out.open('buildSpec')
out.open('buildCommand')
out.element('name', data='org.eclipse.cdt.managedbuilder.core.ScannerConfigBuilder')
out.element('triggers', data='full,incremental')
out.element('arguments', data='')
out.close('buildCommand')
out.close('buildSpec')
out.open('natures')
out.element('nature', data='org.eclipse.cdt.core.cnature')
out.element('nature', data='org.eclipse.cdt.core.ccnature')
out.element('nature', data='org.eclipse.cdt.managedbuilder.core.managedBuildNature')
out.element('nature', data='org.eclipse.cdt.managedbuilder.core.ScannerConfigNature')
out.close('natures')
if roots:
out.open('linkedResources')
for r in roots:
f = join(_suite.dir, r)
out.open('link')
out.element('name', data=r)
out.element('type', data='2' if isdir(f) else '1')
out.element('locationURI', data=mx.get_eclipse_project_rel_locationURI(f, eclProjectDir))
out.close('link')
out.open('link')
out.element('name', data='generated')
out.element('type', data='2')
generated = join(_get_hotspot_build_dir(jvmVariant, debugLevel), 'generated')
out.element('locationURI', data=mx.get_eclipse_project_rel_locationURI(generated, eclProjectDir))
out.close('link')
out.close('linkedResources')
out.close('projectDescription')
projectFile = join(eclProjectDir, '.project')
mx.update_file(projectFile, out.xml(indent='\t', newl='\n'))
if files:
files.append(projectFile)
cprojectTemplate = join(self.dir, 'templates', 'eclipse', 'cproject')
cprojectFile = join(eclProjectDir, '.cproject')
with open(cprojectTemplate) as f:
content = f.read()
mx.update_file(cprojectFile, content)
if files:
files.append(cprojectFile)
settingsDir = join(eclProjectDir, ".settings")
mx.ensure_dir_exists(settingsDir)
for name, source in self._get_eclipse_settings_sources().iteritems():
out = StringIO.StringIO()
print >> out, '# GENERATED -- DO NOT EDIT'
print >> out, '# Source:', source
with open(source) as f:
print >> out, f.read()
content = out.getvalue()
mx.update_file(join(settingsDir, name), content)
if files:
files.append(join(settingsDir, name))
def getBuildTask(self, args):
return JDKBuildTask(self, args, _vm.debugLevel, _vm.jvmVariant)
class JDKBuildTask(mx.NativeBuildTask):
def __init__(self, project, args, debugLevel, jvmVariant):
mx.NativeBuildTask.__init__(self, args, project)
self.jvmVariant = jvmVariant
self.debugLevel = debugLevel
def __str__(self):
return 'Building JDK[{}, {}]'.format(self.debugLevel, self.jvmVariant)
def build(self):
if mx.get_opts().use_jdk_image:
_runmake(['images'])
else:
_runmake([])
self._newestOutput = None
def clean(self, forBuild=False):
if forBuild: # Let make handle incremental builds
return
if exists(_get_jdk_build_dir(self.debugLevel)):
_runmake(['clean'])
self._newestOutput = None
# Backwards compatibility for mx_jvmci:8 API
def buildvms(args):
_runmultimake(args)
def run_vm(args, vm=None, nonZeroIsFatal=True, out=None, err=None, cwd=None, timeout=None, debugLevel=None, vmbuild=None):
"""run a Java program by executing the java executable in a JVMCI JDK"""
jdkTag = mx.get_jdk_option().tag
if jdkTag and jdkTag != _JVMCI_JDK_TAG:
mx.abort('The "--jdk" option must have the tag "' + _JVMCI_JDK_TAG + '" when running a command requiring a JVMCI VM')
jdk = get_jvmci_jdk(debugLevel=debugLevel or _translateLegacyDebugLevel(vmbuild))
return jdk.run_java(args, nonZeroIsFatal=nonZeroIsFatal, out=out, err=err, cwd=cwd, timeout=timeout)
def _unittest_vm_launcher(vmArgs, mainClass, mainClassArgs):
run_vm(vmArgs + [mainClass] + mainClassArgs)
mx_unittest.set_vm_launcher('JVMCI VM launcher', _unittest_vm_launcher)
def _jvmci_gate_runner(args, tasks):
# Build release server VM now so we can run the unit tests
with Task('BuildHotSpotJVMCIHosted: release', tasks) as t:
if t: _runmultimake(['--jdk-jvm-variants', 'server', '--jdk-debug-levels', 'release'])
# Run unit tests in hosted mode
with VM(jvmVariant='server', debugLevel='release', jvmciMode='hosted'):
with Task('JVMCI UnitTests: hosted-release', tasks) as t:
if t: unittest(['--suite', 'jvmci', '--enable-timing', '--verbose', '--fail-fast'])
# Build the other VM flavors
with Task('BuildHotSpotJVMCIOthers: fastdebug', tasks) as t:
if t: _runmultimake(['--jdk-jvm-variants', 'server', '--jdk-debug-levels', 'fastdebug'])
with Task('CleanAndBuildIdealGraphVisualizer', tasks, disableJacoco=True) as t:
if t and platform.processor() != 'sparc':
buildxml = mx._cygpathU2W(join(_suite.dir, 'src', 'share', 'tools', 'IdealGraphVisualizer', 'build.xml'))
mx.run(['ant', '-f', buildxml, '-q', 'clean', 'build'], env=_igvBuildEnv())
mx_gate.add_gate_runner(_suite, _jvmci_gate_runner)
mx_gate.add_gate_argument('-g', '--only-build-jvmci', action='store_false', dest='buildNonJVMCI', help='only build the JVMCI VM')
def _igvJdk():
v8u20 = mx.VersionSpec("1.8.0_20")
v8u40 = mx.VersionSpec("1.8.0_40")
v8 = mx.VersionSpec("1.8")
def _igvJdkVersionCheck(version):
return version >= v8 and (version < v8u20 or version >= v8u40)
return mx.get_jdk(_igvJdkVersionCheck, versionDescription='>= 1.8 and < 1.8.0u20 or >= 1.8.0u40', purpose="building & running IGV").home
def _igvBuildEnv():
# When the http_proxy environment variable is set, convert it to the proxy settings that ant needs
env = dict(os.environ)
proxy = os.environ.get('http_proxy')
if not (proxy is None) and len(proxy) > 0:
if '://' in proxy:
# Remove the http:// prefix (or any other protocol prefix)
proxy = proxy.split('://', 1)[1]
# Separate proxy server name and port number
proxyName, proxyPort = proxy.split(':', 1)
proxyEnv = '-DproxyHost="' + proxyName + '" -DproxyPort=' + proxyPort
env['ANT_OPTS'] = proxyEnv
env['JAVA_HOME'] = _igvJdk()
return env
def igv(args):
"""run the Ideal Graph Visualizer"""
logFile = '.ideal_graph_visualizer.log'
with open(join(_suite.dir, logFile), 'w') as fp:
mx.logv('[Ideal Graph Visualizer log is in ' + fp.name + ']')
nbplatform = join(_suite.dir, 'src', 'share', 'tools', 'IdealGraphVisualizer', 'nbplatform')
# Remove NetBeans platform if it is earlier than the current supported version
if exists(nbplatform):
updateTrackingFile = join(nbplatform, 'platform', 'update_tracking', 'org-netbeans-core.xml')
if not exists(updateTrackingFile):
mx.log('Could not find \'' + updateTrackingFile + '\', removing NetBeans platform')
shutil.rmtree(nbplatform)
else:
dom = xml.dom.minidom.parse(updateTrackingFile)
currentVersion = mx.VersionSpec(dom.getElementsByTagName('module_version')[0].getAttribute('specification_version'))
supportedVersion = mx.VersionSpec('3.43.1')
if currentVersion < supportedVersion:
mx.log('Replacing NetBeans platform version ' + str(currentVersion) + ' with version ' + str(supportedVersion))
shutil.rmtree(nbplatform)
elif supportedVersion < currentVersion:
mx.log('Supported NetBeans version in igv command should be updated to ' + str(currentVersion))
if not exists(nbplatform):
mx.logv('[This execution may take a while as the NetBeans platform needs to be downloaded]')
env = _igvBuildEnv()
# make the jar for Batik 1.7 available.
env['IGV_BATIK_JAR'] = mx.library('BATIK').get_path(True)
if mx.run(['ant', '-f', mx._cygpathU2W(join(_suite.dir, 'src', 'share', 'tools', 'IdealGraphVisualizer', 'build.xml')), '-l', mx._cygpathU2W(fp.name), 'run'], env=env, nonZeroIsFatal=False):
mx.abort("IGV ant build & launch failed. Check '" + logFile + "'. You can also try to delete 'src/share/tools/IdealGraphVisualizer/nbplatform'.")
def c1visualizer(args):
"""run the Cl Compiler Visualizer"""
libpath = join(_suite.dir, 'lib')
if mx.get_os() == 'windows':
executable = join(libpath, 'c1visualizer', 'bin', 'c1visualizer.exe')
else:
executable = join(libpath, 'c1visualizer', 'bin', 'c1visualizer')
    # Check whether the current C1Visualizer installation is up-to-date
if exists(executable) and not exists(mx.library('C1VISUALIZER_DIST').get_path(resolve=False)):
mx.log('Updating C1Visualizer')
shutil.rmtree(join(libpath, 'c1visualizer'))
archive = mx.library('C1VISUALIZER_DIST').get_path(resolve=True)
if not exists(executable):
zf = zipfile.ZipFile(archive, 'r')
zf.extractall(libpath)
if not exists(executable):
mx.abort('C1Visualizer binary does not exist: ' + executable)
if mx.get_os() != 'windows':
        # Make sure that execution is allowed. The zip file does not always specify that correctly
os.chmod(executable, 0777)
mx.run([executable])
def hsdis(args, copyToDir=None):
"""download the hsdis library
This is needed to support HotSpot's assembly dumping features.
By default it downloads the Intel syntax version, use the 'att' argument to install AT&T syntax."""
flavor = 'intel'
if 'att' in args:
flavor = 'att'
if mx.get_arch() == "sparcv9":
flavor = "sparcv9"
lib = mx.add_lib_suffix('hsdis-' + mx.get_arch())
path = join(_suite.dir, 'lib', lib)
sha1s = {
'att/hsdis-amd64.dll' : 'bcbd535a9568b5075ab41e96205e26a2bac64f72',
'att/hsdis-amd64.so' : '58919ba085d4ef7a513f25bae75e7e54ee73c049',
'intel/hsdis-amd64.dll' : '6a388372cdd5fe905c1a26ced614334e405d1f30',
'intel/hsdis-amd64.so' : '844ed9ffed64fe9599638f29a8450c50140e3192',
'intel/hsdis-amd64.dylib' : 'fdb13ef0d7d23d93dacaae9c98837bea0d4fc5a2',
'sparcv9/hsdis-sparcv9.so': '970640a9af0bd63641f9063c11275b371a59ee60',
}
flavoredLib = flavor + "/" + lib
if flavoredLib not in sha1s:
mx.logv("hsdis not supported on this plattform or architecture")
return
if not exists(path):
sha1 = sha1s[flavoredLib]
sha1path = path + '.sha1'
mx.download_file_with_sha1('hsdis', path, ['https://lafo.ssw.uni-linz.ac.at/pub/hsdis/' + flavoredLib], sha1, sha1path, True, True, sources=False)
if copyToDir is not None and exists(copyToDir):
shutil.copy(path, copyToDir)
def hcfdis(args):
"""disassemble HexCodeFiles embedded in text files
Run a tool over the input files to convert all embedded HexCodeFiles
to a disassembled format."""
parser = ArgumentParser(prog='mx hcfdis')
parser.add_argument('-m', '--map', help='address to symbol map applied to disassembler output')
parser.add_argument('files', nargs=REMAINDER, metavar='files...')
args = parser.parse_args(args)
path = mx.library('HCFDIS').get_path(resolve=True)
mx.run_java(['-cp', path, 'com.oracle.max.hcfdis.HexCodeFileDis'] + args.files)
if args.map is not None:
addressRE = re.compile(r'0[xX]([A-Fa-f0-9]+)')
with open(args.map) as fp:
lines = fp.read().splitlines()
symbols = dict()
for l in lines:
addressAndSymbol = l.split(' ', 1)
if len(addressAndSymbol) == 2:
address, symbol = addressAndSymbol
if address.startswith('0x'):
address = long(address, 16)
symbols[address] = symbol
for f in args.files:
with open(f) as fp:
lines = fp.read().splitlines()
updated = False
for i in range(0, len(lines)):
l = lines[i]
for m in addressRE.finditer(l):
sval = m.group(0)
val = long(sval, 16)
sym = symbols.get(val)
if sym:
l = l.replace(sval, sym)
updated = True
lines[i] = l
if updated:
mx.log('updating ' + f)
with open('new_' + f, "w") as fp:
for l in lines:
print >> fp, l
def jol(args):
"""Java Object Layout"""
joljar = mx.library('JOL_INTERNALS').get_path(resolve=True)
candidates = mx.findclass(args, logToConsole=False, matcher=lambda s, classname: s == classname or classname.endswith('.' + s) or classname.endswith('$' + s))
if len(candidates) > 0:
candidates = mx.select_items(sorted(candidates))
else:
# mx.findclass can be mistaken, don't give up yet
candidates = args
run_vm(['-javaagent:' + joljar, '-cp', os.pathsep.join([mx.classpath(), joljar]), "org.openjdk.jol.MainObjectInternals"] + candidates)
def _get_openjdk_os():
# See: common/autoconf/platform.m4
os = mx.get_os()
if 'darwin' in os:
os = 'macosx'
elif 'linux' in os:
os = 'linux'
elif 'solaris' in os:
os = 'solaris'
elif 'cygwin' in os or 'mingw' in os:
os = 'windows'
return os
def _get_openjdk_cpu():
cpu = mx.get_arch()
if cpu == 'amd64':
cpu = 'x86_64'
elif cpu == 'sparcv9':
cpu = 'sparcv9'
return cpu
def _get_openjdk_os_cpu():
return _get_openjdk_os() + '-' + _get_openjdk_cpu()
def _get_jdk_build_dir(debugLevel=None):
"""
Gets the directory into which the JDK is built. This directory contains
the exploded JDK under jdk/ and the JDK image under images/jdk/.
"""
if debugLevel is None:
debugLevel = _vm.debugLevel
name = '{}-{}-{}-{}'.format(_get_openjdk_os_cpu(), 'normal', _vm.jvmVariant, debugLevel)
return join(dirname(_suite.dir), 'build', name)
_jvmci_bootclasspath_prepends = []
def _get_hotspot_build_dir(jvmVariant=None, debugLevel=None):
"""
Gets the directory in which a particular HotSpot configuration is built
(e.g., <JDK_REPO_ROOT>/build/macosx-x86_64-normal-server-release/hotspot/bsd_amd64_compiler2)
"""
if jvmVariant is None:
jvmVariant = _vm.jvmVariant
os = mx.get_os()
if os == 'darwin':
os = 'bsd'
arch = mx.get_arch()
buildname = {'client': 'compiler1', 'server': 'compiler2'}.get(jvmVariant, jvmVariant)
name = '{}_{}_{}'.format(os, arch, buildname)
return join(_get_jdk_build_dir(debugLevel=debugLevel), 'hotspot', name)
class JVMCI9JDKConfig(mx.JDKConfig):
def __init__(self, debugLevel):
self.debugLevel = debugLevel
jdkBuildDir = _get_jdk_build_dir(debugLevel)
jdkDir = join(jdkBuildDir, 'images', 'jdk') if mx.get_opts().use_jdk_image else join(jdkBuildDir, 'jdk')
mx.JDKConfig.__init__(self, jdkDir, tag=_JVMCI_JDK_TAG)
def parseVmArgs(self, args, addDefaultArgs=True):
args = mx.expand_project_in_args(args, insitu=False)
jacocoArgs = mx_gate.get_jacoco_agent_args()
if jacocoArgs:
args = jacocoArgs + args
args = ['-Xbootclasspath/p:' + dep.classpath_repr() for dep in _jvmci_bootclasspath_prepends] + args
# Remove JVMCI jars from class path. They are only necessary when
# compiling with a javac from JDK8 or earlier.
cpIndex, cp = mx.find_classpath_arg(args)
if cp:
excluded = frozenset([dist.path for dist in _suite.dists])
cp = os.pathsep.join([e for e in cp.split(os.pathsep) if e not in excluded])
args[cpIndex] = cp
if '-version' in args:
ignoredArgs = args[args.index('-version') + 1:]
if len(ignoredArgs) > 0:
mx.log("Warning: The following options will be ignored by the vm because they come after the '-version' argument: " + ' '.join(ignoredArgs))
return self.processArgs(args, addDefaultArgs=addDefaultArgs)
# Overrides JDKConfig
def run_java(self, args, vm=None, nonZeroIsFatal=True, out=None, err=None, cwd=None, timeout=None, env=None, addDefaultArgs=True):
if vm is None:
vm = 'server'
args = self.parseVmArgs(args, addDefaultArgs=addDefaultArgs)
jvmciModeArgs = _jvmciModes[_vm.jvmciMode]
cmd = [self.java] + ['-' + vm] + jvmciModeArgs + args
return mx.run(cmd, nonZeroIsFatal=nonZeroIsFatal, out=out, err=err, cwd=cwd)
"""
The dict of JVMCI JDKs indexed by debug-level names.
"""
_jvmci_jdks = {}
def get_jvmci_jdk(debugLevel=None):
"""
Gets the JVMCI JDK corresponding to 'debugLevel'.
"""
if not debugLevel:
debugLevel = _vm.debugLevel
jdk = _jvmci_jdks.get(debugLevel)
if jdk is None:
try:
jdk = JVMCI9JDKConfig(debugLevel)
except mx.JDKConfigException as e:
jdkBuildDir = _get_jdk_build_dir(debugLevel)
msg = 'Error with the JDK built into {}:\n{}\nTry (re)building it with: mx --jdk-debug-level={} make'
if mx.get_opts().use_jdk_image:
msg += ' images'
mx.abort(msg.format(jdkBuildDir, e.message, debugLevel))
_jvmci_jdks[debugLevel] = jdk
return jdk
class JVMCI9JDKFactory(mx.JDKFactory):
def getJDKConfig(self):
jdk = get_jvmci_jdk(_vm.debugLevel)
return jdk
def description(self):
return "JVMCI JDK"
mx.update_commands(_suite, {
'make': [_runmake, '[args...]', _makehelp],
'multimake': [_runmultimake, '[options]'],
'c1visualizer' : [c1visualizer, ''],
'hsdis': [hsdis, '[att]'],
'hcfdis': [hcfdis, ''],
'igv' : [igv, ''],
'jol' : [jol, ''],
'vm': [run_vm, '[-options] class [args...]'],
})
mx.add_argument('-M', '--jvmci-mode', action='store', choices=sorted(_jvmciModes.viewkeys()), help='the JVMCI mode to build/run (default: ' + _vm.jvmciMode + ')')
mx.add_argument('--jdk-jvm-variant', '--vm', action='store', choices=_jdkJvmVariants + sorted(_legacyVms.viewkeys()), help='the JVM variant type to build/run (default: ' + _vm.jvmVariant + ')')
mx.add_argument('--jdk-debug-level', '--vmbuild', action='store', choices=_jdkDebugLevels + sorted(_legacyVmbuilds.viewkeys()), help='the JDK debug level to build/run (default: ' + _vm.debugLevel + ')')
mx.add_argument('-I', '--use-jdk-image', action='store_true', help='build/run JDK image instead of exploded JDK')
mx.addJDKFactory(_JVMCI_JDK_TAG, mx.JavaCompliance('9'), JVMCI9JDKFactory())
def mx_post_parse_cmd_line(opts):
mx.set_java_command_default_jdk_tag(_JVMCI_JDK_TAG)
jdkTag = mx.get_jdk_option().tag
jvmVariant = None
debugLevel = None
jvmciMode = None
if opts.jdk_jvm_variant is not None:
jvmVariant = opts.jdk_jvm_variant
if jdkTag and jdkTag != _JVMCI_JDK_TAG:
mx.warn('Ignoring "--jdk-jvm-variant" option as "--jdk" tag is not "' + _JVMCI_JDK_TAG + '"')
if opts.jdk_debug_level is not None:
debugLevel = _translateLegacyDebugLevel(opts.jdk_debug_level)
if jdkTag and jdkTag != _JVMCI_JDK_TAG:
mx.warn('Ignoring "--jdk-debug-level" option as "--jdk" tag is not "' + _JVMCI_JDK_TAG + '"')
if opts.jvmci_mode is not None:
jvmciMode = opts.jvmci_mode
if jdkTag and jdkTag != _JVMCI_JDK_TAG:
mx.warn('Ignoring "--jvmci-mode" option as "--jdk" tag is not "' + _JVMCI_JDK_TAG + '"')
_vm.update(jvmVariant, debugLevel, jvmciMode)
|
# -*- coding: utf-8 -*-
# Copyright (c) 2012-2015, Anima Istanbul
#
# This module is part of anima-tools and is released under the BSD 2
# License: http://www.opensource.org/licenses/BSD-2-Clause
"""Relax Vertices by Erkan Ozgur Yilmaz
Relaxes vertices without shrinking/expanding the geometry.
Version History
---------------
v0.1.1
- script works with all kind of components
v0.1.0
- initial working version
"""
import pymel.core as pm
__version__ = "0.1.1"
def relax():
# check the selection
selection = pm.ls(sl=1)
if not selection:
return
# convert the selection to vertices
verts = pm.ls(pm.polyListComponentConversion(tv=1))
if not verts:
return
shape = verts[0].node()
# duplicate the geometry
dup = shape.duplicate()[0]
dup_shape = dup.getShape()
# now relax the selected vertices of the original shape
pm.polyAverageVertex(verts, i=1, ch=0)
# now transfer point positions using transferAttributes
ta_node = pm.transferAttributes(
dup,
verts,
transferPositions=True,
transferNormals=False,
transferUVs=False,
transferColors=False,
sampleSpace=0,
searchMethod=0,
flipUVs=False,
colorBorders=1,
)
# delete history
pm.delete(shape, ch=1)
# delete the duplicate surface
pm.delete(dup)
# reselect selection
pm.select(selection)
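if __name__ == "__main__":
    # Usage sketch: run inside a Maya session with some mesh components
    # selected; relax() smooths them in place without shrinking the mesh.
    relax()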
|
from subprocess import Popen, PIPE
from gfbi_core.git_model import GitModel
from gfbi_core.editable_git_model import EditableGitModel
from gfbi_core.util import Index, Timezone
from git.objects.util import altz_to_utctz_str
from datetime import datetime
import os
import time
REPOSITORY_NAME = "/tmp/tests_git"
AVAILABLE_CHOICES = ['hexsha',
'authored_date', 'committed_date',
'author_name', 'committer_name',
'author_email', 'committer_email',
'message']
def run_command(command):
# print "Running: %s" % command
process = Popen(command, shell=True, stdout=PIPE)
process.wait()
def create_repository():
run_command('rm -rf ' + REPOSITORY_NAME)
run_command('mkdir ' + REPOSITORY_NAME)
os.chdir(REPOSITORY_NAME)
run_command('git init')
run_command('echo init > init_file')
run_command('git add init_file')
    commit(
"Initial commit",
author_name="Wallace Henry",
author_email="[email protected]",
author_date="Sun Mar 11 12:00:00 2012 +0100",
committer_name="Wallace Henry",
committer_email="[email protected]",
committer_date="Sun Mar 11 12:00:00 2012 +0100"
)
run_command('git branch wallace_branch')
def populate_repository():
for value in xrange(20, 25):
command = 'echo "%d" > %d' % (value, value)
run_command(command)
run_command('git add %d' % value)
commit(
str(value),
author_name="Wallace Henry",
author_email="[email protected]",
author_date="Sun Mar 11 12:10:%d 2012 +0100" % value,
committer_name="Wallace Henry",
committer_email="[email protected]",
committer_date="Sun Mar 11 12:10:%d 2012 +0100" % value
)
run_command('git checkout wallace_branch')
for value in xrange(20, 25):
command = 'echo "branch_%d" > branch_%d' % (value, value)
run_command(command)
run_command('git add branch_%d' % value)
commit(
"branch_" + str(value),
author_name="Wallace Henry",
author_email="[email protected]",
author_date="Sun Mar 11 12:20:%d 2012 +0100" % value,
committer_name="Wallace Henry",
committer_email="[email protected]",
committer_date="Sun Mar 11 12:20:%d 2012 +0100" % value
)
def commit(message,
author_name=None, author_email=None, author_date=None,
committer_name=None, committer_email=None, committer_date=None):
command = ''
if author_name:
command += 'GIT_AUTHOR_NAME="%s" ' % author_name
if author_email:
command += 'GIT_AUTHOR_EMAIL="%s" ' % author_email
if author_date:
command += 'GIT_AUTHOR_DATE="%s" ' % author_date
if committer_name:
command += 'GIT_COMMITTER_NAME="%s" ' % committer_name
if committer_email:
command += 'GIT_COMMITTER_EMAIL="%s" ' % committer_email
if committer_date:
command += 'GIT_COMMITTER_DATE="%s" ' % committer_date
command += 'git commit -m "%s"' % message
run_command(command)
def write_and_wait(model):
model.write()
total_wait = 0
time.sleep(1)
while not model.is_finished_writing():
if total_wait > 15:
raise Exception("We waited too long for the writing process")
time.sleep(1)
total_wait += 1
def pretty_print_from_row(model, row):
line = ""
for col in xrange(len(AVAILABLE_CHOICES)):
value = model.data(Index(row, col))
if col == 0:
value = value[:7]
        elif col in (1, 2):
            tmstp, tz = value
            _dt = datetime.fromtimestamp(float(tmstp)).replace(tzinfo=tz)
            date_format = "%d/%m/%Y %H:%M:%S"
            value = _dt.strftime(date_format)
line += "[" + str(value) + "] "
return line
def test_field_has_changed(test_row, test_column, test_value):
our_model = EditableGitModel(REPOSITORY_NAME)
our_model.populate()
# print "====================================== Before the write"
# for row in xrange(our_model.row_count()):
# print pretty_print_from_row(our_model, row)
# print "======================================================="
index = Index(test_row, test_column)
our_model.start_history_event()
our_model.set_data(index, test_value)
write_and_wait(our_model)
new_model = GitModel(REPOSITORY_NAME)
new_model.populate()
new_model_value = new_model.data(index)
# print "======================================= After the write"
# for row in xrange(our_model.row_count()):
# print pretty_print_from_row(new_model, row)
# print "======================================================="
if test_column in (1, 2):
assert new_model_value[0] == test_value[0] and \
new_model_value[1].tzname("") == test_value[1].tzname(""), \
"The %s field wasn't changed correctly" % \
AVAILABLE_CHOICES[test_column]
else:
assert new_model_value == test_value, \
"The %s field wasn't changed correctly" % \
AVAILABLE_CHOICES[test_column]
for row in xrange(our_model.row_count()):
for column in xrange(1, our_model.column_count()):
if (row == test_row and column == test_column):
continue
index = Index(row, column)
our_value = our_model.data(index)
new_value = new_model.data(index)
if column in (1, 2):
our_value, tz = our_value
# print our_value, tz.tzname(None)
new_value, tz = new_value
# print new_value, tz.tzname(None)
assert our_value == new_value, \
"Something else has change: (%d, %d)\ncolumn:%s\n" % \
(row, column, AVAILABLE_CHOICES[column]) + \
"%s\n%s\n%s\n" % \
(AVAILABLE_CHOICES,
pretty_print_from_row(our_model, row),
pretty_print_from_row(new_model, row)) + \
"%s // %s" % (our_value, new_value)
def test_cant_apply_changed_repo():
a_model = EditableGitModel(REPOSITORY_NAME)
a_model.populate()
os.chdir(REPOSITORY_NAME)
run_command("echo new > new_file")
run_command("git add new_file")
command = commit("new input")
msg_col = a_model.get_column("message")
index = Index(0, msg_col)
a_model.start_history_event()
orig_msg = a_model.data(index)
a_model.set_data(index, "whatever change")
try:
write_and_wait(a_model)
        write_failed = False
except:
write_failed = True
a_model = EditableGitModel(REPOSITORY_NAME)
a_model.populate()
new_msg = a_model.data(index)
    prev_msg = a_model.data(Index(1, msg_col))
error = "The write didn't fail on a modified repository"
assert (write_failed and
new_msg == "new input\n" and
prev_msg == orig_msg), error
create_repository()
populate_repository()
a_model = EditableGitModel(REPOSITORY_NAME)
a_model.populate()
columns = a_model.get_columns()
print "Test authored date"
authored_date_col = columns.index("authored_date")
test_field_has_changed(2, authored_date_col, (1331465000, Timezone('+0100')))
print "Test name"
author_name_col = columns.index("author_name")
test_field_has_changed(4, author_name_col, "JeanJean")
print "Test message"
message_col = columns.index("message")
test_field_has_changed(3, message_col, "Boing boing boing")
print "Test can't apply changed"
test_cant_apply_changed_repo()
|
from setuptools import setup, find_packages
import os
setup(
name='django-project-templates',
version = "0.11",
description="Paster templates for creating Django projects",
author='Gareth Rushgrove',
author_email='[email protected]',
url='http://github.com/garethr/django-project-templates/',
packages = find_packages('src'),
package_dir = {'':'src'},
license = "MIT",
keywords = "django paster",
install_requires=[
'setuptools',
'PasteScript>=1.3',
'Cheetah',
'fabric',
],
include_package_data=True,
zip_safe=False,
entry_points="""
[paste.paster_create_template]
django_project=django_project_templates.pastertemplates:DjangoProjectTemplate
django_cruisecontrol_project=django_project_templates.pastertemplates:DjangoCruiseControlTemplate
newsapps_project=django_project_templates.pastertemplates:NewsAppsProjectTemplate
""",
classifiers = [
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
'Operating System :: OS Independent',
'Programming Language :: Python',
],
)
|
# -*- coding: UTF-8 -*-
# Copyright 2014-2015 Rumma & Ko Ltd
# License: BSD (see file COPYING for details)
"""
For each client who has a non-empty `card_number` we create a
corresponding image file in the local media directory in order to
avoid a runtime error when printing documents that include this
picture (`eid_content.odt` and `file_sheet.odt`).
"""
import os
import shutil
from lino.api import rt, dd
from lino_xl.lib.beid.mixins import get_image_path
def objects():
Client = rt.models.pcsw.Client
for obj in Client.objects.exclude(card_number=''):
fn = obj.get_image_path()
if not os.path.exists(fn):
src = get_image_path(None)
#dd.logger.info("20150531 copy %s to %s...", src, fn)
rt.makedirs_if_missing(os.path.dirname(fn))
shutil.copyfile(src, fn)
yield obj # actually there's no need to save obj, but we
# must make this function a generator
# else:
# dd.logger.info("20150531 %s exists", fn)
|
from __future__ import unicode_literals
def _(text):
return text.strip('\n')
USAGE = _("""
Usage: rock [--help] [--env=ENV] [--path=PATH] [--runtime=RUNTIME] command
""")
HELP = _("""
--help show help message
--verbose show script while running
--dry-run show script without running
--version show version
project:
--env=ENV set env
--path=PATH set path
--runtime=RUNTIME set runtime
commands:
build run build
test run tests
run run in environment
clean clean project files
other commands:
config show project configuration
env show evaluable environment variables
init generates project skeleton
runtime show installed runtimes
""")
CONFIG_USAGE = _("""
Usage: rock config [--format=FORMAT]
""")
CONFIG_HELP = _("""
--help show help message
--format set output format (json, yaml)
""")
ENV_USAGE = _("""
Usage: rock env
""")
ENV_HELP = _("""
--help show help message
""")
RUNTIME_USAGE = _("""
Usage: rock runtime
""")
RUNTIME_HELP = _("""
--help show help message
""")
|
import unittest2
from unittest2 import TestCase
from rational.rational import gcd
from rational.rational import Rational
__author__ = 'Daniel Dinu'
class TestRational(TestCase):
def setUp(self):
self.known_values = [(1, 2, 1, 2),
(-1, 2, -1, 2),
(1, -2, -1, 2),
(-1, -2, 1, 2),
(2, 4, 1, 2),
(-2, 4, -1, 2),
(2, -4, -1, 2),
(-2, -4, 1, 2),
(2, 1, 2, 1),
(-2, 1, -2, 1),
(2, -1, -2, 1),
(-2, -1, 2, 1),
(4, 2, 2, 1),
(-4, 2, -2, 1),
(4, -2, -2, 1),
(-4, -2, 2, 1)]
def tearDown(self):
del self.known_values
def test_constructor_numerator_type_error(self):
self.assertRaises(TypeError, Rational, 1.2)
def test_constructor_denominator_type_error(self):
self.assertRaises(TypeError, Rational, 1, 1.2)
def test_constructor_denominator_zero_division_error(self):
numerator = 1
denominator = 0
with self.subTest(numerator=numerator, denominator=denominator):
self.assertRaises(ZeroDivisionError, Rational, numerator, denominator)
numerator = Rational()
denominator = 0
with self.subTest(numerator=numerator, denominator=denominator):
self.assertRaises(ZeroDivisionError, Rational, numerator, denominator)
numerator = Rational()
denominator = Rational()
with self.subTest(numerator=numerator, denominator=denominator):
self.assertRaises(ZeroDivisionError, Rational, numerator, denominator)
def test_constructor_numerator(self):
for numerator, denominator, expected_numerator, expected_denominator in self.known_values:
with self.subTest(numerator=numerator, denominator=denominator):
r = Rational(numerator, denominator)
self.assertEqual(expected_numerator, r.numerator)
def test_constructor_denominator(self):
for numerator, denominator, expected_numerator, expected_denominator in self.known_values:
with self.subTest(numerator=numerator, denominator=denominator):
r = Rational(numerator, denominator)
self.assertEqual(expected_denominator, r.denominator)
def test_constructor_transform(self):
test_constructor_transform_values = [(Rational(1, 2), Rational(1, 2), Rational(1)),
(Rational(1, 2), Rational(1, 4), Rational(2)),
(Rational(1, 4), Rational(1, 2), Rational(1, 2)),
(Rational(-1, 2), Rational(1, 2), Rational(-1)),
(Rational(-1, 2), Rational(1, 4), Rational(-2)),
(Rational(-1, 4), Rational(1, 2), Rational(-1, 2)),
(Rational(1, 2), Rational(-1, 2), Rational(-1)),
(Rational(1, 2), Rational(-1, 4), Rational(-2)),
(Rational(1, 4), Rational(-1, 2), Rational(-1, 2)),
(Rational(-1, 2), Rational(-1, 2), Rational(1)),
(Rational(-1, 2), Rational(-1, 4), Rational(2)),
(Rational(-1, 4), Rational(-1, 2), Rational(1, 2))]
for a, b, expected_result in test_constructor_transform_values:
with self.subTest(a=a, b=b, expected_result=expected_result):
computed_result = Rational(a, b)
self.assertEqual(expected_result, computed_result)
def test_transform(self):
test_transform_values = [(1, 2, (1, 2)),
(2, 4, (2, 4)),
(-1, 2, (-1, 2)),
(-2, 4, (-2, 4)),
(1, -2, (1, -2)),
(2, -4, (2, -4)),
(-1, -2, (-1, -2)),
(-2, -4, (-2, -4)),
(Rational(1, 2), 1, (1, 2)),
(Rational(1, 2), 2, (1, 4)),
(Rational(-1, 2), 1, (-1, 2)),
(Rational(-1, 2), 2, (-1, 4)),
(Rational(1, -2), 1, (-1, 2)),
(Rational(1, -2), 2, (-1, 4)),
(Rational(1, 2), -1, (1, -2)),
(Rational(1, 2), -2, (1, -4)),
(Rational(-1, 2), -1, (-1, -2)),
(Rational(-1, 2), -2, (-1, -4)),
(1, Rational(1, 2), (2, 1)),
(2, Rational(1, 2), (4, 1)),
(-1, Rational(1, 2), (-2, 1)),
(-2, Rational(1, 2), (-4, 1)),
(1, Rational(-1, 2), (2, -1)),
(2, Rational(-1, 2), (4, -1)),
(1, Rational(1, -2), (2, -1)),
(2, Rational(1, -2), (4, -1)),
(-1, Rational(1, 2), (-2, 1)),
(-2, Rational(1, 2), (-4, 1)),
(Rational(1, 2), Rational(1, 2), (2, 2)),
(Rational(1, 2), Rational(1, 4), (4, 2)),
(Rational(1, 4), Rational(1, 2), (2, 4)),
(Rational(-1, 2), Rational(1, 2), (-2, 2)),
(Rational(-1, 2), Rational(1, 4), (-4, 2)),
(Rational(-1, 4), Rational(1, 2), (-2, 4)),
(Rational(1, 2), Rational(-1, 2), (2, -2)),
(Rational(1, 2), Rational(-1, 4), (4, -2)),
(Rational(1, 4), Rational(-1, 2), (2, -4)),
(Rational(-1, 2), Rational(-1, 2), (-2, -2)),
(Rational(-1, 2), Rational(-1, 4), (-4, -2)),
(Rational(-1, 4), Rational(-1, 2), (-2, -4))]
for a, b, expected_result in test_transform_values:
with self.subTest(a=a, b=b, expected_result=expected_result):
computed_result = Rational.transform(a, b)
self.assertEqual(expected_result, computed_result)
def test_gcd(self):
gcd_test_values = [(0, 0, 0),
(0, 1, 1),
(1, 0, 1),
(0, -1, -1),
(-1, 0, -1),
(2, 4, 2),
(-2, 4, 2),
(-2, -4, -2),
(42, 30, 6),
(42, -30, -6),
(-42, -30, -6)]
for a, b, expected_gcd in gcd_test_values:
with self.subTest(a=a, b=b, expected_gcd=expected_gcd):
computed_gcd = gcd(a, b)
self.assertEqual(expected_gcd, computed_gcd)
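    # A hedged sketch (not necessarily rational.rational's actual gcd) that
    # matches the sign convention exercised above: Python's % takes the sign
    # of its divisor, so plain Euclid yields a result whose sign follows b,
    # or a when b == 0 (e.g. gcd(42, -30) == -6):
    #
    #     def gcd(a, b):
    #         while b:
    #             a, b = b, a % b
    #         return a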
def test_value(self):
for numerator, denominator, expected_numerator, expected_denominator in self.known_values:
with self.subTest(numerator=numerator, denominator=denominator):
r = Rational(numerator, denominator)
expected_value = expected_numerator / (expected_denominator * 1.0)
self.assertEqual(expected_value, r.value)
def test_quotient(self):
for numerator, denominator, expected_numerator, expected_denominator in self.known_values:
with self.subTest(numerator=numerator, denominator=denominator):
r = Rational(numerator, denominator)
expected_value = expected_numerator // expected_denominator
self.assertEqual(expected_value, r.quotient)
def test_remainder(self):
for numerator, denominator, expected_numerator, expected_denominator in self.known_values:
with self.subTest(numerator=numerator, denominator=denominator):
r = Rational(numerator, denominator)
expected_value = expected_numerator % expected_denominator
self.assertEqual(expected_value, r.remainder)
def test_str(self):
for numerator, denominator, expected_numerator, expected_denominator in self.known_values:
with self.subTest(numerator=numerator, denominator=denominator):
r = Rational(numerator, denominator)
if 1 == expected_denominator:
expected_str = '{0}'.format(expected_numerator)
else:
expected_str = '{0}/{1}'.format(expected_numerator, expected_denominator)
self.assertEqual(expected_str, str(r))
def test_repr(self):
for numerator, denominator, expected_numerator, expected_denominator in self.known_values:
with self.subTest(numerator=numerator, denominator=denominator):
r = Rational(numerator, denominator)
expected_repr = 'Rational({0}, {1})'.format(expected_numerator, expected_denominator)
self.assertEqual(expected_repr, repr(r))
def test_float(self):
for numerator, denominator, expected_numerator, expected_denominator in self.known_values:
with self.subTest(numerator=numerator, denominator=denominator):
r = Rational(numerator, denominator)
expected_value = expected_numerator / (expected_denominator * 1.0)
self.assertEqual(expected_value, float(r))
def test_int(self):
for numerator, denominator, expected_numerator, expected_denominator in self.known_values:
with self.subTest(numerator=numerator, denominator=denominator):
r = Rational(numerator, denominator)
expected_value = expected_numerator // expected_denominator
self.assertEqual(expected_value, int(r))
def test_neg(self):
for numerator, denominator, expected_numerator, expected_denominator in self.known_values:
with self.subTest(numerator=numerator, denominator=denominator):
r = -Rational(numerator, denominator)
self.assertEqual(-expected_numerator, r.numerator)
self.assertEqual(expected_denominator, r.denominator)
def test_pos(self):
for numerator, denominator, expected_numerator, expected_denominator in self.known_values:
with self.subTest(numerator=numerator, denominator=denominator):
r = +Rational(numerator, denominator)
self.assertEqual(expected_numerator, r.numerator)
self.assertEqual(expected_denominator, r.denominator)
def test_abs(self):
for numerator, denominator, expected_numerator, expected_denominator in self.known_values:
with self.subTest(numerator=numerator, denominator=denominator):
r = abs(Rational(numerator, denominator))
self.assertEqual(abs(expected_numerator), r.numerator)
self.assertEqual(expected_denominator, r.denominator)
def test_invert_zero_division_error(self):
r = Rational(0)
with self.assertRaises(ZeroDivisionError):
~r
def test_invert(self):
for numerator, denominator, expected_numerator, expected_denominator in self.known_values:
with self.subTest(numerator=numerator, denominator=denominator):
r = ~Rational(numerator, denominator)
if 0 > expected_numerator:
expected_inverted_numerator = -expected_denominator
expected_inverted_denominator = -expected_numerator
else:
expected_inverted_numerator = expected_denominator
expected_inverted_denominator = expected_numerator
self.assertEqual(expected_inverted_numerator, r.numerator)
self.assertEqual(expected_inverted_denominator, r.denominator)
def test_lt(self):
true_test_cases = [(Rational(-1, 2), Rational()),
(Rational(), Rational(1, 2)),
(Rational(-1, 2), Rational(1, 2)),
(Rational(1, 4), Rational(1, 2)),
(Rational(-1, 2), Rational(-1, 4))]
false_test_cases = [(Rational(), Rational()),
(Rational(1, 2), Rational()),
(Rational(), Rational(-1, 2)),
(Rational(-1, 2), Rational(1, -2)),
(Rational(1, 2), Rational(2, 4)),
(Rational(1, 2), Rational(-1, 2)),
(Rational(1, 2), Rational(1, 4)),
(Rational(-1, 4), Rational(-1, 2))]
for r1, r2 in true_test_cases:
with self.subTest(r1=r1, r2=r2, result=True):
self.assertTrue(r1 < r2)
for r1, r2 in false_test_cases:
with self.subTest(r1=r1, r2=r2, result=False):
self.assertFalse(r1 < r2)
def test_le(self):
true_test_cases = [(Rational(), Rational()),
(Rational(-1, 2), Rational()),
(Rational(), Rational(1, 2)),
(Rational(-1, 2), Rational(1, -2)),
(Rational(1, 2), Rational(2, 4)),
(Rational(-1, 2), Rational(1, 2)),
(Rational(1, 4), Rational(1, 2)),
(Rational(-1, 2), Rational(-1, 4))]
false_test_cases = [(Rational(1, 2), Rational()),
(Rational(), Rational(-1, 2)),
(Rational(1, 2), Rational(-1, 2)),
(Rational(1, 2), Rational(1, 4)),
(Rational(-1, 4), Rational(-1, 2))]
for r1, r2 in true_test_cases:
with self.subTest(r1=r1, r2=r2, result=True):
self.assertTrue(r1 <= r2)
for r1, r2 in false_test_cases:
with self.subTest(r1=r1, r2=r2, result=False):
self.assertFalse(r1 <= r2)
def test_eq(self):
true_test_cases = [(Rational(), Rational()),
(Rational(-1, 2), Rational(1, -2)),
(Rational(1, 2), Rational(2, 4))]
false_test_cases = [(Rational(-1, 2), Rational()),
(Rational(), Rational(1, 2)),
(Rational(1, 2), Rational()),
(Rational(), Rational(-1, 2)),
(Rational(-1, 2), Rational(1, 2)),
(Rational(1, 4), Rational(1, 2)),
(Rational(-1, 2), Rational(-1, 4)),
(Rational(1, 2), Rational(-1, 2)),
(Rational(1, 2), Rational(1, 4)),
(Rational(-1, 4), Rational(-1, 2))]
for r1, r2 in true_test_cases:
with self.subTest(r1=r1, r2=r2, result=True):
self.assertTrue(r1 == r2)
for r1, r2 in false_test_cases:
with self.subTest(r1=r1, r2=r2, result=False):
self.assertFalse(r1 == r2)
def test_ne(self):
true_test_cases = [(Rational(-1, 2), Rational()),
(Rational(), Rational(1, 2)),
(Rational(1, 2), Rational()),
(Rational(), Rational(-1, 2)),
(Rational(-1, 2), Rational(1, 2)),
(Rational(1, 4), Rational(1, 2)),
(Rational(-1, 2), Rational(-1, 4)),
(Rational(1, 2), Rational(-1, 2)),
(Rational(1, 2), Rational(1, 4)),
(Rational(-1, 4), Rational(-1, 2))]
false_test_cases = [(Rational(), Rational()),
(Rational(-1, 2), Rational(1, -2)),
(Rational(1, 2), Rational(2, 4))]
for r1, r2 in true_test_cases:
with self.subTest(r1=r1, r2=r2, result=True):
self.assertTrue(r1 != r2)
for r1, r2 in false_test_cases:
with self.subTest(r1=r1, r2=r2, result=False):
self.assertFalse(r1 != r2)
def test_ge(self):
true_test_cases = [(Rational(), Rational()),
(Rational(1, 2), Rational()),
(Rational(), Rational(-1, 2)),
(Rational(-1, 2), Rational(1, -2)),
(Rational(1, 2), Rational(2, 4)),
(Rational(1, 2), Rational(-1, 2)),
(Rational(1, 2), Rational(1, 4)),
(Rational(-1, 4), Rational(-1, 2))]
false_test_cases = [(Rational(-1, 2), Rational()),
(Rational(), Rational(1, 2)),
(Rational(-1, 2), Rational(1, 2)),
(Rational(1, 4), Rational(1, 2)),
(Rational(-1, 2), Rational(-1, 4))]
for r1, r2 in true_test_cases:
with self.subTest(r1=r1, r2=r2, result=True):
self.assertTrue(r1 >= r2)
for r1, r2 in false_test_cases:
with self.subTest(r1=r1, r2=r2, result=False):
self.assertFalse(r1 >= r2)
def test_gt(self):
true_test_cases = [(Rational(1, 2), Rational()),
(Rational(), Rational(-1, 2)),
(Rational(1, 2), Rational(-1, 2)),
(Rational(1, 2), Rational(1, 4)),
(Rational(-1, 4), Rational(-1, 2))]
false_test_cases = [(Rational(), Rational()),
(Rational(-1, 2), Rational()),
(Rational(), Rational(1, 2)),
(Rational(-1, 2), Rational(1, -2)),
(Rational(1, 2), Rational(2, 4)),
(Rational(-1, 2), Rational(1, 2)),
(Rational(1, 4), Rational(1, 2)),
(Rational(-1, 2), Rational(-1, 4))]
for r1, r2 in true_test_cases:
with self.subTest(r1=r1, r2=r2, result=True):
self.assertTrue(r1 > r2)
for r1, r2 in false_test_cases:
with self.subTest(r1=r1, r2=r2, result=False):
self.assertFalse(r1 > r2)
def test_add_type_error(self):
r = Rational()
with self.assertRaises(TypeError):
r + 1.2
def test_add(self):
add_test_values = [(Rational(), Rational(1, 2), Rational(1, 2)),
(Rational(1, 2), Rational(), Rational(1, 2)),
(Rational(1, 2), Rational(1, 2), Rational(1, 1)),
(Rational(1, 2), Rational(-1, 2), Rational(0, 1)),
(Rational(1, 4), Rational(2, 4), Rational(3, 4)),
(Rational(1, 4), Rational(3, 4), Rational(1, 1)),
(Rational(1, 4), Rational(-3, 4), Rational(-1, 2)),
(Rational(1, 2), Rational(1, 3), Rational(5, 6)),
(Rational(2), -1, Rational(1)),
(Rational(2), 1, Rational(3))]
for r1, r2, expected_r in add_test_values:
with self.subTest(r1=r1, r2=r2, expected_r=expected_r):
r = r1 + r2
self.assertEqual(expected_r, r)
def test_sub_type_error(self):
r = Rational()
with self.assertRaises(TypeError):
r - 1.2
def test_sub(self):
sub_test_values = [(Rational(), Rational(1, 2), Rational(-1, 2)),
(Rational(1, 2), Rational(), Rational(1, 2)),
(Rational(1, 2), Rational(1, 2), Rational(0, 1)),
(Rational(1, 2), Rational(-1, 2), Rational(1, 1)),
(Rational(1, 4), Rational(2, 4), Rational(-1, 4)),
(Rational(1, 4), Rational(3, 4), Rational(-1, 2)),
(Rational(1, 4), Rational(-3, 4), Rational(1, 1)),
(Rational(1, 2), Rational(1, 3), Rational(1, 6)),
(Rational(2), -1, Rational(3)),
(Rational(2), 1, Rational(1))]
for r1, r2, expected_r in sub_test_values:
with self.subTest(r1=r1, r2=r2, expected_r=expected_r):
r = r1 - r2
self.assertEqual(expected_r, r)
def test_mul_type_error(self):
r = Rational()
with self.assertRaises(TypeError):
r * 1.2
def test_mul(self):
mul_test_values = [(Rational(), Rational(1, 2), Rational()),
(Rational(1, 2), Rational(), Rational()),
(Rational(1, 2), Rational(1, 2), Rational(1, 4)),
(Rational(1, 2), Rational(-1, 2), Rational(-1, 4)),
(Rational(1, 4), Rational(2, 4), Rational(1, 8)),
(Rational(1, 4), Rational(3, 4), Rational(3, 16)),
(Rational(1, 4), Rational(-3, 4), Rational(-3, 16)),
(Rational(1, 2), Rational(1, 3), Rational(1, 6)),
(Rational(2), 1, Rational(2)),
(Rational(2), -1, Rational(-2))]
for r1, r2, expected_r in mul_test_values:
with self.subTest(r1=r1, r2=r2, expected_r=expected_r):
r = r1 * r2
self.assertEqual(expected_r, r)
def test_truediv_zero_division_error(self):
r1 = Rational(1, 2)
r2 = Rational()
with self.assertRaises(ZeroDivisionError):
r1 / r2
def test_truediv_type_error(self):
r = Rational()
with self.assertRaises(TypeError):
r / 1.2
def test_truediv(self):
div_test_values = [(Rational(), Rational(1, 2), Rational()),
(Rational(1, 2), Rational(1, 2), Rational(1, 1)),
(Rational(1, 2), Rational(-1, 2), Rational(-1, 1)),
(Rational(1, 4), Rational(2, 4), Rational(1, 2)),
(Rational(1, 4), Rational(3, 4), Rational(1, 3)),
(Rational(1, 4), Rational(-3, 4), Rational(-1, 3)),
(Rational(1, 2), Rational(1, 3), Rational(3, 2)),
(Rational(2), 1, Rational(2)),
(Rational(2), -1, Rational(-2))]
for r1, r2, expected_r in div_test_values:
with self.subTest(r1=r1, r2=r2, expected_r=expected_r):
r = r1 / r2
self.assertEqual(expected_r, r)
def test_pow_zero_division_error(self):
r = Rational()
for power in range(-3, 0):
with self.subTest(r=r, power=power):
with self.assertRaises(ZeroDivisionError):
r ** power
def test_pow_type_error(self):
r = Rational()
with self.assertRaises(TypeError):
r ** 1.2
def test_pow(self):
pow_test_values = [(Rational(), 0, Rational()),
(Rational(), 1, Rational()),
(Rational(), 2, Rational()),
(Rational(), 3, Rational()),
(Rational(1, 2), -3, Rational(8, 1)),
(Rational(1, 2), -2, Rational(4, 1)),
(Rational(1, 2), -1, Rational(2, 1)),
(Rational(1, 2), 0, Rational(1, 1)),
(Rational(1, 2), 1, Rational(1, 2)),
(Rational(1, 2), 2, Rational(1, 4)),
(Rational(1, 2), 3, Rational(1, 8)),
(Rational(-1, 2), -3, Rational(-8, 1)),
(Rational(-1, 2), -2, Rational(4, 1)),
(Rational(-1, 2), -1, Rational(-2, 1)),
(Rational(-1, 2), 0, Rational(1, 1)),
(Rational(-1, 2), 1, Rational(-1, 2)),
(Rational(-1, 2), 2, Rational(1, 4)),
(Rational(-1, 2), 3, Rational(-1, 8)),
(Rational(1, 3), -3, Rational(27, 1)),
(Rational(1, 3), -2, Rational(9, 1)),
(Rational(1, 3), -1, Rational(3, 1)),
(Rational(1, 3), 0, Rational(1, 1)),
(Rational(1, 3), 1, Rational(1, 3)),
(Rational(1, 3), 2, Rational(1, 9)),
(Rational(1, 3), 3, Rational(1, 27)),
(Rational(-1, 3), -3, Rational(-27, 1)),
(Rational(-1, 3), -2, Rational(9, 1)),
(Rational(-1, 3), -1, Rational(-3, 1)),
(Rational(-1, 3), 0, Rational(1, 1)),
(Rational(-1, 3), 1, Rational(-1, 3)),
(Rational(-1, 3), 2, Rational(1, 9)),
(Rational(-1, 3), 3, Rational(-1, 27))]
for r1, power, expected_r in pow_test_values:
with self.subTest(r1=r1, power=power, expected_r=expected_r):
r = r1 ** power
self.assertEqual(expected_r, r)
def test_radd_type_error(self):
r = Rational()
with self.assertRaises(TypeError):
1.2 + r
def test_radd(self):
radd_test_values = [(1, Rational(1, 2), Rational(3, 2)),
(1, Rational(), Rational(1, 1)),
(-1, Rational(1, 2), Rational(-1, 2)),
(1, Rational(-1, 2), Rational(1, 2)),
(1, Rational(2, 4), Rational(3, 2)),
(1, Rational(3, 4), Rational(7, 4)),
(1, Rational(-3, 4), Rational(1, 4)),
(1, Rational(1, 3), Rational(4, 3))]
for r1, r2, expected_r in radd_test_values:
with self.subTest(r1=r1, r2=r2, expected_r=expected_r):
r = r1 + r2
self.assertEqual(expected_r, r)
def test_rsub_type_error(self):
r = Rational()
with self.assertRaises(TypeError):
1.2 - r
def test_rsub(self):
rsub_test_values = [(1, Rational(1, 2), Rational(1, 2)),
(1, Rational(), Rational(1, 1)),
(-1, Rational(1, 2), Rational(-3, 2)),
(1, Rational(-1, 2), Rational(3, 2)),
(1, Rational(2, 4), Rational(1, 2)),
(1, Rational(3, 4), Rational(1, 4)),
(1, Rational(-3, 4), Rational(7, 4)),
(1, Rational(1, 3), Rational(2, 3))]
for r1, r2, expected_r in rsub_test_values:
with self.subTest(r1=r1, r2=r2, expected_r=expected_r):
r = r1 - r2
self.assertEqual(expected_r, r)
def test_rmul_type_error(self):
r = Rational()
with self.assertRaises(TypeError):
1.2 * r
def test_rmul(self):
rmul_test_values = [(1, Rational(1, 2), Rational(1, 2)),
(1, Rational(), Rational(0, 1)),
(-1, Rational(1, 2), Rational(-1, 2)),
(1, Rational(-1, 2), Rational(-1, 2)),
(1, Rational(2, 4), Rational(1, 2)),
(1, Rational(3, 4), Rational(3, 4)),
(1, Rational(-3, 4), Rational(-3, 4)),
(1, Rational(1, 3), Rational(1, 3))]
for r1, r2, expected_r in rmul_test_values:
with self.subTest(r1=r1, r2=r2, expected_r=expected_r):
r = r1 * r2
self.assertEqual(expected_r, r)
def test_rtruediv_zero_division_error(self):
r = Rational()
with self.assertRaises(ZeroDivisionError):
1 / r
def test_rtruediv_type_error(self):
r = Rational()
with self.assertRaises(TypeError):
1.2 / r
def test_rtruediv(self):
rdiv_test_values = [(1, Rational(1, 2), Rational(2, 1)),
(-1, Rational(1, 2), Rational(-2, 1)),
(1, Rational(-1, 2), Rational(-2, 1)),
(1, Rational(2, 4), Rational(2, 1)),
(1, Rational(3, 4), Rational(4, 3)),
(1, Rational(-3, 4), Rational(-4, 3)),
(1, Rational(1, 3), Rational(3, 1))]
for r1, r2, expected_r in rdiv_test_values:
with self.subTest(r1=r1, r2=r2, expected_r=expected_r):
r = r1 / r2
self.assertEqual(expected_r, r)
def test_rpow_zero_division_error(self):
base = 0
for denominator in range(-3, 0):
power = Rational(1, denominator)
with self.subTest(base=base, power=power):
with self.assertRaises(ZeroDivisionError):
base ** power
def test_rpow_value_error(self):
rpow_test_values = [(-2, Rational(1, 2)),
(-1, Rational(1, 2)),
(-3, Rational(-1, 2)),
(-2, Rational(-1, 2)),
(-1, Rational(-1, 2)),
(-3, Rational(1, 3)),
(-2, Rational(1, 3)),
(-1, Rational(1, 3)),
(-3, Rational(-1, 3)),
(-2, Rational(-1, 3)),
(-1, Rational(-1, 3))]
for base, power in rpow_test_values:
with self.subTest(base=base, power=power):
with self.assertRaises(ValueError):
base ** power
def test_rpow(self):
rpow_test_values = [(0, Rational(), 1),
(1, Rational(), 1),
(2, Rational(), 1),
(3, Rational(), 1),
(0, Rational(1, 2), 0),
(1, Rational(1, 2), 1),
(2, Rational(1, 2), 1.4142135623730951),
(3, Rational(1, 2), 1.7320508075688772),
(1, Rational(-1, 2), 1),
(2, Rational(-1, 2), 0.7071067811865476),
(3, Rational(-1, 2), 0.5773502691896257),
(0, Rational(1, 3), 0),
(1, Rational(1, 3), 1),
(2, Rational(1, 3), 1.2599210498948732),
(3, Rational(1, 3), 1.4422495703074083),
(1, Rational(-1, 3), 1),
(2, Rational(-1, 3), 0.7937005259840998),
(3, Rational(-1, 3), 0.6933612743506348),
(-1, Rational(1), -1),
(-2, Rational(1), -2),
(-1, Rational(-1), -1),
(-2, Rational(-2), 0.25)]
for base, power, expected_power in rpow_test_values:
with self.subTest(base=base, power=power, expected_power=expected_power):
computed_power = base ** power
self.assertAlmostEqual(expected_power, computed_power)
if '__main__' == __name__:
unittest2.main()
|
# -*- coding: utf-8 -*-
# [HARPIA PROJECT]
#
#
# S2i - Intelligent Industrial Systems
# DAS - Automation and Systems Department
# UFSC - Federal University of Santa Catarina
# Copyright: 2006 - 2007 Luis Carlos Dill Junges ([email protected]), Clovis Peruchi Scotti ([email protected]),
# Guilherme Augusto Rutzen ([email protected]), Mathias Erdtmann ([email protected]) and S2i (www.s2i.das.ufsc.br)
# 2007 - 2009 Clovis Peruchi Scotti ([email protected]), S2i (www.s2i.das.ufsc.br)
#
#
# This program is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License version 3, as published
# by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranties of
# MERCHANTABILITY, SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR
# PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
#
# For further information, check the COPYING file distributed with this software.
#
#----------------------------------------------------------------------
from harpia.GladeWindow import GladeWindow
from harpia.amara import binderytools as bt
import gtk
from harpia.s2icommonproperties import S2iCommonProperties
#i18n
import os
import gettext
APP='harpia'
DIR=os.environ['HARPIA_DATA_DIR']+'po'
_ = gettext.gettext
gettext.bindtextdomain(APP, DIR)
gettext.textdomain(APP)
#----------------------------------------------------------------------
class Properties( GladeWindow, S2iCommonProperties ):
#----------------------------------------------------------------------
def __init__( self, PropertiesXML, S2iBlockProperties):
self.m_sDataDir = os.environ['HARPIA_DATA_DIR']
filename = self.m_sDataDir+'glade/sum.glade'
self.m_oPropertiesXML = PropertiesXML
self.m_oS2iBlockProperties = S2iBlockProperties
widget_list = [
'Properties',
'SUMBackgroundColor',
'SUMBorderColor',
'SUMHelpView'
]
handlers = [
'on_sum_cancel_clicked',
'on_sum_confirm_clicked',
'on_SUMBackColorButton_clicked',
'on_SUMBorderColorButton_clicked'
]
top_window = 'Properties'
GladeWindow.__init__(self, filename, top_window, widget_list, handlers)
self.widgets['Properties'].set_icon_from_file(self.m_sDataDir+"images/harpia_ave.png")
#load properties values
        #there are no properties to load
#load border color
self.m_oBorderColor = self.m_oS2iBlockProperties.GetBorderColor()
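        # gtk.gdk.Color expects 16-bit channel values; multiplying by 257
        # (0x0101) maps 8-bit 0-255 exactly onto 0-65535.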
t_nBorderRed = self.m_oBorderColor[0] * 257
t_nBorderGreen = self.m_oBorderColor[1] * 257
t_nBorderBlue = self.m_oBorderColor[2] * 257
t_oBorderColor = gtk.gdk.Color(red=t_nBorderRed,green=t_nBorderGreen,blue=t_nBorderBlue)
self.widgets['SUMBorderColor'].modify_bg(gtk.STATE_NORMAL,t_oBorderColor)
#load block color
self.m_oBackColor = self.m_oS2iBlockProperties.GetBackColor()
t_nBackRed = self.m_oBackColor[0] * 257
t_nBackGreen = self.m_oBackColor[1] * 257
t_nBackBlue = self.m_oBackColor[2] * 257
t_oBackColor = gtk.gdk.Color(red=t_nBackRed,green=t_nBackGreen,blue=t_nBackBlue)
self.widgets['SUMBackgroundColor'].modify_bg(gtk.STATE_NORMAL,t_oBackColor)
#load help text
t_oS2iHelp = bt.bind_file(self.m_sDataDir+"help/sum"+ _("_en.help"))
t_oTextBuffer = gtk.TextBuffer()
t_oTextBuffer.set_text( unicode( str( t_oS2iHelp.help.content) ) )
self.widgets['SUMHelpView'].set_buffer( t_oTextBuffer )
#----------------------------------------------------------------------
def __del__(self):
pass
#----------------------------------------------------------------------
def on_sum_cancel_clicked( self, *args ):
self.widgets['Properties'].destroy()
#----------------------------------------------------------------------
def on_sum_confirm_clicked( self, *args ):
self.m_oS2iBlockProperties.SetBorderColor( self.m_oBorderColor )
self.m_oS2iBlockProperties.SetBackColor( self.m_oBackColor )
self.widgets['Properties'].destroy()
#----------------------------------------------------------------------
def on_SUMBackColorButton_clicked(self,*args):
t_oColor = self.RunColorSelection()
        if t_oColor is not None:
self.widgets['SUMBackgroundColor'].modify_bg(gtk.STATE_NORMAL,t_oColor)
self.m_oBackColor[0] = t_oColor.red / 257
self.m_oBackColor[1] = t_oColor.green / 257
self.m_oBackColor[2] = t_oColor.blue / 257
#----------------------------------------------------------------------
def on_SUMBorderColorButton_clicked(self,*args):
t_oColor = self.RunColorSelection()
        if t_oColor is not None:
self.widgets['SUMBorderColor'].modify_bg(gtk.STATE_NORMAL,t_oColor)
self.m_oBorderColor[0] = t_oColor.red / 257
self.m_oBorderColor[1] = t_oColor.green / 257
self.m_oBorderColor[2] = t_oColor.blue / 257
#----------------------------------------------------------------------
#SumProperties = Properties()
#SumProperties.show( center=0 )
|
# -*- coding: utf-8 -*-
import six
from django.conf import settings
from django.core import validators
from django.db import models
from django.utils.translation import gettext_lazy
from modeltranslation.manager import MultilingualManager
class TestModel(models.Model):
title = models.CharField(gettext_lazy('title'), max_length=255)
text = models.TextField(blank=True, null=True)
url = models.URLField(blank=True, null=True)
email = models.EmailField(blank=True, null=True)
class UniqueNullableModel(models.Model):
title = models.CharField(null=True, unique=True, max_length=255)
# ######### Proxy model testing
class ProxyTestModel(TestModel):
class Meta:
proxy = True
def get_title(self):
return self.title
# ######### Fallback values testing
class FallbackModel(models.Model):
title = models.CharField(gettext_lazy('title'), max_length=255)
text = models.TextField(blank=True, null=True)
url = models.URLField(blank=True, null=True)
email = models.EmailField(blank=True, null=True)
description = models.CharField(max_length=255, null=True)
class FallbackModel2(models.Model):
title = models.CharField(gettext_lazy('title'), max_length=255)
text = models.TextField(blank=True, null=True)
url = models.URLField(blank=True, null=True)
email = models.EmailField(blank=True, null=True)
# ######### File fields testing
class FileFieldsModel(models.Model):
title = models.CharField(gettext_lazy('title'), max_length=255)
file = models.FileField(upload_to='modeltranslation_tests', null=True, blank=True)
file2 = models.FileField(upload_to='modeltranslation_tests')
image = models.ImageField(upload_to='modeltranslation_tests', null=True, blank=True)
# ######### Foreign Key / OneToOneField testing
class NonTranslated(models.Model):
title = models.CharField(gettext_lazy('title'), max_length=255)
class ForeignKeyModel(models.Model):
title = models.CharField(gettext_lazy('title'), max_length=255)
test = models.ForeignKey(
TestModel, null=True, related_name="test_fks", on_delete=models.CASCADE,
)
optional = models.ForeignKey(TestModel, blank=True, null=True, on_delete=models.CASCADE)
hidden = models.ForeignKey(
TestModel, blank=True, null=True, related_name="+", on_delete=models.CASCADE,
)
non = models.ForeignKey(
NonTranslated, blank=True, null=True, related_name="test_fks", on_delete=models.CASCADE,
)
untrans = models.ForeignKey(
TestModel, blank=True, null=True, related_name="test_fks_un", on_delete=models.CASCADE,
)
class OneToOneFieldModel(models.Model):
title = models.CharField(gettext_lazy('title'), max_length=255)
test = models.OneToOneField(
TestModel, null=True, related_name="test_o2o", on_delete=models.CASCADE,
)
optional = models.OneToOneField(TestModel, blank=True, null=True, on_delete=models.CASCADE)
# No hidden option for OneToOne
non = models.OneToOneField(
NonTranslated, blank=True, null=True, related_name="test_o2o", on_delete=models.CASCADE,
)
# ######### Custom fields testing
class OtherFieldsModel(models.Model):
"""
    This class is supposed to include other newly added field types, so that
    adding a newly supported field doesn't require adding a new test model.
"""
# That's rich! PositiveIntegerField is only validated in forms, not in models.
int = models.PositiveIntegerField(default=42, validators=[validators.MinValueValidator(0)])
boolean = models.BooleanField(default=False)
nullboolean = models.NullBooleanField()
csi = models.CommaSeparatedIntegerField(max_length=255)
ip = models.IPAddressField(blank=True, null=True)
float = models.FloatField(blank=True, null=True)
decimal = models.DecimalField(max_digits=5, decimal_places=2, blank=True, null=True)
date = models.DateField(blank=True, null=True)
datetime = models.DateTimeField(blank=True, null=True)
time = models.TimeField(blank=True, null=True)
genericip = models.GenericIPAddressField(blank=True, null=True)
class FancyDescriptor(object):
"""
    Stupid demo descriptor that stores an int in the database and returns a string of that length on get.
"""
def __init__(self, field):
self.field = field
def __get__(self, instance, owner):
length = instance.__dict__[self.field.name]
if length is None:
return ''
return 'a' * length
def __set__(self, obj, value):
if isinstance(value, six.integer_types):
obj.__dict__[self.field.name] = value
elif isinstance(value, six.string_types):
obj.__dict__[self.field.name] = len(value)
else:
obj.__dict__[self.field.name] = 0
class FancyField(models.PositiveIntegerField):
def __init__(self, *args, **kwargs):
kwargs.setdefault('default', '')
super(FancyField, self).__init__(*args, **kwargs)
def contribute_to_class(self, cls, name):
super(FancyField, self).contribute_to_class(cls, name)
setattr(cls, self.name, FancyDescriptor(self))
def pre_save(self, model_instance, add):
value = super(FancyField, self).pre_save(model_instance, add)
        # At this point the value should come back through the descriptor as a string
        assert isinstance(value, six.string_types)
        # We store the string's length (an int) in the database
return len(value)
class DescriptorModel(models.Model):
normal = FancyField()
trans = FancyField()
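
# A hedged usage sketch (illustrative only): assigning a string to a FancyField
# stores its length, and reading the attribute back yields that many 'a'
# characters:
#
#     m = DescriptorModel(normal='abc')  # descriptor stores 3
#     m.normal                           # -> 'aaa'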
# ######### Multitable inheritance testing
class MultitableModelA(models.Model):
titlea = models.CharField(gettext_lazy('title a'), max_length=255)
class MultitableModelB(MultitableModelA):
titleb = models.CharField(gettext_lazy('title b'), max_length=255)
class MultitableModelC(MultitableModelB):
titlec = models.CharField(gettext_lazy('title c'), max_length=255)
class MultitableModelD(MultitableModelB):
titled = models.CharField(gettext_lazy('title d'), max_length=255)
# ######### Abstract inheritance testing
class AbstractModelA(models.Model):
titlea = models.CharField(gettext_lazy('title a'), max_length=255)
def __init__(self, *args, **kwargs):
super(AbstractModelA, self).__init__(*args, **kwargs)
self.titlea = 'title_a'
class Meta:
abstract = True
class AbstractModelB(AbstractModelA):
titleb = models.CharField(gettext_lazy('title b'), max_length=255)
def __init__(self, *args, **kwargs):
super(AbstractModelB, self).__init__(*args, **kwargs)
self.titleb = 'title_b'
# ######### Fields inheritance testing
class Slugged(models.Model):
slug = models.CharField(max_length=255)
class Meta:
abstract = True
class MetaData(models.Model):
keywords = models.CharField(max_length=255)
class Meta:
abstract = True
class Displayable(Slugged, MetaData):
class Meta:
abstract = True
class BasePage(Displayable):
class Meta:
abstract = True
class Page(BasePage):
title = models.CharField(max_length=255)
class RichText(models.Model):
content = models.CharField(max_length=255)
class Meta:
abstract = True
class RichTextPage(Page, RichText):
pass
# ######### Admin testing
class DataModel(models.Model):
data = models.TextField(blank=True, null=True)
class GroupFieldsetsModel(models.Model):
title = models.CharField(max_length=255)
text = models.TextField(blank=True, null=True)
email = models.EmailField(blank=True, null=True)
class NameModel(models.Model):
firstname = models.CharField(max_length=50)
lastname = models.CharField(max_length=50)
age = models.CharField(max_length=50)
slug = models.SlugField(max_length=100)
slug2 = models.SlugField(max_length=100)
# ######### Integration testing
class ThirdPartyModel(models.Model):
name = models.CharField(max_length=20)
class ThirdPartyRegisteredModel(models.Model):
name = models.CharField(max_length=20)
# ######### Manager testing
class FilteredManager(MultilingualManager):
def get_queryset(self):
        # always return an empty queryset
return super(FilteredManager, self).get_queryset().filter(pk=None)
class FilteredTestModel(models.Model):
title = models.CharField(gettext_lazy('title'), max_length=255)
objects = FilteredManager()
class ForeignKeyFilteredModel(models.Model):
title = models.CharField(gettext_lazy('title'), max_length=255)
test = models.ForeignKey(
FilteredTestModel, null=True, related_name="test_fks", on_delete=models.CASCADE,
)
class ManagerTestModel(models.Model):
title = models.CharField(gettext_lazy('title'), max_length=255)
visits = models.IntegerField(gettext_lazy('visits'), default=0)
description = models.CharField(max_length=255, null=True)
class Meta:
ordering = ('-visits',)
class CustomManager(models.Manager):
def get_queryset(self):
sup = super(CustomManager, self)
queryset = sup.get_queryset() if hasattr(sup, 'get_queryset') else sup.get_query_set()
return queryset.filter(title__contains='a').exclude(description__contains='x')
get_query_set = get_queryset
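    # alias kept for Django < 1.6, where the manager hook was named get_query_set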
def custom_qs(self):
sup = super(CustomManager, self)
queryset = sup.get_queryset() if hasattr(sup, 'get_queryset') else sup.get_query_set()
return queryset
def foo(self):
return 'bar'
class CustomManagerTestModel(models.Model):
title = models.CharField(gettext_lazy('title'), max_length=255)
description = models.CharField(max_length=255, null=True, db_column='xyz')
objects = CustomManager()
another_mgr_name = CustomManager()
class CustomQuerySet(models.query.QuerySet):
pass
class CustomManager2(models.Manager):
def get_queryset(self):
return CustomQuerySet(self.model, using=self._db)
get_query_set = get_queryset
class CustomManager2TestModel(models.Model):
title = models.CharField(gettext_lazy('title'), max_length=255)
objects = CustomManager2()
class CustomManagerAbstract(models.Manager):
pass
class CustomManagerBaseModel(models.Model):
needs_translation = models.BooleanField(default=False)
objects = models.Manager() # ensures objects is the default manager
translations = CustomManagerAbstract()
class Meta:
abstract = True
class CustomManagerChildTestModel(CustomManagerBaseModel):
title = models.CharField(gettext_lazy('title'), max_length=255)
objects = CustomManager2()
class PlainChildTestModel(CustomManagerBaseModel):
title = models.CharField(gettext_lazy('title'), max_length=255)
# ######### Required fields testing
class RequiredModel(models.Model):
non_req = models.CharField(max_length=10, blank=True)
req = models.CharField(max_length=10)
req_reg = models.CharField(max_length=10)
req_en_reg = models.CharField(max_length=10)
# ######### Name collision registration testing
class ConflictModel(models.Model):
title = models.CharField(gettext_lazy('title'), max_length=255)
title_de = models.IntegerField()
class AbstractConflictModelA(models.Model):
title_de = models.IntegerField()
class Meta:
abstract = True
class AbstractConflictModelB(AbstractConflictModelA):
title = models.CharField(gettext_lazy('title'), max_length=255)
class MultitableConflictModelA(models.Model):
title_de = models.IntegerField()
class MultitableConflictModelB(MultitableConflictModelA):
title = models.CharField(gettext_lazy('title'), max_length=255)
# ######### Complex M2M with abstract classes and custom managers
class CustomQuerySetX(models.query.QuerySet):
pass
class CustomManagerX(models.Manager):
def get_queryset(self):
return CustomQuerySetX(self.model, using=self._db)
get_query_set = get_queryset
class AbstractBaseModelX(models.Model):
name = models.CharField(max_length=255)
objects = CustomManagerX()
class Meta:
abstract = True
class AbstractModelX(AbstractBaseModelX):
class Meta:
abstract = True
class ModelX(AbstractModelX):
pass
class AbstractModelXY(models.Model):
model_x = models.ForeignKey('ModelX', on_delete=models.CASCADE)
model_y = models.ForeignKey('ModelY', on_delete=models.CASCADE)
class Meta:
abstract = True
class ModelXY(AbstractModelXY):
pass
class CustomManagerY(models.Manager):
pass
class AbstractModelY(models.Model):
title = models.CharField(max_length=255)
xs = models.ManyToManyField('ModelX', through='ModelXY')
objects = CustomManagerY()
class Meta:
abstract = True
class ModelY(AbstractModelY):
pass
# Non-abstract base models whose Manager is not allowed to be overwritten
if "django.contrib.auth" in settings.INSTALLED_APPS:
from django.contrib.auth.models import Permission
class InheritedPermission(Permission):
translated_var = models.CharField(max_length=255)
|
#
# Copyright (c) 2015-2017 EpiData, Inc.
#
from datetime import datetime, timedelta
from epidata.context import ec
from epidata.sensor_measurement import SensorMeasurement
from epidata.utils import ConvertUtils
from epidata.analytics import *
import numpy as np
from pyspark.sql import Row
from pyspark.sql import Column
from pyspark.tests import ReusedPySparkTestCase as PySparkTestCase
import unittest
json_string = []
json_string.append('{"meas_name": "Temperature", "company": "company0", "site": "site0", "station": "station-1", "test_name": "Temperature_Test", "meas_status": "PASS", "ts": 1505970910038, "event": "Device-1", "meas_unit": "deg F", "meas_description": "", "meas_datatype": "double", "meas_value": 64.76, "meas_lower_limit": -30.2, "meas_upper_limit": 200.2, "sensor": "tester-5"}')
json_string.append('{"meas_name": "Temperature", "company": "company0", "site": "site0", "station": "station-1", "test_name": "Temperature_Test", "meas_status": "PASS", "ts": 1505970910038, "event": "Device-1", "meas_unit": "deg F", "meas_description": "", "meas_datatype": "double", "meas_value": 64.76, "meas_lower_limit": -30, "meas_upper_limit": 200, "sensor": "tester-6"}')
json_string.append('{"meas_name": "Temperature", "company": "company0", "site": "site0", "station": "station-1", "test_name": "Temperature_Test", "meas_status": "PASS", "ts": 1505970910038, "event": "Device-1", "meas_unit": "deg F", "meas_description": "", "meas_datatype": "double", "meas_value": 64.76, "sensor": "tester-8"}')
json_string.append('{"meas_name": "Temperature", "company": "company0", "site": "site0", "station": "station-1", "test_name": "Temperature_Test", "meas_status": "PASS", "ts": 1505970910038, "event": "Device-1", "meas_unit": "deg F", "meas_description": "", "meas_datatype": "double", "meas_value": 64, "meas_lower_limit": -30.2, "meas_upper_limit": 200.2, "sensor": "tester-9"}')
json_string.append('{"meas_name": "Temperature", "company": "company0", "site": "site0", "station": "station-1", "test_name": "Temperature_Test", "meas_status": "PASS", "ts": 1505970910038, "event": "Device-1", "meas_unit": "deg F", "meas_description": "", "meas_datatype": "double", "meas_value": 64, "meas_lower_limit": -30, "meas_upper_limit": 200, "sensor": "tester-10"}')
json_string.append('{"meas_name": "Temperature", "company": "company0", "site": "site0", "station": "station-1", "test_name": "Temperature_Test", "meas_status": "PASS", "ts": 1505970910038, "event": "Device-1", "meas_unit": "deg F", "meas_description": "", "meas_datatype": "double", "meas_value": 64, "sensor": "tester-12"}')
json_string.append('{"meas_name": "Temperature", "company": "company0", "site": "site0", "station": "station-1", "test_name": "Temperature_Test", "meas_status": "PASS", "ts": 1505970910038, "event": "Device-1", "meas_unit": "deg F", "meas_description": "", "meas_datatype": "double", "meas_value": "64", "meas_lower_limit": -30.2, "meas_upper_limit": 200.2, "sensor": "tester-13"}')
json_string.append('{"meas_name": "Temperature", "company": "company0", "site": "site0", "station": "station-1", "test_name": "Temperature_Test", "meas_status": "PASS", "ts": 1505970910038, "event": "Device-1", "meas_unit": "deg F", "meas_description": "", "meas_datatype": "double", "meas_value": "64", "meas_lower_limit": -30, "meas_upper_limit": 200, "sensor": "tester-14"}')
json_string.append('{"meas_name": "Temperature", "company": "company0", "site": "site0", "station": "station-1", "test_name": "Temperature_Test", "meas_status": "PASS", "ts": 1505970910038, "event": "Device-1", "meas_unit": "deg F", "meas_description": "", "meas_datatype": "double", "meas_value": "64", "sensor": "tester-16"}')
json_string.append('{"meas_name": "Temperature", "company": "company0", "site": "site0", "station": "station-1", "test_name": "Temperature_Test", "meas_status": "PASS", "ts": 1505970910038, "event": "Device-1", "meas_unit": "deg F", "meas_description": "", "meas_datatype": "double", "meas_lower_limit": -30.2, "meas_upper_limit": 200.2, "sensor": "tester-17"}')
json_string.append('{"meas_name": "Temperature", "company": "company0", "site": "site0", "station": "station-1", "test_name": "Temperature_Test", "meas_status": "PASS", "ts": 1505970910038, "event": "Device-1", "meas_unit": "deg F", "meas_description": "", "meas_datatype": "double", "meas_lower_limit": -30, "meas_upper_limit": 200, "sensor": "tester-18"}')
json_string.append('{"meas_name": "Temperature", "company": "company0", "site": "site0", "station": "station-1", "test_name": "Temperature_Test", "meas_status": "PASS", "ts": 1505970910038, "event": "Device-1", "meas_unit": "deg F", "meas_description": "", "meas_datatype": "double", "sensor": "tester-20"}')
json_string.append('{"meas_name": "Temperature", "company": "company0", "site": "site0", "station": "station-1", "test_name": "Temperature_Test", "meas_status": "PASS", "ts": 1505970910038, "event": "Device-1", "meas_unit": "deg F", "meas_description": "", "meas_datatype": "long","meas_value": 64.5,"meas_lower_limit": -30.2, "meas_upper_limit": 200.2, "sensor": "tester-21"}')
json_string.append('{"meas_name": "Temperature", "company": "company0", "site": "site0", "station": "station-1", "test_name": "Temperature_Test", "meas_status": "PASS", "ts": 1505970910038, "event": "Device-1", "meas_unit": "deg F", "meas_description": "", "meas_datatype": "long","meas_value": 64.5,"meas_lower_limit": -30, "meas_upper_limit": 200, "sensor": "tester-22"}')
json_string.append('{"meas_name": "Temperature", "company": "company0", "site": "site0", "station": "station-1", "test_name": "Temperature_Test", "meas_status": "PASS", "ts": 1505970910038, "event": "Device-1", "meas_unit": "deg F", "meas_description": "", "meas_datatype": "long","meas_value": 64.5,"sensor": "tester-24"}')
json_string.append('{"meas_name": "Temperature", "company": "company0", "site": "site0", "station": "station-1", "test_name": "Temperature_Test", "meas_status": "PASS", "ts": 1505970910038, "event": "Device-1", "meas_unit": "deg F", "meas_description": "", "meas_datatype": "long","meas_value": 64,"meas_lower_limit": -30.2, "meas_upper_limit": 200.2, "sensor": "tester-25"}')
json_string.append('{"meas_name": "Temperature", "company": "company0", "site": "site0", "station": "station-1", "test_name": "Temperature_Test", "meas_status": "PASS", "ts": 1505970910038, "event": "Device-1", "meas_unit": "deg F", "meas_description": "", "meas_datatype": "long","meas_value": 64,"meas_lower_limit": -30, "meas_upper_limit": 200, "sensor": "tester-26"}')
json_string.append('{"meas_name": "Temperature", "company": "company0", "site": "site0", "station": "station-1", "test_name": "Temperature_Test", "meas_status": "PASS", "ts": 1505970910038, "event": "Device-1", "meas_unit": "deg F", "meas_description": "", "meas_datatype": "long","meas_value": 64,"sensor": "tester-28"}')
json_string.append('{"meas_name": "Temperature", "company": "company0", "site": "site0", "station": "station-1", "test_name": "Temperature_Test", "meas_status": "PASS", "ts": 1505970910038, "event": "Device-1", "meas_unit": "deg F", "meas_description": "", "meas_datatype": "long","meas_value": "64","meas_lower_limit": -30.2, "meas_upper_limit": 200.2, "sensor": "tester-29"}')
json_string.append('{"meas_name": "Temperature", "company": "company0", "site": "site0", "station": "station-1", "test_name": "Temperature_Test", "meas_status": "PASS", "ts": 1505970910038, "event": "Device-1", "meas_unit": "deg F", "meas_description": "", "meas_datatype": "long","meas_value": "64","meas_lower_limit": -30, "meas_upper_limit": 200, "sensor": "tester-30"}')
json_string.append('{"meas_name": "Temperature", "company": "company0", "site": "site0", "station": "station-1", "test_name": "Temperature_Test", "meas_status": "PASS", "ts": 1505970910038, "event": "Device-1", "meas_unit": "deg F", "meas_description": "", "meas_datatype": "long","meas_value": "64","sensor": "tester-32"}')
json_string.append('{"meas_name": "Temperature", "company": "company0", "site": "site0", "station": "station-1", "test_name": "Temperature_Test", "meas_status": "PASS", "ts": 1505970910038, "event": "Device-1", "meas_unit": "deg F", "meas_description": "", "meas_datatype": "long","meas_lower_limit": -30.2, "meas_upper_limit": 200.2, "sensor": "tester-33"}')
json_string.append('{"meas_name": "Temperature", "company": "company0", "site": "site0", "station": "station-1", "test_name": "Temperature_Test", "meas_status": "PASS", "ts": 1505970910038, "event": "Device-1", "meas_unit": "deg F", "meas_description": "", "meas_datatype": "long","meas_lower_limit": -30, "meas_upper_limit": 200, "sensor": "tester-34"}')
json_string.append('{"meas_name": "Temperature", "company": "company0", "site": "site0", "station": "station-1", "test_name": "Temperature_Test", "meas_status": "PASS", "ts": 1505970910038, "event": "Device-1", "meas_unit": "deg F", "meas_description": "", "meas_datatype": "long","sensor": "tester-36"}')
json_string.append('{"meas_name": "Temperature", "company": "company0", "site": "site0", "station": "station-1", "test_name": "Temperature_Test", "meas_status": "PASS", "ts": 1505970910038, "event": "Device-1", "meas_unit": "deg F", "meas_description": "", "meas_datatype": "string","meas_value": 64.5,"meas_lower_limit": -30.2, "meas_upper_limit": 200.2, "sensor": "tester-37"}')
json_string.append('{"meas_name": "Temperature", "company": "company0", "site": "site0", "station": "station-1", "test_name": "Temperature_Test", "meas_status": "PASS", "ts": 1505970910038, "event": "Device-1", "meas_unit": "deg F", "meas_description": "", "meas_datatype": "string","meas_value": 64.5,"meas_lower_limit": -30, "meas_upper_limit": 200, "sensor": "tester-38"}')
json_string.append('{"meas_name": "Temperature", "company": "company0", "site": "site0", "station": "station-1", "test_name": "Temperature_Test", "meas_status": "PASS", "ts": 1505970910038, "event": "Device-1", "meas_unit": "deg F", "meas_description": "", "meas_datatype": "string","meas_value": 64.5,"sensor": "tester-40"}')
json_string.append('{"meas_name": "Temperature", "company": "company0", "site": "site0", "station": "station-1", "test_name": "Temperature_Test", "meas_status": "PASS", "ts": 1505970910038, "event": "Device-1", "meas_unit": "deg F", "meas_description": "", "meas_datatype": "string","meas_value": 64,"meas_lower_limit": -30.2, "meas_upper_limit": 200.2, "sensor": "tester-41"}')
json_string.append('{"meas_name": "Temperature", "company": "company0", "site": "site0", "station": "station-1", "test_name": "Temperature_Test", "meas_status": "PASS", "ts": 1505970910038, "event": "Device-1", "meas_unit": "deg F", "meas_description": "", "meas_datatype": "string","meas_value": 64,"meas_lower_limit": -30, "meas_upper_limit": 200, "sensor": "tester-42"}')
json_string.append('{"meas_name": "Temperature", "company": "company0", "site": "site0", "station": "station-1", "test_name": "Temperature_Test", "meas_status": "PASS", "ts": 1505970910038, "event": "Device-1", "meas_unit": "deg F", "meas_description": "", "meas_datatype": "string","meas_value": 64,"sensor": "tester-44"}')
json_string.append('{"meas_name": "Temperature", "company": "company0", "site": "site0", "station": "station-1", "test_name": "Temperature_Test", "meas_status": "PASS", "ts": 1505970910038, "event": "Device-1", "meas_unit": "deg F", "meas_description": "", "meas_datatype": "string","meas_value": "64","meas_lower_limit": -30.2, "meas_upper_limit": 200.2, "sensor": "tester-45"}')
json_string.append('{"meas_name": "Temperature", "company": "company0", "site": "site0", "station": "station-1", "test_name": "Temperature_Test", "meas_status": "PASS", "ts": 1505970910038, "event": "Device-1", "meas_unit": "deg F", "meas_description": "", "meas_datatype": "string","meas_value": "64","meas_lower_limit": -30, "meas_upper_limit": 200, "sensor": "tester-46"}')
json_string.append('{"meas_name": "Temperature", "company": "company0", "site": "site0", "station": "station-1", "test_name": "Temperature_Test", "meas_status": "PASS", "ts": 1505970910038, "event": "Device-1", "meas_unit": "deg F", "meas_description": "", "meas_datatype": "string","meas_value": "64","sensor": "tester-48"}')
json_string.append('{"meas_name": "Temperature", "company": "company0", "site": "site0", "station": "station-1", "test_name": "Temperature_Test", "meas_status": "PASS", "ts": 1505970910038, "event": "Device-1", "meas_unit": "deg F", "meas_description": "", "meas_datatype": "string","meas_lower_limit": -30.2, "meas_upper_limit": 200.2, "sensor": "tester-49"}')
json_string.append('{"meas_name": "Temperature", "company": "company0", "site": "site0", "station": "station-1", "test_name": "Temperature_Test", "meas_status": "PASS", "ts": 1505970910038, "event": "Device-1", "meas_unit": "deg F", "meas_description": "", "meas_datatype": "string","meas_lower_limit": -30, "meas_upper_limit": 200, "sensor": "tester-50"}')
json_string.append('{"meas_name": "Temperature", "company": "company0", "site": "site0", "station": "station-1", "test_name": "Temperature_Test", "meas_status": "PASS", "ts": 1505970910038, "event": "Device-1", "meas_unit": "deg F", "meas_description": "", "meas_datatype": "string","sensor": "tester-52"}')
json_string.append('{"meas_name": "Temperature", "company": "company0", "site": "site0", "station": "station-1", "test_name": "Temperature_Test", "meas_status": "PASS", "ts": 1505970910038, "event": "Device-1", "meas_unit": "deg F", "meas_description": "","meas_value": 64.5,"meas_lower_limit": -30.2, "meas_upper_limit": 200.2, "sensor": "tester-53"}')
json_string.append('{"meas_name": "Temperature", "company": "company0", "site": "site0", "station": "station-1", "test_name": "Temperature_Test", "meas_status": "PASS", "ts": 1505970910038, "event": "Device-1", "meas_unit": "deg F", "meas_description": "","meas_value": 64.5,"meas_lower_limit": -30, "meas_upper_limit": 200, "sensor": "tester-54"}')
json_string.append('{"meas_name": "Temperature", "company": "company0", "site": "site0", "station": "station-1", "test_name": "Temperature_Test", "meas_status": "PASS", "ts": 1505970910038, "event": "Device-1", "meas_unit": "deg F", "meas_description": "","meas_value": 64.5,"sensor": "tester-56"}')
json_string.append('{"meas_name": "Temperature", "company": "company0", "site": "site0", "station": "station-1", "test_name": "Temperature_Test", "meas_status": "PASS", "ts": 1505970910038, "event": "Device-1", "meas_unit": "deg F", "meas_description": "","meas_value": 64,"meas_lower_limit": -30.2, "meas_upper_limit": 200.2, "sensor": "tester-57"}')
json_string.append('{"meas_name": "Temperature", "company": "company0", "site": "site0", "station": "station-1", "test_name": "Temperature_Test", "meas_status": "PASS", "ts": 1505970910038, "event": "Device-1", "meas_unit": "deg F", "meas_description": "","meas_value": 64,"meas_lower_limit": -30, "meas_upper_limit": 200, "sensor": "tester-58"}')
json_string.append('{"meas_name": "Temperature", "company": "company0", "site": "site0", "station": "station-1", "test_name": "Temperature_Test", "meas_status": "PASS", "ts": 1505970910038, "event": "Device-1", "meas_unit": "deg F", "meas_description": "","meas_value": 64,"sensor": "tester-60"}')
json_string.append('{"meas_name": "Temperature", "company": "company0", "site": "site0", "station": "station-1", "test_name": "Temperature_Test", "meas_status": "PASS", "ts": 1505970910038, "event": "Device-1", "meas_unit": "deg F", "meas_description": "","meas_value": "64","meas_lower_limit": -30.2, "meas_upper_limit": 200.2, "sensor": "tester-61"}')
json_string.append('{"meas_name": "Temperature", "company": "company0", "site": "site0", "station": "station-1", "test_name": "Temperature_Test", "meas_status": "PASS", "ts": 1505970910038, "event": "Device-1", "meas_unit": "deg F", "meas_description": "","meas_value": "64","meas_lower_limit": -30, "meas_upper_limit": 200, "sensor": "tester-62"}')
json_string.append('{"meas_name": "Temperature", "company": "company0", "site": "site0", "station": "station-1", "test_name": "Temperature_Test", "meas_status": "PASS", "ts": 1505970910038, "event": "Device-1", "meas_unit": "deg F", "meas_description": "","meas_value": "64","sensor": "tester-64"}')
json_string.append('{"meas_name": "Temperature", "company": "company0", "site": "site0", "station": "station-1", "test_name": "Temperature_Test", "meas_status": "PASS", "ts": 1505970910038, "event": "Device-1", "meas_unit": "deg F", "meas_description": "","meas_lower_limit": -30.2, "meas_upper_limit": 200.2, "sensor": "tester-65"}')
json_string.append('{"meas_name": "Temperature", "company": "company0", "site": "site0", "station": "station-1", "test_name": "Temperature_Test", "meas_status": "PASS", "ts": 1505970910038, "event": "Device-1", "meas_unit": "deg F", "meas_description": "","meas_lower_limit": -30, "meas_upper_limit": 200, "sensor": "tester-66"}')
json_string.append('{"meas_name": "Temperature", "company": "company0", "site": "site0", "station": "station-1", "test_name": "Temperature_Test", "meas_status": "PASS", "ts": 1505970910038, "event": "Device-1", "meas_unit": "deg F", "meas_description": "","sensor": "tester-68"}')
class Base(unittest.TestCase):
def assertEqualRows(self, one, two):
if not isinstance(one, Column):
self.assertEqual(one.asDict(), two.asDict())
def assertEqualDataFrames(self, one, two):
self.assertEqual(one.count(), two.count())
        for i, j in zip(one.collect(), two.collect()):
self.assertEqualRows(i, j)
class EpidataContextStreamingTestsSensorMeasurement(Base):
def test_sensor_measurement_streaming(self):
def num_keys_different(dict1, dict2):
diffkeys = [
k for k in dict1 if k not in dict2 or dict1[k] != dict2[k]]
return len(diffkeys)
def create_map_from_panda_dataframe(dataframe, index_column):
dataframe_map = {}
for index, row in dataframe.iterrows():
index_str = "-" + row[index_column].split("-")[1]
if isinstance(
row['meas_value'],
basestring) or not np.isnan(
row['meas_value']):
dataframe_map['meas_value' + index_str] = row['meas_value']
dataframe_map['meas_datatype' +
index_str] = row['meas_datatype']
dataframe_map['meas_upper_limit' +
index_str] = row['meas_upper_limit']
dataframe_map['meas_lower_limit' +
index_str] = row['meas_lower_limit']
return dataframe_map
def create_map_from_spark_panda_dataframe(dataframe, index_column):
dataframe_map = {}
for index, row in dataframe.iterrows():
index_str = "-" + row[index_column].split("-")[1]
if isinstance(
row['meas_value'],
basestring) or not np.isnan(
row['meas_value']):
dataframe_map['meas_value' + index_str] = row['meas_value']
dataframe_map['meas_datatype' +
index_str] = row['meas_datatype']
if isinstance(
row['meas_upper_limit'],
basestring) or not np.isnan(
row['meas_upper_limit']):
dataframe_map['meas_upper_limit' +
index_str] = row['meas_upper_limit']
if isinstance(
row['meas_lower_limit'],
basestring) or not np.isnan(
row['meas_lower_limit']):
dataframe_map['meas_lower_limit' +
index_str] = row['meas_lower_limit']
return dataframe_map
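        # Both helpers key their entries as '<field>-<sensor suffix>', e.g.
        # 'meas_value-60' for sensor 'tester-60'. The Spark variant also
        # guards the limit columns against NaN, since toPandas() renders
        # missing numeric limits as NaN rather than None.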
measurements_rows = [SensorMeasurement.to_row(
[0, x]) for x in json_string]
rdd_df = ec._sc.parallelize(measurements_rows).toDF()
panda_df = ConvertUtils.convert_to_pandas_dataframe_model(rdd_df, True)
panda_df_map = create_map_from_panda_dataframe(panda_df, 'sensor')
output_panda_map = {
'meas_lower_limit-46': None,
'meas_lower_limit-50': -30.0,
'meas_value-18': '',
'meas_value-13': '64',
'meas_value-12': 64.0,
'meas_value-10': 64.0,
'meas_value-17': '',
'meas_value-16': '64',
'meas_value-14': '64',
'meas_value-37': 64.5,
'meas_upper_limit-57': 200.2,
'meas_upper_limit-29': None,
'meas_upper_limit-28': None,
'meas_lower_limit-34': -30.0,
'meas_lower_limit-33': -30.2,
'meas_lower_limit-32': None,
'meas_lower_limit-30': None,
'meas_upper_limit-21': 200.2,
'meas_upper_limit-20': None,
'meas_upper_limit-22': 200.0,
'meas_upper_limit-25': 200.2,
'meas_upper_limit-24': None,
'meas_upper_limit-26': 200.0,
'meas_lower_limit-9': -30.2,
'meas_lower_limit-8': None,
'meas_datatype-48': 'string',
'meas_datatype-49': 'string',
'meas_lower_limit-5': -30.2,
'meas_datatype-44': 'long',
'meas_lower_limit-6': -30.0,
'meas_datatype-42': 'long',
'meas_datatype-40': 'double',
'meas_datatype-41': 'long',
'meas_lower_limit-45': None,
'meas_lower_limit-24': None,
'meas_lower_limit-25': -30.2,
'meas_lower_limit-26': -30.0,
'meas_lower_limit-20': None,
'meas_lower_limit-21': -30.2,
'meas_lower_limit-22': -30.0,
'meas_upper_limit-36': None,
'meas_upper_limit-37': 200.2,
'meas_upper_limit-34': 200.0,
'meas_upper_limit-32': None,
'meas_lower_limit-29': None,
'meas_upper_limit-30': None,
'meas_datatype-50': 'string',
'meas_datatype-53': 'double',
'meas_datatype-52': 'string',
'meas_datatype-54': 'double',
'meas_datatype-57': 'long',
'meas_datatype-56': 'double',
'meas_datatype-58': 'long',
'meas_upper_limit-49': 200.2,
'meas_upper_limit-48': None,
'meas_upper_limit-42': 200.0,
'meas_upper_limit-41': 200.2,
'meas_upper_limit-40': None,
'meas_upper_limit-46': None,
'meas_upper_limit-45': None,
'meas_upper_limit-44': None,
'meas_lower_limit-18': -30.0,
'meas_lower_limit-10': -30.0,
'meas_lower_limit-13': None,
'meas_lower_limit-12': None,
'meas_lower_limit-14': None,
'meas_lower_limit-17': -30.2,
'meas_lower_limit-16': None,
'meas_upper_limit-38': 200.0,
'meas_upper_limit-8': None,
'meas_upper_limit-9': 200.2,
'meas_upper_limit-6': 200.0,
'meas_upper_limit-5': 200.2,
'meas_datatype-24': 'double',
'meas_datatype-25': 'long',
'meas_datatype-26': 'long',
'meas_value-61': '64',
'meas_datatype-20': 'double',
'meas_datatype-21': 'double',
'meas_datatype-22': 'double',
'meas_value-65': '',
'meas_value-68': '',
'meas_datatype-28': 'long',
'meas_datatype-29': 'string',
'meas_upper_limit-58': 200.0,
'meas_value-62': '64',
'meas_upper_limit-50': 200.0,
'meas_lower_limit-28': None,
'meas_upper_limit-52': None,
'meas_upper_limit-53': 200.2,
'meas_upper_limit-54': 200.0,
'meas_value-60': 64.0,
'meas_upper_limit-56': None,
'meas_upper_limit-33': 200.2,
'meas_value-66': '',
'meas_value-64': '64',
'meas_value-58': 64.0,
'meas_value-57': 64.0,
'meas_value-56': 64.5,
'meas_value-54': 64.5,
'meas_value-53': 64.5,
'meas_value-52': '',
'meas_value-50': '',
'meas_datatype-37': 'double',
'meas_datatype-36': 'long',
'meas_datatype-34': 'long',
'meas_datatype-33': 'long',
'meas_datatype-32': 'string',
'meas_datatype-30': 'string',
'meas_datatype-38': 'double',
'meas_lower_limit-37': -30.2,
'meas_upper_limit-68': None,
'meas_lower_limit-36': None,
'meas_upper_limit-65': 200.2,
'meas_upper_limit-64': None,
'meas_upper_limit-66': 200.0,
'meas_upper_limit-61': None,
'meas_upper_limit-60': None,
'meas_upper_limit-62': None,
'meas_value-48': '64',
'meas_value-49': '',
'meas_value-40': 64.5,
'meas_value-41': 64.0,
'meas_value-42': 64.0,
'meas_value-44': 64.0,
'meas_value-45': '64',
'meas_value-46': '64',
'meas_lower_limit-68': None,
'meas_lower_limit-60': None,
'meas_lower_limit-61': None,
'meas_lower_limit-62': None,
'meas_lower_limit-64': None,
'meas_lower_limit-65': -30.2,
'meas_lower_limit-66': -30.0,
'meas_lower_limit-52': None,
'meas_lower_limit-38': -30.0,
'meas_value-38': 64.5,
'meas_datatype-18': 'double',
'meas_value-34': '',
'meas_datatype-14': 'string',
'meas_datatype-17': 'double',
'meas_datatype-16': 'string',
'meas_datatype-10': 'long',
'meas_datatype-13': 'string',
'meas_value-32': '64',
'meas_lower_limit-42': -30.0,
'meas_value-36': '',
'meas_lower_limit-58': -30.0,
'meas_lower_limit-54': -30.0,
'meas_lower_limit-57': -30.2,
'meas_lower_limit-56': None,
'meas_lower_limit-40': None,
'meas_lower_limit-53': -30.2,
'meas_value-30': '64',
'meas_datatype-46': 'string',
'meas_lower_limit-41': -30.2,
'meas_value-33': '',
'meas_upper_limit-10': 200.0,
'meas_datatype-12': 'long',
'meas_datatype-68': '',
'meas_datatype-45': 'string',
'meas_lower_limit-44': None,
'meas_datatype-60': 'long',
'meas_datatype-61': 'string',
'meas_datatype-62': 'string',
'meas_datatype-6': 'double',
'meas_datatype-64': 'string',
'meas_datatype-65': '',
'meas_datatype-66': '',
'meas_value-28': 64.0,
'meas_value-29': '64',
'meas_value-26': 64.0,
'meas_value-24': 64.5,
'meas_value-25': 64.0,
'meas_value-22': 64.5,
'meas_value-20': '',
'meas_value-21': 64.5,
'meas_value-9': 64.0,
'meas_value-8': 64.76,
'meas_value-6': 64.76,
'meas_value-5': 64.76,
'meas_upper_limit-14': None,
'meas_upper_limit-16': None,
'meas_upper_limit-17': 200.2,
'meas_datatype-5': 'double',
'meas_upper_limit-12': None,
'meas_upper_limit-13': None,
'meas_datatype-9': 'long',
'meas_datatype-8': 'double',
'meas_lower_limit-48': None,
'meas_lower_limit-49': -30.2,
'meas_upper_limit-18': 200.0}
self.assertEqual(num_keys_different(panda_df_map, output_panda_map), 0)
op = ec.create_transformation(
substitute, [
["Temperature"], "rolling", 3], "measurements_cleansed_kafka")
output_df = op.apply(panda_df, None)
output_df_map = create_map_from_panda_dataframe(output_df, 'sensor')
expected_output_df_map = {
'meas_lower_limit-46': None,
'meas_lower_limit-50': -30.0,
'meas_value-18': '',
'meas_value-13': '64',
'meas_value-12': 64.0,
'meas_value-10': 64.0,
'meas_value-17': '',
'meas_value-16': '64',
'meas_value-14': '64',
'meas_value-37': 64.5,
'meas_upper_limit-57': 200.2,
'meas_upper_limit-29': None,
'meas_upper_limit-28': None,
'meas_lower_limit-34': -30.0,
'meas_lower_limit-33': -30.2,
'meas_lower_limit-32': None,
'meas_lower_limit-30': None,
'meas_upper_limit-21': 200.2,
'meas_upper_limit-20': None,
'meas_upper_limit-22': 200.0,
'meas_upper_limit-25': 200.2,
'meas_upper_limit-24': None,
'meas_upper_limit-26': 200.0,
'meas_lower_limit-9': -30.2,
'meas_lower_limit-8': None,
'meas_datatype-48': 'string',
'meas_datatype-49': 'string',
'meas_lower_limit-5': -30.2,
'meas_datatype-44': 'long',
'meas_lower_limit-6': -30.0,
'meas_datatype-42': 'long',
'meas_datatype-40': 'double',
'meas_datatype-41': 'long',
'meas_lower_limit-45': None,
'meas_lower_limit-24': None,
'meas_lower_limit-25': -30.2,
'meas_lower_limit-26': -30.0,
'meas_lower_limit-20': None,
'meas_lower_limit-21': -30.2,
'meas_lower_limit-22': -30.0,
'meas_upper_limit-36': None,
'meas_upper_limit-37': 200.2,
'meas_upper_limit-34': 200.0,
'meas_upper_limit-32': None,
'meas_lower_limit-29': None,
'meas_upper_limit-30': None,
'meas_datatype-50': 'string',
'meas_datatype-53': 'double',
'meas_datatype-52': 'string',
'meas_datatype-54': 'double',
'meas_datatype-57': 'long',
'meas_datatype-56': 'double',
'meas_datatype-58': 'long',
'meas_upper_limit-49': 200.2,
'meas_upper_limit-48': None,
'meas_upper_limit-42': 200.0,
'meas_upper_limit-41': 200.2,
'meas_upper_limit-40': None,
'meas_upper_limit-46': None,
'meas_upper_limit-45': None,
'meas_upper_limit-44': None,
'meas_lower_limit-18': -30.0,
'meas_lower_limit-10': -30.0,
'meas_lower_limit-13': None,
'meas_lower_limit-12': None,
'meas_lower_limit-14': None,
'meas_lower_limit-17': -30.2,
'meas_lower_limit-16': None,
'meas_upper_limit-38': 200.0,
'meas_upper_limit-8': None,
'meas_upper_limit-9': 200.2,
'meas_upper_limit-6': 200.0,
'meas_upper_limit-5': 200.2,
'meas_datatype-24': 'double',
'meas_datatype-25': 'long',
'meas_datatype-26': 'long',
'meas_value-61': '64',
'meas_datatype-20': 'double',
'meas_datatype-21': 'double',
'meas_datatype-22': 'double',
'meas_value-65': '',
'meas_value-68': '',
'meas_datatype-28': 'long',
'meas_datatype-29': 'string',
'meas_upper_limit-58': 200.0,
'meas_value-62': '64',
'meas_upper_limit-50': 200.0,
'meas_lower_limit-28': None,
'meas_upper_limit-52': None,
'meas_upper_limit-53': 200.2,
'meas_upper_limit-54': 200.0,
'meas_value-60': 64.0,
'meas_upper_limit-56': None,
'meas_upper_limit-33': 200.2,
'meas_value-66': '',
'meas_value-64': '64',
'meas_value-58': 64.0,
'meas_value-57': 64.0,
'meas_value-56': 64.5,
'meas_value-54': 64.5,
'meas_value-53': 64.5,
'meas_value-52': '',
'meas_value-50': '',
'meas_datatype-37': 'double',
'meas_datatype-36': 'long',
'meas_datatype-34': 'long',
'meas_datatype-33': 'long',
'meas_datatype-32': 'string',
'meas_datatype-30': 'string',
'meas_datatype-38': 'double',
'meas_lower_limit-37': -30.2,
'meas_upper_limit-68': None,
'meas_lower_limit-36': None,
'meas_upper_limit-65': 200.2,
'meas_upper_limit-64': None,
'meas_upper_limit-66': 200.0,
'meas_upper_limit-61': None,
'meas_upper_limit-60': None,
'meas_upper_limit-62': None,
'meas_value-48': '64',
'meas_value-49': '',
'meas_value-40': 64.5,
'meas_value-41': 64.0,
'meas_value-42': 64.0,
'meas_value-44': 64.0,
'meas_value-45': '64',
'meas_value-46': '64',
'meas_lower_limit-68': None,
'meas_lower_limit-60': None,
'meas_lower_limit-61': None,
'meas_lower_limit-62': None,
'meas_lower_limit-64': None,
'meas_lower_limit-65': -30.2,
'meas_lower_limit-66': -30.0,
'meas_lower_limit-52': None,
'meas_lower_limit-38': -30.0,
'meas_value-38': 64.5,
'meas_datatype-18': 'double',
'meas_value-34': '',
'meas_datatype-14': 'string',
'meas_datatype-17': 'double',
'meas_datatype-16': 'string',
'meas_datatype-10': 'long',
'meas_datatype-13': 'string',
'meas_value-32': '64',
'meas_lower_limit-42': -30.0,
'meas_value-36': '',
'meas_lower_limit-58': -30.0,
'meas_lower_limit-54': -30.0,
'meas_lower_limit-57': -30.2,
'meas_lower_limit-56': None,
'meas_lower_limit-40': None,
'meas_lower_limit-53': -30.2,
'meas_value-30': '64',
'meas_datatype-46': 'string',
'meas_lower_limit-41': -30.2,
'meas_value-33': '',
'meas_upper_limit-10': 200.0,
'meas_datatype-12': 'long',
'meas_datatype-68': '',
'meas_datatype-45': 'string',
'meas_lower_limit-44': None,
'meas_datatype-60': 'long',
'meas_datatype-61': 'string',
'meas_datatype-62': 'string',
'meas_datatype-6': 'double',
'meas_datatype-64': 'string',
'meas_datatype-65': '',
'meas_datatype-66': '',
'meas_value-28': 64.0,
'meas_value-29': '64',
'meas_value-26': 64.0,
'meas_value-24': 64.5,
'meas_value-25': 64.0,
'meas_value-22': 64.5,
'meas_value-20': '',
'meas_value-21': 64.5,
'meas_value-9': 64.0,
'meas_value-8': 64.76,
'meas_value-6': 64.76,
'meas_value-5': 64.76,
'meas_upper_limit-14': None,
'meas_upper_limit-16': None,
'meas_upper_limit-17': 200.2,
'meas_datatype-5': 'double',
'meas_upper_limit-12': None,
'meas_upper_limit-13': None,
'meas_datatype-9': 'long',
'meas_datatype-8': 'double',
'meas_lower_limit-48': None,
'meas_lower_limit-49': -30.2,
            'meas_upper_limit-18': 200.0}
        self.assertEqual(num_keys_different(output_df_map, expected_output_df_map), 0)
# clean up unnecessary column
output_df = ConvertUtils.convert_meas_value(
output_df, "measurements_cleansed")
# convert it back to spark data frame
spark_output_df = ec._sql_ctx_pyspark.createDataFrame(
output_df, SensorMeasurement.get_schema())
# convert to db model to save to cassandra
output_df_db = SensorMeasurement.convert_to_db_model(
spark_output_df, "measurements_cleansed")
final_df = output_df_db.toPandas()
final_df_map = create_map_from_spark_panda_dataframe(
final_df, 'dataset')
expected_df_map = {
'meas_datatype-10': 'long',
'meas_datatype-12': 'long',
'meas_datatype-13': 'string',
'meas_datatype-14': 'string',
'meas_datatype-16': 'string',
'meas_datatype-17': 'double',
'meas_datatype-18': 'double',
'meas_datatype-20': 'double',
'meas_datatype-21': 'double',
'meas_datatype-22': 'double',
'meas_datatype-24': 'double',
'meas_datatype-25': 'long',
'meas_datatype-26': 'long',
'meas_datatype-28': 'long',
'meas_datatype-29': 'string',
'meas_datatype-30': 'string',
'meas_datatype-32': 'string',
'meas_datatype-33': 'long',
'meas_datatype-34': 'long',
'meas_datatype-36': 'long',
'meas_datatype-37': 'double',
'meas_datatype-38': 'double',
'meas_datatype-40': 'double',
'meas_datatype-41': 'long',
'meas_datatype-42': 'long',
'meas_datatype-44': 'long',
'meas_datatype-45': 'string',
'meas_datatype-46': 'string',
'meas_datatype-48': 'string',
'meas_datatype-49': 'string',
'meas_datatype-5': 'double',
'meas_datatype-50': 'string',
'meas_datatype-52': 'string',
'meas_datatype-53': 'double',
'meas_datatype-54': 'double',
'meas_datatype-56': 'double',
'meas_datatype-57': 'long',
'meas_datatype-58': 'long',
'meas_datatype-6': 'double',
'meas_datatype-60': 'long',
'meas_datatype-61': 'string',
'meas_datatype-62': 'string',
'meas_datatype-64': 'string',
'meas_datatype-65': '',
'meas_datatype-66': '',
'meas_datatype-68': '',
'meas_datatype-8': 'double',
'meas_datatype-9': 'long',
'meas_lower_limit-17': -30.2,
'meas_lower_limit-18': -30.0,
'meas_lower_limit-21': -30.2,
'meas_lower_limit-22': -30.0,
'meas_lower_limit-25': -30.2,
'meas_lower_limit-33': -30.2,
'meas_lower_limit-37': -30.2,
'meas_lower_limit-38': -30.0,
'meas_lower_limit-41': -30.2,
'meas_lower_limit-49': -30.2,
'meas_lower_limit-5': -30.2,
'meas_lower_limit-50': -30.0,
'meas_lower_limit-53': -30.2,
'meas_lower_limit-54': -30.0,
'meas_lower_limit-57': -30.2,
'meas_lower_limit-6': -30.0,
'meas_lower_limit-65': -30.2,
'meas_lower_limit-66': -30.0,
'meas_lower_limit-9': -30.2,
'meas_upper_limit-17': 200.2,
'meas_upper_limit-18': 200.0,
'meas_upper_limit-21': 200.2,
'meas_upper_limit-22': 200.0,
'meas_upper_limit-25': 200.2,
'meas_upper_limit-33': 200.2,
'meas_upper_limit-37': 200.2,
'meas_upper_limit-38': 200.0,
'meas_upper_limit-41': 200.2,
'meas_upper_limit-49': 200.2,
'meas_upper_limit-5': 200.2,
'meas_upper_limit-50': 200.0,
'meas_upper_limit-53': 200.2,
'meas_upper_limit-54': 200.0,
'meas_upper_limit-57': 200.2,
'meas_upper_limit-6': 200.0,
'meas_upper_limit-65': 200.2,
'meas_upper_limit-66': 200.0,
'meas_upper_limit-9': 200.2,
'meas_value-17': 64.00000000000006,
'meas_value-20': 64.50000000000006,
'meas_value-21': 64.5,
'meas_value-22': 64.5,
'meas_value-24': 64.5,
'meas_value-37': 64.5,
'meas_value-38': 64.5,
'meas_value-40': 64.5,
'meas_value-5': 64.76,
'meas_value-53': 64.5,
'meas_value-54': 64.5,
'meas_value-56': 64.5,
'meas_value-6': 64.76,
'meas_value-65': 64.00000000000006,
'meas_value-8': 64.76}
self.assertEqual(num_keys_different(final_df_map, expected_df_map), 0)
self.assertEqual(num_keys_different({'a': 1, 'b': 1}, {'c': 2}), 2)
self.assertEqual(num_keys_different(
{'a': 1, 'b': 1}, {'a': 1, 'b': 1}), 0)
if __name__ == "__main__":
# NOTE Fixtures are added externally, by IPythonSpec.scala.
unittest.main()
|
#!/usr/bin/env python2
# -*- coding: UTF-8 -*-
# File: pool.py
# Author: Yuxin Wu <[email protected]>
import tensorflow as tf
import numpy as np
from ._common import *
from ..tfutils.symbolic_functions import *
__all__ = ['MaxPooling', 'FixedUnPooling', 'AvgPooling', 'GlobalAvgPooling',
'BilinearUpSample']
@layer_register()
def MaxPooling(x, shape, stride=None, padding='VALID'):
"""
MaxPooling on images.
:param input: NHWC tensor.
:param shape: int or [h, w]
:param stride: int or [h, w]. default to be shape.
:param padding: 'valid' or 'same'. default to 'valid'
:returns: NHWC tensor.
"""
padding = padding.upper()
shape = shape4d(shape)
if stride is None:
stride = shape
else:
stride = shape4d(stride)
return tf.nn.max_pool(x, ksize=shape, strides=stride, padding=padding)
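# Usage sketch (hedged): layer_register makes the first positional argument
# the layer name, as in the tests at the bottom of this file, e.g.
#   l = MaxPooling('pool0', l, 2)  # NHWC input, 2x2 window, stride 2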
@layer_register()
def AvgPooling(x, shape, stride=None, padding='VALID'):
"""
Average pooling on images.
:param input: NHWC tensor.
:param shape: int or [h, w]
:param stride: int or [h, w]. default to be shape.
:param padding: 'valid' or 'same'. default to 'valid'
:returns: NHWC tensor.
"""
padding = padding.upper()
shape = shape4d(shape)
if stride is None:
stride = shape
else:
stride = shape4d(stride)
return tf.nn.avg_pool(x, ksize=shape, strides=stride, padding=padding)
@layer_register()
def GlobalAvgPooling(x):
"""
Global average pooling as in `Network In Network
<http://arxiv.org/abs/1312.4400>`_.
:param input: NHWC tensor.
:returns: NC tensor.
"""
assert x.get_shape().ndims == 4
return tf.reduce_mean(x, [1, 2])
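# Usage sketch (hedged): a [N, 7, 7, 256] feature map reduces to [N, 256]:
#   l = GlobalAvgPooling('gap', l)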
# https://github.com/tensorflow/tensorflow/issues/2169
def UnPooling2x2ZeroFilled(x):
out = tf.concat(3, [x, tf.zeros_like(x)])
out = tf.concat(2, [out, tf.zeros_like(out)])
sh = x.get_shape().as_list()
if None not in sh[1:]:
out_size = [-1, sh[1] * 2, sh[2] * 2, sh[3]]
return tf.reshape(out, out_size)
else:
shv = tf.shape(x)
ret = tf.reshape(out, tf.pack([-1, shv[1] * 2, shv[2] * 2, sh[3]]))
ret.set_shape([None, None, None, sh[3]])
return ret
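# Example: a single input value v expands to the 2x2 block [[v, 0], [0, 0]],
# i.e. inputs land on the top-left corner of each block (see
# test_fixed_unpooling below).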
@layer_register()
def FixedUnPooling(x, shape, unpool_mat=None):
"""
Unpool the input with a fixed mat to perform kronecker product with.
:param input: NHWC tensor
:param shape: int or [h, w]
:param unpool_mat: a tf/np matrix with size=shape. If None, will use a mat
with 1 at top-left corner.
:returns: NHWC tensor
"""
shape = shape2d(shape)
# a faster implementation for this special case
if shape[0] == 2 and shape[1] == 2 and unpool_mat is None:
return UnPooling2x2ZeroFilled(x)
input_shape = tf.shape(x)
if unpool_mat is None:
mat = np.zeros(shape, dtype='float32')
mat[0][0] = 1
unpool_mat = tf.constant(mat, name='unpool_mat')
elif isinstance(unpool_mat, np.ndarray):
unpool_mat = tf.constant(unpool_mat, name='unpool_mat')
assert unpool_mat.get_shape().as_list() == list(shape)
    # perform a tensor-matrix Kronecker product
    fx = flatten(tf.transpose(x, [0, 3, 1, 2]))
    fx = tf.expand_dims(fx, -1)  # (b*c*h*w) x 1
    mat = tf.expand_dims(flatten(unpool_mat), 0)  # 1 x (sh*sw)
    prod = tf.matmul(fx, mat)  # (b*c*h*w) x (sh*sw)
prod = tf.reshape(prod, tf.pack(
[-1, input_shape[3], input_shape[1], input_shape[2], shape[0], shape[1]]))
prod = tf.transpose(prod, [0, 2, 4, 3, 5, 1])
prod = tf.reshape(prod, tf.pack(
[-1, input_shape[1] * shape[0], input_shape[2] * shape[1], input_shape[3]]))
return prod
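# Conceptually (hedged NumPy analogy): per channel the result equals
# np.kron(img, unpool_mat) for each 2-D image slice img.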
@layer_register()
def BilinearUpSample(x, shape):
"""
Non-parametric bilinear upsample the input images.
:param x: input NHWC tensor
:param shape: an integer
"""
def bilinear_conv_filler(s):
"""
s: width, height of the conv filter
        See https://github.com/BVLC/caffe/blob/master/include/caffe/filler.hpp#L244
"""
f = np.ceil(float(s) / 2)
c = float(2 * f - 1 - f % 2) / (2 * f)
ret = np.zeros((s, s), dtype='float32')
for x in range(s):
for y in range(s):
ret[x,y] = (1 - abs(x / f - c)) * (1 - abs(y / f - c))
return ret
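    # e.g. bilinear_conv_filler(4) gives the separable 1-D taps
    # [0.25, 0.75, 0.75, 0.25]; their outer product forms the 2-D filter.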
ch = x.get_shape().as_list()[3]
shape = int(shape)
unpool_mat = np.zeros((shape, shape), dtype='float32')
unpool_mat[-1,-1] = 1
x = FixedUnPooling('unpool', x, shape, unpool_mat)
filter_shape = 2 * shape
w = bilinear_conv_filler(filter_shape)
w = np.repeat(w, ch * ch).reshape((filter_shape, filter_shape, ch, ch))
weight_var = tf.constant(w,
tf.float32,
shape=(filter_shape, filter_shape, ch, ch))
output = tf.nn.conv2d(x, weight_var, [1,1,1,1], padding='SAME')
return output
from ._test import TestModel
class TestPool(TestModel):
def test_fixed_unpooling(self):
h, w = 3, 4
mat = np.random.rand(h, w, 3).astype('float32')
inp = self.make_variable(mat)
inp = tf.reshape(inp, [1, h, w, 3])
output = FixedUnPooling('unpool', inp, 2)
res = self.run_variable(output)
self.assertEqual(res.shape, (1, 2*h, 2*w, 3))
        # mat is on the corner
ele = res[0,::2,::2,0]
self.assertTrue((ele == mat[:,:,0]).all())
# the rest are zeros
res[0,::2,::2,:] = 0
self.assertTrue((res == 0).all())
def test_upsample(self):
h, w = 5, 5
scale = 2
mat = np.random.rand(h, w).astype('float32')
inp = self.make_variable(mat)
inp = tf.reshape(inp, [1, h, w, 1])
output = BilinearUpSample('upsample', inp, scale)
res = self.run_variable(output)
from skimage.transform import rescale
res2 = rescale(mat, scale)
diff = np.abs(res2 - res[0,:,:,0])
# not equivalent to rescale on edge
diff[0,:] = 0
diff[:,0] = 0
if not diff.max() < 1e-4:
            import IPython
IPython.embed(config=IPython.terminal.ipapp.load_default_config())
self.assertTrue(diff.max() < 1e-4)
|
# -*- coding: utf-8 -*-
"""
SwarmOps.config
~~~~~~~~~~~~~~
Program configuration. For every item, the corresponding system environment variable is read first and takes precedence over the default.
:copyright: (c) 2018 by staugur.
:license: MIT, see LICENSE for more details.
"""
from os import getenv
GLOBAL = {
    "ProcessName": "SwarmOps",
    # Custom process name.
    "Host": getenv("swarmops_host", "0.0.0.0"),
    # Listen address
    "Port": getenv("swarmops_port", 10130),
    # Listen port
    "LogLevel": getenv("swarmops_loglevel", "DEBUG"),
    # Application log level; one of DEBUG, INFO, WARNING, ERROR, CRITICAL.
}
SSO = {
    "app_name": getenv("swarmops_sso_app_name", GLOBAL["ProcessName"]),
    # Application name registered in Passport application management
    "app_id": getenv("swarmops_sso_app_id", "app_id"),
    # The `app_id` returned by registration in Passport application management
    "app_secret": getenv("swarmops_sso_app_secret", "app_secret"),
    # The `app_secret` returned by registration in Passport application management
    "sso_server": getenv("swarmops_sso_server", "YourPassportFQDN"),
    # Fully qualified root domain of the Passport deployment, e.g. the author's `https://passport.saintic.com`
    "sso_allow": getenv("swarmops_sso_allow"),
    # List of uids allowed to log in; format: uid1,uid2,...,uidn
    "sso_deny": getenv("swarmops_sso_deny")
    # List of uids denied login; same format as above
}
# System configuration
SYSTEM = {
    "HMAC_SHA256_KEY": getenv("swarmops_hmac_sha256_key", "273d32c8d797fa715190c7408ad73811"),
    # hmac sha256 key
    "AES_CBC_KEY": getenv("swarmops_aes_cbc_key", "YRRGBRYQqrV1gv5A"),
    # Encryption key used by the utils.aes_cbc.CBC class
    "JWT_SECRET_KEY": getenv("swarmops_jwt_secret_key", "WBlE7_#qDf2vRb@vM!Zw#lqrg@rdd3A6"),
    # Encryption key used by the utils.jwt.JWTUtil class
}
# Storage configuration section
STORAGE = {
    "SwarmStorageMode": getenv("swarmops_swarmstoragemode", "local"),
    # How Swarm cluster data is stored: `local` (local file storage) or `redis`.
    # With local storage, data is serialized into the logs/SwarmKey and ActiveKey files;
    # with redis storage, multi-node deployment is possible and data is serialized into redis.
    "Connection": getenv("swarmops_StorageConnection", "redis://ip:port:password"),
    # Only meaningful when SwarmStorageMode is not local.
    # Connection info for the storage backend, e.g. redis; if redis has no password, leave the :password part empty.
    "SwarmKey": getenv("swarmops_StorageSwarmKey", "SwarmOps_All"),
    # Key under which the storage backend stores all Swarm data
    "ActiveKey": getenv("swarmops_StorageActiveKey", "SwarmOps_Active"),
    # Key under which the storage backend stores the active cluster data
}
# Private registry configuration section
REGISTRY = {
    "RegistryAddr": getenv("swarmops_RegistryAddr", "https://registry.saintic.com"),
    # Private registry address, e.g. https://docker.io or http://ip:port
    "RegistryVersion": getenv("swarmops_RegistryVersion", 1),
    # Private registry version: 1 or 2
    "RegistryAuthentication": getenv("swarmops_RegistryAuthentication", None)
    # Authentication; currently unavailable
}
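# Usage sketch (hypothetical values): every item reads its environment
# variable first, so a deployment can override a default without editing
# this file, e.g.
#   export swarmops_port=10131
#   export swarmops_loglevel=INFO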
|
#def create_sliced_iter_funcs_train2(model, X_unshared, y_unshared):
# """
# WIP: NEW IMPLEMENTATION WITH PRELOADING GPU DATA
# build the Theano functions (symbolic expressions) that will be used in the
# optimization refer to this link for info on tensor types:
# References:
# http://deeplearning.net/software/theano/library/tensor/basic.html
# http://deeplearning.net/software/theano/tutorial/aliasing.html#borrowing-when-creating-shared-variables
# http://deeplearning.net/tutorial/lenet.html
# # TODO: Deal with batching to the GPU by setting the value of the shared variables.
# CommandLine:
# python -m ibeis_cnn.batch_processing --test-create_sliced_iter_funcs_train2
# Example:
# >>> # DISABLE_DOCTEST
# >>> from ibeis_cnn.batch_processing import * # NOQA
# >>> from ibeis_cnn import draw_net
# >>> from ibeis_cnn import models
# >>> model = models.DummyModel(autoinit=True)
# >>> X_unshared, y_unshared = model.make_random_testdata()
# >>> train_iter = model.build_theano_funcs(model)
# >>> print(train_iter)
#    >>> loss_train, network_output, prediction, accuracy = train_iter(0)
#    >>> print('loss_train = %r' % (loss_train,))
#    >>> print('network_output = %r' % (network_output,))
#    >>> print('prediction = %r' % (prediction,))
#    >>> print('accuracy = %r' % (accuracy,))
#    >>> #draw_net.draw_theano_symbolic_expression(train_iter)
#    >>> assert network_output.shape == (model.batch_size, model.output_dims)
# """
# # Attempt to load data on to the GPU
# # Labels to go into the GPU as float32 and then cast to int32 once inside
# X_unshared = np.asarray(X_unshared, dtype=theano.config.floatX)
# y_unshared = np.asarray(y_unshared, dtype=theano.config.floatX)
# X_shared = theano.shared(X_unshared, borrow=True)
# y_shared = T.cast(theano.shared(y_unshared, borrow=True), 'int32')
# # Build expressions which sample a batch
# batch_size = model.batch_size
# # Initialize symbolic input variables
# index = T.lscalar(name='index')
# X_batch = T.tensor4(name='X_batch')
# y_batch = T.ivector(name='y_batch')
# WHITEN = False
# if WHITEN:
#        # We might be able to perform some data augmentation here symbolically
# data_mean = X_unshared.mean()
# data_std = X_unshared.std()
# givens = {
# X_batch: (X_shared[index * batch_size: (index + 1) * batch_size] - data_mean) / data_std,
# y_batch: y_shared[index * batch_size: (index + 1) * batch_size],
# }
# else:
# givens = {
# X_batch: X_shared[index * batch_size: (index + 1) * batch_size],
# y_batch: y_shared[index * batch_size: (index + 1) * batch_size],
# }
# output_layer = model.get_output_layer()
#    # Build expression to evaluate network output without dropout
#    #network_output = output_layer.get_output(X_batch, deterministic=True)
#    network_output = layers.get_output(output_layer, X_batch, deterministic=True)
#    network_output.name = 'network_output'
# # Build expression to evaluate loss
# objective = objectives.Objective(output_layer, loss_function=model.loss_function)
# loss_train = objective.get_loss(X_batch, target=y_batch) # + 0.0001 * lasagne.regularization.l2(output_layer)
# loss_train.name = 'loss_train'
# # Build expression to evaluate updates
# with warnings.catch_warnings():
# warnings.filterwarnings('ignore', '.*topo.*')
# all_params = lasagne.layers.get_all_params(output_layer, trainable=True)
# updates = lasagne.updates.nesterov_momentum(loss_train, all_params, model.learning_rate, model.momentum)
# # Get performance indicator outputs:
# # Build expression to convert network output into a prediction
#    prediction = model.make_prediction_expr(network_output)
# # Build expression to compute accuracy
# accuracy = model.make_accuracy_expr(prediction, y_batch)
#    theano_backprop = theano.function(
#        inputs=[index],
#        outputs=[loss_train, network_output, prediction, accuracy],
#        updates=updates,
#        givens=givens
#    )
#    theano_backprop.name += ':theano_backprop:indexed'
# #other_outputs = [probabilities, predictions, confidences]
# #theano_backprop = theano.function(
# # inputs=[theano.Param(X_batch), theano.Param(y_batch)],
# # outputs=[loss] + other_outputs,
# # updates=updates,
# # givens={
# # X: X_batch,
# # y: y_batch,
# # },
# #)
# #theano_forward = theano.function(
# # inputs=[theano.Param(X_batch), theano.Param(y_batch)],
# # outputs=[loss_determ] + other_outputs,
# # updates=None,
# # givens={
# # X: X_batch,
# # y: y_batch,
# # },
# #)
# #theano_predict = theano.function(
# # inputs=[theano.Param(X_batch)],
# # outputs=other_outputs,
# # updates=None,
# # givens={
# # X: X_batch,
# # },
# #)
# return theano_backprop
#def create_sliced_network_output_func(model):
# # Initialize symbolic input variables
# X_batch = T.tensor4(name='X_batch')
# # weird, idk why X and y exist
# X = T.tensor4(name='X_batch')
# output_layer = model.get_output_layer()
#    # Build expression to evaluate network output without dropout
#    #network_output = output_layer.get_output(X_batch, deterministic=True)
#    network_output = layers.get_output(output_layer, X_batch, deterministic=True)
#    network_output.name = 'network_output'
#    theano_forward = theano.function(
#        inputs=[theano.Param(X_batch)],
#        outputs=[network_output],
# givens={
# X: X_batch,
# }
# )
# theano_forward.name += ':theano_forward:sliced'
# return theano_forward
|
# -*- coding: utf-8 -*-
# Resource object code
#
# Created: Thu Jul 23 16:26:35 2015
# by: The Resource Compiler for PyQt (Qt v4.8.6)
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore
qt_resource_data = "\
\x00\x00\x0e\x0d\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x40\x00\x00\x00\x40\x08\x06\x00\x00\x00\xaa\x69\x71\xde\
\x00\x00\x0d\xd4\x49\x44\x41\x54\x78\x5e\xe5\x5a\x5b\x8b\x25\x59\
\x56\xfe\x56\xdc\xe3\xc4\xb9\xe4\xc9\x5b\x55\xb5\x5d\x63\x8f\xad\
\x83\x32\x0f\xc2\xe0\xc3\xbc\xd9\x8d\x4a\xfb\x62\xb7\x42\xd3\x0c\
\x8e\x88\x17\x18\x47\x9a\x79\x90\xf9\x03\xed\x48\x3f\xf8\xa2\x20\
\x6a\xf7\x4b\x81\xbe\x09\x03\x63\x43\x83\x62\x8b\xe2\x65\x44\x1b\
\xda\x11\x1d\xd4\xb1\x9c\x5b\xd7\x4c\x56\x66\xe5\xc9\x3c\x99\xe7\
\x92\xe7\x16\xb1\xf7\x76\x9f\x15\x27\x17\x44\xc4\x09\xb2\x07\x34\
\x52\xe8\x55\xec\xda\x71\x76\xec\x88\x3a\xdf\xb7\xd6\xfe\xf6\xda\
\xab\x0e\x01\x30\xf8\xff\x6b\x84\xff\x63\xf3\xb0\xb1\x56\x0c\xb8\
\x2e\x60\x74\xf1\x5f\x35\x95\x0b\x31\x19\x26\x07\x98\x5e\x41\xac\
\x9d\x00\xd0\xb5\x8f\xd4\xdf\x70\x00\xa5\x81\xe5\x12\x30\x32\xa7\
\x01\x02\xe2\x18\x38\xff\x56\x0e\x5e\x1b\xe9\xd9\x8c\x01\xb4\x96\
\x9e\x1b\x51\xde\xcb\xb8\x6d\xf7\x3f\x0e\xb1\x7f\xfb\x07\x99\x0f\
\x40\xde\xc9\x63\x4a\x0b\xb8\xe2\x3b\xb4\x8c\xe1\x27\x5e\x02\x26\
\xd3\x06\x09\x70\xa9\x06\x3c\x5f\x57\xc0\x0b\x70\x23\xe0\x50\xb0\
\x2a\xf8\xbc\xd7\x25\xf0\xc6\x14\xe6\xca\x67\xcf\x45\x63\xe6\x30\
\xeb\x28\x82\x37\xe2\xd9\xa2\x97\x88\xaa\x84\x08\x78\x31\x21\xaa\
\xfa\x4e\x01\x5f\x6c\x40\xe9\x9d\xa6\x51\x02\x8a\xde\xae\x86\xa7\
\x34\x01\xac\xcb\x73\x2b\xfa\x20\x73\x0b\xc0\x8a\xef\x6d\x1e\x7c\
\xbd\x08\x16\x42\xb9\x10\xae\xe2\x4d\x21\xc1\x6c\x9d\x5b\x5d\x02\
\xe5\xb0\x27\xb9\xae\x01\x2f\xf3\x6f\x83\x80\x22\x20\x23\x6d\x0b\
\x21\x02\xa8\xde\x6b\xba\x36\xec\x3f\x18\x78\xd3\x3c\x01\x25\x40\
\x02\x5e\xae\x45\xf0\x6a\xbd\x29\x56\x12\x53\xc3\xfd\x8d\x9e\x17\
\x07\x70\xdf\xb4\x06\xd4\xaf\x79\x69\x22\x78\x54\x19\xaf\x98\x91\
\xf1\xb2\x68\xd6\x7b\xde\x60\xd3\x37\x4f\x80\x80\x2d\x7a\x08\x5b\
\xb6\x3a\xaa\x0d\xe5\x82\xe9\x42\x94\x94\xc9\x15\xe0\x02\x9e\x04\
\xbc\x44\xcd\x2d\x68\x80\xa9\xee\xf3\xd5\xf0\x2c\x6b\x41\x4d\xd8\
\x9a\x1a\xdd\xc0\x76\xc1\x53\xb2\xed\xde\xe2\x12\x10\x60\xb5\x6a\
\x5f\xf6\x60\x51\xf0\xc4\x64\x6e\x1d\xf8\x1a\xcf\x73\xbb\x45\x02\
\x44\xdc\x94\x78\xa9\xb4\xd5\x15\x01\x50\x49\xed\xc5\x04\xe8\xcd\
\xe0\x95\x84\xfd\xad\x80\x17\x02\xc4\x4c\x29\x0a\x40\x37\x84\xb2\
\xd9\x1e\x01\xd5\xb9\x25\xb2\x6a\xc1\xcb\xbb\x6e\x4d\x03\xea\x13\
\xa2\xca\x19\xa1\x2e\x73\xbb\x79\xcd\x1b\xb3\x1d\xbc\x08\x65\xf3\
\x04\x94\xc0\x57\xf7\xf9\x6a\xd8\xd7\x8a\x60\x2d\x78\x19\xab\x82\
\xbf\xe5\x4c\xd0\x75\x2a\x49\x8e\x10\xe0\x6c\xce\xf6\x0e\x18\x74\
\xee\x9e\xcd\x7d\x17\x80\x49\x01\x84\x28\x58\x05\xbc\x90\x79\x23\
\x78\xbe\x4f\xd4\x6c\xc5\xc5\xf8\x3e\x90\xa6\x1f\xce\x8a\x90\x03\
\x30\xf8\x0f\xad\x39\xf8\x10\x9b\x68\x40\x12\x03\x5f\xfd\xc7\xed\
\x82\x67\x4c\x71\xcb\x02\xaa\xdb\xa2\xbb\x03\x7c\xe2\x13\x10\xfb\
\xfb\x3f\xaf\x3f\x53\xa0\xf4\x5e\xa9\x1b\xc8\x38\xf0\xcb\xaf\x02\
\xe3\x49\x83\x04\x68\xd4\xab\xbd\x80\x65\xf0\xdb\x33\xbc\x74\x58\
\x15\x40\xa0\x7e\x9f\xd7\x35\xa4\xdc\x62\x45\x68\x7b\x7a\x6b\xc4\
\xd3\x37\x66\x78\x05\x23\xaa\x07\x2f\x20\xab\xe0\x6f\x3f\x15\x26\
\xf1\x5a\x11\x00\x70\x63\x6e\x5f\x30\xad\x6e\x06\xaf\x6c\xa3\xc2\
\x3b\x6e\x9b\x80\x82\xe7\x6f\x2e\x58\xd6\x97\xb1\xca\x21\xbe\x15\
\xbc\xa4\xc3\x72\x5f\x09\xf8\x5b\x4a\x85\x6b\xd7\x7c\x4d\xfd\xde\
\xd4\xa5\xc3\x45\xf0\xa0\x2a\x78\x16\x40\xb5\x2d\xf4\x2b\x24\xb8\
\x10\x83\x91\x9e\xdb\xcd\xd8\x92\xe4\x70\x8f\x68\xb6\x0f\xe0\x60\
\xa5\x54\xbf\xd3\x4a\x3e\xb9\x5a\x2d\x3f\xdb\xed\x76\x5f\x38\x3a\
\x3a\xfa\x27\x00\x4e\xe9\x2c\x50\x5d\xf3\xd5\xf1\x1b\x43\x56\xc6\
\x85\x58\x25\xaa\x2f\xa4\x96\xd3\x1c\xc7\xd9\x90\x84\x02\x3c\x85\
\x2d\xf6\xde\x7b\x5f\xf9\xc2\xd1\xd1\x77\x22\x0b\xa2\x75\x7a\x3a\
\x88\xce\xce\xce\x5a\x4f\x9e\x9c\x76\x86\xc3\xf3\xf6\xf9\xf0\xa2\
\x33\x1e\xd9\x36\x1e\x77\x96\xcb\x79\x94\xa6\x59\x90\xa6\x69\xa8\
\x94\xf2\xce\xe7\x73\x38\x8e\xa3\x5f\x79\xe5\x95\xcf\x3e\x78\xf0\
\x60\x4d\x80\x44\x80\x78\xc6\x48\xd8\xdf\x08\x5e\x80\x18\x5d\x57\
\x11\x2a\xcc\x97\x42\xc9\xf1\x13\x17\xa7\x83\x1c\x9b\x52\x24\xd5\
\xa7\xd9\x1c\x18\x8d\x81\x95\xca\x9f\x7f\xf5\xd5\xcf\x7d\x89\x60\
\x8c\xf5\x9a\x73\x7c\x7c\xdc\xb5\x60\x7b\x97\x97\x17\xed\x97\x5e\
\xfa\x99\xd6\x62\xb1\x88\x96\xcb\x65\x64\xfb\x38\xcb\x32\x0f\x25\
\x73\x3d\x1f\x7e\xd8\x82\x9f\xf4\x91\x04\x31\x5a\x9d\x5d\x33\x7c\
\xf2\xc8\xcc\xc6\x03\xe7\xe2\x62\x94\x20\x37\xf2\x04\xec\x07\xf1\
\x7c\x7d\x6e\x5f\x5f\x11\xe2\x56\x24\xf9\x8b\x6f\xf9\xf8\x9b\x2f\
\x6b\x10\x19\x98\xfa\x3d\x0f\x7f\xf0\xfb\xbf\xf7\x73\xd8\x62\xae\
\xbf\x06\x17\xc3\x0f\xda\xd8\xdd\x79\x0a\x71\x6f\xdf\x24\xed\x3e\
\xe2\xce\x2e\x5a\x49\x17\x61\xbb\x87\x56\x7b\x17\x61\xd4\x81\x17\
\xc5\x70\x5d\xcf\x7e\xee\xd3\xdf\x7e\xe9\x77\xf1\x5f\xef\xbd\x83\
\xf1\x78\xd4\x29\x69\xc0\x07\x07\x6f\x0a\xe1\x5d\x2b\x82\x55\xc1\
\xbb\x5e\x06\x26\xc0\xe5\x98\x98\xa5\x3b\xf7\x3f\x86\xb8\xb3\xcf\
\xee\x76\x5c\xdf\x36\x87\xbf\x2c\x91\x6d\xae\xbb\x06\x69\xe6\xe3\
\x33\x3c\xfc\x97\xbf\x46\xdc\xdd\xc5\x27\x5f\xf8\x15\x24\xbd\x03\
\x84\xad\x0e\xe2\x56\x17\x5e\x1c\xc3\xf3\x5b\xf0\x5c\x87\x40\x0e\
\x80\x9c\x50\xa3\x15\x54\x96\xc2\x28\x0d\xa5\x53\xa8\xd5\x12\x59\
\x3a\x47\x64\x9f\x59\x9b\x8d\xa2\x6b\x02\x8c\x10\xf0\xbd\xac\x79\
\xa2\xf2\x56\x57\x27\x82\xd5\x5a\xe2\x74\x0a\x8c\x2e\x32\x0b\xd6\
\xc3\x0b\xbf\xf8\x5b\x38\xfc\xc8\x8f\x20\x5d\xcd\x40\xe4\xc8\xff\
\xc0\xf0\x1f\xa5\x10\x26\x5d\x7a\xf8\xcf\x7f\xc9\x04\xec\xdf\x79\
\x06\x3f\xfa\xe3\x2f\x23\x5d\xce\xa1\xb5\x82\xce\xb2\x1c\xa8\x5a\
\x61\x99\x2a\x18\xad\xf3\xc6\xcf\x6b\x40\x23\xef\x19\x47\x0e\x24\
\x4c\x7a\xdc\x8f\x46\xe3\x76\x85\x80\x0f\x2e\x78\x15\xf0\xd5\x8a\
\x90\x2e\x94\xbd\x2a\x04\x9c\x59\x02\xfc\xb0\x8b\xa0\xd5\xc6\x6a\
\x3e\x41\x9a\xad\x72\xec\x9b\x33\xb9\x36\x1a\x4a\xa5\xec\xd1\xd1\
\xe0\x88\x49\x8c\xda\x3b\x58\xcd\x26\x58\xcc\xa7\x4c\x16\x83\x83\
\x23\x02\x44\x20\x39\x47\xc3\x38\x30\xa4\x41\x86\x78\x1e\x53\xa2\
\x35\xe2\x76\x1e\x01\x57\x57\xd3\x0e\x64\x26\xdb\xf7\x24\x78\x55\
\xf0\xb6\x89\x09\xe8\x02\x78\xd9\xf6\xa6\x33\x17\xd3\x2b\x83\x4e\
\xff\x00\x61\xdc\x86\xd2\x1a\x8e\x43\x39\x04\x22\x80\xc0\xbd\x03\
\x62\x80\xf3\xd9\x98\x31\xc5\x9d\x3e\x5c\x2f\x00\x64\xce\x26\xe4\
\x37\x82\x63\x8c\xb6\x8d\x7b\x6e\x28\x5d\xeb\x2c\x45\x9c\xf4\xe1\
\x38\xbe\x75\xc2\xb4\xf5\xdc\x73\xcf\x45\xa5\x54\xb8\x5e\x08\xb5\
\x6c\x65\x5b\xf2\x78\xb3\x2d\x0f\xa8\x8f\x9e\xf3\x21\xf1\xfc\xf6\
\xce\x1d\x78\x7e\x08\xad\x53\xfe\x0c\x53\x06\x01\x0e\xf5\x99\xd5\
\x00\x00\x6b\x11\x93\xf5\x0d\x40\xe6\x09\x78\xc8\xf3\xa5\x77\x19\
\x89\x80\x30\x6a\xc3\x8f\x42\x64\x59\x16\x3d\x7a\xf4\xe8\x50\x08\
\x70\xb0\xdd\xf3\x72\x5a\x93\xf1\x92\xe7\x49\xe6\x95\xac\x9a\xe1\
\x11\xe5\xfd\xc9\x00\x6c\xdd\xfe\x5d\xd6\x81\x4d\x0a\x2a\xe0\x61\
\xae\xd7\xb2\x82\xce\x56\x98\x4f\x2e\x78\x7e\x10\x26\x50\x3a\x2b\
\x90\x54\x00\xaa\x2b\xe0\xa5\x07\x88\x89\xe6\x6d\x31\x4c\x90\xae\
\x56\x21\x11\x1d\x14\x4e\x83\x35\x6a\x2f\xe0\xab\x61\x5f\x1c\xab\
\x07\x5f\x1c\x3b\x1b\x12\x0f\x24\xbb\x7b\x80\x31\xac\xd6\xe4\x5c\
\x8b\x1f\xe0\x10\xf1\x4e\xe0\x79\x01\x5c\x3f\xc0\x6c\x7c\x9e\x13\
\xb6\x7f\x0f\x9e\x1b\xf0\x6e\xc1\xc0\x94\x82\xbe\x0e\x71\x2d\xd9\
\xd3\x16\xf0\xd8\x2c\x01\xcd\xef\x0b\x82\xc8\x5c\x29\x15\x4c\x26\
\xb3\x7d\x21\x60\xb1\x00\x7e\xf2\x67\x01\xcf\x13\xf1\x2a\x2f\x6b\
\x19\xa8\x0e\x55\x6b\x78\xbf\xfa\x39\x01\x5e\xcd\x0f\x28\xbf\xd1\
\xdf\xbd\x0f\x2f\x88\x10\xc5\x1d\x0b\xca\x65\x20\x69\xba\x44\xba\
\xb8\xb2\x5e\xbf\xc4\x7c\x36\xc1\xf9\xc9\xd7\x31\x38\xfe\x06\xcf\
\x1f\x1c\xfd\x37\xa2\x56\x0f\x41\x18\x23\x6c\x75\xd9\x9b\xe4\x10\
\xb4\xca\x78\x47\xc8\x54\x6a\x7b\xdb\x94\x2a\xe8\x00\x88\x84\x10\
\x4b\x20\x85\xc9\x8e\xfd\xf8\xbe\x43\xa4\x77\x85\x00\xe0\x7f\xf7\
\x37\x39\xa3\x31\xb6\x5a\x10\x04\x30\x26\x03\x39\x2e\x9c\x30\xc2\
\xf1\xb7\xff\x1d\xe7\x16\xe0\xe4\xfc\x31\x2e\xcf\x4f\x30\x1d\x1e\
\x63\xbc\x6e\xf6\x3a\x5d\xcd\x0b\xcf\xbe\xfb\x67\x0f\xf0\x2e\x1e\
\x20\x4e\xba\x68\xef\x3d\x85\xde\xee\x3d\x74\x76\xef\xa0\xb7\xf7\
\x34\x0b\x6a\xdc\xee\x23\x8c\x12\xf8\x51\x2b\x77\xa0\x51\x50\x4a\
\x31\x29\x2a\xcb\xc9\x31\xae\x8f\x78\xb3\x15\x2e\x97\xe9\xbd\x6b\
\x02\xa8\xa9\xe2\x6b\x1c\xc7\x1f\x75\x1c\xe7\x2b\x97\x97\x97\x3b\
\x7f\xf1\xc7\xbf\x69\xb2\xd5\x8c\xcc\x96\x50\xf1\x83\x18\xbb\x77\
\xbe\xdf\x0a\xe5\x21\x8e\xdf\xff\x0f\x8e\x8a\xc3\xfb\x3f\x8c\x74\
\x39\xc3\xe8\xec\x08\xf3\x47\x5f\xc3\xc0\x36\x31\x72\x10\xc5\x09\
\x3a\x56\x58\xbb\x7b\xf7\x90\xf4\x6d\x6f\xc9\x69\xf7\x0e\x11\x77\
\x76\x58\x3f\x38\xda\x5a\x1d\x74\xed\xbd\xb5\x1d\xdc\x39\x78\xf9\
\xf2\x72\xf8\x3b\x5e\x93\xd5\xe7\xd9\x6c\x76\x40\x44\xb1\x31\xb9\
\x40\xf4\xf7\xef\xa3\xd5\xdb\xb3\x40\x0f\xd0\xdb\x7f\x9a\x3d\x9a\
\x74\xf6\xf8\x4b\x47\x71\x97\x33\xbe\x3f\xfd\xc3\xdf\xc0\xe9\xa3\
\xff\xc4\x8f\xfd\xd4\xa7\x71\xf7\x23\x1f\xc7\xf0\xc9\xfb\x98\x4d\
\x86\x18\x0f\x4f\x70\x39\xf8\x2e\x26\xc3\x23\x4c\x47\xe7\xb8\x1a\
\x9d\x61\xf0\xf8\xeb\xdc\x84\x17\x22\x5e\x36\xad\xce\x0e\xef\x3a\
\xfd\xbb\xcf\x60\x6c\xa3\x6d\x6d\xb3\xe9\x94\x93\xa1\x26\x09\x40\
\xd4\x6e\x1f\x4e\x2e\x2e\xc2\x83\xa7\x7f\xc8\xfc\xf4\x2f\x7d\x81\
\xc2\x30\x0f\x59\xd7\xf3\x40\x70\xa0\x39\xb3\xcb\x60\x6c\x4b\xd3\
\x15\x2b\x70\xdb\x12\x74\x0a\x60\x7a\x39\x80\xff\xb1\x04\xfd\x83\
\xfb\xd8\x7b\xea\xa3\x9b\xad\x53\x23\x5b\xad\xb0\x5c\x4c\x2d\x29\
\x17\x96\x84\x53\x4c\x2e\x6d\x1b\x9e\x70\xa4\x58\x52\x58\x4f\xce\
\x4f\xbe\xcd\xed\xfd\xaf\xbd\x2b\x00\xac\x33\x3a\xc6\x18\xf2\xd0\
\xa0\x7d\xfa\x53\x9f\xfa\xb5\x37\xdf\x78\x03\x7b\x77\x7f\x00\x07\
\x4f\xfd\x20\x7b\x52\x65\xa9\x05\xb1\x60\xf0\xb2\x0d\x02\xac\xf2\
\xf0\x43\xc4\xc9\x0e\x3f\x3b\xbf\x1a\xf3\x56\xb6\x5a\x6b\xc3\x0a\
\x1b\x91\xcb\xa9\xf5\xfc\xc0\x6a\xc2\x5d\xec\xec\x7f\x1f\x88\x93\
\x43\x83\xcc\x12\xb8\x5a\x0b\xea\xf4\x12\xd3\xd1\x00\x93\x8b\x53\
\xac\xcf\x15\x67\x27\xdf\xc4\xd9\xd1\x37\x60\x4f\x92\x6d\x22\x0a\
\x1b\x22\x80\x31\xe1\x3b\x8f\xbe\xdb\xe7\xa4\xa6\x77\x80\x74\xb5\
\x60\xd5\xb7\x9a\x00\xe0\x3a\xb3\xd3\x30\x60\x04\x39\x19\x44\x2c\
\x6e\x6b\x5b\x58\x4f\x1a\xa5\x25\xb4\x01\x4e\x87\x79\x9e\x56\x79\
\x7e\xa0\x35\x93\xc2\xe3\x3c\xc3\xf5\x58\x47\x3a\x96\x9c\x7b\xcf\
\x68\x38\x9e\x8f\xf3\xc7\xdf\xc4\x5f\xfd\xc9\x6f\xaf\xe7\xb2\x2e\
\x35\x4a\xc0\xd9\xd9\x80\x73\xf0\x4e\x6f\x1f\x8c\x81\x3d\x8e\x3c\
\x89\x81\x29\x64\x77\x0c\xd4\x68\xde\xf2\x00\xb0\x27\x39\x4a\x20\
\x59\x63\xe1\x39\x18\xc0\xa1\x9c\x14\x18\xe2\xfb\x3c\xdf\x28\x9e\
\x67\x49\xe2\x1a\x81\xd6\x86\x7b\x20\x0b\x7e\xe1\x33\x9f\xd9\x6d\
\x8c\x00\x6b\xf4\xec\xb3\xcf\xf6\x38\x02\x76\xf6\xa1\x95\xa4\xb4\
\x0c\x62\x03\x46\xc0\x49\xfa\x1a\x27\xb9\x68\x5d\x5d\xf0\x01\x89\
\xf2\xf9\x85\xe7\x60\x6a\x92\x20\x6e\x90\x08\x83\x01\x27\x59\xae\
\xeb\x23\x5d\x2e\xbd\x77\xde\x7a\xab\xe5\xa0\x21\x7b\xfe\xf9\xe7\
\x7b\x53\xab\xbc\xbe\x1f\xa1\xdd\x3b\x80\xca\xb2\x8a\x27\xa1\x73\
\x40\x92\xbd\x69\x85\x28\xe1\x08\x58\x67\x84\xac\x15\xe0\x53\x23\
\x83\xac\x05\x2f\xef\x94\x31\x23\x11\xe1\xb8\x2e\x47\x80\x1d\x23\
\xad\x3d\xaf\x31\x11\x7c\xf8\xf0\xe1\x5d\xbb\xf5\x84\x61\xdc\xe5\
\xad\x49\xab\xf4\x3a\x9c\x0b\x9e\x97\xa8\xd0\x06\x2a\x5b\x21\x88\
\xdb\x2c\x72\x8b\xc5\x15\xd7\x02\xfc\xa0\x95\x87\xb5\xd9\xf2\x8c\
\x80\x97\x6b\x39\x27\xf0\x18\x29\x4e\xc2\x5c\x2f\x58\x8f\xf9\x71\
\xec\x1e\x36\x16\x01\x5a\xd3\xbd\xf9\x72\x19\x87\x49\xdb\x82\x4a\
\x58\xfd\x0d\xe4\x50\x23\x9e\x17\x8f\x81\x3d\xc6\x80\x83\xa8\x8d\
\xcc\x82\x5f\x2d\x66\xac\x1d\xf2\x8c\x6d\xf5\xe0\xe5\xbd\xf9\x35\
\xf8\xfc\xc0\x02\xea\xfa\x3e\x53\x6e\xeb\x8c\xa6\x31\x02\x5e\x7e\
\xe5\xe5\x5f\x57\x4a\xb9\x71\xa7\x6f\x3c\xd7\x27\x75\x7d\xbe\x96\
\x2f\x69\x0a\x40\x28\x3f\x0e\xb3\xf7\xc3\xb8\xcd\xe1\xbf\x98\x8d\
\x61\x88\x04\x14\x60\x6a\xc0\xe7\xe3\xba\xb4\x0c\xb4\xd1\x79\x1a\
\x4e\xae\xb1\xe6\x1e\x1e\x1e\xee\x37\xb6\x04\x8e\x8f\x1f\xb3\x9c\
\x77\xfa\x77\xb9\xde\x67\xb4\x02\x81\x24\x4c\x41\x28\x1e\x69\xa1\
\x01\x05\x78\x9e\xcf\x07\x20\x00\x4c\x00\x01\x39\x10\xbe\x42\x0d\
\x78\xd1\x87\xeb\x25\x20\x11\x41\x30\x44\x5e\x60\x00\xd0\xe3\xb3\
\x0b\xbf\xb1\x08\x38\x3d\x79\xc2\x1b\x7a\xbb\x7f\x00\xe4\xea\x2d\
\x6a\x0f\x94\xc0\x4b\x41\x24\x03\x39\x0e\xa2\x3c\x19\xe2\xac\x8e\
\xf2\xb9\x05\xe1\xab\x03\xaf\x19\x74\x0e\x5e\xe6\x03\xf0\xfd\x80\
\x3f\xdf\xbf\x73\xe7\xe7\x9b\x8a\x00\xc9\x01\xda\x9d\x3d\x40\x5d\
\x0b\x1f\x72\xd0\x30\x5b\x8b\x19\x9a\x81\x12\xa2\xce\x8e\xe4\x02\
\x05\x30\x46\xdf\x04\x5e\x96\x01\x01\x72\xcf\x0b\xa3\xcd\x89\x70\
\x96\x34\x42\xc0\x6b\xaf\xbd\xe6\xbd\xf9\xe6\x9b\x7d\x10\x71\x79\
\x5b\xe9\x2c\xf7\x0c\x9c\x5a\xf0\x46\x44\x4e\x23\x8e\xbb\xf9\x17\
\x9e\x8f\xa1\x65\xdc\xd4\x83\x97\x6d\x50\xbc\xcf\x63\x3a\xff\xbc\
\x49\x84\x80\x2c\x53\x68\x44\x04\xdf\x7e\xfb\xed\x1d\x9b\x03\x24\
\x41\xd4\xe2\xc2\xa4\x52\x5c\x07\xac\x01\x2f\x5f\x5e\xc6\xc2\x4d\
\x2e\xb0\x98\x8e\xa0\x15\x8b\x67\x49\xed\xeb\xc1\x13\x00\x89\x14\
\x9d\x47\x84\xe7\x85\x1b\x02\xd2\xae\xd7\x4c\xf8\x9f\xdd\x5f\x2c\
\x16\x61\xb2\x73\x88\xa0\xd5\xe1\xb4\x94\x88\x0a\xe0\x4b\x45\x4c\
\xd1\x04\xad\x34\x17\x3a\x98\x80\xf9\x88\x73\x03\xc9\x1b\x4a\x22\
\xc7\x64\x96\x48\x31\x84\x52\x5e\xc0\x3b\x0b\x03\x48\xd3\xd4\x69\
\x24\x02\x5e\x7c\xf1\xc5\xcf\x2b\xa5\xfc\x70\x5d\xd2\x0a\x5a\x50\
\x59\x26\x1e\x2b\x00\x2f\x85\x32\x98\x00\x2e\x66\xe6\x04\x5c\x8d\
\x91\xad\x96\xc8\xf1\xeb\xf2\x5c\x59\xf3\xd2\x17\xb3\x46\x69\x8e\
\xeb\x6f\x72\x13\x1d\x36\x42\xc0\xe9\x60\xd0\xb6\xe0\xc8\x96\xb6\
\x8d\xeb\x3a\x04\xa3\xca\xe9\x6b\x19\xbc\x00\xd4\x4a\x21\x88\x12\
\x2e\xa1\xcf\xa6\x97\x5c\x2a\xe3\xe8\xd1\xc5\xb0\xd7\x5a\x04\x0f\
\xda\x54\xc0\x8b\xf7\x8d\x52\x70\x3d\x21\x20\x6a\x84\x80\x93\x27\
\xf9\x16\x98\xf4\xf6\x01\x22\x28\xad\x60\xd6\xcd\xe8\x7a\xf0\x92\
\xcc\x64\xf0\x83\x08\x7e\x9c\x20\x4b\x53\xac\x96\x33\x00\x42\x50\
\x59\xf0\x64\xbd\x57\xc0\x6b\xb5\xd1\x00\x05\x22\x47\x32\xc8\x46\
\x34\xe0\x7c\x30\x60\x15\xeb\xed\xdd\xe3\x33\x3a\xc1\x01\x08\x22\
\x4a\xd0\x0a\xba\x22\x82\x9c\x08\x40\xc3\x70\xc5\x28\x8a\x12\xcc\
\xc7\x43\xde\x09\xda\x3d\x39\x4d\x16\xe6\xb3\x87\xb5\x29\x12\x24\
\x04\x68\xd6\x13\x50\xb6\xa9\x41\x10\x94\x52\xed\x46\x08\x98\x4c\
\x26\x1d\x22\x32\x8e\xe3\x61\x36\x1e\x1a\xa5\x52\x3e\x92\x92\xe3\
\xc2\x73\x5c\x02\x05\x72\xaa\xd3\xb9\xa7\xc4\x5b\x80\xb1\x73\x03\
\x84\x71\x67\xb3\x15\x5e\x61\xfd\x1e\xd0\x75\x2e\xe8\x00\x0e\xe4\
\xe7\x6d\xec\x5c\x06\xec\x30\xa9\x0e\x14\x8c\x21\x18\x10\xe0\x81\
\xc1\x7b\x41\x0c\x27\xcf\x46\x9b\x29\x88\x0c\x87\xc3\x43\x63\x0c\
\xbd\xfb\xce\x1f\xe1\x5f\xff\xee\x8b\x70\xfd\x10\x7e\x10\xc2\xf5\
\xc2\xb5\x22\x1b\x3f\x4c\xf8\x80\xe4\x87\x31\x82\x75\x0b\x5a\xf0\
\x82\x90\xe7\xb1\xf7\xe3\x1d\x26\x0b\x00\xd9\x3a\x9f\x59\xce\xa6\
\x58\xcd\xa7\x9b\xdd\x42\xb1\x97\x39\xc4\x91\xef\x1a\x46\x67\x9b\
\x71\xc5\x9f\xb5\xb9\xfe\xaf\xf2\x0c\xb4\x49\xa8\xc8\x71\xec\x94\
\xac\xd7\x08\x01\xdd\x6e\x37\x05\x30\xd4\x4a\x85\x8b\xab\x0b\x47\
\x6b\xed\x1a\x63\x1c\xdb\xd6\x3d\xa1\xde\x40\x44\x5c\xd2\xd6\x2a\
\xe3\x01\xfb\x03\x07\xfa\xd6\x57\xbf\xcc\xe0\x58\x00\xc1\x15\xa4\
\x42\xe2\x64\x74\xb1\x58\x92\x97\xce\x80\xcd\x5f\xb2\xfe\x01\x34\
\xb3\x04\x5e\x7f\xfd\xf5\x37\x46\xa3\x11\xc6\xe3\x71\xeb\xe2\xe2\
\xc2\x9d\x5c\x5d\xc5\xe3\xcb\xb1\xb7\x5a\x2d\x82\xd1\x64\x1a\x2c\
\x97\x8b\xe0\x6a\x32\x69\xad\xd2\xd4\x9d\x4e\xa6\x5d\x63\x34\xd9\
\xbc\x21\xd1\x5a\x3b\xb6\x78\x99\xac\x49\x5a\x64\xba\x0f\x20\x76\
\xa0\x4e\xd5\x6a\x9a\x01\x20\x63\x0a\x60\x40\xf9\x75\x21\xed\x65\
\xbb\x1e\x43\xf5\x17\x29\xff\x03\x3d\xd7\xc8\x9a\x1c\xae\x83\xfd\
\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
"
qt_resource_name = "\
\x00\x07\
\x07\x3b\xe0\xb3\
\x00\x70\
\x00\x6c\x00\x75\x00\x67\x00\x69\x00\x6e\x00\x73\
\x00\x09\
\x09\xb5\x3b\x7d\
\x00\x63\
\x00\x61\x00\x72\x00\x74\x00\x6f\x00\x67\x00\x72\x00\x61\x00\x6d\
\x00\x06\
\x06\x8a\x9c\xb3\
\x00\x61\
\x00\x73\x00\x73\x00\x65\x00\x74\x00\x73\
\x00\x08\
\x0a\x61\x5a\xa7\
\x00\x69\
\x00\x63\x00\x6f\x00\x6e\x00\x2e\x00\x70\x00\x6e\x00\x67\
"
qt_resource_struct = "\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x01\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x02\
\x00\x00\x00\x14\x00\x02\x00\x00\x00\x01\x00\x00\x00\x03\
\x00\x00\x00\x2c\x00\x02\x00\x00\x00\x01\x00\x00\x00\x04\
\x00\x00\x00\x3e\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\
"
def qInitResources():
QtCore.qRegisterResourceData(0x01, qt_resource_struct, qt_resource_name, qt_resource_data)
def qCleanupResources():
QtCore.qUnregisterResourceData(0x01, qt_resource_struct, qt_resource_name, qt_resource_data)
qInitResources()
|
from json.encoder import encode_basestring
from slimit import ast as js
from ..types import NamedArgMeta, VarArgsMeta, VarNamedArgsMeta
from ..utils import split_args, normalize_args
from ..nodes import Tuple, Symbol, Placeholder, String, Number
from ..utils import Environ
from ..compat import text_type
from ..checker import HTML_TAG_TYPE, GET_TYPE, IF1_TYPE, IF2_TYPE, JOIN1_TYPE
from ..checker import JOIN2_TYPE, get_type, DEF_TYPE, EACH_TYPE
from ..checker import returns_markup, contains_markup
from ..constant import SELF_CLOSING_ELEMENTS
def _str(value):
return js.String(encode_basestring(value))
def _text(value):
return js.ExprStatement(js.FunctionCall(js.Identifier('text'), [value]))
def _ctx_var(value):
return js.BracketAccessor(js.Identifier('ctx'), _str(value))
def _yield_writes(env, node):
if returns_markup(node):
for item in compile_stmt(env, node):
yield item
else:
yield _text(compile_expr(env, node))
def _el_open(tag, key=None, attrs=None, self_close=False):
fn = 'elementVoid' if self_close else 'elementOpen'
return js.ExprStatement(js.FunctionCall(js.Identifier(fn), [
_str(tag),
_str(key or ''),
js.Array([]),
js.Array(attrs or []),
]))
def _el_close(tag):
return js.ExprStatement(js.FunctionCall(js.Identifier('elementClose'),
[_str(tag)]))
def compile_if1_expr(env, node, test, then_):
test_expr = compile_expr(env, test)
then_expr = compile_expr(env, then_)
else_expr = js.Null(None)
return js.Conditional(test_expr, then_expr, else_expr)
def compile_if2_expr(env, node, test, then_, else_):
test_expr = compile_expr(env, test)
then_expr = compile_expr(env, then_)
else_expr = compile_expr(env, else_)
return js.Conditional(test_expr, then_expr, else_expr)
def compile_get_expr(env, node, obj, attr):
obj_expr = compile_expr(env, obj)
return js.BracketAccessor(obj_expr, _str(attr.name))
def compile_func_expr(env, node, *norm_args):
sym, args = node.values[0], node.values[1:]
pos_args, kw_args = split_args(args)
name_expr = js.DotAccessor(js.Identifier('builtins'),
js.Identifier(sym.name))
compiled_args = [compile_expr(env, value)
for value in pos_args]
compiled_args.append(js.Object([
js.Label(_str(text_type(key)), compile_expr(env, value))
for key, value in kw_args.items()
]))
return js.FunctionCall(name_expr, compiled_args)
EXPR_TYPES = {
IF1_TYPE: compile_if1_expr,
IF2_TYPE: compile_if2_expr,
GET_TYPE: compile_get_expr,
}
def compile_expr(env, node):
if isinstance(node, Tuple):
sym, args = node.values[0], node.values[1:]
assert sym.__type__
pos_args, kw_args = split_args(args)
norm_args = normalize_args(sym.__type__, pos_args, kw_args)
proc = EXPR_TYPES.get(sym.__type__, compile_func_expr)
return proc(env, node, *norm_args)
elif isinstance(node, Symbol):
if node.name in env:
return js.Identifier(env[node.name])
else:
return _ctx_var(node.name)
elif isinstance(node, Placeholder):
return js.Identifier(env[node.name])
elif isinstance(node, String):
return _str(text_type(node.value))
elif isinstance(node, Number):
return js.Number(text_type(node.value))
else:
raise TypeError('Unable to compile {!r} of type {!r} as expression'
.format(node, type(node)))
def compile_def_stmt(env, node, name_sym, body):
args = [a.__arg_name__ for a in get_type(node).__args__]
with env.push(args):
yield js.FuncDecl(js.Identifier(name_sym.name),
[js.Identifier(env[arg]) for arg in args],
list(compile_stmt(env, body)))
def compile_html_tag_stmt(env, node, attrs, body):
tag_name = node.values[0].name
self_closing = tag_name in SELF_CLOSING_ELEMENTS
compiled_attrs = []
for key, value in attrs.items():
compiled_attrs.append(_str(text_type(key)))
compiled_attrs.append(compile_expr(env, value))
yield _el_open(tag_name, None, compiled_attrs,
self_close=self_closing)
if self_closing:
assert not body, ('Positional args are not expected in the '
'self-closing elements')
return
for arg in body:
for item in _yield_writes(env, arg):
yield item
yield _el_close(tag_name)
def compile_if1_stmt(env, node, test, then_):
test_expr = compile_expr(env, test)
yield js.If(test_expr, js.Block(list(_yield_writes(env, then_))), None)
def compile_if2_stmt(env, node, test, then_, else_):
test_expr = compile_expr(env, test)
yield js.If(test_expr, js.Block(list(_yield_writes(env, then_))),
js.Block(list(_yield_writes(env, else_))))
def compile_each_stmt(env, node, var, col, body):
col_expr = compile_expr(env, col)
with env.push(['_i']):
i_expr = js.Identifier(env['_i'])
with env.push([var.name]):
var_stmt = js.VarStatement([
js.Assign('=', js.Identifier(env[var.name]),
js.BracketAccessor(col_expr, i_expr)),
])
yield js.For(
js.VarStatement([js.VarDecl(i_expr, js.Number('0'))]),
js.BinOp('<', i_expr,
js.DotAccessor(col_expr,
js.Identifier('length'))),
js.UnaryOp('++', i_expr, postfix=True),
js.Block([var_stmt] + list(compile_stmt(env, body))),
)
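# For example, an `each` over `items` compiles to JS of roughly this shape
# (identifier names are managed by the Environ pushes above):
#   for (var _i = 0; _i < items.length; _i++) {
#     var item = items[_i];
#     ...body statements...
#   }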
def compile_join1_stmt(env, node, col):
for value in col.values:
for item in _yield_writes(env, value):
yield item
def compile_join2_stmt(env, node, sep, col):
for i, value in enumerate(col.values):
if i:
yield _text(_str(sep.value))
for item in _yield_writes(env, value):
yield item
STMT_TYPES = {
DEF_TYPE: compile_def_stmt,
HTML_TAG_TYPE: compile_html_tag_stmt,
IF1_TYPE: compile_if1_stmt,
IF2_TYPE: compile_if2_stmt,
EACH_TYPE: compile_each_stmt,
JOIN1_TYPE: compile_join1_stmt,
JOIN2_TYPE: compile_join2_stmt,
}
def compile_func_arg(env, type_, value):
if contains_markup(type_):
return js.FuncExpr(None, [], list(compile_stmt(env, value)))
else:
return compile_expr(env, value)
def compile_func_stmt(env, node, *norm_args):
sym = node.values[0]
arg_exprs = []
for arg_type, arg_value in zip(sym.__type__.__args__, norm_args):
if isinstance(arg_type, NamedArgMeta):
type_ = arg_type.__arg_type__
arg = compile_func_arg(env, type_, arg_value)
elif isinstance(arg_type, VarArgsMeta):
type_ = arg_type.__arg_type__
arg = js.Array([compile_func_arg(env, type_, v)
for v in arg_value])
elif isinstance(arg_type, VarNamedArgsMeta):
type_ = arg_type.__arg_type__
arg = js.Object([js.Label(_str(k), compile_func_arg(env, type_, v))
for k, v in arg_value.items()])
else:
arg = compile_func_arg(env, arg_type, arg_value)
arg_exprs.append(arg)
if sym.ns:
if sym.ns == '.':
name_expr = js.Identifier(sym.rel)
else:
name_expr = js.DotAccessor(js.Identifier(sym.ns),
js.Identifier(sym.rel))
else:
name_expr = js.DotAccessor(js.Identifier('builtins'),
js.Identifier(sym.name))
yield js.ExprStatement(js.FunctionCall(name_expr, arg_exprs))
def compile_stmt(env, node):
if isinstance(node, Tuple):
sym, args = node.values[0], node.values[1:]
assert sym.__type__
pos_args, kw_args = split_args(args)
norm_args = normalize_args(sym.__type__, pos_args, kw_args)
proc = STMT_TYPES.get(sym.__type__, compile_func_stmt)
for item in proc(env, node, *norm_args):
yield item
elif isinstance(node, Symbol):
if node.name in env:
yield _text(js.Identifier(env[node.name]))
else:
yield _text(_ctx_var(node.name))
elif isinstance(node, Placeholder):
yield js.ExprStatement(js.FunctionCall(js.Identifier(env[node.name]),
[]))
elif isinstance(node, String):
yield _text(js.String(node.value))
elif isinstance(node, Number):
yield _text(js.Number(node.value))
else:
raise TypeError('Unable to compile {!r} of type {!r} as statement'
.format(node, type(node)))
def compile_stmts(env, nodes):
for node in nodes:
for item in compile_stmt(env, node):
yield item
def compile_module(body):
env = Environ()
mod = js.Program(list(compile_stmts(env, body.values)))
return mod
def dumps(node):
return node.to_ecma() + '\n'
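# Usage sketch (hypothetical `body`, assuming the package's parser and type
# checker produced a typed module body):
#   mod = compile_module(body)
#   js_source = dumps(mod)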
|
from sympy import (
Abs, acos, acosh, Add, And, asin, asinh, atan, Ci, cos, sinh, cosh,
tanh, Derivative, diff, DiracDelta, E, Ei, Eq, exp, erf, erfc, erfi,
EulerGamma, Expr, factor, Function, gamma, gammasimp, I, Idx, im, IndexedBase,
integrate, Interval, Lambda, LambertW, log, Matrix, Max, meijerg, Min, nan,
Ne, O, oo, pi, Piecewise, polar_lift, Poly, polygamma, Rational, re, S, Si, sign,
simplify, sin, sinc, SingularityFunction, sqrt, sstr, Sum, Symbol,
symbols, sympify, tan, trigsimp, Tuple, lerchphi, exp_polar, li, hyper
)
from sympy.core.compatibility import range
from sympy.core.expr import unchanged
from sympy.functions.elementary.complexes import periodic_argument
from sympy.functions.elementary.integers import floor
from sympy.integrals.integrals import Integral
from sympy.integrals.risch import NonElementaryIntegral
from sympy.physics import units
from sympy.utilities.pytest import raises, slow, skip, ON_TRAVIS
from sympy.utilities.randtest import verify_numerically
x, y, a, t, x_1, x_2, z, s, b = symbols('x y a t x_1 x_2 z s b')
n = Symbol('n', integer=True)
f = Function('f')
def NS(e, n=15, **options):
return sstr(sympify(e).evalf(n, **options), full_prec=True)
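# e.g. NS(pi, 5) == '3.1416'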
def test_principal_value():
g = 1 / x
assert Integral(g, (x, -oo, oo)).principal_value() == 0
assert Integral(g, (y, -oo, oo)).principal_value() == oo * sign(1 / x)
raises(ValueError, lambda: Integral(g, (x)).principal_value())
raises(ValueError, lambda: Integral(g).principal_value())
l = 1 / ((x ** 3) - 1)
assert Integral(l, (x, -oo, oo)).principal_value() == -sqrt(3)*pi/3
raises(ValueError, lambda: Integral(l, (x, -oo, 1)).principal_value())
d = 1 / (x ** 2 - 1)
assert Integral(d, (x, -oo, oo)).principal_value() == 0
assert Integral(d, (x, -2, 2)).principal_value() == -log(3)
v = x / (x ** 2 - 1)
assert Integral(v, (x, -oo, oo)).principal_value() == 0
assert Integral(v, (x, -2, 2)).principal_value() == 0
s = x ** 2 / (x ** 2 - 1)
assert Integral(s, (x, -oo, oo)).principal_value() is oo
assert Integral(s, (x, -2, 2)).principal_value() == -log(3) + 4
f = 1 / ((x ** 2 - 1) * (1 + x ** 2))
assert Integral(f, (x, -oo, oo)).principal_value() == -pi / 2
assert Integral(f, (x, -2, 2)).principal_value() == -atan(2) - log(3) / 2
def diff_test(i):
"""Return the set of symbols, s, which were used in testing that
i.diff(s) agrees with i.doit().diff(s). If there is an error then
the assertion will fail, causing the test to fail."""
syms = i.free_symbols
for s in syms:
assert (i.diff(s).doit() - i.doit().diff(s)).expand() == 0
return syms
def test_improper_integral():
assert integrate(log(x), (x, 0, 1)) == -1
assert integrate(x**(-2), (x, 1, oo)) == 1
assert integrate(1/(1 + exp(x)), (x, 0, oo)) == log(2)
def test_constructor():
# this is shared by Sum, so testing Integral's constructor
# is equivalent to testing Sum's
s1 = Integral(n, n)
assert s1.limits == (Tuple(n),)
s2 = Integral(n, (n,))
assert s2.limits == (Tuple(n),)
s3 = Integral(Sum(x, (x, 1, y)))
assert s3.limits == (Tuple(y),)
s4 = Integral(n, Tuple(n,))
assert s4.limits == (Tuple(n),)
s5 = Integral(n, (n, Interval(1, 2)))
assert s5.limits == (Tuple(n, 1, 2),)
# Testing constructor with inequalities:
s6 = Integral(n, n > 10)
assert s6.limits == (Tuple(n, 10, oo),)
s7 = Integral(n, (n > 2) & (n < 5))
assert s7.limits == (Tuple(n, 2, 5),)
def test_basics():
assert Integral(0, x) != 0
assert Integral(x, (x, 1, 1)) != 0
assert Integral(oo, x) != oo
assert Integral(S.NaN, x) is S.NaN
assert diff(Integral(y, y), x) == 0
assert diff(Integral(x, (x, 0, 1)), x) == 0
assert diff(Integral(x, x), x) == x
assert diff(Integral(t, (t, 0, x)), x) == x
e = (t + 1)**2
assert diff(integrate(e, (t, 0, x)), x) == \
diff(Integral(e, (t, 0, x)), x).doit().expand() == \
((1 + x)**2).expand()
assert diff(integrate(e, (t, 0, x)), t) == \
diff(Integral(e, (t, 0, x)), t) == 0
assert diff(integrate(e, (t, 0, x)), a) == \
diff(Integral(e, (t, 0, x)), a) == 0
assert diff(integrate(e, t), a) == diff(Integral(e, t), a) == 0
assert integrate(e, (t, a, x)).diff(x) == \
Integral(e, (t, a, x)).diff(x).doit().expand()
assert Integral(e, (t, a, x)).diff(x).doit() == ((1 + x)**2)
assert integrate(e, (t, x, a)).diff(x).doit() == (-(1 + x)**2).expand()
assert integrate(t**2, (t, x, 2*x)).diff(x) == 7*x**2
assert Integral(x, x).atoms() == {x}
assert Integral(f(x), (x, 0, 1)).atoms() == {S.Zero, S.One, x}
assert diff_test(Integral(x, (x, 3*y))) == {y}
assert diff_test(Integral(x, (a, 3*y))) == {x, y}
assert integrate(x, (x, oo, oo)) == 0 #issue 8171
assert integrate(x, (x, -oo, -oo)) == 0
# sum integral of terms
assert integrate(y + x + exp(x), x) == x*y + x**2/2 + exp(x)
assert Integral(x).is_commutative
n = Symbol('n', commutative=False)
assert Integral(n + x, x).is_commutative is False
def test_diff_wrt():
class Test(Expr):
_diff_wrt = True
is_commutative = True
t = Test()
assert integrate(t + 1, t) == t**2/2 + t
assert integrate(t + 1, (t, 0, 1)) == Rational(3, 2)
raises(ValueError, lambda: integrate(x + 1, x + 1))
raises(ValueError, lambda: integrate(x + 1, (x + 1, 0, 1)))
def test_basics_multiple():
assert diff_test(Integral(x, (x, 3*x, 5*y), (y, x, 2*x))) == {x}
assert diff_test(Integral(x, (x, 5*y), (y, x, 2*x))) == {x}
assert diff_test(Integral(x, (x, 5*y), (y, y, 2*x))) == {x, y}
assert diff_test(Integral(y, y, x)) == {x, y}
assert diff_test(Integral(y*x, x, y)) == {x, y}
assert diff_test(Integral(x + y, y, (y, 1, x))) == {x}
assert diff_test(Integral(x + y, (x, x, y), (y, y, x))) == {x, y}
def test_conjugate_transpose():
A, B = symbols("A B", commutative=False)
x = Symbol("x", complex=True)
p = Integral(A*B, (x,))
assert p.adjoint().doit() == p.doit().adjoint()
assert p.conjugate().doit() == p.doit().conjugate()
assert p.transpose().doit() == p.doit().transpose()
x = Symbol("x", real=True)
p = Integral(A*B, (x,))
assert p.adjoint().doit() == p.doit().adjoint()
assert p.conjugate().doit() == p.doit().conjugate()
assert p.transpose().doit() == p.doit().transpose()
def test_integration():
assert integrate(0, (t, 0, x)) == 0
assert integrate(3, (t, 0, x)) == 3*x
assert integrate(t, (t, 0, x)) == x**2/2
assert integrate(3*t, (t, 0, x)) == 3*x**2/2
assert integrate(3*t**2, (t, 0, x)) == x**3
assert integrate(1/t, (t, 1, x)) == log(x)
assert integrate(-1/t**2, (t, 1, x)) == 1/x - 1
assert integrate(t**2 + 5*t - 8, (t, 0, x)) == x**3/3 + 5*x**2/2 - 8*x
assert integrate(x**2, x) == x**3/3
assert integrate((3*t*x)**5, x) == (3*t)**5 * x**6 / 6
b = Symbol("b")
c = Symbol("c")
assert integrate(a*t, (t, 0, x)) == a*x**2/2
assert integrate(a*t**4, (t, 0, x)) == a*x**5/5
assert integrate(a*t**2 + b*t + c, (t, 0, x)) == a*x**3/3 + b*x**2/2 + c*x
def test_multiple_integration():
assert integrate((x**2)*(y**2), (x, 0, 1), (y, -1, 2)) == Rational(1)
assert integrate((y**2)*(x**2), x, y) == Rational(1, 9)*(x**3)*(y**3)
assert integrate(1/(x + 3)/(1 + x)**3, x) == \
log(3 + x)*Rational(-1, 8) + log(1 + x)*Rational(1, 8) + x/(4 + 8*x + 4*x**2)
assert integrate(sin(x*y)*y, (x, 0, 1), (y, 0, 1)) == -sin(1) + 1
def test_issue_3532():
assert integrate(exp(-x), (x, 0, oo)) == 1
def test_issue_3560():
assert integrate(sqrt(x)**3, x) == 2*sqrt(x)**5/5
assert integrate(sqrt(x), x) == 2*sqrt(x)**3/3
assert integrate(1/sqrt(x)**3, x) == -2/sqrt(x)
def test_integrate_poly():
p = Poly(x + x**2*y + y**3, x, y)
qx = integrate(p, x)
qy = integrate(p, y)
assert isinstance(qx, Poly) is True
assert isinstance(qy, Poly) is True
assert qx.gens == (x, y)
assert qy.gens == (x, y)
assert qx.as_expr() == x**2/2 + x**3*y/3 + x*y**3
assert qy.as_expr() == x*y + x**2*y**2/2 + y**4/4
def test_integrate_poly_defined():
p = Poly(x + x**2*y + y**3, x, y)
Qx = integrate(p, (x, 0, 1))
Qy = integrate(p, (y, 0, pi))
assert isinstance(Qx, Poly) is True
assert isinstance(Qy, Poly) is True
assert Qx.gens == (y,)
assert Qy.gens == (x,)
assert Qx.as_expr() == S.Half + y/3 + y**3
assert Qy.as_expr() == pi**4/4 + pi*x + pi**2*x**2/2
def test_integrate_omit_var():
y = Symbol('y')
assert integrate(x) == x**2/2
raises(ValueError, lambda: integrate(2))
raises(ValueError, lambda: integrate(x*y))
def test_integrate_poly_accurately():
y = Symbol('y')
assert integrate(x*sin(y), x) == x**2*sin(y)/2
# when passed to risch_norman, this will be a CPU hog, so this really
# checks that the integrated function is recognized as a polynomial
assert integrate(x**1000*sin(y), x) == x**1001*sin(y)/1001
def test_issue_3635():
y = Symbol('y')
assert integrate(x**2, y) == x**2*y
assert integrate(x**2, (y, -1, 1)) == 2*x**2
# works in sympy and py.test but hangs in `setup.py test`
def test_integrate_linearterm_pow():
# check integrate((a*x+b)^c, x) -- issue 3499
y = Symbol('y', positive=True)
# TODO: Remove conds='none' below, let the assumption take care of it.
assert integrate(x**y, x, conds='none') == x**(y + 1)/(y + 1)
assert integrate((exp(y)*x + 1/y)**(1 + sin(y)), x, conds='none') == \
exp(-y)*(exp(y)*x + 1/y)**(2 + sin(y)) / (2 + sin(y))
def test_issue_3618():
assert integrate(pi*sqrt(x), x) == 2*pi*sqrt(x)**3/3
assert integrate(pi*sqrt(x) + E*sqrt(x)**3, x) == \
2*pi*sqrt(x)**3/3 + 2*E *sqrt(x)**5/5
def test_issue_3623():
assert integrate(cos((n + 1)*x), x) == Piecewise(
(sin(x*(n + 1))/(n + 1), Ne(n + 1, 0)), (x, True))
assert integrate(cos((n - 1)*x), x) == Piecewise(
(sin(x*(n - 1))/(n - 1), Ne(n - 1, 0)), (x, True))
assert integrate(cos((n + 1)*x) + cos((n - 1)*x), x) == \
Piecewise((sin(x*(n - 1))/(n - 1), Ne(n - 1, 0)), (x, True)) + \
Piecewise((sin(x*(n + 1))/(n + 1), Ne(n + 1, 0)), (x, True))
def test_issue_3664():
n = Symbol('n', integer=True, nonzero=True)
assert integrate(-1./2 * x * sin(n * pi * x/2), [x, -2, 0]) == \
2.0*cos(pi*n)/(pi*n)
assert integrate(x * sin(n * pi * x/2) * Rational(-1, 2), [x, -2, 0]) == \
2*cos(pi*n)/(pi*n)
def test_issue_3679():
# definite integration of rational functions gives wrong answers
assert NS(Integral(1/(x**2 - 8*x + 17), (x, 2, 4))) == '1.10714871779409'
def test_issue_3686(): # remove this when Fresnel integrals are implemented
from sympy import expand_func, fresnels
assert expand_func(integrate(sin(x**2), x)) == \
sqrt(2)*sqrt(pi)*fresnels(sqrt(2)*x/sqrt(pi))/2
def test_integrate_units():
m = units.m
s = units.s
assert integrate(x * m/s, (x, 1*s, 5*s)) == 12*m*s
def test_transcendental_functions():
assert integrate(LambertW(2*x), x) == \
-x + x*LambertW(2*x) + x/LambertW(2*x)
def test_log_polylog():
assert integrate(log(1 - x)/x, (x, 0, 1)) == -pi**2/6
assert integrate(log(x)*(1 - x)**(-1), (x, 0, 1)) == -pi**2/6
def test_issue_3740():
f = 4*log(x) - 2*log(x)**2
fid = diff(integrate(f, x), x)
assert abs(f.subs(x, 42).evalf() - fid.subs(x, 42).evalf()) < 1e-10
def test_issue_3788():
assert integrate(1/(1 + x**2), x) == atan(x)
def test_issue_3952():
f = sin(x)
assert integrate(f, x) == -cos(x)
raises(ValueError, lambda: integrate(f, 2*x))
def test_issue_4516():
assert integrate(2**x - 2*x, x) == 2**x/log(2) - x**2
def test_issue_7450():
ans = integrate(exp(-(1 + I)*x), (x, 0, oo))
assert re(ans) == S.Half and im(ans) == Rational(-1, 2)
def test_issue_8623():
assert integrate((1 + cos(2*x)) / (3 - 2*cos(2*x)), (x, 0, pi)) == -pi/2 + sqrt(5)*pi/2
assert integrate((1 + cos(2*x))/(3 - 2*cos(2*x))) == -x/2 + sqrt(5)*(atan(sqrt(5)*tan(x)) + \
pi*floor((x - pi/2)/pi))/2
def test_issue_9569():
assert integrate(1 / (2 - cos(x)), (x, 0, pi)) == pi/sqrt(3)
assert integrate(1/(2 - cos(x))) == 2*sqrt(3)*(atan(sqrt(3)*tan(x/2)) + pi*floor((x/2 - pi/2)/pi))/3
def test_issue_13749():
assert integrate(1 / (2 + cos(x)), (x, 0, pi)) == pi/sqrt(3)
assert integrate(1/(2 + cos(x))) == 2*sqrt(3)*(atan(sqrt(3)*tan(x/2)/3) + pi*floor((x/2 - pi/2)/pi))/3
def test_matrices():
M = Matrix(2, 2, lambda i, j: (i + j + 1)*sin((i + j + 1)*x))
assert integrate(M, x) == Matrix([
[-cos(x), -cos(2*x)],
[-cos(2*x), -cos(3*x)],
])
def test_integrate_functions():
# issue 4111
assert integrate(f(x), x) == Integral(f(x), x)
assert integrate(f(x), (x, 0, 1)) == Integral(f(x), (x, 0, 1))
assert integrate(f(x)*diff(f(x), x), x) == f(x)**2/2
assert integrate(diff(f(x), x) / f(x), x) == log(f(x))
def test_integrate_derivatives():
assert integrate(Derivative(f(x), x), x) == f(x)
assert integrate(Derivative(f(y), y), x) == x*Derivative(f(y), y)
assert integrate(Derivative(f(x), x)**2, x) == \
Integral(Derivative(f(x), x)**2, x)
def test_transform():
a = Integral(x**2 + 1, (x, -1, 2))
fx = x
fy = 3*y + 1
assert a.doit() == a.transform(fx, fy).doit()
assert a.transform(fx, fy).transform(fy, fx) == a
fx = 3*x + 1
fy = y
assert a.transform(fx, fy).transform(fy, fx) == a
a = Integral(sin(1/x), (x, 0, 1))
assert a.transform(x, 1/y) == Integral(sin(y)/y**2, (y, 1, oo))
assert a.transform(x, 1/y).transform(y, 1/x) == a
a = Integral(exp(-x**2), (x, -oo, oo))
assert a.transform(x, 2*y) == Integral(2*exp(-4*y**2), (y, -oo, oo))
# < 3 arg limit handled properly
assert Integral(x, x).transform(x, a*y).doit() == \
Integral(y*a**2, y).doit()
_3 = S(3)
assert Integral(x, (x, 0, -_3)).transform(x, 1/y).doit() == \
Integral(-1/x**3, (x, -oo, -1/_3)).doit()
assert Integral(x, (x, 0, _3)).transform(x, 1/y) == \
Integral(y**(-3), (y, 1/_3, oo))
# issue 8400
i = Integral(x + y, (x, 1, 2), (y, 1, 2))
assert i.transform(x, (x + 2*y, x)).doit() == \
i.transform(x, (x + 2*z, x)).doit() == 3
i = Integral(x, (x, a, b))
assert i.transform(x, 2*s) == Integral(4*s, (s, a/2, b/2))
raises(ValueError, lambda: i.transform(x, 1))
raises(ValueError, lambda: i.transform(x, s*t))
raises(ValueError, lambda: i.transform(x, -s))
raises(ValueError, lambda: i.transform(x, (s, t)))
raises(ValueError, lambda: i.transform(2*x, 2*s))
i = Integral(x**2, (x, 1, 2))
raises(ValueError, lambda: i.transform(x**2, s))
am = Symbol('a', negative=True)
bp = Symbol('b', positive=True)
i = Integral(x, (x, bp, am))
i.transform(x, 2*s)
assert i.transform(x, 2*s) == Integral(-4*s, (s, am/2, bp/2))
i = Integral(x, (x, a))
assert i.transform(x, 2*s) == Integral(4*s, (s, a/2))
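# Worked sketch of the substitution used above (standard u-substitution,
# not extra API): transforming Integral(x, (x, a, b)) with x -> 2*s sets
# x = 2*s and dx = 2*ds, and maps the limits a -> a/2 and b -> b/2, so the
# result is Integral(2*s * 2, (s, a/2, b/2)) == Integral(4*s, (s, a/2, b/2)).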
def test_issue_4052():
f = S.Half*asin(x) + x*sqrt(1 - x**2)/2
assert integrate(cos(asin(x)), x) == f
assert integrate(sin(acos(x)), x) == f
@slow
def test_evalf_integrals():
assert NS(Integral(x, (x, 2, 5)), 15) == '10.5000000000000'
gauss = Integral(exp(-x**2), (x, -oo, oo))
assert NS(gauss, 15) == '1.77245385090552'
assert NS(gauss**2 - pi + E*Rational(
1, 10**20), 15) in ('2.71828182845904e-20', '2.71828182845905e-20')
# A monster of an integral from http://mathworld.wolfram.com/DefiniteIntegral.html
t = Symbol('t')
a = 8*sqrt(3)/(1 + 3*t**2)
b = 16*sqrt(2)*(3*t + 1)*sqrt(4*t**2 + t + 1)**3
c = (3*t**2 + 1)*(11*t**2 + 2*t + 3)**2
d = sqrt(2)*(249*t**2 + 54*t + 65)/(11*t**2 + 2*t + 3)**2
f = a - b/c - d
assert NS(Integral(f, (t, 0, 1)), 50) == \
NS((3*sqrt(2) - 49*pi + 162*atan(sqrt(2)))/12, 50)
# http://mathworld.wolfram.com/VardisIntegral.html
assert NS(Integral(log(log(1/x))/(1 + x + x**2), (x, 0, 1)), 15) == \
NS('pi/sqrt(3) * log(2*pi**(5/6) / gamma(1/6))', 15)
# http://mathworld.wolfram.com/AhmedsIntegral.html
assert NS(Integral(atan(sqrt(x**2 + 2))/(sqrt(x**2 + 2)*(x**2 + 1)), (x,
0, 1)), 15) == NS(5*pi**2/96, 15)
# http://mathworld.wolfram.com/AbelsIntegral.html
assert NS(Integral(x/((exp(pi*x) - exp(
-pi*x))*(x**2 + 1)), (x, 0, oo)), 15) == NS('log(2)/2-1/4', 15)
# Complex part trimming
# http://mathworld.wolfram.com/VardisIntegral.html
assert NS(Integral(log(log(sin(x)/cos(x))), (x, pi/4, pi/2)), 15, chop=True) == \
NS('pi/4*log(4*pi**3/gamma(1/4)**4)', 15)
#
# Endpoints causing trouble (rounding error in integration points -> complex log)
assert NS(
2 + Integral(log(2*cos(x/2)), (x, -pi, pi)), 17, chop=True) == NS(2, 17)
assert NS(
2 + Integral(log(2*cos(x/2)), (x, -pi, pi)), 20, chop=True) == NS(2, 20)
assert NS(
2 + Integral(log(2*cos(x/2)), (x, -pi, pi)), 22, chop=True) == NS(2, 22)
# Needs zero handling
assert NS(pi - 4*Integral(
'sqrt(1-x**2)', (x, 0, 1)), 15, maxn=30, chop=True) in ('0.0', '0')
# Oscillatory quadrature
a = Integral(sin(x)/x**2, (x, 1, oo)).evalf(maxn=15)
assert 0.49 < a < 0.51
assert NS(
Integral(sin(x)/x**2, (x, 1, oo)), quad='osc') == '0.504067061906928'
assert NS(Integral(
cos(pi*x + 1)/x, (x, -oo, -1)), quad='osc') == '0.276374705640365'
# indefinite integrals aren't evaluated
assert NS(Integral(x, x)) == 'Integral(x, x)'
assert NS(Integral(x, (x, y))) == 'Integral(x, (x, y))'
def test_evalf_issue_939():
# https://github.com/sympy/sympy/issues/4038
# The output form of an integral may differ by a step function between
# revisions, making this test a bit useless. This can't be said about the
# other two tests. For now, all values of this evaluation are used here,
# but in the future this should be reconsidered.
assert NS(integrate(1/(x**5 + 1), x).subs(x, 4), chop=True) in \
['-0.000976138910649103', '0.965906660135753', '1.93278945918216']
assert NS(Integral(1/(x**5 + 1), (x, 2, 4))) == '0.0144361088886740'
assert NS(
integrate(1/(x**5 + 1), (x, 2, 4)), chop=True) == '0.0144361088886740'
def test_double_previously_failing_integrals():
# Double integrals not implemented <- Sure they are!
res = integrate(sqrt(x) + x*y, (x, 1, 2), (y, -1, 1))
# Old numerical test
assert NS(res, 15) == '2.43790283299492'
# Symbolic test
assert res == Rational(-4, 3) + 8*sqrt(2)/3
# double integral + zero detection
assert integrate(sin(x + x*y), (x, -1, 1), (y, -1, 1)) is S.Zero
def test_integrate_SingularityFunction():
in_1 = SingularityFunction(x, a, 3) + SingularityFunction(x, 5, -1)
out_1 = SingularityFunction(x, a, 4)/4 + SingularityFunction(x, 5, 0)
assert integrate(in_1, x) == out_1
in_2 = 10*SingularityFunction(x, 4, 0) - 5*SingularityFunction(x, -6, -2)
out_2 = 10*SingularityFunction(x, 4, 1) - 5*SingularityFunction(x, -6, -1)
assert integrate(in_2, x) == out_2
in_3 = 2*x**2*y -10*SingularityFunction(x, -4, 7) - 2*SingularityFunction(y, 10, -2)
out_3_1 = 2*x**3*y/3 - 2*x*SingularityFunction(y, 10, -2) - 5*SingularityFunction(x, -4, 8)/4
out_3_2 = x**2*y**2 - 10*y*SingularityFunction(x, -4, 7) - 2*SingularityFunction(y, 10, -1)
assert integrate(in_3, x) == out_3_1
assert integrate(in_3, y) == out_3_2
assert unchanged(Integral, in_3, (x,))
assert Integral(in_3, x) == Integral(in_3, (x,))
assert Integral(in_3, x).doit() == out_3_1
in_4 = 10*SingularityFunction(x, -4, 7) - 2*SingularityFunction(x, 10, -2)
out_4 = 5*SingularityFunction(x, -4, 8)/4 - 2*SingularityFunction(x, 10, -1)
assert integrate(in_4, (x, -oo, x)) == out_4
assert integrate(SingularityFunction(x, 5, -1), x) == SingularityFunction(x, 5, 0)
assert integrate(SingularityFunction(x, 0, -1), (x, -oo, oo)) == 1
assert integrate(5*SingularityFunction(x, 5, -1), (x, -oo, oo)) == 5
assert integrate(SingularityFunction(x, 5, -1) * f(x), (x, -oo, oo)) == f(5)
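# Rule exercised above (standard singularity-function calculus):
#     integrate(SingularityFunction(x, a, n), x) ==
#         SingularityFunction(x, a, n + 1)/(n + 1)   for n >= 0
#         SingularityFunction(x, a, n + 1)           for n = -1, -2
# and <x - a>**-1 behaves like DiracDelta(x - a) under definite integration.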
def test_integrate_DiracDelta():
# This is here to check that deltaintegrate is being called, but also
# to test definite integrals. More tests are in test_deltafunctions.py
assert integrate(DiracDelta(x) * f(x), (x, -oo, oo)) == f(0)
assert integrate(DiracDelta(x)**2, (x, -oo, oo)) == DiracDelta(0)
# issue 4522
assert integrate(integrate((4 - 4*x + x*y - 4*y) * \
DiracDelta(x)*DiracDelta(y - 1), (x, 0, 1)), (y, 0, 1)) == 0
# issue 5729
p = exp(-(x**2 + y**2))/pi
assert integrate(p*DiracDelta(x - 10*y), (x, -oo, oo), (y, -oo, oo)) == \
integrate(p*DiracDelta(x - 10*y), (y, -oo, oo), (x, -oo, oo)) == \
integrate(p*DiracDelta(10*x - y), (x, -oo, oo), (y, -oo, oo)) == \
integrate(p*DiracDelta(10*x - y), (y, -oo, oo), (x, -oo, oo)) == \
1/sqrt(101*pi)
def test_integrate_returns_piecewise():
assert integrate(x**y, x) == Piecewise(
(x**(y + 1)/(y + 1), Ne(y, -1)), (log(x), True))
assert integrate(x**y, y) == Piecewise(
(x**y/log(x), Ne(log(x), 0)), (y, True))
assert integrate(exp(n*x), x) == Piecewise(
(exp(n*x)/n, Ne(n, 0)), (x, True))
assert integrate(x*exp(n*x), x) == Piecewise(
((n*x - 1)*exp(n*x)/n**2, Ne(n**2, 0)), (x**2/2, True))
assert integrate(x**(n*y), x) == Piecewise(
(x**(n*y + 1)/(n*y + 1), Ne(n*y, -1)), (log(x), True))
assert integrate(x**(n*y), y) == Piecewise(
(x**(n*y)/(n*log(x)), Ne(n*log(x), 0)), (y, True))
assert integrate(cos(n*x), x) == Piecewise(
(sin(n*x)/n, Ne(n, 0)), (x, True))
assert integrate(cos(n*x)**2, x) == Piecewise(
((n*x/2 + sin(n*x)*cos(n*x)/2)/n, Ne(n, 0)), (x, True))
assert integrate(x*cos(n*x), x) == Piecewise(
(x*sin(n*x)/n + cos(n*x)/n**2, Ne(n, 0)), (x**2/2, True))
assert integrate(sin(n*x), x) == Piecewise(
(-cos(n*x)/n, Ne(n, 0)), (0, True))
assert integrate(sin(n*x)**2, x) == Piecewise(
((n*x/2 - sin(n*x)*cos(n*x)/2)/n, Ne(n, 0)), (0, True))
assert integrate(x*sin(n*x), x) == Piecewise(
(-x*cos(n*x)/n + sin(n*x)/n**2, Ne(n, 0)), (0, True))
assert integrate(exp(x*y), (x, 0, z)) == Piecewise(
(exp(y*z)/y - 1/y, (y > -oo) & (y < oo) & Ne(y, 0)), (z, True))
def test_integrate_max_min():
x = symbols('x', real=True)
assert integrate(Min(x, 2), (x, 0, 3)) == 4
assert integrate(Max(x**2, x**3), (x, 0, 2)) == Rational(49, 12)
assert integrate(Min(exp(x), exp(-x))**2, x) == Piecewise( \
(exp(2*x)/2, x <= 0), (1 - exp(-2*x)/2, True))
# issue 7907
c = symbols('c', extended_real=True)
int1 = integrate(Max(c, x)*exp(-x**2), (x, -oo, oo))
int2 = integrate(c*exp(-x**2), (x, -oo, c))
int3 = integrate(x*exp(-x**2), (x, c, oo))
assert int1 == int2 + int3 == sqrt(pi)*c*erf(c)/2 + \
sqrt(pi)*c/2 + exp(-c**2)/2
def test_integrate_Abs_sign():
assert integrate(Abs(x), (x, -2, 1)) == Rational(5, 2)
assert integrate(Abs(x), (x, 0, 1)) == S.Half
assert integrate(Abs(x + 1), (x, 0, 1)) == Rational(3, 2)
assert integrate(Abs(x**2 - 1), (x, -2, 2)) == 4
assert integrate(Abs(x**2 - 3*x), (x, -15, 15)) == 2259
assert integrate(sign(x), (x, -1, 2)) == 1
assert integrate(sign(x)*sin(x), (x, -pi, pi)) == 4
assert integrate(sign(x - 2) * x**2, (x, 0, 3)) == Rational(11, 3)
t, s = symbols('t s', real=True)
assert integrate(Abs(t), t) == Piecewise(
(-t**2/2, t <= 0), (t**2/2, True))
assert integrate(Abs(2*t - 6), t) == Piecewise(
(-t**2 + 6*t, t <= 3), (t**2 - 6*t + 18, True))
assert (integrate(abs(t - s**2), (t, 0, 2)) ==
2*s**2*Min(2, s**2) - 2*s**2 - Min(2, s**2)**2 + 2)
assert integrate(exp(-Abs(t)), t) == Piecewise(
(exp(t), t <= 0), (2 - exp(-t), True))
assert integrate(sign(2*t - 6), t) == Piecewise(
(-t, t < 3), (t - 6, True))
assert integrate(2*t*sign(t**2 - 1), t) == Piecewise(
(t**2, t < -1), (-t**2 + 2, t < 1), (t**2, True))
assert integrate(sign(t), (t, s + 1)) == Piecewise(
(s + 1, s + 1 > 0), (-s - 1, s + 1 < 0), (0, True))
def test_subs1():
e = Integral(exp(x - y), x)
assert e.subs(y, 3) == Integral(exp(x - 3), x)
e = Integral(exp(x - y), (x, 0, 1))
assert e.subs(y, 3) == Integral(exp(x - 3), (x, 0, 1))
f = Lambda(x, exp(-x**2))
conv = Integral(f(x - y)*f(y), (y, -oo, oo))
assert conv.subs({x: 0}) == Integral(exp(-2*y**2), (y, -oo, oo))
def test_subs2():
e = Integral(exp(x - y), x, t)
assert e.subs(y, 3) == Integral(exp(x - 3), x, t)
e = Integral(exp(x - y), (x, 0, 1), (t, 0, 1))
assert e.subs(y, 3) == Integral(exp(x - 3), (x, 0, 1), (t, 0, 1))
f = Lambda(x, exp(-x**2))
conv = Integral(f(x - y)*f(y), (y, -oo, oo), (t, 0, 1))
assert conv.subs({x: 0}) == Integral(exp(-2*y**2), (y, -oo, oo), (t, 0, 1))
def test_subs3():
e = Integral(exp(x - y), (x, 0, y), (t, y, 1))
assert e.subs(y, 3) == Integral(exp(x - 3), (x, 0, 3), (t, 3, 1))
f = Lambda(x, exp(-x**2))
conv = Integral(f(x - y)*f(y), (y, -oo, oo), (t, x, 1))
assert conv.subs({x: 0}) == Integral(exp(-2*y**2), (y, -oo, oo), (t, 0, 1))
def test_subs4():
e = Integral(exp(x), (x, 0, y), (t, y, 1))
assert e.subs(y, 3) == Integral(exp(x), (x, 0, 3), (t, 3, 1))
f = Lambda(x, exp(-x**2))
conv = Integral(f(y)*f(y), (y, -oo, oo), (t, x, 1))
assert conv.subs({x: 0}) == Integral(exp(-2*y**2), (y, -oo, oo), (t, 0, 1))
def test_subs5():
e = Integral(exp(-x**2), (x, -oo, oo))
assert e.subs(x, 5) == e
e = Integral(exp(-x**2 + y), x)
assert e.subs(y, 5) == Integral(exp(-x**2 + 5), x)
e = Integral(exp(-x**2 + y), (x, x))
assert e.subs(x, 5) == Integral(exp(y - x**2), (x, 5))
assert e.subs(y, 5) == Integral(exp(-x**2 + 5), x)
e = Integral(exp(-x**2 + y), (y, -oo, oo), (x, -oo, oo))
assert e.subs(x, 5) == e
assert e.subs(y, 5) == e
# Test evaluation of antiderivatives
e = Integral(exp(-x**2), (x, x))
assert e.subs(x, 5) == Integral(exp(-x**2), (x, 5))
e = Integral(exp(x), x)
assert (e.subs(x,1) - e.subs(x,0) - Integral(exp(x), (x, 0, 1))
).doit().is_zero
def test_subs6():
a, b = symbols('a b')
e = Integral(x*y, (x, f(x), f(y)))
assert e.subs(x, 1) == Integral(x*y, (x, f(1), f(y)))
assert e.subs(y, 1) == Integral(x, (x, f(x), f(1)))
e = Integral(x*y, (x, f(x), f(y)), (y, f(x), f(y)))
assert e.subs(x, 1) == Integral(x*y, (x, f(1), f(y)), (y, f(1), f(y)))
assert e.subs(y, 1) == Integral(x*y, (x, f(x), f(y)), (y, f(x), f(1)))
e = Integral(x*y, (x, f(x), f(a)), (y, f(x), f(a)))
assert e.subs(a, 1) == Integral(x*y, (x, f(x), f(1)), (y, f(x), f(1)))
def test_subs7():
e = Integral(x, (x, 1, y), (y, 1, 2))
assert e.subs({x: 1, y: 2}) == e
e = Integral(sin(x) + sin(y), (x, sin(x), sin(y)),
(y, 1, 2))
assert e.subs(sin(y), 1) == e
assert e.subs(sin(x), 1) == Integral(sin(x) + sin(y), (x, 1, sin(y)),
(y, 1, 2))
def test_expand():
e = Integral(f(x)+f(x**2), (x, 1, y))
assert e.expand() == Integral(f(x), (x, 1, y)) + Integral(f(x**2), (x, 1, y))
def test_integration_variable():
raises(ValueError, lambda: Integral(exp(-x**2), 3))
raises(ValueError, lambda: Integral(exp(-x**2), (3, -oo, oo)))
def test_expand_integral():
assert Integral(cos(x**2)*(sin(x**2) + 1), (x, 0, 1)).expand() == \
Integral(cos(x**2)*sin(x**2), (x, 0, 1)) + \
Integral(cos(x**2), (x, 0, 1))
assert Integral(cos(x**2)*(sin(x**2) + 1), x).expand() == \
Integral(cos(x**2)*sin(x**2), x) + \
Integral(cos(x**2), x)
def test_as_sum_midpoint1():
e = Integral(sqrt(x**3 + 1), (x, 2, 10))
assert e.as_sum(1, method="midpoint") == 8*sqrt(217)
assert e.as_sum(2, method="midpoint") == 4*sqrt(65) + 12*sqrt(57)
assert e.as_sum(3, method="midpoint") == 8*sqrt(217)/3 + \
8*sqrt(3081)/27 + 8*sqrt(52809)/27
assert e.as_sum(4, method="midpoint") == 2*sqrt(730) + \
4*sqrt(7) + 4*sqrt(86) + 6*sqrt(14)
assert abs(e.as_sum(4, method="midpoint").n() - e.n()) < 0.5
e = Integral(sqrt(x**3 + y**3), (x, 2, 10), (y, 0, 10))
raises(NotImplementedError, lambda: e.as_sum(4))
def test_as_sum_midpoint2():
e = Integral((x + y)**2, (x, 0, 1))
n = Symbol('n', positive=True, integer=True)
assert e.as_sum(1, method="midpoint").expand() == Rational(1, 4) + y + y**2
assert e.as_sum(2, method="midpoint").expand() == Rational(5, 16) + y + y**2
assert e.as_sum(3, method="midpoint").expand() == Rational(35, 108) + y + y**2
assert e.as_sum(4, method="midpoint").expand() == Rational(21, 64) + y + y**2
assert e.as_sum(n, method="midpoint").expand() == \
y**2 + y + Rational(1, 3) - 1/(12*n**2)
def test_as_sum_left():
e = Integral((x + y)**2, (x, 0, 1))
assert e.as_sum(1, method="left").expand() == y**2
assert e.as_sum(2, method="left").expand() == Rational(1, 8) + y/2 + y**2
assert e.as_sum(3, method="left").expand() == Rational(5, 27) + y*Rational(2, 3) + y**2
assert e.as_sum(4, method="left").expand() == Rational(7, 32) + y*Rational(3, 4) + y**2
assert e.as_sum(n, method="left").expand() == \
y**2 + y + Rational(1, 3) - y/n - 1/(2*n) + 1/(6*n**2)
assert e.as_sum(10, method="left", evaluate=False).has(Sum)
def test_as_sum_right():
e = Integral((x + y)**2, (x, 0, 1))
assert e.as_sum(1, method="right").expand() == 1 + 2*y + y**2
assert e.as_sum(2, method="right").expand() == Rational(5, 8) + y*Rational(3, 2) + y**2
assert e.as_sum(3, method="right").expand() == Rational(14, 27) + y*Rational(4, 3) + y**2
assert e.as_sum(4, method="right").expand() == Rational(15, 32) + y*Rational(5, 4) + y**2
assert e.as_sum(n, method="right").expand() == \
y**2 + y + Rational(1, 3) + y/n + 1/(2*n) + 1/(6*n**2)
def test_as_sum_trapezoid():
e = Integral((x + y)**2, (x, 0, 1))
assert e.as_sum(1, method="trapezoid").expand() == y**2 + y + S.Half
assert e.as_sum(2, method="trapezoid").expand() == y**2 + y + Rational(3, 8)
assert e.as_sum(3, method="trapezoid").expand() == y**2 + y + Rational(19, 54)
assert e.as_sum(4, method="trapezoid").expand() == y**2 + y + Rational(11, 32)
assert e.as_sum(n, method="trapezoid").expand() == \
y**2 + y + Rational(1, 3) + 1/(6*n**2)
assert Integral(sign(x), (x, 0, 1)).as_sum(1, 'trapezoid') == S.Half
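# Sketch of the quadrature formulas behind as_sum (standard Riemann sums):
# for Integral(f(x), (x, a, b)) with n subintervals of width dx = (b - a)/n,
#     left:      dx * Sum(f(a + i*dx),            (i, 0, n - 1))
#     right:     dx * Sum(f(a + i*dx),            (i, 1, n))
#     midpoint:  dx * Sum(f(a + (i + S.Half)*dx), (i, 0, n - 1))
#     trapezoid: dx * ((f(a) + f(b))/2 + Sum(f(a + i*dx), (i, 1, n - 1)))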
def test_as_sum_raises():
e = Integral((x + y)**2, (x, 0, 1))
raises(ValueError, lambda: e.as_sum(-1))
raises(ValueError, lambda: e.as_sum(0))
raises(ValueError, lambda: Integral(x).as_sum(3))
raises(ValueError, lambda: e.as_sum(oo))
raises(ValueError, lambda: e.as_sum(3, method='xxxx2'))
def test_nested_doit():
e = Integral(Integral(x, x), x)
f = Integral(x, x, x)
assert e.doit() == f.doit()
def test_issue_4665():
# Allow only upper or lower limit evaluation
e = Integral(x**2, (x, None, 1))
f = Integral(x**2, (x, 1, None))
assert e.doit() == Rational(1, 3)
assert f.doit() == Rational(-1, 3)
assert Integral(x*y, (x, None, y)).subs(y, t) == Integral(x*t, (x, None, t))
assert Integral(x*y, (x, y, None)).subs(y, t) == Integral(x*t, (x, t, None))
assert integrate(x**2, (x, None, 1)) == Rational(1, 3)
assert integrate(x**2, (x, 1, None)) == Rational(-1, 3)
assert integrate("x**2", ("x", "1", None)) == Rational(-1, 3)
def test_integral_reconstruct():
e = Integral(x**2, (x, -1, 1))
assert e == Integral(*e.args)
def test_doit_integrals():
e = Integral(Integral(2*x), (x, 0, 1))
assert e.doit() == Rational(1, 3)
assert e.doit(deep=False) == Rational(1, 3)
f = Function('f')
# doesn't matter if the integral can't be performed
assert Integral(f(x), (x, 1, 1)).doit() == 0
# doesn't matter if the limits can't be evaluated
assert Integral(0, (x, 1, Integral(f(x), x))).doit() == 0
assert Integral(x, (a, 0)).doit() == 0
limits = ((a, 1, exp(x)), (x, 0))
assert Integral(a, *limits).doit() == Rational(1, 4)
assert Integral(a, *list(reversed(limits))).doit() == 0
def test_issue_4884():
assert integrate(sqrt(x)*(1 + x)) == \
Piecewise(
(2*sqrt(x)*(x + 1)**2/5 - 2*sqrt(x)*(x + 1)/15 - 4*sqrt(x)/15,
Abs(x + 1) > 1),
(2*I*sqrt(-x)*(x + 1)**2/5 - 2*I*sqrt(-x)*(x + 1)/15 -
4*I*sqrt(-x)/15, True))
assert integrate(x**x*(1 + log(x))) == x**x
def test_is_number():
from sympy.abc import x, y, z
from sympy import cos, sin
assert Integral(x).is_number is False
assert Integral(1, x).is_number is False
assert Integral(1, (x, 1)).is_number is True
assert Integral(1, (x, 1, 2)).is_number is True
assert Integral(1, (x, 1, y)).is_number is False
assert Integral(1, (x, y)).is_number is False
assert Integral(x, y).is_number is False
assert Integral(x, (y, 1, x)).is_number is False
assert Integral(x, (y, 1, 2)).is_number is False
assert Integral(x, (x, 1, 2)).is_number is True
# `foo.is_number` should always be equivalent to `not foo.free_symbols`
# in each of these cases, there are pseudo-free symbols
i = Integral(x, (y, 1, 1))
assert i.is_number is False and i.n() == 0
i = Integral(x, (y, z, z))
assert i.is_number is False and i.n() == 0
i = Integral(1, (y, z, z + 2))
assert i.is_number is False and i.n() == 2
assert Integral(x*y, (x, 1, 2), (y, 1, 3)).is_number is True
assert Integral(x*y, (x, 1, 2), (y, 1, z)).is_number is False
assert Integral(x, (x, 1)).is_number is True
assert Integral(x, (x, 1, Integral(y, (y, 1, 2)))).is_number is True
assert Integral(Sum(z, (z, 1, 2)), (x, 1, 2)).is_number is True
# it is possible to get a false negative if the integrand is
# actually an unsimplified zero, but this is true of is_number in general.
assert Integral(sin(x)**2 + cos(x)**2 - 1, x).is_number is False
assert Integral(f(x), (x, 0, 1)).is_number is True
def test_symbols():
from sympy.abc import x, y, z
assert Integral(0, x).free_symbols == {x}
assert Integral(x).free_symbols == {x}
assert Integral(x, (x, None, y)).free_symbols == {y}
assert Integral(x, (x, y, None)).free_symbols == {y}
assert Integral(x, (x, 1, y)).free_symbols == {y}
assert Integral(x, (x, y, 1)).free_symbols == {y}
assert Integral(x, (x, x, y)).free_symbols == {x, y}
assert Integral(x, x, y).free_symbols == {x, y}
assert Integral(x, (x, 1, 2)).free_symbols == set()
assert Integral(x, (y, 1, 2)).free_symbols == {x}
# pseudo-free in this case
assert Integral(x, (y, z, z)).free_symbols == {x, z}
assert Integral(x, (y, 1, 2), (y, None, None)).free_symbols == {x, y}
assert Integral(x, (y, 1, 2), (x, 1, y)).free_symbols == {y}
assert Integral(2, (y, 1, 2), (y, 1, x), (x, 1, 2)).free_symbols == set()
assert Integral(2, (y, x, 2), (y, 1, x), (x, 1, 2)).free_symbols == set()
assert Integral(2, (x, 1, 2), (y, x, 2), (y, 1, 2)).free_symbols == \
{x}
def test_is_zero():
from sympy.abc import x, m
assert Integral(0, (x, 1, x)).is_zero
assert Integral(1, (x, 1, 1)).is_zero
assert Integral(1, (x, 1, 2), (y, 2)).is_zero is False
assert Integral(x, (m, 0)).is_zero
assert Integral(x + m, (m, 0)).is_zero is None
i = Integral(m, (m, 1, exp(x)), (x, 0))
assert i.is_zero is None
assert Integral(m, (x, 0), (m, 1, exp(x))).is_zero is True
assert Integral(x, (x, oo, oo)).is_zero # issue 8171
assert Integral(x, (x, -oo, -oo)).is_zero
# this is zero but is beyond the scope of what is_zero
# should be doing
assert Integral(sin(x), (x, 0, 2*pi)).is_zero is None
def test_series():
from sympy.abc import x
i = Integral(cos(x), (x, x))
e = i.lseries(x)
assert i.nseries(x, n=8).removeO() == Add(*[next(e) for j in range(4)])
def test_trig_nonelementary_integrals():
x = Symbol('x')
assert integrate((1 + sin(x))/x, x) == log(x) + Si(x)
# next one comes out as log(x) + log(x**2)/2 + Ci(x)
# so not hardcoding this log ugliness
assert integrate((cos(x) + 2)/x, x).has(Ci)
def test_issue_4403():
x = Symbol('x')
y = Symbol('y')
z = Symbol('z', positive=True)
assert integrate(sqrt(x**2 + z**2), x) == \
z**2*asinh(x/z)/2 + x*sqrt(x**2 + z**2)/2
assert integrate(sqrt(x**2 - z**2), x) == \
-z**2*acosh(x/z)/2 + x*sqrt(x**2 - z**2)/2
x = Symbol('x', real=True)
y = Symbol('y', positive=True)
assert integrate(1/(x**2 + y**2)**S('3/2'), x) == \
x/(y**2*sqrt(x**2 + y**2))
# If y is real and nonzero, we get x*Abs(y)/(y**3*sqrt(x**2 + y**2)),
# which results from sqrt(1 + x**2/y**2) = sqrt(x**2 + y**2)/|y|.
def test_issue_4403_2():
assert integrate(sqrt(-x**2 - 4), x) == \
-2*atan(x/sqrt(-4 - x**2)) + x*sqrt(-4 - x**2)/2
def test_issue_4100():
R = Symbol('R', positive=True)
assert integrate(sqrt(R**2 - x**2), (x, 0, R)) == pi*R**2/4
def test_issue_5167():
from sympy.abc import w, x, y, z
f = Function('f')
assert Integral(Integral(f(x), x), x) == Integral(f(x), x, x)
assert Integral(f(x)).args == (f(x), Tuple(x))
assert Integral(Integral(f(x))).args == (f(x), Tuple(x), Tuple(x))
assert Integral(Integral(f(x)), y).args == (f(x), Tuple(x), Tuple(y))
assert Integral(Integral(f(x), z), y).args == (f(x), Tuple(z), Tuple(y))
assert Integral(Integral(Integral(f(x), x), y), z).args == \
(f(x), Tuple(x), Tuple(y), Tuple(z))
assert integrate(Integral(f(x), x), x) == Integral(f(x), x, x)
assert integrate(Integral(f(x), y), x) == y*Integral(f(x), x)
assert integrate(Integral(f(x), x), y) in [Integral(y*f(x), x), y*Integral(f(x), x)]
assert integrate(Integral(2, x), x) == x**2
assert integrate(Integral(2, x), y) == 2*x*y
# don't re-order given limits
assert Integral(1, x, y).args != Integral(1, y, x).args
# do as many as possible
assert Integral(f(x), y, x, y, x).doit() == y**2*Integral(f(x), x, x)/2
assert Integral(f(x), (x, 1, 2), (w, 1, x), (z, 1, y)).doit() == \
y*(x - 1)*Integral(f(x), (x, 1, 2)) - (x - 1)*Integral(f(x), (x, 1, 2))
def test_issue_4890():
z = Symbol('z', positive=True)
assert integrate(exp(-log(x)**2), x) == \
sqrt(pi)*exp(Rational(1, 4))*erf(log(x) - S.Half)/2
assert integrate(exp(log(x)**2), x) == \
sqrt(pi)*exp(Rational(-1, 4))*erfi(log(x)+S.Half)/2
assert integrate(exp(-z*log(x)**2), x) == \
sqrt(pi)*exp(1/(4*z))*erf(sqrt(z)*log(x) - 1/(2*sqrt(z)))/(2*sqrt(z))
def test_issue_4551():
assert not integrate(1/(x*sqrt(1 - x**2)), x).has(Integral)
def test_issue_4376():
n = Symbol('n', integer=True, positive=True)
assert simplify(integrate(n*(x**(1/n) - 1), (x, 0, S.Half)) -
(n**2 - 2**(1/n)*n**2 - n*2**(1/n))/(2**(1 + 1/n) + n*2**(1 + 1/n))) == 0
def test_issue_4517():
assert integrate((sqrt(x) - x**3)/x**Rational(1, 3), x) == \
6*x**Rational(7, 6)/7 - 3*x**Rational(11, 3)/11
def test_issue_4527():
k, m = symbols('k m', integer=True)
assert integrate(sin(k*x)*sin(m*x), (x, 0, pi)).simplify() == \
Piecewise((0, Eq(k, 0) | Eq(m, 0)),
(-pi/2, Eq(k, -m) | (Eq(k, 0) & Eq(m, 0))),
(pi/2, Eq(k, m) | (Eq(k, 0) & Eq(m, 0))),
(0, True))
# Should be possible to further simplify to:
# Piecewise(
# (0, Eq(k, 0) | Eq(m, 0)),
# (-pi/2, Eq(k, -m)),
# (pi/2, Eq(k, m)),
# (0, True))
assert integrate(sin(k*x)*sin(m*x), (x,)) == Piecewise(
(0, And(Eq(k, 0), Eq(m, 0))),
(-x*sin(m*x)**2/2 - x*cos(m*x)**2/2 + sin(m*x)*cos(m*x)/(2*m), Eq(k, -m)),
(x*sin(m*x)**2/2 + x*cos(m*x)**2/2 - sin(m*x)*cos(m*x)/(2*m), Eq(k, m)),
(m*sin(k*x)*cos(m*x)/(k**2 - m**2) -
k*sin(m*x)*cos(k*x)/(k**2 - m**2), True))
def test_issue_4199():
ypos = Symbol('y', positive=True)
# TODO: Remove conds='none' below, let the assumption take care of it.
assert integrate(exp(-I*2*pi*ypos*x)*x, (x, -oo, oo), conds='none') == \
Integral(exp(-I*2*pi*ypos*x)*x, (x, -oo, oo))
@slow
def test_issue_3940():
a, b, c, d = symbols('a:d', positive=True, finite=True)
assert integrate(exp(-x**2 + I*c*x), x) == \
-sqrt(pi)*exp(-c**2/4)*erf(I*c/2 - x)/2
assert integrate(exp(a*x**2 + b*x + c), x) == \
sqrt(pi)*exp(c)*exp(-b**2/(4*a))*erfi(sqrt(a)*x + b/(2*sqrt(a)))/(2*sqrt(a))
from sympy import expand_mul
from sympy.abc import k
assert expand_mul(integrate(exp(-x**2)*exp(I*k*x), (x, -oo, oo))) == \
sqrt(pi)*exp(-k**2/4)
a, d = symbols('a d', positive=True)
assert expand_mul(integrate(exp(-a*x**2 + 2*d*x), (x, -oo, oo))) == \
sqrt(pi)*exp(d**2/a)/sqrt(a)
def test_issue_5413():
# Note that this is not the same as testing ratint() because integrate()
# pulls out the coefficient.
assert integrate(-a/(a**2 + x**2), x) == I*log(-I*a + x)/2 - I*log(I*a + x)/2
def test_issue_4892a():
A, z = symbols('A z')
c = Symbol('c', nonzero=True)
P1 = -A*exp(-z)
P2 = -A/(c*t)*(sin(x)**2 + cos(y)**2)
h1 = -sin(x)**2 - cos(y)**2
h2 = -sin(x)**2 + sin(y)**2 - 1
# there is still some non-deterministic behavior in integrate
# or trigsimp which permits one of the following
assert integrate(c*(P2 - P1), t) in [
c*(-A*(-h1)*log(c*t)/c + A*t*exp(-z)),
c*(-A*(-h2)*log(c*t)/c + A*t*exp(-z)),
c*( A* h1 *log(c*t)/c + A*t*exp(-z)),
c*( A* h2 *log(c*t)/c + A*t*exp(-z)),
(A*c*t - A*(-h1)*log(t)*exp(z))*exp(-z),
(A*c*t - A*(-h2)*log(t)*exp(z))*exp(-z),
]
def test_issue_4892b():
# Issues relating to issue 4596 make the actual result of this hard
# to test. The answer should be something like
#
# (-sin(y) + sqrt(-72 + 48*cos(y) - 8*cos(y)**2)/2)*log(x + sqrt(-72 +
# 48*cos(y) - 8*cos(y)**2)/(2*(3 - cos(y)))) + (-sin(y) - sqrt(-72 +
# 48*cos(y) - 8*cos(y)**2)/2)*log(x - sqrt(-72 + 48*cos(y) -
# 8*cos(y)**2)/(2*(3 - cos(y)))) + x**2*sin(y)/2 + 2*x*cos(y)
expr = (sin(y)*x**3 + 2*cos(y)*x**2 + 12)/(x**2 + 2)
assert trigsimp(factor(integrate(expr, x).diff(x) - expr)) == 0
def test_issue_5178():
assert integrate(sin(x)*f(y, z), (x, 0, pi), (y, 0, pi), (z, 0, pi)) == \
2*Integral(f(y, z), (y, 0, pi), (z, 0, pi))
def test_integrate_series():
f = sin(x).series(x, 0, 10)
g = x**2/2 - x**4/24 + x**6/720 - x**8/40320 + x**10/3628800 + O(x**11)
assert integrate(f, x) == g
assert diff(integrate(f, x), x) == f
assert integrate(O(x**5), x) == O(x**6)
def test_atom_bug():
from sympy import meijerg
from sympy.integrals.heurisch import heurisch
assert heurisch(meijerg([], [], [1], [], x), x) is None
def test_limit_bug():
z = Symbol('z', zero=False)
assert integrate(sin(x*y*z), (x, 0, pi), (y, 0, pi)) == \
(log(z) + EulerGamma + log(pi))/z - Ci(pi**2*z)/z + log(pi)/z
def test_issue_4703():
g = Function('g')
assert integrate(exp(x)*g(x), x).has(Integral)
def test_issue_1888():
f = Function('f')
assert integrate(f(x).diff(x)**2, x).has(Integral)
# The following tests work using meijerint.
def test_issue_3558():
from sympy import Si
assert integrate(cos(x*y), (x, -pi/2, pi/2), (y, 0, pi)) == 2*Si(pi**2/2)
def test_issue_4422():
assert integrate(1/sqrt(16 + 4*x**2), x) == asinh(x/2) / 2
def test_issue_4493():
from sympy import simplify
assert simplify(integrate(x*sqrt(1 + 2*x), x)) == \
sqrt(2*x + 1)*(6*x**2 + x - 1)/15
def test_issue_4737():
assert integrate(sin(x)/x, (x, -oo, oo)) == pi
assert integrate(sin(x)/x, (x, 0, oo)) == pi/2
assert integrate(sin(x)/x, x) == Si(x)
def test_issue_4992():
# Note: psi in _check_antecedents becomes NaN.
from sympy import simplify, expand_func, polygamma, gamma
a = Symbol('a', positive=True)
assert simplify(expand_func(integrate(exp(-x)*log(x)*x**a, (x, 0, oo)))) == \
(a*polygamma(0, a) + 1)*gamma(a)
def test_issue_4487():
from sympy import lowergamma, simplify
assert simplify(integrate(exp(-x)*x**y, x)) == lowergamma(y + 1, x)
def test_issue_4215():
x = Symbol("x")
assert integrate(1/(x**2), (x, -1, 1)) is oo
def test_issue_4400():
n = Symbol('n', integer=True, positive=True)
assert integrate((x**n)*log(x), x) == \
n*x*x**n*log(x)/(n**2 + 2*n + 1) + x*x**n*log(x)/(n**2 + 2*n + 1) - \
x*x**n/(n**2 + 2*n + 1)
def test_issue_6253():
# Note: this used to raise NotImplementedError
# Note: psi in _check_antecedents becomes NaN.
assert integrate((sqrt(1 - x) + sqrt(1 + x))**2/x, x, meijerg=True) == \
Integral((sqrt(-x + 1) + sqrt(x + 1))**2/x, x)
def test_issue_4153():
assert integrate(1/(1 + x + y + z), (x, 0, 1), (y, 0, 1), (z, 0, 1)) in [
-12*log(3) - 3*log(6)/2 + 3*log(8)/2 + 5*log(2) + 7*log(4),
6*log(2) + 8*log(4) - 27*log(3)/2, 22*log(2) - 27*log(3)/2,
-12*log(3) - 3*log(6)/2 + 47*log(2)/2]
def test_issue_4326():
R, b, h = symbols('R b h')
# It doesn't matter if we can do the integral. Just make sure the result
# doesn't contain nan. This is really a test against _eval_interval.
e = integrate(((h*(x - R + b))/b)*sqrt(R**2 - x**2), (x, R - b, R))
assert not e.has(nan)
# See that it evaluates
assert not e.has(Integral)
def test_powers():
assert integrate(2**x + 3**x, x) == 2**x/log(2) + 3**x/log(3)
def test_manual_option():
raises(ValueError, lambda: integrate(1/x, x, manual=True, meijerg=True))
# an example of a function that manual integration cannot handle
assert integrate(log(1+x)/x, (x, 0, 1), manual=True).has(Integral)
def test_meijerg_option():
raises(ValueError, lambda: integrate(1/x, x, meijerg=True, risch=True))
# an example of a function that meijerg integration cannot handle
assert integrate(tan(x), x, meijerg=True) == Integral(tan(x), x)
def test_risch_option():
# risch=True only allowed on indefinite integrals
raises(ValueError, lambda: integrate(1/log(x), (x, 0, oo), risch=True))
assert integrate(exp(-x**2), x, risch=True) == NonElementaryIntegral(exp(-x**2), x)
assert integrate(log(1/x)*y, x, y, risch=True) == y**2*(x*log(1/x)/2 + x/2)
assert integrate(erf(x), x, risch=True) == Integral(erf(x), x)
# TODO: How to test risch=False?
def test_heurisch_option():
raises(ValueError, lambda: integrate(1/x, x, risch=True, heurisch=True))
# an integral that heurisch can handle
assert integrate(exp(x**2), x, heurisch=True) == sqrt(pi)*erfi(x)/2
# an integral that heurisch currently cannot handle
assert integrate(exp(x)/x, x, heurisch=True) == Integral(exp(x)/x, x)
# an integral where heurisch currently hangs, issue 15471
assert integrate(log(x)*cos(log(x))/x**Rational(3, 4), x, heurisch=False) == (
-128*x**Rational(1, 4)*sin(log(x))/289 + 240*x**Rational(1, 4)*cos(log(x))/289 +
(16*x**Rational(1, 4)*sin(log(x))/17 + 4*x**Rational(1, 4)*cos(log(x))/17)*log(x))
def test_issue_6828():
f = 1/(1.08*x**2 - 4.3)
g = integrate(f, x).diff(x)
assert verify_numerically(f, g, tol=1e-12)
def test_issue_4803():
x_max = Symbol("x_max")
assert integrate(y/pi*exp(-(x_max - x)/cos(a)), x) == \
y*exp((x - x_max)/cos(a))*cos(a)/pi
def test_issue_4234():
assert integrate(1/sqrt(1 + tan(x)**2)) == tan(x)/sqrt(1 + tan(x)**2)
def test_issue_4492():
assert simplify(integrate(x**2 * sqrt(5 - x**2), x)) == Piecewise(
(I*(2*x**5 - 15*x**3 + 25*x - 25*sqrt(x**2 - 5)*acosh(sqrt(5)*x/5)) /
(8*sqrt(x**2 - 5)), 1 < Abs(x**2)/5),
((-2*x**5 + 15*x**3 - 25*x + 25*sqrt(-x**2 + 5)*asin(sqrt(5)*x/5)) /
(8*sqrt(-x**2 + 5)), True))
def test_issue_2708():
# This test needs to use an integration function that cannot
# be evaluated in closed form. Update as needed.
f = 1/(a + z + log(z))
integral_f = NonElementaryIntegral(f, (z, 2, 3))
assert Integral(f, (z, 2, 3)).doit() == integral_f
assert integrate(f + exp(z), (z, 2, 3)) == integral_f - exp(2) + exp(3)
assert integrate(2*f + exp(z), (z, 2, 3)) == \
2*integral_f - exp(2) + exp(3)
assert integrate(exp(1.2*n*s*z*(-t + z)/t), (z, 0, x)) == \
NonElementaryIntegral(exp(-1.2*n*s*z)*exp(1.2*n*s*z**2/t),
(z, 0, x))
def test_issue_2884():
f = (4.000002016020*x + 4.000002016020*y + 4.000006024032)*exp(10.0*x)
e = integrate(f, (x, 0.1, 0.2))
assert str(e) == '1.86831064982608*y + 2.16387491480008'
def test_issue_8368():
assert integrate(exp(-s*x)*cosh(x), (x, 0, oo)) == \
Piecewise(
( pi*Piecewise(
( -s/(pi*(-s**2 + 1)),
Abs(s**2) < 1),
( 1/(pi*s*(1 - 1/s**2)),
Abs(s**(-2)) < 1),
( meijerg(
((S.Half,), (0, 0)),
((0, S.Half), (0,)),
polar_lift(s)**2),
True)
),
And(
Abs(periodic_argument(polar_lift(s)**2, oo)) < pi,
cos(Abs(periodic_argument(polar_lift(s)**2, oo))/2)*sqrt(Abs(s**2)) - 1 > 0,
Ne(s**2, 1))
),
(
Integral(exp(-s*x)*cosh(x), (x, 0, oo)),
True))
assert integrate(exp(-s*x)*sinh(x), (x, 0, oo)) == \
Piecewise(
( -1/(s + 1)/2 - 1/(-s + 1)/2,
And(
Ne(1/s, 1),
Abs(periodic_argument(s, oo)) < pi/2,
Abs(periodic_argument(s, oo)) <= pi/2,
cos(Abs(periodic_argument(s, oo)))*Abs(s) - 1 > 0)),
( Integral(exp(-s*x)*sinh(x), (x, 0, oo)),
True))
def test_issue_8901():
assert integrate(sinh(1.0*x)) == 1.0*cosh(1.0*x)
assert integrate(tanh(1.0*x)) == 1.0*x - 1.0*log(tanh(1.0*x) + 1)
assert integrate(tanh(x)) == x - log(tanh(x) + 1)
@slow
def test_issue_8945():
assert integrate(sin(x)**3/x, (x, 0, 1)) == -Si(3)/4 + 3*Si(1)/4
assert integrate(sin(x)**3/x, (x, 0, oo)) == pi/4
assert integrate(cos(x)**2/x**2, x) == -Si(2*x) - cos(2*x)/(2*x) - 1/(2*x)
@slow
def test_issue_7130():
if ON_TRAVIS:
skip("Too slow for travis.")
i, L, a, b = symbols('i L a b')
integrand = (cos(pi*i*x/L)**2 / (a + b*x)).rewrite(exp)
assert x not in integrate(integrand, (x, 0, L)).free_symbols
def test_issue_10567():
a, b, c, t = symbols('a b c t')
vt = Matrix([a*t, b, c])
assert integrate(vt, t) == Integral(vt, t).doit()
assert integrate(vt, t) == Matrix([[a*t**2/2], [b*t], [c*t]])
def test_issue_11856():
t = symbols('t')
assert integrate(sinc(pi*t), t) == Si(pi*t)/pi
@slow
def test_issue_11876():
assert integrate(sqrt(log(1/x)), (x, 0, 1)) == sqrt(pi)/2
def test_issue_4950():
assert integrate((-60*exp(x) - 19.2*exp(4*x))*exp(4*x), x) ==\
-2.4*exp(8*x) - 12.0*exp(5*x)
def test_issue_4968():
assert integrate(sin(log(x**2))) == x*sin(2*log(x))/5 - 2*x*cos(2*log(x))/5
def test_singularities():
assert integrate(1/x**2, (x, -oo, oo)) is oo
assert integrate(1/x**2, (x, -1, 1)) is oo
assert integrate(1/(x - 1)**2, (x, -2, 2)) is oo
assert integrate(1/x**2, (x, 1, -1)) is -oo
assert integrate(1/(x - 1)**2, (x, 2, -2)) is -oo
def test_issue_12645():
x, y = symbols('x y', real=True)
assert (integrate(sin(x*x*x + y*y),
(x, -sqrt(pi - y*y), sqrt(pi - y*y)),
(y, -sqrt(pi), sqrt(pi)))
== Integral(sin(x**3 + y**2),
(x, -sqrt(-y**2 + pi), sqrt(-y**2 + pi)),
(y, -sqrt(pi), sqrt(pi))))
def test_issue_12677():
assert integrate(sin(x) / (cos(x)**3) , (x, 0, pi/6)) == Rational(1,6)
def test_issue_14078():
assert integrate((cos(3*x)-cos(x))/x, (x, 0, oo)) == -log(3)
def test_issue_14064():
assert integrate(1/cosh(x), (x, 0, oo)) == pi/2
def test_issue_14027():
assert integrate(1/(1 + exp(x - S.Half)/(1 + exp(x))), x) == \
x - exp(S.Half)*log(exp(x) + exp(S.Half)/(1 + exp(S.Half)))/(exp(S.Half) + E)
def test_issue_8170():
assert integrate(tan(x), (x, 0, pi/2)) is S.Infinity
def test_issue_8440_14040():
assert integrate(1/x, (x, -1, 1)) is S.NaN
assert integrate(1/(x + 1), (x, -2, 3)) is S.NaN
def test_issue_14096():
assert integrate(1/(x + y)**2, (x, 0, 1)) == -1/(y + 1) + 1/y
assert integrate(1/(1 + x + y + z)**2, (x, 0, 1), (y, 0, 1), (z, 0, 1)) == \
-4*log(4) - 6*log(2) + 9*log(3)
def test_issue_14144():
assert Abs(integrate(1/sqrt(1 - x**3), (x, 0, 1)).n() - 1.402182) < 1e-6
assert Abs(integrate(sqrt(1 - x**3), (x, 0, 1)).n() - 0.841309) < 1e-6
def test_issue_14375():
# This raised a TypeError. The antiderivative has exp_polar, which
# may be possible to unpolarify, so the exact output is not asserted here.
assert integrate(exp(I*x)*log(x), x).has(Ei)
def test_issue_14437():
f = Function('f')(x, y, z)
assert integrate(f, (x, 0, 1), (y, 0, 2), (z, 0, 3)) == \
Integral(f, (x, 0, 1), (y, 0, 2), (z, 0, 3))
def test_issue_14470():
assert integrate(1/sqrt(exp(x) + 1), x) == \
log(-1 + 1/sqrt(exp(x) + 1)) - log(1 + 1/sqrt(exp(x) + 1))
def test_issue_14877():
f = exp(1 - exp(x**2)*x + 2*x**2)*(2*x**3 + x)/(1 - exp(x**2)*x)**2
assert integrate(f, x) == \
-exp(2*x**2 - x*exp(x**2) + 1)/(x*exp(3*x**2) - exp(2*x**2))
def test_issue_14782():
f = sqrt(-x**2 + 1)*(-x**2 + x)
assert integrate(f, [x, -1, 1]) == - pi / 8
@slow
def test_issue_14782_slow():
f = sqrt(-x**2 + 1)*(-x**2 + x)
assert integrate(f, [x, 0, 1]) == S.One / 3 - pi / 16
def test_issue_12081():
f = x**(Rational(-3, 2))*exp(-x)
assert integrate(f, [x, 0, oo]) is oo
def test_issue_15285():
y = 1/x - 1
f = 4*y*exp(-2*y)/x**2
assert integrate(f, [x, 0, 1]) == 1
def test_issue_15432():
assert integrate(x**n * exp(-x) * log(x), (x, 0, oo)).gammasimp() == Piecewise(
(gamma(n + 1)*polygamma(0, n) + gamma(n + 1)/n, re(n) + 1 > 0),
(Integral(x**n*exp(-x)*log(x), (x, 0, oo)), True))
def test_issue_15124():
omega = IndexedBase('omega')
m, p = symbols('m p', cls=Idx)
assert integrate(exp(x*I*(omega[m] + omega[p])), x, conds='none') == \
-I*exp(I*x*omega[m])*exp(I*x*omega[p])/(omega[m] + omega[p])
def test_issue_15218():
assert Eq(x, y).integrate(x) == Eq(x**2/2, x*y)
assert Integral(Eq(x, y), x) == Eq(Integral(x, x), Integral(y, x))
assert Integral(Eq(x, y), x).doit() == Eq(x**2/2, x*y)
def test_issue_15292():
res = integrate(exp(-x**2*cos(2*t)) * cos(x**2*sin(2*t)), (x, 0, oo))
assert isinstance(res, Piecewise)
assert gammasimp((res - sqrt(pi)/2 * cos(t)).subs(t, pi/6)) == 0
def test_issue_4514():
assert integrate(sin(2*x)/sin(x), x) == 2*sin(x)
def test_issue_15457():
x, a, b = symbols('x a b', real=True)
definite = integrate(exp(Abs(x-2)), (x, a, b))
indefinite = integrate(exp(Abs(x-2)), x)
assert definite.subs({a: 1, b: 3}) == -2 + 2*E
assert indefinite.subs(x, 3) - indefinite.subs(x, 1) == -2 + 2*E
assert definite.subs({a: -3, b: -1}) == -exp(3) + exp(5)
assert indefinite.subs(x, -1) - indefinite.subs(x, -3) == -exp(3) + exp(5)
def test_issue_15431():
assert integrate(x*exp(x)*log(x), x) == \
(x*exp(x) - exp(x))*log(x) - exp(x) + Ei(x)
def test_issue_15640_log_substitutions():
f = x/log(x)
F = Ei(2*log(x))
assert integrate(f, x) == F and F.diff(x) == f
f = x**3/log(x)**2
F = -x**4/log(x) + 4*Ei(4*log(x))
assert integrate(f, x) == F and F.diff(x) == f
f = sqrt(log(x))/x**2
F = -sqrt(pi)*erfc(sqrt(log(x)))/2 - sqrt(log(x))/x
assert integrate(f, x) == F and F.diff(x) == f
def test_issue_15509():
from sympy.vector import CoordSys3D
N = CoordSys3D('N')
x = N.x
assert integrate(cos(a*x + b), (x, x_1, x_2), heurisch=True) == Piecewise(
(-sin(a*x_1 + b)/a + sin(a*x_2 + b)/a, (a > -oo) & (a < oo) & Ne(a, 0)), \
(-x_1*cos(b) + x_2*cos(b), True))
def test_issue_4311_fast():
x = symbols('x', real=True)
assert integrate(x*abs(9-x**2), x) == Piecewise(
(x**4/4 - 9*x**2/2, x <= -3),
(-x**4/4 + 9*x**2/2 - Rational(81, 2), x <= 3),
(x**4/4 - 9*x**2/2, True))
def test_integrate_with_complex_constants():
K = Symbol('K', real=True, positive=True)
x = Symbol('x', real=True)
m = Symbol('m', real=True)
assert integrate(exp(-I*K*x**2+m*x), x) == sqrt(I)*sqrt(pi)*exp(-I*m**2
/(4*K))*erfi((-2*I*K*x + m)/(2*sqrt(K)*sqrt(-I)))/(2*sqrt(K))
assert integrate(1/(1 + I*x**2), x) == -sqrt(I)*log(x - sqrt(I))/2 +\
sqrt(I)*log(x + sqrt(I))/2
assert integrate(exp(-I*x**2), x) == sqrt(pi)*erf(sqrt(I)*x)/(2*sqrt(I))
def test_issue_14241():
x = Symbol('x')
n = Symbol('n', positive=True, integer=True)
assert integrate(n * x ** (n - 1) / (x + 1), x) == \
n**2*x**n*lerchphi(x*exp_polar(I*pi), 1, n)*gamma(n)/gamma(n + 1)
def test_issue_13112():
assert integrate(sin(t)**2 / (5 - 4*cos(t)), [t, 0, 2*pi]) == pi / 4
def test_issue_14709b():
h = Symbol('h', positive=True)
i = integrate(x*acos(1 - 2*x/h), (x, 0, h))
assert i == 5*h**2*pi/16
def test_issue_8614():
x = Symbol('x')
t = Symbol('t')
assert integrate(exp(t)/t, (t, -oo, x)) == Ei(x)
assert integrate((exp(-x) - exp(-2*x))/x, (x, 0, oo)) == log(2)
def test_issue_15494():
s = symbols('s', real=True, positive=True)
integrand = (exp(s/2) - 2*exp(1.6*s) + exp(s))*exp(s)
solution = integrate(integrand, s)
assert solution != S.NaN
# Not sure how to test this properly as it is a symbolic expression with floats
# assert str(solution) == '0.666666666666667*exp(1.5*s) + 0.5*exp(2.0*s) - 0.769230769230769*exp(2.6*s)'
# Maybe
assert abs(solution.subs(s, 1) - (-3.67440080236188)) <= 1e-8
integrand = (exp(s/2) - 2*exp(S(8)/5*s) + exp(s))*exp(s)
assert integrate(integrand, s) == -10*exp(13*s/5)/13 + 2*exp(3*s/2)/3 + exp(2*s)/2
def test_li_integral():
y = Symbol('y')
assert Integral(li(y*x**2), x).doit() == Piecewise(
(x*li(x**2*y) - x*Ei(3*log(x) + 3*log(y)/2)/(sqrt(y)*sqrt(x**2)), Ne(y, 0)),
(0, True))
def test_issue_17473():
x = Symbol('x')
n = Symbol('n')
assert integrate(sin(x**n), x) == \
x*x**n*gamma(S(1)/2 + 1/(2*n))*hyper((S(1)/2 + 1/(2*n),),
(S(3)/2, S(3)/2 + 1/(2*n)),
-x**(2*n)/4)/(2*n*gamma(S(3)/2 + 1/(2*n)))
def test_issue_17671():
assert integrate(log(log(x)) / x**2, [x, 1, oo]) == -EulerGamma
assert integrate(log(log(x)) / x**3, [x, 1, oo]) == -log(2)/2 - EulerGamma/2
assert integrate(log(log(x)) / x**10, [x, 1, oo]) == -2*log(3)/9 - EulerGamma/9
|
# -*- coding: utf_8 -*-
"""Generate Zipped downloads."""
import logging
import os
import re
import shutil
from django.conf import settings
from django.shortcuts import redirect
from MobSF.utils import print_n_send_error_response
logger = logging.getLogger(__name__)
def run(request):
"""Generate downloads for apk, java and smali."""
try:
logger.info('Generating Downloads')
md5 = request.GET['hash']
file_type = request.GET['file_type']
match = re.match('^[0-9a-f]{32}$', md5)
# reject the request if either the hash or the file type is invalid
if not match or file_type not in ['apk', 'smali', 'java']:
logger.exception('Invalid options')
return print_n_send_error_response(request,
'Invalid options')
app_dir = os.path.join(settings.UPLD_DIR, md5)
file_name = ''
if file_type == 'java':
# For Java
file_name = md5 + '-java'
directory = os.path.join(app_dir, 'java_source/')
dwd_dir = os.path.join(settings.DWD_DIR, file_name)
shutil.make_archive(dwd_dir, 'zip', directory)
file_name = file_name + '.zip'
elif file_type == 'smali':
# For Smali
file_name = md5 + '-smali'
directory = os.path.join(app_dir, 'smali_source/')
dwd_dir = os.path.join(settings.DWD_DIR, file_name)
shutil.make_archive(dwd_dir, 'zip', directory)
file_name = file_name + '.zip'
elif file_type == 'apk':
file_name = md5 + '.apk'
src = os.path.join(app_dir, file_name)
dst = os.path.join(settings.DWD_DIR, file_name)
shutil.copy2(src, dst)
return redirect('/download/' + file_name)
except Exception:
logger.exception('Generating Downloads')
return print_n_send_error_response(request,
'Generating Downloads')
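# Hedged usage sketch (URL wiring assumed for illustration, not part of
# this module): the view expects ?hash=<32 lowercase hex chars> and
# ?file_type=<apk|java|smali>, builds the archive or copy under
# settings.DWD_DIR, and redirects to /download/<file_name>.
#
#     from django.conf.urls import url
#     urlpatterns = [url(r'^generate_downloads/$', run)]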
|
# -*- coding: UTF-8 -*-
# =============================================================================
# Copyright (C) 2012 Brad Hards <[email protected]>
#
# Based on wms.py, which has the following copyright statement:
# Copyright (c) 2004, 2006 Sean C. Gillies
# Copyright (c) 2005 Nuxeo SARL <http://nuxeo.com>
#
# Authors : Sean Gillies <[email protected]>
# Julien Anguenot <[email protected]>
#
# Contact email: [email protected]
# =============================================================================
"""
Abstract
--------
The wmts module of the OWSlib package provides client-side functionality
for fetching tiles from an OGC Web Map Tile Service (WMTS)
Disclaimer
----------
PLEASE NOTE: the owslib wmts module should be considered in early-beta
state: it has been tested against only one WMTS server (NASA EODSIS).
More extensive testing is needed and feedback (to [email protected])
would be appreciated.
"""
from __future__ import (absolute_import, division, print_function)
from random import randint
import warnings
import six
from six.moves import filter
try: # Python 3
from urllib.parse import (urlencode, urlparse, urlunparse, parse_qs,
ParseResult)
except ImportError: # Python 2
from urllib import urlencode
from urlparse import urlparse, urlunparse, parse_qs, ParseResult
from .etree import etree
from .util import openURL, testXMLValue, getXMLInteger
from .fgdc import Metadata
from .iso import MD_Metadata
from .ows import ServiceProvider, ServiceIdentification, OperationsMetadata
_OWS_NS = '{http://www.opengis.net/ows/1.1}'
_WMTS_NS = '{http://www.opengis.net/wmts/1.0}'
_XLINK_NS = '{http://www.w3.org/1999/xlink}'
_ABSTRACT_TAG = _OWS_NS + 'Abstract'
_IDENTIFIER_TAG = _OWS_NS + 'Identifier'
_LOWER_CORNER_TAG = _OWS_NS + 'LowerCorner'
_OPERATIONS_METADATA_TAG = _OWS_NS + 'OperationsMetadata'
_SERVICE_IDENTIFICATION_TAG = _OWS_NS + 'ServiceIdentification'
_SERVICE_PROVIDER_TAG = _OWS_NS + 'ServiceProvider'
_SUPPORTED_CRS_TAG = _OWS_NS + 'SupportedCRS'
_TITLE_TAG = _OWS_NS + 'Title'
_UPPER_CORNER_TAG = _OWS_NS + 'UpperCorner'
_WGS84_BOUNDING_BOX_TAG = _OWS_NS + 'WGS84BoundingBox'
_CONTENTS_TAG = _WMTS_NS + 'Contents'
_FORMAT_TAG = _WMTS_NS + 'Format'
_INFO_FORMAT_TAG = _WMTS_NS + 'InfoFormat'
_LAYER_TAG = _WMTS_NS + 'Layer'
_LAYER_REF_TAG = _WMTS_NS + 'LayerRef'
_MATRIX_HEIGHT_TAG = _WMTS_NS + 'MatrixHeight'
_MATRIX_WIDTH_TAG = _WMTS_NS + 'MatrixWidth'
_MAX_TILE_COL_TAG = _WMTS_NS + 'MaxTileCol'
_MAX_TILE_ROW_TAG = _WMTS_NS + 'MaxTileRow'
_MIN_TILE_COL_TAG = _WMTS_NS + 'MinTileCol'
_MIN_TILE_ROW_TAG = _WMTS_NS + 'MinTileRow'
_RESOURCE_URL_TAG = _WMTS_NS + 'ResourceURL'
_SCALE_DENOMINATOR_TAG = _WMTS_NS + 'ScaleDenominator'
_SERVICE_METADATA_URL_TAG = _WMTS_NS + 'ServiceMetadataURL'
_STYLE_TAG = _WMTS_NS + 'Style'
_THEME_TAG = _WMTS_NS + 'Theme'
_THEMES_TAG = _WMTS_NS + 'Themes'
_TILE_HEIGHT_TAG = _WMTS_NS + 'TileHeight'
_TILE_MATRIX_SET_LINK_TAG = _WMTS_NS + 'TileMatrixSetLink'
_TILE_MATRIX_SET_TAG = _WMTS_NS + 'TileMatrixSet'
_TILE_MATRIX_SET_LIMITS_TAG = _WMTS_NS + 'TileMatrixSetLimits'
_TILE_MATRIX_LIMITS_TAG = _WMTS_NS + 'TileMatrixLimits'
_TILE_MATRIX_TAG = _WMTS_NS + 'TileMatrix'
_TILE_WIDTH_TAG = _WMTS_NS + 'TileWidth'
_TOP_LEFT_CORNER_TAG = _WMTS_NS + 'TopLeftCorner'
_KEYWORDS_TAG = _OWS_NS + 'Keywords'
_KEYWORD_TAG = _OWS_NS + 'Keyword'
_HREF_TAG = _XLINK_NS + 'href'
class ServiceException(Exception):
"""WMTS ServiceException
Attributes:
message -- short error message
xml -- full xml error message from server
"""
def __init__(self, message, xml):
self.message = message
self.xml = xml
def __str__(self):
return repr(self.message)
class CapabilitiesError(Exception):
pass
class WebMapTileService(object):
"""Abstraction for OGC Web Map Tile Service (WMTS).
Implements IWebMapService.
"""
def __getitem__(self, name):
'''Check the contents dictionary to allow dict-like access to
service layers.'''
if name in self.__getattribute__('contents'):
return self.__getattribute__('contents')[name]
else:
raise KeyError("No content named %s" % name)
def __init__(self, url, version='1.0.0', xml=None, username=None,
password=None, parse_remote_metadata=False,
vendor_kwargs=None):
"""Initialize.
Parameters
----------
url : string
Base URL for the WMTS service.
version : string
Optional WMTS version. Defaults to '1.0.0'.
xml : string
Optional XML content to use as the content for the initial
GetCapabilities request. Typically only used for testing.
username : string
Optional user name for authentication.
password : string
Optional password for authentication.
parse_remote_metadata : boolean
Currently unused.
vendor_kwargs : dict
Optional vendor-specific parameters to be included in all
requests.
"""
self.url = url
self.username = username
self.password = password
self.version = version
self.vendor_kwargs = vendor_kwargs
self._capabilities = None
# Authentication handled by Reader
reader = WMTSCapabilitiesReader(self.version, url=self.url,
un=self.username, pw=self.password)
if xml: # read from stored xml
self._capabilities = reader.readString(xml)
else: # read from server
self._capabilities = reader.read(self.url, self.vendor_kwargs)
# Avoid building capabilities metadata if the response is a
# ServiceExceptionReport.
# TODO: check if this needs a namespace
se = self._capabilities.find('ServiceException')
if se is not None:
err_message = str(se.text).strip()
raise ServiceException(err_message, xml)
# build metadata objects
self._buildMetadata(parse_remote_metadata)
def _getcapproperty(self):
if not self._capabilities:
reader = WMTSCapabilitiesReader(
self.version, url=self.url, un=self.username, pw=self.password
)
xml = reader.read(self.url, self.vendor_kwargs)
self._capabilities = ServiceMetadata(xml)
return self._capabilities
def _buildMetadata(self, parse_remote_metadata=False):
''' set up capabilities metadata objects '''
self.updateSequence = self._capabilities.attrib.get('updateSequence')
# serviceIdentification metadata
serviceident = self._capabilities.find(_SERVICE_IDENTIFICATION_TAG)
self.identification = ServiceIdentification(serviceident)
# serviceProvider metadata
serviceprov = self._capabilities.find(_SERVICE_PROVIDER_TAG)
if serviceprov is not None:
self.provider = ServiceProvider(serviceprov)
# serviceOperations metadata
self.operations = []
serviceop = self._capabilities.find(_OPERATIONS_METADATA_TAG)
# A REST-only WMTS does not have any Operations
if serviceop is not None:
for elem in serviceop[:]:
self.operations.append(OperationsMetadata(elem))
# serviceContents metadata: our assumption is that services use
# a top-level layer as a metadata organizer, nothing more.
self.contents = {}
caps = self._capabilities.find(_CONTENTS_TAG)
def gather_layers(parent_elem, parent_metadata):
for index, elem in enumerate(parent_elem.findall(_LAYER_TAG)):
cm = ContentMetadata(
elem, parent=parent_metadata, index=index+1,
parse_remote_metadata=parse_remote_metadata)
if cm.id:
if cm.id in self.contents:
raise KeyError('Content metadata for layer "%s" '
'already exists' % cm.id)
self.contents[cm.id] = cm
gather_layers(elem, cm)
gather_layers(caps, None)
self.tilematrixsets = {}
for elem in caps.findall(_TILE_MATRIX_SET_TAG):
tms = TileMatrixSet(elem)
if tms.identifier:
if tms.identifier in self.tilematrixsets:
raise KeyError('TileMatrixSet with identifier "%s" '
'already exists' % tms.identifier)
self.tilematrixsets[tms.identifier] = tms
self.themes = {}
for elem in self._capabilities.findall(_THEMES_TAG + '/' + _THEME_TAG):
theme = Theme(elem)
if theme.identifier:
if theme.identifier in self.themes:
raise KeyError('Theme with identifier "%s" already exists'
% theme.identifier)
self.themes[theme.identifier] = theme
serviceMetadataURL = self._capabilities.find(_SERVICE_METADATA_URL_TAG)
if serviceMetadataURL is not None:
self.serviceMetadataURL = serviceMetadataURL.attrib[_HREF_TAG]
else:
self.serviceMetadataURL = None
def items(self):
'''supports dict-like items() access'''
items = []
for item in self.contents:
items.append((item, self.contents[item]))
return items
def buildTileRequest(self, layer=None, style=None, format=None,
tilematrixset=None, tilematrix=None, row=None,
column=None, **kwargs):
"""Return the URL-encoded parameters for a GetTile request.
Parameters
----------
layer : string
Content layer name.
style : string
Optional style name. Defaults to the first style defined for
the relevant layer in the GetCapabilities response.
format : string
Optional output image format, such as 'image/jpeg'.
Defaults to the first format defined for the relevant layer
in the GetCapabilities response.
tilematrixset : string
Optional name of tile matrix set to use.
Defaults to the first tile matrix set defined for the
relevant layer in the GetCapabilities response.
tilematrix : string
Name of the tile matrix to use.
row : integer
Row index of tile to request.
column : integer
Column index of tile to request.
**kwargs : extra arguments
anything else e.g. vendor specific parameters
Example
-------
>>> url = 'http://map1c.vis.earthdata.nasa.gov/wmts-geo/wmts.cgi'
>>> wmts = WebMapTileService(url)
>>> wmts.buildTileRequest(layer='VIIRS_CityLights_2012',
... tilematrixset='EPSG4326_500m',
... tilematrix='6',
... row=4, column=4)
'SERVICE=WMTS&REQUEST=GetTile&VERSION=1.0.0&\
LAYER=VIIRS_CityLights_2012&STYLE=default&TILEMATRIXSET=EPSG4326_500m&\
TILEMATRIX=6&TILEROW=4&TILECOL=4&FORMAT=image%2Fjpeg'
"""
if (layer is None):
raise ValueError("layer is mandatory (cannot be None)")
if style is None:
style = list(self[layer].styles.keys())[0]
if format is None:
format = self[layer].formats[0]
if tilematrixset is None:
tilematrixset = sorted(self[layer].tilematrixsetlinks.keys())[0]
if tilematrix is None:
msg = 'tilematrix (zoom level) is mandatory (cannot be None)'
raise ValueError(msg)
if row is None:
raise ValueError("row is mandatory (cannot be None)")
if column is None:
raise ValueError("column is mandatory (cannot be None)")
request = list()
request.append(('SERVICE', 'WMTS'))
request.append(('REQUEST', 'GetTile'))
request.append(('VERSION', '1.0.0'))
request.append(('LAYER', layer))
request.append(('STYLE', style))
request.append(('TILEMATRIXSET', tilematrixset))
request.append(('TILEMATRIX', tilematrix))
request.append(('TILEROW', str(row)))
request.append(('TILECOL', str(column)))
request.append(('FORMAT', format))
for key, value in six.iteritems(kwargs):
request.append((key, value))
data = urlencode(request, True)
return data
def buildTileResource(self, layer=None, style=None, format=None,
tilematrixset=None, tilematrix=None, row=None,
column=None, **kwargs):
tileresourceurls = []
for resourceURL in self[layer].resourceURLs:
if resourceURL['resourceType'] == 'tile':
tileresourceurls.append(resourceURL)
numres = len(tileresourceurls)
if numres > 0:
# choose random ResourceURL if more than one available
resindex = randint(0, numres - 1)
resurl = tileresourceurls[resindex]['template']
if tilematrixset:
resurl = resurl.replace('{TileMatrixSet}', tilematrixset)
resurl = resurl.replace('{TileMatrix}', tilematrix)
resurl = resurl.replace('{TileRow}', row)
resurl = resurl.replace('{TileCol}', column)
if style:
resurl = resurl.replace('{Style}', style)
return resurl
return None
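    # Illustrative sketch of what buildTileResource does, assuming a layer
    # whose capabilities advertise a (hypothetical) RESTful template:
    #   template: 'http://server/wmts/{Style}/{TileMatrixSet}/{TileMatrix}/{TileRow}/{TileCol}.png'
    #   buildTileResource(layer='l', style='default', tilematrixset='grid',
    #                     tilematrix='6', row='4', column='4')
    #   -> 'http://server/wmts/default/grid/6/4/4.png'
    # Note that tilematrix, row and column must already be strings here,
    # since they are substituted into the template verbatim.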
@property
def restonly(self):
# if OperationsMetadata is missing completely --> use REST
if len(self.operations) == 0:
return True
# check if KVP or RESTful are available
restenc = False
kvpenc = False
for operation in self.operations:
if operation.name == 'GetTile':
for method in operation.methods:
if 'kvp' in str(method['constraints']).lower():
kvpenc = True
if 'rest' in str(method['constraints']).lower():
restenc = True
# if KVP is available --> use KVP
if kvpenc:
return False
# if the operation has no constraint --> use KVP
if not kvpenc and not restenc:
return False
return restenc
def gettile(self, base_url=None, layer=None, style=None, format=None,
tilematrixset=None, tilematrix=None, row=None, column=None,
**kwargs):
"""Return a tile from the WMTS.
Returns the tile image as a file-like object.
Parameters
----------
base_url : string
Optional URL for request submission. Defaults to the URL of
the GetTile operation as declared in the GetCapabilities
response.
layer : string
Content layer name.
style : string
Optional style name. Defaults to the first style defined for
the relevant layer in the GetCapabilities response.
format : string
Optional output image format, such as 'image/jpeg'.
Defaults to the first format defined for the relevant layer
in the GetCapabilities response.
tilematrixset : string
Optional name of tile matrix set to use.
Defaults to the first tile matrix set defined for the
relevant layer in the GetCapabilities response.
tilematrix : string
Name of the tile matrix to use.
row : integer
Row index of tile to request.
column : integer
Column index of tile to request.
**kwargs : extra arguments
anything else e.g. vendor specific parameters
Example
-------
>>> url = 'http://map1c.vis.earthdata.nasa.gov/wmts-geo/wmts.cgi'
>>> wmts = WebMapTileService(url)
>>> img = wmts.gettile(layer='VIIRS_CityLights_2012',\
tilematrixset='EPSG4326_500m',\
tilematrix='6',\
row=4, column=4)
>>> out = open('tile.jpg', 'wb')
>>> bytes_written = out.write(img.read())
>>> out.close()
"""
        # copy so per-request kwargs do not mutate the instance defaults
        vendor_kwargs = dict(self.vendor_kwargs or {})
vendor_kwargs.update(kwargs)
# REST only WMTS
if self.restonly:
resurl = self.buildTileResource(
layer, style, format, tilematrixset, tilematrix,
row, column, **vendor_kwargs)
u = openURL(resurl, username=self.username, password=self.password)
return u
        # KVP implementation
data = self.buildTileRequest(layer, style, format, tilematrixset,
tilematrix, row, column, **vendor_kwargs)
if base_url is None:
base_url = self.url
try:
methods = self.getOperationByName('GetTile').methods
get_verbs = [x for x in methods
if x.get('type').lower() == 'get']
if len(get_verbs) > 1:
# Filter by constraints
base_url = next(
x for x in filter(
list,
([pv.get('url')
for const in pv.get('constraints')
if 'kvp' in [x.lower() for x in const.values]]
for pv in get_verbs if pv.get('constraints'))))[0]
elif len(get_verbs) == 1:
base_url = get_verbs[0].get('url')
except StopIteration:
pass
u = openURL(base_url, data, username=self.username,
password=self.password)
# check for service exceptions, and return
if u.info()['Content-Type'] == 'application/vnd.ogc.se_xml':
se_xml = u.read()
se_tree = etree.fromstring(se_xml)
err_message = six.text_type(se_tree.find('ServiceException').text)
raise ServiceException(err_message.strip(), se_xml)
return u
def getServiceXML(self):
xml = None
if self._capabilities is not None:
xml = etree.tostring(self._capabilities)
return xml
def getfeatureinfo(self):
raise NotImplementedError
def getOperationByName(self, name):
"""Return a named content item."""
for item in self.operations:
if item.name == name:
return item
raise KeyError("No operation named %s" % name)
class TileMatrixSet(object):
'''Holds one TileMatrixSet'''
def __init__(self, elem):
if elem.tag != _TILE_MATRIX_SET_TAG:
raise ValueError('%s should be a TileMatrixSet' % (elem,))
self.identifier = testXMLValue(elem.find(_IDENTIFIER_TAG)).strip()
self.crs = testXMLValue(elem.find(_SUPPORTED_CRS_TAG)).strip()
if self.crs is None or self.identifier is None:
raise ValueError('%s incomplete TileMatrixSet' % (elem,))
self.tilematrix = {}
for tilematrix in elem.findall(_TILE_MATRIX_TAG):
tm = TileMatrix(tilematrix)
if tm.identifier:
if tm.identifier in self.tilematrix:
raise KeyError('TileMatrix with identifier "%s" '
'already exists' % tm.identifier)
self.tilematrix[tm.identifier] = tm
class TileMatrix(object):
'''Holds one TileMatrix'''
def __init__(self, elem):
if elem.tag != _TILE_MATRIX_TAG:
raise ValueError('%s should be a TileMatrix' % (elem,))
self.identifier = testXMLValue(elem.find(_IDENTIFIER_TAG)).strip()
sd = testXMLValue(elem.find(_SCALE_DENOMINATOR_TAG))
if sd is None:
raise ValueError('%s is missing ScaleDenominator' % (elem,))
self.scaledenominator = float(sd)
tl = testXMLValue(elem.find(_TOP_LEFT_CORNER_TAG))
if tl is None:
raise ValueError('%s is missing TopLeftCorner' % (elem,))
(lon, lat) = tl.split(" ")
self.topleftcorner = (float(lon), float(lat))
width = testXMLValue(elem.find(_TILE_WIDTH_TAG))
height = testXMLValue(elem.find(_TILE_HEIGHT_TAG))
if (width is None) or (height is None):
msg = '%s is missing TileWidth and/or TileHeight' % (elem,)
raise ValueError(msg)
self.tilewidth = int(width)
self.tileheight = int(height)
mw = testXMLValue(elem.find(_MATRIX_WIDTH_TAG))
mh = testXMLValue(elem.find(_MATRIX_HEIGHT_TAG))
if (mw is None) or (mh is None):
msg = '%s is missing MatrixWidth and/or MatrixHeight' % (elem,)
raise ValueError(msg)
self.matrixwidth = int(mw)
self.matrixheight = int(mh)
class Theme:
"""
Abstraction for a WMTS theme
"""
def __init__(self, elem):
if elem.tag != _THEME_TAG:
raise ValueError('%s should be a Theme' % (elem,))
self.identifier = testXMLValue(elem.find(_IDENTIFIER_TAG)).strip()
title = testXMLValue(elem.find(_TITLE_TAG))
if title is not None:
self.title = title.strip()
else:
self.title = None
abstract = testXMLValue(elem.find(_ABSTRACT_TAG))
if abstract is not None:
self.abstract = abstract.strip()
else:
self.abstract = None
self.layerRefs = []
layerRefs = elem.findall(_LAYER_REF_TAG)
for layerRef in layerRefs:
if layerRef.text is not None:
self.layerRefs.append(layerRef.text)
class TileMatrixLimits(object):
"""
Represents a WMTS TileMatrixLimits element.
"""
def __init__(self, elem):
if elem.tag != _TILE_MATRIX_LIMITS_TAG:
raise ValueError('%s should be a TileMatrixLimits' % elem)
tm = elem.find(_TILE_MATRIX_TAG)
if tm is None:
raise ValueError('Missing TileMatrix in %s' % elem)
self.tilematrix = tm.text.strip()
self.mintilerow = getXMLInteger(elem, _MIN_TILE_ROW_TAG)
self.maxtilerow = getXMLInteger(elem, _MAX_TILE_ROW_TAG)
self.mintilecol = getXMLInteger(elem, _MIN_TILE_COL_TAG)
self.maxtilecol = getXMLInteger(elem, _MAX_TILE_COL_TAG)
def __repr__(self):
fmt = ('<TileMatrixLimits: {self.tilematrix}'
', minRow={self.mintilerow}, maxRow={self.maxtilerow}'
', minCol={self.mintilecol}, maxCol={self.maxtilecol}>')
return fmt.format(self=self)
class TileMatrixSetLink(object):
"""
Represents a WMTS TileMatrixSetLink element.
"""
@staticmethod
def from_elements(link_elements):
"""
Return a list of TileMatrixSetLink instances derived from the
given list of <TileMatrixSetLink> XML elements.
"""
# NB. The WMTS spec is contradictory re. the multiplicity
# relationships between Layer and TileMatrixSetLink, and
# TileMatrixSetLink and tileMatrixSet (URI).
# Try to figure out which model has been used by the server.
links = []
for link_element in link_elements:
matrix_set_elements = link_element.findall(_TILE_MATRIX_SET_TAG)
if len(matrix_set_elements) == 0:
raise ValueError('Missing TileMatrixSet in %s' % link_element)
elif len(matrix_set_elements) > 1:
set_limits_elements = link_element.findall(
_TILE_MATRIX_SET_LIMITS_TAG)
if set_limits_elements:
raise ValueError('Multiple instances of TileMatrixSet'
' plus TileMatrixSetLimits in %s' %
link_element)
for matrix_set_element in matrix_set_elements:
uri = matrix_set_element.text.strip()
links.append(TileMatrixSetLink(uri))
else:
uri = matrix_set_elements[0].text.strip()
tilematrixlimits = {}
path = '%s/%s' % (_TILE_MATRIX_SET_LIMITS_TAG,
_TILE_MATRIX_LIMITS_TAG)
for limits_element in link_element.findall(path):
tml = TileMatrixLimits(limits_element)
if tml.tilematrix:
if tml.tilematrix in tilematrixlimits:
msg = ('TileMatrixLimits with tileMatrix "%s" '
'already exists' % tml.tilematrix)
raise KeyError(msg)
tilematrixlimits[tml.tilematrix] = tml
links.append(TileMatrixSetLink(uri, tilematrixlimits))
return links
def __init__(self, tilematrixset, tilematrixlimits=None):
self.tilematrixset = tilematrixset
if tilematrixlimits is None:
self.tilematrixlimits = {}
else:
self.tilematrixlimits = tilematrixlimits
def __repr__(self):
fmt = ('<TileMatrixSetLink: {self.tilematrixset}'
', tilematrixlimits={{...}}>')
return fmt.format(self=self)
class ContentMetadata:
"""
Abstraction for WMTS layer metadata.
Implements IContentMetadata.
"""
def __init__(self, elem, parent=None, index=0,
parse_remote_metadata=False):
if elem.tag != _LAYER_TAG:
raise ValueError('%s should be a Layer' % (elem,))
self.parent = parent
if parent:
self.index = "%s.%d" % (parent.index, index)
else:
self.index = str(index)
self.id = self.name = testXMLValue(elem.find(_IDENTIFIER_TAG))
# title is mandatory property
self.title = None
title = testXMLValue(elem.find(_TITLE_TAG))
if title is not None:
self.title = title.strip()
self.abstract = testXMLValue(elem.find(_ABSTRACT_TAG))
# bboxes
b = elem.find(_WGS84_BOUNDING_BOX_TAG)
        self.boundingBoxWGS84 = None
if b is not None:
lc = b.find(_LOWER_CORNER_TAG)
uc = b.find(_UPPER_CORNER_TAG)
ll = [float(s) for s in lc.text.split()]
ur = [float(s) for s in uc.text.split()]
self.boundingBoxWGS84 = (ll[0], ll[1], ur[0], ur[1])
# TODO: there is probably some more logic here, and it should
# probably be shared code
self._tilematrixsets = [f.text.strip() for f in
elem.findall(_TILE_MATRIX_SET_LINK_TAG + '/' +
_TILE_MATRIX_SET_TAG)]
link_elements = elem.findall(_TILE_MATRIX_SET_LINK_TAG)
tile_matrix_set_links = TileMatrixSetLink.from_elements(link_elements)
self.tilematrixsetlinks = {}
for tmsl in tile_matrix_set_links:
if tmsl.tilematrixset:
if tmsl.tilematrixset in self.tilematrixsetlinks:
raise KeyError('TileMatrixSetLink with tilematrixset "%s"'
' already exists' %
tmsl.tilematrixset)
self.tilematrixsetlinks[tmsl.tilematrixset] = tmsl
self.resourceURLs = []
for resourceURL in elem.findall(_RESOURCE_URL_TAG):
resource = {}
for attrib in ['format', 'resourceType', 'template']:
resource[attrib] = resourceURL.attrib[attrib]
self.resourceURLs.append(resource)
# Styles
self.styles = {}
for s in elem.findall(_STYLE_TAG):
style = {}
isdefaulttext = s.attrib.get('isDefault')
style['isDefault'] = (isdefaulttext == "true")
identifier = s.find(_IDENTIFIER_TAG)
if identifier is None:
raise ValueError('%s missing identifier' % (s,))
title = s.find(_TITLE_TAG)
if title is not None:
style['title'] = title.text
self.styles[identifier.text] = style
self.formats = [f.text for f in elem.findall(_FORMAT_TAG)]
self.keywords = [f.text for f in elem.findall(
_KEYWORDS_TAG+'/'+_KEYWORD_TAG)]
self.infoformats = [f.text for f in elem.findall(_INFO_FORMAT_TAG)]
self.layers = []
for child in elem.findall(_LAYER_TAG):
self.layers.append(ContentMetadata(child, self))
@property
def tilematrixsets(self):
        # NB. This attribute has been superseded by the
# `tilematrixsetlinks` attribute defined below, but is included
# for now to provide continuity.
warnings.warn("The 'tilematrixsets' attribute has been deprecated"
" and will be removed in a future version of OWSLib."
" Please use 'tilematrixsetlinks' instead.")
return self._tilematrixsets
def __str__(self):
return 'Layer Name: %s Title: %s' % (self.name, self.title)
class WMTSCapabilitiesReader:
"""Read and parse capabilities document into a lxml.etree infoset
"""
def __init__(self, version='1.0.0', url=None, un=None, pw=None):
"""Initialize"""
self.version = version
self._infoset = None
self.url = url
self.username = un
self.password = pw
def capabilities_url(self, service_url, vendor_kwargs=None):
"""Return a capabilities url
"""
# Ensure the 'service', 'request', and 'version' parameters,
# and any vendor-specific parameters are included in the URL.
pieces = urlparse(service_url)
args = parse_qs(pieces.query)
if 'service' not in args:
args['service'] = 'WMTS'
if 'request' not in args:
args['request'] = 'GetCapabilities'
if 'version' not in args:
args['version'] = self.version
if vendor_kwargs:
args.update(vendor_kwargs)
query = urlencode(args, doseq=True)
pieces = ParseResult(pieces.scheme, pieces.netloc,
pieces.path, pieces.params,
query, pieces.fragment)
return urlunparse(pieces)
def read(self, service_url, vendor_kwargs=None):
"""Get and parse a WMTS capabilities document, returning an
elementtree instance
service_url is the base url, to which is appended the service,
version, and request parameters. Optional vendor-specific
parameters can also be supplied as a dict.
"""
getcaprequest = self.capabilities_url(service_url, vendor_kwargs)
# now split it up again to use the generic openURL function...
spliturl = getcaprequest.split('?')
u = openURL(spliturl[0], spliturl[1], method='Get',
username=self.username, password=self.password)
return etree.fromstring(u.read())
def readString(self, st):
"""Parse a WMTS capabilities document, returning an elementtree instance
string should be an XML capabilities document
"""
if not isinstance(st, str) and not isinstance(st, bytes):
msg = 'String must be of type string or bytes, not %s' % type(st)
raise ValueError(msg)
return etree.fromstring(st)
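# Minimal usage sketch for this module (the endpoint is the NASA GIBS server
# already used in the doctests above; layer/tile values are illustrative):
#
#   wmts = WebMapTileService('http://map1c.vis.earthdata.nasa.gov/wmts-geo/wmts.cgi')
#   print(sorted(wmts.contents))        # available layers
#   print(sorted(wmts.tilematrixsets))  # available tile matrix sets
#   img = wmts.gettile(layer='VIIRS_CityLights_2012',
#                      tilematrixset='EPSG4326_500m',
#                      tilematrix='6', row=4, column=4)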
|
"""Base admin models."""
from django.contrib.contenttypes.fields import GenericRelation
from django.db import models
from django.utils import timezone
from modoboa.core import models as core_models
from modoboa.lib.permissions import (
grant_access_to_object, ungrant_access_to_object
)
class AdminObjectManager(models.Manager):
def get_for_admin(self, admin):
"""Return the objects belonging to this admin
The result is a ``QuerySet`` object, so this function can be used
to fill ``ModelChoiceField`` objects.
"""
if admin.is_superuser:
return self.get_queryset()
return self.get_queryset().filter(owners__user=admin)
class AdminObject(models.Model):
"""Abstract model to support dates.
Inherit from this model to automatically add the "dates" feature
to another model. It defines the appropriate field and handles
saves.
"""
creation = models.DateTimeField(default=timezone.now)
last_modification = models.DateTimeField(auto_now=True)
owners = GenericRelation(core_models.ObjectAccess)
_objectname = None
objects = AdminObjectManager()
class Meta:
abstract = True
def __init__(self, *args, **kwargs):
"""Custom constructor."""
super().__init__(*args, **kwargs)
self._loaded_values = {}
@classmethod
def from_db(cls, db, field_names, values):
"""Store loaded values."""
instance = super().from_db(db, field_names, values)
instance._loaded_values = dict(zip(field_names, values))
return instance
@property
def objectname(self):
if self._objectname is None:
return self.__class__.__name__
return self._objectname
def post_create(self, creator):
grant_access_to_object(creator, self, is_owner=True)
def save(self, *args, **kwargs):
creator = kwargs.pop("creator", None)
super(AdminObject, self).save(*args, **kwargs)
if creator is not None:
self.post_create(creator)
def delete(self):
ungrant_access_to_object(self)
super(AdminObject, self).delete()
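# Illustrative (hypothetical) subclass, shown only to document the intended
# usage of AdminObject; it is not part of this module:
#
#   class Domain(AdminObject):
#       name = models.CharField(max_length=253)
#
#   domain = Domain(name="example.com")
#   domain.save(creator=request.user)    # post_create() grants ownership
#   Domain.objects.get_for_admin(admin)  # only objects this admin owns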
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
try:
from .error_detail_py3 import ErrorDetail
from .error_py3 import Error, ErrorException
from .azure_sku_py3 import AzureSku
from .workspace_collection_py3 import WorkspaceCollection
from .workspace_py3 import Workspace
from .display_py3 import Display
from .operation_py3 import Operation
from .operation_list_py3 import OperationList
from .workspace_collection_access_keys_py3 import WorkspaceCollectionAccessKeys
from .workspace_collection_access_key_py3 import WorkspaceCollectionAccessKey
from .create_workspace_collection_request_py3 import CreateWorkspaceCollectionRequest
from .update_workspace_collection_request_py3 import UpdateWorkspaceCollectionRequest
from .check_name_request_py3 import CheckNameRequest
from .check_name_response_py3 import CheckNameResponse
from .migrate_workspace_collection_request_py3 import MigrateWorkspaceCollectionRequest
except (SyntaxError, ImportError):
from .error_detail import ErrorDetail
from .error import Error, ErrorException
from .azure_sku import AzureSku
from .workspace_collection import WorkspaceCollection
from .workspace import Workspace
from .display import Display
from .operation import Operation
from .operation_list import OperationList
from .workspace_collection_access_keys import WorkspaceCollectionAccessKeys
from .workspace_collection_access_key import WorkspaceCollectionAccessKey
from .create_workspace_collection_request import CreateWorkspaceCollectionRequest
from .update_workspace_collection_request import UpdateWorkspaceCollectionRequest
from .check_name_request import CheckNameRequest
from .check_name_response import CheckNameResponse
from .migrate_workspace_collection_request import MigrateWorkspaceCollectionRequest
from .workspace_collection_paged import WorkspaceCollectionPaged
from .workspace_paged import WorkspacePaged
from .power_bi_embedded_management_client_enums import (
AccessKeyName,
CheckNameReason,
)
__all__ = [
'ErrorDetail',
'Error', 'ErrorException',
'AzureSku',
'WorkspaceCollection',
'Workspace',
'Display',
'Operation',
'OperationList',
'WorkspaceCollectionAccessKeys',
'WorkspaceCollectionAccessKey',
'CreateWorkspaceCollectionRequest',
'UpdateWorkspaceCollectionRequest',
'CheckNameRequest',
'CheckNameResponse',
'MigrateWorkspaceCollectionRequest',
'WorkspaceCollectionPaged',
'WorkspacePaged',
'AccessKeyName',
'CheckNameReason',
]
|
import stat
import os
import click
import requests
BOTTLENOSE_API_URL = 'http://127.0.0.1:8000/api/'
def has_valid_permissions(path):
    # token file must be readable/writable by its owner only (mode 0600)
    return stat.S_IMODE(os.lstat(path).st_mode) == 0o600
def get_token():
try:
tokenfile = os.path.join(os.environ['HOME'], '.bnose')
token = None
if os.path.isfile(tokenfile):
            with open(tokenfile, 'r') as tokenhandle:
                token = tokenhandle.read().rstrip()
if not has_valid_permissions(tokenfile):
# invalidate token
token = None
return token
except KeyError:
raise OSError('Could not find .bnose: $HOME is not set')
def get_headers(**kwargs):
headers = {
'user-agent': 'bnose/1.0'
}
headers.update(**kwargs)
return headers
def get_auth_headers():
return get_headers(**{'Authorization': 'Token %s' % get_token()})
def _request(endpoint, **kwargs):
url = '{base_api_url}{endpoint}'.format(
base_api_url=BOTTLENOSE_API_URL,
endpoint=endpoint
)
response = requests.post(url, headers=get_auth_headers(), data=kwargs)
output = response.json()
message = output['message']
    color = output.get('color', 'green')
click.secho(message, fg=color)
@click.group()
def cli():
pass
@cli.command()
def login():
username = click.prompt('Username')
password = click.prompt('Password', hide_input=True)
endpoint = '%sauth/' % BOTTLENOSE_API_URL
response = requests.post(endpoint, data={'username': username, 'password': password}, headers=get_headers())
output = response.json()
    if 'token' in output:
try:
tokenfile = os.path.join(os.environ['HOME'], '.bnose')
with open(tokenfile, 'w') as outfile:
outfile.write(output['token'])
os.chmod(tokenfile, 0o600)
except KeyError:
raise OSError('Could not find .bnose: $HOME is not set')
click.echo(output)
@cli.command()
@click.option('--memo', '-m', default='')
@click.option('--project', '-p', default='')
def start(memo, project):
"""Start worklog tracking."""
_request('worklog/start/', **{'memo': memo, 'project__slug': project})
@cli.command()
def pause():
"""Pause worklog tracking."""
_request('worklog/pause/')
@cli.command()
def resume():
"""Resume worklog tracking."""
_request('worklog/resume/')
@cli.command()
def status():
"""Status of the current tracking session."""
_request('worklog/status/')
@cli.command()
def stop():
"""Stop worklog tracking."""
_request('worklog/stop/')
@cli.command()
def log():
click.echo('Log')
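# Typical shell session, assuming the Bottlenose API is running locally and
# this CLI group is installed under the name `bnose` (the name is an
# assumption):
#
#   $ bnose login            # stores the token in ~/.bnose with mode 0600
#   $ bnose start -m "fixing the parser" -p myproject
#   $ bnose status
#   $ bnose stop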
|
#!/usr/bin/env python
# encoding: utf-8
# Written by Minh Nguyen and CBIG under MIT license:
# https://github.com/ThomasYeoLab/CBIG/blob/master/LICENSE.md
from __future__ import print_function, division
import time
from datetime import datetime
from dateutil.relativedelta import relativedelta
import numpy as np
import pandas as pd
def load_feature(feature_file_path):
"""
Load list of features from a text file
Features are separated by newline
"""
    with open(feature_file_path) as fh:
        return [line.strip() for line in fh]
def time_from(start):
""" Return duration from *start* to now """
duration = relativedelta(seconds=time.time() - start)
return '%dm %ds' % (duration.minutes, duration.seconds)
def str2date(string):
""" Convert string to datetime object """
return datetime.strptime(string, '%Y-%m-%d')
def has_data_mask(frame):
"""
    Check whether each row has any valid value (i.e. not NaN)
Args:
frame: Pandas data frame
Return:
(ndarray): boolean mask with the same number of rows as *frame*
True implies row has at least 1 valid value
"""
return ~frame.isnull().apply(np.all, axis=1)
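# e.g. for a frame whose feature columns hold rows [1.0, NaN], [NaN, NaN],
# [NaN, 3.0], has_data_mask returns array([True, False, True])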
def get_data_dict(frame, features):
"""
From a frame of all subjects, return a dictionary of frames
The keys are subjects' ID
The data frames are:
- sorted by *Month_bl* (which are integers)
- have empty rows dropped (empty row has no value in *features* list)
Args:
frame (Pandas data frame): data frame of all subjects
features (list of string): list of features
    Return:
        (dict): dictionary mapping each subject ID to that subject's frame
"""
ret = {}
frame_ = frame.copy()
frame_['Month_bl'] = frame_['Month_bl'].round().astype(int)
for subj in np.unique(frame_.RID):
subj_data = frame_[frame_.RID == subj].sort_values('Month_bl')
subj_data = subj_data[has_data_mask(subj_data[features])]
subj_data = subj_data.set_index('Month_bl', drop=True)
ret[subj] = subj_data.drop(['RID'], axis=1)
return ret
def build_pred_frame(prediction, outpath=''):
"""
Construct the forecast spreadsheet following TADPOLE format
Args:
prediction (dictionary): contains the following key/value pairs:
dates: dates of predicted timepoints for each subject
subjects: list of subject IDs
DX: list of diagnosis prediction for each subject
ADAS13: list of ADAS13 prediction for each subject
Ventricles: list of ventricular volume prediction for each subject
outpath (string): where to save the prediction frame
If *outpath* is blank, the prediction frame is not saved
Return:
(Pandas data frame): prediction frame
"""
table = pd.DataFrame()
dates = prediction['dates']
table['RID'] = prediction['subjects'].repeat([len(x) for x in dates])
table['Forecast Month'] = np.concatenate(
[np.arange(len(x)) + 1 for x in dates])
table['Forecast Date'] = np.concatenate(dates)
diag = np.concatenate(prediction['DX'])
table['CN relative probability'] = diag[:, 0]
table['MCI relative probability'] = diag[:, 1]
table['AD relative probability'] = diag[:, 2]
adas = np.concatenate(prediction['ADAS13'])
table['ADAS13'] = adas[:, 0]
table['ADAS13 50% CI lower'] = adas[:, 1]
table['ADAS13 50% CI upper'] = adas[:, 2]
vent = np.concatenate(prediction['Ventricles'])
table['Ventricles_ICV'] = vent[:, 0]
table['Ventricles_ICV 50% CI lower'] = vent[:, 1]
table['Ventricles_ICV 50% CI upper'] = vent[:, 2]
assert len(diag) == len(adas) == len(vent)
if outpath:
table.to_csv(outpath, index=False)
return table
def month_between(end, start):
""" Get duration (in months) between *end* and *start* dates """
# assert end >= start
diff = relativedelta(end, start)
months = 12 * diff.years + diff.months
to_next = relativedelta(end + relativedelta(months=1, days=-diff.days),
end).days
to_prev = diff.days
return months + (to_next < to_prev)
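# Worked example: month_between(datetime(2012, 3, 17), datetime(2012, 1, 20))
# gives diff = 1 month 26 days, to_next = 5, to_prev = 26, so the duration is
# rounded up to 2 months; the function rounds to the nearest whole month.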
def make_date_col(starts, duration):
"""
Return a list of list of dates
The start date of each list of dates is specified by *starts*
"""
date_range = [relativedelta(months=i) for i in range(duration)]
ret = []
for start in starts:
ret.append([start + d for d in date_range])
return ret
def get_index(fields, keys):
""" Get indices of *keys*, each of which is in list *fields* """
assert isinstance(keys, list)
assert isinstance(fields, list)
return [fields.index(k) for k in keys]
def to_categorical(y, nb_classes):
""" Convert list of labels to one-hot vectors """
if len(y.shape) == 2:
y = y.squeeze(1)
ret_mat = np.full((len(y), nb_classes), np.nan)
good = ~np.isnan(y)
ret_mat[good] = 0
ret_mat[good, y[good].astype(int)] = 1.
return ret_mat
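# e.g. to_categorical(np.array([0., np.nan, 2.]), 3) returns
#   [[ 1.,  0.,  0.],
#    [nan, nan, nan],
#    [ 0.,  0.,  1.]]
# (rows with NaN labels stay all-NaN instead of getting a one-hot vector)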
def log_result(result, path, verbose):
""" Output result to screen/file """
frame = pd.DataFrame([result])[['mAUC', 'bca', 'adasMAE', 'ventsMAE']]
if verbose:
print(frame)
if path:
frame.to_csv(path, index=False)
def PET_conv(value):
'''Convert PET measures from string to float '''
try:
return float(value.strip().strip('>'))
except ValueError:
return float(np.nan)
def Diagnosis_conv(value):
'''Convert diagnosis from string to float '''
if value == 'CN':
return 0.
if value == 'MCI':
return 1.
if value == 'AD':
return 2.
return float('NaN')
def DX_conv(value):
'''Convert change in diagnosis from string to float '''
if isinstance(value, str):
if value.endswith('Dementia'):
return 2.
if value.endswith('MCI'):
return 1.
if value.endswith('NL'):
return 0.
return float('NaN')
def add_ci_col(values, ci, lo, hi):
""" Add lower/upper confidence interval to prediction """
return np.clip(np.vstack([values, values - ci, values + ci]).T, lo, hi)
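# e.g. add_ci_col(np.array([10., 20.]), ci=2, lo=0, hi=100) returns
#   [[10.,  8., 12.],
#    [20., 18., 22.]]
# i.e. columns are (value, lower bound, upper bound), clipped to [lo, hi]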
def censor_d1_table(_table):
""" Remove problematic rows """
_table.drop(3229, inplace=True) # RID 2190, Month = 3, Month_bl = 0.45
_table.drop(4372, inplace=True) # RID 4579, Month = 3, Month_bl = 0.32
_table.drop(
8376, inplace=True) # Duplicate row for subject 1088 at 72 months
_table.drop(
8586, inplace=True) # Duplicate row for subject 1195 at 48 months
_table.loc[
12215,
'Month_bl'] = 48. # Wrong EXAMDATE and Month_bl for subject 4960
    _table.drop(10254, inplace=True)  # Abnormally small ICV for RID 4674
_table.drop(12245, inplace=True) # Row without measurements, subject 5204
def load_table(csv, columns):
""" Load CSV, only include *columns* """
table = pd.read_csv(csv, converters=CONVERTERS, usecols=columns)
censor_d1_table(table)
return table
# Converters for columns with non-numeric values
CONVERTERS = {
'CognitiveAssessmentDate': str2date,
'ScanDate': str2date,
'Forecast Date': str2date,
'EXAMDATE': str2date,
'Diagnosis': Diagnosis_conv,
'DX': DX_conv,
'PTAU_UPENNBIOMK9_04_19_17': PET_conv,
'TAU_UPENNBIOMK9_04_19_17': PET_conv,
'ABETA_UPENNBIOMK9_04_19_17': PET_conv
}
def get_baseline_prediction_start(frame):
""" Get baseline dates and dates when prediction starts """
one_month = relativedelta(months=1)
baseline = {}
start = {}
for subject in np.unique(frame.RID):
dates = frame.loc[frame.RID == subject, 'EXAMDATE']
baseline[subject] = min(dates)
start[subject] = max(dates) + one_month
return baseline, start
def get_mask(csv_path, use_validation):
""" Get masks from CSV file """
columns = ['RID', 'EXAMDATE', 'train', 'val', 'test']
frame = load_table(csv_path, columns)
train_mask = frame.train == 1
if use_validation:
pred_mask = frame.val == 1
else:
pred_mask = frame.test == 1
return train_mask, pred_mask, frame[pred_mask]
def read_csv(fpath):
""" Load CSV with converters """
return pd.read_csv(fpath, converters=CONVERTERS)
|
#!/usr/bin/env python
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# Copyright (c) 2015 Mozilla Corporation
# Author: [email protected]
# Requires:
# mozlibldap
from __future__ import print_function
import mozlibldap
import string
import json
import sys
LDAP_URL = 'ldap://ldap.db.scl3.mozilla.com'
LDAP_BIND_DN = '[email protected],o=com,dc=mozilla'
LDAP_BIND_PASSWD = "mysecretpassphrase"
def main():
lcli = mozlibldap.MozLDAP(LDAP_URL, LDAP_BIND_DN, LDAP_BIND_PASSWD)
searches = {}
# get a list of users that have a pubkey in ldap
users = lcli.get_all_enabled_users_attr('sshPublicKey')
for user_attr in users:
search = {}
user = user_attr[0].split(',', 1)[0].split('=', 1)[1]
print("current user: "+user, file=sys.stderr)
keys = user_attr[1]
if len(keys) == 0:
continue
contentre = '^((#.+)|(\s+)'
for pubkey in keys['sshPublicKey']:
if len(pubkey) < 5 or not (pubkey.startswith("ssh")):
continue
pubkey = string.join(pubkey.split(' ', 2)[:2], '\s')
pubkey = pubkey.replace('/', '\/')
pubkey = pubkey.replace('+', '\+')
pubkey = pubkey.replace('\r\n', '')
contentre += '|({pubkey}\s.+)'.format(pubkey=pubkey)
contentre += ')$'
search["names"] = []
search["names"].append("^authorized_keys$")
search["contents"] = []
search["contents"].append(contentre)
paths = []
try:
paths = get_search_paths(lcli, user)
except:
continue
if not paths or len(paths) < 1:
continue
search["paths"] = paths
search["options"] = {}
search["options"]["matchall"] = True
search["options"]["macroal"] = True
search["options"]["maxdepth"] = 1
search["options"]["mismatch"] = []
search["options"]["mismatch"].append("content")
print(json.dumps(search), file=sys.stderr)
searches[user+"_ssh_pubkeys"] = search
action = {}
action["name"] = "Investigate the content of authorized_keys for LDAP users"
action["target"] = "(name LIKE 'admin%' OR name LIKE 'ssh%' " + \
"OR name LIKE 'people%' OR name LIKE 'zlb%' OR name IN " + \
"('reviewboard-hg1.dmz.scl3.mozilla.com', 'hgssh.stage.dmz.scl3.mozilla.com', " + \
"'hgssh1.dmz.scl3.mozilla.com', 'hgssh2.dmz.scl3.mozilla.com', " + \
"'git1.dmz.scl3.mozilla.com', 'git1.private.scl3.mozilla.com', " + \
"'svn1.dmz.phx1.mozilla.com', 'svn2.dmz.phx1.mozilla.com', " + \
"'svn3.dmz.phx1.mozilla.com')) AND tags->>'operator'='IT' AND " + \
"mode='daemon' AND status='online'"
action["version"] = 2
action["operations"] = []
operation = {}
operation["module"] = "file"
operation["parameters"] = {}
operation["parameters"]["searches"] = searches
action["operations"].append(operation)
print(json.dumps(action, indent=4, sort_keys=True))
def get_search_paths(lcli, user):
paths = []
res = lcli.query("mail="+user, ['homeDirectory', 'hgHome',
'stageHome', 'svnHome'])
for attr in res[0][1]:
try:
paths.append(res[0][1][attr][0]+"/.ssh")
except:
continue
return paths
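# For a user "jdoe" with one RSA key, each generated search is shaped roughly
# like this (illustrative values only):
#   {"names": ["^authorized_keys$"],
#    "contents": ["^((#.+)|(\\s+)|(ssh-rsa\\sAAAA...\\s.+))$"],
#    "paths": ["/home/jdoe/.ssh"],
#    "options": {"matchall": true, "macroal": true, "maxdepth": 1,
#                "mismatch": ["content"]}}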
if __name__ == "__main__":
main()
|
#!/usr/bin/env python
from signal import *
import os
import time
import math
import logging as L
import sys
import MySQLdb
import RPi.GPIO as GPIO
import Adafruit_MAX31855.MAX31855 as MAX31855
from RPLCD import CharLCD
#Set up LCD
lcd = CharLCD(pin_rs=17, pin_rw=None, pin_e=27, pins_data=[12, 16, 20, 21],
numbering_mode=GPIO.BCM,
cols=20, rows=4, dotsize=8,
auto_linebreaks=True)
lcd.create_char(1, [0b01100,
0b10010,
0b10010,
0b01100,
0b00000,
0b00000,
0b00000,
0b00000])
lcd.create_char(2, [0b00000,
0b10000,
0b01000,
0b00100,
0b00010,
0b00001,
0b00000,
0b00000])
#Wait for LCD to start up - otherwise you get garbage
time.sleep(1)
# Set up MySQL Connection
SQLHost = '127.0.0.1'
SQLUser = 'piln'
SQLPass = 'p!lnp@ss'
SQLDB = 'PiLN'
AppDir = '/home/PiLN'
#Status File
StatFile = '/var/www/html/pilnstat.json'
# Set up logging
LogFile = time.strftime( AppDir + '/log/pilnfired.log' )
L.basicConfig(
filename=LogFile,
level=L.DEBUG,
format='%(asctime)s %(message)s'
)
# Global Variables
#LastErr = 0.0
#Integral = 0.0
ITerm = 0.0
LastProcVal = 0.0
SegCompStat = 0
LastTmp = 0.0
wheel = '-'
# MAX31855 Pins/Setup
CLK = 25
CS = 24
DO = 18
Sensor = MAX31855.MAX31855(CLK, CS, DO)
# Pin setup for relay
GPIO.setup(4, GPIO.OUT) ## Set up relay pin GPIO4 (BCM; physical pin 7) as output
GPIO.output(4,False) ## Turn relay off (GPIO4)
def clean(*args):
print "\nProgram ending! Cleaning up...\n"
GPIO.output(4,False) ## Turn off GPIO pin 4
lcd.close(clear=True)
time.sleep(0.5)
GPIO.cleanup() # this ensures a clean exit
print "All clean - Stopping.\n"
os._exit(0)
for sig in (SIGABRT, SIGINT, SIGTERM):
signal(sig, clean)
# Celsius to Fahrenheit
def CtoF(c):
return c * 9.0 / 5.0 + 32.0
# PID Update
def Update ( SetPoint, ProcValue, IMax, IMin, Window, Kp, Ki, Kd ):
L.debug( "Entering PID update with parameters SetPoint:%0.2f, ProcValue:%0.2f, IMax:%0.2f, IMin:%0.2f," %
( SetPoint, ProcValue, IMax, IMin ))
L.debug( " Window:%d, Kp: %0.3f, Ki: %0.3f, Kd: %0.3f" %
( Window, Kp, Ki, Kd ))
global ITerm, LastProcVal
Err = SetPoint - ProcValue
    ITerm += (Ki * Err)
if ITerm > IMax:
ITerm = IMax
elif ITerm < IMin:
ITerm = IMin
DInput = ProcValue - LastProcVal
    # Compute PID output
    Output = Kp * Err + ITerm - Kd * DInput
if Output > IMax:
Output = IMax
elif Output < IMin:
Output = IMin
#Remember for next time
LastProcVal = ProcValue
L.debug(
"Exiting PID update with parameters Error:%0.2f, ITerm:%0.2f, DInput:%0.2f, Output:%0.2f" %
( Err, ITerm, DInput, Output )
)
return Output
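# Illustrative single step, assuming ITerm and LastProcVal are still 0 and
# the derivative term is disabled:
#   Update(SetPoint=1000, ProcValue=900, IMax=100, IMin=0, Window=10,
#          Kp=0.5, Ki=0.1, Kd=0.0)
#   Err = 100 -> ITerm = 10, Output = 0.5*100 + 10 = 60
# The caller treats Output as the percentage of the window the relay stays on.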
# global LastErr, Integral
#
# Err = SetPoint - ProcValue
#
# Pterm = Kp * Err
#
# Dterm = Kd * ( Err - LastErr )
# LastErr = Err
#
# Integral+= Err
# if Integral > IMax:
# Integral = IMax
# elif Integral < IMin:
# Integral = IMin
# Iterm = Ki * Integral
#
# Output = Pterm + Iterm + Dterm
#
# L.debug(
# "Exiting PID update with parameters Error:%0.2f, Integral:%0.2f, Pterm:%0.2f, Iterm:%0.2f, Dterm:%0.2f, Output:%0.2f" %
# ( Err, Integral, Pterm, Iterm, Dterm, Output )
# )
#
# if Output > 100:
# Output = 100
# elif Output < 0:
# Output = 0
# if Output < 0:
# Output = 0
#
# return Output
# Loop to run each segment of the firing profile
def Fire(RunID,Seg,TargetTmp,Rate,HoldMin,Window,Kp,Ki,Kd):
L.info( "Entering Fire function with parameters RunID:%d, Seg:%d, TargetTmp:%d, Rate:%d," % ( RunID, Seg, TargetTmp, Rate ))
L.info( " HoldMin:%d, Window:%d" % ( HoldMin, Window ))
global SegCompStat
global wheel
HoldSec = HoldMin * 60
RampMin = 0.0
RampTmp = 0.0
ReadTmp = 0.0
LastTmp = 0.0
StartTmp = 0.0
TmpDif = 0.0
Steps = 0.0
StepTmp = 0.0
StartSec = 0.0
EndSec = 0.0
NextSec = 0.0
RunState = "Ramp"
Cnt = 0
RampTrg = 0
ReadTrg = 0
while RunState != "Stopped" and RunState != "Complete":
if time.time() >= NextSec:
Cnt += 1
NextSec = time.time() + Window
# Get temp
LastTmp = ReadTmp
ReadCTmp = Sensor.readTempC()
ReadTmp = CtoF(ReadCTmp)
ReadCITmp = Sensor.readInternalC()
ReadITmp = CtoF(ReadCITmp)
if math.isnan(ReadTmp) or ( abs( ReadTmp - LastTmp ) > ( 2 * Window ) ) or ReadTmp == 0 or ReadTmp > 2400:
ReadTmp = LastTmp
if RampTrg == 0:
RampTmp += StepTmp
if TmpDif > 0:
# Ramp temp reached target
if RampTmp >= TargetTmp and RampTrg == 0:
RampTmp = TargetTmp
RampTrg = 1
if ReadTrg == 1:
RunState = "Ramp complete/target temp reached"
else:
RunState = "Ramp complete"
# Read temp reached target
if ( ( TargetTmp - ReadTmp <= TargetTmp * 0.006 ) or ( ReadTmp >= TargetTmp ) ) and ReadTrg == 0:
ReadTrg = 1
EndSec = int(time.time()) + ( HoldMin * 60 )
L.info( "Set temp reached - End seconds set to %d" % EndSec )
if RampTrg == 1:
RunState = "Ramp complete/target temp reached"
else:
RunState = "Target temp reached"
elif TmpDif < 0:
# Ramp temp reached target
if RampTmp <= TargetTmp and RampTrg == 0:
RampTmp = TargetTmp
RampTrg = 1
if ReadTrg == 1:
RunState = "Ramp complete/target temp reached"
else:
RunState = "Ramp complete"
# Read temp reached target
if ( ( ReadTmp - TargetTmp <= TargetTmp * 0.006 ) or ( ReadTmp <= TargetTmp ) ) and ReadTrg == 0:
ReadTrg = 1
EndSec = int(time.time()) + ( HoldMin * 60 )
L.info( "Set temp reached - End seconds set to %d" % EndSec )
if RampTrg == 1:
RunState = "Ramp complete/target temp reached"
else:
RunState = "Target temp reached"
if StartTmp == 0:
StartTmp = ReadTmp
StartSec = int(time.time())
NextSec = StartSec + Window
TmpDif = TargetTmp - StartTmp
RampMin = ( abs (TmpDif) / Rate ) * 60
Steps = ( RampMin * 60 ) / Window
StepTmp = TmpDif / Steps
EndSec = StartSec + ( RampMin * 60 ) + ( HoldMin * 60 )
RampTmp = StartTmp + StepTmp
if ( TmpDif > 0 and RampTmp > TargetTmp ) or ( TmpDif < 0 and RampTmp < TargetTmp ):
RampTmp = TargetTmp
LastErr = 0.0
Integral = 0.0
# if TmpDif < 0:
# RunState = 2
L.info( "First pass of firing loop - TargetTmp:%0.2f, StartTmp:%0.2f, RampTmp:%0.2f, TmpDif:%0.2f," %
( TargetTmp, StartTmp, RampTmp, TmpDif ))
L.info( " RampMin:%0.2f, Steps:%d, StepTmp:%0.2f, Window:%d, StartSec:%d, EndSec:%d" %
( RampMin, Steps, StepTmp, Window, StartSec, EndSec ) )
#Output = Update(RampTmp,ReadTmp,50000,-50000,Window,Kp,Ki,Kd)
Output = Update(RampTmp,ReadTmp,100,0,Window,Kp,Ki,Kd)
CycleOnSec = Window * ( Output * 0.01 )
if CycleOnSec > Window:
CycleOnSec = Window
RemainSec = EndSec - int ( time.time() )
RemMin, RemSec = divmod(RemainSec, 60)
RemHr, RemMin = divmod(RemMin, 60)
RemainTime = "%d:%02d:%02d" % (RemHr, RemMin, RemSec)
L.debug( "RunID %d, Segment %d (loop %d) - RunState:%s," % ( RunID, Seg, Cnt, RunState ))
L.debug( " ReadTmp:%0.2f, RampTmp:%0.2f, TargetTmp:%0.2f, Output:%0.2f, CycleOnSec:%0.2f, RemainTime:%s" %
( ReadTmp, RampTmp, TargetTmp, Output, CycleOnSec, RemainTime )
)
if Output > 0:
L.debug("==>Relay On")
                GPIO.output(4,True) ## Relay on (GPIO4)
time.sleep(CycleOnSec)
if Output < 100:
L.debug("==>Relay Off")
                GPIO.output(4,False) ## Relay off (GPIO4)
            # Write status to file for reporting on web page
L.debug( "Write status information to status file %s:" % StatFile )
sfile = open(StatFile,"w+")
sfile.write('{\n' +
' "proc_update_utime": "' + str(int(time.time())) + '",\n' +
' "readtemp": "' + str(int(ReadTmp)) + '",\n' +
' "run_profile": "' + str(RunID) + '",\n' +
' "run_segment": "' + str(Seg) + '",\n' +
' "ramptemp": "' + str(int(RampTmp)) + '",\n' +
' "targettemp": "' + str(int(TargetTmp)) + '",\n' +
' "status": "' + str(RunState) + '",\n' +
' "segtime": "' + str(RemainTime) + '"\n' +
'}\n'
)
sfile.close()
if wheel == '-':
wheel = '\x02'
elif wheel == '\x02':
wheel = '|'
elif wheel == '|':
wheel = '/'
else:
wheel = '-'
lcd.clear()
lcd.cursor_pos = (0, 0)
lcd.write_string(u'Profile' + str(RunID) + ' Seg' + str(Seg) + ' ' + wheel )
lcd.cursor_pos = (1, 0)
lcd.write_string(u'Stat:' + str(RunState)[0:14] )
lcd.cursor_pos = (2, 0)
lcd.write_string(u'Tmp' + str(int(ReadTmp)) + '\x01 Ramp' + str(int(RampTmp)) + '\x01' )
lcd.cursor_pos = (3, 0)
lcd.write_string(u'Trgt' + str(int(TargetTmp)) + '\x01 Tm' + str(RemainTime) )
#lcd.write_string(u'Trgt ' + str(int(TargetTmp)) + '\x01,Tm ' )
#print 'Trgt ' + str(int(TargetTmp)) + ',Tm ' + str(RemainTime)
L.debug("Writing stats to Firing DB table...")
SQL = "INSERT INTO Firing (run_id, segment, datetime, set_temp, temp, int_temp, pid_output) VALUES ( '%d', '%d', '%s', '%.2f', '%.2f', '%.2f', '%.2f' )" % ( RunID, Seg, time.strftime('%Y-%m-%d %H:%M:%S'), RampTmp, ReadTmp, ReadITmp, Output )
try:
SQLCur.execute(SQL)
SQLConn.commit()
except:
SQLConn.rollback()
L.error("DB Update failed!")
# Check if profile is still in running state
RowsCnt = SQLCur.execute("select * from Profiles where state='Running' and run_id=%d" % RunID)
if RowsCnt == 0:
L.warn("Profile no longer in running state - exiting firing")
SegCompStat = 1
RunState = "Stopped"
if time.time() > EndSec and ReadTrg == 1:
RunState = "Complete"
# L.debug(
# "RunState:%s, TargetTmp:%0.2f, StartTmp:%0.2f, RampTmp:%0.2f, TmpDif:%0.2f, RampMin:%0.2f, Steps:%d, StepTmp:%0.2f, Window:%d, StartSec:%d, EndSec:%d" %
# ( RunState, TargetTmp, StartTmp, RampTmp, TmpDif, RampMin, Steps, StepTmp, Window, StartSec, EndSec )
# )
L.info("===START PiLN Firing Daemon===")
L.info("Polling for 'Running' firing profiles...")
while 1:
# Get temp
ReadCTmp = Sensor.readTempC()
ReadTmp = CtoF(ReadCTmp)
ReadCITmp = Sensor.readInternalC()
ReadITmp = CtoF(ReadCITmp)
if math.isnan(ReadTmp):
ReadTmp = LastTmp
    # Write status to file for reporting on web page
L.debug( "Write status information to status file %s:" % StatFile )
sfile = open(StatFile,"w+")
sfile.write('{\n' +
' "proc_update_utime": "' + str(int(time.time())) + '",\n' +
' "readtemp": "' + str(int(ReadTmp)) + '",\n' +
' "run_profile": "none",\n' +
' "run_segment": "n/a",\n' +
' "ramptemp": "n/a",\n' +
' "status": "n/a",\n' +
' "targettemp": "n/a"\n' +
'}\n'
)
sfile.close()
if wheel == '-':
wheel = '\x02'
elif wheel == '\x02':
wheel = '|'
elif wheel == '|':
wheel = '/'
else:
wheel = '-'
lcd.clear()
lcd.cursor_pos = (0, 0)
lcd.write_string(u'IDLE ' + wheel )
lcd.cursor_pos = (2, 0)
lcd.write_string(u'Temp ' + str(int(ReadTmp)) + '\x01')
#{
# "proc_update_utime": "1506396470",
# "readtemp": "145",
# "run_profile": "none",
# "run_segment": "n/a",
# "targettemp": "n/a"
#}
# Check for 'Running' firing profile
SQLConn = MySQLdb.connect(SQLHost, SQLUser, SQLPass, SQLDB);
SQLCur = SQLConn.cursor()
RowsCnt = SQLCur.execute("select * from Profiles where state='Running'")
if RowsCnt > 0:
Data = SQLCur.fetchone()
RunID = Data[0]
Kp = float(Data[3])
Ki = float(Data[4])
Kd = float(Data[5])
L.info("Run ID %d is active - starting firing profile" % RunID)
StTime=time.strftime('%Y-%m-%d %H:%M:%S')
L.debug("Update profile %d start time to %s" % ( RunID, StTime ) )
SQL = "UPDATE Profiles SET start_time='%s' where run_id=%d" % ( StTime, RunID )
try:
SQLCur.execute(SQL)
SQLConn.commit()
except:
SQLConn.rollback()
L.error("DB Update failed!")
# Get segments
L.info("Get segments for run ID %d" % RunID)
SQL="select * from Segments where run_id=%d" % RunID
SQLCur.execute(SQL)
ProfSegs = SQLCur.fetchall()
for Row in ProfSegs:
RunID = Row[0]
Seg = Row[1]
TargetTmp = Row[2]
Rate = Row[3]
HoldMin = Row[4]
Window = Row[5]
if SegCompStat == 1:
L.debug("Profile stopped - skipping segment %d" % Seg)
else:
L.info( "Run ID %d, segment %d parameters: Target Temp: %0.2f, Rate: %0.2f," %
( RunID, Seg, TargetTmp, Rate ))
L.info( " Hold Minutes: %d, Window Seconds: %d" %
( HoldMin, Window ))
StTime=time.strftime('%Y-%m-%d %H:%M:%S')
L.debug("Update run id %d, segment %d start time to %s" % ( RunID, Seg, StTime ) )
SQL = "UPDATE Segments SET start_time='%s' where run_id=%d and segment=%d" % ( StTime, RunID, Seg )
try:
SQLCur.execute(SQL)
SQLConn.commit()
except:
SQLConn.rollback()
L.error("DB Update failed!")
time.sleep(0.5)
Fire(RunID,Seg,TargetTmp,Rate,HoldMin,Window,Kp,Ki,Kd)
                GPIO.output(4,False) ## Make sure relay is off (GPIO4)
EndTime=time.strftime('%Y-%m-%d %H:%M:%S')
L.debug("Update run id %d, segment %d end time to %s" % ( RunID, Seg, EndTime ) )
SQL = "UPDATE Segments SET end_time='%s' where run_id=%d and segment=%d" % ( EndTime, RunID, Seg )
try:
SQLCur.execute(SQL)
SQLConn.commit()
except:
SQLConn.rollback()
L.error("DB Update failed!")
if SegCompStat == 1:
L.info("Profile stopped - Not updating profile end time")
else:
EndTime=time.strftime('%Y-%m-%d %H:%M:%S')
L.debug("Update profile end time to %s and state to 'Completed' for run id %d" % ( EndTime, RunID ) )
SQL = "UPDATE Profiles SET end_time='%s', state='Completed' where run_id=%d" % ( EndTime, RunID )
try:
SQLCur.execute(SQL)
SQLConn.commit()
except:
SQLConn.rollback()
L.error("DB Update failed!")
SegCompStat = 0
L.info("Polling for 'Running' firing profiles...")
SQLConn.close()
time.sleep(2)
|
# A LXD inventory that idempotently provisions LXD containers. You could
# probably do something similar with cloud APIs if so inclined.
from subprocess import check_output, check_call, CalledProcessError
containers=['hkp1', 'hkp2']
addrs=[]
def ensure_container(name):
try:
check_output(['lxc', 'info', name])
except CalledProcessError:
lp_user = check_output(['bzr', 'lp-login']).decode().strip()
check_call(['lxc', 'launch', 'ubuntu:bionic', name])
check_call(['lxc', 'exec', name, '--', 'bash', '-c', 'while [ ! -f /var/lib/cloud/instance/boot-finished ]; do sleep 1; done'])
check_call(['lxc', 'exec', name, '--', 'bash', '-c', 'sudo su - ubuntu -c "ssh-import-id {}"'.format(lp_user)])
addrs.append(check_output(['lxc', 'exec', name, '--', 'bash', '-c', "ip addr show eth0 | awk '/inet / {print $2}' | sed 's_/.*__'"]).decode().strip())
for name in containers:
ensure_container(name)
lxd_servers = [(addr, {'ssh_user': 'ubuntu', 'peers': [p for p in addrs if p != addr]}) for addr in addrs]
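# With the two containers above, lxd_servers ends up shaped like this
# (addresses are illustrative):
#   [('10.0.3.11', {'ssh_user': 'ubuntu', 'peers': ['10.0.3.12']}),
#    ('10.0.3.12', {'ssh_user': 'ubuntu', 'peers': ['10.0.3.11']})]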
|
# -*- coding: utf-8 -*-
from typing import cast, Any, Dict
import mock
import json
import requests
from zerver.lib.outgoing_webhook import (
get_service_interface_class,
process_success_response,
)
from zerver.lib.test_classes import ZulipTestCase
from zerver.lib.topic import TOPIC_NAME
from zerver.models import get_realm, get_user, SLACK_INTERFACE
class TestGenericOutgoingWebhookService(ZulipTestCase):
def setUp(self) -> None:
self.event = {
u'command': '@**test**',
u'message': {
'content': '@**test**',
},
u'trigger': 'mention',
}
self.bot_user = get_user("[email protected]", get_realm("zulip"))
service_class = get_service_interface_class('whatever') # GenericOutgoingWebhookService
self.handler = service_class(service_name='test-service',
token='abcdef',
user_profile=self.bot_user)
def test_process_success_response(self) -> None:
class Stub:
def __init__(self, text: str) -> None:
self.text = text
def make_response(text: str) -> requests.Response:
return cast(requests.Response, Stub(text=text))
event = dict(
user_profile_id=99,
message=dict(type='private')
)
service_handler = self.handler
response = make_response(text=json.dumps(dict(content='whatever')))
with mock.patch('zerver.lib.outgoing_webhook.send_response_message') as m:
process_success_response(
event=event,
service_handler=service_handler,
response=response,
)
self.assertTrue(m.called)
response = make_response(text='unparsable text')
with mock.patch('zerver.lib.outgoing_webhook.fail_with_message') as m:
process_success_response(
event=event,
service_handler=service_handler,
response=response
)
self.assertTrue(m.called)
def test_build_bot_request(self) -> None:
request_data = self.handler.build_bot_request(self.event)
request_data = json.loads(request_data)
self.assertEqual(request_data['data'], "@**test**")
self.assertEqual(request_data['token'], "abcdef")
self.assertEqual(request_data['message'], self.event['message'])
def test_process_success(self) -> None:
response = dict(response_not_required=True) # type: Dict[str, Any]
success_response = self.handler.process_success(response, self.event)
self.assertEqual(success_response, None)
response = dict(response_string='test_content')
success_response = self.handler.process_success(response, self.event)
self.assertEqual(success_response, dict(content='test_content'))
response = dict(
content='test_content',
widget_content='test_widget_content',
red_herring='whatever',
)
success_response = self.handler.process_success(response, self.event)
expected_response = dict(
content='test_content',
widget_content='test_widget_content',
)
self.assertEqual(success_response, expected_response)
response = dict()
success_response = self.handler.process_success(response, self.event)
self.assertEqual(success_response, None)
class TestSlackOutgoingWebhookService(ZulipTestCase):
def setUp(self) -> None:
self.stream_message_event = {
u'command': '@**test**',
u'user_profile_id': 12,
u'service_name': 'test-service',
u'trigger': 'mention',
u'message': {
'content': 'test_content',
'type': 'stream',
'sender_realm_str': 'zulip',
'sender_email': '[email protected]',
'stream_id': '123',
'display_recipient': 'integrations',
'timestamp': 123456,
'sender_id': 21,
'sender_full_name': 'Sample User',
}
}
self.private_message_event = {
u'user_profile_id': 24,
u'service_name': 'test-service',
u'command': 'test content',
u'trigger': 'private_message',
u'message': {
'sender_id': 3,
'sender_realm_str': 'zulip',
'timestamp': 1529821610,
'sender_email': '[email protected]',
'type': 'private',
'sender_realm_id': 1,
'id': 219,
TOPIC_NAME: 'test',
'content': 'test content',
}
}
service_class = get_service_interface_class(SLACK_INTERFACE)
self.handler = service_class(token="abcdef",
user_profile=None,
service_name='test-service')
def test_build_bot_request_stream_message(self) -> None:
request_data = self.handler.build_bot_request(self.stream_message_event)
self.assertEqual(request_data[0][1], "abcdef") # token
self.assertEqual(request_data[1][1], "zulip") # team_id
self.assertEqual(request_data[2][1], "zulip.com") # team_domain
self.assertEqual(request_data[3][1], "123") # channel_id
self.assertEqual(request_data[4][1], "integrations") # channel_name
self.assertEqual(request_data[5][1], 123456) # timestamp
self.assertEqual(request_data[6][1], 21) # user_id
self.assertEqual(request_data[7][1], "Sample User") # user_name
self.assertEqual(request_data[8][1], "@**test**") # text
self.assertEqual(request_data[9][1], "mention") # trigger_word
self.assertEqual(request_data[10][1], 12) # user_profile_id
@mock.patch('zerver.lib.outgoing_webhook.fail_with_message')
def test_build_bot_request_private_message(self, mock_fail_with_message: mock.Mock) -> None:
request_data = self.handler.build_bot_request(self.private_message_event)
self.assertIsNone(request_data)
self.assertTrue(mock_fail_with_message.called)
def test_process_success(self) -> None:
response = dict(response_not_required=True) # type: Dict[str, Any]
success_response = self.handler.process_success(response, self.stream_message_event)
self.assertEqual(success_response, None)
response = dict(text='test_content')
success_response = self.handler.process_success(response, self.stream_message_event)
self.assertEqual(success_response, dict(content='test_content'))
|
import re, difflib
def merge_group(rows, func, start=True, end=True):
    l, r, s = rows[0]
    first = ['',' class="first"'][start]
    last = ['',' class="last"'][end]
    if len(rows) == 1:
        if start and end:
            return LINE_FORMAT % func(' class="first last"', l, r)
        else:
            return LINE_FORMAT % func(first+last, l, r)
    html = LINE_FORMAT % func(first, l, r)
    for i in range(1, len(rows)-1):
        l, r, s = rows[i]
        html += LINE_FORMAT % func('', l, r)
    l, r, s = rows[-1]
    html += LINE_FORMAT % func(last, l, r)
    return html
def make_table(table_id, header, fromlines, tolines, context=None, versions=['old', 'new']):
diff = list(difflib._mdiff(fromlines, tolines, context))
if not diff:
return None
same = lambda c, l, r: (c, l[0], r[0], 'l', format_line(l[1]))
add = lambda c, l, r: (c, '', r[0], 'r', format_line(r[1]))
sub = lambda c, l, r: (c, l[0], '', 'l', format_line(l[1]))
html = TABLE_HEADER % tuple([table_id, header] + versions)
for type, start, end in group_types(diff):
if type == 'same':
html += '<tbody>%s</tbody>\n' % \
merge_group(diff[start:end], same)
elif type == 'add':
html += '<tbody class="add">%s</tbody>\n' % \
merge_group(diff[start:end], add)
elif type == 'del':
html += '<tbody class="rem">%s</tbody>\n' % \
merge_group(diff[start:end], sub)
elif type == 'mod':
html += '<tbody class="mod">%s%s</tbody>\n' % \
(merge_group(diff[start:end], sub, end=False),
merge_group(diff[start:end], add, start=False))
elif type == 'skipped':
html += '<tbody class="skipped"><tr><th>...</th><th>...</th><td> </td></tr></tbody>\n'
html += TABLE_FOOTER
return html
def get_type(left, right, status):
if not status:
if left or right:
return 'same'
else:
return 'skipped'
l_num, l_line = left
r_num, r_line = right
if l_num and not r_num:
return 'del'
elif r_num and not l_num:
return 'add'
else:
return 'mod'
def group_types(diff):
items = [get_type(l,r,s) for l,r,s in diff]
group = []
    if not items:
        return group
start, current = 0, items[0]
for i in range(1, len(diff)):
if items[i] != current:
group.append( (current, start, i) )
current = items[i]
start = i
group.append( (current, start, len(diff)) )
return group
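# e.g. if the per-line types come out as ['same', 'same', 'mod', 'add'],
# group_types returns [('same', 0, 2), ('mod', 2, 3), ('add', 3, 4)],
# i.e. (type, start, end) runs suitable for slicing the diff list.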
REPLACE_CHARS = [
('&', '&'),
('<', '<'),
('>', '>'),
(' ', ' '),
('"', '"'),
('\0+', '<span class="ins">'),
('\0-', '<span class="del">'),
('\0^', '<span class="chg">'),
('\1', '</span>')
]
SINGLE_CHANGE = re.compile("^\0[\+\-\^]([^\0]+)\1\n?$")
def format_line(text):
text = text.replace('\n', '')
match = SINGLE_CHANGE.match(text)
if match:
text = match.group(1)
for src, replace in REPLACE_CHARS:
text = text.replace(src, replace)
return text
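# e.g. format_line('x \0+y\1\n') -> 'x <span class="ins">y</span>'
# whereas a line that is one single change, e.g. '\0+new\1\n', matches
# SINGLE_CHANGE and has its markers stripped entirely, coming back as 'new'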
## the majority of the CSS and markup has been used from Trac
TABLE_HEADER = """
<li class='entry' id='%s'>
<h2>%s</h2>
<table class="inline" summary="Differences" cellspacing="0">
<colgroup><col class="lineno" /><col class="lineno" /><col class="content" /></colgroup>
<thead><th>%s</th><th>%s</th><th> </th></thead>
"""
TABLE_FOOTER = """
</table>
</li>
"""
LINE_FORMAT = "<tr%s><th>%s</th><th>%s</th><td class='%s'><span>%s</span> </td></tr>"
HTML_HEADER = """
<html><head><style type='text/css'>
/* Diff preferences */
#prefs fieldset { margin: 1em .5em .5em; padding: .5em 1em 0 }
/* Diff/change overview */
#overview {
line-height: 130%;
margin-top: 1em;
padding: .5em;
}
#overview dt {
font-weight: bold;
padding-right: .25em;
position: absolute;
left: 0;
text-align: right;
width: 7.75em;
}
#overview dd { margin-left: 8em }
/* Colors for change types */
#chglist .edit, #overview .mod, .diff #legend .mod { background: #fd8 }
#chglist .delete, #overview .rem, .diff #legend .rem { background: #f88 }
#chglist .add, #overview .add, .diff #legend .add { background: #bfb }
#chglist .copy, #overview .cp, .diff #legend .cp { background: #88f }
#chglist .move, #overview .mv, .diff #legend .mv { background: #ccc }
#chglist .unknown { background: #fff }
/* Legend for diff colors */
.diff #legend {
float: left;
font-size: 9px;
line-height: 1em;
margin: 1em 0;
padding: .5em;
}
.diff #legend h3 { display: none; }
.diff #legend dt {
background: #fff;
border: 1px solid #999;
float: left;
margin: .1em .5em .1em 2em;
overflow: hidden;
width: .8em; height: .8em;
}
.diff #legend dl, .diff #legend dd {
display: inline;
float: left;
padding: 0;
margin: 0;
margin-right: .5em;
}
/* Styles for the list of diffs */
.diff ul.entries { clear: both; margin: 0; padding: 0 }
.diff li.entry {
background: #f7f7f7;
border: 1px solid #d7d7d7;
list-style-type: none;
margin: 0 0 2em;
padding: 2px;
position: relative;
}
.diff h2 {
color: #333;
font-size: 14px;
letter-spacing: normal;
margin: 0 auto;
padding: .1em 0 .25em .5em;
}
/* Styles for the actual diff tables (side-by-side and inline) */
.diff table {
border: 1px solid #ddd;
border-spacing: 0;
border-top: 0;
empty-cells: show;
font-size: 12px;
line-height: 130%;
padding: 0;
margin: 0 auto;
width: 100%;
}
.diff table col.lineno { width: 4em }
.diff table th {
border-right: 1px solid #d7d7d7;
border-bottom: 1px solid #998;
font-size: 11px;
}
.diff table thead th {
background: #eee;
border-top: 1px solid #d7d7d7;
color: #999;
padding: 0 .25em;
text-align: center;
white-space: nowrap;
}
.diff table tbody th {
background: #eed;
color: #886;
font-weight: normal;
padding: 0 .5em;
text-align: right;
vertical-align: top;
}
.diff table tbody td {
background: #fff;
font: normal 11px monospace;
overflow: hidden;
padding: 1px 2px;
vertical-align: top;
}
.diff table tbody.skipped td {
background: #f7f7f7;
border: 1px solid #d7d7d7;
}
.diff table td span.del, .diff table td span.ins { text-decoration: none }
.diff table td span.del { color: #600 }
.diff table td span.ins { color: #060 }
/* Styles for the inline diff */
.diff table.inline tbody.mod td.l, .diff table.inline tbody.rem td.l {
background: #fdd;
border-color: #c00;
border-style: solid;
border-width: 0 1px 0 1px;
}
.diff table.inline tbody.mod td.r, .diff table.inline tbody.add td.r {
background: #dfd;
border-color: #0a0;
border-style: solid;
border-width: 0 1px 0 1px;
}
.diff table.inline tbody.mod tr.first td.l,
.diff table.inline tbody.rem tr.first td.l { border-top-width: 1px }
.diff table.inline tbody.mod tr.last td.l,
.diff table.inline tbody.rem tr.last td.l { border-bottom-width: 1px }
.diff table.inline tbody.mod tr.first td.r,
.diff table.inline tbody.add tr.first td.r { border-top-width: 1px }
.diff table.inline tbody.mod tr.last td.r,
.diff table.inline tbody.add tr.last td.r { border-bottom-width: 1px }
.diff table.inline tbody.mod td span.del { background: #e99; color: #000 }
.diff table.inline tbody.mod td span.ins { background: #9e9; color: #000 }
.diff table.inline tbody.mod td span.chg { background: #ee9; color: #000 }
/* Styles for the side-by-side diff */
.diff table.sidebyside colgroup.content { width: 50% }
.diff table.sidebyside tbody.mod td.l { background: #fe9 }
.diff table.sidebyside tbody.mod td.r { background: #fd8 }
.diff table.sidebyside tbody.add td.l { background: #dfd }
.diff table.sidebyside tbody.add td.r { background: #cfc }
.diff table.sidebyside tbody.rem td.l { background: #f88 }
.diff table.sidebyside tbody.rem td.r { background: #faa }
.diff table.sidebyside tbody.mod span.del, .diff table.sidebyside tbody.mod span.ins, .diff table.sidebyside tbody.mod span.chg {
background: #fc0;
}
/* Changeset overview */
#overview .files { padding-top: 2em }
#overview .files ul { margin: 0; padding: 0 }
#overview .files li { list-style-type: none }
#overview .files li .comment { display: none }
#overview .files li div {
border: 1px solid #999;
float: left;
margin: .2em .5em 0 0;
overflow: hidden;
width: .8em; height: .8em;
}
#overview div.add div, #overview div.cp div, #overview div.mv div {
border: 0;
margin: 0;
float: right;
width: .35em;
}
span.ver {font: normal 11px monospace;}
</style></head><body>
"""
HTML_FOOTER = """
</body>
</html>
"""
|
#!/usr/bin/env python
#BMDLVIEW: Views Microsoft 3D Movie Maker models (BMDLs)
#Copyright (C) 2004-2015 Foone Turing
#
#This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version.
#
#This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
#You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
from ctypes import *
import os
class Decompressor(object):
def __init__(self, exe_file):
decomp_proxy_dll = os.path.join(os.path.dirname(os.path.abspath(__file__)),"DecompProxy.dll")
decompdll = cdll.LoadLibrary(decomp_proxy_dll)
DLLInit=getattr(decompdll,'DP_Init')
DLLInit.argtypes=[c_char_p]
DLLInit.restype=c_void_p
self.DLLShutdown=getattr(decompdll,'DP_Shutdown')
self.DLLShutdown.argtypes=[c_void_p]
self.GetSize=GetSize=getattr(decompdll,'DP_GetSize')
GetSize.argtypes=[c_char_p,c_int]
self.DLLDecompress=DLLDecompress=getattr(decompdll,'DP_DecompressSmart')
DLLDecompress.argtypes=[c_void_p,c_char_p,c_int,c_char_p]
ret = self.ctx = DLLInit(exe_file)
if not ret:
raise OSError("Failed to initialize decompression")
    def shutdown(self):
        self.DLLShutdown(self.ctx)
def decompress(self, compressed_string):
length=self.GetSize(compressed_string,len(compressed_string))
if length<=0:
return None
outbuffer=c_buffer(length)
if not self.DLLDecompress(self.ctx, compressed_string,len(compressed_string),outbuffer):
return None
else:
return str(outbuffer.raw)
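# Minimal usage sketch; '3dmovie.exe' and 'model.dat' are hypothetical paths
# to a 3D Movie Maker executable and a compressed chunk extracted from it.
if __name__ == '__main__':
    d = Decompressor('3dmovie.exe')
    try:
        data = d.decompress(open('model.dat', 'rb').read())
        print(len(data) if data is not None else 'decompression failed')
    finally:
        d.shutdown()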
|
#!/usr/bin/python
# Copyright (c) 2014-2015 Cedric Bellegarde <[email protected]>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from gi.repository import Gtk, Gdk, GLib
from cgi import escape
from gettext import gettext as _
from lollypop.define import Lp, ArtSize, Type
from lollypop.utils import seconds_to_string
# Show a fullscreen window showing current track context
class FullScreen(Gtk.Window):
"""
Init window and set transient for parent
@param: parent as Gtk.window
"""
def __init__(self, parent):
Gtk.Window.__init__(self)
self._timeout = None
self._seeking = False
self._signal1_id = None
self._signal2_id = None
self.set_transient_for(parent)
self.set_skip_taskbar_hint(True)
self.set_skip_pager_hint(True)
builder = Gtk.Builder()
builder.add_from_resource('/org/gnome/Lollypop/FullScreen.ui')
main_widget = builder.get_object('fs')
self.add(main_widget)
self._prev_btn = builder.get_object('prev_btn')
self._prev_btn.connect('clicked', self._on_prev_btn_clicked)
self._play_btn = builder.get_object('play_btn')
self._play_btn.connect('clicked', self._on_play_btn_clicked)
self._next_btn = builder.get_object('next_btn')
self._next_btn.connect('clicked', self._on_next_btn_clicked)
self._play_image = builder.get_object('play_image')
self._pause_image = builder.get_object('pause_image')
close_btn = builder.get_object('close_btn')
close_btn.connect('clicked', self._destroy)
self._cover = builder.get_object('cover')
self._title = builder.get_object('title')
self._artist = builder.get_object('artist')
self._album = builder.get_object('album')
self._next = builder.get_object('next')
self._next_cover = builder.get_object('next_cover')
self._progress = builder.get_object('progress_scale')
self._progress.connect('button-release-event',
self._on_progress_release_button)
self._progress.connect('button-press-event',
self._on_progress_press_button)
self._timelabel = builder.get_object('playback')
self._total_time_label = builder.get_object('duration')
self.connect('key-release-event', self._on_key_release_event)
"""
Init signals, set color and go party mode if nothing is playing
"""
def do_show(self):
is_playing = Lp.player.is_playing()
self._signal1_id = Lp.player.connect('current-changed',
self._on_current_changed)
self._signal2_id = Lp.player.connect('status-changed',
self._on_status_changed)
if is_playing:
self._change_play_btn_status(self._pause_image, _('Pause'))
self._on_current_changed(Lp.player)
else:
Lp.player.set_party(True)
if not self._timeout:
self._timeout = GLib.timeout_add(1000, self._update_position)
Gtk.Window.do_show(self)
self._update_position()
self.fullscreen()
"""
Remove signals and unset color
"""
def do_hide(self):
if self._signal1_id:
Lp.player.disconnect(self._signal1_id)
self._signal1_id = None
if self._signal2_id:
Lp.player.disconnect(self._signal2_id)
self._signal2_id = None
if self._timeout:
GLib.source_remove(self._timeout)
self._timeout = None
#######################
# PRIVATE #
#######################
"""
Update View for current track
- Cover
- artist/title
- reset progress bar
- update time/total labels
@param player as Player
"""
def _on_current_changed(self, player):
if player.current_track.id is None:
pass # Impossible as we force play on show
else:
if Lp.player.current_track.id == Type.RADIOS:
self._prev_btn.set_sensitive(False)
self._next_btn.set_sensitive(False)
self._timelabel.hide()
self._total_time_label.hide()
self._progress.hide()
cover = Lp.art.get_radio(player.current_track.artist,
ArtSize.MONSTER)
else:
self._prev_btn.set_sensitive(True)
self._next_btn.set_sensitive(True)
self._timelabel.show()
self._total_time_label.show()
self._progress.show()
cover = Lp.art.get_album(player.current_track.album_id,
ArtSize.MONSTER)
self._cover.set_from_pixbuf(cover)
del cover
album = player.current_track.album
if player.current_track.year != '':
album += " (%s)" % player.current_track.year
self._title.set_text(player.current_track.title)
self._artist.set_text(player.current_track.artist)
self._album.set_text(album)
next_cover = Lp.art.get_album(player.next_track.album_id,
ArtSize.MEDIUM)
self._next_cover.set_from_pixbuf(next_cover)
del next_cover
self._next.set_markup("<b>%s</b> - %s" %
(escape(player.next_track.artist),
escape(player.next_track.title)))
self._progress.set_value(1.0)
self._progress.set_range(0.0, player.current_track.duration * 60)
self._total_time_label.set_text(
seconds_to_string(player.current_track.duration))
self._timelabel.set_text("0:00")
"""
Destroy window if Esc
@param widget as Gtk.Widget
@param event as Gdk.event
"""
def _on_key_release_event(self, widget, event):
if event.keyval == Gdk.KEY_Escape:
self.destroy()
"""
Go to prev track
@param widget as Gtk.Button
"""
def _on_prev_btn_clicked(self, widget):
Lp.player.prev()
"""
Play/pause
@param widget as Gtk.Button
"""
def _on_play_btn_clicked(self, widget):
if Lp.player.is_playing():
Lp.player.pause()
widget.set_image(self._play_image)
else:
Lp.player.play()
widget.set_image(self._pause_image)
"""
Go to next track
@param widget as Gtk.Button
"""
def _on_next_btn_clicked(self, widget):
Lp.player.next()
"""
Update buttons and progress bar
@param obj as unused
"""
def _on_status_changed(self, obj):
is_playing = Lp.player.is_playing()
if is_playing and not self._timeout:
self._timeout = GLib.timeout_add(1000, self._update_position)
self._change_play_btn_status(self._pause_image, _("Pause"))
elif not is_playing and self._timeout:
GLib.source_remove(self._timeout)
self._timeout = None
self._change_play_btn_status(self._play_image, _("Play"))
"""
On press, mark player as seeking
@param unused
"""
def _on_progress_press_button(self, scale, data):
self._seeking = True
"""
Callback for scale release button
Seek player to scale value
@param scale as Gtk.Scale, data as unused
"""
def _on_progress_release_button(self, scale, data):
value = scale.get_value()
self._seeking = False
self._update_position(value)
Lp.player.seek(value/60)
"""
Update play button with image and status as tooltip
@param image as Gtk.Image
@param status as str
"""
def _change_play_btn_status(self, image, status):
self._play_btn.set_image(image)
self._play_btn.set_tooltip_text(status)
"""
Update progress bar position
@param value as int
"""
def _update_position(self, value=None):
if not self._seeking and self._progress.is_visible():
if value is None:
value = Lp.player.get_position_in_track()/1000000
self._progress.set_value(value)
self._timelabel.set_text(seconds_to_string(value/60))
return True
"""
Destroy self
@param widget as Gtk.Button
"""
def _destroy(self, widget):
self.destroy()
|
import unittest
from monitor.metrics.DMIDecode import bios,system, chassis,cache, portConnector,\
systemSlot, onBoardDevice, oemString, systemConfigurationOptions,\
physicalMemoryArray, memoryDevice, memoryError32Bit,\
memoryArrayMappedAddress, memoryDeviceMappedAddress, voltageProbe,\
coolingDevice, temperatureProbe, electricalCurrentProbe, systemBoot,\
managementDevice, managementDeviceComponent, systemPowerSupply,\
DMIDecodeMonitorSource
from core.MetricValue import MultiMetricValue,BatchMultiMetricValue
from metrics.DMIDecode import board, processor, onboardDevice
# Set the dmidecode command
DMIDecodeMonitorSource._cmd = "E:/anyonedev/dmidecode/dmidecode.exe"
class BiosMonitorSourceTest(unittest.TestCase):
def test(self):
monitorSource = bios("bios")
metricValue = monitorSource.sample(None)
        self.assertTrue(isinstance(metricValue, MultiMetricValue), "unexpected sample type")
metrics = ["Version","_title","ROM_Size","BIOS_Revision","Release_Date"]
for metric in metrics:
value = metricValue.getValue(metric)
            self.assertNotEqual(value, None, "bios monitor source does not contain metric value ["+metric+"]")
class SystemMonitorSourceTest(unittest.TestCase):
def test(self):
monitorSource = system("system")
metricValue = monitorSource.sample(None)
        self.assertTrue(isinstance(metricValue, MultiMetricValue), "unexpected sample type")
metrics = ["Version","_title","UUID","Product_Name","Wakeup_Type","Serial_Number",
"Manufacturer"]
for metric in metrics:
value = metricValue.getValue(metric)
            self.assertNotEqual(value, None, "system monitor source does not contain metric value ["+metric+"]")
class BoardMonitorSourceTest(unittest.TestCase):
def test(self):
metricValue = board("board").sample(None)
        self.assertTrue(isinstance(metricValue, MultiMetricValue), "unexpected sample type")
metrics = ["Version","_title","Type","Asset_Tag","Serial_Number","Contained_Object_Handles",
"Location_In_Chassis","Chassis_Handle","Product_Name","Manufacturer"]
for metric in metrics:
value = metricValue.getValue(metric)
            self.assertNotEqual(value, None, "board monitor source does not contain metric value ["+metric+"]")
class ChassisMonitorSourceTest(unittest.TestCase):
def test(self):
metricValue = chassis("chassis").sample(None)
        self.assertTrue(isinstance(metricValue, MultiMetricValue), "unexpected sample type")
metrics = ["_title","Type","Lock","Version","Serial_Number","Asset_Tag",
"Boot-up_State","Power_Supply_State","Thermal_State","Security_State",
"OEM_Infomation","Height","Number_Of_Power_Cords","Contained_Elements"]
for metric in metrics:
value = metricValue.getValue(metric)
            self.assertNotEqual(value, None, "chassis monitor source does not contain metric value ["+metric+"]")
class ProcessorMonitorSourceTest(unittest.TestCase):
def test(self):
metricValue = processor("processor").sample(None)
        self.assertTrue(isinstance(metricValue, BatchMultiMetricValue), "unexpected sample type")
metrics = ["Type","_title","ID","L2_Cache_Handle","Socket_Designation","Core_Enabled",
"Asset_Tag","Max_Speed","Serial_Number","Manufacturer","Thread_Count",
"Current_Speed","Family","External_Clock","L3_Cache_Handle","L1_Cache_Handle",
"Version","Status","Voltage","Core_Count","Upgrade","Part_Number"]
#print(len(metricValue.getValues()))
for values in metricValue.getValues():
for metric in metrics:
value = values.getValue(metric)
#print(value)
                self.assertNotEqual(value, None, "processor monitor source does not contain metric value ["+metric+"]")
class CacheMonitorSourceTest(unittest.TestCase):
def test(self):
metricValue = cache("cache").sample(None)
        self.assertTrue(isinstance(metricValue, MultiMetricValue), "unexpected sample type")
metrics = ["_title","Socket_Designation","Configuration","Operational_Mode",
"Location","Installed_Size","Maximum_Size","Installed_SRAM_Type",
"Speed","Error_Correction_Type","System_Type","Associativity"]
for metric in metrics:
value = metricValue.getValue(metric)
            self.assertNotEqual(value, None, "cache monitor source does not contain metric value ["+metric+"]")
class PortConnectorMonitorSourceTest(unittest.TestCase):
def test(self):
metricValue = portConnector("portConnector").sample(None)
        self.assertTrue(isinstance(metricValue, BatchMultiMetricValue), "unexpected sample type")
metrics = ["_title","Internal_Reference_Designator","Internal_Connector_Type",
"External_Reference_Designator","External_Connector_Type","Port_Type"]
#print(len(metricValue.getValues()))
for values in metricValue.getValues():
for metric in metrics:
value = values.getValue(metric)
#print(value)
                self.assertNotEqual(value, None, "portConnector monitor source does not contain metric value ["+metric+"]")
class SystemSlotMonitorSourceTest(unittest.TestCase):
def test(self):
metricValue = systemSlot("systemSlot").sample(None)
        self.assertTrue(isinstance(metricValue, BatchMultiMetricValue), "unexpected sample type")
metrics = ["_title","Designation","Type","Current_Usage",
"Length","ID","Bus_Address"]
#print(len(metricValue.getValues()))
for values in metricValue.getValues():
for metric in metrics:
value = values.getValue(metric)
#print(value)
                self.assertNotEqual(value, None, "systemSlot monitor source does not contain metric value ["+metric+"]")
class OnBoardDeviceMonitorSourceTest(unittest.TestCase):
def test(self):
metricValue = onBoardDevice("onBoardDevice").sample(None)
        self.assertTrue(isinstance(metricValue, BatchMultiMetricValue), "unexpected sample type")
metrics = ["_title","Type","Status","Description"]
#print(len(metricValue.getValues()))
for values in metricValue.getValues():
for metric in metrics:
value = values.getValue(metric)
#print(value)
                self.assertNotEqual(value, None, "onboardDevice monitor source does not contain metric value ["+metric+"]")
class OEMStringMonitorSourceTest(unittest.TestCase):
def test(self):
metricValue = oemString("OEMString").sample(None)
        self.assertTrue(isinstance(metricValue, MultiMetricValue), "unexpected sample type")
metrics = ["_title","String_1"]
for metric in metrics:
value = metricValue.getValue(metric)
            self.assertNotEqual(value, None, "OEMString monitor source does not contain metric value ["+metric+"]")
class SystemConfigurationOptionsMonitorSourceTest(unittest.TestCase):
def test(self):
metricValue = systemConfigurationOptions("systemConfigurationOptions").sample(None)
        self.assertTrue(isinstance(metricValue, MultiMetricValue), "unexpected sample type")
metrics = ["_title","Option_1"]
for metric in metrics:
value = metricValue.getValue(metric)
            self.assertNotEqual(value, None, "systemConfigurationOptions monitor source does not contain metric value ["+metric+"]")
'''
class BIOSLanguageMonitorSourceTest(unittest.TestCase):
def test(self):
metricValue = biosLanguage("biosLanguage").sample(None)
        self.assertTrue(isinstance(metricValue, MultiMetricValue), "unexpected sample type")
metrics = ["_title","Installable_Languages","Current_Installed_Language"]
for metric in metrics:
value = metricValue.getValue(metric)
            self.assertNotEqual(value, None, "BIOSLanguage monitor source does not contain metric value ["+metric+"]")
'''
class PhysicalMemoryArrayMonitorSourceTest(unittest.TestCase):
def test(self):
metricValue = physicalMemoryArray("physicalMemoryArray").sample(None)
        self.assertTrue(isinstance(metricValue, MultiMetricValue), "unexpected sample type")
metrics = ["_title","Location","Use","Error_Correction_Type","Maximum_Capacity","Error_Information_Handle","Number_Of_Devices"]
for metric in metrics:
value = metricValue.getValue(metric)
            self.assertNotEqual(value, None, "physicalMemoryArray monitor source does not contain metric value ["+metric+"]")
class MemoryDeviceMonitorSourceTest(unittest.TestCase):
def test(self):
metricValue = memoryDevice("memoryDevice").sample(None)
        self.assertTrue(isinstance(metricValue, BatchMultiMetricValue), "unexpected sample type")
metrics = ["_title","Array_Handle","Error_Information_Handle","Total_Width",
"Data_Width","Size","Form_Factor","Set","Locator","Bank_Locator",
"Type","Type_Detail","Manufacturer","Speed","Serial_Number","Rank",
"Asset_Tag","Part_Number"]
for values in metricValue.getValues():
for metric in metrics:
value = values.getValue(metric)
                self.assertNotEqual(value, None, "memoryDevice monitor source does not contain metric value ["+metric+"]")
class MemoryError32BitMonitorSourceTest(unittest.TestCase):
def test(self):
metricValue = memoryError32Bit("memoryError32Bit").sample(None)
        self.assertTrue(isinstance(metricValue, BatchMultiMetricValue), "unexpected sample type")
metrics = ["_title","Type","Granularity","Operation",
"Vendor_Syndrome","Memory_Array_Address","Device_Address",
"Resolution"]
for values in metricValue.getValues():
for metric in metrics:
value = values.getValue(metric)
                self.assertNotEqual(value, None, "memoryError32Bit monitor source does not contain metric value ["+metric+"]")
class MemoryArrayMappedAddressMonitorSourceTest(unittest.TestCase):
def test(self):
metricValue = memoryArrayMappedAddress("memoryArrayMappedAddress").sample(None)
        self.assertTrue(isinstance(metricValue, MultiMetricValue), "unexpected sample type")
metrics = ["_title","Starting_Address","Ending_Address","Range_Size",
"Physical_Array_Handle","Partition_Width"]
for metric in metrics:
value = metricValue.getValue(metric)
            self.assertNotEqual(value, None, "memoryArrayMappedAddress monitor source does not contain metric value ["+metric+"]")
class MemoryDeviceMappedAddressMonitorSourceTest(unittest.TestCase):
def test(self):
metricValue = memoryDeviceMappedAddress("memoryDeviceMappedAddress").sample(None)
        self.assertTrue(isinstance(metricValue, MultiMetricValue), "unexpected sample type")
metrics = ["_title","Starting_Address","Ending_Address","Range_Size",
"Physical_Device_Handle","Partition_Row_Position","Memory_Array_Mapped_Address_Handle"]
for metric in metrics:
value = metricValue.getValue(metric)
            self.assertNotEqual(value, None, "memoryDeviceMappedAddress monitor source does not contain metric value ["+metric+"]")
class VoltageProbeMonitorSourceTest(unittest.TestCase):
def test(self):
metricValue = voltageProbe("voltageProbe").sample(None)
        self.assertTrue(isinstance(metricValue, BatchMultiMetricValue), "unexpected sample type")
metrics = ["_title","Description","Location","Status",
"Maximum_Value","Minimum_Value","Resolution",
"Tolerance","Accuracy","OEM-specific_Information","Nominal_Value"]
for values in metricValue.getValues():
for metric in metrics:
value = values.getValue(metric)
                self.assertNotEqual(value, None, "voltageProbe monitor source does not contain metric value ["+metric+"]")
class CoolingDeviceMonitorSourceTest(unittest.TestCase):
def test(self):
metricValue = coolingDevice("coolingDevice").sample(None)
        self.assertTrue(isinstance(metricValue, BatchMultiMetricValue), "unexpected sample type")
metrics = ["_title","Temperatur_Probe_Handle","Type","Status",
"Cooling_Unit_Group","OEM-specific_Information","Nominal_Speed"]
for values in metricValue.getValues():
for metric in metrics:
value = values.getValue(metric)
                self.assertNotEqual(value, None, "coolingDevice monitor source does not contain metric value ["+metric+"]")
class TemperatureProbeMonitorSourceTest(unittest.TestCase):
def test(self):
metricValue = temperatureProbe("temperatureProbe").sample(None)
        self.assertTrue(isinstance(metricValue, BatchMultiMetricValue), "unexpected sample type")
metrics = ["_title","Description","Location","Status",
"Maximum_Value","Minimum_Value","Resolution",
"Tolerance","Accuracy","OEM-specific_Information","Nominal_Value"]
for values in metricValue.getValues():
for metric in metrics:
value = values.getValue(metric)
                self.assertNotEqual(value, None, "temperatureProbe monitor source does not contain metric value ["+metric+"]")
class ElectricalCurrentProbeMonitorSourceTest(unittest.TestCase):
def test(self):
metricValue = electricalCurrentProbe("electricalCurrentProbe").sample(None)
        self.assertTrue(isinstance(metricValue, BatchMultiMetricValue), "unexpected sample type")
metrics = ["_title","Description","Location","Status",
"Maximum_Value","Minimum_Value","Resolution",
"Tolerance","Accuracy","OEM-specific_Information","Nominal_Value"]
for values in metricValue.getValues():
for metric in metrics:
value = values.getValue(metric)
                self.assertNotEqual(value, None, "electricalCurrentProbe monitor source does not contain metric value ["+metric+"]")
class SystemBootMonitorSourceTest(unittest.TestCase):
def test(self):
metricValue = systemBoot("systemBoot").sample(None)
        self.assertTrue(isinstance(metricValue, MultiMetricValue), "unexpected sample type")
metrics = ["_title","Status"]
for metric in metrics:
value = metricValue.getValue(metric)
            self.assertNotEqual(value, None, "systemBoot monitor source does not contain metric value ["+metric+"]")
class ManagementDeviceMonitorSourceTest(unittest.TestCase):
def test(self):
metricValue = managementDevice("managementDevice").sample(None)
        self.assertTrue(isinstance(metricValue, BatchMultiMetricValue), "unexpected sample type")
metrics = ["_title","Description","Type","Address","Address_Type"]
for values in metricValue.getValues():
for metric in metrics:
value = values.getValue(metric)
                self.assertNotEqual(value, None, "managementDevice monitor source does not contain metric value ["+metric+"]")
class ManagementDeviceComponentMonitorSourceTest(unittest.TestCase):
def test(self):
metricValue = managementDeviceComponent("managementDeviceComponent").sample(None)
        self.assertTrue(isinstance(metricValue, BatchMultiMetricValue), "unexpected sample type")
metrics = ["_title","Description","Management_Device_Handle","Component_Handle","Threshold_Handle"]
for values in metricValue.getValues():
for metric in metrics:
value = values.getValue(metric)
                self.assertNotEqual(value, None, "managementDeviceComponent monitor source does not contain metric value ["+metric+"]")
class SystemPowerSupplyMonitorSourceTest(unittest.TestCase):
def test(self):
metricValue = systemPowerSupply("systemPowerSupply").sample(None)
        self.assertTrue(isinstance(metricValue, MultiMetricValue), "unexpected sample type")
metrics = ["_title","Power_Unit_Group","Location","Name",
"Manufacturer","Serial_Number","Asset_Tag","Model_Part_Number","Revision",
"Max_Power_Capacity","Status","Type","Input_Voltage_Range_Switching",
"Plugged","Hot_Replaceable","Input_Voltage_Probe_Handle",
"Cooling_Device_Handle","Input_Current_Probe_Handle"]
for metric in metrics:
value = metricValue.getValue(metric)
            self.assertNotEqual(value, None, "systemPowerSupply monitor source does not contain metric value ["+metric+"]")
class OnboardDeviceMonitorSourceTest(unittest.TestCase):
def test(self):
metricValue = onboardDevice("onboardDevice").sample(None)
        self.assertTrue(isinstance(metricValue, MultiMetricValue), "unexpected sample type")
metrics = ["_title","Type","Status","Type_Instance",
"Bus_Address","Reference_Designation",]
for metric in metrics:
value = metricValue.getValue(metric)
            self.assertNotEqual(value, None, "onboardDevice monitor source does not contain metric value ["+metric+"]")
if __name__ == "__main__":
unittest.main()
|
from urllib.parse import quote
import json
import os
from src import log_entry
from .obfuscation import transform
from .exceptions import KeysDirectoryNotFound, KeysFileNotFound
user_index = os.path.join(os.path.dirname(__file__), "keys_loc.json")
default_context = "OGS"
obfuscated = "_obfuscated_"
plaintext = "_plaintext_"
no_directory_default = lambda usr: ""
def reset_index():
with open (user_index, 'w') as f:
json.dump({}, f)
log_entry (user_index, "file reset to empty value.")
def get_keys_directory(user, on_fail = no_directory_default):
with open(user_index, 'r+') as f:
index_data = json.load(f)
update = False
ref = log_entry("Searching %s's keys location from %s...." % (user, user_index))
if user in index_data:
dir = index_data[user]
else:
log_entry(ref, "Location not found.")
dir = False
if not (dir and os.path.isdir(dir)):
if dir:
log_entry (ref, "Location invalid.")
index_data.pop(user)
update = True
ref = log_entry("Getting %s's keys location from backup method...." % user)
dir = on_fail(user)
try:
if os.path.isdir(dir):
index_data[user] = dir
update = True
else:
log_entry(ref, "Location not found or invalid.")
raise KeysDirectoryNotFound(user)
finally:
if update:
ref = log_entry ("Updating %s...." % user_index)
f.seek(0)
json.dump(index_data, f, sort_keys=True, indent=4)
f.truncate()
log_entry (ref, "Updated!")
log_entry (ref, "Location found!")
return dir
def set_keys_directory(user, directory):
with open(user_index, 'r+') as f:
ref = log_entry ("Updating %s's keys location at %s...." % (user, user_index))
index_data = json.load(f)
index_data[user] = directory
f.seek(0)
json.dump(index_data, f, sort_keys=True, indent=4)
f.truncate()
log_entry (ref, "Updated!")
def remove_keys_directory(user):
with open(user_index, 'r+') as f:
ref = log_entry ("Removing %s's keys location at %s...." % (user, user_index))
index_data = json.load(f)
index_data.pop(user)
f.seek(0)
json.dump(index_data, f, sort_keys=True, indent=4)
f.truncate()
log_entry (ref, "Removed!")
def store_keys (user, keys, password="", context=default_context, if_no_directory = no_directory_default):
directory = get_keys_directory(user, if_no_directory)
if password:
ref = log_entry ("Encrypting %s's keys...." % user)
keys = transform(keys, password)
log_entry (ref, "Encrypted!")
else:
log_entry ("WARNING: No password provided to encrypt %s's keys. This is unsafe, as keys will be stored in plain text." % user)
filename = standard_filename(user, password, directory, context)
with open(filename, 'w') as f:
ref = log_entry("Storing %s's keys at %s...." % (user, filename))
json.dump(keys, f, sort_keys=True, indent=4)
log_entry(ref, "Stored!")
def retrieve_keys (user, password="", context=default_context, return_location=False):
directory = get_keys_directory(user)
filename = standard_filename(user, password, directory, context)
if os.path.isfile(filename):
with open(filename, 'r') as f:
ref = log_entry("Retrieving %s's keys from %s...." % (user, filename))
keys = json.load(f)
log_entry(ref, "Retrieved!")
else:
raise KeysFileNotFound(user, filename)
if password:
ref = log_entry ("Decrypting %s's keys...." % user)
keys = transform(keys, password)
log_entry (ref, "Decrypted!")
if return_location:
return (keys, filename)
else:
return keys
def standard_filename(user, password, directory, context):
filename = context+(obfuscated if password else plaintext)+quote(user, safe='')+".json"
return os.path.join(directory, filename)
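# Example: standard_filename('a b', 'pw', '/keys', 'OGS')
# returns '/keys/OGS_obfuscated_a%20b.json' (the plaintext variant when no password is given).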
###########################
## ##
## INITIALIZATION CODE ##
## ##
###########################
if not os.path.isfile(user_index):
log_entry (user_index, "file does not exist.")
__ref = log_entry ("Creating file %s...." % user_index)
reset_index()
log_entry(__ref, "File created. Ready!")
del __ref
else:
log_entry (user_index, "file exists. Ready!")
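# Minimal usage sketch (illustrative only; this module normally runs as part
# of its package). 'alice', '/tmp' and the key dict are hypothetical values.
#
#     set_keys_directory('alice', '/tmp')
#     store_keys('alice', {'api_key': 'abc123'}, password='hunter2')
#     keys = retrieve_keys('alice', password='hunter2')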
|
import urllib2
import json
files = '''blataget-gtfs.csv
blekingetrafiken-gtfs.csv
dalatrafik-gtfs.csv
gotlandskommun-gtfs.csv
hallandstrafiken-gtfs.csv
jonkopingslanstrafik-gtfs.csv
kalmarlanstrafik-gtfs.csv
lanstrafikenkronoberg-gtfs.csv
localdata-gtfs.csv
masexpressen.csv
nettbuss-gtfs.csv
nsb-gtfs.csv
ostgotatrafiken-gtfs.csv
pagelinks-gtfs.csv
peopletravelgrouop.csv
rt90cords-gtfs.csv
skanerafiken-gtfs.csv
sl-gtfs.csv
swebus-gtfs.csv
tib-gtfs.csv
treminalmaps-gtfs.csv
trv-gtfs.csv
ul-gtfs.csv
vasttrafik-gtfs.csv
xtrafik-gtfs.csv'''
data = files.split("\n")
print data
alldata = {}
for filename in data:
alldata[filename] = {}
response = urllib2.urlopen('https://github.com/thuma/Transit-Stop-Identifier-Conversions-Sweden/raw/master/'+filename)
downloaded = response.read().split("\n")
rubriker = downloaded[0].split(";")
    # Skip the header row; the first line holds the column names (rubriker).
    for row in downloaded[1:]:
        parts = row.split(";")
        alldata[filename][parts[0]] = {}
        for i in range(min(len(parts), len(rubriker))):
            alldata[filename][parts[0]][rubriker[i]] = parts[i]
print alldata['hallandstrafiken-gtfs.csv']['7400110']
'''
response = urllib2.urlopen('https://github.com/thuma/Transit-Stop-Identifier-Conversions-Sweden/raw/master/treminalmaps-gtfs.csv')
maps = response.read()
response = urllib2.urlopen('https://github.com/thuma/Transit-Stop-Identifier-Conversions-Sweden/raw/master/treminalmaps-gtfs.csv')
maps = response.read()
response = urllib2.urlopen('https://github.com/thuma/Transit-Stop-Identifier-Conversions-Sweden/raw/master/treminalmaps-gtfs.csv')
maps = response.read()'''
|
#!/usr/bin/env python
import os
from struct import Struct
from .dtypes import *
UNICODE_BLANK = ''
class DBCRecord(object):
"""A simple object to convert a dict to an object."""
def __init__(self, d=None):
self.data = d
def __repr__(self):
return "<DBCRecord %r>" % self.data
    def __getitem__(self, item):
        return self.data[item]
def __getattr__(self, item):
item = self.data[item]
if isinstance(item, bytes):
item = item.decode('utf-8')
return item
class DBCFile(object):
"""Base representation of a DBC file."""
header_struct = Struct('4s4i')
def __init__(self, filename, skele=None, verbose=False):
self.filename = filename
if not hasattr(self, 'skeleton'):
self.skeleton = skele
self.__create_struct()
def __iter__(self):
"""Iterated based approach to the dbc reading."""
if not os.path.exists(self.filename):
raise Exception("File '%s' not found" % (self.filename,))
f = open(self.filename, 'rb')
f_read = f.read
# Read in header
sig, records, fields, record_size, string_block_size = \
self.header_struct.unpack(f_read(20))
# Check signature
if sig != b'WDBC':
f.close()
raise Exception('Invalid file type')
self.records = records
self.fields = fields
self.record_size = record_size
self.string_block_size = string_block_size
if not self.struct:
# If the struct doesn't exist, create a default one
self.skeleton = Array('data', Int32, fields)
self.__create_struct()
# Ensure that struct and record_size is the same
if self.struct.size != record_size:
f.close()
raise Exception('Struct size mismatch (%d != %d)' %
(self.struct.size, record_size))
struct_unpack = self.struct.unpack
# Read in string block
f.seek(20 + records * record_size)
self.string_block = f_read(string_block_size)
f.seek(20)
try:
for i in range(records):
yield self.__process_record(struct_unpack(f_read(record_size)))
finally:
f.close()
def __create_struct(self):
"""Creates a Struct from the Skeleton."""
if self.skeleton:
s = ['<']
for item in self.skeleton:
if isinstance(item, Array):
s.extend(x.c for x in item.items)
else:
s.append(item.c)
self.struct = Struct(''.join(s))
else:
self.struct = None
def __process_record(self, data):
"""Processes a record (row of data)."""
output = {}
data_iter = iter(data)
for field in self.skeleton:
if isinstance(field, Array):
output[field.name] = [
self.__process_field(item, next(data_iter)) for item in field.items
if not isinstance(item, PadByte)
]
elif not isinstance(field, PadByte):
output[field.name] = self.__process_field(field, next(data_iter))
return DBCRecord(output)
def __process_field(self, _type, data):
output = data
if isinstance(_type, String):
if data == 0:
output = UNICODE_BLANK
else:
if data > self.string_block_size or self.string_block[data - 1] != 0:
raise Exception('Invalid string')
output = self.string_block[data:self.string_block.find(0, data)]
if isinstance(output, bytes):
output = output.decode('utf-8')
return output
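# Minimal usage sketch, assuming a hypothetical Spell.dbc in the working
# directory; with no skeleton given, each record exposes a 'data' array of
# Int32 fields sized from the file header.
if __name__ == '__main__':
    for record in DBCFile('Spell.dbc'):
        print(record.data['data'][0])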
|
"""
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
__author__ = "Alex Gainer ([email protected])"
__copyright__ = "Copyright 2014, Health Records For Everyone (HR4E)"
import cPickle as pickle
import datetime
import os
class PickleNotFoundException(Exception):
"""Missing Pickle Exception"""
class ClinicPickle(object):
"""Word dictionary utilities for pickling GRE words."""
_PICKLE_FOLDER = os.path.join('data', 'clinics')
_MISSING_PICKLE = 'Pickle {0} File Missing.'
def __init__(self, name):
self.name = name
self.date_created = datetime.datetime.now()
@classmethod
def create(cls, name):
"""Creates a clinic object and pickles it."""
try:
pickle_file_name = '{0}.pkl'.format(name)
path_to_pickle = os.path.join(cls._PICKLE_FOLDER,
pickle_file_name)
path = os.path.isfile(path_to_pickle)
if not path:
pickle.dump(cls(name).__dict__, file(path_to_pickle, 'wb'))
except IOError:
            raise PickleNotFoundException, cls._MISSING_PICKLE.format(name)
def delete(self):
"""Deletes a Clinic Pickle File."""
try:
pickle_file_name = '{0}.pkl'.format(self.name)
path_to_pickle = os.path.join(self._PICKLE_FOLDER,
pickle_file_name)
os.remove(path_to_pickle)
except IOError:
missing_pickle_error = self._MISSING_PICKLE.format(self.name)
raise PickleNotFoundException, missing_pickle_error
@classmethod
def get_all(cls):
        return filter(lambda x: x is not None,
                      [cls.load(name) for name in cls.get_all_clinic_names()])
@classmethod
def get_all_clinic_names(cls):
pkl_files = [f for f in os.listdir(cls._PICKLE_FOLDER)
if os.path.isfile(os.path.join(cls._PICKLE_FOLDER,f))]
        return [os.path.splitext(_)[0] for _ in pkl_files]
@classmethod
def load(cls, name):
"""Loads up a pickled clinic as a clinic object."""
try:
pickle_file_name = '{0}.pkl'.format(name)
path_to_pickle = os.path.join(cls._PICKLE_FOLDER,
pickle_file_name)
if os.path.isfile(path_to_pickle):
clinic = cls(name)
clinic.__dict__ = pickle.load(file(path_to_pickle, 'r+b'))
else:
clinic = None
return clinic
except IOError:
return None
def update(self, post_data):
"""Updates a clinic given the post_data dictionary."""
self.__dict__.update({})
try:
pickle_file_name = '{0}.pkl'.format(self.name)
path_to_pickle = os.path.join(self._PICKLE_FOLDER,
pickle_file_name)
if os.path.isfile(path_to_pickle):
                pickle.dump(self.__dict__, file(path_to_pickle, 'wb'))
except IOError:
            raise PickleNotFoundException, self._MISSING_PICKLE.format(self.name)
|
# -*- test-case-name: twisted.web2.dav.test.test_report_expand -*-
##
# Copyright (c) 2005 Apple Computer, Inc. All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
# DRI: Wilfredo Sanchez, [email protected]
##
"""
WebDAV expand-property report
"""
__all__ = ["report_DAV__expand_property"]
from twisted.python import log
from twisted.python.failure import Failure
from twisted.internet.defer import deferredGenerator, waitForDeferred
from twisted.web2 import responsecode
from twisted.web2.dav import davxml
from twisted.web2.dav.http import statusForFailure
from twisted.web2.dav.davxml import dav_namespace
def report_DAV__expand_property(self, request, expand_property):
"""
Generate an expand-property REPORT. (RFC 3253, section 3.8)
"""
# FIXME: Handle depth header
if not isinstance(expand_property, davxml.ExpandProperty):
raise ValueError("%s expected as root element, not %s."
% (davxml.ExpandProperty.sname(), expand_property.sname()))
#
# Expand DAV:allprop
#
properties = {}
for property in expand_property.children:
namespace = property.getAttribute("namespace")
name = property.getAttribute("name")
if not namespace: namespace = dav_namespace
if (namespace, name) == (dav_namespace, "allprop"):
all_properties = waitForDeferred(self.listAllProp(request))
yield all_properties
all_properties = all_properties.getResult()
for all_property in all_properties:
properties[all_property.qname()] = property
else:
properties[(namespace, name)] = property
#
# Look up the requested properties
#
properties_by_status = {
responsecode.OK : [],
responsecode.NOT_FOUND : [],
}
for property in properties:
my_properties = waitForDeferred(self.listProperties(request))
yield my_properties
my_properties = my_properties.getResult()
if property in my_properties:
try:
value = waitForDeferred(self.readProperty(property, request))
yield value
value = value.getResult()
if isinstance(value, davxml.HRef):
raise NotImplementedError()
else:
raise NotImplementedError()
except:
f = Failure()
log.err("Error reading property %r for resource %s: %s"
% (property, self, f.value))
status = statusForFailure(f, "getting property: %s" % (property,))
if status not in properties_by_status:
properties_by_status[status] = []
raise NotImplementedError()
#properties_by_status[status].append(
# ____propertyName(property)
#)
else:
log.err("Can't find property %r for resource %s" % (property, self))
properties_by_status[responsecode.NOT_FOUND].append(property)
raise NotImplementedError()
report_DAV__expand_property = deferredGenerator(report_DAV__expand_property)
|
# Copyright 2012 OpenStack LLC
# Copyright 2012-2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Utilities for consuming the version from pkg_resources.
"""
import pkg_resources
class VersionInfo(object):
def __init__(self, package):
"""Object that understands versioning for a package
:param package: name of the python package, such as glance, or
python-glanceclient
"""
self.package = package
self.release = None
self.version = None
self._cached_version = None
def _get_version_from_pkg_resources(self):
"""Get the version of the package from the pkg_resources record
associated with the package."""
try:
requirement = pkg_resources.Requirement.parse(self.package)
provider = pkg_resources.get_provider(requirement)
return provider.version
except pkg_resources.DistributionNotFound:
            # The most likely cause for this is running tests in a tree
            # produced from a tarball where the package itself has not been
# installed into anything. Check for a PKG-INFO file.
from heat.openstack.common import setup
return setup.get_version_from_pkg_info(self.package)
def release_string(self):
"""Return the full version of the package including suffixes indicating
VCS status.
"""
if self.release is None:
self.release = self._get_version_from_pkg_resources()
return self.release
def version_string(self):
"""Return the short version minus any alpha/beta tags."""
if self.version is None:
parts = []
for part in self.release_string().split('.'):
if part[0].isdigit():
parts.append(part)
else:
break
self.version = ".".join(parts)
return self.version
# Compatibility functions
canonical_version_string = version_string
version_string_with_vcs = release_string
def cached_version_string(self, prefix=""):
"""Generate an object which will expand in a string context to
        the results of version_string(). We do this so that we don't
call into pkg_resources every time we start up a program when
passing version information into the CONF constructor, but
rather only do the calculation when and if a version is requested
"""
if not self._cached_version:
self._cached_version = "%s%s" % (prefix,
self.version_string())
return self._cached_version
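# Minimal usage sketch; 'heat' is just an example package name.
if __name__ == '__main__':
    info = VersionInfo('heat')
    print(info.version_string())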
|
"""
Dict with the emojis of OSM types
"""
typeemoji = {
'aerialway:cable_car': '\xF0\x9F\x9A\xA1',
'aerialway:station': '\xF0\x9F\x9A\xA1',
'aeroway:aerodrome': '\xE2\x9C\x88',
'aeroway:terminal': '\xE2\x9C\x88',
'amenity:ambulance_station': '\xF0\x9F\x9A\x91',
'amenity:atm': '\xF0\x9F\x92\xB3',
'amenity:bank': '\xF0\x9F\x92\xB0',
'amenity:bar': '\xF0\x9F\x8D\xB8',
'amenity:biergarten': '\xF0\x9F\x8D\xBA',
'amenity:brothel': '\xF0\x9F\x91\xAF',
'amenity:cafe': '\xE2\x98\x95',
'amenity:casino': '\xE2\x99\xA0',
'amenity:cinema': '\xF0\x9F\x8E\xAC',
'amenity:college': '\xF0\x9F\x8E\x93',
'amenity:crematorium': '\xE2\x9A\xB1',
'amenity:drinking_water': '\xF0\x9F\x9A\xB0',
'amenity:fast_food': '\xF0\x9F\x8D\x94',
'amenity:fire_station': '\xF0\x9F\x9A\x92',
'amenity:fountain': '\xE2\x9B\xB2',
'amenity:fuel': '\xE2\x9B\xBD',
'amenity:hospital': '\xF0\x9F\x8F\xA5',
'amenity:hotel': '\xF0\x9F\x8F\xA8',
'amenity:ice_cream': '\xF0\x9F\x8D\xA6',
'amenity:kindergarten': '\xF0\x9F\x91\xB6',
'amenity:karaoke_box': '\xF0\x9F\x8E\xA4',
'amenity:library': '\xF0\x9F\x93\x96',
'amenity:love_hotel': '\xF0\x9F\x8F\xA9',
'amenity:place_of_worship': '\xF0\x9F\x9B\x90',
'amenity:pharmacy': '\xF0\x9F\x92\x8A',
'amenity:police': '\xF0\x9F\x9A\x93',
'amenity:pub': '\xF0\x9F\x8D\xBA',
'amenity:recycling': '\xE2\x99\xBB',
'amenity:restaurant': '\xF0\x9F\x8D\xB4',
'amenity:sauna': '\xE2\x99\xA8',
'amenity:school': '\xF0\x9F\x8E\x92',
'amenity:stripclub': '\xF0\x9F\x91\xAF',
'amenity:studio': '\xF0\x9F\x8E\x99',
'amenity:swimming_pool': '\xF0\x9F\x8F\x8A',
'amenity:taxi': '\xF0\x9F\x9A\x95',
'amenity:telephone': '\xF0\x9F\x93\x9E',
'amenity:theatre': '\xF0\x9F\x8E\xAD',
'amenity:toilets': '\xF0\x9F\x9A\xBB',
'amenity:university': '\xF0\x9F\x8E\x93',
'building:church': '\xE2\x9B\xAA',
'building:mosque': '\xF0\x9F\x95\x8C',
'building:synagogue': '\xF0\x9F\x95\x8D',
'building:stadium': '\xF0\x9F\x8F\x9F',
'building:temple': '\xF0\x9F\x8F\x9B',
'building:train_station': '\xF0\x9F\x9A\x89',
'craft:beekeeper': '\xF0\x9F\x90\x9D',
'cuisine:pasta': '\xF0\x9F\x8D\x9D',
'cuisine:pizza': '\xF0\x9F\x8D\x95',
'cuisine:sushi': '\xF0\x9F\x8D\xA3',
'emergency:ambulance_station': '\xF0\x9F\x9A\x91',
'emergency:defibrillator': '\xF0\x9F\x92\x94',
'emergency:phone': '\xF0\x9F\x86\x98',
'emergency:assembly_point':'\xF0\x9F\x8E\xAF',
'highway:bridleway': '\xE3\x80\xB0 \xF0\x9F\x90\x8E',
'highway:bus_stop': '\xF0\x9F\x9A\x8C',
'highway:construction': '\xE3\x80\xB0 \xF0\x9F\x9A\xA7',
'highway:cycleway': '\xE3\x80\xB0 \xF0\x9F\x9A\xB4',
'highway:footway': '\xE3\x80\xB0 \xF0\x9F\x9A\xB6',
'highway:living_street': '\xE3\x80\xB0 \xF0\x9F\x8F\xA0',
'highway:motorway': '\xE3\x80\xB0 \xF0\x9F\x9A\x97',
'highway:path': '\xE3\x80\xB0 \xF0\x9F\x9A\xB6',
'highway:pedestrian': '\xE3\x80\xB0 \xF0\x9F\x8F\xA0',
'highway:primary': '\xE3\x80\xB0 \xF0\x9F\x9A\x9B',
'highway:raceway': '\xE3\x80\xB0 \xF0\x9F\x8F\x81',
'highway:residential': '\xE3\x80\xB0 \xF0\x9F\x8F\xA0',
'highway:road': '\xE3\x80\xB0 \xE2\x9D\x93',
'highway:secondary': '\xE3\x80\xB0 \xF0\x9F\x9A\x9B',
'highway:tertiary': '\xE3\x80\xB0 \xF0\x9F\x9A\x9B',
'highway:track': '\xE3\x80\xB0 \xF0\x9F\x9A\x9C',
'highway:trunk': '\xE3\x80\xB0 \xF0\x9F\x9A\x97',
'highway:unclassified': '\xE3\x80\xB0 \xE2\x9D\x93',
'historic:castle': '\xF0\x9F\x8F\xB0',
'historic:monument': '\xF0\x9F\x97\xBD',
'landuse:cemetery': '\xE2\x9A\xB0',
'landuse:plant_nursery': '\xF0\x9F\x8C\xB1',
'leisure:bowling_alley': '\xF0\x9F\x8E\xB3',
'leisure:golf_course': '\xE2\x9B\xB3',
'leisure:swimming_pool': '\xF0\x9F\x8F\x8A',
'man_made:works': '\xF0\x9F\x8F\xAD',
'natural:peak': '\xF0\x9F\x97\xBB',
'natural:volcano': '\xF0\x9F\x8C\x8B',
'place:city': '\xF0\x9F\x8C\x86',
'place:ocean': '\xF0\x9F\x8C\x8A',
'place:sea': '\xF0\x9F\x8C\x8A',
'place:town': '\xF0\x9F\x8F\x98',
'place:village': '\xF0\x9F\x8F\x98',
'railway:station': '\xF0\x9F\x9A\x89',
'railway:subway': '\xF0\x9F\x9A\x87',
'railway:subway_entrance': '\xF0\x9F\x9A\x87',
'railway:tram': '\xF0\x9F\x9A\x83',
'route:piste': '\xF0\x9F\x8E\xBF',
'route:subway': '\xF0\x9F\x9A\x87',
'shop:art': '\xF0\x9F\x8E\xA8',
'shop:bag': '\xF0\x9F\x91\x9C',
'shop:bakery': '\xF0\x9F\x8D\x9E',
'shop:baby_goods': '\xF0\x9F\x8D\xBC',
'shop:books': '\xF0\x9F\x93\x9A',
'shop:butcher': '\xF0\x9F\x8D\x97',
'shop:cheese': '\xF0\x9F\xA7\x80',
'shop:chocolate': '\xF0\x9F\x8D\xAB',
'shop:clothes': '\xF0\x9F\x91\x97',
'shop:coffee': '\xE2\x98\x95',
'shop:computer': '\xF0\x9F\x92\xBB',
'shop:confectionary': '\xF0\x9F\x8D\xB0',
'shop:cosmetics': '\xF0\x9F\x92\x85',
'shop:doityourself': '\xF0\x9F\x94\xA7',
'shop:electronics': '\xF0\x9F\x93\xBA',
'shop:erotic': '\xF0\x9F\x92\x8B',
'shop:garden_centre': '\xF0\x9F\x8C\xB1',
'shop:gift': '\xF0\x9F\x8E\x81',
'shop:fishing': '\xF0\x9F\x8E\xA3',
'shop:florist': '\xF0\x9F\x92\x90',
'shop:greengrocer': '\xF0\x9F\x8D\x89',
'shop:hairdresser': '\xF0\x9F\x92\x87',
'shop:hifi': '\xF0\x9F\x94\x8A',
'shop:ice_cream': '\xF0\x9F\x8D\xA6',
'shop:jewelry': '\xF0\x9F\x92\x8D',
'shop:locksmith': '\xF0\x9F\x94\x91',
'shop:mobile_phone': '\xF0\x9F\x93\xB1',
'shop:music': '\xF0\x9F\x92\xBF',
'shop:musical_instrument': '\xF0\x9F\x8E\xB8',
'shop:newsagent': '\xF0\x9F\x93\xB0',
'shop:optician': '\xF0\x9F\x91\x93',
'shop:pastry': '\xF0\x9F\x8D\xAA',
'shop:photo': '\xF0\x9F\x93\xB7',
'shop:seafood': '\xF0\x9F\x90\x9F',
'shop:shoes': '\xF0\x9F\x91\x9E',
'shop:sports': '\xE2\x9A\xBD',
'shop:swimming_pool': '\xF0\x9F\x8F\x8A',
'shop:ticket': '\xF0\x9F\x8E\xAB',
'shop:tobacco': '\xF0\x9F\x9A\xAC',
'shop:video': '\xF0\x9F\x93\xBC',
'shop:video_games': '\xF0\x9F\x8E\xAE',
'shop:watches': '\xE2\x8C\x9A',
'shop:wine': '\xF0\x9F\x8D\xB7',
'sport:american_football': '\xF0\x9F\x8F\x88',
'sport:9pin': '\xF0\x9F\x8E\xB3',
'sport:10pin': '\xF0\x9F\x8E\xB3',
'sport:archery': '\xF0\x9F\x8F\xB9',
'sport:badminton': '\xF0\x9F\x8F\xB8',
'sport:baseball': '\xE2\x9A\xBE',
'sport:basketball': '\xF0\x9F\x8F\x80',
'sport:billiards': '\xF0\x9F\x8E\xB1',
'sport:cricket': '\xF0\x9F\x8F\x8F',
'sport:cycling': '\xF0\x9F\x9A\xB4',
'sport:darts': '\xF0\x9F\x8E\xAF',
'sport:equestrian': '\xF0\x9F\x8F\x87',
'sport:field_hockey': '\xF0\x9F\x8F\x91',
'sport:golf': '\xF0\x9F\x8F\x8C',
'sport:gymnastics': '\xF0\x9F\x8F\x8B',
'sport:horse_racing': '\xF0\x9F\x8F\x87',
'sport:ice_hockey': '\xF0\x9F\x8F\x92',
'sport:ice_skating': '\xE2\x9B\xB8',
'sport:rugby_league': '\xF0\x9F\x8F\x89',
'sport:rugby_union': '\xF0\x9F\x8F\x89',
'sport:sailing': '\xE2\x9B\xB5',
'sport:soccer': '\xE2\x9A\xBD',
'sport:surfing': '\xF0\x9F\x8F\x84',
'sport:table_tennis': '\xF0\x9F\x8F\x93',
'sport:tennis': '\xF0\x9F\x8E\xBE',
'sport:volleyball': '\xF0\x9F\x8F\x90',
'studio:audio': '\xF0\x9F\x8E\xB9',
'studio:radio': '\xF0\x9F\x93\xBB',
'studio:television': '\xF0\x9F\x93\xBA',
'studio:video': '\xF0\x9F\x8E\xA5',
'tourism:aquarium': '\xF0\x9F\x90\xA0',
'tourism:camp_site': '\xE2\x9B\xBA',
'tourism:hotel': '\xF0\x9F\x8F\xA8',
'tourism:information': '\xE2\x84\xB9',
'tourism:zoo': '\xF0\x9F\x90\x8A',
'vending:cigarettes': '\xF0\x9F\x9A\xAC'
}
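# Small lookup helper sketch; the fallback default (a round pushpin) is an
# arbitrary choice, not part of the original mapping.
def emoji_for_tag(key, value, default='\xF0\x9F\x93\x8D'):
    """Return the emoji for an OSM key/value pair, or the default pin."""
    return typeemoji.get('%s:%s' % (key, value), default)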
|
'''Ensure non-immutable constants are copied, so that mutation cannot
affect the constant in future uses.
'''
from ..runtime.builtins import get_builtin_symbol
from ..runtime.immutable import immutablep
from .walk import IRWalker, propigate_location
from . import ir as I
from .bind import Binding, BindingUse
copy_binding = Binding(get_builtin_symbol('make-copy'))
def make_copy_form(value, loc_form=None):
copy_form = I.make_call(callee=I.make_read_binding(BindingUse(copy_binding)),
args=[I.make_constant(value)],
kwd_names=[], kwd_values=[],
star_args=None, star_kwds=None)
if loc_form is not None:
propigate_location(loc_form, copy_form)
return copy_form
class ConstantCopyInserter(IRWalker):
descend_into_functions = True
def visit_constant(self, cnst):
if not immutablep(cnst.value):
I.replace_child(cnst, make_copy_form(cnst.value, cnst))
def insert_copy_constants(node):
assert not isinstance(node, I.constant)
ConstantCopyInserter().visit(node)
return node
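# Illustration (hedged): given a mutable constant such as a list literal, the
# walker replaces the I.constant node in place with a call form equivalent to
# make-copy(<value>), so every evaluation yields a fresh copy of the value.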
|
"""
Plotting functions.
"""
from __future__ import absolute_import
import matplotlib.pyplot as plt
import numpy as np
def hhist(items, title=None, axislabel=None, color=None, height=None, width=None, reverse=False):
"""
Plots a horizontal histogram of values and frequencies.
Arguments:
items (iterable[any]) => A list of objects.
title (Optional[str]) => A title for the resulting histogram.
axislabel (Optional[str]) => A label for the y-axis that lists the unique items in
the parameter list.
color (Optional[str]) => A matplotlib color value for coloring the histogram
(default: matplotlib's default plot color, a royal blue)
height (Optional[int]) => A height for the plot (default: 10)
width (Optional[int]) => A width for the plot (default: 20)
        reverse (Optional[bool]) => If True, reverse the frequency ordering of the
                                    plotted bars (default: False).
Returns:
        None; a matplotlib figure is displayed as a side effect.
"""
# Parse the unique items and their counts.
unique_items, item_counts = np.unique(items, return_counts=True)
# Sort the items by frequency.
item_counts, unique_items = zip(*sorted(zip(item_counts, unique_items), reverse=reverse))
# Plot the frequencies.
pos = np.arange(len(unique_items)) + 0.5
plt.figure(figsize=((width or 20), (height or 10)))
plt.barh(pos, item_counts, align='center', color=color)
plt.yticks(pos, unique_items)
plt.xlabel('Frequency')
if axislabel:
plt.ylabel(axislabel)
if title:
plt.title(title)
plt.show()
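# Minimal usage example with made-up data:
if __name__ == '__main__':
    hhist(['a', 'b', 'b', 'c', 'c', 'c'], title='Letter frequency', axislabel='Letter')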
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
def codegen(paratype, paraname):
string_code_raw = '''
private {0} m_{1};
public {0} {1}
{{
get
{{
return m_{1};
}}
set
{{
m_{1} = value;
if (PropertyChanged != null)
PropertyChanged.Invoke(this, new PropertyChangedEventArgs("{1}"));
}}
}}'''.format(paratype, paraname)
    print(string_code_raw)
def main():
    codegen('String', 'Host_0')
    codegen('String', 'Host_1')
    codegen('String', 'Host_2')
    codegen('String', 'Host_3')
    codegen('Int32', 'HostPort_0')
    codegen('Int32', 'HostPort_1')
    codegen('Int32', 'HostPort_2')
    codegen('Int32', 'HostPort_3')
    codegen('bool', 'VmCheck')
    codegen('Int32', 'VmCpu')
    codegen('Int32', 'VmMemory')
    codegen('Int32', 'VmResHeight')
    codegen('Int32', 'VmResWidth')
    codegen('Int32', 'VmDisk')
    codegen('String', 'NoticeTitle')
    codegen('String', 'NoticeContent')
    codegen('String', 'Notice')
    codegen('String', 'TargetFilePath')
    codegen('String', 'TimeMon')
    codegen('String', 'TimeTue')
    codegen('String', 'TimeWed')
    codegen('String', 'TimeThu')
    codegen('String', 'TimeFri')
    codegen('String', 'TimeSat')
    codegen('String', 'TimeSun')
    codegen('bool', 'TimeCheck')
if __name__=='__main__':
    main()
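# For reference, codegen('String', 'Host_0') prints a C# property wired to
# INotifyPropertyChanged:
#
#     private String m_Host_0;
#     public String Host_0
#     {
#         get
#         {
#             return m_Host_0;
#         }
#         set
#         {
#             m_Host_0 = value;
#             if (PropertyChanged != null)
#                 PropertyChanged.Invoke(this, new PropertyChangedEventArgs("Host_0"));
#         }
#     }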
|
"""
Optional Settings:
TEST_EXCLUDE: A list of apps to exclude by default from testing
RUN_ALL_TESTS: Overrides exclude and runs all tests (default: False - which uses the TEST_EXCLUDE)
TEST_FIXTURES: A list of fixtures to load when testing.
"""
import unittest
from django.core.management import call_command
from django.db.models.loading import get_app, get_apps
from django.test.simple import DjangoTestSuiteRunner, build_test, reorder_suite, build_suite, TestCase
import logging
from django import http
from django.conf import settings
from django.forms.models import model_to_dict
from django.test.client import Client, MULTIPART_CONTENT
from limbo.strings import unique_string
log = logging.getLogger(__file__)
EXCLUDED_APPS = getattr(settings, 'TEST_EXCLUDE', [])
TESTING_NOT_IMPLEMENTED_FAIL = getattr(settings, 'TESTING_NOT_IMPLEMENTED_FAIL', True)
_test_run = 0
class BaseTestCase(unittest.TestCase):
password = 'qwerty'
def setUp(self):
global _test_run
_test_run += 1
self._test_run = _test_run
def not_implemented(self, msg = "Test not Implemented"):
if TESTING_NOT_IMPLEMENTED_FAIL:
raise NotImplementedError(msg)
def post_not_implemented(self):
self.not_implemented("POST test not implemented")
class ViewTestCase(BaseTestCase):
AUTO_TEST_LINKS = True
class CODES:
success = http.HttpResponse.status_code
redirect = http.HttpResponseRedirect.status_code
permanent_redirect = http.HttpResponsePermanentRedirect.status_code
not_modified = http.HttpResponseNotModified.status_code
bad_request = http.HttpResponseBadRequest.status_code
not_found = http.HttpResponseNotFound.status_code
forbidden = http.HttpResponseForbidden.status_code
not_allowed = http.HttpResponseNotAllowed.status_code
gone = http.HttpResponseGone.status_code
error = http.HttpResponseServerError.status_code
def setUp(self):
# Every test needs a client.
super(ViewTestCase, self).setUp()
self.client = Client()
def get(self, path, data={}, follow=False, **extra):
return self.client.get(path, data, follow, **extra)
def get_ajax(self, path, data = {}, follow = False, **extra):
extra['HTTP_X_REQUESTED_WITH'] = 'XMLHttpRequest'
return self.get(path, data, follow, **extra)
def post(self, path, data={}, content_type=MULTIPART_CONTENT, follow=False, **extra):
return self.client.post(path, data, content_type, follow, **extra)
def post_ajax(self, path, data={}, content_type=MULTIPART_CONTENT, follow=False, **extra):
extra['HTTP_X_REQUESTED_WITH'] = 'XMLHttpRequest'
return self.post(path, data, content_type, follow, **extra)
def _user_login(self, user, password = None):
self.logout()
password = password or self.password
success = self.client.login(username=user.username, password=password)
self.failUnless(success, 'Failed to login')
return success
def logout(self):
self.client.logout()
def model_dict(self, instance, fields = None, exclude = None):
return model_to_dict(instance, fields, exclude)
class AdvancedTestSuiteRunner(DjangoTestSuiteRunner):
def __init__(self, *args, **kwargs):
from django.conf import settings
settings.IS_TESTRUN = True
settings.TESTING = True
south_log = logging.getLogger("south")
south_log.setLevel(logging.WARNING)
super(AdvancedTestSuiteRunner, self).__init__(*args, **kwargs)
def setup_databases(self, **kwargs):
databases = super(AdvancedTestSuiteRunner, self).setup_databases(**kwargs)
self.load_fixtures()
return databases
def load_fixtures(self):
for fixture in getattr(settings, 'TEST_FIXTURES', []):
call_command('loaddata', fixture)
def build_suite(self, *args, **kwargs):
suite = self.safe_build_suite(*args, **kwargs)
if not args[0] and not getattr(settings, 'RUN_ALL_TESTS', False):
tests = []
for case in suite:
pkg = case.__class__.__module__.split('.')[0]
if pkg not in EXCLUDED_APPS:
tests.append(case)
suite._tests = tests
return suite
def safe_build_suite(self, test_labels, extra_tests=None, **kwargs):
suite = unittest.TestSuite()
if test_labels:
for label in test_labels:
try:
if '.' in label:
suite.addTest(build_test(label))
else:
app = get_app(label)
suite.addTest(build_suite(app))
except Exception:
                    log.warning("Could not add test for label: %s" % label)
else:
for app in get_apps():
try:
suite.addTest(build_suite(app))
except Exception:
                    log.warning("Could not add tests for app: %s" % app)
if extra_tests:
for test in extra_tests:
suite.addTest(test)
return reorder_suite(suite, (TestCase,))
|
# Copyright (c) 2013 Paul Tagliamonte <[email protected]>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
from py_compile import wr_long, MAGIC
from hy.compiler import hy_compile
from hy.models import HyObject
from hy.lex import tokenize
from io import open
import marshal
import imp
import sys
import ast
import os
import __future__
from hy._compat import builtins, long_type
def ast_compile(ast, filename, mode):
"""Compile AST.
Like Python's compile, but with some special flags."""
flags = (__future__.CO_FUTURE_DIVISION |
__future__.CO_FUTURE_PRINT_FUNCTION)
return compile(ast, filename, mode, flags)
def import_buffer_to_hst(buf):
"""Import content from buf and return an Hy AST."""
return tokenize(buf + "\n")
def import_file_to_hst(fpath):
"""Import content from fpath and return an Hy AST."""
with open(fpath, 'r', encoding='utf-8') as f:
return import_buffer_to_hst(f.read())
def import_buffer_to_ast(buf, module_name):
""" Import content from buf and return a Python AST."""
return hy_compile(import_buffer_to_hst(buf), module_name)
def import_file_to_ast(fpath, module_name):
"""Import content from fpath and return a Python AST."""
return hy_compile(import_file_to_hst(fpath), module_name)
def import_file_to_module(module_name, fpath):
"""Import content from fpath and puts it into a Python module.
Returns the module."""
try:
_ast = import_file_to_ast(fpath, module_name)
mod = imp.new_module(module_name)
mod.__file__ = fpath
eval(ast_compile(_ast, fpath, "exec"), mod.__dict__)
except Exception:
sys.modules.pop(module_name, None)
raise
return mod
def import_buffer_to_module(module_name, buf):
_ast = import_buffer_to_ast(buf, module_name)
mod = imp.new_module(module_name)
eval(ast_compile(_ast, "", "exec"), mod.__dict__)
return mod
def hy_eval(hytree, namespace, module_name):
foo = HyObject()
foo.start_line = 0
foo.end_line = 0
foo.start_column = 0
foo.end_column = 0
hytree.replace(foo)
_ast, expr = hy_compile(hytree, module_name, get_expr=True)
# Spoof the positions in the generated ast...
for node in ast.walk(_ast):
node.lineno = 1
node.col_offset = 1
for node in ast.walk(expr):
node.lineno = 1
node.col_offset = 1
# Two-step eval: eval() the body of the exec call
eval(ast_compile(_ast, "<eval_body>", "exec"), namespace)
# Then eval the expression context and return that
return eval(ast_compile(expr, "<eval>", "eval"), namespace)
def write_hy_as_pyc(fname):
with open(fname, 'U') as f:
try:
st = os.fstat(f.fileno())
except AttributeError:
st = os.stat(fname)
timestamp = long_type(st.st_mtime)
_ast = import_file_to_ast(fname,
os.path.basename(os.path.splitext(fname)[0]))
code = ast_compile(_ast, fname, "exec")
cfile = "%s.pyc" % fname[:-len(".hy")]
open_ = builtins.open
with open_(cfile, 'wb') as fc:
if sys.version_info[0] >= 3:
fc.write(b'\0\0\0\0')
else:
fc.write('\0\0\0\0')
wr_long(fc, timestamp)
if (sys.version_info[0] >= 3 and sys.version_info[1] >= 3):
wr_long(fc, st.st_size)
marshal.dump(code, fc)
fc.flush()
fc.seek(0, 0)
fc.write(MAGIC)
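        # For reference, the .pyc header written above is, on this era of
        # CPython:
        #   bytes 0-3:  magic number (patched in last, once marshalling succeeded)
        #   bytes 4-7:  source mtime
        #   bytes 8-11: source size (Python 3.3+ only)
        # Writing zeros first and seeking back means an interrupted write
        # leaves an invalid magic, so a truncated .pyc is never loaded.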
class MetaLoader(object):
def __init__(self, path):
self.path = path
def is_package(self, fullname):
dirpath = "/".join(fullname.split("."))
for pth in sys.path:
pth = os.path.abspath(pth)
composed_path = "%s/%s/__init__.hy" % (pth, dirpath)
if os.path.exists(composed_path):
return True
return False
def load_module(self, fullname):
if fullname in sys.modules:
return sys.modules[fullname]
if not self.path:
return
sys.modules[fullname] = None
mod = import_file_to_module(fullname,
self.path)
ispkg = self.is_package(fullname)
mod.__file__ = self.path
mod.__loader__ = self
mod.__name__ = fullname
if ispkg:
mod.__path__ = []
mod.__package__ = fullname
else:
mod.__package__ = fullname.rpartition('.')[0]
sys.modules[fullname] = mod
return mod
class MetaImporter(object):
def find_on_path(self, fullname):
fls = ["%s/__init__.hy", "%s.hy"]
dirpath = "/".join(fullname.split("."))
for pth in sys.path:
pth = os.path.abspath(pth)
for fp in fls:
composed_path = fp % ("%s/%s" % (pth, dirpath))
if os.path.exists(composed_path):
return composed_path
def find_module(self, fullname, path=None):
path = self.find_on_path(fullname)
if path:
return MetaLoader(path)
sys.meta_path.append(MetaImporter())
sys.path.insert(0, "")
|
import os, sys
if __name__ == '__main__':
execfile(os.path.join(sys.path[0], 'framework.py'))
from Products.UWOshOIE.tests.uwoshoietestcase import UWOshOIETestCase
from Products.CMFCore.WorkflowCore import WorkflowException
class TestTransitionDeclineFromFacultyReview(UWOshOIETestCase):
"""Ensure product is properly installed"""
def createApplication(self):
self.login(self._default_user)
self.portal.invokeFactory(type_name="OIEStudentApplication", id="testapplication")
app = self.portal['testapplication']
self.fill_out_application(app)
self.portal_workflow.doActionFor(app, 'submit')
self.logout()
self.login('front_line_advisor')
self.portal_workflow.doActionFor(app, 'waitForPrintedMaterials')
app.setWithdrawalRefund(True)
app.setApplicationFeeOK(True)
app.setUWSystemStatementOK(True)
app.setUWOshkoshStatementOK(True)
app.setTranscriptsOK(True)
self.portal_workflow.doActionFor(app, 'sendForDirectorReview')
self.logout()
self.login('director')
self.portal_workflow.doActionFor(app, 'sendForProgramManagerReview')
self.logout()
self.login('program_manager')
self.portal_workflow.doActionFor(app, 'sendForFacultyReview')
self.logout()
return app
    def test_faculty_review_should_be_able_to_perform_action(self):
app = self.createApplication()
self.login('fac_review')
self.portal_workflow.doActionFor(app, 'declineFromFacultyReview')
self.assertEquals('declined', self.getState(app))
    def test_front_line_advisor_should_be_able_to_perform_action(self):
app = self.createApplication()
self.login('front_line_advisor')
self.portal_workflow.doActionFor(app, 'declineFromFacultyReview')
self.assertEquals('declined', self.getState(app))
    def test_program_manager_should_be_able_to_perform_action(self):
app = self.createApplication()
self.login('program_manager')
self.portal_workflow.doActionFor(app, 'declineFromFacultyReview')
self.assertEquals('declined', self.getState(app))
    def test_director_should_be_able_to_perform_action(self):
app = self.createApplication()
self.login('director')
self.portal_workflow.doActionFor(app, 'declineFromFacultyReview')
self.assertEquals('declined', self.getState(app))
def test_other_should_not_be_able_to_declineFromFacultyReview(self):
app = self.createApplication()
        # The director can also decline (see the test above), so include that role.
        accepted_roles = ['fac_review', 'front_line_advisor', 'program_manager', 'director']
        for user in self._all_users:
            if user not in accepted_roles:
                self.login(user)
                self.assertRaises(WorkflowException, self.portal_workflow.doActionFor, app, 'declineFromFacultyReview')
                self.logout()
def test_should_send_correct_email_from_program_manager(self):
app = self.createApplication()
self.portal.MailHost.clearEmails()
self.login('program_manager')
self.portal_workflow.doActionFor(app, 'declineFromFacultyReview')
to = self.portal.MailHost.getTo()
f = self.portal.MailHost.getFrom()
subject = self.portal.MailHost.getSubject()
message = self.portal.MailHost.getMessage()
self.assertEquals(['[email protected]', '[email protected]'], to)
self.assertEquals('[email protected]', f)
self.assertEquals('Your study abroad application update (UW Oshkosh Office of International Education)', subject)
self.assertEquals("\n\nYour UW Oshkosh Office of International Education study abroad application has been updated.\n\nName: John Doe\nProgram Name: test\nProgram Year: 2009\n\nTransition\n\n\n\nYou can view your application here: http://nohost/plone/testapplication\n\nComment: \n\n\n", message)
def test_should_send_correct_email_from_front_line_advisor(self):
app = self.createApplication()
self.portal.MailHost.clearEmails()
self.login('front_line_advisor')
self.portal_workflow.doActionFor(app, 'declineFromFacultyReview')
to = self.portal.MailHost.getTo()
f = self.portal.MailHost.getFrom()
subject = self.portal.MailHost.getSubject()
message = self.portal.MailHost.getMessage()
self.assertEquals(['[email protected]', '[email protected]'], to)
self.assertEquals('[email protected]', f)
self.assertEquals('Your study abroad application update (UW Oshkosh Office of International Education)', subject)
self.assertEquals("\n\nYour UW Oshkosh Office of International Education study abroad application has been updated.\n\nName: John Doe\nProgram Name: test\nProgram Year: 2009\n\nTransition\n\n\n\nYou can view your application here: http://nohost/plone/testapplication\n\nComment: \n\n\n", message)
def test_should_send_correct_email_from_director(self):
app = self.createApplication()
self.portal.MailHost.clearEmails()
self.login('director')
self.portal_workflow.doActionFor(app, 'declineFromFacultyReview')
to = self.portal.MailHost.getTo()
f = self.portal.MailHost.getFrom()
subject = self.portal.MailHost.getSubject()
message = self.portal.MailHost.getMessage()
self.assertEquals(['[email protected]', '[email protected]'], to)
self.assertEquals('[email protected]', f)
self.assertEquals('Your study abroad application update (UW Oshkosh Office of International Education)', subject)
self.assertEquals("\n\nYour UW Oshkosh Office of International Education study abroad application has been updated.\n\nName: John Doe\nProgram Name: test\nProgram Year: 2009\n\nTransition\n\n\n\nYou can view your application here: http://nohost/plone/testapplication\n\nComment: \n\n\n", message)
def test_should_send_correct_email_from_fac_review(self):
app = self.createApplication()
self.portal.MailHost.clearEmails()
self.login('fac_review')
self.portal_workflow.doActionFor(app, 'declineFromFacultyReview')
to = self.portal.MailHost.getTo()
f = self.portal.MailHost.getFrom()
subject = self.portal.MailHost.getSubject()
message = self.portal.MailHost.getMessage()
self.assertEquals(['[email protected]', '[email protected]'], to)
self.assertEquals('[email protected]', f)
self.assertEquals('Your study abroad application update (UW Oshkosh Office of International Education)', subject)
self.assertEquals("\n\nYour UW Oshkosh Office of International Education study abroad application has been updated.\n\nName: John Doe\nProgram Name: test\nProgram Year: 2009\n\nTransition\n\n\n\nYou can view your application here: http://nohost/plone/testapplication\n\nComment: \n\n\n", message)
def test_suite():
from unittest import TestSuite, makeSuite
suite = TestSuite()
suite.addTest(makeSuite(TestTransitionDeclineFromFacultyReview))
return suite
if __name__ == '__main__':
framework()
|
#!/usr/bin/python
__author__ = "Ryan Plyler"
__version__ = "0.2"
import sys
import os
########################################################################
# Config
########################################################################
TODO_FILENAME = os.path.join(os.getcwd(), '.todo.list')
########################################################################
# Global Classes: bcolors Status
########################################################################
class bcolors:
HEADER = '\033[95m'
OKBLUE = '\033[94m'
OKGREEN = '\033[92m'
WARNING = '\033[93m'
RECV = '\033[33m' # yellow
FAIL = '\033[91m'
ENDC = '\033[0m'
INFO = '\033[37m'
WHITE = '\033[97m'
class Status:
PENDING = "PENDING"
DONE = " DONE"
########################################################################
# Helper Functions: usage() nextID()
########################################################################
def usage():
print "\nUsage:"
print "\ttodo | List the todos for the current directory"
print "\ttodo show|list | Same as 'todo'"
print "\ttodo new <new todo> | Create a new todo"
print "\ttodo complete|done <todo-id> | Mark a todo as complete"
print "\ttodo remove|rm|delete|del <todo-id> | Remove a todo"
print "\ttodo undo <todo-id> | Undo a 'DONE' todo. Make it pending again."
print "\ttodo purge | Delete all todos and todo savedata for the cwd"
print "\ttodo help | Show this help"
print
def getLineCount():
with open(TODO_FILENAME) as f:
lines = f.readlines()
return len(lines)
def readlines():
with open(TODO_FILENAME) as f:
lines = f.readlines()
linecount = len(lines)
return lines, linecount
def nextID():
"""Get the the number of what the next todo ID should be"""
return getLineCount() + 1
########################################################################
# Core functionality functions:
# newTodo() removeTodo(id) completeTodo(id) undoTodo(id)
# showTodos()
########################################################################
def newTodo(content):
    formatted = bcolors.WHITE + "[" + "%id" + "] " + bcolors.ENDC + Status.PENDING + ": " + content + "\n"
    with open(TODO_FILENAME, "a") as f:
        f.write(formatted)
    print "Added todo #%d" % getLineCount()
def removeTodo(id):
id = int(id)
lineCounter = 1
lines, linecount = readlines()
todoRemoved = False
newFile = open(TODO_FILENAME, 'w')
for line in lines:
        # Write all the lines back to the file except the line number of id
        if lineCounter != id:
newFile.write(line)
else:
todoRemoved = True
# increment the line counter
lineCounter += 1
newFile.close()
if todoRemoved:
print "Removed todo #%s" % id
else:
print "No todo #%s found" % id
def completeTodo(id):
id = int(id)
lines, linecount = readlines()
todoCompleted = False
newFile = open(TODO_FILENAME, 'w')
lineCounter = 1
for line in lines:
# Write all the lines back to the file except the line number of id
if lineCounter == id:
line = line.replace(Status.PENDING, Status.DONE)
newFile.write(line)
todoCompleted = True
else:
newFile.write(line)
# increment the line counter
lineCounter += 1
newFile.close()
if todoCompleted:
print "Completed todo #%s" % id
else:
print "No todo #%s found." % id
def undoTodo(id):
id = int(id)
lines, linecount = readlines()
todoCompleted = False
newFile = open(TODO_FILENAME, 'w')
lineCounter = 1
for line in lines:
# Write all the lines back to the file except the line number of id
if lineCounter == id:
line = line.replace(Status.DONE, Status.PENDING)
newFile.write(line)
todoCompleted = True
else:
newFile.write(line)
# increment the line counter
lineCounter += 1
newFile.close()
if todoCompleted:
print "Undid todo #%s" % id
else:
print "No todo #%s found." % id
def showTodos():
lineCounter = 1
try:
lines, linecount = readlines()
for line in lines:
            # Auto-assign the todo ID based on the line it's on in the todo.list file
line = line.replace("%id", str(lineCounter))
if Status.PENDING in line:
line = line.replace(Status.PENDING, bcolors.FAIL + Status.PENDING + bcolors.ENDC)
elif Status.DONE in line:
line = line.replace(Status.DONE, bcolors.OKGREEN + Status.DONE + bcolors.ENDC)
sys.stdout.write(line)
lineCounter += 1
except IOError:
print "No todos created for this directory yet"
########################################################################
# Parse command line arguments
########################################################################
if len(sys.argv) == 1:
showTodos()
elif sys.argv[1] == "new":
content = " ".join(sys.argv[2:])
newTodo(content)
elif sys.argv[1] == "complete" or sys.argv[1] == "done":
completeTodo(sys.argv[2])
elif sys.argv[1] == "undo":
undoTodo(sys.argv[2])
elif sys.argv[1] == "remove" or sys.argv[1] == "delete" or sys.argv[1] == "del" or sys.argv[1] == "rm":
if len(sys.argv) < 3:
print "You must specify a todo ID to remove."
else:
removeTodo(sys.argv[2])
elif sys.argv[1] == "show" or sys.argv[1] == "list":
showTodos()
elif sys.argv[1] == "help":
usage()
elif sys.argv[1] == "purge":
ans = raw_input("Are you sure you want to delete and remove all traces of todos? (y/n): ")
if ans == 'y':
if os.path.isfile(TODO_FILENAME):
os.remove(str(TODO_FILENAME))
print "Removed todo file"
        else:
            print "No todo file to delete"
else:
print "Aborting deletion"
else:
print "Unknown operation: " + sys.argv[1]
usage()
########################################################################
# Cleanup and exit
########################################################################
|
from functools import partial
from django.conf import settings
from django.core.cache import cache
from django.db.models import Prefetch as _Prefetch
from django.urls import reverse
from pythonpro.cohorts.models import Cohort as _Cohort, CohortStudent, LiveClass as _LiveClass, Webinar as _Webinar
__all__ = [
    'get_all_cohorts_desc',
    'find_cohort',
    'find_most_recent_cohort',
    'calculate_most_recent_cohort_path',
    'find_webinars',
    'find_recorded_webinars',
    'find_webinar',
    'find_live_class',
    'subscribe_to_last_cohort',
]
def get_all_cohorts_desc():
lazy_all_cohorts = partial(tuple, _Cohort.objects.order_by('-start'))
return cache.get_or_set('ALL_COHORTS', lazy_all_cohorts, settings.CACHE_TTL)
def find_cohort(slug):
return _Cohort.objects.filter(slug=slug).prefetch_related(
_Prefetch(
'liveclass_set',
queryset=_LiveClass.objects.order_by('start'),
to_attr='classes'
)
).prefetch_related(
_Prefetch(
'webinar_set',
queryset=_Webinar.objects.order_by('start'),
to_attr='webinars'
)
).get()
def find_most_recent_cohort():
return _Cohort.objects.order_by('-start').first()
def calculate_most_recent_cohort_path() -> str:
slug_dct = _Cohort.objects.order_by('-start').values('slug').first()
return reverse('modules:detail', kwargs=slug_dct)
def find_webinars():
"""
Retrieve Webinars from database ordered by date desc
:return: Tuple of webinars
"""
return tuple(_Webinar.objects.order_by('-start'))
def find_recorded_webinars():
"""
Retrieve recorded Webinars from database ordered by date desc.
    A recorded Webinar has a non-empty vimeo_id
:return: Tuple of webinars
"""
return tuple(_Webinar.objects.order_by('-start').exclude(vimeo_id__exact=''))
def find_webinar(slug):
"""
Retrieve Webinar by its slug
:return: Webinar
"""
return _Webinar.objects.filter(slug=slug).get()
def find_live_class(pk):
"""
Find Live Class by its PK, selecting related cohort
    :param pk: primary key of the LiveClass
    :return: LiveClass with its related cohort pre-selected
"""
return _LiveClass.objects.select_related('cohort').get(pk=pk)
def subscribe_to_last_cohort(user):
ch = CohortStudent(user=user, cohort=find_most_recent_cohort())
ch.save()
return ch
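# Usage sketch (the import path below is hypothetical; adjust to where this
# module actually lives):
#
#   from pythonpro.cohorts import facade
#   cohorts = facade.get_all_cohorts_desc()  # first call hits the DB; later
#                                            # calls are served from the
#                                            # 'ALL_COHORTS' cache for CACHE_TTL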
|
# encoding: utf-8
#
# Run snmp_temper.py as a pass-persist module for NetSNMP.
# See README.md for instructions.
#
# Copyright 2012-2014 Philipp Adelt <[email protected]>
#
# This code is licensed under the GNU public license (GPL). See LICENSE.md for details.
import os
import sys
import syslog
import threading
import snmp_passpersist as snmp
from temperusb.temper import TemperHandler, TemperDevice
ERROR_TEMPERATURE = 9999
def _unbuffered_handle(fd):
return os.fdopen(fd.fileno(), 'w', 0)
class LogWriter():
def __init__(self, ident='temper-python', facility=syslog.LOG_DAEMON):
syslog.openlog(ident, 0, facility)
def write_log(self, message, prio=syslog.LOG_INFO):
syslog.syslog(prio, message)
class Updater():
def __init__(self, pp, logger, testmode=False):
self.logger = logger
self.pp = pp
self.testmode = testmode
self.usb_lock = threading.Lock() # used to stop reinitialization interfering with update-thread
self._initialize()
def _initialize(self):
with self.usb_lock:
try:
self.th = TemperHandler()
self.devs = self.th.get_devices()
self.logger.write_log('Found %i thermometer devices.' % len(self.devs))
for i, d in enumerate(self.devs):
                    self.logger.write_log('Initial temperature of device #%i: %0.1f degrees Celsius' % (i, d.get_temperature()))
except Exception as e:
self.logger.write_log('Exception while initializing: %s' % str(e))
def _reinitialize(self):
# Tries to close all known devices and starts over.
self.logger.write_log('Reinitializing devices')
with self.usb_lock:
for i,d in enumerate(self.devs):
try:
d.close()
except Exception as e:
self.logger.write_log('Exception closing device #%i: %s' % (i, str(e)))
self._initialize()
def update(self):
if self.testmode:
# APC Internal/Battery Temperature
self.pp.add_int('318.1.1.1.2.2.2.0', 99)
# Cisco devices temperature OIDs
self.pp.add_int('9.9.13.1.3.1.3.1', 97)
self.pp.add_int('9.9.13.1.3.1.3.2', 98)
self.pp.add_int('9.9.13.1.3.1.3.3', 99)
else:
try:
with self.usb_lock:
temperatures = [d.get_temperature() for d in self.devs]
self.pp.add_int('318.1.1.1.2.2.2.0', int(max(temperatures)))
for i, temperature in enumerate(temperatures[:3]): # use max. first 3 devices
self.pp.add_int('9.9.13.1.3.1.3.%i' % (i+1), int(temperature))
except Exception as e:
self.logger.write_log('Exception while updating data: %s' % str(e))
# Report an exceptionally large temperature to set off all alarms.
# snmp_passpersist does not expose an API to remove an OID.
for oid in ('318.1.1.1.2.2.2.0', '9.9.13.1.3.1.3.1', '9.9.13.1.3.1.3.2', '9.9.13.1.3.1.3.3'):
self.pp.add_int(oid, ERROR_TEMPERATURE)
self.logger.write_log('Starting reinitialize after error on update')
self._reinitialize()
def main():
sys.stdout = _unbuffered_handle(sys.stdout)
pp = snmp.PassPersist(".1.3.6.1.4.1")
logger = LogWriter()
upd = Updater(pp, logger, testmode=('--testmode' in sys.argv))
pp.start(upd.update, 5) # update every 5s
if __name__ == '__main__':
main()
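# A hypothetical snmpd.conf entry wiring this up as a pass-persist module
# (interpreter and script paths are illustrative):
#
#   pass_persist .1.3.6.1.4.1 /usr/bin/python /usr/local/bin/snmp_temper.py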
|
from pathlib import Path
from qgis.PyQt import uic
from qgis.PyQt.QtWidgets import QAbstractItemView
from qgis.PyQt.QtWidgets import QDialog
from qgis.PyQt.QtWidgets import QPushButton
from qgis.PyQt.QtWidgets import QTableWidget
from qgis.PyQt.QtWidgets import QTableWidgetItem
from qgis.PyQt.QtWidgets import QVBoxLayout
from qgis.PyQt.QtWidgets import QWidget
from ThreeDiToolbox.tool_commands.control_structures.main import ControlledStructures
from ThreeDiToolbox.utils.threedi_database import get_database_properties
from ThreeDiToolbox.utils.threedi_database import get_databases
import logging
logger = logging.getLogger(__name__)
ui_file = Path(__file__).parent / "create_measuring_group_dialog.ui"
assert ui_file.is_file()
FORM_CLASS, _ = uic.loadUiType(ui_file)
class CreateMeasuringGroupDialogWidget(QDialog, FORM_CLASS):
def __init__(
self,
parent=None,
command=None,
db_key=None,
measuring_group_id=None,
dockwidget_controlled_structures=None,
):
"""Constructor
# TODO: fix arguments documentation.
Args:
parent: Qt parent Widget
iface: QGiS interface
command: Command instance with a run_it method which will be called
on acceptance of the dialog
"""
super().__init__(parent)
# Show gui
self.setupUi(self)
self.measuring_group_id = measuring_group_id
self.command = command
self.dockwidget_controlled_structures = dockwidget_controlled_structures
self.db_key = db_key
self.databases = get_databases()
self.db = get_database_properties(self.db_key)
self.control_structure = ControlledStructures(
flavor=self.db["db_entry"]["db_type"]
)
self.setup_tablewidget()
self.update_ids()
self.connect_signals()
def on_accept(self):
"""Accept and run the Command.run_it method."""
self.save_measuring_group()
self.accept()
def on_reject(self):
"""Cancel"""
self.reject()
logger.debug("Reject")
def closeEvent(self, event):
"""
Close widget, called by Qt on close
:param event: QEvent, close event
"""
self.buttonbox.accepted.disconnect(self.on_accept)
self.buttonbox.rejected.disconnect(self.on_reject)
event.accept()
def setup_tablewidget(self):
tablewidget = self.tablewidget_measuring_point
tablewidget.setCellWidget(0, 0, self.combobox_input_measuring_point_table)
tablewidget.setCellWidget(0, 1, self.combobox_input_measuring_point_id)
tablewidget.setCellWidget(0, 3, self.pushbutton_input_measuring_point_new)
def update_ids(self):
"""Setup the id's for the measuring group and measuring points."""
# Set the id of the measuring group
self.label_measuring_group_id_info.setText(self.measuring_group_id)
self.control_structure.start_sqalchemy_engine(self.db["db_settings"])
# Set all id's of the measuring groups
self.combobox_measuring_group_load.clear()
list_of_measuring_group_ids = self.control_structure.get_attributes(
table_name="v2_control_measure_group", attribute_name="id"
)
self.combobox_measuring_group_load.addItems(list_of_measuring_group_ids)
# Set all id's of the connection nodes
self.combobox_input_measuring_point_id.clear()
list_of_connection_node_ids = self.control_structure.get_attributes(
table_name="v2_connection_nodes", attribute_name="id"
)
self.combobox_input_measuring_point_id.addItems(list_of_connection_node_ids)
def connect_signals(self):
"""Connect the signals."""
self.pushbutton_measuring_group_load.clicked.connect(self.load_measuring_group)
self.pushbutton_input_measuring_point_new.clicked.connect(
self.create_new_measuring_point
)
self.buttonbox.accepted.connect(self.on_accept)
self.buttonbox.rejected.connect(self.on_reject)
def create_new_measuring_point(self):
# Get the model
self.control_structure.start_sqalchemy_engine(self.db["db_settings"])
# Get last id of measure map or set to 0; set to +1
table_name = "v2_control_measure_map"
attribute_name = "MAX(id)"
try:
max_id_measure_map = int(
self.control_structure.get_attributes(table_name, attribute_name)[0]
)
        except (TypeError, ValueError):  # MAX(id) is None/unparsable on an empty table
logger.exception("Error determining max id, using 0")
max_id_measure_map = 0
new_max_id_measure_map = max_id_measure_map + 1
# Populate the new row in the table
self.populate_measuring_point_row(new_max_id_measure_map)
def populate_measuring_point_row(self, id_measuring_point):
"""
Populate a row from the measuring point table.
Args:
(str) id_measuring_point: The id of the measuring point."""
tablewidget = self.tablewidget_measuring_point
# Always put the new row on top.
row_position = 1
tablewidget.insertRow(row_position)
# tablewidget.setItem(row_position, 0, measuring_point_id)
measuring_point_table_widget = QTableWidgetItem(
self.combobox_input_measuring_point_table.currentText()
)
tablewidget.setItem(row_position, 0, measuring_point_table_widget)
measuring_point_table_id_widget = QTableWidgetItem(
self.combobox_input_measuring_point_id.currentText()
)
tablewidget.setItem(row_position, 1, measuring_point_table_id_widget)
try:
measuring_point_weight = tablewidget.item(0, 2).text()
except AttributeError:
            logger.exception(
                "Error determining measuring point weight, using empty string"
            )
measuring_point_weight = ""
tablewidget.setItem(row_position, 2, QTableWidgetItem(measuring_point_weight))
measuring_point_remove_widget = QPushButton("Remove")
measuring_point_remove_widget.clicked.connect(self.remove_measuring_point_row)
tablewidget.setCellWidget(row_position, 3, measuring_point_remove_widget)
def remove_measuring_point_row(self):
"""Remove a row from the measuring point table."""
tablewidget = self.tablewidget_measuring_point
row_number = tablewidget.currentRow()
# Remove measuring point from dockwidget
# Don't remove the first row.
BUTTON_ROW = 0
if row_number != BUTTON_ROW:
tablewidget.removeRow(row_number)
def load_measuring_group(self):
"""Load a measuring group in the tablewidget."""
        # Remove all current rows, except the first.
tablewidget = self.tablewidget_measuring_point
row_count = tablewidget.rowCount()
for row in range(row_count - 1):
tablewidget.removeRow(1)
self.control_structure.start_sqalchemy_engine(self.db["db_settings"])
# Get all the measuring points from a certain measure group
table_name = "v2_control_measure_map"
attribute_name = "*"
where_clause = "measure_group_id={}".format(
self.combobox_measuring_group_load.currentText()
)
measure_groups = self.control_structure.get_features_with_where_clause(
table_name=table_name, attribute_name=attribute_name, where=where_clause
)
for measure_group in measure_groups:
row_position = self.tablewidget_measuring_point.rowCount()
self.tablewidget_measuring_point.insertRow(row_position)
self.tablewidget_measuring_point.setItem(
row_position, 0, QTableWidgetItem(str(measure_group[2]))
)
self.tablewidget_measuring_point.setItem(
row_position, 1, QTableWidgetItem(str(measure_group[3]))
)
self.tablewidget_measuring_point.setItem(
row_position, 2, QTableWidgetItem(str(measure_group[4]))
)
measuring_point_remove = QPushButton("Remove")
measuring_point_remove.clicked.connect(self.remove_measuring_point)
self.tablewidget_measuring_point.setCellWidget(
row_position, 3, measuring_point_remove
)
def remove_measuring_point(self):
"""Remove a certain measuring point from the tablewidget."""
tablewidget = self.tablewidget_measuring_point
row_number = tablewidget.currentRow()
tablewidget.removeRow(row_number)
def save_measuring_group(self):
"""Save the measuring group in the database."""
self.control_structure.start_sqalchemy_engine(self.db["db_settings"])
# Insert the measuring group in the v2_control_measure_group
table_name = "v2_control_measure_group"
attributes = {"id": self.measuring_group_id}
self.control_structure.insert_into_table(
table_name=table_name, attributes=attributes
)
# Create a tab in the tabwidget of the 'Measuring group' tab in
# the controlled structures dockwidget
self.add_measuring_group_tab_dockwidget()
table_name = "v2_control_measure_map"
BUTTON_ROW = 0
for row in range(self.tablewidget_measuring_point.rowCount()):
if row != BUTTON_ROW:
# Get the new measuring_point id
attribute_name = "MAX(id)"
try:
max_id_measure_point = int(
self.control_structure.get_attributes(
table_name, attribute_name
)[0]
)
                except (TypeError, ValueError):  # MAX(id) is None/unparsable on an empty table
logger.exception("Error determining max measure point id, using 0")
max_id_measure_point = 0
new_measuring_point_id = max_id_measure_point + 1
measure_point_attributes = self.get_measuring_point_attributes(
row, new_measuring_point_id
)
# Save the measuring point in the v2_control_measure_map
self.control_structure.insert_into_table(
table_name, measure_point_attributes
)
# Setup new tab of "Measuring group" tab
self.setup_measuring_group_table_dockwidget(measure_point_attributes)
def add_measuring_group_tab_dockwidget(self):
"""
Create a tab for the measure group within the Measure group tab
in the dockwidget.
"""
tab = QWidget()
layout = QVBoxLayout(tab)
tab.setLayout(layout)
table_measuring_group = QTableWidget(tab)
table_measuring_group.setGeometry(10, 10, 741, 266)
table_measuring_group.insertColumn(0)
table_measuring_group.setHorizontalHeaderItem(0, QTableWidgetItem("table"))
table_measuring_group.insertColumn(1)
table_measuring_group.setHorizontalHeaderItem(1, QTableWidgetItem("table_id"))
table_measuring_group.insertColumn(2)
table_measuring_group.setHorizontalHeaderItem(2, QTableWidgetItem("weight"))
table_measuring_group.setEditTriggers(QAbstractItemView.NoEditTriggers)
self.dockwidget_controlled_structures.table_measuring_group = (
table_measuring_group
)
# Add the tab to the left
self.dockwidget_controlled_structures.tab_measuring_group_view_2.insertTab(
0, tab, "Group: {}".format(str(self.label_measuring_group_id_info.text()))
)
def get_measuring_point_attributes(self, row_nr, new_measuring_point_id):
"""
Get the attributes of the measuring point from the table.
Args:
(int) row_nr: The row number of the tablewidget.
(int) new_measuring_point_id: The id of the new measuring point.
Returns:
(dict) attributes: A list containing the attributes
of the measuring point.
"""
measuring_point_table = self.tablewidget_measuring_point.item(row_nr, 0).text()
try:
measuring_point_table_id = self.tablewidget_measuring_point.item(
row_nr, 1
).text()
except AttributeError:
# TODO: I've seen this measuring_point_table_id try/except
# before. Can it be unified?
logger.exception(
"Error grabbing measuring point table id, using current text"
)
measuring_point_table_id = self.tablewidget_measuring_point.cellWidget(
row_nr, 1
).currentText()
try:
measuring_point_weight = self.tablewidget_measuring_point.item(
row_nr, 2
).text()
except AttributeError:
logger.exception(
"Error grabbing measuring point weight, using empty string"
)
measuring_point_weight = ""
attributes = {
"id": new_measuring_point_id,
"measure_group_id": self.measuring_group_id,
"object_type": measuring_point_table,
"object_id": measuring_point_table_id,
"weight": measuring_point_weight,
}
return attributes
def setup_measuring_group_table_dockwidget(self, measure_map_attributes):
"""
Setup a tab for the measure group in the Measure group tab
in the dockwidget.
Args:
(dict) measure_map_attributes: A dict containing the attributes
from the measuring point (from v2_control_measure_map).
"""
row_position = (
self.dockwidget_controlled_structures.table_measuring_group.rowCount()
)
self.dockwidget_controlled_structures.table_measuring_group.insertRow(
row_position
)
self.dockwidget_controlled_structures.table_measuring_group.setItem(
row_position, 0, QTableWidgetItem("v2_connection_nodes")
)
self.dockwidget_controlled_structures.table_measuring_group.setItem(
row_position, 1, QTableWidgetItem(measure_map_attributes["object_id"])
)
self.dockwidget_controlled_structures.table_measuring_group.setItem(
row_position, 2, QTableWidgetItem(measure_map_attributes["weight"])
)
|
#!/usr/bin/env python
import itertools as it
import sys
import progressbar as pb
from time import time, ctime
# The generator itself
def generator(string,minLen,maxLen,prevCount):
count = 0
bar = pb.ProgressBar(maxval = prevCount).start()
# This for loops from the min length to the max
for length in range(minLen,maxLen+1):
# This for computes each combination and writes it into the file
for perm in it.product(string, repeat=length):
outFile.write(str(''.join(perm)))
outFile.write('\n')
count += 1
bar.update(count)
# The main function. It shows the number of lines
# that will be created, as well as the filesize.
# It also calls the generator and shows the result in the screen
def main():
# var declarations
global outputFile, outFile
prevCount = 0
prevSize = 0
    # Calculates the preview numbers
    for ln in range(minLen, maxLen+1):
        lineCount = len(string)**ln
        prevCount += lineCount
        prevSize += lineCount * (ln + 1)  # +1 byte per line for the newline
    # Filesize in MB, GB, TB and PB
    mByte = prevSize / (1024.0**2)  # float division: this is Python 2
gByte = mByte / 1024
tByte = gByte / 1024
pByte = tByte / 1024
print("Attention!")
print("Size in MB: %.2f" % mByte)
print("Size in GB: %.2f" % gByte)
print("Size in TB: %.2f" % tByte)
print("Size in PB: %.2f" % pByte)
print("\ndicgen is about to generate a file with %i lines." % prevCount)
while True:
# Confirmation
proceed = raw_input('Are you sure you want to proceed?\n[Y]es [N]o: ')
if proceed.lower() == 'y' or proceed.lower() == 'yes':
print("Initiating operation...")
outFile = open(outputFile,'w')
startTime = time()
print("Start time: %s" % ctime(startTime))
generator(string,minLen,maxLen,prevCount)
endTime = time()
print("Done.\nEnd time: %s" % ctime(endTime))
print("Total operation time: %.2f seconds." % (float(endTime - startTime)))
print("The file %s is ready to be used." % outputFile)
break
elif proceed.lower() == 'n' or proceed.lower() == 'no':
print('Aborting...')
break
else:
print('Please, type yes or no.')
if __name__ == "__main__":
try:
string = sys.argv[1]
minLen = int(sys.argv[2])
maxLen = int(sys.argv[3])
outputFile = sys.argv[4]
    except (IndexError, ValueError):
print("Usage: python dicgen.py <characters> <min-range> <max-range> <output-file>")
sys.exit(1)
main()
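# Worked example: python dicgen.py abc 1 3 out.txt previews
#   3**1 + 3**2 + 3**3 = 39 lines and
#   3*2 + 9*3 + 27*4 = 141 bytes (each line plus its newline).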
|
"""
raven.transport.builtins
~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2010-2012 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
import logging
import sys
import urllib2
from raven.utils import all
try:
# Google App Engine blacklists parts of the socket module, this will prevent
# it from blowing up.
from socket import socket, AF_INET, SOCK_DGRAM, error as socket_error
has_socket = True
except ImportError:
    has_socket = False
try:
import gevent
# gevent 1.0bN renamed coros to lock
try:
from gevent.lock import Semaphore
except ImportError:
from gevent.coros import Semaphore # NOQA
has_gevent = True
except ImportError:
    has_gevent = False
try:
import twisted.web.client
import twisted.internet.protocol
has_twisted = True
except ImportError:
    has_twisted = False
try:
from tornado import ioloop
from tornado.httpclient import AsyncHTTPClient, HTTPClient
has_tornado = True
except ImportError:
    has_tornado = False
try:
import eventlet
from eventlet.green import urllib2 as eventlet_urllib2
has_eventlet = True
except ImportError:
    has_eventlet = False
from raven.conf import defaults
from raven.transport.exceptions import InvalidScheme
class Transport(object):
"""
All transport implementations need to subclass this class
You must implement a send method and the compute_scope method.
Please see the HTTPTransport class for an example of a
compute_scope implementation.
"""
def check_scheme(self, url):
if url.scheme not in self.scheme:
raise InvalidScheme()
def send(self, data, headers):
"""
You need to override this to do something with the actual
data. Usually - this is sending to a server
"""
raise NotImplementedError
def compute_scope(self, url, scope):
"""
You need to override this to compute the SENTRY specific
additions to the variable scope. See the HTTPTransport for an
example.
"""
raise NotImplementedError
class BaseUDPTransport(Transport):
def __init__(self, parsed_url):
super(BaseUDPTransport, self).__init__()
self.check_scheme(parsed_url)
self._parsed_url = parsed_url
def send(self, data, headers):
auth_header = headers.get('X-Sentry-Auth')
if auth_header is None:
# silently ignore attempts to send messages without an auth header
return
host, port = self._parsed_url.netloc.split(':')
self._send_data(auth_header + '\n\n' + data, (host, int(port)))
def compute_scope(self, url, scope):
path_bits = url.path.rsplit('/', 1)
if len(path_bits) > 1:
path = path_bits[0]
else:
path = ''
project = path_bits[-1]
if not all([url.port, project, url.username, url.password]):
raise ValueError('Invalid Sentry DSN: %r' % url.geturl())
netloc = url.hostname
netloc += ':%s' % url.port
server = '%s://%s%s/api/store/' % (url.scheme, netloc, path)
scope.update({
'SENTRY_SERVERS': [server],
'SENTRY_PROJECT': project,
'SENTRY_PUBLIC_KEY': url.username,
'SENTRY_SECRET_KEY': url.password,
})
return scope
class UDPTransport(BaseUDPTransport):
scheme = ['udp']
def __init__(self, parsed_url):
super(UDPTransport, self).__init__(parsed_url)
if not has_socket:
raise ImportError('UDPTransport requires the socket module')
def _send_data(self, data, addr):
udp_socket = None
try:
try:
udp_socket = socket(AF_INET, SOCK_DGRAM)
udp_socket.setblocking(False)
udp_socket.sendto(data, addr)
except socket_error:
# as far as I understand things this simply can't happen,
# but still, it can't hurt
pass
finally:
# Always close up the socket when we're done
if udp_socket is not None:
udp_socket.close()
udp_socket = None
class HTTPTransport(Transport):
scheme = ['http', 'https']
def __init__(self, parsed_url, timeout=defaults.TIMEOUT):
self.check_scheme(parsed_url)
self._parsed_url = parsed_url
self._url = parsed_url.geturl()
self.timeout = timeout
def send(self, data, headers):
"""
Sends a request to a remote webserver using HTTP POST.
"""
req = urllib2.Request(self._url, headers=headers)
if sys.version_info < (2, 6):
response = urllib2.urlopen(req, data).read()
else:
response = urllib2.urlopen(req, data, self.timeout).read()
return response
def compute_scope(self, url, scope):
netloc = url.hostname
if url.port and (url.scheme, url.port) not in \
(('http', 80), ('https', 443)):
netloc += ':%s' % url.port
path_bits = url.path.rsplit('/', 1)
if len(path_bits) > 1:
path = path_bits[0]
else:
path = ''
project = path_bits[-1]
if not all([netloc, project, url.username, url.password]):
raise ValueError('Invalid Sentry DSN: %r' % url.geturl())
server = '%s://%s%s/api/store/' % (url.scheme, netloc, path)
scope.update({
'SENTRY_SERVERS': [server],
'SENTRY_PROJECT': project,
'SENTRY_PUBLIC_KEY': url.username,
'SENTRY_SECRET_KEY': url.password,
})
return scope
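    # Worked example (hypothetical DSN): for
    #   http://public:secret@example.com:9000/2
    # compute_scope() fills the scope with
    #   SENTRY_SERVERS    = ['http://example.com:9000/api/store/']
    #   SENTRY_PROJECT    = '2'
    #   SENTRY_PUBLIC_KEY = 'public'
    #   SENTRY_SECRET_KEY = 'secret'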
class GeventedHTTPTransport(HTTPTransport):
scheme = ['gevent+http', 'gevent+https']
def __init__(self, parsed_url, maximum_outstanding_requests=100):
if not has_gevent:
raise ImportError('GeventedHTTPTransport requires gevent.')
self._lock = Semaphore(maximum_outstanding_requests)
super(GeventedHTTPTransport, self).__init__(parsed_url)
# remove the gevent+ from the protocol, as it is not a real protocol
self._url = self._url.split('+', 1)[-1]
def send(self, data, headers):
"""
Spawn an async request to a remote webserver.
"""
# this can be optimized by making a custom self.send that does not
# read the response since we don't use it.
self._lock.acquire()
return gevent.spawn(super(GeventedHTTPTransport, self).send, data, headers).link(self._done, self)
def _done(self, *args):
self._lock.release()
class TwistedHTTPTransport(HTTPTransport):
scheme = ['twisted+http', 'twisted+https']
def __init__(self, parsed_url):
if not has_twisted:
raise ImportError('TwistedHTTPTransport requires twisted.web.')
super(TwistedHTTPTransport, self).__init__(parsed_url)
self.logger = logging.getLogger('sentry.errors')
# remove the twisted+ from the protocol, as it is not a real protocol
self._url = self._url.split('+', 1)[-1]
def send(self, data, headers):
d = twisted.web.client.getPage(self._url, method='POST', postdata=data, headers=headers)
d.addErrback(lambda f: self.logger.error(
'Cannot send error to sentry: %s', f.getTraceback()))
class TwistedUDPTransport(BaseUDPTransport):
scheme = ['twisted+udp']
def __init__(self, parsed_url):
super(TwistedUDPTransport, self).__init__(parsed_url)
if not has_twisted:
raise ImportError('TwistedUDPTransport requires twisted.')
self.protocol = twisted.internet.protocol.DatagramProtocol()
twisted.internet.reactor.listenUDP(0, self.protocol)
def _send_data(self, data, addr):
self.protocol.transport.write(data, addr)
class TornadoHTTPTransport(HTTPTransport):
scheme = ['tornado+http']
def __init__(self, parsed_url):
if not has_tornado:
raise ImportError('TornadoHTTPTransport requires tornado.')
super(TornadoHTTPTransport, self).__init__(parsed_url)
# remove the tornado+ from the protocol, as it is not a real protocol
self._url = self._url.split('+', 1)[-1]
def send(self, data, headers):
kwargs = dict(method='POST', headers=headers, body=data)
# only use async if ioloop is running, otherwise it will never send
if ioloop.IOLoop.initialized():
client = AsyncHTTPClient()
kwargs['callback'] = None
else:
client = HTTPClient()
client.fetch(self._url, **kwargs)
class EventletHTTPTransport(HTTPTransport):
scheme = ['eventlet+http', 'eventlet+https']
def __init__(self, parsed_url, pool_size=100):
if not has_eventlet:
raise ImportError('EventletHTTPTransport requires eventlet.')
super(EventletHTTPTransport, self).__init__(parsed_url)
# remove the eventlet+ from the protocol, as it is not a real protocol
self._url = self._url.split('+', 1)[-1]
def _send_payload(self, payload):
req = eventlet_urllib2.Request(self._url, headers=payload[1])
try:
if sys.version_info < (2, 6):
response = eventlet_urllib2.urlopen(req, payload[0]).read()
else:
response = eventlet_urllib2.urlopen(req, payload[0],
self.timeout).read()
return response
except Exception, err:
return err
def send(self, data, headers):
"""
Spawn an async request to a remote webserver.
"""
eventlet.spawn(self._send_payload, (data, headers))
|
""" Cisco_IOS_XR_aaa_locald_cfg
This module contains a collection of YANG definitions
for Cisco IOS\-XR aaa\-locald package configuration.
This YANG module augments the
Cisco\-IOS\-XR\-aaa\-lib\-cfg
module with configuration data.
Copyright (c) 2013\-2015 by Cisco Systems, Inc.
All rights reserved.
"""
import re
import collections
from enum import Enum
from ydk.types import Empty, YList, YLeafList, DELETE, Decimal64, FixedBitsDict
from ydk.errors import YPYError, YPYModelError
class AaaLocaldTaskClassEnum(Enum):
"""
AaaLocaldTaskClassEnum
Aaa locald task class
.. data:: READ = 0
Permits read operation for a Task ID
.. data:: WRITE = 1
Permits write operation for a Task ID
.. data:: EXECUTE = 2
Permits execute operation for a Task ID
.. data:: DEBUG = 3
Permits debug operation for a Task ID
"""
READ = 0
WRITE = 1
EXECUTE = 2
DEBUG = 3
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_aaa_locald_cfg as meta
return meta._meta_table['AaaLocaldTaskClassEnum']
|
import datetime
from copy import copy
from syscore.objects import missing_order
from sysexecution.order_stacks.order_stack import orderStackData, missingOrder
from sysexecution.trade_qty import tradeQuantity
from sysexecution.orders.contract_orders import contractOrder
class contractOrderStackData(orderStackData):
def _name(self):
return "Contract order stack"
def add_controlling_algo_ref(self, order_id: int,
control_algo_ref: str):
"""
:param order_id: int
:param control_algo_ref: str or None
:return:
"""
if control_algo_ref is None:
return self.release_order_from_algo_control(order_id)
existing_order = self.get_order_with_id_from_stack(order_id)
if existing_order is missing_order:
            error_msg = "Can't add controlling algo as order %d doesn't exist" % order_id
self.log.warn(error_msg)
raise missingOrder(error_msg)
try:
modified_order = copy(existing_order)
modified_order.add_controlling_algo_ref(control_algo_ref)
self._change_order_on_stack(order_id, modified_order)
except Exception as e:
log = existing_order.log_with_attributes(self.log)
error_msg = "%s couldn't add controlling algo %s to order %d" % \
(str(e), control_algo_ref, order_id)
log.warn(error_msg)
raise Exception(error_msg)
def release_order_from_algo_control(self, order_id: int):
existing_order = self.get_order_with_id_from_stack(order_id)
if existing_order is missing_order:
            error_msg = "Can't release order from algo control as order %d doesn't exist" % order_id
self.log.warn(error_msg)
raise missingOrder(error_msg)
order_is_not_controlled = not existing_order.is_order_controlled_by_algo()
if order_is_not_controlled:
# No change required
return None
try:
modified_order = copy(existing_order)
modified_order.release_order_from_algo_control()
self._change_order_on_stack(order_id, modified_order)
except Exception as e:
log = existing_order.log_with_attributes(self.log)
error_msg = "%s couldn't remove controlling algo from order %d" % \
(str(e), order_id)
log.warn(error_msg)
raise Exception(error_msg)
def get_order_with_id_from_stack(self, order_id: int) -> contractOrder:
        # probably will be overridden in data implementation
# only here so the appropriate type is shown as being returned
order = self.stack.get(order_id, missing_order)
return order
|
# vim: sw=4:expandtab:foldmethod=marker
#
# Copyright (c) 2006, Mathieu Fenniak
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * The name of the author may not be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""
Implementation of generic PDF objects (dictionary, number, string, and so on)
"""
__author__ = "Mathieu Fenniak"
__author_email__ = "[email protected]"
import re
from .utils import readNonWhitespace, RC4_encrypt, skipOverComment
from .utils import b_, u_, chr_, ord_
from .utils import PdfStreamError
import warnings
from . import filters
from . import utils
import decimal
import codecs
import sys
from sys import version_info
if version_info < ( 2, 5 ):
from md5 import md5
else:
from hashlib import md5
ObjectPrefix = b_('/<[tf(n%')
NumberSigns = b_('+-')
IndirectPattern = re.compile(b_(r"[+-]?(\d+)\s+(\d+)\s+R[^a-zA-Z]"))
def readObject(stream, pdf):
tok = stream.read(1)
stream.seek(-1, 1) # reset to start
idx = ObjectPrefix.find(tok)
if idx == 0:
# name object
return NameObject.readFromStream(stream, pdf)
elif idx == 1:
# hexadecimal string OR dictionary
peek = stream.read(2)
stream.seek(-2, 1) # reset to start
if peek == b_('<<'):
return DictionaryObject.readFromStream(stream, pdf)
else:
return readHexStringFromStream(stream)
elif idx == 2:
# array object
return ArrayObject.readFromStream(stream, pdf)
elif idx == 3 or idx == 4:
# boolean object
return BooleanObject.readFromStream(stream)
elif idx == 5:
# string object
return readStringFromStream(stream)
elif idx == 6:
# null object
return NullObject.readFromStream(stream)
elif idx == 7:
# comment
while tok not in (b_('\r'), b_('\n')):
tok = stream.read(1)
# Prevents an infinite loop by raising an error if the stream is at
# the EOF
if len(tok) <= 0:
raise PdfStreamError("File ended unexpectedly.")
tok = readNonWhitespace(stream)
stream.seek(-1, 1)
return readObject(stream, pdf)
else:
# number object OR indirect reference
peek = stream.read(20)
stream.seek(-len(peek), 1) # reset to start
if IndirectPattern.match(peek) != None:
return IndirectObject.readFromStream(stream, pdf)
else:
return NumberObject.readFromStream(stream)
class PdfObject(object):
def getObject(self):
"""Resolves indirect references."""
return self
def hashValue(self):
"""Return hash for deduplication or None"""
return None
class NullObject(PdfObject):
def writeToStream(self, stream, encryption_key):
stream.write(b_("null"))
def readFromStream(stream):
nulltxt = stream.read(4)
if nulltxt != b_("null"):
raise utils.PdfReadError("Could not read Null object")
return NullObject()
readFromStream = staticmethod(readFromStream)
class BooleanObject(PdfObject):
def __init__(self, value):
self.value = value
def writeToStream(self, stream, encryption_key):
if self.value:
stream.write(b_("true"))
else:
stream.write(b_("false"))
def readFromStream(stream):
word = stream.read(4)
if word == b_("true"):
return BooleanObject(True)
elif word == b_("fals"):
stream.read(1)
return BooleanObject(False)
else:
raise utils.PdfReadError('Could not read Boolean object')
readFromStream = staticmethod(readFromStream)
class ArrayObject(list, PdfObject):
def hashValue(self):
md5Hash = md5()
for value in self:
if isinstance(value, PdfObject):
subHash = value.hashValue()
if subHash is not None:
md5Hash.update(subHash)
else:
md5Hash.update(value)
return "ArrayObject:" + md5Hash.hexdigest()
def writeToStream(self, stream, encryption_key):
stream.write(b_("["))
for data in self:
stream.write(b_(" "))
data.writeToStream(stream, encryption_key)
stream.write(b_(" ]"))
def readFromStream(stream, pdf):
arr = ArrayObject()
tmp = stream.read(1)
if tmp != b_("["):
raise utils.PdfReadError("Could not read array")
while True:
# skip leading whitespace
tok = stream.read(1)
while tok.isspace():
tok = stream.read(1)
stream.seek(-1, 1)
# check for array ending
peekahead = stream.read(1)
if peekahead == b_("]"):
break
stream.seek(-1, 1)
# read and append obj
arr.append(readObject(stream, pdf))
return arr
readFromStream = staticmethod(readFromStream)
class IndirectObject(PdfObject):
def __init__(self, idnum, generation, pdf):
self.idnum = idnum
self.generation = generation
self.pdf = pdf
def getObject(self):
return self.pdf.getObject(self).getObject()
def hashValue(self):
return "IndirectObject<%s>:(%r,%r)" % (hex(id(self.pdf)), self.idnum, self.generation)
def __repr__(self):
return "IndirectObject(%r, %r)" % (self.idnum, self.generation)
def __eq__(self, other):
return (
other != None and
isinstance(other, IndirectObject) and
self.idnum == other.idnum and
self.generation == other.generation and
self.pdf is other.pdf
)
def __ne__(self, other):
return not self.__eq__(other)
def writeToStream(self, stream, encryption_key):
stream.write(b_("%s %s R" % (self.idnum, self.generation)))
def readFromStream(stream, pdf):
idnum = b_("")
while True:
tok = stream.read(1)
if not tok:
# stream has truncated prematurely
raise PdfStreamError("Stream has ended unexpectedly")
if tok.isspace():
break
idnum += tok
generation = b_("")
while True:
tok = stream.read(1)
if not tok:
# stream has truncated prematurely
raise PdfStreamError("Stream has ended unexpectedly")
if tok.isspace():
if not generation:
continue
break
generation += tok
r = readNonWhitespace(stream)
if r != b_("R"):
raise utils.PdfReadError("Error reading indirect object reference at byte %s" % utils.hexStr(stream.tell()))
return IndirectObject(int(idnum), int(generation), pdf)
readFromStream = staticmethod(readFromStream)
class FloatObject(decimal.Decimal, PdfObject):
def __new__(cls, value="0", context=None):
try:
return decimal.Decimal.__new__(cls, utils.str_(value), context)
except:
return decimal.Decimal.__new__(cls, str(value))
def __repr__(self):
if self == self.to_integral():
return str(self.quantize(decimal.Decimal(1)))
else:
# Standard formatting adds useless extraneous zeros.
o = "%.5f" % self
# Remove the zeros.
while o and o[-1] == '0':
o = o[:-1]
return o
def as_numeric(self):
return float(b_(repr(self)))
def writeToStream(self, stream, encryption_key):
stream.write(b_(repr(self)))
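# Illustrative repr behavior (a sketch, not part of the original module):
#   repr(FloatObject("1.50000")) -> "1.5"   (trailing zeros trimmed)
#   repr(FloatObject("2.00"))    -> "2"     (integral values collapse via quantize)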
class NumberObject(int, PdfObject):
NumberPattern = re.compile(b_('[^+-.0-9]'))
ByteDot = b_(".")
def __new__(cls, value):
val = int(value)
try:
return int.__new__(cls, val)
except OverflowError:
return int.__new__(cls, 0)
def as_numeric(self):
return int(b_(repr(self)))
def writeToStream(self, stream, encryption_key):
stream.write(b_(repr(self)))
def readFromStream(stream):
num = utils.readUntilRegex(stream, NumberObject.NumberPattern)
if num.find(NumberObject.ByteDot) != -1:
return FloatObject(num)
else:
return NumberObject(num)
readFromStream = staticmethod(readFromStream)
##
# Given a string (either a "str" or "unicode"), create a ByteStringObject or a
# TextStringObject to represent the string.
def createStringObject(string):
if isinstance(string, utils.string_type):
return TextStringObject(string)
elif isinstance(string, utils.bytes_type):
try:
if string.startswith(codecs.BOM_UTF16_BE):
retval = TextStringObject(string.decode("utf-16"))
retval.autodetect_utf16 = True
return retval
else:
# This is probably a big performance hit here, but we need to
# convert string objects into the text/unicode-aware version if
# possible... and the only way to check if that's possible is
# to try. Some strings are strings, some are just byte arrays.
retval = TextStringObject(decode_pdfdocencoding(string))
retval.autodetect_pdfdocencoding = True
return retval
except UnicodeDecodeError:
return ByteStringObject(string)
else:
raise TypeError("createStringObject should have str or unicode arg")
def readHexStringFromStream(stream):
stream.read(1)
txt = ""
x = b_("")
while True:
tok = readNonWhitespace(stream)
if not tok:
# stream has truncated prematurely
raise PdfStreamError("Stream has ended unexpectedly")
if tok == b_(">"):
break
x += tok
if len(x) == 2:
txt += chr(int(x, base=16))
x = b_("")
if len(x) == 1:
x += b_("0")
if len(x) == 2:
txt += chr(int(x, base=16))
return createStringObject(b_(txt))
def readStringFromStream(stream):
tok = stream.read(1)
parens = 1
txt = b_("")
while True:
tok = stream.read(1)
if not tok:
# stream has truncated prematurely
raise PdfStreamError("Stream has ended unexpectedly")
if tok == b_("("):
parens += 1
elif tok == b_(")"):
parens -= 1
if parens == 0:
break
elif tok == b_("\\"):
tok = stream.read(1)
if tok == b_("n"):
tok = b_("\n")
elif tok == b_("r"):
tok = b_("\r")
elif tok == b_("t"):
tok = b_("\t")
elif tok == b_("b"):
tok = b_("\b")
elif tok == b_("f"):
tok = b_("\f")
elif tok == b_("c"):
tok = b_("\c")
elif tok == b_("("):
tok = b_("(")
elif tok == b_(")"):
tok = b_(")")
elif tok == b_("/"):
tok = b_("/")
elif tok == b_("\\"):
tok = b_("\\")
elif tok in (b_(" "), b_("/"), b_("%"), b_("<"), b_(">"), b_("["),
b_("]"), b_("#"), b_("_"), b_("&"), b_('$')):
                # odd/unnecessary escape sequences we have encountered
tok = b_(tok)
elif tok.isdigit():
# "The number ddd may consist of one, two, or three
# octal digits; high-order overflow shall be ignored.
# Three octal digits shall be used, with leading zeros
# as needed, if the next character of the string is also
# a digit." (PDF reference 7.3.4.2, p 16)
for i in range(2):
ntok = stream.read(1)
if ntok.isdigit():
tok += ntok
else:
break
tok = b_(chr(int(tok, base=8)))
elif tok in b_("\n\r"):
# This case is hit when a backslash followed by a line
# break occurs. If it's a multi-char EOL, consume the
# second character:
tok = stream.read(1)
                if tok not in b_("\n\r"):
stream.seek(-1, 1)
# Then don't add anything to the actual string, since this
# line break was escaped:
tok = b_('')
else:
raise utils.PdfReadError(r"Unexpected escaped string: %s" % tok)
txt += tok
return createStringObject(txt)
##
# Represents a string object where the text encoding could not be determined.
# This occurs quite often, as the PDF spec doesn't provide an alternate way to
# represent strings -- for example, the encryption data stored in files (like
# /O) is clearly not text, but is still stored in a "String" object.
class ByteStringObject(utils.bytes_type, PdfObject):
##
# For compatibility with TextStringObject.original_bytes. This method
# returns self.
original_bytes = property(lambda self: self)
def writeToStream(self, stream, encryption_key):
bytearr = self
if encryption_key:
bytearr = RC4_encrypt(encryption_key, bytearr)
stream.write(b_("<"))
stream.write(utils.hexencode(bytearr))
stream.write(b_(">"))
##
# Represents a string object that has been decoded into a real unicode string.
# If read from a PDF document, this string appeared to match the
# PDFDocEncoding, or contained a UTF-16BE BOM mark to cause UTF-16 decoding to
# occur.
class TextStringObject(utils.string_type, PdfObject):
autodetect_pdfdocencoding = False
autodetect_utf16 = False
##
# It is occasionally possible that a text string object gets created where
# a byte string object was expected due to the autodetection mechanism --
# if that occurs, this "original_bytes" property can be used to
# back-calculate what the original encoded bytes were.
original_bytes = property(lambda self: self.get_original_bytes())
def get_original_bytes(self):
# We're a text string object, but the library is trying to get our raw
# bytes. This can happen if we auto-detected this string as text, but
# we were wrong. It's pretty common. Return the original bytes that
# would have been used to create this object, based upon the autodetect
# method.
if self.autodetect_utf16:
return codecs.BOM_UTF16_BE + self.encode("utf-16be")
elif self.autodetect_pdfdocencoding:
return encode_pdfdocencoding(self)
else:
raise Exception("no information about original bytes")
def writeToStream(self, stream, encryption_key):
# Try to write the string out as a PDFDocEncoding encoded string. It's
# nicer to look at in the PDF file. Sadly, we take a performance hit
# here for trying...
try:
bytearr = encode_pdfdocencoding(self)
except UnicodeEncodeError:
bytearr = codecs.BOM_UTF16_BE + self.encode("utf-16be")
if encryption_key:
bytearr = RC4_encrypt(encryption_key, bytearr)
obj = ByteStringObject(bytearr)
obj.writeToStream(stream, None)
else:
stream.write(b_("("))
for c in bytearr:
if not chr_(c).isalnum() and c != b_(' '):
stream.write(b_("\\%03o" % ord_(c)))
else:
stream.write(b_(chr_(c)))
stream.write(b_(")"))
class NameObject(str, PdfObject):
delimiterPattern = re.compile(b_(r"\s+|[\(\)<>\[\]{}/%]"))
    prefix = b_("/")
def hashValue(self):
md5Hash = md5()
md5Hash.update(b_(self))
return "NameObject:" + md5Hash.hexdigest()
def writeToStream(self, stream, encryption_key):
stream.write(b_(self))
def readFromStream(stream, pdf):
debug = False
        if debug: print(stream.tell())
name = stream.read(1)
        if name != NameObject.prefix:
raise utils.PdfReadError("name read error")
name += utils.readUntilRegex(stream, NameObject.delimiterPattern,
ignore_eof=True)
if debug: print(name)
try:
return NameObject(name.decode('utf-8'))
except (UnicodeEncodeError, UnicodeDecodeError) as e:
try:
return NameObject(decode_pdfdocencoding(name))
except UnicodeDecodeError:
# Name objects should represent irregular characters
# with a '#' followed by the symbol's hex number
if not pdf.strict:
warnings.warn("Illegal character in Name Object", utils.PdfReadWarning)
return NameObject(name)
else:
raise utils.PdfReadError("Illegal character in Name Object")
readFromStream = staticmethod(readFromStream)
class DictionaryObject(dict, PdfObject):
def raw_get(self, key):
return dict.__getitem__(self, key)
def __setitem__(self, key, value):
if not isinstance(key, PdfObject):
raise ValueError("key must be PdfObject")
if not isinstance(value, PdfObject):
raise ValueError("value must be PdfObject")
return dict.__setitem__(self, key, value)
def setdefault(self, key, value=None):
if not isinstance(key, PdfObject):
raise ValueError("key must be PdfObject")
if not isinstance(value, PdfObject):
raise ValueError("value must be PdfObject")
return dict.setdefault(self, key, value)
def __getitem__(self, key):
return dict.__getitem__(self, key).getObject()
##
    # Retrieves XMP (Extensible Metadata Platform) data relevant to this
    # object, if available.
# <p>
# Stability: Added in v1.12, will exist for all future v1.x releases.
# @return Returns a {@link #xmp.XmpInformation XmlInformation} instance
# that can be used to access XMP metadata from the document. Can also
# return None if no metadata was found on the document root.
def getXmpMetadata(self):
metadata = self.get("/Metadata", None)
        if metadata is None:
return None
metadata = metadata.getObject()
from . import xmp
if not isinstance(metadata, xmp.XmpInformation):
metadata = xmp.XmpInformation(metadata)
self[NameObject("/Metadata")] = metadata
return metadata
##
    # Read-only property that accesses the {@link
    # #DictionaryObject.getXmpMetadata getXmpMetadata} function.
# <p>
# Stability: Added in v1.12, will exist for all future v1.x releases.
xmpMetadata = property(lambda self: self.getXmpMetadata(), None, None)
def writeToStream(self, stream, encryption_key):
stream.write(b_("<<\n"))
for key, value in sorted(list(self.items())):
key.writeToStream(stream, encryption_key)
stream.write(b_(" "))
value.writeToStream(stream, encryption_key)
stream.write(b_("\n"))
stream.write(b_(">>"))
    def hashValue(self):
        md5Hash = md5()
        for key, value in self.items():
            md5Hash.update(b_(key))
            if isinstance(value, PdfObject):
                subHash = value.hashValue()
                if subHash is not None:
                    # hashValue() returns text; encode via b_ before hashing
                    md5Hash.update(b_(subHash))
            else:
                md5Hash.update(b_(value))
        return "DictionaryObject:" + md5Hash.hexdigest()
def readFromStream(stream, pdf):
debug = False
tmp = stream.read(2)
if tmp != b_("<<"):
raise utils.PdfReadError("Dictionary read error at byte %s: stream must begin with '<<'" % utils.hexStr(stream.tell()))
data = {}
while True:
tok = readNonWhitespace(stream)
if tok == b_('\x00'):
continue
elif tok == b_('%'):
stream.seek(-1, 1)
skipOverComment(stream)
continue
if not tok:
# stream has truncated prematurely
raise PdfStreamError("Stream has ended unexpectedly")
if debug: print(("Tok:", tok))
if tok == b_(">"):
stream.read(1)
break
stream.seek(-1, 1)
key = readObject(stream, pdf)
tok = readNonWhitespace(stream)
stream.seek(-1, 1)
value = readObject(stream, pdf)
if not data.get(key):
data[key] = value
elif pdf.strict:
# multiple definitions of key not permitted
raise utils.PdfReadError("Multiple definitions in dictionary at byte %s for key %s" \
% (utils.hexStr(stream.tell()), key))
else:
warnings.warn("Multiple definitions in dictionary at byte %s for key %s" \
% (utils.hexStr(stream.tell()), key), utils.PdfReadWarning)
pos = stream.tell()
s = readNonWhitespace(stream)
if s == b_('s') and stream.read(5) == b_('tream'):
eol = stream.read(1)
# odd PDF file output has spaces after 'stream' keyword but before EOL.
# patch provided by Danial Sandler
while eol == b_(' '):
eol = stream.read(1)
assert eol in (b_("\n"), b_("\r"))
if eol == b_("\r"):
# read \n after
if stream.read(1) != b_('\n'):
stream.seek(-1, 1)
# this is a stream object, not a dictionary
assert "/Length" in data
length = data["/Length"]
if debug: print(data)
stream_start = stream.tell()
if isinstance(length, IndirectObject):
length = pdf.getObject(length)
stream.seek(stream_start, 0)
data["__streamdata__"] = stream.read(length)
if debug: print("here")
#if debug: print(binascii.hexlify(data["__streamdata__"]))
e = readNonWhitespace(stream)
ndstream = stream.read(8)
if (e + ndstream) != b_("endstream"):
# (sigh) - the odd PDF file has a length that is too long, so
# we need to read backwards to find the "endstream" ending.
# ReportLab (unknown version) generates files with this bug,
# and Python users into PDF files tend to be our audience.
# we need to do this to correct the streamdata and chop off
# an extra character.
pos = stream.tell()
stream.seek(-10, 1)
end = stream.read(9)
if end == b_("endstream"):
# we found it by looking back one character further.
data["__streamdata__"] = data["__streamdata__"][:-1]
else:
# Handle stream that is few bytes longer than expected
stream.seek(stream_start + length, 0)
extra = stream.read(50)
p = extra.find(b_("endstream"))
if p >= 0:
stream.seek(stream_start + length + p + 9, 0)
extra = extra[:p].rstrip(b_('\r\n '))
data["__streamdata__"] = data["__streamdata__"] + extra
else:
if debug: print(("E", e, ndstream, debugging.toHex(end)))
stream.seek(pos, 0)
raise utils.PdfReadError("Unable to find 'endstream' marker after stream at byte %s." % utils.hexStr(stream.tell()))
else:
stream.seek(pos, 0)
if "__streamdata__" in data:
return StreamObject.initializeFromDictionary(data)
else:
retval = DictionaryObject()
retval.update(data)
return retval
readFromStream = staticmethod(readFromStream)
class TreeObject(DictionaryObject):
def __init__(self):
DictionaryObject.__init__(self)
def hasChildren(self):
return '/First' in self
def __iter__(self):
return self.children()
    def children(self):
        # PEP 479: a generator must return (not raise StopIteration) to stop.
        if not self.hasChildren():
            return
        child = self['/First']
        while True:
            yield child
            if child == self['/Last']:
                return
            child = child['/Next']
def addChild(self, child, pdf):
childObj = child.getObject()
child = pdf.getReference(childObj)
assert isinstance(child, IndirectObject)
if '/First' not in self:
self[NameObject('/First')] = child
self[NameObject('/Count')] = NumberObject(0)
prev = None
else:
prev = self['/Last']
self[NameObject('/Last')] = child
self[NameObject('/Count')] = NumberObject(self[NameObject('/Count')] + 1)
if prev:
prevRef = pdf.getReference(prev)
assert isinstance(prevRef, IndirectObject)
childObj[NameObject('/Prev')] = prevRef
prev[NameObject('/Next')] = child
parentRef = pdf.getReference(self)
assert isinstance(parentRef, IndirectObject)
childObj[NameObject('/Parent')] = parentRef
def removeChild(self, child):
childObj = child.getObject()
if NameObject('/Parent') not in childObj:
raise ValueError("Removed child does not appear to be a tree item")
elif childObj[NameObject('/Parent')] != self:
raise ValueError("Removed child is not a member of this tree")
found = False
prevRef = None
prev = None
curRef = self[NameObject('/First')]
cur = curRef.getObject()
lastRef = self[NameObject('/Last')]
last = lastRef.getObject()
        while cur is not None:
            if cur == childObj:
                if prev is None:
if NameObject('/Next') in cur:
# Removing first tree node
nextRef = cur[NameObject('/Next')]
next = nextRef.getObject()
del next[NameObject('/Prev')]
self[NameObject('/First')] = nextRef
self[NameObject('/Count')] = self[NameObject('/Count')] - 1
else:
# Removing only tree node
assert self[NameObject('/Count')] == 1
del self[NameObject('/Count')]
del self[NameObject('/First')]
if NameObject('/Last') in self:
del self[NameObject('/Last')]
else:
if NameObject('/Next') in cur:
# Removing middle tree node
nextRef = cur[NameObject('/Next')]
next = nextRef.getObject()
next[NameObject('/Prev')] = prevRef
prev[NameObject('/Next')] = nextRef
self[NameObject('/Count')] = self[NameObject('/Count')] - 1
else:
# Removing last tree node
assert cur == last
del prev[NameObject('/Next')]
self[NameObject('/Last')] = prevRef
self[NameObject('/Count')] = self[NameObject('/Count')] - 1
found = True
break
prevRef = curRef
prev = cur
if NameObject('/Next') in cur:
curRef = cur[NameObject('/Next')]
cur = curRef.getObject()
else:
curRef = None
cur = None
if not found:
raise ValueError("Removal couldn't find item in tree")
del childObj[NameObject('/Parent')]
if NameObject('/Next') in childObj:
del childObj[NameObject('/Next')]
if NameObject('/Prev') in childObj:
del childObj[NameObject('/Prev')]
def emptyTree(self):
for child in self:
childObj = child.getObject()
del childObj[NameObject('/Parent')]
if NameObject('/Next') in childObj:
del childObj[NameObject('/Next')]
if NameObject('/Prev') in childObj:
del childObj[NameObject('/Prev')]
if NameObject('/Count') in self:
del self[NameObject('/Count')]
if NameObject('/First') in self:
del self[NameObject('/First')]
if NameObject('/Last') in self:
del self[NameObject('/Last')]
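# Usage sketch (hedged): a TreeObject threads its children through a doubly
# linked list (/First, /Last, /Prev, /Next) plus a running /Count, as PDF
# outline trees do. Given a writer `pdf` exposing getReference() (an
# assumption about the surrounding API):
#   root = TreeObject()
#   root.addChild(node, pdf)      # appends node, wiring /Prev-/Next links
#   root.removeChild(childRef)    # unlinks it and decrements /Count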
class StreamObject(DictionaryObject):
def __init__(self):
self._data = None
self.decodedSelf = None
self._hashValue = None
def writeToStream(self, stream, encryption_key):
self[NameObject("/Length")] = NumberObject(len(self._data))
DictionaryObject.writeToStream(self, stream, encryption_key)
del self["/Length"]
stream.write(b_("\nstream\n"))
data = self._data
if encryption_key:
data = RC4_encrypt(encryption_key, data)
stream.write(data)
stream.write(b_("\nendstream"))
def initializeFromDictionary(data):
if "/Filter" in data:
retval = EncodedStreamObject()
else:
retval = DecodedStreamObject()
retval._data = data["__streamdata__"]
del data["__streamdata__"]
del data["/Length"]
retval.update(data)
return retval
initializeFromDictionary = staticmethod(initializeFromDictionary)
def flateEncode(self):
if "/Filter" in self:
f = self["/Filter"]
if isinstance(f, ArrayObject):
f.insert(0, NameObject("/FlateDecode"))
else:
newf = ArrayObject()
newf.append(NameObject("/FlateDecode"))
newf.append(f)
f = newf
else:
f = NameObject("/FlateDecode")
retval = EncodedStreamObject()
retval[NameObject("/Filter")] = f
retval._data = filters.FlateDecode.encode(self._data)
return retval
def hashValue(self):
if self._hashValue is None:
self._hashValue = "StreamObject:" + md5(self._data).hexdigest()
return self._hashValue
class DecodedStreamObject(StreamObject):
def getData(self):
return self._data
def setData(self, data):
self._hashValue = None
self._data = data
class EncodedStreamObject(StreamObject):
def __init__(self):
self.decodedSelf = None
self._hashValue = None
def getData(self):
if self.decodedSelf:
# cached version of decoded object
return self.decodedSelf.getData()
else:
# create decoded object
decoded = DecodedStreamObject()
decoded._data = filters.decodeStreamData(self)
for key, value in list(self.items()):
                if key not in ("/Length", "/Filter", "/DecodeParms"):
decoded[key] = value
self.decodedSelf = decoded
return decoded._data
def setData(self, data):
raise utils.PdfReadError("Creating EncodedStreamObject is not currently supported")
class RectangleObject(ArrayObject):
"""
This class is used to represent *page boxes* in PyPDF2. These boxes include:
* :attr:`artBox <PyPDF2.pdf.PageObject.artBox>`
* :attr:`bleedBox <PyPDF2.pdf.PageObject.bleedBox>`
* :attr:`cropBox <PyPDF2.pdf.PageObject.cropBox>`
* :attr:`mediaBox <PyPDF2.pdf.PageObject.mediaBox>`
* :attr:`trimBox <PyPDF2.pdf.PageObject.trimBox>`
"""
def __init__(self, arr):
# must have four points
assert len(arr) == 4
# automatically convert arr[x] into NumberObject(arr[x]) if necessary
ArrayObject.__init__(self, [self.ensureIsNumber(x) for x in arr])
def ensureIsNumber(self, value):
if not isinstance(value, (NumberObject, FloatObject)):
value = FloatObject(value)
return value
def __repr__(self):
return "RectangleObject(%s)" % repr(list(self))
def getLowerLeft_x(self):
return self[0]
def getLowerLeft_y(self):
return self[1]
def getUpperRight_x(self):
return self[2]
def getUpperRight_y(self):
return self[3]
def getUpperLeft_x(self):
return self.getLowerLeft_x()
def getUpperLeft_y(self):
return self.getUpperRight_y()
def getLowerRight_x(self):
return self.getUpperRight_x()
def getLowerRight_y(self):
return self.getLowerLeft_y()
def getLowerLeft(self):
return self.getLowerLeft_x(), self.getLowerLeft_y()
def getLowerRight(self):
return self.getLowerRight_x(), self.getLowerRight_y()
def getUpperLeft(self):
return self.getUpperLeft_x(), self.getUpperLeft_y()
def getUpperRight(self):
return self.getUpperRight_x(), self.getUpperRight_y()
def setLowerLeft(self, value):
self[0], self[1] = [self.ensureIsNumber(x) for x in value]
def setLowerRight(self, value):
self[2], self[1] = [self.ensureIsNumber(x) for x in value]
def setUpperLeft(self, value):
self[0], self[3] = [self.ensureIsNumber(x) for x in value]
def setUpperRight(self, value):
self[2], self[3] = [self.ensureIsNumber(x) for x in value]
def getWidth(self):
return self.getUpperRight_x() - self.getLowerLeft_x()
def getHeight(self):
return self.getUpperRight_y() - self.getLowerLeft_y()
lowerLeft = property(getLowerLeft, setLowerLeft, None, None)
"""
Property to read and modify the lower left coordinate of this box
in (x,y) form.
"""
lowerRight = property(getLowerRight, setLowerRight, None, None)
"""
Property to read and modify the lower right coordinate of this box
in (x,y) form.
"""
upperLeft = property(getUpperLeft, setUpperLeft, None, None)
"""
Property to read and modify the upper left coordinate of this box
in (x,y) form.
"""
upperRight = property(getUpperRight, setUpperRight, None, None)
"""
Property to read and modify the upper right coordinate of this box
in (x,y) form.
"""
class Field(TreeObject):
"""
A class representing a field dictionary. This class is accessed through
:meth:`getFields()<PyPDF2.PdfFileReader.getFields>`
"""
def __init__(self, data):
DictionaryObject.__init__(self)
attributes = ("/FT", "/Parent", "/Kids", "/T", "/TU", "/TM", "/Ff",
"/V", "/DV", "/AA")
for attr in attributes:
try:
self[NameObject(attr)] = data[attr]
except KeyError:
pass
fieldType = property(lambda self: self.get("/FT"))
"""
Read-only property accessing the type of this field.
"""
parent = property(lambda self: self.get("/Parent"))
"""
Read-only property accessing the parent of this field.
"""
kids = property(lambda self: self.get("/Kids"))
"""
Read-only property accessing the kids of this field.
"""
name = property(lambda self: self.get("/T"))
"""
Read-only property accessing the name of this field.
"""
altName = property(lambda self: self.get("/TU"))
"""
Read-only property accessing the alternate name of this field.
"""
mappingName = property(lambda self: self.get("/TM"))
"""
Read-only property accessing the mapping name of this field. This
name is used by PyPDF2 as a key in the dictionary returned by
:meth:`getFields()<PyPDF2.PdfFileReader.getFields>`
"""
flags = property(lambda self: self.get("/Ff"))
"""
Read-only property accessing the field flags, specifying various
characteristics of the field (see Table 8.70 of the PDF 1.7 reference).
"""
value = property(lambda self: self.get("/V"))
"""
Read-only property accessing the value of this field. Format
varies based on field type.
"""
defaultValue = property(lambda self: self.get("/DV"))
"""
Read-only property accessing the default value of this field.
"""
additionalActions = property(lambda self: self.get("/AA"))
"""
Read-only property accessing the additional actions dictionary.
This dictionary defines the field's behavior in response to trigger events.
See Section 8.5.2 of the PDF 1.7 reference.
"""
class Destination(TreeObject):
"""
A class representing a destination within a PDF file.
See section 8.2.1 of the PDF 1.6 reference.
:param str title: Title of this destination.
:param int page: Page number of this destination.
:param str typ: How the destination is displayed.
:param args: Additional arguments may be necessary depending on the type.
:raises PdfReadError: If destination type is invalid.
Valid ``typ`` arguments (see PDF spec for details):
/Fit No additional arguments
/XYZ [left] [top] [zoomFactor]
/FitH [top]
/FitV [left]
/FitR [left] [bottom] [right] [top]
/FitB No additional arguments
/FitBH [top]
/FitBV [left]
"""
def __init__(self, title, page, typ, *args):
DictionaryObject.__init__(self)
self[NameObject("/Title")] = title
self[NameObject("/Page")] = page
self[NameObject("/Type")] = typ
# from table 8.2 of the PDF 1.7 reference.
if typ == "/XYZ":
(self[NameObject("/Left")], self[NameObject("/Top")],
self[NameObject("/Zoom")]) = args
elif typ == "/FitR":
(self[NameObject("/Left")], self[NameObject("/Bottom")],
self[NameObject("/Right")], self[NameObject("/Top")]) = args
elif typ in ["/FitH", "/FitBH"]:
self[NameObject("/Top")], = args
elif typ in ["/FitV", "/FitBV"]:
self[NameObject("/Left")], = args
elif typ in ["/Fit", "/FitB"]:
pass
else:
raise utils.PdfReadError("Unknown Destination Type: %r" % typ)
def getDestArray(self):
return ArrayObject([self.raw_get('/Page'), self['/Type']] + [self[x] for x in ['/Left', '/Bottom', '/Right', '/Top', '/Zoom'] if x in self])
def writeToStream(self, stream, encryption_key):
stream.write(b_("<<\n"))
key = NameObject('/D')
key.writeToStream(stream, encryption_key)
stream.write(b_(" "))
value = self.getDestArray()
value.writeToStream(stream, encryption_key)
key = NameObject("/S")
key.writeToStream(stream, encryption_key)
stream.write(b_(" "))
value = NameObject("/GoTo")
value.writeToStream(stream, encryption_key)
stream.write(b_("\n"))
stream.write(b_(">>"))
title = property(lambda self: self.get("/Title"))
"""
Read-only property accessing the destination title.
:rtype: str
"""
page = property(lambda self: self.get("/Page"))
"""
Read-only property accessing the destination page number.
:rtype: int
"""
typ = property(lambda self: self.get("/Type"))
"""
Read-only property accessing the destination type.
:rtype: str
"""
zoom = property(lambda self: self.get("/Zoom", None))
"""
Read-only property accessing the zoom factor.
:rtype: int, or ``None`` if not available.
"""
left = property(lambda self: self.get("/Left", None))
"""
Read-only property accessing the left horizontal coordinate.
:rtype: int, or ``None`` if not available.
"""
right = property(lambda self: self.get("/Right", None))
"""
Read-only property accessing the right horizontal coordinate.
:rtype: int, or ``None`` if not available.
"""
top = property(lambda self: self.get("/Top", None))
"""
Read-only property accessing the top vertical coordinate.
:rtype: int, or ``None`` if not available.
"""
bottom = property(lambda self: self.get("/Bottom", None))
"""
Read-only property accessing the bottom vertical coordinate.
:rtype: int, or ``None`` if not available.
"""
class Bookmark(Destination):
def writeToStream(self, stream, encryption_key):
stream.write(b_("<<\n"))
for key in [NameObject(x) for x in ['/Title', '/Parent', '/First', '/Last', '/Next', '/Prev'] if x in self]:
key.writeToStream(stream, encryption_key)
stream.write(b_(" "))
value = self.raw_get(key)
value.writeToStream(stream, encryption_key)
stream.write(b_("\n"))
key = NameObject('/Dest')
key.writeToStream(stream, encryption_key)
stream.write(b_(" "))
value = self.getDestArray()
value.writeToStream(stream, encryption_key)
stream.write(b_("\n"))
stream.write(b_(">>"))
def encode_pdfdocencoding(unicode_string):
retval = b_('')
for c in unicode_string:
try:
retval += b_(chr(_pdfDocEncoding_rev[c]))
except KeyError:
raise UnicodeEncodeError("pdfdocencoding", c, -1, -1,
"does not exist in translation table")
return retval
def decode_pdfdocencoding(byte_array):
retval = u_('')
for b in byte_array:
c = _pdfDocEncoding[ord_(b)]
if c == u_('\u0000'):
raise UnicodeDecodeError("pdfdocencoding", utils.barray(b), -1, -1,
"does not exist in translation table")
retval += c
return retval
_pdfDocEncoding = (
u_('\u0000'), u_('\u0000'), u_('\u0000'), u_('\u0000'), u_('\u0000'), u_('\u0000'), u_('\u0000'), u_('\u0000'),
u_('\u0000'), u_('\u0000'), u_('\u0000'), u_('\u0000'), u_('\u0000'), u_('\u0000'), u_('\u0000'), u_('\u0000'),
u_('\u0000'), u_('\u0000'), u_('\u0000'), u_('\u0000'), u_('\u0000'), u_('\u0000'), u_('\u0000'), u_('\u0000'),
u_('\u02d8'), u_('\u02c7'), u_('\u02c6'), u_('\u02d9'), u_('\u02dd'), u_('\u02db'), u_('\u02da'), u_('\u02dc'),
u_('\u0020'), u_('\u0021'), u_('\u0022'), u_('\u0023'), u_('\u0024'), u_('\u0025'), u_('\u0026'), u_('\u0027'),
u_('\u0028'), u_('\u0029'), u_('\u002a'), u_('\u002b'), u_('\u002c'), u_('\u002d'), u_('\u002e'), u_('\u002f'),
u_('\u0030'), u_('\u0031'), u_('\u0032'), u_('\u0033'), u_('\u0034'), u_('\u0035'), u_('\u0036'), u_('\u0037'),
u_('\u0038'), u_('\u0039'), u_('\u003a'), u_('\u003b'), u_('\u003c'), u_('\u003d'), u_('\u003e'), u_('\u003f'),
u_('\u0040'), u_('\u0041'), u_('\u0042'), u_('\u0043'), u_('\u0044'), u_('\u0045'), u_('\u0046'), u_('\u0047'),
u_('\u0048'), u_('\u0049'), u_('\u004a'), u_('\u004b'), u_('\u004c'), u_('\u004d'), u_('\u004e'), u_('\u004f'),
u_('\u0050'), u_('\u0051'), u_('\u0052'), u_('\u0053'), u_('\u0054'), u_('\u0055'), u_('\u0056'), u_('\u0057'),
u_('\u0058'), u_('\u0059'), u_('\u005a'), u_('\u005b'), u_('\u005c'), u_('\u005d'), u_('\u005e'), u_('\u005f'),
u_('\u0060'), u_('\u0061'), u_('\u0062'), u_('\u0063'), u_('\u0064'), u_('\u0065'), u_('\u0066'), u_('\u0067'),
u_('\u0068'), u_('\u0069'), u_('\u006a'), u_('\u006b'), u_('\u006c'), u_('\u006d'), u_('\u006e'), u_('\u006f'),
u_('\u0070'), u_('\u0071'), u_('\u0072'), u_('\u0073'), u_('\u0074'), u_('\u0075'), u_('\u0076'), u_('\u0077'),
u_('\u0078'), u_('\u0079'), u_('\u007a'), u_('\u007b'), u_('\u007c'), u_('\u007d'), u_('\u007e'), u_('\u0000'),
u_('\u2022'), u_('\u2020'), u_('\u2021'), u_('\u2026'), u_('\u2014'), u_('\u2013'), u_('\u0192'), u_('\u2044'),
u_('\u2039'), u_('\u203a'), u_('\u2212'), u_('\u2030'), u_('\u201e'), u_('\u201c'), u_('\u201d'), u_('\u2018'),
u_('\u2019'), u_('\u201a'), u_('\u2122'), u_('\ufb01'), u_('\ufb02'), u_('\u0141'), u_('\u0152'), u_('\u0160'),
u_('\u0178'), u_('\u017d'), u_('\u0131'), u_('\u0142'), u_('\u0153'), u_('\u0161'), u_('\u017e'), u_('\u0000'),
u_('\u20ac'), u_('\u00a1'), u_('\u00a2'), u_('\u00a3'), u_('\u00a4'), u_('\u00a5'), u_('\u00a6'), u_('\u00a7'),
u_('\u00a8'), u_('\u00a9'), u_('\u00aa'), u_('\u00ab'), u_('\u00ac'), u_('\u0000'), u_('\u00ae'), u_('\u00af'),
u_('\u00b0'), u_('\u00b1'), u_('\u00b2'), u_('\u00b3'), u_('\u00b4'), u_('\u00b5'), u_('\u00b6'), u_('\u00b7'),
u_('\u00b8'), u_('\u00b9'), u_('\u00ba'), u_('\u00bb'), u_('\u00bc'), u_('\u00bd'), u_('\u00be'), u_('\u00bf'),
u_('\u00c0'), u_('\u00c1'), u_('\u00c2'), u_('\u00c3'), u_('\u00c4'), u_('\u00c5'), u_('\u00c6'), u_('\u00c7'),
u_('\u00c8'), u_('\u00c9'), u_('\u00ca'), u_('\u00cb'), u_('\u00cc'), u_('\u00cd'), u_('\u00ce'), u_('\u00cf'),
u_('\u00d0'), u_('\u00d1'), u_('\u00d2'), u_('\u00d3'), u_('\u00d4'), u_('\u00d5'), u_('\u00d6'), u_('\u00d7'),
u_('\u00d8'), u_('\u00d9'), u_('\u00da'), u_('\u00db'), u_('\u00dc'), u_('\u00dd'), u_('\u00de'), u_('\u00df'),
u_('\u00e0'), u_('\u00e1'), u_('\u00e2'), u_('\u00e3'), u_('\u00e4'), u_('\u00e5'), u_('\u00e6'), u_('\u00e7'),
u_('\u00e8'), u_('\u00e9'), u_('\u00ea'), u_('\u00eb'), u_('\u00ec'), u_('\u00ed'), u_('\u00ee'), u_('\u00ef'),
u_('\u00f0'), u_('\u00f1'), u_('\u00f2'), u_('\u00f3'), u_('\u00f4'), u_('\u00f5'), u_('\u00f6'), u_('\u00f7'),
u_('\u00f8'), u_('\u00f9'), u_('\u00fa'), u_('\u00fb'), u_('\u00fc'), u_('\u00fd'), u_('\u00fe'), u_('\u00ff')
)
assert len(_pdfDocEncoding) == 256
_pdfDocEncoding_rev = {}
for i in range(256):
char = _pdfDocEncoding[i]
if char == u_("\u0000"):
continue
assert char not in _pdfDocEncoding_rev
_pdfDocEncoding_rev[char] = i
|
#!/usr/bin/env python
#
# Features.py
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307 USA
import logging
log = logging.getLogger("Thug")
class Features:
counters = (
'activex_count',
'addeventlistener_count',
'alert_count',
'appendchild_count',
'attachevent_count',
'body_count',
'characters_count',
'clonenode_count',
'createdocumentfragment_count',
'createelement_count',
'data_uri_count',
'detachevent_count',
'dispatchevent_count',
'document_write_count',
'embed_count',
'embed_string_count',
'eval_count',
'external_javascript_count',
'external_javascript_characters_count',
'external_javascript_whitespaces_count',
'form_string_count',
'frame_string_count',
'getcomputedstyle_count',
'head_count',
'hidden_count',
'html_count',
'iframe_count',
'iframe_small_width_count',
'iframe_small_height_count',
'iframe_small_area_count',
'iframe_string_count',
'inline_javascript_count',
'inline_javascript_characters_count',
'inline_javascript_whitespaces_count',
'inline_vbscript_count',
'inline_vbscript_characters_count',
'inline_vbscript_whitespaces_count',
'insertbefore_count',
'meta_refresh_count',
'noscript_count',
'object_count',
'object_small_width_count',
'object_small_height_count',
'object_small_area_count',
'object_string_count',
'removeattribute_count',
'removechild_count',
'removeeventlistener_count',
'replacechild_count',
'setattribute_count',
'setinterval_count',
'settimeout_count',
'title_count',
'url_count',
'whitespaces_count'
)
def __init__(self):
self.features = dict()
def __getattr__(self, key):
if key.startswith('increase_'):
counter = key.split('increase_')[1]
if counter in self.counters:
return lambda: self.increase(counter)
if key.startswith('add_'):
counter = key.split('add_')[1]
if counter in self.counters:
return lambda value: self.add(counter, value)
        raise AttributeError(key)  # pragma: no cover
def clear(self):
self.features = dict()
def init_features(self, url):
if url in self.features:
return
self.features[url] = dict()
for counter in self.counters:
self.features[url][counter] = 0
@property
def features_url(self):
if log.ThugOpts.local:
return log.ThugLogging.url
url = getattr(log, 'last_url', None)
return url if url else log.DFT.window.url
def increase(self, key):
if not log.ThugOpts.features_logging: # pragma: no cover
return
url = self.features_url
self.init_features(url)
self.features[url][key] += 1
def add(self, key, value):
if not log.ThugOpts.features_logging: # pragma: no cover
return
url = self.features_url
self.init_features(url)
self.features[url][key] += value
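# Usage sketch (hedged; log.ThugOpts and the URL globals are provided by the
# Thug runtime, so this only works inside a configured Thug session):
#   features = Features()
#   features.increase_eval_count()       # __getattr__ dispatches to increase('eval_count')
#   features.add_whitespaces_count(42)   # __getattr__ dispatches to add('whitespaces_count', 42)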
|
# Generated from Query.g4 by ANTLR 4.5
# encoding: utf-8
from __future__ import print_function
from antlr4 import *
from io import StringIO
def serializedATN():
with StringIO() as buf:
buf.write(u"\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\2")
buf.write(u"\27\u00a4\b\1\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6")
buf.write(u"\4\7\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t\13\4\f\t\f\4")
buf.write(u"\r\t\r\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t")
buf.write(u"\22\4\23\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27")
buf.write(u"\4\30\t\30\4\31\t\31\4\32\t\32\4\33\t\33\4\34\t\34\3")
buf.write(u"\2\3\2\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\4\3\4\3\5\3\5\3")
buf.write(u"\6\3\6\3\7\3\7\3\7\3\b\3\b\3\t\3\t\3\n\3\n\3\13\3\13")
buf.write(u"\3\f\3\f\3\r\3\r\3\16\3\16\3\17\3\17\3\20\3\20\3\20\7")
buf.write(u"\20_\n\20\f\20\16\20b\13\20\3\20\3\20\3\21\3\21\3\21")
buf.write(u"\7\21i\n\21\f\21\16\21l\13\21\5\21n\n\21\3\22\6\22q\n")
buf.write(u"\22\r\22\16\22r\3\23\3\23\3\24\5\24x\n\24\3\24\3\24\3")
buf.write(u"\25\3\25\3\25\3\25\7\25\u0080\n\25\f\25\16\25\u0083\13")
buf.write(u"\25\3\25\3\25\3\25\3\25\3\25\3\26\3\26\3\26\3\26\7\26")
buf.write(u"\u008e\n\26\f\26\16\26\u0091\13\26\3\26\3\26\3\27\3\27")
buf.write(u"\3\30\3\30\3\31\3\31\3\32\3\32\3\33\3\33\3\34\3\34\3")
buf.write(u"\34\3\34\5\34\u00a3\n\34\3\u0081\2\35\3\3\5\4\7\5\t\6")
buf.write(u"\13\7\r\b\17\t\21\n\23\13\25\f\27\r\31\16\33\17\35\20")
buf.write(u"\37\21!\22#\23%\24\'\25)\26+\27-\2/\2\61\2\63\2\65\2")
buf.write(u"\67\2\3\2\6\5\2\13\f\17\17\"\"\4\2\f\f\17\17\3\2$$\r")
buf.write(u"\2\13\f\17\17\"\"$$*+..\61\61<=]_}}\177\u0080\u00a6\2")
buf.write(u"\3\3\2\2\2\2\5\3\2\2\2\2\7\3\2\2\2\2\t\3\2\2\2\2\13\3")
buf.write(u"\2\2\2\2\r\3\2\2\2\2\17\3\2\2\2\2\21\3\2\2\2\2\23\3\2")
buf.write(u"\2\2\2\25\3\2\2\2\2\27\3\2\2\2\2\31\3\2\2\2\2\33\3\2")
buf.write(u"\2\2\2\35\3\2\2\2\2\37\3\2\2\2\2!\3\2\2\2\2#\3\2\2\2")
buf.write(u"\2%\3\2\2\2\2\'\3\2\2\2\2)\3\2\2\2\2+\3\2\2\2\39\3\2")
buf.write(u"\2\2\5;\3\2\2\2\7B\3\2\2\2\tD\3\2\2\2\13F\3\2\2\2\rH")
buf.write(u"\3\2\2\2\17K\3\2\2\2\21M\3\2\2\2\23O\3\2\2\2\25Q\3\2")
buf.write(u"\2\2\27S\3\2\2\2\31U\3\2\2\2\33W\3\2\2\2\35Y\3\2\2\2")
buf.write(u"\37[\3\2\2\2!m\3\2\2\2#p\3\2\2\2%t\3\2\2\2\'w\3\2\2\2")
buf.write(u"){\3\2\2\2+\u0089\3\2\2\2-\u0094\3\2\2\2/\u0096\3\2\2")
buf.write(u"\2\61\u0098\3\2\2\2\63\u009a\3\2\2\2\65\u009c\3\2\2\2")
buf.write(u"\67\u00a2\3\2\2\29:\7=\2\2:\4\3\2\2\2;<\7f\2\2<=\7g\2")
buf.write(u"\2=>\7h\2\2>?\7k\2\2?@\7p\2\2@A\7g\2\2A\6\3\2\2\2BC\7")
buf.write(u"\u0080\2\2C\b\3\2\2\2DE\7*\2\2E\n\3\2\2\2FG\7+\2\2G\f")
buf.write(u"\3\2\2\2HI\7<\2\2IJ\7<\2\2J\16\3\2\2\2KL\7<\2\2L\20\3")
buf.write(u"\2\2\2MN\7]\2\2N\22\3\2\2\2OP\7.\2\2P\24\3\2\2\2QR\7")
buf.write(u"_\2\2R\26\3\2\2\2ST\7^\2\2T\30\3\2\2\2UV\7A\2\2V\32\3")
buf.write(u"\2\2\2WX\7}\2\2X\34\3\2\2\2YZ\7\177\2\2Z\36\3\2\2\2[")
buf.write(u"`\7$\2\2\\_\5\67\34\2]_\5\61\31\2^\\\3\2\2\2^]\3\2\2")
buf.write(u"\2_b\3\2\2\2`^\3\2\2\2`a\3\2\2\2ac\3\2\2\2b`\3\2\2\2")
buf.write(u"cd\7$\2\2d \3\2\2\2en\5-\27\2fj\5/\30\2gi\5-\27\2hg\3")
buf.write(u"\2\2\2il\3\2\2\2jh\3\2\2\2jk\3\2\2\2kn\3\2\2\2lj\3\2")
buf.write(u"\2\2me\3\2\2\2mf\3\2\2\2n\"\3\2\2\2oq\5\65\33\2po\3\2")
buf.write(u"\2\2qr\3\2\2\2rp\3\2\2\2rs\3\2\2\2s$\3\2\2\2tu\t\2\2")
buf.write(u"\2u&\3\2\2\2vx\7\17\2\2wv\3\2\2\2wx\3\2\2\2xy\3\2\2\2")
buf.write(u"yz\7\f\2\2z(\3\2\2\2{|\7\61\2\2|}\7,\2\2}\u0081\3\2\2")
buf.write(u"\2~\u0080\13\2\2\2\177~\3\2\2\2\u0080\u0083\3\2\2\2\u0081")
buf.write(u"\u0082\3\2\2\2\u0081\177\3\2\2\2\u0082\u0084\3\2\2\2")
buf.write(u"\u0083\u0081\3\2\2\2\u0084\u0085\7,\2\2\u0085\u0086\7")
buf.write(u"\61\2\2\u0086\u0087\3\2\2\2\u0087\u0088\b\25\2\2\u0088")
buf.write(u"*\3\2\2\2\u0089\u008a\7\61\2\2\u008a\u008b\7\61\2\2\u008b")
buf.write(u"\u008f\3\2\2\2\u008c\u008e\n\3\2\2\u008d\u008c\3\2\2")
buf.write(u"\2\u008e\u0091\3\2\2\2\u008f\u008d\3\2\2\2\u008f\u0090")
buf.write(u"\3\2\2\2\u0090\u0092\3\2\2\2\u0091\u008f\3\2\2\2\u0092")
buf.write(u"\u0093\b\26\2\2\u0093,\3\2\2\2\u0094\u0095\4\62;\2\u0095")
buf.write(u".\3\2\2\2\u0096\u0097\4\63;\2\u0097\60\3\2\2\2\u0098")
buf.write(u"\u0099\n\4\2\2\u0099\62\3\2\2\2\u009a\u009b\t\2\2\2\u009b")
buf.write(u"\64\3\2\2\2\u009c\u009d\n\5\2\2\u009d\66\3\2\2\2\u009e")
buf.write(u"\u009f\7^\2\2\u009f\u00a3\7$\2\2\u00a0\u00a1\7^\2\2\u00a1")
buf.write(u"\u00a3\7^\2\2\u00a2\u009e\3\2\2\2\u00a2\u00a0\3\2\2\2")
buf.write(u"\u00a38\3\2\2\2\f\2^`jmrw\u0081\u008f\u00a2\3\b\2\2")
return buf.getvalue()
class QueryLexer(Lexer):
atn = ATNDeserializer().deserialize(serializedATN())
decisionsToDFA = [ DFA(ds, i) for i, ds in enumerate(atn.decisionToState) ]
T__0 = 1
T__1 = 2
T__2 = 3
T__3 = 4
T__4 = 5
T__5 = 6
T__6 = 7
T__7 = 8
T__8 = 9
T__9 = 10
T__10 = 11
T__11 = 12
T__12 = 13
T__13 = 14
ExplicitRegex = 15
Digits = 16
Symbol = 17
WhiteSpace = 18
NewLine = 19
BlockComment = 20
LineComment = 21
modeNames = [ u"DEFAULT_MODE" ]
literalNames = [ u"<INVALID>",
u"';'", u"'define'", u"'~'", u"'('", u"')'", u"'::'", u"':'",
u"'['", u"','", u"']'", u"'\\'", u"'?'", u"'{'", u"'}'" ]
symbolicNames = [ u"<INVALID>",
u"ExplicitRegex", u"Digits", u"Symbol", u"WhiteSpace", u"NewLine",
u"BlockComment", u"LineComment" ]
ruleNames = [ u"T__0", u"T__1", u"T__2", u"T__3", u"T__4", u"T__5",
u"T__6", u"T__7", u"T__8", u"T__9", u"T__10", u"T__11",
u"T__12", u"T__13", u"ExplicitRegex", u"Digits", u"Symbol",
u"WhiteSpace", u"NewLine", u"BlockComment", u"LineComment",
u"DIGIT", u"NONZERO_DIGIT", u"NON_QUOTE", u"WHITESPACE",
u"SYMBOL_CHAR", u"ESCAPED_CHAR" ]
grammarFileName = u"Query.g4"
def __init__(self, input=None):
super(QueryLexer, self).__init__(input)
self.checkVersion("4.5")
self._interp = LexerATNSimulator(self, self.atn, self.decisionsToDFA, PredictionContextCache())
self._actions = None
self._predicates = None
|
#
#
# bignum.py
#
# This file is copied from python-vcoinlib.
#
# Distributed under the MIT/X11 software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
"""Bignum routines"""
from __future__ import absolute_import, division, print_function, unicode_literals
import struct
# generic big endian MPI format
def bn_bytes(v, have_ext=False):
ext = 0
if have_ext:
ext = 1
return ((v.bit_length()+7)//8) + ext
def bn2bin(v):
s = bytearray()
i = bn_bytes(v)
while i > 0:
s.append((v >> ((i-1) * 8)) & 0xff)
i -= 1
return s
def bin2bn(s):
l = 0
for ch in s:
l = (l << 8) | ch
return l
def bn2mpi(v):
have_ext = False
if v.bit_length() > 0:
have_ext = (v.bit_length() & 0x07) == 0
neg = False
if v < 0:
neg = True
v = -v
s = struct.pack(b">I", bn_bytes(v, have_ext))
ext = bytearray()
if have_ext:
ext.append(0)
v_bin = bn2bin(v)
if neg:
if have_ext:
ext[0] |= 0x80
else:
v_bin[0] |= 0x80
return s + ext + v_bin
def mpi2bn(s):
if len(s) < 4:
return None
s_size = bytes(s[:4])
v_len = struct.unpack(b">I", s_size)[0]
if len(s) != (v_len + 4):
return None
if v_len == 0:
return 0
v_str = bytearray(s[4:])
neg = False
i = v_str[0]
if i & 0x80:
neg = True
i &= ~0x80
v_str[0] = i
v = bin2bn(v_str)
if neg:
return -v
return v
# vcoin-specific little endian format, with implicit size
def mpi2vch(s):
r = s[4:] # strip size
r = r[::-1] # reverse string, converting BE->LE
return r
def bn2vch(v):
return bytes(mpi2vch(bn2mpi(v)))
def vch2mpi(s):
r = struct.pack(b">I", len(s)) # size
r += s[::-1] # reverse string, converting LE->BE
return r
def vch2bn(s):
return mpi2bn(vch2mpi(s))
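# Hypothetical self-test added for illustration (not part of the original
# vcoinlib module): both the MPI and the vch serializations should
# round-trip arbitrary signed integers, including the sign-bit edge cases.
if __name__ == '__main__':
    for v in (0, 1, -1, 0x80, -0x80, 2**64 + 5):
        assert mpi2bn(bn2mpi(v)) == v
        assert vch2bn(bn2vch(v)) == v
    print('bignum round-trip OK')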
|
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Graph processing script."""
import os
from absl import app
from absl import flags
from absl import logging
import networkx as nx
import pandas as pd
from utils import graph_filter_with_degree
from utils import load_graph_from_edgelist_csv
FLAGS = flags.FLAGS
flags.DEFINE_string(
'g_file',
'../proj_Data/cat_data/test3/sr_timespan_post_graph-00000-of-00001.csv',
'raw graph edgelist csv file')
flags.DEFINE_integer('low', 40, 'low degree threshold')
flags.DEFINE_integer('high', 80, 'high degree threshold')
flags.DEFINE_string('data_file', '', 'raw data path')
flags.DEFINE_string('filename', '', 'graph filename')
flags.DEFINE_string('save_path', '', 'graph save path')
def main(_):
df = pd.read_csv(FLAGS.data_file)
author_set = set(df['author'].unique())
graph = load_graph_from_edgelist_csv(FLAGS.g_file)
logging.info('Original Graph size: %d nodes, %d edges',
graph.number_of_nodes(), graph.number_of_edges())
graph = graph_filter_with_degree(graph, FLAGS.low, FLAGS.high, author_set)
logging.info('Filtered Graph size: %d nodes, %d edges',
graph.number_of_nodes(), graph.number_of_edges())
nx.write_gpickle(graph, os.path.join(
FLAGS.save_path, FLAGS.filename + '%s_%s.gpickle' %
(FLAGS.low, FLAGS.high)))
logging.info('Saved graph.')
if __name__ == '__main__':
app.run(main)
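# Example invocation (script name, paths and values are illustrative):
#   python graph_process.py --data_file=posts.csv --g_file=edges.csv \
#       --low=40 --high=80 --filename=sr_graph --save_path=./out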
|
import sys
import os
import logging
import numpy as np
from click.testing import CliRunner
import rasterio
from rasterio.rio.main import main_group
logging.basicConfig(stream=sys.stderr, level=logging.DEBUG)
TEST_BBOX = [-11850000, 4804000, -11840000, 4808000]
def bbox(*args):
return ' '.join([str(x) for x in args])
def test_clip_bounds(runner, tmpdir):
output = str(tmpdir.join('test.tif'))
result = runner.invoke(
main_group,
['clip', 'tests/data/shade.tif', output, '--bounds', bbox(*TEST_BBOX)])
assert result.exit_code == 0
assert os.path.exists(output)
with rasterio.open(output) as out:
assert out.shape == (419, 173)
def test_clip_bounds_geographic(runner, tmpdir):
output = str(tmpdir.join('test.tif'))
result = runner.invoke(
main_group,
['clip', 'tests/data/RGB.byte.tif', output, '--geographic', '--bounds',
'-78.95864996545055 23.564991210854686 -76.57492370013823 25.550873767433984'])
assert result.exit_code == 0
assert os.path.exists(output)
with rasterio.open(output) as out:
assert out.shape == (718, 791)
def test_clip_like(runner, tmpdir):
output = str(tmpdir.join('test.tif'))
result = runner.invoke(
main_group, [
'clip', 'tests/data/shade.tif', output, '--like',
'tests/data/shade.tif'])
assert result.exit_code == 0
assert os.path.exists(output)
with rasterio.open('tests/data/shade.tif') as template_ds:
with rasterio.open(output) as out:
assert out.shape == template_ds.shape
assert np.allclose(out.bounds, template_ds.bounds)
def test_clip_missing_params(runner, tmpdir):
output = str(tmpdir.join('test.tif'))
result = runner.invoke(
main_group, ['clip', 'tests/data/shade.tif', output])
assert result.exit_code == 2
assert '--bounds or --like required' in result.output
def test_clip_bounds_disjunct(runner, tmpdir):
output = str(tmpdir.join('test.tif'))
result = runner.invoke(
main_group,
['clip', 'tests/data/shade.tif', output, '--bounds', bbox(0, 0, 10, 10)])
assert result.exit_code == 2
assert '--bounds' in result.output
def test_clip_like_disjunct(runner, tmpdir):
output = str(tmpdir.join('test.tif'))
result = runner.invoke(
main_group, [
'clip', 'tests/data/shade.tif', output, '--like',
'tests/data/RGB.byte.tif'])
assert result.exit_code == 2
assert '--like' in result.output
# Tests: format and type conversion, --format and --dtype
def test_format(tmpdir):
outputname = str(tmpdir.join('test.jpg'))
runner = CliRunner()
result = runner.invoke(
main_group,
['convert', 'tests/data/RGB.byte.tif', outputname, '--format', 'JPEG'])
assert result.exit_code == 0
with rasterio.open(outputname) as src:
assert src.driver == 'JPEG'
def test_format_short(tmpdir):
outputname = str(tmpdir.join('test.jpg'))
runner = CliRunner()
result = runner.invoke(
main_group,
['convert', 'tests/data/RGB.byte.tif', outputname, '-f', 'JPEG'])
assert result.exit_code == 0
with rasterio.open(outputname) as src:
assert src.driver == 'JPEG'
def test_output_opt(tmpdir):
outputname = str(tmpdir.join('test.jpg'))
runner = CliRunner()
result = runner.invoke(
main_group,
['convert', 'tests/data/RGB.byte.tif', '-o', outputname, '-f', 'JPEG'])
assert result.exit_code == 0
with rasterio.open(outputname) as src:
assert src.driver == 'JPEG'
def test_dtype(tmpdir):
outputname = str(tmpdir.join('test.tif'))
runner = CliRunner()
result = runner.invoke(
main_group,
['convert', 'tests/data/RGB.byte.tif', outputname, '--dtype', 'uint16'])
assert result.exit_code == 0
with rasterio.open(outputname) as src:
assert src.dtypes == tuple(['uint16'] * 3)
def test_dtype_rescaling_uint8_full(tmpdir):
"""Rescale uint8 [0, 255] to uint8 [0, 255]"""
outputname = str(tmpdir.join('test.tif'))
runner = CliRunner()
result = runner.invoke(
main_group,
['convert', 'tests/data/RGB.byte.tif', outputname, '--scale-ratio', '1.0'])
assert result.exit_code == 0
src_stats = [
{"max": 255.0, "mean": 44.434478650699106, "min": 1.0},
{"max": 255.0, "mean": 66.02203484105824, "min": 1.0},
{"max": 255.0, "mean": 71.39316199120559, "min": 1.0}]
with rasterio.open(outputname) as src:
for band, expected in zip(src.read(masked=True), src_stats):
assert round(band.min() - expected['min'], 6) == 0.0
assert round(band.max() - expected['max'], 6) == 0.0
assert round(band.mean() - expected['mean'], 6) == 0.0
def test_dtype_rescaling_uint8_half(tmpdir):
"""Rescale uint8 [0, 255] to uint8 [0, 127]"""
outputname = str(tmpdir.join('test.tif'))
runner = CliRunner()
result = runner.invoke(main_group, [
'convert', 'tests/data/RGB.byte.tif', outputname, '--scale-ratio', '0.5'])
assert result.exit_code == 0
with rasterio.open(outputname) as src:
for band in src.read():
assert round(band.min() - 0, 6) == 0.0
assert round(band.max() - 127, 6) == 0.0
def test_dtype_rescaling_uint16(tmpdir):
"""Rescale uint8 [0, 255] to uint16 [0, 4095]"""
# NB: 255 * 16 is 4080, we don't actually get to 4095.
outputname = str(tmpdir.join('test.tif'))
runner = CliRunner()
result = runner.invoke(main_group, [
'convert', 'tests/data/RGB.byte.tif', outputname, '--dtype', 'uint16',
'--scale-ratio', '16'])
assert result.exit_code == 0
with rasterio.open(outputname) as src:
for band in src.read():
assert round(band.min() - 0, 6) == 0.0
assert round(band.max() - 4080, 6) == 0.0
def test_dtype_rescaling_float64(tmpdir):
"""Rescale uint8 [0, 255] to float64 [-1, 1]"""
outputname = str(tmpdir.join('test.tif'))
runner = CliRunner()
result = runner.invoke(main_group, [
'convert', 'tests/data/RGB.byte.tif', outputname, '--dtype', 'float64',
'--scale-ratio', str(2.0 / 255), '--scale-offset', '-1.0'])
assert result.exit_code == 0
with rasterio.open(outputname) as src:
for band in src.read():
assert round(band.min() + 1.0, 6) == 0.0
assert round(band.max() - 1.0, 6) == 0.0
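# The float64 conversion above applies y = x * (2 / 255) - 1, so band minima
# near x = 0 land at -1.0 and maxima at x = 255 land at (approximately) +1.0.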
def test_rgb(tmpdir):
outputname = str(tmpdir.join('test.tif'))
runner = CliRunner()
result = runner.invoke(
main_group,
['convert', 'tests/data/RGB.byte.tif', outputname, '--rgb'])
assert result.exit_code == 0
with rasterio.open(outputname) as src:
assert src.colorinterp(1) == rasterio.enums.ColorInterp.red
|
#!/usr/bin/env python
# encoding: utf-8
from t import T
import requests,urllib2,json,urlparse
class P(T):
def __init__(self):
T.__init__(self)
def verify(self,head='',context='',ip='',port='',productname={},keywords='',hackinfo=''):
target_url = "http://"+ip+":"+str(port)+"/cacti.sql"
result = {}
result['result']=False
r=None
try:
r=requests.get(url=target_url,timeout=2)
if r.status_code==200:
result['result']=True
result['VerifyInfo'] = {}
result['VerifyInfo']['type']='cacti file disclosure'
result['VerifyInfo']['URL'] =ip+"/cacti.sql"
result['VerifyInfo']['payload']='IP/cacti.sql'
result['VerifyInfo']['result'] =''
else:
pass
        except Exception, e:
            print e  # a generic Exception has no .text attribute
finally:
if r is not None:
r.close()
del r
return result
if __name__ == '__main__':
print P().verify(ip='140.114.108.4',port='80')
|
#!/usr/bin/env python3
# Copyright (c) 2014-2017 Wladimir J. van der Laan
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
'''
Script to generate list of seed nodes for chainparams.cpp.
This script expects two text files in the directory that is passed as an
argument:
nodes_main.txt
nodes_test.txt
These files must consist of lines in the format
<ip>
<ip>:<port>
[<ipv6>]
[<ipv6>]:<port>
<onion>.onion
0xDDBBCCAA (IPv4 little-endian old pnSeeds format)
The output will be two data structures with the peers in binary format:
static SeedSpec6 pnSeed6_main[]={
...
}
static SeedSpec6 pnSeed6_test[]={
...
}
These should be pasted into `src/chainparamsseeds.h`.
'''
from base64 import b32decode
from binascii import a2b_hex
import sys
import os
import re
# ipv4 in ipv6 prefix
pchIPv4 = bytearray([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0xff, 0xff])
# tor-specific ipv6 prefix
pchOnionCat = bytearray([0xFD,0x87,0xD8,0x7E,0xEB,0x43])
def name_to_ipv6(addr):
if len(addr)>6 and addr.endswith('.onion'):
vchAddr = b32decode(addr[0:-6], True)
if len(vchAddr) != 16-len(pchOnionCat):
raise ValueError('Invalid onion %s' % vchAddr)
return pchOnionCat + vchAddr
elif '.' in addr: # IPv4
return pchIPv4 + bytearray((int(x) for x in addr.split('.')))
elif ':' in addr: # IPv6
sub = [[], []] # prefix, suffix
x = 0
addr = addr.split(':')
for i,comp in enumerate(addr):
if comp == '':
if i == 0 or i == (len(addr)-1): # skip empty component at beginning or end
continue
x += 1 # :: skips to suffix
assert(x < 2)
else: # two bytes per component
val = int(comp, 16)
sub[x].append(val >> 8)
sub[x].append(val & 0xff)
nullbytes = 16 - len(sub[0]) - len(sub[1])
assert((x == 0 and nullbytes == 0) or (x == 1 and nullbytes > 0))
return bytearray(sub[0] + ([0] * nullbytes) + sub[1])
elif addr.startswith('0x'): # IPv4-in-little-endian
return pchIPv4 + bytearray(reversed(a2b_hex(addr[2:])))
else:
raise ValueError('Could not parse address %s' % addr)
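# Illustrative outputs (sketch):
#   name_to_ipv6('1.2.3.4')    -> ::ffff:1.2.3.4 (IPv4-mapped IPv6 bytes)
#   name_to_ipv6('::1')        -> fifteen zero bytes followed by 0x01
#   name_to_ipv6('0x0100007f') -> 127.0.0.1 (little-endian pnSeeds form)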
def parse_spec(s, defaultport):
    match = re.match(r'\[([0-9a-fA-F:]+)\](?::([0-9]+))?$', s)
if match: # ipv6
host = match.group(1)
port = match.group(2)
elif s.count(':') > 1: # ipv6, no port
host = s
port = ''
else:
(host,_,port) = s.partition(':')
if not port:
port = defaultport
else:
port = int(port)
host = name_to_ipv6(host)
return (host,port)
def process_nodes(g, f, structname, defaultport):
g.write('static SeedSpec6 %s[] = {\n' % structname)
first = True
for line in f:
comment = line.find('#')
if comment != -1:
line = line[0:comment]
line = line.strip()
if not line:
continue
if not first:
g.write(',\n')
first = False
(host,port) = parse_spec(line, defaultport)
hoststr = ','.join(('0x%02x' % b) for b in host)
g.write(' {{%s}, %i}' % (hoststr, port))
g.write('\n};\n')
def main():
if len(sys.argv)<2:
print(('Usage: %s <path_to_nodes_txt>' % sys.argv[0]), file=sys.stderr)
sys.exit(1)
g = sys.stdout
indir = sys.argv[1]
g.write('#ifndef DIGIBYTE_CHAINPARAMSSEEDS_H\n')
g.write('#define DIGIBYTE_CHAINPARAMSSEEDS_H\n')
g.write('/**\n')
g.write(' * List of fixed seed nodes for the digibyte network\n')
g.write(' * AUTOGENERATED by contrib/seeds/generate-seeds.py\n')
g.write(' *\n')
g.write(' * Each line contains a 16-byte IPv6 address and a port.\n')
g.write(' * IPv4 as well as onion addresses are wrapped inside an IPv6 address accordingly.\n')
g.write(' */\n')
with open(os.path.join(indir,'nodes_main.txt'), 'r', encoding="utf8") as f:
process_nodes(g, f, 'pnSeed6_main', 8333)
g.write('\n')
with open(os.path.join(indir,'nodes_test.txt'), 'r', encoding="utf8") as f:
process_nodes(g, f, 'pnSeed6_test', 18333)
g.write('#endif // DIGIBYTE_CHAINPARAMSSEEDS_H\n')
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
# coding: utf8
#
# Copyright 2016 hdd
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Job Management Module
"""
class Job(object):
def __init__(self, jid, body, reserved=True):
self.jid = jid
self.body = body
self.reserved = reserved
# def _priority(self):
# stats = self.stats()
# if isinstance(stats, dict):
# return stats['pri']
# return macro.PRIORITY
# def delete(self):
# """Delete this job."""
# self.conn.delete(self.jid)
# self.reserved = False
#
# def release(self, priority=None, delay=0):
# """Release this job back into the ready queue."""
# if self.reserved:
# self.conn.release(self.jid, priority or self._priority(), delay)
# self.reserved = False
#
# def bury(self, priority=None):
# """Bury this job."""
# if self.reserved:
# self.conn.bury(self.jid, priority or self._priority())
# self.reserved = False
# def kick(self):
# """Kick this job alive."""
# self.conn.kick_job(self.jid)
#
# def touch(self):
# """Touch this reserved job, requesting more time to work on it before
# it expires."""
# if self.reserved:
# self.conn.touch(self.jid)
#
# def stats(self):
# """Return a dict of stats about this job."""
# return self.conn.stats_job(self.jid)
|
# -*- coding: utf-8 -*-
# Copyright 2007-2020 The HyperSpy developers
#
# This file is part of HyperSpy.
#
# HyperSpy is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# HyperSpy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with HyperSpy. If not, see <http://www.gnu.org/licenses/>.
import hashlib
import os
import logging
import tempfile
from pathlib import Path
from unittest.mock import patch
import numpy as np
import pytest
import hyperspy.api as hs
from hyperspy.signals import Signal1D
FULLFILENAME = Path(__file__).resolve().parent.joinpath("test_io_overwriting.hspy")
class TestIOOverwriting:
def setup_method(self, method):
self.s = Signal1D(np.arange(10))
self.new_s = Signal1D(np.ones(5))
# make sure we start from a clean state
self._clean_file()
self.s.save(FULLFILENAME)
self.s_file_hashed = self._hash_file(FULLFILENAME)
def _hash_file(self, filename):
with open(filename, "rb") as file:
md5_hash = hashlib.md5(file.read())
file_hashed = md5_hash.hexdigest()
return file_hashed
def _clean_file(self):
if os.path.exists(FULLFILENAME):
os.remove(FULLFILENAME)
def _check_file_is_written(self, filename):
        # Check for a different hash: if the file content differs from the
        # original, the hashes will differ.
        return self.s_file_hashed != self._hash_file(filename)
def test_io_overwriting_True(self):
        # Overwrite is True: when the file exists, we overwrite it
self.new_s.save(FULLFILENAME, overwrite=True)
assert self._check_file_is_written(FULLFILENAME)
def test_io_overwriting_False(self):
        # Overwrite is False: when the file exists, we don't overwrite it
self.new_s.save(FULLFILENAME, overwrite=False)
assert not self._check_file_is_written(FULLFILENAME)
@pytest.mark.parametrize("overwrite", [None, True, False])
def test_io_overwriting_no_existing_file(self, overwrite):
self._clean_file() # remove the file
self.new_s.save(FULLFILENAME, overwrite=overwrite)
assert self._check_file_is_written(FULLFILENAME)
def test_io_overwriting_None_existing_file_y(self):
        # Overwrite is None: when the file exists we ask; mock `y` here
with patch("builtins.input", return_value="y"):
self.new_s.save(FULLFILENAME)
assert self._check_file_is_written(FULLFILENAME)
def test_io_overwriting_None_existing_file_n(self):
        # Overwrite is None: when the file exists we ask; mock `n` here
with patch("builtins.input", return_value="n"):
self.new_s.save(FULLFILENAME)
assert not self._check_file_is_written(FULLFILENAME)
def teardown_method(self, method):
self._clean_file()
def test_glob_wildcards():
s = Signal1D(np.arange(10))
with tempfile.TemporaryDirectory() as dirpath:
fnames = [os.path.join(dirpath, f"temp[1x{x}].hspy") for x in range(2)]
for f in fnames:
s.save(f)
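        # glob treats "[...]" as a character class, so a filename containing
        # literal brackets does not match itself when used as a pattern.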
with pytest.raises(ValueError, match="No filename matches this pattern"):
_ = hs.load(fnames[0])
t = hs.load([fnames[0]])
assert len(t) == 1
t = hs.load(fnames)
assert len(t) == 2
t = hs.load(os.path.join(dirpath, "temp*.hspy"))
assert len(t) == 2
t = hs.load(os.path.join(dirpath, "temp[*].hspy"), escape_square_brackets=True,)
assert len(t) == 2
with pytest.raises(ValueError, match="No filename matches this pattern"):
_ = hs.load(os.path.join(dirpath, "temp[*].hspy"))
# Test pathlib.Path
t = hs.load(Path(dirpath, "temp[1x0].hspy"))
assert len(t) == 1
t = hs.load([Path(dirpath, "temp[1x0].hspy"), Path(dirpath, "temp[1x1].hspy")])
assert len(t) == 2
t = hs.load(list(Path(dirpath).glob("temp*.hspy")))
assert len(t) == 2
t = hs.load(Path(dirpath).glob("temp*.hspy"))
assert len(t) == 2
def test_file_not_found_error():
with tempfile.TemporaryDirectory() as dirpath:
temp_fname = os.path.join(dirpath, "temp.hspy")
if os.path.exists(temp_fname):
os.remove(temp_fname)
with pytest.raises(ValueError, match="No filename matches this pattern"):
_ = hs.load(temp_fname)
with pytest.raises(FileNotFoundError):
_ = hs.load([temp_fname])
def test_file_reader_error():
# Only None, str or objects with attr "file_reader" are supported
s = Signal1D(np.arange(10))
with tempfile.TemporaryDirectory() as dirpath:
f = os.path.join(dirpath, "temp.hspy")
s.save(f)
with pytest.raises(ValueError, match="reader"):
_ = hs.load(f, reader=123)
def test_file_reader_warning(caplog):
# Test fallback to Pillow imaging library
s = Signal1D(np.arange(10))
with tempfile.TemporaryDirectory() as dirpath:
f = os.path.join(dirpath, "temp.hspy")
s.save(f)
with pytest.raises(ValueError, match="Could not load"):
with caplog.at_level(logging.WARNING):
_ = hs.load(f, reader="some_unknown_file_extension")
assert "Unable to infer file type from extension" in caplog.text
def test_file_reader_options():
s = Signal1D(np.arange(10))
with tempfile.TemporaryDirectory() as dirpath:
f = os.path.join(dirpath, "temp.hspy")
s.save(f)
# Test string reader
t = hs.load(Path(dirpath, "temp.hspy"), reader="hspy")
assert len(t) == 1
np.testing.assert_allclose(t.data, np.arange(10))
# Test object reader
from hyperspy.io_plugins import hspy
t = hs.load(Path(dirpath, "temp.hspy"), reader=hspy)
assert len(t) == 1
np.testing.assert_allclose(t.data, np.arange(10))
def test_save_default_format():
s = Signal1D(np.arange(10))
with tempfile.TemporaryDirectory() as dirpath:
f = os.path.join(dirpath, "temp")
s.save(f)
t = hs.load(Path(dirpath, "temp.hspy"))
assert len(t) == 1
|
'''A collection of build variants.
'''
import logging, sys
logger = logging.getLogger('bygg.variant')
class Variant(object):
def __init__(self, name):
self.name = name
def configure(self, env):
pass
class DebugVariant(Variant):
def __init__(self, name='debug'):
super(DebugVariant, self).__init__(name)
def configure(self, env):
super(DebugVariant, self).configure(env)
env.AppendUnique(CXXFLAGS=['-g', '-O0'])
class ReleaseVariant(Variant):
def __init__(self, name='release'):
super(ReleaseVariant, self).__init__(name)
class VariantManager:
def __init__(self):
self.variants = {}
self.__active = None
active_variant = property(lambda self: self.__active)
def register_variant(self, name, variant):
self.variants[name] = variant
def configure_variant(self, env, name):
try:
logger.info('Configuring for variant {0}'.format(name))
self.variants[name].configure(env)
self.__active = self.variants[name]
except KeyError:
            logger.error('Invalid variant "{0}". Valid options are {1}.'.format(
                name, ', '.join(self.variants)))
sys.exit(1)
mgr = VariantManager()
register_variant = mgr.register_variant
configure_variant = mgr.configure_variant
active_variant = lambda: mgr.active_variant
register_variant('release', ReleaseVariant())
register_variant('debug', DebugVariant())
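# A minimal usage sketch (assumes this module is importable as bygg.variant
# and that `env` is a SCons-style environment exposing AppendUnique, as
# DebugVariant.configure above expects):
#
#   from bygg.variant import configure_variant, active_variant
#   configure_variant(env, 'debug')     # applies -g -O0 via DebugVariant
#   print(active_variant().name)        # -> 'debug'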
|
# Copyright (c) 2014 Alex Meade. All rights reserved.
# Copyright (c) 2014 Clinton Knight. All rights reserved.
# Copyright (c) 2014 Andrew Kerr. All rights reserved.
# Copyright (c) 2015 Tom Barron. All rights reserved.
# Copyright (c) 2015 Goutham Pacha Ravi. All rights reserved.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Mock unit tests for the NetApp block storage library
"""
import copy
import uuid
import mock
from oslo_utils import units
import six
from cinder import exception
from cinder.i18n import _
from cinder import test
from cinder.tests.unit.volume.drivers.netapp.dataontap import fakes as fake
import cinder.tests.unit.volume.drivers.netapp.fakes as na_fakes
from cinder.volume.drivers.netapp.dataontap import block_base
from cinder.volume.drivers.netapp.dataontap.client import api as netapp_api
from cinder.volume.drivers.netapp import utils as na_utils
from cinder.volume import utils as volume_utils
class NetAppBlockStorageLibraryTestCase(test.TestCase):
def setUp(self):
super(NetAppBlockStorageLibraryTestCase, self).setUp()
kwargs = {'configuration': self.get_config_base()}
self.library = block_base.NetAppBlockStorageLibrary(
'driver', 'protocol', **kwargs)
self.library.zapi_client = mock.Mock()
self.zapi_client = self.library.zapi_client
self.mock_request = mock.Mock()
def tearDown(self):
super(NetAppBlockStorageLibraryTestCase, self).tearDown()
def get_config_base(self):
return na_fakes.create_configuration()
    def test_get_reserved_percentage_default_multiplier(self):
default = 1.2
reserved_percentage = 20.0
self.library.configuration.netapp_size_multiplier = default
self.library.configuration.reserved_percentage = reserved_percentage
self.mock_object(block_base, 'LOG')
result = self.library._get_reserved_percentage()
self.assertEqual(reserved_percentage, result)
self.assertFalse(block_base.LOG.warn.called)
def test_get_reserved_percentage(self):
multiplier = 2.0
self.library.configuration.netapp_size_multiplier = multiplier
self.mock_object(block_base, 'LOG')
result = self.library._get_reserved_percentage()
reserved_ratio = round(1 - (1 / multiplier), 2)
reserved_percentage = 100 * int(reserved_ratio)
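        # With multiplier=2.0, reserved_ratio is 0.5 and int() truncates it to
        # 0, so the expected percentage is 0; this presumably mirrors the
        # truncation the library applies when converting the legacy multiplier.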
self.assertEqual(reserved_percentage, result)
self.assertTrue(block_base.LOG.warn.called)
@mock.patch.object(block_base.NetAppBlockStorageLibrary,
'_get_lun_attr',
mock.Mock(return_value={'Volume': 'FAKE_CMODE_VOL1'}))
def test_get_pool(self):
pool = self.library.get_pool({'name': 'volume-fake-uuid'})
self.assertEqual('FAKE_CMODE_VOL1', pool)
@mock.patch.object(block_base.NetAppBlockStorageLibrary,
'_get_lun_attr',
mock.Mock(return_value=None))
def test_get_pool_no_metadata(self):
pool = self.library.get_pool({'name': 'volume-fake-uuid'})
self.assertIsNone(pool)
@mock.patch.object(block_base.NetAppBlockStorageLibrary,
'_get_lun_attr',
mock.Mock(return_value=dict()))
def test_get_pool_volume_unknown(self):
pool = self.library.get_pool({'name': 'volume-fake-uuid'})
self.assertIsNone(pool)
def test_create_volume(self):
volume_size_in_bytes = int(fake.SIZE) * units.Gi
self.mock_object(na_utils, 'get_volume_extra_specs')
self.mock_object(na_utils, 'log_extra_spec_warnings')
self.mock_object(block_base, 'LOG')
self.mock_object(volume_utils, 'extract_host', mock.Mock(
return_value=fake.POOL_NAME))
self.mock_object(self.library, '_setup_qos_for_volume',
mock.Mock(return_value=None))
self.mock_object(self.library, '_create_lun')
self.mock_object(self.library, '_create_lun_handle')
self.mock_object(self.library, '_add_lun_to_table')
self.mock_object(self.library, '_mark_qos_policy_group_for_deletion')
self.library.create_volume(fake.VOLUME)
self.library._create_lun.assert_called_once_with(
fake.POOL_NAME, fake.LUN_NAME, volume_size_in_bytes,
fake.LUN_METADATA, None)
self.assertEqual(0, self.library.
_mark_qos_policy_group_for_deletion.call_count)
self.assertEqual(0, block_base.LOG.error.call_count)
def test_create_volume_no_pool(self):
self.mock_object(volume_utils, 'extract_host', mock.Mock(
return_value=None))
self.assertRaises(exception.InvalidHost, self.library.create_volume,
fake.VOLUME)
def test_create_volume_exception_path(self):
self.mock_object(block_base, 'LOG')
self.mock_object(na_utils, 'get_volume_extra_specs')
self.mock_object(self.library, '_setup_qos_for_volume',
mock.Mock(return_value=None))
self.mock_object(self.library, '_create_lun', mock.Mock(
side_effect=Exception))
self.mock_object(self.library, '_mark_qos_policy_group_for_deletion')
self.assertRaises(exception.VolumeBackendAPIException,
self.library.create_volume, fake.VOLUME)
self.assertEqual(1, self.library.
_mark_qos_policy_group_for_deletion.call_count)
self.assertEqual(1, block_base.LOG.exception.call_count)
def test_create_volume_no_pool_provided_by_scheduler(self):
fake_volume = copy.deepcopy(fake.VOLUME)
# Set up fake volume whose 'host' field is missing pool information.
fake_volume['host'] = '%s@%s' % (fake.HOST_NAME, fake.BACKEND_NAME)
self.assertRaises(exception.InvalidHost, self.library.create_volume,
fake_volume)
@mock.patch.object(block_base.NetAppBlockStorageLibrary,
'_get_lun_attr')
@mock.patch.object(block_base.NetAppBlockStorageLibrary,
'_get_or_create_igroup')
def test_map_lun(self, mock_get_or_create_igroup, mock_get_lun_attr):
os = 'linux'
protocol = 'fcp'
self.library.host_type = 'linux'
mock_get_lun_attr.return_value = {'Path': fake.LUN_PATH, 'OsType': os}
mock_get_or_create_igroup.return_value = (fake.IGROUP1_NAME, os,
'iscsi')
self.zapi_client.map_lun.return_value = '1'
lun_id = self.library._map_lun('fake_volume',
fake.FC_FORMATTED_INITIATORS,
protocol, None)
self.assertEqual('1', lun_id)
mock_get_or_create_igroup.assert_called_once_with(
fake.FC_FORMATTED_INITIATORS, protocol, os)
self.zapi_client.map_lun.assert_called_once_with(
fake.LUN_PATH, fake.IGROUP1_NAME, lun_id=None)
@mock.patch.object(block_base.NetAppBlockStorageLibrary, '_get_lun_attr')
@mock.patch.object(block_base.NetAppBlockStorageLibrary,
'_get_or_create_igroup')
@mock.patch.object(block_base, 'LOG', mock.Mock())
def test_map_lun_mismatch_host_os(
self, mock_get_or_create_igroup, mock_get_lun_attr):
os = 'windows'
protocol = 'fcp'
self.library.host_type = 'linux'
mock_get_lun_attr.return_value = {'Path': fake.LUN_PATH, 'OsType': os}
mock_get_or_create_igroup.return_value = (fake.IGROUP1_NAME, os,
'iscsi')
self.library._map_lun('fake_volume',
fake.FC_FORMATTED_INITIATORS,
protocol, None)
mock_get_or_create_igroup.assert_called_once_with(
fake.FC_FORMATTED_INITIATORS, protocol,
self.library.host_type)
self.zapi_client.map_lun.assert_called_once_with(
fake.LUN_PATH, fake.IGROUP1_NAME, lun_id=None)
self.assertEqual(1, block_base.LOG.warning.call_count)
@mock.patch.object(block_base.NetAppBlockStorageLibrary,
'_get_lun_attr')
@mock.patch.object(block_base.NetAppBlockStorageLibrary,
'_get_or_create_igroup')
@mock.patch.object(block_base.NetAppBlockStorageLibrary,
'_find_mapped_lun_igroup')
def test_map_lun_preexisting(self, mock_find_mapped_lun_igroup,
mock_get_or_create_igroup, mock_get_lun_attr):
os = 'linux'
protocol = 'fcp'
mock_get_lun_attr.return_value = {'Path': fake.LUN_PATH, 'OsType': os}
mock_get_or_create_igroup.return_value = (fake.IGROUP1_NAME, os,
'iscsi')
mock_find_mapped_lun_igroup.return_value = (fake.IGROUP1_NAME, '2')
self.zapi_client.map_lun.side_effect = netapp_api.NaApiError
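        # map_lun raising NaApiError simulates a LUN that is already mapped;
        # the library is expected to fall back to the existing igroup mapping.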
lun_id = self.library._map_lun(
'fake_volume', fake.FC_FORMATTED_INITIATORS, protocol, None)
self.assertEqual('2', lun_id)
mock_find_mapped_lun_igroup.assert_called_once_with(
fake.LUN_PATH, fake.FC_FORMATTED_INITIATORS)
@mock.patch.object(block_base.NetAppBlockStorageLibrary,
'_get_lun_attr')
@mock.patch.object(block_base.NetAppBlockStorageLibrary,
'_get_or_create_igroup')
@mock.patch.object(block_base.NetAppBlockStorageLibrary,
'_find_mapped_lun_igroup')
def test_map_lun_api_error(self, mock_find_mapped_lun_igroup,
mock_get_or_create_igroup, mock_get_lun_attr):
os = 'linux'
protocol = 'fcp'
mock_get_lun_attr.return_value = {'Path': fake.LUN_PATH, 'OsType': os}
mock_get_or_create_igroup.return_value = (fake.IGROUP1_NAME, os,
'iscsi')
mock_find_mapped_lun_igroup.return_value = (None, None)
self.zapi_client.map_lun.side_effect = netapp_api.NaApiError
self.assertRaises(netapp_api.NaApiError, self.library._map_lun,
'fake_volume', fake.FC_FORMATTED_INITIATORS,
protocol, None)
@mock.patch.object(block_base.NetAppBlockStorageLibrary,
'_find_mapped_lun_igroup')
def test_unmap_lun(self, mock_find_mapped_lun_igroup):
mock_find_mapped_lun_igroup.return_value = (fake.IGROUP1_NAME, 1)
self.library._unmap_lun(fake.LUN_PATH, fake.FC_FORMATTED_INITIATORS)
self.zapi_client.unmap_lun.assert_called_once_with(fake.LUN_PATH,
fake.IGROUP1_NAME)
def test_find_mapped_lun_igroup(self):
self.assertRaises(NotImplementedError,
self.library._find_mapped_lun_igroup,
fake.LUN_PATH,
fake.FC_FORMATTED_INITIATORS)
def test_has_luns_mapped_to_initiators(self):
self.zapi_client.has_luns_mapped_to_initiators.return_value = True
self.assertTrue(self.library._has_luns_mapped_to_initiators(
fake.FC_FORMATTED_INITIATORS))
self.zapi_client.has_luns_mapped_to_initiators.assert_called_once_with(
fake.FC_FORMATTED_INITIATORS)
def test_get_or_create_igroup_preexisting(self):
self.zapi_client.get_igroup_by_initiators.return_value = [fake.IGROUP1]
self.library._create_igroup_add_initiators = mock.Mock()
igroup_name, host_os, ig_type = self.library._get_or_create_igroup(
fake.FC_FORMATTED_INITIATORS, 'fcp', 'linux')
self.assertEqual(fake.IGROUP1_NAME, igroup_name)
self.assertEqual('linux', host_os)
self.assertEqual('fcp', ig_type)
self.zapi_client.get_igroup_by_initiators.assert_called_once_with(
fake.FC_FORMATTED_INITIATORS)
self.assertEqual(
0, self.library._create_igroup_add_initiators.call_count)
@mock.patch.object(uuid, 'uuid4', mock.Mock(return_value=fake.UUID1))
def test_get_or_create_igroup_none_preexisting(self):
"""This method also tests _create_igroup_add_initiators."""
self.zapi_client.get_igroup_by_initiators.return_value = []
igroup_name, os, ig_type = self.library._get_or_create_igroup(
fake.FC_FORMATTED_INITIATORS, 'fcp', 'linux')
self.assertEqual('openstack-' + fake.UUID1, igroup_name)
self.zapi_client.create_igroup.assert_called_once_with(
igroup_name, 'fcp', 'linux')
self.assertEqual(len(fake.FC_FORMATTED_INITIATORS),
self.zapi_client.add_igroup_initiator.call_count)
self.assertEqual('linux', os)
self.assertEqual('fcp', ig_type)
def test_get_fc_target_wwpns(self):
self.assertRaises(NotImplementedError,
self.library._get_fc_target_wwpns)
@mock.patch.object(block_base.NetAppBlockStorageLibrary,
'_build_initiator_target_map')
@mock.patch.object(block_base.NetAppBlockStorageLibrary,
'_map_lun')
def test_initialize_connection_fc(self, mock_map_lun,
mock_build_initiator_target_map):
self.maxDiff = None
mock_map_lun.return_value = '1'
mock_build_initiator_target_map.return_value = (fake.FC_TARGET_WWPNS,
fake.FC_I_T_MAP, 4)
target_info = self.library.initialize_connection_fc(fake.FC_VOLUME,
fake.FC_CONNECTOR)
self.assertDictEqual(target_info, fake.FC_TARGET_INFO)
mock_map_lun.assert_called_once_with(
'fake_volume', fake.FC_FORMATTED_INITIATORS, 'fcp', None)
@mock.patch.object(block_base.NetAppBlockStorageLibrary,
'_build_initiator_target_map')
@mock.patch.object(block_base.NetAppBlockStorageLibrary,
'_map_lun')
def test_initialize_connection_fc_no_wwpns(
self, mock_map_lun, mock_build_initiator_target_map):
mock_map_lun.return_value = '1'
mock_build_initiator_target_map.return_value = (None, None, 0)
self.assertRaises(exception.VolumeBackendAPIException,
self.library.initialize_connection_fc,
fake.FC_VOLUME,
fake.FC_CONNECTOR)
@mock.patch.object(block_base.NetAppBlockStorageLibrary,
'_has_luns_mapped_to_initiators')
@mock.patch.object(block_base.NetAppBlockStorageLibrary,
'_unmap_lun')
@mock.patch.object(block_base.NetAppBlockStorageLibrary,
'_get_lun_attr')
def test_terminate_connection_fc(self, mock_get_lun_attr, mock_unmap_lun,
mock_has_luns_mapped_to_initiators):
mock_get_lun_attr.return_value = {'Path': fake.LUN_PATH}
mock_unmap_lun.return_value = None
mock_has_luns_mapped_to_initiators.return_value = True
target_info = self.library.terminate_connection_fc(fake.FC_VOLUME,
fake.FC_CONNECTOR)
self.assertDictEqual(target_info, fake.FC_TARGET_INFO_EMPTY)
mock_unmap_lun.assert_called_once_with(fake.LUN_PATH,
fake.FC_FORMATTED_INITIATORS)
@mock.patch.object(block_base.NetAppBlockStorageLibrary,
'_build_initiator_target_map')
@mock.patch.object(block_base.NetAppBlockStorageLibrary,
'_has_luns_mapped_to_initiators')
@mock.patch.object(block_base.NetAppBlockStorageLibrary,
'_unmap_lun')
@mock.patch.object(block_base.NetAppBlockStorageLibrary,
'_get_lun_attr')
def test_terminate_connection_fc_no_more_luns(
self, mock_get_lun_attr, mock_unmap_lun,
mock_has_luns_mapped_to_initiators,
mock_build_initiator_target_map):
mock_get_lun_attr.return_value = {'Path': fake.LUN_PATH}
mock_unmap_lun.return_value = None
mock_has_luns_mapped_to_initiators.return_value = False
mock_build_initiator_target_map.return_value = (fake.FC_TARGET_WWPNS,
fake.FC_I_T_MAP, 4)
target_info = self.library.terminate_connection_fc(fake.FC_VOLUME,
fake.FC_CONNECTOR)
self.assertDictEqual(target_info, fake.FC_TARGET_INFO_UNMAP)
@mock.patch.object(block_base.NetAppBlockStorageLibrary,
'_get_fc_target_wwpns')
def test_build_initiator_target_map_no_lookup_service(
self, mock_get_fc_target_wwpns):
self.library.lookup_service = None
mock_get_fc_target_wwpns.return_value = fake.FC_FORMATTED_TARGET_WWPNS
(target_wwpns, init_targ_map, num_paths) = \
self.library._build_initiator_target_map(fake.FC_CONNECTOR)
self.assertSetEqual(set(fake.FC_TARGET_WWPNS), set(target_wwpns))
self.assertDictEqual(fake.FC_I_T_MAP_COMPLETE, init_targ_map)
self.assertEqual(0, num_paths)
@mock.patch.object(block_base.NetAppBlockStorageLibrary,
'_get_fc_target_wwpns')
def test_build_initiator_target_map_with_lookup_service(
self, mock_get_fc_target_wwpns):
self.library.lookup_service = mock.Mock()
self.library.lookup_service.get_device_mapping_from_network.\
return_value = fake.FC_FABRIC_MAP
mock_get_fc_target_wwpns.return_value = fake.FC_FORMATTED_TARGET_WWPNS
(target_wwpns, init_targ_map, num_paths) = \
self.library._build_initiator_target_map(fake.FC_CONNECTOR)
self.assertSetEqual(set(fake.FC_TARGET_WWPNS), set(target_wwpns))
self.assertDictEqual(fake.FC_I_T_MAP, init_targ_map)
self.assertEqual(4, num_paths)
@mock.patch.object(na_utils, 'check_flags')
def test_do_setup_san_configured(self, mock_check_flags):
self.library.configuration.netapp_lun_ostype = 'windows'
self.library.configuration.netapp_host_type = 'solaris'
self.library.configuration.netapp_lun_space_reservation = 'disabled'
self.library.do_setup(mock.Mock())
self.assertTrue(mock_check_flags.called)
self.assertEqual('windows', self.library.lun_ostype)
self.assertEqual('solaris', self.library.host_type)
@mock.patch.object(na_utils, 'check_flags')
def test_do_setup_san_unconfigured(self, mock_check_flags):
self.library.configuration.netapp_lun_ostype = None
self.library.configuration.netapp_host_type = None
self.library.configuration.netapp_lun_space_reservation = 'enabled'
self.library.do_setup(mock.Mock())
self.assertTrue(mock_check_flags.called)
self.assertEqual('linux', self.library.lun_ostype)
self.assertEqual('linux', self.library.host_type)
def test_do_setup_space_reservation_disabled(self):
self.mock_object(na_utils, 'check_flags')
self.library.configuration.netapp_lun_ostype = None
self.library.configuration.netapp_host_type = None
self.library.configuration.netapp_lun_space_reservation = 'disabled'
self.library.do_setup(mock.Mock())
self.assertEqual('false', self.library.lun_space_reservation)
def test_do_setup_space_reservation_enabled(self):
self.mock_object(na_utils, 'check_flags')
self.library.configuration.netapp_lun_ostype = None
self.library.configuration.netapp_host_type = None
self.library.configuration.netapp_lun_space_reservation = 'enabled'
self.library.do_setup(mock.Mock())
self.assertEqual('true', self.library.lun_space_reservation)
def test_get_existing_vol_manage_missing_id_path(self):
self.assertRaises(exception.ManageExistingInvalidReference,
self.library._get_existing_vol_with_manage_ref,
{})
def test_get_existing_vol_manage_not_found(self):
self.zapi_client.get_lun_by_args.return_value = []
self.assertRaises(exception.ManageExistingInvalidReference,
self.library._get_existing_vol_with_manage_ref,
{'source-id': 'src_id',
'source-name': 'lun_path'})
self.assertEqual(1, self.zapi_client.get_lun_by_args.call_count)
@mock.patch.object(block_base.NetAppBlockStorageLibrary,
'_extract_lun_info',
mock.Mock(return_value=block_base.NetAppLun(
'lun0', 'lun0', '3', {'UUID': 'src_id'})))
def test_get_existing_vol_manage_lun(self):
self.zapi_client.get_lun_by_args.return_value = ['lun0', 'lun1']
lun = self.library._get_existing_vol_with_manage_ref(
{'source-id': 'src_id', 'path': 'lun_path'})
self.assertEqual(1, self.zapi_client.get_lun_by_args.call_count)
self.library._extract_lun_info.assert_called_once_with('lun0')
self.assertEqual('lun0', lun.name)
@mock.patch.object(block_base.NetAppBlockStorageLibrary,
'_get_existing_vol_with_manage_ref',
mock.Mock(return_value=block_base.NetAppLun(
'handle', 'name', '1073742824', {})))
def test_manage_existing_get_size(self):
size = self.library.manage_existing_get_size(
{'id': 'vol_id'}, {'ref': 'ref'})
self.assertEqual(2, size)
self.library._get_existing_vol_with_manage_ref.assert_called_once_with(
{'ref': 'ref'})
@mock.patch.object(block_base.LOG, 'info')
def test_unmanage(self, log):
mock_lun = block_base.NetAppLun('handle', 'name', '1',
{'Path': 'p', 'UUID': 'uuid'})
self.library._get_lun_from_table = mock.Mock(return_value=mock_lun)
self.library.unmanage({'name': 'vol'})
self.library._get_lun_from_table.assert_called_once_with('vol')
self.assertEqual(1, log.call_count)
def test_check_vol_type_for_lun(self):
self.assertRaises(NotImplementedError,
self.library._check_volume_type_for_lun,
'vol', 'lun', 'existing_ref', {})
def test_is_lun_valid_on_storage(self):
self.assertTrue(self.library._is_lun_valid_on_storage('lun'))
def test_initialize_connection_iscsi(self):
target_details_list = fake.ISCSI_TARGET_DETAILS_LIST
volume = fake.ISCSI_VOLUME
connector = fake.ISCSI_CONNECTOR
self.mock_object(block_base.NetAppBlockStorageLibrary, '_map_lun',
mock.Mock(return_value=fake.ISCSI_LUN['lun_id']))
self.zapi_client.get_iscsi_target_details.return_value = (
target_details_list)
self.mock_object(block_base.NetAppBlockStorageLibrary,
'_get_preferred_target_from_list',
mock.Mock(return_value=target_details_list[1]))
self.zapi_client.get_iscsi_service_details.return_value = (
fake.ISCSI_SERVICE_IQN)
self.mock_object(
na_utils, 'get_iscsi_connection_properties',
mock.Mock(return_value=fake.ISCSI_CONNECTION_PROPERTIES))
target_info = self.library.initialize_connection_iscsi(volume,
connector)
self.assertEqual(fake.ISCSI_CONNECTION_PROPERTIES, target_info)
block_base.NetAppBlockStorageLibrary._map_lun.assert_called_once_with(
fake.ISCSI_VOLUME['name'], [fake.ISCSI_CONNECTOR['initiator']],
'iscsi', None)
self.zapi_client.get_iscsi_target_details.assert_called_once_with()
block_base.NetAppBlockStorageLibrary._get_preferred_target_from_list\
.assert_called_once_with(
target_details_list)
self.zapi_client.get_iscsi_service_details.assert_called_once_with()
def test_initialize_connection_iscsi_no_target_list(self):
volume = fake.ISCSI_VOLUME
connector = fake.ISCSI_CONNECTOR
self.mock_object(block_base.NetAppBlockStorageLibrary, '_map_lun',
mock.Mock(return_value=fake.ISCSI_LUN['lun_id']))
self.zapi_client.get_iscsi_target_details.return_value = None
self.mock_object(block_base.NetAppBlockStorageLibrary,
'_get_preferred_target_from_list')
self.mock_object(
na_utils, 'get_iscsi_connection_properties',
mock.Mock(return_value=fake.ISCSI_CONNECTION_PROPERTIES))
self.assertRaises(exception.VolumeBackendAPIException,
self.library.initialize_connection_iscsi,
volume, connector)
self.assertEqual(
0, block_base.NetAppBlockStorageLibrary
._get_preferred_target_from_list.call_count)
self.assertEqual(
0, self.zapi_client.get_iscsi_service_details.call_count)
self.assertEqual(
0, na_utils.get_iscsi_connection_properties.call_count)
def test_initialize_connection_iscsi_no_preferred_target(self):
volume = fake.ISCSI_VOLUME
connector = fake.ISCSI_CONNECTOR
self.mock_object(block_base.NetAppBlockStorageLibrary, '_map_lun',
mock.Mock(return_value=fake.ISCSI_LUN['lun_id']))
self.zapi_client.get_iscsi_target_details.return_value = None
self.mock_object(block_base.NetAppBlockStorageLibrary,
'_get_preferred_target_from_list',
mock.Mock(return_value=None))
self.mock_object(na_utils, 'get_iscsi_connection_properties')
self.assertRaises(exception.VolumeBackendAPIException,
self.library.initialize_connection_iscsi,
volume, connector)
self.assertEqual(0, self.zapi_client
.get_iscsi_service_details.call_count)
self.assertEqual(0, na_utils.get_iscsi_connection_properties
.call_count)
def test_initialize_connection_iscsi_no_iscsi_service_details(self):
target_details_list = fake.ISCSI_TARGET_DETAILS_LIST
volume = fake.ISCSI_VOLUME
connector = fake.ISCSI_CONNECTOR
self.mock_object(block_base.NetAppBlockStorageLibrary, '_map_lun',
mock.Mock(return_value=fake.ISCSI_LUN['lun_id']))
self.zapi_client.get_iscsi_target_details.return_value = (
target_details_list)
self.mock_object(block_base.NetAppBlockStorageLibrary,
'_get_preferred_target_from_list',
mock.Mock(return_value=target_details_list[1]))
self.zapi_client.get_iscsi_service_details.return_value = None
self.mock_object(na_utils, 'get_iscsi_connection_properties')
self.assertRaises(exception.VolumeBackendAPIException,
self.library.initialize_connection_iscsi,
volume,
connector)
block_base.NetAppBlockStorageLibrary._map_lun.assert_called_once_with(
fake.ISCSI_VOLUME['name'], [fake.ISCSI_CONNECTOR['initiator']],
'iscsi', None)
self.zapi_client.get_iscsi_target_details.assert_called_once_with()
block_base.NetAppBlockStorageLibrary._get_preferred_target_from_list\
.assert_called_once_with(target_details_list)
def test_get_target_details_list(self):
target_details_list = fake.ISCSI_TARGET_DETAILS_LIST
result = self.library._get_preferred_target_from_list(
target_details_list)
self.assertEqual(target_details_list[0], result)
def test_get_preferred_target_from_empty_list(self):
target_details_list = []
result = self.library._get_preferred_target_from_list(
target_details_list)
self.assertIsNone(result)
def test_get_preferred_target_from_list_with_one_interface_disabled(self):
target_details_list = copy.deepcopy(fake.ISCSI_TARGET_DETAILS_LIST)
target_details_list[0]['interface-enabled'] = 'false'
result = self.library._get_preferred_target_from_list(
target_details_list)
self.assertEqual(target_details_list[1], result)
def test_get_preferred_target_from_list_with_all_interfaces_disabled(self):
target_details_list = copy.deepcopy(fake.ISCSI_TARGET_DETAILS_LIST)
for target in target_details_list:
target['interface-enabled'] = 'false'
result = self.library._get_preferred_target_from_list(
target_details_list)
self.assertEqual(target_details_list[0], result)
def test_get_preferred_target_from_list_with_filter(self):
target_details_list = fake.ISCSI_TARGET_DETAILS_LIST
filter = [target_detail['address']
for target_detail in target_details_list[1:]]
result = self.library._get_preferred_target_from_list(
target_details_list, filter)
self.assertEqual(target_details_list[1], result)
@mock.patch.object(na_utils, 'check_flags', mock.Mock())
@mock.patch.object(block_base, 'LOG', mock.Mock())
def test_setup_error_invalid_lun_os(self):
self.library.configuration.netapp_lun_ostype = 'unknown'
self.library.do_setup(mock.Mock())
self.assertRaises(exception.NetAppDriverException,
self.library.check_for_setup_error)
msg = _("Invalid value for NetApp configuration"
" option netapp_lun_ostype.")
block_base.LOG.error.assert_called_once_with(msg)
@mock.patch.object(na_utils, 'check_flags', mock.Mock())
@mock.patch.object(block_base, 'LOG', mock.Mock())
def test_setup_error_invalid_host_type(self):
self.library.configuration.netapp_lun_ostype = 'linux'
self.library.configuration.netapp_host_type = 'future_os'
self.library.do_setup(mock.Mock())
self.assertRaises(exception.NetAppDriverException,
self.library.check_for_setup_error)
msg = _("Invalid value for NetApp configuration"
" option netapp_host_type.")
block_base.LOG.error.assert_called_once_with(msg)
@mock.patch.object(na_utils, 'check_flags', mock.Mock())
def test_check_for_setup_error_both_config(self):
self.library.configuration.netapp_lun_ostype = 'linux'
self.library.configuration.netapp_host_type = 'linux'
self.library.do_setup(mock.Mock())
self.zapi_client.get_lun_list.return_value = ['lun1']
self.library._extract_and_populate_luns = mock.Mock()
self.library.check_for_setup_error()
self.library._extract_and_populate_luns.assert_called_once_with(
['lun1'])
@mock.patch.object(na_utils, 'check_flags', mock.Mock())
def test_check_for_setup_error_no_os_host(self):
self.library.configuration.netapp_lun_ostype = None
self.library.configuration.netapp_host_type = None
self.library.do_setup(mock.Mock())
self.zapi_client.get_lun_list.return_value = ['lun1']
self.library._extract_and_populate_luns = mock.Mock()
self.library.check_for_setup_error()
self.library._extract_and_populate_luns.assert_called_once_with(
['lun1'])
def test_delete_volume(self):
mock_delete_lun = self.mock_object(self.library, '_delete_lun')
self.library.delete_volume(fake.VOLUME)
mock_delete_lun.assert_called_once_with(fake.LUN_NAME)
def test_delete_lun(self):
mock_get_lun_attr = self.mock_object(self.library, '_get_lun_attr')
mock_get_lun_attr.return_value = fake.LUN_METADATA
self.library.zapi_client = mock.Mock()
self.library.lun_table = fake.LUN_TABLE
self.library._delete_lun(fake.LUN_NAME)
mock_get_lun_attr.assert_called_once_with(
fake.LUN_NAME, 'metadata')
self.library.zapi_client.destroy_lun.assert_called_once_with(fake.PATH)
def test_delete_lun_no_metadata(self):
self.mock_object(self.library, '_get_lun_attr', mock.Mock(
return_value=None))
self.library.zapi_client = mock.Mock()
self.mock_object(self.library, 'zapi_client')
self.library._delete_lun(fake.LUN_NAME)
self.library._get_lun_attr.assert_called_once_with(
fake.LUN_NAME, 'metadata')
self.assertEqual(0, self.library.zapi_client.destroy_lun.call_count)
self.assertEqual(0,
self.zapi_client.
mark_qos_policy_group_for_deletion.call_count)
def test_delete_snapshot(self):
mock_delete_lun = self.mock_object(self.library, '_delete_lun')
self.library.delete_snapshot(fake.SNAPSHOT)
mock_delete_lun.assert_called_once_with(fake.SNAPSHOT_NAME)
def test_clone_source_to_destination(self):
self.mock_object(na_utils, 'get_volume_extra_specs', mock.Mock(
return_value=fake.EXTRA_SPECS))
self.mock_object(self.library, '_setup_qos_for_volume', mock.Mock(
return_value=fake.QOS_POLICY_GROUP_INFO))
self.mock_object(self.library, '_clone_lun')
self.mock_object(self.library, '_extend_volume')
self.mock_object(self.library, 'delete_volume')
self.mock_object(self.library, '_mark_qos_policy_group_for_deletion')
self.library.lun_space_reservation = 'false'
self.library._clone_source_to_destination(fake.CLONE_SOURCE,
fake.CLONE_DESTINATION)
na_utils.get_volume_extra_specs.assert_called_once_with(
fake.CLONE_DESTINATION)
self.library._setup_qos_for_volume.assert_called_once_with(
fake.CLONE_DESTINATION, fake.EXTRA_SPECS)
self.library._clone_lun.assert_called_once_with(
fake.CLONE_SOURCE_NAME, fake.CLONE_DESTINATION_NAME,
space_reserved='false',
qos_policy_group_name=fake.QOS_POLICY_GROUP_NAME)
self.library._extend_volume.assert_called_once_with(
fake.CLONE_DESTINATION, fake.CLONE_DESTINATION_SIZE,
fake.QOS_POLICY_GROUP_NAME)
self.assertEqual(0, self.library.delete_volume.call_count)
self.assertEqual(0, self.library.
_mark_qos_policy_group_for_deletion.call_count)
def test_clone_source_to_destination_exception_path(self):
self.mock_object(na_utils, 'get_volume_extra_specs', mock.Mock(
return_value=fake.EXTRA_SPECS))
self.mock_object(self.library, '_setup_qos_for_volume', mock.Mock(
return_value=fake.QOS_POLICY_GROUP_INFO))
self.mock_object(self.library, '_clone_lun')
self.mock_object(self.library, '_extend_volume', mock.Mock(
side_effect=Exception))
self.mock_object(self.library, 'delete_volume')
self.mock_object(self.library, '_mark_qos_policy_group_for_deletion')
self.library.lun_space_reservation = 'true'
self.assertRaises(exception.VolumeBackendAPIException,
self.library._clone_source_to_destination,
fake.CLONE_SOURCE, fake.CLONE_DESTINATION)
na_utils.get_volume_extra_specs.assert_called_once_with(
fake.CLONE_DESTINATION)
self.library._setup_qos_for_volume.assert_called_once_with(
fake.CLONE_DESTINATION, fake.EXTRA_SPECS)
self.library._clone_lun.assert_called_once_with(
fake.CLONE_SOURCE_NAME, fake.CLONE_DESTINATION_NAME,
space_reserved='true',
qos_policy_group_name=fake.QOS_POLICY_GROUP_NAME)
self.library._extend_volume.assert_called_once_with(
fake.CLONE_DESTINATION, fake.CLONE_DESTINATION_SIZE,
fake.QOS_POLICY_GROUP_NAME)
self.assertEqual(1, self.library.delete_volume.call_count)
self.assertEqual(1, self.library.
_mark_qos_policy_group_for_deletion.call_count)
def test_create_lun(self):
self.assertRaises(NotImplementedError, self.library._create_lun,
fake.VOLUME_ID, fake.LUN_ID, fake.SIZE,
fake.LUN_METADATA)
def test_clone_lun(self):
self.assertRaises(NotImplementedError, self.library._clone_lun,
fake.VOLUME_ID, 'new-' + fake.VOLUME_ID)
def test_create_volume_from_snapshot(self):
mock_do_clone = self.mock_object(self.library,
'_clone_source_to_destination')
source = {
'name': fake.SNAPSHOT['name'],
'size': fake.SNAPSHOT['volume_size']
}
self.library.create_volume_from_snapshot(fake.VOLUME, fake.SNAPSHOT)
mock_do_clone.assert_has_calls([
mock.call(source, fake.VOLUME)])
def test_create_cloned_volume(self):
fake_lun = block_base.NetAppLun(fake.LUN_HANDLE, fake.LUN_ID,
fake.LUN_SIZE, fake.LUN_METADATA)
mock_get_lun_from_table = self.mock_object(self.library,
'_get_lun_from_table')
mock_get_lun_from_table.return_value = fake_lun
mock_do_clone = self.mock_object(self.library,
'_clone_source_to_destination')
source = {
'name': fake_lun.name,
'size': fake.VOLUME_REF['size']
}
self.library.create_cloned_volume(fake.VOLUME, fake.VOLUME_REF)
mock_do_clone.assert_has_calls([
mock.call(source, fake.VOLUME)])
def test_extend_volume(self):
new_size = 100
volume_copy = copy.copy(fake.VOLUME)
volume_copy['size'] = new_size
mock_get_volume_extra_specs = self.mock_object(
na_utils, 'get_volume_extra_specs',
mock.Mock(return_value=fake.EXTRA_SPECS))
mock_setup_qos_for_volume = self.mock_object(
self.library, '_setup_qos_for_volume',
mock.Mock(return_value=fake.QOS_POLICY_GROUP_INFO))
mock_extend_volume = self.mock_object(self.library, '_extend_volume')
self.library.extend_volume(fake.VOLUME, new_size)
mock_get_volume_extra_specs.assert_called_once_with(fake.VOLUME)
mock_setup_qos_for_volume.assert_called_once_with(volume_copy,
fake.EXTRA_SPECS)
mock_extend_volume.assert_called_once_with(fake.VOLUME,
new_size,
fake.QOS_POLICY_GROUP_NAME)
def test_extend_volume_api_error(self):
new_size = 100
volume_copy = copy.copy(fake.VOLUME)
volume_copy['size'] = new_size
mock_get_volume_extra_specs = self.mock_object(
na_utils, 'get_volume_extra_specs',
mock.Mock(return_value=fake.EXTRA_SPECS))
mock_setup_qos_for_volume = self.mock_object(
self.library, '_setup_qos_for_volume',
mock.Mock(return_value=fake.QOS_POLICY_GROUP_INFO))
mock_extend_volume = self.mock_object(
self.library, '_extend_volume',
mock.Mock(side_effect=netapp_api.NaApiError))
self.assertRaises(netapp_api.NaApiError,
self.library.extend_volume,
fake.VOLUME,
new_size)
mock_get_volume_extra_specs.assert_called_once_with(fake.VOLUME)
mock_setup_qos_for_volume.assert_has_calls([
mock.call(volume_copy, fake.EXTRA_SPECS),
mock.call(fake.VOLUME, fake.EXTRA_SPECS)])
mock_extend_volume.assert_called_once_with(
fake.VOLUME, new_size, fake.QOS_POLICY_GROUP_NAME)
def test__extend_volume_direct(self):
current_size = fake.LUN_SIZE
current_size_bytes = current_size * units.Gi
new_size = fake.LUN_SIZE * 2
new_size_bytes = new_size * units.Gi
max_size = fake.LUN_SIZE * 10
max_size_bytes = max_size * units.Gi
fake_volume = copy.copy(fake.VOLUME)
fake_volume['size'] = new_size
fake_lun = block_base.NetAppLun(fake.LUN_HANDLE,
fake.LUN_ID,
current_size_bytes,
fake.LUN_METADATA)
mock_get_lun_from_table = self.mock_object(
self.library, '_get_lun_from_table',
mock.Mock(return_value=fake_lun))
fake_lun_geometry = {'max_resize': six.text_type(max_size_bytes)}
mock_get_lun_geometry = self.mock_object(
self.library.zapi_client, 'get_lun_geometry',
mock.Mock(return_value=fake_lun_geometry))
mock_do_direct_resize = self.mock_object(self.library.zapi_client,
'do_direct_resize')
mock_do_sub_clone_resize = self.mock_object(self.library,
'_do_sub_clone_resize')
self.library.lun_table = {fake.VOLUME['name']: fake_lun}
self.library._extend_volume(fake.VOLUME, new_size, 'fake_qos_policy')
mock_get_lun_from_table.assert_called_once_with(fake.VOLUME['name'])
mock_get_lun_geometry.assert_called_once_with(
fake.LUN_METADATA['Path'])
mock_do_direct_resize.assert_called_once_with(
fake.LUN_METADATA['Path'], six.text_type(new_size_bytes))
self.assertFalse(mock_do_sub_clone_resize.called)
self.assertEqual(six.text_type(new_size_bytes),
self.library.lun_table[fake.VOLUME['name']].size)
def test__extend_volume_clone(self):
current_size = fake.LUN_SIZE
current_size_bytes = current_size * units.Gi
new_size = fake.LUN_SIZE * 20
new_size_bytes = new_size * units.Gi
max_size = fake.LUN_SIZE * 10
max_size_bytes = max_size * units.Gi
fake_volume = copy.copy(fake.VOLUME)
fake_volume['size'] = new_size
fake_lun = block_base.NetAppLun(fake.LUN_HANDLE,
fake.LUN_ID,
current_size_bytes,
fake.LUN_METADATA)
mock_get_lun_from_table = self.mock_object(
self.library, '_get_lun_from_table',
mock.Mock(return_value=fake_lun))
fake_lun_geometry = {'max_resize': six.text_type(max_size_bytes)}
mock_get_lun_geometry = self.mock_object(
self.library.zapi_client, 'get_lun_geometry',
mock.Mock(return_value=fake_lun_geometry))
mock_do_direct_resize = self.mock_object(self.library.zapi_client,
'do_direct_resize')
mock_do_sub_clone_resize = self.mock_object(self.library,
'_do_sub_clone_resize')
self.library.lun_table = {fake.VOLUME['name']: fake_lun}
self.library._extend_volume(fake.VOLUME, new_size, 'fake_qos_policy')
mock_get_lun_from_table.assert_called_once_with(fake.VOLUME['name'])
mock_get_lun_geometry.assert_called_once_with(
fake.LUN_METADATA['Path'])
self.assertFalse(mock_do_direct_resize.called)
mock_do_sub_clone_resize.assert_called_once_with(
fake.LUN_METADATA['Path'], six.text_type(new_size_bytes),
qos_policy_group_name='fake_qos_policy')
self.assertEqual(six.text_type(new_size_bytes),
self.library.lun_table[fake.VOLUME['name']].size)
def test__extend_volume_no_change(self):
current_size = fake.LUN_SIZE
current_size_bytes = current_size * units.Gi
new_size = fake.LUN_SIZE
max_size = fake.LUN_SIZE * 10
max_size_bytes = max_size * units.Gi
fake_volume = copy.copy(fake.VOLUME)
fake_volume['size'] = new_size
fake_lun = block_base.NetAppLun(fake.LUN_HANDLE,
fake.LUN_ID,
current_size_bytes,
fake.LUN_METADATA)
mock_get_lun_from_table = self.mock_object(
self.library, '_get_lun_from_table',
mock.Mock(return_value=fake_lun))
fake_lun_geometry = {'max_resize': six.text_type(max_size_bytes)}
mock_get_lun_geometry = self.mock_object(
self.library.zapi_client, 'get_lun_geometry',
mock.Mock(return_value=fake_lun_geometry))
mock_do_direct_resize = self.mock_object(self.library.zapi_client,
'do_direct_resize')
mock_do_sub_clone_resize = self.mock_object(self.library,
'_do_sub_clone_resize')
self.library.lun_table = {fake_volume['name']: fake_lun}
self.library._extend_volume(fake_volume, new_size, 'fake_qos_policy')
mock_get_lun_from_table.assert_called_once_with(fake_volume['name'])
self.assertFalse(mock_get_lun_geometry.called)
self.assertFalse(mock_do_direct_resize.called)
self.assertFalse(mock_do_sub_clone_resize.called)
|
# Copyright (c) 2012 Intel
# Copyright (c) 2012 OpenStack, LLC.
#
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import math
from cinder.openstack.common import log as logging
from cinder.openstack.common.scheduler import filters
LOG = logging.getLogger(__name__)
class CapacityFilter(filters.BaseHostFilter):
"""CapacityFilter filters based on volume host's capacity utilization."""
def host_passes(self, host_state, filter_properties):
"""Return True if host has sufficient capacity."""
volume_size = filter_properties.get('size')
if not host_state.free_capacity_gb:
# Fail Safe
LOG.warning(_("Free capacity not set;"
"volume node info collection broken."))
return False
free_space = host_state.free_capacity_gb
if free_space == 'infinite' or free_space == 'unknown':
            # NOTE(zhiteng) For back-ends that cannot report actual
            # available capacity, we assume they are able to serve the
            # request. Even if they cannot, the retry mechanism can
            # handle the failure by rescheduling
return True
reserved = float(host_state.reserved_percentage) / 100
free = math.floor(free_space * (1 - reserved))
return free >= volume_size
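# Worked example (a sketch): with free_capacity_gb=100 and
# reserved_percentage=20, reserved=0.2 and free=floor(100*0.8)=80,
# so an 80 GB request passes while an 81 GB request is rejected.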
|
#!/usr/bin/env python3
import ipdb
import sys
import petsc4py
petsc4py.init(sys.argv)
from petsc4py import PETSc
from mpi4py import MPI
import numpy as np
from phelper import *
eMesh = {1: [np.linspace(0, 1, 4) ],
         2: [np.linspace(0, 0.5, 5, False), # False == Do not include the endpoint in the range
np.linspace(0.5, 1, 8 )],
4: [np.linspace(0, 0.25, 5, False),
np.linspace(0.25, 0.5, 5, False),
np.linspace(0.5, 0.75, 5, False),
np.linspace(0.75, 1, 5, False), ]
}
MPIrank = MPI.COMM_WORLD.Get_rank()
MPIsize = MPI.COMM_WORLD.Get_size()
nSupport = 10 # Number of support points
supportSpace = (-0.1, 1.1) # Range in which the support points are equally distributed
# Dimension of the interpolation. Used to add a polynomial to the matrix; set to zero to disable the polynomial
dimension = 1
polyparams = dimension+1 if dimension else 0
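# The system assembled below is the standard polynomial-augmented RBF
# interpolation system (a sketch of the structure, not extra functionality):
#
#   | A   P | | c |   | b |        A_ij = phi(|x_i - x_j|)
#   | P^T 0 | | l | = | 0 |        P_i  = (1, x_i)  (polyparams columns)
#
# where l holds the Lagrange multipliers appended on the last rank.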
def main():
# shuffle_mesh(eMesh)
ePoints = eMesh[MPIsize][MPIrank] # np.array of positions to evaluate
supports = np.linspace(supportSpace[0], supportSpace[1], nSupport)
sPoints = partitions(supports)[MPIrank]
A = PETSc.Mat(); A.create()
E = PETSc.Mat(); E.create()
if MPIrank == MPIsize-1 and dimension > 0: # The last rank gets the polynomial rows
A.setSizes( size = ((len(sPoints)+polyparams, PETSc.DETERMINE), (len(sPoints)+polyparams, PETSc.DETERMINE)) )
E.setSizes( size = ((len(ePoints), PETSc.DETERMINE), (len(sPoints)+polyparams, PETSc.DETERMINE)) )
else:
A.setSizes( size = ((len(sPoints), PETSc.DETERMINE), (len(sPoints), PETSc.DETERMINE)) )
E.setSizes( size = ((len(ePoints), PETSc.DETERMINE), (len(sPoints), PETSc.DETERMINE)) )
A.setName("System Matrix"); A.setFromOptions(); A.setUp()
E.setName("Evaluation Matrix"); E.setFromOptions(); E.setUp()
c = A.createVecRight(); c.setName("Coefficients")
b = A.createVecRight(); b.setName("RHS Function Values")
interp = E.createVecLeft(); interp.setName("interp")
    for row in range(*A.owner_range): # Rows are partitioned across ranks
        if row >= len(supports): break # Skip the polynomial rows here; they are filled in while setting each column below.
for col in range(nSupport):
v = basisfunction(abs(supports[row]-supports[col]))
if v != 0:
A.setValue(row, col, v)
b.setValue(row, testfunction(supports[row])) # Add the solution to the RHS
# Add the polynomial
if dimension:
A.setValue(row, nSupport, 1) # Const part of the polynom
A.setValue(nSupport, row, 1) # Ensure symmetricity
for d in range(dimension):
A.setValue(row, nSupport + 1 + d, supports[row]) # Value of support point
A.setValue(nSupport + 1 + d, row, supports[row])
A.assemble(PETSc.Mat.AssemblyType.FLUSH_ASSEMBLY)
    zeros = A.createVecRight()
    A.setDiagonal(zeros, PETSc.InsertMode.ADD_VALUES) # Add zeros so every diagonal entry exists before the final assembly
A.assemble()
b.assemble()
# A.view()
A.view(PETSc.Viewer.DRAW().createDraw()) # Use command line -draw_pause <sec>.
# Print("polyparams= ", polyparams)
# Print("A Size =", A.getSize())
# Print("E Global Size = ", E.getSize())
# Print("E Local Size = ", E.getLocalSize())
# Print("E Owner Range", E.owner_range)
offset = E.owner_range[0]
for row in range(*E.owner_range):
for col in range(E.getSize()[1]-polyparams):
E.setValue(row, col, basisfunction(abs(ePoints[row-offset] - supports[col])))
# Add the polynomial
if dimension:
E.setValue(row, nSupport, 1)
for d in range(dimension):
E.setValue(row, nSupport + 1 + d, ePoints[row-offset])
E.assemble()
# E.view()
E.view(PETSc.Viewer.DRAW().createDraw()) # Use command line -draw_pause <sec>.
b.view()
ksp = PETSc.KSP()
ksp.create()
ksp.setOperators(A, A)
ksp.setFromOptions()
ksp.solve(b, c)
    E.mult(c, interp)
c.view()
interp.view()
scatter, interp0 = PETSc.Scatter.toZero(interp)
scatter.scatter(interp, interp0)
scatter, c0 = PETSc.Scatter.toZero(c)
scatter.scatter(c, c0)
if MPIrank == 0:
plot(supports, eMesh, interp0.array, c0.array, dimension)
if __name__ == '__main__':
main()
|
from runtests.mpi import MPITest
from nbodykit import setup_logging
from nbodykit.binned_statistic import BinnedStatistic
import pytest
import tempfile
import numpy.testing as testing
import numpy
import os
data_dir = os.path.join(os.path.split(os.path.abspath(__file__))[0], 'data')
setup_logging("debug")
@MPITest([1])
def test_to_json(comm):
# load from JSON
ds1 = BinnedStatistic.from_json(os.path.join(data_dir, 'dataset_1d.json'))
# to JSON
with tempfile.NamedTemporaryFile(delete=False) as ff:
ds1.to_json(ff.name)
ds2 = BinnedStatistic.from_json(ff.name)
# same data?
for name in ds1:
testing.assert_almost_equal(ds1[name], ds2[name])
# cleanup
os.remove(ff.name)
@MPITest([1])
def test_1d_load(comm):
# load plaintext format
with pytest.warns(FutureWarning):
ds1 = BinnedStatistic.from_plaintext(['k'], os.path.join(data_dir, 'dataset_1d_deprecated.dat'))
# wrong dimensions
with pytest.raises(ValueError):
ds1 = BinnedStatistic.from_plaintext(['k', 'mu'], os.path.join(data_dir, 'dataset_1d_deprecated.dat'))
# load from JSON
ds2 = BinnedStatistic.from_json(os.path.join(data_dir, 'dataset_1d.json'))
# same data?
for name in ds1:
testing.assert_almost_equal(ds1[name], ds2[name])
@MPITest([1])
def test_2d_load(comm):
# load plaintext format
with pytest.warns(FutureWarning):
ds1 = BinnedStatistic.from_plaintext(['k', 'mu'], os.path.join(data_dir, 'dataset_2d_deprecated.dat'))
# load from JSON
ds2 = BinnedStatistic.from_json(os.path.join(data_dir, 'dataset_2d.json'))
# same data?
for name in ds1:
testing.assert_almost_equal(ds1[name], ds2[name])
@MPITest([1])
def test_str(comm):
dataset = BinnedStatistic.from_json(os.path.join(data_dir, 'dataset_2d.json'))
# list all variable names
s = str(dataset)
# now just list total number of variables
dataset['test1'] = numpy.ones(dataset.shape)
dataset['test2'] = numpy.ones(dataset.shape)
s = str(dataset)
# this is the same as str
r = repr(dataset)
@MPITest([1])
def test_getitem(comm):
dataset = BinnedStatistic.from_json(os.path.join(data_dir, 'dataset_2d.json'))
# invalid key
with pytest.raises(KeyError):
bad = dataset['error']
# slice columns
sliced = dataset[['k', 'mu', 'power']]
sliced = dataset[('k', 'mu', 'power')]
# invalid slice
with pytest.raises(KeyError):
        bad = dataset[['k', 'mu', 'error']]
# too many dims in slice
with pytest.raises(IndexError):
bad = dataset[0,0,0]
# cannot access single element of 2D power
with pytest.raises(IndexError):
bad = dataset[0,0]
@MPITest([1])
def test_array_slice(comm):
dataset = BinnedStatistic.from_json(os.path.join(data_dir, 'dataset_2d.json'))
# get the first mu column
sliced = dataset[:,0]
assert sliced.shape[0] == dataset.shape[0]
assert len(sliced.shape) == 1
assert sliced.dims == ['k']
# get the first mu column but keep dimension
sliced = dataset[:,[0]]
assert sliced.shape[0] == dataset.shape[0]
assert sliced.shape[1] == 1
assert sliced.dims == ['k', 'mu']
@MPITest([1])
def test_list_array_slice(comm):
dataset = BinnedStatistic.from_json(os.path.join(data_dir, 'dataset_2d.json'))
# get the first and last mu column
sliced = dataset[:,[0, -1]]
assert len(sliced.shape) == 2
assert sliced.dims == ['k', 'mu']
# make sure we grabbed the right data
for var in dataset:
testing.assert_array_equal(dataset[var][:,[0,-1]], sliced[var])
@MPITest([1])
def test_variable_set(comm):
dataset = BinnedStatistic.from_json(os.path.join(data_dir, 'dataset_2d.json'))
modes = numpy.ones(dataset.shape)
# add new variable
dataset['TEST'] = modes
assert 'TEST' in dataset
# override existing variable
dataset['modes'] = modes
assert numpy.all(dataset['modes'] == 1.0)
# needs right shape
with pytest.raises(ValueError):
dataset['TEST'] = 10.
@MPITest([1])
def test_copy(comm):
dataset = BinnedStatistic.from_json(os.path.join(data_dir, 'dataset_2d.json'))
copy = dataset.copy()
for var in dataset:
testing.assert_array_equal(dataset[var], copy[var])
@MPITest([1])
def test_rename_variable(comm):
dataset = BinnedStatistic.from_json(os.path.join(data_dir, 'dataset_2d.json'))
test = numpy.zeros(dataset.shape)
dataset['test'] = test
dataset.rename_variable('test', 'renamed_test')
assert 'renamed_test' in dataset
assert 'test' not in dataset
@MPITest([1])
def test_sel(comm):
dataset = BinnedStatistic.from_json(os.path.join(data_dir, 'dataset_2d.json'))
# no exact match fails
with pytest.raises(IndexError):
sliced = dataset.sel(k=0.1)
# this should be squeezed
sliced = dataset.sel(k=0.1, method='nearest')
assert len(sliced.dims) == 1
# this is not squeezed
sliced = dataset.sel(k=[0.1], method='nearest')
assert sliced.shape[0] == 1
    # this returns an empty k with arbitrary edges.
sliced = dataset.sel(k=[], method='nearest')
assert sliced.shape[0] == 0
# slice in a specific k-range
sliced = dataset.sel(k=slice(0.02, 0.15), mu=[0.5], method='nearest')
assert sliced.shape[1] == 1
assert numpy.alltrue((sliced['k'] >= 0.02)&(sliced['k'] <= 0.15))
@MPITest([1])
def test_take(comm):
dataset = BinnedStatistic.from_json(os.path.join(data_dir, 'dataset_2d.json'))
sliced = dataset.take(k=[8])
assert sliced.shape[0] == 1
assert len(sliced.dims) == 2
sliced = dataset.take(k=[])
assert sliced.shape[0] == 0
assert len(sliced.dims) == 2
    sliced = dataset.take(k=dataset.coords['k'] < 0.3)
    assert len(sliced.dims) == 2
    sliced = dataset.take(dataset['modes'] > 0)
    assert len(sliced.dims) == 2
    sliced = dataset.take(dataset['k'] < 0.3)
    assert len(sliced.dims) == 2
@MPITest([1])
def test_squeeze(comm):
dataset = BinnedStatistic.from_json(os.path.join(data_dir, 'dataset_2d.json'))
# need to specify which dimension to squeeze
with pytest.raises(ValueError):
squeezed = dataset.squeeze()
with pytest.raises(ValueError):
squeezed = dataset[[0],[0]].squeeze()
sliced = dataset[:,[2]]
with pytest.raises(ValueError):
squeezed = sliced.squeeze('k')
squeezed = sliced.squeeze('mu')
assert len(squeezed.dims) == 1
assert squeezed.shape[0] == sliced.shape[0]
@MPITest([1])
def test_average(comm):
import warnings
dataset = BinnedStatistic.from_json(os.path.join(data_dir, 'dataset_2d.json'))
# unweighted
with warnings.catch_warnings():
warnings.simplefilter("ignore", category=RuntimeWarning)
avg = dataset.average('mu')
for var in dataset.variables:
if var in dataset._fields_to_sum:
x = numpy.nansum(dataset[var], axis=-1)
else:
x = numpy.nanmean(dataset[var], axis=-1)
testing.assert_allclose(x, avg[var])
# weighted
weights = numpy.random.random(dataset.shape)
dataset['weights'] = weights
avg = dataset.average('mu', weights='weights')
for var in dataset:
if var in dataset._fields_to_sum:
x = numpy.nansum(dataset[var], axis=-1)
else:
x = numpy.nansum(dataset[var]*dataset['weights'], axis=-1)
x /= dataset['weights'].sum(axis=-1)
testing.assert_allclose(x, avg[var])
@MPITest([1])
def test_reindex(comm):
import warnings
dataset = BinnedStatistic.from_json(os.path.join(data_dir, 'dataset_2d.json'))
with pytest.raises(ValueError):
new, spacing = dataset.reindex('k', 0.005, force=True, return_spacing=True)
with warnings.catch_warnings():
warnings.simplefilter("ignore", category=RuntimeWarning)
weights = numpy.random.random(dataset.shape)
dataset['weights'] = weights
new, spacing = dataset.reindex('k', 0.02, weights='weights', force=True, return_spacing=True)
diff = numpy.diff(new.coords['k'])
assert numpy.alltrue(diff > numpy.diff(dataset.coords['k'])[0])
with pytest.raises(ValueError):
new = dataset.reindex('mu', 0.4, force=False)
new = dataset.reindex('mu', 0.4, force=True)
@MPITest([1])
def test_subclass_copy_sel(comm):
    # This test asserts that sel returns an instance of the subclass
    # and that the copy method can change the class.
class A(BinnedStatistic):
def mymethod(self):
return self.copy(cls=BinnedStatistic)
# load from JSON
dataset = A.from_json(os.path.join(data_dir, 'dataset_2d.json'))
dataset.mymethod()
# no exact match fails
with pytest.raises(IndexError):
sliced = dataset.sel(k=0.1)
# this should be squeezed
sliced = dataset.sel(k=0.1, method='nearest')
assert len(sliced.dims) == 1
assert isinstance(sliced, A)
assert isinstance(sliced.mymethod(), BinnedStatistic)
|
##################################################################################
# The MIT License - turboengine
#
# Copyright (c) Oct 2010 Luis C. Cruz <[email protected]>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
##################################################################################
# When I need this module I'll have to integrate this app http://code.google.com/p/appengine-rest-server/ here
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('cyborg_identity', '0002_iscontactemailaddress_iscontactphonenumber_phonenumber'),
]
operations = [
migrations.RemoveField(
model_name='emailaddress',
name='node_ptr',
),
migrations.RemoveField(
model_name='iscontactemailaddress',
name='relationship_ptr',
),
migrations.RemoveField(
model_name='iscontactphonenumber',
name='relationship_ptr',
),
migrations.RemoveField(
model_name='phonenumber',
name='node_ptr',
),
migrations.DeleteModel(
name='EmailAddress',
),
migrations.DeleteModel(
name='IsContactEmailAddress',
),
migrations.DeleteModel(
name='IsContactPhoneNumber',
),
migrations.DeleteModel(
name='PhoneNumber',
),
]
|
import re
import time
import urllib2
import os
import sys
import datetime
import urllib
import simplejson
import calendar
import commands
import math
from datetime import datetime
def getMementos(uri):
uri = uri.replace(' ', '')
    originalExpression = re.compile( r"<http://[A-Za-z0-9.:=/%-_ ]*>; rel=\"original\"," )
mementoExpression = re.compile( r"<http://[A-Za-z0-9.:=/&,%-_ \?]*>;rel=\"(memento|first memento|last memento|first memento last memento|first last memento)\";datetime=\"(Sat|Sun|Mon|Tue|Wed|Thu|Fri), \d{2} (Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec) (19|20)\d\d \d\d:\d\d:\d\d GMT\"" )
zeroMementoExpression = re.compile(r"Resource: http://[A-Za-z0-9.:=/&,%-_ ]*")
baseURI = 'http://mementoproxy.cs.odu.edu/aggr/timemap/link/'
memento_list = []
try:
search_results = urllib.urlopen(baseURI+uri)
the_page = search_results.read()
timemapList = the_page.split('\n')
count = 0
for line in timemapList:
if count <= 1:
if line.find('Resource not in archive') > -1:
result = zeroMementoExpression.search( line )
count = count + 1
continue
elif count == 2:
            result = originalExpression.search( line )
if result:
originalResult = result.group(0)
originalUri = originalResult[1:len(originalResult)-17]
else:
if(line.find("</memento")>0):
line = line.replace("</memento", "<http://api.wayback.archive.org/memento")
loc = line.find('>;rel="')
tofind = ';datetime="'
loc2 = line.find(tofind)
if(loc!=-1 and loc2!=-1):
mementoURL = line[2:loc]
timestamp = line[loc2+len(tofind):line.find('"',loc2+len(tofind)+3)]
epoch = int(calendar.timegm(time.strptime(timestamp, '%a, %d %b %Y %H:%M:%S %Z')))
day_string = time.strftime('%Y-%m-%dT%H:%M:%S', time.gmtime(epoch))
uri = mementoURL
cdlib = 'webarchives.cdlib.org'
archiefweb = 'enterprise.archiefweb.eu'
webARchive= 'api.wayback.archive.org'
yahoo1 = 'uk.wrs.yahoo.com'
yahoo2 = 'rds.yahoo.com'
yahoo3 = 'wrs.yahoo.com'
diigo = 'www.diigo.com'
bing = 'cc.bingj.com'
wayback = 'wayback.archive-it.org'
webArchiveNationalUK = 'webarchive.nationalarchives.gov.uk'
webHarvest = 'webharvest.gov'
webArchiveOrgUK = 'www.webarchive.org.uk'
webCitation = 'webcitation.org'
mementoWayBack='memento.waybackmachine.org'
type = ''
category = ''
# @type uri str
if (uri.find(webARchive)!=-1):
type = 'Internet Archive'
category = 'IA'
elif (uri.find(yahoo1)!=-1 or uri.find(yahoo2)!=-1 or uri.find(yahoo3)!=-1):
type = 'Yahoo'
category = 'SE'
elif (uri.find(diigo)!=-1):
type = 'diigo'
category = 'Others'
elif (uri.find(bing)!=-1):
type = 'Bing'
category = 'SE'
elif (uri.find(wayback)!=-1):
type = 'Archive-It'
category = 'Others'
elif (uri.find(webArchiveNationalUK)!=-1):
type = 'UK National Archive'
category = 'Others'
elif (uri.find(webHarvest)!=-1):
type = 'Web Harvest'
category = 'Others'
elif (uri.find(webArchiveOrgUK)!=-1):
type = 'UK Web Archive'
category = 'Others'
elif (uri.find(webCitation)!=-1):
type = 'Web Citation'
category = 'Others'
elif (uri.find(cdlib)!=-1):
type = 'CD Lib'
category = 'Others'
elif (uri.find(archiefweb)!=-1):
type = 'ArchiefWeb'
category = 'Others'
elif (uri.find(mementoWayBack)!=-1):
type = 'Wayback Machine'
category = 'Others'
else:
type = 'Not Known'
category = 'Others'
memento = {}
memento["type"] = type
memento["category"] = category
memento["time"] = day_string
memento["link"] = mementoURL
memento["link"] = urllib.quote(memento["link"])
memento["link"] = memento["link"].replace("http%3A//", "http://")
memento["link"] = memento["link"][memento["link"].find("http://"):]
memento_list.append(memento)
else:
pass
count = count + 1
except urllib2.URLError:
pass
return memento_list
def isInPage(url,page):
co = 'curl -i --silent -L -A "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_6_8) AppleWebKit/534.30 (KHTML, like Gecko) Chrome/12.0.742.112 Safari/534.30" "'+page+'"'
page = commands.getoutput(co)
loc = page.find(url)
date = ""
if(loc==-1):
return False, date
to_find = "X-Archive-Orig-Last-modified: "
loc = page.find(to_find)
if(loc !=-1):
end = page.find("\r", loc)
date = page[loc+len(to_find):end]
date = date.strip()
if(date ==""):
to_find = "X-Archive-Orig-Date: "
loc = page.find(to_find)
if(loc !=-1):
end = page.find("\r", loc)
date = page[loc+len(to_find):end]
date = date.strip()
epoch = int(calendar.timegm(time.strptime(date, '%a, %d %b %Y %H:%M:%S %Z')))
date = time.strftime('%Y-%m-%dT%H:%M:%S', time.gmtime(epoch))
return True, date
def getFirstAppearance(url, inurl):
try:
mementos = getMementos(inurl)
if(len(mementos) == 0):
return ""
start = 0
end = len(mementos)
previous = -1
i = 0
foundbefore = False
count = 0
for mem in mementos:
res, date = isInPage(url,mem["link"])
if(res==True):
break
while(True):
res, date = isInPage(url,mementos[i]["link"])
if(res==True and i==0):
return date
if(int(math.fabs(previous-i))==0):
return ""
if( (res==True and int(math.fabs(previous-i))==1 and foundbefore == False) or (res==False and int(math.fabs(previous-i))==1 and foundbefore == True) ):
return date
previous = i
if(res == False):
start = i
i = (end-start)/2 + start
foundbefore = False
else:
end = i
i = (end-start)/2 + start
foundbefore = True
count = count + 1
except:
print sys.exc_info()
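# Illustrative sketch (hypothetical helper, not used above): the while-loop
# in getFirstAppearance() is essentially this bisection over a chronologically
# ordered list of membership flags, finding where the URL first appears.
def _example_bisect_first_true(flags):
    start, end = 0, len(flags)
    while start < end:
        mid = (start + end) // 2
        if flags[mid]:
            end = mid          # found: first appearance is at mid or earlier
        else:
            start = mid + 1    # not found yet: look later in time
    return start               # index of first True; len(flags) if none
# e.g. _example_bisect_first_true([False, False, True, True]) == 2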
|
from .Optimize import Optimize
from ..Function import *
import numpy as np  # explicit import; np is used throughout minimize()
class ContrastiveDivergence(Optimize):
def __init__(self, network, environment, **kwargs):
# Default parameters
settings = {**{
'batch_size': 1,
# step size
"learn_step": 0.01,
"learn_anneal": anneal_fixed,
"learn_decay": 1.0,
}, **kwargs}
super().__init__(network, environment, **settings)
def minimize(self):
converged = False
while not converged:
self.iteration += 1
stimulus, expectation = self.environment.sample(quantity=self.batch_size)
# Add bias units to stimulus
stimulus = np.vstack([stimulus, np.ones([1, self.batch_size])])
# ____REALITY____
# probability = basis(W * s)
probabilities = self.network.basis(self.network.weight @ stimulus)
probabilities[-1, :] = 1 # Bias units are always one
positive_gradient = probabilities @ stimulus.T
# Gibbs sampling
probabilities = probabilities > np.random.rand(*probabilities.shape)
# ____DREAM____
# Return to input layer by computing reconstructed stimulus
reconstruction = self.network.basis(self.network.weight.T @ probabilities)
reconstruction[-1, :] = 1 # Bias units are always one
probabilities = self.network.basis(self.network.weight @ reconstruction)
negative_gradient = probabilities @ reconstruction.T
# ____UPDATE WEIGHTS____
learn_rate = self.learn_anneal(self.iteration, self.learn_decay, self.iteration_limit) * self.learn_step
self.network.weight += learn_rate * (positive_gradient - negative_gradient) / self.batch_size
if self.iteration_limit is not None and self.iteration >= self.iteration_limit:
return True
if (self.graph or self.epsilon or self.debug) and self.iteration % self.debug_frequency == 0:
converged = self.convergence_check()
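# Illustrative sketch (plain numpy, hypothetical shapes, not part of the
# original class): one CD-1 update as performed in minimize() above, without
# the network/environment machinery. W is (hidden x visible), v0 is
# (visible x batch).
def _example_cd1_update(W, v0, learn_rate=0.01):
    def sigmoid(x):
        return 1.0 / (1.0 + np.exp(-x))
    h0 = sigmoid(W @ v0)                               # "reality" hidden probabilities
    h_sample = (h0 > np.random.rand(*h0.shape)) * 1.0  # Gibbs sampling step
    v1 = sigmoid(W.T @ h_sample)                       # "dream" reconstruction
    h1 = sigmoid(W @ v1)
    return W + learn_rate * (h0 @ v0.T - h1 @ v1.T)    # positive minus negative gradient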
|
import re
class ResponseRouter(object):
"""Handles the passing of control from a conversation to a client app's
routes.
For read requests and write requests, ResponseRouter maintains two lists of
    rules, where each rule is a tuple of the form (filename pattern, action).
When a request comes in, the filename given is checked against the list of
filename regex patterns, and the first rule that matches invokes the
corresponding action.
    Actions are application-level functions that take the following arguments:
    client_host: The ip or hostname of the client.
    client_port: The port of the client.
    filename: The filename included in the client request.
    A write request action takes one further argument:
data: The data sent from the client in the tftp conversation.
In the case of read requests, actions should return string data that will
be served directly back to clients.
"""
def __init__(self):
self.read_rules = []
self.write_rules = []
def append_read_rule(self, filename_pattern, action):
"""Adds a rule associating a filename pattern with an action for read
requests. The action given will execute when a read request is received
but before any responses are given.
Args:
filename_pattern: A string pattern to match future read request
filenames against.
action: A function to invoke when a later read request arrives
matching the given filename_pattern.
"""
self.read_rules.append((filename_pattern, action))
def append_write_rule(self, filename_pattern, action):
"""Adds a rule associating a filename pattern with an action for write
requests. The action given will execute when a write request is
completed and all data received.
Args:
            filename_pattern: A string pattern to match future write request
            filenames against.
            action: A function to invoke when a later write request completes
            matching the given filename_pattern.
"""
self.write_rules.append((filename_pattern, action))
def initialize_read(self, filename, client_host, client_port):
"""For a read request, finds the appropriate action and invokes it.
Args:
filename: The filename included in the client's request.
client_host: The host of the client connecting.
client_port: The port of the client connecting.
Returns:
A ReadBuffer containing the file contents to return. If there is no
corresponding action, returns None.
"""
action = self.find_action(self.read_rules, filename)
if action:
return ReadBuffer(action(client_host, client_port, filename))
else:
return None
def initialize_write(self, filename, client_host, client_port):
"""For a write request, finds the appropriate action and returns it.
This is different than a read request in that the action is invoked at
the end of the file transfer.
Args:
filename: The filename included in the client's request.
client_host: The host of the client connecting.
client_port: The port of the client connecting.
Returns:
An action that is to be run at the end of a write request file
transfer. If there is no corresponding action, returns None.
"""
return self.find_action(self.write_rules, filename)
def find_action(self, rules, filename):
"""Given a list of rules and a filename to match against them, returns
an action stored in one of those rules. The action returned corresponds
to the first rule that matches the filename given.
Args:
rules: A list of tuples, where each tuple is (filename pattern,
action).
filename: A filename to match against the filename regex patterns.
Returns:
An action corresponding to the first rule that matches the filename
given. If no rules match, returns None.
"""
for (filename_pattern, action) in rules:
if re.match(filename_pattern, filename):
return action
return None
class ReadBuffer(object):
"""A ReadBuffer is used to temporarily store read request data while the
    transfer has not completely succeeded. It offers an interface for
    retrieving data in 512-byte chunks based on block number.
"""
def __init__(self, data):
self.data = data
def get_block_count(self):
"""Returns the amount of blocks that this ReadBuffer can produce
This amount is also the largest value that can be passed into
get_block.
"""
return (len(self.data) / 512) + 1
def get_block(self, block_num):
"""Returns the data corresponding to the given block number
Args:
block_num: The block number of data to request. By the TFTP
protocol, blocks are consecutive 512 byte sized chunks of data with
        the exception of the final block, which may be less than 512 bytes.
        Returns:
A 512 byte or less chunk of data corresponding to the given block
number.
"""
return self.data[(block_num - 1) * 512:block_num * 512]
class WriteBuffer(object):
"""A WriteBuffer is used to temporarily store write request data while the
transfer has not completely succeeded.
Retrieve the data from the `data` property.
"""
def __init__(self):
self.data = ""
def receive_data(self, data):
"""Write some more data to the WriteBuffer """
self.data += data
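# Illustrative usage sketch (hypothetical rule and action, not part of the
# original module): wire one read rule and fetch the first block.
def _example_router_usage():
    router = ResponseRouter()
    router.append_read_rule(r'^hello\.txt$',
                            lambda host, port, fname: "hello, %s" % host)
    buf = router.initialize_read('hello.txt', '10.0.0.5', 12345)
    return buf.get_block(1)  # -> "hello, 10.0.0.5" (block 1 is bytes 0..511)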
|
"""
homepage.py
A Flask Blueprint module for the homepage.
"""
from flask import Blueprint, render_template, current_app, g
from flask import request, make_response, redirect, flash, abort
from flask_babel import gettext
from meerkat_frontend import app, auth
from meerkat_frontend import common as c
from meerkat_frontend.messages import messages
from meerkat_libs import hermes
import requests
import logging
import datetime
# Register the homepage blueprint.
homepage = Blueprint('homepage', __name__, url_prefix='/<language>')
homepage_route = app.config.get("HOMEPAGE_ROUTE", "")
@homepage.route('/' + homepage_route)
def index():
# Messages to be flashed to the user from the system admins
messages.flash()
return render_template(
'homepage/index.html',
content=g.config['HOMEPAGE_CONFIG'],
)
@homepage.route('/login')
def login():
# Enable url get args.
url = request.args.get('url', '/en/technical')
error = request.args.get('error', '')
    # If an error message is specified, show it.
if error:
flash(error, "error")
# Return the login page.
return render_template(
'homepage/login.html',
content=g.config['HOMEPAGE_CONFIG'],
redirect=url
)
@homepage.route('/login_request', methods=['POST'])
def login_request():
"""
Make a login request to the authentication module.
Can't do this directly from browser because of the "same-origin policy".
Browser scripts can't make cross domain POST requests.
"""
url = current_app.config['INTERNAL_AUTH_ROOT'] + "/api/login"
r = requests.post(url, json=request.json)
return (r.text, r.status_code, r.headers.items())
@homepage.route('/logout')
def logout():
"""
    Logs a user out. This involves deleting the current jwt stored in a
    cookie and redirecting to the specified page. We delete the cookie by
    setting its expiration date so that it expires immediately. Set the page
    to redirect to using url params, e.g. /logout?url=https://www.google.com
Get Args:
url (str) The url of the page to redirect to after logging out.
Returns:
A redirect response object that also sets the cookie's expiration time
to 0.
"""
url = request.args.get('url', '/')
response = make_response(redirect(url))
response.set_cookie(
current_app.config["JWT_COOKIE_NAME"],
value="",
expires=0
)
g.payload = {}
return response
@homepage.route('/account_settings', methods=['GET', 'POST'])
@auth.authorise(*app.config['AUTH'].get('settings', [['BROKEN'], ['']]))
def account_settings():
"""
Shows the account settings page.
"""
if request.method == 'GET':
current_app.logger.warning("GET called")
return render_template(
'homepage/account_settings.html',
content=g.config['TECHNICAL_CONFIG'],
week=c.api('/epi_week')
)
elif request.method == 'POST':
url = current_app.config['INTERNAL_AUTH_ROOT'] + "/api/update_user"
r = requests.post(url, json=request.json)
return (r.text, r.status_code, r.headers.items())
@homepage.route('/fault', methods=['GET', 'POST'])
@auth.authorise(*app.config['AUTH'].get('fault-report', [['BROKEN'], ['']]))
def report_fault():
"""
Enables users to directly report faults to the developer. This page
displays a fault report form and generates a fault report email from the
data it posts to the server.
"""
# If a post request is made to the url, process the form's data.
if request.method == 'POST':
# Get the data from the POST request and initialise variables.
data = request.form
now = datetime.datetime.now().strftime("%I:%M%p on %B %d, %Y")
deployment = current_app.config['DEPLOYMENT']
# Create a simple string that displays the submitted data
details = "<b>"
for key, value in data.items():
details = ''.join([
details, key.capitalize(), ':</b> ', value, '<br/><br/><b>'
])
# Send an email
# TODO: Direct github issue creation if from a personal account.
try:
hermes('/email', 'PUT', data={
'email': '[email protected]',
'subject': gettext('Fault Report') + ' | {} | {}'.format(
deployment,
data['url']
),
'message': gettext('There was a fault reported at {} in the '
'{} deployment. Here are the details...'
'\n\n{}').format(now, deployment, details)
})
except Exception as e:
logging.warning("Error sending email through hermes...")
logging.warning(e)
flash(gettext(
'Could not notify developers. Please contact them directly.'
), 'error')
abort(502)
return render_template(
'homepage/fault_report_response.html',
content=g.config['TECHNICAL_CONFIG'],
details=details.replace('\n', '<br/>')
)
# If a get request is made to the url, display the form
elif request.method == 'GET':
url = request.args.get('url', '')
return render_template(
'homepage/fault_report_form.html',
content=g.config['TECHNICAL_CONFIG'],
url=url
)
|
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tasks RPC implementations."""
from google.protobuf import wrappers_pb2
from google.protobuf.any_pb2 import Any
import six
from bot import testcase_manager
from bot.tasks import corpus_pruning_task
from bot.tasks import fuzz_task
from bot.tasks import minimize_task
from datastore import data_types
from lib.clusterfuzz.fuzz import engine
from protos import untrusted_runner_pb2
def _proto_to_fuzz_target(proto):
"""Convert protobuf to FuzzTarget."""
return data_types.FuzzTarget(
engine=proto.engine, project=proto.project, binary=proto.binary)
def _proto_to_cross_pollinate_fuzzer(proto):
"""Convert protobuf to CrossPollinateFuzzer."""
return corpus_pruning_task.CrossPollinateFuzzer(
fuzz_target=_proto_to_fuzz_target(proto.fuzz_target),
backup_bucket_name=proto.backup_bucket_name,
corpus_engine_name=proto.corpus_engine_name)
def prune_corpus(request, _):
"""Prune corpus."""
context = corpus_pruning_task.Context(
_proto_to_fuzz_target(request.fuzz_target), [
_proto_to_cross_pollinate_fuzzer(proto)
for proto in request.cross_pollinate_fuzzers
])
result = corpus_pruning_task.do_corpus_pruning(
context, request.last_execution_failed, request.revision)
cross_pollination_stats = None
if result.cross_pollination_stats:
cross_pollination_stats = untrusted_runner_pb2.CrossPollinationStats(
project_qualified_name=result.cross_pollination_stats.
project_qualified_name,
method=result.cross_pollination_stats.method,
sources=result.cross_pollination_stats.sources,
tags=result.cross_pollination_stats.tags,
initial_corpus_size=result.cross_pollination_stats.initial_corpus_size,
corpus_size=result.cross_pollination_stats.corpus_size,
initial_edge_coverage=result.cross_pollination_stats.
initial_edge_coverage,
edge_coverage=result.cross_pollination_stats.edge_coverage,
initial_feature_coverage=result.cross_pollination_stats.
initial_feature_coverage,
feature_coverage=result.cross_pollination_stats.feature_coverage)
# Intentionally skip edge and function coverage values as those would come
# from fuzzer coverage cron task (see src/go/server/cron/coverage.go).
coverage_info = untrusted_runner_pb2.CoverageInfo(
corpus_size_units=result.coverage_info.corpus_size_units,
corpus_size_bytes=result.coverage_info.corpus_size_bytes,
corpus_location=result.coverage_info.corpus_location,
corpus_backup_location=result.coverage_info.corpus_backup_location,
quarantine_size_units=result.coverage_info.quarantine_size_units,
quarantine_size_bytes=result.coverage_info.quarantine_size_bytes,
quarantine_location=result.coverage_info.quarantine_location)
crashes = [
untrusted_runner_pb2.CorpusCrash(
crash_state=crash.crash_state,
crash_type=crash.crash_type,
crash_address=crash.crash_address,
crash_stacktrace=crash.crash_stacktrace,
unit_path=crash.unit_path,
security_flag=crash.security_flag,
) for crash in result.crashes
]
return untrusted_runner_pb2.PruneCorpusResponse(
coverage_info=coverage_info,
crashes=crashes,
fuzzer_binary_name=result.fuzzer_binary_name,
revision=result.revision,
cross_pollination_stats=cross_pollination_stats)
def process_testcase(request, _):
"""Process testcase."""
tool_name_map = {
untrusted_runner_pb2.ProcessTestcaseRequest.MINIMIZE: 'minimize',
untrusted_runner_pb2.ProcessTestcaseRequest.CLEANSE: 'cleanse',
}
# TODO(ochang): Support other engines.
assert request.engine == 'libFuzzer'
assert request.operation in tool_name_map
result = minimize_task.run_libfuzzer_engine(
tool_name_map[request.operation], request.target_name, request.arguments,
request.testcase_path, request.output_path, request.timeout)
return untrusted_runner_pb2.EngineReproduceResult(
return_code=result.return_code,
time_executed=result.time_executed,
output=result.output)
def _pack_values(values):
"""Pack protobuf values."""
packed = {}
if values is None:
return packed
for key, value in six.iteritems(values):
packed_value = Any()
if isinstance(value, float):
packed_value.Pack(wrappers_pb2.DoubleValue(value=value))
elif isinstance(value, six.integer_types):
packed_value.Pack(wrappers_pb2.Int64Value(value=value))
elif isinstance(value, six.string_types):
packed_value.Pack(wrappers_pb2.StringValue(value=value))
else:
raise ValueError('Unknown stat type for ' + key)
packed[key] = packed_value
return packed
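# Illustrative sketch (hypothetical stat name, not part of the original
# module): reading back a value packed by _pack_values() above via the
# protobuf Any.Unpack() API.
def _example_unpack_stat(packed):
  value = wrappers_pb2.DoubleValue()
  if packed['execs_per_sec'].Unpack(value):
    return value.value
  return None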
def engine_fuzz(request, _):
"""Run engine fuzzer."""
engine_impl = engine.get(request.engine)
result, fuzzer_metadata, strategies = fuzz_task.run_engine_fuzzer(
engine_impl, request.target_name, request.sync_corpus_directory,
request.testcase_directory)
crashes = [
untrusted_runner_pb2.EngineCrash(
input_path=crash.input_path,
stacktrace=crash.stacktrace,
reproduce_args=crash.reproduce_args,
crash_time=crash.crash_time) for crash in result.crashes
]
packed_stats = _pack_values(result.stats)
packed_strategies = _pack_values(strategies)
return untrusted_runner_pb2.EngineFuzzResponse(
logs=result.logs,
command=result.command,
crashes=crashes,
stats=packed_stats,
time_executed=result.time_executed,
fuzzer_metadata=fuzzer_metadata,
strategies=packed_strategies)
def engine_reproduce(request, _):
"""Run engine reproduce."""
engine_impl = engine.get(request.engine)
result = testcase_manager.engine_reproduce(engine_impl, request.target_name,
request.testcase_path,
request.arguments, request.timeout)
return untrusted_runner_pb2.EngineReproduceResult(
command=result.command,
return_code=result.return_code,
time_executed=result.time_executed,
output=result.output)
|
###music.py
###Created by Joseph Rollinson, [email protected]
###Last Modified: 12/07/11
###Requires: pyo
###Wraps pyo in a note class that is very easy to use.
###Also contains functions to run pyo music server.
import pyo
class note(object):
'''creates a note that can be played'''
def __init__(self,frequency=440, attack=.01, decay=.2, sustain=.5, release=.1, duration=1, mul=1):
#some of this might not need to be saved later, for space saving.
self.frequency = frequency
self.attack = attack
self.decay = decay
self.sustain = sustain
self.release = release
self.duration = duration
self.mul = mul
self.envelope = pyo.Adsr(attack = attack,
decay = decay,
sustain = sustain,
release = release,
dur = duration,
mul = mul)
self.mod = pyo.Sine(freq = 0, mul = 25)
self.wave = pyo.Sine(freq = self.frequency + self.mod, mul = self.envelope)
self.wave.out()
def play(self,modulation=0):
'''plays the note'''
self.mod.setFreq(modulation)
self.wave.setFreq(self.frequency+self.mod)
self.envelope.play()
def stop(self):
self.envelope.stop()
def setFrequency(self,frequency):
'''sets the frequency of the note'''
        self.frequency = frequency
##def getNotes():
## '''returns a list of notes from middle C to the next B'''
## return map( lambda frequency: note(frequency), freqs)
def musicServer():
'''Returns a music server'''
s = pyo.Server()
s.setVerbosity(2)
s.boot()
return s
def startServer(server):
server.start()
def stopServer(server):
server.stop()
server.shutdown()
def guiMusicServer(server):
'''displays music server's gui'''
server.gui(locals())
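if __name__ == '__main__':
    # Minimal usage sketch (assumes a working audio backend): boot the
    # server, play one 440 Hz note, then shut everything down.
    import time
    server = musicServer()
    startServer(server)
    n = note(frequency=440, duration=1)
    n.play()
    time.sleep(1.5)
    stopServer(server)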
|
#------------------------------------------------------------------------------
# File: xml_store.py
# Purpose: Store and Retrieve data from XML files
# Author: Jim Storch
# License: GPLv3 see LICENSE.TXT
#------------------------------------------------------------------------------
import datetime
import re
import glob
from xml.etree import cElementTree as et
from tokp_lib.parse_combat import Raid
#--[ Datetime to String ]------------------------------------------------------
# Seems like a lot of juggling but strftime() and strptime() do not support
# microseconds.
def dt_to_str(dt):
"""Given a datetime object,
    returns a string in the format 'YYYY-MM-DD HH:MM:SS.MMMMMM'."""
return '%d-%.2d-%.2d %.2d:%.2d:%.2d.%.6d' % (
dt.year, dt.month, dt.day,
dt.hour, dt.minute, dt.second, dt.microsecond )
#--[ String to Datetime ]------------------------------------------------------
## Regex for str_to_dt()
rawstr = r"^(?P<year>\d{2,})-(?P<month>\d\d)-(?P<day>\d\d)\s(?P<hour>\d\d)" + \
    r":(?P<minute>\d\d):(?P<second>\d\d)\.(?P<micro>\d*)$"
compile_obj = re.compile(rawstr)
def str_to_dt(string_in):
"""Given a string in the format 'YYYY-MM-DD HH:MM:SS:MMMMMM,'
returns a datetime object."""
match_obj = compile_obj.search(string_in)
if match_obj:
year = int(match_obj.group('year'))
month = int(match_obj.group('month'))
day = int(match_obj.group('day'))
hour = int(match_obj.group('hour'))
minute = int(match_obj.group('minute'))
second = int(match_obj.group('second'))
micro = int(match_obj.group('micro'))
else:
raise ValueError('Could not parse datetime string')
return datetime.datetime(year, month, day, hour, minute, second, micro)
#--[ Indent ]------------------------------------------------------------------
# From http://effbot.org/zone/element-lib.htm (plus Paul Du Bois's comment)
def indent(elem, level=0):
"""Make an ElementTree all nice and pretty with indents and line breaks."""
i = "\n" + level * " "
if len(elem):
if not elem.text or not elem.text.strip():
elem.text = i + " "
for child in elem:
indent(child, level+1)
if not child.tail or not child.tail.strip():
child.tail = i
if not elem.tail or not elem.tail.strip():
elem.tail = i
else:
if level and (not elem.tail or not elem.tail.strip()):
elem.tail = i
#--[ Write Raid XML ]----------------------------------------------------------
def write_raid_xml(raid):
""" Given a Raid object, serializes it to an XML file.
Returns the filename used."""
fname = raid.start_time.strftime("%Y%m%d.%H%M.") + raid.zone + '.xml'
dstr = raid.start_time.strftime("%m/%d/%Y")
xml = et.Element('raid',date = dstr)
## Zone
zone = et.SubElement(xml,'zone')
zone.text = raid.zone
## Start Time
start_time = et.SubElement(xml,'start_time')
start_time.text = dt_to_str(raid.start_time)
## End Time
end_time = et.SubElement(xml,'end_time')
end_time.text = dt_to_str(raid.end_time)
## Members
members = et.SubElement(xml,'members')
raid.raid_members.sort()
for member in raid.raid_members:
name = et.SubElement(members,'name')
name.text = member
## Make pretty and write to a file
indent(xml)
    f = open('data/raids/' + fname,'w')
f.write('<?xml version="1.0"?>\n')
tree = et.ElementTree(xml)
tree.write(f, 'utf-8')
#print et.tostring(xml)
return fname
#--[ Read Raid XML ]-----------------------------------------------------------
def read_raid_xml(fname):
"""Given an XML file name, un-serializes it to a Raid object.
Returns the Raid object."""
tree = et.parse(open('data/raids/' + fname,'rU'))
zone = tree.findtext('zone')
start_time_str = tree.findtext('start_time')
start_time = str_to_dt(start_time_str)
end_time_str = tree.findtext('end_time')
end_time = str_to_dt(end_time_str)
raid = Raid(zone,start_time)
raid.end_time = end_time
for elem in tree.getiterator('name'):
raid.add_member(elem.text)
return raid
#--[ Raid Files ]--------------------------------------------------------------
## Regex for raid_files()
fname_str = r'.*[/\\](?P<fname>.+)\.xml'
fname_obj = re.compile(fname_str)
def raid_files():
"""Returns a chronologically sorted list of raid XML file names."""
file_list = []
xfiles = glob.glob('data/raids/*.xml')
for xfile in xfiles:
match_obj = fname_obj.search(xfile)
file_list.append(match_obj.group('fname'))
file_list.sort()
return file_list
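# Illustrative sketch (hypothetical timestamp, not part of the original
# module): dt_to_str()/str_to_dt() are inverses, preserving the microseconds
# that strftime/strptime would drop.
def _example_roundtrip():
    dt = datetime.datetime(2008, 5, 1, 12, 30, 15, 123456)
    assert str_to_dt(dt_to_str(dt)) == dt
    return dt_to_str(dt)  # -> '2008-05-01 12:30:15.123456'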
|
from yowsup.layers import YowLayer, YowLayerEvent, YowProtocolLayer
from .protocolentities import *
from yowsup.layers.protocol_acks.protocolentities import OutgoingAckProtocolEntity
class YowNotificationsProtocolLayer(YowProtocolLayer):
def __init__(self):
handleMap = {
"notification": (self.recvNotification, self.sendNotification)
}
super(YowNotificationsProtocolLayer, self).__init__(handleMap)
def __str__(self):
return "notification Ib Layer"
def sendNotification(self, entity):
if entity.getTag() == "notification":
self.toLower(entity.toProtocolTreeNode())
def recvNotification(self, node):
if node["type"] == "picture":
if node.getChild("set"):
self.toUpper(SetPictureNotificationProtocolEntity.fromProtocolTreeNode(node))
elif node.getChild("delete"):
self.toUpper(DeletePictureNotificationProtocolEntity.fromProtocolTreeNode(node))
else:
self.raiseErrorForNode(node)
elif node["type"] == "status":
self.toUpper(StatusNotificationProtocolEntity.fromProtocolTreeNode(node))
elif node["type"] in ["contacts", "subject", "w:gp2"]:
            # Implemented respectively in the protocol_contacts and protocol_groups layers
pass
elif node["type"] in ["features", "contacts", "web", "location"]:
# implement individually at some point
# but keep this pass block so system doesn't crash on these types
pass
elif node["type"] in ["business"]:
print("unhandled business notification")
pass
else:
self.raiseErrorForNode(node)
ack = OutgoingAckProtocolEntity(node["id"], "notification", node["type"], node["from"])
self.toLower(ack.toProtocolTreeNode())
|
# http://pyrocko.org - GPLv3
#
# The Pyrocko Developers, 21st Century
# ---|P------/S----------~Lg----------
from __future__ import absolute_import, division
import logging
import numpy as num
import hashlib
import base64
from functools import cmp_to_key
from pyrocko import util, moment_tensor
from pyrocko.guts import Float, String, Timestamp, Unicode, \
StringPattern, List, Dict, Any
from .location import Location
logger = logging.getLogger('pyrocko.model.event')
guts_prefix = 'pf'
d2r = num.pi / 180.
def cmp(a, b):
return (a > b) - (a < b)
def ehash(s):
return str(base64.urlsafe_b64encode(
hashlib.sha1(s.encode('utf8')).digest()).decode('ascii'))
def float_or_none_to_str(x, prec=9):
return 'None' if x is None else '{:.{prec}e}'.format(x, prec=prec)
class FileParseError(Exception):
pass
class EventExtrasDumpError(Exception):
pass
class EOF(Exception):
pass
class EmptyEvent(Exception):
pass
class Tag(StringPattern):
pattern = r'^[A-Za-z][A-Za-z0-9._]{0,128}(:[A-Za-z0-9._-]*)?$'
class Event(Location):
'''Seismic event representation
:param lat: latitude of hypocenter (default 0.0)
:param lon: longitude of hypocenter (default 0.0)
:param time: origin time system timestamp
:param name: event identifier as string (optional)
:param depth: source depth (optional)
:param magnitude: magnitude of event (optional)
:param region: source region (optional)
:param catalog: name of catalog that lists this event (optional)
:param moment_tensor: moment tensor as
:py:class:`moment_tensor.MomentTensor` instance (optional)
:param duration: source duration as float (optional)
:param tags: list of tags describing event (optional)
:param extras: dictionary for user defined event attributes (optional).
Keys must be strings, values must be YAML serializable.
'''
time = Timestamp.T(default=Timestamp.D('1970-01-01 00:00:00'))
depth = Float.T(optional=True)
name = String.T(default='', optional=True, yamlstyle="'")
magnitude = Float.T(optional=True)
magnitude_type = String.T(optional=True, yamlstyle="'")
region = Unicode.T(optional=True, yamlstyle="'")
catalog = String.T(optional=True, yamlstyle="'")
moment_tensor = moment_tensor.MomentTensor.T(optional=True)
duration = Float.T(optional=True)
tags = List.T(Tag.T(), default=[])
extras = Dict.T(String.T(), Any.T(), default={})
def __init__(
self, lat=0., lon=0., north_shift=0., east_shift=0., time=0.,
name='', depth=None, elevation=None,
magnitude=None, magnitude_type=None, region=None, load=None,
loadf=None, catalog=None, moment_tensor=None, duration=None,
tags=None, extras=None):
if tags is None:
tags = []
if extras is None:
extras = {}
vals = None
if load is not None:
vals = Event.oldload(load)
elif loadf is not None:
vals = Event.oldloadf(loadf)
if vals:
lat, lon, north_shift, east_shift, time, name, depth, magnitude, \
magnitude_type, region, catalog, moment_tensor, duration, \
tags = vals
Location.__init__(
self, lat=lat, lon=lon,
north_shift=north_shift, east_shift=east_shift,
time=time, name=name, depth=depth,
elevation=elevation,
magnitude=magnitude, magnitude_type=magnitude_type,
region=region, catalog=catalog,
moment_tensor=moment_tensor, duration=duration, tags=tags,
extras=extras)
def time_as_string(self):
return util.time_to_str(self.time)
def set_name(self, name):
self.name = name
def olddump(self, filename):
file = open(filename, 'w')
self.olddumpf(file)
file.close()
def olddumpf(self, file):
if self.extras:
raise EventExtrasDumpError(
'Event user-defined extras attributes cannot be dumped in the '
'"basic" event file format. Use '
'dump_events(..., format="yaml").')
file.write('name = %s\n' % self.name)
file.write('time = %s\n' % util.time_to_str(self.time))
if self.lat != 0.0:
file.write('latitude = %.12g\n' % self.lat)
if self.lon != 0.0:
file.write('longitude = %.12g\n' % self.lon)
if self.north_shift != 0.0:
file.write('north_shift = %.12g\n' % self.north_shift)
if self.east_shift != 0.0:
file.write('east_shift = %.12g\n' % self.east_shift)
if self.magnitude is not None:
file.write('magnitude = %g\n' % self.magnitude)
file.write('moment = %g\n' %
moment_tensor.magnitude_to_moment(self.magnitude))
if self.magnitude_type is not None:
file.write('magnitude_type = %s\n' % self.magnitude_type)
if self.depth is not None:
file.write('depth = %.10g\n' % self.depth)
if self.region is not None:
file.write('region = %s\n' % self.region)
if self.catalog is not None:
file.write('catalog = %s\n' % self.catalog)
if self.moment_tensor is not None:
m = self.moment_tensor.m()
sdr1, sdr2 = self.moment_tensor.both_strike_dip_rake()
file.write((
'mnn = %g\nmee = %g\nmdd = %g\nmne = %g\nmnd = %g\nmed = %g\n'
'strike1 = %g\ndip1 = %g\nrake1 = %g\n'
'strike2 = %g\ndip2 = %g\nrake2 = %g\n') % (
(m[0, 0], m[1, 1], m[2, 2], m[0, 1], m[0, 2], m[1, 2]) +
sdr1 + sdr2))
if self.duration is not None:
file.write('duration = %g\n' % self.duration)
if self.tags:
file.write('tags = %s\n' % ', '.join(self.tags))
@staticmethod
def unique(events, deltat=10., group_cmp=(lambda a, b:
cmp(a.catalog, b.catalog))):
groups = Event.grouped(events, deltat)
events = []
for group in groups:
if group:
                group.sort(key=cmp_to_key(group_cmp))
events.append(group[-1])
return events
@staticmethod
def grouped(events, deltat=10.):
events = list(events)
groups = []
for ia, a in enumerate(events):
groups.append([])
haveit = False
for ib, b in enumerate(events[:ia]):
if abs(b.time - a.time) < deltat:
groups[ib].append(a)
haveit = True
break
if not haveit:
groups[ia].append(a)
groups = [g for g in groups if g]
groups.sort(key=lambda g: sum(e.time for e in g) // len(g))
return groups
@staticmethod
def dump_catalog(events, filename=None, stream=None):
if filename is not None:
file = open(filename, 'w')
else:
file = stream
try:
i = 0
for ev in events:
ev.olddumpf(file)
file.write('--------------------------------------------\n')
i += 1
finally:
if filename is not None:
file.close()
@staticmethod
def oldload(filename):
with open(filename, 'r') as file:
return Event.oldloadf(file)
@staticmethod
def oldloadf(file):
d = {}
try:
for line in file:
if line.lstrip().startswith('#'):
continue
toks = line.split(' = ', 1)
if len(toks) == 2:
k, v = toks[0].strip(), toks[1].strip()
if k in ('name', 'region', 'catalog', 'magnitude_type'):
d[k] = v
if k in (('latitude longitude magnitude depth duration '
'north_shift east_shift '
'mnn mee mdd mne mnd med strike1 dip1 rake1 '
'strike2 dip2 rake2 duration').split()):
d[k] = float(v)
if k == 'time':
d[k] = util.str_to_time(v)
if k == 'tags':
d[k] = [x.strip() for x in v.split(',')]
if line.startswith('---'):
d['have_separator'] = True
break
except Exception as e:
raise FileParseError(e)
if not d:
raise EOF()
if 'have_separator' in d and len(d) == 1:
raise EmptyEvent()
mt = None
m6 = [d[x] for x in 'mnn mee mdd mne mnd med'.split() if x in d]
if len(m6) == 6:
mt = moment_tensor.MomentTensor(m=moment_tensor.symmat6(*m6))
else:
sdr = [d[x] for x in 'strike1 dip1 rake1'.split() if x in d]
if len(sdr) == 3:
moment = 1.0
if 'moment' in d:
moment = d['moment']
elif 'magnitude' in d:
moment = moment_tensor.magnitude_to_moment(d['magnitude'])
mt = moment_tensor.MomentTensor(
strike=sdr[0], dip=sdr[1], rake=sdr[2],
scalar_moment=moment)
return (
d.get('latitude', 0.0),
d.get('longitude', 0.0),
d.get('north_shift', 0.0),
d.get('east_shift', 0.0),
d.get('time', 0.0),
d.get('name', ''),
d.get('depth', None),
d.get('magnitude', None),
d.get('magnitude_type', None),
d.get('region', None),
d.get('catalog', None),
mt,
d.get('duration', None),
d.get('tags', []))
@staticmethod
def load_catalog(filename):
file = open(filename, 'r')
try:
while True:
try:
ev = Event(loadf=file)
yield ev
except EmptyEvent:
pass
except EOF:
pass
file.close()
def get_hash(self):
e = self
if isinstance(e.time, float):
stime = util.time_to_str(e.time, format='%Y-%m-%d %H:%M:%S.3FRAC')
else:
stime = util.time_to_str(e.time, format='%Y-%m-%d %H:%M:%S.6FRAC')
s = float_or_none_to_str
to_hash = ', '.join((
stime,
s(e.lat), s(e.lon), s(e.depth),
float_or_none_to_str(e.magnitude, 5),
str(e.catalog), str(e.name or ''),
str(e.region)))
return ehash(to_hash)
def human_str(self):
s = [
'Latitude [deg]: %g' % self.lat,
'Longitude [deg]: %g' % self.lon,
'Time [UTC]: %s' % util.time_to_str(self.time)]
if self.name:
s.append('Name: %s' % self.name)
if self.depth is not None:
s.append('Depth [km]: %g' % (self.depth / 1000.))
if self.magnitude is not None:
s.append('Magnitude [%s]: %3.1f' % (
self.magnitude_type or 'M?', self.magnitude))
if self.region:
s.append('Region: %s' % self.region)
if self.catalog:
s.append('Catalog: %s' % self.catalog)
if self.moment_tensor:
s.append(str(self.moment_tensor))
return '\n'.join(s)
def detect_format(filename):
with open(filename, 'r') as f:
for line in f:
line = line.strip()
if not line or line.startswith('#') or line.startswith('%'):
continue
if line.startswith('--- !pf.Event'):
return 'yaml'
else:
return 'basic'
return 'basic'
def load_events(filename, format='detect'):
'''Read events file.
:param filename: name of file as str
:param format: file format: ``'detect'``, ``'basic'``, or ``'yaml'``
:returns: list of :py:class:`Event` objects
'''
if format == 'detect':
format = detect_format(filename)
if format == 'yaml':
from pyrocko import guts
events = [
ev for ev in guts.load_all(filename=filename)
if isinstance(ev, Event)]
return events
elif format == 'basic':
return list(Event.load_catalog(filename))
else:
from pyrocko.io.io_common import FileLoadError
raise FileLoadError('unknown event file format: %s' % format)
class OneEventRequired(Exception):
pass
def load_one_event(filename, format='detect'):
events = load_events(filename)
if len(events) != 1:
raise OneEventRequired(
'exactly one event is required in "%s"' % filename)
return events[0]
def dump_events(events, filename=None, stream=None, format='basic'):
'''Write events file.
:param events: list of :py:class:`Event` objects
:param filename: name of file as str
:param format: file format: ``'basic'``, or ``'yaml'``
'''
if format == 'basic':
Event.dump_catalog(events, filename=filename, stream=stream)
elif format == 'yaml':
from pyrocko import guts
events = [ev for ev in events if isinstance(ev, Event)]
        guts.dump_all(object=events, filename=filename, stream=stream)
else:
from pyrocko.io.io_common import FileSaveError
raise FileSaveError('unknown event file format: %s' % format)
def load_kps_event_list(filename):
elist = []
f = open(filename, 'r')
for line in f:
toks = line.split()
if len(toks) < 7:
continue
tim = util.to_time_float(util.ctimegm(toks[0]+' '+toks[1]))
lat, lon, depth, magnitude = [float(x) for x in toks[2:6]]
duration = float(toks[10])
region = toks[-1]
name = util.gmctime_fn(tim)
e = Event(
            lat, lon, time=tim,
name=name,
depth=depth,
magnitude=magnitude,
duration=duration,
region=region)
elist.append(e)
f.close()
return elist
def load_gfz_event_list(filename):
from pyrocko import catalog
cat = catalog.Geofon()
elist = []
f = open(filename, 'r')
for line in f:
e = cat.get_event(line.strip())
elist.append(e)
f.close()
return elist
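# Illustrative usage sketch (hypothetical path, not part of the original
# module): write one event in the 'basic' format and read it back.
def _example_dump_and_load(path='/tmp/events.txt'):
    ev = Event(lat=10., lon=20., time=0., name='ev1', magnitude=5.0)
    dump_events([ev], filename=path, format='basic')
    return load_events(path, format='basic')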
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals, absolute_import
from os.path import abspath, dirname, join, normpath
from setuptools import find_packages, setup
import sys
INSTALL_PYTHON_REQUIRES = []
# We are intending to keep up to date with the supported Django versions.
# For the official support, please visit:
# https://docs.djangoproject.com/en/3.0/faq/install/#what-python-version-can-i-use-with-django
# (you may change the version in the URL to suit your needs; we will try to
# keep this up to date as we upgrade with Django).
if sys.version_info[1] == 5:
    # py3.5 can run 2.2 < 3.0 (Django 3.0 drops Python 3.5)
    django_python_version_install = 'Django>=2.2,<3.0'
INSTALL_PYTHON_REQUIRES.append(django_python_version_install)
elif sys.version_info[1] == 6:
    # py3.6 can run 2.2 < 3.2 (likely will be <4.0)
    django_python_version_install = 'Django>=2.2,<3.2'
INSTALL_PYTHON_REQUIRES.append(django_python_version_install)
elif sys.version_info[1] == 7:
    # py3.7 can run 2.2 < 3.2 (likely will be <4.0)
django_python_version_install = 'Django>=2.2,<3.2'
INSTALL_PYTHON_REQUIRES.append(django_python_version_install)
elif sys.version_info[1] == 8:
    # py3.8 can run 2.2.8 < 3.2 (likely will be <4.0)
django_python_version_install = 'Django>=2.2.8,<3.2'
INSTALL_PYTHON_REQUIRES.append(django_python_version_install)
setup(
# Basic package information:
name='django-twilio',
version='0.13.1.post0',
packages=find_packages(),
# Packaging options:
zip_safe=False,
include_package_data=True,
# Package dependencies:
install_requires=[
'setuptools>=36.2',
'twilio>=6.3.0,<7',
'django-phonenumber-field>=0.6',
'phonenumbers>=8.10.22',
] + INSTALL_PYTHON_REQUIRES,
# Metadata for PyPI:
author='Randall Degges',
author_email='[email protected]',
maintainer="Jason Held",
maintainer_email="[email protected]",
license='UNLICENSE',
url='https://github.com/rdegges/django-twilio',
keywords='twilio telephony call phone voip sms django django-twilio',
description='Build Twilio functionality into your Django apps.',
long_description=open(
normpath(join(dirname(abspath(__file__)), 'README.rst'))
).read(),
project_urls={
"Documentation": "https://django-twilio.readthedocs.io/en/latest/",
"Code": "https://github.com/rdegges/django-twilio",
"Tracker": "https://github.com/rdegges/django-twilio/issues",
},
classifiers=[
'Framework :: Django',
'Framework :: Django :: 2.2',
'Framework :: Django :: 3.0',
'Intended Audience :: Developers',
'License :: Public Domain',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Internet :: WWW/HTTP',
]
)
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
def hide_environment_none(apps, schema_editor):
"""
    Hide environments that are named 'none', since such names are blacklisted
    and can no longer be created. We iterate over each environment row
    individually in Python so that we don't lock the DB up. This is far
    slower but much safer.
"""
EnvironmentProject = apps.get_model("sentry", "EnvironmentProject")
for project in EnvironmentProject.objects.filter(environment__name='none'):
project.is_hidden = True
project.save()
class Migration(migrations.Migration):
# This flag is used to mark that a migration shouldn't be automatically run in
# production. We set this to True for operations that we think are risky and want
# someone from ops to run manually and monitor.
# General advice is that if in doubt, mark your migration as `is_dangerous`.
# Some things you should always mark as dangerous:
# - Adding indexes to large tables. These indexes should be created concurrently,
# unfortunately we can't run migrations outside of a transaction until Django
# 1.10. So until then these should be run manually.
# - Large data migrations. Typically we want these to be run manually by ops so that
# they can be monitored. Since data migrations will now hold a transaction open
# this is even more important.
# - Adding columns to highly active tables, even ones that are NULL.
is_dangerous = False
dependencies = [
("sentry", "0022_merge"),
]
operations = [
migrations.RunPython(hide_environment_none, migrations.RunPython.noop)
]
|
# -*- coding: utf-8 -*-
#
# (c) Copyright 2001-2008 Hewlett-Packard Development Company, L.P.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# Author: Don Welch
#
# Ported from Perl's Image::Size module by Randy J. Ray
#
# Std Lib
import os
import os.path
import re
import struct
# Re patterns
xbm_pat = re.compile(r'^\#define\s*\S*\s*(\d+)\s*\n\#define\s*\S*\s*(\d+)', re.IGNORECASE)
xpm_pat = re.compile(r'"\s*(\d+)\s+(\d+)(\s+\d+\s+\d+){1,2}\s*"', re.IGNORECASE)
ppm_pat1 = re.compile(r'^\#.*', re.IGNORECASE | re.MULTILINE)
ppm_pat2 = re.compile(r'^(P[1-6])\s+(\d+)\s+(\d+)', re.IGNORECASE)
ppm_pat3 = re.compile(r'IMGINFO:(\d+)x(\d+)', re.IGNORECASE)
tiff_endian_pat = re.compile(r'II\x2a\x00')
def readin(stream, length, offset=0):
if offset != 0:
stream.seek(offset, 0)
return stream.read(length)
def xbmsize(stream):
width, height = -1, -1
match = xbm_pat.match(readin(stream,1024))
try:
width = int(match.group(1))
height = int(match.group(2))
except:
pass
return width, height
def xpmsize(stream):
width, height = -1, -1
match = re.search(xpm_pat, readin(stream, 1024))
try:
width = int(match.group(1))
height = int(match.group(2))
except:
pass
return width, height
def pngsize(stream): # also does MNG
width, height = -1, -1
if readin(stream, 4, 12) in ('IHDR', 'MHDR'):
height, width = struct.unpack("!II", stream.read(8))
return width,height
def jpegsize(stream):
width, height = -1, -1
stream.seek(2)
while True:
length = 4
buffer = readin(stream, length)
try:
marker, code, length = struct.unpack("!c c h", buffer)
except:
break
if marker != '\xff':
break
if 0xc0 <= ord(code) <= 0xc3:
length = 5
height, width = struct.unpack("!xhh", readin(stream, length))
else:
readin(stream, length-2)
return width, height
def ppmsize(stream):
width, height = -1, -1
header = re.sub(ppm_pat1, '', readin(stream, 1024))
match = ppm_pat2.match(header)
typ = ''
try:
typ = match.group(1)
width = int(match.group(2))
height = int(match.group(3))
except:
pass
if typ == 'P7':
match = ppm_pat3.match(header)
try:
width = int(match.group(1))
height = int(match.group(2))
except:
pass
return width, height
def tiffsize(stream):
header = readin(stream, 4)
endian = ">"
match = tiff_endian_pat.match(header)
if match is not None:
endian = "<"
input = readin(stream, 4, 4)
offset = struct.unpack('%si' % endian, input)[0]
    num_dirent = struct.unpack('%sH' % endian, readin(stream, 2, offset))[0]
    offset += 2
    ifd_end = offset + (num_dirent * 12)  # offset just past the last IFD entry
    width, height = -1, -1
    while True:
        ifd = readin(stream, 12, offset)
        if ifd == '' or offset > ifd_end:
break
offset += 12
tag = struct.unpack('%sH'% endian, ifd[0:2])[0]
type = struct.unpack('%sH' % endian, ifd[2:4])[0]
if tag == 0x0100:
width = struct.unpack("%si" % endian, ifd[8:12])[0]
elif tag == 0x0101:
height = struct.unpack("%si" % endian, ifd[8:12])[0]
return width, height
def bmpsize(stream):
width, height = struct.unpack("<II", readin(stream, 8, 18))
return width, height
def gifsize(stream):
# since we only care about the printed size of the image
# we only need to get the logical screen sizes, which are
# the maximum extents of the image. This code is much simpler
# than the code from Image::Size
#width, height = -1, -1
buf = readin(stream, 7, 6) # LSx, GCTF, etc
height, width, flags, bci, par = struct.unpack('<HHBBB', buf)
return width, height
TYPE_MAP = {re.compile('^GIF8[7,9]a') : ('image/gif', gifsize),
re.compile("^\xFF\xD8") : ('image/jpeg', jpegsize),
re.compile("^\x89PNG\x0d\x0a\x1a\x0a") : ('image/png', pngsize),
re.compile("^P[1-7]") : ('image/x-portable-pixmap', ppmsize),
re.compile('\#define\s+\S+\s+\d+') : ('image/x-xbitmap', xbmsize),
re.compile('\/\* XPM \*\/') : ('image/x-xpixmap', xpmsize),
re.compile('^MM\x00\x2a') : ('image/tiff', tiffsize),
re.compile('^II\*\x00') : ('image/tiff', tiffsize),
re.compile('^BM') : ('image/x-bitmap', bmpsize),
re.compile("^\x8aMNG\x0d\x0a\x1a\x0a") : ('image/png', pngsize),
}
def imagesize(filename, mime_type=''):
    width, height = -1, -1
    func = None  # avoid a NameError when a mime_type is given but no sizer matches
    f = open(filename, 'r')
    buffer = f.read(4096)
if not mime_type:
for t in TYPE_MAP:
match = t.search(buffer)
if match is not None:
mime_type, func = TYPE_MAP[t]
break
if mime_type and func:
f.seek(0)
width, height = func(f)
else:
width, height = -1, -1
f.close()
return height, width, mime_type
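# Illustrative usage sketch (hypothetical path, not part of the original
# module): note that imagesize() returns (height, width, mime_type), not
# (width, height, ...).
def _example_imagesize(path='photo.jpg'):
    height, width, mime_type = imagesize(path)
    return width, height, mime_type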
|
from contextlib import contextmanager
from functools import partial
import inspect
from itertools import chain, compress
import re
import shutil
from subprocess import check_call, CalledProcessError, DEVNULL
from types import MappingProxyType
from coalib.bears.LocalBear import LocalBear
from coalib.misc.ContextManagers import make_temp
from coala_utils.decorators import assert_right_type, enforce_signature
from coalib.misc.Future import partialmethod
from coalib.misc.Shell import run_shell_command
from coalib.results.Diff import Diff
from coalib.results.Result import Result
from coalib.results.RESULT_SEVERITY import RESULT_SEVERITY
from coalib.settings.FunctionMetadata import FunctionMetadata
def _prepare_options(options):
"""
Prepares options for ``linter`` for a given options dict in-place.
:param options:
The options dict that contains user/developer inputs.
"""
allowed_options = {"executable",
"output_format",
"use_stdin",
"use_stdout",
"use_stderr",
"config_suffix",
"executable_check_fail_info",
"prerequisite_check_command"}
if not options["use_stdout"] and not options["use_stderr"]:
raise ValueError("No output streams provided at all.")
if options["output_format"] == "corrected":
if (
"diff_severity" in options and
options["diff_severity"] not in RESULT_SEVERITY.reverse):
raise TypeError("Invalid value for `diff_severity`: " +
repr(options["diff_severity"]))
if "result_message" in options:
assert_right_type(options["result_message"], str, "result_message")
if "diff_distance" in options:
assert_right_type(options["diff_distance"], int, "diff_distance")
allowed_options |= {"diff_severity", "result_message", "diff_distance"}
elif options["output_format"] == "regex":
if "output_regex" not in options:
raise ValueError("`output_regex` needed when specified "
"output-format 'regex'.")
options["output_regex"] = re.compile(options["output_regex"])
        # Don't set up severity_map if one is provided by the user or if it's
        # not used inside the output_regex. If one is manually provided but
        # not used in the output_regex, throw an exception.
if "severity_map" in options:
if "severity" not in options["output_regex"].groupindex:
raise ValueError("Provided `severity_map` but named group "
"`severity` is not used in `output_regex`.")
assert_right_type(options["severity_map"], dict, "severity_map")
for key, value in options["severity_map"].items():
assert_right_type(key, str, "severity_map key")
try:
assert_right_type(value, int, "<severity_map dict-value>")
except TypeError:
raise TypeError(
"The value {!r} for key {!r} inside given "
"severity-map is no valid severity value.".format(
value, key))
if value not in RESULT_SEVERITY.reverse:
raise TypeError(
"Invalid severity value {!r} for key {!r} inside "
"given severity-map.".format(value, key))
            # Auto-convert keys to lower-case. This automatically creates a
            # new dict which prevents runtime-modifications.
options["severity_map"] = {
key.lower(): value
for key, value in options["severity_map"].items()}
if "result_message" in options:
assert_right_type(options["result_message"], str, "result_message")
allowed_options |= {"output_regex", "severity_map", "result_message"}
elif options["output_format"] is not None:
raise ValueError("Invalid `output_format` specified.")
if options["prerequisite_check_command"]:
if "prerequisite_check_fail_message" in options:
assert_right_type(options["prerequisite_check_fail_message"],
str,
"prerequisite_check_fail_message")
else:
options["prerequisite_check_fail_message"] = (
"Prerequisite check failed.")
allowed_options.add("prerequisite_check_fail_message")
# Check for illegal superfluous options.
superfluous_options = options.keys() - allowed_options
if superfluous_options:
raise ValueError(
"Invalid keyword arguments provided: " +
", ".join(repr(s) for s in sorted(superfluous_options)))
def _create_linter(klass, options):
class LinterMeta(type):
def __repr__(cls):
return "<{} linter class (wrapping {!r})>".format(
cls.__name__, options["executable"])
class LinterBase(LocalBear, metaclass=LinterMeta):
@staticmethod
def generate_config(filename, file):
"""
Generates the content of a config-file the linter-tool might need.
The contents generated from this function are written to a
temporary file and the path is provided inside
``create_arguments()``.
By default no configuration is generated.
You can provide additional keyword arguments and defaults. These
will be interpreted as required settings that need to be provided
through a coafile-section.
:param filename:
The name of the file currently processed.
:param file:
The contents of the file currently processed.
:return:
The config-file-contents as a string or ``None``.
"""
return None
@staticmethod
def create_arguments(filename, file, config_file):
"""
Creates the arguments for the linter.
You can provide additional keyword arguments and defaults. These
will be interpreted as required settings that need to be provided
through a coafile-section.
:param filename:
The name of the file the linter-tool shall process.
:param file:
The contents of the file.
:param config_file:
The path of the config-file if used. ``None`` if unused.
:return:
A sequence of arguments to feed the linter-tool with.
"""
raise NotImplementedError
@staticmethod
def get_executable():
"""
Returns the executable of this class.
:return:
The executable name.
"""
return options["executable"]
@classmethod
def check_prerequisites(cls):
"""
Checks whether the linter-tool the bear uses is operational.
:return:
True if operational, otherwise a string containing more info.
"""
if shutil.which(cls.get_executable()) is None:
return (repr(cls.get_executable()) + " is not installed." +
(" " + options["executable_check_fail_info"]
if options["executable_check_fail_info"] else
""))
else:
if options["prerequisite_check_command"]:
try:
check_call(options["prerequisite_check_command"],
stdout=DEVNULL,
stderr=DEVNULL)
return True
except (OSError, CalledProcessError):
return options["prerequisite_check_fail_message"]
return True
@classmethod
def _get_create_arguments_metadata(cls):
return FunctionMetadata.from_function(
cls.create_arguments,
omit={"self", "filename", "file", "config_file"})
@classmethod
def _get_generate_config_metadata(cls):
return FunctionMetadata.from_function(
cls.generate_config,
omit={"filename", "file"})
@classmethod
def _get_process_output_metadata(cls):
metadata = FunctionMetadata.from_function(cls.process_output)
if options["output_format"] is None:
omitted = {"self", "output", "filename", "file"}
else:
# If a specific output format is provided, function signatures
# from process_output functions should not appear in the help.
omitted = set(chain(metadata.non_optional_params,
metadata.optional_params))
metadata.omit = omitted
return metadata
@classmethod
def get_metadata(cls):
merged_metadata = FunctionMetadata.merge(
cls._get_process_output_metadata(),
cls._get_generate_config_metadata(),
cls._get_create_arguments_metadata())
merged_metadata.desc = inspect.getdoc(cls)
return merged_metadata
def _convert_output_regex_match_to_result(self,
match,
filename,
severity_map,
result_message):
"""
Converts the matched named-groups of ``output_regex`` to an actual
``Result``.
:param match:
The regex match object.
:param filename:
The name of the file this match belongs to.
:param severity_map:
The dict to use to map the severity-match to an actual
``RESULT_SEVERITY``.
:param result_message:
The static message to use for results instead of grabbing it
from the executable output via the ``message`` named regex
group.
"""
            # Pre-process the regex match's named groups.
groups = match.groupdict()
if 'severity' in groups:
try:
groups["severity"] = severity_map[
groups["severity"].lower()]
except KeyError:
self.warn(
repr(groups["severity"]) + " not found in "
"severity-map. Assuming `RESULT_SEVERITY.NORMAL`.")
groups["severity"] = RESULT_SEVERITY.NORMAL
else:
groups['severity'] = RESULT_SEVERITY.NORMAL
for variable in ("line", "column", "end_line", "end_column"):
groups[variable] = (None
if groups.get(variable, None) is None else
int(groups[variable]))
if "origin" in groups:
groups["origin"] = "{} ({})".format(klass.__name__,
groups["origin"].strip())
# Construct the result.
return Result.from_values(
origin=groups.get("origin", self),
message=(groups.get("message", "").strip()
if result_message is None else result_message),
file=filename,
severity=groups["severity"],
line=groups["line"],
column=groups["column"],
end_line=groups["end_line"],
end_column=groups["end_column"],
additional_info=groups.get("additional_info", "").strip())
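        # Worked example (illustrative): a match with the named groups
        # {"line": "3", "column": "5", "severity": "warning", "message": "x"}
        # and the default severity-map yields a ``Result`` with
        # severity=RESULT_SEVERITY.NORMAL, line=3, column=5 and
        # end_line/end_column set to ``None``.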
def process_output_corrected(self,
output,
filename,
file,
diff_severity=RESULT_SEVERITY.NORMAL,
result_message="Inconsistency found.",
diff_distance=1):
"""
Processes the executable's output as a corrected file.
:param output:
The output of the program. This can be either a single
string or a sequence of strings.
:param filename:
The filename of the file currently being corrected.
:param file:
The contents of the file currently being corrected.
:param diff_severity:
The severity to use for generating results.
:param result_message:
The message to use for generating results.
:param diff_distance:
Number of unchanged lines that are allowed in between two
changed lines so they get yielded as one diff. If a negative
distance is given, every change will be yielded as an own diff,
even if they are right beneath each other.
:return:
An iterator returning results containing patches for the
file to correct.
"""
if isinstance(output, str):
output = (output,)
for string in output:
for diff in Diff.from_string_arrays(
file,
string.splitlines(keepends=True)).split_diff(
distance=diff_distance):
yield Result(self,
result_message,
affected_code=diff.affected_code(filename),
diffs={filename: diff},
severity=diff_severity)
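        # Example of the ``diff_distance`` semantics above (illustrative):
        # with ``diff_distance=1``, corrections on lines 4 and 6 (one
        # unchanged line in between) are yielded as a single diff/result;
        # with ``diff_distance=-1``, each changed line becomes its own
        # diff/result.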
def process_output_regex(
self, output, filename, file, output_regex,
severity_map=MappingProxyType({
"critical": RESULT_SEVERITY.MAJOR,
"c": RESULT_SEVERITY.MAJOR,
"fatal": RESULT_SEVERITY.MAJOR,
"fail": RESULT_SEVERITY.MAJOR,
"f": RESULT_SEVERITY.MAJOR,
"error": RESULT_SEVERITY.MAJOR,
"err": RESULT_SEVERITY.MAJOR,
"e": RESULT_SEVERITY.MAJOR,
"warning": RESULT_SEVERITY.NORMAL,
"warn": RESULT_SEVERITY.NORMAL,
"w": RESULT_SEVERITY.NORMAL,
"information": RESULT_SEVERITY.INFO,
"info": RESULT_SEVERITY.INFO,
"i": RESULT_SEVERITY.INFO,
"suggestion": RESULT_SEVERITY.INFO}),
result_message=None):
"""
Processes the executable's output using a regex.
:param output:
The output of the program. This can be either a single
string or a sequence of strings.
:param filename:
The filename of the file currently being corrected.
:param file:
The contents of the file currently being corrected.
:param output_regex:
The regex to parse the output with. It should use as many
of the following named groups (via ``(?P<name>...)``) to
provide a good result:
- line - The line where the issue starts.
- column - The column where the issue starts.
- end_line - The line where the issue ends.
- end_column - The column where the issue ends.
- severity - The severity of the issue.
- message - The message of the result.
- origin - The origin of the issue.
- additional_info - Additional info provided by the issue.
            The groups ``line``, ``column``, ``end_line`` and
            ``end_column`` don't have to match numbers only; they can
            also match nothing, in which case the corresponding
            properties of the generated ``Result`` are automatically
            set to ``None``.
:param severity_map:
A dict used to map a severity string (captured from the
``output_regex`` with the named group ``severity``) to an
actual ``coalib.results.RESULT_SEVERITY`` for a result.
:param result_message:
The static message to use for results instead of grabbing it
from the executable output via the ``message`` named regex
group.
:return:
An iterator returning results.
"""
if isinstance(output, str):
output = (output,)
for string in output:
for match in re.finditer(output_regex, string):
yield self._convert_output_regex_match_to_result(
match, filename, severity_map=severity_map,
result_message=result_message)
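        # Illustrative regex (the tool output format is an assumption): for
        # output lines like ``3:5: W101 trailing whitespace`` one could pass
        #
        #     output_regex=r"(?P<line>\d+):(?P<column>\d+): "
        #                  r"(?P<severity>[EWI])\d+ (?P<message>.*)"
        #
        # where ``E``/``W``/``I`` map through the default severity-map after
        # lower-casing (``e`` -> MAJOR, ``w`` -> NORMAL, ``i`` -> INFO).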
if options["output_format"] is None:
# Check if user supplied a `process_output` override.
if not callable(getattr(klass, "process_output", None)):
raise ValueError("`process_output` not provided by given "
"class {!r}.".format(klass.__name__))
# No need to assign to `process_output` here, the class mixing
# below automatically does that.
else:
# Prevent people from accidentally defining `process_output`
# manually, as this would implicitly override the internally
# set-up `process_output`.
if hasattr(klass, "process_output"):
raise ValueError("Found `process_output` already defined "
"by class {!r}, but {!r} output-format is "
"specified.".format(klass.__name__,
options["output_format"]))
if options["output_format"] == "corrected":
process_output_args = {
key: options[key]
for key in ("result_message", "diff_severity",
"diff_distance")
if key in options}
process_output = partialmethod(
process_output_corrected, **process_output_args)
else:
assert options["output_format"] == "regex"
process_output_args = {
key: options[key]
for key in ("output_regex", "severity_map",
"result_message")
if key in options}
process_output = partialmethod(
process_output_regex, **process_output_args)
@classmethod
@contextmanager
def _create_config(cls, filename, file, **kwargs):
"""
Provides a context-manager that creates the config file if the
user provides one and cleans it up when done with linting.
:param filename:
The filename of the file.
:param file:
The file contents.
:param kwargs:
Section settings passed from ``run()``.
:return:
A context-manager handling the config-file.
"""
content = cls.generate_config(filename, file, **kwargs)
if content is None:
yield None
else:
with make_temp(
suffix=options["config_suffix"]) as config_file:
with open(config_file, mode="w") as fl:
fl.write(content)
yield config_file
def run(self, filename, file, **kwargs):
# Get the **kwargs params to forward to `generate_config()`
# (from `_create_config()`).
generate_config_kwargs = FunctionMetadata.filter_parameters(
self._get_generate_config_metadata(), kwargs)
with self._create_config(
filename,
file,
**generate_config_kwargs) as config_file:
# And now retrieve the **kwargs for `create_arguments()`.
create_arguments_kwargs = (
FunctionMetadata.filter_parameters(
self._get_create_arguments_metadata(), kwargs))
args = self.create_arguments(filename, file, config_file,
**create_arguments_kwargs)
try:
args = tuple(args)
except TypeError:
self.err("The given arguments "
"{!r} are not iterable.".format(args))
return
arguments = (self.get_executable(),) + args
self.debug("Running '{}'".format(' '.join(arguments)))
output = run_shell_command(
arguments,
stdin="".join(file) if options["use_stdin"] else None)
output = tuple(compress(
output,
(options["use_stdout"], options["use_stderr"])))
if len(output) == 1:
output = output[0]
process_output_kwargs = FunctionMetadata.filter_parameters(
self._get_process_output_metadata(), kwargs)
return self.process_output(output, filename, file,
**process_output_kwargs)
def __repr__(self):
return "<{} linter object (wrapping {!r}) at {}>".format(
type(self).__name__, self.get_executable(), hex(id(self)))
# Mixin the linter into the user-defined interface, otherwise
# `create_arguments` and other methods would be overridden by the
# default version.
result_klass = type(klass.__name__, (klass, LinterBase), {})
result_klass.__doc__ = klass.__doc__ if klass.__doc__ else ""
return result_klass
@enforce_signature
def linter(executable: str,
use_stdin: bool=False,
use_stdout: bool=True,
use_stderr: bool=False,
config_suffix: str="",
executable_check_fail_info: str="",
prerequisite_check_command: tuple=(),
output_format: (str, None)=None,
**options):
"""
Decorator that creates a ``LocalBear`` that is able to process results from
an external linter tool.
The main functionality is achieved through the ``create_arguments()``
function that constructs the command-line-arguments that get passed to your
executable.
>>> @linter("xlint", output_format="regex", output_regex="...")
... class XLintBear:
... @staticmethod
... def create_arguments(filename, file, config_file):
... return "--lint", filename
Requiring settings is possible like in ``Bear.run()`` with supplying
additional keyword arguments (and if needed with defaults).
>>> @linter("xlint", output_format="regex", output_regex="...")
... class XLintBear:
... @staticmethod
... def create_arguments(filename,
... file,
... config_file,
... lintmode: str,
... enable_aggressive_lints: bool=False):
... arguments = ("--lint", filename, "--mode=" + lintmode)
... if enable_aggressive_lints:
... arguments += ("--aggressive",)
... return arguments
Sometimes your tool requires an actual file that contains configuration.
``linter`` allows you to just define the contents the configuration shall
contain via ``generate_config()`` and handles everything else for you.
>>> @linter("xlint", output_format="regex", output_regex="...")
... class XLintBear:
... @staticmethod
... def generate_config(filename,
... file,
... lintmode,
... enable_aggressive_lints):
... modestring = ("aggressive"
... if enable_aggressive_lints else
... "non-aggressive")
... contents = ("<xlint>",
... " <mode>" + lintmode + "</mode>",
... " <aggressive>" + modestring + "</aggressive>",
... "</xlint>")
... return "\\n".join(contents)
...
... @staticmethod
... def create_arguments(filename,
... file,
... config_file):
... return "--lint", filename, "--config", config_file
As you can see you don't need to copy additional keyword-arguments you
introduced from ``create_arguments()`` to ``generate_config()`` and
vice-versa. ``linter`` takes care of forwarding the right arguments to the
right place, so you are able to avoid signature duplication.
    If you override ``process_output``, you get the same feature as above
    (auto-forwarding of the right arguments defined in your function
    signature).
    Note when overriding ``process_output``: if only a single output stream
    is captured (via ``use_stdout`` or ``use_stderr``), the string obtained
    from that stream is passed as the ``output`` parameter; if both streams
    are captured, ``output`` is a tuple ``(stdout, stderr)``. Providing
    ``use_stdout=False`` and ``use_stderr=False`` raises a ``ValueError``.
    By default ``use_stdout`` is ``True`` and ``use_stderr`` is ``False``.
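    For example, an override capturing both streams could unpack them like
    this (``xlint`` is the same placeholder tool as in the examples above):
    >>> @linter("xlint", use_stdout=True, use_stderr=True)
    ... class XLintStreamBear:
    ...     @staticmethod
    ...     def create_arguments(filename, file, config_file):
    ...         return "--lint", filename
    ...     def process_output(self, output, filename, file):
    ...         stdout, stderr = output
    ...         # ... yield Results parsed from stdout/stderr here.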
Documentation:
Bear description shall be provided at class level.
If you document your additional parameters inside ``create_arguments``,
``generate_config`` and ``process_output``, beware that conflicting
documentation between them may be overridden. Document duplicated
parameters inside ``create_arguments`` first, then in ``generate_config``
and after that inside ``process_output``.
For the tutorial see:
http://coala.readthedocs.org/en/latest/Users/Tutorials/Linter_Bears.html
:param executable:
The linter tool.
:param use_stdin:
Whether the input file is sent via stdin instead of passing it over the
command-line-interface.
:param use_stdout:
Whether to use the stdout output stream.
:param use_stderr:
Whether to use the stderr output stream.
:param config_suffix:
The suffix-string to append to the filename of the configuration file
created when ``generate_config`` is supplied. Useful if your executable
expects getting a specific file-type with specific file-ending for the
configuration file.
:param executable_check_fail_info:
Information that is provided together with the fail message from the
normal executable check. By default no additional info is printed.
:param prerequisite_check_command:
A custom command to check for when ``check_prerequisites`` gets
invoked (via ``subprocess.check_call()``). Must be an ``Iterable``.
:param prerequisite_check_fail_message:
A custom message that gets displayed when ``check_prerequisites``
fails while invoking ``prerequisite_check_command``. Can only be
provided together with ``prerequisite_check_command``.
:param output_format:
The output format of the underlying executable. Valid values are
- ``None``: Define your own format by overriding ``process_output``.
      Overriding ``process_output`` is then mandatory; not specifying it
      raises a ``ValueError``.
- ``'regex'``: Parse output using a regex. See parameter
``output_regex``.
    - ``'corrected'``: The output is the corrected version of the given
      file. Diffs are then generated to supply patches for results.
Passing something else raises a ``ValueError``.
:param output_regex:
The regex expression as a string that is used to parse the output
generated by the underlying executable. It should use as many of the
following named groups (via ``(?P<name>...)``) to provide a good
result:
- line - The line where the issue starts.
- column - The column where the issue starts.
- end_line - The line where the issue ends.
- end_column - The column where the issue ends.
- severity - The severity of the issue.
- message - The message of the result.
- origin - The origin of the issue.
- additional_info - Additional info provided by the issue.
    The groups ``line``, ``column``, ``end_line`` and ``end_column`` don't
    have to match numbers only; they can also match nothing, in which case
    the corresponding properties of the generated ``Result`` are
    automatically set to ``None``.
Needs to be provided if ``output_format`` is ``'regex'``.
:param severity_map:
A dict used to map a severity string (captured from the
``output_regex`` with the named group ``severity``) to an actual
``coalib.results.RESULT_SEVERITY`` for a result. Severity strings are
mapped **case-insensitive**!
    - ``RESULT_SEVERITY.MAJOR``: Mapped by ``critical``, ``c``, ``fatal``,
      ``fail``, ``f``, ``error``, ``err`` or ``e``.
    - ``RESULT_SEVERITY.NORMAL``: Mapped by ``warning``, ``warn`` or ``w``.
    - ``RESULT_SEVERITY.INFO``: Mapped by ``information``, ``info``, ``i``
      or ``suggestion``.
A ``ValueError`` is raised when the named group ``severity`` is not
used inside ``output_regex`` and this parameter is given.
:param diff_severity:
The severity to use for all results if ``output_format`` is
``'corrected'``. By default this value is
``coalib.results.RESULT_SEVERITY.NORMAL``. The given value needs to be
defined inside ``coalib.results.RESULT_SEVERITY``.
:param result_message:
The message-string to use for all results. Can be used only together
    with the ``corrected`` or ``regex`` output format. When using
    ``corrected``, the default value is ``"Inconsistency found."``; for
    ``regex`` no static message is set by default and the message matched
    by ``output_regex`` is used instead.
:param diff_distance:
Number of unchanged lines that are allowed in between two changed lines
so they get yielded as one diff if ``corrected`` output-format is
given. If a negative distance is given, every change will be yielded as
an own diff, even if they are right beneath each other. By default this
value is ``1``.
:raises ValueError:
Raised when invalid options are supplied.
:raises TypeError:
Raised when incompatible types are supplied.
See parameter documentations for allowed types.
:return:
A ``LocalBear`` derivation that lints code using an external tool.
"""
options["executable"] = executable
options["output_format"] = output_format
options["use_stdin"] = use_stdin
options["use_stdout"] = use_stdout
options["use_stderr"] = use_stderr
options["config_suffix"] = config_suffix
options["executable_check_fail_info"] = executable_check_fail_info
options["prerequisite_check_command"] = prerequisite_check_command
_prepare_options(options)
return partial(_create_linter, options=options)
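# Illustrative sketch (added commentary; ``xformat`` is a hypothetical tool):
# the ``corrected`` output-format, not covered by the doctests above, suits
# tools that print the fixed file to stdout:
#
#     @linter("xformat",
#             output_format="corrected",
#             result_message="Formatting can be improved.")
#     class XFormatBear:
#         """Checks code formatting via the `xformat` tool."""
#         @staticmethod
#         def create_arguments(filename, file, config_file):
#             return (filename,)
#
# Diffs between the input file and the tool's stdout are then yielded as
# results by ``process_output_corrected``.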
|
"""Runs all tests."""
import os
import sys
import glob
import unittest
import pandas as pd
import time
def main():
"""Runs the tests."""
r = {"test": [], "time": []}
failurestrings = []
for test in glob.glob('test_*.py'):
s = time.time()
sys.stderr.write('\nRunning tests in {0}...\n'.format(test))
test = os.path.splitext(test)[0]
suite = unittest.TestLoader().loadTestsFromName(test)
result = unittest.TestResult()
suite.run(result)
e = time.time()
if result.wasSuccessful():
sys.stderr.write('All tests were successful.\n')
r["test"].append(test)
r["time"].append(float(e-s)/60)
        else:
            # Failing tests are reported below but not added to the timing table.
            sys.stderr.write('Test(s) FAILED!\n')
for (_testcase, failstring) in result.failures + result.errors:
failurestrings.append(failstring)
if not failurestrings:
sys.stderr.write('\nTesting complete. All passed successfully.\n')
else:
sys.stderr.write('\nTesting complete. Failed on the following:\n')
for fstring in failurestrings:
sys.stderr.write('\n*********\n{0}\n********\n'.format(fstring))
    # Flag tests that ran longer than ten minutes and write the timing table.
    r = pd.DataFrame(r)
    r["Greater_10min"] = ["y" if x > 10 else "n" for x in r["time"]]
    r.to_csv("time_test_results.csv", index=False)
if __name__ == '__main__':
main()
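# Usage sketch (assumption: this script lives next to the test_*.py modules
# and is run from that directory, e.g. as ``python run_tests.py``). It writes
# time_test_results.csv with columns "test", "time" (minutes) and
# "Greater_10min" ("y"/"n").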
|
##############################################################
# Date: 20/01/16
# Name: calc_multi_atm.py
# Author: Alek Petty
# Description: Main script to calculate sea ice topography from IB ATM data
# Input requirements: ATM data, PosAV data (for geolocation)
# Output: topography datasets
import matplotlib
matplotlib.use("AGG")
import IB_functions as ro
import mpl_toolkits.basemap.pyproj as pyproj
from osgeo import osr, gdal
from pyproj import Proj
from glob import glob
from pylab import *
from scipy import ndimage
from matplotlib import rc
#from scipy.interpolate import griddata
from matplotlib.mlab import griddata
import time
import scipy.interpolate
import h5py
from scipy.spatial import cKDTree as KDTree
import os
def calc_bulk_stats():
ice_area=-999
ridge_area_all=-999
mean_ridge_height_all=-999
mean_ridge_heightL=-999
ridge_areaL=-999
num_ridges_out=-999
levpercent_out=-999
num_pts_section=-999
# IF SECTION GOOD THEN GET ICE SWATH AREA
if (points_good==1):
ice_area = ma.count(elevation2d)*(xy_res**2)
levpercent_out=levpercent
# IF SECTION GOOD AND HAVE SOME RIDGING THEN ASSIGN TOTAL RIDGE AREA AND ELEVATION
if ((points_good==1)&(found_ridges==1)):
ridge_area_all = ma.count(elevation2d_ridge_ma)*(xy_res**2)
mean_ridge_height_all = np.mean(elevation2d_ridge_ma) - level_elev
# IF SECTION GOOD AND WE HAVE NO RIDGING (AREA OF RIDGING = 0) THEN ASSIGN ZERO RIDGE AREA HEIGHT
if ((points_good==1)&(found_ridges==0)):
ridge_area_all = 0.
mean_ridge_height_all = 0.
#IF GOOD SECTION BUT NO BIG RIDGES THEN SET THESE VALUES TO ZERO
if ((points_good==1)&(found_big_ridge==0)):
mean_ridge_heightL=0.
ridge_areaL=0.
num_ridges_out=0
    # IF WE FOUND SOME BIG RIDGES THEN ASSIGN BIG RIDGE AREA, HEIGHT AND NUMBER
if ((points_good==1)&(found_big_ridge==1)):
mean_ridge_heightL = np.mean(ridge_height_mesh)
ridge_areaL = ma.count(ridge_height_mesh)*(xy_res**2)
num_ridges_out = num_ridges
return [mean_x, mean_y, ice_area, num_ridges_out, ridge_area_all, ridge_areaL, mean_ridge_height_all, mean_ridge_heightL, mean_alt, mean_pitch, mean_roll, mean_vel, num_pts_section,levpercent_out, section_num, found_ridges, points_good, plane_good]
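# NOTE (added commentary): calc_bulk_stats() deliberately takes no arguments.
# It reads module-level variables (points_good, found_ridges, elevation2d,
# ridge_height_mesh, mean_x, mean_y, etc.) that the main loop below assigns
# before each call, and leaves the -999 defaults wherever a check failed.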
#-------------- ATM AND DMS PATHS------------------
datapath='./Data_output/'
rawdatapath = '../../../DATA/ICEBRIDGE/'
ATM_path = rawdatapath+'/ATM/ARCTIC/'
posAV_path =rawdatapath+'/POSAV/SEA_ICE/GR/'
#posAV_path ='/Volumes/TBOLT_HD_PETTY/POSAV/'
m=pyproj.Proj("+init=EPSG:3413")
#FREE PARAMETERS
min_ridge_height = 0.2
along_track_res=1000
pwidth=20
pint=5
xy_res=2
start_year=2009
end_year=2009
min_ridge_size=100
sh=0
if (sh==1):
print 'Ridge threshold:', sys.argv[1]
print 'Along track res:',sys.argv[2]
print 'xy res:',sys.argv[3]
print 'Start year:',sys.argv[4]
print 'End year:',sys.argv[5]
min_ridge_height = float(sys.argv[1])
along_track_res = int(sys.argv[2])
xy_res = int(sys.argv[3])
start_year=int(sys.argv[4])
end_year=int(sys.argv[5])
pts_threshold=15000
num_points_req = min_ridge_size/(xy_res**2)
section_num=0
print 'Num points req', num_points_req
ftype = str(int(along_track_res/1000))+'km_xyres'+str(xy_res)+'m_'+str(int(min_ridge_height*100))+'cm'
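# Worked example with the defaults above (Python 2 integer division):
# num_points_req = 100/(2**2) = 25 grid cells, ftype = '1km_xyres2m_20cm',
# so output goes to ./Data_output/1km_xyres2m_20cm/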
outpath = datapath+ftype+'/'
for year in xrange(start_year, end_year+1):
ATM_year = ATM_path+str(year)+'/'
atm_files_year = glob(ATM_year+'/*/')
#for days in xrange():
for days in xrange(size(atm_files_year)):
atm_path_date = atm_files_year[days]
print 'ATM day:', atm_path_date
atm_files_in_day = ro.get_atm_files(atm_path_date, year)
#load POS file
posAV = loadtxt(posAV_path+str(year)+'_GR_NASA/sbet_'+str(atm_path_date[-9:-1])+'.out.txt', skiprows=1)
#GET POSITION OF PLANE AND 1km MARKERS FROM POSAV
xp, yp, dist, km_idxs, km_utc_times = ro.get_pos_sections(posAV, m, along_track_res)
for atm_file in xrange(size(atm_files_in_day)):
atm_statsALL=np.array([]).reshape(0,3)
ridge_statsALL=np.array([]).reshape(0,9)
covarALL=np.array([]).reshape(0,5)
bulk_statsALL=np.array([]).reshape(0,18)
print 'ATM file:', atm_files_in_day[atm_file], str(atm_file)+'/'+str(size(atm_files_in_day))
lonT, latT, elevationT, utc_timeT= ro.get_atmqih5(atm_files_in_day[atm_file], year, 1)
        #IF SIZE OF DATA IS LESS THAN SOME THRESHOLD THEN DONT BOTHER ANALYZING
        # (NOTE: break ABANDONS THE REMAINING ATM FILES FOR THIS DAY, NOT JUST THIS FILE)
if (size(utc_timeT)<100):
break
xT, yT = m(lonT, latT)
#GET POSAV INDICES COINCIDING WITH START AND END OF ATM FILE. ADD PLUS/MINUS 1 FOR SOME LEEWAY.
start_i = np.abs(km_utc_times - utc_timeT[0]).argmin()
end_i = np.abs(km_utc_times - utc_timeT[-1]).argmin()
print 'START/END:', start_i, end_i
for i in xrange(start_i -1, end_i + 1):
section_num+=1
found_ridges=0
found_big_ridge=0
plane_good=0
points_good=0
ridge_statsT = np.array([]).reshape(0,9)
cov_matrix = np.array([]).reshape(0,5)
#label_numsL=np.array(0)
mean_x, mean_y, mean_alt, mean_pitch, mean_roll, mean_vel = ro.posav_section_info(m, posAV[km_idxs[i]:km_idxs[i+1]] )
print ' '
print str(i)+'/'+str(end_i + 1)
print 'Mean altitude:', mean_alt
print 'Mean pitch:', mean_pitch
print 'Mean roll:', mean_roll
print 'Mean vel:', mean_vel
if (abs(mean_alt-500)<200) & (abs(mean_pitch)<5) & (abs(mean_roll)<5):
plane_good=1
poly_path, vertices, sides = ro.get_pos_poly(xp, yp, km_idxs[i], km_idxs[i+1])
xatm_km, yatm_km, elevation_km = ro.get_atm_poly(xT, yT, elevationT, km_utc_times, utc_timeT, poly_path, i)
num_pts_section = size(xatm_km)
print 'Num pts in section:', size(xatm_km)
#if there are more than 15000 pts in the 1km grid (average of around 20000) then proceed
if (num_pts_section>pts_threshold):
points_good=1
#ro.plot_atm_poly(m, xatm_km, yatm_km, elevation_km, poly_path, i, out_path, year)
#GET ATM GRID
xx2d, yy2d = ro.grid_atm(xatm_km, yatm_km, xy_res)
print 'Grid:', size(xx2d[0]), size(xx2d[1])
# CALCULATE THE LEVEL ICE SURFACE USING THE CUMULATIVE DISTRIBUTION
#THRESH IS THE LEVEL ICE PLUS RIDGED ICE ELEVATION
level_elev, thresh, levpercent = ro.calc_level_ice(elevation_km, pint, pwidth, min_ridge_height)
#level_elev, thresh, levpercent = ro.calc_level_ice(elevation_km, pwidth, min_ridge_height)
elevation2d, elevation2d_ridge_ma, ridge_area = ro.grid_elevation(xatm_km, yatm_km,elevation_km, xx2d, yy2d, thresh, kdtree=1)
elevation2d_ridge_maL =elevation2d_ridge_ma-level_elev
#IF THERE IS EVEN A LITTLE BIT OF RIDGING (might not actually be enough for a big areal ridge from the labelling) then proceed to clean up data.
if (ridge_area>0):
found_ridges=1
#CLEAN UP DATA WITH KDTREE AROUND RIDGE POINTS
#REMOVE FOR PRELIMINARY STUDIES AS COMPUTATIONALLY EXPENSIVE!
#elevation2d_ridge_ma = kdtree_clean()
#GET RIDGE LABELS - MAKE SURE RIDGES ARE ABOVE CERTAIN SIZE, DECIDED BY NUM_PTS_REQ
label_im = ro.get_labels(elevation2d_ridge_maL, xy_res, min_ridge_size, min_ridge_height)
# DECIDE IF WE WANT TO CALCULATE RIDGE ORIENTATION OR NOT.
if (np.amax(label_im)>=1):
found_big_ridge=1
print 'Found Ridge!'
print 'Number of labels:', np.amax(label_im)
num_ridges = np.amax(label_im)
#GET RIDGE STATS IF WE DID FIND A RIDGE
ridge_statsT, ridge_height_mesh, cov_matrix, indexT = ro.calc_ridge_stats(elevation2d_ridge_ma, num_ridges, label_im, xx2d, yy2d, level_elev, section_num, calc_orientation=1)
                    #IF WE DID NOT HAVE A VALID NUMBER OF POINTS WITHIN THE SECTION THEN SKIP THE RIDGE CALCULATION
else:
print 'No data - WHY?! --------------'
print 'Num pts in section:', size(xatm_km)
            #ASSIGN BULK STATISTICS FOR EVERY SECTION - THE -999 DEFAULTS REMAIN WHERE THE PLANE/POINTS CHECKS ABOVE FAILED
bulk_statsT = calc_bulk_stats()
ridge_statsALL = vstack([ridge_statsALL, ridge_statsT])
covarALL = vstack([covarALL, cov_matrix])
bulk_statsALL = vstack([bulk_statsALL, bulk_statsT])
if not os.path.exists(outpath+str(year)):
os.makedirs(outpath+str(year))
ridge_statsALL.dump(outpath+str(year)+'/ridge_stats_'+str(int(along_track_res/1000))+'km_xyres'+str(xy_res)+'m_'+str(int(min_ridge_height*100))+'cm_poly'+str(atm_path_date[-9:-1])+'_f'+str(atm_file).zfill(3)+'.txt')
covarALL.dump(outpath+str(year)+'/cov_matrix_'+str(int(along_track_res/1000))+'km_xyres'+str(xy_res)+'m_'+str(int(min_ridge_height*100))+'cm_poly'+str(atm_path_date[-9:-1])+'_f'+str(atm_file).zfill(3)+'.txt')
bulk_statsALL.dump(outpath+str(year)+'/bulk_ridge_stats_'+str(int(along_track_res/1000))+'km_xyres'+str(xy_res)+'m_'+str(int(min_ridge_height*100))+'cm_poly'+str(atm_path_date[-9:-1])+'_f'+str(atm_file).zfill(3)+'.txt')
#CAN OUTPUT AS TEXT FILES INSTEAD - BIGGER BUT CAN OPEN RAW
#savetxt(outpath+str(year)+'/ridge_stats_'+str(int(along_track_res/1000))+'km_xyres'+str(xy_res)+'m_'+str(int(min_ridge_height*100))+'cm_poly'+str(atm_path_date[-9:-1])+'_f'+str(atm_file)+'.txt', ridge_statsALL)
#savetxt(outpath+str(year)+'/cov_matrix_'+str(int(along_track_res/1000))+'km_xyres'+str(xy_res)+'m_'+str(int(min_ridge_height*100))+'cm_poly'+str(atm_path_date[-9:-1])+'_f'+str(atm_file)+'.txt', covarALL)
#savetxt(outpath+str(year)+'/bulk_ridge_stats_'+str(int(along_track_res/1000))+'km_xyres'+str(xy_res)+'m_'+str(int(min_ridge_height*100))+'cm_poly'+str(atm_path_date[-9:-1])+'_f'+str(atm_file)+'.txt', bulk_statsALL)
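# NOTE (added commentary): the .dump() calls above write pickled numpy arrays
# despite the .txt extension; they can be read back with np.load(path)
# (allow_pickle=True is required in newer numpy versions).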
|