# -*- coding: utf-8 -*-
"""
This module defines a general procedure for running evaluations
Example usage:
app_driver = EvaluationApplicationDriver()
app_driver.initialise_application(system_param, input_data_param)
app_driver.run(app_driver.app)
system_param and input_data_param should be generated using:
niftynet.utilities.user_parameters_parser.run()
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import time
import itertools
import pandas as pd
import tensorflow as tf
from niftynet.engine.application_factory import ApplicationFactory
from niftynet.io.misc_io import touch_folder
from niftynet.io.image_sets_partitioner import ImageSetsPartitioner
FILE_PREFIX = 'model.ckpt'
class EvaluationApplicationDriver(object):
"""
This class represents the application logic for evaluating a set of
results inferred within NiftyNet (or externally generated)
"""
def __init__(self):
self.app = None
self.model_dir = None
self.summary_dir = None
self.session_prefix = None
self.outputs_collector = None
self.gradients_collector = None
def initialise_application(self, workflow_param, data_param):
"""
        This function receives all parameters from the user config file and
        creates an instance of the application.
:param workflow_param: a dictionary of user parameters,
keys correspond to sections in the config file
:param data_param: a dictionary of input image parameters,
keys correspond to data properties to be used by image_reader
:return:
"""
try:
system_param = workflow_param.get('SYSTEM', None)
net_param = workflow_param.get('NETWORK', None)
infer_param = workflow_param.get('INFERENCE', None)
eval_param = workflow_param.get('EVALUATION', None)
app_param = workflow_param.get('CUSTOM', None)
except AttributeError:
tf.logging.fatal('parameters should be dictionaries')
raise
self.num_threads = 1
# self.num_threads = max(system_param.num_threads, 1)
# self.num_gpus = system_param.num_gpus
# set_cuda_device(system_param.cuda_devices)
# set output TF model folders
self.model_dir = touch_folder(
os.path.join(system_param.model_dir, 'models'))
self.session_prefix = os.path.join(self.model_dir, FILE_PREFIX)
assert infer_param, 'inference parameters not specified'
# create an application instance
assert app_param, 'application specific param. not specified'
self.app_param = app_param
app_module = ApplicationFactory.create(app_param.name)
self.app = app_module(net_param, infer_param, system_param.action)
self.eval_param = eval_param
data_param, self.app_param = \
self.app.add_inferred_output(data_param, self.app_param)
# initialise data input
data_partitioner = ImageSetsPartitioner()
# clear the cached file lists
data_partitioner.reset()
if data_param:
data_partitioner.initialise(
data_param=data_param,
new_partition=False,
ratios=None,
data_split_file=system_param.dataset_split_file)
# initialise data input
self.app.initialise_dataset_loader(data_param, self.app_param,
data_partitioner)
self.app.initialise_evaluator(eval_param)
def run(self, application):
"""
This is the main application logic for evaluation.
        Computation of all metrics for all subjects is delegated to an
        Evaluator object owned by the application object. The resulting
metrics are aggregated as defined by the evaluation classes and
output to one or more csv files (based on their 'group_by' headings).
For example, per-subject metrics will be in one file, per-label-class
metrics will be in another and per-subject-per-class will be in a
third.
:return:
"""
start_time = time.time()
try:
if not os.path.exists(self.eval_param.save_csv_dir):
os.makedirs(self.eval_param.save_csv_dir)
# iteratively run the graph
all_results = application.evaluator.evaluate()
for group_by, data_frame in all_results.items():
if group_by == (None,):
csv_id = ''
else:
csv_id = '_'.join(group_by)
                with open(os.path.join(self.eval_param.save_csv_dir,
                                       'eval_' + csv_id + '.csv'), 'w') as csv_file:
                    csv_file.write(data_frame.reset_index().to_csv(index=False))
except KeyboardInterrupt:
tf.logging.warning('User cancelled application')
except RuntimeError:
import sys
import traceback
exc_type, exc_value, exc_traceback = sys.exc_info()
traceback.print_exception(
exc_type, exc_value, exc_traceback, file=sys.stdout)
finally:
tf.logging.info('Cleaning up...')
tf.logging.info(
"%s stopped (time in second %.2f).",
type(application).__name__, (time.time() - start_time))
# -*- coding: utf-8 -*-
"""
/***************************************************************************
Roadnet
A QGIS plugin
Roadnet is a plugin used for maintaining a local street gazetteer.
-------------------
begin : 2014-12-09
git sha : $Format:%H$
copyright : (C) 2014 by thinkWhere
email : [email protected]
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
import os
from PyQt4.QtCore import *
from PyQt4.QtGui import (
QMessageBox,
QPixmap,
QIcon,
QDesktopServices)
from PyQt4.QtSql import QSqlDatabase
from qgis.utils import *
from qgis.core import *
from esu_selector_tool import EsuSelectorTool
from roadnet_dialog import (
AdminMetadataDlg,
ChPwdDlg,
AboutDlg,
ExportLsgDlg,
ExportLsgShapefileDlg,
ExportPolyDlg,
ExportsLorDlg,
ExportsSwrfDlg,
LsgLookupDlg,
SrwrLookupDlg,
StreetBrowserDlg,
StreetReportsDlg,
ValidationDlg)
from generic_functions import ipdb_breakpoint
from street_browser.street_browser import StreetBrowser
from exports.exports import (
ExportDTF,
ExportSRWR,
ExportLOR,
ExportLsgShp,
ExportPoly)
from admin.admin_menu import ExportStreetReport
from admin.metadata import Metadata
from admin.lsg_lookup import LsgLookUp
from admin.srwr_lookup import SrwrLookup
from admin.validation import Validation
from admin.update_symbology import UpdateSymbology
from gui.toolbar import RoadnetToolbar
from geometry.esu_edit_handler import EsuEditHandler
from geometry.rdpoly_edit_handler import RdpolyEditHandler
from rn_menu.change_pwd import ChangePwd
from rn_menu.about import About
from ramp.ramp import Ramp
import config
import database
import login
import params_and_settings
import roadnet_exceptions as rn_except
import vector_layers
__author__ = 'matthew.walsh'
class Roadnet:
"""
QGIS plugin for managing street gazetteer data. thinkWhere 2015.
"""
def __init__(self, iface):
"""
Connect the plugin to the QGIS interface. This code is run every time
that QGIS boots.
:param iface: QGIS interface
:return:
"""
if config.DEBUG_MODE:
print('DEBUG_MODE: Roadnet.__init__ called')
self.iface = iface # Save reference to the QGIS interface
self.canvas = self.iface.mapCanvas()
self.plugin_dir = os.path.dirname(__file__)
self.clean_rdpoly = None
self.db = None
self.esu = None
self.model = None
self.rdpoly = None
self.roadnet_started = False
self.selector_tool = None
self.street_browser = None
self.street_browser_dk = None
self.toolbar = None
# Setup params file
params_file_path = os.path.join(self.plugin_dir, 'Params.xml')
self.params_file_handler = params_and_settings.ParamsFileHandler(params_file_path)
try:
self.params_file_handler.validate_params_file()
except rn_except.QMessageBoxWarningError: # Parent of two different Params errors
return
self.params = self.params_file_handler.read_to_dictionary()
# Connect to Ramp
self.ramp = Ramp(self)
def initGui(self):
"""
Set up the GUI components. This code is only run when the plugin has
been activated in the plugin manager.
:return:
"""
if config.DEBUG_MODE:
print('DEBUG_MODE: initGui called')
self.init_toolbar()
self.toolbar.set_state('init')
def start_roadnet(self):
"""
Start the plugin. Log in the user, connect to database, load layers,
set toolbar up appropriately.
"""
if config.DEBUG_MODE:
print('DEBUG_MODE: Starting roadNet')
# Check the database
if (self.params['RNDataStorePath'] == '') or (self.params['DbName'] == ''):
if not self.run_change_db_path():
return
db_path = os.path.join(self.params['RNDataStorePath'],
self.params['DbName'])
try:
database.check_file(db_path)
except IOError:
if not self.run_change_db_path():
return
# Log the user in
login.login_and_get_role(self.params)
self.toolbar.set_state(self.params['role'])
if self.params['role'] == 'init':
return
# Open database and model
self.db = database.open_working_copy(self.params) # params knows role
database.update_geometry_statistics(self.db)
self.model = database.get_model(self.db)
# Add layers + connect edit signals, zoom to rdpoly
self.add_rdpoly_layer() # Layer added as self.rdpoly
self.add_esu_layer() # Layer added as self.esu + selector tool init
self.params['session_includes_edits'] = False
# Create the street browser instance
if config.DEBUG_MODE:
print('DEBUG_MODE: Initialising street browser')
self.street_browser_dk = StreetBrowserDlg(self.params)
self.street_browser_dk.setWindowFlags(Qt.WindowMaximizeButtonHint | Qt.WindowMinimizeButtonHint)
rn_icon = QIcon()
rn_icon.addPixmap(QPixmap(os.path.join(self.plugin_dir, "image", "rn_logo_v2.png")))
self.street_browser_dk.setWindowIcon(rn_icon)
self.street_browser = StreetBrowser(self.iface, self.street_browser_dk, self.model, self.db, self.params)
self.disable_srwr() # Hide SRWR tab
self.street_browser.set_buttons_initial_state(self.params['role'])
if config.DEBUG_MODE:
print('DEBUG_MODE: Initialising street selector tool')
# Initialise selector tool
self.selector_tool = EsuSelectorTool(self.street_browser_dk,
self.iface,
self.esu,
self.toolbar,
self.db,
self.street_browser.mapper)
# Start RAMP
if self.params['RAMP'] == 'true':
self.ramp.start_ramp()
self.roadnet_started = True
def stop_roadnet(self):
"""
Stop the plugin. Close windows, disconnect and save databases, reset
toolbars to initial state.
"""
if config.DEBUG_MODE:
print('DEBUG_MODE: Stopping roadNet')
# Stop RAMP, then reinitialise
if self.ramp.ramp_started:
self.ramp.stop_ramp()
self.ramp = Ramp(self)
# Unset the street selector and reset toolbar
if self.iface.mapCanvas().mapTool(): # Tool is None if roadNet just stopped
current_tool = self.iface.mapCanvas().mapTool().toolName()
if current_tool == "ESU SELECTOR":
self.selector_tool.unset_map_tool()
# Reinitialise toolbar to reflect changes in RAMP settings
self.toolbar.toolbar = None # Delete previous toolbar instance
self.init_toolbar()
# Remove layers
for vlayer in [self.esu, self.rdpoly]:
vlayer.layerDeleted.disconnect() # Disconnect auto-reload signal
try:
vector_layers.remove_spatialite_layer(vlayer, self.iface)
except rn_except.RemoveNonExistentLayerPopupError:
pass
self.esu = None
self.rdpoly = None
# Reset street browser and other components
self.street_browser_dk.close()
self.street_browser_dk = None
self.street_browser = None
self.model = None
# Disconnect database, and save if necessary
connection_name = self.db.connectionName()
self.db.close()
self.db = None
QSqlDatabase.removeDatabase(connection_name)
if not config.DEBUG_MODE:
database.update_sqlite_files(self.params)
# Update params file
self.params_file_handler.update_xml_file(self.params)
self.roadnet_started = False
def tr(self, message):
return QCoreApplication.translate('Roadnet', message)
def init_toolbar(self):
# toolbar init
if self.params['RAMP'] == 'true':
with_ramp_flag = True
else:
with_ramp_flag = False
self.toolbar = RoadnetToolbar(self.iface, self.plugin_dir, with_ramp_flag)
# Roadnet tools
self.toolbar.start_rn.triggered.connect(lambda: self.start_roadnet())
self.toolbar.stop_rn.triggered.connect(lambda: self.stop_roadnet())
self.toolbar.street_sel_btn.triggered.connect(self.activate_esu_selector)
self.toolbar.sb_btn.triggered.connect(self.run_sb)
self.toolbar.change_db_path.triggered.connect(self.run_change_db_path)
self.toolbar.create_restore.triggered.connect(self.run_db_restore_point)
self.toolbar.change_pwd.triggered.connect(self.run_change_pwd)
self.toolbar.about.triggered.connect(self.run_about)
self.toolbar.settings.triggered.connect(self.run_settings)
# help menu
self.toolbar.help.triggered.connect(self.run_help)
# export menu
self.toolbar.exp_lgs.triggered.connect(self.run_lsg_exp)
self.toolbar.exp_srwr.triggered.connect(self.run_srwr_exp)
self.toolbar.exp_list_roads.triggered.connect(self.run_lor_exp)
self.toolbar.exp_maintain_poly.triggered.connect(self.run_export_poly)
self.toolbar.exp_lsg_shp.triggered.connect(self.run_export_esu)
# Admin tools
self.toolbar.street_rpt.triggered.connect(self.run_street_report)
self.toolbar.meta_menu.triggered.connect(self.run_metadata)
self.toolbar.edit_lsg_lu.triggered.connect(self.run_lsg_lookup)
self.toolbar.edit_srwr_lu.triggered.connect(self.run_srwr_lookup)
self.toolbar.validation_rpt.triggered.connect(self.run_validation)
self.toolbar.clean_rdpoly.triggered.connect(self.run_clean_rdpoly_symbology)
# RAMP items
if self.params['RAMP'] == 'true':
self.toolbar.mcl_auto_number_btn.triggered.connect(self.ramp.run_mcl_auto_number)
self.toolbar.mcl_select_btn.triggered.connect(self.ramp.run_ramp_mcl_select)
self.toolbar.rdpoly_select_btn.triggered.connect(self.ramp.run_ramp_rdpoly_select)
self.toolbar.load_layers.triggered.connect(self.ramp.run_ramp_load_layers)
self.toolbar.road_length.triggered.connect(self.ramp.run_ramp_road_length)
self.toolbar.export_wdm.triggered.connect(self.ramp.run_ramp_export_wdm)
def activate_esu_selector(self):
"""
Fire on esu selector button. Sets reference to ESU Graphic layer and
activates the street selector tool.
"""
self.iface.setActiveLayer(self.esu)
self.iface.mapCanvas().setMapTool(self.selector_tool)
def unload(self):
"""
Removes the plugin menu item and sb_icon from QGIS GUI
"""
if config.DEBUG_MODE:
print('DEBUG_MODE: unload called')
lock_file = os.path.join(self.params['RNDataStorePath'], 'RNLock')
if os.path.isfile(lock_file):
os.remove(lock_file)
if self.roadnet_started:
self.stop_roadnet()
if self.toolbar: # No toolbar exists if Params file was missing
self.toolbar.toolbar = None
def run_db_restore_point(self):
"""
Saves a copy of the working database as <database>_restore.sqlite.
:return: void
"""
database.db_restore_point(self.params)
def run_change_db_path(self):
"""
function that shows the db path change dialog window
:return: [bool] True if the user clicks on OK, False if on Cancel
"""
return database.change_db_path(self.params, self.params_file_handler)
def run_change_pwd(self):
"""
function that changes the access password for the current user
:return:
"""
self.change_pwd_dlg = ChPwdDlg()
change_pwd = ChangePwd(self.change_pwd_dlg,
self.iface,
self.db,
self.plugin_dir,
self.params)
self.change_pwd_dlg.exec_()
del change_pwd
def run_about(self):
"""
function that shows the about window with information
        on plug-in version, copyright, and licensing
"""
about_dlg = AboutDlg()
about_dlg.setWindowFlags(Qt.CustomizeWindowHint | Qt.WindowTitleHint)
about = About(about_dlg, self.plugin_dir)
about_dlg.exec_()
del about
def run_sb(self):
"""
        Show the street browser dialog window; if it is already visible, raise it to the front and give it focus.
"""
self.street_browser_dk.signals.closed_sb.connect(self.street_browser.remove_coords)
if self.street_browser_dk.isVisible():
self.street_browser_dk.activateWindow()
if self.street_browser_dk.isMinimized():
self.street_browser_dk.showNormal()
else:
self.street_browser_dk.show()
def run_lsg_exp(self):
"""
function that shows the export LSG dialog window
"""
self.export_lsg_dk = ExportLsgDlg()
self.export_lsg = ExportDTF(self.iface, self.export_lsg_dk, self.params, self.db)
self.export_lsg_dk.exec_()
def run_srwr_exp(self):
"""
function that shows the export SRWR dialog window
"""
self.export_swrf_dk = ExportsSwrfDlg()
self.export_srwr = ExportSRWR(self.iface, self.export_swrf_dk, self.params, self.db)
self.export_swrf_dk.exec_()
def run_lor_exp(self):
"""
function that shows the export list of roads dialog window
"""
self.export_lor_dk = ExportsLorDlg()
self.export_lor = ExportLOR(self.iface, self.export_lor_dk, self.db)
self.export_lor_dk.exec_()
def run_export_esu(self):
"""
function that exports ESU streets line layer
"""
self.iface.setActiveLayer(self.esu)
self.export_lsg_shp_dk = ExportLsgShapefileDlg()
self.export_lsg_shp = ExportLsgShp(self.iface, self.export_lsg_shp_dk, self.db, self.params)
self.export_lsg_shp_dk.exec_()
def run_export_poly(self):
"""
function that exports polygons layer
:return:
"""
self.iface.setActiveLayer(self.rdpoly)
        self.export_polygons_dk = ExportPolyDlg()
        self.export_poly = ExportPoly(self.iface, self.export_polygons_dk, self.db)
        self.export_polygons_dk.exec_()
def run_street_report(self):
"""
function that shows the run street report dialog window
"""
self.street_reports_dlg = StreetReportsDlg()
self.export_street_reports = ExportStreetReport(
self.iface, self.db, self.street_reports_dlg, self.params)
self.street_reports_dlg.exec_()
def run_metadata(self):
"""
Initialise and display the metadata information window
:return:
"""
# Initialise metadata each time dialog is launched
self.admin_metadata_dlg = AdminMetadataDlg()
self.metadata = Metadata(self.iface, self.db, self.admin_metadata_dlg,
self.params)
self.admin_metadata_dlg.show()
def run_lsg_lookup(self):
"""
Open the LSG lookup definition dialog window
:return:
"""
self.lsg_lookup_dlg = LsgLookupDlg()
self.lsg_lookup = LsgLookUp(self.iface, self.db, self.lsg_lookup_dlg)
self.lsg_lookup_dlg.show()
def run_srwr_lookup(self):
"""
Open the SRWR lookup definition dialog window
"""
self.srwr_lookup_dlg = SrwrLookupDlg()
self.srwr_lookup = SrwrLookup(self.iface, self.db, self.srwr_lookup_dlg)
self.srwr_lookup_dlg.exec_()
def run_validation(self):
"""
function that runs the validation report window
:return:
"""
self.validation_dlg = ValidationDlg()
self.validation = Validation(self.iface, self.db, self.validation_dlg,
self.plugin_dir, self.params)
self.validation_dlg.exec_()
def run_clean_rdpoly_symbology(self):
"""
Run the road polygon symbology cleanup tool.
:return:
"""
self.clean_rdpoly = UpdateSymbology(self.db, self.rdpoly, self.esu)
self.clean_rdpoly.show_symbology_dlg()
def run_help(self):
"""
Open the help pdf in the default web browser
"""
        help_url = QUrl("http://www.thinkwhere.com/index.php/download_file/240/")
        if not QDesktopServices.openUrl(help_url):
no_browser_msg_box = QMessageBox(QMessageBox.Warning, " ", "roadNet cannot find a web browser "
"to open the help page", QMessageBox.Ok, None)
no_browser_msg_box.setWindowFlags(Qt.CustomizeWindowHint | Qt.WindowTitleHint)
no_browser_msg_box.exec_()
return
def run_settings(self):
"""
Show the settings dialog
"""
updated_params = params_and_settings.update_via_dialog(self.params)
self.params_file_handler.update_xml_file(updated_params)
def disable_srwr(self):
"""
Initially make the SRWR tab invisible.
"""
self.street_browser_dk.ui.srwrRecordsGroupBox.setVisible(False)
def add_rdpoly_layer(self):
"""
        Load the Road Polygon layer from the spatialite file. Connect triggers
        for editing, and so that the layer can reload itself if removed.
"""
if config.DEBUG_MODE:
print("DEBUG_MODE: Adding Road Polygon layer.")
self.rdpoly = vector_layers.add_styled_spatialite_layer(
'rdpoly', 'Road Polygons', self.params['working_db_path'], self.iface,
style='rdpoly')
self.rdpoly.editingStarted.connect(self.editing_rdpoly_begin)
self.rdpoly.editingStopped.connect(self.editing_rdpoly_end)
self.rdpoly.layerDeleted.connect(self.add_rdpoly_layer)
def add_esu_layer(self):
"""
        Load the ESU layer from the spatialite file. Connect triggers for
        editing, and so that the layer can reload itself if removed.
"""
if config.DEBUG_MODE:
print("DEBUG_MODE: Adding ESU layer.")
self.esu = vector_layers.add_styled_spatialite_layer(
'esu', 'ESU Graphic', self.params['working_db_path'], self.iface,
style='esu')
self.esu.editingStarted.connect(self.editing_esu_begin)
self.esu.editingStopped.connect(self.editing_esu_end)
self.esu.layerDeleted.connect(self.add_esu_layer) # Reload if removed
# Create the selector tool instance
if self.roadnet_started:
if config.DEBUG_MODE:
print('DEBUG_MODE: Re-initialising street selector tool')
# Recreate selector tool
self.selector_tool = EsuSelectorTool(self.street_browser_dk,
self.iface,
self.esu,
self.toolbar,
self.db,
self.street_browser.mapper)
def editing_esu_begin(self):
"""
Creates classes that listen for various edit events on the Esu layer
"""
if self.params['AutoSplitESUs'] == 'true':
handle_intersect_flag = True
else:
handle_intersect_flag = False
# Disable attributes dialog
QSettings().setValue(
'/qgis/digitizing/disable_enter_attribute_values_dialog', True)
self.esu_edit_handler = EsuEditHandler(
self.iface, self.esu, self.db, self.params, handle_intersect_flag)
def editing_esu_end(self):
self.esu_edit_handler = None
self.params['session_includes_edits'] = True
# Re-enable attributes dialog
QSettings().setValue(
'/qgis/digitizing/disable_enter_attribute_values_dialog', False)
        if self.esu.isEditable():
            # Rolling back changes destroys the geometry_handler class but
            # the layer remains editable. In this case, recreate it.
            self.editing_esu_begin()
def editing_rdpoly_begin(self):
if self.params['PreventOverlappingPolygons'] == 'true':
handle_intersect_flag = True
else:
handle_intersect_flag = False
# Disable attributes dialog
QSettings().setValue(
'/qgis/digitizing/disable_enter_attribute_values_dialog', True)
self.rdpoly_edit_handler = RdpolyEditHandler(
self.iface, self.rdpoly, self.db, self.params, handle_intersect_flag)
def editing_rdpoly_end(self):
self.rdpoly_edit_handler = None
self.params['session_includes_edits'] = True
# Re-enable attributes dialog
QSettings().setValue(
'/qgis/digitizing/disable_enter_attribute_values_dialog', False)
        if self.rdpoly.isEditable():
            # Rolling back changes destroys the geometry_handler class but
            # the layer remains editable. In this case, recreate it.
            self.editing_rdpoly_begin()
def get_multiple_part_esus(self):
"""
Helper function, not used in roadNet, that can be called manually
to list ESUs whose geometries have more than one part.
:return: list of esu ids
"""
esu_ids = []
for f in self.esu.getFeatures():
g = QgsGeometry(f.geometry()) # Make a copy
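            # deletePart(1) only succeeds when a second geometry part exists,
            # so a successful call flags a multi-part ESU.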
if g.deletePart(1):
esu_ids.append(f['esu_id'])
return esu_ids
""" Solution to Day 14
from: http://adventofcode.com/2016/day/14
--- Day 14: One-Time Pad ---
In order to communicate securely with Santa while you're on this mission, you've been using a
one-time pad that you generate using a pre-agreed algorithm. Unfortunately, you've run out of keys
in your one-time pad, and so you need to generate some more.
To generate keys, you first get a stream of random data by taking the MD5 of a pre-arranged salt
(your puzzle input) and an increasing integer index (starting with 0, and represented in decimal);
the resulting MD5 hash should be represented as a string of lowercase hexadecimal digits.
However, not all of these MD5 hashes are keys, and you need 64 new keys for your one-time pad. A
hash is a key only if:
It contains three of the same character in a row, like 777. Only consider the first such triplet in
a hash.
One of the next 1000 hashes in the stream contains that same character five times in a row, like
77777.
Considering future hashes for five-of-a-kind sequences does not cause those hashes to be skipped;
instead, regardless of whether the current hash is a key, always resume testing for keys starting
with the very next hash.
For example, if the pre-arranged salt is abc:
The first index which produces a triple is 18, because the MD5 hash of abc18 contains
...cc38887a5.... However, index 18 does not count as a key for your one-time pad, because none of
the next thousand hashes (index 19 through index 1018) contain 88888.
The next index which produces a triple is 39; the hash of abc39 contains eee. It is also the first
key: one of the next thousand hashes (the one at index 816) contains eeeee.
None of the next six triples are keys, but the one after that, at index 92, is: it contains 999 and
index 200 contains 99999.
Eventually, index 22728 meets all of the criteria to generate the 64th key.
So, using our example salt of abc, index 22728 produces the 64th key.
Given the actual salt in your puzzle input, what index produces your 64th one-time pad key?
"""
import hashlib
import re
def generates_key(salt, index):
"""Returns true if the hash of salt and the index contains one character three times in a row,
and one of the next 1000 hashes with the same salt and an increasing index contains the same
character five times in a row"""
starting_hash = hashlib.md5(str.encode(salt + str(index))).hexdigest()
match = re.search(r'([a-z0-9])\1\1', starting_hash)
if match is None:
return False
    repeat_target = match.group(1) * 5
for i in range(index + 1, index + 1001):
new_hash = hashlib.md5(str.encode(salt + str(i))).hexdigest()
if repeat_target in new_hash:
return True
return False
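
# generates_key() recomputes up to 1000 look-ahead hashes for every index it
# tests. A minimal caching sketch (standard library only; not wired into
# main() below) that avoids the repeated work:
from functools import lru_cache

@lru_cache(maxsize=None)
def stream_hash(salt, index):
    """Return the lowercase hex MD5 digest of salt + str(index), memoized."""
    return hashlib.md5(str.encode(salt + str(index))).hexdigest()
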
def main():
"""Execution of solution"""
salt = 'abc'
index = 0
key_count = 0
while key_count < 64:
if generates_key(salt, index):
key_count += 1
index += 1
print(index - 1)
if __name__ == "__main__":
main()
##############################################################################
#
# OSIS stands for Open Student Information System. It's an application
# designed to manage the core business of higher education institutions,
# such as universities, faculties, institutes and professional schools.
# The core business involves the administration of students, teachers,
# courses, programs and so on.
#
# Copyright (C) 2015-2020 Université catholique de Louvain (http://www.uclouvain.be)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# A copy of this license - GNU General Public License - is available
# at the root of the source code of this program. If not,
# see http://www.gnu.org/licenses/.
#
##############################################################################
from typing import List
from django.db import transaction
from program_management.ddd.command import PostponeProgramTreeCommand, CopyProgramTreeToNextYearCommand
from program_management.ddd.domain.program_tree import ProgramTreeIdentity
from program_management.ddd.domain.service.calculate_end_postponement import CalculateEndPostponement
from program_management.ddd.repositories import program_tree_version as tree_version_repo
from program_management.ddd.service.write import copy_program_tree_service
@transaction.atomic()
def postpone_program_tree(
postpone_cmd: 'PostponeProgramTreeCommand'
) -> List['ProgramTreeIdentity']:
identities_created = []
# GIVEN
from_year = postpone_cmd.from_year
end_postponement_year = CalculateEndPostponement.calculate_end_postponement_year_program_tree(
identity=ProgramTreeIdentity(code=postpone_cmd.from_code, year=postpone_cmd.from_year),
repository=tree_version_repo.ProgramTreeVersionRepository()
)
# WHEN
while from_year < end_postponement_year:
identity_next_year = copy_program_tree_service.copy_program_tree_to_next_year(
copy_cmd=CopyProgramTreeToNextYearCommand(
code=postpone_cmd.from_code,
year=from_year,
)
)
# THEN
identities_created.append(identity_next_year)
from_year += 1
return identities_created
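
# Usage sketch (hypothetical code/year values; requires a configured OSIS
# Django environment, so shown as comments only):
#     cmd = PostponeProgramTreeCommand(from_code='LDROI200M', from_year=2020)
#     identities = postpone_program_tree(cmd)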
# -*- coding: utf-8 -*-
'''
SMS tests.
'''
from dci_notify.extensions import mail
from dci_notify.sms import split_msg, send_sms
class TestSplitMessage:
def test_split_msg_one_chunk(self):
msg = 'a' * 130
chunks = split_msg(msg)
        assert len(chunks) == 1
def test_split_msg_multi_chunk(self):
msg = 'a' * 500
chunks = split_msg(msg)
        assert len(chunks) == 4
def test_split_msg_line_breaks(self):
msg = 'a' * 120 + '\n' + 'b' * 40
chunks = split_msg(msg)
        assert len(chunks) == 2
assert len(chunks[0]) == 120
def test_split_msg_one_line(self):
msg = 'a' * 160 + 'b' * 20
chunks = split_msg(msg)
        assert len(chunks) == 2
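
# For reference, a minimal split_msg sketch consistent with the assertions
# above (a hypothetical reimplementation; the real one lives in
# dci_notify.sms and is assumed to use 130-character chunks that prefer to
# break on newlines):
def split_msg_sketch(msg, size=130):
    chunks = []
    while msg:
        if len(msg) <= size:
            chunks.append(msg)
            break
        cut = msg.rfind('\n', 0, size + 1)
        if cut < 1:
            cut = size
        chunks.append(msg[:cut])
        msg = msg[cut:].lstrip('\n')
    return chunks
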
class TestSendMessage:
def test_send_sms_single_message(self, app):
with mail.record_messages() as outbox:
send_sms(carrier='verizon',
number=5551112222,
message='message',
subject='subject')
            assert len(outbox) == 1
assert outbox[0].subject == 'subject'
assert outbox[0].body == 'message'
def test_send_sms_multiple_messages(self, app):
with mail.record_messages() as outbox:
send_sms(carrier='verizon',
number=5551112222,
message='m' * 300,
subject='subject')
            assert len(outbox) == 3
assert outbox[0].subject == 'subject'
assert outbox[0].body == 'm' * 130
def test_send_sms_with_conn(self, app):
with mail.record_messages() as outbox:
with mail.connect() as conn:
send_sms(carrier='verizon',
number=5551112222,
message='m' * 300,
subject='subject',
conn=conn)
            assert len(outbox) == 3
#!/usr/bin/env python3
"""Build Skyfield's internal table of constellation boundaries.
See:
https://iopscience.iop.org/article/10.1086/132034/pdf
http://cdsarc.u-strasbg.fr/viz-bin/Cat?VI/42
"""
import argparse
import os
import sys
import numpy as np
from numpy import array, searchsorted
from skyfield import api
URL = 'http://cdsarc.u-strasbg.fr/ftp/VI/42/data.dat'
def main():
with api.load.open(URL) as f:
lines = list(f)
unique_ra = set()
unique_dec = set()
fracs = set()
boundaries = []
for line in lines:
fields = line.split()
ra_low = extend(fields[0])
ra_up = extend(fields[1])
de_low = extend(fields[2])
const = fields[3].decode('ascii')
print(ra_low, const)
#print(ra_int(ra_low))
#fracs.add(fields[0].split(b'.')[1])
unique_ra.add(ra_low)
unique_ra.add(ra_up)
unique_dec.add(de_low)
fracs.add(const)
boundaries.append([ra_low, ra_up, de_low, const])
print(sorted(fracs))
print('constellations:', len(fracs))
print('unique_ra:', len(unique_ra))
print('unique_dec:', len(unique_dec))
sorted_consts = array(sorted(fracs))
sorted_ra = array(sorted(unique_ra))
sorted_dec = array(sorted(unique_dec))
assert sorted_ra[0] == 0
assert sorted_ra[-1] == 24
assert sorted_dec[0] == -90
assert sorted_dec[-1] == 88
sorted_ra = sorted_ra[1:]
sorted_dec = sorted_dec[1:]
print('bytes', sorted_ra.nbytes)
print('bytes', sorted_dec.nbytes)
#grid = [[5] * len(unique_dec)] * len(unique_ra)
#grid = array(grid, 'i1')
row = [-128] * len(sorted_ra)
grid = []
i = 0
de = -90.0
for ra_low, ra_up, de_low, const in boundaries[::-1]:
if de_low > de:
grid.append(row)
row = list(row)
de = de_low
i0 = searchsorted(sorted_ra, ra_low, side='right')
i1 = searchsorted(sorted_ra, ra_up, side='right')
c = searchsorted(sorted_consts, const)
# if ra_up == 24.0:
# print(sorted_ra, ra_low, ra_up)
# print(i0, i1, '?', len(row))
# exit()
for j in range(i0, i1):
row[j] = c
grid.append(row)
grid.append(row)
grid.append(row)
#grid = grid[::-1]
grid = array(grid, 'i1').T
assert len(sorted_ra) == 236
assert searchsorted(sorted_ra, 0, side='right') == 0
assert searchsorted(sorted_ra, 0.06, side='right') == 0
assert searchsorted(sorted_ra, 0.07, side='right') == 1
assert searchsorted(sorted_ra, 23.8, side='right') == 234
assert searchsorted(sorted_ra, 23.9, side='right') == 235
assert searchsorted(sorted_ra, 24.0, side='right') == 236
sorted_ra = sorted_ra[:-1]
assert len(sorted_ra) == 235
assert searchsorted(sorted_ra, 0) == 0
assert searchsorted(sorted_ra, 0.06) == 0
assert searchsorted(sorted_ra, 0.07) == 1
assert searchsorted(sorted_ra, 23.8) == 234
assert searchsorted(sorted_ra, 23.9) == 235
assert searchsorted(sorted_ra, 24.0) == 235
print(sorted_consts[57])
print(grid)
print('shape', grid.shape)
print('bytes', grid.nbytes)
for ra, dec in [(0, 0), (0.1, 0.1),
(5.59, -5.45),
(16, 80), (16, 90), (16, -90), (24, 360),
([0, 16], [0, 80])]:
c = compute_constellation(ra, dec, sorted_ra, sorted_dec,
sorted_consts, grid)
print('=', ra, dec, c)
path = os.path.dirname(__file__) + '/../skyfield/data/constellations'
np.savez_compressed(
path,
sorted_ra=sorted_ra,
sorted_dec=sorted_dec,
radec_to_index=grid,
indexed_abbreviations=sorted_consts,
)
def compute_constellation(ra, dec, sorted_ra, sorted_dec, sorted_consts, grid):
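    """Map RA/dec (hours, degrees) to constellation abbreviations.

    searchsorted() finds the grid cell whose lower boundary each coordinate
    falls above; grid[i, j] then indexes into sorted_consts.
    """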
i = searchsorted(sorted_ra, ra)
j = searchsorted(sorted_dec, dec)
#print(dec, sorted_dec)
#print(ra, sorted_ra)
print("ra,dec", ra, dec)
print("i,j", i, j)
return sorted_consts[grid[i, j]]
def extend(s):
"""Return a float for `s` extended to machine precision.
Takes a string like '13.6667', passes it to `float()`,
and snaps it to the nearest whole second.
"""
return round(3600 * float(s)) / 3600.
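
# For example, extend('13.6667') returns 13.666666...: 13.6667 hours is
# 49200.12 seconds, which rounds to 49200 s, i.e. exactly 13 h 40 m.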
# Some discarded code that I might want to revive someday: how to grow
# and shrink a list of segments as new ones supersede old ones on the
# way down the sky.
def segment_experiment():
assert insert_segment([0, 4, 7, 10], 0, 3) == [0, 3, 4, 7, 10]
assert insert_segment([0, 4, 7, 10], 4, 7) == [0, 4, 7, 10]
assert insert_segment([0, 4, 7, 10], 6, 9) == [0, 4, 6, 9, 10]
assert insert_segment([0, 4, 7, 10], 7, 10) == [0, 4, 7, 10]
assert insert_segment([0, 4, 7, 10], 0, 10) == [0, 10]
assert insert_segment([0, 10], 4, 7) == [0, 4, 7, 10]
assert insert_segment([], 4, 7) == [4, 7]
segments = []
n = 0
for ra_low, ra_up, de_low in boundaries[::-1]:
segments = insert_segment(segments, ra_low, ra_up)
print(len(segments), end=' ')
n += len(segments)
print(n)
def insert_segment(ra_list, ra_low, ra_up):
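    """Insert the segment [ra_low, ra_up] into a sorted boundary list,
    dropping any existing boundaries that the new segment covers (see the
    asserts in segment_experiment above for examples)."""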
new = []
i = 0
while i < len(ra_list) and ra_list[i] < ra_low:
new.append(ra_list[i])
i += 1
new.append(ra_low)
new.append(ra_up)
while i < len(ra_list) and ra_list[i] <= ra_up:
i += 1
while i < len(ra_list):
new.append(ra_list[i])
i += 1
return new
if __name__ == '__main__':
main()
#!/usr/bin/env python
# Copyright (c) 2017-present, Facebook, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##############################################################################
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import argparse
import h5py
import json
import os
import imageio
import sys
import cityscapesscripts.evaluation.instances2dict_with_polygons as cs
import detectron.utils.segms as segms_util
import detectron.utils.boxes as bboxs_util
def parse_args():
parser = argparse.ArgumentParser(description='Convert dataset')
parser.add_argument(
'--dataset', help="cocostuff, cityscapes", default=None, type=str)
parser.add_argument(
'--outdir', help="output dir for json files", default=None, type=str)
parser.add_argument(
'--datadir', help="data dir for annotations to be converted",
default=None, type=str)
if len(sys.argv) == 1:
parser.print_help()
sys.exit(1)
return parser.parse_args()
def convert_coco_stuff_mat(data_dir, out_dir):
"""Convert to png and save json with path. This currently only contains
the segmentation labels for objects+stuff in cocostuff - if we need to
combine with other labels from original COCO that will be a TODO."""
sets = ['train', 'val']
categories = []
json_name = 'coco_stuff_%s.json'
ann_dict = {}
for data_set in sets:
file_list = os.path.join(data_dir, '%s.txt')
images = []
with open(file_list % data_set) as f:
for img_id, img_name in enumerate(f):
img_name = img_name.replace('coco', 'COCO').strip('\n')
image = {}
mat_file = os.path.join(
data_dir, 'annotations/%s.mat' % img_name)
data = h5py.File(mat_file, 'r')
labelMap = data.get('S')
if len(categories) == 0:
labelNames = data.get('names')
for idx, n in enumerate(labelNames):
categories.append(
{"id": idx, "name": ''.join(chr(i) for i in data[
n[0]])})
ann_dict['categories'] = categories
imageio.imsave(
os.path.join(data_dir, img_name + '.png'), labelMap)
image['width'] = labelMap.shape[0]
image['height'] = labelMap.shape[1]
image['file_name'] = img_name
image['seg_file_name'] = img_name
image['id'] = img_id
images.append(image)
ann_dict['images'] = images
print("Num images: %s" % len(images))
with open(os.path.join(out_dir, json_name % data_set), 'wb') as outfile:
outfile.write(json.dumps(ann_dict))
# for Cityscapes
def getLabelID(instID):
if (instID < 1000):
return instID
else:
return int(instID / 1000)
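# For example, getLabelID(26002) returns 26: Cityscapes encodes instance IDs
# as label_id * 1000 + instance_index for classes that have instances.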
def convert_cityscapes_instance_only(
data_dir, out_dir):
"""Convert from cityscapes format to COCO instance seg format - polygons"""
sets = [
'gtFine_val',
# 'gtFine_train',
# 'gtFine_test',
# 'gtCoarse_train',
# 'gtCoarse_val',
# 'gtCoarse_train_extra'
]
ann_dirs = [
'gtFine_trainvaltest/gtFine/val',
# 'gtFine_trainvaltest/gtFine/train',
# 'gtFine_trainvaltest/gtFine/test',
# 'gtCoarse/train',
# 'gtCoarse/train_extra',
# 'gtCoarse/val'
]
json_name = 'instancesonly_filtered_%s.json'
ends_in = '%s_polygons.json'
img_id = 0
ann_id = 0
cat_id = 1
category_dict = {}
category_instancesonly = [
'person',
'rider',
'car',
'truck',
'bus',
'train',
'motorcycle',
'bicycle',
]
for data_set, ann_dir in zip(sets, ann_dirs):
print('Starting %s' % data_set)
ann_dict = {}
images = []
annotations = []
ann_dir = os.path.join(data_dir, ann_dir)
for root, _, files in os.walk(ann_dir):
for filename in files:
if filename.endswith(ends_in % data_set.split('_')[0]):
if len(images) % 50 == 0:
print("Processed %s images, %s annotations" % (
len(images), len(annotations)))
json_ann = json.load(open(os.path.join(root, filename)))
image = {}
image['id'] = img_id
img_id += 1
image['width'] = json_ann['imgWidth']
image['height'] = json_ann['imgHeight']
image['file_name'] = filename[:-len(
ends_in % data_set.split('_')[0])] + 'leftImg8bit.png'
image['seg_file_name'] = filename[:-len(
ends_in % data_set.split('_')[0])] + \
'%s_instanceIds.png' % data_set.split('_')[0]
images.append(image)
fullname = os.path.join(root, image['seg_file_name'])
objects = cs.instances2dict_with_polygons(
[fullname], verbose=False)[fullname]
for object_cls in objects:
if object_cls not in category_instancesonly:
continue # skip non-instance categories
for obj in objects[object_cls]:
if obj['contours'] == []:
print('Warning: empty contours.')
                                continue  # skip objects with empty contours
len_p = [len(p) for p in obj['contours']]
if min(len_p) <= 4:
print('Warning: invalid contours.')
                                continue  # skip objects with invalid contours
ann = {}
ann['id'] = ann_id
ann_id += 1
ann['image_id'] = image['id']
ann['segmentation'] = obj['contours']
if object_cls not in category_dict:
category_dict[object_cls] = cat_id
cat_id += 1
ann['category_id'] = category_dict[object_cls]
ann['iscrowd'] = 0
ann['area'] = obj['pixelCount']
ann['bbox'] = bboxs_util.xyxy_to_xywh(
segms_util.polys_to_boxes(
[ann['segmentation']])).tolist()[0]
annotations.append(ann)
ann_dict['images'] = images
categories = [{"id": category_dict[name], "name": name} for name in
category_dict]
ann_dict['categories'] = categories
ann_dict['annotations'] = annotations
print("Num categories: %s" % len(categories))
print("Num images: %s" % len(images))
print("Num annotations: %s" % len(annotations))
with open(os.path.join(out_dir, json_name % data_set), 'wb') as outfile:
outfile.write(json.dumps(ann_dict))
if __name__ == '__main__':
args = parse_args()
if args.dataset == "cityscapes_instance_only":
convert_cityscapes_instance_only(args.datadir, args.outdir)
elif args.dataset == "cocostuff":
convert_coco_stuff_mat(args.datadir, args.outdir)
else:
print("Dataset not supported: %s" % args.dataset)
# Copyright (c) 2015 Intel Corporation
# Copyright (c) 2015 ISPRAS
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from sahara.plugins.cdh import abstractversionhandler as avm
from sahara.plugins.cdh.v5_4_0 import cloudera_utils
from sahara.plugins.cdh.v5_4_0 import config_helper
from sahara.plugins.cdh.v5_4_0 import deploy
from sahara.plugins.cdh.v5_4_0 import edp_engine
from sahara.plugins.cdh.v5_4_0 import plugin_utils
from sahara.plugins.cdh.v5_4_0 import validation
class VersionHandler(avm.BaseVersionHandler):
def __init__(self):
super(VersionHandler, self).__init__()
self.config_helper = config_helper.ConfigHelperV540()
self.cloudera_utils = cloudera_utils.ClouderaUtilsV540()
self.plugin_utils = plugin_utils.PluginUtilsV540()
self.deploy = deploy
self.edp_engine = edp_engine
self.validation = validation.ValidatorV540()
def get_node_processes(self):
return {
"CLOUDERA": ['CLOUDERA_MANAGER'],
"HDFS": ['HDFS_NAMENODE', 'HDFS_DATANODE',
'HDFS_SECONDARYNAMENODE', 'HDFS_JOURNALNODE'],
"YARN": ['YARN_RESOURCEMANAGER', 'YARN_NODEMANAGER',
'YARN_JOBHISTORY', 'YARN_STANDBYRM'],
"OOZIE": ['OOZIE_SERVER'],
"HIVE": ['HIVE_SERVER2', 'HIVE_METASTORE', 'HIVE_WEBHCAT'],
"HUE": ['HUE_SERVER'],
"SPARK_ON_YARN": ['SPARK_YARN_HISTORY_SERVER'],
"ZOOKEEPER": ['ZOOKEEPER_SERVER'],
"HBASE": ['HBASE_MASTER', 'HBASE_REGIONSERVER'],
"FLUME": ['FLUME_AGENT'],
"IMPALA": ['IMPALA_CATALOGSERVER', 'IMPALA_STATESTORE', 'IMPALAD'],
"KS_INDEXER": ['KEY_VALUE_STORE_INDEXER'],
"SOLR": ['SOLR_SERVER'],
"SQOOP": ['SQOOP_SERVER'],
"SENTRY": ['SENTRY_SERVER'],
"KMS": ['KMS'],
"YARN_GATEWAY": [],
"RESOURCEMANAGER": [],
"NODEMANAGER": [],
"JOBHISTORY": [],
"HDFS_GATEWAY": [],
'DATANODE': [],
'NAMENODE': [],
'SECONDARYNAMENODE': [],
'JOURNALNODE': [],
'REGIONSERVER': [],
'MASTER': [],
'HIVEMETASTORE': [],
'HIVESERVER': [],
'WEBCAT': [],
'CATALOGSERVER': [],
'STATESTORE': [],
'IMPALAD': [],
}
def get_edp_engine(self, cluster, job_type):
oozie_type = self.edp_engine.EdpOozieEngine.get_supported_job_types()
spark_type = self.edp_engine.EdpSparkEngine.get_supported_job_types()
if job_type in oozie_type:
return self.edp_engine.EdpOozieEngine(cluster)
if job_type in spark_type:
return self.edp_engine.EdpSparkEngine(cluster)
return None
def get_edp_job_types(self):
return (edp_engine.EdpOozieEngine.get_supported_job_types() +
edp_engine.EdpSparkEngine.get_supported_job_types())
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import requests
import json
from uuid import uuid4
headers = {'content-type': 'application/json'}
def set_params(method, params):
"""Set params to query limesurvey"""
data = {'method': method, 'params': params, 'id': str(uuid4())}
return json.dumps(data)
def get_session_key(limedict):
"""This function receive a dictionary with connection parameters.
{ "url": "full path for remote control",
"username: "account name to be used"
"password" "password for account"}"""
url = limedict['url']
user = limedict['username']
password = limedict['password']
params = {'username': user, 'password': password}
data = set_params('get_session_key', params)
req = requests.post(url, data=data, headers=headers)
return {'token': req.json()['result'], 'user': user, 'url': url}
def list_surveys(session):
"""retrieve a list of surveys from current user"""
params = {'sUser': session['user'], 'sSessionKey': session['token']}
data = set_params('list_surveys', params)
req = requests.post(session['url'], data=data, headers=headers)
return req.text
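
# Usage sketch (hypothetical URL and credentials; both functions are defined
# above, and the endpoint must be a LimeSurvey RemoteControl 2 JSON-RPC URL):
if __name__ == '__main__':
    session = get_session_key({
        'url': 'https://survey.example.org/index.php/admin/remotecontrol',
        'username': 'admin',
        'password': 'secret',
    })
    print(list_surveys(session))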
import numpy as np
# maximum value representable with 8 quantization bits
QUANTIZE_BIT = 8
MAX_VALUE = (2 ** QUANTIZE_BIT) - 1
def deQuantize_scalar(x, a_min, a_max):
    """Reverse the quantization of a scalar value."""
    gain = (a_max - a_min) / MAX_VALUE
    return x * gain + a_min
def deQuantize(arr, a_min, a_max):
"""量子化を元に戻す"""
gain = (a_max - a_min) / MAX_VALUE
return arr * gain + a_min
def Quantize(arr, a_min, a_max):
    """Quantize an array to 8-bit integers."""
    value_range = a_max - a_min
    range_scale = value_range / MAX_VALUE
    return ((arr - a_min) / range_scale).astype(np.int64)
def reQuantize(arr, q_min, q_max, new_min, new_max):
mid = (q_max + q_min) / 2
gain = MAX_VALUE / (q_max - q_min)
# start vector
c_qt = (arr - mid) * gain + (MAX_VALUE / 2)
    return c_qt.astype(np.int64)
def q_inv(a_qt, a_min, a_max):
"""符号反転"""
return MAX_VALUE - a_qt, - a_max, - a_min
def q_add(a_qt, a_min, a_max, b_qt, b_min, b_max, debug=False):
"""加算"""
gain = (b_max - b_min) / (a_max - a_min)
    c_min = a_min + b_min
    c_max = a_max + b_max
    q_param = (a_max - a_min) / (c_max - c_min)
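    # Derivation: a = a_qt*ga + a_min and b = b_qt*gb + b_min, with each
    # g = (hi - lo) / MAX_VALUE. Since c_min = a_min + b_min, the offsets
    # cancel and c_qt = (b_qt*(gb/ga) + a_qt) * (ga/gc), i.e. 'gain' is
    # gb/ga and 'q_param' is ga/gc.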
# debug
if debug:
print("gain = %f(%x), q_param = %f(%x)" % (gain, int(gain * (2 ** 8)), q_param, int(q_param * (2 ** 8))))
# start vector
c_qt = b_qt * gain + a_qt
c_qt *= q_param
    return c_qt.astype(np.int64), c_min, c_max
def q_mul(a_qt, a_min, a_max, b_qt, b_min, b_max, debug=False):
"""乗算"""
Adash_max = a_max - a_min
Adash_min = 0.0
Bdash_max = b_max - b_min
Bdash_min = 0.0
# constant mul
if b_min < 0:
        # sign inversion
qt_A_bmin, A_bmin_min_inv, A_bmin_max_inv = q_inv(a_qt, a_min, a_max)
        # constant multiple
A_bmin_min = A_bmin_min_inv * -b_min
A_bmin_max = A_bmin_max_inv * -b_min
if debug:
print("SEL A INV")
else:
A_bmin_max = a_max * b_min
A_bmin_min = a_min * b_min
qt_A_bmin = a_qt
if a_min < 0:
qt_B_amin, B_amin_min_inv, B_amin_max_inv = q_inv(b_qt, b_min, b_max)
B_amin_max = B_amin_max_inv * -a_min
B_amin_min = B_amin_min_inv * -a_min
if debug:
print("SEL B INV")
else:
B_amin_max = b_max * a_min
B_amin_min = b_min * a_min
qt_B_amin = b_qt
# vector
AdBd_qt, AdBd_min, AdBd_max = q_mul_core(a_qt, Adash_min, Adash_max, b_qt, Bdash_min, Bdash_max, debug=debug)
C_qt_0, C_qt_0_min, C_qt_0_max = q_add(qt_A_bmin, A_bmin_min, A_bmin_max, qt_B_amin, B_amin_min, B_amin_max, debug=debug)
C_qt, c_min, c_max = q_add(AdBd_qt, AdBd_min, AdBd_max, C_qt_0, C_qt_0_min, C_qt_0_max, debug=debug)
if debug:
np.savetxt("AdBd_qt.txt", AdBd_qt, fmt="%d")
np.savetxt("qt_A_bmin.txt", qt_A_bmin, fmt="%d")
np.savetxt("qt_B_amin.txt", qt_B_amin, fmt="%d")
np.savetxt("C_qt_0.txt", C_qt_0, fmt="%d")
np.savetxt("C_qt.txt", C_qt, fmt="%d")
f1 = a_min * b_min
c_max_f = c_max - f1
c_min_f = c_min - f1
    return C_qt.astype(np.int64), c_min_f, c_max_f
def q_mul_core(a_qt, a_min, a_max, b_qt, b_min, b_max, debug=False):
"""乗算"""
gain_a = (a_max - a_min) / MAX_VALUE
gain_b = (b_max - b_min) / MAX_VALUE
    c_min = a_min * b_min
    c_max = a_max * b_max
    q_param = MAX_VALUE / (c_max - c_min)
p_gagb = gain_a * gain_b * q_param
p_gaob = gain_a * b_min * q_param
p_gboa = gain_b * a_min * q_param
if debug:
print("p_gagb = %f(%d), p_gaob = %f(%d), p_gboa = %f(%d)" %
(p_gagb, int(p_gagb * (2 ** 16)),
p_gaob, int(p_gaob * (2 ** 8)),
p_gboa, int(p_gboa * (2 ** 8))))
# start vector alu
    AB = (p_gagb * a_qt * b_qt).astype(np.int64)
c_qt = AB
# gaob_A = (a_qt * p_gaob).astype(np.int)
# gboa_B = (b_qt * p_gboa).astype(np.int)
# c_qt = AB + gaob_A + gboa_B
    return c_qt.astype(np.int64), c_min, c_max
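
# Round-trip sanity check for Quantize/deQuantize (a quick sketch, not a
# formal test; quantization truncates, so values match to within one step):
if __name__ == '__main__':
    x = np.array([-1.0, -0.25, 0.0, 0.5, 1.0])
    qt = Quantize(x, -1.0, 1.0)
    assert np.all(np.abs(deQuantize(qt, -1.0, 1.0) - x) <= 2.0 / MAX_VALUE)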
# -*- encoding: utf-8 -*-
"""Fixtures that are of general use."""
from __future__ import unicode_literals
import datetime
import faker as faker_
import pytest
from hamster_lib.lib import HamsterControl
from hamster_lib.storage import BaseStore
from pytest_factoryboy import register
from . import factories
register(factories.CategoryFactory)
register(factories.ActivityFactory)
register(factories.TagFactory)
register(factories.FactFactory)
faker = faker_.Faker()
def convert_time_to_datetime(time_string):
"""
Helper method.
    If given a %H:%M string, return a datetime.datetime object with today's
date.
"""
return datetime.datetime.combine(
datetime.datetime.now().date(),
datetime.datetime.strptime(time_string, "%H:%M").time()
)
# Controller
@pytest.yield_fixture
def controller(base_config):
"""Provide a basic controller."""
# [TODO] Parametrize over all available stores.
controller = HamsterControl(base_config)
yield controller
controller.store.cleanup()
@pytest.fixture
def basestore(base_config):
"""Provide a generic ``storage.BaseStore`` instance using ``baseconfig``."""
store = BaseStore(base_config)
return store
# Categories
@pytest.fixture(params=(None, True,))
def category_valid_parametrized(request, category_factory, name_string_valid_parametrized):
"""Provide a variety of valid category fixtures."""
if request.param:
result = category_factory(name=name_string_valid_parametrized)
else:
result = None
return result
@pytest.fixture
def category_valid_parametrized_without_none(request, category_factory,
name_string_valid_parametrized):
"""
Provide a parametrized category fixture but not ``None``.
    This fixture will represent a wide array of potential name charsets,
    but not ``category=None``.
"""
return category_factory(name=name_string_valid_parametrized)
# Activities
@pytest.fixture
def activity_valid_parametrized(request, activity_factory, name_string_valid_parametrized,
category_valid_parametrized, deleted_valid_parametrized):
"""Provide a huge array of possible activity versions. Including None."""
return activity_factory(name=name_string_valid_parametrized,
category=category_valid_parametrized, deleted=deleted_valid_parametrized)
@pytest.fixture
def new_activity_values(category):
"""Return garanteed modified values for a given activity."""
def modify(activity):
return {
'name': activity.name + 'foobar',
}
return modify
# Facts
@pytest.fixture
def fact_factory():
"""Return a factory class that generates non-persisting Fact instances."""
return factories.FactFactory.build
@pytest.fixture
def fact():
"""Provide a randomized non-persistant Fact-instance."""
return factories.FactFactory.build()
@pytest.fixture
def list_of_facts(fact_factory):
"""
Provide a factory that returns a list with given amount of Fact instances.
    The key point here is that these facts *do not overlap*!
"""
def get_list_of_facts(number_of_facts):
facts = []
old_start = datetime.datetime.now()
offset = datetime.timedelta(hours=4)
for i in range(number_of_facts):
start = old_start + offset
facts.append(fact_factory(start=start))
old_start = start
return facts
return get_list_of_facts
@pytest.fixture(params=('%M', '%H:%M'))
def string_delta_format_parametrized(request):
"""Provide all possible format option for ``Fact().get_string_delta()``."""
return request.param
@pytest.fixture
def today_fact(fact_factory):
"""Return a ``Fact`` instance that start and ends 'today'."""
start = datetime.datetime.now()
end = start + datetime.timedelta(minutes=30)
return fact_factory(start=start, end=end)
@pytest.fixture
def not_today_fact(fact_factory):
"""Return a ``Fact`` instance that neither start nor ends 'today'."""
start = datetime.datetime.now() - datetime.timedelta(days=2)
end = start + datetime.timedelta(minutes=30)
return fact_factory(start=start, end=end)
@pytest.fixture
def current_fact(fact_factory):
"""Provide a ``ongoing fact``. That is a fact that has started but not ended yet."""
return fact_factory(start=datetime.datetime.now(), end=None)
@pytest.fixture(params=[
'12:00 - 14:00 foo@bar, rumpelratz',
'12:00 - 14:00 foo',
'foo@bar',
    # For the following, start/end parsing should not succeed; the whole
    # string is treated as one big activity.name, but it still constitutes a
    # formally valid fact. If we want to be more accurate we need to work
    # with clear expectations.
'12:00-14:00 foo@bar',
])
def valid_raw_fact_parametrized(request):
"""Return various invalid ``raw fact`` strings."""
return request.param
@pytest.fixture(params=[
'',
'14:00 - 12:00 foo@bar',
'12:00 - 14:00 @bar',
])
def invalid_raw_fact_parametrized(request):
"""Return various invalid ``raw fact`` strings."""
return request.param
@pytest.fixture
def raw_fact_with_persistent_activity(persistent_activity):
"""A raw fact whichs 'activity' is already present in the db."""
return (
'12:00-14:14 {a.name}@{a.category.name}'.format(a=persistent_activity), {
'start': convert_time_to_datetime('12:00'),
'end': convert_time_to_datetime('14:14'),
'activity': persistent_activity.name,
'category': persistent_activity.category.name,
'description': None,
},
)
# Implementation of min and max heap data structures
# By: Jacob Rockland
# implementation of heap sort returning list in ascending order, O(n*log(n))
def heap_sort_ascending(items):
    heap = MinHeap(items)
    return [heap.extract_min() for i in range(heap.size)]
# implementation of heap sort returning list in descending order, O(n*log(n))
def heap_sort_descending(items):
    heap = MaxHeap(items)
    return [heap.extract_max() for i in range(heap.size)]
# implementation of min-heap
class MinHeap(object):
# initialize heap
def __init__(self, items = None):
if items is None:
self.heap_list = [None]
self.size = 0
else:
self.build_heap(items)
# returns string representation of heap
def __repr__(self):
temp = self.heap_list[1:]
return repr(temp)
# builds a heap from a given list of items, O(n)
def build_heap(self, items):
index = len(items) // 2
self.size = len(items)
self.heap_list = [None] + items[:]
while index > 0:
self.percolate_down(index)
index -= 1
# returns minimum item in heap, O(1)
def get_min(self):
if self.size > 0:
return self.heap_list[1]
else:
return None
# inserts a data item into the tree, O(log(n))
def insert(self, data):
self.heap_list.append(data)
self.size += 1
self.percolate_up(self.size)
# percolates item in heap list upwards
def percolate_up(self, index):
# percolates upwards so long as current node is smaller than parent
while index // 2 > 0:
if self.heap_list[index] < self.heap_list[index // 2]:
temp = self.heap_list[index // 2]
self.heap_list[index // 2] = self.heap_list[index]
self.heap_list[index] = temp
index = index // 2
# extract the minimum item in heap, O(log(n))
def extract_min(self):
if self.size > 0:
min_val = self.heap_list[1]
self.heap_list[1] = self.heap_list[self.size]
self.size -= 1
self.heap_list.pop()
self.percolate_down(1)
return min_val
else:
return None
# percolates item in heap list downwards
def percolate_down(self, index):
# percolates downwards so long as current node is greater than child
while index * 2 <= self.size:
            child = self.min_child(index)
            if self.heap_list[index] > self.heap_list[child]:
                temp = self.heap_list[index]
                self.heap_list[index] = self.heap_list[child]
                self.heap_list[child] = temp
            index = child
# returns index of smallest child of subtree
def min_child(self, index):
if index * 2 + 1 > self.size:
return index * 2
elif self.heap_list[index * 2] < self.heap_list[index * 2 + 1]:
return index * 2
else:
return index * 2 + 1
# implementation of max-heap
class MaxHeap(object):
# initialize heap
def __init__(self, items = None):
if items is None:
self.heap_list = [None]
self.size = 0
else:
self.build_heap(items)
# returns string representation of heap
def __repr__(self):
temp = self.heap_list[1:]
return repr(temp)
# builds a heap from a given list of items, O(n)
def build_heap(self, items):
index = len(items) // 2
self.size = len(items)
self.heap_list = [None] + items[:]
while index > 0:
self.percolate_down(index)
index -= 1
# returns maximum item in heap, O(1)
def get_max(self):
if self.size > 0:
return self.heap_list[1]
else:
return None
# inserts a data item into the tree, O(log(n))
def insert(self, data):
self.heap_list.append(data)
self.size += 1
self.percolate_up(self.size)
# percolates item in heap list upwards
def percolate_up(self, index):
# percolates upwards so long as current node is greater than parent
while index // 2 > 0:
if self.heap_list[index] > self.heap_list[index // 2]:
temp = self.heap_list[index // 2]
self.heap_list[index // 2] = self.heap_list[index]
self.heap_list[index] = temp
index = index // 2
    # extract the maximum item in heap, O(log(n))
def extract_max(self):
if self.size > 0:
max_val = self.heap_list[1]
self.heap_list[1] = self.heap_list[self.size]
self.size -= 1
self.heap_list.pop()
self.percolate_down(1)
return max_val
else:
return None
# percolates item in heap list downwards
def percolate_down(self, index):
# percolates downwards so long as current node is smaller than child
while index * 2 <= self.size:
            child = self.max_child(index)
            if self.heap_list[index] < self.heap_list[child]:
                temp = self.heap_list[index]
                self.heap_list[index] = self.heap_list[child]
                self.heap_list[child] = temp
            index = child
# returns index of greatest child of subtree
def max_child(self, index):
if index * 2 + 1 > self.size:
return index * 2
elif self.heap_list[index * 2] > self.heap_list[index * 2 + 1]:
return index * 2
else:
return index * 2 + 1
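
# Quick demonstration of the heap sorts above (a sanity check, not a test
# suite):
if __name__ == '__main__':
    data = [5, 3, 8, 1, 9, 2, 7]
    assert heap_sort_ascending(list(data)) == sorted(data)
    assert heap_sort_descending(list(data)) == sorted(data, reverse=True)
    print('heap sort OK')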
# Copyright (c) 2015 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the License);
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an AS IS BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and#
# limitations under the License.
from cloudferrylib.base.action import action
from cloudferrylib.utils import files
from cloudferrylib.utils import remote_runner
from cloudferrylib.utils.drivers.ssh_chunks import verified_file_copy, remote_md5_sum
from cloudferrylib.utils import utils
import copy
import os
LOG = utils.get_log(__name__)
class ReCreateBootImage(action.Action):
def __init__(self, init, cloud=None):
super(ReCreateBootImage, self).__init__(init, cloud)
self.src_user = self.cfg.src.ssh_user
src_password = self.cfg.src.ssh_sudo_password
self.src_host = self.cfg.src.ssh_host
self.dst_user = self.cfg.dst.ssh_user
dst_password = self.cfg.dst.ssh_sudo_password
self.dst_host = self.cfg.dst.ssh_host
self.src_runner = remote_runner.RemoteRunner(self.src_host,
self.src_user,
password=src_password,
sudo=True)
self.dst_runner = remote_runner.RemoteRunner(self.dst_host,
self.dst_user,
password=dst_password,
sudo=True)
    def run(self, images_info=None, compute_ignored_images=None,
            missing_images=None, **kwargs):
"""
        Create a boot image on the destination based on the root disk of an
        instance. Uses diff & base images: commits all changes from diff to
        base, copies the base, and adds it as a glance image. The image ID
        from the source is used as the name of the new image, because the
        name of a deleted image cannot be recovered.
        :param images_info: dict with all images on source
        :param compute_ignored_images: not used, just passed through to the
            lower level
        :param missing_images: dict with images that have been removed on
            source
:param kwargs: not used
:return: images_info and compute_ignored_images
"""
        images_info = copy.deepcopy(images_info)
        compute_ignored_images = compute_ignored_images or {}
        missing_images = missing_images or {}
for vm_id in missing_images:
img_id = missing_images[vm_id]
for image_id_src, gl_image in images_info['images'].iteritems():
if image_id_src == img_id and not gl_image['image']:
diff = gl_image['meta']['instance'][0]['diff']['path_src']
img_src_host = gl_image['meta']['instance'][0]['diff']['host_src']
if img_src_host != self.src_host:
                        LOG.warning('Different host information. '
                                    'Image is located on host %s, '
                                    'but the configuration file specifies '
                                    'host %s.', img_src_host, self.src_host)
continue
new_img = self.process_image(img_id, diff)
gl_image['image']['id'] = new_img['id']
gl_image['image']['resource'] = None
gl_image['image']['checksum'] = new_img['checksum']
gl_image['image']['name'] = img_id
return {
'images_info': images_info,
'compute_ignored_images': compute_ignored_images}
def process_image(self, img_id=None, diff=None):
"""
Processing image file: copy from source to destination, create glance image
:param img_id: image ID from source
:param diff: diff file of root disk for instance
        :return: dict with the new image ID and checksum
"""
with files.RemoteTempDir(self.src_runner) as src_tmp_dir,\
files.RemoteTempDir(self.dst_runner) as dst_tmp_dir:
diff_name = 'diff'
base_name = 'base'
diff_file = os.path.join(src_tmp_dir, diff_name)
self.src_runner.run('cp {} {}'.format(diff, diff_file))
base_file = os.path.join(src_tmp_dir, base_name)
dst_base_file = os.path.join(dst_tmp_dir, base_name)
qemu_img_src = self.src_cloud.qemu_img
base = qemu_img_src.detect_backing_file(diff, self.src_host)
if base is not None:
self.src_runner.run('cp {} {}'.format(base, base_file))
qemu_img_src.diff_rebase(base_file, diff_file, self.src_host)
qemu_img_src.diff_commit(src_tmp_dir, diff_name, self.src_host)
verified_file_copy(self.src_runner, self.dst_runner, self.dst_user,
base_file, dst_base_file, self.dst_host, 1)
else:
verified_file_copy(self.src_runner, self.dst_runner, self.dst_user,
diff_file, dst_base_file, self.dst_host, 1)
image_resource = self.dst_cloud.resources[utils.IMAGE_RESOURCE]
            image_id = image_resource.glance_img_create(self.dst_runner, img_id,
                                                        'qcow2', dst_base_file)
            checksum = remote_md5_sum(self.dst_runner, dst_base_file)
            return {'id': image_id, 'checksum': checksum}