Columns:
  repo: string (length 10–49)
  pull_number: int64 (range 1–570)
  instance_id: string (length 14–53)
  issue_numbers: string (length 5–18)
  base_commit: string (length 40)
  patch: string (length 157–538k)
  test_patch: string (length 201–589k)
  problem_statement: string (length 13–6.6k)
  hints_text: string (100 distinct values)
  created_at: timestamp[us] (2012-09-08 14:57:53 to 2025-04-16 15:10:41)
  version: string (1 distinct value)

repo: DiamondLightSource/ispyb-api
pull_number: 73
instance_id: DiamondLightSource__ispyb-api-73
issue_numbers: ['68']
base_commit: 7be86ec8aff0d1a2aecdb99efeb858f16992bff4
diff --git a/ispyb/model/__init__.py b/ispyb/model/__init__.py index c75c2f60..4f6d1ec3 100644 --- a/ispyb/model/__init__.py +++ b/ispyb/model/__init__.py @@ -7,9 +7,6 @@ class DBCache(object): in self._data. Cached data should be accessed as self._data. On first uncached access reload() is called.''' - def __init__(self): - '''Data has not yet been loaded from the database.''' - def load(self): '''Ensure data is loaded from the database.''' if not self.cached: @@ -95,6 +92,10 @@ def __nonzero__(self): '''Python 2: value when used in bool() context.''' return bool(self._value) + def __hash__(self): + '''Pass on the hash value of the inner object.''' + return hash(self._value) + def add_properties(objectclass, property_list): '''Generate class properties for a model that provide read-only access diff --git a/ispyb/model/datacollection.py b/ispyb/model/datacollection.py index 861d3bcf..d701c651 100644 --- a/ispyb/model/datacollection.py +++ b/ispyb/model/datacollection.py @@ -29,7 +29,7 @@ def __init__(self, dcid, db_area, preload=None): def reload(self): '''Load/update information from the database.''' - self._data = self._db.retrieve_data_collection_main(self._dcid)[0] + self._data = self._db.retrieve_data_collection(self._dcid)[0] @property def dcid(self): @@ -98,15 +98,26 @@ def __str__(self): ))).format(self) ispyb.model.add_properties(DataCollection, ( + ('comment', 'comments', 'A free comment field for the data collection.'), ('dcgid', 'groupId', 'Returns the Data Collection Group ID associated with this data collection. ' 'You can use .group to get the data collection group model object instead'), + ('detector_distance', 'detectorDistance', 'Distance from the sample to the detector in mm'), + ('detector_2theta', 'detector2Theta', '2Theta angle between the main beam and the detector normal in degrees'), ('file_template', 'fileTemplate', 'Template for file names with the character \'#\' standing in for image number digits.'), ('file_directory', 'imgDir', 'Fully qualified path to the image files'), ('time_start', 'startTime', None), + ('time_exposure', 'exposureTime', 'Exposure time per frame in seconds'), ('time_end', 'endTime', None), ('image_count', 'noImages', None), ('image_start_number', 'startImgNumber', None), + ('resolution', 'resolution', 'Inscribed resolution circle in Angstrom. Currently only well-defined for data collections with 2theta=0'), ('status', 'status', 'Returns a string representing the current data collection status.'), + ('snapshot1', 'snapshot1', 'One of four possible fields to store file paths to image files relating to the data collection'), + ('snapshot2', 'snapshot2', 'One of four possible fields to store file paths to image files relating to the data collection'), + ('snapshot3', 'snapshot3', 'One of four possible fields to store file paths to image files relating to the data collection'), + ('snapshot4', 'snapshot4', 'One of four possible fields to store file paths to image files relating to the data collection'), + ('transmission', 'transmission', 'Beam transmission, in per cent'), + ('wavelength', 'wavelength', 'Beam wavelength in Angstrom'), ))
diff --git a/tests/model/test_datacollection.py b/tests/model/test_datacollection.py index 9efe3efc..3cea6d4e 100644 --- a/tests/model/test_datacollection.py +++ b/tests/model/test_datacollection.py @@ -4,72 +4,110 @@ import mock import pytest + def test_datacollection_model_retrieves_database_records(): - db, record = mock.Mock(), mock.Mock() - db.retrieve_data_collection_main.return_value = [record] + db, record = mock.Mock(), mock.Mock() + db.retrieve_data_collection.return_value = [record] - dc = ispyb.model.datacollection.DataCollection(1234, db) - assert not db.retrieve_data_collection_main.called - assert '1234' in str(dc) - assert '1234' in repr(dc) - assert 'uncached' in repr(dc) + dc = ispyb.model.datacollection.DataCollection(1234, db) + assert not db.retrieve_data_collection.called + assert "1234" in str(dc) + assert "1234" in repr(dc) + assert "uncached" in repr(dc) - dc.load() - db.retrieve_data_collection_main.assert_called_once_with(1234) - assert dc._data == record - assert '1234' in repr(dc) - assert 'cached' in repr(dc) and 'uncached' not in repr(dc) + dc.load() + db.retrieve_data_collection.assert_called_once_with(1234) + assert dc._data == record + assert "1234" in repr(dc) + assert "cached" in repr(dc) and "uncached" not in repr(dc) - # Test caching behaviour - dc.load() - db.retrieve_data_collection_main.assert_called_once() + # Test caching behaviour + dc.load() + db.retrieve_data_collection.assert_called_once() def test_datacollection_model_accepts_preloading(): - db, record = mock.Mock(), mock.Mock() + db, record = mock.Mock(), mock.Mock() - dc = ispyb.model.datacollection.DataCollection(1234, db, preload=record) - assert dc._data == record + dc = ispyb.model.datacollection.DataCollection(1234, db, preload=record) + assert dc._data == record - dc.load() - assert not db.retrieve_data_collection_main.called + dc.load() + assert not db.retrieve_data_collection.called database_column_to_attribute_name = { - "groupId": None, - "detectorId": None, + "apertureSizeX": None, + "axisEnd": None, + "axisRange": None, + "axisStart": None, + "beamSizeAtSampleX": None, + "beamSizeAtSampleY": None, + "bestWilsonPlotPath": None, "blSubSampleId": None, + "chiStart": None, + "comments": "comment", "dcNumber": None, - "startTime": "time_start", + "detector2Theta": "detector_2theta", + "detectorDistance": "detector_distance", + "detectorId": None, "endTime": "time_end", - "status": None, - "noImages": "image_count", - "startImgNumber": "image_start_number", - "noPasses": None, + "exposureTime": "time_exposure", + "fileTemplate": "file_template", + "flux": None, + "fluxEnd": None, + "focalSpotSizeAtSampleX": None, + "focalSpotSizeAtSampleY": None, + "groupId": None, + "imgContainerSubPath": None, "imgDir": None, "imgPrefix": None, "imgSuffix": None, - "fileTemplate": None, - "snapshot1": None, - "snapshot2": None, - "snapshot3": None, - "snapshot4": None, - "comments": None, -} -record = { - k: getattr(mock.sentinel, k) - for k in database_column_to_attribute_name + "kappaStart": None, + "noImages": "image_count", + "noPasses": None, + "omegaStart": None, + "overlap": None, + "phiStart": None, + "resolution": "resolution", + "resolutionAtCorner": None, + "rotationAxis": None, + "slitGapHorizontal": None, + "slitGapVertical": None, + "snapshot1": "snapshot1", + "snapshot2": "snapshot2", + "snapshot3": "snapshot3", + "snapshot4": "snapshot4", + "startImgNumber": "image_start_number", + "startTime": "time_start", + "status": "status", + "synchrotronMode": None, + "transmission": 
"transmission", + "undulatorGap1": None, + "undulatorGap2": None, + "undulatorGap3": None, + "wavelength": "wavelength", + "xBeam": None, + "yBeam": None, } +record = {k: getattr(mock.sentinel, k) for k in database_column_to_attribute_name} record["imgDir"] = "/path/to/some/images/" record["fileTemplate"] = "file_####.cbf" [email protected]('column,attribute', filter(lambda ca: ca[1], database_column_to_attribute_name.items())) + [email protected]( + "column,attribute", + filter(lambda ca: ca[1], database_column_to_attribute_name.items()), +) def test_datacollection_model_attributes_return_correct_values(column, attribute): - dc = ispyb.model.datacollection.DataCollection(1234, None, preload=record) - assert getattr(dc, attribute) == record[column] + dc = ispyb.model.datacollection.DataCollection(1234, None, preload=record) + assert getattr(dc, attribute) == record[column] + [email protected]('printed_attribute', ('startTime', 'endTime', 'imgDir', 'fileTemplate')) [email protected]( + "printed_attribute", ("startTime", "endTime", "imgDir", "fileTemplate") +) def test_pretty_printing_datacollection_shows_attribute(printed_attribute): - dc_str = str(ispyb.model.datacollection.DataCollection(1234, None, preload=record)) - assert "1234" in dc_str - assert str(record[printed_attribute]) in dc_str + dc_str = str(ispyb.model.datacollection.DataCollection(1234, None, preload=record)) + assert "1234" in dc_str + assert str(record[printed_attribute]) in dc_str
problem_statement: Make extended data collection information accessible via DCID model
Currently it is not possible to access e.g. the resolution column of a data collection when given the DCID.
hints_text: Yes, I think that calls for a new stored procedure. The `retrieve_dc_main_v2` is meant to retrieve only a certain subset of the columns, as we also have `upsert_dc_main_v2`, which upserts the same columns, and `update_dc_experiment_v2`, which updates the "experiment" columns, including resolution. I will write a new stored procedure that retrieves all the columns (except deprecated ones and other undesirables).
OK, I've made a PR for this: https://github.com/DiamondLightSource/ispyb-api/pull/71
Thanks, I'll pick it up from here.
created_at: 2019-04-16T11:53:05
version: -1.0
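
To see the effect of the patch in this record, here is a minimal usage sketch modelled on the test_patch above: a `DataCollection` is preloaded with a fake record so no database connection is needed, and the new read-only properties map attribute names to database column names. Only the constructor call and the column/attribute names are taken from the patch and tests; the record values are placeholders.

```python
import ispyb.model.datacollection

# Fake database row keyed by column name, as in the tests above;
# the values are invented for illustration.
record = {
    "resolution": 1.8,
    "detectorDistance": 250.0,
    "wavelength": 0.9762,
    "transmission": 100.0,
}

# preload skips the retrieve_data_collection() call entirely.
dc = ispyb.model.datacollection.DataCollection(1234, None, preload=record)

# Each property added by the patch reads the matching column.
assert dc.resolution == record["resolution"]
assert dc.detector_distance == record["detectorDistance"]
assert dc.wavelength == record["wavelength"]
assert dc.transmission == record["transmission"]
```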

repo: DiamondLightSource/ispyb-api
pull_number: 46
instance_id: DiamondLightSource__ispyb-api-46
issue_numbers: ['3']
base_commit: 09dd224f8a861ab0019b223e1dfb7bcbbc5bfab2
diff --git a/conf/config.example.cfg b/conf/config.example.cfg index bd9d446e..d1d52723 100644 --- a/conf/config.example.cfg +++ b/conf/config.example.cfg @@ -5,4 +5,3 @@ pw = host = localhost port = 3306 db = ispybtest -conn_inactivity = 360 diff --git a/ispyb/__init__.py b/ispyb/__init__.py index 3073240f..377498b1 100644 --- a/ispyb/__init__.py +++ b/ispyb/__init__.py @@ -6,7 +6,7 @@ import ConfigParser as configparser import logging -__version__ = '4.11.1' +__version__ = '4.12.0' _log = logging.getLogger('ispyb') diff --git a/ispyb/connector/mysqlsp/main.py b/ispyb/connector/mysqlsp/main.py index f39556a4..e5ba3fdb 100644 --- a/ispyb/connector/mysqlsp/main.py +++ b/ispyb/connector/mysqlsp/main.py @@ -1,9 +1,9 @@ -import datetime +from __future__ import absolute_import, division, print_function + import os import sys -import traceback import threading -import time +import traceback import ispyb.interface.connection import mysql.connector @@ -17,7 +17,7 @@ class ISPyBMySQLSPConnector(ispyb.interface.connection.IF): def __init__(self, user=None, pw=None, host='localhost', db=None, port=3306, conn_inactivity=360): self.lock = threading.Lock() - self.connect(user=user, pw=pw, host=host, db=db, port=port, conn_inactivity=conn_inactivity) + self.connect(user=user, pw=pw, host=host, db=db, port=port) def __enter__(self): if hasattr(self, 'conn') and self.conn is not None: @@ -30,23 +30,15 @@ def __exit__(self, type, value, traceback): def connect(self, user=None, pw=None, host='localhost', db=None, port=3306, conn_inactivity=360): self.disconnect() - self.user = user - self.pw = pw - self.host = host - self.db = db - self.port = port - self.conn_inactivity = int(conn_inactivity) self.conn = mysql.connector.connect(user=user, password=pw, host=host, database=db, port=int(port)) - if self.conn is not None: - self.conn.autocommit=True - else: - raise ISPyBConnectionException - self.last_activity_ts = time.time() + if not self.conn: + raise ISPyBConnectionException('Could not connect to database') + self.conn.autocommit = True def __del__(self): self.disconnect() @@ -61,17 +53,13 @@ def get_data_area_package(self): return 'ispyb.sp' def create_cursor(self, dictionary=False): - if time.time() - self.last_activity_ts > self.conn_inactivity: - # re-connect: - self.connect(self.user, self.pw, self.host, self.db, self.port) - self.last_activity_ts = time.time() - if self.conn is None: - raise ISPyBConnectionException - - cursor = self.conn.cursor(dictionary=dictionary) - if cursor is None: - raise ISPyBConnectionException - return cursor + if not self.conn: + raise ISPyBConnectionException('Not connected to database') + self.conn.ping(reconnect=True) + cursor = self.conn.cursor(dictionary=dictionary) + if not cursor: + raise ISPyBConnectionException('Could not create database cursor') + return cursor def call_sp_write(self, procname, args): with self.lock: diff --git a/ispyb/model/__future__.py b/ispyb/model/__future__.py index f69d9e2e..0f9367a9 100644 --- a/ispyb/model/__future__.py +++ b/ispyb/model/__future__.py @@ -15,7 +15,7 @@ _db_config = None -def enable(configuration_file): +def enable(configuration_file, section='ispyb'): '''Enable access to features that are currently under development.''' global _db, _db_cc, _db_config @@ -37,19 +37,46 @@ def enable(configuration_file): cfgparser = configparser.RawConfigParser() if not cfgparser.read(configuration_file): raise RuntimeError('Could not read from configuration file %s' % configuration_file) - cfgsection = dict(cfgparser.items('ispyb')) + 
cfgsection = dict(cfgparser.items(section)) host = cfgsection.get('host') port = cfgsection.get('port', 3306) - database = cfgsection.get('database') - username = cfgsection.get('username') - password = cfgsection.get('password') + database = cfgsection.get('database', cfgsection.get('db')) + username = cfgsection.get('username', cfgsection.get('user')) + password = cfgsection.get('password', cfgsection.get('pw')) # Open a direct MySQL connection _db = mysql.connector.connect(host=host, port=port, user=username, password=password, database=database) _db.autocommit = True - _db_cc = DictionaryContextcursorFactory(_db.cursor) _db_config = configuration_file + class DictionaryCursorContextManager(object): + '''This class creates dictionary cursors for mysql.connector connections. + By using a context manager it is ensured that cursors are closed + immediately after use. + Cursors created with this context manager return results as a dictionary + and offer a .run() function, which is an alias to .execute that accepts + query parameters as function parameters rather than a list. + ''' + + def __enter__(cm): + '''Enter context. Ensure the database is alive and return a cursor + with an extra .run() function.''' + _db.ping(reconnect=True) + cm.cursor = _db.cursor(dictionary=True) + + def flat_execute(stmt, *parameters): + '''Pass all given function parameters as a list to the existing + .execute() function.''' + return cm.cursor.execute(stmt, parameters) + setattr(cm.cursor, 'run', flat_execute) + return cm.cursor + + def __exit__(cm, *args): + '''Leave context. Close cursor. Destroy reference.''' + cm.cursor.close() + cm.cursor = None + _db_cc = DictionaryCursorContextManager + import ispyb.model.datacollection ispyb.model.datacollection.DataCollection.integrations = _get_linked_autoprocintegration_for_dc import ispyb.model.gridinfo @@ -57,52 +84,6 @@ def enable(configuration_file): import ispyb.model.processingprogram ispyb.model.processingprogram.ProcessingProgram.reload = _get_autoprocprogram -class DictionaryContextcursorFactory(object): - '''This class creates dictionary context manager objects for mysql.connector - cursors. By using a context manager it is ensured that cursors are - closed immediately after use. - Context managers created via this factory return results as a dictionary - by default, and offer a .run() function, which is an alias to .execute - that accepts query parameters as function parameters rather than a list. - ''' - - def __init__(self, cursor_factory_function): - '''Set up the context manager factory.''' - - class ContextManager(object): - '''The context manager object which is actually used in the - with .. as ..: - clause.''' - - def __init__(cm, parameters): - '''Store any constructor parameters, given as dictionary, so that they - can be passed to the cursor factory later.''' - cm.cursorparams = { 'dictionary': True } - cm.cursorparams.update(parameters) - - def __enter__(cm): - '''Enter context. Instantiate and return the actual cursor using the - given constructor, parameters, and an extra .run() function.''' - cm.cursor = cursor_factory_function(**cm.cursorparams) - - def flat_execute(stmt, *parameters): - '''Pass all given function parameters as a list to the existing - .execute() function.''' - return cm.cursor.execute(stmt, parameters) - setattr(cm.cursor, 'run', flat_execute) - return cm.cursor - - def __exit__(cm, *args): - '''Leave context. Close cursor. 
Destroy reference.''' - cm.cursor.close() - cm.cursor = None - - self._contextmanager_factory = ContextManager - - def __call__(self, **parameters): - '''Creates and returns a context manager object.''' - return self._contextmanager_factory(parameters) - def _get_gridinfo(self): # https://jira.diamond.ac.uk/browse/MXSW-1173 with _db_cc() as cursor:
diff --git a/tests/conftest.py b/tests/conftest.py index 6f3fc3f5..75a4f925 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -4,6 +4,7 @@ import os +import ispyb import pytest @pytest.fixture diff --git a/tests/test_misc.py b/tests/test_misc.py index 07977e10..522d16fb 100644 --- a/tests/test_misc.py +++ b/tests/test_misc.py @@ -3,7 +3,10 @@ import threading import context +import ispyb import ispyb.exception +import ispyb.model.__future__ +import mysql.connector.errors import pytest def test_multi_threads_upsert(testconfig): @@ -42,3 +45,45 @@ def test_retrieve_failure(testconfig): with ispyb.open(testconfig) as conn: with pytest.raises(ispyb.exception.ISPyBNoResultException): rs = conn.mx_acquisition.retrieve_data_collection_main(0) + +def test_database_reconnects_on_connection_failure(testconfig, testdb): + ispyb.model.__future__.enable(testconfig, section='ispyb_mysql_sp') + + # Create minimal data collection and data collection group for test + params = testdb.mx_acquisition.get_data_collection_group_params() + params['parentid'] = 55168 + dcgid = testdb.mx_acquisition.insert_data_collection_group(list(params.values())) + assert dcgid, "Could not create dummy data collection group" + params = testdb.mx_acquisition.get_data_collection_params() + params['parentid'] = dcgid + dcid = testdb.mx_acquisition.insert_data_collection(list(params.values())) + assert dcid, "Could not create dummy data collection" + + # Test the database connections + # This goes from DCID to DCGID using the default connection, + # and looks into the GridInfo table using the __future__ connection. + assert bool(testdb.get_data_collection(dcid).group.gridinfo) is False + + fconn = ispyb.model.__future__._db + iconn = testdb.conn + + # Break both connections from the server side + c = iconn.cursor() + with pytest.raises(mysql.connector.errors.DatabaseError): + c.execute("KILL CONNECTION_ID();") + c.close() + + c = fconn.cursor() + with pytest.raises(mysql.connector.errors.DatabaseError): + c.execute("KILL CONNECTION_ID();") + c.close() + + # Confirm both connections are broken + with pytest.raises(mysql.connector.errors.OperationalError): + iconn.cursor() + + with pytest.raises(mysql.connector.errors.OperationalError): + fconn.cursor() + + # Test implicit reconnect + assert bool(testdb.get_data_collection(dcid).group.gridinfo) is False
problem_statement: Protection against DB connection loss
Should handle database connection loss by catching the exception and attempting to reconnect. Should possibly 'ping' at set intervals to keep the connection alive.
hints_text: Re-connect after a certain number of seconds of inactivity: https://github.com/DiamondLightSource/ispyb-api/commit/03056677eec33d4b53103b97a5ada471e14a9f1f
created_at: 2018-09-24T08:10:25
version: -1.0
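
The core of the fix above replaces the inactivity-timestamp bookkeeping with `ping(reconnect=True)` on every cursor request, letting the MySQL client library re-establish dropped connections transparently. A minimal standalone sketch of that pattern follows; the connection parameters are placeholders.

```python
import mysql.connector

# Placeholder credentials and database name.
conn = mysql.connector.connect(
    user="reader", password="secret",
    host="localhost", database="ispybtest", port=3306,
)
conn.autocommit = True

def create_cursor(dictionary=False):
    # ping() checks that the connection is alive; with reconnect=True it
    # silently reconnects if the server dropped it, so a cursor can always
    # be handed out without tracking idle time ourselves.
    conn.ping(reconnect=True)
    return conn.cursor(dictionary=dictionary)
```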

repo: VirologyCharite/gb2seq
pull_number: 12
instance_id: VirologyCharite__gb2seq-12
issue_numbers: ['11']
base_commit: 6ae2edf0f5d5599d22b84b42d4e7fcd309f260c4
diff --git a/CHANGELOG.md b/CHANGELOG.md index 88e0d73..be3bcab 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,8 @@ +## 0.2.22 August 27, 2024 + +Improved dealing with features that have multiple genome ranges to fix +https://github.com/VirologyCharite/gb2seq/issues/11 + ## 0.2.21 August 10, 2023 Added `--translated` option to `bin/describe-genome.py`. This will extract all the features that are translated. diff --git a/bin/describe-feature.py b/bin/describe-feature.py index 9e37dfd..8cbd4a6 100755 --- a/bin/describe-feature.py +++ b/bin/describe-feature.py @@ -69,7 +69,8 @@ def reportGenomeFeature(features, name, alignment, maxSequenceLength, oneBased): should be reported. @param maxSequenceLength: The maximum sequence length to print. Longer sequences will be truncated. Use 0 or C{None} to skip printing sequences. - @param oneBased: If true, print one-based sites instead of zero-based offsets. + @param oneBased: A C{bool}. If true, print one-based sites instead of zero-based + offsets. """ print(f" Genome {alignment.genome.id}:") @@ -90,9 +91,11 @@ def reportGenomeFeature(features, name, alignment, maxSequenceLength, oneBased): absoluteStop = len(gappedSequence[:alignedStop].replace("-", "")) _, genomeNt = alignment.ntSequences(name, raiseOnReferenceGaps=False) - print(f" start: {absoluteStart + bool(oneBased)}") + print(f" start: {absoluteStart + oneBased}") print(f" stop: {absoluteStop}") print(f" length (nt): {len(genomeNt.sequence)}") + print(f" aligned (to ref) start: {alignedStart + oneBased}") + print(f" aligned (to ref) stop: {alignedStop}") if maxSequenceLength: print( diff --git a/gb2seq/__init__.py b/gb2seq/__init__.py index a550e5e..df61e9a 100644 --- a/gb2seq/__init__.py +++ b/gb2seq/__init__.py @@ -2,4 +2,4 @@ class Gb2SeqError(Exception): "A gb2seq library error occurred." -__version__ = "0.2.21" +__version__ = "0.2.22" diff --git a/gb2seq/alignment.py b/gb2seq/alignment.py index 9756801..7a4b667 100644 --- a/gb2seq/alignment.py +++ b/gb2seq/alignment.py @@ -12,7 +12,6 @@ from gb2seq.translate import ( translate, translateSARS2Spike, - TranslationError, TranslatedReferenceAndGenomeLengthError, ) from gb2seq.variants import VARIANTS @@ -641,7 +640,7 @@ def _getChanges( if aa else self.ntSequences(featureName) ) - except TranslationError as e: + except Gb2SeqError as e: if onError == "raise": raise elif onError == "print": diff --git a/gb2seq/features.py b/gb2seq/features.py index 042688e..86c2791 100644 --- a/gb2seq/features.py +++ b/gb2seq/features.py @@ -11,7 +11,7 @@ from importlib_resources import files, as_file -# from warnings import warn +from warnings import warn import json import argparse @@ -255,30 +255,56 @@ def _initializeFromGenBankRecord( if type_ not in alsoInclude: continue + for optional in "translation", "note": + try: + value[optional] = feature.qualifiers[optional][0] + except KeyError: + pass + start = int(feature.location.start) stop = int(feature.location.end) genomeRanges = GenomeRanges(str(feature.location)) + nRanges = len(genomeRanges.ranges) + + if nRanges == 0: + raise ValueError("No genome ranges present for feature {name!r}.") - # We can only handle a single range at the moment. 
- if len(genomeRanges.ranges) == 1: - assert start == genomeRanges.ranges[0][0] - assert stop == genomeRanges.ranges[0][1] - forward = genomeRanges.ranges[0][2] - elif self.sars2 and name == "ORF1ab polyprotein": - assert len(genomeRanges.ranges) == 2 - assert start == genomeRanges.ranges[0][0] - assert stop == genomeRanges.ranges[1][1] - forward = True + # If we just have one range, check that the given high-level start and stop + # attributes match the start and end of the range. The situation with + # multiple ranges is more complicated (e.g., the HBV polymerase of + # NC_001896.1 starts at 2309 and goes to 1637). + # + # We should probably ignore the "location" start and use the ranges. But + # then we should generalize to be more sophisticate regarding start/stop, + # translation, etc. + if nRanges == 1: + rangeStart, rangeStop = genomeRanges.ranges[0][:2] + assert start == rangeStart, ( + f"Record start offset {start} does not match first genome range " + f"start {rangeStart}." + ) + assert stop == rangeStop, ( + f"Record stop offset {stop} does not match first genome range " + f"stop {rangeStop}." + ) + + directions = set(genomeRange[2] for genomeRange in genomeRanges.ranges) + if len(directions) == 1: + # All ranges have the same orientation. + forward = directions.pop() else: - if not self.sars2: - # At some point (soon) we should emit a warning. But let's first try - # to fix things so we can translate anything. - # - # warn( - # f"Multiple reference genome ranges {genomeRanges} found " - # f"for feature {name!r} will not be translated reliably." - # ) - pass + # The genome ranges have mixed orientations. If there is no translation + # present (from a GenBank record), warn that we do not yet support + # translation for this feature (this would be easy to add - we should do + # it!). + forward = None + + if "translation" not in value: + warn( + f"The reference genome ranges {genomeRanges} " + f"for feature {name!r} do not all have the same orientation. " + f"This feature will not be translated reliably!" + ) sequence = str(record.seq)[start:stop] @@ -292,12 +318,6 @@ def _initializeFromGenBankRecord( } ) - for optional in "translation", "note": - try: - value[optional] = feature.qualifiers[optional][0] - except KeyError: - pass - # If there is a translation, add an amino acid '*' stop # indicator if there is not one already and the sequence ends # with a stop codon. diff --git a/gb2seq/sars2.py b/gb2seq/sars2.py index 344aca3..3b28e81 100644 --- a/gb2seq/sars2.py +++ b/gb2seq/sars2.py @@ -1,9 +1,69 @@ +from gb2seq import Gb2SeqError + + +class NoSlipperySequenceError(Gb2SeqError): + "No slippery sequence could be found in a genome." + + +class NoStopCodonError(Gb2SeqError): + "No stop codon was found downstream from the slippery sequence." + + +class StopCodonTooDistantError(Gb2SeqError): + "The stop codon following the slippery sequence was too far away." + + +# The maximum difference (number of nucleotides) to allow between the +# offset of the start of the slippery sequence and the downstream stop +# codon. +_MAX_DISTANCE_TO_STOP = 20 + +SLIPPERY_SEQUENCE = "TTTAAAC" + +_SLIPPERY_LEN = len(SLIPPERY_SEQUENCE) + + +def getORF1abSequence(seq): + # See Fields Virology (figure 10.6a on page 421, 7th edition or + # figure 28.7a on page 836, 6th edition) plus + # https://www.ncbi.nlm.nih.gov/nuccore/NC_045512 for details of + # what happens below. 
Note that the nucelotide sequence we are + # passed is the one that's made from the alignment with the + # reference ORF1ab nucleotide sequence (in sequence.py) and so is + # just that ORF and does not include the leading ~265 nucleotides + # of the 5' UTR. As a result, the offset used to begin the search + # for the slippery sequence is 13000, which is chosen to be a bit + # before 13462 - 265. There are various occurrences of the + # slippery sequence in the reference genome (and hence probably in + # other CoV genomes), but only one in this region and with a stop + # codon shortly (up to _MAX_DISTANCE_TO_STOP nucleotides) downstream. + offset = seq.find(SLIPPERY_SEQUENCE, 13000) + stop = seq.find("TAA", offset + _SLIPPERY_LEN) + if offset == -1: + raise NoSlipperySequenceError("No slippery sequence found.") + if stop == -1: + raise NoStopCodonError( + f"Could not find a stop codon downstream from the start of " + f"the slippery sequence at site {offset + 1}." + ) + if stop - offset > _MAX_DISTANCE_TO_STOP: + raise StopCodonTooDistantError( + f"The stop codon was too far ({stop - offset} nucleotides) " + f"downstream (max allowed distance is " + f"{_MAX_DISTANCE_TO_STOP}) from the start of the slippery " + f"sequence at site {offset + 1}." + ) + + return seq[: offset + _SLIPPERY_LEN] + seq[offset + _SLIPPERY_LEN - 1 :] + + # Provide convenient aliases for SARS-CoV-2 feature names. The alias is the # key, the canonical name (as found in the GenBank file) is the value. # # Alphanumeric feature aliases must have lower case keys. If not they will not # be detected (and the test suite will fail). + SARS_COV_2_ALIASES = { "2": "2'-O-ribose methyltransferase", "3clpro": "3C-like proteinase", diff --git a/gb2seq/translate.py b/gb2seq/translate.py index bd64534..f4e8c87 100644 --- a/gb2seq/translate.py +++ b/gb2seq/translate.py @@ -3,6 +3,7 @@ from typing import Dict, List, Optional from gb2seq import Gb2SeqError +from gb2seq.sars2 import getORF1abSequence from dark.aaVars import CODONS, STOP_CODONS from dark.reads import AARead @@ -12,18 +13,6 @@ class TranslationError(Gb2SeqError): "Error when using custom translation of sequences." -class NoSlipperySequenceError(TranslationError): - "No slippery sequence could be found in a genome." - - -class NoStopCodonError(TranslationError): - "No stop codon was found downstream from the slippery sequence." - - -class StopCodonTooDistantError(TranslationError): - "The stop codon following the slippery sequence was too far away." - - class TranslatedSequenceLengthError(TranslationError): "A sequence to be translated has an incorrect length." @@ -43,15 +32,6 @@ class TranslatedGapLengthError(TranslationError): + [("---", "-")] ) -# The maximum difference (number of nucleotides) to allow between the -# offset of the start of the slippery sequence and the downstream stop -# codon. -_MAX_DISTANCE_TO_STOP = 20 - -SLIPPERY_SEQUENCE = "TTTAAAC" - -_SLIPPERY_LEN = len(SLIPPERY_SEQUENCE) - def translate( seq: str, @@ -70,38 +50,8 @@ def translate( translate into '-' or 'X'. @return: A translated C{str} amino acid sequence. """ - if name == "ORF1ab polyprotein": - # See Fields Virology (figure 10.6a on page 421, 7th edition or - # figure 28.7a on page 836, 6th edition) plus - # https://www.ncbi.nlm.nih.gov/nuccore/NC_045512 for details of - # what happens below. 
Note that the nucelotide sequence we are - # passed is the one that's made from the alignment with the - # reference ORF1ab nucleotide sequence (in sequence.py) and so is - # just that ORF and does not include the leading ~265 nucleotides - # of the 5' UTR. As a result, the offset used to begin the search - # for the slippery sequence is 13000, which is chosen to be a bit - # before 13462 - 265. There are various occurrences of the - # slippery sequence in the reference genome (and hence probably in - # other CoV genomes), but only one in this region and with a stop - # codon shortly (up to _MAX_DISTANCE_TO_STOP nucleotides) downstream. - offset = seq.find(SLIPPERY_SEQUENCE, 13000) - stop = seq.find("TAA", offset + _SLIPPERY_LEN) - if offset == -1: - raise NoSlipperySequenceError("No slippery sequence found.") - if stop == -1: - raise NoStopCodonError( - f"Could not find a stop codon downstream from the start of " - f"the slippery sequence at site {offset + 1}." - ) - if stop - offset > _MAX_DISTANCE_TO_STOP: - raise StopCodonTooDistantError( - f"The stop codon was too far ({stop - offset} nucleotides) " - f"downstream (max allowed distance is " - f"{_MAX_DISTANCE_TO_STOP}) from the start of the slippery " - f"sequence at site {offset + 1}." - ) - - seq = seq[: offset + _SLIPPERY_LEN] + seq[offset + _SLIPPERY_LEN - 1 :] + if sars2 and name == "ORF1ab polyprotein": + seq = getORF1abSequence(seq) # Pad with 'N' to avoid a 'BiopythonWarning: Partial codon' warning. remainder = len(seq) % 3
diff --git a/test/test_alignment.py b/test/test_alignment.py index c567376..b156121 100644 --- a/test/test_alignment.py +++ b/test/test_alignment.py @@ -18,7 +18,7 @@ ) from gb2seq.change import splitChange from gb2seq.features import Features, AmbiguousFeatureError, MissingFeatureError -from gb2seq.translate import NoSlipperySequenceError +from gb2seq.sars2 import NoSlipperySequenceError from .fasta import getSequence diff --git a/test/test_translate.py b/test/test_translate.py index d928e4d..77de7f0 100644 --- a/test/test_translate.py +++ b/test/test_translate.py @@ -2,12 +2,15 @@ from unittest import TestCase from dark.reads import AARead -from gb2seq.translate import ( - KNOWN_INSERTIONS, +from gb2seq.sars2 import ( + SLIPPERY_SEQUENCE, NoSlipperySequenceError, NoStopCodonError, - SLIPPERY_SEQUENCE, StopCodonTooDistantError, +) + +from gb2seq.translate import ( + KNOWN_INSERTIONS, TranslatedReferenceAndGenomeLengthError, TranslatedSequenceLengthError, getSubstitutionsString, @@ -27,7 +30,12 @@ def testNoSlipperySequencs(self): """ error = r"^No slippery sequence found\.$" self.assertRaisesRegex( - NoSlipperySequenceError, error, translate, "AAATTT", "ORF1ab polyprotein" + NoSlipperySequenceError, + error, + translate, + "AAATTT", + name="ORF1ab polyprotein", + sars2=True, ) def testNoStopCodonFollowingTheSlipperySequence(self): @@ -41,7 +49,12 @@ def testNoStopCodonFollowingTheSlipperySequence(self): ) sequence = "A" * 13000 + SLIPPERY_SEQUENCE self.assertRaisesRegex( - NoStopCodonError, error, translate, sequence, "ORF1ab polyprotein" + NoStopCodonError, + error, + translate, + sequence, + name="ORF1ab polyprotein", + sars2=True, ) def testDistantStopCodonFollowingTheSlipperySequence(self): @@ -56,7 +69,12 @@ def testDistantStopCodonFollowingTheSlipperySequence(self): ) sequence = "A" * 13000 + SLIPPERY_SEQUENCE + "A" * 100 + "TAA" self.assertRaisesRegex( - StopCodonTooDistantError, error, translate, sequence, "ORF1ab polyprotein" + StopCodonTooDistantError, + error, + translate, + sequence, + name="ORF1ab polyprotein", + sars2=True, ) def testEmpty(self):
problem_statement: local variable 'forward' referenced before assignment
Using the `NC_001896.1` GenBank file (fetched with the dark-matter script, but you can just download it yourself if you don't have that) `ncbi-fetch-id.py --format gb NC_001896.1 > NC_001896.1.gb`, I get this:

```sh
$ describe-feature.py --reference NC_001896.1.gb
Traceback (most recent call last):
  File "/Users/terry/charite/gb2seq/bin/describe-feature.py", line 251, in <module>
    main(args)
  File "/Users/terry/charite/gb2seq/bin/describe-feature.py", line 123, in main
    features = Features(
  File "/Users/terry/charite/gb2seq/gb2seq/features.py", line 136, in __init__
    self._initializeFromGenBankRecord(record, alsoInclude)
  File "/Users/terry/charite/gb2seq/gb2seq/features.py", line 287, in _initializeFromGenBankRecord
    "forward": forward,
UnboundLocalError: local variable 'forward' referenced before assignment
```
hints_text: This occurs because HBV has multiple genome ranges specified for some genes (it is circular, so the gene offsets cross zero).
created_at: 2024-08-27T03:11:44
version: -1.0
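
The patch above resolves the `UnboundLocalError` by deriving `forward` from the set of range orientations instead of assuming a single range. A minimal sketch of that check, assuming each genome range is a `(start, stop, forward)` tuple as in gb2seq's `GenomeRanges`; the example coordinates are made up.

```python
from warnings import warn

def feature_orientation(ranges, name):
    """Return True/False if all ranges share one orientation, else None."""
    directions = set(genome_range[2] for genome_range in ranges)
    if len(directions) == 1:
        # All ranges agree, so the feature has a well-defined direction.
        return directions.pop()
    # Mixed orientations (e.g. a gene on a circular genome whose offsets
    # cross zero, as with HBV): no single direction exists.
    warn(
        f"The reference genome ranges for feature {name!r} do not all "
        f"have the same orientation. This feature will not be translated "
        f"reliably!"
    )
    return None

# A two-range feature whose ranges agree still gets a direction...
assert feature_orientation([(2309, 3182, True), (0, 1637, True)], "polymerase") is True
# ...while mixed orientations yield None (and a warning).
assert feature_orientation([(0, 100, True), (200, 300, False)], "mixed") is None
```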

repo: ab5424/agility
pull_number: 31
instance_id: ab5424__agility-31
issue_numbers: ['30']
base_commit: 201a875e29d5b2d15cf6e8242c636f3f2988548b
diff --git a/README.md b/README.md index 9092aeb..64f443f 100755 --- a/README.md +++ b/README.md @@ -1,5 +1,6 @@ [![Documentation Status](https://readthedocs.org/projects/agility1/badge/?version=latest)](https://agility1.readthedocs.io/en/latest/?badge=latest) [![Coverage Status](https://coveralls.io/repos/github/ab5424/agility/badge.svg?branch=main)](https://coveralls.io/github/ab5424/agility?branch=main) +[![code coverage](https://img.shields.io/codecov/c/gh/ab5424/agility)](https://codecov.io/gh/ab5424/agility) [![pre-commit.ci status](https://results.pre-commit.ci/badge/github/ab5424/agility/main.svg)](https://results.pre-commit.ci/latest/github/ab5424/agility/main) [![Binder](https://mybinder.org/badge_logo.svg)](https://mybinder.org/v2/gh/ab5424/agility/HEAD)
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 3a0a3e9..efae58f 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -42,8 +42,9 @@ jobs: sudo apt update && sudo apt install -y libegl1-mesa-dev - name: pytest run: | - pytest tests + pytest --cov=agility --cov-report=xml tests - name: Upload coverage reports to Codecov uses: codecov/codecov-action@v3 + if: matrix.python-version == '3.11' && matrix.os == 'ubuntu-latest' env: CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
problem_statement: Fix coverage not working
created_at: 2024-01-26T14:27:21
version: -1.0

repo: CraveFood/django-duprequests
pull_number: 2
instance_id: CraveFood__django-duprequests-2
issue_numbers: ['1']
base_commit: 5ffa4d96104bc75816dc0bcbb1dbf20430701974
diff --git a/LICENSE b/LICENSE index 45fb8ff..8e54ac2 100644 --- a/LICENSE +++ b/LICENSE @@ -28,4 +28,3 @@ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - diff --git a/duprequests/middleware.py b/duprequests/middleware.py index 674633e..07c5bf7 100644 --- a/duprequests/middleware.py +++ b/duprequests/middleware.py @@ -1,41 +1,66 @@ - -from uuid import uuid4 +from uuid import uuid4, uuid5, NAMESPACE_DNS +from urllib.parse import urlencode from django.conf import settings from django.core.cache import caches from django.http.response import HttpResponseNotModified + try: from django.utils.deprecation import MiddlewareMixin -except ImportError: # pragma: nocover +except ImportError: # pragma: nocover MiddlewareMixin = object -CACHE_NAME = getattr(settings, 'DUPLICATED_REQUESTS_CACHE_NAME', 'default') -CACHE_TIMEOUT = getattr(settings, 'DUPLICATED_REQUESTS_CACHE_TIMEOUT', 5) -COOKIE_NAME = getattr(settings, 'DUPLICATED_REQUESTS_COOKIE_NAME', - 'dj-request-id') -COOKIE_PREFIX = getattr(settings, 'DUPLICATED_REQUESTS_COOKIE_PREFIX', - 'request-id-') +CACHE_NAME = getattr(settings, "DUPLICATED_REQUESTS_CACHE_NAME", "default") +CACHE_TIMEOUT = getattr(settings, "DUPLICATED_REQUESTS_CACHE_TIMEOUT", 5) +COOKIE_NAME = getattr(settings, "DUPLICATED_REQUESTS_COOKIE_NAME", "dj-request-id") +COOKIE_PREFIX = getattr(settings, "DUPLICATED_REQUESTS_COOKIE_PREFIX", "request-id-") class DropDuplicatedRequests(MiddlewareMixin): - """Middleware that drops requests made in quick succession. + """ + Middleware that drops requests made in quick succession. + Uses Django's caching system to check/save each request. + """ - Uses Django's caching system to check/save each request.""" + def _get_request_hash(self, request): + """ + Generates a unique key based on request path, method, body and arguments + """ + hash_value = uuid5( + NAMESPACE_DNS, + request.path_info + + "--" + + request.method.lower() + + "--" + + urlencode(request.GET) + + "--" + + request.body.decode("utf-8"), + ).node + return str(hash_value) def process_request(self, request): - if not request.method.lower() in ('post', 'put', 'delete', 'patch'): + """ + Stores a unique key per request in the cache, if it already exists, returns 304 + """ + if not request.method.lower() in ("post", "put", "delete", "patch"): return - cache_key = request.COOKIES.get(COOKIE_NAME) - if not cache_key: + cookie_value = request.COOKIES.get(COOKIE_NAME) + if not cookie_value: return + cache_key = cookie_value + self._get_request_hash(request) + cache = caches[CACHE_NAME] if cache_key in cache: return HttpResponseNotModified() cache.set(cache_key, True, CACHE_TIMEOUT) def process_response(self, request, response): + """ + Sends a cookie with a unique hash to identify requests that are the same + but from different sources + """ response.set_cookie(COOKIE_NAME, COOKIE_PREFIX + uuid4().hex) return response
diff --git a/duprequests/tests.py b/duprequests/tests.py index ecdfe73..62af352 100644 --- a/duprequests/tests.py +++ b/duprequests/tests.py @@ -1,5 +1,6 @@ - from unittest import TestCase +from uuid import uuid5, NAMESPACE_DNS +from urllib.parse import urlencode from django.conf import settings from django.core.cache import caches @@ -10,9 +11,8 @@ from .middleware import DropDuplicatedRequests -CACHE_NAME = getattr(settings, 'DUPLICATED_REQUESTS_CACHE_NAME', 'default') -COOKIE_NAME = getattr(settings, 'DUPLICATED_REQUESTS_COOKIE_NAME', - 'dj-request-id') +CACHE_NAME = getattr(settings, "DUPLICATED_REQUESTS_CACHE_NAME", "default") +COOKIE_NAME = getattr(settings, "DUPLICATED_REQUESTS_COOKIE_NAME", "dj-request-id") class TestDropDuplicatedRequests(TestCase): @@ -24,7 +24,7 @@ def tearDown(self): cache = caches[CACHE_NAME] cache.clear() - def _call_view_using_middleware(self, method, set_cookie=True): + def _call_view_using_middleware(self, method, set_cookie=True, path="/", body={}): class TestView(View): def get(self, request): return HttpResponse() @@ -32,9 +32,9 @@ def get(self, request): put = post = patch = delete = get # Get a new request and process it using middleware - request = getattr(self.factory, method)('/') + request = getattr(self.factory, method)(path, body) if set_cookie: - request.COOKIES[COOKIE_NAME] = 'not-so-unique-id' + request.COOKIES[COOKIE_NAME] = "not-so-unique-id" response = self.middleware.process_request(request) if response is None: @@ -42,39 +42,81 @@ def get(self, request): return self.middleware.process_response(request, response) def test_double_get(self): - response_1 = self._call_view_using_middleware('get') + response_1 = self._call_view_using_middleware("get") self.assertEqual(response_1.status_code, 200) - response_2 = self._call_view_using_middleware('get') + response_2 = self._call_view_using_middleware("get") self.assertEqual(response_2.status_code, 200) def test_double_post(self): - response_1 = self._call_view_using_middleware('post') + response_1 = self._call_view_using_middleware("post", body={"a": "a"}) self.assertEqual(response_1.status_code, 200) - response_2 = self._call_view_using_middleware('post') + response_2 = self._call_view_using_middleware("post", body={"a": "a"}) self.assertEqual(response_2.status_code, 304) def test_double_post_without_cookie(self): - response_1 = self._call_view_using_middleware('post', False) + response_1 = self._call_view_using_middleware("post", False) self.assertEqual(response_1.status_code, 200) - response_2 = self._call_view_using_middleware('post', False) + response_2 = self._call_view_using_middleware("post", False) self.assertEqual(response_2.status_code, 200) def test_double_put(self): - response_1 = self._call_view_using_middleware('put') + response_1 = self._call_view_using_middleware("put") self.assertEqual(response_1.status_code, 200) - response_2 = self._call_view_using_middleware('put') + response_2 = self._call_view_using_middleware("put") self.assertEqual(response_2.status_code, 304) def test_double_patch(self): - response_1 = self._call_view_using_middleware('patch') + response_1 = self._call_view_using_middleware("patch") self.assertEqual(response_1.status_code, 200) - response_2 = self._call_view_using_middleware('patch') + response_2 = self._call_view_using_middleware("patch") self.assertEqual(response_2.status_code, 304) def test_double_delete(self): - response_1 = self._call_view_using_middleware('delete') + response_1 = self._call_view_using_middleware("delete") + 
self.assertEqual(response_1.status_code, 200) + response_2 = self._call_view_using_middleware("delete") + self.assertEqual(response_2.status_code, 304) + + def test_double_requests_different_method(self): + response_1 = self._call_view_using_middleware("patch") + self.assertEqual(response_1.status_code, 200) + response_2 = self._call_view_using_middleware("put") + self.assertEqual(response_2.status_code, 200) + + def test_double_requests_different_path(self): + response_1 = self._call_view_using_middleware("put", path="/123") + self.assertEqual(response_1.status_code, 200) + response_2 = self._call_view_using_middleware("put", path="/456") + self.assertEqual(response_2.status_code, 200) + + def test_double_requests_same_path(self): + response_1 = self._call_view_using_middleware("put", path="/123") + self.assertEqual(response_1.status_code, 200) + response_2 = self._call_view_using_middleware("put", path="/123") + self.assertEqual(response_2.status_code, 304) + + def test_double_requests_different_get_params(self): + response_1 = self._call_view_using_middleware("put", path="/?a=123") + self.assertEqual(response_1.status_code, 200) + response_2 = self._call_view_using_middleware("put", path="/?a=456") + self.assertEqual(response_2.status_code, 200) + + def test_double_requests_same_get_params(self): + response_1 = self._call_view_using_middleware("put", path="/?a=123") + self.assertEqual(response_1.status_code, 200) + response_2 = self._call_view_using_middleware("put", path="/?a=123") + self.assertEqual(response_2.status_code, 304) + + def test_double_requests_different_body(self): + response_1 = self._call_view_using_middleware("put", body={"a": "b"}) + self.assertEqual(response_1.status_code, 200) + response_2 = self._call_view_using_middleware("put", body={"a": "c"}) + self.assertEqual(response_2.status_code, 200) + + def test_double_requests_same_body(self): + response_1 = self._call_view_using_middleware("put", body={"a": "b"}) self.assertEqual(response_1.status_code, 200) - response_2 = self._call_view_using_middleware('delete') + response_2 = self._call_view_using_middleware("put", body={"a": "b"}) self.assertEqual(response_2.status_code, 304) def test_set_cookie(self):
problem_statement: Allow multiple requests at the same time if they are for different paths
Discussion at https://cravefoodsystems.slack.com/archives/C0FLWNK0D/p1585835789007900

## Context

This middleware is responsible for dropping a second/duplicated request. For example, if a client makes two consecutive requests to the same URL, the second should not be processed:

1st: POST /v1/account/user/ -> Returns HTTP 200
2nd: POST /v1/account/user/ -> Returns HTTP 304

## Current Behavior

The middleware will not differentiate URLs, so this will happen:

1st: PUT /v1/account/user/123 -> Returns HTTP 200
2nd: PUT /v1/account/user/345 -> Returns HTTP 304

Note there are 2 different URLs and the second should be processed correctly, not return 304.

Real example:
![image](https://user-images.githubusercontent.com/9268203/89820417-5c791980-db23-11ea-9661-f3e1b5478318.png)

## Expected Behavior

I should be able to make two consecutive requests to different URLs:

1st: PUT /v1/account/user/123 -> Returns HTTP 200
2nd: PUT /v1/account/user/345 -> Returns HTTP 200
hints_text: I have a branch with an initial solution I started 3 months ago: https://github.com/CraveFood/django-duprequests/tree/fix-multiple-requests-per-path
Whoever picks this issue, let me know so we can have a chat ;)
We should also consider same URLs with different payloads:
![2020-08-11 09 46 51](https://user-images.githubusercontent.com/9268203/90019386-b7765200-dc84-11ea-9246-115da451b024.gif)
created_at: 2020-08-18T14:06:54
version: -1.0
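
The patch above fixes this by folding the request path, method, query string and body into the cache key. A minimal sketch of that fingerprint, mirroring `_get_request_hash` from the middleware; the example requests are invented.

```python
from uuid import uuid5, NAMESPACE_DNS
from urllib.parse import urlencode

def request_hash(path, method, query, body):
    # uuid5 is deterministic: identical requests collapse to one key,
    # while any change to path, method, params or body yields a new one.
    name = "--".join([path, method.lower(), urlencode(query), body])
    return str(uuid5(NAMESPACE_DNS, name).node)

# Two PUTs to different URLs no longer share a key...
assert request_hash("/v1/account/user/123", "PUT", {}, "") != \
       request_hash("/v1/account/user/345", "PUT", {}, "")
# ...but a true duplicate does.
assert request_hash("/v1/account/user/123", "PUT", {}, "") == \
       request_hash("/v1/account/user/123", "PUT", {}, "")
```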

repo: IBM/complex-linear-network-analyzer
pull_number: 15
instance_id: IBM__complex-linear-network-analyzer-15
issue_numbers: ['13']
base_commit: 8c3aa3a20e466d5993d3f4c8a6d02ebef0a95fa5
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 9993882..4fdb9fa 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -26,8 +26,9 @@ pull request so it can be tracked. ### Merge approval -The project maintainers use LGTM (Looks Good To Me) in comments on the code -review to indicate acceptance. +Project maintainers use LGTM (Looks Good To Me) in comments on the code +review to indicate acceptance. For small fixes only one project maintainer needs to approve, +for larger changes two maintainers should approve. ## Legal @@ -64,13 +65,16 @@ local git repository using the following command: ``` git commit -s ``` + ## Setup For new features create a feature branch. Code in the master branch should always be stable and ready for deployment. ## Testing -Please run all unittests in the tests directory before pushing any code changes. +Please run all unittests in the tests directory before pushing any code changes. So far the tests do not cover any +of the visualization features. In case you update them, please verify them manually. Expansion of tests to cover +visualization features is planned and should be added soon. ## Coding style guidelines diff --git a/colna/analyticnetwork.py b/colna/analyticnetwork.py index a478f8f..f44ed27 100644 --- a/colna/analyticnetwork.py +++ b/colna/analyticnetwork.py @@ -20,8 +20,10 @@ import numpy as np from tqdm import tqdm from copy import deepcopy +from pathlib import Path from multiprocessing.dummy import Pool as ThreadPool import multiprocessing +import os class Edge(object): @@ -299,7 +301,7 @@ def visualize(self, show_edge_labels=True, path='network', skip_colon=False, for :param show_edge_labels: if True, edge labels showing the amplitude, phase and delay of the edge are drawn. :type show_edge_labels: bool - :param path: output path for file + :param path: output path for file. If the path does not exist it will be created automatically. :type path: str :param skip_colon: Skip nodes which contain ':' in their name. This is used for PhysicalNetwork visualization. :type skip_colon: bool @@ -327,6 +329,9 @@ def visualize(self, show_edge_labels=True, path='network', skip_colon=False, for else: s.edge(edge.start.replace(":", ""), edge.end.replace(":", "")) + head, tail = os.path.split(path) + if head != '': + Path(head).mkdir(parents=True, exist_ok=True) s.render(path, view=False, format=format) def get_html_result(self, name, time_symbol='t', evaluate=False, feed_dict=None, use_shared_default=False, @@ -350,7 +355,7 @@ def get_html_result(self, name, time_symbol='t', evaluate=False, feed_dict=None, :type use_shared_default: bool :param linebreak_limit: A line break will be added roughly every linebreak_limit chars in the latex string. Set to 1 for a linebreak after each term. Set to 0 to get a latex string on a single line. Default: 1 :type linebreak_limit: int - :param path: Output path where html file containing the MathJax code is stored + :param path: Output path where html file containing the MathJax code is stored. If the path does not exist it will be created automatically. :type path: str :param precision: Number of significant digits to be output. Set to 0 to use the default value of str() method. 
:type precision: int @@ -397,6 +402,10 @@ def get_html_result(self, name, time_symbol='t', evaluate=False, feed_dict=None, output_html = template.format('waves at nodes' + str(name), raw_string) + head, tail = os.path.split(path) + if head != '': + Path(head).mkdir(parents=True, exist_ok=True) + with open(path, 'w') as file: file.write(output_html) diff --git a/docs/_modules/colna/analyticnetwork.html b/docs/_modules/colna/analyticnetwork.html index 168a461..f3a6c3c 100644 --- a/docs/_modules/colna/analyticnetwork.html +++ b/docs/_modules/colna/analyticnetwork.html @@ -169,8 +169,10 @@ <h1>Source code for colna.analyticnetwork</h1><div class="highlight"><pre> <span class="kn">import</span> <span class="nn">numpy</span> <span class="k">as</span> <span class="nn">np</span> <span class="kn">from</span> <span class="nn">tqdm</span> <span class="kn">import</span> <span class="n">tqdm</span> <span class="kn">from</span> <span class="nn">copy</span> <span class="kn">import</span> <span class="n">deepcopy</span> +<span class="kn">from</span> <span class="nn">pathlib</span> <span class="kn">import</span> <span class="n">Path</span> <span class="kn">from</span> <span class="nn">multiprocessing.dummy</span> <span class="kn">import</span> <span class="n">Pool</span> <span class="k">as</span> <span class="n">ThreadPool</span> <span class="kn">import</span> <span class="nn">multiprocessing</span> +<span class="kn">import</span> <span class="nn">os</span> <div class="viewcode-block" id="Edge"><a class="viewcode-back" href="../../colna.html#colna.analyticnetwork.Edge">[docs]</a><span class="k">class</span> <span class="nc">Edge</span><span class="p">(</span><span class="nb">object</span><span class="p">):</span> @@ -448,7 +450,7 @@ <h1>Source code for colna.analyticnetwork</h1><div class="highlight"><pre> <span class="sd"> :param show_edge_labels: if True, edge labels showing the amplitude, phase and delay of the edge are drawn.</span> <span class="sd"> :type show_edge_labels: bool</span> -<span class="sd"> :param path: output path for file</span> +<span class="sd"> :param path: output path for file. If the path does not exist it will be created automatically.</span> <span class="sd"> :type path: str</span> <span class="sd"> :param skip_colon: Skip nodes which contain &#39;:&#39; in their name. 
This is used for PhysicalNetwork visualization.</span> <span class="sd"> :type skip_colon: bool</span> @@ -476,6 +478,9 @@ <h1>Source code for colna.analyticnetwork</h1><div class="highlight"><pre> <span class="k">else</span><span class="p">:</span> <span class="n">s</span><span class="o">.</span><span class="n">edge</span><span class="p">(</span><span class="n">edge</span><span class="o">.</span><span class="n">start</span><span class="o">.</span><span class="n">replace</span><span class="p">(</span><span class="s2">&quot;:&quot;</span><span class="p">,</span> <span class="s2">&quot;&quot;</span><span class="p">),</span> <span class="n">edge</span><span class="o">.</span><span class="n">end</span><span class="o">.</span><span class="n">replace</span><span class="p">(</span><span class="s2">&quot;:&quot;</span><span class="p">,</span> <span class="s2">&quot;&quot;</span><span class="p">))</span> + <span class="n">head</span><span class="p">,</span> <span class="n">tail</span> <span class="o">=</span> <span class="n">os</span><span class="o">.</span><span class="n">path</span><span class="o">.</span><span class="n">split</span><span class="p">(</span><span class="n">path</span><span class="p">)</span> + <span class="k">if</span> <span class="n">head</span> <span class="o">!=</span> <span class="s1">&#39;&#39;</span><span class="p">:</span> + <span class="n">Path</span><span class="p">(</span><span class="n">head</span><span class="p">)</span><span class="o">.</span><span class="n">mkdir</span><span class="p">(</span><span class="n">parents</span><span class="o">=</span><span class="kc">True</span><span class="p">,</span> <span class="n">exist_ok</span><span class="o">=</span><span class="kc">True</span><span class="p">)</span> <span class="n">s</span><span class="o">.</span><span class="n">render</span><span class="p">(</span><span class="n">path</span><span class="p">,</span> <span class="n">view</span><span class="o">=</span><span class="kc">False</span><span class="p">,</span> <span class="nb">format</span><span class="o">=</span><span class="nb">format</span><span class="p">)</span></div> <div class="viewcode-block" id="Network.get_html_result"><a class="viewcode-back" href="../../colna.html#colna.analyticnetwork.Network.get_html_result">[docs]</a> <span class="k">def</span> <span class="nf">get_html_result</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">name</span><span class="p">,</span> <span class="n">time_symbol</span><span class="o">=</span><span class="s1">&#39;t&#39;</span><span class="p">,</span> <span class="n">evaluate</span><span class="o">=</span><span class="kc">False</span><span class="p">,</span> <span class="n">feed_dict</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span> <span class="n">use_shared_default</span><span class="o">=</span><span class="kc">False</span><span class="p">,</span> @@ -499,7 +504,7 @@ <h1>Source code for colna.analyticnetwork</h1><div class="highlight"><pre> <span class="sd"> :type use_shared_default: bool</span> <span class="sd"> :param linebreak_limit: A line break will be added roughly every linebreak_limit chars in the latex string. Set to 1 for a linebreak after each term. Set to 0 to get a latex string on a single line. 
Default: 1</span> <span class="sd"> :type linebreak_limit: int</span> -<span class="sd"> :param path: Output path where html file containing the MathJax code is stored</span> +<span class="sd"> :param path: Output path where html file containing the MathJax code is stored. If the path does not exist it will be created automatically.</span> <span class="sd"> :type path: str</span> <span class="sd"> :param precision: Number of significant digits to be output. Set to 0 to use the default value of str() method.</span> <span class="sd"> :type precision: int</span> @@ -546,6 +551,10 @@ <h1>Source code for colna.analyticnetwork</h1><div class="highlight"><pre> <span class="n">output_html</span> <span class="o">=</span> <span class="n">template</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="s1">&#39;waves at nodes&#39;</span> <span class="o">+</span> <span class="nb">str</span><span class="p">(</span><span class="n">name</span><span class="p">),</span> <span class="n">raw_string</span><span class="p">)</span> + <span class="n">head</span><span class="p">,</span> <span class="n">tail</span> <span class="o">=</span> <span class="n">os</span><span class="o">.</span><span class="n">path</span><span class="o">.</span><span class="n">split</span><span class="p">(</span><span class="n">path</span><span class="p">)</span> + <span class="k">if</span> <span class="n">head</span> <span class="o">!=</span> <span class="s1">&#39;&#39;</span><span class="p">:</span> + <span class="n">Path</span><span class="p">(</span><span class="n">head</span><span class="p">)</span><span class="o">.</span><span class="n">mkdir</span><span class="p">(</span><span class="n">parents</span><span class="o">=</span><span class="kc">True</span><span class="p">,</span> <span class="n">exist_ok</span><span class="o">=</span><span class="kc">True</span><span class="p">)</span> + <span class="k">with</span> <span class="nb">open</span><span class="p">(</span><span class="n">path</span><span class="p">,</span> <span class="s1">&#39;w&#39;</span><span class="p">)</span> <span class="k">as</span> <span class="n">file</span><span class="p">:</span> <span class="n">file</span><span class="o">.</span><span class="n">write</span><span class="p">(</span><span class="n">output_html</span><span class="p">)</span></div> @@ -1219,7 +1228,7 @@ <h1>Source code for colna.analyticnetwork</h1><div class="highlight"><pre> <span class="sd"> :param timestep: Defines the sampling rate.</span> <span class="sd"> :return: t_sampled, x_sampled: resampled time and signal vector</span> <span class="sd"> &quot;&quot;&quot;</span> - <span class="n">t_sampled</span> <span class="o">=</span> <span class="n">np</span><span class="o">.</span><span class="n">linspace</span><span class="p">(</span><span class="n">start</span><span class="o">=</span><span class="bp">self</span><span class="o">.</span><span class="n">t0</span><span class="p">,</span> <span class="n">stop</span><span class="o">=</span><span class="bp">self</span><span class="o">.</span><span class="n">t1</span><span class="p">,</span> <span class="n">num</span><span class="o">=</span><span class="mi">1</span> <span class="o">+</span> <span class="nb">round</span><span class="p">((</span><span class="bp">self</span><span class="o">.</span><span class="n">t1</span> <span class="o">-</span> <span class="bp">self</span><span class="o">.</span><span class="n">t0</span><span class="p">)</span> <span class="o">/</span> <span class="n">timestep</span><span 
class="p">))</span> + <span class="n">t_sampled</span> <span class="o">=</span> <span class="n">np</span><span class="o">.</span><span class="n">linspace</span><span class="p">(</span><span class="n">start</span><span class="o">=</span><span class="bp">self</span><span class="o">.</span><span class="n">t0</span><span class="p">,</span> <span class="n">stop</span><span class="o">=</span><span class="bp">self</span><span class="o">.</span><span class="n">t1</span><span class="p">,</span> <span class="n">num</span><span class="o">=</span><span class="nb">int</span><span class="p">(</span><span class="mi">1</span> <span class="o">+</span> <span class="nb">round</span><span class="p">((</span><span class="bp">self</span><span class="o">.</span><span class="n">t1</span> <span class="o">-</span> <span class="bp">self</span><span class="o">.</span><span class="n">t0</span><span class="p">)</span> <span class="o">/</span> <span class="n">timestep</span><span class="p">)))</span> <span class="n">x_sampled</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">_interpolate_constant</span><span class="p">(</span><span class="n">x</span><span class="o">=</span><span class="n">t_sampled</span><span class="p">,</span> <span class="n">xp</span><span class="o">=</span><span class="n">t</span><span class="p">,</span> <span class="n">yp</span><span class="o">=</span><span class="n">x</span><span class="p">)</span> <span class="k">return</span> <span class="n">t_sampled</span><span class="p">,</span> <span class="n">x_sampled</span> diff --git a/docs/colna.html b/docs/colna.html index 016a725..cb63935 100644 --- a/docs/colna.html +++ b/docs/colna.html @@ -470,7 +470,7 @@ <h3>Network<a class="headerlink" href="#network" title="Permalink to this headli <li><p><strong>feed_dict</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#dict" title="(in Python v3.8)"><em>dict</em></a>) – a dictionary specifying values of variables by name. If only some variables are specified, for all other variables the default value will be used.</p></li> <li><p><strong>use_shared_default</strong> (<a class="reference external" href="https://docs.python.org/3/library/functions.html#bool" title="(in Python v3.8)"><em>bool</em></a>) – set to true if shared defaults should be used with SymNums (higher speed) when no feed_dict is provided, set to false if the default value of each SymNum should be used instead (higher accuracy). The value is ignored if feed_dict is not None. Default: False</p></li> <li><p><strong>linebreak_limit</strong> (<a class="reference external" href="https://docs.python.org/3/library/functions.html#int" title="(in Python v3.8)"><em>int</em></a>) – A line break will be added roughly every linebreak_limit chars in the latex string. Set to 1 for a linebreak after each term. Set to 0 to get a latex string on a single line. Default: 1</p></li> -<li><p><strong>path</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#str" title="(in Python v3.8)"><em>str</em></a>) – Output path where html file containing the MathJax code is stored</p></li> +<li><p><strong>path</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#str" title="(in Python v3.8)"><em>str</em></a>) – Output path where html file containing the MathJax code is stored. 
If the path does not exist it will be created automatically.</p></li> <li><p><strong>precision</strong> (<a class="reference external" href="https://docs.python.org/3/library/functions.html#int" title="(in Python v3.8)"><em>int</em></a>) – Number of significant digits to be output. Set to 0 to use the default value of str() method.</p></li> </ul> </dd> @@ -595,7 +595,7 @@ <h3>Network<a class="headerlink" href="#network" title="Permalink to this headli <dt class="field-odd">Parameters</dt> <dd class="field-odd"><ul class="simple"> <li><p><strong>show_edge_labels</strong> (<a class="reference external" href="https://docs.python.org/3/library/functions.html#bool" title="(in Python v3.8)"><em>bool</em></a>) – if True, edge labels showing the amplitude, phase and delay of the edge are drawn.</p></li> -<li><p><strong>path</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#str" title="(in Python v3.8)"><em>str</em></a>) – output path for file</p></li> +<li><p><strong>path</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#str" title="(in Python v3.8)"><em>str</em></a>) – output path for file. If the path does not exist it will be created automatically.</p></li> <li><p><strong>skip_colon</strong> (<a class="reference external" href="https://docs.python.org/3/library/functions.html#bool" title="(in Python v3.8)"><em>bool</em></a>) – Skip nodes which contain ‘:’ in their name. This is used for PhysicalNetwork visualization.</p></li> <li><p><strong>format</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#str" title="(in Python v3.8)"><em>str</em></a>) – output format (supports all format options of Graphviz), e.g. ‘pdf’, ‘svg’</p></li> </ul> diff --git a/docs/examples.html b/docs/examples.html index 7beb413..f4b8677 100644 --- a/docs/examples.html +++ b/docs/examples.html @@ -451,7 +451,7 @@ <h2>Feedforward Network with Testbench<a class="headerlink" href="#feedforward-n <span class="n">plt</span><span class="o">.</span><span class="n">ylabel</span><span class="p">(</span><span class="s1">&#39;|x|&#39;</span><span class="p">)</span> <span class="n">plt</span><span class="o">.</span><span class="n">legend</span><span class="p">([</span><span class="s1">&#39;Input&#39;</span><span class="p">,</span> <span class="s1">&#39;Output C&#39;</span><span class="p">,</span> <span class="s1">&#39;Output D&#39;</span><span class="p">],</span> <span class="n">loc</span><span class="o">=</span><span class="s1">&#39;lower left&#39;</span><span class="p">)</span> <span class="n">plt</span><span class="o">.</span><span class="n">grid</span><span class="p">()</span> -<span class="n">plt</span><span class="o">.</span><span class="n">savefig</span><span class="p">(</span><span class="s1">&#39;./visualizations/feedforward_with_testbench_output.svg&#39;</span><span class="p">)</span> +<span class="c1"># plt.savefig(&#39;./visualizations/feedforward_with_testbench_output.svg&#39;)</span> <span class="n">plt</span><span class="o">.</span><span class="n">show</span><span class="p">()</span> </pre></div> </td></tr></table></div> @@ -1183,7 +1183,7 @@ <h2>Symbolic Feedforward Network with Testbench<a class="headerlink" href="#symb <span class="n">plt</span><span class="o">.</span><span class="n">ylabel</span><span class="p">(</span><span class="s1">&#39;|x|&#39;</span><span class="p">)</span> <span class="n">plt</span><span class="o">.</span><span class="n">legend</span><span class="p">([</span><span 
class="s1">&#39;Input&#39;</span><span class="p">,</span> <span class="s1">&#39;Output C&#39;</span><span class="p">,</span> <span class="s1">&#39;Output D&#39;</span><span class="p">,</span> <span class="s1">&#39;Output C (Feed Dict 2)&#39;</span><span class="p">,</span> <span class="s1">&#39;Output D (Feed Dict 2)&#39;</span><span class="p">],</span> <span class="n">loc</span><span class="o">=</span><span class="s1">&#39;lower left&#39;</span><span class="p">)</span> <span class="n">plt</span><span class="o">.</span><span class="n">grid</span><span class="p">()</span> -<span class="n">plt</span><span class="o">.</span><span class="n">savefig</span><span class="p">(</span><span class="s1">&#39;./visualizations/symnum_feedforward_tb_output.svg&#39;</span><span class="p">)</span> +<span class="c1"># plt.savefig(&#39;./visualizations/symnum_feedforward_tb_output.svg&#39;)</span> <span class="n">plt</span><span class="o">.</span><span class="n">show</span><span class="p">()</span> </pre></div> </td></tr></table></div> diff --git a/examples/docdemo.py b/examples/docdemo.py index 71ed63d..89f28e8 100644 --- a/examples/docdemo.py +++ b/examples/docdemo.py @@ -89,7 +89,9 @@ plt.legend(['Input', 'Output'], loc='lower left') plt.grid() plt.tight_layout() -plt.savefig('./visualizations/docdemo_tb_output.png', dpi=600) -plt.savefig('./visualizations/docdemo_tb_output.svg') + +# save the figures - make sure the path exists +# plt.savefig('./visualizations/docdemo_tb_output.png', dpi=600) +# plt.savefig('./visualizations/docdemo_tb_output.svg') plt.show() \ No newline at end of file diff --git a/examples/quickstart.py b/examples/quickstart.py index 77dcefb..9a059c3 100644 --- a/examples/quickstart.py +++ b/examples/quickstart.py @@ -73,7 +73,7 @@ plt.xlabel('Time') plt.ylabel('|x|') plt.legend(['Input', 'Output C'], loc='lower left') -plt.savefig('./visualizations/quickstart.svg') +# plt.savefig('./visualizations/quickstart.svg') plt.show() # Show paths leading to node c and output waves arriving at node c diff --git a/setup.py b/setup.py index afb2a6c..bf30585 100644 --- a/setup.py +++ b/setup.py @@ -18,7 +18,7 @@ from setuptools import setup -with open("README.md", "r") as fh: +with open("READMEpypi.md", "r") as fh: long_description = fh.read() setup(
diff --git a/examples/feedforwardwithtestbench.py b/examples/feedforwardwithtestbench.py index a859ccc..4b83606 100644 --- a/examples/feedforwardwithtestbench.py +++ b/examples/feedforwardwithtestbench.py @@ -72,5 +72,5 @@ plt.ylabel('|x|') plt.legend(['Input', 'Output C', 'Output D'], loc='lower left') plt.grid() -plt.savefig('./visualizations/feedforward_with_testbench_output.svg') +# plt.savefig('./visualizations/feedforward_with_testbench_output.svg') plt.show() \ No newline at end of file diff --git a/examples/symbolicfeedforwardwithtestbench.py b/examples/symbolicfeedforwardwithtestbench.py index d1a4f1f..799d39f 100644 --- a/examples/symbolicfeedforwardwithtestbench.py +++ b/examples/symbolicfeedforwardwithtestbench.py @@ -79,5 +79,5 @@ plt.ylabel('|x|') plt.legend(['Input', 'Output C', 'Output D', 'Output C (Feed Dict 2)', 'Output D (Feed Dict 2)'], loc='lower left') plt.grid() -plt.savefig('./visualizations/symnum_feedforward_tb_output.svg') +# plt.savefig('./visualizations/symnum_feedforward_tb_output.svg') plt.show() \ No newline at end of file
Create output directories if they do not exist Output directories (for visualizations and pretty print export) should be created automatically if they do not yet exist. Using pathlib this can be achieved in a single line: from pathlib import Path; Path("/test/output").mkdir(parents=True, exist_ok=True). This issue was first mentioned by @brandondube in the JOSS review thread (https://github.com/openjournals/joss-reviews/issues/2073#issuecomment-583763095)
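For reference, a minimal standalone sketch of the guard the patch above adds before s.render(...) and open(path, 'w'); the ensure_parent_dir helper name is illustrative and not part of colna:

import os
from pathlib import Path

def ensure_parent_dir(path):
    """Create the directory part of *path* if it does not exist yet."""
    head, _tail = os.path.split(path)
    if head != '':
        # parents=True creates intermediate directories as needed;
        # exist_ok=True makes repeated calls harmless.
        Path(head).mkdir(parents=True, exist_ok=True)

# e.g. before saving a figure or rendering a graph:
ensure_parent_dir('./visualizations/quickstart.svg')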
2020-02-11T09:49:00
-1.0
apache/buildstream-plugins
39
apache__buildstream-plugins-39
['34']
7c23e15f01bfd724cb8b332c9b6a43bc2be35bfc
diff --git a/src/buildstream_plugins/elements/autotools.yaml b/src/buildstream_plugins/elements/autotools.yaml index 38cb1c4..0e2aa1c 100644 --- a/src/buildstream_plugins/elements/autotools.yaml +++ b/src/buildstream_plugins/elements/autotools.yaml @@ -31,9 +31,6 @@ variables: # Element-specific extra arguments to be passed to `configure`. conf-local: '' - # For backwards compatibility only, do not use. - conf-extra: '' - conf-cmd: "%{conf-root}/configure" conf-args: | @@ -50,7 +47,7 @@ variables: --localstatedir=%{localstatedir} \ --sharedstatedir=%{sharedstatedir} \ --mandir=%{mandir} \ - --infodir=%{infodir} %{conf-extra} %{conf-global} %{conf-local} + --infodir=%{infodir} %{conf-global} %{conf-local} configure: | diff --git a/src/buildstream_plugins/elements/cmake.yaml b/src/buildstream_plugins/elements/cmake.yaml index 01ff8e3..8dfab1d 100644 --- a/src/buildstream_plugins/elements/cmake.yaml +++ b/src/buildstream_plugins/elements/cmake.yaml @@ -22,16 +22,13 @@ variables: # Element-specific extra arguments to be passed to `cmake`. cmake-local: '' - # For backwards compatibility only, do not use. - cmake-extra: '' - # The cmake generator to use generator: Ninja cmake-args: | -DCMAKE_INSTALL_PREFIX:PATH="%{prefix}" \ - -DCMAKE_INSTALL_LIBDIR:PATH="%{lib}" %{cmake-extra} %{cmake-global} %{cmake-local} + -DCMAKE_INSTALL_LIBDIR:PATH="%{lib}" %{cmake-global} %{cmake-local} cmake: | diff --git a/src/buildstream_plugins/elements/meson.yaml b/src/buildstream_plugins/elements/meson.yaml index 8f08e0b..c87fb7d 100644 --- a/src/buildstream_plugins/elements/meson.yaml +++ b/src/buildstream_plugins/elements/meson.yaml @@ -22,9 +22,6 @@ variables: # Element-specific extra arguments to be passed to `meson`. meson-local: '' - # For backwards compatibility only, do not use. - meson-extra: '' - meson-args: | --prefix=%{prefix} \ @@ -38,7 +35,7 @@ variables: --localstatedir=%{localstatedir} \ --sharedstatedir=%{sharedstatedir} \ --mandir=%{mandir} \ - --infodir=%{infodir} %{meson-extra} %{meson-global} %{meson-local} + --infodir=%{infodir} %{meson-global} %{meson-local} meson: meson setup %{conf-root} %{build-dir} %{meson-args} diff --git a/tox.ini b/tox.ini index 0019b82..743826e 100644 --- a/tox.ini +++ b/tox.ini @@ -57,7 +57,7 @@ setenv = py{37,38,39,310}: XDG_CACHE_HOME = {envtmpdir}/cache py{37,38,39,310}: XDG_CONFIG_HOME = {envtmpdir}/config py{37,38,39,310}: XDG_DATA_HOME = {envtmpdir}/share - !master: BST_VERSION = 1.95.1 + !master: BST_VERSION = 1.95.5 master: BST_VERSION = master whitelist_externals =
diff --git a/tests/cachekey/project/elements/autotools1.expected b/tests/cachekey/project/elements/autotools1.expected index b2b346c..3d73bca 100644 --- a/tests/cachekey/project/elements/autotools1.expected +++ b/tests/cachekey/project/elements/autotools1.expected @@ -1,1 +1,1 @@ -569d7d8e4792eb6d1f89328c50c42b6316ffdd5387a5f15df6e097515898faa4 \ No newline at end of file +98ae7c344c01c64d2597338ec632e22a36f1b78ce502a9f3ed668edd0921dcb3 \ No newline at end of file diff --git a/tests/cachekey/project/elements/cmake1.expected b/tests/cachekey/project/elements/cmake1.expected index 6a850ea..1401206 100644 --- a/tests/cachekey/project/elements/cmake1.expected +++ b/tests/cachekey/project/elements/cmake1.expected @@ -1,1 +1,1 @@ -62b5153b0c2643d3402bc8e6e26b8586a119d321da020470642c215aaeec2b0c \ No newline at end of file +8f1eecc3592e71c82de005be84701025ae0f104a33f5f49f5b117743e79e4281 \ No newline at end of file diff --git a/tests/cachekey/project/elements/meson1.expected b/tests/cachekey/project/elements/meson1.expected index a43c69a..90c80cf 100644 --- a/tests/cachekey/project/elements/meson1.expected +++ b/tests/cachekey/project/elements/meson1.expected @@ -1,1 +1,1 @@ -eb27d2832662f0b5d9fcb4e6304611e85226b282cc8f83ac79bdc56031307758 \ No newline at end of file +06774620db3137433e1ec5efb5872d6379152e9e4b1c64b7374d940334f9a68a \ No newline at end of file diff --git a/tests/cachekey/project/target.expected b/tests/cachekey/project/target.expected index 6b18d77..1c1c9bf 100644 --- a/tests/cachekey/project/target.expected +++ b/tests/cachekey/project/target.expected @@ -1,1 +1,1 @@ -7881a8251b8128165d28720f1e5b30e4e914e2711700e4d7231c7ab91f10ba39 \ No newline at end of file +253b12d080d6bedfe2af58b035e02ba5aa4a1d719e54d04071685e18d0fea90a \ No newline at end of file
Drop {conf,cmake,meson}-extra They are marked as "For backwards compatibility only, do not use."; we should probably drop them since we're breaking backwards compatibility anyway?
Yes, please. It's now or never. This will break cache keys. Ping @gtristan
2022-12-03T05:29:30
-1.0
DiamondLightSource/ispyb-api
107
DiamondLightSource__ispyb-api-107
['106']
5de9b59b49a8a624e4f2cdbd1b27c841f3f6ab0e
diff --git a/.travis.yml b/.travis.yml index 2e01e227..961b6ae3 100644 --- a/.travis.yml +++ b/.travis.yml @@ -17,7 +17,7 @@ addons: mariadb: 10.3 before_install: -- wget https://github.com/DiamondLightSource/ispyb-database/releases/download/v1.13.1/ispyb-database-1.13.1.tar.gz +- wget -t 3 -w 60 --random-wait https://github.com/DiamondLightSource/ispyb-database/releases/download/v1.13.1/ispyb-database-1.13.1.tar.gz - tar xvfz ispyb-database-1.13.1.tar.gz - mysql_upgrade -u root - mysql -u root -e "CREATE DATABASE ispybtest; SET GLOBAL log_bin_trust_function_creators=ON;" @@ -47,13 +47,13 @@ after_success: coveralls # Assuming you have installed the travis-ci CLI tool, after you # create the Github repo and add it to Travis, run the # following command to finish PyPI deployment setup: -# $ travis encrypt --add deploy.password +# $ travis encrypt --pro --add deploy.password -r DiamondLightSource/ispyb-api deploy: provider: pypi distributions: sdist bdist_wheel user: mgerstel password: - secure: ushpKCFhI2FId4G4H5r0SZnLiN2+0ORnLA7YMDCXdEEnUvMBceB4IO1x22ScCI5GsSahc9jhxjismrQ4r7p1gfW8YiQneSeZbo5Yorqm6+GEMLQ75akge3ZMpCLfPF25Zz0VN1Ra8YMny6k3s8tmBi7r7TneCUn7hQ5C51OdPgBIcUPEAkUxkJReuAoVO+iKk2Re/r3cGkE+FzZ52aMmDT5I5wg9aP8l7e1PZ4k8S8oiy70yzsqrcXatPA61NHh2ux7Hhys5yRS4j2QPDyjweQKl9rgrsLFnBCD1IswVAUjjqjsp1NCfeb/Ba7xRDpqM0bhXjTn0WlqHV/n5C4NWCKiTGwnL+w7Dh21p+pLgEcXhhll0+2WP0KuY0vwCUJ8JwSlGi7sJ/QgdQl3FUDXbtttoLpY9o5El5/l4gbb/+iskH1LxYHtJiPjgK+6Pu/hKTNF/PCnAd5IFjvzulQ1l2AUu/0ZhCh0FBmu/cemULsyMB0QM/8VqvvmK7Mtc3NjKaPQKi3sdn7lb+r9yVDssxoAZAyZ9VHn84VuzsGK1DD7KmHyl8zfEwG5TMgn3FpEzeiCEud2elAqgqkzOMte/HgfNi7sJ097p6ev0sJBnRpSVqdHf5r4j+Fql0c2aH+ivFRxuwXthTb+usdQzx8XzwHKiVea5mDhGBKrXm3fuM0s= + secure: ck9nBs0RWNGz07rYLV9CqVOb7y00F6aNHcvWKd7j7HZyS7kAXkUowaBMbHMSLndj9yj3YoQj9Nv9XgBn+CelyODpK3uSBRvzDXstEyZTus+MlgaJda7J55+QCMrWer5ZOSoPxoIM04duhCyCUw10Q7G+sPkTp4SJGJ5z8MoVhtcE2zJiWdSfJbU8CcTC64VEE3wB/19KiKDH5mfuMCp8xE1LP4pW7w6WEsj76O3NUbWyXzfEo9Ydeh1eISP++ATyPI5pZCMOySUhT7BE6THKb6NPS6ItxRcbR12Qun4MTaySVoOyb+Q8s9H6WmLDHj8uoN7zmwrks0Tl1bPIctl+xa4S2GzKAqGlVjkH+SYOEn30+iI79C5FBT6ljTEr+Vkob0UwlTf/50TKF1UCJThiqagvxdb0+L2+1OEmLDzXMGJyGC04IeMYwF//5QZ7mHzdRugmBGAeHo7utwCn7MlmoMgHWCP7ES53YZFHfR0nDO5lOzk6awvS6Aa8zyGw35InFjem3EeF0MM07n0qfSh7EF59uhj4fcKeM+UWbhb09jIMW7Xz2dytgDgSNz24agS6G7uFbEnoJ3s4yVC4bZBPae5tiQxMsAWUeo1m/4bXU7dfRGFDP4nlrQ6yZq4H9GbYIYesEA7ol3WhJNdRBJMpu4xUtylHzhER8Q68FMy4EhI= on: tags: true repo: DiamondLightSource/ispyb-api
diff --git a/tests/test_em_structures.py b/tests/test_em_structures.py index db10ab93..9d729fb1 100644 --- a/tests/test_em_structures.py +++ b/tests/test_em_structures.py @@ -65,10 +65,8 @@ def test_insert_ctf(testdb): params["dosePerFrame"] = 20 motion_cor_id = emacquisition.insert_motion_correction(list(params.values())) - params = emacquisition.get_ctf_params() - params["motionCorrectionId"] = motion_cor_id - ctf_id = emacquisition.insert_ctf(list(params.values())) - assert ctf_id is not None + ctf_id = emacquisition.insert_ctf(motion_correction_id=motion_cor_id) + assert ctf_id def test_insert_drift(testdb):
Testing broken Travis doesn't seem to run any more. Should probably look into setting up Azure for the repository.
Travis does run, but for some reason it's not visible on the actual PRs. You can still find the results here: https://travis-ci.org/github/DiamondLightSource/ispyb-api Indeed. I suspect it's a travis-ci.org deprecation thing. I'll see if I can migrate the repository to travis-ci.com
2020-08-27T08:18:32
-1.0
aljp/drf_model_pusher
39
aljp__drf_model_pusher-39
['33']
de6cd8300fe03e71c9c92b361d4fa21c1667d84e
diff --git a/Pipfile b/Pipfile index d44695f..fa6c3d7 100644 --- a/Pipfile +++ b/Pipfile @@ -6,7 +6,7 @@ name = "pypi" [packages] Django = "==2.0" djangorestframework = "*" -pusher = "*" +pusher = "==2.1.4" [dev-packages] pytest = "*" diff --git a/drf_model_pusher/backends.py b/drf_model_pusher/backends.py index 38c1ba6..14223de 100644 --- a/drf_model_pusher/backends.py +++ b/drf_model_pusher/backends.py @@ -3,6 +3,8 @@ """ from collections import defaultdict +from django.conf import settings + from drf_model_pusher.providers import PusherProvider from drf_model_pusher.signals import view_pre_destroy, view_post_save @@ -63,26 +65,20 @@ def get_pusher_socket(self, view): def push_change(self, event, instance=None, pre_destroy=False, ignore=True): """Send a signal to push the update""" channels, event_name, data = self.get_packet(event, instance) + kwargs = dict( + sender=self.__class__, + instance=self, + channels=channels, + event_name=event_name, + data=data, + socket_id=self.pusher_socket_id if ignore else None, + provider_class=self.provider_class, + ) + if pre_destroy: - view_pre_destroy.send( - sender=self.__class__, - instance=self, - channels=channels, - event_name=event_name, - data=data, - socket_id=self.pusher_socket_id if ignore else None, - provider_class=self.provider_class, - ) + view_pre_destroy.send(**kwargs) else: - view_post_save.send( - sender=self.__class__, - instance=self, - channels=channels, - event_name=event_name, - data=data, - socket_id=self.pusher_socket_id if ignore else None, - provider_class=self.provider_class, - ) + view_post_save.send(**kwargs) def get_event_name(self, event_type): """Return the model name and the event_type separated by a dot""" @@ -127,6 +123,19 @@ def get_channel(self, instance=None): return "private-{channel}".format(channel=channel) +class PresencePusherBackend(PusherBackend): + """PresencePusherBackend is the base class for implementing serializers + with Pusher and prefixing the channel with `presence-`.""" + + class Meta: + abstract = True + + def get_channel(self, instance=None): + """Return the channel prefixed with `presence-`""" + channel = super().get_channel(instance=instance) + return "presence-{channel}".format(channel=channel) + + def get_models_pusher_backends(model): """Return the pusher backends registered for a model""" return pusher_backend_registry.get(model.__name__.lower(), []) diff --git a/drf_model_pusher/providers.py b/drf_model_pusher/providers.py index d66e358..65ee6e1 100644 --- a/drf_model_pusher/providers.py +++ b/drf_model_pusher/providers.py @@ -29,9 +29,15 @@ def configure(self): ) def trigger(self, channels, event_name, data, socket_id=None): + if not isinstance(channels, list): + raise TypeError("channels must be a list, received {0}".format(str(type(channels)))) + if self._disabled: return + if self._pusher is None: + self.configure() + self._pusher.trigger(channels, event_name, data, socket_id) diff --git a/example/models.py b/example/models.py index cf68b27..e0a9cc4 100644 --- a/example/models.py +++ b/example/models.py @@ -1,5 +1,13 @@ from django.db import models -class MyModel(models.Model): +class MyPublicModel(models.Model): + name = models.CharField(max_length=32) + + +class MyPrivateModel(models.Model): + name = models.CharField(max_length=32) + + +class MyPresenceModel(models.Model): name = models.CharField(max_length=32) diff --git a/example/pusher_backends.py b/example/pusher_backends.py index f5a9a72..bc8363e 100644 --- a/example/pusher_backends.py +++ b/example/pusher_backends.py @@ 
-1,6 +1,14 @@ -from drf_model_pusher.backends import PusherBackend -from example.serializers import MyModelSerializer +from drf_model_pusher.backends import PusherBackend, PrivatePusherBackend, PresencePusherBackend +from example.serializers import MyPublicModelSerializer, MyPrivateModelSerializer, MyPresenceModelSerializer -class MyModelPusherBackend(PusherBackend): - serializer_class = MyModelSerializer +class MyPublicModelPusherBackend(PusherBackend): + serializer_class = MyPublicModelSerializer + + +class MyPrivateModelBackend(PrivatePusherBackend): + serializer_class = MyPrivateModelSerializer + + +class MyPresenceModelBackend(PresencePusherBackend): + serializer_class = MyPresenceModelSerializer diff --git a/example/serializers.py b/example/serializers.py index 30d2ff9..980de4c 100644 --- a/example/serializers.py +++ b/example/serializers.py @@ -1,9 +1,21 @@ from rest_framework import serializers -from example.models import MyModel +from example.models import MyPublicModel, MyPrivateModel, MyPresenceModel -class MyModelSerializer(serializers.ModelSerializer): +class MyPublicModelSerializer(serializers.ModelSerializer): class Meta: - model = MyModel + model = MyPublicModel + fields = ("name",) + + +class MyPrivateModelSerializer(serializers.ModelSerializer): + class Meta: + model = MyPrivateModel + fields = ("name",) + + +class MyPresenceModelSerializer(serializers.ModelSerializer): + class Meta: + model = MyPresenceModel fields = ("name",) diff --git a/example/views.py b/example/views.py index 0e5cbdb..e6f0200 100644 --- a/example/views.py +++ b/example/views.py @@ -1,13 +1,30 @@ from rest_framework import viewsets from drf_model_pusher.views import ModelPusherViewMixin -from example.models import MyModel -from example.serializers import MyModelSerializer +from example.models import MyPublicModel, MyPrivateModel, MyPresenceModel +from example.serializers import MyPublicModelSerializer, MyPrivateModelSerializer, \ + MyPresenceModelSerializer -class MyModelViewSet(ModelPusherViewMixin, viewsets.ModelViewSet): - queryset = MyModel.objects.all() - serializer_class = MyModelSerializer +class MyPublicModelViewSet(ModelPusherViewMixin, viewsets.ModelViewSet): + queryset = MyPublicModel.objects.all() + serializer_class = MyPublicModelSerializer def get_pusher_channels(self): return ["channel"] + + +class MyPrivateModelViewSet(ModelPusherViewMixin, viewsets.ModelViewSet): + queryset = MyPrivateModel.objects.all() + serializer_class = MyPrivateModelSerializer + + def get_pusher_channels(self): + return ["private-channel"] + + +class MyPresenceModelViewSet(ModelPusherViewMixin, viewsets.ModelViewSet): + queryset = MyPresenceModel.objects.all() + serializer_class = MyPresenceModelSerializer + + def get_pusher_channels(self): + return ["presence-channel"] diff --git a/requirements.txt b/requirements.txt index 339c70e..bde2bfe 100644 --- a/requirements.txt +++ b/requirements.txt @@ -15,7 +15,7 @@ more-itertools==4.2.0 ndg-httpsclient==0.5.0 packaging==17.1 pluggy==0.6.0 -pusher==2.0.1 +pusher==2.1.4 py==1.5.4 pyasn1==0.4.3 pycparser==2.18
diff --git a/tests/test_views.py b/tests/test_views.py index 81a33de..1672d1a 100644 --- a/tests/test_views.py +++ b/tests/test_views.py @@ -1,16 +1,17 @@ from unittest import TestCase, mock from unittest.mock import Mock +from django.test import override_settings from pytest import mark from rest_framework.test import APIRequestFactory -from example.models import MyModel -from example.serializers import MyModelSerializer -from example.views import MyModelViewSet +from example.models import MyPublicModel, MyPrivateModel, MyPresenceModel +from example.serializers import MyPublicModelSerializer, MyPrivateModelSerializer, MyPresenceModelSerializer +from example.views import MyPublicModelViewSet, MyPrivateModelViewSet, MyPresenceModelViewSet @mark.django_db -class TestModelPusherViewMixin(TestCase): +class TestModelPusherViewMixinPublicChannels(TestCase): """Integration tests between models, serializers, pusher backends, and views.""" @mock.patch("pusher.Pusher.trigger") @@ -19,50 +20,168 @@ def test_creations_are_pushed(self, trigger: Mock): request_factory = APIRequestFactory() create_request = request_factory.post(path="/mymodels/", data={"name": "Henry"}) - view = MyModelViewSet.as_view({"post": "create"}) + view = MyPublicModelViewSet.as_view({"post": "create"}) response = view(create_request) - instance = MyModel.objects.last() + instance = MyPublicModel.objects.last() self.assertEqual(response.status_code, 201, response.data) trigger.assert_called_once_with( - ["channel"], "mymodel.create", MyModelSerializer(instance=instance).data, None + ["channel"], "mypublicmodel.create", MyPublicModelSerializer(instance=instance).data, None ) @mock.patch("pusher.Pusher.trigger") def test_updates_are_pushed(self, trigger: Mock): - instance = MyModel.objects.create(name="Julie") + instance = MyPublicModel.objects.create(name="Julie") request_factory = APIRequestFactory() partial_update_request = request_factory.patch( path="/mymodels/123/", data={"name": "Michelle"} ) - view = MyModelViewSet.as_view({"patch": "partial_update"}) + view = MyPublicModelViewSet.as_view({"patch": "partial_update"}) response = view(partial_update_request, pk=instance.pk) - instance = MyModel.objects.last() + instance = MyPublicModel.objects.last() self.assertEqual(response.status_code, 200, response.data) self.assertEqual(instance.name, "Michelle") trigger.assert_called_once_with( - ["channel"], "mymodel.update", MyModelSerializer(instance=instance).data, None + ["channel"], "mypublicmodel.update", MyPublicModelSerializer(instance=instance).data, None ) @mock.patch("pusher.Pusher.trigger") def test_deletions_are_pushed(self, trigger: Mock): - instance = MyModel.objects.create(name="Henry") + instance = MyPublicModel.objects.create(name="Henry") request_factory = APIRequestFactory() delete_request = request_factory.delete(path="/mymodels/1/") - view = MyModelViewSet.as_view({"delete": "destroy"}) + view = MyPublicModelViewSet.as_view({"delete": "destroy"}) response = view(delete_request, pk=instance.pk) self.assertEqual(response.status_code, 204, response.data) - with self.assertRaises(MyModel.DoesNotExist): - instance = MyModel.objects.get(pk=instance.pk) + with self.assertRaises(MyPublicModel.DoesNotExist): + instance = MyPublicModel.objects.get(pk=instance.pk) trigger.assert_called_once_with( - ["channel"], "mymodel.delete", MyModelSerializer(instance=instance).data, None + ["channel"], "mypublicmodel.delete", MyPublicModelSerializer(instance=instance).data, None + ) + + [email protected]_db +class 
TestModelPusherViewMixinPrivateChannels(TestCase): + """Integration tests between models, serializers, pusher backends, and views.""" + + @mock.patch("pusher.Pusher.trigger") + def test_creations_are_pushed(self, trigger: Mock): + + request_factory = APIRequestFactory() + create_request = request_factory.post(path="/mymodels/", data={"name": "Henry"}) + + view = MyPrivateModelViewSet.as_view({"post": "create"}) + response = view(create_request) + instance = MyPrivateModel.objects.last() + + self.assertEqual(response.status_code, 201, response.data) + + trigger.assert_called_once_with( + ["private-channel"], "myprivatemodel.create", MyPrivateModelSerializer(instance=instance).data, None + ) + + @mock.patch("pusher.Pusher.trigger") + def test_updates_are_pushed(self, trigger: Mock): + instance = MyPrivateModel.objects.create(name="Julie") + + request_factory = APIRequestFactory() + partial_update_request = request_factory.patch( + path="/mymodels/123/", data={"name": "Michelle"} + ) + + view = MyPrivateModelViewSet.as_view({"patch": "partial_update"}) + response = view(partial_update_request, pk=instance.pk) + instance = MyPrivateModel.objects.last() + + self.assertEqual(response.status_code, 200, response.data) + self.assertEqual(instance.name, "Michelle") + + trigger.assert_called_once_with( + ["private-channel"], "myprivatemodel.update", MyPrivateModelSerializer(instance=instance).data, None + ) + + @mock.patch("pusher.Pusher.trigger") + def test_deletions_are_pushed(self, trigger: Mock): + instance = MyPrivateModel.objects.create(name="Henry") + + request_factory = APIRequestFactory() + delete_request = request_factory.delete(path="/mymodels/1/") + + view = MyPrivateModelViewSet.as_view({"delete": "destroy"}) + response = view(delete_request, pk=instance.pk) + + self.assertEqual(response.status_code, 204, response.data) + with self.assertRaises(MyPrivateModel.DoesNotExist): + instance = MyPrivateModel.objects.get(pk=instance.pk) + + trigger.assert_called_once_with( + ["private-channel"], "myprivatemodel.delete", MyPrivateModelSerializer(instance=instance).data, None + ) + + [email protected]_db +class TestModelPusherViewMixinPresenceChannels(TestCase): + """Integration tests between models, serializers, pusher backends, and views.""" + + @mock.patch("pusher.Pusher.trigger") + def test_creations_are_pushed(self, trigger: Mock): + + request_factory = APIRequestFactory() + create_request = request_factory.post(path="/mymodels/", data={"name": "Henry"}) + + view = MyPresenceModelViewSet.as_view({"post": "create"}) + response = view(create_request) + instance = MyPresenceModel.objects.last() + + self.assertEqual(response.status_code, 201, response.data) + + trigger.assert_called_once_with( + ["presence-channel"], "mypresencemodel.create", MyPresenceModelSerializer(instance=instance).data, None + ) + + @mock.patch("pusher.Pusher.trigger") + def test_updates_are_pushed(self, trigger: Mock): + instance = MyPresenceModel.objects.create(name="Julie") + + request_factory = APIRequestFactory() + partial_update_request = request_factory.patch( + path="/mymodels/123/", data={"name": "Michelle"} + ) + + view = MyPresenceModelViewSet.as_view({"patch": "partial_update"}) + response = view(partial_update_request, pk=instance.pk) + instance = MyPresenceModel.objects.last() + + self.assertEqual(response.status_code, 200, response.data) + self.assertEqual(instance.name, "Michelle") + + trigger.assert_called_once_with( + ["presence-channel"], "mypresencemodel.update", 
MyPresenceModelSerializer(instance=instance).data, None + ) + + @mock.patch("pusher.Pusher.trigger") + def test_deletions_are_pushed(self, trigger: Mock): + instance = MyPresenceModel.objects.create(name="Henry") + + request_factory = APIRequestFactory() + delete_request = request_factory.delete(path="/mymodels/1/") + + view = MyPresenceModelViewSet.as_view({"delete": "destroy"}) + response = view(delete_request, pk=instance.pk) + + self.assertEqual(response.status_code, 204, response.data) + with self.assertRaises(MyPresenceModel.DoesNotExist): + instance = MyPresenceModel.objects.get(pk=instance.pk) + + trigger.assert_called_once_with( + ["presence-channel"], "mypresencemodel.delete", MyPresenceModelSerializer(instance=instance).data, None )
Add support for Pusher presence channels As mentioned in #24
I have scoped how this could be implemented. The primary aims are to implement [presence channels](https://pusher.com/docs/channels/using_channels/presence-channels) and also to include a mechanism for minimising the number of pusher events by checking that members are present before an event is fired. Also of note: presence channels have a maximum of 100 users per channel. My plan is as follows:
- Add a backend for Pusher presence channels (sketched below)
- Implement a setting/kwarg for optimising the number of events (default: off)
- Implement `push_changes` on the new backend to respect the new setting/kwarg
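A minimal sketch of the first plan item, following the channel-prefix pattern that drf_model_pusher's existing PusherBackend subclasses use (the private backend prefixes "private-" the same way); nothing here is final API:

from drf_model_pusher.backends import PusherBackend

class PresencePusherBackend(PusherBackend):
    """Base class for backends that publish to `presence-` channels."""

    class Meta:
        abstract = True  # abstract backends stay out of the registry

    def get_channel(self, instance=None):
        # Reuse the normal channel name and add the presence prefix.
        channel = super().get_channel(instance=instance)
        return "presence-{channel}".format(channel=channel)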
2019-08-19T11:16:43
-1.0
digling/edictor
213
digling__edictor-213
['214']
5da345e3630802bc554dd2bd1c21dd5007703628
diff --git a/src/edictor/app/edictor.html b/src/edictor/app/edictor.html index 6753d7e..bad8ab8 100644 --- a/src/edictor/app/edictor.html +++ b/src/edictor/app/edictor.html @@ -129,13 +129,6 @@ PARTIAL COGNATE SETS </a> </li> - <li> - <a id="toggle_forms" onclick="loadAjax(event, 'sortable', 'forms', 'largebox');"> - <span class="glyphicon glyphicon-remove"></span> - <span class="glyphicon glyphicon-ok" style="display: none"></span> - MORPHEMES AND GROUPED SOUNDS - </a> - </li> <li> <a id="toggle_glosses" onclick="loadAjax(event, 'sortable', 'glosses', 'largebox');"> <span class="glyphicon glyphicon-remove"></span> diff --git a/src/edictor/app/js/glosses.js b/src/edictor/app/js/glosses.js index 56e4d5c..44e4d2d 100644 --- a/src/edictor/app/js/glosses.js +++ b/src/edictor/app/js/glosses.js @@ -533,24 +533,13 @@ GLOSSES.markID = function(event, node) { GLOSSES.markIDs = function(event, node) { event.preventDefault(); - // if (node.dataset["idx"] in this.joined) { - // for (nodeidx in this.joined) { - // this.markID(event, document.getElementById('GLOSSES_idx-' + nodeidx)); - // } - // this.joined = {}; - // return; - // } - var current_node = node.parentNode; - // this.joined = {}; - while (current_node.nextElementSibling.id != "") { - current_node.childNodes[0].dataset["marked"] = "0" - current_node.childNodes[0].classList.remove("id_batch") - GLOSSES.joined[current_node.childNodes[0].dataset["idx"]] = false; - current_node = current_node.nextElementSibling; - } - current_node.childNodes[0].dataset["marked"] = "0" - current_node.childNodes[0].classList.remove("id_batch") - GLOSSES.joined[current_node.childNodes[0].dataset["idx"]] = false; + if (node.dataset["idx"] in this.joined) { + for (nodeidx in this.joined) { + this.markID(event, document.getElementById('GLOSSES_idx-' + nodeidx)); + } + this.joined = {}; + return; + } }; GLOSSES.plotMorphemes = function(tokens, morphemes, cogids) { diff --git a/src/edictor/app/js/segments.js b/src/edictor/app/js/segments.js index f039ca3..9a7346e 100644 --- a/src/edictor/app/js/segments.js +++ b/src/edictor/app/js/segments.js @@ -39,12 +39,12 @@ SEG.prepare_tokens = function(idx, tokens) { 'title="click to segment, right click to group" ' + 'data-idx="' + idx + '" ' + 'data-pos="' + i + '" ' + - 'oncontextmenu="SEG.groupSounds(event, this)" ' + - 'onclick="SEG.splitForm(this);" ')); + 'oncontextmenu="SEG.groupSounds(event, this);" ' + )); if (token.indexOf(".") != -1) { out.push('<span title="click to group sounds" ' + 'class="residue pointed dolgo_DOT" ' + - 'onclick="SEG.ungroupSounds(this)" ' + + 'oncontextmenu="SEG.ungroupSounds(event, this)" ' + 'data-idx="' + idx + '" ' + 'data-pos="' + i + '">' + '·</span>'); @@ -55,7 +55,7 @@ SEG.prepare_tokens = function(idx, tokens) { plotWord(token, 'span', 'pointed', 'data-idx="' + idx + '" ' + 'data-pos="' + i + '" ' + - 'onclick="SEG.joinForm(this);"')); + 'oncontextmenu="SEG.joinForm(event, this);"')); } } return out.join(""); @@ -72,18 +72,22 @@ SEG.splitForm = function(node) { var after = tokens.slice(pos, tokens.length); tokens = before.join(" ") + " " + CFG.morpheme_separator + " " + after.join(" "); + node.parentNode.parentNode.dataset["value"] = tokens; node.parentNode.innerHTML = SEG.prepare_tokens(idx, tokens.split(" ")); WLS[idx][CFG._segments] = tokens; storeModification([idx], [CFG._segments], [tokens]); + highLight(); }; -SEG.joinForm = function(node) { +SEG.joinForm = function(event, node) { + event.preventDefault(); var idx = node.dataset["idx"]; var pos = parseInt(node.dataset["pos"]); var 
tokens = WLS[idx][CFG._segments].split(" "); var before = tokens.slice(0, pos); var after = tokens.slice(pos + 1, tokens.length); tokens = before.join(" ") + " " + after.join(" "); + node.parentNode.parentNode.dataset["value"] = tokens; node.parentNode.innerHTML = SEG.prepare_tokens(idx, tokens.split(" ")); WLS[idx][CFG._segments] = tokens; storeModification([idx], [CFG._segments], [tokens]); @@ -91,18 +95,25 @@ SEG.joinForm = function(node) { SEG.groupSounds = function(event, node) { event.preventDefault(); - var idx = node.dataset["idx"]; - var pos = parseInt(node.dataset["pos"]); - var tokens = WLS[idx][CFG._segments].split(" "); - var before = tokens.slice(0, pos + 1); - var after = tokens.slice(pos + 1, tokens.length); - tokens = before.join(" ") + "." + after.join(" "); - node.parentNode.innerHTML = SEG.prepare_tokens(idx, tokens.split(" ")); - WLS[idx][CFG._segments] = tokens; - storeModification([idx], [CFG._segments], [tokens]); + if (event.ctrlKey == true) { + var idx = node.dataset["idx"]; + var pos = parseInt(node.dataset["pos"]); + var tokens = WLS[idx][CFG._segments].split(" "); + var before = tokens.slice(0, pos + 1); + var after = tokens.slice(pos + 1, tokens.length); + tokens = before.join(" ") + "." + after.join(" "); + node.parentNode.parentNode.dataset["value"] = tokens; + node.parentNode.innerHTML = SEG.prepare_tokens(idx, tokens.split(" ")); + WLS[idx][CFG._segments] = tokens; + storeModification([idx], [CFG._segments], [tokens]); + } + else { + SEG.splitForm(node); + } }; -SEG.ungroupSounds = function(node) { +SEG.ungroupSounds = function(event, node) { + event.preventDefault(); var idx = node.dataset["idx"]; var pos = parseInt(node.dataset["pos"]); var tokens = WLS[idx][CFG._segments].split(" "); @@ -116,6 +127,7 @@ SEG.ungroupSounds = function(node) { tokens_.push(tokens[i]); } } + node.parentNode.parentNode.dataset["value"] = tokens_.join(" "); node.parentNode.innerHTML = SEG.prepare_tokens(idx, tokens_); WLS[idx][CFG._segments] = tokens_.join(" "); storeModification([idx], [CFG._segments], [tokens_.join(" ")]); @@ -190,7 +202,13 @@ SEG.make_table = function(){ var idx, morphemes, cogids, tokens, concept, doculect; var filter, tokens_; var table = []; - for (i = 0; idx = WLS.rows[i]; i += 1) { + /* get current indices */ + var tokens = document.getElementsByClassName(WLS.header[CFG._segments]); + var idxs = []; + for (i = 0; i < tokens.length; i += 1) { + idxs.push(tokens[i].parentNode.id.split("_")[1]); + } + for (i = 0; idx = idxs[i]; i += 1) { /* retrieve segment, morpheme, cognate sets */ tokens = WLS[idx][CFG._segments]; morphemes = (CFG._morphemes != -1) diff --git a/src/edictor/app/js/wordlist.js b/src/edictor/app/js/wordlist.js index 9de22b5..00869c4 100644 --- a/src/edictor/app/js/wordlist.js +++ b/src/edictor/app/js/wordlist.js @@ -2337,17 +2337,18 @@ function getDate(with_seconds) { } /* highlight all IPA entries which are specified as such */ -function highLight() -{ - var items, i, tokens, roots, word, m, concepts, concept, morphemes, parts, part, j, textout, k, morph; +function highLight() { + var items, i, tokens, roots, word, m, concepts, concept, morphemes, parts, part, j, textout, k, morph, idx; for (i = 0; head = WLS.header[i]; i += 1) { if (CFG['highlight'].indexOf(head) != -1 ) { tokens = document.getElementsByClassName(head); - for (j=0; j<tokens.length; j++) { + for (j = 0; j < tokens.length; j += 1) { if (tokens[j].innerHTML == tokens[j].dataset.value) { - word = plotWord(tokens[j].dataset.value); - tokens[j].innerHTML = '<div 
class="lock_alignment">'+word+"</div>"; + /* check grouping sounds on right click etc. */ + idx = tokens[j].parentNode.id.split("_")[1]; + word = SEG.prepare_tokens(idx, tokens[j].dataset.value.split(" ")); + tokens[j].innerHTML = '<div class="lock_alignment">' + word + "</div>"; } } } diff --git a/src/edictor/app/panels/forms.html b/src/edictor/app/panels/forms.html deleted file mode 100644 index 6382de7..0000000 --- a/src/edictor/app/panels/forms.html +++ /dev/null @@ -1,36 +0,0 @@ -<span class="main_handle pull-left" ></span> -<button type="button" onclick="$('#toggle_forms > span').toggle();$('#forms').toggle();window.location.href='#top';" class="pull-right close" style="margin-left:5px"><span aria-hidden="true">&times;</span><span class="sr-only">Close</span></button> -<h3> -Edit Morphemes and Grouped Sounds -</h3> -<input id="input_segments_filter-form" class="form-control pull-left textfield" type="text" - placeholder="filter by form" onkeyup="SEG.filter_form=this.value;" style="width:250px" /> -<input id="input_segments_filter-sound" class="form-control pull-left textfield" type="text" - placeholder="filter by sound" onkeyup="SEG.filter_sound=this.value;" style="width:250px" /> - <button type="button" class="btn-primary titled btn submit3 pull-right" - title="show help" - onclick="UTIL.show_help('forms');" - style="margin-left:5px;"><span class="glyphicon glyphicon-question-sign"></span></button> - -<button type="button" - onclick="showSpinner(function(){SEG.present();});" - class="btn-primary submit3 titled btn">OK</button> - - <br><br> -<!--<span style="margin-top:40px;margin-left:2px; background-color: #2d6ca2; color:white; border-radius:5px; border:2px solid #2d6ca2;padding:5px;margin-right:10px;"> - <b title="Select between full and partial colexification in the display.">GROUP:</b> - <input name="segmentsgroup_mode" checked="" onchange="SEG.groupby='form';" - type="radio"> FORM - <input name="segmentsgroup_mode" onchange="SEG.groupby='concept';" type="radio"> - GLOSS - <input name="segmentsgroup_mode" onchange="SEG.groupby='cognate';" type="radio"> - Cognate - -</span>--> - - -<div style="max-height:1000px;overflow-x:scroll;overflow-y:scroll;display:none;" - id="forms_table"></div> -<div class="help-message" id="forms_help" style="display:none;"> - <h3>Help on Segmenting and Grouping</h3> -</div>
diff --git a/tests/test_cli.py b/tests/test_cli.py index 77d8cb8..2fc1a91 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -10,6 +10,7 @@ def run(capsys, *args): def test_server(capsys): + pass - output = run( - capsys, "server") +#output = run( +# capsys, "server")
Morpheme segments and Grouping Sounds I just figured that combining morpheme segmentation and sound grouping in the wordlist panel can be done by accepting that the left mouse click (the normal thing) is reserved for editing the data, the right mouse click is used for prototypical segmentation, and control click then triggers grouping of sounds (plus ungrouping). This is probably the best solution and can be easily adjusted, so we can get rid of the extra panel.
2024-07-02T12:51:34
-1.0
fmartingr/django-stats2
10
fmartingr__django-stats2-10
['8']
6a9194093edc57ad2cb1c083c29733cd97cff77a
diff --git a/django_stats2/objects.py b/django_stats2/objects.py index 62000df..8c99f83 100644 --- a/django_stats2/objects.py +++ b/django_stats2/objects.py @@ -140,14 +140,13 @@ def _get_model_queryset(self, date=timezone.now().date()): # try to merge all object into the first one found items = ModelStat.objects.filter(**_kwargs) model_obj = items.first() + duplicates = items.exclude(pk=model_obj.pk) - def incr_original(item): + for item in duplicates: model_obj.value += item.value - map(incr_original, items.exclude(pk=model_obj.pk)) - model_obj.save() - items.exclude(pk=model_obj.pk).delete() + duplicates.delete() return model_obj @@ -207,6 +206,10 @@ def _decr_ddbb(self, date, value): # Globals def _get_value(self, date=None): value_type = 'history' if date else 'total' + + if not stats2_settings.USE_CACHE: + return self._get_ddbb(value_type, date) + cache_value = self._get_cache(value_type, date) # If we don't have a cache value we must retireve it from the ddbb @@ -222,6 +225,9 @@ def _get_value(self, date=None): def _get_between(self, date_start, date_end): cache_value = self._get_cache('between', date_start, date_end) + if not stats2_settings.USE_CACHE: + return self.get_ddbb_between(date_start, date_end) + # If we don't have the cache value we retrieve it from the ddbb if cache_value is None: ddbb_value = self._get_ddbb_between(date_start, date_end)
diff --git a/tests/test_objects.py b/tests/test_objects.py index bb01462..67fb983 100644 --- a/tests/test_objects.py +++ b/tests/test_objects.py @@ -232,6 +232,7 @@ class RaceConditionTestCase(TransactionTestCase): and create more than one ModelStat with the same parameters defying the constraint. """ + def setUp(self): self.now = datetime.datetime.now() @@ -247,8 +248,15 @@ def tearDown(self): ModelStat.objects.all().delete() def test_get_model_queryset_race_condition(self): - try: - self.assertEqual(self.stat.get(date=self.now), 5) + result = 5 + + def get_model_directly(): ModelStat.objects.get(date=self.now, name='visits') + + self.assertRaises(ModelStat.MultipleObjectsReturned, + get_model_directly) + + try: + self.assertEqual(self.stat.get(date=self.now), result) except ModelStat.MultipleObjectsReturned: self.fail('Race condition not avoided')
`unique_together` fails for Global Stats The `unique_together` constraint fails when the `content_type` foreign key is null - http://stackoverflow.com/questions/33307892/django-unique-together-with-nullable-foreignkey If two processes create a stat at almost the same time, the database does not stop it, so the third time `get_or_create` is called it raises an error because 2 or more objects are returned. Perhaps it could be solved with partial indices - http://stackoverflow.com/questions/8289100/create-unique-constraint-with-null-columns
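Worth noting why the pre-patch recovery code failed: it used map(incr_original, ...), which in Python 3 returns a lazy iterator that was never consumed, so the duplicate values were silently dropped. The patch switches to an explicit loop; the core of the merged-duplicates recovery, as in the patch above (ModelStat and _kwargs come from django_stats2.objects), looks like:

items = ModelStat.objects.filter(**_kwargs)
model_obj = items.first()
duplicates = items.exclude(pk=model_obj.pk)

# Fold the duplicate rows into the surviving one, then remove them.
for item in duplicates:
    model_obj.value += item.value

model_obj.save()
duplicates.delete()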
2017-03-15T15:43:50
-1.0
aljp/drf_model_pusher
36
aljp__drf_model_pusher-36
['31']
c0e27e38f11a577b7ca23d3a15f9404593e8fab7
diff --git a/Pipfile b/Pipfile index 9bc7963..d44695f 100644 --- a/Pipfile +++ b/Pipfile @@ -4,7 +4,7 @@ verify_ssl = true name = "pypi" [packages] -django = "==2.0" +Django = "==2.0" djangorestframework = "*" pusher = "*" @@ -12,6 +12,7 @@ pusher = "*" pytest = "*" pytest-django = "*" tox = "*" +sphinx = "*" coverage = "*" [requires] diff --git a/docs/Makefile b/docs/Makefile new file mode 100644 index 0000000..0cb02a5 --- /dev/null +++ b/docs/Makefile @@ -0,0 +1,20 @@ +# Minimal makefile for Sphinx documentation +# + +# You can set these variables from the command line. +SPHINXOPTS = +SPHINXBUILD = sphinx-build +SPHINXPROJ = DRFModelPusher +SOURCEDIR = . +BUILDDIR = _build + +# Put it first so that "make" without argument is like "make help". +help: + @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) + +.PHONY: help Makefile + +# Catch-all target: route all unknown targets to Sphinx using the new +# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). +%: Makefile + @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) \ No newline at end of file diff --git a/docs/conf.py b/docs/conf.py new file mode 100644 index 0000000..3a6c1e1 --- /dev/null +++ b/docs/conf.py @@ -0,0 +1,161 @@ +# -*- coding: utf-8 -*- +# +# Configuration file for the Sphinx documentation builder. +# +# This file does only contain a selection of the most common options. For a +# full list see the documentation: +# http://www.sphinx-doc.org/en/master/config + +# -- Path setup -------------------------------------------------------------- + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +# +import os +import sys +sys.path.insert(0, os.path.abspath('..')) + + +# -- Project information ----------------------------------------------------- + +project = 'DRF Model Pusher' +copyright = '2018, Adam Jacquier-Parr, Matthew Egan' +author = 'Adam Jacquier-Parr, Matthew Egan' + +# The short X.Y version +version = '' +# The full version, including alpha/beta/rc tags +release = '0.2.0' + + +# -- General configuration --------------------------------------------------- + +# If your documentation needs a minimal Sphinx version, state it here. +# +# needs_sphinx = '1.0' + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + 'sphinx.ext.autodoc', + 'sphinx.ext.viewcode', + 'sphinx.ext.githubpages', +] + +# Add any paths that contain templates here, relative to this directory. +templates_path = ['_templates'] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# +# source_suffix = ['.rst', '.md'] +source_suffix = '.rst' + +# The master toctree document. +master_doc = 'index' + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +# This pattern also affects html_static_path and html_extra_path . 
+exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = 'sphinx' + + +# -- Options for HTML output ------------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +# +html_theme = 'alabaster' + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +# +# html_theme_options = {} + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ['_static'] + +# Custom sidebar templates, must be a dictionary that maps document names +# to template names. +# +# The default sidebars (for documents that don't match any pattern) are +# defined by theme itself. Builtin themes are using these templates by +# default: ``['localtoc.html', 'relations.html', 'sourcelink.html', +# 'searchbox.html']``. +# +# html_sidebars = {} + + +# -- Options for HTMLHelp output --------------------------------------------- + +# Output file base name for HTML help builder. +htmlhelp_basename = 'DRFModelPusherdoc' + + +# -- Options for LaTeX output ------------------------------------------------ + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + # + # 'papersize': 'letterpaper', + + # The font size ('10pt', '11pt' or '12pt'). + # + # 'pointsize': '10pt', + + # Additional stuff for the LaTeX preamble. + # + # 'preamble': '', + + # Latex figure (float) alignment + # + # 'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + (master_doc, 'DRFModelPusher.tex', 'DRF Model Pusher Documentation', + 'Adam Jacquier-Parr, Matthew Egan', 'manual'), +] + + +# -- Options for manual page output ------------------------------------------ + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + (master_doc, 'drfmodelpusher', 'DRF Model Pusher Documentation', + [author], 1) +] + + +# -- Options for Texinfo output ---------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + (master_doc, 'DRFModelPusher', 'DRF Model Pusher Documentation', + author, 'DRFModelPusher', 'One line description of project.', + 'Miscellaneous'), +] + + +# -- Extension configuration ------------------------------------------------- \ No newline at end of file diff --git a/docs/custom_providers.rst b/docs/custom_providers.rst new file mode 100644 index 0000000..4b998fa --- /dev/null +++ b/docs/custom_providers.rst @@ -0,0 +1,32 @@ +.. DRF Model Pusher documentation for implementing custom providers + +Implementing Custom Providers +============================================ + +DRF Model Pusher provides functionality to allow you to implement your own custom providers if none of the included providers suit your needs. + +To implement a custom provider you need to implement a few methods expected by the `drf_model_pusher.providers.BaseProvider` class. 
These methods are:: + + from drf_model_pusher.providers import BaseProvider + + class MyCustomProvider(BaseProvider): + def configure(self): + """ + This method can be used to setup a connection with the provider or implement other one-time initial configuration. + """ + pass + + def parse_packet(self, backend, channels, event_name, data, socket_id=None): + """ + This method is available to be implemented as a hook before the event is sent. This could be useful + for logging or sanitizing any data before transit. + """ + return channels, event_name, data + + def trigger(self, channels, event_name, data, socket_id=None): + """ + This method is where the event should be sent to the provider. + """ + pass + +.. code-block:: python diff --git a/docs/index.rst b/docs/index.rst new file mode 100644 index 0000000..47f7635 --- /dev/null +++ b/docs/index.rst @@ -0,0 +1,53 @@ +.. DRF Model Pusher documentation master file, created by + sphinx-quickstart on Sun Sep 9 11:57:11 2018. + You can adapt this file completely to your liking, but it should at least + contain the root `toctree` directive. + +Welcome to DRF Model Pusher's documentation! +============================================ + +.. toctree:: + :maxdepth: 2 + :caption: Contents: + + custom_providers + + +Indices and tables +================== + +* :ref:`genindex` +* :ref:`modindex` +* :ref:`search` + + + +Modules +======= + +.. automodule:: drf_model_pusher + :members: + +.. automodule:: drf_model_pusher.apps + :members: + +.. automodule:: drf_model_pusher.backends + :members: + +.. automodule:: drf_model_pusher.providers + :members: + +.. automodule:: drf_model_pusher.receivers + :members: + +.. automodule:: drf_model_pusher.signals + :members: + +.. automodule:: drf_model_pusher.config + :members: + +.. automodule:: drf_model_pusher.views + :members: + +.. automodule:: drf_model_pusher.exceptions + :members: diff --git a/drf_model_pusher/__init__.py b/drf_model_pusher/__init__.py index f598219..c5de074 100644 --- a/drf_model_pusher/__init__.py +++ b/drf_model_pusher/__init__.py @@ -1,1 +1,5 @@ +""" +DRF Model Pusher Package +""" + default_app_config = "drf_model_pusher.apps.DrfModelPusherConfig" diff --git a/drf_model_pusher/apps.py b/drf_model_pusher/apps.py index 57b1313..5de8909 100644 --- a/drf_model_pusher/apps.py +++ b/drf_model_pusher/apps.py @@ -1,3 +1,4 @@ +"""AppConfig for drf_model_pusher""" import os from importlib import import_module @@ -9,7 +10,9 @@ class DrfModelPusherConfig(AppConfig): name = "drf_model_pusher" def ready(self): + """Attach receivers to Signals and import pusher backends.""" from drf_model_pusher.config import connect_pusher_views + connect_pusher_views() pusher_backends_file = "pusher_backends.py" @@ -22,4 +25,3 @@ def ready(self): for app_config in apps.get_app_configs(): if os.path.exists(os.path.join(app_config.path, pusher_backends_file)): import_module("{0}.pusher_backends".format(app_config.name)) - diff --git a/drf_model_pusher/backends.py b/drf_model_pusher/backends.py index e9a99e0..38c1ba6 100644 --- a/drf_model_pusher/backends.py +++ b/drf_model_pusher/backends.py @@ -1,8 +1,9 @@ """ -Mixin Classes for Pusher integration with Views +PusherBackend classes define how changes from a Model are serialized, and then which provider will send the message. 
""" from collections import defaultdict +from drf_model_pusher.providers import PusherProvider from drf_model_pusher.signals import view_pre_destroy, view_post_save pusher_backend_registry = defaultdict(list) @@ -11,7 +12,7 @@ class PusherBackendMetaclass(type): """ Register PusherBackend's with a registry for model lookups, supports - abstract classes + "abstract" classes which are not registered but can extend functionality. """ def __new__(mcs, cls, bases, dicts): @@ -31,6 +32,13 @@ def __new__(mcs, cls, bases, dicts): return final_cls +class PacketAdapter(object): + """Adapt data from the (event, channels, data) to a potentially different format.""" + + def parse_packet(self, channels, event_name, data): + return channels, event_name, data + + class PusherBackend(metaclass=PusherBackendMetaclass): """ PusherBackend is the base class for implementing serializers with Pusher @@ -39,15 +47,21 @@ class PusherBackend(metaclass=PusherBackendMetaclass): class Meta: abstract = True + packet_adapter_class = PacketAdapter + provider_class = PusherProvider + def __init__(self, view): self.view = view self.pusher_socket_id = self.get_pusher_socket(view) + self.packet_adapter = PacketAdapter() def get_pusher_socket(self, view): + """Return the socket from the request header.""" pusher_socket = view.request.META.get("HTTP_X_PUSHER_SOCKET_ID", None) return pusher_socket def push_change(self, event, instance=None, pre_destroy=False, ignore=True): + """Send a signal to push the update""" channels, event_name, data = self.get_packet(event, instance) if pre_destroy: view_pre_destroy.send( @@ -57,6 +71,7 @@ def push_change(self, event, instance=None, pre_destroy=False, ignore=True): event_name=event_name, data=data, socket_id=self.pusher_socket_id if ignore else None, + provider_class=self.provider_class, ) else: view_post_save.send( @@ -66,6 +81,7 @@ def push_change(self, event, instance=None, pre_destroy=False, ignore=True): event_name=event_name, data=data, socket_id=self.pusher_socket_id if ignore else None, + provider_class=self.provider_class, ) def get_event_name(self, event_type): @@ -94,6 +110,7 @@ def get_packet(self, event, instance): channels = self.get_channels(instance=instance) event_name = self.get_event_name(event) data = self.get_serializer(self.view, instance=instance).data + channels, event_name, data = self.packet_adapter.parse_packet(channels, event_name, data) return channels, event_name, data diff --git a/drf_model_pusher/config.py b/drf_model_pusher/config.py index eeeff52..cbf8bfa 100644 --- a/drf_model_pusher/config.py +++ b/drf_model_pusher/config.py @@ -1,3 +1,4 @@ +"""Methods for configuration drf_model_pusher""" from drf_model_pusher.receivers import send_pusher_event from drf_model_pusher.signals import view_post_save, view_pre_destroy diff --git a/drf_model_pusher/exceptions.py b/drf_model_pusher/exceptions.py index 7fe638c..2ede679 100644 --- a/drf_model_pusher/exceptions.py +++ b/drf_model_pusher/exceptions.py @@ -1,3 +1,6 @@ +"""drf_model_pusher Exception Classes""" + + class ModelPusherException(Exception): """ A base exception class that can be used within the package to raise user errors diff --git a/drf_model_pusher/providers.py b/drf_model_pusher/providers.py new file mode 100644 index 0000000..d66e358 --- /dev/null +++ b/drf_model_pusher/providers.py @@ -0,0 +1,46 @@ + +from django.conf import settings +from pusher import Pusher + + +class PusherProvider(object): + """ + This class provides a wrapper to Pusher so that we can mock it or disable it easily + """ + + 
def __init__(self): + self._pusher = None + self._disabled = False + + if hasattr(settings, "DRF_MODEL_PUSHER_DISABLED"): + self._disabled = settings.DRF_MODEL_PUSHER_DISABLED + + def configure(self): + try: + pusher_cluster = settings.PUSHER_CLUSTER + except AttributeError: + pusher_cluster = "mt1" + + self._pusher = Pusher( + app_id=settings.PUSHER_APP_ID, + key=settings.PUSHER_KEY, + secret=settings.PUSHER_SECRET, + cluster=pusher_cluster, + ) + + def trigger(self, channels, event_name, data, socket_id=None): + if self._disabled: + return + + self._pusher.trigger(channels, event_name, data, socket_id) + + +class AblyProvider(object): + def __init__(self, *args, **kwargs): + pass + + def configure(self): + pass + + def trigger(self, channels, event_name, data): + pass diff --git a/drf_model_pusher/proxies.py b/drf_model_pusher/proxies.py deleted file mode 100644 index 8d76bab..0000000 --- a/drf_model_pusher/proxies.py +++ /dev/null @@ -1,20 +0,0 @@ -from django.conf import settings -from pusher import Pusher - - -class PusherProxy(object): - """ - This class provides a wrapper to Pusher so that we can mock it or disable it easily - """ - def __init__(self, *args, **kwargs): - self._pusher = Pusher(*args, **kwargs) - self._disabled = False - - if hasattr(settings, "DRF_MODEL_PUSHER_DISABLED"): - self._disabled = settings.DRF_MODEL_PUSHER_DISABLED - - def trigger(self, channels, event_name, data): - if self._disabled: - return - - self._pusher.trigger(channels, event_name, data) \ No newline at end of file diff --git a/drf_model_pusher/receivers.py b/drf_model_pusher/receivers.py index b980e77..034161c 100644 --- a/drf_model_pusher/receivers.py +++ b/drf_model_pusher/receivers.py @@ -1,23 +1,22 @@ -from django.conf import settings - -from drf_model_pusher.proxies import PusherProxy +"""The receiver methods attach to callbacks to signals""" +from drf_model_pusher.providers import PusherProvider def send_pusher_event( - signal, sender, instance, channels, event_name, data, socket_id=None, **kwargs + signal, + sender, + instance, + channels, + event_name, + data, + socket_id=None, + **kwargs ): """ - Send a pusher event from a signal + Sends an update using the provided provider class """ - try: - pusher_cluster = settings.PUSHER_CLUSTER - except AttributeError: - pusher_cluster = "mt1" - pusher = PusherProxy( - app_id=settings.PUSHER_APP_ID, - key=settings.PUSHER_KEY, - secret=settings.PUSHER_SECRET, - cluster=pusher_cluster, - ) - pusher.trigger(channels, event_name, data) + push_provider_class = kwargs.get("provider_class", PusherProvider) + push_provider = push_provider_class() + push_provider.configure() + push_provider.trigger(channels, event_name, data, socket_id) diff --git a/drf_model_pusher/views.py b/drf_model_pusher/views.py index fe57ea3..ac9ba94 100644 --- a/drf_model_pusher/views.py +++ b/drf_model_pusher/views.py @@ -1,5 +1,3 @@ -from typing import List - from drf_model_pusher.backends import get_models_pusher_backends from drf_model_pusher.exceptions import ModelPusherException from drf_model_pusher.signals import view_post_save @@ -29,14 +27,17 @@ def get_models_pusher_backends(self): elif hasattr(self, "get_queryset"): model = self.get_queryset().model else: - raise ModelPusherException("View must have a queryset attribute or get_queryset method defined") + raise ModelPusherException( + "View must have a queryset attribute or get_queryset method defined" + ) return get_models_pusher_backends(model) - def get_pusher_channels(self) -> List[str]: + def 
get_pusher_channels(self): """Return the channel from the view""" raise NotImplementedError( "{0} must implement the `get_pusher_channels` method".format( - self.__class__.__name__) + self.__class__.__name__ + ) ) def get_pusher_backends(self): diff --git a/setup.py b/setup.py old mode 100644 new mode 100755 index 5c7ae59..51c2ace --- a/setup.py +++ b/setup.py @@ -18,7 +18,7 @@ EMAIL = "[email protected]" AUTHOR = "Adam Jacquier-Parr" REQUIRES_PYTHON = ">=3.6.0" -VERSION = "0.1.0" +VERSION = "0.2.0" # What packages are required for this module to be executed? REQUIRED = ["django", "djangorestframework", "pusher"]
diff --git a/tests/test_views.py b/tests/test_views.py index 267b6ae..81a33de 100644 --- a/tests/test_views.py +++ b/tests/test_views.py @@ -26,7 +26,7 @@ def test_creations_are_pushed(self, trigger: Mock): self.assertEqual(response.status_code, 201, response.data) trigger.assert_called_once_with( - ["channel"], "mymodel.create", MyModelSerializer(instance=instance).data + ["channel"], "mymodel.create", MyModelSerializer(instance=instance).data, None ) @mock.patch("pusher.Pusher.trigger") @@ -46,7 +46,7 @@ def test_updates_are_pushed(self, trigger: Mock): self.assertEqual(instance.name, "Michelle") trigger.assert_called_once_with( - ["channel"], "mymodel.update", MyModelSerializer(instance=instance).data + ["channel"], "mymodel.update", MyModelSerializer(instance=instance).data, None ) @mock.patch("pusher.Pusher.trigger") @@ -64,5 +64,5 @@ def test_deletions_are_pushed(self, trigger: Mock): instance = MyModel.objects.get(pk=instance.pk) trigger.assert_called_once_with( - ["channel"], "mymodel.delete", MyModelSerializer(instance=instance).data + ["channel"], "mymodel.delete", MyModelSerializer(instance=instance).data, None )
Add docs for "Implementing custom providers". Dependent on #26 being merged.
2018-10-02T08:55:16
-1.0
fmartingr/django-stats2
7
fmartingr__django-stats2-7
['6']
f855164e91abc141b732f48c317538d36f186c52
diff --git a/.travis.yml b/.travis.yml index 7eebefa..7708c6e 100644 --- a/.travis.yml +++ b/.travis.yml @@ -21,6 +21,10 @@ env: - TOX_ENV=py35-django110 - TOX_ENV=py34-django110 - TOX_ENV=py27-django110 + - TOX_ENV=py35-django111 + - TOX_ENV=py34-django111 + - TOX_ENV=py27-django111 + matrix: fast_finish: true @@ -28,9 +32,9 @@ matrix: - env: TOX_ENV=py32-django17 - env: TOX_ENV=py33-django17 - env: TOX_ENV=py34-django17 - - env: TOX_ENV=py27-django110 - - env: TOX_ENV=py34-django110 - - env: TOX_ENV=py35-django110 + - env: TOX_ENV=py27-django111 + - env: TOX_ENV=py34-django111 + - env: TOX_ENV=py35-django111 install: # Virtualenv < 14 for python 3.2 build diff --git a/README.md b/README.md index ad63dc3..1755609 100644 --- a/README.md +++ b/README.md @@ -17,6 +17,8 @@ pip install django_stats2 ## Configuration + +### Model stats ``` python # models.py from django.db import models @@ -31,6 +33,14 @@ class MyModel(StatsMixin, models.Model): read_count = StatField() ``` +### Global stats + +``` python +from django_stats2.objects import Stat + +stat = Stat(name='total_visits') +``` + ## Settings ``` python # Prefix for the cache keys @@ -68,16 +78,20 @@ from myapp.models import MyModel obj = MyModel.objects.first() +stat = obj.read_count +# or +stat = Stat(name='total_visits') + # Now we can access the Stat using -obj.read_count # Returns str(stat) -obj.read_count.get([date]) # Returns int stat, returns total if date not present -obj.read_count.total() # Same as before but returns int -obj.read_count.get_for_date(date) # Return stat for a current date (same as .get(date)) -obj.read_count.get_between_date(date_start, date_end) # Between two dates -obj.read_count.incr(value=1, date=date.today()) # Increment stat by amount -obj.read_count.decr(value=1, date=date.today()) # Decrement stat by amount -obj.read_count.set(value=1, date=date.today()) # Set a fixed amount -obj.read_count.store(value=1, date=date.today()) # Force store value in database +stat # Returns str(stat) +stat.get([date]) # Returns int stat, returns total if date not present +stat.total() # Same as before but returns int +stat.get_for_date(date) # Return stat for a current date (same as .get(date)) +stat.get_between_date(date_start, date_end) # Between two dates +stat.incr(value=1, date=date.today()) # Increment stat by amount +stat.decr(value=1, date=date.today()) # Decrement stat by amount +stat.set(value=1, date=date.today()) # Set a fixed amount +stat.store(value=1, date=date.today()) # Force store value in database ``` # Contribute @@ -85,7 +99,7 @@ obj.read_count.store(value=1, date=date.today()) # Force store value in databas The project provides a sample project to play with the stats2 app, just create a virtualenv, install django and start coding. 
``` -virtualenv -p python3.5 .virtualenv +python3 -m venv .virtualenv source .virtualenv/bin/activate pip install django cd sampleproject diff --git a/django_stats2/migrations/0002_auto_20161025_1156.py b/django_stats2/migrations/0002_auto_20161025_1156.py new file mode 100644 index 0000000..26c1f37 --- /dev/null +++ b/django_stats2/migrations/0002_auto_20161025_1156.py @@ -0,0 +1,30 @@ +# -*- coding: utf-8 -*- +# Generated by Django 1.10.2 on 2016-10-25 11:56 +from __future__ import unicode_literals + +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + + dependencies = [ + ('django_stats2', '0001_initial'), + ] + + operations = [ + migrations.AlterField( + model_name='modelstat', + name='content_type', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='contenttypes.ContentType'), + ), + migrations.AlterField( + model_name='modelstat', + name='object_id', + field=models.PositiveIntegerField(null=True), + ), + migrations.AlterUniqueTogether( + name='modelstat', + unique_together=set([('content_type', 'object_id', 'name', 'date')]), + ), + ] diff --git a/django_stats2/models.py b/django_stats2/models.py index 41ed239..8c6af08 100644 --- a/django_stats2/models.py +++ b/django_stats2/models.py @@ -5,8 +5,10 @@ class ModelStat(models.Model): - content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE) - object_id = models.PositiveIntegerField() + content_type = models.ForeignKey(ContentType, + on_delete=models.CASCADE, + null=True) + object_id = models.PositiveIntegerField(null=True) content_object = GenericForeignKey('content_type', 'object_id') date = models.DateField(db_index=True) name = models.CharField(max_length=128) diff --git a/django_stats2/objects.py b/django_stats2/objects.py index 53bbc9a..e5d907c 100644 --- a/django_stats2/objects.py +++ b/django_stats2/objects.py @@ -19,7 +19,7 @@ class Stat(object): 'between': '{cache_key_prefix}:{prefix}:{name}:{pk}:{date}_{date_end}', } - def __init__(self, name, model_instance): + def __init__(self, name, model_instance=None): """ Setup the base fields for the stat to work properly and the cache connection to store the data. 
@@ -27,8 +27,9 @@ def __init__(self, name, model_instance): self.cache = self._get_cache_instance() self.name = name self.model_instance = model_instance - self.content_type = ContentType.objects.get_for_model( - self.model_instance) + if self.model_instance: + self.content_type = ContentType.objects.get_for_model( + self.model_instance) # Cache handling def _get_cache_instance(self): @@ -39,6 +40,16 @@ def _get_cache_instance(self): cache = caches['default'] return cache + def _get_stat_prefix(self): + """ + Return the stat prefix for the cache key + - Return the lowercase class name for models + - '_global' otherwise + """ + if self.model_instance: + return self.model_instance.__class__.__name__.lower() + return '_global' + def _get_cache_key(self, value_type='total', date=None, date_end=None): if isinstance(date, datetime): date = date.date() @@ -48,9 +59,9 @@ def _get_cache_key(self, value_type='total', date=None, date_end=None): return self.cache_key_format.get(value_type).format( cache_key_prefix=self.cache_key_prefix, - prefix=self.model_instance.__class__.__name__.lower(), + prefix=self._get_stat_prefix(), name=self.name, - pk=self.object_id, + pk=self.object_id or '', date=date, date_end=date_end) @@ -58,7 +69,7 @@ def _get_cache(self, value_type='total', date=None, date_end=None): cache_key = self._get_cache_key(value_type, date, date_end) return self.cache.get(cache_key) - def _set_cache(self, value_type='total', date=None, value=0, date_end=None): + def _set_cache(self, value_type='total', date=None, value=0, date_end=None): # noqa cache_key = self._get_cache_key(value_type, date, date_end) timeout = getattr(stats2_settings, 'CACHE_TIMEOUT_{}'.format(value_type).upper(), @@ -100,22 +111,36 @@ def _delete_cache(self, date=None): caches[stats2_settings.CACHE_KEY].delete(cache_key) # Database handlers + def _get_manager_kwargs(self, date=None): + """Returns kwargs to filter ModelStat by Stat type""" + if self.model_instance: + manager_kwargs = { + 'content_type_id': self.content_type.pk, + 'object_id': self.object_id, + 'name': self.name + } + else: + manager_kwargs = { + 'name': self.name + } + + if date: + manager_kwargs['date'] = date + + return manager_kwargs + def _get_model_queryset(self, date=timezone.now().date()): - model_obj, created = ModelStat.objects.get_or_create( - content_type_id=self.content_type.pk, - object_id=self.object_id, - date=date, - name=self.name - ) + """Returns the ModelStat queryset for this Stat""" + manager_kwargs = self._get_manager_kwargs(date) + + model_obj, created = ModelStat.objects.get_or_create(**manager_kwargs) return model_obj def _get_ddbb(self, value_type='total', date=None): if value_type == 'total': stat_result = ModelStat.objects.filter( - content_type_id=self.content_type.pk, - object_id=self.object_id, - name=self.name, + **self._get_manager_kwargs() ).aggregate(Sum('value')) stat = stat_result.get('value__sum') @@ -124,10 +149,7 @@ def _get_ddbb(self, value_type='total', date=None): if value_type == 'history': try: stat_result = ModelStat.objects.get( - content_type_id=self.content_type.pk, - object_id=self.object_id, - name=self.name, - date=date + **self._get_manager_kwargs(date) ) return stat_result.value except ModelStat.DoesNotExist: @@ -139,11 +161,9 @@ def _get_ddbb(self, value_type='total', date=None): def _get_ddbb_between(self, date_start, date_end): try: stat_result = ModelStat.objects.filter( - content_type_id=self.content_type.pk, - object_id=self.object_id, - name=self.name, date__gte=date_start, date__lte=date_end, 
+ **self._get_manager_kwargs() ).aggregate(Sum('value')) stat = stat_result.get('value__sum') return stat @@ -154,12 +174,7 @@ def _get_ddbb_between(self, date_start, date_end): return 0 def _set_ddbb(self, date, value): - object_kwargs = { - "content_type_id": self.content_type.pk, - "object_id": self.object_id, - "name": self.name, - "date": date, - } + object_kwargs = self._get_manager_kwargs(date) try: obj = ModelStat.objects.get(**object_kwargs) @@ -260,7 +275,9 @@ def object_id(self): :returns: Model instance primary key :rtype: int """ - return self.model_instance.pk + if self.model_instance: + return self.model_instance.pk + return None def __repr__(self): return str(self.total()) diff --git a/tox.ini b/tox.ini index 16efcb7..11144a3 100644 --- a/tox.ini +++ b/tox.ini @@ -5,14 +5,16 @@ envlist = {py27,py32,py33,py34,py35}-django18, {py27,py32,py33,py34,py35}-django19, {py27,py32,py33,py34,py35}-django110, + {py27,py32,py33,py34,py35}-django111, [testenv] commands = ./runtests.py deps = - django17: Django==1.7.11 - django18: Django==1.8.13 - django19: Django==1.9.7 - django110: Django==1.10b1 + django17: Django>=1.7,<1.7.99 + django18: Django>=1.8,<1.8.99 + django19: Django>=1.9,<1.9.99 + django110: Django>=1.10,<1.10.99 + django111: Django>=1.11,<1.11.99 -rrequirements/tests.txt basepython = py32: python3.2
diff --git a/tests/test_objects.py b/tests/test_objects.py index a1c500a..af339f3 100644 --- a/tests/test_objects.py +++ b/tests/test_objects.py @@ -2,16 +2,26 @@ from unittest import TestCase from django.core.cache import caches -from django.test import override_settings from django.test.testcases import TransactionTestCase from django_stats2 import settings as stats2_settings +from django_stats2.objects import Stat from django_stats2.models import ModelStat from .models import Note -class StatTotalsTestCase(TestCase): +class StatTestCase(TestCase): + def setUp(self): + self.stat = Stat(name='total_visits') + self.note = Note.objects.create(title='Title', content='Content') + + def test_stat_prefix_is_correct(self): + self.assertEqual(self.stat._get_stat_prefix(), '_global') + self.assertEqual(self.note.reads._get_stat_prefix(), 'note') + + +class ModelStatTotalsTestCase(TestCase): def setUp(self): self.note = Note.objects.create(title='Title', content='content') @@ -29,7 +39,7 @@ def test_stat_returns_zero_and_dont_create_modelstat_when_no_data(self): self.assertEquals(ModelStat.objects.count(), 0) -class StatCacheTestCase(TransactionTestCase): +class ModelStatCacheTestCase(TransactionTestCase): def setUp(self): self.note = Note.objects.create(title='Title', content='Content') @@ -56,59 +66,49 @@ def test_retireve_stat_second_time_uses_cache_instead_of_query(self): self.assertEquals(stat, stat2) -class StatOperationsBaseTestCase(TransactionTestCase): - queries_per_set = 3 - - def setUp(self): - self.note = Note.objects.create(title='Title', content='Content') - - def tearDown(self): - self.note.delete() - ModelStat.objects.all().delete() - caches[stats2_settings.CACHE_KEY].clear() - +class StatOperationsBase: def test_incr(self): - stat = self.note.reads.get() + stat = self.stat.get() - self.note.reads.incr() - self.assertEquals(stat+1, self.note.reads.get()) + self.stat.incr() + self.assertEquals(stat+1, self.stat.get()) - self.note.reads.incr(2) - self.assertEquals(stat+3, self.note.reads.get()) + self.stat.incr(2) + self.assertEquals(stat+3, self.stat.get()) self.assertEquals(ModelStat.objects.first().value, 3) def test_incr_date(self): - self.note.reads.incr() - self.note.reads.incr( + self.stat.incr() + self.stat.incr( date=datetime.datetime.now()+datetime.timedelta(days=-1)) self.assertEquals(ModelStat.objects.count(), 2) def test_decr(self): - stat = self.note.reads.get() + stat = self.stat.get() - self.note.reads.decr() - self.assertEquals(stat-1, self.note.reads.get()) + self.stat.decr() + self.assertEquals(stat-1, self.stat.get()) - self.note.reads.decr(2) - self.assertEquals(stat-3, self.note.reads.get()) + self.stat.decr(2) + self.assertEquals(stat-3, self.stat.get()) self.assertEquals(ModelStat.objects.first().value, -3) def test_decr_date(self): - self.note.reads.decr() - self.note.reads.decr( + self.stat.decr() + self.stat.decr( date=datetime.datetime.now()+datetime.timedelta(days=-1)) self.assertEquals(ModelStat.objects.count(), 2) def test_set(self): with self.assertNumQueries(self.queries_per_set): - self.note.reads.set(10) + self.stat.set(10) self.assertEqual( caches[stats2_settings.CACHE_KEY].get( - self.note.reads._get_cache_key('history', + self.stat._get_cache_key('history', datetime.date.today())), 10) @@ -116,11 +116,11 @@ def test_set_date(self): yesterday = datetime.datetime.utcnow()+datetime.timedelta(days=-1) today = datetime.datetime.utcnow() - self.note.reads.set(10, date=yesterday) - self.assertEqual(self.note.reads.get(), 10) - 
self.assertEqual(self.note.reads.get(date=today), 0) + self.stat.set(10, date=yesterday) + self.assertEqual(self.stat.get(), 10) + self.assertEqual(self.stat.get(date=today), 0) self.assertEqual( - caches[stats2_settings.CACHE_KEY].get(self.note.reads._get_cache_key(date=today)), + caches[stats2_settings.CACHE_KEY].get(self.stat._get_cache_key(date=today)), 10) def test_get_total(self): @@ -129,33 +129,33 @@ def test_get_total(self): yesterday3 = datetime.date.today()+datetime.timedelta(days=-3) with self.assertNumQueries(self.queries_per_set*3): - self.note.reads.set(date=yesterday, value=1) - self.note.reads.set(date=yesterday2, value=2) - self.note.reads.set(date=yesterday3, value=3) + self.stat.set(date=yesterday, value=1) + self.stat.set(date=yesterday2, value=2) + self.stat.set(date=yesterday3, value=3) self.assertIn( - self.note.reads._get_cache_key(value_type='history', + self.stat._get_cache_key(value_type='history', date=yesterday), caches[stats2_settings.CACHE_KEY]) self.assertIn( - self.note.reads._get_cache_key(value_type='history', + self.stat._get_cache_key(value_type='history', date=yesterday2), caches[stats2_settings.CACHE_KEY]) self.assertIn( - self.note.reads._get_cache_key(value_type='history', + self.stat._get_cache_key(value_type='history', date=yesterday3), caches[stats2_settings.CACHE_KEY]) with self.assertNumQueries(1): - self.assertEqual(self.note.reads.total(), 6) + self.assertEqual(self.stat.total(), 6) def test_get_date(self): yesterday = datetime.date.today()+datetime.timedelta(days=-1) - self.note.reads.set(10, date=yesterday) + self.stat.set(10, date=yesterday) with self.assertNumQueries(0): self.assertEquals( - self.note.reads.get_for_date(yesterday), + self.stat.get_for_date(yesterday), 10) # Clear cache and try again to try get from ddbb @@ -163,24 +163,24 @@ def test_get_date(self): with self.assertNumQueries(1): self.assertEquals( - self.note.reads.get_for_date(yesterday), + self.stat.get_for_date(yesterday), 10) def test_get_between(self): # Fill the past 5 days for i in range(1, 6): day = datetime.date.today() + datetime.timedelta(days=i*-1) - self.note.reads.set(date=day, value=1) + self.stat.set(date=day, value=1) with self.assertNumQueries(1): - two_days = self.note.reads.get_between_date( + two_days = self.stat.get_between_date( datetime.date.today() + datetime.timedelta(days=-2), datetime.date.today() + datetime.timedelta(days=-1) ) # Test cache with self.assertNumQueries(0): - two_days = self.note.reads.get_between_date( + two_days = self.stat.get_between_date( datetime.date.today() + datetime.timedelta(days=-2), datetime.date.today() + datetime.timedelta(days=-1) ) @@ -188,7 +188,7 @@ def test_get_between(self): # Assert result self.assertEqual(two_days, 2) - four_days = self.note.reads.get_between_date( + four_days = self.stat.get_between_date( datetime.date.today() + datetime.timedelta(days=-4), datetime.date.today() + datetime.timedelta(days=-1) ) @@ -196,7 +196,31 @@ def test_get_between(self): def test_store(self): day = datetime.date.today() - self.note.reads.store(3, date=day) + self.stat.store(3, date=day) with self.assertNumQueries(1): - self.assertEquals(self.note.reads.get(date=day), 3) + self.assertEquals(self.stat.get(date=day), 3) + + +class ModelStatOperationsTestCase(StatOperationsBase, TransactionTestCase): + queries_per_set = 3 + + def setUp(self): + self.note = Note.objects.create(title='Title', content='Content') + self.stat = self.note.reads + + def tearDown(self): + self.note.delete() + ModelStat.objects.all().delete() + 
caches[stats2_settings.CACHE_KEY].clear() + + +class GlobalStatOperationsTestCase(StatOperationsBase, TransactionTestCase): + queries_per_set = 3 + + def setUp(self): + self.stat = Stat(name='total_visits') + + def tearDown(self): + ModelStat.objects.all().delete() + caches[stats2_settings.CACHE_KEY].clear()
Feature Request: Stats without related object. It would be good to have stats not related to a particular object, such as total stats for an application.
2016-10-25T11:31:13
-1.0
aljp/drf_model_pusher
10
aljp__drf_model_pusher-10
['7']
1fda92533f40ee0ba15bc9ed6fd78e780706bf0b
diff --git a/.travis.yml b/.travis.yml new file mode 100644 index 0000000..c09b232 --- /dev/null +++ b/.travis.yml @@ -0,0 +1,18 @@ +language: python +python: + - "3.6" + +env: + - DJANGO_VERSION=1.10 + - DJANGO_VERSION=1.11 + - DJANGO_VERSION=2.0 + +install: +- pip install -r requirements.txt +- yes | pip uninstall Django +- pip install Django==$DJANGO_VERSION +- pip install black==18.6b1 + +script: +- black --check . +- python manage.py test diff --git a/Pipfile b/Pipfile index bb463b8..91caf11 100644 --- a/Pipfile +++ b/Pipfile @@ -1,18 +1,25 @@ [[source]] + url = "https://pypi.python.org/simple" verify_ssl = true name = "pypi" + [packages] + django = "*" djangorestframework = "*" pusher = "*" + [dev-packages] + black = "*" pytest = "*" pytest-django = "*" tox = "*" + [requires] + python_version = "3.6" diff --git a/README.md b/README.md index 6a71938..162a28b 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,5 @@ -![PyPI Version Badge](https://img.shields.io/badge/PyPI-0.1.1-green.svg) +[![PyPI version](https://badge.fury.io/py/drf-model-pusher.svg)](https://badge.fury.io/py/drf-model-pusher) +[![Build Status](https://travis-ci.org/aljp/drf_model_pusher.svg?branch=master)](https://travis-ci.org/aljp/drf_model_pusher) # DRF Model Pusher diff --git a/drf_model_pusher/backends.py b/drf_model_pusher/backends.py index 1c0e96d..be683dc 100644 --- a/drf_model_pusher/backends.py +++ b/drf_model_pusher/backends.py @@ -15,19 +15,18 @@ class PusherBackendMetaclass(type): """ def __new__(mcs, cls, bases, dicts): - if dicts.get('Meta') and hasattr(dicts.get('Meta'), 'abstract'): - dicts['__metaclass__'] = mcs + if dicts.get("Meta") and hasattr(dicts.get("Meta"), "abstract"): + dicts["__metaclass__"] = mcs return super().__new__(mcs, cls, bases, dicts) assert ( - dicts.get('serializer_class', None) is not None, - 'PusherBackends require a serializer_class' - ) - dicts['__metaclass__'] = mcs + dicts.get("serializer_class", None) is not None + ), "PusherBackends require a serializer_class" + dicts["__metaclass__"] = mcs final_cls = super().__new__(mcs, cls, bases, dicts) - model_name = dicts['serializer_class'].Meta.model.__name__.lower() + model_name = dicts["serializer_class"].Meta.model.__name__.lower() pusher_backend_registry[model_name].append(final_cls) return final_cls diff --git a/drf_model_pusher/exceptions.py b/drf_model_pusher/exceptions.py index 8439c32..7fe638c 100644 --- a/drf_model_pusher/exceptions.py +++ b/drf_model_pusher/exceptions.py @@ -1,6 +1,6 @@ - class ModelPusherException(Exception): """ A base exception class that can be used within the package to raise user errors """ - pass \ No newline at end of file + + pass diff --git a/drf_model_pusher/receivers.py b/drf_model_pusher/receivers.py index 539a034..993edac 100644 --- a/drf_model_pusher/receivers.py +++ b/drf_model_pusher/receivers.py @@ -3,14 +3,7 @@ def send_pusher_event( - signal, - sender, - instance, - channel, - event_name, - data, - socket_id=None, - **kwargs, + signal, sender, instance, channel, event_name, data, socket_id=None, **kwargs ): """ Send a pusher event from a signal @@ -18,7 +11,7 @@ def send_pusher_event( try: pusher_cluster = settings.PUSHER_CLUSTER except AttributeError: - pusher_cluster = 'mt1' + pusher_cluster = "mt1" pusher = Pusher( app_id=settings.PUSHER_APP_ID, @@ -26,8 +19,4 @@ def send_pusher_event( secret=settings.PUSHER_SECRET, cluster=pusher_cluster, ) - pusher.trigger( - [channel], - event_name, - data, - ) + pusher.trigger([channel], event_name, data) diff --git 
a/drf_model_pusher/signals.py b/drf_model_pusher/signals.py index 7e373d9..02d193e 100644 --- a/drf_model_pusher/signals.py +++ b/drf_model_pusher/signals.py @@ -2,9 +2,9 @@ view_post_save = Signal( - providing_args=['instance', 'channel', 'event_name', 'data', 'socket_id'] + providing_args=["instance", "channel", "event_name", "data", "socket_id"] ) view_pre_destroy = Signal( - providing_args=['instance', 'channel', 'event_name', 'data', 'socket_id'] + providing_args=["instance", "channel", "event_name", "data", "socket_id"] ) diff --git a/drf_model_pusher/views.py b/drf_model_pusher/views.py index 34ef29e..02b3612 100644 --- a/drf_model_pusher/views.py +++ b/drf_model_pusher/views.py @@ -8,11 +8,13 @@ class ModelPusherViewMixin(object): pusher_backends = [] - PUSH_CREATE = 'create' - PUSH_UPDATE = 'update' - PUSH_DELETE = 'delete' + PUSH_CREATE = "create" + PUSH_UPDATE = "update" + PUSH_DELETE = "delete" - def __init__(self, push_creations=True, push_updates=True, push_deletions=True, **kwargs): + def __init__( + self, push_creations=True, push_updates=True, push_deletions=True, **kwargs + ): self.push_creations = push_creations self.push_updates = push_updates self.push_deletions = push_deletions @@ -69,5 +71,5 @@ def push(self, channel, event_name, data): sender=self.__class__, channel=channel, event_name=event_name, - data=data + data=data, ) diff --git a/example/__init__.py b/example/__init__.py index e447fa7..d7ce039 100644 --- a/example/__init__.py +++ b/example/__init__.py @@ -1,1 +1,1 @@ -from example.app import ExampleApp \ No newline at end of file +from example.app import ExampleApp diff --git a/example/models.py b/example/models.py index 9d6ec7d..cf68b27 100644 --- a/example/models.py +++ b/example/models.py @@ -2,4 +2,4 @@ class MyModel(models.Model): - name = models.CharField(max_length=32) \ No newline at end of file + name = models.CharField(max_length=32) diff --git a/example/serializers.py b/example/serializers.py index 6d72069..30d2ff9 100644 --- a/example/serializers.py +++ b/example/serializers.py @@ -6,6 +6,4 @@ class MyModelSerializer(serializers.ModelSerializer): class Meta: model = MyModel - fields = ( - "name", - ) + fields = ("name",) diff --git a/requirements.txt b/requirements.txt index 38b4536..4881940 100644 --- a/requirements.txt +++ b/requirements.txt @@ -2,7 +2,6 @@ appdirs==1.4.3 asn1crypto==0.24.0 atomicwrites==1.1.5 attrs==18.1.0 -black==18.6b4 certifi==2018.4.16 cffi==1.11.5 chardet==3.0.4 diff --git a/setup.py b/setup.py index ada1844..5c7ae59 100644 --- a/setup.py +++ b/setup.py @@ -13,9 +13,7 @@ # Package meta-data. NAME = "drf_model_pusher" -DESCRIPTION = ( - "Easy to use class mixins for Django Rest Framework and Pusher to keep your API realtime." -) +DESCRIPTION = "Easy to use class mixins for Django Rest Framework and Pusher to keep your API realtime." URL = "https://github.com/aljp/drf_model_pusher" EMAIL = "[email protected]" AUTHOR = "Adam Jacquier-Parr" @@ -23,11 +21,7 @@ VERSION = "0.1.0" # What packages are required for this module to be executed? -REQUIRED = [ - "django", - "djangorestframework", - "pusher" -] +REQUIRED = ["django", "djangorestframework", "pusher"] # What packages are optional? EXTRAS = {
diff --git a/tests/test_views.py b/tests/test_views.py index 5579c1e..267b6ae 100644 --- a/tests/test_views.py +++ b/tests/test_views.py @@ -17,9 +17,7 @@ class TestModelPusherViewMixin(TestCase): def test_creations_are_pushed(self, trigger: Mock): request_factory = APIRequestFactory() - create_request = request_factory.post(path="/mymodels/", data={ - "name": "Henry" - }) + create_request = request_factory.post(path="/mymodels/", data={"name": "Henry"}) view = MyModelViewSet.as_view({"post": "create"}) response = view(create_request) @@ -28,9 +26,7 @@ def test_creations_are_pushed(self, trigger: Mock): self.assertEqual(response.status_code, 201, response.data) trigger.assert_called_once_with( - ["channel"], - "mymodel.create", - MyModelSerializer(instance=instance).data, + ["channel"], "mymodel.create", MyModelSerializer(instance=instance).data ) @mock.patch("pusher.Pusher.trigger") @@ -38,9 +34,9 @@ def test_updates_are_pushed(self, trigger: Mock): instance = MyModel.objects.create(name="Julie") request_factory = APIRequestFactory() - partial_update_request = request_factory.patch(path="/mymodels/123/", data={ - "name": "Michelle" - }) + partial_update_request = request_factory.patch( + path="/mymodels/123/", data={"name": "Michelle"} + ) view = MyModelViewSet.as_view({"patch": "partial_update"}) response = view(partial_update_request, pk=instance.pk) @@ -50,9 +46,7 @@ def test_updates_are_pushed(self, trigger: Mock): self.assertEqual(instance.name, "Michelle") trigger.assert_called_once_with( - ["channel"], - "mymodel.update", - MyModelSerializer(instance=instance).data, + ["channel"], "mymodel.update", MyModelSerializer(instance=instance).data ) @mock.patch("pusher.Pusher.trigger") @@ -70,7 +64,5 @@ def test_deletions_are_pushed(self, trigger: Mock): instance = MyModel.objects.get(pk=instance.pk) trigger.assert_called_once_with( - ["channel"], - "mymodel.delete", - MyModelSerializer(instance=instance).data, + ["channel"], "mymodel.delete", MyModelSerializer(instance=instance).data )
Enhancement: Add basic CI. CI would be useful for validating PRs. As a base we would probably want the following:
- [x] Automated tests
- [x] Automated linting
- [x] Automated matrix across supported python, django, drf versions
2018-07-27T00:04:24
-1.0
blturner/django-stardate
18
blturner__django-stardate-18
['16']
720dedf0fb628a1e200f91ca007bf1b0d3425c3f
diff --git a/example/example/settings.py b/example/example/settings.py index f833b27..86bae9b 100644 --- a/example/example/settings.py +++ b/example/example/settings.py @@ -5,6 +5,9 @@ from django.core.exceptions import ImproperlyConfigured +BASE_DIR = os.path.dirname(os.path.dirname(__file__)) + + def get_env_variable(var_name): """ Get the environment variable or return an exception. """ try: @@ -24,12 +27,12 @@ def get_env_variable(var_name): DATABASES = { 'default': { - 'ENGINE': 'django.db.backends.sqlite3', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'. - 'NAME': 'example.db', # Or path to database file if using sqlite3. - 'USER': '', # Not used with sqlite3. - 'PASSWORD': '', # Not used with sqlite3. - 'HOST': '', # Set to empty string for localhost. Not used with sqlite3. - 'PORT': '', # Set to empty string for default. Not used with sqlite3. + 'ENGINE': 'django.db.backends.sqlite3', + 'NAME': os.path.join(BASE_DIR, 'example.db'), + 'USER': '', + 'PASSWORD': '', + 'HOST': '', + 'PORT': '', } } @@ -89,12 +92,12 @@ def get_env_variable(var_name): 'django.contrib.staticfiles', 'django.contrib.admin', 'core', - 'social_auth', + 'social.apps.django_app.default', 'stardate', ) AUTHENTICATION_BACKENDS = ( - 'social_auth.backends.contrib.dropbox.DropboxBackend', + 'social.backends.dropbox.DropboxOAuth', 'django.contrib.auth.backends.ModelBackend', ) @@ -102,7 +105,6 @@ def get_env_variable(var_name): TEST_RUNNER = 'django.test.simple.DjangoTestSuiteRunner' STARDATE_BACKEND = 'stardate.backends.dropbox.DropboxBackend' -# STARDATE_BACKEND = 'stardate.backends.local_file.LocalFileBackend' STARDATE_POST_MODEL = 'stardate.Post' try: @@ -115,5 +117,5 @@ def get_env_variable(var_name): DROPBOX_ACCESS_TYPE = 'app_folder' # DJANGO-SOCIAL-AUTH -DROPBOX_APP_ID = DROPBOX_APP_KEY -DROPBOX_API_SECRET = DROPBOX_APP_SECRET +SOCIAL_AUTH_DROPBOX_KEY = DROPBOX_APP_KEY +SOCIAL_AUTH_DROPBOX_SECRET = DROPBOX_APP_SECRET diff --git a/example/example/urls.py b/example/example/urls.py index 2dbbdda..4885878 100644 --- a/example/example/urls.py +++ b/example/example/urls.py @@ -5,7 +5,7 @@ urlpatterns = patterns( '', - url(r'^social/', include('social_auth.urls')), + url(r'^social/', include('social.apps.django_app.urls', namespace='social')), url(r'^admin/', include(admin.site.urls)), url(r'^', include('stardate.urls.index_urls')), ) diff --git a/setup.py b/setup.py index 49a1179..71fbe3e 100644 --- a/setup.py +++ b/setup.py @@ -18,13 +18,13 @@ # long_description=open('README').read(), zip_safe=False, install_requires=[ - 'Django>=1.4', - 'django-social-auth>=0.7,<0.8', + 'Django>=1.4,<1.7', 'django-markupfield', - 'dropbox>1.4', + 'dropbox>1.4,<2.1.0', 'Markdown', 'PyYAML', 'python-dateutil', + 'python-social-auth>=0.1', ], test_suite='stardate.tests' ) diff --git a/stardate/backends/dropbox.py b/stardate/backends/dropbox.py index 526fd2f..f6153d8 100644 --- a/stardate/backends/dropbox.py +++ b/stardate/backends/dropbox.py @@ -55,12 +55,7 @@ def get_post(self, path): return post def get_access_token(self): - bits = {} - token = self.social_auth.extra_data['access_token'] - for bit in token.split('&'): - b = bit.split('=') - bits[b[0]] = b[1] - return bits + return self.social_auth.extra_data['access_token'] def get_cursor(self): try: diff --git a/stardate/models.py b/stardate/models.py index a55275c..5f51c00 100644 --- a/stardate/models.py +++ b/stardate/models.py @@ -6,7 +6,7 @@ from django.db.models.query import QuerySet from django.utils import timezone -from social_auth.models import 
UserSocialAuth +from social.apps.django_app.default.models import UserSocialAuth from markupfield.fields import MarkupField from django.conf import settings @@ -39,7 +39,7 @@ def __init__(self, *args, **kwargs): # initialize it here try: self.backend.set_social_auth(self.social_auth) - except: + except AttributeError: pass def __unicode__(self): diff --git a/tox.ini b/tox.ini index b9590d4..b9ed7e2 100644 --- a/tox.ini +++ b/tox.ini @@ -10,32 +10,14 @@ commands = [django14] deps = Django>=1.4,<1.5 - django-social-auth - django-markupfield - dropbox - Markdown - PyYAML - python-dateutil [django15] deps = Django>=1.5,<1.6 - django-social-auth - django-markupfield - dropbox - Markdown - PyYAML - python-dateutil [django16] deps = Django>=1.6,<1.7 - django-social-auth - django-markupfield - dropbox - Markdown - PyYAML - python-dateutil [testenv:py26-django14] basepython = python2.6
diff --git a/stardate/tests/factories.py b/stardate/tests/factories.py index 0e9bc7f..51b5201 100644 --- a/stardate/tests/factories.py +++ b/stardate/tests/factories.py @@ -1,7 +1,7 @@ from django.contrib.auth.models import User from django.template.defaultfilters import slugify -from social_auth.models import UserSocialAuth +from social.apps.django_app.default.models import UserSocialAuth from stardate.models import Blog from stardate.utils import get_post_model @@ -23,7 +23,13 @@ def create_user_social_auth(**kwargs): "provider": "dropbox", "uid": "1234", "user": kwargs['user'], - "extra_data": {"access_token": "oauth_token_secret=oauth_token_secret_string&oauth_token=oauth_token_string"} + "extra_data": { + "access_token": { + u'oauth_token_secret': u'oauth_token_secret_string', + u'oauth_token': u'oauth_token_string', + u'uid': u'123' + } + } } defaults.update(kwargs) social_auth, created = UserSocialAuth.objects.get_or_create(**defaults) diff --git a/stardate/tests/test_backends.py b/stardate/tests/test_backends.py index 0bb23a5..5c38240 100644 --- a/stardate/tests/test_backends.py +++ b/stardate/tests/test_backends.py @@ -6,7 +6,7 @@ from django.test import TestCase from django.utils import timezone -from social_auth.models import UserSocialAuth +from social.apps.django_app.default.models import UserSocialAuth from stardate.models import Blog from stardate.parsers import FileParser diff --git a/stardate/tests/test_parsers.py b/stardate/tests/test_parsers.py index 5ca8642..473dff8 100644 --- a/stardate/tests/test_parsers.py +++ b/stardate/tests/test_parsers.py @@ -5,7 +5,7 @@ from django.test import TestCase from django.utils import timezone -from social_auth.models import UserSocialAuth +from social.apps.django_app.default.models import UserSocialAuth from stardate.models import Blog from stardate.parsers import FileParser
replace django-social-auth with python-social-auth
2014-06-08T23:35:28
-1.0
audeering/audonnx
68
audeering__audonnx-68
['67']
e796811fcd27459f1808169f796404c0f81c0514
diff --git a/docs/usage.rst b/docs/usage.rst index c10d3f2..5c75bf1 100644 --- a/docs/usage.rst +++ b/docs/usage.rst @@ -251,7 +251,8 @@ Quantize weights ---------------- To reduce the memory print of a model, -we can quantize it. +we can quantize it, +compare the `MobilenetV2 example`_. For instance, we can store model weights as 8 bit integers. For quantization make sure you have installed @@ -264,9 +265,14 @@ as well as import onnxruntime.quantization + onnx_infer_path = os.path.join(onnx_root, 'model_infer.onnx') + onnxruntime.quantization.quant_pre_process( + onnx_model_path, + onnx_infer_path, + ) onnx_quant_path = os.path.join(onnx_root, 'model_quant.onnx') onnxruntime.quantization.quantize_dynamic( - onnx_model_path, + onnx_infer_path, onnx_quant_path, weight_type=onnxruntime.quantization.QuantType.QUInt8, ) @@ -508,6 +514,7 @@ In that case do: .. _audinterface: http://tools.pp.audeering.com/audinterface/ .. _audobject: http://tools.pp.audeering.com/audobject/ .. _librosa: https://librosa.org/doc/main/index.html +.. _MobilenetV2 example: https://github.com/microsoft/onnxruntime-inference-examples/blob/main/quantization/image_classification/cpu/ReadMe.md#onnx-runtime-quantization-example .. _ONNX: https://onnx.ai/ .. _OpenSMILE: https://github.com/audeering/opensmile-python .. _table: https://onnxruntime.ai/docs/execution-providers/CUDA-ExecutionProvider.html#requirements
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index d0a3d44..5911bf1 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -54,5 +54,5 @@ jobs: - name: Test building documentation run: | - python -m sphinx docs/ docs/_build/ -b html + python -m sphinx docs/ docs/_build/ -b html -W if: matrix.os == 'ubuntu-20.04'
Quantization should be used with preprocessing. With the newest version of `onnxruntime` (1.16.0) we are getting the following warning when building the docs:
```
WARNING: Cell printed to stderr:
WARNING:root:Please consider to run pre-processing before quantization. Refer to example: https://github.com/microsoft/onnxruntime-inference-examples/blob/main/quantization/image_classification/cpu/ReadMe.md
```
They suggest pre-processing the model first, e.g.
```bash
$ python -m onnxruntime.quantization.preprocess --input mobilenetv2-7.onnx --output mobilenetv2-7-infer.onnx
```
~~Unfortunately, `onnxruntime.quantization.preprocess` is not available inside Python with the API. Which means we will need to update the documentation on how to quantize a model to run commands in bash. This seems very strange to me, as this makes it also much harder to write scripts that automatically quantize some models.~~ It's available as `onnxruntime.quantization.quant_pre_process()`.
2023-10-10T12:52:48
-1.0
audeering/audonnx
37
audeering__audonnx-37
['35']
eb5d5589070f6f4277532f169e66186efe18f65c
diff --git a/audonnx/core/model.py b/audonnx/core/model.py index a31c40c..af458ed 100644 --- a/audonnx/core/model.py +++ b/audonnx/core/model.py @@ -206,6 +206,7 @@ def __call__( *, outputs: typing.Union[str, typing.Sequence[str]] = None, concat: bool = False, + squeeze: bool = False, ) -> typing.Union[ np.ndarray, typing.Dict[str, np.ndarray], @@ -242,6 +243,8 @@ def __call__( outputs: name of output or list with output names concat: if ``True``, concatenate output of the requested nodes + squeeze: if ``True``, + remove axes of length one from the output(s) Returns: model output @@ -279,6 +282,14 @@ def __call__( shapes = [self.outputs[node].shape for node in outputs] z = _concat(z, shapes) + if squeeze: + if isinstance(z, dict): + z = { + name: values.squeeze() for name, values in z.items() + } + else: + z = z.squeeze() + return z def __repr__(self) -> str:
diff --git a/tests/test_model.py b/tests/test_model.py index 12a3ba1..2f21e56 100644 --- a/tests/test_model.py +++ b/tests/test_model.py @@ -72,16 +72,24 @@ def min_max(x, sr): ] ) def test_call(model, outputs, expected): - y = model( - pytest.SIGNAL, - pytest.SAMPLING_RATE, - outputs=outputs, - ) - if isinstance(y, dict): - for key, values in y.items(): - np.testing.assert_equal(y[key], expected[key]) - else: - np.testing.assert_equal(y, expected) + for squeeze in [False, True]: + y = model( + pytest.SIGNAL, + pytest.SAMPLING_RATE, + outputs=outputs, + squeeze=squeeze, + ) + if isinstance(y, dict): + for key, values in y.items(): + if squeeze: + np.testing.assert_equal(y[key], expected[key].squeeze()) + else: + np.testing.assert_equal(y[key], expected[key]) + else: + if squeeze: + np.testing.assert_equal(y, expected.squeeze()) + else: + np.testing.assert_equal(y, expected) @pytest.mark.parametrize( @@ -206,13 +214,18 @@ def test_call_deprecated(model, output_names): ] ) def test_call_concat(model, outputs, expected): - y = model( - pytest.SIGNAL, - pytest.SAMPLING_RATE, - outputs=outputs, - concat=True, - ) - np.testing.assert_equal(y, expected) + for squeeze in [False, True]: + y = model( + pytest.SIGNAL, + pytest.SAMPLING_RATE, + outputs=outputs, + concat=True, + squeeze=squeeze, + ) + if squeeze: + np.testing.assert_equal(y, expected.squeeze()) + else: + np.testing.assert_equal(y, expected) @pytest.mark.parametrize(
Add squeeze argument to Model call. As discussed in https://github.com/audeering/audonnx/pull/34#issuecomment-1162838408, it would be useful to add a `squeeze` argument to `audonnx.Model.__call__()`, as this would simplify adding interfaces to it, e.g.
```python
interface = audinterface.Process(
    process_func=process_func,
    process_func_args={'squeeze': True},
)
```
instead of
```python
def process_func(signal, sampling_rate):
    return model(signal, sampling_rate)[0][0]

interface = audinterface.Process(process_func=process_func)
```
The expected behavior of `squeeze` could be something like this:
* `[[0]]` -> `0`
* `[[0, 1]]` -> `[0, 1]`
2022-06-22T10:57:23
-1.0
End of preview.
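The rows shown above can be consumed programmatically once the Parquet conversion is loaded. Below is a minimal, hypothetical sketch using the Hugging Face `datasets` library; the repo id `org/swe-instances` and the `train` split name are placeholder assumptions, since neither is shown in this preview, and the column names follow the schema described at the top of the page.

```python
# Minimal sketch of loading this dataset's Parquet conversion.
# NOTE: "org/swe-instances" is a hypothetical placeholder repo id,
# and a single "train" split is assumed.
from datasets import load_dataset

ds = load_dataset("org/swe-instances", split="train")

# Each row is one PR-derived task instance: `patch` and `test_patch`
# hold plain unified diffs, `problem_statement` the originating issue text.
row = ds[0]
print(row["repo"], row["instance_id"])
print(row["problem_statement"][:200])

# Write the gold patch out so it can be applied with `git apply`
# against the row's `base_commit` in a checkout of `repo`.
with open("gold.patch", "w", encoding="utf-8") as fh:
    fh.write(row["patch"])
```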

