instance_id (string, length 10-57) | patch (string, length 261-37.7k) | repo (string, length 7-53) | base_commit (string, length 40) | hints_text (string, 301 classes) | test_patch (string, length 212-2.22M) | problem_statement (string, length 23-37.7k) | version (string, 1 class) | environment_setup_commit (string, length 40) | FAIL_TO_PASS (sequence, length 1-4.94k) | PASS_TO_PASS (sequence, length 0-7.82k) | meta (dict) | created_at (string, length 25) | license (string, 8 classes) | __index_level_0__ (int64, 0-6.41k)
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---
ValvePython__steam-359 | diff --git a/requirements.txt b/requirements.txt
index 92661a9..14293fb 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -3,7 +3,8 @@ pycryptodomex>=3.7.0
requests>=2.9.1
vdf>=3.3
gevent>=1.3.0
-protobuf>=3.0.0
+protobuf>~3.0; python_version >= '3'
+protobuf<3.18.0; python_version < '3'
gevent-eventemitter~=2.1
cachetools>=3.0.0
enum34==1.1.2; python_version < '3.4'
diff --git a/setup.py b/setup.py
index 16b8870..382f8f2 100644
--- a/setup.py
+++ b/setup.py
@@ -26,7 +26,8 @@ install_requires = [
install_extras = {
'client': [
'gevent>=1.3.0',
- 'protobuf>=3.0.0',
+ 'protobuf>~3.0; python_version >= "3"',
+ 'protobuf<3.18.0; python_version < "3"',
'gevent-eventemitter~=2.1',
],
}
diff --git a/steam/game_servers.py b/steam/game_servers.py
index 07b09f1..80bf108 100644
--- a/steam/game_servers.py
+++ b/steam/game_servers.py
@@ -142,8 +142,11 @@ def _u(data):
class StructReader(_StructReader):
- def read_cstring(self):
- return _u(super(StructReader, self).read_cstring())
+ def read_cstring(self, binary=False):
+ raw = super(StructReader, self).read_cstring()
+ if binary:
+ return raw
+ return _u(raw)
class MSRegion(IntEnum):
@@ -526,7 +529,7 @@ def a2s_players(server_addr, timeout=2, challenge=0):
return players
-def a2s_rules(server_addr, timeout=2, challenge=0):
+def a2s_rules(server_addr, timeout=2, challenge=0, binary=False):
"""Get rules from server
:param server_addr: (ip, port) for the server
@@ -535,9 +538,11 @@ def a2s_rules(server_addr, timeout=2, challenge=0):
:type timeout: float
:param challenge: (optional) challenge number
:type challenge: int
+ :param binary: (optional) return rules as raw bytes
+ :type binary: bool
:raises: :class:`RuntimeError`, :class:`socket.timeout`
:returns: a list of rules
- :rtype: :class:`list`
+ :rtype: :class:`dict`
"""
ss = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
ss.connect(server_addr)
@@ -571,13 +576,14 @@ def a2s_rules(server_addr, timeout=2, challenge=0):
rules = {}
while len(rules) != num_rules:
- name = data.read_cstring()
- value = data.read_cstring()
-
- if _re_match(r'^\-?[0-9]+$', value):
- value = int(value)
- elif _re_match(r'^\-?[0-9]+\.[0-9]+$', value):
- value = float(value)
+ name = data.read_cstring(binary=binary)
+ value = data.read_cstring(binary=binary)
+
+ if not binary:
+ if _re_match(r'^\-?[0-9]+$', value):
+ value = int(value)
+ elif _re_match(r'^\-?[0-9]+\.[0-9]+$', value):
+ value = float(value)
rules[name] = value
| ValvePython/steam | 061ca33842c9ec5aa7c0866d20cbeed0759d5ea5 | diff --git a/tests/test_game_servers.py b/tests/test_game_servers.py
new file mode 100644
index 0000000..47fc89e
--- /dev/null
+++ b/tests/test_game_servers.py
@@ -0,0 +1,64 @@
+import mock
+import socket
+import unittest
+
+from steam.game_servers import a2s_rules
+
+
+class TestA2SRules(unittest.TestCase):
+ @mock.patch("socket.socket")
+ def test_returns_rules_with_default_arguments(self, mock_socket_class):
+ mock_socket = mock_socket_class.return_value
+ mock_socket.recv.side_effect = [
+ b"\xff\xff\xff\xffA\x01\x02\x03\x04",
+ b"\xff\xff\xff\xffE\x03\0text\0b\x99r\0int\x0042\0float\x0021.12\0"
+ ]
+
+ rules = a2s_rules(("addr", 1234))
+
+ self.assertEqual(
+ {
+ "text": u"b\ufffdr",
+ "int": 42,
+ "float": 21.12
+ },
+ rules)
+
+ mock_socket_class.assert_called_once_with(
+ socket.AF_INET, socket.SOCK_DGRAM)
+
+ mock_socket.connect.assert_called_once_with(("addr", 1234))
+ mock_socket.settimeout.assert_called_once_with(2)
+
+ self.assertEqual(2, mock_socket.send.call_count)
+ mock_socket.send.assert_has_calls([
+ mock.call(b"\xff\xff\xff\xffV\0\0\0\0"),
+ mock.call(b"\xff\xff\xff\xffV\x01\x02\x03\x04")
+ ])
+
+ self.assertEqual(2, mock_socket.recv.call_count)
+ mock_socket.recv.assert_has_calls([
+ mock.call(512),
+ mock.call(2048)
+ ])
+
+ mock_socket.close.assert_called_once_with()
+
+ @mock.patch("socket.socket")
+ def test_returns_rules_as_bytes_when_binary_is_true(
+ self, mock_socket_class):
+ mock_socket = mock_socket_class.return_value
+ mock_socket.recv.side_effect = [
+ b"\xff\xff\xff\xffA\x01\x02\x03\x04",
+ b"\xff\xff\xff\xffE\x03\0text\0b\x99r\0int\x0042\0float\x0021.12\0"
+ ]
+
+ rules = a2s_rules(("addr", 1234), binary=True)
+
+ self.assertEqual(
+ {
+ b"text": b"b\x99r",
+ b"int": b"42",
+ b"float": b"21.12"
+ },
+ rules)
| Option to return a2s_rules as bytes instead of strings
The `steam.game_servers.a2s_rules` function currently returns the server rules as a dictionary of strings to strings (`Dict[str, str]`), which is suitable for plain text rules. However, some game servers use the Steam `A2S_RULES` API to return binary data. For example, Bohemia Interactive's Arma and DayZ servers use the rules API to return required DLC and Mod information (see https://community.bistudio.com/wiki/Arma_3:_ServerBrowserProtocol3 for details).
Unfortunately, because `steam.game_servers.a2s_rules` decodes the A2S_RULES server response data as UTF-8 **with `"replace"` as the `errors` option**, important binary data is lost when querying an Arma or DayZ server's rules.
For example:
```python
>>> pprint(a2s_rules(("192.169.82.235",2303)))
{'\x01\x01': '\x01\x01\x01\x02\x01\x02\x01\x02\x02���_\x04ITv�\x0eNamalsk '
'Island#�:\x13Ԓ\x11\x0b'
'Livonia DLC\x03\x0eCrimsonZamboni\x04dayz\x06sumrak',
'allowedBuild': 0,
'dedicated': 1,
'island': 'namalsk',
'language': 65545,
'platform': 'win',
'requiredBuild': 0,
'requiredVersion': 113,
'timeLeft': 15}
```
It would be very helpful if we could get a server's `A2S_RULES` information as raw `bytes`, instead of `str` objects. Perhaps an optional keyword argument could be added to the `steam.game_servers.a2s_rules` function? For example:
```python
a2s_rules(("192.169.82.235",2303), binary=True)
```
The default should be `binary=False` to maintain backwards compatibility but when `binary=True`, the resulting rule names and values would be returned as `bytes` objects (i.e. `Dict[bytes, bytes]`), allowing the caller to further decode the binary data, as needed. | 0.0 | 061ca33842c9ec5aa7c0866d20cbeed0759d5ea5 | [
"tests/test_game_servers.py::TestA2SRules::test_returns_rules_as_bytes_when_binary_is_true"
] | [
"tests/test_game_servers.py::TestA2SRules::test_returns_rules_with_default_arguments"
] | {
"failed_lite_validators": [
"has_hyperlinks",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false
} | 2021-09-25 17:38:38+00:00 | mit | 817 |
|
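For illustration, a minimal sketch of how a caller might use the `binary=True` option added in the patch above; the server address is the placeholder from the issue, and the decode/hex handling is an assumption about caller-side parsing, not part of the library.

```python
from steam.game_servers import a2s_rules

ADDR = ("192.169.82.235", 2303)  # placeholder address from the issue

# With binary=True the rule names and values come back as raw bytes.
rules = a2s_rules(ADDR, binary=True)

for name, value in rules.items():
    try:
        # Plain-text rules still decode cleanly as UTF-8.
        print(name.decode("utf-8"), "=", value.decode("utf-8"))
    except UnicodeDecodeError:
        # Binary payloads (e.g. Arma mod/DLC blobs) stay intact for
        # custom parsing instead of being mangled by errors="replace".
        print(name, "=", value.hex())
```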
Viatorus__quom-34 | diff --git a/README.md b/README.md
index db84fbb..4486b8c 100644
--- a/README.md
+++ b/README.md
@@ -44,6 +44,10 @@ optional arguments:
--trim, -t Reduce continuous line breaks to one. Default: True
--include_directory INCLUDE_DIRECTORY, -I INCLUDE_DIRECTORY
Add include directories for header files.
+ --source_directory SOURCE_DIRECTORY, -S SOURCE_DIRECTORY
+ Set the source directories for source files.
+ Use ./ in front of a path to mark as relative to the header file.
+
```
## Simple example
diff --git a/quom/__main__.py b/quom/__main__.py
index d717853..ed25d0b 100644
--- a/quom/__main__.py
+++ b/quom/__main__.py
@@ -1,12 +1,12 @@
import argparse
import sys
-import traceback
from pathlib import Path
+from typing import List
from .quom import Quom
-def main():
+def main(args: List[str]):
parser = argparse.ArgumentParser(prog='quom', description='Single header generator for C/C++ libraries.')
parser.add_argument('input_path', metavar='input', type=Path, help='Input file path of the main file.')
parser.add_argument('output_path', metavar='output', type=Path,
@@ -20,12 +20,33 @@ def main():
help='Reduce continuous line breaks to one. Default: %(default)s')
parser.add_argument('--include_directory', '-I', type=Path, action='append', default=[],
help='Add include directories for header files.')
+ parser.add_argument('--source_directory', '-S', type=str, action='append', default=['.'],
+ help='Set the source directories for source files. '
+ 'Use ./ in front of a path to mark as relative to the header file.')
- args = parser.parse_args()
+ args = parser.parse_args(args)
+
+ # Transform source directories to distingue between:
+ # - relative from header file (starting with dot)
+ # - relative from workdir
+ # - absolute path
+ relative_source_directories = []
+ source_directories = []
+ for src in args.source_directory:
+ path = Path(src)
+ if src == '.' or src.startswith('./'):
+ relative_source_directories.append(path)
+ else:
+ source_directories.append(path.resolve())
with args.output_path.open('w+') as file:
- Quom(args.input_path, file, args.stitch, args.include_guard, args.trim, args.include_directory)
+ Quom(args.input_path, file, args.stitch, args.include_guard, args.trim, args.include_directory,
+ relative_source_directories, source_directories)
+
+
+def run():
+ main(sys.argv[1:])
if __name__ == '__main__':
- sys.exit(main())
+ run()
diff --git a/quom/quom.py b/quom/quom.py
index 20bc83e..d366219 100644
--- a/quom/quom.py
+++ b/quom/quom.py
@@ -1,3 +1,4 @@
+import os
import re
from pathlib import Path
from queue import Queue
@@ -29,12 +30,17 @@ def contains_only_whitespace_and_comment_tokens(tokens: List[Token]):
class Quom:
def __init__(self, src_file_path: Union[Path, str], dst: TextIO, stitch_format: str = None,
include_guard_format: str = None, trim: bool = True,
- include_directories: List[Union[Path, str]] = None):
+ include_directories: List[Union[Path, str]] = None,
+ relative_source_directories: List[Union[Path]] = None,
+ source_directories: List[Union[Path]] = None):
self.__dst = dst
self.__stitch_format = stitch_format
self.__include_guard_format = re.compile('^{}$'.format(include_guard_format)) if include_guard_format else None
self.__trim = trim
self.__include_directories = [Path(x) for x in include_directories] if include_directories else []
+ self.__relative_source_directories = relative_source_directories if relative_source_directories else [] \
+ if source_directories else [Path('.')]
+ self.__source_directories = source_directories if source_directories else [Path('.')]
self.__processed_files = set()
self.__source_files = Queue()
@@ -87,7 +93,9 @@ class Quom:
self.__write_token(token, is_main_header)
- self.__find_possible_source_file(file_path)
+ file_path = self.__find_possible_source_file(file_path)
+ if file_path:
+ self.__source_files.put(file_path)
def __write_token(self, token: Token, is_main_header: bool):
if isinstance(token, StartToken) or isinstance(token, EndToken):
@@ -129,16 +137,21 @@ class Quom:
contains_only_whitespace_and_comment_tokens(token.preprocessor_arguments[i + 1:]):
return True
- def __find_possible_source_file(self, header_file_path: Path):
+ def __find_possible_source_file(self, header_file_path: Path) -> Union[Path, None]:
if header_file_path.suffix in ['.c', '.cpp', '.cxx', '.cc', '.c++', '.cp', '.C']:
return
# Checks if a equivalent compilation unit exits.
for extension in ['.c', '.cpp', '.cxx', '.cc', '.c++', '.cp', '.C']:
- file_path = header_file_path.with_suffix(extension)
- if file_path.exists():
- self.__source_files.put(file_path)
- break
+ for src_dir in self.__relative_source_directories:
+ file_path = (header_file_path.parent / src_dir / header_file_path.name).with_suffix(extension)
+ if file_path.exists():
+ return file_path
+ for src_dir in self.__source_directories:
+ file_path = (src_dir / header_file_path.name).with_suffix(extension).resolve()
+ if file_path.exists():
+ return file_path
+ return None
def __scan_for_include(self, file_path: Path, token: Token, is_source_file: bool) -> Union[Token, None]:
if not isinstance(token, PreprocessorIncludeToken) or not token.is_local_include:
| Viatorus/quom | b4bb7665357892411081b86b7c33c0ffc18d2f1b | diff --git a/tests/test_quom/test_normal.py b/tests/test_quom/test_normal.py
index f0928dd..38d53f0 100644
--- a/tests/test_quom/test_normal.py
+++ b/tests/test_quom/test_normal.py
@@ -4,6 +4,7 @@ from pathlib import Path
import pytest
from quom import Quom, QuomError
+from quom.__main__ import main
FILE_MAIN_HPP = """\
#pragma once
@@ -247,3 +248,13 @@ def test_with_missing_source_file(fs):
Quom(Path('main.hpp'), dst)
assert dst.getvalue() == RESULT_NORMAL_WITHOUT_SOURCES
+
+
+def test_main(fs):
+ init()
+
+ main(['main.hpp', 'result.hpp'])
+ assert Path('result.hpp').read_text() == RESULT_NORMAL
+
+ main(['main.hpp', 'result.hpp', '-S', '.'])
+ assert Path('result.hpp').read_text() == RESULT_NORMAL
diff --git a/tests/test_quom/test_source_directory.py b/tests/test_quom/test_source_directory.py
new file mode 100644
index 0000000..790d417
--- /dev/null
+++ b/tests/test_quom/test_source_directory.py
@@ -0,0 +1,62 @@
+import os
+from io import StringIO
+from pathlib import Path
+
+from quom import Quom
+from quom.__main__ import main
+
+FILE_MAIN_HPP = """
+int foo = 3;
+
+int foo();
+"""
+
+FILE_MAIN_CPP = """
+int foo() { return 42; }
+"""
+
+RESULT = """
+int foo = 3;
+
+int foo();
+
+int foo() { return 42; }
+"""
+
+
+def test_source_directory(fs):
+ os.makedirs('project/')
+ os.chdir('project/')
+ os.makedirs('include/')
+ os.makedirs('src/')
+
+ with open('include/main.hpp', 'w+') as file:
+ file.write(FILE_MAIN_HPP)
+
+ with open('src/main.cpp', 'w+') as file:
+ file.write(FILE_MAIN_CPP)
+
+ dst = StringIO()
+ Quom(Path('include/main.hpp'), dst)
+ assert dst.getvalue() != RESULT
+
+ dst = StringIO()
+ Quom(Path('include/main.hpp'), dst, relative_source_directories=[Path('../src')])
+ assert dst.getvalue() == RESULT
+
+ dst = StringIO()
+ Quom(Path('include/main.hpp'), dst, source_directories=[Path('src').resolve()])
+ assert dst.getvalue() == RESULT
+
+ dst = StringIO()
+ Quom(Path('include/main.hpp'), dst, source_directories=[Path('/project/src')])
+ assert dst.getvalue() == RESULT
+
+ main(['include/main.hpp', 'result.hpp', '-S', './../src'])
+ assert Path('result.hpp').read_text() == RESULT
+
+ main(['include/main.hpp', 'result.hpp', '-S', 'src'])
+ assert Path('result.hpp').read_text() == RESULT
+
+ main(['include/main.hpp', 'result.hpp', '-S', '/project/src'])
+ assert Path('result.hpp').read_text() == RESULT
| Feature request - Additional directories to search for sources
Hi, I found this package very useful, but when trying to use it I encountered a small problem. The case where sources are located in a different directory than their headers is not supported. A common folder structure for a project can be as follows:
_project/_
  _public/_
  _include/_
  _src/_
So it would be nice if it were possible to specify source directories relative to the include directories, for instance via a `--relative_source_dir` or `-SI` argument.
The most common use could be: `-SI "./" -SI "../" -SI "../src"`
By default the directory suffixes array is `["./"]`, but if at least one `-SI` argument is provided it should be overwritten. | 0.0 | b4bb7665357892411081b86b7c33c0ffc18d2f1b | [
"tests/test_quom/test_normal.py::test_main",
"tests/test_quom/test_source_directory.py::test_source_directory"
] | [
"tests/test_quom/test_normal.py::test_with_missing_source_file",
"tests/test_quom/test_normal.py::test_with_include_guard_format",
"tests/test_quom/test_normal.py::test_with_mismatching_include_guard_format",
"tests/test_quom/test_normal.py::test_normal",
"tests/test_quom/test_normal.py::test_normal_without_trim",
"tests/test_quom/test_normal.py::test_with_mismatching_stitch",
"tests/test_quom/test_normal.py::test_with_stitch_location",
"tests/test_quom/test_normal.py::test_with_missing_header_file"
] | {
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false
} | 2021-04-22 19:15:57+00:00 | mit | 818 |
|
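A short sketch of the three ways the new `-S` flag resolves a source directory, mirroring the shipped tests above: a leading `./` means relative to the header file being processed, otherwise the path is taken relative to the working directory or as absolute. The project layout is the one from the tests.

```python
from quom.__main__ import main

# relative to each header file being processed (leading ./)
main(['include/main.hpp', 'result.hpp', '-S', './../src'])

# relative to the working directory
main(['include/main.hpp', 'result.hpp', '-S', 'src'])

# absolute path
main(['include/main.hpp', 'result.hpp', '-S', '/project/src'])
```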
Viatorus__quom-43 | diff --git a/src/quom/quom.py b/src/quom/quom.py
index 5dbaad9..5f7d51c 100644
--- a/src/quom/quom.py
+++ b/src/quom/quom.py
@@ -80,6 +80,7 @@ class Quom:
raise QuomError('Include not found: "{}"'.format(include_path))
# Skip already processed files.
+ file_path = file_path.resolve()
if file_path in self.__processed_files:
return
self.__processed_files.add(file_path)
| Viatorus/quom | 8d13a41baea1a930d27a869ff468aa72fe25b100 | diff --git a/tests/test_quom/same_file_different_include.py b/tests/test_quom/same_file_different_include.py
new file mode 100644
index 0000000..8ef3b9d
--- /dev/null
+++ b/tests/test_quom/same_file_different_include.py
@@ -0,0 +1,41 @@
+import os
+from io import StringIO
+
+from quom import Quom
+
+FILE_FOO_HPP = """
+#include "../b/bar.hpp"
+"""
+
+FILE_BAR_HPP = """
+int foo();
+"""
+
+FILE_MAIN_CPP = """
+#include "a/foo.hpp"
+#include "b/bar.hpp"
+"""
+
+RESULT = """
+
+int foo();
+"""
+
+
+def test_same_file_different_include(fs):
+ os.makedirs('a')
+ os.makedirs('b')
+
+ with open('main.cpp', 'w+') as file:
+ file.write(FILE_MAIN_CPP)
+
+ with open('a/foo.hpp', 'w+') as file:
+ file.write(FILE_FOO_HPP)
+
+ with open('b/bar.hpp', 'w+') as file:
+ file.write(FILE_BAR_HPP)
+
+ dst = StringIO()
+ Quom('main.cpp', dst)
+
+ assert dst.getvalue() == RESULT
| option to skip already included files
Hello, and thanks for creating such an awesome utility! I'm using it in my [CTRE](https://github.com/hanickadot/compile-time-regular-expressions) project. I noticed that quom sometimes includes the same file multiple times.
Would it be possible to add an option to skip already-included files, preferably based on the normalised path?
Thanks! ❤️ | 0.0 | 8d13a41baea1a930d27a869ff468aa72fe25b100 | [
"tests/test_quom/same_file_different_include.py::test_same_file_different_include"
] | [] | {
"failed_lite_validators": [
"has_hyperlinks"
],
"has_test_patch": true,
"is_lite": false
} | 2021-06-15 18:54:32+00:00 | mit | 819 |
|
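A standalone sketch of why the one-line `resolve()` fix above deduplicates the file: two relative spellings of the same header normalise to one absolute path. The helper name is hypothetical, not quom API.

```python
from pathlib import Path

seen = set()

def already_processed(include_path):  # hypothetical helper
    # Path.resolve() normalises "a/../b/bar.hpp" and "b/bar.hpp"
    # to the same absolute path, so the file is emitted only once.
    p = Path(include_path).resolve()
    if p in seen:
        return True
    seen.add(p)
    return False

print(already_processed("b/bar.hpp"))       # False: first occurrence
print(already_processed("a/../b/bar.hpp"))  # True: same file, skipped
```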
Viatorus__quom-44 | diff --git a/src/quom/__main__.py b/src/quom/__main__.py
index 77d99b3..bb62485 100644
--- a/src/quom/__main__.py
+++ b/src/quom/__main__.py
@@ -29,6 +29,8 @@ def main(args: List[str]):
parser.add_argument('--source_directory', '-S', type=str, action='append', default=['.'],
help='Set the source directories for source files. '
'Use ./ or .\\ in front of a path to mark as relative to the header file.')
+ parser.add_argument('--encoding', '-e', type=str, default='utf-8',
+ help='The encoding used to read and write all files.')
args = parser.parse_args(args)
@@ -45,9 +47,9 @@ def main(args: List[str]):
else:
source_directories.append(path.resolve())
- with args.output_path.open('w+') as file:
+ with args.output_path.open('w+', encoding=args.encoding) as file:
Quom(args.input_path, file, args.stitch, args.include_guard, args.trim, args.include_directory,
- relative_source_directories, source_directories)
+ relative_source_directories, source_directories, args.encoding)
def run():
diff --git a/src/quom/quom.py b/src/quom/quom.py
index 5f7d51c..3cf4e13 100644
--- a/src/quom/quom.py
+++ b/src/quom/quom.py
@@ -31,7 +31,8 @@ class Quom:
include_guard_format: str = None, trim: bool = True,
include_directories: List[Union[Path, str]] = None,
relative_source_directories: List[Union[Path]] = None,
- source_directories: List[Union[Path]] = None):
+ source_directories: List[Union[Path]] = None,
+ encoding: str = 'utf-8'):
self.__dst = dst
self.__stitch_format = stitch_format
self.__include_guard_format = re.compile('^{}$'.format(include_guard_format)) if include_guard_format else None
@@ -40,6 +41,7 @@ class Quom:
self.__relative_source_directories = relative_source_directories if relative_source_directories else [] \
if source_directories else [Path('.')]
self.__source_directories = source_directories if source_directories else [Path('.')]
+ self.__encoding = encoding
self.__processed_files = set()
self.__source_files = Queue()
@@ -65,16 +67,11 @@ class Quom:
is_main_header=False):
# First check if file exists relative.
file_path = relative_path / include_path
- if file_path.exists():
- with file_path.open() as file:
- tokens = tokenize(file.read())
- else:
+ if not file_path.exists():
# Otherwise search in include directories.
for include_directory in self.__include_directories:
file_path = include_directory / include_path
if file_path.exists():
- with file_path.open() as file:
- tokens = tokenize(file.read())
break
else:
raise QuomError('Include not found: "{}"'.format(include_path))
@@ -85,6 +82,9 @@ class Quom:
return
self.__processed_files.add(file_path)
+ # Tokenize the file.
+ tokens = tokenize(file_path.read_text(encoding=self.__encoding))
+
for token in tokens:
# Find local includes.
token = self.__scan_for_include(file_path, token, is_source_file)
| Viatorus/quom | 1c180ae75c91edf5c174053332aa96007fe13caa | diff --git a/tests/test_quom/same_file_different_include.py b/tests/test_quom/same_file_different_include.py
index 8ef3b9d..a913dbc 100644
--- a/tests/test_quom/same_file_different_include.py
+++ b/tests/test_quom/same_file_different_include.py
@@ -26,13 +26,13 @@ def test_same_file_different_include(fs):
os.makedirs('a')
os.makedirs('b')
- with open('main.cpp', 'w+') as file:
+ with open('main.cpp', 'w+', encoding='utf-8') as file:
file.write(FILE_MAIN_CPP)
- with open('a/foo.hpp', 'w+') as file:
+ with open('a/foo.hpp', 'w+', encoding='utf-8') as file:
file.write(FILE_FOO_HPP)
- with open('b/bar.hpp', 'w+') as file:
+ with open('b/bar.hpp', 'w+', encoding='utf-8') as file:
file.write(FILE_BAR_HPP)
dst = StringIO()
diff --git a/tests/test_quom/test_file_encoding.py b/tests/test_quom/test_file_encoding.py
new file mode 100644
index 0000000..416a993
--- /dev/null
+++ b/tests/test_quom/test_file_encoding.py
@@ -0,0 +1,40 @@
+from pathlib import Path
+
+import pytest
+
+from quom.__main__ import main
+
+FILE_MAIN_HPP = """
+int foo(); // qθομ"""
+
+
+def test_file_encoding_default_encoding(fs):
+ with open('main.hpp', 'w+', encoding='utf-8') as file:
+ file.write(FILE_MAIN_HPP)
+
+ main(['main.hpp', 'result.hpp'])
+ assert Path('result.hpp').read_text('utf-8') == FILE_MAIN_HPP
+
+ with pytest.raises(UnicodeDecodeError):
+ Path('result.hpp').read_text('ascii')
+
+ with pytest.raises(UnicodeDecodeError):
+ Path('result.hpp').read_text('utf-32')
+
+
+def test_file_encoding_custom_encoding(fs):
+ with open('main.hpp', 'w+', encoding='utf-32') as file:
+ file.write(FILE_MAIN_HPP)
+
+ main(['main.hpp', 'result.hpp', '--encoding=utf-32'])
+
+ assert Path('result.hpp').read_text('utf-32') == FILE_MAIN_HPP
+
+ with pytest.raises(UnicodeDecodeError):
+ Path('result.hpp').read_text('utf-8')
+
+ with pytest.raises(UnicodeDecodeError):
+ main(['main.hpp', 'result.hpp'])
+
+ with pytest.raises(UnicodeDecodeError):
+ main(['main.hpp', 'result.hpp', '--encoding=utf-8'])
diff --git a/tests/test_quom/test_include_directory.py b/tests/test_quom/test_include_directory.py
index 1d8e572..1666ecf 100644
--- a/tests/test_quom/test_include_directory.py
+++ b/tests/test_quom/test_include_directory.py
@@ -79,28 +79,28 @@ def test_include_directory(fs):
os.makedirs('include/my_lib/util')
os.makedirs('include/my_other_lib/')
- with open('include/my_lib/main.hpp', 'w+') as file:
+ with open('include/my_lib/main.hpp', 'w+', encoding='utf-8') as file:
file.write(FILE_MAIN_HPP)
- with open('include/my_lib/core/core.hpp', 'w+') as file:
+ with open('include/my_lib/core/core.hpp', 'w+', encoding='utf-8') as file:
file.write(FILE_CORE_HPP)
- with open('include/my_lib/core/core.cpp', 'w+') as file:
+ with open('include/my_lib/core/core.cpp', 'w+', encoding='utf-8') as file:
file.write(FILE_CORE_CPP)
- with open('include/my_lib/util/foo.hpp', 'w+') as file:
+ with open('include/my_lib/util/foo.hpp', 'w+', encoding='utf-8') as file:
file.write(FILE_FOO_HPP)
- with open('include/my_lib/util/foo.cpp', 'w+') as file:
+ with open('include/my_lib/util/foo.cpp', 'w+', encoding='utf-8') as file:
file.write(FILE_FOO_CPP)
- with open('include/my_other_lib/bar.hpp', 'w+') as file:
+ with open('include/my_other_lib/bar.hpp', 'w+', encoding='utf-8') as file:
file.write(FILE_BAR_HPP)
- with open('include/my_other_lib/bar.cpp', 'w+') as file:
+ with open('include/my_other_lib/bar.cpp', 'w+', encoding='utf-8') as file:
file.write(FILE_BAR_CPP)
- with open('include/my_other_lib/info.hpp', 'w+') as file:
+ with open('include/my_other_lib/info.hpp', 'w+', encoding='utf-8') as file:
file.write(FILE_INFO_HPP)
dst = StringIO()
diff --git a/tests/test_quom/test_last_source_file.py b/tests/test_quom/test_last_source_file.py
index 96e2c98..c227c21 100644
--- a/tests/test_quom/test_last_source_file.py
+++ b/tests/test_quom/test_last_source_file.py
@@ -82,13 +82,13 @@ int foo = 42;"""
def init():
- with open('main.hpp', 'w+') as file:
+ with open('main.hpp', 'w+', encoding='utf-8') as file:
file.write(FILE_MAIN_HPP)
- with open('foo.hpp', 'w+') as file:
+ with open('foo.hpp', 'w+', encoding='utf-8') as file:
file.write(FILE_FOO_HPP)
- with open('foo.cpp', 'w+') as file:
+ with open('foo.cpp', 'w+', encoding='utf-8') as file:
file.write(FILE_FOO_CPP)
@@ -111,7 +111,7 @@ def test_normal_without_trim(fs):
def test_without_newline_at_end(fs):
- with open('main.hpp', 'w+') as file:
+ with open('main.hpp', 'w+', encoding='utf-8') as file:
file.write('int a;')
dst = StringIO()
diff --git a/tests/test_quom/test_normal.py b/tests/test_quom/test_normal.py
index a718b87..48dd016 100644
--- a/tests/test_quom/test_normal.py
+++ b/tests/test_quom/test_normal.py
@@ -166,13 +166,13 @@ int foo = 42;
def init():
- with open('main.hpp', 'w+') as file:
+ with open('main.hpp', 'w+', encoding='utf-8') as file:
file.write(FILE_MAIN_HPP)
- with open('foo.hpp', 'w+') as file:
+ with open('foo.hpp', 'w+', encoding='utf-8') as file:
file.write(FILE_FOO_HPP)
- with open('foo.cpp', 'w+') as file:
+ with open('foo.cpp', 'w+', encoding='utf-8') as file:
file.write(FILE_FOO_CPP)
diff --git a/tests/test_quom/test_source_directory.py b/tests/test_quom/test_source_directory.py
index 790d417..9bd63bc 100644
--- a/tests/test_quom/test_source_directory.py
+++ b/tests/test_quom/test_source_directory.py
@@ -30,10 +30,10 @@ def test_source_directory(fs):
os.makedirs('include/')
os.makedirs('src/')
- with open('include/main.hpp', 'w+') as file:
+ with open('include/main.hpp', 'w+', encoding='utf-8') as file:
file.write(FILE_MAIN_HPP)
- with open('src/main.cpp', 'w+') as file:
+ with open('src/main.cpp', 'w+', encoding='utf-8') as file:
file.write(FILE_MAIN_CPP)
dst = StringIO()
| Open files with UTF-8 encoding
On Windows, Python uses ANSI encoding as its default text encoding when opening files. This is fine if you only use ASCII characters since ANSI is compatible with ASCII. But otherwise you will get errors like the following:
```py
File "C:\Users\Username\AppData\Roaming\Python\Python39\site-packages\quom\__main__.py", line 54, in run
main(sys.argv[1:])
File "C:\Users\Username\AppData\Roaming\Python\Python39\site-packages\quom\__main__.py", line 49, in main
Quom(args.input_path, file, args.stitch, args.include_guard, args.trim, args.include_directory,
File "C:\Users\Username\AppData\Roaming\Python\Python39\site-packages\quom\quom.py", line 49, in __init__
self.__process_file(Path(), src_file_path, False, True)
File "C:\Users\Username\AppData\Roaming\Python\Python39\site-packages\quom\quom.py", line 70, in __process_file
tokens = tokenize(file.read())
UnicodeDecodeError: 'gbk' codec can't decode byte 0xbf in position 2: illegal multibyte sequence
```
(GBK, or CP936, is the ANSI encoding under Simplified Chinese)
My suggestion is to open files with UTF-8 encoding, which is more appropriate for most programmers:
```py
file_path.open(encoding='utf-8')
``` | 0.0 | 1c180ae75c91edf5c174053332aa96007fe13caa | [
"tests/test_quom/test_file_encoding.py::test_file_encoding_custom_encoding"
] | [
"tests/test_quom/same_file_different_include.py::test_same_file_different_include",
"tests/test_quom/test_file_encoding.py::test_file_encoding_default_encoding",
"tests/test_quom/test_include_directory.py::test_include_directory",
"tests/test_quom/test_last_source_file.py::test_normal",
"tests/test_quom/test_last_source_file.py::test_normal_without_trim",
"tests/test_quom/test_last_source_file.py::test_without_newline_at_end",
"tests/test_quom/test_normal.py::test_normal",
"tests/test_quom/test_normal.py::test_normal_without_trim",
"tests/test_quom/test_normal.py::test_with_include_guard_format",
"tests/test_quom/test_normal.py::test_with_mismatching_include_guard_format",
"tests/test_quom/test_normal.py::test_with_stitch_location",
"tests/test_quom/test_normal.py::test_with_mismatching_stitch",
"tests/test_quom/test_normal.py::test_with_missing_header_file",
"tests/test_quom/test_normal.py::test_with_missing_source_file",
"tests/test_quom/test_normal.py::test_main",
"tests/test_quom/test_source_directory.py::test_source_directory"
] | {
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false
} | 2021-06-15 19:31:50+00:00 | mit | 820 |
|
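A sketch exercising the `--encoding` flag added above, following the shipped test almost verbatim; it writes a small file in the working directory.

```python
from pathlib import Path
from quom.__main__ import main

# A header that is not representable in a single-byte ANSI codepage.
Path('main.hpp').write_text('int foo(); // qθομ', encoding='utf-32')

# The default is utf-8, which would raise UnicodeDecodeError here;
# the flag makes quom read and write with the given encoding.
main(['main.hpp', 'result.hpp', '--encoding=utf-32'])

print(Path('result.hpp').read_text(encoding='utf-32'))
```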
Viatorus__quom-46 | diff --git a/src/quom/quom.py b/src/quom/quom.py
index 3cf4e13..0f6f7cd 100644
--- a/src/quom/quom.py
+++ b/src/quom/quom.py
@@ -1,3 +1,4 @@
+import os
import re
from pathlib import Path
from queue import Queue
@@ -46,6 +47,7 @@ class Quom:
self.__processed_files = set()
self.__source_files = Queue()
self.__cont_lb = CONTINUOUS_LINE_BREAK_START
+ self.__used_linesep = None
self.__prev_token = EmptyToken()
self.__process_file(Path(), src_file_path, False, True)
@@ -56,12 +58,7 @@ class Quom:
.format(stitch_format))
while not self.__source_files.empty():
self.__process_file(Path(), self.__source_files.get(), True)
- # Write last token.
- self.__write_token(self.__prev_token, True)
- elif self.__cont_lb == CONTINUOUS_LINE_BREAK_START or not isinstance(self.__prev_token,
- LinebreakWhitespaceToken):
- # Write last token, if not a continuous line break.
- self.__write_token(self.__prev_token, True)
+ self.__write_line_break_if_missing()
def __process_file(self, relative_path: Path, include_path: Path, is_source_file: bool,
is_main_header=False):
@@ -109,9 +106,8 @@ class Quom:
if self.__is_cont_line_break(token):
return
- # Write previous token, store current.
- if self.__prev_token:
- self.__dst.write(str(self.__prev_token.raw))
+ # Write token and store.
+ self.__dst.write(str(token.raw))
self.__prev_token = token
@staticmethod
@@ -171,10 +167,15 @@ class Quom:
while not self.__source_files.empty():
self.__process_file(Path(), self.__source_files.get(), True)
+ self.__write_line_break_if_missing()
return True
def __is_cont_line_break(self, token: Token) -> bool:
+ # Save a used line break for later.
+ if self.__used_linesep is None and isinstance(token, LinebreakWhitespaceToken):
+ self.__used_linesep = token.raw
+
if not self.__trim:
return False
@@ -187,3 +188,9 @@ class Quom:
self.__cont_lb = CONTINUOUS_LINE_BREAK_START
return self.__cont_lb >= CONTINUOUS_BREAK_REACHED
+
+ def __write_line_break_if_missing(self):
+ if not isinstance(self.__prev_token, LinebreakWhitespaceToken):
+ if self.__used_linesep is None:
+ self.__used_linesep = os.linesep # fallback
+ self.__dst.write(self.__used_linesep)
| Viatorus/quom | 4633b5aaba35454c425227ec498ea1e19caade39 | diff --git a/tests/test_quom/test_last_source_file.py b/tests/test_quom/test_last_source_file.py
index c227c21..7f148d2 100644
--- a/tests/test_quom/test_last_source_file.py
+++ b/tests/test_quom/test_last_source_file.py
@@ -53,7 +53,8 @@ extern int foo;
#include <algorithm>
-int foo = 42;"""
+int foo = 42;
+"""
RESULT_NORMAL_WITHOUT_TRIM = """\
#pragma once
@@ -78,7 +79,8 @@ extern int foo;
#include <algorithm>
-int foo = 42;"""
+int foo = 42;
+"""
def init():
diff --git a/tests/test_quom/test_line_breaks_when_stitching.py b/tests/test_quom/test_line_breaks_when_stitching.py
new file mode 100644
index 0000000..0bb6c37
--- /dev/null
+++ b/tests/test_quom/test_line_breaks_when_stitching.py
@@ -0,0 +1,98 @@
+from io import StringIO
+from pathlib import Path
+
+from quom import Quom
+
+FILE_MAIN_CPP = """\
+#include "a.hpp"
+int main() {
+ return 0;
+}
+// Stitch Begin
+// End
+"""
+
+FILE_A_HPP = 'int a;'
+
+FILE_A_CPP = """\
+#include "b.hpp"
+#include "c.hpp"
+void mid() {}"""
+
+FILE_B_HPP = 'int b;'
+FILE_C_HPP = 'int c;'
+
+FILE_B_CPP = """\
+#include <b>"""
+
+FILE_C_CPP = """\
+#include <c>"""
+
+RESULT = """\
+int a;
+int main() {
+ return 0;
+}
+// Stitch Begin
+// End
+int b;
+int c;
+void mid() {}
+#include <b>
+#include <c>
+"""
+
+RESULT_STITCH = """\
+int a;
+int main() {
+ return 0;
+}
+int b;
+int c;
+void mid() {}
+#include <b>
+#include <c>
+
+// End
+"""
+
+
+def init():
+ with open('main.hpp', 'w+', encoding='utf-8') as file:
+ file.write(FILE_MAIN_CPP)
+
+ with open('a.hpp', 'w+', encoding='utf-8') as file:
+ file.write(FILE_A_HPP)
+
+ with open('a.cpp', 'w+', encoding='utf-8') as file:
+ file.write(FILE_A_CPP)
+
+ with open('b.hpp', 'w+', encoding='utf-8') as file:
+ file.write(FILE_B_HPP)
+
+ with open('b.cpp', 'w+', encoding='utf-8') as file:
+ file.write(FILE_B_CPP)
+
+ with open('c.hpp', 'w+', encoding='utf-8') as file:
+ file.write(FILE_C_HPP)
+
+ with open('c.cpp', 'w+', encoding='utf-8') as file:
+ file.write(FILE_C_CPP)
+
+
+def test_add_line_break_in_stitched_files_if_missing(fs):
+ init()
+
+ dst = StringIO()
+ Quom(Path('main.hpp'), dst)
+
+ assert dst.getvalue() == RESULT
+
+
+def test_add_line_break_in_stitched_files_if_missing_at_stitch_location(fs):
+ init()
+
+ dst = StringIO()
+ Quom(Path('main.hpp'), dst, stitch_format='Stitch Begin')
+
+ assert dst.getvalue() == RESULT_STITCH
| when source files have no newline at EOF, content of next file is concatenated right after last closing curly bracket
When one of the source files has no newline at EOF, the `#include ...` lines of the next file appear right after the last closing curly bracket, and that causes a compilation error.
I know it's a trivial problem to resolve once compilation fails, but it would be nice to ensure the contents of all source files are separated by a newline.
```c++
// bad.cpp
void foo(){
// there is no newline at EOF, file ends right after this closing curly bracket.
}
// b.cpp
#include <iostream>
// this include is at first line and located outside current build directory
// main.cpp
include "bad.hpp"
include "b.hpp"
...
```
And the source file quom produces has something like:
```c++
void foo(){
}#include <iostream> // this include comes from b.cpp
```
I wrote a test to demonstrate this problem. The following test will fail at the current commit, which is `4633b5aa`.
----
```python
from io import StringIO
from pathlib import Path
from quom import Quom
FILE_MAIN_CPP = """\
#include "a.hpp"
#include "b.hpp"
int main() {
return 0;
}
"""
FILE_A_HPP = """\
#ifndef A_HPP_
#define A_HPP_
#endif //A_HPP_
"""
FILE_A_CPP = """\
#include "a.hpp"
#include "b.hpp"
void mid() {
}"""
FILE_B_HPP = """\
#ifndef B_HPP_
#define B_HPP_
#endif //B_HPP_
"""
FILE_B_CPP = """\
#include <memory>
"""
RESULT = """\
#ifndef A_HPP_
#define A_HPP_
#endif //A_HPP_
#ifndef B_HPP_
#define B_HPP_
#endif //B_HPP_
int main() {
return 0;
}
void mid() {
}
#include <memory>
"""
def init():
with open('main.hpp', 'w+', encoding='utf-8') as file:
file.write(FILE_MAIN_CPP)
with open('a.hpp', 'w+', encoding='utf-8') as file:
file.write(FILE_A_HPP)
with open('a.cpp', 'w+', encoding='utf-8') as file:
file.write(FILE_A_CPP)
with open('b.hpp', 'w+', encoding='utf-8') as file:
file.write(FILE_B_HPP)
with open('b.cpp', 'w+', encoding='utf-8') as file:
file.write(FILE_B_CPP)
def test_issue_one(fs):
init()
dst = StringIO()
Quom(Path('main.hpp'), dst)
assert dst.getvalue() == RESULT
``` | 0.0 | 4633b5aaba35454c425227ec498ea1e19caade39 | [
"tests/test_quom/test_last_source_file.py::test_normal",
"tests/test_quom/test_last_source_file.py::test_normal_without_trim",
"tests/test_quom/test_line_breaks_when_stitching.py::test_add_line_break_in_stitched_files_if_missing",
"tests/test_quom/test_line_breaks_when_stitching.py::test_add_line_break_in_stitched_files_if_missing_at_stitch_location"
] | [
"tests/test_quom/test_last_source_file.py::test_without_newline_at_end"
] | {
"failed_lite_validators": [
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false
} | 2021-07-27 20:35:58+00:00 | mit | 821 |
|
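Independent of quom's token machinery, a minimal sketch of the normalisation the patch above performs (`__write_line_break_if_missing`): reuse a line break seen in the input, falling back to `os.linesep` exactly as the patch does. The function name is illustrative.

```python
import os

def ensure_trailing_newline(text, used_linesep=None):  # illustrative
    if text.endswith(('\n', '\r')):
        return text
    if used_linesep is None:
        # Prefer a line break already present in the file, as the
        # patch does; fall back to os.linesep otherwise.
        used_linesep = '\n' if '\n' in text else os.linesep
    return text + used_linesep

print(repr(ensure_trailing_newline('void foo(){\n}')))  # appends '\n'
```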
VictorPelaez__coral-reef-optimization-algorithm-43 | diff --git a/cro/cro.py b/cro/cro.py
index 44c17ae..ebc560b 100644
--- a/cro/cro.py
+++ b/cro/cro.py
@@ -20,6 +20,7 @@ class CRO(object):
self.Pd = Pd
self.fitness_coral = fitness_coral
self.opt = opt
+ self.opt_multiplier = -1 if opt == "max" else 1
self.L = L
self.ke = ke
self.seed = seed
@@ -83,7 +84,7 @@ class CRO(object):
coral_fitness = self.fitness_coral(coral)
REEF_fitness.append(coral_fitness)
- return np.array(REEF_fitness)
+ return self.opt_multiplier*np.array(REEF_fitness)
def broadcastspawning(self, REEF, REEFpob):
"""
@@ -197,7 +198,6 @@ class CRO(object):
- REEFfitness: new reef fitness
"""
k = self.k
- opt = self.opt
np.random.seed(seed=self.seed)
Nlarvae = larvae.shape[0]
@@ -222,10 +222,7 @@ class CRO(object):
REEFfitness[reef_index] = larva_fitness
REEF[reef_index] = 1
else: # occupied coral
- if opt == "max":
- fitness_comparison = larva_fitness > REEFfitness[reef_indices]
- else:
- fitness_comparison = larva_fitness < REEFfitness[reef_indices]
+ fitness_comparison = larva_fitness < REEFfitness[reef_indices]
if np.any(fitness_comparison):
reef_index = reef_indices[np.where(fitness_comparison)[0][0]]
@@ -243,7 +240,6 @@ class CRO(object):
- pob: reef population
- fitness: reef fitness
- Fa: fraction of corals to be duplicated
- - opt: type of optimization ('max' or 'min')
Output:
- Alarvae: created larvae,
- Afitness: larvae's fitness
@@ -255,8 +251,7 @@ class CRO(object):
N = pob.shape[0]
NA = int(np.round(Fa*N))
- if self.opt=='max': ind = np.argsort(-fitness);
- else: ind = np.argsort(fitness)
+ ind = np.argsort(fitness)
fitness = fitness[ind]
Alarvae = pob[ind[0:NA], :]
@@ -284,10 +279,8 @@ class CRO(object):
Pd = self.Pd
np.random.seed(seed = self.seed)
- if (self.opt=='max'):
- ind = np.argsort(REEFfitness)
- else:
- ind = np.argsort(-REEFfitness)
+ # Sort by worse fitness (hence the minus sign)
+ ind = np.argsort(-REEFfitness)
sortind = ind[:int(np.round(Fd*REEFpob.shape[0]))]
p = np.random.rand(len(sortind))
@@ -390,13 +383,10 @@ class CRO(object):
Bestfitness = []
Meanfitness = []
- if opt=='max':
- if verbose: print('Reef initialization:', np.max(REEFfitness))
- Bestfitness.append(np.max(REEFfitness))
- else:
- if verbose: print('Reef initialization:', np.min(REEFfitness))
- Bestfitness.append(np.min(REEFfitness))
- Meanfitness.append(np.mean(REEFfitness))
+ Bestfitness.append(self.opt_multiplier*np.min(REEFfitness))
+ Meanfitness.append(self.opt_multiplier*np.mean(REEFfitness))
+ if verbose:
+ print('Reef initialization:', self.opt_multiplier*np.min(REEFfitness))
for n in range(Ngen):
@@ -420,23 +410,18 @@ class CRO(object):
(REEF, REEFpob, REEFfitness) = self.depredation(REEF, REEFpob, REEFfitness)
(REEF, REEFpob, REEFfitness) = self.extremedepredation(REEF, REEFpob, REEFfitness, int(np.round(self.ke*N*M)))
- if opt=='max': Bestfitness.append(np.max(REEFfitness))
- else: Bestfitness.append(np.min(REEFfitness))
- Meanfitness.append(np.mean(REEFfitness))
+ Bestfitness.append(self.opt_multiplier*np.min(REEFfitness))
+ Meanfitness.append(self.opt_multiplier*np.mean(REEFfitness))
- if (n%10==0) & (n!=Ngen):
- if (opt=='max') & (verbose): print('Best-fitness:', np.max(REEFfitness), '\n', str(n/Ngen*100) + '% completado \n' );
- if (opt=='min') & (verbose): print('Best-fitness:', np.min(REEFfitness), '\n', str(n/Ngen*100) + '% completado \n' );
+ if all([n%10 == 0, n != Ngen, verbose]):
+ print('Best-fitness:', self.opt_multiplier*np.min(REEFfitness), '\n', str(n/Ngen*100) + '% completado \n' );
- if opt=='max':
- if verbose: print('Best-fitness:', np.max(REEFfitness), '\n', str(100) + '% completado \n' )
- ind_best = np.where(REEFfitness == np.max(REEFfitness))[0][0]
- else:
- if verbose: print('Best-fitness:', np.min(REEFfitness), '\n', str(100) + '% completado \n' )
- ind_best = np.where(REEFfitness == np.min(REEFfitness))[0][0]
+ if verbose:
+ print('Best-fitness:', self.opt_multiplier*np.min(REEFfitness), '\n', str(100) + '% completado \n' )
+ ind_best = np.where(REEFfitness == np.min(REEFfitness))[0][0]
self.plot_results(Bestfitness, Meanfitness)
print('Best coral: ', REEFpob[ind_best, :])
- print('Best solution:', REEFfitness[ind_best])
+ print('Best solution:', self.opt_multiplier*REEFfitness[ind_best])
return (REEF, REEFpob, REEFfitness, ind_best, Bestfitness, Meanfitness)
| VictorPelaez/coral-reef-optimization-algorithm | 9939a280c87090b7ae575a23670f49a265ad017a | diff --git a/cro/tests.py b/cro/tests.py
index abd19cf..9bde3df 100644
--- a/cro/tests.py
+++ b/cro/tests.py
@@ -91,14 +91,14 @@ def test_larvaesettling_nonemptyreef():
[1,0,1,1]])
REEF = np.array([0,1,1,1])
- REEFfitness = np.array([0,1,2,11])
+ REEFfitness = -np.array([0,1,2,11])
larvae = np.array([[1,0,0,0],
[0,1,1,0],
[0,1,0,0],
[1,0,0,1]])
- larvaefitness = np.array([8,6,4,9])
+ larvaefitness = -np.array([8,6,4,9])
N, L = REEFpob.shape
M = 1
@@ -126,7 +126,7 @@ def test_larvaesettling_nonemptyreef():
[1,0,0,1],
[0,0,1,0],
[1,0,1,1]])
- REEFfitness_exp = np.array([8,9,2,11])
+ REEFfitness_exp = -np.array([8,9,2,11])
np.testing.assert_almost_equal(REEF_res, np.array([1,1,1,1]))
np.testing.assert_almost_equal(REEFpob_res, REEFpob_exp)
| create a opt_multiplier (-1 or 1) with opt argument
One of the main points suggested in Issue #22.
Using the `opt` argument, create a `self.opt_multiplier` (1 or -1).
It looks like an improvement in 2 functions:
- budding()
```
if self.opt=='max': ind = np.argsort(-fitness);
else: ind = np.argsort(fitness)
```
- depredation()
```
if (self.opt=='max'):
ind = np.argsort(REEFfitness)
else:
ind = np.argsort(-REEFfitness)
```
@apastors Did your proposal cover anything else? regards | 0.0 | 9939a280c87090b7ae575a23670f49a265ad017a | [
"cro/tests.py::test_larvaesettling_nonemptyreef"
] | [
"cro/tests.py::test_croCreation",
"cro/tests.py::test_croInit",
"cro/tests.py::test_reefinitializationDisc",
"cro/tests.py::test_larvaesettling_emptyreef"
] | {
"failed_lite_validators": [
"has_issue_reference",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false
} | 2017-10-29 08:56:15+00:00 | mit | 822 |
|
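A toy sketch of the multiplier idea requested above, matching the merged patch: fitness is stored pre-multiplied so a single minimising `argsort` serves both `opt` modes. The array values are made up.

```python
import numpy as np

opt = "max"
opt_multiplier = -1 if opt == "max" else 1

fitness = np.array([3.0, 7.0, 1.0])
internal = opt_multiplier * fitness   # maximisation becomes minimisation

# One argsort replaces the max/min branches in budding()/depredation():
ind = np.argsort(internal)
best = opt_multiplier * internal[ind[0]]
print(ind, best)                      # best coral first; prints 7.0
```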
VictorPelaez__coral-reef-optimization-algorithm-45 | diff --git a/.travis.yml b/.travis.yml
index 8bc2fee..15ab2f9 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -9,7 +9,6 @@ install:
- pip install -r requirements.txt
script:
- pytest tests/
-
deploy:
provider: pypi
user: victorpelaez
diff --git a/cro/cro.py b/cro/cro.py
index b7231dd..7c24105 100644
--- a/cro/cro.py
+++ b/cro/cro.py
@@ -1,15 +1,24 @@
#!/usr/bin/env python
# coding=utf-8
###############################################################################
-
-from __future__ import division
+from __future__ import division, print_function
+import sys
+import logging
import numpy as np
+
+from .reef_initialization import get_reefinit_function
from .larvaemutation import get_larvaemutation_function
class CRO(object):
def __init__(self, Ngen, N, M, Fb, Fa, Fd, r0, k, Pd, fitness_coral, opt, L=None,
ke=0.2, npolyps=1, seed=None, mode='bin', param_grid={}, verbose=False):
+ # Set logging configuration
+ logging_level = logging.INFO if verbose else logging.WARNING
+ logging.basicConfig(stream=sys.stdout,
+ format="%(message)s")
+ logging.getLogger().setLevel(logging_level)
+
self.Ngen = Ngen
self.N = N
self.M = M
@@ -29,9 +38,11 @@ class CRO(object):
self.mode = mode
self.param_grid = param_grid
self.verbose = verbose
-
- print("[*Running] Initialization: ", self.opt)
+ self.reefinit_function = get_reefinit_function(mode)
+ self.larvaemutation_function = get_larvaemutation_function(mode)
+ logging.info("Running Initialization: %s", self.opt)
+
def reefinitialization (self):
"""
function [REEF,REEFpob]=reefinitialization(M,N,r0,L)
@@ -45,37 +56,9 @@ class CRO(object):
- REEF: reef matrix
- REEFpob: population matrix
"""
-
- # print error. Maybe use other place for all arg-checks
- if ( (self.param_grid=={}) & (self.mode =='disc') ):
- print('\nThis mode (', self.mode, ') needs a param_grid as a dictionary')
- return -1
-
- # commom for all modes
- np.random.seed(seed = self.seed)
- O = int(np.round(self.N*self.M*self.r0)) # number of occupied reefs
-
- # Binary mode
- if self.mode =='bin':
- A = np.random.randint(2, size=[O, self.L])
- B = np.zeros([( (self.N*self.M)-O), self.L], int)
- REEFpob = np.concatenate([A,B]) # Population creation
- REEF = np.array((REEFpob.any(axis=1)),int)
- return (REEF, REEFpob)
-
- # Discrete mode
- elif self.mode =='disc':
- for key, value in self.param_grid.items():
- valmax = (value[1] - value[0] + 1)
- A = np.random.randint(valmax, size=[O, self.L]) + value[0]
- B = np.zeros([( (self.N*self.M)-O), self.L], int)
- REEFpob = np.concatenate([A,B]) # Population creation
- REEF = np.array((REEFpob.any(axis=1)),int)
- return (REEF, REEFpob)
-
- else:
- print('\nThis mode (', self.mode, ') is not available')
- return -1
+ np.random.seed(seed = self.seed) # commom for all modes
+ REEF, REEFpob = self.reefinit_function(self.M, self.N, self.r0, self.L, param_grid=self.param_grid)
+ return REEF, REEFpob
def fitness(self, REEFpob):
"""
@@ -163,8 +146,8 @@ class CRO(object):
pos = np.random.randint(brooders.shape[1], size=(npolyps, nbrooders))
- larvaemutation_function = get_larvaemutation_function(self.mode)
- brooders = larvaemutation_function(brooders, pos, delta=1, param_grid=self.param_grid, seed=self.seed)
+ brooders = self.larvaemutation_function(brooders, pos, delta=1,
+ param_grid=self.param_grid, seed=self.seed)
return brooders
@@ -384,8 +367,7 @@ class CRO(object):
Bestfitness.append(self.opt_multiplier*np.min(REEFfitness))
Meanfitness.append(self.opt_multiplier*np.mean(REEFfitness))
- if verbose:
- print('Reef initialization:', self.opt_multiplier*np.min(REEFfitness))
+ logging.info('Reef initialization: %s', self.opt_multiplier*np.min(REEFfitness))
for n in range(Ngen):
@@ -413,14 +395,13 @@ class CRO(object):
Meanfitness.append(self.opt_multiplier*np.mean(REEFfitness))
if all([n%10 == 0, n != Ngen, verbose]):
- print('Best-fitness:', self.opt_multiplier*np.min(REEFfitness), '\n', str(n/Ngen*100) + '% completado \n' );
+ logging.info('Best-fitness: %s, (%.2f%% completado)', self.opt_multiplier*np.min(REEFfitness), n/Ngen*100)
- if verbose:
- print('Best-fitness:', self.opt_multiplier*np.min(REEFfitness), '\n', str(100) + '% completado \n' )
+ logging.info('Best-fitness: %s. (100%% completado)', self.opt_multiplier*np.min(REEFfitness))
ind_best = np.where(REEFfitness == np.min(REEFfitness))[0][0]
self.plot_results(Bestfitness, Meanfitness)
print('Best coral: ', REEFpob[ind_best, :])
- print('Best solution:', self.opt_multiplier*REEFfitness[ind_best])
+ print('Best solution: ', self.opt_multiplier*REEFfitness[ind_best])
return (REEF, REEFpob, REEFfitness, ind_best, Bestfitness, Meanfitness)
diff --git a/cro/larvaemutation.py b/cro/larvaemutation.py
index 835e7f6..20211a7 100644
--- a/cro/larvaemutation.py
+++ b/cro/larvaemutation.py
@@ -87,6 +87,6 @@ def get_larvaemutation_function(mode):
logging.info("Using {}".format(name))
else:
name, func = mode_functions[0]
- logging.info("Using {} for initializing the reef".format(name))
+ logging.info("Using {} for the brooding operator".format(name))
- return func
\ No newline at end of file
+ return func
diff --git a/cro/reef_initialization.py b/cro/reef_initialization.py
new file mode 100644
index 0000000..211b87f
--- /dev/null
+++ b/cro/reef_initialization.py
@@ -0,0 +1,86 @@
+"""
+Module that contains all the functions that perform the reef
+initialization.
+
+Every function should start with the mode name it performs,
+followed by an underscore and the function name.
+
+It should accept the following arguments:
+ - M: Reef size
+ - N: Reef size
+ - r0: occupied/total ratio
+ - L: coral length
+ - kwargs: extra arguments that the function might need (see
+ https://stackoverflow.com/a/1769475 for an
+ explanation on kwargs)
+
+It should return a tuple with (REEF, REEFpob)
+"""
+import logging
+
+import numpy as np
+
+from .utils import get_module_functions
+
+def bin_binary(M, N, r0, L, **kwargs):
+ """
+ Each value in each coral in the reef is a boolean value, i.e,
+ either a 0 or a 1
+ """
+ O = int(np.round(N*M*r0)) # number of occupied reefs
+ A = np.random.randint(2, size=[O, L])
+ B = np.zeros([((N*M)-O), L], int)
+ REEFpob = np.concatenate([A, B]) # Population creation
+ REEF = np.array((REEFpob.any(axis=1)),int)
+ return (REEF, REEFpob)
+
+def disc_equal_range(M, N, r0, L, **kwargs):
+ """
+ Each value in each coral in the reef is an integer in the range
+ specified by the keyword argument `param_grid`. `param_grid`
+ must have the next format:
+
+ >>> param_grid = {
+ "x": [2, 10]
+ }
+
+ where "x" can be basically anything, and its value is a list
+ with both minimum and maximum value.
+ In this example each coral will contain integers between 2 and 10
+ """
+ try:
+ param_grid = kwargs["param_grid"]
+ except KeyError:
+ raise ValueError("disc mode needs a param_grid as a dictionary")
+
+ O = int(np.round(N*M*r0)) # number of occupied reefs
+ for _, value in param_grid.items():
+ valmax = (value[1] - value[0] + 1)
+ A = np.random.randint(valmax, size=[O, L]) + value[0]
+ B = np.zeros([((N*M)-O), L], int)
+ REEFpob = np.concatenate([A,B]) # Population creation
+ REEF = np.array((REEFpob.any(axis=1)),int)
+ return (REEF, REEFpob)
+
+"""""
+UTILS
+"""""
+def get_reefinit_function(mode):
+ """
+ Returns the init function for the given mode.
+ If more than one function exists, return one randomly.
+ """
+ reef_init_functions = get_module_functions(__name__)
+ mode_functions = [(name, func) for name, func in reef_init_functions.items()
+ if name.startswith(mode)]
+ if not mode_functions:
+ raise ValueError("No initialization function for mode {}".format(mode))
+ elif len(mode_functions) > 1:
+ logging.warning("More than one initialization function for mode {}".format(mode))
+ name, func = mode_functions[0]
+ logging.info("Using {}".format(name))
+ else:
+ name, func = mode_functions[0]
+ logging.info("Using {} for initializing the reef".format(name))
+
+ return func
diff --git a/cro/utils.py b/cro/utils.py
index 5ffd7ff..980e657 100644
--- a/cro/utils.py
+++ b/cro/utils.py
@@ -31,5 +31,3 @@ def get_module_functions(module_name):
"""
current_module = sys.modules[module_name]
return dict(getmembers(current_module, predicate=isfunction))
-
-
diff --git a/examples/example_advanced.py b/examples/example_advanced.py
index 9b5d55b..39e1378 100644
--- a/examples/example_advanced.py
+++ b/examples/example_advanced.py
@@ -90,9 +90,9 @@ if __name__ == '__main__':
get_prediction=lambda gbr, X: gbr.predict(X),
metric=mean_squared_error)
start = time.time()
- cro = CRO(Ngen, N, M, Fb, Fa, Fd, r0, k, Pd, fitness_coral, opt, L, verbose=True)
+ cro = CRO(Ngen, N, M, Fb, Fa, Fd, r0, k, Pd, fitness_coral, opt, L, seed=13, verbose=True)
(REEF, REEFpob, REEFfitness, ind_best, Bestfitness, Meanfitness) = cro.fit(X, y, gbr)
print("Example II: feature selection, regression (min mse): ", time.time() - start, "seconds.")
names = np.array(['CRIM', 'ZN', 'INDUS', 'CHAS', 'NOX', 'RM', 'AGE', 'DIS', 'RAD', 'TAX', 'PTRATIO', 'B', 'LSTAT'])
- print(names[REEFpob[ind_best, :]>0])
\ No newline at end of file
+ print(names[REEFpob[ind_best, :]>0])
| VictorPelaez/coral-reef-optimization-algorithm | bc0b2fd006187f3e269c6e1898241866f502bd25 | diff --git a/tests/__init__.py b/tests/__init__.py
index e69de29..1581836 100644
--- a/tests/__init__.py
+++ b/tests/__init__.py
@@ -0,0 +1,4 @@
+import sys
+import os
+
+sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
diff --git a/tests/test_cro.py b/tests/test_cro.py
index f71a78f..5c8a9ed 100644
--- a/tests/test_cro.py
+++ b/tests/test_cro.py
@@ -3,11 +3,6 @@
###############################################################################
# run with:
# python -m pytest tests/test_cro.py
-
-import sys
-import os
-sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
-
import numpy as np
from cro.cro import CRO
@@ -119,7 +114,6 @@ def test_larvaesettling_nonemptyreef():
"""
Due to the passed seed,
[1,0,0,0] will be placed in the empty coral (index 0)
-
Then, larva [0,1,1,0] will try to settle in indices [0,3,1], settling in the third try (index 1)
Larva [0,1,0,0] will try in indices [0,3,3], being discarded
Larva [1,0,0,1] will try in indices [3,3,1], settling in the third try (index 1)
@@ -176,4 +170,4 @@ def test_settle_larvae():
assert REEF_res[indices] == 1
np.testing.assert_almost_equal(REEFpob_res[indices, :], larvae)
- np.testing.assert_almost_equal(REEFfitness_res[indices], larvaefitness)
\ No newline at end of file
+ np.testing.assert_almost_equal(REEFfitness_res[indices], larvaefitness)
diff --git a/tests/test_fitness.py b/tests/test_fitness.py
index 30f0a0e..73eafe7 100644
--- a/tests/test_fitness.py
+++ b/tests/test_fitness.py
@@ -3,11 +3,6 @@
###############################################################################
# run with:
# python -m pytest tests/test_fitness.py
-
-import sys
-import os
-sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
-
import numpy as np
from cro.fitness import max_ones
@@ -37,4 +32,4 @@ def test_feature_selection():
"""
To be added
"""
- pass
\ No newline at end of file
+ pass
diff --git a/tests/test_larvaemutation.py b/tests/test_larvaemutation.py
index 9713491..c564422 100644
--- a/tests/test_larvaemutation.py
+++ b/tests/test_larvaemutation.py
@@ -3,11 +3,6 @@
###############################################################################
# run with:
# python -m pytest tests/test_larvaemutation.py
-
-import sys
-import os
-sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
-
import numpy as np
from cro.larvaemutation import get_larvaemutation_function
@@ -79,4 +74,4 @@ def test_get_larvaemutation_function():
f = get_larvaemutation_function('bin')
assert 'function bin_larvaemutation' in str(f)
f = get_larvaemutation_function('disc')
- assert 'function disc_larvaemutation' in str(f)
\ No newline at end of file
+ assert 'function disc_larvaemutation' in str(f)
diff --git a/tests/test_reefinitialization.py b/tests/test_reefinitialization.py
index 567c7dd..198f6bd 100644
--- a/tests/test_reefinitialization.py
+++ b/tests/test_reefinitialization.py
@@ -3,28 +3,28 @@
###############################################################################
# run with:
# python -m pytest tests/test_larvaemutation.py
-
-import sys
-import os
-sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
-
import numpy as np
-#from cro.reefinitialization import bin_binary, disc_equal_range
+from cro.reef_initialization import bin_binary, disc_equal_range
# ------------------------------------------------------
# reefinitialization module
# ------------------------------------------------------
-
def test_bin_binary():
"""
Test that corals in the population only contain values in {0, 1}
"""
- pass
-
-
+ M, N, r0, L = 2, 2, 0.6, 8
+ REEF, REEFpob = bin_binary(M, N, r0, L)
+ assert set(REEFpob.ravel()) == {0, 1}
+
def test_disc_equal_range():
"""
Test that corals in population contain values specified in the grid
"""
- pass
\ No newline at end of file
+ M, N, r0, L = 2, 2, 0.6, 8
+ grid = {'x': [2, 10]} # Discrete values between 2 and 10
+
+ REEF, REEFpob = disc_equal_range(M, N, r0, L, param_grid=grid)
+ p = sum(REEFpob[np.where(REEFpob!=0)]<grid['x'][0]) + sum(REEFpob[np.where(REEFpob!=0)]>grid['x'][1])
+ assert p == 0
| Refactor Reef initialization
To improve the code architecture, a module with different initialization functions should be considered: one function per mode that accepts all the parameters it needs (`M`, `N`, `L`, `param_grid`, ...) and returns `REEFpob`. This way we could add documentation for each initialization explaining how it is done and which parameters it needs. Also, each function could be easily testable. | 0.0 | bc0b2fd006187f3e269c6e1898241866f502bd25 | [
"tests/test_cro.py::test_croCreation",
"tests/test_cro.py::test_croInit",
"tests/test_cro.py::test_reefinitializationDisc",
"tests/test_cro.py::test_larvaesettling_emptyreef",
"tests/test_cro.py::test_larvaesettling_nonemptyreef",
"tests/test_cro.py::test_brooding",
"tests/test_cro.py::test_settle_larvae",
"tests/test_fitness.py::test_max_ones",
"tests/test_fitness.py::test_feature_selection",
"tests/test_larvaemutation.py::test_bin_larvaemutattion",
"tests/test_larvaemutation.py::test_disc_larvaemutattion",
"tests/test_larvaemutation.py::test_cont_larvaemutattion",
"tests/test_larvaemutation.py::test_get_larvaemutation_function",
"tests/test_reefinitialization.py::test_bin_binary",
"tests/test_reefinitialization.py::test_disc_equal_range"
] | [] | {
"failed_lite_validators": [
"has_added_files",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false
} | 2017-11-02 21:28:47+00:00 | mit | 823 |
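A minimal sketch of the proposed `cro.reef_initialization` module (signatures and return shapes are inferred from the test patch above; the internals are illustrative assumptions, not the project's actual implementation):
```
import numpy as np

def bin_binary(M, N, r0, L):
    # Binary mode: each coral is a random 0/1 vector of length L.
    REEFpob = np.random.randint(2, size=(M * N, L))    # candidate corals in {0, 1}
    REEF = (np.random.rand(M * N) < r0).astype(int)    # which reef slots are occupied
    REEFpob[REEF == 0] = 0                             # unoccupied slots hold zeros
    return REEF, REEFpob

def disc_equal_range(M, N, r0, L, param_grid):
    # Discrete mode: genes drawn uniformly from the [lo, hi] range in param_grid.
    lo, hi = next(iter(param_grid.values()))
    REEFpob = np.random.randint(lo, hi + 1, size=(M * N, L))
    REEF = (np.random.rand(M * N) < r0).astype(int)
    REEFpob[REEF == 0] = 0
    return REEF, REEFpob
```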
|
Vipul-Cariappa__py-utility-7 | diff --git a/README.md b/README.md
index e6336ee..47feb7f 100644
--- a/README.md
+++ b/README.md
@@ -4,7 +4,8 @@ Utility functions for managing and monitoring python resources.
## installation
<pre><code>pip install py-utility</code></pre>
-## Decorator Function
+
+## Documentation
### memoryit
memoryit returns the total memory used by the function at runtime in bytes.
@@ -12,25 +13,32 @@ memoryit returns the total memory used by the function at runtime in bytes.
Example Code:
<br>
<pre><code>from pyutility import memoryit
-@memoryit
+
def prime_check(x):
for i in range(2, x):
if x % i == 0:
return False
- return True</code></pre>
+ return True
+
+if __name__ == "__main__":
+ ...
+ memoryit(prime_check, args=(89,))
+ ...
+
+ </code></pre>
### limit_memory
-limit_memory limits the memory consumption of function at runtime. It takes the limit in MB. You will find unexpected behaviour if too low value is set. The default value is 25 MB. It is throw MemoryError if it exceeds the limit.
+limit_memory limits the memory consumption of function at runtime. It takes the limit in MB. You will find unexpected behaviour if too low value is set. The default value is 25 MB. It throws MemoryError if it exceeds the limit.
<br>
Example Code:
<br>
<pre><code>from pyutility import limit_memory
-@limit_memory(30)
-def prime_check(x):
- for i in range(2, x):
- if x % i == 0:
- return False
- return True</code></pre>
+
+if __name__ == "__main__":
+ ...
+ limit_memory(prime_check, memory=30, args=(89,))
+ ...
+ </code></pre>
### timeit
timeit returns the total time take to execute the function in seconds.
@@ -38,12 +46,11 @@ timeit returns the total time take to execute the function in seconds.
Example Code:
<br>
<pre><code>from pyutility import timeit
-@timeit
-def prime_check(x):
- for i in range(2, x):
- if x % i == 0:
- return False
- return True</code></pre>
+
+if __name__ == "__main__":
+ ...
+ timeit(prime_check, args=(89,))
+ ...</code></pre>
### limit_time
limit_time limits the time used to execute the function. It takes limit as seconds. The default value is 10 seconds. It throws TimeoutError if it exceeds the limit.
@@ -51,12 +58,12 @@ limit_time limits the time used to execute the function. It takes limit as secon
Example Code:
<br>
<pre><code>from pyutility import limit_time
-@limit_time(30)
-def prime_check(x):
- for i in range(2, x):
- if x % i == 0:
- return False
- return True</code></pre>
+
+if __name__ == "__main__":
+ ...
+ limit_time(prime_check, time=2, args=(89,))
+ ...</code></pre>
## Contribution
All contributions are welcomed. If it is a bug fix or new feature, creating new issue is highly recommend before any pull request.
+ If you want any new feature to be included create new issue asking for the feature with a use case and example code if possible.
diff --git a/pyutility/memoryutil.py b/pyutility/memoryutil.py
index 29c5f3f..ca8d3ed 100644
--- a/pyutility/memoryutil.py
+++ b/pyutility/memoryutil.py
@@ -4,7 +4,7 @@ import resource
def me_worker(func, storage, *args, **kwargs):
- """measures the peak memory consumption of given function; should be run by memoryit decorator as a new process
+ """measures the peak memory consumption of given function
Args:
func (`function`): function to execute
@@ -30,11 +30,11 @@ def me_worker(func, storage, *args, **kwargs):
def li_worker(func, limit, storage, *args, **kwargs):
- """limits the memory consumption of given function; should be run by limit_memory decorator as a new process
+ """limits the memory consumption of given function
Args:
func (`function`): function to execute
- limit (`int`): maximum allowed memory consuption
+ limit (`int`): maximum allowed memory consumption
storage (`list`): multiprocessing.Manager().List() to store the peak memory
args (`tuple`): arguments for the function
kwargs(`dict`): keyword arguments for the function
@@ -57,53 +57,51 @@ def li_worker(func, limit, storage, *args, **kwargs):
return 0
-def memoryit(func):
- """decorator function to measures the peak memory consumption of given function
+def memoryit(func, args=(), kwargs={}):
+ """measures the peak memory consumption of given function
Args:
func (`function`): function to execute
+ args (`tuple`): arguments for the function
+ kwargs(`dict`): keyword arguments for the function
Return:
peak memory used during the execution of given function in bytes (`int`)
"""
- def wrapper(*args, **kwargs):
- ctx = mp.get_context('spawn')
- manager = ctx.Manager()
- com_obj = manager.list()
- p = ctx.Process(target=me_worker, args=(
- func, com_obj, *args), kwargs=kwargs)
- p.start()
- p.join()
-
- return com_obj[-1]
+ ctx = mp.get_context('spawn')
+ manager = ctx.Manager()
+ com_obj = manager.list()
+ p = ctx.Process(target=me_worker, args=(
+ func, com_obj, *args), kwargs=kwargs)
+ p.start()
+ p.join()
- return wrapper
+ return com_obj[-1]
-def limit_memory(value=15):
- """decorator function to limits the memory consumption of given function
+def limit_memory(func, memory=25, args=(), kwargs={}):
+ """limits the memory consumption of given function.
+ If limit set is very low it will not behave in expected way.
Args:
- value (`int`): maximum allowed memory consumption in MB
func (`function`): function to execute
+ limit (`int`): maximum allowed memory consumption in MB default is 25 MB
+ args (`tuple`): arguments for the function
+ kwargs(`dict`): keyword arguments for the function
Return:
return value of function or MemoryError
"""
- def decorator(func):
- def wrapper(*args, **kwargs):
- ctx = mp.get_context('spawn')
- manager = ctx.Manager()
- com_obj = manager.list()
- p = ctx.Process(target=li_worker, args=(
- func, value, com_obj, *args), kwargs=kwargs)
- p.start()
- p.join()
-
- if isinstance(com_obj[-1], Exception):
- raise com_obj[-1]
- else:
- return com_obj[-1]
-
- return wrapper
- return decorator
+
+ ctx = mp.get_context('spawn')
+ manager = ctx.Manager()
+ com_obj = manager.list()
+ p = ctx.Process(target=li_worker, args=(
+ func, memory, com_obj, *args), kwargs=kwargs)
+ p.start()
+ p.join()
+
+ if isinstance(com_obj[-1], Exception):
+ raise com_obj[-1]
+ else:
+ return com_obj[-1]
diff --git a/pyutility/timeutil.py b/pyutility/timeutil.py
index 325830a..dc2c0bd 100644
--- a/pyutility/timeutil.py
+++ b/pyutility/timeutil.py
@@ -4,8 +4,7 @@ import signal
def me_worker(func, storage, *args, **kwargs):
- """measures the time taken to execute given function should be run by timeit decorator as a new process
-
+ """measures the time taken to execute given function
Args:
func (`function`): function to execute
storage (`list`): multiprocessing.Manager().List() to store the time taken to excute
@@ -13,7 +12,7 @@ def me_worker(func, storage, *args, **kwargs):
kwargs(`dict`): keyword arguments for the function
Return:
- time taken to execute the given function in seconds (`int`)
+ time taken to execute the given function in seconds (`float`)
"""
t1 = time()
func(*args, **kwargs)
@@ -23,7 +22,7 @@ def me_worker(func, storage, *args, **kwargs):
def li_worker(func, time, storage, *args, **kwargs):
- """limits the time taken for exection of given function; should be run by limit_time decorator as a new process
+ """limits the time taken for exection of given function
Args:
func (`function`): function to execute
@@ -52,53 +51,51 @@ def li_worker(func, time, storage, *args, **kwargs):
return 0
-def timeit(func):
- """decorator function to measure time taken to execute a given function in new process
+def timeit(func, args=(), kwargs={}):
+ """measures the time taken to execute given function
Args:
func (`function`): function to execute
+ args (`tuple`): arguments for the function
+ kwargs(`dict`): keyword arguments for the function
Return:
- time taken to execute the given function in seconds (`int`)
+ time taken to execute the given function in seconds (`float`)
"""
- def wrapper(*args, **kwargs):
- ctx = mp.get_context('spawn')
- manager = ctx.Manager()
- com_obj = manager.list()
- p = ctx.Process(target=me_worker, args=(
- func, com_obj, *args), kwargs=kwargs)
- p.start()
- p.join()
- return com_obj[-1]
- return wrapper
+ ctx = mp.get_context('spawn')
+ manager = ctx.Manager()
+ com_obj = manager.list()
+ p = ctx.Process(target=me_worker, args=(
+ func, com_obj, *args), kwargs=kwargs)
+ p.start()
+ p.join()
+
+ return com_obj[-1]
-def limit_time(time=10):
- """decorator function to limits the time taken to execute given function
+def limit_time(func, time=10, args=(), kwargs={}):
+ """limits the time taken for exection of given function
Args:
- value (`int`): maximum allowed time in seconds
func (`function`): function to execute
+ limit (`int`): maximum allowed time in seconds, default is 10
+ args (`tuple`): arguments for the function
+ kwargs(`dict`): keyword arguments for the function
Return:
return value of function or TimeoutError
"""
- def inner(func):
- def wrapper(*args, **kwargs):
-
- ctx = mp.get_context('spawn')
- manager = ctx.Manager()
- com_obj = manager.list()
- p = ctx.Process(target=li_worker, args=(
- func, time, com_obj, *args), kwargs=kwargs)
- p.start()
- p.join()
-
- if isinstance(com_obj[-1], Exception):
- raise com_obj[-1]
- else:
- return com_obj[-1]
-
- return wrapper
- return inner
+
+ ctx = mp.get_context('spawn')
+ manager = ctx.Manager()
+ com_obj = manager.list()
+ p = ctx.Process(target=li_worker, args=(
+ func, time, com_obj, *args), kwargs=kwargs)
+ p.start()
+ p.join()
+
+ if isinstance(com_obj[-1], Exception):
+ raise com_obj[-1]
+ else:
+ return com_obj[-1]
diff --git a/pyutility/utility.py b/pyutility/utility.py
index d0eefbc..841fd7e 100644
--- a/pyutility/utility.py
+++ b/pyutility/utility.py
@@ -15,7 +15,7 @@ def me_worker(func, storage, *args, **kwargs):
kwargs(`dict`): keyword arguments for the function
Return:
- peak memory used, time taken during the execution of given function in bytes (`list` of 'int', 'float')
+ peak memory used, time taken for the execution of given function (`list` of 'int', 'float')
"""
tm.start()
@@ -74,58 +74,54 @@ def li_worker(func, storage, time, memory, *args, **kwargs):
return 0
-def measureit(func):
- """decorator function to measures the peak memory consumption and time taken to execute given function
+def measureit(func, args=(), kwargs={}):
+ """measures the peak memory consumption and time taken for execution of given function
Args:
func (`function`): function to execute
+ args (`tuple`): arguments for the function
+ kwargs(`dict`): keyword arguments for the function
Return:
- peak memory used, time taken during the execution of given function in bytes (`tuple` of 'int', 'float')
+ peak memory used (MB), time taken (seconds) during the execution of given function (`list` of 'int', 'float')
"""
- def wrapper(*args, **kwargs):
- ctx = mp.get_context('spawn')
- manager = ctx.Manager()
- com_obj = manager.list()
- p = ctx.Process(target=me_worker, args=(
- func, com_obj, *args), kwargs=kwargs)
- p.start()
- p.join()
-
- if len(com_obj) == 2:
- return tuple(com_obj)
-
- # else
- raise com_obj[-1]
+ ctx = mp.get_context('spawn')
+ manager = ctx.Manager()
+ com_obj = manager.list()
+ p = ctx.Process(target=me_worker, args=(
+ func, com_obj, *args), kwargs=kwargs)
+ p.start()
+ p.join()
- return wrapper
+ if len(com_obj) == 2:
+ return tuple(com_obj)
+ # else
+ raise com_obj[-1]
-def limit_resource(time=10, memory=25):
- """decorator function to limits the memory consumption and time taken to execute given function
+
+def limit_resource(func, time=10, memory=25, args=(), kwargs={}):
+ """limits the memory consumption and time taken to execute given function
Args:
- time (`int`): maximum allowed time consuption in seconds
- memory (`int`): maximum allowed memory consuption in MB
func (`function`): function to execute
+ time (`int`): maximum allowed time in seconds, default is 10
+ memory (`int`): maximum allowed memory consumption in MB, default is 25
+ args (`tuple`): arguments for the function
+ kwargs(`dict`): keyword arguments for the function
Return:
return value of function or MemoryError or TimeoutError
"""
- def decorator(func):
- def wrapper(*args, **kwargs):
- ctx = mp.get_context('spawn')
- manager = ctx.Manager()
- com_obj = manager.list()
- p = ctx.Process(target=li_worker, args=(
- func, com_obj, time, memory, *args), kwargs=kwargs)
- p.start()
- p.join()
-
- if isinstance(com_obj[-1], Exception):
- raise com_obj[-1]
- else:
- return com_obj[-1]
-
- return wrapper
- return decorator
+ ctx = mp.get_context('spawn')
+ manager = ctx.Manager()
+ com_obj = manager.list()
+ p = ctx.Process(target=li_worker, args=(
+ func, com_obj, time, memory, *args), kwargs=kwargs)
+ p.start()
+ p.join()
+
+ if isinstance(com_obj[-1], Exception):
+ raise com_obj[-1]
+ # else
+ return com_obj[-1]
| Vipul-Cariappa/py-utility | 8f66fc1fc569c34798865ab063f1da4e7753c9d5 | diff --git a/tests/func.py b/tests/func.py
new file mode 100644
index 0000000..cbe21ea
--- /dev/null
+++ b/tests/func.py
@@ -0,0 +1,20 @@
+def memory(x):
+ x = [i for i in range(x)]
+ return -1
+
+
+def time(x):
+ # recursive function to find xth fibonacci number
+ if x < 3:
+ return 1
+ return time(x-1) + time(x-2)
+
+
+def error(x=None):
+ # error function
+ return "a" / 2
+
+
+def return_check(*args, **kwagrs):
+ # args and kwargs function
+ return list(args) + list(kwagrs.values())
diff --git a/tests/test_memory.py b/tests/test_memory.py
index c5b3197..437a8ac 100644
--- a/tests/test_memory.py
+++ b/tests/test_memory.py
@@ -1,50 +1,46 @@
from unittest import TestCase
from pyutility import limit_memory, memoryit
-
-def func1(x):
- x = [i for i in range(x)]
- return -1
-
-
-def func2():
- # error function
- return "a" / 2
-
-
-def func3(*args, **kwagrs):
- # args and kwargs function
- return list(args) + list(kwagrs.values())
+from .func import memory, error, return_check
class MemoryitTest(TestCase):
- def setUp(self):
- self.er_func = memoryit(func2)
- self.func = memoryit(func1)
- self.ka_func = memoryit(func3)
- def test_memoryit_1(self):
- self.assertIsInstance(self.func(5), int)
+ def test_memoryit1(self):
+ v = memoryit(memory, args=(5,))
+ self.assertIsInstance(v, int)
- def test_memoryit_2(self):
- self.assertRaises(Exception, self.er_func)
+ def test_memoryit2(self):
+ self.assertRaises(Exception, memoryit, error, 5)
class LimitMemoryTest(TestCase):
- def setUp(self):
- self.er_func = limit_memory()(func2)
- self.func = limit_memory()(func1)
- self.ka_func = limit_memory()(func3)
def test_limit_memory_1(self):
- self.assertEqual(self.func(3), -1)
+ v = limit_memory(memory, args=(10,))
+ self.assertEqual(v, -1)
def test_limit_memory_2(self):
- self.assertRaises(Exception, self.er_func)
+ self.assertRaises(
+ Exception,
+ limit_memory,
+ error,
+ args=(100_000_000,)
+ )
def test_limit_memory_3(self):
- self.assertRaises(MemoryError, self.func, 100_000_000)
+ self.assertRaises(
+ MemoryError,
+ limit_memory,
+ memory,
+ args=(500_000_000,)
+ )
def test_limit_memory_4(self):
- self.assertEqual(self.ka_func(
- 1, 2, 3, four=4, five=5), [1, 2, 3, 4, 5])
+ v = limit_memory(
+ return_check,
+ args=(1, 2, 3),
+ kwargs={"four": 4, "five": 5}
+ )
+
+ self.assertEqual(v, [1, 2, 3, 4, 5])
diff --git a/tests/test_time.py b/tests/test_time.py
index ce5ec03..78bbd2c 100644
--- a/tests/test_time.py
+++ b/tests/test_time.py
@@ -1,52 +1,49 @@
from unittest import TestCase
from pyutility import limit_time, timeit
-
-def func1(x):
- # recursive function to find xth fibonacci number
- if x < 3:
- return 1
- return func1(x-1) + func1(x-2)
-
-
-def func2():
- # error function
- return "a" / 2
-
-
-def func3(*args, **kwagrs):
- # args and kwargs function
- return list(args) + list(kwagrs.values())
+from .func import time, error, return_check
class TimeitTest(TestCase):
- def setUp(self):
- self.er_func = timeit(func2)
- self.func = timeit(func1)
- self.ka_func = timeit(func3)
def test_timeit1(self):
- self.assertIsInstance(self.func(5), float)
+ v = timeit(time, args=(5,))
+ self.assertIsInstance(v, float)
def test_timeit2(self):
- self.assertRaises(Exception, self.er_func)
+ self.assertRaises(Exception, timeit, error, 5)
class LimitTimeTest(TestCase):
- def setUp(self):
- self.er_func = limit_time(2)(func2)
- self.func = limit_time(2)(func1)
- self.ka_func = limit_time(2)(func3)
def test_limit_time_1(self):
- self.assertEqual(self.func(10), 55)
+ v = limit_time(time, time=2, args=(10,))
+ self.assertEqual(v, 55)
def test_limit_time_2(self):
- self.assertRaises(Exception, self.er_func)
+ self.assertRaises(
+ Exception,
+ limit_time,
+ error,
+ time=2,
+ args=(2,)
+ )
def test_limit_time_3(self):
- self.assertRaises(TimeoutError, self.func, 50)
+ self.assertRaises(
+ TimeoutError,
+ limit_time,
+ time,
+ time=2,
+ args=(50,)
+ )
def test_limit_time_4(self):
- self.assertEqual(self.ka_func(
- 1, 2, 3, four=4, five=5), [1, 2, 3, 4, 5])
+ v = limit_time(
+ return_check,
+ time=2,
+ args=(1, 2, 3),
+ kwargs={"four": 4, "five": 5}
+ )
+
+ self.assertEqual(v, [1, 2, 3, 4, 5])
diff --git a/tests/test_utility.py b/tests/test_utility.py
index 603316a..266cdfa 100644
--- a/tests/test_utility.py
+++ b/tests/test_utility.py
@@ -1,73 +1,65 @@
from unittest import TestCase
from pyutility import limit_resource, measureit
-
-def func1a(x):
- x = [i for i in range(x)]
- return -1
-
-
-def func1b(x):
- # recursive function to find xth fibonacci number
- if x < 3:
- return 1
- return func1b(x-1) + func1b(x-2)
-
-
-def func2():
- # error function
- return "a" / 2
-
-
-def func3(*args, **kwagrs):
- # args and kwargs function
- return list(args) + list(kwagrs.values())
+from .func import memory, time, return_check, error
class MeasureitTest(TestCase):
- def setUp(self):
- self.er_func = measureit(func2)
- self.func_m = measureit(func1a)
- self.func_t = measureit(func1b)
- self.ka_func = measureit(func3)
def test_measureit_1(self):
- self.assertIsInstance(self.func_m(100), tuple)
+ v = measureit(memory, args=(100,))
+ self.assertIsInstance(v, tuple)
def test_measureit_2(self):
- x = self.func_t(10)
- self.assertIsInstance(x[0], int)
- self.assertIsInstance(x[1], float)
+ v = measureit(time, args=(15,))
+ self.assertIsInstance(v[0], int)
+ self.assertIsInstance(v[1], float)
def test_measureit_3(self):
- self.assertIsInstance(self.func_t(15), tuple)
+ v = measureit(time, args=(15,))
+ self.assertIsInstance(v, tuple)
def test_measureit_4(self):
- self.assertRaises(Exception, self.er_func)
+ self.assertRaises(Exception, measureit, error, 100)
class LimitResourceTest(TestCase):
- def setUp(self):
- self.er_func = limit_resource(time=2)(func2)
- self.func_m = limit_resource(time=2)(func1a)
- self.func_t = limit_resource(time=2)(func1b)
- self.ka_func = limit_resource(time=2)(func3)
def test_limit_resource_1(self):
- self.assertEqual(self.func_m(300), -1)
+ v = limit_resource(memory, time=2, args=(300,))
+ self.assertEqual(v, -1)
def test_limit_resource_2(self):
- self.assertEqual(self.func_t(3), 2)
+ v = limit_resource(time, time=2, args=(3,))
+ self.assertEqual(v, 2)
def test_limit_resource_3(self):
- self.assertRaises(Exception, self.er_func)
+ self.assertRaises(Exception, error)
def test_limit_resource_4(self):
- self.assertRaises(MemoryError, self.func_m, 100_000_000)
+ self.assertRaises(
+ MemoryError,
+ limit_resource,
+ memory,
+ args=(100_000_000,),
+ time=2
+ )
def test_limit_resource_5(self):
- self.assertRaises(TimeoutError, self.func_t, 50)
+ self.assertRaises(
+ TimeoutError,
+ limit_resource,
+ time,
+ args=(50,),
+ time=2
+ )
def test_limit_resource_6(self):
- self.assertEqual(self.ka_func(
- 1, 2, 3, four=4, five=5), [1, 2, 3, 4, 5])
+ v = limit_resource(
+ return_check,
+ time=2,
+ args=(1, 2, 3),
+ kwargs={"four": 4, "five": 5}
+ )
+
+ self.assertEqual(v, [1, 2, 3, 4, 5])
| Multiprocessing Errors
Python throws EOFError and RuntimeError when a decorator is used. Even when the `if __name__ == "__main__"` guard is present, it throws PicklingError. This is because multiprocessing uses spawn to create new processes. Some details can be found here: [StackOverflow question](https://stackoverflow.com/questions/41385708/multiprocessing-example-giving-attributeerror)
Either using another method to start new processes, or changing the decorators into plain functions, may resolve the problem (a sketch of the function-based approach follows this entry). | 0.0 | 8f66fc1fc569c34798865ab063f1da4e7753c9d5 | [
"tests/test_memory.py::MemoryitTest::test_memoryit1",
"tests/test_memory.py::LimitMemoryTest::test_limit_memory_1",
"tests/test_memory.py::LimitMemoryTest::test_limit_memory_3",
"tests/test_memory.py::LimitMemoryTest::test_limit_memory_4",
"tests/test_time.py::TimeitTest::test_timeit1",
"tests/test_time.py::LimitTimeTest::test_limit_time_1",
"tests/test_time.py::LimitTimeTest::test_limit_time_3",
"tests/test_time.py::LimitTimeTest::test_limit_time_4",
"tests/test_utility.py::MeasureitTest::test_measureit_1",
"tests/test_utility.py::MeasureitTest::test_measureit_2",
"tests/test_utility.py::MeasureitTest::test_measureit_3",
"tests/test_utility.py::LimitResourceTest::test_limit_resource_1",
"tests/test_utility.py::LimitResourceTest::test_limit_resource_2",
"tests/test_utility.py::LimitResourceTest::test_limit_resource_4",
"tests/test_utility.py::LimitResourceTest::test_limit_resource_5",
"tests/test_utility.py::LimitResourceTest::test_limit_resource_6"
] | [
"tests/test_memory.py::MemoryitTest::test_memoryit2",
"tests/test_memory.py::LimitMemoryTest::test_limit_memory_2",
"tests/test_time.py::TimeitTest::test_timeit2",
"tests/test_time.py::LimitTimeTest::test_limit_time_2",
"tests/test_utility.py::MeasureitTest::test_measureit_4",
"tests/test_utility.py::LimitResourceTest::test_limit_resource_3"
] | {
"failed_lite_validators": [
"has_hyperlinks",
"has_many_modified_files",
"has_many_hunks",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false
} | 2020-12-05 14:07:56+00:00 | mit | 824 |
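A minimal sketch of the function-based workaround under spawn (hypothetical helper names, not py-utility's actual code):
```
import multiprocessing as mp

def _worker(func, storage, *args, **kwargs):
    storage.append(func(*args, **kwargs))

def run_in_subprocess(func, args=(), kwargs=None):
    # spawn pickles the target function by its module-level name; a decorator
    # re-binds that name to the wrapper, so the original can no longer be
    # pickled -- passing plain module-level functions avoids the problem.
    ctx = mp.get_context("spawn")
    with ctx.Manager() as manager:
        out = manager.list()
        p = ctx.Process(target=_worker, args=(func, out, *args), kwargs=kwargs or {})
        p.start()
        p.join()
        return out[-1]

def fib(n):
    return 1 if n < 3 else fib(n - 1) + fib(n - 2)

if __name__ == "__main__":
    print(run_in_subprocess(fib, args=(10,)))  # prints 55
```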
|
VirtusLab__git-machete-179 | diff --git a/git_machete/cmd.py b/git_machete/cmd.py
index 03f722b..9772a79 100644
--- a/git_machete/cmd.py
+++ b/git_machete/cmd.py
@@ -89,7 +89,7 @@ class MacheteClient:
def read_definition_file(self, verify_branches: bool = True) -> None:
with open(self._definition_file_path) as f:
- lines: List[str] = [line.rstrip() for line in f.readlines() if not line.isspace()]
+ lines: List[str] = [line.rstrip() for line in f.readlines()]
at_depth = {}
last_depth = -1
@@ -98,6 +98,8 @@ class MacheteClient:
invalid_branches: List[str] = []
for index, line in enumerate(lines):
+ if line == "" or line.isspace():
+ continue
prefix = "".join(itertools.takewhile(str.isspace, line))
if prefix and not self.__indent:
self.__indent = prefix
@@ -1819,7 +1821,6 @@ def launch(orig_args: List[str]) -> None:
cli_opts = CommandLineOptions()
git = GitContext(cli_opts)
- machete_client = MacheteClient(cli_opts, git)
if sys.version_info.major == 2 or (sys.version_info.major == 3 and sys.version_info.minor < 6):
version_str = f"{sys.version_info.major}.{sys.version_info.minor}.{sys.version_info.micro}"
@@ -1967,6 +1968,7 @@ def launch(orig_args: List[str]) -> None:
return in_args[0]
try:
+ machete_client = MacheteClient(cli_opts, git)
cmd = None
# Let's first extract the common options like `--help` or `--verbose` that might appear BEFORE the command,
# as in e.g. `git machete --verbose status`.
| VirtusLab/git-machete | 974914523542045104806e52be8135027603f771 | diff --git a/git_machete/tests/functional/test_machete.py b/git_machete/tests/functional/test_machete.py
index f549acf..fb587a0 100644
--- a/git_machete/tests/functional/test_machete.py
+++ b/git_machete/tests/functional/test_machete.py
@@ -10,8 +10,10 @@ from contextlib import redirect_stdout
from typing import Iterable
from git_machete import cmd
+from git_machete.exceptions import MacheteException
from git_machete.git_operations import GitContext
from git_machete.options import CommandLineOptions
+from git_machete.utils import fmt
cli_opts: CommandLineOptions = CommandLineOptions()
git: GitContext = GitContext(cli_opts)
@@ -92,6 +94,12 @@ class MacheteTester(unittest.TestCase):
git.flush_caches()
return out.getvalue()
+ @staticmethod
+ def rewrite_definition_file(new_body: str) -> None:
+ definition_file_path = git.get_git_subpath("machete")
+ with open(os.path.join(os.getcwd(), definition_file_path), 'w') as def_file:
+ def_file.writelines(new_body)
+
def assert_command(self, cmds: Iterable[str], expected_result: str) -> None:
self.assertEqual(self.launch_command(*cmds), self.adapt(expected_result))
@@ -175,6 +183,25 @@ class MacheteTester(unittest.TestCase):
""",
)
+ def test_branch_reappers_in_definition(self) -> None:
+ body: str = \
+ """master
+ \tdevelop
+ \t\n
+ develop
+ """
+ expected_error_msg: str = fmt('.git/machete, line 5: branch `develop` re-appears in the tree definition. Edit the definition file manually with `git machete edit`')
+
+ self.repo_sandbox.new_branch("root")
+ self.rewrite_definition_file(body)
+
+ machete_client = cmd.MacheteClient(cli_opts, git) # Only to workaround sys.exit while calling launch(['status'])
+ try:
+ machete_client.read_definition_file()
+ except MacheteException as e:
+ if e.parameter != expected_error_msg:
+ self.fail(f'Actual Exception message: {e} \nis not equal to expected message: {expected_error_msg}')
+
def test_show(self) -> None:
self.setup_discover_standard_tree()
| Line numbers displayed in errors refer to non-empty lines, not all lines
.git/machete file:
```
master
develop
develop
```
The empty lines lead to an error in `git machete status`:
`.git/machete, line 3: branch develop re-appears in the tree definition. Edit the definition file manually with git machete edit`
while it should say:
`.git/machete, line 5: branch develop re-appears in the tree definition. Edit the definition file manually with git machete edit`
This is due to how `read_definition_file` is implemented; it should be pretty easy to fix (a sketch follows this entry). | 0.0 | 974914523542045104806e52be8135027603f771 | [
"git_machete/tests/functional/test_machete.py::MacheteTester::test_branch_reappers_in_definition"
] | [
"git_machete/tests/functional/test_machete.py::MacheteTester::test_advance_with_few_possible_downstream_branches_and_yes_option",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_advance_with_no_downstream_branches",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_advance_with_one_downstream_branch",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_discover_traverse_squash",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_go_down",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_go_first_root_with_downstream",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_go_first_root_without_downstream",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_go_last",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_go_next_successor_exists",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_go_next_successor_on_another_root_tree",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_go_prev_successor_exists",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_go_prev_successor_on_another_root_tree",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_go_root",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_go_up",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_help",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_show",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_show_down",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_show_first",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_show_last",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_show_next",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_show_prev",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_show_root",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_show_up",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_slide_out",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_squash_merge",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_traverse_no_push",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_traverse_no_push_override",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_traverse_no_push_untracked",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_update_with_fork_point_not_specified",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_update_with_fork_point_specified"
] | {
"failed_lite_validators": [
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false
} | 2021-08-19 13:47:27+00:00 | mit | 825 |
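A minimal sketch of the fix merged above — enumerate over all lines and skip blanks inside the loop, so reported line numbers match the file (simplified, not the actual `read_definition_file`):
```
def find_duplicates(lines):
    seen = set()
    for index, line in enumerate(lines, start=1):
        branch = line.strip()
        if not branch:
            continue  # blank lines still advance `index`
        if branch in seen:
            raise ValueError(f"line {index}: branch {branch} re-appears in the tree definition")
        seen.add(branch)

try:
    find_duplicates(["master", "  develop", "", "", "develop"])
except ValueError as e:
    print(e)  # line 5: branch develop re-appears in the tree definition
```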
|
VirtusLab__git-machete-330 | diff --git a/git_machete/cli.py b/git_machete/cli.py
index 19a6a38..8824ec9 100644
--- a/git_machete/cli.py
+++ b/git_machete/cli.py
@@ -358,15 +358,25 @@ def create_cli_parser() -> argparse.ArgumentParser:
def update_cli_opts_using_parsed_args(
cli_opts: git_machete.options.CommandLineOptions,
parsed_args: argparse.Namespace) -> None:
+
+ # Warning: In mypy, Arguments that come from untyped functions/variables are silently treated by mypy as Any.
+ # Since argparse is not typed, everything that comes from argparse.Namespace will be taken as Any :(
+ # Even if we add type=LocalBranchShortName into argument parser for branch,
+ # python debugger will see branch as LocalBranchShortName but mypy always will see it as Any,
+ # until you specifically tell mypy what is the exact type by casting (right now it's done this way below, but casting does not solve all of the problems).
+ #
+ # The reasonable solution here would be to use Typed Argument Parser which is a wrapper over argparse with modernised solution for typing.
+ # But it would add external dependency to git-machete, so let's stick to current casting.
+
for opt, arg in vars(parsed_args).items():
if opt == "branch":
- cli_opts.opt_branch = arg
+ cli_opts.opt_branch = LocalBranchShortName.of(arg)
elif opt == "checked_out_since":
cli_opts.opt_checked_out_since = arg
elif opt == "color":
cli_opts.opt_color = arg
elif opt == "down_fork_point":
- cli_opts.opt_down_fork_point = arg
+ cli_opts.opt_down_fork_point = AnyRevision.of(arg)
elif opt == "debug":
cli_opts.opt_debug = True
elif opt == "draft":
@@ -374,7 +384,7 @@ def update_cli_opts_using_parsed_args(
elif opt == "fetch":
cli_opts.opt_fetch = True
elif opt == "fork_point":
- cli_opts.opt_fork_point = arg
+ cli_opts.opt_fork_point = AnyRevision.of(arg)
elif opt == "inferred":
cli_opts.opt_inferred = True
elif opt == "list_commits_with_hashes":
@@ -397,7 +407,7 @@ def update_cli_opts_using_parsed_args(
elif opt == "no_push_untracked":
cli_opts.opt_push_untracked = False
elif opt == "onto":
- cli_opts.opt_onto = arg
+ cli_opts.opt_onto = LocalBranchShortName.of(arg)
elif opt == "override_to":
cli_opts.opt_override_to = arg
elif opt == "override_to_inferred":
@@ -412,7 +422,7 @@ def update_cli_opts_using_parsed_args(
elif opt == "as_root":
cli_opts.opt_as_root = True
elif opt == "roots":
- cli_opts.opt_roots = arg.split(",")
+ cli_opts.opt_roots = list(map(LocalBranchShortName.of, arg.split(",")))
elif opt == "return_to":
cli_opts.opt_return_to = arg
elif opt == "stat":
| VirtusLab/git-machete | 18f7e32640cab8483c32a231a478c23c1a50ec67 | diff --git a/git_machete/tests/functional/test_machete.py b/git_machete/tests/functional/test_machete.py
index e6f934d..36165d7 100644
--- a/git_machete/tests/functional/test_machete.py
+++ b/git_machete/tests/functional/test_machete.py
@@ -2647,3 +2647,31 @@ class MacheteTester(unittest.TestCase):
roots_only_commit_hash, log_content,
msg="Verify that commits from parent branch are not visible when "
"executing `git machete log`.")
+
+ @mock.patch('git_machete.utils.run_cmd', mock_run_cmd) # to hide git outputs in tests
+ def test_add(self) -> None:
+ """
+ Verify behaviour of a 'git machete add' command.
+ """
+ (
+ self.repo_sandbox.new_branch("master")
+ .commit("master commit.")
+ .new_branch("develop")
+ .commit("develop commit.")
+ .new_branch("feature")
+ .commit("feature commit.")
+ .check_out("develop")
+ .commit("New commit on develop")
+ )
+ self.launch_command("discover", "-y")
+ self.repo_sandbox.new_branch("bugfix/feature_fail")
+
+ self.assert_command(['add', '-y', 'bugfix/feature_fail'], 'Adding `bugfix/feature_fail` onto the inferred upstream (parent) branch `develop`\n'
+ 'Added branch `bugfix/feature_fail` onto `develop`\n', strip_indentation=False)
+
+ # test with --onto option
+ self.repo_sandbox.new_branch("chore/remove_indentation")
+
+ self.assert_command(['add', '--onto=feature'],
+ 'Added branch `chore/remove_indentation` onto `feature`\n',
+ strip_indentation=False)
| `AttributeError: 'str' object has no attribute 'full_name'` due to (?) mistyped `CommandLineOptions.opt_branch`
For an already existing, but not yet managed branch `foo/bar`:
```
$ git machete add foo/bar
Traceback (most recent call last):
File "/usr/local/bin/git-machete", line 4, in <module>
__import__('pkg_resources').run_script('git-machete==3.5.0', 'git-machete')
File "/usr/lib/python3.7/site-packages/pkg_resources/__init__.py", line 666, in run_script
self.require(requires)[0].run_script(script_name, ns)
File "/usr/lib/python3.7/site-packages/pkg_resources/__init__.py", line 1469, in run_script
exec(script_code, namespace, namespace)
File "/usr/local/lib/python3.7/site-packages/git_machete-3.5.0-py3.7.egg/EGG-INFO/scripts/git-machete", line 19, in <module>
File "/usr/local/lib/python3.7/site-packages/git_machete-3.5.0-py3.7.egg/git_machete/cli.py", line 803, in main
File "/usr/local/lib/python3.7/site-packages/git_machete-3.5.0-py3.7.egg/git_machete/cli.py", line 522, in launch
File "/usr/local/lib/python3.7/site-packages/git_machete-3.5.0-py3.7.egg/git_machete/client.py", line 263, in add
File "/usr/local/lib/python3.7/site-packages/git_machete-3.5.0-py3.7.egg/git_machete/client.py", line 1502, in __infer_upstream
File "/usr/local/lib/python3.7/site-packages/git_machete-3.5.0-py3.7.egg/git_machete/client.py", line 1479, in __match_log_to_filtered_reflogs
File "/usr/local/lib/python3.7/site-packages/git_machete-3.5.0-py3.7.egg/git_machete/git_operations.py", line 495, in spoonfeed_log_shas
File "/usr/local/lib/python3.7/site-packages/git_machete-3.5.0-py3.7.egg/git_machete/git_operations.py", line 487, in __get_log_shas
AttributeError: 'str' object has no attribute 'full_name'
```
This is most likely due to the assignment `cli_opts.opt_branch = arg`, with `arg` being `str`-typed (?)... not sure why it passed mypy in the first place; please investigate.
Also, a regression test and preferably a systematic solution against this kind of error would be very useful (a sketch of the typing pattern involved follows this entry). | 0.0 | 18f7e32640cab8483c32a231a478c23c1a50ec67 | [
"git_machete/tests/functional/test_machete.py::MacheteTester::test_add"
] | [
"git_machete/tests/functional/test_machete.py::MacheteTester::test_advance_with_few_possible_downstream_branches_and_yes_option",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_advance_with_no_downstream_branches",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_advance_with_one_downstream_branch",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_anno_prs",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_branch_reappers_in_definition",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_checkout_prs",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_checkout_prs_freshly_cloned",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_checkout_prs_from_fork_with_deleted_repo",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_discover_traverse_squash",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_github_create_pr",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_go_down",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_go_first_root_with_downstream",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_go_first_root_without_downstream",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_go_last",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_go_next_successor_exists",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_go_next_successor_on_another_root_tree",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_go_prev_successor_exists",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_go_prev_successor_on_another_root_tree",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_go_root",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_go_up",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_help",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_log",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_retarget_pr",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_show",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_show_down",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_show_first",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_show_last",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_show_next",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_show_prev",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_show_root",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_show_up",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_slide_out",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_slide_out_with_down_fork_point_and_multiple_children_of_last_branch",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_slide_out_with_invalid_down_fork_point",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_slide_out_with_valid_down_fork_point",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_squash_merge",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_squash_with_invalid_fork_point",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_squash_with_valid_fork_point",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_traverse_no_push",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_traverse_no_push_override",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_traverse_no_push_untracked",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_update_with_fork_point_not_specified",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_update_with_fork_point_specified",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_update_with_invalid_fork_point"
] | {
"failed_lite_validators": [
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false
} | 2021-11-03 12:50:09+00:00 | mit | 826 |
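A sketch of the `str`-subclass pattern involved here (class and method names match the real code, but the bodies are simplified assumptions; the explicit cast at the argparse boundary is exactly what the patch above adds):
```
class LocalBranchShortName(str):
    @staticmethod
    def of(value):
        return LocalBranchShortName(value)

    def full_name(self):
        return "refs/heads/" + self  # simplified; the real method wraps this in a full-name type

raw = "foo/bar"                          # argparse yields a plain str (typed as Any)
branch = LocalBranchShortName.of(raw)    # the explicit cast added by the fix
print(branch.full_name())                # refs/heads/foo/bar
# raw.full_name() would raise: AttributeError: 'str' object has no attribute 'full_name'
```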
|
VirtusLab__git-machete-348 | diff --git a/git_machete/client.py b/git_machete/client.py
index 556fa3d..d1d9146 100644
--- a/git_machete/client.py
+++ b/git_machete/client.py
@@ -1631,6 +1631,7 @@ class MacheteClient:
*,
branch: LocalBranchShortName,
is_called_from_traverse: bool,
+ is_called_from_create_pr: bool,
opt_push_untracked: bool,
opt_push_tracked: bool,
opt_yes: bool
@@ -1654,6 +1655,7 @@ class MacheteClient:
new_remote=rems[index],
branch=branch,
is_called_from_traverse=is_called_from_traverse,
+ is_called_from_create_pr=is_called_from_create_pr,
opt_push_untracked=opt_push_untracked,
opt_push_tracked=opt_push_tracked,
opt_yes=opt_yes)
@@ -1667,12 +1669,13 @@ class MacheteClient:
new_remote: str,
branch: LocalBranchShortName,
is_called_from_traverse: bool,
+ is_called_from_create_pr: bool,
opt_push_untracked: bool,
opt_push_tracked: bool,
opt_yes: bool
) -> None:
rems: List[str] = self.__git.get_remotes()
- can_pick_other_remote = len(rems) > 1
+ can_pick_other_remote = len(rems) > 1 and not is_called_from_create_pr
other_remote_choice = "o[ther-remote]" if can_pick_other_remote else ""
remote_branch = RemoteBranchShortName.of(f"{new_remote}/{branch}")
if not self.__git.get_commit_sha_by_revision(remote_branch):
@@ -1694,6 +1697,7 @@ class MacheteClient:
self.__pick_remote(
branch=branch,
is_called_from_traverse=is_called_from_traverse,
+ is_called_from_create_pr=is_called_from_create_pr,
opt_push_untracked=opt_push_untracked,
opt_push_tracked=opt_push_tracked,
opt_yes=opt_yes)
@@ -1708,6 +1712,7 @@ class MacheteClient:
self.__pick_remote(
branch=branch,
is_called_from_traverse=is_called_from_traverse,
+ is_called_from_create_pr=is_called_from_create_pr,
opt_push_untracked=opt_push_untracked,
opt_push_tracked=opt_push_tracked,
opt_yes=opt_yes)
@@ -1783,6 +1788,7 @@ class MacheteClient:
self.__pick_remote(
branch=branch,
is_called_from_traverse=is_called_from_traverse,
+ is_called_from_create_pr=is_called_from_create_pr,
opt_push_untracked=opt_push_untracked,
opt_push_tracked=opt_push_tracked,
opt_yes=opt_yes)
@@ -1994,9 +2000,24 @@ class MacheteClient:
self.flush_caches()
base: Optional[LocalBranchShortName] = self.up_branch.get(LocalBranchShortName.of(head))
+ if not base:
+ raise MacheteException(f'Could not determine base branch for PR. Branch `{head}` is a root branch.')
org: str
repo: str
- _, (org, repo) = self.__derive_remote_and_github_org_and_repo()
+ remote, (org, repo) = self.__derive_remote_and_github_org_and_repo()
+ print(f"Fetching {remote}...")
+ self.__git.fetch_remote(remote)
+ if '/'.join([remote, base]) not in self.__git.get_remote_branches():
+ warn(f'Base branch for this PR (`{base}`) is not found on remote, pushing...')
+ self.handle_untracked_branch(
+ branch=base,
+ new_remote=remote,
+ is_called_from_traverse=False,
+ is_called_from_create_pr=True,
+ opt_push_tracked=False,
+ opt_push_untracked=True,
+ opt_yes=False)
+
current_user: Optional[str] = git_machete.github.derive_current_user_login()
debug(f'create_github_pr({head})', f'organization is {org}, repository is {repo}')
debug(f'create_github_pr({head})', 'current GitHub user is ' + (current_user or '<none>'))
@@ -2079,13 +2100,16 @@ class MacheteClient:
new_remote=rmt,
branch=branch,
is_called_from_traverse=is_called_from_traverse,
+ is_called_from_create_pr=False,
opt_push_untracked=opt_push_untracked,
opt_push_tracked=opt_push_tracked,
opt_yes=opt_yes)
elif len(rems) == 1:
self.handle_untracked_branch(
- new_remote=rems[0], branch=branch,
+ new_remote=rems[0],
+ branch=branch,
is_called_from_traverse=is_called_from_traverse,
+ is_called_from_create_pr=False,
opt_push_untracked=opt_push_untracked,
opt_push_tracked=opt_push_tracked,
opt_yes=opt_yes)
@@ -2094,6 +2118,7 @@ class MacheteClient:
new_remote="origin",
branch=branch,
is_called_from_traverse=is_called_from_traverse,
+ is_called_from_create_pr=False,
opt_push_untracked=opt_push_untracked,
opt_push_tracked=opt_push_tracked,
opt_yes=opt_yes)
@@ -2103,6 +2128,7 @@ class MacheteClient:
self.__pick_remote(
branch=branch,
is_called_from_traverse=is_called_from_traverse,
+ is_called_from_create_pr=False,
opt_push_untracked=opt_push_untracked,
opt_push_tracked=opt_push_tracked,
opt_yes=opt_yes)
| VirtusLab/git-machete | 931fd2c58aaebdc325e1fd2c9ceacecee80f72e7 | diff --git a/git_machete/tests/functional/test_machete.py b/git_machete/tests/functional/test_machete.py
index 36165d7..55749e7 100644
--- a/git_machete/tests/functional/test_machete.py
+++ b/git_machete/tests/functional/test_machete.py
@@ -1927,6 +1927,8 @@ class MacheteTester(unittest.TestCase):
git_api_state_for_test_create_pr = MockGithubAPIState([{'head': {'ref': 'ignore-trailing', 'repo': mock_repository_info}, 'user': {'login': 'github_user'}, 'base': {'ref': 'hotfix/add-trigger'}, 'number': '3', 'html_url': 'www.github.com', 'state': 'open'}],
issues=[{'number': '4'}, {'number': '5'}, {'number': '6'}])
+ # We need to mock GITHUB_REMOTE_PATTERNS in the tests for `test_github_create_pr` due to `git fetch` executed by `create-pr` subcommand.
+ @mock.patch('git_machete.github.GITHUB_REMOTE_PATTERNS', FAKE_GITHUB_REMOTE_PATTERNS)
@mock.patch('git_machete.utils.run_cmd', mock_run_cmd) # to hide git outputs in tests
@mock.patch('git_machete.options.CommandLineOptions', FakeCommandLineOptions)
@mock.patch('git_machete.client.MacheteClient.ask_if', mock_ask_if)
@@ -2093,6 +2095,47 @@ class MacheteTester(unittest.TestCase):
self.assertEqual(e.exception.parameter, expected_error_message,
'Verify that expected error message has appeared when creating PR from root branch.')
+ git_api_state_for_test_create_pr_missing_base_branch_on_remote = MockGithubAPIState([{'head': {'ref': 'chore/redundant_checks', 'repo': mock_repository_info}, 'user': {'login': 'github_user'}, 'base': {'ref': 'restrict_access'}, 'number': '18', 'html_url': 'www.github.com', 'state': 'open'}])
+
+ # We need to mock GITHUB_REMOTE_PATTERNS in the tests for `test_github_create_pr` due to `git fetch` executed by `create-pr` subcommand.
+ @mock.patch('git_machete.github.GITHUB_REMOTE_PATTERNS', FAKE_GITHUB_REMOTE_PATTERNS)
+ @mock.patch('git_machete.utils.run_cmd', mock_run_cmd) # to hide git outputs in tests
+ @mock.patch('git_machete.options.CommandLineOptions', FakeCommandLineOptions)
+ @mock.patch('git_machete.client.MacheteClient.ask_if', mock_ask_if)
+ @mock.patch('urllib.request.urlopen', MockContextManager)
+ @mock.patch('urllib.request.Request', git_api_state_for_test_create_pr_missing_base_branch_on_remote.new_request())
+ def test_github_create_pr_missing_base_branch_on_remote(self) -> None:
+ (
+ self.repo_sandbox.new_branch("root")
+ .commit("initial commit")
+ .new_branch("develop")
+ .commit("first commit on develop")
+ .push()
+ .new_branch("feature/api_handling")
+ .commit("Introduce GET and POST methods on API")
+ .new_branch("feature/api_exception_handling")
+ .commit("catch exceptions coming from API")
+ .push()
+ .delete_branch("root")
+ )
+
+ self.launch_command('discover')
+
+ expected_msg = ("Fetching origin...\n"
+ "Warn: Base branch for this PR (`feature/api_handling`) is not found on remote, pushing...\n"
+ "Creating a PR from `feature/api_exception_handling` to `feature/api_handling`... OK, see www.github.com\n")
+ self.assert_command(['github', 'create-pr'], expected_msg, strip_indentation=False)
+ self.assert_command(
+ ['status'],
+ """
+ develop
+ |
+ o-feature/api_handling
+ |
+ o-feature/api_exception_handling * PR #19
+ """,
+ )
+
git_api_state_for_test_checkout_prs = MockGithubAPIState([
{'head': {'ref': 'chore/redundant_checks', 'repo': mock_repository_info}, 'user': {'login': 'github_user'}, 'base': {'ref': 'restrict_access'}, 'number': '18', 'html_url': 'www.github.com', 'state': 'open'},
{'head': {'ref': 'restrict_access', 'repo': mock_repository_info}, 'user': {'login': 'github_user'}, 'base': {'ref': 'allow-ownership-link'}, 'number': '17', 'html_url': 'www.github.com', 'state': 'open'},
| `github create-pr`: check if base branch for PR exists in remote
An attempt to create a pull request whose base branch has already been deleted from the remote ends up with an `Unprocessable Entity` error (example in #332).
Proposed solution:
Perform `git fetch <remote>` at the beginning of `create-pr`; if the base branch is not present among the remote branches, perform `handle_untracked_branch` with the relevant remote for the missing base branch (a sketch follows this entry). | 0.0 | 931fd2c58aaebdc325e1fd2c9ceacecee80f72e7 | [
"git_machete/tests/functional/test_machete.py::MacheteTester::test_github_create_pr_missing_base_branch_on_remote"
] | [
"git_machete/tests/functional/test_machete.py::MacheteTester::test_add",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_advance_with_few_possible_downstream_branches_and_yes_option",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_advance_with_no_downstream_branches",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_advance_with_one_downstream_branch",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_anno_prs",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_branch_reappers_in_definition",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_checkout_prs",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_checkout_prs_freshly_cloned",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_checkout_prs_from_fork_with_deleted_repo",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_discover_traverse_squash",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_github_create_pr",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_go_down",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_go_first_root_with_downstream",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_go_first_root_without_downstream",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_go_last",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_go_next_successor_exists",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_go_next_successor_on_another_root_tree",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_go_prev_successor_exists",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_go_prev_successor_on_another_root_tree",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_go_root",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_go_up",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_help",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_log",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_retarget_pr",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_show",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_show_down",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_show_first",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_show_last",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_show_next",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_show_prev",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_show_root",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_show_up",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_slide_out",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_slide_out_with_down_fork_point_and_multiple_children_of_last_branch",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_slide_out_with_invalid_down_fork_point",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_slide_out_with_valid_down_fork_point",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_squash_merge",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_squash_with_invalid_fork_point",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_squash_with_valid_fork_point",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_traverse_no_push",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_traverse_no_push_override",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_traverse_no_push_untracked",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_update_with_fork_point_not_specified",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_update_with_fork_point_specified",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_update_with_invalid_fork_point"
] | {
"failed_lite_validators": [
"has_issue_reference",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false
} | 2021-11-16 14:09:49+00:00 | mit | 827 |
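A sketch of the proposed check, simplified from the patch above (`git` stands for git-machete's `GitContext`; `push_base_branch` stands in for the real `handle_untracked_branch(...)` call):
```
def ensure_base_branch_on_remote(git, remote, base, push_base_branch):
    print(f"Fetching {remote}...")
    git.fetch_remote(remote)
    if f"{remote}/{base}" not in git.get_remote_branches():
        print(f"Warn: base branch for this PR (`{base}`) is not found on remote, pushing...")
        push_base_branch(base, remote)  # in the patch: self.handle_untracked_branch(...)
```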
|
VirtusLab__git-machete-350 | diff --git a/git_machete/exceptions.py b/git_machete/exceptions.py
index a1f9893..ae09ea3 100644
--- a/git_machete/exceptions.py
+++ b/git_machete/exceptions.py
@@ -21,7 +21,7 @@ class UnprocessableEntityHTTPError(MacheteException):
or creating a pull request for a branch that already has a PR.
"""
def __init__(self, msg: str) -> None:
- self.msg: str = 'UnprocessableEntityHTTPError: ' + msg
+ self.msg: str = msg
def __str__(self) -> str:
return str(self.msg)
diff --git a/git_machete/github.py b/git_machete/github.py
index 031bcb5..8b246d4 100644
--- a/git_machete/github.py
+++ b/git_machete/github.py
@@ -9,9 +9,9 @@ import subprocess
from pathlib import Path
from typing import Any, Dict, List, Optional, Tuple
import urllib.request
-from urllib.error import HTTPError
+import urllib.error
-from git_machete.utils import debug, fmt, find_or_none
+from git_machete.utils import debug, fmt
from git_machete.exceptions import MacheteException, UnprocessableEntityHTTPError
from git_machete.git_operations import GitContext, LocalBranchShortName
@@ -158,7 +158,7 @@ def __fire_github_api_request(method: str, path: str, token: Optional[str], requ
with urllib.request.urlopen(http_request) as response:
parsed_response_body: Any = json.loads(response.read().decode())
return parsed_response_body
- except HTTPError as err:
+ except urllib.error.HTTPError as err:
if err.code == http.HTTPStatus.UNPROCESSABLE_ENTITY:
error_response = json.loads(err.read().decode())
error_reason: str = __extract_failure_info_from_422(error_response)
@@ -193,13 +193,8 @@ def create_pull_request(org: str, repo: str, head: str, base: str, title: str, d
'body': description,
'draft': draft
}
- prs: List[GitHubPullRequest] = derive_pull_requests(org, repo)
- pr_found: Optional[GitHubPullRequest] = find_or_none(lambda pr: pr.base == base and pr.head == head, prs)
- if not pr_found:
- pr = __fire_github_api_request('POST', f'/repos/{org}/{repo}/pulls', token, request_body)
- return __parse_pr_json(pr)
- else:
- raise MacheteException(f'Pull request for branch {head} is already created under link {pr_found.html_url}!\nPR details: {pr_found}')
+ pr = __fire_github_api_request('POST', f'/repos/{org}/{repo}/pulls', token, request_body)
+ return __parse_pr_json(pr)
def add_assignees_to_pull_request(org: str, repo: str, number: int, assignees: List[str]) -> None:
| VirtusLab/git-machete | 5c5bef4a52ef72bdb4bc095210d15acdc1826ffd | diff --git a/git_machete/tests/functional/test_machete.py b/git_machete/tests/functional/test_machete.py
index 55749e7..24e5542 100644
--- a/git_machete/tests/functional/test_machete.py
+++ b/git_machete/tests/functional/test_machete.py
@@ -174,6 +174,9 @@ class MockGithubAPIRequest:
def update_pull_request(self) -> "MockGithubAPIResponse":
pull_no: str = self.find_number(self.parsed_url.path, 'pulls')
if not pull_no:
+ if self.is_pull_created():
+ return self.make_response_object(HTTPStatus.UNPROCESSABLE_ENTITY, {'message': 'Validation Failed', 'errors': [
+ {'message': f'A pull request already exists for test_repo:{json.loads(self.json_data)["head"]}.'}]})
return self.create_pull_request()
pull: Dict[str, Any] = self.github_api_state.get_pull(pull_no)
return self.fill_pull_request_data(json.loads(self.json_data), pull)
@@ -221,6 +224,17 @@ class MockGithubAPIRequest:
self.github_api_state.issues.append(issue)
return self.make_response_object(HTTPStatus.CREATED, issue)
+ def is_pull_created(self) -> bool:
+ deserialized_json_data = json.loads(self.json_data)
+ head: str = deserialized_json_data['head']
+ base: str = deserialized_json_data['base']
+ for pull in self.github_api_state.pulls:
+ pull_head: str = pull['head']['ref']
+ pull_base: str = pull['base']['ref']
+ if (head, base) == (pull_head, pull_base):
+ return True
+ return False
+
@staticmethod
def get_index_or_none(entity: Dict[str, Any], base: List[Dict[str, Any]]) -> Optional[int]:
try:
@@ -245,6 +259,15 @@ class MockGithubAPIRequest:
return str(max(numbers) + 1)
+class MockHTTPError(HTTPError):
+ def __init__(self, url: str, code: int, msg: Any, hdrs: Dict[str, str], fp: Any) -> None:
+ super().__init__(url, code, msg, hdrs, fp)
+ self.msg = msg
+
+ def read(self, n: int = 1) -> bytes:
+ return json.dumps(self.msg).encode()
+
+
class MockContextManager:
def __init__(self, obj: MockGithubAPIResponse) -> None:
self.obj = obj
@@ -252,6 +275,8 @@ class MockContextManager:
def __enter__(self) -> MockGithubAPIResponse:
if self.obj.status_code == HTTPStatus.NOT_FOUND:
raise HTTPError(None, 404, 'Not found', None, None)
+ elif self.obj.status_code == HTTPStatus.UNPROCESSABLE_ENTITY:
+ raise MockHTTPError(None, 422, self.obj.response_data, None, None)
return self.obj
def __exit__(self, *args: Any) -> None:
@@ -1927,6 +1952,7 @@ class MacheteTester(unittest.TestCase):
git_api_state_for_test_create_pr = MockGithubAPIState([{'head': {'ref': 'ignore-trailing', 'repo': mock_repository_info}, 'user': {'login': 'github_user'}, 'base': {'ref': 'hotfix/add-trigger'}, 'number': '3', 'html_url': 'www.github.com', 'state': 'open'}],
issues=[{'number': '4'}, {'number': '5'}, {'number': '6'}])
+ @mock.patch('urllib.error.HTTPError', MockHTTPError) # need to provide read() method, which does not actually reads error from url
# We need to mock GITHUB_REMOTE_PATTERNS in the tests for `test_github_create_pr` due to `git fetch` executed by `create-pr` subcommand.
@mock.patch('git_machete.github.GITHUB_REMOTE_PATTERNS', FAKE_GITHUB_REMOTE_PATTERNS)
@mock.patch('git_machete.utils.run_cmd', mock_run_cmd) # to hide git outputs in tests
@@ -2062,12 +2088,12 @@ class MacheteTester(unittest.TestCase):
)
# check against attempt to create already existing pull request
machete_client = MacheteClient(git)
- expected_error_message = "Pull request for branch hotfix/add-trigger is already created under link www.github.com!\nPR details: PR #6 by github_user: hotfix/add-trigger -> master"
+ expected_error_message = "A pull request already exists for test_repo:hotfix/add-trigger."
machete_client.read_definition_file()
with self.assertRaises(MacheteException) as e:
machete_client.create_github_pr(head=LocalBranchShortName.of('hotfix/add-trigger'), opt_draft=False, opt_onto=None)
if e:
- self.assertEqual(e.exception.parameter, expected_error_message,
+ self.assertEqual(e.exception.msg, expected_error_message, # type: ignore
'Verify that expected error message has appeared when given pull request to create is already created.')
# check against head branch is ancestor or equal to base branch
| Is the check for PR existence still needed now that we read the body of the 422 response?
`git_machete.github.create_pull_request`:
```
prs: List[GitHubPullRequest] = derive_pull_requests(org, repo)
pr_found: Optional[GitHubPullRequest] = find_or_none(lambda pr: pr.base == base and pr.head == head, prs)
if not pr_found:
pr = __fire_github_api_request('POST', f'/repos/{org}/{repo}/pulls', token, request_body)
return __parse_pr_json(pr)
else:
raise MacheteException(f'Pull request for branch {head} is already created under link {pr_found.html_url}!\nPR details: {pr_found}')
```
Maybe we can skip one request to the API (`derive_pull_requests`) and rely on our new handling for 422 now? | 0.0 | 5c5bef4a52ef72bdb4bc095210d15acdc1826ffd | [
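A minimal sketch of the 422-reliant flow this question proposes; the names (`create_pull_request_directly`, `RuntimeError` in place of `MacheteException`) and the shape of the error body are assumptions based on the snippets in this record, not the project's exact code:

```python
import http
import json
import urllib.error
import urllib.request

def create_pull_request_directly(url: str, token: str, request_body: dict) -> dict:
    # Fire the POST unconditionally; GitHub itself rejects a duplicate
    # head/base pair with 422 Unprocessable Entity, so the pre-flight
    # derive_pull_requests() lookup can be dropped.
    request = urllib.request.Request(
        url, data=json.dumps(request_body).encode(), method='POST',
        headers={'Authorization': f'token {token}'})
    try:
        with urllib.request.urlopen(request) as response:
            return json.loads(response.read().decode())
    except urllib.error.HTTPError as err:
        if err.code == http.HTTPStatus.UNPROCESSABLE_ENTITY:
            # GitHub's own message, e.g. "A pull request already exists for <org>:<head>."
            error_response = json.loads(err.read().decode())
            raise RuntimeError(error_response['errors'][0]['message']) from err
        raise
```

This mirrors what the record's patch ends up doing: the `prs`/`pr_found` pre-check is deleted, and the 422 branch of `__fire_github_api_request` surfaces GitHub's message instead.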
"git_machete/tests/functional/test_machete.py::MacheteTester::test_github_create_pr"
] | [
"git_machete/tests/functional/test_machete.py::MacheteTester::test_add",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_advance_with_few_possible_downstream_branches_and_yes_option",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_advance_with_no_downstream_branches",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_advance_with_one_downstream_branch",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_anno_prs",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_branch_reappers_in_definition",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_checkout_prs",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_checkout_prs_freshly_cloned",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_checkout_prs_from_fork_with_deleted_repo",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_discover_traverse_squash",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_github_create_pr_missing_base_branch_on_remote",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_go_down",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_go_first_root_with_downstream",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_go_first_root_without_downstream",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_go_last",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_go_next_successor_exists",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_go_next_successor_on_another_root_tree",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_go_prev_successor_exists",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_go_prev_successor_on_another_root_tree",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_go_root",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_go_up",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_help",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_log",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_retarget_pr",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_show",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_show_down",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_show_first",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_show_last",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_show_next",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_show_prev",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_show_root",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_show_up",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_slide_out",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_slide_out_with_down_fork_point_and_multiple_children_of_last_branch",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_slide_out_with_invalid_down_fork_point",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_slide_out_with_valid_down_fork_point",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_squash_merge",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_squash_with_invalid_fork_point",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_squash_with_valid_fork_point",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_traverse_no_push",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_traverse_no_push_override",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_traverse_no_push_untracked",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_update_with_fork_point_not_specified",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_update_with_fork_point_specified",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_update_with_invalid_fork_point"
] | {
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false
} | 2021-11-18 14:42:19+00:00 | mit | 828 |
|
VirtusLab__git-machete-353 | diff --git a/RELEASE_NOTES.md b/RELEASE_NOTES.md
index 4cb7056..395fed7 100644
--- a/RELEASE_NOTES.md
+++ b/RELEASE_NOTES.md
@@ -1,5 +1,9 @@
# Release notes
+## New in git-machete 3.6.0
+
+- added: `t` alias for `traverse` command
+
## New in git-machete 3.5.0
- added: new way of acquiring the github token (from `~/.github-token`)
diff --git a/docs/source/cli_help/traverse.rst b/docs/source/cli_help/traverse.rst
index 8b23310..d74e384 100644
--- a/docs/source/cli_help/traverse.rst
+++ b/docs/source/cli_help/traverse.rst
@@ -13,10 +13,10 @@ traverse
.. code-block:: shell
- git machete traverse [-F|--fetch] [-l|--list-commits] [-M|--merge]
- [-n|--no-edit-merge|--no-interactive-rebase] [--no-detect-squash-merges]
- [--[no-]push] [--[no-]push-untracked]
- [--return-to=WHERE] [--start-from=WHERE] [-w|--whole] [-W] [-y|--yes]
+ git machete t[raverse] [-F|--fetch] [-l|--list-commits] [-M|--merge]
+ [-n|--no-edit-merge|--no-interactive-rebase] [--no-detect-squash-merges]
+ [--[no-]push] [--[no-]push-untracked]
+ [--return-to=WHERE] [--start-from=WHERE] [-w|--whole] [-W] [-y|--yes]
Traverses the branch dependency tree in pre-order (i.e. simply in the order as they occur in the definition file).
By default traverse starts from current branch, however this behaviour can be customized using options: ``--start-from=``, ``--whole`` or ``-w``, ``-W``.
diff --git a/git_machete/__init__.py b/git_machete/__init__.py
index bf5afe7..826cf62 100644
--- a/git_machete/__init__.py
+++ b/git_machete/__init__.py
@@ -1,1 +1,1 @@
-__version__ = '3.5.1'
+__version__ = '3.6.0'
diff --git a/git_machete/cli.py b/git_machete/cli.py
index 0a4398c..34d369c 100644
--- a/git_machete/cli.py
+++ b/git_machete/cli.py
@@ -26,7 +26,8 @@ alias_by_command: Dict[str, str] = {
"edit": "e",
"go": "g",
"log": "l",
- "status": "s"
+ "status": "s",
+ "traverse": "t"
}
command_groups: List[Tuple[str, List[str]]] = [
@@ -316,6 +317,7 @@ def create_cli_parser() -> argparse.ArgumentParser:
traverse_parser = subparsers.add_parser(
'traverse',
+ aliases=['t'],
argument_default=argparse.SUPPRESS,
usage=argparse.SUPPRESS,
add_help=False,
@@ -748,7 +750,7 @@ def launch(orig_args: List[str]) -> None:
opt_list_commits=cli_opts.opt_list_commits,
opt_list_commits_with_hashes=cli_opts.opt_list_commits_with_hashes,
opt_no_detect_squash_merges=cli_opts.opt_no_detect_squash_merges)
- elif cmd == "traverse":
+ elif cmd in {"traverse", alias_by_command["traverse"]}:
if cli_opts.opt_start_from not in {"here", "root", "first-root"}:
raise MacheteException(
"Invalid argument for `--start-from`. "
diff --git a/git_machete/docs.py b/git_machete/docs.py
index 513561d..803bb3d 100644
--- a/git_machete/docs.py
+++ b/git_machete/docs.py
@@ -606,10 +606,10 @@ long_docs: Dict[str, str] = {
when detecting if a branch is merged into its upstream (parent).
""",
"traverse": """
- <b>Usage: git machete traverse [-F|--fetch] [-l|--list-commits] [-M|--merge]
- [-n|--no-edit-merge|--no-interactive-rebase] [--no-detect-squash-merges]
- [--[no-]push] [--[no-]push-untracked]
- [--return-to=WHERE] [--start-from=WHERE] [-w|--whole] [-W] [-y|--yes]</b>
+ <b>Usage: git machete t[raverse] [-F|--fetch] [-l|--list-commits] [-M|--merge]
+ [-n|--no-edit-merge|--no-interactive-rebase] [--no-detect-squash-merges]
+ [--[no-]push] [--[no-]push-untracked]
+ [--return-to=WHERE] [--start-from=WHERE] [-w|--whole] [-W] [-y|--yes]</b>
Traverses the branch dependency tree in pre-order (i.e. simply in the order as they occur in the definition file).
By default traverse starts from current branch, however this behaviour can be customized using options: `--starts-from=`, `--whole` or `-w`, `-W`.
| VirtusLab/git-machete | b5ce4b7f67ea347ff0e1357a61d5412027218a87 | diff --git a/git_machete/tests/functional/test_machete.py b/git_machete/tests/functional/test_machete.py
index 24e5542..53cef1e 100644
--- a/git_machete/tests/functional/test_machete.py
+++ b/git_machete/tests/functional/test_machete.py
@@ -538,8 +538,39 @@ class MacheteTester(unittest.TestCase):
@mock.patch('git_machete.utils.run_cmd', mock_run_cmd) # to hide git outputs in tests
def test_traverse_no_push_override(self) -> None:
self.setup_discover_standard_tree()
+ self.repo_sandbox.check_out("hotfix/add-trigger")
+ self.launch_command("t", "-Wy", "--no-push", "--push", "--start-from=here")
+ self.assert_command(
+ ["status", "-l"],
+ """
+ develop
+ |
+ | Allow ownership links
+ | 1st round of fixes
+ x-allow-ownership-link (ahead of origin)
+ | |
+ | | Build arbitrarily long chains
+ | x-build-chain (untracked)
+ |
+ | Call web service
+ | 1st round of fixes
+ | 2nd round of fixes
+ o-call-ws (ahead of origin)
+ |
+ | Drop unneeded SQL constraints
+ x-drop-constraint (untracked)
- self.launch_command("traverse", "-Wy", "--no-push", "--push")
+ master
+ |
+ | HOTFIX Add the trigger (amended)
+ o-hotfix/add-trigger *
+ |
+ | Ignore trailing data (amended)
+ o-ignore-trailing
+ """,
+ )
+ self.repo_sandbox.check_out("ignore-trailing")
+ self.launch_command("t", "-Wy", "--no-push", "--push")
self.assert_command(
["status", "-l"],
"""
| Add `t` as an alias for `traverse` (just as `s` is for `status`); a minimal argparse sketch of the alias mechanism follows after this record. | 0.0 | b5ce4b7f67ea347ff0e1357a61d5412027218a87 | [
"git_machete/tests/functional/test_machete.py::MacheteTester::test_traverse_no_push_override"
] | [
"git_machete/tests/functional/test_machete.py::MacheteTester::test_add",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_advance_with_few_possible_downstream_branches_and_yes_option",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_advance_with_no_downstream_branches",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_advance_with_one_downstream_branch",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_anno_prs",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_branch_reappers_in_definition",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_checkout_prs",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_checkout_prs_freshly_cloned",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_checkout_prs_from_fork_with_deleted_repo",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_discover_traverse_squash",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_github_create_pr",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_github_create_pr_missing_base_branch_on_remote",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_go_down",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_go_first_root_with_downstream",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_go_first_root_without_downstream",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_go_last",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_go_next_successor_exists",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_go_next_successor_on_another_root_tree",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_go_prev_successor_exists",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_go_prev_successor_on_another_root_tree",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_go_root",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_go_up",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_help",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_log",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_retarget_pr",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_show",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_show_down",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_show_first",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_show_last",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_show_next",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_show_prev",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_show_root",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_show_up",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_slide_out",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_slide_out_with_down_fork_point_and_multiple_children_of_last_branch",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_slide_out_with_invalid_down_fork_point",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_slide_out_with_valid_down_fork_point",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_squash_merge",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_squash_with_invalid_fork_point",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_squash_with_valid_fork_point",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_traverse_no_push",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_traverse_no_push_untracked",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_update_with_fork_point_not_specified",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_update_with_fork_point_specified",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_update_with_invalid_fork_point"
] | {
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false
} | 2021-11-22 09:53:54+00:00 | mit | 829 |
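For the record above, a minimal argparse sketch (assumed names, not git-machete's real parser) of why the dispatch must accept the alias too: argparse stores the subcommand exactly as typed, so invoking via the alias yields `t`, not `traverse`; hence the patch's `cmd in {"traverse", alias_by_command["traverse"]}` check.

```python
import argparse

# Subparser with an alias, mirroring the `aliases=['t']` line in the patch.
parser = argparse.ArgumentParser(prog='git machete')
subparsers = parser.add_subparsers(dest='cmd')
subparsers.add_parser('traverse', aliases=['t'])

print(parser.parse_args(['traverse']).cmd)  # prints: traverse
print(parser.parse_args(['t']).cmd)         # prints: t
```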
|
VirtusLab__git-machete-463 | diff --git a/git_machete/client.py b/git_machete/client.py
index b8a55b1..0e44ae6 100644
--- a/git_machete/client.py
+++ b/git_machete/client.py
@@ -1947,28 +1947,27 @@ class MacheteClient:
if pr_from_github:
result.append(pr_from_github)
else:
- if len(prs_list) > 1:
- warn(f"PR #{pr_no} is not found in repository `{org}/{repo}`, skipping.")
- else:
- raise MacheteException(f"PR #{pr_no} is not found in repository `{org}/{repo}`")
+ raise MacheteException(f"PR #{pr_no} is not found in repository `{org}/{repo}`")
if not result:
raise MacheteException(
f"Given PRs: {', '.join(map(str, prs_list))} are not found in repository `{org}/{repo}`")
return result
if all:
if not all_opened_prs_from_github:
- raise MacheteException(f"Currently there is not any pull request opened in repository `{org}/{repo}`")
+ warn(f"Currently there are no pull requests opened in repository `{org}/{repo}`")
+ return []
return all_opened_prs_from_github
elif my and user:
result = [pr for pr in all_opened_prs_from_github if pr.user == user]
if not result:
- raise MacheteException(
- f"Current user {user} has no open pull request in repository `{org}/{repo}`")
+ warn(f"Current user `{user}` has no open pull request in repository `{org}/{repo}`")
+ return []
return result
elif by:
result = [pr for pr in all_opened_prs_from_github if pr.user == by]
if not result:
- raise MacheteException(f"User {by} has no open pull request in repository `{org}/{repo}`")
+ warn(f"User `{by}` has no open pull request in repository `{org}/{repo}`")
+ return []
return result
return []
| VirtusLab/git-machete | d5093f1a6f16a6f834fe71ddae9e4e63a2b18a03 | diff --git a/git_machete/tests/functional/test_machete.py b/git_machete/tests/functional/test_machete.py
index fc4d788..7718ce8 100644
--- a/git_machete/tests/functional/test_machete.py
+++ b/git_machete/tests/functional/test_machete.py
@@ -2220,6 +2220,7 @@ class MacheteTester(unittest.TestCase):
@mock.patch('git_machete.github.GITHUB_REMOTE_PATTERNS', FAKE_GITHUB_REMOTE_PATTERNS)
@mock.patch('git_machete.options.CommandLineOptions', FakeCommandLineOptions)
@mock.patch('git_machete.utils.run_cmd', mock_run_cmd) # to hide git outputs in tests
+ @mock.patch('git_machete.github.__get_github_token', mock__get_github_token)
@mock.patch('urllib.request.Request', git_api_state_for_test_checkout_prs.new_request())
@mock.patch('urllib.request.urlopen', MockContextManager)
def test_github_checkout_prs(self) -> None:
@@ -2416,6 +2417,16 @@ class MacheteTester(unittest.TestCase):
self.assertEqual(e.exception.parameter, expected_error_message,
'Verify that expected error message has appeared when given pull request to checkout does not exists.')
+ with self.assertRaises(MacheteException) as e:
+ self.launch_command('github', 'checkout-prs', '19', '100')
+ if e:
+ self.assertEqual(e.exception.parameter, expected_error_message,
+ 'Verify that expected error message has appeared when one of the given pull requests to checkout does not exists.')
+
+ # check against user with no open pull requests
+ expected_msg = f"Warn: User `tester` has no open pull request in repository `{org}/{repo}`\n"
+ self.assert_command(['github', 'checkout-prs', '--by', 'tester'], expected_msg, strip_indentation=False)
+
# Check against closed pull request with head branch deleted from remote
local_path = popen("mktemp -d")
self.repo_sandbox.new_repo(GitRepositorySandbox.second_remote_path)
| `checkout_github_prs()` should NOT fail if no PRs for the given criteria are found | 0.0 | d5093f1a6f16a6f834fe71ddae9e4e63a2b18a03 | [
"git_machete/tests/functional/test_machete.py::MacheteTester::test_github_checkout_prs"
] | [
"git_machete/tests/functional/test_machete.py::MacheteTester::test_add",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_advance_with_few_possible_downstream_branches_and_yes_option",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_advance_with_no_downstream_branches",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_advance_with_one_downstream_branch",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_anno_prs",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_branch_reappears_in_definition",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_discover_traverse_squash",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_github_checkout_prs_freshly_cloned",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_github_checkout_prs_from_fork_with_deleted_repo",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_github_create_pr",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_github_create_pr_missing_base_branch_on_remote",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_go_down",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_go_first_root_with_downstream",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_go_first_root_without_downstream",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_go_last",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_go_next_successor_exists",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_go_next_successor_on_another_root_tree",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_go_prev_successor_exists",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_go_prev_successor_on_another_root_tree",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_go_root",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_go_up",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_help",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_log",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_retarget_pr",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_show",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_show_down",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_show_first",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_show_last",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_show_next",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_show_prev",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_show_root",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_show_up",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_slide_out",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_slide_out_with_down_fork_point_and_multiple_children_of_last_branch",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_slide_out_with_invalid_down_fork_point",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_slide_out_with_valid_down_fork_point",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_squash_merge",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_squash_with_invalid_fork_point",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_squash_with_valid_fork_point",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_traverse_no_push",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_traverse_no_push_override",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_traverse_no_push_untracked",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_update_with_fork_point_not_specified",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_update_with_fork_point_specified",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_update_with_invalid_fork_point"
] | {
"failed_lite_validators": [
"has_short_problem_statement",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false
} | 2022-03-08 10:15:24+00:00 | mit | 830 |
|
VirtusLab__git-machete-483 | diff --git a/docs/source/cli_help/github.rst b/docs/source/cli_help/github.rst
index 0bfdb25..a5cef47 100644
--- a/docs/source/cli_help/github.rst
+++ b/docs/source/cli_help/github.rst
@@ -25,7 +25,7 @@ Creates, checks out and manages GitHub PRs while keeping them reflected in branc
Check out the head branch of the given pull requests (specified by number),
also traverse chain of pull requests upwards, adding branches one by one to git-machete and check them out locally.
Once the specified pull requests are checked out locally, annotate local branches with corresponding pull request numbers.
- If only one PR is given, then switch the local repository's HEAD to its head branch.
+ If only one PR has been checked out, then switch the local repository's HEAD to its head branch.
**Options:**
diff --git a/git_machete/cli.py b/git_machete/cli.py
index d90f1a2..ec291d9 100644
--- a/git_machete/cli.py
+++ b/git_machete/cli.py
@@ -546,7 +546,9 @@ def launch(orig_args: List[str]) -> None:
branch=branch,
opt_onto=cli_opts.opt_onto,
opt_as_root=cli_opts.opt_as_root,
- opt_yes=cli_opts.opt_yes)
+ opt_yes=cli_opts.opt_yes,
+ verbose=True,
+ switch_head_if_new_branch=True)
elif cmd == "advance":
machete_client.read_definition_file(perform_interactive_slide_out=should_perform_interactive_slide_out)
git.expect_no_operation_in_progress()
diff --git a/git_machete/client.py b/git_machete/client.py
index 7871c51..1a42aa7 100644
--- a/git_machete/client.py
+++ b/git_machete/client.py
@@ -219,7 +219,8 @@ class MacheteClient:
opt_onto: Optional[LocalBranchShortName],
opt_as_root: bool,
opt_yes: bool,
- verbose: bool = True
+ verbose: bool,
+ switch_head_if_new_branch: bool
) -> None:
if branch in self.managed_branches:
raise MacheteException(
@@ -237,7 +238,7 @@ class MacheteClient:
msg = common_line + f"Check out `{branch}` locally?" + get_pretty_choices('y', 'N')
opt_yes_msg = common_line + f"Checking out `{branch}` locally..."
if self.ask_if(msg, opt_yes_msg, opt_yes=opt_yes, verbose=verbose) in ('y', 'yes'):
- self.__git.create_branch(branch, remote_branch.full_name())
+ self.__git.create_branch(branch, remote_branch.full_name(), switch_head=switch_head_if_new_branch)
else:
return
# Not dealing with `onto` here. If it hasn't been explicitly
@@ -256,7 +257,7 @@ class MacheteClient:
current_branch = self.__git.get_current_branch_or_none()
if current_branch and current_branch in self.managed_branches:
opt_onto = current_branch
- self.__git.create_branch(branch, out_of)
+ self.__git.create_branch(branch, out_of, switch_head=switch_head_if_new_branch)
else:
return
@@ -1913,21 +1914,23 @@ class MacheteClient:
opt_as_root=True,
opt_onto=None,
opt_yes=True,
- verbose=verbose)
+ verbose=verbose,
+ switch_head_if_new_branch=False)
else:
self.add(
branch=branch,
opt_onto=reversed_path[index - 1],
opt_as_root=False,
opt_yes=True,
- verbose=verbose)
+ verbose=verbose,
+ switch_head_if_new_branch=False)
if pr not in checked_out_prs:
print(fmt(f"Pull request `#{pr.number}` checked out at local branch `{pr.head}`"))
checked_out_prs.append(pr)
debug('Current GitHub user is ' + (current_user or '<none>'))
self.__sync_annotations_to_definition_file(all_open_prs, current_user, verbose=verbose)
- if pr and len(applicable_prs) == 1 and len(checked_out_prs) == 1:
+ if pr and len(checked_out_prs) == 1:
self.__git.checkout(LocalBranchShortName.of(pr.head))
if verbose:
print(fmt(f"Switched to local branch `{pr.head}`"))
@@ -2266,7 +2269,12 @@ class MacheteClient:
current_branch = self.__git.get_current_branch()
if current_branch not in self.managed_branches:
- self.add(branch=current_branch, opt_onto=opt_onto, opt_as_root=False, opt_yes=opt_yes)
+ self.add(branch=current_branch,
+ opt_onto=opt_onto,
+ opt_as_root=False,
+ opt_yes=opt_yes,
+ verbose=True,
+ switch_head_if_new_branch=True)
if current_branch not in self.managed_branches:
raise MacheteException(
"Command `github create-pr` can NOT be executed on the branch"
diff --git a/git_machete/docs.py b/git_machete/docs.py
index f6ad05e..b697234 100644
--- a/git_machete/docs.py
+++ b/git_machete/docs.py
@@ -351,7 +351,7 @@ long_docs: Dict[str, str] = {
Check out the head branch of the given pull requests (specified by number),
also traverse chain of pull requests upwards, adding branches one by one to git-machete and check them out locally.
Once the specified pull requests are checked out locally, annotate local branches with corresponding pull request numbers.
- If only one PR is given, then switch the local repository's HEAD to its head branch.
+ If only one PR has been checked out, then switch the local repository's HEAD to its head branch.
<b>Options:</b>
<b>--all</b> Checkout all open PRs.
diff --git a/git_machete/git_operations.py b/git_machete/git_operations.py
index d83eaed..ca3a753 100644
--- a/git_machete/git_operations.py
+++ b/git_machete/git_operations.py
@@ -580,8 +580,10 @@ class GitContext:
))
return self.__reflogs_cached[branch]
- def create_branch(self, branch: LocalBranchShortName, out_of_revision: AnyRevision) -> None:
- self._run_git("checkout", "-b", branch, out_of_revision)
+ def create_branch(self, branch: LocalBranchShortName, out_of_revision: AnyRevision, switch_head: bool) -> None:
+ self._run_git("branch", branch, out_of_revision)
+ if switch_head:
+ self._run_git("checkout", branch)
self.flush_caches() # the repository state has changed because of a successful branch creation, let's defensively flush all the caches
def flush_caches(self) -> None:
| VirtusLab/git-machete | 5a2b68e1a7a7fff7e226f2e6ff96f7936bf98439 | diff --git a/git_machete/tests/functional/test_machete.py b/git_machete/tests/functional/test_machete.py
index b76db02..38a3d45 100644
--- a/git_machete/tests/functional/test_machete.py
+++ b/git_machete/tests/functional/test_machete.py
@@ -50,8 +50,7 @@ def mock_exit_script(status_code: Optional[int] = None, error: Optional[BaseExce
def mock_fetch_ref(cls: Any, remote: str, ref: str) -> None:
branch: LocalBranchShortName = LocalBranchShortName.of(ref[ref.index(':') + 1:])
- git.create_branch(branch, get_current_commit_hash())
- git.checkout(branch)
+ git.create_branch(branch, get_current_commit_hash(), switch_head=True)
def mock_run_cmd(cmd: str, *args: str, **kwargs: Any) -> int:
@@ -2404,13 +2403,13 @@ class MacheteTester(unittest.TestCase):
| |
| o-chore/redundant_checks PR #18 (github_user)
|
- o-enhance/add_user * PR #19 (github_user)
+ o-enhance/add_user PR #19 (github_user)
bugfix/add_user
|
o-testing/add_user PR #22 (github_user)
|
- o-chore/comments PR #24 (github_user)
+ o-chore/comments * PR #24 (github_user)
"""
)
| `github checkout-prs --by=...` switches to ALL checked out branches one by one
Installed version from PR #435 (commit `6ee6681e33cf037e8c86b1cf340a443b4d77fcee`).
Testing on this very repo:
```
➜ 12:29 ~/git-machete develop $ g m s
master v3.7.2
│
└─develop v3.8.0
➜ 12:29 ~/git-machete develop $ gb
* develop
master
➜ 12:30 ~/git-machete develop $ g m github checkout-prs --by=amalota
Checking for open GitHub PRs...
branch 'feature/command_clean' set up to track 'origin/feature/command_clean'.
Switched to a new branch 'feature/command_clean'
Pull request #435 checked out at local branch feature/command_clean
branch 'code_quality/github_token_msg' set up to track 'origin/code_quality/github_token_msg'.
Switched to a new branch 'code_quality/github_token_msg'
Pull request #469 checked out at local branch code_quality/github_token_msg
branch 'feature/advance_with_push_to_remote' set up to track 'origin/feature/advance_with_push_to_remote'.
Switched to a new branch 'feature/advance_with_push_to_remote'
Pull request #473 checked out at local branch feature/advance_with_push_to_remote
➜ 12:30 ~/git-machete feature/advance_with_push_to_remote PR #473 (amalota) $
```
So... first HEAD has been switched to `feature/command_clean`, then to `code_quality/github_token_msg`, and finally to `feature/advance_with_push_to_remote`.
This is contrary to the docs in github.rst:
```
If only one PR is given, then switch the local repository's HEAD to its head branch.
```
In fact, the doc should probably say:
```
If only one PR has been checked out, then switch the local repository's HEAD to its head branch.
```
and `github checkout-prs` should behave adequately (i.e. for the sample above, it should **never** switch HEAD); a minimal sketch of this rule follows after this record.
Please provide regression tests as well. | 0.0 | 5a2b68e1a7a7fff7e226f2e6ff96f7936bf98439 | [
"git_machete/tests/functional/test_machete.py::MacheteTester::test_github_checkout_prs",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_github_checkout_prs_from_fork_with_deleted_repo"
] | [
"git_machete/tests/functional/test_machete.py::MacheteTester::test_add",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_advance_with_few_possible_downstream_branches_and_yes_option",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_advance_with_no_downstream_branches",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_advance_with_one_downstream_branch",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_anno_prs",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_branch_reappears_in_definition",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_clean",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_discover_traverse_squash",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_github_checkout_prs_freshly_cloned",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_github_create_pr",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_github_create_pr_missing_base_branch_on_remote",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_github_sync",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_go_down",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_go_first_root_with_downstream",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_go_first_root_without_downstream",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_go_last",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_go_next_successor_exists",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_go_next_successor_on_another_root_tree",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_go_prev_successor_exists",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_go_prev_successor_on_another_root_tree",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_go_root",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_go_up",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_help",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_log",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_retarget_pr",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_show",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_show_down",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_show_first",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_show_last",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_show_next",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_show_prev",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_show_root",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_show_up",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_slide_out",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_slide_out_with_down_fork_point_and_multiple_children_of_last_branch",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_slide_out_with_invalid_down_fork_point",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_slide_out_with_valid_down_fork_point",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_squash_merge",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_squash_with_invalid_fork_point",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_squash_with_valid_fork_point",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_traverse_no_push",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_traverse_no_push_override",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_traverse_no_push_untracked",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_update_with_fork_point_not_specified",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_update_with_fork_point_specified",
"git_machete/tests/functional/test_machete.py::MacheteTester::test_update_with_invalid_fork_point"
] | {
"failed_lite_validators": [
"has_issue_reference",
"has_git_commit_hash",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false
} | 2022-04-05 12:58:57+00:00 | mit | 831 |
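As flagged in the record above, HEAD should move only when exactly one PR actually gets checked out. A minimal sketch of that rule; `PR` and `checkout` are illustrative stand-ins, not git-machete's real types:

```python
from dataclasses import dataclass
from typing import List

@dataclass
class PR:
    number: int
    head: str

def checkout(branch: str) -> None:
    # Stand-in for the real `git checkout`.
    print(f"Switched to local branch `{branch}`")

def maybe_switch_head(checked_out_prs: List[PR]) -> None:
    # Switch HEAD only for a single checked-out PR; --by/--all runs that
    # fetch several PRs leave HEAD where it was.
    if len(checked_out_prs) == 1:
        checkout(checked_out_prs[0].head)

maybe_switch_head([PR(469, 'code_quality/github_token_msg'),
                   PR(473, 'feature/advance_with_push_to_remote')])  # no switch
maybe_switch_head([PR(473, 'feature/advance_with_push_to_remote')])  # switches
```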
|
VirtusLab__git-machete-534 | diff --git a/git_machete/client.py b/git_machete/client.py
index d063583..19ef0c4 100644
--- a/git_machete/client.py
+++ b/git_machete/client.py
@@ -2109,7 +2109,7 @@ class MacheteClient:
description_path = self.__git.get_main_git_subpath('info', 'description')
description: str = utils.slurp_file_or_empty(description_path)
- ok_str = ' <green><b>OK</b></green>'
+ ok_str = ' <green>-> <b>OK</b></green>'
print(fmt(f'Creating a {"draft " if opt_draft else ""}PR from `{head}` to `{base}`...'), end='', flush=True)
pr: GitHubPullRequest = create_pull_request(org, repo, head=head, base=base, title=commits[0].subject,
description=description, draft=opt_draft)
diff --git a/git_machete/github.py b/git_machete/github.py
index f8dc8c4..49f089b 100644
--- a/git_machete/github.py
+++ b/git_machete/github.py
@@ -181,8 +181,9 @@ def __fire_github_api_request(method: str, path: str, token: Optional[str], requ
'Visit `https://github.com/settings/tokens` to generate a new one.')
elif err.code == http.HTTPStatus.NOT_FOUND:
raise MacheteException(
- f'Given endpoint: {url}, not found in GitHub or you don\'t have'
- f' the permission to access it (expired token?).') # TODO (#164): make dedicated exception here
+ f'`{method} {url}` request ended up in 404 response from GitHub. A valid GitHub API token is required.\n'
+ f'Provide a GitHub API token with `repo` access via one of the: {get_github_token_possible_providers()} '
+ 'Visit `https://github.com/settings/tokens` to generate a new one.') # TODO (#164): make dedicated exception here
else:
first_line = fmt(f'GitHub API returned {err.code} HTTP status with error message: `{err.reason}`\n')
raise MacheteException(first_line + "Please open an issue regarding this topic under link: https://github.com/VirtusLab/git-machete/issues/new")
| VirtusLab/git-machete | 7a72f615bfa9bd8e9c436dfb726216dfa07d0839 | diff --git a/tests/test_github.py b/tests/test_github.py
index 90fe555..d8fce10 100644
--- a/tests/test_github.py
+++ b/tests/test_github.py
@@ -454,7 +454,7 @@ class TestGithub:
expected_msg = ("Fetching origin...\n"
"Warn: Base branch for this PR (`feature/api_handling`) is not found on remote, pushing...\n"
- "Creating a PR from `feature/api_exception_handling` to `feature/api_handling`... OK, see www.github.com\n")
+ "Creating a PR from `feature/api_exception_handling` to `feature/api_handling`... -> OK, see www.github.com\n")
assert_command(['github', 'create-pr'], expected_msg, strip_indentation=False)
assert_command(
['status'],
| Does `/pulls` endpoint for a public GitHub repository really require authentication?
As @mkondratek reported:
I've tried to use `create-pr` with that result:
```sh
mikko@mikko-MS-7C91:~/IdeaProjects/git-machete-intellij-plugin$ git machete github create-pr
Fetching origin...
Creating a PR from hook-executors-improvements to develop...Given endpoint: https://api.github.com/repos/VirtusLab/git-machete-intellij-plugin/pulls, not found in GitHub or you don't have the permission to access it (expired token?).
```
I have no token (generated and) set, so that's the reason. After that, it works.
_Originally posted by @mkondratek in https://github.com/VirtusLab/git-machete/issues/493#issuecomment-1113174706_
---
My addition: note that https://api.github.com/repos/VirtusLab/git-machete-intellij-plugin/pulls can be readily accessed from a browser and returns legit results. Let's check why this error happens when accessing the GitHub API from the git-machete CLI. If this can't be circumvented (e.g. for private repos), let's at least improve the error message (since it doesn't even clearly state that the token might be missing, only mentions expiry) :thinking: A small reproduction sketch follows after this record. | 0.0 | 7a72f615bfa9bd8e9c436dfb726216dfa07d0839 | [
"tests/test_github.py::TestGithub::test_github_create_pr_missing_base_branch_on_remote"
] | [
"tests/test_github.py::TestGithub::test_github_retarget_pr",
"tests/test_github.py::TestGithub::test_github_anno_prs",
"tests/test_github.py::TestGithub::test_github_create_pr",
"tests/test_github.py::TestGithub::test_github_checkout_prs",
"tests/test_github.py::TestGithub::test_github_checkout_prs_freshly_cloned",
"tests/test_github.py::TestGithub::test_github_checkout_prs_from_fork_with_deleted_repo",
"tests/test_github.py::TestGithub::test_github_sync"
] | {
"failed_lite_validators": [
"has_hyperlinks",
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false
} | 2022-05-27 10:32:01+00:00 | mit | 832 |
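A small reproduction sketch for the question in the record above: fire the same endpoint with and without an Authorization header and print what GitHub answers. This is diagnostic scaffolding under assumptions (plain urllib, a placeholder token); it does not claim to explain why the CLI saw the failure.

```python
from typing import Optional
import urllib.error
import urllib.request

URL = 'https://api.github.com/repos/VirtusLab/git-machete-intellij-plugin/pulls'

def probe(token: Optional[str]) -> None:
    label = 'authenticated' if token else 'anonymous'
    headers = {'Authorization': f'token {token}'} if token else {}
    request = urllib.request.Request(URL, headers=headers)
    try:
        with urllib.request.urlopen(request) as response:
            print(label, '->', response.status)
    except urllib.error.HTTPError as err:
        # Depending on the token's state, GitHub may answer 401, 403 (rate
        # limiting) or even 404 for resources the token cannot see; hence
        # the patch folds "a valid token is required" into the 404 message.
        print(label, '->', err.code, err.reason)

probe(None)                 # the public endpoint, as tested from a browser
probe('<your-token-here>')  # placeholder; supply a real token to compare
```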
|
WIPACrepo__iceprod-339 | diff --git a/iceprod/core/functions.py b/iceprod/core/functions.py
index 964f20b9..619cc991 100644
--- a/iceprod/core/functions.py
+++ b/iceprod/core/functions.py
@@ -270,16 +270,11 @@ def get_local_ip_address():
def gethostname():
"""Get hostname of this computer."""
ret = socket.getfqdn()
- try:
- resp = requests.get('http://simprod.icecube.wisc.edu/downloads/getip.php', timeout=1)
- resp.raise_for_status()
- logging.info('getip: %r', resp.text)
- ret2 = resp.text.split(' ')[-1]
- if len(ret2.split('.')) > 1:
- ret = '.'.join(ret.split('.')[:1]+ret2.split('.')[1:])
- except Exception:
- logging.info('error getting global ip', exc_info=True)
- return ret
+ ret2 = socket.gethostname()
+ if len(ret2) > len(ret):
+ return ret2
+ else:
+ return ret
@contextmanager
| WIPACrepo/iceprod | 168fdf0ddea222567e6c87401d11b121d7369927 | diff --git a/tests/core/functions_test.py b/tests/core/functions_test.py
index ffe9f4ac..9ece4946 100644
--- a/tests/core/functions_test.py
+++ b/tests/core/functions_test.py
@@ -672,29 +672,25 @@ class functions_test(AsyncTestCase):
logging.info('%s',e)
@patch('socket.getfqdn')
- @requests_mock.mock()
+ @patch('socket.gethostname')
@unittest_reporter
- def test_301_gethostname(self, fqdn, http_mock):
+ def test_301_gethostname(self, fqdn, hostname):
fqdn.return_value = 'myhost'
- http_mock.get('/downloads/getip.php', text='123 myhost')
+ hostname.return_value = 'myhost'
host = iceprod.core.functions.gethostname()
self.assertEqual(host, 'myhost')
- self.assertEqual(http_mock.call_count, 1)
- http_mock.get('/downloads/getip.php', text='123 dyn.test.com')
+ hostname.return_value = 'myhost.test.com'
host = iceprod.core.functions.gethostname()
self.assertEqual(host, 'myhost.test.com')
- self.assertEqual(http_mock.call_count, 2)
fqdn.return_value = 'myhost.foo.bar'
host = iceprod.core.functions.gethostname()
self.assertEqual(host, 'myhost.test.com')
- self.assertEqual(http_mock.call_count, 3)
fqdn.return_value = 'myhost.foo.bar.baz'
host = iceprod.core.functions.gethostname()
- self.assertEqual(host, 'myhost.test.com')
- self.assertEqual(http_mock.call_count, 4)
+ self.assertEqual(host, 'myhost.foo.bar.baz')
@unittest_reporter
def test_302_isurl(self):
| remove getip.php request
This request: http://simprod.icecube.wisc.edu/downloads/getip.php
is on an old SL6 server. We'd like to decommission that server. Replace this lookup with something else, or remove it entirely; a sketch of one network-free option follows after this record. | 0.0 | 168fdf0ddea222567e6c87401d11b121d7369927 | [
"tests/core/functions_test.py::functions_test::test_301_gethostname"
] | [
"tests/core/functions_test.py::functions_test::test_001_uncompress",
"tests/core/functions_test.py::functions_test::test_002_uncompress_tar",
"tests/core/functions_test.py::functions_test::test_003_uncompress_tar",
"tests/core/functions_test.py::functions_test::test_020_iscompressed",
"tests/core/functions_test.py::functions_test::test_021_istarred",
"tests/core/functions_test.py::functions_test::test_100_md5sum",
"tests/core/functions_test.py::functions_test::test_101_check_md5sum",
"tests/core/functions_test.py::functions_test::test_102_sha1sum",
"tests/core/functions_test.py::functions_test::test_103_check_sha1sum",
"tests/core/functions_test.py::functions_test::test_104_sha256sum",
"tests/core/functions_test.py::functions_test::test_105_check_sha256sum",
"tests/core/functions_test.py::functions_test::test_106_sha512sum",
"tests/core/functions_test.py::functions_test::test_107_check_sha512sum",
"tests/core/functions_test.py::functions_test::test_200_removedirs",
"tests/core/functions_test.py::functions_test::test_201_copy",
"tests/core/functions_test.py::functions_test::test_300_getInterfaces",
"tests/core/functions_test.py::functions_test::test_302_isurl",
"tests/core/functions_test.py::functions_test::test_304_download",
"tests/core/functions_test.py::functions_test::test_305_download",
"tests/core/functions_test.py::functions_test::test_320_download",
"tests/core/functions_test.py::functions_test::test_404_upload",
"tests/core/functions_test.py::functions_test::test_405_upload",
"tests/core/functions_test.py::functions_test::test_410_upload",
"tests/core/functions_test.py::functions_test::test_420_upload",
"tests/core/functions_test.py::functions_test::test_504_delete",
"tests/core/functions_test.py::functions_test::test_505_delete",
"tests/core/functions_test.py::functions_test::test_510_delete"
] | {
"failed_lite_validators": [
"has_short_problem_statement",
"has_hyperlinks"
],
"has_test_patch": true,
"is_lite": false
} | 2023-04-19 18:14:54+00:00 | mit | 833 |
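For the record above: the patch's replacement, restated as a standalone sketch. The design choice, as I read it, is to drop the external HTTP dependency entirely and prefer whichever local lookup yields the longer (presumably more fully qualified) name; that reading of the heuristic is an interpretation, not a statement from the maintainers.

```python
import socket

def gethostname() -> str:
    # Network-free replacement for the getip.php lookup: compare the two
    # local resolutions and keep the more qualified-looking one.
    fqdn = socket.getfqdn()
    name = socket.gethostname()
    return name if len(name) > len(fqdn) else fqdn

print(gethostname())
```

The trade-off is trusting local resolver configuration: a host with no DNS suffix now reports its short name instead of a globally resolved one.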
|
WPI-MMR__gym_solo-38 | diff --git a/examples/solo8_vanilla/interactive_pos_control.py b/examples/solo8_vanilla/interactive_pos_control.py
index 8773e53..446e3c4 100644
--- a/examples/solo8_vanilla/interactive_pos_control.py
+++ b/examples/solo8_vanilla/interactive_pos_control.py
@@ -17,8 +17,7 @@ if __name__ == '__main__':
env.obs_factory.register_observation(obs.TorsoIMU(env.robot))
env.reward_factory.register_reward(1,rewards.UprightReward(env.robot))
- env.termination_factory.register_termination(
- terms.TimeBasedTermination(10000000))
+ env.termination_factory.register_termination(terms.PerpetualTermination())
joint_params = []
num_joints = env.client.getNumJoints(env.robot)
diff --git a/examples/solo8_vanilla/observation_dump.py b/examples/solo8_vanilla/observation_dump.py
index 68bdde0..3dc3df9 100644
--- a/examples/solo8_vanilla/observation_dump.py
+++ b/examples/solo8_vanilla/observation_dump.py
@@ -19,8 +19,7 @@ if __name__ == '__main__':
env.obs_factory.register_observation(obs.TorsoIMU(env.robot))
env.reward_factory.register_reward(1,rewards.UprightReward(env.robot))
- env.termination_factory.register_termination(
- terms.TimeBasedTermination(100000000))
+ env.termination_factory.register_termination(terms.PerpetualTermination())
try:
print("""\n
diff --git a/gym_solo/core/termination.py b/gym_solo/core/termination.py
index 6135f8a..72a7b33 100644
--- a/gym_solo/core/termination.py
+++ b/gym_solo/core/termination.py
@@ -80,4 +80,19 @@ class TimeBasedTermination(Termination):
Otherwise return false.
"""
self.step_delta += 1
- return self.step_delta > self.max_step_delta
\ No newline at end of file
+ return self.step_delta > self.max_step_delta
+
+
+class PerpetualTermination(Termination):
+ """Termination condition that never ends. Useful for demos / testing."""
+ def reset(self):
+ """Unneeded."""
+ pass
+
+ def is_terminated(self) -> bool:
+ """Always returns that the episode is *not* terminated.
+
+ Returns:
+ bool: False
+ """
+ return False
\ No newline at end of file
| WPI-MMR/gym_solo | fdf8acff06a8d143e0d9890eb15d86fc643a6843 | diff --git a/gym_solo/core/test_termination_conditions.py b/gym_solo/core/test_termination_conditions.py
index bad6698..a6979b4 100644
--- a/gym_solo/core/test_termination_conditions.py
+++ b/gym_solo/core/test_termination_conditions.py
@@ -27,4 +27,13 @@ class TestTimeBasedTermination(unittest.TestCase):
self.assertEqual(i+1, term.step_delta)
self.assertEqual(True, term.is_terminated())
- self.assertEqual(max_step_delta + 1, term.step_delta)
\ No newline at end of file
+ self.assertEqual(max_step_delta + 1, term.step_delta)
+
+
+class TestPerpetualTermination(unittest.TestCase):
+ def test_is_terminated(self):
+ term = termination.PerpetualTermination()
+
+ # Arbitrary count, just need to ensure that always returns False
+ for i in range(1000):
+ self.assertFalse(term.is_terminated())
\ No newline at end of file
| Create Never Ending Termination
A couple of PRs have this issue (#33, #35), but if a user wants to run gym indefinitely, they would need to create a `TimeBasedTermination` with a really big number. While that works, it's pretty janky, and we should just make a `NoTermination` that goes on forever instead. | 0.0 | fdf8acff06a8d143e0d9890eb15d86fc643a6843 | [
"gym_solo/core/test_termination_conditions.py::TestPerpetualTermination::test_is_terminated"
] | [
"gym_solo/core/test_termination_conditions.py::TestTimeBasedTermination::test_attributes",
"gym_solo/core/test_termination_conditions.py::TestTimeBasedTermination::test_is_terminated",
"gym_solo/core/test_termination_conditions.py::TestTimeBasedTermination::test_reset"
] | {
"failed_lite_validators": [
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false
} | 2021-01-16 19:53:13+00:00 | mit | 834 |
|
WPI-MMR__gym_solo-6 | diff --git a/gym_solo/core/rewards.py b/gym_solo/core/rewards.py
new file mode 100644
index 0000000..d364f98
--- /dev/null
+++ b/gym_solo/core/rewards.py
@@ -0,0 +1,63 @@
+from abc import ABC, abstractmethod
+from dataclasses import dataclass
+from typing import List
+
+from gym_solo import solo_types
+
+
+class Reward(ABC):
+ @abstractmethod
+ def compute(self) -> solo_types.reward:
+ """Compute the reward for the current state.
+
+ Returns:
+ solo_types.reward: The reward evalulated at the current state.
+ """
+ pass
+
+
+@dataclass
+class _WeightedReward:
+ reward: Reward
+ weight: float
+
+
+class RewardFactory:
+ """A factory to dynamically create rewards.
+
+ Note that this factory is currently implemented to combined rewards via
+ a linear combination. For example, if the user wanted to register rewards
+ r1, r2, and r3, the final reward would be r1 + r2 + r3.
+
+ Obviously, you can add coefficients to the rewards, and that functionality
+ is further explained in register_reward() below.
+
+ If you need more functionality then a linear combination (exponential
+ temporal decay), then it's probably in your best interest to implement that
+ in a custom Reward.
+ """
+ def __init__(self):
+ """Create a new RewardFactory."""
+ self._rewards: List[_WeightedReward] = []
+
+ def register_reward(self, weight: float, reward: Reward):
+ """Register a reward to be computed per state.
+
+ Args:
+ weight (float): The weight to be applied to this reward when it is
+ is combined linearly with the other rewards. The domain for this
+ value is (-∞, ∞).
+ reward (Reward): A Reward object which .compute() will be called on at
+ reward computation time.
+ """
+ self._rewards.append(_WeightedReward(reward=reward, weight=weight))
+
+ def get_reward(self) -> float:
+ """Evaluate the current state and get the combined reward.
+
+ Returns:
+ float: The reward from the current state. Note that this reward is a
+ combination of multiple atomic sub-rewards, as explained by the
+ strategies earlier.
+ """
+ return sum(wr.weight * wr.reward.compute() for wr in self._rewards)
\ No newline at end of file
diff --git a/gym_solo/solo_types.py b/gym_solo/solo_types.py
index 85dc7ae..365d7e8 100644
--- a/gym_solo/solo_types.py
+++ b/gym_solo/solo_types.py
@@ -3,4 +3,7 @@ from typing import List
import numpy as np
# A state observation
-obs = np.ndarray
\ No newline at end of file
+obs = np.ndarray
+
+# A reward after a step
+reward = float
\ No newline at end of file
| WPI-MMR/gym_solo | e4bcb11fb1b18da997494d56acf95bdd512a076a | diff --git a/gym_solo/core/test_rewards_factory.py b/gym_solo/core/test_rewards_factory.py
new file mode 100644
index 0000000..fd70bea
--- /dev/null
+++ b/gym_solo/core/test_rewards_factory.py
@@ -0,0 +1,35 @@
+import unittest
+from gym_solo.core import rewards
+
+from parameterized import parameterized
+
+
+class TestReward(rewards.Reward):
+ def __init__(self, return_value):
+ self._return_value = return_value
+
+ def compute(self):
+ return self._return_value
+
+
+class TestRewardsFactory(unittest.TestCase):
+ def test_empty(self):
+ rf = rewards.RewardFactory()
+ self.assertListEqual(rf._rewards, [])
+
+ @parameterized.expand([
+ ('single', {1: 2.5}, 2.5),
+ ('two_happy', {1: 1, 2: 2}, 5),
+ ('0-weight', {0: 1, 2: 2}, 4),
+ ('negative-weight', {-1: 1, 2: 2}, 3),
+ ('three', {1: 1, 2: 2, 3: 3}, 14),
+ ])
+ def test_register_and_compute(self, name, rewards_dict, expected_reward):
+ rf = rewards.RewardFactory()
+ for weight, reward in rewards_dict.items():
+ rf.register_reward(weight, TestReward(reward))
+ self.assertEqual(rf.get_reward(), expected_reward)
+
+
+if __name__ == '__main__':
+ unittest.main()
\ No newline at end of file
| Create Rewards Factory
Similar to the observation factory, the reward factory should be able to take in `Reward` objects (analogous to `Observation` objects), evaluate the rewards for the current state, and combine them.
I'm thinking of making the final reward a linear combination of the registered rewards. With that in mind, consider the following example:
```python
r1 = Reward()
r2 = Reward()
r3 = Reward()
rf = RewardFactory()
rf.register_reward(-1, r1)
rf.register_reward(.1, r2)
rf.register_reward(.9, r3)
```
Notice that `register_reward()` has two args: `weight: float` and `reward: gym_solo.core.rewards.Reward`. Thus, the final reward would evaluate to: `-r1() + 0.1 * r2() + 0.9 * r3()`. Figure that if you need functionality more elaborate than a linear combination, you should be offloading that processing into a `Reward` class. A runnable version of this arithmetic follows after this record.
@mahajanrevant thoughts on this? lmk if you think we need anything stronger than linear combinations in the `RewardFactory`. | 0.0 | e4bcb11fb1b18da997494d56acf95bdd512a076a | [
"gym_solo/core/test_rewards_factory.py::TestRewardsFactory::test_empty",
"gym_solo/core/test_rewards_factory.py::TestRewardsFactory::test_register_and_compute_0_single",
"gym_solo/core/test_rewards_factory.py::TestRewardsFactory::test_register_and_compute_1_two_happy",
"gym_solo/core/test_rewards_factory.py::TestRewardsFactory::test_register_and_compute_2_0_weight",
"gym_solo/core/test_rewards_factory.py::TestRewardsFactory::test_register_and_compute_3_negative_weight",
"gym_solo/core/test_rewards_factory.py::TestRewardsFactory::test_register_and_compute_4_three"
] | [] | {
"failed_lite_validators": [
"has_added_files"
],
"has_test_patch": true,
"is_lite": false
} | 2020-10-27 21:14:37+00:00 | mit | 835 |
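A runnable version of the linear-combination arithmetic from the record above. `ConstantReward` is a hypothetical stand-in, not part of the library; the import path matches the record's test file.

```python
from gym_solo.core import rewards

class ConstantReward(rewards.Reward):
    """Hypothetical Reward that always returns a fixed value."""
    def __init__(self, value: float):
        self._value = value

    def compute(self) -> float:
        return self._value

rf = rewards.RewardFactory()
rf.register_reward(-1, ConstantReward(1.0))  # contributes -1 * 1.0 = -1.0
rf.register_reward(.1, ConstantReward(2.0))  # contributes 0.1 * 2.0 =  0.2
rf.register_reward(.9, ConstantReward(3.0))  # contributes 0.9 * 3.0 =  2.7
assert abs(rf.get_reward() - 1.9) < 1e-9     # -1.0 + 0.2 + 2.7 == 1.9
```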
|
Workday__prism-python-8 | diff --git a/.travis.yml b/.travis.yml
index 4da15ef..478c235 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -7,4 +7,4 @@ python:
install:
- pip install .
script:
-- echo "skipping tests"
+- pytest
diff --git a/environment.yml b/environment.yml
index de104b0..9049cf6 100644
--- a/environment.yml
+++ b/environment.yml
@@ -4,3 +4,4 @@ channels:
dependencies:
- python=3.7
- requests
+ - pytest
diff --git a/prism/data/schema.json b/prism/data/schema.json
new file mode 100644
index 0000000..896c055
--- /dev/null
+++ b/prism/data/schema.json
@@ -0,0 +1,42 @@
+{
+ "fields": [
+ {
+ "defaultValue": "textField",
+ "description": "this is a Text Field",
+ "name": "State2",
+ "parseFormat": "",
+ "precision": 0,
+ "ordinal": 1,
+ "scale": 0,
+ "type": {
+ "id": "Schema_Field_Type=Text"
+ }
+ },
+ {
+ "defaultValue": "0",
+ "description": "this is an Integer Field",
+ "name": "Population2",
+ "parseFormat": "",
+ "precision": 9,
+ "ordinal": 2,
+ "scale": 0,
+ "type": {
+ "id": "Schema_Field_Type=Numeric"
+ }
+ }
+ ],
+ "parseOptions": {
+ "charset": {
+ "id": "Encoding=UTF-8"
+ },
+ "fieldsDelimitedBy": ",",
+ "fieldsEnclosedBy": "\"",
+ "headerLinesToIgnore": 1,
+ "type": {
+ "id": "Schema_File_Type=Delimited"
+ }
+ },
+ "schemaVersion": {
+ "id": "Schema_Version=1.0"
+ }
+}
\ No newline at end of file
diff --git a/setup.py b/setup.py
index f8e5ce8..5ea16f2 100644
--- a/setup.py
+++ b/setup.py
@@ -13,7 +13,13 @@ setup(
author_email='[email protected]',
url='https://github.com/Workday/prism-python',
packages=['prism'],
+ package_data={'prism': ['data/*']},
install_requires=requirements,
+ extras_require={
+ 'dev': [
+ 'pytest',
+ ]
+ },
keywords='prism',
classifiers=[
'Programming Language :: Python :: 3.6',
| Workday/prism-python | 11b6c089a2d2c5b65e4ecc1caf57ccf30e426612 | diff --git a/tests/conftest.py b/tests/conftest.py
new file mode 100644
index 0000000..317f19d
--- /dev/null
+++ b/tests/conftest.py
@@ -0,0 +1,11 @@
+import os
+import pytest
+
[email protected]
+def rootdir():
+ return os.path.join(os.path.dirname(os.path.abspath(__file__)), '..')
+
[email protected]
+def schema_file(rootdir):
+ """Path to example JSON schema"""
+ return os.path.join(rootdir, 'prism', 'data', 'schema.json')
diff --git a/tests/test_prism.py b/tests/test_prism.py
new file mode 100644
index 0000000..bfaad2f
--- /dev/null
+++ b/tests/test_prism.py
@@ -0,0 +1,5 @@
+import prism
+
+def test_load_schema(schema_file):
+ schema = prism.load_schema(schema_file)
+ assert type(schema) is dict
| Add PyTest
Add a simple unit test to make sure the package successfully imports. Unit tests can be expanded in the future to offer greater coverage. | 0.0 | 11b6c089a2d2c5b65e4ecc1caf57ccf30e426612 | [
"tests/test_prism.py::test_load_schema"
] | [] | {
"failed_lite_validators": [
"has_short_problem_statement",
"has_added_files",
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false
} | 2019-12-09 23:44:53+00:00 | apache-2.0 | 836 |
|
WorksApplications__SudachiPy-158 | diff --git a/sudachipy/dictionarylib/dictionarybuilder.py b/sudachipy/dictionarylib/dictionarybuilder.py
index 5f3512a..bde5871 100644
--- a/sudachipy/dictionarylib/dictionarybuilder.py
+++ b/sudachipy/dictionarylib/dictionarybuilder.py
@@ -334,7 +334,7 @@ class DictionaryBuilder(object):
@staticmethod
def __is_id(text):
- return re.match(r'U?\d+', text)
+ return re.match(r'U?\d+$', text)
def parse_id(self, text):
if text.startswith('U'):
diff --git a/sudachipy/dictionarylib/doublearraylexicon.py b/sudachipy/dictionarylib/doublearraylexicon.py
index 6e1a7ce..fb77ee8 100644
--- a/sudachipy/dictionarylib/doublearraylexicon.py
+++ b/sudachipy/dictionarylib/doublearraylexicon.py
@@ -78,14 +78,22 @@ class DoubleArrayLexicon(Lexicon):
return self.word_params.size
def get_word_id(self, headword: str, pos_id: int, reading_form: str) -> int:
+ for wid, _ in self.lookup(headword.encode('utf-8'), 0):
+ if self._compare_word_id(wid, headword, pos_id, reading_form):
+ return wid
+
for wid in range(self.word_infos.size()):
- info = self.word_infos.get_word_info(wid)
- if info.surface == headword \
- and info.pos_id == pos_id \
- and info.reading_form == reading_form:
+ if self._compare_word_id(wid, headword, pos_id, reading_form):
return wid
+
return -1
+ def _compare_word_id(self, wid: int, headword: str, pos_id: int, reading_form: str) -> bool:
+ info = self.word_infos.get_word_info(wid)
+ return info.surface == headword \
+ and info.pos_id == pos_id \
+ and info.reading_form == reading_form
+
def get_dictionary_id(self, word_id: int) -> int:
return 0
| WorksApplications/SudachiPy | 7e5b501111920cef057b821412af595db248a7b8 | diff --git a/tests/dictionarylib/test_dictionarybuilder.py b/tests/dictionarylib/test_dictionarybuilder.py
index f1ead7a..a07a647 100644
--- a/tests/dictionarylib/test_dictionarybuilder.py
+++ b/tests/dictionarylib/test_dictionarybuilder.py
@@ -39,6 +39,8 @@ class TestDictionaryBuilder(TestCase):
wf.write("東京都,0,0,0,東京都,名詞,固有名詞,地名,一般,*,*,ヒガシキョウト,東京都,*,B,\"東,名詞,普通名詞,一般,*,*,*,ヒガシ/2\",*,1/2,1/2\n")
wf.write("東,-1,-1,0,東,名詞,普通名詞,一般,*,*,*,ヒガシ,ひがし,*,A,*,*,*,*\n")
wf.write("京都,0,0,0,京都,名詞,固有名詞,地名,一般,*,*,キョウト,京都,*,A,*,*,*,*\n")
+ wf.write("1,-1,-1,0,1,名詞,数詞,*,*,*,*,イチ,1,*,A,*,*,*,*\n")
+ wf.write("東1,0,0,0,東1,名詞,普通名詞,一般,*,*,*,ヒガシイチ,東1,*,C,\"東,名詞,普通名詞,一般,*,*,*,ヒガシ/1,名詞,数詞,*,*,*,*,イチ\",\"東,名詞,普通名詞,一般,*,*,*,ヒガシ/U3\",*")
self.logger = getLogger()
self.logger.disabled = True
@@ -218,13 +220,13 @@ class TestDictionaryBuilder(TestCase):
self.assertEqual('test', header.description)
# grammar
- self.assertEqual(2, grammar.get_part_of_speech_size())
+ self.assertEqual(3, grammar.get_part_of_speech_size())
self.assertEqual(["名詞", "固有名詞", "地名", "一般", "*", "*"], grammar.get_part_of_speech_string(0))
self.assertEqual(["名詞", "普通名詞", "一般", "*", "*", "*"], grammar.get_part_of_speech_string(1))
self.assertEqual(200, grammar.get_connect_cost(0, 0))
# lexicon
- self.assertEqual(3, lexicon.size())
+ self.assertEqual(5, lexicon.size())
self.assertEqual(0, lexicon.get_cost(0))
wi = lexicon.get_word_info(0)
self.assertEqual('東京都', wi.surface)
| Slow building of user dictionary with surface form split info
It seems to take 30+ seconds per entry to build a user dictionary if its words have split info given as constituent word info (構成語情報).
```
$ sudachipy ubuild user.csv -o user.dic
reading the source file...1 words
writing the POS table...2 bytes
writing the connection matrix...4 bytes
building the trie...done
writing the trie...1028 bytes
writing the word-ID table...9 bytes
writing the word parameters...10 bytes
writing the word_infos...70 bytes
writing word_info offsets...4 bytes
real 0m38.654s
user 0m38.499s
sys 0m0.139s
```
user.csv:
```csv
舞台藝術,5146,5146,8000,舞台藝術,名詞,普通名詞,一般,*,*,*,ブタイゲイジュツ,舞台芸術,*,C,"舞台,名詞,普通名詞,一般,*,*,*,ブタイ/藝術,名詞,普通名詞,一般,*,*,*,ゲイジュツ","舞台,名詞,普通名詞,一般,*,*,*,ブタイ/藝術,名詞,普通名詞,一般,*,*,*,ゲイジュツ","舞台,名詞,普通名詞,一般,*,*,*,ブタイ/藝術,名詞,普通名詞,一般,*,*,*,ゲイジュツ",*
```
If I have two such lines, it takes about 1 minute.
※ There is no problem with **word ID** split info:
```csv
舞台藝術,5146,5146,8000,舞台藝術,名詞,普通名詞,一般,*,*,*,ブタイゲイジュツ,舞台芸術,*,C,647312/659236,647312/659236,647312/659236,*
```
```
$ sudachipy ubuild user.csv -o user.dic
real 0m0.925s
user 0m0.776s
sys 0m0.159s
```
----
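For context, the bottleneck is `DoubleArrayLexicon.get_word_id`: resolving a surface-form split reference used to trigger a linear scan over every entry in the lexicon, once per reference. The patch above short-circuits this with a trie lookup first (and also anchors the ID regex to `U?\d+$` so numeric surfaces are no longer mistaken for word IDs). A condensed sketch of the patched lookup:
```python
def get_word_id(self, headword, pos_id, reading_form):
    # Fast path: look the headword up in the double-array trie first.
    for wid, _ in self.lookup(headword.encode('utf-8'), 0):
        if self._compare_word_id(wid, headword, pos_id, reading_form):
            return wid
    # Fallback: the former hot path, a linear scan over all word infos.
    for wid in range(self.word_infos.size()):
        if self._compare_word_id(wid, headword, pos_id, reading_form):
            return wid
    return -1
```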
```
$ sudachipy --version
sudachipy 0.5.2
```
| 0.0 | 7e5b501111920cef057b821412af595db248a7b8 | [
"tests/dictionarylib/test_dictionarybuilder.py::TestDictionaryBuilder::test_build"
] | [
"tests/dictionarylib/test_dictionarybuilder.py::TestDictionaryBuilder::test_add_to_trie",
"tests/dictionarylib/test_dictionarybuilder.py::TestDictionaryBuilder::test_convert_matrix",
"tests/dictionarylib/test_dictionarybuilder.py::TestDictionaryBuilder::test_convert_postable",
"tests/dictionarylib/test_dictionarybuilder.py::TestDictionaryBuilder::test_decode",
"tests/dictionarylib/test_dictionarybuilder.py::TestDictionaryBuilder::test_parse_line_empty_headword",
"tests/dictionarylib/test_dictionarybuilder.py::TestDictionaryBuilder::test_parse_line_invalid_columns",
"tests/dictionarylib/test_dictionarybuilder.py::TestDictionaryBuilder::test_parse_line_same_readingform",
"tests/dictionarylib/test_dictionarybuilder.py::TestDictionaryBuilder::test_parse_line_toolong_headword",
"tests/dictionarylib/test_dictionarybuilder.py::TestDictionaryBuilder::test_parse_line_toomany_split",
"tests/dictionarylib/test_dictionarybuilder.py::TestDictionaryBuilder::test_parse_splitinfo",
"tests/dictionarylib/test_dictionarybuilder.py::TestDictionaryBuilder::test_parse_splitinfo_invalid_system_wordid_in_userdict",
"tests/dictionarylib/test_dictionarybuilder.py::TestDictionaryBuilder::test_parse_splitinfo_invalid_wordid",
"tests/dictionarylib/test_dictionarybuilder.py::TestDictionaryBuilder::test_parse_splitinfo_invalid_wordid_userdict",
"tests/dictionarylib/test_dictionarybuilder.py::TestDictionaryBuilder::test_parseline",
"tests/dictionarylib/test_dictionarybuilder.py::TestDictionaryBuilder::test_write_intarray",
"tests/dictionarylib/test_dictionarybuilder.py::TestDictionaryBuilder::test_write_string"
] | {
"failed_lite_validators": [
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false
} | 2021-06-24 03:14:56+00:00 | apache-2.0 | 837 |
|
Wortmeister-HQ__zahlwort2num-6 | diff --git a/zahlwort2num/__init__.py b/zahlwort2num/__init__.py
index cacf7d9..2e0a2b5 100644
--- a/zahlwort2num/__init__.py
+++ b/zahlwort2num/__init__.py
@@ -12,6 +12,7 @@ class ZahlConverter:
'eine': 1,
'er': 1,
'zwei': 2,
+ 'zwo': 2,
'drei': 3,
'drit': 3,
'vier': 4,
| Wortmeister-HQ/zahlwort2num | 325417a06bc6f79ca13605bb54b0dbc6d45c8f6e | diff --git a/zahlwort2num/tests/test_convert.py b/zahlwort2num/tests/test_convert.py
index 91fd15f..905ec5c 100644
--- a/zahlwort2num/tests/test_convert.py
+++ b/zahlwort2num/tests/test_convert.py
@@ -6,6 +6,7 @@ class TestConverter(TestCase):
def test_hardcoded_values_upto_100(self):
self.assertTrue(w2n.convert('eins') == 1)
self.assertTrue(w2n.convert('zwei') == 2)
+ self.assertTrue(w2n.convert('zwo') == 2)
self.assertTrue(w2n.convert('drei') == 3)
self.assertTrue(w2n.convert('vier') == 4)
self.assertTrue(w2n.convert('fünf') == 5)
@@ -128,12 +129,14 @@ class TestConverter(TestCase):
'zwei milliarden',
'eintausend',
'null',
- 'neunundvierzig'
+ 'neunundvierzig',
+ 'zwohundertzwoundzwanzig',
+ 'zwotausend'
]
numbers = [7, 99, 1000, 200000, 4504, 4511, 225, 3625, 12854, 653521,
99, 500002, 1004000, 700000000009001009001, 9000000000000000000000000011,
- 2000000000, 1000, 0, 49]
+ 2000000000, 1000, 0, 49, 222, 2000]
self.compare_sets(numbers, words)
| Does not work with alternative pronunciation "zwo"
```py
>>> from zahlwort2num import convert
>>> convert("zwo")
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "C:\ProgramData\miniforge3\lib\site-packages\zahlwort2num\__init__.py", line 137, in convert
return c.convert()
File "C:\ProgramData\miniforge3\lib\site-packages\zahlwort2num\__init__.py", line 123, in convert
return self.ordBn(number)
File "C:\ProgramData\miniforge3\lib\site-packages\zahlwort2num\__init__.py", line 108, in ordBn
return self.ordWithBN(number, 0)
File "C:\ProgramData\miniforge3\lib\site-packages\zahlwort2num\__init__.py", line 88, in ordWithBN
return self.convOrd(number)
File "C:\ProgramData\miniforge3\lib\site-packages\zahlwort2num\__init__.py", line 81, in convOrd
return self.convt2(number)
File "C:\ProgramData\miniforge3\lib\site-packages\zahlwort2num\__init__.py", line 129, in <lambda>
self.convt2 = lambda number: self.mult(number, 'tausend', 1000, self.convh2)
File "C:\ProgramData\miniforge3\lib\site-packages\zahlwort2num\__init__.py", line 60, in mult
return fun(spliter[0])
File "C:\ProgramData\miniforge3\lib\site-packages\zahlwort2num\__init__.py", line 130, in <lambda>
self.convh2 = lambda number: self.mult(number, 'hundert', 100, self.convu2)
File "C:\ProgramData\miniforge3\lib\site-packages\zahlwort2num\__init__.py", line 60, in mult
return fun(spliter[0])
File "C:\ProgramData\miniforge3\lib\site-packages\zahlwort2num\__init__.py", line 131, in <lambda>
self.convu2 = lambda number: self.mult(number, 'und', 1, lambda word: self.CONST_NUMS[word])
File "C:\ProgramData\miniforge3\lib\site-packages\zahlwort2num\__init__.py", line 60, in mult
return fun(spliter[0])
File "C:\ProgramData\miniforge3\lib\site-packages\zahlwort2num\__init__.py", line 131, in <lambda>
self.convu2 = lambda number: self.mult(number, 'und', 1, lambda word: self.CONST_NUMS[word])
KeyError: 'zwo'
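# The KeyError above comes from CONST_NUMS in zahlwort2num/__init__.py,
# which has no entry for the colloquial form; the patch fixes it by
# adding the mapping 'zwo': 2 next to 'zwei'.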
``` | 0.0 | 325417a06bc6f79ca13605bb54b0dbc6d45c8f6e | [
"zahlwort2num/tests/test_convert.py::TestConverter::test_hardcoded_values_upto_100",
"zahlwort2num/tests/test_convert.py::TestConverter::test_more_specific"
] | [
"zahlwort2num/tests/test_convert.py::TestConverter::test_bugs_found_by_users",
"zahlwort2num/tests/test_convert.py::TestConverter::test_negative_values",
"zahlwort2num/tests/test_convert.py::TestConverter::test_negative_with_ordinal",
"zahlwort2num/tests/test_convert.py::TestConverter::test_ordinal_numbers",
"zahlwort2num/tests/test_convert.py::TestConverter::test_swiss_variant",
"zahlwort2num/tests/test_convert.py::TestConverter::test_yet_another_ordinal_edge_case"
] | {
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true
} | 2022-10-28 09:45:02+00:00 | mit | 838 |
|
XKNX__xknx-1008 | diff --git a/docs/changelog.md b/docs/changelog.md
index 98a4c42b..9a9e0700 100644
--- a/docs/changelog.md
+++ b/docs/changelog.md
@@ -6,6 +6,12 @@ nav_order: 2
# Changelog
+## Unreleased changes
+
+### Internal
+
+- Fix DPT2ByteFloat numeric range issues
+
## 0.22.1 Wrong delivery 2022-07-29
### Management
diff --git a/xknx/dpt/dpt_2byte_float.py b/xknx/dpt/dpt_2byte_float.py
index ead6abdc..c5858011 100644
--- a/xknx/dpt/dpt_2byte_float.py
+++ b/xknx/dpt/dpt_2byte_float.py
@@ -49,33 +49,23 @@ class DPT2ByteFloat(DPTNumeric):
@classmethod
def to_knx(cls, value: float) -> tuple[int, int]:
"""Serialize to KNX/IP raw data."""
-
- def calc_exponent(float_value: float, sign: bool) -> tuple[int, int]:
- """Return float exponent."""
- exponent = 0
- significand = abs(int(float_value * 100))
-
- while significand < -2048 or significand > 2048:
- exponent += 1
- significand >>= 1
-
- if sign:
- significand ^= 0x7FF # invert
- significand += 1 # and add 1
-
- return exponent, significand
-
try:
knx_value = float(value)
if not cls._test_boundaries(knx_value):
raise ValueError
- sign = knx_value < 0
- exponent, significand = calc_exponent(knx_value, sign)
+ value = knx_value * 100
+ exponent = 0
+ while value < -2048 or value > 2047:
+ exponent += 1
+ value /= 2
+
+ mantisse = int(round(value)) & 0x7FF
+ msb = exponent << 3 | mantisse >> 8
+ if value < 0:
+ msb |= 0x80
- return (sign << 7) | (exponent << 3) | (
- significand >> 8
- ), significand & 0xFF
+ return msb, mantisse & 0xFF
except ValueError:
raise ConversionError(f"Could not serialize {cls.__name__}", value=value)
diff --git a/xknx/secure/keyring.py b/xknx/secure/keyring.py
index e23bc7e7..1727e5e9 100644
--- a/xknx/secure/keyring.py
+++ b/xknx/secure/keyring.py
@@ -222,7 +222,7 @@ class XMLDevice(AttributeReader):
class Keyring(AttributeReader):
"""Class for loading and decrypting knxkeys XML files."""
- backbone: XMLBackbone
+ backbone: XMLBackbone | None = None
interfaces: list[XMLInterface]
group_addresses: list[XMLGroupAddress]
devices: list[XMLDevice]
| XKNX/xknx | 180ec53481c40f8bebf4b9fcaad2b450857d7109 | diff --git a/requirements/testing.txt b/requirements/testing.txt
index 76aac084..ee9c39d4 100644
--- a/requirements/testing.txt
+++ b/requirements/testing.txt
@@ -8,6 +8,6 @@ pylint==2.14.5
pytest==7.1.2
pytest-asyncio==0.19.0
pytest-cov==3.0.0
-setuptools==63.4.2
+setuptools==64.0.1
tox==3.25.1
mypy==0.971
diff --git a/test/dpt_tests/dpt_float_test.py b/test/dpt_tests/dpt_float_test.py
index 0a9dc336..083fa7c1 100644
--- a/test/dpt_tests/dpt_float_test.py
+++ b/test/dpt_tests/dpt_float_test.py
@@ -79,6 +79,13 @@ class TestDPTFloat:
assert DPT2ByteFloat.to_knx(DPT2ByteFloat.value_max) == (0x7F, 0xFF)
assert DPT2ByteFloat.from_knx((0x7F, 0xFF)) == DPT2ByteFloat.value_max
+ def test_close_to_limit(self):
+ """Test parsing and streaming of DPT2ByteFloat with numeric limit."""
+ assert DPT2ByteFloat.to_knx(20.48) == (0x0C, 0x00)
+ assert DPT2ByteFloat.from_knx((0x0C, 0x00)) == 20.48
+ assert DPT2ByteFloat.to_knx(-20.48) == (0x80, 0x00)
+ assert DPT2ByteFloat.from_knx((0x80, 0x00)) == -20.48
+
def test_min(self):
"""Test parsing and streaming of DPT2ByteFloat with minimum value."""
assert DPT2ByteFloat.to_knx(DPT2ByteFloat.value_min) == (0xF8, 0x00)
| AttributeError: 'Keyring' object has no attribute 'backbone'
Hi.
My ETS generates a .knxkeys file that doesn't have a "backbone" parameter:
```
<?xml version="1.0" encoding="utf-8"?>
<Keyring Project="Apartment" CreatedBy="ETS 5.7.4 (Build 1093)" Created="2022-08-09T15:53:32" Signature="yaIT..." xmlns="http://knx.org/xml/keyring/1">
<Interface Type="Tunneling" Host="1.1.251" UserID="2" Password="HIDDEN" IndividualAddress="HIDDEN" Authentication="HIDDEN" />
<Interface Type="Tunneling" Host="1.1.251" UserID="3" Password="HIDDEN" IndividualAddress="HIDDEN" Authentication="HIDDEN" />
<Interface Type="Tunneling" Host="1.1.251" UserID="4" Password="HIDDEN" IndividualAddress="HIDDEN" Authentication="HIDDEN" />
<Interface Type="Tunneling" Host="1.1.251" UserID="5" Password="HIDDEN" IndividualAddress="HIDDEN" Authentication="HIDDEN" />
<Devices>
<Device IndividualAddress="1.1.251" SequenceNumber="24" />
</Devices>
</Keyring>
```
Because of that, I get the following error:
```
There was an error during loading the knxkeys file.
Traceback (most recent call last):
File "/opt/homebrew/lib/python3.9/site-packages/xknx/secure/keyring.py", line 316, in load_key_ring
keyring.decrypt(password)
File "/opt/homebrew/lib/python3.9/site-packages/xknx/secure/keyring.py", line 299, in decrypt
if self.backbone is not None:
AttributeError: 'Keyring' object has no attribute 'backbone'
Traceback (most recent call last):
File "/opt/homebrew/lib/python3.9/site-packages/xknx/secure/keyring.py", line 316, in load_key_ring
keyring.decrypt(password)
File "/opt/homebrew/lib/python3.9/site-packages/xknx/secure/keyring.py", line 299, in decrypt
if self.backbone is not None:
AttributeError: 'Keyring' object has no attribute 'backbone'
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/Users/max/Projects/KNX/example_telegram_monitor.py", line 69, in <module>
asyncio.run(main(sys.argv[1:]))
File "/opt/homebrew/Cellar/[email protected]/3.9.13_1/Frameworks/Python.framework/Versions/3.9/lib/python3.9/asyncio/runners.py", line 44, in run
return loop.run_until_complete(main)
File "/opt/homebrew/Cellar/[email protected]/3.9.13_1/Frameworks/Python.framework/Versions/3.9/lib/python3.9/asyncio/base_events.py", line 647, in run_until_complete
return future.result()
File "/Users/max/Projects/KNX/example_telegram_monitor.py", line 65, in main
await monitor(address_filters)
File "/Users/max/Projects/KNX/example_telegram_monitor.py", line 47, in monitor
await xknx.start()
File "/opt/homebrew/lib/python3.9/site-packages/xknx/xknx.py", line 128, in start
await self.knxip_interface.start()
File "/opt/homebrew/lib/python3.9/site-packages/xknx/io/knxip_interface.py", line 69, in start
await self._start()
File "/opt/homebrew/lib/python3.9/site-packages/xknx/io/knxip_interface.py", line 105, in _start
keyring: Keyring = load_key_ring(
File "/opt/homebrew/lib/python3.9/site-packages/xknx/secure/keyring.py", line 321, in load_key_ring
raise InvalidSecureConfiguration() from exception
xknx.exceptions.exception.InvalidSecureConfiguration
```
I get the same error in the standalone version of XKNX and in Home Assistant when using the "TCP with IP Secure" connection mode.
My code:
```python
# Imports added so the snippet is self-contained; ConnectionConfig,
# ConnectionType and SecureConfig are exported from xknx.io.
from xknx import XKNX
from xknx.io import ConnectionConfig, ConnectionType, SecureConfig

connection_config = ConnectionConfig(
    connection_type=ConnectionType.TUNNELING_TCP_SECURE,
    gateway_ip="192.168.11.24",
    secure_config=SecureConfig(
        user_id=3,
        knxkeys_file_path="/Users/max/KNX/Deployment/Apartment.knxkeys",
        knxkeys_password="HIDDEN",
    ),
)
xknx = XKNX(connection_config=connection_config, daemon_mode=True)
```
As a temporary fix, I've applied the following patch:
```
--- xknx/secure/keyring.py 2022-08-09 19:49:28.000000000 +0300
+++ xknx/secure/keyring_fixed.py 2022-08-09 20:27:51.000000000 +0300
@@ -233,6 +233,7 @@
def __init__(self) -> None:
"""Initialize the Keyring."""
+ self.backbone = None
self.interfaces = []
self.devices = []
self.group_addresses = []
```
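For comparison, the fix that landed upstream (see the patch for `xknx/secure/keyring.py` above) achieves the same with a class-level default instead of an `__init__` assignment:
```python
class Keyring(AttributeReader):
    """Class for loading and decrypting knxkeys XML files."""

    # Optional: keyrings exported for plain (non-IP-Secure) interfaces
    # may not contain a <Backbone> element at all.
    backbone: XMLBackbone | None = None
    interfaces: list[XMLInterface]
    group_addresses: list[XMLGroupAddress]
    devices: list[XMLDevice]
```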
- [X] using xknx standalone
- [X] using Home-Assistant knx integration
**Version information:**
- Standalone xknx: 0.22.1
- xknx / Home-Assistant release with the issue: 2022.7.7
**KNX installation:**
ETS 5.7.4
KNX IP Interface: MDT SCN-IP000.03 | 0.0 | 180ec53481c40f8bebf4b9fcaad2b450857d7109 | [
"test/dpt_tests/dpt_float_test.py::TestDPTFloat::test_close_to_limit"
] | [
"test/dpt_tests/dpt_float_test.py::TestDPTFloat::test_value_from_documentation",
"test/dpt_tests/dpt_float_test.py::TestDPTFloat::test_value_taken_from_live_thermostat",
"test/dpt_tests/dpt_float_test.py::TestDPTFloat::test_zero_value",
"test/dpt_tests/dpt_float_test.py::TestDPTFloat::test_room_temperature",
"test/dpt_tests/dpt_float_test.py::TestDPTFloat::test_high_temperature",
"test/dpt_tests/dpt_float_test.py::TestDPTFloat::test_minor_negative_temperature",
"test/dpt_tests/dpt_float_test.py::TestDPTFloat::test_very_cold_temperature",
"test/dpt_tests/dpt_float_test.py::TestDPTFloat::test_max",
"test/dpt_tests/dpt_float_test.py::TestDPTFloat::test_min",
"test/dpt_tests/dpt_float_test.py::TestDPTFloat::test_close_to_max",
"test/dpt_tests/dpt_float_test.py::TestDPTFloat::test_close_to_min",
"test/dpt_tests/dpt_float_test.py::TestDPTFloat::test_to_knx_min_exceeded",
"test/dpt_tests/dpt_float_test.py::TestDPTFloat::test_to_knx_max_exceeded",
"test/dpt_tests/dpt_float_test.py::TestDPTFloat::test_to_knx_wrong_parameter",
"test/dpt_tests/dpt_float_test.py::TestDPTFloat::test_from_knx_wrong_parameter",
"test/dpt_tests/dpt_float_test.py::TestDPTFloat::test_from_knx_wrong_parameter2",
"test/dpt_tests/dpt_float_test.py::TestDPTFloat::test_temperature_settings",
"test/dpt_tests/dpt_float_test.py::TestDPTFloat::test_temperature_assert_min_exceeded",
"test/dpt_tests/dpt_float_test.py::TestDPTFloat::test_temperature_assert_min_exceeded_from_knx",
"test/dpt_tests/dpt_float_test.py::TestDPTFloat::test_lux_settings",
"test/dpt_tests/dpt_float_test.py::TestDPTFloat::test_lux_assert_min_exceeded",
"test/dpt_tests/dpt_float_test.py::TestDPTFloat::test_humidity_settings",
"test/dpt_tests/dpt_float_test.py::TestDPTFloat::test_humidity_assert_min_exceeded",
"test/dpt_tests/dpt_float_test.py::TestDPTFloat::test_enthalpy_settings",
"test/dpt_tests/dpt_float_test.py::TestDPTFloat::test_partspermillion_settings",
"test/dpt_tests/dpt_float_test.py::TestDPTFloat::test_voltage_settings",
"test/dpt_tests/dpt_float_test.py::TestDPTFloat::test_4byte_float_values_from_power_meter",
"test/dpt_tests/dpt_float_test.py::TestDPTFloat::test_14_033",
"test/dpt_tests/dpt_float_test.py::TestDPTFloat::test_14_055",
"test/dpt_tests/dpt_float_test.py::TestDPTFloat::test_14_057",
"test/dpt_tests/dpt_float_test.py::TestDPTFloat::test_4byte_float_values_from_voltage_meter",
"test/dpt_tests/dpt_float_test.py::TestDPTFloat::test_4byte_float_zero_value",
"test/dpt_tests/dpt_float_test.py::TestDPTFloat::test_4byte_float_special_value",
"test/dpt_tests/dpt_float_test.py::TestDPTFloat::test_4byte_float_to_knx_wrong_parameter",
"test/dpt_tests/dpt_float_test.py::TestDPTFloat::test_4byte_float_from_knx_wrong_parameter",
"test/dpt_tests/dpt_float_test.py::TestDPTFloat::test_4byte_float_from_knx_wrong_parameter2",
"test/dpt_tests/dpt_float_test.py::TestDPTFloat::test_4byte_flaot_from_knx_unpack_error",
"test/dpt_tests/dpt_float_test.py::TestDPTFloat::test_electric_current_settings",
"test/dpt_tests/dpt_float_test.py::TestDPTFloat::test_electric_potential_settings",
"test/dpt_tests/dpt_float_test.py::TestDPTFloat::test_power_settings"
] | {
"failed_lite_validators": [
"has_hyperlinks",
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false
} | 2022-08-10 08:07:07+00:00 | mit | 839 |
|
XKNX__xknx-1114 | diff --git a/docs/changelog.md b/docs/changelog.md
index 2cc605ca..b056f7de 100644
--- a/docs/changelog.md
+++ b/docs/changelog.md
@@ -12,6 +12,10 @@ nav_order: 2
- ExposeSensor: Add `cooldown` option to allow rate-limiting of sent telegrams.
+### Connection
+
+- Disconnect when tunnelling sequence number (UDP) gets out of sync.
+
### Internals
- Add `task.done()` to TaskRegistry tasks.
diff --git a/xknx/io/tunnel.py b/xknx/io/tunnel.py
index 20833f81..e57c871d 100644
--- a/xknx/io/tunnel.py
+++ b/xknx/io/tunnel.py
@@ -607,6 +607,9 @@ class UDPTunnel(_Tunnel):
self.expected_sequence_number,
tunneling_request,
)
+ # Tunnelling server should repeat that frame and disconnect after that was also not ACKed.
+ # some don't seem to initiate disconnection here so we take a shortcut and disconnect ourselves
+ self._tunnel_lost()
def _send_tunnelling_ack(
self, communication_channel_id: int, sequence_counter: int
| XKNX/xknx | d1d6085102331fe9430004e55085612fd993b2f7 | diff --git a/requirements/testing.txt b/requirements/testing.txt
index 7954b357..e07b3e32 100644
--- a/requirements/testing.txt
+++ b/requirements/testing.txt
@@ -1,13 +1,13 @@
-r production.txt
-pre-commit==2.20.0
-isort==5.11.3
+pre-commit==2.21.0
+isort==5.11.4
flake8==6.0.0
-flake8-isort==5.0.3
+flake8-isort==6.0.0
pydocstyle==6.1.1
pylint==2.15.9
pytest==7.2.0
pytest-asyncio==0.20.3
pytest-cov==4.0.0
setuptools==65.6.3
-tox==4.0.16
+tox==4.0.17
mypy==0.991
diff --git a/test/io_tests/tunnel_test.py b/test/io_tests/tunnel_test.py
index cd177980..53178978 100644
--- a/test/io_tests/tunnel_test.py
+++ b/test/io_tests/tunnel_test.py
@@ -155,7 +155,9 @@ class TestUDPTunnel:
assert self.tunnel.expected_sequence_number == 11
assert self.tg_received_mock.call_count == 1
# wrong sequence number - no ACK, not processed
- self.tunnel._request_received(test_frame_9, None, None)
+ # reconnect if `auto_reconnect` was True
+ with pytest.raises(CommunicationError):
+ self.tunnel._request_received(test_frame_9, None, None)
await time_travel(0)
assert self.tunnel.transport.send.call_args_list == []
self.tunnel.transport.send.reset_mock()
| 'TunnellingRequest with sequence number not equal to expected' error every few hours
Every few hours 'Received TunnellingRequest with sequence number not equal to expected' error is stuck while the sequence finally loops around. During that time messages are sent but not read.
- [ ] using xknx standalone
- [x] using Home-Assistant knx integration
**Version information:**
- xknx / Home-Assistant release with the issue: 1.2.1
- last working xknx / Home-Assistant release (if known): unknown
**KNX installation:**
`configuration.yaml`
[knx.yaml.txt](https://github.com/XKNX/xknx/files/10067390/knx.yaml.txt)
**Diagnostic data of the config entry (only when Home Assistant is used)**
[config_entry-knx-ed093e5771cbc5d18e4307a64fd3a504.json.txt](https://github.com/XKNX/xknx/files/10067378/config_entry-knx-ed093e5771cbc5d18e4307a64fd3a504.json.txt)
**Traceback (if applicable):**
[log.txt](https://github.com/XKNX/xknx/files/10067435/log.txt)
| 0.0 | d1d6085102331fe9430004e55085612fd993b2f7 | [
"test/io_tests/tunnel_test.py::TestUDPTunnel::test_repeated_tunnel_request"
] | [
"test/io_tests/tunnel_test.py::TestUDPTunnel::test_tunnel_request_received[\\x06\\x10\\x04",
"test/io_tests/tunnel_test.py::TestUDPTunnel::test_tunnel_request_received_callback",
"test/io_tests/tunnel_test.py::TestUDPTunnel::test_tunnel_request_received_cemi_too_small",
"test/io_tests/tunnel_test.py::TestUDPTunnel::test_tunnel_request_received_apci_unsupported",
"test/io_tests/tunnel_test.py::TestUDPTunnel::test_tunnel_connect_send_disconnect[False-data_endpoint_addr0-local_endpoint0]",
"test/io_tests/tunnel_test.py::TestUDPTunnel::test_tunnel_connect_send_disconnect[True-None-local_endpoint1]",
"test/io_tests/tunnel_test.py::TestUDPTunnel::test_tunnel_request_description",
"test/io_tests/tunnel_test.py::TestTCPTunnel::test_tunnel_heartbeat",
"test/io_tests/tunnel_test.py::TestTCPTunnel::test_tunnel_heartbeat_no_answer",
"test/io_tests/tunnel_test.py::TestTCPTunnel::test_tunnel_heartbeat_error"
] | {
"failed_lite_validators": [
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false
} | 2022-12-21 21:20:28+00:00 | mit | 840 |
|
XKNX__xknx-1190 | diff --git a/docs/changelog.md b/docs/changelog.md
index 03b50890..5834d7d3 100644
--- a/docs/changelog.md
+++ b/docs/changelog.md
@@ -17,6 +17,7 @@ nav_order: 2
- Parse Data Secure credentials form Keyring from non-IP-Secure interfaces.
- Parse Data Secure credentials from Keyrings exported for specific interfaces.
+- Fix callback for Cover target position when called with same value consecutively.
- Fix Windows TCP transport bug when using IP Secure Tunnelling.
### Protocol
diff --git a/xknx/devices/cover.py b/xknx/devices/cover.py
index 750913e1..489c1364 100644
--- a/xknx/devices/cover.py
+++ b/xknx/devices/cover.py
@@ -360,7 +360,7 @@ class Cover(Device):
await self._stop_position_update()
await self.position_current.process(telegram, always_callback=True)
- await self.position_target.process(telegram)
+ await self.position_target.process(telegram, always_callback=True)
await self.angle.process(telegram)
await self.locked.process(telegram)
| XKNX/xknx | 88fc1ead4cdb10bdacee6824905540365588bf9d | diff --git a/test/devices_tests/cover_test.py b/test/devices_tests/cover_test.py
index 2617622e..9dbe67ad 100644
--- a/test/devices_tests/cover_test.py
+++ b/test/devices_tests/cover_test.py
@@ -830,6 +830,9 @@ class TestCover:
("1/2/1", DPTBinary(1), "long"),
("1/2/2", DPTBinary(1), "short"),
("1/2/4", DPTArray(42), "position"),
+ ("1/2/3", DPTBinary(1), "stop"),
+ # call position with same value again to make sure `always_callback` is set for target position
+ ("1/2/4", DPTArray(42), "position"),
("1/2/5", DPTArray(42), "position state"),
("1/2/6", DPTArray(42), "angle"),
("1/2/7", DPTArray(51), "angle state"),
| Cover update callback stops in some special case
Hello!
My intention is to use the `device_updated_cb` callback of Cover to run some logic during its movement etc.
I ran into a problem where the callback stops working in one special case, for example:
Pretend the blind's initial position is 0% and we need to move it to 90%:
1) start moving the blind to some position: `await cover.set_position(90)`
2) send Stop during the movement: `await cover.stop()`
3) then continue moving it to the previously given position: `await cover.set_position(90)`
Here the `device_updated_cb` stops firing.
4) if the blind is then (or instead) sent to any position other than 90%, the callback starts working again, for example when sending it to any of 0-89% or 91-100%.
Not sure if I'm doing something incorrectly.
Here is the reduced example:
```python
# Imports added so the snippet runs standalone.
import asyncio

from xknx import XKNX
from xknx.devices import Cover


async def cover_update_cb(cover: Cover):
    print('Something inside cover update callback')


cover: Cover = None


async def test():
    global cover
    await asyncio.sleep(1)
    await cover.set_position(90)
    await asyncio.sleep(4)
    print('STOP command')
    await cover.stop()
    await asyncio.sleep(2)
    print('Continue to move to 90')
    await cover.set_position(90)
    # Here cover_update_cb stops. If the blind is instead sent to any
    # position different from the one it started moving towards at the
    # beginning (90 in our case), the callback works.


async def main():
    global cover
    xknx = XKNX(daemon_mode=True)
    cover = Cover(
        xknx,
        'Cover',
        group_address_stop="1/1/2",
        group_address_position="1/1/3",
        group_address_position_state="1/1/4",
        device_updated_cb=cover_update_cb,
    )
    loop = asyncio.get_running_loop()
    loop.create_task(test())
    await xknx.start()
    await xknx.stop()


asyncio.run(main())
```
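The root cause, fixed by the one-line change in `xknx/devices/cover.py` shown in the patch above, is that the target position's `RemoteValue` only fired its callback when the incoming value differed from the stored one, so re-sending the same target (90) was silently swallowed:
```python
# Cover.process_group_write after the fix:
await self.position_current.process(telegram, always_callback=True)
await self.position_target.process(telegram, always_callback=True)  # always_callback was missing here
```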
| 0.0 | 88fc1ead4cdb10bdacee6824905540365588bf9d | [
"test/devices_tests/cover_test.py::TestCover::test_process_callback"
] | [
"test/devices_tests/cover_test.py::TestCover::test_supports_stop_true",
"test/devices_tests/cover_test.py::TestCover::test_supports_stop_false",
"test/devices_tests/cover_test.py::TestCover::test_supports_position_true",
"test/devices_tests/cover_test.py::TestCover::test_supports_position_false",
"test/devices_tests/cover_test.py::TestCover::test_supports_angle_true",
"test/devices_tests/cover_test.py::TestCover::test_support_angle_false",
"test/devices_tests/cover_test.py::TestCover::test_support_locked",
"test/devices_tests/cover_test.py::TestCover::test_sync",
"test/devices_tests/cover_test.py::TestCover::test_sync_state",
"test/devices_tests/cover_test.py::TestCover::test_sync_angle",
"test/devices_tests/cover_test.py::TestCover::test_sync_angle_state",
"test/devices_tests/cover_test.py::TestCover::test_set_up",
"test/devices_tests/cover_test.py::TestCover::test_set_short_down",
"test/devices_tests/cover_test.py::TestCover::test_set_down_inverted",
"test/devices_tests/cover_test.py::TestCover::test_set_short_up",
"test/devices_tests/cover_test.py::TestCover::test_set_up_inverted",
"test/devices_tests/cover_test.py::TestCover::test_set_down",
"test/devices_tests/cover_test.py::TestCover::test_stop",
"test/devices_tests/cover_test.py::TestCover::test_stop_angle",
"test/devices_tests/cover_test.py::TestCover::test_position",
"test/devices_tests/cover_test.py::TestCover::test_position_without_binary",
"test/devices_tests/cover_test.py::TestCover::test_position_without_position_address_up",
"test/devices_tests/cover_test.py::TestCover::test_position_without_position_address_down",
"test/devices_tests/cover_test.py::TestCover::test_position_without_position_address_uninitialized_up",
"test/devices_tests/cover_test.py::TestCover::test_position_without_position_address_uninitialized_down",
"test/devices_tests/cover_test.py::TestCover::test_angle",
"test/devices_tests/cover_test.py::TestCover::test_angle_not_supported",
"test/devices_tests/cover_test.py::TestCover::test_process_position",
"test/devices_tests/cover_test.py::TestCover::test_process_angle",
"test/devices_tests/cover_test.py::TestCover::test_process_locked",
"test/devices_tests/cover_test.py::TestCover::test_process_up",
"test/devices_tests/cover_test.py::TestCover::test_process_down",
"test/devices_tests/cover_test.py::TestCover::test_process_stop",
"test/devices_tests/cover_test.py::TestCover::test_process_short_stop",
"test/devices_tests/cover_test.py::TestCover::test_is_traveling",
"test/devices_tests/cover_test.py::TestCover::test_auto_stop",
"test/devices_tests/cover_test.py::TestCover::test_periodic_update",
"test/devices_tests/cover_test.py::TestCover::test_has_group_address"
] | {
"failed_lite_validators": [
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false
} | 2023-02-11 06:57:04+00:00 | mit | 841 |
|
XKNX__xknx-234 | diff --git a/examples/example_light_rgbw.py b/examples/example_light_rgbw.py
new file mode 100644
index 00000000..dd060c98
--- /dev/null
+++ b/examples/example_light_rgbw.py
@@ -0,0 +1,49 @@
+"""Example for setting different colors on a RGBW light."""
+import asyncio
+
+from xknx import XKNX
+from xknx.devices import RemoteValueColorRGBW
+
+
+async def main():
+ """Connect to KNX/IP bus and set different colors."""
+ xknx = XKNX()
+ await xknx.start()
+
+ rgbw = RemoteValueColorRGBW(xknx,
+ group_address='1/1/40',
+ group_address_state='1/1/41',
+ device_name="RGBWLight")
+
+ await rgbw.set([255, 255, 255, 0, 15]) # cold-white
+ await asyncio.sleep(1)
+ await rgbw.set([0, 0, 0, 255, 15]) # warm-white
+ await asyncio.sleep(1)
+ await rgbw.set([0, 0, 0, 0, 15]) # off
+ await asyncio.sleep(1)
+
+ await rgbw.set([255, 0, 0, 0]) # red
+ await asyncio.sleep(1)
+ await rgbw.set([0, 255, 0, 0]) # green
+ await asyncio.sleep(1)
+ await rgbw.set([0, 0, 255, 0]) # blue
+ await asyncio.sleep(1)
+ await rgbw.set([0, 0, 0, 0, 15]) # off
+ await asyncio.sleep(1)
+
+ await rgbw.set([255, 255, 0, 0, 15])
+ await asyncio.sleep(1)
+ await rgbw.set([0, 255, 255, 0, 15])
+ await asyncio.sleep(1)
+ await rgbw.set([255, 0, 255, 0, 15])
+ await asyncio.sleep(1)
+ await rgbw.set([0, 0, 0, 0, 15]) # off
+ await asyncio.sleep(1)
+
+ await xknx.stop()
+
+
+# pylint: disable=invalid-name
+loop = asyncio.get_event_loop()
+loop.run_until_complete(main())
+loop.close()
diff --git a/xknx/devices/remote_value_color_rgbw.py b/xknx/devices/remote_value_color_rgbw.py
index 6c2e436e..beea0ca9 100644
--- a/xknx/devices/remote_value_color_rgbw.py
+++ b/xknx/devices/remote_value_color_rgbw.py
@@ -35,19 +35,23 @@ class RemoteValueColorRGBW(RemoteValue):
Convert value (4-6 bytes) to payload (6 bytes).
* Structure of DPT 251.600
- ** Bytes 0, 1:
- *** Bit 0-11: 0
- *** Bit 12,13,14,15: R,G,B,W value valid?
- ** Byte 2: R value
- ** Byte 3: G value
- ** Byte 4: B value
- ** Byte 5: W value
+ ** Byte 0: R value
+ ** Byte 1: G value
+ ** Byte 2: B value
+ ** Byte 3: W value
+ ** Byte 4: 0x00 (reserved)
+ ** Byte 5:
+ *** Bit 0: W value valid?
+ *** Bit 1: B value valid?
+ *** Bit 2: G value valid?
+ *** Bit 3: R value valid?
+ *** Bit 4-7: 0
In case we receive
* > 6 bytes: error
* 6 bytes: all bytes are passed through
- * 5 bytes: 0x00 left padding
- * 4 bytes: 0x000f left padding
+ * 5 bytes: 0x00?? fill up to 6 bytes
+ * 4 bytes: 0x000f right padding to 6 bytes
* < 4 bytes: error
"""
if not isinstance(value, (list, tuple)):
@@ -56,12 +60,16 @@ class RemoteValueColorRGBW(RemoteValue):
if len(value) < 4 or len(value) > 6:
raise ConversionError("Cannot serialize value to DPT 251.600 (wrong length, expecting list of 4-6 bytes)",
value=value, type=type(value))
- rgbw = value[len(value)-4:]
+ rgbw = value[:4]
if any(not isinstance(color, int) for color in rgbw) \
or any(color < 0 for color in rgbw) \
or any(color > 255 for color in rgbw):
raise ConversionError("Cannot serialize DPT 251.600 (wrong RGBW values)", value=value)
- return DPTArray([0x00, 0x0f][:6-len(value)] + list(value))
+ if len(value) < 5:
+ return DPTArray(list(rgbw) + [0x00, 0x0f])
+ if len(value) < 6:
+ return DPTArray(list(rgbw) + [0x00] + list(value[4:]))
+ return DPTArray(value)
def from_knx(self, payload):
"""
@@ -72,7 +80,7 @@ class RemoteValueColorRGBW(RemoteValue):
"""
result = []
for i in range(0, len(payload.value) - 2):
- valid = payload.value[1] & (0x08 >> i) != 0
- result.append(payload.value[2 + i] if valid else self.previous_value[i])
+ valid = (payload.value[5] & (0x08 >> i)) != 0 # R,G,B,W value valid?
+ result.append(payload.value[i] if valid else self.previous_value[i])
self.previous_value = result
return result
diff --git a/xknx/exceptions/exception.py b/xknx/exceptions/exception.py
index 879e14b2..626299bd 100644
--- a/xknx/exceptions/exception.py
+++ b/xknx/exceptions/exception.py
@@ -6,7 +6,15 @@ class XKNXException(Exception):
def __eq__(self, other):
"""Equal operator."""
- return self.__dict__ == other.__dict__
+ return repr(self) == repr(other)
+
+ def __hash__(self):
+ """Hash function."""
+ return hash(str(self))
+
+ def __repr__(self):
+ """Representation of object."""
+ return str(self)
class CouldNotParseTelegram(XKNXException):
@@ -14,7 +22,7 @@ class CouldNotParseTelegram(XKNXException):
def __init__(self, description, **kwargs):
"""Initialize CouldNotParseTelegram class."""
- super().__init__("Could not parse Telegram")
+ super().__init__()
self.description = description
self.parameter = kwargs
@@ -32,7 +40,7 @@ class CouldNotParseKNXIP(XKNXException):
def __init__(self, description=""):
"""Initialize CouldNotParseTelegram class."""
- super().__init__("Could not parse KNXIP")
+ super().__init__()
self.description = description
def __str__(self):
@@ -46,7 +54,7 @@ class ConversionError(XKNXException):
def __init__(self, description, **kwargs):
"""Initialize ConversionError class."""
- super().__init__("Conversion Error")
+ super().__init__()
self.description = description
self.parameter = kwargs
@@ -63,7 +71,7 @@ class CouldNotParseAddress(XKNXException):
def __init__(self, address=None):
"""Initialize CouldNotParseAddress class."""
- super().__init__("Could not parse address")
+ super().__init__()
self.address = address
def __str__(self):
@@ -76,7 +84,7 @@ class DeviceIllegalValue(XKNXException):
def __init__(self, value, description):
"""Initialize DeviceIllegalValue class."""
- super().__init__("Illegal value for device")
+ super().__init__()
self.value = value
self.description = description
| XKNX/xknx | 4b71c28db4cde694695da3dfad0072dd5383717f | diff --git a/test/core_tests/exceptions_test.py b/test/core_tests/exceptions_test.py
new file mode 100644
index 00000000..41fc1b74
--- /dev/null
+++ b/test/core_tests/exceptions_test.py
@@ -0,0 +1,51 @@
+"""Unit tests for exceptions"""
+import pytest
+
+from xknx.exceptions import (
+ ConversionError, CouldNotParseAddress, CouldNotParseKNXIP,
+ CouldNotParseTelegram, DeviceIllegalValue, XKNXException)
+
+
[email protected](
+ "base,equal,diff",
+ [
+ (
+ ConversionError("desc1"),
+ ConversionError("desc1"),
+ ConversionError("desc2"),
+ ),
+ (
+ CouldNotParseAddress(123),
+ CouldNotParseAddress(123),
+ CouldNotParseAddress(321),
+ ),
+ (
+ CouldNotParseKNXIP("desc1"),
+ CouldNotParseKNXIP("desc1"),
+ CouldNotParseKNXIP("desc2"),
+ ),
+ (
+ CouldNotParseTelegram("desc", arg1=1, arg2=2),
+ CouldNotParseTelegram("desc", arg1=1, arg2=2),
+ CouldNotParseTelegram("desc", arg1=2, arg2=1),
+ ),
+ (
+ DeviceIllegalValue("value1", "desc"),
+ DeviceIllegalValue("value1", "desc"),
+ DeviceIllegalValue("value1", "desc2"),
+ ),
+ (
+ XKNXException("desc1"),
+ XKNXException("desc1"),
+ XKNXException("desc2"),
+ ),
+ ],
+)
+def test_exceptions(base, equal, diff):
+ """Test hashability and repr of exceptions."""
+ assert hash(base) == hash(equal)
+ assert hash(base) != hash(diff)
+ assert base == equal
+ assert base != diff
+ assert repr(base) == repr(equal)
+ assert repr(base) != repr(diff)
diff --git a/test/devices_tests/light_test.py b/test/devices_tests/light_test.py
index b6283672..5de6eddf 100644
--- a/test/devices_tests/light_test.py
+++ b/test/devices_tests/light_test.py
@@ -318,7 +318,7 @@ class TestLight(unittest.TestCase):
self.assertEqual(xknx.telegrams.qsize(), 1)
telegram = xknx.telegrams.get_nowait()
self.assertEqual(telegram,
- Telegram(GroupAddress('1/2/5'), payload=DPTArray((0, 15, 23, 24, 25, 26))))
+ Telegram(GroupAddress('1/2/5'), payload=DPTArray((23, 24, 25, 26, 0, 15))))
self.assertEqual(light.current_color, ([23, 24, 25], 26))
def test_set_color_rgbw_not_possible(self):
@@ -489,7 +489,7 @@ class TestLight(unittest.TestCase):
group_address_color='1/2/4',
group_address_rgbw='1/2/5')
self.assertEqual(light.current_color, (None, None))
- telegram = Telegram(GroupAddress('1/2/5'), payload=DPTArray((0, 15, 23, 24, 25, 26)))
+ telegram = Telegram(GroupAddress('1/2/5'), payload=DPTArray((23, 24, 25, 26, 0, 15)))
self.loop.run_until_complete(asyncio.Task(light.process(telegram)))
self.assertEqual(light.current_color, ([23, 24, 25], 26))
diff --git a/test/devices_tests/remote_value_color_rgbw_test.py b/test/devices_tests/remote_value_color_rgbw_test.py
index 2a56d3b0..d352c299 100644
--- a/test/devices_tests/remote_value_color_rgbw_test.py
+++ b/test/devices_tests/remote_value_color_rgbw_test.py
@@ -26,32 +26,32 @@ class TestRemoteValueColorRGBW(unittest.TestCase):
remote_value = RemoteValueColorRGBW(xknx)
input_list = [100, 101, 102, 127]
input_tuple = (100, 101, 102, 127)
- expected = DPTArray((0x00, 0x0f, 0x64, 0x65, 0x66, 0x7f))
+ expected = DPTArray((0x64, 0x65, 0x66, 0x7f, 0x00, 0x0f))
self.assertEqual(remote_value.to_knx(input_tuple), expected)
self.assertEqual(remote_value.to_knx(input_list), expected)
- self.assertEqual(remote_value.to_knx((15,) + input_tuple), expected)
- self.assertEqual(remote_value.to_knx([15] + input_list), expected)
- self.assertEqual(remote_value.to_knx((0, 15) + input_tuple), expected)
- self.assertEqual(remote_value.to_knx([0, 15] + input_list), expected)
+ self.assertEqual(remote_value.to_knx(input_tuple + (15,)), expected)
+ self.assertEqual(remote_value.to_knx(input_list + [15]), expected)
+ self.assertEqual(remote_value.to_knx(input_tuple + (0, 15)), expected)
+ self.assertEqual(remote_value.to_knx(input_list + [0, 15]), expected)
def test_from_knx(self):
"""Test from_knx function with normal operation."""
xknx = XKNX(loop=self.loop)
remote_value = RemoteValueColorRGBW(xknx)
self.assertEqual(
- remote_value.from_knx(DPTArray((0x00, 0x00, 0x64, 0x65, 0x66, 0x7f))),
+ remote_value.from_knx(DPTArray((0x64, 0x65, 0x66, 0x7f, 0x00, 0x00))),
[0, 0, 0, 0])
self.assertEqual(
- remote_value.from_knx(DPTArray((0x00, 0x0f, 0x64, 0x65, 0x66, 0x7f))),
+ remote_value.from_knx(DPTArray((0x64, 0x65, 0x66, 0x7f, 0x00, 0x0f))),
[100, 101, 102, 127])
self.assertEqual(
- remote_value.from_knx(DPTArray((0x00, 0x00, 0x64, 0x65, 0x66, 0x7f))),
+ remote_value.from_knx(DPTArray((0x64, 0x65, 0x66, 0x7f, 0x00, 0x00))),
[100, 101, 102, 127])
self.assertEqual(
- remote_value.from_knx(DPTArray((0x00, 0x09, 0xff, 0x65, 0x66, 0xff))),
+ remote_value.from_knx(DPTArray((0xff, 0x65, 0x66, 0xff, 0x00, 0x09))),
[255, 101, 102, 255])
self.assertEqual(
- remote_value.from_knx(DPTArray((0x00, 0x01, 0x64, 0x65, 0x66, 0x7f))),
+ remote_value.from_knx(DPTArray((0x64, 0x65, 0x66, 0x7f, 0x00, 0x01))),
[255, 101, 102, 127])
def test_to_knx_error(self):
@@ -90,7 +90,7 @@ class TestRemoteValueColorRGBW(unittest.TestCase):
telegram,
Telegram(
GroupAddress('1/2/3'),
- payload=DPTArray((0x00, 0x0f, 0x64, 0x65, 0x66, 0x67))))
+ payload=DPTArray((0x64, 0x65, 0x66, 0x67, 0x00, 0x0f))))
self.loop.run_until_complete(asyncio.Task(remote_value.set((100, 101, 104, 105))))
self.assertEqual(xknx.telegrams.qsize(), 1)
telegram = xknx.telegrams.get_nowait()
@@ -98,7 +98,7 @@ class TestRemoteValueColorRGBW(unittest.TestCase):
telegram,
Telegram(
GroupAddress('1/2/3'),
- payload=DPTArray((0x00, 0x0f, 0x64, 0x65, 0x68, 0x69))))
+ payload=DPTArray((0x64, 0x65, 0x68, 0x69, 0x00, 0x0f))))
def test_process(self):
"""Test process telegram."""
@@ -108,7 +108,7 @@ class TestRemoteValueColorRGBW(unittest.TestCase):
group_address=GroupAddress("1/2/3"))
telegram = Telegram(
group_address=GroupAddress("1/2/3"),
- payload=DPTArray((0x00, 0x0f, 0x64, 0x65, 0x66, 0x67)))
+ payload=DPTArray((0x64, 0x65, 0x66, 0x67, 0x00, 0x0f)))
self.loop.run_until_complete(asyncio.Task(remote_value.process(telegram)))
self.assertEqual(remote_value.value, [100, 101, 102, 103])
| XKNXException is an unhashable exception
**Description of problem:**
It seems the XKNXException exception is not hashable, causing problems with the Python traceback module: https://bugs.python.org/issue28603. Oddly, that ought to have been solved already.
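For reference, the patch above fixes this by making the base exception hashable (and comparable) through its string form:
```python
# xknx/exceptions/exception.py (excerpt from the patch above):
class XKNXException(Exception):
    """Default XKNX Exception."""

    def __eq__(self, other):
        """Equal operator."""
        return repr(self) == repr(other)

    def __hash__(self):
        """Hash function."""
        return hash(str(self))

    def __repr__(self):
        """Representation of object."""
        return str(self)
```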
- [ ] using xknx standalone
- [X] using Home-Assistant knx plugin
**Version information:**
- xknx / Home-Assistant release with the issue:
- last working xknx / Home-Assistant release (if known): 0.98
**KNX installation:**
<!--
Please provide details about your installation.
- Manufacturer and model of relevant actors, sensors or interfaces.
- if you have access to ETS:
- provide relevant group address parameters (DPT, Flags)
- if applicable: excerpt of bus monitor output
-->
**Problem-relevant `xknx.yaml` or `configuration.yaml` entries (fill out even if it seems unimportant):**
**Traceback (if applicable):**
Aug 22 08:18:24 hass hass[35975]: TypeError: unhashable type: 'XKNXException'
Aug 22 08:18:24 hass hass[35975]: _seen.add(exc_value)
Aug 22 08:18:24 hass hass[35975]: File "/usr/lib/python3.6/traceback.py", line 462, in __init__
Aug 22 08:18:24 hass hass[35975]: type(value), value, tb, limit=limit).format(chain=chain):
Aug 22 08:18:24 hass hass[35975]: File "/usr/lib/python3.6/traceback.py", line 100, in print_exception
Aug 22 08:18:24 hass hass[35975]: traceback.print_exception(ei[0], ei[1], tb, None, sio)
Aug 22 08:18:24 hass hass[35975]: File "/usr/lib/python3.6/logging/__init__.py", line 533, in formatException
Aug 22 08:18:24 hass hass[35975]: record.exc_text = self.formatException(record.exc_info)
Aug 22 08:18:24 hass hass[35975]: File "/usr/lib/python3.6/logging/__init__.py", line 583, in format
Aug 22 08:18:24 hass hass[35975]: return fmt.format(record)
Aug 22 08:18:24 hass hass[35975]: File "/usr/lib/python3.6/logging/__init__.py", line 838, in format
Aug 22 08:18:24 hass hass[35975]: msg = self.format(record)
Aug 22 08:18:24 hass hass[35975]: File "/usr/lib/python3.6/logging/__init__.py", line 992, in emit | 0.0 | 4b71c28db4cde694695da3dfad0072dd5383717f | [
"test/core_tests/exceptions_test.py::test_exceptions[base0-equal0-diff0]",
"test/core_tests/exceptions_test.py::test_exceptions[base1-equal1-diff1]",
"test/core_tests/exceptions_test.py::test_exceptions[base2-equal2-diff2]",
"test/core_tests/exceptions_test.py::test_exceptions[base3-equal3-diff3]",
"test/core_tests/exceptions_test.py::test_exceptions[base4-equal4-diff4]",
"test/core_tests/exceptions_test.py::test_exceptions[base5-equal5-diff5]",
"test/devices_tests/light_test.py::TestLight::test_process_color_rgbw",
"test/devices_tests/light_test.py::TestLight::test_set_color_rgbw",
"test/devices_tests/remote_value_color_rgbw_test.py::TestRemoteValueColorRGBW::test_from_knx",
"test/devices_tests/remote_value_color_rgbw_test.py::TestRemoteValueColorRGBW::test_process",
"test/devices_tests/remote_value_color_rgbw_test.py::TestRemoteValueColorRGBW::test_set",
"test/devices_tests/remote_value_color_rgbw_test.py::TestRemoteValueColorRGBW::test_to_knx"
] | [
"test/devices_tests/light_test.py::TestLight::test_do",
"test/devices_tests/light_test.py::TestLight::test_has_group_address",
"test/devices_tests/light_test.py::TestLight::test_process_color",
"test/devices_tests/light_test.py::TestLight::test_process_color_temperature",
"test/devices_tests/light_test.py::TestLight::test_process_color_temperature_payload_invalid_length",
"test/devices_tests/light_test.py::TestLight::test_process_color_temperature_wrong_payload",
"test/devices_tests/light_test.py::TestLight::test_process_dimm",
"test/devices_tests/light_test.py::TestLight::test_process_dimm_payload_invalid_length",
"test/devices_tests/light_test.py::TestLight::test_process_dimm_wrong_payload",
"test/devices_tests/light_test.py::TestLight::test_process_switch",
"test/devices_tests/light_test.py::TestLight::test_process_switch_callback",
"test/devices_tests/light_test.py::TestLight::test_process_tunable_white",
"test/devices_tests/light_test.py::TestLight::test_process_tunable_white_payload_invalid_length",
"test/devices_tests/light_test.py::TestLight::test_process_tunable_white_wrong_payload",
"test/devices_tests/light_test.py::TestLight::test_set_brightness",
"test/devices_tests/light_test.py::TestLight::test_set_brightness_not_dimmable",
"test/devices_tests/light_test.py::TestLight::test_set_color",
"test/devices_tests/light_test.py::TestLight::test_set_color_not_possible",
"test/devices_tests/light_test.py::TestLight::test_set_color_rgbw_not_possible",
"test/devices_tests/light_test.py::TestLight::test_set_color_temp",
"test/devices_tests/light_test.py::TestLight::test_set_color_temp_unsupported",
"test/devices_tests/light_test.py::TestLight::test_set_off",
"test/devices_tests/light_test.py::TestLight::test_set_on",
"test/devices_tests/light_test.py::TestLight::test_set_tw",
"test/devices_tests/light_test.py::TestLight::test_set_tw_unsupported",
"test/devices_tests/light_test.py::TestLight::test_supports_color_false",
"test/devices_tests/light_test.py::TestLight::test_supports_color_temp_false",
"test/devices_tests/light_test.py::TestLight::test_supports_color_temp_true",
"test/devices_tests/light_test.py::TestLight::test_supports_color_true",
"test/devices_tests/light_test.py::TestLight::test_supports_dimm_no",
"test/devices_tests/light_test.py::TestLight::test_supports_dimm_yes",
"test/devices_tests/light_test.py::TestLight::test_supports_rgbw_false",
"test/devices_tests/light_test.py::TestLight::test_supports_rgbw_true",
"test/devices_tests/light_test.py::TestLight::test_supports_tw_no",
"test/devices_tests/light_test.py::TestLight::test_supports_tw_yes",
"test/devices_tests/light_test.py::TestLight::test_sync",
"test/devices_tests/light_test.py::TestLight::test_sync_state_address",
"test/devices_tests/light_test.py::TestLight::test_wrong_do",
"test/devices_tests/remote_value_color_rgbw_test.py::TestRemoteValueColorRGBW::test_to_knx_error",
"test/devices_tests/remote_value_color_rgbw_test.py::TestRemoteValueColorRGBW::test_to_process_error"
] | {
"failed_lite_validators": [
"has_hyperlinks",
"has_added_files",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false
} | 2019-09-10 16:14:51+00:00 | mit | 842 |
|
XKNX__xknx-235 | diff --git a/examples/example_light_rgbw.py b/examples/example_light_rgbw.py
new file mode 100644
index 00000000..dd060c98
--- /dev/null
+++ b/examples/example_light_rgbw.py
@@ -0,0 +1,49 @@
+"""Example for setting different colors on a RGBW light."""
+import asyncio
+
+from xknx import XKNX
+from xknx.devices import RemoteValueColorRGBW
+
+
+async def main():
+ """Connect to KNX/IP bus and set different colors."""
+ xknx = XKNX()
+ await xknx.start()
+
+ rgbw = RemoteValueColorRGBW(xknx,
+ group_address='1/1/40',
+ group_address_state='1/1/41',
+ device_name="RGBWLight")
+
+ await rgbw.set([255, 255, 255, 0, 15]) # cold-white
+ await asyncio.sleep(1)
+ await rgbw.set([0, 0, 0, 255, 15]) # warm-white
+ await asyncio.sleep(1)
+ await rgbw.set([0, 0, 0, 0, 15]) # off
+ await asyncio.sleep(1)
+
+ await rgbw.set([255, 0, 0, 0]) # red
+ await asyncio.sleep(1)
+ await rgbw.set([0, 255, 0, 0]) # green
+ await asyncio.sleep(1)
+ await rgbw.set([0, 0, 255, 0]) # blue
+ await asyncio.sleep(1)
+ await rgbw.set([0, 0, 0, 0, 15]) # off
+ await asyncio.sleep(1)
+
+ await rgbw.set([255, 255, 0, 0, 15])
+ await asyncio.sleep(1)
+ await rgbw.set([0, 255, 255, 0, 15])
+ await asyncio.sleep(1)
+ await rgbw.set([255, 0, 255, 0, 15])
+ await asyncio.sleep(1)
+ await rgbw.set([0, 0, 0, 0, 15]) # off
+ await asyncio.sleep(1)
+
+ await xknx.stop()
+
+
+# pylint: disable=invalid-name
+loop = asyncio.get_event_loop()
+loop.run_until_complete(main())
+loop.close()
diff --git a/xknx/devices/remote_value_color_rgbw.py b/xknx/devices/remote_value_color_rgbw.py
index 6c2e436e..beea0ca9 100644
--- a/xknx/devices/remote_value_color_rgbw.py
+++ b/xknx/devices/remote_value_color_rgbw.py
@@ -35,19 +35,23 @@ class RemoteValueColorRGBW(RemoteValue):
Convert value (4-6 bytes) to payload (6 bytes).
* Structure of DPT 251.600
- ** Bytes 0, 1:
- *** Bit 0-11: 0
- *** Bit 12,13,14,15: R,G,B,W value valid?
- ** Byte 2: R value
- ** Byte 3: G value
- ** Byte 4: B value
- ** Byte 5: W value
+ ** Byte 0: R value
+ ** Byte 1: G value
+ ** Byte 2: B value
+ ** Byte 3: W value
+ ** Byte 4: 0x00 (reserved)
+ ** Byte 5:
+ *** Bit 0: W value valid?
+ *** Bit 1: B value valid?
+ *** Bit 2: G value valid?
+ *** Bit 3: R value valid?
+ *** Bit 4-7: 0
In case we receive
* > 6 bytes: error
* 6 bytes: all bytes are passed through
- * 5 bytes: 0x00 left padding
- * 4 bytes: 0x000f left padding
+ * 5 bytes: 0x00?? fill up to 6 bytes
+ * 4 bytes: 0x000f right padding to 6 bytes
* < 4 bytes: error
"""
if not isinstance(value, (list, tuple)):
@@ -56,12 +60,16 @@ class RemoteValueColorRGBW(RemoteValue):
if len(value) < 4 or len(value) > 6:
raise ConversionError("Cannot serialize value to DPT 251.600 (wrong length, expecting list of 4-6 bytes)",
value=value, type=type(value))
- rgbw = value[len(value)-4:]
+ rgbw = value[:4]
if any(not isinstance(color, int) for color in rgbw) \
or any(color < 0 for color in rgbw) \
or any(color > 255 for color in rgbw):
raise ConversionError("Cannot serialize DPT 251.600 (wrong RGBW values)", value=value)
- return DPTArray([0x00, 0x0f][:6-len(value)] + list(value))
+ if len(value) < 5:
+ return DPTArray(list(rgbw) + [0x00, 0x0f])
+ if len(value) < 6:
+ return DPTArray(list(rgbw) + [0x00] + list(value[4:]))
+ return DPTArray(value)
def from_knx(self, payload):
"""
@@ -72,7 +80,7 @@ class RemoteValueColorRGBW(RemoteValue):
"""
result = []
for i in range(0, len(payload.value) - 2):
- valid = payload.value[1] & (0x08 >> i) != 0
- result.append(payload.value[2 + i] if valid else self.previous_value[i])
+ valid = (payload.value[5] & (0x08 >> i)) != 0 # R,G,B,W value valid?
+ result.append(payload.value[i] if valid else self.previous_value[i])
self.previous_value = result
return result
diff --git a/xknx/exceptions/exception.py b/xknx/exceptions/exception.py
index 879e14b2..626299bd 100644
--- a/xknx/exceptions/exception.py
+++ b/xknx/exceptions/exception.py
@@ -6,7 +6,15 @@ class XKNXException(Exception):
def __eq__(self, other):
"""Equal operator."""
- return self.__dict__ == other.__dict__
+ return repr(self) == repr(other)
+
+ def __hash__(self):
+ """Hash function."""
+ return hash(str(self))
+
+ def __repr__(self):
+ """Representation of object."""
+ return str(self)
class CouldNotParseTelegram(XKNXException):
@@ -14,7 +22,7 @@ class CouldNotParseTelegram(XKNXException):
def __init__(self, description, **kwargs):
"""Initialize CouldNotParseTelegram class."""
- super().__init__("Could not parse Telegram")
+ super().__init__()
self.description = description
self.parameter = kwargs
@@ -32,7 +40,7 @@ class CouldNotParseKNXIP(XKNXException):
def __init__(self, description=""):
"""Initialize CouldNotParseTelegram class."""
- super().__init__("Could not parse KNXIP")
+ super().__init__()
self.description = description
def __str__(self):
@@ -46,7 +54,7 @@ class ConversionError(XKNXException):
def __init__(self, description, **kwargs):
"""Initialize ConversionError class."""
- super().__init__("Conversion Error")
+ super().__init__()
self.description = description
self.parameter = kwargs
@@ -63,7 +71,7 @@ class CouldNotParseAddress(XKNXException):
def __init__(self, address=None):
"""Initialize CouldNotParseAddress class."""
- super().__init__("Could not parse address")
+ super().__init__()
self.address = address
def __str__(self):
@@ -76,7 +84,7 @@ class DeviceIllegalValue(XKNXException):
def __init__(self, value, description):
"""Initialize DeviceIllegalValue class."""
- super().__init__("Illegal value for device")
+ super().__init__()
self.value = value
self.description = description
diff --git a/xknx/knxip/cemi_frame.py b/xknx/knxip/cemi_frame.py
index ec925a83..3f023c62 100644
--- a/xknx/knxip/cemi_frame.py
+++ b/xknx/knxip/cemi_frame.py
@@ -105,7 +105,10 @@ class CEMIFrame(KNXIPBody):
def from_knx(self, raw):
"""Parse/deserialize from KNX/IP raw data."""
- self.code = CEMIMessageCode(raw[0])
+ try:
+ self.code = CEMIMessageCode(raw[0])
+ except ValueError:
+ raise CouldNotParseKNXIP("Could not understand CEMIMessageCode: {0} ".format(raw[0]))
if self.code == CEMIMessageCode.L_DATA_IND or \
self.code == CEMIMessageCode.L_Data_REQ or \
@@ -140,7 +143,11 @@ class CEMIFrame(KNXIPBody):
tpci_apci = cemi[9 + addil] * 256 + cemi[10 + addil]
- self.cmd = APCICommand(tpci_apci & 0xFFC0)
+ try:
+ self.cmd = APCICommand(tpci_apci & 0xFFC0)
+ except ValueError:
+ raise CouldNotParseKNXIP(
+ "APCI not supported: {0:#012b}".format(tpci_apci & 0xFFC0))
apdu = cemi[10 + addil:]
if len(apdu) != self.mpdu_len:
| XKNX/xknx | 4b71c28db4cde694695da3dfad0072dd5383717f | diff --git a/test/core_tests/exceptions_test.py b/test/core_tests/exceptions_test.py
new file mode 100644
index 00000000..41fc1b74
--- /dev/null
+++ b/test/core_tests/exceptions_test.py
@@ -0,0 +1,51 @@
+"""Unit tests for exceptions"""
+import pytest
+
+from xknx.exceptions import (
+ ConversionError, CouldNotParseAddress, CouldNotParseKNXIP,
+ CouldNotParseTelegram, DeviceIllegalValue, XKNXException)
+
+
[email protected](
+ "base,equal,diff",
+ [
+ (
+ ConversionError("desc1"),
+ ConversionError("desc1"),
+ ConversionError("desc2"),
+ ),
+ (
+ CouldNotParseAddress(123),
+ CouldNotParseAddress(123),
+ CouldNotParseAddress(321),
+ ),
+ (
+ CouldNotParseKNXIP("desc1"),
+ CouldNotParseKNXIP("desc1"),
+ CouldNotParseKNXIP("desc2"),
+ ),
+ (
+ CouldNotParseTelegram("desc", arg1=1, arg2=2),
+ CouldNotParseTelegram("desc", arg1=1, arg2=2),
+ CouldNotParseTelegram("desc", arg1=2, arg2=1),
+ ),
+ (
+ DeviceIllegalValue("value1", "desc"),
+ DeviceIllegalValue("value1", "desc"),
+ DeviceIllegalValue("value1", "desc2"),
+ ),
+ (
+ XKNXException("desc1"),
+ XKNXException("desc1"),
+ XKNXException("desc2"),
+ ),
+ ],
+)
+def test_exceptions(base, equal, diff):
+ """Test hashability and repr of exceptions."""
+ assert hash(base) == hash(equal)
+ assert hash(base) != hash(diff)
+ assert base == equal
+ assert base != diff
+ assert repr(base) == repr(equal)
+ assert repr(base) != repr(diff)
diff --git a/test/devices_tests/light_test.py b/test/devices_tests/light_test.py
index b6283672..5de6eddf 100644
--- a/test/devices_tests/light_test.py
+++ b/test/devices_tests/light_test.py
@@ -318,7 +318,7 @@ class TestLight(unittest.TestCase):
self.assertEqual(xknx.telegrams.qsize(), 1)
telegram = xknx.telegrams.get_nowait()
self.assertEqual(telegram,
- Telegram(GroupAddress('1/2/5'), payload=DPTArray((0, 15, 23, 24, 25, 26))))
+ Telegram(GroupAddress('1/2/5'), payload=DPTArray((23, 24, 25, 26, 0, 15))))
self.assertEqual(light.current_color, ([23, 24, 25], 26))
def test_set_color_rgbw_not_possible(self):
@@ -489,7 +489,7 @@ class TestLight(unittest.TestCase):
group_address_color='1/2/4',
group_address_rgbw='1/2/5')
self.assertEqual(light.current_color, (None, None))
- telegram = Telegram(GroupAddress('1/2/5'), payload=DPTArray((0, 15, 23, 24, 25, 26)))
+ telegram = Telegram(GroupAddress('1/2/5'), payload=DPTArray((23, 24, 25, 26, 0, 15)))
self.loop.run_until_complete(asyncio.Task(light.process(telegram)))
self.assertEqual(light.current_color, ([23, 24, 25], 26))
diff --git a/test/devices_tests/remote_value_color_rgbw_test.py b/test/devices_tests/remote_value_color_rgbw_test.py
index 2a56d3b0..d352c299 100644
--- a/test/devices_tests/remote_value_color_rgbw_test.py
+++ b/test/devices_tests/remote_value_color_rgbw_test.py
@@ -26,32 +26,32 @@ class TestRemoteValueColorRGBW(unittest.TestCase):
remote_value = RemoteValueColorRGBW(xknx)
input_list = [100, 101, 102, 127]
input_tuple = (100, 101, 102, 127)
- expected = DPTArray((0x00, 0x0f, 0x64, 0x65, 0x66, 0x7f))
+ expected = DPTArray((0x64, 0x65, 0x66, 0x7f, 0x00, 0x0f))
self.assertEqual(remote_value.to_knx(input_tuple), expected)
self.assertEqual(remote_value.to_knx(input_list), expected)
- self.assertEqual(remote_value.to_knx((15,) + input_tuple), expected)
- self.assertEqual(remote_value.to_knx([15] + input_list), expected)
- self.assertEqual(remote_value.to_knx((0, 15) + input_tuple), expected)
- self.assertEqual(remote_value.to_knx([0, 15] + input_list), expected)
+ self.assertEqual(remote_value.to_knx(input_tuple + (15,)), expected)
+ self.assertEqual(remote_value.to_knx(input_list + [15]), expected)
+ self.assertEqual(remote_value.to_knx(input_tuple + (0, 15)), expected)
+ self.assertEqual(remote_value.to_knx(input_list + [0, 15]), expected)
def test_from_knx(self):
"""Test from_knx function with normal operation."""
xknx = XKNX(loop=self.loop)
remote_value = RemoteValueColorRGBW(xknx)
self.assertEqual(
- remote_value.from_knx(DPTArray((0x00, 0x00, 0x64, 0x65, 0x66, 0x7f))),
+ remote_value.from_knx(DPTArray((0x64, 0x65, 0x66, 0x7f, 0x00, 0x00))),
[0, 0, 0, 0])
self.assertEqual(
- remote_value.from_knx(DPTArray((0x00, 0x0f, 0x64, 0x65, 0x66, 0x7f))),
+ remote_value.from_knx(DPTArray((0x64, 0x65, 0x66, 0x7f, 0x00, 0x0f))),
[100, 101, 102, 127])
self.assertEqual(
- remote_value.from_knx(DPTArray((0x00, 0x00, 0x64, 0x65, 0x66, 0x7f))),
+ remote_value.from_knx(DPTArray((0x64, 0x65, 0x66, 0x7f, 0x00, 0x00))),
[100, 101, 102, 127])
self.assertEqual(
- remote_value.from_knx(DPTArray((0x00, 0x09, 0xff, 0x65, 0x66, 0xff))),
+ remote_value.from_knx(DPTArray((0xff, 0x65, 0x66, 0xff, 0x00, 0x09))),
[255, 101, 102, 255])
self.assertEqual(
- remote_value.from_knx(DPTArray((0x00, 0x01, 0x64, 0x65, 0x66, 0x7f))),
+ remote_value.from_knx(DPTArray((0x64, 0x65, 0x66, 0x7f, 0x00, 0x01))),
[255, 101, 102, 127])
def test_to_knx_error(self):
@@ -90,7 +90,7 @@ class TestRemoteValueColorRGBW(unittest.TestCase):
telegram,
Telegram(
GroupAddress('1/2/3'),
- payload=DPTArray((0x00, 0x0f, 0x64, 0x65, 0x66, 0x67))))
+ payload=DPTArray((0x64, 0x65, 0x66, 0x67, 0x00, 0x0f))))
self.loop.run_until_complete(asyncio.Task(remote_value.set((100, 101, 104, 105))))
self.assertEqual(xknx.telegrams.qsize(), 1)
telegram = xknx.telegrams.get_nowait()
@@ -98,7 +98,7 @@ class TestRemoteValueColorRGBW(unittest.TestCase):
telegram,
Telegram(
GroupAddress('1/2/3'),
- payload=DPTArray((0x00, 0x0f, 0x64, 0x65, 0x68, 0x69))))
+ payload=DPTArray((0x64, 0x65, 0x68, 0x69, 0x00, 0x0f))))
def test_process(self):
"""Test process telegram."""
@@ -108,7 +108,7 @@ class TestRemoteValueColorRGBW(unittest.TestCase):
group_address=GroupAddress("1/2/3"))
telegram = Telegram(
group_address=GroupAddress("1/2/3"),
- payload=DPTArray((0x00, 0x0f, 0x64, 0x65, 0x66, 0x67)))
+ payload=DPTArray((0x64, 0x65, 0x66, 0x67, 0x00, 0x0f)))
self.loop.run_until_complete(asyncio.Task(remote_value.process(telegram)))
self.assertEqual(remote_value.value, [100, 101, 102, 103])
diff --git a/test/knxip_tests/cemi_frame_test.py b/test/knxip_tests/cemi_frame_test.py
new file mode 100644
index 00000000..dae3f63c
--- /dev/null
+++ b/test/knxip_tests/cemi_frame_test.py
@@ -0,0 +1,71 @@
+"""Tests for the CEMIFrame object"""
+
+from unittest.mock import MagicMock
+
+from pytest import fixture, raises
+
+from xknx.exceptions import CouldNotParseKNXIP
+from xknx.knx import DPTBinary, PhysicalAddress
+from xknx.knxip.cemi_frame import CEMIFrame
+from xknx.knxip.knxip_enum import APCICommand, CEMIMessageCode
+
+
+def get_data(code, adil, flags, src, dst, mpdu_len, tpci_apci, payload):
+ return [
+ code,
+ adil, # adil
+ (flags >> 8) & 255, # flags
+ flags & 255, # flags
+ (src >> 8) & 255, # src
+ src & 255, # src
+ (dst >> 8) & 255, # dst
+ dst & 255, # dst
+ mpdu_len, # mpdu_len
+ (tpci_apci >> 8) & 255, # tpci_apci
+ tpci_apci & 255, # tpci_apci
+ *payload, # payload
+ ]
+
+
+@fixture(name="frame")
+def fixture_frame():
+ """Fixture to get a simple mocked frame"""
+ xknx = MagicMock()
+ return CEMIFrame(xknx)
+
+
+def test_valid_command(frame):
+ """Test for valid frame parsing"""
+ packet_len = frame.from_knx(get_data(0x29, 0, 0, 0, 0, 1, 0, []))
+ assert frame.code == CEMIMessageCode.L_DATA_IND
+ assert frame.cmd == APCICommand.GROUP_READ
+ assert frame.flags == 0
+ assert frame.mpdu_len == 1
+ assert frame.payload == DPTBinary(0)
+ assert frame.src_addr == PhysicalAddress(0)
+ assert frame.dst_addr == PhysicalAddress(0)
+ assert packet_len == 11
+
+
+def test_invalid_tpci_apci(frame):
+ """Test for invalid APCICommand"""
+ with raises(CouldNotParseKNXIP, match=r".*APCI not supported: .*"):
+ frame.from_knx(get_data(0x29, 0, 0, 0, 0, 1, 0xFFC0, []))
+
+
+def test_invalid_apdu_len(frame):
+ """Test for invalid apdu len"""
+ with raises(CouldNotParseKNXIP, match=r".*APDU LEN should be .*"):
+ frame.from_knx(get_data(0x29, 0, 0, 0, 0, 2, 0, []))
+
+
+def test_invalid_invalid_len(frame):
+ """Test for invalid cemi len"""
+ with raises(CouldNotParseKNXIP, match=r".*CEMI too small"):
+ frame.from_knx(get_data(0x29, 0, 0, 0, 0, 2, 0, [])[:5])
+
+
+def test_invalid_invalid_code(frame):
+ """Test for invalid cemi code"""
+ with raises(CouldNotParseKNXIP, match=r".*Could not understand CEMIMessageCode"):
+ frame.from_knx(get_data(0x0, 0, 0, 0, 0, 2, 0, []))
| ValueError: [448,256] is not a valid APCICommand
**Description of problem:**
An exception occurs during the processing of messages, likely while ETS is communicating with a new Secure IP Interface.
- [ ] using xknx standalone
- [X] using Home-Assistant knx plugin
**Version information:**
- xknx / Home-Assistant release with the issue: 0.98
- last working xknx / Home-Assistant release (if known): None
**KNX installation:**
ETS 5.7.2
**Problem-relevant `xknx.yaml` or `configuration.yaml` entries (fill out even if it seems unimportant):**
**Traceback (if applicable):**
```
Aug 19 19:09:24 hass hass[32967]: Traceback (most recent call last):
Aug 19 19:09:24 hass hass[32967]: File "/usr/lib/python3.6/asyncio/events.py", line 127, in _run
Aug 19 19:09:24 hass hass[32967]: self._callback(*self._args)
Aug 19 19:09:24 hass hass[32967]: File "/usr/lib/python3.6/asyncio/selector_events.py", line 1078, in _read_ready
Aug 19 19:09:24 hass hass[32967]: self._protocol.datagram_received(data, addr)
Aug 19 19:09:24 hass hass[32967]: File "/home/homeassistant/env/lib/python3.6/site-packages/xknx/io/udp_client.py", line 55, in datagram_received
Aug 19 19:09:24 hass hass[32967]: self.data_received_callback(data)
Aug 19 19:09:24 hass hass[32967]: File "/home/homeassistant/env/lib/python3.6/site-packages/xknx/io/udp_client.py", line 87, in data_received_callback
Aug 19 19:09:24 hass hass[32967]: knxipframe.from_knx(raw)
Aug 19 19:09:24 hass hass[32967]: File "/home/homeassistant/env/lib/python3.6/site-packages/xknx/knxip/knxip.py", line 78, in from_knx
Aug 19 19:09:24 hass hass[32967]: pos += self.body.from_knx(data[pos:])
Aug 19 19:09:24 hass hass[32967]: File "/home/homeassistant/env/lib/python3.6/site-packages/xknx/knxip/tunnelling_request.py", line 48, in from_knx
Aug 19 19:09:24 hass hass[32967]: pos += self.cemi.from_knx(raw[pos:])
Aug 19 19:09:24 hass hass[32967]: File "/home/homeassistant/env/lib/python3.6/site-packages/xknx/knxip/cemi_frame.py", line 113, in from_knx
Aug 19 19:09:24 hass hass[32967]: return self.from_knx_data_link_layer(raw)
Aug 19 19:09:24 hass hass[32967]: File "/home/homeassistant/env/lib/python3.6/site-packages/xknx/knxip/cemi_frame.py", line 143, in from_knx_data_link_layer
Aug 19 19:09:24 hass hass[32967]: self.cmd = APCICommand(tpci_apci & 0xFFC0)
Aug 19 19:09:24 hass hass[32967]: File "/usr/lib/python3.6/enum.py", line 291, in __call__
Aug 19 19:09:24 hass hass[32967]: return cls.__new__(cls, value)
Aug 19 19:09:24 hass hass[32967]: File "/usr/lib/python3.6/enum.py", line 533, in __new__
Aug 19 19:09:24 hass hass[32967]: return cls._missing_(value)
Aug 19 19:09:24 hass hass[32967]: File "/usr/lib/python3.6/enum.py", line 546, in _missing_
Aug 19 19:09:24 hass hass[32967]: raise ValueError("%r is not a valid %s" % (value, cls.__name__))
Aug 19 19:09:24 hass hass[32967]: ValueError: 448 is not a valid APCICommand
```
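The root cause is the last application frame above: `cemi_frame.py` feeds the masked TPCI/APCI value straight into an `Enum` constructor, so any service outside the group-communication APCIs that xknx models (such as the point-to-point services ETS uses while programming devices) raises and kills the datagram handler. A minimal, self-contained reproduction of the pattern, using a trimmed illustrative enum rather than the full xknx one:

```python
from enum import Enum


class APCICommand(Enum):
    """Trimmed for illustration; xknx.knxip.knxip_enum defines more members."""

    GROUP_READ = 0x0000
    GROUP_RESPONSE = 0x0040
    GROUP_WRITE = 0x0080


tpci_apci = 0x01C0  # 448, the value from the log above
cmd = APCICommand(tpci_apci & 0xFFC0)  # ValueError: 448 is not a valid APCICommand
```

The patch above wraps exactly this lookup and converts the `ValueError` into a `CouldNotParseKNXIP` exception instead.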
```
Aug 22 08:15:49 hass hass[35975]: ValueError: 256 is not a valid APCICommand
Aug 22 08:15:49 hass hass[35975]: raise ValueError("%r is not a valid %s" % (value, cls.__name__))
Aug 22 08:15:49 hass hass[35975]: File "/usr/lib/python3.6/enum.py", line 546, in _missing_
Aug 22 08:15:49 hass hass[35975]: return cls._missing_(value)
Aug 22 08:15:49 hass hass[35975]: File "/usr/lib/python3.6/enum.py", line 533, in __new__
Aug 22 08:15:49 hass hass[35975]: return cls.__new__(cls, value)
Aug 22 08:15:49 hass hass[35975]: File "/usr/lib/python3.6/enum.py", line 291, in __call__
Aug 22 08:15:49 hass hass[35975]: self.cmd = APCICommand(tpci_apci & 0xFFC0)
Aug 22 08:18:24 hass hass[35975]: File "/usr/lib/python3.6/asyncio/base_events.py", line 1426, in _run_once
Aug 22 08:18:24 hass hass[35975]: self._run_once()
Aug 22 08:18:24 hass hass[35975]: File "/usr/lib/python3.6/asyncio/base_events.py", line 421, in run_forever
Aug 22 08:18:24 hass hass[35975]: self.run_forever()
Aug 22 08:18:24 hass hass[35975]: File "/usr/lib/python3.6/asyncio/base_events.py", line 454, in run_until_complete
Aug 22 08:18:24 hass hass[35975]: return loop.run_until_complete(main)
Aug 22 08:18:24 hass hass[35975]: File "/home/homeassistant/env/lib/python3.6/site-packages/homeassistant/util/async_.py", line 37, in asyncio_run
Aug 22 08:18:24 hass hass[35975]: exit_code = asyncio_run(setup_and_run_hass(config_dir, args))
Aug 22 08:18:24 hass hass[35975]: File "/home/homeassistant/env/lib/python3.6/site-packages/homeassistant/__main__.py", line 419, in main
Aug 22 08:18:24 hass hass[35975]: sys.exit(main())
Aug 22 08:18:24 hass hass[35975]: File "/home/homeassistant/env/bin/hass", line 10, in <module>
``` | 0.0 | 4b71c28db4cde694695da3dfad0072dd5383717f | [
"test/core_tests/exceptions_test.py::test_exceptions[base0-equal0-diff0]",
"test/core_tests/exceptions_test.py::test_exceptions[base1-equal1-diff1]",
"test/core_tests/exceptions_test.py::test_exceptions[base2-equal2-diff2]",
"test/core_tests/exceptions_test.py::test_exceptions[base3-equal3-diff3]",
"test/core_tests/exceptions_test.py::test_exceptions[base4-equal4-diff4]",
"test/core_tests/exceptions_test.py::test_exceptions[base5-equal5-diff5]",
"test/devices_tests/light_test.py::TestLight::test_process_color_rgbw",
"test/devices_tests/light_test.py::TestLight::test_set_color_rgbw",
"test/devices_tests/remote_value_color_rgbw_test.py::TestRemoteValueColorRGBW::test_from_knx",
"test/devices_tests/remote_value_color_rgbw_test.py::TestRemoteValueColorRGBW::test_process",
"test/devices_tests/remote_value_color_rgbw_test.py::TestRemoteValueColorRGBW::test_set",
"test/devices_tests/remote_value_color_rgbw_test.py::TestRemoteValueColorRGBW::test_to_knx",
"test/knxip_tests/cemi_frame_test.py::test_invalid_tpci_apci",
"test/knxip_tests/cemi_frame_test.py::test_invalid_invalid_code"
] | [
"test/devices_tests/light_test.py::TestLight::test_do",
"test/devices_tests/light_test.py::TestLight::test_has_group_address",
"test/devices_tests/light_test.py::TestLight::test_process_color",
"test/devices_tests/light_test.py::TestLight::test_process_color_temperature",
"test/devices_tests/light_test.py::TestLight::test_process_color_temperature_payload_invalid_length",
"test/devices_tests/light_test.py::TestLight::test_process_color_temperature_wrong_payload",
"test/devices_tests/light_test.py::TestLight::test_process_dimm",
"test/devices_tests/light_test.py::TestLight::test_process_dimm_payload_invalid_length",
"test/devices_tests/light_test.py::TestLight::test_process_dimm_wrong_payload",
"test/devices_tests/light_test.py::TestLight::test_process_switch",
"test/devices_tests/light_test.py::TestLight::test_process_switch_callback",
"test/devices_tests/light_test.py::TestLight::test_process_tunable_white",
"test/devices_tests/light_test.py::TestLight::test_process_tunable_white_payload_invalid_length",
"test/devices_tests/light_test.py::TestLight::test_process_tunable_white_wrong_payload",
"test/devices_tests/light_test.py::TestLight::test_set_brightness",
"test/devices_tests/light_test.py::TestLight::test_set_brightness_not_dimmable",
"test/devices_tests/light_test.py::TestLight::test_set_color",
"test/devices_tests/light_test.py::TestLight::test_set_color_not_possible",
"test/devices_tests/light_test.py::TestLight::test_set_color_rgbw_not_possible",
"test/devices_tests/light_test.py::TestLight::test_set_color_temp",
"test/devices_tests/light_test.py::TestLight::test_set_color_temp_unsupported",
"test/devices_tests/light_test.py::TestLight::test_set_off",
"test/devices_tests/light_test.py::TestLight::test_set_on",
"test/devices_tests/light_test.py::TestLight::test_set_tw",
"test/devices_tests/light_test.py::TestLight::test_set_tw_unsupported",
"test/devices_tests/light_test.py::TestLight::test_supports_color_false",
"test/devices_tests/light_test.py::TestLight::test_supports_color_temp_false",
"test/devices_tests/light_test.py::TestLight::test_supports_color_temp_true",
"test/devices_tests/light_test.py::TestLight::test_supports_color_true",
"test/devices_tests/light_test.py::TestLight::test_supports_dimm_no",
"test/devices_tests/light_test.py::TestLight::test_supports_dimm_yes",
"test/devices_tests/light_test.py::TestLight::test_supports_rgbw_false",
"test/devices_tests/light_test.py::TestLight::test_supports_rgbw_true",
"test/devices_tests/light_test.py::TestLight::test_supports_tw_no",
"test/devices_tests/light_test.py::TestLight::test_supports_tw_yes",
"test/devices_tests/light_test.py::TestLight::test_sync",
"test/devices_tests/light_test.py::TestLight::test_sync_state_address",
"test/devices_tests/light_test.py::TestLight::test_wrong_do",
"test/devices_tests/remote_value_color_rgbw_test.py::TestRemoteValueColorRGBW::test_to_knx_error",
"test/devices_tests/remote_value_color_rgbw_test.py::TestRemoteValueColorRGBW::test_to_process_error",
"test/knxip_tests/cemi_frame_test.py::test_valid_command",
"test/knxip_tests/cemi_frame_test.py::test_invalid_apdu_len",
"test/knxip_tests/cemi_frame_test.py::test_invalid_invalid_len"
] | {
"failed_lite_validators": [
"has_added_files",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false
} | 2019-09-10 16:47:37+00:00 | mit | 843 |
|
XKNX__xknx-271 | diff --git a/xknx/exceptions/__init__.py b/xknx/exceptions/__init__.py
index 0875fb6f..95609fa0 100644
--- a/xknx/exceptions/__init__.py
+++ b/xknx/exceptions/__init__.py
@@ -2,4 +2,5 @@
# flake8: noqa
from .exception import (
ConversionError, CouldNotParseAddress, CouldNotParseKNXIP,
- CouldNotParseTelegram, DeviceIllegalValue, XKNXException)
+ CouldNotParseTelegram, DeviceIllegalValue, UnsupportedCEMIMessage,
+ XKNXException)
diff --git a/xknx/exceptions/exception.py b/xknx/exceptions/exception.py
index 626299bd..18702952 100644
--- a/xknx/exceptions/exception.py
+++ b/xknx/exceptions/exception.py
@@ -39,7 +39,7 @@ class CouldNotParseKNXIP(XKNXException):
"""Exception class for wrong KNXIP data."""
def __init__(self, description=""):
- """Initialize CouldNotParseTelegram class."""
+ """Initialize CouldNotParseKNXIP class."""
super().__init__()
self.description = description
@@ -49,6 +49,20 @@ class CouldNotParseKNXIP(XKNXException):
.format(self.description)
+class UnsupportedCEMIMessage(XKNXException):
+ """Exception class for unsupported CEMI Messages."""
+
+ def __init__(self, description=""):
+ """Initialize UnsupportedCEMIMessage class."""
+ super().__init__()
+ self.description = description
+
+ def __str__(self):
+ """Return object as readable string."""
+ return '<UnsupportedCEMIMessage description="{0}" />' \
+ .format(self.description)
+
+
class ConversionError(XKNXException):
"""Exception class for error while converting one type to another."""
diff --git a/xknx/knxip/cemi_frame.py b/xknx/knxip/cemi_frame.py
index 1fc80ead..dffa5064 100644
--- a/xknx/knxip/cemi_frame.py
+++ b/xknx/knxip/cemi_frame.py
@@ -12,7 +12,8 @@ Documentation within:
File: AN117 v02.01 KNX IP Communication Medium DV.pdf
"""
from xknx.dpt import DPTArray, DPTBinary
-from xknx.exceptions import ConversionError, CouldNotParseKNXIP
+from xknx.exceptions import (
+ ConversionError, CouldNotParseKNXIP, UnsupportedCEMIMessage)
from xknx.telegram import GroupAddress, PhysicalAddress, Telegram, TelegramType
from .body import KNXIPBody
@@ -106,20 +107,25 @@ class CEMIFrame(KNXIPBody):
def from_knx(self, raw):
"""Parse/deserialize from KNX/IP raw data."""
try:
- self.code = CEMIMessageCode(raw[0])
- except ValueError:
- raise CouldNotParseKNXIP("Could not understand CEMIMessageCode: {0} ".format(raw[0]))
-
- if self.code == CEMIMessageCode.L_DATA_IND or \
- self.code == CEMIMessageCode.L_Data_REQ or \
- self.code == CEMIMessageCode.L_DATA_CON:
- return self.from_knx_data_link_layer(raw)
- raise CouldNotParseKNXIP("Could not understand CEMIMessageCode: {0} / {1}".format(self.code, raw[0]))
+ try:
+ self.code = CEMIMessageCode(raw[0])
+ except ValueError:
+ raise UnsupportedCEMIMessage("CEMIMessageCode not implemented: {0} ".format(raw[0]))
+
+ if self.code == CEMIMessageCode.L_DATA_IND or \
+ self.code == CEMIMessageCode.L_Data_REQ or \
+ self.code == CEMIMessageCode.L_DATA_CON:
+ return self.from_knx_data_link_layer(raw)
+ raise UnsupportedCEMIMessage("Could not handle CEMIMessageCode: {0} / {1}".format(self.code, raw[0]))
+ except UnsupportedCEMIMessage as unsupported_cemi_err:
+ self.xknx.logger.warning("Ignoring not implemented CEMI: %s", unsupported_cemi_err)
+ return len(raw)
def from_knx_data_link_layer(self, cemi):
"""Parse L_DATA_IND, CEMIMessageCode.L_Data_REQ, CEMIMessageCode.L_DATA_CON."""
if len(cemi) < 11:
- raise CouldNotParseKNXIP("CEMI too small")
+ # eg. ETS Line-Scan issues L_DATA_IND with length 10
+ raise UnsupportedCEMIMessage("CEMI too small. Length: {0}; CEMI: {1}".format(len(cemi), cemi))
# AddIL (Additional Info Length), as specified within
# KNX Chapter 3.6.3/4.1.4.3 "Additional information."
@@ -146,7 +152,7 @@ class CEMIFrame(KNXIPBody):
try:
self.cmd = APCICommand(tpci_apci & 0xFFC0)
except ValueError:
- raise CouldNotParseKNXIP(
+ raise UnsupportedCEMIMessage(
"APCI not supported: {0:#012b}".format(tpci_apci & 0xFFC0))
apdu = cemi[10 + addil:]
| XKNX/xknx | 9790cdb696285b5700ec06c571193a7feeb96a39 | diff --git a/test/knxip_tests/cemi_frame_test.py b/test/knxip_tests/cemi_frame_test.py
index b305a3b3..711762ec 100644
--- a/test/knxip_tests/cemi_frame_test.py
+++ b/test/knxip_tests/cemi_frame_test.py
@@ -5,7 +5,7 @@ from unittest.mock import MagicMock
from pytest import fixture, raises
from xknx.dpt import DPTBinary
-from xknx.exceptions import CouldNotParseKNXIP
+from xknx.exceptions import CouldNotParseKNXIP, UnsupportedCEMIMessage
from xknx.knxip.cemi_frame import CEMIFrame
from xknx.knxip.knxip_enum import APCICommand, CEMIMessageCode
from xknx.telegram import PhysicalAddress
@@ -50,8 +50,8 @@ def test_valid_command(frame):
def test_invalid_tpci_apci(frame):
"""Test for invalid APCICommand"""
- with raises(CouldNotParseKNXIP, match=r".*APCI not supported: .*"):
- frame.from_knx(get_data(0x29, 0, 0, 0, 0, 1, 0xFFC0, []))
+ with raises(UnsupportedCEMIMessage, match=r".*APCI not supported: .*"):
+ frame.from_knx_data_link_layer(get_data(0x29, 0, 0, 0, 0, 1, 0xFFC0, []))
def test_invalid_apdu_len(frame):
@@ -62,11 +62,5 @@ def test_invalid_apdu_len(frame):
def test_invalid_invalid_len(frame):
"""Test for invalid cemi len"""
- with raises(CouldNotParseKNXIP, match=r".*CEMI too small"):
- frame.from_knx(get_data(0x29, 0, 0, 0, 0, 2, 0, [])[:5])
-
-
-def test_invalid_invalid_code(frame):
- """Test for invalid cemi code"""
- with raises(CouldNotParseKNXIP, match=r".*Could not understand CEMIMessageCode"):
- frame.from_knx(get_data(0x0, 0, 0, 0, 0, 2, 0, []))
+ with raises(UnsupportedCEMIMessage, match=r".*CEMI too small.*"):
+ frame.from_knx_data_link_layer(get_data(0x29, 0, 0, 0, 0, 2, 0, [])[:5])
| Error when reading group addresses with timers
I have an extensive KNX installation with all kinds of actors, sensors, switches, etc.
My integration with Home Assistant works perfectly, but I do get the following error in the logs when input is detected on an object linked to a timer:
```
Error doing job: Exception in callback UDPTransport._on_read_ready
Traceback (most recent call last):
  File "uvloop/cbhandles.pyx", line 69, in uvloop.loop.Handle._run
  File "uvloop/handles/udp.pyx", line 64, in uvloop.loop.UDPTransport._on_read_ready
  File "/usr/local/lib/python3.6/site-packages/xknx/io/udp_client.py", line 55, in datagram_received
    self.data_received_callback(data)
  File "/usr/local/lib/python3.6/site-packages/xknx/io/udp_client.py", line 85, in data_received_callback
    knxipframe.from_knx(raw)
  File "/usr/local/lib/python3.6/site-packages/xknx/knxip/knxip.py", line 78, in from_knx
    pos += self.body.from_knx(data[pos:])
  File "/usr/local/lib/python3.6/site-packages/xknx/knxip/tunnelling_request.py", line 48, in from_knx
    pos += self.cemi.from_knx(raw[pos:])
  File "/usr/local/lib/python3.6/site-packages/xknx/knxip/cemi_frame.py", line 116, in from_knx
    return self.from_knx_data_link_layer(raw)
  File "/usr/local/lib/python3.6/site-packages/xknx/knxip/cemi_frame.py", line 143, in from_knx_data_link_layer
    self.cmd = APCICommand(tpci_apci & 0xFFC0)
  File "/usr/local/lib/python3.6/enum.py", line 291, in __call__
    return cls.__new__(cls, value)
  File "/usr/local/lib/python3.6/enum.py", line 533, in __new__
    return cls._missing_(value)
  File "/usr/local/lib/python3.6/enum.py", line 546, in _missing_
    raise ValueError("%r is not a valid %s" % (value, cls.__name__))
ValueError: 960 is not a valid APCICommand
```
Two minutes after this error, I receive:
`Error: reading rading group address from KNX bus failed: ErrorCode.E_CONNECTION_ID` | 0.0 | 9790cdb696285b5700ec06c571193a7feeb96a39 | [
"test/knxip_tests/cemi_frame_test.py::test_valid_command",
"test/knxip_tests/cemi_frame_test.py::test_invalid_tpci_apci",
"test/knxip_tests/cemi_frame_test.py::test_invalid_apdu_len",
"test/knxip_tests/cemi_frame_test.py::test_invalid_invalid_len"
] | [] | {
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false
} | 2020-03-22 23:56:49+00:00 | mit | 844 |
|
XKNX__xknx-406 | diff --git a/xknx/devices/weather.py b/xknx/devices/weather.py
index 8b155818..48791d01 100644
--- a/xknx/devices/weather.py
+++ b/xknx/devices/weather.py
@@ -189,7 +189,7 @@ class Weather(Device):
xknx,
group_address_state=group_address_air_pressure,
sync_state=sync_state,
- value_type="pressure",
+ value_type="pressure_2byte",
device_name=self.name,
feature_name="Air pressure",
after_update_cb=self.after_update,
| XKNX/xknx | 5a3bbc5c493fd18761b3d0b3b383d3a35b3efa54 | diff --git a/test/devices_tests/weather_test.py b/test/devices_tests/weather_test.py
index b3956b46..02312d17 100644
--- a/test/devices_tests/weather_test.py
+++ b/test/devices_tests/weather_test.py
@@ -79,16 +79,9 @@ class TestWeather(unittest.TestCase):
"""Test resolve state with pressure."""
xknx = XKNX(loop=self.loop)
weather = Weather(name="weather", xknx=xknx, group_address_air_pressure="1/3/4")
- weather._air_pressure.payload = DPTArray(
- (
- 0xC5,
- 0xE6,
- 0xE6,
- 0x63,
- )
- )
+ weather._air_pressure.payload = DPTArray((0x6C, 0xAD))
- self.assertEqual(weather.air_pressure, -7388.79833984375)
+ self.assertEqual(weather.air_pressure, 98058.24)
self.assertEqual(weather._air_pressure.unit_of_measurement, "Pa")
self.assertEqual(weather._air_pressure.ha_device_class, "pressure")
| KNX Weather entity payload error for air pressure
**Description of problem:**
I set up the new weather entity. For the air pressure, the HA log shows an error due to an invalid payload.
- [ ] using xknx standalone
- [X] using Home-Assistant knx plugin
**Version information:**
- xknx / Home-Assistant release with the issue: 0.13.0
**KNX installation:**
Jung Weather Station 2225 WSU
- communication object: Air Pressure DPT 9.006

**Problem-relevant `xknx.yaml` or `configuration.yaml` entries (fill out even if it seems unimportant):**
```
- name: 'Dach'
address_temperature: '6/5/11'
address_brightness_south: '6/5/6'
address_brightness_west: '6/5/7'
address_brightness_east: '6/5/5'
address_wind_speed: '6/5/0'
address_rain_alarm: '6/5/37'
# address_frost_alarm: "7/0/6"
address_wind_alarm: '6/5/29'
# address_day_night: "7/0/8"
address_air_pressure: '6/5/13'
address_humidity: '6/5/12'
expose_sensors: False
sync_state: True
```
**Traceback (if applicable):**
```
2020-09-19 10:35:51 ERROR (MainThread) [homeassistant] Error doing job: Task exception was never retrieved
Traceback (most recent call last):
File "/usr/local/lib/python3.8/site-packages/xknx/remote_value/remote_value.py", line 151, in read_state
await self.process(telegram)
File "/usr/local/lib/python3.8/site-packages/xknx/remote_value/remote_value.py", line 92, in process
raise CouldNotParseTelegram("payload invalid",
xknx.exceptions.exception.CouldNotParseTelegram: <CouldNotParseTelegram description="payload invalid" device_name="Dach" feature_name="Value" group_address="6/5/13" payload="<DPTArray value="[0x6c,0xad]" />"/>
```
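The rejected two-byte payload is in fact a well-formed DPT 9 value, which is why the fix switches the Weather device's air pressure from the 4-byte `pressure` type to `pressure_2byte` (DPT 9.006). As a sanity check, here is a minimal stand-alone decoder for the DPT 9 format (value = 0.01 * M * 2^E); this is an illustration, not the xknx implementation:

```python
def decode_dpt9(msb: int, lsb: int) -> float:
    """Decode a KNX DPT 9 two-byte float: 0.01 * mantissa * 2**exponent."""
    raw = (msb << 8) | lsb
    exponent = (raw >> 11) & 0x0F
    mantissa = raw & 0x07FF
    if raw & 0x8000:  # sign bit set: mantissa is in two's complement
        mantissa -= 0x800
    return 0.01 * mantissa * 2 ** exponent


print(decode_dpt9(0x6C, 0xAD))  # 98058.24 (Pa), i.e. about 980.6 hPa
```

98058.24 Pa is a plausible barometric reading, matching the adjusted test expectation above.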
| 0.0 | 5a3bbc5c493fd18761b3d0b3b383d3a35b3efa54 | [
"test/devices_tests/weather_test.py::TestWeather::test_pressure"
] | [
"test/devices_tests/weather_test.py::TestWeather::test_brightness",
"test/devices_tests/weather_test.py::TestWeather::test_cloudy_summer",
"test/devices_tests/weather_test.py::TestWeather::test_cloudy_winter",
"test/devices_tests/weather_test.py::TestWeather::test_day_night",
"test/devices_tests/weather_test.py::TestWeather::test_expose_sensor",
"test/devices_tests/weather_test.py::TestWeather::test_has_group_address",
"test/devices_tests/weather_test.py::TestWeather::test_humidity",
"test/devices_tests/weather_test.py::TestWeather::test_iter_remote_values",
"test/devices_tests/weather_test.py::TestWeather::test_rain_alarm",
"test/devices_tests/weather_test.py::TestWeather::test_state_lightning",
"test/devices_tests/weather_test.py::TestWeather::test_state_snowy_rainy",
"test/devices_tests/weather_test.py::TestWeather::test_sunny_summer",
"test/devices_tests/weather_test.py::TestWeather::test_sunny_winter",
"test/devices_tests/weather_test.py::TestWeather::test_temperature",
"test/devices_tests/weather_test.py::TestWeather::test_weather_default",
"test/devices_tests/weather_test.py::TestWeather::test_weather_state_attributes",
"test/devices_tests/weather_test.py::TestWeather::test_wind_alarm",
"test/devices_tests/weather_test.py::TestWeather::test_wind_speed"
] | {
"failed_lite_validators": [
"has_hyperlinks",
"has_media"
],
"has_test_patch": true,
"is_lite": false
} | 2020-09-20 12:20:12+00:00 | mit | 845 |
|
XKNX__xknx-454 | diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md
index f470c332..29b7fbe0 100644
--- a/.github/pull_request_template.md
+++ b/.github/pull_request_template.md
@@ -2,33 +2,33 @@
You are awesome! Thanks for contributing to our project!
Please, DO NOT DELETE ANY TEXT from this template!.
-->
-
+## Description
<!--
Please include a summary of the change and which issue is fixed.
Please also include relevant motivation and context.
List any dependencies that are required for this change.
-->
-## Description
-
Fixes # (issue)
+## Type of change
<!--
Please tick the applicable options.
NOTE: Ticking multiple options most likely indicates
that your change is to big and it is suggested to split it into several smaller PRs.
-->
-## Type of change
- [ ] Bug fix (non-breaking change which fixes an issue)
- [ ] New feature (non-breaking change which adds functionality)
- [ ] Breaking change (fix or feature that would cause existing functionality to not work as expected)
- [ ] This change requires a documentation update
-# Checklist:
+## Checklist:
- [ ] I have performed a self-review of my own code
-- [ ] I have made corresponding changes to the documentation
-- [ ] My changes generate no new warnings
-- [ ] I have added tests that prove my fix is effective or that my feature works
+- [ ] The documentation has been adjusted accordingly
+- [ ] The changes generate no new warnings
+- [ ] Tests have been added that prove the fix is effective or that the feature works
+- [ ] The changes are documented in the changelog
+- [ ] The Homeassistant plugin has been adjusted in case of new config options
diff --git a/home-assistant-plugin/custom_components/xknx/const.py b/home-assistant-plugin/custom_components/xknx/const.py
index 8bb99529..b5bdd447 100644
--- a/home-assistant-plugin/custom_components/xknx/const.py
+++ b/home-assistant-plugin/custom_components/xknx/const.py
@@ -18,6 +18,7 @@ DOMAIN = "xknx"
CONF_STATE_ADDRESS = "state_address"
CONF_SYNC_STATE = "sync_state"
+CONF_RESET_AFTER = "reset_after"
class ColorTempModes(Enum):
diff --git a/home-assistant-plugin/custom_components/xknx/factory.py b/home-assistant-plugin/custom_components/xknx/factory.py
index 157565c4..9e721817 100644
--- a/home-assistant-plugin/custom_components/xknx/factory.py
+++ b/home-assistant-plugin/custom_components/xknx/factory.py
@@ -202,6 +202,7 @@ def _create_switch(knx_module: XKNX, config: ConfigType) -> XknxSwitch:
name=config[CONF_NAME],
group_address=config[CONF_ADDRESS],
group_address_state=config.get(SwitchSchema.CONF_STATE_ADDRESS),
+ reset_after=config.get(SwitchSchema.CONF_RESET_AFTER),
)
diff --git a/home-assistant-plugin/custom_components/xknx/schema.py b/home-assistant-plugin/custom_components/xknx/schema.py
index a224d9cb..89a6fac7 100644
--- a/home-assistant-plugin/custom_components/xknx/schema.py
+++ b/home-assistant-plugin/custom_components/xknx/schema.py
@@ -15,6 +15,7 @@ from homeassistant.const import (
import homeassistant.helpers.config_validation as cv
from .const import (
+ CONF_RESET_AFTER,
CONF_STATE_ADDRESS,
CONF_SYNC_STATE,
OPERATION_MODES,
@@ -86,7 +87,7 @@ class BinarySensorSchema:
CONF_SYNC_STATE = CONF_SYNC_STATE
CONF_IGNORE_INTERNAL_STATE = "ignore_internal_state"
CONF_CONTEXT_TIMEOUT = "context_timeout"
- CONF_RESET_AFTER = "reset_after"
+ CONF_RESET_AFTER = CONF_RESET_AFTER
DEFAULT_NAME = "KNX Binary Sensor"
@@ -253,6 +254,7 @@ class SwitchSchema:
"""Voluptuous schema for KNX switches."""
CONF_STATE_ADDRESS = CONF_STATE_ADDRESS
+ CONF_RESET_AFTER = CONF_RESET_AFTER
DEFAULT_NAME = "KNX Switch"
SCHEMA = vol.Schema(
@@ -260,6 +262,7 @@ class SwitchSchema:
vol.Required(CONF_ADDRESS): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_STATE_ADDRESS): cv.string,
+ vol.Optional(CONF_RESET_AFTER): cv.positive_float,
}
)
diff --git a/xknx/devices/switch.py b/xknx/devices/switch.py
index 670eb569..0a9dab20 100644
--- a/xknx/devices/switch.py
+++ b/xknx/devices/switch.py
@@ -6,6 +6,7 @@ It provides functionality for
* switching 'on' and 'off'.
* reading the current state from KNX bus.
"""
+import asyncio
import logging
from xknx.remote_value import RemoteValueSwitch
@@ -22,6 +23,7 @@ class Switch(Device):
self,
xknx,
name,
+ reset_after=None,
group_address=None,
group_address_state=None,
device_updated_cb=None,
@@ -30,6 +32,10 @@ class Switch(Device):
# pylint: disable=too-many-arguments
super().__init__(xknx, name, device_updated_cb)
+ self.reset_after = reset_after
+ self._reset_task = None
+ self.state = False
+
self.switch = RemoteValueSwitch(
xknx,
group_address,
@@ -42,6 +48,11 @@ class Switch(Device):
"""Iterate the devices RemoteValue classes."""
yield self.switch
+ def __del__(self):
+ """Destructor. Cleaning up if this was not done before."""
+ if self._reset_task:
+ self._reset_task.cancel()
+
@classmethod
def from_config(cls, xknx, name, config):
"""Initialize object from configuration structure."""
@@ -55,12 +66,6 @@ class Switch(Device):
group_address_state=group_address_state,
)
- @property
- def state(self):
- """Return the current switch state of the device."""
- # None will return False
- return bool(self.switch.value)
-
async def set_on(self):
"""Switch on switch."""
await self.switch.on()
@@ -82,7 +87,18 @@ class Switch(Device):
async def process_group_write(self, telegram):
"""Process incoming and outgoing GROUP WRITE telegram."""
- await self.switch.process(telegram)
+ if await self.switch.process(telegram):
+ self.state = self.switch.value
+ if self.reset_after is not None and self.state:
+ if self._reset_task:
+ self._reset_task.cancel()
+ self._reset_task = asyncio.create_task(
+ self._reset_state(self.reset_after)
+ )
+
+ async def _reset_state(self, wait_seconds: float):
+ await asyncio.sleep(wait_seconds)
+ await self.set_off()
def __str__(self):
"""Return object as readable string."""
| XKNX/xknx | 5b6766ed6f83b863e53b6b6b7d608930ea80718f | diff --git a/requirements/testing.txt b/requirements/testing.txt
index 3c8a67ce..71727a7d 100644
--- a/requirements/testing.txt
+++ b/requirements/testing.txt
@@ -1,6 +1,6 @@
-r production.txt
pre-commit==2.7.1
-isort==5.5.4
+isort==5.5.5
coveralls==2.1.2
flake8==3.8.4
flake8-isort==4.0.0
diff --git a/test/devices_tests/switch_test.py b/test/devices_tests/switch_test.py
index 42f651b2..a9520431 100644
--- a/test/devices_tests/switch_test.py
+++ b/test/devices_tests/switch_test.py
@@ -76,6 +76,47 @@ class TestSwitch(unittest.TestCase):
self.assertEqual(switch.state, False)
+ def test_process_reset_after(self):
+ """Test process reset_after."""
+ xknx = XKNX()
+ reset_after_sec = 0.001
+ switch = Switch(
+ xknx, "TestInput", group_address="1/2/3", reset_after=reset_after_sec
+ )
+ telegram_on = Telegram(
+ group_address=GroupAddress("1/2/3"), payload=DPTBinary(1)
+ )
+
+ self.loop.run_until_complete(switch.process(telegram_on))
+ self.assertTrue(switch.state)
+ self.assertEqual(xknx.telegrams.qsize(), 0)
+ self.loop.run_until_complete(asyncio.sleep(reset_after_sec * 2))
+ self.assertEqual(xknx.telegrams.qsize(), 1)
+ self.loop.run_until_complete(switch.process(xknx.telegrams.get_nowait()))
+ self.assertFalse(switch.state)
+
+ def test_process_reset_after_cancel_existing(self):
+ """Test process reset_after cancels existing reset tasks."""
+ xknx = XKNX()
+ reset_after_sec = 0.01
+ switch = Switch(
+ xknx, "TestInput", group_address="1/2/3", reset_after=reset_after_sec
+ )
+ telegram_on = Telegram(
+ group_address=GroupAddress("1/2/3"), payload=DPTBinary(1)
+ )
+
+ self.loop.run_until_complete(switch.process(telegram_on))
+ self.assertTrue(switch.state)
+ self.assertEqual(xknx.telegrams.qsize(), 0)
+ self.loop.run_until_complete(asyncio.sleep(reset_after_sec / 2))
+ # half way through the reset timer
+ self.loop.run_until_complete(switch.process(telegram_on))
+ self.assertTrue(switch.state)
+
+ self.loop.run_until_complete(asyncio.sleep(reset_after_sec / 2))
+ self.assertEqual(xknx.telegrams.qsize(), 0)
+
def test_process_callback(self):
"""Test process / reading telegrams from telegram queue. Test if callback was called."""
# pylint: disable=no-self-use
| Switch: add auto-off function
Sometimes it is required to always send an "OFF" right after sending an "ON", e.g. for door openers. I suggest adding a `reset_after` option to `Switch`, just like the one `BinarySensor` already has, to send the "OFF" telegram once a timer has finished; see the sketch below.
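A minimal usage sketch of the proposed option, using the constructor arguments from the patch above (the device name and group address are made up):

```python
from xknx import XKNX
from xknx.devices import Switch

xknx = XKNX()
door_opener = Switch(
    xknx,
    "DoorOpener",            # hypothetical device name
    group_address="1/2/3",   # hypothetical group address
    reset_after=2.0,         # seconds until the automatic "OFF" is sent
)
```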
| 0.0 | 5b6766ed6f83b863e53b6b6b7d608930ea80718f | [
"test/devices_tests/switch_test.py::TestSwitch::test_process_reset_after",
"test/devices_tests/switch_test.py::TestSwitch::test_process_reset_after_cancel_existing"
] | [
"test/devices_tests/switch_test.py::TestSwitch::test_do",
"test/devices_tests/switch_test.py::TestSwitch::test_has_group_address",
"test/devices_tests/switch_test.py::TestSwitch::test_process",
"test/devices_tests/switch_test.py::TestSwitch::test_process_callback",
"test/devices_tests/switch_test.py::TestSwitch::test_set_off",
"test/devices_tests/switch_test.py::TestSwitch::test_set_on",
"test/devices_tests/switch_test.py::TestSwitch::test_sync",
"test/devices_tests/switch_test.py::TestSwitch::test_sync_state_address",
"test/devices_tests/switch_test.py::TestSwitch::test_wrong_do"
] | {
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false
} | 2020-10-07 19:40:38+00:00 | mit | 846 |
|
XKNX__xknx-469 | diff --git a/changelog.md b/changelog.md
index 97c850a5..7fdd1b95 100644
--- a/changelog.md
+++ b/changelog.md
@@ -4,6 +4,7 @@
### Devices
+- Sensor: add `always_callback` option
- ClimateMode: Refactor climate modes in operation_mode and controller_mode, also fixes a bug for binary operation modes where the mode would be set to AWAY no matter what value was sent to the bus.
### Internals
diff --git a/docs/sensor.md b/docs/sensor.md
index c741e111..22ae2581 100644
--- a/docs/sensor.md
+++ b/docs/sensor.md
@@ -15,6 +15,7 @@ Sensors are monitoring temperature, air humidity, pressure etc. from KNX bus.
sensor = Sensor(
xknx=xknx,
name='DiningRoom.Temperature.Sensor',
+ always_callback=False,
group_address_state='6/2/1',
sync_state=True,
value_type='temperature'
@@ -25,6 +26,7 @@ Sensors are monitoring temperature, air humidity, pressure etc. from KNX bus.
* `xknx` is the XKNX object.
* `name` is the name of the object.
+* `always_callback` defines if a callback/update should always be triggered no matter if the previous and the new state are identical.
* `group_address_state` is the KNX group address of the sensor device.
* `sync_state` defines if the value should be actively read from the bus. If `False` no GroupValueRead telegrams will be sent to its group address. Defaults to `True`
* `value_type` controls how the value should be rendered in a human readable representation. The attribut may have may have the values `percent`, `temperature`, `illuminance`, `speed_ms` or `current`.
diff --git a/home-assistant-plugin/custom_components/xknx/factory.py b/home-assistant-plugin/custom_components/xknx/factory.py
index de552ff2..005d24d5 100644
--- a/home-assistant-plugin/custom_components/xknx/factory.py
+++ b/home-assistant-plugin/custom_components/xknx/factory.py
@@ -214,6 +214,7 @@ def _create_sensor(knx_module: XKNX, config: ConfigType) -> XknxSensor:
name=config[CONF_NAME],
group_address_state=config[SensorSchema.CONF_STATE_ADDRESS],
sync_state=config[SensorSchema.CONF_SYNC_STATE],
+ always_callback=config[SensorSchema.CONF_ALWAYS_CALLBACK],
value_type=config[CONF_TYPE],
)
diff --git a/home-assistant-plugin/custom_components/xknx/schema.py b/home-assistant-plugin/custom_components/xknx/schema.py
index 9c628bed..d97a532a 100644
--- a/home-assistant-plugin/custom_components/xknx/schema.py
+++ b/home-assistant-plugin/custom_components/xknx/schema.py
@@ -306,6 +306,7 @@ class NotifySchema:
class SensorSchema:
"""Voluptuous schema for KNX sensors."""
+ CONF_ALWAYS_CALLBACK = "always_callback"
CONF_STATE_ADDRESS = CONF_STATE_ADDRESS
CONF_SYNC_STATE = CONF_SYNC_STATE
DEFAULT_NAME = "KNX Sensor"
@@ -318,6 +319,7 @@ class SensorSchema:
cv.boolean,
cv.string,
),
+ vol.Optional(CONF_ALWAYS_CALLBACK, default=False): cv.boolean,
vol.Required(CONF_STATE_ADDRESS): cv.string,
vol.Required(CONF_TYPE): vol.Any(int, float, str),
}
diff --git a/xknx/devices/sensor.py b/xknx/devices/sensor.py
index 1ac1471e..de62ec95 100644
--- a/xknx/devices/sensor.py
+++ b/xknx/devices/sensor.py
@@ -19,14 +19,15 @@ class Sensor(Device):
xknx,
name,
group_address_state=None,
- sync_state=True,
- value_type=None,
+ sync_state: bool = True,
+ always_callback: bool = False,
+ value_type: str = None,
device_updated_cb=None,
):
"""Initialize Sensor class."""
# pylint: disable=too-many-arguments
super().__init__(xknx, name, device_updated_cb)
-
+ self.always_callback = always_callback
self.sensor_value = RemoteValueSensor(
xknx,
group_address_state=group_address_state,
@@ -45,6 +46,7 @@ class Sensor(Device):
"""Initialize object from configuration structure."""
group_address_state = config.get("group_address_state")
sync_state = config.get("sync_state", True)
+ always_callback = config.get("always_callback", False)
value_type = config.get("value_type")
return cls(
@@ -52,11 +54,16 @@ class Sensor(Device):
name,
group_address_state=group_address_state,
sync_state=sync_state,
+ always_callback=always_callback,
value_type=value_type,
)
async def process_group_write(self, telegram):
"""Process incoming and outgoing GROUP WRITE telegram."""
+ await self.sensor_value.process(telegram, always_callback=self.always_callback)
+
+ async def process_group_response(self, telegram):
+ """Process incoming GroupValueResponse telegrams."""
await self.sensor_value.process(telegram)
def unit_of_measurement(self):
| XKNX/xknx | 715856fdb69deceba4d54afe4ccf3f7ad304c5ec | diff --git a/test/devices_tests/sensor_test.py b/test/devices_tests/sensor_test.py
index 2043eb3c..5875ea4e 100644
--- a/test/devices_tests/sensor_test.py
+++ b/test/devices_tests/sensor_test.py
@@ -47,6 +47,55 @@ class TestSensor(unittest.TestCase):
self.assertEqual(sensor.unit_of_measurement(), "K")
self.assertEqual(sensor.ha_device_class(), None)
+ def test_always_callback_sensor(self):
+ """Test always callback sensor."""
+ xknx = XKNX()
+ sensor = Sensor(
+ xknx,
+ "TestSensor",
+ group_address_state="1/2/3",
+ always_callback=False,
+ value_type="volume_liquid_litre",
+ )
+
+ after_update_callback = Mock()
+
+ async def async_after_update_callback(device):
+ """Async callback."""
+ after_update_callback(device)
+
+ sensor.register_device_updated_cb(async_after_update_callback)
+
+ payload = DPTArray(
+ (
+ 0x00,
+ 0x00,
+ 0x01,
+ 0x00,
+ )
+ )
+
+ # set initial payload of sensor
+ sensor.sensor_value.payload = payload
+
+ telegram = Telegram(group_address=GroupAddress("1/2/3"), payload=payload)
+
+ # verify not called when always_callback is False
+ self.loop.run_until_complete(sensor.process(telegram))
+ after_update_callback.assert_not_called()
+ after_update_callback.reset_mock()
+
+ sensor.always_callback = True
+
+ # verify called when always_callback is True
+ self.loop.run_until_complete(sensor.process(telegram))
+ after_update_callback.assert_called_once()
+ after_update_callback.reset_mock()
+
+ # verify not called when processing read responses
+ self.loop.run_until_complete(sensor.process_group_response(telegram))
+ after_update_callback.assert_not_called()
+
def test_str_acceleration(self):
"""Test resolve state with acceleration sensor."""
xknx = XKNX()
| Feature request: add the "ignore_internal_state" property to "Sensors" the same way it works already for "Binary Sensors"
Hi,
In the last release, the property "ignore_internal_state" was added to the KNX "binary sensor", and it would be great to have this property for KNX "sensors" as well.
For example, I am using a sensor with type "scene_number" together with an automation in order to execute some actions within HA when a KNX scene number is called. However, if the same scene is called twice without any other scene number in between, the value of the HA sensor state does not change and the automation is not called again.
Note: the KNX scene entity is no help in this case, as its state in HA does not change when triggered (it stays on "scening") and it is only used the other way around (making HA trigger a KNX scene).
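With the `always_callback` option introduced in the patch above, the scene use case could look roughly like this (the device name and group address are made up):

```python
from xknx import XKNX
from xknx.devices import Sensor

xknx = XKNX()
scene_sensor = Sensor(
    xknx,
    "SceneTracker",                # hypothetical device name
    group_address_state="0/0/1",   # hypothetical group address
    value_type="scene_number",
    always_callback=True,          # fire the update callback even for repeated values
)
```

In Home Assistant the same flag is exposed as `always_callback` in the sensor schema, defaulting to `false`.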
Thank you very much in advance! | 0.0 | 715856fdb69deceba4d54afe4ccf3f7ad304c5ec | [
"test/devices_tests/sensor_test.py::TestSensor::test_always_callback_sensor"
] | [
"test/devices_tests/sensor_test.py::TestSensor::test_has_group_address",
"test/devices_tests/sensor_test.py::TestSensor::test_process",
"test/devices_tests/sensor_test.py::TestSensor::test_process_callback",
"test/devices_tests/sensor_test.py::TestSensor::test_str_absolute_temperature",
"test/devices_tests/sensor_test.py::TestSensor::test_str_acceleration",
"test/devices_tests/sensor_test.py::TestSensor::test_str_acceleration_angular",
"test/devices_tests/sensor_test.py::TestSensor::test_str_activation_energy",
"test/devices_tests/sensor_test.py::TestSensor::test_str_active_energy",
"test/devices_tests/sensor_test.py::TestSensor::test_str_active_energy_kwh",
"test/devices_tests/sensor_test.py::TestSensor::test_str_activity",
"test/devices_tests/sensor_test.py::TestSensor::test_str_amplitude",
"test/devices_tests/sensor_test.py::TestSensor::test_str_angle",
"test/devices_tests/sensor_test.py::TestSensor::test_str_angle_deg",
"test/devices_tests/sensor_test.py::TestSensor::test_str_angle_rad",
"test/devices_tests/sensor_test.py::TestSensor::test_str_angular_frequency",
"test/devices_tests/sensor_test.py::TestSensor::test_str_angular_momentum",
"test/devices_tests/sensor_test.py::TestSensor::test_str_angular_velocity",
"test/devices_tests/sensor_test.py::TestSensor::test_str_apparant_energy",
"test/devices_tests/sensor_test.py::TestSensor::test_str_apparant_energy_kvah",
"test/devices_tests/sensor_test.py::TestSensor::test_str_area",
"test/devices_tests/sensor_test.py::TestSensor::test_str_brightness",
"test/devices_tests/sensor_test.py::TestSensor::test_str_capacitance",
"test/devices_tests/sensor_test.py::TestSensor::test_str_charge_density_surface",
"test/devices_tests/sensor_test.py::TestSensor::test_str_charge_density_volume",
"test/devices_tests/sensor_test.py::TestSensor::test_str_color_temperature",
"test/devices_tests/sensor_test.py::TestSensor::test_str_common_temperature",
"test/devices_tests/sensor_test.py::TestSensor::test_str_compressibility",
"test/devices_tests/sensor_test.py::TestSensor::test_str_conductance",
"test/devices_tests/sensor_test.py::TestSensor::test_str_counter_pulses",
"test/devices_tests/sensor_test.py::TestSensor::test_str_current",
"test/devices_tests/sensor_test.py::TestSensor::test_str_delta_time_hrs",
"test/devices_tests/sensor_test.py::TestSensor::test_str_delta_time_min",
"test/devices_tests/sensor_test.py::TestSensor::test_str_delta_time_ms",
"test/devices_tests/sensor_test.py::TestSensor::test_str_delta_time_sec",
"test/devices_tests/sensor_test.py::TestSensor::test_str_density",
"test/devices_tests/sensor_test.py::TestSensor::test_str_electric_charge",
"test/devices_tests/sensor_test.py::TestSensor::test_str_electric_current",
"test/devices_tests/sensor_test.py::TestSensor::test_str_electric_current_density",
"test/devices_tests/sensor_test.py::TestSensor::test_str_electric_dipole_moment",
"test/devices_tests/sensor_test.py::TestSensor::test_str_electric_displacement",
"test/devices_tests/sensor_test.py::TestSensor::test_str_electric_field_strength",
"test/devices_tests/sensor_test.py::TestSensor::test_str_electric_flux",
"test/devices_tests/sensor_test.py::TestSensor::test_str_electric_flux_density",
"test/devices_tests/sensor_test.py::TestSensor::test_str_electric_polarization",
"test/devices_tests/sensor_test.py::TestSensor::test_str_electric_potential",
"test/devices_tests/sensor_test.py::TestSensor::test_str_electric_potential_difference",
"test/devices_tests/sensor_test.py::TestSensor::test_str_electrical_conductivity",
"test/devices_tests/sensor_test.py::TestSensor::test_str_electromagnetic_moment",
"test/devices_tests/sensor_test.py::TestSensor::test_str_electromotive_force",
"test/devices_tests/sensor_test.py::TestSensor::test_str_energy",
"test/devices_tests/sensor_test.py::TestSensor::test_str_enthalpy",
"test/devices_tests/sensor_test.py::TestSensor::test_str_flow_rate_m3h",
"test/devices_tests/sensor_test.py::TestSensor::test_str_force",
"test/devices_tests/sensor_test.py::TestSensor::test_str_frequency",
"test/devices_tests/sensor_test.py::TestSensor::test_str_heat_quantity",
"test/devices_tests/sensor_test.py::TestSensor::test_str_heatcapacity",
"test/devices_tests/sensor_test.py::TestSensor::test_str_heatflowrate",
"test/devices_tests/sensor_test.py::TestSensor::test_str_humidity",
"test/devices_tests/sensor_test.py::TestSensor::test_str_illuminance",
"test/devices_tests/sensor_test.py::TestSensor::test_str_impedance",
"test/devices_tests/sensor_test.py::TestSensor::test_str_kelvin_per_percent",
"test/devices_tests/sensor_test.py::TestSensor::test_str_length",
"test/devices_tests/sensor_test.py::TestSensor::test_str_length_mm",
"test/devices_tests/sensor_test.py::TestSensor::test_str_light_quantity",
"test/devices_tests/sensor_test.py::TestSensor::test_str_long_delta_timesec",
"test/devices_tests/sensor_test.py::TestSensor::test_str_luminance",
"test/devices_tests/sensor_test.py::TestSensor::test_str_luminous_flux",
"test/devices_tests/sensor_test.py::TestSensor::test_str_luminous_intensity",
"test/devices_tests/sensor_test.py::TestSensor::test_str_magnetic_field_strength",
"test/devices_tests/sensor_test.py::TestSensor::test_str_magnetic_flux",
"test/devices_tests/sensor_test.py::TestSensor::test_str_magnetic_flux_density",
"test/devices_tests/sensor_test.py::TestSensor::test_str_magnetic_moment",
"test/devices_tests/sensor_test.py::TestSensor::test_str_magnetic_polarization",
"test/devices_tests/sensor_test.py::TestSensor::test_str_magnetization",
"test/devices_tests/sensor_test.py::TestSensor::test_str_magnetomotive_force",
"test/devices_tests/sensor_test.py::TestSensor::test_str_mass",
"test/devices_tests/sensor_test.py::TestSensor::test_str_mass_flux",
"test/devices_tests/sensor_test.py::TestSensor::test_str_mol",
"test/devices_tests/sensor_test.py::TestSensor::test_str_momentum",
"test/devices_tests/sensor_test.py::TestSensor::test_str_percent",
"test/devices_tests/sensor_test.py::TestSensor::test_str_percent_u8",
"test/devices_tests/sensor_test.py::TestSensor::test_str_percent_v16",
"test/devices_tests/sensor_test.py::TestSensor::test_str_percent_v8",
"test/devices_tests/sensor_test.py::TestSensor::test_str_phaseangledeg",
"test/devices_tests/sensor_test.py::TestSensor::test_str_phaseanglerad",
"test/devices_tests/sensor_test.py::TestSensor::test_str_power",
"test/devices_tests/sensor_test.py::TestSensor::test_str_power_2byte",
"test/devices_tests/sensor_test.py::TestSensor::test_str_power_density",
"test/devices_tests/sensor_test.py::TestSensor::test_str_powerfactor",
"test/devices_tests/sensor_test.py::TestSensor::test_str_ppm",
"test/devices_tests/sensor_test.py::TestSensor::test_str_pressure",
"test/devices_tests/sensor_test.py::TestSensor::test_str_pressure_2byte",
"test/devices_tests/sensor_test.py::TestSensor::test_str_pulse",
"test/devices_tests/sensor_test.py::TestSensor::test_str_rain_amount",
"test/devices_tests/sensor_test.py::TestSensor::test_str_reactance",
"test/devices_tests/sensor_test.py::TestSensor::test_str_reactive_energy",
"test/devices_tests/sensor_test.py::TestSensor::test_str_reactive_energy_kvarh",
"test/devices_tests/sensor_test.py::TestSensor::test_str_resistance",
"test/devices_tests/sensor_test.py::TestSensor::test_str_resistivity",
"test/devices_tests/sensor_test.py::TestSensor::test_str_rotation_angle",
"test/devices_tests/sensor_test.py::TestSensor::test_str_scene_number",
"test/devices_tests/sensor_test.py::TestSensor::test_str_self_inductance",
"test/devices_tests/sensor_test.py::TestSensor::test_str_solid_angle",
"test/devices_tests/sensor_test.py::TestSensor::test_str_sound_intensity",
"test/devices_tests/sensor_test.py::TestSensor::test_str_speed",
"test/devices_tests/sensor_test.py::TestSensor::test_str_stress",
"test/devices_tests/sensor_test.py::TestSensor::test_str_surface_tension",
"test/devices_tests/sensor_test.py::TestSensor::test_str_temperature",
"test/devices_tests/sensor_test.py::TestSensor::test_str_temperature_a",
"test/devices_tests/sensor_test.py::TestSensor::test_str_temperature_difference",
"test/devices_tests/sensor_test.py::TestSensor::test_str_temperature_difference_2byte",
"test/devices_tests/sensor_test.py::TestSensor::test_str_temperature_f",
"test/devices_tests/sensor_test.py::TestSensor::test_str_thermal_capacity",
"test/devices_tests/sensor_test.py::TestSensor::test_str_thermal_conductivity",
"test/devices_tests/sensor_test.py::TestSensor::test_str_thermoelectric_power",
"test/devices_tests/sensor_test.py::TestSensor::test_str_time_1",
"test/devices_tests/sensor_test.py::TestSensor::test_str_time_2",
"test/devices_tests/sensor_test.py::TestSensor::test_str_time_period_100msec",
"test/devices_tests/sensor_test.py::TestSensor::test_str_time_period_10msec",
"test/devices_tests/sensor_test.py::TestSensor::test_str_time_period_hrs",
"test/devices_tests/sensor_test.py::TestSensor::test_str_time_period_min",
"test/devices_tests/sensor_test.py::TestSensor::test_str_time_period_msec",
"test/devices_tests/sensor_test.py::TestSensor::test_str_time_period_sec",
"test/devices_tests/sensor_test.py::TestSensor::test_str_time_seconds",
"test/devices_tests/sensor_test.py::TestSensor::test_str_torque",
"test/devices_tests/sensor_test.py::TestSensor::test_str_voltage",
"test/devices_tests/sensor_test.py::TestSensor::test_str_volume",
"test/devices_tests/sensor_test.py::TestSensor::test_str_volume_flow",
"test/devices_tests/sensor_test.py::TestSensor::test_str_volume_flux",
"test/devices_tests/sensor_test.py::TestSensor::test_str_volume_liquid_litre",
"test/devices_tests/sensor_test.py::TestSensor::test_str_volume_m3",
"test/devices_tests/sensor_test.py::TestSensor::test_str_weight",
"test/devices_tests/sensor_test.py::TestSensor::test_str_wind_speed_kmh",
"test/devices_tests/sensor_test.py::TestSensor::test_str_wind_speed_ms",
"test/devices_tests/sensor_test.py::TestSensor::test_str_work",
"test/devices_tests/sensor_test.py::TestSensor::test_sync"
] | {
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false
} | 2020-10-17 20:17:34+00:00 | mit | 847 |
|
XKNX__xknx-470 | diff --git a/changelog.md b/changelog.md
index 97c850a5..26bbe12a 100644
--- a/changelog.md
+++ b/changelog.md
@@ -4,11 +4,12 @@
### Devices
+- Sensor: add `always_callback` option
- ClimateMode: Refactor climate modes in operation_mode and controller_mode, also fixes a bug for binary operation modes where the mode would be set to AWAY no matter what value was sent to the bus.
### Internals
-- StateUpdater: Only request one GA at a time.
+- StateUpdater: Only request 3 GAs at a time.
## 0.15.1 bugfix for binary sensors
diff --git a/docs/sensor.md b/docs/sensor.md
index c741e111..22ae2581 100644
--- a/docs/sensor.md
+++ b/docs/sensor.md
@@ -15,6 +15,7 @@ Sensors are monitoring temperature, air humidity, pressure etc. from KNX bus.
sensor = Sensor(
xknx=xknx,
name='DiningRoom.Temperature.Sensor',
+ always_callback=False,
group_address_state='6/2/1',
sync_state=True,
value_type='temperature'
@@ -25,6 +26,7 @@ Sensors are monitoring temperature, air humidity, pressure etc. from KNX bus.
* `xknx` is the XKNX object.
* `name` is the name of the object.
+* `always_callback` defines if a callback/update should always be triggered no matter if the previous and the new state are identical.
* `group_address_state` is the KNX group address of the sensor device.
* `sync_state` defines if the value should be actively read from the bus. If `False` no GroupValueRead telegrams will be sent to its group address. Defaults to `True`
* `value_type` controls how the value should be rendered in a human readable representation. The attribut may have may have the values `percent`, `temperature`, `illuminance`, `speed_ms` or `current`.
diff --git a/home-assistant-plugin/custom_components/xknx/factory.py b/home-assistant-plugin/custom_components/xknx/factory.py
index de552ff2..005d24d5 100644
--- a/home-assistant-plugin/custom_components/xknx/factory.py
+++ b/home-assistant-plugin/custom_components/xknx/factory.py
@@ -214,6 +214,7 @@ def _create_sensor(knx_module: XKNX, config: ConfigType) -> XknxSensor:
name=config[CONF_NAME],
group_address_state=config[SensorSchema.CONF_STATE_ADDRESS],
sync_state=config[SensorSchema.CONF_SYNC_STATE],
+ always_callback=config[SensorSchema.CONF_ALWAYS_CALLBACK],
value_type=config[CONF_TYPE],
)
diff --git a/home-assistant-plugin/custom_components/xknx/schema.py b/home-assistant-plugin/custom_components/xknx/schema.py
index 9c628bed..d97a532a 100644
--- a/home-assistant-plugin/custom_components/xknx/schema.py
+++ b/home-assistant-plugin/custom_components/xknx/schema.py
@@ -306,6 +306,7 @@ class NotifySchema:
class SensorSchema:
"""Voluptuous schema for KNX sensors."""
+ CONF_ALWAYS_CALLBACK = "always_callback"
CONF_STATE_ADDRESS = CONF_STATE_ADDRESS
CONF_SYNC_STATE = CONF_SYNC_STATE
DEFAULT_NAME = "KNX Sensor"
@@ -318,6 +319,7 @@ class SensorSchema:
cv.boolean,
cv.string,
),
+ vol.Optional(CONF_ALWAYS_CALLBACK, default=False): cv.boolean,
vol.Required(CONF_STATE_ADDRESS): cv.string,
vol.Required(CONF_TYPE): vol.Any(int, float, str),
}
diff --git a/xknx/core/state_updater.py b/xknx/core/state_updater.py
index 268730df..db7df306 100644
--- a/xknx/core/state_updater.py
+++ b/xknx/core/state_updater.py
@@ -14,12 +14,12 @@ logger = logging.getLogger("xknx.state_updater")
class StateUpdater:
"""Class for keeping the states of RemoteValues up to date."""
- def __init__(self, xknx):
+ def __init__(self, xknx, parallel_reads: int = 3):
"""Initialize StateUpdater class."""
self.xknx = xknx
self.started = False
self._workers = {}
- self._one_by_one = asyncio.Lock()
+ self._semaphore = asyncio.Semaphore(value=parallel_reads)
def register_remote_value(self, remote_value: RemoteValue, tracker_options=True):
"""Register a RemoteValue to initialize its state and/or track for expiration."""
@@ -77,7 +77,7 @@ class StateUpdater:
async def read_state_mutex():
"""Schedule to read the state from the KNX bus - one at a time."""
- async with self._one_by_one:
+ async with self._semaphore:
# wait until there is nothing else to send to the bus
await self.xknx.telegram_queue.outgoing_queue.join()
logger.debug(
diff --git a/xknx/devices/sensor.py b/xknx/devices/sensor.py
index 1ac1471e..de62ec95 100644
--- a/xknx/devices/sensor.py
+++ b/xknx/devices/sensor.py
@@ -19,14 +19,15 @@ class Sensor(Device):
xknx,
name,
group_address_state=None,
- sync_state=True,
- value_type=None,
+ sync_state: bool = True,
+ always_callback: bool = False,
+ value_type: str = None,
device_updated_cb=None,
):
"""Initialize Sensor class."""
# pylint: disable=too-many-arguments
super().__init__(xknx, name, device_updated_cb)
-
+ self.always_callback = always_callback
self.sensor_value = RemoteValueSensor(
xknx,
group_address_state=group_address_state,
@@ -45,6 +46,7 @@ class Sensor(Device):
"""Initialize object from configuration structure."""
group_address_state = config.get("group_address_state")
sync_state = config.get("sync_state", True)
+ always_callback = config.get("always_callback", False)
value_type = config.get("value_type")
return cls(
@@ -52,11 +54,16 @@ class Sensor(Device):
name,
group_address_state=group_address_state,
sync_state=sync_state,
+ always_callback=always_callback,
value_type=value_type,
)
async def process_group_write(self, telegram):
"""Process incoming and outgoing GROUP WRITE telegram."""
+ await self.sensor_value.process(telegram, always_callback=self.always_callback)
+
+ async def process_group_response(self, telegram):
+ """Process incoming GroupValueResponse telegrams."""
await self.sensor_value.process(telegram)
def unit_of_measurement(self):
| XKNX/xknx | 715856fdb69deceba4d54afe4ccf3f7ad304c5ec | diff --git a/test/devices_tests/sensor_test.py b/test/devices_tests/sensor_test.py
index 2043eb3c..5875ea4e 100644
--- a/test/devices_tests/sensor_test.py
+++ b/test/devices_tests/sensor_test.py
@@ -47,6 +47,55 @@ class TestSensor(unittest.TestCase):
self.assertEqual(sensor.unit_of_measurement(), "K")
self.assertEqual(sensor.ha_device_class(), None)
+ def test_always_callback_sensor(self):
+ """Test always callback sensor."""
+ xknx = XKNX()
+ sensor = Sensor(
+ xknx,
+ "TestSensor",
+ group_address_state="1/2/3",
+ always_callback=False,
+ value_type="volume_liquid_litre",
+ )
+
+ after_update_callback = Mock()
+
+ async def async_after_update_callback(device):
+ """Async callback."""
+ after_update_callback(device)
+
+ sensor.register_device_updated_cb(async_after_update_callback)
+
+ payload = DPTArray(
+ (
+ 0x00,
+ 0x00,
+ 0x01,
+ 0x00,
+ )
+ )
+
+ # set initial payload of sensor
+ sensor.sensor_value.payload = payload
+
+ telegram = Telegram(group_address=GroupAddress("1/2/3"), payload=payload)
+
+ # verify not called when always_callback is False
+ self.loop.run_until_complete(sensor.process(telegram))
+ after_update_callback.assert_not_called()
+ after_update_callback.reset_mock()
+
+ sensor.always_callback = True
+
+ # verify called when always_callback is True
+ self.loop.run_until_complete(sensor.process(telegram))
+ after_update_callback.assert_called_once()
+ after_update_callback.reset_mock()
+
+ # verify not called when processing read responses
+ self.loop.run_until_complete(sensor.process_group_response(telegram))
+ after_update_callback.assert_not_called()
+
def test_str_acceleration(self):
"""Test resolve state with acceleration sensor."""
xknx = XKNX()
| Use Semaphores in StateUpdater
In https://github.com/XKNX/xknx/pull/457 we adjusted the state updater to update each value one by one. However, we should leverage the power of semaphores to update more than one state concurrently, as the bus can handle more than one telegram per second.
A value between 3 and 5 should be fine, IMHO, but this would need further testing.
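
For illustration, the pattern boils down to the sketch below. Function and parameter names here are hypothetical, not xknx API; the merged change applies the same idea inside `StateUpdater` by swapping its `asyncio.Lock` for `asyncio.Semaphore(value=parallel_reads)` with a default of 3.

```python
import asyncio

async def update_states(read_callbacks, parallel_reads=3):
    """Run the given state-read coroutines, at most `parallel_reads` at a time."""
    semaphore = asyncio.Semaphore(parallel_reads)

    async def guarded(read_cb):
        async with semaphore:  # waits here while `parallel_reads` reads are in flight
            await read_cb()

    await asyncio.gather(*(guarded(cb) for cb in read_callbacks))
``` | 0.0 | 715856fdb69deceba4d54afe4ccf3f7ad304c5ec | [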
"test/devices_tests/sensor_test.py::TestSensor::test_always_callback_sensor"
] | [
"test/devices_tests/sensor_test.py::TestSensor::test_has_group_address",
"test/devices_tests/sensor_test.py::TestSensor::test_process",
"test/devices_tests/sensor_test.py::TestSensor::test_process_callback",
"test/devices_tests/sensor_test.py::TestSensor::test_str_absolute_temperature",
"test/devices_tests/sensor_test.py::TestSensor::test_str_acceleration",
"test/devices_tests/sensor_test.py::TestSensor::test_str_acceleration_angular",
"test/devices_tests/sensor_test.py::TestSensor::test_str_activation_energy",
"test/devices_tests/sensor_test.py::TestSensor::test_str_active_energy",
"test/devices_tests/sensor_test.py::TestSensor::test_str_active_energy_kwh",
"test/devices_tests/sensor_test.py::TestSensor::test_str_activity",
"test/devices_tests/sensor_test.py::TestSensor::test_str_amplitude",
"test/devices_tests/sensor_test.py::TestSensor::test_str_angle",
"test/devices_tests/sensor_test.py::TestSensor::test_str_angle_deg",
"test/devices_tests/sensor_test.py::TestSensor::test_str_angle_rad",
"test/devices_tests/sensor_test.py::TestSensor::test_str_angular_frequency",
"test/devices_tests/sensor_test.py::TestSensor::test_str_angular_momentum",
"test/devices_tests/sensor_test.py::TestSensor::test_str_angular_velocity",
"test/devices_tests/sensor_test.py::TestSensor::test_str_apparant_energy",
"test/devices_tests/sensor_test.py::TestSensor::test_str_apparant_energy_kvah",
"test/devices_tests/sensor_test.py::TestSensor::test_str_area",
"test/devices_tests/sensor_test.py::TestSensor::test_str_brightness",
"test/devices_tests/sensor_test.py::TestSensor::test_str_capacitance",
"test/devices_tests/sensor_test.py::TestSensor::test_str_charge_density_surface",
"test/devices_tests/sensor_test.py::TestSensor::test_str_charge_density_volume",
"test/devices_tests/sensor_test.py::TestSensor::test_str_color_temperature",
"test/devices_tests/sensor_test.py::TestSensor::test_str_common_temperature",
"test/devices_tests/sensor_test.py::TestSensor::test_str_compressibility",
"test/devices_tests/sensor_test.py::TestSensor::test_str_conductance",
"test/devices_tests/sensor_test.py::TestSensor::test_str_counter_pulses",
"test/devices_tests/sensor_test.py::TestSensor::test_str_current",
"test/devices_tests/sensor_test.py::TestSensor::test_str_delta_time_hrs",
"test/devices_tests/sensor_test.py::TestSensor::test_str_delta_time_min",
"test/devices_tests/sensor_test.py::TestSensor::test_str_delta_time_ms",
"test/devices_tests/sensor_test.py::TestSensor::test_str_delta_time_sec",
"test/devices_tests/sensor_test.py::TestSensor::test_str_density",
"test/devices_tests/sensor_test.py::TestSensor::test_str_electric_charge",
"test/devices_tests/sensor_test.py::TestSensor::test_str_electric_current",
"test/devices_tests/sensor_test.py::TestSensor::test_str_electric_current_density",
"test/devices_tests/sensor_test.py::TestSensor::test_str_electric_dipole_moment",
"test/devices_tests/sensor_test.py::TestSensor::test_str_electric_displacement",
"test/devices_tests/sensor_test.py::TestSensor::test_str_electric_field_strength",
"test/devices_tests/sensor_test.py::TestSensor::test_str_electric_flux",
"test/devices_tests/sensor_test.py::TestSensor::test_str_electric_flux_density",
"test/devices_tests/sensor_test.py::TestSensor::test_str_electric_polarization",
"test/devices_tests/sensor_test.py::TestSensor::test_str_electric_potential",
"test/devices_tests/sensor_test.py::TestSensor::test_str_electric_potential_difference",
"test/devices_tests/sensor_test.py::TestSensor::test_str_electrical_conductivity",
"test/devices_tests/sensor_test.py::TestSensor::test_str_electromagnetic_moment",
"test/devices_tests/sensor_test.py::TestSensor::test_str_electromotive_force",
"test/devices_tests/sensor_test.py::TestSensor::test_str_energy",
"test/devices_tests/sensor_test.py::TestSensor::test_str_enthalpy",
"test/devices_tests/sensor_test.py::TestSensor::test_str_flow_rate_m3h",
"test/devices_tests/sensor_test.py::TestSensor::test_str_force",
"test/devices_tests/sensor_test.py::TestSensor::test_str_frequency",
"test/devices_tests/sensor_test.py::TestSensor::test_str_heat_quantity",
"test/devices_tests/sensor_test.py::TestSensor::test_str_heatcapacity",
"test/devices_tests/sensor_test.py::TestSensor::test_str_heatflowrate",
"test/devices_tests/sensor_test.py::TestSensor::test_str_humidity",
"test/devices_tests/sensor_test.py::TestSensor::test_str_illuminance",
"test/devices_tests/sensor_test.py::TestSensor::test_str_impedance",
"test/devices_tests/sensor_test.py::TestSensor::test_str_kelvin_per_percent",
"test/devices_tests/sensor_test.py::TestSensor::test_str_length",
"test/devices_tests/sensor_test.py::TestSensor::test_str_length_mm",
"test/devices_tests/sensor_test.py::TestSensor::test_str_light_quantity",
"test/devices_tests/sensor_test.py::TestSensor::test_str_long_delta_timesec",
"test/devices_tests/sensor_test.py::TestSensor::test_str_luminance",
"test/devices_tests/sensor_test.py::TestSensor::test_str_luminous_flux",
"test/devices_tests/sensor_test.py::TestSensor::test_str_luminous_intensity",
"test/devices_tests/sensor_test.py::TestSensor::test_str_magnetic_field_strength",
"test/devices_tests/sensor_test.py::TestSensor::test_str_magnetic_flux",
"test/devices_tests/sensor_test.py::TestSensor::test_str_magnetic_flux_density",
"test/devices_tests/sensor_test.py::TestSensor::test_str_magnetic_moment",
"test/devices_tests/sensor_test.py::TestSensor::test_str_magnetic_polarization",
"test/devices_tests/sensor_test.py::TestSensor::test_str_magnetization",
"test/devices_tests/sensor_test.py::TestSensor::test_str_magnetomotive_force",
"test/devices_tests/sensor_test.py::TestSensor::test_str_mass",
"test/devices_tests/sensor_test.py::TestSensor::test_str_mass_flux",
"test/devices_tests/sensor_test.py::TestSensor::test_str_mol",
"test/devices_tests/sensor_test.py::TestSensor::test_str_momentum",
"test/devices_tests/sensor_test.py::TestSensor::test_str_percent",
"test/devices_tests/sensor_test.py::TestSensor::test_str_percent_u8",
"test/devices_tests/sensor_test.py::TestSensor::test_str_percent_v16",
"test/devices_tests/sensor_test.py::TestSensor::test_str_percent_v8",
"test/devices_tests/sensor_test.py::TestSensor::test_str_phaseangledeg",
"test/devices_tests/sensor_test.py::TestSensor::test_str_phaseanglerad",
"test/devices_tests/sensor_test.py::TestSensor::test_str_power",
"test/devices_tests/sensor_test.py::TestSensor::test_str_power_2byte",
"test/devices_tests/sensor_test.py::TestSensor::test_str_power_density",
"test/devices_tests/sensor_test.py::TestSensor::test_str_powerfactor",
"test/devices_tests/sensor_test.py::TestSensor::test_str_ppm",
"test/devices_tests/sensor_test.py::TestSensor::test_str_pressure",
"test/devices_tests/sensor_test.py::TestSensor::test_str_pressure_2byte",
"test/devices_tests/sensor_test.py::TestSensor::test_str_pulse",
"test/devices_tests/sensor_test.py::TestSensor::test_str_rain_amount",
"test/devices_tests/sensor_test.py::TestSensor::test_str_reactance",
"test/devices_tests/sensor_test.py::TestSensor::test_str_reactive_energy",
"test/devices_tests/sensor_test.py::TestSensor::test_str_reactive_energy_kvarh",
"test/devices_tests/sensor_test.py::TestSensor::test_str_resistance",
"test/devices_tests/sensor_test.py::TestSensor::test_str_resistivity",
"test/devices_tests/sensor_test.py::TestSensor::test_str_rotation_angle",
"test/devices_tests/sensor_test.py::TestSensor::test_str_scene_number",
"test/devices_tests/sensor_test.py::TestSensor::test_str_self_inductance",
"test/devices_tests/sensor_test.py::TestSensor::test_str_solid_angle",
"test/devices_tests/sensor_test.py::TestSensor::test_str_sound_intensity",
"test/devices_tests/sensor_test.py::TestSensor::test_str_speed",
"test/devices_tests/sensor_test.py::TestSensor::test_str_stress",
"test/devices_tests/sensor_test.py::TestSensor::test_str_surface_tension",
"test/devices_tests/sensor_test.py::TestSensor::test_str_temperature",
"test/devices_tests/sensor_test.py::TestSensor::test_str_temperature_a",
"test/devices_tests/sensor_test.py::TestSensor::test_str_temperature_difference",
"test/devices_tests/sensor_test.py::TestSensor::test_str_temperature_difference_2byte",
"test/devices_tests/sensor_test.py::TestSensor::test_str_temperature_f",
"test/devices_tests/sensor_test.py::TestSensor::test_str_thermal_capacity",
"test/devices_tests/sensor_test.py::TestSensor::test_str_thermal_conductivity",
"test/devices_tests/sensor_test.py::TestSensor::test_str_thermoelectric_power",
"test/devices_tests/sensor_test.py::TestSensor::test_str_time_1",
"test/devices_tests/sensor_test.py::TestSensor::test_str_time_2",
"test/devices_tests/sensor_test.py::TestSensor::test_str_time_period_100msec",
"test/devices_tests/sensor_test.py::TestSensor::test_str_time_period_10msec",
"test/devices_tests/sensor_test.py::TestSensor::test_str_time_period_hrs",
"test/devices_tests/sensor_test.py::TestSensor::test_str_time_period_min",
"test/devices_tests/sensor_test.py::TestSensor::test_str_time_period_msec",
"test/devices_tests/sensor_test.py::TestSensor::test_str_time_period_sec",
"test/devices_tests/sensor_test.py::TestSensor::test_str_time_seconds",
"test/devices_tests/sensor_test.py::TestSensor::test_str_torque",
"test/devices_tests/sensor_test.py::TestSensor::test_str_voltage",
"test/devices_tests/sensor_test.py::TestSensor::test_str_volume",
"test/devices_tests/sensor_test.py::TestSensor::test_str_volume_flow",
"test/devices_tests/sensor_test.py::TestSensor::test_str_volume_flux",
"test/devices_tests/sensor_test.py::TestSensor::test_str_volume_liquid_litre",
"test/devices_tests/sensor_test.py::TestSensor::test_str_volume_m3",
"test/devices_tests/sensor_test.py::TestSensor::test_str_weight",
"test/devices_tests/sensor_test.py::TestSensor::test_str_wind_speed_kmh",
"test/devices_tests/sensor_test.py::TestSensor::test_str_wind_speed_ms",
"test/devices_tests/sensor_test.py::TestSensor::test_str_work",
"test/devices_tests/sensor_test.py::TestSensor::test_sync"
] | {
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false
} | 2020-10-17 20:40:35+00:00 | mit | 848 |
|
XKNX__xknx-482 | diff --git a/changelog.md b/changelog.md
index b3ef0a15..5ed77445 100644
--- a/changelog.md
+++ b/changelog.md
@@ -12,6 +12,7 @@
### Bugfixes
- HA Switch entity: keep state without state_address
+- Cover: fix `set_position` without writable position / auto_stop_if_necessary
## 0.15.2 Winter is coming
diff --git a/xknx/devices/cover.py b/xknx/devices/cover.py
index 4ff53c1a..7596c1e5 100644
--- a/xknx/devices/cover.py
+++ b/xknx/devices/cover.py
@@ -270,13 +270,20 @@ class Cover(Device):
async def process_group_write(self, telegram):
"""Process incoming and outgoing GROUP WRITE telegram."""
+ # call after_update to account for travelcalculator changes
if await self.updown.process(telegram):
- if self.updown.value == RemoteValueUpDown.Direction.UP:
+ if (
+ not self.is_opening()
+ and self.updown.value == RemoteValueUpDown.Direction.UP
+ ):
self.travelcalculator.start_travel_up()
- else:
+ await self.after_update()
+ elif (
+ not self.is_closing()
+ and self.updown.value == RemoteValueUpDown.Direction.DOWN
+ ):
self.travelcalculator.start_travel_down()
- # call after_update to account for travelcalculator changes
- await self.after_update()
+ await self.after_update()
# stop from bus
if await self.stop_.process(telegram) or await self.step.process(telegram):
if self.is_traveling():
| XKNX/xknx | 785d73b850a97a688a3b1a5e8baf34e82685d1b6 | diff --git a/test/devices_tests/cover_test.py b/test/devices_tests/cover_test.py
index 16e4547c..0865454c 100644
--- a/test/devices_tests/cover_test.py
+++ b/test/devices_tests/cover_test.py
@@ -397,6 +397,10 @@ class TestCover(unittest.TestCase):
telegram, Telegram(GroupAddress("1/2/1"), payload=DPTBinary(0))
)
self.assertEqual(cover.travelcalculator.travel_to_position, 50)
+ self.assertTrue(cover.is_opening())
+ # process the outgoing telegram to make sure it doesn't overwrite the target position
+ self.loop.run_until_complete(cover.process(telegram))
+ self.assertEqual(cover.travelcalculator.travel_to_position, 50)
def test_position_without_position_address_down(self):
"""Test moving cover down - with no absolute positioning supported."""
@@ -416,6 +420,10 @@ class TestCover(unittest.TestCase):
telegram, Telegram(GroupAddress("1/2/1"), payload=DPTBinary(1))
)
self.assertEqual(cover.travelcalculator.travel_to_position, 80)
+ self.assertTrue(cover.is_closing())
+ # process the outgoing telegram to make sure it doesn't overwrite the target position
+ self.loop.run_until_complete(cover.process(telegram))
+ self.assertEqual(cover.travelcalculator.travel_to_position, 80)
def test_position_without_position_address_uninitialized_up(self):
"""Test moving uninitialized cover to absolute position - with no absolute positioning supported."""
| cover.set_cover_position not stopping with no position_address
**Description of problem:**
Dear xknx team,
covers with **no position_address** no longer stop when moving the slider in the HA front panel to a position, e.g. 30%. I attached my HA config as well as some debug logs from HA. This was working in previous versions of Home Assistant for me (version 0.111).
The **service cover.set_cover_position is not stopping at this value**. The services cover.close_cover, cover.open_cover and cover.stop_cover are currently working.
Maybe a bug in the reworked travelcalculator / cover.py? It seems to me that no stop movement is sent to the actuator when the calculated position is reached.
Remark: My covers do not have positional feedback. But this applies not only to my KNX config; it is reproducible with other KNX hardware setups in HA as well (a friend of mine reproduces this bug with his KNX covers, which have a position_address: when he deletes the position address in his config, the bug occurs as well). It would be great to have this feature back / working again! Let me know if I can assist you in reproducing this bug.
All the best
Jan
- [x] using Home-Assistant knx plugin
**Version information:**
- xknx / Home-Assistant release with the issue: 0.116.4
- last working xknx / Home-Assistant release (if known): 0.111
**Problem-relevant `xknx.yaml` or `configuration.yaml` entries (fill out even if it seems unimportant):**
[LOG_ha.txt](https://github.com/XKNX/xknx/files/5434327/LOG_ha.txt)
[knx_cover.yaml.txt](https://github.com/XKNX/xknx/files/5434337/knx_cover.yaml.txt)
[knx.yaml.txt](https://github.com/XKNX/xknx/files/5434338/knx.yaml.txt)

| 0.0 | 785d73b850a97a688a3b1a5e8baf34e82685d1b6 | [
"test/devices_tests/cover_test.py::TestCover::test_position_without_position_address_down",
"test/devices_tests/cover_test.py::TestCover::test_position_without_position_address_up"
] | [
"test/devices_tests/cover_test.py::TestCover::test_angle",
"test/devices_tests/cover_test.py::TestCover::test_angle_not_supported",
"test/devices_tests/cover_test.py::TestCover::test_auto_stop",
"test/devices_tests/cover_test.py::TestCover::test_do",
"test/devices_tests/cover_test.py::TestCover::test_has_group_address",
"test/devices_tests/cover_test.py::TestCover::test_is_traveling",
"test/devices_tests/cover_test.py::TestCover::test_position",
"test/devices_tests/cover_test.py::TestCover::test_position_without_position_address_uninitialized_down",
"test/devices_tests/cover_test.py::TestCover::test_position_without_position_address_uninitialized_up",
"test/devices_tests/cover_test.py::TestCover::test_process_angle",
"test/devices_tests/cover_test.py::TestCover::test_process_callback",
"test/devices_tests/cover_test.py::TestCover::test_process_down",
"test/devices_tests/cover_test.py::TestCover::test_process_position",
"test/devices_tests/cover_test.py::TestCover::test_process_short_stop",
"test/devices_tests/cover_test.py::TestCover::test_process_stop",
"test/devices_tests/cover_test.py::TestCover::test_process_up",
"test/devices_tests/cover_test.py::TestCover::test_set_down",
"test/devices_tests/cover_test.py::TestCover::test_set_down_inverted",
"test/devices_tests/cover_test.py::TestCover::test_set_short_down",
"test/devices_tests/cover_test.py::TestCover::test_set_short_up",
"test/devices_tests/cover_test.py::TestCover::test_set_up",
"test/devices_tests/cover_test.py::TestCover::test_set_up_inverted",
"test/devices_tests/cover_test.py::TestCover::test_stop",
"test/devices_tests/cover_test.py::TestCover::test_support_angle_false",
"test/devices_tests/cover_test.py::TestCover::test_supports_angle_true",
"test/devices_tests/cover_test.py::TestCover::test_supports_position_false",
"test/devices_tests/cover_test.py::TestCover::test_supports_position_true",
"test/devices_tests/cover_test.py::TestCover::test_supports_stop_false",
"test/devices_tests/cover_test.py::TestCover::test_supports_stop_true",
"test/devices_tests/cover_test.py::TestCover::test_sync",
"test/devices_tests/cover_test.py::TestCover::test_sync_angle",
"test/devices_tests/cover_test.py::TestCover::test_sync_angle_state",
"test/devices_tests/cover_test.py::TestCover::test_sync_state",
"test/devices_tests/cover_test.py::TestCover::test_wrong_do"
] | {
"failed_lite_validators": [
"has_hyperlinks",
"has_media",
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false
} | 2020-10-25 20:45:25+00:00 | mit | 849 |
|
XKNX__xknx-572 | diff --git a/changelog.md b/changelog.md
index 8692b1d2..b8648063 100644
--- a/changelog.md
+++ b/changelog.md
@@ -2,6 +2,10 @@
## Unreleased changes
+### HA integration
+
+- knx_event: fire also for outgoing telegrams
+
### Devices
- BinarySensor: return `None` for `BinarySensor.counter` when context timeout is not used (and don't calculate it)
diff --git a/home-assistant-plugin/custom_components/xknx/__init__.py b/home-assistant-plugin/custom_components/xknx/__init__.py
index fab7ec68..f7f63d8d 100644
--- a/home-assistant-plugin/custom_components/xknx/__init__.py
+++ b/home-assistant-plugin/custom_components/xknx/__init__.py
@@ -368,6 +368,7 @@ class KNXModule:
self.telegram_received_cb,
address_filters=address_filters,
group_addresses=[],
+ match_for_outgoing_telegrams=True,
)
async def service_event_register_modify(self, call):
diff --git a/xknx/core/telegram_queue.py b/xknx/core/telegram_queue.py
index c793ebc4..ea68f572 100644
--- a/xknx/core/telegram_queue.py
+++ b/xknx/core/telegram_queue.py
@@ -37,15 +37,22 @@ class TelegramQueue:
callback: "AsyncTelegramCallback",
address_filters: Optional[List[AddressFilter]] = None,
group_addresses: Optional[List[GroupAddress]] = None,
+ match_for_outgoing_telegrams: bool = False,
):
"""Initialize Callback class."""
self.callback = callback
self._match_all = address_filters is None and group_addresses is None
+ self._match_outgoing = match_for_outgoing_telegrams
self.address_filters = [] if address_filters is None else address_filters
self.group_addresses = [] if group_addresses is None else group_addresses
def is_within_filter(self, telegram: Telegram) -> bool:
"""Test if callback is filtering for group address."""
+ if (
+ not self._match_outgoing
+ and telegram.direction == TelegramDirection.OUTGOING
+ ):
+ return False
if self._match_all:
return True
if isinstance(telegram.destination_address, GroupAddress):
@@ -69,12 +76,14 @@ class TelegramQueue:
telegram_received_cb: "AsyncTelegramCallback",
address_filters: Optional[List[AddressFilter]] = None,
group_addresses: Optional[List[GroupAddress]] = None,
+ match_for_outgoing: bool = False,
) -> Callback:
- """Register callback for a telegram beeing received from KNX bus."""
+ """Register callback for a telegram being received from KNX bus."""
callback = TelegramQueue.Callback(
telegram_received_cb,
address_filters=address_filters,
group_addresses=group_addresses,
+ match_for_outgoing_telegrams=match_for_outgoing,
)
self.telegram_received_cbs.append(callback)
return callback
@@ -165,6 +174,10 @@ class TelegramQueue:
await self.xknx.knxip_interface.send_telegram(telegram)
if isinstance(telegram.payload, GroupValueWrite):
await self.xknx.devices.process(telegram)
+
+ for telegram_received_cb in self.telegram_received_cbs:
+ if telegram_received_cb.is_within_filter(telegram):
+ await telegram_received_cb.callback(telegram)
else:
raise CommunicationError("No KNXIP interface defined")
| XKNX/xknx | afa30c7e0acfa91ad68390eb89120e9f50e35b92 | diff --git a/test/core_tests/telegram_queue_test.py b/test/core_tests/telegram_queue_test.py
index 9543f66b..6ef23d1b 100644
--- a/test/core_tests/telegram_queue_test.py
+++ b/test/core_tests/telegram_queue_test.py
@@ -117,10 +117,62 @@ class TestTelegramQueue(unittest.TestCase):
payload=GroupValueWrite(DPTBinary(1)),
)
self.loop.run_until_complete(
- xknx.telegram_queue.process_telegram_incoming(telegram)
+ xknx.telegram_queue.process_telegram_incoming(telegram),
)
async_telegram_received_cb.assert_called_once_with(telegram)
+ @patch("xknx.io.KNXIPInterface")
+ def test_register_with_outgoing_telegrams(self, if_mock):
+ """Test telegram_received_callback with outgoing telegrams."""
+ # pylint: disable=no-self-use
+ xknx = XKNX()
+ async_telegram_received_cb = AsyncMock()
+
+ async_if_send_telegram = asyncio.Future()
+ async_if_send_telegram.set_result(None)
+ if_mock.send_telegram.return_value = async_if_send_telegram
+
+ xknx.telegram_queue.register_telegram_received_cb(
+ async_telegram_received_cb, None, None, True
+ )
+
+ telegram = Telegram(
+ destination_address=GroupAddress("1/2/3"),
+ direction=TelegramDirection.OUTGOING,
+ payload=GroupValueWrite(DPTBinary(1)),
+ )
+
+ xknx.knxip_interface = if_mock
+ self.loop.run_until_complete(
+ xknx.telegram_queue.process_telegram_outgoing(telegram)
+ )
+ async_telegram_received_cb.assert_called_once_with(telegram)
+
+ @patch("xknx.io.KNXIPInterface")
+ def test_register_with_outgoing_telegrams_does_not_trigger(self, if_mock):
+ """Test telegram_received_callback with outgoing telegrams."""
+ # pylint: disable=no-self-use
+ xknx = XKNX()
+ async_telegram_received_cb = AsyncMock()
+
+ async_if_send_telegram = asyncio.Future()
+ async_if_send_telegram.set_result(None)
+ if_mock.send_telegram.return_value = async_if_send_telegram
+
+ xknx.telegram_queue.register_telegram_received_cb(async_telegram_received_cb)
+
+ telegram = Telegram(
+ destination_address=GroupAddress("1/2/3"),
+ direction=TelegramDirection.OUTGOING,
+ payload=GroupValueWrite(DPTBinary(1)),
+ )
+
+ xknx.knxip_interface = if_mock
+ self.loop.run_until_complete(
+ xknx.telegram_queue.process_telegram_outgoing(telegram)
+ )
+ async_telegram_received_cb.assert_not_called()
+
#
# TEST UNREGISTER
#
| Enhance HA knx_event
There are some requests about updating the HA `knx_event`.
For example: https://github.com/home-assistant/core/issues/43256
Currently we just fire the `knx_event` for incoming telegrams, as stated in the documentation.
We could
- [ ] fire on outgoing telegrams too, not only on incoming
- [x] add a parameter "direction" for incoming/outgoing
- [x] add a parameter "source" for IA (eg. to identify which wall switch was used)
- [x] add a parameter "type" for write or response (maybe also read)
I'm still not very convinced about using the `knx_event` at all, since we can't really decode the payload, and dealing with raw payloads should not be a user-facing task in HA (besides maybe for DPT 1/2/3).
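
For illustration, registering such a callback with the new opt-in flag could look like this. It is a minimal sketch based on the patched `TelegramQueue.register_telegram_received_cb` signature; connection setup is omitted and `XKNX()` defaults are assumed.

```python
import asyncio

from xknx import XKNX

async def main():
    xknx = XKNX()

    async def telegram_cb(telegram):
        # With match_for_outgoing=True this also fires for telegrams we send.
        print(f"{telegram.direction}: {telegram}")

    xknx.telegram_queue.register_telegram_received_cb(
        telegram_cb,
        address_filters=None,     # None/None means: match every group address
        group_addresses=None,
        match_for_outgoing=True,  # the opt-in added by this change
    )
    await xknx.start()
    await asyncio.sleep(10)       # observe traffic for a while
    await xknx.stop()

asyncio.run(main())
``` | 0.0 | afa30c7e0acfa91ad68390eb89120e9f50e35b92 | [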
"test/core_tests/telegram_queue_test.py::TestTelegramQueue::test_register_with_outgoing_telegrams"
] | [
"test/core_tests/telegram_queue_test.py::TestTelegramQueue::test_callback_group_addresses",
"test/core_tests/telegram_queue_test.py::TestTelegramQueue::test_callback_negative_address_filters",
"test/core_tests/telegram_queue_test.py::TestTelegramQueue::test_callback_no_filters",
"test/core_tests/telegram_queue_test.py::TestTelegramQueue::test_callback_positive_address_filters",
"test/core_tests/telegram_queue_test.py::TestTelegramQueue::test_outgoing",
"test/core_tests/telegram_queue_test.py::TestTelegramQueue::test_process_all_telegrams",
"test/core_tests/telegram_queue_test.py::TestTelegramQueue::test_process_exception",
"test/core_tests/telegram_queue_test.py::TestTelegramQueue::test_process_to_callback",
"test/core_tests/telegram_queue_test.py::TestTelegramQueue::test_process_to_device",
"test/core_tests/telegram_queue_test.py::TestTelegramQueue::test_rate_limit",
"test/core_tests/telegram_queue_test.py::TestTelegramQueue::test_register",
"test/core_tests/telegram_queue_test.py::TestTelegramQueue::test_register_with_outgoing_telegrams_does_not_trigger",
"test/core_tests/telegram_queue_test.py::TestTelegramQueue::test_start",
"test/core_tests/telegram_queue_test.py::TestTelegramQueue::test_unregister"
] | {
"failed_lite_validators": [
"has_hyperlinks",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false
} | 2021-01-24 14:07:32+00:00 | mit | 850 |
|
XKNX__xknx-832 | diff --git a/changelog.md b/changelog.md
index e312e426..35e3c87c 100644
--- a/changelog.md
+++ b/changelog.md
@@ -4,7 +4,8 @@
### Connection
-- Fix rate limiter wait time. Don't add time waiting for ACK or L_DATA.con frames to the rate_limit.
+- Handle separate Tunneling control and data endpoints
+- Fix rate limiter wait time: don't add time waiting for ACK or L_DATA.con frames to the rate_limit
## Internals
diff --git a/xknx/io/request_response/connect.py b/xknx/io/request_response/connect.py
index ec97d58a..b5ae8696 100644
--- a/xknx/io/request_response/connect.py
+++ b/xknx/io/request_response/connect.py
@@ -27,6 +27,7 @@ class Connect(RequestResponse):
self.route_back = route_back
super().__init__(xknx, self.udp_client, ConnectResponse)
self.communication_channel = 0
+ self.data_endpoint = HPAI()
self.identifier = 0
def create_knxipframe(self) -> KNXIPFrame:
@@ -48,6 +49,6 @@ class Connect(RequestResponse):
def on_success_hook(self, knxipframe: KNXIPFrame) -> None:
"""Set communication channel and identifier after having received a valid answer."""
assert isinstance(knxipframe.body, ConnectResponse)
- assert isinstance(knxipframe.body.identifier, int)
self.communication_channel = knxipframe.body.communication_channel
+ self.data_endpoint = knxipframe.body.data_endpoint
self.identifier = knxipframe.body.identifier
diff --git a/xknx/io/request_response/tunnelling.py b/xknx/io/request_response/tunnelling.py
index 6a295bea..f7fc0e60 100644
--- a/xknx/io/request_response/tunnelling.py
+++ b/xknx/io/request_response/tunnelling.py
@@ -26,6 +26,7 @@ class Tunnelling(RequestResponse):
self,
xknx: XKNX,
udp_client: UDPClient,
+ data_endpoint: tuple[str, int] | None,
telegram: Telegram,
src_address: IndividualAddress,
sequence_counter: int,
@@ -34,6 +35,7 @@ class Tunnelling(RequestResponse):
"""Initialize Tunnelling class."""
self.xknx = xknx
self.udp_client = udp_client
+ self.data_endpoint_addr = data_endpoint
self.src_address = src_address
super().__init__(xknx, self.udp_client, TunnellingAck)
@@ -42,6 +44,10 @@ class Tunnelling(RequestResponse):
self.sequence_counter = sequence_counter
self.communication_channel_id = communication_channel_id
+ async def send_request(self) -> None:
+ """Build knxipframe (within derived class) and send via UDP."""
+ self.udpclient.send(self.create_knxipframe(), addr=self.data_endpoint_addr)
+
def create_knxipframe(self) -> KNXIPFrame:
"""Create KNX/IP Frame object to be sent to device."""
cemi = CEMIFrame.init_from_telegram(
diff --git a/xknx/io/tunnel.py b/xknx/io/tunnel.py
index 65752e0a..cd555a13 100644
--- a/xknx/io/tunnel.py
+++ b/xknx/io/tunnel.py
@@ -69,6 +69,7 @@ class Tunnel(Interface):
self.init_udp_client()
self._src_address = xknx.own_address
+ self._data_endpoint_addr: tuple[str, int] | None = None
self.sequence_number = 0
self.communication_channel: int | None = None
self.number_heartbeat_failed = 0
@@ -144,6 +145,7 @@ class Tunnel(Interface):
)
)
self.stop_heartbeat()
+ self._data_endpoint_addr = None
if self.auto_reconnect:
self._reconnect_task = asyncio.create_task(self._reconnect())
else:
@@ -172,6 +174,7 @@ class Tunnel(Interface):
XknxConnectionState.DISCONNECTED
)
self.stop_heartbeat()
+ self._data_endpoint_addr = None
self._stop_reconnect()
await self._disconnect_request(False)
await self.udp_client.stop()
@@ -188,6 +191,11 @@ class Tunnel(Interface):
await connect.start()
if connect.success:
self.communication_channel = connect.communication_channel
+ # assign data_endpoint received from server
+ self._data_endpoint_addr = (
+ connect.data_endpoint.ip_addr,
+ connect.data_endpoint.port,
+ )
# Use the individual address provided by the tunnelling server
self._src_address = IndividualAddress(connect.identifier)
self.xknx.current_address = self._src_address
@@ -274,6 +282,7 @@ class Tunnel(Interface):
tunnelling = Tunnelling(
self.xknx,
self.udp_client,
+ self._data_endpoint_addr,
telegram,
self._src_address,
self.sequence_number,
@@ -355,7 +364,9 @@ class Tunnel(Interface):
communication_channel_id=communication_channel_id,
sequence_counter=sequence_counter,
)
- self.udp_client.send(KNXIPFrame.init_from_body(ack))
+ self.udp_client.send(
+ KNXIPFrame.init_from_body(ack), addr=self._data_endpoint_addr
+ )
def _disconnect_request_received(
self, disconnect_request: DisconnectRequest
diff --git a/xknx/io/udp_client.py b/xknx/io/udp_client.py
index 2637a73f..1aeee391 100644
--- a/xknx/io/udp_client.py
+++ b/xknx/io/udp_client.py
@@ -116,7 +116,7 @@ class UDPClient:
)
else:
knx_logger.debug(
- "Received from %s:%s at %s:\n%s",
+ "Received from %s:%s at %s:\n %s",
source[0],
source[1],
time.time(),
@@ -213,27 +213,28 @@ class UDPClient:
(transport, _) = await loop.create_datagram_endpoint(
lambda: udp_client_factory,
local_addr=self.local_addr,
- remote_addr=self.remote_addr,
)
# TODO: typing - remove cast - loop.create_datagram_endpoint should return a DatagramTransport
self.transport = cast(asyncio.DatagramTransport, transport)
- def send(self, knxipframe: KNXIPFrame) -> None:
+ def send(self, knxipframe: KNXIPFrame, addr: tuple[str, int] | None = None) -> None:
"""Send KNXIPFrame to socket."""
+ _addr = addr or self.remote_addr
knx_logger.debug(
- "Sending to %s:%s at %s:\n%s",
- self.remote_addr[0],
- self.remote_addr[1],
- time.time(),
- knxipframe,
+ "Sending to %s:%s at %s:\n %s", _addr[0], _addr[1], time.time(), knxipframe
)
if self.transport is None:
raise XKNXException("Transport not connected")
if self.multicast:
+ if addr is not None:
+ logger.warning(
+ "Multicast send to specific address is invalid. %s",
+ knxipframe,
+ )
self.transport.sendto(bytes(knxipframe.to_knx()), self.remote_addr)
else:
- self.transport.sendto(bytes(knxipframe.to_knx()))
+ self.transport.sendto(bytes(knxipframe.to_knx()), addr=_addr)
def getsockname(self) -> tuple[str, int]:
"""Return socket IP and port."""
diff --git a/xknx/knxip/connect_response.py b/xknx/knxip/connect_response.py
index 3f9c3835..865923ef 100644
--- a/xknx/knxip/connect_response.py
+++ b/xknx/knxip/connect_response.py
@@ -32,8 +32,8 @@ class ConnectResponse(KNXIPBodyResponse):
communication_channel: int = 0,
status_code: ErrorCode = ErrorCode.E_NO_ERROR,
request_type: ConnectRequestType = ConnectRequestType.TUNNEL_CONNECTION,
- control_endpoint: HPAI = HPAI(),
- identifier: int | None = None,
+ data_endpoint: HPAI = HPAI(),
+ identifier: int = 0,
):
"""Initialize ConnectResponse class."""
super().__init__(xknx)
@@ -41,7 +41,7 @@ class ConnectResponse(KNXIPBodyResponse):
self.communication_channel = communication_channel
self.status_code = status_code
self.request_type = request_type
- self.control_endpoint = control_endpoint
+ self.data_endpoint = data_endpoint
# identifier shall contain KNX Individual Address assigned to this KNXnet/IP Tunnelling connection
self.identifier = identifier
@@ -67,11 +67,11 @@ class ConnectResponse(KNXIPBodyResponse):
pos = 2
if self.status_code == ErrorCode.E_NO_ERROR:
- pos += self.control_endpoint.from_knx(raw[pos:])
+ pos += self.data_endpoint.from_knx(raw[pos:])
pos += crd_from_knx(raw[pos:])
else:
# do not parse HPAI and CRD in case of errors - just check length
- pos += len(raw[pos:])
+ pos = len(raw)
return pos
def to_knx(self) -> list[int]:
@@ -91,7 +91,7 @@ class ConnectResponse(KNXIPBodyResponse):
data = []
data.append(self.communication_channel)
data.append(self.status_code.value)
- data.extend(self.control_endpoint.to_knx())
+ data.extend(self.data_endpoint.to_knx())
data.extend(crd_to_knx())
return data
@@ -102,7 +102,7 @@ class ConnectResponse(KNXIPBodyResponse):
"<ConnectResponse "
f'communication_channel="{self.communication_channel}" '
f'status_code="{self.status_code}" '
- f'control_endpoint="{self.control_endpoint}" '
+ f'data_endpoint="{self.data_endpoint}" '
f'request_type="{self.request_type}" '
f'identifier="{self.identifier}" />'
)
diff --git a/xknx/knxip/knxip.py b/xknx/knxip/knxip.py
index b5425a86..ed333e0a 100644
--- a/xknx/knxip/knxip.py
+++ b/xknx/knxip/knxip.py
@@ -101,7 +101,7 @@ class KNXIPFrame:
data.extend(self.body.to_knx())
return data
- def __str__(self) -> str:
+ def __repr__(self) -> str:
"""Return object as readable string."""
return f'<KNXIPFrame {self.header}\n body="{self.body}" />'
| XKNX/xknx | e2f84405a5a62492c4cbf86de9e32abef043cecb | diff --git a/test/io_tests/tunnel_test.py b/test/io_tests/tunnel_test.py
index 3615b9cd..d9d8aa48 100644
--- a/test/io_tests/tunnel_test.py
+++ b/test/io_tests/tunnel_test.py
@@ -1,14 +1,24 @@
"""Unit test for KNX/IP Tunnelling Request/Response."""
import asyncio
-from unittest.mock import Mock, patch
+from unittest.mock import AsyncMock, Mock, patch
import pytest
from xknx import XKNX
from xknx.dpt import DPTArray
from xknx.io import Tunnel
-from xknx.knxip import HPAI, CEMIFrame, KNXIPFrame, TunnellingAck, TunnellingRequest
+from xknx.knxip import (
+ HPAI,
+ CEMIFrame,
+ ConnectRequest,
+ ConnectResponse,
+ DisconnectRequest,
+ DisconnectResponse,
+ KNXIPFrame,
+ TunnellingAck,
+ TunnellingRequest,
+)
from xknx.knxip.knxip_enum import CEMIMessageCode
-from xknx.telegram import Telegram, TelegramDirection
+from xknx.telegram import IndividualAddress, Telegram, TelegramDirection
from xknx.telegram.apci import GroupValueWrite
@@ -103,3 +113,88 @@ class TestTunnel:
# one call for the outgoing request and one for the ACK for the confirmation
assert self.tunnel.udp_client.send.call_count == 2
await task
+
+ async def test_tunnel_connect_send_disconnect(self, time_travel):
+ """Test initiating a tunnelling connection."""
+ local_addr = ("192.168.1.1", 12345)
+ gateway_control_addr = ("192.168.1.2", 3671)
+ gateway_data_addr = ("192.168.1.2", 56789)
+ self.tunnel.udp_client.connect = AsyncMock()
+ self.tunnel.udp_client.getsockname = Mock(return_value=local_addr)
+ self.tunnel.udp_client.send = Mock()
+ self.tunnel.udp_client.stop = AsyncMock()
+
+ # Connect
+ connect_request = ConnectRequest(
+ self.xknx,
+ control_endpoint=HPAI(*local_addr),
+ data_endpoint=HPAI(*local_addr),
+ )
+ connect_frame = KNXIPFrame.init_from_body(connect_request)
+
+ connection_task = asyncio.create_task(self.tunnel.connect())
+ await time_travel(0)
+ self.tunnel.udp_client.connect.assert_called_once()
+ self.tunnel.udp_client.send.assert_called_once_with(connect_frame)
+
+ connect_response_frame = KNXIPFrame.init_from_body(
+ ConnectResponse(
+ self.xknx,
+ communication_channel=23,
+ data_endpoint=HPAI(*gateway_data_addr),
+ identifier=7,
+ )
+ )
+ self.tunnel.udp_client.handle_knxipframe(
+ connect_response_frame, gateway_control_addr
+ )
+ await connection_task
+ assert self.tunnel._data_endpoint_addr == gateway_data_addr
+ assert self.tunnel._src_address == IndividualAddress(7)
+
+ # Send - use data endpoint
+ self.tunnel.udp_client.send.reset_mock()
+ test_telegram = Telegram(payload=GroupValueWrite(DPTArray((1,))))
+ test_telegram_frame = KNXIPFrame.init_from_body(
+ TunnellingRequest(
+ self.xknx,
+ communication_channel_id=23,
+ sequence_counter=0,
+ cemi=CEMIFrame.init_from_telegram(
+ self.xknx,
+ test_telegram,
+ code=CEMIMessageCode.L_DATA_REQ,
+ src_addr=IndividualAddress(7),
+ ),
+ )
+ )
+ asyncio.create_task(self.tunnel.send_telegram(test_telegram))
+ await time_travel(0)
+ self.tunnel.udp_client.send.assert_called_once_with(
+ test_telegram_frame, addr=gateway_data_addr
+ )
+ # skip ack and confirmation
+
+ # Disconnect
+ self.tunnel.udp_client.send.reset_mock()
+ disconnect_request = DisconnectRequest(
+ self.xknx, communication_channel_id=23, control_endpoint=HPAI(*local_addr)
+ )
+ disconnect_frame = KNXIPFrame.init_from_body(disconnect_request)
+
+ disconnection_task = asyncio.create_task(self.tunnel.disconnect())
+ await time_travel(0)
+ self.tunnel.udp_client.send.assert_called_once_with(disconnect_frame)
+
+ disconnect_response_frame = KNXIPFrame.init_from_body(
+ DisconnectResponse(
+ self.xknx,
+ communication_channel_id=23,
+ )
+ )
+ self.tunnel.udp_client.handle_knxipframe(
+ disconnect_response_frame, gateway_control_addr
+ )
+ await disconnection_task
+ assert self.tunnel._data_endpoint_addr is None
+ self.tunnel.udp_client.stop.assert_called_once()
diff --git a/test/io_tests/tunnelling_test.py b/test/io_tests/tunnelling_test.py
index bfb4142c..b893e3cc 100644
--- a/test/io_tests/tunnelling_test.py
+++ b/test/io_tests/tunnelling_test.py
@@ -26,6 +26,7 @@ class TestTunnelling:
"""Test tunnelling from KNX bus."""
xknx = XKNX()
communication_channel_id = 23
+ data_endpoint = ("192.168.1.2", 4567)
udp_client = UDPClient(xknx, ("192.168.1.1", 0), ("192.168.1.2", 1234))
telegram = Telegram(
destination_address=GroupAddress("1/2/3"),
@@ -36,6 +37,7 @@ class TestTunnelling:
tunnelling = Tunnelling(
xknx,
udp_client,
+ data_endpoint,
telegram,
src_address,
sequence_counter,
@@ -60,7 +62,7 @@ class TestTunnelling:
) as mock_udp_getsockname:
mock_udp_getsockname.return_value = ("192.168.1.3", 4321)
await tunnelling.start()
- mock_udp_send.assert_called_with(exp_knxipframe)
+ mock_udp_send.assert_called_with(exp_knxipframe, addr=data_endpoint)
# Response KNX/IP-Frame with wrong type
wrong_knxipframe = KNXIPFrame(xknx)
diff --git a/test/knxip_tests/connect_response_test.py b/test/knxip_tests/connect_response_test.py
index 71f61a4c..3bd85054 100644
--- a/test/knxip_tests/connect_response_test.py
+++ b/test/knxip_tests/connect_response_test.py
@@ -38,9 +38,7 @@ class TestKNXIPConnectResponse:
assert isinstance(knxipframe.body, ConnectResponse)
assert knxipframe.body.communication_channel == 1
assert knxipframe.body.status_code == ErrorCode.E_NO_ERROR
- assert knxipframe.body.control_endpoint == HPAI(
- ip_addr="192.168.42.10", port=3671
- )
+ assert knxipframe.body.data_endpoint == HPAI(ip_addr="192.168.42.10", port=3671)
assert knxipframe.body.request_type == ConnectRequestType.TUNNEL_CONNECTION
assert knxipframe.body.identifier == 4607
@@ -49,7 +47,7 @@ class TestKNXIPConnectResponse:
communication_channel=1,
status_code=ErrorCode.E_NO_ERROR,
request_type=ConnectRequestType.TUNNEL_CONNECTION,
- control_endpoint=HPAI(ip_addr="192.168.42.10", port=3671),
+ data_endpoint=HPAI(ip_addr="192.168.42.10", port=3671),
identifier=4607,
)
knxipframe2 = KNXIPFrame.init_from_body(connect_response)
@@ -151,7 +149,7 @@ class TestKNXIPConnectResponse:
communication_channel=192,
status_code=ErrorCode.E_NO_MORE_CONNECTIONS,
request_type=ConnectRequestType.TUNNEL_CONNECTION,
- control_endpoint=HPAI(ip_addr="10.1.0.41", port=3671),
+ data_endpoint=HPAI(ip_addr="10.1.0.41", port=3671),
identifier=0,
)
knxipframe2 = KNXIPFrame.init_from_body(connect_response)
diff --git a/test/str_test.py b/test/str_test.py
index 72c9ef56..3d34719f 100644
--- a/test/str_test.py
+++ b/test/str_test.py
@@ -526,12 +526,12 @@ class TestStringRepresentations:
connect_response = ConnectResponse(xknx)
connect_response.communication_channel = 13
connect_response.request_type = ConnectRequestType.TUNNEL_CONNECTION
- connect_response.control_endpoint = HPAI(ip_addr="192.168.42.1", port=33941)
+ connect_response.data_endpoint = HPAI(ip_addr="192.168.42.1", port=33941)
connect_response.identifier = 42
assert (
str(connect_response)
== '<ConnectResponse communication_channel="13" status_code="ErrorCode.E_NO_ERROR" '
- 'control_endpoint="192.168.42.1:33941" '
+ 'data_endpoint="192.168.42.1:33941" '
'request_type="ConnectRequestType.TUNNEL_CONNECTION" identifier="42" />'
)
| Handle different Tunneling data and control endpoints correctly
KNX tunneling servers can have different data and control endpoints (e.g. Calimero does).
`DisconnectRequest`s should be sent to the other device's control endpoint instead of to the data endpoint (see KNX specification 03_08_04 Tunnelling, §2.6 Frame confirmation).
I have not found another use case for the control endpoint yet.
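
A condensed sketch of the routing rule the patch implements is shown below. It is simplified from `xknx/io/tunnel.py`; the class and method names here are illustrative only, while the `data_endpoint` HPAI and the `send(..., addr=...)` call match the patched code.

```python
class TunnelSketch:
    """Illustrative only; the real logic lives in xknx/io/tunnel.py."""

    def __init__(self, udp_client):
        self.udp_client = udp_client     # socket talking to the server's control endpoint
        self._data_endpoint_addr = None  # learned from the ConnectResponse HPAI

    def on_connect_response(self, connect_response):
        hpai = connect_response.data_endpoint
        self._data_endpoint_addr = (hpai.ip_addr, hpai.port)

    def send_tunnelling_request(self, frame):
        # Tunnelling data frames (and their ACKs) go to the data endpoint.
        self.udp_client.send(frame, addr=self._data_endpoint_addr)

    def send_disconnect_request(self, frame):
        # Control frames (connect/disconnect/heartbeat) keep using the control
        # endpoint, i.e. the udp_client's default remote address.
        self.udp_client.send(frame)
``` | 0.0 | e2f84405a5a62492c4cbf86de9e32abef043cecb | [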
"test/io_tests/tunnel_test.py::TestTunnel::test_tunnel_connect_send_disconnect",
"test/io_tests/tunnelling_test.py::TestTunnelling::test_tunnelling",
"test/knxip_tests/connect_response_test.py::TestKNXIPConnectResponse::test_connect_response",
"test/knxip_tests/connect_response_test.py::TestKNXIPConnectResponse::test_connect_response_connection_error_gira",
"test/str_test.py::TestStringRepresentations::test_connect_response"
] | [
"test/io_tests/tunnel_test.py::TestTunnel::test_tunnel_request_received",
"test/io_tests/tunnel_test.py::TestTunnel::test_tunnel_request_received_cemi_too_small",
"test/io_tests/tunnel_test.py::TestTunnel::test_tunnel_request_received_apci_unsupported",
"test/knxip_tests/connect_response_test.py::TestKNXIPConnectResponse::test_from_knx_wrong_crd",
"test/knxip_tests/connect_response_test.py::TestKNXIPConnectResponse::test_from_knx_wrong_crd2",
"test/knxip_tests/connect_response_test.py::TestKNXIPConnectResponse::test_connect_response_connection_error_lox",
"test/knxip_tests/connect_response_test.py::TestKNXIPConnectResponse::test_connect_response_connection_error_mdt",
"test/str_test.py::TestStringRepresentations::test_remote_value",
"test/str_test.py::TestStringRepresentations::test_binary_sensor",
"test/str_test.py::TestStringRepresentations::test_climate",
"test/str_test.py::TestStringRepresentations::test_climate_mode",
"test/str_test.py::TestStringRepresentations::test_cover",
"test/str_test.py::TestStringRepresentations::test_fan",
"test/str_test.py::TestStringRepresentations::test_light",
"test/str_test.py::TestStringRepresentations::test_light_dimmable",
"test/str_test.py::TestStringRepresentations::test_light_color",
"test/str_test.py::TestStringRepresentations::test_notification",
"test/str_test.py::TestStringRepresentations::test_scene",
"test/str_test.py::TestStringRepresentations::test_sensor",
"test/str_test.py::TestStringRepresentations::test_expose_sensor",
"test/str_test.py::TestStringRepresentations::test_switch",
"test/str_test.py::TestStringRepresentations::test_weather",
"test/str_test.py::TestStringRepresentations::test_datetime",
"test/str_test.py::TestStringRepresentations::test_could_not_parse_telegramn_exception",
"test/str_test.py::TestStringRepresentations::test_could_not_parse_telegramn_exception_parameter",
"test/str_test.py::TestStringRepresentations::test_could_not_parse_knxip_exception",
"test/str_test.py::TestStringRepresentations::test_conversion_error_exception",
"test/str_test.py::TestStringRepresentations::test_conversion_error_exception_parameter",
"test/str_test.py::TestStringRepresentations::test_could_not_parse_address_exception",
"test/str_test.py::TestStringRepresentations::test_device_illegal_value_exception",
"test/str_test.py::TestStringRepresentations::test_address",
"test/str_test.py::TestStringRepresentations::test_dpt_array",
"test/str_test.py::TestStringRepresentations::test_dpt_binary",
"test/str_test.py::TestStringRepresentations::test_telegram",
"test/str_test.py::TestStringRepresentations::test_dib_generic",
"test/str_test.py::TestStringRepresentations::test_dib_supp_svc_families",
"test/str_test.py::TestStringRepresentations::test_dib_device_informatio",
"test/str_test.py::TestStringRepresentations::test_hpai",
"test/str_test.py::TestStringRepresentations::test_header",
"test/str_test.py::TestStringRepresentations::test_connect_request",
"test/str_test.py::TestStringRepresentations::test_disconnect_request",
"test/str_test.py::TestStringRepresentations::test_disconnect_response",
"test/str_test.py::TestStringRepresentations::test_connectionstate_request",
"test/str_test.py::TestStringRepresentations::test_connectionstate_response",
"test/str_test.py::TestStringRepresentations::test_search_reqeust",
"test/str_test.py::TestStringRepresentations::test_search_response",
"test/str_test.py::TestStringRepresentations::test_tunnelling_request",
"test/str_test.py::TestStringRepresentations::test_tunnelling_ack",
"test/str_test.py::TestStringRepresentations::test_cemi_frame",
"test/str_test.py::TestStringRepresentations::test_knxip_frame",
"test/str_test.py::TestStringRepresentations::test_gateway_descriptor",
"test/str_test.py::TestStringRepresentations::test_routing_indication_str"
] | {
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false
} | 2021-12-27 10:18:03+00:00 | mit | 851 |
|
XKNX__xknx-910 | diff --git a/changelog.md b/changelog.md
index aef5dcc2..b7f15948 100644
--- a/changelog.md
+++ b/changelog.md
@@ -6,10 +6,12 @@
- Add support for SearchRequestExtended to find interfaces that allow IP Secure
- Use XKNX `state_updater` argument to set default method for StateUpdater. StateUpdater is always started - Device / RemoteValue can always opt in to use it, even if default is `False`.
+- Add support for DPT 16.001 (DPT_String_8859_1) as `DPTLatin1` with value_type "latin_1".
### Bug fixes
- Stop SecureSession keepalive_task when session is stopped (and don't restart it from sending STATUS_CLOSE)
+- Fix encoding invalid characters for DPTString (value_type "string")
## 0.20.0 IP Secure 2022-03-29
diff --git a/xknx/dpt/__init__.py b/xknx/dpt/__init__.py
index 669c6993..cea33e90 100644
--- a/xknx/dpt/__init__.py
+++ b/xknx/dpt/__init__.py
@@ -172,7 +172,7 @@ from .dpt_date import DPTDate
from .dpt_datetime import DPTDateTime
from .dpt_hvac_mode import DPTControllerStatus, DPTHVACContrMode, DPTHVACMode
from .dpt_scaling import DPTAngle, DPTScaling
-from .dpt_string import DPTString
+from .dpt_string import DPTLatin1, DPTString
from .dpt_time import DPTTime
__all__ = [
@@ -249,6 +249,7 @@ __all__ = [
"DPTHVACMode",
"DPTImpedance",
"DPTKelvinPerPercent",
+ "DPTLatin1",
"DPTLength",
"DPTLengthMm",
"DPTLightQuantity",
diff --git a/xknx/dpt/dpt_string.py b/xknx/dpt/dpt_string.py
index 0788bc33..faf33207 100644
--- a/xknx/dpt/dpt_string.py
+++ b/xknx/dpt/dpt_string.py
@@ -8,7 +8,7 @@ from .dpt import DPTBase
class DPTString(DPTBase):
"""
- Abstraction for KNX 14 Octet ASCII String.
+ Abstraction for KNX 14 Octet ASCII string.
DPT 16.000
"""
@@ -19,15 +19,15 @@ class DPTString(DPTBase):
value_type = "string"
unit = ""
+ _encoding = "ascii"
+
@classmethod
def from_knx(cls, raw: tuple[int, ...]) -> str:
"""Parse/deserialize from KNX/IP raw data."""
cls.test_bytesarray(raw)
- value = ""
- for byte in raw:
- if byte != 0x00:
- value += chr(byte)
- return value
+ return bytes(byte for byte in raw if byte != 0x00).decode(
+ cls._encoding, errors="replace"
+ )
@classmethod
def to_knx(cls, value: str) -> tuple[int, ...]:
@@ -36,15 +36,26 @@ class DPTString(DPTBase):
knx_value = str(value)
if not cls._test_boundaries(knx_value):
raise ValueError
- raw = [ord(character) for character in knx_value]
- raw.extend([0] * (cls.payload_length - len(raw)))
- # replace invalid characters with question marks
- # bytes(knx_value, 'ascii') would raise UnicodeEncodeError
- return tuple(map(lambda char: char if char <= 0xFF else ord("?"), raw))
except ValueError:
raise ConversionError(f"Could not serialize {cls.__name__}", value=value)
+ # replace invalid characters with question marks
+ raw_bytes = knx_value.encode(cls._encoding, errors="replace")
+ padding = bytes(cls.payload_length - len(raw_bytes))
+ return tuple(raw_bytes + padding)
@classmethod
def _test_boundaries(cls, value: str) -> bool:
"""Test if value is within defined range for this object."""
return len(value) <= cls.payload_length
+
+
+class DPTLatin1(DPTString):
+ """
+ Abstraction for KNX 14 Octet Latin-1 (ISO 8859-1) string.
+
+ DPT 16.001
+ """
+
+ dpt_sub_number = 1
+ value_type = "latin_1"
+ _encoding = "latin_1"
| XKNX/xknx | cf242d29979d21f5433b3e9e36b35ea8b1d14356 | diff --git a/test/dpt_tests/dpt_string_test.py b/test/dpt_tests/dpt_string_test.py
index b73065ae..0cf820e5 100644
--- a/test/dpt_tests/dpt_string_test.py
+++ b/test/dpt_tests/dpt_string_test.py
@@ -1,167 +1,91 @@
"""Unit test for KNX string object."""
import pytest
-from xknx.dpt import DPTString
+from xknx.dpt import DPTLatin1, DPTString
from xknx.exceptions import ConversionError
class TestDPTString:
- """Test class for KNX float object."""
+ """Test class for KNX ASCII string object."""
- def test_value_from_documentation(self):
- """Test parsing and streaming Example from documentation."""
- raw = (
- 0x4B,
- 0x4E,
- 0x58,
- 0x20,
- 0x69,
- 0x73,
- 0x20,
- 0x4F,
- 0x4B,
- 0x00,
- 0x00,
- 0x00,
- 0x00,
- 0x00,
- )
- string = "KNX is OK"
- assert DPTString.to_knx(string) == raw
- assert DPTString.from_knx(raw) == string
-
- def test_value_empty_string(self):
- """Test parsing and streaming empty string."""
- raw = (
- 0x00,
- 0x00,
- 0x00,
- 0x00,
- 0x00,
- 0x00,
- 0x00,
- 0x00,
- 0x00,
- 0x00,
- 0x00,
- 0x00,
- 0x00,
- 0x00,
- )
- string = ""
- assert DPTString.to_knx(string) == raw
- assert DPTString.from_knx(raw) == string
+ @pytest.mark.parametrize(
+ "string,raw",
+ [
+ (
+ "KNX is OK",
+ (75, 78, 88, 32, 105, 115, 32, 79, 75, 0, 0, 0, 0, 0),
+ ),
+ (
+ "",
+ (0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0),
+ ),
+ (
+ "AbCdEfGhIjKlMn",
+ (65, 98, 67, 100, 69, 102, 71, 104, 73, 106, 75, 108, 77, 110),
+ ),
+ (
+ ".,:;-_!?$@&#%/",
+ (46, 44, 58, 59, 45, 95, 33, 63, 36, 64, 38, 35, 37, 47),
+ ),
+ ],
+ )
+ @pytest.mark.parametrize("test_dpt", [DPTString, DPTLatin1])
+ def test_values(self, string, raw, test_dpt):
+ """Test parsing and streaming strings."""
+ assert test_dpt.to_knx(string) == raw
+ assert test_dpt.from_knx(raw) == string
- def test_value_max_string(self):
- """Test parsing and streaming large string."""
- raw = (
- 0x41,
- 0x41,
- 0x41,
- 0x41,
- 0x41,
- 0x42,
- 0x42,
- 0x42,
- 0x42,
- 0x42,
- 0x43,
- 0x43,
- 0x43,
- 0x43,
- )
- string = "AAAAABBBBBCCCC"
- assert DPTString.to_knx(string) == raw
- assert DPTString.from_knx(raw) == string
-
- def test_value_special_chars(self):
- """Test parsing and streaming string with special chars."""
- raw = (
- 0x48,
- 0x65,
- 0x79,
- 0x21,
- 0x3F,
- 0x24,
- 0x20,
- 0xC4,
- 0xD6,
- 0xDC,
- 0xE4,
- 0xF6,
- 0xFC,
- 0xDF,
- )
- string = "Hey!?$ ÄÖÜäöüß"
- assert DPTString.to_knx(string) == raw
- assert DPTString.from_knx(raw) == string
-
- def test_to_knx_invalid_chars(self):
- """Test streaming string with invalid chars."""
- raw = (
- 0x4D,
- 0x61,
- 0x74,
- 0x6F,
- 0x75,
- 0x3F,
- 0x00,
- 0x00,
- 0x00,
- 0x00,
- 0x00,
- 0x00,
- 0x00,
- 0x00,
- )
- string = "Matouš"
- knx_string = "Matou?"
+ @pytest.mark.parametrize(
+ "string,knx_string,raw",
+ [
+ (
+ "Matouš",
+ "Matou?",
+ (77, 97, 116, 111, 117, 63, 0, 0, 0, 0, 0, 0, 0, 0),
+ ),
+ (
+ "Gänsefüßchen",
+ "G?nsef??chen",
+ (71, 63, 110, 115, 101, 102, 63, 63, 99, 104, 101, 110, 0, 0),
+ ),
+ ],
+ )
+ def test_to_knx_ascii_invalid_chars(self, string, knx_string, raw):
+ """Test streaming ASCII string with invalid chars."""
assert DPTString.to_knx(string) == raw
assert DPTString.from_knx(raw) == knx_string
+ @pytest.mark.parametrize(
+ "string,raw",
+ [
+ (
+ "Gänsefüßchen",
+ (71, 228, 110, 115, 101, 102, 252, 223, 99, 104, 101, 110, 0, 0),
+ ),
+ (
+ "àáâãåæçèéêëìíî",
+ (224, 225, 226, 227, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238),
+ ),
+ ],
+ )
+ def test_to_knx_latin_1(self, string, raw):
+ """Test streaming Latin-1 strings."""
+ assert DPTLatin1.to_knx(string) == raw
+ assert DPTLatin1.from_knx(raw) == string
+
def test_to_knx_too_long(self):
"""Test serializing DPTString to KNX with wrong value (to long)."""
with pytest.raises(ConversionError):
DPTString.to_knx("AAAAABBBBBCCCCx")
- def test_from_knx_wrong_parameter_too_large(self):
- """Test parsing of KNX string with too many elements."""
- raw = (
- 0x00,
- 0x00,
- 0x00,
- 0x00,
- 0x00,
- 0x00,
- 0x00,
- 0x00,
- 0x00,
- 0x00,
- 0x00,
- 0x00,
- 0x00,
- 0x00,
- 0x00,
- )
- with pytest.raises(ConversionError):
- DPTString.from_knx(raw)
-
- def test_from_knx_wrong_parameter_too_small(self):
- """Test parsing of KNX string with too less elements."""
- raw = (
- 0x00,
- 0x00,
- 0x00,
- 0x00,
- 0x00,
- 0x00,
- 0x00,
- 0x00,
- 0x00,
- 0x00,
- 0x00,
- 0x00,
- 0x00,
- )
+ @pytest.mark.parametrize(
+ "raw",
+ [
+ ((0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0),),
+ ((0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0),),
+ ],
+ )
+ def test_from_knx_wrong_parameter_length(self, raw):
+ """Test parsing of KNX string with wrong elements length."""
with pytest.raises(ConversionError):
DPTString.from_knx(raw)
| ISO 8859-1 String support (Latin-1)
- support DPT 16.001 ISO 8859-1 strings
Currently we only support DPT 16.000, but I think we encode/decode it as Unicode (`chr()`) instead of just ASCII. | 0.0 | cf242d29979d21f5433b3e9e36b35ea8b1d14356 | [
"test/dpt_tests/dpt_string_test.py::TestDPTString::test_values[DPTString-KNX",
"test/dpt_tests/dpt_string_test.py::TestDPTString::test_values[DPTString--raw1]",
"test/dpt_tests/dpt_string_test.py::TestDPTString::test_values[DPTString-AbCdEfGhIjKlMn-raw2]",
"test/dpt_tests/dpt_string_test.py::TestDPTString::test_values[DPTString-.,:;-_!?$@&#%/-raw3]",
"test/dpt_tests/dpt_string_test.py::TestDPTString::test_values[DPTLatin1-KNX",
"test/dpt_tests/dpt_string_test.py::TestDPTString::test_values[DPTLatin1--raw1]",
"test/dpt_tests/dpt_string_test.py::TestDPTString::test_values[DPTLatin1-AbCdEfGhIjKlMn-raw2]",
"test/dpt_tests/dpt_string_test.py::TestDPTString::test_values[DPTLatin1-.,:;-_!?$@&#%/-raw3]",
"test/dpt_tests/dpt_string_test.py::TestDPTString::test_to_knx_ascii_invalid_chars[Matou\\u0161-Matou?-raw0]",
"test/dpt_tests/dpt_string_test.py::TestDPTString::test_to_knx_ascii_invalid_chars[G\\xe4nsef\\xfc\\xdfchen-G?nsef??chen-raw1]",
"test/dpt_tests/dpt_string_test.py::TestDPTString::test_to_knx_latin_1[G\\xe4nsef\\xfc\\xdfchen-raw0]",
"test/dpt_tests/dpt_string_test.py::TestDPTString::test_to_knx_latin_1[\\xe0\\xe1\\xe2\\xe3\\xe5\\xe6\\xe7\\xe8\\xe9\\xea\\xeb\\xec\\xed\\xee-raw1]",
"test/dpt_tests/dpt_string_test.py::TestDPTString::test_to_knx_too_long",
"test/dpt_tests/dpt_string_test.py::TestDPTString::test_from_knx_wrong_parameter_length[raw0]",
"test/dpt_tests/dpt_string_test.py::TestDPTString::test_from_knx_wrong_parameter_length[raw1]"
] | [] | {
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false
} | 2022-04-05 14:17:08+00:00 | mit | 852 |
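A minimal illustration of the encoding behavior the XKNX patch above relies on, using only the Python standard library (the string and byte values mirror the record's test data; nothing here is xknx API):

```python
# Sketch: why DPT 16.000 (ASCII) and DPT 16.001 (Latin-1) differ.
text = "Gänsefüßchen"

# ASCII with errors="replace" turns each non-ASCII character into "?",
# which is the DPTString behavior the fix preserves.
assert text.encode("ascii", errors="replace") == b"G?nsef??chen"

# Latin-1 keeps the same characters intact, so DPTLatin1 round-trips.
raw = text.encode("latin_1", errors="replace")
assert raw.decode("latin_1") == text

# to_knx() pads the payload to the fixed 14 octets with zero bytes.
payload = tuple(raw + bytes(14 - len(raw)))
assert len(payload) == 14 and payload[-2:] == (0, 0)
```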
|
Yelp__pyramid-hypernova-15 | diff --git a/pyramid_hypernova/batch.py b/pyramid_hypernova/batch.py
index e4592f1..869b5ea 100644
--- a/pyramid_hypernova/batch.py
+++ b/pyramid_hypernova/batch.py
@@ -131,9 +131,9 @@ class BatchRequest(object):
__, __, exc_traceback = sys.exc_info()
error = HypernovaError(
- repr(type(e)),
+ type(e).__name__,
str(e),
- traceback.format_tb(exc_traceback),
+ [line.rstrip('\n') for line in traceback.format_tb(exc_traceback)],
)
self.plugin_controller.on_error(error, jobs)
response = create_fallback_response(jobs, True, self.json_encoder, error)
| Yelp/pyramid-hypernova | 573e13042412461fa31bf046473c0033f9dc3fec | diff --git a/tests/batch_test.py b/tests/batch_test.py
index 6a9dcf3..1b9f790 100644
--- a/tests/batch_test.py
+++ b/tests/batch_test.py
@@ -331,7 +331,7 @@ class TestBatchRequest(object):
assert response == {
token.identifier: JobResult(
error=HypernovaError(
- name="<class 'fido.exceptions.NetworkError'>",
+ name='NetworkError',
message='oh no',
stack=mock.ANY,
),
@@ -475,16 +475,20 @@ class TestBatchRequestLifecycleMethods(object):
with mock.patch(
'fido.fetch'
) as mock_fetch, mock.patch(
- 'traceback.format_tb'
- ) as mock_format_tb:
+ 'traceback.format_tb',
+ return_value=[
+ 'Traceback:\n',
+ ' foo:\n',
+ ],
+ ):
mock_fetch.return_value.wait.return_value.json.side_effect = NetworkError('oh no')
batch_request.submit()
spy_plugin_controller.on_error.assert_called_once_with(
HypernovaError(
- name="<class 'fido.exceptions.NetworkError'>",
+ name='NetworkError',
message='oh no',
- stack=mock_format_tb.return_value,
+ stack=['Traceback:', ' foo:'],
),
batch_request.jobs,
)
| "stack" property on HypernovaError is formatted inconsistently
When the error is a Python traceback, each element in the list has a trailing newline:
```python
[' File "/nail/home/ckuehl/pg/yelp-main/virtualenv_run/lib/python2.7/site-packages/pyramid_hypernova/batch.py", line 116, in process_responses\n response_json = r.json()\n',
' File "/nail/home/ckuehl/pg/yelp-main/virtualenv_run/lib/python2.7/site-packages/fido/fido.py", line 98, in json\n return json.loads(self.body.decode(\'utf-8\'))\n',
' File "/usr/lib/python2.7/json/__init__.py", line 339, in loads\n return _default_decoder.decode(s)\n',
[...]
```
When the error comes from the Hypernova server (a JS traceback), each element has no trailing newline:
```python
[u'ReferenceError: Component "yelp_main__DEV__yelp_main__SearchApp__dynamic" not registered',
u'at YOUR-COMPONENT-DID-NOT-REGISTER_yelp_main__DEV__yelp_main__SearchApp__dynamic:1:1',
u'at notFound (/nail/home/ckuehl/proj/react_component_renderer/node_modules/hypernova/lib/utils/BatchManager.js:36:15)',
u'at /nail/home/ckuehl/proj/react_component_renderer/node_modules/hypernova/lib/utils/BatchManager.js:186:35',
u'at tryCatcher (/nail/home/ckuehl/proj/react_component_renderer/node_modules/bluebird/js/release/util.js:16:23)',
u'at Promise._settlePromiseFromHandler (/nail/home/ckuehl/proj/react_component_renderer/node_modules/bluebird/js/release/promise.js:512:31)',
[...]
```
Could we normalize these so that it's easier to print them out in a human-readable way? | 0.0 | 573e13042412461fa31bf046473c0033f9dc3fec | [
"tests/batch_test.py::TestBatchRequest::test_batch_request_with_unhealthy_service[test_data0-None]",
"tests/batch_test.py::TestBatchRequest::test_batch_request_with_unhealthy_service[test_data2-2]",
"tests/batch_test.py::TestBatchRequest::test_batch_request_with_unhealthy_service[test_data2-None]",
"tests/batch_test.py::TestBatchRequest::test_batch_request_with_unhealthy_service[test_data1-2]",
"tests/batch_test.py::TestBatchRequest::test_batch_request_with_unhealthy_service[test_data0-2]",
"tests/batch_test.py::TestBatchRequest::test_batch_request_with_unhealthy_service[test_data0-1]",
"tests/batch_test.py::TestBatchRequest::test_batch_request_with_unhealthy_service[test_data1-None]",
"tests/batch_test.py::TestBatchRequest::test_batch_request_with_unhealthy_service[test_data1-1]",
"tests/batch_test.py::TestBatchRequest::test_batch_request_with_unhealthy_service[test_data2-1]",
"tests/batch_test.py::TestBatchRequestLifecycleMethods::test_calls_on_error_on_unhealthy_service[test_data2-None]",
"tests/batch_test.py::TestBatchRequestLifecycleMethods::test_calls_on_error_on_unhealthy_service[test_data0-None]",
"tests/batch_test.py::TestBatchRequestLifecycleMethods::test_calls_on_error_on_unhealthy_service[test_data1-2]",
"tests/batch_test.py::TestBatchRequestLifecycleMethods::test_calls_on_error_on_unhealthy_service[test_data2-1]",
"tests/batch_test.py::TestBatchRequestLifecycleMethods::test_calls_on_error_on_unhealthy_service[test_data1-1]",
"tests/batch_test.py::TestBatchRequestLifecycleMethods::test_calls_on_error_on_unhealthy_service[test_data0-2]",
"tests/batch_test.py::TestBatchRequestLifecycleMethods::test_calls_on_error_on_unhealthy_service[test_data1-None]",
"tests/batch_test.py::TestBatchRequestLifecycleMethods::test_calls_on_error_on_unhealthy_service[test_data2-2]",
"tests/batch_test.py::TestBatchRequestLifecycleMethods::test_calls_on_error_on_unhealthy_service[test_data0-1]"
] | [
"tests/batch_test.py::TestBatchRequest::test_batch_request_with_component_errors[test_data0-1]",
"tests/batch_test.py::TestBatchRequest::test_batch_request_with_component_errors[test_data1-2]",
"tests/batch_test.py::TestBatchRequest::test_successful_batch_request[test_data0-2]",
"tests/batch_test.py::TestBatchRequest::test_batch_request_with_no_jobs_doesnt_post[2-test_data0]",
"tests/batch_test.py::TestBatchRequest::test_batch_request_with_application_error[test_data1-None]",
"tests/batch_test.py::TestBatchRequest::test_successful_batch_request[test_data0-1]",
"tests/batch_test.py::TestBatchRequest::test_batch_request_with_application_error[test_data2-2]",
"tests/batch_test.py::TestBatchRequest::test_batch_request_with_application_error[test_data2-1]",
"tests/batch_test.py::TestBatchRequest::test_batch_request_with_component_errors[test_data0-None]",
"tests/batch_test.py::TestBatchRequest::test_batch_request_with_application_error[test_data0-2]",
"tests/batch_test.py::TestBatchRequest::test_batch_request_with_no_jobs_doesnt_post[None-test_data0]",
"tests/batch_test.py::TestBatchRequest::test_batch_request_with_component_errors[test_data2-1]",
"tests/batch_test.py::TestBatchRequest::test_batch_request_with_no_jobs_doesnt_post[None-test_data1]",
"tests/batch_test.py::TestBatchRequest::test_batch_request_with_no_jobs_doesnt_post[1-test_data2]",
"tests/batch_test.py::TestBatchRequest::test_batch_request_with_no_jobs_doesnt_post[1-test_data0]",
"tests/batch_test.py::TestBatchRequest::test_batch_request_with_application_error[test_data1-1]",
"tests/batch_test.py::TestBatchRequest::test_batch_request_with_component_errors[test_data1-1]",
"tests/batch_test.py::TestBatchRequest::test_batch_request_with_application_error[test_data0-1]",
"tests/batch_test.py::TestBatchRequest::test_batch_request_with_no_jobs_doesnt_post[None-test_data2]",
"tests/batch_test.py::TestBatchRequest::test_batch_request_with_no_jobs_doesnt_post[1-test_data1]",
"tests/batch_test.py::TestBatchRequest::test_successful_batch_request[test_data1-1]",
"tests/batch_test.py::TestBatchRequest::test_successful_batch_request[test_data1-None]",
"tests/batch_test.py::TestBatchRequest::test_batch_request_with_application_error[test_data0-None]",
"tests/batch_test.py::TestBatchRequest::test_batch_request_with_application_error[test_data1-2]",
"tests/batch_test.py::TestBatchRequest::test_batch_request_with_component_errors[test_data2-None]",
"tests/batch_test.py::TestBatchRequest::test_batch_request_with_no_jobs_doesnt_post[2-test_data2]",
"tests/batch_test.py::TestBatchRequest::test_batch_request_with_no_jobs_doesnt_post[2-test_data1]",
"tests/batch_test.py::TestBatchRequest::test_successful_batch_request[test_data2-None]",
"tests/batch_test.py::TestBatchRequest::test_batch_request_with_component_errors[test_data0-2]",
"tests/batch_test.py::TestBatchRequest::test_successful_batch_request[test_data0-None]",
"tests/batch_test.py::TestBatchRequest::test_batch_request_with_component_errors[test_data1-None]",
"tests/batch_test.py::TestBatchRequest::test_successful_batch_request[test_data1-2]",
"tests/batch_test.py::TestBatchRequest::test_batch_request_with_component_errors[test_data2-2]",
"tests/batch_test.py::TestBatchRequest::test_batch_request_with_application_error[test_data2-None]",
"tests/batch_test.py::TestBatchRequest::test_successful_batch_request[test_data2-1]",
"tests/batch_test.py::TestBatchRequest::test_successful_batch_request[test_data2-2]",
"tests/batch_test.py::TestBatchRequestLifecycleMethods::test_calls_after_response[test_data1-None]",
"tests/batch_test.py::TestBatchRequestLifecycleMethods::test_calls_after_response[test_data0-2]",
"tests/batch_test.py::TestBatchRequestLifecycleMethods::test_calls_get_view_data[test_data0-2]",
"tests/batch_test.py::TestBatchRequestLifecycleMethods::test_calls_on_error[test_data1-2]",
"tests/batch_test.py::TestBatchRequestLifecycleMethods::test_calls_on_success[test_data2-2]",
"tests/batch_test.py::TestBatchRequestLifecycleMethods::test_calls_prepare_request[test_data0-None]",
"tests/batch_test.py::TestBatchRequestLifecycleMethods::test_calls_will_send_request[test_data0-2]",
"tests/batch_test.py::TestBatchRequestLifecycleMethods::test_calls_on_success[test_data0-1]",
"tests/batch_test.py::TestBatchRequestLifecycleMethods::test_calls_get_view_data[test_data0-None]",
"tests/batch_test.py::TestBatchRequestLifecycleMethods::test_calls_on_success[test_data1-None]",
"tests/batch_test.py::TestBatchRequestLifecycleMethods::test_calls_on_success[test_data0-None]",
"tests/batch_test.py::TestBatchRequestLifecycleMethods::test_calls_after_response[test_data2-2]",
"tests/batch_test.py::TestBatchRequestLifecycleMethods::test_calls_after_response[test_data2-None]",
"tests/batch_test.py::TestBatchRequestLifecycleMethods::test_calls_on_error[test_data0-1]",
"tests/batch_test.py::TestBatchRequestLifecycleMethods::test_calls_get_view_data[test_data1-1]",
"tests/batch_test.py::TestBatchRequestLifecycleMethods::test_calls_will_send_request[test_data2-1]",
"tests/batch_test.py::TestBatchRequestLifecycleMethods::test_calls_will_send_request[test_data0-1]",
"tests/batch_test.py::TestBatchRequestLifecycleMethods::test_calls_will_send_request[test_data1-None]",
"tests/batch_test.py::TestBatchRequestLifecycleMethods::test_calls_prepare_request[test_data1-1]",
"tests/batch_test.py::TestBatchRequestLifecycleMethods::test_calls_prepare_request[test_data2-None]",
"tests/batch_test.py::TestBatchRequestLifecycleMethods::test_calls_get_view_data[test_data2-2]",
"tests/batch_test.py::TestBatchRequestLifecycleMethods::test_calls_on_error[test_data2-1]",
"tests/batch_test.py::TestBatchRequestLifecycleMethods::test_calls_prepare_request[test_data2-2]",
"tests/batch_test.py::TestBatchRequestLifecycleMethods::test_calls_on_success[test_data1-2]",
"tests/batch_test.py::TestBatchRequestLifecycleMethods::test_calls_get_view_data[test_data1-None]",
"tests/batch_test.py::TestBatchRequestLifecycleMethods::test_calls_prepare_request[test_data2-1]",
"tests/batch_test.py::TestBatchRequestLifecycleMethods::test_calls_on_error[test_data0-None]",
"tests/batch_test.py::TestBatchRequestLifecycleMethods::test_calls_on_error[test_data0-2]",
"tests/batch_test.py::TestBatchRequestLifecycleMethods::test_calls_after_response[test_data0-None]",
"tests/batch_test.py::TestBatchRequestLifecycleMethods::test_calls_after_response[test_data2-1]",
"tests/batch_test.py::TestBatchRequestLifecycleMethods::test_calls_get_view_data[test_data1-2]",
"tests/batch_test.py::TestBatchRequestLifecycleMethods::test_calls_after_response[test_data1-1]",
"tests/batch_test.py::TestBatchRequestLifecycleMethods::test_calls_on_success[test_data0-2]",
"tests/batch_test.py::TestBatchRequestLifecycleMethods::test_calls_get_view_data[test_data0-1]",
"tests/batch_test.py::TestBatchRequestLifecycleMethods::test_calls_on_success[test_data2-1]",
"tests/batch_test.py::TestBatchRequestLifecycleMethods::test_calls_on_success[test_data2-None]",
"tests/batch_test.py::TestBatchRequestLifecycleMethods::test_calls_on_error[test_data1-1]",
"tests/batch_test.py::TestBatchRequestLifecycleMethods::test_calls_prepare_request[test_data1-2]",
"tests/batch_test.py::TestBatchRequestLifecycleMethods::test_calls_will_send_request[test_data1-1]",
"tests/batch_test.py::TestBatchRequestLifecycleMethods::test_calls_prepare_request[test_data0-2]",
"tests/batch_test.py::TestBatchRequestLifecycleMethods::test_calls_on_error[test_data1-None]",
"tests/batch_test.py::TestBatchRequestLifecycleMethods::test_calls_get_view_data[test_data2-None]",
"tests/batch_test.py::TestBatchRequestLifecycleMethods::test_calls_after_response[test_data1-2]",
"tests/batch_test.py::TestBatchRequestLifecycleMethods::test_calls_on_error[test_data2-2]",
"tests/batch_test.py::TestBatchRequestLifecycleMethods::test_calls_will_send_request[test_data1-2]",
"tests/batch_test.py::TestBatchRequestLifecycleMethods::test_calls_will_send_request[test_data2-None]",
"tests/batch_test.py::TestBatchRequestLifecycleMethods::test_calls_on_error[test_data2-None]",
"tests/batch_test.py::TestBatchRequestLifecycleMethods::test_calls_after_response[test_data0-1]",
"tests/batch_test.py::TestBatchRequestLifecycleMethods::test_calls_get_view_data[test_data2-1]",
"tests/batch_test.py::TestBatchRequestLifecycleMethods::test_calls_on_success[test_data1-1]",
"tests/batch_test.py::TestBatchRequestLifecycleMethods::test_calls_will_send_request[test_data2-2]",
"tests/batch_test.py::TestBatchRequestLifecycleMethods::test_calls_will_send_request[test_data0-None]",
"tests/batch_test.py::TestBatchRequestLifecycleMethods::test_calls_prepare_request[test_data1-None]",
"tests/batch_test.py::TestBatchRequestLifecycleMethods::test_calls_prepare_request[test_data0-1]",
"tests/batch_test.py::test_create_fallback_response[jobs2-True-json_encoder2]",
"tests/batch_test.py::test_create_fallback_response[jobs5-False-json_encoder5]",
"tests/batch_test.py::test_create_job_groups[5-expected5]",
"tests/batch_test.py::test_create_job_groups[1-expected1]",
"tests/batch_test.py::test_create_jobs_payload",
"tests/batch_test.py::test_create_job_groups[2-expected2]",
"tests/batch_test.py::test_create_fallback_response[jobs1-False-json_encoder1]",
"tests/batch_test.py::test_create_job_groups[None-expected0]",
"tests/batch_test.py::test_create_fallback_response[jobs0-True-json_encoder0]",
"tests/batch_test.py::test_create_job_groups[6-expected6]",
"tests/batch_test.py::test_create_fallback_response[jobs3-False-json_encoder3]",
"tests/batch_test.py::test_create_fallback_response[jobs4-True-json_encoder4]",
"tests/batch_test.py::test_create_job_groups[4-expected4]",
"tests/batch_test.py::test_create_job_groups[3-expected3]"
] | {
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true
} | 2019-01-18 00:11:18+00:00 | mit | 853 |
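A minimal sketch of the normalization the patch above settles on — stripping trailing newlines from each stack element so Python and JS tracebacks print identically (standalone code, not the package's API):

```python
import sys
import traceback

def normalize_stack(lines):
    # traceback.format_tb() entries end in '\n'; Hypernova JS stack
    # entries do not. Stripping the trailing newline unifies both.
    return [line.rstrip('\n') for line in lines]

try:
    raise ValueError('oh no')
except ValueError:
    tb = sys.exc_info()[2]
    stack = normalize_stack(traceback.format_tb(tb))
    assert not any(line.endswith('\n') for line in stack)
```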
|
Yelp__virtualenv-tools-22 | diff --git a/tox.ini b/tox.ini
index 43f5f76..fceab09 100644
--- a/tox.ini
+++ b/tox.ini
@@ -16,4 +16,4 @@ envdir = venv-{[tox]project}
commands =
[pep8]
-ignore = E265,E501
+ignore = E265,E501,W504
diff --git a/virtualenv_tools.py b/virtualenv_tools.py
index 2b3cfae..2f76a24 100644
--- a/virtualenv_tools.py
+++ b/virtualenv_tools.py
@@ -31,7 +31,7 @@ ACTIVATION_SCRIPTS = [
_pybin_match = re.compile(r'^python\d+\.\d+$')
_pypy_match = re.compile(r'^\d+.\d+$')
_activation_path_re = re.compile(
- r'^(?:set -gx |setenv |)VIRTUAL_ENV[ =]"(.*?)"\s*$',
+ r'^(?:set -gx |setenv |)VIRTUAL_ENV[ =][\'"](.*?)[\'"]\s*$',
)
VERBOSE = False
MAGIC_LENGTH = 4 + 4 # magic length + 4 byte timestamp
@@ -169,7 +169,11 @@ def update_pycs(lib_dir, new_path):
for dirname, dirnames, filenames in os.walk(lib_dir):
for filename in filenames:
- if filename.endswith(('.pyc', '.pyo')):
+ if (
+ filename.endswith(('.pyc', '.pyo')) and
+ # python 2, virtualenv 20.x symlinks os.pyc
+ not os.path.islink(os.path.join(dirname, filename))
+ ):
filename = os.path.join(dirname, filename)
local_path = get_new_path(filename)
update_pyc(filename, local_path)
@@ -236,11 +240,13 @@ def get_orig_path(venv_path):
with open(activate_path) as activate:
for line in activate:
- if line.startswith('VIRTUAL_ENV="'):
- return line.split('"', 2)[1]
+ # virtualenv 20 changes the position
+ for possible in ('VIRTUAL_ENV="', "VIRTUAL_ENV='"):
+ if line.startswith(possible):
+ return line.split(possible[-1], 2)[1]
else:
raise AssertionError(
- 'Could not find VIRTUAL_ENV=" in activation script: %s' %
+ 'Could not find VIRTUAL_ENV= in activation script: %s' %
activate_path
)
| Yelp/virtualenv-tools | 5247e0c8c39a584b217ae7de82bc9051903f863f | diff --git a/tests/virtualenv_tools_test.py b/tests/virtualenv_tools_test.py
index e34d596..44fdc27 100644
--- a/tests/virtualenv_tools_test.py
+++ b/tests/virtualenv_tools_test.py
@@ -185,7 +185,7 @@ def test_verbose(venv, capsys):
run(venv.before, venv.after, args=('--verbose',))
out, _ = capsys.readouterr()
# Lots of output
- assert len(out.splitlines()) > 25
+ assert len(out.splitlines()) > 10
def test_non_absolute_error(capsys):
| Unable to find the VIRTUAL_ENV variable in activate.sh for recent virtualenv versions.
Starting in `virtualenv==20.0.0`, the `VIRTUAL_ENV` variable is now defined using single quotes instead of double quotes. This breaks `virtualenv-tools3` as it specifically searches for the string `VIRTUAL_ENV="` in `activate.sh`. | 0.0 | 5247e0c8c39a584b217ae7de82bc9051903f863f | [
"tests/virtualenv_tools_test.py::test_bad_pyc",
"tests/virtualenv_tools_test.py::test_already_up_to_date"
] | [
"tests/virtualenv_tools_test.py::test_help[helpargs1]",
"tests/virtualenv_tools_test.py::test_not_a_virtualenv_missing_bindir",
"tests/virtualenv_tools_test.py::test_help[helpargs0]",
"tests/virtualenv_tools_test.py::test_not_a_virtualenv_missing_site_packages",
"tests/virtualenv_tools_test.py::test_not_a_virtualenv_missing_activate_file",
"tests/virtualenv_tools_test.py::test_not_a_virtualenv_missing_versioned_lib_directory",
"tests/virtualenv_tools_test.py::test_non_absolute_error"
] | {
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false
} | 2020-02-12 00:45:18+00:00 | bsd-3-clause | 854 |
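For illustration, the quote-agnostic regex the patch above introduces, exercised against both activate-script styles (the sample lines are hypothetical):

```python
import re

# Pattern from the patch: accepts VIRTUAL_ENV="..." and VIRTUAL_ENV='...'.
pattern = re.compile(r'^(?:set -gx |setenv |)VIRTUAL_ENV[ =][\'"](.*?)[\'"]\s*$')

for line in ('VIRTUAL_ENV="/old/venv"',    # virtualenv < 20 (double quotes)
             "VIRTUAL_ENV='/old/venv'"):   # virtualenv >= 20 (single quotes)
    match = pattern.match(line)
    assert match is not None and match.group(1) == '/old/venv'
```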
|
Zepmanbc__creopyson-2 | diff --git a/creopyson/creo.py b/creopyson/creo.py
index 890f29d..1d86c48 100755
--- a/creopyson/creo.py
+++ b/creopyson/creo.py
@@ -1,4 +1,5 @@
"""Creo module."""
+from creopyson.exceptions import MissingKey
def cd(client, dirname):
@@ -108,7 +109,11 @@ def list_dirs(client, dirname=None):
data = {"dirname": "*"}
if dirname:
data["dirname"] = dirname
- return client._creoson_post("creo", "list_dirs", data, "dirlist")
+ try:
+ result = client._creoson_post("creo", "list_dirs", data, "dirlist")
+ except MissingKey:
+ result = []
+ return result
def list_files(client, filename=None):
| Zepmanbc/creopyson | 4a214ea618fba75d4200cc3dcbc6371d924765e6 | diff --git a/tests/fixtures.py b/tests/fixtures.py
index 8919eca..6d2ff6e 100644
--- a/tests/fixtures.py
+++ b/tests/fixtures.py
@@ -55,7 +55,7 @@ def mk_creoson_post_dict(monkeypatch):
@pytest.fixture
def mk_creoson_post_None(monkeypatch):
"""Mock _creoson_post return None."""
- def fake_func(client, command, function, data=None):
+ def fake_func(client, command, function, data=None, key_data=None):
return None
monkeypatch.setattr(
creopyson.connection.Client, '_creoson_post', fake_func)
@@ -64,7 +64,7 @@ def mk_creoson_post_None(monkeypatch):
@pytest.fixture
def mk_creoson_post_list(monkeypatch):
"""Mock _creoson_post return list."""
- def fake_func(client, command, function, data=None):
+ def fake_func(client, command, function, data=None, key_data=None):
return ['information']
monkeypatch.setattr(
creopyson.connection.Client, '_creoson_post', fake_func)
diff --git a/tests/test_creo.py b/tests/test_creo.py
index ccdb9fe..1398006 100644
--- a/tests/test_creo.py
+++ b/tests/test_creo.py
@@ -1,5 +1,6 @@
"""Creo testing."""
import creopyson
+from creopyson.exceptions import MissingKey
from .fixtures import mk_creoson_post_dict, mk_creoson_post_None
@@ -43,6 +44,23 @@ def test_creo_list_dirs(mk_creoson_post_dict):
c = creopyson.Client()
result = c.creo_list_dirs("filter_*")
assert isinstance(result, (list))
+ result = c.creo_list_dirs()
+ assert isinstance(result, (list))
+
+
+def test_creo_list_dirs_empty(monkeypatch):
+ """Correction issue #1.
+
+ if there is no folder in directory, creoson does not return `data`.
+ Need to return an empty list.
+ """
+ def fake_func(client, command, function, data=None, key_data=None):
+ raise MissingKey("Missing `data` in creoson return")
+ monkeypatch.setattr(
+ creopyson.connection.Client, '_creoson_post', fake_func)
+ c = creopyson.Client()
+ result = c.creo_list_dirs()
+ assert result == []
def test_creo_list_files(mk_creoson_post_dict):
| Exception thrown in creo_list_dirs
* Creopyson version: 0.3.1
* Python version: 3.6
* Operating System: Windows
### Description
I tried to list the directories in the current session with `creo_list_dirs()`, but I received an exception telling me that Creoson did not return the right data.
### What I Did
```
>>> c.creo_list_dirs()
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "C:\Users\Tim\.virtualenvs\creopyson_test-yJfFsqNW\lib\site-packages\creopyson\creo.py", line 111, in list_dirs
return client._creoson_post("creo", "list_dirs", data, "dirlist")
File "C:\Users\Tim\.virtualenvs\creopyson_test-yJfFsqNW\lib\site-packages\creopyson\connection.py", line 81, in _creoson_post
raise MissingKey("Missing `data` in creoson return")
creopyson.exceptions.MissingKey: Missing `data` in creoson return
``` | 0.0 | 4a214ea618fba75d4200cc3dcbc6371d924765e6 | [
"tests/test_creo.py::test_creo_list_dirs_empty"
] | [
"tests/test_creo.py::test_creo_cd",
"tests/test_creo.py::test_creo_delete_files",
"tests/test_creo.py::test_creo_get_config",
"tests/test_creo.py::test_creo_get_std_color",
"tests/test_creo.py::test_creo_list_dirs",
"tests/test_creo.py::test_creo_list_files",
"tests/test_creo.py::test_creo_mkdir",
"tests/test_creo.py::test_creo_pwd",
"tests/test_creo.py::test_creo_rmdir_ok",
"tests/test_creo.py::test_creo_set_config_ok",
"tests/test_creo.py::test_creo_set_std_color_ok"
] | {
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true
} | 2019-07-03 19:45:24+00:00 | mit | 855 |
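A self-contained sketch of the pattern the patch above applies — treating a missing `data` key from Creoson as an empty directory list instead of an unhandled exception (the classes and functions below are stand-ins, not the real creopyson objects):

```python
class MissingKey(Exception):
    """Stand-in for creopyson.exceptions.MissingKey."""

def fake_creoson_post(command, function, data=None, key_data=None):
    # Simulates Creoson omitting `data` when a directory has no subfolders.
    raise MissingKey("Missing `data` in creoson return")

def list_dirs(dirname="*"):
    try:
        return fake_creoson_post("creo", "list_dirs", {"dirname": dirname}, "dirlist")
    except MissingKey:
        return []  # no subdirectories is a valid, non-error result

assert list_dirs() == []
```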
|
a2i2__surround-106 | diff --git a/surround/cli.py b/surround/cli.py
index e8922cb..f57dc50 100644
--- a/surround/cli.py
+++ b/surround/cli.py
@@ -93,6 +93,16 @@ def is_valid_dir(aparser, arg):
else:
return arg
+def allowed_to_access_dir(path):
+ try:
+ os.makedirs(path, exist_ok=True)
+ except OSError:
+ print("error: can't write to " + path)
+
+ if os.access(path, os.R_OK | os.W_OK | os.F_OK | os.X_OK):
+ return True
+ return False
+
def is_valid_name(aparser, arg):
if not arg.isalpha() or not arg.islower():
aparser.error("Name %s must be lowercase letters" % arg)
@@ -229,26 +239,29 @@ def parse_tutorial_args(args):
def parse_init_args(args):
- if args.project_name:
- project_name = args.project_name
- else:
- while True:
- project_name = input("Name of project: ")
- if not project_name.isalpha() or not project_name.islower():
- print("Project name requires lowercase letters only")
- else:
- break
-
- if args.description:
- project_description = args.description
- else:
- project_description = input("What is the purpose of this project?: ")
+ if allowed_to_access_dir(args.path):
+ if args.project_name:
+ project_name = args.project_name
+ else:
+ while True:
+ project_name = input("Name of project: ")
+ if not project_name.isalpha() or not project_name.islower():
+ print("error: project name requires lowercase letters only")
+ else:
+ break
+
+ if args.description:
+ project_description = args.description
+ else:
+ project_description = input("What is the purpose of this project?: ")
- new_dir = os.path.join(args.path, project_name)
- if process(new_dir, PROJECTS["new"], project_name, project_description, "new"):
- print("Project created at %s" % os.path.join(os.path.abspath(args.path), project_name))
+ new_dir = os.path.join(args.path, project_name)
+ if process(new_dir, PROJECTS["new"], project_name, project_description, "new"):
+ print("info: project created at %s" % os.path.join(os.path.abspath(args.path), project_name))
+ else:
+ print("error: directory %s already exists" % new_dir)
else:
- print("Directory %s already exists" % new_dir)
+ print("error: permission denied")
def parse_tool_args(parsed_args, remote_parser, tool):
if tool == "tutorial":
@@ -278,10 +291,10 @@ def main():
tutorial_parser = sub_parser.add_parser('tutorial', help="Create the tutorial project")
tutorial_parser.add_argument('tutorial', help="Create the Surround tutorial project", action='store_true')
- tutorial_parser.add_argument('path', type=lambda x: is_valid_dir(parser, x), help="Path for creating the tutorial project", nargs='?', default="./")
+ tutorial_parser.add_argument('path', help="Path for creating the tutorial project", nargs='?', default="./")
init_parser = sub_parser.add_parser('init', help="Initialise a new Surround project")
- init_parser.add_argument('path', type=lambda x: is_valid_dir(parser, x), help="Path for creating a Surround project", nargs='?', default="./")
+ init_parser.add_argument('path', help="Path for creating a Surround project", nargs='?', default="./")
init_parser.add_argument('-p', '--project-name', help="Name of the project", type=lambda x: is_valid_name(parser, x))
init_parser.add_argument('-d', '--description', help="A description for the project")
| a2i2/surround | 6e5a32dafc6ba858edc1b21cf6644fb2c6b77815 | diff --git a/surround/tests/cli/project_cli/init_test.py b/surround/tests/cli/project_cli/init_test.py
index 24d0f53..1bbce4e 100644
--- a/surround/tests/cli/project_cli/init_test.py
+++ b/surround/tests/cli/project_cli/init_test.py
@@ -9,13 +9,13 @@ class InitTest(unittest.TestCase):
def test_happy_path(self):
process = subprocess.run(['surround', 'init', './', '-p', 'temp', '-d', 'temp'], encoding='utf-8', stdout=subprocess.PIPE)
- self.assertEqual(process.stdout, "Project created at " + os.getcwd() + "/temp\n")
+ self.assertEqual(process.stdout, "info: project created at " + os.getcwd() + "/temp\n")
is_temp = os.path.isdir(os.path.join(os.getcwd() + "/temp"))
self.assertEqual(is_temp, True)
process = subprocess.run(['surround', 'init', './', '-p', 'temp', '-d', 'temp'], encoding='utf-8', stdout=subprocess.PIPE)
- self.assertEqual(process.stdout, "Directory ./temp already exists\n")
+ self.assertEqual(process.stdout, "error: directory ./temp already exists\n")
def tearDown(self):
# Remove residual files
diff --git a/surround/tests/cli/remote_cli/add_test.py b/surround/tests/cli/remote_cli/add_test.py
index 07c9ea4..537a8d8 100644
--- a/surround/tests/cli/remote_cli/add_test.py
+++ b/surround/tests/cli/remote_cli/add_test.py
@@ -9,7 +9,7 @@ class AddTest(unittest.TestCase):
def test_rejecting_path(self):
process = subprocess.run(['surround', 'init', './', '-p', 'temp', '-d', 'temp'], encoding='utf-8', stdout=subprocess.PIPE)
- self.assertEqual(process.stdout, "Project created at " + os.getcwd() + "/temp\n")
+ self.assertEqual(process.stdout, "info: project created at " + os.getcwd() + "/temp\n")
is_temp = os.path.isdir(os.path.join(os.getcwd() + "/temp"))
self.assertEqual(is_temp, True)
@@ -24,7 +24,7 @@ class AddTest(unittest.TestCase):
def test_happy_path(self):
process = subprocess.run(['surround', 'init', os.getcwd(), '-p', 'temp', '-d', 'temp'], encoding='utf-8', stdout=subprocess.PIPE)
- self.assertEqual(process.stdout, "Project created at " + os.getcwd() + "/temp\n")
+ self.assertEqual(process.stdout, "info: project created at " + os.getcwd() + "/temp\n")
is_temp = os.path.isdir(os.path.join(os.getcwd() + "/temp"))
self.assertEqual(is_temp, True)
diff --git a/surround/tests/cli/remote_cli/list_test.py b/surround/tests/cli/remote_cli/list_test.py
index 71970eb..d993284 100644
--- a/surround/tests/cli/remote_cli/list_test.py
+++ b/surround/tests/cli/remote_cli/list_test.py
@@ -9,7 +9,7 @@ class ListTest(unittest.TestCase):
def test_rejecting_path(self):
process = subprocess.run(['surround', 'init', './', '-p', 'temp', '-d', 'temp'], encoding='utf-8', stdout=subprocess.PIPE)
- self.assertEqual(process.stdout, "Project created at " + os.getcwd() + "/temp\n")
+ self.assertEqual(process.stdout, "info: project created at " + os.getcwd() + "/temp\n")
is_temp = os.path.isdir(os.path.join(os.getcwd() + "/temp"))
self.assertEqual(is_temp, True)
@@ -19,7 +19,7 @@ class ListTest(unittest.TestCase):
def test_happy_path(self):
process = subprocess.run(['surround', 'init', './', '-p', 'temp', '-d', 'temp'], encoding='utf-8', stdout=subprocess.PIPE)
- self.assertEqual(process.stdout, "Project created at " + os.getcwd() + "/temp\n")
+ self.assertEqual(process.stdout, "info: project created at " + os.getcwd() + "/temp\n")
is_temp = os.path.isdir(os.path.join(os.getcwd() + "/temp"))
self.assertEqual(is_temp, True)
diff --git a/surround/tests/cli/remote_cli/remote_test.py b/surround/tests/cli/remote_cli/remote_test.py
index e8ed4cd..7236530 100644
--- a/surround/tests/cli/remote_cli/remote_test.py
+++ b/surround/tests/cli/remote_cli/remote_test.py
@@ -12,7 +12,7 @@ class RemoteTest(unittest.TestCase):
self.assertEqual(process.stdout, "error: not a surround project\nerror: goto project root directory\n")
process = subprocess.run(['surround', 'init', './', '-p', 'temp', '-d', 'temp'], encoding='utf-8', stdout=subprocess.PIPE)
- self.assertEqual(process.stdout, "Project created at " + os.getcwd() + "/temp\n")
+ self.assertEqual(process.stdout, "info: project created at " + os.getcwd() + "/temp\n")
is_temp = os.path.isdir(os.path.join(os.getcwd() + "/temp"))
self.assertEqual(is_temp, True)
@@ -22,7 +22,7 @@ class RemoteTest(unittest.TestCase):
def test_remote_add(self):
process = subprocess.run(['surround', 'init', './', '-p', 'temp', '-d', 'temp'], encoding='utf-8', stdout=subprocess.PIPE)
- self.assertEqual(process.stdout, "Project created at " + os.getcwd() + "/temp\n")
+ self.assertEqual(process.stdout, "info: project created at " + os.getcwd() + "/temp\n")
is_temp = os.path.isdir(os.path.join(os.getcwd() + "/temp"))
self.assertEqual(is_temp, True)
| `surround tutorial` doesn't work if destination directory doesn't exist
`surround tutorial <path>` doesn't work if the path doesn't exist. Not sure if this is intended behaviour, but it might be nice to follow what Git does, i.e.
* Create the directory if it doesn't exist
* Print error if the directory exists but isn't empty
My experience below:
```
➜ surround tutorial
usage: surround tutorial [-h] path
surround tutorial: error: the following arguments are required: path
➜ surround tutorial my-first-surround-app
usage: surround [-h] {tutorial,init} ...
surround: error: Invalid directory or can't write to my-first-surround-app
➜ mkdir surround-tutorial
➜ surround tutorial my-first-surround-app
The tutorial project has been created.
Start by reading the README.md file at:
/home/rohan/Development/sandbox/my-first-surround-app/README.md
```
After doing the above, I ended up with all the files in `my-first-surround-app/tutorial`, instead of just in `my-first-surround-app` as I expected. | 0.0 | 6e5a32dafc6ba858edc1b21cf6644fb2c6b77815 | [
"surround/tests/cli/project_cli/init_test.py::InitTest::test_happy_path",
"surround/tests/cli/remote_cli/add_test.py::AddTest::test_happy_path",
"surround/tests/cli/remote_cli/add_test.py::AddTest::test_rejecting_path",
"surround/tests/cli/remote_cli/list_test.py::ListTest::test_happy_path",
"surround/tests/cli/remote_cli/list_test.py::ListTest::test_rejecting_path",
"surround/tests/cli/remote_cli/remote_test.py::RemoteTest::test_remote",
"surround/tests/cli/remote_cli/remote_test.py::RemoteTest::test_remote_add"
] | [] | {
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true
} | 2019-04-04 06:46:13+00:00 | bsd-3-clause | 856 |
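The directory check added by the patch above, shown standalone with a throwaway usage example (the target path is illustrative):

```python
import os
import tempfile

def allowed_to_access_dir(path):
    # Create the directory if it is missing, then confirm the process
    # can read, write, and traverse it before generating the project.
    try:
        os.makedirs(path, exist_ok=True)
    except OSError:
        print("error: can't write to " + path)
    return os.access(path, os.R_OK | os.W_OK | os.F_OK | os.X_OK)

target = os.path.join(tempfile.gettempdir(), "my-first-surround-app")
print(allowed_to_access_dir(target))  # True when the path is creatable
```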
|
abantos__bolt-88 | diff --git a/bolt/about.py b/bolt/about.py
index 3c285ba..9b6ae95 100644
--- a/bolt/about.py
+++ b/bolt/about.py
@@ -8,4 +8,4 @@ A task runner written in Python
copyright = u'2016 Abantos'
author = u'Isaac Rodriguez'
version = u'0.2'
-release = u'0.2.2'
+release = u'0.2.3'
diff --git a/bolt/tasks/bolt_setup.py b/bolt/tasks/bolt_setup.py
index 6190eb5..e10aa8b 100644
--- a/bolt/tasks/bolt_setup.py
+++ b/bolt/tasks/bolt_setup.py
@@ -26,8 +26,12 @@ configure the task. ::
import distutils.core as dcore
import logging
+import bolt.errors as errors
import bolt.utils as utilities
+
+class BuildSetupError(errors.TaskError): pass
+
DEFAULT_ARGUMENTS = ['build']
DEFAULT_SETUP_SCRIPT = 'setup.py'
@@ -50,11 +54,13 @@ class ExecuteSetupTask(object):
self.setup_script = DEFAULT_SETUP_SCRIPT
generator = _SetupArgumentGenerator()
self.args = generator.generate_from(config)
- self._execute_setup()
+ result = self._execute_setup()
+ if not result.dist_files:
+ raise BuildSetupError()
def _execute_setup(self):
- dcore.run_setup(self.setup_script, self.args)
+ return dcore.run_setup(self.setup_script, self.args)
| abantos/bolt | 39a5db9fa29ec6c13dceb51480fb3e56574aa2a7 | diff --git a/test/test_tasks/test_bolt_setup.py b/test/test_tasks/test_bolt_setup.py
index d9a03b9..9e874cc 100644
--- a/test/test_tasks/test_bolt_setup.py
+++ b/test/test_tasks/test_bolt_setup.py
@@ -7,7 +7,7 @@ import _mocks as mck
class TestExecuteSetupTask(unittest.TestCase):
def setUp(self):
- self. subject = ExecuteSetupTaskSpy()
+ self.subject = ExecuteSetupTaskSpy()
return super(TestExecuteSetupTask, self).setUp()
@@ -25,6 +25,12 @@ class TestExecuteSetupTask(unittest.TestCase):
self.assertEqual(self.subject.setup_script, script)
+ def test_raises_exception_if_building_setup_fails(self):
+ self.subject.dist_files = []
+ with self.assertRaises(bsetup.BuildSetupError):
+ self.given({})
+
+
def given(self, config):
self.subject(config=config)
@@ -36,9 +42,13 @@ class TestExecuteSetupTask(unittest.TestCase):
class ExecuteSetupTaskSpy(bsetup.ExecuteSetupTask):
+
+ def __init__(self):
+ super(ExecuteSetupTaskSpy, self).__init__()
+ self.dist_files = [('bdist_wheel', '3.5', '/some/colation/the.whl')]
def _execute_setup(self):
- pass
+ return self
| Setup Task Swallows Errors Building Distribution
### Description
The current implementation of the Setup task does not evaluate the return object from `distutils.core.run_setup()` and it just assumes it works. Unfortunately, some environments will not have the right tools to build a distribution; therefore, the task will silently fail.
I found this problem in another project's pipeline where building a `wheel` inside the official Python 2.7 Docker container silently failed because the `wheel` package is not installed by default, unlike when creating a virtual environment.
### Steps to Reproduce
- Create a setup task that builds a simple distribution wheel.
- Create a virtual environment, activate it, and uninstall the `wheel` package.
- Run the setup task within the environment.
- Check the dist folder, which will be empty.
The current behavior shows that the return value from `distutils.core.run_setup()` invoked by the task is not evaluated, and the task seems to succeed; however, the `.whl` file has not been created.
An exception is expected to be raised if building the wheel doesn't succeed.
### Fix Analysis
Looking at the implementation of `distutils.core.run_setup()`, you can see the function returns a `Distribution` instance. The instance exposes a `dist_files` property, which is empty if the distribution failed to build. If it succeeds, `dist_files` is a list of tuples containing information about each distribution. This allows us to fix the problem, or at least report an error if no distribution was built, by checking the contents of `dist_files`; something like the following code:
```python
# dcore is an alias to distutils.core.
dist_info = dcore.run_setup(self.setup_script, self.args)
if not dist_info.dist_files:
raise DistributionBuildError()
```
> NOTE: I need to think about how to handle multiple distributions; although I believe right now the task only supports one distribution per configuration.
The following is an example of the contents of the `dist_files` property:
```
[('bdist_wheel', '3.5', 'D:\\Projects\\Abantos\\bolt\\dist\\bolt_ta-0.2.1-py2.py3-none-any.whl')]
```
### Acceptance Criteria
Raises exception if building the distribution fails. | 0.0 | 39a5db9fa29ec6c13dceb51480fb3e56574aa2a7 | [
"test/test_tasks/test_bolt_setup.py::TestExecuteSetupTask::test_raises_exception_if_building_setup_fails"
] | [
"test/test_tasks/test_bolt_setup.py::TestExecuteSetupTask::test_uses_default_if_empty_configuration",
"test/test_tasks/test_bolt_setup.py::TestExecuteSetupTask::test_uses_specified_script",
"test/test_tasks/test_bolt_setup.py::TestRegisterTasks::test_registers_setup"
] | {
"failed_lite_validators": [
"has_many_modified_files",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false
} | 2017-03-07 16:19:15+00:00 | mit | 857 |
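A sketch of the guard the patch above adds, written as standalone code (`BuildSetupError` mirrors the task's new exception; the `bdist_wheel` argument is illustrative, since `dist_files` is only populated by distribution-building commands):

```python
import distutils.core as dcore

class BuildSetupError(Exception):
    """Raised when setup ran but produced no distributions."""

def execute_setup(setup_script='setup.py', args=('bdist_wheel',)):
    dist = dcore.run_setup(setup_script, list(args))
    if not dist.dist_files:  # empty list means the build silently failed
        raise BuildSetupError('%s produced no distributions' % setup_script)
    # e.g. [('bdist_wheel', '3.5', '/some/location/the.whl')]
    return dist.dist_files
```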
|
abdullahselek__HerePy-25 | diff --git a/README.rst b/README.rst
index b6bd1b8..78d2e19 100644
--- a/README.rst
+++ b/README.rst
@@ -224,6 +224,14 @@ Calculate an MxN cost matrix for M start points and N destinations
departure='2013-07-04T17:00:00+02',
modes=[herepy.RouteMode.fastest, herepy.RouteMode.car])
+Calculate route providing names instead of coordinates
+
+.. code:: python
+
+ response = routingApi.car_route([11.0, 12.0],
+ '200 S Mathilda Sunnyvale CA',
+ [herepy.RouteMode.car, herepy.RouteMode.fastest])
+
GeocoderAutoCompleteApi
-----------------------
diff --git a/herepy/routing_api.py b/herepy/routing_api.py
index d188aaf..9a9fd52 100644
--- a/herepy/routing_api.py
+++ b/herepy/routing_api.py
@@ -5,12 +5,13 @@ import sys
import json
import requests
+from herepy.geocoder_api import GeocoderApi
from herepy.here_api import HEREApi
from herepy.utils import Utils
from herepy.error import HEREError
from herepy.models import RoutingResponse, RoutingMatrixResponse
from herepy.here_enum import RouteMode, MatrixSummaryAttribute
-from typing import List
+from typing import List, Union
class RoutingApi(HEREApi):
"""A python interface into the HERE Routing API"""
@@ -53,6 +54,10 @@ class RoutingApi(HEREApi):
return str.format('geo!{0},{1}', waypoint_a[0], waypoint_a[1])
def _route(self, waypoint_a, waypoint_b, modes=None, departure=None, arrival=None):
+ if isinstance(waypoint_a, str):
+ waypoint_a = self._get_coordinates_for_location_name(waypoint_a)
+ if isinstance(waypoint_b, str):
+ waypoint_b = self._get_coordinates_for_location_name(waypoint_b)
data = {'waypoint0': self.__array_to_waypoint(waypoint_a),
'waypoint1': self.__array_to_waypoint(waypoint_b),
'mode': self.__prepare_mode_values(modes),
@@ -84,16 +89,18 @@ class RoutingApi(HEREApi):
return response
def bicycle_route(self,
- waypoint_a: List[float],
- waypoint_b: List[float],
+ waypoint_a: Union[List[float], str],
+ waypoint_b: Union[List[float], str],
modes: List[RouteMode]=None,
departure: str='now'):
"""Request a bicycle route between two points
Args:
- waypoint_a (array):
- array including latitude and longitude in order.
- waypoint_b (array):
- array including latitude and longitude in order.
+ waypoint_a:
+ array including latitude and longitude in order
+ or string with the location name
+ waypoint_b:
+ array including latitude and longitude in order
+ or string with the location name.
modes (array):
array including RouteMode enums.
departure (str):
@@ -108,16 +115,18 @@ class RoutingApi(HEREApi):
return self._route(waypoint_a, waypoint_b, modes, departure)
def car_route(self,
- waypoint_a: List[float],
- waypoint_b: List[float],
+ waypoint_a: Union[List[float], str],
+ waypoint_b: Union[List[float], str],
modes: List[RouteMode]=None,
departure: str='now'):
"""Request a driving route between two points
Args:
waypoint_a (array):
- array including latitude and longitude in order.
+ array including latitude and longitude in order
+ or string with the location name.
waypoint_b (array):
- array including latitude and longitude in order.
+ array including latitude and longitude in order
+ or string with the location name.
modes (array):
array including RouteMode enums.
departure (str):
@@ -132,16 +141,18 @@ class RoutingApi(HEREApi):
return self._route(waypoint_a, waypoint_b, modes, departure)
def pedastrian_route(self,
- waypoint_a: List[float],
- waypoint_b: List[float],
+ waypoint_a: Union[List[float], str],
+ waypoint_b: Union[List[float], str],
modes: List[RouteMode]=None,
departure: str='now'):
"""Request a pedastrian route between two points
Args:
waypoint_a (array):
- array including latitude and longitude in order.
+ array including latitude and longitude in order
+ or string with the location name.
waypoint_b (array):
- array including latitude and longitude in order.
+ array including latitude and longitude in order
+ or string with the location name.
modes (array):
array including RouteMode enums.
departure (str):
@@ -156,19 +167,22 @@ class RoutingApi(HEREApi):
return self._route(waypoint_a, waypoint_b, modes, departure)
def intermediate_route(self,
- waypoint_a: List[float],
- waypoint_b: List[float],
- waypoint_c: List[float],
+ waypoint_a: Union[List[float], str],
+ waypoint_b: Union[List[float], str],
+ waypoint_c: Union[List[float], str],
modes: List[RouteMode]=None,
departure: str='now'):
"""Request a intermediate route from three points
Args:
waypoint_a (array):
- Starting array including latitude and longitude in order.
+ Starting array including latitude and longitude in order
+ or string with the location name.
waypoint_b (array):
- Intermediate array including latitude and longitude in order.
+ Intermediate array including latitude and longitude in order
+ or string with the location name.
waypoint_c (array):
- Last array including latitude and longitude in order.
+ Last array including latitude and longitude in order
+ or string with the location name.
modes (array):
array including RouteMode enums.
departure (str):
@@ -183,17 +197,19 @@ class RoutingApi(HEREApi):
return self._route(waypoint_a, waypoint_b, modes, departure)
def public_transport(self,
- waypoint_a: List[float],
- waypoint_b: List[float],
+ waypoint_a: Union[List[float], str],
+ waypoint_b: Union[List[float], str],
combine_change: bool,
modes: List[RouteMode]=None,
departure='now'):
"""Request a public transport route between two points
Args:
waypoint_a (array):
- Starting array including latitude and longitude in order.
+ Starting array including latitude and longitude in order
+ or string with the location name.
waypoint_b (array):
- Intermediate array including latitude and longitude in order.
+ Intermediate array including latitude and longitude in order
+ or string with the location name.
combine_change (bool):
Enables the change manuever in the route response, which
indicates a public transit line change.
@@ -211,8 +227,8 @@ class RoutingApi(HEREApi):
return self._route(waypoint_a, waypoint_b, modes, departure)
def public_transport_timetable(self,
- waypoint_a: List[float],
- waypoint_b: List[float],
+ waypoint_a: Union[List[float], str],
+ waypoint_b: Union[List[float], str],
combine_change: bool,
modes: List[RouteMode]=None,
departure: str=None,
@@ -220,9 +236,11 @@ class RoutingApi(HEREApi):
"""Request a public transport route between two points based on timetables
Args:
waypoint_a (array):
- Starting array including latitude and longitude in order.
+ Starting array including latitude and longitude in order
+ or string with the location name.
waypoint_b (array):
- Intermediate array including latitude and longitude in order.
+ Intermediate array including latitude and longitude in order
+ or string with the location name.
combine_change (bool):
Enables the change manuever in the route response, which
indicates a public transit line change.
@@ -242,16 +260,18 @@ class RoutingApi(HEREApi):
return self._route(waypoint_a, waypoint_b, modes, departure, arrival)
def location_near_motorway(self,
- waypoint_a: List[float],
- waypoint_b: List[float],
+ waypoint_a: Union[List[float], str],
+ waypoint_b: Union[List[float], str],
modes: List[RouteMode]=None,
departure: str='now'):
"""Calculates the fastest car route between two location
Args:
waypoint_a (array):
- array including latitude and longitude in order.
+ array including latitude and longitude in order
+ or string with the location name.
waypoint_b (array):
- array including latitude and longitude in order.
+ array including latitude and longitude in order
+ or string with the location name.
modes (array):
array including RouteMode enums.
departure (str):
@@ -266,16 +286,18 @@ class RoutingApi(HEREApi):
return self._route(waypoint_a, waypoint_b, modes, departure)
def truck_route(self,
- waypoint_a: List[float],
- waypoint_b: List[float],
+ waypoint_a: Union[List[float], str],
+ waypoint_b: Union[List[float], str],
modes: List[RouteMode]=None,
departure: str='now'):
"""Calculates the fastest truck route between two location
Args:
waypoint_a (array):
- array including latitude and longitude in order.
+ array including latitude and longitude in order
+ or string with the location name.
waypoint_b (array):
- array including latitude and longitude in order.
+ array including latitude and longitude in order
+ or string with the location name.
modes (array):
array including RouteMode enums.
departure (str):
@@ -290,8 +312,8 @@ class RoutingApi(HEREApi):
return self._route(waypoint_a, waypoint_b, modes, departure)
def matrix(self,
- start_waypoints: List[float],
- destination_waypoints: List[float],
+ start_waypoints: Union[List[float], str],
+ destination_waypoints: Union[List[float], str],
departure: str='now',
modes: List[RouteMode]=[],
summary_attributes: List[MatrixSummaryAttribute]=[]):
@@ -299,8 +321,10 @@ class RoutingApi(HEREApi):
Args:
start_waypoints (array):
array of arrays of coordinates [lat,long] of start waypoints.
+ or array of string with the location names.
destination_waypoints (array):
array of arrays of coordinates [lat,long] of destination waypoints.
+ or array of string with the location names.
departure (str):
time when travel is expected to start, e.g.: '2013-07-04T17:00:00+02'
modes (array):
@@ -320,12 +344,27 @@ class RoutingApi(HEREApi):
'summaryAttributes': ','.join([attribute.__str__() for attribute in summary_attributes])
}
for i, start_waypoint in enumerate(start_waypoints):
+ if isinstance(start_waypoint, str):
+ start_waypoint = self._get_coordinates_for_location_name(start_waypoint)
data['start' + str(i)] = self.__array_to_waypoint(start_waypoint)
for i, destination_waypoint in enumerate(destination_waypoints):
+ if isinstance(destination_waypoint, str):
+ destination_waypoint = self._get_coordinates_for_location_name(destination_waypoint)
data['destination' + str(i)] = self.__array_to_waypoint(destination_waypoint)
response = self.__get(self.URL_CALCULATE_MATRIX, data, RoutingMatrixResponse)
return response
+ def _get_coordinates_for_location_name(self, location_name: str) -> List[float]:
+ """Use the Geocoder API to resolve a location name to a set of coordinates."""
+
+ geocoder_api = GeocoderApi(self._api_key)
+ try:
+ geocoder_response = geocoder_api.free_form(location_name)
+ coordinates = geocoder_response.Response["View"][0]["Result"][0]["Location"]["NavigationPosition"][0]
+ return [coordinates["Latitude"], coordinates["Longitude"]]
+ except (HEREError) as here_error:
+ raise WaypointNotFoundError(here_error.message)
+
@staticmethod
def _convert_datetime_to_isoformat(datetime_object):
"""Convert a datetime.datetime object to an ISO8601 string."""
| abdullahselek/HerePy | 8691e147549141dc8287ba16a4396a63ed09b73b | diff --git a/tests/test_routing_api.py b/tests/test_routing_api.py
index db150bd..9239be6 100644
--- a/tests/test_routing_api.py
+++ b/tests/test_routing_api.py
@@ -455,6 +455,22 @@ class RoutingApiTest(unittest.TestCase):
self.assertTrue(response)
self.assertIsInstance(response, herepy.RoutingMatrixResponse)
+ @responses.activate
+ def test_matrix_multiple_start_names(self):
+ with codecs.open('testdata/models/routing_matrix_multiple_starts.json', mode='r', encoding='utf-8') as f:
+ server_response = f.read()
+ responses.add(responses.GET, 'https://matrix.route.ls.hereapi.com/routing/7.2/calculatematrix.json',
+ server_response, status=200)
+ with open('testdata/models/geocoder.json', 'r') as f:
+ expectedGeocoderResponse = f.read()
+ responses.add(responses.GET, 'https://geocoder.ls.hereapi.com/6.2/geocode.json',
+ expectedGeocoderResponse, status=200)
+ response = self._api.matrix(
+ start_waypoints=['Seattle', 'Kentucky'],
+ destination_waypoints=[[9.934574, -84.065544]])
+ self.assertTrue(response)
+ self.assertIsInstance(response, herepy.RoutingMatrixResponse)
+
@responses.activate
def test_matrix_multiple_destinations(self):
with codecs.open('testdata/models/routing_matrix_multiple_destinations.json', mode='r', encoding='utf-8') as f:
@@ -467,6 +483,22 @@ class RoutingApiTest(unittest.TestCase):
self.assertTrue(response)
self.assertIsInstance(response, herepy.RoutingMatrixResponse)
+ @responses.activate
+ def test_matrix_multiple_destinations(self):
+ with codecs.open('testdata/models/routing_matrix_multiple_destinations.json', mode='r', encoding='utf-8') as f:
+ server_response = f.read()
+ responses.add(responses.GET, 'https://matrix.route.ls.hereapi.com/routing/7.2/calculatematrix.json',
+ server_response, status=200)
+ with open('testdata/models/geocoder.json', 'r') as f:
+ expectedGeocoderResponse = f.read()
+ responses.add(responses.GET, 'https://geocoder.ls.hereapi.com/6.2/geocode.json',
+ expectedGeocoderResponse, status=200)
+ response = self._api.matrix(
+ start_waypoints=[[9.933231, -84.076831]],
+ destination_waypoints=['Seattle', 'Kentucky'])
+ self.assertTrue(response)
+ self.assertIsInstance(response, herepy.RoutingMatrixResponse)
+
@responses.activate
def test_matrix_bad_request(self):
with codecs.open('testdata/models/routing_matrix_bad_request.json', mode='r', encoding='utf-8') as f:
@@ -500,4 +532,30 @@ class RoutingApiTest(unittest.TestCase):
response = self._api.truck_route([11.0, 12.0],
[22.0, 23.0],
departure=date)
-
+
+ @responses.activate
+ def test_location_by_name(self):
+ with codecs.open('testdata/models/routing_truck_route_short.json', mode='r', encoding='utf-8') as f:
+ expectedRoutingResponse = f.read()
+ responses.add(responses.GET, 'https://route.ls.hereapi.com/routing/7.2/calculateroute.json',
+ expectedRoutingResponse, status=200)
+ with open('testdata/models/geocoder.json', 'r') as f:
+ expectedGeocoderResponse = f.read()
+ responses.add(responses.GET, 'https://geocoder.ls.hereapi.com/6.2/geocode.json',
+ expectedGeocoderResponse, status=200)
+ response = self._api.truck_route('200 S Mathilda Sunnyvale CA',
+ '200 S Mathilda Sunnyvale CA')
+
+ @responses.activate
+ def test_location_by_name_throws_WaypointNotFoundError(self):
+ with codecs.open('testdata/models/routing_truck_route_short.json', mode='r', encoding='utf-8') as f:
+ expectedRoutingResponse = f.read()
+ responses.add(responses.GET, 'https://route.ls.hereapi.com/routing/7.2/calculateroute.json',
+ expectedRoutingResponse, status=200)
+ with open('testdata/models/geocoder_error.json', 'r') as f:
+ expectedGeocoderResponse = f.read()
+ responses.add(responses.GET, 'https://geocoder.ls.hereapi.com/6.2/geocode.json',
+ expectedGeocoderResponse, status=200)
+ with self.assertRaises(herepy.WaypointNotFoundError):
+ response = self._api.truck_route('200 S Mathilda Sunnyvale CA',
+ '200 S Mathilda Sunnyvale CA')
| Combine routing api and geocoder to allow routing to/from names
Add a new function to the routing API that takes names for the origin and destination.
Call the geocoder API to resolve these to coordinates; a rough sketch follows.
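A rough sketch of what I have in mind, based on HerePy's existing `GeocoderApi.free_form` call (the response traversal assumes the Geocoder v6 JSON layout, and `name_to_coordinates` is just an illustrative helper name):
```python
import herepy

API_KEY = "your-here-api-key"  # placeholder

def name_to_coordinates(location_name):
    # Resolve a free-form place name to [lat, lon] via the Geocoder API.
    geocoder = herepy.GeocoderApi(API_KEY)
    response = geocoder.free_form(location_name)
    # Walk the Geocoder v6 response down to the first navigation position.
    position = response.Response["View"][0]["Result"][0]["Location"]["NavigationPosition"][0]
    return [position["Latitude"], position["Longitude"]]

routing = herepy.RoutingApi(API_KEY)
# With the feature in place, names could be passed straight through:
response = routing.matrix(start_waypoints=['Seattle', 'Kentucky'],
                          destination_waypoints=[[9.934574, -84.065544]])
```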
I would be happy to create a PR for this if you would welcome this feature. | 0.0 | 8691e147549141dc8287ba16a4396a63ed09b73b | [
"tests/test_routing_api.py::RoutingApiTest::test_location_by_name_throws_WaypointNotFoundError"
] | [
"tests/test_routing_api.py::RoutingApiTest::test_carroute_route_short",
"tests/test_routing_api.py::RoutingApiTest::test_pedastrianroute_when_error_invalid_input_data_occured",
"tests/test_routing_api.py::RoutingApiTest::test_matrix_multiple_start_names",
"tests/test_routing_api.py::RoutingApiTest::test_pedastrianroute_when_error_no_route_found_occured",
"tests/test_routing_api.py::RoutingApiTest::test_publictransport_when_error_invalid_input_data_occured",
"tests/test_routing_api.py::RoutingApiTest::test_publictransport_when_error_no_route_found_occured",
"tests/test_routing_api.py::RoutingApiTest::test_truckroute_when_error_no_route_found_occured",
"tests/test_routing_api.py::RoutingApiTest::test_initiation",
"tests/test_routing_api.py::RoutingApiTest::test_matrix_bad_request",
"tests/test_routing_api.py::RoutingApiTest::test_location_by_name",
"tests/test_routing_api.py::RoutingApiTest::test_publictransport_withdefaultmodes_whensucceed",
"tests/test_routing_api.py::RoutingApiTest::test_pedastrianroute_when_error_invalid_credentials_occured",
"tests/test_routing_api.py::RoutingApiTest::test_carroute_when_error_invalid_input_data_occured",
"tests/test_routing_api.py::RoutingApiTest::test_departure_as_datetime",
"tests/test_routing_api.py::RoutingApiTest::test_locationnearmotorway_when_error_no_route_found_occured",
"tests/test_routing_api.py::RoutingApiTest::test_publictransport_route_short",
"tests/test_routing_api.py::RoutingApiTest::test_departure_as_string",
"tests/test_routing_api.py::RoutingApiTest::test_matrix_multiple_destinations",
"tests/test_routing_api.py::RoutingApiTest::test_locationnearmotorway_withdefaultmodes_whensucceed",
"tests/test_routing_api.py::RoutingApiTest::test_pedastrianroute_route_short",
"tests/test_routing_api.py::RoutingApiTest::test_truckroute_when_error_invalid_credentials_occured",
"tests/test_routing_api.py::RoutingApiTest::test_truckroute_withdefaultmodes_whensucceed",
"tests/test_routing_api.py::RoutingApiTest::test_publictransport_when_error_invalid_credentials_occured",
"tests/test_routing_api.py::RoutingApiTest::test_locationnearmotorway_whensucceed",
"tests/test_routing_api.py::RoutingApiTest::test_carroute_withdefaultmodes_whensucceed",
"tests/test_routing_api.py::RoutingApiTest::test_locationnearmotorway_when_error_invalid_credentials_occured",
"tests/test_routing_api.py::RoutingApiTest::test_intermediateroute_whensucceed",
"tests/test_routing_api.py::RoutingApiTest::test_truckroute_route_short",
"tests/test_routing_api.py::RoutingApiTest::test_truckroute_when_error_invalid_input_data_occured",
"tests/test_routing_api.py::RoutingApiTest::test_truckroute_whensucceed",
"tests/test_routing_api.py::RoutingApiTest::test_intermediateroute_when_error_invalid_credentials_occured",
"tests/test_routing_api.py::RoutingApiTest::test_intermediateroute_when_error_invalid_input_data_occured",
"tests/test_routing_api.py::RoutingApiTest::test_carroute_whensucceed",
"tests/test_routing_api.py::RoutingApiTest::test_locationnearmotorway_when_error_invalid_input_data_occured",
"tests/test_routing_api.py::RoutingApiTest::test_matrix_whensucceed",
"tests/test_routing_api.py::RoutingApiTest::test_intermediateroute_when_error_no_route_found_occured",
"tests/test_routing_api.py::RoutingApiTest::test_matrix_multiple_starts",
"tests/test_routing_api.py::RoutingApiTest::test_carroute_when_error_no_route_found_occured",
"tests/test_routing_api.py::RoutingApiTest::test_bicycleroute_route_short",
"tests/test_routing_api.py::RoutingApiTest::test_pedastrianroute_whensucceed",
"tests/test_routing_api.py::RoutingApiTest::test_intermediateroute_withdefaultmodes_whensucceed",
"tests/test_routing_api.py::RoutingApiTest::test_carroute_when_error_invalid_credentials_occured",
"tests/test_routing_api.py::RoutingApiTest::test_publictransport_whensucceed",
"tests/test_routing_api.py::RoutingApiTest::test_bicycleroute_withdefaultmodes_whensucceed",
"tests/test_routing_api.py::RoutingApiTest::test_publictransporttimetable_route_short",
"tests/test_routing_api.py::RoutingApiTest::test_pedastrianroute_withdefaultmodes_whensucceed",
"tests/test_routing_api.py::RoutingApiTest::test_publictransporttimetable_withdefaultmodes_whensucceed"
] | {
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false
} | 2020-01-22 08:15:56+00:00 | mit | 858 |
|
abs-tudelft__vhdeps-21 | diff --git a/vhdeps/targets/ghdl.py b/vhdeps/targets/ghdl.py
index a354670..3e0e2b8 100644
--- a/vhdeps/targets/ghdl.py
+++ b/vhdeps/targets/ghdl.py
@@ -140,7 +140,7 @@ def _run_test_case(output_file, test_case, vcd_dir, ghdl_elaborate, ghdl_run):
exit_code, *_ = run_cmd(
output_file,
ghdl_elaborate,
- '--work=%s' % test_case.lib,
+ '--work=%s' % test_case.file.lib,
test_case.unit)
if exit_code != 0:
output_file.write('Elaboration for %s failed!\n' % test_case.unit)
@@ -151,13 +151,13 @@ def _run_test_case(output_file, test_case, vcd_dir, ghdl_elaborate, ghdl_run):
vcd_switch = []
if vcd_dir is not None:
vcd_file = '%s/%s.%s.vcd' % (
- vcd_dir, test_case.lib, test_case.unit)
+ vcd_dir, test_case.file.lib, test_case.unit)
vcd_switch.append('--vcd=%s' % vcd_file)
exit_code, stdout, *_ = run_cmd(
output_file,
ghdl_run,
- '--work=' + test_case.lib, test_case.unit,
- '--stop-time=' + test_case.get_timeout().replace(' ', ''),
+ '--work=' + test_case.file.lib, test_case.unit,
+ '--stop-time=' + test_case.file.get_timeout().replace(' ', ''),
*vcd_switch)
if 'simulation stopped by --stop-time' in stdout:
code = 1
diff --git a/vhdeps/targets/shared.py b/vhdeps/targets/shared.py
index 6318ff2..7fd85b1 100644
--- a/vhdeps/targets/shared.py
+++ b/vhdeps/targets/shared.py
@@ -16,6 +16,7 @@
import sys
import fnmatch
+from collections import namedtuple
def add_arguments_for_get_test_cases(parser):
"""Adds the appropriate command line arguments for the `get_test_cases()`
@@ -33,6 +34,8 @@ def add_arguments_for_get_test_cases(parser):
'partial match. If no patterns are specified, the matcher defaults to '
'a single \'*_tc\' pattern.')
+TestCase = namedtuple('TestCase', ('file', 'unit'))
+
def get_test_cases(vhd_list, pattern=None, **_):
"""Filters the toplevel entities in `vhd_list` using the given pattern
list, returning the resulting list."""
@@ -40,25 +43,21 @@ def get_test_cases(vhd_list, pattern=None, **_):
pattern = ['*_tc']
test_cases = []
for top in vhd_list.top:
- if top.unit is None:
- raise NotImplementedError(
- 'vhdeps\' test case runners currently do not support having '
- 'multiple test cases per VHDL file.\nThe offending file is '
- '"%s".' % top.fname)
- include = False
- for pat in pattern:
- target = top.unit
- if pat.startswith(':'):
- target = top.fname
- pat = pat[1:]
- invert = False
- if pat.startswith('!'):
- invert = True
- pat = pat[1:]
- if fnmatch.fnmatchcase(target, pat):
- include = not invert
- if include:
- test_cases.append(top)
+ for unit in top.entity_defs:
+ include = False
+ for pat in pattern:
+ target = unit
+ if pat.startswith(':'):
+ target = top.fname
+ pat = pat[1:]
+ invert = False
+ if pat.startswith('!'):
+ invert = True
+ pat = pat[1:]
+ if fnmatch.fnmatchcase(target, pat):
+ include = not invert
+ if include:
+ test_cases.append(TestCase(top, unit))
return test_cases
def run_cmd(output_file, cmd, *args):
diff --git a/vhdeps/targets/vsim.py b/vhdeps/targets/vsim.py
index 0e10b0a..a2984e3 100644
--- a/vhdeps/targets/vsim.py
+++ b/vhdeps/targets/vsim.py
@@ -381,11 +381,11 @@ def _write_tcl(vhd_list, tcl_file, **kwargs):
test_cases = get_test_cases(vhd_list, **kwargs)
for test_case in test_cases:
tcl_file.write('lappend testcases [list %s %s "%s"]\n' % (
- test_case.lib, test_case.unit, test_case.get_timeout()))
+ test_case.file.lib, test_case.unit, test_case.file.get_timeout()))
if len(test_cases) == 1:
test_case = test_cases[0]
tcl_file.write('simulate %s %s "%s"\n' % (
- test_case.lib, test_case.unit, test_case.get_timeout()))
+ test_case.file.lib, test_case.unit, test_case.file.get_timeout()))
else:
tcl_file.write('regression\n')
| abs-tudelft/vhdeps | 85fb3cccacdfa99132170c6ba8ae58d300b3ebfd | diff --git a/tests/test_ghdl.py b/tests/test_ghdl.py
index f37a06d..a02b2a8 100644
--- a/tests/test_ghdl.py
+++ b/tests/test_ghdl.py
@@ -40,6 +40,16 @@ class TestGhdlSimple(TestCase):
self.assertTrue('PASSED test_tc' in out)
self.assertTrue('Test suite PASSED' in out)
+ def test_multiple_per_file(self):
+ """Test that multiple test cases can exist in one file (GHDL)"""
+ code, out, _ = run_vhdeps('ghdl', '-i', DIR+'/complex/multi-tc-per-file')
+ self.assertEqual(code, 0)
+ self.assertTrue('working!' in out)
+ self.assertTrue('PASSED foo_tc' in out)
+ self.assertTrue('PASSED bar_tc' in out)
+ self.assertFalse('baz' in out)
+ self.assertTrue('Test suite PASSED' in out)
+
def test_failure(self):
"""Test that a single failing test case results in failure (GHDL)"""
code, out, _ = run_vhdeps('ghdl', '-i', DIR+'/simple/failure')
diff --git a/tests/test_patterns.py b/tests/test_patterns.py
index ebe47db..efa34b3 100644
--- a/tests/test_patterns.py
+++ b/tests/test_patterns.py
@@ -92,9 +92,14 @@ class TestPatterns(TestCase):
self.assertFalse(bool(re.search(r'ghdl -r [^\n]*baz', out)))
def test_multi_tc_per_file(self):
- """Test multiple test cases per file (not supported)"""
+ """Test multiple test cases per file"""
with local.env(PATH=DIR+'/ghdl/fake-ghdl:' + local.env['PATH']):
- code, _, err = run_vhdeps('ghdl', '-i', DIR+'/complex/multi-tc-per-file')
- self.assertEqual(code, 1)
- self.assertTrue('NotImplementedError: vhdeps\' test case runners currently do '
- 'not support having multiple test cases per VHDL file.' in err)
+ code, out, _ = run_vhdeps('ghdl', '-i', DIR+'/complex/multi-tc-per-file')
+ self.assertEqual(code, 0)
+ self.assertTrue(bool(re.search(r'ghdl -a [^\n]*test_tc.vhd', out)))
+ self.assertTrue(bool(re.search(r'ghdl -e [^\n]*foo_tc', out)))
+ self.assertTrue(bool(re.search(r'ghdl -e [^\n]*bar_tc', out)))
+ self.assertFalse(bool(re.search(r'ghdl -e [^\n]*baz', out)))
+ self.assertTrue(bool(re.search(r'ghdl -r [^\n]*foo_tc', out)))
+ self.assertTrue(bool(re.search(r'ghdl -r [^\n]*bar_tc', out)))
+ self.assertFalse(bool(re.search(r'ghdl -r [^\n]*baz', out)))
diff --git a/tests/test_vsim.py b/tests/test_vsim.py
index 2ab1e48..3228735 100644
--- a/tests/test_vsim.py
+++ b/tests/test_vsim.py
@@ -27,6 +27,12 @@ class TestVsimReal(TestCase):
self.assertEqual(code, 0)
self.assertTrue('working!' in out)
+ def test_multiple_per_file(self):
+ """Test running vsim on a file with multiple test cases"""
+ code, out, _ = run_vhdeps('ghdl', '-i', DIR+'/complex/multi-tc-per-file')
+ self.assertEqual(code, 0)
+ self.assertTrue('working!' in out)
+
def test_failure(self):
"""Test running vsim on a single failing test case"""
code, out, _ = run_vhdeps('vsim', '-i', DIR+'/simple/failure')
| GHDL and vsim backends cannot handle multiple test cases in one file
See title. Individual test cases are represented as `VhdFile` objects everywhere in these backends, making use of the `VhdFile.unit` attribute, which can be `None` when multiple are defined per file. This may be fixable by representing the test cases as two-tuples of the library and entity name. | 0.0 | 85fb3cccacdfa99132170c6ba8ae58d300b3ebfd | [
"tests/test_patterns.py::TestPatterns::test_multi_tc_per_file"
] | [
"tests/test_ghdl.py::TestGhdlSpecific::test_analyze_error",
"tests/test_ghdl.py::TestGhdlSpecific::test_elaborate_error",
"tests/test_ghdl.py::TestGhdlSpecific::test_multi_version",
"tests/test_ghdl.py::TestGhdlSpecific::test_no_ghdl",
"tests/test_ghdl.py::TestGhdlSpecific::test_no_plumbum",
"tests/test_ghdl.py::TestGhdlSpecific::test_no_wc",
"tests/test_ghdl.py::TestGhdlSpecific::test_unknown_version",
"tests/test_patterns.py::TestPatterns::test_negative_filename",
"tests/test_patterns.py::TestPatterns::test_negative_name",
"tests/test_patterns.py::TestPatterns::test_no_patterns",
"tests/test_patterns.py::TestPatterns::test_positive_filename",
"tests/test_patterns.py::TestPatterns::test_positive_name",
"tests/test_vsim.py::TestVsimMocked::test_batch_no_tempdir",
"tests/test_vsim.py::TestVsimMocked::test_gui_no_tempdir",
"tests/test_vsim.py::TestVsimMocked::test_gui_tempdir",
"tests/test_vsim.py::TestVsimMocked::test_no_plumbum",
"tests/test_vsim.py::TestVsimMocked::test_no_vsim",
"tests/test_vsim.py::TestVsimMocked::test_tcl_multi",
"tests/test_vsim.py::TestVsimMocked::test_tcl_single",
"tests/test_vsim.py::TestVsimMocked::test_tcl_to_file",
"tests/test_vsim.py::TestVsimMocked::test_tcl_versions",
"tests/test_vsim.py::TestVsimMocked::test_unsupported_version"
] | {
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false
} | 2019-07-12 09:11:14+00:00 | apache-2.0 | 859 |
|
abs-tudelft__vhdeps-22 | diff --git a/vhdeps/targets/ghdl.py b/vhdeps/targets/ghdl.py
index 3e0e2b8..ff426a1 100644
--- a/vhdeps/targets/ghdl.py
+++ b/vhdeps/targets/ghdl.py
@@ -81,7 +81,22 @@ def add_arguments(parser):
'regardless of whether it passed or not. If there are multiple test '
'cases, gtkwave is launched for the first failure.')
-def _get_ghdl_cmds(vhd_list, ieee='synopsys', no_debug=False, coverage=None, **_):
+ parser.add_argument(
+ '-W', action='append', metavar='#{#},<options>', dest='extra_args',
+ # It'd be great to use [] here ^^^ but Python devs managed to
+ # sufficiently bork argparse's internals to make that break before
+ # Python 3.8. Since it's completely asenine to require 3.8 for
+ # something like this, {} will have to do.
+ help='Pass comma-separated options to the command specified by #. The '
+ 'first # can be \'a\' for the analysis command, \'e\' for the '
+ 'elaboration command, and \'r\' for the run command. If a second '
+ 'character is specified, <options> are chained to a \'-W#,<options>\' '
+ 'option for the command specified by the first letter. For instance, '
+ '\'-Wac,-O3\' passes -O3 to the GCC compiler during the analysis '
+ 'phase.')
+
+def _get_ghdl_cmds(vhd_list, ieee='synopsys', no_debug=False,
+ coverage=None, extra_args=None, **_):
"""Returns a three-tuple of the analyze, elaborate, and run commands for
GHDL in plumbum form."""
@@ -130,6 +145,25 @@ def _get_ghdl_cmds(vhd_list, ieee='synopsys', no_debug=False, coverage=None, **_
ghdl_analyze = ghdl_analyze['-Wc,-fprofile-arcs', '-Wc,-ftest-coverage', '-Wc,-O3']
ghdl_elaborate = ghdl_elaborate['-Wl,-lgcov']
+ # Add user-specified extra arguments.
+ if extra_args:
+ for extra_arg in extra_args:
+ if ',' not in extra_arg:
+ raise ValueError('invalid value for -W')
+ target, *args = extra_arg.split(',')
+ if len(target) not in (1, 2):
+ raise ValueError('invalid value for -W')
+ if len(target) == 2:
+ args = ['-W%s,%s' % (target[1], ','.join(args))]
+ if target[0] == 'a':
+ ghdl_analyze = ghdl_analyze[args]
+ elif target[0] == 'e':
+ ghdl_elaborate = ghdl_elaborate[args]
+ elif target[0] == 'r':
+ ghdl_run = ghdl_run[args]
+ else:
+ raise ValueError('invalid value for -W')
+
return ghdl_analyze, ghdl_elaborate, ghdl_run
def _run_test_case(output_file, test_case, vcd_dir, ghdl_elaborate, ghdl_run):
| abs-tudelft/vhdeps | f2b55a77340e99d13e1adea753c63d10f2b06d79 | diff --git a/tests/test_ghdl.py b/tests/test_ghdl.py
index a02b2a8..9e16da5 100644
--- a/tests/test_ghdl.py
+++ b/tests/test_ghdl.py
@@ -2,6 +2,7 @@
from unittest import TestCase, skipIf
from unittest.mock import patch
+import re
import os
import tempfile
from plumbum import local
@@ -254,6 +255,22 @@ class TestGhdlSpecific(TestCase):
code, _, _ = run_vhdeps('ghdl', '-i', DIR+'/simple/multiple-ok', '-j')
self.assertEqual(code, 1)
+ def test_extra_options(self):
+ """Test the -W option for GHDL"""
+ with local.env(PATH=DIR+'/ghdl/fake-ghdl:' + local.env['PATH']):
+ self.assertNotEqual(run_vhdeps('ghdl', '-i', DIR+'/simple/all-good', '-W'), 0)
+ self.assertNotEqual(run_vhdeps('ghdl', '-i', DIR+'/simple/all-good', '-Wx'), 0)
+ self.assertNotEqual(run_vhdeps('ghdl', '-i', DIR+'/simple/all-good', '-W,x'), 0)
+ self.assertNotEqual(run_vhdeps('ghdl', '-i', DIR+'/simple/all-good', '-Wx,x'), 0)
+ code, out, _ = run_vhdeps(
+ 'ghdl', '-i', DIR+'/simple/all-good',
+ '-Wa,a,na,lyze', '-We,e,la,bo,rate', '-Wr,run', '-Wrx,a,b,c')
+ self.assertEqual(code, 0)
+ self.assertTrue(bool(re.search(r'ghdl -a [^\n]* a na lyze', out)))
+ self.assertTrue(bool(re.search(r'ghdl -e [^\n]* e la bo rate', out)))
+ self.assertTrue(bool(re.search(r'ghdl -r [^\n]* run', out)))
+ self.assertTrue(bool(re.search(r'ghdl -r [^\n]* -Wx,a,b,c', out)))
+
@skipIf(
not coverage_supported(),
| Passing arbitrary arguments to GHDL
Sometimes one might want to pass some extra arguments to GHDL, especially with the GCC backend to pass additional compiler flags. There is currently no command-line syntax for this. | 0.0 | f2b55a77340e99d13e1adea753c63d10f2b06d79 | [
"tests/test_ghdl.py::TestGhdlSpecific::test_extra_options"
] | [
"tests/test_ghdl.py::TestGhdlSpecific::test_analyze_error",
"tests/test_ghdl.py::TestGhdlSpecific::test_elaborate_error",
"tests/test_ghdl.py::TestGhdlSpecific::test_multi_version",
"tests/test_ghdl.py::TestGhdlSpecific::test_no_ghdl",
"tests/test_ghdl.py::TestGhdlSpecific::test_no_plumbum",
"tests/test_ghdl.py::TestGhdlSpecific::test_no_wc",
"tests/test_ghdl.py::TestGhdlSpecific::test_unknown_version"
] | {
"failed_lite_validators": [
"has_short_problem_statement"
],
"has_test_patch": true,
"is_lite": false
} | 2019-07-12 14:41:55+00:00 | apache-2.0 | 860 |
|
absent1706__sqlalchemy-mixins-108 | diff --git a/sqlalchemy_mixins/inspection.py b/sqlalchemy_mixins/inspection.py
index 41d1ea6..bd2584e 100644
--- a/sqlalchemy_mixins/inspection.py
+++ b/sqlalchemy_mixins/inspection.py
@@ -5,12 +5,7 @@ from sqlalchemy.orm import RelationshipProperty, DeclarativeBase
from .utils import classproperty
-class Base(DeclarativeBase):
- __abstract__ = True
-
-
-class InspectionMixin(Base):
- __abstract__ = True
+class InspectionMixin:
@classproperty
def columns(cls):
diff --git a/sqlalchemy_mixins/inspection.pyi b/sqlalchemy_mixins/inspection.pyi
index ebc9da1..09582d8 100644
--- a/sqlalchemy_mixins/inspection.pyi
+++ b/sqlalchemy_mixins/inspection.pyi
@@ -1,18 +1,16 @@
from typing import List, Protocol, Dict
from sqlalchemy.ext.hybrid import hybrid_method
-from sqlalchemy.orm import Mapper, DeclarativeBase
+from sqlalchemy.orm import Mapper
from sqlalchemy.orm.interfaces import MapperProperty
from sqlalchemy_mixins.utils import classproperty
-class Base(DeclarativeBase):
- __abstract__ = True
class MappingProtocol(Protocol):
__mapper__: Mapper
-class InspectionMixin(Base):
+class InspectionMixin:
@classproperty
def columns(cls) -> List[str]: ...
| absent1706/sqlalchemy-mixins | 4d4e5d575a3b8b3d72ee12b67b17741579ef26c1 | diff --git a/sqlalchemy_mixins/tests/test_activerecord.py b/sqlalchemy_mixins/tests/test_activerecord.py
index 901ac73..8284b2a 100644
--- a/sqlalchemy_mixins/tests/test_activerecord.py
+++ b/sqlalchemy_mixins/tests/test_activerecord.py
@@ -3,13 +3,16 @@ import unittest
import sqlalchemy as sa
from sqlalchemy import create_engine
from sqlalchemy.ext.hybrid import hybrid_property
-from sqlalchemy.orm import Query, Session, DeclarativeBase
+from sqlalchemy.orm import Query, Session, DeclarativeBase, declarative_base
from sqlalchemy_mixins import ActiveRecordMixin
from sqlalchemy_mixins.activerecord import ModelNotFoundError
class Base(DeclarativeBase):
__abstract__ = True
+
+AlternativeBase = declarative_base()
+
engine = create_engine('sqlite:///:memory:', echo=False)
sess = Session(engine)
# sess = scoped_session(sessionmaker(bind=engine))
@@ -20,6 +23,9 @@ class BaseModel(Base, ActiveRecordMixin):
pass
+class BaseModelAlternative(AlternativeBase, ActiveRecordMixin):
+ __abstract__ = True
+
class User(BaseModel):
__tablename__ = 'user'
__repr_attrs__ = ['name']
@@ -29,6 +35,13 @@ class User(BaseModel):
posts_viewonly = sa.orm.relationship('Post', viewonly=True)
+class UserAlternative(BaseModelAlternative):
+ __tablename__ = 'user_alt'
+ __repr_attrs__ = ['name']
+ id = sa.Column(sa.Integer, primary_key=True)
+ name = sa.Column(sa.String)
+
+
class Post(BaseModel):
__tablename__ = 'post'
id = sa.Column(sa.Integer, primary_key=True)
@@ -204,5 +217,21 @@ class TestActiveRecord(unittest.TestCase):
_ = User.find_or_fail(123456789)
+class TestActiveRecordAlternative(unittest.TestCase):
+ def setUp(self):
+ sess.rollback()
+
+ BaseModelAlternative.set_session(None)
+ AlternativeBase.metadata.drop_all(engine)
+ AlternativeBase.metadata.create_all(engine)
+
+ BaseModelAlternative.set_session(sess)
+
+ def test_create(self):
+ u1 = UserAlternative.create(name='Bill u1')
+ self.assertEqual(u1, sess.query(UserAlternative).first())
+
+
+
if __name__ == '__main__': # pragma: no cover
unittest.main()
| sqlalchemy.exc.ArgumentError: Class 'MyClass' already has a primary mapper defined.
I have a project with SQLAlchemy 1.4 and sqlalchemy-mixins 1.5.3.
I have upgraded SQLAlchemy to 2.0 and made the code work and pass the tests. Then, I tried to upgrade sqlalchemy-mixins to 2.0.3, but I get the error in the title.
```
File "/lib/MyClass.py", line 12, in <module>
class MyClass(MyBaseClass):
File "/venv38/lib/python3.8/site-packages/sqlalchemy/orm/decl_api.py", line 195, in __init__
_as_declarative(reg, cls, dict_)
File "/venv38/lib/python3.8/site-packages/sqlalchemy/orm/decl_base.py", line 247, in _as_declarative
return _MapperConfig.setup_mapping(registry, cls, dict_, None, {})
File "/venv38/lib/python3.8/site-packages/sqlalchemy/orm/decl_base.py", line 328, in setup_mapping
return _ClassScanMapperConfig(
File "/venv38/lib/python3.8/site-packages/sqlalchemy/orm/decl_base.py", line 520, in __init__
super().__init__(registry, cls_, mapper_kw)
File "/venv38/lib/python3.8/site-packages/sqlalchemy/orm/decl_base.py", line 344, in __init__
instrumentation.register_class(
File "/venv38/lib/python3.8/site-packages/sqlalchemy/orm/instrumentation.py", line 684, in register_class
manager._update_state(
File "/venv38/lib/python3.8/site-packages/sqlalchemy/orm/instrumentation.py", line 209, in _update_state
registry._add_manager(self)
File "/venv38/lib/python3.8/site-packages/sqlalchemy/orm/decl_api.py", line 1380, in _add_manager
raise exc.ArgumentError(
sqlalchemy.exc.ArgumentError: Class '<class 'lib.MyClass'>' already has a primary mapper defined.
```
MyBaseClass is defined as:
```python
class MyBaseClass(AnotherBaseClass):
__abstract__ = True
#SOME CODE
class AnotherBaseClass(Base, AllFeaturesMixin):
__abstract__ = True
#SOME OTHER CODE
```
Any idea of what may be happening or how to troubleshoot this?
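In case it helps, here is a minimal sketch of the pattern that seems to trigger it for me (the table and class names are made up). Combining a `declarative_base()` Base with the 2.0.3 mixins, which now inherit from their own `DeclarativeBase` subclass, appears to register the model with two declarative registries:
```python
import sqlalchemy as sa
from sqlalchemy.orm import declarative_base
from sqlalchemy_mixins import AllFeaturesMixin

Base = declarative_base()  # my own registry

class MyBaseClass(Base, AllFeaturesMixin):
    # In 2.0.3 the mixins carry their own DeclarativeBase, so this class
    # appears to be attached to two registries at once.
    __abstract__ = True

class MyClass(MyBaseClass):
    # Raises: Class 'MyClass' already has a primary mapper defined.
    __tablename__ = 'my_class'
    id = sa.Column(sa.Integer, primary_key=True)
```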
| 0.0 | 4d4e5d575a3b8b3d72ee12b67b17741579ef26c1 | [
"sqlalchemy_mixins/tests/test_activerecord.py::TestActiveRecord::test_all",
"sqlalchemy_mixins/tests/test_activerecord.py::TestActiveRecord::test_create",
"sqlalchemy_mixins/tests/test_activerecord.py::TestActiveRecord::test_delete",
"sqlalchemy_mixins/tests/test_activerecord.py::TestActiveRecord::test_destroy",
"sqlalchemy_mixins/tests/test_activerecord.py::TestActiveRecord::test_fill_and_save",
"sqlalchemy_mixins/tests/test_activerecord.py::TestActiveRecord::test_fill_wrong_attribute",
"sqlalchemy_mixins/tests/test_activerecord.py::TestActiveRecord::test_find",
"sqlalchemy_mixins/tests/test_activerecord.py::TestActiveRecord::test_find_or_fail",
"sqlalchemy_mixins/tests/test_activerecord.py::TestActiveRecord::test_first",
"sqlalchemy_mixins/tests/test_activerecord.py::TestActiveRecord::test_settable_attributes",
"sqlalchemy_mixins/tests/test_activerecord.py::TestActiveRecord::test_update",
"sqlalchemy_mixins/tests/test_activerecord.py::TestActiveRecordAlternative::test_create"
] | [] | {
"failed_lite_validators": [
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false
} | 2023-07-25 20:54:25+00:00 | mit | 861 |
|
absent1706__sqlalchemy-mixins-110 | diff --git a/sqlalchemy_mixins/activerecord.py b/sqlalchemy_mixins/activerecord.py
index 7b082f1..122a60d 100644
--- a/sqlalchemy_mixins/activerecord.py
+++ b/sqlalchemy_mixins/activerecord.py
@@ -23,50 +23,56 @@ class ActiveRecordMixin(InspectionMixin, SessionMixin):
return self
- def save(self):
+ def save(self, commit=True):
"""Saves the updated model to the current entity db.
+ :param commit: where to commit the transaction
"""
- try:
- self.session.add(self)
- self.session.commit()
- return self
- except:
- self.session.rollback()
- raise
+ self.session.add(self)
+ if commit:
+ self._commit_or_fail()
+ return self
@classmethod
- def create(cls, **kwargs):
+ def create(cls, commit=True, **kwargs):
"""Create and persist a new record for the model
+ :param commit: where to commit the transaction
:param kwargs: attributes for the record
:return: the new model instance
"""
- return cls().fill(**kwargs).save()
+ return cls().fill(**kwargs).save(commit=commit)
- def update(self, **kwargs):
+ def update(self, commit=True, **kwargs):
"""Same as :meth:`fill` method but persists changes to database.
+ :param commit: where to commit the transaction
"""
- return self.fill(**kwargs).save()
+ return self.fill(**kwargs).save(commit=commit)
- def delete(self):
+ def delete(self, commit=True):
"""Removes the model from the current entity session and mark for deletion.
+ :param commit: where to commit the transaction
"""
+ self.session.delete(self)
+ if commit:
+ self._commit_or_fail()
+
+ def _commit_or_fail(self):
try:
- self.session.delete(self)
self.session.commit()
except:
self.session.rollback()
raise
@classmethod
- def destroy(cls, *ids):
+ def destroy(cls, *ids, commit=True):
"""Delete the records with the given ids
:type ids: list
:param ids: primary key ids of records
+ :param commit: where to commit the transaction
"""
for pk in ids:
obj = cls.find(pk)
if obj:
- obj.delete()
+ obj.delete(commit=commit)
cls.session.flush()
@classmethod
| absent1706/sqlalchemy-mixins | cbd35a6ac4d1ae641b08b5542fa13de22c2b74fb | diff --git a/sqlalchemy_mixins/tests/test_activerecord.py b/sqlalchemy_mixins/tests/test_activerecord.py
index 8284b2a..1f8b0df 100644
--- a/sqlalchemy_mixins/tests/test_activerecord.py
+++ b/sqlalchemy_mixins/tests/test_activerecord.py
@@ -1,5 +1,6 @@
import unittest
+import sqlalchemy
import sqlalchemy as sa
from sqlalchemy import create_engine
from sqlalchemy.ext.hybrid import hybrid_property
@@ -115,6 +116,30 @@ class TestActiveRecord(unittest.TestCase):
self.assertEqual(p11, sess.query(Post).first())
self.assertEqual(p11.archived, True)
+ def test_save_commits(self):
+ with self.assertRaises(sqlalchemy.exc.InvalidRequestError):
+ with sess.begin():
+ u1 = User()
+ u1.fill(name='Bill u1')
+ u1.save()
+ u2 = User()
+ u2.fill(name='Bill u2')
+ u2.save()
+ self.assertEqual([u1, u2], sess.query(User).order_by(User.id.asc()).all())
+ # The first user is saved even when the block raises a Exception
+ self.assertEqual([u1], sess.query(User).order_by(User.id.asc()).all())
+
+ def test_save_do_not_commit(self):
+ with sess.begin():
+ u1 = User()
+ u1.fill(name='Bill u1')
+ u1.save(commit=False)
+ u2 = User()
+ u2.fill(name='Bill u2')
+ u2.save(commit=False)
+
+ self.assertEqual([u1,u2], sess.query(User).order_by(User.id.asc()).all())
+
def test_create(self):
u1 = User.create(name='Bill u1')
self.assertEqual(u1, sess.query(User).first())
@@ -158,6 +183,16 @@ class TestActiveRecord(unittest.TestCase):
self.assertEqual(sess.query(Post).get(11).public, True)
self.assertEqual(sess.query(Post).get(11).user, u2)
+ def test_update_no_commit(self):
+ u1 = User(name='Bill', id=1)
+ u1.save()
+ u1.update(name='Joe', commit=False)
+ self.assertEqual('Joe', sess.query(User).where(User.id==1).first().name)
+ sess.rollback()
+ self.assertEqual('Bill', sess.query(User).where(User.id==1).first().name)
+
+
+
def test_fill_wrong_attribute(self):
u1 = User(name='Bill u1')
sess.add(u1)
@@ -179,6 +214,15 @@ class TestActiveRecord(unittest.TestCase):
u1.delete()
self.assertEqual(sess.query(User).get(1), None)
+ def test_delete_without_commit(self):
+ u1 = User()
+ u1.save()
+ u1.delete(commit=False)
+ self.assertIsNone(sess.query(User).one_or_none())
+ sess.rollback()
+ self.assertIsNotNone(sess.query(User).one_or_none())
+
+
def test_destroy(self):
u1, u2, p11, p12, p13 = self._seed()
@@ -186,6 +230,16 @@ class TestActiveRecord(unittest.TestCase):
Post.destroy(11, 12)
self.assertEqual(set(sess.query(Post).all()), {p13})
+
+ def test_destroy_no_commit(self):
+ u1, u2, p11, p12, p13 = self._seed()
+ sess.commit()
+ self.assertEqual(set(sess.query(Post).order_by(Post.id).all()), {p11, p12, p13})
+ Post.destroy(11, 12, commit=False)
+ self.assertEqual(set(sess.query(Post).order_by(Post.id).all()), {p13})
+ sess.rollback()
+ self.assertEqual(set(sess.query(Post).order_by(Post.id).all()), {p11, p12, p13})
+
def test_all(self):
u1, u2, p11, p12, p13 = self._seed()
@@ -231,6 +285,12 @@ class TestActiveRecordAlternative(unittest.TestCase):
u1 = UserAlternative.create(name='Bill u1')
self.assertEqual(u1, sess.query(UserAlternative).first())
+ def test_create_no_commit(self):
+ u1 = UserAlternative.create(name='Bill u1', commit=False)
+ self.assertEqual(u1, sess.query(UserAlternative).first())
+ sess.rollback()
+ self.assertIsNone(sess.query(UserAlternative).one_or_none())
+
if __name__ == '__main__': # pragma: no cover
| save from ActiveRecordMixin can commit external transactions
I have some code in a transaction:
```python
with session.begin():
an_object.save()
another_object.save()
```
Both objects use the ActiveRecordMixin.
That code used to work, even after moving to SQLAlchemy 2.
But now with sqlalchemy-mixins 2.0.3, the save method commits the transaction even when it wasn't the save method that opened it, and that code is broken. I was wondering whether it makes sense to check whether there is an active transaction and, in that case, not commit.
Something like this (tidied up, of course):
``` python
def save(self):
"""Saves the updated model to the current entity db.
"""
# If there is an active transaction, we are not responsible for commit or rollback.
if self.session.get_transaction():
self.session.add(self)
self.session.flush()
return self
try:
self.session.add(self)
self.session.commit()
return self
except:
self.session.rollback()
raise
```
I realized the same happens with `delete`.
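For reference, with the `commit=` keyword that the patch above adds, the snippet at the top of this issue would be written like this (reusing `session` and the two objects from that snippet):
```python
with session.begin():
    # commit=False only stages the objects in the session; commit or
    # rollback stays with whoever opened the transaction.
    an_object.save(commit=False)
    another_object.save(commit=False)
    # update(), delete(), create(), and destroy() accept the same flag.
# The session.begin() block commits here, once, for both objects.
```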
| 0.0 | cbd35a6ac4d1ae641b08b5542fa13de22c2b74fb | [
"sqlalchemy_mixins/tests/test_activerecord.py::TestActiveRecord::test_delete_without_commit",
"sqlalchemy_mixins/tests/test_activerecord.py::TestActiveRecord::test_destroy_no_commit",
"sqlalchemy_mixins/tests/test_activerecord.py::TestActiveRecord::test_save_do_not_commit",
"sqlalchemy_mixins/tests/test_activerecord.py::TestActiveRecord::test_update_no_commit",
"sqlalchemy_mixins/tests/test_activerecord.py::TestActiveRecordAlternative::test_create_no_commit"
] | [
"sqlalchemy_mixins/tests/test_activerecord.py::TestActiveRecord::test_all",
"sqlalchemy_mixins/tests/test_activerecord.py::TestActiveRecord::test_create",
"sqlalchemy_mixins/tests/test_activerecord.py::TestActiveRecord::test_delete",
"sqlalchemy_mixins/tests/test_activerecord.py::TestActiveRecord::test_destroy",
"sqlalchemy_mixins/tests/test_activerecord.py::TestActiveRecord::test_fill_and_save",
"sqlalchemy_mixins/tests/test_activerecord.py::TestActiveRecord::test_fill_wrong_attribute",
"sqlalchemy_mixins/tests/test_activerecord.py::TestActiveRecord::test_find",
"sqlalchemy_mixins/tests/test_activerecord.py::TestActiveRecord::test_find_or_fail",
"sqlalchemy_mixins/tests/test_activerecord.py::TestActiveRecord::test_first",
"sqlalchemy_mixins/tests/test_activerecord.py::TestActiveRecord::test_save_commits",
"sqlalchemy_mixins/tests/test_activerecord.py::TestActiveRecord::test_settable_attributes",
"sqlalchemy_mixins/tests/test_activerecord.py::TestActiveRecord::test_update",
"sqlalchemy_mixins/tests/test_activerecord.py::TestActiveRecordAlternative::test_create"
] | {
"failed_lite_validators": [
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false
} | 2023-07-31 13:01:12+00:00 | mit | 862 |
|
aburrell__ocbpy-111 | diff --git a/Changelog.rst b/Changelog.rst
index 72966f5..30b8cf1 100644
--- a/Changelog.rst
+++ b/Changelog.rst
@@ -21,6 +21,7 @@ Summary of all changes made since the first stable release
* ENH: Added function to select data along a satellite track
* ENH: Changed attributes in VectorData into properties to ensure expected
behaviour if altering the class data after initialisation
+* ENH: Added IMAGE SI12, SI13, and WIC DMSP corrections to `harmonic`
* MAINT: Removed support for Python 2.7, 3.5, and 3.6; added support for 3.10
* MAINT: Improved PEP8 compliance
* MAINT: Updated pysat routines to v3.0.0 standards
diff --git a/docs/citing.rst b/docs/citing.rst
index 9a60f6a..f0da9fd 100644
--- a/docs/citing.rst
+++ b/docs/citing.rst
@@ -42,8 +42,8 @@ which may also be cited if a description of the package is desired.
IMAGE FUV Boundaries
--------------------
-Please cite both the papers discussing both the instrument and the boundary
-retrieval method.
+Please cite both the papers discussing the instrument and the appropriate
+boundary retrieval method.
* **SI12/SI13**: Mende, S., et al. Space Science Reviews (2000) 91: 287-318.
http://doi.org/10.1023/A:1005292301251.
@@ -53,6 +53,8 @@ retrieval method.
high‐latitude ionospheric climatologies and empirical models,
J. Geophys. Res. Space Physics, 122, 932–947,
http://doi.org/10.1002/2016JA023235.
+* **OCB**: Chisham, G. et al. (2022) Ionospheric Boundaries Derived from Auroral
+ Images. In Prep.
* **OCB**: Chisham, G. (2017) Auroral Boundary Derived from IMAGE Satellite
Mission Data (May 2000 - Oct 2002), Version 1.1, Polar Data Centre, Natural
Environment Research Council, UK.
diff --git a/docs/ocb_datasets.rst b/docs/ocb_datasets.rst
index 56fdffb..08138a2 100644
--- a/docs/ocb_datasets.rst
+++ b/docs/ocb_datasets.rst
@@ -19,16 +19,24 @@ provided in the :py:mod:`ocbpy.boundaries.files` sub-module.
IMAGE
-----
-Data from three auroral instruments provide northern hemisphere OCB and EAB
-locations for 3 May 2000 02:41:43 UT - 31 Oct 2002 20:05:16, though not all of
-the times included in these files contain high-quality estimations of the
-boundary locations. Recommended selection criteria are included as defaults in
-the :py:class:`~ocbpy.OCBoundary` class. There are also boundary
-files that combine the information from all instruments to obtain the OCB and
-EAB. You can read more about the OCB determination, EAB determination, this
-selection criteria, and the three auroral instruments (IMAGE Wideband Imaging
-Camera (WIC) and FUV Spectrographic Imagers SI12 and SI13) in the articles
-listed in :ref:`cite-image`.
+Data from three auroral instruments provide northern hemisphere poleward auroral
+boundary (PAB) and EAB locations for 3 May 2000 02:41:43 UT - 31 Oct 2002
+20:05:16, though not all of the times included in these files contain
+high-quality estimations of the boundary locations. Recommended selection
+criteria are included as defaults in the :py:class:`~ocbpy.OCBoundary` class.
+There are also boundary files that combine the information from all instruments
+to obtain the OCB and EAB. These combined files are the default boundaries for
+the IMAGE time period. You can read more about the OCB determination, EAB
+determination, this selection criteria, and the three auroral instruments
+(IMAGE Wideband Imaging Camera (WIC) and FUV Spectrographic Imagers SI12 and
+SI13) in the articles listed in :ref:`cite-image`.
+
+The most recent corrects for each instrument that add the DMSP particle
+precipitation corrections to the PAB and EAB locations are included in
+:py:mod:`ocbpy.ocb_correction`. These corrections should be applied to the
+data used to obtain the circle fits included in the instrument files, not the
+circle fits themselves. These data sets may be obtained from the British
+Antarctic Survey.
.. _bound-data-ampere:
diff --git a/ocbpy/boundaries/README.md b/ocbpy/boundaries/README.md
index ad99f4b..9d42e78 100644
--- a/ocbpy/boundaries/README.md
+++ b/ocbpy/boundaries/README.md
@@ -1,8 +1,8 @@
This directory contains files with Open Closed field line Boundaries obtained
from different instruments
-IMAGE (si12/si13/wic) File Format
----------------------------------
+IMAGE (image/si12/si13/wic) File Format
+---------------------------------------
YR, SOY, NB, PHICENT, RCENT, R, A, R_ERR, R_MERIT
YR : Year
@@ -20,7 +20,9 @@ R_MERIT : Radial distance from the most typical pole location in degrees
There are certain ranges for NB, RCENT, and R that you shouldn’t use that can
be found (and explained) in Chisham (2017), doi:10.1002/2016JA023235. These
ranges are the defaults in OCBoundary.get_next_good_ocb_ind. When using these
-boundaries, remember to cite Chisham (2017).
+boundaries, remember to cite Chisham (2017). From ocbpy version 0.3.0 onward,
+the SI12, SI13, and WIC files contain uncorrected auroral boundary fits, while
+the IMAGE file contains DMSP corrected average boundaries for the OCB and EAB.
AMPERE (amp) File Format
------------------------
diff --git a/ocbpy/ocb_correction.py b/ocbpy/ocb_correction.py
index ba7cf8a..b73a0e6 100644
--- a/ocbpy/ocb_correction.py
+++ b/ocbpy/ocb_correction.py
@@ -9,6 +9,8 @@ References
----------
.. [4] Burrell, A. G. et al.: AMPERE Polar Cap Boundaries, Ann. Geophys., 38,
481-490, doi:10.5194/angeo-38-481-2020, 2020.
+.. [6] Chisham, G. et al.: Ionospheric Boundaries Derived from Auroral Images,
+ in prep, 2022.
"""
@@ -18,7 +20,7 @@ from ocbpy.ocb_time import hr2rad
def circular(mlt, r_add=0.0):
- """Return a circular boundary correction
+ """Return a circular boundary correction.
Parameters
----------
@@ -42,7 +44,7 @@ def circular(mlt, r_add=0.0):
def elliptical(mlt, instrument='ampere', method='median'):
- """Return the results of an elliptical correction to the data boundary [4]_
+ """Return the results of an elliptical correction to the data boundary.
Parameters
----------
@@ -59,6 +61,10 @@ def elliptical(mlt, instrument='ampere', method='median'):
r_corr : float or array-like
Radius correction in degrees at this MLT
+ References
+ ----------
+ Prefered AMPERE boundary correction validated in [4]_.
+
"""
if instrument.lower() != 'ampere':
@@ -81,7 +87,7 @@ def elliptical(mlt, instrument='ampere', method='median'):
def harmonic(mlt, instrument='ampere', method='median'):
- """Return the results of a harmonic fit correction to the data boundary [4]_
+ """Return the results of a harmonic fit correction to the data boundary.
Parameters
----------
@@ -91,38 +97,67 @@ def harmonic(mlt, instrument='ampere', method='median'):
Data set's instrument name (default='ampere')
method : str
Method used to determine coefficients; accepts 'median' or
- 'gaussian' (default='median')
+ 'gaussian' when `instrument` is 'ampere'. Otherwise, accepts 'eab' or
+ 'ocb'. (default='median')
Returns
-------
r_corr : float or array-like
Radius correction in degrees at this MLT
+ References
+ ----------
+ AMPERE boundaries obtained from [4]_. IMAGE boundaries obtained from [6]_.
+
"""
- if instrument.lower() != 'ampere':
+ # Define the harmonic coefficients for each instrument and method/location
+ coeff = {'ampere': {'median': [3.31000535, -0.5452934, -1.24389141,
+ 2.42619653, -0.66677988, -1.03467488,
+ -0.30763009, 0.52426756, 0.04359299,
+ 0.60201848, 0.50618522, 1.04360529,
+ 0.25186405],
+ 'gaussian': [3.80100827, 0.98555723, -3.43760943,
+ 1.85084271, -0.36730751, -0.81975654,
+ -1.02823832, 1.30637288, -0.53599218,
+ 0.40380183, -1.22462708, -1.2733629,
+ -0.62743381]},
+ 'si12': {'ocb': [0.0405, -1.5078, 1.0, 0.5095, 1.0, 0.9371, 1.0,
+ 0.0708, 1.0, 0.0, 1.0, 0.0, 1.0],
+ 'eab': [-0.1447, -1.9779, 1.0, 2.6799, 1.0, 0.5778, 1.0,
+ -1.2297, 1.0, 0.0, 1.0, 0.0, 1.0]},
+ 'si13': {'ocb': [0.5797, -0.6837, 1.0, -0.5020, 1.0, 0.2971, 1.0,
+ -0.4173, 1.0, 0.0, 1.0, 0.0, 1.0],
+ 'eab': [0.2500, -2.9931, 1.0, 0.8818, 1.0, 0.8511, 1.0,
+ -0.6300, 1.0, 0.0, 1.0, 0.0, 1.0]},
+ 'wic': {'ocb': [1.0298, -1.1249, 1.0, -0.7380, 1.0, 0.1838, 1.0,
+ -0.6171, 1.0, 0.0, 1.0, 0.0, 1.0],
+ 'eab': [-0.4935, -2.1186, 1.0, 0.3188, 1.0, 0.5749, 1.0,
+ -0.3118, 1.0, 0.0, 1.0, 0.0, 1.0]}}
+
+ # Check the inputs
+ inst = instrument.lower()
+ if inst not in coeff.keys():
raise ValueError("no harmonic correction for {:}".format(instrument))
method = method.lower()
- coeff = {'median': [3.31000535, -0.5452934, -1.24389141, 2.42619653,
- -0.66677988, -1.03467488, -0.30763009, 0.52426756,
- 0.04359299, 0.60201848, 0.50618522, 1.04360529,
- 0.25186405],
- 'gaussian': [3.80100827, 0.98555723, -3.43760943, 1.85084271,
- -0.36730751, -0.81975654, -1.02823832, 1.30637288,
- -0.53599218, 0.40380183, -1.22462708, -1.2733629,
- -0.62743381]}
-
- if method not in coeff.keys():
- raise ValueError("unknown coefficient computation method")
+ if method not in coeff[inst].keys():
+ raise ValueError("".join(["unknown coefficient computation method, ",
+ "expects one of: {:}".format(
+ coeff[inst].keys())]))
+ # Calculate the offset
rad_mlt = hr2rad(mlt)
- r_corr = coeff[method][0] \
- + coeff[method][1] * np.cos(rad_mlt + coeff[method][2]) \
- + coeff[method][3] * np.sin(rad_mlt + coeff[method][4]) \
- + coeff[method][5] * np.cos(2.0 * (rad_mlt + coeff[method][6])) \
- + coeff[method][7] * np.sin(2.0 * (rad_mlt + coeff[method][8])) \
- + coeff[method][9] * np.cos(3.0 * (rad_mlt + coeff[method][10])) \
- + coeff[method][11] * np.sin(3.0 * (rad_mlt + coeff[method][12]))
+ r_corr = coeff[inst][method][0] \
+ + coeff[inst][method][1] * np.cos(rad_mlt + coeff[inst][method][2]) \
+ + coeff[inst][method][3] * np.sin(rad_mlt + coeff[inst][method][4]) \
+ + coeff[inst][method][5] * np.cos(2.0 * (
+ rad_mlt + coeff[inst][method][6])) \
+ + coeff[inst][method][7] * np.sin(2.0 * (
+ rad_mlt + coeff[inst][method][8])) \
+ + coeff[inst][method][9] * np.cos(3.0 * (
+ rad_mlt + coeff[inst][method][10])) \
+ + coeff[inst][method][11] * np.sin(3.0 * (
+ rad_mlt + coeff[inst][method][12]))
# Because this is a poleward shift, return the negative of the correction
return -r_corr
| aburrell/ocbpy | ce1ee978f2f1f31e8777bed474750dcef493ee31 | diff --git a/ocbpy/tests/test_ocb_correction.py b/ocbpy/tests/test_ocb_correction.py
index fc2f648..787cc62 100644
--- a/ocbpy/tests/test_ocb_correction.py
+++ b/ocbpy/tests/test_ocb_correction.py
@@ -3,8 +3,7 @@
# Copyright (C) 2017, AGB & GC
# Full license can be found in License.md
# -----------------------------------------------------------------------------
-""" Tests the ocboundary class and functions
-"""
+"""Tests the ocboundary class and functions."""
import numpy as np
import unittest
@@ -13,38 +12,48 @@ from ocbpy import ocb_correction as ocb_cor
class TestOCBCorrectionFailure(unittest.TestCase):
+ """Unit tests for correction failure evaluations."""
+
def setUp(self):
- """ Set up the test runs """
+ """Set up the test runs."""
self.mlt = 12.0
self.bad_kwarg = 'bad_kwarg'
self.functions = {'elliptical': ocb_cor.elliptical,
'harmonic': ocb_cor.harmonic}
self.bound = None
+ return
def tearDown(self):
- """ Tear down the test runs """
+ """Tear down the test runs."""
del self.mlt, self.bad_kwarg, self.functions, self.bound
+ return
def test_instrument_failure(self):
- """ Test failure when an unknown instrument is provided """
+ """Test failure when an unknown instrument is provided."""
for self.bound in self.functions.keys():
with self.subTest(bound=self.bound):
- with self.assertRaises(ValueError):
+ msg = "no {:s} correction for".format(self.bound)
+ with self.assertRaisesRegex(ValueError, msg):
self.functions[self.bound](self.mlt,
instrument=self.bad_kwarg)
+ return
def test_method_failure(self):
- """ Test failure when an unknown method is provided """
+ """Test failure when an unknown method is provided."""
+ msg = "unknown coefficient computation method"
for self.bound in self.functions.keys():
with self.subTest(bound=self.bound):
- with self.assertRaises(ValueError):
+ with self.assertRaisesRegex(ValueError, msg):
self.functions[self.bound](self.mlt, method=self.bad_kwarg)
+ return
class TestOCBCorrection(unittest.TestCase):
+ """Unit tests for the boundary correction functions."""
+
def setUp(self):
- """ Set up test runs """
+ """Set up test runs."""
self.functions = {'circular': ocb_cor.circular,
'elliptical': ocb_cor.elliptical,
'harmonic': ocb_cor.harmonic}
@@ -56,36 +65,65 @@ class TestOCBCorrection(unittest.TestCase):
-3.4392638193624325])}
self.gaus_results = {'elliptical': -2.51643691301747,
'harmonic': -2.293294645880221}
+ self.image_results = {'si12': {'ocb': np.array([0.67103129,
+ -0.10084541]),
+ 'eab': np.array([0.31691853,
+ 2.68970685])},
+ 'si13': {'ocb': np.array([0.71521016,
+ -0.86843608]),
+ 'eab': np.array([1.55220967,
+ -0.19812977])},
+ 'wic': {'ocb': np.array([0.83660688,
+ -1.62097642]),
+ 'eab': np.array([1.89268527,
+ 0.13983824])}}
self.bound = None
+ return
def tearDown(self):
- """ Clean up after each test """
+ """Clean up after each test."""
del self.mlt, self.functions, self.def_results, self.gaus_results
- del self.bound
+ del self.bound, self.image_results
+ return
def test_default_float(self):
- """ Test the boundary functions using a float and function defaults"""
+ """Test the boundary functions using a float and function defaults."""
for self.bound in self.functions.keys():
with self.subTest(bound=self.bound):
self.assertEqual(self.functions[self.bound](self.mlt[0]),
self.def_results[self.bound][0])
+ return
def test_default_arr(self):
- """ Test the boundary functions using an array and function defaults"""
+ """Test the boundary functions using an array and function defaults."""
for self.bound in self.functions.keys():
with self.subTest(bound=self.bound):
self.assertTrue(np.all(
abs(self.functions[self.bound](self.mlt)
- self.def_results[self.bound]) < 1.0e-7))
+ return
def test_circular_offset(self):
- """ Test the circular boundary function with an offset """
+ """Test the circular boundary function with an offset."""
self.assertEqual(ocb_cor.circular(self.mlt[0], r_add=1.0), 1.0)
+ return
def test_gauss_method(self):
- """ Test the boundary functions using an array and function defaults"""
+ """Test the boundary functions using an array and function defaults."""
for self.bound in self.gaus_results.keys():
with self.subTest(bound=self.bound):
self.assertAlmostEqual(
self.functions[self.bound](self.mlt[0], method='gaussian'),
self.gaus_results[self.bound])
+ return
+
+ def test_image_harmonic(self):
+ """Test the IMAGE harmonic correction functions."""
+ for self.bound in self.image_results.keys():
+ for method in self.image_results[self.bound].keys():
+ with self.subTest(bound=self.bound, method=method):
+ self.assertTrue(np.all(
+ abs(self.functions["harmonic"](
+ self.mlt, instrument=self.bound, method=method)
+ - self.image_results[self.bound][method]) < 1.0e-7))
+ return
| BUG: individual IMAGE instrument OCBs
**Describe the bug**
The new individual IMAGE "ocb" files are actually the poleward edge of the auroral oval without the DMSP correction.
**Expected behavior**
A clear explanation of what they are.
**To Fix**
- [x] Update documentation
| 0.0 | ce1ee978f2f1f31e8777bed474750dcef493ee31 | [
"ocbpy/tests/test_ocb_correction.py::TestOCBCorrection::test_image_harmonic"
] | [
"ocbpy/tests/test_ocb_correction.py::TestOCBCorrectionFailure::test_instrument_failure",
"ocbpy/tests/test_ocb_correction.py::TestOCBCorrectionFailure::test_method_failure",
"ocbpy/tests/test_ocb_correction.py::TestOCBCorrection::test_circular_offset",
"ocbpy/tests/test_ocb_correction.py::TestOCBCorrection::test_default_arr",
"ocbpy/tests/test_ocb_correction.py::TestOCBCorrection::test_default_float",
"ocbpy/tests/test_ocb_correction.py::TestOCBCorrection::test_gauss_method"
] | {
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false
} | 2022-04-05 19:29:16+00:00 | bsd-3-clause | 863 |
|
achabotl__popthings-4 | diff --git a/popthings.py b/popthings.py
index eeda9e6..4310e2d 100644
--- a/popthings.py
+++ b/popthings.py
@@ -24,7 +24,7 @@ what's possible. They would be added to Things as two separate projects.
Note under project 1
- Task 1 @due($start + 1w) @$where
A note under task 1
- - Task 2 @start($start)
+ - Task 2 @start($start - 1)
- Checklist item under task 2
- Also a checklist item under task 2
Heading 1:
@@ -40,9 +40,11 @@ Attributes
----------
"""
+from datetime import datetime, timedelta
from io import open
import json
import logging
+import operator
import re
import sys
try:
@@ -87,6 +89,65 @@ PATTERN_TAG = re.compile(r"""(?:^|\s+)@ # space and @ before tag
# space or EOL
""", re.VERBOSE)
+# Patterns to match dates and dates with day offsets
+ISO_DATE_RE = re.compile(r"^\d{4}-\d{2}-\d{2}$")
+DATE_OFFSET_RE = re.compile(r"""
+ (?P<base_date>\d{4}-\d{2}-\d{2})
+ \s*
+ (?P<op>[+-])
+ \s*
+ (?P<offset_count>\d+)
+ """, re.X)
+
+
+def compute_date(date_str):
+ """
+ Compute new date string given dates with day offsets.
+
+ Parameters
+ ==========
+ date_str : string
+ Text string representing a date. The format must be a standalone date:
+ 'YYYY-MM-DD', a date with a day offset: 'YYYY-MM-DD + 1' or 'YYYY-MM-DD
+ - 10'. Other values as just passed through, assuming Things knows how
+ to handled them, like 'today', 'tomorrow', 'evening', 'next month',
+ etc.
+
+ Returns
+ =======
+ new_date_str : string
+ Date where the offset has been added or subtracted from the date.
+
+ """
+ date_str = date_str.strip()
+
+ # Simple ISO date
+ if ISO_DATE_RE.match(date_str):
+ return date_str
+
+ elif DATE_OFFSET_RE.match(date_str):
+ # Precompute dates with offsets of days
+ m = DATE_OFFSET_RE.match(date_str)
+ if m is None:
+ raise ValueError(f"Unable to parse date '{date_str}' as 'YYYY-MM-DD+/-offset' date.")
+
+ try:
+ op = {
+ '-': operator.sub,
+ '+': operator.add,
+ }.get(m.group('op'))
+ except KeyError:
+ raise ValueError(f"Unable to parse date {date_str} as YYYY-MM-DD +/- D date. The operator is wrong.")
+
+ count = int(m.group('offset_count'))
+ date_delta = timedelta(count)
+
+ date = datetime.strptime(m.group("base_date"), '%Y-%m-%d')
+
+ return op(date, date_delta).strftime('%Y-%m-%d')
+
+ return date_str
+
class TPNode(object):
def __init__(self, line, text, indent, type, line_number=None, tags=None):
@@ -442,8 +503,8 @@ class _ThingsRichObject(ThingsObject):
"""
super(_ThingsRichObject, self).__init__(title)
self.notes = notes
- self.when = when
- self.deadline = deadline
+ self.when = compute_date(when) if when else when
+ self.deadline = compute_date(deadline) if deadline else deadline
if tags is None:
tags = []
self.tags = tags
| achabotl/popthings | 29523615476639f8e844768ef7730258bd7d13ad | diff --git a/tests/test_date_parsing.py b/tests/test_date_parsing.py
new file mode 100644
index 0000000..127090a
--- /dev/null
+++ b/tests/test_date_parsing.py
@@ -0,0 +1,33 @@
+from unittest import TestCase
+from popthings import compute_date
+
+
+class TestDateParsing(TestCase):
+ def test_iso_date(self):
+ """ ISO formatted dates: YYYY-MM-DD."""
+ date = "2018-12-31"
+ self.assertEqual(date, compute_date(date))
+
+ def test_iso_date_with_space(self):
+ date = " 2018-12-31 "
+ self.assertEqual("2018-12-31", compute_date(date))
+
+ def test_invalid_date(self):
+ self.assertEqual("31/12/2018", compute_date("31/12/2018"))
+
+ def test_iso_date_with_positive_offset(self):
+ date = "2018-12-30 + 1"
+ self.assertEqual("2018-12-31", compute_date(date))
+
+ def test_iso_date_with_negative_offset(self):
+ date = "2018-12-30 - 1"
+ self.assertEqual("2018-12-29", compute_date(date))
+
+ def test_iso_date_with_negative_large_offset(self):
+ date = "2018-12-30 - 10"
+ self.assertEqual("2018-12-20", compute_date(date))
+
+ def test_other_values(self):
+ for word in ('today', 'tomorrow', 'evening', 'anytime', 'someday', 'next month'):
+ with self.subTest(word=word):
+ self.assertEqual(word, compute_date(word))
| Parse dates in popthings instead of Things.app
As of Things 3.8, it still parse dates with a `-` as if it was a `+`, e.g., "Today - 1 day" is actually tomorrow…
I could write my own, possible with the help of `dateutil`, or consider using [dateparser](https://dateparser.readthedocs.io/en/latest/) by [Scraping Hub](https://blog.scrapinghub.com/2015/11/09/parse-natural-language-dates-with-dateparser).
| 0.0 | 29523615476639f8e844768ef7730258bd7d13ad | [
"tests/test_date_parsing.py::TestDateParsing::test_invalid_date",
"tests/test_date_parsing.py::TestDateParsing::test_iso_date",
"tests/test_date_parsing.py::TestDateParsing::test_iso_date_with_negative_large_offset",
"tests/test_date_parsing.py::TestDateParsing::test_iso_date_with_negative_offset",
"tests/test_date_parsing.py::TestDateParsing::test_iso_date_with_positive_offset",
"tests/test_date_parsing.py::TestDateParsing::test_iso_date_with_space",
"tests/test_date_parsing.py::TestDateParsing::test_other_values"
] | [] | {
"failed_lite_validators": [
"has_hyperlinks",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false
} | 2022-03-25 13:01:13+00:00 | mit | 864 |
|
acorg__dark-matter-569 | diff --git a/CHANGELOG.md b/CHANGELOG.md
index 3aac224..4781caa 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,3 +1,20 @@
+## 2.0.4 April 29, 2018
+
+* Added `--sampleIndexFilename` and `--pathogenIndexFilename` to
+ `proteins-to-pathogens.py`. These cause the writing of files containing
+ lines with an integer index, a space, then a sample or pathogen name.
+ These can be later used to identify the de-duplicated reads files for a
+ given sample or pathogen name.
+
+## 2.0.3 April 28, 2018
+
+* Added number of identical and positive amino acid matches to BLAST and
+ DIAMOND hsps.
+
+## 2.0.2 April 23, 2018
+
+* The protein grouper now de-duplicates read by id, not sequence.
+
## 2.0.1 April 23, 2018
* Fixed HTML tiny formatting error in `toHTML` method of `ProteinGrouper`
diff --git a/bin/proteins-to-pathogens.py b/bin/proteins-to-pathogens.py
index 004263a..3a3ce34 100755
--- a/bin/proteins-to-pathogens.py
+++ b/bin/proteins-to-pathogens.py
@@ -83,6 +83,22 @@ if __name__ == '__main__':
help=('An (optional) filename to write a pathogen-sample panel PNG '
'image to.'))
+ parser.add_argument(
+ '--sampleIndexFilename',
+ help=('An (optional) filename to write a sample index file to. '
+ 'Lines in the file will have an integer index, a space, and '
+ 'then the sample name. Only produced if --html is used '
+ '(because the pathogen-NNN-sample-MMM.fastq are only written '
+ 'in that case).'))
+
+ parser.add_argument(
+ '--pathogenIndexFilename',
+ help=('An (optional) filename to write a pathogen index file to. '
+ 'Lines in the file will have an integer index, a space, and '
+ 'then the pathogen name. Only produced if --html is used '
+ '(because the pathogen-NNN-sample-MMM.fastq are only written '
+ 'in that case).'))
+
parser.add_argument(
'--html', default=False, action='store_true',
help='If specified, output HTML instead of plain text.')
@@ -123,6 +139,16 @@ if __name__ == '__main__':
args = parser.parse_args()
+ if not args.html:
+ if args.sampleIndexFilename:
+ print('It does not make sense to use --sampleIndexFilename '
+ 'without also using --html', file=sys.stderr)
+ sys.exit(1)
+ if args.pathogenIndexFilename:
+ print('It does not make sense to use --pathogenIndexFilename '
+ 'without also using --html', file=sys.stderr)
+ sys.exit(1)
+
if args.proteinFastaFilename:
# Flatten lists of lists that we get from using both nargs='+' and
# action='append'. We use both because it allows people to use
@@ -153,6 +179,8 @@ if __name__ == '__main__':
if args.html:
print(grouper.toHTML(args.pathogenPanelFilename,
minProteinFraction=args.minProteinFraction,
- pathogenType=args.pathogenType))
+ pathogenType=args.pathogenType,
+ sampleIndexFilename=args.sampleIndexFilename,
+ pathogenIndexFilename=args.pathogenIndexFilename))
else:
print(grouper.toStr())
diff --git a/dark/__init__.py b/dark/__init__.py
index c8923d7..d0bf4db 100644
--- a/dark/__init__.py
+++ b/dark/__init__.py
@@ -5,4 +5,6 @@ if sys.version_info < (2, 7):
# Note that the version string must have the following format, otherwise it
# will not be found by the version() function in ../setup.py
-__version__ = '2.0.3'
+#
+# Remember to update ../CHANGELOG.md describing what's new in each version.
+__version__ = '2.0.4'
diff --git a/dark/proteins.py b/dark/proteins.py
index 6a1270f..8e6a850 100644
--- a/dark/proteins.py
+++ b/dark/proteins.py
@@ -178,6 +178,28 @@ class PathogenSampleFiles(object):
sampleIndex = self._samples[sampleName]
return self._readsFilenames[(pathogenIndex, sampleIndex)]
+ def writeSampleIndex(self, fp):
+ """
+ Write a file of sample indices and names, sorted by index.
+
+ @param fp: A file-like object, opened for writing.
+ """
+ print('\n'.join(
+ '%d %s' % (index, name) for (index, name) in
+ sorted((index, name) for (name, index) in self._samples.items())
+ ), file=fp)
+
+ def writePathogenIndex(self, fp):
+ """
+ Write a file of pathogen indices and names, sorted by index.
+
+ @param fp: A file-like object, opened for writing.
+ """
+ print('\n'.join(
+ '%d %s' % (index, name) for (index, name) in
+ sorted((index, name) for (name, index) in self._pathogens.items())
+ ), file=fp)
+
class ProteinGrouper(object):
"""
@@ -387,7 +409,8 @@ class ProteinGrouper(object):
return '\n'.join(result)
def toHTML(self, pathogenPanelFilename=None, minProteinFraction=0.0,
- pathogenType='viral'):
+ pathogenType='viral', sampleIndexFilename=None,
+ pathogenIndexFilename=None):
"""
Produce an HTML string representation of the pathogen summary.
@@ -398,6 +421,12 @@ class ProteinGrouper(object):
for that pathogen to be displayed.
@param pathogenType: A C{str} giving the type of the pathogen involved,
either 'bacterial' or 'viral'.
+ @param sampleIndexFilename: A C{str} filename to write a sample index
+ file to. Lines in the file will have an integer index, a space, and
+ then the sample name.
+ @param pathogenIndexFilename: A C{str} filename to write a pathogen
+ index file to. Lines in the file will have an integer index, a
+ space, and then the pathogen name.
@return: An HTML C{str} suitable for printing.
"""
if pathogenType not in ('bacterial', 'viral'):
@@ -411,6 +440,14 @@ class ProteinGrouper(object):
if pathogenPanelFilename:
self.pathogenPanel(pathogenPanelFilename)
+ if sampleIndexFilename:
+ with open(sampleIndexFilename, 'w') as fp:
+ self.pathogenSampleFiles.writeSampleIndex(fp)
+
+ if pathogenIndexFilename:
+ with open(pathogenIndexFilename, 'w') as fp:
+ self.pathogenSampleFiles.writePathogenIndex(fp)
+
pathogenNames = sorted(
pathogenName for pathogenName in self.pathogenNames
if self.maxProteinFraction(pathogenName) >= minProteinFraction)
@@ -494,7 +531,8 @@ class ProteinGrouper(object):
proteinFieldsDescription = [
'<p>',
- 'In all bullet point protein lists below, there are eight fields:',
+ 'In all bullet point protein lists below, there are the following '
+ 'fields:',
'<ol>',
'<li>Coverage fraction.</li>',
'<li>Median bit score.</li>',
| acorg/dark-matter | bb55d862e66a923688c1f4db4fdfc9467c8210f4 | diff --git a/test/test_proteins.py b/test/test_proteins.py
index ed8c49f..81bcccb 100644
--- a/test/test_proteins.py
+++ b/test/test_proteins.py
@@ -917,3 +917,35 @@ class TestPathogenSampleFiles(TestCase):
proteins['gi|327410| protein 77']['readLengths'])
self.assertEqual((2, 7),
proteins['gi|327409| ubiquitin']['readLengths'])
+
+ def testWriteSampleIndex(self):
+ """
+ The writeSampleIndex function must write a file with the expected
+ content.
+ """
+ pathogenSampleFiles = PathogenSampleFiles(None)
+ pathogenSampleFiles._samples = {
+ 'NEO11': 500,
+ 'NEO33': 24,
+ 'NEO66': 333,
+ }
+
+ fp = StringIO()
+ pathogenSampleFiles.writeSampleIndex(fp)
+ self.assertEqual('24 NEO33\n333 NEO66\n500 NEO11\n', fp.getvalue())
+
+ def testWritePathogenIndex(self):
+ """
+        The writePathogenIndex function must write a file with the expected
+ content.
+ """
+ pathogenSampleFiles = PathogenSampleFiles(None)
+ pathogenSampleFiles._pathogens = {
+ 'virus b': 4,
+ 'virus a': 3,
+ 'virus c': 9,
+ }
+
+ fp = StringIO()
+ pathogenSampleFiles.writePathogenIndex(fp)
+ self.assertEqual('3 virus a\n4 virus b\n9 virus c\n', fp.getvalue())
| The protein grouper should write a plain text index of pathogen index and name
Right now after running `proteins-to-pathogens.py` we are left with files with names like `pathogen-219-sample-228.fastq` but there is no simple way to match a pathogen name with its index. That information is in the `index.html` file but should also be in a text file so we can know which file to look in to get reads when we just have a pathogen name (e.g., from a different pipeline run). The `toStr` method could perhaps print it? | 0.0 | bb55d862e66a923688c1f4db4fdfc9467c8210f4 | [
"test/test_proteins.py::TestPathogenSampleFiles::testWritePathogenIndex",
"test/test_proteins.py::TestPathogenSampleFiles::testWriteSampleIndex"
] | [
"test/test_proteins.py::TestSplitNames::testNestedBrackets",
"test/test_proteins.py::TestSplitNames::testNoBrackets",
"test/test_proteins.py::TestSplitNames::testNormalCase",
"test/test_proteins.py::TestSplitNames::testTwoSetsOfBrackets",
"test/test_proteins.py::TestSplitNames::testWhitespaceStripping",
"test/test_proteins.py::TestGetPathogenProteinCounts::testNone",
"test/test_proteins.py::TestProteinGrouper::testAssetDir",
"test/test_proteins.py::TestProteinGrouper::testDuplicatePathogenProteinSample",
"test/test_proteins.py::TestProteinGrouper::testNoAssetDir",
"test/test_proteins.py::TestProteinGrouper::testNoFiles",
"test/test_proteins.py::TestProteinGrouper::testNoFilesToStr",
"test/test_proteins.py::TestProteinGrouper::testNoRegex",
"test/test_proteins.py::TestProteinGrouper::testOneLineInEachOfTwoFilesDifferentPathogens",
"test/test_proteins.py::TestProteinGrouper::testOneLineInEachOfTwoFilesDifferentPathogensTitle",
"test/test_proteins.py::TestProteinGrouper::testOneLineInEachOfTwoFilesSamePathogen",
"test/test_proteins.py::TestProteinGrouper::testOneLineInEachOfTwoFilesSamePathogenTitle",
"test/test_proteins.py::TestProteinGrouper::testOneLineInOneFile",
"test/test_proteins.py::TestProteinGrouper::testOneLineInOneFileFASTQ",
"test/test_proteins.py::TestProteinGrouper::testOneLineInOneFileTitle",
"test/test_proteins.py::TestProteinGrouper::testOneLineInOneFileToStr",
"test/test_proteins.py::TestProteinGrouper::testTwoLinesInOneFileDifferentPathogens",
"test/test_proteins.py::TestProteinGrouper::testTwoLinesInOneFileSamePathogen",
"test/test_proteins.py::TestProteinGrouper::testTwoLinesInOneFileTitle",
"test/test_proteins.py::TestProteinGrouper::testUnknownFormat",
"test/test_proteins.py::TestProteinGrouper::testUnknownPathogenType",
"test/test_proteins.py::TestPathogenSampleFiles::testUnknownFormat"
] | {
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false
} | 2018-04-29 11:55:47+00:00 | mit | 865 |
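A speculative usage sketch for the record above, showing the index files this patch introduces. `writeSampleIndex` emits one `index name` pair per line, sorted by index, which is what lets a `pathogen-NNN-sample-MMM.fastq` file be traced back to its sample. The sample names and the direct assignment to `_samples` mirror the tests and are illustrative only:

```python
from io import StringIO

from dark.proteins import PathogenSampleFiles

psf = PathogenSampleFiles(None)
psf._samples = {'NEO11': 2, 'NEO33': 0}  # invented sample names/indices

out = StringIO()
psf.writeSampleIndex(out)
print(out.getvalue(), end='')
# 0 NEO33
# 2 NEO11
```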
|
acorg__dark-matter-576 | diff --git a/Makefile b/Makefile
index 8bf842a..afdcfe8 100644
--- a/Makefile
+++ b/Makefile
@@ -1,10 +1,10 @@
.PHONY: check, pycodestyle, pyflakes, lint
check:
- python -m discover -v
+ env PYTHONPATH=. python -m discover -v
tcheck:
- trial --rterrors test
+ env PYTHONPATH=. trial --rterrors test
pycodestyle:
find . -path './.tox' -prune -o -path './build' -prune -o -path './dist' -prune -o -name '*.py' -print0 | xargs -0 pycodestyle
diff --git a/bin/proteins-to-pathogens.py b/bin/proteins-to-pathogens.py
index 3a3ce34..726d4a9 100755
--- a/bin/proteins-to-pathogens.py
+++ b/bin/proteins-to-pathogens.py
@@ -137,6 +137,12 @@ if __name__ == '__main__':
'contain the lengths of all reads that match proteins for a '
'pathogen.'))
+ parser.add_argument(
+ '--assetDir', default='out',
+ help=('The output directory where noninteractive-alignment-panel.py '
+ 'puts its HTML, plots and FASTA or FASTQ files, needed for '
+ 'using --html'))
+
args = parser.parse_args()
if not args.html:
@@ -162,7 +168,8 @@ if __name__ == '__main__':
else:
proteinFastaFilenames = None
- grouper = ProteinGrouper(sampleNameRegex=args.sampleNameRegex,
+ grouper = ProteinGrouper(assetDir=args.assetDir,
+ sampleNameRegex=args.sampleNameRegex,
format_=args.format,
proteinFastaFilenames=proteinFastaFilenames,
saveReadLengths=args.showReadLengths)
diff --git a/dark/__init__.py b/dark/__init__.py
index 0246a07..6a59296 100644
--- a/dark/__init__.py
+++ b/dark/__init__.py
@@ -7,4 +7,4 @@ if sys.version_info < (2, 7):
# will not be found by the version() function in ../setup.py
#
# Remember to update ../CHANGELOG.md describing what's new in each version.
-__version__ = '3.0.5'
+__version__ = '3.0.6'
diff --git a/dark/filter.py b/dark/filter.py
index 0665ffc..b0daa76 100644
--- a/dark/filter.py
+++ b/dark/filter.py
@@ -279,6 +279,23 @@ def addFASTAFilteringCommandLineOptions(parser):
help=('A file of (1-based) sequence numbers to retain. Numbers must '
'be one per line.'))
+ parser.add_argument(
+ '--idLambda', metavar='LAMBDA-FUNCTION',
+ help=('A one-argument function taking and returning a read id. '
+ 'E.g., --idLambda "lambda id: id.split(\'_\')[0]" or '
+ '--idLambda "lambda id: id[:10]". If the function returns None, '
+ 'the read will be filtered out.'))
+
+ parser.add_argument(
+ '--readLambda', metavar='LAMBDA-FUNCTION',
+ help=('A one-argument function taking and returning a read. '
+ 'E.g., --readLambda "lambda r: Read(r.id.split(\'_\')[0], '
+              'r.sequence.strip(\'-\'))". Make sure to also modify the quality '
+ 'string if you change the length of a FASTQ sequence. If the '
+ 'function returns None, the read will be filtered out. The '
+ 'function will be passed to eval with the dark.reads classes '
+ 'Read, DNARead, AARead, etc. all in scope.'))
+
# A mutually exclusive group for --keepSites, --keepSitesFile,
# --removeSites, and --removeSitesFile.
group = parser.add_mutually_exclusive_group()
@@ -381,4 +398,5 @@ def parseFASTAFilteringCommandLineOptions(args, reads):
randomSubset=args.randomSubset, trueLength=args.trueLength,
sampleFraction=args.sampleFraction,
sequenceNumbersFile=args.sequenceNumbersFile,
+ idLambda=args.idLambda, readLambda=args.readLambda,
keepSites=keepSites, removeSites=removeSites)
diff --git a/dark/proteins.py b/dark/proteins.py
index 8e6a850..33e53eb 100644
--- a/dark/proteins.py
+++ b/dark/proteins.py
@@ -9,19 +9,9 @@ import numpy as np
from textwrap import fill
from collections import Counter
-try:
- import matplotlib
- if not os.environ.get('DISPLAY'):
- # Use non-interactive Agg backend
- matplotlib.use('Agg')
- import matplotlib.pyplot as plt
-except ImportError:
- import platform
- if platform.python_implementation() == 'PyPy':
- raise NotImplementedError(
- 'matplotlib is not supported under pypy')
- else:
- raise
+import matplotlib
+matplotlib.use('PDF')
+import matplotlib.pyplot as plt
from dark.dimension import dimensionalIterator
from dark.fasta import FastaReads
diff --git a/dark/reads.py b/dark/reads.py
index 42390e4..1074f78 100644
--- a/dark/reads.py
+++ b/dark/reads.py
@@ -740,8 +740,9 @@ class ReadFilter(object):
sequence identity.
@param removeDuplicatesById: If C{True} remove duplicated reads based
only on read id.
- @param removeDescriptions: If C{True} remove the description part of read
- ids (i.e., the part following the first whitespace).
+ @param removeDescriptions: If C{True} remove the description (the part
+ following the first whitespace) from read ids. The description is
+ removed after applying the function specified by --idLambda (if any).
@param modifier: If not C{None}, a function that is passed a read
and which either returns a read or C{None}. If it returns a read,
that read is passed through the filter. If it returns C{None},
@@ -791,6 +792,14 @@ class ReadFilter(object):
file containing (1-based) sequence numbers, in ascending order,
one per line. Only those sequences matching the given numbers will
be kept.
+ @param idLambda: If not C{None}, a C{str} Python lambda function
+ specification to use to modify read ids. The function is applied
+ before removing the description (if --removeDescriptions is also
+ specified).
+ @param readLambda: If not C{None}, a C{str} Python lambda function
+ specification to use to modify reads. The function will be passed,
+ and must return, a single Read (or one of its subclasses). This
+ function is called after the --idLambda function, if any.
@param keepSites: A set of C{int} 0-based sites (i.e., indices) in
sequences that should be kept. If C{None} (the default), all sites are
kept.
@@ -819,7 +828,8 @@ class ReadFilter(object):
removeDuplicates=False, removeDuplicatesById=False,
removeDescriptions=False, modifier=None, randomSubset=None,
trueLength=None, sampleFraction=None,
- sequenceNumbersFile=None, keepSites=None, removeSites=None):
+ sequenceNumbersFile=None, idLambda=None, readLambda=None,
+ keepSites=None, removeSites=None):
if randomSubset is not None:
if sampleFraction is not None:
@@ -929,6 +939,9 @@ class ReadFilter(object):
sampleFraction = None
self.sampleFraction = sampleFraction
+ self.idLambda = eval(idLambda) if idLambda else None
+ self.readLambda = eval(readLambda) if readLambda else None
+
def filter(self, read):
"""
Check if a read passes the filter.
@@ -1038,6 +1051,20 @@ class ReadFilter(object):
elif self.removeSites is not None:
read = read.newFromSites(self.removeSites, exclude=True)
+ if self.idLambda:
+ newId = self.idLambda(read.id)
+ if newId is None:
+ return False
+ else:
+ read.id = newId
+
+ if self.readLambda:
+ newRead = self.readLambda(read)
+ if newRead is None:
+ return False
+ else:
+ read = newRead
+
if self.removeDescriptions:
read.id = read.id.split()[0]
diff --git a/doc/mac.md b/doc/mac.md
index ae0d13f..15b1c08 100644
--- a/doc/mac.md
+++ b/doc/mac.md
@@ -59,3 +59,24 @@ and follow the instructions in that file to install `numpy`. Then run the
# Install a taxonomy database (optional)
See [taxonomy.md](taxonomy.md) for details.
+
+# Running the tests
+
+If you run the tests using `make tcheck` you may encounter the following
+error:
+
+```
+RuntimeError: Python is not installed as a framework. The Mac OS X backend
+will not be able to function correctly if Python is not installed as a
+framework. See the Python documentation for more information on installing
+Python as a framework on Mac OS X. Please either reinstall Python as a
+framework, or try one of the other backends. If you are using (Ana)Conda
+please install python.app and replace the use of 'python' with 'pythonw'. See
+ 'Working with Matplotlib on OSX' in the Matplotlib FAQ for more information.
+```
+
+You can solve this by editing ~/.matplotlib/matplotlibrc (you may have to create the ~/.matplotlib directory) and inserting the following line:
+
+```
+backend: TkAgg
+```
| acorg/dark-matter | 66f246ba9417430e3f00e94ca0abc88de59a92d4 | diff --git a/test/test_proteins.py b/test/test_proteins.py
index 81bcccb..5b0d5e9 100644
--- a/test/test_proteins.py
+++ b/test/test_proteins.py
@@ -255,6 +255,42 @@ class TestProteinGrouper(TestCase):
},
pg.pathogenNames)
+ def testOneLineInOneFileWithDifferentAssetDir(self):
+ """
+ If a protein grouper is given a different assetDir name,
+ the outDir needs to have that same name, as expected.
+ """
+ fp = StringIO(
+ '0.77 46.6 48.1 5 6 74 gi|327|X|I44.6 ubiquitin [Lausannevirus]\n')
+ pg = ProteinGrouper(assetDir='differentname')
+ pg.addFile('sample-filename', fp)
+ self.assertEqual(
+ {
+ 'Lausannevirus': {
+ 'sample-filename': {
+ 'proteins': {
+ 'gi|327|X|I44.6 ubiquitin': {
+ 'bestScore': 48.1,
+ 'bluePlotFilename': 'differentname/0.png',
+ 'coverage': 0.77,
+ 'readsFilename': 'differentname/0.fasta',
+ 'hspCount': 6,
+ 'index': 0,
+ 'medianScore': 46.6,
+ 'outDir': 'differentname',
+ 'proteinLength': 74,
+ 'proteinName': 'gi|327|X|I44.6 ubiquitin',
+ 'proteinURL': (
+ 'http://www.ncbi.nlm.nih.gov/nuccore/I44'),
+ 'readCount': 5,
+ },
+ },
+ 'uniqueReadCount': None,
+ },
+ }
+ },
+ pg.pathogenNames)
+
def testOneLineInOneFileFASTQ(self):
"""
If a protein grouper is given one file with one line, its pathogenNames
diff --git a/test/test_reads.py b/test/test_reads.py
index 4e51442..5d9cd3e 100644
--- a/test/test_reads.py
+++ b/test/test_reads.py
@@ -3126,6 +3126,52 @@ class TestReadsFiltering(TestCase):
six.assertRaisesRegex(self, ValueError, error, Reads().filter,
keepSites={4}, removeSites={5})
+ def testIdLambda(self):
+ """
+ A passed idLambda function should produce the expected read ids.
+ """
+ read = Read('id1', 'ATCGCC')
+ reads = Reads(initialReads=[read])
+ result = reads.filter(idLambda='lambda id: "x-" + id.upper()')
+ self.assertEqual('x-ID1', list(result)[0].id)
+
+ def testIdLambdaReturningNone(self):
+ """
+ A passed idLambda function should produce the expected read ids,
+ including when it returns None.
+ """
+ read1 = Read('id1', 'ATCGCC')
+ read2 = Read('id2', 'GGATCG')
+ reads = Reads(initialReads=[read1, read2])
+ result = reads.filter(
+ idLambda='lambda id: "aa" if id.find("1") > -1 else None')
+ (result,) = list(result)
+ self.assertEqual('aa', result.id)
+
+ def testReadLambda(self):
+ """
+ A passed readLambda function should produce the expected reads.
+ """
+ read = Read('id1', 'ATCGCC')
+ reads = Reads(initialReads=[read])
+ result = reads.filter(readLambda='lambda r: Read("hey", "AAA")')
+ (result,) = list(result)
+ self.assertEqual(Read('hey', 'AAA'), result)
+
+ def testReadLambdaReturningNone(self):
+ """
+ A passed readLambda function should produce the expected reads,
+ including when it returns None.
+ """
+ read1 = Read('xid1', 'ATCGCC')
+ read2 = Read('yid2', 'GGATCG')
+ reads = Reads(initialReads=[read1, read2])
+ result = reads.filter(
+ readLambda=('lambda r: Read(r.id + "-x", r.sequence[:2]) '
+ 'if r.id.startswith("x") else None'))
+ (result,) = list(result)
+ self.assertEqual(Read('xid1-x', 'AT'), result)
+
class TestReadsInRAM(TestCase):
"""
| Add ability to give an anonymous Python function for read id conversion when filtering FASTA | 0.0 | 66f246ba9417430e3f00e94ca0abc88de59a92d4 | [
"test/test_reads.py::TestReadsFiltering::testIdLambda",
"test/test_reads.py::TestReadsFiltering::testIdLambdaReturningNone",
"test/test_reads.py::TestReadsFiltering::testReadLambda",
"test/test_reads.py::TestReadsFiltering::testReadLambdaReturningNone"
] | [
"test/test_proteins.py::TestSplitNames::testNestedBrackets",
"test/test_proteins.py::TestSplitNames::testNoBrackets",
"test/test_proteins.py::TestSplitNames::testNormalCase",
"test/test_proteins.py::TestSplitNames::testTwoSetsOfBrackets",
"test/test_proteins.py::TestSplitNames::testWhitespaceStripping",
"test/test_proteins.py::TestGetPathogenProteinCounts::testNone",
"test/test_proteins.py::TestProteinGrouper::testAssetDir",
"test/test_proteins.py::TestProteinGrouper::testDuplicatePathogenProteinSample",
"test/test_proteins.py::TestProteinGrouper::testNoAssetDir",
"test/test_proteins.py::TestProteinGrouper::testNoFiles",
"test/test_proteins.py::TestProteinGrouper::testNoFilesToStr",
"test/test_proteins.py::TestProteinGrouper::testNoRegex",
"test/test_proteins.py::TestProteinGrouper::testOneLineInEachOfTwoFilesDifferentPathogens",
"test/test_proteins.py::TestProteinGrouper::testOneLineInEachOfTwoFilesDifferentPathogensTitle",
"test/test_proteins.py::TestProteinGrouper::testOneLineInEachOfTwoFilesSamePathogen",
"test/test_proteins.py::TestProteinGrouper::testOneLineInEachOfTwoFilesSamePathogenTitle",
"test/test_proteins.py::TestProteinGrouper::testOneLineInOneFile",
"test/test_proteins.py::TestProteinGrouper::testOneLineInOneFileFASTQ",
"test/test_proteins.py::TestProteinGrouper::testOneLineInOneFileTitle",
"test/test_proteins.py::TestProteinGrouper::testOneLineInOneFileToStr",
"test/test_proteins.py::TestProteinGrouper::testOneLineInOneFileWithDifferentAssetDir",
"test/test_proteins.py::TestProteinGrouper::testTwoLinesInOneFileDifferentPathogens",
"test/test_proteins.py::TestProteinGrouper::testTwoLinesInOneFileSamePathogen",
"test/test_proteins.py::TestProteinGrouper::testTwoLinesInOneFileTitle",
"test/test_proteins.py::TestProteinGrouper::testUnknownFormat",
"test/test_proteins.py::TestProteinGrouper::testUnknownPathogenType",
"test/test_proteins.py::TestPathogenSampleFiles::testUnknownFormat",
"test/test_proteins.py::TestPathogenSampleFiles::testWritePathogenIndex",
"test/test_proteins.py::TestPathogenSampleFiles::testWriteSampleIndex",
"test/test_reads.py::TestRead::testCasePreservation",
"test/test_reads.py::TestRead::testCheckAlphabetAAReadMatchingReturnTrue",
"test/test_reads.py::TestRead::testCheckAlphabetAAReadNotMatchingRaise",
"test/test_reads.py::TestRead::testCheckAlphabetDNAReadMatchingReturnTrue",
"test/test_reads.py::TestRead::testCheckAlphabetDNAReadNotMatchingRaise",
"test/test_reads.py::TestRead::testCheckAlphabetwithReadMustBePermissive",
"test/test_reads.py::TestRead::testEquality",
"test/test_reads.py::TestRead::testEqualityWithDifferingIds",
"test/test_reads.py::TestRead::testEqualityWithDifferingQuality",
"test/test_reads.py::TestRead::testEqualityWithDifferingSequences",
"test/test_reads.py::TestRead::testEqualityWithNoQuality",
"test/test_reads.py::TestRead::testEqualityWithOneOmittedQuality",
"test/test_reads.py::TestRead::testExpectedAttributes",
"test/test_reads.py::TestRead::testFromDict",
"test/test_reads.py::TestRead::testFromDictNoQuality",
"test/test_reads.py::TestRead::testGetitemFullCopy",
"test/test_reads.py::TestRead::testGetitemId",
"test/test_reads.py::TestRead::testGetitemLength",
"test/test_reads.py::TestRead::testGetitemQuality",
"test/test_reads.py::TestRead::testGetitemReturnsNewRead",
"test/test_reads.py::TestRead::testGetitemReversed",
"test/test_reads.py::TestRead::testGetitemSequence",
"test/test_reads.py::TestRead::testGetitemSingleIndex",
"test/test_reads.py::TestRead::testGetitemWithStep",
"test/test_reads.py::TestRead::testHashDiffersIfIdDiffers",
"test/test_reads.py::TestRead::testHashDiffersIfQualityDiffers",
"test/test_reads.py::TestRead::testHashDiffersIfSequenceDiffers",
"test/test_reads.py::TestRead::testHashIdenticalNoQuality",
"test/test_reads.py::TestRead::testHashIdenticalWithQuality",
"test/test_reads.py::TestRead::testHashViaDict",
"test/test_reads.py::TestRead::testHashViaSet",
"test/test_reads.py::TestRead::testKeepSites",
"test/test_reads.py::TestRead::testKeepSitesAllSites",
"test/test_reads.py::TestRead::testKeepSitesNoSites",
"test/test_reads.py::TestRead::testKeepSitesOutOfRange",
"test/test_reads.py::TestRead::testKeepSitesWithQuality",
"test/test_reads.py::TestRead::testLength",
"test/test_reads.py::TestRead::testLowComplexityFraction",
"test/test_reads.py::TestRead::testLowComplexityFractionEmptySequence",
"test/test_reads.py::TestRead::testLowComplexityFractionOne",
"test/test_reads.py::TestRead::testLowComplexityFractionZero",
"test/test_reads.py::TestRead::testNoQuality",
"test/test_reads.py::TestRead::testRemoveSites",
"test/test_reads.py::TestRead::testRemoveSitesAllSites",
"test/test_reads.py::TestRead::testRemoveSitesNoSites",
"test/test_reads.py::TestRead::testRemoveSitesOutOfRange",
"test/test_reads.py::TestRead::testRemoveSitesWithQuality",
"test/test_reads.py::TestRead::testToDict",
"test/test_reads.py::TestRead::testToDictNoQuality",
"test/test_reads.py::TestRead::testToFASTA",
"test/test_reads.py::TestRead::testToFASTAWithQuality",
"test/test_reads.py::TestRead::testToFASTQ",
"test/test_reads.py::TestRead::testToFASTQWithNoQuality",
"test/test_reads.py::TestRead::testToUnknownFormat",
"test/test_reads.py::TestRead::testUnequalLengths",
"test/test_reads.py::TestRead::testWalkHSPExactMatch",
"test/test_reads.py::TestRead::testWalkHSPExactMatchWithGap",
"test/test_reads.py::TestRead::testWalkHSPLeftAndRightOverhangingMatch",
"test/test_reads.py::TestRead::testWalkHSPLeftAndRightOverhangingMatchNoWhiskers",
"test/test_reads.py::TestRead::testWalkHSPLeftOverhangingMatch",
"test/test_reads.py::TestRead::testWalkHSPLeftOverhangingMatchNoWhiskers",
"test/test_reads.py::TestRead::testWalkHSPRightOverhangingMatch",
"test/test_reads.py::TestRead::testWalkHSPRightOverhangingMatchNoWhiskers",
"test/test_reads.py::TestDNARead::testGetitemReturnsNewDNARead",
"test/test_reads.py::TestDNARead::testReverseComplement",
"test/test_reads.py::TestDNARead::testReverseComplementAmbiguous",
"test/test_reads.py::TestDNARead::testReverseComplementReversesQuality",
"test/test_reads.py::TestDNARead::testTranslationOfMultipleStopCodons",
"test/test_reads.py::TestDNARead::testTranslationOfStartCodonATG",
"test/test_reads.py::TestDNARead::testTranslationOfStopCodonTAG",
"test/test_reads.py::TestDNARead::testTranslationOfStopCodonTGA",
"test/test_reads.py::TestDNARead::testTranslations",
"test/test_reads.py::TestDNARead::testTranslationsOfEmptySequence",
"test/test_reads.py::TestDNARead::testTranslationsOfOneBaseSequence",
"test/test_reads.py::TestDNARead::testTranslationsOfTwoBaseSequence",
"test/test_reads.py::TestRNARead::testGetitemReturnsNewRNARead",
"test/test_reads.py::TestRNARead::testReverseComplement",
"test/test_reads.py::TestRNARead::testReverseComplementAmbiguous",
"test/test_reads.py::TestRNARead::testTranslationOfStopCodonUAA",
"test/test_reads.py::TestAARead::testCloseCloseORF",
"test/test_reads.py::TestAARead::testCloseCloseThenCloseCloseORF",
"test/test_reads.py::TestAARead::testCloseCloseThenCloseCloseThenCloseCloseORF",
"test/test_reads.py::TestAARead::testCloseCloseThenCloseCloseThenCloseCloseORFWithJunk",
"test/test_reads.py::TestAARead::testCloseCloseThenCloseCloseThenCloseOpenORF",
"test/test_reads.py::TestAARead::testCloseCloseThenCloseCloseThenCloseOpenORFWithJunk",
"test/test_reads.py::TestAARead::testCloseCloseThenCloseOpenORF",
"test/test_reads.py::TestAARead::testCloseOpenORF",
"test/test_reads.py::TestAARead::testCloseOpenORFWithMultipleStarts",
"test/test_reads.py::TestAARead::testGetitemReturnsNewAARead",
"test/test_reads.py::TestAARead::testNoStartCodon_GithubIssue239",
"test/test_reads.py::TestAARead::testORFsEmptySequence",
"test/test_reads.py::TestAARead::testORFsEmptySequenceWithStart",
"test/test_reads.py::TestAARead::testORFsEmptySequenceWithStartStop",
"test/test_reads.py::TestAARead::testORFsWithJustStartsAndStops",
"test/test_reads.py::TestAARead::testORFsWithOneStopCodon",
"test/test_reads.py::TestAARead::testORFsWithTwoStopCodons",
"test/test_reads.py::TestAARead::testOpenCloseORF",
"test/test_reads.py::TestAARead::testOpenCloseORFWithMultipleStops",
"test/test_reads.py::TestAARead::testOpenCloseThenCloseCloseThenCloseOpenORF",
"test/test_reads.py::TestAARead::testOpenCloseThenCloseCloseThenCloseOpenORFWithJunk",
"test/test_reads.py::TestAARead::testOpenCloseThenCloseOpenORF",
"test/test_reads.py::TestAARead::testOpenOpenORF",
"test/test_reads.py::TestAARead::testPropertiesCorrectTranslation",
"test/test_reads.py::TestAARead::testPropertyDetailsCorrectTranslation",
"test/test_reads.py::TestAAReadWithX::testAlphabet",
"test/test_reads.py::TestAAReadWithX::testAlphabetChecking",
"test/test_reads.py::TestAAReadWithX::testGetitemReturnsNewAAReadWithX",
"test/test_reads.py::TestAAReadORF::testClosedClosedId",
"test/test_reads.py::TestAAReadORF::testClosedOpenId",
"test/test_reads.py::TestAAReadORF::testFromDict",
"test/test_reads.py::TestAAReadORF::testOpenClosedId",
"test/test_reads.py::TestAAReadORF::testOpenLeft",
"test/test_reads.py::TestAAReadORF::testOpenOpenId",
"test/test_reads.py::TestAAReadORF::testOpenRight",
"test/test_reads.py::TestAAReadORF::testSequence",
"test/test_reads.py::TestAAReadORF::testStart",
"test/test_reads.py::TestAAReadORF::testStartGreaterThanStop",
"test/test_reads.py::TestAAReadORF::testStartNegative",
"test/test_reads.py::TestAAReadORF::testStop",
"test/test_reads.py::TestAAReadORF::testStopGreaterThanOriginalSequenceLength",
"test/test_reads.py::TestAAReadORF::testToDict",
"test/test_reads.py::TestSSAARead::testCorrectAttributes",
"test/test_reads.py::TestSSAARead::testFromDict",
"test/test_reads.py::TestSSAARead::testGetitemFullCopy",
"test/test_reads.py::TestSSAARead::testGetitemId",
"test/test_reads.py::TestSSAARead::testGetitemLength",
"test/test_reads.py::TestSSAARead::testGetitemReturnsNewRead",
"test/test_reads.py::TestSSAARead::testGetitemReversed",
"test/test_reads.py::TestSSAARead::testGetitemSequence",
"test/test_reads.py::TestSSAARead::testGetitemSingleIndex",
"test/test_reads.py::TestSSAARead::testGetitemStructure",
"test/test_reads.py::TestSSAARead::testGetitemWithStep",
"test/test_reads.py::TestSSAARead::testHashDiffersIfIdDiffers",
"test/test_reads.py::TestSSAARead::testHashDiffersIfSequenceDiffers",
"test/test_reads.py::TestSSAARead::testHashDiffersIfStructureDiffers",
"test/test_reads.py::TestSSAARead::testHashViaDict",
"test/test_reads.py::TestSSAARead::testHashViaSet",
"test/test_reads.py::TestSSAARead::testKeepSites",
"test/test_reads.py::TestSSAARead::testKeepSitesAllSites",
"test/test_reads.py::TestSSAARead::testKeepSitesNoSites",
"test/test_reads.py::TestSSAARead::testKeepSitesOutOfRange",
"test/test_reads.py::TestSSAARead::testReads",
"test/test_reads.py::TestSSAARead::testRemoveSites",
"test/test_reads.py::TestSSAARead::testRemoveSitesAllSites",
"test/test_reads.py::TestSSAARead::testRemoveSitesNoSites",
"test/test_reads.py::TestSSAARead::testRemoveSitesOutOfRange",
"test/test_reads.py::TestSSAARead::testSequenceLengthMatchesStructureLength",
"test/test_reads.py::TestSSAARead::testToDict",
"test/test_reads.py::TestSSAARead::testToString",
"test/test_reads.py::TestSSAARead::testToStringWithExplicitFastaFormat",
"test/test_reads.py::TestSSAARead::testToStringWithExplicitFastaSSFormat",
"test/test_reads.py::TestSSAARead::testToStringWithStructureSuffix",
"test/test_reads.py::TestSSAARead::testToStringWithUnknownFormat",
"test/test_reads.py::TestSSAAReadWithX::testCorrectAttributes",
"test/test_reads.py::TestSSAAReadWithX::testFromDict",
"test/test_reads.py::TestSSAAReadWithX::testGetitemFullCopy",
"test/test_reads.py::TestSSAAReadWithX::testGetitemId",
"test/test_reads.py::TestSSAAReadWithX::testGetitemLength",
"test/test_reads.py::TestSSAAReadWithX::testGetitemReturnsNewRead",
"test/test_reads.py::TestSSAAReadWithX::testGetitemReversed",
"test/test_reads.py::TestSSAAReadWithX::testGetitemSequence",
"test/test_reads.py::TestSSAAReadWithX::testGetitemSingleIndex",
"test/test_reads.py::TestSSAAReadWithX::testGetitemStructure",
"test/test_reads.py::TestSSAAReadWithX::testGetitemWithStep",
"test/test_reads.py::TestSSAAReadWithX::testHashDiffersIfIdDiffers",
"test/test_reads.py::TestSSAAReadWithX::testHashDiffersIfSequenceDiffers",
"test/test_reads.py::TestSSAAReadWithX::testHashDiffersIfStructureDiffers",
"test/test_reads.py::TestSSAAReadWithX::testHashViaDict",
"test/test_reads.py::TestSSAAReadWithX::testHashViaSet",
"test/test_reads.py::TestSSAAReadWithX::testKeepSites",
"test/test_reads.py::TestSSAAReadWithX::testKeepSitesAllSites",
"test/test_reads.py::TestSSAAReadWithX::testKeepSitesNoSites",
"test/test_reads.py::TestSSAAReadWithX::testKeepSitesOutOfRange",
"test/test_reads.py::TestSSAAReadWithX::testReads",
"test/test_reads.py::TestSSAAReadWithX::testRemoveSites",
"test/test_reads.py::TestSSAAReadWithX::testRemoveSitesAllSites",
"test/test_reads.py::TestSSAAReadWithX::testRemoveSitesNoSites",
"test/test_reads.py::TestSSAAReadWithX::testRemoveSitesOutOfRange",
"test/test_reads.py::TestSSAAReadWithX::testSequenceContainingX",
"test/test_reads.py::TestSSAAReadWithX::testSequenceLengthMatchesStructureLength",
"test/test_reads.py::TestSSAAReadWithX::testToDict",
"test/test_reads.py::TestSSAAReadWithX::testToString",
"test/test_reads.py::TestSSAAReadWithX::testToStringWithExplicitFastaFormat",
"test/test_reads.py::TestSSAAReadWithX::testToStringWithExplicitFastaSSFormat",
"test/test_reads.py::TestSSAAReadWithX::testToStringWithStructureSuffix",
"test/test_reads.py::TestSSAAReadWithX::testToStringWithUnknownFormat",
"test/test_reads.py::TestTranslatedRead::testExpectedAttributes",
"test/test_reads.py::TestTranslatedRead::testExpectedFrame",
"test/test_reads.py::TestTranslatedRead::testFromDict",
"test/test_reads.py::TestTranslatedRead::testId",
"test/test_reads.py::TestTranslatedRead::testIdReverseComplemented",
"test/test_reads.py::TestTranslatedRead::testMaximumORFLength",
"test/test_reads.py::TestTranslatedRead::testMaximumORFLengthNoStops",
"test/test_reads.py::TestTranslatedRead::testOutOfRangeFrame",
"test/test_reads.py::TestTranslatedRead::testReverseComplemented",
"test/test_reads.py::TestTranslatedRead::testSequence",
"test/test_reads.py::TestTranslatedRead::testToDict",
"test/test_reads.py::TestReadClassNameToClass::testNames",
"test/test_reads.py::TestReads::testEmptyInitialReads",
"test/test_reads.py::TestReads::testInitialReads",
"test/test_reads.py::TestReads::testManuallyAddedReads",
"test/test_reads.py::TestReads::testManuallyAddedReadsLength",
"test/test_reads.py::TestReads::testNoReads",
"test/test_reads.py::TestReads::testNoReadsLength",
"test/test_reads.py::TestReads::testRepeatedIter",
"test/test_reads.py::TestReads::testSaveAsFASTA",
"test/test_reads.py::TestReads::testSaveAsFASTQ",
"test/test_reads.py::TestReads::testSaveAsFASTQFailsOnReadWithNoQuality",
"test/test_reads.py::TestReads::testSaveFASTAIsDefault",
"test/test_reads.py::TestReads::testSaveReturnsReadCount",
"test/test_reads.py::TestReads::testSaveToFileDescriptor",
"test/test_reads.py::TestReads::testSaveWithUnknownFormat",
"test/test_reads.py::TestReads::testSaveWithUppercaseFormat",
"test/test_reads.py::TestReads::testSubclass",
"test/test_reads.py::TestReads::testSubclassLength",
"test/test_reads.py::TestReads::testSubclassWithAdditionalReads",
"test/test_reads.py::TestReads::testUnfilteredLengthAdditionalReads",
"test/test_reads.py::TestReads::testUnfilteredLengthAdditionalReadsAfterFiltering",
"test/test_reads.py::TestReads::testUnfilteredLengthBeforeIterating",
"test/test_reads.py::TestReads::testUnfilteredLengthInitialReads",
"test/test_reads.py::TestReads::testUnfilteredLengthInitialReadsAfterFiltering",
"test/test_reads.py::TestReads::testUnfilteredLengthInitialReadsIsReads",
"test/test_reads.py::TestReads::testUnfilteredLengthInitialReadsIsReadsWithAdditional",
"test/test_reads.py::TestReads::testUnfilteredLengthInitialSubclassThenFiltered",
"test/test_reads.py::TestReads::testUnfilteredLengthInitialSubclassWithAdditionalThenFiltered",
"test/test_reads.py::TestReads::testUnfilteredLengthInitialSubclassWithNoLen",
"test/test_reads.py::TestReads::testUnfilteredLengthNoReads",
"test/test_reads.py::TestReadsFiltering::testAddFiltersThenClearFilters",
"test/test_reads.py::TestReadsFiltering::testFilterBlacklist",
"test/test_reads.py::TestReadsFiltering::testFilterDoNotRemoveDescriptions",
"test/test_reads.py::TestReadsFiltering::testFilterDuplicates",
"test/test_reads.py::TestReadsFiltering::testFilterDuplicatesById",
"test/test_reads.py::TestReadsFiltering::testFilterHead",
"test/test_reads.py::TestReadsFiltering::testFilterHeadZero",
"test/test_reads.py::TestReadsFiltering::testFilterKeepSequences",
"test/test_reads.py::TestReadsFiltering::testFilterKeepSequencesNoSequences",
"test/test_reads.py::TestReadsFiltering::testFilterNegativeRegex",
"test/test_reads.py::TestReadsFiltering::testFilterNoArgs",
"test/test_reads.py::TestReadsFiltering::testFilterOnLengthEverythingMatches",
"test/test_reads.py::TestReadsFiltering::testFilterOnLengthNothingMatches",
"test/test_reads.py::TestReadsFiltering::testFilterOnMaxLength",
"test/test_reads.py::TestReadsFiltering::testFilterOnMinLength",
"test/test_reads.py::TestReadsFiltering::testFilterPositiveRegex",
"test/test_reads.py::TestReadsFiltering::testFilterRandomSubsetOfFiveFromFiveReads",
"test/test_reads.py::TestReadsFiltering::testFilterRandomSubsetOfFiveFromOneRead",
"test/test_reads.py::TestReadsFiltering::testFilterRandomSubsetOfOneFromOneRead",
"test/test_reads.py::TestReadsFiltering::testFilterRandomSubsetOfTwoFromFiveReads",
"test/test_reads.py::TestReadsFiltering::testFilterRandomSubsetOfZeroReads",
"test/test_reads.py::TestReadsFiltering::testFilterRandomSubsetSizeZeroNoReads",
"test/test_reads.py::TestReadsFiltering::testFilterRandomSubsetSizeZeroTwoReads",
"test/test_reads.py::TestReadsFiltering::testFilterRemoveDescriptions",
"test/test_reads.py::TestReadsFiltering::testFilterRemoveGaps",
"test/test_reads.py::TestReadsFiltering::testFilterRemoveGapsWithQuality",
"test/test_reads.py::TestReadsFiltering::testFilterRemoveSequences",
"test/test_reads.py::TestReadsFiltering::testFilterRemoveSequencesNoSequences",
"test/test_reads.py::TestReadsFiltering::testFilterReturnsReadInstance",
"test/test_reads.py::TestReadsFiltering::testFilterTruncateTitles",
"test/test_reads.py::TestReadsFiltering::testFilterWhitelist",
"test/test_reads.py::TestReadsFiltering::testFilterWithMinLengthEqualToMaxLength",
"test/test_reads.py::TestReadsFiltering::testFilterWithModifierThatChangesIds",
"test/test_reads.py::TestReadsFiltering::testFilterWithModifierThatOmits",
"test/test_reads.py::TestReadsFiltering::testFilterWithModifierThatOmitsAndChangesIds",
"test/test_reads.py::TestReadsFiltering::testFilteredReadsInstanceHasExpectedLength",
"test/test_reads.py::TestReadsFiltering::testKeepSites",
"test/test_reads.py::TestReadsFiltering::testKeepSitesAllSites",
"test/test_reads.py::TestReadsFiltering::testKeepSitesNoSites",
"test/test_reads.py::TestReadsFiltering::testKeepSitesOutOfRange",
"test/test_reads.py::TestReadsFiltering::testKeepSitesWithQuality",
"test/test_reads.py::TestReadsFiltering::testLineNumberFile",
"test/test_reads.py::TestReadsFiltering::testLineNumberFileEmpty",
"test/test_reads.py::TestReadsFiltering::testLineNumberFileFirstLineTooSmall",
"test/test_reads.py::TestReadsFiltering::testLineNumberFileNonAscending",
"test/test_reads.py::TestReadsFiltering::testLineNumberFileRunOutOfSequences",
"test/test_reads.py::TestReadsFiltering::testRemoveAndKeepSites",
"test/test_reads.py::TestReadsFiltering::testRemoveSites",
"test/test_reads.py::TestReadsFiltering::testRemoveSitesAllSites",
"test/test_reads.py::TestReadsFiltering::testRemoveSitesNoSites",
"test/test_reads.py::TestReadsFiltering::testRemoveSitesOutOfRange",
"test/test_reads.py::TestReadsFiltering::testRemoveSitesWithQuality",
"test/test_reads.py::TestReadsFiltering::testSampleFractionAndNoTrueLengthRaisesValueError",
"test/test_reads.py::TestReadsFiltering::testSampleFractionAndRandomSubsetRaisesValueError",
"test/test_reads.py::TestReadsFiltering::testSampleFractionOne",
"test/test_reads.py::TestReadsFiltering::testSampleFractionPointOne",
"test/test_reads.py::TestReadsFiltering::testSampleFractionZero",
"test/test_reads.py::TestReadsInRAM::testAdd",
"test/test_reads.py::TestReadsInRAM::testFromReads",
"test/test_reads.py::TestReadsInRAM::testNoReads",
"test/test_reads.py::TestReadsInRAM::testOneReadIndex",
"test/test_reads.py::TestReadsInRAM::testOneReadLength",
"test/test_reads.py::TestReadsInRAM::testOneReadList",
"test/test_reads.py::TestReadsInRAM::testSetItem",
"test/test_reads.py::TestReadsInRAM::testTwoReadsIndex",
"test/test_reads.py::TestReadsInRAM::testTwoReadsLength",
"test/test_reads.py::TestReadsInRAM::testTwoReadsList",
"test/test_reads.py::TestSummarizePosition::testCorrectFrequencies",
"test/test_reads.py::TestSummarizePosition::testExcludeShortSequences",
"test/test_reads.py::TestSummarizePosition::testFrequenciesNoReads",
"test/test_reads.py::TestSummarizePosition::testIndexLargerThanSequenceLength",
"test/test_reads.py::TestSummarizePosition::testNumberOfExclusionsNoReads",
"test/test_reads.py::TestSitesMatching::testAllMatches",
"test/test_reads.py::TestSitesMatching::testIgnoreCase",
"test/test_reads.py::TestSitesMatching::testMatchCase",
"test/test_reads.py::TestSitesMatching::testMultipleReadsAll",
"test/test_reads.py::TestSitesMatching::testMultipleReadsAllWithDifferingLengths",
"test/test_reads.py::TestSitesMatching::testMultipleReadsAny",
"test/test_reads.py::TestSitesMatching::testMultipleReadsAnyWithDifferingLengths",
"test/test_reads.py::TestSitesMatching::testNoMatches",
"test/test_reads.py::TestSitesMatching::testPartialMatch"
] | {
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files",
"has_many_hunks",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false
} | 2018-05-27 14:07:28+00:00 | mit | 866 |
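For the record above: the new `idLambda`/`readLambda` strings are passed to `eval` and applied to each read during filtering, with a `None` return dropping the read. A small sketch using the keyword arguments the patch adds to `Reads.filter` (the read ids and sequences are invented):

```python
from dark.reads import Read, Reads

reads = Reads(initialReads=[Read('sample_1_lane3', 'ATCG'),
                            Read('sample_2_lane7', 'GGCC')])

# Rewrite each id; a lambda returning None would filter the read out.
filtered = reads.filter(idLambda="lambda id: '_'.join(id.split('_')[:2])")
print([read.id for read in filtered])  # -> ['sample_1', 'sample_2']
```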
|
acorg__dark-matter-610 | diff --git a/CHANGELOG.md b/CHANGELOG.md
index d7f91b4..add466a 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,3 +1,9 @@
+## 3.0.23 June 22, 2018
+
+Changed the way reference sequence insertions are stored in a
+`dark.sam.PaddedSAM` instance to make it possible to tell which query
+sequences caused reference insertions.
+
## 3.0.22 June 21, 2018
Made `dark/sam.py` properly deal with secondary alignments that are missing
diff --git a/dark/__init__.py b/dark/__init__.py
index db6691a..e3d166b 100644
--- a/dark/__init__.py
+++ b/dark/__init__.py
@@ -7,4 +7,4 @@ if sys.version_info < (2, 7):
# will not be found by the version() function in ../setup.py
#
# Remember to update ../CHANGELOG.md describing what's new in each version.
-__version__ = '3.0.22'
+__version__ = '3.0.23'
diff --git a/dark/sam.py b/dark/sam.py
index f0ffa19..2165ea6 100644
--- a/dark/sam.py
+++ b/dark/sam.py
@@ -29,12 +29,12 @@ class PaddedSAM(object):
"""
def __init__(self, filename):
self.samfile = AlignmentFile(filename)
- # self.referenceInsertions will be keyed by offset into the reference
- # sequence. The inserted bases would need to begin at this offset. The
- # value will be a Counter whose keys are the nucleotides proposed for
- # insertion, with a value indicating how many times the nucleotide was
- # proposed for insertion at that offset.
- self.referenceInsertions = defaultdict(Counter)
+ # self.referenceInsertions will be keyed by query id (the query
+ # that would cause a reference insertion). The values will be lists
+ # of 2-tuples, with each 2-tuple containing an offset into the
+ # reference sequence and the C{str} of nucleotide that would be
+        # reference sequence and the C{str} of nucleotides that would be
+ self.referenceInsertions = defaultdict(list)
def close(self):
"""
@@ -182,6 +182,16 @@ class PaddedSAM(object):
if rcSuffix:
read.query_name += rcSuffix
+ # Adjust the query id if it's a duplicate and we're not
+ # allowing duplicates.
+ if allowDuplicateIds:
+ queryId = read.query_name
+ else:
+ count = idCount[read.query_name]
+ idCount[read.query_name] += 1
+ queryId = read.query_name + (
+ '' if count == 0 else '/%d' % count)
+
referenceStart = read.reference_start
atStart = True
queryIndex = 0
@@ -205,9 +215,9 @@ class PaddedSAM(object):
# query but record what would have been inserted into the
# reference.
atStart = False
- for i in range(length):
- self.referenceInsertions[referenceIndex + i][
- query[queryIndex + i]] += 1
+ self.referenceInsertions[queryId].append(
+ (referenceIndex,
+ query[queryIndex:queryIndex + length]))
elif operation == CDEL:
# Delete from the reference. Some bases from the reference
# would need to be deleted to continue the match. So we put
@@ -293,14 +303,7 @@ class PaddedSAM(object):
padChar * (referenceLength -
(referenceStart + len(alignedSequence))))
- if allowDuplicateIds:
- suffix = ''
- else:
- count = idCount[read.query_name]
- idCount[read.query_name] += 1
- suffix = '' if count == 0 else '/%d' % count
-
- yield Read('%s%s' % (read.query_name, suffix), paddedSequence)
+ yield Read(queryId, paddedSequence)
@contextmanager
| acorg/dark-matter | d75630ac7a4f6f99bd0c4e87737290ff971b92d5 | diff --git a/test/test_sam.py b/test/test_sam.py
index e31f801..f8fd7e9 100644
--- a/test/test_sam.py
+++ b/test/test_sam.py
@@ -347,8 +347,32 @@ class TestPaddedSAM(TestCase):
self.assertEqual(Read('query1', '-TCGG-----'), read)
self.assertEqual(
{
- 3: {'T': 1},
- 4: {'A': 1},
+ 'query1': [(3, 'TA')],
+ },
+ ps.referenceInsertions)
+ ps.close()
+
+ def testPrimaryAndSecondaryReferenceInsertion(self):
+ """
+ A primary and secondary insertion into the reference (of the same
+ query) must result in the expected padded sequences and the expected
+ value in the referenceInsertions dictionary.
+ """
+ data = '\n'.join([
+ '@SQ SN:ref1 LN:10',
+ 'query1 0 ref1 2 60 2M2I2M * 0 0 TCTAGG ZZZZZZ',
+ 'query1 256 ref1 4 60 2M3I1M * 0 0 * *',
+ ]).replace(' ', '\t')
+
+ with dataFile(data) as filename:
+ ps = PaddedSAM(filename)
+ (read1, read2) = list(ps.queries())
+ self.assertEqual(Read('query1', '-TCGG-----'), read1)
+ self.assertEqual(Read('query1/1', '---TCG----'), read2)
+ self.assertEqual(
+ {
+ 'query1': [(3, 'TA')],
+ 'query1/1': [(5, 'TAG')],
},
ps.referenceInsertions)
ps.close()
| SAM referenceInsertions should record which reads cause reference insertions
The code currently records all reference insertions but doesn't keep track of which reads caused them. That makes it impossible to ignore insertions according to the read that created them. | 0.0 | d75630ac7a4f6f99bd0c4e87737290ff971b92d5 | [
"test/test_sam.py::TestPaddedSAM::testPrimaryAndSecondaryReferenceInsertion",
"test/test_sam.py::TestPaddedSAM::testReferenceInsertion"
] | [
"test/test_sam.py::TestPaddedSAM::testAllMMatch",
"test/test_sam.py::TestPaddedSAM::testAllowDuplicateIds",
"test/test_sam.py::TestPaddedSAM::testDropDuplicates",
"test/test_sam.py::TestPaddedSAM::testDropSecondary",
"test/test_sam.py::TestPaddedSAM::testDropSupplementary",
"test/test_sam.py::TestPaddedSAM::testDuplicateIdDisambiguation",
"test/test_sam.py::TestPaddedSAM::testHardClipLeft",
"test/test_sam.py::TestPaddedSAM::testHardClipRight",
"test/test_sam.py::TestPaddedSAM::testKF414679SoftClipLeft",
"test/test_sam.py::TestPaddedSAM::testKeepQualityControlFailures",
"test/test_sam.py::TestPaddedSAM::testMinLength",
"test/test_sam.py::TestPaddedSAM::testMinLengthWithReferenceDeletion",
"test/test_sam.py::TestPaddedSAM::testMixedMatch",
"test/test_sam.py::TestPaddedSAM::testMixedMatchSpecificReference",
"test/test_sam.py::TestPaddedSAM::testMixedMatchSpecificReferenceButNoMatches",
"test/test_sam.py::TestPaddedSAM::testNotSecondaryAndNotSupplementaryWithNoSequence",
"test/test_sam.py::TestPaddedSAM::testQueryHardClipAndSoftClipProtrudesBothSides",
"test/test_sam.py::TestPaddedSAM::testQuerySoftClipLeft",
"test/test_sam.py::TestPaddedSAM::testQuerySoftClipProtrudesBothSides",
"test/test_sam.py::TestPaddedSAM::testQuerySoftClipProtrudesLeft",
"test/test_sam.py::TestPaddedSAM::testQuerySoftClipProtrudesRight",
"test/test_sam.py::TestPaddedSAM::testQuerySoftClipReachesLeftEdge",
"test/test_sam.py::TestPaddedSAM::testQuerySoftClipReachesRightEdge",
"test/test_sam.py::TestPaddedSAM::testQuerySoftClipRight",
"test/test_sam.py::TestPaddedSAM::testRcNeeded",
"test/test_sam.py::TestPaddedSAM::testRcSuffix",
"test/test_sam.py::TestPaddedSAM::testReferenceDeletion",
"test/test_sam.py::TestPaddedSAM::testReferenceDeletionAlternateChar",
"test/test_sam.py::TestPaddedSAM::testReferenceSkip",
"test/test_sam.py::TestPaddedSAM::testReferenceSkipAlternateChar",
"test/test_sam.py::TestPaddedSAM::testReferencesToStr",
"test/test_sam.py::TestPaddedSAM::testSecondaryWithNoPreviousSequence",
"test/test_sam.py::TestPaddedSAM::testSecondaryWithNoSequence",
"test/test_sam.py::TestPaddedSAM::testSupplementaryWithNoPreviousSequence",
"test/test_sam.py::TestPaddedSAM::testSupplementaryWithNoSequence",
"test/test_sam.py::TestPaddedSAM::testUnequalReferenceLengths",
"test/test_sam.py::TestPaddedSAM::testUnknownReferences"
] | {
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false
} | 2018-06-22 17:31:59+00:00 | mit | 867 |
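For the record above: after this change, `referenceInsertions` is keyed by query id, and each value is a list of `(reference offset, inserted bases)` 2-tuples, e.g. `{'query1': [(3, 'TA')]}` as in the tests. That makes per-read filtering of proposed insertions straightforward; a sketch follows, in which the SAM filename and the ignore set are placeholders:

```python
from dark.sam import PaddedSAM

ps = PaddedSAM('alignments.sam')  # placeholder filename
list(ps.queries())  # consume the generator to populate referenceInsertions

ignore = {'query1'}  # reads whose proposed insertions we distrust
kept = {queryId: insertions
        for queryId, insertions in ps.referenceInsertions.items()
        if queryId not in ignore}
ps.close()
```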
|
acorg__dark-matter-617 | diff --git a/CHANGELOG.md b/CHANGELOG.md
index 67af944..2065375 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,3 +1,11 @@
+## 3.0.28 July 3, 2018
+
+Added `alsoYieldAlignments` option to `PaddedSAM.queries` method to have
+the returned generator also yield the `pysam.AlignedSegment` instance with
+the gap-padded query sequence. This makes it possible to retrieve padded
+queries from SAM/BAM and generate SAM/BAM (or FASTQ) of some subset of the
+queries.
+
## 3.0.27 June 30, 2018
Added `bin/filter-sam.py` script.
diff --git a/bin/filter-sam.py b/bin/filter-sam.py
index 18b5b47..363fed1 100755
--- a/bin/filter-sam.py
+++ b/bin/filter-sam.py
@@ -38,26 +38,26 @@ if __name__ == '__main__':
'error message is printed unless --quiet is used).'))
parser.add_argument(
- '--dropUnmapped', default=False, action='store_true',
- help='If given, unmapped matches will not be output.')
+ '--dropUnmapped', default=False, action='store_true',
+ help='If given, unmapped matches will not be output.')
parser.add_argument(
- '--dropSecondary', default=False, action='store_true',
- help='If given, secondary matches will not be output.')
+ '--dropSecondary', default=False, action='store_true',
+ help='If given, secondary matches will not be output.')
parser.add_argument(
- '--dropSupplementary', default=False, action='store_true',
- help='If given, supplementary matches will not be output.')
+ '--dropSupplementary', default=False, action='store_true',
+ help='If given, supplementary matches will not be output.')
parser.add_argument(
- '--dropDuplicates', default=False, action='store_true',
- help=('If given, matches flagged as optical or PCR duplicates will '
- 'not be output.'))
+ '--dropDuplicates', default=False, action='store_true',
+ help=('If given, matches flagged as optical or PCR duplicates will '
+ 'not be output.'))
parser.add_argument(
- '--keepQCFailures', default=False, action='store_true',
- help=('If given, reads that are considered quality control failures '
- 'will be included in the output.'))
+ '--keepQCFailures', default=False, action='store_true',
+ help=('If given, reads that are considered quality control failures '
+ 'will be included in the output.'))
parser.add_argument(
'--referenceWhitelist', metavar='NAME', action='append',
@@ -106,17 +106,17 @@ if __name__ == '__main__':
if (filterRead(Read(alignment.query_name,
alignment.query_sequence,
- alignment.qual))
- and not (
- (alignment.is_unmapped and dropUnmapped) or
- (alignment.is_secondary and dropSecondary) or
- (alignment.is_supplementary and dropSupplementary) or
- (alignment.is_duplicate and dropDuplicates) or
- (alignment.is_qcfail and not keepQCFailures) or
- (referenceWhitelist is not None and
- alignment.reference_name not in referenceWhitelist) or
- (referenceBlacklist is not None and
- alignment.reference_name in referenceBlacklist))):
+ alignment.qual)) and
+ not (
+ (alignment.is_unmapped and dropUnmapped) or
+ (alignment.is_secondary and dropSecondary) or
+ (alignment.is_supplementary and dropSupplementary) or
+ (alignment.is_duplicate and dropDuplicates) or
+ (alignment.is_qcfail and not keepQCFailures) or
+ (referenceWhitelist is not None and
+ alignment.reference_name not in referenceWhitelist) or
+ (referenceBlacklist is not None and
+ alignment.reference_name in referenceBlacklist))):
kept += 1
save(alignment)
out.close()
diff --git a/dark/__init__.py b/dark/__init__.py
index dabfdd7..2f8e41d 100644
--- a/dark/__init__.py
+++ b/dark/__init__.py
@@ -7,4 +7,4 @@ if sys.version_info < (2, 7):
# will not be found by the version() function in ../setup.py
#
# Remember to update ../CHANGELOG.md describing what's new in each version.
-__version__ = '3.0.27'
+__version__ = '3.0.28'
diff --git a/dark/sam.py b/dark/sam.py
index f2bb28b..a5d9363 100644
--- a/dark/sam.py
+++ b/dark/sam.py
@@ -61,7 +61,7 @@ class PaddedSAM(object):
dropSecondary=False, dropSupplementary=False,
dropDuplicates=False, allowDuplicateIds=False,
keepQCFailures=False, rcNeeded=False, padChar='-',
- queryInsertionChar='N'):
+ queryInsertionChar='N', alsoYieldAlignments=False):
"""
Produce padded (with gaps) queries according to the CIGAR string and
reference sequence length for each matching query sequence.
@@ -107,13 +107,18 @@ class PaddedSAM(object):
is inserted as a 'missing' query character (i.e., a base that can
be assumed to have been lost due to an error) whose existence is
necessary for the match to continue.
+ @param alsoYieldAlignments: If C{True} the returned generator will
+ yield 2-tuples containing a padded query and the
+ C{pysam.AlignedSegment} for each query.
@raises UnequalReferenceLengthError: If C{referenceName} is C{None}
and the reference sequence lengths in the SAM/BAM file are not all
identical.
@raises UnknownReference: If C{referenceName} does not exist.
@return: A generator that yields C{Read} instances that are padded
with gap characters to align them to the length of the reference
- sequence.
+ sequence. See C{alsoYieldAlignments}, above, to have the generator
+ yield tuples also containing the corresponding
+ C{pysam.AlignedSegment}.
"""
samfile = self.samfile
@@ -142,24 +147,24 @@ class PaddedSAM(object):
MATCH_OPERATIONS = {CMATCH, CEQUAL, CDIFF}
lastQuery = None
- for lineNumber, read in enumerate(samfile.fetch(), start=1):
- if (read.is_unmapped or
- (read.is_secondary and dropSecondary) or
- (read.is_supplementary and dropSupplementary) or
- (read.is_duplicate and dropDuplicates) or
- (read.is_qcfail and not keepQCFailures) or
+ for lineNumber, alignment in enumerate(samfile.fetch(), start=1):
+ if (alignment.is_unmapped or
+ (alignment.is_secondary and dropSecondary) or
+ (alignment.is_supplementary and dropSupplementary) or
+ (alignment.is_duplicate and dropDuplicates) or
+ (alignment.is_qcfail and not keepQCFailures) or
(referenceId is not None and
- read.reference_id != referenceId)):
+ alignment.reference_id != referenceId)):
continue
- query = read.query_sequence
+ query = alignment.query_sequence
# Secondary (and presumably supplementary) alignments may have
# a '*' (None in pysam) SEQ field, indicating that the previous
# sequence should be used. This is best practice according to
# section 2.5.2 of https://samtools.github.io/hts-specs/SAMv1.pdf
if query is None:
- if read.is_secondary or read.is_supplementary:
+ if alignment.is_secondary or alignment.is_supplementary:
if lastQuery is None:
raise ValueError(
'Query line %d has an empty SEQ field, but no '
@@ -176,29 +181,29 @@ class PaddedSAM(object):
# due to it being reverse complimented for the alignment).
lastQuery = query
- if read.is_reverse:
+ if alignment.is_reverse:
if rcNeeded:
query = DNARead('id', query).reverseComplement().sequence
if rcSuffix:
- read.query_name += rcSuffix
+ alignment.query_name += rcSuffix
# Adjust the query id if it's a duplicate and we're not
# allowing duplicates.
if allowDuplicateIds:
- queryId = read.query_name
+ queryId = alignment.query_name
else:
- count = idCount[read.query_name]
- idCount[read.query_name] += 1
- queryId = read.query_name + (
+ count = idCount[alignment.query_name]
+ idCount[alignment.query_name] += 1
+ queryId = alignment.query_name + (
'' if count == 0 else '/%d' % count)
- referenceStart = read.reference_start
+ referenceStart = alignment.reference_start
atStart = True
queryIndex = 0
referenceIndex = referenceStart
alignedSequence = ''
- for operation, length in read.cigartuples:
+ for operation, length in alignment.cigartuples:
# The operations are tested in the order they appear in
# https://samtools.github.io/hts-specs/SAMv1.pdf It would be
@@ -303,7 +308,11 @@ class PaddedSAM(object):
padChar * (referenceLength -
(referenceStart + len(alignedSequence))))
- yield Read(queryId, paddedSequence)
+ read = Read(queryId, paddedSequence)
+ if alsoYieldAlignments:
+ yield (read, alignment)
+ else:
+ yield read
@contextmanager
| acorg/dark-matter | 252de3e0a17525dde6a5f4167ea1e54a70ba91ee | diff --git a/test/test_sam.py b/test/test_sam.py
index f8fd7e9..a6ee95a 100644
--- a/test/test_sam.py
+++ b/test/test_sam.py
@@ -706,3 +706,31 @@ class TestPaddedSAM(TestCase):
queries = ps.queries()
assertRaisesRegex(self, ValueError, error, list, queries)
ps.close()
+
+ def testAlsoYieldAlignments(self):
+ """
+ A request for queries and their pysam alignments should have the
+ expected result.
+ """
+ data = '\n'.join([
+ '@SQ SN:ref1 LN:10',
+ 'query1 0 ref1 2 60 2=2X2M * 0 0 TCTAGG 123456',
+ 'query2 0 ref1 2 60 2= * 0 0 TC XY',
+ ]).replace(' ', '\t')
+
+ with dataFile(data) as filename:
+ ps = PaddedSAM(filename)
+ ((read1, alignment1),
+ (read2, alignment2)) = list(ps.queries(alsoYieldAlignments=True))
+
+ self.assertEqual(Read('query1', '-TCTAGG---'), read1)
+ self.assertEqual('TCTAGG', alignment1.query_sequence)
+ self.assertEqual('123456', ''.join(
+ map(lambda x: chr(x + 33), alignment1.query_qualities)))
+
+ self.assertEqual(Read('query2', '-TC-------'), read2)
+ self.assertEqual('TC', alignment2.query_sequence)
+ self.assertEqual('XY', ''.join(
+ map(lambda x: chr(x + 33), alignment2.query_qualities)))
+
+ ps.close()
| Add an option to the `PaddedSAM.queries` method to have it also return the pysam.AlignedSegment | 0.0 | 252de3e0a17525dde6a5f4167ea1e54a70ba91ee | [
"test/test_sam.py::TestPaddedSAM::testAlsoYieldAlignments"
] | [
"test/test_sam.py::TestPaddedSAM::testAllMMatch",
"test/test_sam.py::TestPaddedSAM::testAllowDuplicateIds",
"test/test_sam.py::TestPaddedSAM::testDropDuplicates",
"test/test_sam.py::TestPaddedSAM::testDropSecondary",
"test/test_sam.py::TestPaddedSAM::testDropSupplementary",
"test/test_sam.py::TestPaddedSAM::testDuplicateIdDisambiguation",
"test/test_sam.py::TestPaddedSAM::testHardClipLeft",
"test/test_sam.py::TestPaddedSAM::testHardClipRight",
"test/test_sam.py::TestPaddedSAM::testKF414679SoftClipLeft",
"test/test_sam.py::TestPaddedSAM::testKeepQualityControlFailures",
"test/test_sam.py::TestPaddedSAM::testMinLength",
"test/test_sam.py::TestPaddedSAM::testMinLengthWithReferenceDeletion",
"test/test_sam.py::TestPaddedSAM::testMixedMatch",
"test/test_sam.py::TestPaddedSAM::testMixedMatchSpecificReference",
"test/test_sam.py::TestPaddedSAM::testMixedMatchSpecificReferenceButNoMatches",
"test/test_sam.py::TestPaddedSAM::testNotSecondaryAndNotSupplementaryWithNoSequence",
"test/test_sam.py::TestPaddedSAM::testPrimaryAndSecondaryReferenceInsertion",
"test/test_sam.py::TestPaddedSAM::testQueryHardClipAndSoftClipProtrudesBothSides",
"test/test_sam.py::TestPaddedSAM::testQuerySoftClipLeft",
"test/test_sam.py::TestPaddedSAM::testQuerySoftClipProtrudesBothSides",
"test/test_sam.py::TestPaddedSAM::testQuerySoftClipProtrudesLeft",
"test/test_sam.py::TestPaddedSAM::testQuerySoftClipProtrudesRight",
"test/test_sam.py::TestPaddedSAM::testQuerySoftClipReachesLeftEdge",
"test/test_sam.py::TestPaddedSAM::testQuerySoftClipReachesRightEdge",
"test/test_sam.py::TestPaddedSAM::testQuerySoftClipRight",
"test/test_sam.py::TestPaddedSAM::testRcNeeded",
"test/test_sam.py::TestPaddedSAM::testRcSuffix",
"test/test_sam.py::TestPaddedSAM::testReferenceDeletion",
"test/test_sam.py::TestPaddedSAM::testReferenceDeletionAlternateChar",
"test/test_sam.py::TestPaddedSAM::testReferenceInsertion",
"test/test_sam.py::TestPaddedSAM::testReferenceSkip",
"test/test_sam.py::TestPaddedSAM::testReferenceSkipAlternateChar",
"test/test_sam.py::TestPaddedSAM::testReferencesToStr",
"test/test_sam.py::TestPaddedSAM::testSecondaryWithNoPreviousSequence",
"test/test_sam.py::TestPaddedSAM::testSecondaryWithNoSequence",
"test/test_sam.py::TestPaddedSAM::testSupplementaryWithNoPreviousSequence",
"test/test_sam.py::TestPaddedSAM::testSupplementaryWithNoSequence",
"test/test_sam.py::TestPaddedSAM::testUnequalReferenceLengths",
"test/test_sam.py::TestPaddedSAM::testUnknownReferences"
] | {
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files",
"has_many_hunks",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false
} | 2018-07-03 11:07:54+00:00 | mit | 868 |
|
acorg__dark-matter-631 | diff --git a/CHANGELOG.md b/CHANGELOG.md
index 2468b6e..d82fc9b 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,3 +1,9 @@
+## 3.0.38 October 5, 2018
+
+Fixed [#630](https://github.com/acorg/dark-matter/issues/630) to deal with
+non-hard-clipped queries that have a CIGAR string that indicates they have
+been clipped.
+
## 3.0.37 October 1, 2018
Add a `--titlesJSONFile` option to `noninteractive-alignment-panel.py`.
diff --git a/dark/__init__.py b/dark/__init__.py
index 0c505ca..7513e6e 100644
--- a/dark/__init__.py
+++ b/dark/__init__.py
@@ -7,4 +7,4 @@ if sys.version_info < (2, 7):
# will not be found by the version() function in ../setup.py
#
# Remember to update ../CHANGELOG.md describing what's new in each version.
-__version__ = '3.0.37'
+__version__ = '3.0.38'
diff --git a/dark/sam.py b/dark/sam.py
index eef7854..0167fdc 100644
--- a/dark/sam.py
+++ b/dark/sam.py
@@ -65,6 +65,63 @@ def samReferencesToStr(filenameOrSamfile, indent=0):
return _references(sam)
+def _hardClip(sequence, cigartuples):
+ """
+ Hard clip (if necessary) a sequence.
+
+ @param sequence: A C{str} nucleotide sequence.
+ @param cigartuples: An iterable of (operation, length) tuples, specifying
+ matching as per the SAM specification.
+ @return: A hard-clipped C{str} sequence if hard-clipping is indicated by
+ the CIGAR operations and has not already been performed (as indicated
+ by the lengths of the sequence and the sum of the CIGAR operation
+ lengths).
+ """
+ hardClipCount = cigarLength = 0
+ for (operation, length) in cigartuples:
+ hardClipCount += operation == CHARD_CLIP
+ cigarLength += length
+
+ sequenceLength = len(sequence)
+ clipLeft = clipRight = 0
+
+ if hardClipCount == 0:
+ pass
+ elif hardClipCount == 1:
+ # Hard clip either at the start or the end.
+ if cigartuples[0][0] == CHARD_CLIP:
+ clipLeft = cigartuples[0][1]
+ if sequenceLength == cigarLength:
+ # The LHS hard clipping has not been done.
+ sequence = sequence[clipLeft:]
+ elif cigartuples[-1][0] == CHARD_CLIP:
+ clipRight = cigartuples[-1][1]
+ if sequenceLength == cigarLength:
+ # The RHS hard clipping has not been done.
+ sequence = sequence[:-clipRight]
+ else:
+ raise ValueError(
+ 'Invalid CIGAR tuples (%s) contains hard-clipping operation '
+ 'that is neither at the start nor the end of the sequence.' %
+ (cigartuples,))
+ elif hardClipCount == 2:
+ # Hard clip at both the start and end.
+ assert cigartuples[0][0] == cigartuples[-1][0] == CHARD_CLIP
+ clipLeft, clipRight = cigartuples[0][1], cigartuples[-1][1]
+ if sequenceLength == cigarLength:
+ # The hard clipping has not been done.
+ sequence = sequence[clipLeft:-clipRight]
+ else:
+ raise ValueError(
+ 'Invalid CIGAR tuples (%s) specifies hard-clipping %d times (2 '
+ 'is the maximum).' % (cigartuples, hardClipCount))
+
+ assert len(sequence) + clipLeft + clipRight == cigarLength, (
+ '%d + %d + %d != %d' % (len(sequence), clipLeft, clipRight,
+ cigarLength))
+ return sequence
+
+
class SAMFilter(object):
"""
Filter a SAM/BAM file.
@@ -378,16 +435,16 @@ class PaddedSAM(object):
'Query line %d has an empty SEQ field, but no '
'previous alignment is present.' % lineNumber)
else:
- query = lastQuery
+ query = _hardClip(lastQuery, alignment.cigartuples)
else:
raise InvalidSAM(
'Query line %d has an empty SEQ field, but the '
'alignment is not marked as secondary or '
'supplementary.' % lineNumber)
-
- # Remember the last query here (before we potentially modify it
- # due to it being reverse complimented for the alignment).
- lastQuery = query
+ else:
+ # Remember the last query here (before we potentially modify
+ # it due to it being reverse complimented for the alignment).
+ lastQuery = query
if alignment.is_reverse:
if rcNeeded:
@@ -495,8 +552,13 @@ class PaddedSAM(object):
if operation in _CONSUMES_REFERENCE:
referenceIndex += length
- # Sanity check that we consumed the entire query.
- assert queryIndex == len(query)
+ if queryIndex != len(query):
+ # Oops, we did not consume the entire query.
+ raise ValueError(
+ 'Query %s not fully consumed when parsing CIGAR string. '
+ 'Query %s (len %d), final query index %d, CIGAR: %r' %
+ (alignment.query_name, query, len(query), queryIndex,
+ alignment.cigartuples))
# We cannot test we consumed the entire reference. The CIGAR
# string applies to (and exhausts) the query but is silent
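The behaviour of the new `_hardClip` helper in the two interesting cases (values taken from the `testClipLeft` and `testClipLeftAlreadyDone` tests below):

```python
from pysam import CHARD_CLIP, CMATCH
from dark.sam import _hardClip

# Clipping still to be done: the three left-hand bases are removed.
print(_hardClip('CAACGT', ((CHARD_CLIP, 3), (CMATCH, 3))))  # -> 'CGT'
# Clipping already done: the sequence is returned unchanged.
print(_hardClip('CGT', ((CHARD_CLIP, 3), (CMATCH, 3))))     # -> 'CGT'
```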
| acorg/dark-matter | 845ae82628e8ca00f8a1a1abbe07b07c1ca26677 | diff --git a/test/test_sam.py b/test/test_sam.py
index 28c640b..35acba8 100644
--- a/test/test_sam.py
+++ b/test/test_sam.py
@@ -4,10 +4,12 @@ from tempfile import mkstemp
from os import close, unlink, write
from contextlib import contextmanager
+from pysam import CHARD_CLIP, CMATCH
+
from dark.reads import Read, ReadFilter
from dark.sam import (
PaddedSAM, SAMFilter, UnequalReferenceLengthError, UnknownReference,
- InvalidSAM, samReferencesToStr)
+ InvalidSAM, samReferencesToStr, _hardClip)
# These tests actually use the filesystem to read files. That's due to the API
@@ -273,6 +275,32 @@ class TestPaddedSAM(TestCase):
assertRaisesRegex(self, UnequalReferenceLengthError, error,
PaddedSAM, SAMFilter(filename))
+ def testQueryTooLong(self):
+ """
+ If the query sequence is longer than the total of the lengths in the
+ CIGAR operations, a ValueError must be raised.
+ """
+ # This test just returns. It used to be possible to reach the
+ # "Query ... not fully consumed when parsing CIGAR string."
+ # ValueError in sam.py, prior to the fix of
+ # https://github.com/acorg/dark-matter/issues/630 but it is not
+ # possible to get a CIGAR string that has a different total length
+ # from the sequence length through to our code in sam.py because
+ # pysam catches the error. I'm leaving this test here because it
+ # documents that the error checked for in sam.py cannot currently
+ # be reached and the test may become useful. For now it just returns.
+ return
+ data = '\n'.join([
+ '@SQ SN:ref1 LN:90',
+ 'query1 0 ref1 1 60 4M * 0 0 TCTAGG ZZZZZZ',
+ ]).replace(' ', '\t')
+
+ with dataFile(data) as filename:
+ ps = PaddedSAM(SAMFilter(filename))
+ error = ('^Query TCTAGG not fully consumed when parsing CIGAR '
+ 'string\\.')
+ assertRaisesRegex(self, ValueError, error, list, ps.queries())
+
def testAllMMatch(self):
"""
A simple all-'M' match must result in the expected padded sequence.
@@ -866,6 +894,33 @@ class TestPaddedSAM(TestCase):
self.assertEqual('XY', ''.join(
map(lambda x: chr(x + 33), read2.alignment.query_qualities)))
+ def testHardClippingInCIGARButQueryNotHardClipped(self):
+ """
+ As documented in https://github.com/acorg/dark-matter/issues/630 we
+ have to deal correctly with a case in which the CIGAR string says a
+ query should be hard clipped but the query sequence in the SAM file
+ actually isn't. This can be due to a prior alignment with a soft clip,
+ in which case the full query sequence has to be given before the
+ secondary alignment with the hard clip.
+ """
+ data = '\n'.join([
+ '@SQ SN:Chimp-D00220 LN:8',
+ '@SQ SN:D-AM494716 LN:8',
+ 'query1 0 Chimp-D00220 1 0 3S5M * 0 0 TTTTGGTT 12345678',
+ 'query1 256 D-AM494716 1 0 3H5M * 0 0 * *',
+ ]).replace(' ', '\t')
+
+ with dataFile(data) as filename:
+ ps = PaddedSAM(SAMFilter(filename))
+ (read1, read2) = list(ps.queries(addAlignment=True))
+
+ self.assertEqual(Read('query1', 'TGGTT---'), read1)
+ self.assertEqual('TTTTGGTT', read1.alignment.query_sequence)
+
+ self.assertEqual(Read('query1/1', 'TGGTT---'), read2)
+ # pysam uses None for the query sequence on a secondary alignment.
+ self.assertIs(None, read2.alignment.query_sequence)
+
class TestSamReferencesToStr(TestCase):
"""
@@ -897,3 +952,104 @@ class TestSamReferencesToStr(TestCase):
with dataFile(data) as filename:
self.assertEqual(' id1 (length 90)\n id2 (length 91)',
samReferencesToStr(filename, indent=2))
+
+
+class TestHardClip(TestCase):
+ """
+ Test the _hardClip function.
+ """
+ def testCIGARLengthTooHigh(self):
+ """
+ If the total length of the CIGAR operations exceeds the length of the
+ sequence, an AssertionError must be raised.
+ """
+ self.assertRaises(AssertionError, _hardClip, 'CGT', ((CMATCH, 5),))
+
+ def testCIGARLengthTooLow(self):
+ """
+ If the total length of the CIGAR operations is less than the length of
+ the sequence, an AssertionError must be raised.
+ """
+ self.assertRaises(AssertionError, _hardClip, 'CGT', ((CMATCH, 2),))
+
+ def testHardClipInMiddle(self):
+ """
+ If hard clipping is given as an operation not at the beginning or end
+ of the sequence, a ValueError must be raised.
+ """
+ error = ('^Invalid CIGAR tuples .* contains hard-clipping operation '
+ 'that is neither at the start nor the end of the sequence\.$')
+ self.assertRaisesRegex(
+ ValueError, error,
+ _hardClip, 'CGT', ((CMATCH, 1), (CHARD_CLIP, 1), (CMATCH, 1),))
+
+ def testThreeHardClips(self):
+ """
+ If hard clipping is specified more than twice, a ValueError must be
+ raised.
+ """
+ error = ('^Invalid CIGAR tuples .* specifies hard-clipping 3 times '
+ '\(2 is the maximum\).$')
+ self.assertRaisesRegex(
+ ValueError, error,
+ _hardClip, 'CGT',
+ ((CHARD_CLIP, 1), (CHARD_CLIP, 1), (CHARD_CLIP, 1),))
+
+ def testNoClip(self):
+ """
+ If no hard clipping is indicated, the function must return the
+ original sequence.
+ """
+ self.assertEqual('CGT', _hardClip('CGT', ((CMATCH, 3),)))
+
+ def testClipLeft(self):
+ """
+ If hard clipping on the left is indicated, and has not been done,
+ the function must return the expected sequence.
+ """
+ self.assertEqual('CGT',
+ _hardClip('CAACGT', ((CHARD_CLIP, 3), (CMATCH, 3),)))
+
+ def testClipRight(self):
+ """
+ If hard clipping on the right is indicated, and has not been done,
+ the function must return the expected sequence.
+ """
+ self.assertEqual('CA',
+ _hardClip('CAACGT', ((CMATCH, 2), (CHARD_CLIP, 4),)))
+
+ def testClipBoth(self):
+ """
+ If hard clipping on the left and right is indicated, and has not been
+ done, the function must return the expected sequence.
+ """
+ self.assertEqual(
+ 'AA',
+ _hardClip('CAACGT',
+ ((CHARD_CLIP, 1), (CMATCH, 2), (CHARD_CLIP, 3),)))
+
+ def testClipLeftAlreadyDone(self):
+ """
+ If hard clipping on the left is indicated, and has already been done,
+ the function must return the expected sequence.
+ """
+ self.assertEqual('CGT',
+ _hardClip('CGT', ((CHARD_CLIP, 3), (CMATCH, 3),)))
+
+ def testClipRightAlreadyDone(self):
+ """
+ If hard clipping on the right is indicated, and has already been done,
+ the function must return the expected sequence.
+ """
+ self.assertEqual('CA',
+ _hardClip('CA', ((CMATCH, 2), (CHARD_CLIP, 4),)))
+
+ def testClipBothAlreadyDone(self):
+ """
+ If hard clipping on the left and right is indicated, and has already
+ been done, the function must return the expected sequence.
+ """
+ self.assertEqual(
+ 'AA',
+ _hardClip('AA',
+ ((CHARD_CLIP, 1), (CMATCH, 2), (CHARD_CLIP, 3),)))
| Change SAM filtering to allow for CIGAR strings that indicate hard clipping but that have sequences that are not clipped
It can happen that an aligner (`bwa mem -a` does this) can generate SAM lines like these two:
```
K00234:90:HTWVHBBXX:6:1101:1844:10493 0 Chimp-D00220 364 0 3S78M * 0 0
TTTTGGTTATCGCTGGATGTGTCTGCGGCGTTTTATCATCTTCCTCTTCATCCTGCTGCTATGCCTCATCTTATTGTTGGT
AAFFFJAJJJJJFJJJ7JJJJFJFJJFJFJAFJJJJAAF<FFFAF<JJ-FF<JF-AF7F<AJF--<-F7-AA-7JFJJFJ<
NM:i:1 MD:Z:69C8 AS:i:73 XS:i:73
K00234:90:HTWVHBBXX:6:1101:1844:10493 256 D-AM494716 364 0 3H78M * 0 0
* *
NM:i:1 MD:Z:69C8 AS:i:73
```
Those are actually just 2 lines in the SAM file, giving two alignments for the same read. The issue here is that the first alignment has a CIGAR string of `3S78M` (3 soft-clipped nucleotides, 78 matches) and, because it's a soft clip, the bases are still in the query string. On the next line we have `*` and `*` for the query and quality (because this is a secondary match, flag = 256) and a CIGAR string of `3H78M` (3 hard-clipped, 78 matches). Normally with hard clipping the bases are removed from the sequence, but in this case, because the query was first given with a soft clip, the bases are retained. So code that assumes hard-clipped bases have been removed will break.
You might think `bwa mem -a -H` would cause hard clipping to always be done (perhaps by repeating the (clipped) query sequence in secondary matches), but that's not what happens: bwa reverts to the non-`-a` behaviour.
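Whether the clipping has already been applied can be detected up front by comparing the query length with the length the CIGAR string implies. A minimal sketch of that check (the function name is only illustrative; `cigartuples` are pysam-style (operation, length) pairs):

```python
from pysam import CMATCH, CINS, CSOFT_CLIP, CHARD_CLIP, CEQUAL, CDIFF

# Operations whose bases are present in an unclipped query, plus the
# hard clips themselves (whose bases remain until clipping is done).
QUERY_OPS = {CMATCH, CINS, CSOFT_CLIP, CEQUAL, CDIFF, CHARD_CLIP}

def hardClippingAlreadyApplied(query, cigartuples):
    unclippedLength = sum(length for (operation, length) in cigartuples
                          if operation in QUERY_OPS)
    return len(query) != unclippedLength
```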
So our code in `dark/sam.py` will need to be smarter. It should figure out in advance if the query has actually been hard clipped and use that to decide whether a hard clip indicator in the CIGAR string should consume query bases or not. | 0.0 | 845ae82628e8ca00f8a1a1abbe07b07c1ca26677 | [
"test/test_sam.py::TestSAMFilter::testAlignmentCount",
"test/test_sam.py::TestSAMFilter::testDropDuplicates",
"test/test_sam.py::TestSAMFilter::testDropSecondary",
"test/test_sam.py::TestSAMFilter::testDropSupplementary",
"test/test_sam.py::TestSAMFilter::testKeepQualityControlFailures",
"test/test_sam.py::TestSAMFilter::testMaxScore",
"test/test_sam.py::TestSAMFilter::testMaxScoreNoScores",
"test/test_sam.py::TestSAMFilter::testMinAndMaxScore",
"test/test_sam.py::TestSAMFilter::testMinLength",
"test/test_sam.py::TestSAMFilter::testMinScore",
"test/test_sam.py::TestSAMFilter::testMinScoreNoScores",
"test/test_sam.py::TestSAMFilter::testStoreQueryIds",
"test/test_sam.py::TestSAMFilter::testUnknownReferences",
"test/test_sam.py::TestPaddedSAM::testAllMMatch",
"test/test_sam.py::TestPaddedSAM::testAllowDuplicateIds",
"test/test_sam.py::TestPaddedSAM::testAlsoYieldAlignments",
"test/test_sam.py::TestPaddedSAM::testDropDuplicates",
"test/test_sam.py::TestPaddedSAM::testDropSecondary",
"test/test_sam.py::TestPaddedSAM::testDropSupplementary",
"test/test_sam.py::TestPaddedSAM::testDuplicateIdDisambiguation",
"test/test_sam.py::TestPaddedSAM::testHardClipLeft",
"test/test_sam.py::TestPaddedSAM::testHardClipRight",
"test/test_sam.py::TestPaddedSAM::testHardClippingInCIGARButQueryNotHardClipped",
"test/test_sam.py::TestPaddedSAM::testKF414679SoftClipLeft",
"test/test_sam.py::TestPaddedSAM::testKeepQualityControlFailures",
"test/test_sam.py::TestPaddedSAM::testMinLength",
"test/test_sam.py::TestPaddedSAM::testMixedMatch",
"test/test_sam.py::TestPaddedSAM::testMixedMatchSpecificReference",
"test/test_sam.py::TestPaddedSAM::testMixedMatchSpecificReferenceButNoMatches",
"test/test_sam.py::TestPaddedSAM::testNotSecondaryAndNotSupplementaryWithNoSequence",
"test/test_sam.py::TestPaddedSAM::testPrimaryAndSecondaryReferenceInsertion",
"test/test_sam.py::TestPaddedSAM::testQueryHardClipAndSoftClipProtrudesBothSides",
"test/test_sam.py::TestPaddedSAM::testQuerySoftClipLeft",
"test/test_sam.py::TestPaddedSAM::testQuerySoftClipProtrudesBothSides",
"test/test_sam.py::TestPaddedSAM::testQuerySoftClipProtrudesLeft",
"test/test_sam.py::TestPaddedSAM::testQuerySoftClipProtrudesRight",
"test/test_sam.py::TestPaddedSAM::testQuerySoftClipReachesLeftEdge",
"test/test_sam.py::TestPaddedSAM::testQuerySoftClipReachesRightEdge",
"test/test_sam.py::TestPaddedSAM::testQuerySoftClipRight",
"test/test_sam.py::TestPaddedSAM::testQueryTooLong",
"test/test_sam.py::TestPaddedSAM::testRcNeeded",
"test/test_sam.py::TestPaddedSAM::testRcSuffix",
"test/test_sam.py::TestPaddedSAM::testReferenceDeletion",
"test/test_sam.py::TestPaddedSAM::testReferenceDeletionAlternateChar",
"test/test_sam.py::TestPaddedSAM::testReferenceInsertion",
"test/test_sam.py::TestPaddedSAM::testReferenceSkip",
"test/test_sam.py::TestPaddedSAM::testReferenceSkipAlternateChar",
"test/test_sam.py::TestPaddedSAM::testSecondaryWithNoPreviousSequence",
"test/test_sam.py::TestPaddedSAM::testSecondaryWithNoSequence",
"test/test_sam.py::TestPaddedSAM::testSupplementaryWithNoPreviousSequence",
"test/test_sam.py::TestPaddedSAM::testSupplementaryWithNoSequence",
"test/test_sam.py::TestPaddedSAM::testUnequalReferenceLengths",
"test/test_sam.py::TestSamReferencesToStr::testIndent",
"test/test_sam.py::TestSamReferencesToStr::testSimple",
"test/test_sam.py::TestHardClip::testCIGARLengthTooHigh",
"test/test_sam.py::TestHardClip::testCIGARLengthTooLow",
"test/test_sam.py::TestHardClip::testClipBoth",
"test/test_sam.py::TestHardClip::testClipBothAlreadyDone",
"test/test_sam.py::TestHardClip::testClipLeft",
"test/test_sam.py::TestHardClip::testClipLeftAlreadyDone",
"test/test_sam.py::TestHardClip::testClipRight",
"test/test_sam.py::TestHardClip::testClipRightAlreadyDone",
"test/test_sam.py::TestHardClip::testHardClipInMiddle",
"test/test_sam.py::TestHardClip::testNoClip",
"test/test_sam.py::TestHardClip::testThreeHardClips"
] | [] | {
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false
} | 2018-10-05 08:26:07+00:00 | mit | 869 |
|
acorg__dark-matter-637 | diff --git a/CHANGELOG.md b/CHANGELOG.md
index a68180a..77e9eb6 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,3 +1,9 @@
+## 3.0.44 Nov 1, 2018
+
+Fix [636](https://github.com/acorg/dark-matter/issues/636) in which SAM file
+parsing threw an exception when an unmapped sequence with no CIGAR string
+occurred in a SAM file (this can happen when running `bowtie2 --all`).
+
## 3.0.43 Nov 1, 2018
Fixed thinko in 3.0.42.
diff --git a/dark/__init__.py b/dark/__init__.py
index f10e319..e8e1c5a 100644
--- a/dark/__init__.py
+++ b/dark/__init__.py
@@ -7,4 +7,4 @@ if sys.version_info < (2, 7):
# will not be found by the version() function in ../setup.py
#
# Remember to update ../CHANGELOG.md describing what's new in each version.
-__version__ = '3.0.43'
+__version__ = '3.0.44'
diff --git a/dark/sam.py b/dark/sam.py
index a4f2c1a..f6052ca 100644
--- a/dark/sam.py
+++ b/dark/sam.py
@@ -332,14 +332,24 @@ class SAMFilter(object):
# the previous sequence should be used. This is best
# practice according to section 2.5.2 of
# https://samtools.github.io/hts-specs/SAMv1.pdf So we use
- # the last alignment if we get None as a query sequence.
+ # the last alignment query and quality strings if we get
+ # None as a query sequence.
if alignment.query_sequence is None:
if lastAlignment is None:
raise InvalidSAM(
'pysam produced an alignment (number %d) with no '
'query sequence without previously giving an '
'alignment with a sequence.' % count)
- # Use the previous query sequence and quality.
+ # Use the previous query sequence and quality. I'm not
+ # making the call to _hardClip dependent on
+ # alignment.cigartuples (as in the else clause below)
+ # because I don't think it's possible for
+ # alignment.cigartuples to be None in this case. If we
+ # have a second match on a query, then it must be
+ # aligned to something (i.e., it cannot be unmapped
+ # with no CIGAR string). The assertion will tell us if
+ # this is ever not the case.
+ assert alignment.cigartuples
(alignment.query_sequence,
alignment.query_qualities, _) = _hardClip(
lastAlignment.query_sequence,
@@ -347,11 +357,12 @@ class SAMFilter(object):
alignment.cigartuples)
else:
lastAlignment = alignment
- (alignment.query_sequence,
- alignment.query_qualities, _) = _hardClip(
- alignment.query_sequence,
- alignment.query_qualities,
- alignment.cigartuples)
+ if alignment.cigartuples:
+ (alignment.query_sequence,
+ alignment.query_qualities, _) = _hardClip(
+ alignment.query_sequence,
+ alignment.query_qualities,
+ alignment.cigartuples)
if ((filterRead is None or
filterRead(Read(alignment.query_name,
| acorg/dark-matter | 7fc47a737e687f6b0f0bfe7414c7f8947bb16bea | diff --git a/test/test_sam.py b/test/test_sam.py
index 524f8a2..ab4bf4c 100644
--- a/test/test_sam.py
+++ b/test/test_sam.py
@@ -247,6 +247,24 @@ class TestSAMFilter(TestCase):
self.assertEqual('query1', alignment1.query_name)
self.assertEqual('query4', alignment2.query_name)
+ def testCloseButNoCIGAR(self):
+ """
+ An unmapped query with no CIGAR string must be passed through
+ unchanged if dropUnmapped is not specified.
+ """
+ data = '\n'.join([
+ '@SQ SN:ref LN:10',
+ 'query1 4 * 0 0 * * 0 0 TCTAGG ZZZZZZ',
+ ]).replace(' ', '\t')
+
+ with dataFile(data) as filename:
+ sf = SAMFilter(filename)
+ (alignment,) = list(sf.alignments())
+ self.assertEqual('query1', alignment.query_name)
+ self.assertEqual('TCTAGG', alignment.query_sequence)
+ self.assertEqual('ZZZZZZ', ''.join(
+ map(lambda x: chr(x + 33), alignment.query_qualities)))
+
class TestPaddedSAM(TestCase):
"""
| SAM parsing throws an exception
Fix this:
```sh
$ filter-sam.py --referenceId Sorsum_high_freq --dropSecond --dropSupp --samfile sorsum-assortedGenotypesDieter-bowtie2.sam >/dev/null
Traceback (most recent call last):
File "/Users/terry/.virtualenvs/lm36/bin/filter-sam.py", line 90, in <module>
for kept, alignment in enumerate(samFilter.alignments(), start=1):
File "/Users/terry/.virtualenvs/lm36/lib/python3.6/site-packages/dark/sam.py", line 354, in alignments
alignment.cigartuples)
File "/Users/terry/.virtualenvs/lm36/lib/python3.6/site-packages/dark/sam.py", line 88, in _hardClip
for (operation, length) in cigartuples:
TypeError: 'NoneType' object is not iterable
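# Sketch of a quick check: pysam gives cigartuples as None for an unmapped
# read whose CIGAR field is '*', and that None is what _hardClip then
# tries (and fails) to iterate:
$ python -c "import pysam
for a in pysam.AlignmentFile('sorsum-assortedGenotypesDieter-bowtie2.sam'):
    if a.is_unmapped:
        print(a.query_name, a.cigarstring, a.cigartuples)
        break"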
``` | 0.0 | 7fc47a737e687f6b0f0bfe7414c7f8947bb16bea | [
"test/test_sam.py::TestSAMFilter::testCloseButNoCIGAR"
] | [
"test/test_sam.py::TestSAMFilter::testAlignmentCount",
"test/test_sam.py::TestSAMFilter::testDropDuplicates",
"test/test_sam.py::TestSAMFilter::testDropSecondary",
"test/test_sam.py::TestSAMFilter::testDropSupplementary",
"test/test_sam.py::TestSAMFilter::testKeepQualityControlFailures",
"test/test_sam.py::TestSAMFilter::testMaxScore",
"test/test_sam.py::TestSAMFilter::testMaxScoreNoScores",
"test/test_sam.py::TestSAMFilter::testMinAndMaxScore",
"test/test_sam.py::TestSAMFilter::testMinLength",
"test/test_sam.py::TestSAMFilter::testMinScore",
"test/test_sam.py::TestSAMFilter::testMinScoreNoScores",
"test/test_sam.py::TestSAMFilter::testStoreQueryIds",
"test/test_sam.py::TestSAMFilter::testUnknownReferences",
"test/test_sam.py::TestPaddedSAM::testAllMMatch",
"test/test_sam.py::TestPaddedSAM::testAllowDuplicateIds",
"test/test_sam.py::TestPaddedSAM::testAlsoYieldAlignments",
"test/test_sam.py::TestPaddedSAM::testDropDuplicates",
"test/test_sam.py::TestPaddedSAM::testDropSecondary",
"test/test_sam.py::TestPaddedSAM::testDropSupplementary",
"test/test_sam.py::TestPaddedSAM::testDuplicateIdDisambiguation",
"test/test_sam.py::TestPaddedSAM::testHardClipLeft",
"test/test_sam.py::TestPaddedSAM::testHardClipRight",
"test/test_sam.py::TestPaddedSAM::testHardClippingInCIGARButQueryNotHardClipped",
"test/test_sam.py::TestPaddedSAM::testKF414679SoftClipLeft",
"test/test_sam.py::TestPaddedSAM::testKeepQualityControlFailures",
"test/test_sam.py::TestPaddedSAM::testMinLength",
"test/test_sam.py::TestPaddedSAM::testMixedMatch",
"test/test_sam.py::TestPaddedSAM::testMixedMatchSpecificReference",
"test/test_sam.py::TestPaddedSAM::testMixedMatchSpecificReferenceButNoMatches",
"test/test_sam.py::TestPaddedSAM::testNotSecondaryAndNotSupplementaryWithNoSequence",
"test/test_sam.py::TestPaddedSAM::testPrimaryAndSecondaryReferenceInsertion",
"test/test_sam.py::TestPaddedSAM::testQueryHardClipAndSoftClipProtrudesBothSides",
"test/test_sam.py::TestPaddedSAM::testQuerySoftClipLeft",
"test/test_sam.py::TestPaddedSAM::testQuerySoftClipProtrudesBothSides",
"test/test_sam.py::TestPaddedSAM::testQuerySoftClipProtrudesLeft",
"test/test_sam.py::TestPaddedSAM::testQuerySoftClipProtrudesRight",
"test/test_sam.py::TestPaddedSAM::testQuerySoftClipReachesLeftEdge",
"test/test_sam.py::TestPaddedSAM::testQuerySoftClipReachesRightEdge",
"test/test_sam.py::TestPaddedSAM::testQuerySoftClipRight",
"test/test_sam.py::TestPaddedSAM::testQueryTooLong",
"test/test_sam.py::TestPaddedSAM::testRcNeeded",
"test/test_sam.py::TestPaddedSAM::testRcSuffix",
"test/test_sam.py::TestPaddedSAM::testReferenceDeletion",
"test/test_sam.py::TestPaddedSAM::testReferenceDeletionAlternateChars",
"test/test_sam.py::TestPaddedSAM::testReferenceInsertion",
"test/test_sam.py::TestPaddedSAM::testReferenceSkip",
"test/test_sam.py::TestPaddedSAM::testReferenceSkipAlternateChars",
"test/test_sam.py::TestPaddedSAM::testSecondaryAlignmentHasQuery",
"test/test_sam.py::TestPaddedSAM::testSecondaryWithNoPreviousSequence",
"test/test_sam.py::TestPaddedSAM::testSecondaryWithNoSequence",
"test/test_sam.py::TestPaddedSAM::testSupplementaryAlignmentHasQuery",
"test/test_sam.py::TestPaddedSAM::testSupplementaryWithNoPreviousSequence",
"test/test_sam.py::TestPaddedSAM::testSupplementaryWithNoSequence",
"test/test_sam.py::TestPaddedSAM::testUnequalReferenceLengths",
"test/test_sam.py::TestSamReferencesToStr::testIndent",
"test/test_sam.py::TestSamReferencesToStr::testSimple",
"test/test_sam.py::TestHardClip::testClipBoth",
"test/test_sam.py::TestHardClip::testClipBothAlreadyDone",
"test/test_sam.py::TestHardClip::testClipLeft",
"test/test_sam.py::TestHardClip::testClipLeftAlreadyDone",
"test/test_sam.py::TestHardClip::testClipRight",
"test/test_sam.py::TestHardClip::testClipRightAlreadyDone",
"test/test_sam.py::TestHardClip::testHardClipInMiddle",
"test/test_sam.py::TestHardClip::testNoClip",
"test/test_sam.py::TestHardClip::testThreeHardClips"
] | {
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false
} | 2018-11-01 19:19:21+00:00 | mit | 870 |
|
acorg__dark-matter-647 | diff --git a/CHANGELOG.md b/CHANGELOG.md
index 2ce0779..3931a9b 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,3 +1,8 @@
+## 3.0.50 Nov 23, 2018
+
+Added `bin/convert-diamond-to-sam.py` script to convert DIAMOND output
+format 6 to SAM.
+
## 3.0.49 Nov 22, 2018
Added `btop2cigar` to `dark.btop` to convert BTOP strings to CIGAR strings.
diff --git a/bin/convert-diamond-to-sam.py b/bin/convert-diamond-to-sam.py
new file mode 100755
index 0000000..38e3365
--- /dev/null
+++ b/bin/convert-diamond-to-sam.py
@@ -0,0 +1,145 @@
+#!/usr/bin/env python
+
+# See https://samtools.github.io/hts-specs/SAMv1.pdf for the SAM file
+# format specification.
+
+from __future__ import print_function, division
+
+import sys
+import argparse
+from os.path import basename
+from tempfile import TemporaryFile
+from functools import partial
+
+from dark import __version__ as VERSION
+from dark.btop import btop2cigar
+from dark.diamond.conversion import diamondTabularFormatToDicts
+from dark.reads import DNARead
+
+FIELDS = 'bitscore btop qframe qqual qseq qseqid slen sstart stitle'
+
+parser = argparse.ArgumentParser(
+ formatter_class=argparse.ArgumentDefaultsHelpFormatter,
+ description=('Convert DIAMOND tabular format to SAM. The DIAMOND '
+ 'invocation *must* include --outfmt 6 %s' % FIELDS))
+
+parser.add_argument(
+ '--printFields', default=False, action='store_true',
+ help=('Print the field names in the order that they must be given to '
+ 'diamond --outfmt 6 to produce correct input for this script, '
+ 'then exit.'))
+
+parser.add_argument(
+ '--mappingQuality', type=int, default=255,
+ help=('The mapping quality to use for MAPQ (field 5). The default (255) '
+ 'indicates that mapping quality information is not available.'))
+
+parser.add_argument(
+ '--ram', action='store_true', default=False,
+ help=('Do not use a temporary file to hold the non-header SAM output. '
+ 'This will run faster but use more memory since all non-header SAM '
+ 'output will be stored in RAM and only written out when the full '
+ 'header can be determined.'))
+
+parser.add_argument(
+ '--keepDescriptions', action='store_true', default=False,
+ help=('Do not discard text after the first space in query or subject '
+ 'sequence ids. Note that. Note that this violates the SAM '
+ 'specification, but since SAM files are TAB-separated there '
+ 'is probably only a small chance this will cause any problems '
+ 'downstream.'))
+
+args = parser.parse_args()
+
+if args.printFields:
+ print(FIELDS)
+ sys.exit(0)
+
+idOnly = not args.keepDescriptions
+mappingQuality = args.mappingQuality
+ram = args.ram
+
+if not 0 <= mappingQuality <= 255:
+ raise ValueError('Mapping quality must be between 0 and 255 (inclusive)')
+
+referenceLengths = {}
+
+if ram:
+ nonHeaderLines = []
+ emit = nonHeaderLines.append
+else:
+ tf = TemporaryFile(mode='w+t', encoding='utf-8')
+ emit = partial(print, file=tf)
+
+for match in diamondTabularFormatToDicts(sys.stdin, FIELDS.split()):
+ qseqid = match['qseqid'].split()[0] if idOnly else match['qseqid']
+ stitle = match['stitle'].split()[0] if idOnly else match['stitle']
+
+ referenceLengths[stitle] = match['slen']
+
+ # If the query frame is less than zero, the match was with a reverse
+    # complemented translation of the query. Put the reverse complement
+ # into the SAM output, which seems to be standard / accepted practice
+ # based on my web searches. See e.g., https://www.biostars.org/p/131891/
+ # for what Bowtie2 does and for some comments on this issue for SAM/BAM
+ # files in general.
+ if match['qframe'] > 0:
+ flag = 0
+ qseq = match['qseq']
+ qqual = match['qqual'] or '*'
+ else:
+ flag = 16
+ qseq = DNARead('id', match['qseq']).reverseComplement().sequence
+ qqual = match['qqual'][::-1] if match['qqual'] else '*'
+
+ emit('\t'.join(map(str, [
+ # 1. QNAME
+ qseqid,
+ # 2. FLAG
+ flag,
+ # 3. RNAME
+ stitle,
+ # 4. POS
+ match['sstart'],
+ # 5. MAPQ
+ mappingQuality,
+ # 6. CIGAR
+ btop2cigar(match['btop']),
+ # 7. RNEXT
+ '*',
+ # 8. PNEXT
+ 0,
+ # 9. TLEN
+ 0,
+ # 10. SEQ
+ qseq,
+ # 11. QUAL
+ qqual,
+ # 12. Alignment score
+ 'AS:i:%d' % int(match['bitscore'])])))
+
+
+progName = basename(sys.argv[0])
+
+# Print SAM headers.
+print('\n'.join(
+ [
+ '@PG\tID:DIAMOND\tPN:DIAMOND',
+ '@PG\tID:%s\tPN:%s (version %s)\tCL:%s %s\tPP:DIAMOND' %
+ (progName, progName, VERSION, progName, ' '.join(sys.argv[1:])),
+ '@CO\t%s is from the dark-matter package '
+ '(https://github.com/acorg/dark-matter/)' % progName,
+ ] +
+ [
+ '@SQ\tSN:%s\tLN:%d' % (name, referenceLengths[name])
+ for name in sorted(referenceLengths)
+ ]))
+
+# Print non-header lines.
+if ram:
+ print('\n'.join(nonHeaderLines))
+else:
+ tf.seek(0)
+ for line in tf:
+ print(line, end='')
+ tf.close()
diff --git a/dark/__init__.py b/dark/__init__.py
index e37701e..a768585 100644
--- a/dark/__init__.py
+++ b/dark/__init__.py
@@ -7,4 +7,4 @@ if sys.version_info < (2, 7):
# will not be found by the version() function in ../setup.py
#
# Remember to update ../CHANGELOG.md describing what's new in each version.
-__version__ = '3.0.49'
+__version__ = '3.0.50'
diff --git a/dark/diamond/conversion.py b/dark/diamond/conversion.py
index 29c2694..42c2a8d 100644
--- a/dark/diamond/conversion.py
+++ b/dark/diamond/conversion.py
@@ -4,6 +4,7 @@ import six
import bz2
from json import dumps, loads
from operator import itemgetter
+from collections import Counter
from Bio.File import as_handle
@@ -13,6 +14,85 @@ from dark.alignments import Alignment, ReadAlignments
from dark.diamond.hsp import normalizeHSP
+# The keys in the following are DIAMOND format 6 field names. The values
+# are one-argument functions that take a string and return an appropriately
+# converted field value.
+#
+# The following fields are taken from the DIAMOND manual v0.9.22 2018-05-11.
+# Fields whose name doesn't appear here will be left as strings.
+DIAMOND_FIELD_CONVERTER = {
+ 'bitscore': float,
+ 'evalue': float,
+ 'frame': int,
+ 'gapopen': int,
+ 'gaps': int,
+ 'identicalCount': lambda nident: None if nident is None else int(nident),
+ 'length': int,
+ 'mismatch': int,
+ 'nident': int,
+ 'pident': float,
+ 'positive': int,
+ 'positiveCount': lambda pos: None if pos is None else int(pos),
+ 'ppos': float,
+ 'qcovhsp': float,
+ 'qend': int,
+ 'qframe': int,
+ 'qlen': int,
+ 'qstart': int,
+ 'score': float,
+ 'send': int,
+ 'slen': int,
+ 'sstart': int,
+}
+
+
+def diamondTabularFormatToDicts(filename, fieldNames):
+ """
+ Read DIAMOND tabular (--outfmt 6) output and convert lines to dictionaries.
+
+ @param filename: Either a C{str} file name or an open file pointer.
+ @param fieldNames: A C{list} or C{tuple} of C{str} DIAMOND field names.
+ Run 'diamond -help' to see the full list.
+ @raise ValueError: If a line of C{filename} does not have the expected
+ number of TAB-separated fields (i.e., len(fieldNames)). Or if
+ C{fieldNames} is empty or contains duplicates.
+ @return: A generator that yields C{dict}s with keys that are the DIAMOND
+ field names and values as converted by DIAMOND_FIELD_CONVERTER.
+ """
+ nFields = len(fieldNames)
+ if not nFields:
+ raise ValueError('fieldNames cannot be empty.')
+
+ c = Counter(fieldNames)
+ if c.most_common(1)[0][1] > 1:
+ raise ValueError(
+ 'fieldNames contains duplicated names: %s.' %
+ (', '.join(sorted(x[0] for x in c.most_common() if x[1] > 1))))
+
+ def identity(x):
+ return x
+
+ convertFunc = DIAMOND_FIELD_CONVERTER.get
+
+ with as_handle(filename) as fp:
+ for count, line in enumerate(fp, start=1):
+ result = {}
+ line = line[:-1]
+ values = line.split('\t')
+ if len(values) != nFields:
+ raise ValueError(
+ 'Line %d of %s had %d field values (expected %d). '
+ 'Line was %r.' %
+ (count,
+ (filename if isinstance(filename, six.string_types)
+ else 'input'),
+ len(values), nFields, line))
+ for fieldName, value in zip(fieldNames, values):
+ value = convertFunc(fieldName, identity)(value)
+ result[fieldName] = value
+ yield result
+
+
class DiamondTabularFormatReader(object):
"""
Provide a method that yields parsed tabular records from a file. Store and
diff --git a/dark/reads.py b/dark/reads.py
index b2a4bcc..18171d0 100644
--- a/dark/reads.py
+++ b/dark/reads.py
@@ -766,7 +766,7 @@ class ReadFilter(object):
only return those that have length <= 100, so your result may have
less than 20 reads. The former version extracts reads of the
desired length and then takes 20 reads at random from that set, so
- you'll always get 20 raeds in your result, assuming there are at
+ you'll always get 20 reads in your result, assuming there are at
least that many reads satisfying the length filter.
@param trueLength: The C{int} number of reads in this C{Reads} instance.
Under normal circumstances it will not be necessary to pass this
diff --git a/setup.py b/setup.py
index fef9e17..bc089e7 100644
--- a/setup.py
+++ b/setup.py
@@ -35,6 +35,7 @@ scripts = [
'bin/compare-sequences.py',
'bin/convert-blast-xml-to-json.py',
'bin/convert-diamond-to-json.py',
+ 'bin/convert-diamond-to-sam.py',
'bin/convert-sam-to-fastq.sh',
'bin/dark-matter-version.py',
'bin/dna-to-aa.py',
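A typical invocation of the new script (a sketch; the database and read file names are made up, and the `--outfmt 6` field list must match `FIELDS` above exactly):

```sh
diamond blastx --db viral-proteins.dmnd --query reads.fastq \
    --outfmt 6 bitscore btop qframe qqual qseq qseqid slen sstart stitle |
    convert-diamond-to-sam.py > matches.sam
```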
| acorg/dark-matter | b36a743b85c8a5b2b8b6d21d337deb70cff193d5 | diff --git a/test/diamond/test_conversion.py b/test/diamond/test_conversion.py
index 9f80670..fbc6e3f 100644
--- a/test/diamond/test_conversion.py
+++ b/test/diamond/test_conversion.py
@@ -3,6 +3,7 @@ from unittest import TestCase
from io import BytesIO, StringIO
import bz2file
from bz2 import compress
+from six import assertRaisesRegex
try:
from unittest.mock import patch
@@ -13,8 +14,8 @@ from ..mocking import mockOpen
from json import dumps
-from dark.diamond.conversion import (JSONRecordsReader,
- DiamondTabularFormatReader)
+from dark.diamond.conversion import (
+ JSONRecordsReader, DiamondTabularFormatReader, diamondTabularFormatToDicts)
from dark.reads import Reads, AARead
@@ -1137,3 +1138,113 @@ class TestJSONRecordsReader(TestCase):
reader = JSONRecordsReader('file.json')
alignment = list(reader.readAlignments(reads))[0]
self.assertEqual('id1 1', alignment.read.id)
+
+
+class TestDiamondTabularFormatToDicts(TestCase):
+ """
+ Tests for the diamondTabularFormatToDicts function.
+ """
+
+ def testEmptyFieldNameList(self):
+ """
+ If an empty field name list is passed, the function must raise a
+ ValueError.
+ """
+ error = '^fieldNames cannot be empty\\.$'
+ assertRaisesRegex(self, ValueError, error, list,
+ diamondTabularFormatToDicts(None, []))
+
+ def testDuplicatesInFieldNameList(self):
+ """
+ If a field name list that contains duplicates is passed, the function
+ must raise a ValueError.
+ """
+ error = '^fieldNames contains duplicated names: a, b\\.$'
+ assertRaisesRegex(
+ self, ValueError, error, list,
+ diamondTabularFormatToDicts(None, ['a', 'b', 'a', 'c', 'b']))
+
+ def testTooFewFields(self):
+ """
+ If an input line does not have enough fields, a ValueError must be
+ raised.
+ """
+ data = StringIO('a\tb\n')
+ error = (r"^Line 1 of input had 2 field values \(expected 3\)\. "
+ r"Line was 'a\\tb'\.")
+ assertRaisesRegex(
+ self, ValueError, error, list,
+ diamondTabularFormatToDicts(data, ['a', 'b', 'c']))
+
+ def testTooManyFields(self):
+ """
+ If an input line has too many fields, a ValueError must be raised.
+ """
+ data = StringIO('a\tb\tc\n')
+ error = (r"^Line 1 of input had 3 field values \(expected 2\)\. "
+ r"Line was 'a\\tb\\tc'\.")
+ assertRaisesRegex(
+ self, ValueError, error, list,
+ diamondTabularFormatToDicts(data, ['a', 'b']))
+
+ def testUnknownField(self):
+ """
+ An unknown field name must result in a returned field name and value
+ that are identical to those in the function call and its input string.
+ """
+ data = StringIO('3.5\n')
+ (result,) = list(diamondTabularFormatToDicts(data, ['__blah__']))
+ self.assertEqual({'__blah__': '3.5'}, result)
+
+ def testConversions(self):
+ """
+ The fields in input lines must be recognized and converted to their
+ correct types.
+ """
+ fields = [
+ 'bitscore',
+ 'evalue',
+ 'frame',
+ 'identicalCount',
+ 'positiveCount',
+ 'qstart',
+ 'qend',
+ 'sstart',
+ 'send',
+ 'qseq',
+ ]
+ data = StringIO(
+ ('3.5 1.7 1 7 4 10 12 1 2 ACGT\n'
+ '3.6 1.8 2 8 5 11 13 2 3 TGCA').replace(' ', '\t') + '\n'
+ )
+ (result1, result2) = list(diamondTabularFormatToDicts(data, fields))
+
+ self.assertEqual(
+ {
+ 'bitscore': 3.5,
+ 'evalue': 1.7,
+ 'frame': 1,
+ 'identicalCount': 7,
+ 'positiveCount': 4,
+ 'qstart': 10,
+ 'qend': 12,
+ 'sstart': 1,
+ 'send': 2,
+ 'qseq': 'ACGT',
+ },
+ result1)
+
+ self.assertEqual(
+ {
+ 'bitscore': 3.6,
+ 'evalue': 1.8,
+ 'frame': 2,
+ 'identicalCount': 8,
+ 'positiveCount': 5,
+ 'qstart': 11,
+ 'qend': 13,
+ 'sstart': 2,
+ 'send': 3,
+ 'qseq': 'TGCA',
+ },
+ result2)
| Add a script to convert DIAMOND output to SAM | 0.0 | b36a743b85c8a5b2b8b6d21d337deb70cff193d5 | [
"test/diamond/test_conversion.py::TestDiamondTabularFormatReader::testDiamondInput",
"test/diamond/test_conversion.py::TestDiamondTabularFormatReader::testDiamondInputWithoutNidentOrPositives",
"test/diamond/test_conversion.py::TestDiamondTabularFormatReader::testDiamondParams",
"test/diamond/test_conversion.py::TestDiamondTabularFormatReader::testSaveAsJSON",
"test/diamond/test_conversion.py::TestDiamondTabularFormatReader::testSaveAsJSONBzip2",
"test/diamond/test_conversion.py::TestDiamondTabularFormatReader::testSpacesMustBePreserved",
"test/diamond/test_conversion.py::TestJSONRecordsReader::testCorrectNumberOfAlignments",
"test/diamond/test_conversion.py::TestJSONRecordsReader::testCorrectNumberOfAlignmentsMatchMissingEnd",
"test/diamond/test_conversion.py::TestJSONRecordsReader::testCorrectNumberOfAlignmentsMatchMissingMiddle",
"test/diamond/test_conversion.py::TestJSONRecordsReader::testCorrectNumberOfAlignmentsMatchMissingStart",
"test/diamond/test_conversion.py::TestJSONRecordsReader::testCorrectNumberOfAlignmentsTwoMatchesMissingEnd",
"test/diamond/test_conversion.py::TestJSONRecordsReader::testSpaceInReadIdNotInJSONRecord",
"test/diamond/test_conversion.py::TestJSONRecordsReader::testSpacesMustBePreserved",
"test/diamond/test_conversion.py::TestDiamondTabularFormatToDicts::testConversions",
"test/diamond/test_conversion.py::TestDiamondTabularFormatToDicts::testDuplicatesInFieldNameList",
"test/diamond/test_conversion.py::TestDiamondTabularFormatToDicts::testEmptyFieldNameList",
"test/diamond/test_conversion.py::TestDiamondTabularFormatToDicts::testTooFewFields",
"test/diamond/test_conversion.py::TestDiamondTabularFormatToDicts::testTooManyFields",
"test/diamond/test_conversion.py::TestDiamondTabularFormatToDicts::testUnknownField"
] | [] | {
"failed_lite_validators": [
"has_short_problem_statement",
"has_added_files",
"has_many_modified_files",
"has_many_hunks",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false
} | 2018-11-23 14:15:52+00:00 | mit | 871 |
|
acorg__dark-matter-649 | diff --git a/CHANGELOG.md b/CHANGELOG.md
index 271f053..588c254 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,3 +1,7 @@
+## 3.0.52 Nov 23, 2018
+
+Added hard-clipping to CIGAR in SAM created by `convert-diamond-to-sam.py`.
+
## 3.0.51 Nov 23, 2018
Use `from six import StringIO` to avoid a PY2/3 incompatibility.
diff --git a/bin/convert-diamond-to-sam.py b/bin/convert-diamond-to-sam.py
index 38e3365..0b3484f 100755
--- a/bin/convert-diamond-to-sam.py
+++ b/bin/convert-diamond-to-sam.py
@@ -16,7 +16,8 @@ from dark.btop import btop2cigar
from dark.diamond.conversion import diamondTabularFormatToDicts
from dark.reads import DNARead
-FIELDS = 'bitscore btop qframe qqual qseq qseqid slen sstart stitle'
+FIELDS = ('bitscore btop qframe qend qqual qlen qseq qseqid qstart slen '
+ 'sstart stitle')
parser = argparse.ArgumentParser(
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
@@ -92,6 +93,21 @@ for match in diamondTabularFormatToDicts(sys.stdin, FIELDS.split()):
qseq = DNARead('id', match['qseq']).reverseComplement().sequence
qqual = match['qqual'][::-1] if match['qqual'] else '*'
+ # Make a CIGAR string, including hard-clipped bases at the start and
+ # end of the query (DIAMOND outputs a hard-clipped query sequence).
+ startClipCount = match['qstart'] - 1
+ endClipCount = match['qlen'] - match['qend']
+
+ assert startClipCount >= 0
+ assert endClipCount >= 0, (
+ 'Query sequence %s has length %d but the qend value is %d' %
+ (qseq, len(match['qseq']), match['qend']))
+
+ cigar = (
+ ('%dH' % startClipCount if startClipCount else '') +
+ btop2cigar(match['btop'], concise=False, aa=True) +
+ ('%dH' % endClipCount if endClipCount else ''))
+
emit('\t'.join(map(str, [
# 1. QNAME
qseqid,
@@ -104,7 +120,7 @@ for match in diamondTabularFormatToDicts(sys.stdin, FIELDS.split()):
# 5. MAPQ
mappingQuality,
# 6. CIGAR
- btop2cigar(match['btop']),
+ cigar,
# 7. RNEXT
'*',
# 8. PNEXT
diff --git a/dark/__init__.py b/dark/__init__.py
index 3e72b1a..147b204 100644
--- a/dark/__init__.py
+++ b/dark/__init__.py
@@ -7,4 +7,4 @@ if sys.version_info < (2, 7):
# will not be found by the version() function in ../setup.py
#
# Remember to update ../CHANGELOG.md describing what's new in each version.
-__version__ = '3.0.51'
+__version__ = '3.0.52'
diff --git a/dark/btop.py b/dark/btop.py
index 9855734..aa338da 100644
--- a/dark/btop.py
+++ b/dark/btop.py
@@ -73,24 +73,34 @@ def countGaps(btopString):
return (queryGaps, subjectGaps)
-def btop2cigar(btopString, concise=False):
+def btop2cigar(btopString, concise=False, aa=False):
"""
Convert a BTOP string to a CIGAR string.
@param btopString: A C{str} BTOP sequence.
@param concise: If C{True}, use 'M' for matches and mismatches instead
of the more specific 'X' and '='.
-
- @raise ValueError: If L{parseBtop} finds an error in C{btopString}.
+ @param aa: If C{True}, C{btopString} will be interpreted as though it
+ refers to amino acids (as in the BTOP string produced by DIAMOND).
+ In that case, it is not possible to use the 'precise' CIGAR characters
+ because amino acids have multiple codons so we cannot know whether
+ an amino acid match is due to an exact nucleotide matches or not.
+ Also, the numbers in the BTOP string will be multiplied by 3 since
+ they refer to a number of amino acids matching.
+ @raise ValueError: If L{parseBtop} finds an error in C{btopString} or
+ if C{aa} and C{concise} are both C{True}.
@return: A C{str} CIGAR string.
"""
+ if aa and concise:
+ raise ValueError('aa and concise cannot both be True')
+
result = []
thisLength = thisOperation = currentLength = currentOperation = None
for item in parseBtop(btopString):
if isinstance(item, int):
thisLength = item
- thisOperation = CMATCH if concise else CEQUAL
+ thisOperation = CEQUAL if concise else CMATCH
else:
thisLength = 1
query, reference = item
@@ -108,13 +118,16 @@ def btop2cigar(btopString, concise=False):
else:
# A substitution was needed.
assert query != reference
- thisOperation = CMATCH if concise else CDIFF
+ thisOperation = CDIFF if concise else CMATCH
if thisOperation == currentOperation:
currentLength += thisLength
else:
if currentOperation:
- result.append('%d%s' % (currentLength, currentOperation))
+ result.append(
+ '%d%s' %
+ ((3 * currentLength) if aa else currentLength,
+ currentOperation))
currentLength, currentOperation = thisLength, thisOperation
# We reached the end of the BTOP string. If there was an operation
@@ -122,6 +135,8 @@ def btop2cigar(btopString, concise=False):
# case where btopString was empty.
assert currentOperation or btopString == ''
if currentOperation:
- result.append('%d%s' % (currentLength, currentOperation))
+ result.append(
+ '%d%s' %
+ ((3 * currentLength) if aa else currentLength, currentOperation))
return ''.join(result)
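A quick illustration of the new `aa` behaviour (the values mirror the `testAll` and `testAllAA` assertions in the test patch below):

```python
from dark.btop import btop2cigar

# 7 aligned pairs, 2 query insertions, 4 pairs, 2 deletions, 5 pairs.
print(btop2cigar('2GC3ATC-G-4-T-A5'))           # -> '7M2I4M2D5M'
# With aa=True every count is tripled (3 nucleotides per amino acid).
print(btop2cigar('2GC3ATC-G-4-T-A5', aa=True))  # -> '21M6I12M6D15M'
```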
| acorg/dark-matter | 3d48d201d41d8cabaa1f31d5dd7987e882a68af3 | diff --git a/test/test_btop.py b/test/test_btop.py
index 9f1f22a..e019563 100644
--- a/test/test_btop.py
+++ b/test/test_btop.py
@@ -164,15 +164,15 @@ class TestCountGaps(TestCase):
self.assertEqual((3, 2), countGaps('-GG-34-T-T39F-'))
-class TestBtop2CigarConcise(TestCase):
+class TestBtop2CigarPrecise(TestCase):
"""
- Tests for the btop2cigar function when concise is True.
+ Tests for the btop2cigar function when concise is False.
"""
def testEmpty(self):
"""
An empty BTOP string must result in an empty CIGAR string.
"""
- self.assertEqual('', btop2cigar('', concise=True))
+ self.assertEqual('', btop2cigar('', concise=False))
def testMixedMatch(self):
"""
@@ -180,7 +180,7 @@ class TestBtop2CigarConcise(TestCase):
CIGAR sense where M could be identical characters or not), the CIGAR
string must be all Ms.
"""
- self.assertEqual('7M', btop2cigar('2GC3AT', concise=True))
+ self.assertEqual('7M', btop2cigar('2GC3AT', concise=False))
def testRefenceInsertion(self):
"""
@@ -188,7 +188,7 @@ class TestBtop2CigarConcise(TestCase):
subject (reference) does not, the CIGAR string must indicate an
insertion to the reference.
"""
- self.assertEqual('1I', btop2cigar('A-', concise=True))
+ self.assertEqual('1I', btop2cigar('A-', concise=False))
def testQueryInsertion(self):
"""
@@ -196,7 +196,7 @@ class TestBtop2CigarConcise(TestCase):
but the query does not, the CIGAR string must indicate an deletion in
the reference.
"""
- self.assertEqual('1D', btop2cigar('-A', concise=True))
+ self.assertEqual('1D', btop2cigar('-A', concise=False))
def testAll(self):
"""
@@ -204,25 +204,36 @@ class TestBtop2CigarConcise(TestCase):
string must be correct.
"""
self.assertEqual('7M2I4M2D5M',
- btop2cigar('2GC3ATC-G-4-T-A5', concise=True))
+ btop2cigar('2GC3ATC-G-4-T-A5', concise=False))
+ def testAllAA(self):
+ """
+ If a BTOP string specifies all possible variations, and we indicate
+ that the BTOP string refers to amino acids, the CIGAR string must be
+ correct (i.e., all counts must be tripled).
+ """
+ self.assertEqual(
+ '21M6I12M6D15M',
+ btop2cigar('2GC3ATC-G-4-T-A5', concise=False, aa=True))
-class TestBtop2CigarPrecise(TestCase):
+
+class TestBtop2CigarConcise(TestCase):
"""
- Tests for the btop2cigar function when concise is False.
+ Tests for the btop2cigar function when concise is True.
"""
+
def testEmpty(self):
"""
An empty BTOP string must result in an empty CIGAR string.
"""
- self.assertEqual('', btop2cigar('', concise=False))
+ self.assertEqual('', btop2cigar('', concise=True))
def testMixedMatch(self):
"""
If a BTOP string specifies that some characters match and some do
not, the CIGAR string must be specific about the matches / mismatches.
"""
- self.assertEqual('2=1X3=1X', btop2cigar('2GC3AT', concise=False))
+ self.assertEqual('2=1X3=1X', btop2cigar('2GC3AT', concise=True))
def testRefenceInsertion(self):
"""
@@ -230,7 +241,7 @@ class TestBtop2CigarPrecise(TestCase):
subject (reference) does not, the CIGAR string must indicate an
insertion to the reference.
"""
- self.assertEqual('1I', btop2cigar('A-', concise=False))
+ self.assertEqual('1I', btop2cigar('A-', concise=True))
def testQueryInsertion(self):
"""
@@ -238,7 +249,7 @@ class TestBtop2CigarPrecise(TestCase):
but the query does not, the CIGAR string must indicate an deletion in
the reference.
"""
- self.assertEqual('1D', btop2cigar('-A', concise=False))
+ self.assertEqual('1D', btop2cigar('-A', concise=True))
def testAll(self):
"""
@@ -246,4 +257,12 @@ class TestBtop2CigarPrecise(TestCase):
string must be correct.
"""
self.assertEqual('2=1X3=1X2I4=2D5=',
- btop2cigar('2GC3ATC-G-4-T-A5', concise=False))
+ btop2cigar('2GC3ATC-G-4-T-A5', concise=True))
+
+ def testWithAATrue(self):
+ """
+ If concise and aa are both set to True, a ValueError must be raised.
+ """
+ error = '^aa and concise cannot both be True$'
+ assertRaisesRegex(self, ValueError, error, btop2cigar, '',
+ concise=True, aa=True)
| Add soft-clipping to CIGAR string in convert-diamond-to-sam.py | 0.0 | 3d48d201d41d8cabaa1f31d5dd7987e882a68af3 | [
"test/test_btop.py::TestBtop2CigarPrecise::testAll",
"test/test_btop.py::TestBtop2CigarPrecise::testAllAA",
"test/test_btop.py::TestBtop2CigarPrecise::testMixedMatch",
"test/test_btop.py::TestBtop2CigarConcise::testAll",
"test/test_btop.py::TestBtop2CigarConcise::testMixedMatch",
"test/test_btop.py::TestBtop2CigarConcise::testWithAATrue"
] | [
"test/test_btop.py::TestParseBtop::testConsecutiveGaps",
"test/test_btop.py::TestParseBtop::testConsecutiveIdentical",
"test/test_btop.py::TestParseBtop::testEmpty",
"test/test_btop.py::TestParseBtop::testOneLetter",
"test/test_btop.py::TestParseBtop::testOneLetterThenANumber",
"test/test_btop.py::TestParseBtop::testOneNumber",
"test/test_btop.py::TestParseBtop::testOneNumberThatIsZero",
"test/test_btop.py::TestParseBtop::testOneNumberWithLeadingZeroes",
"test/test_btop.py::TestParseBtop::testOneNumberWithTrailingOneLetter",
"test/test_btop.py::TestParseBtop::testOneQuerySubjectPair",
"test/test_btop.py::TestParseBtop::testOneQuerySubjectPairAndANumber",
"test/test_btop.py::TestParseBtop::testThreeLetters",
"test/test_btop.py::TestParseBtop::testTwoNumbersWithOneLetterBetween",
"test/test_btop.py::TestParseBtop::testTwoQuerySubjectPairs",
"test/test_btop.py::TestCountGaps::testEmpty",
"test/test_btop.py::TestCountGaps::testLettersButNoGaps",
"test/test_btop.py::TestCountGaps::testMultipleQueryAndSubjectGaps",
"test/test_btop.py::TestCountGaps::testNumberOnly",
"test/test_btop.py::TestCountGaps::testOneQueryAndOneSubjectGap",
"test/test_btop.py::TestCountGaps::testOneQueryGap",
"test/test_btop.py::TestCountGaps::testOneSubjectGap",
"test/test_btop.py::TestBtop2CigarPrecise::testEmpty",
"test/test_btop.py::TestBtop2CigarPrecise::testQueryInsertion",
"test/test_btop.py::TestBtop2CigarPrecise::testRefenceInsertion",
"test/test_btop.py::TestBtop2CigarConcise::testEmpty",
"test/test_btop.py::TestBtop2CigarConcise::testQueryInsertion",
"test/test_btop.py::TestBtop2CigarConcise::testRefenceInsertion"
] | {
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files",
"has_many_hunks",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false
} | 2018-11-23 16:46:53+00:00 | mit | 872 |
|
acorg__dark-matter-663 | diff --git a/CHANGELOG.md b/CHANGELOG.md
index fa389b6..e329501 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,3 +1,10 @@
+## 3.0.60 Dec 13, 2018
+
+In `reads.py`, changed the `_makeComplementTable` function so that
+uppercase and lowercase bases are correctly reverse complemented into their
+respective uppercase and lowercase complementary letters. Added a test to
+`test/test_reads.py` to confirm that `reverseComplement` does this.
+
## 3.0.59 Dec 11, 2018
Added `--sampleName` option to `proteins-to-pathogens`.
diff --git a/dark/__init__.py b/dark/__init__.py
index 278f52a..ffb465c 100644
--- a/dark/__init__.py
+++ b/dark/__init__.py
@@ -8,4 +8,4 @@ if sys.version_info < (2, 7):
#
# Remember to update ../CHANGELOG.md describing what's new in each version.
-__version__ = '3.0.59'
+__version__ = '3.0.60'
diff --git a/dark/reads.py b/dark/reads.py
index 18171d0..df381e0 100644
--- a/dark/reads.py
+++ b/dark/reads.py
@@ -27,7 +27,8 @@ def _makeComplementTable(complementData):
"""
table = list(range(256))
for _from, to in complementData.items():
- table[ord(_from[0])] = ord(to[0])
+ table[ord(_from[0].lower())] = ord(to[0].lower())
+ table[ord(_from[0].upper())] = ord(to[0].upper())
return ''.join(map(chr, table))
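The effect of the fix can be seen with a standalone sketch of the table (using a reduced A/T/G/C complement mapping; the sequence is the one in the new test below):

```python
def makeComplementTable(complementData):
    # Same logic as the fixed _makeComplementTable above.
    table = list(range(256))
    for _from, to in complementData.items():
        table[ord(_from[0].lower())] = ord(to[0].lower())
        table[ord(_from[0].upper())] = ord(to[0].upper())
    return ''.join(map(chr, table))

table = makeComplementTable({'A': 'T', 'T': 'A', 'G': 'C', 'C': 'G'})
sequence = 'CAGCAGctgcagcaccagcaccagcagcttcCACAT'
# Complement each base (case preserved), then reverse.
print(sequence.translate(table)[::-1])
# -> 'ATGTGgaagctgctggtgctggtgctgcagCTGCTG'
```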
| acorg/dark-matter | 31c6f64ccf9e55821a250a5a4324860c29782a0e | diff --git a/test/test_reads.py b/test/test_reads.py
index d586ab1..4663234 100644
--- a/test/test_reads.py
+++ b/test/test_reads.py
@@ -652,6 +652,16 @@ class TestDNARead(TestCase):
read = DNARead('id', 'ATCGMRWSVHXN')
self.assertEqual('NXDBSWYKCGAT', read.reverseComplement().sequence)
+ def testReverseComplementLowercaseLetters(self):
+ """
+ The reverseComplement function must correctly reverse complement
+ lowercase letters. The issue is described here:
+ https://github.com/acorg/dark-matter/issues/662
+ """
+ read = DNARead('id', 'CAGCAGctgcagcaccagcaccagcagcttcCACAT')
+ expected = ('ATGTGgaagctgctggtgctggtgctgcagCTGCTG')
+ self.assertEqual(expected, read.reverseComplement().sequence)
+
def testTranslationsOfEmptySequence(self):
"""
The translations function must correctly return all six (empty)
| Extract-ORFs.py mistranslates lowercase letters
In a sequence with mixed uppercase and lowercase letters, extract-ORFs.py output has incorrect translations only at the sites of lowercase letters. (Even though the `translations` function of `dark/reads.py` translates a sequence with mixed upper and lower case letters correctly.) | 0.0 | 31c6f64ccf9e55821a250a5a4324860c29782a0e | [
"test/test_reads.py::TestDNARead::testReverseComplementLowercaseLetters"
] | [
"test/test_reads.py::TestRead::testCasePreservation",
"test/test_reads.py::TestRead::testCheckAlphabetAAReadMatchingReturnTrue",
"test/test_reads.py::TestRead::testCheckAlphabetAAReadNotMatchingRaise",
"test/test_reads.py::TestRead::testCheckAlphabetDNAReadMatchingReturnTrue",
"test/test_reads.py::TestRead::testCheckAlphabetDNAReadNotMatchingRaise",
"test/test_reads.py::TestRead::testCheckAlphabetwithReadMustBePermissive",
"test/test_reads.py::TestRead::testEquality",
"test/test_reads.py::TestRead::testEqualityWithDifferingIds",
"test/test_reads.py::TestRead::testEqualityWithDifferingQuality",
"test/test_reads.py::TestRead::testEqualityWithDifferingSequences",
"test/test_reads.py::TestRead::testEqualityWithNoQuality",
"test/test_reads.py::TestRead::testEqualityWithOneOmittedQuality",
"test/test_reads.py::TestRead::testExpectedAttributes",
"test/test_reads.py::TestRead::testFromDict",
"test/test_reads.py::TestRead::testFromDictNoQuality",
"test/test_reads.py::TestRead::testGetitemFullCopy",
"test/test_reads.py::TestRead::testGetitemId",
"test/test_reads.py::TestRead::testGetitemLength",
"test/test_reads.py::TestRead::testGetitemQuality",
"test/test_reads.py::TestRead::testGetitemReturnsNewRead",
"test/test_reads.py::TestRead::testGetitemReversed",
"test/test_reads.py::TestRead::testGetitemSequence",
"test/test_reads.py::TestRead::testGetitemSingleIndex",
"test/test_reads.py::TestRead::testGetitemWithStep",
"test/test_reads.py::TestRead::testHashDiffersIfIdDiffers",
"test/test_reads.py::TestRead::testHashDiffersIfQualityDiffers",
"test/test_reads.py::TestRead::testHashDiffersIfSequenceDiffers",
"test/test_reads.py::TestRead::testHashIdenticalNoQuality",
"test/test_reads.py::TestRead::testHashIdenticalWithQuality",
"test/test_reads.py::TestRead::testHashViaDict",
"test/test_reads.py::TestRead::testHashViaSet",
"test/test_reads.py::TestRead::testKeepSites",
"test/test_reads.py::TestRead::testKeepSitesAllSites",
"test/test_reads.py::TestRead::testKeepSitesNoSites",
"test/test_reads.py::TestRead::testKeepSitesOutOfRange",
"test/test_reads.py::TestRead::testKeepSitesWithQuality",
"test/test_reads.py::TestRead::testLength",
"test/test_reads.py::TestRead::testLowComplexityFraction",
"test/test_reads.py::TestRead::testLowComplexityFractionEmptySequence",
"test/test_reads.py::TestRead::testLowComplexityFractionOne",
"test/test_reads.py::TestRead::testLowComplexityFractionZero",
"test/test_reads.py::TestRead::testNoQuality",
"test/test_reads.py::TestRead::testRemoveSites",
"test/test_reads.py::TestRead::testRemoveSitesAllSites",
"test/test_reads.py::TestRead::testRemoveSitesNoSites",
"test/test_reads.py::TestRead::testRemoveSitesOutOfRange",
"test/test_reads.py::TestRead::testRemoveSitesWithQuality",
"test/test_reads.py::TestRead::testToDict",
"test/test_reads.py::TestRead::testToDictNoQuality",
"test/test_reads.py::TestRead::testToFASTA",
"test/test_reads.py::TestRead::testToFASTAWithQuality",
"test/test_reads.py::TestRead::testToFASTQ",
"test/test_reads.py::TestRead::testToFASTQWithNoQuality",
"test/test_reads.py::TestRead::testToUnknownFormat",
"test/test_reads.py::TestRead::testUnequalLengths",
"test/test_reads.py::TestRead::testWalkHSPExactMatch",
"test/test_reads.py::TestRead::testWalkHSPExactMatchWithGap",
"test/test_reads.py::TestRead::testWalkHSPLeftAndRightOverhangingMatch",
"test/test_reads.py::TestRead::testWalkHSPLeftAndRightOverhangingMatchNoWhiskers",
"test/test_reads.py::TestRead::testWalkHSPLeftOverhangingMatch",
"test/test_reads.py::TestRead::testWalkHSPLeftOverhangingMatchNoWhiskers",
"test/test_reads.py::TestRead::testWalkHSPRightOverhangingMatch",
"test/test_reads.py::TestRead::testWalkHSPRightOverhangingMatchNoWhiskers",
"test/test_reads.py::TestDNARead::testGetitemReturnsNewDNARead",
"test/test_reads.py::TestDNARead::testReverseComplement",
"test/test_reads.py::TestDNARead::testReverseComplementAmbiguous",
"test/test_reads.py::TestDNARead::testReverseComplementReversesQuality",
"test/test_reads.py::TestDNARead::testTranslationOfMultipleStopCodons",
"test/test_reads.py::TestDNARead::testTranslationOfStartCodonATG",
"test/test_reads.py::TestDNARead::testTranslationOfStopCodonTAG",
"test/test_reads.py::TestDNARead::testTranslationOfStopCodonTGA",
"test/test_reads.py::TestDNARead::testTranslations",
"test/test_reads.py::TestDNARead::testTranslationsOfEmptySequence",
"test/test_reads.py::TestDNARead::testTranslationsOfOneBaseSequence",
"test/test_reads.py::TestDNARead::testTranslationsOfTwoBaseSequence",
"test/test_reads.py::TestRNARead::testGetitemReturnsNewRNARead",
"test/test_reads.py::TestRNARead::testReverseComplement",
"test/test_reads.py::TestRNARead::testReverseComplementAmbiguous",
"test/test_reads.py::TestRNARead::testTranslationOfStopCodonUAA",
"test/test_reads.py::TestAARead::testCloseCloseORF",
"test/test_reads.py::TestAARead::testCloseCloseThenCloseCloseORF",
"test/test_reads.py::TestAARead::testCloseCloseThenCloseCloseThenCloseCloseORF",
"test/test_reads.py::TestAARead::testCloseCloseThenCloseCloseThenCloseCloseORFWithJunk",
"test/test_reads.py::TestAARead::testCloseCloseThenCloseCloseThenCloseOpenORF",
"test/test_reads.py::TestAARead::testCloseCloseThenCloseCloseThenCloseOpenORFWithJunk",
"test/test_reads.py::TestAARead::testCloseCloseThenCloseOpenORF",
"test/test_reads.py::TestAARead::testCloseOpenORF",
"test/test_reads.py::TestAARead::testCloseOpenORFWithMultipleStarts",
"test/test_reads.py::TestAARead::testGetitemReturnsNewAARead",
"test/test_reads.py::TestAARead::testNoStartCodon_GithubIssue239",
"test/test_reads.py::TestAARead::testORFsEmptySequence",
"test/test_reads.py::TestAARead::testORFsEmptySequenceWithStart",
"test/test_reads.py::TestAARead::testORFsEmptySequenceWithStartStop",
"test/test_reads.py::TestAARead::testORFsWithJustStartsAndStops",
"test/test_reads.py::TestAARead::testORFsWithOneStopCodon",
"test/test_reads.py::TestAARead::testORFsWithTwoStopCodons",
"test/test_reads.py::TestAARead::testOpenCloseORF",
"test/test_reads.py::TestAARead::testOpenCloseORFWithMultipleStops",
"test/test_reads.py::TestAARead::testOpenCloseThenCloseCloseThenCloseOpenORF",
"test/test_reads.py::TestAARead::testOpenCloseThenCloseCloseThenCloseOpenORFWithJunk",
"test/test_reads.py::TestAARead::testOpenCloseThenCloseOpenORF",
"test/test_reads.py::TestAARead::testOpenOpenORF",
"test/test_reads.py::TestAARead::testPropertiesCorrectTranslation",
"test/test_reads.py::TestAARead::testPropertyDetailsCorrectTranslation",
"test/test_reads.py::TestAAReadWithX::testAlphabet",
"test/test_reads.py::TestAAReadWithX::testAlphabetChecking",
"test/test_reads.py::TestAAReadWithX::testGetitemReturnsNewAAReadWithX",
"test/test_reads.py::TestAAReadORF::testClosedClosedId",
"test/test_reads.py::TestAAReadORF::testClosedOpenId",
"test/test_reads.py::TestAAReadORF::testFromDict",
"test/test_reads.py::TestAAReadORF::testOpenClosedId",
"test/test_reads.py::TestAAReadORF::testOpenLeft",
"test/test_reads.py::TestAAReadORF::testOpenOpenId",
"test/test_reads.py::TestAAReadORF::testOpenRight",
"test/test_reads.py::TestAAReadORF::testSequence",
"test/test_reads.py::TestAAReadORF::testStart",
"test/test_reads.py::TestAAReadORF::testStartGreaterThanStop",
"test/test_reads.py::TestAAReadORF::testStartNegative",
"test/test_reads.py::TestAAReadORF::testStop",
"test/test_reads.py::TestAAReadORF::testStopGreaterThanOriginalSequenceLength",
"test/test_reads.py::TestAAReadORF::testToDict",
"test/test_reads.py::TestSSAARead::testCorrectAttributes",
"test/test_reads.py::TestSSAARead::testFromDict",
"test/test_reads.py::TestSSAARead::testGetitemFullCopy",
"test/test_reads.py::TestSSAARead::testGetitemId",
"test/test_reads.py::TestSSAARead::testGetitemLength",
"test/test_reads.py::TestSSAARead::testGetitemReturnsNewRead",
"test/test_reads.py::TestSSAARead::testGetitemReversed",
"test/test_reads.py::TestSSAARead::testGetitemSequence",
"test/test_reads.py::TestSSAARead::testGetitemSingleIndex",
"test/test_reads.py::TestSSAARead::testGetitemStructure",
"test/test_reads.py::TestSSAARead::testGetitemWithStep",
"test/test_reads.py::TestSSAARead::testHashDiffersIfIdDiffers",
"test/test_reads.py::TestSSAARead::testHashDiffersIfSequenceDiffers",
"test/test_reads.py::TestSSAARead::testHashDiffersIfStructureDiffers",
"test/test_reads.py::TestSSAARead::testHashViaDict",
"test/test_reads.py::TestSSAARead::testHashViaSet",
"test/test_reads.py::TestSSAARead::testKeepSites",
"test/test_reads.py::TestSSAARead::testKeepSitesAllSites",
"test/test_reads.py::TestSSAARead::testKeepSitesNoSites",
"test/test_reads.py::TestSSAARead::testKeepSitesOutOfRange",
"test/test_reads.py::TestSSAARead::testReads",
"test/test_reads.py::TestSSAARead::testRemoveSites",
"test/test_reads.py::TestSSAARead::testRemoveSitesAllSites",
"test/test_reads.py::TestSSAARead::testRemoveSitesNoSites",
"test/test_reads.py::TestSSAARead::testRemoveSitesOutOfRange",
"test/test_reads.py::TestSSAARead::testSequenceLengthMatchesStructureLength",
"test/test_reads.py::TestSSAARead::testToDict",
"test/test_reads.py::TestSSAARead::testToString",
"test/test_reads.py::TestSSAARead::testToStringWithExplicitFastaFormat",
"test/test_reads.py::TestSSAARead::testToStringWithExplicitFastaSSFormat",
"test/test_reads.py::TestSSAARead::testToStringWithStructureSuffix",
"test/test_reads.py::TestSSAARead::testToStringWithUnknownFormat",
"test/test_reads.py::TestSSAAReadWithX::testCorrectAttributes",
"test/test_reads.py::TestSSAAReadWithX::testFromDict",
"test/test_reads.py::TestSSAAReadWithX::testGetitemFullCopy",
"test/test_reads.py::TestSSAAReadWithX::testGetitemId",
"test/test_reads.py::TestSSAAReadWithX::testGetitemLength",
"test/test_reads.py::TestSSAAReadWithX::testGetitemReturnsNewRead",
"test/test_reads.py::TestSSAAReadWithX::testGetitemReversed",
"test/test_reads.py::TestSSAAReadWithX::testGetitemSequence",
"test/test_reads.py::TestSSAAReadWithX::testGetitemSingleIndex",
"test/test_reads.py::TestSSAAReadWithX::testGetitemStructure",
"test/test_reads.py::TestSSAAReadWithX::testGetitemWithStep",
"test/test_reads.py::TestSSAAReadWithX::testHashDiffersIfIdDiffers",
"test/test_reads.py::TestSSAAReadWithX::testHashDiffersIfSequenceDiffers",
"test/test_reads.py::TestSSAAReadWithX::testHashDiffersIfStructureDiffers",
"test/test_reads.py::TestSSAAReadWithX::testHashViaDict",
"test/test_reads.py::TestSSAAReadWithX::testHashViaSet",
"test/test_reads.py::TestSSAAReadWithX::testKeepSites",
"test/test_reads.py::TestSSAAReadWithX::testKeepSitesAllSites",
"test/test_reads.py::TestSSAAReadWithX::testKeepSitesNoSites",
"test/test_reads.py::TestSSAAReadWithX::testKeepSitesOutOfRange",
"test/test_reads.py::TestSSAAReadWithX::testReads",
"test/test_reads.py::TestSSAAReadWithX::testRemoveSites",
"test/test_reads.py::TestSSAAReadWithX::testRemoveSitesAllSites",
"test/test_reads.py::TestSSAAReadWithX::testRemoveSitesNoSites",
"test/test_reads.py::TestSSAAReadWithX::testRemoveSitesOutOfRange",
"test/test_reads.py::TestSSAAReadWithX::testSequenceContainingX",
"test/test_reads.py::TestSSAAReadWithX::testSequenceLengthMatchesStructureLength",
"test/test_reads.py::TestSSAAReadWithX::testToDict",
"test/test_reads.py::TestSSAAReadWithX::testToString",
"test/test_reads.py::TestSSAAReadWithX::testToStringWithExplicitFastaFormat",
"test/test_reads.py::TestSSAAReadWithX::testToStringWithExplicitFastaSSFormat",
"test/test_reads.py::TestSSAAReadWithX::testToStringWithStructureSuffix",
"test/test_reads.py::TestSSAAReadWithX::testToStringWithUnknownFormat",
"test/test_reads.py::TestTranslatedRead::testExpectedAttributes",
"test/test_reads.py::TestTranslatedRead::testExpectedFrame",
"test/test_reads.py::TestTranslatedRead::testFromDict",
"test/test_reads.py::TestTranslatedRead::testId",
"test/test_reads.py::TestTranslatedRead::testIdReverseComplemented",
"test/test_reads.py::TestTranslatedRead::testMaximumORFLength",
"test/test_reads.py::TestTranslatedRead::testMaximumORFLengthNoStops",
"test/test_reads.py::TestTranslatedRead::testOutOfRangeFrame",
"test/test_reads.py::TestTranslatedRead::testReverseComplemented",
"test/test_reads.py::TestTranslatedRead::testSequence",
"test/test_reads.py::TestTranslatedRead::testToDict",
"test/test_reads.py::TestReadClassNameToClass::testNames",
"test/test_reads.py::TestReads::testEmptyInitialReads",
"test/test_reads.py::TestReads::testInitialReads",
"test/test_reads.py::TestReads::testManuallyAddedReads",
"test/test_reads.py::TestReads::testManuallyAddedReadsLength",
"test/test_reads.py::TestReads::testNoReads",
"test/test_reads.py::TestReads::testNoReadsLength",
"test/test_reads.py::TestReads::testRepeatedIter",
"test/test_reads.py::TestReads::testSaveAsFASTA",
"test/test_reads.py::TestReads::testSaveAsFASTQ",
"test/test_reads.py::TestReads::testSaveAsFASTQFailsOnReadWithNoQuality",
"test/test_reads.py::TestReads::testSaveFASTAIsDefault",
"test/test_reads.py::TestReads::testSaveReturnsReadCount",
"test/test_reads.py::TestReads::testSaveToFileDescriptor",
"test/test_reads.py::TestReads::testSaveWithUnknownFormat",
"test/test_reads.py::TestReads::testSaveWithUppercaseFormat",
"test/test_reads.py::TestReads::testSubclass",
"test/test_reads.py::TestReads::testSubclassLength",
"test/test_reads.py::TestReads::testSubclassWithAdditionalReads",
"test/test_reads.py::TestReads::testUnfilteredLengthAdditionalReads",
"test/test_reads.py::TestReads::testUnfilteredLengthAdditionalReadsAfterFiltering",
"test/test_reads.py::TestReads::testUnfilteredLengthBeforeIterating",
"test/test_reads.py::TestReads::testUnfilteredLengthInitialReads",
"test/test_reads.py::TestReads::testUnfilteredLengthInitialReadsAfterFiltering",
"test/test_reads.py::TestReads::testUnfilteredLengthInitialReadsIsReads",
"test/test_reads.py::TestReads::testUnfilteredLengthInitialReadsIsReadsWithAdditional",
"test/test_reads.py::TestReads::testUnfilteredLengthInitialSubclassThenFiltered",
"test/test_reads.py::TestReads::testUnfilteredLengthInitialSubclassWithAdditionalThenFiltered",
"test/test_reads.py::TestReads::testUnfilteredLengthInitialSubclassWithNoLen",
"test/test_reads.py::TestReads::testUnfilteredLengthNoReads",
"test/test_reads.py::TestReadsFiltering::testAddFiltersThenClearFilters",
"test/test_reads.py::TestReadsFiltering::testFilterBlacklist",
"test/test_reads.py::TestReadsFiltering::testFilterDoNotRemoveDescriptions",
"test/test_reads.py::TestReadsFiltering::testFilterDuplicates",
"test/test_reads.py::TestReadsFiltering::testFilterDuplicatesById",
"test/test_reads.py::TestReadsFiltering::testFilterHead",
"test/test_reads.py::TestReadsFiltering::testFilterHeadZero",
"test/test_reads.py::TestReadsFiltering::testFilterKeepSequences",
"test/test_reads.py::TestReadsFiltering::testFilterKeepSequencesNoSequences",
"test/test_reads.py::TestReadsFiltering::testFilterNegativeRegex",
"test/test_reads.py::TestReadsFiltering::testFilterNoArgs",
"test/test_reads.py::TestReadsFiltering::testFilterOnLengthEverythingMatches",
"test/test_reads.py::TestReadsFiltering::testFilterOnLengthNothingMatches",
"test/test_reads.py::TestReadsFiltering::testFilterOnMaxLength",
"test/test_reads.py::TestReadsFiltering::testFilterOnMinLength",
"test/test_reads.py::TestReadsFiltering::testFilterPositiveRegex",
"test/test_reads.py::TestReadsFiltering::testFilterRandomSubsetOfFiveFromFiveReads",
"test/test_reads.py::TestReadsFiltering::testFilterRandomSubsetOfFiveFromOneRead",
"test/test_reads.py::TestReadsFiltering::testFilterRandomSubsetOfOneFromOneRead",
"test/test_reads.py::TestReadsFiltering::testFilterRandomSubsetOfTwoFromFiveReads",
"test/test_reads.py::TestReadsFiltering::testFilterRandomSubsetOfZeroReads",
"test/test_reads.py::TestReadsFiltering::testFilterRandomSubsetSizeZeroNoReads",
"test/test_reads.py::TestReadsFiltering::testFilterRandomSubsetSizeZeroTwoReads",
"test/test_reads.py::TestReadsFiltering::testFilterRemoveDescriptions",
"test/test_reads.py::TestReadsFiltering::testFilterRemoveGaps",
"test/test_reads.py::TestReadsFiltering::testFilterRemoveGapsWithQuality",
"test/test_reads.py::TestReadsFiltering::testFilterRemoveSequences",
"test/test_reads.py::TestReadsFiltering::testFilterRemoveSequencesNoSequences",
"test/test_reads.py::TestReadsFiltering::testFilterReturnsReadInstance",
"test/test_reads.py::TestReadsFiltering::testFilterTruncateTitles",
"test/test_reads.py::TestReadsFiltering::testFilterWhitelist",
"test/test_reads.py::TestReadsFiltering::testFilterWithMinLengthEqualToMaxLength",
"test/test_reads.py::TestReadsFiltering::testFilterWithModifierThatChangesIds",
"test/test_reads.py::TestReadsFiltering::testFilterWithModifierThatOmits",
"test/test_reads.py::TestReadsFiltering::testFilterWithModifierThatOmitsAndChangesIds",
"test/test_reads.py::TestReadsFiltering::testFilteredReadsInstanceHasExpectedLength",
"test/test_reads.py::TestReadsFiltering::testIdLambda",
"test/test_reads.py::TestReadsFiltering::testIdLambdaReturningNone",
"test/test_reads.py::TestReadsFiltering::testKeepSites",
"test/test_reads.py::TestReadsFiltering::testKeepSitesAllSites",
"test/test_reads.py::TestReadsFiltering::testKeepSitesNoSites",
"test/test_reads.py::TestReadsFiltering::testKeepSitesOutOfRange",
"test/test_reads.py::TestReadsFiltering::testKeepSitesWithQuality",
"test/test_reads.py::TestReadsFiltering::testLineNumberFile",
"test/test_reads.py::TestReadsFiltering::testLineNumberFileEmpty",
"test/test_reads.py::TestReadsFiltering::testLineNumberFileFirstLineTooSmall",
"test/test_reads.py::TestReadsFiltering::testLineNumberFileNonAscending",
"test/test_reads.py::TestReadsFiltering::testLineNumberFileRunOutOfSequences",
"test/test_reads.py::TestReadsFiltering::testReadLambda",
"test/test_reads.py::TestReadsFiltering::testReadLambdaReturningNone",
"test/test_reads.py::TestReadsFiltering::testRemoveAndKeepSites",
"test/test_reads.py::TestReadsFiltering::testRemoveSites",
"test/test_reads.py::TestReadsFiltering::testRemoveSitesAllSites",
"test/test_reads.py::TestReadsFiltering::testRemoveSitesNoSites",
"test/test_reads.py::TestReadsFiltering::testRemoveSitesOutOfRange",
"test/test_reads.py::TestReadsFiltering::testRemoveSitesWithQuality",
"test/test_reads.py::TestReadsFiltering::testSampleFractionAndNoTrueLengthRaisesValueError",
"test/test_reads.py::TestReadsFiltering::testSampleFractionAndRandomSubsetRaisesValueError",
"test/test_reads.py::TestReadsFiltering::testSampleFractionOne",
"test/test_reads.py::TestReadsFiltering::testSampleFractionPointOne",
"test/test_reads.py::TestReadsFiltering::testSampleFractionZero",
"test/test_reads.py::TestReadsInRAM::testAdd",
"test/test_reads.py::TestReadsInRAM::testFromReads",
"test/test_reads.py::TestReadsInRAM::testNoReads",
"test/test_reads.py::TestReadsInRAM::testOneReadIndex",
"test/test_reads.py::TestReadsInRAM::testOneReadLength",
"test/test_reads.py::TestReadsInRAM::testOneReadList",
"test/test_reads.py::TestReadsInRAM::testSetItem",
"test/test_reads.py::TestReadsInRAM::testTwoReadsIndex",
"test/test_reads.py::TestReadsInRAM::testTwoReadsLength",
"test/test_reads.py::TestReadsInRAM::testTwoReadsList",
"test/test_reads.py::TestSummarizePosition::testCorrectFrequencies",
"test/test_reads.py::TestSummarizePosition::testExcludeShortSequences",
"test/test_reads.py::TestSummarizePosition::testFrequenciesNoReads",
"test/test_reads.py::TestSummarizePosition::testIndexLargerThanSequenceLength",
"test/test_reads.py::TestSummarizePosition::testNumberOfExclusionsNoReads",
"test/test_reads.py::TestSitesMatching::testAllMatches",
"test/test_reads.py::TestSitesMatching::testIgnoreCase",
"test/test_reads.py::TestSitesMatching::testMatchCase",
"test/test_reads.py::TestSitesMatching::testMultipleReadsAll",
"test/test_reads.py::TestSitesMatching::testMultipleReadsAllWithDifferingLengths",
"test/test_reads.py::TestSitesMatching::testMultipleReadsAny",
"test/test_reads.py::TestSitesMatching::testMultipleReadsAnyWithDifferingLengths",
"test/test_reads.py::TestSitesMatching::testNoMatches",
"test/test_reads.py::TestSitesMatching::testPartialMatch"
] | {
"failed_lite_validators": [
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false
} | 2018-12-14 13:22:15+00:00 | mit | 873 |
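
To illustrate the fix in the record above: the patched `_makeComplementTable` registers both the lowercase and uppercase form of each base in a 256-entry translation table, so `str.translate` complements every base while preserving its case. Below is a minimal Python 3 sketch; the `complements` mapping is a simplified stand-in for the module's real data, which also covers IUPAC ambiguity codes (M, R, W, ...):

```python
# Case-preserving reverse complement, mirroring the patched
# _makeComplementTable. Python 3 only: str.translate indexes the
# table string by each character's ordinal.
complements = {'a': 't', 'c': 'g', 'g': 'c', 't': 'a'}  # simplified

def makeComplementTable(complementData):
    # Start from the identity mapping for all 256 byte values, then
    # overwrite both the lower- and uppercase form of each base.
    table = list(range(256))
    for base, complement in complementData.items():
        table[ord(base.lower())] = ord(complement.lower())
        table[ord(base.upper())] = ord(complement.upper())
    return ''.join(map(chr, table))

TABLE = makeComplementTable(complements)

def reverseComplement(sequence):
    # Complement every character via the table, then reverse.
    return sequence.translate(TABLE)[::-1]

assert reverseComplement('ATCgga') == 'tccGAT'
```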
|
acorg__dark-matter-680 | diff --git a/CHANGELOG.md b/CHANGELOG.md
index cf03d75..fc519cd 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,3 +1,16 @@
+## 3.0.70 Jul 30, 2019
+
+Added `titleRegex` and `negativeTitleRegex` to `ProteinGrouper` class and
+`--titleRegex` and `--negativeTitleRegex` arguments to
+`proteins-to-pathogens.py`.
+
+## 3.0.69 Jul 30, 2019
+
+Added `--title` and `--preamble` args to output from
+`proteins-to-pathogens.py`. Fixed `ProteinGrouper` HTML NCBI protein link
+and added genome link. Added positive and negative filtering by regex to
+`TitlesAlignments` and tests. Improved NCBI link generation and tests.
+
## 3.0.68 Jun 9, 2019
Refactored `SAMFilter` to allow filtering alignments in pileups. Added
diff --git a/bin/proteins-to-pathogens.py b/bin/proteins-to-pathogens.py
index ed0bbd8..c39670e 100755
--- a/bin/proteins-to-pathogens.py
+++ b/bin/proteins-to-pathogens.py
@@ -157,6 +157,14 @@ if __name__ == '__main__':
'--preamble',
help='Optional preamble text to show after the title.')
+ parser.add_argument(
+ '--titleRegex', default=None,
+        help='A regex that pathogen names must match.')
+
+ parser.add_argument(
+ '--negativeTitleRegex', default=None,
+        help='A regex that pathogen names must not match.')
+
args = parser.parse_args()
if args.sampleName and args.sampleNameRegex:
@@ -192,7 +200,9 @@ if __name__ == '__main__':
sampleNameRegex=args.sampleNameRegex,
format_=args.format,
proteinFastaFilenames=proteinFastaFilenames,
- saveReadLengths=args.showReadLengths)
+ saveReadLengths=args.showReadLengths,
+ titleRegex=args.titleRegex,
+ negativeTitleRegex=args.negativeTitleRegex)
if args.filenames:
filenames = args.filenames
diff --git a/dark/__init__.py b/dark/__init__.py
index 8df318a..50781d0 100644
--- a/dark/__init__.py
+++ b/dark/__init__.py
@@ -8,4 +8,4 @@ if sys.version_info < (2, 7):
#
# Remember to update ../CHANGELOG.md describing what's new in each version.
-__version__ = '3.0.69'
+__version__ = '3.0.70'
diff --git a/dark/proteins.py b/dark/proteins.py
index 1cef243..1310abd 100644
--- a/dark/proteins.py
+++ b/dark/proteins.py
@@ -11,6 +11,7 @@ from textwrap import fill
from dark.dimension import dimensionalIterator
from dark.fasta import FastaReads
from dark.fastq import FastqReads
+from dark.filter import TitleFilter
from dark.html import NCBISequenceLinkURL
from dark.reads import Reads
@@ -211,6 +212,14 @@ class ProteinGrouper(object):
proteins each matched pathogen has.
@param saveReadLengths: If C{True}, save the lengths of all reads matching
proteins.
+ @param titleRegex: A regex that pathogen names must match.
+ Note that this matching is done on the final part of the protein title
+ in square brackets, according to the convention used by the NCBI viral
+ refseq database and RVDB.
+ @param negativeTitleRegex: A regex that pathogen names must not match.
+ Note that this matching is done on the final part of the protein title
+ in square brackets, according to the convention used by the NCBI viral
+ refseq database and RVDB.
@raise ValueError: If C{format_} is unknown.
"""
@@ -218,7 +227,8 @@ class ProteinGrouper(object):
def __init__(self, assetDir='out', sampleName=None, sampleNameRegex=None,
format_='fasta', proteinFastaFilenames=None,
- saveReadLengths=False):
+ saveReadLengths=False, titleRegex=None,
+ negativeTitleRegex=None):
self._assetDir = assetDir
self._sampleName = sampleName
self._sampleNameRegex = (re.compile(sampleNameRegex) if sampleNameRegex
@@ -229,6 +239,12 @@ class ProteinGrouper(object):
raise ValueError("format_ must be either 'fasta' or 'fastq'.")
self._saveReadLengths = saveReadLengths
+ if titleRegex or negativeTitleRegex:
+ self.titleFilter = TitleFilter(
+ positiveRegex=titleRegex, negativeRegex=negativeTitleRegex)
+ else:
+ self.titleFilter = None
+
self._pathogenProteinCount = getPathogenProteinCounts(
proteinFastaFilenames)
@@ -285,6 +301,11 @@ class ProteinGrouper(object):
proteinName, pathogenName = splitNames(names)
+ # Ignore pathogens with names we don't want.
+ if (self.titleFilter and self.titleFilter.accept(
+ pathogenName) == TitleFilter.REJECT):
+ continue
+
if pathogenName not in self.pathogenNames:
self.pathogenNames[pathogenName] = {}
| acorg/dark-matter | e76e23cc5230e4db4742c4418b6e5b8ddb0bc8d4 | diff --git a/test/test_proteins.py b/test/test_proteins.py
index 3cd8d09..f68397a 100644
--- a/test/test_proteins.py
+++ b/test/test_proteins.py
@@ -744,6 +744,94 @@ class TestProteinGrouper(TestCase):
'GENBANK|I44.6|GENBANK|J77|VP1\n',
pg.toStr())
+ def testPathogenNameRegex(self):
+ """
+ If a protein grouper is given one file with two lines from different
+ pathogens, and one pathogen title does not match a passed title regex,
+ the pathogenNames dict must be as expected.
+ """
+ fp = StringIO(
+ '0.63 41.3 44.2 9 9 12 acc|GENBANK|I44.6|GENBANK|J77|VP1 '
+ '[Lausannevirus]\n'
+ '0.77 46.6 48.1 5 6 74 acc|GENBANK|I44.7|GENBANK|J78|VP2 '
+ '[Hepatitis B virus]\n'
+ )
+ pg = ProteinGrouper(titleRegex='Lausannevirus')
+ pg.addFile('sample-filename', fp)
+ self.assertEqual(
+ {
+ 'Lausannevirus': {
+ 'sample-filename': {
+ 'proteins': {
+ 'acc|GENBANK|I44.6|GENBANK|J77|VP1': {
+ 'bestScore': 44.2,
+ 'bluePlotFilename': 'out/0.png',
+ 'coverage': 0.63,
+ 'readsFilename': 'out/0.fasta',
+ 'hspCount': 9,
+ 'index': 0,
+ 'medianScore': 41.3,
+ 'outDir': 'out',
+ 'proteinLength': 12,
+ 'proteinName': (
+ 'acc|GENBANK|I44.6|GENBANK|J77|VP1'),
+ 'proteinURL': ('http://www.ncbi.nlm.nih.gov/'
+ 'nuccore/I44.6'),
+ 'genomeURL': (
+ 'http://www.ncbi.nlm.nih.gov/nuccore/J77'),
+ 'readCount': 9,
+ },
+ },
+ 'uniqueReadCount': None,
+ },
+ },
+ },
+ pg.pathogenNames)
+
+ def testPathogenNegativeNameRegex(self):
+ """
+ If a protein grouper is given one file with two lines from different
+ pathogens, and one pathogen title does not match a passed negative
+ title regex, the pathogenNames dict must be as expected.
+ """
+ fp = StringIO(
+ '0.63 41.3 44.2 9 9 12 acc|GENBANK|I44.6|GENBANK|J77|VP1 '
+ '[Lausannevirus]\n'
+ '0.77 46.6 48.1 5 6 74 acc|GENBANK|I44.7|GENBANK|J78|VP2 '
+ '[Hepatitis B virus]\n'
+ )
+ pg = ProteinGrouper(negativeTitleRegex='Hepatitis')
+ pg.addFile('sample-filename', fp)
+ self.assertEqual(
+ {
+ 'Lausannevirus': {
+ 'sample-filename': {
+ 'proteins': {
+ 'acc|GENBANK|I44.6|GENBANK|J77|VP1': {
+ 'bestScore': 44.2,
+ 'bluePlotFilename': 'out/0.png',
+ 'coverage': 0.63,
+ 'readsFilename': 'out/0.fasta',
+ 'hspCount': 9,
+ 'index': 0,
+ 'medianScore': 41.3,
+ 'outDir': 'out',
+ 'proteinLength': 12,
+ 'proteinName': (
+ 'acc|GENBANK|I44.6|GENBANK|J77|VP1'),
+ 'proteinURL': ('http://www.ncbi.nlm.nih.gov/'
+ 'nuccore/I44.6'),
+ 'genomeURL': (
+ 'http://www.ncbi.nlm.nih.gov/nuccore/J77'),
+ 'readCount': 9,
+ },
+ },
+ 'uniqueReadCount': None,
+ },
+ },
+ },
+ pg.pathogenNames)
+
class TestPathogenSampleFiles(TestCase):
"""
| Add title regex (positive and negative) to protein grouper (a sketch of this regex filtering follows this record) | 0.0 | e76e23cc5230e4db4742c4418b6e5b8ddb0bc8d4 | [
"test/test_proteins.py::TestProteinGrouper::testPathogenNameRegex",
"test/test_proteins.py::TestProteinGrouper::testPathogenNegativeNameRegex"
] | [
"test/test_proteins.py::TestSplitNames::testNestedBrackets",
"test/test_proteins.py::TestSplitNames::testNoBrackets",
"test/test_proteins.py::TestSplitNames::testNormalCase",
"test/test_proteins.py::TestSplitNames::testTwoSetsOfBrackets",
"test/test_proteins.py::TestSplitNames::testWhitespaceStripping",
"test/test_proteins.py::TestGetPathogenProteinCounts::testExpected",
"test/test_proteins.py::TestGetPathogenProteinCounts::testExpectedWithTwoFiles",
"test/test_proteins.py::TestGetPathogenProteinCounts::testNone",
"test/test_proteins.py::TestProteinGrouper::testAssetDir",
"test/test_proteins.py::TestProteinGrouper::testDuplicatePathogenProteinSample",
"test/test_proteins.py::TestProteinGrouper::testNoAssetDir",
"test/test_proteins.py::TestProteinGrouper::testNoFiles",
"test/test_proteins.py::TestProteinGrouper::testNoFilesToStr",
"test/test_proteins.py::TestProteinGrouper::testNoFilesToStrWithTitle",
"test/test_proteins.py::TestProteinGrouper::testNoFilesToStrWithTitleAndPreamble",
"test/test_proteins.py::TestProteinGrouper::testNoRegex",
"test/test_proteins.py::TestProteinGrouper::testNoSampleName",
"test/test_proteins.py::TestProteinGrouper::testOneLineInEachOfTwoFilesDifferentPathogens",
"test/test_proteins.py::TestProteinGrouper::testOneLineInEachOfTwoFilesDifferentPathogensTitle",
"test/test_proteins.py::TestProteinGrouper::testOneLineInEachOfTwoFilesSamePathogen",
"test/test_proteins.py::TestProteinGrouper::testOneLineInEachOfTwoFilesSamePathogenTitle",
"test/test_proteins.py::TestProteinGrouper::testOneLineInOneFile",
"test/test_proteins.py::TestProteinGrouper::testOneLineInOneFileFASTQ",
"test/test_proteins.py::TestProteinGrouper::testOneLineInOneFileTitle",
"test/test_proteins.py::TestProteinGrouper::testOneLineInOneFileToStr",
"test/test_proteins.py::TestProteinGrouper::testOneLineInOneFileWithDifferentAssetDir",
"test/test_proteins.py::TestProteinGrouper::testTwoLinesInOneFileDifferentPathogens",
"test/test_proteins.py::TestProteinGrouper::testTwoLinesInOneFileSamePathogen",
"test/test_proteins.py::TestProteinGrouper::testTwoLinesInOneFileTitle",
"test/test_proteins.py::TestProteinGrouper::testUnknownFormat",
"test/test_proteins.py::TestProteinGrouper::testUnknownPathogenType",
"test/test_proteins.py::TestPathogenSampleFiles::testIdenticalReadsRemoved",
"test/test_proteins.py::TestPathogenSampleFiles::testOpenNotCalledOnRepeatedCall",
"test/test_proteins.py::TestPathogenSampleFiles::testProteinsSavedCorrectly",
"test/test_proteins.py::TestPathogenSampleFiles::testReadLengthsAdded",
"test/test_proteins.py::TestPathogenSampleFiles::testUnknownFormat",
"test/test_proteins.py::TestPathogenSampleFiles::testWritePathogenIndex",
"test/test_proteins.py::TestPathogenSampleFiles::testWriteSampleIndex"
] | {
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false
} | 2019-07-30 15:24:11+00:00 | mit | 874 |
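
To illustrate the record above: the patch delegates the positive/negative name matching to `dark.filter.TitleFilter`. The real class has more accept states and options, so treat the following as a reduced sketch of the filtering idea only:

```python
import re

class TitleFilter:
    # Illustrative accept/reject values; the real class defines more states.
    ACCEPT, REJECT = 'accept', 'reject'

    def __init__(self, positiveRegex=None, negativeRegex=None):
        self._positive = re.compile(positiveRegex) if positiveRegex else None
        self._negative = re.compile(negativeRegex) if negativeRegex else None

    def accept(self, title):
        # A title must match the positive regex (if any) and must not
        # match the negative regex (if any).
        if self._positive and not self._positive.search(title):
            return self.REJECT
        if self._negative and self._negative.search(title):
            return self.REJECT
        return self.ACCEPT

# The behaviour the new tests expect.
tf = TitleFilter(negativeRegex='Hepatitis')
assert tf.accept('Lausannevirus') == TitleFilter.ACCEPT
assert tf.accept('Hepatitis B virus') == TitleFilter.REJECT
```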
|
acorg__dark-matter-686 | diff --git a/CHANGELOG.md b/CHANGELOG.md
index 6a08922..5f2e8a9 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,3 +1,12 @@
+## 3.0.79 Aug 8, 2019
+
+Add `--omitVirusLinks` and `--omitSampleProteinCount` options to
+`proteins-to-pathogens.py` to make HTML output less confusing when running
+on RVDB or OKIAV databases. Removed highlighting of pathogens with high
+protein fraction since that was done in a non-useful way. Removed index
+field from HTML output and removed HSP count unless it differs from the
+read count.
+
## 3.0.78 Aug 2, 2019
Fixed silly import error.
diff --git a/bin/proteins-to-pathogens.py b/bin/proteins-to-pathogens.py
index 252d025..3daa58e 100755
--- a/bin/proteins-to-pathogens.py
+++ b/bin/proteins-to-pathogens.py
@@ -170,6 +170,23 @@ if __name__ == '__main__':
'--negativeTitleRegex', default=None,
help='a regex that pathogen names must not match.')
+ parser.add_argument(
+ '--omitVirusLinks', default=False, action='store_true',
+ help=('If specified, the HTML output (use --html to get this) for '
+ 'viruses will not contain links to ICTV and ViralZone. '
+ 'This should be used when working with viruses that do not yet '
+ 'have names that can be looked up.'))
+
+ parser.add_argument(
+ '--omitSampleProteinCount', default=False, action='store_true',
+ help=('If specified, the HTML output (use --html to get this) for '
+ 'viruses will not contain counts of the number of proteins '
+ 'matched by each sample for a given pathogen. This should be '
+ 'used when working with RVDB where there are many sequences '
+ 'for some proteins and a sample matches many of them, leading '
+ 'to incorrect reporting of the number of proteins of a pathogen '
+ 'that are matched by samples.'))
+
args = parser.parse_args()
if args.sampleName and args.sampleNameRegex:
@@ -186,6 +203,19 @@ if __name__ == '__main__':
print('It does not make sense to use --pathogenIndexFilename '
'without also using --html', file=sys.stderr)
sys.exit(1)
+ if args.omitVirusLinks:
+ print('It does not make sense to use --omitVirusLinks '
+ 'without also using --html', file=sys.stderr)
+ sys.exit(1)
+ if args.omitSampleProteinCount:
+ print('It does not make sense to use --omitSampleProteinCount '
+ 'without also using --html', file=sys.stderr)
+ sys.exit(1)
+
+ if args.omitVirusLinks and args.pathogenType != 'viral':
+ print('The --omitVirusLinks option only makes sense with '
+ '--pathogenType viral', file=sys.stderr)
+ sys.exit(1)
if args.proteinFastaFilename:
# Flatten lists of lists that we get from using both nargs='+' and
@@ -220,11 +250,14 @@ if __name__ == '__main__':
grouper.addFile(filename, fp)
if args.html:
- print(grouper.toHTML(args.pathogenPanelFilename,
- minProteinFraction=args.minProteinFraction,
- pathogenType=args.pathogenType,
- title=args.title, preamble=args.preamble,
- sampleIndexFilename=args.sampleIndexFilename,
- pathogenIndexFilename=args.pathogenIndexFilename))
+ print(grouper.toHTML(
+ args.pathogenPanelFilename,
+ minProteinFraction=args.minProteinFraction,
+ pathogenType=args.pathogenType,
+ title=args.title, preamble=args.preamble,
+ sampleIndexFilename=args.sampleIndexFilename,
+ pathogenIndexFilename=args.pathogenIndexFilename,
+ omitVirusLinks=args.omitVirusLinks,
+ omitSampleProteinCount=args.omitSampleProteinCount))
else:
print(grouper.toStr())
diff --git a/dark/__init__.py b/dark/__init__.py
index 15c32b1..6e9c357 100644
--- a/dark/__init__.py
+++ b/dark/__init__.py
@@ -8,4 +8,4 @@ if sys.version_info < (2, 7):
#
# Remember to update ../CHANGELOG.md describing what's new in each version.
-__version__ = '3.0.78'
+__version__ = '3.0.79'
diff --git a/dark/proteins.py b/dark/proteins.py
index 0449e7e..d6cefee 100644
--- a/dark/proteins.py
+++ b/dark/proteins.py
@@ -244,6 +244,7 @@ class ProteinGrouper(object):
VIRALZONE = 'https://viralzone.expasy.org/search?query='
ICTV = 'https://talk.ictvonline.org/search-124283882/?q='
READCOUNT_MARKER = '*READ-COUNT*'
+ READ_AND_HSP_COUNT_STR_SEP = '/'
def __init__(self, assetDir='out', sampleName=None, sampleNameRegex=None,
format_='fasta', proteinFastaFilenames=None,
@@ -362,7 +363,7 @@ class ProteinGrouper(object):
proteinURL = NCBISequenceLinkURL(proteinName, field=2)
genomeURL = NCBISequenceLinkURL(proteinName, field=4)
- proteins[proteinName] = {
+ proteinInfo = proteins[proteinName] = {
'bestScore': float(bestScore),
'bluePlotFilename': join(outDir, '%d.png' % index),
'coverage': float(coverage),
@@ -378,6 +379,12 @@ class ProteinGrouper(object):
'readCount': int(readCount),
}
+ if proteinInfo['readCount'] == proteinInfo['hspCount']:
+ proteinInfo['readAndHspCountStr'] = readCount
+ else:
+ proteinInfo['readAndHspCountStr'] = '%s%s%s' % (
+ readCount, self.READ_AND_HSP_COUNT_STR_SEP, hspCount)
+
if self._saveReadLengths:
readsClass = (FastaReads if self._format == 'fasta'
else FastqReads)
@@ -435,8 +442,8 @@ class ProteinGrouper(object):
for proteinName in sorted(proteins):
append(
' %(coverage).2f\t%(medianScore).2f\t'
- '%(bestScore).2f\t%(readCount)4d\t%(hspCount)4d\t'
- '%(index)3d\t%(proteinName)s'
+ '%(bestScore).2f\t%(readAndHspCountStr)11s\t'
+ '%(proteinName)s'
% proteins[proteinName])
append('')
@@ -445,7 +452,8 @@ class ProteinGrouper(object):
def toHTML(self, pathogenPanelFilename=None, minProteinFraction=0.0,
pathogenType='viral', title='Summary of pathogens',
preamble=None, sampleIndexFilename=None,
- pathogenIndexFilename=None):
+ pathogenIndexFilename=None, omitVirusLinks=False,
+ omitSampleProteinCount=False):
"""
Produce an HTML string representation of the pathogen summary.
@@ -465,6 +473,13 @@ class ProteinGrouper(object):
@param pathogenIndexFilename: A C{str} filename to write a pathogen
index file to. Lines in the file will have an integer index, a
space, and then the pathogen name.
+ @param omitVirusLinks: If C{True}, links to ICTV and ViralZone will be
+ omitted in output.
+ @param omitSampleProteinCount: If C{True}, do not display a number of
+ matched pathogen proteins for a sample. This should be used when
+ those numbers are inaccurate (e.g., when using the unclustered RVDB
+ protein database and there are many sequences for the same
+ protein).
@return: An HTML C{str} suitable for printing.
"""
if pathogenType not in ('bacterial', 'viral'):
@@ -475,7 +490,6 @@ class ProteinGrouper(object):
if not exists(self._pathogenDataDir):
os.mkdir(self._pathogenDataDir)
- highlightSymbol = '★'
self._computeUniqueReadCounts()
if pathogenPanelFilename:
@@ -548,10 +562,6 @@ class ProteinGrouper(object):
margin-top: 10px;
margin-bottom: 3px;
}
- .significant {
- color: red;
- margin-right: 2px;
- }
.sample {
margin-top: 5px;
margin-bottom: 2px;
@@ -599,10 +609,10 @@ class ProteinGrouper(object):
'<li>Coverage fraction.</li>',
'<li>Median bit score.</li>',
'<li>Best bit score.</li>',
- '<li>Read count.</li>',
- '<li>HSP count (a read can match a protein more than once).</li>',
- '<li>Protein length (in AAs).</li>',
- '<li>Index (just ignore this).</li>',
+ '<li>Read count (if the HSP count differs, read and HSP ',
+ ('counts are both given, separated by "%s").</li>' %
+ self.READ_AND_HSP_COUNT_STR_SEP),
+ '<li>Protein length (in amino acids).</li>',
]
if self._saveReadLengths:
@@ -644,11 +654,6 @@ class ProteinGrouper(object):
'but all pathogens have at least %.2f%% of their '
'proteins matched by at least one sample.' % percent)
- append('Samples that match a pathogen (and pathogens with a '
- 'matching sample) with at least this protein fraction are '
- 'highlighted using <span class="significant">%s</span>.' %
- highlightSymbol)
-
append('</p>')
if pathogenPanelFilename:
@@ -692,7 +697,7 @@ class ProteinGrouper(object):
samples = self.pathogenNames[pathogenName]
sampleCount = len(samples)
pathogenProteinCount = self._pathogenProteinCount[pathogenName]
- if pathogenType == 'viral':
+ if pathogenType == 'viral' and not omitVirusLinks:
quoted = quote(pathogenName)
pathogenLinksHTML = (
' (<a href="%s%s">ICTV</a>, <a href="%s%s">ViralZone</a>)'
@@ -755,21 +760,19 @@ class ProteinGrouper(object):
uniqueReadCount = samples[sampleName]['uniqueReadCount']
pathogenReadCount += uniqueReadCount
- if pathogenProteinCount and (
- proteinCount / pathogenProteinCount >=
- minProteinFraction):
- highlight = ('<span class="significant">%s</span>' %
- highlightSymbol)
+ if omitSampleProteinCount:
+ proteinCountHTML = ''
else:
- highlight = ''
+ proteinCountHTML = '%d protein%s, ' % (
+ proteinCount, '' if proteinCount == 1 else 's')
append(
'<p class="sample indented">'
- '%sSample <a href="#sample-%s">%s</a> '
- '(%d protein%s, <a href="%s">%d de-duplicated (by id) '
+ 'Sample <a href="#sample-%s">%s</a> '
+ '(%s<a href="%s">%d de-duplicated (by id) '
'read%s</a>, <a href="%s">panel</a>):</p>' %
- (highlight, sampleName, sampleName,
- proteinCount, '' if proteinCount == 1 else 's',
+ (sampleName, sampleName,
+ proteinCountHTML,
readsFileName,
uniqueReadCount, '' if uniqueReadCount == 1 else 's',
self.sampleNames[sampleName]))
@@ -780,8 +783,7 @@ class ProteinGrouper(object):
'<li>'
'<span class="stats">'
'%(coverage).2f %(medianScore)6.2f %(bestScore)6.2f '
- '%(readCount)5d %(hspCount)5d %(proteinLength)4d '
- '%(index)3d '
+ '%(readAndHspCountStr)11s %(proteinLength)4d '
% proteinMatch
)
@@ -861,7 +863,7 @@ class ProteinGrouper(object):
'<a id="sample-%s"></a>'
'<p class="sample">Sample '
'<span class="sample-name">%s</span> '
- 'matched proteins from 0 pathogens.</p>' %
+ 'did not match anything.</p>' %
(sampleName, sampleName))
continue
@@ -875,24 +877,19 @@ class ProteinGrouper(object):
proteinCount = len(proteins)
pathogenProteinCount = self._pathogenProteinCount[pathogenName]
- highlight = ''
if pathogenProteinCount:
proteinCountStr = '%d/%d protein%s' % (
proteinCount, pathogenProteinCount,
'' if pathogenProteinCount == 1 else 's')
- if (proteinCount / pathogenProteinCount >=
- minProteinFraction):
- highlight = ('<span class="significant">%s</span>' %
- highlightSymbol)
else:
proteinCountStr = '%d protein%s' % (
proteinCount, '' if proteinCount == 1 else 's')
append(
'<p class="sample indented">'
- '%s<a href="#pathogen-%s">%s</a> %s, '
+ '<a href="#pathogen-%s">%s</a> %s, '
'<a href="%s">%d de-duplicated (by id) read%s</a>:</p>' %
- (highlight, pathogenName, pathogenName,
+ (pathogenName, pathogenName,
proteinCountStr, readsFileName,
uniqueReadCount, '' if uniqueReadCount == 1 else 's'))
append('<ul class="protein-list indented">')
@@ -902,8 +899,7 @@ class ProteinGrouper(object):
'<li>'
'<span class="stats">'
'%(coverage).2f %(medianScore)6.2f %(bestScore)6.2f '
- '%(readCount)5d %(hspCount)5d %(proteinLength)4d '
- '%(index)3d '
+ '%(readAndHspCountStr)11s %(proteinLength)4d '
'</span> '
'<span class="protein-name">'
'%(proteinName)s'
| acorg/dark-matter | 3398b2f5af3d2fa4dcce840ad04ff605a5221fb9 | diff --git a/test/test_proteins.py b/test/test_proteins.py
index ba46f58..cf00b38 100644
--- a/test/test_proteins.py
+++ b/test/test_proteins.py
@@ -259,6 +259,7 @@ class TestProteinGrouper(TestCase):
'genomeURL': (
'http://www.ncbi.nlm.nih.gov/nuccore/J77'),
'readCount': 5,
+ 'readAndHspCountStr': '5/6',
},
},
'uniqueReadCount': None,
@@ -298,6 +299,7 @@ class TestProteinGrouper(TestCase):
'genomeURL': (
'http://www.ncbi.nlm.nih.gov/nuccore/J77'),
'readCount': 5,
+ 'readAndHspCountStr': '5/6',
},
},
'uniqueReadCount': None,
@@ -337,6 +339,7 @@ class TestProteinGrouper(TestCase):
'genomeURL': (
'http://www.ncbi.nlm.nih.gov/nuccore/J77'),
'readCount': 5,
+ 'readAndHspCountStr': '5/6',
},
},
'uniqueReadCount': None,
@@ -411,6 +414,7 @@ class TestProteinGrouper(TestCase):
'genomeURL': (
'http://www.ncbi.nlm.nih.gov/nuccore/J77'),
'readCount': 9,
+ 'readAndHspCountStr': '9',
},
'acc|GENBANK|I44.7|GENBANK|J78|VP2': {
'bestScore': 48.1,
@@ -429,6 +433,7 @@ class TestProteinGrouper(TestCase):
'genomeURL': (
'http://www.ncbi.nlm.nih.gov/nuccore/J78'),
'readCount': 5,
+ 'readAndHspCountStr': '5/6',
},
},
'uniqueReadCount': None,
@@ -472,6 +477,7 @@ class TestProteinGrouper(TestCase):
'genomeURL': (
'http://www.ncbi.nlm.nih.gov/nuccore/J77'),
'readCount': 9,
+ 'readAndHspCountStr': '9',
},
},
'uniqueReadCount': None,
@@ -497,6 +503,7 @@ class TestProteinGrouper(TestCase):
'genomeURL': (
'http://www.ncbi.nlm.nih.gov/nuccore/J78'),
'readCount': 5,
+ 'readAndHspCountStr': '5/6',
},
},
'uniqueReadCount': None,
@@ -543,6 +550,7 @@ class TestProteinGrouper(TestCase):
'genomeURL': (
'http://www.ncbi.nlm.nih.gov/nuccore/J77'),
'readCount': 9,
+ 'readAndHspCountStr': '9',
},
},
'uniqueReadCount': None,
@@ -566,6 +574,7 @@ class TestProteinGrouper(TestCase):
'genomeURL': (
'http://www.ncbi.nlm.nih.gov/nuccore/J78'),
'readCount': 5,
+ 'readAndHspCountStr': '5/6',
},
},
'uniqueReadCount': None,
@@ -633,6 +642,7 @@ class TestProteinGrouper(TestCase):
'genomeURL': (
'http://www.ncbi.nlm.nih.gov/nuccore/J77'),
'readCount': 9,
+ 'readAndHspCountStr': '9',
},
},
'uniqueReadCount': None,
@@ -658,6 +668,7 @@ class TestProteinGrouper(TestCase):
'genomeURL': (
'http://www.ncbi.nlm.nih.gov/nuccore/J78'),
'readCount': 5,
+ 'readAndHspCountStr': '5/6',
},
},
'uniqueReadCount': None,
@@ -740,7 +751,7 @@ class TestProteinGrouper(TestCase):
'\n'
'Lausannevirus (in 1 sample)\n'
' sample-filename (1 protein, 5 reads)\n'
- ' 0.77\t46.60\t48.10\t 5\t 6\t 0\tacc|'
+ ' 0.77\t46.60\t48.10\t 5/6\tacc|'
'GENBANK|I44.6|GENBANK|J77|VP1\n',
pg.toStr())
@@ -780,6 +791,7 @@ class TestProteinGrouper(TestCase):
'genomeURL': (
'http://www.ncbi.nlm.nih.gov/nuccore/J77'),
'readCount': 9,
+ 'readAndHspCountStr': '9',
},
},
'uniqueReadCount': None,
@@ -824,6 +836,7 @@ class TestProteinGrouper(TestCase):
'genomeURL': (
'http://www.ncbi.nlm.nih.gov/nuccore/J77'),
'readCount': 9,
+ 'readAndHspCountStr': '9',
},
},
'uniqueReadCount': None,
@@ -1076,6 +1089,7 @@ class TestPathogenSampleFiles(TestCase):
'genomeURL': (
'http://www.ncbi.nlm.nih.gov/nuccore/J77'),
'readCount': 9,
+ 'readAndHspCountStr': '9',
},
'acc|GENBANK|I44.7|GENBANK|J78|VP2': {
'bestScore': 48.1,
@@ -1094,6 +1108,7 @@ class TestPathogenSampleFiles(TestCase):
'genomeURL': (
'http://www.ncbi.nlm.nih.gov/nuccore/J78'),
'readCount': 5,
+ 'readAndHspCountStr': '5/6',
},
},
'uniqueReadCount': 3,
| Add options to proteins-to-pathogens.py to omit virus links and incorrect virus protein counts (a sketch of the new read/HSP count formatting follows this record) | 0.0 | 3398b2f5af3d2fa4dcce840ad04ff605a5221fb9 | [
"test/test_proteins.py::TestProteinGrouper::testOneLineInEachOfTwoFilesDifferentPathogens",
"test/test_proteins.py::TestProteinGrouper::testOneLineInEachOfTwoFilesSamePathogen",
"test/test_proteins.py::TestProteinGrouper::testOneLineInOneFile",
"test/test_proteins.py::TestProteinGrouper::testOneLineInOneFileFASTQ",
"test/test_proteins.py::TestProteinGrouper::testOneLineInOneFileToStr",
"test/test_proteins.py::TestProteinGrouper::testOneLineInOneFileWithDifferentAssetDir",
"test/test_proteins.py::TestProteinGrouper::testPathogenNameRegex",
"test/test_proteins.py::TestProteinGrouper::testPathogenNegativeNameRegex",
"test/test_proteins.py::TestProteinGrouper::testTwoLinesInOneFileDifferentPathogens",
"test/test_proteins.py::TestProteinGrouper::testTwoLinesInOneFileSamePathogen",
"test/test_proteins.py::TestPathogenSampleFiles::testProteinsSavedCorrectly"
] | [
"test/test_proteins.py::TestSplitNames::testNestedBrackets",
"test/test_proteins.py::TestSplitNames::testNoBrackets",
"test/test_proteins.py::TestSplitNames::testNormalCase",
"test/test_proteins.py::TestSplitNames::testTwoSetsOfBrackets",
"test/test_proteins.py::TestSplitNames::testWhitespaceStripping",
"test/test_proteins.py::TestGetPathogenProteinCounts::testExpected",
"test/test_proteins.py::TestGetPathogenProteinCounts::testExpectedWithTwoFiles",
"test/test_proteins.py::TestGetPathogenProteinCounts::testNone",
"test/test_proteins.py::TestProteinGrouper::testAssetDir",
"test/test_proteins.py::TestProteinGrouper::testDuplicatePathogenProteinSample",
"test/test_proteins.py::TestProteinGrouper::testNoAssetDir",
"test/test_proteins.py::TestProteinGrouper::testNoFiles",
"test/test_proteins.py::TestProteinGrouper::testNoFilesToStr",
"test/test_proteins.py::TestProteinGrouper::testNoFilesToStrWithTitle",
"test/test_proteins.py::TestProteinGrouper::testNoFilesToStrWithTitleAndPreamble",
"test/test_proteins.py::TestProteinGrouper::testNoRegex",
"test/test_proteins.py::TestProteinGrouper::testNoSampleName",
"test/test_proteins.py::TestProteinGrouper::testOneLineInEachOfTwoFilesDifferentPathogensTitle",
"test/test_proteins.py::TestProteinGrouper::testOneLineInEachOfTwoFilesSamePathogenTitle",
"test/test_proteins.py::TestProteinGrouper::testOneLineInOneFileTitle",
"test/test_proteins.py::TestProteinGrouper::testTwoLinesInOneFileTitle",
"test/test_proteins.py::TestProteinGrouper::testUnknownFormat",
"test/test_proteins.py::TestProteinGrouper::testUnknownPathogenType",
"test/test_proteins.py::TestPathogenSampleFiles::testIdenticalReadsRemoved",
"test/test_proteins.py::TestPathogenSampleFiles::testOpenNotCalledOnRepeatedCall",
"test/test_proteins.py::TestPathogenSampleFiles::testPathogenIndex",
"test/test_proteins.py::TestPathogenSampleFiles::testReadLengthsAdded",
"test/test_proteins.py::TestPathogenSampleFiles::testUnknownFormat",
"test/test_proteins.py::TestPathogenSampleFiles::testWritePathogenIndex",
"test/test_proteins.py::TestPathogenSampleFiles::testWriteSampleIndex"
] | {
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false
} | 2019-08-08 13:18:14+00:00 | mit | 875 |
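
To illustrate the `readAndHspCountStr` logic added in the record above: a read can match a protein in more than one HSP, so the HSP count can exceed the read count, and the combined string only shows both numbers when they differ. A small standalone sketch using the same separator convention:

```python
READ_AND_HSP_COUNT_STR_SEP = '/'

def readAndHspCountStr(readCount, hspCount):
    # Show a single number when the counts agree, 'reads/hsps' otherwise.
    if readCount == hspCount:
        return str(readCount)
    return '%d%s%d' % (readCount, READ_AND_HSP_COUNT_STR_SEP, hspCount)

# The values the updated tests expect.
assert readAndHspCountStr(9, 9) == '9'
assert readAndHspCountStr(5, 6) == '5/6'
```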
|
acorg__dark-matter-704 | diff --git a/CHANGELOG.md b/CHANGELOG.md
index ee662c9..9212725 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,3 +1,7 @@
+## 3.1.31 Jan 13, 2020
+
+Added `sam-coverage-depth.py`.
+
## 3.1.30 Jan 13, 2020
Added `--minGenomeLength` to `make-protein-database.py`.
diff --git a/bin/sam-coverage-depth.py b/bin/sam-coverage-depth.py
new file mode 100755
index 0000000..7516f61
--- /dev/null
+++ b/bin/sam-coverage-depth.py
@@ -0,0 +1,122 @@
+#!/usr/bin/env python
+
+from __future__ import division, print_function
+
+import sys
+import argparse
+from collections import Counter
+from numpy import std
+
+from dark.filter import (
+ addFASTAFilteringCommandLineOptions, parseFASTAFilteringCommandLineOptions)
+from dark.reads import Reads
+from dark.sam import samfile, SAMFilter, samReferences, UnknownReference
+from dark.utils import pct
+
+
+def baseCountsToStr(counts):
+ """
+ Convert base counts to a string.
+
+ @param counts: A C{Counter} instance.
+ @return: A C{str} representation of nucleotide counts.
+ """
+ return ' '.join([
+ ('%s:%d' % (base, counts[base])) for base in sorted(counts)])
+
+
+parser = argparse.ArgumentParser(
+ formatter_class=argparse.ArgumentDefaultsHelpFormatter,
+ description=('Print SAM/BAM file coverage statistics by offset. '
+                 'Output lines show the offset, read depth, and base counts.'))
+
+addFASTAFilteringCommandLineOptions(parser)
+SAMFilter.addFilteringOptions(parser, samfileIsPositional=True)
+
+parser.add_argument(
+ '--noOffsets', default=False, action='store_true',
+ help='Do not print per-offset details of base counts.')
+
+parser.add_argument(
+ '--noStats', default=False, action='store_true',
+ help='Do not print final average and standard deviation statistics.')
+
+args = parser.parse_args()
+
+if args.noOffsets and args.noStats:
+ print('You have used both --noOffsets and --noStats, so there is no '
+ 'output!', file=sys.stderr)
+ sys.exit(1)
+
+# We don't have a file of reads, we just want a read filter that we
+# can use to filter the SAM file query sequences.
+reads = parseFASTAFilteringCommandLineOptions(args, Reads())
+samFilter = SAMFilter.parseFilteringOptions(args, reads.filterRead)
+
+printOffsets = not args.noOffsets
+printStats = not args.noStats
+
+if samFilter.referenceIds and len(samFilter.referenceIds) > 1:
+ print('Only one reference id can be given. To calculate coverage for more '
+ 'than one reference, run this script multiple times.',
+ file=sys.stderr)
+ sys.exit(1)
+
+try:
+ referenceLengths = samFilter.referenceLengths()
+except UnknownReference:
+ referenceId = samFilter.referenceIds.pop()
+ referenceIds = samReferences(args.samfile)
+ print('Reference %r does not appear in SAM file %s. Known '
+ 'references are: %s.' % (
+ referenceId, args.samfile, ', '.join(sorted(referenceIds))),
+ file=sys.stderr)
+ sys.exit(1)
+
+if printStats:
+ counts = []
+
+with samfile(args.samfile) as sam:
+
+ if samFilter.referenceIds:
+ # No need to check if the given reference id is in referenceLengths
+        # because the samFilter.referenceLengths call above caught that.
+ referenceId = samFilter.referenceIds.pop()
+ else:
+ if len(referenceLengths) == 1:
+ referenceId = list(referenceLengths)[0]
+ else:
+ print('SAM file %r contains %d references (%s). Only one '
+ 'reference id can be analyzed at a time. Please use '
+ '--referenceId to specify the one you want examined.' % (
+ args.samfile, len(referenceLengths),
+ ', '.join(sorted(referenceLengths))), file=sys.stderr)
+ sys.exit(1)
+
+ for column in sam.pileup(reference=referenceId):
+ bases = Counter()
+ for read in column.pileups:
+ if (not read.is_del and not read.is_refskip and
+ samFilter.filterAlignment(read.alignment)):
+ base = read.alignment.query_sequence[read.query_position]
+ bases[base] += 1
+
+ baseCount = sum(bases.values())
+
+ if printStats:
+ counts.append(baseCount)
+
+ if printOffsets:
+ print('%d: %d %s' % (column.reference_pos + 1, baseCount,
+ baseCountsToStr(bases)))
+
+if printStats:
+ referenceLength = referenceLengths[referenceId]
+ print('Reference id: %s' % referenceId)
+ print('Reference length: %d' % referenceLength)
+ print('Bases covered: %s' % pct(len(counts), referenceLength))
+ print('Min coverage depth: %d' % (
+ 0 if len(counts) < referenceLength else min(counts)))
+ print('Max coverage depth: %d' % max(counts))
+ print('Mean coverage depth: %.3f' % (sum(counts) / referenceLength))
+ print('Coverage depth s.d.: %.3f' % std(counts))
diff --git a/dark/__init__.py b/dark/__init__.py
index 324a302..da9777a 100644
--- a/dark/__init__.py
+++ b/dark/__init__.py
@@ -7,4 +7,4 @@ if sys.version_info < (2, 7):
# will not be found by the version() function in ../setup.py
#
# Remember to update ../CHANGELOG.md describing what's new in each version.
-__version__ = '3.1.30'
+__version__ = '3.1.31'
diff --git a/dark/sam.py b/dark/sam.py
index b668b4f..07a5e08 100644
--- a/dark/sam.py
+++ b/dark/sam.py
@@ -334,13 +334,10 @@ class SAMFilter(object):
SAM/BAM file is read.
@return: A C{SAMFilter} instance.
"""
- referenceIds = (set(chain.from_iterable(args.referenceId))
- if args.referenceId else None)
-
return cls(
args.samfile,
filterRead=filterRead,
- referenceIds=referenceIds,
+ referenceIds=set(args.referenceId) if args.referenceId else None,
storeQueryIds=storeQueryIds,
dropUnmapped=args.dropUnmapped,
dropSecondary=args.dropSecondary,
diff --git a/dark/utils.py b/dark/utils.py
index 0059330..1ffa57f 100644
--- a/dark/utils.py
+++ b/dark/utils.py
@@ -259,6 +259,22 @@ def countPrint(mesg, count, len1, len2=None):
count, len2, percentage(count, len2)))
+def pct(a, b):
+ """
+ Format a string showing two integers and what percentage the first
+ is of the second.
+
+ @param a: An C{int}, the numerator.
+ @param b: An C{int}, the denominator.
+ """
+ assert 0 <= a <= b
+ if b:
+ return ('%d/%d (%.3f%%)' %
+ (a, b, (a / b if b else 0.0) * 100.0))
+ else:
+ return '0/0 (0.000%)'
+
+
@contextmanager
def cd(newdir):
"""
| acorg/dark-matter | 5d785403629c4bd5c2e848219ed0fc4fc896c3cf | diff --git a/test/test_utils.py b/test/test_utils.py
index f7d7ed6..5656906 100644
--- a/test/test_utils.py
+++ b/test/test_utils.py
@@ -14,8 +14,8 @@ except ImportError:
from .mocking import mockOpen
from dark.utils import (
- numericallySortFilenames, median, asHandle, parseRangeString, StringIO,
- baseCountsToStr, nucleotidesToStr, countPrint)
+ numericallySortFilenames, median, asHandle, parseRangeString, pct,
+ StringIO, baseCountsToStr, nucleotidesToStr, countPrint)
class TestNumericallySortFilenames(TestCase):
@@ -382,3 +382,36 @@ class TestCountPrint(TestCase):
' 2/8 (25.00%) of sequence 2',
countPrint('Count is', count, len1, len2)
)
+
+
+class TestPct(TestCase):
+ """
+ Test the pct function.
+ """
+ def testZeroNumerator(self):
+ """
+ The pct function must produce the correct result if the numerator is
+ zero.
+ """
+ self.assertEqual('0/10 (0.000%)', pct(0, 10))
+
+ def testZeroDenominator(self):
+ """
+ The pct function must produce the correct result if the denominator is
+ zero.
+ """
+ self.assertEqual('0/0 (0.000%)', pct(0, 0))
+
+ def testOneHalf(self):
+ """
+ The pct function must produce the correct result if the numerator is
+ one half of the denominator.
+ """
+ self.assertEqual('5/10 (50.000%)', pct(5, 10))
+
+ def testOneSeventh(self):
+ """
+ The pct function must produce the correct result if the numerator is
+ one seventh of the denominator.
+ """
+ self.assertEqual('2/14 (14.286%)', pct(2, 14))
| Add a script to print per-offset coverage and overall coverage stats from a SAM/BAM file (a sketch of the per-offset output format follows this record) | 0.0 | 5d785403629c4bd5c2e848219ed0fc4fc896c3cf | [
"test/test_utils.py::TestNumericallySortFilenames::testBasename",
"test/test_utils.py::TestNumericallySortFilenames::testNoNames",
"test/test_utils.py::TestNumericallySortFilenames::testOneNonNumericName",
"test/test_utils.py::TestNumericallySortFilenames::testOneNumericName",
"test/test_utils.py::TestNumericallySortFilenames::testSeveralNames",
"test/test_utils.py::TestNumericallySortFilenames::testSeveralNamesWithUnequalPrefixLengths",
"test/test_utils.py::TestMedian::testEmptyArgRaises",
"test/test_utils.py::TestMedian::testMedianOfFive",
"test/test_utils.py::TestMedian::testMedianOfFour",
"test/test_utils.py::TestMedian::testMedianOfOne",
"test/test_utils.py::TestMedian::testMedianOfThree",
"test/test_utils.py::TestMedian::testMedianOfTwo",
"test/test_utils.py::TestAsHandle::testOpenFile",
"test/test_utils.py::TestAsHandle::testStr",
"test/test_utils.py::TestParseRangeString::testEmptyString",
"test/test_utils.py::TestParseRangeString::testSingleNumber",
"test/test_utils.py::TestParseRangeString::testSingleNumberSpaceAfter",
"test/test_utils.py::TestParseRangeString::testSingleNumberSpaceBefore",
"test/test_utils.py::TestParseRangeString::testSingleNumberSpaceBeforeAndAfter",
"test/test_utils.py::TestParseRangeString::testSingleRange",
"test/test_utils.py::TestParseRangeString::testSingleRangeWithSpaceAfterHyphen",
"test/test_utils.py::TestParseRangeString::testSingleRangeWithSpaceBeforeAfterHyphen",
"test/test_utils.py::TestParseRangeString::testSingleRangeWithSpaceBeforeHyphen",
"test/test_utils.py::TestParseRangeString::testTwoOverlappingRanges",
"test/test_utils.py::TestParseRangeString::testTwoRanges",
"test/test_utils.py::TestParseRangeString::testTwoRangesAndANumber",
"test/test_utils.py::TestParseRangeString::testTwoRangesAndTwoNumbers",
"test/test_utils.py::TestParseRangeString::testZeroConversion",
"test/test_utils.py::TestStringIO::testContextManager",
"test/test_utils.py::TestStringIO::testInitializedRead",
"test/test_utils.py::TestStringIO::testInitiallyEmpty",
"test/test_utils.py::TestStringIO::testWriteRead",
"test/test_utils.py::TestBaseCountsToStr::testSimple",
"test/test_utils.py::TestNucleotidesToStr::testSimple",
"test/test_utils.py::TestCountPrint::testSimple",
"test/test_utils.py::TestCountPrint::testTwoSequences",
"test/test_utils.py::TestPct::testOneHalf",
"test/test_utils.py::TestPct::testOneSeventh",
"test/test_utils.py::TestPct::testZeroDenominator",
"test/test_utils.py::TestPct::testZeroNumerator"
] | [] | {
"failed_lite_validators": [
"has_short_problem_statement",
"has_added_files",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false
} | 2020-01-13 01:04:10+00:00 | mit | 876 |
|
acorg__dark-matter-705 | diff --git a/CHANGELOG.md b/CHANGELOG.md
index 18dbb93..5a22e5b 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,3 +1,7 @@
+## 3.1.37 Jan 13, 2020
+
+Add final `genome-protein-summary.py` script.
+
## 3.1.35 Jan 13, 2020
Drop Python 3.8 from Travis and tox checking.
diff --git a/bin/genome-protein-summary.py b/bin/genome-protein-summary.py
index cb0608d..71ce8b4 100755
--- a/bin/genome-protein-summary.py
+++ b/bin/genome-protein-summary.py
@@ -14,22 +14,19 @@ from dark.genbank import GenomeRanges
from dark.genomes import GenomeProteinInfo
from dark.reads import Reads
from dark.sam import SAMFilter, samReferences
+from dark.utils import pct
-def pct(a, b):
- assert a <= b
- if b:
- return ('%d/%d (%.3f%%)' %
- (a, b, (a / b if b else 0.0) * 100.0))
- else:
- return '0/0 (0.00%)'
-
-
-def summarize(gpi, sortOn):
+def summarize(gpi, sortOn, minReadOffsetCount):
"""
Print a summary of the genome proteins.
@param gpi: A C{GenomeProteinInfo} instance.
+ @param sortOn: How to sort proteins for output. One of 'coverage',
+ 'depth', 'name', 'offset', or 'readCount'.
+ @param minReadOffsetCount: The minimum number of read offsets that must
+ overlap a protein for the read to be considered as sufficiently
+ intersecting the protein.
"""
genome = gpi.genome
@@ -48,9 +45,9 @@ def summarize(gpi, sortOn):
print(' Whole genome coverage (not just proteins):')
print(' Reads matching genome: %d' % len(gpi.readIdsMatchingGenome))
- print(' Covered offsets: %s' % (
+ print(' Covered genome offsets: %s' % (
pct(len(gpi.coveredOffsetCount), genome['length'])))
- print(' Average depth: %.3f' % (
+ print(' Average depth across genome: %.3f' % (
sum(gpi.coveredOffsetCount.values()) / genome['length']))
coveredProteinOffsetCount = coveredProteinBasesCount = 0
@@ -58,16 +55,15 @@ def summarize(gpi, sortOn):
coveredProteinOffsetCount += bool(gpi.coveredOffsetCount[offset])
coveredProteinBasesCount += gpi.coveredOffsetCount[offset]
- print(' Total protein coverage:')
+ print(' Total protein coverage (irrespective of minReadOffsetCount):')
print(' Reads matching proteins: %d' % len(gpi.readIdsForAllProteins()))
- print(' Covered offsets: %s' % (
+ print(' Proteins with any coverage: %s' %
+ pct(len(gpi.coveredProteins), genome['proteinCount']))
+ print(' Covered protein offsets: %s' % (
pct(coveredProteinOffsetCount, len(gpi.offsets))))
- print(' Average depth: %.3f' % (
+ print(' Average depth across proteins: %.3f' % (
coveredProteinBasesCount / len(gpi.offsets)))
- print(' Proteins matched: %s (sorted by %s):' % (
- pct(len(gpi.coveredProteins), genome['proteinCount']), sortOn))
-
if sortOn == 'name':
def key(proteinAccession):
return gpi.proteins[proteinAccession]['name']
@@ -93,25 +89,41 @@ def summarize(gpi, sortOn):
return coverage['totalBases'] / coverage['ntLength']
reverse = True
- for i, proteinAccession in enumerate(
- sorted(gpi.coveredProteins, key=key, reverse=reverse),
- start=1):
+ if minReadOffsetCount is None:
+ print(' Proteins covered (no minReadOffsetCount):')
+ else:
+ print(' Proteins covered (minReadOffsetCount=%d):' %
+ minReadOffsetCount)
+
+ proteinCount = 0
+ for proteinAccession in sorted(gpi.coveredProteins, key=key,
+ reverse=reverse):
protein = gpi.proteins[proteinAccession]
- print(' %d: %s (%d AA, %d nt with stop codon, %s)' %
- (i, protein['product'], protein['length'],
- protein['length'] * 3 + 3, protein['accession']))
- coverage = gpi.proteinCoverageInfo(proteinAccession)
+ coverage = gpi.proteinCoverageInfo(proteinAccession,
+ minReadOffsetCount)
+
+ readCount = len(coverage['readIds'])
- print(' Read count: %d' % len(coverage['readIds']))
+ if readCount:
+ proteinCount += 1
- print(' Covered offsets: %s' % (
- pct(coverage['coveredOffsets'], coverage['ntLength'])))
+ print(' %d: %s (%d AA, %d nt with stop codon, %s)' %
+ (proteinCount, protein['product'], protein['length'],
+ protein['length'] * 3 + 3, protein['accession']))
- print(' Average depth: %.3f' % (
- coverage['totalBases'] / coverage['ntLength']))
+ print(' Read count: %d' % readCount)
- print(' Offsets: %s' % protein['offsets'])
+ print(' Covered offsets: %s' % (
+ pct(coverage['coveredOffsets'], coverage['ntLength'])))
+
+ print(' Average depth: %.3f' % (
+ coverage['totalBases'] / coverage['ntLength']))
+
+ print(' Offsets: %s' % protein['offsets'])
+
+ print(' Proteins matched: %s (sorted by %s):' % (
+ pct(proteinCount, genome['proteinCount']), sortOn))
if __name__ == '__main__':
@@ -124,11 +136,23 @@ if __name__ == '__main__':
help=('The filename of an Sqlite3 database holding protein and '
'genome information, as built by make-protein-database.py'))
+ parser.add_argument(
+ '--progress', default=False, action='store_true',
+ help='Print progress info to standard error.')
+
parser.add_argument(
'--sortOn', default='readCount',
choices=('coverage', 'depth', 'name', 'offset', 'readCount'),
help='How to sort proteins for output.')
+ parser.add_argument(
+ '--minReadOffsetCount', type=int,
+ help=('The minimum number of read offsets that must overlap a '
+ 'protein for the read to be considered as sufficiently '
+ 'intersecting the protein. Use this to prevent reads that '
+ 'just overlap the protein in a very small number of offsets '
+ 'from being counted.'))
+
parser.add_argument(
'--skipTranslationChecks', dest='checkTranslations',
action='store_false', default=True,
@@ -184,11 +208,13 @@ if __name__ == '__main__':
referenceId, file=sys.stderr)
else:
if samfiles:
- print('Processing %d SAM file%s for matches with %r:' %
- (len(samfiles), '' if len(samfiles) == 1 else 's',
- referenceId), file=sys.stderr)
+ if args.progress:
+ print('Processing %d SAM file%s for matches with %r:' %
+ (len(samfiles), '' if len(samfiles) == 1 else 's',
+ referenceId), file=sys.stderr)
for i, filename in enumerate(samfiles, start=1):
- print(' %d: %s' % (i, filename), file=sys.stderr)
+ if args.progress:
+ print(' %d: %s' % (i, filename), file=sys.stderr)
gpInfo.addSAM(filename, filterAlignment)
- summarize(gpInfo, args.sortOn)
+ summarize(gpInfo, args.sortOn, args.minReadOffsetCount)
diff --git a/dark/__init__.py b/dark/__init__.py
index 7336433..8aacdfa 100644
--- a/dark/__init__.py
+++ b/dark/__init__.py
@@ -7,4 +7,4 @@ if sys.version_info < (2, 7):
# will not be found by the version() function in ../setup.py
#
# Remember to update ../CHANGELOG.md describing what's new in each version.
-__version__ = '3.1.36'
+__version__ = '3.1.37'
diff --git a/dark/genomes.py b/dark/genomes.py
index 63b642b..e920d56 100644
--- a/dark/genomes.py
+++ b/dark/genomes.py
@@ -24,10 +24,11 @@ class GenomeProteinInfo(object):
# self.proteins is keyed by protein accession number.
self.proteins = {}
self.coveredProteins = set()
- # self.offsets is keyed by offset, values are dicts that contain a list
- # of protein accession numbers that overlap that offset and a set of
- # read ids (if any) that match at that offset. The offsets keys are
- # only those that correspond to a protein.
+ # self.offsets is keyed by genome offset, values are dicts that
+ # contain a list of protein accession numbers that overlap that
+ # offset and a set of read ids (if any) that match at that offset.
+ # The offsets keys are only those that correspond to one or more
+ # proteins in the genome.
self.offsets = {}
# self.coveredOffsetCount holds the read counts for all offsets covered
# by reads, regardless of whether the offsets correspond to proteins or
@@ -123,11 +124,17 @@ class GenomeProteinInfo(object):
offsetInfo['proteinAccessions'])
offsetInfo['readIds'].add(readId)
- def proteinCoverageInfo(self, proteinAccession):
+ def proteinCoverageInfo(self, proteinAccession, minReadOffsetCount=None):
"""
Calculate coverage information for a protein.
@param proteinAccession: A C{str} accession number.
+ @param minReadOffsetCount: An C{int}, specifying the minimum number of
+ read offsets that must overlap the protein for the read to be
+ considered as sufficiently intersecting the protein. Use this to
+ prevent reads that just overlap the protein in a very small number
+ of offsets from being counted. Or C{None} to indicate that no such
+ filtering should be applied.
@raises KeyError: If C{proteinAccession} is not known.
@return: A C{dict} containing
* the number of covered offsets,
@@ -143,6 +150,13 @@ class GenomeProteinInfo(object):
offsetsSeen = set()
proteinLength = 0
+ if minReadOffsetCount is not None and minReadOffsetCount < 2:
+ # Zero or one is equivalent to not giving a value.
+ minReadOffsetCount = None
+
+ if minReadOffsetCount:
+ readOffsetCounts = Counter()
+
for (start, stop, forward) in GenomeRanges(protein['offsets']).ranges:
proteinLength += stop - start
for offset in range(start, stop):
@@ -153,10 +167,15 @@ class GenomeProteinInfo(object):
coveredOffsets += 1
totalBases += len(readIds)
allReadIds.update(readIds)
-
- # The +3 in the following is because the database holds the AA length,
- # not including the stop codon. But the database range covers the stop
- # codon.
+ if minReadOffsetCount:
+ readOffsetCounts.update(readIds)
+
+ # Sanity check that the sum of the range lengths is the same as the
+ # overall length given in the database.
+ #
+ # The +3 in the following is because the database holds the AA
+ # length, not including the stop codon. But the database range
+ # covers the stop codon.
dbProteinLength = self.proteins[proteinAccession]['length'] * 3 + 3
if proteinLength != dbProteinLength:
raise ValueError(
@@ -164,6 +183,12 @@ class GenomeProteinInfo(object):
'database protein length (%d) for protein %s!' %
(proteinLength, dbProteinLength, proteinAccession))
+ # Do not report on reads whose overlapping offset count is too low.
+ if minReadOffsetCount:
+ unwanted = set(readId for readId in readOffsetCounts
+ if readOffsetCounts[readId] < minReadOffsetCount)
+ allReadIds.symmetric_difference_update(unwanted)
+
return {
'coveredOffsets': coveredOffsets,
'totalBases': totalBases,
| acorg/dark-matter | 7e8ca419ef2d6d10287c020e194587a400052ad6 | diff --git a/test/test_genomes.py b/test/test_genomes.py
index 4819f31..c221136 100644
--- a/test/test_genomes.py
+++ b/test/test_genomes.py
@@ -172,3 +172,32 @@ class TestGenomeProteinInfo(TestCase):
self.assertEqual(750, info['totalBases'])
self.assertEqual((3221 - 2306) + (1623 - 0), info['ntLength'])
self.assertEqual({'query1', 'query2', 'query3'}, info['readIds'])
+
+ def testTooFewReadOffsetsBAM1(self):
+ """
+ Test that a read is not returned as overlapping a protein unless it
+ meets the minimum number of required overlapping offsets.
+ """
+ gpi = GenomeProteinInfo('KJ586809.1', DB, True)
+ gpi.addSAM(BAM1)
+
+ # Look at protein AJF20804.1 coverage (its ranges are 2306-3221 and
+ # 0-1623). There should be no matching reads because the query
+ # (query1) is only 200 nt long and so cannot match with at least
+ # 500 nucleotides.
+ info = gpi.proteinCoverageInfo('AJF20804.1', 500)
+ self.assertEqual(set(), info['readIds'])
+
+ def testSufficientReadOffsetsBAM1(self):
+ """
+ Test that a read is returned as overlapping a protein when it meets
+ the minimum number of required overlapping offsets.
+ """
+ gpi = GenomeProteinInfo('KJ586809.1', DB, True)
+ gpi.addSAM(BAM1)
+
+ # Look at protein AJF20804.1 coverage (its ranges are 2306-3221 and
+ # 0-1623). The query (query1) must be returned as it has 200
+ # matching nucleotides.
+ info = gpi.proteinCoverageInfo('AJF20804.1', 199)
+ self.assertEqual({'query1'}, info['readIds'])
| Add tool to summarize protein hits in a SAM/BAM file | 0.0 | 7e8ca419ef2d6d10287c020e194587a400052ad6 | [
"test/test_genomes.py::TestGenomeProteinInfo::testSufficientReadOffsetsBAM1",
"test/test_genomes.py::TestGenomeProteinInfo::testTooFewReadOffsetsBAM1"
] | [
"test/test_genomes.py::TestGenomeProteinInfo::testLoadBAM1",
"test/test_genomes.py::TestGenomeProteinInfo::testLoadBAM12",
"test/test_genomes.py::TestGenomeProteinInfo::testLoadBAM123",
"test/test_genomes.py::TestGenomeProteinInfo::testLoadReference"
] | {
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false
} | 2020-01-13 10:35:47+00:00 | mit | 877 |
|
acorg__dark-matter-726 | diff --git a/CHANGELOG.md b/CHANGELOG.md
index 66b60e7..17a90c5 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,3 +1,7 @@
+## 3.1.70 March 24, 2020
+
+Added `--maxNFraction` arg to `filter-fasta.py`.
+
## 3.1.69 March 23, 2020
Added `--md5` arg to `fasta-sequences.py`.
@@ -429,9 +433,9 @@ Removed bone-headed use of full path to `fasta-join.sh` from
## 3.0.63 Jan 14, 2019
-Added `compareAaReads` and `matchToString` to `aa.py`. Wrote tests in
+Added `compareAaReads` and `matchToString` to `aa.py`. Wrote tests in
`test_aa.py` for both. Moved `countPrint` to utils, used by `matchToString`
-in `dna.py` and `aa.py`. Added `compare-aa-sequences.py` to the bin.
+in `dna.py` and `aa.py`. Added `compare-aa-sequences.py` to the bin.
## 3.0.62 Dec 30, 2018
@@ -444,9 +448,9 @@ and the underlying `ReadFilter` class.
## 3.0.60 Dec 13, 2018
-In `reads.py`, changed the `_makeComplementTable` function so that
+In `reads.py`, changed the `_makeComplementTable` function so that
uppercase and lowercase bases are correctly reverse complemented into their
-respective uppercase and lowercase complementary letters. Added a test to
+respective uppercase and lowercase complementary letters. Added a test to
`test/reads.py` to confirm that `reverseComplement` does this.
## 3.0.59 Dec 11, 2018
diff --git a/dark/__init__.py b/dark/__init__.py
index 52bc094..01d72bb 100644
--- a/dark/__init__.py
+++ b/dark/__init__.py
@@ -7,4 +7,4 @@ if sys.version_info < (2, 7):
# will not be found by the version() function in ../setup.py
#
# Remember to update ../CHANGELOG.md describing what's new in each version.
-__version__ = '3.1.69'
+__version__ = '3.1.70'
diff --git a/dark/filter.py b/dark/filter.py
index 07e00d7..216db82 100644
--- a/dark/filter.py
+++ b/dark/filter.py
@@ -1,3 +1,5 @@
+from __future__ import division
+
import re
from math import ceil
from collections import OrderedDict
@@ -187,6 +189,10 @@ def addFASTAFilteringCommandLineOptions(parser):
'--maxLength', type=int, metavar='N',
help='The maximum sequence length')
+ parser.add_argument(
+ '--maxNFraction', type=float, metavar='N',
+ help='The maximum fraction of Ns that can be present in the sequence')
+
parser.add_argument(
'--whitelist', action='append', metavar='SEQUENCE-ID',
help='Sequence titles (ids) that should be whitelisted')
@@ -297,6 +303,7 @@ def parseFASTAFilteringCommandLineOptions(args, reads):
return reads.filter(
minLength=args.minLength, maxLength=args.maxLength,
+ maxNFraction=args.maxNFraction,
whitelist=set(args.whitelist) if args.whitelist else None,
blacklist=set(args.blacklist) if args.blacklist else None,
whitelistFile=args.whitelistFile, blacklistFile=args.blacklistFile,
diff --git a/dark/reads.py b/dark/reads.py
index 8da2fef..113ffdd 100644
--- a/dark/reads.py
+++ b/dark/reads.py
@@ -817,6 +817,8 @@ class ReadFilter(object):
@param minLength: The minimum acceptable length.
@param maxLength: The maximum acceptable length.
+ @param maxNFraction: The maximum fraction of Ns that can be present in the
+ sequence.
@param removeGaps: If C{True} remove all gaps ('-' characters) from the
read sequences.
@param whitelist: If not C{None}, a set of exact read ids that are
@@ -935,8 +937,8 @@ class ReadFilter(object):
# save and restore the state of the RNG and/or to optionally add
# 'seed=XXX' to the end of the id of the first read, etc.
- def __init__(self, minLength=None, maxLength=None, removeGaps=False,
- whitelist=None, blacklist=None,
+ def __init__(self, minLength=None, maxLength=None, maxNFraction=None,
+ removeGaps=False, whitelist=None, blacklist=None,
whitelistFile=None, blacklistFile=None,
titleRegex=None, negativeTitleRegex=None,
truncateTitlesAfter=None, keepSequences=None,
@@ -960,6 +962,7 @@ class ReadFilter(object):
self.minLength = minLength
self.maxLength = maxLength
+ self.maxNFraction = maxNFraction
self.removeGaps = removeGaps
self.head = head
self.removeDuplicates = removeDuplicates
@@ -1126,6 +1129,11 @@ class ReadFilter(object):
(self.maxLength is not None and readLen > self.maxLength)):
return False
+ if self.maxNFraction is not None:
+ nFraction = read.sequence.count('N') / readLen
+ if self.maxNFraction < nFraction:
+ return False
+
if self.removeGaps:
if read.quality is None:
read = read.__class__(read.id, read.sequence.replace('-', ''))
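For illustration, a minimal sketch of the fraction check this filter performs (the real test lives inside `ReadFilter.filter` above; the helper name here is hypothetical, and the threshold comparison matches the patch's `maxNFraction < nFraction` rejection rule):

```python
def passes_max_n_fraction(sequence, max_n_fraction=None):
    """Return True if the sequence's N fraction is within the limit."""
    if max_n_fraction is None:
        return True
    return sequence.count('N') / len(sequence) <= max_n_fraction

assert passes_max_n_fraction('ATTA', 0.9)        # no Ns at all
assert passes_max_n_fraction('ATTNNN', 0.6)      # 0.5 is within the limit
assert not passes_max_n_fraction('ATTNNN', 0.4)  # too many Ns
```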
| acorg/dark-matter | 4c3115b2c5e585068988dfec0a7330b2d4f2ba37 | diff --git a/test/test_reads.py b/test/test_reads.py
index 273b87d..ea53b3f 100644
--- a/test/test_reads.py
+++ b/test/test_reads.py
@@ -2550,6 +2550,46 @@ class TestReads(TestCase):
self.assertEqual(sorted([read2, read3]), sorted(reads))
self.assertEqual(3, reads.unfilteredLength())
+ def testMaxNFractionAllPassNoNs(self):
+ """
+ Test filtering by maximum fraction of Ns. If there are no Ns in the
+ sequences, all must pass the filtering.
+ """
+ read1 = Read('id1', 'ATTA')
+ read2 = Read('id2', 'ATTAAC')
+ initialReads = Reads([read1, read2])
+ initialReads.filter(maxNFraction=0.9)
+
+ reads = Reads(initialReads)
+ self.assertEqual([read1, read2], list(reads))
+
+ def testMaxNFractionOnePasses(self):
+ """
+ Test filtering by maximum fraction of Ns. If there are too many Ns in
+ one of the sequences, only one must pass the filtering.
+ """
+ read1 = Read('id1', 'ATTA')
+ read2 = Read('id2', 'ATTNNN')
+ initialReads = Reads([read1, read2])
+ initialReads.filter(maxNFraction=0.4)
+
+ reads = Reads(initialReads)
+ self.assertEqual([read1], list(reads))
+
+ def testMaxNFractionAllPassNs(self):
+ """
+ Test filtering by maximum fraction of Ns. If there are Ns in the
+ sequence, but below the threshold, all sequences must pass the
+ filtering.
+ """
+ read1 = Read('id1', 'ATTA')
+ read2 = Read('id2', 'ATTNNN')
+ initialReads = Reads([read1, read2])
+ initialReads.filter(maxNFraction=0.6)
+
+ reads = Reads(initialReads)
+ self.assertEqual([read1, read2], list(reads))
+
def testNoVariableSitesConfirm(self):
"""
If two Reads have no bases that are variable, nothing should be
| Add filtering options for maximum number of Ns in a sequence | 0.0 | 4c3115b2c5e585068988dfec0a7330b2d4f2ba37 | [
"test/test_reads.py::TestReads::testMaxNFractionAllPassNoNs",
"test/test_reads.py::TestReads::testMaxNFractionAllPassNs",
"test/test_reads.py::TestReads::testMaxNFractionOnePasses"
] | [
"test/test_reads.py::TestRead::testCasePreservation",
"test/test_reads.py::TestRead::testCheckAlphabetAAReadMatchingReturnTrue",
"test/test_reads.py::TestRead::testCheckAlphabetAAReadNotMatchingRaise",
"test/test_reads.py::TestRead::testCheckAlphabetDNAReadMatchingReturnTrue",
"test/test_reads.py::TestRead::testCheckAlphabetDNAReadNotMatchingRaise",
"test/test_reads.py::TestRead::testCheckAlphabetwithReadMustBePermissive",
"test/test_reads.py::TestRead::testEquality",
"test/test_reads.py::TestRead::testEqualityWithDifferingIds",
"test/test_reads.py::TestRead::testEqualityWithDifferingQuality",
"test/test_reads.py::TestRead::testEqualityWithDifferingSequences",
"test/test_reads.py::TestRead::testEqualityWithNoQuality",
"test/test_reads.py::TestRead::testEqualityWithOneOmittedQuality",
"test/test_reads.py::TestRead::testExpectedAttributes",
"test/test_reads.py::TestRead::testFromDict",
"test/test_reads.py::TestRead::testFromDictNoQuality",
"test/test_reads.py::TestRead::testGetitemFullCopy",
"test/test_reads.py::TestRead::testGetitemId",
"test/test_reads.py::TestRead::testGetitemLength",
"test/test_reads.py::TestRead::testGetitemQuality",
"test/test_reads.py::TestRead::testGetitemReturnsNewRead",
"test/test_reads.py::TestRead::testGetitemReversed",
"test/test_reads.py::TestRead::testGetitemSequence",
"test/test_reads.py::TestRead::testGetitemSingleIndex",
"test/test_reads.py::TestRead::testGetitemWithStep",
"test/test_reads.py::TestRead::testHashDiffersIfIdDiffers",
"test/test_reads.py::TestRead::testHashDiffersIfQualityDiffers",
"test/test_reads.py::TestRead::testHashDiffersIfSequenceDiffers",
"test/test_reads.py::TestRead::testHashIdenticalNoQuality",
"test/test_reads.py::TestRead::testHashIdenticalWithQuality",
"test/test_reads.py::TestRead::testHashViaDict",
"test/test_reads.py::TestRead::testHashViaSet",
"test/test_reads.py::TestRead::testKeepSites",
"test/test_reads.py::TestRead::testKeepSitesAllSites",
"test/test_reads.py::TestRead::testKeepSitesNoSites",
"test/test_reads.py::TestRead::testKeepSitesOutOfRange",
"test/test_reads.py::TestRead::testKeepSitesWithQuality",
"test/test_reads.py::TestRead::testLength",
"test/test_reads.py::TestRead::testLowComplexityFraction",
"test/test_reads.py::TestRead::testLowComplexityFractionEmptySequence",
"test/test_reads.py::TestRead::testLowComplexityFractionOne",
"test/test_reads.py::TestRead::testLowComplexityFractionZero",
"test/test_reads.py::TestRead::testNoQuality",
"test/test_reads.py::TestRead::testRemoveSites",
"test/test_reads.py::TestRead::testRemoveSitesAllSites",
"test/test_reads.py::TestRead::testRemoveSitesNoSites",
"test/test_reads.py::TestRead::testRemoveSitesOutOfRange",
"test/test_reads.py::TestRead::testRemoveSitesWithQuality",
"test/test_reads.py::TestRead::testReverseNoQuality",
"test/test_reads.py::TestRead::testReverseWithQuality",
"test/test_reads.py::TestRead::testToDict",
"test/test_reads.py::TestRead::testToDictNoQuality",
"test/test_reads.py::TestRead::testToFASTA",
"test/test_reads.py::TestRead::testToFASTAWithQuality",
"test/test_reads.py::TestRead::testToFASTQ",
"test/test_reads.py::TestRead::testToFASTQWithNoQuality",
"test/test_reads.py::TestRead::testToUnknownFormat",
"test/test_reads.py::TestRead::testUnequalLengths",
"test/test_reads.py::TestRead::testWalkHSPExactMatch",
"test/test_reads.py::TestRead::testWalkHSPExactMatchWithGap",
"test/test_reads.py::TestRead::testWalkHSPLeftAndRightOverhangingMatch",
"test/test_reads.py::TestRead::testWalkHSPLeftAndRightOverhangingMatchNoWhiskers",
"test/test_reads.py::TestRead::testWalkHSPLeftOverhangingMatch",
"test/test_reads.py::TestRead::testWalkHSPLeftOverhangingMatchNoWhiskers",
"test/test_reads.py::TestRead::testWalkHSPRightOverhangingMatch",
"test/test_reads.py::TestRead::testWalkHSPRightOverhangingMatchNoWhiskers",
"test/test_reads.py::TestDNARead::testGetitemReturnsNewDNARead",
"test/test_reads.py::TestDNARead::testReverseComplement",
"test/test_reads.py::TestDNARead::testReverseComplementAmbiguous",
"test/test_reads.py::TestDNARead::testReverseComplementLowercaseLetters",
"test/test_reads.py::TestDNARead::testReverseComplementReversesQuality",
"test/test_reads.py::TestDNARead::testTranslationOfMultipleStopCodons",
"test/test_reads.py::TestDNARead::testTranslationOfStartCodonATG",
"test/test_reads.py::TestDNARead::testTranslationOfStopCodonTAG",
"test/test_reads.py::TestDNARead::testTranslationOfStopCodonTGA",
"test/test_reads.py::TestDNARead::testTranslations",
"test/test_reads.py::TestDNARead::testTranslationsOfEmptySequence",
"test/test_reads.py::TestDNARead::testTranslationsOfOneBaseSequence",
"test/test_reads.py::TestDNARead::testTranslationsOfTwoBaseSequence",
"test/test_reads.py::TestRNARead::testGetitemReturnsNewRNARead",
"test/test_reads.py::TestRNARead::testReverseComplement",
"test/test_reads.py::TestRNARead::testReverseComplementAmbiguous",
"test/test_reads.py::TestRNARead::testTranslationOfStopCodonUAA",
"test/test_reads.py::TestDNAKozakRead::testEqualFunction",
"test/test_reads.py::TestDNAKozakRead::testEqualFunctionDifferentKozakSequence",
"test/test_reads.py::TestDNAKozakRead::testEqualFunctionDifferentOriginalSequence",
"test/test_reads.py::TestDNAKozakRead::testSequence",
"test/test_reads.py::TestDNAKozakRead::testStart",
"test/test_reads.py::TestDNAKozakRead::testStartGreaterThanStop",
"test/test_reads.py::TestDNAKozakRead::testStartNegative",
"test/test_reads.py::TestDNAKozakRead::testStop",
"test/test_reads.py::TestDNAKozakRead::testStopGreaterThanOriginalSequenceLength",
"test/test_reads.py::TestAARead::testCloseCloseORF",
"test/test_reads.py::TestAARead::testCloseCloseORFWithJunk",
"test/test_reads.py::TestAARead::testCloseCloseThenCloseCloseORF",
"test/test_reads.py::TestAARead::testCloseCloseThenCloseCloseThenCloseCloseORF",
"test/test_reads.py::TestAARead::testCloseCloseThenCloseCloseThenCloseCloseORFWithJunk",
"test/test_reads.py::TestAARead::testCloseCloseThenCloseCloseThenCloseOpenORFWithJunk",
"test/test_reads.py::TestAARead::testCloseCloseThenCloseCloseThenNothingORF",
"test/test_reads.py::TestAARead::testCloseCloseThenCloseOpenORF",
"test/test_reads.py::TestAARead::testCloseOpenORFWithMultipleStarts",
"test/test_reads.py::TestAARead::testGetitemReturnsNewAARead",
"test/test_reads.py::TestAARead::testNoStartCodon_GithubIssue239",
"test/test_reads.py::TestAARead::testORFsEmptySequence",
"test/test_reads.py::TestAARead::testORFsEmptySequenceWithStart",
"test/test_reads.py::TestAARead::testORFsEmptySequenceWithStartOpenORFs",
"test/test_reads.py::TestAARead::testORFsEmptySequenceWithStartStop",
"test/test_reads.py::TestAARead::testORFsEmptySequenceWithStartStopOpenORFs",
"test/test_reads.py::TestAARead::testORFsEmptySequenceWithStopStartOpenORFs",
"test/test_reads.py::TestAARead::testORFsSequenceWithOneAAOpenORFs",
"test/test_reads.py::TestAARead::testORFsWithJustStartsAndStops",
"test/test_reads.py::TestAARead::testORFsWithOneStopCodon",
"test/test_reads.py::TestAARead::testORFsWithOneStopCodonOpenORFs",
"test/test_reads.py::TestAARead::testORFsWithTwoStopCodons",
"test/test_reads.py::TestAARead::testOpenCloseORF",
"test/test_reads.py::TestAARead::testOpenCloseORFWithMultipleStops",
"test/test_reads.py::TestAARead::testOpenCloseThenCloseCloseThenCloseOpenORF",
"test/test_reads.py::TestAARead::testOpenCloseThenCloseCloseThenCloseOpenORFWithJunk",
"test/test_reads.py::TestAARead::testOpenCloseThenCloseOpenORF",
"test/test_reads.py::TestAARead::testOpenOpenORF",
"test/test_reads.py::TestAARead::testPropertiesCorrectTranslation",
"test/test_reads.py::TestAARead::testPropertyDetailsCorrectTranslation",
"test/test_reads.py::TestAAReadWithX::testAlphabet",
"test/test_reads.py::TestAAReadWithX::testAlphabetChecking",
"test/test_reads.py::TestAAReadWithX::testGetitemReturnsNewAAReadWithX",
"test/test_reads.py::TestAAReadORF::testClosedClosedId",
"test/test_reads.py::TestAAReadORF::testClosedOpenId",
"test/test_reads.py::TestAAReadORF::testFromDict",
"test/test_reads.py::TestAAReadORF::testOpenClosedId",
"test/test_reads.py::TestAAReadORF::testOpenLeft",
"test/test_reads.py::TestAAReadORF::testOpenOpenId",
"test/test_reads.py::TestAAReadORF::testOpenRight",
"test/test_reads.py::TestAAReadORF::testSequence",
"test/test_reads.py::TestAAReadORF::testStart",
"test/test_reads.py::TestAAReadORF::testStartGreaterThanStop",
"test/test_reads.py::TestAAReadORF::testStartNegative",
"test/test_reads.py::TestAAReadORF::testStop",
"test/test_reads.py::TestAAReadORF::testStopGreaterThanOriginalSequenceLength",
"test/test_reads.py::TestAAReadORF::testToDict",
"test/test_reads.py::TestSSAARead::testCorrectAttributes",
"test/test_reads.py::TestSSAARead::testFromDict",
"test/test_reads.py::TestSSAARead::testGetitemFullCopy",
"test/test_reads.py::TestSSAARead::testGetitemId",
"test/test_reads.py::TestSSAARead::testGetitemLength",
"test/test_reads.py::TestSSAARead::testGetitemReturnsNewRead",
"test/test_reads.py::TestSSAARead::testGetitemReversed",
"test/test_reads.py::TestSSAARead::testGetitemSequence",
"test/test_reads.py::TestSSAARead::testGetitemSingleIndex",
"test/test_reads.py::TestSSAARead::testGetitemStructure",
"test/test_reads.py::TestSSAARead::testGetitemWithStep",
"test/test_reads.py::TestSSAARead::testHashDiffersIfIdDiffers",
"test/test_reads.py::TestSSAARead::testHashDiffersIfSequenceDiffers",
"test/test_reads.py::TestSSAARead::testHashDiffersIfStructureDiffers",
"test/test_reads.py::TestSSAARead::testHashViaDict",
"test/test_reads.py::TestSSAARead::testHashViaSet",
"test/test_reads.py::TestSSAARead::testKeepSites",
"test/test_reads.py::TestSSAARead::testKeepSitesAllSites",
"test/test_reads.py::TestSSAARead::testKeepSitesNoSites",
"test/test_reads.py::TestSSAARead::testKeepSitesOutOfRange",
"test/test_reads.py::TestSSAARead::testReads",
"test/test_reads.py::TestSSAARead::testRemoveSites",
"test/test_reads.py::TestSSAARead::testRemoveSitesAllSites",
"test/test_reads.py::TestSSAARead::testRemoveSitesNoSites",
"test/test_reads.py::TestSSAARead::testRemoveSitesOutOfRange",
"test/test_reads.py::TestSSAARead::testSequenceLengthMatchesStructureLength",
"test/test_reads.py::TestSSAARead::testToDict",
"test/test_reads.py::TestSSAARead::testToString",
"test/test_reads.py::TestSSAARead::testToStringWithExplicitFastaFormat",
"test/test_reads.py::TestSSAARead::testToStringWithExplicitFastaSSFormat",
"test/test_reads.py::TestSSAARead::testToStringWithStructureSuffix",
"test/test_reads.py::TestSSAARead::testToStringWithUnknownFormat",
"test/test_reads.py::TestSSAAReadWithX::testCorrectAttributes",
"test/test_reads.py::TestSSAAReadWithX::testFromDict",
"test/test_reads.py::TestSSAAReadWithX::testGetitemFullCopy",
"test/test_reads.py::TestSSAAReadWithX::testGetitemId",
"test/test_reads.py::TestSSAAReadWithX::testGetitemLength",
"test/test_reads.py::TestSSAAReadWithX::testGetitemReturnsNewRead",
"test/test_reads.py::TestSSAAReadWithX::testGetitemReversed",
"test/test_reads.py::TestSSAAReadWithX::testGetitemSequence",
"test/test_reads.py::TestSSAAReadWithX::testGetitemSingleIndex",
"test/test_reads.py::TestSSAAReadWithX::testGetitemStructure",
"test/test_reads.py::TestSSAAReadWithX::testGetitemWithStep",
"test/test_reads.py::TestSSAAReadWithX::testHashDiffersIfIdDiffers",
"test/test_reads.py::TestSSAAReadWithX::testHashDiffersIfSequenceDiffers",
"test/test_reads.py::TestSSAAReadWithX::testHashDiffersIfStructureDiffers",
"test/test_reads.py::TestSSAAReadWithX::testHashViaDict",
"test/test_reads.py::TestSSAAReadWithX::testHashViaSet",
"test/test_reads.py::TestSSAAReadWithX::testKeepSites",
"test/test_reads.py::TestSSAAReadWithX::testKeepSitesAllSites",
"test/test_reads.py::TestSSAAReadWithX::testKeepSitesNoSites",
"test/test_reads.py::TestSSAAReadWithX::testKeepSitesOutOfRange",
"test/test_reads.py::TestSSAAReadWithX::testReads",
"test/test_reads.py::TestSSAAReadWithX::testRemoveSites",
"test/test_reads.py::TestSSAAReadWithX::testRemoveSitesAllSites",
"test/test_reads.py::TestSSAAReadWithX::testRemoveSitesNoSites",
"test/test_reads.py::TestSSAAReadWithX::testRemoveSitesOutOfRange",
"test/test_reads.py::TestSSAAReadWithX::testSequenceContainingX",
"test/test_reads.py::TestSSAAReadWithX::testSequenceLengthMatchesStructureLength",
"test/test_reads.py::TestSSAAReadWithX::testToDict",
"test/test_reads.py::TestSSAAReadWithX::testToString",
"test/test_reads.py::TestSSAAReadWithX::testToStringWithExplicitFastaFormat",
"test/test_reads.py::TestSSAAReadWithX::testToStringWithExplicitFastaSSFormat",
"test/test_reads.py::TestSSAAReadWithX::testToStringWithStructureSuffix",
"test/test_reads.py::TestSSAAReadWithX::testToStringWithUnknownFormat",
"test/test_reads.py::TestTranslatedRead::testExpectedAttributes",
"test/test_reads.py::TestTranslatedRead::testExpectedFrame",
"test/test_reads.py::TestTranslatedRead::testFromDict",
"test/test_reads.py::TestTranslatedRead::testId",
"test/test_reads.py::TestTranslatedRead::testIdReverseComplemented",
"test/test_reads.py::TestTranslatedRead::testMaximumORFLength",
"test/test_reads.py::TestTranslatedRead::testMaximumORFLengthNoOpenORF",
"test/test_reads.py::TestTranslatedRead::testMaximumORFLengthNoStops",
"test/test_reads.py::TestTranslatedRead::testOutOfRangeFrame",
"test/test_reads.py::TestTranslatedRead::testReverseComplemented",
"test/test_reads.py::TestTranslatedRead::testSequence",
"test/test_reads.py::TestTranslatedRead::testToDict",
"test/test_reads.py::TestReadClassNameToClass::testNames",
"test/test_reads.py::TestReads::testEmptyInitialReads",
"test/test_reads.py::TestReads::testInitialReads",
"test/test_reads.py::TestReads::testManuallyAddedReads",
"test/test_reads.py::TestReads::testManuallyAddedReadsLength",
"test/test_reads.py::TestReads::testNoReads",
"test/test_reads.py::TestReads::testNoReadsLength",
"test/test_reads.py::TestReads::testNoVariableSitesConfirm",
"test/test_reads.py::TestReads::testNoVariableSitesUnconfirm",
"test/test_reads.py::TestReads::testOneAmbiguousCompatibleVariableSitesConfirm",
"test/test_reads.py::TestReads::testOneAmbiguousCompatibleVariableSitesUnconfirm",
"test/test_reads.py::TestReads::testOneAmbiguousIncompatibleVariableSitesConfirm",
"test/test_reads.py::TestReads::testOneAmbiguousIncompatibleVariableSitesUnconfirm",
"test/test_reads.py::TestReads::testOneVariableSitesConfirm",
"test/test_reads.py::TestReads::testOneVariableSitesUnconfirm",
"test/test_reads.py::TestReads::testRepeatedIter",
"test/test_reads.py::TestReads::testSaveAsFASTA",
"test/test_reads.py::TestReads::testSaveAsFASTQ",
"test/test_reads.py::TestReads::testSaveAsFASTQFailsOnReadWithNoQuality",
"test/test_reads.py::TestReads::testSaveFASTAIsDefault",
"test/test_reads.py::TestReads::testSaveReturnsReadCount",
"test/test_reads.py::TestReads::testSaveToFileDescriptor",
"test/test_reads.py::TestReads::testSaveWithUnknownFormat",
"test/test_reads.py::TestReads::testSaveWithUppercaseFormat",
"test/test_reads.py::TestReads::testSubclass",
"test/test_reads.py::TestReads::testSubclassLength",
"test/test_reads.py::TestReads::testSubclassWithAdditionalReads",
"test/test_reads.py::TestReads::testUnfilteredLengthAdditionalReads",
"test/test_reads.py::TestReads::testUnfilteredLengthAdditionalReadsAfterFiltering",
"test/test_reads.py::TestReads::testUnfilteredLengthBeforeIterating",
"test/test_reads.py::TestReads::testUnfilteredLengthInitialReads",
"test/test_reads.py::TestReads::testUnfilteredLengthInitialReadsAfterFiltering",
"test/test_reads.py::TestReads::testUnfilteredLengthInitialReadsIsReads",
"test/test_reads.py::TestReads::testUnfilteredLengthInitialReadsIsReadsWithAdditional",
"test/test_reads.py::TestReads::testUnfilteredLengthInitialSubclassThenFiltered",
"test/test_reads.py::TestReads::testUnfilteredLengthInitialSubclassWithAdditionalThenFiltered",
"test/test_reads.py::TestReads::testUnfilteredLengthInitialSubclassWithNoLen",
"test/test_reads.py::TestReads::testUnfilteredLengthNoReads",
"test/test_reads.py::TestReads::testVariableSitesHeterogeneous",
"test/test_reads.py::TestReads::testVariableSitesHeterogeneousCounts",
"test/test_reads.py::TestReads::testVariableSitesTooHomogeneous",
"test/test_reads.py::TestReads::testVariableSitesUnequalLengths",
"test/test_reads.py::TestReadsFiltering::testAddFiltersThenClearFilters",
"test/test_reads.py::TestReadsFiltering::testFilterBlacklist",
"test/test_reads.py::TestReadsFiltering::testFilterDoNotRemoveDescriptions",
"test/test_reads.py::TestReadsFiltering::testFilterDuplicates",
"test/test_reads.py::TestReadsFiltering::testFilterDuplicatesById",
"test/test_reads.py::TestReadsFiltering::testFilterDuplicatesByIdMD5",
"test/test_reads.py::TestReadsFiltering::testFilterDuplicatesUseMD5",
"test/test_reads.py::TestReadsFiltering::testFilterHead",
"test/test_reads.py::TestReadsFiltering::testFilterHeadZero",
"test/test_reads.py::TestReadsFiltering::testFilterKeepSequences",
"test/test_reads.py::TestReadsFiltering::testFilterKeepSequencesNoSequences",
"test/test_reads.py::TestReadsFiltering::testFilterNegativeRegex",
"test/test_reads.py::TestReadsFiltering::testFilterNoArgs",
"test/test_reads.py::TestReadsFiltering::testFilterOnLengthEverythingMatches",
"test/test_reads.py::TestReadsFiltering::testFilterOnLengthNothingMatches",
"test/test_reads.py::TestReadsFiltering::testFilterOnMaxLength",
"test/test_reads.py::TestReadsFiltering::testFilterOnMinLength",
"test/test_reads.py::TestReadsFiltering::testFilterPositiveRegex",
"test/test_reads.py::TestReadsFiltering::testFilterRandomSubsetOfFiveFromFiveReads",
"test/test_reads.py::TestReadsFiltering::testFilterRandomSubsetOfFiveFromOneRead",
"test/test_reads.py::TestReadsFiltering::testFilterRandomSubsetOfOneFromOneRead",
"test/test_reads.py::TestReadsFiltering::testFilterRandomSubsetOfTwoFromFiveReads",
"test/test_reads.py::TestReadsFiltering::testFilterRandomSubsetOfZeroReads",
"test/test_reads.py::TestReadsFiltering::testFilterRandomSubsetSizeZeroNoReads",
"test/test_reads.py::TestReadsFiltering::testFilterRandomSubsetSizeZeroTwoReads",
"test/test_reads.py::TestReadsFiltering::testFilterRemoveDescriptions",
"test/test_reads.py::TestReadsFiltering::testFilterRemoveGaps",
"test/test_reads.py::TestReadsFiltering::testFilterRemoveGapsWithQuality",
"test/test_reads.py::TestReadsFiltering::testFilterRemoveSequences",
"test/test_reads.py::TestReadsFiltering::testFilterRemoveSequencesNoSequences",
"test/test_reads.py::TestReadsFiltering::testFilterReturnsReadInstance",
"test/test_reads.py::TestReadsFiltering::testFilterTruncateTitles",
"test/test_reads.py::TestReadsFiltering::testFilterWhitelist",
"test/test_reads.py::TestReadsFiltering::testFilterWithMinLengthEqualToMaxLength",
"test/test_reads.py::TestReadsFiltering::testFilterWithModifierThatChangesIds",
"test/test_reads.py::TestReadsFiltering::testFilterWithModifierThatOmits",
"test/test_reads.py::TestReadsFiltering::testFilterWithModifierThatOmitsAndChangesIds",
"test/test_reads.py::TestReadsFiltering::testFilteredReadsInstanceHasExpectedLength",
"test/test_reads.py::TestReadsFiltering::testIdLambda",
"test/test_reads.py::TestReadsFiltering::testIdLambdaReturningNone",
"test/test_reads.py::TestReadsFiltering::testKeepSites",
"test/test_reads.py::TestReadsFiltering::testKeepSitesAllSites",
"test/test_reads.py::TestReadsFiltering::testKeepSitesNoSites",
"test/test_reads.py::TestReadsFiltering::testKeepSitesOutOfRange",
"test/test_reads.py::TestReadsFiltering::testKeepSitesWithQuality",
"test/test_reads.py::TestReadsFiltering::testLineNumberFile",
"test/test_reads.py::TestReadsFiltering::testLineNumberFileEmpty",
"test/test_reads.py::TestReadsFiltering::testLineNumberFileFirstLineTooSmall",
"test/test_reads.py::TestReadsFiltering::testLineNumberFileNonAscending",
"test/test_reads.py::TestReadsFiltering::testLineNumberFileRunOutOfSequences",
"test/test_reads.py::TestReadsFiltering::testReadLambda",
"test/test_reads.py::TestReadsFiltering::testReadLambdaReturningNone",
"test/test_reads.py::TestReadsFiltering::testRemoveAndKeepSites",
"test/test_reads.py::TestReadsFiltering::testRemoveSites",
"test/test_reads.py::TestReadsFiltering::testRemoveSitesAllSites",
"test/test_reads.py::TestReadsFiltering::testRemoveSitesNoSites",
"test/test_reads.py::TestReadsFiltering::testRemoveSitesOutOfRange",
"test/test_reads.py::TestReadsFiltering::testRemoveSitesWithQuality",
"test/test_reads.py::TestReadsFiltering::testReverse",
"test/test_reads.py::TestReadsFiltering::testReverseAndReverseComplement",
"test/test_reads.py::TestReadsFiltering::testReverseComplement",
"test/test_reads.py::TestReadsFiltering::testReverseComplementAARead",
"test/test_reads.py::TestReadsFiltering::testReverseComplementNonDNA",
"test/test_reads.py::TestReadsFiltering::testSampleFractionAndNoTrueLengthRaisesValueError",
"test/test_reads.py::TestReadsFiltering::testSampleFractionAndRandomSubsetRaisesValueError",
"test/test_reads.py::TestReadsFiltering::testSampleFractionOne",
"test/test_reads.py::TestReadsFiltering::testSampleFractionPointOne",
"test/test_reads.py::TestReadsFiltering::testSampleFractionZero",
"test/test_reads.py::TestReadsInRAM::testAdd",
"test/test_reads.py::TestReadsInRAM::testFastaFile",
"test/test_reads.py::TestReadsInRAM::testFromReads",
"test/test_reads.py::TestReadsInRAM::testNoReads",
"test/test_reads.py::TestReadsInRAM::testOneReadIndex",
"test/test_reads.py::TestReadsInRAM::testOneReadLength",
"test/test_reads.py::TestReadsInRAM::testOneReadList",
"test/test_reads.py::TestReadsInRAM::testSetItem",
"test/test_reads.py::TestReadsInRAM::testTwoReadsIndex",
"test/test_reads.py::TestReadsInRAM::testTwoReadsLength",
"test/test_reads.py::TestReadsInRAM::testTwoReadsList",
"test/test_reads.py::TestSummarizePosition::testCorrectFrequencies",
"test/test_reads.py::TestSummarizePosition::testExcludeShortSequences",
"test/test_reads.py::TestSummarizePosition::testFrequenciesNoReads",
"test/test_reads.py::TestSummarizePosition::testIndexLargerThanSequenceLength",
"test/test_reads.py::TestSummarizePosition::testNumberOfExclusionsNoReads",
"test/test_reads.py::TestSitesMatching::testAllMatches",
"test/test_reads.py::TestSitesMatching::testIgnoreCase",
"test/test_reads.py::TestSitesMatching::testMatchCase",
"test/test_reads.py::TestSitesMatching::testMultipleReadsAll",
"test/test_reads.py::TestSitesMatching::testMultipleReadsAllWithDifferingLengths",
"test/test_reads.py::TestSitesMatching::testMultipleReadsAny",
"test/test_reads.py::TestSitesMatching::testMultipleReadsAnyWithDifferingLengths",
"test/test_reads.py::TestSitesMatching::testNoMatches",
"test/test_reads.py::TestSitesMatching::testPartialMatch"
] | {
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false
} | 2020-03-24 11:35:19+00:00 | mit | 878 |
|
adafruit__Adafruit_CircuitPython_GPS-76 | diff --git a/adafruit_gps.py b/adafruit_gps.py
index 10bc89c..a28a065 100644
--- a/adafruit_gps.py
+++ b/adafruit_gps.py
@@ -50,11 +50,11 @@ _ST_MAX = _RMC_4_1
_SENTENCE_PARAMS = (
# 0 - _GLL
- "dcdcfcC",
+ "dcdcscC",
# 1 - _RMC
- "fcdcdcffiDCC",
+ "scdcdcffsDCC",
# 2 - _GGA
- "fdcdciiffsfsIS",
+ "sdcdciiffsfsIS",
# 3 - _GSA
"ciIIIIIIIIIIIIfff",
# 4 - _GSA_4_11
@@ -68,7 +68,7 @@ _SENTENCE_PARAMS = (
# 8 - _GSV19
"iiiiiiIiiiIiiiIiiiI",
# 9 - _RMC_4_1
- "fcdcdcffiDCCC",
+ "scdcdcffsDCCC",
)
@@ -394,9 +394,9 @@ class GPS:
return (data_type, sentence[delimiter + 1 :])
def _update_timestamp_utc(self, time_utc, date=None):
- hours = time_utc // 10000
- mins = (time_utc // 100) % 100
- secs = time_utc % 100
+ hours = int(time_utc[0:2])
+ mins = int(time_utc[2:4])
+ secs = int(time_utc[4:6])
if date is None:
if self.timestamp_utc is None:
day, month, year = 0, 0, 0
@@ -405,9 +405,9 @@ class GPS:
month = self.timestamp_utc.tm_mon
year = self.timestamp_utc.tm_year
else:
- day = date // 10000
- month = (date // 100) % 100
- year = 2000 + date % 100
+ day = int(date[0:2])
+ month = int(date[2:4])
+ year = 2000 + int(date[4:6])
self.timestamp_utc = time.struct_time(
(year, month, day, hours, mins, secs, 0, 0, -1)
@@ -429,7 +429,7 @@ class GPS:
self.longitude = _read_degrees(data, 2, "w")
# UTC time of position
- self._update_timestamp_utc(int(data[4]))
+ self._update_timestamp_utc(data[4])
# Status Valid(A) or Invalid(V)
self.isactivedata = data[5]
@@ -450,7 +450,7 @@ class GPS:
return False # Params didn't parse
# UTC time of position and date
- self._update_timestamp_utc(int(data[0]), data[8])
+ self._update_timestamp_utc(data[0], data[8])
# Status Valid(A) or Invalid(V)
self.isactivedata = data[1]
@@ -494,7 +494,7 @@ class GPS:
return False # Params didn't parse
# UTC time of position
- self._update_timestamp_utc(int(data[0]))
+ self._update_timestamp_utc(data[0])
# Latitude
self.latitude = _read_degrees(data, 1, "s")
| adafruit/Adafruit_CircuitPython_GPS | b9e06e93f258ef3251b8104e19c9cfd435d12d60 | diff --git a/tests/adafruit_gps_test.py b/tests/adafruit_gps_test.py
index 9e09c85..8a956f3 100644
--- a/tests/adafruit_gps_test.py
+++ b/tests/adafruit_gps_test.py
@@ -140,14 +140,14 @@ def test_GPS_update_timestamp_UTC_date_None():
assert gps.datetime is None
assert gps.timestamp_utc is None
exp_struct = time.struct_time((0, 0, 0, 22, 14, 11, 0, 0, -1))
- gps._update_timestamp_utc(time_utc=221411)
+ gps._update_timestamp_utc(time_utc="221411")
assert gps.timestamp_utc == exp_struct
def test_GPS_update_timestamp_UTC_date_not_None():
gps = GPS(uart=UartMock())
exp_struct = time.struct_time((2021, 10, 2, 22, 14, 11, 0, 0, -1))
- gps._update_timestamp_utc(time_utc=221411, date=21021)
+ gps._update_timestamp_utc(time_utc="221411", date="021021")
assert gps.timestamp_utc == exp_struct
@@ -157,7 +157,7 @@ def test_GPS_update_timestamp_timestamp_utc_was_not_none_new_date_none():
gps.timestamp_utc = time.struct_time((2021, 10, 2, 22, 10, 11, 0, 0, -1))
exp_struct = time.struct_time((2021, 10, 2, 22, 14, 11, 0, 0, -1))
# update the timestamp
- gps._update_timestamp_utc(time_utc=221411)
+ gps._update_timestamp_utc(time_utc="221411")
assert gps.timestamp_utc == exp_struct
| Usage of float causes timestamp_utc to be imprecise
NMEA timestamp is written as HHMMSS.sss, which is stored as float in the module.
Later it is converted to int, which is inaccurate: the on-device float parse can land fractionally below the intended value, and int() then truncates toward zero.
See example:
```
>>> int(184105.000)
184104
```
This is then math'ed into the fields in time_struct.
Propose storing the NMEA sentence as a string instead, and grabbing the sections of the string directly.
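A minimal sketch of the proposed slicing — the same approach the patch above takes in `_update_timestamp_utc`; the standalone helper name is illustrative:

```python
def parse_time_utc(time_utc):
    """Slice an NMEA HHMMSS.sss field without any float arithmetic."""
    hours = int(time_utc[0:2])
    mins = int(time_utc[2:4])
    secs = int(time_utc[4:6])
    return hours, mins, secs

assert parse_time_utc("184105.000") == (18, 41, 5)
```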
"tests/adafruit_gps_test.py::test_GPS_update_timestamp_timestamp_utc_was_not_none_new_date_none",
"tests/adafruit_gps_test.py::test_GPS_update_timestamp_UTC_date_None",
"tests/adafruit_gps_test.py::test_GPS_update_timestamp_UTC_date_not_None"
] | [
"tests/adafruit_gps_test.py::test_parse_talker_regular",
"tests/adafruit_gps_test.py::test_GPS_update_empty_sentence",
"tests/adafruit_gps_test.py::test_read_degrees[west",
"tests/adafruit_gps_test.py::test_GPS_update_from_GSA[greater",
"tests/adafruit_gps_test.py::test_parse_float_invalid[None]",
"tests/adafruit_gps_test.py::test_parse_degrees[long",
"tests/adafruit_gps_test.py::test_parse_float",
"tests/adafruit_gps_test.py::test_GPS_update_rmc_has_magnetic_variation[E]",
"tests/adafruit_gps_test.py::test_parse_data_unknown_sentence_type[10]",
"tests/adafruit_gps_test.py::test_GPS_update_from_GSV_both_parts_sats_are_removed",
"tests/adafruit_gps_test.py::test_parse_str_invalid[]",
"tests/adafruit_gps_test.py::test_read_sentence_too_few_in_waiting",
"tests/adafruit_gps_test.py::test_GPS_update_rmc_fix_is_set",
"tests/adafruit_gps_test.py::test_GPS_update_data_type_too_short",
"tests/adafruit_gps_test.py::test_GPS_update_sentence_is_None",
"tests/adafruit_gps_test.py::test_read_degrees[south",
"tests/adafruit_gps_test.py::test_GPS_send_command_without_checksum",
"tests/adafruit_gps_test.py::test_parse_degrees[leading",
"tests/adafruit_gps_test.py::test_parse_int_invalid[None]",
"tests/adafruit_gps_test.py::test_parse_str_invalid[None]",
"tests/adafruit_gps_test.py::test_parse_degrees[regular",
"tests/adafruit_gps_test.py::test_parse_str_valid",
"tests/adafruit_gps_test.py::test_GPS_update_from_GLL",
"tests/adafruit_gps_test.py::test_parse_int_invalid[]",
"tests/adafruit_gps_test.py::test_GPS_update_with_unknown_talker",
"tests/adafruit_gps_test.py::test_read_degrees[north",
"tests/adafruit_gps_test.py::test_GPS_update_from_GSV_first_part",
"tests/adafruit_gps_test.py::test_GPS_update_rmc_debug_shows_sentence",
"tests/adafruit_gps_test.py::test_GPS_update_rmc_no_magnetic_variation",
"tests/adafruit_gps_test.py::test_parse_degrees_too_short",
"tests/adafruit_gps_test.py::test_GPS_update_from_GSA[smaller",
"tests/adafruit_gps_test.py::test_GPS_update_rmc_invalid_checksum",
"tests/adafruit_gps_test.py::test_GPS_update_from_GGA",
"tests/adafruit_gps_test.py::test_parse_data_unexpected_parameter_type",
"tests/adafruit_gps_test.py::test_GPS_update_rmc_has_magnetic_variation[W]",
"tests/adafruit_gps_test.py::test_parse_data_unknown_sentence_type[-1]",
"tests/adafruit_gps_test.py::test_parse_talker_prop_code",
"tests/adafruit_gps_test.py::test_read_degrees[east",
"tests/adafruit_gps_test.py::test_GPS_update_rmc_fix_is_set_new",
"tests/adafruit_gps_test.py::test_parse_float_invalid[]",
"tests/adafruit_gps_test.py::test_GPS_send_command_with_checksum",
"tests/adafruit_gps_test.py::test_GPS_update_from_RMC",
"tests/adafruit_gps_test.py::test_param_types_does_not_match_data_items",
"tests/adafruit_gps_test.py::test_parse_int",
"tests/adafruit_gps_test.py::test_parse_sentence_invalid_delimiter"
] | {
"failed_lite_validators": [
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false
} | 2021-12-08 21:02:48+00:00 | mit | 879 |
|
adafruit__circup-21 | diff --git a/circup.py b/circup.py
index da2acab..5e9aef8 100644
--- a/circup.py
+++ b/circup.py
@@ -502,9 +502,21 @@ def ensure_latest_bundle():
logger.exception(ex)
if tag > old_tag:
logger.info("New version available ({}).".format(tag))
- get_bundle(tag)
- with open(BUNDLE_DATA, "w", encoding="utf-8") as data:
- json.dump({"tag": tag}, data)
+ try:
+ get_bundle(tag)
+ with open(BUNDLE_DATA, "w", encoding="utf-8") as data:
+ json.dump({"tag": tag}, data)
+ except requests.exceptions.HTTPError as ex:
+ # See #20 for reason this this
+ click.secho(
+ (
+ "There was a problem downloading the bundle. "
+ "Please try again in a moment."
+ ),
+ fg="red",
+ )
+ logger.exception(ex)
+ sys.exit(1)
else:
logger.info("Current library bundle up to date ({}).".format(tag))
@@ -761,10 +773,14 @@ def install(name, py): # pragma: no cover
shutil.copyfile(source_path, target_path)
else:
# Use pre-compiled mpy modules.
- module_name = os.path.basename(metadata["path"]).replace(".py", ".mpy")
+ module_name = os.path.basename(metadata["path"]).replace(
+ ".py", ".mpy"
+ )
if not module_name:
# Must be a directory based module.
- module_name = os.path.basename(os.path.dirname(metadata["path"]))
+ module_name = os.path.basename(
+ os.path.dirname(metadata["path"])
+ )
major_version = CPY_VERSION.split(".")[0]
bundle_platform = "{}mpy".format(major_version)
bundle_path = ""
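A standalone sketch of the error-handling pattern the patch introduces — the function and message here are stand-ins for circup's internals, not its exact API:

```python
import sys
import requests

def fetch_bundle_or_exit(get_bundle, tag):
    """Turn a failed bundle download into a friendly message and exit 1."""
    try:
        get_bundle(tag)
    except requests.exceptions.HTTPError:
        # A 404 can occur while a release's assets are still uploading.
        print("There was a problem downloading the bundle. "
              "Please try again in a moment.", file=sys.stderr)
        sys.exit(1)
```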
| adafruit/circup | 024a8bc0cb4555cb122aadd567bd991222fd630a | diff --git a/tests/test_circup.py b/tests/test_circup.py
index 32e8ae8..40b87ee 100644
--- a/tests/test_circup.py
+++ b/tests/test_circup.py
@@ -26,6 +26,7 @@ import circup
import ctypes
import pytest
import json
+import requests
from unittest import mock
@@ -545,8 +546,8 @@ def test_ensure_latest_bundle_bad_bundle_data():
def test_ensure_latest_bundle_to_update():
"""
- If the version found in the BUNDLE_DATA is out of date, the cause an update
- to the bundle.
+ If the version found in the BUNDLE_DATA is out of date, then cause an
+ update to the bundle.
"""
with mock.patch("circup.get_latest_tag", return_value="54321"), mock.patch(
"circup.os.path.isfile", return_value=True
@@ -561,6 +562,30 @@ def test_ensure_latest_bundle_to_update():
assert mock_json.dump.call_count == 1 # Current version saved to file.
+def test_ensure_latest_bundle_to_update_http_error():
+ """
+ If an HTTP error happens during a bundle update, print a friendly
+ error message and exit 1.
+ """
+ with mock.patch("circup.get_latest_tag", return_value="54321"), mock.patch(
+ "circup.os.path.isfile", return_value=True
+ ), mock.patch("circup.open"), mock.patch(
+ "circup.get_bundle", side_effect=requests.exceptions.HTTPError("404")
+ ) as mock_gb, mock.patch(
+ "circup.json"
+ ) as mock_json, mock.patch(
+ "circup.click.secho"
+ ) as mock_click, mock.patch(
+ "circup.sys.exit"
+ ) as mock_exit:
+ mock_json.load.return_value = {"tag": "12345"}
+ circup.ensure_latest_bundle()
+ mock_gb.assert_called_once_with("54321")
+ assert mock_json.dump.call_count == 0 # not saved.
+ assert mock_click.call_count == 1  # friendly message.
+ mock_exit.assert_called_once_with(1) # exit 1.
+
+
def test_ensure_latest_bundle_no_update():
"""
If the version found in the BUNDLE_DATA is NOT out of date, just log the
| Error while running CircUp Show during bundle release
I kind of got lucky finding this one. At the time I ran it, the release had been created, but the bundle asset hadn't finished being generated.
```
bash-3.2$ circup show
Found device at /Volumes/CIRCUITPY, running CircuitPython 5.0.0-alpha.4.
Downloading latest version information.
Traceback (most recent call last):
File "/usr/local/bin/circup", line 10, in <module>
sys.exit(main())
File "/usr/local/lib/python3.7/site-packages/click/core.py", line 764, in __call__
return self.main(*args, **kwargs)
File "/usr/local/lib/python3.7/site-packages/click/core.py", line 717, in main
rv = self.invoke(ctx)
File "/usr/local/lib/python3.7/site-packages/click/core.py", line 1137, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/usr/local/lib/python3.7/site-packages/click/core.py", line 956, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/usr/local/lib/python3.7/site-packages/click/core.py", line 555, in invoke
return callback(*args, **kwargs)
File "/usr/local/lib/python3.7/site-packages/circup.py", line 719, in show
available_modules = get_bundle_versions()
File "/usr/local/lib/python3.7/site-packages/circup.py", line 403, in get_bundle_versions
ensure_latest_bundle()
File "/usr/local/lib/python3.7/site-packages/circup.py", line 505, in ensure_latest_bundle
get_bundle(tag)
File "/usr/local/lib/python3.7/site-packages/circup.py", line 547, in get_bundle
r.raise_for_status()
File "/usr/local/lib/python3.7/site-packages/requests/models.py", line 940, in raise_for_status
raise HTTPError(http_error_msg, response=self)
requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://github.com/adafruit/Adafruit_CircuitPython_Bundle/releases/download/20191029/adafruit-circuitpython-bundle-py-20191029.zip
``` | 0.0 | 024a8bc0cb4555cb122aadd567bd991222fd630a | [
"tests/test_circup.py::test_ensure_latest_bundle_to_update_http_error"
] | [
"tests/test_circup.py::test_Module_init_file_module",
"tests/test_circup.py::test_Module_init_directory_module",
"tests/test_circup.py::test_Module_outofdate",
"tests/test_circup.py::test_Module_outofdate_bad_versions",
"tests/test_circup.py::test_Module_row",
"tests/test_circup.py::test_Module_update_dir",
"tests/test_circup.py::test_Module_update_file",
"tests/test_circup.py::test_Module_repr",
"tests/test_circup.py::test_find_device_posix_exists",
"tests/test_circup.py::test_find_device_posix_no_mount_command",
"tests/test_circup.py::test_find_device_posix_missing",
"tests/test_circup.py::test_find_device_nt_exists",
"tests/test_circup.py::test_find_device_nt_missing",
"tests/test_circup.py::test_find_device_unknown_os",
"tests/test_circup.py::test_get_latest_tag",
"tests/test_circup.py::test_extract_metadata_python",
"tests/test_circup.py::test_extract_metadata_byte_code",
"tests/test_circup.py::test_find_modules",
"tests/test_circup.py::test_find_modules_goes_bang",
"tests/test_circup.py::test_get_bundle_versions",
"tests/test_circup.py::test_get_circuitpython_version",
"tests/test_circup.py::test_get_device_versions",
"tests/test_circup.py::test_get_modules_empty_path",
"tests/test_circup.py::test_get_modules_that_are_files",
"tests/test_circup.py::test_get_modules_that_are_directories",
"tests/test_circup.py::test_get_modules_that_are_directories_with_no_metadata",
"tests/test_circup.py::test_ensure_latest_bundle_no_bundle_data",
"tests/test_circup.py::test_ensure_latest_bundle_bad_bundle_data",
"tests/test_circup.py::test_ensure_latest_bundle_to_update",
"tests/test_circup.py::test_ensure_latest_bundle_no_update",
"tests/test_circup.py::test_get_bundle",
"tests/test_circup.py::test_get_bundle_network_error"
] | {
"failed_lite_validators": [
"has_hyperlinks"
],
"has_test_patch": true,
"is_lite": false
} | 2019-10-30 08:45:28+00:00 | mit | 880 |
|
adamboche__python-marshmallow-union-29 | diff --git a/src/marshmallow_union/__init__.py b/src/marshmallow_union/__init__.py
index 69e3099..b3b86e1 100644
--- a/src/marshmallow_union/__init__.py
+++ b/src/marshmallow_union/__init__.py
@@ -59,9 +59,22 @@ class Union(marshmallow.fields.Field):
for candidate_field in fields:
try:
- return candidate_field.serialize(
- attr, obj, error_store=error_store, **kwargs
- )
+ try:
+ return candidate_field.serialize(
+ attr, obj, error_store=error_store, **kwargs
+ )
+ except TypeError:
+ # When serialising a mapping (e.g. dict) value item, 'attr' and 'obj'
+ # are None (as a dict value is not an attribute of anything). This
+ # causes issues with the attribute-get methods within
+ # 'marshmallow', but can be bypassed by passing the known 'value'
+ # directly to '_serialize'
+ if attr is obj is None:
+ # pylint: disable=protected-access
+ return candidate_field._serialize(
+ value, attr, obj, **kwargs
+ )
+ raise
# pylint: disable=broad-except
except Exception as exc:
pass
| adamboche/python-marshmallow-union | 49f82a94403b686e56c4509cf75bca40a9dfe23a | diff --git a/tests/test_union.py b/tests/test_union.py
index 10e6066..c4ffd4f 100644
--- a/tests/test_union.py
+++ b/tests/test_union.py
@@ -32,6 +32,19 @@ class OtherSchema(marshmallow.Schema):
)
+class MappingSchema(marshmallow.Schema):
+ """Schema with union inside mapping."""
+ items = marshmallow.fields.Dict(
+ marshmallow.fields.String(),
+ marshmallow_union.Union(
+ [
+ marshmallow.fields.Integer(),
+ marshmallow.fields.List(marshmallow.fields.Integer()),
+ ],
+ ),
+ )
+
+
class StrIntSchema(marshmallow.Schema):
"""Schema with str and int candidates."""
@@ -46,6 +59,7 @@ class StrIntSchema(marshmallow.Schema):
({"name": "Alice", "number_or_numbers": [25, 50]}, OtherSchema()),
({"x": 5}, StrIntSchema()),
({"x": "hello"}, StrIntSchema()),
+ ({"items": {"a": 42, "b": [17]}}, MappingSchema()),
],
)
def test_round_trip(data, schema):
@@ -60,6 +74,7 @@ def test_round_trip(data, schema):
[
({"name": "Alice", "number_or_numbers": "twenty-five"}, PersonSchema()),
({"name": "Alice", "number_or_numbers": {"x": 14}}, PersonSchema()),
+ ({"items": {"a": 42, "b": "spam"}}, MappingSchema()),
],
)
def test_raises(data, schema):
| Support union for mapping value
When using a union for a mapping value-type, marshmallow-union raises `ExceptionGroup` during serialisation.
All candidate fields raise because marshmallow-union calls each candidate field's `serialize(attr, obj, ...)`. However, inside a mapping value-type, marshmallow calls the union field's `_serialize(value, None, None)`, so `attr` and `obj` are `None`, as the mapping values are not attributes of the serialised object.
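The same failure can be triggered with plain marshmallow fields; below is a minimal sketch (my own, mirroring the `MappingSchema` added in the test patch above):
```python
import marshmallow
import marshmallow_union

class MappingSchema(marshmallow.Schema):
    items = marshmallow.fields.Dict(
        marshmallow.fields.String(),
        marshmallow_union.Union(
            [
                marshmallow.fields.Integer(),
                marshmallow.fields.List(marshmallow.fields.Integer()),
            ]
        ),
    )

# Dict._serialize calls its value field with attr=None and obj=None, which
# the union's candidate fields cannot handle, so every candidate raises.
MappingSchema().dump({"items": {"a": 42, "b": [17]}})
```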
### Reproduction
```python
import typing as t
import dataclasses
import marshmallow
import marshmallow_dataclass
@marshmallow_dataclass.dataclass
class Foo:
bar: t.Dict[str, t.Union[int, str]] = dataclasses.field(default_factory=dict)
obj = Foo(bar={"spam": "eggs", "ham": 42})
schema = Foo.Schema()
schema.dump(obj)
```
### Expected
```python
{'bar': {'spam': 'eggs', 'ham': 42}}
```
### Actual
```python
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "~/env-tmp/lib/python3.7/site-packages/marshmallow/schema.py", line 553, in dump
result = self._serialize(processed_obj, many=many)
File "~/env-tmp/lib/python3.7/site-packages/marshmallow/schema.py", line 517, in _serialize
value = field_obj.serialize(attr_name, obj, accessor=self.get_attribute)
File "~/env-tmp/lib/python3.7/site-packages/marshmallow/fields.py", line 325, in serialize
return self._serialize(value, attr, obj, **kwargs)
  File "~/env-tmp/lib/python3.7/site-packages/marshmallow/fields.py", line 1510, in _serialize
result[keys[k]] = self.value_field._serialize(v, None, None, **kwargs)
File "~/env-tmp/lib/python3.7/site-packages/marshmallow_union/__init__.py", line 69, in _serialize
raise ExceptionGroup("All serializers raised exceptions.\n", error_store.errors)
marshmallow_union.ExceptionGroup: ('All serializers raised exceptions.\n', {})
```
### Environment
* OS: Ubuntu 19.10
* Python: 3.7.5
* marshmallow: 3.2.2
* marshmallow-union: 0.1.12
* marshmallow-dataclass: 7.1 | 0.0 | 49f82a94403b686e56c4509cf75bca40a9dfe23a | [
"tests/test_union.py::test_round_trip[data5-schema5]"
] | [
"tests/test_union.py::test_round_trip[data0-schema0]",
"tests/test_union.py::test_round_trip[data1-schema1]",
"tests/test_union.py::test_round_trip[data2-schema2]",
"tests/test_union.py::test_round_trip[data3-schema3]",
"tests/test_union.py::test_round_trip[data4-schema4]",
"tests/test_union.py::test_raises[data0-schema0]",
"tests/test_union.py::test_raises[data1-schema1]",
"tests/test_union.py::test_raises[data2-schema2]"
] | {
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true
} | 2019-12-27 12:40:46+00:00 | mit | 881 |
|
adamboche__python-marshmallow-union-33 | diff --git a/src/marshmallow_union/__init__.py b/src/marshmallow_union/__init__.py
index 22d5ef4..ee47920 100644
--- a/src/marshmallow_union/__init__.py
+++ b/src/marshmallow_union/__init__.py
@@ -13,6 +13,7 @@ class MarshmallowUnionException(Exception):
class ExceptionGroup(MarshmallowUnionException):
"""Collection of possibly multiple exceptions."""
+
def __init__(self, msg: str, errors):
self.msg = msg
self.errors = errors
@@ -63,8 +64,8 @@ class Union(marshmallow.fields.Field):
for candidate_field in fields:
try:
- return candidate_field.serialize(
- attr, obj, error_store=error_store, **kwargs
+ return candidate_field._serialize(
+ value, attr, obj, error_store=error_store, **kwargs
)
except ValueError as e:
error_store.store_error({attr: e})
| adamboche/python-marshmallow-union | 58bfc9fb069e00478afba87da3e003464cbdaebe | diff --git a/tests/test_union.py b/tests/test_union.py
index 6377c06..c93f004 100644
--- a/tests/test_union.py
+++ b/tests/test_union.py
@@ -52,6 +52,14 @@ class IntStrSchema(marshmallow.Schema):
x = marshmallow_union.Union([marshmallow.fields.Int(), marshmallow.fields.String()])
+class ListUnionSchema(marshmallow.Schema):
+ """Schema with a list of unions."""
+
+ l = marshmallow.fields.List(
+ marshmallow_union.Union([marshmallow.fields.Int(), marshmallow.fields.String()])
+ )
+
+
@pytest.mark.parametrize(
"data, schema",
[
@@ -59,6 +67,7 @@ class IntStrSchema(marshmallow.Schema):
({"name": "Alice", "number_or_numbers": [25, 50]}, PersonSchema()),
({"name": "Alice", "number_or_numbers": [25, 50]}, OtherSchema()),
({"x": 5}, IntStrSchema()),
+ ({"l": ["h", 5, "n", 1]}, ListUnionSchema()),
({"x": "hello"}, IntStrSchema()),
({"items": {"a": 42, "b": [17]}}, MappingSchema()),
],
| _serialize ignores the given value
The `_serialize` method of `Union` calls the `serialize` method (instead of `_serialize`) on the underlying fields. This means that it ignores the given `value` parameter, and may try to serialize a completely different value.
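As an illustration, here is a minimal sketch (my own, mirroring the `ListUnionSchema` added in the test patch above) where this surfaces:
```python
import marshmallow
import marshmallow_union

class ListUnionSchema(marshmallow.Schema):
    l = marshmallow.fields.List(
        marshmallow_union.Union(
            [marshmallow.fields.Int(), marshmallow.fields.String()]
        )
    )

# List._serialize hands each item to the inner Union as `value`, while
# `attr` and `obj` still point at the containing object. Calling
# candidate_field.serialize(attr, obj) therefore re-reads the whole list
# instead of serializing the item that was passed in.
ListUnionSchema().dump({"l": ["h", 5, "n", 1]})
```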
See: https://github.com/adamboche/python-marshmallow-union/blob/master/src/marshmallow_union/__init__.py#L66-L68
Initially reported in: https://github.com/lovasoa/marshmallow_dataclass/issues/67 | 0.0 | 58bfc9fb069e00478afba87da3e003464cbdaebe | [
"tests/test_union.py::test_round_trip[data4-schema4]"
] | [
"tests/test_union.py::test_round_trip[data0-schema0]",
"tests/test_union.py::test_round_trip[data1-schema1]",
"tests/test_union.py::test_round_trip[data2-schema2]",
"tests/test_union.py::test_round_trip[data3-schema3]",
"tests/test_union.py::test_round_trip[data5-schema5]",
"tests/test_union.py::test_round_trip[data6-schema6]",
"tests/test_union.py::test_load_raises[data0-schema0]",
"tests/test_union.py::test_load_raises[data1-schema1]",
"tests/test_union.py::test_load_raises[data2-schema2]",
"tests/test_union.py::test_dump_raises[data0-schema0]",
"tests/test_union.py::test_dump_raises[data1-schema1]",
"tests/test_union.py::test_dump_raises[data2-schema2]"
] | {
"failed_lite_validators": [
"has_hyperlinks"
],
"has_test_patch": true,
"is_lite": false
} | 2020-05-23 13:45:41+00:00 | mit | 882 |
|
adamchainz__apig-wsgi-118 | diff --git a/src/apig_wsgi.py b/src/apig_wsgi.py
index efb01d2..d5ea0f0 100644
--- a/src/apig_wsgi.py
+++ b/src/apig_wsgi.py
@@ -94,10 +94,8 @@ def get_environ(event, binary_support):
if key == "CONTENT_TYPE":
environ["CONTENT_TYPE"] = values[-1]
- continue
elif key == "HOST":
environ["SERVER_NAME"] = values[-1]
- continue
elif key == "X_FORWARDED_FOR":
environ["REMOTE_ADDR"] = values[-1].split(", ")[0]
elif key == "X_FORWARDED_PROTO":
| adamchainz/apig-wsgi | 4f3b043beb5067379a7d1459aea751eb74a27e5e | diff --git a/tests/test_apig_wsgi.py b/tests/test_apig_wsgi.py
index 736d859..fff9c37 100644
--- a/tests/test_apig_wsgi.py
+++ b/tests/test_apig_wsgi.py
@@ -343,10 +343,60 @@ def test_special_headers(simple_app):
simple_app.handler(event, None)
assert simple_app.environ["CONTENT_TYPE"] == "text/plain"
+ assert simple_app.environ["HTTP_CONTENT_TYPE"] == "text/plain"
assert simple_app.environ["SERVER_NAME"] == "example.com"
+ assert simple_app.environ["HTTP_HOST"] == "example.com"
assert simple_app.environ["REMOTE_ADDR"] == "1.2.3.4"
+ assert simple_app.environ["HTTP_X_FORWARDED_FOR"] == "1.2.3.4, 5.6.7.8"
assert simple_app.environ["wsgi.url_scheme"] == "https"
+ assert simple_app.environ["HTTP_X_FORWARDED_PROTO"] == "https"
assert simple_app.environ["SERVER_PORT"] == "123"
+ assert simple_app.environ["HTTP_X_FORWARDED_PORT"] == "123"
+
+
+def test_special_content_type(simple_app):
+ event = make_event(headers={"Content-Type": ["text/plain"]})
+
+ simple_app.handler(event, None)
+
+ assert simple_app.environ["CONTENT_TYPE"] == "text/plain"
+ assert simple_app.environ["HTTP_CONTENT_TYPE"] == "text/plain"
+
+
+def test_special_host(simple_app):
+ event = make_event(headers={"Host": ["example.com"]})
+
+ simple_app.handler(event, None)
+
+ assert simple_app.environ["SERVER_NAME"] == "example.com"
+ assert simple_app.environ["HTTP_HOST"] == "example.com"
+
+
+def test_special_x_forwarded_for(simple_app):
+ event = make_event(headers={"X-Forwarded-For": ["1.2.3.4, 5.6.7.8"]})
+
+ simple_app.handler(event, None)
+
+ assert simple_app.environ["REMOTE_ADDR"] == "1.2.3.4"
+ assert simple_app.environ["HTTP_X_FORWARDED_FOR"] == "1.2.3.4, 5.6.7.8"
+
+
+def test_x_forwarded_proto(simple_app):
+ event = make_event(headers={"X-Forwarded-Proto": ["https"]})
+
+ simple_app.handler(event, None)
+
+ assert simple_app.environ["wsgi.url_scheme"] == "https"
+ assert simple_app.environ["HTTP_X_FORWARDED_PROTO"] == "https"
+
+
+def test_x_forwarded_port(simple_app):
+ event = make_event(headers={"X-Forwarded-Port": ["123"]})
+
+ simple_app.handler(event, None)
+
+ assert simple_app.environ["SERVER_PORT"] == "123"
+ assert simple_app.environ["HTTP_X_FORWARDED_PORT"] == "123"
def test_no_headers(simple_app):
| 2.5.0 Multivalue support breaks Host Header support
The 2.5.0 update broke things for us, because we're reading the HTTP Host header directly (the `HTTP_HOST` WSGI key) rather than going via the `SERVER_NAME` WSGI variable.
Was this intentional, or just a byproduct of trying to also set `SERVER_NAME` for WSGI spec compliance?
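For context, a minimal sketch of the access pattern in question (the app itself is illustrative); by WSGI convention each HTTP header is exposed as an `HTTP_*` environ key, which is what stopped happening for `Host`:
```python
def application(environ, start_response):
    # Read the raw Host header, falling back to the WSGI server name.
    host = environ.get("HTTP_HOST", environ["SERVER_NAME"])
    start_response("200 OK", [("Content-Type", "text/plain")])
    return [host.encode()]
```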
| 0.0 | 4f3b043beb5067379a7d1459aea751eb74a27e5e | [
"tests/test_apig_wsgi.py::test_special_headers",
"tests/test_apig_wsgi.py::test_special_content_type",
"tests/test_apig_wsgi.py::test_special_host"
] | [
"tests/test_apig_wsgi.py::test_get",
"tests/test_apig_wsgi.py::test_get_missing_content_type",
"tests/test_apig_wsgi.py::test_get_binary_support_default_text_content_types[text/plain]",
"tests/test_apig_wsgi.py::test_get_binary_support_default_text_content_types[text/html]",
"tests/test_apig_wsgi.py::test_get_binary_support_default_text_content_types[application/json]",
"tests/test_apig_wsgi.py::test_get_binary_support_default_text_content_types[application/vnd.api+json]",
"tests/test_apig_wsgi.py::test_get_binary_support_custom_text_content_types[test/custom]",
"tests/test_apig_wsgi.py::test_get_binary_support_custom_text_content_types[application/vnd.custom]",
"tests/test_apig_wsgi.py::test_get_binary_support_binary",
"tests/test_apig_wsgi.py::test_get_binary_support_binary_default_text_with_gzip_content_encoding[text/plain]",
"tests/test_apig_wsgi.py::test_get_binary_support_binary_default_text_with_gzip_content_encoding[text/html]",
"tests/test_apig_wsgi.py::test_get_binary_support_binary_default_text_with_gzip_content_encoding[application/json]",
"tests/test_apig_wsgi.py::test_get_binary_support_binary_default_text_with_gzip_content_encoding[application/vnd.api+json]",
"tests/test_apig_wsgi.py::test_get_binary_support_binary_custom_text_with_gzip_content_encoding[test/custom]",
"tests/test_apig_wsgi.py::test_get_binary_support_binary_custom_text_with_gzip_content_encoding[application/vnd.custom]",
"tests/test_apig_wsgi.py::test_get_binary_support_no_content_type",
"tests/test_apig_wsgi.py::test_post",
"tests/test_apig_wsgi.py::test_post_binary_support",
"tests/test_apig_wsgi.py::test_querystring_none",
"tests/test_apig_wsgi.py::test_querystring_none_single",
"tests/test_apig_wsgi.py::test_querystring_empty",
"tests/test_apig_wsgi.py::test_querystring_empty_single",
"tests/test_apig_wsgi.py::test_querystring_one",
"tests/test_apig_wsgi.py::test_querystring_one_single",
"tests/test_apig_wsgi.py::test_querystring_encoding_value",
"tests/test_apig_wsgi.py::test_querystring_encoding_key",
"tests/test_apig_wsgi.py::test_querystring_multi",
"tests/test_apig_wsgi.py::test_plain_header",
"tests/test_apig_wsgi.py::test_plain_header_single",
"tests/test_apig_wsgi.py::test_plain_header_multi",
"tests/test_apig_wsgi.py::test_special_x_forwarded_for",
"tests/test_apig_wsgi.py::test_x_forwarded_proto",
"tests/test_apig_wsgi.py::test_x_forwarded_port",
"tests/test_apig_wsgi.py::test_no_headers",
"tests/test_apig_wsgi.py::test_headers_None",
"tests/test_apig_wsgi.py::test_exc_info",
"tests/test_apig_wsgi.py::test_request_context",
"tests/test_apig_wsgi.py::test_full_event"
] | {
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true
} | 2020-02-26 12:33:27+00:00 | isc | 883 |
|
adamchainz__apig-wsgi-156 | diff --git a/HISTORY.rst b/HISTORY.rst
index 048a4ab..8eb3af1 100644
--- a/HISTORY.rst
+++ b/HISTORY.rst
@@ -1,6 +1,12 @@
History
=======
+* Add defaults for ``SERVER_HOST``, ``SERVER_PORT`` and ``wsgi.url_scheme``.
+ This enables responding to `ELB health check events
+ <https://docs.aws.amazon.com/elasticloadbalancing/latest/application/lambda-functions.html#enable-health-checks-lambda>`__,
+ which don't contain the relevant headers
+ (`Issue #155 <https://github.com/adamchainz/apig-wsgi/pull/155>`__).
+
2.6.0 (2020-03-07)
------------------
diff --git a/src/apig_wsgi.py b/src/apig_wsgi.py
index 4c7a17a..c12b638 100644
--- a/src/apig_wsgi.py
+++ b/src/apig_wsgi.py
@@ -62,12 +62,15 @@ def get_environ(event, context, binary_support):
"REQUEST_METHOD": method,
"SCRIPT_NAME": "",
"SERVER_PROTOCOL": "HTTP/1.1",
+ "SERVER_NAME": "",
+ "SERVER_PORT": "",
"wsgi.errors": sys.stderr,
"wsgi.input": BytesIO(body),
"wsgi.multiprocess": False,
"wsgi.multithread": False,
"wsgi.run_once": False,
"wsgi.version": (1, 0),
+ "wsgi.url_scheme": "http",
}
# Multi-value query strings need explicit activation on ALB
| adamchainz/apig-wsgi | d42e72a00409c1ce51011db1d46ecd9a1667984a | diff --git a/tests/test_apig_wsgi.py b/tests/test_apig_wsgi.py
index 2e091a8..8339a89 100644
--- a/tests/test_apig_wsgi.py
+++ b/tests/test_apig_wsgi.py
@@ -472,6 +472,29 @@ def test_full_event(simple_app):
assert simple_app.environ["apig_wsgi.full_event"] == event
+def test_elb_health_check(simple_app):
+ """
+ Check compatibility with health check events as per:
+ https://docs.aws.amazon.com/elasticloadbalancing/latest/application/lambda-functions.html#enable-health-checks-lambda # noqa: B950
+ """
+ event = {
+ "requestContext": {"elb": {"targetGroupArn": "..."}},
+ "httpMethod": "GET",
+ "path": "/",
+ "queryStringParameters": {},
+ "headers": {"user-agent": "ELB-HealthChecker/2.0"},
+ "body": "",
+ "isBase64Encoded": False,
+ }
+
+ simple_app.handler(event, None)
+
+ environ = simple_app.environ
+ assert environ["SERVER_NAME"] == ""
+ assert environ["SERVER_PORT"] == ""
+ assert environ["wsgi.url_scheme"] == "http"
+
+
def test_context(simple_app):
context = ContextStub(aws_request_id="test-request-id")
| ELB Health Checks not handled
Hello!
Thank you for your tool; it's great.
We have a small issue when using a Lambda as an ELB target with health checks turned on, because the health-check event looks like this:
```
{
"requestContext": {
"elb": {
"targetGroupArn": "arn:aws:elasticloadbalancing:region:123456789012:targetgroup/my-target-group/6d0ecf831eec9f09"
}
},
"httpMethod": "GET",
"path": "/",
"queryStringParameters": {},
"headers": {
"user-agent": "ELB-HealthChecker/2.0"
},
"body": "",
"isBase64Encoded": false
}
```
source: https://docs.aws.amazon.com/elasticloadbalancing/latest/application/lambda-functions.html#enable-health-checks-lambda
This has almost no headers, leading to this error:
```
File "/var/task/flask/app.py", line 2306, in wsgi_app
ctx = self.request_context(environ)
File "/var/task/flask/app.py", line 2223, in request_context
return RequestContext(self, environ)
File "/var/task/flask/ctx.py", line 284, in __init__
self.url_adapter = app.create_url_adapter(self.request)
File "/var/task/flask/app.py", line 2040, in create_url_adapter
subdomain=subdomain)
File "/var/task/werkzeug/routing.py", line 1502, in bind_to_environ
wsgi_server_name = get_host(environ).lower()
File "/var/task/werkzeug/wsgi.py", line 168, in get_host
rv = environ["SERVER_NAME"]
```
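The root cause is that werkzeug falls back to `environ["SERVER_NAME"]` when no Host header is present, and the health-check event never sets one. Roughly (a simplified sketch, not werkzeug's exact code):
```python
def get_host(environ):
    # Simplified: prefer the Host header, otherwise use the server name,
    # which apig-wsgi does not set for the header-less health-check event.
    if "HTTP_HOST" in environ:
        return environ["HTTP_HOST"]
    return environ["SERVER_NAME"]  # KeyError here
```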
I found with some testing that I can patch the event like this to make it work:
```
from functools import wraps

from apig_wsgi import make_lambda_handler


def elb_health_check_patcher(handler):
@wraps(handler)
def wrapper(event, context):
user_agent = event.get('headers', {}).get('user-agent')
if user_agent == "ELB-HealthChecker/2.0":
event['headers']['x-forwarded-proto'] = 'http'
event['headers']['host'] = 'elb-health-check'
return handler(event, context)
return wrapper
handler = elb_health_check_patcher(make_lambda_handler(my_app))  # my_app is your WSGI app
```
I may propose a PR soon if you're open to it :) | 0.0 | d42e72a00409c1ce51011db1d46ecd9a1667984a | [
"tests/test_apig_wsgi.py::test_elb_health_check"
] | [
"tests/test_apig_wsgi.py::test_get",
"tests/test_apig_wsgi.py::test_get_missing_content_type",
"tests/test_apig_wsgi.py::test_get_binary_support_default_text_content_types[text/plain]",
"tests/test_apig_wsgi.py::test_get_binary_support_default_text_content_types[text/html]",
"tests/test_apig_wsgi.py::test_get_binary_support_default_text_content_types[application/json]",
"tests/test_apig_wsgi.py::test_get_binary_support_default_text_content_types[application/vnd.api+json]",
"tests/test_apig_wsgi.py::test_get_binary_support_custom_text_content_types[test/custom]",
"tests/test_apig_wsgi.py::test_get_binary_support_custom_text_content_types[application/vnd.custom]",
"tests/test_apig_wsgi.py::test_get_binary_support_binary",
"tests/test_apig_wsgi.py::test_get_binary_support_binary_default_text_with_gzip_content_encoding[text/plain]",
"tests/test_apig_wsgi.py::test_get_binary_support_binary_default_text_with_gzip_content_encoding[text/html]",
"tests/test_apig_wsgi.py::test_get_binary_support_binary_default_text_with_gzip_content_encoding[application/json]",
"tests/test_apig_wsgi.py::test_get_binary_support_binary_default_text_with_gzip_content_encoding[application/vnd.api+json]",
"tests/test_apig_wsgi.py::test_get_binary_support_binary_custom_text_with_gzip_content_encoding[test/custom]",
"tests/test_apig_wsgi.py::test_get_binary_support_binary_custom_text_with_gzip_content_encoding[application/vnd.custom]",
"tests/test_apig_wsgi.py::test_get_binary_support_no_content_type",
"tests/test_apig_wsgi.py::test_post",
"tests/test_apig_wsgi.py::test_post_binary_support",
"tests/test_apig_wsgi.py::test_querystring_none",
"tests/test_apig_wsgi.py::test_querystring_none_single",
"tests/test_apig_wsgi.py::test_querystring_empty",
"tests/test_apig_wsgi.py::test_querystring_empty_single",
"tests/test_apig_wsgi.py::test_querystring_one",
"tests/test_apig_wsgi.py::test_querystring_one_single",
"tests/test_apig_wsgi.py::test_querystring_encoding_value",
"tests/test_apig_wsgi.py::test_querystring_encoding_key",
"tests/test_apig_wsgi.py::test_querystring_multi",
"tests/test_apig_wsgi.py::test_plain_header",
"tests/test_apig_wsgi.py::test_plain_header_single",
"tests/test_apig_wsgi.py::test_plain_header_multi",
"tests/test_apig_wsgi.py::test_special_headers",
"tests/test_apig_wsgi.py::test_special_content_type",
"tests/test_apig_wsgi.py::test_special_host",
"tests/test_apig_wsgi.py::test_special_x_forwarded_for",
"tests/test_apig_wsgi.py::test_x_forwarded_proto",
"tests/test_apig_wsgi.py::test_x_forwarded_port",
"tests/test_apig_wsgi.py::test_no_headers",
"tests/test_apig_wsgi.py::test_headers_None",
"tests/test_apig_wsgi.py::test_exc_info",
"tests/test_apig_wsgi.py::test_request_context",
"tests/test_apig_wsgi.py::test_full_event",
"tests/test_apig_wsgi.py::test_context"
] | {
"failed_lite_validators": [
"has_hyperlinks",
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false
} | 2020-07-10 18:59:38+00:00 | isc | 884 |
|
adamchainz__apig-wsgi-187 | diff --git a/HISTORY.rst b/HISTORY.rst
index 724abe9..f4bc806 100644
--- a/HISTORY.rst
+++ b/HISTORY.rst
@@ -3,6 +3,9 @@ History
=======
* Support Python 3.9.
+* Fix query string parameter encoding so that symbols are correctly re-encoded
+ for WSGI, for API Gateway format version 1
+ (`Issue #186 <https://github.com/adamchainz/apig-wsgi/pull/186>`__).
2.9.0 (2020-10-12)
------------------
@@ -11,7 +14,8 @@ History
* Always send ``isBase64Encoded`` in responses, as per the AWS documentation.
* Support `format version
2 <https://docs.aws.amazon.com/apigateway/latest/developerguide/http-api-develop-integrations-lambda.html>`,
- which was introduced by API Gateway for “HTTP API's”.
+ which was introduced by API Gateway for “HTTP API's”
+ (`Issue #124 <https://github.com/adamchainz/apig-wsgi/pull/124>`__)..
* ``binary_support`` now defaults to ``None``, which means that it will
automatically enable binary support for format version 2 events.
diff --git a/src/apig_wsgi.py b/src/apig_wsgi.py
index a261c38..424d008 100644
--- a/src/apig_wsgi.py
+++ b/src/apig_wsgi.py
@@ -2,6 +2,7 @@ import sys
from base64 import b64decode, b64encode
from collections import defaultdict
from io import BytesIO
+from urllib.parse import urlencode
__all__ = ("make_lambda_handler",)
@@ -87,14 +88,13 @@ def get_environ_v1(event, context, binary_support):
# Multi-value query strings need explicit activation on ALB
if "multiValueQueryStringParameters" in event:
- # may be None when testing on console
- multi_params = event["multiValueQueryStringParameters"] or {}
+ environ["QUERY_STRING"] = urlencode(
+ # may be None when testing on console
+ event["multiValueQueryStringParameters"] or (),
+ doseq=True,
+ )
else:
- single_params = event.get("queryStringParameters") or {}
- multi_params = {key: [value] for key, value in single_params.items()}
- environ["QUERY_STRING"] = "&".join(
- "{}={}".format(key, val) for (key, vals) in multi_params.items() for val in vals
- )
+ environ["QUERY_STRING"] = urlencode(event.get("queryStringParameters") or ())
# Multi-value headers need explicit activation on ALB
if "multiValueHeaders" in event:
| adamchainz/apig-wsgi | afaf2e5ae3279b15657431b24dcc01105ec2a5ce | diff --git a/tests/test_apig_wsgi.py b/tests/test_apig_wsgi.py
index 6eca17c..2c98ab4 100644
--- a/tests/test_apig_wsgi.py
+++ b/tests/test_apig_wsgi.py
@@ -328,19 +328,19 @@ class TestV1Events:
assert simple_app.environ["QUERY_STRING"] == "foo=bar"
- def test_querystring_encoding_value(self, simple_app):
- event = make_v1_event(qs_params={"foo": ["a%20bar"]})
+ def test_querystring_encoding_plus_value(self, simple_app):
+ event = make_v1_event(qs_params={"a": ["b+c"]}, qs_params_multi=False)
simple_app.handler(event, None)
- assert simple_app.environ["QUERY_STRING"] == "foo=a%20bar"
+ assert simple_app.environ["QUERY_STRING"] == "a=b%2Bc"
- def test_querystring_encoding_key(self, simple_app):
- event = make_v1_event(qs_params={"a%20foo": ["bar"]})
+ def test_querystring_encoding_plus_key(self, simple_app):
+ event = make_v1_event(qs_params={"a+b": ["c"]}, qs_params_multi=False)
simple_app.handler(event, None)
- assert simple_app.environ["QUERY_STRING"] == "a%20foo=bar"
+ assert simple_app.environ["QUERY_STRING"] == "a%2Bb=c"
def test_querystring_multi(self, simple_app):
event = make_v1_event(qs_params={"foo": ["bar", "baz"]})
@@ -349,6 +349,20 @@ class TestV1Events:
assert simple_app.environ["QUERY_STRING"] == "foo=bar&foo=baz"
+ def test_querystring_multi_encoding_plus_value(self, simple_app):
+ event = make_v1_event(qs_params={"a": ["b+c", "d"]})
+
+ simple_app.handler(event, None)
+
+ assert simple_app.environ["QUERY_STRING"] == "a=b%2Bc&a=d"
+
+ def test_querystring_multi_encoding_plus_key(self, simple_app):
+ event = make_v1_event(qs_params={"a+b": ["c"]})
+
+ simple_app.handler(event, None)
+
+ assert simple_app.environ["QUERY_STRING"] == "a%2Bb=c"
+
def test_plain_header(self, simple_app):
event = make_v1_event(headers={"Test-Header": ["foobar"]})
| QueryStringParameters encoding issue
Hi,
I suspect an issue with how query string parameters are provided to the application.
When performing a query to my API using <API_ROOT>/logs?start_time=2020-10-12T14:00:00%2B02:00&end_time=2020-10-12T15:00:00%2B02:00
I get my query string values in the Flask endpoint as:
start_time = "2020-10-12T14:00:00 02:00"
end_time = "2020-10-12T15:00:00 02:00"
and they should be:
start_time = "2020-10-12T14:00:00+02:00"
end_time = "2020-10-12T15:00:00+02:00"
Note: the "+" has been removed.
I think this is because API Gateway provides queryStringParameters already decoded, and you are not encoding them back when storing them in environ["QUERY_STRING"].
If we look at what other WSGI adapters (like Zappa) are doing:
```
if 'multiValueQueryStringParameters' in event_info:
query = event_info['multiValueQueryStringParameters']
query_string = urlencode(query, doseq=True) if query else ''
else:
query = event_info.get('queryStringParameters', {})
query_string = urlencode(query) if query else ''
```
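For what it's worth, a quick sketch (my own) showing how re-encoding preserves the `+`:
```python
from urllib.parse import urlencode

params = {"start_time": "2020-10-12T14:00:00+02:00"}
print(urlencode(params))
# start_time=2020-10-12T14%3A00%3A00%2B02%3A00 -- the "+" survives as %2B
```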
Hope it helps. | 0.0 | afaf2e5ae3279b15657431b24dcc01105ec2a5ce | [
"tests/test_apig_wsgi.py::TestV1Events::test_querystring_encoding_plus_value",
"tests/test_apig_wsgi.py::TestV1Events::test_querystring_encoding_plus_key",
"tests/test_apig_wsgi.py::TestV1Events::test_querystring_multi_encoding_plus_value",
"tests/test_apig_wsgi.py::TestV1Events::test_querystring_multi_encoding_plus_key"
] | [
"tests/test_apig_wsgi.py::TestV1Events::test_get",
"tests/test_apig_wsgi.py::TestV1Events::test_get_missing_content_type",
"tests/test_apig_wsgi.py::TestV1Events::test_get_single_header",
"tests/test_apig_wsgi.py::TestV1Events::test_get_binary_support_default_text_content_types[text/plain]",
"tests/test_apig_wsgi.py::TestV1Events::test_get_binary_support_default_text_content_types[text/html]",
"tests/test_apig_wsgi.py::TestV1Events::test_get_binary_support_default_text_content_types[application/json]",
"tests/test_apig_wsgi.py::TestV1Events::test_get_binary_support_default_text_content_types[application/vnd.api+json]",
"tests/test_apig_wsgi.py::TestV1Events::test_get_binary_support_custom_text_content_types[test/custom]",
"tests/test_apig_wsgi.py::TestV1Events::test_get_binary_support_custom_text_content_types[application/vnd.custom]",
"tests/test_apig_wsgi.py::TestV1Events::test_get_binary_support_binary",
"tests/test_apig_wsgi.py::TestV1Events::test_get_binary_support_binary_default_text_with_gzip_content_encoding[text/plain]",
"tests/test_apig_wsgi.py::TestV1Events::test_get_binary_support_binary_default_text_with_gzip_content_encoding[text/html]",
"tests/test_apig_wsgi.py::TestV1Events::test_get_binary_support_binary_default_text_with_gzip_content_encoding[application/json]",
"tests/test_apig_wsgi.py::TestV1Events::test_get_binary_support_binary_default_text_with_gzip_content_encoding[application/vnd.api+json]",
"tests/test_apig_wsgi.py::TestV1Events::test_get_binary_support_binary_custom_text_with_gzip_content_encoding[test/custom]",
"tests/test_apig_wsgi.py::TestV1Events::test_get_binary_support_binary_custom_text_with_gzip_content_encoding[application/vnd.custom]",
"tests/test_apig_wsgi.py::TestV1Events::test_get_binary_support_no_content_type",
"tests/test_apig_wsgi.py::TestV1Events::test_post",
"tests/test_apig_wsgi.py::TestV1Events::test_post_binary_support",
"tests/test_apig_wsgi.py::TestV1Events::test_querystring_none",
"tests/test_apig_wsgi.py::TestV1Events::test_querystring_none_single",
"tests/test_apig_wsgi.py::TestV1Events::test_querystring_empty",
"tests/test_apig_wsgi.py::TestV1Events::test_querystring_empty_single",
"tests/test_apig_wsgi.py::TestV1Events::test_querystring_one",
"tests/test_apig_wsgi.py::TestV1Events::test_querystring_one_single",
"tests/test_apig_wsgi.py::TestV1Events::test_querystring_multi",
"tests/test_apig_wsgi.py::TestV1Events::test_plain_header",
"tests/test_apig_wsgi.py::TestV1Events::test_plain_header_single",
"tests/test_apig_wsgi.py::TestV1Events::test_plain_header_multi",
"tests/test_apig_wsgi.py::TestV1Events::test_special_headers",
"tests/test_apig_wsgi.py::TestV1Events::test_special_content_type",
"tests/test_apig_wsgi.py::TestV1Events::test_special_host",
"tests/test_apig_wsgi.py::TestV1Events::test_special_x_forwarded_for",
"tests/test_apig_wsgi.py::TestV1Events::test_x_forwarded_proto",
"tests/test_apig_wsgi.py::TestV1Events::test_x_forwarded_port",
"tests/test_apig_wsgi.py::TestV1Events::test_no_headers",
"tests/test_apig_wsgi.py::TestV1Events::test_headers_None",
"tests/test_apig_wsgi.py::TestV1Events::test_exc_info",
"tests/test_apig_wsgi.py::TestV1Events::test_request_context",
"tests/test_apig_wsgi.py::TestV1Events::test_full_event",
"tests/test_apig_wsgi.py::TestV1Events::test_elb_health_check",
"tests/test_apig_wsgi.py::TestV1Events::test_context",
"tests/test_apig_wsgi.py::TestV2Events::test_get",
"tests/test_apig_wsgi.py::TestV2Events::test_get_missing_content_type",
"tests/test_apig_wsgi.py::TestV2Events::test_cookie",
"tests/test_apig_wsgi.py::TestV2Events::test_two_cookies",
"tests/test_apig_wsgi.py::TestV2Events::test_mixed_cookies",
"tests/test_apig_wsgi.py::TestV2Events::test_set_one_cookie",
"tests/test_apig_wsgi.py::TestV2Events::test_set_two_cookies",
"tests/test_apig_wsgi.py::TestV2Events::test_get_binary_support_default_text_content_types[text/plain]",
"tests/test_apig_wsgi.py::TestV2Events::test_get_binary_support_default_text_content_types[text/html]",
"tests/test_apig_wsgi.py::TestV2Events::test_get_binary_support_default_text_content_types[application/json]",
"tests/test_apig_wsgi.py::TestV2Events::test_get_binary_support_default_text_content_types[application/vnd.api+json]",
"tests/test_apig_wsgi.py::TestV2Events::test_get_binary_support_custom_text_content_types[test/custom]",
"tests/test_apig_wsgi.py::TestV2Events::test_get_binary_support_custom_text_content_types[application/vnd.custom]",
"tests/test_apig_wsgi.py::TestV2Events::test_get_binary_support_binary",
"tests/test_apig_wsgi.py::TestV2Events::test_get_binary_support_binary_default_text_with_gzip_content_encoding[text/plain]",
"tests/test_apig_wsgi.py::TestV2Events::test_get_binary_support_binary_default_text_with_gzip_content_encoding[text/html]",
"tests/test_apig_wsgi.py::TestV2Events::test_get_binary_support_binary_default_text_with_gzip_content_encoding[application/json]",
"tests/test_apig_wsgi.py::TestV2Events::test_get_binary_support_binary_default_text_with_gzip_content_encoding[application/vnd.api+json]",
"tests/test_apig_wsgi.py::TestV2Events::test_get_binary_support_binary_custom_text_with_gzip_content_encoding[test/custom]",
"tests/test_apig_wsgi.py::TestV2Events::test_get_binary_support_binary_custom_text_with_gzip_content_encoding[application/vnd.custom]",
"tests/test_apig_wsgi.py::TestV2Events::test_special_headers",
"tests/test_apig_wsgi.py::TestUnknownVersionEvents::test_errors"
] | {
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false
} | 2020-10-12 22:45:22+00:00 | mit | 885 |
|
adamchainz__apig-wsgi-272 | diff --git a/HISTORY.rst b/HISTORY.rst
index 6b356b4..cd7d803 100644
--- a/HISTORY.rst
+++ b/HISTORY.rst
@@ -2,6 +2,8 @@
History
=======
+* Fix handling of query string encoding in ALB deployments.
+
2.11.0 (2021-05-10)
-------------------
diff --git a/example/README.rst b/example/README.rst
index b0d17a3..c00ff7b 100644
--- a/example/README.rst
+++ b/example/README.rst
@@ -47,7 +47,13 @@ other requirements into it, and run the deployment playbook:
source venv/bin/activate
python -m pip install -U pip wheel
python -m pip install -r requirements.txt
- ansible-playbook playbook.yml
+
+To run the playbook, you'll need to specify a VPC ID and two subnet IDs in order
+to create an ALB.
+
+.. code-block:: sh
+
+ ansible-playbook playbook.yml -e "vpc_id=vpc-12345678" -e "subnet_id_1=subnet-12345678" -e "subnet_id_2=subnet-12345678"
Ansible should complete with a ``PLAY RECAP`` at the end like:
diff --git a/example/deployment/files/cloudformation_site.yml b/example/deployment/files/cloudformation_site.yml
index ac2fcfa..72f4716 100644
--- a/example/deployment/files/cloudformation_site.yml
+++ b/example/deployment/files/cloudformation_site.yml
@@ -16,6 +16,15 @@ Parameters:
LambdaIAMRoleArn:
Type: String
+ VpcId:
+ Type: String
+
+ SubnetId1:
+ Type: String
+
+ SubnetId2:
+ Type: String
+
Resources:
# Lambda
@@ -33,13 +42,20 @@ Resources:
Runtime: python3.8
Timeout: 20
- LambdaPermission:
+ LambdaPermissionAPIGW:
Type: AWS::Lambda::Permission
Properties:
Action: lambda:InvokeFunction
FunctionName: !GetAtt LambdaFunction.Arn
Principal: apigateway.amazonaws.com
+ LambdaPermissionALB:
+ Type: AWS::Lambda::Permission
+ Properties:
+ Action: lambda:InvokeFunction
+ FunctionName: !GetAtt LambdaFunction.Arn
+ Principal: elasticloadbalancing.amazonaws.com
+
# new style HTTP API
HttpApi:
@@ -110,6 +126,49 @@ Resources:
DeploymentId: !Ref Deployment
StageName: prod
+ # ALB
+
+ LoadBalancer:
+ Type: AWS::ElasticLoadBalancingV2::LoadBalancer
+ Properties:
+ Scheme: internet-facing
+ Subnets:
+ - !Ref SubnetId1
+ - !Ref SubnetId2
+ SecurityGroups: [!Ref SecurityGroup]
+
+ TargetGroup:
+ Type: AWS::ElasticLoadBalancingV2::TargetGroup
+ DependsOn: LambdaPermissionALB
+ Properties:
+ TargetType: lambda
+ Targets:
+ - Id: !GetAtt LambdaFunction.Arn
+ TargetGroupAttributes:
+ - Key: lambda.multi_value_headers.enabled
+ Value: True
+
+ HttpListener:
+ Type: AWS::ElasticLoadBalancingV2::Listener
+ Properties:
+ LoadBalancerArn: !Ref LoadBalancer
+ Port: 80
+ Protocol: HTTP
+ DefaultActions:
+ - TargetGroupArn: !Ref TargetGroup
+ Type: forward
+
+ SecurityGroup:
+ Type: AWS::EC2::SecurityGroup
+ Properties:
+ GroupDescription: Allow http on port 80
+ VpcId: !Ref VpcId
+ SecurityGroupIngress:
+ - IpProtocol: tcp
+ FromPort: 80
+ ToPort: 80
+ CidrIp: 0.0.0.0/0
+
Outputs:
HttpApiUrl:
@@ -117,3 +176,6 @@ Outputs:
RestApiUrl:
Value: !Sub https://${RestApi}.execute-api.${AWS::Region}.amazonaws.com/${StageApi}/
+
+ AlbUrl:
+ Value: !GetAtt LoadBalancer.DNSName
diff --git a/example/deployment/playbook.yml b/example/deployment/playbook.yml
index 1eae34c..728919a 100755
--- a/example/deployment/playbook.yml
+++ b/example/deployment/playbook.yml
@@ -95,6 +95,9 @@
PackageS3Key: '{{ zip_name }}'
PackageS3ObjectVersion: '{{ (s3_result.stdout|from_json).VersionId }}'
LambdaIAMRoleArn: '{{ base_stack_result.stack_outputs.LambdaIAMRoleArn }}'
+ VpcId: '{{ vpc_id }}'
+ SubnetId1: '{{ subnet_id_1 }}'
+ SubnetId2: '{{ subnet_id_2 }}'
register: site_stack_result
- name: debug
@@ -102,3 +105,4 @@
msg: |
New style "HTTP API" using v2 events at {{ site_stack_result.stack_outputs.HttpApiUrl }}
Old style REST API using v1 events at {{ site_stack_result.stack_outputs.RestApiUrl }}
+ ALB at {{ site_stack_result.stack_outputs.AlbUrl }}
diff --git a/src/apig_wsgi.py b/src/apig_wsgi.py
index a8f41b5..814cab3 100644
--- a/src/apig_wsgi.py
+++ b/src/apig_wsgi.py
@@ -13,6 +13,8 @@ DEFAULT_NON_BINARY_CONTENT_TYPE_PREFIXES = (
"application/vnd.api+json",
)
+RESERVED_URI_CHARACTERS = r"!#$&'()*+,/:;=?@[]%"
+
def make_lambda_handler(
wsgi_app, binary_support=None, non_binary_content_type_prefixes=None
@@ -37,13 +39,19 @@ def make_lambda_handler(
non_binary_content_type_prefixes = tuple(non_binary_content_type_prefixes)
def handler(event, context):
- # Assume version 1 since ALB isn't documented as sending a version.
- version = event.get("version", "1.0")
- if version == "1.0":
+ # ALB doesn't send a version, but requestContext will contain a key named 'elb'.
+ if "requestContext" in event and "elb" in event["requestContext"]:
+ version = "alb"
+ else:
+ version = event.get("version", "1.0")
+
+ if version in ("1.0", "alb"):
# Binary support deafults 'off' on version 1
event_binary_support = binary_support or False
environ = get_environ_v1(
- event, context, binary_support=event_binary_support
+ event,
+ context,
+ encode_query_params=(version == "1.0"),
)
response = V1Response(
binary_support=event_binary_support,
@@ -65,7 +73,7 @@ def make_lambda_handler(
return handler
-def get_environ_v1(event, context, binary_support):
+def get_environ_v1(event, context, encode_query_params):
body = get_body(event)
environ = {
"CONTENT_LENGTH": str(len(body)),
@@ -87,15 +95,24 @@ def get_environ_v1(event, context, binary_support):
"apig_wsgi.multi_value_headers": False,
}
+ if encode_query_params:
+ safe_chars = ""
+ else:
+ safe_chars = RESERVED_URI_CHARACTERS
+
# Multi-value query strings need explicit activation on ALB
if "multiValueQueryStringParameters" in event:
environ["QUERY_STRING"] = urlencode(
# may be None when testing on console
event["multiValueQueryStringParameters"] or (),
doseq=True,
+ safe=safe_chars,
)
else:
- environ["QUERY_STRING"] = urlencode(event.get("queryStringParameters") or ())
+ environ["QUERY_STRING"] = urlencode(
+ event.get("queryStringParameters") or (),
+ safe=safe_chars,
+ )
# Multi-value headers need explicit activation on ALB
if "multiValueHeaders" in event:
| adamchainz/apig-wsgi | 066f0b7c1e81e81dd4991c388f46ddfc4ddaf94e | diff --git a/tests/test_apig_wsgi.py b/tests/test_apig_wsgi.py
index 71c1fbf..e107817 100644
--- a/tests/test_apig_wsgi.py
+++ b/tests/test_apig_wsgi.py
@@ -106,6 +106,7 @@ def make_v1_event(
event["isBase64Encoded"] = True
else:
event["body"] = body
+ event["isBase64Encoded"] = False
if request_context is not None:
event["requestContext"] = request_context
@@ -538,6 +539,81 @@ class TestV1Events:
assert response["body"] == "Hi there!"
+# ALB tests
+
+
+def make_alb_event(*args, request_context=None, **kwargs):
+ if request_context is None:
+ request_context = {}
+ request_context["elb"] = {
+ "targetGroupArn": "arn:aws:elasticloadbalancing:::targetgroup/etc"
+ }
+
+ event = make_v1_event(*args, request_context=request_context, **kwargs)
+
+ del event["version"]
+ if "isBase64Encoded" not in event:
+ event["isBase64Encoded"] = False
+
+ return event
+
+
+class TestAlbEvents:
+ # Query string params from ALB are the same as rawQueryStringParameters
+ # in API GW V2... that is they don't need to be encoded.
+ def test_querystring_encoding_plus_value(self, simple_app):
+ event = make_alb_event(qs_params={"a": ["b+c"]}, qs_params_multi=False)
+
+ simple_app.handler(event, None)
+
+ assert simple_app.environ["QUERY_STRING"] == "a=b+c"
+
+ def test_querystring_encoding_plus_key(self, simple_app):
+ event = make_alb_event(qs_params={"a+b": ["c"]}, qs_params_multi=False)
+
+ simple_app.handler(event, None)
+
+ assert simple_app.environ["QUERY_STRING"] == "a+b=c"
+
+ def test_querystring_multi(self, simple_app):
+ event = make_alb_event(qs_params={"foo": ["bar", "baz"]})
+
+ simple_app.handler(event, None)
+
+ assert simple_app.environ["QUERY_STRING"] == "foo=bar&foo=baz"
+
+ def test_querystring_multi_encoding_plus_value(self, simple_app):
+ event = make_alb_event(qs_params={"a": ["b+c", "d"]})
+
+ simple_app.handler(event, None)
+
+ assert simple_app.environ["QUERY_STRING"] == "a=b+c&a=d"
+
+ def test_querystring_multi_encoding_plus_key(self, simple_app):
+ event = make_alb_event(qs_params={"a+b": ["c"]})
+
+ simple_app.handler(event, None)
+
+ assert simple_app.environ["QUERY_STRING"] == "a+b=c"
+
+ def test_querystring_contains_encoded_value(self, simple_app):
+ event = make_alb_event(qs_params={"a": ["foo%3Dbar"]}, qs_params_multi=False)
+
+ simple_app.handler(event, None)
+
+ assert simple_app.environ["QUERY_STRING"] == "a=foo%3Dbar"
+
+ def test_querystring_multi_contains_encoded_value(self, simple_app):
+ # a = ['foo=bar', '$20', '100%']
+ event = make_alb_event(
+ qs_params={"a": ["foo%3Dbar", "%2420", "100%25"]}, qs_params_multi=True
+ )
+
+ simple_app.handler(event, None)
+
+ assert simple_app.environ["QUERY_STRING"] == "a=foo%3Dbar&a=%2420&a=100%25"
+
+
# v2 tests
| Query Params - difference between API GW and ALB
I noticed that if I go to a URL with a query string like this: `?x=foo%3Dbar`
The event sent to a lambda from ALB contains:
```json
"queryStringParameters": {
"x": "foo%3Dbar"
}
```
And the event sent from API Gateway contains:
```json
"queryStringParameters": {
"x": "foo=bar"
}
```
For the ALB, the QUERY_STRING in the WSGI environment is set to:
```
x=foo%253Dbar
```
And for API Gateway, it is set to:
```
x=foo%3Dbar
```
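A quick sketch of where that double encoding comes from (my own illustration, assuming the handler re-runs the ALB-provided value through `urlencode`):
```python
from urllib.parse import urlencode

# ALB hands the value over still percent-encoded, so re-encoding
# escapes the "%" itself:
print(urlencode({"x": "foo%3Dbar"}))  # x=foo%253Dbar

# API Gateway decodes first, so re-encoding round-trips cleanly:
print(urlencode({"x": "foo=bar"}))    # x=foo%3Dbar
```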
So I think the ALB query param processing should not urlencode the data (it is already encoded).
I am working on a PR for this and will hopefully submit later today. | 0.0 | 066f0b7c1e81e81dd4991c388f46ddfc4ddaf94e | [
"tests/test_apig_wsgi.py::TestAlbEvents::test_querystring_encoding_plus_value",
"tests/test_apig_wsgi.py::TestAlbEvents::test_querystring_encoding_plus_key",
"tests/test_apig_wsgi.py::TestAlbEvents::test_querystring_multi_encoding_plus_value",
"tests/test_apig_wsgi.py::TestAlbEvents::test_querystring_multi_encoding_plus_key",
"tests/test_apig_wsgi.py::TestAlbEvents::test_querystring_contains_encoded_value",
"tests/test_apig_wsgi.py::TestAlbEvents::test_querystring_multi_contains_encoded_value"
] | [
"tests/test_apig_wsgi.py::TestV1Events::test_get",
"tests/test_apig_wsgi.py::TestV1Events::test_get_missing_content_type",
"tests/test_apig_wsgi.py::TestV1Events::test_get_single_header",
"tests/test_apig_wsgi.py::TestV1Events::test_get_binary_support_default_text_content_types[text/plain]",
"tests/test_apig_wsgi.py::TestV1Events::test_get_binary_support_default_text_content_types[text/html]",
"tests/test_apig_wsgi.py::TestV1Events::test_get_binary_support_default_text_content_types[application/json]",
"tests/test_apig_wsgi.py::TestV1Events::test_get_binary_support_default_text_content_types[application/vnd.api+json]",
"tests/test_apig_wsgi.py::TestV1Events::test_get_binary_support_custom_text_content_types[test/custom]",
"tests/test_apig_wsgi.py::TestV1Events::test_get_binary_support_custom_text_content_types[application/vnd.custom]",
"tests/test_apig_wsgi.py::TestV1Events::test_get_binary_support_binary",
"tests/test_apig_wsgi.py::TestV1Events::test_get_binary_support_binary_default_text_with_gzip_content_encoding[text/plain]",
"tests/test_apig_wsgi.py::TestV1Events::test_get_binary_support_binary_default_text_with_gzip_content_encoding[text/html]",
"tests/test_apig_wsgi.py::TestV1Events::test_get_binary_support_binary_default_text_with_gzip_content_encoding[application/json]",
"tests/test_apig_wsgi.py::TestV1Events::test_get_binary_support_binary_default_text_with_gzip_content_encoding[application/vnd.api+json]",
"tests/test_apig_wsgi.py::TestV1Events::test_get_binary_support_binary_custom_text_with_gzip_content_encoding[test/custom]",
"tests/test_apig_wsgi.py::TestV1Events::test_get_binary_support_binary_custom_text_with_gzip_content_encoding[application/vnd.custom]",
"tests/test_apig_wsgi.py::TestV1Events::test_get_binary_support_no_content_type",
"tests/test_apig_wsgi.py::TestV1Events::test_post",
"tests/test_apig_wsgi.py::TestV1Events::test_post_binary_support",
"tests/test_apig_wsgi.py::TestV1Events::test_path_unquoting",
"tests/test_apig_wsgi.py::TestV1Events::test_querystring_none",
"tests/test_apig_wsgi.py::TestV1Events::test_querystring_none_single",
"tests/test_apig_wsgi.py::TestV1Events::test_querystring_empty",
"tests/test_apig_wsgi.py::TestV1Events::test_querystring_empty_single",
"tests/test_apig_wsgi.py::TestV1Events::test_querystring_one",
"tests/test_apig_wsgi.py::TestV1Events::test_querystring_one_single",
"tests/test_apig_wsgi.py::TestV1Events::test_querystring_encoding_plus_value",
"tests/test_apig_wsgi.py::TestV1Events::test_querystring_encoding_plus_key",
"tests/test_apig_wsgi.py::TestV1Events::test_querystring_multi",
"tests/test_apig_wsgi.py::TestV1Events::test_querystring_multi_encoding_plus_value",
"tests/test_apig_wsgi.py::TestV1Events::test_querystring_multi_encoding_plus_key",
"tests/test_apig_wsgi.py::TestV1Events::test_plain_header",
"tests/test_apig_wsgi.py::TestV1Events::test_plain_header_single",
"tests/test_apig_wsgi.py::TestV1Events::test_plain_header_multi",
"tests/test_apig_wsgi.py::TestV1Events::test_special_headers",
"tests/test_apig_wsgi.py::TestV1Events::test_special_content_type",
"tests/test_apig_wsgi.py::TestV1Events::test_special_host",
"tests/test_apig_wsgi.py::TestV1Events::test_special_x_forwarded_for",
"tests/test_apig_wsgi.py::TestV1Events::test_x_forwarded_proto",
"tests/test_apig_wsgi.py::TestV1Events::test_x_forwarded_port",
"tests/test_apig_wsgi.py::TestV1Events::test_no_headers",
"tests/test_apig_wsgi.py::TestV1Events::test_headers_None",
"tests/test_apig_wsgi.py::TestV1Events::test_exc_info",
"tests/test_apig_wsgi.py::TestV1Events::test_request_context",
"tests/test_apig_wsgi.py::TestV1Events::test_full_event",
"tests/test_apig_wsgi.py::TestV1Events::test_elb_health_check",
"tests/test_apig_wsgi.py::TestV1Events::test_context",
"tests/test_apig_wsgi.py::TestV1Events::test_empty_and_uncloseable_content",
"tests/test_apig_wsgi.py::TestAlbEvents::test_querystring_multi",
"tests/test_apig_wsgi.py::TestV2Events::test_get",
"tests/test_apig_wsgi.py::TestV2Events::test_get_missing_content_type",
"tests/test_apig_wsgi.py::TestV2Events::test_cookie",
"tests/test_apig_wsgi.py::TestV2Events::test_two_cookies",
"tests/test_apig_wsgi.py::TestV2Events::test_mixed_cookies",
"tests/test_apig_wsgi.py::TestV2Events::test_set_one_cookie",
"tests/test_apig_wsgi.py::TestV2Events::test_set_two_cookies",
"tests/test_apig_wsgi.py::TestV2Events::test_get_binary_support_default_text_content_types[text/plain]",
"tests/test_apig_wsgi.py::TestV2Events::test_get_binary_support_default_text_content_types[text/html]",
"tests/test_apig_wsgi.py::TestV2Events::test_get_binary_support_default_text_content_types[application/json]",
"tests/test_apig_wsgi.py::TestV2Events::test_get_binary_support_default_text_content_types[application/vnd.api+json]",
"tests/test_apig_wsgi.py::TestV2Events::test_get_binary_support_custom_text_content_types[test/custom]",
"tests/test_apig_wsgi.py::TestV2Events::test_get_binary_support_custom_text_content_types[application/vnd.custom]",
"tests/test_apig_wsgi.py::TestV2Events::test_get_binary_support_binary",
"tests/test_apig_wsgi.py::TestV2Events::test_get_binary_support_binary_default_text_with_gzip_content_encoding[text/plain]",
"tests/test_apig_wsgi.py::TestV2Events::test_get_binary_support_binary_default_text_with_gzip_content_encoding[text/html]",
"tests/test_apig_wsgi.py::TestV2Events::test_get_binary_support_binary_default_text_with_gzip_content_encoding[application/json]",
"tests/test_apig_wsgi.py::TestV2Events::test_get_binary_support_binary_default_text_with_gzip_content_encoding[application/vnd.api+json]",
"tests/test_apig_wsgi.py::TestV2Events::test_get_binary_support_binary_custom_text_with_gzip_content_encoding[test/custom]",
"tests/test_apig_wsgi.py::TestV2Events::test_get_binary_support_binary_custom_text_with_gzip_content_encoding[application/vnd.custom]",
"tests/test_apig_wsgi.py::TestV2Events::test_plain_header",
"tests/test_apig_wsgi.py::TestV2Events::test_special_headers",
"tests/test_apig_wsgi.py::TestV2Events::test_path_unquoting",
"tests/test_apig_wsgi.py::TestV2Events::test_empty_and_uncloseable_content",
"tests/test_apig_wsgi.py::TestUnknownVersionEvents::test_errors"
] | {
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false
} | 2021-08-24 10:00:38+00:00 | mit | 886 |
|
adamchainz__apig-wsgi-290 | diff --git a/HISTORY.rst b/HISTORY.rst
index b2d240b..c1e65a8 100644
--- a/HISTORY.rst
+++ b/HISTORY.rst
@@ -2,6 +2,10 @@
History
=======
+* Handle ``requestContext`` being ``None``, as can happen from Lambda invoke.
+
+ Thanks to scottmn for the report in `Issue #289 <https://github.com/adamchainz/apig-wsgi/issues/289>`__.
+
2.12.0 (2021-08-24)
-------------------
diff --git a/src/apig_wsgi/__init__.py b/src/apig_wsgi/__init__.py
index 2d1389d..0217ad6 100644
--- a/src/apig_wsgi/__init__.py
+++ b/src/apig_wsgi/__init__.py
@@ -58,7 +58,11 @@ def make_lambda_handler(
def handler(event: Dict[str, Any], context: Any) -> Dict[str, Any]:
# ALB doesn't send a version, but requestContext will contain a key named 'elb'.
- if "requestContext" in event and "elb" in event["requestContext"]:
+ if (
+ "requestContext" in event
+ and isinstance(event["requestContext"], dict)
+ and "elb" in event["requestContext"]
+ ):
version = "alb"
else:
version = event.get("version", "1.0")
| adamchainz/apig-wsgi | 87e53add20433c5a8b8550b1958d4160e95ab49c | diff --git a/tests/test_apig_wsgi.py b/tests/test_apig_wsgi.py
index 7697d40..528ae91 100644
--- a/tests/test_apig_wsgi.py
+++ b/tests/test_apig_wsgi.py
@@ -1,7 +1,17 @@
import sys
from base64 import b64encode
from io import BytesIO
-from typing import Any, Callable, Dict, Generator, Iterable, List, Optional, Tuple
+from typing import (
+ Any,
+ Callable,
+ Dict,
+ Generator,
+ Iterable,
+ List,
+ Optional,
+ Tuple,
+ Union,
+)
import pytest
@@ -82,6 +92,13 @@ class ContextStub:
# v1 tests
+class Sentinel:
+ pass
+
+
+SENTINEL = Sentinel()
+
+
def make_v1_event(
*,
method: str = "GET",
@@ -92,7 +109,7 @@ def make_v1_event(
headers_multi: bool = True,
body: str = "",
binary: bool = False,
- request_context: Optional[Dict[str, Any]] = None,
+ request_context: Union[Dict[str, Any], None, Sentinel] = SENTINEL,
) -> Dict[str, Any]:
if headers is None:
headers = {"Host": ["example.com"]}
@@ -125,7 +142,7 @@ def make_v1_event(
event["body"] = body
event["isBase64Encoded"] = False
- if request_context is not None:
+ if request_context is not SENTINEL:
event["requestContext"] = request_context
return event
@@ -507,6 +524,14 @@ class TestV1Events:
assert simple_app.environ["apig_wsgi.request_context"] == context
+ def test_request_context_none(self, simple_app: App) -> None:
+ # Invoking lambdas can lead to requestContext being JSON null
+ event = make_v1_event(request_context=None)
+
+ simple_app.handler(event, None)
+
+ # Simply don't crash
+
def test_full_event(self, simple_app: App) -> None:
event = make_v1_event()
| Error when requestContext is None
### Python Version
3.8
### Package Version
2.12.0
### Description
I have a lambda that is called through API Gateway, but can also be invoked directly via a lambda client. Upgrading to 2.12.0 started causing this error when invoking via the client.
```
[ERROR] TypeError: argument of type 'NoneType' is not iterable
Traceback (most recent call last):
File "/var/task/apig_wsgi/__init__.py", line 61, in handler
if "requestContext" in event and "elb" in event["requestContext"]:
```
This is being done from Kotlin code, using `com.amazonaws.services.lambda.AWSLambda` client. When an `AwsProxyRequest` object is constructed, it has `requestContext=null` by default. This results in the error above.
I can update my invoke code to set a dummy requestContext, but I do believe it is valid to be None in the direct lambda invocation use case. Does it make sense to add a `not None` check here for requestContext?
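For reference, a minimal sketch (my own, not the library's code) of the failure and a `None`-safe variant of the check:
```python
event = {"requestContext": None}  # what a default AwsProxyRequest produces

# The current test crashes because `"elb" in None` raises
# TypeError: argument of type 'NoneType' is not iterable.
request_context = event.get("requestContext")
is_alb = isinstance(request_context, dict) and "elb" in request_context
```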
Thanks.
| 0.0 | 87e53add20433c5a8b8550b1958d4160e95ab49c | [
"tests/test_apig_wsgi.py::TestV1Events::test_request_context_none"
] | [
"tests/test_apig_wsgi.py::TestV1Events::test_get",
"tests/test_apig_wsgi.py::TestV1Events::test_get_missing_content_type",
"tests/test_apig_wsgi.py::TestV1Events::test_get_single_header",
"tests/test_apig_wsgi.py::TestV1Events::test_get_binary_support_default_text_content_types[text/plain]",
"tests/test_apig_wsgi.py::TestV1Events::test_get_binary_support_default_text_content_types[text/html]",
"tests/test_apig_wsgi.py::TestV1Events::test_get_binary_support_default_text_content_types[application/json]",
"tests/test_apig_wsgi.py::TestV1Events::test_get_binary_support_default_text_content_types[application/vnd.api+json]",
"tests/test_apig_wsgi.py::TestV1Events::test_get_binary_support_custom_text_content_types[test/custom]",
"tests/test_apig_wsgi.py::TestV1Events::test_get_binary_support_custom_text_content_types[application/vnd.custom]",
"tests/test_apig_wsgi.py::TestV1Events::test_get_binary_support_binary",
"tests/test_apig_wsgi.py::TestV1Events::test_get_binary_support_binary_default_text_with_gzip_content_encoding[text/plain]",
"tests/test_apig_wsgi.py::TestV1Events::test_get_binary_support_binary_default_text_with_gzip_content_encoding[text/html]",
"tests/test_apig_wsgi.py::TestV1Events::test_get_binary_support_binary_default_text_with_gzip_content_encoding[application/json]",
"tests/test_apig_wsgi.py::TestV1Events::test_get_binary_support_binary_default_text_with_gzip_content_encoding[application/vnd.api+json]",
"tests/test_apig_wsgi.py::TestV1Events::test_get_binary_support_binary_custom_text_with_gzip_content_encoding[test/custom]",
"tests/test_apig_wsgi.py::TestV1Events::test_get_binary_support_binary_custom_text_with_gzip_content_encoding[application/vnd.custom]",
"tests/test_apig_wsgi.py::TestV1Events::test_get_binary_support_no_content_type",
"tests/test_apig_wsgi.py::TestV1Events::test_post",
"tests/test_apig_wsgi.py::TestV1Events::test_post_binary_support",
"tests/test_apig_wsgi.py::TestV1Events::test_path_unquoting",
"tests/test_apig_wsgi.py::TestV1Events::test_querystring_none",
"tests/test_apig_wsgi.py::TestV1Events::test_querystring_none_single",
"tests/test_apig_wsgi.py::TestV1Events::test_querystring_empty",
"tests/test_apig_wsgi.py::TestV1Events::test_querystring_empty_single",
"tests/test_apig_wsgi.py::TestV1Events::test_querystring_one",
"tests/test_apig_wsgi.py::TestV1Events::test_querystring_one_single",
"tests/test_apig_wsgi.py::TestV1Events::test_querystring_encoding_plus_value",
"tests/test_apig_wsgi.py::TestV1Events::test_querystring_encoding_plus_key",
"tests/test_apig_wsgi.py::TestV1Events::test_querystring_multi",
"tests/test_apig_wsgi.py::TestV1Events::test_querystring_multi_encoding_plus_value",
"tests/test_apig_wsgi.py::TestV1Events::test_querystring_multi_encoding_plus_key",
"tests/test_apig_wsgi.py::TestV1Events::test_plain_header",
"tests/test_apig_wsgi.py::TestV1Events::test_plain_header_single",
"tests/test_apig_wsgi.py::TestV1Events::test_plain_header_multi",
"tests/test_apig_wsgi.py::TestV1Events::test_special_headers",
"tests/test_apig_wsgi.py::TestV1Events::test_special_content_type",
"tests/test_apig_wsgi.py::TestV1Events::test_special_host",
"tests/test_apig_wsgi.py::TestV1Events::test_special_x_forwarded_for",
"tests/test_apig_wsgi.py::TestV1Events::test_x_forwarded_proto",
"tests/test_apig_wsgi.py::TestV1Events::test_x_forwarded_port",
"tests/test_apig_wsgi.py::TestV1Events::test_no_headers",
"tests/test_apig_wsgi.py::TestV1Events::test_headers_None",
"tests/test_apig_wsgi.py::TestV1Events::test_exc_info",
"tests/test_apig_wsgi.py::TestV1Events::test_request_context",
"tests/test_apig_wsgi.py::TestV1Events::test_full_event",
"tests/test_apig_wsgi.py::TestV1Events::test_elb_health_check",
"tests/test_apig_wsgi.py::TestV1Events::test_context",
"tests/test_apig_wsgi.py::TestV1Events::test_empty_and_uncloseable_content",
"tests/test_apig_wsgi.py::TestAlbEvents::test_querystring_encoding_plus_value",
"tests/test_apig_wsgi.py::TestAlbEvents::test_querystring_encoding_plus_key",
"tests/test_apig_wsgi.py::TestAlbEvents::test_querystring_multi",
"tests/test_apig_wsgi.py::TestAlbEvents::test_querystring_multi_encoding_plus_value",
"tests/test_apig_wsgi.py::TestAlbEvents::test_querystring_multi_encoding_plus_key",
"tests/test_apig_wsgi.py::TestAlbEvents::test_querystring_contains_encoded_value",
"tests/test_apig_wsgi.py::TestAlbEvents::test_querystring_multi_contains_encoded_value",
"tests/test_apig_wsgi.py::TestV2Events::test_get",
"tests/test_apig_wsgi.py::TestV2Events::test_get_missing_content_type",
"tests/test_apig_wsgi.py::TestV2Events::test_cookie",
"tests/test_apig_wsgi.py::TestV2Events::test_two_cookies",
"tests/test_apig_wsgi.py::TestV2Events::test_mixed_cookies",
"tests/test_apig_wsgi.py::TestV2Events::test_set_one_cookie",
"tests/test_apig_wsgi.py::TestV2Events::test_set_two_cookies",
"tests/test_apig_wsgi.py::TestV2Events::test_get_binary_support_default_text_content_types[text/plain]",
"tests/test_apig_wsgi.py::TestV2Events::test_get_binary_support_default_text_content_types[text/html]",
"tests/test_apig_wsgi.py::TestV2Events::test_get_binary_support_default_text_content_types[application/json]",
"tests/test_apig_wsgi.py::TestV2Events::test_get_binary_support_default_text_content_types[application/vnd.api+json]",
"tests/test_apig_wsgi.py::TestV2Events::test_get_binary_support_custom_text_content_types[test/custom]",
"tests/test_apig_wsgi.py::TestV2Events::test_get_binary_support_custom_text_content_types[application/vnd.custom]",
"tests/test_apig_wsgi.py::TestV2Events::test_get_binary_support_binary",
"tests/test_apig_wsgi.py::TestV2Events::test_get_binary_support_binary_default_text_with_gzip_content_encoding[text/plain]",
"tests/test_apig_wsgi.py::TestV2Events::test_get_binary_support_binary_default_text_with_gzip_content_encoding[text/html]",
"tests/test_apig_wsgi.py::TestV2Events::test_get_binary_support_binary_default_text_with_gzip_content_encoding[application/json]",
"tests/test_apig_wsgi.py::TestV2Events::test_get_binary_support_binary_default_text_with_gzip_content_encoding[application/vnd.api+json]",
"tests/test_apig_wsgi.py::TestV2Events::test_get_binary_support_binary_custom_text_with_gzip_content_encoding[test/custom]",
"tests/test_apig_wsgi.py::TestV2Events::test_get_binary_support_binary_custom_text_with_gzip_content_encoding[application/vnd.custom]",
"tests/test_apig_wsgi.py::TestV2Events::test_plain_header",
"tests/test_apig_wsgi.py::TestV2Events::test_special_headers",
"tests/test_apig_wsgi.py::TestV2Events::test_path_unquoting",
"tests/test_apig_wsgi.py::TestV2Events::test_empty_and_uncloseable_content",
"tests/test_apig_wsgi.py::TestUnknownVersionEvents::test_errors"
] | {
"failed_lite_validators": [
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false
} | 2021-09-23 20:25:57+00:00 | mit | 887 |
|
adamchainz__apig-wsgi-80 | diff --git a/HISTORY.rst b/HISTORY.rst
index c6365ca..3669cc5 100644
--- a/HISTORY.rst
+++ b/HISTORY.rst
@@ -7,6 +7,7 @@ Pending Release
.. Insert new release notes below this line
* Update Python support to 3.5-3.7, as 3.4 has reached its end of life.
+* Return binary content for gzipped responses with text or JSON content types.
2.2.0 (2019-04-15)
------------------
diff --git a/apig_wsgi.py b/apig_wsgi.py
index aa36d78..be2930f 100644
--- a/apig_wsgi.py
+++ b/apig_wsgi.py
@@ -101,14 +101,7 @@ class Response(object):
def as_apig_response(self):
response = {"statusCode": self.status_code, "headers": dict(self.headers)}
-
- content_type = self._get_content_type()
- should_send_binary = self.binary_support and (
- content_type is None
- or not content_type.startswith(("text/", "application/json"))
- )
-
- if should_send_binary:
+ if self._should_send_binary():
response["isBase64Encoded"] = True
response["body"] = b64encode(self.body.getvalue()).decode("utf-8")
else:
@@ -116,10 +109,31 @@ class Response(object):
return response
+ def _should_send_binary(self):
+ """
+ Determines if binary response should be sent to API Gateway
+ """
+ if not self.binary_support:
+ return False
+
+ content_type = self._get_content_type()
+ non_binary_content_types = ("text/", "application/json")
+ if not content_type.startswith(non_binary_content_types):
+ return True
+
+ content_encoding = self._get_content_encoding()
+ # Content type is non-binary but the content encoding might be.
+ return "gzip" in content_encoding.lower()
+
def _get_content_type(self):
- content_type_headers = [
- v for k, v in self.headers if k.lower() == "content-type"
- ]
- if len(content_type_headers):
- return content_type_headers[-1]
+ return self._get_header("content-type") or ""
+
+ def _get_content_encoding(self):
+ return self._get_header("content-encoding") or ""
+
+ def _get_header(self, header_name):
+ header_name = header_name.lower()
+ matching_headers = [v for k, v in self.headers if k.lower() == header_name]
+ if len(matching_headers):
+ return matching_headers[-1]
return None
| adamchainz/apig-wsgi | 80868a1a740711fe1fdf3c523d0e400c788a1b14 | diff --git a/test_apig_wsgi.py b/test_apig_wsgi.py
index 6a921ea..72531fd 100644
--- a/test_apig_wsgi.py
+++ b/test_apig_wsgi.py
@@ -21,6 +21,11 @@ def simple_app():
yield app
+parametrize_text_content_type = pytest.mark.parametrize(
+ "text_content_type", ["text/plain", "text/html", "application/json"]
+)
+
+
def make_event(
method="GET",
qs_params=None,
@@ -69,14 +74,15 @@ def test_get_missing_content_type(simple_app):
assert response == {"statusCode": 200, "headers": {}, "body": "Hello World\n"}
-def test_get_binary_support_text(simple_app):
+@parametrize_text_content_type
+def test_get_binary_support_text(simple_app, text_content_type):
simple_app.handler = make_lambda_handler(simple_app, binary_support=True)
+ simple_app.headers = [("Content-Type", text_content_type)]
response = simple_app.handler(make_event(), None)
-
assert response == {
"statusCode": 200,
- "headers": {"Content-Type": "text/plain"},
+ "headers": {"Content-Type": text_content_type},
"body": "Hello World\n",
}
@@ -96,6 +102,27 @@ def test_get_binary_support_binary(simple_app):
}
+@parametrize_text_content_type
+def test_get_binary_support_binary_text_with_gzip_content_encoding(
+ simple_app, text_content_type
+):
+ simple_app.handler = make_lambda_handler(simple_app, binary_support=True)
+ simple_app.headers = [
+ ("Content-Type", text_content_type),
+ ("Content-Encoding", "gzip"),
+ ]
+ simple_app.response = b"\x13\x37"
+
+ response = simple_app.handler(make_event(), None)
+
+ assert response == {
+ "statusCode": 200,
+ "headers": {"Content-Type": text_content_type, "Content-Encoding": "gzip"},
+ "body": b64encode(b"\x13\x37").decode("utf-8"),
+ "isBase64Encoded": True,
+ }
+
+
def test_get_binary_support_no_content_type(simple_app):
simple_app.handler = make_lambda_handler(simple_app, binary_support=True)
simple_app.headers = []
| Ability to send a binary response with `text/` or `application/json` content type
Problem
--------
`Response.as_apig_response` is hardcoded not to return a binary response when the content type of the response is either `text/*` or `application/json`.
https://github.com/adamchainz/apig-wsgi/blob/b0ce56cbb3ff67586c398f57bc13590bf0961940/apig_wsgi.py#L105-L109
This becomes a problem for use cases like returning a gzip response from the application with the above content types.
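For concreteness, a minimal WSGI app that hits this case (illustrative only — the handler below is not from the library): it returns gzip-compressed JSON, which is mangled if the gateway forwards it as plain text instead of base64-encoded binary.
```python
import gzip
import json


def application(environ, start_response):
    # JSON content type, but the body bytes are binary (gzip-compressed).
    body = gzip.compress(json.dumps({"hello": "world"}).encode("utf-8"))
    start_response(
        "200 OK",
        [("Content-Type", "application/json"), ("Content-Encoding", "gzip")],
    )
    return [body]
```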
Possible Solution
-----------------
**Check whether the `Content-Encoding` header contains `gzip`, and return a binary response based on that.**
This fix would keep the hardcoded values; something like this should work:
```python
def _should_send_binary(self):
"""Determines if binary response should be sent to API Gateway
"""
non_binary_content_types = ("text/", "application/json")
content_type = self._get_content_type() or ''
content_encoding = self._get_content_encoding() or ''
supports_binary = self.binary_support
is_binary_content_type = not content_type.startswith(non_binary_content_types)
if supports_binary and is_binary_content_type:
return True
# Content type is non-binary but the content encoding is.
elif supports_binary and not is_binary_content_type:
return 'gzip' in content_encoding.lower()
return False
```
**Allow `Response` class to be extended so that users can handle this logic on their own.**
This can be done by introducing the same `Response._should_send_binary` method which defaults to the current behaviour. The `make_lambda_handler` function can then be allowed to have an argument like `response_class` to facilitate this.
I can create a PR for whichever of the fixes seems more appropriate to you.
What do you think about this?
| 0.0 | 80868a1a740711fe1fdf3c523d0e400c788a1b14 | [
"test_apig_wsgi.py::test_get_binary_support_binary_text_with_gzip_content_encoding[text/plain]",
"test_apig_wsgi.py::test_get_binary_support_binary_text_with_gzip_content_encoding[text/html]",
"test_apig_wsgi.py::test_get_binary_support_binary_text_with_gzip_content_encoding[application/json]"
] | [
"test_apig_wsgi.py::test_get",
"test_apig_wsgi.py::test_get_missing_content_type",
"test_apig_wsgi.py::test_get_binary_support_text[text/plain]",
"test_apig_wsgi.py::test_get_binary_support_text[text/html]",
"test_apig_wsgi.py::test_get_binary_support_text[application/json]",
"test_apig_wsgi.py::test_get_binary_support_binary",
"test_apig_wsgi.py::test_get_binary_support_no_content_type",
"test_apig_wsgi.py::test_post",
"test_apig_wsgi.py::test_post_binary_support",
"test_apig_wsgi.py::test_querystring_none",
"test_apig_wsgi.py::test_querystring_empty",
"test_apig_wsgi.py::test_querystring_one",
"test_apig_wsgi.py::test_plain_header",
"test_apig_wsgi.py::test_special_headers",
"test_apig_wsgi.py::test_no_headers",
"test_apig_wsgi.py::test_headers_None",
"test_apig_wsgi.py::test_exc_info",
"test_apig_wsgi.py::test_request_context"
] | {
"failed_lite_validators": [
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false
} | 2019-08-18 23:01:41+00:00 | isc | 888 |
|
adamchainz__apig-wsgi-93 | diff --git a/HISTORY.rst b/HISTORY.rst
index 4b65c01..1ed117d 100644
--- a/HISTORY.rst
+++ b/HISTORY.rst
@@ -13,6 +13,9 @@ Pending Release
(`docs <https://docs.python.org/3.8/library/importlib.metadata.html#distribution-versions>`__ /
`backport <https://pypi.org/project/importlib-metadata/>`__).
* Update Python support to 3.5-3.8.
+* Add `application/vnd.api+json` to default non-binary content type prefixes.
+* Add support for custom non-binary content type prefixes. This lets you control
+ which content types should be treated as plain text when binary support is enabled.
2.3.0 (2019-08-19)
------------------
diff --git a/README.rst b/README.rst
index 8f54234..3404304 100644
--- a/README.rst
+++ b/README.rst
@@ -39,8 +39,8 @@ Python 3.5-3.8 supported.
Usage
=====
-``make_lambda_handler(app, binary_support=False)``
---------------------------------------------------
+``make_lambda_handler(app, binary_support=False, non_binary_content_type_prefixes=None)``
+-----------------------------------------------------------------------------------------
``app`` should be a WSGI app, for example from Django's ``wsgi.py`` or Flask's
``Flask()`` object.
@@ -53,8 +53,10 @@ using ``'*/*'`` is the best way to do it, since it is used to match the request
'Accept' header as well, which WSGI applications are likely to ignore).
Note that binary responses aren't sent if your response has a 'Content-Type'
-starting 'text/html' or 'application/json' - this is to support sending larger
-text responses.
+starting 'text/', 'application/json' or 'application/vnd.api+json' - this
+is to support sending larger text responses. To support content types other
+than those specified above, you can set `non_binary_content_type_prefixes`
+to a list of content type prefixes of your choice.
If the event from API Gateway contains the ``requestContext`` key, for example
from custom request authorizers, this will be available in the WSGI environ
diff --git a/src/apig_wsgi.py b/src/apig_wsgi.py
index e804a20..f6bd5ee 100644
--- a/src/apig_wsgi.py
+++ b/src/apig_wsgi.py
@@ -5,16 +5,41 @@ from urllib.parse import urlencode
__all__ = ("make_lambda_handler",)
+DEFAULT_NON_BINARY_CONTENT_TYPE_PREFIXES = (
+ "text/",
+ "application/json",
+ "application/vnd.api+json",
+)
-def make_lambda_handler(wsgi_app, binary_support=False):
+
+def make_lambda_handler(
+ wsgi_app, binary_support=False, non_binary_content_type_prefixes=None
+):
"""
Turn a WSGI app callable into a Lambda handler function suitable for
running on API Gateway.
+
+ Parameters
+ ----------
+ wsgi_app : function
+ WSGI Application callable
+ binary_support : bool
+ Whether to support returning APIG-compatible binary responses
+ non_binary_content_type_prefixes : tuple of str
+ Tuple of content type prefixes which should be considered "Non-Binary" when
+ `binary_support` is True. This prevents apig_wsgi from unexpectedly encoding
+ non-binary responses as binary.
"""
+ if non_binary_content_type_prefixes is None:
+ non_binary_content_type_prefixes = DEFAULT_NON_BINARY_CONTENT_TYPE_PREFIXES
+ non_binary_content_type_prefixes = tuple(non_binary_content_type_prefixes)
def handler(event, context):
environ = get_environ(event, binary_support=binary_support)
- response = Response(binary_support=binary_support)
+ response = Response(
+ binary_support=binary_support,
+ non_binary_content_type_prefixes=non_binary_content_type_prefixes,
+ )
result = wsgi_app(environ, response.start_response)
response.consume(result)
return response.as_apig_response()
@@ -73,11 +98,12 @@ def get_environ(event, binary_support):
class Response(object):
- def __init__(self, binary_support):
+ def __init__(self, binary_support, non_binary_content_type_prefixes):
self.status_code = 500
self.headers = []
self.body = BytesIO()
self.binary_support = binary_support
+ self.non_binary_content_type_prefixes = non_binary_content_type_prefixes
def start_response(self, status, response_headers, exc_info=None):
if exc_info is not None:
@@ -113,8 +139,7 @@ class Response(object):
return False
content_type = self._get_content_type()
- non_binary_content_types = ("text/", "application/json")
- if not content_type.startswith(non_binary_content_types):
+ if not content_type.startswith(self.non_binary_content_type_prefixes):
return True
content_encoding = self._get_content_encoding()
| adamchainz/apig-wsgi | af9d96d0ac950f9b04e6d365acf0b3a4f5e8e690 | diff --git a/tests/test_apig_wsgi.py b/tests/test_apig_wsgi.py
index 72531fd..f6b77e9 100644
--- a/tests/test_apig_wsgi.py
+++ b/tests/test_apig_wsgi.py
@@ -6,6 +6,8 @@ import pytest
from apig_wsgi import make_lambda_handler
+CUSTOM_NON_BINARY_CONTENT_TYPE_PREFIXES = ["test/custom", "application/vnd.custom"]
+
@pytest.fixture()
def simple_app():
@@ -21,8 +23,14 @@ def simple_app():
yield app
-parametrize_text_content_type = pytest.mark.parametrize(
- "text_content_type", ["text/plain", "text/html", "application/json"]
+parametrize_default_text_content_type = pytest.mark.parametrize(
+ "text_content_type",
+ ["text/plain", "text/html", "application/json", "application/vnd.api+json"],
+)
+
+
+parametrize_custom_text_content_type = pytest.mark.parametrize(
+ "text_content_type", CUSTOM_NON_BINARY_CONTENT_TYPE_PREFIXES
)
@@ -74,8 +82,8 @@ def test_get_missing_content_type(simple_app):
assert response == {"statusCode": 200, "headers": {}, "body": "Hello World\n"}
-@parametrize_text_content_type
-def test_get_binary_support_text(simple_app, text_content_type):
+@parametrize_default_text_content_type
+def test_get_binary_support_default_text_content_types(simple_app, text_content_type):
simple_app.handler = make_lambda_handler(simple_app, binary_support=True)
simple_app.headers = [("Content-Type", text_content_type)]
@@ -87,6 +95,23 @@ def test_get_binary_support_text(simple_app, text_content_type):
}
+@parametrize_custom_text_content_type
+def test_get_binary_support_custom_text_content_types(simple_app, text_content_type):
+ simple_app.handler = make_lambda_handler(
+ simple_app,
+ binary_support=True,
+ non_binary_content_type_prefixes=CUSTOM_NON_BINARY_CONTENT_TYPE_PREFIXES,
+ )
+ simple_app.headers = [("Content-Type", text_content_type)]
+
+ response = simple_app.handler(make_event(), None)
+ assert response == {
+ "statusCode": 200,
+ "headers": {"Content-Type": text_content_type},
+ "body": "Hello World\n",
+ }
+
+
def test_get_binary_support_binary(simple_app):
simple_app.handler = make_lambda_handler(simple_app, binary_support=True)
simple_app.headers = [("Content-Type", "application/octet-stream")]
@@ -102,8 +127,8 @@ def test_get_binary_support_binary(simple_app):
}
-@parametrize_text_content_type
-def test_get_binary_support_binary_text_with_gzip_content_encoding(
+@parametrize_default_text_content_type
+def test_get_binary_support_binary_default_text_with_gzip_content_encoding(
simple_app, text_content_type
):
simple_app.handler = make_lambda_handler(simple_app, binary_support=True)
@@ -123,6 +148,31 @@ def test_get_binary_support_binary_text_with_gzip_content_encoding(
}
+@parametrize_custom_text_content_type
+def test_get_binary_support_binary_custom_text_with_gzip_content_encoding(
+ simple_app, text_content_type
+):
+ simple_app.handler = make_lambda_handler(
+ simple_app,
+ binary_support=True,
+ non_binary_content_type_prefixes=CUSTOM_NON_BINARY_CONTENT_TYPE_PREFIXES,
+ )
+ simple_app.headers = [
+ ("Content-Type", text_content_type),
+ ("Content-Encoding", "gzip"),
+ ]
+ simple_app.response = b"\x13\x37"
+
+ response = simple_app.handler(make_event(), None)
+
+ assert response == {
+ "statusCode": 200,
+ "headers": {"Content-Type": text_content_type, "Content-Encoding": "gzip"},
+ "body": b64encode(b"\x13\x37").decode("utf-8"),
+ "isBase64Encoded": True,
+ }
+
+
def test_get_binary_support_no_content_type(simple_app):
simple_app.handler = make_lambda_handler(simple_app, binary_support=True)
simple_app.headers = []
| Ability to have a custom whitelist for non-binary content types
## Problem
The `Response` class has a whitelist of "non binary content types" which includes:
- `text/*`
- `application/json`
https://github.com/adamchainz/apig-wsgi/blob/f24dd8df21a4ab693992ababf4cfb9735af27b57/src/apig_wsgi.py#L116
All other response types are assumed to be binary if `binary_support` is True.
This becomes a problem if you want to return text content for other content types. For example, [JSON:API](https://jsonapi.org/) APIs use the `application/vnd.api+json` content type.
## Possible solution
The library should provide the ability to either extend the `Response` class or pass `non_binary_content_types` directly to the `make_lambda_handler` call.
```python
wsgi_handler = make_lambda_handler(application, binary_support=True, response_class=MyResponse)
wsgi_handler = make_lambda_handler(application, binary_support=True, non_binary_content_types=my_content_type_list)
```
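For the first variant, the subclass might look roughly like this — a sketch only: `response_class` is proposed above, not an existing argument, and the override leans on private internals (`_get_content_type`, `_should_send_binary`):
```python
from apig_wsgi import Response


class MyResponse(Response):
    """Also treat JSON:API payloads as non-binary text."""

    non_binary_content_types = ("text/", "application/json", "application/vnd.api+json")

    def _should_send_binary(self):
        if not self.binary_support:
            return False
        content_type = self._get_content_type() or ""
        # Base64-encode only content types outside the whitelist.
        return not content_type.startswith(self.non_binary_content_types)


# Wired in via the *proposed* hook (does not exist yet):
# wsgi_handler = make_lambda_handler(application, binary_support=True, response_class=MyResponse)
```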
I can create a PR for either of these approaches.
What do you think? | 0.0 | af9d96d0ac950f9b04e6d365acf0b3a4f5e8e690 | [
"tests/test_apig_wsgi.py::test_get_binary_support_default_text_content_types[application/vnd.api+json]",
"tests/test_apig_wsgi.py::test_get_binary_support_custom_text_content_types[test/custom]",
"tests/test_apig_wsgi.py::test_get_binary_support_custom_text_content_types[application/vnd.custom]",
"tests/test_apig_wsgi.py::test_get_binary_support_binary_custom_text_with_gzip_content_encoding[test/custom]",
"tests/test_apig_wsgi.py::test_get_binary_support_binary_custom_text_with_gzip_content_encoding[application/vnd.custom]"
] | [
"tests/test_apig_wsgi.py::test_get",
"tests/test_apig_wsgi.py::test_get_missing_content_type",
"tests/test_apig_wsgi.py::test_get_binary_support_default_text_content_types[text/plain]",
"tests/test_apig_wsgi.py::test_get_binary_support_default_text_content_types[text/html]",
"tests/test_apig_wsgi.py::test_get_binary_support_default_text_content_types[application/json]",
"tests/test_apig_wsgi.py::test_get_binary_support_binary",
"tests/test_apig_wsgi.py::test_get_binary_support_binary_default_text_with_gzip_content_encoding[text/plain]",
"tests/test_apig_wsgi.py::test_get_binary_support_binary_default_text_with_gzip_content_encoding[text/html]",
"tests/test_apig_wsgi.py::test_get_binary_support_binary_default_text_with_gzip_content_encoding[application/json]",
"tests/test_apig_wsgi.py::test_get_binary_support_binary_default_text_with_gzip_content_encoding[application/vnd.api+json]",
"tests/test_apig_wsgi.py::test_get_binary_support_no_content_type",
"tests/test_apig_wsgi.py::test_post",
"tests/test_apig_wsgi.py::test_post_binary_support",
"tests/test_apig_wsgi.py::test_querystring_none",
"tests/test_apig_wsgi.py::test_querystring_empty",
"tests/test_apig_wsgi.py::test_querystring_one",
"tests/test_apig_wsgi.py::test_plain_header",
"tests/test_apig_wsgi.py::test_special_headers",
"tests/test_apig_wsgi.py::test_no_headers",
"tests/test_apig_wsgi.py::test_headers_None",
"tests/test_apig_wsgi.py::test_exc_info",
"tests/test_apig_wsgi.py::test_request_context"
] | {
"failed_lite_validators": [
"has_hyperlinks",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false
} | 2019-11-13 19:03:20+00:00 | isc | 889 |
|
adamchainz__ec2-metadata-166 | diff --git a/src/ec2_metadata.py b/src/ec2_metadata.py
index ec3e695..a03813d 100644
--- a/src/ec2_metadata.py
+++ b/src/ec2_metadata.py
@@ -1,3 +1,5 @@
+import time
+
import requests
from cached_property import cached_property
@@ -10,6 +12,7 @@ SERVICE_URL = "http://169.254.169.254/2016-09-02/"
DYNAMIC_URL = SERVICE_URL + "dynamic/"
METADATA_URL = SERVICE_URL + "meta-data/"
USERDATA_URL = SERVICE_URL + "user-data/"
+TOKEN_TTL_SECONDS = 21600
class BaseLazyObject(object):
@@ -24,8 +27,26 @@ class EC2Metadata(BaseLazyObject):
if session is None:
session = requests.Session()
self._session = session
+ self._token_updated_at = 0
+
+ def _ensure_fresh_token(self):
+ """ Update the metadata token if needed.
+
+ Tokens are rotated 1 minute before they would expire.
+ """
+ now = time.time()
+ if now - self._token_updated_at > (TOKEN_TTL_SECONDS - 60):
+ token = self._session.put(
+ "http://169.254.169.254/latest/api/token",
+ headers={
+ "X-aws-ec2-metadata-token-ttl-seconds": str(TOKEN_TTL_SECONDS)
+ },
+ ).text
+ self._session.headers.update({"X-aws-ec2-metadata-token": token})
+ self._token_updated_at = now
def _get_url(self, url, allow_404=False):
+ self._ensure_fresh_token()
resp = self._session.get(url, timeout=1.0)
if resp.status_code != 404 or not allow_404:
resp.raise_for_status()
| adamchainz/ec2-metadata | 2607b63c8ecb07c82b8716d6738814000fc3437d | diff --git a/tests/test_ec2_metadata.py b/tests/test_ec2_metadata.py
index 0e4b43d..e71ccf1 100644
--- a/tests/test_ec2_metadata.py
+++ b/tests/test_ec2_metadata.py
@@ -4,6 +4,7 @@ import requests
from ec2_metadata import (
DYNAMIC_URL,
METADATA_URL,
+ TOKEN_TTL_SECONDS,
USERDATA_URL,
NetworkInterface,
ec2_metadata,
@@ -16,11 +17,20 @@ def clear_it():
example_mac = "00:11:22:33:44:55"
+example_token = "AABBCC"
# EC2Metadata tests
+def _mock_token_request(requests_mock):
+ requests_mock.put(
+ "http://169.254.169.254/latest/api/token",
+ headers={"X-aws-ec2-metadata-token-ttl-seconds": str(TOKEN_TTL_SECONDS)},
+ text=example_token,
+ )
+
+
def add_identity_doc_response(requests_mock, overrides=None):
identity_doc = {
"accountId": "123456789012",
@@ -35,6 +45,7 @@ def add_identity_doc_response(requests_mock, overrides=None):
}
if overrides:
identity_doc.update(overrides)
+ _mock_token_request(requests_mock)
requests_mock.get(DYNAMIC_URL + "instance-identity/document", json=identity_doc)
return identity_doc
@@ -45,17 +56,20 @@ def test_account_id(requests_mock):
def test_account_id_error(requests_mock):
+ _mock_token_request(requests_mock)
requests_mock.get(DYNAMIC_URL + "instance-identity/document", status_code=500)
with pytest.raises(requests.exceptions.HTTPError):
ec2_metadata.account_id
def test_ami_id(requests_mock):
+ _mock_token_request(requests_mock)
requests_mock.get(METADATA_URL + "ami-id", text="ami-12345678")
assert ec2_metadata.ami_id == "ami-12345678"
def test_ami_id_cached(requests_mock):
+ _mock_token_request(requests_mock)
requests_mock.get(METADATA_URL + "ami-id", text="ami-12345678")
ec2_metadata.ami_id
requests_mock.get(METADATA_URL + "ami-id", status_code=500)
@@ -63,6 +77,7 @@ def test_ami_id_cached(requests_mock):
def test_ami_id_cached_cleared(requests_mock):
+ _mock_token_request(requests_mock)
requests_mock.get(METADATA_URL + "ami-id", text="ami-12345678")
ec2_metadata.ami_id
@@ -74,42 +89,50 @@ def test_ami_id_cached_cleared(requests_mock):
def test_ami_launch_index(requests_mock):
+ _mock_token_request(requests_mock)
requests_mock.get(METADATA_URL + "ami-launch-index", text="0")
assert ec2_metadata.ami_launch_index == 0
def test_ami_manifest_path(requests_mock):
+ _mock_token_request(requests_mock)
requests_mock.get(METADATA_URL + "ami-manifest-path", text="(unknown)")
assert ec2_metadata.ami_manifest_path == "(unknown)"
def test_availability_zone(requests_mock):
+ _mock_token_request(requests_mock)
requests_mock.get(METADATA_URL + "placement/availability-zone", text="eu-west-1a")
assert ec2_metadata.availability_zone == "eu-west-1a"
def test_iam_info(requests_mock):
+ _mock_token_request(requests_mock)
requests_mock.get(METADATA_URL + "iam/info", text="{}")
assert ec2_metadata.iam_info == {}
def test_iam_info_none(requests_mock):
+ _mock_token_request(requests_mock)
requests_mock.get(METADATA_URL + "iam/info", status_code=404)
assert ec2_metadata.iam_info is None
def test_iam_info_unexpected(requests_mock):
+ _mock_token_request(requests_mock)
requests_mock.get(METADATA_URL + "iam/info", status_code=500)
with pytest.raises(requests.exceptions.HTTPError):
ec2_metadata.iam_info
def test_instance_action(requests_mock):
+ _mock_token_request(requests_mock)
requests_mock.get(METADATA_URL + "instance-action", text="none")
assert ec2_metadata.instance_action == "none"
def test_instance_id(requests_mock):
+ _mock_token_request(requests_mock)
requests_mock.get(METADATA_URL + "instance-id", text="i-12345678")
assert ec2_metadata.instance_id == "i-12345678"
@@ -120,6 +143,7 @@ def test_instance_identity(requests_mock):
def test_instance_profile_arn(requests_mock):
+ _mock_token_request(requests_mock)
requests_mock.get(
METADATA_URL + "iam/info", text='{"InstanceProfileArn": "arn:foobar"}'
)
@@ -127,11 +151,13 @@ def test_instance_profile_arn(requests_mock):
def test_instance_profile_arn_none(requests_mock):
+ _mock_token_request(requests_mock)
requests_mock.get(METADATA_URL + "iam/info", status_code=404)
assert ec2_metadata.instance_profile_arn is None
def test_instance_profile_id(requests_mock):
+ _mock_token_request(requests_mock)
requests_mock.get(
METADATA_URL + "iam/info", text='{"InstanceProfileId": "some-id"}'
)
@@ -139,31 +165,37 @@ def test_instance_profile_id(requests_mock):
def test_instance_profile_id_none(requests_mock):
+ _mock_token_request(requests_mock)
requests_mock.get(METADATA_URL + "iam/info", status_code=404)
assert ec2_metadata.instance_profile_id is None
def test_instance_type(requests_mock):
+ _mock_token_request(requests_mock)
requests_mock.get(METADATA_URL + "instance-type", text="t2.nano")
assert ec2_metadata.instance_type == "t2.nano"
def test_kernel_id(requests_mock):
+ _mock_token_request(requests_mock)
requests_mock.get(METADATA_URL + "kernel-id", text="aki-dc9ed9af")
assert ec2_metadata.kernel_id == "aki-dc9ed9af"
def test_kernel_id_none(requests_mock):
+ _mock_token_request(requests_mock)
requests_mock.get(METADATA_URL + "kernel-id", status_code=404)
assert ec2_metadata.kernel_id is None
def test_mac(requests_mock):
+ _mock_token_request(requests_mock)
requests_mock.get(METADATA_URL + "mac", text=example_mac)
assert ec2_metadata.mac == example_mac
def test_network_interfaces(requests_mock):
+ _mock_token_request(requests_mock)
requests_mock.get(METADATA_URL + "network/interfaces/macs/", text=example_mac + "/")
assert ec2_metadata.network_interfaces == {
example_mac: NetworkInterface(example_mac, ec2_metadata)
@@ -171,6 +203,7 @@ def test_network_interfaces(requests_mock):
def test_private_hostname(requests_mock):
+ _mock_token_request(requests_mock)
requests_mock.get(
METADATA_URL + "local-hostname", text="ip-172-30-0-0.eu-west-1.compute.internal"
)
@@ -178,11 +211,13 @@ def test_private_hostname(requests_mock):
def test_private_ipv4(requests_mock):
+ _mock_token_request(requests_mock)
requests_mock.get(METADATA_URL + "local-ipv4", text="172.30.0.0")
assert ec2_metadata.private_ipv4 == "172.30.0.0"
def test_public_hostname(requests_mock):
+ _mock_token_request(requests_mock)
requests_mock.get(
METADATA_URL + "public-hostname", text="ec2-1-2-3-4.compute-1.amazonaws.com"
)
@@ -190,16 +225,19 @@ def test_public_hostname(requests_mock):
def test_public_hostname_none(requests_mock):
+ _mock_token_request(requests_mock)
requests_mock.get(METADATA_URL + "public-hostname", status_code=404)
assert ec2_metadata.public_hostname is None
def test_public_ipv4(requests_mock):
+ _mock_token_request(requests_mock)
requests_mock.get(METADATA_URL + "public-ipv4", text="1.2.3.4")
assert ec2_metadata.public_ipv4 == "1.2.3.4"
def test_public_ipv4_none(requests_mock):
+ _mock_token_request(requests_mock)
requests_mock.get(METADATA_URL + "public-ipv4", status_code=404)
assert ec2_metadata.public_ipv4 is None
@@ -210,18 +248,21 @@ def test_region(requests_mock):
def test_reservation_id(requests_mock):
+ _mock_token_request(requests_mock)
requests_mock.get(METADATA_URL + "reservation-id", text="r-12345678901234567")
assert ec2_metadata.reservation_id == "r-12345678901234567"
def test_security_groups_single(requests_mock):
# most common case: a single SG
+ _mock_token_request(requests_mock)
requests_mock.get(METADATA_URL + "security-groups", text="security-group-one")
assert ec2_metadata.security_groups == ["security-group-one"]
def test_security_groups_two(requests_mock):
# another common case: multiple SGs
+ _mock_token_request(requests_mock)
requests_mock.get(
METADATA_URL + "security-groups", text="security-group-one\nsecurity-group-2"
)
@@ -231,16 +272,19 @@ def test_security_groups_two(requests_mock):
def test_security_groups_emptystring(requests_mock):
# Check '' too. Can't create an instance without a SG on EC2 but we should
# safely handle it, perhaps it's possible in e.g. OpenStack.
+ _mock_token_request(requests_mock)
requests_mock.get(METADATA_URL + "security-groups", text="")
assert ec2_metadata.security_groups == []
def test_user_data_none(requests_mock):
+ _mock_token_request(requests_mock)
requests_mock.get(USERDATA_URL, status_code=404)
assert ec2_metadata.user_data is None
def test_user_data_something(requests_mock):
+ _mock_token_request(requests_mock)
requests_mock.get(USERDATA_URL, content=b"foobar")
assert ec2_metadata.user_data == b"foobar"
@@ -250,6 +294,7 @@ def test_user_data_something(requests_mock):
def add_interface_response(requests_mock, url, text="", **kwargs):
full_url = METADATA_URL + "network/interfaces/macs/" + example_mac + url
+ _mock_token_request(requests_mock)
requests_mock.get(full_url, text=text, **kwargs)
| Update to version 2.0
https://aws.amazon.com/blogs/security/defense-in-depth-open-firewalls-reverse-proxies-ssrf-vulnerabilities-ec2-instance-metadata-service/
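For reference, v2 of the metadata service requires a session token on every request. A rough sketch of the token flow — the endpoint, header names, and 21600-second TTL below are the ones the patch above uses:
```python
import requests

session = requests.Session()

# The "knock": PUT to the token endpoint, requesting a token valid
# for up to 21600 seconds (6 hours).
token = session.put(
    "http://169.254.169.254/latest/api/token",
    headers={"X-aws-ec2-metadata-token-ttl-seconds": "21600"},
).text

# Every subsequent metadata request must present the token.
instance_id = session.get(
    "http://169.254.169.254/latest/meta-data/instance-id",
    headers={"X-aws-ec2-metadata-token": token},
).text
```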
This requires a new "knock" PUT request to get a session token before accessing any bits of data. | 0.0 | 2607b63c8ecb07c82b8716d6738814000fc3437d | [
"tests/test_ec2_metadata.py::test_network_interface_equal",
"tests/test_ec2_metadata.py::test_network_interface_not_equal",
"tests/test_ec2_metadata.py::test_network_interface_not_equal_class",
"tests/test_ec2_metadata.py::test_network_interface_repr"
] | [] | {
"failed_lite_validators": [
"has_short_problem_statement",
"has_hyperlinks"
],
"has_test_patch": true,
"is_lite": false
} | 2020-03-06 02:57:13+00:00 | isc | 890 |
|
adamchainz__ec2-metadata-350 | diff --git a/HISTORY.rst b/HISTORY.rst
index 9a6d9e6..23dbfa2 100644
--- a/HISTORY.rst
+++ b/HISTORY.rst
@@ -2,6 +2,10 @@
History
=======
+* Return ``None`` for ``availability_zone_id`` when the underlying endpoint returns HTTP 404.
+
+ Thanks to Amir Rossert in `PR #350 <https://github.com/adamchainz/ec2-metadata/pull/350>`__.
+
2.9.0 (2022-03-25)
------------------
diff --git a/README.rst b/README.rst
index 1671b48..f435b1f 100644
--- a/README.rst
+++ b/README.rst
@@ -133,8 +133,8 @@ See AWS docs page `Retrieve the target lifecycle state through instance metadata
The name of the current AZ e.g. ``'eu-west-1a'``.
-``availability_zone_id: str``
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+``availability_zone_id: str | None``
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The unique, cross-account ID of the current AZ e.g. ``'use1-az6'``.
See AWS docs page `AZ IDs for your AWS resources
diff --git a/src/ec2_metadata/__init__.py b/src/ec2_metadata/__init__.py
index 0db8376..520d9aa 100644
--- a/src/ec2_metadata/__init__.py
+++ b/src/ec2_metadata/__init__.py
@@ -98,8 +98,13 @@ class EC2Metadata(BaseLazyObject):
return self._get_url(f"{self.metadata_url}placement/availability-zone").text
@cached_property
- def availability_zone_id(self) -> str:
- return self._get_url(f"{self.metadata_url}placement/availability-zone-id").text
+ def availability_zone_id(self) -> str | None:
+ resp = self._get_url(
+ f"{self.metadata_url}placement/availability-zone-id", allow_404=True
+ )
+ if resp.status_code == 404:
+ return None
+ return resp.text
@cached_property
def ami_launch_index(self) -> int:
| adamchainz/ec2-metadata | d72b0da489514e46519f49bd080860afb9fd3189 | diff --git a/tests/test_ec2_metadata.py b/tests/test_ec2_metadata.py
index 24f6af1..55f701b 100644
--- a/tests/test_ec2_metadata.py
+++ b/tests/test_ec2_metadata.py
@@ -161,6 +161,14 @@ def test_availability_zone_id(em_requests_mock):
assert ec2_metadata.availability_zone_id == "use1-az6"
+def test_availability_zone_id_none(em_requests_mock):
+ em_requests_mock.get(
+ "http://169.254.169.254/latest/meta-data/placement/availability-zone-id",
+ status_code=404,
+ )
+ assert ec2_metadata.availability_zone_id is None
+
+
def test_domain(em_requests_mock):
em_requests_mock.get(
"http://169.254.169.254/latest/meta-data/services/domain",
| availability_zone_id may return 404
### Python Version
3.8.10
### Package Version
2.7.0
### Description
In some cases, accessing `ec2_metadata.availability_zone_id` raises a 404 error:
```
----> 1 ec2_metadata.availability_zone_id
File functools.py:967, in __get__(self, instance, owner)
File ec2_metadata/__init__.py:87, in availability_zone_id(self)
File ec2_metadata/__init__.py:65, in _get_url(self, url, allow_404)
File requests/models.py:960, in raise_for_status(self)
HTTPError: 404 Client Error: Not Found for url: http://169.254.169.254/latest/meta-data/placement/availability-zone-id
```
I assume that all that is needed is to use `allow_404` in `_get_url` | 0.0 | d72b0da489514e46519f49bd080860afb9fd3189 | [
"tests/test_ec2_metadata.py::test_availability_zone_id_none"
] | [
"tests/test_ec2_metadata.py::test_instance_profile_name",
"tests/test_ec2_metadata.py::test_network_interface_repr",
"tests/test_ec2_metadata.py::test_public_hostname_none",
"tests/test_ec2_metadata.py::test_tags_one",
"tests/test_ec2_metadata.py::test_kernel_id",
"tests/test_ec2_metadata.py::test_network_interface_owner_id",
"tests/test_ec2_metadata.py::test_instance_profile_id",
"tests/test_ec2_metadata.py::test_instance_type",
"tests/test_ec2_metadata.py::test_iam_info_none",
"tests/test_ec2_metadata.py::test_network_interface_device_number",
"tests/test_ec2_metadata.py::test_instance_profile_arn_none",
"tests/test_ec2_metadata.py::test_network_interface_subnet_ipv4_cidr_block",
"tests/test_ec2_metadata.py::test_region",
"tests/test_ec2_metadata.py::test_network_interface_subnet_id",
"tests/test_ec2_metadata.py::test_autoscaling_target_lifecycle_state_none",
"tests/test_ec2_metadata.py::test_network_interface_vpc_ipv4_cidr_block",
"tests/test_ec2_metadata.py::test_network_interface_ipv6s",
"tests/test_ec2_metadata.py::test_tags_multiple",
"tests/test_ec2_metadata.py::test_ami_id_cached",
"tests/test_ec2_metadata.py::test_spot_instance_action_none",
"tests/test_ec2_metadata.py::test_account_id_token_error",
"tests/test_ec2_metadata.py::test_security_groups_emptystring",
"tests/test_ec2_metadata.py::test_instance_profile_arn",
"tests/test_ec2_metadata.py::test_network_interface_vpc_ipv6_cidr_blocks_none",
"tests/test_ec2_metadata.py::test_instance_identity",
"tests/test_ec2_metadata.py::test_network_interface_private_ipv4s",
"tests/test_ec2_metadata.py::test_tags_len",
"tests/test_ec2_metadata.py::test_network_interface_public_ipv4s_empty",
"tests/test_ec2_metadata.py::test_domain",
"tests/test_ec2_metadata.py::test_ami_id_cached_cleared",
"tests/test_ec2_metadata.py::test_public_ipv4_none",
"tests/test_ec2_metadata.py::test_network_interfaces",
"tests/test_ec2_metadata.py::test_tags_empty",
"tests/test_ec2_metadata.py::test_ami_manifest_path",
"tests/test_ec2_metadata.py::test_network_interface_ipv4_associations",
"tests/test_ec2_metadata.py::test_network_interface_vpc_ipv6_cidr_blocks",
"tests/test_ec2_metadata.py::test_availability_zone",
"tests/test_ec2_metadata.py::test_network_interface_vpc_id",
"tests/test_ec2_metadata.py::test_network_interface_not_equal",
"tests/test_ec2_metadata.py::test_kernel_id_none",
"tests/test_ec2_metadata.py::test_private_hostname",
"tests/test_ec2_metadata.py::test_network_interface_public_ipv4s",
"tests/test_ec2_metadata.py::test_iam_security_credentials_none",
"tests/test_ec2_metadata.py::test_account_id_error",
"tests/test_ec2_metadata.py::test_iam_security_credentials",
"tests/test_ec2_metadata.py::test_instance_profile_id_none",
"tests/test_ec2_metadata.py::test_tags_repeat_access",
"tests/test_ec2_metadata.py::test_network_interface_subnet_ipv6_cidr_blocks_none",
"tests/test_ec2_metadata.py::test_instance_action",
"tests/test_ec2_metadata.py::test_network_interface_vpc_ipv4_cidr_blocks_none",
"tests/test_ec2_metadata.py::test_autoscaling_target_lifecycle_state_in_service",
"tests/test_ec2_metadata.py::test_account_id",
"tests/test_ec2_metadata.py::test_tags_not_enabled",
"tests/test_ec2_metadata.py::test_network_interface_public_hostname_none",
"tests/test_ec2_metadata.py::test_network_interface_interface_id",
"tests/test_ec2_metadata.py::test_availability_zone_id",
"tests/test_ec2_metadata.py::test_user_data_none",
"tests/test_ec2_metadata.py::test_reservation_id",
"tests/test_ec2_metadata.py::test_network_interface_public_hostname",
"tests/test_ec2_metadata.py::test_network_interface_not_equal_class",
"tests/test_ec2_metadata.py::test_network_interface_ipv6s_none",
"tests/test_ec2_metadata.py::test_iam_info_unexpected",
"tests/test_ec2_metadata.py::test_security_groups_two",
"tests/test_ec2_metadata.py::test_spot_instance_action",
"tests/test_ec2_metadata.py::test_network_interface_private_hostname",
"tests/test_ec2_metadata.py::test_custom_session",
"tests/test_ec2_metadata.py::test_network_interface_equal",
"tests/test_ec2_metadata.py::test_user_data_something",
"tests/test_ec2_metadata.py::test_network_interface_vpc_ipv4_cidr_blocks",
"tests/test_ec2_metadata.py::test_security_groups_single",
"tests/test_ec2_metadata.py::test_partition",
"tests/test_ec2_metadata.py::test_instance_id",
"tests/test_ec2_metadata.py::test_network_interface_subnet_ipv4_cidr_block_none",
"tests/test_ec2_metadata.py::test_mac",
"tests/test_ec2_metadata.py::test_private_ipv4",
"tests/test_ec2_metadata.py::test_ami_launch_index",
"tests/test_ec2_metadata.py::test_public_hostname",
"tests/test_ec2_metadata.py::test_tags_iter",
"tests/test_ec2_metadata.py::test_public_ipv4",
"tests/test_ec2_metadata.py::test_network_interface_subnet_ipv6_cidr_blocks",
"tests/test_ec2_metadata.py::test_ami_id",
"tests/test_ec2_metadata.py::test_network_interface_vpc_ipv4_cidr_block_none",
"tests/test_ec2_metadata.py::test_network_interface_security_group_ids",
"tests/test_ec2_metadata.py::test_iam_info",
"tests/test_ec2_metadata.py::test_network_interface_security_groups"
] | {
"failed_lite_validators": [
"has_hyperlinks",
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false
} | 2022-03-06 15:47:09+00:00 | mit | 891 |
|
adamchainz__pip-lock-121 | diff --git a/HISTORY.rst b/HISTORY.rst
index 9f9cc5b..568f9d1 100644
--- a/HISTORY.rst
+++ b/HISTORY.rst
@@ -12,6 +12,7 @@ History
``importlib.metadata.version("pip-lock")``
(`docs <https://docs.python.org/3.8/library/importlib.metadata.html#distribution-versions>`__ /
`backport <https://pypi.org/project/importlib-metadata/>`__).
+* Fix parsing of package names featuring extras e.g. ``package[extra1,extra2]``.
2.0.0 (2019-02-28)
------------------
diff --git a/src/pip_lock.py b/src/pip_lock.py
index 9d93bfa..3c1e7a0 100644
--- a/src/pip_lock.py
+++ b/src/pip_lock.py
@@ -40,8 +40,11 @@ def get_package_versions(lines, ignore_external=False):
if ignore_external and line.startswith("-e"):
continue
- name, version_plus = line.split("==", 1)
- versions[name.lower()] = version_plus.split(" ", 1)[0]
+ full_name, version_and_extras = line.split("==", 1)
+ # Strip extras
+ name = full_name.split("[", 1)[0].lower()
+ version = version_and_extras.split(" ", 1)[0]
+ versions[name] = version
return versions
| adamchainz/pip-lock | 4a0e0106254aeceb13f7ff068ce295424b1d44a2 | diff --git a/tests/test_pip_lock.py b/tests/test_pip_lock.py
index da3f02a..c4a3881 100644
--- a/tests/test_pip_lock.py
+++ b/tests/test_pip_lock.py
@@ -17,7 +17,7 @@ def create_file(tmpdir, name, text):
return str(t)
-class TestReadPip(object):
+class TestReadPip:
def test_read(self, tmpdir):
path = create_file(tmpdir, "requirements.txt", "package1==1.0\npackage2==1.1")
assert read_pip(path) == ["package1==1.0", "package2==1.1"]
@@ -33,7 +33,7 @@ class TestReadPip(object):
assert read_pip(path) == [""]
-class TestGetPackageVersion(object):
+class TestGetPackageVersion:
def test_version(self):
assert get_package_versions(["package1==1.0", "package2==1.1"]) == {
"package1": "1.0",
@@ -53,7 +53,7 @@ class TestGetPackageVersion(object):
assert get_package_versions(["https://www.google.com"]) == {}
-class TestGetMismatches(object):
+class TestGetMismatches:
def setUp(self, tmpdir):
super(TestGetMismatches, self).setUp()
self.requirements_path = create_file(
@@ -115,8 +115,17 @@ class TestGetMismatches(object):
assert get_mismatches(requirements_path) == {}
+ @mock.patch("pip_lock.pip_freeze")
+ def test_package_with_extra(self, pip_freeze, tmpdir):
+ pip_freeze.return_value = ["package==1.1"]
+ requirements_path = create_file(
+ tmpdir, "requirements.txt", "package[anextra]==1.1"
+ )
+
+ assert get_mismatches(requirements_path) == {}
+
-class TestPrintErrors(object):
+class TestPrintErrors:
def test_errors(self, capsys):
print_errors(["error message 1", "error message 2"])
_, err = capsys.readouterr()
@@ -131,7 +140,7 @@ class TestPrintErrors(object):
assert "post text" in err
-class TestCheckRequirements(object):
+class TestCheckRequirements:
@mock.patch("pip_lock.get_mismatches")
def test_no_mismatches(self, get_mismatches):
get_mismatches.return_value = {}
| pip-lock does not detect dependencies with setup extras
There are requirement mismatches with requirements.txt
* Package django-storages[boto3] is in requirements.txt but not in virtualenv | 0.0 | 4a0e0106254aeceb13f7ff068ce295424b1d44a2 | [
"tests/test_pip_lock.py::TestGetMismatches::test_package_with_extra"
] | [
"tests/test_pip_lock.py::TestReadPip::test_read",
"tests/test_pip_lock.py::TestReadPip::test_include",
"tests/test_pip_lock.py::TestReadPip::test_empty",
"tests/test_pip_lock.py::TestGetPackageVersion::test_version",
"tests/test_pip_lock.py::TestGetPackageVersion::test_ignore_empty",
"tests/test_pip_lock.py::TestGetPackageVersion::test_ignore_comments",
"tests/test_pip_lock.py::TestGetPackageVersion::test_ignore_includes",
"tests/test_pip_lock.py::TestGetPackageVersion::test_ignore_urls",
"tests/test_pip_lock.py::TestGetMismatches::test_relative_requirements_file",
"tests/test_pip_lock.py::TestGetMismatches::test_version_mismatch",
"tests/test_pip_lock.py::TestGetMismatches::test_missing",
"tests/test_pip_lock.py::TestGetMismatches::test_no_mismatches",
"tests/test_pip_lock.py::TestGetMismatches::test_no_mismatches_case_insensitive",
"tests/test_pip_lock.py::TestGetMismatches::test_empty",
"tests/test_pip_lock.py::TestGetMismatches::test_editable_packages",
"tests/test_pip_lock.py::TestPrintErrors::test_errors",
"tests/test_pip_lock.py::TestPrintErrors::test_pre_post_text",
"tests/test_pip_lock.py::TestCheckRequirements::test_no_mismatches",
"tests/test_pip_lock.py::TestCheckRequirements::test_mismatches",
"tests/test_pip_lock.py::TestCheckRequirements::test_relative_requirements_file"
] | {
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false
} | 2019-12-19 11:50:52+00:00 | isc | 892 |
|
adamchainz__pip-lock-208 | diff --git a/HISTORY.rst b/HISTORY.rst
index ed86591..e15c981 100644
--- a/HISTORY.rst
+++ b/HISTORY.rst
@@ -2,6 +2,9 @@
History
=======
+* Fix to ignore lines that represent arguments to pip, such as
+ ``--find-links``.
+
2.2.0 (2020-12-13)
------------------
diff --git a/src/pip_lock.py b/src/pip_lock.py
index 38edddf..a050b3f 100644
--- a/src/pip_lock.py
+++ b/src/pip_lock.py
@@ -27,7 +27,7 @@ def get_package_versions(lines, ignore_external_and_at=False):
for line in lines:
line = line.strip()
- if len(line) == 0 or line.startswith("#") or line.startswith("-r "):
+ if len(line) == 0 or line.startswith(("#", "-")):
continue
if line.startswith("https://"):
| adamchainz/pip-lock | a70ab3c56a094044a1de38f9c66c29d9f022b274 | diff --git a/tests/test_pip_lock.py b/tests/test_pip_lock.py
index cca27a5..7dde0c4 100644
--- a/tests/test_pip_lock.py
+++ b/tests/test_pip_lock.py
@@ -49,6 +49,9 @@ class TestGetPackageVersion:
def test_ignore_includes(self):
assert get_package_versions(["-r example.txt"]) == {}
+ def test_ignore_arguments(self):
+ assert get_package_versions(["--find-links file:./wheels"]) == {}
+
def test_ignore_urls(self):
assert get_package_versions(["https://www.google.com"]) == {}
| Crashes on lines providing arguments to pip
For example `--find-links` | 0.0 | a70ab3c56a094044a1de38f9c66c29d9f022b274 | [
"tests/test_pip_lock.py::TestGetPackageVersion::test_ignore_arguments"
] | [
"tests/test_pip_lock.py::TestReadPip::test_read",
"tests/test_pip_lock.py::TestReadPip::test_include",
"tests/test_pip_lock.py::TestReadPip::test_empty",
"tests/test_pip_lock.py::TestGetPackageVersion::test_version",
"tests/test_pip_lock.py::TestGetPackageVersion::test_ignore_empty",
"tests/test_pip_lock.py::TestGetPackageVersion::test_ignore_comments",
"tests/test_pip_lock.py::TestGetPackageVersion::test_ignore_includes",
"tests/test_pip_lock.py::TestGetPackageVersion::test_ignore_urls",
"tests/test_pip_lock.py::TestGetMismatches::test_relative_requirements_file",
"tests/test_pip_lock.py::TestGetMismatches::test_version_mismatch",
"tests/test_pip_lock.py::TestGetMismatches::test_missing",
"tests/test_pip_lock.py::TestGetMismatches::test_no_mismatches",
"tests/test_pip_lock.py::TestGetMismatches::test_no_mismatches_case_insensitive",
"tests/test_pip_lock.py::TestGetMismatches::test_empty",
"tests/test_pip_lock.py::TestGetMismatches::test_editable_packages_ignored",
"tests/test_pip_lock.py::TestGetMismatches::test_at_packages_ignored",
"tests/test_pip_lock.py::TestGetMismatches::test_package_with_extra",
"tests/test_pip_lock.py::TestPrintErrors::test_errors",
"tests/test_pip_lock.py::TestPrintErrors::test_pre_post_text",
"tests/test_pip_lock.py::TestCheckRequirements::test_no_mismatches",
"tests/test_pip_lock.py::TestCheckRequirements::test_mismatches",
"tests/test_pip_lock.py::TestCheckRequirements::test_relative_requirements_file"
] | {
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false
} | 2021-01-22 11:34:16+00:00 | mit | 893 |
|
adamchainz__time-machine-101 | diff --git a/HISTORY.rst b/HISTORY.rst
index 0178c5f..3548383 100644
--- a/HISTORY.rst
+++ b/HISTORY.rst
@@ -2,6 +2,8 @@
History
=======
+* Prevent ``ImportError`` on Windows where ``time.tzset()`` is unavailable.
+
2.0.0 (2021-01-17)
------------------
diff --git a/src/time_machine.py b/src/time_machine.py
index ed2cfb2..9b4de81 100644
--- a/src/time_machine.py
+++ b/src/time_machine.py
@@ -5,7 +5,6 @@ import os
import sys
import uuid
from time import gmtime as orig_gmtime
-from time import tzset
from types import GeneratorType
from typing import Optional
from unittest import TestCase, mock
@@ -21,6 +20,12 @@ except ImportError:
# Dummy value that won't compare equal to any value
CLOCK_REALTIME = float("inf")
+try:
+ from time import tzset
+except ImportError: # pragma: no cover
+ # Windows
+ tzset = None
+
try:
# Python 3.8+ or have installed backports.zoneinfo
from zoneinfo import ZoneInfo
@@ -137,13 +142,13 @@ class Coordinates:
self._start()
def _start(self):
- if self._destination_tzname is not None:
+ if tzset is not None and self._destination_tzname is not None:
self._orig_tz = os.environ.get("TZ")
os.environ["TZ"] = self._destination_tzname
tzset()
def _stop(self):
- if self._destination_tzname is not None:
+ if tzset is not None and self._destination_tzname is not None:
if self._orig_tz is None:
del os.environ["TZ"]
else:
| adamchainz/time-machine | 5b2536a1bfb47dc0619d7d03302cf1edc5a51112 | diff --git a/tests/test_time_machine.py b/tests/test_time_machine.py
index 8f6aa5f..ce7695b 100644
--- a/tests/test_time_machine.py
+++ b/tests/test_time_machine.py
@@ -379,6 +379,17 @@ def test_destination_datetime_tzinfo_zoneinfo_nested():
assert time.tzname == orig_tzname
[email protected](ZoneInfo is None, reason="Requires ZoneInfo")
+def test_destination_datetime_tzinfo_zoneinfo_windows():
+ orig_timezone = time.timezone
+
+ pretend_windows_no_tzset = mock.patch.object(time_machine, "tzset", new=None)
+
+ dest = LIBRARY_EPOCH_DATETIME.replace(tzinfo=ZoneInfo("Africa/Addis_Ababa"))
+ with pretend_windows_no_tzset, time_machine.travel(dest):
+ assert time.timezone == orig_timezone
+
+
@time_machine.travel(EPOCH_DATETIME.replace(tzinfo=None) + dt.timedelta(seconds=120))
def test_destination_datetime_naive():
assert time.time() == EPOCH + 120.0
| [v2] ImportError for tzset on Windows
In one of my projects, I received an automatic update bumping time-machine to the newly published v2, and my CI crashes on Windows (Python 3.7, 3.8, and 3.9) with the following error:
```
ImportError: cannot import name 'tzset' from 'time' (unknown location)
```
I don't own a Windows machine, but one can check the logs here: https://github.com/browniebroke/netlify-builds/pull/27/checks?check_run_id=1716666697#step:6:52
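For reference, `time.tzset()` exists only on Unix, so the usual remedy is a guarded import — the pattern the patch above introduces:
```python
try:
    from time import tzset
except ImportError:  # pragma: no cover
    # time.tzset() is Unix-only; it does not exist on Windows.
    tzset = None

# Call sites then skip timezone handling when it is unavailable:
if tzset is not None:
    tzset()
```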
Not really urgent or anything, but I thought I'd report the problem. Is it a missing dependency or something different with the stdlib? | 0.0 | 5b2536a1bfb47dc0619d7d03302cf1edc5a51112 | [
"tests/test_time_machine.py::test_destination_datetime_tzinfo_zoneinfo_windows",
"tests/test_time_machine.py::test_traveller_object",
"tests/test_time_machine.py::test_move_to_datetime_with_tzinfo_zoneinfo",
"tests/test_time_machine.py::test_fixture_unused"
] | [
"tests/test_time_machine.py::test_import_without_clock_realtime",
"tests/test_time_machine.py::test_datetime_now_no_args",
"tests/test_time_machine.py::test_datetime_now_no_args_no_tick",
"tests/test_time_machine.py::test_datetime_now_arg",
"tests/test_time_machine.py::test_datetime_utcnow",
"tests/test_time_machine.py::test_datetime_utcnow_no_tick",
"tests/test_time_machine.py::test_date_today",
"tests/test_time_machine.py::test_time_clock_gettime_realtime",
"tests/test_time_machine.py::test_time_clock_gettime_monotonic_unaffected",
"tests/test_time_machine.py::test_time_clock_gettime_ns_realtime",
"tests/test_time_machine.py::test_time_clock_gettime_ns_monotonic_unaffected",
"tests/test_time_machine.py::test_time_gmtime_no_args",
"tests/test_time_machine.py::test_time_gmtime_no_args_no_tick",
"tests/test_time_machine.py::test_time_gmtime_arg",
"tests/test_time_machine.py::test_time_localtime",
"tests/test_time_machine.py::test_time_localtime_no_tick",
"tests/test_time_machine.py::test_time_localtime_arg",
"tests/test_time_machine.py::test_time_strftime_no_args",
"tests/test_time_machine.py::test_time_strftime_no_args_no_tick",
"tests/test_time_machine.py::test_time_strftime_arg",
"tests/test_time_machine.py::test_time_time",
"tests/test_time_machine.py::test_time_time_windows",
"tests/test_time_machine.py::test_time_time_no_tick",
"tests/test_time_machine.py::test_time_time_ns",
"tests/test_time_machine.py::test_time_time_ns_no_tick",
"tests/test_time_machine.py::test_nestable",
"tests/test_time_machine.py::test_unsupported_type",
"tests/test_time_machine.py::test_exceptions_dont_break_it",
"tests/test_time_machine.py::test_destination_datetime",
"tests/test_time_machine.py::test_destination_datetime_tzinfo_non_zoneinfo",
"tests/test_time_machine.py::test_destination_datetime_tzinfo_zoneinfo",
"tests/test_time_machine.py::test_destination_datetime_tzinfo_zoneinfo_nested",
"tests/test_time_machine.py::test_destination_datetime_naive",
"tests/test_time_machine.py::test_destination_date",
"tests/test_time_machine.py::test_destination_string",
"tests/test_time_machine.py::test_destination_callable_lambda_float",
"tests/test_time_machine.py::test_destination_callable_lambda_string",
"tests/test_time_machine.py::test_destination_generator",
"tests/test_time_machine.py::test_function_decorator",
"tests/test_time_machine.py::test_coroutine_decorator",
"tests/test_time_machine.py::test_class_decorator_fails_non_testcase",
"tests/test_time_machine.py::UnitTestMethodTests::test_method_decorator",
"tests/test_time_machine.py::UnitTestClassTests::test_class_decorator",
"tests/test_time_machine.py::UnitTestClassTests::test_stacked_method_decorator",
"tests/test_time_machine.py::UnitTestClassCustomSetUpClassTests::test_class_decorator",
"tests/test_time_machine.py::test_shift_with_timedelta",
"tests/test_time_machine.py::test_shift_integer_delta",
"tests/test_time_machine.py::test_shift_negative_delta",
"tests/test_time_machine.py::test_shift_wrong_delta",
"tests/test_time_machine.py::test_shift_when_tick",
"tests/test_time_machine.py::test_move_to_datetime",
"tests/test_time_machine.py::test_move_to_datetime_no_tick",
"tests/test_time_machine.py::test_move_to_past_datetime",
"tests/test_time_machine.py::test_uuid1",
"tests/test_time_machine.py::test_fixture_used",
"tests/test_time_machine.py::test_fixture_used_twice"
] | {
"failed_lite_validators": [
"has_hyperlinks",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false
} | 2021-01-18 10:34:15+00:00 | mit | 894 |
|
adamchainz__time-machine-152 | diff --git a/HISTORY.rst b/HISTORY.rst
index 3265158..5390cf4 100644
--- a/HISTORY.rst
+++ b/HISTORY.rst
@@ -2,6 +2,10 @@
History
=======
+* Allow passing ``tick`` to ``Coordinates.move_to()`` and the pytest fixture’s
+ ``time_machine.move_to()``. This allows freezing or unfreezing of time when
+ travelling.
+
2.2.0 (2021-07-02)
------------------
diff --git a/README.rst b/README.rst
index c4dc27b..d95ff70 100644
--- a/README.rst
+++ b/README.rst
@@ -235,12 +235,14 @@ Here’s a worked example changing the current timezone:
The ``start()`` method and entry of the context manager both return a ``Coordinates`` object that corresponds to the given "trip" in time.
This has a couple methods that can be used to travel to other times.
-``move_to(destination)``
-^^^^^^^^^^^^^^^^^^^^^^^^
+``move_to(destination, tick=None)``
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
``move_to()`` moves the current time to a new destination.
``destination`` may be any of the types supported by ``travel``.
+``tick`` may be set to a boolean, to change the ``tick`` flag of ``travel``.
+
For example:
.. code-block:: python
diff --git a/src/time_machine/__init__.py b/src/time_machine/__init__.py
index 7f297a9..a8302b8 100644
--- a/src/time_machine/__init__.py
+++ b/src/time_machine/__init__.py
@@ -163,12 +163,18 @@ class Coordinates:
self._destination_timestamp_ns += int(total_seconds * NANOSECONDS_PER_SECOND)
- def move_to(self, destination: DestinationType) -> None:
+ def move_to(
+ self,
+ destination: DestinationType,
+ tick: Optional[bool] = None,
+ ) -> None:
self._stop()
timestamp, self._destination_tzname = extract_timestamp_tzname(destination)
self._destination_timestamp_ns = int(timestamp * NANOSECONDS_PER_SECOND)
self._requested = False
self._start()
+ if tick is not None:
+ self._tick = tick
def _start(self) -> None:
if HAVE_TZSET and self._destination_tzname is not None:
@@ -421,13 +427,19 @@ if pytest is not None: # pragma: no branch
self.traveller = None
self.coordinates = None
- def move_to(self, destination: DestinationType) -> None:
+ def move_to(
+ self,
+ destination: DestinationType,
+ tick: Optional[bool] = None,
+ ) -> None:
if self.traveller is None:
- self.traveller = travel(destination)
+ if tick is None:
+ tick = True
+ self.traveller = travel(destination, tick=tick)
self.coordinates = self.traveller.start()
else:
assert self.coordinates is not None
- self.coordinates.move_to(destination)
+ self.coordinates.move_to(destination, tick=tick)
def stop(self) -> None:
if self.traveller is not None:
| adamchainz/time-machine | 665670beddc1f330ce182984bfd1fddeda0587a1 | diff --git a/tests/test_time_machine.py b/tests/test_time_machine.py
index ccf3862..175a7d5 100644
--- a/tests/test_time_machine.py
+++ b/tests/test_time_machine.py
@@ -633,6 +633,20 @@ def test_move_to_datetime_with_tzinfo_zoneinfo():
assert time.daylight == orig_daylight
+def test_move_to_datetime_change_tick_on():
+ with time_machine.travel(EPOCH, tick=False) as traveller:
+ traveller.move_to(EPOCH_PLUS_ONE_YEAR_DATETIME, tick=True)
+ assert time.time() == EPOCH_PLUS_ONE_YEAR
+ assert time.time() > EPOCH_PLUS_ONE_YEAR
+
+
+def test_move_to_datetime_change_tick_off():
+ with time_machine.travel(EPOCH, tick=True) as traveller:
+ traveller.move_to(EPOCH_PLUS_ONE_YEAR_DATETIME, tick=False)
+ assert time.time() == EPOCH_PLUS_ONE_YEAR
+ assert time.time() == EPOCH_PLUS_ONE_YEAR
+
+
# uuid tests
@@ -664,6 +678,19 @@ def test_fixture_used(time_machine):
assert time.time() == EPOCH
+def test_fixture_used_tick_false(time_machine):
+ time_machine.move_to(EPOCH, tick=False)
+ assert time.time() == EPOCH
+ assert time.time() == EPOCH
+
+
+def test_fixture_used_tick_true(time_machine):
+ time_machine.move_to(EPOCH, tick=True)
+ original = time.time()
+ assert original == EPOCH
+ assert original < time.time() < EPOCH + 10.0
+
+
def test_fixture_used_twice(time_machine):
time_machine.move_to(EPOCH)
assert time.time() == EPOCH
| Freeze time instead of just shifting it
Hi! This library looks very cool, even more so with the added pytest support. I have a question which might be more about me not understanding the use cases or purpose than about the tool itself.
I thought I would be able to tell the library: everytime I call now I want you to return this. Instead what I found is that when I call the `move_to` method in the time_machine fixture, time goes back to that time but the starts moving forward accurately. What I wanted to be able to do is to always get the same value when utcnow is called so I can test the value of queries that I am generating on the fly.
Is this possible? Thanks!
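For reference, here is a minimal sketch of how the `tick` flag wired up in this patch addresses exactly this, using the pytest fixture; the destination constant is just an illustrative value:
```python
import datetime as dt

import time_machine

DESTINATION = dt.datetime(2015, 10, 21, tzinfo=dt.timezone.utc)  # illustrative


def test_frozen_clock(time_machine):
    # tick=False freezes the clock, so repeated calls return the same value.
    time_machine.move_to(DESTINATION, tick=False)
    assert dt.datetime.utcnow() == dt.datetime.utcnow()

    # tick=True resumes a normally ticking clock from the new destination.
    time_machine.move_to(DESTINATION, tick=True)
    first = dt.datetime.utcnow()
    assert dt.datetime.utcnow() >= first
```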
| 0.0 | 665670beddc1f330ce182984bfd1fddeda0587a1 | [
"tests/test_time_machine.py::test_move_to_datetime_change_tick_on",
"tests/test_time_machine.py::test_move_to_datetime_change_tick_off",
"tests/test_time_machine.py::test_fixture_used_tick_false",
"tests/test_time_machine.py::test_fixture_used_tick_true"
] | [
"tests/test_time_machine.py::test_import_without_clock_realtime",
"tests/test_time_machine.py::test_datetime_now_no_args",
"tests/test_time_machine.py::test_datetime_now_no_args_no_tick",
"tests/test_time_machine.py::test_datetime_now_arg",
"tests/test_time_machine.py::test_datetime_utcnow",
"tests/test_time_machine.py::test_datetime_utcnow_no_tick",
"tests/test_time_machine.py::test_date_today",
"tests/test_time_machine.py::test_time_clock_gettime_realtime",
"tests/test_time_machine.py::test_time_clock_gettime_monotonic_unaffected",
"tests/test_time_machine.py::test_time_clock_gettime_ns_realtime",
"tests/test_time_machine.py::test_time_clock_gettime_ns_monotonic_unaffected",
"tests/test_time_machine.py::test_time_gmtime_no_args",
"tests/test_time_machine.py::test_time_gmtime_no_args_no_tick",
"tests/test_time_machine.py::test_time_gmtime_arg",
"tests/test_time_machine.py::test_time_localtime",
"tests/test_time_machine.py::test_time_localtime_no_tick",
"tests/test_time_machine.py::test_time_localtime_arg",
"tests/test_time_machine.py::test_time_strftime_no_args",
"tests/test_time_machine.py::test_time_strftime_no_args_no_tick",
"tests/test_time_machine.py::test_time_strftime_arg",
"tests/test_time_machine.py::test_time_time",
"tests/test_time_machine.py::test_time_time_windows",
"tests/test_time_machine.py::test_time_time_no_tick",
"tests/test_time_machine.py::test_time_time_ns",
"tests/test_time_machine.py::test_time_time_ns_no_tick",
"tests/test_time_machine.py::test_nestable",
"tests/test_time_machine.py::test_unsupported_type",
"tests/test_time_machine.py::test_exceptions_dont_break_it",
"tests/test_time_machine.py::test_destination_datetime",
"tests/test_time_machine.py::test_destination_datetime_tzinfo_non_zoneinfo",
"tests/test_time_machine.py::test_destination_datetime_tzinfo_zoneinfo",
"tests/test_time_machine.py::test_destination_datetime_tzinfo_zoneinfo_nested",
"tests/test_time_machine.py::test_destination_datetime_tzinfo_zoneinfo_windows",
"tests/test_time_machine.py::test_destination_int",
"tests/test_time_machine.py::test_destination_datetime_naive",
"tests/test_time_machine.py::test_destination_date",
"tests/test_time_machine.py::test_destination_string",
"tests/test_time_machine.py::test_destination_callable_lambda_float",
"tests/test_time_machine.py::test_destination_callable_lambda_string",
"tests/test_time_machine.py::test_destination_generator",
"tests/test_time_machine.py::test_traveller_object",
"tests/test_time_machine.py::test_function_decorator",
"tests/test_time_machine.py::test_coroutine_decorator",
"tests/test_time_machine.py::test_class_decorator_fails_non_testcase",
"tests/test_time_machine.py::UnitTestMethodTests::test_method_decorator",
"tests/test_time_machine.py::UnitTestClassTests::test_class_decorator",
"tests/test_time_machine.py::UnitTestClassTests::test_stacked_method_decorator",
"tests/test_time_machine.py::UnitTestClassCustomSetUpClassTests::test_class_decorator",
"tests/test_time_machine.py::test_shift_with_timedelta",
"tests/test_time_machine.py::test_shift_integer_delta",
"tests/test_time_machine.py::test_shift_negative_delta",
"tests/test_time_machine.py::test_shift_wrong_delta",
"tests/test_time_machine.py::test_shift_when_tick",
"tests/test_time_machine.py::test_move_to_datetime",
"tests/test_time_machine.py::test_move_to_datetime_no_tick",
"tests/test_time_machine.py::test_move_to_past_datetime",
"tests/test_time_machine.py::test_move_to_datetime_with_tzinfo_zoneinfo",
"tests/test_time_machine.py::test_uuid1",
"tests/test_time_machine.py::test_fixture_unused",
"tests/test_time_machine.py::test_fixture_used",
"tests/test_time_machine.py::test_fixture_used_twice"
] | {
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false
} | 2021-07-05 15:02:34+00:00 | mit | 895 |
|
adamchainz__time-machine-207 | diff --git a/HISTORY.rst b/HISTORY.rst
index 78e067b..7af039e 100644
--- a/HISTORY.rst
+++ b/HISTORY.rst
@@ -2,6 +2,10 @@
History
=======
+* Add ``time_machine.escape_hatch``, which provides functions to bypass time-machine.
+
+ Thanks to Matt Pegler for the feature request in `Issue #206 <https://github.com/adamchainz/time-machine/issues/206>`__.
+
2.4.1 (2021-11-27)
------------------
diff --git a/README.rst b/README.rst
index 4532090..f23ce06 100644
--- a/README.rst
+++ b/README.rst
@@ -310,6 +310,52 @@ For example:
assert dt.date.today().isoformat() == "2015-10-21"
+``escape_hatch``
+----------------
+
+The ``escape_hatch`` object provides functions to bypass time-machine.
+These allow you to call the real datetime functions, without any mocking.
+It also provides a way to check if time-machine is currently time travelling.
+
+These capabilities are useful in rare circumstances.
+For example, if you need to authenticate with an external service during time travel, you may need the real value of ``datetime.now()``.
+
+The functions are:
+
+* ``escape_hatch.is_travelling() -> bool`` - returns ``True`` if ``time_machine.travel()`` is active, ``False`` otherwise.
+
+* ``escape_hatch.datetime.datetime.now()`` - wraps the real ``datetime.datetime.now()``.
+
+* ``escape_hatch.datetime.datetime.utcnow()`` - wraps the real ``datetime.datetime.utcnow()``.
+
+* ``escape_hatch.time.clock_gettime()`` - wraps the real ``time.clock_gettime()``.
+
+* ``escape_hatch.time.clock_gettime_ns()`` - wraps the real ``time.clock_gettime_ns()``.
+
+* ``escape_hatch.time.gmtime()`` - wraps the real ``time.gmtime()``.
+
+* ``escape_hatch.time.localtime()`` - wraps the real ``time.localtime()``.
+
+* ``escape_hatch.time.strftime()`` - wraps the real ``time.strftime()``.
+
+* ``escape_hatch.time.time()`` - wraps the real ``time.time()``.
+
+* ``escape_hatch.time.time_ns()`` - wraps the real ``time.time_ns()``.
+
+For example:
+
+.. code-block:: python
+
+ import time_machine
+
+
+ with time_machine.travel(...):
+ if time_machine.escape_hatch.is_travelling():
+ print("We need to go back to the future!")
+
+ real_now = time_machine.escape_hatch.datetime.datetime.now()
+ external_authenticate(now=real_now)
+
Caveats
=======
diff --git a/src/time_machine/__init__.py b/src/time_machine/__init__.py
index 0e67b1b..4c57870 100644
--- a/src/time_machine/__init__.py
+++ b/src/time_machine/__init__.py
@@ -77,6 +77,9 @@ DestinationType = Union[
TypingGenerator[DestinationBaseType, None, None],
]
+# copied from typeshed:
+_TimeTuple = Tuple[int, int, int, int, int, int, int, int, int]
+
def extract_timestamp_tzname(
destination: DestinationType,
@@ -389,10 +392,6 @@ def localtime(secs: Optional[float] = None) -> struct_time:
return _time_machine.original_localtime(coordinates_stack[-1].time())
-# copied from typeshed:
-_TimeTuple = Tuple[int, int, int, int, int, int, int, int, int]
-
-
def strftime(format: str, t: Union[_TimeTuple, struct_time, None] = None) -> str:
if t is not None:
return _time_machine.original_strftime(format, t)
@@ -453,3 +452,63 @@ if pytest is not None: # pragma: no branch
yield fixture
finally:
fixture.stop()
+
+
+# escape hatch
+
+
+class _EscapeHatchDatetimeDatetime:
+ def now(self, tz: Optional[dt.tzinfo] = None) -> dt.datetime:
+ return _time_machine.original_now(tz)
+
+ def utcnow(self) -> dt.datetime:
+ return _time_machine.original_utcnow()
+
+
+class _EscapeHatchDatetime:
+ def __init__(self) -> None:
+ self.datetime = _EscapeHatchDatetimeDatetime()
+
+
+class _EscapeHatchTime:
+ def clock_gettime(self, clk_id: int) -> float:
+ return _time_machine.original_clock_gettime(clk_id)
+
+ if sys.version_info >= (3, 7):
+
+ def clock_gettime_ns(self, clk_id: int) -> int:
+ return _time_machine.original_clock_gettime_ns(clk_id)
+
+ def gmtime(self, secs: Optional[float] = None) -> struct_time:
+ return _time_machine.original_gmtime(secs)
+
+ def localtime(self, secs: Optional[float] = None) -> struct_time:
+ return _time_machine.original_localtime(secs)
+
+ def strftime(
+ self, format: str, t: Union[_TimeTuple, struct_time, None] = None
+ ) -> str:
+ if t is not None:
+ return _time_machine.original_strftime(format, t)
+ else:
+ return _time_machine.original_strftime(format)
+
+ def time(self) -> float:
+ return _time_machine.original_time()
+
+ if sys.version_info >= (3, 7):
+
+ def time_ns(self) -> int:
+ return _time_machine.original_time_ns()
+
+
+class _EscapeHatch:
+ def __init__(self) -> None:
+ self.datetime = _EscapeHatchDatetime()
+ self.time = _EscapeHatchTime()
+
+ def is_travelling(self) -> bool:
+ return bool(coordinates_stack)
+
+
+escape_hatch = _EscapeHatch()
| adamchainz/time-machine | 29a9f0aa44204c9bf236f350fdc5015fd10e8e2c | diff --git a/tests/test_time_machine.py b/tests/test_time_machine.py
index a23a99e..8cb4f63 100644
--- a/tests/test_time_machine.py
+++ b/tests/test_time_machine.py
@@ -222,18 +222,18 @@ def test_time_localtime_arg():
assert local_time.tm_mday == 1
-def test_time_strftime_no_args():
+def test_time_strftime_format():
with time_machine.travel(EPOCH):
assert time.strftime("%Y-%m-%d") == "1970-01-01"
assert int(time.strftime("%Y")) >= 2020
-def test_time_strftime_no_args_no_tick():
+def test_time_strftime_format_no_tick():
with time_machine.travel(EPOCH, tick=False):
assert time.strftime("%S") == "00"
-def test_time_strftime_arg():
+def test_time_strftime_format_t():
with time_machine.travel(EPOCH):
assert (
time.strftime("%Y-%m-%d", time.localtime(EPOCH_PLUS_ONE_YEAR))
@@ -718,3 +718,100 @@ def test_fixture_used_twice(time_machine):
time_machine.move_to(EPOCH_PLUS_ONE_YEAR)
assert time.time() == EPOCH_PLUS_ONE_YEAR
+
+
+# escape hatch tests
+
+
+class TestEscapeHatch:
+ def test_is_travelling_false(self):
+ assert time_machine.escape_hatch.is_travelling() is False
+
+ def test_is_travelling_true(self):
+ with time_machine.travel(EPOCH):
+ assert time_machine.escape_hatch.is_travelling() is True
+
+ def test_datetime_now(self):
+ real_now = dt.datetime.now()
+
+ with time_machine.travel(EPOCH):
+ eh_now = time_machine.escape_hatch.datetime.datetime.now()
+ assert eh_now >= real_now
+
+ def test_datetime_now_tz(self):
+ real_now = dt.datetime.now(tz=dt.timezone.utc)
+
+ with time_machine.travel(EPOCH):
+ eh_now = time_machine.escape_hatch.datetime.datetime.now(tz=dt.timezone.utc)
+ assert eh_now >= real_now
+
+ def test_datetime_utcnow(self):
+ real_now = dt.datetime.utcnow()
+
+ with time_machine.travel(EPOCH):
+ eh_now = time_machine.escape_hatch.datetime.datetime.utcnow()
+ assert eh_now >= real_now
+
+ @py_have_clock_gettime
+ def test_time_clock_gettime(self):
+ now = time.clock_gettime(time.CLOCK_REALTIME)
+
+ with time_machine.travel(EPOCH + 180.0):
+ eh_now = time_machine.escape_hatch.time.clock_gettime(time.CLOCK_REALTIME)
+ assert eh_now >= now
+
+ @py_3_7_plus
+ @py_have_clock_gettime
+ def test_time_clock_gettime_ns(self):
+ now = time.clock_gettime_ns(time.CLOCK_REALTIME)
+
+ with time_machine.travel(EPOCH + 190.0):
+ eh_now = time_machine.escape_hatch.time.clock_gettime_ns(
+ time.CLOCK_REALTIME
+ )
+ assert eh_now >= now
+
+ def test_time_gmtime(self):
+ now = time.gmtime()
+
+ with time_machine.travel(EPOCH):
+ eh_now = time_machine.escape_hatch.time.gmtime()
+ assert eh_now >= now
+
+ def test_time_localtime(self):
+ now = time.localtime()
+
+ with time_machine.travel(EPOCH):
+ eh_now = time_machine.escape_hatch.time.localtime()
+ assert eh_now >= now
+
+ def test_time_strftime_no_arg(self):
+ today = dt.date.today()
+
+ with time_machine.travel(EPOCH):
+ eh_formatted = time_machine.escape_hatch.time.strftime("%Y-%m-%d")
+ eh_today = dt.datetime.strptime(eh_formatted, "%Y-%m-%d").date()
+ assert eh_today >= today
+
+ def test_time_strftime_arg(self):
+ with time_machine.travel(EPOCH):
+ formatted = time_machine.escape_hatch.time.strftime(
+ "%Y-%m-%d",
+ time.localtime(EPOCH_PLUS_ONE_YEAR),
+ )
+ assert formatted == "1971-01-01"
+
+ def test_time_time(self):
+ now = time.time()
+
+ with time_machine.travel(EPOCH):
+ eh_now = time_machine.escape_hatch.time.time()
+ assert eh_now >= now
+
+ @py_3_7_plus
+ def test_time_time_ns(self):
+ now = time.time_ns()
+
+ with time_machine.travel(EPOCH):
+ eh_now = time_machine.escape_hatch.time.time_ns()
+ assert eh_now >= now
| Flag that we're time traveling, with access to true datetime
### Description
In some of our integration tests we make API calls to external services. Some of these require a timestamp for use in authentication. I know that excluding some modules from time-machine isn't possible due to the implementation, but having some signal that we're time traveling would suffice, along with access to the unpatched datetime module.
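If it helps, here is a minimal sketch of how the escape hatch introduced above could serve this use case; `external_authenticate` is a hypothetical stand-in for the real service call:
```python
import time_machine


def external_authenticate(now):  # hypothetical placeholder for the real API call
    print("authenticating with real timestamp:", now.isoformat())


with time_machine.travel("1970-01-01 00:00 +0000"):
    # The escape hatch reports that mocking is currently active...
    assert time_machine.escape_hatch.is_travelling()
    # ...and returns the *real* wall-clock time, bypassing the mock.
    real_now = time_machine.escape_hatch.datetime.datetime.now()
    external_authenticate(now=real_now)
```
| 0.0 | 29a9f0aa44204c9bf236f350fdc5015fd10e8e2c | [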
"tests/test_time_machine.py::TestEscapeHatch::test_is_travelling_false",
"tests/test_time_machine.py::TestEscapeHatch::test_is_travelling_true",
"tests/test_time_machine.py::TestEscapeHatch::test_datetime_now",
"tests/test_time_machine.py::TestEscapeHatch::test_datetime_now_tz",
"tests/test_time_machine.py::TestEscapeHatch::test_datetime_utcnow",
"tests/test_time_machine.py::TestEscapeHatch::test_time_clock_gettime",
"tests/test_time_machine.py::TestEscapeHatch::test_time_clock_gettime_ns",
"tests/test_time_machine.py::TestEscapeHatch::test_time_gmtime",
"tests/test_time_machine.py::TestEscapeHatch::test_time_localtime",
"tests/test_time_machine.py::TestEscapeHatch::test_time_strftime_no_arg",
"tests/test_time_machine.py::TestEscapeHatch::test_time_strftime_arg",
"tests/test_time_machine.py::TestEscapeHatch::test_time_time",
"tests/test_time_machine.py::TestEscapeHatch::test_time_time_ns"
] | [
"tests/test_time_machine.py::test_import_without_clock_realtime",
"tests/test_time_machine.py::test_datetime_now_no_args",
"tests/test_time_machine.py::test_datetime_now_no_args_no_tick",
"tests/test_time_machine.py::test_datetime_now_arg",
"tests/test_time_machine.py::test_datetime_utcnow",
"tests/test_time_machine.py::test_datetime_utcnow_no_tick",
"tests/test_time_machine.py::test_date_today",
"tests/test_time_machine.py::test_time_clock_gettime_realtime",
"tests/test_time_machine.py::test_time_clock_gettime_monotonic_unaffected",
"tests/test_time_machine.py::test_time_clock_gettime_ns_realtime",
"tests/test_time_machine.py::test_time_clock_gettime_ns_monotonic_unaffected",
"tests/test_time_machine.py::test_time_gmtime_no_args",
"tests/test_time_machine.py::test_time_gmtime_no_args_no_tick",
"tests/test_time_machine.py::test_time_gmtime_arg",
"tests/test_time_machine.py::test_time_localtime",
"tests/test_time_machine.py::test_time_localtime_no_tick",
"tests/test_time_machine.py::test_time_localtime_arg",
"tests/test_time_machine.py::test_time_strftime_format",
"tests/test_time_machine.py::test_time_strftime_format_no_tick",
"tests/test_time_machine.py::test_time_strftime_format_t",
"tests/test_time_machine.py::test_time_time",
"tests/test_time_machine.py::test_time_time_windows",
"tests/test_time_machine.py::test_time_time_no_tick",
"tests/test_time_machine.py::test_time_time_ns",
"tests/test_time_machine.py::test_time_time_ns_no_tick",
"tests/test_time_machine.py::test_nestable",
"tests/test_time_machine.py::test_unsupported_type",
"tests/test_time_machine.py::test_exceptions_dont_break_it",
"tests/test_time_machine.py::test_destination_datetime",
"tests/test_time_machine.py::test_destination_datetime_tzinfo_non_zoneinfo",
"tests/test_time_machine.py::test_destination_datetime_tzinfo_zoneinfo",
"tests/test_time_machine.py::test_destination_datetime_tzinfo_zoneinfo_nested",
"tests/test_time_machine.py::test_destination_datetime_tzinfo_zoneinfo_no_orig_tz",
"tests/test_time_machine.py::test_destination_datetime_tzinfo_zoneinfo_windows",
"tests/test_time_machine.py::test_destination_int",
"tests/test_time_machine.py::test_destination_datetime_naive",
"tests/test_time_machine.py::test_destination_date",
"tests/test_time_machine.py::test_destination_string",
"tests/test_time_machine.py::test_destination_callable_lambda_float",
"tests/test_time_machine.py::test_destination_callable_lambda_string",
"tests/test_time_machine.py::test_destination_generator",
"tests/test_time_machine.py::test_traveller_object",
"tests/test_time_machine.py::test_function_decorator",
"tests/test_time_machine.py::test_coroutine_decorator",
"tests/test_time_machine.py::test_class_decorator_fails_non_testcase",
"tests/test_time_machine.py::UnitTestMethodTests::test_method_decorator",
"tests/test_time_machine.py::UnitTestClassTests::test_class_decorator",
"tests/test_time_machine.py::UnitTestClassTests::test_stacked_method_decorator",
"tests/test_time_machine.py::UnitTestClassCustomSetUpClassTests::test_class_decorator",
"tests/test_time_machine.py::test_shift_with_timedelta",
"tests/test_time_machine.py::test_shift_integer_delta",
"tests/test_time_machine.py::test_shift_negative_delta",
"tests/test_time_machine.py::test_shift_wrong_delta",
"tests/test_time_machine.py::test_shift_when_tick",
"tests/test_time_machine.py::test_move_to_datetime",
"tests/test_time_machine.py::test_move_to_datetime_no_tick",
"tests/test_time_machine.py::test_move_to_past_datetime",
"tests/test_time_machine.py::test_move_to_datetime_with_tzinfo_zoneinfo",
"tests/test_time_machine.py::test_move_to_datetime_change_tick_on",
"tests/test_time_machine.py::test_move_to_datetime_change_tick_off",
"tests/test_time_machine.py::test_uuid1",
"tests/test_time_machine.py::test_fixture_unused",
"tests/test_time_machine.py::test_fixture_used",
"tests/test_time_machine.py::test_fixture_used_tick_false",
"tests/test_time_machine.py::test_fixture_used_tick_true",
"tests/test_time_machine.py::test_fixture_used_twice"
] | {
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false
} | 2021-12-14 07:56:53+00:00 | mit | 896 |
|
adamchainz__time-machine-298 | diff --git a/CHANGELOG.rst b/CHANGELOG.rst
index c524071..71b1598 100644
--- a/CHANGELOG.rst
+++ b/CHANGELOG.rst
@@ -2,6 +2,10 @@
Changelog
=========
+* Add support for ``datetime.timedelta`` to ``time_machine.travel()``.
+
+ Thanks to Nate Dudenhoeffer in `PR #298 <https://github.com/adamchainz/time-machine/pull/298>`__.
+
* Add ``shift()`` method to the ``time_machine`` pytest fixture.
Thanks to Stefaan Lippens in `PR #312 <https://github.com/adamchainz/time-machine/pull/312>`__.
diff --git a/README.rst b/README.rst
index 755325c..88db11f 100644
--- a/README.rst
+++ b/README.rst
@@ -77,6 +77,9 @@ It may be:
If it has ``tzinfo`` set to a |zoneinfo-instance|_, the current timezone will also be mocked.
* A ``datetime.date``.
This will be converted to a UTC datetime with the time 00:00:00.
+* A ``datetime.timedelta``.
+ This will be interpreted relative to the current time.
+ If already within a ``travel()`` block, the ``shift()`` method is easier to use (documented below).
* A ``float`` or ``int`` specifying a `Unix timestamp <https://en.m.wikipedia.org/wiki/Unix_time>`__
* A string, which will be parsed with `dateutil.parse <https://dateutil.readthedocs.io/en/stable/parser.html>`__ and converted to a timestamp.
Again, if the result is naive, it will be assumed to have the UTC time zone.
diff --git a/src/time_machine/__init__.py b/src/time_machine/__init__.py
index c0d6bda..4fa2142 100644
--- a/src/time_machine/__init__.py
+++ b/src/time_machine/__init__.py
@@ -84,6 +84,7 @@ DestinationBaseType = Union[
int,
float,
dt.datetime,
+ dt.timedelta,
dt.date,
str,
]
@@ -124,6 +125,8 @@ def extract_timestamp_tzname(
if dest.tzinfo is None:
dest = dest.replace(tzinfo=dt.timezone.utc)
timestamp = dest.timestamp()
+ elif isinstance(dest, dt.timedelta):
+ timestamp = time() + dest.total_seconds()
elif isinstance(dest, dt.date):
timestamp = dt.datetime.combine(
dest, dt.time(0, 0), tzinfo=dt.timezone.utc
| adamchainz/time-machine | ac337ab7dc568675de7f90c8fb387160e3f08496 | diff --git a/tests/test_time_machine.py b/tests/test_time_machine.py
index 9736be4..0f086bf 100644
--- a/tests/test_time_machine.py
+++ b/tests/test_time_machine.py
@@ -444,6 +444,24 @@ def test_destination_date():
assert time.time() == EPOCH
+def test_destination_timedelta():
+ now = time.time()
+ with time_machine.travel(dt.timedelta(seconds=3600)):
+ assert now + 3600 <= time.time() <= now + 3601
+
+
+def test_destination_timedelta_negative():
+ now = time.time()
+ with time_machine.travel(dt.timedelta(seconds=-3600)):
+ assert now - 3600 <= time.time() <= now - 3599
+
+
+def test_destination_timedelta_nested():
+ with time_machine.travel(EPOCH):
+ with time_machine.travel(dt.timedelta(seconds=10)):
+ assert time.time() == EPOCH + 10.0
+
+
@time_machine.travel("1970-01-01 00:01 +0000")
def test_destination_string():
assert time.time() == EPOCH + 60.0
| Allow passing dt.timedelta objects to travel()
It would offset the current time by the given amount, which would be pretty useful for some tests that check "X is true until N minutes in the future".
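A minimal sketch of the resulting usage, matching the behaviour the patch implements (a `timedelta` destination is resolved relative to the current time; the ten-minute offsets are illustrative):
```python
import datetime as dt
import time

import time_machine

before = time.time()

# A positive delta jumps into the future, relative to "now".
with time_machine.travel(dt.timedelta(minutes=10)):
    assert time.time() >= before + 600

# A negative delta travels into the past.
with time_machine.travel(dt.timedelta(minutes=-10)):
    assert time.time() < before
```
| 0.0 | ac337ab7dc568675de7f90c8fb387160e3f08496 | [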
"tests/test_time_machine.py::test_destination_timedelta",
"tests/test_time_machine.py::test_destination_timedelta_negative",
"tests/test_time_machine.py::test_destination_timedelta_nested"
] | [
"tests/test_time_machine.py::test_import_without_clock_realtime",
"tests/test_time_machine.py::test_datetime_now_no_args",
"tests/test_time_machine.py::test_datetime_now_no_args_no_tick",
"tests/test_time_machine.py::test_datetime_now_arg",
"tests/test_time_machine.py::test_datetime_utcnow",
"tests/test_time_machine.py::test_datetime_utcnow_no_tick",
"tests/test_time_machine.py::test_date_today",
"tests/test_time_machine.py::test_time_clock_gettime_realtime",
"tests/test_time_machine.py::test_time_clock_gettime_monotonic_unaffected",
"tests/test_time_machine.py::test_time_clock_gettime_ns_realtime",
"tests/test_time_machine.py::test_time_clock_gettime_ns_monotonic_unaffected",
"tests/test_time_machine.py::test_time_gmtime_no_args",
"tests/test_time_machine.py::test_time_gmtime_no_args_no_tick",
"tests/test_time_machine.py::test_time_gmtime_arg",
"tests/test_time_machine.py::test_time_localtime",
"tests/test_time_machine.py::test_time_localtime_no_tick",
"tests/test_time_machine.py::test_time_localtime_arg",
"tests/test_time_machine.py::test_time_montonic",
"tests/test_time_machine.py::test_time_monotonic_ns",
"tests/test_time_machine.py::test_time_strftime_format",
"tests/test_time_machine.py::test_time_strftime_format_no_tick",
"tests/test_time_machine.py::test_time_strftime_format_t",
"tests/test_time_machine.py::test_time_time",
"tests/test_time_machine.py::test_time_time_windows",
"tests/test_time_machine.py::test_time_time_no_tick",
"tests/test_time_machine.py::test_time_time_ns",
"tests/test_time_machine.py::test_time_time_ns_no_tick",
"tests/test_time_machine.py::test_nestable",
"tests/test_time_machine.py::test_unsupported_type",
"tests/test_time_machine.py::test_exceptions_dont_break_it",
"tests/test_time_machine.py::test_destination_datetime",
"tests/test_time_machine.py::test_destination_datetime_tzinfo_non_zoneinfo",
"tests/test_time_machine.py::test_destination_datetime_tzinfo_zoneinfo",
"tests/test_time_machine.py::test_destination_datetime_tzinfo_zoneinfo_nested",
"tests/test_time_machine.py::test_destination_datetime_tzinfo_zoneinfo_no_orig_tz",
"tests/test_time_machine.py::test_destination_datetime_tzinfo_zoneinfo_windows",
"tests/test_time_machine.py::test_destination_int",
"tests/test_time_machine.py::test_destination_datetime_naive",
"tests/test_time_machine.py::test_destination_date",
"tests/test_time_machine.py::test_destination_string",
"tests/test_time_machine.py::test_destination_callable_lambda_float",
"tests/test_time_machine.py::test_destination_callable_lambda_string",
"tests/test_time_machine.py::test_destination_generator",
"tests/test_time_machine.py::test_traveller_object",
"tests/test_time_machine.py::test_function_decorator",
"tests/test_time_machine.py::test_coroutine_decorator",
"tests/test_time_machine.py::test_class_decorator_fails_non_testcase",
"tests/test_time_machine.py::TestMethodDecorator::test_method_decorator",
"tests/test_time_machine.py::UnitTestMethodTests::test_method_decorator",
"tests/test_time_machine.py::UnitTestClassTests::test_class_decorator",
"tests/test_time_machine.py::UnitTestClassTests::test_stacked_method_decorator",
"tests/test_time_machine.py::UnitTestClassCustomSetUpClassTests::test_class_decorator",
"tests/test_time_machine.py::test_shift_with_timedelta",
"tests/test_time_machine.py::test_shift_integer_delta",
"tests/test_time_machine.py::test_shift_negative_delta",
"tests/test_time_machine.py::test_shift_wrong_delta",
"tests/test_time_machine.py::test_shift_when_tick",
"tests/test_time_machine.py::test_move_to_datetime",
"tests/test_time_machine.py::test_move_to_datetime_no_tick",
"tests/test_time_machine.py::test_move_to_past_datetime",
"tests/test_time_machine.py::test_move_to_datetime_with_tzinfo_zoneinfo",
"tests/test_time_machine.py::test_move_to_datetime_change_tick_on",
"tests/test_time_machine.py::test_move_to_datetime_change_tick_off",
"tests/test_time_machine.py::test_uuid1",
"tests/test_time_machine.py::test_fixture_unused",
"tests/test_time_machine.py::test_fixture_used",
"tests/test_time_machine.py::test_fixture_used_tick_false",
"tests/test_time_machine.py::test_fixture_used_tick_true",
"tests/test_time_machine.py::test_fixture_move_to_twice",
"tests/test_time_machine.py::test_fixture_move_to_and_shift",
"tests/test_time_machine.py::test_fixture_shift_without_move_to",
"tests/test_time_machine.py::TestEscapeHatch::test_is_travelling_false",
"tests/test_time_machine.py::TestEscapeHatch::test_is_travelling_true",
"tests/test_time_machine.py::TestEscapeHatch::test_datetime_now",
"tests/test_time_machine.py::TestEscapeHatch::test_datetime_now_tz",
"tests/test_time_machine.py::TestEscapeHatch::test_datetime_utcnow",
"tests/test_time_machine.py::TestEscapeHatch::test_time_clock_gettime",
"tests/test_time_machine.py::TestEscapeHatch::test_time_clock_gettime_ns",
"tests/test_time_machine.py::TestEscapeHatch::test_time_gmtime",
"tests/test_time_machine.py::TestEscapeHatch::test_time_localtime",
"tests/test_time_machine.py::TestEscapeHatch::test_time_monotonic",
"tests/test_time_machine.py::TestEscapeHatch::test_time_monotonic_ns",
"tests/test_time_machine.py::TestEscapeHatch::test_time_strftime_no_arg",
"tests/test_time_machine.py::TestEscapeHatch::test_time_strftime_arg",
"tests/test_time_machine.py::TestEscapeHatch::test_time_time",
"tests/test_time_machine.py::TestEscapeHatch::test_time_time_ns"
] | {
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false
} | 2022-11-03 01:33:14+00:00 | mit | 897 |
|
adamchainz__time-machine-433 | diff --git a/CHANGELOG.rst b/CHANGELOG.rst
index d83e48d..5984577 100644
--- a/CHANGELOG.rst
+++ b/CHANGELOG.rst
@@ -2,6 +2,10 @@
Changelog
=========
+* Fix segmentation fault when the first ``travel()`` call in a process uses a ``timedelta``.
+
+ Thanks to Marcin Sulikowski for the report in `Issue #431 <https://github.com/adamchainz/time-machine/issues/431>`__.
+
2.14.0 (2024-03-03)
-------------------
diff --git a/src/time_machine/__init__.py b/src/time_machine/__init__.py
index 877b325..2553b3d 100644
--- a/src/time_machine/__init__.py
+++ b/src/time_machine/__init__.py
@@ -5,6 +5,7 @@ import functools
import inspect
import os
import sys
+import time as time_module
import uuid
from collections.abc import Generator
from time import gmtime as orig_gmtime
@@ -126,7 +127,7 @@ def extract_timestamp_tzname(
dest = dest.replace(tzinfo=dt.timezone.utc)
timestamp = dest.timestamp()
elif isinstance(dest, dt.timedelta):
- timestamp = time() + dest.total_seconds()
+ timestamp = time_module.time() + dest.total_seconds()
elif isinstance(dest, dt.date):
timestamp = dt.datetime.combine(
dest, dt.time(0, 0), tzinfo=dt.timezone.utc
| adamchainz/time-machine | 8e28b9e74c3d6a50b7d6ec09a9e50903d4dc8bdd | diff --git a/tests/test_time_machine.py b/tests/test_time_machine.py
index 11dc433..5557148 100644
--- a/tests/test_time_machine.py
+++ b/tests/test_time_machine.py
@@ -3,6 +3,7 @@ from __future__ import annotations
import asyncio
import datetime as dt
import os
+import subprocess
import sys
import time
import typing
@@ -10,6 +11,7 @@ import uuid
from contextlib import contextmanager
from importlib.util import module_from_spec
from importlib.util import spec_from_file_location
+from textwrap import dedent
from unittest import SkipTest
from unittest import TestCase
from unittest import mock
@@ -472,6 +474,25 @@ def test_destination_timedelta():
assert now + 3600 <= time.time() <= now + 3601
+def test_destination_timedelta_first_travel_in_process():
+ # Would previously segfault
+ subprocess.run(
+ [
+ sys.executable,
+ "-c",
+ dedent(
+ """
+ from datetime import timedelta
+ import time_machine
+ with time_machine.travel(timedelta()):
+ pass
+ """
+ ),
+ ],
+ check=True,
+ )
+
+
def test_destination_timedelta_negative():
now = time.time()
with time_machine.travel(dt.timedelta(seconds=-3600)):
| Segmentation fault in `time_machine_travel(timedelta())`
### Python Version
3.12
### pytest Version
_No response_
### Package Version
2.14.0
### Description
The following Python script:
```py
from datetime import timedelta
import time_machine
with time_machine.travel(timedelta()):
pass
```
crashes due to a segmentation fault in the constructor of the `travel` class.
The crash happens when the constructor tries to determine the destination timestamp like this:
```py
timestamp = time() + dest.total_seconds()
```
where `time()` calls `result = _time_machine.original_time()`, which crashes if time_machine's patches are not applied yet and thus the `original_time` function pointer is still `NULL`.
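For context, a sketch of the safe pattern the fix adopts: resolving the base timestamp through the untouched standard-library module, which never dereferences the saved C function pointer (names follow the patch above):
```python
import datetime as dt
import time as time_module  # the unpatched standard-library module


def resolve_timedelta_destination(dest: dt.timedelta) -> float:
    # time_module.time() is safe even before any travel() has run, because
    # it never goes through the wrapper that dereferences
    # _time_machine.original_time (NULL until the first patch is applied).
    return time_module.time() + dest.total_seconds()


print(resolve_timedelta_destination(dt.timedelta(hours=1)))
```
| 0.0 | 8e28b9e74c3d6a50b7d6ec09a9e50903d4dc8bdd | [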
"tests/test_time_machine.py::test_destination_timedelta_first_travel_in_process"
] | [
"tests/test_time_machine.py::test_import_without_clock_realtime",
"tests/test_time_machine.py::test_datetime_now_no_args",
"tests/test_time_machine.py::test_datetime_now_no_args_no_tick",
"tests/test_time_machine.py::test_datetime_now_arg",
"tests/test_time_machine.py::test_datetime_utcnow",
"tests/test_time_machine.py::test_datetime_utcnow_no_tick",
"tests/test_time_machine.py::test_date_today",
"tests/test_time_machine.py::test_time_clock_gettime_realtime",
"tests/test_time_machine.py::test_time_clock_gettime_monotonic_unaffected",
"tests/test_time_machine.py::test_time_clock_gettime_ns_realtime",
"tests/test_time_machine.py::test_time_clock_gettime_ns_monotonic_unaffected",
"tests/test_time_machine.py::test_time_gmtime_no_args",
"tests/test_time_machine.py::test_time_gmtime_no_args_no_tick",
"tests/test_time_machine.py::test_time_gmtime_arg",
"tests/test_time_machine.py::test_time_localtime",
"tests/test_time_machine.py::test_time_localtime_no_tick",
"tests/test_time_machine.py::test_time_localtime_arg",
"tests/test_time_machine.py::test_time_montonic",
"tests/test_time_machine.py::test_time_monotonic_ns",
"tests/test_time_machine.py::test_time_strftime_format",
"tests/test_time_machine.py::test_time_strftime_format_no_tick",
"tests/test_time_machine.py::test_time_strftime_format_t",
"tests/test_time_machine.py::test_time_time",
"tests/test_time_machine.py::test_time_time_windows",
"tests/test_time_machine.py::test_time_time_no_tick",
"tests/test_time_machine.py::test_time_time_ns",
"tests/test_time_machine.py::test_time_time_ns_no_tick",
"tests/test_time_machine.py::test_nestable",
"tests/test_time_machine.py::test_unsupported_type",
"tests/test_time_machine.py::test_exceptions_dont_break_it",
"tests/test_time_machine.py::test_destination_datetime",
"tests/test_time_machine.py::test_destination_datetime_tzinfo_non_zoneinfo",
"tests/test_time_machine.py::test_destination_datetime_tzinfo_zoneinfo",
"tests/test_time_machine.py::test_destination_datetime_tzinfo_zoneinfo_nested",
"tests/test_time_machine.py::test_destination_datetime_tzinfo_zoneinfo_no_orig_tz",
"tests/test_time_machine.py::test_destination_datetime_tzinfo_zoneinfo_windows",
"tests/test_time_machine.py::test_destination_int",
"tests/test_time_machine.py::test_destination_datetime_naive",
"tests/test_time_machine.py::test_destination_date",
"tests/test_time_machine.py::test_destination_timedelta",
"tests/test_time_machine.py::test_destination_timedelta_negative",
"tests/test_time_machine.py::test_destination_timedelta_nested",
"tests/test_time_machine.py::test_destination_string",
"tests/test_time_machine.py::test_destination_string_naive[1970-01-01",
"tests/test_time_machine.py::test_destination_string_naive[1970-01-01-UTC-0]",
"tests/test_time_machine.py::test_destination_string_naive[1970-01-01-Europe/Amsterdam--3600]",
"tests/test_time_machine.py::test_destination_string_naive[1970-01-01-US/Eastern-18000]",
"tests/test_time_machine.py::test_destination_callable_lambda_float",
"tests/test_time_machine.py::test_destination_callable_lambda_string",
"tests/test_time_machine.py::test_destination_generator",
"tests/test_time_machine.py::test_traveller_object",
"tests/test_time_machine.py::test_function_decorator",
"tests/test_time_machine.py::test_coroutine_decorator",
"tests/test_time_machine.py::test_class_decorator_fails_non_testcase",
"tests/test_time_machine.py::ClassDecoratorInheritanceTests::test_ineheritance_correctly_rebound",
"tests/test_time_machine.py::TestMethodDecorator::test_method_decorator",
"tests/test_time_machine.py::UnitTestMethodTests::test_method_decorator",
"tests/test_time_machine.py::UnitTestClassTests::test_class_decorator",
"tests/test_time_machine.py::UnitTestClassTests::test_stacked_method_decorator",
"tests/test_time_machine.py::UnitTestClassCustomSetUpClassTests::test_class_decorator",
"tests/test_time_machine.py::test_shift_with_timedelta",
"tests/test_time_machine.py::test_shift_integer_delta",
"tests/test_time_machine.py::test_shift_negative_delta",
"tests/test_time_machine.py::test_shift_wrong_delta",
"tests/test_time_machine.py::test_shift_when_tick",
"tests/test_time_machine.py::test_move_to_datetime",
"tests/test_time_machine.py::test_move_to_datetime_no_tick",
"tests/test_time_machine.py::test_move_to_past_datetime",
"tests/test_time_machine.py::test_move_to_datetime_with_tzinfo_zoneinfo",
"tests/test_time_machine.py::test_move_to_datetime_change_tick_on",
"tests/test_time_machine.py::test_move_to_datetime_change_tick_off",
"tests/test_time_machine.py::test_uuid1",
"tests/test_time_machine.py::test_fixture_unused",
"tests/test_time_machine.py::test_fixture_used",
"tests/test_time_machine.py::test_fixture_used_tick_false",
"tests/test_time_machine.py::test_fixture_used_tick_true",
"tests/test_time_machine.py::test_fixture_move_to_twice",
"tests/test_time_machine.py::test_fixture_move_to_and_shift",
"tests/test_time_machine.py::test_fixture_shift_without_move_to",
"tests/test_time_machine.py::TestEscapeHatch::test_is_travelling_false",
"tests/test_time_machine.py::TestEscapeHatch::test_is_travelling_true",
"tests/test_time_machine.py::TestEscapeHatch::test_datetime_now",
"tests/test_time_machine.py::TestEscapeHatch::test_datetime_now_tz",
"tests/test_time_machine.py::TestEscapeHatch::test_datetime_utcnow",
"tests/test_time_machine.py::TestEscapeHatch::test_time_clock_gettime",
"tests/test_time_machine.py::TestEscapeHatch::test_time_clock_gettime_ns",
"tests/test_time_machine.py::TestEscapeHatch::test_time_gmtime",
"tests/test_time_machine.py::TestEscapeHatch::test_time_localtime",
"tests/test_time_machine.py::TestEscapeHatch::test_time_monotonic",
"tests/test_time_machine.py::TestEscapeHatch::test_time_monotonic_ns",
"tests/test_time_machine.py::TestEscapeHatch::test_time_strftime_no_arg",
"tests/test_time_machine.py::TestEscapeHatch::test_time_strftime_arg",
"tests/test_time_machine.py::TestEscapeHatch::test_time_time",
"tests/test_time_machine.py::TestEscapeHatch::test_time_time_ns"
] | {
"failed_lite_validators": [
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false
} | 2024-03-22 23:03:40+00:00 | mit | 898 |
|
adamchainz__time-machine-54 | diff --git a/HISTORY.rst b/HISTORY.rst
index 485c825..f5a2f8c 100644
--- a/HISTORY.rst
+++ b/HISTORY.rst
@@ -1,6 +1,12 @@
History
=======
+* Correctly return naive datetimes from ``datetime.utcnow()`` whilst time
+ travelling.
+
+ Thanks to Søren Pilgård and Bart Van Loon for the report in
+ `Issue #52 <https://github.com/adamchainz/time-machine/issues/52>`__.
+
1.2.0 (2020-07-08)
------------------
diff --git a/src/time_machine.py b/src/time_machine.py
index 0832f6a..d0ed6ff 100644
--- a/src/time_machine.py
+++ b/src/time_machine.py
@@ -206,7 +206,7 @@ def utcnow():
if not coordinates_stack:
return _time_machine.original_utcnow()
else:
- return dt.datetime.fromtimestamp(time(), dt.timezone.utc)
+ return dt.datetime.utcfromtimestamp(time())
# time module
| adamchainz/time-machine | 48a2a4df24ba188533fdf03f6cfc4b78db33f17a | diff --git a/tests/test_time_machine.py b/tests/test_time_machine.py
index bf85da5..4eacde2 100644
--- a/tests/test_time_machine.py
+++ b/tests/test_time_machine.py
@@ -88,6 +88,7 @@ def test_datetime_utcnow():
assert now.minute == 0
assert now.second == 0
assert now.microsecond == 0
+ assert now.tzinfo is None
assert dt.datetime.utcnow() >= LIBRARY_EPOCH_DATETIME
| datetime.datetime.utcnow() should not have tzinfo
Hi, I find this library very neat and have started using it in multiple tests.
But there is one issue:
```python
In [1]: import datetime
In [2]: import time_machine
In [3]: @time_machine.travel("2020-07-28 13:15:30.478705")
...: def foo():
...: return datetime.datetime.utcnow()
...:
In [4]: datetime.datetime.utcnow()
Out[4]: datetime.datetime(2020, 7, 31, 13, 46, 25, 828701)
In [5]: foo()
Out[5]: datetime.datetime(2020, 7, 28, 13, 15, 30, 478705, tzinfo=datetime.timezone.utc)
```
time_machine adds a timezone to datetime instances created via `.utcnow()`, but this is wrong according to Python.
It even specifies it directly in the documentation https://docs.python.org/3/library/datetime.html#datetime.datetime.utcnow
> Return the current UTC date and time, **with tzinfo None**.
This change in the behaviour of the datetime library can change the behaviour of the tested code.
E.g., when serialized to a string, the value gains a "+00:00" suffix.
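A minimal check of the behaviour expected once this is fixed; the destination string is taken from the session above:
```python
import datetime as dt

import time_machine

with time_machine.travel("2020-07-28 13:15:30.478705"):
    now = dt.datetime.utcnow()
    # Like the real utcnow(), the mocked value should stay naive...
    assert now.tzinfo is None
    # ...so serializing it no longer grows an unexpected "+00:00" suffix.
    assert "+00:00" not in now.isoformat()
```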
| 0.0 | 48a2a4df24ba188533fdf03f6cfc4b78db33f17a | [
"tests/test_time_machine.py::test_datetime_utcnow"
] | [
"tests/test_time_machine.py::test_import_without_clock_realtime",
"tests/test_time_machine.py::test_datetime_now_no_args",
"tests/test_time_machine.py::test_datetime_now_no_args_no_tick",
"tests/test_time_machine.py::test_datetime_now_arg",
"tests/test_time_machine.py::test_datetime_utcnow_no_tick",
"tests/test_time_machine.py::test_date_today",
"tests/test_time_machine.py::test_time_clock_gettime_realtime",
"tests/test_time_machine.py::test_time_clock_gettime_monotonic_unaffected",
"tests/test_time_machine.py::test_time_clock_gettime_ns_realtime",
"tests/test_time_machine.py::test_time_clock_gettime_ns_monotonic_unaffected",
"tests/test_time_machine.py::test_time_gmtime_no_args",
"tests/test_time_machine.py::test_time_gmtime_no_args_no_tick",
"tests/test_time_machine.py::test_time_gmtime_arg",
"tests/test_time_machine.py::test_time_localtime",
"tests/test_time_machine.py::test_time_localtime_no_tick",
"tests/test_time_machine.py::test_time_localtime_arg",
"tests/test_time_machine.py::test_time_strftime_no_args",
"tests/test_time_machine.py::test_time_strftime_no_args_no_tick",
"tests/test_time_machine.py::test_time_strftime_arg",
"tests/test_time_machine.py::test_time_time",
"tests/test_time_machine.py::test_time_time_no_tick",
"tests/test_time_machine.py::test_time_time_ns",
"tests/test_time_machine.py::test_time_time_ns_no_tick",
"tests/test_time_machine.py::test_nestable",
"tests/test_time_machine.py::test_unsupported_type",
"tests/test_time_machine.py::test_exceptions_dont_break_it",
"tests/test_time_machine.py::test_destination_datetime",
"tests/test_time_machine.py::test_destination_datetime_timezone",
"tests/test_time_machine.py::test_destination_datetime_naive",
"tests/test_time_machine.py::test_destination_date",
"tests/test_time_machine.py::test_destination_string",
"tests/test_time_machine.py::test_destination_callable_lambda_float",
"tests/test_time_machine.py::test_destination_callable_lambda_string",
"tests/test_time_machine.py::test_destination_generator",
"tests/test_time_machine.py::test_traveller_object",
"tests/test_time_machine.py::test_function_decorator",
"tests/test_time_machine.py::test_coroutine_decorator",
"tests/test_time_machine.py::test_class_decorator_fails_non_testcase",
"tests/test_time_machine.py::UnitTestMethodTests::test_method_decorator",
"tests/test_time_machine.py::UnitTestClassTests::test_class_decorator",
"tests/test_time_machine.py::UnitTestClassTests::test_stacked_method_decorator",
"tests/test_time_machine.py::UnitTestClassCustomSetUpClassTests::test_class_decorator",
"tests/test_time_machine.py::test_tz_offset_float",
"tests/test_time_machine.py::test_tz_offset_timedelta",
"tests/test_time_machine.py::test_tz_offset_unsupported_type",
"tests/test_time_machine.py::test_shift_with_timedelta",
"tests/test_time_machine.py::test_shift_integer_delta",
"tests/test_time_machine.py::test_shift_negative_delta",
"tests/test_time_machine.py::test_shift_wrong_delta",
"tests/test_time_machine.py::test_shift_when_tick",
"tests/test_time_machine.py::test_move_to_datetime",
"tests/test_time_machine.py::test_move_to_datetime_when_tick",
"tests/test_time_machine.py::test_move_to_past_datetime",
"tests/test_time_machine.py::test_move_to_past_datetime_when_tick",
"tests/test_time_machine.py::test_uuid1"
] | {
"failed_lite_validators": [
"has_hyperlinks",
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false
} | 2020-08-29 21:48:07+00:00 | isc | 899 |
|
adamgilman__furlong-5 | diff --git a/.gitignore b/.gitignore
index 894a44c..06f9e54 100644
--- a/.gitignore
+++ b/.gitignore
@@ -5,7 +5,11 @@ __pycache__/
# C extensions
*.so
-
+bin/*
+include/*
+lib/*
+man/*
+pip-selfcheck.json
# Distribution / packaging
.Python
build/
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..12231f7
--- /dev/null
+++ b/README.md
@@ -0,0 +1,34 @@
+# Hello Furlong
+Furlong aims to make unit conversions easy and Pythonic with a simple-to-understand, almost magical interface. Simply declare the unit and value and convert to anything you can think of. If we don't support a unit conversion, open a pull request to add it for everyone!
+
+## How it works
+Bring Furlong into your project under a simple name; f is recommended but not required
+
+`from furlong import Furlong as f`
+
+Declare the unit and value of the base measurement
+
+`length_of_wall = f(inches=300)`
+
+Then convert to any unit you'd like!
+
+```
+length_of_wall.asCentimeters()
+762.0
+```
+
+Done!
+
+## Error handling
+
+* [TODO]
+
+## Units supported
+
+* ### Length
+ * inches
+ * centimeters
+
+* ### Volume
+
+* ### Weight
diff --git a/furlong/__init__.py b/furlong/__init__.py
new file mode 100644
index 0000000..10cb1e8
--- /dev/null
+++ b/furlong/__init__.py
@@ -0,0 +1,1 @@
+from .furlong import Furlong
\ No newline at end of file
diff --git a/furlong/furlong.py b/furlong/furlong.py
new file mode 100644
index 0000000..b23faeb
--- /dev/null
+++ b/furlong/furlong.py
@@ -0,0 +1,19 @@
+class Furlong(object):
+ conversion_table = {
+ 'inches' : ('centimeters', 2.54),
+ }
+ def __init__(self, **kwargs):
+ for key, value in kwargs.items():
+ self.unit = key
+ self.value = value
+
+ for base, conv in Furlong.conversion_table.items():
+ func_name, conv_value = Furlong.conversion_table[base]
+ func_name = "as" + func_name.title()
+ setattr(self, func_name, self._make_convertor( conv_value ))
+
+ def _make_convertor(self, conv_value):
+ def _convertor():
+ return self.value * conv_value
+
+ return _convertor
\ No newline at end of file
| adamgilman/furlong | 42e6917523242f349a9b825bf0a91f1643856645 | diff --git a/tests/__init__.py b/tests/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/tests/tests_fobject.py b/tests/tests_fobject.py
new file mode 100644
index 0000000..4196907
--- /dev/null
+++ b/tests/tests_fobject.py
@@ -0,0 +1,17 @@
+import unittest
+from furlong import Furlong as f
+
+class TestSimpleConversionTests(unittest.TestCase):
+ def test_fixed_conversion_test(self):
+ self.assertEqual( f(inches=12).asCentimeters(), 30.48 )
+
+
+class TestDyanmicFunctionNaming(unittest.TestCase):
+ def test_dyanmic_function_exists(self):
+ test_conv = {
+ 'testbase' : ('testconv', -1111),
+ }
+ f.conversion_table.update( test_conv )
+ self.assertIsInstance( f(testbase=1), f )
+ self.assertTrue( callable( f(testbase=1).asTestconv ) )
+ self.assertEqual( f(testbase=2).asTestconv(), -2222 )
\ No newline at end of file
| Provide a simple interface for unit conversions
Declare a unit type
```
length = f(inches=12)
length.asCentimeters()
30.48
```
Simple and extensible
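A sketch of the extension point this design gives you, following the same pattern the tests use to register units; the `feet` entry is a hypothetical addition:
```python
from furlong import Furlong as f

# Each table entry maps a base unit to a (target_name, factor) pair and is
# turned into an asTargetname() method when an instance is constructed.
f.conversion_table.update({
    "feet": ("inches", 12),  # hypothetical new unit
})

length = f(feet=3)
print(length.asInches())  # 36
```
| 0.0 | 42e6917523242f349a9b825bf0a91f1643856645 | [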
"tests/tests_fobject.py::TestSimpleConversionTests::test_fixed_conversion_test",
"tests/tests_fobject.py::TestDyanmicFunctionNaming::test_dyanmic_function_exists"
] | [] | {
"failed_lite_validators": [
"has_short_problem_statement",
"has_added_files"
],
"has_test_patch": true,
"is_lite": false
} | 2018-07-14 09:55:52+00:00 | mit | 900 |
|
adamjstewart__fiscalyear-5 | diff --git a/.travis.yml b/.travis.yml
index f829e95..416d131 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -1,13 +1,15 @@
language: python
+dist: xenial
+
python:
- - 2.6
- 2.7
- - 3.3
- 3.4
- 3.5
- 3.6
+ - 3.7
+ - 3.8-dev
-script: py.test --cov=fiscalyear
+script: pytest --cov=fiscalyear
after_success: codecov
diff --git a/LICENSE b/LICENSE
index 247c0b8..f2c31c2 100644
--- a/LICENSE
+++ b/LICENSE
@@ -1,6 +1,6 @@
MIT License
-Copyright (c) 2017 Adam J. Stewart
+Copyright (c) 2017-2019 Adam J. Stewart
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
diff --git a/README.rst b/README.rst
index b7d0874..30cf856 100644
--- a/README.rst
+++ b/README.rst
@@ -7,6 +7,12 @@
.. image:: https://readthedocs.org/projects/fiscalyear/badge/?version=latest
:target: https://fiscalyear.readthedocs.io
+.. image:: https://badge.fury.io/py/fiscalyear.svg
+ :target: https://pypi.org/project/fiscalyear/
+
+.. image:: https://anaconda.org/conda-forge/fiscalyear/badges/version.svg
+ :target: https://anaconda.org/conda-forge/fiscalyear
+
Overview
========
diff --git a/docs/installation.rst b/docs/installation.rst
index 8355ecf..41a9b4c 100644
--- a/docs/installation.rst
+++ b/docs/installation.rst
@@ -17,6 +17,16 @@ The recommended way to install ``fiscalyear`` is with ``pip``.
The fiscalyear module will now appear with your base Python installation.
+Anaconda
+--------
+
+You can also install ``fiscalyear`` with the ``conda`` package manager.
+
+.. code-block:: console
+
+ $ conda install -c conda-forge fiscalyear
+
+
Spack
-----
diff --git a/fiscalyear.py b/fiscalyear.py
index c6f8ddc..f3a7640 100644
--- a/fiscalyear.py
+++ b/fiscalyear.py
@@ -24,10 +24,41 @@ START_MONTH = 10
START_DAY = 1
+def _validate_fiscal_calendar_params(start_year, start_month, start_day):
+ """Raise an Exception if the calendar parameters are invalid.
+
+ :param start_year: Relationship between the start of the fiscal year and
+ the calendar year. Possible values: ``'previous'`` or ``'same'``.
+ :type start_year: str
+ :param start_month: The first month of the fiscal year
+ :type start_month: int or str
+ :param start_day: The first day of the first month of the fiscal year
+ :type start_day: int or str
+ :raises TypeError: If ``start_year`` is not a ``str``.
+ :raises ValueError: If ``start_year`` is not ``'previous'`` or ``'same'``
+ :raises ValueError: If ``start_month`` or ``start_day`` is not an int or
+ int-like string
+ :raises ValueError: If ``start_month`` or ``start_day`` is out of range
+ """
+ if not isinstance(start_year, str):
+ raise TypeError("'start_year' must be a 'str', not: '%s'" % type(str))
+ if start_year not in ('previous', 'same'):
+ msg = "'start_year' must be either 'previous' or 'same', not: '%s'"
+ raise ValueError(msg % start_year)
+ _check_day(start_month, start_day)
+
+
+def setup_fiscal_calendar(start_year, start_month, start_day):
+ """Change the global calendar settings."""
+ _validate_fiscal_calendar_params(start_year, start_month, start_day)
+ global START_YEAR, START_MONTH, START_DAY
+ START_YEAR = start_year
+ START_MONTH = start_month
+ START_DAY = start_day
+
+
@contextlib.contextmanager
-def fiscal_calendar(start_year=None,
- start_month=None,
- start_day=None):
+def fiscal_calendar(start_year=None, start_month=None, start_day=None):
"""A context manager that lets you modify the start of the fiscal calendar
inside the scope of a with-statement.
@@ -38,43 +69,22 @@ def fiscal_calendar(start_year=None,
:type start_month: int or str
:param start_day: The first day of the first month of the fiscal year
:type start_day: int or str
- :raises AssertionError: If ``start_year`` is not ``'previous'`` or ``'same'``
- :raises TypeError: If ``start_month`` or ``start_day`` is not an int or int-like string
+ :raises ValueError: If ``start_year`` is not ``'previous'`` or ``'same'``
+ :raises TypeError: If ``start_month`` or ``start_day`` is not an int or
+ int-like string
:raises ValueError: If ``start_month`` or ``start_day`` is out of range
"""
- global START_YEAR
- global START_MONTH
- global START_DAY
-
- # Use default values if not changed
- if start_year is None:
- start_year = START_YEAR
- if start_month is None:
- start_month = START_MONTH
- if start_day is None:
- start_day = START_DAY
-
- assert isinstance(start_year, str)
- assert start_year == 'previous' or start_year == 'same'
- start_month = _check_month(start_month)
- start_day = _check_day(start_month, start_day)
-
- # Backup previous values
- old_start_year = START_YEAR
- old_start_month = START_MONTH
- old_start_day = START_DAY
+ # If arguments are omitted, use the currently active values.
+ start_year = START_YEAR if start_year is None else start_year
+ start_month = START_MONTH if start_month is None else start_month
+ start_day = START_DAY if start_day is None else start_day
# Temporarily change global variables
- START_YEAR = start_year
- START_MONTH = start_month
- START_DAY = start_day
-
+ previous_values = (START_YEAR, START_MONTH, START_DAY)
+ setup_fiscal_calendar(start_year, start_month, start_day)
yield
-
# Restore previous values
- START_YEAR = old_start_year
- START_MONTH = old_start_month
- START_DAY = old_start_day
+ setup_fiscal_calendar(*previous_values)
def _check_int(value):
@@ -225,11 +235,9 @@ class FiscalYear(object):
return self == item
elif isinstance(item, FiscalQuarter):
return self._fiscal_year == item.fiscal_year
- elif (isinstance(item, FiscalDateTime) or
- isinstance(item, datetime.datetime)):
+ elif isinstance(item, datetime.datetime):
return self.start <= item <= self.end
- elif (isinstance(item, FiscalDate) or
- isinstance(item, datetime.date)):
+ elif isinstance(item, datetime.date):
return self.start.date() <= item <= self.end.date()
else:
raise TypeError("can't compare '%s' to '%s'" % (
@@ -405,11 +413,9 @@ class FiscalQuarter(object):
"""
if isinstance(item, FiscalQuarter):
return self == item
- elif (isinstance(item, FiscalDateTime) or
- isinstance(item, datetime.datetime)):
+ elif isinstance(item, datetime.datetime):
return self.start <= item <= self.end
- elif (isinstance(item, FiscalDate) or
- isinstance(item, datetime.date)):
+ elif isinstance(item, datetime.date):
return self.start.date() <= item <= self.end.date()
else:
raise TypeError("can't compare '%s' to '%s'" % (
diff --git a/setup.cfg b/setup.cfg
index 4bca33d..d928416 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -4,3 +4,6 @@ test=pytest
[build_sphinx]
source-dir=docs
build-dir=docs/_build
+
+[metadata]
+license-file = LICENSE
diff --git a/setup.py b/setup.py
index 7939d81..05153b5 100755
--- a/setup.py
+++ b/setup.py
@@ -42,6 +42,8 @@ setuptools.setup(
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
+ 'Programming Language :: Python :: 3.7',
+ 'Programming Language :: Python :: 3.8',
# Type of package
'Topic :: Office/Business :: Financial :: Accounting',
| adamjstewart/fiscalyear | 77c5c0c82a62de36e77284e924f744bb1e770a31 | diff --git a/test_fiscalyear.py b/test_fiscalyear.py
index 690835f..5b2779f 100644
--- a/test_fiscalyear.py
+++ b/test_fiscalyear.py
@@ -9,8 +9,143 @@ import pytest
US_FEDERAL = ('previous', 10, 1)
UK_PERSONAL = ('same', 4, 6)
-# Default to U.S.
-fiscalyear.START_YEAR, fiscalyear.START_MONTH, fiscalyear.START_DAY = US_FEDERAL
+
+class TestCheckInt(object):
+ @pytest.mark.parametrize("value, exception", [
+ ('asdf', TypeError),
+ ("-999", TypeError),
+ # Technically speaking, _check_int should accept negative integers
+ # but this isn't a public function + datetime doesn't handle them
+ # anyway.
+ (float(), TypeError),
+ (object(), TypeError),
+ ])
+ def test_invalid_input(self, value, exception):
+ with pytest.raises(exception):
+ fiscalyear._check_int(value)
+
+ @pytest.mark.parametrize("value", [1, 2, 0, -1, -2, "1", "0", "999"])
+ def test_valid_input(self, value):
+ assert int(value) == fiscalyear._check_int(value)
+
+
+class TestCheckYear(object):
+ @pytest.mark.parametrize("value, exception", [
+ ('asdf', TypeError),
+ (float(), TypeError),
+ (object(), TypeError),
+ ("-1", TypeError),
+ (-1, ValueError),
+ (0, ValueError),
+ ("0", ValueError),
+ (10000, ValueError),
+ ("10000", ValueError),
+ ])
+ def test_invalid_input(self, value, exception):
+ with pytest.raises(exception):
+ fiscalyear._check_year(value)
+
+ @pytest.mark.parametrize("value", [1, 2, "1", "999"])
+ def test_valid_input(self, value):
+ assert int(value) == fiscalyear._check_year(value)
+
+
+class TestCheckDay(object):
+ @pytest.mark.parametrize("month, day, exception", [
+ (1, 'asdf', TypeError),
+ (1, "-999", TypeError),
+ (1, float(), TypeError),
+ (1, object(), TypeError),
+ (1, -1, ValueError),
+ (1, "-1", TypeError),
+ (1, 0, ValueError),
+ (1, "0", ValueError),
+ (1, 32, ValueError),
+ (1, 32, ValueError),
+ ])
+ def test_invalid_input(self, month, day, exception):
+ with pytest.raises(exception):
+ fiscalyear._check_day(month, day)
+
+ @pytest.mark.parametrize("month, day", [(1, 1), (1, 2), (1, "1"), (1, 31), (1, "31")])
+ def test_valid_input(self, month, day):
+ assert int(day) == fiscalyear._check_day(month, day)
+
+
+class TestCheckQuarter(object):
+ @pytest.mark.parametrize("value, exception", [
+ ('asdf', TypeError),
+ (float(), TypeError),
+ (object(), TypeError),
+ ("-1", TypeError),
+ (-1, ValueError),
+ (0, ValueError),
+ ("0", ValueError),
+ (5, ValueError),
+ ("5", ValueError),
+ ])
+ def test_invalid_input(self, value, exception):
+ with pytest.raises(exception):
+ fiscalyear._check_quarter(value)
+
+ @pytest.mark.parametrize("value", [1, 2, "1", "4"])
+ def test_valid_input(self, value):
+ assert int(value) == fiscalyear._check_quarter(value)
+
+
+class TestCalendarSettingsValidator(object):
+ @pytest.mark.parametrize("arguments, exception", [
+ (dict(start_year='asdf', start_month=12, start_day=1), ValueError),
+ (dict(start_year=float(1999), start_month=12, start_day=1), TypeError),
+ (dict(start_year=object(), start_month=12, start_day=1), TypeError),
+
+ (dict(start_year='same', start_month='asdf', start_day=1), TypeError),
+ (dict(start_year='same', start_month=float(12), start_day=1), TypeError),
+ (dict(start_year='same', start_month=object(), start_day=1), TypeError),
+ (dict(start_year='same', start_month=-1, start_day=1), ValueError),
+ (dict(start_year='same', start_month=0, start_day=1), ValueError),
+ (dict(start_year='same', start_month=13, start_day=1), ValueError),
+
+ (dict(start_year='same', start_month=12, start_day='asdf'), TypeError),
+ (dict(start_year='same', start_month=12, start_day=float(1)), TypeError),
+ (dict(start_year='same', start_month=12, start_day=object()), TypeError),
+ (dict(start_year='same', start_month=12, start_day=0), ValueError),
+ (dict(start_year='same', start_month=12, start_day=-1), ValueError),
+ (dict(start_year='same', start_month=12, start_day=32), ValueError),
+ ])
+ def test_invalid_input(self, arguments, exception):
+ with pytest.raises(exception):
+ fiscalyear._validate_fiscal_calendar_params(**arguments)
+
+ @pytest.mark.parametrize("arguments", [
+ dict(start_year='same', start_month=1, start_day=1),
+ dict(start_year='same', start_month=1, start_day=31),
+ dict(start_year='same', start_month=12, start_day=1),
+ dict(start_year='previous', start_month=1, start_day=1),
+ dict(start_year='previous', start_month=1, start_day=31),
+ dict(start_year='previous', start_month=12, start_day=1),
+ ])
+ def test_valid_input(self, arguments):
+ fiscalyear._validate_fiscal_calendar_params(**arguments)
+
+
+class TestSetupFiscalCalendar(object):
+
+ def test_setup_fiscal_calendar(self):
+ # Test defaults
+ day = fiscalyear.FiscalDate(2017, 12, 1)
+ assert day.fiscal_year == 2018
+ assert day.quarter == 1
+
+ # Change fiscal year settings
+ fiscalyear.setup_fiscal_calendar("same", 1, 1)
+ assert day.fiscal_year == 2017
+ assert day.quarter == 4
+
+ # Restore defaults and re-test
+ fiscalyear.setup_fiscal_calendar("previous", 10, 1)
+ assert day.fiscal_year == 2018
+ assert day.quarter == 1
class TestFiscalCalendar:
@@ -86,6 +221,10 @@ class TestFiscalCalendar:
assert fiscalyear.START_DAY == 1
def test_wrong_type(self):
+ with pytest.raises(TypeError):
+ with fiscalyear.fiscal_calendar(start_year=6.5):
+ pass
+
with pytest.raises(TypeError):
with fiscalyear.fiscal_calendar(start_month=6.5):
pass
| Add a function for easily changing the global "START_*" parameters
I think it would make sense to have a function that would make changing the global parameters easier. E.g.
``` python
def setup_fiscal_year(start_year, start_month, start_day):
global START_YEAR, START_MONTH, START_DAY
START_YEAR = start_year
START_MONTH = start_month
START_DAY = start_day
def test_setup_fiscal_year():
# test defaults
day = fiscalyear.FiscalDate(2017, 12, 1)
assert day.fiscal_year == 2018
assert day.quarter == 1
# change fiscal year settings
fiscalyear.setup_fiscal_year("same", 1, 1)
assert day.fiscal_year == 2017
assert day.quarter == 4
# restore defaults and re-test
fiscalyear.setup_fiscal_year("previous", 10, 1)
assert day.fiscal_year == 2018
assert day.quarter == 1
```
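A validated variant of this idea could check its arguments before mutating the globals. Here is a minimal sketch (hypothetical; the exact signature shipped in fiscalyear.py may differ, and `_validate_fiscal_calendar_params` is the private checker the tests in this record exercise):
``` python
def setup_fiscal_calendar(start_year, start_month, start_day):
    """Validate, then update the module-level calendar settings."""
    global START_YEAR, START_MONTH, START_DAY
    # Reject bad values before touching any global state.
    _validate_fiscal_calendar_params(start_year, start_month, start_day)
    START_YEAR, START_MONTH, START_DAY = start_year, start_month, start_day
```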
This could also make it possible to change the Fiscal Year settings even if you don't import the whole module. E.g.
``` python
In [4]: from fiscalyear import FiscalQuarter, setup_fiscal_year
In [5]: quarter = FiscalQuarter(2018, 1)
In [6]: quarter.start
Out[6]: FiscalDateTime(2017, 10, 1, 0, 0)
In [7]: setup_fiscal_year('same', 1, 1)
In [8]: quarter.start
Out[8]: FiscalDateTime(2018, 1, 1, 0, 0)
``` | 0.0 | 77c5c0c82a62de36e77284e924f744bb1e770a31 | [
"test_fiscalyear.py::TestCalendarSettingsValidator::test_invalid_input[arguments0-ValueError]",
"test_fiscalyear.py::TestCalendarSettingsValidator::test_invalid_input[arguments1-TypeError]",
"test_fiscalyear.py::TestCalendarSettingsValidator::test_invalid_input[arguments2-TypeError]",
"test_fiscalyear.py::TestCalendarSettingsValidator::test_invalid_input[arguments3-TypeError]",
"test_fiscalyear.py::TestCalendarSettingsValidator::test_invalid_input[arguments4-TypeError]",
"test_fiscalyear.py::TestCalendarSettingsValidator::test_invalid_input[arguments5-TypeError]",
"test_fiscalyear.py::TestCalendarSettingsValidator::test_invalid_input[arguments6-ValueError]",
"test_fiscalyear.py::TestCalendarSettingsValidator::test_invalid_input[arguments7-ValueError]",
"test_fiscalyear.py::TestCalendarSettingsValidator::test_invalid_input[arguments8-ValueError]",
"test_fiscalyear.py::TestCalendarSettingsValidator::test_invalid_input[arguments9-TypeError]",
"test_fiscalyear.py::TestCalendarSettingsValidator::test_invalid_input[arguments10-TypeError]",
"test_fiscalyear.py::TestCalendarSettingsValidator::test_invalid_input[arguments11-TypeError]",
"test_fiscalyear.py::TestCalendarSettingsValidator::test_invalid_input[arguments12-ValueError]",
"test_fiscalyear.py::TestCalendarSettingsValidator::test_invalid_input[arguments13-ValueError]",
"test_fiscalyear.py::TestCalendarSettingsValidator::test_invalid_input[arguments14-ValueError]",
"test_fiscalyear.py::TestCalendarSettingsValidator::test_valid_input[arguments0]",
"test_fiscalyear.py::TestCalendarSettingsValidator::test_valid_input[arguments1]",
"test_fiscalyear.py::TestCalendarSettingsValidator::test_valid_input[arguments2]",
"test_fiscalyear.py::TestCalendarSettingsValidator::test_valid_input[arguments3]",
"test_fiscalyear.py::TestCalendarSettingsValidator::test_valid_input[arguments4]",
"test_fiscalyear.py::TestCalendarSettingsValidator::test_valid_input[arguments5]",
"test_fiscalyear.py::TestSetupFiscalCalendar::test_setup_fiscal_calendar",
"test_fiscalyear.py::TestFiscalCalendar::test_wrong_type"
] | [
"test_fiscalyear.py::TestCheckInt::test_invalid_input[asdf-TypeError]",
"test_fiscalyear.py::TestCheckInt::test_invalid_input[-999-TypeError]",
"test_fiscalyear.py::TestCheckInt::test_invalid_input[0.0-TypeError]",
"test_fiscalyear.py::TestCheckInt::test_invalid_input[value3-TypeError]",
"test_fiscalyear.py::TestCheckInt::test_valid_input[1_0]",
"test_fiscalyear.py::TestCheckInt::test_valid_input[2]",
"test_fiscalyear.py::TestCheckInt::test_valid_input[0_0]",
"test_fiscalyear.py::TestCheckInt::test_valid_input[-1]",
"test_fiscalyear.py::TestCheckInt::test_valid_input[-2]",
"test_fiscalyear.py::TestCheckInt::test_valid_input[1_1]",
"test_fiscalyear.py::TestCheckInt::test_valid_input[0_1]",
"test_fiscalyear.py::TestCheckInt::test_valid_input[999]",
"test_fiscalyear.py::TestCheckYear::test_invalid_input[asdf-TypeError]",
"test_fiscalyear.py::TestCheckYear::test_invalid_input[0.0-TypeError]",
"test_fiscalyear.py::TestCheckYear::test_invalid_input[value2-TypeError]",
"test_fiscalyear.py::TestCheckYear::test_invalid_input[-1-TypeError]",
"test_fiscalyear.py::TestCheckYear::test_invalid_input[-1-ValueError]",
"test_fiscalyear.py::TestCheckYear::test_invalid_input[0-ValueError0]",
"test_fiscalyear.py::TestCheckYear::test_invalid_input[0-ValueError1]",
"test_fiscalyear.py::TestCheckYear::test_invalid_input[10000-ValueError0]",
"test_fiscalyear.py::TestCheckYear::test_invalid_input[10000-ValueError1]",
"test_fiscalyear.py::TestCheckYear::test_valid_input[1_0]",
"test_fiscalyear.py::TestCheckYear::test_valid_input[2]",
"test_fiscalyear.py::TestCheckYear::test_valid_input[1_1]",
"test_fiscalyear.py::TestCheckYear::test_valid_input[999]",
"test_fiscalyear.py::TestCheckDay::test_invalid_input[1-asdf-TypeError]",
"test_fiscalyear.py::TestCheckDay::test_invalid_input[1--999-TypeError]",
"test_fiscalyear.py::TestCheckDay::test_invalid_input[1-0.0-TypeError]",
"test_fiscalyear.py::TestCheckDay::test_invalid_input[1-day3-TypeError]",
"test_fiscalyear.py::TestCheckDay::test_invalid_input[1--1-ValueError]",
"test_fiscalyear.py::TestCheckDay::test_invalid_input[1--1-TypeError]",
"test_fiscalyear.py::TestCheckDay::test_invalid_input[1-0-ValueError0]",
"test_fiscalyear.py::TestCheckDay::test_invalid_input[1-0-ValueError1]",
"test_fiscalyear.py::TestCheckDay::test_invalid_input[1-32-ValueError0]",
"test_fiscalyear.py::TestCheckDay::test_invalid_input[1-32-ValueError1]",
"test_fiscalyear.py::TestCheckDay::test_valid_input[1-1_0]",
"test_fiscalyear.py::TestCheckDay::test_valid_input[1-2]",
"test_fiscalyear.py::TestCheckDay::test_valid_input[1-1_1]",
"test_fiscalyear.py::TestCheckDay::test_valid_input[1-31_0]",
"test_fiscalyear.py::TestCheckDay::test_valid_input[1-31_1]",
"test_fiscalyear.py::TestCheckQuarter::test_invalid_input[asdf-TypeError]",
"test_fiscalyear.py::TestCheckQuarter::test_invalid_input[0.0-TypeError]",
"test_fiscalyear.py::TestCheckQuarter::test_invalid_input[value2-TypeError]",
"test_fiscalyear.py::TestCheckQuarter::test_invalid_input[-1-TypeError]",
"test_fiscalyear.py::TestCheckQuarter::test_invalid_input[-1-ValueError]",
"test_fiscalyear.py::TestCheckQuarter::test_invalid_input[0-ValueError0]",
"test_fiscalyear.py::TestCheckQuarter::test_invalid_input[0-ValueError1]",
"test_fiscalyear.py::TestCheckQuarter::test_invalid_input[5-ValueError0]",
"test_fiscalyear.py::TestCheckQuarter::test_invalid_input[5-ValueError1]",
"test_fiscalyear.py::TestCheckQuarter::test_valid_input[1_0]",
"test_fiscalyear.py::TestCheckQuarter::test_valid_input[2]",
"test_fiscalyear.py::TestCheckQuarter::test_valid_input[1_1]",
"test_fiscalyear.py::TestCheckQuarter::test_valid_input[4]",
"test_fiscalyear.py::TestFiscalCalendar::test_start_year",
"test_fiscalyear.py::TestFiscalCalendar::test_start_month",
"test_fiscalyear.py::TestFiscalCalendar::test_start_day",
"test_fiscalyear.py::TestFiscalCalendar::test_complex",
"test_fiscalyear.py::TestFiscalCalendar::test_nested",
"test_fiscalyear.py::TestFiscalCalendar::test_out_of_range",
"test_fiscalyear.py::TestFiscalYear::test_basic",
"test_fiscalyear.py::TestFiscalYear::test_repr",
"test_fiscalyear.py::TestFiscalYear::test_str",
"test_fiscalyear.py::TestFiscalYear::test_from_string",
"test_fiscalyear.py::TestFiscalYear::test_wrong_type",
"test_fiscalyear.py::TestFiscalYear::test_out_of_range",
"test_fiscalyear.py::TestFiscalYear::test_prev_fiscal_year",
"test_fiscalyear.py::TestFiscalYear::test_next_fiscal_year",
"test_fiscalyear.py::TestFiscalYear::test_start",
"test_fiscalyear.py::TestFiscalYear::test_end",
"test_fiscalyear.py::TestFiscalYear::test_q1",
"test_fiscalyear.py::TestFiscalYear::test_q2",
"test_fiscalyear.py::TestFiscalYear::test_q3",
"test_fiscalyear.py::TestFiscalYear::test_q4",
"test_fiscalyear.py::TestFiscalYear::test_contains",
"test_fiscalyear.py::TestFiscalYear::test_less_than",
"test_fiscalyear.py::TestFiscalYear::test_less_than_equals",
"test_fiscalyear.py::TestFiscalYear::test_equals",
"test_fiscalyear.py::TestFiscalYear::test_not_equals",
"test_fiscalyear.py::TestFiscalYear::test_greater_than",
"test_fiscalyear.py::TestFiscalYear::test_greater_than_equals",
"test_fiscalyear.py::TestFiscalQuarter::test_basic",
"test_fiscalyear.py::TestFiscalQuarter::test_repr",
"test_fiscalyear.py::TestFiscalQuarter::test_str",
"test_fiscalyear.py::TestFiscalQuarter::test_from_string",
"test_fiscalyear.py::TestFiscalQuarter::test_wrong_type",
"test_fiscalyear.py::TestFiscalQuarter::test_out_of_range",
"test_fiscalyear.py::TestFiscalQuarter::test_prev_quarter",
"test_fiscalyear.py::TestFiscalQuarter::test_next_quarter",
"test_fiscalyear.py::TestFiscalQuarter::test_start",
"test_fiscalyear.py::TestFiscalQuarter::test_end",
"test_fiscalyear.py::TestFiscalQuarter::test_bad_start_year",
"test_fiscalyear.py::TestFiscalQuarter::test_q1_start",
"test_fiscalyear.py::TestFiscalQuarter::test_q1_end",
"test_fiscalyear.py::TestFiscalQuarter::test_q2_start",
"test_fiscalyear.py::TestFiscalQuarter::test_q2_end",
"test_fiscalyear.py::TestFiscalQuarter::test_q3_start",
"test_fiscalyear.py::TestFiscalQuarter::test_q3_end",
"test_fiscalyear.py::TestFiscalQuarter::test_q4_start",
"test_fiscalyear.py::TestFiscalQuarter::test_q4_end",
"test_fiscalyear.py::TestFiscalQuarter::test_contains",
"test_fiscalyear.py::TestFiscalQuarter::test_less_than",
"test_fiscalyear.py::TestFiscalQuarter::test_less_than_equals",
"test_fiscalyear.py::TestFiscalQuarter::test_equals",
"test_fiscalyear.py::TestFiscalQuarter::test_not_equals",
"test_fiscalyear.py::TestFiscalQuarter::test_greater_than",
"test_fiscalyear.py::TestFiscalQuarter::test_greater_than_equals",
"test_fiscalyear.py::TestFiscalDate::test_basic",
"test_fiscalyear.py::TestFiscalDate::test_fiscal_year",
"test_fiscalyear.py::TestFiscalDate::test_prev_fiscal_year",
"test_fiscalyear.py::TestFiscalDate::test_next_fiscal_year",
"test_fiscalyear.py::TestFiscalDate::test_prev_quarter",
"test_fiscalyear.py::TestFiscalDate::test_next_quarter",
"test_fiscalyear.py::TestFiscalDateTime::test_basic",
"test_fiscalyear.py::TestFiscalDateTime::test_fiscal_year",
"test_fiscalyear.py::TestFiscalDateTime::test_prev_fiscal_year",
"test_fiscalyear.py::TestFiscalDateTime::test_next_fiscal_year",
"test_fiscalyear.py::TestFiscalDateTime::test_prev_quarter",
"test_fiscalyear.py::TestFiscalDateTime::test_next_quarter"
] | {
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false
} | 2018-02-08 16:57:16+00:00 | mit | 901 |
|
adamjstewart__fiscalyear-9 | diff --git a/fiscalyear.py b/fiscalyear.py
index 661c703..d4a569f 100644
--- a/fiscalyear.py
+++ b/fiscalyear.py
@@ -530,7 +530,12 @@ class FiscalQuarter(object):
if month < START_MONTH:
year += 1
- return FiscalDateTime(year, month, START_DAY, 0, 0, 0)
+ # Find the last day of the month
+ # If START_DAY is later, choose last day of month instead
+ max_day = calendar.monthrange(year, month)[1]
+ day = min(START_DAY, max_day)
+
+ return FiscalDateTime(year, month, day, 0, 0, 0)
@property
def end(self):
| adamjstewart/fiscalyear | a59cde7a881a85c5a65e523623e23668c2cb991c | diff --git a/test_fiscalyear.py b/test_fiscalyear.py
index 2395737..f80953c 100644
--- a/test_fiscalyear.py
+++ b/test_fiscalyear.py
@@ -275,6 +275,35 @@ class TestFiscalCalendar:
with fiscalyear.fiscal_calendar(start_month=2, start_day=29):
pass
+ def test_corner_cases(self):
+ # start_day does not exist in all months
+ with fiscalyear.fiscal_calendar(start_month=5, start_day=31):
+ # Non-leap year
+ assert fiscalyear.FiscalQuarter(2019, 1).start.day == 31
+ assert fiscalyear.FiscalQuarter(2019, 1).end.day == 30
+
+ assert fiscalyear.FiscalQuarter(2019, 2).start.day == 31
+ assert fiscalyear.FiscalQuarter(2019, 2).end.day == 29
+
+ assert fiscalyear.FiscalQuarter(2019, 3).start.day == 30
+ assert fiscalyear.FiscalQuarter(2019, 3).end.day == 27
+
+ assert fiscalyear.FiscalQuarter(2019, 4).start.day == 28
+ assert fiscalyear.FiscalQuarter(2019, 4).end.day == 30
+
+ # Leap year
+ assert fiscalyear.FiscalQuarter(2020, 1).start.day == 31
+ assert fiscalyear.FiscalQuarter(2020, 1).end.day == 30
+
+ assert fiscalyear.FiscalQuarter(2020, 2).start.day == 31
+ assert fiscalyear.FiscalQuarter(2020, 2).end.day == 29
+
+ assert fiscalyear.FiscalQuarter(2020, 3).start.day == 30
+ assert fiscalyear.FiscalQuarter(2020, 3).end.day == 28
+
+ assert fiscalyear.FiscalQuarter(2020, 4).start.day == 29
+ assert fiscalyear.FiscalQuarter(2020, 4).end.day == 30
+
class TestFiscalYear:
| FiscalCalendar starting on the 31st day of the month throws ValueError
If the `FiscalCalendar` starts on the 31st of any month with 31 days, then `FiscalDate().quarter` throws a `ValueError`.
Example code:
```python
from fiscalyear import FiscalDate, FiscalYear, fiscal_calendar
with fiscal_calendar(start_month=12, start_day=31):
fiscal_date = FiscalDate(2019, 10, 22)
fiscal_date.quarter
```
Traceback:
```
Traceback (most recent call last):
File "<input>", line 3, in <module>
File "/site-packages/fiscalyear.py", line 594, in quarter
if self in q:
File "/site-packages/fiscalyear.py", line 413, in __contains__
return self.start.date() <= item <= self.end.date()
File "/site-packages/fiscalyear.py", line 493, in end
next_start = self.next_quarter.start
File "/site-packages/fiscalyear.py", line 485, in start
return FiscalDateTime(year, month, START_DAY, 0, 0, 0)
ValueError: day is out of range for month
```
The quarters generated are:
* December 31st
* March 31st
* June 31st ← Not a valid day, so it breaks.
| 0.0 | a59cde7a881a85c5a65e523623e23668c2cb991c | [
"test_fiscalyear.py::TestFiscalCalendar::test_corner_cases",
"test_fiscalyear.py::TestFiscalQuarter::test_current",
"test_fiscalyear.py::TestFiscalQuarter::test_start",
"test_fiscalyear.py::TestFiscalQuarter::test_end",
"test_fiscalyear.py::TestFiscalQuarter::test_contains",
"test_fiscalyear.py::TestFiscalDate::test_basic",
"test_fiscalyear.py::TestFiscalDate::test_prev_fiscal_year",
"test_fiscalyear.py::TestFiscalDate::test_next_fiscal_year",
"test_fiscalyear.py::TestFiscalDate::test_prev_quarter",
"test_fiscalyear.py::TestFiscalDate::test_next_quarter",
"test_fiscalyear.py::TestFiscalDateTime::test_basic",
"test_fiscalyear.py::TestFiscalDateTime::test_prev_fiscal_year",
"test_fiscalyear.py::TestFiscalDateTime::test_next_fiscal_year",
"test_fiscalyear.py::TestFiscalDateTime::test_prev_quarter",
"test_fiscalyear.py::TestFiscalDateTime::test_next_quarter"
] | [
"test_fiscalyear.py::TestCheckInt::test_invalid_input[asdf-TypeError]",
"test_fiscalyear.py::TestCheckInt::test_invalid_input[-999-TypeError]",
"test_fiscalyear.py::TestCheckInt::test_invalid_input[0.0-TypeError]",
"test_fiscalyear.py::TestCheckInt::test_invalid_input[value3-TypeError]",
"test_fiscalyear.py::TestCheckInt::test_valid_input[1_0]",
"test_fiscalyear.py::TestCheckInt::test_valid_input[2]",
"test_fiscalyear.py::TestCheckInt::test_valid_input[0_0]",
"test_fiscalyear.py::TestCheckInt::test_valid_input[-1]",
"test_fiscalyear.py::TestCheckInt::test_valid_input[-2]",
"test_fiscalyear.py::TestCheckInt::test_valid_input[1_1]",
"test_fiscalyear.py::TestCheckInt::test_valid_input[0_1]",
"test_fiscalyear.py::TestCheckInt::test_valid_input[999]",
"test_fiscalyear.py::TestCheckYear::test_invalid_input[asdf-TypeError]",
"test_fiscalyear.py::TestCheckYear::test_invalid_input[0.0-TypeError]",
"test_fiscalyear.py::TestCheckYear::test_invalid_input[value2-TypeError]",
"test_fiscalyear.py::TestCheckYear::test_invalid_input[-1-TypeError]",
"test_fiscalyear.py::TestCheckYear::test_invalid_input[-1-ValueError]",
"test_fiscalyear.py::TestCheckYear::test_invalid_input[0-ValueError0]",
"test_fiscalyear.py::TestCheckYear::test_invalid_input[0-ValueError1]",
"test_fiscalyear.py::TestCheckYear::test_invalid_input[10000-ValueError0]",
"test_fiscalyear.py::TestCheckYear::test_invalid_input[10000-ValueError1]",
"test_fiscalyear.py::TestCheckYear::test_valid_input[1_0]",
"test_fiscalyear.py::TestCheckYear::test_valid_input[2]",
"test_fiscalyear.py::TestCheckYear::test_valid_input[1_1]",
"test_fiscalyear.py::TestCheckYear::test_valid_input[999]",
"test_fiscalyear.py::TestCheckDay::test_invalid_input[1-asdf-TypeError]",
"test_fiscalyear.py::TestCheckDay::test_invalid_input[1--999-TypeError]",
"test_fiscalyear.py::TestCheckDay::test_invalid_input[1-0.0-TypeError]",
"test_fiscalyear.py::TestCheckDay::test_invalid_input[1-day3-TypeError]",
"test_fiscalyear.py::TestCheckDay::test_invalid_input[1--1-ValueError]",
"test_fiscalyear.py::TestCheckDay::test_invalid_input[1--1-TypeError]",
"test_fiscalyear.py::TestCheckDay::test_invalid_input[1-0-ValueError0]",
"test_fiscalyear.py::TestCheckDay::test_invalid_input[1-0-ValueError1]",
"test_fiscalyear.py::TestCheckDay::test_invalid_input[1-32-ValueError0]",
"test_fiscalyear.py::TestCheckDay::test_invalid_input[1-32-ValueError1]",
"test_fiscalyear.py::TestCheckDay::test_valid_input[1-1_0]",
"test_fiscalyear.py::TestCheckDay::test_valid_input[1-2]",
"test_fiscalyear.py::TestCheckDay::test_valid_input[1-1_1]",
"test_fiscalyear.py::TestCheckDay::test_valid_input[1-31_0]",
"test_fiscalyear.py::TestCheckDay::test_valid_input[1-31_1]",
"test_fiscalyear.py::TestCheckQuarter::test_invalid_input[asdf-TypeError]",
"test_fiscalyear.py::TestCheckQuarter::test_invalid_input[0.0-TypeError]",
"test_fiscalyear.py::TestCheckQuarter::test_invalid_input[value2-TypeError]",
"test_fiscalyear.py::TestCheckQuarter::test_invalid_input[-1-TypeError]",
"test_fiscalyear.py::TestCheckQuarter::test_invalid_input[-1-ValueError]",
"test_fiscalyear.py::TestCheckQuarter::test_invalid_input[0-ValueError0]",
"test_fiscalyear.py::TestCheckQuarter::test_invalid_input[0-ValueError1]",
"test_fiscalyear.py::TestCheckQuarter::test_invalid_input[5-ValueError0]",
"test_fiscalyear.py::TestCheckQuarter::test_invalid_input[5-ValueError1]",
"test_fiscalyear.py::TestCheckQuarter::test_valid_input[1_0]",
"test_fiscalyear.py::TestCheckQuarter::test_valid_input[2]",
"test_fiscalyear.py::TestCheckQuarter::test_valid_input[1_1]",
"test_fiscalyear.py::TestCheckQuarter::test_valid_input[4]",
"test_fiscalyear.py::TestValidateFiscalCalendarParams::test_invalid_input[arguments0-ValueError]",
"test_fiscalyear.py::TestValidateFiscalCalendarParams::test_invalid_input[arguments1-TypeError]",
"test_fiscalyear.py::TestValidateFiscalCalendarParams::test_invalid_input[arguments2-TypeError]",
"test_fiscalyear.py::TestValidateFiscalCalendarParams::test_invalid_input[arguments3-TypeError]",
"test_fiscalyear.py::TestValidateFiscalCalendarParams::test_invalid_input[arguments4-TypeError]",
"test_fiscalyear.py::TestValidateFiscalCalendarParams::test_invalid_input[arguments5-TypeError]",
"test_fiscalyear.py::TestValidateFiscalCalendarParams::test_invalid_input[arguments6-ValueError]",
"test_fiscalyear.py::TestValidateFiscalCalendarParams::test_invalid_input[arguments7-ValueError]",
"test_fiscalyear.py::TestValidateFiscalCalendarParams::test_invalid_input[arguments8-ValueError]",
"test_fiscalyear.py::TestValidateFiscalCalendarParams::test_invalid_input[arguments9-TypeError]",
"test_fiscalyear.py::TestValidateFiscalCalendarParams::test_invalid_input[arguments10-TypeError]",
"test_fiscalyear.py::TestValidateFiscalCalendarParams::test_invalid_input[arguments11-TypeError]",
"test_fiscalyear.py::TestValidateFiscalCalendarParams::test_invalid_input[arguments12-ValueError]",
"test_fiscalyear.py::TestValidateFiscalCalendarParams::test_invalid_input[arguments13-ValueError]",
"test_fiscalyear.py::TestValidateFiscalCalendarParams::test_invalid_input[arguments14-ValueError]",
"test_fiscalyear.py::TestValidateFiscalCalendarParams::test_valid_input[arguments0]",
"test_fiscalyear.py::TestValidateFiscalCalendarParams::test_valid_input[arguments1]",
"test_fiscalyear.py::TestValidateFiscalCalendarParams::test_valid_input[arguments2]",
"test_fiscalyear.py::TestValidateFiscalCalendarParams::test_valid_input[arguments3]",
"test_fiscalyear.py::TestValidateFiscalCalendarParams::test_valid_input[arguments4]",
"test_fiscalyear.py::TestValidateFiscalCalendarParams::test_valid_input[arguments5]",
"test_fiscalyear.py::TestSetupFiscalCalendar::test_start_year",
"test_fiscalyear.py::TestSetupFiscalCalendar::test_start_month",
"test_fiscalyear.py::TestSetupFiscalCalendar::test_start_day",
"test_fiscalyear.py::TestSetupFiscalCalendar::test_complex",
"test_fiscalyear.py::TestFiscalCalendar::test_start_year",
"test_fiscalyear.py::TestFiscalCalendar::test_start_month",
"test_fiscalyear.py::TestFiscalCalendar::test_start_day",
"test_fiscalyear.py::TestFiscalCalendar::test_complex",
"test_fiscalyear.py::TestFiscalCalendar::test_nested",
"test_fiscalyear.py::TestFiscalCalendar::test_wrong_type",
"test_fiscalyear.py::TestFiscalCalendar::test_out_of_range",
"test_fiscalyear.py::TestFiscalYear::test_basic",
"test_fiscalyear.py::TestFiscalYear::test_current",
"test_fiscalyear.py::TestFiscalYear::test_repr",
"test_fiscalyear.py::TestFiscalYear::test_str",
"test_fiscalyear.py::TestFiscalYear::test_from_string",
"test_fiscalyear.py::TestFiscalYear::test_wrong_type",
"test_fiscalyear.py::TestFiscalYear::test_out_of_range",
"test_fiscalyear.py::TestFiscalYear::test_prev_fiscal_year",
"test_fiscalyear.py::TestFiscalYear::test_next_fiscal_year",
"test_fiscalyear.py::TestFiscalYear::test_start",
"test_fiscalyear.py::TestFiscalYear::test_end",
"test_fiscalyear.py::TestFiscalYear::test_q1",
"test_fiscalyear.py::TestFiscalYear::test_q2",
"test_fiscalyear.py::TestFiscalYear::test_q3",
"test_fiscalyear.py::TestFiscalYear::test_q4",
"test_fiscalyear.py::TestFiscalYear::test_contains",
"test_fiscalyear.py::TestFiscalYear::test_less_than",
"test_fiscalyear.py::TestFiscalYear::test_less_than_equals",
"test_fiscalyear.py::TestFiscalYear::test_equals",
"test_fiscalyear.py::TestFiscalYear::test_not_equals",
"test_fiscalyear.py::TestFiscalYear::test_greater_than",
"test_fiscalyear.py::TestFiscalYear::test_greater_than_equals",
"test_fiscalyear.py::TestFiscalQuarter::test_basic",
"test_fiscalyear.py::TestFiscalQuarter::test_repr",
"test_fiscalyear.py::TestFiscalQuarter::test_str",
"test_fiscalyear.py::TestFiscalQuarter::test_from_string",
"test_fiscalyear.py::TestFiscalQuarter::test_wrong_type",
"test_fiscalyear.py::TestFiscalQuarter::test_out_of_range",
"test_fiscalyear.py::TestFiscalQuarter::test_prev_quarter",
"test_fiscalyear.py::TestFiscalQuarter::test_next_quarter",
"test_fiscalyear.py::TestFiscalQuarter::test_bad_start_year",
"test_fiscalyear.py::TestFiscalQuarter::test_q1_start",
"test_fiscalyear.py::TestFiscalQuarter::test_q1_end",
"test_fiscalyear.py::TestFiscalQuarter::test_q2_start",
"test_fiscalyear.py::TestFiscalQuarter::test_q2_end",
"test_fiscalyear.py::TestFiscalQuarter::test_q3_start",
"test_fiscalyear.py::TestFiscalQuarter::test_q3_end",
"test_fiscalyear.py::TestFiscalQuarter::test_q4_start",
"test_fiscalyear.py::TestFiscalQuarter::test_q4_end",
"test_fiscalyear.py::TestFiscalQuarter::test_less_than",
"test_fiscalyear.py::TestFiscalQuarter::test_less_than_equals",
"test_fiscalyear.py::TestFiscalQuarter::test_equals",
"test_fiscalyear.py::TestFiscalQuarter::test_not_equals",
"test_fiscalyear.py::TestFiscalQuarter::test_greater_than",
"test_fiscalyear.py::TestFiscalQuarter::test_greater_than_equals",
"test_fiscalyear.py::TestFiscalDate::test_fiscal_year",
"test_fiscalyear.py::TestFiscalDateTime::test_fiscal_year"
] | {
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true
} | 2019-10-23 01:02:35+00:00 | mit | 902 |
|
adamlwgriffiths__Pyrr-88 | diff --git a/CHANGELOG.md b/CHANGELOG.md
index 9215a38..624dd4e 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -4,6 +4,11 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](http://keepachangelog.com/)
and this project adheres to [Semantic Versioning](http://semver.org/).
+## [0.10.3] - 2019-04-19
+
+- Fix some of the plane.create_(xz,xy,yz) not inverting correctly.
+- Fix incorrect distance in point_height_above_plane.
+
## [0.10.2] - 2019-03-13
- Add MANIFEST.in and add /tests and /docs to source dist (#90)
diff --git a/pyrr/plane.py b/pyrr/plane.py
index 97e5b98..12d06e0 100755
--- a/pyrr/plane.py
+++ b/pyrr/plane.py
@@ -5,7 +5,8 @@ Planes are represented using a numpy.array of shape (4,).
The values represent the plane equation using the values A,B,C,D.
The first three values are the normal vector.
-The fourth value is the distance of the plane from the origin, down the normal.
+The fourth value is the distance of the origin from the plane, down the normal.
+A negative value indicates the origin is behind the plane, relative to the normal.
.. seealso: http://en.wikipedia.org/wiki/Plane_(geometry)
.. seealso: http://mathworld.wolfram.com/Plane.html
@@ -17,12 +18,15 @@ from .utils import all_parameters_as_numpy_arrays, parameters_as_numpy_arrays
def create(normal=None, distance=0.0, dtype=None):
- """Creates a plane that runs along the X,Y plane.
+ """Creates a plane oriented toward the normal, at distance below the origin.
+ If no normal is provided, the plane will by created at the origin with a normal
+ of [0, 0, 1].
- It crosses the origin with a normal of 0,0,1 (+Z).
+ Negative distance indicates the plane is facing toward the origin.
:rtype: numpy.array
- :return: A plane that runs along the X,Y plane.
+ :return: A plane with the specified normal at a distance from the origin of
+ -distance.
"""
if normal is None:
normal = [0.0, 0.0, 1.0]
@@ -75,37 +79,43 @@ def create_from_position(position, normal, dtype=None):
normal.
"""
dtype = dtype or position.dtype
- # -d = a * px + b * py + c * pz
+ # -d = a * x + b * y + c * z
n = vector.normalize(normal)
d = -np.sum(n * position)
- return create(n, d, dtype)
+ return create(n, -d, dtype)
def create_xy(invert=False, distance=0., dtype=None):
"""Create a plane on the XY plane, starting at the origin with +Z being
the up vector.
- The distance is the distance along the normal (-Z if inverted, otherwise +Z).
+ The plane is distance units along the Z axis. -Z if inverted.
"""
- invert = -1. if invert else 1.
- return np.array([0., 0., 1. * invert, distance])
+ pl = np.array([0., 0., 1., distance])
+ if invert:
+ pl = invert_normal(pl)
+ return pl
def create_xz(invert=False, distance=0., dtype=None):
"""Create a plane on the XZ plane, starting at the origin with +Y being
the up vector.
- The distance is the distance along the normal (-Y if inverted, otherwise +Y).
+ The plane is distance units along the Y axis. -Y if inverted.
"""
- invert = -1. if invert else 1.
- return np.array([0., 1. * invert, 0., distance])
+ pl = np.array([0., 1., 0., distance])
+ if invert:
+ pl = invert_normal(pl)
+ return pl
def create_yz(invert=False, distance=0., dtype=None):
"""Create a plane on the YZ plane, starting at the origin with +X being
the up vector.
- The distance is the distance along the normal (-X if inverted, otherwise +X).
+ The plane is distance units along the X axis. -X if inverted.
"""
- invert = -1. if invert else 1.
- return np.array([1. * invert, 0., 0., distance])
+ pl = np.array([1., 0., 0., distance])
+ if invert:
+ pl = invert_normal(pl)
+ return pl
def invert_normal(plane):
"""Flips the normal of the plane.
@@ -127,7 +137,7 @@ def position(plane):
:rtype: numpy.array
:return: A valid position that lies on the plane.
"""
- return plane[:3] * plane[3]
+ return normal(plane) * distance(plane)
def normal(plane):
"""Extracts the normal vector from a plane.
@@ -137,3 +147,10 @@ def normal(plane):
:return: The normal vector of the plane.
"""
return plane[:3].copy()
+
+def distance(plane):
+ """Distance the plane is from the origin along its the normal.
+
+ Negative value indicates the plane is facing the origin.
+ """
+ return plane[3]
diff --git a/pyrr/version.py b/pyrr/version.py
index cb94a70..a76b028 100755
--- a/pyrr/version.py
+++ b/pyrr/version.py
@@ -1,4 +1,5 @@
# -*- coding: utf-8 -*-
# the version of software
# this is used by the setup.py script
-__version__ = '0.10.2'
+__version__ = '0.10.3'
+
| adamlwgriffiths/Pyrr | 530820191954abcb9c7c54602fe13ba16912ba48 | diff --git a/pyrr/geometric_tests.py b/pyrr/geometric_tests.py
index 2f60c2e..601e8f6 100755
--- a/pyrr/geometric_tests.py
+++ b/pyrr/geometric_tests.py
@@ -5,7 +5,7 @@ various forms data types.
from __future__ import absolute_import, division, print_function
import math
import numpy as np
-from . import rectangle, vector, vector3
+from . import rectangle, vector, vector3, plane
from .utils import all_parameters_as_numpy_arrays, parameters_as_numpy_arrays, solve_quadratic_equation
"""
@@ -75,12 +75,12 @@ def point_intersect_rectangle(point, rect):
return None
return point
-@parameters_as_numpy_arrays('ray', 'plane')
-def ray_intersect_plane(ray, plane, front_only=False):
+@parameters_as_numpy_arrays('ray', 'pl')
+def ray_intersect_plane(ray, pl, front_only=False):
"""Calculates the intersection point of a ray and a plane.
:param numpy.array ray: The ray to test for intersection.
- :param numpy.array plane: The ray to test for intersection.
+ :param numpy.array pl: The plane to test for intersection.
:param boolean front_only: Specifies if the ray should
only hit the front of the plane.
Collisions from the rear of the plane will be
@@ -103,8 +103,8 @@ def ray_intersect_plane(ray, plane, front_only=False):
if rd.n == 0, the ray is parallel to the
plane.
"""
- p = plane[:3] * plane[3]
- n = plane[:3]
+ p = plane.position(pl)
+ n = plane.normal(pl)
rd_n = vector.dot(ray[1], n)
if rd_n == 0.0:
@@ -306,7 +306,7 @@ def ray_intersect_aabb(ray, aabb):
return point
@all_parameters_as_numpy_arrays
-def point_height_above_plane(point, plane):
+def point_height_above_plane(point, pl):
"""Calculates how high a point is above a plane.
:param numpy.array point: The point to check.
@@ -316,17 +316,17 @@ def point_height_above_plane(point, plane):
negative if the point is behind the plane.
"""
"""
- http://www.vitutor.com/geometry/distance/point_plane.html
- d(P) = (AX + BY + CZ + D) / sqrt(A^2 + B^2 + C^2)
-
- Normal is unit length, so it's length is 1.0.
- Therefore, we can ignore the division all together.
- Just perform Pn . [XYZ1]
+ Because we store normalised normal, we can simply
+ use: n . (p - p0)
+ where:
+ n is the plane normal
+ p is the plane position
+ p0 is the point
"""
- return np.dot(plane, [point[0], point[1], point[2], 1.0])
+ return vector.dot(plane.normal(pl), point - plane.position(pl))
@all_parameters_as_numpy_arrays
-def point_closest_point_on_plane(point, plane):
+def point_closest_point_on_plane(point, pl):
"""Calculates the point on a plane that is closest to a point.
:param numpy.array point: The point to check with.
@@ -343,8 +343,8 @@ def point_closest_point_on_plane(point, plane):
d is the value of normal dot position
n is the plane normal
"""
- n = plane[:3]
- p = n * plane[3]
+ n = plane.normal(pl)
+ p = n * plane.distance(pl)
d = np.dot(p, n)
qn = np.dot(point, n)
return point + (n * (d - qn))
diff --git a/tests/test_geometric_tests.py b/tests/test_geometric_tests.py
index c982b12..0d52e3e 100644
--- a/tests/test_geometric_tests.py
+++ b/tests/test_geometric_tests.py
@@ -204,6 +204,15 @@ class test_geometric_tests(unittest.TestCase):
self.assertEqual(result, None)
def test_point_height_above_plane(self):
+ pl = plane.create([0., 1., 0.], 1.)
+ p = np.array([0., 1., 0.])
+ result = gt.point_height_above_plane(p, pl)
+ self.assertEqual(result, 0.)
+
+ p = np.array([0., 0., 0.])
+ result = gt.point_height_above_plane(p, pl)
+ self.assertEqual(result, -1.)
+
v1 = np.array([ 0.0, 0.0, 1.0])
v2 = np.array([ 1.0, 0.0, 1.0])
v3 = np.array([ 0.0, 1.0, 1.0])
@@ -215,6 +224,11 @@ class test_geometric_tests(unittest.TestCase):
result = gt.point_height_above_plane(p, pl)
self.assertEqual(result, 19.)
+ pl = plane.create_xz(distance=5.)
+ p = np.array([0., 5., 0.])
+ h = gt.point_height_above_plane(p, pl)
+ self.assertEqual(h, 0.)
+
def test_point_closest_point_on_plane(self):
pl = np.array([ 0.0, 1.0, 0.0, 0.0])
p = np.array([ 5.0, 20.0, 5.0])
@@ -300,4 +314,3 @@ class test_geometric_tests(unittest.TestCase):
if __name__ == '__main__':
unittest.main()
-
diff --git a/tests/test_plane.py b/tests/test_plane.py
index 79fa8b8..0cb7ba6 100644
--- a/tests/test_plane.py
+++ b/tests/test_plane.py
@@ -3,7 +3,7 @@ try:
except:
import unittest
import numpy as np
-from pyrr import plane
+from pyrr import plane, vector
class test_plane(unittest.TestCase):
def test_import(self):
@@ -15,18 +15,26 @@ class test_plane(unittest.TestCase):
result = plane.create()
self.assertTrue(np.allclose(result, [0,0,1,0]))
- def test_create_2(self):
result = plane.create([1.,0.,0.], 5.)
self.assertTrue(np.allclose(result, [1.,0.,0.,5.]))
def test_create_from_points(self):
- vecs = np.array([
- [ 1.0, 0.0, 0.0 ],
- [ 0.0, 1.0, 0.0 ],
- [ 1.0, 1.0, 0.0 ]
- ])
- result = plane.create_from_points(*vecs)
+ result = plane.create_from_points(
+ [1., 0., 0.],
+ [0., 1., 0.],
+ [1., 1., 0.],
+ )
self.assertTrue(np.allclose(result, [0.,0.,1.,0.]))
+ self.assertTrue(np.allclose(plane.position(result), [0., 0., 0.]))
+
+ result = plane.create_from_points(
+ [1., 1., 0.],
+ [1., 1., 1.],
+ [0., 1., 1.],
+ )
+ expected = plane.create([0.,1.,0.], 1.)
+ self.assertTrue(np.allclose(result, expected))
+ self.assertTrue(np.allclose(plane.position(result), [0., 1., 0.]))
def test_create_from_position(self):
position = np.array([1.0, 0.0, 0.0])
@@ -34,29 +42,42 @@ class test_plane(unittest.TestCase):
result = plane.create_from_position(position, normal)
self.assertTrue(np.allclose(result, [0., 1., 0., 0.]))
+ p0 = position + [1., 0., 0.]
+ p = position
+ n = vector.normalise(normal)
+ coplanar = p - p0
+ self.assertEqual(np.sum(n * coplanar), 0.)
+
def test_create_xy(self):
result = plane.create_xy()
self.assertTrue(np.allclose(result, [0., 0., 1., 0.]))
- def test_create_xy_invert_distance(self):
+ result = plane.create_xy(distance=2.)
+ self.assertTrue(np.allclose(result, [0., 0., 1., 2.]))
+
result = plane.create_xy(invert=True, distance=2.)
- self.assertTrue(np.allclose(result, [0., 0., -1., 2.]))
+ self.assertTrue(np.allclose(result, [0., 0., -1., -2.]))
def test_create_xz(self):
result = plane.create_xz()
self.assertTrue(np.allclose(result, [0., 1., 0., 0.]))
- def test_create_xz_invert_distance(self):
+ result = plane.create_xz(distance=2.)
+ self.assertTrue(np.allclose(result, [0., 1., 0., 2.]))
+
result = plane.create_xz(invert=True, distance=2.)
- self.assertTrue(np.allclose(result, [0., -1., 0., 2.]))
+ self.assertTrue(np.allclose(result, [0., -1., 0., -2.]))
def test_create_yz(self):
result = plane.create_yz()
self.assertTrue(np.allclose(result, [1., 0., 0., 0.]))
- def test_create_yz_invert_distance(self):
+ result = plane.create_yz(distance=2.)
+ self.assertTrue(np.allclose(result, [1., 0., 0., 2.]))
+
result = plane.create_yz(invert=True, distance=2.)
- self.assertTrue(np.allclose(result, [-1., 0., 0., 2.]))
+ print(result)
+ self.assertTrue(np.allclose(result, [-1., 0., 0., -2.]))
def test_invert_normal(self):
p = np.array([1.0, 0.0, 0.0, 1.0])
@@ -64,9 +85,12 @@ class test_plane(unittest.TestCase):
self.assertTrue(np.allclose(result, [-1.0, 0.0, 0.0, -1.0]))
def test_position(self):
- p = plane.create_xz(distance=5.)
+ p = plane.create_xz(distance=-5.)
result = plane.position(p)
- self.assertTrue(np.allclose(result, [0.,5.,0.]))
+ self.assertTrue(np.allclose(result, [0.,-5.,0.]))
+
+ p = plane.create_from_position(position=[0., 0., 1.], normal=[0., 0., 1.])
+ self.assertTrue(np.allclose(plane.position(p), [0., 0., 1.]))
def test_normal(self):
p = plane.create_xz(distance=5.)
@@ -75,4 +99,3 @@ class test_plane(unittest.TestCase):
if __name__ == '__main__':
unittest.main()
-
| Inconsistent Position / Distance to plane
Thank you for this useful library.
There is some unexpected behavior for planes concerning the sign of the distance:
Take for example the following Plane:
```
plane = pyrr.plane.create_from_position(position=[0, 0, 1], normal=[0, 0, 1])
```
which returns the expected plane: `array([ 0, 0, 1, -1])`. However, `pyrr.plane.position(plane)` returns `array([ 0, 0, -1])` instead of `[0, 0, 1]`.
A related issue can be observed when calculating the height above the plane of a point computed to lie on it, which should therefore be 0:
```pyrr.geometric_tests.point_height_above_plane(pyrr.geometric_tests.point_closest_point_on_plane([0, 0, 1], plane), plane)```
but it returns -2.
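Under the convention the fix above settles on, the fourth component stores the signed distance of the plane from the origin along its unit normal, so both expectations hold. A small runnable sanity check in plain NumPy (a sketch of the semantics, not the library's internals):
```python
import numpy as np

normal = np.array([0., 0., 1.])
position = np.array([0., 0., 1.])

# The stored distance is n . p for a point p on the plane.
d = np.dot(normal, position)   # 1.0, matching the corrected sign
pl = np.append(normal, d)      # array([0., 0., 1., 1.])

# position(pl) == normal * distance recovers a point on the plane...
p0 = pl[:3] * pl[3]
assert np.allclose(p0, position)
# ...and a point on the plane sits at height n . (p - p0) == 0 above it.
assert np.isclose(np.dot(pl[:3], position - p0), 0.0)
```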
| 0.0 | 530820191954abcb9c7c54602fe13ba16912ba48 | [
"tests/test_geometric_tests.py::test_geometric_tests::test_point_closest_point_on_plane",
"tests/test_geometric_tests.py::test_geometric_tests::test_point_height_above_plane",
"tests/test_plane.py::test_plane::test_create_from_points",
"tests/test_plane.py::test_plane::test_create_xy",
"tests/test_plane.py::test_plane::test_create_xz",
"tests/test_plane.py::test_plane::test_create_yz",
"tests/test_plane.py::test_plane::test_position"
] | [
"tests/test_geometric_tests.py::test_geometric_tests::test_import",
"tests/test_geometric_tests.py::test_geometric_tests::test_point_closest_point_on_line",
"tests/test_geometric_tests.py::test_geometric_tests::test_point_closest_point_on_line_2",
"tests/test_geometric_tests.py::test_geometric_tests::test_point_closest_point_on_line_segment",
"tests/test_geometric_tests.py::test_geometric_tests::test_point_closest_point_on_ray",
"tests/test_geometric_tests.py::test_geometric_tests::test_point_intersect_line",
"tests/test_geometric_tests.py::test_geometric_tests::test_point_intersect_line_invalid",
"tests/test_geometric_tests.py::test_geometric_tests::test_point_intersect_line_segment",
"tests/test_geometric_tests.py::test_geometric_tests::test_point_intersect_line_segment_invalid",
"tests/test_geometric_tests.py::test_geometric_tests::test_point_intersect_rectangle_invalid_intersections_1",
"tests/test_geometric_tests.py::test_geometric_tests::test_point_intersect_rectangle_invalid_intersections_2",
"tests/test_geometric_tests.py::test_geometric_tests::test_point_intersect_rectangle_invalid_intersections_3",
"tests/test_geometric_tests.py::test_geometric_tests::test_point_intersect_rectangle_valid_intersections_1",
"tests/test_geometric_tests.py::test_geometric_tests::test_point_intersect_rectangle_valid_intersections_2",
"tests/test_geometric_tests.py::test_geometric_tests::test_point_intersect_rectangle_valid_intersections_3",
"tests/test_geometric_tests.py::test_geometric_tests::test_ray_coincident_ray",
"tests/test_geometric_tests.py::test_geometric_tests::test_ray_coincident_ray_2",
"tests/test_geometric_tests.py::test_geometric_tests::test_ray_coincident_ray_3",
"tests/test_geometric_tests.py::test_geometric_tests::test_ray_intersect_aabb_invalid_1",
"tests/test_geometric_tests.py::test_geometric_tests::test_ray_intersect_aabb_valid_1",
"tests/test_geometric_tests.py::test_geometric_tests::test_ray_intersect_aabb_valid_2",
"tests/test_geometric_tests.py::test_geometric_tests::test_ray_intersect_aabb_valid_3",
"tests/test_geometric_tests.py::test_geometric_tests::test_ray_intersect_plane",
"tests/test_geometric_tests.py::test_geometric_tests::test_ray_intersect_plane_front_only",
"tests/test_geometric_tests.py::test_geometric_tests::test_ray_intersect_plane_invalid",
"tests/test_geometric_tests.py::test_geometric_tests::test_ray_intersect_sphere_no_solution_1",
"tests/test_geometric_tests.py::test_geometric_tests::test_ray_intersect_sphere_no_solution_2",
"tests/test_geometric_tests.py::test_geometric_tests::test_ray_parallel_ray",
"tests/test_geometric_tests.py::test_geometric_tests::test_ray_parallel_ray_2",
"tests/test_geometric_tests.py::test_geometric_tests::test_ray_parallel_ray_3",
"tests/test_geometric_tests.py::test_geometric_tests::test_sphere_does_intersect_sphere_1",
"tests/test_geometric_tests.py::test_geometric_tests::test_sphere_does_intersect_sphere_2",
"tests/test_geometric_tests.py::test_geometric_tests::test_sphere_does_intersect_sphere_3",
"tests/test_geometric_tests.py::test_geometric_tests::test_sphere_does_intersect_sphere_4",
"tests/test_geometric_tests.py::test_geometric_tests::test_sphere_penetration_sphere_1",
"tests/test_geometric_tests.py::test_geometric_tests::test_sphere_penetration_sphere_2",
"tests/test_geometric_tests.py::test_geometric_tests::test_sphere_penetration_sphere_3",
"tests/test_geometric_tests.py::test_geometric_tests::test_sphere_penetration_sphere_4",
"tests/test_geometric_tests.py::test_geometric_tests::test_vector_parallel_vector",
"tests/test_geometric_tests.py::test_geometric_tests::test_vector_parallel_vector_invalid",
"tests/test_plane.py::test_plane::test_create",
"tests/test_plane.py::test_plane::test_create_from_position",
"tests/test_plane.py::test_plane::test_import",
"tests/test_plane.py::test_plane::test_invert_normal",
"tests/test_plane.py::test_plane::test_normal"
] | {
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false
} | 2019-02-06 12:26:02+00:00 | bsd-3-clause | 903 |
|
adbar__simplemma-114 | diff --git a/simplemma/language_detector.py b/simplemma/language_detector.py
index 52369a6..f6fab14 100644
--- a/simplemma/language_detector.py
+++ b/simplemma/language_detector.py
@@ -198,14 +198,18 @@ class LanguageDetector:
Returns:
float: The proportion of text in the target language(s).
"""
- return sum(
- percentage
- for (
- lang_code,
- percentage,
- ) in self.proportion_in_each_language(text).items()
- if lang_code != "unk"
- )
+ tokens = self._token_sampler.sample_text(text)
+ if len(tokens) == 0:
+ return 0
+
+ in_target = 0
+ for token in tokens:
+ for lang_code in self._lang:
+ candidate = self._lemmatization_strategy.get_lemma(token, lang_code)
+ if candidate is not None:
+ in_target += 1
+ break
+ return in_target / len(tokens)
def main_language(
self,
| adbar/simplemma | fa1d96469ca601b5249b8d5cbb42c1474cfd83bb | diff --git a/tests/test_language_detector.py b/tests/test_language_detector.py
index 4affdd1..ad68f0c 100644
--- a/tests/test_language_detector.py
+++ b/tests/test_language_detector.py
@@ -108,6 +108,15 @@ def test_in_target_language() -> None:
== 1.0
)
+ langs = ("en", "de")
+ text = "It was a true gift"
+ assert (
+ LanguageDetector(lang=langs).proportion_in_target_languages(text)
+ == in_target_language(text, lang=langs)
+ == 1.0
+ )
+ in_target_language("It was a true gift", lang=("en", "de"))
+
def test_main_language():
text = "Dieser Satz ist auf Deutsch."
| in_target_language can count words twice and return ratios above 1.0
I noticed that in the current `main` version, the `in_target_language` function can return ratios above 1.0 when it is given more than one language and words in the input match several of those languages.
Example:
>>> in_target_language('It was a true gift', lang='en')
1.0
>>> in_target_language('It was a true gift', lang='de')
0.6666666666666666
>>> in_target_language('It was a true gift', lang=('en','de'))
1.6666666666666665
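The inflated ratio comes from crediting a token once per language it matches. A minimal sketch of the corrected counting, mirroring the patched `proportion_in_target_languages` above (`get_lemma` and the toy vocabularies here are illustrative stand-ins):
```python
def proportion_in_target(tokens, langs, get_lemma):
    """Fraction of tokens with a lemma in at least one target language."""
    if not tokens:
        return 0.0
    in_target = 0
    for token in tokens:
        for lang in langs:
            if get_lemma(token, lang) is not None:
                in_target += 1
                break  # credit each token at most once
    return in_target / len(tokens)

vocab = {"en": {"it", "was", "a", "true", "gift"}, "de": {"true", "gift"}}
get_lemma = lambda tok, lang: tok if tok in vocab[lang] else None
tokens = "it was a true gift".split()
# 1.0, even though 'true' and 'gift' match both languages
assert proportion_in_target(tokens, ("en", "de"), get_lemma) == 1.0
```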
Simplemma 0.9.1 doesn't have this problem:
>>> in_target_language('It was a true gift', lang='en')
1.0
>>> in_target_language('It was a true gift', lang='de')
0.6666666666666666
>>> in_target_language('It was a true gift', lang=('en','de'))
1.0
It's not just a question of capping the scores at 1.0. The problem is that a single word can count more than once if it happens to match multiple languages. Below is an example that demonstrates the problem. I added nonsense words that don't match any language, but their presence is compensated for by words that are counted twice.
Current `main` version:
>>> in_target_language('It was a true gift xxx yyy', lang=('en','de'))
1.0
Simplemma 0.9.1:
>>> in_target_language('It was a true gift xxx yyy', lang=('en','de'))
0.6 | 0.0 | fa1d96469ca601b5249b8d5cbb42c1474cfd83bb | [
"tests/test_language_detector.py::test_in_target_language"
] | [
"tests/test_language_detector.py::test_proportion_in_each_language",
"tests/test_language_detector.py::test_main_language"
] | {
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true
} | 2023-09-12 17:03:16+00:00 | mit | 904 |
|
adbar__simplemma-85 | diff --git a/simplemma/__init__.py b/simplemma/__init__.py
index 74510e6..6a553e8 100644
--- a/simplemma/__init__.py
+++ b/simplemma/__init__.py
@@ -7,6 +7,7 @@ __license__ = "MIT"
__version__ = "0.9.1"
+from .dictionary_factory import DictionaryFactory, DefaultDictionaryFactory
from .language_detector import LanguageDetector, in_target_language, langdetect
from .lemmatizer import Lemmatizer, lemmatize, lemma_iterator, text_lemmatizer, is_known
from .tokenizer import Tokenizer, RegexTokenizer, simple_tokenizer
@@ -16,5 +17,3 @@ from .token_sampler import (
MostCommonTokenSampler,
RelaxedMostCommonTokenSampler,
)
-from .dictionary_factory import DictionaryFactory, DefaultDictionaryFactory
-from .dictionary_pickler import *
diff --git a/simplemma/dictionary_pickler.py b/training/dictionary_pickler.py
similarity index 90%
rename from simplemma/dictionary_pickler.py
rename to training/dictionary_pickler.py
index 5f400ee..f0b92e0 100644
--- a/simplemma/dictionary_pickler.py
+++ b/training/dictionary_pickler.py
@@ -7,15 +7,11 @@ from operator import itemgetter
from pathlib import Path
from typing import Dict, List, Optional
-try:
- from .dictionary_factory import SUPPORTED_LANGUAGES
- from .strategies.defaultrules import DEFAULT_RULES
- from .utils import levenshtein_dist
-# local error, also ModuleNotFoundError for Python >= 3.6
-except ImportError: # pragma: no cover
- from dictionary_factory import SUPPORTED_LANGUAGES # type: ignore
- from strategies.defaultrules import DEFAULT_RULES # type: ignore
- from utils import levenshtein_dist # type: ignore
+import simplemma
+from simplemma.dictionary_factory import SUPPORTED_LANGUAGES
+from simplemma.strategies.defaultrules import DEFAULT_RULES
+from simplemma.utils import levenshtein_dist
+
LOGGER = logging.getLogger(__name__)
@@ -138,7 +134,7 @@ def _pickle_dict(
mydict = dict(sorted(mydict.items(), key=itemgetter(1)))
if filepath is None:
filename = f"data/{langcode}.plzma"
- filepath = str(Path(__file__).parent / filename)
+ filepath = str(Path(simplemma.__file__).parent / filename)
with lzma.open(filepath, "wb") as filehandle: # , filters=my_filters, preset=9
pickle.dump(mydict, filehandle, protocol=4)
LOGGER.debug("%s %s", langcode, len(mydict))
diff --git a/eval/eval-requirements.txt b/training/eval/eval-requirements.txt
similarity index 100%
rename from eval/eval-requirements.txt
rename to training/eval/eval-requirements.txt
diff --git a/eval/udscore.py b/training/eval/udscore.py
similarity index 84%
rename from eval/udscore.py
rename to training/eval/udscore.py
index 34e3a9c..e6bd507 100644
--- a/eval/udscore.py
+++ b/training/eval/udscore.py
@@ -5,7 +5,8 @@ from collections import Counter
from os import makedirs, path
from conllu import parse_incr # type: ignore
-from simplemma import lemmatize
+from simplemma import Lemmatizer, DefaultDictionaryFactory
+from simplemma.strategies.default import DefaultStrategy
if not path.exists("csv"):
makedirs("csv")
@@ -66,6 +67,16 @@ for filedata in data_files:
with open(filename, "r", encoding="utf-8") as myfile:
data_file = myfile.read()
start = time.time()
+ dictionary_factory = DefaultDictionaryFactory()
+ strategies = DefaultStrategy(greedy=False)
+ lemmatizer = Lemmatizer(
+ dictionary_factory=dictionary_factory,
+ lemmatization_strategy=DefaultStrategy(greedy=False),
+ )
+ greedy_lemmatizer = Lemmatizer(
+ dictionary_factory=dictionary_factory,
+ lemmatization_strategy=DefaultStrategy(greedy=True),
+ )
print("==", filedata, "==")
for tokenlist in parse_incr(data_file):
for token in tokenlist:
@@ -75,13 +86,10 @@ for filedata in data_files:
continue
initial = bool(token["id"] == 1)
+ token_form = token["form"].lower() if initial else token["form"]
- greedy_candidate = lemmatize(
- token["form"], lang=language, greedy=True, initial=initial
- )
- candidate = lemmatize(
- token["form"], lang=language, greedy=False, initial=initial
- )
+ candidate = lemmatizer.lemmatize(token_form, lang=language)
+ greedy_candidate = greedy_lemmatizer.lemmatize(token_form, lang=language)
if token["upos"] in ("ADJ", "NOUN"):
focus_total += 1
| adbar/simplemma | b575330e558851c728af74bb62208af445d6a283 | diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml
index 4ebd08b..c73b552 100644
--- a/.github/workflows/tests.yml
+++ b/.github/workflows/tests.yml
@@ -86,11 +86,11 @@ jobs:
flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics
- name: Code format with black
- run: black --check --diff simplemma tests
+ run: black --check --diff simplemma training tests
- name: Type checking with mypy
if: matrix.python-version != '3.6'
- run: mypy -p simplemma -p tests
+ run: mypy -p simplemma -p training -p tests
- name: Test with pytest
run: pytest --cov=./ --cov-report=xml
diff --git a/tests/test_dictionary_pickler.py b/tests/test_dictionary_pickler.py
index 1dad1fa..95dd1bb 100644
--- a/tests/test_dictionary_pickler.py
+++ b/tests/test_dictionary_pickler.py
@@ -1,7 +1,7 @@
import os
import tempfile
-from simplemma import dictionary_pickler
+from training import dictionary_pickler
TEST_DIR = os.path.abspath(os.path.dirname(__file__))
| Separate scripts used to create the dictionaries from actual src & tests
`tests/udscore.py` and `src/dictionary_pickler.py` seem to be scripts used to create the dictionaries and validate their quality.
I suggest that they be moved to a separate folder named `training` or whatever you want | 0.0 | b575330e558851c728af74bb62208af445d6a283 | [
"tests/test_dictionary_pickler.py::test_logic"
] | [] | {
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false
} | 2023-05-10 17:08:31+00:00 | mit | 905 |
|
aequitas__python-rflink-39 | diff --git a/.travis.yml b/.travis.yml
index 30fa75f..e52730d 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -7,11 +7,13 @@ python:
- 3.8
- 2.7
- pypy3
-install: pip install tox-travis python-coveralls codeclimate-test-reporter
+# python-coveralls 5.0 no longer supports python34
+install: pip install tox-travis codeclimate-test-reporter
script: tox
matrix:
allow_failures:
- python: 2.7
+ - python: 3.4
- python: pypy3
include:
- python: 3.7
diff --git a/rflink/parser.py b/rflink/parser.py
index 05e7ea0..8a74a52 100644
--- a/rflink/parser.py
+++ b/rflink/parser.py
@@ -305,7 +305,7 @@ def decode_packet(packet: str) -> PacketType:
# convert key=value pairs where needed
for attr in filter(None, attrs.strip(DELIM).split(DELIM)):
- key, value = attr.lower().split("=")
+ key, value = attr.lower().split("=", 1)
if key in VALUE_TRANSLATION:
value = VALUE_TRANSLATION[key](value)
name = PACKET_FIELDS.get(key, key)
diff --git a/rflink/protocol.py b/rflink/protocol.py
index a3c54da..9922910 100644
--- a/rflink/protocol.py
+++ b/rflink/protocol.py
@@ -151,7 +151,7 @@ class PacketHandling(ProtocolBase):
try:
packet = decode_packet(raw_packet)
except BaseException:
- log.exception("failed to parse packet: %s", packet)
+ log.exception("failed to parse packet data: %s", raw_packet)
log.debug("decoded packet: %s", packet)
| aequitas/python-rflink | 1b2ea3d82e4138fb5b35c763df94aa0a63d24acf | diff --git a/tests/test_parse.py b/tests/test_parse.py
index 3f55831..274e0ae 100644
--- a/tests/test_parse.py
+++ b/tests/test_parse.py
@@ -80,6 +80,10 @@ PROTOCOL_SAMPLES = os.path.join(os.path.dirname(__file__), "protocol_samples.txt
"20;00;Internal Pullup on RF-in disabled;",
{"message": "Internal Pullup on RF-in disabled"},
],
+ [
+ "20;9A;FA500;ID=0000db9e;SWITCH=01;CMD=SET_LEVEL=2;",
+ {"command": "set_level=2"},
+ ],
],
)
def test_packet_parsing(packet, expect):
| decode_packet: too many values to unpack (expected 2)
I use the rflink integration in my HA setup and often get this in the HA log:
> 2020-01-22 07:49:56 ERROR (MainThread) [rflink.protocol] failed to parse packet: None
> Traceback (most recent call last):
> File "/usr/local/lib/python3.7/site-packages/rflink/protocol.py", line 152, in handle_raw_packet
> packet = decode_packet(raw_packet)
> File "/usr/local/lib/python3.7/site-packages/rflink/parser.py", line 308, in decode_packet
> key, value = attr.lower().split("=")
> ValueError: too many values to unpack (expected 2)
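For reference, the attribute from the new test case (`CMD=SET_LEVEL=2`) reproduces the crash, and limiting the split to the first '=' (the one-line change in the patch above) resolves it:
```python
attr = "CMD=SET_LEVEL=2"

# key, value = attr.lower().split("=")   # ValueError: too many values to unpack
key, value = attr.lower().split("=", 1)  # splits on the first '=' only
assert (key, value) == ("cmd", "set_level=2")
```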
Obviously there is no impact on the functionality, but the messages flood the log.
I hope it's possible to rewrite the code so it accounts for situations like this. | 0.0 | 1b2ea3d82e4138fb5b35c763df94aa0a63d24acf | [
"tests/test_parse.py::test_packet_parsing[20;9A;FA500;ID=0000db9e;SWITCH=01;CMD=SET_LEVEL=2;-expect15]"
] | [
"tests/test_parse.py::test_packet_parsing[20;2D;UPM/Esic;ID=0001;TEMP=00cf;HUM=16;BAT=OK;-expect0]",
"tests/test_parse.py::test_packet_parsing[20;36;Alecto",
"tests/test_parse.py::test_packet_parsing[20;08;UPM/Esic;ID=1003;RAIN=0010;BAT=OK;-expect2]",
"tests/test_parse.py::test_packet_parsing[20;46;Kaku;ID=44;SWITCH=4;CMD=OFF;-expect3]",
"tests/test_parse.py::test_packet_parsing[20;E0;NewKaku;ID=cac142;SWITCH=1;CMD=ALLOFF;-expect4]",
"tests/test_parse.py::test_packet_parsing[20;00;Nodo",
"tests/test_parse.py::test_packet_parsing[20;01;VER=1.1;REV=45;BUILD=04;-expect6]",
"tests/test_parse.py::test_packet_parsing[20;01;PONG;-expect7]",
"tests/test_parse.py::test_packet_parsing[20;02;STATUS;setRF433=ON;setNodoNRF=OFF;setMilight=OFF;setLivingColors=OFF;setAnsluta=OFF;setGPIO=OFF;setBLE=OFF;setMysensors=OFF;-expect8]",
"tests/test_parse.py::test_packet_parsing[20;01;CMD",
"tests/test_parse.py::test_packet_parsing[20;02;OK;-expect10]",
"tests/test_parse.py::test_packet_parsing[20;01;mock;ID=0;BFORECAST=1;HSTATUS=0-expect11]",
"tests/test_parse.py::test_packet_parsing[20;05;RTS;ID=147907;SWITCH=01;CMD=UP;-expect13]",
"tests/test_parse.py::test_packet_parsing[20;00;Internal",
"tests/test_parse.py::test_descriptions",
"tests/test_parse.py::test_units",
"tests/test_parse.py::test_packet_valiation[20;00;Nodo",
"tests/test_parse.py::test_packet_valiation[20;03;Cresta;ID=8301;WINDIR=0005;WINSP=0000;WINGS=0000;WINTMP=00c3;WINCHL=00c3;BAT=LOW;]",
"tests/test_parse.py::test_packet_valiation[20;04;Cresta;ID=3001;TEMP=00b4;HUM=50;BAT=OK;]",
"tests/test_parse.py::test_packet_valiation[20;05;Cresta;ID=2801;TEMP=00af;HUM=53;BAT=OK;]",
"tests/test_parse.py::test_packet_valiation[20;06;NewKaku;ID=008440e6;SWITCH=a;CMD=OFF;]",
"tests/test_parse.py::test_packet_valiation[20;07;AB400D;ID=41;SWITCH=1;CMD=ON;]",
"tests/test_parse.py::test_packet_valiation[20;08;SilvercrestDB;ID=04d6bb97;SWITCH=1;CMD=ON;CHIME=01;]",
"tests/test_parse.py::test_packet_valiation[20;2D;UPM/Esic;ID=0001;TEMP=00cf;HUM=16;BAT=OK;]",
"tests/test_parse.py::test_packet_valiation[20;6A;UPM/Esic;ID=1002;WINSP=0041;WINDIR=5A;BAT=OK;]",
"tests/test_parse.py::test_packet_valiation[20;08;UPM/Esic;ID=1003;RAIN=0010;BAT=OK;]",
"tests/test_parse.py::test_packet_valiation[20;31;Mebus;ID=c201;TEMP=00cf;]",
"tests/test_parse.py::test_packet_valiation[20;32;Auriol;ID=008f;TEMP=00d3;BAT=OK;]",
"tests/test_parse.py::test_packet_valiation[20;A2;Auriol",
"tests/test_parse.py::test_packet_valiation[20;33;Cresta;ID=3001;TEMP=00b0;HUM=50;]",
"tests/test_parse.py::test_packet_valiation[20;0C;Cresta;ID=8001;RAIN=001c;]",
"tests/test_parse.py::test_packet_valiation[20;47;Cresta;ID=8001;WINDIR=0002;WINSP=0060;WINGS=0088;WINCHL=b0;]",
"tests/test_parse.py::test_packet_valiation[20;47;Cresta;ID=8001;TEMP=00b0;UV=00d0;]",
"tests/test_parse.py::test_packet_valiation[20;36;Alecto",
"tests/test_parse.py::test_packet_valiation[20;07;Mebus;ID=ea01;TEMP=0017;]",
"tests/test_parse.py::test_packet_valiation[20;3D;Alecto",
"tests/test_parse.py::test_packet_valiation[20;26;Alecto",
"tests/test_parse.py::test_packet_valiation[20;30;Alecto",
"tests/test_parse.py::test_packet_valiation[20;16;Alecto",
"tests/test_parse.py::test_packet_valiation[20;17;Alecto",
"tests/test_parse.py::test_packet_valiation[20;74;Oregon",
"tests/test_parse.py::test_packet_valiation[20;b3;Oregon",
"tests/test_parse.py::test_packet_valiation[20;e5;Oregon",
"tests/test_parse.py::test_packet_valiation[20;46;Oregon",
"tests/test_parse.py::test_packet_valiation[20;83;Oregon",
"tests/test_parse.py::test_packet_valiation[20;32;Oregon",
"tests/test_parse.py::test_packet_valiation[20;4a;Oregon",
"tests/test_parse.py::test_packet_valiation[20;ba;Oregon",
"tests/test_parse.py::test_packet_valiation[20;AF;SelectPlus;ID=1bb4;CHIME=01;]",
"tests/test_parse.py::test_packet_valiation[20;FC;Plieger",
"tests/test_parse.py::test_packet_valiation[20;47;Byron",
"tests/test_parse.py::test_packet_valiation[20;12;Pir;ID=aa66;PIR=ON;]",
"tests/test_parse.py::test_packet_valiation[20;63;SmokeAlert;ID=123456;SMOKEALERT=ON;]",
"tests/test_parse.py::test_packet_valiation[20;06;Kaku;ID=41;SWITCH=1;CMD=ON;]",
"tests/test_parse.py::test_packet_valiation[20;0C;Kaku;ID=41;SWITCH=2;CMD=OFF;]",
"tests/test_parse.py::test_packet_valiation[20;0D;Kaku;ID=41;SWITCH=2;CMD=ON;]",
"tests/test_parse.py::test_packet_valiation[20;46;Kaku;ID=44;SWITCH=4;CMD=OFF;]",
"tests/test_parse.py::test_packet_valiation[20;E0;NewKaku;ID=cac142;SWITCH=1;CMD=ALLOFF;]",
"tests/test_parse.py::test_packet_valiation[20;3B;NewKaku;ID=cac142;SWITCH=3;CMD=OFF;0]",
"tests/test_parse.py::test_packet_valiation[20;0B;NewKaku;ID=000005;SWITCH=2;CMD=ON;]",
"tests/test_parse.py::test_packet_valiation[20;0E;NewKaku;ID=000005;SWITCH=2;CMD=OFF;]",
"tests/test_parse.py::test_packet_valiation[20;12;NewKaku;ID=000002;SWITCH=2;CMD=OFF;]",
"tests/test_parse.py::test_packet_valiation[20;1E;NewKaku;ID=00000a;SWITCH=2;CMD=OFF;]",
"tests/test_parse.py::test_packet_valiation[20;1F;NewKaku;ID=00000a;SWITCH=2;CMD=ON;]",
"tests/test_parse.py::test_packet_valiation[20;01;NewKaku;ID=000007;SWITCH=2;CMD=OFF;]",
"tests/test_parse.py::test_packet_valiation[20;04;NewKaku;ID=000007;SWITCH=2;CMD=ON;]",
"tests/test_parse.py::test_packet_valiation[20;04;NewKaku;ID=000007;SWITCH=2;CMD=SET_LEVEL=14;]",
"tests/test_parse.py::test_packet_valiation[20;0C;HomeEasy;ID=7900b200;SWITCH=0b;CMD=ALLON;]",
"tests/test_parse.py::test_packet_valiation[20;AD;FA500;ID=0d00b900;SWITCH=0001;CMD=UNKOWN;]",
"tests/test_parse.py::test_packet_valiation[20;AE;FA500;ID=0a01;SWITCH=0a01;CMD=OFF;]",
"tests/test_parse.py::test_packet_valiation[20;03;Eurodomest;ID=03696b;SWITCH=00;CMD=OFF;]",
"tests/test_parse.py::test_packet_valiation[20;04;Eurodomest;ID=03696b;SWITCH=07;CMD=ALLOFF;]",
"tests/test_parse.py::test_packet_valiation[20;41;Conrad",
"tests/test_parse.py::test_packet_valiation[20;47;Blyss;ID=ff98;SWITCH=A1;CMD=ON;]",
"tests/test_parse.py::test_packet_valiation[20;73;Kambrook;ID=010203;SWITCH=A1;CMD=OFF;]",
"tests/test_parse.py::test_packet_valiation[20;39;RTS;ID=1a602a;SWITCH=01;CMD=DOWN;]",
"tests/test_parse.py::test_packet_valiation[20;01;MiLightv1;ID=F746;SWITCH=00;RGBW=3c00;CMD=ON;]",
"tests/test_parse.py::test_packet_valiation[20;3B;NewKaku;ID=cac142;SWITCH=3;CMD=OFF;1]",
"tests/test_parse.py::test_packet_valiation[10;Kaku;00004d;1;OFF;]",
"tests/test_parse.py::test_packet_valiation[10;AB400D;00004d;1;OFF;]",
"tests/test_parse.py::test_packet_valiation[10;Impuls;00004d;1;OFF;]",
"tests/test_parse.py::test_packet_valiation[10;NewKaku;00c142;1;ON;]",
"tests/test_parse.py::test_packet_valiation[10;NewKaku;128ac4d;1;OFF;]",
"tests/test_parse.py::test_packet_valiation[10;Eurodomest;123456;01;ON;]",
"tests/test_parse.py::test_packet_valiation[10;Blyss;ff98;A1;OFF;]",
"tests/test_parse.py::test_packet_valiation[10;Conrad;ff0607;1;OFF;]",
"tests/test_parse.py::test_packet_valiation[10;Kambrook;050325;a1;ON;]",
"tests/test_parse.py::test_packet_valiation[10;X10;000041;1;OFF;]",
"tests/test_parse.py::test_packet_valiation[10;HomeConfort;01b523;D3;ON;]",
"tests/test_parse.py::test_packet_valiation[10;FA500;001b523;D3;ON;]",
"tests/test_parse.py::test_packet_valiation[10;Powerfix;000080;0;ON;]",
"tests/test_parse.py::test_packet_valiation[10;Ikea",
"tests/test_parse.py::test_packet_valiation[10;HomeEasy;7900b100;3;ON;]",
"tests/test_parse.py::test_packet_valiation[10;EV1527;000080;0;ON;]",
"tests/test_parse.py::test_packet_valiation[10;Chuango;000080;2;ON;]",
"tests/test_parse.py::test_packet_valiation[10;Selectplus;001c33;]",
"tests/test_parse.py::test_packet_valiation[10;Byron;112233;01;OFF;]",
"tests/test_parse.py::test_packet_valiation[10;DELTRONIC;001c33;]",
"tests/test_parse.py::test_packet_valiation[10;BYRON;00009F;01;ON;]",
"tests/test_parse.py::test_packet_valiation[10;FA20RF;67f570;1;ON;]",
"tests/test_parse.py::test_packet_valiation[10;MERTIK;64;UP;]",
"tests/test_parse.py::test_packet_valiation[10;RTS;1a602a;0;ON;]",
"tests/test_parse.py::test_packet_valiation[10;RTS;1b602b;0123;PAIR;]",
"tests/test_parse.py::test_packet_valiation[10;RTS;1b602b;0123;0;PAIR;]",
"tests/test_parse.py::test_packet_valiation[10;MiLightv1;F746;00;3c00;ON;]",
"tests/test_parse.py::test_packet_valiation[10;MiLightv1;F746;01;34BC;PAIR;]",
"tests/test_parse.py::test_packet_valiation[10;MiLightv1;F746;01;34BC;UNPAIR;]",
"tests/test_parse.py::test_packet_valiation[10;MiLightv1;F746;01;34BC;BRIGHT;]",
"tests/test_parse.py::test_packet_valiation[10;MiLightv1;F746;01;34BC;COLOR;]",
"tests/test_parse.py::test_packet_valiation[10;UNITEC;7796;01;ON;]",
"tests/test_parse.py::test_packet_valiation[10;UNITEC;7796;01;PAIR;]",
"tests/test_parse.py::test_packet_valiation[10;REBOOT;]",
"tests/test_parse.py::test_packet_valiation[10;PING;]",
"tests/test_parse.py::test_packet_valiation[10;VERSION;]",
"tests/test_parse.py::test_packet_valiation[10;RFDEBUG=ON;]",
"tests/test_parse.py::test_packet_valiation[10;RFUDEBUG=ON;]",
"tests/test_parse.py::test_packet_valiation[10;QRFDEBUG=ON;]",
"tests/test_parse.py::test_packet_valiation[10;TRISTATEINVERT;]",
"tests/test_parse.py::test_packet_valiation[10;RTSCLEAN;]",
"tests/test_parse.py::test_packet_valiation[10;RTSRECCLEAN=9;]",
"tests/test_parse.py::test_packet_valiation[10;RTSSHOW;]",
"tests/test_parse.py::test_packet_valiation[10;RTSINVERT;]",
"tests/test_parse.py::test_packet_valiation[10;RTSLONGTX;]",
"tests/test_parse.py::test_packet_valiation[11;20;0B;NewKaku;ID=000005;SWITCH=2;CMD=ON;]",
"tests/test_parse.py::test_packet_valiation[20;D3;OK;]",
"tests/test_parse.py::test_packet_valiation[20;D4;NewKaku;ID=000005;SWITCH=2;CMD=ON;]"
] | {
"failed_lite_validators": [
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false
} | 2020-01-25 20:01:16+00:00 | mit | 906 |
|
aesara-devs__aesara-465 | diff --git a/aesara/tensor/math_opt.py b/aesara/tensor/math_opt.py
index e90d6b425..ad0eeb92b 100644
--- a/aesara/tensor/math_opt.py
+++ b/aesara/tensor/math_opt.py
@@ -11,7 +11,6 @@ from functools import reduce
import numpy as np
import aesara.scalar.basic as aes
-from aesara import compile
from aesara.assert_op import assert_op
from aesara.configdefaults import config
from aesara.graph.basic import Constant, Variable
@@ -74,6 +73,7 @@ from aesara.tensor.math import (
expm1,
ge,
int_div,
+ isinf,
log,
log1p,
makeKeepDims,
@@ -2286,34 +2286,33 @@ def local_log1p(fgraph, node):
@register_stabilize
@register_specialize
@local_optimizer([log])
-def local_log_add(fgraph, node):
- # log(exp(x)+exp(y))
- #
- # Suppose x >= y
- # log(exp(x) + exp(y))
- # log(exp(x) * (1 + exp(y)/exp(x)))
- # x + log(1 + exp(y)/exp(x))
- # x + log1p(exp(y)/exp(x))
- # x + log1p(exp(y-x))
+def local_log_add_exp(fgraph, node):
+ # log(exp(x)+exp(y)+exp(z)) = max + log(x-max, y-max, z-max)
+
if node.op == log:
z = node.inputs[0]
if z.owner and z.owner.op == add:
zi = z.owner.inputs
- if len(zi) != 2:
- # -- upgrading Maximum to handle multiple inputs wasn't trivial
- # TODO
- # raise NotImplementedError()
- return
pre_exp = [x.owner.inputs[0] for x in zi if x.owner and x.owner.op == exp]
+ # all arguments to add are exp(<something>)
if len(pre_exp) == len(zi):
- # all arguments to add are exp(<something>)
- max_pre = maximum(*pre_exp)
-
- ret = max_pre + log1p(exp(add(*[p - max_pre for p in pre_exp])))
- ret.tag.values_eq_approx = values_eq_approx_remove_inf
+ # Do not offset when max_pre = -np.inf, to avoid nan in the output
+ # Switch statement is placed directly inside add to break the self-symmetry
+ # of the returned output (otherwise the optimization would not stabilize)
+ max_pre = reduce(maximum, pre_exp)
+ ret = max_pre + log(
+ add(
+ *[
+ switch(isinf(max_pre), exp(max_pre), exp(p - max_pre))
+ for p in pre_exp
+ ]
+ )
+ )
return [ret]
+@register_stabilize
+@register_specialize
@local_optimizer([log])
def local_log_sum_exp(fgraph, node):
# log(sum_i(exp(x_i))) = x_max + log(sum_i(exp(x_i - x_max)))
@@ -2342,7 +2341,19 @@ def local_log_sum_exp(fgraph, node):
max_pre_exp = aet_max(pre_exp, axis=axis)
max_pre_exp_keepdims = makeKeepDims(pre_exp, max_pre_exp, axis)
- ret = max_pre_exp + log(aet_sum(exp(pre_exp - max_pre_exp_keepdims), axis=axis))
+ # Do not offset when max_pre = -np.inf, to avoid nan in the output
+ # Switch statement is placed directly inside sum to break the self-symmetry
+ # of the returned output (otherwise the optimization would not stabilize)
+ ret = max_pre_exp + log(
+ aet_sum(
+ switch(
+ isinf(max_pre_exp_keepdims),
+ exp(max_pre_exp_keepdims),
+ exp(pre_exp - max_pre_exp_keepdims),
+ ),
+ axis=axis,
+ ),
+ )
# Restore the dimshuffle op, if any.
if dimshuffle_op:
@@ -2351,14 +2362,6 @@ def local_log_sum_exp(fgraph, node):
return [ret]
-compile.optdb.register(
- "local_log_sum_exp",
- in2out(local_log_sum_exp, ignore_newtrees=True),
- 1.6,
- "fast_run",
-)
-
-
def add_calculate(num, denum, aslist=False, out_type=None):
# TODO: make sure that this function and mul_calculate are similar
if out_type is None:
| aesara-devs/aesara | b84ac43a8dd23c888131e9b3f5654f81402da76c | diff --git a/tests/tensor/test_math_opt.py b/tests/tensor/test_math_opt.py
index 98a1d8a80..55dfbae0b 100644
--- a/tests/tensor/test_math_opt.py
+++ b/tests/tensor/test_math_opt.py
@@ -1840,10 +1840,7 @@ def test_log1p():
assert [node.op for node in f.maker.fgraph.toposort()] == [log1p]
[email protected](
- reason="log(add(exp)) is not stabilized when adding more than 2 elements, see #623"
-)
-def test_log_add():
+def test_local_log_add_exp():
m = config.mode
if m == "FAST_COMPILE":
m = "FAST_RUN"
@@ -1858,26 +1855,28 @@ def test_log_add():
y = dvector()
f = function([x, y], log(exp(x) + exp(y)), mode=m)
- f([10000], [10000]) # causes overflow if handled incorrectly
- assert np.isfinite(f([10000], [10000]))
+ # test that it gives the correct result when it doesn't overflow
+ f([10], [10]) # doesn't causes overflow
+ utt.assert_allclose(f([10], [10]), 10 + np.log1p(1))
+
+ assert np.isfinite(f([10000], [10000])) # causes overflow if handled incorrectly
utt.assert_allclose(f([10000], [10000]), 10000 + np.log1p(1))
- # test that it give the same result when it don't overflow
- f([10], [10]) # don't causes overflow
- utt.assert_allclose(f([10], [10]), 10 + np.log1p(1))
+ # test that when max = +-inf, optimized output still works correctly
+ assert f([-np.inf], [-np.inf]) == -np.inf
+ assert f([np.inf], [np.inf]) == np.inf
+ assert f([np.inf], [-np.inf]) == np.inf
- # test that it also works with more than two args, (this currently fails)
+ # test that it also works with more than two args
x = dvector()
y = dvector()
f = function([x, y], log(exp(x) + exp(y) + exp(x - y) + exp(x + y)), mode=m)
- f([10000], [10000]) # causes overflow if handled incorrectly
+ assert np.isfinite(f([10000], [10000])) # causes overflow if handled incorrectly
utt.assert_allclose(f([10000], [10000]), 20000)
# TODO: test that the optimization works in the presence of broadcasting.
- # TODO: (write and) test that the optimization works with Sum in addition to working with Add.
-
def test_local_subtensor_of_dot():
m1 = matrix()
@@ -4001,6 +4000,16 @@ def test_local_log_sum_exp3():
assert np.allclose(optimised_ret, 100.0)
+def test_local_log_sum_exp_inf():
+ # Test that when max = +-inf, optimized output still works correctly
+ x = vector("x")
+ f = compile_graph_log_sum_exp(x, axis=0)
+
+ assert f([-np.inf, -np.inf]) == -np.inf
+ assert f([np.inf, np.inf]) == np.inf
+ assert f([-np.inf, np.inf]) == np.inf
+
+
def test_local_reciprocal_1_plus_exp():
x = vector("x")
y = aet.reciprocal(1 + exp(x))
| LogSumExp of `-inf` returns `nan` instead of `-inf`
```python
import numpy as np

import aesara
import aesara.tensor as at

x = at.vector('x')
res1 = at.log(at.sum(at.exp(x))) # nan with sum
res2 = at.log(at.prod(at.exp(x))) # but not with prod
fun = aesara.function([x], [res1, res2])
print(fun(np.array([-np.inf, -np.inf]))) # [array(nan), array(-inf)]
```
For reference, NumPy gives `-inf`, together with a "divide by zero encountered in log" runtime warning:
```python
print(np.log(np.sum(np.exp([-np.inf, -np.inf])))) # -inf
```
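The `nan` is consistent with the max-shift identity that stabilized log-sum-exp implementations (including the `local_log_sum_exp` rewrite touched by the patch above) rely on:

$$\log\sum_i e^{x_i} = m + \log\sum_i e^{x_i - m}, \qquad m = \max_i x_i$$

The identity is exact for finite $m$, but when $m = -\infty$ every shifted term becomes $-\infty - (-\infty) = \mathrm{nan}$, which is why the fix guards the shift with `isinf(max)`.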
This showed up in the PyMC3 `logsumexp` function, which returns `nan` with this input:
```python
from pymc3.math import logsumexp
x = at.vector('x')
res = logsumexp(x)
fun = aesara.function([x], res)
print(fun(np.array([-np.inf, -np.inf]))) # [nan]
```
The SciPy reference, on the other hand, works fine:
```python
print(scipy.special.logsumexp([-np.inf, -np.inf])) # -inf
```
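A NumPy sketch of the guard the eventual fix applies (the name `logsumexp_safe` is mine, not Aesara API): when the maximum is infinite, skip the shift and return it directly:
```python
import numpy as np

def logsumexp_safe(x):
    """Max-shifted log-sum-exp with an explicit guard for infinite maxima."""
    m = np.max(x)
    if np.isinf(m):  # all -inf (or any +inf): the answer is just m
        return m
    return m + np.log(np.sum(np.exp(x - m)))

assert logsumexp_safe(np.array([-np.inf, -np.inf])) == -np.inf
```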
Weirdly, it happens with addition, but not with subtraction or multiplication, so maybe the problem is not the sum but the addition?
```python
x = at.vector('x')
res1 = at.log(at.exp(x[0]) + at.exp(x[1]))
res2 = at.log(at.exp(x[0]) - at.exp(x[1]))
fun = aesara.function([x], [res1, res2])
print(fun(np.array([-np.inf, -np.inf]))) # [array(nan), array(-inf)]
``` | 0.0 | b84ac43a8dd23c888131e9b3f5654f81402da76c | [
"tests/tensor/test_math_opt.py::test_local_log_add_exp"
] | [
"tests/tensor/test_math_opt.py::TestGreedyDistribute::test_main",
"tests/tensor/test_math_opt.py::TestAlgebraicCanonize::test_muldiv",
"tests/tensor/test_math_opt.py::TestAlgebraicCanonize::test_abs_mul_div",
"tests/tensor/test_math_opt.py::test_mixeddiv",
"tests/tensor/test_math_opt.py::TestFusion::test_add_mul_fusion_inplace",
"tests/tensor/test_math_opt.py::TestLocalUselessElemwiseComparison::test_inequality_with_self",
"tests/tensor/test_math_opt.py::TestLocalUselessElemwiseComparison::test_shape_add_inequality",
"tests/tensor/test_math_opt.py::TestLocalUselessElemwiseComparison::test_and",
"tests/tensor/test_math_opt.py::TestLocalUselessElemwiseComparison::test_and_int",
"tests/tensor/test_math_opt.py::TestLocalUselessElemwiseComparison::test_or",
"tests/tensor/test_math_opt.py::TestLocalUselessElemwiseComparison::test_or_int",
"tests/tensor/test_math_opt.py::TestLocalUselessElemwiseComparison::test_xor",
"tests/tensor/test_math_opt.py::TestExpLog::test_log_exp",
"tests/tensor/test_math_opt.py::TestLocalMergeSwitchSameCond::test_elemwise",
"tests/tensor/test_math_opt.py::TestLocalSumProd::test_local_sum_prod_all_to_none",
"tests/tensor/test_math_opt.py::TestLocalSumProd::test_local_sum_sum_prod_prod",
"tests/tensor/test_math_opt.py::TestLocalSumProd::test_local_sum_sum_int8",
"tests/tensor/test_math_opt.py::TestLocalSumProd::test_local_sum_sum_dtype",
"tests/tensor/test_math_opt.py::test_local_add_specialize",
"tests/tensor/test_math_opt.py::TestIntDivByOne::test1",
"tests/tensor/test_math_opt.py::TestIntDivByOne::test3",
"tests/tensor/test_math_opt.py::test_local_sumsqr2dot",
"tests/tensor/test_math_opt.py::TestSigmoidOpts::test_1msigmoid",
"tests/tensor/test_math_opt.py::TestSigmoidOpts::test_perform_sigm_times_exp",
"tests/tensor/test_math_opt.py::TestSoftplusOpts::test_logsigm_to_softplus",
"tests/tensor/test_math_opt.py::TestSigmoidUtils::test_compute_mul",
"tests/tensor/test_math_opt.py::TestSigmoidUtils::test_parse_mul_tree",
"tests/tensor/test_math_opt.py::TestSigmoidUtils::test_is_1pexp"
] | {
"failed_lite_validators": [
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false
} | 2021-06-06 13:15:11+00:00 | bsd-3-clause | 907 |
|
aesara-devs__aesara-779 | diff --git a/aesara/tensor/__init__.py b/aesara/tensor/__init__.py
index af894fa98..787bd4871 100644
--- a/aesara/tensor/__init__.py
+++ b/aesara/tensor/__init__.py
@@ -134,6 +134,9 @@ from aesara.tensor.extra_ops import ( # noqa
squeeze,
unique,
unravel_index,
+ linspace,
+ logspace,
+ geomspace,
)
from aesara.tensor.shape import ( # noqa
reshape,
diff --git a/aesara/tensor/extra_ops.py b/aesara/tensor/extra_ops.py
index 9db12b045..d054f5d46 100644
--- a/aesara/tensor/extra_ops.py
+++ b/aesara/tensor/extra_ops.py
@@ -1636,6 +1636,29 @@ class BroadcastTo(Op):
broadcast_to_ = BroadcastTo()
+def geomspace(start, end, steps, base=10.0):
+ from aesara.tensor.math import log
+
+ start = at.as_tensor_variable(start)
+ end = at.as_tensor_variable(end)
+ return base ** linspace(log(start) / log(base), log(end) / log(base), steps)
+
+
+def logspace(start, end, steps, base=10.0):
+ start = at.as_tensor_variable(start)
+ end = at.as_tensor_variable(end)
+ return base ** linspace(start, end, steps)
+
+
+def linspace(start, end, steps):
+ start = at.as_tensor_variable(start)
+ end = at.as_tensor_variable(end)
+ arr = at.arange(steps)
+ arr = at.shape_padright(arr, max(start.ndim, end.ndim))
+ multiplier = (end - start) / (steps - 1)
+ return start + arr * multiplier
+
+
def broadcast_to(
x: TensorVariable, shape: Union[TensorVariable, Tuple[Variable]]
) -> TensorVariable:
| aesara-devs/aesara | 980c4c2c72b9c8b51d8627cc75fb655a9fd600ed | diff --git a/tests/tensor/test_extra_ops.py b/tests/tensor/test_extra_ops.py
index a6aa85cb0..ce57ae6ae 100644
--- a/tests/tensor/test_extra_ops.py
+++ b/tests/tensor/test_extra_ops.py
@@ -36,6 +36,9 @@ from aesara.tensor.extra_ops import (
diff,
fill_diagonal,
fill_diagonal_offset,
+ geomspace,
+ linspace,
+ logspace,
ravel_multi_index,
repeat,
searchsorted,
@@ -65,6 +68,11 @@ from aesara.utils import LOCAL_BITWIDTH, PYTHON_INT_BITWIDTH
from tests import unittest_tools as utt
+def set_test_value(x, v):
+ x.tag.test_value = v
+ return x
+
+
def test_cpu_contiguous():
a = fmatrix("a")
i = iscalar("i")
@@ -1222,3 +1230,28 @@ def test_broadcast_arrays():
assert np.array_equal(x_bcast_val, x_bcast_exp)
assert np.array_equal(y_bcast_val, y_bcast_exp)
+
+
[email protected](
+ "start, stop, num_samples",
+ [
+ (1, 10, 50),
+ (np.array([5, 6]), np.array([[10, 10], [10, 10]]), 25),
+ (1, np.array([5, 6]), 30),
+ ],
+)
+def test_space_ops(start, stop, num_samples):
+ z = linspace(start, stop, num_samples)
+ aesara_res = function(inputs=[], outputs=z)()
+ numpy_res = np.linspace(start, stop, num=num_samples)
+ assert np.allclose(aesara_res, numpy_res)
+
+ z = logspace(start, stop, num_samples)
+ aesara_res = function(inputs=[], outputs=z)()
+ numpy_res = np.logspace(start, stop, num=num_samples)
+ assert np.allclose(aesara_res, numpy_res)
+
+ z = geomspace(start, stop, num_samples)
+ aesara_res = function(inputs=[], outputs=z)()
+ numpy_res = np.geomspace(start, stop, num=num_samples)
+ assert np.allclose(aesara_res, numpy_res)
| Implement Op for `linspace` and similar numpy functions
The following NumPy functions don't have a counterpart in Aesara:
* [linspace](https://numpy.org/doc/stable/reference/generated/numpy.linspace.html)
* [logspace](https://numpy.org/doc/stable/reference/generated/numpy.logspace.html)
* [geomspace](https://numpy.org/doc/stable/reference/generated/numpy.geomspace.html)
I am not familiar with all of them, so some might not make sense, or they might be low priority.
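Since all three reduce to affine maps of `arange`, a minimal scalar-start/stop sketch is easy to write in user code (this mirrors the patch in this record; the array-valued case additionally needs dimension padding):
```python
import aesara.tensor as at

def linspace(start, end, steps):
    """Sketch: `steps` evenly spaced values from `start` to `end`, inclusive."""
    start = at.as_tensor_variable(start)
    end = at.as_tensor_variable(end)
    return start + at.arange(steps) * (end - start) / (steps - 1)

def logspace(start, end, steps, base=10.0):
    return base ** linspace(start, end, steps)
```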
`linspace` would definitely be nice to have. | 0.0 | 980c4c2c72b9c8b51d8627cc75fb655a9fd600ed | [
"tests/tensor/test_extra_ops.py::test_cpu_contiguous",
"tests/tensor/test_extra_ops.py::TestSearchsortedOp::test_searchsortedOp_on_sorted_input",
"tests/tensor/test_extra_ops.py::TestSearchsortedOp::test_searchsortedOp_wrong_side_kwd",
"tests/tensor/test_extra_ops.py::TestSearchsortedOp::test_searchsortedOp_on_no_1d_inp",
"tests/tensor/test_extra_ops.py::TestSearchsortedOp::test_searchsortedOp_on_float_sorter",
"tests/tensor/test_extra_ops.py::TestSearchsortedOp::test_searchsortedOp_on_int_sorter",
"tests/tensor/test_extra_ops.py::TestSearchsortedOp::test_searchsortedOp_on_right_side",
"tests/tensor/test_extra_ops.py::TestSearchsortedOp::test_infer_shape",
"tests/tensor/test_extra_ops.py::TestSearchsortedOp::test_grad",
"tests/tensor/test_extra_ops.py::TestDiffOp::test_diffOp",
"tests/tensor/test_extra_ops.py::TestDiffOp::test_output_type[x_type0]",
"tests/tensor/test_extra_ops.py::TestDiffOp::test_output_type[x_type1]",
"tests/tensor/test_extra_ops.py::TestDiffOp::test_output_type[x_type2]",
"tests/tensor/test_extra_ops.py::TestDiffOp::test_output_type[x_type3]",
"tests/tensor/test_extra_ops.py::TestDiffOp::test_infer_shape",
"tests/tensor/test_extra_ops.py::TestDiffOp::test_grad",
"tests/tensor/test_extra_ops.py::TestDiffOp::test_grad_not_implemented",
"tests/tensor/test_extra_ops.py::TestSqueeze::test_op",
"tests/tensor/test_extra_ops.py::TestSqueeze::test_infer_shape",
"tests/tensor/test_extra_ops.py::TestSqueeze::test_grad",
"tests/tensor/test_extra_ops.py::TestSqueeze::test_var_interface",
"tests/tensor/test_extra_ops.py::TestSqueeze::test_axis",
"tests/tensor/test_extra_ops.py::TestCompress::test_op",
"tests/tensor/test_extra_ops.py::TestRepeat::test_basic",
"tests/tensor/test_extra_ops.py::TestRepeat::test_grad",
"tests/tensor/test_extra_ops.py::TestRepeat::test_broadcastable",
"tests/tensor/test_extra_ops.py::TestBartlett::test_perform",
"tests/tensor/test_extra_ops.py::TestBartlett::test_infer_shape",
"tests/tensor/test_extra_ops.py::TestFillDiagonal::test_infer_shape",
"tests/tensor/test_extra_ops.py::TestFillDiagonalOffset::test_gradient",
"tests/tensor/test_extra_ops.py::TestFillDiagonalOffset::test_infer_shape",
"tests/tensor/test_extra_ops.py::test_to_one_hot",
"tests/tensor/test_extra_ops.py::TestUnique::test_basic_vector[x0-inp0-None]",
"tests/tensor/test_extra_ops.py::TestUnique::test_basic_vector[x1-inp1-None]",
"tests/tensor/test_extra_ops.py::TestUnique::test_basic_vector[x2-inp2-0]",
"tests/tensor/test_extra_ops.py::TestUnique::test_basic_vector[x3-inp3-0]",
"tests/tensor/test_extra_ops.py::TestUnique::test_basic_vector[x4-inp4--1]",
"tests/tensor/test_extra_ops.py::TestUnique::test_basic_vector[x5-inp5--1]",
"tests/tensor/test_extra_ops.py::TestUnique::test_infer_shape[x0-inp0-None]",
"tests/tensor/test_extra_ops.py::TestUnique::test_infer_shape[x2-inp2-0]",
"tests/tensor/test_extra_ops.py::TestUnique::test_infer_shape[x3-inp3-0]",
"tests/tensor/test_extra_ops.py::TestUnique::test_infer_shape[x4-inp4--1]",
"tests/tensor/test_extra_ops.py::TestUnique::test_infer_shape[x5-inp5--1]",
"tests/tensor/test_extra_ops.py::test_broadcast_shape_symbolic[s1_vals0-s2_vals0-exp_res0]",
"tests/tensor/test_extra_ops.py::test_broadcast_shape_symbolic[s1_vals1-s2_vals1-exp_res1]",
"tests/tensor/test_extra_ops.py::test_broadcast_shape_symbolic[s1_vals2-s2_vals2-exp_res2]",
"tests/tensor/test_extra_ops.py::TestBroadcastTo::test_avoid_useless_scalars",
"tests/tensor/test_extra_ops.py::TestBroadcastTo::test_avoid_useless_subtensors",
"tests/tensor/test_extra_ops.py::TestBroadcastTo::test_perform",
"tests/tensor/test_extra_ops.py::TestBroadcastTo::test_gradient[<lambda>-input_dims0]",
"tests/tensor/test_extra_ops.py::TestBroadcastTo::test_gradient[<lambda>-input_dims1]",
"tests/tensor/test_extra_ops.py::TestBroadcastTo::test_gradient[<lambda>-input_dims2]",
"tests/tensor/test_extra_ops.py::TestBroadcastTo::test_gradient[<lambda>-input_dims3]",
"tests/tensor/test_extra_ops.py::TestBroadcastTo::test_infer_shape",
"tests/tensor/test_extra_ops.py::TestBroadcastTo::test_inplace",
"tests/tensor/test_extra_ops.py::test_broadcast_arrays",
"tests/tensor/test_extra_ops.py::test_space_ops[1-10-50]",
"tests/tensor/test_extra_ops.py::test_space_ops[start1-stop1-25]",
"tests/tensor/test_extra_ops.py::test_space_ops[1-stop2-30]"
] | [] | {
"failed_lite_validators": [
"has_hyperlinks",
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false
} | 2022-01-22 11:36:31+00:00 | bsd-3-clause | 908 |
|
aesara-devs__aesara-854 | diff --git a/aesara/tensor/c_code/dimshuffle.c b/aesara/tensor/c_code/dimshuffle.c
index d97df5254..6c67bd1bf 100644
--- a/aesara/tensor/c_code/dimshuffle.c
+++ b/aesara/tensor/c_code/dimshuffle.c
@@ -31,6 +31,8 @@ int APPLY_SPECIFIC(cpu_dimshuffle)(PyArrayObject *input, PyArrayObject **res,
PyArrayObject *transposed_input =
(PyArrayObject *)PyArray_Transpose(_input, &permute);
+ Py_DECREF(_input);
+
PyDimMem_FREE(permute.ptr);
npy_intp *res_shape = PyArray_DIMS(transposed_input);
@@ -68,7 +70,7 @@ int APPLY_SPECIFIC(cpu_dimshuffle)(PyArrayObject *input, PyArrayObject **res,
*res = (PyArrayObject *)PyArray_Newshape(transposed_input, &reshape_shape,
NPY_CORDER);
- /* Py_XDECREF(transposed_input); */
+ Py_DECREF(transposed_input);
PyDimMem_FREE(reshape_shape.ptr);
| aesara-devs/aesara | 3d8553f7b56caed8686e3db9bfbe5ffea110b70a | diff --git a/tests/tensor/test_elemwise.py b/tests/tensor/test_elemwise.py
index 0ad4a76a6..1ad8ca9b2 100644
--- a/tests/tensor/test_elemwise.py
+++ b/tests/tensor/test_elemwise.py
@@ -1,4 +1,5 @@
import math
+import tracemalloc
from copy import copy
import numpy as np
@@ -141,6 +142,41 @@ class TestDimShuffle(unittest_tools.InferShapeTester):
# Confirm the broadcasted value in the output
assert np.array_equiv(outputs[0].storage[0], 2039)
+ @pytest.mark.parametrize("inplace", [True, False])
+ def test_memory_leak(self, inplace):
+ import gc
+
+ n = 100_000
+
+ x = aesara.shared(np.ones(n, dtype=np.float64))
+
+ y = x.dimshuffle([0, "x"])
+ y.owner.op.inplace = inplace
+
+ f = aesara.function([], y, mode=Mode(optimizer=None))
+
+ assert len(f.maker.fgraph.apply_nodes) == 2
+ assert isinstance(f.maker.fgraph.toposort()[0].op, DimShuffle)
+
+ assert f.maker.fgraph.toposort()[0].op.inplace is inplace
+
+ tracemalloc.start()
+
+ blocks_last = None
+ block_diffs = []
+ for i in range(50):
+ x.set_value(np.ones(n))
+ _ = f()
+ _ = gc.collect()
+ blocks_i, _ = tracemalloc.get_traced_memory()
+ if blocks_last is not None:
+ blocks_diff = (blocks_i - blocks_last) // 10 ** 3
+ block_diffs.append(blocks_diff)
+ blocks_last = blocks_i
+
+ tracemalloc.stop()
+ assert np.allclose(np.mean(block_diffs), 0)
+
class TestBroadcast:
# this is to allow other types to reuse this class to test their ops
| Memory leak in `DimShuffle`
## Description of your problem or feature request
**Please provide a minimal, self-contained, and reproducible example.**
```python
import os

import numpy as np
import psutil

import aesara
from aesara import tensor as at
process = psutil.Process(os.getpid())
old_rss = None
n = 100_000
x = at.sharedvar.TensorSharedVariable(
"test", at.type.dvector, np.ones(n), strict=True
)
y = x.dimshuffle([0, "x"])
f = aesara.function([], y)
for i in range(100):
x.set_value(np.ones(n))
f()
if i % 10 == 0:
new_rss = process.memory_info().rss
if old_rss is not None:
print(f"Python process increased RSS by {round((new_rss - old_rss) / 1024**2, 2)} MB")
old_rss = new_rss
```
**Please provide the full traceback of any errors.**
```python
Python process increased RSS by 6.62 MB
Python process increased RSS by 7.74 MB
Python process increased RSS by 7.64 MB
Python process increased RSS by 7.68 MB
Python process increased RSS by 7.64 MB
Python process increased RSS by 7.68 MB
Python process increased RSS by 7.41 MB
Python process increased RSS by 7.63 MB
Python process increased RSS by 7.85 MB
```
**Please provide any additional information below.**
This happens when the reshape operation is compiled to C. When I run the same code setting `aesara.config.cxx = ""`, the Python process does not increase RSS.
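For what it's worth, the regression test added in this record measures the same leak with `tracemalloc` instead of RSS; a trimmed sketch (reusing `x`, `n`, and `f` from the repro above):
```python
import gc
import tracemalloc

tracemalloc.start()
last = None
for _ in range(50):
    x.set_value(np.ones(n))
    f()
    gc.collect()
    blocks, _ = tracemalloc.get_traced_memory()
    if last is not None:
        print(f"delta: {(blocks - last) / 1024:.1f} KiB")  # hovers around 0 once fixed
    last = blocks
tracemalloc.stop()
```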
## Versions and main components
* Aesara version: 2.4.0
* Aesara config (`python -c "import aesara; print(aesara.config)"`)
* Python version: 3.9
* Operating system: Ubuntu 18.04
* How did you install Aesara: (conda/pip) pip | 0.0 | 3d8553f7b56caed8686e3db9bfbe5ffea110b70a | [
"tests/tensor/test_elemwise.py::TestDimShuffle::test_memory_leak[True]",
"tests/tensor/test_elemwise.py::TestDimShuffle::test_memory_leak[False]"
] | [
"tests/tensor/test_elemwise.py::TestDimShuffle::test_perform",
"tests/tensor/test_elemwise.py::TestDimShuffle::test_c_or_py",
"tests/tensor/test_elemwise.py::TestDimShuffle::test_infer_shape",
"tests/tensor/test_elemwise.py::TestDimShuffle::test_c_views",
"tests/tensor/test_elemwise.py::TestBroadcast::test_perform",
"tests/tensor/test_elemwise.py::TestBroadcast::test_c",
"tests/tensor/test_elemwise.py::TestBroadcast::test_perform_inplace",
"tests/tensor/test_elemwise.py::TestBroadcast::test_c_inplace",
"tests/tensor/test_elemwise.py::TestBroadcast::test_fill",
"tests/tensor/test_elemwise.py::TestBroadcast::test_fill_var",
"tests/tensor/test_elemwise.py::TestBroadcast::test_fill_grad",
"tests/tensor/test_elemwise.py::TestBroadcast::test_weird_strides",
"tests/tensor/test_elemwise.py::TestBroadcast::test_same_inputs",
"tests/tensor/test_elemwise.py::TestCAReduce::test_perform_noopt",
"tests/tensor/test_elemwise.py::TestCAReduce::test_perform_nan",
"tests/tensor/test_elemwise.py::TestCAReduce::test_c_noopt",
"tests/tensor/test_elemwise.py::TestCAReduce::test_infer_shape",
"tests/tensor/test_elemwise.py::TestBitOpReduceGrad::test_all_grad",
"tests/tensor/test_elemwise.py::TestBitOpReduceGrad::test_any_grad",
"tests/tensor/test_elemwise.py::TestElemwise::test_elemwise_grad_bool",
"tests/tensor/test_elemwise.py::TestElemwise::test_infer_shape",
"tests/tensor/test_elemwise.py::TestElemwise::test_input_dimensions_overflow",
"tests/tensor/test_elemwise.py::TestElemwise::test_input_dimensions_match_python",
"tests/tensor/test_elemwise.py::test_not_implemented_elemwise_grad"
] | {
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true
} | 2022-03-11 02:00:34+00:00 | bsd-3-clause | 909 |
|
aesara-devs__aesara-902 | diff --git a/aesara/compile/builders.py b/aesara/compile/builders.py
index b714138eb..4328e95e7 100644
--- a/aesara/compile/builders.py
+++ b/aesara/compile/builders.py
@@ -375,6 +375,11 @@ class OpFromGraph(Op, HasInnerGraph):
self.kwargs = kwargs
self.input_types = [inp.type for inp in inputs]
self.output_types = [out.type for out in outputs]
+
+ self.lop_overrides = lop_overrides
+ self.grad_overrides = grad_overrides
+ self.rop_overrides = rop_overrides
+
if lop_overrides != "default":
if grad_overrides != "default":
raise ValueError(
@@ -732,19 +737,71 @@ class OpFromGraph(Op, HasInnerGraph):
]
return ret_l
+ def __call__(self, *inputs, **kwargs):
+ # The user interface doesn't expect the shared variable inputs of the
+ # inner-graph, but, since `Op.make_node` does (and `Op.__call__`
+ # dispatches to `Op.make_node`), we need to compensate here
+ num_expected_inps = len(self.inner_inputs) - len(self.shared_inputs)
+
+ if len(inputs) == num_expected_inps:
+ actual_inputs = inputs + tuple(self.shared_inputs)
+ return super().__call__(*actual_inputs, **kwargs)
+ elif len(inputs) == len(self.inner_inputs):
+ return super().__call__(*inputs, **kwargs)
+ else:
+ raise ValueError(f"Expected at least {num_expected_inps} input(s)")
+
def make_node(self, *inputs):
+ # The `inputs` received here should correspond to the inputs in the
+ # `Apply` nodes we produce below
+ if len(inputs) != len(self.inner_inputs):
+ raise ValueError(f"Expected {len(self.inner_inputs)} input(s)")
+
num_expected_inps = len(self.inner_inputs) - len(self.shared_inputs)
- if len(inputs) != num_expected_inps:
- raise ValueError(
- f"Expected {int(num_expected_inps)} inputs, got {len(inputs)}"
- )
- inputs = [
- inp_t.filter_variable(inp) for inp, inp_t in zip(inputs, self.input_types)
+ non_shared_inputs = inputs[:num_expected_inps]
+
+ non_shared_inputs = [
+ inp_t.filter_variable(inp)
+ for inp, inp_t in zip(non_shared_inputs, self.input_types)
]
+
+ shared_inputs = inputs[num_expected_inps:]
+ local_shared_inputs = self.inner_inputs[num_expected_inps:]
+
+ inner_and_input_shareds = list(zip(local_shared_inputs, shared_inputs))
+
+ if not all(inp_s == inn_s for inn_s, inp_s in inner_and_input_shareds):
+ # The shared variables are not equal to the original shared
+ # variables, so we construct a new `Op` that uses the new shared
+ # variables instead
+ replace = {
+ old_inp: new_inp for old_inp, new_inp in zip(self.inner_inputs, inputs)
+ }
+ replace.update(inner_and_input_shareds)
+
+ # If the new shared variables are inconsistent with the inner-graph,
+ # such errors should arise in this step
+ new_outputs = clone_replace(
+ self.inner_outputs, replace=replace, share_inputs=True
+ )
+
+ new_op = type(self)(
+ inputs=non_shared_inputs,
+ outputs=new_outputs,
+ inline=self.is_inline,
+ lop_overrides=self.lop_overrides,
+ grad_overrides=self.grad_overrides,
+ rop_overrides=self.rop_overrides,
+ connection_pattern=self._connection_pattern,
+ name=self.name,
+ )
+ else:
+ new_op = self
+
apply_node = Apply(
- self,
- list(inputs) + self.shared_inputs,
- [type() for type in self.output_types],
+ new_op,
+ list(non_shared_inputs) + new_op.shared_inputs,
+ [type() for type in new_op.output_types],
)
return apply_node
| aesara-devs/aesara | 6ef1452a83681e2ccd582ba12aaa7be778e1ff15 | diff --git a/tests/compile/test_builders.py b/tests/compile/test_builders.py
index a552c7a09..9d894aae7 100644
--- a/tests/compile/test_builders.py
+++ b/tests/compile/test_builders.py
@@ -3,6 +3,7 @@ from functools import partial
import numpy as np
import pytest
+import aesara.tensor as at
from aesara.compile import shared
from aesara.compile.builders import OpFromGraph
from aesara.compile.function import function
@@ -29,6 +30,12 @@ class TestOpFromGraph(unittest_tools.InferShapeTester):
def test_valid_input(self):
x, y, z = matrices("xyz")
+ with pytest.raises(ValueError, match="Expected at least.*"):
+ OpFromGraph([x], [x])()
+
+ with pytest.raises(ValueError, match=r"Expected 1 input\(s\)"):
+ OpFromGraph([x], [x]).make_node()
+
with pytest.raises(TypeError):
OpFromGraph((x,), (x,))
@@ -451,6 +458,39 @@ class TestOpFromGraph(unittest_tools.InferShapeTester):
grad_f = grad(f, y)
assert grad_f.tag.test_value is not None
+ def test_make_node_shared(self):
+ """Make sure we can provide `OpFromGraph.make_node` new shared inputs and get a valid `OpFromGraph`."""
+
+ x = at.scalar("x")
+ y = shared(1.0, name="y")
+
+ test_ofg = OpFromGraph([x], [x + y])
+ assert test_ofg.inputs == [x]
+ assert test_ofg.shared_inputs == [y]
+
+ out = test_ofg(x)
+
+ y_clone = y.clone()
+ assert y_clone != y
+ y_clone.name = "y_clone"
+
+ out_new = test_ofg.make_node(*(out.owner.inputs[:1] + [y_clone])).outputs[0]
+
+ assert out_new.owner.op.inputs == [x]
+ assert out_new.owner.op.shared_inputs == [y_clone]
+
+ out_fn = function([x], out_new)
+
+ assert np.array_equal(out_fn(1.0), 2.0)
+
+ y_clone.set_value(2.0)
+
+ assert np.array_equal(out_fn(1.0), 3.0)
+
+ # This should also work, because the containers are the same:
+ # y.set_value(1.0)
+ # assert np.array_equal(out_fn(1.0), 2.0)
+
def test_debugprint():
x, y, z = matrices("xyz")
diff --git a/tests/scan/test_opt.py b/tests/scan/test_opt.py
index 06eb27372..af14b7930 100644
--- a/tests/scan/test_opt.py
+++ b/tests/scan/test_opt.py
@@ -4,6 +4,7 @@ import pytest
import aesara
import aesara.tensor.basic as at
from aesara import function, scan, shared
+from aesara.compile.builders import OpFromGraph
from aesara.compile.io import In
from aesara.compile.mode import get_default_mode
from aesara.configdefaults import config
@@ -550,6 +551,28 @@ class TestPushOutNonSeqScan:
utt.assert_allclose(output_opt[0], output_no_opt[0])
utt.assert_allclose(output_opt[1], output_no_opt[1])
+ def test_OpFromGraph_shared(self):
+ """Make sure that a simple `OpFromGraph` with a shared variable can be pushed out."""
+
+ y = shared(1.0, name="y")
+
+ test_ofg = OpFromGraph([], [1 + y])
+
+ def inner_func():
+ return test_ofg()
+
+ out, out_updates = aesara.scan(inner_func, n_steps=10)
+
+ out_fn = function([], out, updates=out_updates)
+
+ res = out_fn()
+ assert np.array_equal(res, np.repeat(2.0, 10))
+
+ y.set_value(2.0)
+
+ res = out_fn()
+ assert np.array_equal(res, np.repeat(3.0, 10))
+
class TestPushOutAddScan:
"""
| `OpFromGraph.make_node`'s expected inputs and actual inputs are inconsistent
`OpFromGraph.make_node` expects inputs corresponding to each of its inner-graph inputs, except for the shared variables; however, `OpFromGraph.make_node` also constructs `Apply` nodes such that `Apply.inputs` _contains_ the shared variable inputs of the inner-graph. This leads to an inherent inconsistency that prevents rewrites from reconstructing nodes via `Op.make_node(*Apply.inputs)`.
Here's an example in which this issue results in an optimization failure:
```python
import aesara
import aesara.tensor as at
from aesara.compile.builders import OpFromGraph
y = aesara.shared(1.0, name="y")
test_ofg = OpFromGraph([], [1 + y])
test_ofg.inputs
# []
test_ofg.shared_inputs
# [y]
def inner_func():
return test_ofg()
out, out_updates = aesara.scan(inner_func, n_steps=10)
with aesara.config.change_flags(on_opt_error="raise"):
out_fn = aesara.function([], out)
# ERROR (aesara.graph.opt): Optimization failure due to: push_out_non_seq_scan
# ERROR (aesara.graph.opt): node: for{cpu,scan_fn}(TensorConstant{10}, TensorConstant{10}, y)
# ERROR (aesara.graph.opt): TRACEBACK:
# ERROR (aesara.graph.opt): Traceback (most recent call last):
# File ".../Aesara/aesara/graph/opt.py", line 1850, in process_node
# replacements = lopt.transform(fgraph, node)
# File ".../Aesara/aesara/graph/opt.py", line 1055, in transform
# return self.fn(fgraph, node)
# File ".../Aesara/aesara/scan/opt.py", line 282, in push_out_non_seq_scan
# nw_outer_node = nd.op.make_node(*outside_ins)
# File ".../Aesara/aesara/compile/builders.py", line 738, in make_node
# raise ValueError(
# ValueError: Expected 1 inputs, got 2
``` | 0.0 | 6ef1452a83681e2ccd582ba12aaa7be778e1ff15 | [
"tests/compile/test_builders.py::TestOpFromGraph::test_valid_input",
"tests/compile/test_builders.py::TestOpFromGraph::test_make_node_shared",
"tests/scan/test_opt.py::TestPushOutNonSeqScan::test_OpFromGraph_shared"
] | [
"tests/compile/test_builders.py::TestOpFromGraph::test_straightforward[OpFromGraph]",
"tests/compile/test_builders.py::TestOpFromGraph::test_straightforward[cls_ofg1]",
"tests/compile/test_builders.py::TestOpFromGraph::test_size_changes[OpFromGraph]",
"tests/compile/test_builders.py::TestOpFromGraph::test_size_changes[cls_ofg1]",
"tests/compile/test_builders.py::TestOpFromGraph::test_grad[OpFromGraph]",
"tests/compile/test_builders.py::TestOpFromGraph::test_grad[cls_ofg1]",
"tests/compile/test_builders.py::TestOpFromGraph::test_grad_grad[OpFromGraph]",
"tests/compile/test_builders.py::TestOpFromGraph::test_grad_grad[cls_ofg1]",
"tests/compile/test_builders.py::TestOpFromGraph::test_shared[OpFromGraph]",
"tests/compile/test_builders.py::TestOpFromGraph::test_shared[cls_ofg1]",
"tests/compile/test_builders.py::TestOpFromGraph::test_shared_grad[OpFromGraph]",
"tests/compile/test_builders.py::TestOpFromGraph::test_shared_grad[cls_ofg1]",
"tests/compile/test_builders.py::TestOpFromGraph::test_grad_override[OpFromGraph]",
"tests/compile/test_builders.py::TestOpFromGraph::test_grad_override[cls_ofg1]",
"tests/compile/test_builders.py::TestOpFromGraph::test_lop_override[OpFromGraph]",
"tests/compile/test_builders.py::TestOpFromGraph::test_lop_override[cls_ofg1]",
"tests/compile/test_builders.py::TestOpFromGraph::test_rop[OpFromGraph]",
"tests/compile/test_builders.py::TestOpFromGraph::test_rop[cls_ofg1]",
"tests/compile/test_builders.py::TestOpFromGraph::test_rop_override[OpFromGraph]",
"tests/compile/test_builders.py::TestOpFromGraph::test_rop_override[cls_ofg1]",
"tests/compile/test_builders.py::TestOpFromGraph::test_connection_pattern_override[OpFromGraph]",
"tests/compile/test_builders.py::TestOpFromGraph::test_connection_pattern_override[cls_ofg1]",
"tests/compile/test_builders.py::TestOpFromGraph::test_nested[OpFromGraph]",
"tests/compile/test_builders.py::TestOpFromGraph::test_nested[cls_ofg1]",
"tests/compile/test_builders.py::TestOpFromGraph::test_connection_pattern[OpFromGraph]",
"tests/compile/test_builders.py::TestOpFromGraph::test_connection_pattern[cls_ofg1]",
"tests/compile/test_builders.py::TestOpFromGraph::test_infer_shape",
"tests/compile/test_builders.py::TestOpFromGraph::test_compute_test_value",
"tests/compile/test_builders.py::test_debugprint",
"tests/scan/test_opt.py::TestPushOutDot::test_pushout_all",
"tests/scan/test_opt.py::TestScanMerge::test_belongs_to_set",
"tests/scan/test_opt.py::TestSaveMem::test_save_mem_reduced_number_of_steps",
"tests/scan/test_opt.py::TestSaveMem::test_save_mem_store_steps",
"tests/scan/test_opt.py::test_opt_order"
] | {
"failed_lite_validators": [
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false
} | 2022-04-13 21:10:22+00:00 | bsd-3-clause | 910 |
|
aesara-devs__aesara-928 | diff --git a/aesara/scalar/basic.py b/aesara/scalar/basic.py
index 0bf12a70a..36a05c387 100644
--- a/aesara/scalar/basic.py
+++ b/aesara/scalar/basic.py
@@ -2307,6 +2307,8 @@ class Pow(BinaryScalarOp):
if (
node.inputs[0].type == node.outputs[0].type
and node.inputs[1].type == node.outputs[0].type
+ and None not in node.inputs[0].type.shape
+ and None not in node.inputs[1].type.shape
and
# amdlibm 3.0 do not have a float64 version of this SIMD function
node.inputs[0].dtype == "float32"
diff --git a/aesara/tensor/elemwise.py b/aesara/tensor/elemwise.py
index b678ad16b..ce7d67b5f 100644
--- a/aesara/tensor/elemwise.py
+++ b/aesara/tensor/elemwise.py
@@ -913,7 +913,7 @@ second dimension
checks = cgen.make_checks(orders, idtypes, sub)
# Check if all inputs (except broadcasted scalar) are fortran.
- # In that case, create an fortran output ndarray.
+ # In that case, create a fortran output ndarray.
z = list(zip(inames, inputs))
alloc_fortran = " && ".join(
[
@@ -1071,7 +1071,7 @@ second dimension
# If all inputs and outputs are contiguous
# and the scalar op define optimized code for that case
- # use it! The scalar_op need to check the broadcast flag himself.
+ # use it! The scalar_op needs to check the type-level shapes itself.
if (
all(o.ndim >= 1 for o in node.outputs)
and
@@ -1088,11 +1088,19 @@ second dimension
# compiler to vectorize the code as their won't be as
# many ptr and the stride will be hard coded.
if all(
- [
- io.broadcastable == node.outputs[0].broadcastable
- or all(io.broadcastable)
- for io in node.inputs + node.outputs
- ]
+ # io.type.shape == node.outputs[1].type.shape
+ # Elemwise does not specify non-broadcastable static/type-levelshape
+ # information for its outputs yet
+ node.outputs[0].type.is_super(io.type)
+ for io in node.inputs + node.outputs
+ ) and (
+ len(node.inputs) <= 1
+ # If either one of the inputs has a `None` shape, we cannot
+ # assume they will have the same size
+ or all(
+ len(set(inp_shape)) == 1 and None not in inp_shape
+ for inp_shape in zip(*(inp.type.shape for inp in node.inputs))
+ )
):
z = onames[0]
contig = f"""
@@ -1188,7 +1196,7 @@ second dimension
return support_code
def c_code_cache_version_apply(self, node):
- version = [13] # the version corresponding to the c code in this Op
+ version = [14] # the version corresponding to the c code in this Op
# now we insert versions for the ops on which we depend...
scalar_node = Apply(
diff --git a/aesara/tensor/elemwise_cgen.py b/aesara/tensor/elemwise_cgen.py
index cfc5abde8..9e50369fb 100644
--- a/aesara/tensor/elemwise_cgen.py
+++ b/aesara/tensor/elemwise_cgen.py
@@ -66,10 +66,12 @@ def make_checks(loop_orders, dtypes, sub):
if index != "x":
# Initialize the variables associated to the jth loop
# jump = stride - adjust
+ # If the variable has size 1 in that dim, we set the stride to zero to
+ # emulate broadcasting
jump = f"({var}_stride{index}) - ({adjust})"
init += f"""
{var}_n{index} = PyArray_DIMS({var})[{index}];
- {var}_stride{index} = PyArray_STRIDES({var})[{index}] / sizeof({dtype});
+ {var}_stride{index} = ({var}_n{index} == 1)? 0 : PyArray_STRIDES({var})[{index}] / sizeof({dtype});
{var}_jump{index}_{j} = {jump};
"""
adjust = f"{var}_n{index}*{var}_stride{index}"
@@ -90,26 +92,85 @@ def make_checks(loop_orders, dtypes, sub):
# elements of to_compare are pairs ( input_variable_idx, input_variable_dim_idx )
if len(to_compare) < 2:
continue
- j0, x0 = to_compare[0]
- for (j, x) in to_compare[1:]:
- check += f"""
- if (%(lv{j0})s_n{x0} != %(lv{j})s_n{x})
+
+ # Find first dimension size that is != 1
+ jl, xl = to_compare[-1]
+ non1size_dim_check = f"""
+ npy_intp non1size_dim{xl};
+ non1size_dim{xl} = """
+ for (j, x) in to_compare[:-1]:
+ non1size_dim_check += f"(%(lv{j})s_n{x} != 1) ? %(lv{j})s_n{x} : "
+ non1size_dim_check += f"%(lv{jl})s_n{xl};"
+ check += non1size_dim_check
+
+ # Check the nonsize1 dims match
+ # TODO: This is a bit inefficient because we are comparing one dimension against itself
+ check += f"""
+ if (non1size_dim{xl} != 1)
{{
- PyErr_Format(PyExc_ValueError, "Input dimension mismatch. (input[%%i].shape[%%i] = %%lld, input[%%i].shape[%%i] = %%lld)",
- {j0},
- {x0},
- (long long int) %(lv{j0})s_n{x0},
- {j},
- {x},
- (long long int) %(lv{j})s_n{x}
- );
- %(fail)s
- }}
+ """
+ for (j, x) in to_compare:
+ check += f"""
+ if ((%(lv{j})s_n{x} != non1size_dim{x}) && (%(lv{j})s_n{x} != 1))
+ {{
+ PyErr_Format(PyExc_ValueError, "Input dimension mismatch. One other input has shape[%%i] = %%lld, but input[%%i].shape[%%i] = %%lld.",
+ {x},
+ (long long int) non1size_dim{x},
+ {j},
+ {x},
+ (long long int) %(lv{j})s_n{x}
+ );
+ %(fail)s
+ }}
"""
+ check += """
+ }
+ """
return init % sub + check % sub
+def compute_broadcast_dimensions(array_name: str, loop_orders, sub) -> str:
+ """Create c_code to compute broadcasted dimensions of multiple arrays, arising from
+ Elemwise operations.
+
+ The code returned by this function populates the array `array_name`, but does not
+ initialize it.
+
+ TODO: We can decide to either specialize C code even further given the input types
+ or make it general, regardless of whether static broadcastable information is given
+ """
+ dims_c_code = ""
+ for i, candidates in enumerate(zip(*loop_orders)):
+ # TODO: Are candidates always either "x" or "i"? If that's the case we can
+ # simplify some logic here (e.g., we don't need to track the `idx`).
+ nonx_candidates = tuple(
+ (idx, c) for idx, c in enumerate(candidates) if c != "x"
+ )
+
+ # All inputs are known to be broadcastable
+ if not nonx_candidates:
+ dims_c_code += f"{array_name}[{i}] = 1;\n"
+ continue
+
+ # There is only one informative source of size
+ if len(nonx_candidates) == 1:
+ idx, candidate = nonx_candidates[0]
+ var = sub[f"lv{int(idx)}"]
+ dims_c_code += f"{array_name}[{i}] = {var}_n{candidate};\n"
+ continue
+
+ # In this case any non-size 1 variable will define the right size
+ dims_c_code += f"{array_name}[{i}] = "
+ for (idx, candidate) in nonx_candidates[:-1]:
+ var = sub[f"lv{int(idx)}"]
+ dims_c_code += f"({var}_n{candidate} != 1)? {var}_n{candidate}: "
+ idx, candidate = nonx_candidates[-1]
+ var = sub[f"lv{idx}"]
+ dims_c_code += f"{var}_n{candidate};\n"
+ return dims_c_code
+
+
def make_alloc(loop_orders, dtype, sub, fortran="0"):
"""Generate C code to allocate outputs.
@@ -125,20 +186,7 @@ def make_alloc(loop_orders, dtype, sub, fortran="0"):
if type.startswith("AESARA_COMPLEX"):
type = type.replace("AESARA_COMPLEX", "NPY_COMPLEX")
nd = len(loop_orders[0])
- init_dims = ""
- # For each dimension, the tensors are either all broadcasted, in
- # which case the output will also be broadcastable (dimension =
- # 1), or one or more are not broadcasted, in which case the number
- # of elements of the output in that dimension will be equal to the
- # number of elements of any of them.
- for i, candidates in enumerate(zip(*loop_orders)):
- for j, candidate in enumerate(candidates):
- if candidate != "x":
- var = sub[f"lv{int(j)}"]
- init_dims += f"dims[{i}] = {var}_n{candidate};\n"
- break
- else:
- init_dims += f"dims[{i}] = 1;\n"
+ init_dims = compute_broadcast_dimensions("dims", loop_orders, sub)
# TODO: it would be interesting to allocate the output in such a
# way that its contiguous dimensions match one of the input's
@@ -310,26 +358,8 @@ def make_reordered_loop(
"""
# Get the (sorted) total number of iterations of each loop
- # Get totals in the initial order
- # For each dimension, the tensors are either all broadcasted, in
- # which case there is only one iteration of the loop, or one or
- # more are not broadcasted, in which case the number of elements
- # of any of them will be equal to the number of iterations we have
- # to do.
- totals = []
- for i, candidates in enumerate(zip(*init_loop_orders)):
- for j, candidate in enumerate(candidates):
- if candidate != "x":
- var = sub[f"lv{int(j)}"]
- total = f"{var}_n{candidate}"
- break
- else:
- total = "1"
- totals.append(total)
-
- declare_totals = f"""
- int init_totals[{nnested}] = {{{", ".join(totals)}}};
- """
+ declare_totals = f"int init_totals[{nnested}];\n"
+ declare_totals += compute_broadcast_dimensions("init_totals", init_loop_orders, sub)
# Sort totals to match the new order that was computed by sorting
# the loop vector. One integer variable per loop is declared.
| aesara-devs/aesara | b60cf7240a6d17ed80db2bd9b43c6faf377d64fe | diff --git a/tests/tensor/test_elemwise.py b/tests/tensor/test_elemwise.py
index c2b2d230b..1b298a92a 100644
--- a/tests/tensor/test_elemwise.py
+++ b/tests/tensor/test_elemwise.py
@@ -206,77 +206,117 @@ class TestBroadcast:
return np.asarray(np.random.random(shp), dtype=aesara.config.floatX)
def with_linker(self, linker, op, type, rand_val):
- for xsh, ysh in [
- ((3, 5), (3, 5)),
- ((3, 5), (1, 5)),
- ((3, 5), (3, 1)),
- ((1, 5), (5, 1)),
- ((1, 1), (1, 1)),
- ((self.openmp_minsize,), (self.openmp_minsize,)),
- (
- (self.openmp_minsize_sqrt, self.openmp_minsize_sqrt),
- (self.openmp_minsize_sqrt, self.openmp_minsize_sqrt),
- ),
- ((2, 3, 4, 5), (2, 3, 4, 5)),
- ((2, 3, 4, 5), (1, 3, 1, 5)),
- ((2, 3, 4, 5), (1, 1, 1, 1)),
- ((), ()),
- ]:
- x = type(aesara.config.floatX, [(entry == 1) for entry in xsh])("x")
- y = type(aesara.config.floatX, [(entry == 1) for entry in ysh])("y")
- e = op(aes.add)(x, y)
- f = make_function(copy(linker).accept(FunctionGraph([x, y], [e])))
- xv = rand_val(xsh)
- yv = rand_val(ysh)
- zv = xv + yv
-
- unittest_tools.assert_allclose(f(xv, yv), zv)
+ for shape_info in ("complete", "only_broadcastable", "none"):
+ for xsh, ysh in [
+ ((3, 5), (3, 5)),
+ ((3, 5), (1, 5)),
+ ((3, 5), (3, 1)),
+ ((1, 5), (5, 1)),
+ ((1, 1), (1, 1)),
+ ((self.openmp_minsize,), (self.openmp_minsize,)),
+ (
+ (self.openmp_minsize_sqrt, self.openmp_minsize_sqrt),
+ (self.openmp_minsize_sqrt, self.openmp_minsize_sqrt),
+ ),
+ ((2, 3, 4, 5), (2, 3, 4, 5)),
+ ((2, 3, 4, 5), (1, 3, 1, 5)),
+ ((2, 3, 4, 5), (1, 1, 1, 1)),
+ ((), ()),
+ ]:
+ if shape_info == "complete":
+ x_type = type(aesara.config.floatX, shape=xsh)
+ y_type = type(aesara.config.floatX, shape=ysh)
+ elif shape_info == "only_broadcastable":
+ # This condition is here for backwards compatibility, when the only
+ # type shape provided by Aesara was broadcastable/non-broadcastable
+ x_type = type(
+ aesara.config.floatX,
+ broadcastable=[(entry == 1) for entry in xsh],
+ )
+ y_type = type(
+ aesara.config.floatX,
+ broadcastable=[(entry == 1) for entry in ysh],
+ )
+ else:
+ x_type = type(aesara.config.floatX, shape=[None for _ in xsh])
+ y_type = type(aesara.config.floatX, shape=[None for _ in ysh])
- # test Elemwise.infer_shape
- # the Shape op don't implement c_code!
- if isinstance(linker, PerformLinker):
- x = type(aesara.config.floatX, [(entry == 1) for entry in xsh])("x")
- y = type(aesara.config.floatX, [(entry == 1) for entry in ysh])("y")
+ x = x_type("x")
+ y = y_type("y")
e = op(aes.add)(x, y)
- f = make_function(copy(linker).accept(FunctionGraph([x, y], [e.shape])))
- assert tuple(f(xv, yv)) == tuple(zv.shape)
+ f = make_function(copy(linker).accept(FunctionGraph([x, y], [e])))
+ xv = rand_val(xsh)
+ yv = rand_val(ysh)
+ zv = xv + yv
- def with_linker_inplace(self, linker, op, type, rand_val):
- for xsh, ysh in [
- ((5, 5), (5, 5)),
- ((5, 5), (1, 5)),
- ((5, 5), (5, 1)),
- ((1, 1), (1, 1)),
- ((2, 3, 4, 5), (2, 3, 4, 5)),
- ((2, 3, 4, 5), (1, 3, 1, 5)),
- ((2, 3, 4, 5), (1, 1, 1, 1)),
- ((), ()),
- ]:
- x = type(aesara.config.floatX, [(entry == 1) for entry in xsh])("x")
- y = type(aesara.config.floatX, [(entry == 1) for entry in ysh])("y")
- e = op(aes.Add(aes.transfer_type(0)), {0: 0})(x, y)
- f = make_function(copy(linker).accept(FunctionGraph([x, y], [e])))
- xv = rand_val(xsh)
- yv = rand_val(ysh)
- zv = xv + yv
+ unittest_tools.assert_allclose(f(xv, yv), zv)
- f(xv, yv)
+ # test Elemwise.infer_shape
+ # the Shape op don't implement c_code!
+ if isinstance(linker, PerformLinker):
+ x = x_type("x")
+ y = y_type("y")
+ e = op(aes.add)(x, y)
+ f = make_function(
+ copy(linker).accept(FunctionGraph([x, y], [e.shape]))
+ )
+ assert tuple(f(xv, yv)) == tuple(zv.shape)
- assert (xv == zv).all()
- # test Elemwise.infer_shape
- # the Shape op don't implement c_code!
- if isinstance(linker, PerformLinker):
- x = type(aesara.config.floatX, [(entry == 1) for entry in xsh])("x")
- y = type(aesara.config.floatX, [(entry == 1) for entry in ysh])("y")
+ def with_linker_inplace(self, linker, op, type, rand_val):
+ for shape_info in ("complete", "only_broadcastable", "none"):
+ for xsh, ysh in [
+ ((5, 5), (5, 5)),
+ ((5, 5), (1, 5)),
+ ((5, 5), (5, 1)),
+ ((1, 1), (1, 1)),
+ ((2, 3, 4, 5), (2, 3, 4, 5)),
+ ((2, 3, 4, 5), (1, 3, 1, 5)),
+ ((2, 3, 4, 5), (1, 1, 1, 1)),
+ ((), ()),
+ ]:
+ if shape_info == "complete":
+ x_type = type(aesara.config.floatX, shape=xsh)
+ y_type = type(aesara.config.floatX, shape=ysh)
+ elif shape_info == "only_broadcastable":
+ # This condition is here for backwards compatibility, when the only
+ # type shape provided by Aesara was broadcastable/non-broadcastable
+ x_type = type(
+ aesara.config.floatX,
+ broadcastable=[(entry == 1) for entry in xsh],
+ )
+ y_type = type(
+ aesara.config.floatX,
+ broadcastable=[(entry == 1) for entry in ysh],
+ )
+ else:
+ x_type = type(aesara.config.floatX, shape=[None for _ in xsh])
+ y_type = type(aesara.config.floatX, shape=[None for _ in ysh])
+
+ x = x_type("x")
+ y = y_type("y")
e = op(aes.Add(aes.transfer_type(0)), {0: 0})(x, y)
- f = make_function(copy(linker).accept(FunctionGraph([x, y], [e.shape])))
+ f = make_function(copy(linker).accept(FunctionGraph([x, y], [e])))
xv = rand_val(xsh)
yv = rand_val(ysh)
zv = xv + yv
f(xv, yv)
- assert xv.shape == zv.shape
+ assert (xv == zv).all()
+ # test Elemwise.infer_shape
+ # the Shape op don't implement c_code!
+ if isinstance(linker, PerformLinker):
+ x = x_type("x")
+ y = y_type("y")
+ e = op(aes.Add(aes.transfer_type(0)), {0: 0})(x, y)
+ f = make_function(
+ copy(linker).accept(FunctionGraph([x, y], [e.shape]))
+ )
+ xv = rand_val(xsh)
+ yv = rand_val(ysh)
+ zv = xv + yv
+ assert xv.shape == zv.shape
+ assert tuple(f(xv, yv)) == zv.shape
def test_perform(self):
self.with_linker(PerformLinker(), self.op, self.type, self.rand_val)
@@ -746,10 +786,6 @@ class TestElemwise(unittest_tools.InferShapeTester):
def test_input_dimensions_match_python(self):
self.check_input_dimensions_match(Mode(linker="py"))
- @pytest.mark.xfail(
- reason="Elemwise C implementation does not broadcast parameters",
- exception=ValueError,
- )
@pytest.mark.skipif(
not aesara.config.cxx, reason="G++ not available, so we need to skip this test."
)
| C Elemwise implementation doesn't broadcast variables
I'm seeing a very weird error in `Elemwise`:
First, here's a basic broadcasting operations in NumPy:
```python
import numpy as np
x = np.array([[-1.32720483],
[ 0.23442016]])
m = np.array([0., 0.])
z = x - m
```
```python
>>> z
array([[-1.32720483, -1.32720483],
[ 0.23442016, 0.23442016]])
```
In Aesara, here's the equivalent operation using `TensorConstant`s:
```python
import aesara
import aesara.tensor as at
x_at = at.as_tensor(x)
m_at = at.as_tensor(m)
z_at = x_at - m_at
```
```python
>>> aesara.dprint(z_at)
Elemwise{sub,no_inplace} [id A] ''
|TensorConstant{[[-1.32720..23442016]]} [id B]
|InplaceDimShuffle{x,0} [id C] ''
|TensorConstant{(2,) of 0.0} [id D]
```
The resulting graph is a simple `Elemwise` for the subtraction `Op`–as expected. There's also an `InplaceDimShuffle` that adds a broadcastable dimension to the second argument, so that both inputs have the same number of dimensions. This `InplaceDimShuffle` is equivalent to `np.expand_dims(m, 0)`, which–when subtracted from `x`–yields the same value as `z`.
So far, everything is good, because
```python
>>> np.array_equal(aesara.function([], z_at)(), z)
True
```
Now, when we replace the `TensorConstant`s with generic `TensorVariable`s, we get a strange error:
```python
x_v = at.matrix("x")
m_v = at.vector("m")
z_v = x_v - m_v
```
```python
>>> aesara.function([x_v, m_v], z_v)(x, m)
<ipython-input-23-66cc28afa70f> in <module>
----> 1 aesara.function([x_v, m_v], z_v)(x, m)
~/projects/code/python/Aesara/aesara/compile/function/types.py in __call__(self, *args, **kwargs)
989 node=self.fn.nodes[self.fn.position_of_error],
990 thunk=thunk,
--> 991 storage_map=getattr(self.fn, "storage_map", None),
992 )
993 else:
~/projects/code/python/Aesara/aesara/link/utils.py in raise_with_op(fgraph, node, thunk, exc_info, storage_map)
506 # Some exception need extra parameter in inputs. So forget the
507 # extra long error message in that case.
--> 508 raise exc_value.with_traceback(exc_trace)
509
510
~/projects/code/python/Aesara/aesara/compile/function/types.py in __call__(self, *args, **kwargs)
973 outputs = (
974 self.fn()
--> 975 if output_subset is None
976 else self.fn(output_subset=output_subset)
977 )
ValueError: Input dimension mis-match. (input[0].shape[1] = 1, input[1].shape[1] = 2)
Apply node that caused the error: Elemwise{sub,no_inplace}(x, InplaceDimShuffle{x,0}.0)
Toposort index: 1
Inputs types: [TensorType(float64, matrix), TensorType(float64, row)]
Inputs shapes: [(2, 1), (1, 2)]
Inputs strides: [(8, 8), (16, 8)]
Inputs values: [array([[-1.32720483],
[ 0.23442016]]), array([[0., 0.]])]
Outputs clients: [['output']]
```
We can reproduce this issue with the Python implementation of `Elemwise` as well:
```python
>>> aesara.function([x_v, m_v], z_v, mode="FAST_COMPILE")(x, m)
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
~/projects/code/python/Aesara/aesara/link/vm.py in __call__(self)
312 ):
--> 313 thunk()
314 for old_s in old_storage:
~/projects/code/python/Aesara/aesara/graph/op.py in rval(p, i, o, n)
472 def rval(p=p, i=node_input_storage, o=node_output_storage, n=node):
--> 473 r = p(n, [x[0] for x in i], o)
474 for o in node.outputs:
~/projects/code/python/Aesara/aesara/tensor/elemwise.py in perform(self, node, inputs, output_storage)
760 base_exc_str = f"Dimension mismatch; shapes are {', '.join(msg)}"
--> 761 raise ValueError(base_exc_str)
762
ValueError: Dimension mismatch; shapes are (2, 1), (*, 2)
```
From this output, we can see that this spurious error is apparently the result of faulty input-validation code in `Elemwise.perform`.
The same is true for the C implementation, although that's a little less apparent from the output. In this case, the C code for this validation step is [here](https://github.com/pymc-devs/aesara/blob/master/aesara/tensor/elemwise_cgen.py#L98). | 0.0 | b60cf7240a6d17ed80db2bd9b43c6faf377d64fe | [
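For reference, here is a minimal sketch (not Aesara's actual code, and the helper name is hypothetical) of NumPy-style shape validation, where a dimension of 1 is treated as broadcastable instead of being required to match exactly:

```python
def shapes_broadcastable(*shapes):
    """Hypothetical helper: True if the shapes are NumPy-broadcast-compatible."""
    ndim = max(len(s) for s in shapes)
    # Left-pad shorter shapes with 1s so all shapes align on the right.
    padded = [(1,) * (ndim - len(s)) + tuple(s) for s in shapes]
    for dims in zip(*padded):
        # Aligned dimensions must agree, except that 1 broadcasts.
        if len({d for d in dims if d != 1}) > 1:
            return False
    return True

assert shapes_broadcastable((2, 1), (1, 2))      # the case Elemwise rejects above
assert not shapes_broadcastable((2, 3), (2, 2))  # genuinely incompatible
```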
"tests/tensor/test_elemwise.py::TestBroadcast::test_c",
"tests/tensor/test_elemwise.py::TestBroadcast::test_c_inplace",
"tests/tensor/test_elemwise.py::TestElemwise::test_input_dimensions_match_c"
] | [
"tests/tensor/test_elemwise.py::TestDimShuffle::test_perform",
"tests/tensor/test_elemwise.py::TestDimShuffle::test_c_or_py",
"tests/tensor/test_elemwise.py::TestDimShuffle::test_infer_shape",
"tests/tensor/test_elemwise.py::TestDimShuffle::test_c_views",
"tests/tensor/test_elemwise.py::TestDimShuffle::test_memory_leak[True]",
"tests/tensor/test_elemwise.py::TestDimShuffle::test_memory_leak[False]",
"tests/tensor/test_elemwise.py::TestDimShuffle::test_static_shape",
"tests/tensor/test_elemwise.py::TestBroadcast::test_perform",
"tests/tensor/test_elemwise.py::TestBroadcast::test_perform_inplace",
"tests/tensor/test_elemwise.py::TestBroadcast::test_fill",
"tests/tensor/test_elemwise.py::TestBroadcast::test_fill_var",
"tests/tensor/test_elemwise.py::TestBroadcast::test_fill_grad",
"tests/tensor/test_elemwise.py::TestBroadcast::test_weird_strides",
"tests/tensor/test_elemwise.py::TestBroadcast::test_same_inputs",
"tests/tensor/test_elemwise.py::TestCAReduce::test_perform_noopt",
"tests/tensor/test_elemwise.py::TestCAReduce::test_perform_nan",
"tests/tensor/test_elemwise.py::TestCAReduce::test_c_noopt",
"tests/tensor/test_elemwise.py::TestCAReduce::test_infer_shape",
"tests/tensor/test_elemwise.py::TestCAReduce::test_str",
"tests/tensor/test_elemwise.py::TestBitOpReduceGrad::test_all_grad",
"tests/tensor/test_elemwise.py::TestBitOpReduceGrad::test_any_grad",
"tests/tensor/test_elemwise.py::TestElemwise::test_elemwise_grad_bool",
"tests/tensor/test_elemwise.py::TestElemwise::test_infer_shape",
"tests/tensor/test_elemwise.py::TestElemwise::test_input_dimensions_overflow",
"tests/tensor/test_elemwise.py::TestElemwise::test_input_dimensions_match_python",
"tests/tensor/test_elemwise.py::TestElemwise::test_str",
"tests/tensor/test_elemwise.py::test_not_implemented_elemwise_grad"
] | {
"failed_lite_validators": [
"has_hyperlinks",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false
} | 2022-04-25 12:50:52+00:00 | bsd-3-clause | 911 |
|
aesara-devs__aesara-968 | diff --git a/aesara/compile/function/types.py b/aesara/compile/function/types.py
index 8aa1e3ee8..d1f9eae2f 100644
--- a/aesara/compile/function/types.py
+++ b/aesara/compile/function/types.py
@@ -785,6 +785,8 @@ class Function:
f_cpy.finder[swap[in_ori.variable]] = container
in_cpy.variable = swap[in_ori.variable]
+ f_cpy.trust_input = self.trust_input
+ f_cpy.unpack_single = self.unpack_single
f_cpy.name = name
f_cpy.maker.fgraph.name = name
return f_cpy
| aesara-devs/aesara | 104dc0379c3585a0bc8ebb87f61f2ae38d101b83 | diff --git a/tests/compile/function/test_types.py b/tests/compile/function/test_types.py
index 61322a2e7..ea8b81986 100644
--- a/tests/compile/function/test_types.py
+++ b/tests/compile/function/test_types.py
@@ -299,7 +299,7 @@ class TestFunction:
t()
def test_copy(self):
- a = scalar() # the a is for 'anonymous' (un-named).
+ a = scalar()
x, s = scalars("xs")
f = function(
@@ -312,26 +312,34 @@ class TestFunction:
)
g = copy.copy(f)
- # if they both return, assume that they return equivalent things.
+
+ assert f.unpack_single == g.unpack_single
+ assert f.trust_input == g.trust_input
assert g.container[x].storage is not f.container[x].storage
assert g.container[a].storage is not f.container[a].storage
assert g.container[s].storage is not f.container[s].storage
- assert g.value[a] is f.value[a] # should not have been copied
- assert (
- g.value[s] is not f.value[s]
- ) # should have been copied because it is mutable.
- assert not (g.value[s] != f.value[s]).any() # its contents should be identical
+ # Should not have been copied
+ assert g.value[a] is f.value[a]
- assert f(2, 1) == g(
- 2
- ) # they should be in sync, default value should be copied.
- assert f(2, 1) == g(
- 2
- ) # they should be in sync, default value should be copied.
- f(1, 2) # put them out of sync
- assert f(1, 2) != g(1, 2) # they should not be equal anymore.
+ # Should have been copied because it is mutable
+ assert g.value[s] is not f.value[s]
+
+ # Their contents should be equal, though
+ assert np.array_equal(g.value[s], f.value[s])
+
+ # They should be in sync, default value should be copied
+ assert np.array_equal(f(2, 1), g(2))
+
+ # They should be in sync, default value should be copied
+ assert np.array_equal(f(2, 1), g(2))
+
+ # Put them out of sync
+ f(1, 2)
+
+ # They should not be equal anymore
+ assert not np.array_equal(f(1, 2), g(1, 2))
def test_copy_share_memory(self):
x = fscalar("x")
@@ -478,9 +486,9 @@ class TestFunction:
ori = function([x], out, mode=mode, updates={z: z * 2})
cpy = ori.copy(delete_updates=True)
- assert cpy(1)[0] == 4
- assert cpy(1)[0] == 4
- assert cpy(1)[0] == 4
+ assert cpy(1) == 4
+ assert cpy(1) == 4
+ assert cpy(1) == 4
# Test if unused implicit and explicit inputs from delete_updates
# are ignored as intended.
| Copied function does not respect the original's single-output format
```python
import aesara
import aesara.tensor as at
f1 = aesara.function([], at.constant(1.0))
f2 = f1.copy()
print(f1(), f2()) # 1.0 [array(1., dtype=float32)]
``` | 0.0 | 104dc0379c3585a0bc8ebb87f61f2ae38d101b83 | [
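The root cause is that `Function.copy()` drops the `unpack_single` and `trust_input` flags; the patch above copies them over. Until then, a possible workaround is to restore the flags on the copy by hand:

```python
# Workaround sketch: re-apply the flags that copy() dropped.
f2.unpack_single = f1.unpack_single
f2.trust_input = f1.trust_input
print(f1(), f2())  # both should now print 1.0
```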
"tests/compile/function/test_types.py::TestFunction::test_copy",
"tests/compile/function/test_types.py::TestFunction::test_copy_delete_updates"
] | [
"tests/compile/function/test_types.py::TestFunction::test_empty",
"tests/compile/function/test_types.py::TestFunction::test_extra_inputs",
"tests/compile/function/test_types.py::TestFunction::test_missing_inputs",
"tests/compile/function/test_types.py::TestFunction::test_input_anon_singleton",
"tests/compile/function/test_types.py::TestFunction::test_input_anon_unpack",
"tests/compile/function/test_types.py::TestFunction::test_naming_rule0",
"tests/compile/function/test_types.py::TestFunction::test_naming_rule1",
"tests/compile/function/test_types.py::TestFunction::test_naming_rule2",
"tests/compile/function/test_types.py::TestFunction::test_naming_rule3",
"tests/compile/function/test_types.py::TestFunction::test_naming_rule4",
"tests/compile/function/test_types.py::TestFunction::test_state_access[mode0]",
"tests/compile/function/test_types.py::TestFunction::test_state_access[mode2]",
"tests/compile/function/test_types.py::TestFunction::test_state_access[mode3]",
"tests/compile/function/test_types.py::TestFunction::test_same_names",
"tests/compile/function/test_types.py::TestFunction::test_weird_names",
"tests/compile/function/test_types.py::TestFunction::test_copy_share_memory",
"tests/compile/function/test_types.py::TestFunction::test_swap_SharedVariable",
"tests/compile/function/test_types.py::TestFunction::test_swap_SharedVariable_with_given",
"tests/compile/function/test_types.py::TestFunction::test_shared_state0",
"tests/compile/function/test_types.py::TestFunction::test_shared_state1",
"tests/compile/function/test_types.py::TestFunction::test_shared_state2",
"tests/compile/function/test_types.py::TestFunction::test_shared_state_not_implicit",
"tests/compile/function/test_types.py::TestFunction::test_constant_output",
"tests/compile/function/test_types.py::TestFunction::test_borrow_input",
"tests/compile/function/test_types.py::TestFunction::test_borrow_output",
"tests/compile/function/test_types.py::TestFunction::test_disconnected_input",
"tests/compile/function/test_types.py::TestFunction::test_masked_input",
"tests/compile/function/test_types.py::TestFunction::test_givens_input_var",
"tests/compile/function/test_types.py::TestFunction::test_free",
"tests/compile/function/test_types.py::TestFunction::test_default_values",
"tests/compile/function/test_types.py::TestFunction::test_check_for_aliased_inputs",
"tests/compile/function/test_types.py::TestPicklefunction::test_deepcopy",
"tests/compile/function/test_types.py::TestPicklefunction::test_deepcopy_trust_input",
"tests/compile/function/test_types.py::TestPicklefunction::test_output_keys",
"tests/compile/function/test_types.py::TestPicklefunction::test_deepcopy_shared_container",
"tests/compile/function/test_types.py::TestPicklefunction::test_pickle",
"tests/compile/function/test_types.py::TestPicklefunction::test_multiple_functions",
"tests/compile/function/test_types.py::TestPicklefunction::test_pickle_class_with_functions",
"tests/compile/function/test_types.py::test_empty_givens_updates"
] | {
"failed_lite_validators": [
"has_short_problem_statement"
],
"has_test_patch": true,
"is_lite": false
} | 2022-05-20 03:14:15+00:00 | bsd-3-clause | 912 |
|
aesara-devs__aesara-995 | diff --git a/aesara/graph/type.py b/aesara/graph/type.py
index f26d72664..f35a6d969 100644
--- a/aesara/graph/type.py
+++ b/aesara/graph/type.py
@@ -180,10 +180,10 @@ class Type(MetaObject):
return None
- def is_valid_value(self, data: D) -> bool:
+ def is_valid_value(self, data: D, strict: bool = True) -> bool:
"""Return ``True`` for any python object that would be a legal value for a `Variable` of this `Type`."""
try:
- self.filter(data, strict=True)
+ self.filter(data, strict=strict)
return True
except (TypeError, ValueError):
return False
diff --git a/aesara/tensor/random/type.py b/aesara/tensor/random/type.py
index ff00669c4..68373b08c 100644
--- a/aesara/tensor/random/type.py
+++ b/aesara/tensor/random/type.py
@@ -1,9 +1,14 @@
+from typing import Generic, TypeVar
+
import numpy as np
import aesara
from aesara.graph.type import Type
+T = TypeVar("T", np.random.RandomState, np.random.Generator)
+
+
gen_states_keys = {
"MT19937": (["state"], ["key", "pos"]),
"PCG64": (["state", "has_uint32", "uinteger"], ["state", "inc"]),
@@ -18,22 +23,15 @@ gen_states_keys = {
numpy_bit_gens = {0: "MT19937", 1: "PCG64", 2: "Philox", 3: "SFC64"}
-class RandomType(Type):
+class RandomType(Type, Generic[T]):
r"""A Type wrapper for `numpy.random.Generator` and `numpy.random.RandomState`."""
- @classmethod
- def filter(cls, data, strict=False, allow_downcast=None):
- if cls.is_valid_value(data, strict):
- return data
- else:
- raise TypeError()
-
@staticmethod
- def may_share_memory(a, b):
+ def may_share_memory(a: T, b: T):
return a._bit_generator is b._bit_generator
-class RandomStateType(RandomType):
+class RandomStateType(RandomType[np.random.RandomState]):
r"""A Type wrapper for `numpy.random.RandomState`.
The reason this exists (and `Generic` doesn't suffice) is that
@@ -49,28 +47,38 @@ class RandomStateType(RandomType):
def __repr__(self):
return "RandomStateType"
- @staticmethod
- def is_valid_value(a, strict):
- if isinstance(a, np.random.RandomState):
- return True
+ def filter(self, data, strict: bool = False, allow_downcast=None):
+ """
+ XXX: This doesn't convert `data` to the same type of underlying RNG type
+ as `self`. It really only checks that `data` is of the appropriate type
+ to be a valid `RandomStateType`.
+
+ In other words, it serves as a `Type.is_valid_value` implementation,
+ but, because the default `Type.is_valid_value` depends on
+ `Type.filter`, we need to have it here to avoid surprising circular
+ dependencies in sub-classes.
+ """
+ if isinstance(data, np.random.RandomState):
+ return data
- if not strict and isinstance(a, dict):
+ if not strict and isinstance(data, dict):
gen_keys = ["bit_generator", "gauss", "has_gauss", "state"]
state_keys = ["key", "pos"]
for key in gen_keys:
- if key not in a:
- return False
+ if key not in data:
+ raise TypeError()
for key in state_keys:
- if key not in a["state"]:
- return False
+ if key not in data["state"]:
+ raise TypeError()
- state_key = a["state"]["key"]
+ state_key = data["state"]["key"]
if state_key.shape == (624,) and state_key.dtype == np.uint32:
- return True
+ # TODO: Add an option to convert to a `RandomState` instance?
+ return data
- return False
+ raise TypeError()
@staticmethod
def values_eq(a, b):
@@ -114,7 +122,7 @@ aesara.compile.register_view_op_c_code(
random_state_type = RandomStateType()
-class RandomGeneratorType(RandomType):
+class RandomGeneratorType(RandomType[np.random.Generator]):
r"""A Type wrapper for `numpy.random.Generator`.
The reason this exists (and `Generic` doesn't suffice) is that
@@ -130,16 +138,25 @@ class RandomGeneratorType(RandomType):
def __repr__(self):
return "RandomGeneratorType"
- @staticmethod
- def is_valid_value(a, strict):
- if isinstance(a, np.random.Generator):
- return True
+ def filter(self, data, strict=False, allow_downcast=None):
+ """
+ XXX: This doesn't convert `data` to the same type of underlying RNG type
+ as `self`. It really only checks that `data` is of the appropriate type
+ to be a valid `RandomGeneratorType`.
+
+ In other words, it serves as a `Type.is_valid_value` implementation,
+ but, because the default `Type.is_valid_value` depends on
+ `Type.filter`, we need to have it here to avoid surprising circular
+ dependencies in sub-classes.
+ """
+ if isinstance(data, np.random.Generator):
+ return data
- if not strict and isinstance(a, dict):
- if "bit_generator" not in a:
- return False
+ if not strict and isinstance(data, dict):
+ if "bit_generator" not in data:
+ raise TypeError()
else:
- bit_gen_key = a["bit_generator"]
+ bit_gen_key = data["bit_generator"]
if hasattr(bit_gen_key, "_value"):
bit_gen_key = int(bit_gen_key._value)
@@ -148,16 +165,16 @@ class RandomGeneratorType(RandomType):
gen_keys, state_keys = gen_states_keys[bit_gen_key]
for key in gen_keys:
- if key not in a:
- return False
+ if key not in data:
+ raise TypeError()
for key in state_keys:
- if key not in a["state"]:
- return False
+ if key not in data["state"]:
+ raise TypeError()
- return True
+ return data
- return False
+ raise TypeError()
@staticmethod
def values_eq(a, b):
| aesara-devs/aesara | 174117f9b0a2c1ddb13e6244bbb31d6c75c12245 | diff --git a/tests/tensor/random/test_type.py b/tests/tensor/random/test_type.py
index 5f45af2fe..a34d4cbad 100644
--- a/tests/tensor/random/test_type.py
+++ b/tests/tensor/random/test_type.py
@@ -56,15 +56,17 @@ class TestRandomStateType:
with pytest.raises(TypeError):
rng_type.filter(1)
- rng = rng.get_state(legacy=False)
- assert rng_type.is_valid_value(rng, strict=False)
+ rng_dict = rng.get_state(legacy=False)
- rng["state"] = {}
+ assert rng_type.is_valid_value(rng_dict) is False
+ assert rng_type.is_valid_value(rng_dict, strict=False)
- assert rng_type.is_valid_value(rng, strict=False) is False
+ rng_dict["state"] = {}
- rng = {}
- assert rng_type.is_valid_value(rng, strict=False) is False
+ assert rng_type.is_valid_value(rng_dict, strict=False) is False
+
+ rng_dict = {}
+ assert rng_type.is_valid_value(rng_dict, strict=False) is False
def test_values_eq(self):
@@ -147,15 +149,17 @@ class TestRandomGeneratorType:
with pytest.raises(TypeError):
rng_type.filter(1)
- rng = rng.__getstate__()
- assert rng_type.is_valid_value(rng, strict=False)
+ rng_dict = rng.__getstate__()
+
+ assert rng_type.is_valid_value(rng_dict) is False
+ assert rng_type.is_valid_value(rng_dict, strict=False)
- rng["state"] = {}
+ rng_dict["state"] = {}
- assert rng_type.is_valid_value(rng, strict=False) is False
+ assert rng_type.is_valid_value(rng_dict, strict=False) is False
- rng = {}
- assert rng_type.is_valid_value(rng, strict=False) is False
+ rng_dict = {}
+ assert rng_type.is_valid_value(rng_dict, strict=False) is False
def test_values_eq(self):
| Error w/ deepcopy of pymc v4 model
## Generating a deep copy of a pymc v4 model results in an Aesara error
**Please provide a minimal, self-contained, and reproducible example.**
```python
from copy import deepcopy
import numpy as np
import pymc as pm
RANDOM_SEED = 8927
rng = np.random.default_rng(RANDOM_SEED)
# True parameter values
alpha, sigma = 1, 1
beta = [1, 2.5]
# Size of dataset
size = 100
# Predictor variable
X1 = np.random.randn(size)
X2 = np.random.randn(size) * 0.2
# Simulate outcome variable
Y = alpha + beta[0] * X1 + beta[1] * X2 + rng.normal(size=size) * sigma
basic_model = pm.Model()
with basic_model:
# Priors for unknown model parameters
alpha = pm.Normal("alpha", mu=0, sigma=10)
beta = pm.Normal("beta", mu=0, sigma=10, shape=2)
sigma = pm.HalfNormal("sigma", sigma=1)
# Expected value of outcome
mu = alpha + beta[0] * X1 + beta[1] * X2
# Likelihood (sampling distribution) of observations
Y_obs = pm.Normal("Y_obs", mu=mu, sigma=sigma, observed=Y)
model_copy = deepcopy(basic_model)
```
**Please provide the full traceback of any errors.**
```python
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
Input In [9], in <cell line: 41>()
38 # Likelihood (sampling distribution) of observations
39 Y_obs = pm.Normal("Y_obs", mu=mu, sigma=sigma, observed=Y)
---> 41 model_copy = deepcopy(basic_model)
File ~\AppData\Local\Programs\Python\Python39\lib\copy.py:172, in deepcopy(x, memo, _nil)
170 y = x
171 else:
--> 172 y = _reconstruct(x, memo, *rv)
174 # If is its own copy, don't memoize.
175 if y is not x:
File ~\AppData\Local\Programs\Python\Python39\lib\copy.py:270, in _reconstruct(x, memo, func, args, state, listiter, dictiter, deepcopy)
268 if state is not None:
269 if deep:
--> 270 state = deepcopy(state, memo)
271 if hasattr(y, '__setstate__'):
272 y.__setstate__(state)
File ~\AppData\Local\Programs\Python\Python39\lib\copy.py:146, in deepcopy(x, memo, _nil)
144 copier = _deepcopy_dispatch.get(cls)
145 if copier is not None:
--> 146 y = copier(x, memo)
147 else:
148 if issubclass(cls, type):
File ~\AppData\Local\Programs\Python\Python39\lib\copy.py:230, in _deepcopy_dict(x, memo, deepcopy)
228 memo[id(x)] = y
229 for key, value in x.items():
--> 230 y[deepcopy(key, memo)] = deepcopy(value, memo)
231 return y
File ~\AppData\Local\Programs\Python\Python39\lib\copy.py:146, in deepcopy(x, memo, _nil)
144 copier = _deepcopy_dispatch.get(cls)
145 if copier is not None:
--> 146 y = copier(x, memo)
147 else:
148 if issubclass(cls, type):
File ~\AppData\Local\Programs\Python\Python39\lib\copy.py:230, in _deepcopy_dict(x, memo, deepcopy)
228 memo[id(x)] = y
229 for key, value in x.items():
--> 230 y[deepcopy(key, memo)] = deepcopy(value, memo)
231 return y
File ~\AppData\Local\Programs\Python\Python39\lib\copy.py:172, in deepcopy(x, memo, _nil)
170 y = x
171 else:
--> 172 y = _reconstruct(x, memo, *rv)
174 # If is its own copy, don't memoize.
175 if y is not x:
File ~\AppData\Local\Programs\Python\Python39\lib\copy.py:270, in _reconstruct(x, memo, func, args, state, listiter, dictiter, deepcopy)
268 if state is not None:
269 if deep:
--> 270 state = deepcopy(state, memo)
271 if hasattr(y, '__setstate__'):
272 y.__setstate__(state)
[... skipping similar frames: _deepcopy_dict at line 230 (1 times), deepcopy at line 146 (1 times)]
File ~\AppData\Local\Programs\Python\Python39\lib\copy.py:172, in deepcopy(x, memo, _nil)
170 y = x
171 else:
--> 172 y = _reconstruct(x, memo, *rv)
174 # If is its own copy, don't memoize.
175 if y is not x:
File ~\AppData\Local\Programs\Python\Python39\lib\copy.py:270, in _reconstruct(x, memo, func, args, state, listiter, dictiter, deepcopy)
268 if state is not None:
269 if deep:
--> 270 state = deepcopy(state, memo)
271 if hasattr(y, '__setstate__'):
272 y.__setstate__(state)
[... skipping similar frames: deepcopy at line 146 (1 times)]
File ~\AppData\Local\Programs\Python\Python39\lib\copy.py:230, in _deepcopy_dict(x, memo, deepcopy)
228 memo[id(x)] = y
229 for key, value in x.items():
--> 230 y[deepcopy(key, memo)] = deepcopy(value, memo)
231 return y
File ~\AppData\Local\Programs\Python\Python39\lib\copy.py:146, in deepcopy(x, memo, _nil)
144 copier = _deepcopy_dispatch.get(cls)
145 if copier is not None:
--> 146 y = copier(x, memo)
147 else:
148 if issubclass(cls, type):
File ~\AppData\Local\Programs\Python\Python39\lib\copy.py:205, in _deepcopy_list(x, memo, deepcopy)
203 append = y.append
204 for a in x:
--> 205 append(deepcopy(a, memo))
206 return y
File ~\AppData\Local\Programs\Python\Python39\lib\copy.py:172, in deepcopy(x, memo, _nil)
170 y = x
171 else:
--> 172 y = _reconstruct(x, memo, *rv)
174 # If is its own copy, don't memoize.
175 if y is not x:
File ~\AppData\Local\Programs\Python\Python39\lib\copy.py:270, in _reconstruct(x, memo, func, args, state, listiter, dictiter, deepcopy)
268 if state is not None:
269 if deep:
--> 270 state = deepcopy(state, memo)
271 if hasattr(y, '__setstate__'):
272 y.__setstate__(state)
File ~\AppData\Local\Programs\Python\Python39\lib\copy.py:146, in deepcopy(x, memo, _nil)
144 copier = _deepcopy_dispatch.get(cls)
145 if copier is not None:
--> 146 y = copier(x, memo)
147 else:
148 if issubclass(cls, type):
File ~\AppData\Local\Programs\Python\Python39\lib\copy.py:230, in _deepcopy_dict(x, memo, deepcopy)
228 memo[id(x)] = y
229 for key, value in x.items():
--> 230 y[deepcopy(key, memo)] = deepcopy(value, memo)
231 return y
File ~\AppData\Local\Programs\Python\Python39\lib\copy.py:153, in deepcopy(x, memo, _nil)
151 copier = getattr(x, "__deepcopy__", None)
152 if copier is not None:
--> 153 y = copier(memo)
154 else:
155 reductor = dispatch_table.get(cls)
File ~\.virtualenvs\pymc4-venv\lib\site-packages\aesara\link\basic.py:142, in Container.__deepcopy__(self, memo)
132 r = type(self)(
133 deepcopy(self.type, memo=memo),
134 deepcopy(self.storage, memo=memo),
(...)
138 name=deepcopy(self.name, memo=memo),
139 )
140 # Work around NumPy deepcopy of ndarray with 0 dimension that
141 # don't return an ndarray.
--> 142 if r.storage[0] is not None and not self.type.is_valid_value(r.storage[0]):
143 assert not data_was_in_memo
144 assert self.type.is_valid_value(self.storage[0])
TypeError: is_valid_value() missing 1 required positional argument: 'strict'
```
This used to work with pymc3 and theano-pymc.
## Versions and main components
* Aesara version: 2.66
* Aesara config (`python -c "import aesara; print(aesara.config)"`)
* Python version: 3.9
* Operating system: Windows
* How did you install Aesara: pip
* Pymc version: 4.0
| 0.0 | 174117f9b0a2c1ddb13e6244bbb31d6c75c12245 | [
"tests/tensor/random/test_type.py::TestRandomStateType::test_filter"
] | [
"tests/tensor/random/test_type.py::test_view_op_c_code",
"tests/tensor/random/test_type.py::TestRandomStateType::test_pickle",
"tests/tensor/random/test_type.py::TestRandomStateType::test_repr",
"tests/tensor/random/test_type.py::TestRandomStateType::test_values_eq",
"tests/tensor/random/test_type.py::TestRandomStateType::test_may_share_memory",
"tests/tensor/random/test_type.py::TestRandomGeneratorType::test_pickle",
"tests/tensor/random/test_type.py::TestRandomGeneratorType::test_repr",
"tests/tensor/random/test_type.py::TestRandomGeneratorType::test_may_share_memory"
] | {
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false
} | 2022-06-14 17:50:52+00:00 | bsd-3-clause | 913 |
|
agrc__sweeper-51 | diff --git a/.gitignore b/.gitignore
index 9fbddef..1fe895c 100644
--- a/.gitignore
+++ b/.gitignore
@@ -4,3 +4,4 @@
__pycache__
dist/
build/
+.env/
diff --git a/.vscode/settings.json b/.vscode/settings.json
index 1d21cf6..40c0fd3 100644
--- a/.vscode/settings.json
+++ b/.vscode/settings.json
@@ -12,6 +12,7 @@
"pformat",
"posix",
"pypi",
+ "pytest",
"sdist",
"usaddress",
"xxhash"
diff --git a/readme.md b/readme.md
index b9dcff1..d9d1c5f 100644
--- a/readme.md
+++ b/readme.md
@@ -80,7 +80,7 @@ A normalized string representing the entire address that was passed into the con
1. install required dependencies to work on sweeper
- `pip install -e ".[develop]"`
1. install required dependencies to run sweeper tests
- - `pip install -e ".[test]"`
+ - `pip install -e ".[tests]"`
1. run tests: `pytest`
### Uploading to pypi.org
diff --git a/src/sweeper/address_parser.py b/src/sweeper/address_parser.py
index 5e61825..5cccd1d 100644
--- a/src/sweeper/address_parser.py
+++ b/src/sweeper/address_parser.py
@@ -7,19 +7,20 @@ A module that parses street addresses into their various parts.
import json
import pprint
from os.path import dirname, join, realpath
+from re import compile
import usaddress
TAG_MAPPING = {
'AddressNumber': 'address_number',
- # 'AddressNumberPrefix': 'address1',
+ 'AddressNumberPrefix': 'address_number',
'AddressNumberSuffix': 'address_number_suffix',
'StreetNamePreDirectional': 'prefix_direction',
'StreetName': 'street_name',
- # 'StreetNamePreModifier': 'address1',
- # 'StreetNamePreType': 'address1',
+ # 'StreetNamePreModifier': 'street_name', #: handled in class below
+ # 'StreetNamePreType': 'street_name', #: handled in class below
'StreetNamePostDirectional': 'street_direction',
- # 'StreetNamePostModifier': 'address1',
+ 'StreetNamePostModifier': 'street_type',
'StreetNamePostType': 'street_type',
# 'CornerOf': 'address1',
# 'IntersectionSeparator': 'address1',
@@ -28,7 +29,7 @@ TAG_MAPPING = {
# 'USPSBoxGroupType': 'address1',
# 'USPSBoxID': 'address1',
# 'USPSBoxType': 'address1',
- # 'BuildingName': 'address2',
+ 'BuildingName': 'unit_id',
'OccupancyType': 'unit_type',
'OccupancyIdentifier': 'unit_id',
# 'SubaddressIdentifier': 'address2',
@@ -41,6 +42,7 @@ TAG_MAPPING = {
TWO_CHAR_DIRECTIONS = ['NO', 'SO', 'EA', 'WE']
with open(join(dirname(realpath(__file__)), 'street_types.json'), 'r') as file:
STREET_TYPES = json.loads(file.read())
+HWY_REGEX = compile('(SR|STATE ROUTE|HIGHWAY)')
class Address():
@@ -77,6 +79,19 @@ class Address():
if self.po_box is not None:
return
+ try:
+ #: e.g. US HWY
+ self.street_name = f'{normalize_street_name_pre_type(self.StreetNamePreType)} {self.street_name}'
+ del self.StreetNamePreType
+ except AttributeError:
+ pass
+
+ try:
+ self.street_name = f'{self.StreetNamePreModifier} {self.street_name}'
+ del self.StreetNamePreModifier
+ except AttributeError:
+ pass
+
#: look for two-character prefix directions which usaddress does not handle
if self.street_name:
street_name_parts = self.street_name.split(' ')
@@ -135,6 +150,7 @@ def normalize_direction(direction_text):
return direction_text[0].upper()
+
def normalize_street_type(type_text):
'''
returns the standard abbreviation for the input street type
@@ -148,6 +164,14 @@ def normalize_street_type(type_text):
raise InvalidStreetTypeError(type_text)
+def normalize_street_name_pre_type(text):
+    '''normalizes highways by replacing SR with HWY and removing US
+
+    No need to worry about casing or "."s because usaddress has already taken care of them by this point.
+ '''
+ return HWY_REGEX.sub('HWY', text).replace('US ', '')
+
+
class InvalidStreetTypeError(Exception):
'''
exception for when the street type does not have a corresponding value in street_types.json
| agrc/sweeper | 75254926a9de985e82bb2fa116948302d4fb8abe | diff --git a/src/sweeper/tests/test_address_parser.py b/src/sweeper/tests/test_address_parser.py
index c4f2caf..ae08739 100644
--- a/src/sweeper/tests/test_address_parser.py
+++ b/src/sweeper/tests/test_address_parser.py
@@ -386,3 +386,89 @@ class TestBadAddresses():
assert address.street_name is None
assert address.address_number == '100'
assert address.prefix_direction == 'S'
+
+
+class TestHighways():
+ '''tests to make sure that state routes and us highways are parsed correctly
+ '''
+
+ def test_state_routes(self):
+ address = Address('910 S SR 22')
+
+ assert address.address_number == '910'
+ assert address.prefix_direction == 'S'
+ assert address.street_name == 'HWY 22'
+ assert address.normalized == '910 S HWY 22'
+
+ def test_state_route_expanded(self):
+ address = Address('910 S State Route 22')
+
+ assert address.address_number == '910'
+ assert address.prefix_direction == 'S'
+ assert address.street_name == 'HWY 22'
+ assert address.normalized == '910 S HWY 22'
+
+ def test_state_route_with_punctuation(self):
+ address = Address('910 S S.R. 22')
+
+ assert address.address_number == '910'
+ assert address.prefix_direction == 'S'
+ assert address.street_name == 'HWY 22'
+ assert address.normalized == '910 S HWY 22'
+
+ def test_state_route_casing(self):
+ address = Address('910 S sr 22')
+
+ assert address.address_number == '910'
+ assert address.prefix_direction == 'S'
+ assert address.street_name == 'HWY 22'
+ assert address.normalized == '910 S HWY 22'
+
+
+ def test_highways(self):
+ address = Address('1910 N US HWY 89')
+
+ assert address.address_number == '1910'
+ assert address.prefix_direction == 'N'
+ assert address.street_name == 'HWY 89'
+ assert address.normalized == '1910 N HWY 89'
+
+ address = Address('1106 S OLD HWY 89')
+
+ assert address.address_number == '1106'
+ assert address.prefix_direction == 'S'
+ assert address.street_name == 'OLD HWY 89'
+ assert address.normalized == '1106 S OLD HWY 89'
+
+ def test_highway_expanded(self):
+ address = Address('1910 N US highway 89')
+
+ assert address.address_number == '1910'
+ assert address.prefix_direction == 'N'
+ assert address.street_name == 'HWY 89'
+ assert address.normalized == '1910 N HWY 89'
+
+ def test_highway_with_punctuation(self):
+ address = Address('1910 N U.S. highway 89')
+
+ assert address.address_number == '1910'
+ assert address.prefix_direction == 'N'
+ assert address.street_name == 'HWY 89'
+ assert address.normalized == '1910 N HWY 89'
+
+ def test_highway_casing(self):
+ address = Address('1910 N u.s. highway 89')
+
+ assert address.address_number == '1910'
+ assert address.prefix_direction == 'N'
+ assert address.street_name == 'HWY 89'
+ assert address.normalized == '1910 N HWY 89'
+
+ def test_street_name_with_sr(self):
+ address = Address('1910 s woodsrow dr')
+
+ assert address.address_number == '1910'
+ assert address.prefix_direction == 'S'
+ assert address.street_name == 'WOODSROW'
+ assert address.street_type == 'DR'
+ assert address.normalized == '1910 S WOODSROW DR'
| State Routes SR
Addresses like `910 S SR 22` or `1910 N US HWY 89` currently return just `22` and `89` for the `street_name`, dropping the `SR`/`US HWY` route designator.
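Expected behavior, as exercised by the test patch above: route designators should normalize to `HWY` and remain part of the street name. A quick sketch (the import path is assumed from the patched module at `src/sweeper/address_parser.py`):

```python
from sweeper.address_parser import Address  # import path assumed

# Assertions taken from the new tests: SR / US HWY collapse to HWY.
assert Address('910 S SR 22').street_name == 'HWY 22'
assert Address('910 S SR 22').normalized == '910 S HWY 22'
assert Address('1910 N US HWY 89').normalized == '1910 N HWY 89'
```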
"src/sweeper/tests/test_address_parser.py::TestHighways::test_state_routes",
"src/sweeper/tests/test_address_parser.py::TestHighways::test_state_route_expanded",
"src/sweeper/tests/test_address_parser.py::TestHighways::test_state_route_with_punctuation",
"src/sweeper/tests/test_address_parser.py::TestHighways::test_state_route_casing",
"src/sweeper/tests/test_address_parser.py::TestHighways::test_highways",
"src/sweeper/tests/test_address_parser.py::TestHighways::test_highway_expanded",
"src/sweeper/tests/test_address_parser.py::TestHighways::test_highway_with_punctuation",
"src/sweeper/tests/test_address_parser.py::TestHighways::test_highway_casing"
] | [
"src/sweeper/tests/test_address_parser.py::TestAddressNumber::test_parses_address_number",
"src/sweeper/tests/test_address_parser.py::TestAddressNumberSuffix::test_parses_number_suffix",
"src/sweeper/tests/test_address_parser.py::TestPrefixDirection::test_parses_prefix_direction",
"src/sweeper/tests/test_address_parser.py::TestPrefixDirection::test_no_prefix_direction",
"src/sweeper/tests/test_address_parser.py::TestStreetName::test_parses_street_name",
"src/sweeper/tests/test_address_parser.py::TestStreetName::test_multi_word_street_name",
"src/sweeper/tests/test_address_parser.py::TestStreetName::test_no_prefix_direction_street",
"src/sweeper/tests/test_address_parser.py::TestStreetDirection::test_street_direction",
"src/sweeper/tests/test_address_parser.py::TestNormalizeDirection::test_normalize_direction",
"src/sweeper/tests/test_address_parser.py::TestNormalizeDirection::test_two_characters",
"src/sweeper/tests/test_address_parser.py::TestWhiteSpace::test_white_space",
"src/sweeper/tests/test_address_parser.py::TestWhiteSpace::test_double_spaces",
"src/sweeper/tests/test_address_parser.py::TestNormalizeStreetType::test_normalize_street_type",
"src/sweeper/tests/test_address_parser.py::TestNormalizeStreetType::test_raises_exceptions",
"src/sweeper/tests/test_address_parser.py::TestNormalizeStreetType::test_street_names_with_types",
"src/sweeper/tests/test_address_parser.py::TestUnitParts::test_add_hash_if_no_type",
"src/sweeper/tests/test_address_parser.py::TestUnitParts::test_strip_hash_if_type",
"src/sweeper/tests/test_address_parser.py::TestPOBox::test_parses_po_boxes",
"src/sweeper/tests/test_address_parser.py::test_normalized_address_string",
"src/sweeper/tests/test_address_parser.py::test_strip_periods",
"src/sweeper/tests/test_address_parser.py::test_steve",
"src/sweeper/tests/test_address_parser.py::TestBadAddresses::test_missing_street_names",
"src/sweeper/tests/test_address_parser.py::TestHighways::test_street_name_with_sr"
] | {
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false
} | 2020-03-26 22:39:19+00:00 | mit | 914 |
|
agronholm__anyio-137 | diff --git a/src/anyio/__init__.py b/src/anyio/__init__.py
index 2b2cfec..cb1fd4b 100644
--- a/src/anyio/__init__.py
+++ b/src/anyio/__init__.py
@@ -14,6 +14,7 @@ from typing import TypeVar, Callable, Union, Optional, Awaitable, Coroutine, Any
import sniffio
+from ._utils import convert_ipv6_sockaddr
from .abc import (
Lock, Condition, Event, Semaphore, CapacityLimiter, CancelScope, TaskGroup, IPAddressType,
SocketStream, UDPSocket, ConnectedUDPSocket, IPSockAddrType, Listener, SocketListener)
@@ -32,8 +33,7 @@ IPPROTO_IPV6 = getattr(socket, 'IPPROTO_IPV6', 41) # https://bugs.python.org/is
T_Retval = TypeVar('T_Retval', covariant=True)
T_Agen = TypeVar('T_Agen')
T_Item = TypeVar('T_Item')
-GetAddrInfoReturnType = List[Tuple[AddressFamily, SocketKind, int, str,
- Union[Tuple[str, int], Tuple[str, int, int, int]]]]
+GetAddrInfoReturnType = List[Tuple[AddressFamily, SocketKind, int, str, Tuple[str, int]]]
AnyIPAddressFamily = Literal[AddressFamily.AF_UNSPEC, AddressFamily.AF_INET,
AddressFamily.AF_INET6]
IPAddressFamily = Literal[AddressFamily.AF_INET, AddressFamily.AF_INET6]
@@ -607,15 +607,18 @@ async def create_connected_udp_socket(
reuse_port)
-def getaddrinfo(host: Union[bytearray, bytes, str], port: Union[str, int, None], *,
- family: Union[int, AddressFamily] = 0, type: Union[int, SocketKind] = 0,
- proto: int = 0, flags: int = 0) -> Awaitable[GetAddrInfoReturnType]:
+async def getaddrinfo(host: Union[bytearray, bytes, str], port: Union[str, int, None], *,
+ family: Union[int, AddressFamily] = 0, type: Union[int, SocketKind] = 0,
+ proto: int = 0, flags: int = 0) -> GetAddrInfoReturnType:
"""
Look up a numeric IP address given a host name.
Internationalized domain names are translated according to the (non-transitional) IDNA 2008
standard.
+ .. note:: 4-tuple IPv6 socket addresses are automatically converted to 2-tuples of
+ (host, port), unlike what :func:`socket.getaddrinfo` does.
+
:param host: host name
:param port: port number
:param family: socket family (`'AF_INET``, ...)
@@ -637,8 +640,10 @@ def getaddrinfo(host: Union[bytearray, bytes, str], port: Union[str, int, None],
else:
encoded_host = host
- return _get_asynclib().getaddrinfo(encoded_host, port, family=family, type=type, proto=proto,
- flags=flags)
+ gai_res = await _get_asynclib().getaddrinfo(encoded_host, port, family=family, type=type,
+ proto=proto, flags=flags)
+ return [(family, type, proto, canonname, convert_ipv6_sockaddr(sockaddr))
+ for family, type, proto, canonname, sockaddr in gai_res]
def getnameinfo(sockaddr: IPSockAddrType, flags: int = 0) -> Awaitable[Tuple[str, str]]:
diff --git a/src/anyio/_backends/_asyncio.py b/src/anyio/_backends/_asyncio.py
index 03d3093..d85e45d 100644
--- a/src/anyio/_backends/_asyncio.py
+++ b/src/anyio/_backends/_asyncio.py
@@ -854,23 +854,12 @@ class ConnectedUDPSocket(abc.ConnectedUDPSocket):
self._transport.sendto(item)
-def _convert_ipv6_sockaddr(sockaddr: Optional[IPSockAddrType]) -> Optional[Tuple[str, int]]:
- # This is more complicated than it should be because of MyPy
- if sockaddr is None:
- return None
- elif len(sockaddr) == 4 and sockaddr[-1]:
- # Add scopeid to the address
- return sockaddr[0] + '%' + str(sockaddr[-1]), sockaddr[1]
- else:
- return sockaddr[:2]
-
-
async def connect_tcp(host: str, port: int,
- local_addr: Optional[IPSockAddrType] = None) -> SocketStream:
+ local_addr: Optional[Tuple[str, int]] = None) -> SocketStream:
transport, protocol = cast(
Tuple[asyncio.Transport, StreamProtocol],
await get_running_loop().create_connection(StreamProtocol, host, port,
- local_addr=_convert_ipv6_sockaddr(local_addr))
+ local_addr=local_addr)
)
transport.pause_reading()
return SocketStream(transport, protocol)
@@ -892,8 +881,8 @@ async def create_udp_socket(
reuse_port: bool
) -> Union[UDPSocket, ConnectedUDPSocket]:
result = await get_running_loop().create_datagram_endpoint(
- DatagramProtocol, local_addr=_convert_ipv6_sockaddr(local_address),
- remote_addr=_convert_ipv6_sockaddr(remote_address), family=family, reuse_port=reuse_port)
+ DatagramProtocol, local_addr=local_address, remote_addr=remote_address, family=family,
+ reuse_port=reuse_port)
transport = cast(asyncio.DatagramTransport, result[0])
protocol = cast(DatagramProtocol, result[1])
if protocol.exception:
diff --git a/src/anyio/_utils.py b/src/anyio/_utils.py
index ff68d2c..0f7bdac 100644
--- a/src/anyio/_utils.py
+++ b/src/anyio/_utils.py
@@ -16,3 +16,26 @@ class ResourceGuard:
def __exit__(self, exc_type, exc_val, exc_tb):
self._guarded = False
+
+
+def convert_ipv6_sockaddr(sockaddr):
+ """
+ Convert a 4-tuple IPv6 socket address to a 2-tuple (address, port) format.
+
+ If the scope ID is nonzero, it is added to the address, separated with ``%``.
+ Otherwise the flow id and scope id are simply cut off from the tuple.
+ Any other kinds of socket addresses are returned as-is.
+
+ :param sockaddr: the result of :meth:`~socket.socket.getsockname`
+ :return: the converted socket address
+
+ """
+ # This is more complicated than it should be because of MyPy
+ if isinstance(sockaddr, tuple) and len(sockaddr) == 4:
+ if sockaddr[3]:
+ # Add scopeid to the address
+ return sockaddr[0] + '%' + str(sockaddr[3]), sockaddr[1]
+ else:
+ return sockaddr[:2]
+ else:
+ return sockaddr
diff --git a/src/anyio/abc/sockets.py b/src/anyio/abc/sockets.py
index f02d1c8..50d8a4d 100644
--- a/src/anyio/abc/sockets.py
+++ b/src/anyio/abc/sockets.py
@@ -6,7 +6,7 @@ from typing import TypeVar, Tuple, Union, Generic
from .streams import UnreliableObjectStream, ByteStream, Listener
IPAddressType = Union[str, IPv4Address, IPv6Address]
-IPSockAddrType = Union[Tuple[str, int], Tuple[str, int, int, int]]
+IPSockAddrType = Tuple[str, int]
SockAddrType = Union[IPSockAddrType, str]
UDPPacketType = Tuple[bytes, IPSockAddrType]
T_Retval = TypeVar('T_Retval')
@@ -41,8 +41,7 @@ class _SocketMixin(Generic[T_SockAddr]):
"""
The bound address of the underlying local socket.
- For IPv4 TCP streams, this is a tuple of (IP address, port).
- For IPv6 TCP streams, this is a tuple of (IP address, port, flowinfo, scopeid).
+ For TCP streams, this is a tuple of (IP address, port).
For UNIX socket streams, this is the path to the socket.
"""
@@ -56,8 +55,7 @@ class SocketStream(Generic[T_SockAddr], ByteStream, _SocketMixin[T_SockAddr]):
"""
The address this socket is connected to.
- For IPv4 TCP streams, this is a tuple of (IP address, port).
- For IPv6 TCP streams, this is a tuple of (IP address, port, flowinfo, scopeid).
+ For TCP streams, this is a tuple of (IP address, port).
For UNIX socket streams, this is the path to the socket.
"""
@@ -80,9 +78,4 @@ class ConnectedUDPSocket(UnreliableObjectStream[bytes], _SocketMixin[IPSockAddrT
@property
@abstractmethod
def remote_address(self) -> IPSockAddrType:
- """
- The address this socket is connected to.
-
- For IPv4 sockets, this is a tuple of (IP address, port).
- For IPv6 sockets, this is a tuple of (IP address, port, flowinfo, scopeid).
- """
+ """The address this socket is connected to."""
| agronholm/anyio | cfba438b71237aeb93bfb955a8e70edd8f101ac9 | diff --git a/tests/test_sockets.py b/tests/test_sockets.py
index 24fb5de..7f47bb6 100644
--- a/tests/test_sockets.py
+++ b/tests/test_sockets.py
@@ -747,8 +747,7 @@ async def test_getaddrinfo_ipv6addr(sock_type):
# IDNA trips up over raw IPv6 addresses
proto = 0 if platform.system() == 'Windows' else 6
assert await getaddrinfo('::1', 0, type=sock_type) == [
- (socket.AddressFamily.AF_INET6, socket.SocketKind.SOCK_STREAM, proto, '',
- ('::1', 0, 0, 0))
+ (socket.AddressFamily.AF_INET6, socket.SocketKind.SOCK_STREAM, proto, '', ('::1', 0))
]
| Should we expose the user to 4-tuple IPv6 socket addresses?
The result of `sock.getsockname()` for IPv6 sockets is a 4-tuple (address, port, flowinfo, scopeid). The last two fields are specific to IPv6. The flowinfo field is useless/deprecated and the scopeid is only meaningful for link-local addresses. The important part here is that the address field can carry a scope ID by appending it to the address with a `%` separator, like `fe80::dead:beef%1`.
Most people are not used to dealing with 4-tuple socket addresses, so maybe it would make sense to always fold the scope ID into the address string. On the other hand, others might *expect* the usual behavior and get confused when they get 2-tuples instead!
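For concreteness, this is the conversion the patch's `convert_ipv6_sockaddr` helper performs (restated from the diff above, minus the MyPy workaround):

```python
def convert_ipv6_sockaddr(sockaddr):
    # 4-tuple (host, port, flowinfo, scope_id) -> 2-tuple (host, port),
    # folding a nonzero scope id into the host with a '%' separator.
    if isinstance(sockaddr, tuple) and len(sockaddr) == 4:
        host, port, _flowinfo, scope_id = sockaddr
        if scope_id:
            return f'{host}%{scope_id}', port
        return host, port
    return sockaddr

assert convert_ipv6_sockaddr(('fe80::dead:beef', 80, 0, 1)) == ('fe80::dead:beef%1', 80)
assert convert_ipv6_sockaddr(('::1', 0, 0, 0)) == ('::1', 0)
```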
"tests/test_sockets.py::test_getaddrinfo_ipv6addr[asyncio-SocketKind.SOCK_STREAM0]",
"tests/test_sockets.py::test_getaddrinfo_ipv6addr[asyncio-SocketKind.SOCK_STREAM1]",
"tests/test_sockets.py::test_getaddrinfo_ipv6addr[asyncio+uvloop-SocketKind.SOCK_STREAM0]",
"tests/test_sockets.py::test_getaddrinfo_ipv6addr[asyncio+uvloop-SocketKind.SOCK_STREAM1]"
] | [
"tests/test_sockets.py::TestTCPStream::test_send_receive[asyncio-ipv4]",
"tests/test_sockets.py::TestTCPStream::test_send_receive[asyncio-ipv6]",
"tests/test_sockets.py::TestTCPStream::test_send_receive[asyncio+uvloop-ipv4]",
"tests/test_sockets.py::TestTCPStream::test_send_receive[asyncio+uvloop-ipv6]",
"tests/test_sockets.py::TestTCPStream::test_send_large_buffer[asyncio-ipv4]",
"tests/test_sockets.py::TestTCPStream::test_send_large_buffer[asyncio-ipv6]",
"tests/test_sockets.py::TestTCPStream::test_send_large_buffer[asyncio+uvloop-ipv4]",
"tests/test_sockets.py::TestTCPStream::test_send_large_buffer[asyncio+uvloop-ipv6]",
"tests/test_sockets.py::TestTCPStream::test_send_eof[asyncio-ipv4]",
"tests/test_sockets.py::TestTCPStream::test_send_eof[asyncio-ipv6]",
"tests/test_sockets.py::TestTCPStream::test_send_eof[asyncio+uvloop-ipv4]",
"tests/test_sockets.py::TestTCPStream::test_send_eof[asyncio+uvloop-ipv6]",
"tests/test_sockets.py::TestTCPStream::test_iterate[asyncio-ipv4]",
"tests/test_sockets.py::TestTCPStream::test_iterate[asyncio-ipv6]",
"tests/test_sockets.py::TestTCPStream::test_iterate[asyncio+uvloop-ipv4]",
"tests/test_sockets.py::TestTCPStream::test_iterate[asyncio+uvloop-ipv6]",
"tests/test_sockets.py::TestTCPStream::test_socket_options[asyncio-ipv4]",
"tests/test_sockets.py::TestTCPStream::test_socket_options[asyncio-ipv6]",
"tests/test_sockets.py::TestTCPStream::test_socket_options[asyncio+uvloop-ipv4]",
"tests/test_sockets.py::TestTCPStream::test_socket_options[asyncio+uvloop-ipv6]",
"tests/test_sockets.py::TestTCPStream::test_happy_eyeballs[asyncio-dualstack]",
"tests/test_sockets.py::TestTCPStream::test_happy_eyeballs[asyncio-ipv4]",
"tests/test_sockets.py::TestTCPStream::test_happy_eyeballs[asyncio-ipv6]",
"tests/test_sockets.py::TestTCPStream::test_happy_eyeballs[asyncio+uvloop-dualstack]",
"tests/test_sockets.py::TestTCPStream::test_happy_eyeballs[asyncio+uvloop-ipv4]",
"tests/test_sockets.py::TestTCPStream::test_happy_eyeballs[asyncio+uvloop-ipv6]",
"tests/test_sockets.py::TestTCPStream::test_connection_refused[asyncio-multi]",
"tests/test_sockets.py::TestTCPStream::test_connection_refused[asyncio-single]",
"tests/test_sockets.py::TestTCPStream::test_connection_refused[asyncio+uvloop-multi]",
"tests/test_sockets.py::TestTCPStream::test_connection_refused[asyncio+uvloop-single]",
"tests/test_sockets.py::TestTCPStream::test_receive_timeout[asyncio-ipv4]",
"tests/test_sockets.py::TestTCPStream::test_receive_timeout[asyncio-ipv6]",
"tests/test_sockets.py::TestTCPStream::test_receive_timeout[asyncio+uvloop-ipv4]",
"tests/test_sockets.py::TestTCPStream::test_receive_timeout[asyncio+uvloop-ipv6]",
"tests/test_sockets.py::TestTCPStream::test_concurrent_send[asyncio-ipv4]",
"tests/test_sockets.py::TestTCPStream::test_concurrent_send[asyncio-ipv6]",
"tests/test_sockets.py::TestTCPStream::test_concurrent_send[asyncio+uvloop-ipv4]",
"tests/test_sockets.py::TestTCPStream::test_concurrent_send[asyncio+uvloop-ipv6]",
"tests/test_sockets.py::TestTCPStream::test_concurrent_receive[asyncio-ipv4]",
"tests/test_sockets.py::TestTCPStream::test_concurrent_receive[asyncio-ipv6]",
"tests/test_sockets.py::TestTCPStream::test_concurrent_receive[asyncio+uvloop-ipv4]",
"tests/test_sockets.py::TestTCPStream::test_concurrent_receive[asyncio+uvloop-ipv6]",
"tests/test_sockets.py::TestTCPStream::test_close_during_receive[asyncio-ipv4]",
"tests/test_sockets.py::TestTCPStream::test_close_during_receive[asyncio-ipv6]",
"tests/test_sockets.py::TestTCPStream::test_close_during_receive[asyncio+uvloop-ipv4]",
"tests/test_sockets.py::TestTCPStream::test_close_during_receive[asyncio+uvloop-ipv6]",
"tests/test_sockets.py::TestTCPStream::test_receive_after_close[asyncio-ipv4]",
"tests/test_sockets.py::TestTCPStream::test_receive_after_close[asyncio-ipv6]",
"tests/test_sockets.py::TestTCPStream::test_receive_after_close[asyncio+uvloop-ipv4]",
"tests/test_sockets.py::TestTCPStream::test_receive_after_close[asyncio+uvloop-ipv6]",
"tests/test_sockets.py::TestTCPStream::test_send_after_close[asyncio-ipv4]",
"tests/test_sockets.py::TestTCPStream::test_send_after_close[asyncio-ipv6]",
"tests/test_sockets.py::TestTCPStream::test_send_after_close[asyncio+uvloop-ipv4]",
"tests/test_sockets.py::TestTCPStream::test_send_after_close[asyncio+uvloop-ipv6]",
"tests/test_sockets.py::TestTCPStream::test_connect_tcp_with_tls[asyncio-ipv4]",
"tests/test_sockets.py::TestTCPStream::test_connect_tcp_with_tls[asyncio-ipv6]",
"tests/test_sockets.py::TestTCPStream::test_connect_tcp_with_tls[asyncio+uvloop-ipv4]",
"tests/test_sockets.py::TestTCPStream::test_connect_tcp_with_tls[asyncio+uvloop-ipv6]",
"tests/test_sockets.py::TestTCPListener::test_accept[asyncio-ipv4]",
"tests/test_sockets.py::TestTCPListener::test_accept[asyncio-ipv6]",
"tests/test_sockets.py::TestTCPListener::test_accept[asyncio-both]",
"tests/test_sockets.py::TestTCPListener::test_accept[asyncio+uvloop-ipv4]",
"tests/test_sockets.py::TestTCPListener::test_accept[asyncio+uvloop-ipv6]",
"tests/test_sockets.py::TestTCPListener::test_accept[asyncio+uvloop-both]",
"tests/test_sockets.py::TestTCPListener::test_socket_options[asyncio-ipv4]",
"tests/test_sockets.py::TestTCPListener::test_socket_options[asyncio-ipv6]",
"tests/test_sockets.py::TestTCPListener::test_socket_options[asyncio+uvloop-ipv4]",
"tests/test_sockets.py::TestTCPListener::test_socket_options[asyncio+uvloop-ipv6]",
"tests/test_sockets.py::TestTCPListener::test_reuse_port[asyncio-ipv4]",
"tests/test_sockets.py::TestTCPListener::test_reuse_port[asyncio-ipv6]",
"tests/test_sockets.py::TestTCPListener::test_reuse_port[asyncio+uvloop-ipv4]",
"tests/test_sockets.py::TestTCPListener::test_reuse_port[asyncio+uvloop-ipv6]",
"tests/test_sockets.py::TestUNIXStream::test_send_receive[asyncio-str]",
"tests/test_sockets.py::TestUNIXStream::test_send_receive[asyncio-path]",
"tests/test_sockets.py::TestUNIXStream::test_send_receive[asyncio+uvloop-str]",
"tests/test_sockets.py::TestUNIXStream::test_send_receive[asyncio+uvloop-path]",
"tests/test_sockets.py::TestUNIXStream::test_send_large_buffer[asyncio]",
"tests/test_sockets.py::TestUNIXStream::test_send_large_buffer[asyncio+uvloop]",
"tests/test_sockets.py::TestUNIXStream::test_send_eof[asyncio]",
"tests/test_sockets.py::TestUNIXStream::test_send_eof[asyncio+uvloop]",
"tests/test_sockets.py::TestUNIXStream::test_iterate[asyncio]",
"tests/test_sockets.py::TestUNIXStream::test_iterate[asyncio+uvloop]",
"tests/test_sockets.py::TestUNIXStream::test_socket_options[asyncio]",
"tests/test_sockets.py::TestUNIXStream::test_socket_options[asyncio+uvloop]",
"tests/test_sockets.py::TestUNIXStream::test_concurrent_send[asyncio]",
"tests/test_sockets.py::TestUNIXStream::test_concurrent_send[asyncio+uvloop]",
"tests/test_sockets.py::TestUNIXStream::test_concurrent_receive[asyncio]",
"tests/test_sockets.py::TestUNIXStream::test_concurrent_receive[asyncio+uvloop]",
"tests/test_sockets.py::TestUNIXStream::test_close_during_receive[asyncio]",
"tests/test_sockets.py::TestUNIXStream::test_close_during_receive[asyncio+uvloop]",
"tests/test_sockets.py::TestUNIXStream::test_receive_after_close[asyncio]",
"tests/test_sockets.py::TestUNIXStream::test_receive_after_close[asyncio+uvloop]",
"tests/test_sockets.py::TestUNIXStream::test_send_after_close[asyncio]",
"tests/test_sockets.py::TestUNIXStream::test_send_after_close[asyncio+uvloop]",
"tests/test_sockets.py::TestUNIXListener::test_accept[asyncio-str]",
"tests/test_sockets.py::TestUNIXListener::test_accept[asyncio-path]",
"tests/test_sockets.py::TestUNIXListener::test_accept[asyncio+uvloop-str]",
"tests/test_sockets.py::TestUNIXListener::test_accept[asyncio+uvloop-path]",
"tests/test_sockets.py::TestUNIXListener::test_socket_options[asyncio]",
"tests/test_sockets.py::TestUNIXListener::test_socket_options[asyncio+uvloop]",
"tests/test_sockets.py::test_serve_listeners[asyncio]",
"tests/test_sockets.py::test_serve_listeners[asyncio+uvloop]",
"tests/test_sockets.py::TestUDPSocket::test_send_receive[asyncio-ipv4]",
"tests/test_sockets.py::TestUDPSocket::test_send_receive[asyncio-ipv6]",
"tests/test_sockets.py::TestUDPSocket::test_send_receive[asyncio+uvloop-ipv4]",
"tests/test_sockets.py::TestUDPSocket::test_send_receive[asyncio+uvloop-ipv6]",
"tests/test_sockets.py::TestUDPSocket::test_iterate[asyncio-ipv4]",
"tests/test_sockets.py::TestUDPSocket::test_iterate[asyncio-ipv6]",
"tests/test_sockets.py::TestUDPSocket::test_iterate[asyncio+uvloop-ipv4]",
"tests/test_sockets.py::TestUDPSocket::test_iterate[asyncio+uvloop-ipv6]",
"tests/test_sockets.py::TestUDPSocket::test_socket_options[asyncio-ipv4]",
"tests/test_sockets.py::TestUDPSocket::test_socket_options[asyncio-ipv6]",
"tests/test_sockets.py::TestUDPSocket::test_socket_options[asyncio+uvloop-ipv4]",
"tests/test_sockets.py::TestUDPSocket::test_socket_options[asyncio+uvloop-ipv6]",
"tests/test_sockets.py::TestUDPSocket::test_reuse_port[asyncio-ipv4]",
"tests/test_sockets.py::TestUDPSocket::test_reuse_port[asyncio-ipv6]",
"tests/test_sockets.py::TestUDPSocket::test_reuse_port[asyncio+uvloop-ipv4]",
"tests/test_sockets.py::TestUDPSocket::test_reuse_port[asyncio+uvloop-ipv6]",
"tests/test_sockets.py::TestUDPSocket::test_concurrent_receive[asyncio-ipv4]",
"tests/test_sockets.py::TestUDPSocket::test_concurrent_receive[asyncio-ipv6]",
"tests/test_sockets.py::TestUDPSocket::test_concurrent_receive[asyncio+uvloop-ipv4]",
"tests/test_sockets.py::TestUDPSocket::test_concurrent_receive[asyncio+uvloop-ipv6]",
"tests/test_sockets.py::TestUDPSocket::test_close_during_receive[asyncio-ipv4]",
"tests/test_sockets.py::TestUDPSocket::test_close_during_receive[asyncio-ipv6]",
"tests/test_sockets.py::TestUDPSocket::test_close_during_receive[asyncio+uvloop-ipv4]",
"tests/test_sockets.py::TestUDPSocket::test_close_during_receive[asyncio+uvloop-ipv6]",
"tests/test_sockets.py::TestUDPSocket::test_receive_after_close[asyncio-ipv4]",
"tests/test_sockets.py::TestUDPSocket::test_receive_after_close[asyncio-ipv6]",
"tests/test_sockets.py::TestUDPSocket::test_receive_after_close[asyncio+uvloop-ipv4]",
"tests/test_sockets.py::TestUDPSocket::test_receive_after_close[asyncio+uvloop-ipv6]",
"tests/test_sockets.py::TestUDPSocket::test_send_after_close[asyncio-ipv4]",
"tests/test_sockets.py::TestUDPSocket::test_send_after_close[asyncio-ipv6]",
"tests/test_sockets.py::TestUDPSocket::test_send_after_close[asyncio+uvloop-ipv4]",
"tests/test_sockets.py::TestUDPSocket::test_send_after_close[asyncio+uvloop-ipv6]",
"tests/test_sockets.py::TestConnectedUDPSocket::test_send_receive[asyncio-ipv4]",
"tests/test_sockets.py::TestConnectedUDPSocket::test_send_receive[asyncio-ipv6]",
"tests/test_sockets.py::TestConnectedUDPSocket::test_send_receive[asyncio+uvloop-ipv4]",
"tests/test_sockets.py::TestConnectedUDPSocket::test_send_receive[asyncio+uvloop-ipv6]",
"tests/test_sockets.py::TestConnectedUDPSocket::test_iterate[asyncio-ipv4]",
"tests/test_sockets.py::TestConnectedUDPSocket::test_iterate[asyncio-ipv6]",
"tests/test_sockets.py::TestConnectedUDPSocket::test_iterate[asyncio+uvloop-ipv4]",
"tests/test_sockets.py::TestConnectedUDPSocket::test_iterate[asyncio+uvloop-ipv6]",
"tests/test_sockets.py::TestConnectedUDPSocket::test_socket_options[asyncio-ipv4]",
"tests/test_sockets.py::TestConnectedUDPSocket::test_socket_options[asyncio-ipv6]",
"tests/test_sockets.py::TestConnectedUDPSocket::test_socket_options[asyncio+uvloop-ipv4]",
"tests/test_sockets.py::TestConnectedUDPSocket::test_socket_options[asyncio+uvloop-ipv6]",
"tests/test_sockets.py::TestConnectedUDPSocket::test_reuse_port[asyncio-ipv4]",
"tests/test_sockets.py::TestConnectedUDPSocket::test_reuse_port[asyncio-ipv6]",
"tests/test_sockets.py::TestConnectedUDPSocket::test_reuse_port[asyncio+uvloop-ipv4]",
"tests/test_sockets.py::TestConnectedUDPSocket::test_reuse_port[asyncio+uvloop-ipv6]",
"tests/test_sockets.py::TestConnectedUDPSocket::test_concurrent_receive[asyncio-ipv4]",
"tests/test_sockets.py::TestConnectedUDPSocket::test_concurrent_receive[asyncio-ipv6]",
"tests/test_sockets.py::TestConnectedUDPSocket::test_concurrent_receive[asyncio+uvloop-ipv4]",
"tests/test_sockets.py::TestConnectedUDPSocket::test_concurrent_receive[asyncio+uvloop-ipv6]",
"tests/test_sockets.py::TestConnectedUDPSocket::test_close_during_receive[asyncio-ipv4]",
"tests/test_sockets.py::TestConnectedUDPSocket::test_close_during_receive[asyncio-ipv6]",
"tests/test_sockets.py::TestConnectedUDPSocket::test_close_during_receive[asyncio+uvloop-ipv4]",
"tests/test_sockets.py::TestConnectedUDPSocket::test_close_during_receive[asyncio+uvloop-ipv6]",
"tests/test_sockets.py::TestConnectedUDPSocket::test_receive_after_close[asyncio-ipv4]",
"tests/test_sockets.py::TestConnectedUDPSocket::test_receive_after_close[asyncio-ipv6]",
"tests/test_sockets.py::TestConnectedUDPSocket::test_receive_after_close[asyncio+uvloop-ipv4]",
"tests/test_sockets.py::TestConnectedUDPSocket::test_receive_after_close[asyncio+uvloop-ipv6]",
"tests/test_sockets.py::TestConnectedUDPSocket::test_send_after_close[asyncio-ipv4]",
"tests/test_sockets.py::TestConnectedUDPSocket::test_send_after_close[asyncio-ipv6]",
"tests/test_sockets.py::TestConnectedUDPSocket::test_send_after_close[asyncio+uvloop-ipv4]",
"tests/test_sockets.py::TestConnectedUDPSocket::test_send_after_close[asyncio+uvloop-ipv6]",
"tests/test_sockets.py::test_getaddrinfo[asyncio]",
"tests/test_sockets.py::test_getaddrinfo[asyncio+uvloop]",
"tests/test_sockets.py::test_getnameinfo[asyncio]",
"tests/test_sockets.py::test_getnameinfo[asyncio+uvloop]"
] | {
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false
} | 2020-07-31 12:55:49+00:00 | mit | 915 |
|
agronholm__anyio-227 | diff --git a/docs/versionhistory.rst b/docs/versionhistory.rst
index b9fa806..466bc78 100644
--- a/docs/versionhistory.rst
+++ b/docs/versionhistory.rst
@@ -54,6 +54,7 @@ This library adheres to `Semantic Versioning 2.0 <http://semver.org/>`_.
- Added the ``run_sync_from_thread()`` function
- Added ``UNIXSocketStream`` as a ``SocketStream`` subclass, capable of sending and receiving
file descriptors
+- Added thread pooling for asyncio
- Added the ``FileReadStream`` and ``FileWriteStream`` classes
- Added the ``TaskGroup.start()`` method and a corresponding ``BlockingPortal.start_task()`` method
- Added the ``name`` argument to ``BlockingPortal.spawn_task()``
@@ -81,6 +82,8 @@ This library adheres to `Semantic Versioning 2.0 <http://semver.org/>`_.
- Changed the asyncio ``TaskGroup.spawn()`` method to avoid the use of a coroutine wrapper on
Python 3.8+ and added a hint for hiding the wrapper in tracebacks on earlier Pythons (supported
by Pytest, Sentry etc.)
+- Changed the default thread limiter on asyncio to be scoped to an event loop so that multiple
+ running event loops don't conflict with each other
**2.2.0**
diff --git a/src/anyio/_backends/_asyncio.py b/src/anyio/_backends/_asyncio.py
index ac67930..2ba1e8d 100644
--- a/src/anyio/_backends/_asyncio.py
+++ b/src/anyio/_backends/_asyncio.py
@@ -4,6 +4,7 @@ import concurrent.futures
import math
import socket
import sys
+from asyncio.base_events import _run_until_complete_cb # type: ignore
from collections import OrderedDict, deque
from concurrent.futures import Future
from dataclasses import dataclass
@@ -11,8 +12,9 @@ from functools import partial, wraps
from inspect import (
CORO_RUNNING, CORO_SUSPENDED, GEN_RUNNING, GEN_SUSPENDED, getcoroutinestate, getgeneratorstate)
from io import IOBase
+from queue import Empty, Queue
from socket import AddressFamily, SocketKind, SocketType
-from threading import Thread
+from threading import Thread, current_thread
from types import TracebackType
from typing import (
Any, Awaitable, Callable, Collection, Coroutine, Deque, Dict, Generator, List, Optional,
@@ -40,6 +42,15 @@ else:
if sys.version_info >= (3, 7):
from asyncio import all_tasks, create_task, current_task, get_running_loop
from asyncio import run as native_run
+
+ def find_root_task() -> asyncio.Task:
+ for task in all_tasks():
+ if task._callbacks:
+ for cb, context in task._callbacks: # type: ignore
+ if cb is _run_until_complete_cb or cb.__module__ == 'uvloop.loop':
+ return task
+
+ raise RuntimeError('Cannot find root task for setting cleanup callback')
else:
_T = TypeVar('_T')
@@ -115,6 +126,14 @@ else:
return asyncio.Task.current_task(loop)
+ def find_root_task() -> asyncio.Task:
+ for task in all_tasks():
+ for cb in task._callbacks:
+ if cb is _run_until_complete_cb or cb.__module__ == 'uvloop.loop':
+ return task
+
+ raise RuntimeError('Cannot find root task for setting cleanup callback')
+
T_Retval = TypeVar('T_Retval')
# Check whether there is native support for task names in asyncio (3.8+)
@@ -619,47 +638,87 @@ class TaskGroup(abc.TaskGroup):
_Retval_Queue_Type = Tuple[Optional[T_Retval], Optional[BaseException]]
-async def run_sync_in_worker_thread(
- func: Callable[..., T_Retval], *args, cancellable: bool = False,
- limiter: Optional['CapacityLimiter'] = None) -> T_Retval:
- def thread_worker():
+def _thread_pool_worker(work_queue: Queue, workers: Set[Thread],
+ idle_workers: Set[Thread]) -> None:
+ func: Callable
+ args: tuple
+ future: asyncio.Future
+ limiter: CapacityLimiter
+ thread = current_thread()
+ while True:
try:
+ func, args, future = work_queue.get(timeout=10)
+ except Empty:
+ workers.remove(thread)
+ return
+ finally:
+ idle_workers.discard(thread)
+
+ if func is None:
+ # Shutdown command received
+ workers.remove(thread)
+ return
+
+ if not future.cancelled():
with claim_worker_thread('asyncio'):
- threadlocals.loop = loop
- result = func(*args)
- except BaseException as exc:
- if not loop.is_closed():
- loop.call_soon_threadsafe(limiter.release_on_behalf_of, task)
- if not cancelled:
- loop.call_soon_threadsafe(queue.put_nowait, (None, exc))
+ loop = threadlocals.loop = future._loop
+ try:
+ result = func(*args)
+ except BaseException as exc:
+ idle_workers.add(thread)
+ if not loop.is_closed() and not future.cancelled():
+ loop.call_soon_threadsafe(future.set_exception, exc)
+ else:
+ idle_workers.add(thread)
+ if not loop.is_closed() and not future.cancelled():
+ loop.call_soon_threadsafe(future.set_result, result)
else:
- if not loop.is_closed():
- loop.call_soon_threadsafe(limiter.release_on_behalf_of, task)
- if not cancelled:
- loop.call_soon_threadsafe(queue.put_nowait, (result, None))
+ idle_workers.add(thread)
+
+ work_queue.task_done()
+
+_threadpool_work_queue: RunVar[Queue] = RunVar('_threadpool_work_queue')
+_threadpool_idle_workers: RunVar[Set[Thread]] = RunVar('_threadpool_idle_workers')
+_threadpool_workers: RunVar[Set[Thread]] = RunVar('_threadpool_workers')
+
+
+def _loop_shutdown_callback(f: asyncio.Future) -> None:
+ """This is called when the root task has finished."""
+ for _ in range(len(_threadpool_workers.get())):
+ _threadpool_work_queue.get().put_nowait((None, None, None))
+
+
+async def run_sync_in_worker_thread(
+ func: Callable[..., T_Retval], *args, cancellable: bool = False,
+ limiter: Optional['CapacityLimiter'] = None) -> T_Retval:
await checkpoint()
- loop = get_running_loop()
- task = current_task()
- queue: asyncio.Queue[_Retval_Queue_Type] = asyncio.Queue(1)
- cancelled = False
- limiter = limiter or _default_thread_limiter
- await limiter.acquire_on_behalf_of(task)
- thread = Thread(target=thread_worker, daemon=True)
- thread.start()
- exception: Optional[BaseException] = None
- with CancelScope(shield=not cancellable):
- try:
- retval, exception = await queue.get()
- except BaseException as exc:
- exception = exc
- finally:
- cancelled = True
- if exception is not None:
- raise exception
- else:
- return cast(T_Retval, retval)
+ # If this is the first run in this event loop thread, set up the necessary variables
+ try:
+ work_queue = _threadpool_work_queue.get()
+ idle_workers = _threadpool_idle_workers.get()
+ workers = _threadpool_workers.get()
+ except LookupError:
+ work_queue = Queue()
+ idle_workers = set()
+ workers = set()
+ _threadpool_work_queue.set(work_queue)
+ _threadpool_idle_workers.set(idle_workers)
+ _threadpool_workers.set(workers)
+ find_root_task().add_done_callback(_loop_shutdown_callback)
+
+ async with (limiter or current_default_thread_limiter()):
+ with CancelScope(shield=not cancellable):
+ future: asyncio.Future = asyncio.Future()
+ work_queue.put_nowait((func, args, future))
+ if not idle_workers:
+ args = (work_queue, workers, idle_workers)
+ thread = Thread(target=_thread_pool_worker, args=args, name='AnyIO worker thread')
+ workers.add(thread)
+ thread.start()
+
+ return await future
def run_sync_from_thread(func: Callable[..., T_Retval], *args,
@@ -1536,11 +1595,16 @@ class CapacityLimiter(abc.CapacityLimiter):
tuple(self._borrowers), len(self._wait_queue))
-def current_default_thread_limiter():
- return _default_thread_limiter
+_default_thread_limiter: RunVar[CapacityLimiter] = RunVar('_default_thread_limiter')
-_default_thread_limiter = CapacityLimiter(40)
+def current_default_thread_limiter():
+ try:
+ return _default_thread_limiter.get()
+ except LookupError:
+ limiter = CapacityLimiter(40)
+ _default_thread_limiter.set(limiter)
+ return limiter
#
| agronholm/anyio | 8c136ffff989fb12c738bf87183046285b738a63 | diff --git a/tests/test_threads.py b/tests/test_threads.py
index 9d07ed7..56eb5b8 100644
--- a/tests/test_threads.py
+++ b/tests/test_threads.py
@@ -9,7 +9,7 @@ import pytest
from anyio import (
create_blocking_portal, create_capacity_limiter, create_event, create_task_group,
- get_cancelled_exc_class, get_current_task, run_async_from_thread, run_sync_from_thread,
+ get_cancelled_exc_class, get_current_task, run, run_async_from_thread, run_sync_from_thread,
run_sync_in_worker_thread, sleep, start_blocking_portal, wait_all_tasks_blocked)
if sys.version_info < (3, 7):
@@ -48,6 +48,23 @@ async def test_run_sync_from_thread():
assert result == 3
+def test_run_sync_from_thread_pooling():
+ async def main():
+ thread_ids = set()
+ for _ in range(5):
+ thread_ids.add(await run_sync_in_worker_thread(threading.get_ident))
+
+ # Expects that all the work has been done in the same worker thread
+ assert len(thread_ids) == 1
+ assert thread_ids.pop() != threading.get_ident()
+ assert threading.active_count() == initial_count + 1
+
+ # The thread should not exist after the event loop has been closed
+ initial_count = threading.active_count()
+ run(main, backend='asyncio')
+ assert threading.active_count() == initial_count
+
+
async def test_run_async_from_thread_exception():
async def add(a, b):
assert threading.get_ident() == event_loop_thread_id
| Use thread pools in run_sync_in_worker_thread()
Right now, the asyncio implementation of this starts a new thread every time this function is called. We should be using thread pools instead, like trio does. | 0.0 | 8c136ffff989fb12c738bf87183046285b738a63 | [
"tests/test_threads.py::test_run_sync_from_thread_pooling"
] | [
"tests/test_threads.py::test_run_async_from_thread[asyncio]",
"tests/test_threads.py::test_run_async_from_thread[asyncio+uvloop]",
"tests/test_threads.py::test_run_sync_from_thread[asyncio]",
"tests/test_threads.py::test_run_sync_from_thread[asyncio+uvloop]",
"tests/test_threads.py::test_run_async_from_thread_exception[asyncio]",
"tests/test_threads.py::test_run_async_from_thread_exception[asyncio+uvloop]",
"tests/test_threads.py::test_run_sync_from_thread_exception[asyncio]",
"tests/test_threads.py::test_run_sync_from_thread_exception[asyncio+uvloop]",
"tests/test_threads.py::test_run_anyio_async_func_from_thread[asyncio]",
"tests/test_threads.py::test_run_anyio_async_func_from_thread[asyncio+uvloop]",
"tests/test_threads.py::test_run_in_thread_cancelled[asyncio]",
"tests/test_threads.py::test_run_in_thread_cancelled[asyncio+uvloop]",
"tests/test_threads.py::test_run_in_thread_exception[asyncio]",
"tests/test_threads.py::test_run_in_thread_exception[asyncio+uvloop]",
"tests/test_threads.py::test_run_in_custom_limiter[asyncio]",
"tests/test_threads.py::test_run_in_custom_limiter[asyncio+uvloop]",
"tests/test_threads.py::test_run_async_from_unclaimed_thread",
"tests/test_threads.py::test_run_sync_from_unclaimed_thread",
"tests/test_threads.py::test_cancel_worker_thread[asyncio-uncancellable]",
"tests/test_threads.py::test_cancel_worker_thread[asyncio-cancellable]",
"tests/test_threads.py::test_cancel_worker_thread[asyncio+uvloop-uncancellable]",
"tests/test_threads.py::test_cancel_worker_thread[asyncio+uvloop-cancellable]",
"tests/test_threads.py::test_cancel_asyncio_native_task[asyncio]",
"tests/test_threads.py::TestBlockingPortal::test_successful_call[asyncio]",
"tests/test_threads.py::TestBlockingPortal::test_successful_call[asyncio+uvloop]",
"tests/test_threads.py::TestBlockingPortal::test_aexit_with_exception[asyncio]",
"tests/test_threads.py::TestBlockingPortal::test_aexit_with_exception[asyncio+uvloop]",
"tests/test_threads.py::TestBlockingPortal::test_aexit_without_exception[asyncio]",
"tests/test_threads.py::TestBlockingPortal::test_aexit_without_exception[asyncio+uvloop]",
"tests/test_threads.py::TestBlockingPortal::test_call_portal_from_event_loop_thread[asyncio]",
"tests/test_threads.py::TestBlockingPortal::test_call_portal_from_event_loop_thread[asyncio+uvloop]",
"tests/test_threads.py::TestBlockingPortal::test_start_with_new_event_loop[asyncio]",
"tests/test_threads.py::TestBlockingPortal::test_start_with_new_event_loop[asyncio+uvloop]",
"tests/test_threads.py::TestBlockingPortal::test_start_with_nonexistent_backend",
"tests/test_threads.py::TestBlockingPortal::test_call_stopped_portal[asyncio]",
"tests/test_threads.py::TestBlockingPortal::test_call_stopped_portal[asyncio+uvloop]",
"tests/test_threads.py::TestBlockingPortal::test_spawn_task[asyncio]",
"tests/test_threads.py::TestBlockingPortal::test_spawn_task[asyncio+uvloop]",
"tests/test_threads.py::TestBlockingPortal::test_spawn_task_cancel_later[asyncio]",
"tests/test_threads.py::TestBlockingPortal::test_spawn_task_cancel_later[asyncio+uvloop]",
"tests/test_threads.py::TestBlockingPortal::test_spawn_task_cancel_immediately[asyncio]",
"tests/test_threads.py::TestBlockingPortal::test_spawn_task_cancel_immediately[asyncio+uvloop]",
"tests/test_threads.py::TestBlockingPortal::test_spawn_task_with_name[asyncio]",
"tests/test_threads.py::TestBlockingPortal::test_spawn_task_with_name[asyncio+uvloop]",
"tests/test_threads.py::TestBlockingPortal::test_async_context_manager_success[asyncio]",
"tests/test_threads.py::TestBlockingPortal::test_async_context_manager_success[asyncio+uvloop]",
"tests/test_threads.py::TestBlockingPortal::test_async_context_manager_error[asyncio]",
"tests/test_threads.py::TestBlockingPortal::test_async_context_manager_error[asyncio+uvloop]",
"tests/test_threads.py::TestBlockingPortal::test_async_context_manager_error_ignore[asyncio]",
"tests/test_threads.py::TestBlockingPortal::test_async_context_manager_error_ignore[asyncio+uvloop]",
"tests/test_threads.py::TestBlockingPortal::test_start_no_value[asyncio]",
"tests/test_threads.py::TestBlockingPortal::test_start_no_value[asyncio+uvloop]",
"tests/test_threads.py::TestBlockingPortal::test_start_with_value[asyncio]",
"tests/test_threads.py::TestBlockingPortal::test_start_with_value[asyncio+uvloop]",
"tests/test_threads.py::TestBlockingPortal::test_start_crash_before_started_call[asyncio]",
"tests/test_threads.py::TestBlockingPortal::test_start_crash_before_started_call[asyncio+uvloop]",
"tests/test_threads.py::TestBlockingPortal::test_start_crash_after_started_call[asyncio]",
"tests/test_threads.py::TestBlockingPortal::test_start_crash_after_started_call[asyncio+uvloop]",
"tests/test_threads.py::TestBlockingPortal::test_start_no_started_call[asyncio]",
"tests/test_threads.py::TestBlockingPortal::test_start_no_started_call[asyncio+uvloop]",
"tests/test_threads.py::TestBlockingPortal::test_start_with_name[asyncio]",
"tests/test_threads.py::TestBlockingPortal::test_start_with_name[asyncio+uvloop]"
] | {
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false
} | 2021-03-03 21:08:05+00:00 | mit | 916 |
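
The problem statement in the record above asks for worker-thread reuse: instead of spawning a fresh thread for every run_sync_in_worker_thread() call, idle workers should block on a shared queue, pick up new jobs, and retire only after a period of inactivity. Below is a minimal, event-loop-free sketch of that pattern using only plain threading and concurrent.futures; the names (run_in_pool, _worker) are illustrative and are not AnyIO's API. The real implementation is the _thread_pool_worker in the record's patch, which additionally marshals results back to the event loop with call_soon_threadsafe and guards entry with a capacity limiter (which also bounds the benign race where two concurrent callers both see the same idle worker).

import threading
from concurrent.futures import Future
from queue import Empty, Queue

_work_queue: Queue = Queue()
_idle_workers: set = set()
_lock = threading.Lock()


def _worker() -> None:
    # Each worker loops over the shared queue; after 10 idle seconds it
    # retires, so the pool shrinks back to zero when unused.
    thread = threading.current_thread()
    while True:
        try:
            func, args, future = _work_queue.get(timeout=10)
        except Empty:
            with _lock:
                _idle_workers.discard(thread)
            return
        with _lock:
            _idle_workers.discard(thread)
        try:
            result = func(*args)
        except BaseException as exc:
            with _lock:
                _idle_workers.add(thread)  # reusable again before reporting
            future.set_exception(exc)
        else:
            with _lock:
                _idle_workers.add(thread)
            future.set_result(result)


def run_in_pool(func, *args) -> Future:
    # Hand the job to an idle worker if one exists; otherwise start one.
    future: Future = Future()
    with _lock:
        spawn = not _idle_workers
    _work_queue.put((func, args, future))
    if spawn:
        threading.Thread(target=_worker, daemon=True).start()
    return future


if __name__ == '__main__':
    # Sequential calls are served by one reused thread, mirroring the
    # thread_ids assertion in the record's test_patch.
    ids = {run_in_pool(threading.get_ident).result() for _ in range(5)}
    print(ids)  # exactly one thread ident

Note the ordering: the worker re-adds itself to the idle set before resolving the future, so by the time a caller observes the result, the worker is already visible as reusable and back-to-back calls deterministically land on the same thread — the same ordering the patch uses (idle_workers.add before loop.call_soon_threadsafe).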