Dataset Viewer

Columns and value types:

- instance_id: string (lengths 12 to 57)
- base_commit: string (length 40)
- created_at: date string (2015-01-06 14:05:07 to 2025-04-29 17:56:51)
- environment_setup_commit: string (length 40)
- hints_text: string (lengths 0 to 158k)
- patch: string (lengths 261 to 20.8k)
- problem_statement: string (lengths 11 to 52.5k)
- repo: string (lengths 7 to 53)
- test_patch: string (lengths 280 to 206k)
- meta: dict
- version: string (463 distinct values)
- install_config: dict
- requirements: string (lengths 93 to 34k), nullable
- environment: string (lengths 772 to 20k), nullable
- FAIL_TO_PASS: sequence (lengths 1 to 856)
- FAIL_TO_FAIL: sequence (lengths 0 to 536)
- PASS_TO_PASS: sequence (lengths 0 to 7.87k)
- PASS_TO_FAIL: sequence (lengths 0 to 92)
- license_name: string (35 distinct values)
- __index_level_0__: int64 (11 to 21.4k)
- num_tokens_patch: int64 (103 to 4.99k)
- before_filepaths: sequence (lengths 0 to 14)

Each record below lists these fields in the order above, separated by " | "; fields whose values span several lines (patches, configs, test lists) keep their original line breaks.
jpadilla__pyjwt-71 | 0afba10cf16834e154a59280de089c30de3d9a61 | 2015-01-06 14:05:07 | 2d0e8272dbd1372289bff1b8e8eba446bed4befa | jpadilla: @mark-adams ready to move this forward. One thing I'd like to do before merging this in would be to write some comment blocks for the algorithms. | diff --git a/jwt/__init__.py b/jwt/__init__.py
index 3a70913..b9a9986 100644
--- a/jwt/__init__.py
+++ b/jwt/__init__.py
@@ -5,16 +5,15 @@ Minimum implementation based on this spec:
http://self-issued.info/docs/draft-jones-json-web-token-01.html
"""
-import base64
import binascii
-import hashlib
-import hmac
-from datetime import datetime, timedelta
+
from calendar import timegm
from collections import Mapping
+from datetime import datetime, timedelta
-from .compat import (json, string_types, text_type, constant_time_compare,
- timedelta_total_seconds)
+from jwt.utils import base64url_decode, base64url_encode
+
+from .compat import (json, string_types, text_type, timedelta_total_seconds)
__version__ = '0.4.1'
@@ -22,6 +21,7 @@ __all__ = [
# Functions
'encode',
'decode',
+ 'register_algorithm',
# Exceptions
'InvalidTokenError',
@@ -33,9 +33,25 @@ __all__ = [
# Deprecated aliases
'ExpiredSignature',
'InvalidAudience',
- 'InvalidIssuer',
+ 'InvalidIssuer'
]
+_algorithms = {}
+
+
+def register_algorithm(alg_id, alg_obj):
+ """ Registers a new Algorithm for use when creating and verifying JWTs """
+ if alg_id in _algorithms:
+ raise ValueError('Algorithm already has a handler.')
+
+ if not isinstance(alg_obj, Algorithm):
+ raise TypeError('Object is not of type `Algorithm`')
+
+ _algorithms[alg_id] = alg_obj
+
+from jwt.algorithms import Algorithm, _register_default_algorithms # NOQA
+_register_default_algorithms()
+
class InvalidTokenError(Exception):
pass
@@ -56,187 +72,11 @@ class InvalidAudienceError(InvalidTokenError):
class InvalidIssuerError(InvalidTokenError):
pass
-
# Compatibility aliases (deprecated)
ExpiredSignature = ExpiredSignatureError
InvalidAudience = InvalidAudienceError
InvalidIssuer = InvalidIssuerError
-signing_methods = {
- 'none': lambda msg, key: b'',
- 'HS256': lambda msg, key: hmac.new(key, msg, hashlib.sha256).digest(),
- 'HS384': lambda msg, key: hmac.new(key, msg, hashlib.sha384).digest(),
- 'HS512': lambda msg, key: hmac.new(key, msg, hashlib.sha512).digest()
-}
-
-verify_methods = {
- 'HS256': lambda msg, key: hmac.new(key, msg, hashlib.sha256).digest(),
- 'HS384': lambda msg, key: hmac.new(key, msg, hashlib.sha384).digest(),
- 'HS512': lambda msg, key: hmac.new(key, msg, hashlib.sha512).digest()
-}
-
-
-def prepare_HS_key(key):
- if not isinstance(key, string_types) and not isinstance(key, bytes):
- raise TypeError('Expecting a string- or bytes-formatted key.')
-
- if isinstance(key, text_type):
- key = key.encode('utf-8')
-
- return key
-
-prepare_key_methods = {
- 'none': lambda key: None,
- 'HS256': prepare_HS_key,
- 'HS384': prepare_HS_key,
- 'HS512': prepare_HS_key
-}
-
-try:
- from cryptography.hazmat.primitives import interfaces, hashes
- from cryptography.hazmat.primitives.serialization import (
- load_pem_private_key, load_pem_public_key, load_ssh_public_key
- )
- from cryptography.hazmat.primitives.asymmetric import ec, padding
- from cryptography.hazmat.backends import default_backend
- from cryptography.exceptions import InvalidSignature
-
- def sign_rsa(msg, key, hashalg):
- signer = key.signer(
- padding.PKCS1v15(),
- hashalg
- )
-
- signer.update(msg)
- return signer.finalize()
-
- def verify_rsa(msg, key, hashalg, sig):
- verifier = key.verifier(
- sig,
- padding.PKCS1v15(),
- hashalg
- )
-
- verifier.update(msg)
-
- try:
- verifier.verify()
- return True
- except InvalidSignature:
- return False
-
- signing_methods.update({
- 'RS256': lambda msg, key: sign_rsa(msg, key, hashes.SHA256()),
- 'RS384': lambda msg, key: sign_rsa(msg, key, hashes.SHA384()),
- 'RS512': lambda msg, key: sign_rsa(msg, key, hashes.SHA512())
- })
-
- verify_methods.update({
- 'RS256': lambda msg, key, sig: verify_rsa(msg, key, hashes.SHA256(), sig),
- 'RS384': lambda msg, key, sig: verify_rsa(msg, key, hashes.SHA384(), sig),
- 'RS512': lambda msg, key, sig: verify_rsa(msg, key, hashes.SHA512(), sig)
- })
-
- def prepare_RS_key(key):
- if isinstance(key, interfaces.RSAPrivateKey) or \
- isinstance(key, interfaces.RSAPublicKey):
- return key
-
- if isinstance(key, string_types):
- if isinstance(key, text_type):
- key = key.encode('utf-8')
-
- try:
- if key.startswith(b'ssh-rsa'):
- key = load_ssh_public_key(key, backend=default_backend())
- else:
- key = load_pem_private_key(key, password=None, backend=default_backend())
- except ValueError:
- key = load_pem_public_key(key, backend=default_backend())
- else:
- raise TypeError('Expecting a PEM-formatted key.')
-
- return key
-
- prepare_key_methods.update({
- 'RS256': prepare_RS_key,
- 'RS384': prepare_RS_key,
- 'RS512': prepare_RS_key
- })
-
- def sign_ecdsa(msg, key, hashalg):
- signer = key.signer(ec.ECDSA(hashalg))
-
- signer.update(msg)
- return signer.finalize()
-
- def verify_ecdsa(msg, key, hashalg, sig):
- verifier = key.verifier(sig, ec.ECDSA(hashalg))
-
- verifier.update(msg)
-
- try:
- verifier.verify()
- return True
- except InvalidSignature:
- return False
-
- signing_methods.update({
- 'ES256': lambda msg, key: sign_ecdsa(msg, key, hashes.SHA256()),
- 'ES384': lambda msg, key: sign_ecdsa(msg, key, hashes.SHA384()),
- 'ES512': lambda msg, key: sign_ecdsa(msg, key, hashes.SHA512()),
- })
-
- verify_methods.update({
- 'ES256': lambda msg, key, sig: verify_ecdsa(msg, key, hashes.SHA256(), sig),
- 'ES384': lambda msg, key, sig: verify_ecdsa(msg, key, hashes.SHA384(), sig),
- 'ES512': lambda msg, key, sig: verify_ecdsa(msg, key, hashes.SHA512(), sig),
- })
-
- def prepare_ES_key(key):
- if isinstance(key, interfaces.EllipticCurvePrivateKey) or \
- isinstance(key, interfaces.EllipticCurvePublicKey):
- return key
-
- if isinstance(key, string_types):
- if isinstance(key, text_type):
- key = key.encode('utf-8')
-
- # Attempt to load key. We don't know if it's
- # a Signing Key or a Verifying Key, so we try
- # the Verifying Key first.
- try:
- key = load_pem_public_key(key, backend=default_backend())
- except ValueError:
- key = load_pem_private_key(key, password=None, backend=default_backend())
-
- else:
- raise TypeError('Expecting a PEM-formatted key.')
-
- return key
-
- prepare_key_methods.update({
- 'ES256': prepare_ES_key,
- 'ES384': prepare_ES_key,
- 'ES512': prepare_ES_key
- })
-
-except ImportError:
- pass
-
-
-def base64url_decode(input):
- rem = len(input) % 4
-
- if rem > 0:
- input += b'=' * (4 - rem)
-
- return base64.urlsafe_b64decode(input)
-
-
-def base64url_encode(input):
- return base64.urlsafe_b64encode(input).replace(b'=', b'')
-
def header(jwt):
if isinstance(jwt, text_type):
@@ -290,8 +130,10 @@ def encode(payload, key, algorithm='HS256', headers=None, json_encoder=None):
# Segments
signing_input = b'.'.join(segments)
try:
- key = prepare_key_methods[algorithm](key)
- signature = signing_methods[algorithm](signing_input, key)
+ alg_obj = _algorithms[algorithm]
+ key = alg_obj.prepare_key(key)
+ signature = alg_obj.sign(signing_input, key)
+
except KeyError:
raise NotImplementedError('Algorithm not supported')
@@ -360,17 +202,12 @@ def verify_signature(payload, signing_input, header, signature, key='',
raise TypeError('audience must be a string or None')
try:
- algorithm = header['alg'].upper()
- key = prepare_key_methods[algorithm](key)
+ alg_obj = _algorithms[header['alg'].upper()]
+ key = alg_obj.prepare_key(key)
- if algorithm.startswith('HS'):
- expected = verify_methods[algorithm](signing_input, key)
+ if not alg_obj.verify(signing_input, key, signature):
+ raise DecodeError('Signature verification failed')
- if not constant_time_compare(signature, expected):
- raise DecodeError('Signature verification failed')
- else:
- if not verify_methods[algorithm](signing_input, key, signature):
- raise DecodeError('Signature verification failed')
except KeyError:
raise DecodeError('Algorithm not supported')
diff --git a/jwt/algorithms.py b/jwt/algorithms.py
new file mode 100644
index 0000000..89ea75b
--- /dev/null
+++ b/jwt/algorithms.py
@@ -0,0 +1,200 @@
+import hashlib
+import hmac
+
+from jwt import register_algorithm
+from jwt.compat import constant_time_compare, string_types, text_type
+
+try:
+ from cryptography.hazmat.primitives import interfaces, hashes
+ from cryptography.hazmat.primitives.serialization import (
+ load_pem_private_key, load_pem_public_key, load_ssh_public_key
+ )
+ from cryptography.hazmat.primitives.asymmetric import ec, padding
+ from cryptography.hazmat.backends import default_backend
+ from cryptography.exceptions import InvalidSignature
+
+ has_crypto = True
+except ImportError:
+ has_crypto = False
+
+
+def _register_default_algorithms():
+ """ Registers the algorithms that are implemented by the library """
+ register_algorithm('none', NoneAlgorithm())
+ register_algorithm('HS256', HMACAlgorithm(hashlib.sha256))
+ register_algorithm('HS384', HMACAlgorithm(hashlib.sha384))
+ register_algorithm('HS512', HMACAlgorithm(hashlib.sha512))
+
+ if has_crypto:
+ register_algorithm('RS256', RSAAlgorithm(hashes.SHA256()))
+ register_algorithm('RS384', RSAAlgorithm(hashes.SHA384()))
+ register_algorithm('RS512', RSAAlgorithm(hashes.SHA512()))
+
+ register_algorithm('ES256', ECAlgorithm(hashes.SHA256()))
+ register_algorithm('ES384', ECAlgorithm(hashes.SHA384()))
+ register_algorithm('ES512', ECAlgorithm(hashes.SHA512()))
+
+
+class Algorithm(object):
+ """ The interface for an algorithm used to sign and verify JWTs """
+ def prepare_key(self, key):
+ """
+ Performs necessary validation and conversions on the key and returns
+ the key value in the proper format for sign() and verify()
+ """
+ raise NotImplementedError
+
+ def sign(self, msg, key):
+ """
+ Returns a digital signature for the specified message using the
+ specified key value
+ """
+ raise NotImplementedError
+
+ def verify(self, msg, key, sig):
+ """
+ Verifies that the specified digital signature is valid for the specified
+ message and key values.
+ """
+ raise NotImplementedError
+
+
+class NoneAlgorithm(Algorithm):
+ """
+ Placeholder for use when no signing or verification operations are required
+ """
+ def prepare_key(self, key):
+ return None
+
+ def sign(self, msg, key):
+ return b''
+
+ def verify(self, msg, key):
+ return True
+
+
+class HMACAlgorithm(Algorithm):
+ """
+ Performs signing and verification operations using HMAC and the specified
+ hash function
+ """
+ def __init__(self, hash_alg):
+ self.hash_alg = hash_alg
+
+ def prepare_key(self, key):
+ if not isinstance(key, string_types) and not isinstance(key, bytes):
+ raise TypeError('Expecting a string- or bytes-formatted key.')
+
+ if isinstance(key, text_type):
+ key = key.encode('utf-8')
+
+ return key
+
+ def sign(self, msg, key):
+ return hmac.new(key, msg, self.hash_alg).digest()
+
+ def verify(self, msg, key, sig):
+ return constant_time_compare(sig, self.sign(msg, key))
+
+if has_crypto:
+
+ class RSAAlgorithm(Algorithm):
+ """
+ Performs signing and verification operations using RSASSA-PKCS-v1_5 and
+ the specified hash function
+ """
+
+ def __init__(self, hash_alg):
+ self.hash_alg = hash_alg
+
+ def prepare_key(self, key):
+ if isinstance(key, interfaces.RSAPrivateKey) or \
+ isinstance(key, interfaces.RSAPublicKey):
+ return key
+
+ if isinstance(key, string_types):
+ if isinstance(key, text_type):
+ key = key.encode('utf-8')
+
+ try:
+ if key.startswith(b'ssh-rsa'):
+ key = load_ssh_public_key(key, backend=default_backend())
+ else:
+ key = load_pem_private_key(key, password=None, backend=default_backend())
+ except ValueError:
+ key = load_pem_public_key(key, backend=default_backend())
+ else:
+ raise TypeError('Expecting a PEM-formatted key.')
+
+ return key
+
+ def sign(self, msg, key):
+ signer = key.signer(
+ padding.PKCS1v15(),
+ self.hash_alg
+ )
+
+ signer.update(msg)
+ return signer.finalize()
+
+ def verify(self, msg, key, sig):
+ verifier = key.verifier(
+ sig,
+ padding.PKCS1v15(),
+ self.hash_alg
+ )
+
+ verifier.update(msg)
+
+ try:
+ verifier.verify()
+ return True
+ except InvalidSignature:
+ return False
+
+ class ECAlgorithm(Algorithm):
+ """
+ Performs signing and verification operations using ECDSA and the
+ specified hash function
+ """
+ def __init__(self, hash_alg):
+ self.hash_alg = hash_alg
+
+ def prepare_key(self, key):
+ if isinstance(key, interfaces.EllipticCurvePrivateKey) or \
+ isinstance(key, interfaces.EllipticCurvePublicKey):
+ return key
+
+ if isinstance(key, string_types):
+ if isinstance(key, text_type):
+ key = key.encode('utf-8')
+
+ # Attempt to load key. We don't know if it's
+ # a Signing Key or a Verifying Key, so we try
+ # the Verifying Key first.
+ try:
+ key = load_pem_public_key(key, backend=default_backend())
+ except ValueError:
+ key = load_pem_private_key(key, password=None, backend=default_backend())
+
+ else:
+ raise TypeError('Expecting a PEM-formatted key.')
+
+ return key
+
+ def sign(self, msg, key):
+ signer = key.signer(ec.ECDSA(self.hash_alg))
+
+ signer.update(msg)
+ return signer.finalize()
+
+ def verify(self, msg, key, sig):
+ verifier = key.verifier(sig, ec.ECDSA(self.hash_alg))
+
+ verifier.update(msg)
+
+ try:
+ verifier.verify()
+ return True
+ except InvalidSignature:
+ return False
diff --git a/jwt/utils.py b/jwt/utils.py
new file mode 100644
index 0000000..e6c1ef3
--- /dev/null
+++ b/jwt/utils.py
@@ -0,0 +1,14 @@
+import base64
+
+
+def base64url_decode(input):
+ rem = len(input) % 4
+
+ if rem > 0:
+ input += b'=' * (4 - rem)
+
+ return base64.urlsafe_b64decode(input)
+
+
+def base64url_encode(input):
+ return base64.urlsafe_b64encode(input).replace(b'=', b'')
diff --git a/setup.py b/setup.py
index 62d5df7..e703db6 100755
--- a/setup.py
+++ b/setup.py
@@ -1,8 +1,9 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
-import sys
import re
+import sys
+
from setuptools import setup
| Move algorithm-specific logic to be class-based to allow for better extensibility
In #42, we discussed moving to more of a registry model for registering algorithms. This issue suggests that we move to that sort of model. | jpadilla/pyjwt | diff --git a/tests/test_jwt.py b/tests/test_jwt.py
index a57ab31..bd9ca06 100644
--- a/tests/test_jwt.py
+++ b/tests/test_jwt.py
@@ -45,6 +45,10 @@ class TestJWT(unittest.TestCase):
self.payload = {'iss': 'jeff', 'exp': utc_timestamp() + 15,
'claim': 'insanity'}
+ def test_register_algorithm_rejects_non_algorithm_obj(self):
+ with self.assertRaises(TypeError):
+ jwt.register_algorithm('AAA123', {})
+
def test_encode_decode(self):
secret = 'secret'
jwt_message = jwt.encode(self.payload, secret)
@@ -549,35 +553,15 @@ class TestJWT(unittest.TestCase):
load_output = jwt.load(jwt_message)
jwt.verify_signature(key=pub_rsakey, *load_output)
- def test_rsa_related_signing_methods(self):
- if has_crypto:
- self.assertTrue('RS256' in jwt.signing_methods)
- self.assertTrue('RS384' in jwt.signing_methods)
- self.assertTrue('RS512' in jwt.signing_methods)
- else:
- self.assertFalse('RS256' in jwt.signing_methods)
- self.assertFalse('RS384' in jwt.signing_methods)
- self.assertFalse('RS512' in jwt.signing_methods)
-
- def test_rsa_related_verify_methods(self):
- if has_crypto:
- self.assertTrue('RS256' in jwt.verify_methods)
- self.assertTrue('RS384' in jwt.verify_methods)
- self.assertTrue('RS512' in jwt.verify_methods)
- else:
- self.assertFalse('RS256' in jwt.verify_methods)
- self.assertFalse('RS384' in jwt.verify_methods)
- self.assertFalse('RS512' in jwt.verify_methods)
-
- def test_rsa_related_key_preparation_methods(self):
+ def test_rsa_related_algorithms(self):
if has_crypto:
- self.assertTrue('RS256' in jwt.prepare_key_methods)
- self.assertTrue('RS384' in jwt.prepare_key_methods)
- self.assertTrue('RS512' in jwt.prepare_key_methods)
+ self.assertTrue('RS256' in jwt._algorithms)
+ self.assertTrue('RS384' in jwt._algorithms)
+ self.assertTrue('RS512' in jwt._algorithms)
else:
- self.assertFalse('RS256' in jwt.prepare_key_methods)
- self.assertFalse('RS384' in jwt.prepare_key_methods)
- self.assertFalse('RS512' in jwt.prepare_key_methods)
+ self.assertFalse('RS256' in jwt._algorithms)
+ self.assertFalse('RS384' in jwt._algorithms)
+ self.assertFalse('RS512' in jwt._algorithms)
@unittest.skipIf(not has_crypto, "Can't run without cryptography library")
def test_encode_decode_with_ecdsa_sha256(self):
@@ -669,35 +653,15 @@ class TestJWT(unittest.TestCase):
load_output = jwt.load(jwt_message)
jwt.verify_signature(key=pub_eckey, *load_output)
- def test_ecdsa_related_signing_methods(self):
- if has_crypto:
- self.assertTrue('ES256' in jwt.signing_methods)
- self.assertTrue('ES384' in jwt.signing_methods)
- self.assertTrue('ES512' in jwt.signing_methods)
- else:
- self.assertFalse('ES256' in jwt.signing_methods)
- self.assertFalse('ES384' in jwt.signing_methods)
- self.assertFalse('ES512' in jwt.signing_methods)
-
- def test_ecdsa_related_verify_methods(self):
- if has_crypto:
- self.assertTrue('ES256' in jwt.verify_methods)
- self.assertTrue('ES384' in jwt.verify_methods)
- self.assertTrue('ES512' in jwt.verify_methods)
- else:
- self.assertFalse('ES256' in jwt.verify_methods)
- self.assertFalse('ES384' in jwt.verify_methods)
- self.assertFalse('ES512' in jwt.verify_methods)
-
- def test_ecdsa_related_key_preparation_methods(self):
+ def test_ecdsa_related_algorithms(self):
if has_crypto:
- self.assertTrue('ES256' in jwt.prepare_key_methods)
- self.assertTrue('ES384' in jwt.prepare_key_methods)
- self.assertTrue('ES512' in jwt.prepare_key_methods)
+ self.assertTrue('ES256' in jwt._algorithms)
+ self.assertTrue('ES384' in jwt._algorithms)
+ self.assertTrue('ES512' in jwt._algorithms)
else:
- self.assertFalse('ES256' in jwt.prepare_key_methods)
- self.assertFalse('ES384' in jwt.prepare_key_methods)
- self.assertFalse('ES512' in jwt.prepare_key_methods)
+ self.assertFalse('ES256' in jwt._algorithms)
+ self.assertFalse('ES384' in jwt._algorithms)
+ self.assertFalse('ES512' in jwt._algorithms)
def test_check_audience(self):
payload = {
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_added_files",
"has_many_modified_files",
"has_many_hunks",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 3,
"test_score": 3
},
"num_modified_files": 2
} | 0.4 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"cryptography",
"pytest"
],
"pre_install": null,
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | cffi==1.17.1
cryptography==44.0.2
exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
packaging @ file:///croot/packaging_1734472117206/work
pluggy @ file:///croot/pluggy_1733169602837/work
pycparser==2.22
-e git+https://github.com/jpadilla/pyjwt.git@0afba10cf16834e154a59280de089c30de3d9a61#egg=PyJWT
pytest @ file:///croot/pytest_1738938843180/work
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
| name: pyjwt
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- exceptiongroup=1.2.0=py39h06a4308_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- packaging=24.2=py39h06a4308_0
- pip=25.0=py39h06a4308_0
- pluggy=1.5.0=py39h06a4308_0
- pytest=8.3.4=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py39h06a4308_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- cffi==1.17.1
- cryptography==44.0.2
- pycparser==2.22
prefix: /opt/conda/envs/pyjwt
| [
"tests/test_jwt.py::TestJWT::test_register_algorithm_rejects_non_algorithm_obj"
] | [
"tests/test_jwt.py::TestJWT::test_decodes_valid_es384_jwt",
"tests/test_jwt.py::TestJWT::test_decodes_valid_rs384_jwt",
"tests/test_jwt.py::TestJWT::test_ecdsa_related_algorithms",
"tests/test_jwt.py::TestJWT::test_encode_decode_with_ecdsa_sha256",
"tests/test_jwt.py::TestJWT::test_encode_decode_with_ecdsa_sha384",
"tests/test_jwt.py::TestJWT::test_encode_decode_with_ecdsa_sha512",
"tests/test_jwt.py::TestJWT::test_encode_decode_with_rsa_sha256",
"tests/test_jwt.py::TestJWT::test_encode_decode_with_rsa_sha384",
"tests/test_jwt.py::TestJWT::test_encode_decode_with_rsa_sha512",
"tests/test_jwt.py::TestJWT::test_rsa_related_algorithms"
] | [
"tests/test_jwt.py::TestJWT::test_allow_skip_verification",
"tests/test_jwt.py::TestJWT::test_bad_secret",
"tests/test_jwt.py::TestJWT::test_bytes_secret",
"tests/test_jwt.py::TestJWT::test_check_audience",
"tests/test_jwt.py::TestJWT::test_check_audience_in_array",
"tests/test_jwt.py::TestJWT::test_check_issuer",
"tests/test_jwt.py::TestJWT::test_custom_headers",
"tests/test_jwt.py::TestJWT::test_custom_json_encoder",
"tests/test_jwt.py::TestJWT::test_decode_invalid_crypto_padding",
"tests/test_jwt.py::TestJWT::test_decode_invalid_header_padding",
"tests/test_jwt.py::TestJWT::test_decode_invalid_header_string",
"tests/test_jwt.py::TestJWT::test_decode_invalid_payload_padding",
"tests/test_jwt.py::TestJWT::test_decode_invalid_payload_string",
"tests/test_jwt.py::TestJWT::test_decode_skip_expiration_verification",
"tests/test_jwt.py::TestJWT::test_decode_skip_notbefore_verification",
"tests/test_jwt.py::TestJWT::test_decode_unicode_value",
"tests/test_jwt.py::TestJWT::test_decode_with_expiration",
"tests/test_jwt.py::TestJWT::test_decode_with_expiration_with_leeway",
"tests/test_jwt.py::TestJWT::test_decode_with_notbefore",
"tests/test_jwt.py::TestJWT::test_decode_with_notbefore_with_leeway",
"tests/test_jwt.py::TestJWT::test_decodes_valid_jwt",
"tests/test_jwt.py::TestJWT::test_encode_bad_type",
"tests/test_jwt.py::TestJWT::test_encode_datetime",
"tests/test_jwt.py::TestJWT::test_encode_decode",
"tests/test_jwt.py::TestJWT::test_encode_decode_with_algo_none",
"tests/test_jwt.py::TestJWT::test_invalid_crypto_alg",
"tests/test_jwt.py::TestJWT::test_load_no_verification",
"tests/test_jwt.py::TestJWT::test_load_verify_valid_jwt",
"tests/test_jwt.py::TestJWT::test_no_secret",
"tests/test_jwt.py::TestJWT::test_nonascii_secret",
"tests/test_jwt.py::TestJWT::test_raise_exception_invalid_audience",
"tests/test_jwt.py::TestJWT::test_raise_exception_invalid_audience_in_array",
"tests/test_jwt.py::TestJWT::test_raise_exception_invalid_issuer",
"tests/test_jwt.py::TestJWT::test_raise_exception_token_without_audience",
"tests/test_jwt.py::TestJWT::test_raise_exception_token_without_issuer",
"tests/test_jwt.py::TestJWT::test_unicode_secret",
"tests/test_jwt.py::TestJWT::test_verify_signature_no_secret"
] | [] | MIT License | 11 | 4,219 | [
"jwt/__init__.py",
"setup.py"
] |
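
Side note on the jpadilla__pyjwt-71 record above: the patch replaces the per-algorithm lambda tables with a `register_algorithm(alg_id, alg_obj)` hook and an `Algorithm` base class exposing `prepare_key`, `sign` and `verify`. A minimal sketch of how a third-party algorithm could plug into that interface, assuming the PyJWT revision from this record and its Python 3.9 environment; the `ReversedHMAC` class and the `'XYZ256'` identifier are invented for illustration and are not part of the dataset row.

```python
import hashlib
import hmac

import jwt  # PyJWT at the commit referenced by this record
from jwt.algorithms import Algorithm


class ReversedHMAC(Algorithm):
    """Toy algorithm: HMAC-SHA256 with the digest reversed (illustration only)."""

    def prepare_key(self, key):
        # Normalise text keys to bytes, as the hmac module expects.
        return key.encode('utf-8') if isinstance(key, str) else key

    def sign(self, msg, key):
        return hmac.new(key, msg, hashlib.sha256).digest()[::-1]

    def verify(self, msg, key, sig):
        return hmac.compare_digest(sig, self.sign(msg, key))


# Per the patch, register_algorithm() raises TypeError for objects that are not
# Algorithm instances and ValueError for identifiers that already have a handler.
jwt.register_algorithm('XYZ256', ReversedHMAC())

token = jwt.encode({'some': 'payload'}, 'secret', algorithm='XYZ256')
print(jwt.decode(token, 'secret'))
```
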
msiemens__tinydb-46 | 65c302427777434c3c01bf36eb83ab86e6323a5e | 2015-01-07 00:39:47 | 65c302427777434c3c01bf36eb83ab86e6323a5e | diff --git a/tinydb/database.py b/tinydb/database.py
index 31a7483..cdaad19 100644
--- a/tinydb/database.py
+++ b/tinydb/database.py
@@ -199,7 +199,7 @@ class Table(object):
old_ids = self._read().keys()
if old_ids:
- self._last_id = max(int(i, 10) for i in old_ids)
+ self._last_id = max(i for i in old_ids)
else:
self._last_id = 0
@@ -257,10 +257,11 @@ class Table(object):
:rtype: dict
"""
- data = self._db._read(self.name)
-
- for eid in list(data):
- data[eid] = Element(data[eid], eid)
+ raw_data = self._db._read(self.name)
+ data = {}
+ for key in list(raw_data):
+ eid = int(key)
+ data[eid] = Element(raw_data[key], eid)
return data
| Cannot handle data by integer eid
The id of the element changes to a unicode string after JSON serialization/deserialization, which makes it impossible to get the element by its integer eid.
```python
Python 2.7.6 (default, Sep 9 2014, 15:04:36)
[GCC 4.2.1 Compatible Apple LLVM 6.0 (clang-600.0.39)] on darwin
Type "help", "copyright", "credits" or "license" for more information.
>>> from tinydb import TinyDB
>>> db=TinyDB('/tmp/test.json')
>>> db.insert({'foo':'bar'})
1
>>> db.all()
[{u'foo': u'bar'}]
>>> element = db.all()[0]
>>> element.eid
u'1'
>>> assert db.get(eid=1) is not None
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
AssertionError
>>> assert db.get(eid='1') is not None
>>> db.update({'foo':'blah'}, eids=[1])
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/Users/wolfg/.virtualenvs/opensource/lib/python2.7/site-packages/tinydb/database.py", line 335, in update
cond, eids)
File "/Users/wolfg/.virtualenvs/opensource/lib/python2.7/site-packages/tinydb/database.py", line 222, in process_elements
func(data, eid)
File "/Users/wolfg/.virtualenvs/opensource/lib/python2.7/site-packages/tinydb/database.py", line 334, in <lambda>
self.process_elements(lambda data, eid: data[eid].update(fields),
KeyError: 1
>>> db.update({'foo':'blah'}, eids=['1'])
>>> db.all()
[{u'foo': u'blah'}]
>>> db.contains(eids=[1])
False
>>> db.contains(eids=['1'])
True
``` | msiemens/tinydb | diff --git a/tests/test_tinydb.py b/tests/test_tinydb.py
index 6f4e435..35b6fc1 100644
--- a/tests/test_tinydb.py
+++ b/tests/test_tinydb.py
@@ -337,3 +337,34 @@ def test_unicode_json(tmpdir):
assert _db.contains(where('value') == unic_str1)
assert _db.contains(where('value') == byte_str2)
assert _db.contains(where('value') == unic_str2)
+
+
+def test_eids_json(tmpdir):
+ """
+ Regression test for issue #45
+ """
+
+ path = str(tmpdir.join('db.json'))
+
+ with TinyDB(path) as _db:
+ _db.purge()
+ assert _db.insert({'int': 1, 'char': 'a'}) == 1
+ assert _db.insert({'int': 1, 'char': 'a'}) == 2
+
+ _db.purge()
+ assert _db.insert_multiple([{'int': 1, 'char': 'a'},
+ {'int': 1, 'char': 'b'},
+ {'int': 1, 'char': 'c'}]) == [1, 2, 3]
+
+ assert _db.contains(eids=[1, 2])
+ assert not _db.contains(eids=[88])
+
+ _db.update({'int': 2}, eids=[1, 2])
+ assert _db.count(where('int') == 2) == 2
+
+ el = _db.all()[0]
+ assert _db.get(eid=el.eid) == el
+ assert _db.get(eid=float('NaN')) is None
+
+ _db.remove(eids=[1, 2])
+ assert len(_db) == 1
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 0
},
"num_modified_files": 1
} | 2.2 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest"
],
"pre_install": null,
"python": "3.4",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work
certifi==2021.5.30
importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work
packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work
pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work
py @ file:///opt/conda/conda-bld/py_1644396412707/work
pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work
pytest==6.2.4
-e git+https://github.com/msiemens/tinydb.git@65c302427777434c3c01bf36eb83ab86e6323a5e#egg=tinydb
toml @ file:///tmp/build/80754af9/toml_1616166611790/work
typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work
zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
| name: tinydb
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- attrs=21.4.0=pyhd3eb1b0_0
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- importlib-metadata=4.8.1=py36h06a4308_0
- importlib_metadata=4.8.1=hd3eb1b0_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- more-itertools=8.12.0=pyhd3eb1b0_0
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- packaging=21.3=pyhd3eb1b0_0
- pip=21.2.2=py36h06a4308_0
- pluggy=0.13.1=py36h06a4308_0
- py=1.11.0=pyhd3eb1b0_0
- pyparsing=3.0.4=pyhd3eb1b0_0
- pytest=6.2.4=py36h06a4308_2
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- toml=0.10.2=pyhd3eb1b0_0
- typing_extensions=4.1.1=pyh06a4308_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zipp=3.6.0=pyhd3eb1b0_0
- zlib=1.2.13=h5eee18b_1
prefix: /opt/conda/envs/tinydb
| [
"tests/test_tinydb.py::test_eids_json"
] | [] | [
"tests/test_tinydb.py::test_purge[db0]",
"tests/test_tinydb.py::test_purge[db1]",
"tests/test_tinydb.py::test_all[db0]",
"tests/test_tinydb.py::test_all[db1]",
"tests/test_tinydb.py::test_insert[db0]",
"tests/test_tinydb.py::test_insert[db1]",
"tests/test_tinydb.py::test_insert_ids[db0]",
"tests/test_tinydb.py::test_insert_ids[db1]",
"tests/test_tinydb.py::test_insert_multiple[db0]",
"tests/test_tinydb.py::test_insert_multiple[db1]",
"tests/test_tinydb.py::test_insert_multiple_with_ids[db0]",
"tests/test_tinydb.py::test_insert_multiple_with_ids[db1]",
"tests/test_tinydb.py::test_remove[db0]",
"tests/test_tinydb.py::test_remove[db1]",
"tests/test_tinydb.py::test_remove_multiple[db0]",
"tests/test_tinydb.py::test_remove_multiple[db1]",
"tests/test_tinydb.py::test_remove_ids[db0]",
"tests/test_tinydb.py::test_remove_ids[db1]",
"tests/test_tinydb.py::test_update[db0]",
"tests/test_tinydb.py::test_update[db1]",
"tests/test_tinydb.py::test_update_transform[db0]",
"tests/test_tinydb.py::test_update_transform[db1]",
"tests/test_tinydb.py::test_update_ids[db0]",
"tests/test_tinydb.py::test_update_ids[db1]",
"tests/test_tinydb.py::test_search[db0]",
"tests/test_tinydb.py::test_search[db1]",
"tests/test_tinydb.py::test_contians[db0]",
"tests/test_tinydb.py::test_contians[db1]",
"tests/test_tinydb.py::test_get[db0]",
"tests/test_tinydb.py::test_get[db1]",
"tests/test_tinydb.py::test_get_ids[db0]",
"tests/test_tinydb.py::test_get_ids[db1]",
"tests/test_tinydb.py::test_count[db0]",
"tests/test_tinydb.py::test_count[db1]",
"tests/test_tinydb.py::test_contains[db0]",
"tests/test_tinydb.py::test_contains[db1]",
"tests/test_tinydb.py::test_contains_ids[db0]",
"tests/test_tinydb.py::test_contains_ids[db1]",
"tests/test_tinydb.py::test_get_idempotent[db0]",
"tests/test_tinydb.py::test_get_idempotent[db1]",
"tests/test_tinydb.py::test_multiple_dbs",
"tests/test_tinydb.py::test_unique_ids",
"tests/test_tinydb.py::test_lastid_after_open"
] | [] | MIT License | 12 | 250 | [
"tinydb/database.py"
] |
|
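
Side note on the msiemens__tinydb-46 record above: the behaviour reported in the issue comes from JSON itself, where object keys are always strings, so integer element ids come back as text after a save/load round trip; the patch casts them back to int when the table is read. A small stand-alone illustration using only the standard library (no TinyDB required):

```python
import json

# Element ids start out as Python ints ...
table = {1: {'foo': 'bar'}, 2: {'foo': 'baz'}}

# ... but JSON object keys can only be strings, so a round trip re-types them.
restored = json.loads(json.dumps(table))
print(list(restored))   # ['1', '2'] -- string keys, not ints
print(1 in restored)    # False
print('1' in restored)  # True

# The fix in the patch amounts to converting keys back to int on read:
data = {int(key): value for key, value in restored.items()}
print(1 in data)        # True
```
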
pozytywnie__webapp-health-monitor-12 | 64bb87f0c5c8ec9863b7daf1fdd3f7ff6532738f | 2015-01-12 12:24:43 | 64bb87f0c5c8ec9863b7daf1fdd3f7ff6532738f | diff --git a/webapp_health_monitor/verificators/base.py b/webapp_health_monitor/verificators/base.py
index 0f4b6af..668536a 100644
--- a/webapp_health_monitor/verificators/base.py
+++ b/webapp_health_monitor/verificators/base.py
@@ -4,8 +4,8 @@ from webapp_health_monitor import errors
class Verificator(object):
verificator_name = None
- def __init__(self, logger):
- self.logger = logger
+ def __init__(self, **kwargs):
+ pass
def run(self):
raise NotImplementedError()
@@ -40,7 +40,6 @@ class RangeVerificator(Verificator):
def _check_value(self):
value = self._get_value()
- self.logger.check_range(self.lower_bound, value, self.upper_bound)
self._check_lower_bound(value)
self._check_upper_bound(value)
| Use the standard Python logging module
Simplify logging by using only the built-in logging module. Remove the forwarding of the custom logger class. | pozytywnie/webapp-health-monitor | diff --git a/tests/test_verificators.py b/tests/test_verificators.py
index 84190b2..69c0a96 100644
--- a/tests/test_verificators.py
+++ b/tests/test_verificators.py
@@ -14,55 +14,39 @@ from webapp_health_monitor.verificators.system import (
class RangeVerificatorTest(TestCase):
def test_lack_of_value_extractor_raises_bad_configuration(self):
- logger = mock.Mock()
- verificator = RangeVerificator(logger)
+ verificator = RangeVerificator()
verificator.lower_bound = 0
verificator.upper_bound = 0
self.assertRaises(errors.BadConfigurationError, verificator.run)
def test_lack_of_bounds_raises_bad_configuration(self):
- logger = mock.Mock()
- verificator = RangeVerificator(logger)
+ verificator = RangeVerificator()
verificator.value_extractor = mock.Mock()
self.assertRaises(errors.BadConfigurationError, verificator.run)
def test_bad_bounds_raises_bad_configuration(self):
- logger = mock.Mock()
- verificator = RangeVerificator(logger)
+ verificator = RangeVerificator()
verificator.value_extractor = mock.Mock()
verificator.lower_bound = 1
verificator.upper_bound = 0
self.assertRaises(errors.BadConfigurationError, verificator.run)
def test_value_below_lower_bound_raises_verification_error(self):
- logger = mock.Mock()
- verificator = RangeVerificator(logger)
+ verificator = RangeVerificator()
verificator._get_value = mock.Mock(return_value=99)
verificator.value_extractor = mock.Mock()
verificator.lower_bound = 100
self.assertRaises(errors.VerificationError, verificator.run)
def test_value_over_upper_bound_raises_verification_error(self):
- logger = mock.Mock()
- verificator = RangeVerificator(logger)
+ verificator = RangeVerificator()
verificator._get_value = mock.Mock(return_value=100)
verificator.value_extractor = mock.Mock()
verificator.upper_bound = 99
self.assertRaises(errors.VerificationError, verificator.run)
- def test_check_logging(self):
- logger = mock.Mock()
- verificator = RangeVerificator(logger)
- verificator._get_value = mock.Mock(return_value=1)
- verificator.value_extractor = mock.Mock()
- verificator.lower_bound = 0
- verificator.upper_bound = 2
- verificator.run()
- logger.check_range.assert_called_with(0, 1, 2)
-
def test_get_value(self):
- logger = mock.Mock()
- verificator = RangeVerificator(logger)
+ verificator = RangeVerificator()
verificator.value_extractor = mock.Mock(
extract=mock.Mock(return_value=1))
self.assertEqual(1, verificator._get_value())
@@ -74,8 +58,7 @@ class FreeDiskSpaceVerificatorTest(TestCase):
def test_using_value_extractor(self, FreeDiskSpaceExtractor):
class AppVerificator(FreeDiskSpaceVerificator):
mount_point = '/home'
- logger = mock.Mock()
- verificator = AppVerificator(logger)
+ verificator = AppVerificator()
FreeDiskSpaceExtractor.return_value.extract.return_value = 100
self.assertEqual(100, verificator._get_value())
FreeDiskSpaceExtractor.assert_called_with('/home')
@@ -87,8 +70,7 @@ class PercentUsedDiskSpaceVerificatorTest(TestCase):
def test_using_value_extractor(self, PercentUsedDiskSpaceExtractor):
class AppVerificator(PercentUsedDiskSpaceVerificator):
mount_point = '/home'
- logger = mock.Mock()
- verificator = AppVerificator(logger)
+ verificator = AppVerificator()
PercentUsedDiskSpaceExtractor.return_value.extract.return_value = 100
self.assertEqual(100, verificator._get_value())
PercentUsedDiskSpaceExtractor.assert_called_with('/home')
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 2
},
"num_modified_files": 1
} | unknown | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | certifi==2025.1.31
charset-normalizer==3.4.1
coverage==7.8.0
coveralls==4.0.1
docopt==0.6.2
exceptiongroup==1.2.2
idna==3.10
iniconfig==2.1.0
mock==1.0.1
packaging==24.2
pluggy==1.5.0
pytest==8.3.5
pytest-cov==6.0.0
requests==2.32.3
tomli==2.2.1
urllib3==2.3.0
-e git+https://github.com/pozytywnie/webapp-health-monitor.git@64bb87f0c5c8ec9863b7daf1fdd3f7ff6532738f#egg=Webapp_Health_Monitor
| name: webapp-health-monitor
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- certifi==2025.1.31
- charset-normalizer==3.4.1
- coverage==7.8.0
- coveralls==4.0.1
- docopt==0.6.2
- exceptiongroup==1.2.2
- idna==3.10
- iniconfig==2.1.0
- mock==1.0.1
- packaging==24.2
- pluggy==1.5.0
- pytest==8.3.5
- pytest-cov==6.0.0
- requests==2.32.3
- tomli==2.2.1
- urllib3==2.3.0
prefix: /opt/conda/envs/webapp-health-monitor
| [
"tests/test_verificators.py::RangeVerificatorTest::test_bad_bounds_raises_bad_configuration",
"tests/test_verificators.py::RangeVerificatorTest::test_get_value",
"tests/test_verificators.py::RangeVerificatorTest::test_lack_of_bounds_raises_bad_configuration",
"tests/test_verificators.py::RangeVerificatorTest::test_lack_of_value_extractor_raises_bad_configuration",
"tests/test_verificators.py::RangeVerificatorTest::test_value_below_lower_bound_raises_verification_error",
"tests/test_verificators.py::RangeVerificatorTest::test_value_over_upper_bound_raises_verification_error",
"tests/test_verificators.py::FreeDiskSpaceVerificatorTest::test_using_value_extractor",
"tests/test_verificators.py::PercentUsedDiskSpaceVerificatorTest::test_using_value_extractor"
] | [] | [] | [] | MIT License | 20 | 220 | [
"webapp_health_monitor/verificators/base.py"
] |
|
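
Side note on the pozytywnie__webapp-health-monitor-12 record above: the issue asks to stop threading a custom logger object through `Verificator.__init__` and to rely on the standard library instead, which is what the patch does by dropping the `logger` argument. A minimal sketch of the usual module-level logging pattern that replaces such forwarding; the class and messages below are illustrative, not taken from the repository.

```python
import logging

# One logger per module, looked up by module name, instead of a logger object
# passed into every instance.
logger = logging.getLogger(__name__)


class DiskSpaceCheck(object):
    """Illustrative range check, not the library's verificator class."""

    def run(self, lower_bound, value, upper_bound):
        logger.info('checking %s <= %s <= %s', lower_bound, value, upper_bound)
        return lower_bound <= value <= upper_bound


if __name__ == '__main__':
    logging.basicConfig(level=logging.INFO)
    print(DiskSpaceCheck().run(0, 42, 100))
```
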
ipython__ipython-7469 | 296f56bf70643d1d19ae0c1ab2a9d86b326d5559 | 2015-01-15 00:40:54 | 148242288b1aeecf899f0d1fb086d13f37024c53 | diff --git a/IPython/utils/io.py b/IPython/utils/io.py
index df1e39e60..3d236eb4d 100644
--- a/IPython/utils/io.py
+++ b/IPython/utils/io.py
@@ -267,17 +267,18 @@ def atomic_writing(path, text=True, encoding='utf-8', **kwargs):
path = os.path.join(os.path.dirname(path), os.readlink(path))
dirname, basename = os.path.split(path)
- handle, tmp_path = tempfile.mkstemp(prefix=basename, dir=dirname)
+ tmp_dir = tempfile.mkdtemp(prefix=basename, dir=dirname)
+ tmp_path = os.path.join(tmp_dir, basename)
if text:
- fileobj = io.open(handle, 'w', encoding=encoding, **kwargs)
+ fileobj = io.open(tmp_path, 'w', encoding=encoding, **kwargs)
else:
- fileobj = io.open(handle, 'wb', **kwargs)
+ fileobj = io.open(tmp_path, 'wb', **kwargs)
try:
yield fileobj
except:
fileobj.close()
- os.remove(tmp_path)
+ shutil.rmtree(tmp_dir)
raise
# Flush to disk
@@ -299,6 +300,7 @@ def atomic_writing(path, text=True, encoding='utf-8', **kwargs):
os.remove(path)
os.rename(tmp_path, path)
+ shutil.rmtree(tmp_dir)
def raw_print(*args, **kw):
| atomic write umask
Reported on gitter.im/ipython/ipython by @bigzachattack
Sadly no one was on the chat at that time, too busy with a big bearded man
dressed in red trying to smuggle things into our houses through the chimney.
```
I noticed today, working in master, that any new notebook I create or copy has permissions
0o600 and ignores my umask. IPython.utils.io.atomic_writing() uses mkstemp() to create
a temporary file for the atomic write. According to the python docs, mkstemp creates
files as 0o600.
After the write succeeds to the tmp file, _copy_metadata is called to copy the metadata
from the original file to destination file. It will throw an exception if there is no
source file. Thus when the notebook is copied into the notebook dir, it has
permissions 0o600.
Is this desired behavior, temporary, or a bug? I work in an environment where our default
permissions are 0o660 to allow users to easily share information, so defaulting new
notebooks to 0o600 seriously inhibits this ability.
``` | ipython/ipython | diff --git a/IPython/utils/tests/test_io.py b/IPython/utils/tests/test_io.py
index 023c9641b..aa00a882b 100644
--- a/IPython/utils/tests/test_io.py
+++ b/IPython/utils/tests/test_io.py
@@ -1,16 +1,9 @@
# encoding: utf-8
"""Tests for io.py"""
-#-----------------------------------------------------------------------------
-# Copyright (C) 2008-2011 The IPython Development Team
-#
-# Distributed under the terms of the BSD License. The full license is in
-# the file COPYING, distributed as part of this software.
-#-----------------------------------------------------------------------------
-
-#-----------------------------------------------------------------------------
-# Imports
-#-----------------------------------------------------------------------------
+# Copyright (c) IPython Development Team.
+# Distributed under the terms of the Modified BSD License.
+
from __future__ import print_function
from __future__ import absolute_import
@@ -24,7 +17,7 @@
import nose.tools as nt
-from IPython.testing.decorators import skipif
+from IPython.testing.decorators import skipif, skip_win32
from IPython.utils.io import (Tee, capture_output, unicode_std_stream,
atomic_writing,
)
@@ -36,10 +29,6 @@
else:
from StringIO import StringIO
-#-----------------------------------------------------------------------------
-# Tests
-#-----------------------------------------------------------------------------
-
def test_tee_simple():
"Very simple check with stdout only"
@@ -177,6 +166,33 @@ class CustomExc(Exception): pass
with stdlib_io.open(f1, 'r') as f:
nt.assert_equal(f.read(), u'written from symlink')
+def _save_umask():
+ global umask
+ umask = os.umask(0)
+ os.umask(umask)
+
+def _restore_umask():
+ os.umask(umask)
+
+@skip_win32
[email protected]_setup(_save_umask, _restore_umask)
+def test_atomic_writing_umask():
+ with TemporaryDirectory() as td:
+ os.umask(0o022)
+ f1 = os.path.join(td, '1')
+ with atomic_writing(f1) as f:
+ f.write(u'1')
+ mode = stat.S_IMODE(os.stat(f1).st_mode)
+ nt.assert_equal(mode, 0o644, '{:o} != 644'.format(mode))
+
+ os.umask(0o057)
+ f2 = os.path.join(td, '2')
+ with atomic_writing(f2) as f:
+ f.write(u'2')
+ mode = stat.S_IMODE(os.stat(f2).st_mode)
+ nt.assert_equal(mode, 0o620, '{:o} != 620'.format(mode))
+
+
def test_atomic_writing_newlines():
with TemporaryDirectory() as td:
path = os.path.join(td, 'testfile')
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 2
},
"num_modified_files": 1
} | 2.3 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[all]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"mock",
"sphinx",
"pandoc",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.7",
"reqs_path": [
"docs/source/install/install.rst"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | alabaster==0.7.13
attrs @ file:///croot/attrs_1668696182826/work
Babel==2.14.0
certifi @ file:///croot/certifi_1671487769961/work/certifi
charset-normalizer==3.4.1
docutils==0.19
flit_core @ file:///opt/conda/conda-bld/flit-core_1644941570762/work/source/flit_core
idna==3.10
imagesize==1.4.1
importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1648562407465/work
importlib-resources==5.12.0
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
-e git+https://github.com/ipython/ipython.git@296f56bf70643d1d19ae0c1ab2a9d86b326d5559#egg=ipython
Jinja2==3.1.6
jsonschema==4.17.3
MarkupSafe==2.1.5
mistune==3.0.2
mock==5.2.0
nose==1.3.7
numpydoc==1.5.0
packaging @ file:///croot/packaging_1671697413597/work
pandoc==2.4
pkgutil_resolve_name==1.3.10
pluggy @ file:///tmp/build/80754af9/pluggy_1648042572264/work
plumbum==1.8.3
ply==3.11
py @ file:///opt/conda/conda-bld/py_1644396412707/work
Pygments==2.17.2
pyrsistent==0.19.3
pytest==7.1.2
pytz==2025.2
pyzmq==26.2.1
requests==2.31.0
snowballstemmer==2.2.0
Sphinx==5.3.0
sphinxcontrib-applehelp==1.0.2
sphinxcontrib-devhelp==1.0.2
sphinxcontrib-htmlhelp==2.0.0
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-qthelp==1.0.3
sphinxcontrib-serializinghtml==1.1.5
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
tornado==6.2
typing_extensions @ file:///croot/typing_extensions_1669924550328/work
urllib3==2.0.7
zipp @ file:///croot/zipp_1672387121353/work
| name: ipython
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- attrs=22.1.0=py37h06a4308_0
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2022.12.7=py37h06a4308_0
- flit-core=3.6.0=pyhd3eb1b0_0
- importlib-metadata=4.11.3=py37h06a4308_0
- importlib_metadata=4.11.3=hd3eb1b0_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- packaging=22.0=py37h06a4308_0
- pip=22.3.1=py37h06a4308_0
- pluggy=1.0.0=py37h06a4308_1
- py=1.11.0=pyhd3eb1b0_0
- pytest=7.1.2=py37h06a4308_0
- python=3.7.16=h7a1cb2a_0
- readline=8.2=h5eee18b_0
- setuptools=65.6.3=py37h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py37h06a4308_0
- typing_extensions=4.4.0=py37h06a4308_0
- wheel=0.38.4=py37h06a4308_0
- xz=5.6.4=h5eee18b_1
- zipp=3.11.0=py37h06a4308_0
- zlib=1.2.13=h5eee18b_1
- pip:
- alabaster==0.7.13
- babel==2.14.0
- charset-normalizer==3.4.1
- docutils==0.19
- idna==3.10
- imagesize==1.4.1
- importlib-resources==5.12.0
- jinja2==3.1.6
- jsonschema==4.17.3
- markupsafe==2.1.5
- mistune==3.0.2
- mock==5.2.0
- nose==1.3.7
- numpydoc==1.5.0
- pandoc==2.4
- pkgutil-resolve-name==1.3.10
- plumbum==1.8.3
- ply==3.11
- pygments==2.17.2
- pyrsistent==0.19.3
- pytz==2025.2
- pyzmq==26.2.1
- requests==2.31.0
- snowballstemmer==2.2.0
- sphinx==5.3.0
- sphinxcontrib-applehelp==1.0.2
- sphinxcontrib-devhelp==1.0.2
- sphinxcontrib-htmlhelp==2.0.0
- sphinxcontrib-jsmath==1.0.1
- sphinxcontrib-qthelp==1.0.3
- sphinxcontrib-serializinghtml==1.1.5
- tornado==6.2
- urllib3==2.0.7
prefix: /opt/conda/envs/ipython
| [
"IPython/utils/tests/test_io.py::test_atomic_writing_umask"
] | [] | [
"IPython/utils/tests/test_io.py::test_tee_simple",
"IPython/utils/tests/test_io.py::TeeTestCase::test",
"IPython/utils/tests/test_io.py::test_io_init",
"IPython/utils/tests/test_io.py::test_capture_output",
"IPython/utils/tests/test_io.py::test_UnicodeStdStream",
"IPython/utils/tests/test_io.py::test_UnicodeStdStream_nowrap",
"IPython/utils/tests/test_io.py::test_atomic_writing",
"IPython/utils/tests/test_io.py::test_atomic_writing_newlines"
] | [] | BSD 3-Clause "New" or "Revised" License | 22 | 348 | [
"IPython/utils/io.py"
] |
|
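
Side note on the ipython__ipython-7469 record above: the patch works because `tempfile.mkstemp()` always creates its file with mode 0o600 regardless of the umask, whereas an ordinary `open()` inside a scratch directory gets 0o666 masked by the process umask, which is what the new test asserts. A small POSIX-only demonstration of the difference:

```python
import os
import stat
import tempfile

os.umask(0o022)  # with this umask, ordinary new files come out as 0o644

# tempfile.mkstemp() ignores the umask and always uses 0o600.
fd, tmp_path = tempfile.mkstemp()
os.close(fd)
print(oct(stat.S_IMODE(os.stat(tmp_path).st_mode)))  # 0o600
os.remove(tmp_path)

# A plain open() inside a fresh directory honours the umask, which is what the
# patched atomic_writing() relies on via mkdtemp() plus io.open().
tmp_dir = tempfile.mkdtemp()
path = os.path.join(tmp_dir, 'notebook.ipynb')
with open(path, 'w') as f:
    f.write('{}')
print(oct(stat.S_IMODE(os.stat(path).st_mode)))  # 0o644 under a 022 umask
os.remove(path)
os.rmdir(tmp_dir)
```
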
martinblech__xmltodict-81 | a3a95592b875cc3d2472a431a197c9c1a5d8a788 | 2015-01-18 14:47:10 | b80fc18b7dbf278bf460f514fb4ead693c60d6f7 | diff --git a/xmltodict.py b/xmltodict.py
index 4fdbb16..b0ba601 100755
--- a/xmltodict.py
+++ b/xmltodict.py
@@ -318,7 +318,8 @@ def unparse(input_dict, output=None, encoding='utf-8', full_document=True,
can be customized with the `newl` and `indent` parameters.
"""
- ((key, value),) = input_dict.items()
+ if full_document and len(input_dict) != 1:
+ raise ValueError('Document must have exactly one root.')
must_return = False
if output is None:
output = StringIO()
@@ -326,7 +327,8 @@ def unparse(input_dict, output=None, encoding='utf-8', full_document=True,
content_handler = XMLGenerator(output, encoding)
if full_document:
content_handler.startDocument()
- _emit(key, value, content_handler, **kwargs)
+ for key, value in input_dict.items():
+ _emit(key, value, content_handler, **kwargs)
if full_document:
content_handler.endDocument()
if must_return:
| Parameter to Disable Multiple Root Check
I'm trying to convert a dict to an xml snippet, but this xml snippet is just supposed to be part of a later full document, so it may or may not have one root element. Unfortunately a ValueError is thrown if there is more than one possible root element - it would be great if there was a keyword parameter to disable that check. (Or perhaps when `full_document=False`)
So for example:
```python
xmltodict.unparse({'node': [1,2,3]}, full_document=False)
# would produce something like this without throwing an error:
"""<node>1</node>
<node>2</node>
<node>3</node>
"""
``` | martinblech/xmltodict | diff --git a/tests/test_dicttoxml.py b/tests/test_dicttoxml.py
index e449316..4b1d4b8 100644
--- a/tests/test_dicttoxml.py
+++ b/tests/test_dicttoxml.py
@@ -49,10 +49,21 @@ class DictToXMLTestCase(unittest.TestCase):
self.assertEqual(obj, parse(unparse(obj)))
self.assertEqual(unparse(obj), unparse(parse(unparse(obj))))
+ def test_no_root(self):
+ self.assertRaises(ValueError, unparse, {})
+
def test_multiple_roots(self):
self.assertRaises(ValueError, unparse, {'a': '1', 'b': '2'})
self.assertRaises(ValueError, unparse, {'a': ['1', '2', '3']})
+ def test_no_root_nofulldoc(self):
+ self.assertEqual(unparse({}, full_document=False), '')
+
+ def test_multiple_roots_nofulldoc(self):
+ obj = OrderedDict((('a', 1), ('b', 2)))
+ xml = unparse(obj, full_document=False)
+ self.assertEqual(xml, '<a>1</a><b>2</b>')
+
def test_nested(self):
obj = {'a': {'b': '1', 'c': '2'}}
self.assertEqual(obj, parse(unparse(obj)))
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 2
},
"num_modified_files": 1
} | 0.9 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"nose",
"coverage",
"pytest"
],
"pre_install": null,
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | coverage==7.8.0
exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
nose==1.3.7
packaging @ file:///croot/packaging_1734472117206/work
pluggy @ file:///croot/pluggy_1733169602837/work
pytest @ file:///croot/pytest_1738938843180/work
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
-e git+https://github.com/martinblech/xmltodict.git@a3a95592b875cc3d2472a431a197c9c1a5d8a788#egg=xmltodict
| name: xmltodict
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- exceptiongroup=1.2.0=py39h06a4308_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- packaging=24.2=py39h06a4308_0
- pip=25.0=py39h06a4308_0
- pluggy=1.5.0=py39h06a4308_0
- pytest=8.3.4=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py39h06a4308_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- coverage==7.8.0
- nose==1.3.7
prefix: /opt/conda/envs/xmltodict
| [
"tests/test_dicttoxml.py::DictToXMLTestCase::test_multiple_roots_nofulldoc",
"tests/test_dicttoxml.py::DictToXMLTestCase::test_no_root_nofulldoc"
] | [] | [
"tests/test_dicttoxml.py::DictToXMLTestCase::test_attr_order_roundtrip",
"tests/test_dicttoxml.py::DictToXMLTestCase::test_attrib",
"tests/test_dicttoxml.py::DictToXMLTestCase::test_attrib_and_cdata",
"tests/test_dicttoxml.py::DictToXMLTestCase::test_cdata",
"tests/test_dicttoxml.py::DictToXMLTestCase::test_encoding",
"tests/test_dicttoxml.py::DictToXMLTestCase::test_fulldoc",
"tests/test_dicttoxml.py::DictToXMLTestCase::test_list",
"tests/test_dicttoxml.py::DictToXMLTestCase::test_multiple_roots",
"tests/test_dicttoxml.py::DictToXMLTestCase::test_nested",
"tests/test_dicttoxml.py::DictToXMLTestCase::test_no_root",
"tests/test_dicttoxml.py::DictToXMLTestCase::test_preprocessor",
"tests/test_dicttoxml.py::DictToXMLTestCase::test_preprocessor_skipkey",
"tests/test_dicttoxml.py::DictToXMLTestCase::test_pretty_print",
"tests/test_dicttoxml.py::DictToXMLTestCase::test_root",
"tests/test_dicttoxml.py::DictToXMLTestCase::test_semistructured",
"tests/test_dicttoxml.py::DictToXMLTestCase::test_simple_cdata"
] | [] | MIT License | 30 | 267 | [
"xmltodict.py"
] |
|
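
Side note on the martinblech__xmltodict-81 record above: with the patch applied, `unparse()` only enforces the single-root rule when `full_document=True`, so fragments with several roots, or none at all, become legal. A short usage sketch mirroring the new tests in the record; the `OrderedDict` just keeps the element order deterministic.

```python
from collections import OrderedDict

import xmltodict  # at the revision referenced by this record

# A full document still requires exactly one root ...
print(xmltodict.unparse({'root': {'node': [1, 2, 3]}}))

# ... but with full_document=False a fragment may have several roots, or none.
print(xmltodict.unparse(OrderedDict([('a', 1), ('b', 2)]), full_document=False))
# -> <a>1</a><b>2</b>

print(repr(xmltodict.unparse({}, full_document=False)))
# -> ''
```
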
jacebrowning__dropthebeat-25 | 3f0891ee65703490136f44851c06b8356992a05c | 2015-01-19 02:00:09 | 3f0891ee65703490136f44851c06b8356992a05c | diff --git a/dtb/gui.py b/dtb/gui.py
index f96defc..7651d52 100755
--- a/dtb/gui.py
+++ b/dtb/gui.py
@@ -193,13 +193,19 @@ class Application(ttk.Frame): # pragma: no cover - manual test, pylint: disable
def do_ignore(self):
"""Ignore selected songs."""
for index in (int(s) for s in self.listbox_incoming.curselection()):
- self.incoming[index].ignore()
+ song = self.incoming[index]
+ song.ignore()
self.update()
def do_download(self):
- """Download all songs."""
- for index in (int(s) for s in self.listbox_incoming.curselection()):
- self.incoming[index].download()
+ """Download selected songs."""
+ indicies = (int(s) for s in self.listbox_incoming.curselection())
+ try:
+ for index in indicies:
+ song = self.incoming[index]
+ song.download(catch=False)
+ except IOError as exc:
+ self.show_error_from_exception(exc, "Download Error")
self.update()
def update(self):
@@ -219,6 +225,12 @@ class Application(ttk.Frame): # pragma: no cover - manual test, pylint: disable
for song in self.incoming:
self.listbox_incoming.insert(tk.END, song.in_string)
+ @staticmethod
+ def show_error_from_exception(exception, title="Error"):
+ """Convert an exception to an error dialog."""
+ message = str(exception).capitalize().replace(": ", ":\n\n")
+ messagebox.showerror(title, message)
+
def main(args=None):
"""Process command-line arguments and run the program."""
diff --git a/dtb/song.py b/dtb/song.py
index 7bd39a9..5df079d 100755
--- a/dtb/song.py
+++ b/dtb/song.py
@@ -67,7 +67,7 @@ class Song(object):
filename = os.path.basename(self.source)
return "{} (to {})".format(filename, self.friendname)
- def download(self):
+ def download(self, catch=True):
"""Move the song to the user's download directory.
@return: path to downloaded file or None on broken links
@@ -78,6 +78,9 @@ class Song(object):
dst = None
# Move the file or copy from the link
try:
+ if not os.path.isdir(self.downloads):
+ msg = "invalid download location: {}".format(self.downloads)
+ raise IOError(msg)
if src == self.path:
logging.info("moving {}...".format(src))
# Copy then delete in case the operation is cancelled
@@ -95,8 +98,9 @@ class Song(object):
logging.warning("broken link: {}".format(self.path))
os.remove(self.path)
except IOError as error:
- # TODO: these errors need to be left uncaught for the GUI
- logging.warning(error)
+ logging.error(error)
+ if not catch:
+ raise
return dst
def ignore(self):
| Strange behavior when the download path does not exist
If my download path does not exist on my machine, I end up with a file named after the directory that was supposed to be there. For example:
Download path: `~/jkloo/downloads/fake`
The directory `fake` does not exist. The result is that the file (`testfile.jpg`) is downloaded as a file named `fake` in the directory `~/jkloo/downloads`. Adding the `.jpg` extension and opening the image reveals the expected file.
This is likely only an issue when a user manually changes their download path in `info.yml`, or the first time the GUI is run on a machine / for a user.
Possible fixes:
1. only save the download path if it exists
2. create the download directory if it doesn't exist
3. show an error pop-up if the path does not exist
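A minimal, self-contained sketch of the kind of guard the third option implies (`src` and `downloads` are hypothetical names, not DropTheBeat's actual API):
```python
import os
import shutil

def download(src, downloads):
    # Without this check, shutil.copy(src, downloads) against a missing
    # `downloads` directory silently creates a *file* named like that
    # directory -- the behaviour described above.
    if not os.path.isdir(downloads):
        raise IOError("invalid download location: {}".format(downloads))
    dst = os.path.join(downloads, os.path.basename(src))
    shutil.copy(src, dst)
    return dst
```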
| jacebrowning/dropthebeat | diff --git a/dtb/test/test_song.py b/dtb/test/test_song.py
index c80778a..250dfa8 100644
--- a/dtb/test/test_song.py
+++ b/dtb/test/test_song.py
@@ -100,10 +100,22 @@ class TestSong(unittest.TestCase): # pylint: disable=R0904
mock_remove.assert_called_once_with(self.broken.path)
@patch('os.remove', Mock(side_effect=IOError))
- def test_download_error(self):
+ def test_download_error_caught(self):
"""Verify errors are caught while downloading."""
self.song.download()
+ @patch('os.remove', Mock(side_effect=IOError))
+ def test_download_error_uncaught(self):
+ """Verify errors are not caught while downloading if requested."""
+ self.assertRaises(IOError, self.song.download, catch=False)
+
+ @patch('os.remove')
+ @patch('os.path.isdir', Mock(return_value=False))
+ def test_download_invalid_dest(self, mock_remove):
+ """Verify downloads are only attempted with a valid destination."""
+ self.song.download()
+ self.assertFalse(mock_remove.called)
+
@patch('os.remove')
def test_ignore(self, mock_remove):
"""Verify a song can be ignored."""
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_media",
"has_many_modified_files",
"has_many_hunks",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 1
},
"num_modified_files": 2
} | 0.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "python setup.py install",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"nose",
"pytest"
],
"pre_install": null,
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | DropTheBeat==0.1.dev0
exceptiongroup==1.2.2
iniconfig==2.1.0
nose==1.3.7
packaging==24.2
pluggy==1.5.0
pytest==8.3.5
PyYAML==3.13
tomli==2.2.1
| name: dropthebeat
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- dropthebeat==0.1.dev0
- exceptiongroup==1.2.2
- iniconfig==2.1.0
- nose==1.3.7
- packaging==24.2
- pluggy==1.5.0
- pytest==8.3.5
- pyyaml==3.13
- tomli==2.2.1
prefix: /opt/conda/envs/dropthebeat
| [
"dtb/test/test_song.py::TestSong::test_download_error_uncaught"
] | [] | [
"dtb/test/test_song.py::TestSong::test_download_broken",
"dtb/test/test_song.py::TestSong::test_download_error_caught",
"dtb/test/test_song.py::TestSong::test_download_invalid_dest",
"dtb/test/test_song.py::TestSong::test_download_link",
"dtb/test/test_song.py::TestSong::test_download_song",
"dtb/test/test_song.py::TestSong::test_ignore",
"dtb/test/test_song.py::TestSong::test_in_string",
"dtb/test/test_song.py::TestSong::test_link",
"dtb/test/test_song.py::TestSong::test_link_missing_directory",
"dtb/test/test_song.py::TestSong::test_out_string",
"dtb/test/test_song.py::TestSong::test_source_file",
"dtb/test/test_song.py::TestSong::test_source_file_bad",
"dtb/test/test_song.py::TestSong::test_source_link",
"dtb/test/test_song.py::TestSong::test_source_song",
"dtb/test/test_song.py::TestSong::test_str"
] | [] | The MIT License (MIT) | 31 | 726 | [
"dtb/gui.py",
"dtb/song.py"
] |
|
pre-commit__pre-commit-hooks-39 | 9f107a03276857c668fe3e090752d3d22a4195e5 | 2015-02-27 02:24:38 | f82fb149af2c1b552b50e3e38e38ed3a44d4cda1 | diff --git a/pre_commit_hooks/autopep8_wrapper.py b/pre_commit_hooks/autopep8_wrapper.py
index a79a120..f6f55fb 100644
--- a/pre_commit_hooks/autopep8_wrapper.py
+++ b/pre_commit_hooks/autopep8_wrapper.py
@@ -10,7 +10,7 @@ import autopep8
def main(argv=None):
argv = argv if argv is not None else sys.argv[1:]
- args = autopep8.parse_args(argv)
+ args = autopep8.parse_args(argv, apply_config=True)
retv = 0
for filename in args.files:
diff --git a/setup.py b/setup.py
index 4fb9139..b86acd1 100644
--- a/setup.py
+++ b/setup.py
@@ -27,7 +27,7 @@ setup(
packages=find_packages('.', exclude=('tests*', 'testing*')),
install_requires=[
'argparse',
- 'autopep8',
+ 'autopep8>=1.1',
'flake8',
'plumbum',
'pyflakes',
| Autopep8 doesn't respect the pep8 section in setup.cfg
Since https://github.com/hhatto/autopep8/pull/167, autopep8 reads the `pep8` section from a project's `tox.ini` or `setup.cfg`. However, the autopep8 hook ignores this, because it calls `autopep8.parse_args()` and `autopep8.fix_code()` without the `apply_config` argument, which defaults to `False`.
Any way we could get this to work? | pre-commit/pre-commit-hooks | diff --git a/tests/autopep8_wrapper_test.py b/tests/autopep8_wrapper_test.py
index f32e8a0..9a395c9 100644
--- a/tests/autopep8_wrapper_test.py
+++ b/tests/autopep8_wrapper_test.py
@@ -2,7 +2,7 @@ from __future__ import absolute_import
from __future__ import unicode_literals
import io
-import os.path
+import os
import pytest
@@ -17,9 +17,30 @@ from pre_commit_hooks.autopep8_wrapper import main
),
)
def test_main_failing(tmpdir, input_src, expected_ret, output_src):
- filename = os.path.join(tmpdir.strpath, 'test.py')
+ filename = tmpdir.join('test.py').strpath
with io.open(filename, 'w') as file_obj:
file_obj.write(input_src)
ret = main([filename, '-i', '-v'])
assert ret == expected_ret
assert io.open(filename).read() == output_src
+
+
[email protected]_fixture
+def in_tmpdir(tmpdir):
+ pwd = os.getcwd()
+ os.chdir(tmpdir.strpath)
+ try:
+ yield
+ finally:
+ os.chdir(pwd)
+
+
[email protected]('in_tmpdir')
+def test_respects_config_file():
+ with io.open('setup.cfg', 'w') as setup_cfg:
+ setup_cfg.write('[pep8]\nignore=E221')
+
+ with io.open('test.py', 'w') as test_py:
+ test_py.write('print(1 + 2)\n')
+
+ assert main(['test.py', '-i', '-v']) == 0
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 3
},
"num_modified_files": 2
} | 0.4 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"flake8",
"pylint"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | astroid==3.3.9
autopep8==2.3.2
dill==0.3.9
exceptiongroup==1.2.2
flake8==7.2.0
iniconfig==2.1.0
isort==6.0.1
mccabe==0.7.0
packaging==24.2
platformdirs==4.3.7
pluggy==1.5.0
plumbum==1.9.0
-e git+https://github.com/pre-commit/pre-commit-hooks.git@9f107a03276857c668fe3e090752d3d22a4195e5#egg=pre_commit_hooks
pycodestyle==2.13.0
pyflakes==3.3.1
pylint==3.3.6
pytest==8.3.5
PyYAML==6.0.2
simplejson==3.20.1
swebench_matterhorn @ file:///swebench_matterhorn
tomli==2.2.1
tomlkit==0.13.2
typing_extensions==4.13.0
| name: pre-commit-hooks
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- argparse==1.4.0
- astroid==3.3.9
- autopep8==2.3.2
- dill==0.3.9
- exceptiongroup==1.2.2
- flake8==7.2.0
- iniconfig==2.1.0
- isort==6.0.1
- mccabe==0.7.0
- packaging==24.2
- platformdirs==4.3.7
- pluggy==1.5.0
- plumbum==1.9.0
- pycodestyle==2.13.0
- pyflakes==3.3.1
- pylint==3.3.6
- pytest==8.3.5
- pyyaml==6.0.2
- simplejson==3.20.1
- swebench-matterhorn==0.0.0
- tomli==2.2.1
- tomlkit==0.13.2
- typing-extensions==4.13.0
prefix: /opt/conda/envs/pre-commit-hooks
| [
"tests/autopep8_wrapper_test.py::test_respects_config_file"
] | [] | [
"tests/autopep8_wrapper_test.py::test_main_failing[print(1"
] | [] | MIT License | 46 | 267 | [
"pre_commit_hooks/autopep8_wrapper.py",
"setup.py"
] |
|
tornadoweb__tornado-1373 | cf2a54794ff5067d6d815013d6570ee10f74d5e5 | 2015-03-09 04:21:31 | cf2a54794ff5067d6d815013d6570ee10f74d5e5 | diff --git a/tornado/httpserver.py b/tornado/httpserver.py
index 226f966a..13a6e92f 100644
--- a/tornado/httpserver.py
+++ b/tornado/httpserver.py
@@ -37,11 +37,9 @@ from tornado import httputil
from tornado import iostream
from tornado import netutil
from tornado.tcpserver import TCPServer
-from tornado.util import Configurable
-class HTTPServer(TCPServer, Configurable,
- httputil.HTTPServerConnectionDelegate):
+class HTTPServer(TCPServer, httputil.HTTPServerConnectionDelegate):
r"""A non-blocking, single-threaded HTTP server.
A server is defined by a subclass of `.HTTPServerConnectionDelegate`,
@@ -122,20 +120,12 @@ class HTTPServer(TCPServer, Configurable,
two arguments ``(server_conn, request_conn)`` (in accordance with the
documentation) instead of one ``(request_conn)``.
"""
- def __init__(self, *args, **kwargs):
- # Ignore args to __init__; real initialization belongs in
- # initialize since we're Configurable. (there's something
- # weird in initialization order between this class,
- # Configurable, and TCPServer so we can't leave __init__ out
- # completely)
- pass
-
- def initialize(self, request_callback, no_keep_alive=False, io_loop=None,
- xheaders=False, ssl_options=None, protocol=None,
- decompress_request=False,
- chunk_size=None, max_header_size=None,
- idle_connection_timeout=None, body_timeout=None,
- max_body_size=None, max_buffer_size=None):
+ def __init__(self, request_callback, no_keep_alive=False, io_loop=None,
+ xheaders=False, ssl_options=None, protocol=None,
+ decompress_request=False,
+ chunk_size=None, max_header_size=None,
+ idle_connection_timeout=None, body_timeout=None,
+ max_body_size=None, max_buffer_size=None):
self.request_callback = request_callback
self.no_keep_alive = no_keep_alive
self.xheaders = xheaders
@@ -152,14 +142,6 @@ class HTTPServer(TCPServer, Configurable,
read_chunk_size=chunk_size)
self._connections = set()
- @classmethod
- def configurable_base(cls):
- return HTTPServer
-
- @classmethod
- def configurable_default(cls):
- return HTTPServer
-
@gen.coroutine
def close_all_connections(self):
while self._connections:
diff --git a/tornado/simple_httpclient.py b/tornado/simple_httpclient.py
index 6321a81d..f3cb1b86 100644
--- a/tornado/simple_httpclient.py
+++ b/tornado/simple_httpclient.py
@@ -135,14 +135,10 @@ class SimpleAsyncHTTPClient(AsyncHTTPClient):
release_callback = functools.partial(self._release_fetch, key)
self._handle_request(request, release_callback, callback)
- def _connection_class(self):
- return _HTTPConnection
-
def _handle_request(self, request, release_callback, final_callback):
- self._connection_class()(
- self.io_loop, self, request, release_callback,
- final_callback, self.max_buffer_size, self.tcp_client,
- self.max_header_size)
+ _HTTPConnection(self.io_loop, self, request, release_callback,
+ final_callback, self.max_buffer_size, self.tcp_client,
+ self.max_header_size)
def _release_fetch(self, key):
del self.active[key]
@@ -352,7 +348,14 @@ class _HTTPConnection(httputil.HTTPMessageDelegate):
self.request.headers["Accept-Encoding"] = "gzip"
req_path = ((self.parsed.path or '/') +
(('?' + self.parsed.query) if self.parsed.query else ''))
- self.connection = self._create_connection(stream)
+ self.stream.set_nodelay(True)
+ self.connection = HTTP1Connection(
+ self.stream, True,
+ HTTP1ConnectionParameters(
+ no_keep_alive=True,
+ max_header_size=self.max_header_size,
+ decompress=self.request.decompress_response),
+ self._sockaddr)
start_line = httputil.RequestStartLine(self.request.method,
req_path, '')
self.connection.write_headers(start_line, self.request.headers)
@@ -361,20 +364,10 @@ class _HTTPConnection(httputil.HTTPMessageDelegate):
else:
self._write_body(True)
- def _create_connection(self, stream):
- stream.set_nodelay(True)
- connection = HTTP1Connection(
- stream, True,
- HTTP1ConnectionParameters(
- no_keep_alive=True,
- max_header_size=self.max_header_size,
- decompress=self.request.decompress_response),
- self._sockaddr)
- return connection
-
def _write_body(self, start_read):
if self.request.body is not None:
self.connection.write(self.request.body)
+ self.connection.finish()
elif self.request.body_producer is not None:
fut = self.request.body_producer(self.connection.write)
if is_future(fut):
@@ -385,7 +378,7 @@ class _HTTPConnection(httputil.HTTPMessageDelegate):
self._read_response()
self.io_loop.add_future(fut, on_body_written)
return
- self.connection.finish()
+ self.connection.finish()
if start_read:
self._read_response()
diff --git a/tornado/util.py b/tornado/util.py
index 606ced19..d943ce2b 100644
--- a/tornado/util.py
+++ b/tornado/util.py
@@ -198,21 +198,21 @@ class Configurable(object):
__impl_class = None
__impl_kwargs = None
- def __new__(cls, *args, **kwargs):
+ def __new__(cls, **kwargs):
base = cls.configurable_base()
- init_kwargs = {}
+ args = {}
if cls is base:
impl = cls.configured_class()
if base.__impl_kwargs:
- init_kwargs.update(base.__impl_kwargs)
+ args.update(base.__impl_kwargs)
else:
impl = cls
- init_kwargs.update(kwargs)
+ args.update(kwargs)
instance = super(Configurable, cls).__new__(impl)
# initialize vs __init__ chosen for compatibility with AsyncHTTPClient
# singleton magic. If we get rid of that we can switch to __init__
# here too.
- instance.initialize(*args, **init_kwargs)
+ instance.initialize(**args)
return instance
@classmethod
@@ -233,9 +233,6 @@ class Configurable(object):
"""Initialize a `Configurable` subclass instance.
Configurable classes should use `initialize` instead of ``__init__``.
-
- .. versionchanged:: 4.2
- Now accepts positional arguments in addition to keyword arguments.
"""
@classmethod
diff --git a/tornado/web.py b/tornado/web.py
index 62f3779d..155da550 100644
--- a/tornado/web.py
+++ b/tornado/web.py
@@ -650,8 +650,7 @@ class RequestHandler(object):
else:
assert isinstance(status, int) and 300 <= status <= 399
self.set_status(status)
- self.set_header("Location", urlparse.urljoin(utf8(self.request.uri),
- utf8(url)))
+ self.set_header("Location", utf8(url))
self.finish()
def write(self, chunk):
| Redirects for requests starting with '//' lead to the wrong place
Tornado uses `urljoin` to join `self.request.uri` and the redirect destination, but when `self.request.uri` starts with '//' the generated Location still starts with '//' because of this behaviour of `urljoin`:
```
>>> from urllib.parse import urljoin
>>> urljoin('//abc', '/abc')
'//abc/abc'
```
I suggest using `self.request.full_url()` instead. Also, the HTTP specification says that the Location header should include the host part.
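A minimal sketch (not the change Tornado actually shipped) of one way to neutralise the scheme-relative result, by collapsing a leading '//' in the request URI before joining:
```python
from urllib.parse import urljoin

def safe_location(request_uri, target):
    # A base beginning with '//' is treated by urljoin as scheme-relative
    # (a netloc), so collapse the leading slashes to a single '/' first.
    if request_uri.startswith('//'):
        request_uri = '/' + request_uri.lstrip('/')
    return urljoin(request_uri, target)

assert safe_location('//abc', '/abc') == '/abc'
```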
PS: `self.request.full_url()` doesn't work for proxy requests that have full urls in their request line. | tornadoweb/tornado | diff --git a/tornado/test/httpserver_test.py b/tornado/test/httpserver_test.py
index c1ba831c..62ef6ca3 100644
--- a/tornado/test/httpserver_test.py
+++ b/tornado/test/httpserver_test.py
@@ -162,22 +162,19 @@ class BadSSLOptionsTest(unittest.TestCase):
application = Application()
module_dir = os.path.dirname(__file__)
existing_certificate = os.path.join(module_dir, 'test.crt')
- existing_key = os.path.join(module_dir, 'test.key')
- self.assertRaises((ValueError, IOError),
- HTTPServer, application, ssl_options={
- "certfile": "/__mising__.crt",
+ self.assertRaises(ValueError, HTTPServer, application, ssl_options={
+ "certfile": "/__mising__.crt",
})
- self.assertRaises((ValueError, IOError),
- HTTPServer, application, ssl_options={
- "certfile": existing_certificate,
- "keyfile": "/__missing__.key"
+ self.assertRaises(ValueError, HTTPServer, application, ssl_options={
+ "certfile": existing_certificate,
+ "keyfile": "/__missing__.key"
})
# This actually works because both files exist
HTTPServer(application, ssl_options={
"certfile": existing_certificate,
- "keyfile": existing_key,
+ "keyfile": existing_certificate
})
diff --git a/tornado/test/runtests.py b/tornado/test/runtests.py
index cb9969d3..20133d4e 100644
--- a/tornado/test/runtests.py
+++ b/tornado/test/runtests.py
@@ -8,7 +8,6 @@ import operator
import textwrap
import sys
from tornado.httpclient import AsyncHTTPClient
-from tornado.httpserver import HTTPServer
from tornado.ioloop import IOLoop
from tornado.netutil import Resolver
from tornado.options import define, options, add_parse_callback
@@ -124,8 +123,6 @@ def main():
define('httpclient', type=str, default=None,
callback=lambda s: AsyncHTTPClient.configure(
s, defaults=dict(allow_ipv6=False)))
- define('httpserver', type=str, default=None,
- callback=HTTPServer.configure)
define('ioloop', type=str, default=None)
define('ioloop_time_monotonic', default=False)
define('resolver', type=str, default=None,
diff --git a/tornado/test/util_test.py b/tornado/test/util_test.py
index 0936c89a..a0fbae43 100644
--- a/tornado/test/util_test.py
+++ b/tornado/test/util_test.py
@@ -46,15 +46,13 @@ class TestConfigurable(Configurable):
class TestConfig1(TestConfigurable):
- def initialize(self, pos_arg=None, a=None):
+ def initialize(self, a=None):
self.a = a
- self.pos_arg = pos_arg
class TestConfig2(TestConfigurable):
- def initialize(self, pos_arg=None, b=None):
+ def initialize(self, b=None):
self.b = b
- self.pos_arg = pos_arg
class ConfigurableTest(unittest.TestCase):
@@ -104,10 +102,9 @@ class ConfigurableTest(unittest.TestCase):
self.assertIsInstance(obj, TestConfig1)
self.assertEqual(obj.a, 3)
- obj = TestConfigurable(42, a=4)
+ obj = TestConfigurable(a=4)
self.assertIsInstance(obj, TestConfig1)
self.assertEqual(obj.a, 4)
- self.assertEqual(obj.pos_arg, 42)
self.checkSubclasses()
# args bound in configure don't apply when using the subclass directly
@@ -120,10 +117,9 @@ class ConfigurableTest(unittest.TestCase):
self.assertIsInstance(obj, TestConfig2)
self.assertEqual(obj.b, 5)
- obj = TestConfigurable(42, b=6)
+ obj = TestConfigurable(b=6)
self.assertIsInstance(obj, TestConfig2)
self.assertEqual(obj.b, 6)
- self.assertEqual(obj.pos_arg, 42)
self.checkSubclasses()
# args bound in configure don't apply when using the subclass directly
diff --git a/tornado/test/web_test.py b/tornado/test/web_test.py
index f3c8505a..a52f1667 100644
--- a/tornado/test/web_test.py
+++ b/tornado/test/web_test.py
@@ -597,6 +597,7 @@ class WSGISafeWebTest(WebTestCase):
url("/redirect", RedirectHandler),
url("/web_redirect_permanent", WebRedirectHandler, {"url": "/web_redirect_newpath"}),
url("/web_redirect", WebRedirectHandler, {"url": "/web_redirect_newpath", "permanent": False}),
+ url("//web_redirect_double_slash", WebRedirectHandler, {"url": '/web_redirect_newpath'}),
url("/header_injection", HeaderInjectionHandler),
url("/get_argument", GetArgumentHandler),
url("/get_arguments", GetArgumentsHandler),
@@ -730,6 +731,11 @@ js_embed()
self.assertEqual(response.code, 302)
self.assertEqual(response.headers['Location'], '/web_redirect_newpath')
+ def test_web_redirect_double_slash(self):
+ response = self.fetch("//web_redirect_double_slash", follow_redirects=False)
+ self.assertEqual(response.code, 301)
+ self.assertEqual(response.headers['Location'], '/web_redirect_newpath')
+
def test_header_injection(self):
response = self.fetch("/header_injection")
self.assertEqual(response.body, b"ok")
diff --git a/tornado/testing.py b/tornado/testing.py
index 93f0dbe1..3d3bcf72 100644
--- a/tornado/testing.py
+++ b/tornado/testing.py
@@ -417,8 +417,10 @@ class AsyncHTTPSTestCase(AsyncHTTPTestCase):
Interface is generally the same as `AsyncHTTPTestCase`.
"""
def get_http_client(self):
- return AsyncHTTPClient(io_loop=self.io_loop, force_instance=True,
- defaults=dict(validate_cert=False))
+ # Some versions of libcurl have deadlock bugs with ssl,
+ # so always run these tests with SimpleAsyncHTTPClient.
+ return SimpleAsyncHTTPClient(io_loop=self.io_loop, force_instance=True,
+ defaults=dict(validate_cert=False))
def get_httpserver_options(self):
return dict(ssl_options=self.get_ssl_options())
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 2
},
"num_modified_files": 4
} | 4.1 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.6",
"reqs_path": [
"maint/requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
autopep8==1.1
certifi==14.5.14
coverage==3.7.1
docutils==0.12
flake8==2.3.0
importlib-metadata==4.8.3
iniconfig==1.1.1
Jinja2==2.7.3
MarkupSafe==0.23
mccabe==0.3
packaging==21.3
pep8==1.6.0
pkginfo==1.2.1
pluggy==1.0.0
py==1.11.0
pycurl==7.19.5.1
pyflakes==0.8.1
Pygments==2.0.2
pyparsing==3.1.4
pytest==7.0.1
requests==2.5.1
Sphinx==1.2.3
sphinx-rtd-theme==0.1.6
tomli==1.2.3
-e git+https://github.com/tornadoweb/tornado.git@cf2a54794ff5067d6d815013d6570ee10f74d5e5#egg=tornado
tox==1.8.1
twine==1.4.0
Twisted==15.0.0
typing_extensions==4.1.1
virtualenv==12.0.7
zipp==3.6.0
zope.interface==4.1.2
| name: tornado
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- autopep8==1.1
- certifi==14.5.14
- coverage==3.7.1
- docutils==0.12
- flake8==2.3.0
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- jinja2==2.7.3
- markupsafe==0.23
- mccabe==0.3
- packaging==21.3
- pep8==1.6.0
- pkginfo==1.2.1
- pluggy==1.0.0
- py==1.11.0
- pycurl==7.19.5.1
- pyflakes==0.8.1
- pygments==2.0.2
- pyparsing==3.1.4
- pytest==7.0.1
- requests==2.5.1
- sphinx==1.2.3
- sphinx-rtd-theme==0.1.6
- tomli==1.2.3
- tox==1.8.1
- twine==1.4.0
- twisted==15.0.0
- typing-extensions==4.1.1
- virtualenv==12.0.7
- zipp==3.6.0
- zope-interface==4.1.2
prefix: /opt/conda/envs/tornado
| [
"tornado/test/web_test.py::WSGISafeWebTest::test_web_redirect_double_slash"
] | [
"tornado/test/httpserver_test.py::HTTPServerRawTest::test_malformed_first_line",
"tornado/test/httpserver_test.py::HTTPServerRawTest::test_malformed_headers",
"tornado/test/httpserver_test.py::UnixSocketTest::test_unix_socket_bad_request",
"tornado/test/httpserver_test.py::MaxHeaderSizeTest::test_large_headers",
"tornado/test/httpserver_test.py::BodyLimitsTest::test_body_size_override_reset",
"tornado/test/httpserver_test.py::BodyLimitsTest::test_large_body_buffered",
"tornado/test/httpserver_test.py::BodyLimitsTest::test_large_body_buffered_chunked",
"tornado/test/httpserver_test.py::BodyLimitsTest::test_large_body_streaming",
"tornado/test/httpserver_test.py::BodyLimitsTest::test_large_body_streaming_chunked",
"tornado/test/httpserver_test.py::BodyLimitsTest::test_timeout",
"tornado/test/web_test.py::ClearAllCookiesTest::test_clear_all_cookies"
] | [
"tornado/test/httpserver_test.py::SSLv23Test::test_large_post",
"tornado/test/httpserver_test.py::SSLv23Test::test_non_ssl_request",
"tornado/test/httpserver_test.py::SSLv23Test::test_ssl",
"tornado/test/httpserver_test.py::SSLv3Test::test_large_post",
"tornado/test/httpserver_test.py::SSLv3Test::test_non_ssl_request",
"tornado/test/httpserver_test.py::SSLv3Test::test_ssl",
"tornado/test/httpserver_test.py::TLSv1Test::test_large_post",
"tornado/test/httpserver_test.py::TLSv1Test::test_non_ssl_request",
"tornado/test/httpserver_test.py::TLSv1Test::test_ssl",
"tornado/test/httpserver_test.py::SSLContextTest::test_large_post",
"tornado/test/httpserver_test.py::SSLContextTest::test_non_ssl_request",
"tornado/test/httpserver_test.py::SSLContextTest::test_ssl",
"tornado/test/httpserver_test.py::BadSSLOptionsTest::test_missing_arguments",
"tornado/test/httpserver_test.py::BadSSLOptionsTest::test_missing_key",
"tornado/test/httpserver_test.py::HTTPConnectionTest::test_100_continue",
"tornado/test/httpserver_test.py::HTTPConnectionTest::test_multipart_form",
"tornado/test/httpserver_test.py::HTTPConnectionTest::test_newlines",
"tornado/test/httpserver_test.py::HTTPServerTest::test_double_slash",
"tornado/test/httpserver_test.py::HTTPServerTest::test_empty_post_parameters",
"tornado/test/httpserver_test.py::HTTPServerTest::test_empty_query_string",
"tornado/test/httpserver_test.py::HTTPServerTest::test_malformed_body",
"tornado/test/httpserver_test.py::HTTPServerTest::test_query_string_encoding",
"tornado/test/httpserver_test.py::HTTPServerTest::test_types",
"tornado/test/httpserver_test.py::HTTPServerRawTest::test_chunked_request_body",
"tornado/test/httpserver_test.py::HTTPServerRawTest::test_empty_request",
"tornado/test/httpserver_test.py::XHeaderTest::test_ip_headers",
"tornado/test/httpserver_test.py::XHeaderTest::test_scheme_headers",
"tornado/test/httpserver_test.py::SSLXHeaderTest::test_request_without_xprotocol",
"tornado/test/httpserver_test.py::ManualProtocolTest::test_manual_protocol",
"tornado/test/httpserver_test.py::UnixSocketTest::test_unix_socket",
"tornado/test/httpserver_test.py::KeepAliveTest::test_cancel_during_download",
"tornado/test/httpserver_test.py::KeepAliveTest::test_finish_while_closed",
"tornado/test/httpserver_test.py::KeepAliveTest::test_http10",
"tornado/test/httpserver_test.py::KeepAliveTest::test_http10_keepalive",
"tornado/test/httpserver_test.py::KeepAliveTest::test_http10_keepalive_extra_crlf",
"tornado/test/httpserver_test.py::KeepAliveTest::test_keepalive_chunked",
"tornado/test/httpserver_test.py::KeepAliveTest::test_pipelined_cancel",
"tornado/test/httpserver_test.py::KeepAliveTest::test_pipelined_requests",
"tornado/test/httpserver_test.py::KeepAliveTest::test_request_close",
"tornado/test/httpserver_test.py::KeepAliveTest::test_two_requests",
"tornado/test/httpserver_test.py::GzipTest::test_gzip",
"tornado/test/httpserver_test.py::GzipTest::test_uncompressed",
"tornado/test/httpserver_test.py::GzipUnsupportedTest::test_gzip_unsupported",
"tornado/test/httpserver_test.py::GzipUnsupportedTest::test_uncompressed",
"tornado/test/httpserver_test.py::StreamingChunkSizeTest::test_chunked_body",
"tornado/test/httpserver_test.py::StreamingChunkSizeTest::test_chunked_compressed",
"tornado/test/httpserver_test.py::StreamingChunkSizeTest::test_compressed_body",
"tornado/test/httpserver_test.py::StreamingChunkSizeTest::test_regular_body",
"tornado/test/httpserver_test.py::MaxHeaderSizeTest::test_small_headers",
"tornado/test/httpserver_test.py::IdleTimeoutTest::test_idle_after_use",
"tornado/test/httpserver_test.py::IdleTimeoutTest::test_unused_connection",
"tornado/test/httpserver_test.py::BodyLimitsTest::test_large_body_streaming_chunked_override",
"tornado/test/httpserver_test.py::BodyLimitsTest::test_large_body_streaming_override",
"tornado/test/httpserver_test.py::BodyLimitsTest::test_small_body",
"tornado/test/httpserver_test.py::LegacyInterfaceTest::test_legacy_interface",
"tornado/test/util_test.py::RaiseExcInfoTest::test_two_arg_exception",
"tornado/test/util_test.py::ConfigurableTest::test_config_args",
"tornado/test/util_test.py::ConfigurableTest::test_config_class",
"tornado/test/util_test.py::ConfigurableTest::test_config_class_args",
"tornado/test/util_test.py::ConfigurableTest::test_default",
"tornado/test/util_test.py::UnicodeLiteralTest::test_unicode_escapes",
"tornado/test/util_test.py::ArgReplacerTest::test_keyword",
"tornado/test/util_test.py::ArgReplacerTest::test_omitted",
"tornado/test/util_test.py::ArgReplacerTest::test_position",
"tornado/test/util_test.py::TimedeltaToSecondsTest::test_timedelta_to_seconds",
"tornado/test/util_test.py::ImportObjectTest::test_import_member",
"tornado/test/util_test.py::ImportObjectTest::test_import_member_unicode",
"tornado/test/util_test.py::ImportObjectTest::test_import_module",
"tornado/test/util_test.py::ImportObjectTest::test_import_module_unicode",
"tornado/test/web_test.py::SecureCookieV1Test::test_arbitrary_bytes",
"tornado/test/web_test.py::SecureCookieV1Test::test_cookie_tampering_future_timestamp",
"tornado/test/web_test.py::SecureCookieV1Test::test_round_trip",
"tornado/test/web_test.py::CookieTest::test_cookie_special_char",
"tornado/test/web_test.py::CookieTest::test_get_cookie",
"tornado/test/web_test.py::CookieTest::test_set_cookie",
"tornado/test/web_test.py::CookieTest::test_set_cookie_domain",
"tornado/test/web_test.py::CookieTest::test_set_cookie_expires_days",
"tornado/test/web_test.py::CookieTest::test_set_cookie_false_flags",
"tornado/test/web_test.py::CookieTest::test_set_cookie_max_age",
"tornado/test/web_test.py::CookieTest::test_set_cookie_overwrite",
"tornado/test/web_test.py::AuthRedirectTest::test_absolute_auth_redirect",
"tornado/test/web_test.py::AuthRedirectTest::test_relative_auth_redirect",
"tornado/test/web_test.py::ConnectionCloseTest::test_connection_close",
"tornado/test/web_test.py::RequestEncodingTest::test_group_encoding",
"tornado/test/web_test.py::RequestEncodingTest::test_group_question_mark",
"tornado/test/web_test.py::RequestEncodingTest::test_slashes",
"tornado/test/web_test.py::WSGISafeWebTest::test_decode_argument",
"tornado/test/web_test.py::WSGISafeWebTest::test_decode_argument_invalid_unicode",
"tornado/test/web_test.py::WSGISafeWebTest::test_decode_argument_plus",
"tornado/test/web_test.py::WSGISafeWebTest::test_get_argument",
"tornado/test/web_test.py::WSGISafeWebTest::test_get_body_arguments",
"tornado/test/web_test.py::WSGISafeWebTest::test_get_query_arguments",
"tornado/test/web_test.py::WSGISafeWebTest::test_header_injection",
"tornado/test/web_test.py::WSGISafeWebTest::test_multi_header",
"tornado/test/web_test.py::WSGISafeWebTest::test_no_gzip",
"tornado/test/web_test.py::WSGISafeWebTest::test_optional_path",
"tornado/test/web_test.py::WSGISafeWebTest::test_redirect",
"tornado/test/web_test.py::WSGISafeWebTest::test_reverse_url",
"tornado/test/web_test.py::WSGISafeWebTest::test_types",
"tornado/test/web_test.py::WSGISafeWebTest::test_uimodule_resources",
"tornado/test/web_test.py::WSGISafeWebTest::test_uimodule_unescaped",
"tornado/test/web_test.py::WSGISafeWebTest::test_web_redirect",
"tornado/test/web_test.py::NonWSGIWebTests::test_empty_flush",
"tornado/test/web_test.py::NonWSGIWebTests::test_flow_control",
"tornado/test/web_test.py::ErrorResponseTest::test_default",
"tornado/test/web_test.py::ErrorResponseTest::test_failed_write_error",
"tornado/test/web_test.py::ErrorResponseTest::test_write_error",
"tornado/test/web_test.py::StaticFileTest::test_absolute_static_url",
"tornado/test/web_test.py::StaticFileTest::test_absolute_version_exclusion",
"tornado/test/web_test.py::StaticFileTest::test_include_host_override",
"tornado/test/web_test.py::StaticFileTest::test_relative_version_exclusion",
"tornado/test/web_test.py::StaticFileTest::test_static_304_if_modified_since",
"tornado/test/web_test.py::StaticFileTest::test_static_304_if_none_match",
"tornado/test/web_test.py::StaticFileTest::test_static_404",
"tornado/test/web_test.py::StaticFileTest::test_static_etag",
"tornado/test/web_test.py::StaticFileTest::test_static_files",
"tornado/test/web_test.py::StaticFileTest::test_static_head",
"tornado/test/web_test.py::StaticFileTest::test_static_head_range",
"tornado/test/web_test.py::StaticFileTest::test_static_if_modified_since_pre_epoch",
"tornado/test/web_test.py::StaticFileTest::test_static_if_modified_since_time_zone",
"tornado/test/web_test.py::StaticFileTest::test_static_invalid_range",
"tornado/test/web_test.py::StaticFileTest::test_static_range_if_none_match",
"tornado/test/web_test.py::StaticFileTest::test_static_unsatisfiable_range_invalid_start",
"tornado/test/web_test.py::StaticFileTest::test_static_unsatisfiable_range_zero_suffix",
"tornado/test/web_test.py::StaticFileTest::test_static_url",
"tornado/test/web_test.py::StaticFileTest::test_static_with_range",
"tornado/test/web_test.py::StaticFileTest::test_static_with_range_end_edge",
"tornado/test/web_test.py::StaticFileTest::test_static_with_range_full_file",
"tornado/test/web_test.py::StaticFileTest::test_static_with_range_full_past_end",
"tornado/test/web_test.py::StaticFileTest::test_static_with_range_neg_end",
"tornado/test/web_test.py::StaticFileTest::test_static_with_range_partial_past_end",
"tornado/test/web_test.py::StaticDefaultFilenameTest::test_static_default_filename",
"tornado/test/web_test.py::StaticDefaultFilenameTest::test_static_default_redirect",
"tornado/test/web_test.py::StaticFileWithPathTest::test_serve",
"tornado/test/web_test.py::CustomStaticFileTest::test_serve",
"tornado/test/web_test.py::CustomStaticFileTest::test_static_url",
"tornado/test/web_test.py::HostMatchingTest::test_host_matching",
"tornado/test/web_test.py::NamedURLSpecGroupsTest::test_named_urlspec_groups",
"tornado/test/web_test.py::ClearHeaderTest::test_clear_header",
"tornado/test/web_test.py::Header304Test::test_304_headers",
"tornado/test/web_test.py::StatusReasonTest::test_status",
"tornado/test/web_test.py::DateHeaderTest::test_date_header",
"tornado/test/web_test.py::RaiseWithReasonTest::test_httperror_str",
"tornado/test/web_test.py::RaiseWithReasonTest::test_raise_with_reason",
"tornado/test/web_test.py::ErrorHandlerXSRFTest::test_404_xsrf",
"tornado/test/web_test.py::ErrorHandlerXSRFTest::test_error_xsrf",
"tornado/test/web_test.py::GzipTestCase::test_gzip",
"tornado/test/web_test.py::GzipTestCase::test_gzip_not_requested",
"tornado/test/web_test.py::GzipTestCase::test_gzip_static",
"tornado/test/web_test.py::GzipTestCase::test_vary_already_present",
"tornado/test/web_test.py::PathArgsInPrepareTest::test_kw",
"tornado/test/web_test.py::PathArgsInPrepareTest::test_pos",
"tornado/test/web_test.py::ExceptionHandlerTest::test_http_error",
"tornado/test/web_test.py::ExceptionHandlerTest::test_known_error",
"tornado/test/web_test.py::ExceptionHandlerTest::test_unknown_error",
"tornado/test/web_test.py::BuggyLoggingTest::test_buggy_log_exception",
"tornado/test/web_test.py::UIMethodUIModuleTest::test_ui_method",
"tornado/test/web_test.py::GetArgumentErrorTest::test_catch_error",
"tornado/test/web_test.py::MultipleExceptionTest::test_multi_exception",
"tornado/test/web_test.py::SetLazyPropertiesTest::test_set_properties",
"tornado/test/web_test.py::GetCurrentUserTest::test_get_current_user_from_ui_module_is_lazy",
"tornado/test/web_test.py::GetCurrentUserTest::test_get_current_user_from_ui_module_works",
"tornado/test/web_test.py::GetCurrentUserTest::test_get_current_user_works",
"tornado/test/web_test.py::UnimplementedHTTPMethodsTest::test_unimplemented_standard_methods",
"tornado/test/web_test.py::UnimplementedNonStandardMethodsTest::test_unimplemented_other",
"tornado/test/web_test.py::UnimplementedNonStandardMethodsTest::test_unimplemented_patch",
"tornado/test/web_test.py::AllHTTPMethodsTest::test_standard_methods",
"tornado/test/web_test.py::PatchMethodTest::test_other",
"tornado/test/web_test.py::PatchMethodTest::test_patch",
"tornado/test/web_test.py::FinishInPrepareTest::test_finish_in_prepare",
"tornado/test/web_test.py::Default404Test::test_404",
"tornado/test/web_test.py::Custom404Test::test_404",
"tornado/test/web_test.py::DefaultHandlerArgumentsTest::test_403",
"tornado/test/web_test.py::HandlerByNameTest::test_handler_by_name",
"tornado/test/web_test.py::StreamingRequestBodyTest::test_close_during_upload",
"tornado/test/web_test.py::StreamingRequestBodyTest::test_early_return",
"tornado/test/web_test.py::StreamingRequestBodyTest::test_early_return_with_data",
"tornado/test/web_test.py::StreamingRequestBodyTest::test_streaming_body",
"tornado/test/web_test.py::StreamingRequestFlowControlTest::test_flow_control",
"tornado/test/web_test.py::IncorrectContentLengthTest::test_content_length_too_high",
"tornado/test/web_test.py::IncorrectContentLengthTest::test_content_length_too_low",
"tornado/test/web_test.py::ClientCloseTest::test_client_close",
"tornado/test/web_test.py::SignedValueTest::test_expired",
"tornado/test/web_test.py::SignedValueTest::test_known_values",
"tornado/test/web_test.py::SignedValueTest::test_name_swap",
"tornado/test/web_test.py::SignedValueTest::test_non_ascii",
"tornado/test/web_test.py::SignedValueTest::test_payload_tampering",
"tornado/test/web_test.py::SignedValueTest::test_signature_tampering",
"tornado/test/web_test.py::XSRFTest::test_cross_user",
"tornado/test/web_test.py::XSRFTest::test_distinct_tokens",
"tornado/test/web_test.py::XSRFTest::test_refresh_token",
"tornado/test/web_test.py::XSRFTest::test_versioning",
"tornado/test/web_test.py::XSRFTest::test_xsrf_fail_body_no_cookie",
"tornado/test/web_test.py::XSRFTest::test_xsrf_fail_cookie_no_body",
"tornado/test/web_test.py::XSRFTest::test_xsrf_fail_no_token",
"tornado/test/web_test.py::XSRFTest::test_xsrf_success_header",
"tornado/test/web_test.py::XSRFTest::test_xsrf_success_non_hex_token",
"tornado/test/web_test.py::XSRFTest::test_xsrf_success_post_body",
"tornado/test/web_test.py::XSRFTest::test_xsrf_success_query_string",
"tornado/test/web_test.py::XSRFTest::test_xsrf_success_short_token",
"tornado/test/web_test.py::FinishExceptionTest::test_finish_exception",
"tornado/test/web_test.py::DecoratorTest::test_addslash",
"tornado/test/web_test.py::DecoratorTest::test_removeslash",
"tornado/test/web_test.py::CacheTest::test_multiple_strong_etag_match",
"tornado/test/web_test.py::CacheTest::test_multiple_strong_etag_not_match",
"tornado/test/web_test.py::CacheTest::test_multiple_weak_etag_match",
"tornado/test/web_test.py::CacheTest::test_multiple_weak_etag_not_match",
"tornado/test/web_test.py::CacheTest::test_strong_etag_match",
"tornado/test/web_test.py::CacheTest::test_strong_etag_not_match",
"tornado/test/web_test.py::CacheTest::test_weak_etag_match",
"tornado/test/web_test.py::CacheTest::test_weak_etag_not_match",
"tornado/test/web_test.py::CacheTest::test_wildcard_etag",
"tornado/test/web_test.py::RequestSummaryTest::test_missing_remote_ip"
] | [] | Apache License 2.0 | 56 | 1,757 | [
"tornado/httpserver.py",
"tornado/simple_httpclient.py",
"tornado/util.py",
"tornado/web.py"
] |
|
caesar0301__treelib-40 | 65635f48781f4426be9f55f1555d0c08454157bc | 2015-03-10 07:23:19 | bbd7bc557ab87dd0ebc449495f6041825be4a7c8 | diff --git a/treelib/tree.py b/treelib/tree.py
index 9bcf610..634566c 100644
--- a/treelib/tree.py
+++ b/treelib/tree.py
@@ -556,16 +556,16 @@ class Tree(object):
if not self.contains(nid):
raise NodeIDAbsentError("Node '%s' is not in the tree" % nid)
- label = ('{0}'.format(self[nid].tag.decode('utf-8')))\
+ label = ('{0}'.format(self[nid].tag))\
if idhidden \
else ('{0}[{1}]'.format(
- self[nid].tag.decode('utf-8'),
- self[nid].identifier.decode('utf-8')))
+ self[nid].tag,
+ self[nid].identifier))
filter = (self._real_true) if (filter is None) else filter
if level == self.ROOT:
- func(label)
+ func(label.encode('utf8'))
else:
leading = ''.join(map(lambda x: DT_VLINE + ' ' * 3
if not x else ' ' * 4, iflast[0:-1]))
| AttributeError: 'str' object has no attribute 'decode'
Python 3.4, OS X 10.10
```python
>>> from treelib import Tree, Node
>>> tree = Tree()
>>> tree.create_node("Harry", "harry")
>>> tree.create_node("Jane", "jane", parent="harry")
>>> tree.show()
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/usr/local/lib/python3.4/site-packages/treelib/tree.py", line 517, in show
func=print)
File "/usr/local/lib/python3.4/site-packages/treelib/tree.py", line 560, in _print_backend
if idhidden \
AttributeError: 'str' object has no attribute 'decode'
``` | caesar0301/treelib | diff --git a/tests/test_treelib.py b/tests/test_treelib.py
index 952f851..a061c8a 100644
--- a/tests/test_treelib.py
+++ b/tests/test_treelib.py
@@ -1,4 +1,10 @@
#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+from __future__ import unicode_literals
+try:
+ from StringIO import StringIO as BytesIO
+except ImportError:
+ from io import BytesIO
import unittest
from treelib import Tree, Node
from treelib.tree import NodeIDAbsentError
@@ -58,9 +64,9 @@ class NodeCase(unittest.TestCase):
class TreeCase(unittest.TestCase):
def setUp(self):
tree = Tree()
- tree.create_node("Harry", "harry")
- tree.create_node("Jane", "jane", parent="harry")
- tree.create_node("Bill", "bill", parent="harry")
+ tree.create_node("Hárry", "hárry")
+ tree.create_node("Jane", "jane", parent="hárry")
+ tree.create_node("Bill", "bill", parent="hárry")
tree.create_node("Diane", "diane", parent="jane")
tree.create_node("George", "george", parent="bill")
self.tree = tree
@@ -71,14 +77,14 @@ class TreeCase(unittest.TestCase):
self.assertEqual(isinstance(self.copytree, Tree), True)
def test_is_root(self):
- self.assertTrue(self.tree._nodes['harry'].is_root())
+ self.assertTrue(self.tree._nodes['hárry'].is_root())
self.assertFalse(self.tree._nodes['jane'].is_root())
def test_paths_to_leaves(self):
paths = self.tree.paths_to_leaves()
self.assertEqual( len(paths), 2 )
- self.assertTrue( ['harry', 'jane', 'diane'] in paths )
- self.assertTrue( ['harry', 'bill', 'george'] in paths )
+ self.assertTrue( ['hárry', 'jane', 'diane'] in paths )
+ self.assertTrue( ['hárry', 'bill', 'george'] in paths )
def test_nodes(self):
self.assertEqual(len(self.tree.nodes), 5)
@@ -148,7 +154,7 @@ class TreeCase(unittest.TestCase):
# Try getting the level of the node
"""
self.tree.show()
- Harry
+ Hárry
|___ Bill
| |___ George
| |___ Jill
@@ -161,7 +167,7 @@ class TreeCase(unittest.TestCase):
self.assertEqual(self.tree.depth(self.tree.get_node("george")), 2)
self.assertEqual(self.tree.depth("jane"), 1)
self.assertEqual(self.tree.depth("bill"), 1)
- self.assertEqual(self.tree.depth("harry"), 0)
+ self.assertEqual(self.tree.depth("hárry"), 0)
# Try getting Exception
node = Node("Test One", "identifier 1")
@@ -177,11 +183,11 @@ class TreeCase(unittest.TestCase):
in leaves), True)
def test_link_past_node(self):
- self.tree.create_node("Jill", "jill", parent="harry")
+ self.tree.create_node("Jill", "jill", parent="hárry")
self.tree.create_node("Mark", "mark", parent="jill")
- self.assertEqual("mark" not in self.tree.is_branch("harry"), True)
+ self.assertEqual("mark" not in self.tree.is_branch("hárry"), True)
self.tree.link_past_node("jill")
- self.assertEqual("mark" in self.tree.is_branch("harry"), True)
+ self.assertEqual("mark" in self.tree.is_branch("hárry"), True)
def test_expand_tree(self):
nodes = [self.tree[nid] for nid in self.tree.expand_tree()]
@@ -202,7 +208,7 @@ class TreeCase(unittest.TestCase):
self.tree.remove_node("jill")
def test_rsearch(self):
- for nid in ["harry", "jane", "diane"]:
+ for nid in ["hárry", "jane", "diane"]:
self.assertEqual(nid in self.tree.rsearch("diane"), True)
def test_subtree(self):
@@ -216,8 +222,8 @@ class TreeCase(unittest.TestCase):
def test_remove_subtree(self):
subtree_shallow = self.tree.remove_subtree("jane")
- self.assertEqual("jane" not in self.tree.is_branch("harry"), True)
- self.tree.paste("harry", subtree_shallow)
+ self.assertEqual("jane" not in self.tree.is_branch("hárry"), True)
+ self.tree.paste("hárry", subtree_shallow)
def test_to_json(self):
self.assertEqual.__self__.maxDiff = None
@@ -225,7 +231,7 @@ class TreeCase(unittest.TestCase):
self.tree.to_json(True)
def test_siblings(self):
- self.assertEqual(len(self.tree.siblings("harry")) == 0, True)
+ self.assertEqual(len(self.tree.siblings("hárry")) == 0, True)
self.assertEqual(self.tree.siblings("jane")[0].identifier == "bill",
True)
@@ -239,13 +245,29 @@ class TreeCase(unittest.TestCase):
self.tree.remove_node("jill")
def test_level(self):
- self.assertEqual(self.tree.level('harry'), 0)
+ self.assertEqual(self.tree.level('hárry'), 0)
depth = self.tree.depth()
self.assertEqual(self.tree.level('diane'), depth)
self.assertEqual(self.tree.level('diane',
lambda x: x.identifier!='jane'),
depth-1)
+ def test_print_backend(self):
+ reader = BytesIO()
+
+ def write(line):
+ reader.write(line + b'\n')
+
+ self.tree._print_backend(func=write)
+
+ assert reader.getvalue() == """\
+Hárry
+├── Bill
+│ └── George
+└── Jane
+ └── Diane
+""".encode('utf8')
+
def tearDown(self):
self.tree = None
self.copytree = None
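Relating back to the traceback in the issue, a small sketch of the str/bytes asymmetry behind it (Python 3 semantics):
```python
tag = "Harry"               # a node tag is already text (str) on Python 3
# tag.decode("utf-8")       # -> AttributeError: 'str' object has no attribute 'decode'
line = tag.encode("utf-8")  # what the fix does instead, only at output time
assert isinstance(line, bytes)
```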
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 2
},
"num_modified_files": 1
} | 1.3 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"nose",
"coverage",
"pytest"
],
"pre_install": null,
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | coverage==7.8.0
exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
nose==1.3.7
packaging @ file:///croot/packaging_1734472117206/work
pluggy @ file:///croot/pluggy_1733169602837/work
pytest @ file:///croot/pytest_1738938843180/work
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
-e git+https://github.com/caesar0301/treelib.git@65635f48781f4426be9f55f1555d0c08454157bc#egg=treelib
| name: treelib
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- exceptiongroup=1.2.0=py39h06a4308_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- packaging=24.2=py39h06a4308_0
- pip=25.0=py39h06a4308_0
- pluggy=1.5.0=py39h06a4308_0
- pytest=8.3.4=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py39h06a4308_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- coverage==7.8.0
- nose==1.3.7
prefix: /opt/conda/envs/treelib
| [
"tests/test_treelib.py::TreeCase::test_print_backend"
] | [] | [
"tests/test_treelib.py::NodeCase::test_data",
"tests/test_treelib.py::NodeCase::test_initialization",
"tests/test_treelib.py::NodeCase::test_set_bpointer",
"tests/test_treelib.py::NodeCase::test_set_fpointer",
"tests/test_treelib.py::NodeCase::test_set_identifier",
"tests/test_treelib.py::NodeCase::test_set_is_leaf",
"tests/test_treelib.py::NodeCase::test_set_tag",
"tests/test_treelib.py::TreeCase::test_children",
"tests/test_treelib.py::TreeCase::test_depth",
"tests/test_treelib.py::TreeCase::test_expand_tree",
"tests/test_treelib.py::TreeCase::test_getitem",
"tests/test_treelib.py::TreeCase::test_is_root",
"tests/test_treelib.py::TreeCase::test_leaves",
"tests/test_treelib.py::TreeCase::test_level",
"tests/test_treelib.py::TreeCase::test_link_past_node",
"tests/test_treelib.py::TreeCase::test_move_node",
"tests/test_treelib.py::TreeCase::test_nodes",
"tests/test_treelib.py::TreeCase::test_parent",
"tests/test_treelib.py::TreeCase::test_paste_tree",
"tests/test_treelib.py::TreeCase::test_paths_to_leaves",
"tests/test_treelib.py::TreeCase::test_remove_node",
"tests/test_treelib.py::TreeCase::test_remove_subtree",
"tests/test_treelib.py::TreeCase::test_rsearch",
"tests/test_treelib.py::TreeCase::test_siblings",
"tests/test_treelib.py::TreeCase::test_subtree",
"tests/test_treelib.py::TreeCase::test_to_json",
"tests/test_treelib.py::TreeCase::test_tree",
"tests/test_treelib.py::TreeCase::test_tree_data"
] | [] | Apache License 2.0 | 58 | 282 | [
"treelib/tree.py"
] |
|
enthought__okonomiyaki-34 | d32923ad74059883e31aaed8c12d3cd5e0288acd | 2015-03-17 21:13:01 | d32923ad74059883e31aaed8c12d3cd5e0288acd | diff --git a/okonomiyaki/platforms/epd_platform.py b/okonomiyaki/platforms/epd_platform.py
index 441712a..d32d37a 100644
--- a/okonomiyaki/platforms/epd_platform.py
+++ b/okonomiyaki/platforms/epd_platform.py
@@ -172,20 +172,16 @@ def _guess_architecture():
"""
Returns the architecture of the running python.
"""
- x86 = "x86"
- amd64 = "amd64"
- bits = platform.architecture()[0]
machine = platform.machine()
- if machine in ("AMD64", "x86_64"):
- if bits == "32bit":
- return x86
- elif bits == "64bit":
- return amd64
- elif machine in ("x86", "i386", "i686") and bits == "32bit":
- return x86
+
+ if machine in ("AMD64", "x86_64", "x86", "i386", "i686"):
+ if sys.maxsize > 2 ** 32:
+ return "amd64"
+ else:
+ return "x86"
else:
- raise OkonomiyakiError("Unknown bits/machine combination {0}/{1}".
- format(bits, machine))
+ raise OkonomiyakiError("Unknown machine combination {0!r}".
+ format(machine))
def _guess_epd_platform(arch=None):
diff --git a/okonomiyaki/platforms/platform.py b/okonomiyaki/platforms/platform.py
index bb20a39..f5db84f 100644
--- a/okonomiyaki/platforms/platform.py
+++ b/okonomiyaki/platforms/platform.py
@@ -3,6 +3,8 @@ from __future__ import absolute_import
import platform
import sys
+from okonomiyaki.platforms import epd_platform
+
from okonomiyaki.bundled.traitlets import HasTraits, Enum, Instance, Unicode
from okonomiyaki.platforms.epd_platform import EPDPlatform
from okonomiyaki.errors import OkonomiyakiError
@@ -200,18 +202,14 @@ def _guess_architecture():
"""
Returns the architecture of the running python.
"""
- bits = platform.architecture()[0]
- machine = platform.machine()
- if machine in ("AMD64", "x86_64"):
- if bits == "32bit":
- return Arch.from_name(X86)
- elif bits == "64bit":
- return Arch.from_name(X86_64)
- elif machine in ("x86", "i386", "i686") and bits == "32bit":
+ epd_platform_arch = epd_platform._guess_architecture()
+ if epd_platform_arch == "x86":
return Arch.from_name(X86)
+ elif epd_platform_arch == "amd64":
+ return Arch.from_name(X86_64)
else:
- raise OkonomiyakiError("Unknown bits/machine combination {0}/{1}".
- format(bits, machine))
+ raise OkonomiyakiError("Unknown architecture {0!r}".
+ format(epd_platform_arch))
def _guess_machine():
| Fix platform guessing for 64-bit processes on a 32-bit kernel
See #31 | enthought/okonomiyaki | diff --git a/okonomiyaki/platforms/tests/common.py b/okonomiyaki/platforms/tests/common.py
index b7ff851..8eb942b 100644
--- a/okonomiyaki/platforms/tests/common.py
+++ b/okonomiyaki/platforms/tests/common.py
@@ -63,14 +63,12 @@ mock_osx_10_7 = MultiPatcher([
# Architecture mocking
mock_machine = lambda machine: Patcher(mock.patch("platform.machine",
lambda: machine))
-mock_architecture = lambda arch: Patcher(mock.patch("platform.architecture",
- lambda: arch))
mock_machine_x86 = Patcher(mock_machine("x86"))
-mock_architecture_32bit = Patcher(mock_architecture(("32bit",)))
+mock_architecture_32bit = Patcher(mock.patch("sys.maxsize", 2**32-1))
mock_machine_x86_64 = Patcher(mock_machine("x86_64"))
-mock_architecture_64bit = Patcher(mock_architecture(("64bit",)))
+mock_architecture_64bit = Patcher(mock.patch("sys.maxsize", 2**64-1))
mock_x86 = MultiPatcher([mock_machine_x86, mock_architecture_32bit])
mock_x86_64 = MultiPatcher([mock_machine_x86_64, mock_architecture_64bit])
diff --git a/okonomiyaki/platforms/tests/test_epd_platform.py b/okonomiyaki/platforms/tests/test_epd_platform.py
index edb27a8..9cf711f 100644
--- a/okonomiyaki/platforms/tests/test_epd_platform.py
+++ b/okonomiyaki/platforms/tests/test_epd_platform.py
@@ -8,9 +8,12 @@ from okonomiyaki.platforms.epd_platform import (_guess_architecture,
_guess_epd_platform, applies)
from okonomiyaki.platforms.legacy import _SUBDIR
-from .common import (mock_centos_3_5, mock_centos_5_8, mock_centos_6_3,
- mock_darwin, mock_machine_armv71, mock_solaris,
- mock_ubuntu_raring, mock_windows, mock_x86, mock_x86_64)
+from .common import (mock_architecture_32bit, mock_architecture_64bit,
+ mock_centos_3_5, mock_centos_5_8, mock_centos_6_3,
+ mock_darwin, mock_machine_x86, mock_machine_x86_64,
+ mock_machine_armv71, mock_solaris,
+ mock_ubuntu_raring, mock_windows, mock_x86,
+ mock_x86_64)
class TestEPDPlatform(unittest.TestCase):
@@ -94,12 +97,52 @@ class TestGuessEPDPlatform(unittest.TestCase):
@mock_darwin
def test_guess_darwin_platform(self):
- epd_platform = _guess_epd_platform("x86")
+ # When
+ with mock_machine_x86:
+ epd_platform = _guess_epd_platform("x86")
+
+ # Then
self.assertEqual(epd_platform.short, "osx-32")
- epd_platform = _guess_epd_platform("amd64")
+ # When
+ with mock_machine_x86:
+ epd_platform = _guess_epd_platform("amd64")
+
+ # Then
+ self.assertEqual(epd_platform.short, "osx-64")
+
+ # When
+ with mock_machine_x86:
+ with mock_architecture_32bit:
+ epd_platform = _guess_epd_platform()
+
+ # Then
+ self.assertEqual(epd_platform.short, "osx-32")
+
+ # When
+ with mock_machine_x86:
+ with mock_architecture_64bit:
+ epd_platform = _guess_epd_platform()
+
+ # Then
self.assertEqual(epd_platform.short, "osx-64")
+ # When
+ with mock_machine_x86_64:
+ with mock_architecture_64bit:
+ epd_platform = _guess_epd_platform()
+
+ # Then
+ self.assertEqual(epd_platform.short, "osx-64")
+
+ # When
+ with mock_machine_x86_64:
+ with mock_architecture_32bit:
+ epd_platform = _guess_epd_platform()
+
+ # Then
+ self.assertEqual(epd_platform.short, "osx-32")
+
def test_guess_linux2_platform(self):
with mock_centos_5_8:
epd_platform = _guess_epd_platform("x86")
@@ -109,27 +152,27 @@ class TestGuessEPDPlatform(unittest.TestCase):
self.assertEqual(epd_platform.short, "rh5-64")
with mock.patch("platform.machine", lambda: "x86"):
- with mock.patch("platform.architecture", lambda: ("32bit",)):
+ with mock_architecture_32bit:
epd_platform = _guess_epd_platform()
self.assertEqual(epd_platform.short, "rh5-32")
with mock.patch("platform.machine", lambda: "i386"):
- with mock.patch("platform.architecture", lambda: ("32bit",)):
+ with mock_architecture_32bit:
epd_platform = _guess_epd_platform()
self.assertEqual(epd_platform.short, "rh5-32")
with mock.patch("platform.machine", lambda: "i686"):
- with mock.patch("platform.architecture", lambda: ("32bit",)):
+ with mock_architecture_32bit:
epd_platform = _guess_epd_platform()
self.assertEqual(epd_platform.short, "rh5-32")
with mock.patch("platform.machine", lambda: "x86_64"):
- with mock.patch("platform.architecture", lambda: ("32bit",)):
+ with mock_architecture_32bit:
epd_platform = _guess_epd_platform()
self.assertEqual(epd_platform.short, "rh5-32")
with mock.patch("platform.machine", lambda: "x86_64"):
- with mock.patch("platform.architecture", lambda: ("64bit",)):
+ with mock_architecture_64bit:
epd_platform = _guess_epd_platform()
self.assertEqual(epd_platform.short, "rh5-64")
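The detection strategy these tests exercise, as a standalone sketch (it mirrors the patch above; not a drop-in replacement for okonomiyaki's API):
```python
import platform
import sys

def guess_architecture():
    # sys.maxsize reflects the pointer size of the *running* interpreter,
    # whereas platform.architecture() can disagree with it in mixed
    # 32-/64-bit setups such as the one in the issue title.
    machine = platform.machine()
    if machine in ("AMD64", "x86_64", "x86", "i386", "i686"):
        return "amd64" if sys.maxsize > 2 ** 32 else "x86"
    raise RuntimeError("Unknown machine {0!r}".format(machine))
```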
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 3
},
"num_modified_files": 2
} | 0.4 | {
"env_vars": null,
"env_yml_path": [],
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": [],
"python": "3.7",
"reqs_path": [
"dev_requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | certifi @ file:///croot/certifi_1671487769961/work/certifi
coverage==7.2.7
docutils==0.20.1
enum34==1.1.10
exceptiongroup==1.2.2
flake8==5.0.4
haas==0.9.0
importlib-metadata==4.2.0
iniconfig==2.0.0
mccabe==0.7.0
mock==5.2.0
-e git+https://github.com/enthought/okonomiyaki.git@d32923ad74059883e31aaed8c12d3cd5e0288acd#egg=okonomiyaki
packaging==24.0
pbr==6.1.1
pluggy==1.2.0
pycodestyle==2.9.1
pyflakes==2.5.0
pytest==7.4.4
six==1.17.0
statistics==1.0.3.5
stevedore==3.5.2
tomli==2.0.1
typing_extensions==4.7.1
zipp==3.15.0
| name: okonomiyaki
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2022.12.7=py37h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=22.3.1=py37h06a4308_0
- python=3.7.16=h7a1cb2a_0
- readline=8.2=h5eee18b_0
- setuptools=65.6.3=py37h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.38.4=py37h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- coverage==7.2.7
- docutils==0.20.1
- enum34==1.1.10
- exceptiongroup==1.2.2
- flake8==5.0.4
- haas==0.9.0
- importlib-metadata==4.2.0
- iniconfig==2.0.0
- mccabe==0.7.0
- mock==5.2.0
- packaging==24.0
- pbr==6.1.1
- pluggy==1.2.0
- pycodestyle==2.9.1
- pyflakes==2.5.0
- pytest==7.4.4
- six==1.17.0
- statistics==1.0.3.5
- stevedore==3.5.2
- tomli==2.0.1
- typing-extensions==4.7.1
- zipp==3.15.0
prefix: /opt/conda/envs/okonomiyaki
| [
"okonomiyaki/platforms/tests/test_epd_platform.py::TestEPDPlatformApplies::test_all",
"okonomiyaki/platforms/tests/test_epd_platform.py::TestEPDPlatformApplies::test_current_linux",
"okonomiyaki/platforms/tests/test_epd_platform.py::TestEPDPlatformApplies::test_current_windows",
"okonomiyaki/platforms/tests/test_epd_platform.py::TestGuessEPDPlatform::test_guess_darwin_platform",
"okonomiyaki/platforms/tests/test_epd_platform.py::TestGuessEPDPlatform::test_guess_linux2_platform"
] | [] | [
"okonomiyaki/platforms/tests/test_epd_platform.py::TestEPDPlatform::test_epd_platform_from_string",
"okonomiyaki/platforms/tests/test_epd_platform.py::TestEPDPlatform::test_guessed_epd_platform",
"okonomiyaki/platforms/tests/test_epd_platform.py::TestEPDPlatform::test_short_names_consistency",
"okonomiyaki/platforms/tests/test_epd_platform.py::TestEPDPlatformApplies::test_applies_rh",
"okonomiyaki/platforms/tests/test_epd_platform.py::TestGuessEPDPlatform::test_guess_linux2_unsupported",
"okonomiyaki/platforms/tests/test_epd_platform.py::TestGuessEPDPlatform::test_guess_solaris_unsupported",
"okonomiyaki/platforms/tests/test_epd_platform.py::TestGuessEPDPlatform::test_guess_unsupported_processor",
"okonomiyaki/platforms/tests/test_epd_platform.py::TestGuessEPDPlatform::test_guess_win32_platform"
] | [] | BSD License | 68 | 806 | [
"okonomiyaki/platforms/epd_platform.py",
"okonomiyaki/platforms/platform.py"
] |
|
ministryofjustice__salt-shaker-21 | f7ab3acca99aa24b58d6d14f747c1d7b1daeac2c | 2015-03-20 14:58:51 | a7349bcd65608b0b2f18aadf3c181009b2d78398 | diff --git a/shaker/helpers.py b/shaker/helpers.py
index a04e149..8aa577f 100644
--- a/shaker/helpers.py
+++ b/shaker/helpers.py
@@ -2,6 +2,7 @@ import logging
import json
import requests
import os
+import re
def get_valid_github_token(online_validation_enabled = False):
"""
@@ -97,3 +98,69 @@ def validate_github_access(response):
logging.error("Unknown problem checking credentials: %s" % response_message)
return valid_credentials
+
+def parse_metadata(metadata):
+ """
+ Entry function to handle the metadata parsing workflow and return a metadata
+ object which is cleaned up
+
+ Args:
+ metadata (dictionary): Keyed salt formula dependency information
+
+ Returns:
+ parsed_metadata (dictionary): The original metadata parsed and cleaned up
+ """
+ # Remove duplicates
+ parsed_metadata = resolve_metadata_duplicates(metadata)
+ return parsed_metadata
+
+def resolve_metadata_duplicates(metadata):
+ """
+ Strip duplicates out of a metadata file. If we have no additional criteria,
+ simply take the first one. Or can resolve by latest version or preferred organisation
+ if required
+
+ Args:
+ metadata (dictionary): Keyed salt formula dependency information
+
+ Returns:
+ resolved_dependencies (dictionary): The original metadata stripped of duplicates
+ If the metadata could not be resolved then we return the original args version
+ """
+ # Only start to make alterations if we have a valid metadata format
+ # Otherwise throw an exception
+
+ # If metadata is not a dictionary or does not contain
+ # a dependencies field then throw an exception
+ if not (isinstance(metadata, type({}))):
+ raise TypeError("resolve_metadata_duplicates: Metadata is not a "
+ "dictionary but type '%s'" % (type(metadata)))
+ elif not ("dependencies" in metadata):
+ raise IndexError("resolve_metadata_duplicates: Metadata has "
+ "no key called 'dependencies'"
+ )
+ # Count the duplicates we find
+ count_duplicates = 0
+
+ resolved_dependency_collection = {}
+ for dependency in metadata["dependencies"]:
+ # Filter out formula name
+ org, formula = dependency.split(':')[1].split('.git')[0].split('/')
+
+ # Simply take the first formula found, ignore subsequent
+ # formulas with the same name even from different organisations
+ # Just warn, not erroring out
+ if formula not in resolved_dependency_collection:
+ resolved_dependency_collection[formula] = dependency
+ else:
+ # Do some sort of tag resolution
+ count_duplicates += 1
+ logging.warning("resolve_metadata_duplicates: Skipping duplicate dependency %s" %(formula))
+
+ # Only alter the metadata if we need to
+ if count_duplicates > 0:
+ resolved_dependencies = resolved_dependency_collection.values()
+ metadata["dependencies"] = resolved_dependencies
+
+ return metadata
+
diff --git a/shaker/resolve_deps.py b/shaker/resolve_deps.py
index c7a205c..f8a7b45 100644
--- a/shaker/resolve_deps.py
+++ b/shaker/resolve_deps.py
@@ -131,7 +131,8 @@ def get_reqs(org_name, formula_name, constraint=None):
# Check for successful access and any credential problems
if helpers.validate_github_access(metadata):
found_metadata = True
- data = yaml.load(metadata.text)
+ # Read in the yaml metadata from body, stripping out duplicate entries
+ data = helpers.parse_metadata(yaml.load(metadata.text))
reqs = data['dependencies'] if 'dependencies' in data and data['dependencies'] else []
else:
reqs = requests.get(req_url.format(org_name, formula_name,
| Duplicates in metadata.yml
We don't throw an error if some idiot (me) puts duplicate lines in metadata.yml | ministryofjustice/salt-shaker | diff --git a/tests/test_metadata_handling.py b/tests/test_metadata_handling.py
new file mode 100644
index 0000000..7723861
--- /dev/null
+++ b/tests/test_metadata_handling.py
@@ -0,0 +1,67 @@
+import unittest
+import yaml
+from shaker import helpers
+from nose.tools import raises
+
+class TestMetadataHandling(unittest.TestCase):
+
+ # Sample metadata with duplicates
+ _sample_metadata_duplicates = {
+ "dependencies": [
+ "[email protected]:test_organisation/test1-formula.git==v1.0.1",
+ "[email protected]:test_organisation/test1-formula.git==v1.0.2",
+ "[email protected]:test_organisation/test2-formula.git==v2.0.1",
+ "[email protected]:test_organisation/test3-formula.git==v3.0.1",
+ "[email protected]:test_organisation/test3-formula.git==v3.0.2"
+ ],
+ "entry": ["dummy"]
+ }
+
+ _sample_metadata_no_duplicates = {
+ "dependencies": [
+ "[email protected]:test_organisation/test1-formula.git==v1.0.1",
+ "[email protected]:test_organisation/test2-formula.git==v2.0.1",
+ "[email protected]:test_organisation/test3-formula.git==v3.0.1"
+ ],
+ "entry": ["dummy"]
+ }
+
+ def test_resolve_metadata_duplicates(self):
+ """
+ Check if we successfully remove duplicates from a sample metadata
+ """
+ original_metadata = self._sample_metadata_duplicates
+ expected_metadata = self._sample_metadata_no_duplicates
+ resolved_metadata = helpers.resolve_metadata_duplicates(original_metadata)
+
+ expected_metadata_dependencies = expected_metadata["dependencies"]
+ resolved_metadata_dependencies = resolved_metadata["dependencies"]
+ expected_metadata_entries = expected_metadata["entry"]
+ resolved_metadata_entries = resolved_metadata["entry"]
+
+ # Test dependencies found
+ for expected_metadata_dependency in expected_metadata_dependencies:
+ self.assertTrue(expected_metadata_dependency in resolved_metadata_dependencies,
+ "test_resolve_metadata_duplicates: dependency '%s' not found in de-duplicated metadata"
+ % (expected_metadata_dependency))
+
+ # Test entry found
+ for expected_metadata_entry in expected_metadata_entries:
+ self.assertTrue(expected_metadata_entry in resolved_metadata_entries,
+ "test_resolve_metadata_duplicates: Entry '%s' not found in de-duplicated metadata"
+ % (expected_metadata_entry))
+
+ @raises(TypeError)
+ def test_resolve_metadata_duplicates_bad_metadata_object(self):
+ """
+ Check if bad yaml metadata will throw up a TypeError.
+ """
+ # Callable with bad metadata
+ helpers.resolve_metadata_duplicates("not-a-dictionary")
+
+ @raises(IndexError)
+ def test_resolve_metadata_duplicates_metadata_missing_index(self):
+ """
+ Check if metadata with a missing index will throw an error
+ """
+ helpers.resolve_metadata_duplicates({})
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 2
},
"num_modified_files": 2
} | 0.1 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"nose",
"responses",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y libgit2-dev libssh2-1-dev"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | certifi==2025.1.31
cffi==1.17.1
charset-normalizer==3.4.1
exceptiongroup==1.2.2
idna==3.10
iniconfig==2.1.0
nose==1.3.7
packaging==24.2
pluggy==1.5.0
pycparser==2.22
pygit2==1.15.1
pytest==8.3.5
PyYAML==6.0.2
requests==2.32.3
responses==0.25.7
-e git+https://github.com/ministryofjustice/salt-shaker.git@f7ab3acca99aa24b58d6d14f747c1d7b1daeac2c#egg=salt_shaker
tomli==2.2.1
urllib3==2.3.0
| name: salt-shaker
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- certifi==2025.1.31
- cffi==1.17.1
- charset-normalizer==3.4.1
- exceptiongroup==1.2.2
- idna==3.10
- iniconfig==2.1.0
- nose==1.3.7
- packaging==24.2
- pluggy==1.5.0
- pycparser==2.22
- pygit2==1.15.1
- pytest==8.3.5
- pyyaml==6.0.2
- requests==2.32.3
- responses==0.25.7
- tomli==2.2.1
- urllib3==2.3.0
prefix: /opt/conda/envs/salt-shaker
| [
"tests/test_metadata_handling.py::TestMetadataHandling::test_resolve_metadata_duplicates",
"tests/test_metadata_handling.py::TestMetadataHandling::test_resolve_metadata_duplicates_bad_metadata_object",
"tests/test_metadata_handling.py::TestMetadataHandling::test_resolve_metadata_duplicates_metadata_missing_index"
] | [] | [] | [] | null | 69 | 866 | [
"shaker/helpers.py",
"shaker/resolve_deps.py"
] |
|
ipython__ipython-8111 | 2af39462d92d3834d5780a87f44d5e6cee7ecb81 | 2015-03-21 23:44:47 | ff02638008de8c90ca5f177e559efa048a2557a0 | diff --git a/IPython/config/application.py b/IPython/config/application.py
index ef97162b3..264d3793a 100644
--- a/IPython/config/application.py
+++ b/IPython/config/application.py
@@ -159,7 +159,7 @@ def _log_level_changed(self, name, old, new):
help="The date format used by logging formatters for %(asctime)s"
)
def _log_datefmt_changed(self, name, old, new):
- self._log_format_changed()
+ self._log_format_changed('log_format', self.log_format, self.log_format)
log_format = Unicode("[%(name)s]%(highlevel)s %(message)s", config=True,
help="The Logging format template",
| `_log_format_changed()` missing 3 required positional arguments
In `IPython.config.application`, the `_log_datefmt_changed` handler calls `_log_format_changed` but does not pass it the required arguments. My guess would be that this is the correct implementation:
```
def _log_datefmt_changed(self, name, old, new):
self._log_format_changed(name, self.log_format, self.log_format)
```
However I am not really sure what the `name` parameter is for, so I might be wrong. | ipython/ipython | diff --git a/IPython/config/tests/test_application.py b/IPython/config/tests/test_application.py
index a03d548c2..5da6a1306 100644
--- a/IPython/config/tests/test_application.py
+++ b/IPython/config/tests/test_application.py
@@ -80,6 +80,7 @@ def test_log(self):
# trigger reconstruction of the log formatter
app.log.handlers = [handler]
app.log_format = "%(message)s"
+ app.log_datefmt = "%Y-%m-%d %H:%M"
app.log.info("hello")
nt.assert_in("hello", stream.getvalue())
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 2
},
"num_modified_files": 1
} | 3.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[test]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"pytest-xdist",
"pytest-mock",
"pytest-asyncio"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.6",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
charset-normalizer==2.0.12
coverage==6.2
execnet==1.9.0
idna==3.10
importlib-metadata==4.8.3
iniconfig==1.1.1
-e git+https://github.com/ipython/ipython.git@2af39462d92d3834d5780a87f44d5e6cee7ecb81#egg=ipython
nose==1.3.7
packaging==21.3
pluggy==1.0.0
py==1.11.0
pyparsing==3.1.4
pytest==7.0.1
pytest-asyncio==0.16.0
pytest-cov==4.0.0
pytest-mock==3.6.1
pytest-xdist==3.0.2
requests==2.27.1
tomli==1.2.3
typing_extensions==4.1.1
urllib3==1.26.20
zipp==3.6.0
| name: ipython
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- charset-normalizer==2.0.12
- coverage==6.2
- execnet==1.9.0
- idna==3.10
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- nose==1.3.7
- packaging==21.3
- pluggy==1.0.0
- py==1.11.0
- pyparsing==3.1.4
- pytest==7.0.1
- pytest-asyncio==0.16.0
- pytest-cov==4.0.0
- pytest-mock==3.6.1
- pytest-xdist==3.0.2
- requests==2.27.1
- tomli==1.2.3
- typing-extensions==4.1.1
- urllib3==1.26.20
- zipp==3.6.0
prefix: /opt/conda/envs/ipython
| [
"IPython/config/tests/test_application.py::TestApplication::test_log"
] | [] | [
"IPython/config/tests/test_application.py::TestApplication::test_aliases",
"IPython/config/tests/test_application.py::TestApplication::test_basic",
"IPython/config/tests/test_application.py::TestApplication::test_config",
"IPython/config/tests/test_application.py::TestApplication::test_config_propagation",
"IPython/config/tests/test_application.py::TestApplication::test_extra_args",
"IPython/config/tests/test_application.py::TestApplication::test_flag_clobber",
"IPython/config/tests/test_application.py::TestApplication::test_flags",
"IPython/config/tests/test_application.py::TestApplication::test_flatten_aliases",
"IPython/config/tests/test_application.py::TestApplication::test_flatten_flags",
"IPython/config/tests/test_application.py::TestApplication::test_multi_file",
"IPython/config/tests/test_application.py::TestApplication::test_unicode_argv"
] | [] | BSD 3-Clause "New" or "Revised" License | 71 | 176 | [
"IPython/config/application.py"
] |
|
CleanCut__green-40 | 9450d48e8099b15e87ddbd12243fb61db29fe4ba | 2015-03-25 15:20:15 | 9450d48e8099b15e87ddbd12243fb61db29fe4ba | diff --git a/green/loader.py b/green/loader.py
index f93d26c..50e5e91 100644
--- a/green/loader.py
+++ b/green/loader.py
@@ -121,11 +121,21 @@ def findDottedModuleAndParentDir(file_path):
return (dotted_module, parent_dir)
+def isNoseDisabledCase(test_case_class, attrname):
+ test_func = getattr(test_case_class, attrname)
+ nose_enabled = getattr(test_func, "__test__", None)
+
+ if nose_enabled is False:
+ return True
+ else:
+ return False
+
def loadFromTestCase(test_case_class):
debug("Examining test case {}".format(test_case_class.__name__), 3)
test_case_names = list(filter(
lambda attrname: (attrname.startswith('test') and
- callable(getattr(test_case_class, attrname))),
+ callable(getattr(test_case_class, attrname)) and
+ not isNoseDisabledCase(test_case_class, attrname)),
dir(test_case_class)))
debug("Test case names: {}".format(test_case_names))
test_case_names.sort(
| Make green work with nose_parameterized
Green doesn't work with `nose_parameterized` since it executes tests that `nose_parameterized` [marks](https://github.com/wolever/nose-parameterized/blob/master/nose_parameterized/parameterized.py#L232) as disabled using the nose-specific [`__test__`](https://github.com/nose-devs/nose/blob/master/nose/tools/nontrivial.py#L140) attribute
This attribute is easy to detect, so we should prune any tests that have it set. | CleanCut/green | diff --git a/green/test/test_loader.py b/green/test/test_loader.py
index 09f0b76..397844f 100644
--- a/green/test/test_loader.py
+++ b/green/test/test_loader.py
@@ -264,6 +264,17 @@ class TestLoadFromTestCase(unittest.TestCase):
set(['test_method1', 'test_method2']))
+ def test_nose_disabled_attribute(self):
+ "Tests disabled by nose generators dont get loaded"
+ class HasDisabled(unittest.TestCase):
+ def test_method(self):
+ pass
+
+ test_method.__test__ = False
+
+ suite = loader.loadFromTestCase(HasDisabled)
+ self.assertEqual(suite.countTestCases(), 0)
+
class TestLoadFromModuleFilename(unittest.TestCase):
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_hyperlinks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 0
},
"num_modified_files": 1
} | 1.7 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": null,
"python": "3.4",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
-e git+https://github.com/CleanCut/green.git@9450d48e8099b15e87ddbd12243fb61db29fe4ba#egg=green
importlib-metadata==4.8.3
iniconfig==1.1.1
packaging==21.3
pluggy==1.0.0
py==1.11.0
pyparsing==3.1.4
pytest==7.0.1
python-termstyle==0.1.10
tomli==1.2.3
typing_extensions==4.1.1
zipp==3.6.0
| name: green
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- packaging==21.3
- pluggy==1.0.0
- py==1.11.0
- pyparsing==3.1.4
- pytest==7.0.1
- python-termstyle==0.1.10
- tomli==1.2.3
- typing-extensions==4.1.1
- zipp==3.6.0
prefix: /opt/conda/envs/green
| [
"green/test/test_loader.py::TestLoadFromTestCase::test_nose_disabled_attribute"
] | [
"green/test/test_loader.py::TestCompletions::test_completionPartial",
"green/test/test_loader.py::TestCompletions::test_completionPartialShort",
"green/test/test_loader.py::TestLoadTargets::test_emptyDirAbsolute",
"green/test/test_loader.py::TestLoadTargets::test_emptyDirRelative",
"green/test/test_loader.py::TestLoadTargets::test_partiallyGoodName"
] | [
"green/test/test_loader.py::TestToProtoTestList::test_moduleImportFailure",
"green/test/test_loader.py::TestToProtoTestList::test_moduleImportFailureIgnored",
"green/test/test_loader.py::TestCompletions::test_completionBad",
"green/test/test_loader.py::TestCompletions::test_completionDot",
"green/test/test_loader.py::TestCompletions::test_completionEmpty",
"green/test/test_loader.py::TestCompletions::test_completionExact",
"green/test/test_loader.py::TestCompletions::test_completionIgnoresErrors",
"green/test/test_loader.py::TestIsPackage::test_no",
"green/test/test_loader.py::TestIsPackage::test_yes",
"green/test/test_loader.py::TestDottedModule::test_bad_path",
"green/test/test_loader.py::TestDottedModule::test_good_path",
"green/test/test_loader.py::TestLoadFromTestCase::test_normal",
"green/test/test_loader.py::TestLoadFromTestCase::test_runTest",
"green/test/test_loader.py::TestLoadFromModuleFilename::test_skipped_module",
"green/test/test_loader.py::TestDiscover::test_bad_input",
"green/test/test_loader.py::TestLoadTargets::test_BigDirWithAbsoluteImports",
"green/test/test_loader.py::TestLoadTargets::test_DirWithInit",
"green/test/test_loader.py::TestLoadTargets::test_DottedName",
"green/test/test_loader.py::TestLoadTargets::test_DottedNamePackageFromPath",
"green/test/test_loader.py::TestLoadTargets::test_MalformedModuleByName",
"green/test/test_loader.py::TestLoadTargets::test_ModuleByName",
"green/test/test_loader.py::TestLoadTargets::test_duplicate_targets",
"green/test/test_loader.py::TestLoadTargets::test_emptyDirDot",
"green/test/test_loader.py::TestLoadTargets::test_explicit_filename_error",
"green/test/test_loader.py::TestLoadTargets::test_multiple_targets",
"green/test/test_loader.py::TestLoadTargets::test_relativeDotDir"
] | [] | MIT License | 74 | 270 | [
"green/loader.py"
] |
|
eadhost__eadator-2 | 9ca6058a79729250f0c4399ac54e48d1543017c3 | 2015-03-26 06:44:18 | 9ca6058a79729250f0c4399ac54e48d1543017c3 | diff --git a/eadator/eadator.py b/eadator/eadator.py
index d1734ea..6a0c32e 100755
--- a/eadator/eadator.py
+++ b/eadator/eadator.py
@@ -16,14 +16,20 @@ def main(argv=None):
type=argparse.FileType('r'))
parser.add_argument('--dtd', required=False, )
parser.add_argument('--xsd', required=False, )
+ parser.add_argument('--count', action='store_true' )
if argv is None:
argv = parser.parse_args()
- message, valid = validate(argv.eadfile[0], argv.dtd, argv.xsd)
+ message, valid, error_count = validate(argv.eadfile[0], argv.dtd, argv.xsd)
if not valid:
pp(message)
+
+ if argv.count:
+ print("Error count : %d" % error_count)
+
+ if not valid:
exit(1)
def validate(eadfile, dtd=None, xsd=None):
@@ -48,12 +54,14 @@ def validate(eadfile, dtd=None, xsd=None):
validator = etree.XMLSchema(etree.parse(xsd))
message = None
+ error_count = 0
valid = validator.validate(eadfile)
if not valid:
message = validator.error_log
+ error_count = len(message)
- return message, valid
+ return message, valid, error_count
# main() idiom for importing into REPL for debugging
| Add the number of errors
Hello,
Could you add the number of errors at the end of the list of errors?
It could be useful for verify if a modification adds or deletes an error.
Thanks. | eadhost/eadator | diff --git a/tests/test_eadator.py b/tests/test_eadator.py
index a90571d..68d55d9 100755
--- a/tests/test_eadator.py
+++ b/tests/test_eadator.py
@@ -17,17 +17,32 @@ class TestEadator(unittest.TestCase):
type=argparse.FileType('r'))
parser.add_argument('--dtd', default="%s/ents/ead.dtd" % lib_folder, required=False, )
parser.add_argument('--xsd', default="%s/ents/ead.xsd" % lib_folder, required=False, )
+ parser.add_argument('--count', action='store_true' )
# test valid instances
eadator.main(parser.parse_args([os.path.join(cmd_folder,'test-dtd-valid.xml')]))
eadator.main(parser.parse_args([os.path.join(cmd_folder,'test-xsd-valid.xml')]))
- eadator.validate(os.path.join(cmd_folder,'test-dtd-valid.xml'))
- eadator.validate(os.path.join(cmd_folder,'test-xsd-valid.xml'))
+
+ message, valid, error_count = eadator.validate(os.path.join(cmd_folder,'test-dtd-valid.xml'))
+ self.assertTrue(valid)
+ self.assertEqual(0,error_count)
+
+ message, valid, error_count = eadator.validate(os.path.join(cmd_folder,'test-xsd-valid.xml'))
+ self.assertTrue(valid)
+ self.assertEqual(0,error_count)
# test invalid instances
self.assertRaises(SystemExit, eadator.main, parser.parse_args([os.path.join(cmd_folder,'test-dtd-invalid.xml')]))
self.assertRaises(SystemExit, eadator.main, parser.parse_args([os.path.join(cmd_folder,'test-dtd-invalid.xml')]))
+ message, valid, error_count = eadator.validate(os.path.join(cmd_folder,'test-dtd-invalid.xml'))
+ self.assertFalse(valid)
+ self.assertEqual(1,error_count)
+
+ message, valid, error_count = eadator.validate(os.path.join(cmd_folder,'test-xsd-invalid.xml'))
+ self.assertFalse(valid)
+ self.assertEqual(1,error_count)
+
if __name__ == '__main__':
unittest.main()
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 1
},
"num_modified_files": 1
} | 0.3 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y libxml2-dev libxslt-dev"
],
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | -e git+https://github.com/eadhost/eadator.git@9ca6058a79729250f0c4399ac54e48d1543017c3#egg=eadator
exceptiongroup==1.2.2
iniconfig==2.1.0
lxml==5.3.1
packaging==24.2
pluggy==1.5.0
pytest==8.3.5
tomli==2.2.1
| name: eadator
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- argparse==1.4.0
- exceptiongroup==1.2.2
- iniconfig==2.1.0
- lxml==5.3.1
- packaging==24.2
- pluggy==1.5.0
- pytest==8.3.5
- tomli==2.2.1
prefix: /opt/conda/envs/eadator
| [
"tests/test_eadator.py::TestEadator::test_eadator"
] | [] | [] | [] | BSD License | 75 | 362 | [
"eadator/eadator.py"
] |
|
mkdocs__mkdocs-395 | 88bb485ee4bd863f1cbfed6a786ef995cc844929 | 2015-04-02 19:29:19 | bfc393ce2dd31d0fea2be3a5b0fec20ed361bfe0 | diff --git a/mkdocs/nav.py b/mkdocs/nav.py
index c8257e12..932399b4 100644
--- a/mkdocs/nav.py
+++ b/mkdocs/nav.py
@@ -209,14 +209,17 @@ def _generate_site_navigation(pages_config, url_context, use_directory_urls=True
)
raise exceptions.ConfigurationError(msg)
+ # If both the title and child_title are None, then we
+ # have just been given a path. If that path contains a /
+ # then lets automatically nest it.
+ if title is None and child_title is None and os.path.sep in path:
+ filename = path.split(os.path.sep)[-1]
+ child_title = filename_to_title(filename)
+
if title is None:
filename = path.split(os.path.sep)[0]
title = filename_to_title(filename)
- if child_title is None and os.path.sep in path:
- filename = path.split(os.path.sep)[-1]
- child_title = filename_to_title(filename)
-
url = utils.get_url_path(path, use_directory_urls)
if not child_title:
| Title is used as a section if file is in subdirectory
Assuming I have a file at `research/stats.md` and a config line:
```
pages:
- ["research/stats.md", "Stats about Our Collection"]
```
I would assume that it would generate a top-level nav item titled "Stats about Our Collection".
In reality, it generates a section **Stats about Our Collection** with a sub-item titled **stats**.
I'm 90% sure this has to do with the logic in [nav.py](https://github.com/mkdocs/mkdocs/blob/master/mkdocs/nav.py#L212-L218) around `child_titles`.
| mkdocs/mkdocs | diff --git a/mkdocs/tests/nav_tests.py b/mkdocs/tests/nav_tests.py
index 7013a66e..b6876f35 100644
--- a/mkdocs/tests/nav_tests.py
+++ b/mkdocs/tests/nav_tests.py
@@ -63,6 +63,39 @@ class SiteNavigationTests(unittest.TestCase):
self.assertEqual(len(site_navigation.nav_items), 3)
self.assertEqual(len(site_navigation.pages), 6)
+ def test_nested_ungrouped(self):
+ pages = [
+ ('index.md', 'Home'),
+ ('about/contact.md', 'Contact'),
+ ('about/sub/license.md', 'License Title')
+ ]
+ expected = dedent("""
+ Home - /
+ Contact - /about/contact/
+ License Title - /about/sub/license/
+ """)
+ site_navigation = nav.SiteNavigation(pages)
+ self.assertEqual(str(site_navigation).strip(), expected)
+ self.assertEqual(len(site_navigation.nav_items), 3)
+ self.assertEqual(len(site_navigation.pages), 3)
+
+ def test_nested_ungrouped_no_titles(self):
+ pages = [
+ ('index.md',),
+ ('about/contact.md'),
+ ('about/sub/license.md')
+ ]
+ expected = dedent("""
+ Home - /
+ About
+ Contact - /about/contact/
+ License - /about/sub/license/
+ """)
+ site_navigation = nav.SiteNavigation(pages)
+ self.assertEqual(str(site_navigation).strip(), expected)
+ self.assertEqual(len(site_navigation.nav_items), 2)
+ self.assertEqual(len(site_navigation.pages), 3)
+
def test_walk_simple_toc(self):
pages = [
('index.md', 'Home'),
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 0
},
"num_modified_files": 1
} | 0.11 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"nose",
"mock",
"pytest"
],
"pre_install": [
"pip install tox"
],
"python": "3.4",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
distlib==0.3.9
filelock==3.4.1
ghp-import==2.1.0
importlib-metadata==4.8.3
importlib-resources==5.4.0
iniconfig==1.1.1
Jinja2==3.0.3
Markdown==2.4.1
MarkupSafe==2.0.1
-e git+https://github.com/mkdocs/mkdocs.git@88bb485ee4bd863f1cbfed6a786ef995cc844929#egg=mkdocs
mock==5.2.0
nose==1.3.7
packaging==21.3
platformdirs==2.4.0
pluggy==1.0.0
py==1.11.0
pyparsing==3.1.4
pytest==7.0.1
python-dateutil==2.9.0.post0
PyYAML==6.0.1
six==1.17.0
toml==0.10.2
tomli==1.2.3
tox==3.28.0
typing_extensions==4.1.1
virtualenv==20.17.1
watchdog==2.3.1
zipp==3.6.0
| name: mkdocs
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- distlib==0.3.9
- filelock==3.4.1
- ghp-import==2.1.0
- importlib-metadata==4.8.3
- importlib-resources==5.4.0
- iniconfig==1.1.1
- jinja2==3.0.3
- markdown==2.4.1
- markupsafe==2.0.1
- mock==5.2.0
- nose==1.3.7
- packaging==21.3
- platformdirs==2.4.0
- pluggy==1.0.0
- py==1.11.0
- pyparsing==3.1.4
- pytest==7.0.1
- python-dateutil==2.9.0.post0
- pyyaml==6.0.1
- six==1.17.0
- toml==0.10.2
- tomli==1.2.3
- tox==3.28.0
- typing-extensions==4.1.1
- virtualenv==20.17.1
- watchdog==2.3.1
- zipp==3.6.0
prefix: /opt/conda/envs/mkdocs
| [
"mkdocs/tests/nav_tests.py::SiteNavigationTests::test_nested_ungrouped"
] | [] | [
"mkdocs/tests/nav_tests.py::SiteNavigationTests::test_base_url",
"mkdocs/tests/nav_tests.py::SiteNavigationTests::test_empty_toc_item",
"mkdocs/tests/nav_tests.py::SiteNavigationTests::test_generate_site_navigation",
"mkdocs/tests/nav_tests.py::SiteNavigationTests::test_generate_site_navigation_windows",
"mkdocs/tests/nav_tests.py::SiteNavigationTests::test_indented_toc",
"mkdocs/tests/nav_tests.py::SiteNavigationTests::test_invalid_pages_config",
"mkdocs/tests/nav_tests.py::SiteNavigationTests::test_nested_ungrouped_no_titles",
"mkdocs/tests/nav_tests.py::SiteNavigationTests::test_relative_md_links_have_slash",
"mkdocs/tests/nav_tests.py::SiteNavigationTests::test_simple_toc",
"mkdocs/tests/nav_tests.py::SiteNavigationTests::test_walk_empty_toc",
"mkdocs/tests/nav_tests.py::SiteNavigationTests::test_walk_indented_toc",
"mkdocs/tests/nav_tests.py::SiteNavigationTests::test_walk_simple_toc"
] | [] | BSD 2-Clause "Simplified" License | 80 | 265 | [
"mkdocs/nav.py"
] |
|
mkdocs__mkdocs-402 | 74e60382b84b3af9969b30cc2cd9a98894d113f5 | 2015-04-03 08:53:55 | bfc393ce2dd31d0fea2be3a5b0fec20ed361bfe0 | diff --git a/mkdocs/compat.py b/mkdocs/compat.py
index 49bd396a..518a4937 100644
--- a/mkdocs/compat.py
+++ b/mkdocs/compat.py
@@ -13,6 +13,7 @@ if PY2:
httpserver = httpserver
import SocketServer
socketserver = SocketServer
+ from HTMLParser import HTMLParser
import itertools
zip = itertools.izip
@@ -30,6 +31,7 @@ else: # PY3
httpserver = httpserver
import socketserver
socketserver = socketserver
+ from html.parser import HTMLParser
zip = zip
diff --git a/mkdocs/toc.py b/mkdocs/toc.py
index 410aff5a..89627381 100644
--- a/mkdocs/toc.py
+++ b/mkdocs/toc.py
@@ -14,9 +14,7 @@ The steps we take to generate a table of contents are:
* Parse table of contents HTML into the underlying data structure.
"""
-import re
-
-TOC_LINK_REGEX = re.compile('<a href=["]([^"]*)["]>([^<]*)</a>')
+from mkdocs.compat import HTMLParser
class TableOfContents(object):
@@ -52,6 +50,32 @@ class AnchorLink(object):
return ret
+class TOCParser(HTMLParser):
+
+ def __init__(self):
+ HTMLParser.__init__(self)
+ self.links = []
+
+ self.in_anchor = True
+ self.attrs = None
+ self.title = ''
+
+ def handle_starttag(self, tag, attrs):
+
+ if tag == 'a':
+ self.in_anchor = True
+ self.attrs = dict(attrs)
+
+ def handle_endtag(self, tag):
+ if tag == 'a':
+ self.in_anchor = False
+
+ def handle_data(self, data):
+
+ if self.in_anchor:
+ self.title += data
+
+
def _parse_html_table_of_contents(html):
"""
Given a table of contents string that has been automatically generated by
@@ -63,9 +87,11 @@ def _parse_html_table_of_contents(html):
parents = []
ret = []
for line in lines:
- match = TOC_LINK_REGEX.search(line)
- if match:
- href, title = match.groups()
+ parser = TOCParser()
+ parser.feed(line)
+ if parser.title:
+ href = parser.attrs['href']
+ title = parser.title
nav = AnchorLink(title, href)
# Add the item to its parent if required. If it is a topmost
# item then instead append it to our return value.
| Not all headers are automatically linked
I have an API reference site for a project that's hosted on ReadTheDocs using mkdocs as the documentation engine. Headers that contain things like `<code>` blocks aren't linked, while all others seem to be.
I can reproduce this locally with a plain mkdocs install using the RTD theme.
Here's an example:
http://carbon.lpghatguy.com/en/latest/Classes/Collections.Tuple/
All three of the methods in that page should be automatically linked in the sidebar navigation, but only the one without any fancy decoration is. All of them have been given valid HTML ids, so they're possible to link, they just aren't.
The markdown for that page, which works around a couple RTD bugs and doesn't look that great, is here:
https://raw.githubusercontent.com/lua-carbon/carbon/master/docs/Classes/Collections.Tuple.md | mkdocs/mkdocs | diff --git a/mkdocs/tests/toc_tests.py b/mkdocs/tests/toc_tests.py
index 03ab9cd0..b0bdea11 100644
--- a/mkdocs/tests/toc_tests.py
+++ b/mkdocs/tests/toc_tests.py
@@ -29,6 +29,20 @@ class TableOfContentsTests(unittest.TestCase):
toc = self.markdown_to_toc(md)
self.assertEqual(str(toc).strip(), expected)
+ def test_indented_toc_html(self):
+ md = dedent("""
+ # Heading 1
+ ## <code>Heading</code> 2
+ ## Heading 3
+ """)
+ expected = dedent("""
+ Heading 1 - #heading-1
+ Heading 2 - #heading-2
+ Heading 3 - #heading-3
+ """)
+ toc = self.markdown_to_toc(md)
+ self.assertEqual(str(toc).strip(), expected)
+
def test_flat_toc(self):
md = dedent("""
# Heading 1
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 0
},
"num_modified_files": 2
} | 0.11 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov"
],
"pre_install": null,
"python": "3.7",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | certifi @ file:///croot/certifi_1671487769961/work/certifi
coverage==7.2.7
exceptiongroup==1.2.2
ghp-import==2.1.0
importlib-metadata==6.7.0
iniconfig==2.0.0
Jinja2==3.1.6
Markdown==2.4.1
MarkupSafe==2.1.5
-e git+https://github.com/mkdocs/mkdocs.git@74e60382b84b3af9969b30cc2cd9a98894d113f5#egg=mkdocs
packaging==24.0
pluggy==1.2.0
pytest==7.4.4
pytest-cov==4.1.0
python-dateutil==2.9.0.post0
PyYAML==6.0.1
six==1.17.0
tomli==2.0.1
typing_extensions==4.7.1
watchdog==3.0.0
zipp==3.15.0
| name: mkdocs
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2022.12.7=py37h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=22.3.1=py37h06a4308_0
- python=3.7.16=h7a1cb2a_0
- readline=8.2=h5eee18b_0
- setuptools=65.6.3=py37h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.38.4=py37h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- coverage==7.2.7
- exceptiongroup==1.2.2
- ghp-import==2.1.0
- importlib-metadata==6.7.0
- iniconfig==2.0.0
- jinja2==3.1.6
- markdown==2.4.1
- markupsafe==2.1.5
- packaging==24.0
- pluggy==1.2.0
- pytest==7.4.4
- pytest-cov==4.1.0
- python-dateutil==2.9.0.post0
- pyyaml==6.0.1
- six==1.17.0
- tomli==2.0.1
- typing-extensions==4.7.1
- watchdog==3.0.0
- zipp==3.15.0
prefix: /opt/conda/envs/mkdocs
| [
"mkdocs/tests/toc_tests.py::TableOfContentsTests::test_indented_toc_html"
] | [] | [
"mkdocs/tests/toc_tests.py::TableOfContentsTests::test_flat_h2_toc",
"mkdocs/tests/toc_tests.py::TableOfContentsTests::test_flat_toc",
"mkdocs/tests/toc_tests.py::TableOfContentsTests::test_indented_toc",
"mkdocs/tests/toc_tests.py::TableOfContentsTests::test_mixed_toc"
] | [] | BSD 2-Clause "Simplified" License | 81 | 646 | [
"mkdocs/compat.py",
"mkdocs/toc.py"
] |
|
mkdocs__mkdocs-443 | 74d3191e419b7cb79fe66f700119ead3365f70d0 | 2015-04-09 12:25:57 | bfc393ce2dd31d0fea2be3a5b0fec20ed361bfe0 | diff --git a/mkdocs/main.py b/mkdocs/main.py
index d73f9091..8b9a9412 100755
--- a/mkdocs/main.py
+++ b/mkdocs/main.py
@@ -55,7 +55,7 @@ def main(cmd, args, options=None):
build(config, clean_site_dir=clean_site_dir)
gh_deploy(config)
elif cmd == 'new':
- new(args, options)
+ new(args)
else:
print('MkDocs (version {0})'.format(__version__))
print('mkdocs [help|new|build|serve|gh-deploy|json] {options}')
diff --git a/mkdocs/new.py b/mkdocs/new.py
index 88531757..af969670 100644
--- a/mkdocs/new.py
+++ b/mkdocs/new.py
@@ -1,10 +1,13 @@
# coding: utf-8
from __future__ import print_function
+
import os
from io import open
-config_text = 'site_name: My Docs\n'
-index_text = """# Welcome to MkDocs
+from mkdocs import compat
+
+config_text = compat.unicode('site_name: My Docs\n')
+index_text = compat.unicode("""# Welcome to MkDocs
For full documentation visit [mkdocs.org](http://mkdocs.org).
@@ -21,10 +24,11 @@ For full documentation visit [mkdocs.org](http://mkdocs.org).
docs/
index.md # The documentation homepage.
... # Other markdown pages, images and other files.
-"""
+""")
+
+def new(args):
-def new(args, options):
if len(args) != 1:
print("Usage 'mkdocs new [directory-name]'")
return
| `mkdocs new` broken under python2
current master, python 2.7.9 virtualenv
only top directory and mkdocs.yml created, no docs dir or index.md
```
(karasu)[lashni@orphan src]$ mkdocs new karasu
Creating project directory: karasu
Writing config file: karasu/mkdocs.yml
Traceback (most recent call last):
File "/home/lashni/dev/karasu/bin/mkdocs", line 9, in <module>
load_entry_point('mkdocs==0.11.1', 'console_scripts', 'mkdocs')()
File "/home/lashni/dev/karasu/src/mkdocs/mkdocs/main.py", line 74, in run_main
main(cmd, args=sys.argv[2:], options=dict(opts))
File "/home/lashni/dev/karasu/src/mkdocs/mkdocs/main.py", line 58, in main
new(args, options)
File "/home/lashni/dev/karasu/src/mkdocs/mkdocs/new.py", line 47, in new
open(config_path, 'w', encoding='utf-8').write(config_text)
TypeError: must be unicode, not str
```
current master, python 3.4.3 virtualenv, files/dirs created successfully
```
(test)[lashni@orphan src]$ mkdocs new karasu
Creating project directory: karasu
Writing config file: karasu/mkdocs.yml
Writing initial docs: karasu/docs/index.md
``` | mkdocs/mkdocs | diff --git a/mkdocs/tests/new_tests.py b/mkdocs/tests/new_tests.py
new file mode 100644
index 00000000..e54fcb58
--- /dev/null
+++ b/mkdocs/tests/new_tests.py
@@ -0,0 +1,28 @@
+#!/usr/bin/env python
+# coding: utf-8
+
+import tempfile
+import unittest
+import os
+
+from mkdocs import new
+
+
+class NewTests(unittest.TestCase):
+
+ def test_new(self):
+
+ tempdir = tempfile.mkdtemp()
+ os.chdir(tempdir)
+
+ new.new(["myproject", ])
+
+ expected_paths = [
+ os.path.join(tempdir, "myproject"),
+ os.path.join(tempdir, "myproject", "mkdocs.yml"),
+ os.path.join(tempdir, "myproject", "docs"),
+ os.path.join(tempdir, "myproject", "docs", "index.md"),
+ ]
+
+ for expected_path in expected_paths:
+ self.assertTrue(os.path.exists(expected_path))
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 2
},
"num_modified_files": 2
} | 0.11 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"pytest-xdist",
"pytest-mock",
"pytest-asyncio"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | coverage==7.8.0
exceptiongroup==1.2.2
execnet==2.1.1
ghp-import==2.1.0
importlib_metadata==8.6.1
iniconfig==2.1.0
Jinja2==3.1.6
Markdown==3.7
MarkupSafe==3.0.2
-e git+https://github.com/mkdocs/mkdocs.git@74d3191e419b7cb79fe66f700119ead3365f70d0#egg=mkdocs
packaging==24.2
pluggy==1.5.0
pytest==8.3.5
pytest-asyncio==0.26.0
pytest-cov==6.0.0
pytest-mock==3.14.0
pytest-xdist==3.6.1
python-dateutil==2.9.0.post0
PyYAML==6.0.2
six==1.17.0
tomli==2.2.1
typing_extensions==4.13.0
watchdog==6.0.0
zipp==3.21.0
| name: mkdocs
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- coverage==7.8.0
- exceptiongroup==1.2.2
- execnet==2.1.1
- ghp-import==2.1.0
- importlib-metadata==8.6.1
- iniconfig==2.1.0
- jinja2==3.1.6
- markdown==3.7
- markupsafe==3.0.2
- packaging==24.2
- pluggy==1.5.0
- pytest==8.3.5
- pytest-asyncio==0.26.0
- pytest-cov==6.0.0
- pytest-mock==3.14.0
- pytest-xdist==3.6.1
- python-dateutil==2.9.0.post0
- pyyaml==6.0.2
- six==1.17.0
- tomli==2.2.1
- typing-extensions==4.13.0
- watchdog==6.0.0
- zipp==3.21.0
prefix: /opt/conda/envs/mkdocs
| [
"mkdocs/tests/new_tests.py::NewTests::test_new"
] | [] | [] | [] | BSD 2-Clause "Simplified" License | 86 | 422 | [
"mkdocs/main.py",
"mkdocs/new.py"
] |