Skip to content

Increase test coverage #375

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 2 commits into from
Jan 28, 2025
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
34 changes: 32 additions & 2 deletions knox/crypto.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,24 +7,54 @@


def create_token_string() -> str:
    """
    Build a new random token as a hex-encoded string.

    The number of random bytes requested is half of
    knox_settings.AUTH_TOKEN_CHARACTER_LENGTH, because hex encoding
    expands every byte into two characters.

    Returns:
        str: A random hexadecimal string of AUTH_TOKEN_CHARACTER_LENGTH
            characters.
    """
    num_bytes = int(knox_settings.AUTH_TOKEN_CHARACTER_LENGTH / 2)
    random_bytes = generate_bytes(num_bytes)
    return binascii.hexlify(random_bytes).decode()


def make_hex_compatible(token: str) -> bytes:
    """
    Converts a string token into a hex-compatible bytes object.

    We need to make sure that the token that is sent is hex-compatible.
    When a token prefix is used, we cannot guarantee that.

    Args:
        token (str): The token string to convert.

    Returns:
        bytes: The UTF-8 bytes representation of the token.
    """
    # The previous implementation did
    # binascii.unhexlify(binascii.hexlify(bytes(token, 'utf-8'))),
    # but unhexlify(hexlify(b)) is an identity round-trip on bytes,
    # so encoding to UTF-8 directly is exactly equivalent and simpler.
    return token.encode('utf-8')


def hash_token(token: str) -> str:
    """
    Calculates the hash of a token.

    Uses the hash algorithm specified in knox_settings.SECURE_HASH_ALGORITHM.
    The token is first converted to a hex-compatible bytes object before
    hashing (make_hex_compatible never raises: it hexlifies and then
    unhexlifies, which is a lossless round-trip for any input string).

    Args:
        token (str): The token string to hash.

    Returns:
        str: The hexadecimal representation of the token's hash digest.

    Example:
        hash_token("abc123") -> "a1b2..." (length depends on the digest
        algorithm configured in SECURE_HASH_ALGORITHM)
    """
    # hash_func is the algorithm class configured via
    # knox_settings.SECURE_HASH_ALGORITHM (imported at module level).
    digest = hash_func()
    digest.update(make_hex_compatible(token))
Expand Down
59 changes: 59 additions & 0 deletions tests/test_crypto.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,59 @@
from unittest.mock import patch

from django.test import TestCase

from knox.crypto import create_token_string, hash_token, make_hex_compatible
from knox.settings import knox_settings


class CryptoUtilsTestCase(TestCase):
    def test_create_token_string(self):
        """
        Verify token string creation has correct length and contains only hex characters.
        """
        # Patch the name knox.crypto actually calls. Patching 'os.urandom'
        # has no effect here because knox.crypto binds the function to the
        # local name `generate_bytes` at import time, so the module keeps
        # its original reference. The side_effect honours the requested
        # size so the assertion holds for any configured token length.
        with patch('knox.crypto.generate_bytes') as mock_generate_bytes:
            mock_generate_bytes.side_effect = lambda n: b'\x00' * n
            expected_length = knox_settings.AUTH_TOKEN_CHARACTER_LENGTH
            token = create_token_string()
            self.assertEqual(len(token), expected_length)
            hex_chars = set('0123456789abcdef')
            self.assertTrue(all(c in hex_chars for c in token.lower()))

    def test_make_hex_compatible_with_valid_input(self):
        """
        Ensure standard strings are correctly converted to hex-compatible bytes.
        """
        test_token = "test123"
        result = make_hex_compatible(test_token)
        self.assertIsInstance(result, bytes)
        expected = b'test123'
        self.assertEqual(result, expected)

    def test_make_hex_compatible_with_empty_string(self):
        """
        Verify empty string input returns empty bytes.
        """
        test_token = ""
        result = make_hex_compatible(test_token)
        self.assertEqual(result, b'')

    def test_make_hex_compatible_with_special_characters(self):
        """
        Check hex compatibility conversion handles special characters correctly.
        """
        test_token = "test@#$%"
        result = make_hex_compatible(test_token)
        self.assertIsInstance(result, bytes)
        expected = b'test@#$%'
        self.assertEqual(result, expected)

    def test_hash_token_with_valid_token(self):
        """
        Verify hash output is correct length and contains valid hex characters.
        """
        test_token = "abcdef1234567890"
        result = hash_token(test_token)
        self.assertIsInstance(result, str)
        # 128 hex chars corresponds to the default 512-bit digest
        # (knox_settings.SECURE_HASH_ALGORITHM) — adjust if overridden.
        self.assertEqual(len(result), 128)
        hex_chars = set('0123456789abcdef')
        self.assertTrue(all(c in hex_chars for c in result.lower()))
87 changes: 87 additions & 0 deletions tests/test_models.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,87 @@
from datetime import timedelta

from django.contrib.auth import get_user_model
from django.test import TestCase
from django.utils import timezone
from freezegun import freeze_time

from knox.models import AuthToken
from knox.settings import CONSTANTS, knox_settings


class AuthTokenTests(TestCase):
    """
    Auth token model tests.
    """

    def setUp(self):
        self.User = get_user_model()
        self.user = self.User.objects.create_user(
            username='testuser',
            password='testpass123'
        )

    def test_token_creation(self):
        """
        Test that tokens are created correctly with expected format.
        """
        token_creation = timezone.now()
        with freeze_time(token_creation):
            instance, token = AuthToken.objects.create(user=self.user)
        self.assertIsNotNone(token)
        self.assertTrue(token.startswith(knox_settings.TOKEN_PREFIX))
        self.assertEqual(
            len(instance.token_key),
            CONSTANTS.TOKEN_KEY_LENGTH,
        )
        self.assertEqual(instance.user, self.user)
        # Use the configured TTL instead of hard-coding timedelta(hours=10)
        # so the test stays correct if the default TOKEN_TTL ever changes.
        self.assertEqual(
            instance.expiry,
            token_creation + knox_settings.TOKEN_TTL
        )

    def test_token_creation_with_expiry(self):
        """
        Test token creation with explicit expiry time.
        """
        expiry_time = timedelta(hours=10)
        before_creation = timezone.now()
        instance, _ = AuthToken.objects.create(
            user=self.user,
            expiry=expiry_time
        )
        self.assertIsNotNone(instance.expiry)
        self.assertTrue(before_creation < instance.expiry)
        # Allow up to one second of wall-clock drift between the timestamp
        # above and the creation inside the manager.
        self.assertTrue(
            (instance.expiry - before_creation - expiry_time).total_seconds() < 1
        )

    def test_token_string_representation(self):
        """
        Test the string representation of AuthToken.
        """
        instance, _ = AuthToken.objects.create(user=self.user)
        expected_str = f'{instance.digest} : {self.user}'
        self.assertEqual(str(instance), expected_str)

    def test_multiple_tokens_for_user(self):
        """
        Test that a user can have multiple valid tokens.
        """
        token1, _ = AuthToken.objects.create(user=self.user)
        token2, _ = AuthToken.objects.create(user=self.user)
        user_tokens = self.user.auth_token_set.all()
        self.assertEqual(user_tokens.count(), 2)
        self.assertNotEqual(token1.digest, token2.digest)

    def test_token_with_custom_prefix(self):
        """
        Test token creation with custom prefix.
        """
        custom_prefix = "TEST_"
        instance, token = AuthToken.objects.create(
            user=self.user,
            prefix=custom_prefix
        )
        self.assertTrue(token.startswith(custom_prefix))
        self.assertTrue(instance.token_key.startswith(custom_prefix))
120 changes: 120 additions & 0 deletions tests/test_settings.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,120 @@
import hashlib
from datetime import timedelta
from unittest import mock

from django.core.signals import setting_changed
from django.test import SimpleTestCase, override_settings

from knox.settings import (
    CONSTANTS, IMPORT_STRINGS, knox_settings, reload_api_settings,
)


class TestKnoxSettings(SimpleTestCase):
    """
    Tests for knox settings overriding, constants, and reload behaviour.

    Must inherit a unittest-style TestCase: the original plain class used
    self.assertRaises, which does not exist on a bare object, and passed
    the pytest-style `match=` keyword that unittest does not accept.
    """

    def tearDown(self):
        # reload_api_settings mutates the global knox_settings object;
        # restore it from the real project settings so mutated state does
        # not leak into other tests.
        from django.conf import settings as django_settings
        reload_api_settings(
            setting='REST_KNOX',
            value=getattr(django_settings, 'REST_KNOX', {}),
        )

    @override_settings(REST_KNOX={
        'AUTH_TOKEN_CHARACTER_LENGTH': 32,
        'TOKEN_TTL': timedelta(hours=5),
        'AUTO_REFRESH': True,
    })
    def test_override_settings(self):
        """
        Test that settings can be overridden.
        """
        assert knox_settings.AUTH_TOKEN_CHARACTER_LENGTH == 32
        assert knox_settings.TOKEN_TTL == timedelta(hours=5)
        assert knox_settings.AUTO_REFRESH is True
        # Default values should remain unchanged
        assert knox_settings.AUTH_HEADER_PREFIX == 'Token'

    def test_constants_immutability(self):
        """
        Test that CONSTANTS cannot be modified.
        """
        with self.assertRaises(Exception):
            CONSTANTS.TOKEN_KEY_LENGTH = 20

        with self.assertRaises(Exception):
            CONSTANTS.DIGEST_LENGTH = 256

    def test_constants_values(self):
        """
        Test that CONSTANTS have correct values.
        """
        assert CONSTANTS.TOKEN_KEY_LENGTH == 15
        assert CONSTANTS.DIGEST_LENGTH == 128
        assert CONSTANTS.MAXIMUM_TOKEN_PREFIX_LENGTH == 10

    def test_reload_api_settings(self):
        """
        Test settings reload functionality.
        """
        new_settings = {
            'TOKEN_TTL': timedelta(hours=2),
            'AUTH_HEADER_PREFIX': 'Bearer',
        }

        reload_api_settings(
            setting='REST_KNOX',
            value=new_settings
        )

        assert knox_settings.TOKEN_TTL == timedelta(hours=2)
        assert knox_settings.AUTH_HEADER_PREFIX == 'Bearer'

    def test_token_prefix_length_validation(self):
        """
        Test that TOKEN_PREFIX length is validated.
        """
        # assertRaisesRegex is the unittest equivalent of
        # pytest.raises(..., match=...).
        with self.assertRaisesRegex(ValueError, "Illegal TOKEN_PREFIX length"):
            reload_api_settings(
                setting='REST_KNOX',
                value={'TOKEN_PREFIX': 'x' * 11}  # Exceeds MAXIMUM_TOKEN_PREFIX_LENGTH
            )

    def test_import_strings(self):
        """
        Test that import strings are properly handled.
        """
        assert 'SECURE_HASH_ALGORITHM' in IMPORT_STRINGS
        assert 'USER_SERIALIZER' in IMPORT_STRINGS

    @override_settings(REST_KNOX={
        'SECURE_HASH_ALGORITHM': 'hashlib.md5'
    })
    def test_hash_algorithm_import(self):
        """
        Test that hash algorithm is properly imported.
        """
        assert knox_settings.SECURE_HASH_ALGORITHM == hashlib.md5

    def test_setting_changed_signal(self):
        """
        Test that setting_changed signal properly triggers reload.
        """
        new_settings = {
            'TOKEN_TTL': timedelta(hours=3),
        }

        setting_changed.send(
            sender=None,
            setting='REST_KNOX',
            value=new_settings
        )

        assert knox_settings.TOKEN_TTL == timedelta(hours=3)

    @mock.patch('django.conf.settings')
    def test_custom_token_model(self, mock_settings):
        """
        Test custom token model setting.
        """
        # NOTE(review): patching 'django.conf.settings' only takes effect if
        # knox.settings reads it dynamically (not via a from-import binding)
        # — confirm against knox.settings if this ever stops passing.
        custom_model = 'custom_app.CustomToken'
        mock_settings.KNOX_TOKEN_MODEL = custom_model

        # Reload settings
        reload_api_settings(
            setting='REST_KNOX',
            value={}
        )

        assert knox_settings.TOKEN_MODEL == custom_model
Loading