spaCy/spacy/tests/lang/bn/test_tokenizer.py
Daniël de Kok e2b70df012
Configure isort to use the Black profile, recursively isort the spacy module (#12721)
* Use isort with Black profile

* isort all the things

* Fix import cycles as a result of import sorting

* Add DOCBIN_ALL_ATTRS type definition

* Add isort to requirements

* Remove isort from build dependencies check

* Typo
2023-06-14 17:48:41 +02:00

37 lines
3.5 KiB
Python

import pytest
# fmt: off
# Bengali (bn) tokenizer test cases: pairs of (input text, expected token texts).
# Covers sentence-final punctuation (danda "āĨ¤", "!", "?"), quoted speech,
# hyphenated case markers, arithmetic symbols with Bengali digits, and
# abbreviations that must keep their trailing period (e.g. "āĻ¸ā§‡.").
TESTCASES = [
# Punctuation tests
("āĻ†āĻŽāĻŋ āĻŦāĻžāĻ‚āĻ˛āĻžāĻ¯āĻŧ āĻ—āĻžāĻ¨ āĻ—āĻžāĻ‡!", ["āĻ†āĻŽāĻŋ", "āĻŦāĻžāĻ‚āĻ˛āĻžāĻ¯āĻŧ", "āĻ—āĻžāĻ¨", "āĻ—āĻžāĻ‡", "!"]),
("āĻ†āĻŽāĻŋ āĻŦāĻžāĻ‚āĻ˛āĻžāĻ¯āĻŧ āĻ•āĻĨāĻž āĻ•āĻ‡āĨ¤", ["āĻ†āĻŽāĻŋ", "āĻŦāĻžāĻ‚āĻ˛āĻžāĻ¯āĻŧ", "āĻ•āĻĨāĻž", "āĻ•āĻ‡", "āĨ¤"]),
("āĻŦāĻ¸ā§āĻ¨ā§āĻ§āĻ°āĻž āĻœāĻ¨āĻ¸āĻŽā§āĻŽā§āĻ–ā§‡ āĻĻā§‹āĻˇ āĻ¸ā§āĻŦā§€āĻ•āĻžāĻ° āĻ•āĻ°āĻ˛ā§‹ āĻ¨āĻž?", ["āĻŦāĻ¸ā§āĻ¨ā§āĻ§āĻ°āĻž", "āĻœāĻ¨āĻ¸āĻŽā§āĻŽā§āĻ–ā§‡", "āĻĻā§‹āĻˇ", "āĻ¸ā§āĻŦā§€āĻ•āĻžāĻ°", "āĻ•āĻ°āĻ˛ā§‹", "āĻ¨āĻž", "?"]),
("āĻŸāĻžāĻ•āĻž āĻĨāĻžāĻ•āĻ˛ā§‡ āĻ•āĻŋ āĻ¨āĻž āĻšāĻ¯āĻŧ!", ["āĻŸāĻžāĻ•āĻž", "āĻĨāĻžāĻ•āĻ˛ā§‡", "āĻ•āĻŋ", "āĻ¨āĻž", "āĻšāĻ¯āĻŧ", "!"]),
("āĻ¸āĻ°āĻ•āĻžāĻ°āĻŋ āĻŦāĻŋāĻļā§āĻŦāĻŦāĻŋāĻĻā§āĻ¯āĻžāĻ˛ā§Ÿ-āĻāĻ° āĻ›āĻžāĻ¤ā§āĻ° āĻ¨āĻ‡ āĻŦāĻ˛ā§‡āĻ‡ āĻ•āĻŋ āĻāĻŽāĻ¨ āĻ†āĻšāĻ°āĻŖ?", ["āĻ¸āĻ°āĻ•āĻžāĻ°āĻŋ", "āĻŦāĻŋāĻļā§āĻŦāĻŦāĻŋāĻĻā§āĻ¯āĻžāĻ˛ā§Ÿ", "-", "āĻāĻ°", "āĻ›āĻžāĻ¤ā§āĻ°", "āĻ¨āĻ‡", "āĻŦāĻ˛ā§‡āĻ‡", "āĻ•āĻŋ", "āĻāĻŽāĻ¨", "āĻ†āĻšāĻ°āĻŖ", "?"]),
('āĻ¤āĻžāĻ°āĻž āĻŦāĻ˛ā§‡, "āĻ“āĻ°āĻž āĻ–āĻžāĻŽāĻžāĻ°ā§‡āĻ° āĻŽā§āĻ°āĻ—āĻŋāĨ¤"', ["āĻ¤āĻžāĻ°āĻž", "āĻŦāĻ˛ā§‡", ",", '"', "āĻ“āĻ°āĻž", "āĻ–āĻžāĻŽāĻžāĻ°ā§‡āĻ°", "āĻŽā§āĻ°āĻ—āĻŋ", "āĨ¤", '"']),
("ā§Š*ā§Š=ā§Ŧ?", ["ā§Š", "*", "ā§Š", "=", "ā§Ŧ", "?"]),
("āĻ•āĻžāĻāĻ āĻžāĻ˛-āĻāĻ° āĻ—āĻ¨ā§āĻ§āĻ‡ āĻ…āĻ¨ā§āĻ¯āĻ°āĻ•āĻŽ", ["āĻ•āĻžāĻāĻ āĻžāĻ˛", "-", "āĻāĻ°", "āĻ—āĻ¨ā§āĻ§āĻ‡", "āĻ…āĻ¨ā§āĻ¯āĻ°āĻ•āĻŽ"]),
# Abbreviations
("āĻĄāĻƒ āĻ–āĻžāĻ˛ā§‡āĻĻ āĻŦāĻ˛āĻ˛ā§‡āĻ¨ āĻĸāĻžāĻ•āĻžāĻ¯āĻŧ ā§Šā§Ģ āĻĄāĻŋāĻ—ā§āĻ°āĻŋ āĻ¸ā§‡.āĨ¤", ["āĻĄāĻƒ", "āĻ–āĻžāĻ˛ā§‡āĻĻ", "āĻŦāĻ˛āĻ˛ā§‡āĻ¨", "āĻĸāĻžāĻ•āĻžāĻ¯āĻŧ", "ā§Šā§Ģ", "āĻĄāĻŋāĻ—ā§āĻ°āĻŋ", "āĻ¸ā§‡.", "āĨ¤"]),
]
# fmt: on
@pytest.mark.parametrize("text,expected_tokens", TESTCASES)
def test_bn_tokenizer_handles_testcases(bn_tokenizer, text, expected_tokens):
    """Tokenize each test sentence and compare the non-space token texts
    against the expected token list for that case."""
    doc = bn_tokenizer(text)
    observed = [tok.text for tok in doc if not tok.is_space]
    assert observed == expected_tokens
def test_bn_tokenizer_handles_long_text(bn_tokenizer):
    """Smoke test: a multi-sentence Bengali paragraph yields exactly 84 tokens.

    The backslash line continuations keep the literal a single logical line,
    so the string content (and thus the token count) is unchanged.
    """
    paragraph = """āĻ¨āĻ°ā§āĻĨ āĻ¸āĻžāĻ‰āĻĨ āĻŦāĻŋāĻļā§āĻŦāĻŦāĻŋāĻĻā§āĻ¯āĻžāĻ˛āĻ¯āĻŧā§‡ āĻ¸āĻžāĻ°āĻžāĻŦāĻ›āĻ° āĻ•ā§‹āĻ¨ āĻ¨āĻž āĻ•ā§‹āĻ¨ āĻŦāĻŋāĻˇāĻ¯āĻŧā§‡ āĻ—āĻŦā§‡āĻˇāĻŖāĻž āĻšāĻ˛āĻ¤ā§‡āĻ‡ āĻĨāĻžāĻ•ā§‡āĨ¤ \
āĻ…āĻ­āĻŋāĻœā§āĻž āĻĢā§āĻ¯āĻžāĻ•āĻžāĻ˛ā§āĻŸāĻŋ āĻŽā§‡āĻŽā§āĻŦāĻžāĻ°āĻ—āĻŖ āĻĒā§āĻ°āĻžāĻ¯āĻŧāĻ‡ āĻļāĻŋāĻ•ā§āĻˇāĻžāĻ°ā§āĻĨā§€āĻĻā§‡āĻ° āĻ¨āĻŋāĻ¯āĻŧā§‡ āĻŦāĻŋāĻ­āĻŋāĻ¨ā§āĻ¨ āĻ—āĻŦā§‡āĻˇāĻŖāĻž āĻĒā§āĻ°āĻ•āĻ˛ā§āĻĒā§‡ āĻ•āĻžāĻœ āĻ•āĻ°ā§‡āĻ¨, \
āĻ¯āĻžāĻ° āĻŽāĻ§ā§āĻ¯ā§‡ āĻ°āĻ¯āĻŧā§‡āĻ›ā§‡ āĻ°ā§‹āĻŦāĻŸ āĻĨā§‡āĻ•ā§‡ āĻŽā§‡āĻļāĻŋāĻ¨ āĻ˛āĻžāĻ°ā§āĻ¨āĻŋāĻ‚ āĻ¸āĻŋāĻ¸ā§āĻŸā§‡āĻŽ āĻ“ āĻ†āĻ°ā§āĻŸāĻŋāĻĢāĻŋāĻļāĻŋāĻ¯āĻŧāĻžāĻ˛ āĻ‡āĻ¨ā§āĻŸā§‡āĻ˛āĻŋāĻœā§‡āĻ¨ā§āĻ¸āĨ¤ \
āĻāĻ¸āĻ•āĻ˛ āĻĒā§āĻ°āĻ•āĻ˛ā§āĻĒā§‡ āĻ•āĻžāĻœ āĻ•āĻ°āĻžāĻ° āĻŽāĻžāĻ§ā§āĻ¯āĻŽā§‡ āĻ¸āĻ‚āĻļā§āĻ˛āĻŋāĻˇā§āĻŸ āĻ•ā§āĻˇā§‡āĻ¤ā§āĻ°ā§‡ āĻ¯āĻĨā§‡āĻˇā§āĻ  āĻĒāĻ°āĻŋāĻŽāĻžāĻŖ āĻ¸ā§āĻĒā§‡āĻļāĻžāĻ˛āĻžāĻ‡āĻœāĻĄ āĻšāĻ“āĻ¯āĻŧāĻž āĻ¸āĻŽā§āĻ­āĻŦāĨ¤ \
āĻ†āĻ° āĻ—āĻŦā§‡āĻˇāĻŖāĻžāĻ° āĻ•āĻžāĻœ āĻ¤ā§‹āĻŽāĻžāĻ° āĻ•ā§āĻ¯āĻžāĻ°āĻŋāĻ¯āĻŧāĻžāĻ°āĻ•ā§‡ āĻ ā§‡āĻ˛ā§‡ āĻ¨āĻŋāĻ¯āĻŧā§‡ āĻ¯āĻžāĻŦā§‡ āĻ…āĻ¨ā§‡āĻ•āĻ–āĻžāĻ¨āĻŋ! \
āĻ•āĻ¨ā§āĻŸā§‡āĻ¸ā§āĻŸ āĻĒā§āĻ°ā§‹āĻ—ā§āĻ°āĻžāĻŽāĻžāĻ° āĻšāĻ“, āĻ—āĻŦā§‡āĻˇāĻ• āĻ•āĻŋāĻ‚āĻŦāĻž āĻĄā§‡āĻ­ā§‡āĻ˛āĻĒāĻžāĻ° - āĻ¨āĻ°ā§āĻĨ āĻ¸āĻžāĻ‰āĻĨ āĻ‡āĻ‰āĻ¨āĻŋāĻ­āĻžāĻ°ā§āĻ¸āĻŋāĻŸāĻŋāĻ¤ā§‡ āĻ¤ā§‹āĻŽāĻžāĻ° āĻĒā§āĻ°āĻ¤āĻŋāĻ­āĻž āĻŦāĻŋāĻ•āĻžāĻļā§‡āĻ° āĻ¸ā§āĻ¯ā§‹āĻ— āĻ°āĻ¯āĻŧā§‡āĻ›ā§‡āĻ‡āĨ¤ \
āĻ¨āĻ°ā§āĻĨ āĻ¸āĻžāĻ‰āĻĨā§‡āĻ° āĻ…āĻ¸āĻžāĻ§āĻžāĻ°āĻŖ āĻ•āĻŽāĻŋāĻ‰āĻ¨āĻŋāĻŸāĻŋāĻ¤ā§‡ āĻ¤ā§‹āĻŽāĻžāĻ•ā§‡ āĻ¸āĻžāĻĻāĻ° āĻ†āĻŽāĻ¨ā§āĻ¤ā§āĻ°āĻŖāĨ¤"""
    doc = bn_tokenizer(paragraph)
    assert len(doc) == 84