
Commit b43370f

✨ refactor(app): improve code readability by reorganizing imports
Reordered imports for better readability and alignment with PEP 8. This change enhances maintainability by ensuring consistent import order, making the codebase easier to navigate and understand. 🛠️
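For context, PEP 8 recommends grouping imports into standard-library, third-party, and local blocks, separated by blank lines, with each block kept in a consistent (usually alphabetical) order. The sketch below illustrates that convention using imports that appear in this diff; the grouping shown is illustrative, not the exact final layout of either file (fasttext stands in for the third-party group because `fasttext.load_model` is called in the changed code).

```python
# Standard library imports, alphabetized
import hashlib
import logging
import os
import platform
import re
import shutil
import tempfile
from pathlib import Path
from typing import Any, Dict, List, Optional, Union

# Third-party imports
import fasttext

# Local (package-relative) imports
from .infer import LangDetector, LangDetectConfig, DetectError  # noqa: F401
from .infer import detect
from .infer import detect_multilingual  # noqa: F401
```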
1 parent 2b4e38f commit b43370f

2 files changed: +41 −40 lines

src/fast_langdetect/__init__.py

Lines changed: 2 additions & 1 deletion
```diff
@@ -1,9 +1,10 @@
 # -*- coding: utf-8 -*-
 # @Time : 2024/1/17 下午4:00
 
+from .infer import LangDetector, LangDetectConfig, DetectError # noqa: F401
 from .infer import detect
 from .infer import detect_multilingual # noqa: F401
-from .infer import LangDetector, LangDetectConfig, DetectError # noqa: F401
+
 
 def is_japanese(string):
     for ch in string:
```
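These re-exports make the package's public names importable from the top level. A hypothetical usage sketch follows; only the names themselves are taken from the diff, and how LangDetector is constructed is not shown in this commit, so it is omitted:

```python
# The names below are re-exported by __init__.py as shown above.
from fast_langdetect import DetectError, LangDetectConfig, detect

try:
    print(detect("Hello, world!"))
except DetectError as exc:
    # DetectError is the library's own failure type, per the re-export above.
    print(f"detection failed: {exc}")
```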

src/fast_langdetect/infer.py

Lines changed: 39 additions & 39 deletions (an import reorder plus whitespace-only changes; every removed line reappears with identical text)
```diff
@@ -6,10 +6,10 @@
 import hashlib
 import logging
 import os
-import tempfile
 import platform
 import re
 import shutil
+import tempfile
 from pathlib import Path
 from typing import Dict, List, Optional, Union, Any
 
@@ -143,29 +143,29 @@ def _load_windows_compatible(self, model_path: Path) -> Any:
         :raises DetectError: If all loading strategies fail
         """
         model_path_str = str(model_path.resolve())
-
+
         # Try to load model directly
         try:
             return fasttext.load_model(model_path_str)
         except Exception as e:
             logger.debug(f"fast-langdetect: Load model failed: {e}")
-
+
         # Try to load model using relative path
         try:
             cwd = Path.cwd()
             rel_path = os.path.relpath(model_path, cwd)
             return fasttext.load_model(rel_path)
         except Exception as e:
             logger.debug(f"fast-langdetect: Failed to load model using relative path: {e}")
-
+
         # Use temporary file as last resort
         logger.debug(f"fast-langdetect: Using temporary file to load model: {model_path}")
         tmp_path = None
         try:
             # Use NamedTemporaryFile to create a temporary file
             tmp_fd, tmp_path = tempfile.mkstemp(suffix='.bin')
             os.close(tmp_fd) # Close file descriptor
-
+
             # Copy model file to temporary location
             shutil.copy2(model_path, tmp_path)
             return fasttext.load_model(tmp_path)
@@ -207,14 +207,14 @@ class LangDetectConfig:
     """
 
     def __init__(
-        self,
-        cache_dir: Optional[str] = None,
-        custom_model_path: Optional[str] = None,
-        proxy: Optional[str] = None,
-        allow_fallback: bool = True,
-        disable_verify: bool = False,
-        verify_hash: Optional[str] = None,
-        normalize_input: bool = False,
+        self,
+        cache_dir: Optional[str] = None,
+        custom_model_path: Optional[str] = None,
+        proxy: Optional[str] = None,
+        allow_fallback: bool = True,
+        disable_verify: bool = False,
+        verify_hash: Optional[str] = None,
+        normalize_input: bool = False,
     ):
         self.cache_dir = cache_dir or CACHE_DIRECTORY
         self.custom_model_path = custom_model_path
@@ -276,7 +276,7 @@ def _get_model(self, low_memory: bool = True) -> Any:
             raise DetectError("Failed to load model") from e
 
     def detect(
-        self, text: str, low_memory: bool = True
+        self, text: str, low_memory: bool = True
     ) -> Dict[str, Union[str, float]]:
         """
         Detect primary language of text.
@@ -302,11 +302,11 @@ def detect(
             raise DetectError("Language detection failed") from e
 
     def detect_multilingual(
-        self,
-        text: str,
-        low_memory: bool = False,
-        k: int = 5,
-        threshold: float = 0.0,
+        self,
+        text: str,
+        low_memory: bool = False,
+        k: int = 5,
+        threshold: float = 0.0,
     ) -> List[Dict[str, Any]]:
         """
         Detect multiple possible languages in text.
@@ -355,24 +355,24 @@ def _normalize_text(text: str, should_normalize: bool = False) -> str:
     """
     if not should_normalize:
         return text
-
+
     # Check if text is all uppercase (or mostly uppercase)
     if text.isupper() or (
-        len(re.findall(r'[A-Z]', text)) > 0.8 * len(re.findall(r'[A-Za-z]', text))
-        and len(text) > 5
+        len(re.findall(r'[A-Z]', text)) > 0.8 * len(re.findall(r'[A-Za-z]', text))
+        and len(text) > 5
     ):
         return text.lower()
-
+
     return text
 
 
 def detect(
-    text: str,
-    *,
-    low_memory: bool = True,
-    model_download_proxy: Optional[str] = None,
-    use_strict_mode: bool = False,
-    normalize_input: bool = True,
+    text: str,
+    *,
+    low_memory: bool = True,
+    model_download_proxy: Optional[str] = None,
+    use_strict_mode: bool = False,
+    normalize_input: bool = True,
 ) -> Dict[str, Union[str, float]]:
     """
     Simple interface for language detection.
@@ -396,7 +396,7 @@ def detect(
        )
    if model_download_proxy or use_strict_mode or normalize_input:
        config = LangDetectConfig(
-            proxy=model_download_proxy,
+            proxy=model_download_proxy,
            allow_fallback=not use_strict_mode,
            normalize_input=normalize_input
        )
@@ -406,14 +406,14 @@ def detect(
 
 
 def detect_multilingual(
-    text: str,
-    *,
-    low_memory: bool = False,
-    model_download_proxy: Optional[str] = None,
-    k: int = 5,
-    threshold: float = 0.0,
-    use_strict_mode: bool = False,
-    normalize_input: bool = True,
+    text: str,
+    *,
+    low_memory: bool = False,
+    model_download_proxy: Optional[str] = None,
+    k: int = 5,
+    threshold: float = 0.0,
+    use_strict_mode: bool = False,
+    normalize_input: bool = True,
 ) -> List[Dict[str, Any]]:
     """
     Simple interface for multi-language detection.
@@ -439,7 +439,7 @@ def detect_multilingual(
        )
    if model_download_proxy or use_strict_mode or normalize_input:
        config = LangDetectConfig(
-            proxy=model_download_proxy,
+            proxy=model_download_proxy,
            allow_fallback=not use_strict_mode,
            normalize_input=normalize_input
        )
```
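As a usage sketch of the keyword-only options visible in the hunks above: parameter names and defaults are taken from the diff, while the structure of the returned dicts is assumed, since it is not part of the changed lines.

```python
from fast_langdetect import detect, detect_multilingual

# use_strict_mode maps to LangDetectConfig(allow_fallback=not use_strict_mode),
# and normalize_input lowercases mostly-uppercase input, per the hunks above.
print(detect("NPR NEWS BULLETIN", use_strict_mode=False, normalize_input=True))

# Up to k=3 candidate languages above the given threshold.
for candidate in detect_multilingual("Hello world. Bonjour le monde.", k=3, threshold=0.0):
    print(candidate)
```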
