Add Ollama support, progress bar, and professional UI redesign
This commit is contained in:
@@ -3,7 +3,8 @@ Translation Service Abstraction
|
||||
Provides a unified interface for different translation providers
|
||||
"""
|
||||
from abc import ABC, abstractmethod
|
||||
from typing import Optional
|
||||
from typing import Optional, List
|
||||
import requests
|
||||
from deep_translator import GoogleTranslator, DeeplTranslator, LibreTranslator
|
||||
from config import config
|
||||
|
||||
@@ -65,6 +66,49 @@ class LibreTranslationProvider(TranslationProvider):
|
||||
return text
|
||||
|
||||
|
||||
class OllamaTranslationProvider(TranslationProvider):
    """Translate text using a locally hosted Ollama LLM.

    Sends one non-streaming ``/api/generate`` request per call and, like
    the other providers in this module, degrades gracefully: on any
    failure the original text is returned unchanged instead of raising.
    """

    def __init__(self, base_url: str = "http://localhost:11434", model: str = "llama3"):
        # Normalise the base URL so endpoint paths can be appended safely.
        self.base_url = base_url.rstrip('/')
        self.model = model

    def translate(self, text: str, target_language: str, source_language: str = 'auto') -> str:
        """Translate ``text`` into ``target_language``.

        ``source_language`` is mentioned in the prompt when it is not
        ``'auto'``; otherwise the model is left to detect the language.
        (The previous implementation accepted this parameter but silently
        ignored it.)  Blank input, request failures, and malformed
        responses all fall back to returning ``text`` unchanged.
        """
        if not text or not text.strip():
            return text

        # Only pin the source language when the caller asked for one;
        # with 'auto' the prompt is identical to the original behaviour.
        if source_language and source_language != 'auto':
            instruction = f"Translate the following text from {source_language} to {target_language}."
        else:
            instruction = f"Translate the following text to {target_language}."
        prompt = f"{instruction} Return ONLY the translation, nothing else:\n\n{text}"

        try:
            response = requests.post(
                f"{self.base_url}/api/generate",
                json={
                    "model": self.model,
                    "prompt": prompt,
                    # Non-streaming: a single JSON body instead of NDJSON chunks.
                    "stream": False,
                },
                timeout=30,
            )
            response.raise_for_status()
            result = response.json()
            # Ollama puts the generated text under "response".
            return result.get("response", text).strip()
        except Exception as e:
            # Deliberate best-effort boundary: any failure (network, HTTP
            # status, JSON decode, unexpected payload shape) is logged and
            # the caller receives the untranslated input.
            print(f"Ollama translation error: {e}")
            return text

    @staticmethod
    def list_models(base_url: str = "http://localhost:11434") -> List[str]:
        """Return the names of models available on the Ollama server.

        Queries ``/api/tags``; returns an empty list if the server is
        unreachable or the payload is not in the expected shape.
        """
        try:
            response = requests.get(f"{base_url.rstrip('/')}/api/tags", timeout=5)
            response.raise_for_status()
            models = response.json().get("models", [])
            return [model["name"] for model in models]
        except Exception as e:
            # Best-effort discovery: callers treat [] as "no models known".
            print(f"Error listing Ollama models: {e}")
            return []
|
||||
|
||||
|
||||
class TranslationService:
|
||||
"""Main translation service that delegates to the configured provider"""
|
||||
|
||||
@@ -85,6 +129,10 @@ class TranslationService:
|
||||
return DeepLTranslationProvider(config.DEEPL_API_KEY)
|
||||
elif service_type == "libre":
|
||||
return LibreTranslationProvider()
|
||||
elif service_type == "ollama":
|
||||
ollama_url = getattr(config, 'OLLAMA_BASE_URL', 'http://localhost:11434')
|
||||
ollama_model = getattr(config, 'OLLAMA_MODEL', 'llama3')
|
||||
return OllamaTranslationProvider(base_url=ollama_url, model=ollama_model)
|
||||
else: # Default to Google
|
||||
return GoogleTranslationProvider()
|
||||
|
||||
|
||||
Reference in New Issue
Block a user