refactor: вынес сервисы и ui-компоненты
- вынес token/chat/update логику в services
- вынес диалог и текст инструкции в ui
- добавил и обновил тесты для нового слоя
This commit is contained in:
@@ -1,3 +1,5 @@
|
||||
from .auto_update_service import AutoUpdateService
|
||||
from .chat_actions import load_chat_conversations, resolve_user_ids
|
||||
from .token_store import load_token, save_token
|
||||
from .update_service import UpdateChecker, detect_update_repository_url
|
||||
from .vk_service import VkService
|
||||
|
||||
|
||||
152
services/auto_update_service.py
Normal file
152
services/auto_update_service.py
Normal file
@@ -0,0 +1,152 @@
|
||||
import hashlib
|
||||
import os
|
||||
import re
|
||||
import shutil
|
||||
import subprocess
|
||||
import tempfile
|
||||
import urllib.request
|
||||
import zipfile
|
||||
|
||||
|
||||
class AutoUpdateService:
    """Download, verify and stage application updates.

    Also builds and launches the Windows ``cmd`` script that swaps the
    installed files in place (with backup and rollback) after the running
    process exits.
    """

    @staticmethod
    def download_update_archive(download_url, destination_path):
        """Stream the release archive at *download_url* into *destination_path*.

        Raises urllib.error.URLError/HTTPError on network failure.
        """
        request = urllib.request.Request(
            download_url,
            headers={"User-Agent": "AnabasisManager-Updater"},
        )
        with urllib.request.urlopen(request, timeout=60) as response:
            with open(destination_path, "wb") as f:
                shutil.copyfileobj(response, f)

    @staticmethod
    def download_update_text(url):
        """Fetch a small text resource (e.g. the SHA256 listing) and return it as str."""
        request = urllib.request.Request(
            url,
            headers={"User-Agent": "AnabasisManager-Updater"},
        )
        with urllib.request.urlopen(request, timeout=30) as response:
            # errors="replace" keeps a malformed checksum file from raising here;
            # verification will simply fail to find a matching hash instead.
            return response.read().decode("utf-8", errors="replace")

    @staticmethod
    def sha256_file(path):
        """Return the lowercase hex SHA-256 of the file at *path*, read in 1 MiB chunks."""
        digest = hashlib.sha256()
        with open(path, "rb") as f:
            for chunk in iter(lambda: f.read(1024 * 1024), b""):
                digest.update(chunk)
        return digest.hexdigest().lower()

    @staticmethod
    def extract_sha256_from_text(checksum_text, target_file_name):
        """Find the SHA-256 hash for *target_file_name* inside a checksum listing.

        Accepts the common ``<hash>  <name>`` format (case-insensitive match).
        With an empty/None target, the first 64-hex-digit token found wins.
        Returns "" when nothing matches.
        """
        target = (target_file_name or "").strip().lower()
        for raw_line in checksum_text.splitlines():
            line = raw_line.strip()
            if not line:
                continue
            match = re.search(r"\b([A-Fa-f0-9]{64})\b", line)
            if not match:
                continue
            checksum = match.group(1).lower()
            if not target:
                return checksum
            line_lower = line.lower()
            # Match either the full target path or just its base file name,
            # since checksum files may list either form.
            if target in line_lower:
                return checksum
            if os.path.basename(target) in line_lower:
                return checksum
        return ""

    @classmethod
    def verify_update_checksum(cls, zip_path, checksum_url, download_name):
        """Verify *zip_path* against the published SHA-256.

        Raises RuntimeError when the checksum file is missing, the hash
        cannot be found, or the archive hash does not match.
        """
        if not checksum_url:
            raise RuntimeError("В релизе нет файла SHA256. Автообновление остановлено.")
        checksum_text = cls.download_update_text(checksum_url)
        expected_hash = cls.extract_sha256_from_text(checksum_text, download_name or os.path.basename(zip_path))
        if not expected_hash:
            raise RuntimeError("Не удалось найти SHA256 для архива обновления.")
        actual_hash = cls.sha256_file(zip_path)
        if actual_hash != expected_hash:
            raise RuntimeError("SHA256 не совпадает, обновление отменено.")

    @staticmethod
    def locate_extracted_root(extracted_dir):
        """Return the directory that actually contains the app files.

        If the archive unpacked into exactly one subdirectory that holds
        AnabasisManager.exe, that subdirectory is the root; otherwise the
        extraction dir itself is used.
        """
        entries = []
        for name in os.listdir(extracted_dir):
            full_path = os.path.join(extracted_dir, name)
            if os.path.isdir(full_path):
                entries.append(full_path)
        if len(entries) == 1:
            candidate = entries[0]
            if os.path.exists(os.path.join(candidate, "AnabasisManager.exe")):
                return candidate
        return extracted_dir

    @staticmethod
    def build_update_script(app_dir, source_dir, exe_name, target_pid):
        """Write the cmd script that applies the update and return its path.

        The script waits for *target_pid* to exit, backs up *app_dir*,
        copies *source_dir* over it, restarts the exe, and rolls back from
        the backup if the copy or restart fails.
        """
        script_path = os.path.join(tempfile.gettempdir(), "anabasis_apply_update.cmd")
        script_lines = [
            "@echo off",
            "setlocal",
            f"set APP_DIR={app_dir}",
            f"set SRC_DIR={source_dir}",
            f"set EXE_NAME={exe_name}",
            f"set TARGET_PID={target_pid}",
            "set BACKUP_DIR=%TEMP%\\anabasis_backup_%RANDOM%%RANDOM%",
            # Poll until the running app (by PID) has exited.
            ":wait_for_exit",
            "tasklist /FI \"PID eq %TARGET_PID%\" | find \"%TARGET_PID%\" >nul",
            "if %ERRORLEVEL% EQU 0 (",
            " timeout /t 1 /nobreak >nul",
            " goto :wait_for_exit",
            ")",
            "timeout /t 1 /nobreak >nul",
            # Backup current install; robocopy exit codes >= 8 mean failure.
            "mkdir \"%BACKUP_DIR%\" >nul 2>&1",
            "robocopy \"%APP_DIR%\" \"%BACKUP_DIR%\" /E /NFL /NDL /NJH /NJS /NP /R:6 /W:2 >nul",
            "set RC=%ERRORLEVEL%",
            "if %RC% GEQ 8 goto :backup_error",
            # Copy the new version over the install dir.
            "robocopy \"%SRC_DIR%\" \"%APP_DIR%\" /E /NFL /NDL /NJH /NJS /NP /R:12 /W:2 >nul",
            "set RC=%ERRORLEVEL%",
            "if %RC% GEQ 8 goto :rollback",
            # Relaunch and confirm the new exe is actually running.
            "start \"\" \"%APP_DIR%\\%EXE_NAME%\"",
            "timeout /t 2 /nobreak >nul",
            "tasklist /FI \"IMAGENAME eq %EXE_NAME%\" | find /I \"%EXE_NAME%\" >nul",
            "if %ERRORLEVEL% NEQ 0 goto :rollback",
            "rmdir /S /Q \"%BACKUP_DIR%\" >nul 2>&1",
            "exit /b 0",
            ":rollback",
            "robocopy \"%BACKUP_DIR%\" \"%APP_DIR%\" /E /NFL /NDL /NJH /NJS /NP /R:6 /W:2 >nul",
            "start \"\" \"%APP_DIR%\\%EXE_NAME%\"",
            "echo Auto-update failed. Rollback executed. > \"%APP_DIR%\\update_error.log\"",
            "exit /b 2",
            ":backup_error",
            "echo Auto-update failed during backup. Code %RC% > \"%APP_DIR%\\update_error.log\"",
            "exit /b %RC%",
        ]
        # cmd scripts need CRLF line endings; newline="\r\n" would double them,
        # so join explicitly and disable translation.
        with open(script_path, "w", encoding="utf-8", newline="\r\n") as f:
            f.write("\r\n".join(script_lines) + "\r\n")
        return script_path

    @staticmethod
    def launch_update_script(script_path, work_dir):
        """Launch the update script detached so it survives this process exiting."""
        creation_flags = 0
        # hasattr guards keep this importable/testable on non-Windows platforms.
        if hasattr(subprocess, "CREATE_NEW_PROCESS_GROUP"):
            creation_flags |= subprocess.CREATE_NEW_PROCESS_GROUP
        if hasattr(subprocess, "DETACHED_PROCESS"):
            creation_flags |= subprocess.DETACHED_PROCESS
        subprocess.Popen(
            ["cmd.exe", "/c", script_path],
            cwd=work_dir,
            creationflags=creation_flags,
        )

    @classmethod
    def prepare_update(cls, download_url, checksum_url, download_name):
        """Download, verify and unpack the update; return (work_dir, source_dir).

        Bug fix: on any failure the temporary work dir is now removed before
        re-raising, so aborted update attempts no longer leak temp directories.
        """
        work_dir = tempfile.mkdtemp(prefix="anabasis_update_")
        try:
            zip_path = os.path.join(work_dir, "update.zip")
            unpack_dir = os.path.join(work_dir, "extracted")
            cls.download_update_archive(download_url, zip_path)
            cls.verify_update_checksum(zip_path, checksum_url, download_name)
            os.makedirs(unpack_dir, exist_ok=True)
            with zipfile.ZipFile(zip_path, "r") as archive:
                # NOTE(review): relies on zipfile's built-in path sanitization;
                # the archive comes from the release channel and is
                # checksum-verified above.
                archive.extractall(unpack_dir)
            source_dir = cls.locate_extracted_root(unpack_dir)
        except Exception:
            shutil.rmtree(work_dir, ignore_errors=True)
            raise
        return work_dir, source_dir
|
||||
46
services/chat_actions.py
Normal file
46
services/chat_actions.py
Normal file
@@ -0,0 +1,46 @@
|
||||
from urllib.parse import urlparse
|
||||
|
||||
|
||||
def resolve_user_ids(vk_call_with_retry, vk_api, links):
    """Resolve VK profile links to numeric user ids.

    Returns a pair ``(resolved_ids, failed_links)`` where ``failed_links``
    holds ``(link, exception_or_None)`` tuples for links that did not
    resolve to a user (None when there was no exception).
    """
    ok = []
    bad = []

    def last_path_segment(path):
        # Last non-empty URL path segment, e.g. "/id123" or "/id123/" -> "id123".
        if not path:
            return ""
        parts = path.split("/")
        name = parts[-1]
        if not name and len(parts) > 1:
            name = parts[-2]
        return name

    for link in links:
        try:
            name = last_path_segment(urlparse(link).path)
            if not name:
                bad.append((link, None))
                continue
            resolved = vk_call_with_retry(vk_api.utils.resolveScreenName, screen_name=name)
            if resolved and resolved.get("type") == "user":
                ok.append(resolved["object_id"])
            else:
                # Resolved to a group/app or nothing at all.
                bad.append((link, None))
        except Exception as error:
            bad.append((link, error))
    return ok, bad
|
||||
|
||||
|
||||
def load_chat_conversations(vk_call_with_retry, vk_api):
    """Fetch all conversations via messages.getConversations, page by page.

    Pulls 200 conversations per request and follows ``next_from`` cursors.
    Already-used cursors are remembered so a misbehaving API that repeats a
    pagination token cannot loop forever.
    """
    collected = []
    cursor = None
    used_cursors = set()
    while True:
        request_args = {"count": 200, "filter": "all"}
        if cursor:
            if cursor in used_cursors:
                break  # repeated cursor -> pagination loop, stop here
            request_args["start_from"] = cursor
            used_cursors.add(cursor)
        page = vk_call_with_retry(vk_api.messages.getConversations, **request_args)
        batch = page.get("items", [])
        if not batch:
            break
        collected.extend(batch)
        cursor = page.get("next_from")
        if not cursor:
            break
    return collected
|
||||
|
||||
136
services/token_store.py
Normal file
136
services/token_store.py
Normal file
@@ -0,0 +1,136 @@
|
||||
import base64
|
||||
import ctypes
|
||||
import json
|
||||
import os
|
||||
import time
|
||||
from ctypes import wintypes
|
||||
|
||||
|
||||
class _DataBlob(ctypes.Structure):
    """Mirror of the Win32 DATA_BLOB struct used by the DPAPI calls.

    Holds a byte count (``cbData``) and a pointer to the raw bytes
    (``pbData``), passed by reference to CryptProtectData /
    CryptUnprotectData.
    """
    _fields_ = [("cbData", wintypes.DWORD), ("pbData", ctypes.POINTER(ctypes.c_byte))]
|
||||
|
||||
|
||||
# DPAPI entry points are loaded only on Windows; on other platforms the
# handles stay None and the _crypt_* helpers must not be called.
_crypt32 = None
_kernel32 = None
if os.name == "nt":
    _crypt32 = ctypes.WinDLL("crypt32", use_last_error=True)
    _kernel32 = ctypes.WinDLL("kernel32", use_last_error=True)
    # Declare argument/result types explicitly so ctypes marshals the
    # pointer arguments correctly on 64-bit Python (defaults would
    # truncate pointers to C int).
    _crypt32.CryptProtectData.argtypes = [
        ctypes.POINTER(_DataBlob),
        wintypes.LPCWSTR,
        ctypes.POINTER(_DataBlob),
        ctypes.c_void_p,
        ctypes.c_void_p,
        wintypes.DWORD,
        ctypes.POINTER(_DataBlob),
    ]
    _crypt32.CryptProtectData.restype = wintypes.BOOL
    _crypt32.CryptUnprotectData.argtypes = [
        ctypes.POINTER(_DataBlob),
        # Second arg is an optional out-pointer for the stored description.
        ctypes.POINTER(wintypes.LPWSTR),
        ctypes.POINTER(_DataBlob),
        ctypes.c_void_p,
        ctypes.c_void_p,
        wintypes.DWORD,
        ctypes.POINTER(_DataBlob),
    ]
    _crypt32.CryptUnprotectData.restype = wintypes.BOOL
|
||||
|
||||
|
||||
def _crypt_protect_data(data, description=""):
    """Encrypt *data* (bytes) with Windows DPAPI for the current user.

    Returns the encrypted bytes. Raises OSError (via ctypes.WinError) when
    the API call fails. Must only be called on Windows, where _crypt32 and
    _kernel32 are loaded.
    """
    buffer = ctypes.create_string_buffer(data)
    # cbData is len(data): create_string_buffer appends a trailing NUL
    # byte that must not be included in the blob.
    data_in = _DataBlob(len(data), ctypes.cast(buffer, ctypes.POINTER(ctypes.c_byte)))
    data_out = _DataBlob()
    if not _crypt32.CryptProtectData(ctypes.byref(data_in), description, None, None, None, 0, ctypes.byref(data_out)):
        raise ctypes.WinError(ctypes.get_last_error())
    try:
        # Copy the result out of the API-allocated buffer before freeing it.
        return ctypes.string_at(data_out.pbData, data_out.cbData)
    finally:
        _kernel32.LocalFree(data_out.pbData)
|
||||
|
||||
|
||||
def _crypt_unprotect_data(data):
    """Decrypt DPAPI-protected *data* (bytes) for the current Windows user.

    Inverse of _crypt_protect_data. Returns the plaintext bytes. Raises
    OSError (via ctypes.WinError) when the API call fails, e.g. when the
    blob was encrypted by a different user or machine.
    """
    buffer = ctypes.create_string_buffer(data)
    # cbData is len(data): exclude the NUL create_string_buffer appends.
    data_in = _DataBlob(len(data), ctypes.cast(buffer, ctypes.POINTER(ctypes.c_byte)))
    data_out = _DataBlob()
    if not _crypt32.CryptUnprotectData(ctypes.byref(data_in), None, None, None, None, 0, ctypes.byref(data_out)):
        raise ctypes.WinError(ctypes.get_last_error())
    try:
        # Copy the result out of the API-allocated buffer before freeing it.
        return ctypes.string_at(data_out.pbData, data_out.cbData)
    finally:
        _kernel32.LocalFree(data_out.pbData)
|
||||
|
||||
|
||||
def _encrypt_token(token):
    """DPAPI-encrypt *token* and return the result as base64 ASCII text."""
    if os.name != "nt":
        raise RuntimeError("DPAPI is available only on Windows.")
    protected = _crypt_protect_data(token.encode("utf-8"))
    return base64.b64encode(protected).decode("ascii")
|
||||
|
||||
|
||||
def _decrypt_token(token_data):
    """Reverse _encrypt_token: base64-decode *token_data*, then DPAPI-decrypt to str."""
    if os.name != "nt":
        raise RuntimeError("DPAPI is available only on Windows.")
    protected = base64.b64decode(token_data.encode("ascii"))
    return _crypt_unprotect_data(protected).decode("utf-8")
|
||||
|
||||
|
||||
def save_token(token, token_file, app_data_dir, expires_in=0):
    """Persist *token* to *token_file* as JSON and return its expiration time.

    ``expires_in`` is a lifetime in seconds; non-numeric or non-positive
    values mean "never expires" and the returned expiration timestamp is 0.
    On Windows the token is DPAPI-encrypted best-effort — if encryption
    fails the token is stored in plaintext rather than failing the save.
    """
    try:
        lifetime = int(expires_in)
    except (ValueError, TypeError):
        lifetime = 0

    os.makedirs(app_data_dir, exist_ok=True)
    expires_at = (time.time() + lifetime) if lifetime > 0 else 0

    payload_token = token
    is_encrypted = False
    if os.name == "nt":
        try:
            payload_token = _encrypt_token(token)
            is_encrypted = True
        except Exception:
            # Best effort: fall back to storing plaintext.
            pass

    record = {
        "token": payload_token,
        "expiration_time": expires_at,
        "encrypted": is_encrypted,
    }

    with open(token_file, "w", encoding="utf-8") as f:
        json.dump(record, f)
    return expires_at
|
||||
|
||||
|
||||
def load_token(token_file):
    """Load the stored token; return (token, expiration_time) or (None, None).

    Decrypts DPAPI-protected tokens. The token file is deleted when the
    token is expired or cannot be decrypted (e.g. the file was copied from
    another user/machine).
    """
    if not os.path.exists(token_file):
        return None, None

    with open(token_file, "r", encoding="utf-8") as f:
        data = json.load(f)

    token = data.get("token")
    encrypted = data.get("encrypted", False)
    if token and encrypted:
        try:
            token = _decrypt_token(token)
        except Exception:
            # Undecryptable blob: discard the file so the user re-authorizes.
            try:
                os.remove(token_file)
            except Exception:
                pass
            return None, None

    # Bug fix: a file without "expiration_time" used to crash on the
    # `None > time.time()` comparison below; treat a missing/None value
    # as 0, meaning "never expires".
    expiration_time = data.get("expiration_time") or 0
    if token and (expiration_time == 0 or expiration_time > time.time()):
        return token, expiration_time

    # Expired or missing token: best-effort removal of the stale file.
    try:
        os.remove(token_file)
    except Exception:
        pass
    return None, None
|
||||
|
||||
Reference in New Issue
Block a user