Coverage for integrations/vision/minicpm_installer.py: 80.0%

55 statements  

coverage.py v7.14.0, created at 2026-05-12 04:49 +0000

1""" 

2MiniCPM Model Installer — auto-downloads MiniCPM-V-2 for the vision sidecar. 

3 

4Follows the same sidecar installer pattern: detect GPU, download model, 

5verify cache, provide health status. Model is stored in ~/.hevolve/models/minicpm/. 

6""" 

7import logging 

8import os 

9import shutil 

10import subprocess 

11import sys 

12from pathlib import Path 

13from typing import Dict, Optional 

14 

15logger = logging.getLogger('hevolve_vision') 

16 

17DEFAULT_MODEL_ID = 'openbmb/MiniCPM-V-2' 

18DEFAULT_MODEL_DIR = os.path.join(Path.home(), '.hevolve', 'models', 'minicpm') 

19 

20 

21class MiniCPMInstaller: 

22 """Auto-download and verify MiniCPM-V-2 model weights.""" 

23 

24 def __init__(self, model_id: str = DEFAULT_MODEL_ID, 

25 model_dir: str = DEFAULT_MODEL_DIR): 

26 self.model_id = model_id 

27 self.model_dir = model_dir 

28 self._installed = False 

29 self._gpu_available = False 

30 

31 def detect_gpu(self) -> bool: 

32 """Check if a compatible GPU is available (CUDA or Apple Metal/MPS).""" 

33 try: 

34 from integrations.service_tools.vram_manager import detect_gpu as _detect_gpu 

35 info = _detect_gpu() 

36 self._gpu_available = info.get('cuda_available', False) or info.get('metal_available', False) 

37 if info.get('name'): 

38 logger.info(f"GPU detected: {info['name']} ({info.get('total_gb', 0):.1f} GB)") 

39 elif not self._gpu_available: 

40 logger.warning("No compatible GPU detected — MiniCPM requires GPU") 

41 return self._gpu_available 

42 except ImportError: 

43 # Fallback: inline detection when running standalone 

44 try: 

45 import torch 

46 if torch.cuda.is_available(): 

47 self._gpu_available = True 

48 name = torch.cuda.get_device_name(0) 

49 mem = torch.cuda.get_device_properties(0).total_memory / (1024**3) 

50 logger.info(f"CUDA GPU detected: {name} ({mem:.1f} GB)") 

51 elif hasattr(torch.backends, 'mps') and torch.backends.mps.is_available(): 

52 self._gpu_available = True 

53 import platform 

54 chip = platform.processor() or 'Apple Silicon' 

55 logger.info(f"Apple Metal (MPS) detected: {chip}") 

56 else: 

57 self._gpu_available = False 

58 logger.warning("No compatible GPU detected — MiniCPM requires GPU") 

59 return self._gpu_available 

60 except ImportError: 

61 logger.warning("PyTorch not installed — cannot detect GPU") 

62 return False 

63 

64 def is_installed(self) -> bool: 

65 """Check if model weights are already cached.""" 

66 marker = os.path.join(self.model_dir, 'config.json') 

67 self._installed = os.path.isfile(marker) 

68 return self._installed 

69 

70 def install(self, force: bool = False) -> bool: 

71 """Download MiniCPM-V-2 model to local cache. 

72 

73 Uses huggingface_hub snapshot_download for efficient partial downloads. 

74 Returns True on success. 

75 """ 

76 if self.is_installed() and not force: 

77 logger.info(f"MiniCPM already installed at {self.model_dir}") 

78 return True 

79 

80 os.makedirs(self.model_dir, exist_ok=True) 

81 

82 try: 

83 from huggingface_hub import snapshot_download 

84 logger.info(f"Downloading {self.model_id} to {self.model_dir}...") 

85 snapshot_download( 

86 repo_id=self.model_id, 

87 local_dir=self.model_dir, 

88 local_dir_use_symlinks=False, 

89 ) 

90 self._installed = True 

91 logger.info("MiniCPM download complete") 

92 return True 

93 except ImportError: 

94 logger.error("huggingface_hub not installed. Run: pip install huggingface_hub") 

95 return False 

96 except Exception as e: 

97 logger.error(f"MiniCPM download failed: {e}") 

98 return False 

99 

100 def uninstall(self) -> bool: 

101 """Remove cached model weights.""" 

102 if os.path.isdir(self.model_dir): 

103 shutil.rmtree(self.model_dir) 

104 self._installed = False 

105 logger.info("MiniCPM model removed") 

106 return True 

107 return False 

108 

109 def get_status(self) -> Dict: 

110 """Return installer status.""" 

111 return { 

112 'model_id': self.model_id, 

113 'model_dir': self.model_dir, 

114 'installed': self.is_installed(), 

115 'gpu_available': self._gpu_available, 

116 } 

117 

118 def get_model_dir(self) -> Optional[str]: 

119 """Return model directory if installed, else None.""" 

120 if self.is_installed(): 

121 return self.model_dir 

122 return None
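
A minimal usage sketch (assuming the module imports as integrations.vision.minicpm_installer, matching the path in the report header): detect the GPU, install the weights if they are not already cached, then read the status dictionary.

    from integrations.vision.minicpm_installer import MiniCPMInstaller

    installer = MiniCPMInstaller()        # defaults: openbmb/MiniCPM-V-2 -> ~/.hevolve/models/minicpm
    if installer.detect_gpu():            # CUDA or Apple Metal/MPS required
        if not installer.is_installed():
            installer.install()           # snapshot_download into model_dir
        print(installer.get_status())     # {'model_id': ..., 'installed': ..., 'gpu_available': ...}
    else:
        print("No compatible GPU detected; skipping MiniCPM install")

The same flow is what a sidecar health check would run at startup: is_installed() only probes for config.json in the cache directory, so a call to install() is still needed to guarantee the full snapshot is present.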