#!/usr/bin/env python3
"""
Deployment script for Hugging Face Spaces

Checks local tooling (git, huggingface_hub), sets up Git LFS, and writes
the .gitignore and Space README.md needed to push this project to a
Hugging Face Space using the Docker SDK.
"""
import os
import sys
import subprocess
from pathlib import Path


def check_requirements():
    """Check if required tools are available.

    Returns:
        bool: True when both git and the huggingface_hub package are usable,
        False otherwise (a message is printed for the missing tool).
    """
    print("🔍 Checking requirements...")

    # Check if git is available. FileNotFoundError covers "git not on PATH";
    # CalledProcessError covers a git binary that exists but errors out.
    try:
        subprocess.run(["git", "--version"], capture_output=True, check=True)
        print("✅ Git available")
    except (subprocess.CalledProcessError, FileNotFoundError):
        print("❌ Git not found. Please install git.")
        return False

    # Check if huggingface_hub is available — the import itself is the probe.
    try:
        import huggingface_hub  # noqa: F401
        print("✅ Hugging Face Hub available")
    except ImportError:
        print("❌ Hugging Face Hub not found. Install with: pip install huggingface_hub")
        return False

    return True


def setup_git_lfs():
    """Set up Git LFS (required for large model/binary files on the Hub).

    Returns:
        bool: True when `git lfs install` succeeded, False otherwise.
    """
    print("📁 Setting up Git LFS...")

    try:
        subprocess.run(["git", "lfs", "install"], check=True)
        print("✅ Git LFS installed")
        return True
    except subprocess.CalledProcessError:
        print("❌ Failed to install Git LFS")
        return False


def create_gitignore():
    """Create .gitignore for the project in the current working directory.

    Overwrites any existing .gitignore. The leading/trailing whitespace of the
    template is stripped before writing.
    """
    print("📝 Creating .gitignore...")

    gitignore_content = """
# Python
__pycache__/
*.py[cod]
*$py.class
*.so
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
*.egg-info/
.installed.cfg
*.egg

# Virtual environments
venv/
env/
ENV/

# IDE
.vscode/
.idea/
*.swp
*.swo

# OS
.DS_Store
Thumbs.db

# Model files (too large for git)
models/
*.bin
*.safetensors
*.pt
*.pth

# Cache
.cache/
__pycache__/

# Logs
*.log
logs/

# Temporary files
*.tmp
*.temp

# Environment variables
.env
.env.local
.env.production

# Hugging Face
.huggingface/
transformers_cache/
"""

    # Explicit UTF-8: the platform default encoding (e.g. cp1252 on Windows)
    # must not be relied on for files pushed to the Hub.
    with open(".gitignore", "w", encoding="utf-8") as f:
        f.write(gitignore_content.strip())

    print("✅ .gitignore created")


def create_readme():
    """Create README.md with the Hugging Face Spaces YAML front matter.

    Overwrites any existing README.md in the current working directory.
    """
    print("📖 Creating README.md...")

    readme_content = """---
title: Textilindo AI Assistant
emoji: 🤖
colorFrom: blue
colorTo: purple
sdk: docker
pinned: false
license: mit
app_port: 7860
---

# Textilindo AI Assistant

AI Assistant for Textilindo with training and inference capabilities.

## Features

- 🤖 AI model training with LoRA
- 📊 Dataset creation and management
- 🧪 Model testing and inference
- 🔗 External service integration
- 📱 Web interface for all operations

## Usage

1. **Check Training Ready**: Verify all components are ready
2. **Create Dataset**: Generate sample training data
3. **Setup Training**: Download models and setup environment
4. **Train Model**: Start the training process
5. **Test Model**: Interact with the trained model

## Hardware Requirements

- **Minimum**: CPU Basic (2 vCPU, 8GB RAM)
- **Recommended**: GPU Basic (1 T4 GPU, 16GB RAM)
- **For Training**: GPU A10G or higher

## Environment Variables

Set these in your Space settings:

- `HUGGINGFACE_TOKEN`: Your Hugging Face token (optional)
- `NOVITA_API_KEY`: Your Novita AI API key (optional)

## Support

For issues and questions, check the logs and health endpoint.
"""

    # Explicit UTF-8 is required here: the content contains emoji, and a
    # cp1252 default (Windows) would raise UnicodeEncodeError.
    with open("README.md", "w", encoding="utf-8") as f:
        f.write(readme_content)

    print("✅ README.md created")


def main():
    """Run the full deployment preparation and print next steps.

    Exits with status 1 if requirements are missing or Git LFS setup fails.
    """
    print("🚀 Textilindo AI Assistant - Hugging Face Spaces Deployment")
    print("=" * 60)

    # Check requirements
    if not check_requirements():
        print("❌ Requirements not met. Please install missing tools.")
        sys.exit(1)

    # Setup Git LFS
    if not setup_git_lfs():
        print("❌ Failed to setup Git LFS")
        sys.exit(1)

    # Create necessary files
    create_gitignore()
    create_readme()

    print("\n✅ Deployment preparation complete!")
    print("\n📋 Next steps:")
    print("1. Create a new Hugging Face Space")
    print("2. Use Docker SDK")
    print("3. Set hardware to GPU Basic or higher")
    print("4. Push your code to the Space repository")
    print("5. Set environment variables if needed")
    print("\n🔗 Your Space will be available at: https://huggingface.co/spaces/your-username/your-space-name")


if __name__ == "__main__":
    main()