"""
Hugging Face Hub Deployment Script
Deploy the Illuminator model to the Hugging Face Model Hub
"""

import argparse
import json
from pathlib import Path

from huggingface_hub import HfApi, create_repo, upload_folder

class HuggingFaceDeployer:
    """Deploy Illuminator model to Hugging Face Hub"""
    
    def __init__(self, model_dir="./huggingface_model", repo_name="illuminator-4b"):
        self.model_dir = Path(model_dir)
        self.repo_name = repo_name
        self.api = HfApi()
        
        print(f"πŸš€ Initializing Hugging Face deployment for {repo_name}")
        print(f"πŸ“ Model directory: {self.model_dir}")
    
    def validate_model_files(self):
        """Validate all required model files are present"""
        print("πŸ” Validating model files...")
        
        required_files = [
            "config.json",
            "tokenizer_config.json", 
            "README.md",
            "modeling_illuminator.py",
            "tokenization_illuminator.py"
        ]
        
        missing_files = []
        for file in required_files:
            if not (self.model_dir / file).exists():
                missing_files.append(file)
        
        if missing_files:
            print(f"❌ Missing required files: {missing_files}")
            return False
        
        print("βœ… All required model files present")
        return True
    
    def create_model_card(self):
        """Check that a model card (README.md) is present for the Hub page"""
        print("πŸ“ Checking model card...")
        
        model_card_path = self.model_dir / "README.md"
        
        if model_card_path.exists():
            print("βœ… Model card found")
            return True
        
        print("❌ Model card not found")
        return False
    
    def test_model_loading(self):
        """Test that the model can be loaded successfully"""
        print("πŸ§ͺ Testing model loading...")
        
        try:
            # Test config loading
            config_path = self.model_dir / "config.json"
            with open(config_path) as f:
                config_dict = json.load(f)
            
            print(f"βœ… Config loaded: {config_dict['model_type']}")
            
            # Make the custom classes importable from the model directory
            import sys
            sys.path.insert(0, str(self.model_dir))
            
            from modeling_illuminator import IlluminatorLMHeadModel, IlluminatorConfig
            from tokenization_illuminator import IlluminatorTokenizer
            
            print("βœ… Custom model classes imported successfully")
            
            # Instantiating the config validates the values in config.json
            config = IlluminatorConfig(**config_dict)
            print("βœ… Model configuration created")
            
            return True
            
        except Exception as e:
            print(f"❌ Model loading test failed: {e}")
            return False
    
    def create_repository(self, private=False):
        """Create repository on Hugging Face Hub"""
        print(f"πŸ“¦ Creating repository: {self.repo_name}")
        
        try:
            repo_url = create_repo(
                repo_id=self.repo_name,
                private=private,
                exist_ok=True,
                repo_type="model"
            )
            print(f"βœ… Repository created/exists: {repo_url}")
            return repo_url
        except Exception as e:
            print(f"❌ Failed to create repository: {e}")
            return None
    
    def prepare_deployment_files(self):
        """Prepare additional files for deployment"""
        print("πŸ”§ Preparing deployment files...")
        
        # Create __init__.py for package
        init_file = self.model_dir / "__init__.py"
        if not init_file.exists():
            init_content = '''"""
Illuminator Model Package
"""

from .modeling_illuminator import IlluminatorLMHeadModel, IlluminatorConfig
from .tokenization_illuminator import IlluminatorTokenizer

__all__ = ["IlluminatorLMHeadModel", "IlluminatorConfig", "IlluminatorTokenizer"]
'''
            with open(init_file, "w") as f:
                f.write(init_content)
            print("βœ… Created __init__.py")
        
        # Create requirements.txt
        requirements_file = self.model_dir / "requirements.txt"
        if not requirements_file.exists():
            requirements = """torch>=1.9.0
transformers>=4.21.0
numpy>=1.21.0
tokenizers>=0.13.0
"""
            with open(requirements_file, "w") as f:
                f.write(requirements)
            print("βœ… Created requirements.txt")
        
        return True
    
    def upload_to_hub(self):
        """Upload model to Hugging Face Hub"""
        print("πŸš€ Uploading to Hugging Face Hub...")
        
        try:
            upload_folder(
                folder_path=str(self.model_dir),
                repo_id=self.repo_name,
                repo_type="model",
                commit_message="Upload Illuminator-4B model",
                ignore_patterns=[
                    "*.pyc",
                    "__pycache__/",
                    "*.log",
                    ".git/",
                    ".DS_Store"
                ]
            )
            
            print(f"βœ… Model uploaded successfully!")
            print(f"🌐 Model available at: https://huggingface.co/{self.repo_name}")
            return True
            
        except Exception as e:
            print(f"❌ Upload failed: {e}")
            return False
    
    def deploy(self, private=False, test_loading=True):
        """Main deployment function"""
        print("🎯 Starting Hugging Face deployment process")
        print("=" * 60)
        
        # Step 1: Validate files
        if not self.validate_model_files():
            print("❌ Deployment aborted: Missing required files")
            return False
        
        # Step 2: Test model loading (optional)
        if test_loading and not self.test_model_loading():
            print("⚠️ Model loading test failed, but continuing...")
        
        # Step 3: Prepare deployment files
        if not self.prepare_deployment_files():
            print("❌ Deployment aborted: Failed to prepare files")
            return False
        
        # Step 4: Create repository
        repo_url = self.create_repository(private=private)
        if not repo_url:
            print("❌ Deployment aborted: Failed to create repository")
            return False
        
        # Step 5: Upload to hub
        if not self.upload_to_hub():
            print("❌ Deployment aborted: Upload failed")
            return False
        
        print("\nπŸŽ‰ Deployment Complete!")
        print("=" * 60)
        print(f"βœ… Model successfully deployed to: {self.repo_name}")
        print(f"🌐 Access your model at: https://huggingface.co/{self.repo_name}")
        print("\nπŸ“‹ Next steps:")
        print("1. Test your model on the Hugging Face Hub")
        print("2. Share your model with the community")
        print("3. Monitor usage and feedback")
        
        return True

def create_example_usage_script():
    """Create an example usage script"""
    example_script = '''"""
Example usage of Illuminator-4B model
"""

from transformers import AutoTokenizer, AutoModelForCausalLM
import torch

def load_illuminator_model(model_name="your-username/illuminator-4b"):
    """Load the Illuminator model and tokenizer"""
    print(f"Loading {model_name}...")
    
    # trust_remote_code lets transformers load the custom model and tokenizer
    # classes shipped with the repo (modeling_illuminator.py)
    tokenizer = AutoTokenizer.from_pretrained(model_name, trust_remote_code=True)
    model = AutoModelForCausalLM.from_pretrained(model_name, trust_remote_code=True)
    
    return model, tokenizer

def generate_response(model, tokenizer, prompt, max_length=256):
    """Generate a response using the model"""
    inputs = tokenizer.encode(prompt, return_tensors="pt")
    
    with torch.no_grad():
        outputs = model.generate(
            inputs,
            max_length=max_length,
            temperature=0.8,   # soften the next-token distribution
            do_sample=True,    # sample instead of greedy decoding
            top_p=0.9,         # nucleus sampling over the top 90% of mass
            pad_token_id=tokenizer.pad_token_id or tokenizer.eos_token_id
        )
    
    # Slice off the prompt at the token level; string-level slicing can break
    # when decoding normalizes whitespace
    generated = outputs[0][inputs.shape[-1]:]
    return tokenizer.decode(generated, skip_special_tokens=True).strip()

def main():
    # Load model
    model, tokenizer = load_illuminator_model()
    
    # Example prompts
    prompts = [
        "What is artificial intelligence?",
        "Explain quantum computing in simple terms:",
        "Write a Python function to calculate fibonacci numbers:",
        "What are the benefits of renewable energy?"
    ]
    
    print("πŸ€– Illuminator-4B Model Demo")
    print("=" * 40)
    
    for prompt in prompts:
        print(f"\\nπŸ’¬ Prompt: {prompt}")
        response = generate_response(model, tokenizer, prompt)
        print(f"πŸ€– Response: {response}")
        print("-" * 40)

if __name__ == "__main__":
    main()
'''
    
    with open("example_usage.py", "w") as f:
        f.write(example_script)
    
    print("βœ… Created example_usage.py")

def main():
    parser = argparse.ArgumentParser(description="Deploy Illuminator model to Hugging Face Hub")
    parser.add_argument("--repo-name", default="illuminator-4b", help="Repository name on Hugging Face Hub")
    parser.add_argument("--model-dir", default="./huggingface_model", help="Directory containing model files")
    parser.add_argument("--private", action="store_true", help="Create private repository")
    parser.add_argument("--skip-test", action="store_true", help="Skip model loading test")
    
    args = parser.parse_args()
    
    # Create deployer
    deployer = HuggingFaceDeployer(
        model_dir=args.model_dir,
        repo_name=args.repo_name
    )
    
    # Deploy model
    success = deployer.deploy(
        private=args.private,
        test_loading=not args.skip_test
    )
    
    if success:
        # Create example usage script
        create_example_usage_script()
        
        print("\n🎯 Deployment Summary:")
        print(f"Repository: {args.repo_name}")
        print(f"Model Directory: {args.model_dir}")
        print(f"Private: {args.private}")
        print("Example usage script created: example_usage.py")
        
        return 0
    else:
        print("❌ Deployment failed!")
        return 1

if __name__ == "__main__":
    raise SystemExit(main())