-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathprogressive_training_system_example.py
More file actions
208 lines (154 loc) Β· 6.95 KB
/
progressive_training_system_example.py
File metadata and controls
208 lines (154 loc) Β· 6.95 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
"""
Example usage of the new Progressive Training System.
This example demonstrates how to use the new progressive training system
with dual-LoRA approach and different training modes.
"""
import torch
import torch.nn as nn
from torch.utils.data import DataLoader, TensorDataset
import sys
from pathlib import Path
sys.path.append(str(Path(__file__).parent.parent / "src"))
from training import ProgressiveTrainer, ProgressiveRackBuilder
from config.base import StackWiseConfig
def create_sample_data(batch_size: int = 4, seq_len: int = 128, vocab_size: int = 1000,
                       num_samples=None):
    """Create a DataLoader of random token data for demonstration.

    Args:
        batch_size: Number of samples per batch.
        seq_len: Sequence length of each sample.
        vocab_size: Vocabulary size; token ids are drawn from [0, vocab_size).
        num_samples: Total number of samples in the dataset. Defaults to
            ``batch_size`` (the original behavior: exactly one batch).

    Returns:
        A shuffling DataLoader whose batches are dicts with 'input_ids'
        and 'labels' tensors of shape (batch, seq_len).
    """
    if num_samples is None:
        # Backward-compatible default: dataset holds exactly one batch.
        num_samples = batch_size

    input_ids = torch.randint(0, vocab_size, (num_samples, seq_len))
    labels = torch.randint(0, vocab_size, (num_samples, seq_len))

    class DictDataset(torch.utils.data.Dataset):
        """Wraps parallel tensors and yields dict-style samples."""

        def __init__(self, input_ids, labels):
            self.input_ids = input_ids
            self.labels = labels

        def __len__(self):
            return len(self.input_ids)

        def __getitem__(self, idx):
            return {
                'input_ids': self.input_ids[idx],
                'labels': self.labels[idx],
            }

    dataset = DictDataset(input_ids, labels)
    return DataLoader(dataset, batch_size=batch_size, shuffle=True)
def demonstrate_basic_progressive_training():
    """Demonstrate basic progressive training by building a two-stack rack.

    Returns:
        The configured ProgressiveRackBuilder holding both stacks.
    """
    print("🚀 Basic Progressive Training")
    print("=" * 50)

    # Base configuration with the model dimensions used by the rack builder.
    config = StackWiseConfig()
    config.model.vocab_size = 10000
    config.model.d_model = 512
    config.model.d_ff = 2048
    config.model.n_heads = 8
    config.model.n_kv_heads = 2

    # Enable progressive training with the dual-LoRA (QLoRA) approach.
    config.training.progressive.enabled = True
    config.training.progressive.qlora_enabled = True
    config.training.progressive.progressive_qlora = True

    rack_builder = ProgressiveRackBuilder(config=config, building_mode="append")

    # Build the rack one stack at a time, mixing precisions.
    stack1 = rack_builder.append_stack(n_blocks=4, precision="full")
    stack2 = rack_builder.append_stack(n_blocks=4, precision="half")

    print(f"✅ Created rack with {rack_builder.current_stacks} stacks")
    print(f"✅ Stack 1: {stack1.stack_id} with {len(stack1.blocks)} blocks")
    print(f"✅ Stack 2: {stack2.stack_id} with {len(stack2.blocks)} blocks")
    return rack_builder
def demonstrate_dual_lora_approach():
    """Demonstrate the dual-LoRA approach (per-stack LoRA plus progressive QLoRA).

    Returns:
        The configured ProgressiveRackBuilder with three stacks attached.
    """
    print("\n🔗 Dual-LoRA Approach")
    print("=" * 50)

    # Configuration with both LoRA tiers enabled and explicit ranks.
    config = StackWiseConfig()
    config.training.progressive.enabled = True
    config.training.progressive.qlora_enabled = True
    config.training.progressive.progressive_qlora = True
    config.training.progressive.qlora_rank = 16
    config.training.progressive.progressive_qlora_rank = 8

    rack_builder = ProgressiveRackBuilder(config=config, building_mode="append")

    for _ in range(3):
        stack = rack_builder.append_stack(n_blocks=2, precision="full")
        print(f"✅ Added stack {stack.stack_id} with LoRA adapters")

    # Per-stack adapters are keyed by int stack index; progressive QLoRA
    # adapters are keyed by 'progressive_qlora_*' strings.
    adapter_keys = rack_builder.qlora_adapters.keys()
    stack_lora_count = sum(1 for k in adapter_keys if isinstance(k, int))
    progressive_lora_count = sum(
        1 for k in adapter_keys
        if isinstance(k, str) and k.startswith('progressive_qlora_')
    )
    print(f"✅ Stack LoRA adapters: {stack_lora_count}")
    print(f"✅ Progressive QLoRA adapters: {progressive_lora_count}")
    return rack_builder
def demonstrate_progressive_training():
    """Demonstrate end-to-end progressive training via ProgressiveTrainer.

    Builds a two-stack rack, generates sample data, and trains the rack.

    Returns:
        The training results produced by ProgressiveTrainer.train_rack.
    """
    print("\n🎯 Progressive Training with Trainer")
    print("=" * 50)

    # Base configuration with the model dimensions used by the rack builder.
    config = StackWiseConfig()
    config.model.vocab_size = 10000
    config.model.d_model = 512
    config.model.d_ff = 2048
    config.model.n_heads = 8
    config.model.n_kv_heads = 2

    # Enable progressive training with the dual-LoRA (QLoRA) approach.
    config.training.progressive.enabled = True
    config.training.progressive.qlora_enabled = True
    config.training.progressive.progressive_qlora = True

    rack_builder = ProgressiveRackBuilder(config=config, building_mode="append")

    for _ in range(2):
        stack = rack_builder.append_stack(n_blocks=4, precision="full")
        print(f"✅ Added stack {stack.stack_id}")

    dataloader = create_sample_data()
    trainer = ProgressiveTrainer(config=config)

    print("🚀 Starting progressive training...")
    results = trainer.train_rack(rack_builder, dataloader, target_stacks=2)
    print(f"✅ Progressive training completed with {len(results)} results")
    return results
def demonstrate_precision_modes():
    """Demonstrate building stacks in different numeric precision modes.

    Returns:
        The configured ProgressiveRackBuilder with one stack per mode.
    """
    print("\n⚡ Precision Modes")
    print("=" * 50)

    config = StackWiseConfig()
    config.training.progressive.enabled = True
    config.training.progressive.qlora_enabled = True

    rack_builder = ProgressiveRackBuilder(config=config, building_mode="append")

    # One stack per precision mode supported by the builder.
    for precision in ("full", "half", "bfloat16", "nvfp4"):
        stack = rack_builder.append_stack(n_blocks=2, precision=precision)
        print(f"✅ Added stack {stack.stack_id} with precision: {precision}")

    print(f"✅ Created rack with {rack_builder.current_stacks} stacks")
    return rack_builder
def main():
    """Run every Progressive Training System example, reporting any failure."""
    print("🚀 Progressive Training System Examples")
    print("=" * 60)
    try:
        demonstrate_basic_progressive_training()
        demonstrate_dual_lora_approach()
        demonstrate_progressive_training()
        demonstrate_precision_modes()

        print("\n🎉 All Progressive Training System examples completed successfully!")
        print("\n📊 Summary:")
        print("   - Basic progressive training: ✅")
        print("   - Dual-LoRA approach: ✅")
        print("   - Progressive training with trainer: ✅")
        print("   - Precision modes: ✅")
    except Exception as e:
        # Examples are best-effort: report the failure with a traceback
        # instead of crashing, so earlier output remains useful.
        print(f"❌ Error during demonstration: {e}")
        import traceback
        traceback.print_exc()
# Run all demonstrations only when executed as a script, not on import.
if __name__ == "__main__":
    main()