Commit 6b88ddc (parent: de7c28a)
2 files changed, +2 -4 lines. The change moves the from data import RandomTokenDataset statement up into the sorted import block at the top of both training scripts and drops a now-redundant blank line.

First changed file (Fabric training script):
 import lightning as L
 import torch
 import torch.nn.functional as F
+from data import RandomTokenDataset
 from lightning.fabric.strategies import ModelParallelStrategy
 from model import ModelArgs, Transformer
 from parallelism import parallelize
 from torch.distributed.tensor.parallel import loss_parallel
 from torch.utils.data import DataLoader

-from data import RandomTokenDataset
-

 def train():
     strategy = ModelParallelStrategy(
Second changed file (PyTorch Lightning training script):
 import lightning as L
 import torch
 import torch.nn.functional as F
+from data import RandomTokenDataset
 from lightning.pytorch.strategies import ModelParallelStrategy
 from model import ModelArgs, Transformer
 from parallelism import parallelize
 from torch.distributed.tensor.parallel import loss_parallel
 from torch.utils.data import DataLoader

-from data import RandomTokenDataset
-

 class Llama3(L.LightningModule):
     def __init__(self):
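
The moved import refers to RandomTokenDataset from the example's local data module. For context, below is a minimal sketch of what such a random-token dataset could look like; the constructor parameters (vocab_size, seq_length, num_samples) and their defaults are assumptions for illustration, not the repository's actual signature.

import torch
from torch.utils.data import Dataset, DataLoader


class RandomTokenDataset(Dataset):
    """Sketch only: yields random token ids, standing in for real tokenized text."""

    def __init__(self, vocab_size: int = 32000, seq_length: int = 2048, num_samples: int = 1024):
        self.vocab_size = vocab_size
        self.seq_length = seq_length
        self.num_samples = num_samples

    def __len__(self) -> int:
        return self.num_samples

    def __getitem__(self, index: int) -> torch.Tensor:
        # Uniform random token ids in [0, vocab_size); a real dataset would tokenize a corpus.
        return torch.randint(0, self.vocab_size, (self.seq_length,), dtype=torch.long)


# Usage mirroring the imports in the diff (batch size is arbitrary here):
# dataloader = DataLoader(RandomTokenDataset(), batch_size=8)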
0 commit comments