Skip to content

Commit 054aec8

Browse files
committed
Fix: Division-by-Zero Risk and Typo
1 parent 03cfc13 commit 054aec8

File tree

2 files changed

+4
-3
lines changed

2 files changed

+4
-3
lines changed

nanovllm/engine/llm_engine.py

Lines changed: 2 additions & 2 deletions
Original file line number · Diff line number · Diff line change
@@ -15,8 +15,8 @@
1515
class LLMEngine:
1616

1717
def __init__(self, model, **kwargs):
18-
config_fileds = {field.name for field in fields(Config)}
19-
config_kwargs = {k: v for k, v in kwargs.items() if k in config_fileds}
18+
config_fields = {field.name for field in fields(Config)}
19+
config_kwargs = {k: v for k, v in kwargs.items() if k in config_fields}
2020
config = Config(model, **config_kwargs)
2121
self.ps = []
2222
self.events = []

nanovllm/layers/sampler.py

Lines changed: 2 additions & 1 deletion
Original file line number · Diff line number · Diff line change
@@ -13,5 +13,6 @@ def forward(self, logits: torch.Tensor, temperatures: torch.Tensor):
1313
logits.div_(temperatures.unsqueeze(dim=1))
1414
probs = torch.softmax(logits, dim=-1, dtype=torch.float)
1515
# logprobs = torch.log_softmax(logits, dim=-1, dtype=torch.float)
16-
sample_tokens = probs.div_(torch.empty_like(probs).exponential_(1)).argmax(dim=-1)
16+
epsilon = 1e-10
17+
sample_tokens = probs.div_(torch.empty_like(probs).exponential_(1) + epsilon).argmax(dim=-1)
1718
return torch.where(temperatures == 0, greedy_tokens, sample_tokens)

0 commit comments

Comments (0)