Skip to content

Commit

Permalink
Prevent underflow when FreeMemory < overhead (ollama#8014)
Browse files Browse the repository at this point in the history
Co-authored-by: Richard Lyons <frob@cloudstaff.com>
  • Loading branch information
rick-github and frob-cloudstaff authored Dec 10, 2024
1 parent 900f64e commit 6326966
Showing 1 changed file with 3 additions and 3 deletions.
6 changes: 3 additions & 3 deletions llm/memory.go
Original file line number Diff line number Diff line change
Expand Up @@ -182,7 +182,7 @@ func EstimateGPULayers(gpus []discover.GpuInfo, ggml *GGML, projectors []string,
gzo = gpuZeroOverhead
}
// Only include GPUs that can fit the graph, gpu minimum, the layer buffer, and at least one more layer
if (gpus[i].FreeMemory - overhead) < gzo+max(graphPartialOffload, graphFullOffload)+gpus[i].MinimumMemory+2*layerSize {
if gpus[i].FreeMemory < overhead+gzo+max(graphPartialOffload, graphFullOffload)+gpus[i].MinimumMemory+2*layerSize {
slog.Debug("gpu has too little memory to allocate any layers",
"id", gpus[i].ID,
"library", gpus[i].Library,
Expand Down Expand Up @@ -228,7 +228,7 @@ func EstimateGPULayers(gpus []discover.GpuInfo, ggml *GGML, projectors []string,
for j := len(gpusWithSpace); j > 0; j-- {
g := gpusWithSpace[i%j]
used := gpuAllocations[g.i] + max(graphPartialOffload, graphFullOffload)
if (g.g.FreeMemory - overhead) > used+layerSize {
if g.g.FreeMemory > overhead+used+layerSize {
gpuAllocations[g.i] += layerSize
layerCounts[g.i]++
layerCount++
Expand All @@ -251,7 +251,7 @@ func EstimateGPULayers(gpus []discover.GpuInfo, ggml *GGML, projectors []string,
for j := len(gpusWithSpace); j > 0; j-- {
g := gpusWithSpace[layerCount%j]
used := gpuAllocations[g.i] + max(graphPartialOffload, graphFullOffload)
if (g.g.FreeMemory - overhead) > used+memoryLayerOutput {
if g.g.FreeMemory > overhead+used+memoryLayerOutput {
gpuAllocations[g.i] += memoryLayerOutput
layerCounts[g.i]++
layerCount++
Expand Down

0 comments on commit 6326966

Please sign in to comment.