Merge branch 'dev' into sdxl

AUTOMATIC1111 2023-07-12 23:53:26 +03:00
commit 60397a7800
3 changed files with 22 additions and 3 deletions

javascript/edit-order.js

@@ -6,11 +6,11 @@ function keyupEditOrder(event) {
     let target = event.originalTarget || event.composedPath()[0];
     if (!target.matches("*:is([id*='_toprow'] [id*='_prompt'], .prompt) textarea")) return;
     if (!event.altKey) return;
-    event.preventDefault();
 
     let isLeft = event.key == "ArrowLeft";
     let isRight = event.key == "ArrowRight";
     if (!isLeft && !isRight) return;
+    event.preventDefault();
 
     let selectionStart = target.selectionStart;
     let selectionEnd = target.selectionEnd;

modules/devices.py

@@ -54,8 +54,9 @@ def torch_gc():
         with torch.cuda.device(get_cuda_device_string()):
             torch.cuda.empty_cache()
             torch.cuda.ipc_collect()
-    elif has_mps() and hasattr(torch.mps, 'empty_cache'):
-        torch.mps.empty_cache()
+
+    if has_mps():
+        mac_specific.torch_mps_gc()
 
 
 def enable_tf32():
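For orientation, here is a sketch of how torch_gc() reads after this change, reconstructed from the hunk above; the enclosing torch.cuda.is_available() guard and the names get_cuda_device_string, has_mps and mac_specific are assumed to come from the rest of modules/devices.py rather than being shown in the diff:

def torch_gc():
    if torch.cuda.is_available():
        with torch.cuda.device(get_cuda_device_string()):
            torch.cuda.empty_cache()
            torch.cuda.ipc_collect()

    # The MPS branch is no longer an elif chained to the CUDA check; it is
    # tested on its own and delegates to mac_specific.torch_mps_gc() instead
    # of calling torch.mps.empty_cache() directly.
    if has_mps():
        mac_specific.torch_mps_gc()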

modules/mac_specific.py

@@ -1,8 +1,12 @@
+import logging
+
 import torch
 import platform
 from modules.sd_hijack_utils import CondFunc
 from packaging import version
 
+log = logging.getLogger(__name__)
+
 
 # before torch version 1.13, has_mps is only available in nightly pytorch and macOS 12.3+,
 # use check `getattr` and try it for compatibility.
@@ -19,9 +23,23 @@ def check_for_mps() -> bool:
             return False
     else:
         return torch.backends.mps.is_available() and torch.backends.mps.is_built()
 
 
 has_mps = check_for_mps()
 
 
+def torch_mps_gc() -> None:
+    try:
+        from modules.shared import state
+        if state.current_latent is not None:
+            log.debug("`current_latent` is set, skipping MPS garbage collection")
+            return
+
+        from torch.mps import empty_cache
+        empty_cache()
+    except Exception:
+        log.warning("MPS garbage collection failed", exc_info=True)
+
+
 # MPS workaround for https://github.com/pytorch/pytorch/issues/89784
 def cumsum_fix(input, cumsum_func, *args, **kwargs):
     if input.device.type == 'mps':
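Taken together, a minimal usage sketch of the new MPS path, assuming a macOS machine where check_for_mps() returns True and the webui's modules are importable (on other platforms has_mps() is False and torch_gc() never reaches the mac_specific call):

# Not part of the diff: a hedged sketch of how the new helper is reached.
from modules import devices, mac_specific

devices.torch_gc()           # on MPS this now ends in mac_specific.torch_mps_gc()
mac_specific.torch_mps_gc()  # returns early (with a debug log) while
                             # shared.state.current_latent is still set,
                             # otherwise calls torch.mps.empty_cache()

The state.current_latent guard presumably avoids emptying the MPS cache while a live-preview latent is still in use, and any failure is logged as a warning rather than raised, so a failed cache flush does not interrupt generation.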