Spaces: Running on Zero
Update app.py
app.py CHANGED
@@ -26,7 +26,7 @@ torch.backends.cuda.matmul.allow_fp16_reduced_precision_reduction = False
 torch.backends.cudnn.allow_tf32 = False
 torch.backends.cudnn.deterministic = False
 torch.backends.cudnn.benchmark = False
-torch.backends.cuda.preferred_blas_library="cublas"
+# torch.backends.cuda.preferred_blas_library="cublas"
 # torch.backends.cuda.preferred_linalg_library="cusolver"
 torch.set_float32_matmul_precision("highest")
 
@@ -74,16 +74,12 @@ styles = {k["name"]: (k["prompt"], k["negative_prompt"]) for k in style_list}
 DEFAULT_STYLE_NAME = "Style Zero"
 STYLE_NAMES = list(styles.keys())
 HF_TOKEN = os.getenv("HF_TOKEN")
-
+FTP_HOST = os.getenv("FTP_HOST")
 FTP_USER = os.getenv("FTP_USER")
-
+FTP_PASS = os.getenv("FTP_PASS")
 FTP_DIR = os.getenv("FTP_DIR")
-FTP_HOST = "1ink.us"
-#FTP_USER = "ford442"
-FTP_PASS = "GoogleBez12!"
-#FTP_DIR = "1ink.us/stable_diff/" # Remote directory on FTP server
 
-os.putenv('TORCH_LINALG_PREFER_CUSOLVER','1')
+# os.putenv('TORCH_LINALG_PREFER_CUSOLVER','1')
 
 device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
 
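In short, the commit replaces the hardcoded FTP host, password, and directory with os.getenv lookups and comments out the cuBLAS/cuSOLVER backend overrides. The rest of app.py is not shown in this diff, so the sketch below is only an illustration of how such env-provided FTP settings are typically consumed; the helper name upload_to_ftp and the use of ftplib.FTP_TLS are assumptions, not code from this Space.

import os
from ftplib import FTP_TLS

# Credentials now come from environment variables (e.g. Space secrets)
# instead of being hardcoded in app.py.
FTP_HOST = os.getenv("FTP_HOST")
FTP_USER = os.getenv("FTP_USER")
FTP_PASS = os.getenv("FTP_PASS")
FTP_DIR = os.getenv("FTP_DIR")

def upload_to_ftp(local_path: str) -> None:
    """Hypothetical helper: uploads one file to the configured remote directory."""
    if not all([FTP_HOST, FTP_USER, FTP_PASS, FTP_DIR]):
        raise RuntimeError("FTP_HOST/FTP_USER/FTP_PASS/FTP_DIR must be set in the environment")
    ftp = FTP_TLS(FTP_HOST)          # open a TLS-capable FTP session
    ftp.login(FTP_USER, FTP_PASS)    # authenticate with the env-provided credentials
    ftp.prot_p()                     # switch the data channel to encrypted mode
    ftp.cwd(FTP_DIR)                 # change to the remote target directory
    with open(local_path, "rb") as f:
        ftp.storbinary(f"STOR {os.path.basename(local_path)}", f)
    ftp.quit()

On Hugging Face Spaces these values would typically be added as repository secrets, so they are injected at runtime and never appear in the source tree or git history.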