fffiloni committed
Commit 2d340ff · verified · Parent(s): cd0f73f

load model only if cuda available

Files changed (1)
  1. app.py +15 -11
app.py CHANGED
@@ -123,18 +123,22 @@ def download_whisper_model():
     except Exception as e:
         print(f"An error occurred while downloading the model: {str(e)}")
 
-# Download the Whisper model
-download_whisper_model()
-
-total_vram_in_gb = torch.cuda.get_device_properties(0).total_memory / 1073741824
-print(f'\033[32mCUDA version: {torch.version.cuda}\033[0m')
-print(f'\033[32mPyTorch version: {torch.__version__}\033[0m')
-print(f'\033[32mGPU model: {torch.cuda.get_device_name()}\033[0m')
-print(f'\033[32mVRAM size: {total_vram_in_gb:.2f}GB\033[0m')
-print(f'\033[32mPrecision: float16\033[0m')
-dtype = torch.float16
+
 if torch.cuda.is_available():
-    device = "cuda"
+    device = "cuda"
+
+    # Download the Whisper model
+    download_whisper_model()
+
+    total_vram_in_gb = torch.cuda.get_device_properties(0).total_memory / 1073741824
+    print(f'\033[32mCUDA version: {torch.version.cuda}\033[0m')
+    print(f'\033[32mPyTorch version: {torch.__version__}\033[0m')
+    print(f'\033[32mGPU model: {torch.cuda.get_device_name()}\033[0m')
+    print(f'\033[32mVRAM size: {total_vram_in_gb:.2f}GB\033[0m')
+    print(f'\033[32mPrecision: float16\033[0m')
+
+    dtype = torch.float16
+
 else:
     print("cuda not available, using cpu")
     device = "cpu"