Remove cache_dir
app.py
CHANGED
@@ -11,9 +11,7 @@ from typing_extensions import TypedDict
 
 device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
 
 model_id = "Qwen/Qwen3-0.6B"
-model = AutoModelForCausalLM.from_pretrained(
-    model_id, cache_dir="/big_storage/llms/hf_models/"
-).to(device)
+model = AutoModelForCausalLM.from_pretrained(model_id).to(device)
 tokenizer = AutoTokenizer.from_pretrained(model_id)
 streamer = TextIteratorStreamer(tokenizer, skip_prompt=True)
 
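If the weights should still live on the large disk after dropping the hard-coded cache_dir, the usual alternative is the Hugging Face cache environment variables (HF_HOME for all Hugging Face data, or HF_HUB_CACHE for just the hub download cache). A minimal sketch, assuming a recent transformers/huggingface_hub and reusing the directory the removed argument pointed to:

# Set before importing transformers so huggingface_hub reads it at import time.
# HF_HUB_CACHE redirects the hub download cache; the path below simply reuses
# the directory that the removed cache_dir argument pointed to.
import os
os.environ.setdefault("HF_HUB_CACHE", "/big_storage/llms/hf_models/")

import torch
from transformers import AutoModelForCausalLM

device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
model = AutoModelForCausalLM.from_pretrained("Qwen/Qwen3-0.6B").to(device)

This keeps app.py free of machine-specific paths while letting each deployment choose its own cache location.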