Update app.py
app.py (CHANGED)

@@ -18,7 +18,7 @@ print("Loading Translation Model... This may take a few minutes.")
 tokenizer_trans = AutoTokenizer.from_pretrained(MODEL_TRANS_ID)
 model_trans = AutoModelForCausalLM.from_pretrained(
     MODEL_TRANS_ID,
-
+    dtype=torch.bfloat16,
     device_map="auto",
     trust_remote_code=True
 )
@@ -28,7 +28,7 @@ print("Loading Chat Model...")
 tokenizer = AutoTokenizer.from_pretrained(MODEL_ID, trust_remote_code=True)
 model = AutoModelForCausalLM.from_pretrained(
     MODEL_ID,
-
+    dtype=torch.bfloat16,
     device_map="auto",
     trust_remote_code=True
 )
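For context, a minimal sketch of how the two loading calls read after this commit. It assumes `import torch` and the two model-id constants are defined earlier in app.py (the diff does not show them, so the values below are placeholders). Loading the weights in bfloat16 roughly halves memory use compared with float32, and `device_map="auto"` lets transformers/accelerate place the layers across the available devices.

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

# Placeholders: the real repo ids are defined elsewhere in app.py.
MODEL_TRANS_ID = "..."  # translation model
MODEL_ID = "..."        # chat model

print("Loading Translation Model... This may take a few minutes.")
tokenizer_trans = AutoTokenizer.from_pretrained(MODEL_TRANS_ID)
model_trans = AutoModelForCausalLM.from_pretrained(
    MODEL_TRANS_ID,
    dtype=torch.bfloat16,    # half-precision weights instead of float32
    device_map="auto",       # let accelerate pick GPU/CPU placement
    trust_remote_code=True
)

print("Loading Chat Model...")
tokenizer = AutoTokenizer.from_pretrained(MODEL_ID, trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(
    MODEL_ID,
    dtype=torch.bfloat16,
    device_map="auto",
    trust_remote_code=True
)

One caveat: if `from_pretrained` rejects `dtype` as an unexpected keyword, the installed transformers release likely predates that alias and expects `torch_dtype=torch.bfloat16` instead.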