Commit da27fbd: Fix local load for Medusa (#1420)
# What does this PR do?

Close #1418
Close #1415

## Before submitting

- [ ] This PR fixes a typo or improves the docs (you can dismiss the other checks if that's the case).
- [ ] Did you read the [contributor guideline](https://github.com/huggingface/transformers/blob/main/CONTRIBUTING.md#start-contributing-pull-requests), Pull Request section?
- [ ] Was this discussed/approved via a Github issue or the [forum](https://discuss.huggingface.co/)? Please add a link to it if that's the case.
- [ ] Did you make sure to update the documentation with your changes? Here are the [documentation guidelines](https://github.com/huggingface/transformers/tree/main/docs), and [here are tips on formatting docstrings](https://github.com/huggingface/transformers/tree/main/docs#writing-source-documentation).
- [ ] Did you write any new necessary tests?

## Who can review?

Anyone in the community is free to review the PR once the tests have passed. Feel free to tag members/contributors who may be interested in your PR.
1 parent: fbeb1c4

File tree: 2 files changed (+47, −7)

server/text_generation_server/cli.py

Lines changed: 29 additions & 0 deletions

```diff
@@ -198,6 +198,35 @@ def download_weights(
             if not extension == ".safetensors" or not auto_convert:
                 raise e
 
+    elif (Path(model_id) / "medusa_lm_head.pt").exists():
+        # Try to load as a local Medusa model
+        try:
+            import json
+
+            medusa_head = Path(model_id) / "medusa_lm_head.pt"
+            if auto_convert:
+                medusa_sf = Path(model_id) / "medusa_lm_head.safetensors"
+                if not medusa_sf.exists():
+                    utils.convert_files([Path(medusa_head)], [medusa_sf], [])
+            medusa_config = Path(model_id) / "config.json"
+            with open(medusa_config, "r") as f:
+                config = json.load(f)
+
+            model_id = config["base_model_name_or_path"]
+            revision = "main"
+            try:
+                utils.weight_files(model_id, revision, extension)
+                logger.info(
+                    f"Files for parent {model_id} are already present on the host. "
+                    "Skipping download."
+                )
+                return
+            # Local files not found
+            except (utils.LocalEntryNotFoundError, utils.EntryNotFoundError):
+                pass
+        except (utils.LocalEntryNotFoundError, utils.EntryNotFoundError):
+            pass
+
     elif (Path(model_id) / "adapter_config.json").exists():
         # Try to load as a local PEFT model
         try:
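For reference, this new branch expects a local Medusa directory to contain a `medusa_lm_head.pt` checkpoint next to a `config.json` whose `base_model_name_or_path` names the parent model. A minimal sketch of that layout and the detection check (the directory path and base model id below are hypothetical, not from this PR):

```python
import json
from pathlib import Path

# Hypothetical local Medusa directory; in TGI this is the path passed as model_id.
model_dir = Path("/tmp/medusa-vicuna-7b")
model_dir.mkdir(parents=True, exist_ok=True)

# config.json must carry base_model_name_or_path so download_weights can
# redirect the weight download to the parent model.
(model_dir / "config.json").write_text(
    json.dumps({"base_model_name_or_path": "lmsys/vicuna-7b-v1.3"})
)

# Placeholder head checkpoint; the real file is a torch checkpoint that the
# new branch converts to medusa_lm_head.safetensors when auto_convert is set.
(model_dir / "medusa_lm_head.pt").touch()

# Same predicate the new elif in cli.py uses to take the local Medusa path.
print((model_dir / "medusa_lm_head.pt").exists())  # True

config = json.loads((model_dir / "config.json").read_text())
print(config["base_model_name_or_path"])  # lmsys/vicuna-7b-v1.3
```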

server/text_generation_server/models/flash_llama.py

Lines changed: 18 additions & 7 deletions
```diff
@@ -71,15 +71,26 @@ def __init__(
             from text_generation_server.utils.medusa import MedusaModel
             from huggingface_hub import hf_hub_download
             import json
-
-            medusa_config = hf_hub_download(
-                use_medusa, revision=revision, filename="config.json"
-            )
+            import os
+            from pathlib import Path
+
+            is_local_model = (Path(use_medusa).exists() and Path(use_medusa).is_dir()) or os.getenv(
+                "WEIGHTS_CACHE_OVERRIDE", None
+            ) is not None
+
+            if not is_local_model:
+                medusa_config = hf_hub_download(
+                    use_medusa, revision=revision, filename="config.json"
+                )
+                medusa_head = hf_hub_download(
+                    use_medusa, revision=revision, filename="medusa_lm_head.pt"
+                )
+            else:
+                medusa_config = str(Path(use_medusa) / "config.json")
+                medusa_head = str(Path(use_medusa) / "medusa_lm_head.pt")
+
             with open(medusa_config, "r") as f:
                 config = json.load(f)
-            medusa_head = hf_hub_download(
-                use_medusa, revision=revision, filename="medusa_lm_head.pt"
-            )
             medusa_sf = medusa_head[: -len(".pt")] + ".safetensors"
             weights = Weights(
                 [medusa_sf], device, dtype, process_group=self.process_group
```
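The local-versus-hub decision added to `flash_llama.py` can also be read in isolation. A minimal standalone sketch of the same resolution logic, assuming the hypothetical directory from the sketch above; the helper name `resolve_medusa_files` is illustrative and not part of the TGI codebase:

```python
import os
from pathlib import Path

def resolve_medusa_files(use_medusa: str, revision: str = "main"):
    # Mirrors the new branch: a Medusa repo counts as "local" when use_medusa
    # is an existing directory, or when WEIGHTS_CACHE_OVERRIDE forces offline paths.
    is_local_model = (
        Path(use_medusa).exists() and Path(use_medusa).is_dir()
    ) or os.getenv("WEIGHTS_CACHE_OVERRIDE", None) is not None

    if not is_local_model:
        # Remote repo id: fetch both files from the Hugging Face Hub.
        from huggingface_hub import hf_hub_download

        medusa_config = hf_hub_download(
            use_medusa, revision=revision, filename="config.json"
        )
        medusa_head = hf_hub_download(
            use_medusa, revision=revision, filename="medusa_lm_head.pt"
        )
    else:
        # Local directory: resolve paths directly, no network access.
        medusa_config = str(Path(use_medusa) / "config.json")
        medusa_head = str(Path(use_medusa) / "medusa_lm_head.pt")
    return medusa_config, medusa_head

# With the hypothetical local directory from the previous sketch, no download happens.
print(resolve_medusa_files("/tmp/medusa-vicuna-7b"))
```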
