Switch from cached_download to hf_hub_download in remaining occurrences (huggingface#31284)

Switch from hf_hub_url to hf_hub_download in remaining occurrences
Wauplin authored Jun 6, 2024
1 parent 5fabd1e commit 9ef93fc
Showing 12 changed files with 29 additions and 24 deletions.
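Every file applies the same replacement: a single `hf_hub_download()` call downloads the file (or reuses the cached copy) and returns its local path, replacing the old two-step `cached_download(hf_hub_url(...))`, and the JSON is then read with `Path.read_text()` and `json.loads` instead of `json.load(open(...))`. A minimal sketch of the resulting pattern, using the `huggingface/label-files` dataset that the diffs below reference:

```py
import json
from pathlib import Path

from huggingface_hub import hf_hub_download

# hf_hub_download() fetches the file from the Hub (or reuses the cached
# copy) and returns the local file path, replacing the previous
# cached_download(hf_hub_url(...)) combination.
repo_id = "huggingface/label-files"
filename = "ade20k-id2label.json"
local_path = Path(hf_hub_download(repo_id, filename, repo_type="dataset"))

# Parse the id -> label mapping and build the reverse mapping.
id2label = {int(k): v for k, v in json.loads(local_path.read_text()).items()}
label2id = {v: k for k, v in id2label.items()}
```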
5 changes: 3 additions & 2 deletions docs/source/en/tasks/semantic_segmentation.md
@@ -245,11 +245,12 @@ You'll also want to create a dictionary that maps a label id to a label class wh

```py
>>> import json
- >>> from huggingface_hub import cached_download, hf_hub_url
+ >>> from pathlib import Path
+ >>> from huggingface_hub import hf_hub_download

>>> repo_id = "huggingface/label-files"
>>> filename = "ade20k-id2label.json"
- >>> id2label = json.load(open(cached_download(hf_hub_url(repo_id, filename, repo_type="dataset")), "r"))
+ >>> id2label = json.loads(Path(hf_hub_download(repo_id, filename, repo_type="dataset")).read_text())
>>> id2label = {int(k): v for k, v in id2label.items()}
>>> label2id = {v: k for k, v in id2label.items()}
>>> num_labels = len(id2label)
5 changes: 3 additions & 2 deletions docs/source/ja/tasks/semantic_segmentation.md
@@ -83,11 +83,12 @@ pip install -q datasets transformers evaluate

```py
>>> import json
- >>> from huggingface_hub import cached_download, hf_hub_url
+ >>> from pathlib import Path
+ >>> from huggingface_hub import hf_hub_download

>>> repo_id = "huggingface/label-files"
>>> filename = "ade20k-id2label.json"
- >>> id2label = json.load(open(cached_download(hf_hub_url(repo_id, filename, repo_type="dataset")), "r"))
+ >>> id2label = json.loads(Path(hf_hub_download(repo_id, filename, repo_type="dataset")).read_text())
>>> id2label = {int(k): v for k, v in id2label.items()}
>>> label2id = {v: k for k, v in id2label.items()}
>>> num_labels = len(id2label)
5 changes: 3 additions & 2 deletions docs/source/ja/tasks/sequence_classification.md
@@ -83,11 +83,12 @@ pip install -q datasets transformers evaluate

```py
>>> import json
- >>> from huggingface_hub import cached_download, hf_hub_url
+ >>> from pathlib import Path
+ >>> from huggingface_hub import hf_hub_download

>>> repo_id = "huggingface/label-files"
>>> filename = "ade20k-id2label.json"
- >>> id2label = json.load(open(cached_download(hf_hub_url(repo_id, filename, repo_type="dataset")), "r"))
+ >>> id2label = json.loads(Path(hf_hub_download(repo_id, filename, repo_type="dataset")).read_text())
>>> id2label = {int(k): v for k, v in id2label.items()}
>>> label2id = {v: k for k, v in id2label.items()}
>>> num_labels = len(id2label)
5 changes: 3 additions & 2 deletions docs/source/ko/tasks/semantic_segmentation.md
@@ -82,11 +82,12 @@ pip install -q datasets transformers evaluate

```py
>>> import json
- >>> from huggingface_hub import cached_download, hf_hub_url
+ >>> from pathlib import Path
+ >>> from huggingface_hub import hf_hub_download

>>> repo_id = "huggingface/label-files"
>>> filename = "ade20k-id2label.json"
- >>> id2label = json.load(open(cached_download(hf_hub_url(repo_id, filename, repo_type="dataset")), "r"))
+ >>> id2label = json.loads(Path(hf_hub_download(repo_id, filename, repo_type="dataset")).read_text())
>>> id2label = {int(k): v for k, v in id2label.items()}
>>> label2id = {v: k for k, v in id2label.items()}
>>> num_labels = len(id2label)
@@ -19,9 +19,10 @@
import argparse
import json
from collections import OrderedDict
+ from pathlib import Path

import torch
- from huggingface_hub import cached_download, hf_hub_url
+ from huggingface_hub import hf_hub_download

from transformers import AutoImageProcessor, CvtConfig, CvtForImageClassification

@@ -283,7 +284,7 @@ def convert_cvt_checkpoint(cvt_model, image_size, cvt_file_name, pytorch_dump_fo

repo_id = "huggingface/label-files"
num_labels = num_labels
- id2label = json.load(open(cached_download(hf_hub_url(repo_id, img_labels_file, repo_type="dataset")), "r"))
+ id2label = json.loads(Path(hf_hub_download(repo_id, img_labels_file, repo_type="dataset")).read_text())
id2label = {int(k): v for k, v in id2label.items()}

id2label = id2label
@@ -20,7 +20,7 @@

import requests
import torch
- from huggingface_hub import cached_download, hf_hub_url
+ from huggingface_hub import hf_hub_download
from PIL import Image

from transformers import DeformableDetrConfig, DeformableDetrForObjectDetection, DeformableDetrImageProcessor
@@ -109,7 +109,7 @@ def convert_deformable_detr_checkpoint(
config.num_labels = 91
repo_id = "huggingface/label-files"
filename = "coco-detection-id2label.json"
- id2label = json.load(open(cached_download(hf_hub_url(repo_id, filename, repo_type="dataset")), "r"))
+ id2label = json.loads(Path(hf_hub_download(repo_id, filename, repo_type="dataset")).read_text())
id2label = {int(k): v for k, v in id2label.items()}
config.id2label = id2label
config.label2id = {v: k for k, v in id2label.items()}
@@ -22,7 +22,7 @@

import requests
import torch
- from huggingface_hub import cached_download, hf_hub_download, hf_hub_url
+ from huggingface_hub import hf_hub_download
from PIL import Image

from transformers import DetaConfig, DetaForObjectDetection, DetaImageProcessor
@@ -48,7 +48,7 @@ def get_deta_config():
config.num_labels = 91
repo_id = "huggingface/label-files"
filename = "coco-detection-id2label.json"
- id2label = json.load(open(cached_download(hf_hub_url(repo_id, filename, repo_type="dataset")), "r"))
+ id2label = json.loads(Path(hf_hub_download(repo_id, filename, repo_type="dataset")).read_text())
id2label = {int(k): v for k, v in id2label.items()}
config.id2label = id2label
config.label2id = {v: k for k, v in id2label.items()}
@@ -22,7 +22,7 @@

import requests
import torch
- from huggingface_hub import cached_download, hf_hub_download, hf_hub_url
+ from huggingface_hub import hf_hub_download
from PIL import Image

from transformers import DetaConfig, DetaForObjectDetection, DetaImageProcessor, SwinConfig
@@ -63,7 +63,7 @@ def get_deta_config(model_name):
filename = "coco-detection-id2label.json"

config.num_labels = num_labels
- id2label = json.load(open(cached_download(hf_hub_url(repo_id, filename, repo_type="dataset")), "r"))
+ id2label = json.loads(Path(hf_hub_download(repo_id, filename, repo_type="dataset")).read_text())
id2label = {int(k): v for k, v in id2label.items()}
config.id2label = id2label
config.label2id = {v: k for k, v in id2label.items()}
4 changes: 2 additions & 2 deletions src/transformers/models/dpt/convert_dpt_hybrid_to_pytorch.py
@@ -20,7 +20,7 @@

import requests
import torch
- from huggingface_hub import cached_download, hf_hub_url
+ from huggingface_hub import hf_hub_download
from PIL import Image

from transformers import DPTConfig, DPTForDepthEstimation, DPTForSemanticSegmentation, DPTImageProcessor
@@ -61,7 +61,7 @@ def get_dpt_config(checkpoint_url):
config.patch_size = 16
repo_id = "huggingface/label-files"
filename = "ade20k-id2label.json"
- id2label = json.load(open(cached_download(hf_hub_url(repo_id, filename, repo_type="dataset")), "r"))
+ id2label = json.loads(Path(hf_hub_download(repo_id, filename, repo_type="dataset")).read_text())
id2label = {int(k): v for k, v in id2label.items()}
config.id2label = id2label
config.label2id = {v: k for k, v in id2label.items()}
4 changes: 2 additions & 2 deletions src/transformers/models/dpt/convert_dpt_to_pytorch.py
@@ -20,7 +20,7 @@

import requests
import torch
- from huggingface_hub import cached_download, hf_hub_url
+ from huggingface_hub import hf_hub_download
from PIL import Image

from transformers import DPTConfig, DPTForDepthEstimation, DPTForSemanticSegmentation, DPTImageProcessor
@@ -49,7 +49,7 @@ def get_dpt_config(checkpoint_url):
config.num_labels = 150
repo_id = "huggingface/label-files"
filename = "ade20k-id2label.json"
- id2label = json.load(open(cached_download(hf_hub_url(repo_id, filename, repo_type="dataset")), "r"))
+ id2label = json.loads(Path(hf_hub_download(repo_id, filename, repo_type="dataset")).read_text())
id2label = {int(k): v for k, v in id2label.items()}
config.id2label = id2label
config.label2id = {v: k for k, v in id2label.items()}
@@ -30,7 +30,7 @@
import torch
import torch.nn as nn
from classy_vision.models.regnet import RegNet, RegNetParams
- from huggingface_hub import cached_download, hf_hub_url
+ from huggingface_hub import hf_hub_download
from torch import Tensor
from vissl.models.model_helpers import get_trunk_forward_outputs

@@ -165,7 +165,7 @@ def convert_weights_and_push(save_directory: Path, model_name: str = None, push_

repo_id = "huggingface/label-files"
num_labels = num_labels
- id2label = json.load(open(cached_download(hf_hub_url(repo_id, filename, repo_type="dataset")), "r"))
+ id2label = json.loads(Path(hf_hub_download(repo_id, filename, repo_type="dataset")).read_text())
id2label = {int(k): v for k, v in id2label.items()}

id2label = id2label
4 changes: 2 additions & 2 deletions src/transformers/models/regnet/convert_regnet_to_pytorch.py
@@ -25,7 +25,7 @@
import torch
import torch.nn as nn
from classy_vision.models.regnet import RegNet, RegNetParams, RegNetY32gf, RegNetY64gf, RegNetY128gf
- from huggingface_hub import cached_download, hf_hub_url
+ from huggingface_hub import hf_hub_download
from torch import Tensor
from vissl.models.model_helpers import get_trunk_forward_outputs

@@ -225,7 +225,7 @@ def convert_weights_and_push(save_directory: Path, model_name: str = None, push_

repo_id = "huggingface/label-files"
num_labels = num_labels
- id2label = json.load(open(cached_download(hf_hub_url(repo_id, filename, repo_type="dataset")), "r"))
+ id2label = json.loads(Path(hf_hub_download(repo_id, filename, repo_type="dataset")).read_text())
id2label = {int(k): v for k, v in id2label.items()}

id2label = id2label
