diff --git a/cn_clip/training/main.py b/cn_clip/training/main.py
index b8d20e8..9e09c09 100644
--- a/cn_clip/training/main.py
+++ b/cn_clip/training/main.py
@@ -258,7 +258,17 @@ def main():
         }
         assert args.teacher_model_name in teacher_model_dict, "Error: Valid teacher model name has not been built."

-        teacher_model = Model.from_pretrained(args.teacher_model_name)
+        try:
+            teacher_model = Model.from_pretrained(args.teacher_model_name)
+        except Exception as e:
+            if "Unexpected key(s) in state_dict" in str(e):
+                error_message = (
+                    "An error occurred while loading the model: {}\n"
+                    "Maybe you should update modelscope. ".format(e)
+                )
+                raise RuntimeError(error_message)
+            raise  # re-raise unrelated errors instead of leaving teacher_model unset
+
         for k, v in teacher_model.state_dict().items():
             v.requires_grad = False
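For reference, the guard introduced above behaves like the following standalone sketch. Here `load_teacher` is a hypothetical helper written only for illustration, and the `from modelscope.models import Model` import is assumed to match what `main.py` already uses (the import itself is not shown in the hunk): a `state_dict` key mismatch, which an outdated ModelScope install can produce when loading a newer teacher checkpoint, is rewrapped with an upgrade hint, while every other loading failure is re-raised unchanged.

```python
# Standalone sketch of the loading guard above; load_teacher is a
# hypothetical helper, the real change lives inline in main().
from modelscope.models import Model


def load_teacher(teacher_model_name: str):
    try:
        return Model.from_pretrained(teacher_model_name)
    except Exception as e:
        if "Unexpected key(s) in state_dict" in str(e):
            # A checkpoint-layout mismatch usually means the local
            # modelscope package is too old to load this teacher.
            raise RuntimeError(
                "An error occurred while loading the model: {}\n"
                "Maybe you should update modelscope.".format(e)
            )
        raise  # unrelated errors propagate unchanged
```

Without the final bare `raise`, an unrelated failure would fall out of the `except` block and resurface later as a confusing `NameError` on `teacher_model`.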
diff --git a/distillation.md b/distillation.md
index a182a4a..e2d2387 100644
--- a/distillation.md
+++ b/distillation.md
@@ -59,5 +59,5 @@



-## Future Action
-A Jupyter Notebook for this solution will be released on the Alibaba Cloud website to provide clearer hands-on tutorials. Stay tuned.
+## Quick Start
+The solution is now available on Alibaba Cloud [PAI-DSW Gallery](https://gallery.pai-ml.com/#/preview/deepLearning/cv/cn_clip_distillation). A corresponding Notebook is provided in the PAI-DSW Gallery, enabling users to build their own dedicated search models with their own data.
diff --git a/distillation_En.md b/distillation_En.md
index ddaac11..0de552e 100644
--- a/distillation_En.md
+++ b/distillation_En.md
@@ -9,8 +9,8 @@ Here we provide an example of knowledge distillation for Chinese-CLIP fine-tunin