浏览代码

Reorder imports

oobabooga 2 年之前
父节点
当前提交
0bec15ebcd
共有 1 个文件被更改,包括 3 次插入、2 次删除
  1. + 3 - 2
      modules/GPTQ_loader.py

+ 3 - 2
modules/GPTQ_loader.py

@@ -5,14 +5,15 @@ from pathlib import Path
 import accelerate
 import torch
 import transformers
-from transformers import AutoConfig, AutoModelForCausalLM 
+from transformers import AutoConfig, AutoModelForCausalLM
 
 import modules.shared as shared
 
 sys.path.insert(0, str(Path("repositories/GPTQ-for-LLaMa")))
 import llama_inference_offload
-from quant import make_quant
 from modelutils import find_layers
+from quant import make_quant
+
 
 def _load_quant(model, checkpoint, wbits, groupsize=-1, faster_kernel=False, exclude_layers=['lm_head'], kernel_switch_threshold=128):
     config = AutoConfig.from_pretrained(model)