@@ -6,11 +6,12 @@ python convert-to-torch.py models/opt-1.3b
The output will be written to torch-dumps/name-of-the-model.pt
'''
-
-from transformers import AutoModelForCausalLM
-import torch
-from sys import argv
+
from pathlib import Path
+from sys import argv
+import torch
+from transformers import AutoModelForCausalLM
path = Path(argv[1])
model_name = path.name
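# Sketch of the remainder of the script (assumed, not shown in this hunk): the model
# is loaded from `path` with transformers and serialized to torch-dumps/<model_name>.pt,
# as the docstring above describes; the exact from_pretrained() options may differ.
model = AutoModelForCausalLM.from_pretrained(path)           # assumption: default loading options
torch.save(model, Path('torch-dumps') / f'{model_name}.pt')  # the documented output location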
@@ -4,9 +4,9 @@ This is a library for formatting GPT-4chan and chat outputs as nice HTML.
+import copy
import re
-import copy
def generate_basic_html(s):
    s = '\n'.join([f'<p style="margin-bottom: 20px">{line}</p>' for line in s.split('\n')])
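    # Usage sketch for the line above (illustrative): each input line becomes its own
    # <p> element; the rest of generate_basic_html is not shown in this hunk, so the
    # final return value may be processed further.
    #
    #   generate_basic_html('Hello\nWorld')
    #   # '<p style="margin-bottom: 20px">Hello</p>\n<p style="margin-bottom: 20px">World</p>'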