Backup_ALL_Models.py
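"""
Back up every locally installed Ollama model.

For each model returned by `ollama list`, the script rebuilds a Modelfile
(FROM / TEMPLATE / PARAMETER / SYSTEM) from `ollama show` output and copies the
model's .gguf blob out of the Ollama model folder into its own backup folder.
Written for Windows (it uses subprocess.CREATE_NO_WINDOW and drive-letter
paths); set Ollama_Model_Folder and BackUp_Folder in process_models() before
running.
"""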
import subprocess
import re
import os
import shutil
def sanitize_filename_MF(name):
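    """Drop the ':latest' tag and replace characters that are unsafe in file/folder names with '-'."""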
    name = name.replace(":latest", "")
    return re.sub(r'[<>:"/\\|?*.]', '-', name)
def run_command(command):
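    """Run a shell command without spawning a console window and return its stripped stdout."""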
    process = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                               stdin=subprocess.PIPE, text=True, encoding='utf-8',
                               creationflags=subprocess.CREATE_NO_WINDOW)
    output_text, error_text = process.communicate()
    return output_text.strip()
def create_ollama_model_file(model_name, output_file, BackUp_Folder, Ollama_Model_Folder):
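    """Write a backup Modelfile for `model_name` and copy its .gguf blob into BackUp_Folder."""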
    template_command = f'ollama show --template {model_name}'
    template = run_command(template_command)
    if not template:
        print(f"Error: model '{model_name}' not found or its template is empty. Please check the model name and try again.")
        return
    parameters_command = f'ollama show --parameters {model_name}'
    parameters = run_command(parameters_command)
    system_command = f'ollama show --system {model_name}'
    system_message = run_command(system_command)
    modelfile_command = f'ollama show --modelfile {model_name}'
    modelfile_message = run_command(modelfile_command)
    model_name = sanitize_filename_MF(model_name)
    new_folder_path = os.path.join(BackUp_Folder, model_name)
    #****************************************************************
    # Skip models that already have a backup folder.
    if os.path.exists(new_folder_path) and os.path.isdir(new_folder_path):
        print(f"Model '{model_name}' already exists in the backup folder, so it will be skipped.")
        return
    #****************************************************************
    if not os.path.exists(new_folder_path):
        os.makedirs(new_folder_path)
        print(f"Created folder: {new_folder_path}")
    else:
        print(f"Folder already exists: {new_folder_path}")
    # Rebuild a Modelfile that points at the local .gguf copy; triple quotes
    # keep multi-line templates and system prompts intact.
    model_content = f'FROM {model_name}.gguf\n'
    model_content += 'TEMPLATE """' + template + '"""\n'
    for line in parameters.splitlines():
        model_content += f'PARAMETER {line}\n'
    model_content += 'SYSTEM """' + system_message + '"""\n'
    print(model_content)
    with open(os.path.join(new_folder_path, output_file), 'w', encoding='utf-8') as file:
        file.write(model_content)
    print(f'Model file created: {output_file}')
    # `ollama show --modelfile` contains a FROM line with the blob's path
    # inside the Ollama model folder; extract it and copy the blob.
    modelfile_message = modelfile_message.strip()
    model_file_location_match = re.search(fr'FROM\s+({re.escape(Ollama_Model_Folder)}\S*)', modelfile_message, re.MULTILINE)
    extracted_model_file_location = model_file_location_match.group(1) if model_file_location_match else "Model_file_location_not_found"
    print(f"Model gguf file found: {extracted_model_file_location}")
    new_model_file_name = f"{model_name}.gguf"
    new_model_file_path = os.path.join(new_folder_path, new_model_file_name)
    if os.path.exists(extracted_model_file_location):
        shutil.copy2(extracted_model_file_location, new_model_file_path)
        print(f"Copied and renamed model file to: {new_model_file_path}")
    else:
        print(f"Model file not found at: {extracted_model_file_location}")
def process_models(model_names):
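    """Back up each model in `model_names`: one sub-folder per model holding a ModelFile and the .gguf copy."""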
    for model_name in model_names:
        model_name = model_name.strip()
        if not model_name:
            continue
        print(model_name)
        #output_file = f"Modelfile-{sanitize_filename_MF(model_name)}"
        output_file = "ModelFile"
        #****************************************************************
        # Your ollama model folder:
        Ollama_Model_Folder = r"D:\llama\.ollama\models"
        # Where you want to back up your models:
        BackUp_Folder = r"E:\llama_backup"
        #****************************************************************
        create_ollama_model_file(model_name, output_file, BackUp_Folder, Ollama_Model_Folder)
def extract_names(data):
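    """Parse `ollama list` output and return the model names joined with ';;;'."""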
    lines = data.strip().split('\n')
    names = [line.split()[0] for line in lines[1:]]  # skip the header row
    return ';;;'.join(names)
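# Enumerate the locally installed models and back each one up.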
data = run_command("ollama list")
model_names_string = extract_names(data)
model_names = model_names_string.split(";;;")
process_models(model_names)
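# Restoring a model from a backup is not handled here; assuming the folder
# layout produced above, running `ollama create <model_name> -f ModelFile`
# from inside a model's backup folder should recreate it, since the
# Modelfile's FROM line points at the .gguf copy sitting next to it.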