Add initial rt2fs script

fnux 2024-04-18 15:53:32 +02:00
parent 7d70eff100
commit b44e8eb96a
No known key found for this signature in database
GPG Key ID: 4502C902C00A1E12
1 changed file with 88 additions and 0 deletions

rt2fs Executable file

@@ -0,0 +1,88 @@
#!/usr/bin/env python
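# rt2fs: dump Request Tracker (RT) tickets, their history, attachments and the
# involved users to local pickle files, configured via rt2zammad.json.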
import base64
import json
import os
import pickle
import sys
from multiprocessing import Pool
from rt.rest1 import Rt
TEMPLATE = """{
"zammad_url": "",
"zammad_user": "",
"zammad_password": "",
"rt_url": "",
"rt_user": "",
"rt_pass": "",
"rt_start": 1,
"rt_end": 1000,
"usermap": {},
"userdata": {}
}
"""
### helpers ###
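# Fetch a user record from RT and cache it on disk as a pickle, unless it has
# already been dumped.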
def maybe_dump_user(rt, username):
dumpfile = f"users/{username}"
if not os.path.exists(dumpfile):
user = rt.get_user(username)
with open(dumpfile, "wb") as handle:
pickle.dump(user, handle)
### main logic ###
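# Load the configuration, log in to RT, then dump every ticket in the
# configured ID range.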
if not os.path.exists("rt2zammad.json"):
print("Missing rt2zammad.json!")
print("Create one based on following template:")
print(TEMPLATE)
sys.exit(1)
with open("rt2zammad.json") as handle:
config = json.load(handle)
rt = Rt(config["rt_url"], config["rt_user"], config["rt_pass"])
if not rt.login():
print("Failed to login to RT!")
sys.exit(2)
os.makedirs("users", exist_ok=True)
os.makedirs("tickets", exist_ok=True)
os.makedirs("attachments", exist_ok=True)
ticket_ids = range(config["rt_start"], config["rt_end"])
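# Worker: dump a single ticket, its history, its attachments and the users
# involved, skipping tickets that were already dumped.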
def dump(id):
print(f"Dumping RT#{id}")
ticket_dumpfile = f"tickets/{id}"
if not os.path.exists(ticket_dumpfile):
ticket = rt.get_ticket(id)
if ticket is None:
print("Got 'None' while fetching ticket")
        sys.exit(1)
ticket["original_id"] = str(id)
if ticket["Queue"] != 'spam':
maybe_dump_user(rt, ticket["Creator"])
maybe_dump_user(rt, ticket["Owner"])
if ticket["original_id"] != ticket["numerical_id"]:
# Merged ticket
history = []
else:
history = rt.get_history(id)
for item in history:
for a, title in item["Attachments"]:
attachment = rt.get_attachment(id, a)
os.makedirs(f"attachments/{id}", exist_ok=True)
with open(f"attachments/{id}/{a}", "wb") as handle:
pickle.dump(attachment, handle)
maybe_dump_user(rt, item["Creator"])
data = {"ticket": ticket, "history": history}
with open(ticket_dumpfile, "wb") as handle:
pickle.dump(data, handle)
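# Dump all tickets in parallel with a small pool of worker processes.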
worker_count = 4
with Pool(worker_count) as p:
p.map(dump, ticket_ids, chunksize=10)