The entire import and export system should now fully work, but don't rely on anything here — keep your own personal backups elsewhere, such as a spreadsheet.

This commit is contained in:
cube
2026-03-30 15:11:20 +01:00
parent b907c1a546
commit 665c8e905d

View File

@@ -305,6 +305,21 @@ def import_member(member):
db.execute("INSERT INTO blog (member_id, created, title, content, public) VALUES (?, ?, ?, ?, ?)",(mid, date_created, title, content, privacy))
db.commit()
icons = member["icons"]
for icon in icons:
db.execute("INSERT INTO icons (member_id, icon_location) VALUES (?, ?)",(mid, icon))
db.commit()
blinkies = member["blinkies"]
for blinkie in blinkies:
db.execute("INSERT INTO blinkies (member_id, blinkie_location) VALUES (?, ?)",(mid, blinkie))
db.commit()
stamps = member["stamps"]
for stamp in stamps:
db.execute("INSERT INTO stamps (member_id, stamp_location) VALUES (?, ?)",(mid, stamp))
db.commit()
@bp.route("/admin", methods=("GET", "POST"))
@@ -330,7 +345,23 @@ def admin():
elif "zip" in request.files:
file = request.files["zip"]
with zipfile.ZipFile(file, "r") as zipf:
zipf.printdir()
for f in zipf.namelist():
dirs = f.split("/")
content_type = dirs[2]
filename = dirs[3]
if content_type == "tmp":
system_content = zipf.read(f)
system_json = json.loads(system_content.decode())
groups = system_json["groups"]
import_groups(groups)
for m in system_json["members"]:
import_member(m)
else:
zipf.extract(f)
return "upload zip"
@@ -392,6 +423,9 @@ def generate_json(mid):
for icon in icons_r:
icons.append(icon[0])
main_icon_id = member[6]
main_icon = db.execute("SELECT icon_location FROM icons WHERE id=(?)",(main_icon_id,)).fetchone()
blinkies = []
for blinkie in blinkies_r:
blinkies.append(blinkie[0])
@@ -415,7 +449,8 @@ def generate_json(mid):
"icons":icons,
"blinkies":blinkies,
"stamps":stamps,
"homepage-pin":homepage
"homepage-pin":homepage,
"main-icon":main_icon
}
return data