Direct manipulation of the database with backup of user data. [help]

I like to play with things, and my current fascination is this: can you copy all the user data from [db1]?
take a copy of a running plex docker with hourly updates on. [db2]

now delete the user data from db2 and insert the user data from [db1] making a complete up to date copy of the plex docker with up to date metadata and such.

now all dockers are sharing the same file paths for metadata so you only need 1 copy of that.
If you force all scans to be disabled on the dockers (pick whatever method you like), you now have only one docker scanning once an hour. Set your import script to run shortly after, and the load stays so low you'll wonder what's up.

I have this running great if all I want to do is copy the source over the top of the target db, but we want to preserve watch history and such for all users on the target before replacement, so we can then import it into the new database. So I'm submitting my best pass at it in the hopes that someone can confirm/add/remove tables to the list and/or suggest a better method.

The goal is to eliminate scans and save space thereby allowing me to host my whole family on 1 NAS with each having their own private server with the same library shared between all. Like what the kids call an appbox but smaller scale with a need to preserve ram/cpu.

import sqlite3
import subprocess

# Path to the "golden" source library database (the one real scanner's copy).
source_db = '/home/quickfix/com.plexapp.plugins.library.db'
# Text file listing one target database path per line.
target_db_paths = '/home/sql.txt'
# The generated .sql script of INSERT statements is appended here.
output_file = '/home/sqlrun.sql'

# Plex ships its own SQLite binary; NOTE(review): this path and debug_mode
# appear unused in the visible code — confirm before removing.
sqlite3_path = '/usr/lib/plexmediaserver/Plex SQLite'
debug_mode = True  # Set to True to enable debug mode

# Read the target database paths from the file
# Read the target database paths from the file (one path per line; blank
# lines are skipped so a trailing newline does not yield a bogus path).
with open(target_db_paths, 'r') as path_file:
    target_dbs = [line.strip() for line in path_file if line.strip()]


def _sql_literal(value):
    """Render one SQLite column value as a SQL literal for the export file.

    Naive str(tuple) formatting breaks on NULLs (emits the word ``None``),
    embedded single quotes, and BLOBs; this produces valid SQLite syntax.
    """
    if value is None:
        return "NULL"
    if isinstance(value, (int, float)):
        return str(value)
    if isinstance(value, bytes):
        # SQLite blob literal syntax: X'<hex>'
        return "X'" + value.hex() + "'"
    # SQL string literal: escape single quotes by doubling them.
    return "'" + str(value).replace("'", "''") + "'"


# Connect to the source database (read side of the sync).
source_conn = sqlite3.connect(source_db)

try:
    # Sync every table that differs from source into each target database.
    for target_db in target_dbs:
        target_conn = sqlite3.connect(target_db)
        try:
            source_cursor = source_conn.cursor()
            source_cursor.execute(
                "SELECT name FROM sqlite_master WHERE type='table';")
            source_tables = [row[0] for row in source_cursor.fetchall()]

            target_cursor = target_conn.cursor()
            target_cursor.execute(
                "SELECT name FROM sqlite_master WHERE type='table';")
            # Set for O(1) membership tests below.
            target_tables = {row[0] for row in target_cursor.fetchall()}

            # A table needs syncing when it is missing from the target or
            # its full contents differ.  (The original crashed with
            # OperationalError when a source table was absent in the target.)
            different_tables = []
            for table in source_tables:
                if table not in target_tables:
                    different_tables.append(table)
                    continue
                # Identifiers cannot be bound as parameters; double-quote
                # them instead of raw interpolation.
                source_cursor.execute(f'SELECT * FROM "{table}"')
                target_cursor.execute(f'SELECT * FROM "{table}"')
                # NOTE: loads both tables fully into memory; acceptable for
                # typical Plex libraries, revisit for very large tables.
                if source_cursor.fetchall() != target_cursor.fetchall():
                    different_tables.append(table)

            # Drop the stale tables, committing so the schema change lands
            # before the copies below.
            for table in different_tables:
                target_conn.execute(f'DROP TABLE IF EXISTS "{table}"')
            target_conn.commit()

            # Recreate each differing table from the source's stored DDL and
            # bulk-copy its rows with bound parameters — safe for quotes,
            # NULLs and blobs, unlike string-formatted INSERT statements.
            for table in different_tables:
                source_cursor.execute(
                    "SELECT sql FROM sqlite_master "
                    "WHERE type='table' AND name=?", (table,))
                create_table_sql = source_cursor.fetchone()[0]
                target_conn.execute(create_table_sql)

                source_cursor.execute(f'SELECT * FROM "{table}"')
                rows = source_cursor.fetchall()
                if rows:
                    placeholders = ', '.join('?' * len(rows[0]))
                    target_conn.executemany(
                        f'INSERT INTO "{table}" VALUES ({placeholders})',
                        rows)
            target_conn.commit()

            # Export equivalent INSERT statements so the sync can be
            # replayed or audited later with the Plex SQLite binary.
            with open(output_file, 'a') as sql_out:
                sql_out.write(
                    f"-- Difference insert commands for {target_db}\n")
                for table in different_tables:
                    source_cursor.execute(f'SELECT * FROM "{table}"')
                    for row in source_cursor.fetchall():
                        values = ', '.join(_sql_literal(v) for v in row)
                        sql_out.write(
                            f'INSERT INTO "{table}" VALUES ({values});\n')
        finally:
            # Always release the per-target connection, even on error.
            target_conn.close()
finally:
    # Close the source database connection.
    source_conn.close()