#!/bin/bash
#
# Import an SQLite dump of a Bacula catalog into Postgres
# Designed for v1.63.3 (as found on Debian sarge)
#
# Copyright (c) 2006 Russell Howe <russell_howe@wreckage.org>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
# Tables, in order of size
TABLES=(File Filename Path Job Media Pool CDImages Counters Version Client FileSet JobMedia NextId UnsavedFiles BaseFiles)
# Tables, in insert order
TABLESINSERT=(Pool CDImages Client Counters FileSet Filename Job Media JobMedia NextId Path File UnsavedFiles Version BaseFiles)
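# (The insert order matters: referenced tables such as Pool, Client and Job
# must be loaded before the tables that point at them, e.g. JobMedia and File.)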
LOGFILE="/var/tmp/sqlite2pgsql.$$.log"
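# NOTE: The two variables below are assumptions: the rest of the script uses
# $DBNAME and $FILE without defining them, so set them for your environment.
DBNAME="bacula"          # assumed: the target PostgreSQL database
FILE="${1:-bacula.sql}"  # assumed: the SQLite text dump of the catalog,
                         # e.g. produced by: echo .dump | sqlite3 bacula.db > bacula.sql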
37 if [ "x" == "x$1" ]; then
38 echo "Error: importdata() called without an argument. Aborting."
44 if [ ! -r "$SQLFILE" ]; then
45 echo "Error: Cannot read from $SQLFILE. Aborting."
49 echo -n "Loading $SQLFILE into database $DBNAME..."
50 psql -d "$DBNAME" -f "$SQLFILE" || (
51 echo "Failed to load $SQLFILE into database $DBNAME. psql exited with return code $?. Aborting."
# Go through each of the table names, splitting the INSERT statements off
# into a separate file per table
for table in ${TABLES[@]}; do
    SRC="$FILE.other"
    if [ ! -f "$FILE.other" ]; then
        SRC="$FILE"
    fi
    PATTERN="^INSERT INTO $table "
    if [ ! -f "$FILE.data.$table" ]; then
        echo -n "Separating $table table from database dump..."

        echo "BEGIN;" > "$FILE.data.$table.tmp"
        grep "$PATTERN" "$SRC" >> "$FILE.data.$table.tmp"
        echo "COMMIT;" >> "$FILE.data.$table.tmp"

        mv "$FILE.data.$table.tmp" "$FILE.data.$table"
        echo "done. ($FILE.data.$table)"
        echo -n "Stripping matched lines from the source file to speed up the next round..."
        grep -v "$PATTERN" "$SRC" > "$FILE.other.tmp"
        mv "$FILE.other.tmp" "$FILE.other"
        echo "done."
    else
        echo "$FILE.data.$table already exists. Assuming this table has already been split"
        echo "off from the main dump. Not regenerating."
    fi
done
84 echo "Seperating DDL statements from INSERT statements"
86 grep -v "^INSERT" "$FILE.other" > "$FILE.ddl"
87 echo "DDL statements are now in $FILE.ddl"
89 grep "^INSERT" "$FILE.other" > "$FILE.data.other"
90 echo "Any remaining INSERT statements are now in $FILE.data.other"
92 echo "Fixing up datatypes used in the DDL..."
94 sed -e 's/TINYINT/SMALLINT/g' \
95 -e 's/DATETIME/TIMESTAMP/g' \
96 -e 's/INTEGER UNSIGNED/INTEGER/g' \
97 -e 's/BIGINT UNSIGNED/BIGINT/g' \
98 -e 's/INTEGER AUTOINCREMENT/SERIAL/g' \
99 -e s/\ DEFAULT\ \"\"/\ DEFAULT\ \'\'/g \
100 -e s#\ TIMESTAMP\ DEFAULT\ 0#\ TIMESTAMP\ DEFAULT\ \'1/1/1970\'#g "$FILE.ddl" > "$FILE.ddl.postgres"
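# The -e expressions apply in order on each line, so a (hypothetical) SQLite
# column definition such as:
#   StartTime DATETIME DEFAULT 0, JobStatus TINYINT
# comes out as valid Postgres:
#   StartTime TIMESTAMP DEFAULT '1/1/1970', JobStatus SMALLINT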
102 echo "Fixing Pool table..."
104 sed -e 's/,0,0);$/,NULL,NULL);/' "$FILE.data.Pool" > "$FILE.data.Pool.fixed"
106 echo "Fixing removing entries from Job table which no longer have a Pool to link to"
108 # Remove jobs which refer to nonexistent pools, and fix up invalid start and end times to be 1/1/1970
109 grep -vE '([2589]|1[0-5]),[0-9]+,[0-9]+,[0-9]+\);' "$FILE.data.Job" \
110 |sed -e s@^\\\(INSERT\ INTO\ Job\ VALUES\(\\\(\[^,\]\\\+,\\\)\\\{8\\\}\\\)0,@\\1NULL,@ \
111 -e s@^\\\(INSERT\ INTO\ Job\ VALUES\(\\\(\[^,\]\\\+,\\\)\\\{9\\\}\\\)0,@\\1\NULL,@ \
112 -e s@^\\\(INSERT\ INTO\ Job\ VALUES\(\\\(\[^,\]\\\+,\\\)\\\{17\\\}\\\)0,@\\1\NULL,@ \
113 -e s@^\\\(INSERT\ INTO\ Job\ VALUES\(\\\(\[^,\]\\\+,\\\)\\\{18\\\}\\\)0,@\\1\NULL,@ \
114 -e s@^\\\(INSERT\ INTO\ Job\ VALUES\(\\\(\[^,\]\\\+,\\\)\\\{5\\\}\\\)0,@\\1NULL,@ > "$FILE.data.Job.fixed"
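# Schematic example of what the sed expressions do (f1..f8 are placeholders,
# not real catalog columns): with 8 fields before it, a literal 0 in the 9th
# position becomes NULL:
#   INSERT INTO Job VALUES(f1,f2,f3,f4,f5,f6,f7,f8,0,...);
#     -> INSERT INTO Job VALUES(f1,f2,f3,f4,f5,f6,f7,f8,NULL,...);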
# Remove JobMedia entries which refer to nonexistent Jobs

echo "Cleaning up the dump of the JobMedia table..."

grep -vE 'INSERT INTO JobMedia VALUES\([0-9]+,([12589]|1[0-4]),' "$FILE.data.JobMedia" > "$FILE.data.JobMedia.fixed"
# Remove File entries which refer to nonexistent Jobs

echo "Cleaning up the dump of the File table..."

grep -vE 'INSERT INTO File VALUES\([0-9]+,[0-9]+,([12589]|1[0-4]),' "$FILE.data.File" > "$FILE.data.File.fixed"
128 echo "OK, we should be ready to import data into PostgreSQL now. DDL first..."
129 echo "This will probably fail the first time. You will have to edit $FILE.other"
130 echo "and rearrange the CREATE TABLE statements so that the tables are created"
131 echo "in the correct order."
132 echo "After editing $FILE.other, simply rerun this script and it will carry on"
133 echo "where it left off."
135 importdata "$FILE.ddl.postgres"
for table in ${TABLESINSERT[@]} other; do
    IMPORTFILE="$FILE.data.$table"
    if [ -f "$FILE.data.$table.fixed" ]; then
        IMPORTFILE="$FILE.data.$table.fixed"
    fi

    importdata "$IMPORTFILE" 2>&1 | tee -a "$LOGFILE"
done
145 echo "All done! Check $LOGFILE for errors."