#include "bacula.h"
#include "cats.h"
-static const int dbglevel = 500;
+static const int dbglevel = 100;
#if HAVE_SQLITE3 || HAVE_MYSQL || HAVE_SQLITE || HAVE_POSTGRESQL || HAVE_DBI
*/
bool db_create_file_attributes_record(JCR *jcr, B_DB *mdb, ATTR_DBR *ar)
{
+ ASSERT(ar->FileType != FT_BASE);
+
Dmsg1(dbglevel, "Fname=%s\n", ar->fname);
Dmsg0(dbglevel, "put_file_into_catalog\n");
}
B_DB *bdb = jcr->db_batch;
- /*
- * Make sure we have an acceptable attributes record.
- */
- if (!(ar->Stream == STREAM_UNIX_ATTRIBUTES ||
- ar->Stream == STREAM_UNIX_ATTRIBUTES_EX)) {
- Mmsg1(&mdb->errmsg, _("Attempt to put non-attributes into catalog. Stream=%d\n"),
- ar->Stream);
- Jmsg(jcr, M_FATAL, 0, "%s", mdb->errmsg);
- return false;
- }
-
split_path_and_file(jcr, bdb, ar->fname);
db_lock(mdb);
Dmsg1(dbglevel, "Fname=%s\n", ar->fname);
Dmsg0(dbglevel, "put_file_into_catalog\n");
- /*
- * Make sure we have an acceptable attributes record.
- */
- if (!(ar->Stream == STREAM_UNIX_ATTRIBUTES ||
- ar->Stream == STREAM_UNIX_ATTRIBUTES_EX)) {
- Mmsg1(&mdb->errmsg, _("Attempt to put non-attributes into catalog. Stream=%d\n"),
- ar->Stream);
- Jmsg(jcr, M_ERROR, 0, "%s", mdb->errmsg);
- goto bail_out;
- }
-
split_path_and_file(jcr, mdb, ar->fname);
#endif /* ! HAVE_BATCH_FILE_INSERT */
+
+/*
+ * List of SQL commands used to create the per-job temporary basefile
+ * table, indexed by catalog backend type (db_type) in the order:
+ * MySQL, PostgreSQL, SQLite, SQLite3.  Each entry is a format string;
+ * %lld is filled in with the JobId (see db_create_base_file_list).
+ */
+const char *create_temp_basefile[4] = {
+   /* MySQL */
+   "CREATE TEMPORARY TABLE basefile%lld ("
+// "CREATE TABLE basefile%lld ("          /* non-temporary variant kept for debugging */
+   "Path BLOB NOT NULL,"
+   "Name BLOB NOT NULL)",
+
+   /* Postgresql */
+   "CREATE TEMPORARY TABLE basefile%lld ("
+// "CREATE TABLE basefile%lld ("          /* non-temporary variant kept for debugging */
+   "Path TEXT,"
+   "Name TEXT)",
+
+   /* SQLite */
+   "CREATE TEMPORARY TABLE basefile%lld ("
+   "Path TEXT,"
+   "Name TEXT)",
+
+   /* SQLite3 */
+   "CREATE TEMPORARY TABLE basefile%lld ("
+   "Path TEXT,"
+   "Name TEXT)"
+};
+
+/*
+ * Create a file attributes record, or a base file attributes record,
+ * depending on ar->FileType.  Validates that the stream actually
+ * carries attributes before touching the catalog.
+ *
+ * Returns: true on success (or on the unsupported Copy/Migrate-with-Base
+ *          case, which is reported but not treated as fatal here),
+ *          false if the stream is not an attributes stream.
+ */
+bool db_create_attributes_record(JCR *jcr, B_DB *mdb, ATTR_DBR *ar)
+{
+   bool ret;
+
+   /*
+    * Make sure we have an acceptable attributes record.
+    */
+   if (!(ar->Stream == STREAM_UNIX_ATTRIBUTES ||
+         ar->Stream == STREAM_UNIX_ATTRIBUTES_EX)) {
+      /* Jmsg1 with ar->Stream: the %d conversion previously had no
+       * matching argument, which is undefined behavior. */
+      Jmsg1(jcr, M_FATAL, 0, _("Attempt to put non-attributes into catalog. Stream=%d\n"),
+            ar->Stream);
+      return false;
+   }
+
+   if (ar->FileType != FT_BASE) {
+      /* Regular file: normal File record */
+      ret = db_create_file_attributes_record(jcr, mdb, ar);
+
+   } else if (jcr->HasBase) {
+      /* Base file: goes into the per-job temporary basefile table */
+      ret = db_create_base_file_attributes_record(jcr, mdb, ar);
+
+   } else {
+      Jmsg0(jcr, M_FATAL, 0, _("Can't Copy/Migrate job using BaseJob"));
+      ret = true;               /* in copy/migration what do we do ? */
+   }
+
+   return ret;
+}
+
+/*
+ * Create Base File record in B_DB: insert the file's escaped Path and
+ * Name into the per-job temporary table basefile<JobId>, to be joined
+ * against new_basefile<JobId> at commit time.
+ *
+ * Returns: true if the INSERT succeeded.
+ */
+bool db_create_base_file_attributes_record(JCR *jcr, B_DB *mdb, ATTR_DBR *ar)
+{
+   bool ret;
+   Dmsg1(dbglevel, "create_base_file Fname=%s\n", ar->fname);
+   Dmsg0(dbglevel, "put_base_file_into_catalog\n");
+
+   db_lock(mdb);                /* paired with db_unlock below */
+   split_path_and_file(jcr, mdb, ar->fname);
+
+   /* SQL escaping can at worst double the length; +1 for the NUL */
+   mdb->esc_name = check_pool_memory_size(mdb->esc_name, mdb->fnl*2+1);
+   db_escape_string(jcr, mdb, mdb->esc_name, mdb->fname, mdb->fnl);
+
+   mdb->esc_path = check_pool_memory_size(mdb->esc_path, mdb->pnl*2+1);
+   db_escape_string(jcr, mdb, mdb->esc_path, mdb->path, mdb->pnl);
+
+   Mmsg(mdb->cmd, "INSERT INTO basefile%lld (Path, Name) VALUES ('%s','%s')",
+        (uint64_t)jcr->JobId, mdb->esc_path, mdb->esc_name);
+
+   ret = INSERT_DB(jcr, mdb, mdb->cmd);
+   db_unlock(mdb);
+
+   return ret;
+}
+
+/*
+ * Cleanup the base file temporary tables: drop new_basefile<JobId>
+ * and basefile<JobId> once they are no longer needed.
+ */
+static void db_cleanup_base_file(JCR *jcr, B_DB *mdb)
+{
+   POOL_MEM query(PM_MESSAGE);
+   uint64_t jobid = (uint64_t) jcr->JobId;
+
+   Mmsg(query, "DROP TABLE new_basefile%lld", jobid);
+   db_sql_query(mdb, query.c_str(), NULL, NULL);
+   Mmsg(query, "DROP TABLE basefile%lld", jobid);
+   db_sql_query(mdb, query.c_str(), NULL, NULL);
+}
+
+/*
+ * Put all base files seen in the backup to the BaseFiles table
+ * and cleanup temporary tables.  The join of basefile<JobId> (files
+ * seen during this backup) with new_basefile<JobId> (reference list)
+ * yields the FileIds to record against this JobId.
+ *
+ * Returns: true if the INSERT..SELECT query succeeded.
+ */
+bool db_commit_base_file_attributes_record(JCR *jcr, B_DB *mdb)
+{
+   bool ret;
+   char ed1[50];
+
+   db_lock(mdb);
+
+   /*
+    * edit_uint64() formats JobId into ed1 and returns ed1, so the same
+    * string fills all three %s below: the JobId value and the two
+    * table-name suffixes basefile<JobId> / new_basefile<JobId>.
+    */
+   Mmsg(mdb->cmd,
+  "INSERT INTO BaseFiles (BaseJobId, JobId, FileId, FileIndex) "
+   "SELECT B.JobId AS BaseJobId, %s AS JobId, "
+          "B.FileId, B.FileIndex "
+     "FROM basefile%s AS A, new_basefile%s AS B "
+    "WHERE A.Path = B.Path "
+      "AND A.Name = B.Name "
+    "ORDER BY B.FileId",
+        edit_uint64(jcr->JobId, ed1), ed1, ed1);
+   ret = QUERY_DB(jcr, mdb, mdb->cmd);
+   /* Number of base-file records actually reused by this job */
+   jcr->nb_base_files_used = sql_affected_rows(mdb);
+   db_cleanup_base_file(jcr, mdb);
+
+   db_unlock(mdb);
+   return ret;
+}
+
+/*
+ * Find the last "accurate" backup state with Base jobs
+ * 1) Get all files with jobid in list (F subquery)
+ * 2) Take only the last version of each file (Temp subquery) => accurate list is ok
+ * 3) Put the result in a temporary table for the end of job
+ *
+ * Returns: true when both temporary tables were created; false on an
+ *          empty jobids list or a failed SQL query.
+ */
+bool db_create_base_file_list(JCR *jcr, B_DB *mdb, char *jobids)
+{
+   POOL_MEM buf;
+   bool ret=false;
+
+   db_lock(mdb);
+
+   if (!*jobids) {              /* refuse an empty JobId list */
+      Mmsg(mdb->errmsg, _("ERR=JobIds are empty\n"));
+      goto bail_out;
+   }
+
+   /* Create basefile<JobId> using the backend-specific SQL for db_type */
+   Mmsg(mdb->cmd, create_temp_basefile[db_type], (uint64_t) jcr->JobId);
+   if (!db_sql_query(mdb, mdb->cmd, NULL, NULL)) {
+      goto bail_out;
+   }
+   /* Backend-specific "most recent version of each file" subquery;
+    * jobids is substituted twice into the template */
+   Mmsg(buf, select_recent_version[db_type], jobids, jobids);
+   Mmsg(mdb->cmd,
+"CREATE TEMPORARY TABLE new_basefile%lld AS "
+//"CREATE TABLE new_basefile%lld AS "
+  "SELECT Path.Path AS Path, Filename.Name AS Name, Temp.FileIndex AS FileIndex,"
+         "Temp.JobId AS JobId, Temp.LStat AS LStat, Temp.FileId AS FileId, "
+         "Temp.MD5 AS MD5 "
+  "FROM ( %s ) AS Temp "
+  "JOIN Filename ON (Filename.FilenameId = Temp.FilenameId) "
+  "JOIN Path ON (Path.PathId = Temp.PathId) "
+ "WHERE Temp.FileIndex > 0",
+        (uint64_t)jcr->JobId, buf.c_str());
+
+   ret = db_sql_query(mdb, mdb->cmd, NULL, NULL);
+bail_out:
+   db_unlock(mdb);              /* always release the lock, even on error */
+   return ret;
+}
+
#endif /* HAVE_SQLITE3 || HAVE_MYSQL || HAVE_SQLITE || HAVE_POSTGRESQL || HAVE_DBI */