# Shell script to create the Bacula PostgreSQL tables
#
bindir=@SQL_BINDIR@
+# Database name is now substituted at configure time via @db_name@
+# instead of being hard-coded to "bacula".
+db_name=@db_name@
-$bindir/psql -f - -d bacula $* <<END-OF-DATA
+$bindir/psql -f - -d ${db_name} $* <<END-OF-DATA
CREATE TABLE filename
(
primary key (filenameid)
);
+-- Raise the planner's per-column statistics target on the frequently
+-- joined name column (PostgreSQL default is much lower) so selectivity
+-- estimates on this very large table are more accurate.
+ALTER TABLE filename ALTER COLUMN name SET STATISTICS 1000;
CREATE INDEX filename_name_idx on filename (name);
CREATE TABLE path
primary key (pathid)
);
+-- Raise the planner's statistics target on path: long text values on a
+-- large table benefit from a bigger histogram for estimate accuracy.
+ALTER TABLE path ALTER COLUMN path SET STATISTICS 1000;
CREATE INDEX path_name_idx on path (path);
CREATE TABLE file
CREATE INDEX job_name_idx on job (name);
+-- Create a table like Job for long term statistics.
+-- NOTE: LIKE copies column definitions and NOT NULL constraints only;
+-- indexes, defaults and other constraints are NOT inherited.
+CREATE TABLE jobstat (LIKE job);
+
CREATE TABLE Location (
LocationId serial not null,
Location text not null,
Cost integer default 0,
+ -- NOTE(review): smallint flag; presumably 0/1 (and possibly an
+ -- "archived" value) -- confirm encoding against the Director code.
+ Enabled smallint,
primary key (LocationId)
);
initialwrite timestamp without time zone,
scratchpoolid integer default 0,
recyclepoolid integer default 0,
+ -- presumably a free-form operator note attached to the volume --
+ -- TODO(review): confirm against the catalog update code.
+ comment text,
primary key (mediaid)
);
create unique index client_name_idx on client (name);
+-- Job log output stored in the catalog, one row per log entry,
+-- keyed back to the owning job via JobId.
+CREATE TABLE Log
+(
+ LogId serial not null,
+ JobId integer not null,
+ Time timestamp without time zone,
+ LogText text not null,
+ primary key (LogId)
+);
+-- NOTE(review): despite the "_name_" suffix this index is on JobId,
+-- the column actually used to fetch a job's log lines.
+create index log_name_idx on Log (JobId);
+
+-- History of volume (Media) moves between Locations: records the
+-- date, the volume, the destination, and the volume status/enabled
+-- state at the time of the move.
+CREATE TABLE LocationLog (
+ LocLogId SERIAL NOT NULL,
+ Date timestamp without time zone,
+ Comment TEXT NOT NULL,
+ MediaId INTEGER DEFAULT 0,
+ LocationId INTEGER DEFAULT 0,
+ -- newvolstatus is restricted to the known Bacula volume states
+ -- by the CHECK constraint below.
+ newvolstatus text not null
+ check (newvolstatus in ('Full','Archive','Append',
+ 'Recycle','Purged','Read-Only','Disabled',
+ 'Error','Busy','Used','Cleaning','Scratch')),
+ newenabled smallint,
+ PRIMARY KEY(LocLogId)
+);
+
+
CREATE TABLE counters
(
('t', 'Waiting on start time');
INSERT INTO Status (JobStatus,JobStatusLong) VALUES
('p', 'Waiting on higher priority jobs');
-
+-- New job-status codes: Storage Daemon attribute despooling ('a') and
+-- batch insertion of file records ('i').
+INSERT INTO Status (JobStatus,JobStatusLong) VALUES
+ ('a', 'SD despooling attributes');
+INSERT INTO Status (JobStatus,JobStatusLong) VALUES
+ ('i', 'Doing batch insert file records');
+-- NOTE(review): catalog schema version marker; presumably must match
+-- the version the daemons expect -- verify before changing.
INSERT INTO Version (VersionId) VALUES (10);