diff --git a/app/models/client.rb b/app/models/client.rb
index 84ad610..fed8be4 100644
--- a/app/models/client.rb
+++ b/app/models/client.rb
@@ -1,143 +1,143 @@
# Bacula Client class.
# All hosts that are getting backed up with Bacula have a Client entry, with
# attributes concerning the Client.
class Client < ActiveRecord::Base
establish_connection BACULA_CONF
self.table_name = "#{connection_config[:database]}.Client"
self.primary_key = :ClientId
alias_attribute :name, :Name
alias_attribute :uname, :Uname
alias_attribute :auto_prune, :AutoPrune
alias_attribute :file_retention, :FileRetention
alias_attribute :job_retention, :JobRetention
has_many :jobs, foreign_key: :ClientId
has_one :host, foreign_key: :name, primary_key: :Name
scope :for_user, ->(user_id) { joins(host: :users).where(users: { id: user_id }) }
DAY_SECS = 60 * 60 * 24
delegate :manually_inserted?, :origin, :quota, to: :host, allow_nil: true
# Fetches the client's job_templates that are already persisted to
# Bacula's configuration
#
# @return [ActiveRecord::Relation] of `JobTemplate`
def persisted_jobs
host.job_templates.where(baculized: true).includes(:fileset, :schedule)
end
# Fetches the client's performed jobs in reverse chronological order
#
# @return [ActiveRecord::Relation] of `Job`
def recent_jobs
- jobs.order(SchedTime: :desc).includes(:file_set, :logs)
+ jobs.order(SchedTime: :desc).includes(:file_set)
end
# Helper method. It shows the client's job retention
# (which is expressed in seconds) in days.
#
# @return [Integer]
def job_retention_days
job_retention / DAY_SECS
end
# Helper method. It shows the client's file retention
# (which is expressed in seconds) in days.
#
# @return [Integer]
def file_retention_days
file_retention / DAY_SECS
end
# Helper method for auto_prune
#
# @return [String] 'yes' or 'no'
def auto_prune_human
auto_prune == 1 ? 'yes' : 'no'
end
# Helper method for displaying the last job's datetime in a nice format.
def last_job_date_formatted
if job_time = last_job_datetime
I18n.l(job_time, format: :long)
end
end
# Helper method for fetching the last job's datetime
def last_job_datetime
jobs.backup_type.terminated.last.try(:end_time)
end
# Fetches the first and last job's end times.
#
# @return [Array] of datetimes in proper format
def backup_enabled_datetime_range
jobs.backup_type.terminated.pluck(:end_time).minmax.map { |x| x.strftime('%Y-%m-%d') }
end
# Shows if a client has any backup jobs in Bacula's config
#
# @return [Boolean]
def is_backed_up?
jobs.backup_type.terminated.any?
end
# Shows the total file size of the jobs that run for a specific client
#
# @return [Integer] Size in Bytes
def backup_jobs_size
jobs.backup_type.map(&:job_bytes).sum
end
# Shows the total file count for the jobs that run for a specific client
#
# @return [Integer] File count
def files_count
jobs.map(&:job_files).sum
end
# Counts the client's jobs that are running at the moment
#
# @return [Integer]
def running_jobs
jobs.running.count
end
# Displays the bacula config that is generated from the client's
# host
#
# @return [String]
def bacula_config
return unless host
host.baculize_config.join("\n")
end
# Fetches the job ids that will construct the desired restore
#
# @param file_set_id [Integer] the fileset
# @param restore_point [DateTime] the restore point
#
# @return [Array] of ids
def get_job_ids(file_set_id, restore_point)
job_ids = {}
backup_jobs = jobs.backup_type.terminated.where(file_set_id: file_set_id)
backup_jobs = backup_jobs.where('EndTime < ?', restore_point) if restore_point
job_ids['F'] = backup_jobs.where(level: 'F').pluck(:JobId).last
return [] if job_ids['F'].nil?
job_ids['D'] = backup_jobs.where(level: 'D').where("JobId > ?", job_ids['F']).pluck(:JobId).last
job_ids['I'] = backup_jobs.where(level: 'I').
               where("JobId > ?", job_ids['D'] || job_ids['F']).pluck(:JobId)
job_ids.values.flatten.compact
end
# Fetches the Bacula filesets that are associated with the client
def file_sets
FileSet.joins(:jobs).where(Job: { JobId: job_ids }).uniq
end
end
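
The restore chain that get_job_ids assembles above is easiest to follow with concrete numbers. A sketch with hypothetical JobIds (none of these values appear in the source):

# Suppose one client/fileset pair has these terminated backup jobs:
#   Full ('F'):                JobIds [10, 20]     -> the latest Full, 20, is kept
#   Differential ('D') > 20:   JobIds [23, 27]     -> the latest Diff, 27, is kept
#   Incremental ('I') > 27:    JobIds [28, 29, 31] -> all of them are kept
#
# client.get_job_ids(file_set_id, restore_point)
# # => [20, 27, 28, 29, 31]
#
# With no Full backup before the restore point, the method returns [].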
diff --git a/app/models/job.rb b/app/models/job.rb
index aa736a2..b283144 100644
--- a/app/models/job.rb
+++ b/app/models/job.rb
@@ -1,154 +1,162 @@
# Bacula Job table.
#
# The Job table contains one record for each Job run by Bacula.
# Thus normally, there will be one per day per machine added to the database.
# Note: the JobId is used to index Job records in the database, and it is often shown to the user
# in the Console program.
# However, care must be taken with its use as it is not unique from database to database.
# For example, the user may have a database for Client data saved on machine Rufus and another
# database for Client data saved on machine Roxie.
# In this case, the two databases will each have JobIds that match those in the other database.
# For a unique reference to a Job, see the Job field below.
#
# The Name field of the Job record corresponds to the Name resource record given in the
# Director's configuration file.
# Thus it is a generic name, and it will be normal to find many Jobs (or even all Jobs)
# with the same Name.
#
# The Job field contains a combination of the Name and the schedule time of the Job by the Director.
# Thus for a given Director, even with multiple Catalog databases, the Job will contain a unique
# name that represents the Job.
#
# For a given Storage daemon, the VolSessionId and VolSessionTime form a unique identification
# of the Job.
#
# This will be the case even if multiple Directors are using the same Storage daemon.
#
# The Job Type (or simply Type) is a single-character code; the scopes below use 'B' (Backup) and 'R' (Restore).
class Job < ActiveRecord::Base
establish_connection BACULA_CONF
self.table_name = "#{connection_config[:database]}.Job"
self.primary_key = :JobId
alias_attribute :job_id, :JobId
alias_attribute :job, :Job
alias_attribute :name, :Name
alias_attribute :type, :Type
alias_attribute :level, :Level
alias_attribute :client_id, :ClientId
alias_attribute :job_status, :JobStatus
alias_attribute :sched_time, :SchedTime
alias_attribute :start_time, :StartTime
alias_attribute :end_time, :EndTime
alias_attribute :real_end_time, :RealEndTime
alias_attribute :job_t_date, :JobTDate
alias_attribute :vol_session_id, :VolSessionId
alias_attribute :vol_session_time, :VolSessionTime
alias_attribute :job_files, :JobFiles
alias_attribute :job_bytes, :JobBytes
alias_attribute :read_bytes, :ReadBytes
alias_attribute :job_errors, :JobErrors
alias_attribute :job_missing_files, :JobMissingFiles
alias_attribute :pool_id, :PoolId
alias_attribute :file_set_id, :FileSetId
alias_attribute :prior_job_id, :PriorJobId
alias_attribute :purged_files, :PurgedFiles
alias_attribute :has_base, :HasBase
alias_attribute :has_cache, :HasCache
alias_attribute :reviewed, :Reviewed
alias_attribute :comment, :Comment
belongs_to :pool, foreign_key: :PoolId
belongs_to :file_set, foreign_key: :FileSetId
belongs_to :client, foreign_key: :ClientId
has_many :bacula_files, foreign_key: :JobId
has_many :base_files, foreign_key: :BaseJobId
has_many :job_media, foreign_key: :JobId
has_many :logs, foreign_key: :JobId
scope :running, -> { where(job_status: 'R') }
scope :terminated, -> { where(job_status: 'T') }
scope :backup_type, -> { where(type: 'B') }
scope :restore_type, -> { where(type: 'R') }
HUMAN_STATUS = {
'A' => 'Canceled by user',
'B' => 'Blocked',
'C' => 'Created, not yet running',
'D' => 'Verify found differences',
'E' => 'Terminated with errors',
'F' => 'Waiting for Client',
'M' => 'Waiting for media mount',
'R' => 'Running',
'S' => 'Waiting for Storage daemon',
'T' => 'Completed successfully',
'a' => 'SD despooling attributes',
'c' => 'Waiting for client resource',
'd' => 'Waiting on maximum jobs',
'e' => 'Non-fatal error',
'f' => 'Fatal error',
'i' => 'Doing batch insert file records',
'j' => 'Waiting for job resource',
'm' => 'Waiting for new media',
'p' => 'Waiting on higher priority jobs',
's' => 'Waiting for storage resource',
't' => 'Waiting on start time'
}
paginates_per 20
def level_human
{
'F' => 'Full',
'D' => 'Differential',
'I' => 'Incremental'
}[level]
end
# Extracts the job's compression info by looking at the job's
# logs
#
# @return [String] the compression
def compression
- logs.map { |log| log.compression }.uniq.compact.first
+ Rails.cache.fetch(['compression', 'v1', cache_key]) do
+ logs.map { |log| log.compression }.uniq.compact.first
+ end
end
# Extracts the job's encryption info by looking at the job's
# logs
#
# @return [String] the encryption
def encryption
- logs.map { |log| log.encryption }.uniq.compact.first
+ Rails.cache.fetch(['encryption', 'v1', cache_key]) do
+ logs.map { |log| log.encryption }.uniq.compact.first
+ end
end
# The duration of the job.
#
# @return [String]
def duration
return "-" if [start_time, end_time].any?(&:nil?)
distance = (end_time - start_time).to_i
distance.divmod(60).zip(['min', 'sec']).select { |x| x.first.nonzero? }.join(" ")
end
def status_human
HUMAN_STATUS[job_status]
end
def fileset
file_set.try(:file_set) || '-'
end
def start_time_formatted
if start_time
I18n.l(start_time, format: :long)
end
end
def end_time_formatted
if end_time
I18n.l(end_time, format: :long)
end
end
+
+ def cache_key
+ "jobs/#{job_id}/#{job_t_date}"
+ end
end
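
The cache key added above makes the log-derived values effectively immutable per job: JobTDate does not change once the job row is written, so each job's compression and encryption are computed from its logs once and then served from the cache. Presumably this is also why recent_jobs in client.rb no longer eager-loads :logs. A minimal sketch of the lookup, with hypothetical values:

# job = Job.find(42)   # hypothetical JobId
# job.cache_key        # => "jobs/42/1445171902" (the JobTDate value is made up)
# job.compression      # first call scans job.logs and stores the result under
#                      # ['compression', 'v1', 'jobs/42/1445171902']
# job.compression      # subsequent calls are served from the cache
#
# Bumping the 'v1' segment in the fetch keys invalidates every cached entry at once.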
diff --git a/config/environments/development.rb b/config/environments/development.rb
index d722fda..4763832 100644
--- a/config/environments/development.rb
+++ b/config/environments/development.rb
@@ -1,43 +1,44 @@
Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# In the development environment your application's code is reloaded on
# every request. This slows down response time but is perfect for development
# since you don't have to restart the web server when you make code changes.
config.cache_classes = false
# Do not eager load code on boot.
config.eager_load = false
# Show full error reports and disable caching.
config.consider_all_requests_local = true
config.action_controller.perform_caching = false
+ #config.cache_store = :memory_store
# Don't care if the mailer can't send.
config.action_mailer.raise_delivery_errors = false
# Print deprecation notices to the Rails logger.
config.active_support.deprecation = :log
# Raise an error on page load if there are pending migrations.
config.active_record.migration_error = :page_load
# Debug mode disables concatenation and preprocessing of assets.
# This option may cause significant delays in view rendering with a large
# number of complex assets.
config.assets.debug = true
# Adds additional error checking when serving assets at runtime.
# Checks for improperly declared sprockets dependencies.
# Raises helpful error messages.
config.assets.raise_runtime_errors = true
# Raises error for missing translations
# config.action_view.raise_on_missing_translations = true
config.action_mailer.delivery_method = :smtp
config.action_mailer.smtp_settings =
YAML::load(File.open("#{Rails.root}/config/mailer.yml"))[Rails.env].symbolize_keys
routes.default_url_options = { host: '127.0.0.1', port: '3000' }
end
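
The development settings keep controller caching off and only hint at a store. Low-level Rails.cache calls, like the fetch calls added in job.rb, still run against the app's default cache store, so the commented line marks where to switch to an in-process store while developing. A sketch of the opted-in configuration, not part of the committed change:

# config/environments/development.rb, with caching turned on locally:
config.action_controller.perform_caching = true   # also enables fragment caching
config.cache_store = :memory_store                # per-process, cleared on restart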