remove proxy code
Rakefile
@@ -14,21 +14,20 @@ Rake.application.rake_require "e621"
 Rake.application.rake_require "twitter"
 Rake.application.rake_require "ib"

-task :set_ar_stdout => :environment do
+task set_ar_stdout: :environment do
   ActiveRecord::Base.logger = Logger.new($stdout)
 end

-task :set_logger_stdout => :environment do
+task set_logger_stdout: :environment do
   Rails.logger = Logger.new($stdout)
-  Rails.logger.formatter = proc do |severity, datetime, progname, msg|
-    "#{severity}: #{msg}\n"
-  end
+  Rails.logger.formatter =
+    proc { |severity, datetime, progname, msg| "#{severity}: #{msg}\n" }
   ActiveRecord::Base.logger = nil
   ActiveJob::Base.logger = nil
   GoodJob.logger = Rails.logger
 end

-task :periodic_tasks => [:environment, :set_logger_stdout] do
+task periodic_tasks: %i[environment set_logger_stdout] do
   Thread.new do
     loop do
       Rake::Task["pghero:capture_space_stats"].execute
@@ -55,22 +54,21 @@ task :periodic_tasks => [:environment, :set_logger_stdout] do
     end
   end

-  loop do
-    sleep 10
-  end
+  loop { sleep 10 }
 end

 namespace :blob_entries do
-  task :export_samples => :environment do
+  task export_samples: :environment do
     limit = ENV["limit"]&.to_i || raise("need 'limit' (num)")
-    outfile = ENV["outfile"] || raise("need 'outfile' (file path, .json encoded)")
+    outfile =
+      ENV["outfile"] || raise("need 'outfile' (file path, .json encoded)")
     BlobEntrySampleExporter.new.export_samples(limit, outfile)
   end
-  task :import_samples => :environment do
+  task import_samples: :environment do
     infile = ENV["infile"] || raise("need 'infile' (file path, .json encoded)")
     BlobEntrySampleExporter.new.import_samples(infile)
   end
-  task :migrate_entries => :environment do
+  task migrate_entries: :environment do
     start_at = ENV["start_at"]
     batch_size = ENV["batch_size"]&.to_i || 64
     BlobEntrySampleExporter.new.migrate_blob_entries(start_at, batch_size)
@@ -78,7 +76,7 @@ namespace :blob_entries do
 end

 namespace :db_sampler do
-  task :export => :environment do
+  task export: :environment do
     url_names = ENV["url_names"] || raise("need 'url_names' (comma-separated)")
     # outfile_path = ENV["outfile"] || raise("need 'outfile' (file path)")
     # outfile = File.open(outfile_path, "wb")
@@ -88,7 +86,7 @@ namespace :db_sampler do
     outfile.close if outfile
   end

-  task :import => [:environment] do
+  task import: [:environment] do
     # infile_path = ENV["infile"] || raise("need 'infile' (file path)")
     # infile = File.open(infile_path, "rb")
     infile = $stdin
@@ -98,23 +96,16 @@ namespace :db_sampler do
   end
 end

-task :good_job do
-  proxies = ["direct", "proxy-1", "dedipath-1", "serverhost-1"]
-  proxy = ENV["proxy"]
-  raise("'proxy' must be set") unless proxy
-  raise("'proxy' must be one of #{proxies}") unless proxies.include?(proxy)
-
+task good_job: %i[environment set_ar_stdout set_logger_stdout] do
   env_hash = {
     "RAILS_ENV" => "worker",
     "GOOD_JOB_POLL_INTERVAL" => "5",
     "GOOD_JOB_MAX_CACHE" => "10000",
     "GOOD_JOB_QUEUE_SELECT_LIMIT" => "4096",
     "GOOD_JOB_MAX_THREADS" => "4",
-    "GOOD_JOB_QUEUES" => [
-      "manual:4",
-      "fa_post,e621:2",
-      "*:6",
-    ].reject(&:nil?).join(";"),
+    "GOOD_JOB_QUEUES" =>
+      ENV["GOOD_JOB_QUEUES"] ||
+        %w[manual:4 fa_post,e621:2 *:6].reject(&:nil?).join(";")
   }

   env_hash.each do |key, value|
@@ -127,7 +118,7 @@ task :good_job do
   exec(cmd)
 end

-task :recompute_job_signatures => :environment do
+task recompute_job_signatures: :environment do
   ActiveRecord::Base.logger = Logger.new(STDOUT)
   ActiveRecord::Base.logger.level = :error

@@ -148,21 +139,22 @@ task :recompute_job_signatures => :environment do
   end
 end

-task :workoff_failed_jobs => [:environment, :set_ar_stdout, :set_logger_stdout] do
+task workoff_failed_jobs: %i[environment set_ar_stdout set_logger_stdout] do
   worker = Delayed::Worker.new
-  Delayed::Job.where(
-    "last_error is not null and attempts <= 2"
-  ).find_each(batch_size: 1) do |job|
-    worker.run(job)
-  end
+  Delayed::Job
+    .where("last_error is not null and attempts <= 2")
+    .find_each(batch_size: 1) { |job| worker.run(job) }
 end

 task :reverse_csv do
   file = ENV["file"] || raise("need 'file' (file path)")
   in_csv = CSV.parse(File.open(file, "r+"), headers: true)
-  out_csv = CSV.new(File.open("rev_" + file, "w"), write_headers: true, headers: in_csv.headers)
-  in_csv.reverse_each do |row|
-    out_csv << row.map(&:second)
-  end
+  out_csv =
+    CSV.new(
+      File.open("rev_" + file, "w"),
+      write_headers: true,
+      headers: in_csv.headers
+    )
+  in_csv.reverse_each { |row| out_csv << row.map(&:second) }
   out_csv.close
 end