Add Prometheus monitoring integration and enhance application metrics
@@ -16,9 +16,12 @@
  // "postCreateCommand": "bundle install && rake db:setup",
  "postCreateCommand": ".devcontainer/post-create.sh",
  "forwardPorts": [
    8080, // pgadmin
    3000, // rails development
    3001 // rails staging
    3001, // rails staging
    9394, // prometheus exporter
    "pgadmin:8080", // pgadmin
    "grafana:3100", // grafana
    "prometheus:9090" // prometheus
  ]
  // Configure tool-specific properties.
  // Uncomment to connect as root instead. More info: https://aka.ms/dev-containers-non-root.
@@ -16,7 +16,6 @@ services:
      OTEL_SERVICE_NAME: redux-scraper-dev
      OTEL_RESOURCE_ATTRIBUTES: application=redux-scraper-dev
    command: sleep infinity
    network_mode: service:db

  db:
    build:
@@ -34,7 +33,6 @@ services:
  pgadmin:
    image: dpage/pgadmin4:8.14.0
    restart: unless-stopped
    network_mode: service:db
    environment:
      PGADMIN_DEFAULT_EMAIL: admin@example.com
      PGADMIN_DEFAULT_PASSWORD: password
@@ -42,7 +40,28 @@
      PGADMIN_CONFIG_SERVER_MODE: 'False'
      PGADMIN_CONFIG_MASTER_PASSWORD_REQUIRED: 'False'

  prometheus:
    image: prom/prometheus
    command:
      - '--config.file=/etc/prometheus/prometheus.yml'
    restart: unless-stopped
    volumes:
      - ./prometheus:/etc/prometheus
      - devcontainer-redux-prometheus-data:/prometheus

  grafana:
    image: grafana/grafana
    restart: unless-stopped
    environment:
      - GF_SERVER_HTTP_PORT=3100
      - GF_USERS_ALLOW_SIGN_UP=false
      - GF_LOG_LEVEL=debug
    volumes:
      - devcontainer-redux-grafana-data:/var/lib/grafana

volumes:
  postgres-data:
  devcontainer-redux-gem-cache:
  devcontainer-redux-blob-files:
  devcontainer-redux-grafana-data:
  devcontainer-redux-prometheus-data:
@@ -1,5 +1,12 @@
#!/bin/bash -ex

function mkdir_and_chmod {
  sudo mkdir -p $1
  sudo chmod 777 $1
}

mkdir_and_chmod .devcontainer/data/prometheus
mkdir_and_chmod .devcontainer/data/grafana
echo "Path: $PATH"
echo "Ruby: $(which ruby)"
echo "Gem: $(which gem)"
12  .devcontainer/prometheus/prometheus.yml  Normal file
@@ -0,0 +1,12 @@
global:
  scrape_interval: 15s
  evaluation_interval: 15s

scrape_configs:
  - job_name: 'prometheus'
    static_configs:
      - targets: ['prometheus:9090']

  - job_name: 'rails'
    static_configs:
      - targets: ['app:9394']
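Note: the 'app:9394' target above is the prometheus_exporter collector that the Procfiles further down launch; 'prometheus:9090' is Prometheus scraping itself. A minimal, hedged way to verify the exporter endpoint from inside the compose network (host name and path assumed from the scrape config above):

    require "net/http"
    # Should print the text exposition format the scrape job above consumes.
    puts Net::HTTP.get(URI("http://app:9394/metrics"))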
3  Gemfile
@@ -163,6 +163,9 @@ gem "devise", "~> 4.9"
# Authorization
gem "pundit", "~> 2.4"

# Monitoring
gem "prometheus_exporter", "~> 2.2"

gem "sorbet-static-and-runtime"
gem "tapioca", require: false, group: %i[development test]
gem "rspec-sorbet", group: [:test]
@@ -278,6 +278,8 @@ GEM
    prettier_print (1.2.1)
    prism (1.3.0)
    progressbar (1.13.0)
    prometheus_exporter (2.2.0)
      webrick
    pry (0.15.2)
      coderay (~> 1.1)
      method_source (~> 1.0)
@@ -549,6 +551,7 @@ DEPENDENCIES
  pluck_each
  prettier_print
  progressbar
  prometheus_exporter (~> 2.2)
  pry
  pry-stack_explorer
  puma (~> 5.0)
@@ -2,3 +2,4 @@ rails: RAILS_ENV=development bundle exec rails s -p 3000
wp-client: RAILS_ENV=development HMR=true ./bin/webpacker-dev-server
wp-server: RAILS_ENV=development HMR=true SERVER_BUNDLE_ONLY=yes ./bin/webpacker --watch
css: RAILS_ENV=development yarn build:css[debug] --watch
prometheus_exporter: RAILS_ENV=development bundle exec prometheus_exporter --bind 0.0.0.0 --prefix redux_ --label '{"environment": "development"}'

@@ -1,2 +1,3 @@
rails: RAILS_ENV=production bundle exec rails s -b 0.0.0.0 -p 3000
tail: tail -f log/production.log
prometheus_exporter: RAILS_ENV=production bundle exec prometheus_exporter --bind 0.0.0.0 --prefix redux_ --label '{"environment": "production"}'

@@ -2,3 +2,4 @@ rails: RAILS_ENV=staging ./bin/rails s -p 3001
wp-client: RAILS_ENV=development HMR=true ./bin/webpacker-dev-server
wp-server: RAILS_ENV=development HMR=true SERVER_BUNDLE_ONLY=yes ./bin/webpacker --watch
css: RAILS_ENV=development yarn build:css[debug] --watch
prometheus_exporter: RAILS_ENV=staging bundle exec prometheus_exporter --bind 0.0.0.0 --prefix redux_ --label '{"environment": "staging"}'

@@ -1,2 +1,3 @@
periodic_tasks: RAILS_ENV=worker bundle exec rake periodic_tasks
good_job: RAILS_ENV=worker bundle exec rake good_job
prometheus_exporter: RAILS_ENV=worker bundle exec prometheus_exporter --bind 0.0.0.0 --prefix redux_ --label '{"environment": "worker"}'
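Note: each environment now runs a prometheus_exporter collector process next to the app; --prefix redux_ namespaces every metric and --label stamps the environment. A minimal, hedged sketch of how an app process publishes to that collector (the collector's default port, 9394, matches the forwardPorts entry and the Prometheus scrape target above; the metric name is hypothetical):

    require "prometheus_exporter/client"

    client = PrometheusExporter::Client.new(host: "localhost", port: 9394)
    jobs = client.register(:counter, "jobs_processed", "number of jobs processed")
    jobs.increment({ queue: "default" })  # surfaces as redux_jobs_processed{environment="...", queue="default"}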
@@ -22,6 +22,12 @@ class ApplicationController < ActionController::Base
    ReduxApplicationRecord.connection.execute("SET ivfflat.probes = 10")
  end

  protected

  def prometheus_client
    PrometheusExporter::Client.default
  end

  private

  def user_not_authorized
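Note: prometheus_client simply exposes the process-wide exporter client to every controller. A hedged usage sketch (the controller and metric below are hypothetical, not part of this commit):

    class SearchesController < ApplicationController
      # Registered once so each request reuses the same remote metric.
      SEARCH_COUNTER =
        PrometheusExporter::Client.default.register(
          :counter,
          "searches_performed",
          "number of searches performed",
        )

      def create
        SEARCH_COUNTER.increment({ source: "web" })
        # prometheus_client is also available here for ad-hoc registration.
        head :ok
      end
    end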
@@ -1,22 +1,50 @@
|
||||
# typed: true
|
||||
# typed: strict
|
||||
class Scraper::CurlHttpPerformer
|
||||
Response =
|
||||
Struct.new(:response_code, :response_headers, :response_time_ms, :body)
|
||||
extend T::Sig
|
||||
|
||||
# Response =
|
||||
# Struct.new(:response_code, :response_headers, :response_time_ms, :body)
|
||||
|
||||
class Response < T::Struct
|
||||
const :response_code, Integer
|
||||
const :response_headers, T::Hash[String, String]
|
||||
const :response_time_ms, Integer
|
||||
const :body, String
|
||||
end
|
||||
|
||||
sig do
|
||||
params(url: String, request_headers: T::Hash[String, String]).returns(
|
||||
Response,
|
||||
)
|
||||
end
|
||||
def get(url, request_headers)
|
||||
do_request(:http_get, url, request_headers)
|
||||
end
|
||||
|
||||
sig do
|
||||
params(url: String, request_headers: T::Hash[String, String]).returns(
|
||||
Response,
|
||||
)
|
||||
end
|
||||
def post(url, request_headers)
|
||||
do_request(:http_post, url, request_headers)
|
||||
end
|
||||
|
||||
sig { returns(String) }
|
||||
def name
|
||||
"direct"
|
||||
end
|
||||
|
||||
METHOD_MAP = { http_get: "GET", http_post: "POST" }
|
||||
METHOD_MAP =
|
||||
T.let({ http_get: "GET", http_post: "POST" }, T::Hash[Symbol, String])
|
||||
|
||||
sig do
|
||||
params(
|
||||
method: Symbol,
|
||||
url: String,
|
||||
request_headers: T::Hash[String, String],
|
||||
).returns(Response)
|
||||
end
|
||||
def do_request(method, url, request_headers)
|
||||
t, curl = get_curl
|
||||
start_at = Time.now
|
||||
@@ -49,7 +77,7 @@ class Scraper::CurlHttpPerformer
|
||||
|
||||
cf_cache_status =
|
||||
if response_headers["cf-cache-status"]
|
||||
"(#{response_headers["cf-cache-status"].light_blue})"
|
||||
"(#{response_headers["cf-cache-status"]&.light_blue})"
|
||||
else
|
||||
nil
|
||||
end
|
||||
@@ -63,11 +91,17 @@ class Scraper::CurlHttpPerformer
|
||||
"#{url.bold}",
|
||||
].reject(&:nil?).join(" ")
|
||||
|
||||
Response.new(response_code, response_headers, response_time_ms, body_str)
|
||||
Response.new(
|
||||
response_code: response_code,
|
||||
response_headers: response_headers,
|
||||
response_time_ms: response_time_ms,
|
||||
body: body_str,
|
||||
)
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
sig { returns([Thread, Curl::Easy]) }
|
||||
def get_curl
|
||||
t = Thread.current
|
||||
unless t.thread_variable?(:curl)
|
||||
@@ -78,6 +112,7 @@ class Scraper::CurlHttpPerformer
|
||||
[t, curl]
|
||||
end
|
||||
|
||||
sig { params(size: Integer).returns(String) }
|
||||
def self.humansize(size)
|
||||
units = %w[B KiB MiB GiB TiB Pib EiB ZiB]
|
||||
return "0.0 B" if size == 0
|
||||
@@ -92,16 +127,17 @@ class Scraper::CurlHttpPerformer
|
||||
end
|
||||
end
|
||||
|
||||
sig { params(header_str: String).returns(T::Array[[String, String]]) }
|
||||
def self.parse_header_str(header_str)
|
||||
header_str
|
||||
.split("\r\n")
|
||||
split = header_str.split("\r\n")
|
||||
split
|
||||
.each_with_index
|
||||
.map do |str, idx|
|
||||
next nil if idx == 0
|
||||
idx = str.index(": ")
|
||||
next nil unless idx
|
||||
[str[0...idx], str[idx + 2..-1]]
|
||||
T.cast([str[0...idx], str[idx + 2..-1]], [String, String])
|
||||
end
|
||||
.reject(&:nil?)
|
||||
.compact
|
||||
end
|
||||
end
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
# typed: strict
|
||||
class Scraper::HttpClient
|
||||
extend T::Sig
|
||||
|
||||
include HasColorLogger
|
||||
|
||||
sig { returns(Scraper::HttpClientConfig) }
|
||||
@@ -21,18 +20,13 @@ class Scraper::HttpClient
|
||||
end
|
||||
def initialize(config, http_performer)
|
||||
@config = T.let(config, Scraper::HttpClientConfig)
|
||||
@http_performer = T.let(http_performer, T.untyped)
|
||||
@http_performer = T.let(http_performer, Scraper::CurlHttpPerformer)
|
||||
@domain_last_requested_at = T.let({}, T::Hash[String, Time])
|
||||
@cookie_jar = T.let(HTTP::CookieJar.new, HTTP::CookieJar)
|
||||
init_cookie_jar
|
||||
@config.do_login(http_performer)
|
||||
end
|
||||
|
||||
sig { void }
|
||||
def close!
|
||||
@http_performer.close!
|
||||
end
|
||||
|
||||
sig do
|
||||
params(
|
||||
url: String,
|
||||
@@ -159,6 +153,9 @@ class Scraper::HttpClient
|
||||
@cookie_jar.cookies(Addressable::URI.encode url),
|
||||
),
|
||||
}
|
||||
|
||||
Metrics.observe_request_start(method, uri.host)
|
||||
|
||||
response = @http_performer.do_request(method, url, request_headers)
|
||||
|
||||
response_code = response.response_code
|
||||
@@ -173,6 +170,7 @@ class Scraper::HttpClient
|
||||
"none/none"
|
||||
|
||||
retries = 0
|
||||
total_time_ms = -1
|
||||
begin
|
||||
response_blob_entry =
|
||||
BlobEntry.find_or_build(
|
||||
@@ -203,6 +201,16 @@ class Scraper::HttpClient
|
||||
# double write blob_file while migrating
|
||||
response_blob_file =
|
||||
BlobFile.find_or_initialize_from_blob_entry(response_blob_entry)
|
||||
total_time_ms = ((Time.now - requested_at) * 1000).to_i
|
||||
|
||||
Metrics.observe_request_finish(
|
||||
method: method,
|
||||
host: uri.host,
|
||||
content_type: T.must(content_type.split(";").first),
|
||||
status_code: response_code,
|
||||
response_time_ms: response_time_ms,
|
||||
response_size_bytes: response_body.bytesize,
|
||||
)
|
||||
|
||||
log_entry.save!
|
||||
begin
|
||||
@@ -216,17 +224,6 @@ class Scraper::HttpClient
|
||||
raise
|
||||
end
|
||||
|
||||
# logger.info(
|
||||
# [
|
||||
# "[entry #{log_entry.id.to_s.bold} /",
|
||||
# "GET #{response_code_colorized} /",
|
||||
# "#{HexUtil.humansize(T.must(response_blob_entry.bytes_stored)).bold} / #{HexUtil.humansize(T.must(response_blob_entry.size)).bold}]",
|
||||
# "[#{response_time_ms.to_s.bold} ms / #{total_time_ms.to_s.bold} ms]",
|
||||
# scrubbed_uri.to_s.black,
|
||||
# ].reject(&:nil?).join(" "),
|
||||
# )
|
||||
#
|
||||
total_time_ms = ((Time.now - requested_at) * 1000).to_i
|
||||
print_request_performed_log_line(
|
||||
from_cache: false,
|
||||
log_entry: log_entry,
|
||||
|
||||
106  app/lib/scraper/http_client/metrics.rb  Normal file
@@ -0,0 +1,106 @@
# typed: strict

module Scraper::HttpClient::Metrics
  extend T::Sig

  sig { params(method: Symbol, host: String).void }
  def self.observe_request_start(method, host)
    REQUEST_START_COUNTER.increment({ method: method, host: host })
  end

  sig do
    params(
      method: Symbol,
      host: String,
      content_type: String,
      status_code: Integer,
      response_time_ms: Integer,
      response_size_bytes: Integer,
    ).void
  end
  def self.observe_request_finish(
    method:,
    host:,
    content_type:,
    status_code:,
    response_time_ms:,
    response_size_bytes:
  )
    keys = {
      method: method,
      host: host,
      content_type: content_type,
      status_code: status_code,
    }.freeze
    REQUEST_FINISH_COUNTER.increment(keys)
    REQUEST_FINISH_RESPONSE_SIZE_COUNTER.observe(response_size_bytes, keys)
    REQUEST_FINISH_RESPONSE_TIME_HISTOGRAM.observe(response_time_ms, keys)
    REQUEST_FINISH_RESPONSE_SIZE_HISTOGRAM.observe(response_size_bytes, keys)
    REQUEST_FINISH_RESPONSE_TIME_SUMMARY.observe(response_time_ms, keys)
  end

  private

  REQUEST_START_COUNTER =
    T.let(
      PrometheusExporter::Client.default.register(
        :counter,
        "http_client_request_start_counter",
        "http client request started",
      ),
      PrometheusExporter::Client::RemoteMetric,
    )

  REQUEST_FINISH_COUNTER =
    T.let(
      PrometheusExporter::Client.default.register(
        :counter,
        "http_client_request_finish_counter",
        "count of http client requests finished, labeled by their status code and host",
      ),
      PrometheusExporter::Client::RemoteMetric,
    )

  REQUEST_FINISH_RESPONSE_SIZE_COUNTER =
    T.let(
      PrometheusExporter::Client.default.register(
        :counter,
        "http_client_request_finish_size_counter",
        "bytes of http client response size, labeled by their status code and host",
      ),
      PrometheusExporter::Client::RemoteMetric,
    )

  REQUEST_FINISH_RESPONSE_TIME_HISTOGRAM =
    T.let(
      PrometheusExporter::Client.default.register(
        :histogram,
        "http_client_request_finish_histogram",
        "histogram of http client request time in ms, labeled by their status code and host",
        buckets: Scraper::Metrics::RESPONSE_TIME_MS_BUCKETS,
      ),
      PrometheusExporter::Client::RemoteMetric,
    )

  REQUEST_FINISH_RESPONSE_TIME_SUMMARY =
    T.let(
      PrometheusExporter::Client.default.register(
        :summary,
        "http_client_request_finish_summary",
        "summary (p99, etc) of http client request time in ms, labeled by their status code and host",
        quantiles: Scraper::Metrics::QUARTILES,
      ),
      PrometheusExporter::Client::RemoteMetric,
    )

  REQUEST_FINISH_RESPONSE_SIZE_HISTOGRAM =
    T.let(
      PrometheusExporter::Client.default.register(
        :histogram,
        "http_client_request_finish_response_size_histogram",
        "histogram of http client response size in bytes, labeled by their status code and host",
        buckets: Scraper::Metrics::FILESIZE_BYTES_BUCKETS,
      ),
      PrometheusExporter::Client::RemoteMetric,
    )
end
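Note: a hedged illustration of the call sequence Scraper::HttpClient makes per request (see the hunk above); the host and timings are made-up values:

    Scraper::HttpClient::Metrics.observe_request_start(:http_get, "example.com")
    Scraper::HttpClient::Metrics.observe_request_finish(
      method: :http_get,
      host: "example.com",
      content_type: "text/html",
      status_code: 200,
      response_time_ms: 123,
      response_size_bytes: 45_678,
    )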
50  app/lib/scraper/metrics.rb  Normal file
@@ -0,0 +1,50 @@
# typed: strict
module Scraper::Metrics
  extend T::Sig

  FILESIZE_BYTES_BUCKETS = [
    100, # 100 B
    500, # 500 B
    1_000, # 1 KB
    5_000, # 5 KB
    10_000, # 10 KB
    50_000, # 50 KB
    100_000, # 100 KB
    250_000, # 250 KB
    500_000, # 500 KB
    1_000_000, # 1 MB
    2_500_000, # 2.5 MB
    5_000_000, # 5 MB
    10_000_000, # 10 MB
    25_000_000, # 25 MB
    50_000_000, # 50 MB
    100_000_000, # 100 MB
  ].freeze

  RESPONSE_TIME_MS_BUCKETS = [
    5, # 5 ms - extremely fast
    10, # 10 ms
    25, # 25 ms
    50, # 50 ms
    75, # 75 ms
    100, # 100 ms
    150, # 150 ms
    200, # 200 ms
    250, # 250 ms
    300, # 300 ms
    400, # 400 ms
    500, # 500 ms
    750, # 750 ms
    1_000, # 1 sec
    1_500, # 1.5 sec
    2_000, # 2 sec
    2_500, # 2.5 sec
    5_000, # 5 sec
    7_500, # 7.5 sec
    10_000, # 10 sec
    15_000, # 15 sec
    30_000, # 30 sec
  ].freeze

  QUARTILES = [0.99, 0.95, 0.90, 0.75, 0.50].freeze
end
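Note: these bucket and quantile constants are consumed by the register calls in Scraper::HttpClient::Metrics above. A small, hedged sketch of a local (in-process) histogram using the same buckets, just to show where an observation lands (the metric name is illustrative):

    require "prometheus_exporter/metric"

    h = PrometheusExporter::Metric::Histogram.new(
      "demo_response_time_ms",
      "demo histogram",
      buckets: Scraper::Metrics::RESPONSE_TIME_MS_BUCKETS,
    )
    h.observe(320)              # counted in the 400 ms bucket and every larger one
    puts h.to_prometheus_text   # text exposition, including _bucket, _sum and _count series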
18  app/models/concerns/has_prometheus_client.rb  Normal file
@@ -0,0 +1,18 @@
# typed: strict
module HasPrometheusClient
  extend ActiveSupport::Concern
  extend T::Sig
  extend T::Helpers

  included do
    sig { returns(PrometheusExporter::Client) }
    def prometheus_client
      PrometheusExporter::Client.default
    end

    sig { params(name: String).returns(PrometheusExporter::Metric::Counter) }
    def prom_counter(name)
      PrometheusExporter::Metric::Counter.new(name, {})
    end
  end
end
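Note: a hedged usage sketch for the concern; the Widget model is hypothetical, and ReduxApplicationRecord (next hunk) includes the concern, so subclasses inherit both helpers:

    class Widget < ReduxApplicationRecord
      def record_view!
        # In real code the remote metric would be registered once and memoized.
        prometheus_client
          .register(:counter, "widget_views", "count of widget views")
          .increment({ model: self.class.name })

        prom_counter("widget_views_local")  # local PrometheusExporter::Metric::Counter, if needed
      end
    end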
@@ -1,7 +1,33 @@
# typed: strict
class ReduxApplicationRecord < ActiveRecord::Base
  extend T::Sig
  include HasPrometheusClient

  self.abstract_class = true
  logger.level = Logger::ERROR

  after_initialize { observe(:initialize) }
  after_create { observe(:create) }
  after_update { observe(:update) }
  after_destroy { observe(:destroy) }

  private

  ACTIVE_RECORD_COUNTER =
    T.let(
      PrometheusExporter::Client.default.register(
        :counter,
        "active_record_lifecycle",
        "active record lifecycle statistics",
      ),
      PrometheusExporter::Client::RemoteMetric,
    )

  sig { params(action: Symbol).void }
  def observe(action)
    ACTIVE_RECORD_COUNTER.observe(
      1,
      { method: action, class_name: self.class.name },
    )
  end
end
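Note: every model lifecycle event now increments a single counter labeled by action and class. A hedged illustration of what one create produces (Widget is hypothetical; the exporter's --prefix flag adds redux_ to the exported series name):

    # Widget.create!(name: "example") fires after_initialize and after_create, so the
    # collector receives two increments:
    #   active_record_lifecycle{method="initialize", class_name="Widget"}  +1
    #   active_record_lifecycle{method="create", class_name="Widget"}      +1
    Widget.create!(name: "example")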
@@ -25,7 +25,7 @@ redux_staging: &redux_staging

redux_dev: &redux_dev
  adapter: postgresql
  host: localhost
  host: db
  port: 5432
  # database: redux_development
  # username: scraper_redux
@@ -44,3 +44,8 @@ ActiveSupport.on_load(:good_job_base_record) do
    after_create { Scraper::JobBase.last_good_job_execution = self }
  end
end

require "prometheus_exporter/instrumentation"
ActiveSupport.on_load(:good_job_application_controller) do
  PrometheusExporter::Instrumentation::GoodJob.start
end
12  config/initializers/prometheus_exporter.rb  Normal file
@@ -0,0 +1,12 @@
# typed: true
require "prometheus_exporter"
require "prometheus_exporter/client"
require "prometheus_exporter/metric"
require "prometheus_exporter/metric/counter"

unless Rails.env.test?
  require "prometheus_exporter/middleware"

  # This reports stats per request like HTTP status and timings
  Rails.application.middleware.unshift PrometheusExporter::Middleware
end
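Note: the middleware reports per-request status and timing out of the box. If request-level labels are ever needed, the gem's Middleware exposes a custom_labels(env) hook (see the generated RBI further down); a hedged sketch, not part of this commit:

    class AppPrometheusMiddleware < PrometheusExporter::Middleware
      # Adds a label derived from the request env to every reported request.
      def custom_labels(env)
        { tenant: env["HTTP_X_TENANT"] }.compact
      end
    end
    # Rails.application.middleware.unshift AppPrometheusMiddleware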
@@ -42,3 +42,9 @@ pidfile ENV.fetch("PIDFILE") { "tmp/pids/server.pid" }

# Allow puma to be restarted by `bin/rails restart` command.
plugin :tmp_restart

require 'prometheus_exporter/instrumentation'
PrometheusExporter::Instrumentation::ActiveRecord.start(
  custom_labels: { type: "puma_single_mode" }, #optional params
  config_labels: [:database, :host] #optional params
)
18  sorbet/rbi/dsl/good_job/job.rbi  generated
@@ -547,6 +547,12 @@ class GoodJob::Job
|
||||
sig { params(args: T.untyped, blk: T.untyped).returns(PrivateAssociationRelation) }
|
||||
def arel_columns(*args, &blk); end
|
||||
|
||||
sig { params(args: T.untyped, blk: T.untyped).returns(PrivateAssociationRelation) }
|
||||
def bind_value(*args, &blk); end
|
||||
|
||||
sig { params(args: T.untyped, blk: T.untyped).returns(PrivateAssociationRelation) }
|
||||
def coalesce_scheduled_at_created_at(*args, &blk); end
|
||||
|
||||
sig { params(args: T.untyped, blk: T.untyped).returns(PrivateAssociationRelation) }
|
||||
def create_with(*args, &blk); end
|
||||
|
||||
@@ -658,6 +664,9 @@ class GoodJob::Job
|
||||
sig { params(args: T.untyped, blk: T.untyped).returns(PrivateAssociationRelation) }
|
||||
def owns_advisory_locked(*args, &blk); end
|
||||
|
||||
sig { params(args: T.untyped, blk: T.untyped).returns(PrivateAssociationRelation) }
|
||||
def params_execution_count(*args, &blk); end
|
||||
|
||||
sig { params(args: T.untyped, blk: T.untyped).returns(PrivateAssociationRelation) }
|
||||
def preload(*args, &blk); end
|
||||
|
||||
@@ -2166,6 +2175,12 @@ class GoodJob::Job
|
||||
sig { params(args: T.untyped, blk: T.untyped).returns(PrivateRelation) }
|
||||
def arel_columns(*args, &blk); end
|
||||
|
||||
sig { params(args: T.untyped, blk: T.untyped).returns(PrivateRelation) }
|
||||
def bind_value(*args, &blk); end
|
||||
|
||||
sig { params(args: T.untyped, blk: T.untyped).returns(PrivateRelation) }
|
||||
def coalesce_scheduled_at_created_at(*args, &blk); end
|
||||
|
||||
sig { params(args: T.untyped, blk: T.untyped).returns(PrivateRelation) }
|
||||
def create_with(*args, &blk); end
|
||||
|
||||
@@ -2277,6 +2292,9 @@ class GoodJob::Job
|
||||
sig { params(args: T.untyped, blk: T.untyped).returns(PrivateRelation) }
|
||||
def owns_advisory_locked(*args, &blk); end
|
||||
|
||||
sig { params(args: T.untyped, blk: T.untyped).returns(PrivateRelation) }
|
||||
def params_execution_count(*args, &blk); end
|
||||
|
||||
sig { params(args: T.untyped, blk: T.untyped).returns(PrivateRelation) }
|
||||
def preload(*args, &blk); end
|
||||
|
||||
|
||||
858  sorbet/rbi/gems/prometheus_exporter@2.2.0.rbi  generated  Normal file
@@ -0,0 +1,858 @@
|
||||
# typed: true
|
||||
|
||||
# DO NOT EDIT MANUALLY
|
||||
# This is an autogenerated file for types exported from the `prometheus_exporter` gem.
|
||||
# Please instead update this file by running `bin/tapioca gem prometheus_exporter`.
|
||||
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/version.rb#3
|
||||
module PrometheusExporter
|
||||
class << self
|
||||
# source://prometheus_exporter//lib/prometheus_exporter.rb#36
|
||||
def detect_json_serializer(preferred); end
|
||||
|
||||
# @return [Boolean]
|
||||
#
|
||||
# source://prometheus_exporter//lib/prometheus_exporter.rb#45
|
||||
def has_oj?; end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter.rb#25
|
||||
def hostname; end
|
||||
end
|
||||
end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/client.rb#7
|
||||
class PrometheusExporter::Client
|
||||
# @raise [ArgumentError]
|
||||
# @return [Client] a new instance of Client
|
||||
#
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/client.rb#54
|
||||
def initialize(host: T.unsafe(nil), port: T.unsafe(nil), max_queue_size: T.unsafe(nil), thread_sleep: T.unsafe(nil), json_serializer: T.unsafe(nil), custom_labels: T.unsafe(nil), logger: T.unsafe(nil), log_level: T.unsafe(nil), process_queue_once_and_stop: T.unsafe(nil)); end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/client.rb#93
|
||||
def custom_labels=(custom_labels); end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/client.rb#103
|
||||
def find_registered_metric(name, type: T.unsafe(nil), help: T.unsafe(nil)); end
|
||||
|
||||
# Returns the value of attribute logger.
|
||||
#
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/client.rb#52
|
||||
def logger; end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/client.rb#137
|
||||
def process_queue; end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/client.rb#97
|
||||
def register(type, name, help, opts = T.unsafe(nil)); end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/client.rb#127
|
||||
def send(str); end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/client.rb#113
|
||||
def send_json(obj); end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/client.rb#155
|
||||
def stop(wait_timeout_seconds: T.unsafe(nil)); end
|
||||
|
||||
private
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/client.rb#198
|
||||
def close_socket!; end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/client.rb#213
|
||||
def close_socket_if_old!; end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/client.rb#220
|
||||
def ensure_socket!; end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/client.rb#174
|
||||
def ensure_worker_thread!; end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/client.rb#249
|
||||
def wait_for_empty_queue_with_timeout(timeout_seconds); end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/client.rb#167
|
||||
def worker_loop; end
|
||||
|
||||
class << self
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/client.rb#41
|
||||
def default; end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/client.rb#45
|
||||
def default=(client); end
|
||||
end
|
||||
end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/client.rb#50
|
||||
PrometheusExporter::Client::MAX_QUEUE_SIZE = T.let(T.unsafe(nil), Integer)
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/client.rb#49
|
||||
PrometheusExporter::Client::MAX_SOCKET_AGE = T.let(T.unsafe(nil), Integer)
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/client.rb#8
|
||||
class PrometheusExporter::Client::RemoteMetric
|
||||
# @return [RemoteMetric] a new instance of RemoteMetric
|
||||
#
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/client.rb#11
|
||||
def initialize(name:, help:, type:, client:, opts: T.unsafe(nil)); end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/client.rb#36
|
||||
def decrement(keys = T.unsafe(nil), value = T.unsafe(nil)); end
|
||||
|
||||
# Returns the value of attribute help.
|
||||
#
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/client.rb#9
|
||||
def help; end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/client.rb#32
|
||||
def increment(keys = T.unsafe(nil), value = T.unsafe(nil)); end
|
||||
|
||||
# Returns the value of attribute name.
|
||||
#
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/client.rb#9
|
||||
def name; end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/client.rb#28
|
||||
def observe(value = T.unsafe(nil), keys = T.unsafe(nil)); end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/client.rb#19
|
||||
def standard_values(value, keys, prometheus_exporter_action = T.unsafe(nil)); end
|
||||
|
||||
# Returns the value of attribute type.
|
||||
#
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/client.rb#9
|
||||
def type; end
|
||||
end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter.rb#9
|
||||
PrometheusExporter::DEFAULT_BIND_ADDRESS = T.let(T.unsafe(nil), String)
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter.rb#11
|
||||
PrometheusExporter::DEFAULT_LABEL = T.let(T.unsafe(nil), Hash)
|
||||
|
||||
# per: https://github.com/prometheus/prometheus/wiki/Default-port-allocations
|
||||
#
|
||||
# source://prometheus_exporter//lib/prometheus_exporter.rb#8
|
||||
PrometheusExporter::DEFAULT_PORT = T.let(T.unsafe(nil), Integer)
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter.rb#10
|
||||
PrometheusExporter::DEFAULT_PREFIX = T.let(T.unsafe(nil), String)
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter.rb#13
|
||||
PrometheusExporter::DEFAULT_REALM = T.let(T.unsafe(nil), String)
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter.rb#12
|
||||
PrometheusExporter::DEFAULT_TIMEOUT = T.let(T.unsafe(nil), Integer)
|
||||
|
||||
# collects stats from resque
|
||||
#
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/instrumentation/periodic_stats.rb#3
|
||||
module PrometheusExporter::Instrumentation; end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/instrumentation/active_record.rb#5
|
||||
class PrometheusExporter::Instrumentation::ActiveRecord < ::PrometheusExporter::Instrumentation::PeriodicStats
|
||||
# @return [ActiveRecord] a new instance of ActiveRecord
|
||||
#
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/instrumentation/active_record.rb#39
|
||||
def initialize(metric_labels, config_labels); end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/instrumentation/active_record.rb#44
|
||||
def collect; end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/instrumentation/active_record.rb#54
|
||||
def collect_active_record_pool_stats(metrics); end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/instrumentation/active_record.rb#50
|
||||
def pid; end
|
||||
|
||||
private
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/instrumentation/active_record.rb#71
|
||||
def labels(pool); end
|
||||
|
||||
class << self
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/instrumentation/active_record.rb#8
|
||||
def start(client: T.unsafe(nil), frequency: T.unsafe(nil), custom_labels: T.unsafe(nil), config_labels: T.unsafe(nil)); end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/instrumentation/active_record.rb#32
|
||||
def validate_config_labels(config_labels); end
|
||||
end
|
||||
end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/instrumentation/active_record.rb#6
|
||||
PrometheusExporter::Instrumentation::ActiveRecord::ALLOWED_CONFIG_LABELS = T.let(T.unsafe(nil), Array)
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/instrumentation/sidekiq.rb#7
|
||||
PrometheusExporter::Instrumentation::DELAYED_CLASS_NAMES = T.let(T.unsafe(nil), Array)
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/instrumentation/delayed_job.rb#4
|
||||
class PrometheusExporter::Instrumentation::DelayedJob
|
||||
# @return [DelayedJob] a new instance of DelayedJob
|
||||
#
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/instrumentation/delayed_job.rb#37
|
||||
def initialize(client: T.unsafe(nil)); end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/instrumentation/delayed_job.rb#41
|
||||
def call(job, max_attempts, enqueued_count, pending_count, include_module_name, *args, &block); end
|
||||
|
||||
class << self
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/instrumentation/delayed_job.rb#8
|
||||
def register_plugin(client: T.unsafe(nil), include_module_name: T.unsafe(nil)); end
|
||||
end
|
||||
end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/instrumentation/delayed_job.rb#5
|
||||
PrometheusExporter::Instrumentation::DelayedJob::JOB_CLASS_REGEXP = T.let(T.unsafe(nil), Regexp)
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/instrumentation/good_job.rb#5
|
||||
class PrometheusExporter::Instrumentation::GoodJob < ::PrometheusExporter::Instrumentation::PeriodicStats
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/instrumentation/good_job.rb#15
|
||||
def collect; end
|
||||
|
||||
class << self
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/instrumentation/good_job.rb#6
|
||||
def start(client: T.unsafe(nil), frequency: T.unsafe(nil)); end
|
||||
end
|
||||
end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/instrumentation/hutch.rb#4
|
||||
class PrometheusExporter::Instrumentation::Hutch
|
||||
# @return [Hutch] a new instance of Hutch
|
||||
#
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/instrumentation/hutch.rb#5
|
||||
def initialize(klass); end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/instrumentation/hutch.rb#10
|
||||
def handle(message); end
|
||||
end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/instrumentation/sidekiq.rb#6
|
||||
PrometheusExporter::Instrumentation::JOB_WRAPPER_CLASS_NAME = T.let(T.unsafe(nil), String)
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/instrumentation/method_profiler.rb#7
|
||||
class PrometheusExporter::Instrumentation::MethodProfiler
|
||||
class << self
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/instrumentation/method_profiler.rb#29
|
||||
def clear; end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/instrumentation/method_profiler.rb#43
|
||||
def define_methods_on_module(klass, methods, name); end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/instrumentation/method_profiler.rb#8
|
||||
def patch(klass, methods, name, instrument:); end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/instrumentation/method_profiler.rb#71
|
||||
def patch_using_alias_method(klass, methods, name); end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/instrumentation/method_profiler.rb#65
|
||||
def patch_using_prepend(klass, methods, name); end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/instrumentation/method_profiler.rb#24
|
||||
def start(transfer = T.unsafe(nil)); end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/instrumentation/method_profiler.rb#33
|
||||
def stop; end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/instrumentation/method_profiler.rb#18
|
||||
def transfer; end
|
||||
end
|
||||
end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/instrumentation/periodic_stats.rb#4
|
||||
class PrometheusExporter::Instrumentation::PeriodicStats
|
||||
class << self
|
||||
# @raise [ArgumentError]
|
||||
#
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/instrumentation/periodic_stats.rb#5
|
||||
def start(*args, frequency:, client: T.unsafe(nil), **kwargs); end
|
||||
|
||||
# @return [Boolean]
|
||||
#
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/instrumentation/periodic_stats.rb#34
|
||||
def started?; end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/instrumentation/periodic_stats.rb#42
|
||||
def stop; end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/instrumentation/periodic_stats.rb#38
|
||||
def worker_loop(&blk); end
|
||||
end
|
||||
end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/instrumentation/process.rb#5
|
||||
class PrometheusExporter::Instrumentation::Process < ::PrometheusExporter::Instrumentation::PeriodicStats
|
||||
# @return [Process] a new instance of Process
|
||||
#
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/instrumentation/process.rb#27
|
||||
def initialize(metric_labels); end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/instrumentation/process.rb#31
|
||||
def collect; end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/instrumentation/process.rb#65
|
||||
def collect_gc_stats(metric); end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/instrumentation/process.rb#60
|
||||
def collect_process_stats(metric); end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/instrumentation/process.rb#76
|
||||
def collect_v8_stats(metric); end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/instrumentation/process.rb#42
|
||||
def pid; end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/instrumentation/process.rb#46
|
||||
def rss; end
|
||||
|
||||
class << self
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/instrumentation/process.rb#6
|
||||
def start(client: T.unsafe(nil), type: T.unsafe(nil), frequency: T.unsafe(nil), labels: T.unsafe(nil)); end
|
||||
end
|
||||
end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/instrumentation/puma.rb#7
|
||||
class PrometheusExporter::Instrumentation::Puma < ::PrometheusExporter::Instrumentation::PeriodicStats
|
||||
# @return [Puma] a new instance of Puma
|
||||
#
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/instrumentation/puma.rb#20
|
||||
def initialize(metric_labels = T.unsafe(nil)); end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/instrumentation/puma.rb#24
|
||||
def collect; end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/instrumentation/puma.rb#39
|
||||
def collect_puma_stats(metric); end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/instrumentation/puma.rb#35
|
||||
def pid; end
|
||||
|
||||
private
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/instrumentation/puma.rb#59
|
||||
def collect_worker_status(metric, status); end
|
||||
|
||||
class << self
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/instrumentation/puma.rb#8
|
||||
def start(client: T.unsafe(nil), frequency: T.unsafe(nil), labels: T.unsafe(nil)); end
|
||||
end
|
||||
end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/instrumentation/resque.rb#5
|
||||
class PrometheusExporter::Instrumentation::Resque < ::PrometheusExporter::Instrumentation::PeriodicStats
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/instrumentation/resque.rb#15
|
||||
def collect; end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/instrumentation/resque.rb#22
|
||||
def collect_resque_stats(metric); end
|
||||
|
||||
class << self
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/instrumentation/resque.rb#6
|
||||
def start(client: T.unsafe(nil), frequency: T.unsafe(nil)); end
|
||||
end
|
||||
end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/instrumentation/shoryuken.rb#4
|
||||
class PrometheusExporter::Instrumentation::Shoryuken
|
||||
# @return [Shoryuken] a new instance of Shoryuken
|
||||
#
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/instrumentation/shoryuken.rb#5
|
||||
def initialize(client: T.unsafe(nil)); end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/instrumentation/shoryuken.rb#9
|
||||
def call(worker, queue, msg, body); end
|
||||
end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/instrumentation/sidekiq.rb#13
|
||||
class PrometheusExporter::Instrumentation::Sidekiq
|
||||
# @return [Sidekiq] a new instance of Sidekiq
|
||||
#
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/instrumentation/sidekiq.rb#45
|
||||
def initialize(options = T.unsafe(nil)); end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/instrumentation/sidekiq.rb#49
|
||||
def call(worker, msg, queue); end
|
||||
|
||||
class << self
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/instrumentation/sidekiq.rb#14
|
||||
def death_handler; end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/instrumentation/sidekiq.rb#88
|
||||
def get_delayed_name(msg, class_name); end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/instrumentation/sidekiq.rb#84
|
||||
def get_job_wrapper_name(msg); end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/instrumentation/sidekiq.rb#74
|
||||
def get_name(class_name, msg); end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/instrumentation/sidekiq.rb#32
|
||||
def get_worker_custom_labels(worker_class, msg); end
|
||||
end
|
||||
end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/instrumentation/sidekiq_process.rb#4
|
||||
class PrometheusExporter::Instrumentation::SidekiqProcess < ::PrometheusExporter::Instrumentation::PeriodicStats
|
||||
# @return [SidekiqProcess] a new instance of SidekiqProcess
|
||||
#
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/instrumentation/sidekiq_process.rb#14
|
||||
def initialize; end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/instrumentation/sidekiq_process.rb#19
|
||||
def collect; end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/instrumentation/sidekiq_process.rb#23
|
||||
def collect_stats; end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/instrumentation/sidekiq_process.rb#41
|
||||
def current_process; end
|
||||
|
||||
class << self
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/instrumentation/sidekiq_process.rb#5
|
||||
def start(client: T.unsafe(nil), frequency: T.unsafe(nil)); end
|
||||
end
|
||||
end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/instrumentation/sidekiq_queue.rb#4
|
||||
class PrometheusExporter::Instrumentation::SidekiqQueue < ::PrometheusExporter::Instrumentation::PeriodicStats
|
||||
# @return [SidekiqQueue] a new instance of SidekiqQueue
|
||||
#
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/instrumentation/sidekiq_queue.rb#14
|
||||
def initialize(all_queues: T.unsafe(nil)); end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/instrumentation/sidekiq_queue.rb#20
|
||||
def collect; end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/instrumentation/sidekiq_queue.rb#24
|
||||
def collect_queue_stats; end
|
||||
|
||||
private
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/instrumentation/sidekiq_queue.rb#47
|
||||
def collect_current_process_queues; end
|
||||
|
||||
class << self
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/instrumentation/sidekiq_queue.rb#5
|
||||
def start(client: T.unsafe(nil), frequency: T.unsafe(nil), all_queues: T.unsafe(nil)); end
|
||||
end
|
||||
end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/instrumentation/sidekiq_stats.rb#4
|
||||
class PrometheusExporter::Instrumentation::SidekiqStats < ::PrometheusExporter::Instrumentation::PeriodicStats
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/instrumentation/sidekiq_stats.rb#14
|
||||
def collect; end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/instrumentation/sidekiq_stats.rb#18
|
||||
def collect_stats; end
|
||||
|
||||
class << self
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/instrumentation/sidekiq_stats.rb#5
|
||||
def start(client: T.unsafe(nil), frequency: T.unsafe(nil)); end
|
||||
end
|
||||
end
|
||||
|
||||
# collects stats from unicorn
|
||||
#
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/instrumentation/unicorn.rb#11
|
||||
class PrometheusExporter::Instrumentation::Unicorn < ::PrometheusExporter::Instrumentation::PeriodicStats
|
||||
# @return [Unicorn] a new instance of Unicorn
|
||||
#
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/instrumentation/unicorn.rb#24
|
||||
def initialize(pid_file:, listener_address:); end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/instrumentation/unicorn.rb#30
|
||||
def collect; end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/instrumentation/unicorn.rb#37
|
||||
def collect_unicorn_stats(metric); end
|
||||
|
||||
private
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/instrumentation/unicorn.rb#59
|
||||
def listener_address_stats; end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/instrumentation/unicorn.rb#47
|
||||
def worker_process_count; end
|
||||
|
||||
class << self
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/instrumentation/unicorn.rb#12
|
||||
def start(pid_file:, listener_address:, client: T.unsafe(nil), frequency: T.unsafe(nil)); end
|
||||
end
|
||||
end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/client.rb#258
|
||||
class PrometheusExporter::LocalClient < ::PrometheusExporter::Client
|
||||
# @return [LocalClient] a new instance of LocalClient
|
||||
#
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/client.rb#261
|
||||
def initialize(collector:, json_serializer: T.unsafe(nil), custom_labels: T.unsafe(nil)); end
|
||||
|
||||
# Returns the value of attribute collector.
|
||||
#
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/client.rb#259
|
||||
def collector; end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/client.rb#266
|
||||
def send(json); end
|
||||
end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/metric/base.rb#3
|
||||
module PrometheusExporter::Metric; end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/metric/base.rb#4
|
||||
class PrometheusExporter::Metric::Base
|
||||
# @return [Base] a new instance of Base
|
||||
#
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/metric/base.rb#36
|
||||
def initialize(name, help); end
|
||||
|
||||
# Returns the value of attribute data.
|
||||
#
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/metric/base.rb#34
|
||||
def data; end
|
||||
|
||||
# Sets the attribute data
|
||||
#
|
||||
# @param value the value to set the attribute data to.
|
||||
#
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/metric/base.rb#34
|
||||
def data=(_arg0); end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/metric/base.rb#57
|
||||
def from_json(json); end
|
||||
|
||||
# Returns the value of attribute help.
|
||||
#
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/metric/base.rb#34
|
||||
def help; end
|
||||
|
||||
# Sets the attribute help
|
||||
#
|
||||
# @param value the value to set the attribute help to.
|
||||
#
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/metric/base.rb#34
|
||||
def help=(_arg0); end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/metric/base.rb#76
|
||||
def labels_text(labels); end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/metric/base.rb#45
|
||||
def metric_text; end
|
||||
|
||||
# Returns the value of attribute name.
|
||||
#
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/metric/base.rb#34
|
||||
def name; end
|
||||
|
||||
# Sets the attribute name
|
||||
#
|
||||
# @param value the value to set the attribute name to.
|
||||
#
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/metric/base.rb#34
|
||||
def name=(_arg0); end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/metric/base.rb#72
|
||||
def prefix(name); end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/metric/base.rb#49
|
||||
def reset!; end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/metric/base.rb#53
|
||||
def to_h; end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/metric/base.rb#91
|
||||
def to_prometheus_text; end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/metric/base.rb#41
|
||||
def type; end
|
||||
|
||||
private
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/metric/base.rb#101
|
||||
def escape_value(str); end
|
||||
|
||||
# @return [Boolean]
|
||||
#
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/metric/base.rb#111
|
||||
def needs_escape?(str); end
|
||||
|
||||
class << self
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/metric/base.rb#30
|
||||
def default_aggregation; end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/metric/base.rb#26
|
||||
def default_aggregation=(aggregation); end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/metric/base.rb#22
|
||||
def default_labels; end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/metric/base.rb#18
|
||||
def default_labels=(labels); end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/metric/base.rb#14
|
||||
def default_prefix; end
|
||||
|
||||
# prefix applied to all metrics
|
||||
#
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/metric/base.rb#10
|
||||
def default_prefix=(name); end
|
||||
end
|
||||
end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/metric/counter.rb#4
|
||||
class PrometheusExporter::Metric::Counter < ::PrometheusExporter::Metric::Base
|
||||
# @return [Counter] a new instance of Counter
|
||||
#
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/metric/counter.rb#7
|
||||
def initialize(name, help); end
|
||||
|
||||
# Returns the value of attribute data.
|
||||
#
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/metric/counter.rb#5
|
||||
def data; end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/metric/counter.rb#42
|
||||
def decrement(labels = T.unsafe(nil), value = T.unsafe(nil)); end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/metric/counter.rb#37
|
||||
def increment(labels = T.unsafe(nil), value = T.unsafe(nil)); end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/metric/counter.rb#20
|
||||
def metric_text; end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/metric/counter.rb#32
|
||||
def observe(increment = T.unsafe(nil), labels = T.unsafe(nil)); end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/metric/counter.rb#28
|
||||
def remove(labels); end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/metric/counter.rb#47
|
||||
def reset(labels = T.unsafe(nil), value = T.unsafe(nil)); end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/metric/counter.rb#16
|
||||
def reset!; end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/metric/counter.rb#24
|
||||
def to_h; end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/metric/counter.rb#12
|
||||
def type; end
|
||||
end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/metric/gauge.rb#4
|
||||
class PrometheusExporter::Metric::Gauge < ::PrometheusExporter::Metric::Base
|
||||
# @return [Gauge] a new instance of Gauge
|
||||
#
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/metric/gauge.rb#7
|
||||
def initialize(name, help); end
|
||||
|
||||
# Returns the value of attribute data.
|
||||
#
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/metric/gauge.rb#5
|
||||
def data; end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/metric/gauge.rb#52
|
||||
def decrement(labels = T.unsafe(nil), value = T.unsafe(nil)); end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/metric/gauge.rb#47
|
||||
def increment(labels = T.unsafe(nil), value = T.unsafe(nil)); end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/metric/gauge.rb#20
|
||||
def metric_text; end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/metric/gauge.rb#36
|
||||
def observe(value, labels = T.unsafe(nil)); end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/metric/gauge.rb#32
|
||||
def remove(labels); end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/metric/gauge.rb#24
|
||||
def reset!; end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/metric/gauge.rb#36
|
||||
def set(value, labels = T.unsafe(nil)); end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/metric/gauge.rb#28
|
||||
def to_h; end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/metric/gauge.rb#16
|
||||
def type; end
|
||||
end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/metric/histogram.rb#4
|
||||
class PrometheusExporter::Metric::Histogram < ::PrometheusExporter::Metric::Base
|
||||
# @return [Histogram] a new instance of Histogram
|
||||
#
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/metric/histogram.rb#19
|
||||
def initialize(name, help, opts = T.unsafe(nil)); end
|
||||
|
||||
# Returns the value of attribute buckets.
|
||||
#
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/metric/histogram.rb#17
|
||||
def buckets; end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/metric/histogram.rb#81
|
||||
def ensure_histogram(labels); end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/metric/histogram.rb#92
|
||||
def fill_buckets(value, buckets); end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/metric/histogram.rb#51
|
||||
def metric_text; end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/metric/histogram.rb#70
|
||||
def observe(value, labels = T.unsafe(nil)); end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/metric/histogram.rb#41
|
||||
def remove(labels); end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/metric/histogram.rb#25
|
||||
def reset!; end
|
||||
|
||||
# source://prometheus_exporter//lib/prometheus_exporter/metric/histogram.rb#31
def to_h; end

# source://prometheus_exporter//lib/prometheus_exporter/metric/histogram.rb#47
def type; end

# source://prometheus_exporter//lib/prometheus_exporter/metric/histogram.rb#99
def with_bucket(labels, bucket); end

class << self
# source://prometheus_exporter//lib/prometheus_exporter/metric/histogram.rb#9
def default_buckets; end

# source://prometheus_exporter//lib/prometheus_exporter/metric/histogram.rb#13
def default_buckets=(buckets); end
end
end

# source://prometheus_exporter//lib/prometheus_exporter/metric/histogram.rb#5
PrometheusExporter::Metric::Histogram::DEFAULT_BUCKETS = T.let(T.unsafe(nil), Array)

# source://prometheus_exporter//lib/prometheus_exporter/metric/summary.rb#4
class PrometheusExporter::Metric::Summary < ::PrometheusExporter::Metric::Base
# @return [Summary] a new instance of Summary
#
# source://prometheus_exporter//lib/prometheus_exporter/metric/summary.rb#10
def initialize(name, help, opts = T.unsafe(nil)); end

# source://prometheus_exporter//lib/prometheus_exporter/metric/summary.rb#57
def calculate_all_quantiles; end

# source://prometheus_exporter//lib/prometheus_exporter/metric/summary.rb#45
def calculate_quantiles(raw_data); end

# Returns the value of attribute count.
#
# source://prometheus_exporter//lib/prometheus_exporter/metric/summary.rb#8
def count; end

# makes sure we have storage
#
# source://prometheus_exporter//lib/prometheus_exporter/metric/summary.rb#83
def ensure_summary(labels); end

# Returns the value of attribute estimators.
#
# source://prometheus_exporter//lib/prometheus_exporter/metric/summary.rb#8
def estimators; end

# source://prometheus_exporter//lib/prometheus_exporter/metric/summary.rb#66
def metric_text; end

# source://prometheus_exporter//lib/prometheus_exporter/metric/summary.rb#100
def observe(value, labels = T.unsafe(nil)); end

# source://prometheus_exporter//lib/prometheus_exporter/metric/summary.rb#34
def remove(labels); end

# source://prometheus_exporter//lib/prometheus_exporter/metric/summary.rb#16
def reset!; end

# source://prometheus_exporter//lib/prometheus_exporter/metric/summary.rb#91
def rotate_if_needed; end

# source://prometheus_exporter//lib/prometheus_exporter/metric/summary.rb#24
def to_h; end

# Returns the value of attribute total.
#
# source://prometheus_exporter//lib/prometheus_exporter/metric/summary.rb#8
def total; end

# source://prometheus_exporter//lib/prometheus_exporter/metric/summary.rb#41
def type; end
end

# source://prometheus_exporter//lib/prometheus_exporter/metric/summary.rb#5
PrometheusExporter::Metric::Summary::DEFAULT_QUANTILES = T.let(T.unsafe(nil), Array)

# source://prometheus_exporter//lib/prometheus_exporter/metric/summary.rb#6
PrometheusExporter::Metric::Summary::ROTATE_AGE = T.let(T.unsafe(nil), Integer)

# source://prometheus_exporter//lib/prometheus_exporter/middleware.rb#6
class PrometheusExporter::Middleware
# @return [Middleware] a new instance of Middleware
#
# source://prometheus_exporter//lib/prometheus_exporter/middleware.rb#9
def initialize(app, config = T.unsafe(nil)); end

# source://prometheus_exporter//lib/prometheus_exporter/middleware.rb#53
def call(env); end

# allows subclasses to add custom labels based on env
#
# source://prometheus_exporter//lib/prometheus_exporter/middleware.rb#93
def custom_labels(env); end

# source://prometheus_exporter//lib/prometheus_exporter/middleware.rb#76
def default_labels(env, result); end

private

# source://prometheus_exporter//lib/prometheus_exporter/middleware.rb#139
def apply_redis_client_middleware!; end

# measures the queue time (= time between receiving the request in downstream
# load balancer and starting request in ruby process)
#
# source://prometheus_exporter//lib/prometheus_exporter/middleware.rb#101
def measure_queue_time(env); end

# determine queue start from well-known trace headers
#
# source://prometheus_exporter//lib/prometheus_exporter/middleware.rb#116
def queue_start(env); end

# need to use CLOCK_REALTIME, as nginx/apache write this also out as the unix timestamp
#
# source://prometheus_exporter//lib/prometheus_exporter/middleware.rb#111
def request_start; end
end

# source://prometheus_exporter//lib/prometheus_exporter/middleware.rb#7
PrometheusExporter::Middleware::MethodProfiler = PrometheusExporter::Instrumentation::MethodProfiler

# source://prometheus_exporter//lib/prometheus_exporter/middleware.rb#135
module PrometheusExporter::Middleware::RedisInstrumenter
# source://prometheus_exporter//lib/prometheus_exporter/instrumentation/method_profiler.rb#47
def call(*_arg0, **_arg1, &_arg2); end

# source://prometheus_exporter//lib/prometheus_exporter/instrumentation/method_profiler.rb#61
def call_pipelined(*_arg0, **_arg1, &_arg2); end
end

# source://prometheus_exporter//lib/prometheus_exporter.rb#15
class PrometheusExporter::OjCompat
class << self
# source://prometheus_exporter//lib/prometheus_exporter.rb#20
def dump(obj); end

# source://prometheus_exporter//lib/prometheus_exporter.rb#16
def parse(obj); end
end
end

# source://prometheus_exporter//lib/prometheus_exporter/version.rb#4
PrometheusExporter::VERSION = T.let(T.unsafe(nil), String)

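These generated stubs only give Sorbet types for the exporter's Rack middleware and metric classes; the commit itself does not show where the middleware is mounted. As a hedged sketch (an assumption, not part of this diff), wiring it into a Rails app usually amounts to an initializer like:

# config/initializers/prometheus.rb (hypothetical, not in this commit)
unless Rails.env.test?
  require "prometheus_exporter/middleware"
  # report per-request timings and status codes to the exporter collector
  Rails.application.middleware.unshift PrometheusExporter::Middleware
end

The middleware ships its measurements to the separately running prometheus_exporter collector process, which is what the Prometheus server then scrapes.
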
29
sorbet/rbi/metrics.rbi
Normal file
@@ -0,0 +1,29 @@
# typed: strict
class PrometheusExporter::Client::RemoteMetric
sig do
params(
value: Numeric,
keys: T.nilable(T::Hash[T.any(String, Symbol), T.untyped]),
).void
end
def observe(value = 1, keys = nil)
end

sig do
params(
keys: T.nilable(T::Hash[T.any(String, Symbol), T.untyped]),
value: T.nilable(Numeric),
).void
end
def increment(keys = nil, value = nil)
end

sig do
params(
keys: T.nilable(T::Hash[T.any(String, Symbol), T.untyped]),
value: T.nilable(Numeric),
).void
end
def decrement(keys = nil, value = nil)
end
end

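This hand-written shim types the metric handles returned by PrometheusExporter::Client#register. A minimal usage sketch consistent with these signatures (metric names and labels are illustrative, not taken from this commit):

client = PrometheusExporter::Client.default
# observe(value, keys): record a value with optional labels
page_timing = client.register(:histogram, "scrape_duration_seconds", "time spent fetching a page")
page_timing.observe(0.42, site: "e621")

# increment(keys, value): bump a counter, by 1 unless a value is given
pages = client.register(:counter, "scraped_pages_total", "pages fetched")
pages.increment(site: "fa")
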
12
sorbet/rbi/rspec_todo.rbi
Normal file
@@ -0,0 +1,12 @@
# typed: strict
module RSpec::Matchers
sig { params(expected: T.untyped).returns(T.untyped) }
def http_client_opts_with(expected)
end
end

class HttpClientMockHelpers
sig { returns(ColorLogger) }
def self.logger
end
end

@@ -23,3 +23,10 @@ require "sorbet-runtime"
require "syntax_tree"
require "timeout"
require "xdiff/extension"

require "prometheus_exporter/client"
require "prometheus_exporter/metric"
require "prometheus_exporter/instrumentation"
require "prometheus_exporter/instrumentation/active_record"
require "prometheus_exporter/instrumentation/good_job"
require "prometheus_exporter/middleware"

64
spec/helpers/http_client_mock_helpers.rb
Normal file
@@ -0,0 +1,64 @@
# typed: true
class HttpClientMockHelpers
include HasColorLogger
extend FactoryBot::Syntax::Methods
extend RSpec::Mocks::ExampleMethods
extend RSpec::Matchers

def self.init_http_client_mock(http_client_mock, requests)
log_entries = []

requests.each do |request|
sha256 = Digest::SHA256.digest(request[:contents])

log_entry =
build(
:http_log_entry,
uri: request[:uri],
verb: :get,
content_type: request[:content_type],
status_code: request[:status_code] || 200,
performed_by: "direct",
response_time_ms: rand(20..100),
request_headers: build(:http_log_entry_header),
response_headers: build(:http_log_entry_header),
response:
BlobEntry.find_by(sha256: sha256) ||
build(
:blob_entry,
content_type: request[:content_type],
content: request[:contents],
),
)
log_entry.save!
log_entries << log_entry

caused_by_entry = nil
if request[:caused_by_entry_idx]
caused_by_entry = log_entries[request[:caused_by_entry_idx]]
elsif request[:caused_by_entry]
caused_by_entry = request[:caused_by_entry]
end

method = request[:method] || :get
expect(http_client_mock).to(
receive(method).with(
log_entry.uri.to_s,
http_client_opts_with(
caused_by_entry: caused_by_entry,
use_http_cache: request[:use_http_cache],
),
) do |uri, opts|
logger.info "[mock http client] [#{method}] [#{uri}] [#{opts.inspect.truncate(80)}]"
Scraper::HttpClient::Response.new(
status_code: log_entry.status_code,
body: log_entry.response.contents,
log_entry: log_entry,
)
end,
)
end

log_entries
end
end

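For reference, `requests` is an array of plain hashes; based on the keys the helper reads above, a spec drives it roughly like this (URL and body are illustrative placeholders):

HttpClientMockHelpers.init_http_client_mock(
  http_client_mock,
  [
    {
      uri: "https://example.test/page/1",
      content_type: "text/html",
      contents: "<html>...</html>",
      status_code: 200,           # optional, defaults to 200
      method: :get,               # optional, defaults to :get
      caused_by_entry_idx: nil,   # or the index of an earlier entry in this array
    },
  ],
)

Each entry becomes a persisted :http_log_entry plus an expectation on the mock client, and the created entries are returned so the spec can assert against them, as the call sites below show.
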
@@ -8,7 +8,7 @@ describe Domain::E621::Job::PostsIndexJob do
it "works" do
file = create(:http_log_entry)

SpecUtil.init_http_client_mock(
HttpClientMockHelpers.init_http_client_mock(
http_client_mock,
[
{

@@ -9,7 +9,7 @@ describe Domain::E621::Job::ScanPostJob do
post = create(:domain_e621_post, e621_id: 2_227_914)
caused_by_entry = create(:http_log_entry)
log_entries =
SpecUtil.init_http_client_mock(
HttpClientMockHelpers.init_http_client_mock(
http_client_mock,
[
{
@@ -46,7 +46,7 @@ describe Domain::E621::Job::ScanPostJob do
it "handles a post with no file url" do
post = create(:domain_e621_post, e621_id: 5_270_136)
log_entries =
SpecUtil.init_http_client_mock(
HttpClientMockHelpers.init_http_client_mock(
http_client_mock,
[
{

@@ -14,7 +14,7 @@ describe Domain::E621::Job::StaticFileJob do
)
hle = create(:http_log_entry)
mock_log_entries =
SpecUtil.init_http_client_mock(
HttpClientMockHelpers.init_http_client_mock(
http_client_mock,
[
{
@@ -42,7 +42,7 @@ describe Domain::E621::Job::StaticFileJob do
)
hle = create(:http_log_entry)
mock_log_entries =
SpecUtil.init_http_client_mock(
HttpClientMockHelpers.init_http_client_mock(
http_client_mock,
[
{

@@ -127,7 +127,7 @@ describe Domain::Fa::Job::BrowsePageJob do

context "with no posts found on page" do
let! :log_entries do
SpecUtil.init_http_client_mock(
HttpClientMockHelpers.init_http_client_mock(
http_client_mock,
[
{
@@ -151,7 +151,7 @@ describe Domain::Fa::Job::BrowsePageJob do

context "duplicates found on the page" do
let! :log_entries do
SpecUtil.init_http_client_mock(
HttpClientMockHelpers.init_http_client_mock(
http_client_mock,
[
{
@@ -186,7 +186,7 @@ describe Domain::Fa::Job::BrowsePageJob do
context "with one unseen post" do
include_context "user and post getters"
let! :log_entries do
SpecUtil.init_http_client_mock(
HttpClientMockHelpers.init_http_client_mock(
http_client_mock,
[
{
@@ -237,7 +237,7 @@ describe Domain::Fa::Job::BrowsePageJob do
context "with gaps between posts" do
include_context "user and post getters"
let! :log_entries do
SpecUtil.init_http_client_mock(
HttpClientMockHelpers.init_http_client_mock(
http_client_mock,
[
{
@@ -295,7 +295,7 @@ describe Domain::Fa::Job::BrowsePageJob do
include_context "user and post getters"
include_context "create user and post"
let! :log_entries do
SpecUtil.init_http_client_mock(
HttpClientMockHelpers.init_http_client_mock(
http_client_mock,
[
{
@@ -338,7 +338,7 @@ describe Domain::Fa::Job::BrowsePageJob do
before do
found_post = post.call
found_post.file_url_str = "http://www.example.com/foo.txt"
file = SpecUtil.build_http_log_entry(uri: found_post.file_uri)
file = build(:http_log_entry, uri: found_post.file_uri)
file.save!
found_post.file = file
found_post.save!
@@ -382,7 +382,7 @@ describe Domain::Fa::Job::BrowsePageJob do

context "with a page that responds with an error" do
let! :log_entries do
SpecUtil.init_http_client_mock(
HttpClientMockHelpers.init_http_client_mock(
http_client_mock,
[
{

@@ -7,7 +7,10 @@ describe Domain::Fa::Job::FavsJob do
before do
Scraper::ClientFactory.http_client_mock = http_client_mock
@log_entries =
SpecUtil.init_http_client_mock(http_client_mock, client_mock_config)
HttpClientMockHelpers.init_http_client_mock(
http_client_mock,
client_mock_config,
)
end

shared_context "user exists" do
@@ -167,7 +170,7 @@ describe Domain::Fa::Job::FavsJob do
end

it "stops scanning after the first page" do
SpecUtil.init_http_client_mock(
HttpClientMockHelpers.init_http_client_mock(
http_client_mock,
client_mock_config[0...1],
)
@@ -178,7 +181,10 @@ describe Domain::Fa::Job::FavsJob do
end

it "scans all pages if full_scan is true" do
SpecUtil.init_http_client_mock(http_client_mock, client_mock_config)
HttpClientMockHelpers.init_http_client_mock(
http_client_mock,
client_mock_config,
)
perform_now({ url_name: "zzreg", full_scan: true })
user.reload
expect(user.fav_posts.count).to eq(5)

@@ -137,7 +137,7 @@ describe Domain::Fa::Job::HomePageJob do
context "with one unseen post" do
include_context "user and post getters"
let! :log_entries do
SpecUtil.init_http_client_mock(
HttpClientMockHelpers.init_http_client_mock(
http_client_mock,
[
{

@@ -27,7 +27,7 @@ describe Domain::Fa::Job::UserAvatarJob do
shared_context "avatar file found" do
before do
@log_entries =
SpecUtil.init_http_client_mock(
HttpClientMockHelpers.init_http_client_mock(
http_client_mock,
[
{
@@ -44,7 +44,7 @@ describe Domain::Fa::Job::UserAvatarJob do
shared_context "avatar file not found" do
before do
@log_entries =
SpecUtil.init_http_client_mock(
HttpClientMockHelpers.init_http_client_mock(
http_client_mock,
[
{
@@ -61,7 +61,7 @@ describe Domain::Fa::Job::UserAvatarJob do
shared_context "avatar file is a server error" do
before do
@log_entries =
SpecUtil.init_http_client_mock(
HttpClientMockHelpers.init_http_client_mock(
http_client_mock,
[
{

@@ -7,7 +7,7 @@ describe Domain::Fa::Job::UserFollowsJob do
let(:http_client_mock) { instance_double("::Scraper::HttpClient") }
let(:set_zzreg_http_mock) do
proc do
SpecUtil.init_http_client_mock(
HttpClientMockHelpers.init_http_client_mock(
http_client_mock,
[
{
@@ -16,9 +16,9 @@ describe Domain::Fa::Job::UserFollowsJob do
content_type: "text/html",
contents:
SpecUtil.read_fixture_file(
"domain/fa/parser/redux/watchlist_zzreg.html"
"domain/fa/parser/redux/watchlist_zzreg.html",
),
caused_by_entry_idx: nil
caused_by_entry_idx: nil,
},
{
uri: "https://www.furaffinity.net/watchlist/by/zzreg/2/?",
@@ -26,11 +26,11 @@ describe Domain::Fa::Job::UserFollowsJob do
content_type: "text/html",
contents:
SpecUtil.read_fixture_file(
"domain/fa/parser/redux/watchlist_zzreg.html"
"domain/fa/parser/redux/watchlist_zzreg.html",
),
caused_by_entry_idx: 0
}
]
caused_by_entry_idx: 0,
},
],
)
end
end
@@ -57,7 +57,7 @@ describe Domain::Fa::Job::UserFollowsJob do
it "creates the scanned user and followed users" do
expect do perform_now({ url_name: "zzreg" }) end.to change(
Domain::Fa::User,
:count
:count,
).by(FOLLOWS_ON_ZZREG_PAGE + 1)
end

@@ -68,7 +68,7 @@ describe Domain::Fa::Job::UserFollowsJob do
expect(
SpecUtil
.enqueued_jobs(Domain::Fa::Job::UserPageJob)
.find { |job| job[:args][0][:user] == zzreg }
.find { |job| job[:args][0][:user] == zzreg },
).to_not be_nil
end
end
@@ -79,14 +79,14 @@ describe Domain::Fa::Job::UserFollowsJob do
it "can be performed by url_name" do
expect do perform_now({ url_name: "zzreg" }) end.to change(
Domain::Fa::User,
:count
:count,
).by(FOLLOWS_ON_ZZREG_PAGE)
end

it "can be performed by direct post object" do
expect do perform_now({ user: user }) end.to change(
Domain::Fa::User,
:count
:count,
).by(FOLLOWS_ON_ZZREG_PAGE)
end

@@ -95,7 +95,7 @@ describe Domain::Fa::Job::UserFollowsJob do
expect(
SpecUtil
.enqueued_jobs(Domain::Fa::Job::UserPageJob)
.find { |job| job[:args][0][:user] == user }
.find { |job| job[:args][0][:user] == user },
).to be_nil
end

@@ -116,7 +116,7 @@ describe Domain::Fa::Job::UserFollowsJob do
# name in html is Agi_Type01, it's intentionally changed here
name: "AGI_Type01",
url_name: "agitype01",
num_submissions: 10
num_submissions: 10,
)
end

@@ -125,7 +125,7 @@ describe Domain::Fa::Job::UserFollowsJob do

expect do perform_now({ user: user }) end.to change(
Domain::Fa::User,
:count
:count,
).by(FOLLOWS_ON_ZZREG_PAGE - 1)

followed.reload
@@ -140,7 +140,7 @@ describe Domain::Fa::Job::UserFollowsJob do
accelo_user = Domain::Fa::User.find_by url_name: "accelo"
expect(accelo_user).to_not be_nil
expect(accelo_user.state_detail["first_seen_entry"]).to eq(
@zzreg_mock_log_entries[0].id
@zzreg_mock_log_entries[0].id,
)
end

@@ -167,7 +167,7 @@ describe Domain::Fa::Job::UserFollowsJob do
Domain::Fa::User.create!(
name: "Agi_Type01",
url_name: "agitype01",
num_submissions: 10
num_submissions: 10,
)
end

@@ -188,27 +188,27 @@ describe Domain::Fa::Job::UserFollowsJob do
expect(
SpecUtil
.enqueued_jobs(Domain::Fa::Job::UserPageJob)
.find { |job| job[:args][0][:user] == smaz_user }
.find { |job| job[:args][0][:user] == smaz_user },
).to be_nil

expect(
SpecUtil
.enqueued_jobs(Domain::Fa::Job::UserPageJob)
.find { |job| job[:args][0][:user] == agi_type01_user }
.find { |job| job[:args][0][:user] == agi_type01_user },
).to be_nil

# newly created users are enqueued by url name
expect(
SpecUtil
.enqueued_jobs(Domain::Fa::Job::UserPageJob)
.find { |job| job[:args][0][:url_name] == "accelo" }
.find { |job| job[:args][0][:url_name] == "accelo" },
).to_not be_nil
end

it "does not enqueue a job if the user is not new" do
perform_now({ user: user })
expect(SpecUtil.enqueued_jobs(Domain::Fa::Job::UserPageJob).length).to eq(
FOLLOWS_ON_ZZREG_PAGE - 1
FOLLOWS_ON_ZZREG_PAGE - 1,
)
end

@@ -216,7 +216,7 @@ describe Domain::Fa::Job::UserFollowsJob do
Domain::Fa::Job::UserPageJob.perform_later({ url_name: "accelo" })
perform_now({ user: user })
expect(SpecUtil.enqueued_jobs(Domain::Fa::Job::UserPageJob).length).to eq(
FOLLOWS_ON_ZZREG_PAGE - 1
FOLLOWS_ON_ZZREG_PAGE - 1,
)
end
end

@@ -6,7 +6,10 @@ describe Domain::Fa::Job::UserIncrementalJob do
before do
Scraper::ClientFactory.http_client_mock = http_client_mock
@log_entries =
SpecUtil.init_http_client_mock(http_client_mock, client_mock_config)
HttpClientMockHelpers.init_http_client_mock(
http_client_mock,
client_mock_config,
)
end

context "scanning a normal user" do
@@ -17,8 +20,8 @@ describe Domain::Fa::Job::UserIncrementalJob do
status_code: 200,
content_type: "text/html",
contents:
SpecUtil.read_fixture_file("domain/fa/job/user_page_meesh.html")
}
SpecUtil.read_fixture_file("domain/fa/job/user_page_meesh.html"),
},
]
end

@@ -27,22 +30,22 @@ describe Domain::Fa::Job::UserIncrementalJob do
meesh = Domain::Fa::User.find_by(url_name: "meesh")
expect(meesh).to_not be_nil
expect(
SpecUtil.enqueued_jobs(Domain::Fa::Job::UserAvatarJob).length
SpecUtil.enqueued_jobs(Domain::Fa::Job::UserAvatarJob).length,
).to be(1)

# 12 new watchers, 12 new watched
expect(SpecUtil.enqueued_jobs(Domain::Fa::Job::UserPageJob).length).to be(
24
24,
)
# new watch in last position, so enqueue scan
expect(
SpecUtil.enqueued_jobs(Domain::Fa::Job::UserFollowsJob).length
SpecUtil.enqueued_jobs(Domain::Fa::Job::UserFollowsJob).length,
).to be(1)
expect(meesh.scanned_follows_at).to be_nil

# 20 newly seen faved posts
expect(SpecUtil.enqueued_jobs(Domain::Fa::Job::ScanPostJob).length).to be(
20
20,
)
# new fav in last position, so should enqueue scan
expect(SpecUtil.enqueued_jobs(Domain::Fa::Job::FavsJob).length).to be(1)
@@ -62,17 +65,17 @@ describe Domain::Fa::Job::UserIncrementalJob do

# 12 new watchers, 11 new watched
expect(SpecUtil.enqueued_jobs(Domain::Fa::Job::UserPageJob).length).to be(
23
23,
)
# No new watch in last position, can skip scan
expect(
SpecUtil.enqueued_jobs(Domain::Fa::Job::UserFollowsJob)
SpecUtil.enqueued_jobs(Domain::Fa::Job::UserFollowsJob),
).to be_empty
expect(meesh.scanned_follows_at).to be_within(1.second).of(Time.now)

# 19 newly seen faved posts
expect(SpecUtil.enqueued_jobs(Domain::Fa::Job::ScanPostJob).length).to be(
19
19,
)
# No new fav in last position, so don't enqueue scan
expect(SpecUtil.enqueued_jobs(Domain::Fa::Job::FavsJob)).to be_empty

@@ -6,7 +6,10 @@ describe Domain::Fa::Job::UserPageJob do
before do
Scraper::ClientFactory.http_client_mock = http_client_mock
@log_entries =
SpecUtil.init_http_client_mock(http_client_mock, client_mock_config)
HttpClientMockHelpers.init_http_client_mock(
http_client_mock,
client_mock_config,
)
end

context "scanning a normal user" do
@@ -17,8 +20,8 @@ describe Domain::Fa::Job::UserPageJob do
status_code: 200,
content_type: "text/html",
contents:
SpecUtil.read_fixture_file("domain/fa/job/user_page_meesh.html")
}
SpecUtil.read_fixture_file("domain/fa/job/user_page_meesh.html"),
},
]
end

@@ -27,10 +30,10 @@ describe Domain::Fa::Job::UserPageJob do
user = Domain::Fa::User.find_by(url_name: "meesh")
expect(user).to_not be_nil
expect(user.avatar.file_uri.to_s).to eq(
"https://a.furaffinity.net/1635789297/meesh.gif"
"https://a.furaffinity.net/1635789297/meesh.gif",
)
expect(SpecUtil.enqueued_jobs(Domain::Fa::Job::UserAvatarJob)).to match(
[including(args: [{ user: user, caused_by_entry: @log_entries[0] }])]
[including(args: [{ user: user, caused_by_entry: @log_entries[0] }])],
)
end
end
@@ -43,8 +46,8 @@ describe Domain::Fa::Job::UserPageJob do
status_code: 200,
content_type: "text/html",
contents:
SpecUtil.read_fixture_file("domain/fa/job/user_page_marsdust.html")
}
SpecUtil.read_fixture_file("domain/fa/job/user_page_marsdust.html"),
},
]
end

@@ -53,7 +56,7 @@ describe Domain::Fa::Job::UserPageJob do
user = Domain::Fa::User.find_by(url_name: "marsdust")
expect(user).to_not be_nil
expect(user.avatar.file_uri.to_s).to eq(
"https://a.furaffinity.net/1424255659/marsdust.gif"
"https://a.furaffinity.net/1424255659/marsdust.gif",
)
expect(user.num_favorites).to eq(0)
end

@@ -41,8 +41,8 @@ describe Domain::Inkbunny::Job::FileJob do

describe "#perform" do
it "downloads the file if url_str is present" do
hle = SpecUtil.create_http_log_entry
SpecUtil.init_http_client_mock(
hle = create(:http_log_entry)
HttpClientMockHelpers.init_http_client_mock(
http_client_mock,
[
{
@@ -63,7 +63,7 @@ describe Domain::Inkbunny::Job::FileJob do

it "marks the post as errored if the download fails" do
hles =
SpecUtil.init_http_client_mock(
HttpClientMockHelpers.init_http_client_mock(
http_client_mock,
[
{
@@ -91,7 +91,7 @@ describe Domain::Inkbunny::Job::FileJob do

it "recovers from a failed download" do
hles =
SpecUtil.init_http_client_mock(
HttpClientMockHelpers.init_http_client_mock(
http_client_mock,
[
{
@@ -122,7 +122,7 @@ describe Domain::Inkbunny::Job::FileJob do
it "throws on a non-404 error in order to retry later" do
num_retries = 3
hles =
SpecUtil.init_http_client_mock(
HttpClientMockHelpers.init_http_client_mock(
http_client_mock,
[
{
@@ -160,7 +160,7 @@ describe Domain::Inkbunny::Job::FileJob do
end

it "retries a file in error state that hasn't hit retry limit" do
SpecUtil.init_http_client_mock(
HttpClientMockHelpers.init_http_client_mock(
http_client_mock,
[
# First attempt fails

@@ -10,7 +10,7 @@ describe Domain::Inkbunny::Job::LatestPostsJob do

context "the files do not change in the response" do
let! :log_entries do
SpecUtil.init_http_client_mock(
HttpClientMockHelpers.init_http_client_mock(
http_client_mock,
[
{
@@ -85,7 +85,7 @@ describe Domain::Inkbunny::Job::LatestPostsJob do

context "an existing file changed in the response" do
let! :log_entries do
SpecUtil.init_http_client_mock(
HttpClientMockHelpers.init_http_client_mock(
http_client_mock,
[
{

@@ -11,7 +11,7 @@ describe Domain::Inkbunny::Job::UpdatePostsJob do
end

let! :log_entries do
SpecUtil.init_http_client_mock(
HttpClientMockHelpers.init_http_client_mock(
http_client_mock,
[
{
@@ -164,7 +164,7 @@ describe Domain::Inkbunny::Job::UpdatePostsJob do
end

let! :log_entries do
SpecUtil.init_http_client_mock(
HttpClientMockHelpers.init_http_client_mock(
http_client_mock,
[
{
@@ -261,7 +261,7 @@ describe Domain::Inkbunny::Job::UpdatePostsJob do
end

let! :log_entries do
SpecUtil.init_http_client_mock(
HttpClientMockHelpers.init_http_client_mock(
http_client_mock,
[
{

@@ -39,7 +39,7 @@ describe Domain::Inkbunny::Job::UserAvatarJob do

context "when avatar download succeeds" do
let! :log_entries do
SpecUtil.init_http_client_mock(
HttpClientMockHelpers.init_http_client_mock(
http_client_mock,
[
{
@@ -98,7 +98,7 @@ describe Domain::Inkbunny::Job::UserAvatarJob do

context "when avatar download returns 404" do
let! :log_entries do
SpecUtil.init_http_client_mock(
HttpClientMockHelpers.init_http_client_mock(
http_client_mock,
[
{
@@ -155,7 +155,7 @@ describe Domain::Inkbunny::Job::UserAvatarJob do

context "when avatar download fails with error" do
let! :log_entries do
SpecUtil.init_http_client_mock(
HttpClientMockHelpers.init_http_client_mock(
http_client_mock,
[
{

@@ -19,7 +19,7 @@ RSpec.describe Domain::Inkbunny::Job::UserGalleryJob do

context "when fetching posts" do
let!(:log_entries) do
SpecUtil.init_http_client_mock(
HttpClientMockHelpers.init_http_client_mock(
http_client_mock,
[
{
@@ -74,7 +74,7 @@ RSpec.describe Domain::Inkbunny::Job::UserGalleryJob do

context "when fetching posts with null last_file_update_datetime" do
let!(:log_entries) do
SpecUtil.init_http_client_mock(
HttpClientMockHelpers.init_http_client_mock(
http_client_mock,
[
{

@@ -16,6 +16,7 @@
# See https://rubydoc.info/gems/rspec-core/RSpec/Core/Configuration
require "./spec/helpers/spec_helpers"
require "./spec/helpers/debug_helpers"
require "./spec/helpers/http_client_mock_helpers"
require "./spec/support/matchers/html_matchers"
require "./spec/support/matchers/job_matchers"
require "rspec/sorbet"

@@ -1,15 +1,28 @@
# typed: false
# typed: strict
require "rspec/mocks"

class SpecUtil
extend T::Sig
include HasColorLogger
extend RSpec::Mocks::ExampleMethods
extend RSpec::Matchers

sig { params(length: T.nilable(Integer)).returns(String) }
def self.random_string(length = 8)
(0...length).map { (65 + rand(26)).chr }.join
end

sig do
params(
expected_method: Symbol,
expected_url: String,
request_headers: T::Hash[String, String],
response_code: Integer,
response_time_ms: Integer,
response_headers: T::Hash[String, String],
response_body: String,
).returns(Scraper::CurlHttpPerformer)
end
def self.mock_http_performer(
expected_method,
expected_url,
@@ -21,122 +34,41 @@ class SpecUtil
)
mock = instance_double("Scraper::CurlHttpPerformer")
allow(mock).to receive(:is_a?).with(String).and_return(false)
allow(mock).to receive(:is_a?).with(Scraper::CurlHttpPerformer).and_return(
true,
)
allow(mock).to receive(:do_request).with(
expected_method,
expected_url,
request_headers,
).and_return(
Scraper::CurlHttpPerformer::Response.new(
response_code,
response_headers,
response_time_ms,
response_body,
response_code: response_code,
response_headers: response_headers,
response_time_ms: response_time_ms,
body: response_body,
),
)
mock
end

def self.build_http_log_entry(
uri: "http://example.com/",
status_code: 200,
content_type: "text/plain",
contents: nil
)
entry =
::HttpLogEntry.new(
{
uri: uri,
verb: :get,
performed_by: "direct",
status_code: status_code,
response_time_ms: rand(20..100),
request_headers: create_http_headers,
response_headers: create_http_headers,
response:
build_blob_entry(content_type: content_type, contents: contents),
content_type: content_type,
requested_at: Time.now,
},
)
raise entry.errors.full_messages.join(", ") unless entry.valid?

entry
end
def self.create_http_log_entry(...)
model = build_http_log_entry(...)
model.save!
model
end

def self.build_blob_entry(content_type: "text/plain", contents: nil)
BlobEntry.find_or_build(
content_type: content_type,
contents: contents || random_string(1024),
)
end

def self.create_http_headers
::HttpLogEntryHeader.find_or_create(
headers: {
test_header_key: "test header value #{random_string(16)}",
},
)
end

def self.init_http_client_mock(http_client_mock, requests)
log_entries = []

requests.each do |request|
log_entry =
SpecUtil.build_http_log_entry(
uri: request[:uri],
contents: request[:contents],
content_type: request[:content_type],
status_code: request[:status_code] || 200,
)
log_entry.save!
log_entries << log_entry

caused_by_entry = nil
if request[:caused_by_entry_idx]
caused_by_entry = log_entries[request[:caused_by_entry_idx]]
elsif request[:caused_by_entry]
caused_by_entry = request[:caused_by_entry]
end

method = request[:method] || :get
expect(http_client_mock).to(
receive(method).with(
log_entry.uri.to_s,
http_client_opts_with(
caused_by_entry: caused_by_entry,
use_http_cache: request[:use_http_cache],
),
) do |uri, opts|
logger.info "[mock http client] [#{method}] [#{uri}] [#{opts.inspect.truncate(80)}]"
Scraper::HttpClient::Response.new(
status_code: log_entry.status_code,
body: log_entry.response.contents,
log_entry: log_entry,
)
end,
)
end

log_entries
end

sig { params(path: String, mode: String).returns(String) }
def self.read_fixture_file(path, mode: "rt")
File.read(Rails.root.join("test/fixtures/files", path), mode: mode)
end

sig do
params(job_class: T.nilable(T.class_of(Scraper::JobBase))).returns(
T::Array[T.untyped],
)
end
def self.enqueued_jobs(job_class = nil)
GoodJob::Job
.order(created_at: :asc)
.all
.map do |job|
{
job: job.job_class.constantize,
job: T.must(job.job_class).constantize,
queue: job.queue_name,
priority: job.priority,
args:
@@ -149,23 +81,38 @@ class SpecUtil
.filter { |job| job_is_class(job_class, job) }
end

sig do
params(job_class: T.nilable(T.class_of(Scraper::JobBase))).returns(
T::Array[T.untyped],
)
end
def self.enqueued_job_args(job_class = nil)
enqueued_jobs(job_class).map { |job| job[:args][0] }
end

sig do
params(job_class: T.nilable(T.class_of(Scraper::JobBase))).returns(
T::Array[T.untyped],
)
end
def self.clear_enqueued_jobs!(job_class = nil)
rel = GoodJob::Job
rel = rel.where(job_class: job_class.name) if job_class
rel.destroy_all
end

def self.shift_jobs(job_class = nil, by = 1)
by.times do
job = enqueued_jobs.find { |job| job_is_class(job_class, job) }
job[:good_job].destroy if job
end
sig do
params(
job_class:
T.nilable(
T.any(
T.class_of(Scraper::JobBase),
T::Array[T.class_of(Scraper::JobBase)],
),
),
job: T.untyped,
).returns(T::Boolean)
end

def self.job_is_class(job_class, job)
if job_class.nil?
true
@@ -175,46 +122,4 @@ class SpecUtil
job_class == job[:job]
end
end

def self.build_domain_fa_user(name: nil, url_name: nil)
name ||= random_string
url_name ||= Domain::Fa::User.name_to_url_name(name)
Domain::Fa::User.new(name: name, url_name: url_name)
end

def self.create_domain_fa_user(...)
build_domain_fa_user(...).tap { |model| model.save! }
end

def self.build_domain_fa_post(creator: nil, fa_id: nil, created_at: nil)
@last_fa_id ||= 0
@last_fa_id += 1
Domain::Fa::Post.new(
creator: creator || build_domain_fa_user,
fa_id: fa_id || @last_fa_id,
created_at: created_at,
)
end

def self.create_domain_fa_post(...)
build_domain_fa_post(...).tap { |model| model.save! }
end

def self.build_e621_post(
e621_id: nil,
file_url_str: nil,
md5: nil,
created_at: nil
)
Domain::E621::Post.new(
e621_id: e621_id || 10,
file_url_str: file_url_str,
md5: md5,
created_at: created_at,
)
end

def self.create_e621_post(...)
build_e621_post(...).tap { |model| model.save! }
end
end

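The builder methods removed here (build_http_log_entry, build_domain_fa_user, and friends) are replaced throughout the specs by FactoryBot factories such as :http_log_entry, :http_log_entry_header, and :blob_entry, which are not part of this diff. A hypothetical sketch of what the :http_log_entry factory would need to cover, inferred only from the attributes the old builder set (the definition below is an assumption, not the project's actual factory):

FactoryBot.define do
  factory :http_log_entry do
    uri { "http://example.com/" }
    verb { :get }
    performed_by { "direct" }
    status_code { 200 }
    content_type { "text/plain" }
    response_time_ms { rand(20..100) }
    requested_at { Time.now }
    request_headers { build(:http_log_entry_header) }
    response_headers { build(:http_log_entry_header) }
    response { build(:blob_entry) }
  end
end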