From 944de8a6c92cec50d99d460e076fe9bab6fc83ec Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" Date: Mon, 14 Jan 2019 10:13:34 +0100 Subject: Bump dotenv-rails from 2.5.0 to 2.6.0 (#9803) Bumps [dotenv-rails](https://github.com/bkeepers/dotenv) from 2.5.0 to 2.6.0. - [Release notes](https://github.com/bkeepers/dotenv/releases) - [Changelog](https://github.com/bkeepers/dotenv/blob/master/Changelog.md) - [Commits](https://github.com/bkeepers/dotenv/commits) Signed-off-by: dependabot[bot] --- Gemfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'Gemfile') diff --git a/Gemfile b/Gemfile index ec51950ca..2204426ee 100644 --- a/Gemfile +++ b/Gemfile @@ -13,7 +13,7 @@ gem 'hamlit-rails', '~> 0.2' gem 'pg', '~> 1.1' gem 'makara', '~> 0.4' gem 'pghero', '~> 2.2' -gem 'dotenv-rails', '~> 2.5' +gem 'dotenv-rails', '~> 2.6' gem 'aws-sdk-s3', '~> 1.30', require: false gem 'fog-core', '<= 2.1.0' -- cgit From 5ac425ad28b6df9ec862851a9b326aaa4ab5c28c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" Date: Mon, 14 Jan 2019 10:18:55 +0100 Subject: Bump sidekiq-unique-jobs from 5.0.10 to 6.0.8 (#9801) Bumps [sidekiq-unique-jobs](https://github.com/mhenrixon/sidekiq-unique-jobs) from 5.0.10 to 6.0.8. 
- [Release notes](https://github.com/mhenrixon/sidekiq-unique-jobs/releases) - [Changelog](https://github.com/mhenrixon/sidekiq-unique-jobs/blob/master/CHANGELOG.md) - [Commits](https://github.com/mhenrixon/sidekiq-unique-jobs/compare/v5.0.10...v6.0.8) Signed-off-by: dependabot[bot] --- Gemfile | 2 +- Gemfile.lock | 7 ++++--- 2 files changed, 5 insertions(+), 4 deletions(-) (limited to 'Gemfile') diff --git a/Gemfile b/Gemfile index 2204426ee..edec721d6 100644 --- a/Gemfile +++ b/Gemfile @@ -75,7 +75,7 @@ gem 'rqrcode', '~> 0.10' gem 'sanitize', '~> 5.0' gem 'sidekiq', '~> 5.2' gem 'sidekiq-scheduler', '~> 3.0' -gem 'sidekiq-unique-jobs', '~> 5.0' +gem 'sidekiq-unique-jobs', '~> 6.0' gem 'sidekiq-bulk', '~>0.2.0' gem 'simple-navigation', '~> 4.0' gem 'simple_form', '~> 4.1' diff --git a/Gemfile.lock b/Gemfile.lock index f728d0850..634054d83 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -563,8 +563,9 @@ GEM rufus-scheduler (~> 3.2) sidekiq (>= 3) tilt (>= 1.4.0) - sidekiq-unique-jobs (5.0.10) - sidekiq (>= 4.0, <= 6.0) + sidekiq-unique-jobs (6.0.8) + concurrent-ruby (~> 1.0, >= 1.0.5) + sidekiq (>= 4.0, < 6.0) thor (~> 0) simple-navigation (4.0.5) activesupport (>= 2.3.2) @@ -750,7 +751,7 @@ DEPENDENCIES sidekiq (~> 5.2) sidekiq-bulk (~> 0.2.0) sidekiq-scheduler (~> 3.0) - sidekiq-unique-jobs (~> 5.0) + sidekiq-unique-jobs (~> 6.0) simple-navigation (~> 4.0) simple_form (~> 4.1) simplecov (~> 0.16) -- cgit From ee5e24807fb59c398e7dfca807c51242269c7588 Mon Sep 17 00:00:00 2001 From: Eugen Rochko Date: Tue, 15 Jan 2019 09:24:35 +0100 Subject: Add `tootctl domains crawl` (#9809) --- Gemfile | 2 + Gemfile.lock | 5 +- lib/mastodon/domains_cli.rb | 118 ++++++++++++++++++++++++++++++++++++++++++++ 3 files changed, 123 insertions(+), 2 deletions(-) (limited to 'Gemfile') diff --git a/Gemfile b/Gemfile index edec721d6..fdfac330f 100644 --- a/Gemfile +++ b/Gemfile @@ -145,3 +145,5 @@ group :production do gem 'lograge', '~> 0.10' gem 'redis-rails', '~> 5.0' end + +gem 
'concurrent-ruby', require: false diff --git a/Gemfile.lock b/Gemfile.lock index 677281603..f96d24556 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -231,7 +231,7 @@ GEM fuubar (2.3.2) rspec-core (~> 3.0) ruby-progressbar (~> 1.4) - get_process_mem (0.2.2) + get_process_mem (0.2.3) globalid (0.4.1) activesupport (>= 4.2.0) goldfinger (2.1.0) @@ -674,6 +674,7 @@ DEPENDENCIES chewy (~> 5.0) cld3 (~> 3.2.3) climate_control (~> 0.2) + concurrent-ruby derailed_benchmarks devise (~> 4.5) devise-two-factor (~> 3.0) @@ -773,4 +774,4 @@ RUBY VERSION ruby 2.5.3p105 BUNDLED WITH - 1.16.6 + 1.17.3 diff --git a/lib/mastodon/domains_cli.rb b/lib/mastodon/domains_cli.rb index 16e298584..be68ae84b 100644 --- a/lib/mastodon/domains_cli.rb +++ b/lib/mastodon/domains_cli.rb @@ -1,5 +1,6 @@ # frozen_string_literal: true +require 'concurrent' require_relative '../../config/boot' require_relative '../../config/environment' require_relative 'cli_helper' @@ -32,5 +33,122 @@ module Mastodon say say("Removed #{removed} accounts#{dry_run}", :green) end + + option :concurrency, type: :numeric, default: 50, aliases: [:c] + option :silent, type: :boolean, default: false, aliases: [:s] + option :format, type: :string, default: 'summary', aliases: [:f] + desc 'crawl [START]', 'Crawl all known peers, optionally beginning at START' + long_desc <<-LONG_DESC + Crawl the fediverse by using the Mastodon REST API endpoints that expose + all known peers, and collect statistics from those peers, as long as those + peers support those API endpoints. When no START is given, the command uses + this server's own database of known peers to seed the crawl. + + The --concurrency (-c) option controls the number of threads performing HTTP + requests at the same time. More threads means the crawl may complete faster. + + The --silent (-s) option controls progress output. + + The --format (-f) option controls how the data is displayed at the end. By + default (`summary`), a summary of the statistics is returned. 
The other options + are `domains`, which returns a newline-delimited list of all discovered peers, + and `json`, which dumps all the aggregated data raw. + LONG_DESC + def crawl(start = nil) + stats = Concurrent::Hash.new + processed = Concurrent::AtomicFixnum.new(0) + failed = Concurrent::AtomicFixnum.new(0) + start_at = Time.now.to_f + seed = start ? [start] : Account.remote.domains + + pool = Concurrent::ThreadPoolExecutor.new(min_threads: 0, max_threads: options[:concurrency], idletime: 10, auto_terminate: true, max_queue: 0) + + work_unit = ->(domain) do + next if stats.key?(domain) + stats[domain] = nil + processed.increment + + begin + Request.new(:get, "https://#{domain}/api/v1/instance").perform do |res| + next unless res.code == 200 + stats[domain] = Oj.load(res.to_s) + end + + Request.new(:get, "https://#{domain}/api/v1/instance/peers").perform do |res| + next unless res.code == 200 + + Oj.load(res.to_s).reject { |peer| stats.key?(peer) }.each do |peer| + pool.post(peer, &work_unit) + end + end + + Request.new(:get, "https://#{domain}/api/v1/instance/activity").perform do |res| + next unless res.code == 200 + stats[domain]['activity'] = Oj.load(res.to_s) + end + + say('.', :green, false) unless options[:silent] + rescue StandardError + failed.increment + say('.', :red, false) unless options[:silent] + end + end + + seed.each do |domain| + pool.post(domain, &work_unit) + end + + sleep 20 + sleep 20 until pool.queue_length.zero? + + pool.shutdown + pool.wait_for_termination(20) + ensure + pool.shutdown + + say unless options[:silent] + + case options[:format] + when 'summary' + stats_to_summary(stats, processed, failed, start_at) + when 'domains' + stats_to_domains(stats) + when 'json' + stats_to_json(stats) + end + end + + private + + def stats_to_summary(stats, processed, failed, start_at) + stats.compact! + + total_domains = stats.size + total_users = stats.reduce(0) { |sum, (_key, val)| val.is_a?(Hash) && val['stats'].is_a?(Hash) ? 
sum + val['stats']['user_count'].to_i : sum } + total_active = stats.reduce(0) { |sum, (_key, val)| val.is_a?(Hash) && val['activity'].is_a?(Array) && val['activity'].size > 2 && val['activity'][1].is_a?(Hash) ? sum + val['activity'][1]['logins'].to_i : sum } + total_joined = stats.reduce(0) { |sum, (_key, val)| val.is_a?(Hash) && val['activity'].is_a?(Array) && val['activity'].size > 2 && val['activity'][1].is_a?(Hash) ? sum + val['activity'][1]['registrations'].to_i : sum } + + say("Visited #{processed.value} domains, #{failed.value} failed (#{(Time.now.to_f - start_at).round}s elapsed)", :green) + say("Total servers: #{total_domains}", :green) + say("Total registered: #{total_users}", :green) + say("Total active last week: #{total_active}", :green) + say("Total joined last week: #{total_joined}", :green) + end + + def stats_to_domains(stats) + say(stats.keys.join("\n")) + end + + def stats_to_json(stats) + stats.compact! + say(Oj.dump(stats)) + end end end -- cgit