# frozen_string_literal: true

require 'httparty'

module Pretalx
  # See https://c3voc.de/wiki/schedule for more information about the format
  # Schema: https://c3voc.de/schedule/schema.json
  # For the different sources from Hub, see https://c3voc.de/wiki/events:38c3:schedule
  class ImportJob < ApplicationJob
    queue_as :default

    include ActionView::Helpers

    # Fetches the schedule JSON from +url+ and mirrors it into the local
    # Stage/Session/Speaker tables for +conference+. Sessions that disappeared
    # from the schedule are parked on a synthetic "Canceled talk" stage so they
    # stay accessible. Filedrop comments/files are merged in per session when
    # +filedrop_url+ is configured. Logs and returns early on fetch/JSON errors.
    def import_schedule(conference, url, filedrop_url)
      response = HTTParty.get(url)
      return Rails.logger.error "Failed to fetch schedule from #{url}" unless response.success?

      schedule = JSON.parse(response.body)
      unless schedule.dig('schedule', 'conference', 'rooms') && schedule.dig('schedule', 'conference', 'days')
        return Rails.logger.error "Incomplete JSON received from #{url}"
      end

      filedrop_index = fetch_filedrop_index(filedrop_url)

      # We keep a local hash of the stages, because the sessions reference
      # stages by name instead of id.
      stages = sync_stages(conference, schedule['schedule']['conference']['rooms'])
      canceled_stage = build_canceled_stage(conference)

      # Collect the guids present in the current version of the Fahrplan so we
      # can detect (and cancel) sessions that were removed from it.
      existing_sessions = []
      schedule['schedule']['conference']['days'].each do |day_data|
        day_data['rooms'].each do |stage_name, stage_data|
          stage = stages[stage_name]
          stage_data.each do |session_data|
            existing_sessions << session_data['guid']
            import_session(conference, stage, session_data, filedrop_index, filedrop_url)
          end
        end
      end

      # Anything previously imported but absent from this schedule version is
      # considered canceled and moved to the dummy stage.
      Session.where(conference:).where.not(ref_id: existing_sessions).where(stage: conference.relevant_stages).each do |canceled|
        canceled.stage = canceled_stage
        canceled.save!
      end
    end

    # Links sessions to their Engelsystem shifts (angel type 34): a shift
    # matches a session when it starts 15 minutes before the session on the
    # same stage. Stores the shift id and URL on the session. No-op when the
    # Engelsystem API is unreachable.
    def import_engelsystem_refs(conference, engelsystem_url)
      data = fetch_engelsystem(engelsystem_url, "angeltypes/34/shifts")
      return unless data

      # Index shifts by their (time-zone adjusted) start time for O(1) lookup.
      shifts = data.each_with_object({}) do |shift, hash|
        starts_at = parse_datetime_or_nil(conference, shift['starts_at'])
        (hash[starts_at] ||= []) << shift
      end

      Session.joins(:conference).where(conference:).each do |session|
        shifts_at_time = shifts[session.starts_at - 15.minutes]
        next if shifts_at_time.nil?

        shifts_at_time.each do |shift|
          next unless session.stage.name == shift.dig("location", "name")

          session.engelsystem_id = shift["id"]
          session.engelsystem_url = shift["url"]
          # NOTE: deliberately `save` (not `save!`) — a validation failure on
          # one session should not abort the whole import run.
          session.save
          break
        end
      end
    end

    # Job entry point: imports schedule and Engelsystem references for the
    # conference identified by +conference_slug+, records a RevisionSet, and
    # pings the configured heartbeat URL (if any) once everything succeeded.
    def perform(conference_slug, *args)
      conference = Conference.find_by(slug: conference_slug)
      import_schedule(conference, conference.data['schedule_url'], conference.data['filedrop_url'])
      import_engelsystem_refs(conference, conference.data['engelsystem_url'])
      RevisionSet.create!(conference:)
      heartbeat = conference.data['heartbeat_url']
      HTTParty.get(heartbeat) if heartbeat.present?
    end

    private

    # Creates/updates one Stage per room in the schedule and returns a
    # name => Stage hash (sessions reference stages by name, not id).
    def sync_stages(conference, rooms)
      rooms.each_with_object({}) do |stage_data, stages|
        stages[stage_data['name']] = Stage.find_or_initialize_by(conference:, ref_id: stage_data['guid']).tap do |stage_|
          stage_.name = stage_data['name']
          stage_.save!
        end
      end
    end

    # This is where canceled sessions are moved to, so we can still easily
    # access them.
    def build_canceled_stage(conference)
      Stage.find_or_initialize_by(conference:, ref_id: 'c3lingo_canceled').tap do |stage_|
        stage_.name = 'Canceled talk'
        stage_.description = 'A dummy stage where talks move to when they disappear from the Fahrplan'
        stage_.weight = 1000 # Sort it all the way to the right
        stage_.save!
      end
    end

    # Creates or updates a single Session (and its Speakers) from one schedule
    # entry, merging any filedrop data known for it.
    def import_session(conference, stage, session_data, filedrop_index, filedrop_url)
      Session.find_or_initialize_by(conference:, ref_id: session_data['guid']).tap do |session|
        session.stage = stage
        session.title = session_data['title']
        session.language = session_data['language']
        session.description = simple_format(session_data['abstract']) + simple_format(session_data['description'])
        session.session_format = session_data['type']
        session.track = session_data['track']
        session.starts_at = session_data['date']
        # Duration is a "HH:MM" string; derive the end time from it.
        hours, minutes = session_data['duration'].split(":").map(&:to_i)
        session.ends_at = session.starts_at + hours.hours + minutes.minutes
        session.url = session_data['url']
        session.speakers = session_data['persons'].map do |speaker_data|
          Speaker.find_or_initialize_by(ref_id: speaker_data['guid'], conference:).tap do |speaker|
            speaker.name = speaker_data['name'] || speaker_data['public_name']
            speaker.description = simple_format(speaker_data['biography'])
            speaker.save!
          end
        end
        session.recorded = !session_data.fetch('do_not_record', false)
        update_filedrop_data(session, filedrop_index[session.ref_id], filedrop_url) if filedrop_index[session.ref_id]
        session.save!
      end
    end

    # GETs +endpoint+ from the Engelsystem API and returns the parsed "data"
    # payload, or nil on a non-success response or any request/parse error.
    def fetch_engelsystem(engelsystem_url, endpoint)
      response = HTTParty.get(
        engelsystem_url + endpoint,
        headers: {
          'Accept' => 'application/json',
          "x-api-key" => fetch_credential("engelsystem_token")
        },
        timeout: 10
      )
      response.success? ? JSON.parse(response.body)["data"] : nil
    rescue => e
      Rails.logger.warn("Engelsystem response for #{endpoint} failed: #{e.message}")
      nil
    end

    # Downloads the filedrop index and returns it as a hash keyed by talk id.
    # Returns {} when no filedrop is configured or on any error.
    def fetch_filedrop_index(filedrop_url)
      return {} unless filedrop_url

      begin
        response = HTTParty.get(
          filedrop_url,
          basic_auth: {
            username: fetch_credential("filedrop_user"),
            password: fetch_credential("filedrop_password")
          },
          headers: { 'Accept' => 'application/json' },
          timeout: 30
        )
        data = JSON.parse(response.body)
      rescue => e
        # BUGFIX: previously logged `session.ref_id`, which is not in scope
        # here and raised NameError on any filedrop failure; log the URL.
        Rails.logger.warn("Filedrop response for #{filedrop_url} failed: #{e.message}")
        return {}
      end

      unless data["talks"].is_a?(Array)
        Rails.logger.warn("Filedrop index was incomplete")
        return {}
      end

      data["talks"].each_with_object({}) do |item, hash|
        hash[item["id"]] = item
      end
    end

    # Synchronizes a session's filedrop comments and files with the filedrop
    # JSON: entries missing from the JSON are deleted locally, new ones are
    # created, and file payloads are downloaded from +filedrop_url+.
    def update_filedrop_data(session, filedrop_data, filedrop_url)
      existing_comments = session.filedrop_comments.pluck(:body)
      new_comments = filedrop_data["comments"]&.pluck("body") || []
      # Remove comments not in the JSON file
      (existing_comments - new_comments).each do |body|
        session.filedrop_comments.where(body: body).destroy_all
      end
      # Add or update comments
      filedrop_data["comments"]&.each do |comment_data|
        session.filedrop_comments.find_or_initialize_by(body: comment_data['body']).tap do |comment|
          comment.orig_created = parse_datetime_or_nil(session.conference, comment_data['meta']['created'])
          comment.save!
        end
      end

      # Files are identified by (name, checksum) so a changed file is treated
      # as delete + re-add.
      existing_files = session.filedrop_files.pluck(:name, :checksum)
      new_files = filedrop_data['files']&.map { |d| [d['name'], d.dig('meta', 'hash')] } || []
      # Remove files not in the JSON file
      (existing_files - new_files).each do |name, checksum|
        session.filedrop_files.where(name: name, checksum: checksum).destroy_all
      end
      # Add or update files
      filedrop_data['files']&.each do |file_data|
        session.filedrop_files.find_or_initialize_by(name: file_data['name'], checksum: file_data['meta']['hash']).tap do |file|
          file.size = file_data['meta']['size']
          file.orig_created = parse_datetime_or_nil(session.conference, file_data['meta']['created'])
          if file_data['url'].blank?
            Rails.logger.warn("Skipping incomplete file #{file.name} for #{session.ref_id}")
          else
            file.download(filedrop_url + file_data['url'].sub(/\A\//, ''))
            file.save
          end
        end
      end
    end

    # Parses an ISO8601 timestamp into the conference's time zone.
    # Returns nil for blank or malformed input.
    def parse_datetime_or_nil(conference, datetime_string)
      DateTime.iso8601(datetime_string).in_time_zone(conference.time_zone)
    rescue
      nil
    end
  end
end