author     Christian Pointner <equinox@helsinki.at>   2024-01-10 10:33:48 (GMT)
committer  Christian Pointner <equinox@helsinki.at>   2024-01-10 10:33:48 (GMT)
commit     192c08b70372d6b54ad9e130f3195b092f2fb40c (patch)
tree       8745226e8ee38e5714a310cd1b52eab7a7f8a472 /graveyard
parent     a81092d119a9996e71bc1e694edc38deaae247ab (diff)
move a bunch of importers to graveyard
Diffstat (limited to 'graveyard')
-rw-r--r--  graveyard/INFO              52
-rw-r--r--  graveyard/cron.disabled     32
-rwxr-xr-x  graveyard/rhautoimport-ba   212
-rwxr-xr-x  graveyard/rhautoimport-btl  178
-rwxr-xr-x  graveyard/rhautoimport-cb   208
-rwxr-xr-x  graveyard/rhautoimport-dm   212
-rwxr-xr-x  graveyard/rhautoimport-lr   226
-rwxr-xr-x  graveyard/rhautoimport-mz   212
8 files changed, 1332 insertions, 0 deletions
diff --git a/graveyard/INFO b/graveyard/INFO
index 6ad5a78..cdf8b44 100644
--- a/graveyard/INFO
+++ b/graveyard/INFO
@@ -87,3 +87,55 @@
Upload: same day
Search criterion: cba:broadcastDate if set, otherwise cba:productionDate
RH: weekly, Friday, 08:00-09:00
+
+- (btl) Between the Lines
+ http://www.btlonline.org/rss/btl128.xml
+
+ Original: weekly, Wednesdays
+ Upload: always on the same day
+ Search criterion: RSS issue date, 'broadcast-affiliate-version' in the file name
+ new episodes are released weekly; we air every second one
+ RH: biweekly, week 2/4, Monday, 07:30-08:00
+
+- (lr) literadio on air:
+ https://cba.media/podcast/aufdraht-literadio-on-air/feed
+
+ Original: no fixed schedule
+ Upload: no fixed schedule
+ Search criterion: newest entry that has not been imported yet
+ if no new entry has been found by the Friday before the next RH slot
+ -> last attempt and nothing is imported.
+ RH: every 4 weeks, week 3, Tuesday, 16:30-17:00
+ every 4 weeks, week 2, Wednesday, 14:30-15:00, rebroadcast
+
+- (dm) das mensch. gender_queer on air:
+ https://cba.media/podcast/das-mensch-gender_queer-on-air/feed
+
+ Original: second Thursday of the month
+ Upload: usually 2-3 days later
+ Search criterion: cba:broadcastDate if set, otherwise cba:productionDate
+ RH: every 4 weeks, week 2, Tuesday, 13:00-14:00
+
+- (ba) barrierefrei aufgerollt
+ https://cba.media/podcast/barrierefrei-aufgerollt/feed
+
+ Original: first Sunday of the month
+ Upload: "we try to upload between Monday and Wednesday"
+ Search criterion: cba:broadcastDate if set, otherwise cba:productionDate
+ RH: every 4 weeks, week 2, Friday, 17:00-17:30
+
+- (cb) Crossing Borders
+ https://cba.media/podcast/crossing-borders/feed
+
+ Original: first Thursday of the month
+ Upload: usually 1-2 days later (outliers in May and June 2020 - but only 5 dates in total so far)
+ Search criterion: cba:broadcastDate if set, otherwise cba:productionDate
+ RH: every 4 weeks, week 1, Sunday, 17:00-18:00
+
+- (mz) Medienzirkus
+ https://cba.media/podcast/medienzirkus/feed
+
+ Original: first Friday of the month
+ Upload: same day, sometimes one or two days earlier
+ Search criterion: cba:broadcastDate if set, otherwise cba:productionDate
+ RH: every 4 weeks, week 4, Friday, 17:00-17:30
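
Note on the "Search criterion" entries above: most of the importers below match a
feed item by its cba:broadcastDate and fall back to cba:productionDate when no
broadcast date is set (the two fields use different date formats). A minimal
standalone sketch of that lookup, using a hypothetical hash in place of the
XML::Feed entry object the scripts actually work with:

#!/usr/bin/perl -w
use strict;
use DateTime::Format::Strptime;

# hypothetical example data; the real scripts read this from the parsed feed entry
my $entry_data = {
    'https://cba.media/help#feeds' => {
        'broadcastDate'  => 'Fri, 05 Jan 2024 17:00:00',
        'productionDate' => 'Fri, 05 Jan 2024',
    },
};
my $cba = $entry_data->{'https://cba.media/help#feeds'};

# prefer cba:broadcastDate, fall back to cba:productionDate (shorter date format)
my $entry_bdt = $cba->{'broadcastDate'};
my $format_string = "%a, %d %b %Y %H:%M:%S";
unless (defined $entry_bdt) {
    $entry_bdt = $cba->{'productionDate'};
    $format_string = "%a, %d %b %Y";
}

my $bdt = DateTime::Format::Strptime::strptime($format_string, $entry_bdt);
printf "entry date: %04d-%02d-%02d\n", $bdt->year, $bdt->month, $bdt->day;
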
diff --git a/graveyard/cron.disabled b/graveyard/cron.disabled
index 4de7141..e5af32f 100644
--- a/graveyard/cron.disabled
+++ b/graveyard/cron.disabled
@@ -60,3 +60,35 @@
06 21,23 * * 3 autoimport /usr/bin/rhautoimport so
06 1,3,5 * * 4 autoimport /usr/bin/rhautoimport so
00 15 * * 4 autoimport /usr/bin/rhautoimport so last
+
+##############
+# Between the Lines (show discontinued)
+48 18,20,21 * * 3 autoimport /usr/bin/rhautoimport btl
+48 0,2,4,6,18,20,21 * * 4-6 autoimport /usr/bin/rhautoimport btl
+48 0,2,4,6,18,20 * * 7 autoimport /usr/bin/rhautoimport btl
+48 21 * * 7 autoimport /usr/bin/rhautoimport btl last
+
+##############
+# literadio on air
+57 2,4,6 * * 4-7 autoimport /usr/bin/rhautoimport lr
+57 8 * * 1 autoimport /usr/bin/rhautoimport lr last
+
+##############
+# das mensch
+06 2,4,6 * * 1-7 autoimport /usr/bin/rhautoimport dm
+06 10 * * 2 autoimport /usr/bin/rhautoimport dm last
+
+##############
+# barrierefrei aufgerollt (show discontinued)
+11 2,4,6 * * 1-7 autoimport /usr/bin/rhautoimport ba
+11 12 * * 5 autoimport /usr/bin/rhautoimport ba last
+
+##############
+# crossing borders (show discontinued)
+42 1,3,5 * * 1-7 autoimport /usr/bin/rhautoimport cb
+42 17 * * 5 autoimport /usr/bin/rhautoimport cb last
+
+##############
+# Medienzirkus (show discontinued)
+23 1,3,5 * * 1-7 autoimport /usr/bin/rhautoimport mz
+23 11 * * 5 autoimport /usr/bin/rhautoimport mz last
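
The disabled cron entries above follow a common pattern: several retry slots per day
over a window of days, plus one final slot that passes 'last' as the importer's first
argument. Each rhautoimport-* script below checks for that argument and, on the final
run, gives up with a "manual import necessary" message instead of waiting for another
retry. A minimal sketch of the convention (the exit-code list is taken from the
scripts; how the autoimport wrapper reacts to the codes is assumed, it is not part of
this commit):

#!/usr/bin/perl -w
use strict;

# final-attempt flag, as passed by the "... last" cron entries above
my $LAST_RUN = 0;
if($#ARGV >= 0 && $ARGV[0] eq 'last') {
    print "!!!This is the last attempt, there won't be a retry on error!!!\n";
    $LAST_RUN = 1;
}
print $LAST_RUN ? "final attempt\n" : "regular attempt, more retries scheduled\n";

# exit codes used by the importers:
#   0  - import succeeded
#   23 - import succeeded, but adding the PV note failed
#   42 - nothing to do yet (file not available / already imported)
#   1  - error; with $LAST_RUN set the scripts ask for a manual import instead
exit 0;
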
diff --git a/graveyard/rhautoimport-ba b/graveyard/rhautoimport-ba
new file mode 100755
index 0000000..8fafe34
--- /dev/null
+++ b/graveyard/rhautoimport-ba
@@ -0,0 +1,212 @@
+#!/usr/bin/perl -w
+#
+#
+# rhautoimport
+#
+# Copyright (C) 2009-2017 Christian Pointner <equinox@helsinki.at>
+#
+# This file is part of rhautoimport.
+#
+# rhautoimport is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# any later version.
+#
+# rhautoimport is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with rhautoimport. If not, see <http://www.gnu.org/licenses/>.
+#
+
+use strict;
+use Date::Calc;
+use DateTime::Format::Strptime;
+use XML::Feed;
+use XML::Feed::Entry;
+use XML::Feed::Content;
+use XML::Feed::Enclosure;
+use URI::URL;
+use HTML::Entities;
+use RHRD::utils;
+
+use lib '/usr/local/share/rhautoimport/';
+use rhautoimport;
+
+my $STAT_FILE = $ENV{'HOME'} . "/rhautoimport-ba.stat";
+my $RSS_URL = "https://cba.media/podcast/barrierefrei-aufgerollt/feed";
+$XML::Feed::MULTIPLE_ENCLOSURES=1;
+my $RD_GROUP = "baufgeroll";
+my $PV_ID = '774';
+my $TITLE = "barrierefrei aufgerollt";
+my $LAST_RUN = 0;
+my $upload_delay_days = 3; # "we try to upload between Monday and Wednesday after the Show"
+
+binmode(STDIN, ":utf8");
+binmode(STDOUT, ":utf8");
+binmode(STDERR, ":utf8");
+
+my $curweek = RHRD::utils::get_rd_week();
+
+if($curweek == 2) {
+ if($#ARGV >= 0 && $ARGV[0] eq 'last') {
+ print "!!!This is the last attempt, there won't be a retry on error!!!\n";
+ $LAST_RUN = 1;
+ }
+}
+rhautoimport::report_title_and_last($TITLE, $LAST_RUN);
+
+
+my @today = Date::Calc::Today();
+print "today: " . Date::Calc::Date_to_Text(@today) . " (Week: " . $curweek . ")\n";
+
+my @import_date = Date::Calc::Standard_to_Business(@today);
+$import_date[2] = 5;
+@import_date = Date::Calc::Business_to_Standard(@import_date);
+
+my $dow = Date::Calc::Day_of_Week(@today);
+if($curweek == 1) {
+ @import_date = Date::Calc::Add_Delta_Days(@import_date, 7);
+} elsif($curweek == 2) {
+ if($dow > 5) {
+ @import_date = Date::Calc::Add_Delta_Days(@import_date, 28);
+ }
+} elsif($curweek == 3) {
+ @import_date = Date::Calc::Add_Delta_Days(@import_date, 21);
+} else {
+ @import_date = Date::Calc::Add_Delta_Days(@import_date, 14);
+}
+print "day of next Radio Helsinki broadcast: " . Date::Calc::Date_to_Text(@import_date) . "\n";
+
+
+my @allowed_dbs = rhautoimport::get_dropboxes($RD_GROUP);
+if(!defined $allowed_dbs[0] && defined $allowed_dbs[1]) {
+ print "$allowed_dbs[1]\n";
+ exit 1;
+}
+
+if(scalar(@allowed_dbs) != 1) {
+ print "found more or less than one Dropbox for this group?!\n";
+ exit 1;
+}
+my $show_id = $allowed_dbs[0]->{'SHOWID'};
+my $show_title = $allowed_dbs[0]->{'SHOWTITLE'};
+
+my @broadcast_date = Date::Calc::Nth_Weekday_of_Month_Year($import_date[0], $import_date[1], 7, 1);
+if(Date::Calc::Delta_Days(@broadcast_date, @import_date) <= 0) {
+ my @tmp_date = Date::Calc::Add_Delta_YM(@broadcast_date, 0, -1);
+ @broadcast_date = Date::Calc::Nth_Weekday_of_Month_Year($tmp_date[0], $tmp_date[1], 7, 1);
+}
+print "day of latest original broadcast before next Radio Helsinki broadcast: " . Date::Calc::Date_to_Text(@broadcast_date) . "\n";
+
+if(Date::Calc::Delta_Days(@broadcast_date, @today) <= $upload_delay_days) {
+ print "File won't be available by now!\n";
+ exit 42;
+}
+
+my $id = sprintf("%04d-%02d-%02d", @import_date);
+my $bd = sprintf("%04d-%02d-%02d", @broadcast_date);
+
+print "looking for files with cba:broadcastDate $bd in RSS Feed (will be using cba:productionDate if cba:broadcastDate is not set)\n";
+print " -> $RSS_URL\n";
+
+my ($result, $feed) = rhautoimport::fetch_parse_rss_cba($RSS_URL);
+unless ($result) {
+ print "Error fetching feed: $feed\n";
+ exit 1;
+}
+
+my $uri = "";
+my $file = "";
+my $sum_title = "";
+my $sum_text = "";
+
+my $i = 0;
+for my $entry ($feed->entries) {
+ $i++;
+ next unless $entry->enclosure;
+
+ my $entry_bdt = $entry->{'entry'}{'https://cba.media/help#feeds'}{'broadcastDate'};
+ my $format_string = "%a, %d %b %Y %H:%M:%S";
+ unless (defined $entry_bdt) {
+ $entry_bdt = $entry->{'entry'}{'https://cba.media/help#feeds'}{'productionDate'};
+ $format_string = "%a, %d %b %Y";
+ }
+ next unless (defined $entry_bdt);
+ my $bdt = DateTime::Format::Strptime::strptime($format_string, $entry_bdt);
+
+ next unless($broadcast_date[0] == $bdt->year && $broadcast_date[1] == $bdt->month && $broadcast_date[2] == $bdt->day);
+
+ my $j = 0;
+ for my $enclosure($entry->enclosure) {
+ $j++;
+ if($enclosure->type eq "audio/mpeg" || $enclosure->type eq "audio/ogg") {
+ print "$i/$j: (" . $enclosure->type . ", " . $enclosure->length . ") " . $enclosure->url . "\n";
+ my $orig_uri = $enclosure->url;
+ if($orig_uri =~ /^(.*)_cut(\.[^\.]+)$/) {
+ $orig_uri = $1 . $2;
+ }
+
+ $uri = new URI::URL($orig_uri);
+ my @path = $uri->path_components;
+ $file = $path[-1];
+
+ my $current_stat = `cat $STAT_FILE`;
+ my ($current_id, $current_file) = $current_stat =~ m/^(.*)\n(.*)/;
+ if($current_id eq $id && $current_file eq $file) {
+ print "Already downloaded file of today\n";
+ exit 42;
+ }
+ if(!rhautoimport::check_file_extension($file)) {
+ print "\n\nThe extension of the matching file '". $file . "' seems to be wrong - manual import necessary!!!\n";
+ print "\n\n --> https://import.helsinki.at/shows/$show_id\n";
+ exit 1;
+ }
+
+ $sum_title = decode_entities($entry->title);
+ $sum_text = decode_entities($entry->content->body);
+ print "summary:\n" . $sum_title . "\n\n" . $sum_text . "\n";
+ last;
+ }
+ }
+ last if $uri ne "";
+}
+if($uri eq "") {
+ print "No Entry found from $bd - ";
+ if($LAST_RUN) {
+ print "giving up, manual import necessary!!!\n";
+ print "\n\n --> https://import.helsinki.at/shows/$show_id\n";
+ } else {
+ print "will retry later\n";
+ }
+ exit 1;
+}
+
+print "\n\nwill import '$uri' to show $show_id, $show_title\n\n";
+
+my ($ret, $log) = rhautoimport::import_uri($show_id, $uri->as_string);
+if($ret) {
+ print "\nImport Error:\n\n";
+ print $log;
+ print "\n\nNot adding PV note!!";
+ exit 1;
+}
+print "\nImport Success:\n\n";
+print $log;
+print "\n";
+my $exit_code = 0;
+($ret, $log) = rhautoimport::pv_add_note($sum_title, $sum_text, $PV_ID, $id, "1");
+print $log;
+if($ret) {
+ print "\nIgnoring failed note import - manual intervention necessary!\n";
+ $exit_code = 23;
+}
+
+unlink($STAT_FILE);
+open(my $fhs, '>', $STAT_FILE);
+print $fhs "$id\n$file";
+close($fhs);
+
+exit $exit_code;
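
A worked example of the broadcast-date calculation used in rhautoimport-ba above: it
takes the first Sunday of the month of the next Radio Helsinki broadcast and steps
back one month if that Sunday does not lie before the import date. The date below is
chosen purely for illustration:

#!/usr/bin/perl -w
use strict;
use Date::Calc;

# hypothetical import date: Friday, 12 Jan 2024
my @import_date = (2024, 1, 12);

# first Sunday (day-of-week 7) of the import month
my @broadcast_date = Date::Calc::Nth_Weekday_of_Month_Year($import_date[0], $import_date[1], 7, 1);

# if that Sunday is not strictly before the import date, use the previous month instead
if(Date::Calc::Delta_Days(@broadcast_date, @import_date) <= 0) {
    my @tmp_date = Date::Calc::Add_Delta_YM(@broadcast_date, 0, -1);
    @broadcast_date = Date::Calc::Nth_Weekday_of_Month_Year($tmp_date[0], $tmp_date[1], 7, 1);
}

printf "original broadcast to import: %04d-%02d-%02d\n", @broadcast_date;
# prints: original broadcast to import: 2024-01-07
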
diff --git a/graveyard/rhautoimport-btl b/graveyard/rhautoimport-btl
new file mode 100755
index 0000000..6649e39
--- /dev/null
+++ b/graveyard/rhautoimport-btl
@@ -0,0 +1,178 @@
+#!/usr/bin/perl -w
+#
+#
+# rhautoimport
+#
+# Copyright (C) 2009-2017 Christian Pointner <equinox@helsinki.at>
+#
+# This file is part of rhautoimport.
+#
+# rhautoimport is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# any later version.
+#
+# rhautoimport is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with rhautoimport. If not, see <http://www.gnu.org/licenses/>.
+#
+
+use strict;
+use Date::Calc;
+use DateTime::Format::Strptime;
+use XML::Feed;
+use XML::Feed::Entry;
+use XML::Feed::Content;
+use XML::Feed::Enclosure;
+use URI::URL;
+use HTML::Entities;
+use RHRD::utils;
+
+use lib '/usr/local/share/rhautoimport/';
+use rhautoimport;
+
+my $STAT_FILE = $ENV{'HOME'} . "/rhautoimport-btl.stat";
+my $RSS_URL = "http://www.btlonline.org/rss/btl128.xml";
+my $RD_GROUP = "betweenlin";
+my $PV_ID = '221';
+my $TITLE="Between the Lines";
+my $LAST_RUN = 0;
+my $upload_delay_days = 0; # files are usually there on the same day
+
+binmode(STDIN, ":utf8");
+binmode(STDOUT, ":utf8");
+binmode(STDERR, ":utf8");
+
+my $curweek = RHRD::utils::get_rd_week();
+if($curweek == 2 || $curweek == 4) {
+ print "won't do anything in week 2 or 4!\n";
+ rhautoimport::report_title_and_last($TITLE, 0);
+ exit 42;
+}
+
+if($#ARGV >= 0 && $ARGV[0] eq 'last') {
+ print "!!!This is the last attempt, there won't be a retry on error!!!\n";
+ $LAST_RUN = 1;
+}
+rhautoimport::report_title_and_last($TITLE, $LAST_RUN);
+
+
+my @allowed_dbs = rhautoimport::get_dropboxes($RD_GROUP);
+if(!defined $allowed_dbs[0] && defined $allowed_dbs[1]) {
+ print "$allowed_dbs[1]\n";
+ exit 1;
+}
+
+if(scalar(@allowed_dbs) != 1) {
+ print "found more or less than one Dropbox for this group?!\n";
+ exit 1;
+}
+my $show_id = $allowed_dbs[0]->{'SHOWID'};
+my $show_title = $allowed_dbs[0]->{'SHOWTITLE'};
+
+my @today = Date::Calc::Today();
+my @import_date = Date::Calc::Add_Delta_Days(@today, 7);
+@import_date = Date::Calc::Standard_to_Business(@import_date);
+$import_date[2] = 1;
+@import_date = Date::Calc::Business_to_Standard(@import_date);
+
+my @broadcast_date = Date::Calc::Add_Delta_Days(@import_date, -5);
+
+print "today: " . Date::Calc::Date_to_Text(@today) . "\n";
+print "day of next Radio Helsinki broadcast: " . Date::Calc::Date_to_Text(@import_date) . "\n";
+print "current broadcast cycle starts/started on: " . Date::Calc::Date_to_Text(@broadcast_date) . "\n";
+
+if(Date::Calc::Delta_Days(@broadcast_date, @today) <= $upload_delay_days) {
+ print "File won't be available by now!\n";
+ exit 42;
+}
+
+my $id = sprintf("%04d-%02d-%02d", @import_date);
+my $bd = sprintf("%04d-%02d-%02d", @broadcast_date);
+my $bdfile = ".*broadcast-affiliates?.*\.mp3";
+
+print "looking for files issued on '$bd' and named like '$bdfile' in RSS Feed\n";
+print " -> $RSS_URL\n";
+
+my ($result, $feed) = rhautoimport::fetch_parse_rss($RSS_URL);
+unless ($result) {
+ print "Error fetching feed: $feed\n";
+ exit 1;
+}
+
+my $uri = "";
+my $file = "";
+my $sum_title = "";
+my $sum_text = "";
+
+for my $entry ($feed->entries) {
+ if($entry->enclosure && ($entry->enclosure->type eq "audio/mpeg" || $entry->enclosure->type eq "audio/mp3")) {
+ next unless $entry->enclosure->url =~ /$bdfile/;
+
+ my $idt = DateTime::Format::Strptime::strptime("%Y-%m-%dT%H:%M:%S", $entry->issued);
+ next unless($broadcast_date[0] == $idt->year && $broadcast_date[1] == $idt->month && $broadcast_date[2] == $idt->day);
+
+ $uri = new URI::URL($entry->enclosure->url);
+ my @path = $uri->path_components;
+ $file = $path[-1];
+
+ my $current_file = `cat $STAT_FILE`;
+ if($current_file eq $file) {
+ print "Already downloaded file of today\n";
+ exit 42;
+ }
+ if(!rhautoimport::check_file_extension($file)) {
+ print "\n\nThe extension of the matching file '". $file . "' seems to be wrong - manual import necessary!!!\n";
+ print "\n\n --> https://import.helsinki.at/shows/$show_id\n";
+ exit 1;
+ }
+
+ $sum_title = decode_entities($entry->title);
+ $sum_title =~ s/ \(128 kbps\)$//;
+ $sum_title =~ s/ \(broadcast affiliate version\)//;
+ $sum_text = decode_entities($entry->content->body);
+ print "summary:\n" . $sum_title . "\n\n" . $sum_text . "\n";
+ last;
+ }
+}
+if($uri eq "") {
+ print "No Entry found for " . Date::Calc::Date_to_Text(@broadcast_date) . " - ";
+ if($LAST_RUN) {
+ print "giving up, manual import necessary!!!\n";
+ print "\n\n --> https://import.helsinki.at/shows/$show_id\n";
+ } else {
+ print "will retry later\n";
+ }
+ exit 1;
+}
+
+print "\n\nwill import '$uri' to show $show_id, $show_title\n\n";
+
+my ($ret, $log) = rhautoimport::import_uri($show_id, $uri->as_string);
+if($ret) {
+ print "\nImport Error:\n\n";
+ print $log;
+ print "\n\nNot adding PV note!!";
+ exit 1;
+}
+print "\nImport Success:\n\n";
+print $log;
+print "\n";
+my $exit_code = 0;
+($ret, $log) = rhautoimport::pv_add_note($sum_title, $sum_text, $PV_ID, $id, "1");
+print $log;
+if($ret) {
+ print "\nIgnoring failed note import - manual intervention necessary!\n";
+ $exit_code = 23;
+}
+
+unlink($STAT_FILE);
+open(my $fhs, '>', $STAT_FILE);
+print $fhs "$file";
+close($fhs);
+
+exit $exit_code;
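
rhautoimport-btl above selects the enclosure whose file name matches the
'broadcast-affiliate' pattern mentioned in the INFO file. A quick check of that
regular expression against two made-up file names (the URLs are illustrative, they
are not taken from the actual feed):

#!/usr/bin/perl -w
use strict;

my $bdfile = ".*broadcast-affiliates?.*\.mp3";

# made-up enclosure URLs, for illustration only
my @urls = (
    "http://www.btlonline.org/btl240110-broadcast-affiliate-version.mp3",
    "http://www.btlonline.org/btl240110-web-version.mp3",
);

for my $url (@urls) {
    print(($url =~ /$bdfile/) ? "match:    $url\n" : "no match: $url\n");
}
# prints:
# match:    http://www.btlonline.org/btl240110-broadcast-affiliate-version.mp3
# no match: http://www.btlonline.org/btl240110-web-version.mp3
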
diff --git a/graveyard/rhautoimport-cb b/graveyard/rhautoimport-cb
new file mode 100755
index 0000000..3a8c73e
--- /dev/null
+++ b/graveyard/rhautoimport-cb
@@ -0,0 +1,208 @@
+#!/usr/bin/perl -w
+#
+#
+# rhautoimport
+#
+# Copyright (C) 2009-2017 Christian Pointner <equinox@helsinki.at>
+#
+# This file is part of rhautoimport.
+#
+# rhautoimport is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# any later version.
+#
+# rhautoimport is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with rhautoimport. If not, see <http://www.gnu.org/licenses/>.
+#
+
+use strict;
+use Date::Calc;
+use DateTime::Format::Strptime;
+use XML::Feed;
+use XML::Feed::Entry;
+use XML::Feed::Content;
+use XML::Feed::Enclosure;
+use URI::URL;
+use HTML::Entities;
+use RHRD::utils;
+
+use lib '/usr/local/share/rhautoimport/';
+use rhautoimport;
+
+my $STAT_FILE = $ENV{'HOME'} . "/rhautoimport-cb.stat";
+my $RSS_URL = "https://cba.media/podcast/crossing-borders/feed";
+$XML::Feed::MULTIPLE_ENCLOSURES=1;
+my $RD_GROUP = "crossingb";
+my $PV_ID = '809';
+my $TITLE = "Crossing Borders";
+my $LAST_RUN = 0;
+my $upload_delay_days = 0;
+
+binmode(STDIN, ":utf8");
+binmode(STDOUT, ":utf8");
+binmode(STDERR, ":utf8");
+
+my $curweek = RHRD::utils::get_rd_week();
+
+if($curweek == 1) {
+ if($#ARGV >= 0 && $ARGV[0] eq 'last') {
+ print "!!!This is the last attempt, there won't be a retry on error!!!\n";
+ $LAST_RUN = 1;
+ }
+}
+rhautoimport::report_title_and_last($TITLE, $LAST_RUN);
+
+
+my @today = Date::Calc::Today();
+print "today: " . Date::Calc::Date_to_Text(@today) . " (Week: " . $curweek . ")\n";
+
+my @import_date = Date::Calc::Standard_to_Business(@today);
+$import_date[2] = 7;
+@import_date = Date::Calc::Business_to_Standard(@import_date);
+
+my $dow = Date::Calc::Day_of_Week(@today);
+if($curweek == 2) {
+ @import_date = Date::Calc::Add_Delta_Days(@import_date, 21);
+} elsif($curweek == 3) {
+ @import_date = Date::Calc::Add_Delta_Days(@import_date, 14);
+} else {
+ @import_date = Date::Calc::Add_Delta_Days(@import_date, 7);
+}
+print "day of next Radio Helsinki broadcast: " . Date::Calc::Date_to_Text(@import_date) . "\n";
+
+
+my @allowed_dbs = rhautoimport::get_dropboxes($RD_GROUP);
+if(!defined $allowed_dbs[0] && defined $allowed_dbs[1]) {
+ print "$allowed_dbs[1]\n";
+ exit 1;
+}
+
+if(scalar(@allowed_dbs) != 1) {
+ print "found more or less than one Dropbox for this group?!\n";
+ exit 1;
+}
+my $show_id = $allowed_dbs[0]->{'SHOWID'};
+my $show_title = $allowed_dbs[0]->{'SHOWTITLE'};
+
+my @broadcast_date = Date::Calc::Nth_Weekday_of_Month_Year($import_date[0], $import_date[1], 4, 1);
+if(Date::Calc::Delta_Days(@broadcast_date, @import_date) <= 0) {
+ my @tmp_date = Date::Calc::Add_Delta_YM(@broadcast_date, 0, -1);
+ @broadcast_date = Date::Calc::Nth_Weekday_of_Month_Year($tmp_date[0], $tmp_date[1], 4, 1);
+}
+print "day of latest original broadcast before next Radio Helsinki broadcast: " . Date::Calc::Date_to_Text(@broadcast_date) . "\n";
+
+if(Date::Calc::Delta_Days(@broadcast_date, @today) <= $upload_delay_days) {
+ print "File won't be available by now!\n";
+ exit 42;
+}
+
+my $id = sprintf("%04d-%02d-%02d", @import_date);
+my $bd = sprintf("%04d-%02d-%02d", @broadcast_date);
+
+print "looking for files with cba:broadcastDate $bd in RSS Feed (will be using cba:productionDate if cba:broadcastDate is not set)\n";
+print " -> $RSS_URL\n";
+
+my ($result, $feed) = rhautoimport::fetch_parse_rss_cba($RSS_URL);
+unless ($result) {
+ print "Error fetching feed: $feed\n";
+ exit 1;
+}
+
+my $uri = "";
+my $file = "";
+my $sum_title = "";
+my $sum_text = "";
+
+my $i = 0;
+for my $entry ($feed->entries) {
+ $i++;
+ next unless $entry->enclosure;
+
+ my $entry_bdt = $entry->{'entry'}{'https://cba.media/help#feeds'}{'broadcastDate'};
+ my $format_string = "%a, %d %b %Y %H:%M:%S";
+ unless (defined $entry_bdt) {
+ $entry_bdt = $entry->{'entry'}{'https://cba.media/help#feeds'}{'productionDate'};
+ $format_string = "%a, %d %b %Y";
+ }
+ next unless (defined $entry_bdt);
+ my $bdt = DateTime::Format::Strptime::strptime($format_string, $entry_bdt);
+
+ next unless($broadcast_date[0] == $bdt->year && $broadcast_date[1] == $bdt->month && $broadcast_date[2] == $bdt->day);
+
+ my $j = 0;
+ for my $enclosure($entry->enclosure) {
+ $j++;
+ if($enclosure->type eq "audio/mpeg" || $enclosure->type eq "audio/ogg") {
+ print "$i/$j: (" . $enclosure->type . ", " . $enclosure->length . ") " . $enclosure->url . "\n";
+ my $orig_uri = $enclosure->url;
+ if($orig_uri =~ /^(.*)_cut(\.[^\.]+)$/) {
+ $orig_uri = $1 . $2;
+ }
+
+ $uri = new URI::URL($orig_uri);
+ my @path = $uri->path_components;
+ $file = $path[-1];
+
+ my $current_stat = `cat $STAT_FILE`;
+ my ($current_id, $current_file) = $current_stat =~ m/^(.*)\n(.*)/;
+ if($current_id eq $id && $current_file eq $file) {
+ print "Already downloaded file of today\n";
+ exit 42;
+ }
+ if(!rhautoimport::check_file_extension($file)) {
+ print "\n\nThe extension of the matching file '". $file . "' seems to be wrong - manual import necessary!!!\n";
+ print "\n\n --> https://import.helsinki.at/shows/$show_id\n";
+ exit 1;
+ }
+
+ $sum_title = decode_entities($entry->title);
+ $sum_text = decode_entities($entry->content->body);
+ print "summary:\n" . $sum_title . "\n\n" . $sum_text . "\n";
+ last;
+ }
+ }
+ last if $uri ne "";
+}
+if($uri eq "") {
+ print "No Entry found from $bd - ";
+ if($LAST_RUN) {
+ print "giving up, manual import necessary!!!\n";
+ print "\n\n --> https://import.helsinki.at/shows/$show_id\n";
+ } else {
+ print "will retry later\n";
+ }
+ exit 1;
+}
+
+print "\n\nwill import '$uri' to show $show_id, $show_title\n\n";
+
+my ($ret, $log) = rhautoimport::import_uri($show_id, $uri->as_string);
+if($ret) {
+ print "\nImport Error:\n\n";
+ print $log;
+ print "\n\nNot adding PV note!!";
+ exit 1;
+}
+print "\nImport Success:\n\n";
+print $log;
+print "\n";
+my $exit_code = 0;
+($ret, $log) = rhautoimport::pv_add_note($sum_title, $sum_text, $PV_ID, $id, "1");
+print $log;
+if($ret) {
+ print "\nIgnoring failed note import - manual intervention necessary!\n";
+ $exit_code = 23;
+}
+
+unlink($STAT_FILE);
+open(my $fhs, '>', $STAT_FILE);
+print $fhs "$id\n$file";
+close($fhs);
+
+exit $exit_code;
diff --git a/graveyard/rhautoimport-dm b/graveyard/rhautoimport-dm
new file mode 100755
index 0000000..dae6ee7
--- /dev/null
+++ b/graveyard/rhautoimport-dm
@@ -0,0 +1,212 @@
+#!/usr/bin/perl -w
+#
+#
+# rhautoimport
+#
+# Copyright (C) 2009-2017 Christian Pointner <equinox@helsinki.at>
+#
+# This file is part of rhautoimport.
+#
+# rhautoimport is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# any later version.
+#
+# rhautoimport is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with rhautoimport. If not, see <http://www.gnu.org/licenses/>.
+#
+
+use strict;
+use Date::Calc;
+use DateTime::Format::Strptime;
+use XML::Feed;
+use XML::Feed::Entry;
+use XML::Feed::Content;
+use XML::Feed::Enclosure;
+use URI::URL;
+use HTML::Entities;
+use RHRD::utils;
+
+use lib '/usr/local/share/rhautoimport/';
+use rhautoimport;
+
+my $STAT_FILE = $ENV{'HOME'} . "/rhautoimport-dm.stat";
+my $RSS_URL = "https://cba.media/podcast/das-mensch-gender_queer-on-air/feed";
+$XML::Feed::MULTIPLE_ENCLOSURES=1;
+my $RD_GROUP = "dasmensch";
+my $PV_ID = '669';
+my $TITLE = "das mensch. gender_queer on air";
+my $LAST_RUN = 0;
+my $upload_delay_days = 2; # files are usually there at least 2-3 days after the original broadcast
+
+binmode(STDIN, ":utf8");
+binmode(STDOUT, ":utf8");
+binmode(STDERR, ":utf8");
+
+my $curweek = RHRD::utils::get_rd_week();
+
+if($curweek == 2) {
+ if($#ARGV >= 0 && $ARGV[0] eq 'last') {
+ print "!!!This is the last attempt, there won't be a retry on error!!!\n";
+ $LAST_RUN = 1;
+ }
+}
+rhautoimport::report_title_and_last($TITLE, $LAST_RUN);
+
+
+my @today = Date::Calc::Today();
+print "today: " . Date::Calc::Date_to_Text(@today) . " (Week: " . $curweek . ")\n";
+
+my @import_date = Date::Calc::Standard_to_Business(@today);
+$import_date[2] = 2;
+@import_date = Date::Calc::Business_to_Standard(@import_date);
+
+my $dow = Date::Calc::Day_of_Week(@today);
+if($curweek == 1) {
+ @import_date = Date::Calc::Add_Delta_Days(@import_date, 7);
+} elsif($curweek == 2) {
+ if($dow > 2) {
+ @import_date = Date::Calc::Add_Delta_Days(@import_date, 28);
+ }
+} elsif($curweek == 3) {
+ @import_date = Date::Calc::Add_Delta_Days(@import_date, 21);
+} else {
+ @import_date = Date::Calc::Add_Delta_Days(@import_date, 14);
+}
+print "day of next Radio Helsinki broadcast: " . Date::Calc::Date_to_Text(@import_date) . "\n";
+
+
+my @allowed_dbs = rhautoimport::get_dropboxes($RD_GROUP);
+if(!defined $allowed_dbs[0] && defined $allowed_dbs[1]) {
+ print "$allowed_dbs[1]\n";
+ exit 1;
+}
+
+if(scalar(@allowed_dbs) != 1) {
+ print "found more or less than one Dropbox for this group?!\n";
+ exit 1;
+}
+my $show_id = $allowed_dbs[0]->{'SHOWID'};
+my $show_title = $allowed_dbs[0]->{'SHOWTITLE'};
+
+my @broadcast_date = Date::Calc::Nth_Weekday_of_Month_Year($import_date[0], $import_date[1], 4, 2);
+if(Date::Calc::Delta_Days(@broadcast_date, @import_date) <= 0) {
+ my @tmp_date = Date::Calc::Add_Delta_YM(@broadcast_date, 0, -1);
+ @broadcast_date = Date::Calc::Nth_Weekday_of_Month_Year($tmp_date[0], $tmp_date[1], 4, 2);
+}
+print "day of latest original broadcast before next Radio Helsinki broadcast: " . Date::Calc::Date_to_Text(@broadcast_date) . "\n";
+
+if(Date::Calc::Delta_Days(@broadcast_date, @today) <= $upload_delay_days) {
+ print "File won't be available by now!\n";
+ exit 42;
+}
+
+my $id = sprintf("%04d-%02d-%02d", @import_date);
+my $bd = sprintf("%04d-%02d-%02d", @broadcast_date);
+
+print "looking for files with cba:broadcastDate $bd in RSS Feed (will be using cba:productionDate if cba:broadcastDate is not set)\n";
+print " -> $RSS_URL\n";
+
+my ($result, $feed) = rhautoimport::fetch_parse_rss_cba($RSS_URL);
+unless ($result) {
+ print "Error fetching feed: $feed\n";
+ exit 1;
+}
+
+my $uri = "";
+my $file = "";
+my $sum_title = "";
+my $sum_text = "";
+
+my $i = 0;
+for my $entry ($feed->entries) {
+ $i++;
+ next unless $entry->enclosure;
+
+ my $entry_bdt = $entry->{'entry'}{'https://cba.media/help#feeds'}{'broadcastDate'};
+ my $format_string = "%a, %d %b %Y %H:%M:%S";
+ unless (defined $entry_bdt) {
+ $entry_bdt = $entry->{'entry'}{'https://cba.media/help#feeds'}{'productionDate'};
+ $format_string = "%a, %d %b %Y";
+ }
+ next unless (defined $entry_bdt);
+ my $bdt = DateTime::Format::Strptime::strptime($format_string, $entry_bdt);
+
+ next unless($broadcast_date[0] == $bdt->year && $broadcast_date[1] == $bdt->month && $broadcast_date[2] == $bdt->day);
+
+ my $j = 0;
+ for my $enclosure($entry->enclosure) {
+ $j++;
+ if($enclosure->type eq "audio/mpeg" || $enclosure->type eq "audio/ogg") {
+ print "$i/$j: (" . $enclosure->type . ", " . $enclosure->length . ") " . $enclosure->url . "\n";
+ my $orig_uri = $enclosure->url;
+ if($orig_uri =~ /^(.*)_cut(\.[^\.]+)$/) {
+ $orig_uri = $1 . $2;
+ }
+
+ $uri = new URI::URL($orig_uri);
+ my @path = $uri->path_components;
+ $file = $path[-1];
+
+ my $current_stat = `cat $STAT_FILE`;
+ my ($current_id, $current_file) = $current_stat =~ m/^(.*)\n(.*)/;
+ if($current_id eq $id && $current_file eq $file) {
+ print "Already downloaded file of today\n";
+ exit 42;
+ }
+ if(!rhautoimport::check_file_extension($file)) {
+ print "\n\nThe extension of the matching file '". $file . "' seems to be wrong - manual import necessary!!!\n";
+ print "\n\n --> https://import.helsinki.at/shows/$show_id\n";
+ exit 1;
+ }
+
+ $sum_title = decode_entities($entry->title);
+ $sum_text = decode_entities($entry->content->body);
+ print "summary:\n" . $sum_title . "\n\n" . $sum_text . "\n";
+ last;
+ }
+ }
+ last if $uri ne "";
+}
+if($uri eq "") {
+ print "No Entry found from $bd - ";
+ if($LAST_RUN) {
+ print "giving up, manual import necessary!!!\n";
+ print "\n\n --> https://import.helsinki.at/shows/$show_id\n";
+ } else {
+ print "will retry later\n";
+ }
+ exit 1;
+}
+
+print "\n\nwill import '$uri' to show $show_id, $show_title\n\n";
+
+my ($ret, $log) = rhautoimport::import_uri($show_id, $uri->as_string);
+if($ret) {
+ print "\nImport Error:\n\n";
+ print $log;
+ print "\n\nNot adding PV note!!";
+ exit 1;
+}
+print "\nImport Success:\n\n";
+print $log;
+print "\n";
+my $exit_code = 0;
+($ret, $log) = rhautoimport::pv_add_note($sum_title, $sum_text, $PV_ID, $id, "1");
+print $log;
+if($ret) {
+ print "\nIgnoring failed note import - manual intervention necessary!\n";
+ $exit_code = 23;
+}
+
+unlink($STAT_FILE);
+open(my $fhs, '>', $STAT_FILE);
+print $fhs "$id\n$file";
+close($fhs);
+
+exit $exit_code;
diff --git a/graveyard/rhautoimport-lr b/graveyard/rhautoimport-lr
new file mode 100755
index 0000000..14ff6f6
--- /dev/null
+++ b/graveyard/rhautoimport-lr
@@ -0,0 +1,226 @@
+#!/usr/bin/perl -w
+#
+#
+# rhautoimport
+#
+# Copyright (C) 2009-2017 Christian Pointner <equinox@helsinki.at>
+#
+# This file is part of rhautoimport.
+#
+# rhautoimport is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# any later version.
+#
+# rhautoimport is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with rhautoimport. If not, see <http://www.gnu.org/licenses/>.
+#
+
+use strict;
+use Date::Calc;
+use DateTime::Format::Strptime;
+use XML::Feed;
+use XML::Feed::Entry;
+use XML::Feed::Content;
+use XML::Feed::Enclosure;
+use URI::URL;
+use HTML::Entities;
+use RHRD::utils;
+
+use lib '/usr/local/share/rhautoimport/';
+use rhautoimport;
+
+my $STAT_FILE = $ENV{'HOME'} . "/rhautoimport-lr.stat";
+my $RSS_URL = "https://cba.media/podcast/aufdraht-literadio-on-air/feed";
+$XML::Feed::MULTIPLE_ENCLOSURES=1;
+my $RD_GROUP = "literadio";
+my $PV_ID = '53';
+my $TITLE="literadio on air";
+my $LAST_RUN = 0;
+
+binmode(STDIN, ":utf8");
+binmode(STDOUT, ":utf8");
+binmode(STDERR, ":utf8");
+
+if($#ARGV >= 0 && $ARGV[0] eq 'last') {
+ print "!!!This is the last attempt, there won't be a retry on error!!!\n";
+ $LAST_RUN = 1;
+}
+rhautoimport::report_title_and_last($TITLE, $LAST_RUN);
+
+my @allowed_dbs = rhautoimport::get_dropboxes($RD_GROUP);
+if(!defined $allowed_dbs[0] && defined $allowed_dbs[1]) {
+ print "$allowed_dbs[1]\n";
+ exit 1;
+}
+
+my $idx = 0;
+my $idx_reb = 1;
+if(scalar(@allowed_dbs) != 2) {
+ print "found more or less than 2 Dropboxes for this group?!\n";
+ exit 1;
+}
+my $show_id = $allowed_dbs[$idx]->{'SHOWID'};
+my $show_title = $allowed_dbs[$idx]->{'SHOWTITLE'};
+my $show_id_reb = $allowed_dbs[$idx_reb]->{'SHOWID'};
+my $show_title_reb = $allowed_dbs[$idx_reb]->{'SHOWTITLE'};
+
+
+my @today = Date::Calc::Today();
+my @tmp = Date::Calc::Standard_to_Business(@today);
+$tmp[2] = 2;
+my @import_date = Date::Calc::Business_to_Standard(@tmp);
+
+my $curweek = RHRD::utils::get_rd_week();
+if($curweek == 2 && $today[2] >= 4) {
+ @import_date = Date::Calc::Add_Delta_Days(@import_date, 7);
+} elsif($curweek == 3 && $today[2] == 1) {
+ @import_date = @import_date; # perl won't accept an empty line here
+} else {
+ print "File won't be available by now!\n";
+ exit 42;
+}
+my @import_date_reb = Date::Calc::Add_Delta_Days(@import_date, 22);
+
+print "day of Radio Helsinki broadcast: " . Date::Calc::Date_to_Text(@import_date) . "\n";
+print "day of this show's rebroadcast: " . Date::Calc::Date_to_Text(@import_date_reb) . "\n";
+
+my @reb_date_last = Date::Calc::Add_Delta_Days(@import_date_reb, -28);
+print "day of last show's rebroadcast: " . Date::Calc::Date_to_Text(@reb_date_last) . "\n";
+if(Date::Calc::Delta_Days(@today, @reb_date_last) >= 0) {
+ print "the rebroadcast of the last show has not been played yet!\n";
+ exit 42;
+}
+
+my $id = sprintf("%04d-%02d-%02d", @import_date);
+my $current_stat = `cat $STAT_FILE`;
+my ($current_id, $current_file) = $current_stat =~ m/^(.*)\n(.*)/;
+
+if($current_id eq $id) {
+ print "Already downloaded file for $id\n";
+ exit 42;
+}
+
+print "looking for files newer than '$current_file' in RSS Feed\n";
+print " -> $RSS_URL\n";
+
+my ($result, $feed) = rhautoimport::fetch_parse_rss_cba($RSS_URL);
+unless ($result) {
+ print "Error fetching feed: $feed\n";
+ exit 1;
+}
+
+my $uri = "";
+my $file = "";
+my $sum_title = "";
+my $sum_text = "";
+
+my $i = 0;
+for my $entry ($feed->entries) {
+ $i++;
+ next unless $entry->enclosure;
+
+ my $j = 0;
+ for my $enclosure($entry->enclosure) {
+ $j++;
+ if($enclosure->type eq "audio/mpeg" || $enclosure->type eq "audio/ogg") {
+ print "$i/$j: (" . $enclosure->type . ", " . $enclosure->length . ") " . $enclosure->url . "\n";
+ my $orig_uri = $enclosure->url;
+ if($orig_uri =~ /^(.*)_cut(\.[^\.]+)$/) {
+ $orig_uri = $1 . $2;
+ }
+
+ $uri = new URI::URL($orig_uri);
+ my @path = $uri->path_components;
+ $file = $path[-1];
+
+ if($file eq $current_file) {
+ last; # no new entry
+ }
+ if(!rhautoimport::check_file_extension($file)) {
+ print "\n\nThe extension of the matching file '". $file . "' seems to be wrong - manual import necessary!!!\n";
+ print "\n\n --> https://import.helsinki.at/shows/$show_id\n";
+ exit 1;
+ }
+
+ $sum_title = decode_entities($entry->title);
+ $sum_text = decode_entities($entry->content->body);
+ print "summary:\n" . $sum_title . "\n\n" . $sum_text . "\n";
+ last;
+ }
+ }
+ last; # we are only interested in the first entry
+}
+if($uri eq "") {
+ print "No new Entry found - ";
+ if($LAST_RUN) {
+ print "giving up, manual import necessary!!!\n";
+ print "\n\n --> https://import.helsinki.at/shows/$show_id\n";
+ } else {
+ print "will retry later\n";
+ }
+ exit 1;
+}
+
+my $exit_code = 0;
+print "\n\nwill import '$uri' to show $show_id, $show_title\n";
+my ($ret, $log, $keptfile_uri) = rhautoimport::import_uri($show_id, $uri->as_string, "keep");
+if($ret == 0) {
+ print "\nImport Success:\n\n";
+ print $log;
+ print "\n";
+ ($ret, $log) = rhautoimport::pv_add_note($sum_title, $sum_text, $PV_ID, sprintf("%04d-%02d-%02d", @import_date), "1");
+ print $log;
+ if($ret) {
+ print "\nIgnoring failed note import - manual intervention necessary!\n";
+ $exit_code = 23;
+ }
+
+ print "\n";
+
+ if(!defined($keptfile_uri)) {
+ print "rhimportd didn't provide a clue where to find the kept file... will import '$uri' to rebroadcast $show_id_reb, $show_title_reb\n";
+ ($ret, $log) = rhautoimport::import_uri($show_id_reb, $uri->as_string);
+ } else {
+ print "re-using kept file '$keptfile_uri' to import rebroadcast $show_id_reb, $show_title_reb\n";
+ ($ret, $log) = rhautoimport::import_uri($show_id_reb, $keptfile_uri, "delete");
+ }
+
+ if($ret == 0) {
+ print "\nImport Success:\n\n";
+ print $log;
+ print "\n";
+ ($ret, $log) = rhautoimport::pv_add_note($sum_title, $sum_text, $PV_ID, sprintf("%04d-%02d-%02d", @import_date_reb), "2");
+ print $log;
+ if($ret) {
+ print "\nIgnoring failed note import - manual intervention necessary!\n";
+ $exit_code = 23;
+ }
+
+ } else {
+ print "\nImport Error:\n\n";
+ print $log;
+ print "\n\nNot adding PV note!!";
+ print "\n\nSince the import for the regular broadcast went through we will not retry this import!!!";
+ print "\nYou need to manually import the rebroadcast.";
+ print "\n\n --> https://import.helsinki.at/shows/$show_id_reb\n";
+ $exit_code = 23;
+ }
+} else {
+ print "\nImport Error:\n\n";
+ print $log;
+ print "\n\nNot adding PV note!!";
+ exit 1;
+}
+
+unlink($STAT_FILE);
+open(my $fhs, '>', $STAT_FILE);
+print $fhs "$id\n$file";
+close($fhs);
+
+exit $exit_code;
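
rhautoimport-lr above is the only importer that feeds two shows: the regular slot and
the rebroadcast four weeks later. It asks rhimportd to keep the downloaded file after
the first import and reuses that kept file for the second one, falling back to a fresh
download when no kept-file URI comes back. The control flow, reduced to a skeleton with
a stub in place of rhautoimport::import_uri (show IDs and URIs below are placeholders):

#!/usr/bin/perl -w
use strict;

# stub standing in for rhautoimport::import_uri, which actually talks to rhimportd;
# it returns (error flag, log text, kept-file URI when "keep" was requested)
sub import_uri {
    my ($show_id, $uri, $mode) = @_;
    return (0, "imported '$uri' to show $show_id\n",
            (defined $mode && $mode eq "keep") ? "tmp://kept/example.mp3" : undef);
}

my ($show_id, $show_id_reb) = (1111, 2222);  # placeholder show IDs
my $uri = "https://cba.media/example.mp3";   # placeholder enclosure URI

# first import: regular broadcast, ask to keep the downloaded file
my ($ret, $log, $keptfile_uri) = import_uri($show_id, $uri, "keep");
print $log;

# second import: rebroadcast, reuse the kept file if we got one
if(defined $keptfile_uri) {
    ($ret, $log) = import_uri($show_id_reb, $keptfile_uri, "delete");
} else {
    ($ret, $log) = import_uri($show_id_reb, $uri);
}
print $log;
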
diff --git a/graveyard/rhautoimport-mz b/graveyard/rhautoimport-mz
new file mode 100755
index 0000000..60e2607
--- /dev/null
+++ b/graveyard/rhautoimport-mz
@@ -0,0 +1,212 @@
+#!/usr/bin/perl -w
+#
+#
+# rhautoimport
+#
+# Copyright (C) 2009-2017 Christian Pointner <equinox@helsinki.at>
+#
+# This file is part of rhautoimport.
+#
+# rhautoimport is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# any later version.
+#
+# rhautoimport is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with rhautoimport. If not, see <http://www.gnu.org/licenses/>.
+#
+
+use strict;
+use Date::Calc;
+use DateTime::Format::Strptime;
+use XML::Feed;
+use XML::Feed::Entry;
+use XML::Feed::Content;
+use XML::Feed::Enclosure;
+use URI::URL;
+use HTML::Entities;
+use RHRD::utils;
+
+use lib '/usr/local/share/rhautoimport/';
+use rhautoimport;
+
+my $STAT_FILE = $ENV{'HOME'} . "/rhautoimport-mz.stat";
+my $RSS_URL = "https://cba.media/podcast/medienzirkus/feed";
+$XML::Feed::MULTIPLE_ENCLOSURES=1;
+my $RD_GROUP = "medienzi";
+my $PV_ID = '913';
+my $TITLE = "Medienzirkus";
+my $LAST_RUN = 0;
+my $upload_delay_days = 0;
+
+binmode(STDIN, ":utf8");
+binmode(STDOUT, ":utf8");
+binmode(STDERR, ":utf8");
+
+my $curweek = RHRD::utils::get_rd_week();
+
+if($curweek == 4) {
+ if($#ARGV >= 0 && $ARGV[0] eq 'last') {
+ print "!!!This is the last attempt, there won't be a retry on error!!!\n";
+ $LAST_RUN = 1;
+ }
+}
+rhautoimport::report_title_and_last($TITLE, $LAST_RUN);
+
+
+my @today = Date::Calc::Today();
+print "today: " . Date::Calc::Date_to_Text(@today) . " (Week: " . $curweek . ")\n";
+
+my @import_date = Date::Calc::Standard_to_Business(@today);
+$import_date[2] = 5;
+@import_date = Date::Calc::Business_to_Standard(@import_date);
+
+my $dow = Date::Calc::Day_of_Week(@today);
+if($curweek == 4) {
+ if($dow >= 5) {
+ @import_date = Date::Calc::Add_Delta_Days(@import_date, 28);
+ }
+} elsif($curweek == 3) {
+ @import_date = Date::Calc::Add_Delta_Days(@import_date, 7);
+} elsif($curweek == 2) {
+ @import_date = Date::Calc::Add_Delta_Days(@import_date, 14);
+} else {
+ @import_date = Date::Calc::Add_Delta_Days(@import_date, 21);
+}
+print "day of next Radio Helsinki broadcast: " . Date::Calc::Date_to_Text(@import_date) . "\n";
+
+
+my @allowed_dbs = rhautoimport::get_dropboxes($RD_GROUP);
+if(!defined $allowed_dbs[0] && defined $allowed_dbs[1]) {
+ print "$allowed_dbs[1]\n";
+ exit 1;
+}
+
+if(scalar(@allowed_dbs) != 1) {
+ print "found more or less than one Dropbox for this group?!\n";
+ exit 1;
+}
+my $show_id = $allowed_dbs[0]->{'SHOWID'};
+my $show_title = $allowed_dbs[0]->{'SHOWTITLE'};
+
+my @broadcast_date = Date::Calc::Nth_Weekday_of_Month_Year($import_date[0], $import_date[1], 5, 1);
+if(Date::Calc::Delta_Days(@broadcast_date, @import_date) <= 0) {
+ my @tmp_date = Date::Calc::Add_Delta_YM(@broadcast_date, 0, -1);
+ @broadcast_date = Date::Calc::Nth_Weekday_of_Month_Year($tmp_date[0], $tmp_date[1], 5, 1);
+}
+print "day of latest original broadcast before next Radio Helsinki broadcast: " . Date::Calc::Date_to_Text(@broadcast_date) . "\n";
+
+if(Date::Calc::Delta_Days(@broadcast_date, @today) <= $upload_delay_days) {
+ print "File won't be available by now!\n";
+ exit 42;
+}
+
+my $id = sprintf("%04d-%02d-%02d", @import_date);
+my $bd = sprintf("%04d-%02d-%02d", @broadcast_date);
+
+print "looking for files with cba:broadcastDate $bd in RSS Feed (will be using cba:productionDate if cba:broadcastDate is not set)\n";
+print " -> $RSS_URL\n";
+
+my ($result, $feed) = rhautoimport::fetch_parse_rss_cba($RSS_URL);
+unless ($result) {
+ print "Error fetching feed: $feed\n";
+ exit 1;
+}
+
+my $uri = "";
+my $file = "";
+my $sum_title = "";
+my $sum_text = "";
+
+my $i = 0;
+for my $entry ($feed->entries) {
+ $i++;
+ next unless $entry->enclosure;
+
+ my $entry_bdt = $entry->{'entry'}{'https://cba.media/help#feeds'}{'broadcastDate'};
+ my $format_string = "%a, %d %b %Y %H:%M:%S";
+ unless (defined $entry_bdt) {
+ $entry_bdt = $entry->{'entry'}{'https://cba.media/help#feeds'}{'productionDate'};
+ $format_string = "%a, %d %b %Y";
+ }
+ next unless (defined $entry_bdt);
+ my $bdt = DateTime::Format::Strptime::strptime($format_string, $entry_bdt);
+
+ next unless($broadcast_date[0] == $bdt->year && $broadcast_date[1] == $bdt->month && $broadcast_date[2] == $bdt->day);
+
+ my $j = 0;
+ for my $enclosure($entry->enclosure) {
+ $j++;
+ if($enclosure->type eq "audio/mpeg" || $enclosure->type eq "audio/ogg") {
+ print "$i/$j: (" . $enclosure->type . ", " . $enclosure->length . ") " . $enclosure->url . "\n";
+ my $orig_uri = $enclosure->url;
+ if($orig_uri =~ /^(.*)_cut(\.[^\.]+)$/) {
+ $orig_uri = $1 . $2;
+ }
+
+ $uri = new URI::URL($orig_uri);
+ my @path = $uri->path_components;
+ $file = $path[-1];
+
+ my $current_stat = `cat $STAT_FILE`;
+ my ($current_id, $current_file) = $current_stat =~ m/^(.*)\n(.*)/;
+ if($current_id eq $id && $current_file eq $file) {
+ print "Already downloaded file of today\n";
+ exit 42;
+ }
+ if(!rhautoimport::check_file_extension($file)) {
+ print "\n\nThe extension of the matching file '". $file . "' seems to be wrong - manual import necessary!!!\n";
+ print "\n\n --> https://import.helsinki.at/shows/$show_id\n";
+ exit 1;
+ }
+
+ $sum_title = decode_entities($entry->title);
+ $sum_text = decode_entities($entry->content->body);
+ print "summary:\n" . $sum_title . "\n\n" . $sum_text . "\n";
+ last;
+ }
+ }
+ last if $uri ne "";
+}
+if($uri eq "") {
+ print "No Entry found from $bd - ";
+ if($LAST_RUN) {
+ print "giving up, manual import necessary!!!\n";
+ print "\n\n --> https://import.helsinki.at/shows/$show_id\n";
+ } else {
+ print "will retry later\n";
+ }
+ exit 1;
+}
+
+print "\n\nwill import '$uri' to show $show_id, $show_title\n\n";
+
+my ($ret, $log) = rhautoimport::import_uri($show_id, $uri->as_string);
+if($ret) {
+ print "\nImport Error:\n\n";
+ print $log;
+ print "\n\nNot adding PV note!!";
+ exit 1;
+}
+print "\nImport Success:\n\n";
+print $log;
+print "\n";
+my $exit_code = 0;
+($ret, $log) = rhautoimport::pv_add_note($sum_title, $sum_text, $PV_ID, $id, "1");
+print $log;
+if($ret) {
+ print "\nIgnoring failed note import - manual intervention necessary!\n";
+ $exit_code = 23;
+}
+
+unlink($STAT_FILE);
+open(my $fhs, '>', $STAT_FILE);
+print $fhs "$id\n$file";
+close($fhs);
+
+exit $exit_code;