fix: #65 increase max requests per IP and add error handling for the fetcher if the page is offline

Elmar Kresse
2024-03-11 00:13:59 +01:00
parent 7bb0af18c1
commit 1f99e26f80
2 changed files with 21 additions and 14 deletions

View File

@@ -24,6 +24,7 @@ func ParseEventsFromRemote(app *pocketbase.PocketBase) (model.Events, error) {
 func FetchAllEventsAndSave(app *pocketbase.PocketBase, clock localTime.Clock) ([]model.Event, error) {
 	var savedRecords []model.Event
+	var err error = nil
 	var stubUrl = [2]string{
 		"https://stundenplan.htwk-leipzig.de/",
@@ -53,30 +54,35 @@ func FetchAllEventsAndSave(app *pocketbase.PocketBase, clock localTime.Clock) ([
 	if (clock.Now().Month() >= 3) && (clock.Now().Month() <= 10) {
 		url := stubUrl[0] + "ss" + stubUrl[1]
-		events, err := parseEventForOneSemester(url)
+		var events []model.Event
+		events, err = parseEventForOneSemester(url)
 		if err != nil {
-			return nil, fmt.Errorf("failed to parse events for summmer semester: %w", err)
+			// append new error to err
+			// fmt.Errorf("failed to parse events for summmer semester: %w", err)
+			err = fmt.Errorf("failed to parse events for summmer semester: %w", err)
 		}
 		savedEvents, dbError := db.SaveEvents(events, app)
 		if dbError != nil {
-			return nil, fmt.Errorf("failed to save events: %w", dbError)
+			// fmt.Errorf("failed to save events: %w", dbError)
+			err = fmt.Errorf("failed to save events: %w", dbError)
 		}
 		savedRecords = append(savedEvents, events...)
 	}
 	if (clock.Now().Month() >= 9) || (clock.Now().Month() <= 4) {
 		url := stubUrl[0] + "ws" + stubUrl[1]
-		events, err := parseEventForOneSemester(url)
+		var events []model.Event
+		events, err = parseEventForOneSemester(url)
 		if err != nil {
-			return nil, fmt.Errorf("failed to parse events for winter semester: %w", err)
+			err = fmt.Errorf("failed to parse events for winter semester: %w", err)
 		}
 		savedEvents, dbError := db.SaveEvents(events, app)
 		if dbError != nil {
-			return nil, fmt.Errorf("failed to save events: %w", dbError)
+			err = fmt.Errorf("failed to save events: %w", dbError)
 		}
 		savedRecords = append(savedRecords, savedEvents...)
 	}
-	return savedRecords, nil
+	return savedRecords, err
 }
 func parseEventForOneSemester(url string) ([]model.Event, error) {
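
The hunk above switches the fetcher from fail-fast to fail-soft: when the HTWK timetable page is offline, the error is recorded and the loop continues, so events for the other semester can still be fetched and saved, and the partial results are returned together with the error. One caveat of the committed version is that a later assignment to `err` silently overwrites an earlier one. A minimal sketch of the same pattern using `errors.Join` (Go 1.20+), which preserves all failures, is shown below; `fetchSemester` and `saveAll` are hypothetical stand-ins for `parseEventForOneSemester` and `db.SaveEvents`, not code from this repository.

```go
package main

import (
	"errors"
	"fmt"
)

// fetchSemester and saveAll are hypothetical stand-ins for
// parseEventForOneSemester and db.SaveEvents.
func fetchSemester(url string) ([]string, error) {
	return nil, fmt.Errorf("page offline: %s", url)
}

func saveAll(events []string) ([]string, error) {
	return events, nil
}

func fetchAll(urls []string) ([]string, error) {
	var saved []string
	var errs error
	for _, url := range urls {
		events, err := fetchSemester(url)
		if err != nil {
			// Record the failure and keep going instead of returning early.
			errs = errors.Join(errs, fmt.Errorf("failed to parse events from %s: %w", url, err))
			continue
		}
		savedEvents, err := saveAll(events)
		if err != nil {
			errs = errors.Join(errs, fmt.Errorf("failed to save events: %w", err))
			continue
		}
		saved = append(saved, savedEvents...)
	}
	// Partial results plus the combined error, mirroring the commit's
	// `return savedRecords, err`.
	return saved, errs
}

func main() {
	saved, err := fetchAll([]string{
		"https://stundenplan.htwk-leipzig.de/ss",
		"https://stundenplan.htwk-leipzig.de/ws",
	})
	fmt.Println(saved, err)
}
```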

View File

@@ -37,6 +37,7 @@ http {
 	# Docker IP Ranges (https://docs.docker.com/network/iptables/)
 	set_real_ip_from 172.16.0.0/12;
+	set_real_ip_from 141.57.0.0/16;
 	real_ip_header CF-Connecting-IP;
@@ -89,14 +90,14 @@ http {
 	}
 	# Limit the number of requests per IP
-	limit_req_zone $limit_key zone=feed:20m rate=10r/m;
+	limit_req_zone $limit_key zone=feed:20m rate=20r/m;
 	limit_req_zone $limit_key zone=createFeed:10m rate=1r/m;
-	limit_req_zone $limit_key zone=modules:10m rate=3r/m;
+	limit_req_zone $limit_key zone=modules:10m rate=30r/m;
 	server {
 		listen 80;
 		listen [::]:80;
-		server_name htwekalender.de www.htwekalender.de;
+		server_name htwkalender.de www.htwkalender.de;
 		location /api {
 			proxy_pass http://htwkalender-backend:8090;
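
This hunk doubles the per-IP budget for the feed endpoint (10r/m to 20r/m) and raises the modules endpoint from 3 to 30 requests per minute, while feed creation stays throttled at 1r/m; it also fixes the misspelled server_name. As a rough illustration of what those budgets mean (not part of the commit), the equivalent token-bucket policy in Go via `golang.org/x/time/rate` could look like the sketch below; a real per-IP limiter would keep one limiter per client key, analogous to nginx's `$limit_key`.

```go
package main

import (
	"fmt"
	"time"

	"golang.org/x/time/rate"
)

func main() {
	// 20r/m is one token every 3s; burst 1 mimics a plain limit_req
	// directive with no burst= parameter.
	feed := rate.NewLimiter(rate.Every(3*time.Second), 1)
	modules := rate.NewLimiter(rate.Every(2*time.Second), 1)   // 30r/m
	createFeed := rate.NewLimiter(rate.Every(time.Minute), 1)  // 1r/m

	for i := 0; i < 3; i++ {
		fmt.Printf("feed=%v modules=%v createFeed=%v\n",
			feed.Allow(), modules.Allow(), createFeed.Allow())
	}
}
```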
@@ -118,7 +119,7 @@ http {
 			proxy_cache_bypass 0;
 			proxy_no_cache 0;
 			proxy_cache mcache; # mcache=RAM
-			proxy_cache_valid 200 301 302 30m;
+			proxy_cache_valid 200 301 302 10m;
 			proxy_cache_valid 403 404 5m;
 			proxy_cache_lock on;
 			proxy_cache_use_stale timeout updating;
@@ -136,7 +137,7 @@ http {
 			proxy_cache_bypass 0;
 			proxy_no_cache 0;
 			proxy_cache mcache; # mcache=RAM
-			proxy_cache_valid 200 301 302 30m;
+			proxy_cache_valid 200 301 302 10m;
 			proxy_cache_valid 403 404 5m;
 			proxy_cache_lock on;
 			proxy_cache_use_stale timeout updating;
@@ -154,7 +155,7 @@ http {
 			proxy_cache_bypass 0;
 			proxy_no_cache 0;
 			proxy_cache mcache; # mcache=RAM
-			proxy_cache_valid 200 301 302 30m;
+			proxy_cache_valid 200 301 302 10m;
 			proxy_cache_valid 403 404 5m;
 			proxy_cache_lock on;
 			proxy_cache_use_stale timeout updating;
@@ -172,7 +173,7 @@ http {
 			proxy_cache_bypass 0;
 			proxy_no_cache 0;
 			proxy_cache mcache; # mcache=RAM
-			proxy_cache_valid 200 301 302 30m;
+			proxy_cache_valid 200 301 302 10m;
 			proxy_cache_valid 403 404 5m;
 			proxy_cache_lock on;
 			proxy_cache_use_stale timeout updating;
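
The remaining hunks shorten the cache validity for successful responses from 30 to 10 minutes in each cached location, so stale timetable data is served for at most ten minutes, while 403/404 responses stay cached for five. A toy sketch of that freshness rule (purely illustrative, not part of the commit):

```go
package main

import (
	"fmt"
	"time"
)

type cachedResponse struct {
	status   int
	storedAt time.Time
}

// ttlFor mirrors the two proxy_cache_valid lines: successes are fresh for
// 10 minutes, 403/404 for 5 minutes, everything else is not cached.
func ttlFor(status int) time.Duration {
	switch status {
	case 200, 301, 302:
		return 10 * time.Minute
	case 403, 404:
		return 5 * time.Minute
	default:
		return 0
	}
}

func fresh(c cachedResponse, now time.Time) bool {
	ttl := ttlFor(c.status)
	return ttl > 0 && now.Sub(c.storedAt) < ttl
}

func main() {
	c := cachedResponse{status: 200, storedAt: time.Now().Add(-15 * time.Minute)}
	fmt.Println(fresh(c, time.Now())) // false: older than the 10m window
}
```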