
Update Amica JSON urls and parsing

Toni Fadjukoff 6 years ago
parent commit b627518785
2 changed files with 31 additions and 24 deletions
  1. amica.py  (+27 -23)
  2. campusravita.py  (+4 -1)
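
Note on the URL change: the three restaurants now point at https://www.amica.fi/modules/json/json/Index, keyed by costNumber and language, instead of the old api/restaurant/menu/week endpoint with restaurantPageId; since the new endpoint appears to return the whole week at once, the weekDate parameter is no longer appended in get_restaurants. Below is a minimal sketch of fetching one of the new URLs directly, purely for illustration; the project itself goes through pnalib.get_json_file, and only the "MenusForDays" key is confirmed by this commit.

    import json
    import urllib.request

    # One of the endpoints added in this commit (Ravintola Reaktori).
    url = "https://www.amica.fi/modules/json/json/Index?costNumber=0812&language=fi"
    with urllib.request.urlopen(url) as response:
        data = json.loads(response.read().decode("utf-8"))

    # The updated parser expects a top-level "MenusForDays" list, one entry per weekday.
    print(len(data["MenusForDays"]))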

amica.py  (+27 -23)

@@ -2,9 +2,9 @@
 import datetime
 
 restaurant_info = [
-  [ "(TaY) Amica Minerva", "http://www.amica.fi/minerva", "", "middle", "http://www.amica.fi/api/restaurant/menu/week?language=fi&restaurantPageId=7381" ],
-  [ "(TaY) Tampereen normaalikoulun ravintola", "http://www.amica.fi/tampereennormaalikoulu", "", "middle", "http://www.amica.fi/api/restaurant/menu/week?language=fi&restaurantPageId=6655" ],
-  [ "(TTY) Ravintola Reaktori", "http://www.amica.fi/reaktori", "", "middle", "http://www.amica.fi/api/restaurant/menu/week?language=fi&restaurantPageId=69171" ]
+  [ "(TaY) Amica Minerva", "http://www.amica.fi/minerva", "", "middle", "https://www.amica.fi/modules/json/json/Index?costNumber=0815&language=fi" ],
+  [ "(TaY) Tampereen normaalikoulun ravintola", "http://www.amica.fi/tampereennormaalikoulu", "", "middle", "https://www.amica.fi/modules/json/json/Index?costNumber=0811&language=fi" ],
+  [ "(TTY) Ravintola Reaktori", "http://www.amica.fi/reaktori", "", "middle", "https://www.amica.fi/modules/json/json/Index?costNumber=0812&language=fi" ]
 ]
 
 def get_restaurants(use_old, week):
@@ -17,41 +17,45 @@
         title = info[0]
         url = info[4]
         temp_fname = "amica_{count}.temp.js".format(count = count)
-        url = "{url}&weekDate={week_date}".format(url=url, week_date=week_date)
         data = pnalib.get_json_file(url, temp_fname, use_old)
         if not data:
             continue
 
         week_foods = {}
-        lunch_menus = data["LunchMenus"]
+        lunch_menus = data["MenusForDays"]
         for week_day, lunch_menu in enumerate(lunch_menus):
             current_day_foods = []
             set_menus = lunch_menu["SetMenus"]
-            html = lunch_menu["Html"]
+            html = lunch_menu.get("Html", "")
             if len(html):
-                menus = html.split("<p>")
-                for set_menu in menus:
-                    meals = set_menu.split("<br />")
-                    food = []
-                    for meal in meals:
-                        parts = meal.split("(")
-                        current_food = parts[0]
-                        diets = [s.strip() for s in parts.split(")")[0].split(",")]
-                        if diets:
-                            current_food += " ({allergies})".format(allergies=", ".join(diets))
-                        food.append(current_food)
-                    current_day_foods.append("\n".join(food))
+                    current_day_foods.append(handle_html(html))
             else:
                 for set_menu in set_menus:
-                    meals = set_menu["Meals"]
+                    meals = set_menu["Components"]
                     food = []
                     for meal in meals:
-                        current_food = meal["Name"]
-                        if "Diets" in meal:
-                            current_food += " ({allergies})".format(allergies=", ".join(meal["Diets"]))
-                        food.append(current_food)
+                        food.append(format_meal_allergies(meal))
                     current_day_foods.append("\n".join(food))
             week_foods[week_day] = current_day_foods
 
         restaurants.append([title, "", week, week_foods, info])
     return restaurants
+
+# Is this still needed?
+def handle_html(html):
+    menus = html.split("<p>")
+    for set_menu in menus:
+        meals = set_menu.split("<br />")
+        food = []
+        for meal in meals:
+            food.append(format_meal_allergies(meal))
+    return "\n".join(food)
+
+def format_meal_allergies(meal):
+    parts = meal.split("(")
+    current_food = parts[0]
+    diets = [s.strip() for s in parts[1].split(")")[0].split(",")]
+    if diets:
+        current_food += " ({allergies})".format(allergies=", ".join(diets))
+    return current_food
+
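
For reference, the day structure that the rewritten loop walks: each entry of "MenusForDays" carries "SetMenus", each set menu a list of "Components" strings, and the trailing parenthesised diet codes are reformatted by the new format_meal_allergies helper (a non-empty "Html" field is routed through handle_html instead). The sketch below uses invented menu strings; the field names come from the diff, and the helper is copied from it so the snippet runs on its own. As written, a component without a parenthesised diet list would raise IndexError on parts[1].

    # Invented sample in the shape the updated parser expects; values are placeholders.
    sample_day = {
        "SetMenus": [
            {"Components": ["Kasvislasagne (G, VE)", "Kermaperunat (G, L)"]},
        ],
    }

    def format_meal_allergies(meal):
        # Same logic as the helper added in this commit.
        parts = meal.split("(")
        current_food = parts[0]
        diets = [s.strip() for s in parts[1].split(")")[0].split(",")]
        if diets:
            current_food += " ({allergies})".format(allergies=", ".join(diets))
        return current_food

    for set_menu in sample_day["SetMenus"]:
        print("\n".join(format_meal_allergies(m) for m in set_menu["Components"]))
    # -> Kasvislasagne  (G, VE)
    #    Kermaperunat  (G, L)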

campusravita.py  (+4 -1)

@@ -52,6 +52,7 @@
 class CampusravitaHTMLParser(html.parser.HTMLParser):
     week_re = re.compile("Ruokalista - Viikko (\d+)")
     lunch_re = re.compile("Lounas|Deli-lounas")
+    week = None
 
     def __init__(self):
         html.parser.HTMLParser.__init__(self)
@@ -166,7 +167,9 @@
     parser = CampusravitaHTMLParser()
     parser.feed(data)
 
-    restaurants = [[restaurant_info[0][0], "", parser.week, parser.week_foods, restaurant_info[0]]]
+    restaurants = []
+    if parser.week is not None:
+        restaurants.append([restaurant_info[0][0], "", parser.week, parser.week_foods, restaurant_info[0]])
 
     return restaurants
 
175