-
Notifications
You must be signed in to change notification settings - Fork 2
Expand file tree
/
Copy pathmain.py
More file actions
117 lines (64 loc) · 2.85 KB
/
main.py
File metadata and controls
117 lines (64 loc) · 2.85 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
from fastapi import FastAPI, HTTPException
from fastapi.routing import APIRouter
from Scraper import Scraper
from StatusCodes import codes
from Schemas import MetaData
from database import CacheManager
# Application version string, surfaced in the OpenAPI metadata.
VERSION = "2.2.0"

# Module-level singletons shared by every request handler below.
scraper = Scraper()
cache_manager = CacheManager()

app = FastAPI(title="getcomics API", version=VERSION)
api_router = APIRouter()
@api_router.get("/")
async def root():
    """Health-check endpoint: echoes HTTP 200 with its status text."""
    return {200: codes[200]}
@api_router.get("/search/{query}")
async def search(query: str = None, page: int = 1) -> MetaData:
    """Search getcomics for *query* (path parameter), optionally paginated.

    Returns a cached result when a fresh cache entry exists; otherwise
    scrapes the site, creates or refreshes the cache entry, and returns
    the parsed metadata.

    Raises:
        HTTPException: when the scraper reports an HTTP error status code.
    """
    cache_key = {"Query": query, "Page": page}  # renamed: was shadowing builtin `filter`
    cache = cache_manager.fetch_cache(cache_key)
    if cache is not None and not cache_manager.is_cache_expired(cache["cache_time"]):
        return MetaData(Meta_Data=cache["Comics"]["Meta-Data"])
    scraper.set_search_url(query, page)
    results = await scraper.get_search_results()
    # The scraper signals failure by returning an int HTTP status code.
    if isinstance(results, int):
        raise HTTPException(status_code=results, detail=codes[results])
    if cache is None:
        cache_manager.create_cache({"cache_time": None, "Query": query, "Page": page, "Comics": results})
    else:
        cache_manager.update_cache(cache_key, {"$set": {"cache_time": None, "Comics": results}})
    return MetaData(Meta_Data=results["Meta-Data"])
@api_router.get("/latest")
async def latest() -> MetaData:
    """Return metadata for the latest-comics page, using the cache when fresh.

    Raises:
        HTTPException: when the scraper reports an HTTP error status code.
    """
    cache_key = {"Latest": True}  # renamed: was shadowing builtin `filter`
    cache = cache_manager.fetch_cache(cache_key)
    if cache is not None and not cache_manager.is_cache_expired(cache["cache_time"]):
        return MetaData(Meta_Data=cache["Comics"]["Meta-Data"])
    scraper.set_latestpage_url()
    result = await scraper.get_search_results()
    # BUGFIX: mirror /search and /tag — the scraper signals failure with an
    # int status code. Previously this fell through, cached the bare int,
    # and crashed on result["Meta-Data"] with a TypeError.
    if isinstance(result, int):
        raise HTTPException(status_code=result, detail=codes[result])
    if cache is None:
        cache_manager.create_cache({"cache_time": None, "Latest": True, "Comics": result})
    else:
        cache_manager.update_cache(cache_key, {"$set": {"cache_time": None, "Comics": result}})
    return MetaData(Meta_Data=result["Meta-Data"])
@api_router.get("/tag/{tag}")
async def tag(tag: str = None, page: int = 1) -> MetaData:
    """List comics for *tag* (path parameter), optionally paginated.

    Returns a cached result when a fresh cache entry exists; otherwise
    scrapes the site, creates or refreshes the cache entry, and returns
    the parsed metadata.

    Raises:
        HTTPException: when the scraper reports an HTTP error status code.
    """
    cache_key = {"Tag": tag, "Page": page}  # renamed: was shadowing builtin `filter`
    cache = cache_manager.fetch_cache(cache_key)
    if cache is not None and not cache_manager.is_cache_expired(cache["cache_time"]):
        return MetaData(Meta_Data=cache["Comics"]["Meta-Data"])
    scraper.set_tag_url(tag, page)
    results = await scraper.get_search_results()
    # The scraper signals failure by returning an int HTTP status code.
    if isinstance(results, int):
        raise HTTPException(status_code=results, detail=codes[results])
    if cache is None:
        cache_manager.create_cache({"cache_time": None, "Tag": tag, "Page": page, "Comics": results})
    else:
        cache_manager.update_cache(cache_key, {"$set": {"cache_time": None, "Comics": results}})
    return MetaData(Meta_Data=results["Meta-Data"])
app.include_router(api_router, prefix= "/getcomics/v2")