import re
import DataBase.mongoDB as DataBase
import RegularExpressionParser.Parser as Parser
import Crawler.Scrape as Scrape
from flask import Flask, request, jsonify
from urllib.parse import urlparse, parse_qs

app = Flask(__name__)
# app.config["DEBUG"] = True
@app.route("/", methods=['GET'])
def home():
return "200: successfully connected to home page\n"
@app.route("/api/<collection>/", methods=["GET", "PUT", "POST", "DELETE"])
def data_base(collection):
""" data base page of server """
print("\n===============================\n")
print(collection)
print("\n===============================\n")
if collection == "book":
url_parsed = urlparse(request.url)
qs_parsed = parse_qs(url_parsed.query)
if qs_parsed == {}:
return jsonify(DataBase.get_documents_json(0, {}))
return jsonify(search_document(["book.id:" + qs_parsed["id"][0]]))
elif collection == "author":
url_parsed = urlparse(request.url)
qs_parsed = parse_qs(url_parsed.query)
if qs_parsed == {}:
return jsonify(DataBase.get_documents_json(1, {}))
return jsonify(search_document(["author.id:" + qs_parsed["id"][0]]))
elif collection == "search":
url_parsed = urlparse(request.url)
qs_parsed = parse_qs(url_parsed.query)
result = jsonify(search_document(qs_parsed["q"][0].split("&")))
return jsonify(result)
elif request.method == "PUT":
if request.headers["Content-Type"] != "application/json":
json_update_info = request.json
if collection == "book":
opt = 0
elif collection == "author":
opt = 1
else:
DataBase.update_dicts(opt, request.args.to_dict(), json_update_info)
return "200: PUT succeeded"
elif request.method == "POST":
if request.headers["Content-Type"] != "application/json":
json_file = request.json
if collection == "books":
DataBase.insert_dicts(json_file, 0)
elif collection == "authors":
DataBase.insert_dicts(json_file, 1)
elif collection == "book":
DataBase.insert_document(json_file, 0)
elif collection == "author":
DataBase.insert_document(json_file, 1)
elif collection == "scrape":
param = request.args.to_dict()
url = param["url"]
max_book = param["max_book"]
max_author = param["max_author"]
Scrape.scrape_api(url, max_book, max_author)
elif request.method == "DELETE":
identifier = request.args.to_dict()
print(identifier)
if collection == "book":
opt = 0
elif collection == "author":
opt = 1
else:
DataBase.clean(opt, identifier)
return "200: DELETE succeeded"


def search_document(identifiers):
    """ Find one or several documents in the database. """
    if len(identifiers) == 1:
        json_idt = Parser.parse_query_to_json(identifiers[0])
        print(json_idt)
        if re.search("^book.*", identifiers[0]):
            return DataBase.get_documents_json(0, json_idt)
        return DataBase.get_documents_json(1, json_idt)
    elif len(identifiers) == 3:
        if re.search("^book.*", identifiers[0]):
            if re.search("^author.*", identifiers[2]):
                print("Failed to find documents: the two statements do not point to the same collection")
                return {}
            else:
                opt = 0
        else:
            if re.search("^book.*", identifiers[2]):
                print("Failed to find documents: the two statements do not point to the same collection")
                return {}
            else:
                opt = 1
        json_idt1 = Parser.parse_query_to_json(identifiers[0])
        json_idt2 = Parser.parse_query_to_json(identifiers[2])
        if identifiers[1] == "AND":
            exp = {"$and": [json_idt1, json_idt2]}
        elif identifiers[1] == "OR":
            exp = {"$or": [json_idt1, json_idt2]}
        else:
            print("Failed to parse query: unknown operator " + identifiers[1])
            return {}
        print("exp:")
        print(exp)
        return DataBase.get_documents_json(opt, exp)
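

# A minimal entry point so the module can be run directly; the host/port are
# Flask's defaults and are an assumption, not part of the original file. The
# example requests in the comments below are likewise illustrative sketches of
# how the routes above could be exercised, with hypothetical id values.
if __name__ == "__main__":
    # Example requests once the server is running:
    #   GET    http://127.0.0.1:5000/api/book/?id=123
    #   GET    http://127.0.0.1:5000/api/search/?q=book.id:123
    #   PUT    http://127.0.0.1:5000/api/book/?id=123   (JSON body with fields to update)
    #   DELETE http://127.0.0.1:5000/api/book/?id=123
    app.run()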