# fachschaften/compiler/views.py
from flask import Blueprint, jsonify, render_template, abort, redirect, url_for, request
import flask
import json

from src.database import db_session2, init_db, read_json, init_db2
from src import clogger
from src.articles import Article
from .models import CrawlUrl, CrawlCache, CrawlCacheSchema, CrawlUrlSchema

import mworker
from compiler import do_compile
from fetching import fetch_page

compiler_pages = Blueprint('compiler', __name__,
                           template_folder='.')

# flask.json.JSONEncoder.default = lambda self, obj: ((obj.__json__()) if isinstance(obj, (Article, CrawlUrl)) else None)
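
# This blueprint exposes JSON endpoints for managing crawl URLs (CrawlUrl) and
# for driving the fetch -> compile -> process pipeline implemented in mworker.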
@compiler_pages.route("/")
@compiler_pages.route("")
@compiler_pages.route(".json")
def index():
status="For documentation goto /doc"
return jsonify(status=status)
@compiler_pages.route("/doc")
@compiler_pages.route("/doc.json")
def doc():
return render_template("README")
# return jsonify(status=render_template("README"))
#
@compiler_pages.route("/initdb")
@compiler_pages.route("/initdb.json")
def initdb_json():
init_db() # initialisiere Datenbank
status="Datenbank Neu initialisiert"
return jsonify(status=status)
@compiler_pages.route("/initdb2")
@compiler_pages.route("/initdb2.json")
def initdb_json2():
init_db2() # initialisiere Datenbank
status="Datenbank Neu initialisiert"
return jsonify(status=status)
@compiler_pages.route("/start")
@compiler_pages.route("/start.json")
def start_json():
mworker.start_workers(1,1,1) # initialisiere Datenbank
status="Worker gestartet"
return jsonify(status=status)
@compiler_pages.route("/urls")
@compiler_pages.route("/urls.json")
def urls_index_json():
# Lade Alle Urls
status=CrawlUrl.query.all()
return jsonify(urls=status)


# show an existing CrawlUrl
@compiler_pages.route("/urls/<int:id>")
@compiler_pages.route("/urls/<int:id>.json")
def urls_json(id):
    # load a single URL and its cache entry, if one exists
    status = CrawlUrl.query.get(id)
    cc = CrawlCache.query.filter(CrawlCache.url == status.url).first()
    return jsonify(urls=status, cache=cc.__json__() if cc is not None else None)


# queue an existing CrawlUrl for fetching
@compiler_pages.route("/urls/<int:id>/que")
@compiler_pages.route("/urls/<int:id>/que.json")
def urls_que_json(id):
    # load the URL, put it on the fetch queue, and start the workers
    cu = CrawlUrl.query.get(id)
    mworker.queue_url(cu.tpe, cu.url)
    cc = CrawlCache.query.filter(CrawlCache.url == cu.url).first()
    mworker.start_workers(1, 1, 1)  # start the workers
    return jsonify(urls=cu, cache=cc)


# fetch and compile an existing CrawlUrl synchronously, bypassing the queue
@compiler_pages.route("/urls/<int:id>/test")
@compiler_pages.route("/urls/<int:id>/test.json")
def urls_test_json(id):
    cu = CrawlUrl.query.get(id)
    rw = fetch_page(cu.url)
    # build the intermediate document that do_compile expects
    h = {"url": cu.url, "sourcetype": cu.tpe, "raw": rw}
    h2 = do_compile(cu.tpe, h)
    return jsonify(urls=cu, hs=h2, rw=rw)
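
# Example (assuming the blueprint is mounted under /compiler):
#   GET /compiler/urls/1/test
# returns the raw page body together with the compiled document, without
# touching the worker queue.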
@compiler_pages.route("/debug",methods=['GET','PUT'])
def debug():
status="did nothing"
js=read_json(request)
clogger.info(request.get_json())
if js["cmd"] == "runfetch":
mworker.run_fetch()
status="fetched something"
if js["cmd"] == "que":
cu = CrawlUrl.query.get(js["id"])
mworker.queue_url(cu.tpe, cu.url)
status= mworker.run_fetch()
if js["cmd"] == "comp":
status=mworker.run_compile()
if js["cmd"]=="process":
status=mworker.run_process()
return jsonify(status=status)
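
# Example debug calls (payload shapes inferred from the handler above; the
# /compiler prefix is an assumption about where the blueprint is mounted):
#   curl -X PUT /compiler/debug -H 'Content-Type: application/json' -d '{"cmd": "runfetch"}'
#   curl -X PUT /compiler/debug -H 'Content-Type: application/json' -d '{"cmd": "que", "id": 1}'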
@compiler_pages.route("/debugurl")
def debugurl():
s=CrawlUrlSchema()
status=CrawlUrl.query.all()
return jsonify(status=status)
@compiler_pages.route("/urls",methods=['POST'])
def add_urls():
# Lese Daten
js =read_json(request)
# clogger.info(js)
# Finde oder Erzeuge Url in der Datenbank
url=CrawlUrlSchema().load(js["url"])
clogger.info(url)
url=CrawlUrl.find_or_create(url.data["tpe"], url.data["url"])
db_session2.add(url)
db_session2.commit()
return jsonify(url=url, kk=js)
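
# Example (field names follow the CrawlUrlSchema usage above; the concrete
# "tpe" value is a placeholder):
#   curl -X POST /compiler/urls -H 'Content-Type: application/json' \
#        -d '{"url": {"tpe": "html", "url": "http://example.org/page"}}'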
@compiler_pages.route("/urls/<int:id>",methods=['DELETE'])
@compiler_pages.route("/urls<int:id>.json",methods=['DELETE'])
def delete(id):
cu=CrawlUrl.query.get(id)
if cu != None:
db_session2.delete(cu)
db_session2.commit()
return jsonify(url={})
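
# Example: curl -X DELETE /compiler/urls/1
# removes the CrawlUrl with id 1, if it exists, and returns an empty object.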