fachschaften/database.py

from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker
from sqlalchemy.ext.declarative import declarative_base
from src import package_directory, clogger, cfg
from os import path
import json
#engine = create_engine('sqlite:////home/andreas/www/crawler/test.db', convert_unicode=True)
if cfg.get("db_path")==None or cfg.get("db_path").strip()=="":
db_path=package_directory
else:
db_path=cfg.get("db_path")
db_main_type = cfg.get("db_main_type")
if db_main_type == None or db_main_type.strip()=="":
db_main_type="sqlite"
db_mainfile=cfg.get("db_main_file")
if db_mainfile == None or db_mainfile.strip()=="":
db_mainfile="../srctest.db"
db_urlfile=cfg.get("db_url_file")
if db_urlfile == None or db_urlfile.strip()=="":
db_urlfile="../srctest_cu.db"
if cfg.get("db_main_type") == "mysql":
engine = create_engine("mysql+pymysql://%s:%s@localhost/crawler_articles?charset=utf8" % (cfg.get("db_main_user"), cfg.get("db_main_pw")))
else:
engine = create_engine('sqlite:///'+ path.join(db_path,db_mainfile), convert_unicode=True)
# Two scoped sessions on the main engine: one for the web app, one for
# background/crawler processes.
db_session = scoped_session(sessionmaker(autocommit=False,
                                         # autoflush=False,
                                         bind=engine))
db_session_process = scoped_session(sessionmaker(autocommit=False,
                                                 # autoflush=False,
                                                 bind=engine))

# Second database (crawl URLs): MySQL if configured, otherwise SQLite.
if cfg.get("db_urls_type") == "mysql":
    engine2 = create_engine("mysql+pymysql://%s:%s@localhost/crawler_urls?charset=utf8"
                            % (cfg.get("db_urls_user"), cfg.get("db_urls_pw")))
else:
    engine2 = create_engine('sqlite:///' + path.join(db_path, db_urlfile), convert_unicode=True)

db_session2 = scoped_session(sessionmaker(autocommit=False,
                                          autoflush=False,
                                          bind=engine2))
from database_mbase import MyBase, MyBase2

# Declarative base classes for the two databases. Only Base2 currently gets a
# session-bound query property; the corresponding line for Base is left
# commented out.
#Base = declarative_base()
#Base.query = db_session.query_property()
Base = declarative_base(cls=MyBase)
Base2 = declarative_base(cls=MyBase2)
Base2.query = db_session2.query_property()
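# Illustration only (this model is hypothetical, not defined in the project):
# a class declared against Base2 can be queried through the session-bound
# `query` property set up above, e.g.
#
#     from sqlalchemy import Column, Integer, String
#
#     class ExampleUrl(Base2):
#         __tablename__ = "example_url"
#         id = Column(Integer, primary_key=True)
#         url = Column(String(2048))
#
#     pending = ExampleUrl.query.filter_by(url=None).all()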
def read_json(rq):
    """Extract a JSON payload from a request object, falling back to the
    form data and finally to parsing the raw request body."""
    js = rq.get_json()
    clogger.info(rq.data)
    if js is None:
        js = rq.form.to_dict()
        if js == {} and rq.data != "":
            d = rq.data
            js = json.loads(d)
    clogger.info(js)
    return js
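# A typical caller would be a Flask view; the route below is only a usage
# sketch and assumes `app` and `request` come from the web application:
#
#     @app.route("/organizations", methods=["POST"])
#     def create_organization():
#         payload = read_json(request)   # dict from JSON body or form data
#         ...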
def init_db():
    # Import the model modules so their tables are registered on Base
    # before create_all() runs.
    import src.models
    from src.models import Organization
    Base.metadata.create_all(bind=engine)


def init_db2():
    from .compiler.models import CrawlUrl, CrawlCache
    Base2.metadata.create_all(bind=engine2)
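# How this module is typically wired into the web app: a sketch of the usual
# Flask + scoped_session pattern. The app module and the import path of this
# file are assumptions, not taken from the project:
#
#     from database import db_session, db_session2, init_db, init_db2
#
#     init_db()    # create the article tables on first start
#     init_db2()   # create the crawl-url tables
#
#     @app.teardown_appcontext
#     def shutdown_sessions(exception=None):
#         # scoped_session needs an explicit remove() at the end of each
#         # request so connections are returned to the pool
#         db_session.remove()
#         db_session2.remove()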