fastdoi/main.py

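# FastAPI service with two ingestion endpoints: /fastdoi pulls paper metadata
# from Semantic Scholar and /rss pulls feed entries through the local rss
# module; both push question/answer records to a FastGPT dataset via
# upload2fastgpt and track what has already been loaded in MySQL.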
import urllib.parse

from fastapi import FastAPI

from config import __CONFIG__
import mysql_connector
from fastgpt_uploader import upload2fastgpt
from semanticscholar import search_paper
import rss

app = FastAPI()
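

# The /fastdoi endpoint searches Semantic Scholar for the query string, skips
# papers already recorded in MySQL, uploads the new BibTeX/abstract pairs to
# FastGPT, and only marks them as loaded if that upload succeeds.
# Example request: GET /fastdoi?questions=large%20language%20models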
@app.get("/fastdoi")
async def get_reference(questions):
print('Search: '+questions)
res = []
try:
list = search_paper(urllib.parse.quote(questions))
for i in list:
if mysql_connector.is_loaded(i['paperId']):
print(i['paperId'])
else:
print(i['citationStyles']['bibtex'])
res.append({
'id':i['paperId'],
'q':str(i['citationStyles']['bibtex']),
'a':str(i['abstract']),
'score':[]
})
print('New load: '+str(len(res))+'/'+str(len(list)))
except Exception as e:
print(str(e))
if(upload2fastgpt(res)):
for i in res:
mysql_connector.new_load(i['id'])
return res
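

# The /rss endpoint drains the feed reader and uploads entries in batches.
# Assumed interface of the local rss module (inferred from the calls below):
# rss.load_content() returns the next batch of entries (falsy when exhausted)
# and rss.update(ids) marks those entry ids as already processed.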
@app.get("/rss")
async def load_rss():
count = 0
loaded = []
uploads = []
while entries := rss.load_content():
for entry in entries:
loaded.append(entry['id'])
uploads.append({
'q': entry['title'],
'a': entry['content']
})
count += 1
if len(loaded) >= 100 and upload2fastgpt(uploads):
rss.update(loaded)
loaded = []
uploads = []
if len(loaded) > 0 and upload2fastgpt(uploads):
rss.update(loaded)
print(f"Total entries loaded: {count}")
return f"Total entries loaded: {count}"
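

# Run directly (python main.py) to serve on http://127.0.0.1:8964; in
# production you would point uvicorn at `app` from the command line instead.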
if __name__ == '__main__':
    import uvicorn

    uvicorn.run(app, host="127.0.0.1", port=8964)
    # uvicorn.run() blocks, so the MySQL connection is closed only after
    # the server has shut down.
    mysql_connector.end_mysql()