Initial commit

This commit is contained in:
Liuweiqing 2024-01-16 10:54:40 +08:00
commit 442ac1426b
5 changed files with 97 additions and 0 deletions

2
.gitattributes vendored Normal file
View File

@ -0,0 +1,2 @@
# Auto detect text files and perform LF normalization
* text=auto

18
.vscode/settings.json vendored Normal file
View File

@ -0,0 +1,18 @@
{
"sqltools.connections": [
{
"mysqlOptions": {
"authProtocol": "default",
"enableSsl": "Disabled"
},
"previewLimit": 50,
"server": "localhost",
"port": 3306,
"driver": "MySQL",
"name": "local",
"database": "b3log_symphony",
"username": "root",
"password": "123456"
}
]
}

2
README.md Normal file
View File

@ -0,0 +1,2 @@
# paper-ai
根据AI寻找到的论文自动生成引用

71
getPaper.py Normal file
View File

@ -0,0 +1,71 @@
import arxiv
def get_authors(authors, first_author = False):
    """Format an arXiv author list for display.

    @param authors: sequence of author objects (anything with a sensible str())
    @param first_author: when True, return only the first author's name
    @return: comma-separated author names, or the first author's name alone
    """
    if first_author:
        # Fix: coerce to str so both branches return the same type
        # (the original returned the raw author object in this branch).
        return str(authors[0])
    return ", ".join(str(author) for author in authors)
def get_daily_papers(topic,query="slam", max_results=2):
    """Query arXiv and build markdown table rows for the newest matches.

    @param topic: str -- label under which the results are grouped
    @param query: str -- arXiv search query string
    @param max_results: int -- maximum number of results to fetch
    @return: dict mapping topic -> {paper_key: markdown table row}
    """
    papers = {}

    search = arxiv.Search(
        query=query,
        max_results=max_results,
        sort_by=arxiv.SortCriterion.SubmittedDate,
    )

    for item in search.results():
        short_id = item.get_short_id()
        title = item.title
        url = item.entry_id
        # NOTE(review): abstract and category are fetched but never used —
        # kept to mirror the original behavior exactly.
        abstract = item.summary.replace("\n", " ")
        all_authors = get_authors(item.authors)
        first_author = get_authors(item.authors, first_author=True)
        category = item.primary_category
        published = item.published.date()

        print("Time = ", published,
              " title = ", title,
              " author = ", first_author)

        # eg: 2108.09112v1 -> 2108.09112 (strip the version suffix, if any)
        version_at = short_id.find('v')
        key = short_id if version_at == -1 else short_id[:version_at]

        papers[key] = f"|**{published}**|**{title}**|{first_author} et.al.|[{short_id}]({url})|\n"

    return {topic: papers}
if __name__ == "__main__":
    # Fetch and collect formatted paper tables for each configured keyword.
    data_collector = []
    keywords = {"SLAM": "SLAM"}
    for topic, keyword in keywords.items():
        print("Keyword: " + topic)
        data_collector.append(
            get_daily_papers(topic, query=keyword, max_results=10)
        )
        print("\n")

4
pass Normal file
View File

@ -0,0 +1,4 @@
wW5gVm+DjTU.WV~
anonpublic:eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJzdXBhYmFzZSIsInJlZiI6InlpZGZ1a2Zicmx1aXpqdmZycnNqIiwicm9sZSI6ImFub24iLCJpYXQiOjE3MDQ4NjMyNjEsImV4cCI6MjAyMDQzOTI2MX0.EXIXAdNIGLFo5wHmwmY2-9bqLO9vyFYDvMMtCtkxgig
service_role: