From 6cda6d176aac29b3972dfde305a62c0be0dc2437 Mon Sep 17 00:00:00 2001
From: liuweiqing
Date: Tue, 20 Feb 2024 10:52:33 +0800
Subject: [PATCH] feat: optional relevance check of references against the topic
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 app/{[lng] => }/global-error.jsx        |  0
 app/i18n/locales/en/translation.json    |  3 +-
 app/i18n/locales/zh-CN/translation.json |  3 +-
 app/store/index.ts                      |  1 +
 app/store/slices/stateSlice.ts          |  6 +++
 components/GetSemantic.tsx              |  2 +-
 components/QuillEditor.tsx              | 69 +++++++++++++++++++++----
 components/Settings.tsx                 | 38 ++++++++++++--
 components/chatAI.tsx                   | 19 ++++---
 sentry.client.config.ts                 | 41 ++++++++-------
 sentry.edge.config.ts                   | 17 +++---
 sentry.server.config.ts                 | 17 +++---
 utils/others/aiutils.ts                 | 51 ++++++++++++++++++
 13 files changed, 204 insertions(+), 63 deletions(-)
 rename app/{[lng] => }/global-error.jsx (100%)
 create mode 100644 utils/others/aiutils.ts

diff --git a/app/[lng]/global-error.jsx b/app/global-error.jsx
similarity index 100%
rename from app/[lng]/global-error.jsx
rename to app/global-error.jsx
diff --git a/app/i18n/locales/en/translation.json b/app/i18n/locales/en/translation.json
index 81eed68..363859c 100644
--- a/app/i18n/locales/en/translation.json
+++ b/app/i18n/locales/en/translation.json
@@ -27,5 +27,6 @@
     "linuxdo": "linuxdo(Recommended)",
     "custom": "Custom"
   },
-  "鼠标点击段落中的上标跳转到文献引用?": "Click the superscript in the paragraph to jump to the reference?"
+  "鼠标点击段落中的上标跳转到文献引用?": "Click the superscript in the paragraph to jump to the reference?",
+  "是否检查文献与主题相关性(如果不相关则不会传给AI引用)": "Check the relevance of the literature to the topic (if it is not relevant, it will not be passed to the AI reference)"
 }
diff --git a/app/i18n/locales/zh-CN/translation.json b/app/i18n/locales/zh-CN/translation.json
index 24429d8..995661d 100644
--- a/app/i18n/locales/zh-CN/translation.json
+++ b/app/i18n/locales/zh-CN/translation.json
@@ -28,5 +28,6 @@
     "linuxdo": "linuxdo(第二个推荐)",
     "custom": "自定义"
   },
-  "鼠标点击段落中的上标跳转到文献引用?": "鼠标点击段落中的上标跳转到文献引用?"
+ "鼠标点击段落中的上标跳转到文献引用?": "鼠标点击段落中的上标跳转到文献引用?", + "是否检查文献与主题相关性(如果不相关则不会传给AI引用)": "是否检查文献与主题相关性(如果不相关则不会传给AI引用)" } diff --git a/app/store/index.ts b/app/store/index.ts index 6861c1a..3e914de 100644 --- a/app/store/index.ts +++ b/app/store/index.ts @@ -29,6 +29,7 @@ const statePersistConfig = { "isVip", "language", "isJumpToReference", + "isEvaluateTopicMatch", ], }; diff --git a/app/store/slices/stateSlice.ts b/app/store/slices/stateSlice.ts index ba7ffb8..4d8a88b 100644 --- a/app/store/slices/stateSlice.ts +++ b/app/store/slices/stateSlice.ts @@ -6,6 +6,7 @@ export interface APIState { isVip: boolean; language: string; isJumpToReference: boolean; + isEvaluateTopicMatch: boolean; } const initialState: APIState = { @@ -15,6 +16,7 @@ const initialState: APIState = { isVip: false, language: "en", isJumpToReference: false, + isEvaluateTopicMatch: false, }; export const stateSlice = createSlice({ @@ -45,6 +47,9 @@ export const stateSlice = createSlice({ setIsJumpToReference: (state, action: PayloadAction) => { state.isJumpToReference = action.payload; }, + setIsEvaluateTopicMatch: (state, action: PayloadAction) => { + state.isEvaluateTopicMatch = action.payload; + }, }, }); @@ -56,6 +61,7 @@ export const { setIsVip, setLanguage, setIsJumpToReference, + setIsEvaluateTopicMatch, } = stateSlice.actions; export const stateReducer = stateSlice.reducer; diff --git a/components/GetSemantic.tsx b/components/GetSemantic.tsx index 80e8cc6..816cbb2 100644 --- a/components/GetSemantic.tsx +++ b/components/GetSemantic.tsx @@ -32,7 +32,7 @@ async function getSemanticPapers( params: { query: query, offset: offset, - limit: 2, + limit: limit, year: year, fields: "title,year,authors.name,abstract,venue,url,journal", }, diff --git a/components/QuillEditor.tsx b/components/QuillEditor.tsx index d809f7b..a931ebe 100644 --- a/components/QuillEditor.tsx +++ b/components/QuillEditor.tsx @@ -10,7 +10,7 @@ import Link from "next/link"; import getArxivPapers from "./GetArxiv"; import getSemanticPapers from "./GetSemantic"; import { fetchPubMedData } from "./GetPubMed "; -import { getTopicFromAI, sendMessageToOpenAI } from "./chatAI"; +import { getAI, sendMessageToOpenAI } from "./chatAI"; import { getTextBeforeCursor, convertToSuperscript, @@ -19,6 +19,7 @@ import { getNumberBeforeCursor, formatJournalReference, } from "@/utils/others/quillutils"; +import { evaluateTopicMatch } from "@/utils/others/aiutils"; //组件 import ExportDocx from "./Export"; import ReferenceList from "./ReferenceList"; @@ -74,6 +75,9 @@ const QEditor = ({ lng }) => { const isJumpToReference = useAppSelector( (state) => state.state.isJumpToReference ); + const isEvaluateTopicMatch = useAppSelector( + (state) => state.state.isEvaluateTopicMatch + ); const [quill, setQuill] = useState(null); const contentUpdatedFromNetwork = useAppSelector( (state) => state.state.contentUpdatedFromNetwork @@ -269,8 +273,14 @@ const QEditor = ({ lng }) => { async function paper2AI(topic: string) { quill!.setSelection(cursorPosition!, 0); // 将光标移动到原来的位置 let offset = -1; - if (generatedPaperNumber) offset = 0; - setOpenProgressBar(true); + if (generatedPaperNumber != 1) offset = 0; //如果生成的数量不为1,则从0开始 + setOpenProgressBar(true); //开启进度条 + //如果说要评估主题是否匹配的话,就要多获取一些文献 + let limit = 2; + if (isEvaluateTopicMatch) { + limit = 4; + } + for (let i = 0; i < generatedPaperNumber!; i++) { try { if (!topic) { @@ -278,7 +288,13 @@ const QEditor = ({ lng }) => { const prompt = "As a topic extraction assistant, you can help me extract the current discussion of the paper topic, I will 
enter the content of the paper, you extract the paper topic , no more than two, Hyphenated query terms yield no matches (replace it with space to find matches) return format is: topic1 topic2"; const userMessage = getTextBeforeCursor(quill!, 2000); - topic = await getTopicFromAI(userMessage, prompt, apiKey); + topic = await getAI( + userMessage, + prompt, + apiKey, + upsreamUrl, + selectedModel! + ); console.log("topic in AI before removeSpecialCharacters", topic); topic = removeSpecialCharacters(topic); topic = topic.split(" ").slice(0, 2).join(" "); @@ -290,7 +306,19 @@ const QEditor = ({ lng }) => { console.log("topic in AI", topic); let rawData, dataString, newReferences; if (selectedSource === "arxiv") { - rawData = await getArxivPapers(topic); + rawData = await getArxivPapers(topic, limit, offset); + //判断返回的文献是否跟用户输入的主题相关 + if (isEvaluateTopicMatch) { + const { relevantPapers, nonRelevantPapers } = + await evaluateTopicMatch( + rawData, + apiKey, + upsreamUrl, + selectedModel!, + topic + ); + rawData = relevantPapers; + } console.log("arxiv rawdata:", rawData); // 将 rawData 转换为引用数组 newReferences = rawData.map((entry: any) => ({ @@ -305,7 +333,19 @@ const QEditor = ({ lng }) => { }) .join(""); } else if (selectedSource === "semanticScholar") { - rawData = await getSemanticPapers(topic, "2015-2023", offset); + rawData = await getSemanticPapers(topic, "2015-2023", offset, limit); + //判断返回的文献是否跟用户输入的主题相关 + if (isEvaluateTopicMatch) { + const { relevantPapers, nonRelevantPapers } = + await evaluateTopicMatch( + rawData, + apiKey, + upsreamUrl, + selectedModel!, + topic + ); + rawData = relevantPapers; + } // 将 rawData 转换为引用数组 newReferences = rawData.map((entry: any) => ({ url: entry.url, @@ -321,10 +361,22 @@ const QEditor = ({ lng }) => { }) .join(""); } else if (selectedSource === "pubmed") { - rawData = await fetchPubMedData(topic, 2020, offset, 2); + rawData = await fetchPubMedData(topic, 2020, offset, limit); if (!rawData) { throw new Error("未搜索到文献 from PubMed."); } + //判断返回的文献是否跟用户输入的主题相关 + if (isEvaluateTopicMatch) { + const { relevantPapers, nonRelevantPapers } = + await evaluateTopicMatch( + rawData, + apiKey, + upsreamUrl, + selectedModel!, + topic + ); + rawData = relevantPapers; + } newReferences = rawData.map((entry: any) => ({ id: entry.id, // 文章的 PubMed ID title: entry.title, // 文章的标题 @@ -336,9 +388,8 @@ const QEditor = ({ lng }) => { source: "PubMed", // 指示这些引用来自 PubMed })); - // 打印或进一步处理 newReferences + // 打印 newReferences console.log(newReferences); - dataString = rawData .map((entry: any) => { return `Time: ${entry.year}\nTitle: ${entry.title}\nSummary: ${entry.abstract}\n\n`; diff --git a/components/Settings.tsx b/components/Settings.tsx index fca63f3..280dc00 100644 --- a/components/Settings.tsx +++ b/components/Settings.tsx @@ -7,7 +7,10 @@ import { setUpsreamUrl, setSystemPrompt, } from "@/app/store/slices/authSlice"; -import { setIsJumpToReference } from "@/app/store/slices/stateSlice"; +import { + setIsJumpToReference, + setIsEvaluateTopicMatch, +} from "@/app/store/slices/stateSlice"; import { FontAwesomeIcon } from "@fortawesome/react-fontawesome"; import { faArrowLeft } from "@fortawesome/free-solid-svg-icons"; import Link from "next/link"; @@ -53,14 +56,16 @@ const Settings = ({ lng }: { lng: string }) => { const isJumpToReference = useAppSelector( (state) => state.state.isJumpToReference ); + const isEvaluateTopicMatch = useAppSelector( + (state) => state.state.isEvaluateTopicMatch + ); //state const [userConfigNumber, setUserConfigNumber] = useLocalStorage( 
"userConfigNumber", "2" ); - - const toggleSwitch = () => { - dispatch(setIsJumpToReference(!isJumpToReference)); + const toggleSwitch = (currentState: any, setState: any) => { + setState(!currentState); }; return (
@@ -153,7 +158,11 @@ const Settings = ({ lng }: { lng: string }) => { type="checkbox" className="sr-only peer" checked={isJumpToReference} - onChange={toggleSwitch} + onChange={() => + toggleSwitch(isJumpToReference, (value: any) => + dispatch(setIsJumpToReference(value)) + ) + } />
{ > {t("鼠标点击段落中的上标跳转到文献引用?")} +
); }; diff --git a/components/chatAI.tsx b/components/chatAI.tsx index 28d6591..1ba3536 100644 --- a/components/chatAI.tsx +++ b/components/chatAI.tsx @@ -105,10 +105,12 @@ const sendMessageToOpenAI = async ( } }; -const getTopicFromAI = async ( +const getAI = async ( userMessage: string, - prompt: string, - apiKey: string + systemPrompt: string, + apiKey: string, + upsreamUrl: string, + selectedModel: string ) => { // 设置API请求参数 const requestOptions = { @@ -122,12 +124,12 @@ const getTopicFromAI = async ( : process.env.NEXT_PUBLIC_OPENAI_API_KEY), }, body: JSON.stringify({ - model: "gpt-3.5-turbo", + model: selectedModel || "gpt-3.5-turbo", stream: false, messages: [ { role: "system", - content: prompt, + content: systemPrompt, }, { role: "user", @@ -137,7 +139,7 @@ const getTopicFromAI = async ( }), }; const response = await fetch( - process.env.NEXT_PUBLIC_AI_URL + "/v1/chat/completions", + (upsreamUrl || process.env.NEXT_PUBLIC_AI_URL) + "/v1/chat/completions", requestOptions ); const data = await response.json(); @@ -145,9 +147,6 @@ const getTopicFromAI = async ( return topic; // 获取并返回回复 }; -// 给getTopicFromAI函数创建别名 -// export const getFromAI = sendMessageToOpenAI; - async function processResult(reader, decoder, editor) { let buffer = ""; while (true) { @@ -207,4 +206,4 @@ async function processResult(reader, decoder, editor) { } } -export { getTopicFromAI, sendMessageToOpenAI }; +export { getAI, sendMessageToOpenAI }; diff --git a/sentry.client.config.ts b/sentry.client.config.ts index 62e4c7d..eb201d6 100644 --- a/sentry.client.config.ts +++ b/sentry.client.config.ts @@ -3,28 +3,29 @@ // https://docs.sentry.io/platforms/javascript/guides/nextjs/ import * as Sentry from "@sentry/nextjs"; +if (process.env.NODE_ENV === "production") { + Sentry.init({ + dsn: "https://523c4056ba48d012c62a377dfc49f647@o4506728662564864.ingest.sentry.io/4506728672264192", -Sentry.init({ - dsn: "https://523c4056ba48d012c62a377dfc49f647@o4506728662564864.ingest.sentry.io/4506728672264192", + // Adjust this value in production, or use tracesSampler for greater control + tracesSampleRate: 1, - // Adjust this value in production, or use tracesSampler for greater control - tracesSampleRate: 1, + // Setting this option to true will print useful information to the console while you're setting up Sentry. + debug: false, - // Setting this option to true will print useful information to the console while you're setting up Sentry. - debug: false, + replaysOnErrorSampleRate: 1.0, - replaysOnErrorSampleRate: 1.0, + // This sets the sample rate to be 10%. You may want this to be 100% while + // in development and sample at a lower rate in production + replaysSessionSampleRate: 0.1, - // This sets the sample rate to be 10%. 
You may want this to be 100% while - // in development and sample at a lower rate in production - replaysSessionSampleRate: 0.1, - - // You can remove this option if you're not planning to use the Sentry Session Replay feature: - integrations: [ - Sentry.replayIntegration({ - // Additional Replay configuration goes in here, for example: - maskAllText: true, - blockAllMedia: true, - }), - ], -}); + // You can remove this option if you're not planning to use the Sentry Session Replay feature: + integrations: [ + Sentry.replayIntegration({ + // Additional Replay configuration goes in here, for example: + maskAllText: true, + blockAllMedia: true, + }), + ], + }); +} diff --git a/sentry.edge.config.ts b/sentry.edge.config.ts index 145d507..aabb319 100644 --- a/sentry.edge.config.ts +++ b/sentry.edge.config.ts @@ -4,13 +4,14 @@ // https://docs.sentry.io/platforms/javascript/guides/nextjs/ import * as Sentry from "@sentry/nextjs"; +if (process.env.NODE_ENV === "production") { + Sentry.init({ + dsn: "https://523c4056ba48d012c62a377dfc49f647@o4506728662564864.ingest.sentry.io/4506728672264192", -Sentry.init({ - dsn: "https://523c4056ba48d012c62a377dfc49f647@o4506728662564864.ingest.sentry.io/4506728672264192", + // Adjust this value in production, or use tracesSampler for greater control + tracesSampleRate: 1, - // Adjust this value in production, or use tracesSampler for greater control - tracesSampleRate: 1, - - // Setting this option to true will print useful information to the console while you're setting up Sentry. - debug: false, -}); + // Setting this option to true will print useful information to the console while you're setting up Sentry. + debug: false, + }); +} diff --git a/sentry.server.config.ts b/sentry.server.config.ts index 1492df8..6edbd22 100644 --- a/sentry.server.config.ts +++ b/sentry.server.config.ts @@ -3,13 +3,14 @@ // https://docs.sentry.io/platforms/javascript/guides/nextjs/ import * as Sentry from "@sentry/nextjs"; +if (process.env.NODE_ENV === "production") { + Sentry.init({ + dsn: "https://523c4056ba48d012c62a377dfc49f647@o4506728662564864.ingest.sentry.io/4506728672264192", -Sentry.init({ - dsn: "https://523c4056ba48d012c62a377dfc49f647@o4506728662564864.ingest.sentry.io/4506728672264192", + // Adjust this value in production, or use tracesSampler for greater control + tracesSampleRate: 1, - // Adjust this value in production, or use tracesSampler for greater control - tracesSampleRate: 1, - - // Setting this option to true will print useful information to the console while you're setting up Sentry. - debug: false, -}); + // Setting this option to true will print useful information to the console while you're setting up Sentry. 
+    debug: false,
+  });
+}
diff --git a/utils/others/aiutils.ts b/utils/others/aiutils.ts
new file mode 100644
index 0000000..adab29d
--- /dev/null
+++ b/utils/others/aiutils.ts
@@ -0,0 +1,51 @@
+// Path: utils/others/aiutils.ts
+import { getAI } from "@/components/chatAI";
+// Check whether the returned papers are relevant to the user's topic
+export async function evaluateTopicMatch(
+  userMessage: any[],
+  apiKey: string,
+  upsreamUrl: string,
+  selectedModel: string,
+  topic: string
+): Promise<{ relevantPapers: any[]; nonRelevantPapers: any[] }> {
+  const prompt = "请判断文献是否跟用户输入的主题相关,只需要返回true或者false";
+  let relevantPapers: any[] = []; // papers judged relevant
+  let nonRelevantPapers: any[] = []; // papers judged not relevant
+
+  for (const paper of userMessage) {
+    const input = `user's topic:${topic}, \n paper's title: ${paper.title}, \n paper's abstract: ${paper.abstract}`;
+    const isRelevantResult = await getAI(
+      input,
+      prompt,
+      apiKey,
+      upsreamUrl,
+      selectedModel!
+    );
+    console.log("isRelevantResult", isRelevantResult);
+    // Try to parse the result as JSON; if that fails, fall back to a plain string check
+    let isRelevant;
+    try {
+      const parsedResult = JSON.parse(isRelevantResult);
+      isRelevant =
+        parsedResult === true || parsedResult.toLowerCase() === "true";
+    } catch {
+      isRelevant =
+        isRelevantResult.includes("true") || isRelevantResult.includes("True");
+    }
+
+    if (isRelevant) {
+      relevantPapers.push(paper); // relevant: keep it
+    } else {
+      nonRelevantPapers.push(paper); // not relevant: set it aside
+    }
+    console.log(
+      `${nonRelevantPapers.length} paper(s) have failed the relevance check so far`,
+      nonRelevantPapers
+    );
+  }
+  // If more than two papers are relevant, trim the list to two
+  if (relevantPapers.length > 2) {
+    relevantPapers = relevantPapers.slice(0, 2);
+  }
+  return { relevantPapers, nonRelevantPapers };
+}
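Usage note: the new evaluateTopicMatch helper introduced by this patch can also be exercised on its own. The sketch below is illustrative only — the paper objects, API key, upstream URL, model name, and topic are placeholder values, not part of the patch:

// Minimal, hypothetical usage sketch of the relevance filter added in utils/others/aiutils.ts.
// All literal values below are placeholders supplied for illustration.
import { evaluateTopicMatch } from "@/utils/others/aiutils";

async function demoRelevanceFilter() {
  // Hypothetical papers in the same shape paper2AI passes in (title + abstract).
  const papers = [
    { title: "Deep Residual Learning for Image Recognition", abstract: "We present a residual learning framework..." },
    { title: "Soil Microbiomes in Arid Climates", abstract: "This survey reviews microbial communities..." },
  ];

  const { relevantPapers, nonRelevantPapers } = await evaluateTopicMatch(
    papers,
    "sk-placeholder",          // apiKey (placeholder)
    "https://api.example.com", // upsreamUrl (placeholder); getAI falls back to NEXT_PUBLIC_AI_URL when empty
    "gpt-3.5-turbo",           // selectedModel; getAI's default when empty
    "image classification"     // topic (placeholder)
  );

  console.log("kept:", relevantPapers.length, "filtered out:", nonRelevantPapers.length);
}

Because the helper issues one chat-completion request per paper, and paper2AI raises the fetch limit from 2 to 4 when the check is enabled, turning the option on costs up to four extra model calls per search; at most two relevant papers are kept for citation.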