lele committed on
Commit
8fa2bba
·
1 Parent(s): 2d07852

modified: app.py

Browse files

modified: requirements.txt
modified: src/ai_infra.py
modified: src/ai_transform.py

Files changed (4) hide show
  1. app.py +4 -1
  2. requirements.txt +2 -1
  3. src/ai_infra.py +53 -8
  4. src/ai_transform.py +4 -1
app.py CHANGED
@@ -4,6 +4,7 @@ from src.ai_infra import init_ai_config, get_ai_models
4
  from src.utils import *
5
  import requests, os, datetime
6
  import json
 
7
 
8
  # for offline debugging
9
  if os.path.exists("secret.env"):
@@ -19,7 +20,7 @@ NOTION_TOKEN = os.environ.get("NOTION_TOKEN")
19
  DB_ID = os.environ.get("DB_ID")
20
 
21
  def get_property(it, task_name, url, api_key, model):
22
- properties = {
23
  "Task name": {
24
  "title": [{"text": {"content": task_name}}]
25
  }
@@ -72,6 +73,8 @@ def init_items(items):
72
 
73
  @app.route("/", methods=["GET"])
74
  def index():
 
 
75
  return send_from_directory(app.static_folder, "index.html")
76
 
77
  # @app.route('/', methods=['GET'])
 
4
  from src.utils import *
5
  import requests, os, datetime
6
  import json
7
+ from typing import Any
8
 
9
  # for offline debugging
10
  if os.path.exists("secret.env"):
 
20
  DB_ID = os.environ.get("DB_ID")
21
 
22
  def get_property(it, task_name, url, api_key, model):
23
+ properties: dict[str, Any] = {
24
  "Task name": {
25
  "title": [{"text": {"content": task_name}}]
26
  }
 
73
 
74
@app.route("/", methods=["GET"])
def index():
    """Serve the SPA entry page from the app's static directory."""
    # Guard: Flask allows static_folder to be unset; fail loudly instead of
    # letting send_from_directory blow up on a None path.
    folder = app.static_folder
    if folder is None:
        raise RuntimeError("Static folder is not configured")
    return send_from_directory(folder, "index.html")
79
 
80
  # @app.route('/', methods=['GET'])
requirements.txt CHANGED
@@ -1,4 +1,5 @@
1
  flask==2.3.3
2
  requests==2.31.0
3
  python-dotenv
4
- openai>=0.27.8
 
 
1
  flask==2.3.3
2
  requests==2.31.0
3
  python-dotenv
4
+ openai>=0.27.8
5
+ google-genai==1.50.0
src/ai_infra.py CHANGED
@@ -1,8 +1,15 @@
1
 
2
  import os
3
  from openai import OpenAI
 
 
 
 
 
 
 
4
 
5
- def chat_completion(question: str, model: str, base_url: str, api_key: str, system_instr: str = None) -> str:
6
  '''
7
  ai interaction function using OpenAI SDK.
8
  Parameters
@@ -20,21 +27,59 @@ def chat_completion(question: str, model: str, base_url: str, api_key: str, syst
20
  '''
21
  client = OpenAI(api_key=api_key,
22
  base_url=base_url)
23
- messages = [{"role": "user", "content": question}]
 
 
 
24
  if system_instr is not None:
25
- messages.append({
26
- "role": "system",
27
- "content": system_instr
28
- })
29
  response = client.chat.completions.create(
30
  # reasoning_effort="high",
31
  model=model,
32
  messages=messages
33
  )
34
- content = response.choices[0].message.content
35
  print(f"Model: {model}, Base URL: {base_url}\nResponse: {content}\n")
36
  return content
37
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
38
  def init_ai_config(model: str = "default")-> dict[str, str]:
39
  '''
40
  Initialize configuration for different AI models based on the model name.
@@ -110,7 +155,7 @@ def init_ai_config(model: str = "default")-> dict[str, str]:
110
  }
111
  API_MODEL = "GPT"
112
  # config["api_key"] = "Bearer " + os.environ.get(f"{API_MODEL}_KEY")
113
- config["api_key"] = os.environ.get(f"{API_MODEL}_KEY")
114
  return config
115
 
116
  def get_ai_models() -> list:
 
1
 
2
  import os
3
  from openai import OpenAI
4
+ from openai.types.chat import (
5
+ ChatCompletionUserMessageParam,
6
+ ChatCompletionSystemMessageParam,
7
+ )
8
+ from google import genai
9
+ from typing import Union
10
+ import io
11
 
12
+ def chat_completion(question: str, model: str, base_url: str, api_key: str, system_instr: str | None = None) -> str:
13
  '''
14
  ai interaction function using OpenAI SDK.
15
  Parameters
 
27
  '''
28
  client = OpenAI(api_key=api_key,
29
  base_url=base_url)
30
+ messages: list[ChatCompletionUserMessageParam |
31
+ ChatCompletionSystemMessageParam] = [
32
+ ChatCompletionUserMessageParam(role="user", content=question)
33
+ ]
34
  if system_instr is not None:
35
+ messages.append(
36
+ ChatCompletionSystemMessageParam(role="system", content=system_instr)
37
+ )
 
38
  response = client.chat.completions.create(
39
  # reasoning_effort="high",
40
  model=model,
41
  messages=messages
42
  )
43
+ content = response.choices[0].message.content or ""
44
  print(f"Model: {model}, Base URL: {base_url}\nResponse: {content}\n")
45
  return content
46
 
47
def init_genai_client(api_key: str | None = None) -> genai.Client:
    """Create a Google GenAI client.

    Parameters
    ----------
    api_key : str | None
        Explicit API key. When falsy, falls back to the ``GEMINI_KEY``
        environment variable.

    Raises
    ------
    ValueError
        If no key is supplied and ``GEMINI_KEY`` is unset or empty.
    """
    if not api_key:
        # Plain string literal: the original f"GEMINI_KEY" was an f-string
        # with no placeholders, which is misleading and lint-flagged.
        api_key = os.environ.get("GEMINI_KEY") or ""
    if not api_key:
        raise ValueError("API key for Gemini is not provided.")
    return genai.Client(api_key=api_key)
53
+
54
def interact_with_pdf(client: genai.Client, file: Union[str, os.PathLike[str], io.IOBase], question: str = "") -> str:
    '''
    ai interaction function using Generative SDK to interact with PDF files.
    Parameters
    ----------
    file :
        A path to the file or an `IOBase` object to be uploaded. If it's an
        IOBase object, it must be opened in blocking (the default) mode and
        binary mode. In other words, do not use non-blocking mode or text mode.
        The given stream must be seekable, that is, it must be able to call
        `seek()` on 'path'.
    question : str
        The input question or prompt to send to the AI model.
    '''
    # Upload the PDF so the model can reference it during generation.
    uploaded = client.files.upload(file=file)
    # Ask the model, grounding the prompt in the uploaded document.
    result = client.models.generate_content(
        model="gemini-2.5-flash",
        contents=[question, uploaded],
    )
    answer = result.text or ""
    print(answer)
    return answer
82
+
83
  def init_ai_config(model: str = "default")-> dict[str, str]:
84
  '''
85
  Initialize configuration for different AI models based on the model name.
 
155
  }
156
  API_MODEL = "GPT"
157
  # config["api_key"] = "Bearer " + os.environ.get(f"{API_MODEL}_KEY")
158
+ config["api_key"] = os.environ.get(f"{API_MODEL}_KEY") or ""
159
  return config
160
 
161
  def get_ai_models() -> list:
src/ai_transform.py CHANGED
@@ -9,7 +9,7 @@ from typing import Union, List
9
  with open("prompt.txt", encoding="utf-8") as f:
10
  system_content = f.read()
11
 
12
- def modified_with_ai(items, url, api_key, model=None):
13
  # 在这里调用AI模型对item进行修改
14
  # today_date = time.strftime("%Y-%m-%d", time.localtime())
15
  today_date = time_cali()
@@ -49,10 +49,13 @@ def classify_task_with_ai(
49
  返回任务所属的类别名称,或在不匹配任何类别时返回 ''。
50
  """
51
  # 1. 读取并格式化任务类别 (RAG中的 "R" - Retrieval)
 
 
52
  if config is not None:
53
  url = config["url"]
54
  api_key = config["api_key"]
55
  model = config["model"]
 
56
  categories_mapping = load_task_mapping_from_txt(categories_filepath)
57
  task_list = categories_mapping.keys()
58
  # 2. 定义分类 prompt 模板
 
9
  with open("prompt.txt", encoding="utf-8") as f:
10
  system_content = f.read()
11
 
12
+ def modified_with_ai(items, url, api_key, model):
13
  # 在这里调用AI模型对item进行修改
14
  # today_date = time.strftime("%Y-%m-%d", time.localtime())
15
  today_date = time_cali()
 
49
  返回任务所属的类别名称,或在不匹配任何类别时返回 ''。
50
  """
51
  # 1. 读取并格式化任务类别 (RAG中的 "R" - Retrieval)
52
+ if config is None and model is None:
53
+ raise ValueError("Either config or model must be provided.")
54
  if config is not None:
55
  url = config["url"]
56
  api_key = config["api_key"]
57
  model = config["model"]
58
+ assert isinstance(model, str)
59
  categories_mapping = load_task_mapping_from_txt(categories_filepath)
60
  task_list = categories_mapping.keys()
61
  # 2. 定义分类 prompt 模板