本帖最后由 SelfLove 于 2024-7-22 23:35 编辑
后续会增加群管理、公众号内容爬取之类的功能,以下是源码:
import win32gui
import win32con
import win32process
import time
import Qwen # 导入自定义AI请求模型
from threading import Thread
from pywinauto.application import Application
def get_hwnd():
    """Return the handle of the WeChat main window.

    Looks the window up by its class name and title.  Note that
    FindWindow returns 0 (not an exception) when no window matches,
    so a falsy handle means "not found"; None is returned only when
    the lookup itself raised.
    """
    try:
        return win32gui.FindWindow("WeChatMainWndForPC", "微信")
    except Exception as e:
        print(e)
def get_process(hwnd):
    """Return the process id that owns window *hwnd* (None on failure)."""
    try:
        # GetWindowThreadProcessId yields (thread_id, process_id);
        # only the process id is needed for the UIA connection.
        _thread_id, pid = win32process.GetWindowThreadProcessId(hwnd)
        return pid
    except Exception as e:
        print(e)
def set_top(hwnd):
    """Restore window *hwnd* if minimized/covered and bring it to the front."""
    try:
        # Restore first: SetForegroundWindow alone will not un-minimize.
        win32gui.ShowWindow(hwnd, win32con.SW_RESTORE)
        win32gui.SetForegroundWindow(hwnd)
    except Exception as e:
        print(e)
def sear_target(proc, name):
    """Open the chat with *name* through WeChat's search box.

    Connects to the WeChat process via the UIA backend, types *name*
    into the search field and clicks the matching result button.

    Returns the pywinauto Application on success, None on any error.
    """
    try:
        # title corresponds to the "Name" field in inspect.exe;
        # control_type to the English LocalizedControlType.
        app = Application(backend='uia').connect(process=proc)
        search_box = app.top_window().child_window(title="搜索")
        search_box.click_input()
        search_box.type_keys(name)
        # Search-result list; NOTE(review): this hard-coded title may be
        # specific to one WeChat build — verify against other versions.
        results = app.top_window().child_window(
            title="@str:IDS_FAV_SEARCH_RESULT:3780", control_type="List")
        results.child_window(title=name, control_type="Button").click_input()
        return app
    except Exception as e:
        print(e)
def get_message(app, target):
    """Poll the chat message list forever and auto-reply to new messages.

    Intended to run on a background thread.  Each second it reads the
    newest item of the "消息" list; when it was sent by *target* and has
    not been answered yet, the text is fed to ai_reply() and the reply
    is typed into the chat's edit box.

    Args:
        app: pywinauto Application connected to the WeChat process.
        target: exact display name of the chat partner to answer.

    Fixes over the original: the two single-element-list holders are
    replaced by one plain variable; printing happens only for genuinely
    new messages instead of every poll; and the sleep is outside the
    try so an exception path no longer busy-spins the loop.
    """
    last_answered = None  # text of the newest message already replied to
    while True:
        try:
            message_list = app.top_window().child_window(title="消息", control_type="List")
            item_last = message_list.get_item(-1)  # newest entry in the list
            # The first Button child of a message item carries the sender name.
            sender = next(item_last.iter_children(control_type="Button")).window_text()
            if sender == target:  # react only to messages from the target
                text = item_last.window_text()
                if text != last_answered:  # skip messages already handled
                    print(text)
                    last_answered = text
                    edit_label = app.top_window().child_window(title=target, control_type="Edit")
                    reply = ai_reply(text)  # ask the local AI model
                    edit_label.click_input()
                    edit_label.type_keys(reply)
                    edit_label.type_keys("{ENTER}")
        except Exception as e:
            print(e)
        time.sleep(1)  # throttle polling; also prevents a hot error loop
def ai_reply(message):
    """Forward *message* to the local Qwen model and return its reply."""
    return Qwen.generate_response(message)
if __name__ == '__main__':
    target = "对话目标"  # must be the full display name of the chat partner
    hwnd = get_hwnd()
    proc = get_process(hwnd)
    set_top(hwnd)
    app = sear_target(proc, target)
    # Background thread: polls for new messages and sends AI replies.
    worker = Thread(target=get_message, args=(app, target))
    worker.start()
    while True:
        time.sleep(1)
        set_top(hwnd)  # keep the WeChat window in the foreground
这是我本地部署的 Qwen2:7b 模型;原谅我的 AMD 显卡即使给 Ollama 打上补丁也跑不动 Qwen3。
from openai import OpenAI
def generate_response(message, times=3):
    """Send *message* to the local Ollama server (OpenAI-compatible API).

    Args:
        message: user prompt to forward to the model.
        times: maximum number of attempts (at least one is always made,
            matching the original behavior).

    Returns:
        The model's reply text, or a Chinese error string once every
        attempt has failed.

    Fixes over the original: retry via a loop instead of recursion, the
    bare ``except:`` is narrowed to ``except Exception`` and the cause
    is printed instead of silently swallowed, and the client is built
    once per attempt only inside the guarded block (as before).
    """
    for _ in range(max(times, 1)):
        try:
            client = OpenAI(
                base_url='http://localhost:11434/v1/',
                # required by the client, but ignored by Ollama
                api_key='ollama',
            )
            chat_completion = client.chat.completions.create(
                messages=[
                    {
                        'role': 'user',
                        'content': message,
                    }
                ],
                model='qwen:7b',
            )
            return chat_completion.choices[0].message.content
        except Exception as e:
            print(e)  # surface the failure cause before retrying
    return '错误:请求异常,请检查您的网络和API地址与Key是否正确'