first commit

master · ma-zhongxu · 6 days ago · commit 01171a1ffb
100 changed files (lines added per file):
  1. .gitignore (+47)
  2. LICENSE (+20)
  3. README.md (+95)
  4. bin/clean.bat (+12)
  5. bin/package.bat (+12)
  6. bin/run.bat (+14)
  7. chenhai-admin/pom.xml (+125)
  8. chenhai-admin/src/main/java/com/chenhai/RuoYiApplication.java (+30)
  9. chenhai-admin/src/main/java/com/chenhai/RuoYiServletInitializer.java (+18)
  10. chenhai-admin/src/main/java/com/chenhai/web/controller/common/CaptchaController.java (+94)
  11. chenhai-admin/src/main/java/com/chenhai/web/controller/common/CommonController.java (+162)
  12. chenhai-admin/src/main/java/com/chenhai/web/controller/monitor/CacheController.java (+122)
  13. chenhai-admin/src/main/java/com/chenhai/web/controller/monitor/ServerController.java (+27)
  14. chenhai-admin/src/main/java/com/chenhai/web/controller/monitor/SysLogininforController.java (+82)
  15. chenhai-admin/src/main/java/com/chenhai/web/controller/monitor/SysOperlogController.java (+69)
  16. chenhai-admin/src/main/java/com/chenhai/web/controller/monitor/SysUserOnlineController.java (+83)
  17. chenhai-admin/src/main/java/com/chenhai/web/controller/system/SysConfigController.java (+133)
  18. chenhai-admin/src/main/java/com/chenhai/web/controller/system/SysDeptController.java (+132)
  19. chenhai-admin/src/main/java/com/chenhai/web/controller/system/SysDictDataController.java (+121)
  20. chenhai-admin/src/main/java/com/chenhai/web/controller/system/SysDictTypeController.java (+131)
  21. chenhai-admin/src/main/java/com/chenhai/web/controller/system/SysIndexController.java (+29)
  22. chenhai-admin/src/main/java/com/chenhai/web/controller/system/SysLoginController.java (+131)
  23. chenhai-admin/src/main/java/com/chenhai/web/controller/system/SysMenuController.java (+142)
  24. chenhai-admin/src/main/java/com/chenhai/web/controller/system/SysNoticeController.java (+91)
  25. chenhai-admin/src/main/java/com/chenhai/web/controller/system/SysPostController.java (+129)
  26. chenhai-admin/src/main/java/com/chenhai/web/controller/system/SysProfileController.java (+148)
  27. chenhai-admin/src/main/java/com/chenhai/web/controller/system/SysRegisterController.java (+38)
  28. chenhai-admin/src/main/java/com/chenhai/web/controller/system/SysRoleController.java (+262)
  29. chenhai-admin/src/main/java/com/chenhai/web/controller/system/SysUserController.java (+256)
  30. chenhai-admin/src/main/java/com/chenhai/web/controller/tool/TestController.java (+175)
  31. chenhai-admin/src/main/java/com/chenhai/web/core/config/SwaggerConfig.java (+64)
  32. chenhai-admin/src/main/resources/META-INF/spring-devtools.properties (+1)
  33. chenhai-admin/src/main/resources/application-druid.yml (+108)
  34. chenhai-admin/src/main/resources/application.yml (+148)
  35. chenhai-admin/src/main/resources/banner.txt (+24)
  36. chenhai-admin/src/main/resources/i18n/messages.properties (+38)
  37. chenhai-admin/src/main/resources/logback.xml (+93)
  38. chenhai-admin/src/main/resources/mybatis/mybatis-config.xml (+20)
  39. chenhai-ai/pom.xml (+78)
  40. chenhai-ai/src/main/java/com/chenhai/chenhaiai/config/AsyncConfig.java (+43)
  41. chenhai-ai/src/main/java/com/chenhai/chenhaiai/config/BeanChecker.java (+28)
  42. chenhai-ai/src/main/java/com/chenhai/chenhaiai/config/ChatClientConfig.java (+38)
  43. chenhai-ai/src/main/java/com/chenhai/chenhaiai/config/ChatModelFactory.java (+141)
  44. chenhai-ai/src/main/java/com/chenhai/chenhaiai/config/GraphConfig.java (+152)
  45. chenhai-ai/src/main/java/com/chenhai/chenhaiai/config/ProgressEmitterConfig.java (+18)
  46. chenhai-ai/src/main/java/com/chenhai/chenhaiai/controller/GiteaController.java (+84)
  47. chenhai-ai/src/main/java/com/chenhai/chenhaiai/controller/GraphController.java (+526)
  48. chenhai-ai/src/main/java/com/chenhai/chenhaiai/controller/McpController.java (+146)
  49. chenhai-ai/src/main/java/com/chenhai/chenhaiai/controller/ModelController.java (+125)
  50. chenhai-ai/src/main/java/com/chenhai/chenhaiai/controller/RealTestController.java (+40)
  51. chenhai-ai/src/main/java/com/chenhai/chenhaiai/entity/AnalysisResult.java (+109)
  52. chenhai-ai/src/main/java/com/chenhai/chenhaiai/entity/DailyPaper.java (+21)
  53. chenhai-ai/src/main/java/com/chenhai/chenhaiai/entity/Dept.java (+22)
  54. chenhai-ai/src/main/java/com/chenhai/chenhaiai/entity/UserInfo.java (+16)
  55. chenhai-ai/src/main/java/com/chenhai/chenhaiai/entity/WeekPlanDetail.java (+21)
  56. chenhai-ai/src/main/java/com/chenhai/chenhaiai/entity/WeekPlanMain.java (+21)
  57. chenhai-ai/src/main/java/com/chenhai/chenhaiai/entity/WeekPlanResponse.java (+58)
  58. chenhai-ai/src/main/java/com/chenhai/chenhaiai/entity/WeekProject.java (+37)
  59. chenhai-ai/src/main/java/com/chenhai/chenhaiai/entity/git/BasicInfo.java (+22)
  60. chenhai-ai/src/main/java/com/chenhai/chenhaiai/entity/git/DayStats.java (+13)
  61. chenhai-ai/src/main/java/com/chenhai/chenhaiai/entity/git/DeveloperActivity.java (+94)
  62. chenhai-ai/src/main/java/com/chenhai/chenhaiai/entity/git/DeveloperRank.java (+15)
  63. chenhai-ai/src/main/java/com/chenhai/chenhaiai/entity/git/FileTypeStats.java (+13)
  64. chenhai-ai/src/main/java/com/chenhai/chenhaiai/entity/git/GitAnalysisData.java (+23)
  65. chenhai-ai/src/main/java/com/chenhai/chenhaiai/entity/git/GiteaCommit.java (+203)
  66. chenhai-ai/src/main/java/com/chenhai/chenhaiai/entity/git/GiteaRepository.java (+116)
  67. chenhai-ai/src/main/java/com/chenhai/chenhaiai/entity/git/RepoRank.java (+16)
  68. chenhai-ai/src/main/java/com/chenhai/chenhaiai/entity/git/RepositoryActivity.java (+98)
  69. chenhai-ai/src/main/java/com/chenhai/chenhaiai/node/DataAssociationNode.java (+135)
  70. chenhai-ai/src/main/java/com/chenhai/chenhaiai/node/DataOrganizationNode.java (+83)
  71. chenhai-ai/src/main/java/com/chenhai/chenhaiai/node/DataTranslationNode.java (+167)
  72. chenhai-ai/src/main/java/com/chenhai/chenhaiai/node/WeekPlanAnalysisNode.java (+83)
  73. chenhai-ai/src/main/java/com/chenhai/chenhaiai/node/weekPlan/Analysis.java (+139)
  74. chenhai-ai/src/main/java/com/chenhai/chenhaiai/node/weekPlan/AnalysisStreamNode.java (+148)
  75. chenhai-ai/src/main/java/com/chenhai/chenhaiai/node/weekPlan/jdbc/DailyPaperJdbcNode.java (+69)
  76. chenhai-ai/src/main/java/com/chenhai/chenhaiai/node/weekPlan/jdbc/DeptJdbcNode.java (+68)
  77. chenhai-ai/src/main/java/com/chenhai/chenhaiai/node/weekPlan/jdbc/GitAnalysisNode.java (+128)
  78. chenhai-ai/src/main/java/com/chenhai/chenhaiai/node/weekPlan/jdbc/UserJdbcNode.java (+66)
  79. chenhai-ai/src/main/java/com/chenhai/chenhaiai/node/weekPlan/jdbc/WeekPlanDetailJdbcNode.java (+67)
  80. chenhai-ai/src/main/java/com/chenhai/chenhaiai/node/weekPlan/jdbc/WeekPlanMainJdbcNode.java (+75)
  81. chenhai-ai/src/main/java/com/chenhai/chenhaiai/node/weekPlan/mcp/DailyPaperNode.java (+68)
  82. chenhai-ai/src/main/java/com/chenhai/chenhaiai/node/weekPlan/mcp/DeptNode.java (+51)
  83. chenhai-ai/src/main/java/com/chenhai/chenhaiai/node/weekPlan/mcp/UserNode.java (+50)
  84. chenhai-ai/src/main/java/com/chenhai/chenhaiai/node/weekPlan/mcp/WeekPlanDetailNode.java (+50)
  85. chenhai-ai/src/main/java/com/chenhai/chenhaiai/node/weekPlan/mcp/WeekPlanMainNode.java (+51)
  86. chenhai-ai/src/main/java/com/chenhai/chenhaiai/service/AnalysisStreamService.java (+117)
  87. chenhai-ai/src/main/java/com/chenhai/chenhaiai/service/GiteaAnalysisParallelService.java (+583)
  88. chenhai-ai/src/main/java/com/chenhai/chenhaiai/service/GiteaAnalysisParallelTest.java (+50)
  89. chenhai-ai/src/main/java/com/chenhai/chenhaiai/service/GiteaAnalysisService.java (+570)
  90. chenhai-ai/src/main/java/com/chenhai/chenhaiai/service/GiteaAnalysisTest.java (+398)
  91. chenhai-ai/src/main/java/com/chenhai/chenhaiai/service/LongTermGiteaAnalysisService.java (+605)
  92. chenhai-ai/src/main/java/com/chenhai/chenhaiai/service/MarkdownService.java (+208)
  93. chenhai-ai/src/main/java/com/chenhai/chenhaiai/service/SimpleConcurrentTest.java (+172)
  94. chenhai-ai/src/main/java/com/chenhai/chenhaiai/service/gitNew/GiteaDataService.java (+907)
  95. chenhai-ai/src/main/java/com/chenhai/chenhaiai/service/gitNew/GiteaGranularityService.java (+946)
  96. chenhai-ai/src/main/java/com/chenhai/chenhaiai/service/gitNew/GiteaQueryService.java (+802)
  97. chenhai-ai/src/main/java/com/chenhai/chenhaiai/utils/CharacterStreamProcessor.java (+209)
  98. chenhai-ai/src/main/java/com/chenhai/chenhaiai/utils/ProgressEmitter.java (+24)
  99. chenhai-ai/src/main/java/com/chenhai/chenhaiai/utils/PromptLoader.java (+277)
  100. chenhai-ai/src/main/java/com/chenhai/chenhaiai/utils/TextFormatUtils.java (+81)

.gitignore (+47)

@@ -0,0 +1,47 @@
######################################################################
# Build Tools
.gradle
/build/
!gradle/wrapper/gradle-wrapper.jar
target/
!.mvn/wrapper/maven-wrapper.jar
######################################################################
# IDE
### STS ###
.apt_generated
.classpath
.factorypath
.project
.settings
.springBeans
### IntelliJ IDEA ###
.idea
*.iws
*.iml
*.ipr
### JRebel ###
rebel.xml
### NetBeans ###
nbproject/private/
build/*
nbbuild/
dist/
nbdist/
.nb-gradle/
######################################################################
# Others
*.log
*.xml.versionsBackup
*.swp
!*/build/*.java
!*/build/*.html
!*/build/*.xml

LICENSE (+20)

@@ -0,0 +1,20 @@
The MIT License (MIT)
Copyright (c) 2018 RuoYi
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

README.md (+95)

@@ -0,0 +1,95 @@
<p align="center">
<img alt="logo" src="https://oscimg.oschina.net/oscnet/up-d3d0a9303e11d522a06cd263f3079027715.png">
</p>
<h1 align="center" style="margin: 30px 0 30px; font-weight: bold;">RuoYi v3.9.0</h1>
<h4 align="center">A Java rapid development framework based on Spring Boot and Vue with a separated front end and back end</h4>
<p align="center">
<a href="https://gitee.com/y_project/RuoYi-Vue/stargazers"><img src="https://gitee.com/y_project/RuoYi-Vue/badge/star.svg?theme=dark"></a>
<a href="https://gitee.com/y_project/RuoYi-Vue"><img src="https://img.shields.io/badge/RuoYi-v3.9.0-brightgreen.svg"></a>
<a href="https://gitee.com/y_project/RuoYi-Vue/blob/master/LICENSE"><img src="https://img.shields.io/github/license/mashape/apistatus.svg"></a>
</p>
## Platform Overview
RuoYi is a fully open-source rapid development platform, offered to individuals and companies free of charge and without reservation.
* Front end built with Vue and Element UI.
* Back end built with Spring Boot, Spring Security, Redis & JWT.
* Authentication is JWT-based and supports multi-terminal login.
* Supports dynamically loaded permission menus and flexible, multi-mode access control.
* Highly efficient development: the code generator produces front-end and back-end code with one click.
* A [Vue3](https://v3.cn.vuejs.org) / [Element Plus](https://element-plus.org/zh-CN) / [Vite](https://cn.vitejs.dev) stack version, [RuoYi-Vue3](https://gitcode.com/yangzongzhuan/RuoYi-Vue3), is provided and kept in sync.
* A single-application version, [RuoYi-Vue-fast](https://gitcode.com/yangzongzhuan/RuoYi-Vue-fast), and an Oracle version, [RuoYi-Vue-Oracle](https://gitcode.com/yangzongzhuan/RuoYi-Vue-Oracle), are provided and kept in sync.
* For the non-separated version see [RuoYi](https://gitee.com/y_project/RuoYi); for the microservice version see [RuoYi-Cloud](https://gitee.com/y_project/RuoYi-Cloud).
* Alibaba Cloud discounts: [click here](http://aly.ruoyi.vip); Tencent Cloud flash sales: [click here](http://txy.ruoyi.vip)&nbsp;&nbsp;
## Built-in Features
1. User management: users are the operators of the system; this feature configures system users.
2. Department management: configures the organizational structure (company, department, team); the tree view supports data permissions.
3. Post management: configures the positions held by system users.
4. Menu management: configures system menus, operation permissions, button permission identifiers, etc.
5. Role management: assigns menu permissions to roles and defines role data-scope permissions by organization.
6. Dictionary management: maintains relatively fixed data that is used frequently across the system.
7. Parameter management: dynamically configures commonly used system parameters.
8. Notices and announcements: publishes and maintains system notices and announcements.
9. Operation log: records and queries normal operation logs and system exception logs.
10. Login log: records and queries system login logs, including abnormal logins.
11. Online users: monitors the status of currently active users.
12. Scheduled tasks: online task scheduling (add, modify, delete) with execution result logs.
13. Code generation: generates front-end and back-end code (java, html, xml, sql) with CRUD support and download.
14. System API: automatically generates API documentation from the business code.
15. Server monitoring: monitors CPU, memory, disk, stack and other server information.
16. Cache monitoring: queries system cache information and command statistics.
17. Online form builder: drag form elements to generate the corresponding HTML code.
18. Connection pool monitoring: monitors the database connection pool status and analyzes SQL to find performance bottlenecks.
## Online Demo
- admin/admin123
- Donations received over time have been used to upgrade the demo server for a better experience. Thank you, everyone.
Demo: http://vue.ruoyi.vip
Documentation: http://doc.ruoyi.vip
## Screenshots
<table>
<tr>
<td><img src="https://oscimg.oschina.net/oscnet/cd1f90be5f2684f4560c9519c0f2a232ee8.jpg"/></td>
<td><img src="https://oscimg.oschina.net/oscnet/1cbcf0e6f257c7d3a063c0e3f2ff989e4b3.jpg"/></td>
</tr>
<tr>
<td><img src="https://oscimg.oschina.net/oscnet/up-8074972883b5ba0622e13246738ebba237a.png"/></td>
<td><img src="https://oscimg.oschina.net/oscnet/up-9f88719cdfca9af2e58b352a20e23d43b12.png"/></td>
</tr>
<tr>
<td><img src="https://oscimg.oschina.net/oscnet/up-39bf2584ec3a529b0d5a3b70d15c9b37646.png"/></td>
<td><img src="https://oscimg.oschina.net/oscnet/up-936ec82d1f4872e1bc980927654b6007307.png"/></td>
</tr>
<tr>
<td><img src="https://oscimg.oschina.net/oscnet/up-b2d62ceb95d2dd9b3fbe157bb70d26001e9.png"/></td>
<td><img src="https://oscimg.oschina.net/oscnet/up-d67451d308b7a79ad6819723396f7c3d77a.png"/></td>
</tr>
<tr>
<td><img src="https://oscimg.oschina.net/oscnet/5e8c387724954459291aafd5eb52b456f53.jpg"/></td>
<td><img src="https://oscimg.oschina.net/oscnet/644e78da53c2e92a95dfda4f76e6d117c4b.jpg"/></td>
</tr>
<tr>
<td><img src="https://oscimg.oschina.net/oscnet/up-8370a0d02977eebf6dbf854c8450293c937.png"/></td>
<td><img src="https://oscimg.oschina.net/oscnet/up-49003ed83f60f633e7153609a53a2b644f7.png"/></td>
</tr>
<tr>
<td><img src="https://oscimg.oschina.net/oscnet/up-d4fe726319ece268d4746602c39cffc0621.png"/></td>
<td><img src="https://oscimg.oschina.net/oscnet/up-c195234bbcd30be6927f037a6755e6ab69c.png"/></td>
</tr>
<tr>
<td><img src="https://oscimg.oschina.net/oscnet/b6115bc8c31de52951982e509930b20684a.jpg"/></td>
<td><img src="https://oscimg.oschina.net/oscnet/up-5e4daac0bb59612c5038448acbcef235e3a.png"/></td>
</tr>
</table>
## RuoYi Front-End/Back-End Separation Community Groups
QQ群: [![加入QQ群](https://img.shields.io/badge/已满-937441-blue.svg)](https://jq.qq.com/?_wv=1027&k=5bVB1og) [![加入QQ群](https://img.shields.io/badge/已满-887144332-blue.svg)](https://jq.qq.com/?_wv=1027&k=5eiA4DH) [![加入QQ群](https://img.shields.io/badge/已满-180251782-blue.svg)](https://jq.qq.com/?_wv=1027&k=5AxMKlC) [![加入QQ群](https://img.shields.io/badge/已满-104180207-blue.svg)](https://jq.qq.com/?_wv=1027&k=51G72yr) [![加入QQ群](https://img.shields.io/badge/已满-186866453-blue.svg)](https://jq.qq.com/?_wv=1027&k=VvjN2nvu) [![加入QQ群](https://img.shields.io/badge/已满-201396349-blue.svg)](https://jq.qq.com/?_wv=1027&k=5vYAqA05) [![加入QQ群](https://img.shields.io/badge/已满-101456076-blue.svg)](https://jq.qq.com/?_wv=1027&k=kOIINEb5) [![加入QQ群](https://img.shields.io/badge/已满-101539465-blue.svg)](https://jq.qq.com/?_wv=1027&k=UKtX5jhs) [![加入QQ群](https://img.shields.io/badge/已满-264312783-blue.svg)](https://jq.qq.com/?_wv=1027&k=EI9an8lJ) [![加入QQ群](https://img.shields.io/badge/已满-167385320-blue.svg)](https://jq.qq.com/?_wv=1027&k=SWCtLnMz) [![加入QQ群](https://img.shields.io/badge/已满-104748341-blue.svg)](https://jq.qq.com/?_wv=1027&k=96Dkdq0k) [![加入QQ群](https://img.shields.io/badge/已满-160110482-blue.svg)](https://jq.qq.com/?_wv=1027&k=0fsNiYZt) [![加入QQ群](https://img.shields.io/badge/已满-170801498-blue.svg)](https://jq.qq.com/?_wv=1027&k=7xw4xUG1) [![加入QQ群](https://img.shields.io/badge/已满-108482800-blue.svg)](https://jq.qq.com/?_wv=1027&k=eCx8eyoJ) [![加入QQ群](https://img.shields.io/badge/已满-101046199-blue.svg)](https://jq.qq.com/?_wv=1027&k=SpyH2875) [![加入QQ群](https://img.shields.io/badge/已满-136919097-blue.svg)](https://jq.qq.com/?_wv=1027&k=tKEt51dz) [![加入QQ群](https://img.shields.io/badge/已满-143961921-blue.svg)](http://qm.qq.com/cgi-bin/qm/qr?_wv=1027&k=0vBbSb0ztbBgVtn3kJS-Q4HUNYwip89G&authKey=8irq5PhutrZmWIvsUsklBxhj57l%2F1nOZqjzigkXZVoZE451GG4JHPOqW7AW6cf0T&noverify=0&group_code=143961921) [![加入QQ群](https://img.shields.io/badge/已满-174951577-blue.svg)](http://qm.qq.com/cgi-bin/qm/qr?_wv=1027&k=ZFAPAbp09S2ltvwrJzp7wGlbopsc0rwi&authKey=HB2cxpxP2yspk%2Bo3WKTBfktRCccVkU26cgi5B16u0KcAYrVu7sBaE7XSEqmMdFQp&noverify=0&group_code=174951577) [![加入QQ群](https://img.shields.io/badge/已满-161281055-blue.svg)](http://qm.qq.com/cgi-bin/qm/qr?_wv=1027&k=Fn2aF5IHpwsy8j6VlalNJK6qbwFLFHat&authKey=uyIT%2B97x2AXj3odyXpsSpVaPMC%2Bidw0LxG5MAtEqlrcBcWJUA%2FeS43rsF1Tg7IRJ&noverify=0&group_code=161281055) [![加入QQ群](https://img.shields.io/badge/已满-138988063-blue.svg)](http://qm.qq.com/cgi-bin/qm/qr?_wv=1027&k=XIzkm_mV2xTsUtFxo63bmicYoDBA6Ifm&authKey=dDW%2F4qsmw3x9govoZY9w%2FoWAoC4wbHqGal%2BbqLzoS6VBarU8EBptIgPKN%2FviyC8j&noverify=0&group_code=138988063) [![加入QQ群](https://img.shields.io/badge/已满-151450850-blue.svg)](http://qm.qq.com/cgi-bin/qm/qr?_wv=1027&k=DkugnCg68PevlycJSKSwjhFqfIgrWWwR&authKey=pR1Pa5lPIeGF%2FFtIk6d%2FGB5qFi0EdvyErtpQXULzo03zbhopBHLWcuqdpwY241R%2F&noverify=0&group_code=151450850) [![加入QQ群](https://img.shields.io/badge/已满-224622315-blue.svg)](http://qm.qq.com/cgi-bin/qm/qr?_wv=1027&k=F58bgRa-Dp-rsQJThiJqIYv8t4-lWfXh&authKey=UmUs4CVG5OPA1whvsa4uSespOvyd8%2FAr9olEGaWAfdLmfKQk%2FVBp2YU3u2xXXt76&noverify=0&group_code=224622315) [![加入QQ群](https://img.shields.io/badge/已满-287842588-blue.svg)](http://qm.qq.com/cgi-bin/qm/qr?_wv=1027&k=Nxb2EQ5qozWa218Wbs7zgBnjLSNk_tVT&authKey=obBKXj6SBKgrFTJZx0AqQnIYbNOvBB2kmgwWvGhzxR67RoRr84%2Bus5OadzMcdJl5&noverify=0&group_code=287842588) 
[![加入QQ群](https://img.shields.io/badge/已满-187944233-blue.svg)](http://qm.qq.com/cgi-bin/qm/qr?_wv=1027&k=numtK1M_I4eVd2Gvg8qtbuL8JgX42qNh&authKey=giV9XWMaFZTY%2FqPlmWbkB9g3fi0Ev5CwEtT9Tgei0oUlFFCQLDp4ozWRiVIzubIm&noverify=0&group_code=187944233) [![加入QQ群](https://img.shields.io/badge/已满-228578329-blue.svg)](http://qm.qq.com/cgi-bin/qm/qr?_wv=1027&k=G6r5KGCaa3pqdbUSXNIgYloyb8e0_L0D&authKey=4w8tF1eGW7%2FedWn%2FHAypQksdrML%2BDHolQSx7094Agm7Luakj9EbfPnSTxSi2T1LQ&noverify=0&group_code=228578329) [![加入QQ群](https://img.shields.io/badge/已满-191164766-blue.svg)](http://qm.qq.com/cgi-bin/qm/qr?_wv=1027&k=GsOo-OLz53J8y_9TPoO6XXSGNRTgbFxA&authKey=R7Uy%2Feq%2BZsoKNqHvRKhiXpypW7DAogoWapOawUGHokJSBIBIre2%2FoiAZeZBSLuBc&noverify=0&group_code=191164766) [![加入QQ群](https://img.shields.io/badge/174569686-blue.svg)](http://qm.qq.com/cgi-bin/qm/qr?_wv=1027&k=PmYavuzsOthVqfdAPbo4uAeIbu7Ttjgc&authKey=p52l8%2FXa4PS1JcEmS3VccKSwOPJUZ1ZfQ69MEKzbrooNUljRtlKjvsXf04bxNp3G&noverify=0&group_code=174569686) 点击按钮入群。

bin/clean.bat (+12)

@@ -0,0 +1,12 @@
@echo off
echo.
echo [INFO] Cleaning the project target build directory.
echo.
%~d0
cd %~dp0
cd ..
call mvn clean
pause

bin/package.bat (+12)

@@ -0,0 +1,12 @@
@echo off
echo.
echo [INFO] Packaging the web project into a war/jar file.
echo.
%~d0
cd %~dp0
cd ..
call mvn clean package -Dmaven.test.skip=true
pause

bin/run.bat (+14)

@@ -0,0 +1,14 @@
@echo off
echo.
echo [INFO] Running the web project with java -jar.
echo.
cd %~dp0
cd ../chenhai-admin/target
set JAVA_OPTS=-Xms256m -Xmx1024m -XX:MetaspaceSize=128m -XX:MaxMetaspaceSize=512m
java -jar %JAVA_OPTS% chenhai-admin.jar
cd bin
pause

chenhai-admin/pom.xml (+125)

@@ -0,0 +1,125 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<parent>
<artifactId>chenhai</artifactId>
<groupId>com.chenhai</groupId>
<version>3.9.0</version>
</parent>
<modelVersion>4.0.0</modelVersion>
<packaging>jar</packaging>
<artifactId>chenhai-admin</artifactId>
<description>
Web service entry point
</description>
<dependencies>
<!-- spring-boot-devtools -->
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-devtools</artifactId>
<optional>true</optional> <!-- the dependency is not propagated transitively -->
</dependency>
<!-- spring-doc -->
<dependency>
<groupId>org.springdoc</groupId>
<artifactId>springdoc-openapi-starter-webmvc-ui</artifactId>
</dependency>
<!-- MySQL driver -->
<dependency>
<groupId>com.mysql</groupId>
<artifactId>mysql-connector-j</artifactId>
</dependency>
<!-- Core module -->
<dependency>
<groupId>com.chenhai</groupId>
<artifactId>chenhai-framework</artifactId>
</dependency>
<!-- Scheduled tasks -->
<dependency>
<groupId>com.chenhai</groupId>
<artifactId>chenhai-quartz</artifactId>
</dependency>
<!-- Code generation -->
<dependency>
<groupId>com.chenhai</groupId>
<artifactId>chenhai-generator</artifactId>
</dependency>
<!-- AI module -->
<dependency>
<groupId>com.chenhai</groupId>
<artifactId>chenhai-ai</artifactId>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-maven-plugin</artifactId>
<version>3.5.4</version>
<configuration>
<addResources>true</addResources>
</configuration>
<executions>
<execution>
<goals>
<goal>repackage</goal>
</goals>
</execution>
</executions>
</plugin>
<!-- ==================== Custom configuration start ==================== -->
<!-- Resource unpack plugin: extracts the prompt files from the chenhai-ai module into this module -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-dependency-plugin</artifactId>
<executions>
<execution>
<id>unpack-ai-resources</id>
<phase>generate-resources</phase>
<goals>
<goal>unpack</goal>
</goals>
<configuration>
<artifactItems>
<artifactItem>
<groupId>com.chenhai</groupId>
<artifactId>chenhai-ai</artifactId>
<type>jar</type>
<overWrite>true</overWrite>
<outputDirectory>${project.build.outputDirectory}</outputDirectory>
<includes>**/prompts/**</includes>
</artifactItem>
</artifactItems>
</configuration>
</execution>
</executions>
</plugin>
<!-- ==================== Custom configuration end ==================== -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-war-plugin</artifactId>
<version>3.1.0</version>
<configuration>
<failOnMissingWebXml>false</failOnMissingWebXml>
<warName>${project.artifactId}</warName>
</configuration>
</plugin>
</plugins>
<finalName>${project.artifactId}</finalName>
</build>
</project>
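
The maven-dependency-plugin execution above unpacks everything under **/prompts/** from the chenhai-ai jar into this module's target/classes, so at runtime the prompt files are ordinary classpath resources. A minimal sketch of reading them that way (this is not the project's PromptLoader; the resource pattern and .md extension are assumptions):

import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import org.springframework.core.io.Resource;
import org.springframework.core.io.support.PathMatchingResourcePatternResolver;

public class PromptResourceDemo
{
    public static void main(String[] args) throws IOException
    {
        PathMatchingResourcePatternResolver resolver = new PathMatchingResourcePatternResolver();
        // "classpath*:" also scans dependency jars, so this finds the files whether they
        // were unpacked into target/classes or are still packaged inside chenhai-ai.jar.
        for (Resource resource : resolver.getResources("classpath*:**/prompts/*.md")) // extension is an assumption
        {
            try (InputStream in = resource.getInputStream())
            {
                String text = new String(in.readAllBytes(), StandardCharsets.UTF_8);
                System.out.println(resource.getFilename() + " (" + text.length() + " chars)");
            }
        }
    }
}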

chenhai-admin/src/main/java/com/chenhai/RuoYiApplication.java (+30)

@@ -0,0 +1,30 @@
package com.chenhai;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.autoconfigure.jdbc.DataSourceAutoConfiguration;
/**
* Application startup class
*
* @author ruoyi
*/
@SpringBootApplication(exclude = { DataSourceAutoConfiguration.class })
public class RuoYiApplication
{
public static void main(String[] args)
{
// System.setProperty("spring.devtools.restart.enabled", "false");
SpringApplication.run(RuoYiApplication.class, args);
System.out.println("(♥◠‿◠)ノ゙ 若依启动成功 ლ(´ڡ`ლ)゙ \n" +
" .-------. ____ __ \n" +
" | _ _ \\ \\ \\ / / \n" +
" | ( ' ) | \\ _. / ' \n" +
" |(_ o _) / _( )_ .' \n" +
" | (_,_).' __ ___(_ o _)' \n" +
" | |\\ \\ | || |(_,_)' \n" +
" | | \\ `' /| `-' / \n" +
" | | \\ / \\ / \n" +
" ''-' `'-' `-..-' ");
}
}

chenhai-admin/src/main/java/com/chenhai/RuoYiServletInitializer.java (+18)

@@ -0,0 +1,18 @@
package com.chenhai;
import org.springframework.boot.builder.SpringApplicationBuilder;
import org.springframework.boot.web.servlet.support.SpringBootServletInitializer;
/**
* Deploys the application in an external web container
*
* @author ruoyi
*/
public class RuoYiServletInitializer extends SpringBootServletInitializer
{
@Override
protected SpringApplicationBuilder configure(SpringApplicationBuilder application)
{
return application.sources(RuoYiApplication.class);
}
}

chenhai-admin/src/main/java/com/chenhai/web/controller/common/CaptchaController.java (+94)

@@ -0,0 +1,94 @@
package com.chenhai.web.controller.common;
import java.awt.image.BufferedImage;
import java.io.IOException;
import java.util.concurrent.TimeUnit;
import jakarta.annotation.Resource;
import javax.imageio.ImageIO;
import jakarta.servlet.http.HttpServletResponse;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.util.FastByteArrayOutputStream;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RestController;
import com.google.code.kaptcha.Producer;
import com.chenhai.common.config.RuoYiConfig;
import com.chenhai.common.constant.CacheConstants;
import com.chenhai.common.constant.Constants;
import com.chenhai.common.core.domain.AjaxResult;
import com.chenhai.common.core.redis.RedisCache;
import com.chenhai.common.utils.sign.Base64;
import com.chenhai.common.utils.uuid.IdUtils;
import com.chenhai.system.service.ISysConfigService;
/**
* Captcha generation and handling
*
* @author ruoyi
*/
@RestController
public class CaptchaController
{
@Resource(name = "captchaProducer")
private Producer captchaProducer;
@Resource(name = "captchaProducerMath")
private Producer captchaProducerMath;
@Autowired
private RedisCache redisCache;
@Autowired
private ISysConfigService configService;
/**
* Generate a captcha
*/
@GetMapping("/captchaImage")
public AjaxResult getCode(HttpServletResponse response) throws IOException
{
AjaxResult ajax = AjaxResult.success();
boolean captchaEnabled = configService.selectCaptchaEnabled();
ajax.put("captchaEnabled", captchaEnabled);
if (!captchaEnabled)
{
return ajax;
}
// Save the captcha information
String uuid = IdUtils.simpleUUID();
String verifyKey = CacheConstants.CAPTCHA_CODE_KEY + uuid;
String capStr = null, code = null;
BufferedImage image = null;
// Generate the captcha
String captchaType = RuoYiConfig.getCaptchaType();
if ("math".equals(captchaType))
{
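// The math producer returns "expression@answer": the text before the last '@' is rendered as the image, the text after it is cached as the expected answer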
String capText = captchaProducerMath.createText();
capStr = capText.substring(0, capText.lastIndexOf("@"));
code = capText.substring(capText.lastIndexOf("@") + 1);
image = captchaProducerMath.createImage(capStr);
}
else if ("char".equals(captchaType))
{
capStr = code = captchaProducer.createText();
image = captchaProducer.createImage(capStr);
}
redisCache.setCacheObject(verifyKey, code, Constants.CAPTCHA_EXPIRATION, TimeUnit.MINUTES);
// Convert the image to a byte stream and write it out
FastByteArrayOutputStream os = new FastByteArrayOutputStream();
try
{
ImageIO.write(image, "jpg", os);
}
catch (IOException e)
{
return AjaxResult.error(e.getMessage());
}
ajax.put("uuid", uuid);
ajax.put("img", Base64.encode(os.toByteArray()));
return ajax;
}
}
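
A minimal sketch of how a client consumes the /captchaImage endpoint above (the host and port are assumptions, and no token is sent on the assumption that the captcha call is reachable anonymously, as it serves the login page):

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class CaptchaClientDemo
{
    public static void main(String[] args) throws Exception
    {
        HttpClient client = HttpClient.newHttpClient();
        HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create("http://localhost:8080/captchaImage"))
                .GET()
                .build();
        HttpResponse<String> response = client.send(request, HttpResponse.BodyHandlers.ofString());
        // The JSON body carries captchaEnabled, uuid and img (a Base64-encoded image), as built by the controller above
        System.out.println(response.body());
    }
}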

chenhai-admin/src/main/java/com/chenhai/web/controller/common/CommonController.java (+162)

@@ -0,0 +1,162 @@
package com.chenhai.web.controller.common;
import java.util.ArrayList;
import java.util.List;
import jakarta.servlet.http.HttpServletRequest;
import jakarta.servlet.http.HttpServletResponse;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.MediaType;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.multipart.MultipartFile;
import com.chenhai.common.config.RuoYiConfig;
import com.chenhai.common.core.domain.AjaxResult;
import com.chenhai.common.utils.StringUtils;
import com.chenhai.common.utils.file.FileUploadUtils;
import com.chenhai.common.utils.file.FileUtils;
import com.chenhai.framework.config.ServerConfig;
/**
* Generic request handling
*
* @author ruoyi
*/
@RestController
@RequestMapping("/common")
public class CommonController
{
private static final Logger log = LoggerFactory.getLogger(CommonController.class);
@Autowired
private ServerConfig serverConfig;
private static final String FILE_DELIMETER = ",";
/**
* Generic download request
*
* @param fileName file name
* @param delete whether to delete the file after download
*/
@GetMapping("/download")
public void fileDownload(String fileName, Boolean delete, HttpServletResponse response, HttpServletRequest request)
{
try
{
if (!FileUtils.checkAllowDownload(fileName))
{
throw new Exception(StringUtils.format("文件名称({})非法,不允许下载。 ", fileName));
}
String realFileName = System.currentTimeMillis() + fileName.substring(fileName.indexOf("_") + 1);
String filePath = RuoYiConfig.getDownloadPath() + fileName;
response.setContentType(MediaType.APPLICATION_OCTET_STREAM_VALUE);
FileUtils.setAttachmentResponseHeader(response, realFileName);
FileUtils.writeBytes(filePath, response.getOutputStream());
if (delete)
{
FileUtils.deleteFile(filePath);
}
}
catch (Exception e)
{
log.error("下载文件失败", e);
}
}
/**
* Generic upload request (single file)
*/
@PostMapping("/upload")
public AjaxResult uploadFile(MultipartFile file) throws Exception
{
try
{
// Upload directory path
String filePath = RuoYiConfig.getUploadPath();
// Upload and return the new file name
String fileName = FileUploadUtils.upload(filePath, file);
String url = serverConfig.getUrl() + fileName;
AjaxResult ajax = AjaxResult.success();
ajax.put("url", url);
ajax.put("fileName", fileName);
ajax.put("newFileName", FileUtils.getName(fileName));
ajax.put("originalFilename", file.getOriginalFilename());
return ajax;
}
catch (Exception e)
{
return AjaxResult.error(e.getMessage());
}
}
/**
* Generic upload request (multiple files)
*/
@PostMapping("/uploads")
public AjaxResult uploadFiles(List<MultipartFile> files) throws Exception
{
try
{
// Upload directory path
String filePath = RuoYiConfig.getUploadPath();
List<String> urls = new ArrayList<String>();
List<String> fileNames = new ArrayList<String>();
List<String> newFileNames = new ArrayList<String>();
List<String> originalFilenames = new ArrayList<String>();
for (MultipartFile file : files)
{
// Upload and return the new file name
String fileName = FileUploadUtils.upload(filePath, file);
String url = serverConfig.getUrl() + fileName;
urls.add(url);
fileNames.add(fileName);
newFileNames.add(FileUtils.getName(fileName));
originalFilenames.add(file.getOriginalFilename());
}
AjaxResult ajax = AjaxResult.success();
ajax.put("urls", StringUtils.join(urls, FILE_DELIMETER));
ajax.put("fileNames", StringUtils.join(fileNames, FILE_DELIMETER));
ajax.put("newFileNames", StringUtils.join(newFileNames, FILE_DELIMETER));
ajax.put("originalFilenames", StringUtils.join(originalFilenames, FILE_DELIMETER));
return ajax;
}
catch (Exception e)
{
return AjaxResult.error(e.getMessage());
}
}
/**
* Generic download of a local resource
*/
@GetMapping("/download/resource")
public void resourceDownload(String resource, HttpServletRequest request, HttpServletResponse response)
throws Exception
{
try
{
if (!FileUtils.checkAllowDownload(resource))
{
throw new Exception(StringUtils.format("资源文件({})非法,不允许下载。 ", resource));
}
// Local resource root path
String localPath = RuoYiConfig.getProfile();
// Resource path as stored in the database
String downloadPath = localPath + FileUtils.stripPrefix(resource);
// Download file name
String downloadName = StringUtils.substringAfterLast(downloadPath, "/");
response.setContentType(MediaType.APPLICATION_OCTET_STREAM_VALUE);
FileUtils.setAttachmentResponseHeader(response, downloadName);
FileUtils.writeBytes(downloadPath, response.getOutputStream());
}
catch (Exception e)
{
log.error("下载文件失败", e);
}
}
}
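
A minimal sketch of calling the /common/upload endpoint above with a multipart request (host, port, file path, and the omitted authorization header are assumptions):

import org.springframework.core.io.FileSystemResource;
import org.springframework.http.HttpEntity;
import org.springframework.http.HttpHeaders;
import org.springframework.http.MediaType;
import org.springframework.util.LinkedMultiValueMap;
import org.springframework.util.MultiValueMap;
import org.springframework.web.client.RestTemplate;

public class UploadClientDemo
{
    public static void main(String[] args)
    {
        MultiValueMap<String, Object> body = new LinkedMultiValueMap<>();
        // The part name "file" matches the uploadFile(MultipartFile file) parameter above
        body.add("file", new FileSystemResource("/tmp/demo.png"));
        HttpHeaders headers = new HttpHeaders();
        headers.setContentType(MediaType.MULTIPART_FORM_DATA);
        String response = new RestTemplate()
                .postForObject("http://localhost:8080/common/upload", new HttpEntity<>(body, headers), String.class);
        // The JSON body carries url, fileName, newFileName and originalFilename
        System.out.println(response);
    }
}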

chenhai-admin/src/main/java/com/chenhai/web/controller/monitor/CacheController.java (+122)

@@ -0,0 +1,122 @@
package com.chenhai.web.controller.monitor;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.TreeSet;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.redis.core.RedisCallback;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.security.access.prepost.PreAuthorize;
import org.springframework.web.bind.annotation.DeleteMapping;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import com.chenhai.common.constant.CacheConstants;
import com.chenhai.common.core.domain.AjaxResult;
import com.chenhai.common.utils.StringUtils;
import com.chenhai.system.domain.SysCache;
/**
* Cache monitoring
*
* @author ruoyi
*/
@RestController
@RequestMapping("/monitor/cache")
public class CacheController
{
@Autowired
private RedisTemplate<String, String> redisTemplate;
private final static List<SysCache> caches = new ArrayList<SysCache>();
{
caches.add(new SysCache(CacheConstants.LOGIN_TOKEN_KEY, "用户信息"));
caches.add(new SysCache(CacheConstants.SYS_CONFIG_KEY, "配置信息"));
caches.add(new SysCache(CacheConstants.SYS_DICT_KEY, "数据字典"));
caches.add(new SysCache(CacheConstants.CAPTCHA_CODE_KEY, "验证码"));
caches.add(new SysCache(CacheConstants.REPEAT_SUBMIT_KEY, "防重提交"));
caches.add(new SysCache(CacheConstants.RATE_LIMIT_KEY, "限流处理"));
caches.add(new SysCache(CacheConstants.PWD_ERR_CNT_KEY, "密码错误次数"));
}
@SuppressWarnings("deprecation")
@PreAuthorize("@ss.hasPermi('monitor:cache:list')")
@GetMapping()
public AjaxResult getInfo() throws Exception
{
Properties info = (Properties) redisTemplate.execute((RedisCallback<Object>) connection -> connection.info());
Properties commandStats = (Properties) redisTemplate.execute((RedisCallback<Object>) connection -> connection.info("commandstats"));
Object dbSize = redisTemplate.execute((RedisCallback<Object>) connection -> connection.dbSize());
Map<String, Object> result = new HashMap<>(3);
result.put("info", info);
result.put("dbSize", dbSize);
List<Map<String, String>> pieList = new ArrayList<>();
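// Each commandstats property looks like "calls=<n>,usec=<n>,..."; extract the calls count per command for the pie chart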
commandStats.stringPropertyNames().forEach(key -> {
Map<String, String> data = new HashMap<>(2);
String property = commandStats.getProperty(key);
data.put("name", StringUtils.removeStart(key, "cmdstat_"));
data.put("value", StringUtils.substringBetween(property, "calls=", ",usec"));
pieList.add(data);
});
result.put("commandStats", pieList);
return AjaxResult.success(result);
}
@PreAuthorize("@ss.hasPermi('monitor:cache:list')")
@GetMapping("/getNames")
public AjaxResult cache()
{
return AjaxResult.success(caches);
}
@PreAuthorize("@ss.hasPermi('monitor:cache:list')")
@GetMapping("/getKeys/{cacheName}")
public AjaxResult getCacheKeys(@PathVariable String cacheName)
{
Set<String> cacheKeys = redisTemplate.keys(cacheName + "*");
return AjaxResult.success(new TreeSet<>(cacheKeys));
}
@PreAuthorize("@ss.hasPermi('monitor:cache:list')")
@GetMapping("/getValue/{cacheName}/{cacheKey}")
public AjaxResult getCacheValue(@PathVariable String cacheName, @PathVariable String cacheKey)
{
String cacheValue = redisTemplate.opsForValue().get(cacheKey);
SysCache sysCache = new SysCache(cacheName, cacheKey, cacheValue);
return AjaxResult.success(sysCache);
}
@PreAuthorize("@ss.hasPermi('monitor:cache:list')")
@DeleteMapping("/clearCacheName/{cacheName}")
public AjaxResult clearCacheName(@PathVariable String cacheName)
{
Collection<String> cacheKeys = redisTemplate.keys(cacheName + "*");
redisTemplate.delete(cacheKeys);
return AjaxResult.success();
}
@PreAuthorize("@ss.hasPermi('monitor:cache:list')")
@DeleteMapping("/clearCacheKey/{cacheKey}")
public AjaxResult clearCacheKey(@PathVariable String cacheKey)
{
redisTemplate.delete(cacheKey);
return AjaxResult.success();
}
@PreAuthorize("@ss.hasPermi('monitor:cache:list')")
@DeleteMapping("/clearCacheAll")
public AjaxResult clearCacheAll()
{
Collection<String> cacheKeys = redisTemplate.keys("*");
redisTemplate.delete(cacheKeys);
return AjaxResult.success();
}
}

chenhai-admin/src/main/java/com/chenhai/web/controller/monitor/ServerController.java (+27)

@@ -0,0 +1,27 @@
package com.chenhai.web.controller.monitor;
import org.springframework.security.access.prepost.PreAuthorize;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import com.chenhai.common.core.domain.AjaxResult;
import com.chenhai.framework.web.domain.Server;
/**
* Server monitoring
*
* @author ruoyi
*/
@RestController
@RequestMapping("/monitor/server")
public class ServerController
{
@PreAuthorize("@ss.hasPermi('monitor:server:list')")
@GetMapping()
public AjaxResult getInfo() throws Exception
{
Server server = new Server();
server.copyTo();
return AjaxResult.success(server);
}
}

chenhai-admin/src/main/java/com/chenhai/web/controller/monitor/SysLogininforController.java (+82)

@@ -0,0 +1,82 @@
package com.chenhai.web.controller.monitor;
import java.util.List;
import jakarta.servlet.http.HttpServletResponse;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.security.access.prepost.PreAuthorize;
import org.springframework.web.bind.annotation.DeleteMapping;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import com.chenhai.common.annotation.Log;
import com.chenhai.common.core.controller.BaseController;
import com.chenhai.common.core.domain.AjaxResult;
import com.chenhai.common.core.page.TableDataInfo;
import com.chenhai.common.enums.BusinessType;
import com.chenhai.common.utils.poi.ExcelUtil;
import com.chenhai.framework.web.service.SysPasswordService;
import com.chenhai.system.domain.SysLogininfor;
import com.chenhai.system.service.ISysLogininforService;
/**
* System login (access) records
*
* @author ruoyi
*/
@RestController
@RequestMapping("/monitor/logininfor")
public class SysLogininforController extends BaseController
{
@Autowired
private ISysLogininforService logininforService;
@Autowired
private SysPasswordService passwordService;
@PreAuthorize("@ss.hasPermi('monitor:logininfor:list')")
@GetMapping("/list")
public TableDataInfo list(SysLogininfor logininfor)
{
startPage();
List<SysLogininfor> list = logininforService.selectLogininforList(logininfor);
return getDataTable(list);
}
@Log(title = "登录日志", businessType = BusinessType.EXPORT)
@PreAuthorize("@ss.hasPermi('monitor:logininfor:export')")
@PostMapping("/export")
public void export(HttpServletResponse response, SysLogininfor logininfor)
{
List<SysLogininfor> list = logininforService.selectLogininforList(logininfor);
ExcelUtil<SysLogininfor> util = new ExcelUtil<SysLogininfor>(SysLogininfor.class);
util.exportExcel(response, list, "登录日志");
}
@PreAuthorize("@ss.hasPermi('monitor:logininfor:remove')")
@Log(title = "登录日志", businessType = BusinessType.DELETE)
@DeleteMapping("/{infoIds}")
public AjaxResult remove(@PathVariable Long[] infoIds)
{
return toAjax(logininforService.deleteLogininforByIds(infoIds));
}
@PreAuthorize("@ss.hasPermi('monitor:logininfor:remove')")
@Log(title = "登录日志", businessType = BusinessType.CLEAN)
@DeleteMapping("/clean")
public AjaxResult clean()
{
logininforService.cleanLogininfor();
return success();
}
@PreAuthorize("@ss.hasPermi('monitor:logininfor:unlock')")
@Log(title = "账户解锁", businessType = BusinessType.OTHER)
@GetMapping("/unlock/{userName}")
public AjaxResult unlock(@PathVariable("userName") String userName)
{
passwordService.clearLoginRecordCache(userName);
return success();
}
}

chenhai-admin/src/main/java/com/chenhai/web/controller/monitor/SysOperlogController.java (+69)

@@ -0,0 +1,69 @@
package com.chenhai.web.controller.monitor;
import java.util.List;
import jakarta.servlet.http.HttpServletResponse;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.security.access.prepost.PreAuthorize;
import org.springframework.web.bind.annotation.DeleteMapping;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import com.chenhai.common.annotation.Log;
import com.chenhai.common.core.controller.BaseController;
import com.chenhai.common.core.domain.AjaxResult;
import com.chenhai.common.core.page.TableDataInfo;
import com.chenhai.common.enums.BusinessType;
import com.chenhai.common.utils.poi.ExcelUtil;
import com.chenhai.system.domain.SysOperLog;
import com.chenhai.system.service.ISysOperLogService;
/**
* Operation log records
*
* @author ruoyi
*/
@RestController
@RequestMapping("/monitor/operlog")
public class SysOperlogController extends BaseController
{
@Autowired
private ISysOperLogService operLogService;
@PreAuthorize("@ss.hasPermi('monitor:operlog:list')")
@GetMapping("/list")
public TableDataInfo list(SysOperLog operLog)
{
startPage();
List<SysOperLog> list = operLogService.selectOperLogList(operLog);
return getDataTable(list);
}
@Log(title = "操作日志", businessType = BusinessType.EXPORT)
@PreAuthorize("@ss.hasPermi('monitor:operlog:export')")
@PostMapping("/export")
public void export(HttpServletResponse response, SysOperLog operLog)
{
List<SysOperLog> list = operLogService.selectOperLogList(operLog);
ExcelUtil<SysOperLog> util = new ExcelUtil<SysOperLog>(SysOperLog.class);
util.exportExcel(response, list, "操作日志");
}
@Log(title = "操作日志", businessType = BusinessType.DELETE)
@PreAuthorize("@ss.hasPermi('monitor:operlog:remove')")
@DeleteMapping("/{operIds}")
public AjaxResult remove(@PathVariable Long[] operIds)
{
return toAjax(operLogService.deleteOperLogByIds(operIds));
}
@Log(title = "操作日志", businessType = BusinessType.CLEAN)
@PreAuthorize("@ss.hasPermi('monitor:operlog:remove')")
@DeleteMapping("/clean")
public AjaxResult clean()
{
operLogService.cleanOperLog();
return success();
}
}

chenhai-admin/src/main/java/com/chenhai/web/controller/monitor/SysUserOnlineController.java (+83)

@@ -0,0 +1,83 @@
package com.chenhai.web.controller.monitor;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.security.access.prepost.PreAuthorize;
import org.springframework.web.bind.annotation.DeleteMapping;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import com.chenhai.common.annotation.Log;
import com.chenhai.common.constant.CacheConstants;
import com.chenhai.common.core.controller.BaseController;
import com.chenhai.common.core.domain.AjaxResult;
import com.chenhai.common.core.domain.model.LoginUser;
import com.chenhai.common.core.page.TableDataInfo;
import com.chenhai.common.core.redis.RedisCache;
import com.chenhai.common.enums.BusinessType;
import com.chenhai.common.utils.StringUtils;
import com.chenhai.system.domain.SysUserOnline;
import com.chenhai.system.service.ISysUserOnlineService;
/**
* Online user monitoring
*
* @author ruoyi
*/
@RestController
@RequestMapping("/monitor/online")
public class SysUserOnlineController extends BaseController
{
@Autowired
private ISysUserOnlineService userOnlineService;
@Autowired
private RedisCache redisCache;
@PreAuthorize("@ss.hasPermi('monitor:online:list')")
@GetMapping("/list")
public TableDataInfo list(String ipaddr, String userName)
{
Collection<String> keys = redisCache.keys(CacheConstants.LOGIN_TOKEN_KEY + "*");
List<SysUserOnline> userOnlineList = new ArrayList<SysUserOnline>();
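// Walk every cached login token and keep the entries matching the optional ipaddr / userName filters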
for (String key : keys)
{
LoginUser user = redisCache.getCacheObject(key);
if (StringUtils.isNotEmpty(ipaddr) && StringUtils.isNotEmpty(userName))
{
userOnlineList.add(userOnlineService.selectOnlineByInfo(ipaddr, userName, user));
}
else if (StringUtils.isNotEmpty(ipaddr))
{
userOnlineList.add(userOnlineService.selectOnlineByIpaddr(ipaddr, user));
}
else if (StringUtils.isNotEmpty(userName) && StringUtils.isNotNull(user.getUser()))
{
userOnlineList.add(userOnlineService.selectOnlineByUserName(userName, user));
}
else
{
userOnlineList.add(userOnlineService.loginUserToUserOnline(user));
}
}
Collections.reverse(userOnlineList);
userOnlineList.removeAll(Collections.singleton(null));
return getDataTable(userOnlineList);
}
/**
* Force a user to log out
*/
@PreAuthorize("@ss.hasPermi('monitor:online:forceLogout')")
@Log(title = "在线用户", businessType = BusinessType.FORCE)
@DeleteMapping("/{tokenId}")
public AjaxResult forceLogout(@PathVariable String tokenId)
{
redisCache.deleteObject(CacheConstants.LOGIN_TOKEN_KEY + tokenId);
return success();
}
}

chenhai-admin/src/main/java/com/chenhai/web/controller/system/SysConfigController.java (+133)

@@ -0,0 +1,133 @@
package com.chenhai.web.controller.system;
import java.util.List;
import jakarta.servlet.http.HttpServletResponse;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.security.access.prepost.PreAuthorize;
import org.springframework.validation.annotation.Validated;
import org.springframework.web.bind.annotation.DeleteMapping;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.PutMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import com.chenhai.common.annotation.Log;
import com.chenhai.common.core.controller.BaseController;
import com.chenhai.common.core.domain.AjaxResult;
import com.chenhai.common.core.page.TableDataInfo;
import com.chenhai.common.enums.BusinessType;
import com.chenhai.common.utils.poi.ExcelUtil;
import com.chenhai.system.domain.SysConfig;
import com.chenhai.system.service.ISysConfigService;
/**
* Parameter configuration handling
*
* @author ruoyi
*/
@RestController
@RequestMapping("/system/config")
public class SysConfigController extends BaseController
{
@Autowired
private ISysConfigService configService;
/**
* Get the parameter configuration list
*/
@PreAuthorize("@ss.hasPermi('system:config:list')")
@GetMapping("/list")
public TableDataInfo list(SysConfig config)
{
startPage();
List<SysConfig> list = configService.selectConfigList(config);
return getDataTable(list);
}
@Log(title = "参数管理", businessType = BusinessType.EXPORT)
@PreAuthorize("@ss.hasPermi('system:config:export')")
@PostMapping("/export")
public void export(HttpServletResponse response, SysConfig config)
{
List<SysConfig> list = configService.selectConfigList(config);
ExcelUtil<SysConfig> util = new ExcelUtil<SysConfig>(SysConfig.class);
util.exportExcel(response, list, "参数数据");
}
/**
* Get details by parameter id
*/
@PreAuthorize("@ss.hasPermi('system:config:query')")
@GetMapping(value = "/{configId}")
public AjaxResult getInfo(@PathVariable Long configId)
{
return success(configService.selectConfigById(configId));
}
/**
* Query a parameter value by key
*/
@GetMapping(value = "/configKey/{configKey}")
public AjaxResult getConfigKey(@PathVariable String configKey)
{
return success(configService.selectConfigByKey(configKey));
}
/**
* Add a parameter configuration
*/
@PreAuthorize("@ss.hasPermi('system:config:add')")
@Log(title = "参数管理", businessType = BusinessType.INSERT)
@PostMapping
public AjaxResult add(@Validated @RequestBody SysConfig config)
{
if (!configService.checkConfigKeyUnique(config))
{
return error("新增参数'" + config.getConfigName() + "'失败,参数键名已存在");
}
config.setCreateBy(getUsername());
return toAjax(configService.insertConfig(config));
}
/**
* Update a parameter configuration
*/
@PreAuthorize("@ss.hasPermi('system:config:edit')")
@Log(title = "参数管理", businessType = BusinessType.UPDATE)
@PutMapping
public AjaxResult edit(@Validated @RequestBody SysConfig config)
{
if (!configService.checkConfigKeyUnique(config))
{
return error("修改参数'" + config.getConfigName() + "'失败,参数键名已存在");
}
config.setUpdateBy(getUsername());
return toAjax(configService.updateConfig(config));
}
/**
* Delete parameter configurations
*/
@PreAuthorize("@ss.hasPermi('system:config:remove')")
@Log(title = "参数管理", businessType = BusinessType.DELETE)
@DeleteMapping("/{configIds}")
public AjaxResult remove(@PathVariable Long[] configIds)
{
configService.deleteConfigByIds(configIds);
return success();
}
/**
* Refresh the parameter cache
*/
@PreAuthorize("@ss.hasPermi('system:config:remove')")
@Log(title = "参数管理", businessType = BusinessType.CLEAN)
@DeleteMapping("/refreshCache")
public AjaxResult refreshCache()
{
configService.resetConfigCache();
return success();
}
}

chenhai-admin/src/main/java/com/chenhai/web/controller/system/SysDeptController.java (+132)

@@ -0,0 +1,132 @@
package com.chenhai.web.controller.system;
import java.util.List;
import org.apache.commons.lang3.ArrayUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.security.access.prepost.PreAuthorize;
import org.springframework.validation.annotation.Validated;
import org.springframework.web.bind.annotation.DeleteMapping;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.PutMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import com.chenhai.common.annotation.Log;
import com.chenhai.common.constant.UserConstants;
import com.chenhai.common.core.controller.BaseController;
import com.chenhai.common.core.domain.AjaxResult;
import com.chenhai.common.core.domain.entity.SysDept;
import com.chenhai.common.enums.BusinessType;
import com.chenhai.common.utils.StringUtils;
import com.chenhai.system.service.ISysDeptService;
/**
* Department information
*
* @author ruoyi
*/
@RestController
@RequestMapping("/system/dept")
public class SysDeptController extends BaseController
{
@Autowired
private ISysDeptService deptService;
/**
* Get the department list
*/
@PreAuthorize("@ss.hasPermi('system:dept:list')")
@GetMapping("/list")
public AjaxResult list(SysDept dept)
{
List<SysDept> depts = deptService.selectDeptList(dept);
return success(depts);
}
/**
* Query the department list, excluding a node and its descendants
*/
@PreAuthorize("@ss.hasPermi('system:dept:list')")
@GetMapping("/list/exclude/{deptId}")
public AjaxResult excludeChild(@PathVariable(value = "deptId", required = false) Long deptId)
{
List<SysDept> depts = deptService.selectDeptList(new SysDept());
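// Remove the excluded department itself and all of its descendants (any dept whose ancestors list contains deptId)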
depts.removeIf(d -> d.getDeptId().intValue() == deptId || ArrayUtils.contains(StringUtils.split(d.getAncestors(), ","), deptId + ""));
return success(depts);
}
/**
* Get details by department id
*/
@PreAuthorize("@ss.hasPermi('system:dept:query')")
@GetMapping(value = "/{deptId}")
public AjaxResult getInfo(@PathVariable Long deptId)
{
deptService.checkDeptDataScope(deptId);
return success(deptService.selectDeptById(deptId));
}
/**
* Add a department
*/
@PreAuthorize("@ss.hasPermi('system:dept:add')")
@Log(title = "部门管理", businessType = BusinessType.INSERT)
@PostMapping
public AjaxResult add(@Validated @RequestBody SysDept dept)
{
if (!deptService.checkDeptNameUnique(dept))
{
return error("新增部门'" + dept.getDeptName() + "'失败,部门名称已存在");
}
dept.setCreateBy(getUsername());
return toAjax(deptService.insertDept(dept));
}
/**
* Update a department
*/
@PreAuthorize("@ss.hasPermi('system:dept:edit')")
@Log(title = "部门管理", businessType = BusinessType.UPDATE)
@PutMapping
public AjaxResult edit(@Validated @RequestBody SysDept dept)
{
Long deptId = dept.getDeptId();
deptService.checkDeptDataScope(deptId);
if (!deptService.checkDeptNameUnique(dept))
{
return error("修改部门'" + dept.getDeptName() + "'失败,部门名称已存在");
}
else if (dept.getParentId().equals(deptId))
{
return error("修改部门'" + dept.getDeptName() + "'失败,上级部门不能是自己");
}
else if (StringUtils.equals(UserConstants.DEPT_DISABLE, dept.getStatus()) && deptService.selectNormalChildrenDeptById(deptId) > 0)
{
return error("该部门包含未停用的子部门!");
}
dept.setUpdateBy(getUsername());
return toAjax(deptService.updateDept(dept));
}
/**
* Delete a department
*/
@PreAuthorize("@ss.hasPermi('system:dept:remove')")
@Log(title = "部门管理", businessType = BusinessType.DELETE)
@DeleteMapping("/{deptId}")
public AjaxResult remove(@PathVariable Long deptId)
{
if (deptService.hasChildByDeptId(deptId))
{
return warn("存在下级部门,不允许删除");
}
if (deptService.checkDeptExistUser(deptId))
{
return warn("部门存在用户,不允许删除");
}
deptService.checkDeptDataScope(deptId);
return toAjax(deptService.deleteDeptById(deptId));
}
}

chenhai-admin/src/main/java/com/chenhai/web/controller/system/SysDictDataController.java (+121)

@@ -0,0 +1,121 @@
package com.chenhai.web.controller.system;
import java.util.ArrayList;
import java.util.List;
import jakarta.servlet.http.HttpServletResponse;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.security.access.prepost.PreAuthorize;
import org.springframework.validation.annotation.Validated;
import org.springframework.web.bind.annotation.DeleteMapping;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.PutMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import com.chenhai.common.annotation.Log;
import com.chenhai.common.core.controller.BaseController;
import com.chenhai.common.core.domain.AjaxResult;
import com.chenhai.common.core.domain.entity.SysDictData;
import com.chenhai.common.core.page.TableDataInfo;
import com.chenhai.common.enums.BusinessType;
import com.chenhai.common.utils.StringUtils;
import com.chenhai.common.utils.poi.ExcelUtil;
import com.chenhai.system.service.ISysDictDataService;
import com.chenhai.system.service.ISysDictTypeService;
/**
* Dictionary data information
*
* @author ruoyi
*/
@RestController
@RequestMapping("/system/dict/data")
public class SysDictDataController extends BaseController
{
@Autowired
private ISysDictDataService dictDataService;
@Autowired
private ISysDictTypeService dictTypeService;
@PreAuthorize("@ss.hasPermi('system:dict:list')")
@GetMapping("/list")
public TableDataInfo list(SysDictData dictData)
{
startPage();
List<SysDictData> list = dictDataService.selectDictDataList(dictData);
return getDataTable(list);
}
@Log(title = "字典数据", businessType = BusinessType.EXPORT)
@PreAuthorize("@ss.hasPermi('system:dict:export')")
@PostMapping("/export")
public void export(HttpServletResponse response, SysDictData dictData)
{
List<SysDictData> list = dictDataService.selectDictDataList(dictData);
ExcelUtil<SysDictData> util = new ExcelUtil<SysDictData>(SysDictData.class);
util.exportExcel(response, list, "字典数据");
}
/**
* Get dictionary data details
*/
@PreAuthorize("@ss.hasPermi('system:dict:query')")
@GetMapping(value = "/{dictCode}")
public AjaxResult getInfo(@PathVariable Long dictCode)
{
return success(dictDataService.selectDictDataById(dictCode));
}
/**
* Query dictionary data by dictionary type
*/
@GetMapping(value = "/type/{dictType}")
public AjaxResult dictType(@PathVariable String dictType)
{
List<SysDictData> data = dictTypeService.selectDictDataByType(dictType);
if (StringUtils.isNull(data))
{
data = new ArrayList<SysDictData>();
}
return success(data);
}
/**
* Add dictionary data
*/
@PreAuthorize("@ss.hasPermi('system:dict:add')")
@Log(title = "字典数据", businessType = BusinessType.INSERT)
@PostMapping
public AjaxResult add(@Validated @RequestBody SysDictData dict)
{
dict.setCreateBy(getUsername());
return toAjax(dictDataService.insertDictData(dict));
}
/**
* Update dictionary data
*/
@PreAuthorize("@ss.hasPermi('system:dict:edit')")
@Log(title = "字典数据", businessType = BusinessType.UPDATE)
@PutMapping
public AjaxResult edit(@Validated @RequestBody SysDictData dict)
{
dict.setUpdateBy(getUsername());
return toAjax(dictDataService.updateDictData(dict));
}
/**
* Delete dictionary data
*/
@PreAuthorize("@ss.hasPermi('system:dict:remove')")
@Log(title = "字典类型", businessType = BusinessType.DELETE)
@DeleteMapping("/{dictCodes}")
public AjaxResult remove(@PathVariable Long[] dictCodes)
{
dictDataService.deleteDictDataByIds(dictCodes);
return success();
}
}

chenhai-admin/src/main/java/com/chenhai/web/controller/system/SysDictTypeController.java (+131)

@@ -0,0 +1,131 @@
package com.chenhai.web.controller.system;
import java.util.List;
import jakarta.servlet.http.HttpServletResponse;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.security.access.prepost.PreAuthorize;
import org.springframework.validation.annotation.Validated;
import org.springframework.web.bind.annotation.DeleteMapping;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.PutMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import com.chenhai.common.annotation.Log;
import com.chenhai.common.core.controller.BaseController;
import com.chenhai.common.core.domain.AjaxResult;
import com.chenhai.common.core.domain.entity.SysDictType;
import com.chenhai.common.core.page.TableDataInfo;
import com.chenhai.common.enums.BusinessType;
import com.chenhai.common.utils.poi.ExcelUtil;
import com.chenhai.system.service.ISysDictTypeService;
/**
* 数据字典信息
*
* @author ruoyi
*/
@RestController
@RequestMapping("/system/dict/type")
public class SysDictTypeController extends BaseController
{
@Autowired
private ISysDictTypeService dictTypeService;
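/**
* 查询字典类型列表
*/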
@PreAuthorize("@ss.hasPermi('system:dict:list')")
@GetMapping("/list")
public TableDataInfo list(SysDictType dictType)
{
startPage();
List<SysDictType> list = dictTypeService.selectDictTypeList(dictType);
return getDataTable(list);
}
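/**
* 导出字典类型
*/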
@Log(title = "字典类型", businessType = BusinessType.EXPORT)
@PreAuthorize("@ss.hasPermi('system:dict:export')")
@PostMapping("/export")
public void export(HttpServletResponse response, SysDictType dictType)
{
List<SysDictType> list = dictTypeService.selectDictTypeList(dictType);
ExcelUtil<SysDictType> util = new ExcelUtil<SysDictType>(SysDictType.class);
util.exportExcel(response, list, "字典类型");
}
/**
* 查询字典类型详细
*/
@PreAuthorize("@ss.hasPermi('system:dict:query')")
@GetMapping(value = "/{dictId}")
public AjaxResult getInfo(@PathVariable Long dictId)
{
return success(dictTypeService.selectDictTypeById(dictId));
}
/**
* 新增字典类型
*/
@PreAuthorize("@ss.hasPermi('system:dict:add')")
@Log(title = "字典类型", businessType = BusinessType.INSERT)
@PostMapping
public AjaxResult add(@Validated @RequestBody SysDictType dict)
{
if (!dictTypeService.checkDictTypeUnique(dict))
{
return error("新增字典'" + dict.getDictName() + "'失败,字典类型已存在");
}
dict.setCreateBy(getUsername());
return toAjax(dictTypeService.insertDictType(dict));
}
/**
* 修改字典类型
*/
@PreAuthorize("@ss.hasPermi('system:dict:edit')")
@Log(title = "字典类型", businessType = BusinessType.UPDATE)
@PutMapping
public AjaxResult edit(@Validated @RequestBody SysDictType dict)
{
if (!dictTypeService.checkDictTypeUnique(dict))
{
return error("修改字典'" + dict.getDictName() + "'失败,字典类型已存在");
}
dict.setUpdateBy(getUsername());
return toAjax(dictTypeService.updateDictType(dict));
}
/**
* 删除字典类型
*/
@PreAuthorize("@ss.hasPermi('system:dict:remove')")
@Log(title = "字典类型", businessType = BusinessType.DELETE)
@DeleteMapping("/{dictIds}")
public AjaxResult remove(@PathVariable Long[] dictIds)
{
dictTypeService.deleteDictTypeByIds(dictIds);
return success();
}
/**
* 刷新字典缓存
*/
@PreAuthorize("@ss.hasPermi('system:dict:remove')")
@Log(title = "字典类型", businessType = BusinessType.CLEAN)
@DeleteMapping("/refreshCache")
public AjaxResult refreshCache()
{
dictTypeService.resetDictCache();
return success();
}
/**
* 获取字典选择框列表
*/
@GetMapping("/optionselect")
public AjaxResult optionselect()
{
List<SysDictType> dictTypes = dictTypeService.selectDictTypeAll();
return success(dictTypes);
}
}

29
chenhai-admin/src/main/java/com/chenhai/web/controller/system/SysIndexController.java

@ -0,0 +1,29 @@
package com.chenhai.web.controller.system;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import com.chenhai.common.config.RuoYiConfig;
import com.chenhai.common.utils.StringUtils;
/**
* 首页
*
* @author ruoyi
*/
@RestController
public class SysIndexController
{
/** 系统基础配置 */
@Autowired
private RuoYiConfig ruoyiConfig;
/**
* 访问首页提示语
*/
@RequestMapping("/")
public String index()
{
return StringUtils.format("欢迎使用{}后台管理框架,当前版本:v{},请通过前端地址访问。", ruoyiConfig.getName(), ruoyiConfig.getVersion());
}
}

131
chenhai-admin/src/main/java/com/chenhai/web/controller/system/SysLoginController.java

@ -0,0 +1,131 @@
package com.chenhai.web.controller.system;
import java.util.Date;
import java.util.List;
import java.util.Set;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RestController;
import com.chenhai.common.constant.Constants;
import com.chenhai.common.core.domain.AjaxResult;
import com.chenhai.common.core.domain.entity.SysMenu;
import com.chenhai.common.core.domain.entity.SysUser;
import com.chenhai.common.core.domain.model.LoginBody;
import com.chenhai.common.core.domain.model.LoginUser;
import com.chenhai.common.core.text.Convert;
import com.chenhai.common.utils.DateUtils;
import com.chenhai.common.utils.SecurityUtils;
import com.chenhai.common.utils.StringUtils;
import com.chenhai.framework.web.service.SysLoginService;
import com.chenhai.framework.web.service.SysPermissionService;
import com.chenhai.framework.web.service.TokenService;
import com.chenhai.system.service.ISysConfigService;
import com.chenhai.system.service.ISysMenuService;
/**
* 登录验证
*
* @author ruoyi
*/
@RestController
public class SysLoginController
{
@Autowired
private SysLoginService loginService;
@Autowired
private ISysMenuService menuService;
@Autowired
private SysPermissionService permissionService;
@Autowired
private TokenService tokenService;
@Autowired
private ISysConfigService configService;
/**
* 登录方法
*
* @param loginBody 登录信息
* @return 结果
*/
@PostMapping("/login")
public AjaxResult login(@RequestBody LoginBody loginBody)
{
AjaxResult ajax = AjaxResult.success();
// 生成令牌
String token = loginService.login(loginBody.getUsername(), loginBody.getPassword(), loginBody.getCode(),
loginBody.getUuid());
ajax.put(Constants.TOKEN, token);
return ajax;
}
/**
* 获取用户信息
*
* @return 用户信息
*/
@GetMapping("getInfo")
public AjaxResult getInfo()
{
LoginUser loginUser = SecurityUtils.getLoginUser();
SysUser user = loginUser.getUser();
// 角色集合
Set<String> roles = permissionService.getRolePermission(user);
// 权限集合
Set<String> permissions = permissionService.getMenuPermission(user);
if (!loginUser.getPermissions().equals(permissions))
{
loginUser.setPermissions(permissions);
tokenService.refreshToken(loginUser);
}
AjaxResult ajax = AjaxResult.success();
ajax.put("user", user);
ajax.put("roles", roles);
ajax.put("permissions", permissions);
ajax.put("isDefaultModifyPwd", initPasswordIsModify(user.getPwdUpdateDate()));
ajax.put("isPasswordExpired", passwordIsExpiration(user.getPwdUpdateDate()));
return ajax;
}
/**
* 获取路由信息
*
* @return 路由信息
*/
@GetMapping("getRouters")
public AjaxResult getRouters()
{
Long userId = SecurityUtils.getUserId();
List<SysMenu> menus = menuService.selectMenuTreeByUserId(userId);
return AjaxResult.success(menuService.buildMenus(menus));
}
// 检查初始密码是否提醒修改
public boolean initPasswordIsModify(Date pwdUpdateDate)
{
Integer initPasswordModify = Convert.toInt(configService.selectConfigByKey("sys.account.initPasswordModify"));
return initPasswordModify != null && initPasswordModify == 1 && pwdUpdateDate == null;
}
// 检查密码是否过期
public boolean passwordIsExpiration(Date pwdUpdateDate)
{
Integer passwordValidateDays = Convert.toInt(configService.selectConfigByKey("sys.account.passwordValidateDays"));
if (passwordValidateDays != null && passwordValidateDays > 0)
{
if (StringUtils.isNull(pwdUpdateDate))
{
// 如果从未修改过初始密码直接提醒过期
return true;
}
Date nowDate = DateUtils.getNowDate();
return DateUtils.differentDaysByMillisecond(nowDate, pwdUpdateDate) > passwordValidateDays;
}
return false;
}
}

142
chenhai-admin/src/main/java/com/chenhai/web/controller/system/SysMenuController.java

@ -0,0 +1,142 @@
package com.chenhai.web.controller.system;
import java.util.List;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.security.access.prepost.PreAuthorize;
import org.springframework.validation.annotation.Validated;
import org.springframework.web.bind.annotation.DeleteMapping;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.PutMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import com.chenhai.common.annotation.Log;
import com.chenhai.common.constant.UserConstants;
import com.chenhai.common.core.controller.BaseController;
import com.chenhai.common.core.domain.AjaxResult;
import com.chenhai.common.core.domain.entity.SysMenu;
import com.chenhai.common.enums.BusinessType;
import com.chenhai.common.utils.StringUtils;
import com.chenhai.system.service.ISysMenuService;
/**
* 菜单信息
*
* @author ruoyi
*/
@RestController
@RequestMapping("/system/menu")
public class SysMenuController extends BaseController
{
@Autowired
private ISysMenuService menuService;
/**
* 获取菜单列表
*/
@PreAuthorize("@ss.hasPermi('system:menu:list')")
@GetMapping("/list")
public AjaxResult list(SysMenu menu)
{
List<SysMenu> menus = menuService.selectMenuList(menu, getUserId());
return success(menus);
}
/**
* 根据菜单编号获取详细信息
*/
@PreAuthorize("@ss.hasPermi('system:menu:query')")
@GetMapping(value = "/{menuId}")
public AjaxResult getInfo(@PathVariable Long menuId)
{
return success(menuService.selectMenuById(menuId));
}
/**
* 获取菜单下拉树列表
*/
@GetMapping("/treeselect")
public AjaxResult treeselect(SysMenu menu)
{
List<SysMenu> menus = menuService.selectMenuList(menu, getUserId());
return success(menuService.buildMenuTreeSelect(menus));
}
/**
* 加载对应角色菜单列表树
*/
@GetMapping(value = "/roleMenuTreeselect/{roleId}")
public AjaxResult roleMenuTreeselect(@PathVariable("roleId") Long roleId)
{
List<SysMenu> menus = menuService.selectMenuList(getUserId());
AjaxResult ajax = AjaxResult.success();
ajax.put("checkedKeys", menuService.selectMenuListByRoleId(roleId));
ajax.put("menus", menuService.buildMenuTreeSelect(menus));
return ajax;
}
/**
* 新增菜单
*/
@PreAuthorize("@ss.hasPermi('system:menu:add')")
@Log(title = "菜单管理", businessType = BusinessType.INSERT)
@PostMapping
public AjaxResult add(@Validated @RequestBody SysMenu menu)
{
if (!menuService.checkMenuNameUnique(menu))
{
return error("新增菜单'" + menu.getMenuName() + "'失败,菜单名称已存在");
}
else if (UserConstants.YES_FRAME.equals(menu.getIsFrame()) && !StringUtils.ishttp(menu.getPath()))
{
return error("新增菜单'" + menu.getMenuName() + "'失败,地址必须以http(s)://开头");
}
menu.setCreateBy(getUsername());
return toAjax(menuService.insertMenu(menu));
}
/**
* 修改菜单
*/
@PreAuthorize("@ss.hasPermi('system:menu:edit')")
@Log(title = "菜单管理", businessType = BusinessType.UPDATE)
@PutMapping
public AjaxResult edit(@Validated @RequestBody SysMenu menu)
{
if (!menuService.checkMenuNameUnique(menu))
{
return error("修改菜单'" + menu.getMenuName() + "'失败,菜单名称已存在");
}
else if (UserConstants.YES_FRAME.equals(menu.getIsFrame()) && !StringUtils.ishttp(menu.getPath()))
{
return error("修改菜单'" + menu.getMenuName() + "'失败,地址必须以http(s)://开头");
}
else if (menu.getMenuId().equals(menu.getParentId()))
{
return error("修改菜单'" + menu.getMenuName() + "'失败,上级菜单不能选择自己");
}
menu.setUpdateBy(getUsername());
return toAjax(menuService.updateMenu(menu));
}
/**
* 删除菜单
*/
@PreAuthorize("@ss.hasPermi('system:menu:remove')")
@Log(title = "菜单管理", businessType = BusinessType.DELETE)
@DeleteMapping("/{menuId}")
public AjaxResult remove(@PathVariable("menuId") Long menuId)
{
if (menuService.hasChildByMenuId(menuId))
{
return warn("存在子菜单,不允许删除");
}
if (menuService.checkMenuExistRole(menuId))
{
return warn("菜单已分配,不允许删除");
}
return toAjax(menuService.deleteMenuById(menuId));
}
}

91
chenhai-admin/src/main/java/com/chenhai/web/controller/system/SysNoticeController.java

@ -0,0 +1,91 @@
package com.chenhai.web.controller.system;
import java.util.List;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.security.access.prepost.PreAuthorize;
import org.springframework.validation.annotation.Validated;
import org.springframework.web.bind.annotation.DeleteMapping;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.PutMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import com.chenhai.common.annotation.Log;
import com.chenhai.common.core.controller.BaseController;
import com.chenhai.common.core.domain.AjaxResult;
import com.chenhai.common.core.page.TableDataInfo;
import com.chenhai.common.enums.BusinessType;
import com.chenhai.system.domain.SysNotice;
import com.chenhai.system.service.ISysNoticeService;
/**
* 公告 信息操作处理
*
* @author ruoyi
*/
@RestController
@RequestMapping("/system/notice")
public class SysNoticeController extends BaseController
{
@Autowired
private ISysNoticeService noticeService;
/**
* 获取通知公告列表
*/
@PreAuthorize("@ss.hasPermi('system:notice:list')")
@GetMapping("/list")
public TableDataInfo list(SysNotice notice)
{
startPage();
List<SysNotice> list = noticeService.selectNoticeList(notice);
return getDataTable(list);
}
/**
* 根据通知公告编号获取详细信息
*/
@PreAuthorize("@ss.hasPermi('system:notice:query')")
@GetMapping(value = "/{noticeId}")
public AjaxResult getInfo(@PathVariable Long noticeId)
{
return success(noticeService.selectNoticeById(noticeId));
}
/**
* 新增通知公告
*/
@PreAuthorize("@ss.hasPermi('system:notice:add')")
@Log(title = "通知公告", businessType = BusinessType.INSERT)
@PostMapping
public AjaxResult add(@Validated @RequestBody SysNotice notice)
{
notice.setCreateBy(getUsername());
return toAjax(noticeService.insertNotice(notice));
}
/**
* 修改通知公告
*/
@PreAuthorize("@ss.hasPermi('system:notice:edit')")
@Log(title = "通知公告", businessType = BusinessType.UPDATE)
@PutMapping
public AjaxResult edit(@Validated @RequestBody SysNotice notice)
{
notice.setUpdateBy(getUsername());
return toAjax(noticeService.updateNotice(notice));
}
/**
* 删除通知公告
*/
@PreAuthorize("@ss.hasPermi('system:notice:remove')")
@Log(title = "通知公告", businessType = BusinessType.DELETE)
@DeleteMapping("/{noticeIds}")
public AjaxResult remove(@PathVariable Long[] noticeIds)
{
return toAjax(noticeService.deleteNoticeByIds(noticeIds));
}
}

129
chenhai-admin/src/main/java/com/chenhai/web/controller/system/SysPostController.java

@ -0,0 +1,129 @@
package com.chenhai.web.controller.system;
import java.util.List;
import jakarta.servlet.http.HttpServletResponse;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.security.access.prepost.PreAuthorize;
import org.springframework.validation.annotation.Validated;
import org.springframework.web.bind.annotation.DeleteMapping;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.PutMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import com.chenhai.common.annotation.Log;
import com.chenhai.common.core.controller.BaseController;
import com.chenhai.common.core.domain.AjaxResult;
import com.chenhai.common.core.page.TableDataInfo;
import com.chenhai.common.enums.BusinessType;
import com.chenhai.common.utils.poi.ExcelUtil;
import com.chenhai.system.domain.SysPost;
import com.chenhai.system.service.ISysPostService;
/**
* 岗位信息操作处理
*
* @author ruoyi
*/
@RestController
@RequestMapping("/system/post")
public class SysPostController extends BaseController
{
@Autowired
private ISysPostService postService;
/**
* 获取岗位列表
*/
@PreAuthorize("@ss.hasPermi('system:post:list')")
@GetMapping("/list")
public TableDataInfo list(SysPost post)
{
startPage();
List<SysPost> list = postService.selectPostList(post);
return getDataTable(list);
}
@Log(title = "岗位管理", businessType = BusinessType.EXPORT)
@PreAuthorize("@ss.hasPermi('system:post:export')")
@PostMapping("/export")
public void export(HttpServletResponse response, SysPost post)
{
List<SysPost> list = postService.selectPostList(post);
ExcelUtil<SysPost> util = new ExcelUtil<SysPost>(SysPost.class);
util.exportExcel(response, list, "岗位数据");
}
/**
* 根据岗位编号获取详细信息
*/
@PreAuthorize("@ss.hasPermi('system:post:query')")
@GetMapping(value = "/{postId}")
public AjaxResult getInfo(@PathVariable Long postId)
{
return success(postService.selectPostById(postId));
}
/**
* 新增岗位
*/
@PreAuthorize("@ss.hasPermi('system:post:add')")
@Log(title = "岗位管理", businessType = BusinessType.INSERT)
@PostMapping
public AjaxResult add(@Validated @RequestBody SysPost post)
{
if (!postService.checkPostNameUnique(post))
{
return error("新增岗位'" + post.getPostName() + "'失败,岗位名称已存在");
}
else if (!postService.checkPostCodeUnique(post))
{
return error("新增岗位'" + post.getPostName() + "'失败,岗位编码已存在");
}
post.setCreateBy(getUsername());
return toAjax(postService.insertPost(post));
}
/**
* 修改岗位
*/
@PreAuthorize("@ss.hasPermi('system:post:edit')")
@Log(title = "岗位管理", businessType = BusinessType.UPDATE)
@PutMapping
public AjaxResult edit(@Validated @RequestBody SysPost post)
{
if (!postService.checkPostNameUnique(post))
{
return error("修改岗位'" + post.getPostName() + "'失败,岗位名称已存在");
}
else if (!postService.checkPostCodeUnique(post))
{
return error("修改岗位'" + post.getPostName() + "'失败,岗位编码已存在");
}
post.setUpdateBy(getUsername());
return toAjax(postService.updatePost(post));
}
/**
* 删除岗位
*/
@PreAuthorize("@ss.hasPermi('system:post:remove')")
@Log(title = "岗位管理", businessType = BusinessType.DELETE)
@DeleteMapping("/{postIds}")
public AjaxResult remove(@PathVariable Long[] postIds)
{
return toAjax(postService.deletePostByIds(postIds));
}
/**
* 获取岗位选择框列表
*/
@GetMapping("/optionselect")
public AjaxResult optionselect()
{
List<SysPost> posts = postService.selectPostAll();
return success(posts);
}
}

148
chenhai-admin/src/main/java/com/chenhai/web/controller/system/SysProfileController.java

@ -0,0 +1,148 @@
package com.chenhai.web.controller.system;
import java.util.Map;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.PutMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.multipart.MultipartFile;
import com.chenhai.common.annotation.Log;
import com.chenhai.common.config.RuoYiConfig;
import com.chenhai.common.core.controller.BaseController;
import com.chenhai.common.core.domain.AjaxResult;
import com.chenhai.common.core.domain.entity.SysUser;
import com.chenhai.common.core.domain.model.LoginUser;
import com.chenhai.common.enums.BusinessType;
import com.chenhai.common.utils.DateUtils;
import com.chenhai.common.utils.SecurityUtils;
import com.chenhai.common.utils.StringUtils;
import com.chenhai.common.utils.file.FileUploadUtils;
import com.chenhai.common.utils.file.FileUtils;
import com.chenhai.common.utils.file.MimeTypeUtils;
import com.chenhai.framework.web.service.TokenService;
import com.chenhai.system.service.ISysUserService;
/**
* 个人信息 业务处理
*
* @author ruoyi
*/
@RestController
@RequestMapping("/system/user/profile")
public class SysProfileController extends BaseController
{
@Autowired
private ISysUserService userService;
@Autowired
private TokenService tokenService;
/**
* 个人信息
*/
@GetMapping
public AjaxResult profile()
{
LoginUser loginUser = getLoginUser();
SysUser user = loginUser.getUser();
AjaxResult ajax = AjaxResult.success(user);
ajax.put("roleGroup", userService.selectUserRoleGroup(loginUser.getUsername()));
ajax.put("postGroup", userService.selectUserPostGroup(loginUser.getUsername()));
return ajax;
}
/**
* 修改个人信息
*/
@Log(title = "个人信息", businessType = BusinessType.UPDATE)
@PutMapping
public AjaxResult updateProfile(@RequestBody SysUser user)
{
LoginUser loginUser = getLoginUser();
SysUser currentUser = loginUser.getUser();
currentUser.setNickName(user.getNickName());
currentUser.setEmail(user.getEmail());
currentUser.setPhonenumber(user.getPhonenumber());
currentUser.setSex(user.getSex());
if (StringUtils.isNotEmpty(user.getPhonenumber()) && !userService.checkPhoneUnique(currentUser))
{
return error("修改用户'" + loginUser.getUsername() + "'失败,手机号码已存在");
}
if (StringUtils.isNotEmpty(user.getEmail()) && !userService.checkEmailUnique(currentUser))
{
return error("修改用户'" + loginUser.getUsername() + "'失败,邮箱账号已存在");
}
if (userService.updateUserProfile(currentUser) > 0)
{
// 更新缓存用户信息
tokenService.setLoginUser(loginUser);
return success();
}
return error("修改个人信息异常,请联系管理员");
}
/**
* 修改密码
*/
@Log(title = "个人信息", businessType = BusinessType.UPDATE)
@PutMapping("/updatePwd")
public AjaxResult updatePwd(@RequestBody Map<String, String> params)
{
String oldPassword = params.get("oldPassword");
String newPassword = params.get("newPassword");
LoginUser loginUser = getLoginUser();
Long userId = loginUser.getUserId();
String password = loginUser.getPassword();
if (!SecurityUtils.matchesPassword(oldPassword, password))
{
return error("修改密码失败,旧密码错误");
}
if (SecurityUtils.matchesPassword(newPassword, password))
{
return error("新密码不能与旧密码相同");
}
newPassword = SecurityUtils.encryptPassword(newPassword);
if (userService.resetUserPwd(userId, newPassword) > 0)
{
// 更新缓存用户密码&密码最后更新时间
loginUser.getUser().setPwdUpdateDate(DateUtils.getNowDate());
loginUser.getUser().setPassword(newPassword);
tokenService.setLoginUser(loginUser);
return success();
}
return error("修改密码异常,请联系管理员");
}
/**
* 头像上传
*/
@Log(title = "用户头像", businessType = BusinessType.UPDATE)
@PostMapping("/avatar")
public AjaxResult avatar(@RequestParam("avatarfile") MultipartFile file) throws Exception
{
if (!file.isEmpty())
{
LoginUser loginUser = getLoginUser();
String avatar = FileUploadUtils.upload(RuoYiConfig.getAvatarPath(), file, MimeTypeUtils.IMAGE_EXTENSION, true);
if (userService.updateUserAvatar(loginUser.getUserId(), avatar))
{
String oldAvatar = loginUser.getUser().getAvatar();
if (StringUtils.isNotEmpty(oldAvatar))
{
FileUtils.deleteFile(RuoYiConfig.getProfile() + FileUtils.stripPrefix(oldAvatar));
}
AjaxResult ajax = AjaxResult.success();
ajax.put("imgUrl", avatar);
// 更新缓存用户头像
loginUser.getUser().setAvatar(avatar);
tokenService.setLoginUser(loginUser);
return ajax;
}
}
return error("上传图片异常,请联系管理员");
}
}

38
chenhai-admin/src/main/java/com/chenhai/web/controller/system/SysRegisterController.java

@ -0,0 +1,38 @@
package com.chenhai.web.controller.system;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RestController;
import com.chenhai.common.core.controller.BaseController;
import com.chenhai.common.core.domain.AjaxResult;
import com.chenhai.common.core.domain.model.RegisterBody;
import com.chenhai.common.utils.StringUtils;
import com.chenhai.framework.web.service.SysRegisterService;
import com.chenhai.system.service.ISysConfigService;
/**
* 注册验证
*
* @author ruoyi
*/
@RestController
public class SysRegisterController extends BaseController
{
@Autowired
private SysRegisterService registerService;
@Autowired
private ISysConfigService configService;
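/**
* 用户注册
*/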
@PostMapping("/register")
public AjaxResult register(@RequestBody RegisterBody user)
{
if (!("true".equals(configService.selectConfigByKey("sys.account.registerUser"))))
{
return error("当前系统没有开启注册功能!");
}
String msg = registerService.register(user);
return StringUtils.isEmpty(msg) ? success() : error(msg);
}
}

262
chenhai-admin/src/main/java/com/chenhai/web/controller/system/SysRoleController.java

@ -0,0 +1,262 @@
package com.chenhai.web.controller.system;
import java.util.List;
import jakarta.servlet.http.HttpServletResponse;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.security.access.prepost.PreAuthorize;
import org.springframework.validation.annotation.Validated;
import org.springframework.web.bind.annotation.DeleteMapping;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.PutMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import com.chenhai.common.annotation.Log;
import com.chenhai.common.core.controller.BaseController;
import com.chenhai.common.core.domain.AjaxResult;
import com.chenhai.common.core.domain.entity.SysDept;
import com.chenhai.common.core.domain.entity.SysRole;
import com.chenhai.common.core.domain.entity.SysUser;
import com.chenhai.common.core.domain.model.LoginUser;
import com.chenhai.common.core.page.TableDataInfo;
import com.chenhai.common.enums.BusinessType;
import com.chenhai.common.utils.StringUtils;
import com.chenhai.common.utils.poi.ExcelUtil;
import com.chenhai.framework.web.service.SysPermissionService;
import com.chenhai.framework.web.service.TokenService;
import com.chenhai.system.domain.SysUserRole;
import com.chenhai.system.service.ISysDeptService;
import com.chenhai.system.service.ISysRoleService;
import com.chenhai.system.service.ISysUserService;
/**
* 角色信息
*
* @author ruoyi
*/
@RestController
@RequestMapping("/system/role")
public class SysRoleController extends BaseController
{
@Autowired
private ISysRoleService roleService;
@Autowired
private TokenService tokenService;
@Autowired
private SysPermissionService permissionService;
@Autowired
private ISysUserService userService;
@Autowired
private ISysDeptService deptService;
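/**
* 获取角色列表
*/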
@PreAuthorize("@ss.hasPermi('system:role:list')")
@GetMapping("/list")
public TableDataInfo list(SysRole role)
{
startPage();
List<SysRole> list = roleService.selectRoleList(role);
return getDataTable(list);
}
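/**
* 导出角色数据
*/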
@Log(title = "角色管理", businessType = BusinessType.EXPORT)
@PreAuthorize("@ss.hasPermi('system:role:export')")
@PostMapping("/export")
public void export(HttpServletResponse response, SysRole role)
{
List<SysRole> list = roleService.selectRoleList(role);
ExcelUtil<SysRole> util = new ExcelUtil<SysRole>(SysRole.class);
util.exportExcel(response, list, "角色数据");
}
/**
* 根据角色编号获取详细信息
*/
@PreAuthorize("@ss.hasPermi('system:role:query')")
@GetMapping(value = "/{roleId}")
public AjaxResult getInfo(@PathVariable Long roleId)
{
roleService.checkRoleDataScope(roleId);
return success(roleService.selectRoleById(roleId));
}
/**
* 新增角色
*/
@PreAuthorize("@ss.hasPermi('system:role:add')")
@Log(title = "角色管理", businessType = BusinessType.INSERT)
@PostMapping
public AjaxResult add(@Validated @RequestBody SysRole role)
{
if (!roleService.checkRoleNameUnique(role))
{
return error("新增角色'" + role.getRoleName() + "'失败,角色名称已存在");
}
else if (!roleService.checkRoleKeyUnique(role))
{
return error("新增角色'" + role.getRoleName() + "'失败,角色权限已存在");
}
role.setCreateBy(getUsername());
return toAjax(roleService.insertRole(role));
}
/**
* 修改保存角色
*/
@PreAuthorize("@ss.hasPermi('system:role:edit')")
@Log(title = "角色管理", businessType = BusinessType.UPDATE)
@PutMapping
public AjaxResult edit(@Validated @RequestBody SysRole role)
{
roleService.checkRoleAllowed(role);
roleService.checkRoleDataScope(role.getRoleId());
if (!roleService.checkRoleNameUnique(role))
{
return error("修改角色'" + role.getRoleName() + "'失败,角色名称已存在");
}
else if (!roleService.checkRoleKeyUnique(role))
{
return error("修改角色'" + role.getRoleName() + "'失败,角色权限已存在");
}
role.setUpdateBy(getUsername());
if (roleService.updateRole(role) > 0)
{
// 更新缓存用户权限
LoginUser loginUser = getLoginUser();
if (StringUtils.isNotNull(loginUser.getUser()) && !loginUser.getUser().isAdmin())
{
loginUser.setUser(userService.selectUserByUserName(loginUser.getUser().getUserName()));
loginUser.setPermissions(permissionService.getMenuPermission(loginUser.getUser()));
tokenService.setLoginUser(loginUser);
}
return success();
}
return error("修改角色'" + role.getRoleName() + "'失败,请联系管理员");
}
/**
* 修改保存数据权限
*/
@PreAuthorize("@ss.hasPermi('system:role:edit')")
@Log(title = "角色管理", businessType = BusinessType.UPDATE)
@PutMapping("/dataScope")
public AjaxResult dataScope(@RequestBody SysRole role)
{
roleService.checkRoleAllowed(role);
roleService.checkRoleDataScope(role.getRoleId());
return toAjax(roleService.authDataScope(role));
}
/**
* 状态修改
*/
@PreAuthorize("@ss.hasPermi('system:role:edit')")
@Log(title = "角色管理", businessType = BusinessType.UPDATE)
@PutMapping("/changeStatus")
public AjaxResult changeStatus(@RequestBody SysRole role)
{
roleService.checkRoleAllowed(role);
roleService.checkRoleDataScope(role.getRoleId());
role.setUpdateBy(getUsername());
return toAjax(roleService.updateRoleStatus(role));
}
/**
* 删除角色
*/
@PreAuthorize("@ss.hasPermi('system:role:remove')")
@Log(title = "角色管理", businessType = BusinessType.DELETE)
@DeleteMapping("/{roleIds}")
public AjaxResult remove(@PathVariable Long[] roleIds)
{
return toAjax(roleService.deleteRoleByIds(roleIds));
}
/**
* 获取角色选择框列表
*/
@PreAuthorize("@ss.hasPermi('system:role:query')")
@GetMapping("/optionselect")
public AjaxResult optionselect()
{
return success(roleService.selectRoleAll());
}
/**
* 查询已分配用户角色列表
*/
@PreAuthorize("@ss.hasPermi('system:role:list')")
@GetMapping("/authUser/allocatedList")
public TableDataInfo allocatedList(SysUser user)
{
startPage();
List<SysUser> list = userService.selectAllocatedList(user);
return getDataTable(list);
}
/**
* 查询未分配用户角色列表
*/
@PreAuthorize("@ss.hasPermi('system:role:list')")
@GetMapping("/authUser/unallocatedList")
public TableDataInfo unallocatedList(SysUser user)
{
startPage();
List<SysUser> list = userService.selectUnallocatedList(user);
return getDataTable(list);
}
/**
* 取消授权用户
*/
@PreAuthorize("@ss.hasPermi('system:role:edit')")
@Log(title = "角色管理", businessType = BusinessType.GRANT)
@PutMapping("/authUser/cancel")
public AjaxResult cancelAuthUser(@RequestBody SysUserRole userRole)
{
return toAjax(roleService.deleteAuthUser(userRole));
}
/**
* 批量取消授权用户
*/
@PreAuthorize("@ss.hasPermi('system:role:edit')")
@Log(title = "角色管理", businessType = BusinessType.GRANT)
@PutMapping("/authUser/cancelAll")
public AjaxResult cancelAuthUserAll(Long roleId, Long[] userIds)
{
return toAjax(roleService.deleteAuthUsers(roleId, userIds));
}
/**
* 批量选择用户授权
*/
@PreAuthorize("@ss.hasPermi('system:role:edit')")
@Log(title = "角色管理", businessType = BusinessType.GRANT)
@PutMapping("/authUser/selectAll")
public AjaxResult selectAuthUserAll(Long roleId, Long[] userIds)
{
roleService.checkRoleDataScope(roleId);
return toAjax(roleService.insertAuthUsers(roleId, userIds));
}
/**
* 获取对应角色部门树列表
*/
@PreAuthorize("@ss.hasPermi('system:role:query')")
@GetMapping(value = "/deptTree/{roleId}")
public AjaxResult deptTree(@PathVariable("roleId") Long roleId)
{
AjaxResult ajax = AjaxResult.success();
ajax.put("checkedKeys", deptService.selectDeptListByRoleId(roleId));
ajax.put("depts", deptService.selectDeptTreeList(new SysDept()));
return ajax;
}
}

256
chenhai-admin/src/main/java/com/chenhai/web/controller/system/SysUserController.java

@ -0,0 +1,256 @@
package com.chenhai.web.controller.system;
import java.util.List;
import java.util.stream.Collectors;
import jakarta.servlet.http.HttpServletResponse;
import org.apache.commons.lang3.ArrayUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.security.access.prepost.PreAuthorize;
import org.springframework.validation.annotation.Validated;
import org.springframework.web.bind.annotation.DeleteMapping;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.PutMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.multipart.MultipartFile;
import com.chenhai.common.annotation.Log;
import com.chenhai.common.core.controller.BaseController;
import com.chenhai.common.core.domain.AjaxResult;
import com.chenhai.common.core.domain.entity.SysDept;
import com.chenhai.common.core.domain.entity.SysRole;
import com.chenhai.common.core.domain.entity.SysUser;
import com.chenhai.common.core.page.TableDataInfo;
import com.chenhai.common.enums.BusinessType;
import com.chenhai.common.utils.SecurityUtils;
import com.chenhai.common.utils.StringUtils;
import com.chenhai.common.utils.poi.ExcelUtil;
import com.chenhai.system.service.ISysDeptService;
import com.chenhai.system.service.ISysPostService;
import com.chenhai.system.service.ISysRoleService;
import com.chenhai.system.service.ISysUserService;
/**
* 用户信息
*
* @author ruoyi
*/
@RestController
@RequestMapping("/system/user")
public class SysUserController extends BaseController
{
@Autowired
private ISysUserService userService;
@Autowired
private ISysRoleService roleService;
@Autowired
private ISysDeptService deptService;
@Autowired
private ISysPostService postService;
/**
* 获取用户列表
*/
@PreAuthorize("@ss.hasPermi('system:user:list')")
@GetMapping("/list")
public TableDataInfo list(SysUser user)
{
startPage();
List<SysUser> list = userService.selectUserList(user);
return getDataTable(list);
}
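/**
* 导出用户数据
*/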
@Log(title = "用户管理", businessType = BusinessType.EXPORT)
@PreAuthorize("@ss.hasPermi('system:user:export')")
@PostMapping("/export")
public void export(HttpServletResponse response, SysUser user)
{
List<SysUser> list = userService.selectUserList(user);
ExcelUtil<SysUser> util = new ExcelUtil<SysUser>(SysUser.class);
util.exportExcel(response, list, "用户数据");
}
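/**
* 导入用户数据
*/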
@Log(title = "用户管理", businessType = BusinessType.IMPORT)
@PreAuthorize("@ss.hasPermi('system:user:import')")
@PostMapping("/importData")
public AjaxResult importData(MultipartFile file, boolean updateSupport) throws Exception
{
ExcelUtil<SysUser> util = new ExcelUtil<SysUser>(SysUser.class);
List<SysUser> userList = util.importExcel(file.getInputStream());
String operName = getUsername();
String message = userService.importUser(userList, updateSupport, operName);
return success(message);
}
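/**
* 下载用户导入模板
*/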
@PostMapping("/importTemplate")
public void importTemplate(HttpServletResponse response)
{
ExcelUtil<SysUser> util = new ExcelUtil<SysUser>(SysUser.class);
util.importTemplateExcel(response, "用户数据");
}
/**
* 根据用户编号获取详细信息
*/
@PreAuthorize("@ss.hasPermi('system:user:query')")
@GetMapping(value = { "/", "/{userId}" })
public AjaxResult getInfo(@PathVariable(value = "userId", required = false) Long userId)
{
AjaxResult ajax = AjaxResult.success();
if (StringUtils.isNotNull(userId))
{
userService.checkUserDataScope(userId);
SysUser sysUser = userService.selectUserById(userId);
ajax.put(AjaxResult.DATA_TAG, sysUser);
ajax.put("postIds", postService.selectPostListByUserId(userId));
ajax.put("roleIds", sysUser.getRoles().stream().map(SysRole::getRoleId).collect(Collectors.toList()));
}
List<SysRole> roles = roleService.selectRoleAll();
ajax.put("roles", SysUser.isAdmin(userId) ? roles : roles.stream().filter(r -> !r.isAdmin()).collect(Collectors.toList()));
ajax.put("posts", postService.selectPostAll());
return ajax;
}
/**
* 新增用户
*/
@PreAuthorize("@ss.hasPermi('system:user:add')")
@Log(title = "用户管理", businessType = BusinessType.INSERT)
@PostMapping
public AjaxResult add(@Validated @RequestBody SysUser user)
{
deptService.checkDeptDataScope(user.getDeptId());
roleService.checkRoleDataScope(user.getRoleIds());
if (!userService.checkUserNameUnique(user))
{
return error("新增用户'" + user.getUserName() + "'失败,登录账号已存在");
}
else if (StringUtils.isNotEmpty(user.getPhonenumber()) && !userService.checkPhoneUnique(user))
{
return error("新增用户'" + user.getUserName() + "'失败,手机号码已存在");
}
else if (StringUtils.isNotEmpty(user.getEmail()) && !userService.checkEmailUnique(user))
{
return error("新增用户'" + user.getUserName() + "'失败,邮箱账号已存在");
}
user.setCreateBy(getUsername());
user.setPassword(SecurityUtils.encryptPassword(user.getPassword()));
return toAjax(userService.insertUser(user));
}
/**
* 修改用户
*/
@PreAuthorize("@ss.hasPermi('system:user:edit')")
@Log(title = "用户管理", businessType = BusinessType.UPDATE)
@PutMapping
public AjaxResult edit(@Validated @RequestBody SysUser user)
{
userService.checkUserAllowed(user);
userService.checkUserDataScope(user.getUserId());
deptService.checkDeptDataScope(user.getDeptId());
roleService.checkRoleDataScope(user.getRoleIds());
if (!userService.checkUserNameUnique(user))
{
return error("修改用户'" + user.getUserName() + "'失败,登录账号已存在");
}
else if (StringUtils.isNotEmpty(user.getPhonenumber()) && !userService.checkPhoneUnique(user))
{
return error("修改用户'" + user.getUserName() + "'失败,手机号码已存在");
}
else if (StringUtils.isNotEmpty(user.getEmail()) && !userService.checkEmailUnique(user))
{
return error("修改用户'" + user.getUserName() + "'失败,邮箱账号已存在");
}
user.setUpdateBy(getUsername());
return toAjax(userService.updateUser(user));
}
/**
* 删除用户
*/
@PreAuthorize("@ss.hasPermi('system:user:remove')")
@Log(title = "用户管理", businessType = BusinessType.DELETE)
@DeleteMapping("/{userIds}")
public AjaxResult remove(@PathVariable Long[] userIds)
{
if (ArrayUtils.contains(userIds, getUserId()))
{
return error("当前用户不能删除");
}
return toAjax(userService.deleteUserByIds(userIds));
}
/**
* 重置密码
*/
@PreAuthorize("@ss.hasPermi('system:user:resetPwd')")
@Log(title = "用户管理", businessType = BusinessType.UPDATE)
@PutMapping("/resetPwd")
public AjaxResult resetPwd(@RequestBody SysUser user)
{
userService.checkUserAllowed(user);
userService.checkUserDataScope(user.getUserId());
user.setPassword(SecurityUtils.encryptPassword(user.getPassword()));
user.setUpdateBy(getUsername());
return toAjax(userService.resetPwd(user));
}
/**
* 状态修改
*/
@PreAuthorize("@ss.hasPermi('system:user:edit')")
@Log(title = "用户管理", businessType = BusinessType.UPDATE)
@PutMapping("/changeStatus")
public AjaxResult changeStatus(@RequestBody SysUser user)
{
userService.checkUserAllowed(user);
userService.checkUserDataScope(user.getUserId());
user.setUpdateBy(getUsername());
return toAjax(userService.updateUserStatus(user));
}
/**
* 根据用户编号获取授权角色
*/
@PreAuthorize("@ss.hasPermi('system:user:query')")
@GetMapping("/authRole/{userId}")
public AjaxResult authRole(@PathVariable("userId") Long userId)
{
AjaxResult ajax = AjaxResult.success();
SysUser user = userService.selectUserById(userId);
List<SysRole> roles = roleService.selectRolesByUserId(userId);
ajax.put("user", user);
ajax.put("roles", SysUser.isAdmin(userId) ? roles : roles.stream().filter(r -> !r.isAdmin()).collect(Collectors.toList()));
return ajax;
}
/**
* 用户授权角色
*/
@PreAuthorize("@ss.hasPermi('system:user:edit')")
@Log(title = "用户管理", businessType = BusinessType.GRANT)
@PutMapping("/authRole")
public AjaxResult insertAuthRole(Long userId, Long[] roleIds)
{
userService.checkUserDataScope(userId);
roleService.checkRoleDataScope(roleIds);
userService.insertUserAuth(userId, roleIds);
return success();
}
/**
* 获取部门树列表
*/
@PreAuthorize("@ss.hasPermi('system:user:list')")
@GetMapping("/deptTree")
public AjaxResult deptTree(SysDept dept)
{
return success(deptService.selectDeptTreeList(dept));
}
}

175
chenhai-admin/src/main/java/com/chenhai/web/controller/tool/TestController.java

@ -0,0 +1,175 @@
package com.chenhai.web.controller.tool;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import org.springframework.web.bind.annotation.DeleteMapping;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.PutMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import com.chenhai.common.core.controller.BaseController;
import com.chenhai.common.core.domain.R;
import com.chenhai.common.utils.StringUtils;
import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.media.Schema;
import io.swagger.v3.oas.annotations.tags.Tag;
/**
* swagger 用户测试方法
*
* @author ruoyi
*/
@Tag(name = "用户信息管理")
@RestController
@RequestMapping("/test/user")
public class TestController extends BaseController
{
// 演示用内存数据,仅供 swagger 接口测试;静态字段使用静态初始化块填充
private final static Map<Integer, UserEntity> users = new LinkedHashMap<Integer, UserEntity>();
static
{
users.put(1, new UserEntity(1, "admin", "admin123", "15888888888"));
users.put(2, new UserEntity(2, "ry", "admin123", "15666666666"));
}
@Operation(summary = "获取用户列表")
@GetMapping("/list")
public R<List<UserEntity>> userList()
{
List<UserEntity> userList = new ArrayList<UserEntity>(users.values());
return R.ok(userList);
}
@Operation(summary = "获取用户详细")
@GetMapping("/{userId}")
public R<UserEntity> getUser(@PathVariable(name = "userId")
Integer userId)
{
if (!users.isEmpty() && users.containsKey(userId))
{
return R.ok(users.get(userId));
}
else
{
return R.fail("用户不存在");
}
}
@Operation(summary = "新增用户")
@PostMapping("/save")
public R<String> save(UserEntity user)
{
if (StringUtils.isNull(user) || StringUtils.isNull(user.getUserId()))
{
return R.fail("用户ID不能为空");
}
users.put(user.getUserId(), user);
return R.ok();
}
@Operation(summary = "更新用户")
@PutMapping("/update")
public R<String> update(@RequestBody
UserEntity user)
{
if (StringUtils.isNull(user) || StringUtils.isNull(user.getUserId()))
{
return R.fail("用户ID不能为空");
}
if (users.isEmpty() || !users.containsKey(user.getUserId()))
{
return R.fail("用户不存在");
}
users.remove(user.getUserId());
users.put(user.getUserId(), user);
return R.ok();
}
@Operation(summary = "删除用户信息")
@DeleteMapping("/{userId}")
public R<String> delete(@PathVariable(name = "userId")
Integer userId)
{
if (!users.isEmpty() && users.containsKey(userId))
{
users.remove(userId);
return R.ok();
}
else
{
return R.fail("用户不存在");
}
}
}
@Schema(description = "用户实体")
class UserEntity
{
@Schema(title = "用户ID")
private Integer userId;
@Schema(title = "用户名称")
private String username;
@Schema(title = "用户密码")
private String password;
@Schema(title = "用户手机")
private String mobile;
public UserEntity()
{
}
public UserEntity(Integer userId, String username, String password, String mobile)
{
this.userId = userId;
this.username = username;
this.password = password;
this.mobile = mobile;
}
public Integer getUserId()
{
return userId;
}
public void setUserId(Integer userId)
{
this.userId = userId;
}
public String getUsername()
{
return username;
}
public void setUsername(String username)
{
this.username = username;
}
public String getPassword()
{
return password;
}
public void setPassword(String password)
{
this.password = password;
}
public String getMobile()
{
return mobile;
}
public void setMobile(String mobile)
{
this.mobile = mobile;
}
}

64
chenhai-admin/src/main/java/com/chenhai/web/core/config/SwaggerConfig.java

@ -0,0 +1,64 @@
package com.chenhai.web.core.config;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import com.chenhai.common.config.RuoYiConfig;
import io.swagger.v3.oas.models.Components;
import io.swagger.v3.oas.models.OpenAPI;
import io.swagger.v3.oas.models.info.Contact;
import io.swagger.v3.oas.models.info.Info;
import io.swagger.v3.oas.models.security.SecurityRequirement;
import io.swagger.v3.oas.models.security.SecurityScheme;
/**
* Swagger 接口配置(基于 springdoc-openapi / OpenAPI 3)
*
* @author ruoyi
*/
@Configuration
public class SwaggerConfig
{
/** 系统基础配置 */
@Autowired
private RuoYiConfig ruoyiConfig;
/**
* 自定义的 OpenAPI 对象
*/
@Bean
public OpenAPI customOpenApi()
{
return new OpenAPI().components(new Components()
// 设置认证的请求头
.addSecuritySchemes("apikey", securityScheme()))
.addSecurityItem(new SecurityRequirement().addList("apikey"))
.info(getApiInfo());
}
@Bean
public SecurityScheme securityScheme()
{
return new SecurityScheme()
.type(SecurityScheme.Type.APIKEY)
.name("Authorization")
.in(SecurityScheme.In.HEADER)
.scheme("Bearer");
}
/**
* 添加摘要信息
*/
public Info getApiInfo()
{
return new Info()
// 设置标题
.title("标题:若依管理系统_接口文档")
// 描述
.description("描述:用于管理集团旗下公司的人员信息,具体包括XXX,XXX模块...")
// 作者信息
.contact(new Contact().name(ruoyiConfig.getName()))
// 版本
.version("版本号:" + ruoyiConfig.getVersion());
}
}

1
chenhai-admin/src/main/resources/META-INF/spring-devtools.properties

@ -0,0 +1 @@
restart.include.json=/com.alibaba.fastjson2.*.jar

108
chenhai-admin/src/main/resources/application-druid.yml

@ -0,0 +1,108 @@
# 数据源配置
spring:
  datasource:
    type: com.alibaba.druid.pool.DruidDataSource
    driverClassName: com.mysql.cj.jdbc.Driver
    druid:
      # 主库数据源
      master:
        url: jdbc:mysql://localhost:3306/chenhai_ai?useUnicode=true&characterEncoding=utf8&zeroDateTimeBehavior=convertToNull&useSSL=true&serverTimezone=GMT%2B8
        username: root
        password: root
        # url: jdbc:mysql://localhost:3307/ruoyi?useUnicode=true&characterEncoding=utf8&zeroDateTimeBehavior=convertToNull&useSSL=true&serverTimezone=GMT%2B8
        # username: root
        # password: root
      # 从库数据源
      slave:
        # 从数据源开关/默认关闭
        enabled: false
        url: jdbc:mysql://localhost:3306/erp?useUnicode=true&characterEncoding=utf8&zeroDateTimeBehavior=convertToNull&useSSL=true&serverTimezone=GMT%2B8
        username: root
        password: root
      # 初始连接数
      initialSize: 5
      # 最小连接池数量
      minIdle: 10
      # 最大连接池数量
      maxActive: 20
      # 配置获取连接等待超时的时间
      maxWait: 60000
      # 配置连接超时时间
      connectTimeout: 30000
      # 配置网络超时时间
      socketTimeout: 60000
      # 配置间隔多久才进行一次检测,检测需要关闭的空闲连接,单位是毫秒
      timeBetweenEvictionRunsMillis: 60000
      # 配置一个连接在池中最小生存的时间,单位是毫秒
      minEvictableIdleTimeMillis: 300000
      # 配置一个连接在池中最大生存的时间,单位是毫秒
      maxEvictableIdleTimeMillis: 900000
      # 配置检测连接是否有效
      validationQuery: SELECT 1 FROM DUAL
      testWhileIdle: true
      testOnBorrow: false
      testOnReturn: false
      webStatFilter:
        enabled: true
      statViewServlet:
        enabled: true
        # 设置白名单,不填则允许所有访问
        allow:
        url-pattern: /druid/*
        # 控制台管理用户名和密码
        login-username: ruoyi
        login-password: 123456
      filter:
        stat:
          enabled: true
          # 慢SQL记录
          log-slow-sql: true
          slow-sql-millis: 1000
          merge-sql: true
        wall:
          config:
            multi-statement-allow: true
  ai:
    zhipuai:
      api-key: e24ed227aff14409b2cc5b0ee7f97df8.7vryPYluxxmvtl7z
      base-url: "https://open.bigmodel.cn/api/paas"
      chat:
        options:
          model: glm-4-flash
      # embedding:
      #   enabled: false
      embedding:
        options:
          model: embedding-3 # 使用的嵌入模型名称(embedding-3)
          dimensions: 256 # 嵌入向量的维度(256维)
  # ai:
  #   ollama:
  #     base-url: http://127.0.0.1:11434
  #     chat:
  #       options:
  #         model: qwen3:4b
    ollama:
      base-url: http://172.16.1.165:11434
      chat:
        model: deepseek-r1:14b
    # mcp:
    #   client:
    #     request-timeout: 30s
    #     toolcallback:
    #       enabled: true
    #     stdio:
    #       servers-configuration: classpath:/mcp-servers.json
# logging:
#   level:
#     org.springframework.web.reactive.function.client: TRACE
#     org.springframework.ai: DEBUG
#     org.springframework.ai.client: DEBUG

148
chenhai-admin/src/main/resources/application.yml

@ -0,0 +1,148 @@
# 项目相关配置
ruoyi:
  # 名称
  name: RuoYi
  # 版本
  version: 3.9.0
  # 版权年份
  copyrightYear: 2025
  # 文件路径 示例( Windows配置D:/ruoyi/uploadPath,Linux配置 /home/ruoyi/uploadPath)
  profile: D:/ruoyi/uploadPath
  # 获取ip地址开关
  addressEnabled: false
  # 验证码类型 math 数字计算 char 字符验证
  captchaType: math

# 开发环境配置
server:
  # 服务器的HTTP端口,默认为8080
  port: 8081
  servlet:
    encoding:
      charset: UTF-8
      enabled: true
      force: true
    # 应用的访问路径
    context-path: /
  tomcat:
    # tomcat的URI编码
    uri-encoding: UTF-8
    # 连接数满后的排队数,默认为100
    accept-count: 1000
    threads:
      # tomcat最大线程数,默认为200
      max: 800
      # Tomcat启动初始化的线程数,默认值10
      min-spare: 100

# 日志配置
logging:
  level:
    com.chenhai: debug
    org.springframework: warn

# 用户配置
user:
  password:
    # 密码最大错误次数
    maxRetryCount: 5
    # 密码锁定时间(默认10分钟)
    lockTime: 10

# Spring配置
spring:
  # 资源信息
  messages:
    # 国际化资源文件路径
    basename: i18n/messages
  profiles:
    active: druid
  # 文件上传
  servlet:
    multipart:
      # 单个文件大小
      max-file-size: 10MB
      # 设置总上传的文件大小
      max-request-size: 20MB
  # 服务模块
  devtools:
    restart:
      # 热部署开关
      enabled: true
  data:
    # redis 配置
    redis:
      # 地址
      host: localhost
      # 端口,默认为6379
      port: 6379
      # 数据库索引
      database: 0
      # 密码
      password:
      # 连接超时时间
      timeout: 10s
      lettuce:
        pool:
          # 连接池中的最小空闲连接
          min-idle: 0
          # 连接池中的最大空闲连接
          max-idle: 8
          # 连接池的最大数据库连接数
          max-active: 8
          # 连接池最大阻塞等待时间(使用负值表示没有限制)
          max-wait: -1ms

# token配置
token:
  # 令牌自定义标识
  header: Authorization
  # 令牌密钥
  secret: abcdefghijklmnopqrstuvwxyz
  # 令牌有效期(默认30分钟)
  expireTime: 30

# MyBatis配置
mybatis:
  # 搜索指定包别名
  typeAliasesPackage: com.chenhai.**.domain
  # 配置mapper的扫描,找到所有的mapper.xml映射文件
  mapperLocations: classpath*:mapper/**/*Mapper.xml
  # 加载全局的配置文件
  configLocation: classpath:mybatis/mybatis-config.xml

# PageHelper分页插件
pagehelper:
  helperDialect: mysql
  supportMethodsArguments: true
  params: count=countSql

# Springdoc配置
springdoc:
  api-docs:
    path: /v3/api-docs
  swagger-ui:
    enabled: true
    path: /swagger-ui.html
    tags-sorter: alpha
  group-configs:
    - group: 'default'
      display-name: '测试模块'
      paths-to-match: '/**'
      packages-to-scan: com.chenhai.web.controller.tool

# 防盗链配置
referer:
  # 防盗链开关
  enabled: false
  # 允许的域名列表
  allowed-domains: localhost,127.0.0.1,ruoyi.vip,www.ruoyi.vip

# 防止XSS攻击
xss:
  # 过滤开关
  enabled: true
  # 排除链接(多个用逗号分隔)
  excludes: /system/notice
  # 匹配链接
  urlPatterns: /system/*,/monitor/*,/tool/*

24
chenhai-admin/src/main/resources/banner.txt

@ -0,0 +1,24 @@
Application Version: ${ruoyi.version}
Spring Boot Version: ${spring-boot.version}
////////////////////////////////////////////////////////////////////
// _ooOoo_ //
// o8888888o //
// 88" . "88 //
// (| ^_^ |) //
// O\ = /O //
// ____/`---'\____ //
// .' \\| |// `. //
// / \\||| : |||// \ //
// / _||||| -:- |||||- \ //
// | | \\\ - /// | | //
// | \_| ''\---/'' | | //
// \ .-\__ `-` ___/-. / //
// ___`. .' /--.--\ `. . ___ //
// ."" '< `.___\_<|>_/___.' >'"". //
// | | : `- \`.;`\ _ /`;.`/ - ` : | | //
// \ \ `-. \_ __\ /__ _/ .-` / / //
// ========`-.____`-.___\_____/___.-`____.-'======== //
// `=---=' //
// ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ //
// 佛祖保佑 永不宕机 永无BUG //
////////////////////////////////////////////////////////////////////

38
chenhai-admin/src/main/resources/i18n/messages.properties

@ -0,0 +1,38 @@
#错误消息
not.null=* 必须填写
user.jcaptcha.error=验证码错误
user.jcaptcha.expire=验证码已失效
user.not.exists=用户不存在/密码错误
user.password.not.match=用户不存在/密码错误
user.password.retry.limit.count=密码输入错误{0}次
user.password.retry.limit.exceed=密码输入错误{0}次,帐户锁定{1}分钟
user.password.delete=对不起,您的账号已被删除
user.blocked=用户已封禁,请联系管理员
role.blocked=角色已封禁,请联系管理员
login.blocked=很遗憾,访问IP已被列入系统黑名单
user.logout.success=退出成功
length.not.valid=长度必须在{min}到{max}个字符之间
user.username.not.valid=* 2到20个汉字、字母、数字或下划线组成,且必须以非数字开头
user.password.not.valid=* 5-50个字符
user.email.not.valid=邮箱格式错误
user.mobile.phone.number.not.valid=手机号格式错误
user.login.success=登录成功
user.register.success=注册成功
user.notfound=请重新登录
user.forcelogout=管理员强制退出,请重新登录
user.unknown.error=未知错误,请重新登录
##文件上传消息
upload.exceed.maxSize=上传的文件大小超出限制的文件大小!<br/>允许的文件最大大小是:{0}MB!
upload.filename.exceed.length=上传的文件名最长{0}个字符
##权限
no.permission=您没有数据的权限,请联系管理员添加权限 [{0}]
no.create.permission=您没有创建数据的权限,请联系管理员添加权限 [{0}]
no.update.permission=您没有修改数据的权限,请联系管理员添加权限 [{0}]
no.delete.permission=您没有删除数据的权限,请联系管理员添加权限 [{0}]
no.export.permission=您没有导出数据的权限,请联系管理员添加权限 [{0}]
no.view.permission=您没有查看数据的权限,请联系管理员添加权限 [{0}]

93
chenhai-admin/src/main/resources/logback.xml

@ -0,0 +1,93 @@
<?xml version="1.0" encoding="UTF-8"?>
<configuration>
<!-- 日志存放路径 -->
<property name="log.path" value="/home/ruoyi/logs" />
<!-- 日志输出格式 -->
<property name="log.pattern" value="%d{HH:mm:ss.SSS} [%thread] %-5level %logger{20} - [%method,%line] - %msg%n" />
<!-- 控制台输出 -->
<appender name="console" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
<pattern>${log.pattern}</pattern>
</encoder>
</appender>
<!-- 系统日志输出 -->
<appender name="file_info" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${log.path}/sys-info.log</file>
<!-- 循环政策:基于时间创建日志文件 -->
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<!-- 日志文件名格式 -->
<fileNamePattern>${log.path}/sys-info.%d{yyyy-MM-dd}.log</fileNamePattern>
<!-- 日志最大的历史 60天 -->
<maxHistory>60</maxHistory>
</rollingPolicy>
<encoder>
<pattern>${log.pattern}</pattern>
</encoder>
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<!-- 过滤的级别 -->
<level>INFO</level>
<!-- 匹配时的操作:接收(记录) -->
<onMatch>ACCEPT</onMatch>
<!-- 不匹配时的操作:拒绝(不记录) -->
<onMismatch>DENY</onMismatch>
</filter>
</appender>
<appender name="file_error" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${log.path}/sys-error.log</file>
<!-- 循环政策:基于时间创建日志文件 -->
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<!-- 日志文件名格式 -->
<fileNamePattern>${log.path}/sys-error.%d{yyyy-MM-dd}.log</fileNamePattern>
<!-- 日志最大的历史 60天 -->
<maxHistory>60</maxHistory>
</rollingPolicy>
<encoder>
<pattern>${log.pattern}</pattern>
</encoder>
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<!-- 过滤的级别 -->
<level>ERROR</level>
<!-- 匹配时的操作:接收(记录) -->
<onMatch>ACCEPT</onMatch>
<!-- 不匹配时的操作:拒绝(不记录) -->
<onMismatch>DENY</onMismatch>
</filter>
</appender>
<!-- 用户访问日志输出 -->
<appender name="sys-user" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${log.path}/sys-user.log</file>
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<!-- 按天回滚 daily -->
<fileNamePattern>${log.path}/sys-user.%d{yyyy-MM-dd}.log</fileNamePattern>
<!-- 日志最大的历史 60天 -->
<maxHistory>60</maxHistory>
</rollingPolicy>
<encoder>
<pattern>${log.pattern}</pattern>
</encoder>
</appender>
<!-- 系统模块日志级别控制 -->
<logger name="com.chenhai" level="info" />
<!-- Spring日志级别控制 -->
<logger name="org.springframework" level="warn" />
<root level="info">
<appender-ref ref="console" />
</root>
<!--系统操作日志-->
<root level="info">
<appender-ref ref="file_info" />
<appender-ref ref="file_error" />
</root>
<!--系统用户操作日志-->
<logger name="sys-user" level="info">
<appender-ref ref="sys-user"/>
</logger>
</configuration>

20
chenhai-admin/src/main/resources/mybatis/mybatis-config.xml

@ -0,0 +1,20 @@
<?xml version="1.0" encoding="UTF-8" ?>
<!DOCTYPE configuration
PUBLIC "-//mybatis.org//DTD Config 3.0//EN"
"http://mybatis.org/dtd/mybatis-3-config.dtd">
<configuration>
<!-- 全局参数 -->
<settings>
<!-- 使全局的映射器启用或禁用缓存 -->
<setting name="cacheEnabled" value="true" />
<!-- 允许JDBC 支持自动生成主键 -->
<setting name="useGeneratedKeys" value="true" />
<!-- 配置默认的执行器.SIMPLE就是普通执行器;REUSE执行器会重用预处理语句(prepared statements);BATCH执行器将重用语句并执行批量更新 -->
<setting name="defaultExecutorType" value="SIMPLE" />
<!-- 指定 MyBatis 所用日志的具体实现 -->
<setting name="logImpl" value="SLF4J" />
<!-- 使用驼峰命名法转换字段 -->
<!-- <setting name="mapUnderscoreToCamelCase" value="true"/> -->
</settings>
</configuration>

78
chenhai-ai/pom.xml

@ -0,0 +1,78 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<parent>
<artifactId>chenhai</artifactId>
<groupId>com.chenhai</groupId>
<version>3.9.0</version>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>chenhai-ai</artifactId>
<description>
ai系统模块
</description>
<dependencies>
<!-- 通用工具-->
<dependency>
<groupId>com.chenhai</groupId>
<artifactId>chenhai-common</artifactId>
</dependency>
<!-- 核心模块-->
<dependency>
<groupId>com.chenhai</groupId>
<artifactId>chenhai-framework</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-web</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.ai</groupId>
<artifactId>spring-ai-starter-model-zhipuai</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.ai</groupId>
<artifactId>spring-ai-starter-mcp-client</artifactId>
</dependency>
<dependency>
<groupId>com.alibaba.cloud.ai</groupId>
<artifactId>spring-ai-alibaba-graph-core</artifactId>
<!-- <version>1.0.0.4</version>-->
</dependency>
<dependency>
<groupId>org.springframework.ai</groupId>
<artifactId>spring-ai-starter-model-ollama</artifactId>
</dependency>
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.bitbucket.cowwoc</groupId>
<artifactId>diff-match-patch</artifactId>
<version>1.2</version>
</dependency>
<dependency>
<groupId>com.vladsch.flexmark</groupId>
<artifactId>flexmark-all</artifactId>
<version>0.64.8</version>
</dependency>
</dependencies>
</project>

43
chenhai-ai/src/main/java/com/chenhai/chenhaiai/config/AsyncConfig.java

@ -0,0 +1,43 @@
package com.chenhai.chenhaiai.config;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.task.TaskExecutor;
import org.springframework.core.task.support.TaskExecutorAdapter;
import org.springframework.scheduling.annotation.EnableAsync;
import org.springframework.scheduling.annotation.EnableScheduling;
import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;
import java.util.concurrent.Executor;
import java.util.concurrent.ThreadPoolExecutor;
@Configuration
@EnableAsync
@EnableScheduling
public class AsyncConfig {
@Value("${gitea.analysis.timeout:60}")
private int timeoutSeconds;
@Bean("giteaTaskExecutor")
public ThreadPoolTaskExecutor giteaTaskExecutor() {
ThreadPoolTaskExecutor executor = new ThreadPoolTaskExecutor();
int coreCount = Runtime.getRuntime().availableProcessors();
executor.setCorePoolSize(Math.max(4, coreCount));
executor.setMaxPoolSize(coreCount * 2);
executor.setQueueCapacity(100);
executor.setThreadNamePrefix("gitea-async-");
executor.setRejectedExecutionHandler(new ThreadPoolExecutor.CallerRunsPolicy());
executor.setWaitForTasksToCompleteOnShutdown(true);
executor.setAwaitTerminationSeconds(5);
executor.setKeepAliveSeconds(60);
executor.initialize();
return executor;
}
@Bean
public TaskExecutor taskExecutor() {
return new TaskExecutorAdapter(giteaTaskExecutor());
}
}
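Since the pool above is registered under the name "giteaTaskExecutor", @Async methods can target it explicitly. A minimal sketch of that wiring (the service class and method below are hypothetical, shown only for illustration):
package com.chenhai.chenhaiai.service;
import org.springframework.scheduling.annotation.Async;
import org.springframework.stereotype.Service;
import java.util.concurrent.CompletableFuture;
@Service
public class GiteaAsyncExample {
    // Runs on the "gitea-async-" pool configured in AsyncConfig;
    // CallerRunsPolicy means bursts beyond the queue fall back to the caller thread
    @Async("giteaTaskExecutor")
    public CompletableFuture<String> analyzeRepository(String repoFullName) {
        // long-running Gitea API calls would go here
        return CompletableFuture.completedFuture("analysis finished for " + repoFullName);
    }
}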

28
chenhai-ai/src/main/java/com/chenhai/chenhaiai/config/BeanChecker.java

@ -0,0 +1,28 @@
package com.chenhai.chenhaiai.config;
import jakarta.annotation.PostConstruct;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationContext;
import org.springframework.stereotype.Component;
@Component
public class BeanChecker {
@Autowired
private ApplicationContext context;
@PostConstruct
public void checkBeans() {
System.out.println("=== 所有ChatClient Bean ===");
String[] beanNames = context.getBeanNamesForType(org.springframework.ai.chat.client.ChatClient.class);
for (String name : beanNames) {
System.out.println("Bean名称: " + name);
}
System.out.println("=== 所有ChatModel Bean ===");
String[] modelNames = context.getBeanNamesForType(org.springframework.ai.chat.model.ChatModel.class);
for (String name : modelNames) {
System.out.println("Bean名称: " + name);
}
}
}

38
chenhai-ai/src/main/java/com/chenhai/chenhaiai/config/ChatClientConfig.java

@ -0,0 +1,38 @@
package com.chenhai.chenhaiai.config;
import org.springframework.ai.chat.client.ChatClient;
import org.springframework.ai.chat.model.ChatModel;
import org.springframework.ai.ollama.OllamaChatModel;
import org.springframework.ai.tool.ToolCallback;
import org.springframework.ai.tool.ToolCallbackProvider;
import org.springframework.ai.zhipuai.ZhiPuAiChatOptions;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;
import java.util.List;
/**
* @author : mazhongxu
* @date : 2025-12-05 14:45
* @modified By :
*/
@Configuration
public class ChatClientConfig {
@Bean("toolChatClient")
public ChatClient toolChatClient(ChatClient.Builder builder, ToolCallbackProvider toolCallbackProvider) {
List<ToolCallback> toolCallbacks = List.of(toolCallbackProvider.getToolCallbacks());
return builder
.defaultToolCallbacks(toolCallbacks)
.build();
}
/**
* Designate Ollama as the primary chat model
*/
@Bean
@Primary
public ChatModel primaryChatModel(OllamaChatModel ollamaChatModel) {
return ollamaChatModel;
}
}
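A minimal sketch of consuming the "toolChatClient" bean from another component (the service class below is hypothetical; the injection style mirrors the @Qualifier usage in GraphConfig further down):
package com.chenhai.chenhaiai.service;
import org.springframework.ai.chat.client.ChatClient;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.stereotype.Service;
@Service
public class ToolChatExample {
    private final ChatClient toolChatClient;
    public ToolChatExample(@Qualifier("toolChatClient") ChatClient toolChatClient) {
        this.toolChatClient = toolChatClient;
    }
    public String ask(String question) {
        // This client carries the MCP tool callbacks registered in ChatClientConfig
        return toolChatClient.prompt().user(question).call().content();
    }
}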

141
chenhai-ai/src/main/java/com/chenhai/chenhaiai/config/ChatModelFactory.java

@ -0,0 +1,141 @@
package com.chenhai.chenhaiai.config;
import org.springframework.ai.chat.client.ChatClient;
import org.springframework.ai.ollama.api.OllamaChatOptions;
import org.springframework.ai.ollama.OllamaChatModel;
import org.springframework.ai.zhipuai.ZhiPuAiChatModel;
import org.springframework.ai.zhipuai.ZhiPuAiChatOptions;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.stereotype.Component;
import java.util.*;
@Component
public class ChatModelFactory {
private final ZhiPuAiChatModel zhiPuModel;
private final OllamaChatModel ollamaModel;
// Core model -> provider mapping
private final Map<String, String> modelProviderMap = new HashMap<>();
private final List<ModelInfo> availableModels = new ArrayList<>();
public ChatModelFactory(
@Qualifier("zhiPuAiChatModel") ZhiPuAiChatModel zhiPuModel,
@Qualifier("ollamaChatModel") OllamaChatModel ollamaModel) {
this.zhiPuModel = zhiPuModel;
this.ollamaModel = ollamaModel;
initModelMappings();
}
private void initModelMappings() {
// ZhiPu AI models
addModel("glm-4-flash", "智谱GLM-4-Flash", "zhipu");
addModel("glm-4", "智谱GLM-4", "zhipu");
// 🔥 Ollama models
addModel("qwen3:4b", "通义千问4B", "ollama");
addModel("llama2", "Llama2 7B", "ollama");
addModel("mistral", "Mistral 7B", "ollama");
addModel("deepseek-r1:14b", "DeepSeek R1 14B", "ollama");
addModel("gemma:7b", "Gemma 7B", "ollama");
addModel("codellama", "CodeLlama", "ollama");
}
private void addModel(String value, String label, String provider) {
modelProviderMap.put(value, provider);
availableModels.add(new ModelInfo(value, label, provider));
}
/**
* Get a ChatClient, selected via the model-to-provider mapping
*/
public ChatClient getChatClient(String modelName) {
if (modelName == null || modelName.trim().isEmpty()) {
modelName = "glm-4-flash"; // 默认
}
String provider = modelProviderMap.get(modelName);
if (provider == null) {
// 没找到映射默认用智谱
provider = "zhipu";
modelName = "glm-4-flash";
}
// 根据提供方返回对应的ChatClient
return switch (provider) {
case "zhipu" -> getZhiPuClient();
case "ollama" -> getOllamaClient(modelName);
default -> getZhiPuClient(); // 默认智谱
};
}
private ChatClient getZhiPuClient() {
return ChatClient.builder(zhiPuModel)
.defaultOptions(ZhiPuAiChatOptions.builder()
.model("glm-4-flash")
.topP(0.7)
.temperature(0.7)
.build())
.build();
}
private ChatClient getOllamaClient(String modelName) {
return ChatClient.builder(ollamaModel)
.defaultOptions(OllamaChatOptions.builder()
.model(modelName) // Ollama requires a concrete model name
.temperature(0.7)
.topP(0.8)
.build())
.build();
}
/**
* Get the list of available models
*/
public List<ModelInfo> getAvailableModels() {
return availableModels;
}
/**
* Get the default model name
*/
public String getDefaultModel() {
return "qwen3:4b";
}
/**
* Check whether a model is available
*/
public boolean isModelAvailable(String modelName) {
return modelProviderMap.containsKey(modelName);
}
/**
* Get the provider of a model
*/
public String getModelProvider(String modelName) {
return modelProviderMap.getOrDefault(modelName, "zhipu");
}
/**
* Simple model info holder
*/
public static class ModelInfo {
private final String value;
private final String label;
private final String provider;
public ModelInfo(String value, String label, String provider) {
this.value = value;
this.label = label;
this.provider = provider;
}
public String getValue() { return value; }
public String getLabel() { return label; }
public String getProvider() { return provider; }
}
}
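A minimal usage sketch for the factory (the service class is hypothetical; unknown model names fall back to the ZhiPu default as implemented above):
package com.chenhai.chenhaiai.service;
import com.chenhai.chenhaiai.config.ChatModelFactory;
import org.springframework.ai.chat.client.ChatClient;
import org.springframework.stereotype.Service;
@Service
public class ModelSelectionExample {
    private final ChatModelFactory chatModelFactory;
    public ModelSelectionExample(ChatModelFactory chatModelFactory) {
        this.chatModelFactory = chatModelFactory;
    }
    public String summarize(String model, String text) {
        // Unknown model names fall back to the ZhiPu glm-4-flash client
        ChatClient client = chatModelFactory.getChatClient(model);
        return client.prompt().user("Summarize: " + text).call().content();
    }
}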

152
chenhai-ai/src/main/java/com/chenhai/chenhaiai/config/GraphConfig.java

@ -0,0 +1,152 @@
package com.chenhai.chenhaiai.config;
import com.alibaba.cloud.ai.graph.*;
import com.alibaba.cloud.ai.graph.action.AsyncEdgeAction;
import com.alibaba.cloud.ai.graph.action.AsyncNodeAction;
import com.alibaba.cloud.ai.graph.exception.GraphStateException;
import com.alibaba.cloud.ai.graph.state.strategy.ReplaceStrategy;
import com.chenhai.chenhaiai.entity.WeekPlanResponse;
import com.chenhai.chenhaiai.node.*;
import com.chenhai.chenhaiai.node.weekPlan.*;
import com.chenhai.chenhaiai.node.weekPlan.jdbc.*;
import com.chenhai.chenhaiai.node.weekPlan.mcp.DeptNode;
import com.chenhai.chenhaiai.node.weekPlan.mcp.UserNode;
import com.chenhai.chenhaiai.node.weekPlan.mcp.WeekPlanDetailNode;
import com.chenhai.chenhaiai.node.weekPlan.mcp.WeekPlanMainNode;
import com.chenhai.chenhaiai.service.GiteaAnalysisService;
import com.chenhai.chenhaiai.utils.ProgressEmitter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.ai.chat.client.ChatClient;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import java.util.Map;
@Configuration
public class GraphConfig {
private static final Logger log = LoggerFactory.getLogger(GraphConfig.class);
@Bean("weekPlanAnalysisNode")
public CompiledGraph simpleGraph(ChatClient.Builder clientBuilder) throws GraphStateException {
KeyStrategyFactory keyStrategyFactory = () -> Map.of("weekPlan",new ReplaceStrategy(),"personalDaily",new ReplaceStrategy());
// Create the state graph
StateGraph stateGraph = new StateGraph("WeekPlanAnalysisNode",keyStrategyFactory);
// Add nodes
stateGraph.addNode("DataTranslationNode", AsyncNodeAction.node_async(new DataTranslationNode(clientBuilder)));
stateGraph.addNode("DataOrganizationNode", AsyncNodeAction.node_async(new DataOrganizationNode(clientBuilder)));
stateGraph.addNode("DataAssociationNode", AsyncNodeAction.node_async(new DataAssociationNode(clientBuilder)));
stateGraph.addNode("WeekPlanAnalysisNode", AsyncNodeAction.node_async(new WeekPlanAnalysisNode(clientBuilder)));
// Define edges
stateGraph.addEdge(StateGraph.START,"DataTranslationNode");
stateGraph.addEdge("DataTranslationNode","DataOrganizationNode");
stateGraph.addEdge("DataOrganizationNode","DataAssociationNode");
stateGraph.addEdge("DataAssociationNode","WeekPlanAnalysisNode");
stateGraph.addEdge("WeekPlanAnalysisNode",StateGraph.END);
// Compile the state graph and register it in the container
return stateGraph.compile();
}
@Bean("weekPlanNode")
public CompiledGraph weekPlanNodeGraph(@Qualifier("toolChatClient") ChatClient chatClient, ProgressEmitter progressEmitter) throws GraphStateException {
KeyStrategyFactory keyStrategyFactory = () -> Map.of("weekPlanResponse",new ReplaceStrategy());
// Create the state graph
StateGraph stateGraph = new StateGraph("weekPlanNode",keyStrategyFactory);
// Add nodes
stateGraph.addNode("DeptNode", AsyncNodeAction.node_async(new DeptNode(chatClient)));
stateGraph.addNode("UserNode", AsyncNodeAction.node_async(new UserNode(chatClient)));
stateGraph.addNode("WeekPlanMainNode", AsyncNodeAction.node_async(new WeekPlanMainNode(chatClient)));
stateGraph.addNode("WeekPlanDetailNode", AsyncNodeAction.node_async(new WeekPlanDetailNode(chatClient)));
stateGraph.addNode("DailyPaperNode", AsyncNodeAction.node_async(new DailyPaperJdbcNode(progressEmitter)));
// stateGraph.addNode("DailyPaperNode", AsyncNodeAction.node_async(new DailyPaperNode(chatClient)));
stateGraph.addNode("Analysis", AsyncNodeAction.node_async(new Analysis(chatClient)));
// Define edges
stateGraph.addEdge(StateGraph.START,"DeptNode");
stateGraph.addEdge("DeptNode","UserNode");
stateGraph.addEdge("UserNode","WeekPlanMainNode");
stateGraph.addEdge("WeekPlanMainNode","WeekPlanDetailNode");
stateGraph.addEdge("WeekPlanDetailNode","DailyPaperNode");
stateGraph.addEdge("DailyPaperNode","Analysis");
stateGraph.addEdge("Analysis",StateGraph.END);
// Compile the state graph and register it in the container
return stateGraph.compile();
}
// @Bean("weekPlanNodeJdbcGraph")
// public CompiledGraph weekPlanNodeStream(ChatClient chatClient) throws GraphStateException {
// KeyStrategyFactory keyStrategyFactory = () -> Map.of("weekPlanResponse",new ReplaceStrategy());
// // 创建状态图
// StateGraph stateGraph = new StateGraph("weekPlanNodeJdbcGraph",keyStrategyFactory);
// // 添加节点
// stateGraph.addNode("DeptJdbcNode", AsyncNodeAction.node_async(new DeptJdbcNode()));
// stateGraph.addNode("UserJdbcNode", AsyncNodeAction.node_async(new UserJdbcNode()));
// stateGraph.addNode("WeekPlanMainJdbcNode", AsyncNodeAction.node_async(new WeekPlanMainJdbcNode()));
// stateGraph.addNode("WeekPlanDetailJdbcNode", AsyncNodeAction.node_async(new WeekPlanDetailJdbcNode()));
// stateGraph.addNode("DailyPaperJdbcNode", AsyncNodeAction.node_async(new DailyPaperJdbcNode()));
// // 定义边
// stateGraph.addEdge(StateGraph.START,"DeptJdbcNode");
// stateGraph.addEdge("DeptJdbcNode","UserJdbcNode");
// stateGraph.addEdge("UserJdbcNode","WeekPlanMainJdbcNode");
// stateGraph.addEdge("WeekPlanMainJdbcNode","WeekPlanDetailJdbcNode");
// stateGraph.addEdge("WeekPlanDetailJdbcNode","DailyPaperJdbcNode");
// stateGraph.addEdge("DailyPaperJdbcNode",StateGraph.END);
//
// // 编译状态图 放入容器
// return stateGraph.compile();
// }
@Bean("weekPlanNodeJdbcGraph")
public CompiledGraph weekPlanNodeStream(ProgressEmitter progressEmitter, GiteaAnalysisService giteaAnalysisService) throws GraphStateException {
KeyStrategyFactory keyStrategyFactory = () -> Map.of(
"weekPlanResponse", new ReplaceStrategy(),
"isResearchDept", new ReplaceStrategy()
);
StateGraph stateGraph = new StateGraph("weekPlanNodeJdbcGraph", keyStrategyFactory);
// Add nodes (new: GitAnalysisNode)
stateGraph.addNode("DeptJdbcNode", AsyncNodeAction.node_async(new DeptJdbcNode(progressEmitter)));
stateGraph.addNode("UserJdbcNode", AsyncNodeAction.node_async(new UserJdbcNode(progressEmitter)));
stateGraph.addNode("WeekPlanMainJdbcNode", AsyncNodeAction.node_async(new WeekPlanMainJdbcNode(progressEmitter)));
stateGraph.addNode("WeekPlanDetailJdbcNode", AsyncNodeAction.node_async(new WeekPlanDetailJdbcNode(progressEmitter)));
stateGraph.addNode("DailyPaperJdbcNode", AsyncNodeAction.node_async(new DailyPaperJdbcNode(progressEmitter)));
stateGraph.addNode("GitAnalysisNode", AsyncNodeAction.node_async(new GitAnalysisNode(progressEmitter, giteaAnalysisService)));
// Define edges - three key changes
stateGraph.addEdge(StateGraph.START, "DeptJdbcNode");
stateGraph.addEdge("DeptJdbcNode", "UserJdbcNode");
stateGraph.addEdge("UserJdbcNode", "WeekPlanMainJdbcNode");
// Change 1: after WeekPlanMainJdbcNode, branch on whether the department is the R&D department
stateGraph.addConditionalEdges("WeekPlanMainJdbcNode",
AsyncEdgeAction.edge_async(
state -> {
// Simple check: the R&D department goes to "yes", everything else to "no"
Boolean isResearchDept = state.value("isResearchDept", false);
return isResearchDept ? "yes" : "no";
}
),
Map.of(
"yes", "GitAnalysisNode", // 研发部先执行Git分析
"no", "WeekPlanDetailJdbcNode" // 其他部门直接获取周计划详情
)
);
// Change 2: fetch week plan details after Git analysis completes
stateGraph.addEdge("GitAnalysisNode", "WeekPlanDetailJdbcNode");
// Change 3: fetch daily reports after week plan details are retrieved
stateGraph.addEdge("WeekPlanDetailJdbcNode", "DailyPaperJdbcNode");
// End after the daily report node
stateGraph.addEdge("DailyPaperJdbcNode", StateGraph.END);
return stateGraph.compile();
}
}
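A minimal sketch of invoking the compiled "weekPlanNodeJdbcGraph" bean, mirroring the call pattern used by GraphController further down (the runner class itself is hypothetical):
package com.chenhai.chenhaiai.service;
import com.alibaba.cloud.ai.graph.CompiledGraph;
import com.alibaba.cloud.ai.graph.OverAllState;
import com.chenhai.chenhaiai.entity.WeekPlanResponse;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.stereotype.Service;
import java.util.Map;
import java.util.Optional;
@Service
public class WeekPlanGraphRunner {
    private final CompiledGraph weekPlanNodeJdbcGraph;
    public WeekPlanGraphRunner(@Qualifier("weekPlanNodeJdbcGraph") CompiledGraph graph) {
        this.weekPlanNodeJdbcGraph = graph;
    }
    public Map<String, Object> run(String deptName, String weekDisplay) {
        WeekPlanResponse request = new WeekPlanResponse();
        request.setDeptName(deptName);
        request.setWeekDisplay(weekDisplay);
        // The "weekPlanResponse" key matches the KeyStrategyFactory registered in GraphConfig
        Optional<OverAllState> state = weekPlanNodeJdbcGraph.invoke(Map.of("weekPlanResponse", request));
        return state.map(OverAllState::data).orElse(Map.of());
    }
}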

18
chenhai-ai/src/main/java/com/chenhai/chenhaiai/config/ProgressEmitterConfig.java

@ -0,0 +1,18 @@
package com.chenhai.chenhaiai.config;
import com.chenhai.chenhaiai.utils.ProgressEmitter;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
/**
* @author : mazhongxu
* @date : 2025-12-11 9:50
* @modified By :
*/
@Configuration
public class ProgressEmitterConfig {
@Bean
public ProgressEmitter progressEmitter() {
return new ProgressEmitter();
}
}

84
chenhai-ai/src/main/java/com/chenhai/chenhaiai/controller/GiteaController.java

@ -0,0 +1,84 @@
package com.chenhai.chenhaiai.controller;
import com.chenhai.chenhaiai.service.gitNew.GiteaGranularityService;
import com.chenhai.chenhaiai.service.gitNew.GiteaQueryService;
import org.slf4j.LoggerFactory;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import java.time.LocalDate;
import java.time.format.DateTimeFormatter;
import java.time.temporal.ChronoUnit;
import java.util.Map;
@Slf4j
@RestController
@RequestMapping("/gitea")
public class GiteaController {
@Autowired
private GiteaQueryService giteaQueryService;
@Autowired
private GiteaGranularityService giteaGranularityService;
/**
* Get the text analysis report - fixed time range, for testing only
*/
@GetMapping("/analysis/text")
public Map<String, Object> getTextAnalysisReport() {
// Fixed time range; only the year-month-day part is passed
String since = "2024-01-01";
String until = "2024-12-30";
System.out.println("使用固定时间范围: " + since + " 至 " + until);
return giteaQueryService.getTextAnalysisReport(since, until);
}
@GetMapping("/analysis/text/granular")
public Map<String, Object> getTextAnalysisReportWithGranularity(
@RequestParam(value = "since", defaultValue = "2024-01-01") String since,
@RequestParam(value = "until", defaultValue = "2024-01-31") String until,
@RequestParam(value = "granularity", defaultValue = "auto") String granularity) {
// Automatically cap the time range to keep the result size manageable
LocalDate sinceDate = LocalDate.parse(since);
LocalDate untilDate = LocalDate.parse(until);
long days = ChronoUnit.DAYS.between(sinceDate, untilDate);
// Cap the time range according to granularity
switch (granularity.toLowerCase()) {
case "day":
if (days > 30) {
untilDate = sinceDate.plusDays(30);
until = untilDate.toString();
log.warn("day颗粒度限制30天,自动调整为: {} 至 {}", since, until);
}
break;
case "week":
if (days > 90) {
untilDate = sinceDate.plusDays(90);
until = untilDate.toString();
log.warn("week颗粒度限制90天,自动调整为: {} 至 {}", since, until);
}
break;
case "month":
if (days > 730) { // 2 years
untilDate = sinceDate.plusDays(730);
until = untilDate.toString();
log.warn("month颗粒度限制2年,自动调整为: {} 至 {}", since, until);
}
break;
}
System.out.println("使用时间范围: " + since + " 至 " + until + ",颗粒度: " + granularity);
return giteaGranularityService.getTextAnalysisReportWithGranularity(since, until, granularity);
}
}

526
chenhai-ai/src/main/java/com/chenhai/chenhaiai/controller/GraphController.java

@ -0,0 +1,526 @@
package com.chenhai.chenhaiai.controller;
import com.alibaba.cloud.ai.graph.CompiledGraph;
import com.alibaba.cloud.ai.graph.OverAllState;
import com.chenhai.chenhaiai.config.ChatModelFactory;
import com.chenhai.chenhaiai.entity.WeekPlanResponse;
import com.chenhai.chenhaiai.service.MarkdownService;
import com.chenhai.chenhaiai.utils.CharacterStreamProcessor;
import com.chenhai.chenhaiai.utils.ProgressEmitter;
import com.chenhai.chenhaiai.utils.PromptLoader;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.springframework.ai.chat.client.ChatClient;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.http.MediaType;
import org.springframework.web.bind.annotation.*;
import reactor.core.publisher.Flux;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.CompletableFuture;
@RestController
@RequestMapping("/graph")
public class GraphController {
private final CompiledGraph weekPlanNodeJdbcGraph;
// private final ChatClient chatClient; // originally a single pre-configured model was used, without distinguishing systems
// now the model is selected per request
private final ChatModelFactory modelFactory;
private final PromptLoader promptLoader;
private final ProgressEmitter progressEmitter;
private final MarkdownService markdownService;
private final ObjectMapper objectMapper = new ObjectMapper();
public GraphController(
@Qualifier("weekPlanNodeJdbcGraph") CompiledGraph weekPlanNodeJdbcGraph,
// ChatClient chatClient,
ChatModelFactory modelFactory,
PromptLoader promptLoader,
ProgressEmitter progressEmitter,
MarkdownService markdownService) {
this.weekPlanNodeJdbcGraph = weekPlanNodeJdbcGraph;
// this.chatClient = chatClient;
this.modelFactory = modelFactory;
this.promptLoader = promptLoader;
this.progressEmitter = progressEmitter;
this.markdownService = markdownService;
}
@GetMapping(value = "/weekPlanAnalysisStream", produces = MediaType.TEXT_EVENT_STREAM_VALUE)
public Flux<String> weekPlanAnalysisStream(
@RequestParam("deptName") String deptName,
@RequestParam("weekDisplay") String weekDisplay,
@RequestParam(value = "model", required = false) String model) {
return Flux.create(sink -> {
try {
// Set up the progress emitter
progressEmitter.setSink(sink);
// 🔥 Send the start status immediately so the frontend sees it right away
String startMsg = CharacterStreamProcessor.formatMessage("status",
"🚀 启动分析流程 - " + deptName + " " + weekDisplay);
sink.next(startMsg);
// Give the frontend a moment to render
Thread.sleep(100);
// Prepare request data
WeekPlanResponse weekPlanResponse = new WeekPlanResponse();
weekPlanResponse.setDeptName(deptName);
weekPlanResponse.setWeekDisplay(weekDisplay);
// 🔥 Execute the graph asynchronously for real
CompletableFuture.supplyAsync(() -> {
try {
System.out.println("开始执行Graph节点...");
// Run the graph - this triggers the progressEmitter calls inside each node
Optional<OverAllState> stateOptional = weekPlanNodeJdbcGraph.invoke(
Map.of("weekPlanResponse", weekPlanResponse)
);
if (stateOptional.isEmpty()) {
sink.next(CharacterStreamProcessor.formatMessage("error", "未获取到数据"));
return null;
}
// Extract the full data
Map<String, Object> stateData = stateOptional.get().data();
Object rawData = stateData.get("weekPlanResponse");
String jsonStr = objectMapper.writeValueAsString(rawData);
return objectMapper.readValue(jsonStr, WeekPlanResponse.class);
} catch (Exception e) {
sink.next(CharacterStreamProcessor.formatMessage("error",
"Graph执行错误: " + e.getMessage()));
e.printStackTrace();
return null;
}
}, CompletableFuture.delayedExecutor(500, java.util.concurrent.TimeUnit.MILLISECONDS))
.thenAccept(fullData -> {
if (fullData != null) {
try {
// Data retrieval complete
sink.next(CharacterStreamProcessor.formatMessage("status",
"📊 数据获取完成,开始AI分析"));
Thread.sleep(200); // let the user see the status
// Prepare the analysis prompt
String promptTemplate = promptLoader.loadPrompt("prompts/week_plan_analysis4.txt");
String jsonData = objectMapper.writeValueAsString(fullData);
String finalPrompt = promptTemplate.replace("{jsonData}", jsonData);
// Optimize the prompt for streaming output
finalPrompt = CharacterStreamProcessor.optimizePromptForStreaming(finalPrompt);
ChatClient selectedClient = modelFactory.getChatClient(model);
// Start true character-level streaming
Flux<String> characterStream = CharacterStreamProcessor.createCharacterStream(
selectedClient,
finalPrompt,
sink
);
// Subscribe to the stream
characterStream.subscribe(
chunk -> {
if (!sink.isCancelled()) {
sink.next(chunk);
}
},
error -> {
if (!sink.isCancelled()) {
sink.error(error);
}
},
() -> {
if (!sink.isCancelled()) {
sink.next(CharacterStreamProcessor.formatMessage("complete", "✅ 分析完成"));
sink.complete();
}
}
);
} catch (Exception e) {
if (!sink.isCancelled()) {
sink.next(CharacterStreamProcessor.formatMessage("error",
"AI分析错误: " + e.getMessage()));
sink.complete();
}
}
}
});
} catch (Exception e) {
if (!sink.isCancelled()) {
sink.next(CharacterStreamProcessor.formatMessage("error",
"系统错误: " + e.getMessage()));
sink.complete();
}
}
});
}
@GetMapping("/simpleGraph")
public Map<String, Object> simpleGraph(
@RequestParam("deptName") String deptName,
@RequestParam("weekDisplay") String weekDisplay){
// 1. Prepare request data
WeekPlanResponse weekPlanResponse = new WeekPlanResponse();
weekPlanResponse.setDeptName(deptName);
weekPlanResponse.setWeekDisplay(weekDisplay);
// 2. Run the graph and collect node data
Optional<OverAllState> stateOptional = weekPlanNodeJdbcGraph.invoke(
Map.of("weekPlanResponse", weekPlanResponse)
);
Map<String, Object> data = stateOptional.map(OverAllState::data).orElse(Map.of());
return data;
}
@GetMapping("/fullMarkdown")
public String fullMarkdown(
@RequestParam("deptName") String deptName,
@RequestParam("weekDisplay") String weekDisplay,
@RequestParam(value = "model", required = false) String model) {
try {
// Fetch the full data
Map<String, Object> rawData = simpleGraph(deptName, weekDisplay);
Object weekPlanObj = rawData.get("weekPlanResponse");
if (weekPlanObj == null) return "# 无数据";
// Convert to a JSON string
String jsonStr = objectMapper.writeValueAsString(weekPlanObj);
// Convert everything as-is
return markdownService.fullJsonToMarkdown(jsonStr);
} catch (Exception e) {
return "# 错误\n\n" + e.getMessage();
}
}
@GetMapping(value = "/weekPlanAnalysisFromMarkdown", produces = MediaType.TEXT_EVENT_STREAM_VALUE)
public Flux<String> weekPlanAnalysisFromMarkdown(
@RequestParam("deptName") String deptName,
@RequestParam("weekDisplay") String weekDisplay,
@RequestParam(value = "model", required = false) String model) {
return Flux.create(sink -> {
try {
// Set up the progress emitter
progressEmitter.setSink(sink);
// 🔥 Send the start status immediately
String startMsg = CharacterStreamProcessor.formatMessage("status",
"🚀 启动Markdown格式分析流程 - " + deptName + " " + weekDisplay);
sink.next(startMsg);
// Give the frontend a moment to render
Thread.sleep(100);
// Prepare request data
WeekPlanResponse weekPlanResponse = new WeekPlanResponse();
weekPlanResponse.setDeptName(deptName);
weekPlanResponse.setWeekDisplay(weekDisplay);
// 🔥 Execute the graph asynchronously for real
CompletableFuture.supplyAsync(() -> {
try {
System.out.println("开始执行Graph节点(Markdown模式)...");
// Run the graph - this triggers the progressEmitter calls inside each node
Optional<OverAllState> stateOptional = weekPlanNodeJdbcGraph.invoke(
Map.of("weekPlanResponse", weekPlanResponse)
);
if (stateOptional.isEmpty()) {
sink.next(CharacterStreamProcessor.formatMessage("error", "未获取到数据"));
return null;
}
// Extract the full data and convert it to Markdown
Map<String, Object> stateData = stateOptional.get().data();
Object rawData = stateData.get("weekPlanResponse");
String jsonStr = objectMapper.writeValueAsString(rawData);
// Convert to Markdown format
String markdownContent = markdownService.fullJsonToMarkdown(jsonStr);
return markdownContent;
} catch (Exception e) {
sink.next(CharacterStreamProcessor.formatMessage("error",
"Graph执行或Markdown转换错误: " + e.getMessage()));
e.printStackTrace();
return null;
}
}, CompletableFuture.delayedExecutor(500, java.util.concurrent.TimeUnit.MILLISECONDS))
.thenAccept(markdownContent -> {
if (markdownContent != null) {
try {
// Data retrieval complete
sink.next(CharacterStreamProcessor.formatMessage("status",
"📝 Markdown格式数据准备完成,开始AI分析"));
Thread.sleep(200); // let the user see the status
// Prepare the Markdown-specific analysis prompt, reusing the existing prompt template
String promptTemplate = promptLoader.loadPrompt("prompts/week_plan_analysis4.txt");
// Build the prompt, passing in only the Markdown content
// Note: the Markdown content is placed into the {jsonData} placeholder, so the prompt may need slight adjustment
String finalPrompt = promptTemplate.replace("{jsonData}",
"以下是数据的Markdown格式表示:\n\n" + markdownContent);
// Optimize the prompt for streaming output
finalPrompt = CharacterStreamProcessor.optimizePromptForStreaming(finalPrompt);
// 🔥 Get the client from the factory
ChatClient selectedClient = modelFactory.getChatClient(model);
// Start true character-level streaming
Flux<String> characterStream = CharacterStreamProcessor.createCharacterStream(
selectedClient,
finalPrompt,
sink
);
// Subscribe to the stream
characterStream.subscribe(
chunk -> {
if (!sink.isCancelled()) {
sink.next(chunk);
}
},
error -> {
if (!sink.isCancelled()) {
sink.error(error);
}
},
() -> {
if (!sink.isCancelled()) {
sink.next(CharacterStreamProcessor.formatMessage("complete", "✅ Markdown格式分析完成"));
sink.complete();
}
}
);
} catch (Exception e) {
if (!sink.isCancelled()) {
sink.next(CharacterStreamProcessor.formatMessage("error",
"AI分析错误: " + e.getMessage()));
sink.complete();
}
}
}
});
} catch (Exception e) {
if (!sink.isCancelled()) {
sink.next(CharacterStreamProcessor.formatMessage("error",
"系统错误: " + e.getMessage()));
sink.complete();
}
}
});
}
/**
* Multi-perspective week plan analysis endpoint
*
* @param deptName department name
* @param weekDisplay week label
* @param perspective analysis perspective: management / process / culture / comprehensive
* @return SSE stream
*/
@GetMapping(value = "/multiPerspectiveAnalysis", produces = MediaType.TEXT_EVENT_STREAM_VALUE)
public Flux<String> multiPerspectiveAnalysis(
@RequestParam("deptName") String deptName,
@RequestParam("weekDisplay") String weekDisplay,
@RequestParam(value = "perspective", defaultValue = "management") String perspective,
@RequestParam(value = "model", required = false) String model) {
return Flux.create(sink -> {
try {
// Set up the progress emitter
progressEmitter.setSink(sink);
// Show a different start message depending on the perspective
String perspectiveName = getPerspectiveName(perspective);
String startMsg = CharacterStreamProcessor.formatMessage("status",
"🚀 启动" + perspectiveName + "分析 - " + deptName + " " + weekDisplay);
sink.next(startMsg);
Thread.sleep(100);
// Prepare request data
WeekPlanResponse weekPlanResponse = new WeekPlanResponse();
weekPlanResponse.setDeptName(deptName);
weekPlanResponse.setWeekDisplay(weekDisplay);
// Run the graph asynchronously to fetch data
CompletableFuture.supplyAsync(() -> {
try {
System.out.println("开始执行Graph节点...");
// Run the graph to fetch data
Optional<OverAllState> stateOptional = weekPlanNodeJdbcGraph.invoke(
Map.of("weekPlanResponse", weekPlanResponse)
);
if (stateOptional.isEmpty()) {
sink.next(CharacterStreamProcessor.formatMessage("error", "未获取到数据"));
return null;
}
// Extract the data and convert it to JSON
Map<String, Object> stateData = stateOptional.get().data();
Object rawData = stateData.get("weekPlanResponse");
String jsonStr = objectMapper.writeValueAsString(rawData);
// Keep both JSON and Markdown representations
Map<String, String> dataFormats = new HashMap<>();
dataFormats.put("json", jsonStr);
dataFormats.put("markdown", markdownService.fullJsonToMarkdown(jsonStr));
return dataFormats;
} catch (Exception e) {
sink.next(CharacterStreamProcessor.formatMessage("error",
"数据获取错误: " + e.getMessage()));
e.printStackTrace();
return null;
}
}, CompletableFuture.delayedExecutor(500, java.util.concurrent.TimeUnit.MILLISECONDS))
.thenAccept(dataFormats -> {
if (dataFormats != null) {
try {
// Data retrieval complete
sink.next(CharacterStreamProcessor.formatMessage("status",
"📊 数据准备完成,开始" + getPerspectiveName(perspective) + "分析"));
Thread.sleep(200);
// Load a different prompt template depending on the perspective
String promptTemplate = loadPerspectivePrompt(perspective);
// Prepare the data: JSON for structured analysis, Markdown for text analysis
String analysisData = "";
if (perspective.equals("management") || perspective.equals("comprehensive")) {
// Management and comprehensive perspectives use JSON
analysisData = dataFormats.get("json");
} else {
// Process and culture perspectives can use Markdown
analysisData = dataFormats.get("markdown");
}
// Build the full prompt
String finalPrompt = buildPerspectivePrompt(promptTemplate, analysisData,
deptName, weekDisplay, perspective);
// Optimize the prompt for streaming output
finalPrompt = CharacterStreamProcessor.optimizePromptForStreaming(finalPrompt);
// 🔥 Get the client from the factory
ChatClient selectedClient = modelFactory.getChatClient(model);
// Start character-level streaming
Flux<String> characterStream = CharacterStreamProcessor.createCharacterStream(
selectedClient,
finalPrompt,
sink
);
// Subscribe to the stream
characterStream.subscribe(
chunk -> {
if (!sink.isCancelled()) {
sink.next(chunk);
}
},
error -> {
if (!sink.isCancelled()) {
sink.next(CharacterStreamProcessor.formatMessage("error",
"AI分析错误: " + error.getMessage()));
sink.complete();
}
},
() -> {
if (!sink.isCancelled()) {
String completeMsg = CharacterStreamProcessor.formatMessage("complete",
"✅ " + getPerspectiveName(perspective) + "分析完成");
sink.next(completeMsg);
sink.complete();
}
}
);
} catch (Exception e) {
if (!sink.isCancelled()) {
sink.next(CharacterStreamProcessor.formatMessage("error",
"AI分析错误: " + e.getMessage()));
sink.complete();
}
}
}
});
} catch (Exception e) {
if (!sink.isCancelled()) {
sink.next(CharacterStreamProcessor.formatMessage("error",
"系统错误: " + e.getMessage()));
sink.complete();
}
}
});
}
// Helper: get the display name of a perspective
private String getPerspectiveName(String perspective) {
switch (perspective) {
case "management": return "管理";
case "process": return "流程";
case "culture": return "文化";
case "comprehensive": return "综合";
default: return "管理";
}
}
// Helper: load the prompt template for a perspective
private String loadPerspectivePrompt(String perspective) throws IOException {
String promptFile = "";
switch (perspective) {
case "management": promptFile = "prompts/management-perspective.txt"; break;
case "process": promptFile = "prompts/process-perspective.txt"; break;
case "culture": promptFile = "prompts/culture-perspective.txt"; break;
case "comprehensive": promptFile = "prompts/comprehensive-perspective.txt"; break;
default: promptFile = "prompts/management-perspective.txt"; break;
}
return promptLoader.loadPrompt(promptFile);
}
// Helper: build the full prompt
private String buildPerspectivePrompt(String template, String data,
String deptName, String weekDisplay, String perspective) {
return template
.replace("{jsonData}", data)
.replace("{deptName}", deptName)
.replace("{weekDisplay}", weekDisplay)
.replace("{perspective}", getPerspectiveName(perspective));
}
}
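A minimal client-side sketch for reading the SSE stream from /graph/weekPlanAnalysisStream using only the JDK HTTP client; the host, port and query parameter values are placeholders:
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.util.stream.Stream;
public class WeekPlanSseClient {
    public static void main(String[] args) throws Exception {
        HttpClient client = HttpClient.newHttpClient();
        HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create("http://localhost:8080/graph/weekPlanAnalysisStream"
                        + "?deptName=Dev&weekDisplay=2025-W01&model=glm-4-flash"))
                .header("Accept", "text/event-stream")
                .build();
        // Each SSE event arrives as a "data:" line; print the payloads as they stream in
        HttpResponse<Stream<String>> response =
                client.send(request, HttpResponse.BodyHandlers.ofLines());
        response.body()
                .filter(line -> line.startsWith("data:"))
                .forEach(line -> System.out.println(line.substring(5).trim()));
    }
}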

146
chenhai-ai/src/main/java/com/chenhai/chenhaiai/controller/McpController.java

@ -0,0 +1,146 @@
package com.chenhai.chenhaiai.controller;
import com.chenhai.chenhaiai.entity.Dept;
import com.chenhai.chenhaiai.entity.WeekProject;
import lombok.Data;
import org.springframework.ai.chat.client.ChatClient;
import org.springframework.ai.chat.client.advisor.SimpleLoggerAdvisor;
import org.springframework.ai.chat.model.ChatModel;
//import org.springframework.ai.ollama.api.OllamaChatOptions;
import org.springframework.ai.tool.ToolCallback;
import org.springframework.ai.tool.ToolCallbackProvider;
import org.springframework.core.ParameterizedTypeReference;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import reactor.core.publisher.Flux;
import java.util.List;
/**
* @author : mazhongxu
* @date : 2025-11-29 21:24
* @modified By :
*/
@RestController
@RequestMapping("/mcp")
public class McpController {
private final ChatClient chatClient;
private final ToolCallbackProvider toolCallbackProvider;
public McpController(ChatClient.Builder chatClientBuilder, ToolCallbackProvider toolCallbackProvider) {
this.toolCallbackProvider = toolCallbackProvider;
// Debug: print all available tools
List<ToolCallback> toolCallbacks = List.of(toolCallbackProvider.getToolCallbacks());
System.out.println("=== 可用的 MCP 工具 ===");
for (ToolCallback tool : toolCallbacks) {
System.out.println("工具: " + tool.toString());
}
System.out.println("====================");
this.chatClient = chatClientBuilder
.defaultToolCallbacks(toolCallbacks)
.build();
}
// public McpController(ToolCallbackProvider toolCallbackProvider, ChatModel chatModel) {
// this.toolCallbackProvider = toolCallbackProvider;
//
// // 调试打印所有可用的工具
// List<ToolCallback> toolCallbacks = List.of(toolCallbackProvider.getToolCallbacks());
// System.out.println("=== 可用的 MCP 工具 ===");
// for (ToolCallback tool : toolCallbacks) {
// System.out.println("工具: " + tool.toString());
// }
// System.out.println("====================");
//
// this.chatClient = ChatClient.builder(chatModel)
// // 实现 Logger Advisor
// .defaultAdvisors(
// new SimpleLoggerAdvisor()
// )
// // 设置 ChatClient ChatModel Options 参数
// .defaultOptions(
// OllamaChatOptions.builder()
// .topP(0.7)
// .model("deepseek-v2:latest")
// .build()
// )
// .defaultToolCallbacks(toolCallbacks)
// .build();
// }
@GetMapping("/simple-test")
public String simpleTest() {
try {
// The simplest possible call
String response = chatClient.prompt()
.user("你好")
.call()
.content();
return "Success: " + response;
} catch (Exception e) {
e.printStackTrace();
return "Error: " + e.getMessage();
}
}
@GetMapping("/chats")
public Flux<String> chats(@RequestParam String question) {
return chatClient.prompt(question).stream().content();
}
@GetMapping("/chat")
public Flux<String> chat(@RequestParam String question) {
String systemPrompt = """
你是一个mysql查询专家可以调用工具执行指定sql语句查询
请严格按照MySQL查询结果的原始结构整理数据保持每行记录的独立性
**要求**
1. 保持原始的行级数据不要合并任何行
2. 每行都是一个独立的任务记录
3. 表头用中文项目名称 | 任务内容 | 负责人
4. 所有列左对齐
5. 在每一行数据结尾处位置添加`<br>`标签确保在浏览器中自动换行
6. 不要改变任何原始的人员对应关系
**正确的格式示例**
| 项目名称 | 任务内容 | 负责人 |
|:---------|:---------|:-------|
| 项目A | 具体任务描述1 | 张三 |
| 项目A | 具体任务描述2 | 李四 |
| 项目B | 具体任务描述3 | 王五 |
请直接输出整理后的表格
""";
Flux<String> stringFlux = chatClient.prompt()
.system(systemPrompt)
.user(question)
.stream()
.content();
return stringFlux;
}
@GetMapping("/entity")
public List<WeekProject> response() {
List<WeekProject> list = chatClient.prompt()
.user("请使用可用的工具查询ch_week_project表main_id=7的所有数据,只保留字段project_name,content,developer")
.call().entity(new ParameterizedTypeReference<List<WeekProject>>() {});
return list;
}
@GetMapping("/chat2")
public Dept chat2(@RequestParam String deptName) {
String prompt = """
请使用可用的工具查询sys_dept表dept_name=%s的唯一数据只保留字段dept_id,dept_name
""".formatted(deptName);
Dept dept = chatClient.prompt()
.user(prompt)
.call()
.entity(Dept.class);
return dept;
}
}

125
chenhai-ai/src/main/java/com/chenhai/chenhaiai/controller/ModelController.java

@ -0,0 +1,125 @@
package com.chenhai.chenhaiai.controller;
import com.chenhai.chenhaiai.config.ChatModelFactory;
import com.chenhai.common.core.domain.AjaxResult;
import org.springframework.ai.chat.client.ChatClient;
import org.springframework.web.bind.annotation.*;
import java.util.Map;
/**
* AI model management controller
*/
@RestController
@RequestMapping("/ai/model")
public class ModelController {
private final ChatModelFactory modelFactory;
// Constructor injection
public ModelController(ChatModelFactory modelFactory) {
this.modelFactory = modelFactory;
}
/**
* List available models
*/
@GetMapping("/list")
public AjaxResult listModels() {
return AjaxResult.success(modelFactory.getAvailableModels());
}
/**
* Test a specific model
*/
@PostMapping("/test")
public AjaxResult testModel(@RequestParam String model) {
try {
ChatClient client = modelFactory.getChatClient(model);
String response = client.prompt()
.user("你好,请回复'连接测试成功'")
.call()
.content();
return AjaxResult.success("测试成功", Map.of(
"model", model,
"response", response,
"timestamp", System.currentTimeMillis()
));
} catch (Exception e) {
return AjaxResult.error("测试失败: " + e.getMessage());
}
}
/**
* Get the default model
*/
@GetMapping("/default")
public AjaxResult getDefaultModel() {
return AjaxResult.success("默认模型", modelFactory.getDefaultModel());
}
/**
* Get model details
*/
@GetMapping("/info")
public AjaxResult getModelInfo(@RequestParam String model) {
try {
ChatClient client = modelFactory.getChatClient(model);
String provider = modelFactory.getModelProvider(model);
return AjaxResult.success("模型信息", Map.of(
"model", model,
"provider", provider,
"available", true,
"description", getModelDescription(model)
));
} catch (Exception e) {
return AjaxResult.error("模型不可用");
}
}
/**
* Test all models in one batch
*/
@PostMapping("/test-all")
public AjaxResult testAllModels() {
try {
Map<String, Object> results = new java.util.HashMap<>();
var models = modelFactory.getAvailableModels();
for (var modelInfo : models) {
String model = modelInfo.getValue();
try {
ChatClient client = modelFactory.getChatClient(model);
String response = client.prompt()
.user("测试,就问问你是谁,你是多少参数的版本")
.call()
.content();
results.put(model, Map.of(
"success", true,
"response", response.substring(0, Math.min(50, response.length())) + "..."
));
} catch (Exception e) {
results.put(model, Map.of(
"success", false,
"error", e.getMessage()
));
}
}
return AjaxResult.success("批量测试完成", results);
} catch (Exception e) {
return AjaxResult.error("批量测试失败: " + e.getMessage());
}
}
private String getModelDescription(String model) {
if (model.startsWith("glm")) {
return "智谱AI大模型";
} else {
return "本地Ollama模型";
}
}
}

40
chenhai-ai/src/main/java/com/chenhai/chenhaiai/controller/RealTestController.java

@ -0,0 +1,40 @@
package com.chenhai.chenhaiai.controller;
import com.chenhai.chenhaiai.service.gitNew.GiteaDataService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
@RestController
@RequestMapping("/test/real")
public class RealTestController {
@Autowired
private GiteaDataService giteaDataService;
@GetMapping("/sync")
public String runRealSyncTest() {
new Thread(() -> {
giteaDataService.realSyncTest();
}).start();
return "真实同步测试已开始,请查看控制台日志!";
}
@GetMapping("/full-quick")
public String testFullQuick() {
new Thread(() -> {
giteaDataService.fullSyncTest();
}).start();
return "快速全量测试已开始(前5个仓库),请查看控制台日志!";
}
@GetMapping("/full-all")
public String testFullAll() {
new Thread(() -> {
giteaDataService.executeFullSync();
}).start();
return "全量同步已开始(所有183个仓库),请查看控制台日志!";
}
}

109
chenhai-ai/src/main/java/com/chenhai/chenhaiai/entity/AnalysisResult.java

@ -0,0 +1,109 @@
package com.chenhai.chenhaiai.entity;
import com.chenhai.chenhaiai.entity.git.DeveloperActivity;
import com.chenhai.chenhaiai.entity.git.RepositoryActivity;
import lombok.Data;
import java.time.DayOfWeek;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.*;
@Data
public class AnalysisResult {
private String analysisId;
private LocalDateTime analysisTime;
private String timeRange;
private int totalRepositories;
private int activeRepositories;
private int totalCommits;
private int totalDevelopers;
private Map<String, DeveloperActivity> developerActivities = new HashMap<>();
private Map<String, RepositoryActivity> repositoryActivities = new HashMap<>();
// Aggregate statistics
private Map<DayOfWeek, Integer> overallCommitsByDay = new EnumMap<>(DayOfWeek.class);
private Map<Integer, Integer> overallCommitsByHour = new HashMap<>();
private Map<String, Integer> overallFileTypeDistribution = new HashMap<>();
// Rankings - note the modified type declarations
private List<Map.Entry<String, DeveloperActivity>> developerRanking = new ArrayList<>();
private List<Map.Entry<String, RepositoryActivity>> repositoryRanking = new ArrayList<>();
public AnalysisResult() {
this.analysisId = UUID.randomUUID().toString();
this.analysisTime = LocalDateTime.now();
}
public void calculateSummary() {
// Aggregate developer data
for (DeveloperActivity activity : developerActivities.values()) {
// Aggregate time distribution
activity.getCommitsByDay().forEach((day, count) ->
overallCommitsByDay.put(day, overallCommitsByDay.getOrDefault(day, 0) + count));
activity.getCommitsByHour().forEach((hour, count) ->
overallCommitsByHour.put(hour, overallCommitsByHour.getOrDefault(hour, 0) + count));
// Aggregate file types
activity.getCommitsByFileType().forEach((fileType, count) ->
overallFileTypeDistribution.put(fileType,
overallFileTypeDistribution.getOrDefault(fileType, 0) + count));
}
// Build the rankings - use entrySet() directly
developerRanking = new ArrayList<>(developerActivities.entrySet());
developerRanking.sort((a, b) -> Integer.compare(b.getValue().getTotalCommits(),
a.getValue().getTotalCommits()));
repositoryRanking = new ArrayList<>(repositoryActivities.entrySet());
repositoryRanking.sort((a, b) -> Integer.compare(b.getValue().getTotalCommits(),
a.getValue().getTotalCommits()));
// Compute totals
totalDevelopers = developerActivities.size();
totalRepositories = repositoryActivities.size();
totalCommits = developerActivities.values().stream()
.mapToInt(DeveloperActivity::getTotalCommits)
.sum();
}
public String getFormattedAnalysisTime() {
return analysisTime.format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss"));
}
public String generateSummary() {
// Pull the top entries from the rankings
String topDeveloper = "无";
int topDeveloperCommits = 0;
String topRepository = "无";
int topRepositoryCommits = 0;
if (!developerRanking.isEmpty()) {
topDeveloper = developerRanking.get(0).getKey();
topDeveloperCommits = developerRanking.get(0).getValue().getTotalCommits();
}
if (!repositoryRanking.isEmpty()) {
topRepository = repositoryRanking.get(0).getKey();
topRepositoryCommits = repositoryRanking.get(0).getValue().getTotalCommits();
}
return String.format(
"分析时间: %s\n" +
"时间范围: %s\n" +
"仓库总数: %d (活跃: %d)\n" +
"开发者总数: %d\n" +
"总提交数: %d\n" +
"最活跃开发者: %s (%d 次提交)\n" +
"最活跃仓库: %s (%d 次提交)",
getFormattedAnalysisTime(),
timeRange,
totalRepositories, activeRepositories,
totalDevelopers,
totalCommits,
topDeveloper, topDeveloperCommits,
topRepository, topRepositoryCommits
);
}
}

21
chenhai-ai/src/main/java/com/chenhai/chenhaiai/entity/DailyPaper.java

@ -0,0 +1,21 @@
package com.chenhai.chenhaiai.entity;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import lombok.Data;
import java.math.BigDecimal;
import java.time.LocalDate;
/**
* @author : mazhongxu
* @date : 2025-12-04 19:42
* @modified By :
*/
@Data
@JsonIgnoreProperties(ignoreUnknown = true)
public class DailyPaper {
private String projectName;
private String content;
private String dailyPaperDate;
private String dailyPaperHour;
}

22
chenhai-ai/src/main/java/com/chenhai/chenhaiai/entity/Dept.java

@ -0,0 +1,22 @@
package com.chenhai.chenhaiai.entity;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import lombok.Data;
/**
* @author : mazhongxu
* @date : 2025-12-04 20:20
* @modified By :
*/
@Data
@JsonIgnoreProperties(ignoreUnknown = true)
public class Dept {
/**
* Department ID
*/
private Long deptId;
/**
* Department name
*/
private String deptName;
}

16
chenhai-ai/src/main/java/com/chenhai/chenhaiai/entity/UserInfo.java

@ -0,0 +1,16 @@
package com.chenhai.chenhaiai.entity;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import lombok.Data;
/**
* @author : mazhongxu
* @date : 2025-12-04 19:26
* @modified By :
*/
@Data
@JsonIgnoreProperties(ignoreUnknown = true)
public class UserInfo {
private Long userId;
private String userName;
}

21
chenhai-ai/src/main/java/com/chenhai/chenhaiai/entity/WeekPlanDetail.java

@ -0,0 +1,21 @@
package com.chenhai.chenhaiai.entity;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import lombok.Data;
/**
* @author : mazhongxu
* @date : 2025-12-04 19:41
* @modified By :
*/
@Data
@JsonIgnoreProperties(ignoreUnknown = true)
public class WeekPlanDetail {
private Long mainId;
private String projectName;
private String content;
private String developer;
private Integer superviseStatus;
private String note;
private String projectNote;
}

21
chenhai-ai/src/main/java/com/chenhai/chenhaiai/entity/WeekPlanMain.java

@ -0,0 +1,21 @@
package com.chenhai.chenhaiai.entity;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import lombok.Data;
import java.time.LocalDate;
/**
* @author : mazhongxu
* @date : 2025-12-04 19:40
* @modified By :
*/
@Data
@JsonIgnoreProperties(ignoreUnknown = true)
public class WeekPlanMain {
private Long id;
private String deptName;
private String weekDisplay;
private String weekStartDate;
private String weekEndDate;
}

58
chenhai-ai/src/main/java/com/chenhai/chenhaiai/entity/WeekPlanResponse.java

@ -0,0 +1,58 @@
package com.chenhai.chenhaiai.entity;
import com.chenhai.chenhaiai.entity.git.GitAnalysisData;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import lombok.Data;
import java.util.List;
/**
* @author : mazhongxu
* @date : 2025-12-04 18:49
* @modified By :
*/
@Data
@JsonIgnoreProperties(ignoreUnknown = true)
public class WeekPlanResponse {
/**
* Department name
*/
private String deptName;
/**
* Week display label
*/
private String weekDisplay;
/**
* Department info
*/
private Dept dept;
/**
* Employee list
*/
private List<UserInfo> userInfos;
/**
* Week plan main record
*/
private WeekPlanMain planMain;
/**
* Week plan detail list
*/
private List<WeekPlanDetail> planDetails;
/**
* Daily report list
*/
private List<DailyPaper> dailyPapers;
/**
* Git analysis result
*/
private GitAnalysisData gitAnalysis;
}

37
chenhai-ai/src/main/java/com/chenhai/chenhaiai/entity/WeekProject.java

@ -0,0 +1,37 @@
package com.chenhai.chenhaiai.entity;
/**
* @author : mazhongxu
* @date : 2025-11-29 22:47
* @modified By :
*/
public class WeekProject {
private String projectName;
private String content;
private String developer;
public String getProjectName() {
return projectName;
}
public void setProjectName(String projectName) {
this.projectName = projectName;
}
public String getContent() {
return content;
}
public void setContent(String content) {
this.content = content;
}
public String getDeveloper() {
return developer;
}
public void setDeveloper(String developer) {
this.developer = developer;
}
}

22
chenhai-ai/src/main/java/com/chenhai/chenhaiai/entity/git/BasicInfo.java

@ -0,0 +1,22 @@
package com.chenhai.chenhaiai.entity.git;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonInclude;
import lombok.Data;
import lombok.AllArgsConstructor;
import lombok.NoArgsConstructor;
@Data
@AllArgsConstructor
@NoArgsConstructor
@JsonInclude(JsonInclude.Include.NON_NULL)
@JsonIgnoreProperties(ignoreUnknown = true) // key annotation: ignore unknown fields
public class BasicInfo {
private String timeRange;
private int totalRepos;
private int activeRepos;
private int activeDevelopers;
private int totalCommits;
private long analysisTime;
private String analysisStrategy;
}

13
chenhai-ai/src/main/java/com/chenhai/chenhaiai/entity/git/DayStats.java

@ -0,0 +1,13 @@
package com.chenhai.chenhaiai.entity.git;
import lombok.Data;
import lombok.AllArgsConstructor;
import lombok.NoArgsConstructor;
@Data
@AllArgsConstructor
@NoArgsConstructor
public class DayStats {
private String dayName;
private int commitCount;
}

94
chenhai-ai/src/main/java/com/chenhai/chenhaiai/entity/git/DeveloperActivity.java

@ -0,0 +1,94 @@
package com.chenhai.chenhaiai.entity.git;
import com.chenhai.chenhaiai.entity.git.GiteaCommit;
import lombok.Data;
import java.time.DayOfWeek;
import java.time.ZonedDateTime;
import java.util.*;
@Data
public class DeveloperActivity {
private String developerName;
private String developerEmail;
private int totalCommits;
private Set<String> contributedRepos = new HashSet<>();
private Map<DayOfWeek, Integer> commitsByDay = new EnumMap<>(DayOfWeek.class);
private Map<Integer, Integer> commitsByHour = new HashMap<>();
private Map<String, Integer> commitsByFileType = new HashMap<>();
private List<String> recentCommitMessages = new ArrayList<>();
// Line-count fields
private int totalAdditions = 0;
private int totalDeletions = 0;
// Accumulate line-count changes
public void addLineChanges(int additions, int deletions) {
this.totalAdditions += additions;
this.totalDeletions += deletions;
}
// Net line change
public int getNetLineChanges() {
return totalAdditions - totalDeletions;
}
// Computed properties
public double getAvgCommitsPerDay() {
return totalCommits > 0 ? totalCommits / 7.0 : 0;
}
public String getMostActiveDay() {
return commitsByDay.entrySet().stream()
.max(Map.Entry.comparingByValue())
.map(entry -> entry.getKey().toString())
.orElse("Unknown");
}
public String getMostActiveHour() {
return commitsByHour.entrySet().stream()
.max(Map.Entry.comparingByValue())
.map(entry -> String.format("%02d:00", entry.getKey()))
.orElse("Unknown");
}
public String getMostActiveFileType() {
return commitsByFileType.entrySet().stream()
.max(Map.Entry.comparingByValue())
.map(Map.Entry::getKey)
.orElse("Unknown");
}
public void addCommit(GiteaCommit commit, String repoFullName) {
totalCommits++;
contributedRepos.add(repoFullName);
// Time analysis
ZonedDateTime commitTime = commit.getCommitTime();
if (commitTime != null) {
DayOfWeek dayOfWeek = commitTime.getDayOfWeek();
commitsByDay.put(dayOfWeek, commitsByDay.getOrDefault(dayOfWeek, 0) + 1);
int hour = commitTime.getHour();
commitsByHour.put(hour, commitsByHour.getOrDefault(hour, 0) + 1);
}
// File type analysis
if (commit.getFiles() != null) {
for (GiteaCommit.ChangedFile file : commit.getFiles()) {
String fileType = file.getFileType();
commitsByFileType.put(fileType,
commitsByFileType.getOrDefault(fileType, 0) + 1);
}
}
// Keep the most recent commit messages
if (commit.getCommit() != null && commit.getCommit().getMessage() != null) {
String message = commit.getCommit().getMessage().trim();
if (!message.isEmpty()) {
recentCommitMessages.add(message);
if (recentCommitMessages.size() > 10) {
recentCommitMessages.remove(0);
}
}
}
}
}

15
chenhai-ai/src/main/java/com/chenhai/chenhaiai/entity/git/DeveloperRank.java

@ -0,0 +1,15 @@
package com.chenhai.chenhaiai.entity.git;
import lombok.Data;
import lombok.AllArgsConstructor;
import lombok.NoArgsConstructor;
@Data
@AllArgsConstructor
@NoArgsConstructor
public class DeveloperRank {
private int rank;
private String name;
private int commitCount;
private int repoCount;
}

13
chenhai-ai/src/main/java/com/chenhai/chenhaiai/entity/git/FileTypeStats.java

@ -0,0 +1,13 @@
package com.chenhai.chenhaiai.entity.git;
import lombok.Data;
import lombok.AllArgsConstructor;
import lombok.NoArgsConstructor;
@Data
@AllArgsConstructor
@NoArgsConstructor
public class FileTypeStats {
private String fileType;
private int fileCount;
}

23
chenhai-ai/src/main/java/com/chenhai/chenhaiai/entity/git/GitAnalysisData.java

@ -0,0 +1,23 @@
package com.chenhai.chenhaiai.entity.git;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonInclude;
import lombok.Data;
import lombok.AllArgsConstructor;
import lombok.NoArgsConstructor;
import java.util.List;
@Data
@AllArgsConstructor
@NoArgsConstructor
@JsonInclude(JsonInclude.Include.NON_NULL)
@JsonIgnoreProperties(ignoreUnknown = true) // key annotation: ignore unknown fields
public class GitAnalysisData {
private BasicInfo basicInfo;
private List<DeveloperRank> developerRanks;
private List<RepoRank> repoRanks;
private List<DayStats> dayStats;
private List<FileTypeStats> fileTypeStats;
private String generatedTime;
private String rawReport;
}

203
chenhai-ai/src/main/java/com/chenhai/chenhaiai/entity/git/GiteaCommit.java

@ -0,0 +1,203 @@
package com.chenhai.chenhaiai.entity.git;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.Data;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;
import java.util.List;
@Data
@JsonIgnoreProperties(ignoreUnknown = true) // ignore unknown fields from the API
public class GiteaCommit {
@JsonProperty("sha")
private String sha;
@JsonProperty("commit")
private CommitInfo commit;
@JsonProperty("html_url")
private String htmlUrl;
@JsonProperty("author")
private UserInfo author;
@JsonProperty("committer")
private UserInfo committer;
@JsonProperty("parents")
private List<ParentCommit> parents;
@JsonProperty("files")
private List<ChangedFile> files;
// Other fields the Gitea API may return
@JsonProperty("stats")
private Stats stats;
@JsonProperty("url")
private String url;
// Utility methods
public String getShortSha() {
return sha != null && sha.length() > 7 ? sha.substring(0, 7) : sha;
}
public String getShortMessage() {
if (commit == null || commit.getMessage() == null) return "";
String message = commit.getMessage().trim();
return message.length() > 50 ? message.substring(0, 47) + "..." : message;
}
public ZonedDateTime getCommitTime() {
if (commit != null && commit.getAuthor() != null && commit.getAuthor().getDate() != null) {
return ZonedDateTime.parse(commit.getAuthor().getDate());
}
return null;
}
public boolean isWithinTimeRange(ZonedDateTime since, ZonedDateTime until) {
ZonedDateTime commitTime = getCommitTime();
return commitTime != null &&
!commitTime.isBefore(since) &&
!commitTime.isAfter(until);
}
// Inner classes
@Data
@JsonIgnoreProperties(ignoreUnknown = true)
public static class CommitInfo {
@JsonProperty("message")
private String message;
@JsonProperty("author")
private CommitAuthor author;
@JsonProperty("committer")
private CommitAuthor committer;
@JsonProperty("url")
private String url;
@JsonProperty("comment_count")
private Integer commentCount;
}
@Data
@JsonIgnoreProperties(ignoreUnknown = true)
public static class CommitAuthor {
@JsonProperty("name")
private String name;
@JsonProperty("email")
private String email;
@JsonProperty("date")
private String date;
}
@Data
@JsonIgnoreProperties(ignoreUnknown = true)
public static class UserInfo {
@JsonProperty("id")
private Long id;
@JsonProperty("login")
private String login;
@JsonProperty("full_name")
private String fullName;
@JsonProperty("email")
private String email;
@JsonProperty("avatar_url")
private String avatarUrl;
@JsonProperty("language")
private String language;
}
@Data
@JsonIgnoreProperties(ignoreUnknown = true)
public static class ParentCommit {
@JsonProperty("sha")
private String sha;
@JsonProperty("url")
private String url;
@JsonProperty("html_url")
private String htmlUrl;
}
@Data
@JsonIgnoreProperties(ignoreUnknown = true)
public static class ChangedFile {
@JsonProperty("filename")
private String filename;
@JsonProperty("status")
private String status; // added, modified, removed, renamed
@JsonProperty("additions")
private Integer additions;
@JsonProperty("deletions")
private Integer deletions;
@JsonProperty("changes")
private Integer changes;
@JsonProperty("patch")
private String patch;
@JsonProperty("raw_url")
private String rawUrl;
// Note: the Gitea API may not return additions/deletions, but the fields are still defined
public String getFileExtension() {
if (filename == null) return "";
int dotIndex = filename.lastIndexOf('.');
return dotIndex > 0 ? filename.substring(dotIndex + 1).toLowerCase() : "";
}
public String getFileType() {
String ext = getFileExtension();
if (ext.isEmpty()) return "unknown";
if (ext.matches("(java|py|js|ts|cpp|c|go|rs|php|rb|scala|kt|swift)")) {
return "code";
} else if (ext.matches("(json|yml|yaml|xml|properties|conf|ini|toml)")) {
return "config";
} else if (ext.matches("(md|txt|rst|adoc|docx|pdf)")) {
return "document";
} else if (ext.matches("(sql|ddl)")) {
return "database";
} else if (ext.matches("(html|css|scss|less)")) {
return "frontend";
} else if (ext.matches("(jpg|jpeg|png|gif|svg|ico)")) {
return "image";
} else {
return "other";
}
}
}
@Data
@JsonIgnoreProperties(ignoreUnknown = true)
public static class Stats {
@JsonProperty("total")
private Integer total;
@JsonProperty("additions")
private Integer additions;
@JsonProperty("deletions")
private Integer deletions;
@JsonProperty("files_changed")
private Integer filesChanged;
}
}
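A small self-contained check of the file-type classification implemented in ChangedFile.getFileType() above (the demo class is hypothetical; file paths are examples only):
package com.chenhai.chenhaiai.entity.git;
public class FileTypeClassificationDemo {
    public static void main(String[] args) {
        GiteaCommit.ChangedFile file = new GiteaCommit.ChangedFile();
        file.setFilename("src/main/java/com/chenhai/Demo.java");
        System.out.println(file.getFileType()); // "code"
        file.setFilename("src/main/resources/application-druid.yml");
        System.out.println(file.getFileType()); // "config"
        file.setFilename("docs/README.md");
        System.out.println(file.getFileType()); // "document"
    }
}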

116
chenhai-ai/src/main/java/com/chenhai/chenhaiai/entity/git/GiteaRepository.java

@ -0,0 +1,116 @@
package com.chenhai.chenhaiai.entity.git;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.Data;
@Data
@JsonIgnoreProperties(ignoreUnknown = true) // ignore unknown fields from the API
public class GiteaRepository {
@JsonProperty("id")
private Long id;
@JsonProperty("name")
private String name;
@JsonProperty("full_name")
private String fullName;
@JsonProperty("owner")
private GiteaUser owner;
@JsonProperty("description")
private String description;
@JsonProperty("html_url")
private String htmlUrl;
@JsonProperty("ssh_url")
private String sshUrl;
@JsonProperty("clone_url")
private String cloneUrl;
@JsonProperty("default_branch")
private String defaultBranch;
@JsonProperty("created_at")
private String createdAt;
@JsonProperty("updated_at")
private String updatedAt;
@JsonProperty("size")
private Integer size;
@JsonProperty("stars_count")
private Integer starsCount;
@JsonProperty("forks_count")
private Integer forksCount;
@JsonProperty("open_issues_count")
private Integer openIssuesCount;
@JsonProperty("private")
private Boolean isPrivate;
// Other fields the Gitea API may return
@JsonProperty("language")
private String language;
@JsonProperty("has_issues")
private Boolean hasIssues;
@JsonProperty("has_wiki")
private Boolean hasWiki;
@JsonProperty("has_projects")
private Boolean hasProjects;
@JsonProperty("archived")
private Boolean archived;
// Compatibility helpers
public String getFullPath() {
return fullName != null ? fullName : "";
}
public String getRepoName() {
return name != null ? name :
(fullName != null && fullName.contains("/") ?
fullName.substring(fullName.lastIndexOf("/") + 1) : "");
}
}
@Data
@JsonIgnoreProperties(ignoreUnknown = true) // ignore unknown fields
class GiteaUser {
@JsonProperty("id")
private Long id;
@JsonProperty("login")
private String login;
@JsonProperty("full_name")
private String fullName;
@JsonProperty("email")
private String email;
@JsonProperty("avatar_url")
private String avatarUrl;
// Additional fields the Gitea API may return
@JsonProperty("language")
private String language;
@JsonProperty("is_admin")
private Boolean isAdmin;
@JsonProperty("last_login")
private String lastLogin;
@JsonProperty("created")
private String created;
}
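
Because unknown fields are ignored, a partial Gitea API payload maps cleanly onto this class. A small deserialization sketch (illustrative only; the JSON literal is made up, not a real API response):

import com.chenhai.chenhaiai.entity.git.GiteaRepository;
import com.fasterxml.jackson.databind.ObjectMapper;

public class GiteaRepositoryMappingDemo {
    public static void main(String[] args) throws Exception {
        String json = """
                {"id": 1, "name": "chenhai", "full_name": "team/chenhai",
                 "default_branch": "master", "some_future_field": true}
                """;
        GiteaRepository repo = new ObjectMapper().readValue(json, GiteaRepository.class);
        System.out.println(repo.getRepoName()); // "chenhai"
        System.out.println(repo.getFullPath()); // "team/chenhai"
    }
}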

16
chenhai-ai/src/main/java/com/chenhai/chenhaiai/entity/git/RepoRank.java

@ -0,0 +1,16 @@
package com.chenhai.chenhaiai.entity.git;
import lombok.Data;
import lombok.AllArgsConstructor;
import lombok.NoArgsConstructor;
@Data
@AllArgsConstructor
@NoArgsConstructor
public class RepoRank {
private int rank;
private String repoName;
private String displayName;
private int commitCount;
private int developerCount;
}

98
chenhai-ai/src/main/java/com/chenhai/chenhaiai/entity/git/RepositoryActivity.java

@ -0,0 +1,98 @@
package com.chenhai.chenhaiai.entity.git;
import com.chenhai.chenhaiai.entity.git.GiteaCommit;
import lombok.Data;
import java.time.LocalDate;
import java.util.*;
@Data
public class RepositoryActivity {
private String repoFullName;
private String repoName;
private int totalCommits;
private Set<String> activeDevelopers = new HashSet<>();
private Map<LocalDate, Integer> commitsByDate = new HashMap<>();
private Map<String, Integer> fileTypeDistribution = new HashMap<>();
private Map<String, Integer> developerCommitCount = new HashMap<>();
private List<String> recentCommitShas = new ArrayList<>();
// Line-count fields
private int totalAdditions = 0;
private int totalDeletions = 0;
// Optional helper: add a commit together with its line changes
// public void addCommitWithLines(GiteaCommit commit, GiteaAnalysisNewService.CommitLineChange lineChange) {
// // Reuse addCommit to maintain the other statistics
// addCommit(commit);
//
// // Accumulate line counts
// this.totalAdditions += lineChange.getAdditions();
// this.totalDeletions += lineChange.getDeletions();
// }
// Net line change
public int getNetLineChanges() {
return totalAdditions - totalDeletions;
}
// Derived properties
public int getDeveloperCount() {
return activeDevelopers.size();
}
public double getAvgCommitsPerDeveloper() {
return activeDevelopers.isEmpty() ? 0 :
(double) totalCommits / activeDevelopers.size();
}
public String getMostActiveDeveloper() {
return developerCommitCount.entrySet().stream()
.max(Map.Entry.comparingByValue())
.map(Map.Entry::getKey)
.orElse("Unknown");
}
public String getMostChangedFileType() {
return fileTypeDistribution.entrySet().stream()
.max(Map.Entry.comparingByValue())
.map(Map.Entry::getKey)
.orElse("Unknown");
}
public void addCommit(GiteaCommit commit) {
totalCommits++;
// Developer info
if (commit.getCommit() != null && commit.getCommit().getAuthor() != null) {
String developer = commit.getCommit().getAuthor().getName() != null ?
commit.getCommit().getAuthor().getName() :
commit.getCommit().getAuthor().getEmail();
activeDevelopers.add(developer);
developerCommitCount.put(developer,
developerCommitCount.getOrDefault(developer, 0) + 1);
}
// Per-date statistics
if (commit.getCommitTime() != null) {
LocalDate date = commit.getCommitTime().toLocalDate();
commitsByDate.put(date, commitsByDate.getOrDefault(date, 0) + 1);
}
// File-type statistics
if (commit.getFiles() != null) {
for (GiteaCommit.ChangedFile file : commit.getFiles()) {
String fileType = file.getFileType();
fileTypeDistribution.put(fileType,
fileTypeDistribution.getOrDefault(fileType, 0) + 1);
}
}
// Keep the most recent commit SHAs (at most 5)
if (commit.getSha() != null) {
recentCommitShas.add(commit.getShortSha());
if (recentCommitShas.size() > 5) {
recentCommitShas.remove(0);
}
}
}
}
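
A brief sketch of how this aggregate is typically driven: feed it commits one by one and then read the derived metrics. The empty commit list here is only a placeholder for commits fetched from the Gitea API elsewhere:

import com.chenhai.chenhaiai.entity.git.GiteaCommit;
import com.chenhai.chenhaiai.entity.git.RepositoryActivity;
import java.util.List;

public class RepositoryActivityDemo {
    public static void main(String[] args) {
        List<GiteaCommit> commits = List.of(); // normally fetched from the Gitea API
        RepositoryActivity activity = new RepositoryActivity();
        activity.setRepoFullName("team/chenhai");
        commits.forEach(activity::addCommit);
        System.out.println(activity.getDeveloperCount());         // 0 with an empty list
        System.out.println(activity.getAvgCommitsPerDeveloper()); // 0.0
        System.out.println(activity.getMostActiveDeveloper());    // "Unknown"
    }
}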

135
chenhai-ai/src/main/java/com/chenhai/chenhaiai/node/DataAssociationNode.java

@ -0,0 +1,135 @@
package com.chenhai.chenhaiai.node;
import com.alibaba.cloud.ai.graph.OverAllState;
import com.alibaba.cloud.ai.graph.action.NodeAction;
import org.springframework.ai.chat.client.ChatClient;
import org.springframework.ai.chat.prompt.PromptTemplate;
import java.util.Map;
/**
* @author : mazhongxu
* @date : 2025-11-28 16:09
* @modified By :
*/
public class DataAssociationNode implements NodeAction {
private final ChatClient chatClient;
public DataAssociationNode(ChatClient.Builder builder) {
this.chatClient = builder.build();
}
@Override
public Map<String, Object> apply(OverAllState state) throws Exception {
// Read the input data from the graph state
String dataOrganizationResult = state.value("dataOrganizationResult", "");
// Use a text block; avoid placeholders such as {exampleField} in the template body, since they would be parsed as variables
String promptTemplate = """
你是一个数据关联分析助手需要将日报数据与周计划任务进行智能关联并输出完整的关联后数据结构
### 任务要求
1. 分析日报工作内容与周计划任务的相似性进行智能匹配
2. 将关联的日报数据添加到对应周计划任务的"关联日报"字段中
3. 无法关联的日报数据在项目列表中创建"临时任务"项目进行统一归类
4. 保持原有的数据结构不变只增加"关联日报"字段和"临时任务"项目
### 关联匹配规则
1. **关键词匹配**日报工作内容与周计划任务内容的关键词相似度
- 完全匹配工作内容完全一致或高度相似
- 部分匹配包含相同关键词或业务领域相同
2. **负责人匹配**日报负责人与周计划任务负责人的重叠度
- 负责人完全一致优先匹配
- 负责人有重叠部分次优先匹配
3. **项目一致性**日报项目名称与周计划项目名称的对应关系
- "研发部工作"日报可匹配到所有研发相关周计划项目
4. **语义关联**理解工作内容的业务含义进行关联
### 输出数据结构要求
必须严格按照以下完整结构输出
周计划数据:
部门名称: "[实际部门名称]"
周次显示: "[实际周次显示]"
项目列表:
- 项目名称: "[实际项目名称1]"
任务列表:
- 负责人: "[实际负责人]"
工作内容: "[实际工作内容]"
关联日报:
- 项目名称: "[日报项目名称]"
日报日期: "[日报日期]"
负责人: "[日报负责人]"
工作内容: "[日报工作内容]"
日报工时: [日报工时数值]
完成进度: [完成进度百分比]
- 项目名称: "[日报项目名称]"
日报日期: "[日报日期]"
负责人: "[日报负责人]"
工作内容: "[日报工作内容]"
日报工时: [日报工时数值]
完成进度: [完成进度百分比]
- 负责人: "[实际负责人]"
工作内容: "[实际工作内容]"
关联日报: []
- 项目名称: "[实际项目名称2]"
任务列表:
- 负责人: "[实际负责人]"
工作内容: "[实际工作内容]"
关联日报: []
- 项目名称: "临时任务"
任务列表:
- 负责人: "相关人员"
工作内容: "未关联的临时工作"
关联日报:
- 项目名称: "[日报项目名称]"
日报日期: "[日报日期]"
负责人: "[日报负责人]"
工作内容: "[日报工作内容]"
日报工时: [日报工时数值]
完成进度: [完成进度百分比]
- 项目名称: "[日报项目名称]"
日报日期: "[日报日期]"
负责人: "[日报负责人]"
工作内容: "[日报工作内容]"
日报工时: [日报工时数值]
完成进度: [完成进度百分比]
### 字段说明
- 关联日报字段数组类型包含匹配的日报完整数据
- 临时任务项目固定项目名称"临时任务"负责人固定为"相关人员"
- 空关联日报如果没有匹配的日报关联日报字段为空数组
- 数据完整性必须包含所有原始周计划任务即使没有关联日报
### 输入数据
{dataOrganizationResult}
### 输出要求
严格按照上述完整数据结构输出关联后的数据确保
1. 所有周计划任务都被保留
2. 关联日报数据完整包含所有字段
3. 临时任务项目位于项目列表末尾
4. 保持清晰的文本层级格式
5. 不遗漏任何日报数据
""";
// Build the PromptTemplate and bind the variable
PromptTemplate template = new PromptTemplate(promptTemplate);
template.add("dataOrganizationResult", dataOrganizationResult);
String prompt = template.render();
// Call the model
String content = chatClient.prompt()
.user(prompt)
.call()
.content();
// Store the result in the state
return Map.of("dataAssociationResult", content);
}
}

83
chenhai-ai/src/main/java/com/chenhai/chenhaiai/node/DataOrganizationNode.java

@ -0,0 +1,83 @@
package com.chenhai.chenhaiai.node;
import com.alibaba.cloud.ai.graph.OverAllState;
import com.alibaba.cloud.ai.graph.action.NodeAction;
import org.springframework.ai.chat.client.ChatClient;
import org.springframework.ai.chat.prompt.PromptTemplate;
import java.util.Map;
/**
* @author : mazhongxu
* @date : 2025-11-28 16:09
* @modified By :
*/
public class DataOrganizationNode implements NodeAction {
private final ChatClient chatClient;
public DataOrganizationNode(ChatClient.Builder builder) {
this.chatClient = builder.build();
}
@Override
public Map<String, Object> apply(OverAllState state) throws Exception {
// Read the week plan and daily report data from the graph state
String weekPlanAndPersonalDaily = state.value("weekPlanAndPersonalDaily", "");
String promptTemplate = """
你是一个数据格式转换助手需要将已经翻译成中文的JSON数据转换为层级分明的文本格式只保留关键信息
### 任务要求
1. 将中文JSON数据转换为文本格式
2. 只保留指定的关键字段
3. 保持项目列表和任务列表的完整层级结构
4. 不进行任何数据分析
### 保留字段规则
周计划数据保留字段
- 部门名称
- 周次显示
- 年份月份月中周次
- 周开始日期周结束日期
- 项目列表完整保留包括下面的任务列表
- 项目名称
- 任务列表完整保留
- 负责人
- 工作内容
- 督办状态
- 备注
日报数据保留字段
- 部门名称
- 项目名称
- 日报日期
- 负责人
- 工作内容
- 日报工时
- 完成进度
### 过滤字段
除上述指定字段外的所有其他字段
### 输入数据{weekPlanAndPersonalDaily}
### 输出要求
直接输出精简后的层级文本格式严格按照指定的字段保留
""";
PromptTemplate template = new PromptTemplate(promptTemplate);
template.add("weekPlanAndPersonalDaily", weekPlanAndPersonalDaily);
String prompt = template.render();
// Call the model
String content = chatClient.prompt()
.user(prompt)
.call()
.content();
// Store the result in the state
return Map.of("dataOrganizationResult", content);
}
}

167
chenhai-ai/src/main/java/com/chenhai/chenhaiai/node/DataTranslationNode.java

@ -0,0 +1,167 @@
package com.chenhai.chenhaiai.node;
import com.alibaba.cloud.ai.graph.OverAllState;
import com.alibaba.cloud.ai.graph.action.NodeAction;
import org.springframework.ai.chat.client.ChatClient;
import org.springframework.ai.chat.prompt.PromptTemplate;
import java.util.Map;
/**
* @author : mazhongxu
* @date : 2025-11-28 16:09
*/
public class DataTranslationNode implements NodeAction {
private final ChatClient chatClient;
public DataTranslationNode(ChatClient.Builder builder) {
this.chatClient = builder.build();
}
@Override
public Map<String, Object> apply(OverAllState state) throws Exception {
// Read the week plan and daily report data from the graph state
String weekPlan = state.value("weekPlan", "");
String personalDaily = state.value("personalDaily", "");
String promptTemplate = """
你是一个JSON字段翻译助手需要将周计划数据和日报数据合并成一个完整的JSON并将所有字段名翻译为中文
### 任务要求
1. 将周计划JSON和日报JSON合并为一个完整JSON
2. 将所有字段名key翻译为对应的中文名称
3. 保持原有的数据结构和数值不变
4. 不进行任何数据关联分析只做纯粹的字段翻译和结构合并
### 字段翻译对照表
周计划字段翻译
- "total" "总数"
- "rows" "数据行"
- "code" "状态码"
- "msg" "消息"
- "createBy" "创建人"
- "createTime" "创建时间"
- "updateBy" "更新人"
- "updateTime" "更新时间"
- "remark" "备注"
- "id" "主键ID"
- "deptId" "部门ID"
- "deptName" "部门名称"
- "schedule" "计划进度"
- "weekDisplay" "周次显示"
- "year" "年份"
- "month" "月份"
- "weekOfMonth" "月中周次"
- "weekStartDate" "周开始日期"
- "weekEndDate" "周结束日期"
- "monitoringStatus" "督办状态"
- "projectList" "项目列表"
- "projectNote" "项目备注"
- "projectName" "项目名称"
- "tasks" "任务列表"
- "note" "备注"
- "assistant" "协助人"
- "developer" "负责人"
- "superviseStatus" "督办状态"
- "mainId" "主表ID"
- "planStartDate" "预计开始时间"
- "planEndDate" "预计结束时间"
- "planHours" "预计用时"
- "content" "工作内容"
日报字段翻译
- "projectId" "项目ID"
- "guanlianUids" "关联用户ID"
- "beizhu" "备注信息"
- "workList" "工作列表"
- "isPlan" "是否计划内"
- "dailyPaperDate" "日报日期"
- "dailyPaperType" "日报类型"
- "dailyPaperStatus" "日报状态"
- "dailyPaperSort" "日报排序"
- "dailyPaperHour" "日报工时"
- "itemSchedule" "单项完成进度"
- "itemStaus" "单项状态"
- "filePath" "文件路径"
- "userIds" "用户ID集合"
- "images" "图片集合"
- "delFlag" "删除标志"
- "other" "其他信息"
- "jsondata" "JSON数据"
- "taskId" "任务ID"
- "completeStatus" "完成状态"
- "delItem" "删除项"
- "level" "优先级"
- "rejectedStatus" "驳回状态"
- "rejectedReason" "驳回原因"
- "rejectedUserId" "驳回人ID"
### 输出结构要求
输出一个包含两个主要字段的JSON对象
- "周计划数据": 包含翻译后的周计划数据
- "日报数据": 包含翻译后的日报数据
### 输入数据
周计划数据
{weekPlan}
日报数据
{personalDaily}
### 输出要求
直接输出合并后的完整JSON所有字段名使用中文保持数据结构完整不要添加任何额外的解释
""";
// Use PromptTemplate for variable substitution
PromptTemplate template = new PromptTemplate(promptTemplate);
template.add("weekPlan", weekPlan);
template.add("personalDaily", personalDaily);
String prompt = template.render();
// Call the model
String content = chatClient.prompt()
.user(prompt)
.call()
.content();
// Clean the output to make sure it is plain JSON
String cleanJson = cleanJsonOutput(content);
// Store the result in the state
return Map.of("weekPlanAndPersonalDaily", cleanJson);
}
/**
* Clean the JSON output and strip any extra surrounding text
*/
private String cleanJsonOutput(String rawOutput) {
if (rawOutput == null) {
return "{}";
}
String cleaned = rawOutput.trim();
// Strip code-fence markers
cleaned = cleaned.replaceAll("```json", "").replaceAll("```", "");
// Keep only the content between the first '{' and the last '}'
int startIndex = cleaned.indexOf('{');
int endIndex = cleaned.lastIndexOf('}');
if (startIndex >= 0 && endIndex > startIndex) {
cleaned = cleaned.substring(startIndex, endIndex + 1);
}
// Lightweight check that the result looks like JSON
try {
// Simple shape check only; this does not guarantee valid JSON
if (cleaned.startsWith("{") && cleaned.endsWith("}")) {
return cleaned;
}
} catch (Exception e) {
// Malformed; fall through and return the cleaned content as-is
}
return cleaned;
}
}
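
The startsWith/endsWith check above only confirms the outer braces, so malformed content can still slip through. If stricter validation is wanted, one option (a sketch, not part of the commit) is to let Jackson actually parse the candidate string:

import com.fasterxml.jackson.databind.ObjectMapper;

public class JsonCleanupDemo {
    private static final ObjectMapper MAPPER = new ObjectMapper();

    /** Returns the cleaned string only if Jackson can parse it, otherwise "{}". */
    static String cleanStrict(String rawOutput) {
        if (rawOutput == null) return "{}";
        String cleaned = rawOutput.trim().replace("```json", "").replace("```", "");
        int start = cleaned.indexOf('{');
        int end = cleaned.lastIndexOf('}');
        if (start >= 0 && end > start) {
            cleaned = cleaned.substring(start, end + 1);
        }
        try {
            MAPPER.readTree(cleaned); // throws if the content is not valid JSON
            return cleaned;
        } catch (Exception e) {
            return "{}";
        }
    }

    public static void main(String[] args) {
        System.out.println(cleanStrict("```json\n{\"weekPlan\":{}}\n```")); // {"weekPlan":{}}
        System.out.println(cleanStrict("not json at all"));                 // {}
    }
}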

83
chenhai-ai/src/main/java/com/chenhai/chenhaiai/node/WeekPlanAnalysisNode.java

@ -0,0 +1,83 @@
package com.chenhai.chenhaiai.node;
import com.alibaba.cloud.ai.graph.OverAllState;
import com.alibaba.cloud.ai.graph.action.NodeAction;
import org.springframework.ai.chat.client.ChatClient;
import org.springframework.ai.chat.prompt.PromptTemplate;
import java.util.Map;
/**
* @author : mazhongxu
* @date : 2025-11-28 16:09
* @modified By :
*/
public class WeekPlanAnalysisNode implements NodeAction {
private final ChatClient chatClient;
public WeekPlanAnalysisNode(ChatClient.Builder builder) {
this.chatClient = builder.build();
}
@Override
public Map<String, Object> apply(OverAllState state) throws Exception {
// Read the association result from the graph state
String dataAssociationResult = state.value("dataAssociationResult", "");
String optimizedPrompt = """
你是一个专业的项目管理分析师请基于提供的周计划和日报关联数据进行工作效率分析
## 输入数据
{dataAssociationResult}
## 分析要求
请按照以下结构化格式输出分析结果
### 📊 核心指标概览
- 总体完成率: [计算具体百分比]
- 总工时对比: 计划[数字]h vs 实际[数字]h
- 成员工作量分布: [简要描述]
### 👥 团队效能分析
**成员表现排名:**
1. [姓名] - [任务数], [工时]h
2. [姓名] - [任务数], [工时]h
**负荷均衡度:** [均衡/需要优化]
### 风险与问题
- 进度风险: [具体描述]
- 资源问题: [具体描述]
- 其他风险: [具体描述]
### 💡 改进建议
**短期建议 (本周):**
- [具体可执行建议1]
- [具体可执行建议2]
**长期优化:**
- [战略性建议1]
- [战略性建议2]
## 输出要求
- 使用具体数据支撑每个结论
- 避免空洞描述提供可量化指标
- 建议要具体可执行
- 风险描述要明确具体
""";
PromptTemplate promptTemplate = new PromptTemplate(optimizedPrompt);
promptTemplate.add("dataAssociationResult", dataAssociationResult);
String prompt = promptTemplate.render();
// Call the model
String content = chatClient.prompt()
.user(prompt)
.call()
.content();
// Store the result in the state
return Map.of("result", content);
}
}

139
chenhai-ai/src/main/java/com/chenhai/chenhaiai/node/weekPlan/Analysis.java

@ -0,0 +1,139 @@
package com.chenhai.chenhaiai.node.weekPlan;
import com.alibaba.cloud.ai.graph.OverAllState;
import com.alibaba.cloud.ai.graph.action.NodeAction;
import com.chenhai.chenhaiai.entity.WeekPlanResponse;
import org.springframework.ai.chat.client.ChatClient;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.util.Map;
/**
* @author : mazhongxu
* @date : 2025-12-05 17:58
*/
public class Analysis implements NodeAction {
private final ChatClient chatClient;
private final ObjectMapper objectMapper;
public Analysis(ChatClient chatClient) {
this.chatClient = chatClient;
this.objectMapper = new ObjectMapper();
}
@Override
public Map<String, Object> apply(OverAllState state) throws Exception {
// Read the input data from the graph state
WeekPlanResponse weekPlanResponse = state.value("weekPlanResponse", new WeekPlanResponse());
// Serialize the object to a JSON string
String jsonData;
try {
jsonData = objectMapper.writeValueAsString(weekPlanResponse);
} catch (Exception e) {
jsonData = "数据序列化失败,请检查WeekPlanResponse结构: " + e.getMessage();
}
// Prompt
String prompt = """
请作为各部门工作效能分析师基于以下完整的周度工作数据进行分析
数据格式为weekPlanResponse对象包含planDetails周计划和dailyPapers日报
原始数据
""" + jsonData + """
分析要求
请按以下6个模块顺序进行流式分析输出每个模块用---分隔
---
[模块1: 核心数据快照]
分析周期{请从数据中提取周次信息}
部门{请从数据中提取部门名称}
计划任务数{请计算planDetails数量}
日报记录数{请计算dailyPapers数量}
参与人员{请从数据中提取所有参与人员姓名}
---
---
[模块2: 计划完成度分析]
**统计结果**
已完成/有实质进展{请分析并统计已完成项}
🔄 进行中但未完成{请分析并统计进行中项}
计划但无进展{请分析并统计无进展项}
📋 完全未开始{请分析并统计未开始项}
**完成率计算**{请计算(已完成+进行中)/总数百分比}%
**关键发现**{请简要说明最突出的计划执行问题}
---
---
[模块3: 人员工作负荷分析]
**工时统计**
{请从dailyPapers统计每人总工时}
**角色与效能评估**
{请为每个成员分析主要工作领域计划内外工时比效能状态}
**负荷评级**{请基于人均日工时评估//}
---
---
[模块4: 工作重心与干扰分析]
**本周实际工作重心**
1. {请基于日报归纳的第一重心}
2. {请基于日报归纳的第二重心}
**主要干扰源**
{请识别最主要的计划外工作类型}
{请识别最耗时的非开发活动}
**计划vs实际对比**{请说明计划任务与实际投入的匹配程度}
---
---
[模块5: 风险识别与协作评估]
**技术风险**
{请从planDetails的note字段识别技术风险}
**管理风险**
{请分析跨部门需求处理流程问题}
{请分析历史项目维护对计划的冲击}
**协作亮点**{请从日报中发现的有效协作模式}
---
---
[模块6: 改进建议与下周关注]
**核心结论**{请用一句话总结本周效能核心问题}
**具体建议**
🔥 立即行动{请提出一项可快速执行的具体改进}
📈 本周优化{请提出一项需要协调的流程优化}
📅 长期规划{请提出一项战略性建议}
**下周重点关注**{请基于本周进展提出下周需特别关注的任务}
---
输出规则
1. 严格按6个模块顺序输出每个模块以---开始和结束
2. 每个模块内容控制在3-5行便于流式显示
3. 关键数据用**加粗****完成率45%**
4. 使用简洁的项目符号或数字列表
5. 所有{...}占位符请替换为实际分析结果
6. 分析要基于具体数据避免空泛描述
7. 如果数据不足或缺失请说明"数据不足,无法分析"
""";
// Call the model
String result = chatClient.prompt()
.user(prompt)
.call()
.content();
System.out.println("==========================result:==================================\n" + result);
// Store the result in the state
return Map.of("result", result);
}
}

148
chenhai-ai/src/main/java/com/chenhai/chenhaiai/node/weekPlan/AnalysisStreamNode.java

@ -0,0 +1,148 @@
package com.chenhai.chenhaiai.node.weekPlan;
import com.alibaba.cloud.ai.graph.GraphResponse;
import com.alibaba.cloud.ai.graph.OverAllState;
import com.alibaba.cloud.ai.graph.action.NodeAction;
import com.alibaba.cloud.ai.graph.streaming.StreamingOutput;
import com.chenhai.chenhaiai.entity.WeekPlanResponse;
import org.springframework.ai.chat.client.ChatClient;
import org.springframework.ai.chat.model.ChatResponse;
import com.fasterxml.jackson.databind.ObjectMapper;
import reactor.core.publisher.Flux;
import java.util.Map;
/**
* Streaming analysis node - follows the same pattern as TranslateNode
*/
public class AnalysisStreamNode implements NodeAction {
private final ChatClient chatClient;
private final ObjectMapper objectMapper;
public AnalysisStreamNode(ChatClient chatClient) {
this.chatClient = chatClient;
this.objectMapper = new ObjectMapper();
}
@Override
public Map<String, Object> apply(OverAllState state) {
// 从state中获取输入数据
WeekPlanResponse weekPlanResponse = state.value("weekPlanResponse", new WeekPlanResponse());
// 将对象转为JSON字符串
String jsonData;
try {
jsonData = objectMapper.writeValueAsString(weekPlanResponse);
} catch (Exception e) {
jsonData = "数据序列化失败: " + e.getMessage();
}
// Prompt: same structure as Analysis, slightly simplified
String prompt = """
请作为各部门工作效能分析师基于以下完整的周度工作数据进行分析
数据格式为weekPlanResponse对象包含planDetails周计划和dailyPapers日报
原始数据
""" + jsonData + """
分析要求
请按以下6个模块顺序进行流式分析输出每个模块用---分隔
---
[模块1: 核心数据快照]
分析周期{请从数据中提取周次信息}
部门{请从数据中提取部门名称}
计划任务数{请计算planDetails数量}
日报记录数{请计算dailyPapers数量}
参与人员{请从数据中提取所有参与人员姓名}
---
---
[模块2: 计划完成度分析]
**统计结果**
已完成/有实质进展{请分析并统计已完成项}
🔄 进行中但未完成{请分析并统计进行中项}
计划但无进展{请分析并统计无进展项}
📋 完全未开始{请分析并统计未开始项}
**完成率计算**{请计算(已完成+进行中)/总数百分比}%
**关键发现**{请简要说明最突出的计划执行问题}
---
---
[模块3: 人员工作负荷分析]
**工时统计**
{请从dailyPapers统计每人总工时}
**角色与效能评估**
{请为每个成员分析主要工作领域计划内外工时比效能状态}
**负荷评级**{请基于人均日工时评估//}
---
---
[模块4: 工作重心与干扰分析]
**本周实际工作重心**
1. {请基于日报归纳的第一重心}
2. {请基于日报归纳的第二重心}
**主要干扰源**
{请识别最主要的计划外工作类型}
{请识别最耗时的非开发活动}
**计划vs实际对比**{请说明计划任务与实际投入的匹配程度}
---
---
[模块5: 风险识别与协作评估]
**技术风险**
{请从planDetails的note字段识别技术风险}
**管理风险**
{请分析跨部门需求处理流程问题}
{请分析历史项目维护对计划的冲击}
**协作亮点**{请从日报中发现的有效协作模式}
---
---
[模块6: 改进建议与下周关注]
**核心结论**{请用一句话总结本周效能核心问题}
**具体建议**
🔥 立即行动{请提出一项可快速执行的具体改进}
📈 本周优化{请提出一项需要协调的流程优化}
📅 长期规划{请提出一项战略性建议}
**下周重点关注**{请基于本周进展提出下周需特别关注的任务}
---
输出规则
1. 严格按6个模块顺序输出每个模块以---开始和结束
2. 每个模块内容控制在3-5行便于流式显示
3. 关键数据用**加粗****完成率45%**
4. 使用简洁的项目符号或数字列表
5. 所有{...}占位符请替换为实际分析结果
6. 分析要基于具体数据避免空泛描述
7. 如果数据不足或缺失请说明"数据不足,无法分析"
""";
try {
// Call the model and obtain a Flux<String>
Flux<String> chatResponseFlux = this.chatClient.prompt()
.user(prompt)
.stream()
.content();
// With graph version 1.0.0.0 the Flux either has to be converted to an AsyncGenerator,
// or returned directly so the framework handles the streaming
return Map.of("analysis_result", chatResponseFlux);
} catch (Exception e) {
// Error handling: surface the failure as a single-element stream
return Map.of("analysis_result", Flux.just("分析失败: " + e.getMessage()));
}
}
}

69
chenhai-ai/src/main/java/com/chenhai/chenhaiai/node/weekPlan/jdbc/DailyPaperJdbcNode.java

@ -0,0 +1,69 @@
package com.chenhai.chenhaiai.node.weekPlan.jdbc;
import com.alibaba.cloud.ai.graph.OverAllState;
import com.alibaba.cloud.ai.graph.action.NodeAction;
import com.chenhai.chenhaiai.entity.DailyPaper;
import com.chenhai.chenhaiai.entity.WeekPlanResponse;
import com.chenhai.chenhaiai.utils.ProgressEmitter;
import org.springframework.jdbc.core.BeanPropertyRowMapper;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.datasource.DriverManagerDataSource;
import java.util.List;
import java.util.Map;
public class DailyPaperJdbcNode implements NodeAction {
private final JdbcTemplate jdbcTemplate;
private final ProgressEmitter progressEmitter;
public DailyPaperJdbcNode(ProgressEmitter progressEmitter) {
this.progressEmitter = progressEmitter;
// Database connection parameters are configured directly in code
String url = "jdbc:mysql://172.16.1.121:3306/erp?useUnicode=true&characterEncoding=utf8&serverTimezone=Asia/Shanghai";
String username = "erperp";
String password = "HeAmK7TBTMDcerpj2";
DriverManagerDataSource dataSource = new DriverManagerDataSource();
dataSource.setDriverClassName("com.mysql.cj.jdbc.Driver");
dataSource.setUrl(url);
dataSource.setUsername(username);
dataSource.setPassword(password);
this.jdbcTemplate = new JdbcTemplate(dataSource);
}
@Override
public Map<String, Object> apply(OverAllState state) throws Exception {
WeekPlanResponse weekPlanResponse = state.value("weekPlanResponse", new WeekPlanResponse());
progressEmitter.emitProgress("DailyPaperJdbcNode", "正在查询日报记录...");
// 直接查询
String sql = """
SELECT
project_name as projectName,
content,
DATE_FORMAT(daily_paper_date, '%Y-%m-%d') as dailyPaperDate,
daily_paper_hour as dailyPaperHour
FROM ch_rb_urecord
WHERE dept_id = ?
AND daily_paper_date >= ?
AND daily_paper_date <= ?
ORDER BY daily_paper_date
""";
List<DailyPaper> dailyPapers = jdbcTemplate.query(sql,
new BeanPropertyRowMapper<>(DailyPaper.class),
weekPlanResponse.getDept().getDeptId(),
weekPlanResponse.getPlanMain().getWeekStartDate().substring(0, 10),
weekPlanResponse.getPlanMain().getWeekEndDate().substring(0, 10)
);
progressEmitter.emitProgress("DailyPaperJdbcNode", "查询到 " + dailyPapers.size() + " 条日报记录");
System.out.println("查询到 " + dailyPapers.size() + " 条日报记录");
weekPlanResponse.setDailyPapers(dailyPapers);
return Map.of("weekPlanResponse", weekPlanResponse);
}
}
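
The same DriverManagerDataSource setup, including plain-text credentials, is repeated in DeptJdbcNode, UserJdbcNode, WeekPlanDetailJdbcNode and WeekPlanMainJdbcNode below. One possible refactor is a shared factory that reads the connection settings from the environment instead of hardcoding them; this is only a sketch, and the ERP_DB_* variable names are assumptions, not part of the commit:

import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.datasource.DriverManagerDataSource;

public final class ErpJdbcTemplateFactory {
    private ErpJdbcTemplateFactory() {}

    /** Builds the shared JdbcTemplate; the ERP_DB_* environment variables are hypothetical names. */
    public static JdbcTemplate create() {
        DriverManagerDataSource dataSource = new DriverManagerDataSource();
        dataSource.setDriverClassName("com.mysql.cj.jdbc.Driver");
        dataSource.setUrl(System.getenv().getOrDefault("ERP_DB_URL",
                "jdbc:mysql://localhost:3306/erp?useUnicode=true&characterEncoding=utf8&serverTimezone=Asia/Shanghai"));
        dataSource.setUsername(System.getenv().getOrDefault("ERP_DB_USER", "erp"));
        dataSource.setPassword(System.getenv().getOrDefault("ERP_DB_PASSWORD", ""));
        return new JdbcTemplate(dataSource);
    }
}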

68
chenhai-ai/src/main/java/com/chenhai/chenhaiai/node/weekPlan/jdbc/DeptJdbcNode.java

@ -0,0 +1,68 @@
package com.chenhai.chenhaiai.node.weekPlan.jdbc;
import com.alibaba.cloud.ai.graph.OverAllState;
import com.alibaba.cloud.ai.graph.action.NodeAction;
import com.chenhai.chenhaiai.entity.DailyPaper;
import com.chenhai.chenhaiai.entity.Dept;
import com.chenhai.chenhaiai.entity.WeekPlanResponse;
import com.chenhai.chenhaiai.utils.ProgressEmitter;
import org.springframework.ai.chat.client.ChatClient;
import org.springframework.jdbc.core.BeanPropertyRowMapper;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.datasource.DriverManagerDataSource;
import java.util.List;
import java.util.Map;
/**
* @author : mazhongxu
* @date : 2025-12-04 19:06
* @modified By :
*/
public class DeptJdbcNode implements NodeAction {
private final JdbcTemplate jdbcTemplate;
private final ProgressEmitter progressEmitter;
public DeptJdbcNode(ProgressEmitter progressEmitter) {
this.progressEmitter = progressEmitter;
// Database connection parameters are configured directly in code
String url = "jdbc:mysql://172.16.1.121:3306/erp?useUnicode=true&characterEncoding=utf8&serverTimezone=Asia/Shanghai";
String username = "erperp";
String password = "HeAmK7TBTMDcerpj2";
DriverManagerDataSource dataSource = new DriverManagerDataSource();
dataSource.setDriverClassName("com.mysql.cj.jdbc.Driver");
dataSource.setUrl(url);
dataSource.setUsername(username);
dataSource.setPassword(password);
this.jdbcTemplate = new JdbcTemplate(dataSource);
}
@Override
public Map<String, Object> apply(OverAllState state) throws Exception {
WeekPlanResponse weekPlanResponse = state.value("weekPlanResponse", new WeekPlanResponse());
// 实时推送进度 - 在执行前
progressEmitter.emitProgress("DeptJdbcNode", "开始查询部门数据: " + weekPlanResponse.getDeptName());
// 直接查询
String sql = """
select dept_id,dept_name from sys_dept where dept_name = ?
""";
Dept dept = jdbcTemplate.queryForObject(sql,
new BeanPropertyRowMapper<>(Dept.class),
weekPlanResponse.getDeptName()
);
// 查询完成实时推送
progressEmitter.emitProgress("DeptJdbcNode",
"✅ 查询到部门: " + dept.getDeptName() + " (ID: " + dept.getDeptId() + ")");
System.out.println("查询到 " + dept.getDeptName() + " 的部门数据");
weekPlanResponse.setDept(dept);
return Map.of("weekPlanResponse", weekPlanResponse);
}
}

128
chenhai-ai/src/main/java/com/chenhai/chenhaiai/node/weekPlan/jdbc/GitAnalysisNode.java

@ -0,0 +1,128 @@
package com.chenhai.chenhaiai.node.weekPlan.jdbc;
import com.alibaba.cloud.ai.graph.OverAllState;
import com.alibaba.cloud.ai.graph.action.NodeAction;
import com.chenhai.chenhaiai.entity.WeekPlanResponse;
import com.chenhai.chenhaiai.entity.git.GitAnalysisData;
import com.chenhai.chenhaiai.service.GiteaAnalysisService;
import com.chenhai.chenhaiai.utils.ProgressEmitter;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.LocalTime;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.Map;
import java.util.concurrent.CompletableFuture;
public class GitAnalysisNode implements NodeAction {
private final ProgressEmitter progressEmitter;
private final GiteaAnalysisService giteaAnalysisService;
public GitAnalysisNode(ProgressEmitter progressEmitter, GiteaAnalysisService giteaAnalysisService) {
this.progressEmitter = progressEmitter;
this.giteaAnalysisService = giteaAnalysisService;
}
@Override
public Map<String, Object> apply(OverAllState state) throws Exception {
WeekPlanResponse weekPlanResponse = state.value("weekPlanResponse", new WeekPlanResponse());
Boolean isResearchDept = state.value("isResearchDept", false);
if (!Boolean.TRUE.equals(isResearchDept)) {
progressEmitter.emitProgress("GitAnalysisNode", "⏭️ 非研发部,跳过Git分析");
return Map.of("weekPlanResponse", weekPlanResponse);
}
// 实时推送各个阶段
progressEmitter.emitProgress("GitAnalysisNode", "💾 开始Git分析...");
progressEmitter.emitProgress("GitAnalysisNode", "📡 连接Gitea服务...");
String weekStartDate = weekPlanResponse.getPlanMain().getWeekStartDate();
String weekEndDate = weekPlanResponse.getPlanMain().getWeekEndDate();
// Convert the dates to ISO date-time strings
String since = convertToIsoDateTime(weekStartDate, true);
String until = convertToIsoDateTime(weekEndDate, false);
progressEmitter.emitProgress("GitAnalysisNode", "🔍 查询Git提交: " + weekStartDate + " 至 " + weekEndDate);
try {
// 同步获取Git分析结果阻塞等待
progressEmitter.emitProgress("GitAnalysisNode", "⏳ 正在分析Git提交数据...");
// 调用异步方法并同步等待结果
GitAnalysisData gitAnalysisData = giteaAnalysisService.analyzeGitDataAsync(since, until).get();
// 设置到响应中
weekPlanResponse.setGitAnalysis(gitAnalysisData);
progressEmitter.emitProgress("GitAnalysisNode", "✅ Git分析完成");
} catch (Exception e) {
progressEmitter.emitProgress("GitAnalysisNode", "❌ Git分析失败: " + e.getMessage());
// 创建一个包含错误信息的GitAnalysisData
GitAnalysisData errorData = new GitAnalysisData();
errorData.setBasicInfo(new com.chenhai.chenhaiai.entity.git.BasicInfo(
since + " 至 " + until,
0,
0,
0,
0,
0,
"分析失败: " + e.getMessage()
));
errorData.setGeneratedTime(LocalDateTime.now().format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")));
weekPlanResponse.setGitAnalysis(errorData);
}
return Map.of("weekPlanResponse", weekPlanResponse);
}
private String convertToIsoDateTime(String dateStr, boolean isStart) {
try {
// Simplified handling: assumes the date format is yyyy-MM-dd or yyyy-MM-dd HH:mm:ss
String datePart = dateStr.trim();
if (datePart.contains(" ")) {
datePart = datePart.substring(0, 10);
}
// 解析日期
LocalDate date = LocalDate.parse(
datePart,
DateTimeFormatter.ISO_DATE
);
// 设置时间
LocalDateTime dateTime;
if (isStart) {
dateTime = date.atTime(LocalTime.MIN); // 00:00:00
} else {
dateTime = date.atTime(LocalTime.MAX); // 23:59:59.999999999
}
// 转换为带时区的ISO格式
return dateTime.atZone(ZoneId.systemDefault())
.format(DateTimeFormatter.ISO_OFFSET_DATE_TIME);
} catch (Exception e) {
// On failure, fall back to a sensible default: last week's date range
progressEmitter.emitProgress("GitAnalysisNode", "⚠️ 日期转换失败,使用上周时间范围: " + e.getMessage());
LocalDate lastMonday = LocalDate.now()
.minusWeeks(1)
.with(java.time.DayOfWeek.MONDAY);
LocalDateTime result;
if (isStart) {
result = lastMonday.atStartOfDay();
} else {
result = lastMonday.plusDays(6).atTime(LocalTime.MAX);
}
return result.atZone(ZoneId.systemDefault())
.format(DateTimeFormatter.ISO_OFFSET_DATE_TIME);
}
}
}
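
For reference, convertToIsoDateTime turns a plain yyyy-MM-dd value into the zoned ISO timestamps passed to the Gitea API as since/until. A standalone sketch of the same transformation (the printed offset depends on the system time zone):

import java.time.LocalDate;
import java.time.LocalTime;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;

public class IsoRangeDemo {
    public static void main(String[] args) {
        LocalDate date = LocalDate.parse("2025-12-01", DateTimeFormatter.ISO_DATE);
        // Start of day, e.g. 2025-12-01T00:00:00+08:00 on a CST machine
        String since = date.atTime(LocalTime.MIN).atZone(ZoneId.systemDefault())
                .format(DateTimeFormatter.ISO_OFFSET_DATE_TIME);
        // End of day, e.g. 2025-12-01T23:59:59.999999999+08:00
        String until = date.atTime(LocalTime.MAX).atZone(ZoneId.systemDefault())
                .format(DateTimeFormatter.ISO_OFFSET_DATE_TIME);
        System.out.println(since + " -> " + until);
    }
}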

66
chenhai-ai/src/main/java/com/chenhai/chenhaiai/node/weekPlan/jdbc/UserJdbcNode.java

@ -0,0 +1,66 @@
package com.chenhai.chenhaiai.node.weekPlan.jdbc;
import com.alibaba.cloud.ai.graph.OverAllState;
import com.alibaba.cloud.ai.graph.action.NodeAction;
import com.chenhai.chenhaiai.entity.UserInfo;
import com.chenhai.chenhaiai.entity.WeekPlanResponse;
import com.chenhai.chenhaiai.utils.ProgressEmitter;
import org.springframework.jdbc.core.BeanPropertyRowMapper;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.datasource.DriverManagerDataSource;
import java.util.List;
import java.util.Map;
/**
* @author : mazhongxu
* @date : 2025-12-04 19:06
* @modified By :
*/
public class UserJdbcNode implements NodeAction {
private final JdbcTemplate jdbcTemplate;
private final ProgressEmitter progressEmitter;
public UserJdbcNode(ProgressEmitter progressEmitter) {
this.progressEmitter = progressEmitter;
// Database connection parameters are configured directly in code
String url = "jdbc:mysql://172.16.1.121:3306/erp?useUnicode=true&characterEncoding=utf8&serverTimezone=Asia/Shanghai";
String username = "erperp";
String password = "HeAmK7TBTMDcerpj2";
DriverManagerDataSource dataSource = new DriverManagerDataSource();
dataSource.setDriverClassName("com.mysql.cj.jdbc.Driver");
dataSource.setUrl(url);
dataSource.setUsername(username);
dataSource.setPassword(password);
this.jdbcTemplate = new JdbcTemplate(dataSource);
}
@Override
public Map<String, Object> apply(OverAllState state) throws Exception {
WeekPlanResponse weekPlanResponse = state.value("weekPlanResponse", new WeekPlanResponse());
Long deptId = weekPlanResponse.getDept().getDeptId();
// 实时推送
progressEmitter.emitProgress("UserJdbcNode", "开始查询部门人员,部门ID: " + deptId);
// 直接查询
String sql = """
select user_id, user_name from sys_user where dept_id = ?
""";
List<UserInfo> userInfoList = jdbcTemplate.query(sql,
new BeanPropertyRowMapper<>(UserInfo.class),
deptId
);
// 实时推送完成
progressEmitter.emitProgress("UserJdbcNode", "✅ 查询到 " + userInfoList.size() + " 个用户数据");
System.out.println("查询到 " + userInfoList.size() + " 个用户数据");
weekPlanResponse.setUserInfos(userInfoList);
return Map.of("weekPlanResponse", weekPlanResponse);
}
}

67
chenhai-ai/src/main/java/com/chenhai/chenhaiai/node/weekPlan/jdbc/WeekPlanDetailJdbcNode.java

@ -0,0 +1,67 @@
package com.chenhai.chenhaiai.node.weekPlan.jdbc;
import com.alibaba.cloud.ai.graph.OverAllState;
import com.alibaba.cloud.ai.graph.action.NodeAction;
import com.chenhai.chenhaiai.entity.UserInfo;
import com.chenhai.chenhaiai.entity.WeekPlanDetail;
import com.chenhai.chenhaiai.entity.WeekPlanResponse;
import com.chenhai.chenhaiai.utils.ProgressEmitter;
import org.springframework.ai.chat.client.ChatClient;
import org.springframework.core.ParameterizedTypeReference;
import org.springframework.jdbc.core.BeanPropertyRowMapper;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.datasource.DriverManagerDataSource;
import java.util.List;
import java.util.Map;
/**
* @author : mazhongxu
* @date : 2025-12-04 19:06
* @modified By :
*/
public class WeekPlanDetailJdbcNode implements NodeAction {
private final JdbcTemplate jdbcTemplate;
private final ProgressEmitter progressEmitter;
public WeekPlanDetailJdbcNode(ProgressEmitter progressEmitter) {
this.progressEmitter = progressEmitter;
// Database connection parameters are configured directly in code
String url = "jdbc:mysql://172.16.1.121:3306/erp?useUnicode=true&characterEncoding=utf8&serverTimezone=Asia/Shanghai";
String username = "erperp";
String password = "HeAmK7TBTMDcerpj2";
DriverManagerDataSource dataSource = new DriverManagerDataSource();
dataSource.setDriverClassName("com.mysql.cj.jdbc.Driver");
dataSource.setUrl(url);
dataSource.setUsername(username);
dataSource.setPassword(password);
this.jdbcTemplate = new JdbcTemplate(dataSource);
}
@Override
public Map<String, Object> apply(OverAllState state) throws Exception {
WeekPlanResponse weekPlanResponse = state.value("weekPlanResponse", new WeekPlanResponse());
Long mainId = weekPlanResponse.getPlanMain().getId();
progressEmitter.emitProgress("WeekPlanDetailJdbcNode", "正在查询周计划详情数据...");
// 直接查询
String sql = """
select project_name, content, developer, supervise_status, note, project_note from ch_week_project where main_id = ?
""";
List<WeekPlanDetail> weekPlanDetailList = jdbcTemplate.query(sql,
new BeanPropertyRowMapper<>(WeekPlanDetail.class),
mainId
);
progressEmitter.emitProgress("WeekPlanDetailJdbcNode", "查询到 " + weekPlanDetailList.size() + " 个周计划详情数据");
System.out.println("查询到 " + weekPlanDetailList.size() + " 个周计划详情数据");
weekPlanResponse.setPlanDetails(weekPlanDetailList);
return Map.of("weekPlanResponse", weekPlanResponse);
}
}

75
chenhai-ai/src/main/java/com/chenhai/chenhaiai/node/weekPlan/jdbc/WeekPlanMainJdbcNode.java

@ -0,0 +1,75 @@
package com.chenhai.chenhaiai.node.weekPlan.jdbc;
import com.alibaba.cloud.ai.graph.OverAllState;
import com.alibaba.cloud.ai.graph.action.NodeAction;
import com.chenhai.chenhaiai.entity.Dept;
import com.chenhai.chenhaiai.entity.WeekPlanMain;
import com.chenhai.chenhaiai.entity.WeekPlanResponse;
import com.chenhai.chenhaiai.utils.ProgressEmitter;
import org.springframework.ai.chat.client.ChatClient;
import org.springframework.jdbc.core.BeanPropertyRowMapper;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.datasource.DriverManagerDataSource;
import java.util.Map;
/**
* @author : mazhongxu
* @date : 2025-12-04 19:06
* @modified By :
*/
public class WeekPlanMainJdbcNode implements NodeAction {
private final JdbcTemplate jdbcTemplate;
private final ProgressEmitter progressEmitter;
public WeekPlanMainJdbcNode(ProgressEmitter progressEmitter) {
this.progressEmitter = progressEmitter;
// Database connection parameters are configured directly in code
String url = "jdbc:mysql://172.16.1.121:3306/erp?useUnicode=true&characterEncoding=utf8&serverTimezone=Asia/Shanghai";
String username = "erperp";
String password = "HeAmK7TBTMDcerpj2";
DriverManagerDataSource dataSource = new DriverManagerDataSource();
dataSource.setDriverClassName("com.mysql.cj.jdbc.Driver");
dataSource.setUrl(url);
dataSource.setUsername(username);
dataSource.setPassword(password);
this.jdbcTemplate = new JdbcTemplate(dataSource);
}
@Override
public Map<String, Object> apply(OverAllState state) throws Exception {
WeekPlanResponse weekPlanResponse = state.value("weekPlanResponse", new WeekPlanResponse());
String deptName = weekPlanResponse.getDeptName();
String weekDisplay = weekPlanResponse.getWeekDisplay();
progressEmitter.emitProgress("WeekPlanMainJdbcNode", "正在查询周计划数据是否存在...");
// 直接查询
String sql = """
SELECT id, dept_name, week_display, week_start_date, week_end_date
FROM ch_week_plan
WHERE dept_name = ? AND week_display = ?
""";
WeekPlanMain weekPlanMain = jdbcTemplate.queryForObject(sql,
new BeanPropertyRowMapper<>(WeekPlanMain.class),
deptName,
weekDisplay
);
progressEmitter.emitProgress("WeekPlanMainJdbcNode", "查询到 " + weekPlanMain.getDeptName() + " 的周计划数据");
System.out.println("查询到 " + weekPlanMain.getDeptName() + " 的周计划数据");
weekPlanResponse.setPlanMain(weekPlanMain);
// Determine whether this is the R&D department ("研发部"); the flag controls the Git analysis branch
boolean isResearchDept = "研发部".equals(deptName);
return Map.of(
"weekPlanResponse", weekPlanResponse,
"isResearchDept", isResearchDept // return a plain boolean rather than an object
);
}
}

68
chenhai-ai/src/main/java/com/chenhai/chenhaiai/node/weekPlan/mcp/DailyPaperNode.java

@ -0,0 +1,68 @@
package com.chenhai.chenhaiai.node.weekPlan.mcp;
import com.alibaba.cloud.ai.graph.OverAllState;
import com.alibaba.cloud.ai.graph.action.NodeAction;
import com.chenhai.chenhaiai.entity.DailyPaper;
import com.chenhai.chenhaiai.entity.WeekPlanMain;
import com.chenhai.chenhaiai.entity.WeekPlanResponse;
import org.springframework.ai.chat.client.ChatClient;
import org.springframework.ai.chat.prompt.PromptTemplate;
import org.springframework.ai.tool.ToolCallback;
import org.springframework.ai.tool.ToolCallbackProvider;
import org.springframework.ai.zhipuai.ZhiPuAiChatOptions;
import org.springframework.core.ParameterizedTypeReference;
import java.time.LocalDate;
import java.util.List;
import java.util.Map;
/**
* @author : mazhongxu
* @date : 2025-12-04 19:06
* @modified By :
*/
public class DailyPaperNode implements NodeAction {
private final ChatClient chatClient;
public DailyPaperNode(ChatClient chatClient) {
this.chatClient = chatClient;
}
@Override
public Map<String, Object> apply(OverAllState state) throws Exception {
// 从state 中获取 输入数据
WeekPlanResponse weekPlanResponse = state.value("weekPlanResponse", new WeekPlanResponse());
Long deptId = weekPlanResponse.getDept().getDeptId();
String weekStartDate = weekPlanResponse.getPlanMain().getWeekStartDate().substring(0, 10);
String weekEndDate = weekPlanResponse.getPlanMain().getWeekEndDate().substring(0, 10);
// 提示词
String prompt = """
请使用可用的工具执行以下SQL查询
SELECT project_name, content, daily_paper_date, daily_paper_hour
FROM ch_rb_urecord
WHERE dept_id = %d
AND daily_paper_date >= '%s'
AND daily_paper_date <= '%s'
**重要**请确保返回完整的数据如果数据量大请分批处理但最终必须返回全部数据
""".formatted(deptId, weekStartDate, weekEndDate);
ZhiPuAiChatOptions chatOptions = ZhiPuAiChatOptions.builder()
.maxTokens(15536)
.model("glm-4.5")
.build();
// 模型调用
List<DailyPaper> dailyPaperList = chatClient.prompt()
.user(prompt)
.options(chatOptions)
.call()
.entity(new ParameterizedTypeReference<List<DailyPaper>>() {});
weekPlanResponse.setDailyPapers(dailyPaperList);
// 把结果存入 state
return Map.of("weekPlanResponse", weekPlanResponse);
}
}

51
chenhai-ai/src/main/java/com/chenhai/chenhaiai/node/weekPlan/mcp/DeptNode.java

@ -0,0 +1,51 @@
package com.chenhai.chenhaiai.node.weekPlan.mcp;
import com.alibaba.cloud.ai.graph.OverAllState;
import com.alibaba.cloud.ai.graph.action.NodeAction;
import com.chenhai.chenhaiai.entity.Dept;
import com.chenhai.chenhaiai.entity.UserInfo;
import com.chenhai.chenhaiai.entity.WeekPlanResponse;
import org.springframework.ai.chat.client.ChatClient;
import org.springframework.ai.chat.prompt.PromptTemplate;
import org.springframework.ai.tool.ToolCallback;
import org.springframework.ai.tool.ToolCallbackProvider;
import org.springframework.core.ParameterizedTypeReference;
import java.util.List;
import java.util.Map;
/**
* @author : mazhongxu
* @date : 2025-12-04 19:06
* @modified By :
*/
public class DeptNode implements NodeAction {
private final ChatClient chatClient;
public DeptNode(ChatClient chatClient) {
this.chatClient = chatClient;
}
@Override
public Map<String, Object> apply(OverAllState state) throws Exception {
// 从state 中获取 输入数据
WeekPlanResponse weekPlanResponse = state.value("weekPlanResponse", new WeekPlanResponse());
String deptName = weekPlanResponse.getDeptName();
// 提示词
String prompt = """
请使用可用的工具查询sys_dept表dept_name=%s的唯一数据只保留字段dept_id,dept_name
""".formatted(deptName);
// 模型调用
Dept dept = chatClient.prompt()
.user(prompt)
.call()
.entity(Dept.class);
weekPlanResponse.setDept(dept);
// 把结果存入 state
return Map.of("weekPlanResponse", weekPlanResponse);
}
}

50
chenhai-ai/src/main/java/com/chenhai/chenhaiai/node/weekPlan/mcp/UserNode.java

@ -0,0 +1,50 @@
package com.chenhai.chenhaiai.node.weekPlan.mcp;
import com.alibaba.cloud.ai.graph.OverAllState;
import com.alibaba.cloud.ai.graph.action.NodeAction;
import com.chenhai.chenhaiai.entity.UserInfo;
import com.chenhai.chenhaiai.entity.WeekPlanResponse;
import org.springframework.ai.chat.client.ChatClient;
import org.springframework.ai.chat.prompt.PromptTemplate;
import org.springframework.ai.tool.ToolCallback;
import org.springframework.ai.tool.ToolCallbackProvider;
import org.springframework.core.ParameterizedTypeReference;
import java.util.List;
import java.util.Map;
/**
* @author : mazhongxu
* @date : 2025-12-04 19:06
* @modified By :
*/
public class UserNode implements NodeAction {
private final ChatClient chatClient;
public UserNode(ChatClient chatClient) {
this.chatClient = chatClient;
}
@Override
public Map<String, Object> apply(OverAllState state) throws Exception {
// 从state 中获取 输入数据
WeekPlanResponse weekPlanResponse = state.value("weekPlanResponse", new WeekPlanResponse());
Long deptId = weekPlanResponse.getDept().getDeptId();
// 提示词
String prompt = """
请使用可用的工具查询sys_user表dept_id=%s的列表数据只保留字段user_id,user_name
""".formatted(deptId);
// 模型调用
List<UserInfo> userInfos = chatClient.prompt()
.user(prompt)
.call()
.entity(new ParameterizedTypeReference<List<UserInfo>>() {});
weekPlanResponse.setUserInfos(userInfos);
// 把结果存入 state
return Map.of("weekPlanResponse", weekPlanResponse);
}
}

50
chenhai-ai/src/main/java/com/chenhai/chenhaiai/node/weekPlan/mcp/WeekPlanDetailNode.java

@ -0,0 +1,50 @@
package com.chenhai.chenhaiai.node.weekPlan.mcp;
import com.alibaba.cloud.ai.graph.OverAllState;
import com.alibaba.cloud.ai.graph.action.NodeAction;
import com.chenhai.chenhaiai.entity.WeekPlanDetail;
import com.chenhai.chenhaiai.entity.WeekPlanResponse;
import org.springframework.ai.chat.client.ChatClient;
import org.springframework.ai.chat.prompt.PromptTemplate;
import org.springframework.core.ParameterizedTypeReference;
import java.util.List;
import java.util.Map;
/**
* @author : mazhongxu
* @date : 2025-12-04 19:06
* @modified By :
*/
public class WeekPlanDetailNode implements NodeAction {
private final ChatClient chatClient;
public WeekPlanDetailNode(ChatClient chatClient) {
this.chatClient = chatClient;
}
@Override
public Map<String, Object> apply(OverAllState state) throws Exception {
// 从state 中获取 输入数据
WeekPlanResponse weekPlanResponse = state.value("weekPlanResponse", new WeekPlanResponse());
Long mainId = weekPlanResponse.getPlanMain().getId();
// 提示词
String prompt = """
请使用可用的工具查询ch_week_project表main_id=%d的所有数据
返回字段project_name, content, developer, supervise_status, note, project_note
请完整返回查询结果
""".formatted(mainId);
// 模型调用
List<WeekPlanDetail> weekPlanDetailList = chatClient.prompt()
.user(prompt)
.call()
.entity(new ParameterizedTypeReference<List<WeekPlanDetail>>() {});
weekPlanResponse.setPlanDetails(weekPlanDetailList);
// 把结果存入 state
return Map.of("weekPlanResponse", weekPlanResponse);
}
}

51
chenhai-ai/src/main/java/com/chenhai/chenhaiai/node/weekPlan/mcp/WeekPlanMainNode.java

@ -0,0 +1,51 @@
package com.chenhai.chenhaiai.node.weekPlan.mcp;
import com.alibaba.cloud.ai.graph.OverAllState;
import com.alibaba.cloud.ai.graph.action.NodeAction;
import com.chenhai.chenhaiai.entity.UserInfo;
import com.chenhai.chenhaiai.entity.WeekPlanMain;
import com.chenhai.chenhaiai.entity.WeekPlanResponse;
import org.springframework.ai.chat.client.ChatClient;
import org.springframework.ai.chat.prompt.PromptTemplate;
import org.springframework.core.ParameterizedTypeReference;
import java.util.List;
import java.util.Map;
/**
* @author : mazhongxu
* @date : 2025-12-04 19:06
* @modified By :
*/
public class WeekPlanMainNode implements NodeAction {
private final ChatClient chatClient;
public WeekPlanMainNode(ChatClient chatClient) {
this.chatClient = chatClient;
}
@Override
public Map<String, Object> apply(OverAllState state) throws Exception {
// 从state 中获取 输入数据
WeekPlanResponse weekPlanResponse = state.value("weekPlanResponse", new WeekPlanResponse());
String deptName = weekPlanResponse.getDeptName();
String weekDisplay = weekPlanResponse.getWeekDisplay();
// 提示词
String prompt = """
请使用可用的工具查询ch_week_plan表dept_name='%s' AND week_display='%s'的唯一数据
只返回字段id, dept_name, week_display, week_start_date, week_end_date
""".formatted(deptName, weekDisplay);
// 模型调用
WeekPlanMain weekPlanMain = chatClient.prompt()
.user(prompt)
.call()
.entity(WeekPlanMain.class);
weekPlanResponse.setPlanMain(weekPlanMain);
// 把结果存入 state
return Map.of("weekPlanResponse", weekPlanResponse);
}
}

117
chenhai-ai/src/main/java/com/chenhai/chenhaiai/service/AnalysisStreamService.java

@ -0,0 +1,117 @@
// AnalysisStreamService.java
package com.chenhai.chenhaiai.service;
import com.chenhai.chenhaiai.entity.WeekPlanResponse;
import com.chenhai.chenhaiai.utils.TextFormatUtils;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.springframework.ai.chat.client.ChatClient;
import org.springframework.stereotype.Service;
import reactor.core.publisher.FluxSink;
import java.util.Map;
import java.util.Optional;
/**
* Streaming analysis service
*/
@Service
public class AnalysisStreamService {
private final ObjectMapper objectMapper = new ObjectMapper();
/**
* Send streaming analysis results
*/
public void sendStreamAnalysis(
FluxSink<String> sink,
String analysisContent,
ChatClient chatClient,
String prompt) {
try {
// 使用 StringBuilder 收集完整响应
StringBuilder fullResponse = new StringBuilder();
chatClient.prompt()
.user(prompt)
.stream()
.content()
.subscribe(
chunk -> {
// 收集所有chunk
fullResponse.append(chunk);
},
error -> {
sink.next(TextFormatUtils.formatMessage("error",
"分析失败: " + error.getMessage()));
sink.complete();
},
() -> {
try {
// After the streaming response completes, split it into modules and send them
String completeResponse = fullResponse.toString();
sendFormattedModules(sink, completeResponse);
} catch (Exception e) {
sink.next(TextFormatUtils.formatMessage("error",
"格式化输出失败: " + e.getMessage()));
sink.complete();
}
}
);
} catch (Exception e) {
sink.next(TextFormatUtils.formatMessage("error", "流式分析失败: " + e.getMessage()));
sink.complete();
}
}
/**
* Send formatted modules
*/
private void sendFormattedModules(FluxSink<String> sink, String analysisContent) {
// 使用工具类分割模块
String[] modules = TextFormatUtils.splitAnalysisModules(analysisContent);
for (String module : modules) {
String trimmedModule = module.trim();
if (!trimmedModule.isEmpty()) {
// 发送完整的模块
sink.next(TextFormatUtils.formatMessage("content", trimmedModule));
// Small delay between modules so the frontend has time to render
try {
Thread.sleep(200);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
break;
}
}
}
// 发送完成消息
sink.next(TextFormatUtils.formatMessage("complete", "分析完成"));
sink.complete();
}
/**
* Prepare the analysis prompt
*/
public String prepareAnalysisPrompt(String promptTemplate, WeekPlanResponse fullData) {
try {
String jsonData = objectMapper.writeValueAsString(fullData);
return promptTemplate.replace("{jsonData}", jsonData);
} catch (Exception e) {
throw new RuntimeException("准备分析提示词失败", e);
}
}
/**
* Build a data overview message
*/
public String getDataSummary(WeekPlanResponse fullData) {
int planCount = fullData.getPlanDetails() != null ? fullData.getPlanDetails().size() : 0;
int dailyCount = fullData.getDailyPapers() != null ? fullData.getDailyPapers().size() : 0;
return String.format("获取到 %d 个计划任务和 %d 条日报记录", planCount, dailyCount);
}
}
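
One possible way to expose sendStreamAnalysis as a reactive stream, for example from a controller, is to wrap it in Flux.create. This is a sketch under the assumption that the caller supplies the ChatClient and prompt; note that the analysisContent argument is not used by the method above, so null is passed here:

import com.chenhai.chenhaiai.service.AnalysisStreamService;
import org.springframework.ai.chat.client.ChatClient;
import reactor.core.publisher.Flux;

public class AnalysisStreamUsageSketch {
    public static Flux<String> stream(AnalysisStreamService service, ChatClient chatClient, String prompt) {
        // Each formatted module is pushed through the sink as it becomes available.
        return Flux.create(sink -> service.sendStreamAnalysis(sink, null, chatClient, prompt));
    }
}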

583
chenhai-ai/src/main/java/com/chenhai/chenhaiai/service/GiteaAnalysisParallelService.java

@ -0,0 +1,583 @@
package com.chenhai.chenhaiai.service;
import com.chenhai.chenhaiai.entity.git.GiteaCommit;
import com.chenhai.chenhaiai.entity.git.GiteaRepository;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;
import lombok.extern.slf4j.Slf4j;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.time.*;
import java.time.format.DateTimeFormatter;
import java.util.*;
import java.util.concurrent.*;
import java.util.concurrent.atomic.AtomicInteger;
@Slf4j
public class GiteaAnalysisParallelService implements AutoCloseable {
private final String giteaBaseUrl;
private final String accessToken;
private final ObjectMapper objectMapper;
private final HttpClient httpClient;
// 线程池
private final ExecutorService executorService;
// 缓存
private final Map<String, List<GiteaRepository>> userReposCache = new ConcurrentHashMap<>();
private final Map<String, List<GiteaCommit>> repoCommitsCache = new ConcurrentHashMap<>();
// 关闭标志
private volatile boolean isShutdown = false;
public GiteaAnalysisParallelService(String giteaBaseUrl, String accessToken) {
this.giteaBaseUrl = giteaBaseUrl;
this.accessToken = accessToken;
this.objectMapper = new ObjectMapper().registerModule(new JavaTimeModule());
// 创建HTTP客户端
this.httpClient = HttpClient.newBuilder()
.connectTimeout(Duration.ofSeconds(5))
.build();
// 创建分析用的线程池
int coreCount = Runtime.getRuntime().availableProcessors();
this.executorService = new ThreadPoolExecutor(
coreCount,
coreCount * 2,
60L, TimeUnit.SECONDS,
new LinkedBlockingQueue<>(100),
new ThreadPoolExecutor.CallerRunsPolicy()
);
// Register a JVM shutdown hook to make sure resources are released
Runtime.getRuntime().addShutdownHook(new Thread(() -> {
try {
if (!isShutdown) {
shutdown();
}
} catch (Exception e) {
log.error("Shutdown hook error", e);
}
}));
}
// ==================== Sole public entry point ====================
public String performCompleteAnalysis(String since, String until) {
if (isShutdown) {
throw new IllegalStateException("服务已关闭");
}
long startTime = System.currentTimeMillis();
StringBuilder report = new StringBuilder();
System.out.println("=".repeat(60));
System.out.println("🚀 Gitea代码仓库分析(极速版)");
System.out.println("📅 时间范围: " + since + " 至 " + until);
System.out.println("=".repeat(60));
try {
// 1. 获取所有仓库
System.out.println("\n📦 步骤1: 获取仓库列表...");
List<GiteaRepository> allRepos = getAllUserRepositories();
int totalRepos = allRepos.size();
System.out.println(" 发现仓库: " + totalRepos + " 个");
if (totalRepos == 0) {
return "❌ 未发现任何仓库";
}
// 2. 解析时间范围
ZonedDateTime sinceTime = ZonedDateTime.parse(since);
ZonedDateTime untilTime = ZonedDateTime.parse(until);
// 3. 并行分析所有仓库
System.out.println("\n⚡ 步骤2: 并行分析仓库提交...");
// 创建共享的数据容器
Map<String, DeveloperData> devDataMap = new ConcurrentHashMap<>();
Map<String, RepoData> repoDataMap = new ConcurrentHashMap<>();
Map<DayOfWeek, Integer> dayStats = new ConcurrentHashMap<>();
Map<Integer, Integer> hourStats = new ConcurrentHashMap<>();
Map<String, Integer> fileTypeStats = new ConcurrentHashMap<>();
AtomicInteger activeRepos = new AtomicInteger(0);
AtomicInteger totalCommits = new AtomicInteger(0);
AtomicInteger processed = new AtomicInteger(0);
// 使用CountDownLatch等待所有任务完成
CountDownLatch latch = new CountDownLatch(totalRepos);
// 提交所有仓库分析任务
for (GiteaRepository repo : allRepos) {
executorService.submit(() -> {
try {
if (isShutdown) {
return;
}
analyzeRepository(repo, sinceTime, untilTime,
devDataMap, repoDataMap, dayStats, hourStats, fileTypeStats,
activeRepos, totalCommits);
} catch (Exception e) {
log.debug("仓库 {} 分析失败: {}", repo.getFullPath(), e.getMessage());
} finally {
// 更新进度
int done = processed.incrementAndGet();
if (done % 5 == 0 || done == totalRepos) {
System.out.printf(" 进度: %d/%d | 活跃: %d | 提交: %d\r",
done, totalRepos, activeRepos.get(), totalCommits.get());
}
latch.countDown();
}
});
}
// 等待所有任务完成最多30秒
boolean completed = latch.await(30, TimeUnit.SECONDS);
if (!completed) {
System.out.println("\n⚠️ 部分仓库分析超时,继续处理已完成结果...");
}
System.out.println(); // 换行
// 4. 生成报告
System.out.println("\n📊 步骤3: 生成分析报告...");
long analysisTime = System.currentTimeMillis() - startTime;
report.append(generateReport(since, until, totalRepos, activeRepos.get(),
devDataMap.size(), totalCommits.get(), devDataMap, repoDataMap,
dayStats, hourStats, fileTypeStats, analysisTime));
System.out.println("\n📤 分析完成");
} catch (Exception e) {
System.err.println("\n❌ 分析失败: " + e.getMessage());
e.printStackTrace();
report.append("分析失败: ").append(e.getMessage());
}
return report.toString();
}
// ==================== Core analysis methods ====================
/**
* Analyze a single repository (executed in parallel)
*/
private void analyzeRepository(GiteaRepository repo,
ZonedDateTime sinceTime,
ZonedDateTime untilTime,
Map<String, DeveloperData> devDataMap,
Map<String, RepoData> repoDataMap,
Map<DayOfWeek, Integer> dayStats,
Map<Integer, Integer> hourStats,
Map<String, Integer> fileTypeStats,
AtomicInteger activeRepos,
AtomicInteger totalCommits) throws Exception {
String repoFullName = repo.getFullPath();
// 获取此仓库的提交
List<GiteaCommit> commits = getRepoCommits(repoFullName);
// 按时间范围过滤
List<GiteaCommit> commitsInRange = new ArrayList<>();
for (GiteaCommit commit : commits) {
if (commit.isWithinTimeRange(sinceTime, untilTime)) {
commitsInRange.add(commit);
}
}
if (!commitsInRange.isEmpty()) {
activeRepos.incrementAndGet();
// 创建仓库数据
RepoData repoData = new RepoData();
repoData.repoName = repoFullName;
repoData.displayName = repo.getRepoName();
// 分析每个提交
for (GiteaCommit commit : commitsInRange) {
totalCommits.incrementAndGet();
// 获取作者
String author = getAuthorName(commit);
// 更新开发者数据
DeveloperData devData = devDataMap.computeIfAbsent(author,
k -> new DeveloperData(author));
devData.commitCount++;
devData.repos.add(repoFullName);
// 更新仓库数据
repoData.commitCount++;
repoData.developers.add(author);
// 时间统计
ZonedDateTime commitTime = commit.getCommitTime();
if (commitTime != null) {
DayOfWeek day = commitTime.getDayOfWeek();
int hour = commitTime.getHour();
dayStats.merge(day, 1, Integer::sum);
hourStats.merge(hour, 1, Integer::sum);
}
// 文件类型统计
if (commit.getFiles() != null) {
for (GiteaCommit.ChangedFile file : commit.getFiles()) {
String fileType = file.getFileType();
fileTypeStats.merge(fileType, 1, Integer::sum);
}
}
}
// 保存仓库数据
repoDataMap.put(repoFullName, repoData);
}
}
/**
* Fetch a repository's commits (cached, limited to the last 3 months)
*/
private List<GiteaCommit> getRepoCommits(String repoFullName) throws Exception {
// 检查缓存
if (repoCommitsCache.containsKey(repoFullName)) {
return repoCommitsCache.get(repoFullName);
}
// 只获取最近3个月的提交
LocalDate threeMonthsAgo = LocalDate.now().minusMonths(3);
ZonedDateTime since = threeMonthsAgo.atStartOfDay(ZoneId.systemDefault());
ZonedDateTime until = ZonedDateTime.now();
String sinceStr = since.format(DateTimeFormatter.ISO_OFFSET_DATE_TIME);
String untilStr = until.format(DateTimeFormatter.ISO_OFFSET_DATE_TIME);
String url = String.format("%s/api/v1/repos/%s/commits?since=%s&until=%s",
giteaBaseUrl, repoFullName, sinceStr, untilStr);
List<GiteaCommit> commits = fetchCommits(url);
repoCommitsCache.put(repoFullName, commits);
return commits;
}
/**
* Fetch the commit list page by page
*/
private List<GiteaCommit> fetchCommits(String url) throws Exception {
if (isShutdown) {
return Collections.emptyList();
}
List<GiteaCommit> results = new ArrayList<>();
int page = 1;
while (true) {
String pageUrl = url + (url.contains("?") ? "&" : "?") +
"page=" + page + "&limit=50";
HttpRequest request = HttpRequest.newBuilder()
.uri(java.net.URI.create(pageUrl))
.header("Authorization", "token " + accessToken)
.header("Accept", "application/json")
.timeout(Duration.ofSeconds(10))
.GET()
.build();
HttpResponse<String> response = httpClient.send(request,
HttpResponse.BodyHandlers.ofString());
if (response.statusCode() != 200) {
break;
}
List<GiteaCommit> pageResults = objectMapper.readValue(
response.body(),
objectMapper.getTypeFactory().constructCollectionType(List.class, GiteaCommit.class));
if (pageResults.isEmpty()) {
break;
}
results.addAll(pageResults);
// Fewer results than the page limit means this was the last page
if (pageResults.size() < 50) {
break;
}
page++;
}
return results;
}
/**
* Fetch all repositories visible to the user
*/
public List<GiteaRepository> getAllUserRepositories() throws Exception {
if (isShutdown) {
return Collections.emptyList();
}
String cacheKey = "user_repos";
if (userReposCache.containsKey(cacheKey)) {
return userReposCache.get(cacheKey);
}
String url = giteaBaseUrl + "/api/v1/user/repos?limit=100";
List<GiteaRepository> repos = fetchRepositories(url);
userReposCache.put(cacheKey, repos);
return repos;
}
/**
* Fetch the repository list page by page
*/
private List<GiteaRepository> fetchRepositories(String url) throws Exception {
if (isShutdown) {
return Collections.emptyList();
}
List<GiteaRepository> results = new ArrayList<>();
int page = 1;
while (true) {
String pageUrl = url + (url.contains("?") ? "&" : "?") +
"page=" + page + "&limit=50";
HttpRequest request = HttpRequest.newBuilder()
.uri(java.net.URI.create(pageUrl))
.header("Authorization", "token " + accessToken)
.header("Accept", "application/json")
.timeout(Duration.ofSeconds(10))
.GET()
.build();
HttpResponse<String> response = httpClient.send(request,
HttpResponse.BodyHandlers.ofString());
if (response.statusCode() != 200) {
break;
}
List<GiteaRepository> pageResults = objectMapper.readValue(
response.body(),
objectMapper.getTypeFactory().constructCollectionType(List.class, GiteaRepository.class));
if (pageResults.isEmpty()) {
break;
}
results.addAll(pageResults);
// Fewer results than the page limit means this was the last page
if (pageResults.size() < 50) {
break;
}
page++;
}
return results;
}
/**
* Resolve the commit author's name
*/
private String getAuthorName(GiteaCommit commit) {
if (commit.getCommit() != null &&
commit.getCommit().getAuthor() != null &&
commit.getCommit().getAuthor().getName() != null) {
return commit.getCommit().getAuthor().getName();
}
return "未知作者";
}
/**
* Generate the analysis report
*/
private String generateReport(String since, String until,
int totalRepos, int activeRepos, int totalDevs, int totalCommits,
Map<String, DeveloperData> devDataMap,
Map<String, RepoData> repoDataMap,
Map<DayOfWeek, Integer> dayStats,
Map<Integer, Integer> hourStats,
Map<String, Integer> fileTypeStats,
long analysisTime) {
StringBuilder report = new StringBuilder();
report.append("=".repeat(80)).append("\n");
report.append("📈 GITEA 开发活动分析报告\n");
report.append("=".repeat(80)).append("\n\n");
// 基础统计
report.append("📅 时间范围: ").append(since).append(" 至 ").append(until).append("\n");
report.append("🏢 仓库总数: ").append(totalRepos).append(" 个\n");
report.append("🎯 活跃仓库: ").append(activeRepos).append(" 个\n");
report.append("👥 活跃开发者: ").append(totalDevs).append(" 人\n");
report.append("💾 提交总数: ").append(totalCommits).append(" 次\n");
report.append("⏱️ 分析耗时: ").append(analysisTime).append("ms\n\n");
// 开发者排行榜
report.append("🏆 开发者排行榜(TOP 10)\n");
report.append("-".repeat(60)).append("\n");
List<DeveloperData> devList = new ArrayList<>(devDataMap.values());
devList.sort((a, b) -> Integer.compare(b.commitCount, a.commitCount));
int rank = 1;
for (DeveloperData dev : devList) {
if (rank > 10) break;
report.append(String.format(" %2d. %-20s %4d 次提交 | %2d 个仓库%n",
rank++, dev.name, dev.commitCount, dev.repos.size()));
}
// 仓库排行榜
report.append("\n🏢 活跃仓库排行榜(TOP 10)\n");
report.append("-".repeat(60)).append("\n");
List<RepoData> repoList = new ArrayList<>(repoDataMap.values());
repoList.sort((a, b) -> Integer.compare(b.commitCount, a.commitCount));
rank = 1;
for (RepoData repo : repoList) {
if (rank > 10) break;
report.append(String.format(" %2d. %-40s %4d 次提交 | %2d 个开发者%n",
rank++, truncate(repo.repoName, 40), repo.commitCount, repo.developers.size()));
}
// 时间分布
report.append("\n⏰ 提交时间分布\n");
report.append("-".repeat(40)).append("\n");
String[] dayNames = {"周一", "周二", "周三", "周四", "周五", "周六", "周日"};
DayOfWeek[] days = DayOfWeek.values();
for (int i = 0; i < 7; i++) {
int count = dayStats.getOrDefault(days[i], 0);
report.append(String.format(" %-4s: %3d 次%n", dayNames[i], count));
}
report.append("\n按小时分布:\n");
for (int hour = 0; hour < 24; hour++) {
int count = hourStats.getOrDefault(hour, 0);
if (count > 0) {
report.append(String.format(" %02d:00-%02d:59: %3d 次%n", hour, hour, count));
}
}
// 文件类型
report.append("\n📁 修改的文件类型\n");
report.append("-".repeat(40)).append("\n");
List<Map.Entry<String, Integer>> fileList = new ArrayList<>(fileTypeStats.entrySet());
fileList.sort((a, b) -> Integer.compare(b.getValue(), a.getValue()));
for (Map.Entry<String, Integer> entry : fileList) {
report.append(String.format(" %-10s: %4d 个文件%n", entry.getKey(), entry.getValue()));
}
report.append("\n").append("=".repeat(80)).append("\n");
report.append("生成时间: ").append(LocalDateTime.now()
.format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss"))).append("\n");
report.append("=".repeat(80));
return report.toString();
}
/**
* 字符串截断
*/
private String truncate(String str, int length) {
if (str == null || str.length() <= length) return str;
return str.substring(0, length - 3) + "...";
}
// ==================== 内部数据类 ====================
private static class DeveloperData {
String name;
int commitCount = 0;
Set<String> repos = new HashSet<>();
DeveloperData(String name) {
this.name = name;
}
}
private static class RepoData {
String repoName;
String displayName;
int commitCount = 0;
Set<String> developers = new HashSet<>();
}
/**
* 关闭服务释放资源
*/
public void shutdown() {
if (isShutdown) {
return;
}
log.info("正在关闭Gitea分析服务...");
isShutdown = true;
// 1. 先停止接收新任务
executorService.shutdown();
try {
// 2. 等待现有任务完成(最多30秒)
if (!executorService.awaitTermination(30, TimeUnit.SECONDS)) {
// 3. 如果超时,强制关闭
log.warn("线程池未在30秒内关闭,尝试强制关闭...");
executorService.shutdownNow();
// 再等待一段时间
if (!executorService.awaitTermination(15, TimeUnit.SECONDS)) {
log.error("线程池无法关闭");
}
}
} catch (InterruptedException e) {
executorService.shutdownNow();
Thread.currentThread().interrupt();
}
// 4. 清理缓存
userReposCache.clear();
repoCommitsCache.clear();
log.info("Gitea分析服务已关闭");
}
/**
* 实现AutoCloseable接口支持try-with-resources
*/
@Override
public void close() {
shutdown();
}
/**
* 缓存清理方法(可选)
*/
public void clearCache() {
userReposCache.clear();
repoCommitsCache.clear();
log.info("缓存已清理");
}
/**
* 检查服务是否已关闭
*/
public boolean isShutdown() {
return isShutdown;
}
}

50
chenhai-ai/src/main/java/com/chenhai/chenhaiai/service/GiteaAnalysisParallelTest.java

@@ -0,0 +1,50 @@
package com.chenhai.chenhaiai.service;
import java.time.*;
import java.time.format.DateTimeFormatter;
import java.time.temporal.TemporalAdjusters;
/**
* Gitea分析测试类 - 极速版
*/
public class GiteaAnalysisParallelTest {
public static void main(String[] args) {
System.out.println("🚀 开始Gitea代码仓库分析...");
long totalStartTime = System.currentTimeMillis();
// 使用try-with-resources确保资源正确关闭
try (GiteaAnalysisParallelService service = new GiteaAnalysisParallelService(
"http://192.168.1.224:3000",
"a9f1c8d3d6fefd73956604f496457faaa3672f89"
)) {
// 1. 生成上周时间范围(上周一~上周日)
LocalDate lastMonday = LocalDate.now().minusWeeks(1)
.with(TemporalAdjusters.previousOrSame(DayOfWeek.MONDAY));
String since = lastMonday.atStartOfDay(ZoneId.systemDefault())
.format(DateTimeFormatter.ISO_OFFSET_DATE_TIME);
String until = lastMonday.plusDays(6).atTime(23, 59, 59)
.atZone(ZoneId.systemDefault())
.format(DateTimeFormatter.ISO_OFFSET_DATE_TIME);
System.out.println("📅 分析时间: " + lastMonday + " 至 " + lastMonday.plusDays(6));
// 2. 执行分析并获取报告
long analysisStartTime = System.currentTimeMillis();
String report = service.performCompleteAnalysis(since, until);
long analysisEndTime = System.currentTimeMillis();
// 3. 打印报告
System.out.println(report);
System.out.println("⏱️ 分析耗时: " + (analysisEndTime - analysisStartTime) + "ms");
} catch (Exception e) {
System.err.println("❌ 分析失败: " + e.getMessage());
e.printStackTrace();
} finally {
long totalEndTime = System.currentTimeMillis();
System.out.println("✅ 测试完成,总耗时: " + (totalEndTime - totalStartTime) + "ms");
}
}
}
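
A note on the time range built above: atTime(23, 59, 59) makes the upper bound inclusive only to the second, so anything committed during the final second of Sunday (23:59:59.001–23:59:59.999) falls outside the range. A minimal standalone sketch of an alternative, assuming the filtering side switches to an exclusive upper bound (isBefore) instead of the !isAfter(until) check used in the services:

package com.chenhai.chenhaiai.service;

import java.time.DayOfWeek;
import java.time.LocalDate;
import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;
import java.time.temporal.TemporalAdjusters;

/**
 * Sketch only: builds last week's range as [Monday 00:00, next Monday 00:00) with an exclusive end.
 */
public class LastWeekRangeSketch {
    public static void main(String[] args) {
        ZoneId zone = ZoneId.systemDefault();
        LocalDate lastMonday = LocalDate.now().minusWeeks(1)
                .with(TemporalAdjusters.previousOrSame(DayOfWeek.MONDAY));
        ZonedDateTime since = lastMonday.atStartOfDay(zone);
        // Exclusive upper bound: the Monday that starts the current week
        ZonedDateTime untilExclusive = lastMonday.plusWeeks(1).atStartOfDay(zone);
        System.out.println("since = " + since.format(DateTimeFormatter.ISO_OFFSET_DATE_TIME));
        System.out.println("untilExclusive = " + untilExclusive.format(DateTimeFormatter.ISO_OFFSET_DATE_TIME));
        // A commit at time t belongs to last week when !t.isBefore(since) && t.isBefore(untilExclusive)
    }
}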

570
chenhai-ai/src/main/java/com/chenhai/chenhaiai/service/GiteaAnalysisService.java

@@ -0,0 +1,570 @@
package com.chenhai.chenhaiai.service;
import com.chenhai.chenhaiai.entity.git.*;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;
import jakarta.annotation.PostConstruct;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.core.task.TaskExecutor;
import org.springframework.scheduling.annotation.Async;
import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;
import org.springframework.stereotype.Service;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.time.DayOfWeek;
import java.time.Duration;
import java.time.LocalDateTime;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;
import java.util.*;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Collectors;
@Slf4j
@Service
public class GiteaAnalysisService {
@Value("${gitea.url:http://192.168.1.224:3000}")
private String giteaBaseUrl;
@Value("${gitea.token:a9f1c8d3d6fefd73956604f496457faaa3672f89}")
private String accessToken;
@Value("${gitea.analysis.max-active-repos:100}")
private int maxActiveRepos;
@Autowired
private ThreadPoolTaskExecutor giteaTaskExecutor;
@Autowired
private TaskExecutor taskExecutor;
private HttpClient httpClient;
private ObjectMapper objectMapper;
@PostConstruct
public void init() {
log.info("初始化Gitea分析服务...");
httpClient = HttpClient.newBuilder()
.connectTimeout(Duration.ofSeconds(10))
.build();
objectMapper = new ObjectMapper();
objectMapper.registerModule(new JavaTimeModule());
log.info("Gitea分析服务初始化完成");
}
/**
* 异步执行Git分析,直接返回结构化数据
* @param since 开始时间 (ISO格式: 2025-12-01T00:00:00+08:00)
* @param until 结束时间 (ISO格式: 2025-12-31T23:59:59+08:00)
* @return 结构化分析数据
*/
@Async("giteaTaskExecutor")
public CompletableFuture<GitAnalysisData> analyzeGitDataAsync(String since, String until) {
String taskId = UUID.randomUUID().toString().substring(0, 8);
long startTime = System.currentTimeMillis();
log.info("开始Git分析任务[{}]: {} 至 {}", taskId, since, until);
return CompletableFuture.supplyAsync(() -> {
try {
GitAnalysisData analysisData = performCompleteAnalysis(since, until, taskId);
long cost = System.currentTimeMillis() - startTime;
log.info("Git分析任务[{}]完成,耗时: {}ms", taskId, cost);
return analysisData;
} catch (Exception e) {
log.error("Git分析任务[{}]失败: {}", taskId, e.getMessage(), e);
throw new RuntimeException("Git分析失败: " + e.getMessage(), e);
}
}, giteaTaskExecutor);
}
/**
* 执行完整分析,返回结构化数据
*/
private GitAnalysisData performCompleteAnalysis(String since, String until, String taskId) throws Exception {
long startTime = System.currentTimeMillis();
// 1. 获取所有仓库
log.debug("任务[{}] 获取仓库列表...", taskId);
List<GiteaRepository> allRepos = getAllUserRepositories();
int totalRepos = allRepos.size();
if (totalRepos == 0) {
// 返回空的基础数据
return buildEmptyGitAnalysisData("无仓库数据");
}
log.info("任务[{}] 发现仓库: {} 个", taskId, totalRepos);
// 2. 解析时间范围
ZonedDateTime sinceTime = ZonedDateTime.parse(since);
ZonedDateTime untilTime = ZonedDateTime.parse(until);
// 3. 快速筛选活跃仓库
log.debug("任务[{}] 快速筛选活跃仓库...", taskId);
List<GiteaRepository> activeRepos = findActiveRepositories(allRepos, sinceTime, untilTime, taskId)
.get(20, TimeUnit.SECONDS);
int activeRepoCount = activeRepos.size();
log.info("任务[{}] 活跃仓库: {} 个", taskId, activeRepoCount);
if (activeRepoCount == 0) {
// 返回简单结果(无活跃仓库)
return buildSimpleGitAnalysisData(since, until, totalRepos, 0, 0, startTime);
}
// 限制活跃仓库数量
if (activeRepoCount > maxActiveRepos) {
log.warn("任务[{}] 活跃仓库过多({}个),采样分析前{}个", taskId, activeRepoCount, maxActiveRepos);
activeRepos = activeRepos.subList(0, maxActiveRepos);
activeRepoCount = maxActiveRepos;
}
// 4. 详细分析活跃仓库
log.debug("任务[{}] 详细分析活跃仓库...", taskId);
DetailedAnalysisResult detailResult = analyzeActiveRepositories(activeRepos, sinceTime, untilTime, taskId)
.get(20, TimeUnit.SECONDS);
// 5. 构建结构化数据
long analysisTime = System.currentTimeMillis() - startTime;
return buildGitAnalysisData(since, until, totalRepos, activeRepoCount, detailResult, analysisTime);
}
/**
* 快速筛选活跃仓库
*/
private CompletableFuture<List<GiteaRepository>> findActiveRepositories(List<GiteaRepository> allRepos,
ZonedDateTime since,
ZonedDateTime until,
String taskId) {
return CompletableFuture.supplyAsync(() -> {
List<GiteaRepository> activeRepos = Collections.synchronizedList(new ArrayList<>());
List<CompletableFuture<Boolean>> futures = new ArrayList<>();
AtomicInteger checked = new AtomicInteger(0);
final int totalRepos = allRepos.size();
for (GiteaRepository repo : allRepos) {
final GiteaRepository currentRepo = repo;
CompletableFuture<Boolean> future = CompletableFuture.supplyAsync(() -> {
try {
return hasCommitsInRange(currentRepo.getFullPath(), since, until);
} catch (Exception e) {
log.debug("任务[{}] 仓库 {} 快速检查失败: {}", taskId, currentRepo.getFullPath(), e.getMessage());
return false;
}
}, taskExecutor);
future.thenAccept(hasCommits -> {
if (hasCommits) {
activeRepos.add(currentRepo);
}
int done = checked.incrementAndGet();
if (done % 20 == 0 || done == totalRepos) {
log.debug("任务[{}] 快速检查进度: {}/{} | 活跃: {}", taskId, done, totalRepos, activeRepos.size());
}
});
futures.add(future);
}
// 等待所有检查完成
CompletableFuture<Void> allChecks = CompletableFuture.allOf(
futures.toArray(new CompletableFuture[0]));
try {
allChecks.get(10, TimeUnit.SECONDS);
} catch (Exception e) {
log.warn("任务[{}] 部分仓库快速检查未完成: {}", taskId, e.getMessage());
}
return activeRepos;
}, taskExecutor);
}
/**
* 详细分析活跃仓库
*/
private CompletableFuture<DetailedAnalysisResult> analyzeActiveRepositories(List<GiteaRepository> activeRepos,
ZonedDateTime sinceTime,
ZonedDateTime untilTime,
String taskId) {
return CompletableFuture.supplyAsync(() -> {
Map<String, DeveloperData> devDataMap = new ConcurrentHashMap<>();
Map<String, RepoData> repoDataMap = new ConcurrentHashMap<>();
Map<DayOfWeek, Integer> dayStats = new ConcurrentHashMap<>();
Map<Integer, Integer> hourStats = new ConcurrentHashMap<>();
Map<String, Integer> fileTypeStats = new ConcurrentHashMap<>();
AtomicInteger totalCommits = new AtomicInteger(0);
AtomicInteger processed = new AtomicInteger(0);
List<CompletableFuture<Void>> futures = activeRepos.stream()
.map(repo -> CompletableFuture.runAsync(() -> {
try {
analyzeSingleRepository(repo, sinceTime, untilTime,
devDataMap, repoDataMap, dayStats, hourStats, fileTypeStats, totalCommits);
} catch (Exception e) {
log.debug("任务[{}] 仓库 {} 详细分析失败: {}", taskId, repo.getFullPath(), e.getMessage());
} finally {
int done = processed.incrementAndGet();
if (done % 10 == 0 || done == activeRepos.size()) {
log.debug("任务[{}] 详细分析进度: {}/{} | 提交: {}", taskId, done, activeRepos.size(), totalCommits.get());
}
}
}, taskExecutor))
.collect(Collectors.toList());
// 等待所有分析完成
CompletableFuture<Void> allFutures = CompletableFuture.allOf(
futures.toArray(new CompletableFuture[0]));
try {
allFutures.get(15, TimeUnit.SECONDS);
} catch (Exception e) {
log.warn("任务[{}] 部分仓库详细分析未完成: {}", taskId, e.getMessage());
}
return new DetailedAnalysisResult(devDataMap, repoDataMap, dayStats, hourStats, fileTypeStats, totalCommits.get());
}, taskExecutor);
}
/**
* 分析单个仓库详情
*/
private void analyzeSingleRepository(GiteaRepository repo,
ZonedDateTime sinceTime,
ZonedDateTime untilTime,
Map<String, DeveloperData> devDataMap,
Map<String, RepoData> repoDataMap,
Map<DayOfWeek, Integer> dayStats,
Map<Integer, Integer> hourStats,
Map<String, Integer> fileTypeStats,
AtomicInteger totalCommits) throws Exception {
if (Thread.currentThread().isInterrupted()) {
log.debug("任务被中断,跳过仓库分析: {}", repo.getFullPath());
return;
}
String repoFullName = repo.getFullPath();
List<GiteaCommit> commits = getCommitsInRange(repoFullName, sinceTime, untilTime);
if (!commits.isEmpty()) {
RepoData repoData = new RepoData();
repoData.repoName = repoFullName;
repoData.displayName = repo.getRepoName();
for (GiteaCommit commit : commits) {
if (Thread.currentThread().isInterrupted()) {
log.debug("处理提交时被中断");
return;
}
totalCommits.incrementAndGet();
String author = getAuthorName(commit);
// 更新开发者数据(同一开发者会被多个仓库的并发任务更新,这里对 devData 加锁,避免计数丢失)
DeveloperData devData = devDataMap.computeIfAbsent(author, k -> new DeveloperData(author));
synchronized (devData) {
devData.commitCount++;
devData.repos.add(repoFullName);
}
// 更新仓库数据
repoData.commitCount++;
repoData.developers.add(author);
// 时间统计
ZonedDateTime commitTime = commit.getCommitTime();
if (commitTime != null) {
DayOfWeek day = commitTime.getDayOfWeek();
int hour = commitTime.getHour();
dayStats.merge(day, 1, Integer::sum);
hourStats.merge(hour, 1, Integer::sum);
}
// 文件类型统计
if (commit.getFiles() != null) {
for (GiteaCommit.ChangedFile file : commit.getFiles()) {
String fileType = file.getFileType();
fileTypeStats.merge(fileType, 1, Integer::sum);
}
}
}
repoDataMap.put(repoFullName, repoData);
}
}
// ==================== Gitea API调用方法 ====================
private boolean hasCommitsInRange(String repoFullName, ZonedDateTime since, ZonedDateTime until) throws Exception {
// 直接调用 getCommitsInRange 检查是否有提交
List<GiteaCommit> commits = getCommitsInRange(repoFullName, since, until);
return !commits.isEmpty();
}
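// Note (illustrative sketch, not part of the original commit): hasCommitsInRange above downloads and
// filters whole commit pages just to answer a yes/no question. A cheaper probe, along the lines of the
// commented-out LongTermGiteaAnalysisService further down in this commit, would request a single
// commit with limit=1 and only check whether the result is empty:
//
//     String probeUrl = String.format("%s/api/v1/repos/%s/commits?since=%s&until=%s&limit=1",
//             giteaBaseUrl, repoFullName,
//             since.format(DateTimeFormatter.ISO_OFFSET_DATE_TIME),
//             until.format(DateTimeFormatter.ISO_OFFSET_DATE_TIME));
//     HttpRequest probe = HttpRequest.newBuilder()
//             .uri(java.net.URI.create(probeUrl))
//             .header("Authorization", "token " + accessToken)
//             .timeout(Duration.ofSeconds(5))
//             .GET()
//             .build();
//     HttpResponse<String> probeResponse = httpClient.send(probe, HttpResponse.BodyHandlers.ofString());
//     if (probeResponse.statusCode() != 200) {
//         return false;
//     }
//     List<GiteaCommit> probeCommits = objectMapper.readValue(
//             probeResponse.body(),
//             objectMapper.getTypeFactory().constructCollectionType(List.class, GiteaCommit.class));
//     return !probeCommits.isEmpty();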
private List<GiteaCommit> getCommitsInRange(String repoFullName, ZonedDateTime since, ZonedDateTime until) throws Exception {
// 请求窗口最多向前取3个月以减少数据量;若调用方给的 since 更晚,则直接使用 since(早于3个月的 since 会被截断到3个月)
ZonedDateTime threeMonthsAgo = LocalDateTime.now().minusMonths(3).atZone(since.getZone());
ZonedDateTime recentSince = since.isAfter(threeMonthsAgo) ? since : threeMonthsAgo;
String sinceStr = recentSince.format(DateTimeFormatter.ISO_OFFSET_DATE_TIME);
String untilStr = until.format(DateTimeFormatter.ISO_OFFSET_DATE_TIME);
String baseUrl = String.format("%s/api/v1/repos/%s/commits?since=%s&until=%s",
giteaBaseUrl, repoFullName, sinceStr, untilStr);
List<GiteaCommit> recentCommits = fetchWithPagination(baseUrl, GiteaCommit.class, 8);
// 在代码层面再次过滤到精确时间范围
List<GiteaCommit> filteredCommits = new ArrayList<>();
for (GiteaCommit commit : recentCommits) {
ZonedDateTime commitTime = commit.getCommitTime();
if (commitTime != null &&
!commitTime.isBefore(since) &&
!commitTime.isAfter(until)) {
filteredCommits.add(commit);
}
}
return filteredCommits;
}
private List<GiteaRepository> getAllUserRepositories() throws Exception {
String baseUrl = giteaBaseUrl + "/api/v1/user/repos"; // page 和 limit 由 fetchWithPagination 统一追加
return fetchWithPagination(baseUrl, GiteaRepository.class, 10);
}
private <T> List<T> fetchWithPagination(String baseUrl, Class<T> clazz, int timeoutSeconds) throws Exception {
List<T> results = new ArrayList<>();
int page = 1;
int maxPages = 10;
while (page <= maxPages) {
if (Thread.currentThread().isInterrupted()) {
log.debug("分页获取被中断");
break;
}
// 显式带上 limit=50,与下方 size<50 的终止条件保持一致(否则按服务端默认分页大小返回,只会取到第一页)
String pageUrl = baseUrl + (baseUrl.contains("?") ? "&" : "?") + "page=" + page + "&limit=50";
HttpRequest request = HttpRequest.newBuilder()
.uri(java.net.URI.create(pageUrl))
.header("Authorization", "token " + accessToken)
.timeout(Duration.ofSeconds(timeoutSeconds))
.GET()
.build();
HttpResponse<String> response = httpClient.send(request, HttpResponse.BodyHandlers.ofString());
if (response.statusCode() != 200) {
log.warn("API请求失败: {} - {}", response.statusCode(), response.body());
break;
}
List<T> pageResults = objectMapper.readValue(
response.body(),
objectMapper.getTypeFactory().constructCollectionType(List.class, clazz));
if (pageResults.isEmpty()) {
break;
}
results.addAll(pageResults);
if (pageResults.size() < 50) {
break;
}
page++;
}
return results;
}
private String getAuthorName(GiteaCommit commit) {
if (commit.getCommit() != null &&
commit.getCommit().getAuthor() != null &&
commit.getCommit().getAuthor().getName() != null) {
return commit.getCommit().getAuthor().getName();
}
return "未知作者";
}
// ==================== GitAnalysisData构建方法 ====================
/**
* 构建完整的GitAnalysisData
*/
private GitAnalysisData buildGitAnalysisData(String since, String until, int totalRepos, int activeRepos,
DetailedAnalysisResult detailResult, long analysisTime) {
GitAnalysisData data = new GitAnalysisData();
// 基础信息
data.setBasicInfo(new BasicInfo(
since + " 至 " + until,
totalRepos,
activeRepos,
detailResult.getDevDataMap().size(),
detailResult.getTotalCommits(),
analysisTime,
"快速筛选 + 详细分析"
));
// 开发者排行榜
if (!detailResult.getDevDataMap().isEmpty()) {
List<DeveloperData> devList = new ArrayList<>(detailResult.getDevDataMap().values());
devList.sort((a, b) -> Integer.compare(b.commitCount, a.commitCount));
List<DeveloperRank> developerRanks = new ArrayList<>();
int rank = 1;
for (DeveloperData dev : devList) {
if (rank > 10) break;
developerRanks.add(new DeveloperRank(rank++, dev.name, dev.commitCount, dev.repos.size()));
}
data.setDeveloperRanks(developerRanks);
}
// 仓库排行榜
if (!detailResult.getRepoDataMap().isEmpty()) {
List<RepoData> repoList = new ArrayList<>(detailResult.getRepoDataMap().values());
repoList.sort((a, b) -> Integer.compare(b.commitCount, a.commitCount));
List<RepoRank> repoRanks = new ArrayList<>();
int rank = 1;
for (RepoData repo : repoList) {
if (rank > 10) break;
repoRanks.add(new RepoRank(rank++, repo.repoName, repo.displayName, repo.commitCount, repo.developers.size()));
}
data.setRepoRanks(repoRanks);
}
// 时间分布(按星期)
if (!detailResult.getDayStats().isEmpty()) {
String[] dayNames = {"周一", "周二", "周三", "周四", "周五", "周六", "周日"};
DayOfWeek[] days = DayOfWeek.values();
List<DayStats> dayStatsList = new ArrayList<>();
for (int i = 0; i < 7; i++) {
int count = detailResult.getDayStats().getOrDefault(days[i], 0);
dayStatsList.add(new DayStats(dayNames[i], count));
}
data.setDayStats(dayStatsList);
}
// 文件类型统计
if (!detailResult.getFileTypeStats().isEmpty()) {
List<Map.Entry<String, Integer>> fileList = new ArrayList<>(detailResult.getFileTypeStats().entrySet());
fileList.sort((a, b) -> Integer.compare(b.getValue(), a.getValue()));
List<FileTypeStats> fileTypeStatsList = new ArrayList<>();
for (Map.Entry<String, Integer> entry : fileList) {
fileTypeStatsList.add(new FileTypeStats(entry.getKey(), entry.getValue()));
}
data.setFileTypeStats(fileTypeStatsList);
}
// 生成时间
data.setGeneratedTime(LocalDateTime.now().format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")));
return data;
}
/**
* 构建空的GitAnalysisData(无仓库时)
*/
private GitAnalysisData buildEmptyGitAnalysisData(String message) {
GitAnalysisData data = new GitAnalysisData();
data.setBasicInfo(new BasicInfo(
"无时间范围",
0,
0,
0,
0,
0,
"无仓库数据"
));
data.setGeneratedTime(LocalDateTime.now().format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")));
return data;
}
/**
* 构建简单的GitAnalysisData(无活跃仓库时)
*/
private GitAnalysisData buildSimpleGitAnalysisData(String since, String until, int totalRepos,
int activeRepos, int totalDevs, long startTime) {
long analysisTime = System.currentTimeMillis() - startTime;
GitAnalysisData data = new GitAnalysisData();
data.setBasicInfo(new BasicInfo(
since + " 至 " + until,
totalRepos,
activeRepos,
totalDevs,
0,
analysisTime,
"快速筛选"
));
data.setGeneratedTime(LocalDateTime.now().format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")));
return data;
}
// ==================== 内部数据类 ====================
private static class DeveloperData {
String name;
int commitCount = 0;
Set<String> repos = new HashSet<>();
DeveloperData(String name) {
this.name = name;
}
}
private static class RepoData {
String repoName;
String displayName;
int commitCount = 0;
Set<String> developers = new HashSet<>();
}
// ==================== 详细分析结果类 ====================
private static class DetailedAnalysisResult {
private final Map<String, DeveloperData> devDataMap;
private final Map<String, RepoData> repoDataMap;
private final Map<DayOfWeek, Integer> dayStats;
private final Map<Integer, Integer> hourStats;
private final Map<String, Integer> fileTypeStats;
private final int totalCommits;
public DetailedAnalysisResult(Map<String, DeveloperData> devDataMap, Map<String, RepoData> repoDataMap,
Map<DayOfWeek, Integer> dayStats, Map<Integer, Integer> hourStats,
Map<String, Integer> fileTypeStats, int totalCommits) {
this.devDataMap = devDataMap;
this.repoDataMap = repoDataMap;
this.dayStats = dayStats;
this.hourStats = hourStats;
this.fileTypeStats = fileTypeStats;
this.totalCommits = totalCommits;
}
public Map<String, DeveloperData> getDevDataMap() { return devDataMap; }
public Map<String, RepoData> getRepoDataMap() { return repoDataMap; }
public Map<DayOfWeek, Integer> getDayStats() { return dayStats; }
public Map<Integer, Integer> getHourStats() { return hourStats; }
public Map<String, Integer> getFileTypeStats() { return fileTypeStats; }
public int getTotalCommits() { return totalCommits; }
}
}
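
Because analyzeGitDataAsync is annotated with @Async and returns a CompletableFuture, a caller in another Spring bean can compose on the result instead of blocking a request thread. A minimal caller sketch; the class and method names are hypothetical, and it assumes GitAnalysisData exposes getters matching the setters used above:

package com.chenhai.chenhaiai.service;

import com.chenhai.chenhaiai.entity.git.GitAnalysisData;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

import java.util.concurrent.CompletableFuture;

@Slf4j
@Component
public class GitAnalysisCallerSketch {

    @Autowired
    private GiteaAnalysisService giteaAnalysisService;

    /**
     * Kicks off the async analysis and logs completion; returning the future keeps the
     * calling thread free, and callers that need the value can join() it with their own timeout.
     */
    public CompletableFuture<GitAnalysisData> analyzeRange(String since, String until) {
        return giteaAnalysisService.analyzeGitDataAsync(since, until)
                .whenComplete((data, ex) -> {
                    if (ex != null) {
                        log.error("Git analysis failed: {}", ex.getMessage(), ex);
                    } else if (data != null) {
                        log.info("Git analysis finished, generated at {}", data.getGeneratedTime());
                    }
                });
    }
}

The sketch keeps the caller in a separate component because the @Async proxy only applies when the method is invoked through the Spring bean from another class.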

398
chenhai-ai/src/main/java/com/chenhai/chenhaiai/service/GiteaAnalysisTest.java

@@ -0,0 +1,398 @@
//package com.chenhai.chenhaiai.service;
//
//import com.chenhai.chenhaiai.entity.AnalysisResult;
//import com.chenhai.chenhaiai.entity.git.GiteaCommit;
//
//import java.io.ByteArrayOutputStream;
//import java.io.PrintStream;
//import java.time.format.DateTimeFormatter;
//import java.util.*;
//
///**
// * Gitea分析报告生成器 - 优化版解决日志干扰问题
// */
//public class GiteaAnalysisTest {
//
// private final GiteaAnalysisService analysisService;
//
// public GiteaAnalysisTest(String giteaBaseUrl, String accessToken) {
// this.analysisService = new GiteaAnalysisService(giteaBaseUrl, accessToken);
// }
//
// /**
// * 生成完整上周报告 - 优化版避免日志干扰
// */
// public void generateCompleteReport() {
// // 保存原始输出流
// PrintStream originalOut = System.out;
// PrintStream originalErr = System.err;
//
// // 创建一个缓冲区来收集所有输出
// ByteArrayOutputStream baos = new ByteArrayOutputStream();
// PrintStream bufferStream = new PrintStream(baos);
//
// try {
// // 重定向System.out到缓冲区
// System.setOut(bufferStream);
//
// System.out.println("\n" + "=".repeat(80));
// System.out.println("🚀 GITEA 上周开发活动分析报告");
// System.out.println("=".repeat(80));
// System.out.println("生成时间: " + java.time.LocalDateTime.now().format(
// DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")));
// System.out.println("=".repeat(80) + "\n");
//
// System.out.println("📅 分析时间范围: 上周一 00:00 至 周日 23:59");
// System.out.println("🕐 正在获取数据...\n");
//
// // 1. 执行完整分析
// AnalysisResult result = analysisService.analyzeLastWeek();
//
// // 2. 生成报告
// generateReport(result);
//
// // 3. 生成单个仓库详细报告如果有活跃仓库
// if (!result.getRepositoryRanking().isEmpty()) {
// String topRepo = result.getRepositoryRanking().get(0).getKey();
// System.out.println("\n" + "=".repeat(80));
// System.out.println("📋 最活跃仓库详细分析: " + topRepo);
// System.out.println("=".repeat(80));
// generateRepoDetailReport(topRepo);
// }
//
// System.out.println("\n" + "=".repeat(80));
// System.out.println("✅ 报告生成完成!");
// System.out.println("=".repeat(80));
//
// } catch (Exception e) {
// System.err.println("\n❌ 分析失败: " + e.getMessage());
// e.printStackTrace();
// } finally {
// // 恢复原始输出流
// System.setOut(originalOut);
// System.setErr(originalErr);
//
// // 从缓冲区获取完整的报告内容
// String fullReport = baos.toString();
//
// // 清理报告移除日志行
// String cleanedReport = cleanReport(fullReport);
//
// // 输出纯净的报告
// originalOut.println(cleanedReport);
// }
// }
//
// /**
// * 清理报告移除日志行
// */
// private String cleanReport(String fullReport) {
// if (fullReport == null || fullReport.isEmpty()) {
// return fullReport;
// }
//
// StringBuilder cleaned = new StringBuilder();
// String[] lines = fullReport.split("\n");
//
// // 保留分析过程的状态指示但移除详细的日志信息
// boolean inProgressSection = false;
//
// for (String line : lines) {
// // 判断是否是日志行包含时间戳和日志级别
// if (isLogLine(line)) {
// // 如果包含重要进度信息可以保留简化版本
// if (line.contains("检查进度:") || line.contains("分析完成") ||
// line.contains("活跃仓库数:") || line.contains("开发者活动分析完成")) {
// // 提取简化信息
// String simplified = extractProgressInfo(line);
// if (simplified != null) {
// cleaned.append(simplified).append("\n");
// }
// }
// // 跳过其他详细的日志行
// continue;
// }
//
// // 保留所有非日志行真正的报告内容
// cleaned.append(line).append("\n");
// }
//
// return cleaned.toString();
// }
//
// /**
// * 判断是否是日志行
// */
// private boolean isLogLine(String line) {
// // 日志行通常包含时间戳模式 20:24:28.848和日志级别INFOWARNDEBUGERROR
// return line.matches("^\\d{2}:\\d{2}:\\d{2}\\.\\d{3}.*") &&
// (line.contains("INFO") || line.contains("WARN") ||
// line.contains("DEBUG") || line.contains("ERROR"));
// }
//
// /**
// * 从日志行中提取进度信息
// */
// private String extractProgressInfo(String logLine) {
// if (logLine.contains("检查进度:")) {
// // 提取如 "检查进度: 10/182 (活跃: 0 个)" 这样的信息
// String[] parts = logLine.split("检查进度:");
// if (parts.length > 1) {
// String progress = parts[1].split("\\[")[0].trim(); // 移除线程信息
// return "📊 " + progress;
// }
// } else if (logLine.contains("✅ 检查完成")) {
// return "✅ 仓库检查完成";
// } else if (logLine.contains("活跃仓库数:")) {
// String[] parts = logLine.split("活跃仓库数:");
// if (parts.length > 1) {
// return "📦 " + parts[1].split("\\[")[0].trim();
// }
// } else if (logLine.contains("开发者活动分析完成")) {
// return "👥 开发者分析完成";
// } else if (logLine.contains("仓库活动分析完成")) {
// return "🏢 仓库活动分析完成";
// } else if (logLine.contains("分析完成,结果ID:")) {
// return "🎯 数据分析完成";
// }
// return null;
// }
//
// /**
// * 生成主报告 - 保持不变
// */
// private void generateReport(AnalysisResult result) {
// if (result == null) {
// System.out.println("⚠️ 分析结果为空,无法生成报告");
// return;
// }
//
// // 1. 概览统计
// System.out.println("📊 概览统计");
// System.out.println("-".repeat(60));
// System.out.printf("%-25s: %d 个\n", "总仓库数", result.getTotalRepositories());
// System.out.printf("%-25s: %d 个\n", "活跃仓库数", result.getActiveRepositories());
// System.out.printf("%-25s: %d 人\n", "活跃开发者数", result.getTotalDevelopers());
// System.out.printf("%-25s: %d 次\n", "总提交次数", result.getTotalCommits());
//
// if (result.getTotalCommits() == 0) {
// System.out.println("\n📭 上周没有提交记录");
// return;
// }
//
// // 2. 开发者排行榜
// if (!result.getDeveloperRanking().isEmpty()) {
// System.out.println("\n👨‍💻 开发者贡献排行榜");
// System.out.println("-".repeat(60));
// System.out.printf("%-4s %-20s %-10s %-12s %-15s\n",
// "排名", "开发者", "提交次数", "参与仓库", "最活跃时段");
// System.out.println("-".repeat(60));
//
// int rank = 1;
// for (var entry : result.getDeveloperRanking()) {
// if (rank > 15) break;
//
// var activity = entry.getValue();
// System.out.printf("%-4d %-20s %-10d %-12d %-15s\n",
// rank++,
// truncate(entry.getKey(), 20),
// activity.getTotalCommits(),
// activity.getContributedRepos().size(),
// activity.getMostActiveDay().substring(0, 3) + " " + activity.getMostActiveHour());
// }
// }
//
// // 3. 仓库排行榜
// if (!result.getRepositoryRanking().isEmpty()) {
// System.out.println("\n🏢 活跃仓库排行榜");
// System.out.println("-".repeat(60));
// System.out.printf("%-4s %-40s %-10s %-10s %-15s\n",
// "排名", "仓库名称", "提交次数", "开发者数", "主要文件类型");
// System.out.println("-".repeat(60));
//
// int rank = 1;
// for (var entry : result.getRepositoryRanking()) {
// if (rank > 10) break;
//
// var activity = entry.getValue();
// System.out.printf("%-4d %-40s %-10d %-10d %-15s\n",
// rank++,
// truncate(entry.getKey(), 40),
// activity.getTotalCommits(),
// activity.getDeveloperCount(),
// activity.getMostChangedFileType());
// }
// }
//
// // 4. 时间分布分析
// if (!result.getOverallCommitsByDay().isEmpty()) {
// System.out.println("\n⏰ 提交时间分布");
// System.out.println("-".repeat(60));
//
// String[] days = {"周一", "周二", "周三", "周四", "周五", "周六", "周日"};
// java.time.DayOfWeek[] dayOfWeeks = {
// java.time.DayOfWeek.MONDAY, java.time.DayOfWeek.TUESDAY,
// java.time.DayOfWeek.WEDNESDAY, java.time.DayOfWeek.THURSDAY,
// java.time.DayOfWeek.FRIDAY, java.time.DayOfWeek.SATURDAY,
// java.time.DayOfWeek.SUNDAY
// };
//
// int maxCommits = result.getOverallCommitsByDay().values().stream()
// .max(Integer::compareTo).orElse(1);
//
// for (int i = 0; i < days.length; i++) {
// int commits = result.getOverallCommitsByDay().getOrDefault(dayOfWeeks[i], 0);
// int barLength = maxCommits > 0 ? (commits * 40 / maxCommits) : 0;
// String bar = "█".repeat(barLength);
//
// System.out.printf("%-4s: %3d 次提交 | %-40s\n",
// days[i], commits, bar);
// }
// }
//
// // 5. 文件类型分析
// if (!result.getOverallFileTypeDistribution().isEmpty()) {
// System.out.println("\n📁 文件类型分布");
// System.out.println("-".repeat(60));
//
// int totalFiles = result.getOverallFileTypeDistribution().values().stream()
// .mapToInt(Integer::intValue).sum();
//
// int maxFiles = result.getOverallFileTypeDistribution().values().stream()
// .max(Integer::compareTo).orElse(1);
//
// result.getOverallFileTypeDistribution().entrySet().stream()
// .sorted((a, b) -> Integer.compare(b.getValue(), a.getValue()))
// .forEach(entry -> {
// int count = entry.getValue();
// int barLength = maxFiles > 0 ? (count * 40 / maxFiles) : 0;
// String bar = "█".repeat(barLength);
// double percentage = totalFiles > 0 ? (count * 100.0 / totalFiles) : 0;
//
// System.out.printf("%-10s: %3d 次修改 (%5.1f%%) | %-40s\n",
// entry.getKey(), count, percentage, bar);
// });
// }
//
// // 6. 提交摘要
// if (!result.getDeveloperRanking().isEmpty()) {
// System.out.println("\n💬 提交摘要");
// System.out.println("-".repeat(60));
//
// // 获取所有提交消息
// List<String> allMessages = new ArrayList<>();
// for (var entry : result.getDeveloperRanking()) {
// var activity = entry.getValue();
// allMessages.addAll(activity.getRecentCommitMessages());
// }
//
// if (!allMessages.isEmpty()) {
// System.out.println("📝 最近的提交消息:");
// allMessages.stream()
// .filter(msg -> msg != null && !msg.trim().isEmpty())
// .distinct()
// .limit(8)
// .forEach(msg -> {
// System.out.printf(" • %s\n", truncate(msg.trim(), 70));
// });
// }
// }
// }
//
// /**
// * 生成仓库详细报告 - 保持不变
// */
// private void generateRepoDetailReport(String repoFullName) {
// try {
// String since = "2025-12-01T00:00:00+08:00";
// String until = "2025-12-07T23:59:59+08:00";
//
// var commits = analysisService.getRepositoryCommits(repoFullName, since, until);
//
// if (commits.isEmpty()) {
// System.out.println("该时间段内没有提交记录");
// return;
// }
//
// System.out.printf("📊 提交统计: %d 次提交\n", commits.size());
//
// // 按开发者统计
// Map<String, List<GiteaCommit>> commitsByDeveloper = new HashMap<>();
// for (var commit : commits) {
// if (commit.getCommit() != null && commit.getCommit().getAuthor() != null) {
// String developer = commit.getCommit().getAuthor().getName();
// if (developer == null || developer.isEmpty()) {
// developer = commit.getCommit().getAuthor().getEmail();
// }
// commitsByDeveloper.computeIfAbsent(developer, k -> new ArrayList<>()).add(commit);
// }
// }
//
// System.out.println("\n👥 开发者贡献:");
// System.out.println("-".repeat(60));
//
// commitsByDeveloper.entrySet().stream()
// .sorted((a, b) -> Integer.compare(b.getValue().size(), a.getValue().size()))
// .forEach(entry -> {
// int count = entry.getValue().size();
// double percentage = (count * 100.0) / commits.size();
// System.out.printf(" %-20s: %3d 次提交 (%5.1f%%)\n",
// truncate(entry.getKey(), 20), count, percentage);
// });
//
// // 最近提交记录
// System.out.println("\n🕐 最近提交记录:");
// System.out.println("-".repeat(60));
//
// commits.stream()
// .sorted((a, b) -> b.getCommitTime().compareTo(a.getCommitTime()))
// .limit(8)
// .forEach(commit -> {
// String time = commit.getCommitTime().format(
// DateTimeFormatter.ofPattern("MM-dd HH:mm"));
// String author = commit.getCommit().getAuthor().getName();
// if (author == null || author.isEmpty()) {
// author = commit.getCommit().getAuthor().getEmail();
// }
//
// System.out.printf("[%s] %-15s | %s\n",
// time,
// truncate(author, 15),
// truncate(commit.getShortMessage(), 50));
// });
//
// } catch (Exception e) {
// System.err.println("生成详细报告失败: " + e.getMessage());
// }
// }
//
// /**
// * 字符串截断
// */
// private String truncate(String str, int maxLength) {
// if (str == null) return "";
// if (str.length() <= maxLength) return str;
// return str.substring(0, maxLength - 3) + "...";
// }
//
// /**
// * 主方法 - 直接运行生成完整报告
// */
// public static void main(String[] args) {
// try {
// // 配置
// String giteaBaseUrl = "http://192.168.1.224:3000";
// String accessToken = "a9f1c8d3d6fefd73956604f496457faaa3672f89";
//
// // 创建报告生成器
// GiteaAnalysisTest generator = new GiteaAnalysisTest(giteaBaseUrl, accessToken);
//
// // 直接生成完整报告优化版
// generator.generateCompleteReport();
//
// } catch (Exception e) {
// System.err.println("❌ 程序异常: " + e.getMessage());
// e.printStackTrace();
// }
// }
//}

605
chenhai-ai/src/main/java/com/chenhai/chenhaiai/service/LongTermGiteaAnalysisService.java

@@ -0,0 +1,605 @@
//package com.chenhai.chenhaiai.service;
//
//import com.chenhai.chenhaiai.entity.git.*;
//import com.fasterxml.jackson.databind.ObjectMapper;
//import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;
//import jakarta.annotation.PostConstruct;
//import lombok.extern.slf4j.Slf4j;
//import org.springframework.beans.factory.annotation.Autowired;
//import org.springframework.beans.factory.annotation.Value;
//import org.springframework.core.task.TaskExecutor;
//import org.springframework.scheduling.annotation.Async;
//import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;
//import org.springframework.stereotype.Service;
//
//import java.net.http.HttpClient;
//import java.net.http.HttpRequest;
//import java.net.http.HttpResponse;
//import java.time.DayOfWeek;
//import java.time.Duration;
//import java.time.LocalDateTime;
//import java.time.ZonedDateTime;
//import java.time.format.DateTimeFormatter;
//import java.util.*;
//import java.util.concurrent.CompletableFuture;
//import java.util.concurrent.ConcurrentHashMap;
//import java.util.concurrent.TimeUnit;
//import java.util.concurrent.atomic.AtomicInteger;
//import java.util.stream.Collectors;
//
///**
// * 长时间范围Git分析服务
// * 专门处理任意时间范围的Git分析不影响原GiteaAnalysisService
// */
//@Slf4j
//@Service
//public class LongTermGiteaAnalysisService {
//
// @Value("${gitea.url:http://192.168.1.224:3000}")
// private String giteaBaseUrl;
//
// @Value("${gitea.token:a9f1c8d3d6fefd73956604f496457faaa3672f89}")
// private String accessToken;
//
// @Value("${gitea.longterm.max-active-repos:50}")
// private int maxActiveRepos;
//
// @Value("${gitea.longterm.commit-timeout-seconds:30}")
// private int commitTimeoutSeconds;
//
// @Value("${gitea.longterm.max-pages:30}")
// private int maxPages;
//
// @Autowired
// private ThreadPoolTaskExecutor giteaTaskExecutor;
//
// @Autowired
// private TaskExecutor taskExecutor;
//
// private HttpClient httpClient;
// private ObjectMapper objectMapper;
//
// @PostConstruct
// public void init() {
// log.info("初始化长时间范围Gitea分析服务...");
//
// httpClient = HttpClient.newBuilder()
// .connectTimeout(Duration.ofSeconds(15))
// .build();
//
// objectMapper = new ObjectMapper();
// objectMapper.registerModule(new JavaTimeModule());
//
// log.info("长时间范围Gitea分析服务初始化完成");
// }
//
// /**
// * 异步执行长时间范围Git分析
// * @param since 开始时间
// * @param until 结束时间
// * @return 结构化分析数据
// */
// @Async("giteaTaskExecutor")
// public CompletableFuture<GitAnalysisData> analyzeLongTermGitDataAsync(String since, String until) {
// String taskId = UUID.randomUUID().toString().substring(0, 8);
// long startTime = System.currentTimeMillis();
//
// log.info("开始长时间Git分析任务[{}]: {} 至 {}", taskId, since, until);
//
// return CompletableFuture.supplyAsync(() -> {
// try {
// // 验证时间范围
// ZonedDateTime sinceTime = ZonedDateTime.parse(since);
// ZonedDateTime untilTime = ZonedDateTime.parse(until);
//
// Duration duration = Duration.between(sinceTime, untilTime);
// log.info("任务[{}] 分析时间范围: {} 天", taskId, duration.toDays());
//
// if (duration.toDays() > 365) {
// log.warn("任务[{}] 时间范围超过一年,性能可能受影响", taskId);
// }
//
// GitAnalysisData analysisData = performLongTermAnalysis(since, until, taskId);
// long cost = System.currentTimeMillis() - startTime;
// log.info("长时间Git分析任务[{}]完成,耗时: {}ms", taskId, cost);
// return analysisData;
// } catch (Exception e) {
// log.error("长时间Git分析任务[{}]失败: {}", taskId, e.getMessage(), e);
// throw new RuntimeException("长时间Git分析失败: " + e.getMessage(), e);
// }
// }, giteaTaskExecutor);
// }
//
// /**
// * 执行长时间范围完整分析
// */
// private GitAnalysisData performLongTermAnalysis(String since, String until, String taskId) throws Exception {
// long startTime = System.currentTimeMillis();
//
// // 1. 获取所有仓库
// log.debug("长时间任务[{}] 获取仓库列表...", taskId);
// List<GiteaRepository> allRepos = getAllUserRepositories();
// int totalRepos = allRepos.size();
//
// if (totalRepos == 0) {
// return buildEmptyGitAnalysisData("无仓库数据");
// }
//
// log.info("长时间任务[{}] 发现仓库: {} 个", taskId, totalRepos);
//
// // 2. 解析时间范围
// ZonedDateTime sinceTime = ZonedDateTime.parse(since);
// ZonedDateTime untilTime = ZonedDateTime.parse(until);
//
// // 3. 快速筛选活跃仓库使用优化方法
// log.debug("长时间任务[{}] 快速筛选活跃仓库...", taskId);
// List<GiteaRepository> activeRepos = findLongTermActiveRepositories(allRepos, sinceTime, untilTime, taskId)
// .get(30, TimeUnit.SECONDS); // 增加超时时间
// int activeRepoCount = activeRepos.size();
//
// log.info("长时间任务[{}] 活跃仓库: {} 个", taskId, activeRepoCount);
//
// if (activeRepoCount == 0) {
// return buildSimpleGitAnalysisData(since, until, totalRepos, 0, 0, startTime);
// }
//
// // 限制活跃仓库数量
// if (activeRepoCount > maxActiveRepos) {
// log.warn("长时间任务[{}] 活跃仓库过多({}个),采样分析前{}个", taskId, activeRepoCount, maxActiveRepos);
// activeRepos = activeRepos.subList(0, maxActiveRepos);
// activeRepoCount = maxActiveRepos;
// }
//
// // 4. 详细分析活跃仓库
// log.debug("长时间任务[{}] 详细分析活跃仓库...", taskId);
// DetailedAnalysisResult detailResult = analyzeLongTermActiveRepositories(activeRepos, sinceTime, untilTime, taskId)
// .get(60, TimeUnit.SECONDS); // 长时间分析增加超时
//
// // 5. 构建结构化数据
// long analysisTime = System.currentTimeMillis() - startTime;
// return buildGitAnalysisData(since, until, totalRepos, activeRepoCount, detailResult, analysisTime);
// }
//
// /**
// * 长时间范围快速筛选活跃仓库优化版
// */
// private CompletableFuture<List<GiteaRepository>> findLongTermActiveRepositories(List<GiteaRepository> allRepos,
// ZonedDateTime since,
// ZonedDateTime until,
// String taskId) {
// return CompletableFuture.supplyAsync(() -> {
// List<GiteaRepository> activeRepos = Collections.synchronizedList(new ArrayList<>());
// List<CompletableFuture<Boolean>> futures = new ArrayList<>();
// AtomicInteger checked = new AtomicInteger(0);
// final int totalRepos = allRepos.size();
//
// for (GiteaRepository repo : allRepos) {
// final GiteaRepository currentRepo = repo;
//
// CompletableFuture<Boolean> future = CompletableFuture.supplyAsync(() -> {
// try {
// // 使用优化版的快速检查方法
// return hasCommitsInLongTermRange(currentRepo.getFullPath(), since, until);
// } catch (Exception e) {
// log.debug("长时间任务[{}] 仓库 {} 快速检查失败: {}", taskId, currentRepo.getFullPath(), e.getMessage());
// return false;
// }
// }, taskExecutor);
//
// future.thenAccept(hasCommits -> {
// if (hasCommits) {
// activeRepos.add(currentRepo);
// }
// int done = checked.incrementAndGet();
// if (done % 20 == 0 || done == totalRepos) {
// log.debug("长时间任务[{}] 快速检查进度: {}/{} | 活跃: {}", taskId, done, totalRepos, activeRepos.size());
// }
// });
//
// futures.add(future);
// }
//
// CompletableFuture<Void> allChecks = CompletableFuture.allOf(
// futures.toArray(new CompletableFuture[0]));
//
// try {
// allChecks.get(15, TimeUnit.SECONDS);
// } catch (Exception e) {
// log.warn("长时间任务[{}] 部分仓库快速检查未完成: {}", taskId, e.getMessage());
// }
//
// return activeRepos;
// }, taskExecutor);
// }
//
// /**
// * 长时间范围详细分析活跃仓库
// */
// private CompletableFuture<DetailedAnalysisResult> analyzeLongTermActiveRepositories(List<GiteaRepository> activeRepos,
// ZonedDateTime sinceTime,
// ZonedDateTime untilTime,
// String taskId) {
// return CompletableFuture.supplyAsync(() -> {
// Map<String, DeveloperData> devDataMap = new ConcurrentHashMap<>();
// Map<String, RepoData> repoDataMap = new ConcurrentHashMap<>();
// Map<DayOfWeek, Integer> dayStats = new ConcurrentHashMap<>();
// Map<Integer, Integer> hourStats = new ConcurrentHashMap<>();
// Map<String, Integer> fileTypeStats = new ConcurrentHashMap<>();
// AtomicInteger totalCommits = new AtomicInteger(0);
// AtomicInteger processed = new AtomicInteger(0);
//
// List<CompletableFuture<Void>> futures = activeRepos.stream()
// .map(repo -> CompletableFuture.runAsync(() -> {
// try {
// analyzeLongTermSingleRepository(repo, sinceTime, untilTime,
// devDataMap, repoDataMap, dayStats, hourStats, fileTypeStats, totalCommits);
// } catch (Exception e) {
// log.debug("长时间任务[{}] 仓库 {} 详细分析失败: {}", taskId, repo.getFullPath(), e.getMessage());
// } finally {
// int done = processed.incrementAndGet();
// if (done % 5 == 0 || done == activeRepos.size()) {
// log.debug("长时间任务[{}] 详细分析进度: {}/{} | 提交: {}", taskId, done, activeRepos.size(), totalCommits.get());
// }
// }
// }, taskExecutor))
// .collect(Collectors.toList());
//
// CompletableFuture<Void> allFutures = CompletableFuture.allOf(
// futures.toArray(new CompletableFuture[0]));
//
// try {
// allFutures.get(30, TimeUnit.SECONDS);
// } catch (Exception e) {
// log.warn("长时间任务[{}] 部分仓库详细分析未完成: {}", taskId, e.getMessage());
// }
//
// return new DetailedAnalysisResult(devDataMap, repoDataMap, dayStats, hourStats, fileTypeStats, totalCommits.get());
// }, taskExecutor);
// }
//
// // ==================== 核心方法长时间范围优化版====================
//
// /**
// * 快速检查是否有提交长时间范围优化版
// * 只检查最近一段时间避免全量查询
// */
// private boolean hasCommitsInLongTermRange(String repoFullName, ZonedDateTime since, ZonedDateTime until) throws Exception {
// // 策略检查最近6个月或整个时间段的1/4取较小值
// Duration fullDuration = Duration.between(since, until);
// Duration checkDuration = Duration.ofDays(180); // 6个月
//
// if (fullDuration.toDays() < 180) {
// // 如果总时间小于6个月检查最后1/4的时间段
// checkDuration = fullDuration.dividedBy(4);
// }
//
// ZonedDateTime checkStart = until.minus(checkDuration);
// if (checkStart.isBefore(since)) {
// checkStart = since;
// }
//
// String sinceStr = checkStart.format(DateTimeFormatter.ISO_OFFSET_DATE_TIME);
// String untilStr = until.format(DateTimeFormatter.ISO_OFFSET_DATE_TIME);
//
// String url = String.format("%s/api/v1/repos/%s/commits?since=%s&until=%s&limit=1",
// giteaBaseUrl, repoFullName, sinceStr, untilStr);
//
// HttpRequest request = HttpRequest.newBuilder()
// .uri(java.net.URI.create(url))
// .header("Authorization", "token " + accessToken)
// .timeout(Duration.ofSeconds(8))
// .GET()
// .build();
//
// HttpResponse<String> response = httpClient.send(request, HttpResponse.BodyHandlers.ofString());
//
// if (response.statusCode() != 200) {
// return false;
// }
//
// List<GiteaCommit> commits = objectMapper.readValue(
// response.body(),
// objectMapper.getTypeFactory().constructCollectionType(List.class, GiteaCommit.class));
//
// return !commits.isEmpty();
// }
//
// /**
// * 获取长时间范围内的提交完整获取无3个月限制
// */
// private List<GiteaCommit> getLongTermCommitsInRange(String repoFullName, ZonedDateTime since, ZonedDateTime until) throws Exception {
// String sinceStr = since.format(DateTimeFormatter.ISO_OFFSET_DATE_TIME);
// String untilStr = until.format(DateTimeFormatter.ISO_OFFSET_DATE_TIME);
//
// String baseUrl = String.format("%s/api/v1/repos/%s/commits?since=%s&until=%s",
// giteaBaseUrl, repoFullName, sinceStr, untilStr);
//
// // 使用配置的超时时间
// List<GiteaCommit> commits = fetchWithPaginationLongTerm(baseUrl, GiteaCommit.class, commitTimeoutSeconds);
//
// // 数据量过大时警告
// if (commits.size() > 1000) {
// log.warn("仓库 {} 提交数量过大: {},建议缩小时间范围", repoFullName, commits.size());
// }
//
// return commits;
// }
//
// /**
// * 分析单个仓库长时间范围版
// */
// private void analyzeLongTermSingleRepository(GiteaRepository repo,
// ZonedDateTime sinceTime,
// ZonedDateTime untilTime,
// Map<String, DeveloperData> devDataMap,
// Map<String, RepoData> repoDataMap,
// Map<DayOfWeek, Integer> dayStats,
// Map<Integer, Integer> hourStats,
// Map<String, Integer> fileTypeStats,
// AtomicInteger totalCommits) throws Exception {
//
// String repoFullName = repo.getFullPath();
//
// // 使用长时间范围方法获取提交
// List<GiteaCommit> commits = getLongTermCommitsInRange(repoFullName, sinceTime, untilTime);
//
// if (!commits.isEmpty()) {
// RepoData repoData = new RepoData();
// repoData.repoName = repoFullName;
// repoData.displayName = repo.getRepoName();
//
// for (GiteaCommit commit : commits) {
// totalCommits.incrementAndGet();
// String author = getAuthorName(commit);
//
// DeveloperData devData = devDataMap.computeIfAbsent(author, k -> new DeveloperData(author));
// devData.commitCount++;
// devData.repos.add(repoFullName);
//
// repoData.commitCount++;
// repoData.developers.add(author);
//
// ZonedDateTime commitTime = commit.getCommitTime();
// if (commitTime != null) {
// DayOfWeek day = commitTime.getDayOfWeek();
// int hour = commitTime.getHour();
// dayStats.merge(day, 1, Integer::sum);
// hourStats.merge(hour, 1, Integer::sum);
// }
//
// if (commit.getFiles() != null) {
// for (GiteaCommit.ChangedFile file : commit.getFiles()) {
// String fileType = file.getFileType();
// fileTypeStats.merge(fileType, 1, Integer::sum);
// }
// }
// }
//
// repoDataMap.put(repoFullName, repoData);
// }
// }
//
// // ==================== 辅助方法 ====================
//
// private List<GiteaRepository> getAllUserRepositories() throws Exception {
// String baseUrl = giteaBaseUrl + "/api/v1/user/repos?limit=50";
// return fetchWithPaginationLongTerm(baseUrl, GiteaRepository.class, 10);
// }
//
// private <T> List<T> fetchWithPaginationLongTerm(String baseUrl, Class<T> clazz, int timeoutSeconds) throws Exception {
// List<T> results = new ArrayList<>();
// int page = 1;
//
// while (page <= maxPages) {
// String pageUrl = baseUrl + (baseUrl.contains("?") ? "&" : "?") + "page=" + page;
//
// HttpRequest request = HttpRequest.newBuilder()
// .uri(java.net.URI.create(pageUrl))
// .header("Authorization", "token " + accessToken)
// .timeout(Duration.ofSeconds(timeoutSeconds))
// .GET()
// .build();
//
// HttpResponse<String> response = httpClient.send(request, HttpResponse.BodyHandlers.ofString());
//
// if (response.statusCode() != 200) {
// log.warn("长时间分析API请求失败: {} - {}", response.statusCode(), response.body());
// break;
// }
//
// List<T> pageResults = objectMapper.readValue(
// response.body(),
// objectMapper.getTypeFactory().constructCollectionType(List.class, clazz));
//
// if (pageResults.isEmpty()) {
// break;
// }
//
// results.addAll(pageResults);
//
// if (pageResults.size() < 50) {
// break;
// }
//
// page++;
// }
//
// return results;
// }
//
// private String getAuthorName(GiteaCommit commit) {
// if (commit.getCommit() != null &&
// commit.getCommit().getAuthor() != null &&
// commit.getCommit().getAuthor().getName() != null) {
// return commit.getCommit().getAuthor().getName();
// }
// return "未知作者";
// }
//
// // ==================== GitAnalysisData构建方法 ====================
//
// /**
// * 构建完整的GitAnalysisData
// */
// private GitAnalysisData buildGitAnalysisData(String since, String until, int totalRepos, int activeRepos,
// GiteaAnalysisService.DetailedAnalysisResult detailResult, long analysisTime) {
// GitAnalysisData data = new GitAnalysisData();
//
// // 基础信息
// data.setBasicInfo(new BasicInfo(
// since + " 至 " + until,
// totalRepos,
// activeRepos,
// detailResult.getDevDataMap().size(),
// detailResult.getTotalCommits(),
// analysisTime,
// "快速筛选 + 详细分析"
// ));
//
// // 开发者排行榜
// if (!detailResult.getDevDataMap().isEmpty()) {
// List<GiteaAnalysisService.DeveloperData> devList = new ArrayList<>(detailResult.getDevDataMap().values());
// devList.sort((a, b) -> Integer.compare(b.commitCount, a.commitCount));
//
// List<DeveloperRank> developerRanks = new ArrayList<>();
// int rank = 1;
// for (GiteaAnalysisService.DeveloperData dev : devList) {
// if (rank > 10) break;
// developerRanks.add(new DeveloperRank(rank++, dev.name, dev.commitCount, dev.repos.size()));
// }
// data.setDeveloperRanks(developerRanks);
// }
//
// // 仓库排行榜
// if (!detailResult.getRepoDataMap().isEmpty()) {
// List<GiteaAnalysisService.RepoData> repoList = new ArrayList<>(detailResult.getRepoDataMap().values());
// repoList.sort((a, b) -> Integer.compare(b.commitCount, a.commitCount));
//
// List<RepoRank> repoRanks = new ArrayList<>();
// int rank = 1;
// for (GiteaAnalysisService.RepoData repo : repoList) {
// if (rank > 10) break;
// repoRanks.add(new RepoRank(rank++, repo.repoName, repo.displayName, repo.commitCount, repo.developers.size()));
// }
// data.setRepoRanks(repoRanks);
// }
//
// // 时间分布按星期
// if (!detailResult.getDayStats().isEmpty()) {
// String[] dayNames = {"周一", "周二", "周三", "周四", "周五", "周六", "周日"};
// DayOfWeek[] days = DayOfWeek.values();
//
// List<DayStats> dayStatsList = new ArrayList<>();
// for (int i = 0; i < 7; i++) {
// int count = detailResult.getDayStats().getOrDefault(days[i], 0);
// dayStatsList.add(new DayStats(dayNames[i], count));
// }
// data.setDayStats(dayStatsList);
// }
//
// // 文件类型统计
// if (!detailResult.getFileTypeStats().isEmpty()) {
// List<Map.Entry<String, Integer>> fileList = new ArrayList<>(detailResult.getFileTypeStats().entrySet());
// fileList.sort((a, b) -> Integer.compare(b.getValue(), a.getValue()));
//
// List<FileTypeStats> fileTypeStatsList = new ArrayList<>();
// for (Map.Entry<String, Integer> entry : fileList) {
// fileTypeStatsList.add(new FileTypeStats(entry.getKey(), entry.getValue()));
// }
// data.setFileTypeStats(fileTypeStatsList);
// }
//
// // 生成时间
// data.setGeneratedTime(LocalDateTime.now().format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")));
//
// return data;
// }
//
// /**
// * 构建空的GitAnalysisData无仓库时
// */
// private GitAnalysisData buildEmptyGitAnalysisData(String message) {
// GitAnalysisData data = new GitAnalysisData();
// data.setBasicInfo(new BasicInfo(
// "无时间范围",
// 0,
// 0,
// 0,
// 0,
// 0,
// "无仓库数据"
// ));
// data.setGeneratedTime(LocalDateTime.now().format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")));
// return data;
// }
//
// /**
// * 构建简单的GitAnalysisData无活跃仓库时
// */
// private GitAnalysisData buildSimpleGitAnalysisData(String since, String until, int totalRepos,
// int activeRepos, int totalDevs, long startTime) {
// long analysisTime = System.currentTimeMillis() - startTime;
//
// GitAnalysisData data = new GitAnalysisData();
// data.setBasicInfo(new BasicInfo(
// since + " 至 " + until,
// totalRepos,
// activeRepos,
// totalDevs,
// 0,
// analysisTime,
// "快速筛选"
// ));
// data.setGeneratedTime(LocalDateTime.now().format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")));
//
// return data;
// }
//
// // ==================== 内部数据类 ====================
//
// private static class DeveloperData {
// String name;
// int commitCount = 0;
// Set<String> repos = new HashSet<>();
//
// DeveloperData(String name) {
// this.name = name;
// }
// }
//
// private static class RepoData {
// String repoName;
// String displayName;
// int commitCount = 0;
// Set<String> developers = new HashSet<>();
// }
//
// // ==================== 详细分析结果类 ====================
//
// private static class DetailedAnalysisResult {
// private final Map<String, GiteaAnalysisService.DeveloperData> devDataMap;
// private final Map<String, GiteaAnalysisService.RepoData> repoDataMap;
// private final Map<DayOfWeek, Integer> dayStats;
// private final Map<Integer, Integer> hourStats;
// private final Map<String, Integer> fileTypeStats;
// private final int totalCommits;
//
// public DetailedAnalysisResult(Map<String, GiteaAnalysisService.DeveloperData> devDataMap, Map<String, GiteaAnalysisService.RepoData> repoDataMap,
// Map<DayOfWeek, Integer> dayStats, Map<Integer, Integer> hourStats,
// Map<String, Integer> fileTypeStats, int totalCommits) {
// this.devDataMap = devDataMap;
// this.repoDataMap = repoDataMap;
// this.dayStats = dayStats;
// this.hourStats = hourStats;
// this.fileTypeStats = fileTypeStats;
// this.totalCommits = totalCommits;
// }
//
// public Map<String, GiteaAnalysisService.DeveloperData> getDevDataMap() { return devDataMap; }
// public Map<String, GiteaAnalysisService.RepoData> getRepoDataMap() { return repoDataMap; }
// public Map<DayOfWeek, Integer> getDayStats() { return dayStats; }
// public Map<Integer, Integer> getHourStats() { return hourStats; }
// public Map<String, Integer> getFileTypeStats() { return fileTypeStats; }
// public int getTotalCommits() { return totalCommits; }
// }
//}
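
If LongTermGiteaAnalysisService above is ever re-enabled, note that it refers to GiteaAnalysisService.DeveloperData, GiteaAnalysisService.RepoData and GiteaAnalysisService.DetailedAnalysisResult, yet those nested classes are declared private static inside GiteaAnalysisService, so the file would not compile as written. One way around that, sketched with a hypothetical package-private holder (the class and file name are assumptions, not part of this commit):

// Hypothetical shared types, sketched only to illustrate the refactoring.
package com.chenhai.chenhaiai.service;

import java.util.HashSet;
import java.util.Set;

final class GitStatsTypes {

    private GitStatsTypes() {
    }

    /** Per-developer counters that both analysis services could share. */
    static class DeveloperData {
        final String name;
        int commitCount = 0;
        final Set<String> repos = new HashSet<>();

        DeveloperData(String name) {
            this.name = name;
        }
    }

    /** Per-repository counters that both analysis services could share. */
    static class RepoData {
        String repoName;
        String displayName;
        int commitCount = 0;
        final Set<String> developers = new HashSet<>();
    }
}

Both services could then drop their private copies and reference GitStatsTypes.DeveloperData and GitStatsTypes.RepoData directly.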

208
chenhai-ai/src/main/java/com/chenhai/chenhaiai/service/MarkdownService.java

@@ -0,0 +1,208 @@
package com.chenhai.chenhaiai.service;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.vladsch.flexmark.html.HtmlRenderer;
import com.vladsch.flexmark.parser.Parser;
import com.vladsch.flexmark.util.ast.Node;
import org.springframework.stereotype.Service;
import java.util.List;
import java.util.Map;
@Service
public class MarkdownService {
private final ObjectMapper objectMapper = new ObjectMapper();
/**
* 查询结果原样全部转换为Markdown(不筛选、不截断)
*/
public String fullJsonToMarkdown(String jsonStr) throws Exception {
Map<String, Object> data = objectMapper.readValue(jsonStr, Map.class);
StringBuilder md = new StringBuilder();
// 1. 基本信息原样转换
md.append("# ").append(getValue(data, "deptName")).append("\n\n");
md.append("## ").append(getValue(data, "weekDisplay")).append("\n\n");
// 2. 部门信息
Object deptObj = data.get("dept");
if (deptObj instanceof Map) {
md.append("**部门**:").append(getValue((Map)deptObj, "deptName")).append("\n\n");
}
// 3. 计划任务 - 全部转换
md.append("## 计划任务\n\n");
Object planDetails = data.get("planDetails");
if (planDetails instanceof List) {
List<Map<String, Object>> plans = (List<Map<String, Object>>) planDetails;
md.append("**总数**:").append(plans.size()).append(" 项\n\n");
md.append("| 项目 | 任务内容 | 负责人 | 完成状态 | 备注 |\n");
md.append("|------|----------|--------|----------|------|\n");
for (Map<String, Object> plan : plans) {
md.append(String.format("| %s | %s | %s | %s | %s |\n",
getValue(plan, "projectName"),
getValue(plan, "content"),
getValue(plan, "developer"),
getValue(plan, "superviseStatus"),
getValue(plan, "note")
));
}
}
md.append("\n");
// 4. 工作日报 - 全部转换
md.append("## 工作日报\n\n");
Object dailyPapers = data.get("dailyPapers");
if (dailyPapers instanceof List) {
List<Map<String, Object>> dailies = (List<Map<String, Object>>) dailyPapers;
md.append("**总数**:").append(dailies.size()).append(" 条\n\n");
md.append("| 日期 | 项目 | 工作内容 | 工时 |\n");
md.append("|------|------|----------|------|\n");
for (Map<String, Object> daily : dailies) {
md.append(String.format("| %s | %s | %s | %s |\n",
getValue(daily, "dailyPaperDate"),
getValue(daily, "projectName"),
getValue(daily, "content"),
getValue(daily, "dailyPaperHour")
));
}
}
md.append("\n");
// 5. 成员信息 - 全部转换
md.append("## 团队成员\n\n");
Object userInfos = data.get("userInfos");
if (userInfos instanceof List) {
List<Map<String, Object>> users = (List<Map<String, Object>>) userInfos;
md.append("| 用户ID | 姓名 |\n");
md.append("|--------|------|\n");
for (Map<String, Object> user : users) {
md.append(String.format("| %s | %s |\n",
getValue(user, "userId"),
getValue(user, "userName")
));
}
}
md.append("\n");
// 6. Git数据 - 全部转换(如果有)
md.append("## Git提交分析\n\n");
Object gitAnalysis = data.get("gitAnalysis");
if (gitAnalysis instanceof Map) {
Map<String, Object> git = (Map<String, Object>) gitAnalysis;
// 基础信息
Object basicInfo = git.get("basicInfo");
if (basicInfo instanceof Map) {
Map<String, Object> basic = (Map<String, Object>) basicInfo;
md.append("### 基础信息\n\n");
md.append("| 项目 | 值 |\n");
md.append("|------|----|\n");
for (Map.Entry<String, Object> entry : basic.entrySet()) {
md.append(String.format("| %s | %s |\n",
entry.getKey(),
entry.getValue()
));
}
md.append("\n");
}
// 成员排名
Object devRanks = git.get("developerRanks");
if (devRanks instanceof List) {
md.append("### 成员提交排名\n\n");
md.append("| 排名 | 成员 | 提交次数 | 参与项目数 |\n");
md.append("|------|------|----------|------------|\n");
List<Map<String, Object>> ranks = (List<Map<String, Object>>) devRanks;
for (Map<String, Object> rank : ranks) {
md.append(String.format("| %s | %s | %s | %s |\n",
getValue(rank, "rank"),
getValue(rank, "name"),
getValue(rank, "commitCount"),
getValue(rank, "repoCount")
));
}
md.append("\n");
}
// 仓库排名
Object repoRanks = git.get("repoRanks");
if (repoRanks instanceof List) {
md.append("### 仓库活跃度排名\n\n");
md.append("| 排名 | 仓库 | 提交次数 | 开发者数 |\n");
md.append("|------|------|----------|----------|\n");
List<Map<String, Object>> repos = (List<Map<String, Object>>) repoRanks;
for (Map<String, Object> repo : repos) {
md.append(String.format("| %s | %s | %s | %s |\n",
getValue(repo, "rank"),
getValue(repo, "displayName"),
getValue(repo, "commitCount"),
getValue(repo, "developerCount")
));
}
md.append("\n");
}
// 每日统计
Object dayStats = git.get("dayStats");
if (dayStats instanceof List) {
md.append("### 每日提交统计\n\n");
md.append("| 星期 | 提交次数 |\n");
md.append("|------|----------|\n");
List<Map<String, Object>> days = (List<Map<String, Object>>) dayStats;
for (Map<String, Object> day : days) {
md.append(String.format("| %s | %s |\n",
getValue(day, "dayName"),
getValue(day, "commitCount")
));
}
md.append("\n");
}
} else {
md.append("无Git数据\n\n");
}
// 7. 其他字段原样显示
md.append("## 其他信息\n\n");
// 周计划主信息
Object planMain = data.get("planMain");
if (planMain instanceof Map) {
md.append("### 周计划主信息\n\n");
Map<String, Object> main = (Map<String, Object>) planMain;
for (Map.Entry<String, Object> entry : main.entrySet()) {
md.append("- **").append(entry.getKey()).append("**: ").append(entry.getValue()).append("\n");
}
md.append("\n");
}
// 是否是研发部门
Object isResearch = data.get("isResearchDept");
if (isResearch != null) {
md.append("**是否是研发部门**: ").append(isResearch).append("\n\n");
}
return md.toString();
}
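/*
 * Minimal usage sketch for the conversion method above. The enclosing method's name is not
 * visible in this hunk, so "convertToMarkdown" below is only a placeholder for whatever it is
 * actually called; the map keys come from the code above:
 *
 *   Map<String, Object> data = new HashMap<>();
 *   data.put("weekDisplay", "2024年第12周");
 *   data.put("dept", Map.of("deptName", "研发部"));
 *   data.put("planDetails", List.of(
 *       Map.of("projectName", "A项目", "content", "接口联调", "developer", "张三",
 *              "superviseStatus", "进行中", "note", "")));
 *   String md = convertToMarkdown(data);
 *   // md now starts with "## 2024年第12周" and contains a "## 计划任务" table with one row
 */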
/**
* 安全获取值只转义特殊字符
*/
private String getValue(Map<String, Object> map, String key) {
Object value = map.get(key);
if (value == null) return "";
String str = value.toString();
// 只转义表格分隔符保持其他原样
return str.replace("|", "\\|").replace("\n", " ");
}
}

172
chenhai-ai/src/main/java/com/chenhai/chenhaiai/service/SimpleConcurrentTest.java

@@ -0,0 +1,172 @@
package com.chenhai.chenhaiai.service;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.*;
import java.util.concurrent.atomic.AtomicInteger;
public class SimpleConcurrentTest {
// 模拟的Gitea分析服务
static class MockGiteaAnalysisService {
private final ExecutorService executor;
private final AtomicInteger activeTasks = new AtomicInteger(0);
public MockGiteaAnalysisService(int poolSize) {
this.executor = Executors.newFixedThreadPool(poolSize);
System.out.println("初始化线程池,大小: " + poolSize);
}
public CompletableFuture<String> performAnalysisAsync(String since, String until) {
activeTasks.incrementAndGet();
String taskId = "Task-" + System.currentTimeMillis();
System.out.printf(" [%s] 开始处理: %s 至 %s\n",
taskId, since.substring(0, 10), until.substring(0, 10));
return CompletableFuture.supplyAsync(() -> {
try {
// 模拟耗时操作1分钟
Thread.sleep(60000);
// 模拟结果
String result = String.format("分析报告: %s - %s (仓库: 15个, 提交: 230次)",
since.substring(0, 10), until.substring(0, 10));
return result;
} catch (InterruptedException e) {
throw new RuntimeException("任务被中断");
} finally {
activeTasks.decrementAndGet();
}
}, executor);
}
public int getActiveTaskCount() {
return activeTasks.get();
}
public void shutdown() {
executor.shutdown();
}
}
public static void main(String[] args) {
System.out.println("🚀 开始并发模拟测试");
System.out.println("模拟场景: 5个用户同时请求Git分析");
System.out.println("每个分析任务耗时: 60秒\n");
MockGiteaAnalysisService service = new MockGiteaAnalysisService(8);
try {
// 测试数据
String since = getLastMonday();
String until = getLastSunday();
// 并发测试
testConcurrentRequests(service, since, until);
} finally {
service.shutdown();
System.out.println("\n✅ 测试完成");
}
}
private static void testConcurrentRequests(MockGiteaAnalysisService service, String since, String until) {
List<CompletableFuture<String>> futures = new ArrayList<>();
CountDownLatch startLatch = new CountDownLatch(1);
AtomicInteger completed = new AtomicInteger(0);
long startTime = System.currentTimeMillis();
// 创建5个并发任务
for (int i = 1; i <= 5; i++) {
final int userId = i;
CompletableFuture<String> future = CompletableFuture.supplyAsync(() -> {
try {
// 等待统一开始
startLatch.await();
System.out.printf("👤 用户%d: 开始分析...\n", userId);
return service.performAnalysisAsync(since, until).get();
} catch (Exception e) {
System.err.printf("用户%d失败: %s\n", userId, e.getMessage());
return null;
}
}).thenApply(result -> {
int done = completed.incrementAndGet();
long elapsed = (System.currentTimeMillis() - startTime) / 1000;
if (result != null) {
System.out.printf("✅ 用户%d完成! (耗时: %ds, 进度: %d/5)\n",
userId, elapsed, done);
}
return result;
});
futures.add(future);
}
// 统一开始所有任务
System.out.println("\n📢 所有用户准备就绪,3秒后同时开始...");
sleep(3000);
System.out.println("🎬 开始!");
startLatch.countDown();
// 等待所有完成
try {
CompletableFuture<Void> allDone = CompletableFuture.allOf(
futures.toArray(new CompletableFuture[0]));
allDone.get(65, TimeUnit.SECONDS); // 超时65秒
long totalTime = System.currentTimeMillis() - startTime;
System.out.printf("\n🎉 全部完成! 总耗时: %.1f秒\n", totalTime / 1000.0);
// 显示并发效果
System.out.println("\n⚡ 并发效果分析:");
System.out.println("串行处理: 5任务 × 60秒 = 300秒 (5分钟)");
System.out.printf("并发处理: %.1f秒\n", totalTime / 1000.0);
System.out.printf("性能提升: %.1f倍\n", 300.0 / (totalTime / 1000.0));
} catch (TimeoutException e) {
System.err.println("⏰ 测试超时");
} catch (Exception e) {
System.err.println("测试异常: " + e.getMessage());
}
}
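/*
 * Note on the concurrency model above (an assumption about default JVM pool sizing, not verified
 * on a specific machine): the outer CompletableFuture.supplyAsync(...) calls run on the common
 * ForkJoinPool, whose default parallelism is typically (CPU cores - 1). Each of those tasks then
 * blocks on performAnalysisAsync(...).get(), so on a machine with fewer than 5 available
 * common-pool threads the five "users" will not truly start together and the 65-second timeout
 * may trip. One way to keep the submission layer non-blocking is to compose instead of blocking:
 *
 *   CompletableFuture<String> future = service.performAnalysisAsync(since, until)
 *       .thenApply(result -> { completed.incrementAndGet(); return result; });
 *
 * or to pass an explicit executor as the second argument of the outer supplyAsync call.
 */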
private static String getLastMonday() {
ZonedDateTime time = ZonedDateTime.now()
.minusWeeks(1)
.with(java.time.DayOfWeek.MONDAY)
.withHour(0).withMinute(0).withSecond(0);
return time.format(DateTimeFormatter.ISO_OFFSET_DATE_TIME);
}
private static String getLastSunday() {
ZonedDateTime time = ZonedDateTime.now()
.minusWeeks(1)
.with(java.time.DayOfWeek.SUNDAY)
.withHour(23).withMinute(59).withSecond(59);
return time.format(DateTimeFormatter.ISO_OFFSET_DATE_TIME);
}
private static void sleep(long millis) {
try {
Thread.sleep(millis);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
}
}
}

907
chenhai-ai/src/main/java/com/chenhai/chenhaiai/service/gitNew/GiteaDataService.java

@@ -0,0 +1,907 @@
package com.chenhai.chenhaiai.service.gitNew;
import com.chenhai.chenhaiai.entity.git.*;
import com.chenhai.common.core.redis.RedisCache;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.time.Duration;
import java.time.LocalDate;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;
import java.util.*;
import java.util.concurrent.TimeUnit;
/**
* Gitea data synchronization service.
*
* <p>This service syncs data from the Gitea code-hosting platform into the local Redis cache. It
* provides both full and incremental synchronization, supports batch processing of multiple
* repositories, and feeds scenarios such as commit analysis and developer-activity statistics.</p>
*
* <h3>Core features</h3>
* <ol>
* <li><b>Full sync</b>: pulls a repository's entire commit history; intended for first-time data initialization.</li>
* <li><b>Incremental sync</b>: fetches only new commits based on the last sync time; intended for routine updates.</li>
* <li><b>Smart time management</b>: records and manages sync timestamps automatically, forming a continuous sync chain.</li>
* <li><b>Paginated fetching</b>: handles Gitea API pagination so large repositories are retrieved completely.</li>
* <li><b>Redis cache management</b>: stores commit data in a structured form with multi-dimensional indexes.</li>
* </ol>
*
* <h3>Data storage layout</h3>
* <pre>
* Redis key structure:
* 1. Repository list:    gitea:repos:list              -> Map<repoId, repoFullPath>
* 2. Repository info:    gitea:repo:{fullPath}         -> Map<field, value>
* 3. Commit detail:      gitea:commit:{fullPath}:{sha} -> Map<field, value>
* 4. Date index:         gitea:commits:by_date:{date}  -> Set<commit SHA>
* 5. Repo commit index:  gitea:repo_commits:{fullPath} -> Set<commit SHA>
* 6. Sync progress:      gitea:sync:progress           -> Map<progress info>
* 7. Last sync time:     gitea:last_sync:{fullPath}    -> timestamp (new)
* </pre>
*
* <h3>Usage scenarios</h3>
*
* <h4>Case 1: first deployment (full initialization)</h4>
* <pre>
* // 1. Fetch all repositories
* List<GiteaRepository> repos = getAllRepositories();
*
* // 2. Full-sync every repository (ideally run overnight)
* syncAllReposAllCommits();
*
* // 3. Check progress
* Map<String, Object> progress = getSyncProgress();
* </pre>
*
* <h4>Case 2: routine updates (incremental sync)</h4>
* <pre>
* // 1. Call from a scheduled job (e.g. every 10 minutes)
* syncIncrementalCommitsForRepo("ChenHaiTech/project1");
*
* // 2. Or incrementally sync all repositories in batch;
* //    this can be driven by an external scheduler, no internal timer is required
* syncIncrementalForAllRepos(); // if such a method exists
* </pre>
*
* <h4>Case 3: handling a newly added repository</h4>
* <pre>
* // 1. Refresh the repository list
* List<GiteaRepository> repos = getAllRepositories();
*
* // 2. Full-sync the new repository
* syncAllCommitsForRepo("ChenHaiTech/new-project");
*
* // 3. It then falls into the incremental sync flow automatically
* </pre>
*
* <h3>Core methods</h3>
* <table border="1">
* <tr>
* <th>Method</th>
* <th>Purpose</th>
* <th>Typical use</th>
* <th>Estimated duration</th>
* </tr>
* <tr>
* <td>getAllRepositories()</td>
* <td>Fetch all repositories of the user</td>
* <td>Initial fetch; periodic refresh of the repository list</td>
* <td>1-5 seconds</td>
* </tr>
* <tr>
* <td>syncAllCommitsForRepo()</td>
* <td>Sync the full commit history of a single repository</td>
* <td>New repository initialization; data repair</td>
* <td>Depends on repository size</td>
* </tr>
* <tr>
* <td>syncIncrementalCommitsForRepo()</td>
* <td>Incrementally sync a single repository</td>
* <td>Routine updates; scheduled jobs</td>
* <td>Seconds to minutes</td>
* </tr>
* <tr>
* <td>syncAllReposAllCommits()</td>
* <td>Full-sync every repository in batch</td>
* <td>First deployment; monthly full refresh</td>
* <td>Several hours (183 repositories)</td>
* </tr>
* <tr>
* <td>getSyncProgress()</td>
* <td>Read the batch sync progress</td>
* <td>Monitoring long-running sync jobs</td>
* <td>Real time</td>
* </tr>
* </table>
*
* <h3>Smart time-chain mechanism</h3>
* <p>Incremental sync chooses its start time intelligently to avoid missing or duplicating data:</p>
* <ul>
* <li><b>Sync record exists</b>: start from the last sync time minus 10 minutes to guard against boundary misses.</li>
* <li><b>No record but data exists</b>: start from 24 hours ago, a conservative strategy that protects existing full-sync data.</li>
* <li><b>No data at all</b>: start from 7 days ago (first incremental run on a new repository).</li>
* <li><b>Time guard</b>: the query window is capped at 30 days to prevent excessively long queries.</li>
* </ul>
*
* <h3>Notes</h3>
* <ol>
* <li>If full-sync data already exists, incremental sync detects it and falls back to the conservative strategy.</li>
* <li>Redis data expires after 30 days by default; data that is not accessed for a long time is cleaned up automatically.</li>
* <li>Batch operations are spaced out and rate-limited to avoid putting pressure on the Gitea server.</li>
* <li>All public method signatures stay stable, so external callers need no changes.</li>
* <li>Incremental sync relies on the last-sync-time record to form a continuous time chain.</li>
* </ol>
*
* <h3>Configuration</h3>
* <pre>
* # application.yml
* gitea:
*   url: http://192.168.1.224:3000 # Gitea server address
*   token: a9f1c8d3d6fefd73956604f496457faaa3672f89 # access token
* </pre>
*
* <h3>Release notes</h3>
* <p><b>Main improvements in this version:</b></p>
* <ul>
* <li>Stronger incremental sync logic based on the last sync time.</li>
* <li>New time-chain management to avoid missed data.</li>
* <li>All external interfaces remain compatible; existing callers need no changes.</li>
* <li>More detailed logging for easier troubleshooting.</li>
* </ul>
*
* @author auto-generated
* @since 2024-01
* @version 2.0 (enhanced incremental sync)
*/
@Slf4j
@Service
public class GiteaDataService {
@Value("${gitea.url:http://chrdcenter.chenhaitech.com:29830}")
private String giteaBaseUrl;
@Value("${gitea.token:a9f1c8d3d6fefd73956604f496457faaa3672f89}")
private String accessToken;
@Autowired
private RedisCache redisCache;
private HttpClient httpClient;
private ObjectMapper objectMapper;
// Redis Key常量
private static final String REPO_LIST_KEY = "gitea:repos:list";
private static final String REPO_INFO_PREFIX = "gitea:repo:";
private static final String COMMIT_PREFIX = "gitea:commit:";
private static final String COMMITS_BY_DATE_PREFIX = "gitea:commits:by_date:";
private static final String REPO_COMMITS_INDEX_PREFIX = "gitea:repo_commits:";
private static final String SYNC_PROGRESS_KEY = "gitea:sync:progress";
public GiteaDataService() {
this.httpClient = HttpClient.newBuilder()
.connectTimeout(Duration.ofSeconds(10))
.build();
this.objectMapper = new ObjectMapper();
this.objectMapper.registerModule(new JavaTimeModule());
}
/**
* 1. 获取所有仓库
*/
public List<GiteaRepository> getAllRepositories() throws Exception {
String baseUrl = giteaBaseUrl + "/api/v1/user/repos?limit=50";
List<GiteaRepository> repos = fetchWithPagination(baseUrl, GiteaRepository.class, 30);
// 存储仓库列表到Redis
if (!repos.isEmpty()) {
Map<String, String> repoMap = new LinkedHashMap<>();
for (GiteaRepository repo : repos) {
repoMap.put(String.valueOf(repo.getId()), repo.getFullPath());
// 存储仓库基本信息
String repoKey = REPO_INFO_PREFIX + repo.getFullPath();
Map<String, Object> repoInfo = new HashMap<>();
repoInfo.put("id", repo.getId());
repoInfo.put("name", repo.getRepoName());
repoInfo.put("fullPath", repo.getFullPath());
// createdAt String 类型直接存储
repoInfo.put("createdAt", repo.getCreatedAt());
redisCache.setCacheMap(repoKey, repoInfo);
redisCache.expire(repoKey, 30, TimeUnit.DAYS);
}
redisCache.setCacheMap(REPO_LIST_KEY, repoMap);
redisCache.expire(REPO_LIST_KEY, 30, TimeUnit.DAYS);
}
return repos;
}
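/*
 * Reading back what getAllRepositories() stores (a minimal sketch using the same RedisCache bean
 * as above; it is illustrative only and not wired into any caller):
 *
 *   Map<String, String> repoMap = redisCache.getCacheMap(REPO_LIST_KEY);   // id -> fullPath
 *   for (String fullPath : repoMap.values()) {
 *       Map<String, Object> info = redisCache.getCacheMap(REPO_INFO_PREFIX + fullPath);
 *       log.info("repo {} created at {}", info.get("name"), info.get("createdAt"));
 *   }
 */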
/**
* 2. 拉取单个仓库的所有历史提交从创建至今
*/
public void syncAllCommitsForRepo(String repoFullPath) throws Exception {
log.info("开始同步仓库所有历史提交: {}", repoFullPath);
// 使用最大limit减少分页次数
String baseUrl = String.format("%s/api/v1/repos/%s/commits?limit=100&stat=true",
giteaBaseUrl, repoFullPath);
log.info("提交获取URL: {}", baseUrl);
List<GiteaCommit> allCommits = fetchWithPagination(baseUrl, GiteaCommit.class, 300); // 5分钟超时
log.info("仓库 {} 共获取到 {} 个提交", repoFullPath, allCommits.size());
if (allCommits.isEmpty()) {
log.info("仓库 {} 无提交数据", repoFullPath);
return;
}
// 分批存储到Redis避免内存和Redis压力
int batchSize = 200;
int totalBatches = (int) Math.ceil((double) allCommits.size() / batchSize);
log.info("开始分批存储,每批{}条,共{}批", batchSize, totalBatches);
for (int batchIndex = 0; batchIndex < totalBatches; batchIndex++) {
int start = batchIndex * batchSize;
int end = Math.min(start + batchSize, allCommits.size());
List<GiteaCommit> batch = allCommits.subList(start, end);
int savedInBatch = 0;
for (GiteaCommit commit : batch) {
try {
storeCommit(repoFullPath, commit);
savedInBatch++;
} catch (Exception e) {
log.warn("存储提交失败(SHA:{}): {}",
commit.getSha().substring(0, 8), e.getMessage());
}
}
log.info("批次 {}/{} 完成,存储 {}/{} 条",
batchIndex + 1, totalBatches, savedInBatch, batch.size());
// 批次间休息减轻Redis压力
if (batchIndex < totalBatches - 1) {
Thread.sleep(200);
}
}
log.info("仓库 {} 提交数据同步完成,总计 {} 条", repoFullPath, allCommits.size());
}
/**
* 3. 存储单个提交到Redis
*/
private void storeCommit(String repoFullPath, GiteaCommit commit) throws Exception {
String commitKey = COMMIT_PREFIX + repoFullPath + ":" + commit.getSha();
// 检查是否已存在
if (redisCache.hasKey(commitKey)) {
return; // 已存在跳过
}
Map<String, Object> commitData = new HashMap<>();
commitData.put("sha", commit.getSha());
commitData.put("author", getAuthorName(commit));
commitData.put("message", commit.getCommit().getMessage());
// commitTime 也需要处理可能是 ZonedDateTime String
if (commit.getCommitTime() != null) {
if (commit.getCommitTime() instanceof ZonedDateTime) {
ZonedDateTime commitTime = (ZonedDateTime) commit.getCommitTime();
commitData.put("timestamp", commitTime.toEpochSecond());
commitData.put("time_str", commitTime.format(DateTimeFormatter.ISO_OFFSET_DATE_TIME));
} else {
// 如果是 String 类型直接存储
commitData.put("time_str", commit.getCommitTime().toString());
try {
ZonedDateTime parsedTime = ZonedDateTime.parse(commit.getCommitTime().toString());
commitData.put("timestamp", parsedTime.toEpochSecond());
} catch (Exception e) {
commitData.put("timestamp", 0L);
}
}
} else {
commitData.put("timestamp", 0L);
commitData.put("time_str", "");
}
// 存储文件变更信息
if (commit.getFiles() != null && !commit.getFiles().isEmpty()) {
commitData.put("files_json", objectMapper.writeValueAsString(commit.getFiles()));
}
// 存储统计信息
if (commit.getStats() != null) {
Map<String, Object> stats = new HashMap<>();
stats.put("total", commit.getStats().getTotal());
stats.put("additions", commit.getStats().getAdditions());
stats.put("deletions", commit.getStats().getDeletions());
commitData.put("stats_json", objectMapper.writeValueAsString(stats));
}
redisCache.setCacheMap(commitKey, commitData);
// 按日期建立索引
try {
Long timestamp = (Long) commitData.get("timestamp");
if (timestamp > 0) {
LocalDate commitDate = ZonedDateTime.ofInstant(
java.time.Instant.ofEpochSecond(timestamp),
java.time.ZoneId.systemDefault()
).toLocalDate();
String dateKey = COMMITS_BY_DATE_PREFIX + repoFullPath + ":" + commitDate.toString();
// 使用Set存储该日期下的所有提交SHA
Set<String> dateCommits = redisCache.getCacheSet(dateKey);
if (dateCommits == null) {
dateCommits = new HashSet<>();
}
dateCommits.add(commit.getSha());
redisCache.setCacheSet(dateKey, dateCommits);
// 建立仓库-提交索引
String repoCommitsKey = REPO_COMMITS_INDEX_PREFIX + repoFullPath;
Set<String> repoCommits = redisCache.getCacheSet(repoCommitsKey);
if (repoCommits == null) {
repoCommits = new HashSet<>();
}
repoCommits.add(commit.getSha());
redisCache.setCacheSet(repoCommitsKey, repoCommits);
}
} catch (Exception e) {
log.warn("建立提交索引失败: {}", e.getMessage());
}
}
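/*
 * Keys written by storeCommit() for a single commit, shown with hypothetical values
 * (repository "ChenHaiTech/demo", SHA "abc123...", committed on 2024-03-18):
 *
 *   gitea:commit:ChenHaiTech/demo:abc123...             hash: sha, author, message, timestamp,
 *                                                             time_str, files_json, stats_json
 *   gitea:commits:by_date:ChenHaiTech/demo:2024-03-18   set:  {abc123...}
 *   gitea:repo_commits:ChenHaiTech/demo                 set:  {abc123...}
 *
 * These can be inspected with redis-cli via HGETALL / SMEMBERS on the keys above.
 */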
/**
* 4. 拉取单个仓库的增量提交昨天到现在的提交
*/
public void syncIncrementalCommitsForRepo(String repoFullPath) throws Exception {
log.info("开始增量同步仓库提交: {}", repoFullPath);
// 获取昨天的时间
LocalDate yesterday = LocalDate.now().minusDays(1);
ZonedDateTime since = yesterday.atStartOfDay(ZonedDateTime.now().getZone());
ZonedDateTime until = ZonedDateTime.now();
String sinceStr = since.format(DateTimeFormatter.ISO_OFFSET_DATE_TIME);
String untilStr = until.format(DateTimeFormatter.ISO_OFFSET_DATE_TIME);
String baseUrl = String.format("%s/api/v1/repos/%s/commits?since=%s&until=%s&limit=100",
giteaBaseUrl, repoFullPath, sinceStr, untilStr);
List<GiteaCommit> newCommits = fetchWithPagination(baseUrl, GiteaCommit.class, 30);
if (!newCommits.isEmpty()) {
log.info("仓库 {} 发现 {} 个新提交", repoFullPath, newCommits.size());
for (GiteaCommit commit : newCommits) {
storeCommit(repoFullPath, commit);
}
}
}
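/*
 * The class-level Javadoc describes a "smart time chain" for incremental sync (last sync time
 * minus 10 minutes, a 24-hour fallback, 7 days for brand-new repositories, capped at 30 days),
 * while the method above currently uses a fixed "yesterday to now" window. A sketch of the
 * documented policy, using the gitea:last_sync:{fullPath} key mentioned in the Javadoc, might
 * look like this (assumption: the key stores an ISO-8601 timestamp string):
 *
 *   private ZonedDateTime resolveIncrementalSince(String repoFullPath, boolean hasExistingData) {
 *       ZonedDateTime now = ZonedDateTime.now();
 *       String lastSync = redisCache.getCacheObject("gitea:last_sync:" + repoFullPath);
 *       ZonedDateTime since;
 *       if (lastSync != null) {
 *           since = ZonedDateTime.parse(lastSync).minusMinutes(10);   // guard the boundary
 *       } else if (hasExistingData) {
 *           since = now.minusHours(24);                               // conservative fallback
 *       } else {
 *           since = now.minusDays(7);                                 // first run on a new repo
 *       }
 *       // cap the query window at 30 days
 *       return since.isBefore(now.minusDays(30)) ? now.minusDays(30) : since;
 *   }
 */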
/**
* 5. 批量同步所有仓库的所有历史提交
*/
public void syncAllReposAllCommits() {
log.info("开始批量同步所有仓库的所有历史提交");
try {
// 1. 获取所有仓库
List<GiteaRepository> repos = getAllRepositories();
if (repos.isEmpty()) {
log.warn("未找到任何仓库");
return;
}
log.info("共发现 {} 个仓库,开始同步历史提交", repos.size());
// 2. 记录同步进度
Map<String, Object> progress = new HashMap<>();
progress.put("totalRepos", repos.size());
progress.put("completed", 0);
progress.put("startTime", System.currentTimeMillis());
progress.put("status", "running");
redisCache.setCacheObject(SYNC_PROGRESS_KEY, progress);
// 3. 逐个仓库同步
for (int i = 0; i < repos.size(); i++) {
GiteaRepository repo = repos.get(i);
try {
log.info("正在同步仓库 {}/{}: {}", i + 1, repos.size(), repo.getFullPath());
syncAllCommitsForRepo(repo.getFullPath());
// 更新进度
progress.put("completed", i + 1);
progress.put("currentRepo", repo.getFullPath());
redisCache.setCacheObject(SYNC_PROGRESS_KEY, progress);
// 避免请求过快间隔1秒
Thread.sleep(1000);
} catch (Exception e) {
log.error("同步仓库 {} 失败: {}", repo.getFullPath(), e.getMessage());
}
}
// 4. 完成同步
progress.put("endTime", System.currentTimeMillis());
progress.put("status", "completed");
redisCache.setCacheObject(SYNC_PROGRESS_KEY, progress);
log.info("所有仓库历史提交同步完成");
} catch (Exception e) {
log.error("批量同步失败: {}", e.getMessage(), e);
Map<String, Object> progress = new HashMap<>();
progress.put("status", "failed");
progress.put("error", e.getMessage());
redisCache.setCacheObject(SYNC_PROGRESS_KEY, progress);
}
}
/**
* 6. 获取同步进度
*/
public Map<String, Object> getSyncProgress() {
Map<String, Object> progress = redisCache.getCacheObject(SYNC_PROGRESS_KEY);
if (progress == null) {
progress = new HashMap<>();
progress.put("status", "not_started");
}
return progress;
}
/**
* 7. 测试用的main方法
*/
public static void main(String[] args) {
// 这里模拟测试实际使用时需要Spring上下文
System.out.println("Gitea数据同步服务测试");
System.out.println("主要功能:");
System.out.println("1. getAllRepositories() - 获取所有仓库");
System.out.println("2. syncAllCommitsForRepo() - 同步单个仓库所有历史提交");
System.out.println("3. syncIncrementalCommitsForRepo() - 增量同步单个仓库");
System.out.println("4. syncAllReposAllCommits() - 批量同步所有仓库所有历史提交");
System.out.println("5. getSyncProgress() - 获取同步进度");
System.out.println("\n测试步骤:");
System.out.println("1. 首先调用 getAllRepositories() 获取仓库列表");
System.out.println("2. 调用 syncAllReposAllCommits() 同步所有历史数据");
System.out.println("3. 调用 getSyncProgress() 查看同步进度");
}
// ==================== 工具方法 ====================
private String getAuthorName(GiteaCommit commit) {
if (commit.getCommit() != null &&
commit.getCommit().getAuthor() != null &&
commit.getCommit().getAuthor().getName() != null) {
return commit.getCommit().getAuthor().getName();
}
return "未知作者";
}
private <T> List<T> fetchWithPagination(String baseUrl, Class<T> clazz, int timeoutSeconds) throws Exception {
List<T> results = new ArrayList<>();
int page = 1;
// 保护机制配置
int maxTotalRecords = 100000;
long startTime = System.currentTimeMillis();
long maxDuration = 600000;
int consecutiveEmptyPages = 0;
int maxConsecutiveEmptyPages = 3;
// 确保URL有limit参数使用合理的limit
if (!baseUrl.contains("limit=")) {
baseUrl += (baseUrl.contains("?") ? "&" : "?") + "limit=50"; // 使用50与Gitea默认一致
}
// 记录初始URL
String originalUrl = baseUrl;
log.info("开始分页获取数据,URL: {}", originalUrl);
// 从URL获取请求的limit
int requestedLimit = getLimitFromUrl(baseUrl);
log.info("请求limit参数: {}", requestedLimit);
// 重要记录第一页的返回数量用于后续判断
Integer firstPageSize = null;
while (true) {
// 保护机制检查...
if (results.size() >= maxTotalRecords) {
log.warn("达到最大记录数限制({}),停止分页", maxTotalRecords);
break;
}
long currentDuration = System.currentTimeMillis() - startTime;
if (currentDuration > maxDuration) {
log.warn("分页超时({}ms),停止分页", currentDuration);
break;
}
if (consecutiveEmptyPages >= maxConsecutiveEmptyPages) {
log.warn("连续{}页返回空数据,停止分页", consecutiveEmptyPages);
break;
}
// 构造请求URL
String pageUrl = baseUrl + (baseUrl.contains("?") ? "&" : "?") + "page=" + page;
log.info("请求第{}页,累计{}条,耗时{}ms", page, results.size(), currentDuration);
// 发送HTTP请求...
HttpRequest request = HttpRequest.newBuilder()
.uri(java.net.URI.create(pageUrl))
.header("Authorization", "token " + accessToken)
.timeout(Duration.ofSeconds(30))
.GET()
.build();
HttpResponse<String> response = httpClient.send(request, HttpResponse.BodyHandlers.ofString());
if (response.statusCode() != 200) {
log.warn("第{}页API请求失败: {} - {}", page, response.statusCode(), response.body());
// 如果是404可能是仓库不存在或没权限
if (response.statusCode() == 404) {
log.error("仓库可能不存在或无权访问,停止分页");
break;
}
// 等待后重试
Thread.sleep(2000);
continue;
}
// 解析响应数据
List<T> pageResults = objectMapper.readValue(
response.body(),
objectMapper.getTypeFactory().constructCollectionType(List.class, clazz));
if (pageResults == null) {
log.warn("第{}页解析结果为null", page);
consecutiveEmptyPages++;
page++;
continue;
}
if (pageResults.isEmpty()) {
log.info("第{}页返回空数组,判断为最后一页", page);
break; // 空数组 = 最后一页
}
// 重置连续空页计数
consecutiveEmptyPages = 0;
// 记录第一页的大小
if (firstPageSize == null) {
firstPageSize = pageResults.size();
log.info("第一页返回{}条,请求的limit为{}条", firstPageSize, requestedLimit);
// 重要判断:如果第一页就返回了少于请求的limit,并且数量很少,可能真的只有这么多;
// 但为了保险,我们还是继续请求下一页确认
if (firstPageSize < requestedLimit && firstPageSize < 30) {
log.info("第一页返回较少数据({}条),继续请求下一页确认", firstPageSize);
}
}
// 添加到结果集
results.addAll(pageResults);
log.info("第{}页获取 {} 条,累计 {} 条", page, pageResults.size(), results.size());
// ==================== 关键修改:判断是否最后一页 ====================
// 方案:总是继续请求下一页,直到返回空数组;
// 但可以添加一些智能判断:
// 1. 如果连续几页都返回相同数量的数据,可能还有更多;
// 2. 如果返回数量突然减少,可能是最后一页的迹象(但不绝对)
// 准备下一页
page++;
// 动态休眠
int sleepTime = calculateSleepTime(page, results.size());
Thread.sleep(sleepTime);
// 保护最多1000页
if (page > 1000) {
log.warn("达到最大页数限制(1000页),强制停止分页");
break;
}
}
long totalDuration = System.currentTimeMillis() - startTime;
log.info("分页结束,总共获取 {} 条数据,耗时 {}ms (约{:.1f}秒)",
results.size(), totalDuration, totalDuration / 1000.0);
return results;
}
/**
* 计算休眠时间
*/
private int calculateSleepTime(int currentPage, int totalRecords) {
int baseSleep = 300;
// 根据页数增加
int pageFactor = (currentPage / 10) * 100;
// 根据总记录数增加
int recordFactor = (totalRecords / 1000) * 50;
int sleepTime = baseSleep + pageFactor + recordFactor;
// 限制范围
sleepTime = Math.max(200, Math.min(sleepTime, 3000));
return sleepTime;
}
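/*
 * Worked example for calculateSleepTime(): at page 25 with 3,000 records already fetched,
 * sleepTime = 300 (base) + (25 / 10) * 100 (page factor = 200) + (3000 / 1000) * 50
 * (record factor = 150) = 650 ms, which lies inside the [200, 3000] ms clamp.
 */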
/**
* 从URL中提取limit参数值
*/
private int getLimitFromUrl(String url) {
try {
if (url.contains("limit=")) {
String[] parts = url.split("limit=");
if (parts.length > 1) {
String limitStr = parts[1];
// 移除后面的参数
if (limitStr.contains("&")) {
limitStr = limitStr.substring(0, limitStr.indexOf('&'));
}
if (limitStr.contains("?")) {
limitStr = limitStr.substring(0, limitStr.indexOf('?'));
}
return Integer.parseInt(limitStr.trim());
}
}
} catch (Exception e) {
log.warn("解析limit参数失败,使用默认值100。URL: {}", url);
}
return 100; // 默认值
}
/**
* 8. 真正的测试方法 - 执行实际同步并检查Redis
*/
public void realSyncTest() {
System.out.println("=== 开始真实同步测试 ===");
try {
// 1. 获取仓库
System.out.println("1. 获取仓库列表...");
List<GiteaRepository> repos = getAllRepositories();
System.out.println("获取到 " + repos.size() + " 个仓库");
if (repos.isEmpty()) {
System.out.println("没有仓库,测试结束");
return;
}
// 2. 选择第一个仓库进行测试
String testRepo = repos.get(0).getFullPath();
System.out.println("2. 测试同步仓库: " + testRepo);
// 3. 同步该仓库的历史提交
System.out.println("3. 开始同步历史提交...");
syncAllCommitsForRepo(testRepo);
// 4. 检查Redis中的数据
System.out.println("4. 检查Redis中的数据...");
// 检查仓库信息
String repoKey = REPO_INFO_PREFIX + testRepo;
Map<String, Object> repoInfo = redisCache.getCacheMap(repoKey);
System.out.println(" 仓库信息: " + (repoInfo != null ? "存在" : "不存在"));
if (repoInfo != null) {
System.out.println(" 仓库名称: " + repoInfo.get("name"));
System.out.println(" 仓库路径: " + repoInfo.get("fullPath"));
}
// 检查仓库提交索引
String repoCommitsKey = REPO_COMMITS_INDEX_PREFIX + testRepo;
Set<String> commitShas = redisCache.getCacheSet(repoCommitsKey);
System.out.println(" 提交数量: " + (commitShas != null ? commitShas.size() : 0));
if (commitShas != null && !commitShas.isEmpty()) {
// 随机检查一个提交的详情
String sampleSha = commitShas.iterator().next();
String commitKey = COMMIT_PREFIX + testRepo + ":" + sampleSha;
Map<String, Object> commitData = redisCache.getCacheMap(commitKey);
System.out.println(" 示例提交SHA: " + sampleSha.substring(0, 8) + "...");
if (commitData != null) {
System.out.println(" 作者: " + commitData.get("author"));
System.out.println(" 时间: " + commitData.get("time_str"));
System.out.println(" 消息: " +
(commitData.get("message") != null ?
commitData.get("message").toString().substring(0, Math.min(50, commitData.get("message").toString().length())) + "..." : "无"));
}
}
// 5. 测试增量同步
System.out.println("5. 测试增量同步...");
syncIncrementalCommitsForRepo(testRepo);
System.out.println(" 增量同步完成");
// 6. 重新检查提交数量
commitShas = redisCache.getCacheSet(repoCommitsKey);
System.out.println(" 最终提交数量: " + (commitShas != null ? commitShas.size() : 0));
System.out.println("\n=== 测试完成 ===");
System.out.println("\n你可以使用以下命令检查Redis数据:");
System.out.println("redis-cli keys 'gitea:*" + testRepo + "*'");
System.out.println("redis-cli hgetall 'gitea:repo:" + testRepo + "'");
System.out.println("redis-cli smembers 'gitea:repo_commits:" + testRepo + "'");
} catch (Exception e) {
System.err.println("测试失败: " + e.getMessage());
e.printStackTrace();
}
}
/**
* 真正的全量测试 - 同步所有仓库并检查数据
*/
public void fullSyncTest() {
System.out.println("=== 开始全量同步测试 ===");
System.out.println("这将同步所有183个仓库的历史提交到Redis");
long startTime = System.currentTimeMillis();
try {
// 1. 获取所有仓库
System.out.println("1. 获取仓库列表...");
List<GiteaRepository> repos = getAllRepositories();
System.out.println("获取到 " + repos.size() + " 个仓库");
if (repos.isEmpty()) {
System.out.println("没有仓库,测试结束");
return;
}
// 2. 同步前5个仓库先测试一部分
System.out.println("2. 测试同步前5个仓库...");
int testCount = Math.min(5, repos.size());
int totalCommits = 0;
for (int i = 0; i < testCount; i++) {
GiteaRepository repo = repos.get(i);
System.out.println(" 同步仓库 " + (i+1) + "/" + testCount + ": " + repo.getFullPath());
try {
// 先检查Redis中已有的提交数
String repoCommitsKey = REPO_COMMITS_INDEX_PREFIX + repo.getFullPath();
Set<String> beforeCommits = redisCache.getCacheSet(repoCommitsKey);
int beforeCount = beforeCommits != null ? beforeCommits.size() : 0;
// 同步该仓库
syncAllCommitsForRepo(repo.getFullPath());
// 检查同步后的提交数
Set<String> afterCommits = redisCache.getCacheSet(repoCommitsKey);
int afterCount = afterCommits != null ? afterCommits.size() : 0;
int newCommits = afterCount - beforeCount;
totalCommits += afterCount;
System.out.println(" 同步前: " + beforeCount + " 条,同步后: " + afterCount + " 条,新增: " + newCommits + " 条");
// 间隔1秒避免请求过快
Thread.sleep(1000);
} catch (Exception e) {
System.err.println(" 仓库同步失败: " + e.getMessage());
}
}
// 3. 检查整体数据
System.out.println("3. 检查整体Redis数据...");
// 统计所有仓库的提交总数
int allRepoCommits = 0;
Map<String, String> allRepos = redisCache.getCacheMap(REPO_LIST_KEY);
if (allRepos != null) {
System.out.println(" Redis中仓库总数: " + allRepos.size());
// 抽样检查几个仓库
int sampleCount = Math.min(10, allRepos.size());
int checked = 0;
for (Map.Entry<String, String> entry : allRepos.entrySet()) {
if (checked >= sampleCount) break;
String repoFullPath = entry.getValue();
String repoCommitsKey = REPO_COMMITS_INDEX_PREFIX + repoFullPath;
Set<String> commits = redisCache.getCacheSet(repoCommitsKey);
if (commits != null) {
allRepoCommits += commits.size();
checked++;
if (checked <= 3) { // 只打印前3个的详情
System.out.println(" 仓库 " + checked + ": " + repoFullPath + " - " + commits.size() + " 个提交");
}
}
}
if (allRepos.size() > 3) {
System.out.println(" 还有 " + (allRepos.size() - 3) + " 个仓库...");
}
}
// 4. 统计Redis中所有提交key
System.out.println("4. 统计Redis中所有Gitea相关数据...");
// 注意keys命令在生产环境要谨慎使用这里只是测试
System.out.println(" 请手动运行: redis-cli keys 'gitea:commit:*' | wc -l");
System.out.println(" 请手动运行: redis-cli keys 'gitea:repo:*' | wc -l");
long endTime = System.currentTimeMillis();
long duration = (endTime - startTime) / 1000;
System.out.println("\n=== 测试完成 ===");
System.out.println("总耗时: " + duration + " 秒");
System.out.println("测试仓库数: " + testCount);
System.out.println("总提交数: " + totalCommits);
System.out.println("Redis中预估总提交数: " + allRepoCommits);
System.out.println("\n手动检查命令:");
System.out.println(" # 查看所有仓库");
System.out.println(" redis-cli hgetall 'gitea:repos:list' | head -20");
System.out.println(" ");
System.out.println(" # 查看提交总数");
System.out.println(" redis-cli keys 'gitea:commit:*' | wc -l");
System.out.println(" ");
System.out.println(" # 查看某个仓库的提交");
System.out.println(" redis-cli smembers 'gitea:repo_commits:ChenHaiTech/仓库名' | wc -l");
} catch (Exception e) {
System.err.println("全量测试失败: " + e.getMessage());
e.printStackTrace();
}
}
/**
* 执行真正的全量同步所有仓库
*/
public void executeFullSync() {
System.out.println("=== 开始执行全量同步 ===");
System.out.println("警告:这将同步所有183个仓库,耗时可能较长!");
System.out.println("建议在夜间或低峰期执行");
new Thread(() -> {
try {
syncAllReposAllCommits();
System.out.println("全量同步完成!");
} catch (Exception e) {
System.err.println("全量同步失败: " + e.getMessage());
}
}).start();
System.out.println("全量同步已异步启动,请查看日志...");
System.out.println("查看进度: GET /test/gitea/progress");
}
}

946
chenhai-ai/src/main/java/com/chenhai/chenhaiai/service/gitNew/GiteaGranularityService.java

@@ -0,0 +1,946 @@
package com.chenhai.chenhaiai.service.gitNew;
import com.chenhai.chenhaiai.entity.git.*;
import com.chenhai.common.core.redis.RedisCache;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.time.*;
import java.time.format.DateTimeFormatter;
import java.time.temporal.ChronoUnit;
import java.time.temporal.WeekFields;
import java.util.*;
import java.util.stream.Collectors;
@Slf4j
@Service
public class GiteaGranularityService {
@Autowired
private GiteaQueryService giteaQueryService;
@Autowired
private RedisCache redisCache;
private final ObjectMapper objectMapper;
// Redis Key常量复用GiteaQueryService的常量
private static final String REPO_LIST_KEY = "gitea:repos:list";
private static final String REPO_INFO_KEY_PREFIX = "gitea:repo:";
private static final String COMMIT_KEY_PREFIX = "gitea:commit:";
private static final String COMMITS_BY_DATE_KEY_PREFIX = "gitea:commits:by_date:";
// 时间格式化器
private static final DateTimeFormatter DATE_FORMATTER = DateTimeFormatter.ofPattern("yyyy-MM-dd");
private static final DateTimeFormatter MONTH_FORMATTER = DateTimeFormatter.ofPattern("yyyy-MM");
private static final DateTimeFormatter YEAR_FORMATTER = DateTimeFormatter.ofPattern("yyyy");
public GiteaGranularityService() {
this.objectMapper = new ObjectMapper();
this.objectMapper.registerModule(new JavaTimeModule());
}
/**
* 获取带颗粒度的文本分析报告
* @param since 开始时间yyyy-MM-dd
* @param until 结束时间yyyy-MM-dd
* @param granularity 颗粒度day/week/month/year/auto
* @return 分析报告
*/
public Map<String, Object> getTextAnalysisReportWithGranularity(String since, String until, String granularity) {
Map<String, Object> result = new HashMap<>();
try {
long startTime = System.currentTimeMillis();
// 1. 获取原始提交数据
List<CommitDetail> allCommits = getAllCommitsInRange(since, until);
// 2. 根据原始数据生成分析数据
GitAnalysisData data = analyzeCommitsData(allCommits, since, until);
// 3. 确定实际使用的颗粒度
long daysBetween = calculateDaysBetween(since, until);
String actualGranularity = determineActualGranularity(granularity, daysBetween);
// 4. 生成带颗粒度的报告
String textReport = generateTextReportWithGranularity(data, allCommits, since, until, actualGranularity);
long analysisTime = System.currentTimeMillis() - startTime;
result.put("success", true);
result.put("report", textReport);
result.put("rawData", data);
result.put("granularity", actualGranularity);
result.put("availableGranularities", getAvailableGranularities(daysBetween));
result.put("generatedTime", LocalDate.now().format(DATE_FORMATTER));
result.put("analysisTime", analysisTime);
result.put("totalCommits", allCommits.size());
} catch (Exception e) {
log.error("生成带颗粒度报告失败: {}", e.getMessage(), e);
result.put("success", false);
result.put("error", e.getMessage());
result.put("report", "生成报告失败: " + e.getMessage());
}
return result;
}
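/*
 * Minimal usage sketch (assumes a Spring-injected GiteaGranularityService bean named
 * giteaGranularityService; the dates use the yyyy-MM-dd format the method expects):
 *
 *   Map<String, Object> res = giteaGranularityService
 *       .getTextAnalysisReportWithGranularity("2024-01-01", "2024-03-31", "auto");
 *   if (Boolean.TRUE.equals(res.get("success"))) {
 *       System.out.println(res.get("report"));                  // formatted text report
 *       System.out.println(res.get("granularity"));             // "month" for this 91-day range
 *       System.out.println(res.get("availableGranularities"));  // [auto, month, year]
 *   }
 */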
/**
* 获取时间范围内的所有提交
*/
private List<CommitDetail> getAllCommitsInRange(String since, String until) {
List<CommitDetail> allCommits = new ArrayList<>();
LocalDate startDate = LocalDate.parse(since, DATE_FORMATTER);
LocalDate endDate = LocalDate.parse(until, DATE_FORMATTER);
// 获取所有仓库
Map<String, String> allRepos = redisCache.getCacheMap(REPO_LIST_KEY);
if (allRepos == null || allRepos.isEmpty()) {
return allCommits;
}
// 遍历所有仓库
for (Map.Entry<String, String> entry : allRepos.entrySet()) {
String repoFullPath = entry.getValue();
// 遍历日期范围内的每一天
LocalDate currentDate = startDate;
while (!currentDate.isAfter(endDate)) {
String dateKey = COMMITS_BY_DATE_KEY_PREFIX + repoFullPath + ":" + currentDate;
Set<String> dateCommits = redisCache.getCacheSet(dateKey);
if (dateCommits != null && !dateCommits.isEmpty()) {
for (String sha : dateCommits) {
try {
String commitKey = COMMIT_KEY_PREFIX + repoFullPath + ":" + sha;
Map<String, Object> commitData = redisCache.getCacheMap(commitKey);
if (commitData != null) {
CommitDetail detail = convertToCommitDetail(commitData, repoFullPath);
if (detail != null) {
allCommits.add(detail);
}
}
} catch (Exception e) {
log.debug("获取提交详情失败: {}", e.getMessage());
}
}
}
currentDate = currentDate.plusDays(1);
}
}
log.info("获取到 {} 条提交数据 ({} 至 {})", allCommits.size(), since, until);
return allCommits;
}
/**
* 转换为提交详情
*/
private CommitDetail convertToCommitDetail(Map<String, Object> commitData, String repoFullPath) {
try {
CommitDetail detail = new CommitDetail();
detail.setSha(commitData.get("sha") != null ? commitData.get("sha").toString() : null);
detail.setAuthor(commitData.get("author") != null ? commitData.get("author").toString() : null);
detail.setMessage(commitData.get("message") != null ? commitData.get("message").toString() : null);
detail.setRepoName(repoFullPath);
// 获取文件信息
if (commitData.get("files_json") != null) {
detail.setFilesJson(commitData.get("files_json").toString());
}
if (commitData.get("timestamp") != null) {
try {
long timestamp = ((Number) commitData.get("timestamp")).longValue();
detail.setCommitTime(ZonedDateTime.ofInstant(
Instant.ofEpochSecond(timestamp),
ZoneId.systemDefault()
));
} catch (ClassCastException e) {
log.warn("时间戳格式错误: {}", commitData.get("timestamp"));
return null;
}
} else {
return null;
}
return detail;
} catch (Exception e) {
log.warn("转换提交详情失败: {}", e.getMessage());
return null;
}
}
/**
* 分析提交数据
*/
private GitAnalysisData analyzeCommitsData(List<CommitDetail> commits, String since, String until) {
GitAnalysisData data = new GitAnalysisData();
long startTime = System.currentTimeMillis();
// 基础统计
int totalCommits = commits.size();
Set<String> developers = new HashSet<>();
Set<String> repos = new HashSet<>();
Map<String, Integer> devCommitCount = new HashMap<>();
Map<String, Integer> repoCommitCount = new HashMap<>();
Map<String, Set<String>> devRepos = new HashMap<>();
Map<String, Set<String>> repoDevs = new HashMap<>();
// 按星期统计
Map<DayOfWeek, Integer> dayStats = new HashMap<>();
// 文件类型统计
Map<String, Integer> fileTypeStats = new HashMap<>();
for (CommitDetail commit : commits) {
// 开发者统计
if (commit.getAuthor() != null) {
developers.add(commit.getAuthor());
devCommitCount.merge(commit.getAuthor(), 1, Integer::sum);
// 开发者参与仓库
devRepos.computeIfAbsent(commit.getAuthor(), k -> new HashSet<>())
.add(commit.getRepoName());
}
// 仓库统计
if (commit.getRepoName() != null) {
repos.add(commit.getRepoName());
repoCommitCount.merge(commit.getRepoName(), 1, Integer::sum);
// 仓库的开发者
repoDevs.computeIfAbsent(commit.getRepoName(), k -> new HashSet<>())
.add(commit.getAuthor());
}
// 星期统计
if (commit.getCommitTime() != null) {
DayOfWeek day = commit.getCommitTime().getDayOfWeek();
dayStats.merge(day, 1, Integer::sum);
}
// 文件类型统计
if (commit.getFilesJson() != null && !commit.getFilesJson().isEmpty()) {
try {
List<Map<String, String>> files = objectMapper.readValue(
commit.getFilesJson(), new TypeReference<List<Map<String, String>>>() {});
for (Map<String, String> file : files) {
String filename = file.get("filename");
if (filename != null) {
String fileType = getFileType(filename);
fileTypeStats.merge(fileType, 1, Integer::sum);
}
}
} catch (Exception e) {
log.debug("解析文件JSON失败: {}", e.getMessage());
}
}
}
// 获取总仓库数
Map<String, String> allReposMap = redisCache.getCacheMap(REPO_LIST_KEY);
int totalRepos = allReposMap != null ? allReposMap.size() : 0;
// 构建基础信息
BasicInfo basicInfo = new BasicInfo(
since + " 至 " + until,
totalRepos, // 总仓库数
repos.size(), // 活跃仓库数
developers.size(), // 活跃开发者数
totalCommits, // 总提交数
System.currentTimeMillis() - startTime, // 分析耗时
"Redis缓存查询(带颗粒度)"
);
data.setBasicInfo(basicInfo);
// 构建开发者排行榜
List<DeveloperRank> developerRanks = buildDeveloperRanks(devCommitCount, devRepos);
data.setDeveloperRanks(developerRanks);
// 构建仓库排行榜
List<RepoRank> repoRanks = buildRepoRanks(repoCommitCount, repoDevs);
data.setRepoRanks(repoRanks);
// 构建星期分布
List<DayStats> dayStatsList = buildDayStats(dayStats);
data.setDayStats(dayStatsList);
// 构建文件类型统计
List<FileTypeStats> fileTypeStatsList = buildFileTypeStats(fileTypeStats);
data.setFileTypeStats(fileTypeStatsList);
data.setGeneratedTime(LocalDate.now().format(DATE_FORMATTER));
return data;
}
/**
* 获取文件类型
*/
private String getFileType(String filename) {
if (filename == null || filename.isEmpty()) {
return "未知";
}
int dotIndex = filename.lastIndexOf('.');
if (dotIndex > 0 && dotIndex < filename.length() - 1) {
String ext = filename.substring(dotIndex + 1).toLowerCase();
Map<String, String> typeMap = new HashMap<>();
typeMap.put("java", "Java");
typeMap.put("py", "Python");
typeMap.put("js", "JavaScript");
typeMap.put("ts", "TypeScript");
typeMap.put("vue", "Vue");
typeMap.put("html", "HTML");
typeMap.put("css", "CSS");
typeMap.put("md", "Markdown");
typeMap.put("json", "JSON");
typeMap.put("yml", "YAML");
typeMap.put("yaml", "YAML");
typeMap.put("xml", "XML");
typeMap.put("sql", "SQL");
typeMap.put("sh", "Shell");
return typeMap.getOrDefault(ext, ext.toUpperCase());
}
return "无扩展名";
}
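/*
 * Examples of getFileType(): "UserController.java" -> "Java", "deploy.sh" -> "Shell",
 * "README" -> "无扩展名" (no dot), and an unmapped extension such as "app.properties"
 * falls through to the upper-cased extension "PROPERTIES".
 */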
/**
* 构建开发者排行榜
*/
private List<DeveloperRank> buildDeveloperRanks(Map<String, Integer> devCommitCount,
Map<String, Set<String>> devRepos) {
List<Map.Entry<String, Integer>> sortedDevs = new ArrayList<>(devCommitCount.entrySet());
sortedDevs.sort((a, b) -> Integer.compare(b.getValue(), a.getValue()));
List<DeveloperRank> ranks = new ArrayList<>();
int rank = 1;
for (Map.Entry<String, Integer> entry : sortedDevs) {
if (rank > 15) break;
String devName = entry.getKey();
int commitCount = entry.getValue();
int repoCount = devRepos.getOrDefault(devName, Collections.emptySet()).size();
ranks.add(new DeveloperRank(rank++, devName, commitCount, repoCount));
}
return ranks;
}
/**
* 构建仓库排行榜
*/
private List<RepoRank> buildRepoRanks(Map<String, Integer> repoCommitCount,
Map<String, Set<String>> repoDevs) {
List<Map.Entry<String, Integer>> sortedRepos = new ArrayList<>(repoCommitCount.entrySet());
sortedRepos.sort((a, b) -> Integer.compare(b.getValue(), a.getValue()));
List<RepoRank> ranks = new ArrayList<>();
int rank = 1;
for (Map.Entry<String, Integer> entry : sortedRepos) {
if (rank > 15) break;
String repoName = entry.getKey();
int commitCount = entry.getValue();
int devCount = repoDevs.getOrDefault(repoName, Collections.emptySet()).size();
// 获取仓库显示名称
String repoKey = REPO_INFO_KEY_PREFIX + repoName;
Map<String, Object> repoInfo = redisCache.getCacheMap(repoKey);
String displayName = repoInfo != null && repoInfo.get("name") != null
? repoInfo.get("name").toString()
: repoName;
ranks.add(new RepoRank(rank++, repoName, displayName, commitCount, devCount));
}
return ranks;
}
/**
* 构建星期分布
*/
private List<DayStats> buildDayStats(Map<DayOfWeek, Integer> dayStatsMap) {
List<DayStats> statsList = new ArrayList<>();
String[] dayNames = {"周一", "周二", "周三", "周四", "周五", "周六", "周日"};
DayOfWeek[] days = DayOfWeek.values();
for (int i = 0; i < 7; i++) {
int count = dayStatsMap.getOrDefault(days[i], 0);
statsList.add(new DayStats(dayNames[i], count));
}
return statsList;
}
/**
* 构建文件类型统计
*/
private List<FileTypeStats> buildFileTypeStats(Map<String, Integer> fileTypeStatsMap) {
List<FileTypeStats> statsList = new ArrayList<>();
if (fileTypeStatsMap != null && !fileTypeStatsMap.isEmpty()) {
List<Map.Entry<String, Integer>> sortedEntries = new ArrayList<>(fileTypeStatsMap.entrySet());
sortedEntries.sort((a, b) -> Integer.compare(b.getValue(), a.getValue()));
int rank = 1;
for (Map.Entry<String, Integer> entry : sortedEntries) {
if (rank > 15) break;
statsList.add(new FileTypeStats(entry.getKey(), entry.getValue()));
rank++;
}
}
return statsList;
}
/**
* 确定实际颗粒度
*/
private String determineActualGranularity(String requestedGranularity, long daysBetween) {
// 如果请求auto则自动选择
if ("auto".equalsIgnoreCase(requestedGranularity)) {
return autoSelectGranularity(daysBetween);
}
// 检查请求的颗粒度是否可行
return validateGranularity(requestedGranularity, daysBetween);
}
/**
* 自动选择颗粒度
*/
private String autoSelectGranularity(long daysBetween) {
if (daysBetween <= 7) {
return "day"; // 一周内按天
} else if (daysBetween <= 30) {
return "week"; // 一个月内按周
} else if (daysBetween <= 365) {
return "month"; // 一年内按月
} else {
return "year"; // 超过一年按年
}
}
/**
* 验证颗粒度
*/
private String validateGranularity(String requested, long daysBetween) {
switch (requested.toLowerCase()) {
case "day":
if (daysBetween > 30) {
log.warn("时间范围{}天超过30天,day颗粒度不可用,自动降级为week", daysBetween);
return validateGranularity("week", daysBetween);
}
return "day";
case "week":
if (daysBetween > 90) {
log.warn("时间范围{}天超过90天,week颗粒度不可用,自动降级为month", daysBetween);
return validateGranularity("month", daysBetween);
}
return "week";
case "month":
if (daysBetween > 365 * 2) {
log.warn("时间范围{}天超过2年,month颗粒度不可用,自动降级为year", daysBetween);
return "year";
}
return "month";
case "year":
return "year";
default:
log.warn("未知颗粒度: {},使用auto", requested);
return autoSelectGranularity(daysBetween);
}
}
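/*
 * Worked example of the degradation chain above: requesting "day" for a 120-day range fails the
 * 30-day check and recurses into "week", which then fails the 90-day check and resolves to
 * "month"; requesting "day" for a 20-day range is honored as "day".
 */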
/**
* 获取可用颗粒度列表
*/
private List<String> getAvailableGranularities(long daysBetween) {
List<String> available = new ArrayList<>();
available.add("auto");
if (daysBetween <= 30) {
available.add("day");
}
if (daysBetween <= 90) {
available.add("week");
}
if (daysBetween <= 365 * 2) {
available.add("month");
}
available.add("year");
return available;
}
/**
* 生成带颗粒度的报告
*/
private String generateTextReportWithGranularity(GitAnalysisData data, List<CommitDetail> commits,
String since, String until, String granularity) {
StringBuilder sb = new StringBuilder();
long daysBetween = calculateDaysBetween(since, until);
sb.append("======================================================================\n");
sb.append(" Gitea代码提交分析报告 (颗粒度: ").append(granularity).append(")\n");
sb.append("======================================================================\n\n");
sb.append("📅 分析时间范围: ").append(since).append(" 至 ").append(until);
sb.append(" (").append(daysBetween).append("天)\n");
sb.append("⏱️ 查询耗时: ").append(String.format("%.2f", data.getBasicInfo().getAnalysisTime() / 1000.0))
.append("秒 | 📊 数据来源: Redis缓存\n\n");
// 基础信息
sb.append("📊 总体概览:\n");
sb.append("├── 📦 总仓库数: ").append(data.getBasicInfo().getTotalRepos()).append(" 个\n");
sb.append("├── ⭐ 活跃仓库: ").append(data.getBasicInfo().getActiveRepos()).append(" 个\n");
sb.append("├── 👥 活跃开发者: ").append(data.getBasicInfo().getActiveDevelopers()).append(" 人\n");
sb.append("├── 📝 总提交次数: ").append(data.getBasicInfo().getTotalCommits()).append(" 次\n");
sb.append("└── 📈 日均提交: ").append(String.format("%.1f", data.getBasicInfo().getTotalCommits() * 1.0 / Math.max(1, daysBetween)))
.append(" 次/天\n\n");
// 时间分布按颗粒度
sb.append("📈 提交时间分布(按").append(granularity).append("):\n");
switch (granularity) {
case "day":
generateDailyDistribution(sb, commits, since, until);
break;
case "week":
generateWeeklyDistribution(sb, commits, since, until);
break;
case "month":
generateMonthlyDistribution(sb, commits, since, until);
break;
case "year":
generateYearlyDistribution(sb, commits, since, until);
break;
}
sb.append("\n");
// 开发者排行榜
if (data.getDeveloperRanks() != null && !data.getDeveloperRanks().isEmpty()) {
sb.append("🏆 开发者排行榜 (按提交次数):\n");
sb.append("┌────┬────────────┬────────────┬──────────┬────────────┐\n");
sb.append("│排名│ 开发者 │ 提交次数 │ 参与仓库 │ 活跃度 │\n");
sb.append("├────┼────────────┼────────────┼──────────┼────────────┤\n");
int displayCount = Math.min(10, data.getDeveloperRanks().size());
for (int i = 0; i < displayCount; i++) {
DeveloperRank rank = data.getDeveloperRanks().get(i);
String stars = getStars(rank.getCommitCount(), data.getBasicInfo().getTotalCommits());
sb.append(String.format("│ %2d │ %-10s │ %10d │ %8d │ %10s │\n",
rank.getRank(),
truncateString(rank.getName(), 10),
rank.getCommitCount(),
rank.getRepoCount(),
stars));
}
sb.append("└────┴────────────┴────────────┴──────────┴────────────┘\n\n");
}
// 仓库排行榜
if (data.getRepoRanks() != null && !data.getRepoRanks().isEmpty()) {
sb.append("🏆 仓库活跃度排行榜 (按提交次数):\n");
sb.append("┌────┬──────────────────────┬────────────┬──────────┬──────────┐\n");
sb.append("│排名│ 仓库名称 │ 提交次数 │ 开发者数 │ 活跃度 │\n");
sb.append("├────┼──────────────────────┼────────────┼──────────┼──────────┤\n");
int displayCount = Math.min(10, data.getRepoRanks().size());
for (int i = 0; i < displayCount; i++) {
RepoRank rank = data.getRepoRanks().get(i);
String stars = getStars(rank.getCommitCount(), data.getBasicInfo().getTotalCommits());
sb.append(String.format("│ %2d │ %-20s │ %10d │ %8d │ %8s │\n",
rank.getRank(),
truncateString(rank.getDisplayName(), 20),
rank.getCommitCount(),
rank.getDeveloperCount(),
stars));
}
sb.append("└────┴──────────────────────┴────────────┴──────────┴──────────┘\n\n");
}
// 文件类型统计新增
if (data.getFileTypeStats() != null && !data.getFileTypeStats().isEmpty()) {
sb.append("📄 文件类型统计:\n");
int totalFiles = 0;
for (FileTypeStats stat : data.getFileTypeStats()) {
totalFiles += stat.getFileCount();
}
int displayCount = Math.min(8, data.getFileTypeStats().size());
for (int i = 0; i < displayCount; i++) {
FileTypeStats stat = data.getFileTypeStats().get(i);
int barLength = totalFiles > 0 ? (int) (stat.getFileCount() * 40.0 / totalFiles) : 0;
String bar = "█".repeat(Math.max(0, barLength));
double percentage = totalFiles > 0 ? (stat.getFileCount() * 100.0 / totalFiles) : 0;
sb.append(String.format(" %-12s %s %d个文件 (%.1f%%)\n",
stat.getFileType(),
bar,
stat.getFileCount(),
percentage));
}
sb.append("\n");
}
// 多颗粒度汇总
sb.append("📊 多颗粒度汇总:\n");
// 当颗粒度不是"week"且时间范围大于7天时显示周分布
if (!"week".equals(granularity) && daysBetween > 7) {
Map<String, Integer> weeklySummary = calculateWeeklySummary(commits, since, until);
if (!weeklySummary.isEmpty()) {
sb.append("├── 周分布: ").append(formatSummary(weeklySummary, "第{}周")).append("\n");
}
}
// 当颗粒度不是"month"且时间范围大于30天时显示月分布
if (!"month".equals(granularity) && daysBetween > 30) {
Map<String, Integer> monthlySummary = calculateMonthlySummary(commits, since, until);
if (!monthlySummary.isEmpty()) {
sb.append("├── 月分布: ").append(formatSummary(monthlySummary, "{}月")).append("\n");
}
}
// 当颗粒度不是"year"且时间范围大于365天时显示年分布
if (!"year".equals(granularity) && daysBetween > 365) {
Map<String, Integer> yearlySummary = calculateYearlySummary(commits, since, until);
if (!yearlySummary.isEmpty()) {
sb.append("├── 年分布: ").append(formatSummary(yearlySummary, "{}年")).append("\n");
}
}
// 当颗粒度不是"day"且时间范围小于等于30天时显示星期分布
if (!"day".equals(granularity) && daysBetween <= 30) {
sb.append("├── 星期分布: ");
Map<String, Integer> dayMap = new LinkedHashMap<>();
if (data.getDayStats() != null) {
for (DayStats dayStat : data.getDayStats()) {
dayMap.put(dayStat.getDayName(), dayStat.getCommitCount());
}
sb.append(formatSummary(dayMap, "{}")).append("\n");
}
}
sb.append("\n");
// 关键指标根据颗粒度调整
sb.append("📊 关键指标(按").append(granularity).append("):\n");
// 开发者指标
if (data.getDeveloperRanks() != null && !data.getDeveloperRanks().isEmpty()) {
DeveloperRank topDev = data.getDeveloperRanks().get(0);
sb.append("├── 👑 最活跃开发者: ").append(topDev.getName())
.append(" (").append(topDev.getCommitCount()).append("次提交)\n");
}
// 仓库指标
if (data.getRepoRanks() != null && !data.getRepoRanks().isEmpty()) {
RepoRank topRepo = data.getRepoRanks().get(0);
sb.append("├── 🏆 最活跃仓库: ").append(topRepo.getDisplayName())
.append(" (").append(topRepo.getCommitCount()).append("次提交)\n");
}
// 时间颗粒度指标
switch (granularity) {
case "day":
// 按天的颗粒度显示最活跃的一天
Map<String, Integer> dailyStats = calculateDailySummary(commits, since, until);
String mostActiveDay = getMostActivePeriod(dailyStats);
if (mostActiveDay != null) {
sb.append("├── 📅 最活跃日期: ").append(mostActiveDay)
.append(" (").append(dailyStats.get(mostActiveDay)).append("次提交)\n");
}
break;
case "week":
// 按周的颗粒度显示最活跃的一周
Map<String, Integer> weeklyStats = calculateWeeklySummary(commits, since, until);
String mostActiveWeek = getMostActivePeriod(weeklyStats);
if (mostActiveWeek != null) {
sb.append("├── 📅 最活跃周: ").append(mostActiveWeek)
.append(" (").append(weeklyStats.get(mostActiveWeek)).append("次提交)\n");
}
break;
case "month":
// 按月的颗粒度显示最活跃的月份
Map<String, Integer> monthlyStats = calculateMonthlySummary(commits, since, until);
String mostActiveMonth = getMostActivePeriod(monthlyStats);
if (mostActiveMonth != null) {
sb.append("├── 📅 最活跃月份: ").append(mostActiveMonth)
.append(" (").append(monthlyStats.get(mostActiveMonth)).append("次提交)\n");
}
break;
case "year":
// 按年的颗粒度显示最活跃的年份
Map<String, Integer> yearlyStats = calculateYearlySummary(commits, since, until);
String mostActiveYear = getMostActivePeriod(yearlyStats);
if (mostActiveYear != null) {
sb.append("├── 📅 最活跃年份: ").append(mostActiveYear)
.append(" (").append(yearlyStats.get(mostActiveYear)).append("次提交)\n");
}
break;
}
// 团队活跃度指标
double avgDailyCommits = data.getBasicInfo().getTotalCommits() * 1.0 / Math.max(1, daysBetween);
sb.append("└── ⚡ 团队活跃度: ").append(getActivityLevel(avgDailyCommits))
.append("\n\n");
sb.append("🕐 报告生成时间: ").append(data.getGeneratedTime()).append("\n");
sb.append("======================================================================\n");
return sb.toString();
}
/**
* 获取最活跃的时间段
*/
private String getMostActivePeriod(Map<String, Integer> periodStats) {
if (periodStats == null || periodStats.isEmpty()) {
return null;
}
return periodStats.entrySet().stream()
.max(Map.Entry.comparingByValue())
.map(Map.Entry::getKey)
.orElse(null);
}
/**
* 计算日汇总
*/
private Map<String, Integer> calculateDailySummary(List<CommitDetail> commits, String since, String until) {
Map<String, Integer> dailyStats = new TreeMap<>();
for (CommitDetail commit : commits) {
if (commit.getCommitTime() != null) {
LocalDate date = commit.getCommitTime().toLocalDate();
String dateKey = date.format(DATE_FORMATTER);
dailyStats.merge(dateKey, 1, Integer::sum);
}
}
return dailyStats;
}
/**
* 生成日分布
*/
private void generateDailyDistribution(StringBuilder sb, List<CommitDetail> commits, String since, String until) {
Map<String, Integer> dailyStats = calculateDailySummary(commits, since, until);
generateDistributionChart(sb, dailyStats, "{}");
}
/**
* 生成周分布
*/
private void generateWeeklyDistribution(StringBuilder sb, List<CommitDetail> commits, String since, String until) {
Map<String, Integer> weeklyStats = calculateWeeklySummary(commits, since, until);
generateDistributionChart(sb, weeklyStats, "第{}周");
}
/**
* 生成月分布
*/
private void generateMonthlyDistribution(StringBuilder sb, List<CommitDetail> commits, String since, String until) {
Map<String, Integer> monthlyStats = calculateMonthlySummary(commits, since, until);
generateDistributionChart(sb, monthlyStats, "{}月");
}
/**
* 生成年分布
*/
private void generateYearlyDistribution(StringBuilder sb, List<CommitDetail> commits, String since, String until) {
Map<String, Integer> yearlyStats = calculateYearlySummary(commits, since, until);
generateDistributionChart(sb, yearlyStats, "{}年");
}
/**
* 生成分布图表
*/
private void generateDistributionChart(StringBuilder sb, Map<String, Integer> stats, String labelFormat) {
if (stats == null || stats.isEmpty()) {
sb.append(" 无数据\n");
return;
}
int maxCount = stats.values().stream().mapToInt(Integer::intValue).max().orElse(1);
for (Map.Entry<String, Integer> entry : stats.entrySet()) {
String label = labelFormat.replace("{}", entry.getKey());
int barLength = (int) (entry.getValue() * 40.0 / maxCount);
String bar = "█".repeat(Math.max(0, barLength));
sb.append(String.format(" %-12s %s %d次\n", label, bar, entry.getValue()));
}
}
/**
* 计算周汇总
*/
private Map<String, Integer> calculateWeeklySummary(List<CommitDetail> commits, String since, String until) {
Map<String, Integer> weeklyStats = new TreeMap<>();
for (CommitDetail commit : commits) {
if (commit.getCommitTime() != null) {
LocalDate date = commit.getCommitTime().toLocalDate();
// 使用ISO周数
int weekNumber = date.get(WeekFields.ISO.weekOfYear());
String weekKey = String.format("%d-%02d", date.getYear(), weekNumber);
weeklyStats.merge(weekKey, 1, Integer::sum);
}
}
return weeklyStats;
}
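/*
 * Edge-case note (an assumption about WeekFields semantics, worth verifying): the key above
 * combines date.getYear() with WeekFields.ISO.weekOfYear(), so the first days of January that
 * fall before the first full ISO week are bucketed as week 00 of the new year (e.g. 2021-01-01,
 * a Friday, yields "2021-00"). A week-based-year variant avoids that:
 *
 *   int week = date.get(WeekFields.ISO.weekOfWeekBasedYear());
 *   int year = date.get(WeekFields.ISO.weekBasedYear());
 *   String weekKey = String.format("%d-%02d", year, week);
 */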
/**
* 计算月汇总
*/
private Map<String, Integer> calculateMonthlySummary(List<CommitDetail> commits, String since, String until) {
Map<String, Integer> monthlyStats = new TreeMap<>();
for (CommitDetail commit : commits) {
if (commit.getCommitTime() != null) {
LocalDate date = commit.getCommitTime().toLocalDate();
String monthKey = date.format(MONTH_FORMATTER);
monthlyStats.merge(monthKey, 1, Integer::sum);
}
}
return monthlyStats;
}
/**
* 计算年汇总
*/
private Map<String, Integer> calculateYearlySummary(List<CommitDetail> commits, String since, String until) {
Map<String, Integer> yearlyStats = new TreeMap<>();
for (CommitDetail commit : commits) {
if (commit.getCommitTime() != null) {
LocalDate date = commit.getCommitTime().toLocalDate();
String yearKey = date.format(YEAR_FORMATTER);
yearlyStats.merge(yearKey, 1, Integer::sum);
}
}
return yearlyStats;
}
/**
* 格式化汇总信息
*/
private String formatSummary(Map<String, Integer> summary, String labelFormat) {
if (summary == null || summary.isEmpty()) {
return "无数据";
}
List<String> parts = new ArrayList<>();
for (Map.Entry<String, Integer> entry : summary.entrySet()) {
String label = labelFormat.replace("{}", entry.getKey());
parts.add(label + ":" + entry.getValue());
}
return String.join(" | ", parts);
}
/**
* 计算天数差
*/
private long calculateDaysBetween(String since, String until) {
try {
LocalDate start = LocalDate.parse(since, DATE_FORMATTER);
LocalDate end = LocalDate.parse(until, DATE_FORMATTER);
return ChronoUnit.DAYS.between(start, end) + 1;
} catch (Exception e) {
return 1;
}
}
/**
* 生成星标
*/
private String getStars(int count, int total) {
if (total == 0) return "☆☆☆☆☆";
double ratio = count * 1.0 / total;
if (ratio > 0.15) return "⭐⭐⭐⭐⭐";
else if (ratio > 0.10) return "⭐⭐⭐⭐☆";
else if (ratio > 0.05) return "⭐⭐⭐☆☆";
else if (ratio > 0.02) return "⭐⭐☆☆☆";
else return "⭐☆☆☆☆";
}
/**
* 截断字符串
*/
private String truncateString(String str, int maxLength) {
if (str == null) return "";
if (str.length() <= maxLength) return str;
return str.substring(0, maxLength - 2) + "..";
}
/**
* 判断活跃度等级
*/
private String getActivityLevel(double avgDailyCommits) {
if (avgDailyCommits > 50) return "极高";
else if (avgDailyCommits > 30) return "高";
else if (avgDailyCommits > 15) return "中等偏上";
else if (avgDailyCommits > 8) return "中等";
else if (avgDailyCommits > 3) return "中等偏下";
else return "低";
}
/**
* 提交详情类内部使用
*/
private static class CommitDetail {
private String author;
private String repoName;
private ZonedDateTime commitTime;
private String sha;
private String message;
private String filesJson; // 新增字段
// Getters and Setters
public String getAuthor() { return author; }
public void setAuthor(String author) { this.author = author; }
public String getRepoName() { return repoName; }
public void setRepoName(String repoName) { this.repoName = repoName; }
public ZonedDateTime getCommitTime() { return commitTime; }
public void setCommitTime(ZonedDateTime commitTime) { this.commitTime = commitTime; }
public String getSha() { return sha; }
public void setSha(String sha) { this.sha = sha; }
public String getMessage() { return message; }
public void setMessage(String message) { this.message = message; }
public String getFilesJson() { return filesJson; }
public void setFilesJson(String filesJson) { this.filesJson = filesJson; }
}
}

802
chenhai-ai/src/main/java/com/chenhai/chenhaiai/service/gitNew/GiteaQueryService.java

@@ -0,0 +1,802 @@
package com.chenhai.chenhaiai.service.gitNew;
import com.chenhai.chenhaiai.entity.git.*;
import com.chenhai.common.core.redis.RedisCache;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.time.*;
import java.time.format.DateTimeFormatter;
import java.util.*;
@Slf4j
@Service
public class GiteaQueryService {
@Autowired
private RedisCache redisCache;
private final ObjectMapper objectMapper;
// Redis Key常量
private static final String REPO_LIST_KEY = "gitea:repos:list";
private static final String REPO_INFO_KEY_PREFIX = "gitea:repo:";
private static final String COMMIT_KEY_PREFIX = "gitea:commit:";
private static final String COMMITS_BY_DATE_KEY_PREFIX = "gitea:commits:by_date:";
// 时间格式化器 - 只使用年月日格式
private static final DateTimeFormatter DATE_FORMATTER = DateTimeFormatter.ofPattern("yyyy-MM-dd");
public GiteaQueryService() {
this.objectMapper = new ObjectMapper();
this.objectMapper.registerModule(new JavaTimeModule());
}
/**
* Main query method: analyzes Git data for the given date range
*/
public GitAnalysisData analyzeGitDataFromRedis(String since, String until) {
String taskId = UUID.randomUUID().toString().substring(0, 8);
long startTime = System.currentTimeMillis();
log.info("开始Redis查询分析任务[{}]: {} 至 {}", taskId, since, until);
try {
// 解析时间范围 - 使用年月日格式
LocalDate sinceDate = parseDateString(since);
LocalDate untilDate = parseDateString(until);
// 转换为一天的开始和结束时间
ZonedDateTime sinceTime = sinceDate.atStartOfDay(ZoneId.systemDefault());
ZonedDateTime untilTime = untilDate.atTime(23, 59, 59).atZone(ZoneId.systemDefault());
// 验证时间范围有效性
if (sinceTime.isAfter(untilTime)) {
log.warn("任务[{}] 时间范围无效: sinceTime={}, untilTime={}", taskId, sinceTime, untilTime);
return buildEmptyGitAnalysisData("时间范围无效: 开始时间不能晚于结束时间");
}
// 1. 获取所有仓库
Map<String, String> allRepos = redisCache.getCacheMap(REPO_LIST_KEY);
if (allRepos == null || allRepos.isEmpty()) {
log.warn("任务[{}] Redis中无仓库数据", taskId);
return buildEmptyGitAnalysisData("Redis中无仓库数据");
}
int totalRepos = allRepos.size();
log.info("任务[{}] Redis中发现仓库: {} 个", taskId, totalRepos);
// 2. 筛选活跃仓库
List<String> activeRepoNames = new ArrayList<>();
Map<String, Integer> repoCommitCounts = new HashMap<>();
Map<String, List<CommitInfo>> repoCommitsMap = new HashMap<>();
for (Map.Entry<String, String> entry : allRepos.entrySet()) {
String repoFullPath = entry.getValue();
// 获取时间范围内的提交
List<CommitInfo> commits = getCommitsInRange(repoFullPath, sinceTime, untilTime);
if (!commits.isEmpty()) {
activeRepoNames.add(repoFullPath);
repoCommitCounts.put(repoFullPath, commits.size());
repoCommitsMap.put(repoFullPath, commits);
}
}
int activeRepoCount = activeRepoNames.size();
log.info("任务[{}] 活跃仓库: {} 个", taskId, activeRepoCount);
if (activeRepoCount == 0) {
return buildSimpleGitAnalysisData(since, until, totalRepos, startTime);
}
// 3. 详细分析活跃仓库
DetailedAnalysisResult detailResult = analyzeActiveRepositories(
activeRepoNames, repoCommitsMap, sinceTime, untilTime, taskId);
// 4. 构建返回数据
long analysisTime = System.currentTimeMillis() - startTime;
return buildGitAnalysisData(since, until, totalRepos, activeRepoCount,
detailResult, analysisTime);
} catch (Exception e) {
log.error("Redis查询分析任务[{}]失败: {}", taskId, e.getMessage(), e);
throw new RuntimeException("Git分析失败: " + e.getMessage(), e);
}
}
/**
* 解析日期字符串 (yyyy-MM-dd)
*/
private LocalDate parseDateString(String dateStr) {
if (dateStr == null || dateStr.isEmpty()) {
throw new IllegalArgumentException("日期字符串不能为空");
}
try {
// 只解析年月日
return LocalDate.parse(dateStr, DATE_FORMATTER);
} catch (Exception e) {
log.error("日期格式解析失败: {}, 请使用 yyyy-MM-dd 格式", dateStr);
throw new IllegalArgumentException("日期格式错误: " + dateStr + ",请使用 yyyy-MM-dd 格式", e);
}
}
/**
* 获取时间范围内的提交
*/
private List<CommitInfo> getCommitsInRange(String repoFullPath,
ZonedDateTime since,
ZonedDateTime until) {
try {
// 使用日期索引遍历
return getCommitsByDateIndex(repoFullPath, since, until);
} catch (Exception e) {
log.warn("获取仓库 {} 提交失败: {}", repoFullPath, e.getMessage());
return Collections.emptyList();
}
}
/**
* 按日期索引获取提交
*/
private List<CommitInfo> getCommitsByDateIndex(String repoFullPath,
ZonedDateTime since,
ZonedDateTime until) {
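// Walk the range day by day: the set at gitea:commits:by_date:<repo>:<yyyy-MM-dd>
// lists the commit SHAs for that day, and each SHA's fields are stored in the
// hash at gitea:commit:<repo>:<sha>.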
List<CommitInfo> result = new ArrayList<>();
LocalDate startDate = since.toLocalDate();
LocalDate endDate = until.toLocalDate();
LocalDate currentDate = startDate;
while (!currentDate.isAfter(endDate)) {
String dateKey = COMMITS_BY_DATE_KEY_PREFIX + repoFullPath + ":" + currentDate;
Set<String> dateCommits = redisCache.getCacheSet(dateKey);
if (dateCommits != null && !dateCommits.isEmpty()) {
for (String sha : dateCommits) {
try {
String commitKey = COMMIT_KEY_PREFIX + repoFullPath + ":" + sha;
Map<String, Object> commitData = redisCache.getCacheMap(commitKey);
if (commitData != null && isValidCommit(commitData, since, until)) {
CommitInfo commitInfo = convertToCommitInfo(commitData);
result.add(commitInfo);
}
} catch (Exception e) {
log.debug("处理提交 {} 失败: {}", sha, e.getMessage());
}
}
}
currentDate = currentDate.plusDays(1);
}
return result;
}
/**
* 检查提交是否在时间范围内
*/
private boolean isValidCommit(Map<String, Object> commitData,
ZonedDateTime since,
ZonedDateTime until) {
if (commitData.get("timestamp") == null) {
return false;
}
try {
long timestamp = ((Number) commitData.get("timestamp")).longValue();
ZonedDateTime commitTime = ZonedDateTime.ofInstant(
Instant.ofEpochSecond(timestamp),
ZoneId.systemDefault()
);
return !commitTime.isBefore(since) && !commitTime.isAfter(until);
} catch (ClassCastException e) {
log.warn("提交时间戳格式错误: {}", commitData.get("timestamp"));
return false;
}
}
/**
* 转换提交数据
*/
private CommitInfo convertToCommitInfo(Map<String, Object> commitData) {
CommitInfo info = new CommitInfo();
try {
info.sha = commitData.get("sha") != null ? commitData.get("sha").toString() : null;
info.author = commitData.get("author") != null ? commitData.get("author").toString() : null;
info.message = commitData.get("message") != null ? commitData.get("message").toString() : null;
info.timeStr = commitData.get("time_str") != null ? commitData.get("time_str").toString() : null;
info.filesJson = commitData.get("files_json") != null ? commitData.get("files_json").toString() : null;
if (commitData.get("timestamp") != null) {
try {
info.timestamp = ((Number) commitData.get("timestamp")).longValue();
info.commitTime = ZonedDateTime.ofInstant(
Instant.ofEpochSecond(info.timestamp),
ZoneId.systemDefault()
);
} catch (ClassCastException e) {
log.warn("时间戳格式错误: {}", commitData.get("timestamp"));
info.timestamp = null;
}
}
} catch (Exception e) {
log.warn("转换提交数据失败: {}", e.getMessage());
}
return info;
}
/**
* 详细分析活跃仓库
*/
private DetailedAnalysisResult analyzeActiveRepositories(List<String> activeRepoNames,
Map<String, List<CommitInfo>> repoCommitsMap,
ZonedDateTime sinceTime,
ZonedDateTime untilTime,
String taskId) {
Map<String, DeveloperData> devDataMap = new HashMap<>();
Map<String, RepoData> repoDataMap = new HashMap<>();
Map<DayOfWeek, Integer> dayStats = new HashMap<>();
Map<Integer, Integer> hourStats = new HashMap<>();
Map<String, Integer> fileTypeStats = new HashMap<>();
int totalCommits = 0;
log.info("任务[{}] 开始详细分析 {} 个活跃仓库", taskId, activeRepoNames.size());
for (String repoFullPath : activeRepoNames) {
try {
List<CommitInfo> commits = repoCommitsMap.get(repoFullPath);
if (commits == null || commits.isEmpty()) {
continue;
}
// 获取仓库信息
String repoKey = REPO_INFO_KEY_PREFIX + repoFullPath;
Map<String, Object> repoInfo = redisCache.getCacheMap(repoKey);
RepoData repoData = new RepoData();
repoData.repoName = repoFullPath;
repoData.displayName = repoInfo != null && repoInfo.get("name") != null
? repoInfo.get("name").toString()
: repoFullPath;
// 分析该仓库的所有提交
for (CommitInfo commit : commits) {
if (commit.author == null || commit.author.isEmpty()) {
continue;
}
DeveloperData devData = devDataMap.computeIfAbsent(commit.author,
k -> new DeveloperData(commit.author));
devData.commitCount++;
devData.repos.add(repoFullPath);
repoData.commitCount++;
repoData.developers.add(commit.author);
if (commit.commitTime != null) {
DayOfWeek day = commit.commitTime.getDayOfWeek();
int hour = commit.commitTime.getHour();
dayStats.merge(day, 1, Integer::sum);
hourStats.merge(hour, 1, Integer::sum);
}
if (commit.filesJson != null && !commit.filesJson.isEmpty()) {
try {
List<Map<String, String>> files = objectMapper.readValue(
commit.filesJson, new TypeReference<List<Map<String, String>>>() {});
for (Map<String, String> file : files) {
String filename = file.get("filename");
if (filename != null) {
String fileType = getFileType(filename);
fileTypeStats.merge(fileType, 1, Integer::sum);
}
}
} catch (Exception e) {
log.debug("解析文件JSON失败: {}", e.getMessage());
}
}
totalCommits++;
}
if (repoData.commitCount > 0) {
repoDataMap.put(repoFullPath, repoData);
}
} catch (Exception e) {
log.debug("任务[{}] 仓库 {} 分析失败: {}", taskId, repoFullPath, e.getMessage());
}
}
log.info("任务[{}] 详细分析完成,总提交: {}", taskId, totalCommits);
return new DetailedAnalysisResult(devDataMap, repoDataMap, dayStats, hourStats,
fileTypeStats, totalCommits);
}
/**
* 获取文件类型
*/
private String getFileType(String filename) {
if (filename == null || filename.isEmpty()) {
return "未知";
}
int dotIndex = filename.lastIndexOf('.');
if (dotIndex > 0 && dotIndex < filename.length() - 1) {
String ext = filename.substring(dotIndex + 1).toLowerCase();
Map<String, String> typeMap = new HashMap<>();
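// Note: this lookup table is rebuilt for every file; hoisting it to a static
// constant would avoid the repeated allocation.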
typeMap.put("java", "Java");
typeMap.put("py", "Python");
typeMap.put("js", "JavaScript");
typeMap.put("ts", "TypeScript");
typeMap.put("vue", "Vue");
typeMap.put("html", "HTML");
typeMap.put("css", "CSS");
typeMap.put("md", "Markdown");
typeMap.put("json", "JSON");
typeMap.put("yml", "YAML");
typeMap.put("yaml", "YAML");
typeMap.put("xml", "XML");
typeMap.put("sql", "SQL");
typeMap.put("sh", "Shell");
return typeMap.getOrDefault(ext, ext.toUpperCase());
}
return "无扩展名";
}
// ==================== 数据结构类 ====================
private static class CommitInfo {
String sha;
String author;
String message;
Long timestamp;
String timeStr;
String filesJson;
ZonedDateTime commitTime;
}
private static class DeveloperData {
String name;
int commitCount = 0;
Set<String> repos = new HashSet<>();
DeveloperData(String name) {
this.name = name;
}
}
private static class RepoData {
String repoName;
String displayName;
int commitCount = 0;
Set<String> developers = new HashSet<>();
}
private static class DetailedAnalysisResult {
private final Map<String, DeveloperData> devDataMap;
private final Map<String, RepoData> repoDataMap;
private final Map<DayOfWeek, Integer> dayStats;
private final Map<Integer, Integer> hourStats;
private final Map<String, Integer> fileTypeStats;
private final int totalCommits;
public DetailedAnalysisResult(Map<String, DeveloperData> devDataMap,
Map<String, RepoData> repoDataMap,
Map<DayOfWeek, Integer> dayStats,
Map<Integer, Integer> hourStats,
Map<String, Integer> fileTypeStats,
int totalCommits) {
this.devDataMap = devDataMap;
this.repoDataMap = repoDataMap;
this.dayStats = dayStats;
this.hourStats = hourStats;
this.fileTypeStats = fileTypeStats;
this.totalCommits = totalCommits;
}
public Map<String, DeveloperData> getDevDataMap() { return devDataMap; }
public Map<String, RepoData> getRepoDataMap() { return repoDataMap; }
public Map<DayOfWeek, Integer> getDayStats() { return dayStats; }
public Map<Integer, Integer> getHourStats() { return hourStats; }
public Map<String, Integer> getFileTypeStats() { return fileTypeStats; }
public int getTotalCommits() { return totalCommits; }
}
// ==================== 数据构建方法 ====================
private GitAnalysisData buildGitAnalysisData(String since, String until,
int totalRepos, int activeRepos,
DetailedAnalysisResult detailResult,
long analysisTime) {
GitAnalysisData data = new GitAnalysisData();
// 基础信息 - 使用格式化后的时间显示
data.setBasicInfo(new BasicInfo(
formatDateForDisplay(since) + " 至 " + formatDateForDisplay(until),
totalRepos,
activeRepos,
detailResult.getDevDataMap().size(),
detailResult.getTotalCommits(),
analysisTime,
"Redis缓存查询"
));
buildDeveloperRanks(data, detailResult.getDevDataMap());
buildRepoRanks(data, detailResult.getRepoDataMap());
buildTimeDistribution(data, detailResult.getDayStats());
buildFileTypeStats(data, detailResult.getFileTypeStats());
// Note: only the date (yyyy-MM-dd) is shown here
data.setGeneratedTime(LocalDate.now().format(DATE_FORMATTER));
return data;
}
private void buildDeveloperRanks(GitAnalysisData data, Map<String, DeveloperData> devDataMap) {
if (devDataMap != null && !devDataMap.isEmpty()) {
List<DeveloperData> devList = new ArrayList<>(devDataMap.values());
devList.sort((a, b) -> Integer.compare(b.commitCount, a.commitCount));
List<DeveloperRank> developerRanks = new ArrayList<>();
int rank = 1;
for (DeveloperData dev : devList) {
if (rank > 15) break;
developerRanks.add(new DeveloperRank(rank++, dev.name, dev.commitCount, dev.repos.size()));
}
data.setDeveloperRanks(developerRanks);
}
}
private void buildRepoRanks(GitAnalysisData data, Map<String, RepoData> repoDataMap) {
if (repoDataMap != null && !repoDataMap.isEmpty()) {
List<RepoData> repoList = new ArrayList<>(repoDataMap.values());
repoList.sort((a, b) -> Integer.compare(b.commitCount, a.commitCount));
List<RepoRank> repoRanks = new ArrayList<>();
int rank = 1;
for (RepoData repo : repoList) {
if (rank > 15) break;
repoRanks.add(new RepoRank(rank++, repo.repoName, repo.displayName,
repo.commitCount, repo.developers.size()));
}
data.setRepoRanks(repoRanks);
}
}
private void buildTimeDistribution(GitAnalysisData data, Map<DayOfWeek, Integer> dayOfWeekStats) {
if (dayOfWeekStats != null && !dayOfWeekStats.isEmpty()) {
List<DayStats> statsList = new ArrayList<>();
String[] dayNames = {"周一", "周二", "周三", "周四", "周五", "周六", "周日"};
DayOfWeek[] days = DayOfWeek.values();
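// DayOfWeek.values() is ordered MONDAY..SUNDAY, which lines up with dayNames (周一..周日)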
for (int i = 0; i < 7; i++) {
int count = dayOfWeekStats.getOrDefault(days[i], 0);
statsList.add(new DayStats(dayNames[i], count));
}
data.setDayStats(statsList);
}
}
private void buildFileTypeStats(GitAnalysisData data, Map<String, Integer> fileTypeStats) {
if (fileTypeStats != null && !fileTypeStats.isEmpty()) {
List<Map.Entry<String, Integer>> fileList = new ArrayList<>(fileTypeStats.entrySet());
fileList.sort((a, b) -> Integer.compare(b.getValue(), a.getValue()));
List<FileTypeStats> fileTypeStatsList = new ArrayList<>();
int count = 0;
for (Map.Entry<String, Integer> entry : fileList) {
if (count++ >= 15) break;
fileTypeStatsList.add(new FileTypeStats(entry.getKey(), entry.getValue()));
}
data.setFileTypeStats(fileTypeStatsList);
}
}
private GitAnalysisData buildEmptyGitAnalysisData(String message) {
GitAnalysisData data = new GitAnalysisData();
data.setBasicInfo(new BasicInfo(
"无时间范围",
0,
0,
0,
0,
0,
message
));
data.setGeneratedTime(LocalDate.now().format(DATE_FORMATTER));
return data;
}
private GitAnalysisData buildSimpleGitAnalysisData(String since, String until, int totalRepos,
long startTime) {
long analysisTime = System.currentTimeMillis() - startTime;
GitAnalysisData data = new GitAnalysisData();
data.setBasicInfo(new BasicInfo(
formatDateForDisplay(since) + " 至 " + formatDateForDisplay(until),
totalRepos,
0,
0,
0,
analysisTime,
"无活跃仓库"
));
data.setGeneratedTime(LocalDate.now().format(DATE_FORMATTER));
return data;
}
/**
* 格式化日期显示
*/
private String formatDateForDisplay(String dateStr) {
try {
LocalDate date = parseDateString(dateStr);
return date.format(DATE_FORMATTER);
} catch (Exception e) {
return dateStr;
}
}
/**
* Builds a plain-text analysis report that the Vue2 page can render directly
*/
public Map<String, Object> getTextAnalysisReport(String since, String until) {
Map<String, Object> result = new HashMap<>();
try {
GitAnalysisData data = analyzeGitDataFromRedis(since, until);
String textReport = generateTextReport(data, since, until);
result.put("success", true);
result.put("report", textReport);
result.put("rawData", data);
result.put("generatedTime", LocalDate.now().format(DATE_FORMATTER));
} catch (Exception e) {
result.put("success", false);
result.put("error", e.getMessage());
result.put("report", "生成报告失败: " + e.getMessage());
}
return result;
}
/**
* 生成文本格式的报告
*/
private String generateTextReport(GitAnalysisData data, String since, String until) {
StringBuilder sb = new StringBuilder();
String readableSince = formatDateForDisplay(since);
String readableUntil = formatDateForDisplay(until);
sb.append("======================================================================\n");
sb.append(" Gitea代码提交分析报告\n");
sb.append("======================================================================\n\n");
sb.append("📅 分析时间范围: ").append(readableSince).append(" 至 ").append(readableUntil).append("\n");
sb.append("⏱️ 查询耗时: ").append(String.format("%.2f", data.getBasicInfo().getAnalysisTime() / 1000.0)).append("秒 | 📊 数据来源: Redis缓存\n\n");
sb.append("📊 总体概览:\n");
sb.append("├── 📦 总仓库数: ").append(data.getBasicInfo().getTotalRepos()).append(" 个\n");
double activeRepoPercentage = data.getBasicInfo().getTotalRepos() > 0 ?
(data.getBasicInfo().getActiveRepos() * 100.0 / data.getBasicInfo().getTotalRepos()) : 0;
sb.append("├── ⭐ 活跃仓库: ").append(data.getBasicInfo().getActiveRepos()).append(" 个 (")
.append(String.format("%.1f", activeRepoPercentage)).append("%)\n");
sb.append("├── 👥 活跃开发者: ").append(data.getBasicInfo().getActiveDevelopers()).append(" 人\n");
sb.append("├── 📝 总提交次数: ").append(data.getBasicInfo().getTotalCommits()).append(" 次\n");
long days = calculateDaysBetween(since, until);
double dailyCommits = days > 0 ? data.getBasicInfo().getTotalCommits() * 1.0 / days : 0;
sb.append("└── 📈 日均提交: ").append(String.format("%.0f", dailyCommits)).append(" 次\n\n");
if (data.getDeveloperRanks() != null && !data.getDeveloperRanks().isEmpty()) {
sb.append("🏆 开发者排行榜 (按提交次数):\n");
sb.append("┌────┬────────────┬────────────┬──────────┬────────────┐\n");
sb.append("│排名│ 开发者 │ 提交次数 │ 参与仓库 │ 活跃度 │\n");
sb.append("├────┼────────────┼────────────┼──────────┼────────────┤\n");
int displayCount = Math.min(10, data.getDeveloperRanks().size());
for (int i = 0; i < displayCount; i++) {
DeveloperRank rank = data.getDeveloperRanks().get(i);
String stars = getStars(rank.getCommitCount(), data.getBasicInfo().getTotalCommits());
sb.append(String.format("│ %2d │ %-10s │ %10d │ %8d │ %10s │\n",
rank.getRank(),
truncateString(rank.getName(), 10),
rank.getCommitCount(),
rank.getRepoCount(),
stars));
}
sb.append("└────┴────────────┴────────────┴──────────┴────────────┘\n\n");
}
if (data.getRepoRanks() != null && !data.getRepoRanks().isEmpty()) {
sb.append("🏆 仓库活跃度排行榜 (按提交次数):\n");
sb.append("┌────┬──────────────────────┬────────────┬──────────┬──────────┐\n");
sb.append("│排名│ 仓库名称 │ 提交次数 │ 开发者数 │ 活跃度 │\n");
sb.append("├────┼──────────────────────┼────────────┼──────────┼──────────┤\n");
int displayCount = Math.min(10, data.getRepoRanks().size());
for (int i = 0; i < displayCount; i++) {
RepoRank rank = data.getRepoRanks().get(i);
String stars = getStars(rank.getCommitCount(), data.getBasicInfo().getTotalCommits());
sb.append(String.format("│ %2d │ %-20s │ %10d │ %8d │ %8s │\n",
rank.getRank(),
truncateString(rank.getDisplayName(), 20),
rank.getCommitCount(),
rank.getDeveloperCount(),
stars));
}
sb.append("└────┴──────────────────────┴────────────┴──────────┴──────────┘\n\n");
}
if (data.getDayStats() != null && !data.getDayStats().isEmpty()) {
sb.append("📈 提交时间分布(星期):\n");
int maxCount = 1;
for (DayStats dayStat : data.getDayStats()) {
if (dayStat.getCommitCount() > maxCount) {
maxCount = dayStat.getCommitCount();
}
}
for (DayStats dayStat : data.getDayStats()) {
int barLength = (int) (dayStat.getCommitCount() * 30.0 / maxCount);
String bar = "█".repeat(Math.max(0, barLength));
sb.append(String.format("%s: %s %d次\n",
dayStat.getDayName(),
bar,
dayStat.getCommitCount()));
}
sb.append("\n");
}
if (data.getFileTypeStats() != null && !data.getFileTypeStats().isEmpty()) {
sb.append("📄 文件类型统计:\n");
int totalFiles = 0;
for (FileTypeStats stat : data.getFileTypeStats()) {
totalFiles += stat.getFileCount();
}
int displayCount = Math.min(8, data.getFileTypeStats().size());
for (int i = 0; i < displayCount; i++) {
FileTypeStats stat = data.getFileTypeStats().get(i);
int barLength = totalFiles > 0 ? (int) (stat.getFileCount() * 40.0 / totalFiles) : 0;
String bar = "█".repeat(Math.max(0, barLength));
double percentage = totalFiles > 0 ? (stat.getFileCount() * 100.0 / totalFiles) : 0;
sb.append(String.format("%-12s %s %d个文件 (%.1f%%)\n",
stat.getFileType(),
bar,
stat.getFileCount(),
percentage));
}
sb.append("\n");
}
sb.append("📊 关键指标:\n");
if (data.getDayStats() != null && !data.getDayStats().isEmpty()) {
DayStats maxDay = null;
int maxCount = 0;
for (DayStats dayStat : data.getDayStats()) {
if (dayStat.getCommitCount() > maxCount) {
maxCount = dayStat.getCommitCount();
maxDay = dayStat;
}
}
if (maxDay != null) {
sb.append("├── 📅 最活跃星期: ").append(maxDay.getDayName())
.append(" (").append(maxCount).append("次提交)\n");
}
}
if (data.getDeveloperRanks() != null && !data.getDeveloperRanks().isEmpty()) {
DeveloperRank topDev = data.getDeveloperRanks().get(0);
sb.append("├── 👑 最活跃开发者: ").append(topDev.getName())
.append(" (").append(topDev.getCommitCount()).append("次提交,参与").append(topDev.getRepoCount()).append("个仓库)\n");
}
if (data.getRepoRanks() != null && !data.getRepoRanks().isEmpty()) {
RepoRank topRepo = data.getRepoRanks().get(0);
sb.append("├── 🏆 最活跃仓库: ").append(topRepo.getDisplayName())
.append(" (").append(topRepo.getCommitCount()).append("次提交,").append(topRepo.getDeveloperCount()).append("个开发者)\n");
}
if (data.getDeveloperRanks() != null && !data.getDeveloperRanks().isEmpty()) {
int top3Commits = 0;
int limit = Math.min(3, data.getDeveloperRanks().size());
for (int i = 0; i < limit; i++) {
DeveloperRank rank = data.getDeveloperRanks().get(i);
top3Commits += rank.getCommitCount();
}
int totalCommits = data.getBasicInfo().getTotalCommits();
double top3Percentage = totalCommits > 0 ? (top3Commits * 100.0 / totalCommits) : 0;
sb.append("├── 👥 头部贡献: 前3名开发者贡献了 ").append(String.format("%.1f", top3Percentage)).append("% 的提交\n");
}
int totalDevs = data.getBasicInfo().getActiveDevelopers();
int totalCommits = data.getBasicInfo().getTotalCommits();
double avgCommitsPerDev = totalDevs > 0 ? (totalCommits * 1.0 / totalDevs) : 0;
sb.append("└── ⚡ 团队活跃度: ").append(getActivityLevel(avgCommitsPerDev))
.append(" (平均每人").append(String.format("%.1f", avgCommitsPerDev)).append("次提交)\n\n");
String generatedTime = data.getGeneratedTime() != null ? data.getGeneratedTime() :
LocalDate.now().format(DATE_FORMATTER);
sb.append("🕐 报告生成时间: ").append(generatedTime).append("\n");
sb.append("======================================================================\n");
return sb.toString();
}
/**
* 计算天数差
*/
private long calculateDaysBetween(String since, String until) {
try {
LocalDate start = parseDateString(since);
LocalDate end = parseDateString(until);
return java.time.temporal.ChronoUnit.DAYS.between(start, end) + 1;
} catch (Exception e) {
return 1;
}
}
/**
* Generates a star rating (activity indicator)
*/
private String getStars(int count, int total) {
if (total == 0) return "☆☆☆☆☆";
double ratio = count * 1.0 / total;
if (ratio > 0.15) return "⭐⭐⭐⭐⭐";
else if (ratio > 0.10) return "⭐⭐⭐⭐☆";
else if (ratio > 0.05) return "⭐⭐⭐☆☆";
else if (ratio > 0.02) return "⭐⭐☆☆☆";
else return "⭐☆☆☆☆";
}
/**
* 截断字符串
*/
private String truncateString(String str, int maxLength) {
if (str == null) return "";
if (str.length() <= maxLength) return str;
return str.substring(0, maxLength - 2) + "..";
}
/**
* 判断活跃度等级
*/
private String getActivityLevel(double avgCommits) {
if (avgCommits > 50) return "极高";
else if (avgCommits > 30) return "高";
else if (avgCommits > 15) return "中等偏上";
else if (avgCommits > 8) return "中等";
else if (avgCommits > 3) return "中等偏下";
else return "低";
}
}
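For orientation, here is a minimal sketch of how the text report above might be exposed over HTTP. The controller name, route, and package are assumptions added for illustration and are not part of this commit; only GiteaQueryService.getTextAnalysisReport(since, until) comes from the code above.
// Hypothetical caller - not part of this commit.
package com.chenhai.chenhaiai.controller;
import com.chenhai.chenhaiai.service.gitNew.GiteaQueryService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import java.util.Map;

@RestController
public class GitReportDemoController {

    @Autowired
    private GiteaQueryService giteaQueryService;

    // GET /ai/git/report?since=2025-01-01&until=2025-01-31
    // Dates must be yyyy-MM-dd; the result map contains success, report, rawData and generatedTime.
    @GetMapping("/ai/git/report")
    public Map<String, Object> report(@RequestParam String since, @RequestParam String until) {
        return giteaQueryService.getTextAnalysisReport(since, until);
    }
}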

209
chenhai-ai/src/main/java/com/chenhai/chenhaiai/utils/CharacterStreamProcessor.java

@@ -0,0 +1,209 @@
// CharacterStreamProcessor.java
package com.chenhai.chenhaiai.utils;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.springframework.ai.chat.client.ChatClient;
import reactor.core.publisher.Flux;
import reactor.core.publisher.FluxSink;
import java.util.*;
import java.util.concurrent.atomic.AtomicInteger;
/**
* 字符级流式输出处理器
*/
public class CharacterStreamProcessor {
private static final ObjectMapper objectMapper = new ObjectMapper();
// 字符输出配置
private static final int CHARS_PER_CHUNK = 3; // characters per emitted chunk; smaller feels more like typing
private static final int MODULE_DELAY_MS = 300; // delay between modules (not referenced in this version)
private static final int LINE_BREAK_DELAY_MS = 100; // 换行延迟
/**
* 创建真正的字符级流式响应
*/
public static Flux<String> createCharacterStream(
ChatClient chatClient,
String prompt,
FluxSink<String> sink) {
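// Note: the outer "sink" parameter is not used below; the method builds and returns
// its own Flux via Flux.create and emits through innerSink.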
return Flux.create(innerSink -> {
try {
// 1. 发送开始信号
innerSink.next(formatMessage("start", "开始分析..."));
// 2. 收集AI的流式响应
StringBuilder fullResponse = new StringBuilder();
chatClient.prompt()
.user(prompt)
.stream()
.content()
.subscribe(
chunk -> {
fullResponse.append(chunk);
// emit characters in real time to produce the typing effect
sendCharacterByCharacter(chunk, innerSink);
},
error -> {
innerSink.next(formatMessage("error",
"分析失败: " + error.getMessage()));
innerSink.complete();
},
() -> {
// once all content has been sent, emit the completion signal
try {
Thread.sleep(500);
innerSink.next(formatMessage("complete", "分析完成"));
innerSink.complete();
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
}
}
);
} catch (Exception e) {
innerSink.next(formatMessage("error", "流式处理错误: " + e.getMessage()));
innerSink.complete();
}
});
}
/**
* Sends text character by character to simulate typing
*/
private static void sendCharacterByCharacter(String text, FluxSink<String> sink) {
if (text == null || text.isEmpty()) {
return;
}
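// The Thread.sleep calls below pace the output; they block whatever thread the
// ChatClient stream is emitting on.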
try {
// 按字符分割
char[] chars = text.toCharArray();
StringBuilder currentChunk = new StringBuilder();
for (int i = 0; i < chars.length; i++) {
currentChunk.append(chars[i]);
// flush once a special character is encountered or the chunk size is reached
if (shouldSendNow(chars[i], currentChunk.length(), i, chars)) {
sink.next(formatMessage("content", currentChunk.toString()));
currentChunk.setLength(0);
// 根据字符类型添加不同的延迟
int delay = calculateDelay(chars[i]);
if (delay > 0) {
Thread.sleep(delay);
}
}
}
// 发送剩余字符
if (currentChunk.length() > 0) {
sink.next(formatMessage("content", currentChunk.toString()));
}
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
}
}
/**
* 判断是否应该现在发送
*/
private static boolean shouldSendNow(char currentChar, int chunkSize, int index, char[] allChars) {
// 达到chunk大小
if (chunkSize >= CHARS_PER_CHUNK) {
return true;
}
// flush on line breaks
if (currentChar == '\n' || currentChar == '\r') {
return true;
}
// CJK characters are generally flushed one at a time
if (isChineseChar(currentChar)) {
return true;
}
// 遇到模块标题开始
if (index > 0 && allChars[index-1] == '\n' && currentChar == '[') {
return true;
}
return false;
}
/**
* 计算延迟时间
*/
private static int calculateDelay(char c) {
// 换行符延迟稍长
if (c == '\n' || c == '\r') {
return LINE_BREAK_DELAY_MS;
}
// 标点符号延迟
if (isPunctuation(c)) {
return 50;
}
// 中文字符延迟
if (isChineseChar(c)) {
return 30;
}
// 英文字符延迟
if (Character.isLetter(c)) {
return 20;
}
// 数字和普通字符
return 10;
}
private static boolean isChineseChar(char c) {
Character.UnicodeBlock ub = Character.UnicodeBlock.of(c);
return ub == Character.UnicodeBlock.CJK_UNIFIED_IDEOGRAPHS
|| ub == Character.UnicodeBlock.CJK_COMPATIBILITY_IDEOGRAPHS
|| ub == Character.UnicodeBlock.CJK_UNIFIED_IDEOGRAPHS_EXTENSION_A;
}
private static boolean isPunctuation(char c) {
return c == ',' || c == '.' || c == ';' || c == ':' || c == '!' || c == '?';
}
/**
* 格式化消息为JSON
*/
public static String formatMessage(String type, String content) {
try {
Map<String, Object> message = new HashMap<>();
message.put("type", type);
message.put("content", content);
message.put("timestamp", System.currentTimeMillis());
return objectMapper.writeValueAsString(message);
} catch (Exception e) {
return String.format("{\"type\":\"%s\",\"content\":\"%s\",\"timestamp\":%d}",
type, content.replace("\"", "\\\""), System.currentTimeMillis());
}
}
/**
* Augments the analysis prompt so the AI returns the expected output format
*/
public static String optimizePromptForStreaming(String originalPrompt) {
return originalPrompt + "\n\n" +
"【重要格式要求】\n" +
"1. 每个模块标题独立一行:[模块X: 标题]\n" +
"2. 标题后空一行再开始内容\n" +
"3. 模块之间用两个换行符分隔\n" +
"4. 保持简洁,每行不要过长\n" +
"5. 关键数据用**包裹\n" +
"6. 严格按照模板结构输出";
}
}
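A minimal sketch of how this processor might back a server-sent-events endpoint. The controller, route, and the way the ChatClient bean is obtained are assumptions, not part of this commit; the unused third parameter is passed as null only because createCharacterStream ignores it.
// Hypothetical caller - not part of this commit.
package com.chenhai.chenhaiai.controller;
import com.chenhai.chenhaiai.utils.CharacterStreamProcessor;
import org.springframework.ai.chat.client.ChatClient;
import org.springframework.http.MediaType;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import reactor.core.publisher.Flux;

@RestController
public class TypingStreamDemoController {

    private final ChatClient chatClient; // assumes a ChatClient bean is configured elsewhere

    public TypingStreamDemoController(ChatClient chatClient) {
        this.chatClient = chatClient;
    }

    // Each emitted element is a JSON envelope: {"type":"...","content":"...","timestamp":...}
    @GetMapping(value = "/ai/stream-demo", produces = MediaType.TEXT_EVENT_STREAM_VALUE)
    public Flux<String> stream(@RequestParam String prompt) {
        // The third argument is ignored inside createCharacterStream, so null is passed in this sketch.
        return CharacterStreamProcessor.createCharacterStream(
                chatClient, CharacterStreamProcessor.optimizePromptForStreaming(prompt), null);
    }
}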

24
chenhai-ai/src/main/java/com/chenhai/chenhaiai/utils/ProgressEmitter.java

@@ -0,0 +1,24 @@
package com.chenhai.chenhaiai.utils;
import reactor.core.publisher.FluxSink;
public class ProgressEmitter {
private FluxSink<String> sink;
public void setSink(FluxSink<String> sink) {
this.sink = sink;
}
public void emitProgress(String nodeName, String message) {
if (sink != null) {
String progressMsg = CharacterStreamProcessor.formatMessage("progress",
"[" + getFriendlyNodeName(nodeName) + "] " + message);
sink.next(progressMsg);
}
}
private String getFriendlyNodeName(String nodeName) {
// 简化节点名显示
return nodeName.replace("JdbcNode", "").replace("Node", "");
}
}
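A small sketch of the intended wiring, assuming the caller owns the Flux and hands its sink to the emitter; the node name and message are illustrative only and not part of this commit.
// Hypothetical demo - placed in the same package only for brevity.
package com.chenhai.chenhaiai.utils;
import reactor.core.publisher.Flux;

public class ProgressEmitterDemo {

    // Returns a Flux whose single element is a "progress" JSON message for an illustrative node.
    public static Flux<String> demo() {
        ProgressEmitter emitter = new ProgressEmitter();
        return Flux.create(sink -> {
            emitter.setSink(sink);
            // "DataCollectJdbcNode" is shown to the client as "DataCollect"
            emitter.emitProgress("DataCollectJdbcNode", "collecting data");
            sink.complete();
        });
    }
}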

277
chenhai-ai/src/main/java/com/chenhai/chenhaiai/utils/PromptLoader.java

@@ -0,0 +1,277 @@
package com.chenhai.chenhaiai.utils;
import org.springframework.core.io.ClassPathResource;
import org.springframework.core.io.Resource;
import org.springframework.stereotype.Component;
import org.springframework.util.FileCopyUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import jakarta.annotation.PostConstruct;
import java.io.*;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.util.Enumeration;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.jar.JarEntry;
import java.util.jar.JarFile;
@Component
public class PromptLoader {
private static final Logger logger = LoggerFactory.getLogger(PromptLoader.class);
private final Map<String, String> promptCache = new ConcurrentHashMap<>();
private boolean debugMode = true; // 生产环境可设为false
/**
* 从classpath加载提示词文件 - 保持原有API不变
*/
public String loadPrompt(String filePath) throws IOException {
// 先从缓存获取
if (promptCache.containsKey(filePath)) {
return promptCache.get(filePath);
}
if (debugMode) {
logger.info("开始加载提示词文件: {}", filePath);
}
// 尝试多种加载方案
String content = tryAllLoadingStrategies(filePath);
if (content != null) {
promptCache.put(filePath, content);
if (debugMode) {
logger.info("✅ 提示词加载成功: {}", filePath);
}
return content;
}
throw new IOException("提示词文件不存在: " + filePath);
}
/**
* 尝试所有可能的加载策略
*/
private String tryAllLoadingStrategies(String filePath) {
// Strategy 1: Spring's ClassPathResource (the original approach)
String content = loadWithClassPathResource(filePath);
if (content != null) return content;
// Strategy 2: raw ClassLoader lookup (works inside the packaged JAR)
content = loadWithClassLoader(filePath);
if (content != null) return content;
// Strategy 3: relative path under prompts/
content = loadWithClassLoader("prompts/" + filePath);
if (content != null) return content;
// Strategy 4: absolute path under /prompts/
content = loadWithClassLoader("/prompts/" + filePath);
if (content != null) return content;
// Strategy 5: BOOT-INF path (Spring Boot fat-jar layout)
content = loadWithClassLoader("BOOT-INF/classes/prompts/" + filePath);
if (content != null) return content;
// Strategy 6: everything failed - dump debug information
if (debugMode) {
logDebugInfo(filePath);
}
return null;
}
/**
* Loads via Spring's ClassPathResource (the original approach)
*/
private String loadWithClassPathResource(String filePath) {
try {
Resource resource = new ClassPathResource(filePath);
if (resource.exists()) {
try (InputStreamReader reader = new InputStreamReader(
resource.getInputStream(), StandardCharsets.UTF_8)) {
return FileCopyUtils.copyToString(reader);
}
}
} catch (Exception e) {
if (debugMode) {
logger.debug("ClassPathResource加载失败: {} - {}", filePath, e.getMessage());
}
}
return null;
}
/**
* Loads directly via the ClassLoader (works inside the packaged JAR)
*/
private String loadWithClassLoader(String path) {
try (InputStream inputStream = getClass().getClassLoader().getResourceAsStream(path)) {
if (inputStream != null) {
return readInputStreamContent(inputStream);
}
} catch (Exception e) {
if (debugMode) {
logger.debug("ClassLoader加载失败: {} - {}", path, e.getMessage());
}
}
return null;
}
/**
* 从输入流读取内容
*/
private String readInputStreamContent(InputStream inputStream) throws IOException {
try (BufferedReader reader = new BufferedReader(
new InputStreamReader(inputStream, StandardCharsets.UTF_8))) {
StringBuilder content = new StringBuilder();
String line;
while ((line = reader.readLine()) != null) {
content.append(line).append("\n");
}
return content.toString();
}
}
/**
* 输出调试信息
*/
private void logDebugInfo(String filePath) {
try {
logger.warn("=== 提示词文件查找失败,开始调试 ===");
logger.warn("查找文件: {}", filePath);
// 列出所有可用的prompts文件
Enumeration<URL> resources = getClass().getClassLoader().getResources("prompts");
boolean foundResources = false;
while (resources.hasMoreElements()) {
foundResources = true;
URL url = resources.nextElement();
logger.warn("资源位置: {}", url);
if ("jar".equals(url.getProtocol())) {
listJarContents(url);
}
}
if (!foundResources) {
logger.warn("未找到任何prompts目录资源");
}
// 测试常见文件路径
testCommonFilePaths();
} catch (Exception e) {
logger.error("调试信息输出失败", e);
}
}
/**
* 列出JAR包内容
*/
private void listJarContents(URL jarUrl) {
try {
String jarPath = jarUrl.getPath();
if (jarPath.startsWith("file:")) {
jarPath = jarPath.substring(5);
}
if (jarPath.contains("!")) {
jarPath = jarPath.substring(0, jarPath.indexOf("!"));
}
logger.warn("扫描JAR文件: {}", jarPath);
try (JarFile jarFile = new JarFile(jarPath)) {
Enumeration<JarEntry> entries = jarFile.entries();
int promptFileCount = 0;
while (entries.hasMoreElements()) {
JarEntry entry = entries.nextElement();
String name = entry.getName();
if (name.contains("prompts") && name.endsWith(".txt")) {
promptFileCount++;
if (promptFileCount <= 10) { // 只显示前10个文件
logger.warn("JAR中的提示词文件: {}", name);
}
}
}
logger.warn("找到 {} 个提示词文件", promptFileCount);
}
} catch (Exception e) {
logger.error("扫描JAR文件失败", e);
}
}
/**
* 测试常见文件路径
*/
private void testCommonFilePaths() {
String[] testFiles = {
"week_plan_analysis4.txt",
"management-perspective.txt",
"process-perspective.txt",
"culture-perspective.txt",
"comprehensive-perspective.txt"
};
String[] testPaths = {
"prompts/",
"/prompts/",
"BOOT-INF/classes/prompts/"
};
for (String file : testFiles) {
for (String path : testPaths) {
String fullPath = path + file;
InputStream stream = getClass().getClassLoader().getResourceAsStream(fullPath);
if (stream != null) {
logger.warn("✅ 测试成功: {}", fullPath);
try { stream.close(); } catch (IOException e) {}
} else {
logger.warn("❌ 测试失败: {}", fullPath);
}
}
}
}
/**
* 加载提示词并替换占位符 - 保持原有API不变
*/
public String loadPrompt(String filePath, Map<String, String> placeholders) throws IOException {
String template = loadPrompt(filePath);
// 替换占位符
for (Map.Entry<String, String> entry : placeholders.entrySet()) {
template = template.replace("{" + entry.getKey() + "}", entry.getValue());
}
return template;
}
/**
* 预加载常用提示词 - 保持原有逻辑
*/
@PostConstruct
public void preloadPrompts() {
try {
// 预加载周计划分析提示词
loadPrompt("prompts/week_plan_analysis4.txt");
if (debugMode) {
logger.info("提示词预加载完成");
}
} catch (IOException e) {
logger.error("提示词预加载失败: {}", e.getMessage());
}
}
/**
* 清空缓存用于调试
*/
public void clearCache() {
promptCache.clear();
if (debugMode) {
logger.info("提示词缓存已清空");
}
}
}
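A minimal caller sketch. The service class and the placeholder name git_report are assumptions for illustration; the template path prompts/week_plan_analysis4.txt is the one preloaded above, and placeholders are written as {key} in the template file.
// Hypothetical caller - not part of this commit.
package com.chenhai.chenhaiai.service;
import com.chenhai.chenhaiai.utils.PromptLoader;
import org.springframework.stereotype.Service;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

@Service
public class WeekPlanPromptDemo {

    private final PromptLoader promptLoader;

    public WeekPlanPromptDemo(PromptLoader promptLoader) {
        this.promptLoader = promptLoader;
    }

    public String buildPrompt(String gitReport) throws IOException {
        Map<String, String> vars = new HashMap<>();
        vars.put("git_report", gitReport); // replaces {git_report} in the template, if present
        return promptLoader.loadPrompt("prompts/week_plan_analysis4.txt", vars);
    }
}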

81
chenhai-ai/src/main/java/com/chenhai/chenhaiai/utils/TextFormatUtils.java

@@ -0,0 +1,81 @@
// TextFormatUtils.java
package com.chenhai.chenhaiai.utils;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.util.HashMap;
import java.util.Map;
/**
* 文本格式处理工具类
*/
public class TextFormatUtils {
private static final ObjectMapper objectMapper = new ObjectMapper();
/**
* 格式化消息为JSON字符串
*/
public static String formatMessage(String type, String content) {
try {
Map<String, Object> message = new HashMap<>();
message.put("type", type);
message.put("content", content);
message.put("timestamp", System.currentTimeMillis());
return objectMapper.writeValueAsString(message);
} catch (Exception e) {
// fall back to manual formatting if JSON serialization fails
return String.format("{\"type\":\"%s\",\"content\":\"%s\",\"timestamp\":%d}",
type,
escapeJson(content),
System.currentTimeMillis()
);
}
}
/**
* 转义JSON特殊字符
*/
private static String escapeJson(String content) {
if (content == null) {
return "";
}
return content.replace("\\", "\\\\")
.replace("\"", "\\\"")
.replace("\n", "\\n")
.replace("\r", "\\r")
.replace("\t", "\\t");
}
/**
* 确保分析内容有正确的换行格式
*/
public static String ensureAnalysisFormat(String content) {
if (content == null || content.isEmpty()) {
return content;
}
// 1. Ensure a blank line before each [模块X: 标题] header (except the very first one)
String formatted = content.replaceAll("(?<!^)\\[模块", "\n\n[模块");
// 2. 确保每个模块标题后有换行
formatted = formatted.replaceAll("\\[模块(\\d+): ([^\\]]+)\\]", "[模块$1: $2]\n");
// 3. 去掉开头可能产生的多余空行
formatted = formatted.trim();
return formatted;
}
/**
* 分割分析结果为模块数组
*/
public static String[] splitAnalysisModules(String analysisContent) {
if (analysisContent == null || analysisContent.isEmpty()) {
return new String[0];
}
// "---" 分割模块但保留分割符
return analysisContent.split("(?=---)");
}
}
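A short, self-contained sketch of the expected round trip; the input string is illustrative only and not part of this commit.
// Hypothetical demo - placed in the same package only for brevity.
package com.chenhai.chenhaiai.utils;

public class TextFormatDemo {
    public static void main(String[] args) {
        String raw = "[模块1: 概览]本周提交平稳---[模块2: 风险]无重大风险";
        // Inserts the missing line breaks around module headers
        String formatted = TextFormatUtils.ensureAnalysisFormat(raw);
        // Splits with a lookahead, so each module after the first still starts with "---"
        String[] modules = TextFormatUtils.splitAnalysisModules(formatted);
        for (String module : modules) {
            System.out.println(TextFormatUtils.formatMessage("content", module));
        }
    }
}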

