forked from Lesin/reposync

Commit 2f714b9800 (parent b3f634b1f9): fix log retrieval issues (修复日志问题)

Diffstat: API.md | 2 changed lines
API.md:

@@ -188,7 +188,7 @@ SYNC_DIR = os.getenv("SYNC_DIR", "/tmp/sync_dir/")
 注: 仓库由内到外同步时,分支输入内部仓库分支名;仓库由外到内同步时,分支输入外部仓库分支名;
 
 ## 日志信息获取
-允许用户通过此接口分页、使用多个分支ID,获取仓库/分支的同步日志。
+允许用户通过此接口使用多个分支ID或多个仓库名称,分页获取仓库/分支的同步日志。
 
 - **URL**:`/cerobot/sync/repo/{repo_name}/logs`
 - **Method**:`GET`
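The updated description says the endpoint returns repository/branch sync logs page by page, filtered by multiple branch IDs or multiple repository names (previously only branch IDs). As a hedged sketch only — the host is an assumption, and the controller change later in this commit moves the route from `/repo/{repo_name}/logs` to `/repo/logs` under the documented `/cerobot/sync` prefix — a client call could look like:

```python
# Hypothetical client call; host, port and the /cerobot/sync prefix are assumed
# from the surrounding docs, not confirmed by this commit.
import requests

resp = requests.get(
    "http://localhost:8000/cerobot/sync/repo/logs",
    params={
        "repo_name": "repo_a,repo_b",  # comma-separated repository names
        "branch_id": "1,2",            # comma-separated branch ids
        "page_num": 1,                 # page number, 1-based
        "page_size": 10,               # rows per page
        "create_sort": False,          # False keeps the default newest-first order
    },
    timeout=10,
)
print(resp.status_code, resp.json())
```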
@@ -260,23 +260,24 @@ class SyncDirection(Controller):
             msg=data.status_msg
         )
 
-    @router.get("/repo/{repo_name}/logs", response_model=SYNCResponse, description='获取仓库/分支日志')
+    @router.get("/repo/logs", response_model=SYNCResponse, description='获取仓库/分支日志')
     async def get_logs(
             self, request: Request, user: str = Depends(user),
-            repo_name: str = Path(..., description="仓库名称"),
+            repo_name: str = Query(None, description="仓库名称"),
             branch_id: str = Query(None, description="分支id(仓库粒度无需输入)"),
             page_num: int = Query(1, description="页数"), page_size: int = Query(10, description="条数"),
             create_sort: bool = Query(False, description="创建时间排序, 默认倒序")
     ):
         api_log(LogType.INFO, f"用户 {user} 使用 GET 方法访问接口 {request.url.path} ", user)
-        branch_id_list = branch_id.split(',')
-        data = await self.log_service.get_logs(repo_name=repo_name, branch_id_list=branch_id_list,
+        branch_id_list = branch_id.split(',') if branch_id is not None else []
+        repo_name_list = repo_name.split(',') if repo_name is not None else []
+        data = await self.log_service.get_logs(repo_name_list=repo_name_list, branch_id_list=branch_id_list,
                                                page_num=page_num, page_size=page_size, create_sort=create_sort)
         if not data:
             return SYNCResponse(
-                code_status=Status.CHECK_IN.code,
+                code_status=Status.NOT_DATA.code,
                 data=data,
-                msg=Status.CHECK_IN.msg
+                msg=Status.NOT_DATA.msg
             )
         return SYNCResponse(
             code_status=Status.SUCCESS.code,
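With this change both `repo_name` and `branch_id` are optional query parameters, so the handler guards against `None` before splitting: a missing parameter becomes an empty list rather than failing on `None.split(',')`, and the empty-result response now reports `Status.NOT_DATA` instead of `Status.CHECK_IN`. A self-contained illustration of the splitting behaviour (`parse_csv` is a hypothetical helper, not part of this codebase):

```python
from typing import List, Optional

def parse_csv(raw: Optional[str]) -> List[str]:
    """Split a comma-separated query parameter, treating None as 'no filter'."""
    return raw.split(',') if raw is not None else []

print(parse_csv("1,2,3"))  # ['1', '2', '3']
print(parse_csv(None))     # []
```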
@@ -283,13 +283,16 @@ class LogDAO(BaseDAO, metaclass=Singleton):
             await session.execute(stmt)
             await session.commit()
 
-    async def get_log(self, repo_name: str, branch_id_list: List[str], page_number: int, page_size: int, create_sort: bool) -> List[LogDTO]:
+    async def get_log(self, repo_name_list: list[str], branch_id_list: List[str], page_number: int, page_size: int, create_sort: bool) -> List[LogDTO]:
         async with self._async_session() as session:
             async with session.begin():
-                branch_id_list = [int(branch_id) for branch_id in branch_id_list]
-                query = select(LogDO).where(LogDO.repo_name == repo_name, LogDO.branch_id.in_(branch_id_list))
-                # stmt = select(LogDO).where(LogDO.repo_name == repo_name, LogDO.branch_id == branch_id)
-                # stmt = stmt.order_by(create_order).offset((page_number - 1) * page_size).limit(page_size)
+                _branch_id_list = [int(branch_id) for branch_id in branch_id_list]
+                if repo_name_list and branch_id_list:
+                    query = select(LogDO).where(and_(LogDO.branch_id.in_(_branch_id_list),
+                                                     LogDO.repo_name.in_(repo_name_list)))
+                else:
+                    query = select(LogDO).where(or_(LogDO.branch_id.in_(_branch_id_list),
+                                                    LogDO.repo_name.in_(repo_name_list)))
                 create_order = LogDO.created_at if create_sort else LogDO.created_at.desc()
                 query = query.order_by(create_order)
                 query = query.offset((page_number - 1) * page_size).limit(page_size)
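The rewritten `get_log` combines the two filters with `and_` when both repository names and branch IDs are supplied, and with `or_` otherwise, so a query that filters on only one of them still matches rows even though the other `IN ()` clause is empty (SQLAlchemy renders an empty `in_()` as an always-false condition). A minimal sketch of that decision, using a stand-in model rather than the project's real `LogDO` mapping:

```python
# Illustrative only: this LogDO is a minimal stand-in, not the project's ORM class.
from sqlalchemy import and_, or_, select
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column

class Base(DeclarativeBase):
    pass

class LogDO(Base):
    __tablename__ = "sync_log"
    id: Mapped[int] = mapped_column(primary_key=True)
    repo_name: Mapped[str] = mapped_column()
    branch_id: Mapped[int] = mapped_column()

def build_filter(repo_names: list[str], branch_ids: list[str]):
    ids = [int(b) for b in branch_ids]
    # Both criteria given -> every filter must match; only one given -> OR keeps
    # the non-empty filter effective instead of ANDing with an always-false clause.
    combine = and_ if (repo_names and branch_ids) else or_
    return select(LogDO).where(combine(LogDO.branch_id.in_(ids),
                                       LogDO.repo_name.in_(repo_names)))

print(build_filter(["repo_a"], ["1", "2"]))  # ... WHERE ... AND ...
print(build_filter([], ["1", "2"]))          # ... WHERE ... OR ...
```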
@@ -130,9 +130,9 @@ async def sync_repo_task(repo, user):
         await sync_branch_task(repo, branches, repo.sync_direction, user)
     else:
         log_name = f'sync_{repo.repo_name}.log'
-        init_repos(repo, log_name, user)
-        sync_log(LogType.INFO, f'************ 执行{repo.repo_name}仓库同步 ************', log_name, user)
         try:
+            init_repos(repo, log_name, user)
+            sync_log(LogType.INFO, f'************ 执行{repo.repo_name}仓库同步 ************', log_name, user)
             if repo.sync_direction == SyncDirect.to_outer:
                 inter_repo_addr = get_repo_address_with_token(repo.internal_repo_address, repo.inter_token)
                 stm = shell(f"git ls-remote --heads {inter_repo_addr}", SYNC_DIR, log_name, user)
@@ -167,10 +167,10 @@ async def sync_branch_task(repo, branches, direct, user):
 
     for branch in branches:
         log_name = f'sync_{repo.repo_name}_{branch.id}.log'
-        init_repos(repo, log_name, user)
-        sync_log(LogType.INFO, f'************ 执行分支同步 ************', log_name, user)
         commit_id = ''
         try:
+            init_repos(repo, log_name, user)
+            sync_log(LogType.INFO, f'************ 执行分支同步 ************', log_name, user)
             if direct == SyncDirect.to_inter:
                 sync_log(LogType.INFO, f'Execute outer to inter {branch.external_branch_name} branch Sync', log_name, user)
                 commit_id = outer_to_inter(repo, branch, log_name, user)
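In both task functions above, `init_repos` and the banner `sync_log` call move inside the `try` block, so an exception raised while preparing the working copy is handled by the same path that already guards the sync step (the `except` clause itself lies outside these hunks). A self-contained toy example of the difference, using stand-in functions only:

```python
def init_repos(repo: str) -> None:
    # Stand-in for the real init_repos: simulate a failure during clone/checkout.
    raise RuntimeError(f"cannot initialise {repo}")

def do_sync(repo: str) -> None:
    print(f"syncing {repo}")

def run_task(repo: str) -> None:
    try:
        init_repos(repo)  # previously called before the try block
        do_sync(repo)
    except Exception as exc:
        # With init inside the try, the failure is logged here instead of
        # propagating out of the task before any handler runs.
        print(f"[ERROR] sync of {repo} failed: {exc}")

run_task("demo-repo")
```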
@@ -211,7 +211,7 @@ class LogService(Service):
         # else:
         #     await self.sync_log_dao.update_branch_log(repo_name, direct, branch_id, commit_id, log_content)
 
-    async def get_logs(self, repo_name: str, branch_id_list: List[str], page_num: int, page_size: int, create_sort: bool) -> Optional[List[LogDTO]]:
-        logs = await self.sync_log_dao.get_log(repo_name=repo_name, branch_id_list=branch_id_list,
+    async def get_logs(self, repo_name_list: List[str], branch_id_list: List[str], page_num: int, page_size: int, create_sort: bool) -> Optional[List[LogDTO]]:
+        logs = await self.sync_log_dao.get_log(repo_name_list=repo_name_list, branch_id_list=branch_id_list,
                                                page_number=page_num, page_size=page_size, create_sort=create_sort)
         return logs