|
@@ -1,42 +1,76 @@
|
|
|
|
|
+import { useEffect, useRef, useState } from "react";
|
|
|
|
|
+import Anser from "anser";
|
|
|
import dayjs, { type Dayjs } from "dayjs";
|
|
import dayjs, { type Dayjs } from "dayjs";
|
|
|
import _ from "lodash";
|
|
import _ from "lodash";
|
|
|
-import { useEffect, useRef, useState } from "react";
|
|
|
|
|
|
|
+import { z } from "zod";
|
|
|
|
|
+
|
|
|
import api from "shared/api";
|
|
import api from "shared/api";
|
|
|
-import Anser from "anser";
|
|
|
|
|
-import { useWebsockets, type NewWebsocketOptions } from "shared/hooks/useWebsockets";
|
|
|
|
|
import {
|
|
import {
|
|
|
- type AgentLog,
|
|
|
|
|
|
|
+ useWebsockets,
|
|
|
|
|
+ type NewWebsocketOptions,
|
|
|
|
|
+} from "shared/hooks/useWebsockets";
|
|
|
|
|
+
|
|
|
|
|
+import {
|
|
|
agentLogValidator,
|
|
agentLogValidator,
|
|
|
Direction,
|
|
Direction,
|
|
|
- type PorterLog,
|
|
|
|
|
- type PaginationInfo,
|
|
|
|
|
|
|
+ GenericFilter,
|
|
|
|
|
+ type AgentLog,
|
|
|
type FilterName,
|
|
type FilterName,
|
|
|
- GenericFilter
|
|
|
|
|
|
|
+ type PaginationInfo,
|
|
|
} from "../../expanded-app/logs/types";
|
|
} from "../../expanded-app/logs/types";
|
|
|
|
|
|
|
|
// Upper bounds for the log hook's in-memory state. MAX_LOGS and
// MAX_BUFFER_LOGS are consumed elsewhere in this file (presumably capping the
// rendered log list and the websocket buffer respectively — not visible in
// this chunk; confirm against pushLogs/flushLogsBuffer).
const MAX_LOGS = 5000;
const MAX_BUFFER_LOGS = 1000;
// Page size for historical log queries; also used to detect the last page
// (a backward query returning fewer than QUERY_LIMIT rows yields no
// previous cursor).
const QUERY_LIMIT = 1000;
|
|
|
|
|
|
|
|
-export const parseLogs = (logs: any[] = []): PorterLog[] => {
|
|
|
|
|
- return logs.map((log: any, idx) => {
|
|
|
|
|
|
|
// Shape of a single log line as validated from the api.appLogs response.
// `line` arrives as a raw (possibly ANSI-colored) string and is transformed
// into Anser JSON entries at parse time so consumers never re-parse it.
const porterLogValidator = z.object({
  timestamp: z.string(),
  line: z.string().transform((val) => Anser.ansiToJson(val)),
  output_stream: z.string(),
  service_name: z.string(),
  app_revision_id: z.string(),
  deployment_target_id: z.string(),
  // Job-related fields default to "" when absent from the payload.
  job_name: z.string().default(""),
  job_run_id: z.string().default(""),
  // lineNumber and revision are filled in client-side after parsing.
  lineNumber: z.number().default(0),
  revision: z.string().default("0"),
});

export type PorterLog = z.infer<typeof porterLogValidator>;
|
|
|
|
|
+
|
|
|
|
|
+export const parseLogsFromAgent = (logs: unknown[] = []): PorterLog[] => {
|
|
|
|
|
+ return logs.map((log, idx) => {
|
|
|
try {
|
|
try {
|
|
|
const parsed: AgentLog = agentLogValidator.parse(log);
|
|
const parsed: AgentLog = agentLogValidator.parse(log);
|
|
|
// TODO Move log parsing to the render method
|
|
// TODO Move log parsing to the render method
|
|
|
const ansiLog = Anser.ansiToJson(parsed.line);
|
|
const ansiLog = Anser.ansiToJson(parsed.line);
|
|
|
return {
|
|
return {
|
|
|
|
|
+ timestamp: parsed.timestamp,
|
|
|
line: ansiLog,
|
|
line: ansiLog,
|
|
|
|
|
+ output_stream: parsed.metadata?.output_stream ?? "",
|
|
|
|
|
+ service_name:
|
|
|
|
|
+ parsed.metadata?.raw_labels?.porter_run_service_name ?? "",
|
|
|
|
|
+ app_revision_id:
|
|
|
|
|
+ parsed.metadata?.raw_labels?.porter_run_app_revision_id ?? "",
|
|
|
|
|
+ deployment_target_id:
|
|
|
|
|
+ parsed.metadata?.raw_labels?.porter_run_deployment_target_id ?? "",
|
|
|
|
|
+ job_name: parsed.metadata?.raw_labels?.job_name ?? "",
|
|
|
|
|
+ job_run_id: parsed.metadata?.raw_labels?.controller_uid ?? "",
|
|
|
|
|
+ revision: "0",
|
|
|
lineNumber: idx + 1,
|
|
lineNumber: idx + 1,
|
|
|
- timestamp: parsed.timestamp,
|
|
|
|
|
- metadata: parsed.metadata,
|
|
|
|
|
};
|
|
};
|
|
|
} catch (err) {
|
|
} catch (err) {
|
|
|
- console.log(err)
|
|
|
|
|
return {
|
|
return {
|
|
|
- line: Anser.ansiToJson(log.toString()),
|
|
|
|
|
|
|
+ timestamp: "",
|
|
|
|
|
+ line: Anser.ansiToJson(JSON.stringify(log)),
|
|
|
|
|
+ output_stream: "",
|
|
|
|
|
+ service_name: "",
|
|
|
|
|
+ app_revision_id: "",
|
|
|
|
|
+ deployment_target_id: "",
|
|
|
|
|
+ job_name: "",
|
|
|
|
|
+ job_run_id: "",
|
|
|
|
|
+ revision: "0",
|
|
|
lineNumber: idx + 1,
|
|
lineNumber: idx + 1,
|
|
|
- timestamp: undefined,
|
|
|
|
|
- }
|
|
|
|
|
|
|
+ };
|
|
|
}
|
|
}
|
|
|
});
|
|
});
|
|
|
};
|
|
};
|
|
@@ -51,35 +85,38 @@ export const useLogs = ({
|
|
|
notify,
|
|
notify,
|
|
|
setLoading,
|
|
setLoading,
|
|
|
revisionIdToNumber,
|
|
revisionIdToNumber,
|
|
|
|
|
+ revisionNumberToId,
|
|
|
setDate,
|
|
setDate,
|
|
|
appRevisionId = "",
|
|
appRevisionId = "",
|
|
|
timeRange,
|
|
timeRange,
|
|
|
filterPredeploy,
|
|
filterPredeploy,
|
|
|
appID,
|
|
appID,
|
|
|
- jobRunID = ""
|
|
|
|
|
|
|
+ jobRunName = "",
|
|
|
}: {
|
|
}: {
|
|
|
- projectID: number,
|
|
|
|
|
- clusterID: number,
|
|
|
|
|
- selectedFilterValues: Record<FilterName, string>,
|
|
|
|
|
- appName: string,
|
|
|
|
|
- deploymentTargetId: string,
|
|
|
|
|
- searchParam: string,
|
|
|
|
|
- notify: (message: string) => void,
|
|
|
|
|
- setLoading: (isLoading: boolean) => void,
|
|
|
|
|
- revisionIdToNumber: Record<string, number>,
|
|
|
|
|
|
|
+ projectID: number;
|
|
|
|
|
+ clusterID: number;
|
|
|
|
|
+ selectedFilterValues: Record<FilterName, string>;
|
|
|
|
|
+ appName: string;
|
|
|
|
|
+ deploymentTargetId: string;
|
|
|
|
|
+ searchParam: string;
|
|
|
|
|
+ notify: (message: string) => void;
|
|
|
|
|
+ setLoading: (isLoading: boolean) => void;
|
|
|
|
|
+ revisionIdToNumber: Record<string, number>;
|
|
|
|
|
+ revisionNumberToId: Record<number, string>;
|
|
|
// if setDate is set, results are not live
|
|
// if setDate is set, results are not live
|
|
|
- setDate?: Date,
|
|
|
|
|
- appRevisionId?: string,
|
|
|
|
|
|
|
+ setDate?: Date;
|
|
|
|
|
+ appRevisionId?: string;
|
|
|
timeRange?: {
|
|
timeRange?: {
|
|
|
- startTime?: Dayjs,
|
|
|
|
|
- endTime?: Dayjs,
|
|
|
|
|
- },
|
|
|
|
|
- filterPredeploy: boolean,
|
|
|
|
|
- appID: number,
|
|
|
|
|
- jobRunID?: string,
|
|
|
|
|
-}
|
|
|
|
|
-) => {
|
|
|
|
|
- const [isLive, setIsLive] = useState<boolean>(!setDate && (timeRange?.startTime == null && timeRange?.endTime == null));
|
|
|
|
|
|
|
+ startTime?: Dayjs;
|
|
|
|
|
+ endTime?: Dayjs;
|
|
|
|
|
+ };
|
|
|
|
|
+ filterPredeploy: boolean;
|
|
|
|
|
+ appID: number;
|
|
|
|
|
+ jobRunName?: string;
|
|
|
|
|
+}) => {
|
|
|
|
|
+ const [isLive, setIsLive] = useState<boolean>(
|
|
|
|
|
+ !setDate && timeRange?.startTime == null && timeRange?.endTime == null
|
|
|
|
|
+ );
|
|
|
const logsBufferRef = useRef<PorterLog[]>([]);
|
|
const logsBufferRef = useRef<PorterLog[]>([]);
|
|
|
const [logs, setLogs] = useState<PorterLog[]>([]);
|
|
const [logs, setLogs] = useState<PorterLog[]>([]);
|
|
|
const [paginationInfo, setPaginationInfo] = useState<PaginationInfo>({
|
|
const [paginationInfo, setPaginationInfo] = useState<PaginationInfo>({
|
|
@@ -98,16 +135,12 @@ export const useLogs = ({
|
|
|
// result of the initial query
|
|
// result of the initial query
|
|
|
// - moving the cursor both forward and backward changes the start and end dates
|
|
// - moving the cursor both forward and backward changes the start and end dates
|
|
|
|
|
|
|
|
- const {
|
|
|
|
|
- newWebsocket,
|
|
|
|
|
- openWebsocket,
|
|
|
|
|
- closeAllWebsockets,
|
|
|
|
|
- } = useWebsockets();
|
|
|
|
|
|
|
+ const { newWebsocket, openWebsocket, closeAllWebsockets } = useWebsockets();
|
|
|
|
|
|
|
|
const updateLogs = (
|
|
const updateLogs = (
|
|
|
newLogs: PorterLog[],
|
|
newLogs: PorterLog[],
|
|
|
direction: Direction = Direction.forward
|
|
direction: Direction = Direction.forward
|
|
|
- ) => {
|
|
|
|
|
|
|
+ ): void => {
|
|
|
// Nothing to update here
|
|
// Nothing to update here
|
|
|
if (!newLogs.length) {
|
|
if (!newLogs.length) {
|
|
|
return;
|
|
return;
|
|
@@ -187,7 +220,7 @@ export const useLogs = ({
|
|
|
search_param: searchParam,
|
|
search_param: searchParam,
|
|
|
app_revision_id: appRevisionId,
|
|
app_revision_id: appRevisionId,
|
|
|
app_id: appID.toString(),
|
|
app_id: appID.toString(),
|
|
|
- }
|
|
|
|
|
|
|
+ };
|
|
|
|
|
|
|
|
const q = new URLSearchParams(searchParams).toString();
|
|
const q = new URLSearchParams(searchParams).toString();
|
|
|
|
|
|
|
@@ -202,31 +235,32 @@ export const useLogs = ({
|
|
|
if (evt.data == null) {
|
|
if (evt.data == null) {
|
|
|
return;
|
|
return;
|
|
|
}
|
|
}
|
|
|
- const jsonData = evt.data.trim().split("\n")
|
|
|
|
|
- const newLogs: any[] = [];
|
|
|
|
|
- jsonData.forEach((data: string) => {
|
|
|
|
|
- try {
|
|
|
|
|
- const jsonLog = JSON.parse(data);
|
|
|
|
|
- newLogs.push(jsonLog)
|
|
|
|
|
- } catch (err) {
|
|
|
|
|
- // TODO: better error handling
|
|
|
|
|
- // console.log(err)
|
|
|
|
|
|
|
+ const jsonData = evt.data.trim().split("\n");
|
|
|
|
|
+ const newLogs = jsonData.map((data: string) => {
|
|
|
|
|
+ const parsedLogData = z
|
|
|
|
|
+ .record(z.unknown())
|
|
|
|
|
+ .safeParse(JSON.parse(data));
|
|
|
|
|
+ if (!parsedLogData.success) {
|
|
|
|
|
+ return {};
|
|
|
}
|
|
}
|
|
|
|
|
+
|
|
|
|
|
+ return parsedLogData.data;
|
|
|
});
|
|
});
|
|
|
- const newLogsParsed = parseLogs(newLogs);
|
|
|
|
|
- newLogsParsed.filter((log) => {
|
|
|
|
|
- return log.metadata?.raw_labels?.porter_run_app_revision_id != null
|
|
|
|
|
- && revisionIdToNumber[log.metadata.raw_labels.porter_run_app_revision_id] != null
|
|
|
|
|
- && revisionIdToNumber[log.metadata.raw_labels.porter_run_app_revision_id] != 0
|
|
|
|
|
- }).forEach((log) => {
|
|
|
|
|
- if (log.metadata?.raw_labels?.porter_run_app_revision_id != null) {
|
|
|
|
|
- const revisionNumber = revisionIdToNumber[log.metadata.raw_labels.porter_run_app_revision_id];
|
|
|
|
|
- if (revisionNumber != null && revisionNumber != 0) {
|
|
|
|
|
- log.metadata.revision = revisionNumber.toString();
|
|
|
|
|
- }
|
|
|
|
|
- }
|
|
|
|
|
- })
|
|
|
|
|
- const newLogsFiltered = filterLogs(newLogsParsed);
|
|
|
|
|
|
|
+ const newLogsParsed = parseLogsFromAgent(newLogs);
|
|
|
|
|
+
|
|
|
|
|
+ const logsWithRevisionNumber = newLogsParsed
|
|
|
|
|
+ .filter(
|
|
|
|
|
+ (log) =>
|
|
|
|
|
+ !!log.app_revision_id &&
|
|
|
|
|
+ !!revisionIdToNumber[log.app_revision_id] &&
|
|
|
|
|
+ revisionIdToNumber[log.app_revision_id] !== 0
|
|
|
|
|
+ )
|
|
|
|
|
+ .map((log) => ({
|
|
|
|
|
+ ...log,
|
|
|
|
|
+ revision: revisionIdToNumber[log.app_revision_id].toString(),
|
|
|
|
|
+ }));
|
|
|
|
|
+
|
|
|
|
|
+ const newLogsFiltered = filterLogs(logsWithRevisionNumber);
|
|
|
pushLogs(newLogsFiltered);
|
|
pushLogs(newLogsFiltered);
|
|
|
},
|
|
},
|
|
|
onclose: () => {
|
|
onclose: () => {
|
|
@@ -238,32 +272,17 @@ export const useLogs = ({
|
|
|
openWebsocket(websocketKey);
|
|
openWebsocket(websocketKey);
|
|
|
};
|
|
};
|
|
|
|
|
|
|
|
- const filterLogs = (logs: PorterLog[]) => {
|
|
|
|
|
- return logs.filter(log => {
|
|
|
|
|
- if (log.metadata == null) {
|
|
|
|
|
- return true;
|
|
|
|
|
- }
|
|
|
|
|
-
|
|
|
|
|
- if (jobRunID !== "" && log.metadata.raw_labels?.controller_uid !== jobRunID) {
|
|
|
|
|
- return false;
|
|
|
|
|
- }
|
|
|
|
|
-
|
|
|
|
|
- if (selectedFilterValues.output_stream !== GenericFilter.getDefaultOption("output_stream").value &&
|
|
|
|
|
- log.metadata.output_stream !== selectedFilterValues.output_stream) {
|
|
|
|
|
- return false;
|
|
|
|
|
- }
|
|
|
|
|
-
|
|
|
|
|
- if (filterPredeploy && (log.metadata.raw_labels?.porter_run_service_name ?? "").endsWith("predeploy")) {
|
|
|
|
|
- return false;
|
|
|
|
|
- }
|
|
|
|
|
-
|
|
|
|
|
- if (selectedFilterValues.revision !== GenericFilter.getDefaultOption("revision").value &&
|
|
|
|
|
- log.metadata.revision !== selectedFilterValues.revision) {
|
|
|
|
|
|
|
+ const filterLogs = (logs: PorterLog[]): PorterLog[] => {
|
|
|
|
|
+ return logs.filter((log) => {
|
|
|
|
|
+ if (
|
|
|
|
|
+ selectedFilterValues.output_stream !==
|
|
|
|
|
+ GenericFilter.getDefaultOption("output_stream").value &&
|
|
|
|
|
+ log.output_stream !== selectedFilterValues.output_stream
|
|
|
|
|
+ ) {
|
|
|
return false;
|
|
return false;
|
|
|
}
|
|
}
|
|
|
|
|
|
|
|
- if (selectedFilterValues.revision_id !== GenericFilter.getDefaultOption("revision_id").value &&
|
|
|
|
|
- log.metadata.raw_labels?.porter_run_app_revision_id !== selectedFilterValues.revision_id) {
|
|
|
|
|
|
|
+ if (filterPredeploy && log.service_name.endsWith("predeploy")) {
|
|
|
return false;
|
|
return false;
|
|
|
}
|
|
}
|
|
|
|
|
|
|
@@ -283,7 +302,6 @@ export const useLogs = ({
|
|
|
}> => {
|
|
}> => {
|
|
|
try {
|
|
try {
|
|
|
const getLogsReq = {
|
|
const getLogsReq = {
|
|
|
- app_id: appID,
|
|
|
|
|
service_name: selectedFilterValues.service_name,
|
|
service_name: selectedFilterValues.service_name,
|
|
|
deployment_target_id: deploymentTargetId,
|
|
deployment_target_id: deploymentTargetId,
|
|
|
search_param: searchParam,
|
|
search_param: searchParam,
|
|
@@ -291,20 +309,26 @@ export const useLogs = ({
|
|
|
end_range: endDate,
|
|
end_range: endDate,
|
|
|
limit,
|
|
limit,
|
|
|
direction,
|
|
direction,
|
|
|
- app_revision_id: appRevisionId,
|
|
|
|
|
|
|
+ app_revision_id:
|
|
|
|
|
+ revisionNumberToId[parseInt(selectedFilterValues.revision)],
|
|
|
|
|
+ job_run_name: jobRunName,
|
|
|
};
|
|
};
|
|
|
|
|
|
|
|
- const logsResp = await api.appLogs(
|
|
|
|
|
- "<token>",
|
|
|
|
|
- getLogsReq,
|
|
|
|
|
- {
|
|
|
|
|
- cluster_id: clusterID,
|
|
|
|
|
- project_id: projectID,
|
|
|
|
|
- porter_app_name: appName,
|
|
|
|
|
- }
|
|
|
|
|
- )
|
|
|
|
|
|
|
+ const logsResp = await api.appLogs("<token>", getLogsReq, {
|
|
|
|
|
+ cluster_id: clusterID,
|
|
|
|
|
+ project_id: projectID,
|
|
|
|
|
+ porter_app_name: appName,
|
|
|
|
|
+ });
|
|
|
|
|
+
|
|
|
|
|
+ const parsedRes = z
|
|
|
|
|
+ .object({
|
|
|
|
|
+ logs: z.array(porterLogValidator),
|
|
|
|
|
+ backward_continue_time: z.string().nullable(),
|
|
|
|
|
+ forward_continue_time: z.string().nullable(),
|
|
|
|
|
+ })
|
|
|
|
|
+ .safeParse(logsResp.data);
|
|
|
|
|
|
|
|
- if (logsResp.data == null) {
|
|
|
|
|
|
|
+ if (!parsedRes.success) {
|
|
|
return {
|
|
return {
|
|
|
logs: [],
|
|
logs: [],
|
|
|
previousCursor: null,
|
|
previousCursor: null,
|
|
@@ -312,32 +336,32 @@ export const useLogs = ({
|
|
|
};
|
|
};
|
|
|
}
|
|
}
|
|
|
|
|
|
|
|
- const newLogs = parseLogs(logsResp.data.logs);
|
|
|
|
|
|
|
+ const newLogs = parsedRes.data.logs;
|
|
|
if (direction === Direction.backward) {
|
|
if (direction === Direction.backward) {
|
|
|
newLogs.reverse();
|
|
newLogs.reverse();
|
|
|
}
|
|
}
|
|
|
|
|
|
|
|
- newLogs.filter((log) => {
|
|
|
|
|
- return log.metadata?.raw_labels?.porter_run_app_revision_id != null
|
|
|
|
|
- && revisionIdToNumber[log.metadata.raw_labels.porter_run_app_revision_id] != null
|
|
|
|
|
- && revisionIdToNumber[log.metadata.raw_labels.porter_run_app_revision_id] != 0
|
|
|
|
|
- }).forEach((log) => {
|
|
|
|
|
- if (log.metadata?.raw_labels?.porter_run_app_revision_id != null) {
|
|
|
|
|
- const revisionNumber = revisionIdToNumber[log.metadata.raw_labels.porter_run_app_revision_id];
|
|
|
|
|
- if (revisionNumber != null && revisionNumber != 0) {
|
|
|
|
|
- log.metadata.revision = revisionNumber.toString();
|
|
|
|
|
- }
|
|
|
|
|
- }
|
|
|
|
|
- })
|
|
|
|
|
|
|
+ const logsWithRevisionNumber = newLogs
|
|
|
|
|
+ .filter(
|
|
|
|
|
+ (log) =>
|
|
|
|
|
+ !!log.app_revision_id &&
|
|
|
|
|
+ !!revisionIdToNumber[log.app_revision_id] &&
|
|
|
|
|
+ revisionIdToNumber[log.app_revision_id] !== 0
|
|
|
|
|
+ )
|
|
|
|
|
+ .map((log) => ({
|
|
|
|
|
+ ...log,
|
|
|
|
|
+ revision: revisionIdToNumber[log.app_revision_id].toString(),
|
|
|
|
|
+ }));
|
|
|
|
|
|
|
|
return {
|
|
return {
|
|
|
- logs: newLogs,
|
|
|
|
|
|
|
+ logs: logsWithRevisionNumber,
|
|
|
previousCursor:
|
|
previousCursor:
|
|
|
// There are no more historical logs so don't set the previous cursor
|
|
// There are no more historical logs so don't set the previous cursor
|
|
|
- newLogs.length < QUERY_LIMIT && direction == Direction.backward
|
|
|
|
|
|
|
+ logsWithRevisionNumber.length < QUERY_LIMIT &&
|
|
|
|
|
+ direction === Direction.backward
|
|
|
? null
|
|
? null
|
|
|
- : logsResp.data.backward_continue_time,
|
|
|
|
|
- nextCursor: logsResp.data.forward_continue_time,
|
|
|
|
|
|
|
+ : parsedRes.data.backward_continue_time,
|
|
|
|
|
+ nextCursor: parsedRes.data.forward_continue_time,
|
|
|
};
|
|
};
|
|
|
} catch {
|
|
} catch {
|
|
|
return {
|
|
return {
|
|
@@ -352,10 +376,18 @@ export const useLogs = ({
|
|
|
setLoading(true);
|
|
setLoading(true);
|
|
|
setLogs([]);
|
|
setLogs([]);
|
|
|
flushLogsBuffer(true);
|
|
flushLogsBuffer(true);
|
|
|
- const endDate = timeRange?.endTime != null ? timeRange.endTime : dayjs(setDate);
|
|
|
|
|
- const oneDayAgo = timeRange?.startTime != null ? timeRange.startTime : endDate.subtract(1, "day");
|
|
|
|
|
-
|
|
|
|
|
- const { logs: initialLogs, previousCursor, nextCursor } = await queryLogs(
|
|
|
|
|
|
|
+ const endDate =
|
|
|
|
|
+ timeRange?.endTime != null ? timeRange.endTime : dayjs(setDate);
|
|
|
|
|
+ const oneDayAgo =
|
|
|
|
|
+ timeRange?.startTime != null
|
|
|
|
|
+ ? timeRange.startTime
|
|
|
|
|
+ : endDate.subtract(1, "day");
|
|
|
|
|
+
|
|
|
|
|
+ const {
|
|
|
|
|
+ logs: initialLogs,
|
|
|
|
|
+ previousCursor,
|
|
|
|
|
+ nextCursor,
|
|
|
|
|
+ } = await queryLogs(
|
|
|
oneDayAgo.toISOString(),
|
|
oneDayAgo.toISOString(),
|
|
|
endDate.toISOString(),
|
|
endDate.toISOString(),
|
|
|
Direction.backward
|
|
Direction.backward
|
|
@@ -468,14 +500,18 @@ export const useLogs = ({
|
|
|
|
|
|
|
|
const flushLogsBufferInterval = setInterval(flushLogsBuffer, 3000);
|
|
const flushLogsBufferInterval = setInterval(flushLogsBuffer, 3000);
|
|
|
|
|
|
|
|
- return () => { clearInterval(flushLogsBufferInterval); };
|
|
|
|
|
|
|
+ return () => {
|
|
|
|
|
+ clearInterval(flushLogsBufferInterval);
|
|
|
|
|
+ };
|
|
|
}, []);
|
|
}, []);
|
|
|
|
|
|
|
|
useEffect(() => {
|
|
useEffect(() => {
|
|
|
if (Object.keys(revisionIdToNumber).length) {
|
|
if (Object.keys(revisionIdToNumber).length) {
|
|
|
// if a complete time range is not given, then we are live
|
|
// if a complete time range is not given, then we are live
|
|
|
- const isLive = !setDate && (timeRange?.startTime == null || timeRange?.endTime == null);
|
|
|
|
|
- refresh({ isLive });
|
|
|
|
|
|
|
+ const isLive =
|
|
|
|
|
+ !setDate &&
|
|
|
|
|
+ (timeRange?.startTime == null || timeRange?.endTime == null);
|
|
|
|
|
+ void refresh({ isLive });
|
|
|
setIsLive(isLive);
|
|
setIsLive(isLive);
|
|
|
}
|
|
}
|
|
|
}, [
|
|
}, [
|
|
@@ -509,4 +545,4 @@ export const useLogs = ({
|
|
|
paginationInfo,
|
|
paginationInfo,
|
|
|
stopLogStream: closeAllWebsockets,
|
|
stopLogStream: closeAllWebsockets,
|
|
|
};
|
|
};
|
|
|
-};
|
|
|
|
|
|
|
+};
|