📈 Added real-time import stats and a stats cache
This commit is contained in:
@@ -621,6 +621,99 @@ export const resolvers = {
|
||||
throw new Error("Failed to preview canonical metadata");
|
||||
}
|
||||
},
|
||||
|
||||
/**
|
||||
* Get cached import statistics (fast, real-time)
|
||||
* @async
|
||||
* @function getCachedImportStatistics
|
||||
* @param {any} _ - Parent resolver (unused)
|
||||
* @param {Object} args - Query arguments (none)
|
||||
* @param {Object} context - GraphQL context with broker
|
||||
* @returns {Promise<Object>} Cached import statistics
|
||||
* @throws {Error} If statistics service is unavailable
|
||||
* @description Retrieves cached import statistics from the API service.
|
||||
* This is a fast, real-time query that doesn't require filesystem scanning.
|
||||
*
|
||||
* @example
|
||||
* ```graphql
|
||||
* query {
|
||||
* getCachedImportStatistics {
|
||||
* success
|
||||
* stats {
|
||||
* totalLocalFiles
|
||||
* alreadyImported
|
||||
* newFiles
|
||||
* percentageImported
|
||||
* pendingFiles
|
||||
* }
|
||||
* lastUpdated
|
||||
* }
|
||||
* }
|
||||
* ```
|
||||
*/
|
||||
getCachedImportStatistics: async (
|
||||
_: any,
|
||||
args: {},
|
||||
context: any
|
||||
) => {
|
||||
try {
|
||||
const broker = context?.broker;
|
||||
|
||||
if (!broker) {
|
||||
throw new Error("Broker not available in context");
|
||||
}
|
||||
|
||||
const result = await broker.call("api.getCachedImportStatistics");
|
||||
return result;
|
||||
} catch (error) {
|
||||
console.error("Error fetching cached import statistics:", error);
|
||||
throw new Error(`Failed to fetch cached import statistics: ${error.message}`);
|
||||
}
|
||||
},
|
||||
|
||||
/**
|
||||
* Get job result statistics grouped by session
|
||||
* @async
|
||||
* @function getJobResultStatistics
|
||||
* @param {any} _ - Parent resolver (unused)
|
||||
* @param {Object} args - Query arguments (none)
|
||||
* @param {Object} context - GraphQL context with broker
|
||||
* @returns {Promise<Array>} Array of job result statistics by session
|
||||
* @throws {Error} If job queue service is unavailable
|
||||
* @description Retrieves job result statistics grouped by session ID,
|
||||
* including counts of completed and failed jobs and earliest timestamp.
|
||||
*
|
||||
* @example
|
||||
* ```graphql
|
||||
* query {
|
||||
* getJobResultStatistics {
|
||||
* sessionId
|
||||
* completedJobs
|
||||
* failedJobs
|
||||
* earliestTimestamp
|
||||
* }
|
||||
* }
|
||||
* ```
|
||||
*/
|
||||
getJobResultStatistics: async (
|
||||
_: any,
|
||||
args: {},
|
||||
context: any
|
||||
) => {
|
||||
try {
|
||||
const broker = context?.broker;
|
||||
|
||||
if (!broker) {
|
||||
throw new Error("Broker not available in context");
|
||||
}
|
||||
|
||||
const result = await broker.call("jobqueue.getJobResultStatistics");
|
||||
return result;
|
||||
} catch (error) {
|
||||
console.error("Error fetching job result statistics:", error);
|
||||
throw new Error(`Failed to fetch job result statistics: ${error.message}`);
|
||||
}
|
||||
},
|
||||
},
|
||||
|
||||
Mutation: {
|
||||
|
||||
@@ -344,6 +344,15 @@ export const typeDefs = gql`
|
||||
comicId: ID!
|
||||
preferences: UserPreferencesInput
|
||||
): CanonicalMetadata
|
||||
|
||||
# Get import statistics for a directory
|
||||
getImportStatistics(directoryPath: String): ImportStatistics!
|
||||
|
||||
# Get cached import statistics (fast, real-time)
|
||||
getCachedImportStatistics: CachedImportStatistics!
|
||||
|
||||
# Get job result statistics grouped by session
|
||||
getJobResultStatistics: [JobResultStatistics!]!
|
||||
}
|
||||
|
||||
# Mutations
|
||||
@@ -385,6 +394,15 @@ export const typeDefs = gql`
|
||||
source: MetadataSource!
|
||||
metadata: String!
|
||||
): Comic!
|
||||
|
||||
# Start a new full import of the comics directory
|
||||
startNewImport(sessionId: String!): ImportJobResult!
|
||||
|
||||
# Start an incremental import (only new files)
|
||||
startIncrementalImport(
|
||||
sessionId: String!
|
||||
directoryPath: String
|
||||
): IncrementalImportResult!
|
||||
}
|
||||
|
||||
# Input types
|
||||
@@ -703,4 +721,63 @@ export const typeDefs = gql`
|
||||
_score: Float
|
||||
_source: Comic!
|
||||
}
|
||||
|
||||
  # Import statistics (produced by the filesystem-scanning getImportStatistics query)
  type ImportStatistics {
    success: Boolean!
    directory: String!
    stats: ImportStats!
  }

  type ImportStats {
    totalLocalFiles: Int!
    alreadyImported: Int!
    newFiles: Int!
    # String rather than Float — presumably a pre-formatted percentage; confirm against the api service
    percentageImported: String!
  }

  # Cached import statistics (real-time; served from the api service's cache)
  type CachedImportStatistics {
    success: Boolean!
    # Nullable: presumably populated only on failure or when the cache is empty — confirm
    message: String
    # Nullable: presumably absent until the cache has been populated — confirm
    stats: CachedImportStats
    # Nullable timestamp of the last cache refresh; format not visible here — confirm (likely ISO-8601)
    lastUpdated: String
  }

  # Same shape as ImportStats plus pendingFiles
  type CachedImportStats {
    totalLocalFiles: Int!
    alreadyImported: Int!
    newFiles: Int!
    # String rather than Float — presumably a pre-formatted percentage; confirm against the api service
    percentageImported: String!
    pendingFiles: Int!
  }

  # Import job result (returned by the startNewImport mutation)
  type ImportJobResult {
    success: Boolean!
    message: String!
    jobsQueued: Int!
  }

  # Incremental import result (returned by the startIncrementalImport mutation)
  type IncrementalImportResult {
    success: Boolean!
    message: String!
    stats: IncrementalImportStats!
  }

  type IncrementalImportStats {
    total: Int!
    alreadyImported: Int!
    newFiles: Int!
    queued: Int!
  }

  # Job result statistics, one entry per session (see getJobResultStatistics)
  type JobResultStatistics {
    sessionId: String!
    completedJobs: Int!
    failedJobs: Int!
    # Earliest job timestamp in the session; format not visible here — confirm
    earliestTimestamp: String!
  }
|
||||
`;
|
||||
|
||||
Reference in New Issue
Block a user