⬇ Import flow graph-qlified
@@ -1,11 +1,15 @@
import React, { ReactElement, useCallback, useEffect, useState } from "react";
import { format } from "date-fns";
import Loader from "react-loader-spinner";
import { isEmpty, isNil, isUndefined } from "lodash";
import { useQuery, useMutation, useQueryClient } from "@tanstack/react-query";
import { useStore } from "../../store";
import { useShallow } from "zustand/react/shallow";
import axios from "axios";
import {
useGetJobResultStatisticsQuery,
useGetImportStatisticsQuery,
useStartIncrementalImportMutation
} from "../../graphql/generated";

interface IProps {
matches?: unknown;
@@ -27,6 +31,7 @@ interface IProps {
export const Import = (props: IProps): ReactElement => {
const queryClient = useQueryClient();
const [socketReconnectTrigger, setSocketReconnectTrigger] = useState(0);
const [showPreview, setShowPreview] = useState(false);
const { importJobQueue, getSocket, disconnectSocket } = useStore(
useShallow((state) => ({
importJobQueue: state.importJobQueue,
@@ -35,6 +40,29 @@ export const Import = (props: IProps): ReactElement => {
})),
);

// Query to get import statistics (preview)
const {
data: importStats,
isLoading: isLoadingStats,
refetch: refetchStats
} = useGetImportStatisticsQuery(
{},
{
enabled: showPreview,
refetchOnWindowFocus: false,
}
);

// Mutation for incremental import (smart import)
const { mutate: startIncrementalImport, isPending: isStartingImport } = useStartIncrementalImportMutation({
onSuccess: (data) => {
if (data.startIncrementalImport.success) {
importJobQueue.setStatus("running");
setShowPreview(false);
}
},
});

const { mutate: initiateImport } = useMutation({
mutationFn: async () => {
const sessionId = localStorage.getItem("sessionId");
@@ -46,20 +74,7 @@ export const Import = (props: IProps): ReactElement => {
},
});

const { data, isError, isLoading, refetch } = useQuery({
queryKey: ["allImportJobResults"],
queryFn: async () => {
const response = await axios({
method: "GET",
url: "http://localhost:3000/api/jobqueue/getJobResultStatistics",
params: { _t: Date.now() }, // Cache busting
});
return response;
},
refetchOnWindowFocus: false,
staleTime: 0, // Always consider data stale
gcTime: 0, // Don't cache the data (formerly cacheTime)
});
const { data, isError, isLoading, refetch } = useGetJobResultStatisticsQuery();

// Ensure socket connection is established and listen for import completion
useEffect(() => {
@@ -95,6 +110,35 @@ export const Import = (props: IProps): ReactElement => {
},
);
};

const handleShowPreview = () => {
setShowPreview(true);
refetchStats();
};

const handleStartSmartImport = () => {
// Clear old sessionId when starting a new import after queue is drained
if (importJobQueue.status === "drained") {
localStorage.removeItem("sessionId");
// Disconnect and reconnect socket to get new sessionId
disconnectSocket("/");
// Wait for socket to reconnect and get new sessionId before starting import
setTimeout(() => {
getSocket("/");
// Trigger useEffect to re-attach event listeners
setSocketReconnectTrigger(prev => prev + 1);
// Wait a bit more for sessionInitialized event to fire
setTimeout(() => {
const sessionId = localStorage.getItem("sessionId") || "";
startIncrementalImport({ sessionId });
}, 500);
}, 100);
} else {
const sessionId = localStorage.getItem("sessionId") || "";
startIncrementalImport({ sessionId });
}
};

/**
* Method to render import job queue pause/resume controls on the UI
*
@@ -146,6 +190,7 @@ export const Import = (props: IProps): ReactElement => {
return null;
}
};

return (
<div>
<section>
@@ -185,41 +230,147 @@ export const Import = (props: IProps): ReactElement => {
</div>
</article>

<div className="my-4">
{(importJobQueue.status === "drained" ||
importJobQueue.status === undefined) && (
{/* Import Preview Section */}
{!showPreview && (importJobQueue.status === "drained" || importJobQueue.status === undefined) && (
<div className="my-4 flex gap-3">
<button
className="flex space-x-1 sm:mt-0 sm:flex-row sm:items-center rounded-lg border border-green-400 dark:border-green-200 bg-green-200 px-5 py-3 text-gray-500 hover:bg-transparent hover:text-green-600 focus:outline-none focus:ring active:text-indigo-500"
onClick={() => {
// Clear old sessionId when starting a new import after queue is drained
if (importJobQueue.status === "drained") {
localStorage.removeItem("sessionId");
// Disconnect and reconnect socket to get new sessionId
disconnectSocket("/");
// Wait for socket to reconnect and get new sessionId before starting import
setTimeout(() => {
getSocket("/");
// Trigger useEffect to re-attach event listeners
setSocketReconnectTrigger(prev => prev + 1);
// Wait a bit more for sessionInitialized event to fire
setTimeout(() => {
initiateImport();
importJobQueue.setStatus("running");
}, 500);
}, 100);
} else {
initiateImport();
importJobQueue.setStatus("running");
}
}}
className="flex space-x-1 sm:mt-0 sm:flex-row sm:items-center rounded-lg border border-blue-400 dark:border-blue-200 bg-blue-200 px-5 py-3 text-gray-500 hover:bg-transparent hover:text-blue-600 focus:outline-none focus:ring active:text-blue-500"
onClick={handleShowPreview}
>
<span className="text-md">Start Import</span>
<span className="text-md">Preview Import</span>
<span className="w-6 h-6">
<i className="h-6 w-6 icon-[solar--file-left-bold-duotone]"></i>
<i className="h-6 w-6 icon-[solar--eye-bold-duotone]"></i>
</span>
</button>
)}
</div>
</div>
)}
{/* Preview Statistics */}
{showPreview && !isLoadingStats && importStats?.getImportStatistics && (
<div className="my-6 max-w-screen-lg">
<span className="flex items-center my-5">
<span className="text-xl text-slate-500 dark:text-slate-200 pr-5">
Import Preview
</span>
<span className="h-px flex-1 bg-slate-200 dark:bg-slate-400"></span>
</span>

<div className="rounded-lg border border-gray-200 dark:border-gray-600 bg-white dark:bg-slate-700 p-6">
<div className="mb-4">
<p className="text-sm text-gray-600 dark:text-gray-300">
<span className="font-semibold">Directory:</span> {importStats.getImportStatistics.directory}
</p>
</div>

<dl className="grid grid-cols-2 gap-4 sm:grid-cols-4">
<div className="flex flex-col rounded-lg bg-blue-100 dark:bg-blue-200 px-4 py-6 text-center">
<dd className="text-3xl text-blue-600 md:text-5xl">
{importStats.getImportStatistics.stats.totalLocalFiles}
</dd>
<dt className="text-lg font-medium text-gray-500">
Total Files
</dt>
</div>

<div className="flex flex-col rounded-lg bg-green-100 dark:bg-green-200 px-4 py-6 text-center">
<dd className="text-3xl text-green-600 md:text-5xl">
{importStats.getImportStatistics.stats.newFiles}
</dd>
<dt className="text-lg font-medium text-gray-500">
New Comics
</dt>
</div>

<div className="flex flex-col rounded-lg bg-yellow-100 dark:bg-yellow-200 px-4 py-6 text-center">
<dd className="text-3xl text-yellow-600 md:text-5xl">
{importStats.getImportStatistics.stats.alreadyImported}
</dd>
<dt className="text-lg font-medium text-gray-500">
Already Imported
</dt>
</div>

<div className="flex flex-col rounded-lg bg-purple-100 dark:bg-purple-200 px-4 py-6 text-center">
<dd className="text-3xl text-purple-600 md:text-5xl">
{(() => {
const percentage = importStats.getImportStatistics.stats.percentageImported;
const numValue = typeof percentage === 'number' ? percentage : parseFloat(percentage);
return !isNaN(numValue) ? numValue.toFixed(1) : '0.0';
})()}%
</dd>
<dt className="text-lg font-medium text-gray-500">
Already in Library
</dt>
</div>
</dl>
{importStats.getImportStatistics.stats.newFiles > 0 && (
<div className="mt-6">
<article
role="alert"
className="rounded-lg border-s-4 border-green-500 bg-green-50 p-4 dark:border-s-4 dark:border-green-600 dark:bg-green-300 dark:text-slate-600"
>
<p className="font-medium">
Ready to import {importStats.getImportStatistics.stats.newFiles} new comic{importStats.getImportStatistics.stats.newFiles !== 1 ? 's' : ''}!
</p>
<p className="text-sm mt-1">
{importStats.getImportStatistics.stats.alreadyImported} comic{importStats.getImportStatistics.stats.alreadyImported !== 1 ? 's' : ''} will be skipped (already in library).
</p>
</article>
</div>
)}

{importStats.getImportStatistics.stats.newFiles === 0 && (
<div className="mt-6">
<article
role="alert"
className="rounded-lg border-s-4 border-yellow-500 bg-yellow-50 p-4 dark:border-s-4 dark:border-yellow-600 dark:bg-yellow-300 dark:text-slate-600"
>
<p className="font-medium">
No new comics to import!
</p>
<p className="text-sm mt-1">
All {importStats.getImportStatistics.stats.totalLocalFiles} comic{importStats.getImportStatistics.stats.totalLocalFiles !== 1 ? 's' : ''} in the directory {importStats.getImportStatistics.stats.totalLocalFiles !== 1 ? 'are' : 'is'} already in your library.
</p>
</article>
</div>
)}

<div className="mt-6 flex gap-3">
{importStats.getImportStatistics.stats.newFiles > 0 && (
<button
className="flex space-x-1 sm:mt-0 sm:flex-row sm:items-center rounded-lg border border-green-400 dark:border-green-200 bg-green-200 px-5 py-3 text-gray-500 hover:bg-transparent hover:text-green-600 focus:outline-none focus:ring active:text-green-500"
onClick={handleStartSmartImport}
disabled={isStartingImport}
>
<span className="text-md">
{isStartingImport ? "Starting..." : "Start Smart Import"}
</span>
<span className="w-6 h-6">
<i className="h-6 w-6 icon-[solar--file-left-bold-duotone]"></i>
</span>
</button>
)}
<button
className="flex space-x-1 sm:mt-0 sm:flex-row sm:items-center rounded-lg border border-gray-400 dark:border-gray-200 bg-gray-200 px-5 py-3 text-gray-500 hover:bg-transparent hover:text-gray-600 focus:outline-none focus:ring active:text-gray-500"
onClick={() => setShowPreview(false)}
>
<span className="text-md">Cancel</span>
</button>
</div>
</div>
</div>
)}
{/* Loading state for preview */}
{showPreview && isLoadingStats && (
<div className="my-6 flex justify-center items-center">
<div className="animate-spin rounded-full h-12 w-12 border-b-2 border-blue-500"></div>
<span className="ml-3 text-gray-600 dark:text-gray-300">
Analyzing comics folder...
</span>
</div>
)}

{/* Activity */}
{(importJobQueue.status === "running" ||
@@ -266,7 +417,7 @@ export const Import = (props: IProps): ReactElement => {
)}

{/* Past imports */}
{!isLoading && !isEmpty(data?.data) && (
{!isLoading && !isEmpty(data?.getJobResultStatistics) && (
<div className="max-w-screen-lg">
<span className="flex items-center mt-6">
<span className="text-xl text-slate-500 dark:text-slate-200 pr-5">
@@ -298,17 +449,19 @@ export const Import = (props: IProps): ReactElement => {
</thead>

<tbody className="divide-y divide-gray-200">
{data?.data.map((jobResult: any, index: number) => {
{data?.getJobResultStatistics.map((jobResult: any, index: number) => {
return (
<tr key={index}>
<td className="whitespace-nowrap px-4 py-2 text-gray-700 dark:text-slate-300 font-medium">
{index + 1}
</td>
<td className="whitespace-nowrap px-2 py-2 text-gray-700 dark:text-slate-300">
{format(
new Date(jobResult.earliestTimestamp),
"EEEE, hh:mma, do LLLL y",
)}
{jobResult.earliestTimestamp && !isNaN(new Date(jobResult.earliestTimestamp).getTime())
? format(
new Date(jobResult.earliestTimestamp),
"EEEE, hh:mma, do LLLL y",
)
: "N/A"}
</td>
<td className="whitespace-nowrap px-2 py-2 text-gray-700 dark:text-slate-300">
<span className="tag is-warning">
@@ -1,4 +1,4 @@
import { useQuery, useInfiniteQuery, UseQueryOptions, UseInfiniteQueryOptions, InfiniteData } from '@tanstack/react-query';
import { useQuery, useInfiniteQuery, useMutation, UseQueryOptions, UseInfiniteQueryOptions, InfiniteData, UseMutationOptions } from '@tanstack/react-query';
import { fetcher } from './fetcher';
export type Maybe<T> = T | null;
export type InputMaybe<T> = Maybe<T>;
@@ -260,6 +260,28 @@ export type ImportComicResult = {
success: Scalars['Boolean']['output'];
};

export type ImportJobResult = {
__typename?: 'ImportJobResult';
jobsQueued: Scalars['Int']['output'];
message: Scalars['String']['output'];
success: Scalars['Boolean']['output'];
};

export type ImportStatistics = {
__typename?: 'ImportStatistics';
directory: Scalars['String']['output'];
stats: ImportStats;
success: Scalars['Boolean']['output'];
};

export type ImportStats = {
__typename?: 'ImportStats';
alreadyImported: Scalars['Int']['output'];
newFiles: Scalars['Int']['output'];
percentageImported: Scalars['String']['output'];
totalLocalFiles: Scalars['Int']['output'];
};

export type ImportStatus = {
__typename?: 'ImportStatus';
isImported?: Maybe<Scalars['Boolean']['output']>;
@@ -267,6 +289,21 @@ export type ImportStatus = {
tagged?: Maybe<Scalars['Boolean']['output']>;
};

export type IncrementalImportResult = {
__typename?: 'IncrementalImportResult';
message: Scalars['String']['output'];
stats: IncrementalImportStats;
success: Scalars['Boolean']['output'];
};

export type IncrementalImportStats = {
__typename?: 'IncrementalImportStats';
alreadyImported: Scalars['Int']['output'];
newFiles: Scalars['Int']['output'];
queued: Scalars['Int']['output'];
total: Scalars['Int']['output'];
};

export type InferredMetadata = {
__typename?: 'InferredMetadata';
issue?: Maybe<Issue>;
@@ -278,9 +315,23 @@ export type InferredMetadataInput = {

export type Issue = {
__typename?: 'Issue';
api_detail_url?: Maybe<Scalars['String']['output']>;
character_credits?: Maybe<Array<CharacterCredit>>;
cover_date?: Maybe<Scalars['String']['output']>;
description?: Maybe<Scalars['String']['output']>;
id: Scalars['Int']['output'];
image?: Maybe<ImageUrls>;
issue_number?: Maybe<Scalars['String']['output']>;
location_credits?: Maybe<Array<LocationCredit>>;
name?: Maybe<Scalars['String']['output']>;
number?: Maybe<Scalars['Int']['output']>;
person_credits?: Maybe<Array<PersonCredit>>;
site_detail_url?: Maybe<Scalars['String']['output']>;
store_date?: Maybe<Scalars['String']['output']>;
story_arc_credits?: Maybe<Array<StoryArcCredit>>;
subtitle?: Maybe<Scalars['String']['output']>;
team_credits?: Maybe<Array<TeamCredit>>;
volume?: Maybe<Volume>;
year?: Maybe<Scalars['String']['output']>;
};
@@ -308,6 +359,14 @@ export type IssuesForSeriesResponse = {
status_code: Scalars['Int']['output'];
};

export type JobResultStatistics = {
__typename?: 'JobResultStatistics';
completedJobs: Scalars['Int']['output'];
earliestTimestamp: Scalars['String']['output'];
failedJobs: Scalars['Int']['output'];
sessionId: Scalars['String']['output'];
};

export type LocgMetadata = {
__typename?: 'LOCGMetadata';
cover?: Maybe<Scalars['String']['output']>;
@@ -375,6 +434,36 @@ export type MetadataField = {
value?: Maybe<Scalars['String']['output']>;
};

export type MetadataPaginationMeta = {
__typename?: 'MetadataPaginationMeta';
currentPage: Scalars['Int']['output'];
hasNextPage: Scalars['Boolean']['output'];
hasPreviousPage: Scalars['Boolean']['output'];
pageSize: Scalars['Int']['output'];
totalCount: Scalars['Int']['output'];
totalPages: Scalars['Int']['output'];
};

export type MetadataPullListItem = {
__typename?: 'MetadataPullListItem';
cover?: Maybe<Scalars['String']['output']>;
description?: Maybe<Scalars['String']['output']>;
name?: Maybe<Scalars['String']['output']>;
potw?: Maybe<Scalars['Int']['output']>;
price?: Maybe<Scalars['String']['output']>;
publicationDate?: Maybe<Scalars['String']['output']>;
publisher?: Maybe<Scalars['String']['output']>;
pulls?: Maybe<Scalars['Int']['output']>;
rating?: Maybe<Scalars['Float']['output']>;
url?: Maybe<Scalars['String']['output']>;
};

export type MetadataPullListResponse = {
__typename?: 'MetadataPullListResponse';
meta: MetadataPaginationMeta;
result: Array<MetadataPullListItem>;
};

export enum MetadataSource {
ComicinfoXml = 'COMICINFO_XML',
Comicvine = 'COMICVINE',
@@ -406,6 +495,8 @@ export type Mutation = {
removeMetadataOverride: Comic;
resolveMetadata: Comic;
setMetadataField: Comic;
startIncrementalImport: IncrementalImportResult;
startNewImport: ImportJobResult;
updateSourcedMetadata: Comic;
updateUserPreferences: UserPreferences;
};
@@ -445,6 +536,17 @@ export type MutationSetMetadataFieldArgs = {
};


export type MutationStartIncrementalImportArgs = {
directoryPath?: InputMaybe<Scalars['String']['input']>;
sessionId: Scalars['String']['input'];
};


export type MutationStartNewImportArgs = {
sessionId: Scalars['String']['input'];
};


export type MutationUpdateSourcedMetadataArgs = {
comicId: Scalars['ID']['input'];
metadata: Scalars['String']['input'];
@@ -465,16 +567,6 @@ export type PageInfo = {
totalPages: Scalars['Int']['output'];
};

export type PaginationMeta = {
__typename?: 'PaginationMeta';
currentPage: Scalars['Int']['output'];
hasNextPage: Scalars['Boolean']['output'];
hasPreviousPage: Scalars['Boolean']['output'];
pageSize: Scalars['Int']['output'];
totalCount: Scalars['Int']['output'];
totalPages: Scalars['Int']['output'];
};

export type PaginationOptionsInput = {
lean?: InputMaybe<Scalars['Boolean']['input']>;
leanWithId?: InputMaybe<Scalars['Boolean']['input']>;
@@ -516,26 +608,6 @@ export type PublisherStats = {
id: Scalars['String']['output'];
};

export type PullListItem = {
__typename?: 'PullListItem';
cover?: Maybe<Scalars['String']['output']>;
description?: Maybe<Scalars['String']['output']>;
name?: Maybe<Scalars['String']['output']>;
potw?: Maybe<Scalars['Int']['output']>;
price?: Maybe<Scalars['String']['output']>;
publicationDate?: Maybe<Scalars['String']['output']>;
publisher?: Maybe<Scalars['String']['output']>;
pulls?: Maybe<Scalars['Int']['output']>;
rating?: Maybe<Scalars['Float']['output']>;
url?: Maybe<Scalars['String']['output']>;
};

export type PullListResponse = {
__typename?: 'PullListResponse';
meta: PaginationMeta;
result: Array<PullListItem>;
};

export type Query = {
__typename?: 'Query';
analyzeMetadataConflicts: Array<MetadataConflict>;
@@ -547,15 +619,17 @@ export type Query = {
getComicBooks: ComicBooksResult;
/** Get generic ComicVine resource (issues, volumes, etc.) */
getComicVineResource: ComicVineResourceResponse;
getImportStatistics: ImportStatistics;
/** Get all issues for a series by comic object ID */
getIssuesForSeries: IssuesForSeriesResponse;
getJobResultStatistics: Array<JobResultStatistics>;
getLibraryStatistics: LibraryStatistics;
/** Get story arcs for a volume */
getStoryArcs: Array<StoryArc>;
/** Get volume details by URI */
getVolume: VolumeDetailResponse;
/** Get weekly pull list from League of Comic Geeks */
getWeeklyPullList: PullListResponse;
getWeeklyPullList: MetadataPullListResponse;
previewCanonicalMetadata?: Maybe<CanonicalMetadata>;
/** Search ComicVine for volumes, issues, characters, etc. */
searchComicVine: ComicVineSearchResult;
@@ -601,6 +675,11 @@ export type QueryGetComicVineResourceArgs = {
};


export type QueryGetImportStatisticsArgs = {
directoryPath?: InputMaybe<Scalars['String']['input']>;
};


export type QueryGetIssuesForSeriesArgs = {
comicObjectId: Scalars['ID']['input'];
};
@@ -996,7 +1075,34 @@ export type GetWeeklyPullListQueryVariables = Exact<{
}>;


export type GetWeeklyPullListQuery = { __typename?: 'Query', getWeeklyPullList: { __typename?: 'PullListResponse', result: Array<{ __typename?: 'PullListItem', name?: string | null, publisher?: string | null, cover?: string | null }> } };
export type GetWeeklyPullListQuery = { __typename?: 'Query', getWeeklyPullList: { __typename?: 'MetadataPullListResponse', result: Array<{ __typename?: 'MetadataPullListItem', name?: string | null, publisher?: string | null, cover?: string | null }> } };

export type GetImportStatisticsQueryVariables = Exact<{
directoryPath?: InputMaybe<Scalars['String']['input']>;
}>;


export type GetImportStatisticsQuery = { __typename?: 'Query', getImportStatistics: { __typename?: 'ImportStatistics', success: boolean, directory: string, stats: { __typename?: 'ImportStats', totalLocalFiles: number, alreadyImported: number, newFiles: number, percentageImported: string } } };

export type StartNewImportMutationVariables = Exact<{
sessionId: Scalars['String']['input'];
}>;


export type StartNewImportMutation = { __typename?: 'Mutation', startNewImport: { __typename?: 'ImportJobResult', success: boolean, message: string, jobsQueued: number } };

export type StartIncrementalImportMutationVariables = Exact<{
sessionId: Scalars['String']['input'];
directoryPath?: InputMaybe<Scalars['String']['input']>;
}>;


export type StartIncrementalImportMutation = { __typename?: 'Mutation', startIncrementalImport: { __typename?: 'IncrementalImportResult', success: boolean, message: string, stats: { __typename?: 'IncrementalImportStats', total: number, alreadyImported: number, newFiles: number, queued: number } } };

export type GetJobResultStatisticsQueryVariables = Exact<{ [key: string]: never; }>;


export type GetJobResultStatisticsQuery = { __typename?: 'Query', getJobResultStatistics: Array<{ __typename?: 'JobResultStatistics', sessionId: string, earliestTimestamp: string, completedJobs: number, failedJobs: number }> };

export type GetLibraryComicsQueryVariables = Exact<{
page?: InputMaybe<Scalars['Int']['input']>;
@@ -1721,6 +1827,173 @@ useInfiniteGetWeeklyPullListQuery.getKey = (variables: GetWeeklyPullListQueryVar

useGetWeeklyPullListQuery.fetcher = (variables: GetWeeklyPullListQueryVariables, options?: RequestInit['headers']) => fetcher<GetWeeklyPullListQuery, GetWeeklyPullListQueryVariables>(GetWeeklyPullListDocument, variables, options);

export const GetImportStatisticsDocument = `
query GetImportStatistics($directoryPath: String) {
getImportStatistics(directoryPath: $directoryPath) {
success
directory
stats {
totalLocalFiles
alreadyImported
newFiles
percentageImported
}
}
}
`;

export const useGetImportStatisticsQuery = <
TData = GetImportStatisticsQuery,
TError = unknown
>(
variables?: GetImportStatisticsQueryVariables,
options?: Omit<UseQueryOptions<GetImportStatisticsQuery, TError, TData>, 'queryKey'> & { queryKey?: UseQueryOptions<GetImportStatisticsQuery, TError, TData>['queryKey'] }
) => {

return useQuery<GetImportStatisticsQuery, TError, TData>(
{
queryKey: variables === undefined ? ['GetImportStatistics'] : ['GetImportStatistics', variables],
queryFn: fetcher<GetImportStatisticsQuery, GetImportStatisticsQueryVariables>(GetImportStatisticsDocument, variables),
...options
}
)};
useGetImportStatisticsQuery.getKey = (variables?: GetImportStatisticsQueryVariables) => variables === undefined ? ['GetImportStatistics'] : ['GetImportStatistics', variables];

export const useInfiniteGetImportStatisticsQuery = <
TData = InfiniteData<GetImportStatisticsQuery>,
TError = unknown
>(
variables: GetImportStatisticsQueryVariables,
options: Omit<UseInfiniteQueryOptions<GetImportStatisticsQuery, TError, TData>, 'queryKey'> & { queryKey?: UseInfiniteQueryOptions<GetImportStatisticsQuery, TError, TData>['queryKey'] }
) => {

return useInfiniteQuery<GetImportStatisticsQuery, TError, TData>(
(() => {
const { queryKey: optionsQueryKey, ...restOptions } = options;
return {
queryKey: optionsQueryKey ?? variables === undefined ? ['GetImportStatistics.infinite'] : ['GetImportStatistics.infinite', variables],
queryFn: (metaData) => fetcher<GetImportStatisticsQuery, GetImportStatisticsQueryVariables>(GetImportStatisticsDocument, {...variables, ...(metaData.pageParam ?? {})})(),
...restOptions
}
})()
)};

useInfiniteGetImportStatisticsQuery.getKey = (variables?: GetImportStatisticsQueryVariables) => variables === undefined ? ['GetImportStatistics.infinite'] : ['GetImportStatistics.infinite', variables];


useGetImportStatisticsQuery.fetcher = (variables?: GetImportStatisticsQueryVariables, options?: RequestInit['headers']) => fetcher<GetImportStatisticsQuery, GetImportStatisticsQueryVariables>(GetImportStatisticsDocument, variables, options);

export const StartNewImportDocument = `
mutation StartNewImport($sessionId: String!) {
startNewImport(sessionId: $sessionId) {
success
message
jobsQueued
}
}
`;
export const useStartNewImportMutation = <
TError = unknown,
TContext = unknown
>(options?: UseMutationOptions<StartNewImportMutation, TError, StartNewImportMutationVariables, TContext>) => {

return useMutation<StartNewImportMutation, TError, StartNewImportMutationVariables, TContext>(
{
mutationKey: ['StartNewImport'],
mutationFn: (variables?: StartNewImportMutationVariables) => fetcher<StartNewImportMutation, StartNewImportMutationVariables>(StartNewImportDocument, variables)(),
...options
}
)};


useStartNewImportMutation.fetcher = (variables: StartNewImportMutationVariables, options?: RequestInit['headers']) => fetcher<StartNewImportMutation, StartNewImportMutationVariables>(StartNewImportDocument, variables, options);

export const StartIncrementalImportDocument = `
mutation StartIncrementalImport($sessionId: String!, $directoryPath: String) {
startIncrementalImport(sessionId: $sessionId, directoryPath: $directoryPath) {
success
message
stats {
total
alreadyImported
newFiles
queued
}
}
}
`;
export const useStartIncrementalImportMutation = <
TError = unknown,
TContext = unknown
>(options?: UseMutationOptions<StartIncrementalImportMutation, TError, StartIncrementalImportMutationVariables, TContext>) => {

return useMutation<StartIncrementalImportMutation, TError, StartIncrementalImportMutationVariables, TContext>(
{
mutationKey: ['StartIncrementalImport'],
mutationFn: (variables?: StartIncrementalImportMutationVariables) => fetcher<StartIncrementalImportMutation, StartIncrementalImportMutationVariables>(StartIncrementalImportDocument, variables)(),
...options
}
)};


useStartIncrementalImportMutation.fetcher = (variables: StartIncrementalImportMutationVariables, options?: RequestInit['headers']) => fetcher<StartIncrementalImportMutation, StartIncrementalImportMutationVariables>(StartIncrementalImportDocument, variables, options);

export const GetJobResultStatisticsDocument = `
query GetJobResultStatistics {
getJobResultStatistics {
sessionId
earliestTimestamp
completedJobs
failedJobs
}
}
`;
export const useGetJobResultStatisticsQuery = <
TData = GetJobResultStatisticsQuery,
TError = unknown
>(
variables?: GetJobResultStatisticsQueryVariables,
options?: Omit<UseQueryOptions<GetJobResultStatisticsQuery, TError, TData>, 'queryKey'> & { queryKey?: UseQueryOptions<GetJobResultStatisticsQuery, TError, TData>['queryKey'] }
) => {

return useQuery<GetJobResultStatisticsQuery, TError, TData>(
{
queryKey: variables === undefined ? ['GetJobResultStatistics'] : ['GetJobResultStatistics', variables],
queryFn: fetcher<GetJobResultStatisticsQuery, GetJobResultStatisticsQueryVariables>(GetJobResultStatisticsDocument, variables),
...options
}
)};

useGetJobResultStatisticsQuery.getKey = (variables?: GetJobResultStatisticsQueryVariables) => variables === undefined ? ['GetJobResultStatistics'] : ['GetJobResultStatistics', variables];

export const useInfiniteGetJobResultStatisticsQuery = <
TData = InfiniteData<GetJobResultStatisticsQuery>,
TError = unknown
>(
variables: GetJobResultStatisticsQueryVariables,
options: Omit<UseInfiniteQueryOptions<GetJobResultStatisticsQuery, TError, TData>, 'queryKey'> & { queryKey?: UseInfiniteQueryOptions<GetJobResultStatisticsQuery, TError, TData>['queryKey'] }
) => {

return useInfiniteQuery<GetJobResultStatisticsQuery, TError, TData>(
(() => {
const { queryKey: optionsQueryKey, ...restOptions } = options;
return {
queryKey: optionsQueryKey ?? variables === undefined ? ['GetJobResultStatistics.infinite'] : ['GetJobResultStatistics.infinite', variables],
queryFn: (metaData) => fetcher<GetJobResultStatisticsQuery, GetJobResultStatisticsQueryVariables>(GetJobResultStatisticsDocument, {...variables, ...(metaData.pageParam ?? {})})(),
...restOptions
}
})()
)};

useInfiniteGetJobResultStatisticsQuery.getKey = (variables?: GetJobResultStatisticsQueryVariables) => variables === undefined ? ['GetJobResultStatistics.infinite'] : ['GetJobResultStatistics.infinite', variables];


useGetJobResultStatisticsQuery.fetcher = (variables?: GetJobResultStatisticsQueryVariables, options?: RequestInit['headers']) => fetcher<GetJobResultStatisticsQuery, GetJobResultStatisticsQueryVariables>(GetJobResultStatisticsDocument, variables, options);

export const GetLibraryComicsDocument = `
query GetLibraryComics($page: Int, $limit: Int, $search: String, $series: String) {
comics(page: $page, limit: $limit, search: $search, series: $series) {
@@ -214,3 +214,4 @@ query GetWeeklyPullList($input: WeeklyPullListInput!) {
}
}
}
src/client/graphql/queries/import.graphql (new file, 42 lines)
@@ -0,0 +1,42 @@
query GetImportStatistics($directoryPath: String) {
getImportStatistics(directoryPath: $directoryPath) {
success
directory
stats {
totalLocalFiles
alreadyImported
newFiles
percentageImported
}
}
}

mutation StartNewImport($sessionId: String!) {
startNewImport(sessionId: $sessionId) {
success
message
jobsQueued
}
}

mutation StartIncrementalImport($sessionId: String!, $directoryPath: String) {
startIncrementalImport(sessionId: $sessionId, directoryPath: $directoryPath) {
success
message
stats {
total
alreadyImported
newFiles
queued
}
}
}

query GetJobResultStatistics {
getJobResultStatistics {
sessionId
earliestTimestamp
completedJobs
failedJobs
}
}