💬 Fleshing out the fetchComicVineMatches method

This commit is contained in:
2021-06-26 16:27:44 -07:00
parent 04c41b810e
commit ac5ba7fc61
2 changed files with 56 additions and 19 deletions

View File

@@ -7,7 +7,8 @@ import {
IMS_SOCKET_CONNECTION_CONNECTED, IMS_SOCKET_CONNECTION_CONNECTED,
IMS_RECENT_COMICS_FETCHED, IMS_RECENT_COMICS_FETCHED,
} from "../constants/action-types"; } from "../constants/action-types";
import { tokenize } from "../shared/utils/nlp.utils"; import { refineQuery } from "../shared/utils/nlp.utils";
import { assign } from "lodash";
export async function walkFolder(path: string): Promise<Array<IFolderData>> { export async function walkFolder(path: string): Promise<Array<IFolderData>> {
return axios return axios
@@ -101,6 +102,25 @@ export const getRecentlyImportedComicBooks = (options) => async (dispatch) => {
}; };
/**
 * Thunk that derives ComicVine search queries from an imported comic's file details.
 *
 * Builds an issue-level query from the file name (last path segment) and, when the
 * file lives in a sub-folder other than the root "comics" directory, a series-level
 * query from that folder's name.
 *
 * @param searchPayload - expects `rawFileDetails.path` and `rawFileDetails.containedIn`
 *                        to be slash-separated path strings — TODO confirm against caller
 * @param options - currently unused; kept for interface stability
 * @returns `{ issueSearchQuery, series }` on success, `undefined` if an error was caught
 */
export const fetchComicVineMatches = (searchPayload, options) => (dispatch) => {
  try {
    // File name (e.g. "Batman 001.cbz") is the basis for the issue query.
    const issueString = searchPayload.rawFileDetails.path.split("/").pop();
    let seriesSearchQuery = {};
    const issueSearchQuery = refineQuery(issueString);
    // Only derive a series query when the file sits in a named sub-folder,
    // not directly in the root "comics" directory.
    if (searchPayload.rawFileDetails.containedIn !== "comics") {
      seriesSearchQuery = refineQuery(
        searchPayload.rawFileDetails.containedIn.split("/").pop(),
      );
    }
    console.log({
      issue: issueSearchQuery.searchParams,
      // NOTE(review): undefined when the folder check above didn't run —
      // seriesSearchQuery stays {} and has no searchParams.
      series: seriesSearchQuery.searchParams,
    });
    // TODO(review): placeholder request — empty URL, not awaited, result unused;
    // a rejection here becomes an unhandled promise rejection. Wire up the real
    // endpoint and await (or drop) this call.
    axios.request({
      url: "",
      method: "POST",
    });
    // NOTE(review): asymmetric shape — full query object for the issue, but only
    // searchParams for the series. Confirm consumers before normalizing.
    return { issueSearchQuery, series: seriesSearchQuery.searchParams };
  } catch (error) {
    // Swallows the error after logging; callers receive undefined.
    console.log(error);
  }
};

View File

@@ -13,8 +13,7 @@ nlp.extend(dates);
* @function * @function
* @param {string} inputString - The string used to search against CV, Shortboxed, and other APIs. * @param {string} inputString - The string used to search against CV, Shortboxed, and other APIs.
*/ */
export const tokenize = (searchCriteriaPayload) => { export const tokenize = (inputString) => {
const { inputString } = searchCriteriaPayload;
const doc = nlp(inputString); const doc = nlp(inputString);
const sentence = doc.sentences().json(); const sentence = doc.sentences().json();
const number = doc.numbers().fractions(); const number = doc.numbers().fractions();
@@ -40,8 +39,12 @@ export const tokenize = (searchCriteriaPayload) => {
/^\s*\d+(\.\s+?|\s*-?\s*)/gim, /^\s*\d+(\.\s+?|\s*-?\s*)/gim,
); );
const issues = inputString.match(/issue(\W?)(\_?)(\d+)/gi); let issueNumbers = "";
const issueHashes = inputString.match(/\#\d/gi); const issues = inputString.match(/(^|[_\s#])(-?\d*\.?\d\w*)/gi);
if (!_.isEmpty(issues)) {
issueNumbers = issues[0].trim();
}
// const issueHashes = inputString.match(/\#\d/gi);
const yearMatches = inputString.match(/\d{4}/gi); const yearMatches = inputString.match(/\d{4}/gi);
const sentenceToProcess = sentence[0].normal.replace(/_/g, " "); const sentenceToProcess = sentence[0].normal.replace(/_/g, " ");
@@ -51,17 +54,22 @@ export const tokenize = (searchCriteriaPayload) => {
.split(" "); .split(" ");
const queryObject = { const queryObject = {
comicbook_identifiers: { comicbook_identifier_tokens: {
issues, issueNumbers,
issueHashes,
chapters, chapters,
pageCounts,
parantheses,
curlyBraces,
squareBrackets,
genericNumericRange,
hyphenatedNumericRange,
readingListIndicators,
volumes, volumes,
issueRanges: number,
}, },
years: { years: {
yearMatches, yearMatches,
}, },
sentences: { sentence_tokens: {
detailed: sentence, detailed: sentence,
normalized: normalizedSentence, normalized: normalizedSentence,
}, },
@@ -69,15 +77,24 @@ export const tokenize = (searchCriteriaPayload) => {
return queryObject; return queryObject;
}; };
export function refineQuery(queryString) { export const refineQuery = (inputString) => {
const queryObj = tokenize(queryString); const queryObj = tokenize(inputString);
const removedYears = _.xor( const removedYears = _.xor(
queryObj.sentences.normalized, queryObj.sentence_tokens.normalized,
queryObj.years.yearMatches, queryObj.years.yearMatches,
); );
return { return {
tokenized: removedYears, searchParams: {
normalized: removedYears.join(" "), searchTerms: {
meta: queryObj, name: queryObj.sentence_tokens.detailed[0].text,
number: queryObj.comicbook_identifier_tokens.issueNumbers,
},
year: queryObj.years,
},
meta: {
queryObj,
tokenized: removedYears,
normalized: removedYears.join(" "),
},
}; };
} };