Commit 1cb8c51b authored by greg@1m

add generic search function + searchWord

parent 7329da9a
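For orientation, the new helper added in this commit is meant to be consumed roughly like so (a minimal sketch assembled from the code below; the folder path is hypothetical and the shape of res is inferred from searchWord's accumulator):

import { searchWord } from './managers/search/allOccurences.search.manager'

// find every markdown hashtag occurrence under a folder
searchWord({
    term: '#[^ #]+',      // rg regex, same pattern used in the manual test at the bottom of this commit
    folder: '/my-notes',  // hypothetical folder, resolved against backConfig.dataFolder
    cb: res => {
        // res maps each file path to { file: iFile, results: string[] of matched text }
        console.log(res)
    }
})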
......@@ -38,7 +38,7 @@ export const backConfig = {
...sharedConfig.path,
dev: {
disableLogin: false
disableLogin: true
},
}
......
......@@ -121,8 +121,8 @@ export const scanDirForFiles = async (path: string): Promise<iFile[] | string> =
searchWithRipGrep({
term: '',
folder: path,
typeSearch: 'folder',
titleSearch: false,
recursive: false,
onSearchEnded: async answer => {
res(answer.files)
}
......
import { processRawPathToFile } from "./file.search.manager";
import { searchWithRgGeneric } from "./search-ripgrep.manager";
// rg "#[^ #]+" "/Users/gregoirethiebault/desktop/your markdown notes/test_obsi/nodal_ex" --ignore-case --type md --multiline
export const searchWord = (p: {
term: string,
folder: string,
cb: (res: any) => void
}) => {
const objRes = {}
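// objRes maps file path -> { file: iFile, results: string[] of matched text }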
searchWithRgGeneric({
term: p.term,
folder: p.folder,
debug: true,
processRawLine: lineInfos => {
let l = lineInfos
if (!l.found || l.found === '') return
if (!objRes[l.file.path]) objRes[l.file.path] = {file: l.file, results:[]}
objRes[l.file.path].results.push(l.found)
// return res
},
onSearchEnded: async () => {
p.cb(objRes)
}
})
}
......@@ -16,12 +16,22 @@ export const cleanFilePath = (rawString: string, folder) => {
return rawString
}
export const processRawPathToFile = (
rawPath: string,
folder: string,
index: number = 0,
titleFilter: string = ''
): iFile => {
// export const processRawPathToFile = (p:{
// rawPath: string
// folder: string
// index: number = 0
// titleFilter: string = ''
// }): iFile => {
export const processRawPathToFile = (p: {
rawPath: string
folder: string
index?: number
titleFilter?: string
}): iFile => {
let { rawPath, folder, index, titleFilter } = { ...p }
if (!index) index = 0
if (!titleFilter) titleFilter = ''
let res: iFile
let cleanedData = cleanFilePath(rawPath, folder)
......@@ -47,7 +57,7 @@ export const processRawDataToFiles = (dataRaw: string, titleFilter: string = '',
for (let i = 0; i < array.length; i++) {
let filePath = array[i];
const fileRes = processRawPathToFile(filePath, folder, i, titleFilter)
const fileRes = processRawPathToFile({rawPath: filePath, folder, index: i, titleFilter})
res.push(fileRes)
}
return res
......
......@@ -32,7 +32,7 @@ export const processRawStringsToImagesArr = (rawMetasStrings: string[], folder:s
*/
const fileName = `${rawMetaArr2[0]}.md`;
let cleanedFileName = cleanFilePath(fileName, folder)
const file = processRawPathToFile(cleanedFileName, folder)
const file = processRawPathToFile({rawPath: cleanedFileName, folder})
if (titleFilter !== '' && !file.path.toLowerCase().includes(titleFilter.toLowerCase())) continue
......
import { cleanPath, getFileInfos } from "../../../../shared/helpers/filename.helper";
import { iFile } from "../../../../shared/types.shared";
import { backConfig } from "../../config.back";
import { normalizeString } from "../../helpers/string.helper";
import { dirDefaultBlacklist } from "../dir.manager";
import { fileExists, openFile } from "../fs.manager";
import { log } from "../log.manager";
import { createIFile } from "./file.search.manager";
const klaw = require('klaw')
const through2 = require('through2')
const path = require('path')
const fs = require('fs')
const blockLength = 1000
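// blockLength: number of queued scan tasks between two intermediary onBlockEnd updates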
let walkRec = (p: {
dir: string,
taskQueue,
recursive: boolean
onFile: (file: { path: string, stats: any }) => void,
onBlockEnd: Function,
onError: Function
}) => {
// const {dir, onFile, onError}
const folderItems = fs.readdirSync(p.dir)
folderItems.forEach((item, i) => {
if (p.taskQueue.length % blockLength === 0) {
// every blockLength (1000) queued scans, queue an intermediary update
log('add step at ', i);
p.taskQueue.push(async () => {
await p.onBlockEnd()
})
}
p.taskQueue.push(async () => {
let itemPath = path.join(p.dir, item)
let stats
try {
const basename = path.basename(itemPath)
const isHiddenFile = basename[0] === '.'
const isBlacklisted = dirDefaultBlacklist.indexOf(basename) !== -1
if (isBlacklisted) log(basename);
if (!isHiddenFile && !isBlacklisted) {
stats = fs.statSync(itemPath)
}
} catch (e) {
p.onError(e)
return
}
if (!stats) return
if (p.recursive && stats.isDirectory()) {
let p2 = { ...p }
p2.dir = itemPath
await walkRec(p2)
}
else if (stats.isFile()) {
const isMdFile = itemPath.toLowerCase().endsWith('.md')
if (!isMdFile) return
await p.onFile({ path: itemPath, stats: stats })
}
})
})
}
export const liveSearchJs = async (params: {
term: string,
folder: string,
titleSearch: boolean,
recursive: boolean,
onSearchUpdate: (filesScanned: iFile[], initial: boolean) => Promise<void>,
onSearchEnded: (filesScanned: iFile[]) => Promise<void>
}): Promise<void> => {
const { term, folder, titleSearch } = { ...params }
let startTime = new Date().getTime()
let filesScanned: iFile[] = []
let errors: string[] = []
const absolutePathFolder = backConfig.dataFolder + folder
let count = 0
let totCount = 0
if (!fileExists(absolutePathFolder)) return log(`[SEARCH-JS] path ${absolutePathFolder} doesn't exist, stopping search`)
const taskQueue = []
let lastFilesScannedCount = 0
walkRec({
dir: absolutePathFolder,
recursive: params.recursive,
taskQueue,
onBlockEnd: async () => {
let timeSpent = new Date().getTime() - startTime
log(`[SEARCH-JS] search update for ${absolutePathFolder} in ${timeSpent}ms for ${totCount} elements with ${errors.length} errors`)
await params.onSearchUpdate(filesScanned.slice(filesScanned.length - blockLength), false)
},
onFile: async item => {
totCount++
let finfos = getFileInfos(item.path)
let relativeFolder = finfos.folder.replace(backConfig.dataFolder, '')
let isValid = false
// 0: an empty term matches everything
if (term === '') {
isValid = true
}
// 1: the title match is checked in every case
if (!isValid && normalizeString(finfos.filename).includes(normalizeString(term))) isValid = true
// 2: content search, only when title-only search is off
if (!titleSearch && !isValid) {
let filecontent = await openFile(item.path)
isValid = normalizeString(filecontent).indexOf(normalizeString(term)) !== -1
}
if (isValid) {
const file = createIFile(finfos.filename, relativeFolder, count, item.stats)
filesScanned.push(file)
count++
}
},
onError: e => {
errors.push(e)
}
})
log(`[SEARCHJS] exec queue length: ${taskQueue.length}`);
for (let i = 0; i < taskQueue.length; i++) {
// initial cleanup
if (i === 0) params.onSearchUpdate([], true)
// process task
if (!taskQueue[i]) return
await taskQueue[i]();
// show early result asap
// if (filesScanned.length === 10) await params.onSearchUpdate(filesScanned, false)
// if (filesScanned.length === 30) await params.onSearchUpdate(filesScanned, false)
// show definitive result
if (i === taskQueue.length - 1) {
let timeSpent = new Date().getTime() - startTime
log(`[SEARCH-JS] SEARCH ENDED for ${absolutePathFolder} in ${timeSpent}ms for ${totCount} elements with ${errors.length} errors`);
params.onSearchEnded(filesScanned)
}
}
}
// export const createIFile = (name:string, folder:string, index:number, stats:any):iFile => {
// folder = getRelativePath(folder)
// // clean name of possible path inside
// const nameArr = name.split('/')
// let realName = nameArr.pop()
// let fullFolder = folder
// fullFolder = `${fullFolder}/${nameArr.join('/')}`
// return {
// nature: 'file',
// extension: 'md',
// index,
// created: Math.round(stats.birthtimeMs),
// modified: Math.round(stats.ctimeMs),
// name: cleanPath(`${realName}`),
// realname: `${realName}`,
// path: cleanPath(`${fullFolder}/${realName}`),
// folder: cleanPath(`${fullFolder}`),
// }
// }
......@@ -27,14 +27,85 @@ export const isRgCliWorking = async (): Promise<boolean> => {
}
}
// SEARCH WITH RG
//
// NEW GENERIC SEARCH
//
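// one parsed rg output line: raw = full line, path = file path part, found = matched text, file = resolved iFile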
type iLineRg = {
raw: string,
path: string,
found: string,
file: iFile
}
export const searchWithRgGeneric = async (p: {
term: string
folder: string
recursive?: boolean,
debug?: boolean
processRawLine?: (infos: iLineRg) => any
onSearchEnded: (res: any) => void
}): Promise<void> => {
if (p.recursive === undefined) p.recursive = true
if (!p.processRawLine) p.processRawLine = (r: any) => [r]
// make sure the folder path is prefixed with backConfig.dataFolder
const relativeFolder = getRelativePath(p.folder)
const folderToSearch = `${backConfig.dataFolder + relativeFolder}`;
const searchParams = [
p.term,
folderToSearch,
'--ignore-case',
'--type',
'md',
// print only the matched text, one match per output line
'--only-matching',
]
p.debug && console.log(`== ============`);
p.debug && console.log(backConfig.rgPath, searchParams);
const ripGrepStream = execa(backConfig.rgPath, searchParams)
const resArr: string[] = []
ripGrepStream.stdout.on('data', async dataChunk => {
const rawChunk = dataChunk.toString()
const rawLines = rawChunk.split('\n')
each(rawLines, line => {
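// with --only-matching each output line is "<file path>:<matched text>"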
const processedInfos = line.split(':')
if (!processedInfos[0] || processedInfos[0] === '') return
p.debug && console.log(processedInfos);
const processedLine = p.processRawLine({
file: processRawPathToFile({rawPath: processedInfos[0], folder: p.folder}),
raw: line,
path: processedInfos[0],
found: processedInfos[1],
})
if (processedLine) resArr.push(processedLine)
})
})
ripGrepStream.stdout.on('close', dataChunk => {
p.onSearchEnded(resArr)
p.debug && console.log(`============== END`);
})
}
//
// OLD SEARCH WITH RG
//
export const searchWithRipGrep = async (params: {
term: string,
folder: string,
titleSearch: boolean,
imageSearch?: boolean,
recursive: boolean,
term: string
folder: string
titleSearch: boolean
typeSearch: 'term' | 'folder' | 'term-image' | 'folder-image' | 'folder-regex'
onSearchEnded: (res: { files?: iFile[], images?: iFileImage[] }) => Promise<void>
processRawEl?: (raw: string) => any
processFinalRes?: (raw: string) => any
}): Promise<void> => {
let processTerm = params.term.split('-').join('\\-')
......@@ -44,7 +115,11 @@ export const searchWithRipGrep = async (params: {
const folderToSearch = `${backConfig.dataFolder + relativeFolder}`;
const perfs = { init: Date.now(), cmd1: Date.now(), cmd2: Date.now() }
const searchType = (params.term === '') ? 'folder' : 'term'
// let searchType = (params.term === '') ? 'folder' : 'term'
// searchType = (params.imageSearch) ? searchType + '-image' :
// let searchType = ''
// if (params.term === '' && params.imageSearch) searchType = ''
const debugMode = folderToSearch === '/sdcard/tiro-notes/main'
......@@ -59,10 +134,11 @@ export const searchWithRipGrep = async (params: {
//////////////////////////////////////
// SEARCH TYPE 1 : TERM SEARCH
//
if (searchType === 'term' && !params.imageSearch) {
if (params.typeSearch === 'term') {
const titleFilter = params.titleSearch ? processTerm : ''
const searchedTerm = params.titleSearch ? '' : processTerm
// search the term, also matching any surrounding header block
const termRegex = `(${r.headerStart}${r.all}${r.headerStop})*${r.all}${searchedTerm}${r.all}(${r.headerStart}${r.all}${r.headerStop})*`
const normalSearchParams = [
termRegex,
......@@ -82,13 +158,14 @@ export const searchWithRipGrep = async (params: {
const rawMetaArr = rawMetaString.split('\n')
resultsRawArr.push(...rawMetaArr)
})
ripGrepStreamProcess1.stdout.on('close', dataRaw => {
const metasFilesObj = processRawStringsToMetaObj(resultsRawArr, relativeFolder, true);
// if (debugMode) log(11, resultsRawArr, 'to', metasFilesObj)
const scannedFilesObj: iFilesObj = {}
let index = 0
each(metasFilesObj, (metaObj, fileName) => {
const file = processRawPathToFile(fileName, relativeFolder, index, titleFilter)
const file = processRawPathToFile({ rawPath: fileName, folder: relativeFolder, index, titleFilter })
if (file && file.name) {
if (fileExists(`${backConfig.dataFolder}/${file.path}`)) {
scannedFilesObj[file.name] = file
......@@ -107,7 +184,7 @@ export const searchWithRipGrep = async (params: {
//////////////////////////////////////
// SEARCH TYPE 2 : FOLDER SEARCH
//
else if (searchType === 'folder' && !params.imageSearch) {
else if (params.typeSearch === 'folder') {
const fullFolderSearchParams = [
'--files',
folderToSearch,
......@@ -179,8 +256,8 @@ export const searchWithRipGrep = async (params: {
//////////////////////////////////////
// SEARCH TYPE 3 : IMAGE SEARCH
//
else if (searchType === 'term' && params.imageSearch) {
// term in folder and its subfolders
else if (params.typeSearch === 'term-image') {
const titleFilter = params.titleSearch ? processTerm : ''
const searchedTerm = params.titleSearch ? '' : processTerm
......@@ -207,7 +284,8 @@ export const searchWithRipGrep = async (params: {
params.onSearchEnded({ images })
})
}
else if (searchType === 'folder' && params.imageSearch) {
// IMAGE SEARCH: only in the folder itself, NOT in subfolders
else if (params.typeSearch === 'folder-image') {
const searchParams = [
`${r.imageMd}`,
folderToSearch,
......@@ -233,11 +311,69 @@ export const searchWithRipGrep = async (params: {
}
//////////////////////////////////////
// SEARCH TYPE 4 : GENERIC
}
else if (params.typeSearch === 'folder-regex') {
const titleFilter = params.titleSearch ? processTerm : ''
const searchedTerm = params.titleSearch ? '' : processTerm
const termRegex = params.term
const searchParams = [
termRegex,
folderToSearch,
'--ignore-case',
'--type',
'md',
'--multiline',
]
console.log(`==============`);
console.log(searchParams);
const ripGrepStreamProcessImg2 = execa(backConfig.rgPath, searchParams)
const rawStrings: string[] = []
ripGrepStreamProcessImg2.stdout.on('data', async dataRaw => {
const partialRawString = dataRaw.toString()
console.log(`--> `, partialRawString);
// split multiline strings
// const partialRawStringsArr = partialRawString.split('\n')
// rawStrings.push(...partialRawStringsArr)
})
ripGrepStreamProcessImg2.stdout.on('close', (dataRaw) => {
// const images = processRawStringsToImagesArr(rawStrings, relativeFolder, titleFilter);
// log(h, ` TERM SEARCH + IMAGE => ENDED ${images.length}`, { searchParams });
params.onSearchEnded({})
console.log(`============== END`);
})
}
// // IMAGE SEARCH : only in folder and NOT in subfolders
// else if (params.typeSearch === 'term-regex') {
// const searchParams = [
// `${r.imageMd}`,
// folderToSearch,
// '--max-depth=1',
// '--ignore-case',
// '--type',
// 'md',
// '--multiline',
// ]
// const ripGrepStreamProcessImg1 = execa(backConfig.rgPath, searchParams)
// const rawStrings: string[] = []
// ripGrepStreamProcessImg1.stdout.on('data', async dataRaw => {
// const partialRawString = dataRaw.toString()
// // split multiline strings
// const partialRawStringsArr = partialRawString.split('\n')
// rawStrings.push(...partialRawStringsArr)
// })
// ripGrepStreamProcessImg1.stdout.on('close', dataRaw => {
// const images = processRawStringsToImagesArr(rawStrings, relativeFolder);
// log(h, ` IMAGE FOLDER => ENDED ${images.length}`);
// params.onSearchEnded({ images })
// })
// }
}
......
......@@ -31,8 +31,8 @@ export const listenSocketEndpoints = (serverSocket2: ServerSocketManager<iApiDic
searchWithRipGrep({
term: '',
folder: data.folderPath,
typeSearch: 'folder',
titleSearch: false,
recursive: false,
onSearchEnded: async res => {
if (res.files) await serverSocket2.emit('getFiles', { files: res.files, idReq: data.idReq })
}
......@@ -42,10 +42,9 @@ export const listenSocketEndpoints = (serverSocket2: ServerSocketManager<iApiDic
serverSocket2.on('askForImages', async data => {
searchWithRipGrep({
term: '',
imageSearch: true,
typeSearch: 'folder-image',
folder: data.folderPath,
titleSearch: false,
recursive: false,
onSearchEnded: async res => {
if (res.images) await serverSocket2.emit('getImages', { images: res.images })
}
......@@ -70,8 +69,8 @@ export const listenSocketEndpoints = (serverSocket2: ServerSocketManager<iApiDic
searchWithRipGrep({
term: termObj.term,
folder: termObj.folderToSearch,
typeSearch: 'term',
titleSearch: termObj.titleSearch,
recursive: true,
onSearchEnded: async res => {
if (res.files) await serverSocket2.emit('getFiles', { files: res.files, idReq: data.idReq })
}
......@@ -80,10 +79,9 @@ export const listenSocketEndpoints = (serverSocket2: ServerSocketManager<iApiDic
else if (data.type === 'image') {
searchWithRipGrep({
term: termObj.term,
typeSearch: 'term-image',
folder: termObj.folderToSearch,
titleSearch: termObj.titleSearch,
imageSearch: true,
recursive: true,
onSearchEnded: async res => {
if (res.images) await serverSocket2.emit('getImages', { images: res.images })
}
......
......@@ -6,6 +6,9 @@ import { isEnvDev } from './managers/path.manager';
import { fileLogClean, log } from './managers/log.manager';
import { cloneDeep } from 'lodash';
import { startSecuredStaticServer } from './managers/staticServer.manager';
import { searchWithRgGeneric, searchWithRipGrep } from './managers/search/search-ripgrep.manager';
import { regexs } from '../../shared/helpers/regexs.helper';
import { searchWord } from './managers/search/allOccurences.search.manager';
fileLogClean();
......@@ -51,3 +54,14 @@ server.listen(backConfig.port, function () {
let configServerStr = JSON.stringify({ https: backConfig.https, port: backConfig.port })
log(`SERVER_LOAD_SUCCESS ${configServerStr}`);
})
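// dev-time manual test of the new searchWord (hardcoded test folder, logs the grouped results)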
searchWord({
term: "#[^ #]+",
folder: '/test_obsi/nodal_ex',
cb: res => {
console.log(333, res, 333);
}
})
// GOAL IS HAVING
......@@ -44,7 +44,9 @@ export const regexs = {
searchFolder: VerEx().find(' /').anything().endOfLine(),
searchFolderNoSpace: VerEx().find('/').anything().endOfLine(),
firstPartImg: VerEx().find('![').anythingBut('[]').then('](')
firstPartImg: VerEx().find('![').anythingBut('[]').then(']('),
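// matches an inline hashtag like "#mytag" and captures the tag text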
hashtag: VerEx().find('#').beginCapture().anythingBut(' ').endCapture()
}
......