Mirror of https://github.com/immich-app/immich.git
* Add new cli
* Remove old readme
* Add documentation to readme file
* Add github workflow tests for cli
* Fix typo in docs
* Add usage info to readme
* Add package-lock.json
* Fix tsconfig
* Cleanup
* Fix lint
* Cleanup package.json
* Fix accidental server change
* Remove rootdir from cli
* Remove tsbuildinfo
* Add prettierignore
* Make CLI use internal openapi specs
* Add ignore and dry-run features
* Sort paths alphabetically
* Don't remove substring
* Remove shorthand for delete
* Remove unused import
* Remove chokidar
* Set correct openapi cli generator script
* Add progress bar
* Rename target to asset
* Add deletion progress bar
* Ignore require statement
* Use read streams instead of readfile
* Fix github feedback
* Fix upload requires
* More github comments
* Cleanup messages
* Cleaner pattern concats
* Github comments

---------

Co-authored-by: Alex Tran <alex.tran1502@gmail.com>
import { CrawlOptionsDto } from 'src/cores/dto/crawl-options-dto';
import { ACCEPTED_FILE_EXTENSIONS } from '../cores';
import { glob } from 'glob';
import * as fs from 'fs';

export class CrawlService {
  public async crawl(crawlOptions: CrawlOptionsDto): Promise<string[]> {
    const pathsToCrawl: string[] = crawlOptions.pathsToCrawl;

    // Split the requested paths into plain files (returned as-is) and
    // directories (expanded via a glob pattern below).
    const directories: string[] = [];
    const crawledFiles: string[] = [];

    for (const currentPath of pathsToCrawl) {
      const stats = await fs.promises.stat(currentPath);
      if (stats.isFile() || stats.isSymbolicLink()) {
        crawledFiles.push(currentPath);
      } else {
        directories.push(currentPath);
      }
    }

    // Build a single search pattern covering every directory. With no
    // directories there is nothing to glob, so return the files directly.
    let searchPattern: string;
    if (directories.length === 1) {
      searchPattern = directories[0];
    } else if (directories.length === 0) {
      return crawledFiles;
    } else {
      searchPattern = '{' + directories.join(',') + '}';
    }

    if (crawlOptions.recursive) {
      searchPattern = searchPattern + '/**/';
    }

    // Restrict matches to the accepted file extensions.
    searchPattern = `${searchPattern}/*.{${ACCEPTED_FILE_EXTENSIONS.join(',')}}`;

    const globbedFiles = await glob(searchPattern, {
      nocase: true,
      nodir: true,
      ignore: crawlOptions.excludePatterns,
    });

    // Merge explicitly listed files with the glob results and return them sorted.
    const returnedFiles = crawledFiles.concat(globbedFiles);
    returnedFiles.sort();
    return returnedFiles;
  }
}
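For illustration only, a minimal sketch of how the service above might be invoked follows. The import path and the way CrawlOptionsDto is constructed are assumptions made for this example; only the pathsToCrawl, recursive, and excludePatterns fields come from the code itself.

// Hypothetical usage sketch: not part of the repository file.
import { CrawlService } from './crawl-service';
import { CrawlOptionsDto } from 'src/cores/dto/crawl-options-dto';

const options = new CrawlOptionsDto();
options.pathsToCrawl = ['/photos', '/videos/holiday.mp4']; // a directory plus a single file
options.recursive = true;                                  // expand the directory with '**'
options.excludePatterns = ['**/Thumbnails/**'];            // forwarded to glob's `ignore` option

new CrawlService().crawl(options).then((files) => {
  // Explicit file paths plus glob matches under /photos, sorted alphabetically.
  console.log(`found ${files.length} assets`);
});

With these options the service keeps /videos/holiday.mp4 as-is and builds a pattern of the form /photos/**//*.{<ACCEPTED_FILE_EXTENSIONS>}, which it hands to glob with nocase and nodir enabled.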