import { glob } from 'glob';
import * as fs from 'node:fs';
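
// Expands the paths handed to the CLI into a concrete list of media files: literal file
// paths are kept as-is, while directories and not-yet-existing paths are treated as glob
// patterns and matched against the supported image and video extensions.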

export class CrawlOptions {
  pathsToCrawl!: string[];
  recursive? = false;
  includeHidden? = false;
  exclusionPatterns?: string[];
}
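
// Example (illustrative values only): { pathsToCrawl: ['/photos'], recursive: true,
// includeHidden: false, exclusionPatterns: ['**/Thumbnails/**'] }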

export class CrawlService {
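  // Dot-less extensions used to build the glob suffix: image ['.jpg', '.heic'] and video
  // ['.mp4'] (illustrative lists) become ['jpg', 'heic', 'mp4'] in the constructor.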
  private readonly extensions!: string[];

  constructor(image: string[], video: string[]) {
    this.extensions = [...image, ...video].map((extension) => extension.replace('.', ''));
  }

  async crawl(options: CrawlOptions): Promise<string[]> {
    const { recursive, pathsToCrawl, exclusionPatterns, includeHidden } = options;

    if (!pathsToCrawl) {
      return [];
    }

    const patterns: string[] = [];
    const crawledFiles: string[] = [];
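
    // Triage each requested path: a path that resolves to an existing file goes straight into
    // the result list, while directories and paths that do not exist yet are collected as glob
    // patterns; any stat error other than ENOENT is rethrown.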
    for await (const currentPath of pathsToCrawl) {
      try {
        const stats = await fs.promises.stat(currentPath);
        if (stats.isFile() || stats.isSymbolicLink()) {
          crawledFiles.push(currentPath);
        } else {
          patterns.push(currentPath);
        }
      } catch (error: any) {
        if (error.code === 'ENOENT') {
          patterns.push(currentPath);
        } else {
          throw error;
        }
      }
    }
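
    // Combine the collected patterns into a single glob. Multiple entries are wrapped in a
    // brace group, e.g. ['/photos', '/backup'] (illustrative) becomes '{/photos,/backup}'.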
    let searchPattern: string;
    if (patterns.length === 1) {
      searchPattern = patterns[0];
    } else if (patterns.length === 0) {
      return crawledFiles;
    } else {
      searchPattern = '{' + patterns.join(',') + '}';
    }

    if (recursive) {
      searchPattern = searchPattern + '/**/';
    }

    searchPattern = `${searchPattern}/*.{${this.extensions.join(',')}}`;
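
    // With pathsToCrawl ['/photos'] and extensions ['jpg', 'png'] (illustrative values), the
    // final pattern is '/photos/*.{jpg,png}', or '/photos/**//*.{jpg,png}' when recursive.
    // glob returns absolute file paths, matched case-insensitively, skipping directories,
    // including dotfiles only when includeHidden is set, and ignoring exclusionPatterns.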
    const globbedFiles = await glob(searchPattern, {
      absolute: true,
      nocase: true,
      nodir: true,
      dot: includeHidden,
      ignore: exclusionPatterns,
    });

    return [...crawledFiles, ...globbedFiles].sort();
  }
}
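
// Usage sketch, not part of the upstream file: the helper name, extension lists, paths, and
// exclusion pattern below are illustrative assumptions; the real CLI supplies its own
// supported formats and options.
export const exampleCrawlUsage = async (): Promise<string[]> => {
  const service = new CrawlService(['.jpg', '.png'], ['.mp4']);
  return service.crawl({
    pathsToCrawl: ['/photos'],
    recursive: true,
    includeHidden: false,
    exclusionPatterns: ['**/Thumbnails/**'],
  });
};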