Merge remote-tracking branch 'origin/main' into feature/new-activity-timeline

idubnori committed 2025-10-29 19:12:12 +09:00
373 changed files with 7251 additions and 4270 deletions

.github/.nvmrc

@@ -1 +1 @@
22.20.0
22.21.0

.github/labeler.yml

@@ -31,7 +31,7 @@ documentation:
🧠machine-learning:
- changed-files:
- any-glob-to-any-file:
- machine-learning/app/**
- machine-learning/**
changelog:translation:
- head-branch: ['^chore/translations$']


@@ -173,7 +173,7 @@ jobs:
persist-credentials: false
- name: Setup Flutter SDK
uses: subosito/flutter-action@v2
uses: subosito/flutter-action@fd55f4c5af5b953cc57a2be44cb082c8f6635e8e # v2
with:
channel: 'stable'
flutter-version-file: ./mobile/pubspec.yaml
@@ -194,7 +194,7 @@ jobs:
- name: Setup Ruby
uses: ruby/setup-ruby@v1
with:
ruby-version: '3.2'
ruby-version: '3.4.7'
working-directory: ./mobile/ios
- name: Install Fastlane
@@ -259,7 +259,7 @@ jobs:
security delete-keychain build.keychain || true
- name: Upload IPA artifact
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
with:
name: ios-release-ipa
path: mobile/ios/Runner.ipa


@@ -132,7 +132,7 @@ jobs:
suffixes: '-rocm'
platforms: linux/amd64
runner-mapping: '{"linux/amd64": "mich"}'
uses: immich-app/devtools/.github/workflows/multi-runner-build.yml@a667ef0a5cf3ff1ff1e41be52d3fe326b24e3a00 # multi-runner-build-workflow-v1.1.3
uses: immich-app/devtools/.github/workflows/multi-runner-build.yml@47a2ee86898ccff51592d6572391fb1abcd7f782 # multi-runner-build-workflow-v2.0.1
permissions:
contents: read
actions: read
@@ -155,7 +155,7 @@ jobs:
name: Build and Push Server
needs: pre-job
if: ${{ fromJSON(needs.pre-job.outputs.should_run).server == true }}
uses: immich-app/devtools/.github/workflows/multi-runner-build.yml@a667ef0a5cf3ff1ff1e41be52d3fe326b24e3a00 # multi-runner-build-workflow-v1.1.3
uses: immich-app/devtools/.github/workflows/multi-runner-build.yml@47a2ee86898ccff51592d6572391fb1abcd7f782 # multi-runner-build-workflow-v2.0.1
permissions:
contents: read
actions: read


@@ -1 +1 @@
22.20.0
22.21.0


@@ -20,7 +20,7 @@
"@types/lodash-es": "^4.17.12",
"@types/micromatch": "^4.0.9",
"@types/mock-fs": "^4.13.1",
"@types/node": "^22.18.10",
"@types/node": "^22.18.12",
"@vitest/coverage-v8": "^3.0.0",
"byte-size": "^9.0.0",
"cli-progress": "^3.12.0",
@@ -69,6 +69,6 @@
"micromatch": "^4.0.8"
},
"volta": {
"node": "22.20.0"
"node": "22.21.0"
}
}


@@ -271,7 +271,7 @@ describe('startWatch', () => {
});
});
it('should filger out ignored patterns', async () => {
it('should filter out ignored patterns', async () => {
const testFilePath = path.join(testFolder, 'test.jpg');
const ignoredPattern = 'ignored';
const ignoredFolder = path.join(testFolder, ignoredPattern);


@@ -37,6 +37,7 @@ export interface UploadOptionsDto {
dryRun?: boolean;
skipHash?: boolean;
delete?: boolean;
deleteDuplicates?: boolean;
album?: boolean;
albumName?: string;
includeHidden?: boolean;
@@ -70,10 +71,8 @@ const uploadBatch = async (files: string[], options: UploadOptionsDto) => {
console.log(JSON.stringify({ newFiles, duplicates, newAssets }, undefined, 4));
}
await updateAlbums([...newAssets, ...duplicates], options);
await deleteFiles(
newAssets.map(({ filepath }) => filepath),
options,
);
await deleteFiles(newAssets, duplicates, options);
};
export const startWatch = async (
@@ -406,28 +405,46 @@ const uploadFile = async (input: string, stats: Stats): Promise<AssetMediaRespon
return response.json();
};
const deleteFiles = async (files: string[], options: UploadOptionsDto): Promise<void> => {
if (!options.delete) {
return;
const deleteFiles = async (uploaded: Asset[], duplicates: Asset[], options: UploadOptionsDto): Promise<void> => {
let fileCount = 0;
if (options.delete) {
fileCount += uploaded.length;
}
if (options.deleteDuplicates) {
fileCount += duplicates.length;
}
if (options.dryRun) {
console.log(`Would have deleted ${files.length} local asset${s(files.length)}`);
console.log(`Would have deleted ${fileCount} local asset${s(fileCount)}`);
return;
}
if (fileCount === 0) {
return;
}
console.log('Deleting assets that have been uploaded...');
const deletionProgress = new SingleBar(
{ format: 'Deleting local assets | {bar} | {percentage}% | ETA: {eta}s | {value}/{total} assets' },
Presets.shades_classic,
);
deletionProgress.start(files.length, 0);
deletionProgress.start(fileCount, 0);
const chunkDelete = async (files: Asset[]) => {
for (const assetBatch of chunk(files, options.concurrency)) {
await Promise.all(assetBatch.map((input: Asset) => unlink(input.filepath)));
deletionProgress.update(assetBatch.length);
}
};
try {
for (const assetBatch of chunk(files, options.concurrency)) {
await Promise.all(assetBatch.map((input: string) => unlink(input)));
deletionProgress.update(assetBatch.length);
if (options.delete) {
await chunkDelete(uploaded);
}
if (options.deleteDuplicates) {
await chunkDelete(duplicates);
}
} finally {
deletionProgress.stop();
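
The reworked deletion path above now receives uploaded assets and duplicates separately and derives the number of files to remove from the --delete and --delete-duplicates flags before honoring --dry-run. A minimal, self-contained TypeScript sketch of just that counting step (the Asset and option shapes are taken from the hunks above; the helper name is illustrative, not part of the CLI):

    // Sketch only: mirrors the fileCount logic from the hunk above, not the actual CLI code.
    interface Asset {
      filepath: string;
    }

    interface UploadOptionsDto {
      delete?: boolean;
      deleteDuplicates?: boolean;
      dryRun?: boolean;
    }

    const countDeletions = (uploaded: Asset[], duplicates: Asset[], options: UploadOptionsDto): number => {
      let fileCount = 0;
      if (options.delete) {
        fileCount += uploaded.length; // originals removed after a successful upload
      }
      if (options.deleteDuplicates) {
        fileCount += duplicates.length; // local copies that already exist on the server
      }
      return fileCount;
    };

    // With only --delete-duplicates set, only the duplicate is counted.
    console.log(countDeletions([{ filepath: 'new.jpg' }], [{ filepath: 'dupe.jpg' }], { deleteDuplicates: true })); // 1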


@@ -75,6 +75,11 @@ program
.default(false),
)
.addOption(new Option('--delete', 'Delete local assets after upload').env('IMMICH_DELETE_ASSETS'))
.addOption(
new Option('--delete-duplicates', 'Delete local assets that are duplicates (already exist on server)').env(
'IMMICH_DELETE_DUPLICATES',
),
)
.addOption(new Option('--no-progress', 'Hide progress bars').env('IMMICH_PROGRESS_BAR').default(true))
.addOption(
new Option('--watch', 'Watch for changes and upload automatically')
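
For reference, commander camel-cases a flag declared this way, so the CLI reads it back as deleteDuplicates, matching the new field on UploadOptionsDto. A small sketch of that mapping, using the commander APIs shown in the hunk above (the program setup is illustrative):

    import { Command, Option } from 'commander';

    const program = new Command().addOption(
      new Option('--delete-duplicates', 'Delete local assets that are duplicates (already exist on server)').env(
        'IMMICH_DELETE_DUPLICATES',
      ),
    );

    // commander treats the first two argv entries as the runtime and script path.
    program.parse(['node', 'immich', '--delete-duplicates']);
    console.log(program.opts().deleteDuplicates); // true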


@@ -122,7 +122,7 @@ services:
ports:
- 3003:3003
volumes:
- ../machine-learning:/usr/src/app
- ../machine-learning/immich_ml:/usr/src/immich_ml
- model-cache:/cache
env_file:
- .env


@@ -95,7 +95,7 @@ services:
command: ['./run.sh', '-disable-reporting']
ports:
- 3000:3000
image: grafana/grafana:12.1.1-ubuntu@sha256:d1da838234ff2de93e0065ee1bf0e66d38f948dcc5d718c25fa6237e14b4424a
image: grafana/grafana:12.2.1-ubuntu@sha256:797530c642f7b41ba7848c44cfda5e361ef1f3391a98bed1e5d448c472b6826a
volumes:
- grafana-data:/var/lib/grafana


@@ -1 +1 @@
22.20.0
22.21.0


@@ -103,6 +103,7 @@ Options:
-c, --concurrency <number> Number of assets to upload at the same time (default: 4, env: IMMICH_UPLOAD_CONCURRENCY)
-j, --json-output Output detailed information in json format (default: false, env: IMMICH_JSON_OUTPUT)
--delete Delete local assets after upload (env: IMMICH_DELETE_ASSETS)
--delete-duplicates Delete local assets that are duplicates (already exist on server) (env: IMMICH_DELETE_DUPLICATES)
--no-progress Hide progress bars (env: IMMICH_PROGRESS_BAR)
--watch Watch for changes and upload automatically (default: false, env: IMMICH_WATCH_CHANGES)
--help display help for command


@@ -57,6 +57,6 @@
"node": ">=20"
},
"volta": {
"node": "22.20.0"
"node": "22.21.0"
}
}


@@ -1 +1 @@
22.20.0
22.21.0


@@ -25,7 +25,7 @@
"@playwright/test": "^1.44.1",
"@socket.io/component-emitter": "^3.1.2",
"@types/luxon": "^3.4.2",
"@types/node": "^22.18.10",
"@types/node": "^22.18.12",
"@types/oidc-provider": "^9.0.0",
"@types/pg": "^8.15.1",
"@types/pngjs": "^6.0.4",
@@ -52,6 +52,6 @@
"vitest": "^3.0.0"
},
"volta": {
"node": "22.20.0"
"node": "22.21.0"
}
}


@@ -113,6 +113,7 @@ describe('/server', () => {
importFaces: false,
oauth: false,
oauthAutoLaunch: false,
ocr: false,
passwordLogin: true,
search: true,
sidecar: true,


@@ -442,6 +442,176 @@ describe(`immich upload`, () => {
});
});
describe('immich upload --delete-duplicates', () => {
it('should delete local duplicate files', async () => {
const {
stderr: firstStderr,
stdout: firstStdout,
exitCode: firstExitCode,
} = await immichCli(['upload', `${testAssetDir}/albums/nature/silver_fir.jpg`]);
expect(firstStderr).toContain('{message}');
expect(firstStdout.split('\n')).toEqual(
expect.arrayContaining([expect.stringContaining('Successfully uploaded 1 new asset')]),
);
expect(firstExitCode).toBe(0);
await mkdir(`/tmp/albums/nature`, { recursive: true });
await symlink(`${testAssetDir}/albums/nature/silver_fir.jpg`, `/tmp/albums/nature/silver_fir.jpg`);
// Upload with --delete-duplicates flag
const { stderr, stdout, exitCode } = await immichCli([
'upload',
`/tmp/albums/nature/silver_fir.jpg`,
'--delete-duplicates',
]);
// Check that the duplicate file was deleted
const files = await readdir(`/tmp/albums/nature`);
await rm(`/tmp/albums/nature`, { recursive: true });
expect(files.length).toBe(0);
expect(stdout.split('\n')).toEqual(
expect.arrayContaining([
expect.stringContaining('Found 0 new files and 1 duplicate'),
expect.stringContaining('All assets were already uploaded, nothing to do'),
]),
);
expect(stderr).toContain('{message}');
expect(exitCode).toBe(0);
// Verify no new assets were uploaded
const assets = await getAssetStatistics({}, { headers: asKeyAuth(key) });
expect(assets.total).toBe(1);
});
it('should have accurate dry run with --delete-duplicates', async () => {
const {
stderr: firstStderr,
stdout: firstStdout,
exitCode: firstExitCode,
} = await immichCli(['upload', `${testAssetDir}/albums/nature/silver_fir.jpg`]);
expect(firstStderr).toContain('{message}');
expect(firstStdout.split('\n')).toEqual(
expect.arrayContaining([expect.stringContaining('Successfully uploaded 1 new asset')]),
);
expect(firstExitCode).toBe(0);
await mkdir(`/tmp/albums/nature`, { recursive: true });
await symlink(`${testAssetDir}/albums/nature/silver_fir.jpg`, `/tmp/albums/nature/silver_fir.jpg`);
// Upload with --delete-duplicates and --dry-run flags
const { stderr, stdout, exitCode } = await immichCli([
'upload',
`/tmp/albums/nature/silver_fir.jpg`,
'--delete-duplicates',
'--dry-run',
]);
// Check that the duplicate file was NOT deleted in dry run mode
const files = await readdir(`/tmp/albums/nature`);
await rm(`/tmp/albums/nature`, { recursive: true });
expect(files.length).toBe(1);
expect(stdout.split('\n')).toEqual(
expect.arrayContaining([
expect.stringContaining('Found 0 new files and 1 duplicate'),
expect.stringContaining('Would have deleted 1 local asset'),
]),
);
expect(stderr).toContain('{message}');
expect(exitCode).toBe(0);
// Verify no new assets were uploaded
const assets = await getAssetStatistics({}, { headers: asKeyAuth(key) });
expect(assets.total).toBe(1);
});
it('should work with both --delete and --delete-duplicates flags', async () => {
// First, upload a file to create a duplicate on the server
const {
stderr: firstStderr,
stdout: firstStdout,
exitCode: firstExitCode,
} = await immichCli(['upload', `${testAssetDir}/albums/nature/silver_fir.jpg`]);
expect(firstStderr).toContain('{message}');
expect(firstStdout.split('\n')).toEqual(
expect.arrayContaining([expect.stringContaining('Successfully uploaded 1 new asset')]),
);
expect(firstExitCode).toBe(0);
// Both new and duplicate files
await mkdir(`/tmp/albums/nature`, { recursive: true });
await symlink(`${testAssetDir}/albums/nature/silver_fir.jpg`, `/tmp/albums/nature/silver_fir.jpg`); // duplicate
await symlink(`${testAssetDir}/albums/nature/el_torcal_rocks.jpg`, `/tmp/albums/nature/el_torcal_rocks.jpg`); // new
// Upload with both --delete and --delete-duplicates flags
const { stderr, stdout, exitCode } = await immichCli([
'upload',
`/tmp/albums/nature`,
'--delete',
'--delete-duplicates',
]);
// Check that both files were deleted (new file due to --delete, duplicate due to --delete-duplicates)
const files = await readdir(`/tmp/albums/nature`);
await rm(`/tmp/albums/nature`, { recursive: true });
expect(files.length).toBe(0);
expect(stdout.split('\n')).toEqual(
expect.arrayContaining([
expect.stringContaining('Found 1 new files and 1 duplicate'),
expect.stringContaining('Successfully uploaded 1 new asset'),
expect.stringContaining('Deleting assets that have been uploaded'),
]),
);
expect(stderr).toContain('{message}');
expect(exitCode).toBe(0);
// Verify one new asset was uploaded (total should be 2 now)
const assets = await getAssetStatistics({}, { headers: asKeyAuth(key) });
expect(assets.total).toBe(2);
});
it('should only delete duplicates when --delete-duplicates is used without --delete', async () => {
const {
stderr: firstStderr,
stdout: firstStdout,
exitCode: firstExitCode,
} = await immichCli(['upload', `${testAssetDir}/albums/nature/silver_fir.jpg`]);
expect(firstStderr).toContain('{message}');
expect(firstStdout.split('\n')).toEqual(
expect.arrayContaining([expect.stringContaining('Successfully uploaded 1 new asset')]),
);
expect(firstExitCode).toBe(0);
// Both new and duplicate files
await mkdir(`/tmp/albums/nature`, { recursive: true });
await symlink(`${testAssetDir}/albums/nature/silver_fir.jpg`, `/tmp/albums/nature/silver_fir.jpg`); // duplicate
await symlink(`${testAssetDir}/albums/nature/el_torcal_rocks.jpg`, `/tmp/albums/nature/el_torcal_rocks.jpg`); // new
// Upload with only --delete-duplicates flag
const { stderr, stdout, exitCode } = await immichCli(['upload', `/tmp/albums/nature`, '--delete-duplicates']);
// Check that only the duplicate was deleted, new file should remain
const files = await readdir(`/tmp/albums/nature`);
await rm(`/tmp/albums/nature`, { recursive: true });
expect(files).toEqual(['el_torcal_rocks.jpg']);
expect(stdout.split('\n')).toEqual(
expect.arrayContaining([
expect.stringContaining('Found 1 new files and 1 duplicate'),
expect.stringContaining('Successfully uploaded 1 new asset'),
]),
);
expect(stderr).toContain('{message}');
expect(exitCode).toBe(0);
// Verify one new asset was uploaded (total should be 2 now)
const assets = await getAssetStatistics({}, { headers: asKeyAuth(key) });
expect(assets.total).toBe(2);
});
});
describe('immich upload --skip-hash', () => {
it('should skip hashing', async () => {
const filename = `albums/nature/silver_fir.jpg`;


@@ -59,7 +59,7 @@ test.describe('Asset Viewer Navbar', () => {
await page.goto(`/photos/${asset.id}`);
await page.waitForSelector('#immich-asset-viewer');
await page.keyboard.press('f');
await expect(page.locator('#notification-list').getByTestId('message')).toHaveText('Added to favorites');
await expect(page.getByText('Added to favorites')).toBeVisible();
});
});
});


@@ -51,6 +51,6 @@ test.describe('Slideshow', () => {
await expect(page.getByRole('button', { name: 'Exit Slideshow' })).toBeVisible();
await page.keyboard.press('f');
await expect(page.locator('#notification-list')).not.toBeVisible();
await expect(page.getByText('Added to favorites')).not.toBeVisible();
});
});


@@ -154,6 +154,18 @@
"machine_learning_min_detection_score_description": "Minimum confidence score for a face to be detected from 0-1. Lower values will detect more faces but may result in false positives.",
"machine_learning_min_recognized_faces": "Minimum recognized faces",
"machine_learning_min_recognized_faces_description": "The minimum number of recognized faces for a person to be created. Increasing this makes Facial Recognition more precise at the cost of increasing the chance that a face is not assigned to a person.",
"machine_learning_ocr": "OCR",
"machine_learning_ocr_description": "Use machine learning to recognize text in images",
"machine_learning_ocr_enabled": "Enable OCR",
"machine_learning_ocr_enabled_description": "If disabled, images will not undergo text recognition.",
"machine_learning_ocr_max_resolution": "Maximum resolution",
"machine_learning_ocr_max_resolution_description": "Previews above this resolution will be resized while preserving aspect ratio. Higher values are more accurate, but take longer to process and use more memory.",
"machine_learning_ocr_min_detection_score": "Minimum detection score",
"machine_learning_ocr_min_detection_score_description": "Minimum confidence score for text to be detected from 0-1. Lower values will detect more text but may result in false positives.",
"machine_learning_ocr_min_recognition_score": "Minimum recognition score",
"machine_learning_ocr_min_score_recognition_description": "Minimum confidence score for detected text to be recognized from 0-1. Lower values will recognize more text but may result in false positives.",
"machine_learning_ocr_model": "OCR model",
"machine_learning_ocr_model_description": "Server models are more accurate than mobile models, but take longer to process and use more memory.",
"machine_learning_settings": "Machine Learning Settings",
"machine_learning_settings_description": "Manage machine learning features and settings",
"machine_learning_smart_search": "Smart Search",
@@ -245,6 +257,7 @@
"oauth_storage_quota_default_description": "Quota in GiB to be used when no claim is provided.",
"oauth_timeout": "Request Timeout",
"oauth_timeout_description": "Timeout for requests in milliseconds",
"ocr_job_description": "Use machine learning to recognize text in images",
"password_enable_description": "Login with email and password",
"password_settings": "Password Login",
"password_settings_description": "Manage password login settings",
@@ -669,6 +682,8 @@
"change_password_description": "This is either the first time you are signing into the system or a request has been made to change your password. Please enter the new password below.",
"change_password_form_confirm_password": "Confirm Password",
"change_password_form_description": "Hi {name},\n\nThis is either the first time you are signing into the system or a request has been made to change your password. Please enter the new password below.",
"change_password_form_log_out": "Log out all other devices",
"change_password_form_log_out_description": "It is recommended to log out of all other devices",
"change_password_form_new_password": "New Password",
"change_password_form_password_mismatch": "Passwords do not match",
"change_password_form_reenter_new_password": "Re-enter New Password",
@@ -776,6 +791,7 @@
"daily_title_text_date_year": "E, MMM dd, yyyy",
"dark": "Dark",
"dark_theme": "Toggle dark theme",
"date": "Date",
"date_after": "Date after",
"date_and_time": "Date and Time",
"date_before": "Date before",
@@ -890,7 +906,6 @@
"edit_tag": "Edit tag",
"edit_title": "Edit Title",
"edit_user": "Edit user",
"edited": "Edited",
"editor": "Editor",
"editor_close_without_save_prompt": "The changes will not be saved",
"editor_close_without_save_title": "Close editor?",
@@ -1085,6 +1100,7 @@
"features_setting_description": "Manage the app features",
"file_name": "File name",
"file_name_or_extension": "File name or extension",
"file_size": "File size",
"filename": "Filename",
"filetype": "Filetype",
"filter": "Filter",
@@ -1248,6 +1264,7 @@
"local_media_summary": "Local Media Summary",
"local_network": "Local network",
"local_network_sheet_info": "The app will connect to the server through this URL when using the specified Wi-Fi network",
"location": "Location",
"location_permission": "Location permission",
"location_permission_content": "In order to use the auto-switching feature, Immich needs precise location permission so it can read the current Wi-Fi network's name",
"location_picker_choose_on_map": "Choose on map",
@@ -1436,6 +1453,7 @@
"oauth": "OAuth",
"obtainium_configurator": "Obtainium Configurator",
"obtainium_configurator_instructions": "Use Obtainium to install and update the Android app directly from Immich GitHub's release. Create an API key and select a variant to create your Obtainium configuration link",
"ocr": "OCR",
"official_immich_resources": "Official Immich Resources",
"offline": "Offline",
"offset": "Offset",
@@ -1540,6 +1558,9 @@
"play_memories": "Play memories",
"play_motion_photo": "Play Motion Photo",
"play_or_pause_video": "Play or pause video",
"play_original_video": "Play original video",
"play_original_video_setting_description": "Prefer playback of original videos rather than transcoded videos. If original asset is not compatible it may not playback correctly.",
"play_transcoded_video": "Play transcoded video",
"please_auth_to_access": "Please authenticate to access",
"port": "Port",
"preferences_settings_subtitle": "Manage the app's preferences",
@@ -1676,6 +1697,7 @@
"reset_sqlite_confirmation": "Are you sure you want to reset the SQLite database? You will need to log out and log in again to resync the data",
"reset_sqlite_success": "Successfully reset the SQLite database",
"reset_to_default": "Reset to default",
"resolution": "Resolution",
"resolve_duplicates": "Resolve duplicates",
"resolved_all_duplicates": "Resolved all duplicates",
"restore": "Restore",
@@ -1694,6 +1716,7 @@
"running": "Running",
"save": "Save",
"save_to_gallery": "Save to gallery",
"saved": "Saved",
"saved_api_key": "Saved API Key",
"saved_profile": "Saved profile",
"saved_settings": "Saved settings",
@@ -1710,6 +1733,9 @@
"search_by_description_example": "Hiking day in Sapa",
"search_by_filename": "Search by file name or extension",
"search_by_filename_example": "i.e. IMG_1234.JPG or PNG",
"search_by_ocr": "Search by OCR",
"search_by_ocr_example": "Latte",
"search_camera_lens_model": "Search lens model...",
"search_camera_make": "Search camera make...",
"search_camera_model": "Search camera model...",
"search_city": "Search city...",
@@ -1726,6 +1752,7 @@
"search_filter_location_title": "Select location",
"search_filter_media_type": "Media Type",
"search_filter_media_type_title": "Select media type",
"search_filter_ocr": "Search by OCR",
"search_filter_people_title": "Select people",
"search_for": "Search for",
"search_for_existing_person": "Search for existing person",
@@ -1998,6 +2025,7 @@
"theme_setting_three_stage_loading_title": "Enable three-stage loading",
"they_will_be_merged_together": "They will be merged together",
"third_party_resources": "Third-Party Resources",
"time": "Time",
"time_based_memories": "Time-based memories",
"timeline": "Timeline",
"timezone": "Timezone",


@@ -141,7 +141,7 @@ FROM prod-${DEVICE} AS prod
ARG DEVICE
RUN apt-get update && \
apt-get install -y --no-install-recommends tini $(if ! [ "$DEVICE" = "openvino" ] && ! [ "$DEVICE" = "rocm" ]; then echo "libmimalloc2.0"; fi) && \
apt-get install -y --no-install-recommends tini ccache libgl1 libglib2.0-0 libgomp1 $(if ! [ "$DEVICE" = "openvino" ] && ! [ "$DEVICE" = "rocm" ]; then echo "libmimalloc2.0"; fi) && \
apt-get autoremove -yqq && \
apt-get clean && \
rm -rf /var/lib/apt/lists/*


@@ -41,6 +41,7 @@ class PreloadModelData(BaseModel):
class MaxBatchSize(BaseModel):
facial_recognition: int | None = None
text_recognition: int | None = None
class Settings(BaseSettings):


@@ -183,7 +183,9 @@ async def run_inference(payload: Image | str, entries: InferenceEntries) -> Infe
response: InferenceResponse = {}
async def _run_inference(entry: InferenceEntry) -> None:
model = await model_cache.get(entry["name"], entry["type"], entry["task"], ttl=settings.model_ttl)
model = await model_cache.get(
entry["name"], entry["type"], entry["task"], ttl=settings.model_ttl, **entry["options"]
)
inputs = [payload]
for dep in model.depends:
try:


@@ -3,6 +3,8 @@ from typing import Any
from immich_ml.models.base import InferenceModel
from immich_ml.models.clip.textual import MClipTextualEncoder, OpenClipTextualEncoder
from immich_ml.models.clip.visual import OpenClipVisualEncoder
from immich_ml.models.ocr.detection import TextDetector
from immich_ml.models.ocr.recognition import TextRecognizer
from immich_ml.schemas import ModelSource, ModelTask, ModelType
from .constants import get_model_source
@@ -28,6 +30,12 @@ def get_model_class(model_name: str, model_type: ModelType, model_task: ModelTas
case ModelSource.INSIGHTFACE, ModelType.RECOGNITION, ModelTask.FACIAL_RECOGNITION:
return FaceRecognizer
case ModelSource.PADDLE, ModelType.DETECTION, ModelTask.OCR:
return TextDetector
case ModelSource.PADDLE, ModelType.RECOGNITION, ModelTask.OCR:
return TextRecognizer
case _:
raise ValueError(f"Unknown model combination: {source}, {model_type}, {model_task}")


@@ -38,9 +38,8 @@ class InferenceModel(ABC):
def download(self) -> None:
if not self.cached:
log.info(
f"Downloading {self.model_type.replace('-', ' ')} model '{self.model_name}'. This may take a while."
)
model_type = self.model_type.replace("-", " ")
log.info(f"Downloading {model_type} model '{self.model_name}' to {self.model_path}. This may take a while.")
self._download()
def load(self) -> None:
@@ -58,7 +57,7 @@ class InferenceModel(ABC):
self.load()
if model_kwargs:
self.configure(**model_kwargs)
return self._predict(*inputs, **model_kwargs)
return self._predict(*inputs)
@abstractmethod
def _predict(self, *inputs: Any, **model_kwargs: Any) -> Any: ...


@@ -19,7 +19,7 @@ class BaseCLIPTextualEncoder(InferenceModel):
depends = []
identity = (ModelType.TEXTUAL, ModelTask.SEARCH)
def _predict(self, inputs: str, language: str | None = None, **kwargs: Any) -> str:
def _predict(self, inputs: str, language: str | None = None) -> str:
tokens = self.tokenize(inputs, language=language)
res: NDArray[np.float32] = self.session.run(None, tokens)[0][0]
return serialize_np_array(res)


@@ -26,7 +26,7 @@ class BaseCLIPVisualEncoder(InferenceModel):
depends = []
identity = (ModelType.VISUAL, ModelTask.SEARCH)
def _predict(self, inputs: Image.Image | bytes, **kwargs: Any) -> str:
def _predict(self, inputs: Image.Image | bytes) -> str:
image = decode_pil(inputs)
res: NDArray[np.float32] = self.session.run(None, self.transform(image))[0][0]
return serialize_np_array(res)


@@ -75,6 +75,11 @@ _INSIGHTFACE_MODELS = {
}
_PADDLE_MODELS = {
"PP-OCRv5_server",
"PP-OCRv5_mobile",
}
SUPPORTED_PROVIDERS = [
"CUDAExecutionProvider",
"ROCMExecutionProvider",
@@ -159,4 +164,7 @@ def get_model_source(model_name: str) -> ModelSource | None:
if cleaned_name in _OPENCLIP_MODELS:
return ModelSource.OPENCLIP
if cleaned_name in _PADDLE_MODELS:
return ModelSource.PADDLE
return None


@@ -24,7 +24,7 @@ class FaceDetector(InferenceModel):
return session
def _predict(self, inputs: NDArray[np.uint8] | bytes, **kwargs: Any) -> FaceDetectionOutput:
def _predict(self, inputs: NDArray[np.uint8] | bytes) -> FaceDetectionOutput:
inputs = decode_cv2(inputs)
bboxes, landmarks = self._detect(inputs)


@@ -44,7 +44,7 @@ class FaceRecognizer(InferenceModel):
return session
def _predict(
self, inputs: NDArray[np.uint8] | bytes | Image.Image, faces: FaceDetectionOutput, **kwargs: Any
self, inputs: NDArray[np.uint8] | bytes | Image.Image, faces: FaceDetectionOutput
) -> FacialRecognitionOutput:
if faces["boxes"].shape[0] == 0:
return []


@@ -0,0 +1,86 @@
from typing import Any
import numpy as np
from PIL import Image
from rapidocr.ch_ppocr_det import TextDetector as RapidTextDetector
from rapidocr.inference_engine.base import FileInfo, InferSession
from rapidocr.utils import DownloadFile, DownloadFileInput
from rapidocr.utils.typings import EngineType, LangDet, OCRVersion, TaskType
from rapidocr.utils.typings import ModelType as RapidModelType
from immich_ml.config import log
from immich_ml.models.base import InferenceModel
from immich_ml.models.transforms import decode_cv2
from immich_ml.schemas import ModelFormat, ModelSession, ModelTask, ModelType
from immich_ml.sessions.ort import OrtSession
from .schemas import OcrOptions, TextDetectionOutput
class TextDetector(InferenceModel):
depends = []
identity = (ModelType.DETECTION, ModelTask.OCR)
def __init__(self, model_name: str, **model_kwargs: Any) -> None:
super().__init__(model_name, **model_kwargs, model_format=ModelFormat.ONNX)
self.max_resolution = 736
self.min_score = 0.5
self.score_mode = "fast"
self._empty: TextDetectionOutput = {
"image": np.empty(0, dtype=np.float32),
"boxes": np.empty(0, dtype=np.float32),
"scores": np.empty(0, dtype=np.float32),
}
def _download(self) -> None:
model_info = InferSession.get_model_url(
FileInfo(
engine_type=EngineType.ONNXRUNTIME,
ocr_version=OCRVersion.PPOCRV5,
task_type=TaskType.DET,
lang_type=LangDet.CH,
model_type=RapidModelType.MOBILE if "mobile" in self.model_name else RapidModelType.SERVER,
)
)
download_params = DownloadFileInput(
file_url=model_info["model_dir"],
sha256=model_info["SHA256"],
save_path=self.model_path,
logger=log,
)
DownloadFile.run(download_params)
def _load(self) -> ModelSession:
# TODO: support other runtime sessions
session = OrtSession(self.model_path)
self.model = RapidTextDetector(
OcrOptions(
session=session.session,
limit_side_len=self.max_resolution,
limit_type="min",
box_thresh=self.min_score,
score_mode=self.score_mode,
)
)
return session
def _predict(self, inputs: bytes | Image.Image) -> TextDetectionOutput:
results = self.model(decode_cv2(inputs))
if results.boxes is None or results.scores is None or results.img is None:
return self._empty
return {
"image": results.img,
"boxes": np.array(results.boxes, dtype=np.float32),
"scores": np.array(results.scores, dtype=np.float32),
}
def configure(self, **kwargs: Any) -> None:
if (max_resolution := kwargs.get("maxResolution")) is not None:
self.max_resolution = max_resolution
self.model.limit_side_len = max_resolution
if (min_score := kwargs.get("minScore")) is not None:
self.min_score = min_score
self.model.postprocess_op.box_thresh = min_score
if (score_mode := kwargs.get("scoreMode")) is not None:
self.score_mode = score_mode
self.model.postprocess_op.score_mode = score_mode


@@ -0,0 +1,117 @@
from typing import Any
import cv2
import numpy as np
from numpy.typing import NDArray
from PIL.Image import Image
from rapidocr.ch_ppocr_rec import TextRecInput
from rapidocr.ch_ppocr_rec import TextRecognizer as RapidTextRecognizer
from rapidocr.inference_engine.base import FileInfo, InferSession
from rapidocr.utils import DownloadFile, DownloadFileInput
from rapidocr.utils.typings import EngineType, LangRec, OCRVersion, TaskType
from rapidocr.utils.typings import ModelType as RapidModelType
from immich_ml.config import log, settings
from immich_ml.models.base import InferenceModel
from immich_ml.schemas import ModelFormat, ModelSession, ModelTask, ModelType
from immich_ml.sessions.ort import OrtSession
from .schemas import OcrOptions, TextDetectionOutput, TextRecognitionOutput
class TextRecognizer(InferenceModel):
depends = [(ModelType.DETECTION, ModelTask.OCR)]
identity = (ModelType.RECOGNITION, ModelTask.OCR)
def __init__(self, model_name: str, **model_kwargs: Any) -> None:
self.min_score = model_kwargs.get("minScore", 0.9)
self._empty: TextRecognitionOutput = {
"box": np.empty(0, dtype=np.float32),
"boxScore": np.empty(0, dtype=np.float32),
"text": [],
"textScore": np.empty(0, dtype=np.float32),
}
super().__init__(model_name, **model_kwargs, model_format=ModelFormat.ONNX)
def _download(self) -> None:
model_info = InferSession.get_model_url(
FileInfo(
engine_type=EngineType.ONNXRUNTIME,
ocr_version=OCRVersion.PPOCRV5,
task_type=TaskType.REC,
lang_type=LangRec.CH,
model_type=RapidModelType.MOBILE if "mobile" in self.model_name else RapidModelType.SERVER,
)
)
download_params = DownloadFileInput(
file_url=model_info["model_dir"],
sha256=model_info["SHA256"],
save_path=self.model_path,
logger=log,
)
DownloadFile.run(download_params)
def _load(self) -> ModelSession:
# TODO: support other runtimes
session = OrtSession(self.model_path)
self.model = RapidTextRecognizer(
OcrOptions(
session=session.session,
rec_batch_num=settings.max_batch_size.text_recognition if settings.max_batch_size is not None else 6,
rec_img_shape=(3, 48, 320),
)
)
return session
def _predict(self, _: Image, texts: TextDetectionOutput) -> TextRecognitionOutput:
boxes, img, box_scores = texts["boxes"], texts["image"], texts["scores"]
if boxes.shape[0] == 0:
return self._empty
rec = self.model(TextRecInput(img=self.get_crop_img_list(img, boxes)))
if rec.txts is None:
return self._empty
height, width = img.shape[0:2]
boxes[:, :, 0] /= width
boxes[:, :, 1] /= height
text_scores = np.array(rec.scores)
valid_text_score_idx = text_scores > self.min_score
valid_score_idx_list = valid_text_score_idx.tolist()
return {
"box": boxes.reshape(-1, 8)[valid_text_score_idx].reshape(-1),
"text": [rec.txts[i] for i in range(len(rec.txts)) if valid_score_idx_list[i]],
"boxScore": box_scores[valid_text_score_idx],
"textScore": text_scores[valid_text_score_idx],
}
def get_crop_img_list(self, img: NDArray[np.float32], boxes: NDArray[np.float32]) -> list[NDArray[np.float32]]:
img_crop_width = np.maximum(
np.linalg.norm(boxes[:, 1] - boxes[:, 0], axis=1), np.linalg.norm(boxes[:, 2] - boxes[:, 3], axis=1)
).astype(np.int32)
img_crop_height = np.maximum(
np.linalg.norm(boxes[:, 0] - boxes[:, 3], axis=1), np.linalg.norm(boxes[:, 1] - boxes[:, 2], axis=1)
).astype(np.int32)
pts_std = np.zeros((img_crop_width.shape[0], 4, 2), dtype=np.float32)
pts_std[:, 1:3, 0] = img_crop_width[:, None]
pts_std[:, 2:4, 1] = img_crop_height[:, None]
img_crop_sizes = np.stack([img_crop_width, img_crop_height], axis=1).tolist()
imgs: list[NDArray[np.float32]] = []
for box, pts_std, dst_size in zip(list(boxes), list(pts_std), img_crop_sizes):
M = cv2.getPerspectiveTransform(box, pts_std)
dst_img: NDArray[np.float32] = cv2.warpPerspective(
img,
M,
dst_size,
borderMode=cv2.BORDER_REPLICATE,
flags=cv2.INTER_CUBIC,
) # type: ignore
dst_height, dst_width = dst_img.shape[0:2]
if dst_height * 1.0 / dst_width >= 1.5:
dst_img = np.rot90(dst_img)
imgs.append(dst_img)
return imgs
def configure(self, **kwargs: Any) -> None:
self.min_score = kwargs.get("minScore", self.min_score)


@@ -0,0 +1,28 @@
from typing import Any, Iterable
import numpy as np
import numpy.typing as npt
from rapidocr.utils.typings import EngineType, LangRec
from typing_extensions import TypedDict
class TextDetectionOutput(TypedDict):
image: npt.NDArray[np.float32]
boxes: npt.NDArray[np.float32]
scores: npt.NDArray[np.float32]
class TextRecognitionOutput(TypedDict):
box: npt.NDArray[np.float32]
boxScore: npt.NDArray[np.float32]
text: Iterable[str]
textScore: npt.NDArray[np.float32]
# RapidOCR expects `engine_type`, `lang_type`, and `font_path` to be attributes
class OcrOptions(dict[str, Any]):
def __init__(self, **options: Any) -> None:
super().__init__(**options)
self.engine_type = EngineType.ONNXRUNTIME
self.lang_type = LangRec.CH
self.font_path = None


@@ -23,6 +23,7 @@ class BoundingBox(TypedDict):
class ModelTask(StrEnum):
FACIAL_RECOGNITION = "facial-recognition"
SEARCH = "clip"
OCR = "ocr"
class ModelType(StrEnum):
@@ -42,6 +43,7 @@ class ModelSource(StrEnum):
INSIGHTFACE = "insightface"
MCLIP = "mclip"
OPENCLIP = "openclip"
PADDLE = "paddle"
ModelIdentity = tuple[ModelType, ModelTask]


@@ -14,6 +14,8 @@ from ..config import log, settings
class OrtSession:
session: ort.InferenceSession
def __init__(
self,
model_path: Path | str,


@@ -22,6 +22,8 @@ dependencies = [
"rich>=13.4.2",
"tokenizers>=0.15.0,<1.0",
"uvicorn[standard]>=0.22.0,<1.0",
"setuptools>=78.1.0",
"rapidocr>=3.1.0",
]
[dependency-groups]

machine-learning/uv.lock — 3470 changed lines (file diff suppressed because it is too large)


@@ -1,6 +1,6 @@
[tools]
node = "22.20.0"
flutter = "3.35.6"
node = "22.21.0"
flutter = "3.35.7"
pnpm = "10.18.1"
terragrunt = "0.91.2"
opentofu = "1.10.6"


@@ -1,3 +1,3 @@
{
"flutter": "3.35.6"
"flutter": "3.35.7"
}


@@ -1,5 +1,5 @@
{
"dart.flutterSdkPath": ".fvm/versions/3.35.6",
"dart.flutterSdkPath": ".fvm/versions/3.35.7",
"dart.lineLength": 120,
"[dart]": {
"editor.rulers": [120]


@@ -17,7 +17,7 @@ linter:
# section below to disable rules from the `package:flutter_lints/flutter.yaml`
# included above or to enable additional rules. A list of all available lints
# and their documentation is published at
# https://dart-lang.github.io/linter/lints/index.html.
# https://dart.dev/tools/linter-rules
#
# Instead of disabling a lint rule for the entire project in the
# section below, it can also be suppressed for a single line of code
@@ -28,6 +28,7 @@ linter:
rules:
# avoid_print: false # Uncomment to disable the `avoid_print` rule
# prefer_single_quotes: true # Uncomment to enable the `prefer_single_quotes` rule
unawaited_futures: true
use_build_context_synchronously: false
require_trailing_commas: true
unrelated_type_equality_checks: true
@@ -46,6 +47,8 @@ analyzer:
# TODO: Re-enable after upgrading custom_lint
# plugins:
# - custom_lint
errors:
unawaited_futures: warning
custom_lint:
debug: true
@@ -152,160 +155,6 @@ dart_code_metrics:
# - avoid-passing-async-when-sync-expected
# - avoid-throw-in-catch-block
- avoid-unused-parameters
# - avoid-unnecessary-type-assertions
# - avoid-unnecessary-type-casts
# - avoid-unrelated-type-assertions
# - avoid-unrelated-type-casts
# - no-empty-block
# - no-equal-then-else
# - prefer-correct-test-file-name
- prefer-const-border-radius
# - prefer-match-file-name
# - prefer-return-await
# - avoid-self-assignment
# - avoid-self-compare
# - avoid-shadowing
# - prefer-iterable-of
# - no-equal-switch-case
# - no-equal-conditions
# - avoid-equal-expressions
# - avoid-missed-calls
# - avoid-unnecessary-negations
# - avoid-unused-generics
# - function-always-returns-null
# - avoid-throw-objects-without-tostring
# - avoid-unsafe-collection-methods
# - prefer-wildcard-pattern
# - no-equal-switch-expression-cases
# - avoid-future-tostring
# - avoid-unassigned-late-fields
# - avoid-nested-futures
# - avoid-generics-shadowing
# - prefer-parentheses-with-if-null
# - no-equal-nested-conditions
# - avoid-shadowed-extension-methods
# - avoid-unnecessary-conditionals
# - avoid-double-slash-imports
# - avoid-map-keys-contains
# - prefer-correct-json-casts
# - avoid-duplicate-mixins
# - avoid-nullable-interpolation
# - avoid-unused-instances
# - prefer-correct-for-loop-increment
# - prefer-public-exception-classes
# - avoid-uncaught-future-errors
# - always-remove-listener
# - avoid-unnecessary-setstate
# - check-for-equals-in-render-object-setters
# - consistent-update-render-object
# - use-setstate-synchronously
# - avoid-incomplete-copy-with
# - proper-super-calls
# - dispose-fields
# - avoid-empty-setstate
# - avoid-state-constructors
# - avoid-recursive-widget-calls
# - avoid-missing-image-alt
# - avoid-passing-self-as-argument
# - avoid-unnecessary-if
# - avoid-unconditional-break
# - avoid-referencing-discarded-variables
# - avoid-unnecessary-local-late
# - avoid-wildcard-cases-with-enums
# - match-getter-setter-field-names
# - avoid-accessing-collections-by-constant-index
# - prefer-unique-test-names
# - avoid-duplicate-cascades
# - prefer-specific-cases-first
# - avoid-duplicate-switch-case-conditions
# - prefer-explicit-function-type
# - avoid-misused-test-matchers
# - avoid-duplicate-test-assertions
# - prefer-switch-with-enums
# - prefer-any-or-every
# - avoid-duplicate-map-keys
# - avoid-nullable-tostring
# - avoid-undisposed-instances
# - avoid-duplicate-initializers
# - avoid-unassigned-stream-subscriptions
# - avoid-empty-test-groups
# - avoid-not-encodable-in-to-json
# - avoid-contradictory-expressions
# - avoid-excessive-expressions
# - prefer-private-extension-type-field
# - avoid-renaming-representation-getters
# - avoid-empty-spread
# - avoid-unnecessary-gesture-detector
# - avoid-missing-completer-stack-trace
# - avoid-casting-to-extension-type
# - prefer-overriding-parent-equality
# - avoid-missing-controller
# - avoid-unknown-pragma
# - avoid-conditions-with-boolean-literals
# - avoid-multi-assignment
# - avoid-collection-equality-checks
# - avoid-only-rethrow
# - avoid-incorrect-image-opacity
# - avoid-misused-set-literals
# - dispose-class-fields
# - avoid-suspicious-super-overrides
# - avoid-assignments-as-conditions
# - avoid-unused-assignment
# - avoid-unnecessary-overrides
# - avoid-implicitly-nullable-extension-types
# Enable with the next release
# - avoid-late-final-reassignment
# - avoid-duplicate-constant-values
# - function-always-returns-same-value
# - avoid-flexible-outside-flex
# - avoid-unnecessary-patterns
# - use-closest-build-context
# - avoid-commented-out-code
# - avoid-recursive-tostring
# - avoid-enum-values-by-index
# - avoid-constant-assert-conditions
# - avoid-inconsistent-digit-separators
# - pass-existing-future-to-future-builder
# - pass-existing-stream-to-stream-builder
# Code simplification
# - avoid-redundant-async
# - avoid-redundant-else
# - avoid-unnecessary-nullable-return-type
# - avoid-redundant-pragma-inline
# - avoid-nested-records
# - avoid-redundant-positional-field-name
# - avoid-explicit-pattern-field-name
# - prefer-simpler-patterns-null-check
# - avoid-unnecessary-return
# - avoid-duplicate-patterns
# - avoid-keywords-in-wildcard-pattern
# - avoid-unnecessary-futures
# - avoid-unnecessary-reassignment
# - avoid-unnecessary-call
# - avoid-unnecessary-stateful-widgets
# - prefer-dedicated-media-query-methods
# - avoid-unnecessary-overrides-in-state
# - move-variable-closer-to-its-usage
# - avoid-nullable-parameters-with-default-values
# - prefer-null-aware-spread
# - avoid-inferrable-type-arguments
# - avoid-unnecessary-super
# - avoid-unnecessary-collections
# - avoid-unnecessary-extends
# - avoid-unnecessary-enum-arguments
# - prefer-contains
# Enable with the next release
# - prefer-simpler-boolean-expressions
# - prefer-spacing
# - avoid-unnecessary-continue
# - avoid-unnecessary-compare-to
# Style
# - prefer-trailing-comma
# - unnecessary-trailing-comma
- prefer-declaring-const-constructor
# - prefer-single-widget-per-file
- prefer-switch-expression
# - prefer-prefixed-global-constants
# - prefer-correct-callback-field-name


@@ -101,9 +101,15 @@ open class NativeSyncApiImplBase(context: Context) : ImmichPlugin() {
while (c.moveToNext()) {
val id = c.getLong(idColumn).toString()
val name = c.getStringOrNull(nameColumn)
val bucketId = c.getStringOrNull(bucketIdColumn)
val path = c.getStringOrNull(dataColumn)
val path = c.getString(dataColumn)
if (path.isNullOrBlank() || !File(path).exists()) {
// Skip assets with invalid metadata
if (
name.isNullOrBlank() || bucketId.isNullOrBlank() ||
path.isNullOrBlank() || !File(path).exists()
) {
yield(AssetResult.InvalidAsset(id))
continue
}
@@ -113,7 +119,6 @@ open class NativeSyncApiImplBase(context: Context) : ImmichPlugin() {
MediaStore.Files.FileColumns.MEDIA_TYPE_VIDEO -> 2
else -> 0
}
val name = c.getString(nameColumn)
// Date taken is milliseconds since epoch, Date added is seconds since epoch
val createdAt = (c.getLong(dateTakenColumn).takeIf { it > 0 }?.div(1000))
?: c.getLong(dateAddedColumn)
@@ -124,7 +129,6 @@ open class NativeSyncApiImplBase(context: Context) : ImmichPlugin() {
// Duration is milliseconds
val duration = if (mediaType == MediaStore.Files.FileColumns.MEDIA_TYPE_IMAGE) 0
else c.getLong(durationColumn) / 1000
val bucketId = c.getString(bucketIdColumn)
val orientation = c.getInt(orientationColumn)
val isFavorite = if (favoriteColumn == -1) false else c.getInt(favoriteColumn) != 0


@@ -1,6 +1,6 @@
enum SortOrder { asc, desc }
enum TextSearchType { context, filename, description }
enum TextSearchType { context, filename, description, ocr }
enum AssetVisibilityEnum { timeline, hidden, archive, locked }


@@ -114,10 +114,10 @@ class BackgroundWorkerBgService extends BackgroundWorkerFlutterApi {
configureFileDownloaderNotifications();
// Notify the host that the background worker service has been initialized and is ready to use
_backgroundHostApi.onInitialized();
unawaited(_backgroundHostApi.onInitialized());
} catch (error, stack) {
_logger.severe("Failed to initialize background worker", error, stack);
_backgroundHostApi.close();
unawaited(_backgroundHostApi.close());
}
}


@@ -249,7 +249,7 @@ class LocalSyncService {
if (assetsToUpsert.isEmpty && assetsToDelete.isEmpty) {
_log.fine("No asset changes detected in album ${deviceAlbum.name}. Updating metadata.");
_localAlbumRepository.upsert(updatedDeviceAlbum);
await _localAlbumRepository.upsert(updatedDeviceAlbum);
return true;
}


@@ -164,6 +164,10 @@ class RemoteAlbumService {
return _repository.getCount();
}
Future<List<RemoteAlbum>> getAlbumsContainingAsset(String assetId) {
return _repository.getAlbumsContainingAsset(assetId);
}
Future<List<RemoteAlbum>> _sortByNewestAsset(List<RemoteAlbum> albums) async {
// map album IDs to their newest asset dates
final Map<String, Future<DateTime?>> assetTimestampFutures = {};


@@ -68,7 +68,7 @@ class RemoteImageRequest extends ImageRequest {
final cacheManager = this.cacheManager;
final streamController = StreamController<List<int>>(sync: true);
final Stream<List<int>> stream;
cacheManager?.putStreamedFile(url, streamController.stream);
unawaited(cacheManager?.putStreamedFile(url, streamController.stream));
stream = response.map((chunk) {
if (_isCancelled) {
throw StateError('Cancelled request');
@@ -81,11 +81,11 @@ class RemoteImageRequest extends ImageRequest {
try {
final Uint8List bytes = await _downloadBytes(stream, response.contentLength);
streamController.close();
unawaited(streamController.close());
return await ImmutableBuffer.fromUint8List(bytes);
} catch (e) {
streamController.addError(e);
streamController.close();
unawaited(streamController.close());
if (_isCancelled) {
return null;
}
@@ -143,7 +143,7 @@ class RemoteImageRequest extends ImageRequest {
return await _decodeBuffer(buffer, decode, scale);
} catch (e) {
log.severe('Failed to decode cached image', e);
_evictFile(url);
unawaited(_evictFile(url));
return null;
}
}


@@ -361,15 +361,13 @@ class DriftLocalAlbumRepository extends DriftDatabaseRepository {
return _db.managers.localAlbumEntity.count();
}
Future unlinkRemoteAlbum(String id) async {
return _db.localAlbumEntity.update()
..where((row) => row.id.equals(id))
..write(const LocalAlbumEntityCompanion(linkedRemoteAlbumId: Value(null)));
Future<void> unlinkRemoteAlbum(String id) async {
final query = _db.localAlbumEntity.update()..where((row) => row.id.equals(id));
await query.write(const LocalAlbumEntityCompanion(linkedRemoteAlbumId: Value(null)));
}
Future linkRemoteAlbum(String localAlbumId, String remoteAlbumId) async {
return _db.localAlbumEntity.update()
..where((row) => row.id.equals(localAlbumId))
..write(LocalAlbumEntityCompanion(linkedRemoteAlbumId: Value(remoteAlbumId)));
Future<void> linkRemoteAlbum(String localAlbumId, String remoteAlbumId) async {
final query = _db.localAlbumEntity.update()..where((row) => row.id.equals(localAlbumId));
await query.write(LocalAlbumEntityCompanion(linkedRemoteAlbumId: Value(remoteAlbumId)));
}
}


@@ -382,6 +382,61 @@ class DriftRemoteAlbumRepository extends DriftDatabaseRepository {
return query.map((row) => row.read(_db.remoteAssetEntity.id)!).get();
}
Future<List<RemoteAlbum>> getAlbumsContainingAsset(String assetId) async {
// Note: this needs to be 2 queries as the where clause filtering causes the assetCount to always be 1
final albumIdsQuery = _db.remoteAlbumAssetEntity.selectOnly()
..addColumns([_db.remoteAlbumAssetEntity.albumId])
..where(_db.remoteAlbumAssetEntity.assetId.equals(assetId));
final albumIds = await albumIdsQuery.map((row) => row.read(_db.remoteAlbumAssetEntity.albumId)!).get();
if (albumIds.isEmpty) {
return [];
}
final assetCount = _db.remoteAlbumAssetEntity.assetId.count(distinct: true);
final query =
_db.remoteAlbumEntity.select().join([
leftOuterJoin(
_db.remoteAlbumAssetEntity,
_db.remoteAlbumAssetEntity.albumId.equalsExp(_db.remoteAlbumEntity.id),
useColumns: false,
),
leftOuterJoin(
_db.remoteAssetEntity,
_db.remoteAssetEntity.id.equalsExp(_db.remoteAlbumAssetEntity.assetId),
useColumns: false,
),
leftOuterJoin(
_db.userEntity,
_db.userEntity.id.equalsExp(_db.remoteAlbumEntity.ownerId),
useColumns: false,
),
leftOuterJoin(
_db.remoteAlbumUserEntity,
_db.remoteAlbumUserEntity.albumId.equalsExp(_db.remoteAlbumEntity.id),
useColumns: false,
),
])
..where(_db.remoteAlbumEntity.id.isIn(albumIds) & _db.remoteAssetEntity.deletedAt.isNull())
..addColumns([assetCount])
..addColumns([_db.remoteAlbumUserEntity.userId.count(distinct: true)])
..addColumns([_db.userEntity.name])
..groupBy([_db.remoteAlbumEntity.id]);
return query
.map(
(row) => row
.readTable(_db.remoteAlbumEntity)
.toDto(
ownerName: row.read(_db.userEntity.name) ?? '',
isShared: row.read(_db.remoteAlbumUserEntity.userId.count(distinct: true))! > 0,
assetCount: row.read(assetCount) ?? 0,
),
)
.get();
}
}
extension on RemoteAlbumEntityData {


@@ -5,6 +5,7 @@ import 'package:openapi/api.dart';
class SearchApiRepository extends ApiRepository {
final SearchApi _api;
const SearchApiRepository(this._api);
Future<SearchResponseDto?> search(SearchFilter filter, int page) {
@@ -15,10 +16,12 @@ class SearchApiRepository extends ApiRepository {
type = AssetTypeEnum.VIDEO;
}
if (filter.context != null && filter.context!.isNotEmpty) {
if ((filter.context != null && filter.context!.isNotEmpty) ||
(filter.assetId != null && filter.assetId!.isNotEmpty)) {
return _api.searchSmart(
SmartSearchDto(
query: filter.context!,
query: filter.context,
queryAssetId: filter.assetId,
language: filter.language,
country: filter.location.country,
state: filter.location.state,
@@ -43,6 +46,7 @@ class SearchApiRepository extends ApiRepository {
originalFileName: filter.filename != null && filter.filename!.isNotEmpty ? filter.filename : null,
country: filter.location.country,
description: filter.description != null && filter.description!.isNotEmpty ? filter.description : null,
ocr: filter.ocr != null && filter.ocr!.isNotEmpty ? filter.ocr : null,
state: filter.location.state,
city: filter.location.city,
make: filter.camera.make,


@@ -1,6 +1,7 @@
import 'dart:io';
import 'package:immich_mobile/domain/models/asset/base_asset.model.dart';
import 'package:immich_mobile/extensions/platform_extensions.dart';
import 'package:logging/logging.dart';
import 'package:photo_manager/photo_manager.dart';
@@ -89,5 +90,17 @@ class StorageRepository {
} catch (error, stackTrace) {
log.warning("Error clearing cache", error, stackTrace);
}
if (!CurrentPlatform.isIOS) {
return;
}
try {
if (await Directory.systemTemp.exists()) {
await Directory.systemTemp.delete(recursive: true);
}
} catch (error, stackTrace) {
log.warning("Error deleting temporary directory", error, stackTrace);
}
}
}


@@ -159,7 +159,7 @@ class ImmichAppState extends ConsumerState<ImmichApp> with WidgetsBindingObserve
WidgetsBinding.instance.addObserver(this);
// Draw the app from edge to edge
SystemChrome.setEnabledSystemUIMode(SystemUiMode.edgeToEdge);
unawaited(SystemChrome.setEnabledSystemUIMode(SystemUiMode.edgeToEdge));
// Sets the navigation bar color
SystemUiOverlayStyle overlayStyle = const SystemUiOverlayStyle(systemNavigationBarColor: Colors.transparent);


@@ -176,7 +176,9 @@ class SearchFilter {
String? context;
String? filename;
String? description;
String? ocr;
String? language;
String? assetId;
Set<PersonDto> people;
SearchLocationFilter location;
SearchCameraFilter camera;
@@ -190,7 +192,9 @@ class SearchFilter {
this.context,
this.filename,
this.description,
this.ocr,
this.language,
this.assetId,
required this.people,
required this.location,
required this.camera,
@@ -203,6 +207,8 @@ class SearchFilter {
return (context == null || (context != null && context!.isEmpty)) &&
(filename == null || (filename!.isEmpty)) &&
(description == null || (description!.isEmpty)) &&
(assetId == null || (assetId!.isEmpty)) &&
(ocr == null || (ocr!.isEmpty)) &&
people.isEmpty &&
location.country == null &&
location.state == null &&
@@ -222,6 +228,8 @@ class SearchFilter {
String? filename,
String? description,
String? language,
String? ocr,
String? assetId,
Set<PersonDto>? people,
SearchLocationFilter? location,
SearchCameraFilter? camera,
@@ -234,6 +242,8 @@ class SearchFilter {
filename: filename ?? this.filename,
description: description ?? this.description,
language: language ?? this.language,
ocr: ocr ?? this.ocr,
assetId: assetId ?? this.assetId,
people: people ?? this.people,
location: location ?? this.location,
camera: camera ?? this.camera,
@@ -245,7 +255,7 @@ class SearchFilter {
@override
String toString() {
return 'SearchFilter(context: $context, filename: $filename, description: $description, language: $language, people: $people, location: $location, camera: $camera, date: $date, display: $display, mediaType: $mediaType)';
return 'SearchFilter(context: $context, filename: $filename, description: $description, language: $language, ocr: $ocr, people: $people, location: $location, camera: $camera, date: $date, display: $display, mediaType: $mediaType, assetId: $assetId)';
}
@override
@@ -256,6 +266,8 @@ class SearchFilter {
other.filename == filename &&
other.description == description &&
other.language == language &&
other.ocr == ocr &&
other.assetId == assetId &&
other.people == people &&
other.location == location &&
other.camera == camera &&
@@ -270,6 +282,8 @@ class SearchFilter {
filename.hashCode ^
description.hashCode ^
language.hashCode ^
ocr.hashCode ^
assetId.hashCode ^
people.hashCode ^
location.hashCode ^
camera.hashCode ^


@@ -5,33 +5,37 @@ class ServerFeatures {
final bool map;
final bool oauthEnabled;
final bool passwordLogin;
final bool ocr;
const ServerFeatures({
required this.trash,
required this.map,
required this.oauthEnabled,
required this.passwordLogin,
this.ocr = false,
});
ServerFeatures copyWith({bool? trash, bool? map, bool? oauthEnabled, bool? passwordLogin}) {
ServerFeatures copyWith({bool? trash, bool? map, bool? oauthEnabled, bool? passwordLogin, bool? ocr}) {
return ServerFeatures(
trash: trash ?? this.trash,
map: map ?? this.map,
oauthEnabled: oauthEnabled ?? this.oauthEnabled,
passwordLogin: passwordLogin ?? this.passwordLogin,
ocr: ocr ?? this.ocr,
);
}
@override
String toString() {
return 'ServerFeatures(trash: $trash, map: $map, oauthEnabled: $oauthEnabled, passwordLogin: $passwordLogin)';
return 'ServerFeatures(trash: $trash, map: $map, oauthEnabled: $oauthEnabled, passwordLogin: $passwordLogin, ocr: $ocr)';
}
ServerFeatures.fromDto(ServerFeaturesDto dto)
: trash = dto.trash,
map = dto.map,
oauthEnabled = dto.oauth,
passwordLogin = dto.passwordLogin;
passwordLogin = dto.passwordLogin,
ocr = dto.ocr;
@override
bool operator ==(covariant ServerFeatures other) {
@@ -40,11 +44,12 @@ class ServerFeatures {
return other.trash == trash &&
other.map == map &&
other.oauthEnabled == oauthEnabled &&
other.passwordLogin == passwordLogin;
other.passwordLogin == passwordLogin &&
other.ocr == ocr;
}
@override
int get hashCode {
return trash.hashCode ^ map.hashCode ^ oauthEnabled.hashCode ^ passwordLogin.hashCode;
return trash.hashCode ^ map.hashCode ^ oauthEnabled.hashCode ^ passwordLogin.hashCode ^ ocr.hashCode;
}
}


@@ -1,3 +1,5 @@
import 'dart:async';
import 'package:auto_route/auto_route.dart';
import 'package:easy_localization/easy_localization.dart';
import 'package:flutter/material.dart';
@@ -51,7 +53,7 @@ class AlbumOptionsPage extends HookConsumerWidget {
final isSuccess = await ref.read(albumProvider.notifier).leaveAlbum(album);
if (isSuccess) {
context.navigateTo(const TabControllerRoute(children: [AlbumsRoute()]));
unawaited(context.navigateTo(const TabControllerRoute(children: [AlbumsRoute()])));
} else {
showErrorMessage();
}


@@ -1,3 +1,5 @@
import 'dart:async';
import 'package:auto_route/auto_route.dart';
import 'package:easy_localization/easy_localization.dart';
import 'package:flutter/material.dart';
@@ -29,8 +31,8 @@ class AlbumSharedUserSelectionPage extends HookConsumerWidget {
if (newAlbum != null) {
ref.watch(albumTitleProvider.notifier).clearAlbumTitle();
context.maybePop(true);
context.navigateTo(const TabControllerRoute(children: [AlbumsRoute()]));
unawaited(context.maybePop(true));
unawaited(context.navigateTo(const TabControllerRoute(children: [AlbumsRoute()])));
}
ScaffoldMessenger(
@@ -109,8 +111,8 @@ class AlbumSharedUserSelectionPage extends HookConsumerWidget {
centerTitle: false,
leading: IconButton(
icon: const Icon(Icons.close_rounded),
onPressed: () async {
context.maybePop();
onPressed: () {
unawaited(context.maybePop());
},
),
actions: [


@@ -155,7 +155,7 @@ class BackupControllerPage extends HookConsumerWidget {
// waited until returning from selection
await ref.read(backupProvider.notifier).backupAlbumSelectionDone();
// waited until backup albums are stored in DB
ref.read(albumProvider.notifier).refreshDeviceAlbums();
await ref.read(albumProvider.notifier).refreshDeviceAlbums();
},
child: const Text("select", style: TextStyle(fontWeight: FontWeight.bold)).tr(),
),


@@ -270,7 +270,7 @@ class _BackupAlbumSelectionCard extends ConsumerWidget {
if (currentUser == null) {
return;
}
ref.read(driftBackupProvider.notifier).getBackupStatus(currentUser.id);
unawaited(ref.read(driftBackupProvider.notifier).getBackupStatus(currentUser.id));
},
child: const Text("select", style: TextStyle(fontWeight: FontWeight.bold)).tr(),
),


@@ -11,6 +11,7 @@ import 'package:immich_mobile/extensions/translate_extensions.dart';
import 'package:immich_mobile/pages/common/large_leading_tile.dart';
import 'package:immich_mobile/presentation/widgets/images/thumbnail.widget.dart';
import 'package:immich_mobile/providers/backup/drift_backup.provider.dart';
import 'package:immich_mobile/repositories/asset_media.repository.dart';
import 'package:immich_mobile/routing/router.dart';
@RoutePage()
@@ -31,55 +32,66 @@ class DriftBackupAssetDetailPage extends ConsumerWidget {
itemBuilder: (context, index) {
final asset = candidates[index];
final albumsAsyncValue = ref.watch(driftCandidateBackupAlbumInfoProvider(asset.id));
return LargeLeadingTile(
title: Text(
asset.name,
style: context.textTheme.labelLarge?.copyWith(fontWeight: FontWeight.w500, fontSize: 16),
),
subtitle: Column(
crossAxisAlignment: CrossAxisAlignment.start,
children: [
Text(
asset.createdAt.toString(),
style: TextStyle(fontSize: 13.0, color: context.colorScheme.onSurfaceSecondary),
final assetMediaRepository = ref.watch(assetMediaRepositoryProvider);
return FutureBuilder<String?>(
future: assetMediaRepository.getOriginalFilename(asset.id),
builder: (context, snapshot) {
final displayName = snapshot.data ?? asset.name;
return LargeLeadingTile(
title: Text(
displayName,
style: context.textTheme.labelLarge?.copyWith(fontWeight: FontWeight.w500, fontSize: 16),
),
Text(
asset.checksum ?? "N/A",
style: TextStyle(fontSize: 13.0, color: context.colorScheme.onSurfaceSecondary),
overflow: TextOverflow.ellipsis,
),
albumsAsyncValue.when(
data: (albums) {
if (albums.isEmpty) {
return const SizedBox.shrink();
}
return Text(
albums.map((a) => a.name).join(', '),
style: context.textTheme.labelLarge?.copyWith(color: context.primaryColor),
subtitle: Column(
crossAxisAlignment: CrossAxisAlignment.start,
children: [
Text(
asset.createdAt.toString(),
style: TextStyle(fontSize: 13.0, color: context.colorScheme.onSurfaceSecondary),
),
Text(
asset.checksum ?? "N/A",
style: TextStyle(fontSize: 13.0, color: context.colorScheme.onSurfaceSecondary),
overflow: TextOverflow.ellipsis,
);
},
error: (error, stackTrace) => Text(
'error_saving_image'.tr(args: [error.toString()]),
style: TextStyle(color: context.colorScheme.error),
),
loading: () => const SizedBox(height: 16, width: 16, child: CircularProgressIndicator.adaptive()),
),
albumsAsyncValue.when(
data: (albums) {
if (albums.isEmpty) {
return const SizedBox.shrink();
}
return Text(
albums.map((a) => a.name).join(', '),
style: context.textTheme.labelLarge?.copyWith(color: context.primaryColor),
overflow: TextOverflow.ellipsis,
);
},
error: (error, stackTrace) => Text(
'error_saving_image'.tr(args: [error.toString()]),
style: TextStyle(color: context.colorScheme.error),
),
loading: () =>
const SizedBox(height: 16, width: 16, child: CircularProgressIndicator.adaptive()),
),
],
),
],
),
leading: ClipRRect(
borderRadius: const BorderRadius.all(Radius.circular(12)),
child: SizedBox(
width: 64,
height: 64,
child: Thumbnail.fromAsset(asset: asset, size: const Size(64, 64), fit: BoxFit.cover),
),
),
trailing: const Padding(padding: EdgeInsets.only(right: 24, left: 8), child: Icon(Icons.image_search)),
onTap: () async {
await context.maybePop();
await context.navigateTo(const TabShellRoute(children: [MainTimelineRoute()]));
EventStream.shared.emit(ScrollToDateEvent(asset.createdAt));
leading: ClipRRect(
borderRadius: const BorderRadius.all(Radius.circular(12)),
child: SizedBox(
width: 64,
height: 64,
child: Thumbnail.fromAsset(asset: asset, size: const Size(64, 64), fit: BoxFit.cover),
),
),
trailing: const Padding(
padding: EdgeInsets.only(right: 24, left: 8),
child: Icon(Icons.image_search),
),
onTap: () async {
await context.maybePop();
await context.navigateTo(const TabShellRoute(children: [MainTimelineRoute()]));
EventStream.shared.emit(ScrollToDateEvent(asset.createdAt));
},
);
},
);
},
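
The tile above now resolves the asset's original filename asynchronously and falls back to the cached name until the lookup completes. A condensed sketch of that FutureBuilder pattern, with lookupOriginalFilename standing in for assetMediaRepository.getOriginalFilename:

import 'package:flutter/material.dart';

// Stand-in for assetMediaRepository.getOriginalFilename(assetId).
Future<String?> lookupOriginalFilename(String assetId) async => null;

class AssetTitle extends StatelessWidget {
  const AssetTitle({super.key, required this.assetId, required this.fallbackName});

  final String assetId;
  final String fallbackName;

  @override
  Widget build(BuildContext context) {
    return FutureBuilder<String?>(
      future: lookupOriginalFilename(assetId),
      builder: (context, snapshot) {
        // Prefer the original filename once available, otherwise the stored asset name.
        final displayName = snapshot.data ?? fallbackName;
        return Text(displayName, overflow: TextOverflow.ellipsis);
      },
    );
  }
}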

View File

@@ -170,8 +170,8 @@ class DriftUploadDetailPage extends ConsumerWidget {
);
}
Future<void> _showFileDetailDialog(BuildContext context, DriftUploadStatus item) async {
showDialog(
Future<void> _showFileDetailDialog(BuildContext context, DriftUploadStatus item) {
return showDialog(
context: context,
builder: (context) => FileDetailDialog(uploadStatus: item),
);
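
Returning showDialog's future (rather than declaring the wrapper async and discarding it) lets callers await the dialog's dismissal. A small sketch of the same shape with a generic dialog body:

import 'package:flutter/material.dart';

// Callers can `await showFileDetail(...)` to know when the dialog was closed.
Future<void> showFileDetail(BuildContext context, String fileName) {
  return showDialog<void>(
    context: context,
    builder: (context) => AlertDialog(
      title: const Text('File detail'),
      content: Text(fileName),
    ),
  );
}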

View File

@@ -33,7 +33,7 @@ class ActivitiesPage extends HookConsumerWidget {
Future<void> onAddComment(String comment) async {
await activityNotifier.addComment(comment);
// Scroll to the end of the list to show the newly added activity
listViewScrollController.animateTo(
await listViewScrollController.animateTo(
listViewScrollController.position.maxScrollExtent + 200,
duration: const Duration(milliseconds: 600),
curve: Curves.fastOutSlowIn,

View File

@@ -1,3 +1,5 @@
import 'dart:async';
import 'package:auto_route/auto_route.dart';
import 'package:easy_localization/easy_localization.dart';
import 'package:flutter/material.dart';
@@ -170,11 +172,11 @@ class CreateAlbumPage extends HookConsumerWidget {
.createAlbum(ref.read(albumTitleProvider), selectedAssets.value);
if (newAlbum != null) {
ref.read(albumProvider.notifier).refreshRemoteAlbums();
await ref.read(albumProvider.notifier).refreshRemoteAlbums();
selectedAssets.value = {};
ref.read(albumTitleProvider.notifier).clearAlbumTitle();
ref.read(albumViewerProvider.notifier).disableEditAlbum();
context.replaceRoute(AlbumViewerRoute(albumId: newAlbum.id));
unawaited(context.replaceRoute(AlbumViewerRoute(albumId: newAlbum.id)));
}
}

View File

@@ -95,7 +95,7 @@ class GalleryViewerPage extends HookConsumerWidget {
} catch (e) {
// swallow error silently
log.severe('Error precaching next image: $e');
context.maybePop();
await context.maybePop();
}
}

View File

@@ -26,6 +26,7 @@ import 'package:wakelock_plus/wakelock_plus.dart';
@RoutePage()
class NativeVideoViewerPage extends HookConsumerWidget {
static final log = Logger('NativeVideoViewer');
final Asset asset;
final bool showControls;
final int playbackDelayFactor;
@@ -59,8 +60,6 @@ class NativeVideoViewerPage extends HookConsumerWidget {
// Used to show the placeholder during hero animations for remote videos to avoid a stutter
final isVisible = useState(Platform.isIOS && asset.isLocal);
final log = Logger('NativeVideoViewerPage');
final isCasting = ref.watch(castProvider.select((c) => c.isCasting));
final isVideoReady = useState(false);
@@ -142,7 +141,7 @@ class NativeVideoViewerPage extends HookConsumerWidget {
interval: const Duration(milliseconds: 100),
maxWaitTime: const Duration(milliseconds: 200),
);
ref.listen(videoPlayerControlsProvider, (oldControls, newControls) async {
ref.listen(videoPlayerControlsProvider, (oldControls, newControls) {
final playerController = controller.value;
if (playerController == null) {
return;
@@ -153,28 +152,14 @@ class NativeVideoViewerPage extends HookConsumerWidget {
return;
}
final oldSeek = (oldControls?.position ?? 0) ~/ 1;
final newSeek = newControls.position ~/ 1;
final oldSeek = oldControls?.position.inMilliseconds;
final newSeek = newControls.position.inMilliseconds;
if (oldSeek != newSeek || newControls.restarted) {
seekDebouncer.run(() => playerController.seekTo(newSeek));
}
if (oldControls?.pause != newControls.pause || newControls.restarted) {
// Make sure the last seek is complete before pausing or playing
// Otherwise, `onPlaybackPositionChanged` can receive outdated events
if (seekDebouncer.isActive) {
await seekDebouncer.drain();
}
try {
if (newControls.pause) {
await playerController.pause();
} else {
await playerController.play();
}
} catch (error) {
log.severe('Error pausing or playing video: $error');
}
unawaited(_onPauseChange(context, playerController, seekDebouncer, newControls.pause));
}
});
@@ -234,7 +219,7 @@ class NativeVideoViewerPage extends HookConsumerWidget {
return;
}
ref.read(videoPlaybackValueProvider.notifier).position = Duration(seconds: playbackInfo.position);
ref.read(videoPlaybackValueProvider.notifier).position = Duration(milliseconds: playbackInfo.position);
// Check if the video is buffering
if (playbackInfo.status == PlaybackStatus.playing) {
@@ -282,11 +267,13 @@ class NativeVideoViewerPage extends HookConsumerWidget {
nc.onPlaybackReady.addListener(onPlaybackReady);
nc.onPlaybackEnded.addListener(onPlaybackEnded);
nc.loadVideoSource(source).catchError((error) {
log.severe('Error loading video source: $error');
});
unawaited(
nc.loadVideoSource(source).catchError((error) {
log.severe('Error loading video source: $error');
}),
);
final loopVideo = ref.read(appSettingsServiceProvider).getSetting<bool>(AppSettingsEnum.loopVideo);
nc.setLoop(loopVideo);
unawaited(nc.setLoop(loopVideo));
controller.value = nc;
Timer(const Duration(milliseconds: 200), checkIfBuffering);
@@ -357,12 +344,12 @@ class NativeVideoViewerPage extends HookConsumerWidget {
useOnAppLifecycleStateChange((_, state) async {
if (state == AppLifecycleState.resumed && shouldPlayOnForeground.value) {
controller.value?.play();
await controller.value?.play();
} else if (state == AppLifecycleState.paused) {
final videoPlaying = await controller.value?.isPlaying();
if (videoPlaying ?? true) {
shouldPlayOnForeground.value = true;
controller.value?.pause();
await controller.value?.pause();
} else {
shouldPlayOnForeground.value = false;
}
@@ -391,4 +378,35 @@ class NativeVideoViewerPage extends HookConsumerWidget {
],
);
}
Future<void> _onPauseChange(
BuildContext context,
NativeVideoPlayerController controller,
Debouncer seekDebouncer,
bool isPaused,
) async {
if (!context.mounted) {
return;
}
// Make sure the last seek is complete before pausing or playing
// Otherwise, `onPlaybackPositionChanged` can receive outdated events
if (seekDebouncer.isActive) {
await seekDebouncer.drain();
}
if (!context.mounted) {
return;
}
try {
if (isPaused) {
await controller.pause();
} else {
await controller.play();
}
} catch (error) {
log.severe('Error pausing or playing video: $error');
}
}
}
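
The extracted _onPauseChange keeps the original ordering guarantee: any pending seek is flushed before pause/play so onPlaybackPositionChanged does not see stale positions. A self-contained sketch of that ordering, with a minimal debouncer whose isActive/run/drain API is assumed to mirror the app's Debouncer:

import 'dart:async';

// Minimal debouncer sketch; drain() runs the pending action immediately.
class SimpleDebouncer {
  SimpleDebouncer(this.interval);

  final Duration interval;
  Timer? _timer;
  void Function()? _pending;

  bool get isActive => _timer?.isActive ?? false;

  void run(void Function() action) {
    _pending = action;
    _timer?.cancel();
    _timer = Timer(interval, () {
      _pending?.call();
      _pending = null;
    });
  }

  Future<void> drain() async {
    _timer?.cancel();
    _pending?.call();
    _pending = null;
  }
}

Future<void> togglePlayback({
  required SimpleDebouncer seekDebouncer,
  required Future<void> Function() pause,
  required Future<void> Function() play,
  required bool shouldPause,
}) async {
  // Flush any in-flight seek first so position callbacks are not outdated.
  if (seekDebouncer.isActive) {
    await seekDebouncer.drain();
  }
  if (shouldPause) {
    await pause();
  } else {
    await play();
  }
}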

View File

@@ -55,48 +55,50 @@ class SplashScreenPageState extends ConsumerState<SplashScreenPage> {
final backgroundManager = ref.read(backgroundSyncProvider);
final backupProvider = ref.read(driftBackupProvider.notifier);
ref.read(authProvider.notifier).saveAuthInfo(accessToken: accessToken).then(
(_) async {
try {
wsProvider.connect();
infoProvider.getServerInfo();
unawaited(
ref.read(authProvider.notifier).saveAuthInfo(accessToken: accessToken).then(
(_) async {
try {
wsProvider.connect();
unawaited(infoProvider.getServerInfo());
if (Store.isBetaTimelineEnabled) {
bool syncSuccess = false;
await Future.wait([
backgroundManager.syncLocal(),
backgroundManager.syncRemote().then((success) => syncSuccess = success),
]);
if (syncSuccess) {
if (Store.isBetaTimelineEnabled) {
bool syncSuccess = false;
await Future.wait([
backgroundManager.hashAssets().then((_) {
_resumeBackup(backupProvider);
}),
_resumeBackup(backupProvider),
backgroundManager.syncLocal(),
backgroundManager.syncRemote().then((success) => syncSuccess = success),
]);
} else {
await backgroundManager.hashAssets();
}
if (Store.get(StoreKey.syncAlbums, false)) {
await backgroundManager.syncLinkedAlbum();
if (syncSuccess) {
await Future.wait([
backgroundManager.hashAssets().then((_) {
_resumeBackup(backupProvider);
}),
_resumeBackup(backupProvider),
]);
} else {
await backgroundManager.hashAssets();
}
if (Store.get(StoreKey.syncAlbums, false)) {
await backgroundManager.syncLinkedAlbum();
}
}
} catch (e) {
log.severe('Failed establishing connection to the server: $e');
}
} catch (e) {
log.severe('Failed establishing connection to the server: $e');
}
},
onError: (exception) => {
log.severe('Failed to update auth info with access token: $accessToken'),
ref.read(authProvider.notifier).logout(),
context.replaceRoute(const LoginRoute()),
},
},
onError: (exception) => {
log.severe('Failed to update auth info with access token: $accessToken'),
ref.read(authProvider.notifier).logout(),
context.replaceRoute(const LoginRoute()),
},
),
);
} else {
log.severe('Missing crucial offline login info - Logging out completely');
ref.read(authProvider.notifier).logout();
context.replaceRoute(const LoginRoute());
unawaited(ref.read(authProvider.notifier).logout());
unawaited(context.replaceRoute(const LoginRoute()));
return;
}
@@ -106,11 +108,11 @@ class SplashScreenPageState extends ConsumerState<SplashScreenPage> {
final needBetaMigration = Store.get(StoreKey.needBetaMigration, false);
if (needBetaMigration) {
await Store.put(StoreKey.needBetaMigration, false);
context.router.replaceAll([ChangeExperienceRoute(switchingToBeta: true)]);
unawaited(context.router.replaceAll([ChangeExperienceRoute(switchingToBeta: true)]));
return;
}
context.replaceRoute(Store.isBetaTimelineEnabled ? const TabShellRoute() : const TabControllerRoute());
unawaited(context.replaceRoute(Store.isBetaTimelineEnabled ? const TabShellRoute() : const TabControllerRoute()));
}
if (Store.isBetaTimelineEnabled) {
@@ -120,7 +122,7 @@ class SplashScreenPageState extends ConsumerState<SplashScreenPage> {
final hasPermission = await ref.read(galleryPermissionNotifier.notifier).hasPermission;
if (hasPermission) {
// Resume backup (if enable) then navigate
ref.watch(backupProvider.notifier).resumeBackup();
await ref.watch(backupProvider.notifier).resumeBackup();
}
}
@@ -130,7 +132,7 @@ class SplashScreenPageState extends ConsumerState<SplashScreenPage> {
if (isEnableBackup) {
final currentUser = Store.tryGet(StoreKey.currentUser);
if (currentUser != null) {
notifier.handleBackupResume(currentUser.id);
unawaited(notifier.handleBackupResume(currentUser.id));
}
}
}
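
The splash screen now runs local and remote sync concurrently and only re-hashes plus resumes backup when the remote sync reported success. A simplified, mostly sequential sketch of that flow (the real page runs several of these steps concurrently; the function names below are placeholders for the backgroundManager and backup provider calls):

import 'dart:async';

// Placeholders for backgroundManager.syncLocal/syncRemote/hashAssets and
// the backup resume call; the signatures are assumptions for illustration.
Future<void> syncLocal() async {}
Future<bool> syncRemote() async => true;
Future<void> hashAssets() async {}
Future<void> resumeBackup() async {}

Future<void> startupSync() async {
  var syncSuccess = false;
  // Local and remote sync run concurrently; remember the remote result.
  await Future.wait([
    syncLocal(),
    syncRemote().then((success) => syncSuccess = success),
  ]);

  if (syncSuccess) {
    await hashAssets();
    await resumeBackup();
  } else {
    // Remote sync failed: still hash local assets, but skip resuming backup.
    await hashAssets();
  }
}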

View File

@@ -7,6 +7,7 @@ import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/domain/models/timeline.model.dart';
import 'package:immich_mobile/domain/utils/event_stream.dart';
import 'package:immich_mobile/extensions/build_context_extensions.dart';
import 'package:immich_mobile/presentation/pages/search/paginated_search.provider.dart';
import 'package:immich_mobile/providers/haptic_feedback.provider.dart';
import 'package:immich_mobile/providers/infrastructure/album.provider.dart';
import 'package:immich_mobile/providers/infrastructure/memory.provider.dart';
@@ -77,7 +78,7 @@ class _TabShellPageState extends ConsumerState<TabShellPage> {
}
return AutoTabsRouter(
routes: [const MainTimelineRoute(), DriftSearchRoute(), const DriftAlbumsRoute(), const DriftLibraryRoute()],
routes: const [MainTimelineRoute(), DriftSearchRoute(), DriftAlbumsRoute(), DriftLibraryRoute()],
duration: const Duration(milliseconds: 600),
transitionBuilder: (context, child, animation) => FadeTransition(opacity: animation, child: child),
builder: (context, child) {
@@ -114,6 +115,10 @@ void _onNavigationSelected(TabsRouter router, int index, WidgetRef ref) {
ref.invalidate(driftMemoryFutureProvider);
}
if (router.activeIndex != 1 && index == 1) {
ref.read(searchPreFilterProvider.notifier).clear();
}
// On Search page tapped
if (router.activeIndex == 1 && index == 1) {
ref.read(searchInputFocusProvider).requestFocus();

View File

@@ -1,13 +1,16 @@
import 'package:flutter/material.dart';
import 'dart:async';
import 'package:auto_route/auto_route.dart';
import 'package:crop_image/crop_image.dart';
import 'package:easy_localization/easy_localization.dart';
import 'package:flutter/material.dart';
import 'package:flutter_hooks/flutter_hooks.dart';
import 'package:immich_mobile/entities/asset.entity.dart';
import 'package:immich_mobile/extensions/build_context_extensions.dart';
import 'package:immich_mobile/routing/router.dart';
import 'package:immich_mobile/utils/hooks/crop_controller_hook.dart';
import 'package:immich_mobile/entities/asset.entity.dart';
import 'edit.page.dart';
import 'package:easy_localization/easy_localization.dart';
import 'package:auto_route/auto_route.dart';
/// A widget for cropping an image.
/// This widget uses [HookWidget] to manage its lifecycle and state. It allows
@@ -35,7 +38,7 @@ class CropImagePage extends HookWidget {
icon: Icon(Icons.done_rounded, color: context.primaryColor, size: 24),
onPressed: () async {
final croppedImage = await cropController.croppedImage();
context.pushRoute(EditImageRoute(asset: asset, image: croppedImage, isEdited: true));
unawaited(context.pushRoute(EditImageRoute(asset: asset, image: croppedImage, isEdited: true)));
},
),
],

View File

@@ -1,12 +1,13 @@
import 'dart:async';
import 'dart:ui' as ui;
import 'package:auto_route/auto_route.dart';
import 'package:easy_localization/easy_localization.dart';
import 'package:flutter/material.dart';
import 'package:flutter_hooks/flutter_hooks.dart';
import 'package:immich_mobile/extensions/build_context_extensions.dart';
import 'package:immich_mobile/entities/asset.entity.dart';
import 'package:immich_mobile/constants/filters.dart';
import 'package:easy_localization/easy_localization.dart';
import 'package:auto_route/auto_route.dart';
import 'package:immich_mobile/entities/asset.entity.dart';
import 'package:immich_mobile/extensions/build_context_extensions.dart';
import 'package:immich_mobile/routing/router.dart';
/// A widget for filtering an image.
@@ -74,7 +75,7 @@ class FilterImagePage extends HookWidget {
icon: Icon(Icons.done_rounded, color: context.primaryColor, size: 24),
onPressed: () async {
final filteredImage = await applyFilterAndConvert(colorFilter.value);
context.pushRoute(EditImageRoute(asset: asset, image: filteredImage, isEdited: true));
unawaited(context.pushRoute(EditImageRoute(asset: asset, image: filteredImage, isEdited: true)));
},
),
],

View File

@@ -1,14 +1,16 @@
import 'dart:async';
import 'package:auto_route/auto_route.dart';
import 'package:easy_localization/easy_localization.dart';
import 'package:flutter/material.dart';
import 'package:flutter_hooks/flutter_hooks.dart' show useState;
import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/entities/store.entity.dart';
import 'package:immich_mobile/extensions/build_context_extensions.dart';
import 'package:immich_mobile/providers/local_auth.provider.dart';
import 'package:immich_mobile/routing/router.dart';
import 'package:immich_mobile/widgets/forms/pin_registration_form.dart';
import 'package:immich_mobile/widgets/forms/pin_verification_form.dart';
import 'package:immich_mobile/entities/store.entity.dart';
@RoutePage()
class PinAuthPage extends HookConsumerWidget {
@@ -35,9 +37,9 @@ class PinAuthPage extends HookConsumerWidget {
);
if (isBetaTimeline) {
context.replaceRoute(const DriftLockedFolderRoute());
unawaited(context.replaceRoute(const DriftLockedFolderRoute()));
} else {
context.replaceRoute(const LockedRoute());
unawaited(context.replaceRoute(const LockedRoute()));
}
}
}

View File

@@ -333,7 +333,7 @@ class SharedLinkEditPage extends HookConsumerWidget {
changeExpiry: changeExpiry,
);
ref.invalidate(sharedLinksStateProvider);
context.maybePop();
await context.maybePop();
}
return Scaffold(

View File

@@ -82,10 +82,12 @@ class PhotosPage extends HookConsumerWidget {
final fullRefresh = refreshCount.value > 0;
if (fullRefresh) {
Future.wait([
ref.read(assetProvider.notifier).getAllAsset(clear: true),
ref.read(albumProvider.notifier).refreshRemoteAlbums(),
]);
unawaited(
Future.wait([
ref.read(assetProvider.notifier).getAllAsset(clear: true),
ref.read(albumProvider.notifier).refreshRemoteAlbums(),
]),
);
// refresh was forced: user requested another refresh within 2 seconds
refreshCount.value = 0;

View File

@@ -1,3 +1,4 @@
import 'dart:async';
import 'dart:math';
import 'package:auto_route/auto_route.dart';
@@ -83,7 +84,7 @@ class MapPage extends HookConsumerWidget {
isLoading.value = true;
markers.value = await ref.read(mapMarkersProvider.future);
assetsDebouncer.run(updateAssetsInBounds);
reloadLayers();
await reloadLayers();
} finally {
isLoading.value = false;
}
@@ -128,7 +129,7 @@ class MapPage extends HookConsumerWidget {
);
if (marker != null) {
updateAssetMarkerPosition(marker);
await updateAssetMarkerPosition(marker);
} else {
// If no asset was previously selected and no new asset is available, close the bottom sheet
if (selectedMarker.value == null) {
@@ -165,7 +166,7 @@ class MapPage extends HookConsumerWidget {
if (asset.isVideo) {
ref.read(showControlsProvider.notifier).show = false;
}
context.pushRoute(GalleryViewerRoute(initialIndex: 0, heroOffset: 0, renderList: renderList));
unawaited(context.pushRoute(GalleryViewerRoute(initialIndex: 0, heroOffset: 0, renderList: renderList)));
}
/// BOTTOM SHEET CALLBACKS
@@ -209,7 +210,7 @@ class MapPage extends HookConsumerWidget {
}
if (mapController.value != null && location != null) {
mapController.value!.animateCamera(
await mapController.value!.animateCamera(
CameraUpdate.newLatLngZoom(LatLng(location.latitude, location.longitude), mapZoomToAssetLevel),
duration: const Duration(milliseconds: 800),
);

View File

@@ -8,9 +8,9 @@ import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/extensions/asyncvalue_extensions.dart';
import 'package:immich_mobile/extensions/build_context_extensions.dart';
import 'package:immich_mobile/extensions/maplibrecontroller_extensions.dart';
import 'package:immich_mobile/utils/map_utils.dart';
import 'package:immich_mobile/widgets/map/map_theme_override.dart';
import 'package:maplibre_gl/maplibre_gl.dart';
import 'package:immich_mobile/utils/map_utils.dart';
@RoutePage()
class MapLocationPickerPage extends HookConsumerWidget {
@@ -30,7 +30,7 @@ class MapLocationPickerPage extends HookConsumerWidget {
Future<void> onMapClick(Point<num> point, LatLng centre) async {
selectedLatLng.value = centre;
controller.value?.animateCamera(CameraUpdate.newLatLng(centre));
await controller.value?.animateCamera(CameraUpdate.newLatLng(centre));
if (marker.value != null) {
await controller.value?.updateSymbol(marker.value!, SymbolOptions(geometry: centre));
}
@@ -49,7 +49,7 @@ class MapLocationPickerPage extends HookConsumerWidget {
var currentLatLng = LatLng(currentLocation.latitude, currentLocation.longitude);
selectedLatLng.value = currentLatLng;
controller.value?.animateCamera(CameraUpdate.newLatLng(currentLatLng));
await controller.value?.animateCamera(CameraUpdate.newLatLng(currentLatLng));
}
return MapThemeOverride(

View File

@@ -266,7 +266,7 @@ class SearchPage extends HookConsumerWidget {
filter.value = filter.value.copyWith(date: SearchDateFilter());
dateRangeCurrentFilterWidget.value = null;
search();
unawaited(search());
return;
}
@@ -295,7 +295,7 @@ class SearchPage extends HookConsumerWidget {
);
}
search();
unawaited(search());
}
// MEDIA PICKER
@@ -389,15 +389,18 @@ class SearchPage extends HookConsumerWidget {
handleTextSubmitted(String value) {
switch (textSearchType.value) {
case TextSearchType.context:
filter.value = filter.value.copyWith(filename: '', context: value, description: '');
filter.value = filter.value.copyWith(filename: '', context: value, description: '', ocr: '');
break;
case TextSearchType.filename:
filter.value = filter.value.copyWith(filename: value, context: '', description: '');
filter.value = filter.value.copyWith(filename: value, context: '', description: '', ocr: '');
break;
case TextSearchType.description:
filter.value = filter.value.copyWith(filename: '', context: '', description: value);
filter.value = filter.value.copyWith(filename: '', context: '', description: value, ocr: '');
break;
case TextSearchType.ocr:
filter.value = filter.value.copyWith(filename: '', context: '', description: '', ocr: value);
break;
}
@@ -408,6 +411,7 @@ class SearchPage extends HookConsumerWidget {
TextSearchType.context => Icons.image_search_rounded,
TextSearchType.filename => Icons.abc_rounded,
TextSearchType.description => Icons.text_snippet_outlined,
TextSearchType.ocr => Icons.document_scanner_outlined,
};
return Scaffold(
@@ -493,6 +497,24 @@ class SearchPage extends HookConsumerWidget {
searchHintText.value = 'search_by_description_example'.tr();
},
),
MenuItemButton(
child: ListTile(
leading: const Icon(Icons.document_scanner_outlined),
title: Text(
'search_filter_ocr'.tr(),
style: context.textTheme.bodyLarge?.copyWith(
fontWeight: FontWeight.w500,
color: textSearchType.value == TextSearchType.ocr ? context.colorScheme.primary : null,
),
),
selectedColor: context.colorScheme.primary,
selected: textSearchType.value == TextSearchType.ocr,
),
onPressed: () {
textSearchType.value = TextSearchType.ocr;
searchHintText.value = 'search_by_ocr_example'.tr();
},
),
],
),
),
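
handleTextSubmitted keeps exactly one text field populated per search type, clearing the others so the server only sees the active query. A compact sketch of that mapping; the enum shape is inferred from its usage here, and a record stands in for SearchFilter.copyWith's named text parameters:

// Inferred from usage above; the real definition lives in the search filter model.
enum TextSearchType { context, filename, description, ocr }

// Only the field matching the active search type carries the query.
({String context, String filename, String description, String ocr}) textFieldsFor(
  TextSearchType type,
  String value,
) {
  return switch (type) {
    TextSearchType.context => (context: value, filename: '', description: '', ocr: ''),
    TextSearchType.filename => (context: '', filename: value, description: '', ocr: ''),
    TextSearchType.description => (context: '', filename: '', description: value, ocr: ''),
    TextSearchType.ocr => (context: '', filename: '', description: '', ocr: value),
  };
}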

View File

@@ -2,6 +2,7 @@ import 'package:auto_route/auto_route.dart';
import 'package:flutter/material.dart';
import 'package:flutter_hooks/flutter_hooks.dart' hide Store;
import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/domain/models/album/album.model.dart';
import 'package:immich_mobile/domain/models/asset/base_asset.model.dart';
import 'package:immich_mobile/extensions/asyncvalue_extensions.dart';
import 'package:immich_mobile/extensions/build_context_extensions.dart';
@@ -20,11 +21,12 @@ import 'package:immich_mobile/widgets/common/user_circle_avatar.dart';
@RoutePage()
class DriftActivitiesPage extends HookConsumerWidget {
const DriftActivitiesPage({super.key});
final RemoteAlbum album;
const DriftActivitiesPage({super.key, required this.album});
@override
Widget build(BuildContext context, WidgetRef ref) {
final album = ref.watch(currentRemoteAlbumProvider)!;
final asset = ref.read(currentAssetNotifier) as RemoteAsset?;
final activityNotifier = ref.read(albumActivityProvider(album.id, asset?.id).notifier);
@@ -40,49 +42,52 @@ class DriftActivitiesPage extends HookConsumerWidget {
scrollToBottom();
}
return Scaffold(
appBar: AppBar(
title: asset == null ? Text(album.name) : null,
actions: [const LikeActivityActionButton(menuItem: true)],
actionsPadding: const EdgeInsets.only(right: 8),
),
body: activities.widgetWhen(
onData: (data) {
final List<Widget> activityWidgets = [];
for (final activity in data.reversed) {
activityWidgets.add(
Padding(
padding: const EdgeInsets.symmetric(horizontal: 8, vertical: 6),
child: _CommentBubble(activity: activity),
return ProviderScope(
overrides: [currentRemoteAlbumScopedProvider.overrideWithValue(album)],
child: Scaffold(
appBar: AppBar(
title: asset == null ? Text(album.name) : null,
actions: [const LikeActivityActionButton(menuItem: true)],
actionsPadding: const EdgeInsets.only(right: 8),
),
body: activities.widgetWhen(
onData: (data) {
final List<Widget> activityWidgets = [];
for (final activity in data.reversed) {
activityWidgets.add(
Padding(
padding: const EdgeInsets.symmetric(horizontal: 8, vertical: 6),
child: _CommentBubble(activity: activity),
),
);
}
return SafeArea(
child: Stack(
children: [
ListView(
controller: listViewScrollController,
padding: const EdgeInsets.only(top: 8, bottom: 80),
reverse: true,
children: activityWidgets,
),
Align(
alignment: Alignment.bottomCenter,
child: Container(
decoration: BoxDecoration(
color: context.scaffoldBackgroundColor,
border: Border(top: BorderSide(color: context.colorScheme.secondaryContainer, width: 1)),
),
child: DriftActivityTextField(isEnabled: album.isActivityEnabled, onSubmit: onAddComment),
),
),
],
),
);
}
return SafeArea(
child: Stack(
children: [
ListView(
controller: listViewScrollController,
padding: const EdgeInsets.only(top: 8, bottom: 80),
reverse: true,
children: activityWidgets,
),
Align(
alignment: Alignment.bottomCenter,
child: Container(
decoration: BoxDecoration(
color: context.scaffoldBackgroundColor,
border: Border(top: BorderSide(color: context.colorScheme.secondaryContainer, width: 1)),
),
child: DriftActivityTextField(isEnabled: album.isActivityEnabled, onSubmit: onAddComment),
),
),
],
),
);
},
},
),
resizeToAvoidBottomInset: true,
),
resizeToAvoidBottomInset: true,
);
}
}
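
The page above now receives its album explicitly and exposes it to descendants through a ProviderScope override rather than a mutable current-album notifier. A minimal sketch of that scoping pattern, with a String standing in for RemoteAlbum since the scoped provider's definition is outside this diff:

import 'package:flutter/material.dart';
import 'package:hooks_riverpod/hooks_riverpod.dart';

// Sketch of a provider meant to be overridden per subtree.
final currentAlbumNameScopedProvider = Provider<String?>((ref) => null);

class AlbumScope extends StatelessWidget {
  const AlbumScope({super.key, required this.albumName, required this.child});

  final String albumName;
  final Widget child;

  @override
  Widget build(BuildContext context) {
    // Every widget below this point reads the album from the override
    // instead of a globally mutated "current album" state.
    return ProviderScope(
      overrides: [currentAlbumNameScopedProvider.overrideWithValue(albumName)],
      child: child,
    );
  }
}

class AlbumTitle extends ConsumerWidget {
  const AlbumTitle({super.key});

  @override
  Widget build(BuildContext context, WidgetRef ref) {
    final name = ref.watch(currentAlbumNameScopedProvider) ?? 'Unknown album';
    return Text(name);
  }
}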

View File

@@ -5,7 +5,6 @@ import 'package:flutter/material.dart';
import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/presentation/widgets/album/album_selector.widget.dart';
import 'package:immich_mobile/providers/infrastructure/album.provider.dart';
import 'package:immich_mobile/providers/infrastructure/current_album.provider.dart';
import 'package:immich_mobile/routing/router.dart';
import 'package:immich_mobile/widgets/common/immich_sliver_app_bar.dart';
@@ -43,7 +42,6 @@ class _DriftAlbumsPageState extends ConsumerState<DriftAlbumsPage> {
),
AlbumSelector(
onAlbumSelected: (album) {
ref.read(currentRemoteAlbumProvider.notifier).setAlbum(album);
context.router.push(RemoteAlbumRoute(album: album));
},
),

View File

@@ -1,9 +1,12 @@
import 'dart:async';
import 'package:auto_route/auto_route.dart';
import 'package:collection/collection.dart';
import 'package:flutter/material.dart';
import 'package:flutter_hooks/flutter_hooks.dart';
import 'package:fluttertoast/fluttertoast.dart';
import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/domain/models/album/album.model.dart';
import 'package:immich_mobile/domain/models/user.model.dart';
import 'package:immich_mobile/extensions/build_context_extensions.dart';
import 'package:immich_mobile/extensions/theme_extensions.dart';
@@ -20,15 +23,11 @@ import 'package:immich_mobile/widgets/common/user_circle_avatar.dart';
@RoutePage()
class DriftAlbumOptionsPage extends HookConsumerWidget {
const DriftAlbumOptionsPage({super.key});
final RemoteAlbum album;
const DriftAlbumOptionsPage({super.key, required this.album});
@override
Widget build(BuildContext context, WidgetRef ref) {
final album = ref.watch(currentRemoteAlbumProvider);
if (album == null) {
return const SizedBox();
}
final sharedUsersAsync = ref.watch(remoteAlbumSharedUsersProvider(album.id));
final userId = ref.watch(authProvider).userId;
final activityEnabled = useState(album.isActivityEnabled);
@@ -47,7 +46,7 @@ class DriftAlbumOptionsPage extends HookConsumerWidget {
void leaveAlbum() async {
try {
await ref.read(remoteAlbumProvider.notifier).leaveAlbum(album.id, userId: userId);
context.navigateTo(const DriftAlbumsRoute());
unawaited(context.navigateTo(const DriftAlbumsRoute()));
} catch (_) {
showErrorMessage();
}
@@ -189,48 +188,51 @@ class DriftAlbumOptionsPage extends HookConsumerWidget {
);
}
return Scaffold(
appBar: AppBar(
leading: IconButton(
icon: const Icon(Icons.arrow_back_ios_new_rounded),
onPressed: () => context.maybePop(null),
return ProviderScope(
overrides: [currentRemoteAlbumScopedProvider.overrideWithValue(album)],
child: Scaffold(
appBar: AppBar(
leading: IconButton(
icon: const Icon(Icons.arrow_back_ios_new_rounded),
onPressed: () => context.maybePop(null),
),
centerTitle: true,
title: Text("options".t(context: context)),
),
centerTitle: true,
title: Text("options".t(context: context)),
),
body: ListView(
children: [
const SizedBox(height: 8),
if (isOwner)
SwitchListTile.adaptive(
value: activityEnabled.value,
onChanged: (bool value) async {
activityEnabled.value = value;
await ref.read(remoteAlbumProvider.notifier).setActivityStatus(album.id, value);
},
activeThumbColor: activityEnabled.value ? context.primaryColor : context.themeData.disabledColor,
dense: true,
title: Text(
"comments_and_likes",
style: context.textTheme.titleMedium?.copyWith(fontWeight: FontWeight.w500),
).t(context: context),
subtitle: Text(
"let_others_respond",
style: context.textTheme.labelLarge?.copyWith(color: context.colorScheme.onSurfaceSecondary),
).t(context: context),
),
buildSectionTitle("shared_album_section_people_title".t(context: context)),
if (isOwner) ...[
ListTile(
leading: const Icon(Icons.person_add_rounded),
title: Text("invite_people".t(context: context)),
onTap: () async => addUsers(),
),
const Divider(indent: 16),
body: ListView(
children: [
const SizedBox(height: 8),
if (isOwner)
SwitchListTile.adaptive(
value: activityEnabled.value,
onChanged: (bool value) async {
activityEnabled.value = value;
await ref.read(remoteAlbumProvider.notifier).setActivityStatus(album.id, value);
},
activeThumbColor: activityEnabled.value ? context.primaryColor : context.themeData.disabledColor,
dense: true,
title: Text(
"comments_and_likes",
style: context.textTheme.titleMedium?.copyWith(fontWeight: FontWeight.w500),
).t(context: context),
subtitle: Text(
"let_others_respond",
style: context.textTheme.labelLarge?.copyWith(color: context.colorScheme.onSurfaceSecondary),
).t(context: context),
),
buildSectionTitle("shared_album_section_people_title".t(context: context)),
if (isOwner) ...[
ListTile(
leading: const Icon(Icons.person_add_rounded),
title: Text("invite_people".t(context: context)),
onTap: () async => addUsers(),
),
const Divider(indent: 16),
],
buildOwnerInfo(),
buildSharedUsersList(),
],
buildOwnerInfo(),
buildSharedUsersList(),
],
),
),
);
}

View File

@@ -1,3 +1,5 @@
import 'dart:async';
import 'package:auto_route/auto_route.dart';
import 'package:flutter/material.dart';
import 'package:hooks_riverpod/hooks_riverpod.dart';
@@ -6,7 +8,6 @@ import 'package:immich_mobile/extensions/build_context_extensions.dart';
import 'package:immich_mobile/extensions/translate_extensions.dart';
import 'package:immich_mobile/presentation/widgets/images/thumbnail.widget.dart';
import 'package:immich_mobile/providers/infrastructure/album.provider.dart';
import 'package:immich_mobile/providers/infrastructure/current_album.provider.dart';
import 'package:immich_mobile/routing/router.dart';
import 'package:immich_mobile/widgets/album/album_action_filled_button.dart';
@@ -178,8 +179,7 @@ class _DriftCreateAlbumPageState extends ConsumerState<DriftCreateAlbumPage> {
);
if (album != null) {
ref.read(currentRemoteAlbumProvider.notifier).setAlbum(album);
context.replaceRoute(RemoteAlbumRoute(album: album));
unawaited(context.replaceRoute(RemoteAlbumRoute(album: album)));
}
}

View File

@@ -1,3 +1,5 @@
import 'dart:async';
import 'package:auto_route/auto_route.dart';
import 'package:flutter/material.dart';
import 'package:flutter/services.dart';
@@ -139,7 +141,7 @@ class _RemoteAlbumPageState extends ConsumerState<RemoteAlbumPage> {
toastType: ToastType.success,
);
context.pushRoute(const DriftAlbumsRoute());
unawaited(context.pushRoute(const DriftAlbumsRoute()));
} catch (e) {
ImmichToast.show(
context: context,
@@ -161,12 +163,12 @@ class _RemoteAlbumPageState extends ConsumerState<RemoteAlbumPage> {
setState(() {
_album = _album.copyWith(name: result.name, description: result.description ?? '');
});
HapticFeedback.mediumImpact();
unawaited(HapticFeedback.mediumImpact());
}
}
Future<void> showActivity(BuildContext context) async {
context.pushRoute(const DriftActivitiesRoute());
unawaited(context.pushRoute(DriftActivitiesRoute(album: _album)));
}
Future<void> showOptionSheet(BuildContext context) async {
@@ -175,56 +177,58 @@ class _RemoteAlbumPageState extends ConsumerState<RemoteAlbumPage> {
final canAddPhotos =
await ref.read(remoteAlbumServiceProvider).getUserRole(_album.id, user?.id ?? '') == AlbumUserRole.editor;
showModalBottomSheet(
context: context,
backgroundColor: context.colorScheme.surface,
isScrollControlled: false,
builder: (context) {
return DriftRemoteAlbumOption(
onDeleteAlbum: isOwner
? () async {
await deleteAlbum(context);
if (context.mounted) {
unawaited(
showModalBottomSheet(
context: context,
backgroundColor: context.colorScheme.surface,
isScrollControlled: false,
builder: (context) {
return DriftRemoteAlbumOption(
onDeleteAlbum: isOwner
? () async {
await deleteAlbum(context);
if (context.mounted) {
context.pop();
}
}
: null,
onAddUsers: isOwner
? () async {
await addUsers(context);
context.pop();
}
}
: null,
onAddUsers: isOwner
? () async {
await addUsers(context);
context.pop();
}
: null,
onAddPhotos: isOwner || canAddPhotos
? () async {
await addAssets(context);
context.pop();
}
: null,
onToggleAlbumOrder: isOwner
? () async {
await toggleAlbumOrder();
context.pop();
}
: null,
onEditAlbum: isOwner
? () async {
context.pop();
await showEditTitleAndDescription(context);
}
: null,
onCreateSharedLink: isOwner
? () async {
context.pop();
context.pushRoute(SharedLinkEditRoute(albumId: _album.id));
}
: null,
onShowOptions: () {
context.pop();
context.pushRoute(const DriftAlbumOptionsRoute());
},
);
},
: null,
onAddPhotos: isOwner || canAddPhotos
? () async {
await addAssets(context);
context.pop();
}
: null,
onToggleAlbumOrder: isOwner
? () async {
await toggleAlbumOrder();
context.pop();
}
: null,
onEditAlbum: isOwner
? () async {
context.pop();
await showEditTitleAndDescription(context);
}
: null,
onCreateSharedLink: isOwner
? () async {
context.pop();
unawaited(context.pushRoute(SharedLinkEditRoute(albumId: _album.id)));
}
: null,
onShowOptions: () {
context.pop();
context.pushRoute(DriftAlbumOptionsRoute(album: _album));
},
);
},
),
);
}
@@ -233,35 +237,24 @@ class _RemoteAlbumPageState extends ConsumerState<RemoteAlbumPage> {
final user = ref.watch(currentUserProvider);
final isOwner = user != null ? user.id == _album.ownerId : false;
return PopScope(
onPopInvokedWithResult: (didPop, _) {
if (didPop) {
Future.microtask(() {
if (mounted) {
ref.read(currentRemoteAlbumProvider.notifier).dispose();
ref.read(remoteAlbumProvider.notifier).refresh();
}
});
}
},
child: ProviderScope(
overrides: [
timelineServiceProvider.overrideWith((ref) {
final timelineService = ref.watch(timelineFactoryProvider).remoteAlbum(albumId: _album.id);
ref.onDispose(timelineService.dispose);
return timelineService;
}),
],
child: Timeline(
appBar: RemoteAlbumSliverAppBar(
icon: Icons.photo_album_outlined,
onShowOptions: () => showOptionSheet(context),
onToggleAlbumOrder: isOwner ? () => toggleAlbumOrder() : null,
onEditTitle: isOwner ? () => showEditTitleAndDescription(context) : null,
onActivity: () => showActivity(context),
),
bottomSheet: RemoteAlbumBottomSheet(album: _album),
return ProviderScope(
overrides: [
timelineServiceProvider.overrideWith((ref) {
final timelineService = ref.watch(timelineFactoryProvider).remoteAlbum(albumId: _album.id);
ref.onDispose(timelineService.dispose);
return timelineService;
}),
currentRemoteAlbumScopedProvider.overrideWithValue(_album),
],
child: Timeline(
appBar: RemoteAlbumSliverAppBar(
icon: Icons.photo_album_outlined,
onShowOptions: () => showOptionSheet(context),
onToggleAlbumOrder: isOwner ? () => toggleAlbumOrder() : null,
onEditTitle: isOwner ? () => showEditTitleAndDescription(context) : null,
onActivity: () => showActivity(context),
),
bottomSheet: RemoteAlbumBottomSheet(album: _album),
),
);
}

View File

@@ -1,3 +1,5 @@
import 'dart:async';
import 'package:auto_route/auto_route.dart';
import 'package:crop_image/crop_image.dart';
import 'package:easy_localization/easy_localization.dart';
@@ -34,7 +36,7 @@ class DriftCropImagePage extends HookWidget {
icon: Icon(Icons.done_rounded, color: context.primaryColor, size: 24),
onPressed: () async {
final croppedImage = await cropController.croppedImage();
context.pushRoute(DriftEditImageRoute(asset: asset, image: croppedImage, isEdited: true));
unawaited(context.pushRoute(DriftEditImageRoute(asset: asset, image: croppedImage, isEdited: true)));
},
),
],

View File

@@ -70,7 +70,7 @@ class DriftEditImagePage extends ConsumerWidget {
Logger("SaveEditedImage").warning("Failed to retrieve the saved image back from OS", e);
}
ref.read(backgroundSyncProvider).syncLocal(full: true);
unawaited(ref.read(backgroundSyncProvider).syncLocal(full: true));
_exitEditing(context);
ImmichToast.show(durationInSecond: 3, context: context, msg: 'Image Saved!');

View File

@@ -75,7 +75,7 @@ class DriftFilterImagePage extends HookWidget {
icon: Icon(Icons.done_rounded, color: context.primaryColor, size: 24),
onPressed: () async {
final filteredImage = await applyFilterAndConvert(colorFilter.value);
context.pushRoute(DriftEditImageRoute(asset: asset, image: filteredImage, isEdited: true));
unawaited(context.pushRoute(DriftEditImageRoute(asset: asset, image: filteredImage, isEdited: true)));
},
),
],

View File

@@ -19,6 +19,7 @@ import 'package:immich_mobile/presentation/widgets/timeline/timeline.widget.dart
import 'package:immich_mobile/providers/infrastructure/timeline.provider.dart';
import 'package:immich_mobile/providers/search/search_input_focus.provider.dart';
import 'package:immich_mobile/routing/router.dart';
import 'package:immich_mobile/widgets/common/feature_check.dart';
import 'package:immich_mobile/widgets/common/search_field.dart';
import 'package:immich_mobile/widgets/search/search_filter/camera_picker.dart';
import 'package:immich_mobile/widgets/search/search_filter/display_option_picker.dart';
@@ -31,15 +32,14 @@ import 'package:immich_mobile/widgets/search/search_filter/search_filter_utils.d
@RoutePage()
class DriftSearchPage extends HookConsumerWidget {
const DriftSearchPage({super.key, this.preFilter});
final SearchFilter? preFilter;
const DriftSearchPage({super.key});
@override
Widget build(BuildContext context, WidgetRef ref) {
final textSearchType = useState<TextSearchType>(TextSearchType.context);
final searchHintText = useState<String>('sunrise_on_the_beach'.t(context: context));
final textSearchController = useTextEditingController();
final preFilter = ref.watch(searchPreFilterProvider);
final filter = useState<SearchFilter>(
SearchFilter(
people: preFilter?.people ?? {},
@@ -49,6 +49,7 @@ class DriftSearchPage extends HookConsumerWidget {
display: preFilter?.display ?? SearchDisplayFilters(isNotInAlbum: false, isArchive: false, isFavorite: false),
mediaType: preFilter?.mediaType ?? AssetType.other,
language: "${context.locale.languageCode}-${context.locale.countryCode}",
assetId: preFilter?.assetId,
),
);
@@ -109,8 +110,8 @@ class DriftSearchPage extends HookConsumerWidget {
Future.delayed(Duration.zero, () {
search();
if (preFilter!.location.city != null) {
locationCurrentFilterWidget.value = Text(preFilter!.location.city!, style: context.textTheme.labelLarge);
if (preFilter.location.city != null) {
locationCurrentFilterWidget.value = Text(preFilter.location.city!, style: context.textTheme.labelLarge);
}
});
}
@@ -271,7 +272,7 @@ class DriftSearchPage extends HookConsumerWidget {
filter.value = filter.value.copyWith(date: SearchDateFilter());
dateRangeCurrentFilterWidget.value = null;
search();
unawaited(search());
return;
}
@@ -301,7 +302,7 @@ class DriftSearchPage extends HookConsumerWidget {
);
}
search();
unawaited(search());
}
// MEDIA PICKER
@@ -395,15 +396,18 @@ class DriftSearchPage extends HookConsumerWidget {
handleTextSubmitted(String value) {
switch (textSearchType.value) {
case TextSearchType.context:
filter.value = filter.value.copyWith(filename: '', context: value, description: '');
filter.value = filter.value.copyWith(filename: '', context: value, description: '', ocr: '');
break;
case TextSearchType.filename:
filter.value = filter.value.copyWith(filename: value, context: '', description: '');
filter.value = filter.value.copyWith(filename: value, context: '', description: '', ocr: '');
break;
case TextSearchType.description:
filter.value = filter.value.copyWith(filename: '', context: '', description: value);
filter.value = filter.value.copyWith(filename: '', context: '', description: value, ocr: '');
break;
case TextSearchType.ocr:
filter.value = filter.value.copyWith(filename: '', context: '', description: '', ocr: value);
break;
}
@@ -414,6 +418,7 @@ class DriftSearchPage extends HookConsumerWidget {
TextSearchType.context => Icons.image_search_rounded,
TextSearchType.filename => Icons.abc_rounded,
TextSearchType.description => Icons.text_snippet_outlined,
TextSearchType.ocr => Icons.document_scanner_outlined,
};
return Scaffold(
@@ -499,6 +504,27 @@ class DriftSearchPage extends HookConsumerWidget {
searchHintText.value = 'search_by_description_example'.t(context: context);
},
),
FeatureCheck(
feature: (features) => features.ocr,
child: MenuItemButton(
child: ListTile(
leading: const Icon(Icons.document_scanner_outlined),
title: Text(
'search_by_ocr'.t(context: context),
style: context.textTheme.bodyLarge?.copyWith(
fontWeight: FontWeight.w500,
color: textSearchType.value == TextSearchType.ocr ? context.colorScheme.primary : null,
),
),
selectedColor: context.colorScheme.primary,
selected: textSearchType.value == TextSearchType.ocr,
),
onPressed: () {
textSearchType.value = TextSearchType.ocr;
searchHintText.value = 'search_by_ocr_example'.t(context: context);
},
),
),
],
),
),

View File

@@ -4,6 +4,23 @@ import 'package:immich_mobile/domain/services/search.service.dart';
import 'package:immich_mobile/models/search/search_filter.model.dart';
import 'package:immich_mobile/providers/infrastructure/search.provider.dart';
final searchPreFilterProvider = NotifierProvider<SearchFilterProvider, SearchFilter?>(SearchFilterProvider.new);
class SearchFilterProvider extends Notifier<SearchFilter?> {
@override
SearchFilter? build() {
return null;
}
void setFilter(SearchFilter? filter) {
state = filter;
}
void clear() {
state = null;
}
}
final paginatedSearchProvider = StateNotifierProvider<PaginatedSearchNotifier, SearchResult>(
(ref) => PaginatedSearchNotifier(ref.watch(searchServiceProvider)),
);
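
The new searchPreFilterProvider replaces route-parameter pre-filters with state that any caller can seed before pushing the search route and that the tab shell clears on exit. A minimal, self-contained sketch of the same Notifier pattern using the plain riverpod package, with a String standing in for SearchFilter:

import 'package:riverpod/riverpod.dart';

final preFilterProvider = NotifierProvider<PreFilterNotifier, String?>(PreFilterNotifier.new);

class PreFilterNotifier extends Notifier<String?> {
  @override
  String? build() => null;

  void setFilter(String? filter) => state = filter;

  void clear() => state = null;
}

void main() {
  final container = ProviderContainer();
  // A caller (e.g. the similar-photos action) seeds the pre-filter...
  container.read(preFilterProvider.notifier).setFilter('assetId:123');
  // ...the search page reads it when building its filter state...
  assert(container.read(preFilterProvider) == 'assetId:123');
  // ...and the tab shell clears it when the user leaves the search tab.
  container.read(preFilterProvider.notifier).clear();
  container.dispose();
}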

View File

@@ -1,3 +1,5 @@
import 'dart:async';
import 'package:flutter/material.dart';
import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/constants/enums.dart';
@@ -15,7 +17,7 @@ class AdvancedInfoActionButton extends ConsumerWidget {
return;
}
ref.read(actionProvider.notifier).troubleshoot(source, context);
unawaited(ref.read(actionProvider.notifier).troubleshoot(source, context));
}
@override

View File

@@ -39,7 +39,7 @@ class ShareActionButton extends ConsumerWidget {
return;
}
showDialog(
await showDialog(
context: context,
builder: (BuildContext buildContext) {
ref.read(actionProvider.notifier).shareAssets(source, context).then((ActionResult result) {

View File

@@ -0,0 +1,50 @@
import 'dart:async';
import 'package:auto_route/auto_route.dart';
import 'package:flutter/material.dart';
import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/entities/asset.entity.dart';
import 'package:immich_mobile/extensions/translate_extensions.dart';
import 'package:immich_mobile/models/search/search_filter.model.dart';
import 'package:immich_mobile/presentation/pages/search/paginated_search.provider.dart';
import 'package:immich_mobile/presentation/widgets/action_buttons/base_action_button.widget.dart';
import 'package:immich_mobile/presentation/widgets/asset_viewer/asset_viewer.state.dart';
import 'package:immich_mobile/routing/router.dart';
class SimilarPhotosActionButton extends ConsumerWidget {
final String assetId;
const SimilarPhotosActionButton({super.key, required this.assetId});
void _onTap(BuildContext context, WidgetRef ref) async {
if (!context.mounted) {
return;
}
ref.invalidate(assetViewerProvider);
ref
.read(searchPreFilterProvider.notifier)
.setFilter(
SearchFilter(
assetId: assetId,
people: {},
location: SearchLocationFilter(),
camera: SearchCameraFilter(),
date: SearchDateFilter(),
display: SearchDisplayFilters(isNotInAlbum: false, isArchive: false, isFavorite: false),
mediaType: AssetType.image,
),
);
unawaited(context.router.popAndPush(const DriftSearchRoute()));
}
@override
Widget build(BuildContext context, WidgetRef ref) {
return BaseActionButton(
iconData: Icons.compare,
label: "view_similar_photos".t(context: context),
onPressed: () => _onTap(context, ref),
maxWidth: 100,
);
}
}

View File

@@ -12,10 +12,9 @@ import 'package:immich_mobile/extensions/build_context_extensions.dart';
import 'package:immich_mobile/extensions/theme_extensions.dart';
import 'package:immich_mobile/extensions/translate_extensions.dart';
import 'package:immich_mobile/models/albums/album_search.model.dart';
import 'package:immich_mobile/pages/common/large_leading_tile.dart';
import 'package:immich_mobile/presentation/widgets/album/album_tile.dart';
import 'package:immich_mobile/presentation/widgets/images/thumbnail.widget.dart';
import 'package:immich_mobile/providers/infrastructure/album.provider.dart';
import 'package:immich_mobile/providers/infrastructure/current_album.provider.dart';
import 'package:immich_mobile/providers/timeline/multiselect.provider.dart';
import 'package:immich_mobile/providers/user.provider.dart';
import 'package:immich_mobile/routing/router.dart';
@@ -121,7 +120,7 @@ class _AlbumSelectorState extends ConsumerState<AlbumSelector> {
// we need to re-filter the albums after sorting
// so shownAlbums gets updated
filterAlbums();
unawaited(filterAlbums());
}
Future<void> filterAlbums() async {
@@ -516,38 +515,6 @@ class _AlbumList extends ConsumerWidget {
sliver: SliverList.builder(
itemBuilder: (_, index) {
final album = albums[index];
final albumTile = LargeLeadingTile(
title: Text(
album.name,
maxLines: 2,
overflow: TextOverflow.ellipsis,
style: context.textTheme.titleSmall?.copyWith(fontWeight: FontWeight.w600),
),
subtitle: Text(
'${'items_count'.t(context: context, args: {'count': album.assetCount})} • ${album.ownerId != userId ? 'shared_by_user'.t(context: context, args: {'user': album.ownerName}) : 'owned'.t(context: context)}',
overflow: TextOverflow.ellipsis,
style: context.textTheme.bodyMedium?.copyWith(color: context.colorScheme.onSurfaceSecondary),
),
onTap: () => onAlbumSelected(album),
leadingPadding: const EdgeInsets.only(right: 16),
leading: album.thumbnailAssetId != null
? ClipRRect(
borderRadius: const BorderRadius.all(Radius.circular(15)),
child: SizedBox(width: 80, height: 80, child: Thumbnail.remote(remoteId: album.thumbnailAssetId!)),
)
: SizedBox(
width: 80,
height: 80,
child: Container(
decoration: BoxDecoration(
color: context.colorScheme.surfaceContainer,
borderRadius: const BorderRadius.all(Radius.circular(16)),
border: Border.all(color: context.colorScheme.outline.withAlpha(50), width: 1),
),
child: const Icon(Icons.photo_album_rounded, size: 24, color: Colors.grey),
),
),
);
final isOwner = album.ownerId == userId;
if (isOwner) {
@@ -576,11 +543,14 @@ class _AlbumList extends ConsumerWidget {
onDismissed: (direction) async {
await ref.read(remoteAlbumProvider.notifier).deleteAlbum(album.id);
},
child: albumTile,
child: AlbumTile(album: album, isOwner: isOwner, onAlbumSelected: onAlbumSelected),
),
);
} else {
return Padding(padding: const EdgeInsets.only(bottom: 8.0), child: albumTile);
return Padding(
padding: const EdgeInsets.only(bottom: 8.0),
child: AlbumTile(album: album, isOwner: isOwner, onAlbumSelected: onAlbumSelected),
);
}
},
itemCount: albums.length,
@@ -709,9 +679,8 @@ class AddToAlbumHeader extends ConsumerWidget {
return;
}
ref.read(currentRemoteAlbumProvider.notifier).setAlbum(newAlbum);
ref.read(multiSelectProvider.notifier).reset();
context.pushRoute(RemoteAlbumRoute(album: newAlbum));
unawaited(context.pushRoute(RemoteAlbumRoute(album: newAlbum)));
}
return SliverPadding(

View File

@@ -0,0 +1,51 @@
import 'package:flutter/material.dart';
import 'package:immich_mobile/domain/models/album/album.model.dart';
import 'package:immich_mobile/extensions/build_context_extensions.dart';
import 'package:immich_mobile/extensions/theme_extensions.dart';
import 'package:immich_mobile/extensions/translate_extensions.dart';
import 'package:immich_mobile/pages/common/large_leading_tile.dart';
import 'package:immich_mobile/presentation/widgets/images/thumbnail.widget.dart';
class AlbumTile extends StatelessWidget {
const AlbumTile({super.key, required this.album, required this.isOwner, this.onAlbumSelected});
final RemoteAlbum album;
final bool isOwner;
final Function(RemoteAlbum)? onAlbumSelected;
@override
Widget build(BuildContext context) {
return LargeLeadingTile(
title: Text(
album.name,
maxLines: 2,
overflow: TextOverflow.ellipsis,
style: context.textTheme.titleSmall?.copyWith(fontWeight: FontWeight.w600),
),
subtitle: Text(
'${'items_count'.t(context: context, args: {'count': album.assetCount})} • ${isOwner ? 'owned'.t(context: context) : 'shared_by_user'.t(context: context, args: {'user': album.ownerName})}',
overflow: TextOverflow.ellipsis,
style: context.textTheme.bodyMedium?.copyWith(color: context.colorScheme.onSurfaceSecondary),
),
onTap: () => onAlbumSelected?.call(album),
leadingPadding: const EdgeInsets.only(right: 16),
leading: album.thumbnailAssetId != null
? ClipRRect(
borderRadius: const BorderRadius.all(Radius.circular(15)),
child: SizedBox(width: 80, height: 80, child: Thumbnail.remote(remoteId: album.thumbnailAssetId!)),
)
: SizedBox(
width: 80,
height: 80,
child: Container(
decoration: BoxDecoration(
color: context.colorScheme.surfaceContainer,
borderRadius: const BorderRadius.all(Radius.circular(16)),
border: Border.all(color: context.colorScheme.outline.withAlpha(50), width: 1),
),
child: const Icon(Icons.photo_album_rounded, size: 24, color: Colors.grey),
),
),
);
}
}

View File

@@ -5,6 +5,7 @@ import 'package:easy_localization/easy_localization.dart';
import 'package:flutter/material.dart';
import 'package:flutter/services.dart';
import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/domain/models/album/album.model.dart';
import 'package:immich_mobile/domain/models/asset/base_asset.model.dart';
import 'package:immich_mobile/domain/models/timeline.model.dart';
import 'package:immich_mobile/domain/services/timeline.service.dart';
@@ -13,6 +14,7 @@ import 'package:immich_mobile/extensions/build_context_extensions.dart';
import 'package:immich_mobile/extensions/platform_extensions.dart';
import 'package:immich_mobile/extensions/scroll_extensions.dart';
import 'package:immich_mobile/presentation/widgets/action_buttons/download_status_floating_button.widget.dart';
import 'package:immich_mobile/presentation/widgets/asset_viewer/activities_bottom_sheet.widget.dart';
import 'package:immich_mobile/presentation/widgets/asset_viewer/asset_stack.provider.dart';
import 'package:immich_mobile/presentation/widgets/asset_viewer/asset_stack.widget.dart';
import 'package:immich_mobile/presentation/widgets/asset_viewer/asset_viewer.state.dart';
@@ -20,7 +22,6 @@ import 'package:immich_mobile/presentation/widgets/asset_viewer/bottom_bar.widge
import 'package:immich_mobile/presentation/widgets/asset_viewer/bottom_sheet.widget.dart';
import 'package:immich_mobile/presentation/widgets/asset_viewer/top_app_bar.widget.dart';
import 'package:immich_mobile/presentation/widgets/asset_viewer/video_viewer.widget.dart';
import 'package:immich_mobile/presentation/widgets/asset_viewer/activities_bottom_sheet.widget.dart';
import 'package:immich_mobile/presentation/widgets/images/image_provider.dart';
import 'package:immich_mobile/presentation/widgets/images/thumbnail.widget.dart';
import 'package:immich_mobile/providers/asset_viewer/is_motion_video_playing.provider.dart';
@@ -28,6 +29,7 @@ import 'package:immich_mobile/providers/asset_viewer/video_player_controls_provi
import 'package:immich_mobile/providers/asset_viewer/video_player_value_provider.dart';
import 'package:immich_mobile/providers/cast.provider.dart';
import 'package:immich_mobile/providers/infrastructure/asset_viewer/current_asset.provider.dart';
import 'package:immich_mobile/providers/infrastructure/current_album.provider.dart';
import 'package:immich_mobile/providers/infrastructure/readonly_mode.provider.dart';
import 'package:immich_mobile/providers/infrastructure/timeline.provider.dart';
import 'package:immich_mobile/widgets/common/immich_loading_indicator.dart';
@@ -39,15 +41,25 @@ class AssetViewerPage extends StatelessWidget {
final int initialIndex;
final TimelineService timelineService;
final int? heroOffset;
final RemoteAlbum? currentAlbum;
const AssetViewerPage({super.key, required this.initialIndex, required this.timelineService, this.heroOffset});
const AssetViewerPage({
super.key,
required this.initialIndex,
required this.timelineService,
this.heroOffset,
this.currentAlbum,
});
@override
Widget build(BuildContext context) {
// This is necessary to ensure that the timeline service is available
// since the Timeline and AssetViewer are on different routes / Widget subtrees.
return ProviderScope(
overrides: [timelineServiceProvider.overrideWithValue(timelineService)],
overrides: [
timelineServiceProvider.overrideWithValue(timelineService),
currentRemoteAlbumScopedProvider.overrideWithValue(currentAlbum),
],
child: AssetViewer(initialIndex: initialIndex, heroOffset: heroOffset),
);
}
@@ -635,9 +647,9 @@ class _AssetViewerState extends ConsumerState<AssetViewer> {
// Listen for control visibility changes and change system UI mode accordingly
ref.listen(assetViewerProvider.select((value) => value.showingControls), (_, showingControls) async {
if (showingControls) {
SystemChrome.setEnabledSystemUIMode(SystemUiMode.edgeToEdge);
unawaited(SystemChrome.setEnabledSystemUIMode(SystemUiMode.edgeToEdge));
} else {
SystemChrome.setEnabledSystemUIMode(SystemUiMode.immersiveSticky);
unawaited(SystemChrome.setEnabledSystemUIMode(SystemUiMode.immersiveSticky));
}
});

View File

@@ -1,3 +1,7 @@
import 'dart:async';
import 'package:auto_route/auto_route.dart';
import 'package:collection/collection.dart';
import 'package:easy_localization/easy_localization.dart';
import 'package:flutter/material.dart';
import 'package:flutter/services.dart';
@@ -8,17 +12,22 @@ import 'package:immich_mobile/domain/models/exif.model.dart';
import 'package:immich_mobile/domain/models/setting.model.dart';
import 'package:immich_mobile/extensions/build_context_extensions.dart';
import 'package:immich_mobile/extensions/translate_extensions.dart';
import 'package:immich_mobile/presentation/widgets/album/album_tile.dart';
import 'package:immich_mobile/presentation/widgets/asset_viewer/asset_viewer.state.dart';
import 'package:immich_mobile/presentation/widgets/asset_viewer/bottom_sheet/sheet_location_details.widget.dart';
import 'package:immich_mobile/presentation/widgets/asset_viewer/bottom_sheet/sheet_people_details.widget.dart';
import 'package:immich_mobile/presentation/widgets/bottom_sheet/base_bottom_sheet.widget.dart';
import 'package:immich_mobile/providers/haptic_feedback.provider.dart';
import 'package:immich_mobile/providers/infrastructure/action.provider.dart';
import 'package:immich_mobile/providers/infrastructure/album.provider.dart';
import 'package:immich_mobile/providers/infrastructure/asset_viewer/current_asset.provider.dart';
import 'package:immich_mobile/providers/infrastructure/current_album.provider.dart';
import 'package:immich_mobile/providers/infrastructure/setting.provider.dart';
import 'package:immich_mobile/providers/routes.provider.dart';
import 'package:immich_mobile/providers/server_info.provider.dart';
import 'package:immich_mobile/providers/user.provider.dart';
import 'package:immich_mobile/repositories/asset_media.repository.dart';
import 'package:immich_mobile/routing/router.dart';
import 'package:immich_mobile/utils/action_button.utils.dart';
import 'package:immich_mobile/utils/bytes_units.dart';
import 'package:immich_mobile/widgets/common/immich_toast.dart';
@@ -131,6 +140,60 @@ class _AssetDetailBottomSheet extends ConsumerWidget {
await ref.read(actionProvider.notifier).editDateTime(ActionSource.viewer, context);
}
Widget _buildAppearsInList(WidgetRef ref, BuildContext context) {
final isRemote = ref.watch(currentAssetNotifier)?.hasRemote ?? false;
if (!isRemote) {
return const SizedBox.shrink();
}
final remoteAsset = ref.watch(currentAssetNotifier) as RemoteAsset;
final userId = ref.watch(currentUserProvider)?.id;
final assetAlbums = ref.watch(albumsContainingAssetProvider(remoteAsset.id));
return assetAlbums.when(
data: (albums) {
if (albums.isEmpty) {
return const SizedBox.shrink();
}
albums.sortBy((a) => a.name);
return Column(
spacing: 12,
children: [
if (albums.isNotEmpty)
_SheetTile(
title: 'appears_in'.t(context: context).toUpperCase(),
titleStyle: context.textTheme.labelMedium?.copyWith(
color: context.textTheme.labelMedium?.color?.withAlpha(200),
fontWeight: FontWeight.w600,
),
),
Padding(
padding: const EdgeInsets.only(left: 24),
child: Column(
spacing: 12,
children: albums.map((album) {
final isOwner = album.ownerId == userId;
return AlbumTile(
album: album,
isOwner: isOwner,
onAlbumSelected: (album) async {
ref.invalidate(assetViewerProvider);
unawaited(context.router.popAndPush(RemoteAlbumRoute(album: album)));
},
);
}).toList(),
),
),
],
);
},
loading: () => const SizedBox.shrink(),
error: (_, __) => const SizedBox.shrink(),
);
}
@override
Widget build(BuildContext context, WidgetRef ref) {
final asset = ref.watch(currentAssetNotifier);
@@ -142,6 +205,47 @@ class _AssetDetailBottomSheet extends ConsumerWidget {
final cameraTitle = _getCameraInfoTitle(exifInfo);
final isOwner = ref.watch(currentUserProvider)?.id == (asset is RemoteAsset ? asset.ownerId : null);
// Build file info tile based on asset type
Widget buildFileInfoTile() {
if (asset is LocalAsset) {
final assetMediaRepository = ref.watch(assetMediaRepositoryProvider);
return FutureBuilder<String?>(
future: assetMediaRepository.getOriginalFilename(asset.id),
builder: (context, snapshot) {
final displayName = snapshot.data ?? asset.name;
return _SheetTile(
title: displayName,
titleStyle: context.textTheme.labelLarge,
leading: Icon(
asset.isImage ? Icons.image_outlined : Icons.videocam_outlined,
size: 24,
color: context.textTheme.labelLarge?.color,
),
subtitle: _getFileInfo(asset, exifInfo),
subtitleStyle: context.textTheme.bodyMedium?.copyWith(
color: context.textTheme.bodyMedium?.color?.withAlpha(155),
),
);
},
);
} else {
// For remote assets, use the name directly
return _SheetTile(
title: asset.name,
titleStyle: context.textTheme.labelLarge,
leading: Icon(
asset.isImage ? Icons.image_outlined : Icons.videocam_outlined,
size: 24,
color: context.textTheme.labelLarge?.color,
),
subtitle: _getFileInfo(asset, exifInfo),
subtitleStyle: context.textTheme.bodyMedium?.copyWith(
color: context.textTheme.bodyMedium?.color?.withAlpha(155),
),
);
}
}
return SliverList.list(
children: [
// Asset Date and Time
@@ -163,19 +267,7 @@ class _AssetDetailBottomSheet extends ConsumerWidget {
),
),
// File info
_SheetTile(
title: asset.name,
titleStyle: context.textTheme.labelLarge,
leading: Icon(
asset.isImage ? Icons.image_outlined : Icons.videocam_outlined,
size: 24,
color: context.textTheme.labelLarge?.color,
),
subtitle: _getFileInfo(asset, exifInfo),
subtitleStyle: context.textTheme.bodyMedium?.copyWith(
color: context.textTheme.bodyMedium?.color?.withAlpha(155),
),
),
buildFileInfoTile(),
// Camera info
if (cameraTitle != null)
_SheetTile(
@@ -187,7 +279,10 @@ class _AssetDetailBottomSheet extends ConsumerWidget {
color: context.textTheme.bodyMedium?.color?.withAlpha(155),
),
),
const SizedBox(height: 64),
// Appears in (Albums)
_buildAppearsInList(ref, context),
// padding at the bottom to avoid cut-off
const SizedBox(height: 100),
],
);
}
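The _buildAppearsInList method above renders a Riverpod AsyncValue via when(...), collapsing the loading and error branches to an empty box so the sheet layout does not jump. A hedged sketch of that rendering pattern; AsyncValue and when are the actual Riverpod API, while Album and buildAlbumList are illustrative stand-ins:
import 'package:flutter/widgets.dart';
import 'package:flutter_riverpod/flutter_riverpod.dart';
class Album {
  const Album(this.name);
  final String name;
}
Widget buildAlbumList(AsyncValue<List<Album>> albums) {
  return albums.when(
    // Only the data branch renders content; empty, loading, and error
    // states all collapse to a zero-size box.
    data: (value) => value.isEmpty
        ? const SizedBox.shrink()
        : Column(children: [for (final album in value) Text(album.name)]),
    loading: () => const SizedBox.shrink(),
    error: (_, __) => const SizedBox.shrink(),
  );
}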

View File

@@ -46,6 +46,7 @@ bool _isCurrentAsset(BaseAsset asset, BaseAsset? currentAsset) {
}
class NativeVideoViewer extends HookConsumerWidget {
static final log = Logger('NativeVideoViewer');
final BaseAsset asset;
final bool showControls;
final int playbackDelayFactor;
@@ -79,8 +80,6 @@ class NativeVideoViewer extends HookConsumerWidget {
// Used to show the placeholder during hero animations for remote videos to avoid a stutter
final isVisible = useState(Platform.isIOS && asset.hasLocal);
final log = Logger('NativeVideoViewerPage');
final isCasting = ref.watch(castProvider.select((c) => c.isCasting));
Future<VideoSource?> createSource() async {
@@ -169,7 +168,7 @@ class NativeVideoViewer extends HookConsumerWidget {
interval: const Duration(milliseconds: 100),
maxWaitTime: const Duration(milliseconds: 200),
);
ref.listen(videoPlayerControlsProvider, (oldControls, newControls) async {
ref.listen(videoPlayerControlsProvider, (oldControls, newControls) {
final playerController = controller.value;
if (playerController == null) {
return;
@@ -180,28 +179,14 @@ class NativeVideoViewer extends HookConsumerWidget {
return;
}
final oldSeek = (oldControls?.position ?? 0) ~/ 1;
final newSeek = newControls.position ~/ 1;
final oldSeek = oldControls?.position.inMilliseconds;
final newSeek = newControls.position.inMilliseconds;
if (oldSeek != newSeek || newControls.restarted) {
seekDebouncer.run(() => playerController.seekTo(newSeek));
}
if (oldControls?.pause != newControls.pause || newControls.restarted) {
// Make sure the last seek is complete before pausing or playing
// Otherwise, `onPlaybackPositionChanged` can receive outdated events
if (seekDebouncer.isActive) {
await seekDebouncer.drain();
}
try {
if (newControls.pause) {
await playerController.pause();
} else {
await playerController.play();
}
} catch (error) {
log.severe('Error pausing or playing video: $error');
}
unawaited(_onPauseChange(context, playerController, seekDebouncer, newControls.pause));
}
});
@@ -263,7 +248,7 @@ class NativeVideoViewer extends HookConsumerWidget {
return;
}
ref.read(videoPlaybackValueProvider.notifier).position = Duration(seconds: playbackInfo.position);
ref.read(videoPlaybackValueProvider.notifier).position = Duration(milliseconds: playbackInfo.position);
// Check if the video is buffering
if (playbackInfo.status == PlaybackStatus.playing) {
@@ -310,11 +295,13 @@ class NativeVideoViewer extends HookConsumerWidget {
nc.onPlaybackReady.addListener(onPlaybackReady);
nc.onPlaybackEnded.addListener(onPlaybackEnded);
nc.loadVideoSource(source).catchError((error) {
log.severe('Error loading video source: $error');
});
unawaited(
nc.loadVideoSource(source).catchError((error) {
log.severe('Error loading video source: $error');
}),
);
final loopVideo = ref.read(appSettingsServiceProvider).getSetting<bool>(AppSettingsEnum.loopVideo);
nc.setLoop(!asset.isMotionPhoto && loopVideo);
unawaited(nc.setLoop(!asset.isMotionPhoto && loopVideo));
controller.value = nc;
Timer(const Duration(milliseconds: 200), checkIfBuffering);
@@ -388,12 +375,12 @@ class NativeVideoViewer extends HookConsumerWidget {
useOnAppLifecycleStateChange((_, state) async {
if (state == AppLifecycleState.resumed && shouldPlayOnForeground.value) {
controller.value?.play();
await controller.value?.play();
} else if (state == AppLifecycleState.paused) {
final videoPlaying = await controller.value?.isPlaying();
if (videoPlaying ?? true) {
shouldPlayOnForeground.value = true;
controller.value?.pause();
await controller.value?.pause();
} else {
shouldPlayOnForeground.value = false;
}
@@ -422,4 +409,31 @@ class NativeVideoViewer extends HookConsumerWidget {
],
);
}
Future<void> _onPauseChange(
BuildContext context,
NativeVideoPlayerController controller,
Debouncer seekDebouncer,
bool isPaused,
) async {
if (!context.mounted) {
return;
}
// Make sure the last seek is complete before pausing or playing
// Otherwise, `onPlaybackPositionChanged` can receive outdated events
if (seekDebouncer.isActive) {
await seekDebouncer.drain();
}
try {
if (isPaused) {
await controller.pause();
} else {
await controller.play();
}
} catch (error) {
log.severe('Error pausing or playing video: $error');
}
}
}
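The seek handling above now compares Duration.inMilliseconds instead of positions truncated to whole seconds, and the pause/play logic moved into _onPauseChange, which drains the seek debouncer first. A small sketch of the millisecond comparison under those assumptions; StubPlayer and handleControlsUpdate are illustrative stand-ins, not the app's NativeVideoPlayerController or Debouncer APIs:
import 'dart:async';
class StubPlayer {
  Future<void> seekTo(int positionMs) async {
    // Stand-in for the real player's seek call.
    print('seek to ${positionMs}ms');
  }
}
void handleControlsUpdate({
  required Duration? oldPosition,
  required Duration newPosition,
  required bool restarted,
  required StubPlayer player,
}) {
  // Comparing milliseconds catches sub-second scrubs while still
  // skipping redundant seeks to the same position.
  final oldSeek = oldPosition?.inMilliseconds;
  final newSeek = newPosition.inMilliseconds;
  if (oldSeek != newSeek || restarted) {
    unawaited(player.seekTo(newSeek));
  }
}
void main() {
  handleControlsUpdate(
    oldPosition: const Duration(milliseconds: 1500),
    newPosition: const Duration(milliseconds: 1750),
    restarted: false,
    player: StubPlayer(),
  );
}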

View File

@@ -1,3 +1,5 @@
import 'dart:async';
import 'package:async/async.dart';
import 'package:flutter/widgets.dart';
import 'package:immich_mobile/domain/models/asset/base_asset.model.dart';
@@ -51,14 +53,14 @@ mixin CancellableImageProviderMixin<T extends Object> on CancellableImageProvide
Stream<ImageInfo> loadRequest(ImageRequest request, ImageDecoderCallback decode) async* {
if (isCancelled) {
this.request = null;
evict();
unawaited(evict());
return;
}
try {
final image = await request.load(decode);
if (image == null || isCancelled) {
evict();
unawaited(evict());
return;
}
yield image;

View File

@@ -85,7 +85,7 @@ class LocalFullImageProvider extends CancellableImageProvider<LocalFullImageProv
yield* initialImageStream();
if (isCancelled) {
evict();
unawaited(evict());
return;
}
@@ -103,7 +103,7 @@ class LocalFullImageProvider extends CancellableImageProvider<LocalFullImageProv
}
if (isCancelled) {
evict();
unawaited(evict());
return;
}

View File

@@ -87,7 +87,7 @@ class RemoteFullImageProvider extends CancellableImageProvider<RemoteFullImagePr
yield* initialImageStream();
if (isCancelled) {
evict();
unawaited(evict());
return;
}
@@ -100,7 +100,7 @@ class RemoteFullImageProvider extends CancellableImageProvider<RemoteFullImagePr
yield* loadRequest(request, decode);
if (isCancelled) {
evict();
unawaited(evict());
return;
}

View File

@@ -115,12 +115,14 @@ class _DriftMapState extends ConsumerState<DriftMap> {
}
final bounds = await controller.getVisibleRegion();
_reloadMutex.run(() async {
if (mounted && ref.read(mapStateProvider.notifier).setBounds(bounds)) {
final markers = await ref.read(mapMarkerProvider(bounds).future);
await reloadMarkers(markers);
}
});
unawaited(
_reloadMutex.run(() async {
if (mounted && ref.read(mapStateProvider.notifier).setBounds(bounds)) {
final markers = await ref.read(mapMarkerProvider(bounds).future);
await reloadMarkers(markers);
}
}),
);
}
Future<void> reloadMarkers(Map<String, dynamic> markers) async {
@@ -148,7 +150,7 @@ class _DriftMapState extends ConsumerState<DriftMap> {
final controller = mapController;
if (controller != null && location != null) {
controller.animateCamera(
await controller.animateCamera(
CameraUpdate.newLatLngZoom(LatLng(location.latitude, location.longitude), MapUtils.mapZoomToAssetLevel),
duration: const Duration(milliseconds: 800),
);

View File

@@ -73,7 +73,7 @@ class MapUtils {
try {
bool serviceEnabled = await Geolocator.isLocationServiceEnabled();
if (!serviceEnabled && !silent) {
showDialog(context: context, builder: (context) => _LocationServiceDisabledDialog(context));
unawaited(showDialog(context: context, builder: (context) => _LocationServiceDisabledDialog(context)));
return (null, LocationPermission.deniedForever);
}
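The MapUtils hunk above only shows the service-enabled check before returning deniedForever. For context, a hedged sketch of a typical geolocator flow; the Geolocator calls are real package APIs, but the surrounding helper is illustrative and may not match MapUtils exactly:
import 'package:geolocator/geolocator.dart';
Future<Position?> getCurrentLocation() async {
  if (!await Geolocator.isLocationServiceEnabled()) {
    // The hunk above shows a dialog here instead of failing silently.
    return null;
  }
  var permission = await Geolocator.checkPermission();
  if (permission == LocationPermission.denied) {
    permission = await Geolocator.requestPermission();
  }
  if (permission == LocationPermission.denied ||
      permission == LocationPermission.deniedForever) {
    return null;
  }
  return Geolocator.getCurrentPosition();
}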

View File

@@ -1,3 +1,4 @@
import 'dart:async';
import 'dart:math' as math;
import 'package:auto_route/auto_route.dart';
@@ -15,8 +16,9 @@ import 'package:immich_mobile/presentation/widgets/timeline/timeline.state.dart'
import 'package:immich_mobile/presentation/widgets/timeline/timeline_drag_region.dart';
import 'package:immich_mobile/providers/asset_viewer/is_motion_video_playing.provider.dart';
import 'package:immich_mobile/providers/haptic_feedback.provider.dart';
import 'package:immich_mobile/providers/infrastructure/timeline.provider.dart';
import 'package:immich_mobile/providers/infrastructure/current_album.provider.dart';
import 'package:immich_mobile/providers/infrastructure/readonly_mode.provider.dart';
import 'package:immich_mobile/providers/infrastructure/timeline.provider.dart';
import 'package:immich_mobile/providers/timeline/multiselect.provider.dart';
import 'package:immich_mobile/routing/router.dart';
@@ -156,11 +158,14 @@ class _AssetTileWidget extends ConsumerWidget {
await ref.read(timelineServiceProvider).loadAssets(assetIndex, 1);
ref.read(isPlayingMotionVideoProvider.notifier).playing = false;
AssetViewer.setAsset(ref, asset);
ctx.pushRoute(
AssetViewerRoute(
initialIndex: assetIndex,
timelineService: ref.read(timelineServiceProvider),
heroOffset: heroOffset,
unawaited(
ctx.pushRoute(
AssetViewerRoute(
initialIndex: assetIndex,
timelineService: ref.read(timelineServiceProvider),
heroOffset: heroOffset,
currentAlbum: ref.read(currentRemoteAlbumProvider),
),
),
);
}

View File

@@ -242,7 +242,7 @@ class AppLifeCycleNotifier extends StateNotifier<AppLifeCycleEnum> {
}
try {
LogService.I.flush();
await LogService.I.flush();
} catch (_) {}
}
@@ -255,7 +255,7 @@ class AppLifeCycleNotifier extends StateNotifier<AppLifeCycleEnum> {
// Flush logs before closing database
try {
LogService.I.flush();
await LogService.I.flush();
} catch (_) {}
// Close Isar database safely
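The lifecycle hunks change LogService.I.flush() to be awaited so buffered log writes complete before the database is closed. A minimal sketch of that ordering, assuming stand-in flushLogs and closeDatabase helpers rather than the app's real LogService and Isar handles:
import 'dart:async';
final List<String> logBuffer = <String>[];
Future<void> flushLogs() async {
  // Pretend to persist buffered entries; awaiting guarantees ordering.
  await Future<void>.delayed(const Duration(milliseconds: 5));
  logBuffer.clear();
}
Future<void> closeDatabase() async => print('database closed');
Future<void> onDetached() async {
  try {
    // Awaiting (instead of fire-and-forget) ensures pending entries are
    // written before the database handle goes away.
    await flushLogs();
  } catch (_) {}
  await closeDatabase();
}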

Some files were not shown because too many files have changed in this diff