Mirror of https://github.com/immich-app/immich.git (synced 2026-02-08 10:48:42 +03:00)

Compare commits: v2.2.0 ... refactor/t (136 commits)
Commits (by SHA1):

8643a58444 16869a21de 2db3f8c160 73571d9d69 d310c6f3cd c086a65fa8 7134dd29ca 3e08953a43
58c3c7e26b 237ddcb648 fbaeffd65c d64c339b4f 69880ee165 15e00f82f0 ce82e27f4b eeee5147cc
af22f9b014 1086f22166 e94eb5012f 4dcc049465 d784d431d0 1200bfad13 f11bfb9581 074fdb2b96
f1f203719d f73ca9d9c0 ad3f4fb434 8001dedcbf 07a39226c5 88e7e21683 2cefbf8ca3 4a6c50cd81
e0535e20e6 62580455af 0e7e67efe1 2c54b506b3 8969b8bdb2 5186092faa 4c9142308f bea5d4fd37
74c24bfa88 95834c68d9 09024c3558 137cb043ef edf21bae41 c958f9856d 70ab8bc657 edde0f93ae
896665bca9 e8e9e7830e 4fd9e42ce5 337e3a8dac 2dc81e28fc f915d4cc90 905f4375b0 0b3633db4f
2f40f5aad8 2611e2ec20 433a3cd339 0b487897a4 d5c5bdffcb dea95ac2e6 9e2208b8dd 6922a92b69
7a2c8e0662 787158247f b0a0b7c2e1 cb6d81771d 8de6ec1a1b d27c01ef70 d6307b262f b2cbefe41e
da5a72f6de 45304f1211 a4e65a7ea8 dd393c8346 493cde9d55 7705c84b04 ce0172b8c1 718b3a7b52
8a73de018c d92df63f84 6c6b00067b 9cc88ed2a6 4905bba694 853d19dc2d c935ae47d0 93ab42fa24
6913697ad1 a4ae86ce29 2c50f2e244 365abd8906 25fb43bbe3 125e8cee01 c15e9bfa72 35e188e6e7
3cc9dd126c aa69d89b9f 29c14a3f58 0df70365d7 c34be73d81 f396e9e374 821a9d4691 cad654586f
28eb1bc13c 1e4779cf48 0647c22956 b8087b4fa2 d94cb9641b 517c3e1d4c 619de2a5e4 79d0e3e1ed
f5ff36a1f8 b5efc9c16e 1036076b0d c76324c611 0ddb92e1ec d08a520aa2 7bdf0f6c50 2b33a58448
b35f00f768 86cc7c3c73 5854cbbe97 ceb36a304d f5d7e5acca be15a84f9b 32791e98c2 7ea443b3a9
c69786b039 5c7d5539ea 3531856d1c 4abaad548a 61a2c3ace3 e9038193db 3f5cd48a59 4cb094e7ae
@@ -21,6 +21,7 @@ services:
- app-node_modules:/usr/src/app/node_modules
- sveltekit:/usr/src/app/web/.svelte-kit
- coverage:/usr/src/app/web/coverage
- ../plugins:/build/corePlugin
immich-web:
env_file: !reset []
immich-machine-learning:
.github/mise.toml (new file, vendored, 10 lines)
@@ -0,0 +1,10 @@
[tasks.install]
run = "pnpm install --filter github --frozen-lockfile"

[tasks.format]
env._.path = "./node_modules/.bin"
run = "prettier --check ."

[tasks."format-fix"]
env._.path = "./node_modules/.bin"
run = "prettier --write ."
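These mise tasks are invoked with `mise run <task>` from the directory that owns the file. A minimal sketch, assuming mise and pnpm are already installed locally:

```bash
cd .github
mise run install     # runs: pnpm install --filter github --frozen-lockfile
mise run format      # runs: prettier --check .
mise run format-fix  # runs: prettier --write .
```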
.github/workflows/build-mobile.yml (vendored, 111 lines changed)
@@ -1,12 +1,16 @@
name: Build Mobile

on:
workflow_dispatch:
workflow_call:
inputs:
ref:
required: false
type: string
environment:
description: 'Target environment'
required: true
default: 'development'
type: string
secrets:
KEY_JKS:
required: true
@@ -16,6 +20,30 @@ on:
required: true
ANDROID_STORE_PASSWORD:
required: true
APP_STORE_CONNECT_API_KEY_ID:
required: true
APP_STORE_CONNECT_API_KEY_ISSUER_ID:
required: true
APP_STORE_CONNECT_API_KEY:
required: true
IOS_CERTIFICATE_P12:
required: true
IOS_CERTIFICATE_PASSWORD:
required: true
IOS_PROVISIONING_PROFILE:
required: true
IOS_PROVISIONING_PROFILE_SHARE_EXTENSION:
required: true
IOS_PROVISIONING_PROFILE_WIDGET_EXTENSION:
required: true
IOS_DEVELOPMENT_PROVISIONING_PROFILE:
required: true
IOS_DEVELOPMENT_PROVISIONING_PROFILE_SHARE_EXTENSION:
required: true
IOS_DEVELOPMENT_PROVISIONING_PROFILE_WIDGET_EXTENSION:
required: true
FASTLANE_TEAM_ID:
required: true
pull_request:
push:
branches: [main]
@@ -137,7 +165,7 @@ jobs:
fi

- name: Publish Android Artifact
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
with:
name: release-apk-signed
path: mobile/build/app/outputs/flutter-apk/*.apk
@@ -160,8 +188,8 @@ jobs:
needs: pre-job
permissions:
contents: read
# Run on main branch or workflow_dispatch
if: ${{ !github.event.pull_request.head.repo.fork && fromJSON(needs.pre-job.outputs.should_run).mobile == true && github.ref == 'refs/heads/main' }}
# Run on main branch or workflow_dispatch, or on PRs/other branches (build only, no upload)
if: ${{ !github.event.pull_request.head.repo.fork && fromJSON(needs.pre-job.outputs.should_run).mobile == true }}
runs-on: macos-latest

steps:
@@ -193,17 +221,22 @@ jobs:
- name: Setup Ruby
uses: ruby/setup-ruby@v1
with:
ruby-version: '3.4.7'
ruby-version: '3.3'
working-directory: ./mobile/ios

- name: Install Fastlane
- name: Install CocoaPods dependencies
working-directory: ./mobile/ios
run: |
pod install

- name: Install Fastlane
working-directory: ./mobile/ios
run: |
cd mobile/ios
gem install bundler
bundle config set --local path 'vendor/bundle'
bundle install

- name: Create API Key JSON
- name: Create API Key
env:
API_KEY_ID: ${{ secrets.APP_STORE_CONNECT_API_KEY_ID }}
API_KEY_ISSUER_ID: ${{ secrets.APP_STORE_CONNECT_API_KEY_ISSUER_ID }}
@@ -212,35 +245,55 @@ jobs:
run: |
mkdir -p ~/.appstoreconnect/private_keys
echo "$API_KEY_CONTENT" | base64 --decode > ~/.appstoreconnect/private_keys/AuthKey_${API_KEY_ID}.p8
cat > api_key.json << EOF
{
"key_id": "${API_KEY_ID}",
"issuer_id": "${API_KEY_ISSUER_ID}",
"key": "$(cat ~/.appstoreconnect/private_keys/AuthKey_${API_KEY_ID}.p8)",
"duration": 1200,
"in_house": false
}
EOF

- name: Import Certificate and Provisioning Profile
- name: Import Certificate and Provisioning Profiles
env:
IOS_CERTIFICATE_P12: ${{ secrets.IOS_CERTIFICATE_P12 }}
IOS_CERTIFICATE_PASSWORD: ${{ secrets.IOS_CERTIFICATE_PASSWORD }}
IOS_PROVISIONING_PROFILE: ${{ secrets.IOS_PROVISIONING_PROFILE }}
IOS_PROVISIONING_PROFILE_SHARE_EXTENSION: ${{ secrets.IOS_PROVISIONING_PROFILE_SHARE_EXTENSION }}
IOS_PROVISIONING_PROFILE_WIDGET_EXTENSION: ${{ secrets.IOS_PROVISIONING_PROFILE_WIDGET_EXTENSION }}
IOS_DEVELOPMENT_PROVISIONING_PROFILE: ${{ secrets.IOS_DEVELOPMENT_PROVISIONING_PROFILE }}
IOS_DEVELOPMENT_PROVISIONING_PROFILE_SHARE_EXTENSION: ${{ secrets.IOS_DEVELOPMENT_PROVISIONING_PROFILE_SHARE_EXTENSION }}
IOS_DEVELOPMENT_PROVISIONING_PROFILE_WIDGET_EXTENSION: ${{ secrets.IOS_DEVELOPMENT_PROVISIONING_PROFILE_WIDGET_EXTENSION }}
ENVIRONMENT: ${{ inputs.environment || 'development' }}
working-directory: ./mobile/ios
run: |
# Decode certificate
echo "$IOS_CERTIFICATE_P12" | base64 --decode > certificate.p12
echo "$IOS_PROVISIONING_PROFILE" | base64 --decode > profile.mobileprovision

- name: Create keychain
# Decode provisioning profiles based on environment
if [[ "$ENVIRONMENT" == "development" ]]; then
echo "$IOS_DEVELOPMENT_PROVISIONING_PROFILE" | base64 --decode > profile_dev.mobileprovision
echo "$IOS_DEVELOPMENT_PROVISIONING_PROFILE_SHARE_EXTENSION" | base64 --decode > profile_dev_share.mobileprovision
echo "$IOS_DEVELOPMENT_PROVISIONING_PROFILE_WIDGET_EXTENSION" | base64 --decode > profile_dev_widget.mobileprovision
ls -lh profile_dev*.mobileprovision
else
echo "$IOS_PROVISIONING_PROFILE" | base64 --decode > profile.mobileprovision
echo "$IOS_PROVISIONING_PROFILE_SHARE_EXTENSION" | base64 --decode > profile_share.mobileprovision
echo "$IOS_PROVISIONING_PROFILE_WIDGET_EXTENSION" | base64 --decode > profile_widget.mobileprovision
ls -lh profile*.mobileprovision
fi

- name: Create keychain and import certificate
env:
KEYCHAIN_PASSWORD: ${{ secrets.IOS_CERTIFICATE_PASSWORD }}
CERTIFICATE_PASSWORD: ${{ secrets.IOS_CERTIFICATE_PASSWORD }}
working-directory: ./mobile/ios
run: |
# Create keychain
security create-keychain -p "$KEYCHAIN_PASSWORD" build.keychain
security default-keychain -s build.keychain
security unlock-keychain -p "$KEYCHAIN_PASSWORD" build.keychain
security set-keychain-settings -t 3600 -u build.keychain

# Import certificate
security import certificate.p12 -k build.keychain -P "$CERTIFICATE_PASSWORD" -T /usr/bin/codesign -T /usr/bin/security
security set-key-partition-list -S apple-tool:,apple: -s -k "$KEYCHAIN_PASSWORD" build.keychain

# Verify certificate was imported
security find-identity -v -p codesigning build.keychain

- name: Build and deploy to TestFlight
env:
FASTLANE_TEAM_ID: ${{ secrets.FASTLANE_TEAM_ID }}
@@ -249,8 +302,22 @@ jobs:
KEYCHAIN_PASSWORD: ${{ secrets.IOS_CERTIFICATE_PASSWORD }}
APP_STORE_CONNECT_API_KEY_ID: ${{ secrets.APP_STORE_CONNECT_API_KEY_ID }}
APP_STORE_CONNECT_API_KEY_ISSUER_ID: ${{ secrets.APP_STORE_CONNECT_API_KEY_ISSUER_ID }}
ENVIRONMENT: ${{ inputs.environment || 'development' }}
BUNDLE_ID_SUFFIX: ${{ inputs.environment == 'production' && '' || 'development' }}
GITHUB_REF: ${{ github.ref }}
working-directory: ./mobile/ios
run: bundle exec fastlane release_ci
run: |
# Only upload to TestFlight on main branch
if [[ "$GITHUB_REF" == "refs/heads/main" ]]; then
if [[ "$ENVIRONMENT" == "development" ]]; then
bundle exec fastlane gha_testflight_dev
else
bundle exec fastlane gha_release_prod
fi
else
# Build only, no TestFlight upload for non-main branches
bundle exec fastlane gha_build_only
fi

- name: Clean up keychain
if: always()
@@ -258,7 +325,7 @@ jobs:
security delete-keychain build.keychain || true

- name: Upload IPA artifact
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
with:
name: ios-release-ipa
path: mobile/ios/Runner.ipa
.github/workflows/cli.yml (vendored, 7 lines changed)
@@ -84,7 +84,7 @@ jobs:
token: ${{ steps.token.outputs.token }}

- name: Set up QEMU
uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 # v3.6.0
uses: docker/setup-qemu-action@c7c53464625b32c7a7e944ae62b3e17d2b600130 # v3.7.0

- name: Set up Docker Buildx
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
@@ -95,7 +95,7 @@ jobs:
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ steps.token.outputs.token }}
password: ${{ secrets.GITHUB_TOKEN }}

- name: Get package version
id: package-version
@@ -105,7 +105,7 @@ jobs:

- name: Generate docker image tags
id: metadata
uses: docker/metadata-action@c1e51972afc2121e065aed6d45c65596fe445f3f # v5.8.0
uses: docker/metadata-action@318604b99e75e41977312d83839a89be02ca4893 # v5.9.0
with:
flavor: |
latest=false
@@ -125,4 +125,3 @@ jobs:
cache-to: type=gha,mode=max
tags: ${{ steps.metadata.outputs.tags }}
labels: ${{ steps.metadata.outputs.labels }}
github-token: ${{ steps.token.outputs.token }}
.github/workflows/close-duplicates.yml (vendored, 2 lines changed)
@@ -35,7 +35,7 @@ jobs:
needs: [get_body, should_run]
if: ${{ needs.should_run.outputs.should_run == 'true' }}
container:
image: ghcr.io/immich-app/mdq:main@sha256:6b8450bfc06770af1af66bce9bf2ced7d1d9b90df1a59fc4c83a17777a9f6723
image: ghcr.io/immich-app/mdq:main@sha256:9c905a4ff69f00c4b2f98b40b6090ab3ab18d1a15ed1379733b8691aa1fcb271
outputs:
checked: ${{ steps.get_checkbox.outputs.checked }}
steps:
.github/workflows/codeql-analysis.yml (vendored, 6 lines changed)
@@ -57,7 +57,7 @@ jobs:

# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@16140ae1a102900babc80a33c44059580f687047 # v4.30.9
uses: github/codeql-action/init@0499de31b99561a6d14a36a5f662c2a54f91beee # v4.31.2
with:
languages: ${{ matrix.language }}
# If you wish to specify custom queries, you can do so here or in a config file.
@@ -70,7 +70,7 @@ jobs:
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
# If this step fails, then you should remove it and run the build manually (see below)
- name: Autobuild
uses: github/codeql-action/autobuild@16140ae1a102900babc80a33c44059580f687047 # v4.30.9
uses: github/codeql-action/autobuild@0499de31b99561a6d14a36a5f662c2a54f91beee # v4.31.2

# ℹ️ Command-line programs to run using the OS shell.
# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
@@ -83,6 +83,6 @@ jobs:
# ./location_of_script_within_repo/buildscript.sh

- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@16140ae1a102900babc80a33c44059580f687047 # v4.30.9
uses: github/codeql-action/analyze@0499de31b99561a6d14a36a5f662c2a54f91beee # v4.31.2
with:
category: '/language:${{matrix.language}}'
.github/workflows/docs-build.yml (vendored, 2 lines changed)
@@ -85,7 +85,7 @@ jobs:
run: pnpm build

- name: Upload build output
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
with:
name: docs-build-output
path: docs/build/
.github/workflows/docs-deploy.yml (vendored, 6 lines changed)
@@ -174,7 +174,7 @@ jobs:
CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
TF_STATE_POSTGRES_CONN_STR: ${{ secrets.TF_STATE_POSTGRES_CONN_STR }}
working-directory: 'deployment/modules/cloudflare/docs'
run: 'mise run tf apply'
run: 'mise run //deployment:tf apply'

- name: Deploy Docs Subdomain Output
id: docs-output
@@ -186,7 +186,7 @@ jobs:
TF_STATE_POSTGRES_CONN_STR: ${{ secrets.TF_STATE_POSTGRES_CONN_STR }}
working-directory: 'deployment/modules/cloudflare/docs'
run: |
mise run tf output -- -json | jq -r '
mise run //deployment:tf output -- -json | jq -r '
"projectName=\(.pages_project_name.value)",
"subdomain=\(.immich_app_branch_subdomain.value)"
' >> $GITHUB_OUTPUT
@@ -211,7 +211,7 @@ jobs:
CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
TF_STATE_POSTGRES_CONN_STR: ${{ secrets.TF_STATE_POSTGRES_CONN_STR }}
working-directory: 'deployment/modules/cloudflare/docs-release'
run: 'mise run tf apply'
run: 'mise run //deployment:tf apply'

- name: Comment
uses: actions-cool/maintain-one-comment@4b2dbf086015f892dcb5e8c1106f5fccd6c1476b # v3.2.0
.github/workflows/docs-destroy.yml (vendored, 2 lines changed)
@@ -39,7 +39,7 @@ jobs:
CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
TF_STATE_POSTGRES_CONN_STR: ${{ secrets.TF_STATE_POSTGRES_CONN_STR }}
working-directory: 'deployment/modules/cloudflare/docs'
run: 'mise run tf destroy -- -refresh=false'
run: 'mise run //deployment:tf destroy -- -refresh=false'

- name: Comment
uses: actions-cool/maintain-one-comment@4b2dbf086015f892dcb5e8c1106f5fccd6c1476b # v3.2.0
.github/workflows/fix-format.yml (vendored, 2 lines changed)
@@ -39,7 +39,7 @@ jobs:
cache-dependency-path: '**/pnpm-lock.yaml'

- name: Fix formatting
run: make install-all && make format-all
run: pnpm --recursive install && pnpm run --recursive --parallel fix:format

- name: Commit and push
uses: EndBug/add-and-commit@a94899bca583c204427a224a7af87c02f9b325d5 # v9.1.4
.github/workflows/prepare-release.yml (vendored, 19 lines changed)
@@ -62,7 +62,7 @@ jobs:
ref: main

- name: Install uv
uses: astral-sh/setup-uv@2ddd2b9cb38ad8efd50337e8ab201519a34c9f24 # v7.1.1
uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2

- name: Setup pnpm
uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 # v4.2.0
@@ -99,8 +99,23 @@ jobs:
ALIAS: ${{ secrets.ALIAS }}
ANDROID_KEY_PASSWORD: ${{ secrets.ANDROID_KEY_PASSWORD }}
ANDROID_STORE_PASSWORD: ${{ secrets.ANDROID_STORE_PASSWORD }}
# iOS secrets
APP_STORE_CONNECT_API_KEY_ID: ${{ secrets.APP_STORE_CONNECT_API_KEY_ID }}
APP_STORE_CONNECT_API_KEY_ISSUER_ID: ${{ secrets.APP_STORE_CONNECT_API_KEY_ISSUER_ID }}
APP_STORE_CONNECT_API_KEY: ${{ secrets.APP_STORE_CONNECT_API_KEY }}
IOS_CERTIFICATE_P12: ${{ secrets.IOS_CERTIFICATE_P12 }}
IOS_CERTIFICATE_PASSWORD: ${{ secrets.IOS_CERTIFICATE_PASSWORD }}
IOS_PROVISIONING_PROFILE: ${{ secrets.IOS_PROVISIONING_PROFILE }}
IOS_PROVISIONING_PROFILE_SHARE_EXTENSION: ${{ secrets.IOS_PROVISIONING_PROFILE_SHARE_EXTENSION }}
IOS_PROVISIONING_PROFILE_WIDGET_EXTENSION: ${{ secrets.IOS_PROVISIONING_PROFILE_WIDGET_EXTENSION }}
IOS_DEVELOPMENT_PROVISIONING_PROFILE: ${{ secrets.IOS_DEVELOPMENT_PROVISIONING_PROFILE }}
IOS_DEVELOPMENT_PROVISIONING_PROFILE_SHARE_EXTENSION: ${{ secrets.IOS_DEVELOPMENT_PROVISIONING_PROFILE_SHARE_EXTENSION }}
IOS_DEVELOPMENT_PROVISIONING_PROFILE_WIDGET_EXTENSION: ${{ secrets.IOS_DEVELOPMENT_PROVISIONING_PROFILE_WIDGET_EXTENSION }}
FASTLANE_TEAM_ID: ${{ secrets.FASTLANE_TEAM_ID }}

with:
ref: ${{ needs.bump_version.outputs.ref }}
environment: production

prepare_release:
runs-on: ubuntu-latest
@@ -123,7 +138,7 @@ jobs:
persist-credentials: false

- name: Download APK
uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
with:
name: release-apk-signed
github-token: ${{ steps.generate-token.outputs.token }}
.github/workflows/release-pr.yml (new file, vendored, 170 lines)
@@ -0,0 +1,170 @@
name: Manage release PR
on:
workflow_dispatch:
push:
branches:
- main

concurrency:
group: ${{ github.workflow }}
cancel-in-progress: true

permissions: {}

jobs:
bump:
runs-on: ubuntu-latest
steps:
- name: Generate a token
id: generate-token
uses: actions/create-github-app-token@67018539274d69449ef7c02e8e71183d1719ab42 # v2.1.4
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}

- name: Checkout
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
token: ${{ steps.generate-token.outputs.token }}
persist-credentials: true
ref: main

- name: Install uv
uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2

- name: Setup pnpm
uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 # v4.2.0

- name: Setup Node
uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
with:
node-version-file: './server/.nvmrc'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'

- name: Determine release type
id: bump-type
uses: ietf-tools/semver-action@c90370b2958652d71c06a3484129a4d423a6d8a8 # v1.11.0
with:
token: ${{ steps.generate-token.outputs.token }}

- name: Bump versions
env:
TYPE: ${{ steps.bump-type.outputs.bump }}
run: |
if [ "$TYPE" == "none" ]; then
exit 1 # TODO: Is there a cleaner way to abort the workflow?
fi
misc/release/pump-version.sh -s $TYPE -m true

- name: Manage Outline release document
id: outline
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
env:
OUTLINE_API_KEY: ${{ secrets.OUTLINE_API_KEY }}
NEXT_VERSION: ${{ steps.bump-type.outputs.next }}
with:
github-token: ${{ steps.generate-token.outputs.token }}
script: |
const fs = require('fs');

const outlineKey = process.env.OUTLINE_API_KEY;
const parentDocumentId = 'da856355-0844-43df-bd71-f8edce5382d9'
const collectionId = 'e2910656-714c-4871-8721-447d9353bd73';
const baseUrl = 'https://outline.immich.cloud';

const listResponse = await fetch(`${baseUrl}/api/documents.list`, {
method: 'POST',
headers: {
'Authorization': `Bearer ${outlineKey}`,
'Content-Type': 'application/json'
},
body: JSON.stringify({ parentDocumentId })
});

if (!listResponse.ok) {
throw new Error(`Outline list failed: ${listResponse.statusText}`);
}

const listData = await listResponse.json();
const allDocuments = listData.data || [];

const document = allDocuments.find(doc => doc.title === 'next');

let documentId;
let documentUrl;
let documentText;

if (!document) {
// Create new document
console.log('No existing document found. Creating new one...');
const notesTmpl = fs.readFileSync('misc/release/notes.tmpl', 'utf8');
const createResponse = await fetch(`${baseUrl}/api/documents.create`, {
method: 'POST',
headers: {
'Authorization': `Bearer ${outlineKey}`,
'Content-Type': 'application/json'
},
body: JSON.stringify({
title: 'next',
text: notesTmpl,
collectionId: collectionId,
parentDocumentId: parentDocumentId,
publish: true
})
});

if (!createResponse.ok) {
throw new Error(`Failed to create document: ${createResponse.statusText}`);
}

const createData = await createResponse.json();
documentId = createData.data.id;
const urlId = createData.data.urlId;
documentUrl = `${baseUrl}/doc/next-${urlId}`;
documentText = createData.data.text || '';
console.log(`Created new document: ${documentUrl}`);
} else {
documentId = document.id;
const docPath = document.url;
documentUrl = `${baseUrl}${docPath}`;
documentText = document.text || '';
console.log(`Found existing document: ${documentUrl}`);
}

// Generate GitHub release notes
console.log('Generating GitHub release notes...');
const releaseNotesResponse = await github.rest.repos.generateReleaseNotes({
owner: context.repo.owner,
repo: context.repo.repo,
tag_name: `${process.env.NEXT_VERSION}`,
});

// Combine the content
const changelog = `
# ${process.env.NEXT_VERSION}

${documentText}

${releaseNotesResponse.data.body}

---

`

const existingChangelog = fs.existsSync('CHANGELOG.md') ? fs.readFileSync('CHANGELOG.md', 'utf8') : '';
fs.writeFileSync('CHANGELOG.md', changelog + existingChangelog, 'utf8');

core.setOutput('document_url', documentUrl);

- name: Create PR
id: create-pr
uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
with:
token: ${{ steps.generate-token.outputs.token }}
commit-message: 'chore: release ${{ steps.bump-type.outputs.next }}'
title: 'chore: release ${{ steps.bump-type.outputs.next }}'
body: 'Release notes: ${{ steps.outline.outputs.document_url }}'
labels: 'changelog:skip'
branch: 'release/next'
draft: true
.github/workflows/test.yml (vendored, 3 lines changed)
@@ -382,6 +382,7 @@ jobs:
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
persist-credentials: false
submodules: 'recursive'
token: ${{ steps.token.outputs.token }}
- name: Setup pnpm
uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 # v4.2.0
@@ -562,7 +563,7 @@ jobs:
persist-credentials: false
token: ${{ steps.token.outputs.token }}
- name: Install uv
uses: astral-sh/setup-uv@2ddd2b9cb38ad8efd50337e8ab201519a34c9f24 # v7.1.1
uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2
- uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
# TODO: add caching when supported (https://github.com/actions/setup-python/pull/818)
# with:
Makefile (3 lines changed)
@@ -17,6 +17,9 @@ dev-docs:
e2e:
	@trap 'make e2e-down' EXIT; COMPOSE_BAKE=true docker compose -f ./e2e/docker-compose.yml up --remove-orphans

e2e-dev:
	@trap 'make e2e-down' EXIT; COMPOSE_BAKE=true docker compose -f ./e2e/docker-compose.dev.yml up --remove-orphans

e2e-update:
	@trap 'make e2e-down' EXIT; COMPOSE_BAKE=true docker compose -f ./e2e/docker-compose.yml up --build -V --remove-orphans
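The targets above are thin wrappers around docker compose, so the e2e stack can be started either way; a minimal sketch (the UPLOAD_LOCATION value is an assumption, it just needs to point at a writable directory used by the dev compose file):

```bash
# via the Makefile targets
make e2e       # docker compose -f ./e2e/docker-compose.yml up --remove-orphans
make e2e-dev   # docker compose -f ./e2e/docker-compose.dev.yml up --remove-orphans

# or directly, mirroring what the e2e-dev target runs
UPLOAD_LOCATION=./e2e-upload COMPOSE_BAKE=true \
  docker compose -f ./e2e/docker-compose.dev.yml up --remove-orphans
```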
cli/mise.toml (new file, 29 lines)
@@ -0,0 +1,29 @@
[tasks.install]
run = "pnpm install --filter @immich/cli --frozen-lockfile"

[tasks.build]
env._.path = "./node_modules/.bin"
run = "vite build"

[tasks.test]
env._.path = "./node_modules/.bin"
run = "vite"

[tasks.lint]
env._.path = "./node_modules/.bin"
run = "eslint \"src/**/*.ts\" --max-warnings 0"

[tasks."lint-fix"]
run = { task = "lint --fix" }

[tasks.format]
env._.path = "./node_modules/.bin"
run = "prettier --check ."

[tasks."format-fix"]
env._.path = "./node_modules/.bin"
run = "prettier --write ."

[tasks.check]
env._.path = "./node_modules/.bin"
run = "tsc --noEmit"
@@ -1,6 +1,6 @@
{
"name": "@immich/cli",
"version": "2.2.98",
"version": "2.2.101",
"description": "Command Line Interface (CLI) for Immich",
"type": "module",
"exports": "./dist/index.js",
@@ -20,7 +20,7 @@
"@types/lodash-es": "^4.17.12",
"@types/micromatch": "^4.0.9",
"@types/mock-fs": "^4.13.1",
"@types/node": "^22.18.12",
"@types/node": "^22.19.1",
"@vitest/coverage-v8": "^3.0.0",
"byte-size": "^9.0.0",
"cli-progress": "^3.12.0",
deployment/mise.toml (new file, 20 lines)
@@ -0,0 +1,20 @@
[tools]
terragrunt = "0.91.2"
opentofu = "1.10.6"

[tasks."tg:fmt"]
run = "terragrunt hclfmt"
description = "Format terragrunt files"

[tasks.tf]
run = "terragrunt run --all"
description = "Wrapper for terragrunt run-all"
dir = "{{cwd}}"

[tasks."tf:fmt"]
run = "tofu fmt -recursive tf/"
description = "Format terraform files"

[tasks."tf:init"]
run = { task = "tf init -- -reconfigure" }
dir = "{{cwd}}"
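The `tf` task wraps `terragrunt run --all` and is what the docs workflows above invoke through the `//deployment:tf` monorepo task path. A sketch of local usage, assuming the Cloudflare and state credentials are already exported:

```bash
cd deployment/modules/cloudflare/docs
mise run //deployment:tf apply            # terragrunt run --all apply
mise run //deployment:tf output -- -json  # terragrunt run --all output -json
```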
@@ -41,6 +41,7 @@ services:
- app-node_modules:/usr/src/app/node_modules
- sveltekit:/usr/src/app/web/.svelte-kit
- coverage:/usr/src/app/web/coverage
- ../plugins:/build/corePlugin
env_file:
- .env
environment:
@@ -83,7 +83,7 @@ services:
container_name: immich_prometheus
ports:
- 9090:9090
image: prom/prometheus@sha256:23031bfe0e74a13004252caaa74eccd0d62b6c6e7a04711d5b8bf5b7e113adc7
image: prom/prometheus@sha256:49214755b6153f90a597adcbff0252cc61069f8ab69ce8411285cd4a560e8038
volumes:
- ./prometheus.yml:/etc/prometheus/prometheus.yml
- prometheus-data:/prometheus
docs/docs/administration/maintenance-mode.md (new file, 18 lines)
@@ -0,0 +1,18 @@
# Maintenance Mode

Maintenance mode is used to perform administrative tasks such as restoring backups to Immich.

You can enter maintenance mode by either:

- Selecting "enable maintenance mode" in the administration system settings.
- Running the enable maintenance mode [administration command](./server-commands.md).

## Logging in during maintenance

Maintenance mode uses a separate login system which is handled automatically behind the scenes in most cases. Enabling maintenance mode in settings will automatically log you into maintenance mode when the server comes back up.

If you find that you've been logged out, you can:

- Open the logs for the Immich server and look for _"🚧 Immich is in maintenance mode, you can log in using the following URL:"_
- Run the enable maintenance mode [administration command](./server-commands.md) again; this will give you a new URL to log in with.
- Run the disable maintenance mode [administration command](./server-commands.md), then re-enter through system settings.
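A minimal sketch of toggling maintenance mode from the command line, assuming the default docker-compose container name `immich_server`:

```bash
# enter maintenance mode (prints a one-time login URL)
docker exec -it immich_server immich-admin enable-maintenance-mode
# ...perform the administrative task, e.g. restore a backup...
docker exec -it immich_server immich-admin disable-maintenance-mode
```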
@@ -10,16 +10,19 @@ Running with a pre-existing Postgres server can unlock powerful administrative f

## Prerequisites

You must install `pgvector` (`>= 0.7.0, < 1.0.0`), as it is a prerequisite for `vchord`.
You must install pgvector as it is a prerequisite for VectorChord.
The easiest way to do this on Debian/Ubuntu is by adding the [PostgreSQL Apt repository][pg-apt] and then
running `apt install postgresql-NN-pgvector`, where `NN` is your Postgres version (e.g., `16`).

You must install VectorChord into your instance of Postgres using their [instructions][vchord-install]. After installation, add `shared_preload_libraries = 'vchord.so'` to your `postgresql.conf`. If you already have some `shared_preload_libraries` set, you can separate each extension with a comma. For example, `shared_preload_libraries = 'pg_stat_statements, vchord.so'`.

:::note
Immich is known to work with Postgres versions `>= 14, < 18`.
:::note Supported versions
Immich is known to work with Postgres versions `>= 14, < 19`.

Make sure the installed version of VectorChord is compatible with your version of Immich. The current accepted range for VectorChord is `>= 0.3.0, < 0.5.0`.
VectorChord is known to work with pgvector versions `>= 0.7, < 0.9`.

The Immich server will check the VectorChord version on startup to ensure compatibility, and refuse to start if a compatible version is not found.
The current accepted range for VectorChord is `>= 0.3, < 0.6`.
:::

## Specifying the connection URL
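A sketch of that setup on Debian/Ubuntu with Postgres 16; the package name, config path, and the `CREATE EXTENSION` statement are illustrative assumptions rather than the canonical install steps:

```bash
# pgvector from the PostgreSQL Apt repository
sudo apt install postgresql-16-pgvector

# load VectorChord at server start (append, or merge with any existing shared_preload_libraries)
echo "shared_preload_libraries = 'vchord.so'" | sudo tee -a /etc/postgresql/16/main/postgresql.conf
sudo systemctl restart postgresql

# create the extension in the Immich database (CASCADE also pulls in pgvector)
sudo -u postgres psql -d immich -c 'CREATE EXTENSION IF NOT EXISTS vchord CASCADE;'
```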
@@ -2,17 +2,19 @@

The `immich-server` docker image comes preinstalled with an administrative CLI (`immich-admin`) that supports the following commands:

| Command                    | Description                                                    |
| -------------------------- | -------------------------------------------------------------- |
| `help`                     | Display help                                                   |
| `reset-admin-password`     | Reset the password for the admin user                          |
| `disable-password-login`   | Disable password login                                         |
| `enable-password-login`    | Enable password login                                          |
| `disable-maintenance-mode` | Disable maintenance mode                                       |
| `enable-maintenance-mode`  | Enable maintenance mode                                        |
| `enable-oauth-login`       | Enable OAuth login                                             |
| `disable-oauth-login`      | Disable OAuth login                                            |
| `list-users`               | List Immich users                                              |
| `version`                  | Print Immich version                                           |
| `change-media-location`    | Change database file paths to align with a new media location  |

## How to run a command

@@ -47,6 +49,23 @@ immich-admin enable-password-login
Password login has been enabled.
```

Disable Maintenance Mode

```
immich-admin disable-maintenance-mode
Maintenance mode has been disabled.
```

Enable Maintenance Mode

```
immich-admin enable-maintenance-mode
Maintenance mode has been enabled.

Log in using the following URL:
https://my.immich.app/maintenance?token=<token>
```

Enable OAuth login

```
@@ -12,3 +12,13 @@ pnpm run migrations:generate <migration-name>
3. Move the migration file to folder `./server/src/schema/migrations` in your code editor.

The server will automatically detect `*.ts` file changes and restart. Part of the server start-up process includes running any new migrations, so they will be applied immediately.

## Reverting a Migration

If you need to undo the most recently applied migration, for example while developing or testing schema changes, run:

```bash
pnpm run migrations:revert
```

This command rolls back the latest migration and brings the database schema back to its previous state.
@@ -5,7 +5,7 @@ sidebar_position: 2
# Setup

:::note
If there's a feature you're planning to work on, just give us a heads up in [Discord](https://discord.com/channels/979116623879368755/1071165397228855327) so we can:
If there's a feature you're planning to work on, just give us a heads up in [#contributing](https://discord.com/channels/979116623879368755/1071165397228855327) on [our Discord](https://discord.immich.app) so we can:

1. Let you know if it's something we would accept into Immich
2. Provide any guidance on how something like that would ideally be implemented
@@ -10,6 +10,16 @@ import MobileAppBackup from '/docs/partials/_mobile-app-backup.md';

<MobileAppDownload />

:::info Android verification
Below are the SHA-256 fingerprints for the certificates signing the Android applications.

- Play Store / GitHub releases:
  `86:C5:C4:55:DF:AF:49:85:92:3A:8F:35:AD:B3:1D:0C:9E:0B:95:7D:7F:94:C2:D2:AF:6A:24:38:AA:96:00:20`
- F-Droid releases:
  `FA:8B:43:95:F4:A6:47:71:A0:53:D1:C7:57:73:5F:A2:30:13:74:F5:3D:58:0D:D1:75:AA:F7:A1:35:72:9C:BF`

:::

:::info Beta Program
The beta release channel allows users to test upcoming changes before they are officially released. To join the channel, use the links below.
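One way to compare a downloaded APK against these fingerprints is `apksigner` from the Android SDK build-tools (the file name below is a placeholder):

```bash
apksigner verify --print-certs immich.apk | grep 'SHA-256'
```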
@@ -106,14 +106,14 @@ SELECT "user"."email", "asset"."type", COUNT(*) FROM "asset"

```sql title="Count by tag"
SELECT "t"."value" AS "tag_name", COUNT(*) AS "number_assets" FROM "tag" "t"
JOIN "tag_asset" "ta" ON "t"."id" = "ta"."tagsId" JOIN "asset" "a" ON "ta"."assetsId" = "a"."id"
JOIN "tag_asset" "ta" ON "t"."id" = "ta"."tagId" JOIN "asset" "a" ON "ta"."assetId" = "a"."id"
WHERE "a"."visibility" != 'hidden'
GROUP BY "t"."value" ORDER BY "number_assets" DESC;
```

```sql title="Count by tag (per user)"
SELECT "t"."value" AS "tag_name", "u"."email" as "user_email", COUNT(*) AS "number_assets" FROM "tag" "t"
JOIN "tag_asset" "ta" ON "t"."id" = "ta"."tagsId" JOIN "asset" "a" ON "ta"."assetsId" = "a"."id" JOIN "user" "u" ON "a"."ownerId" = "u"."id"
JOIN "tag_asset" "ta" ON "t"."id" = "ta"."tagId" JOIN "asset" "a" ON "ta"."assetId" = "a"."id" JOIN "user" "u" ON "a"."ownerId" = "u"."id"
WHERE "a"."visibility" != 'hidden'
GROUP BY "t"."value", "u"."email" ORDER BY "number_assets" DESC;
```
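These queries can be run against the Immich database container; a minimal sketch, assuming the default docker-compose names (`immich_postgres`, database `immich`, user `postgres`):

```bash
docker exec -i immich_postgres psql --dbname=immich --username=postgres <<'SQL'
SELECT "t"."value" AS "tag_name", COUNT(*) AS "number_assets"
FROM "tag" "t"
JOIN "tag_asset" "ta" ON "t"."id" = "ta"."tagId"
JOIN "asset" "a" ON "ta"."assetId" = "a"."id"
WHERE "a"."visibility" != 'hidden'
GROUP BY "t"."value" ORDER BY "number_assets" DESC;
SQL
```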
@@ -16,48 +16,76 @@ The default configuration looks like this:

```json
{
"ffmpeg": {
"crf": 23,
"threads": 0,
"preset": "ultrafast",
"targetVideoCodec": "h264",
"acceptedVideoCodecs": ["h264"],
"targetAudioCodec": "aac",
"acceptedAudioCodecs": ["aac", "mp3", "libopus", "pcm_s16le"],
"acceptedContainers": ["mov", "ogg", "webm"],
"targetResolution": "720",
"maxBitrate": "0",
"bframes": -1,
"refs": 0,
"gopSize": 0,
"temporalAQ": false,
"cqMode": "auto",
"twoPass": false,
"preferredHwDevice": "auto",
"transcode": "required",
"tonemap": "hable",
"accel": "disabled",
"accelDecode": false
},
"backup": {
"database": {
"enabled": true,
"cronExpression": "0 02 * * *",
"enabled": true,
"keepLastAmount": 14
}
},
"ffmpeg": {
"accel": "disabled",
"accelDecode": false,
"acceptedAudioCodecs": ["aac", "mp3", "libopus"],
"acceptedContainers": ["mov", "ogg", "webm"],
"acceptedVideoCodecs": ["h264"],
"bframes": -1,
"cqMode": "auto",
"crf": 23,
"gopSize": 0,
"maxBitrate": "0",
"preferredHwDevice": "auto",
"preset": "ultrafast",
"refs": 0,
"targetAudioCodec": "aac",
"targetResolution": "720",
"targetVideoCodec": "h264",
"temporalAQ": false,
"threads": 0,
"tonemap": "hable",
"transcode": "required",
"twoPass": false
},
"image": {
"colorspace": "p3",
"extractEmbedded": false,
"fullsize": {
"enabled": false,
"format": "jpeg",
"quality": 80
},
"preview": {
"format": "jpeg",
"quality": 80,
"size": 1440
},
"thumbnail": {
"format": "webp",
"quality": 80,
"size": 250
}
},
"job": {
"backgroundTask": {
"concurrency": 5
},
"smartSearch": {
"faceDetection": {
"concurrency": 2
},
"library": {
"concurrency": 5
},
"metadataExtraction": {
"concurrency": 5
},
"faceDetection": {
"concurrency": 2
"migration": {
"concurrency": 5
},
"notifications": {
"concurrency": 5
},
"ocr": {
"concurrency": 1
},
"search": {
"concurrency": 5
@@ -65,20 +93,23 @@ The default configuration looks like this:
"sidecar": {
"concurrency": 5
},
"library": {
"concurrency": 5
},
"migration": {
"concurrency": 5
"smartSearch": {
"concurrency": 2
},
"thumbnailGeneration": {
"concurrency": 3
},
"videoConversion": {
"concurrency": 1
}
},
"library": {
"scan": {
"cronExpression": "0 0 * * *",
"enabled": true
},
"notifications": {
"concurrency": 5
"watch": {
"enabled": false
}
},
"logging": {
@@ -86,8 +117,11 @@ The default configuration looks like this:
"level": "log"
},
"machineLearning": {
"enabled": true,
"urls": ["http://immich-machine-learning:3003"],
"availabilityChecks": {
"enabled": true,
"interval": 30000,
"timeout": 2000
},
"clip": {
"enabled": true,
"modelName": "ViT-B-32__openai"
@@ -96,27 +130,59 @@ The default configuration looks like this:
"enabled": true,
"maxDistance": 0.01
},
"enabled": true,
"facialRecognition": {
"enabled": true,
"modelName": "buffalo_l",
"minScore": 0.7,
"maxDistance": 0.5,
"minFaces": 3
}
"minFaces": 3,
"minScore": 0.7,
"modelName": "buffalo_l"
},
"ocr": {
"enabled": true,
"maxResolution": 736,
"minDetectionScore": 0.5,
"minRecognitionScore": 0.8,
"modelName": "PP-OCRv5_mobile"
},
"urls": ["http://immich-machine-learning:3003"]
},
"map": {
"darkStyle": "https://tiles.immich.cloud/v1/style/dark.json",
"enabled": true,
"lightStyle": "https://tiles.immich.cloud/v1/style/light.json",
"darkStyle": "https://tiles.immich.cloud/v1/style/dark.json"
},
"reverseGeocoding": {
"enabled": true
"lightStyle": "https://tiles.immich.cloud/v1/style/light.json"
},
"metadata": {
"faces": {
"import": false
}
},
"newVersionCheck": {
"enabled": true
},
"nightlyTasks": {
"clusterNewFaces": true,
"databaseCleanup": true,
"generateMemories": true,
"missingThumbnails": true,
"startTime": "00:00",
"syncQuotaUsage": true
},
"notifications": {
"smtp": {
"enabled": false,
"from": "",
"replyTo": "",
"transport": {
"host": "",
"ignoreCert": false,
"password": "",
"port": 587,
"secure": false,
"username": ""
}
}
},
"oauth": {
"autoLaunch": false,
"autoRegister": true,
@@ -128,70 +194,44 @@ The default configuration looks like this:
"issuerUrl": "",
"mobileOverrideEnabled": false,
"mobileRedirectUri": "",
"profileSigningAlgorithm": "none",
"roleClaim": "immich_role",
"scope": "openid email profile",
"signingAlgorithm": "RS256",
"profileSigningAlgorithm": "none",
"storageLabelClaim": "preferred_username",
"storageQuotaClaim": "immich_quota"
"storageQuotaClaim": "immich_quota",
"timeout": 30000,
"tokenEndpointAuthMethod": "client_secret_post"
},
"passwordLogin": {
"enabled": true
},
"reverseGeocoding": {
"enabled": true
},
"server": {
"externalDomain": "",
"loginPageMessage": "",
"publicUsers": true
},
"storageTemplate": {
"enabled": false,
"hashVerificationEnabled": true,
"template": "{{y}}/{{y}}-{{MM}}-{{dd}}/{{filename}}"
},
"image": {
"thumbnail": {
"format": "webp",
"size": 250,
"quality": 80
},
"preview": {
"format": "jpeg",
"size": 1440,
"quality": 80
},
"colorspace": "p3",
"extractEmbedded": false
},
"newVersionCheck": {
"enabled": true
},
"trash": {
"enabled": true,
"days": 30
"templates": {
"email": {
"albumInviteTemplate": "",
"albumUpdateTemplate": "",
"welcomeTemplate": ""
}
},
"theme": {
"customCss": ""
},
"library": {
"scan": {
"enabled": true,
"cronExpression": "0 0 * * *"
},
"watch": {
"enabled": false
}
},
"server": {
"externalDomain": "",
"loginPageMessage": ""
},
"notifications": {
"smtp": {
"enabled": false,
"from": "",
"replyTo": "",
"transport": {
"ignoreCert": false,
"host": "",
"port": 587,
"username": "",
"password": ""
}
}
"trash": {
"days": 30,
"enabled": true
},
"user": {
"deleteDelay": 7
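The same settings can also be supplied as a static config file instead of through the UI; a minimal sketch, assuming the documented `IMMICH_CONFIG_FILE` variable and an illustrative path:

```bash
# write a (partial) config based on the defaults shown above, then point the server at it
cat > immich-config.json <<'JSON'
{ "trash": { "enabled": true, "days": 30 } }
JSON
export IMMICH_CONFIG_FILE=/usr/src/app/immich-config.json   # path is an assumption
```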
@@ -149,29 +149,31 @@ Redis (Sentinel) URL example JSON before encoding:

## Machine Learning

| Variable | Description | Default | Containers |
| :--- | :--- | :---: | :--- |
| `MACHINE_LEARNING_MODEL_TTL` | Inactivity time (s) before a model is unloaded (disabled if \<= 0) | `300` | machine learning |
| `MACHINE_LEARNING_MODEL_TTL_POLL_S` | Interval (s) between checks for the model TTL (disabled if \<= 0) | `10` | machine learning |
| `MACHINE_LEARNING_CACHE_FOLDER` | Directory where models are downloaded | `/cache` | machine learning |
| `MACHINE_LEARNING_REQUEST_THREADS`<sup>\*1</sup> | Thread count of the request thread pool (disabled if \<= 0) | number of CPU cores | machine learning |
| `MACHINE_LEARNING_MODEL_INTER_OP_THREADS` | Number of parallel model operations | `1` | machine learning |
| `MACHINE_LEARNING_MODEL_INTRA_OP_THREADS` | Number of threads for each model operation | `2` | machine learning |
| `MACHINE_LEARNING_WORKERS`<sup>\*2</sup> | Number of worker processes to spawn | `1` | machine learning |
| `MACHINE_LEARNING_HTTP_KEEPALIVE_TIMEOUT_S`<sup>\*3</sup> | HTTP Keep-alive time in seconds | `2` | machine learning |
| `MACHINE_LEARNING_WORKER_TIMEOUT` | Maximum time (s) of unresponsiveness before a worker is killed | `120` (`300` if using OpenVINO) | machine learning |
| `MACHINE_LEARNING_PRELOAD__CLIP__TEXTUAL` | Comma-separated list of (textual) CLIP model(s) to preload and cache | | machine learning |
| `MACHINE_LEARNING_PRELOAD__CLIP__VISUAL` | Comma-separated list of (visual) CLIP model(s) to preload and cache | | machine learning |
| `MACHINE_LEARNING_PRELOAD__FACIAL_RECOGNITION__RECOGNITION` | Comma-separated list of (recognition) facial recognition model(s) to preload and cache | | machine learning |
| `MACHINE_LEARNING_PRELOAD__FACIAL_RECOGNITION__DETECTION` | Comma-separated list of (detection) facial recognition model(s) to preload and cache | | machine learning |
| `MACHINE_LEARNING_ANN` | Enable ARM-NN hardware acceleration if supported | `True` | machine learning |
| `MACHINE_LEARNING_ANN_FP16_TURBO` | Execute operations in FP16 precision: increasing speed, reducing precision (applies only to ARM-NN) | `False` | machine learning |
| `MACHINE_LEARNING_ANN_TUNING_LEVEL` | ARM-NN GPU tuning level (1: rapid, 2: normal, 3: exhaustive) | `2` | machine learning |
| `MACHINE_LEARNING_DEVICE_IDS`<sup>\*4</sup> | Device IDs to use in multi-GPU environments | `0` | machine learning |
| `MACHINE_LEARNING_MAX_BATCH_SIZE__FACIAL_RECOGNITION` | Set the maximum number of faces that will be processed at once by the facial recognition model | None (`1` if using OpenVINO) | machine learning |
| `MACHINE_LEARNING_MAX_BATCH_SIZE__OCR` | Set the maximum number of boxes that will be processed at once by the OCR model | `6` | machine learning |
| `MACHINE_LEARNING_RKNN` | Enable RKNN hardware acceleration if supported | `True` | machine learning |
| `MACHINE_LEARNING_RKNN_THREADS` | How many threads of RKNN runtime should be spun up while inferencing | `1` | machine learning |
| `MACHINE_LEARNING_MODEL_ARENA` | Pre-allocates CPU memory to avoid memory fragmentation | `true` | machine learning |
| `MACHINE_LEARNING_OPENVINO_PRECISION` | If set to FP16, uses half-precision floating-point operations for faster inference with reduced accuracy (one of [`FP16`, `FP32`], applies only to OpenVINO) | `FP32` | machine learning |

\*1: It is recommended to begin with this parameter when changing the concurrency levels of the machine learning service and then tune the other ones.
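For illustration, a few of these variables overridden on the machine-learning container (the chosen values are assumptions; the CLIP model name matches the default shown in the configuration above):

```bash
docker run --rm \
  -e MACHINE_LEARNING_WORKERS=2 \
  -e MACHINE_LEARNING_MODEL_TTL=600 \
  -e MACHINE_LEARNING_PRELOAD__CLIP__TEXTUAL=ViT-B-32__openai \
  ghcr.io/immich-app/immich-machine-learning:release
```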
docs/mise.toml (new file, 25 lines)
@@ -0,0 +1,25 @@
[tasks.install]
run = "pnpm install --filter documentation --frozen-lockfile"

[tasks.start]
env._.path = "./node_modules/.bin"
run = "docusaurus --port 3005"

[tasks.build]
env._.path = "./node_modules/.bin"
run = [
"jq -c < ../open-api/immich-openapi-specs.json > ./static/openapi.json || exit 0",
"docusaurus build",
]

[tasks.preview]
env._.path = "./node_modules/.bin"
run = "docusaurus serve"

[tasks.format]
env._.path = "./node_modules/.bin"
run = "prettier --check ."

[tasks."format-fix"]
env._.path = "./node_modules/.bin"
run = "prettier --write ."
docs/static/archived-versions.json (vendored, 12 lines changed)
@@ -1,4 +1,16 @@
[
{
"label": "v2.2.3",
"url": "https://docs.v2.2.3.archive.immich.app"
},
{
"label": "v2.2.2",
"url": "https://docs.v2.2.2.archive.immich.app"
},
{
"label": "v2.2.1",
"url": "https://docs.v2.2.1.archive.immich.app"
},
{
"label": "v2.2.0",
"url": "https://docs.v2.2.0.archive.immich.app"
e2e/docker-compose.dev.yml (new file, 105 lines)
@@ -0,0 +1,105 @@
name: immich-e2e

services:
immich-server:
container_name: immich-e2e-server
command: ['immich-dev']
image: immich-server-dev:latest
build:
context: ../
dockerfile: server/Dockerfile.dev
target: dev
environment:
- DB_HOSTNAME=database
- DB_USERNAME=postgres
- DB_PASSWORD=postgres
- DB_DATABASE_NAME=immich
- IMMICH_MACHINE_LEARNING_ENABLED=false
- IMMICH_TELEMETRY_INCLUDE=all
- IMMICH_ENV=testing
- IMMICH_PORT=2285
- IMMICH_IGNORE_MOUNT_CHECK_ERRORS=true
volumes:
- ./test-assets:/test-assets
- ..:/usr/src/app
- ${UPLOAD_LOCATION}/photos:/data
- /etc/localtime:/etc/localtime:ro
- pnpm-store:/usr/src/app/.pnpm-store
- server-node_modules:/usr/src/app/server/node_modules
- web-node_modules:/usr/src/app/web/node_modules
- github-node_modules:/usr/src/app/.github/node_modules
- cli-node_modules:/usr/src/app/cli/node_modules
- docs-node_modules:/usr/src/app/docs/node_modules
- e2e-node_modules:/usr/src/app/e2e/node_modules
- sdk-node_modules:/usr/src/app/open-api/typescript-sdk/node_modules
- app-node_modules:/usr/src/app/node_modules
- sveltekit:/usr/src/app/web/.svelte-kit
- coverage:/usr/src/app/web/coverage
- ../plugins:/build/corePlugin
depends_on:
redis:
condition: service_started
database:
condition: service_healthy

immich-web:
container_name: immich-e2e-web
image: immich-web-dev:latest
build:
context: ../
dockerfile: server/Dockerfile.dev
target: dev
command: ['immich-web']
ports:
- 2285:3000
environment:
- IMMICH_SERVER_URL=http://immich-server:2285/
volumes:
- ..:/usr/src/app
- pnpm-store:/usr/src/app/.pnpm-store
- server-node_modules:/usr/src/app/server/node_modules
- web-node_modules:/usr/src/app/web/node_modules
- github-node_modules:/usr/src/app/.github/node_modules
- cli-node_modules:/usr/src/app/cli/node_modules
- docs-node_modules:/usr/src/app/docs/node_modules
- e2e-node_modules:/usr/src/app/e2e/node_modules
- sdk-node_modules:/usr/src/app/open-api/typescript-sdk/node_modules
- app-node_modules:/usr/src/app/node_modules
- sveltekit:/usr/src/app/web/.svelte-kit
- coverage:/usr/src/app/web/coverage
restart: unless-stopped

redis:
image: redis:6.2-alpine@sha256:37e002448575b32a599109664107e374c8709546905c372a34d64919043b9ceb

database:
image: ghcr.io/immich-app/postgres:14-vectorchord0.3.0@sha256:6f3e9d2c2177af16c2988ff71425d79d89ca630ec2f9c8db03209ab716542338
command: -c fsync=off -c shared_preload_libraries=vchord.so -c config_file=/var/lib/postgresql/data/postgresql.conf
environment:
POSTGRES_PASSWORD: postgres
POSTGRES_USER: postgres
POSTGRES_DB: immich
ports:
- 5435:5432
healthcheck:
test: ['CMD-SHELL', 'pg_isready -U postgres -d immich']
interval: 1s
timeout: 5s
retries: 30
start_period: 10s

volumes:
model-cache:
prometheus-data:
grafana-data:
pnpm-store:
server-node_modules:
web-node_modules:
github-node_modules:
cli-node_modules:
docs-node_modules:
e2e-node_modules:
sdk-node_modules:
app-node_modules:
sveltekit:
coverage:
@@ -35,7 +35,7 @@ services:
      - 2285:2285

  redis:
    image: redis:6.2-alpine@sha256:77697a75da9f94e9357b61fcaf8345f69e3d9d32e9d15032c8415c21263977dc
    image: redis:6.2-alpine@sha256:37e002448575b32a599109664107e374c8709546905c372a34d64919043b9ceb

  database:
    image: ghcr.io/immich-app/postgres:14-vectorchord0.3.0@sha256:6f3e9d2c2177af16c2988ff71425d79d89ca630ec2f9c8db03209ab716542338
29
e2e/mise.toml
Normal file
@@ -0,0 +1,29 @@
[tasks.install]
run = "pnpm install --filter immich-e2e --frozen-lockfile"

[tasks.test]
env._.path = "./node_modules/.bin"
run = "vitest --run"

[tasks."test-web"]
env._.path = "./node_modules/.bin"
run = "playwright test"

[tasks.format]
env._.path = "./node_modules/.bin"
run = "prettier --check ."

[tasks."format-fix"]
env._.path = "./node_modules/.bin"
run = "prettier --write ."

[tasks.lint]
env._.path = "./node_modules/.bin"
run = "eslint \"src/**/*.ts\" --max-warnings 0"

[tasks."lint-fix"]
run = { task = "lint --fix" }

[tasks.check]
env._.path = "./node_modules/.bin"
run = "tsc --noEmit"
@@ -1,6 +1,6 @@
{
  "name": "immich-e2e",
  "version": "2.2.0",
  "version": "2.2.3",
  "description": "",
  "main": "index.js",
  "type": "module",
@@ -25,7 +25,7 @@
    "@playwright/test": "^1.44.1",
    "@socket.io/component-emitter": "^3.1.2",
    "@types/luxon": "^3.4.2",
    "@types/node": "^22.18.12",
    "@types/node": "^22.19.1",
    "@types/oidc-provider": "^9.0.0",
    "@types/pg": "^8.15.1",
    "@types/pngjs": "^6.0.4",
@@ -15,7 +15,6 @@ import { DateTime } from 'luxon';
|
||||
import { randomBytes } from 'node:crypto';
|
||||
import { readFile, writeFile } from 'node:fs/promises';
|
||||
import { basename, join } from 'node:path';
|
||||
import sharp from 'sharp';
|
||||
import { Socket } from 'socket.io-client';
|
||||
import { createUserDto, uuidDto } from 'src/fixtures';
|
||||
import { makeRandomImage } from 'src/generators';
|
||||
@@ -41,40 +40,6 @@ const today = DateTime.fromObject({
|
||||
}) as DateTime<true>;
|
||||
const yesterday = today.minus({ days: 1 });
|
||||
|
||||
const createTestImageWithExif = async (filename: string, exifData: Record<string, any>) => {
|
||||
// Generate unique color to ensure different checksums for each image
|
||||
const r = Math.floor(Math.random() * 256);
|
||||
const g = Math.floor(Math.random() * 256);
|
||||
const b = Math.floor(Math.random() * 256);
|
||||
|
||||
// Create a 100x100 solid color JPEG using Sharp
|
||||
const imageBytes = await sharp({
|
||||
create: {
|
||||
width: 100,
|
||||
height: 100,
|
||||
channels: 3,
|
||||
background: { r, g, b },
|
||||
},
|
||||
})
|
||||
.jpeg({ quality: 90 })
|
||||
.toBuffer();
|
||||
|
||||
// Add random suffix to filename to avoid collisions
|
||||
const uniqueFilename = filename.replace('.jpg', `-${randomBytes(4).toString('hex')}.jpg`);
|
||||
const filepath = join(tempDir, uniqueFilename);
|
||||
await writeFile(filepath, imageBytes);
|
||||
|
||||
// Filter out undefined values before writing EXIF
|
||||
const cleanExifData = Object.fromEntries(Object.entries(exifData).filter(([, value]) => value !== undefined));
|
||||
|
||||
await exiftool.write(filepath, cleanExifData);
|
||||
|
||||
// Re-read the image bytes after EXIF has been written
|
||||
const finalImageBytes = await readFile(filepath);
|
||||
|
||||
return { filepath, imageBytes: finalImageBytes, filename: uniqueFilename };
|
||||
};
|
||||
|
||||
describe('/asset', () => {
|
||||
let admin: LoginResponseDto;
|
||||
let websocket: Socket;
|
||||
@@ -1249,411 +1214,6 @@ describe('/asset', () => {
|
||||
});
|
||||
});
|
||||
|
||||
describe('EXIF metadata extraction', () => {
|
||||
describe('Additional date tag extraction', () => {
|
||||
describe('Date-time vs time-only tag handling', () => {
|
||||
it('should fall back to file timestamps when only time-only tags are available', async () => {
|
||||
const { imageBytes, filename } = await createTestImageWithExif('time-only-fallback.jpg', {
|
||||
TimeCreated: '2023:11:15 14:30:00', // Time-only tag, should not be used for dateTimeOriginal
|
||||
// Exclude all date-time tags to force fallback to file timestamps
|
||||
SubSecDateTimeOriginal: undefined,
|
||||
DateTimeOriginal: undefined,
|
||||
SubSecCreateDate: undefined,
|
||||
SubSecMediaCreateDate: undefined,
|
||||
CreateDate: undefined,
|
||||
MediaCreateDate: undefined,
|
||||
CreationDate: undefined,
|
||||
DateTimeCreated: undefined,
|
||||
GPSDateTime: undefined,
|
||||
DateTimeUTC: undefined,
|
||||
SonyDateTime2: undefined,
|
||||
GPSDateStamp: undefined,
|
||||
});
|
||||
|
||||
const oldDate = new Date('2020-01-01T00:00:00.000Z');
|
||||
const asset = await utils.createAsset(admin.accessToken, {
|
||||
assetData: {
|
||||
filename,
|
||||
bytes: imageBytes,
|
||||
},
|
||||
fileCreatedAt: oldDate.toISOString(),
|
||||
fileModifiedAt: oldDate.toISOString(),
|
||||
});
|
||||
|
||||
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
|
||||
|
||||
const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });
|
||||
|
||||
expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
|
||||
// Should fall back to file timestamps, which we set to 2020-01-01
|
||||
expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
|
||||
new Date('2020-01-01T00:00:00.000Z').getTime(),
|
||||
);
|
||||
});
|
||||
|
||||
it('should prefer DateTimeOriginal over time-only tags', async () => {
|
||||
const { imageBytes, filename } = await createTestImageWithExif('datetime-over-time.jpg', {
|
||||
DateTimeOriginal: '2023:10:10 10:00:00', // Should be preferred
|
||||
TimeCreated: '2023:11:15 14:30:00', // Should be ignored (time-only)
|
||||
});
|
||||
|
||||
const asset = await utils.createAsset(admin.accessToken, {
|
||||
assetData: {
|
||||
filename,
|
||||
bytes: imageBytes,
|
||||
},
|
||||
});
|
||||
|
||||
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
|
||||
|
||||
const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });
|
||||
|
||||
expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
|
||||
// Should use DateTimeOriginal, not TimeCreated
|
||||
expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
|
||||
new Date('2023-10-10T10:00:00.000Z').getTime(),
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('GPSDateTime tag extraction', () => {
|
||||
it('should extract GPSDateTime with GPS coordinates', async () => {
|
||||
const { imageBytes, filename } = await createTestImageWithExif('gps-datetime.jpg', {
|
||||
GPSDateTime: '2023:11:15 12:30:00Z',
|
||||
GPSLatitude: 37.7749,
|
||||
GPSLongitude: -122.4194,
|
||||
// Exclude other date tags
|
||||
SubSecDateTimeOriginal: undefined,
|
||||
DateTimeOriginal: undefined,
|
||||
SubSecCreateDate: undefined,
|
||||
SubSecMediaCreateDate: undefined,
|
||||
CreateDate: undefined,
|
||||
MediaCreateDate: undefined,
|
||||
CreationDate: undefined,
|
||||
DateTimeCreated: undefined,
|
||||
TimeCreated: undefined,
|
||||
});
|
||||
|
||||
const asset = await utils.createAsset(admin.accessToken, {
|
||||
assetData: {
|
||||
filename,
|
||||
bytes: imageBytes,
|
||||
},
|
||||
});
|
||||
|
||||
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
|
||||
|
||||
const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });
|
||||
|
||||
expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
|
||||
expect(assetInfo.exifInfo?.latitude).toBeCloseTo(37.7749, 4);
|
||||
expect(assetInfo.exifInfo?.longitude).toBeCloseTo(-122.4194, 4);
|
||||
expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
|
||||
new Date('2023-11-15T12:30:00.000Z').getTime(),
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('CreateDate tag extraction', () => {
|
||||
it('should extract CreateDate when available', async () => {
|
||||
const { imageBytes, filename } = await createTestImageWithExif('create-date.jpg', {
|
||||
CreateDate: '2023:11:15 10:30:00',
|
||||
// Exclude other higher priority date tags
|
||||
SubSecDateTimeOriginal: undefined,
|
||||
DateTimeOriginal: undefined,
|
||||
SubSecCreateDate: undefined,
|
||||
SubSecMediaCreateDate: undefined,
|
||||
MediaCreateDate: undefined,
|
||||
CreationDate: undefined,
|
||||
DateTimeCreated: undefined,
|
||||
TimeCreated: undefined,
|
||||
GPSDateTime: undefined,
|
||||
});
|
||||
|
||||
const asset = await utils.createAsset(admin.accessToken, {
|
||||
assetData: {
|
||||
filename,
|
||||
bytes: imageBytes,
|
||||
},
|
||||
});
|
||||
|
||||
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
|
||||
|
||||
const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });
|
||||
|
||||
expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
|
||||
expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
|
||||
new Date('2023-11-15T10:30:00.000Z').getTime(),
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('GPSDateStamp tag extraction', () => {
|
||||
it('should fall back to file timestamps when only date-only tags are available', async () => {
|
||||
const { imageBytes, filename } = await createTestImageWithExif('gps-datestamp.jpg', {
|
||||
GPSDateStamp: '2023:11:15', // Date-only tag, should not be used for dateTimeOriginal
|
||||
// Note: NOT including GPSTimeStamp to avoid automatic GPSDateTime creation
|
||||
GPSLatitude: 51.5074,
|
||||
GPSLongitude: -0.1278,
|
||||
// Explicitly exclude all testable date-time tags to force fallback to file timestamps
|
||||
DateTimeOriginal: undefined,
|
||||
CreateDate: undefined,
|
||||
CreationDate: undefined,
|
||||
GPSDateTime: undefined,
|
||||
});
|
||||
|
||||
const oldDate = new Date('2020-01-01T00:00:00.000Z');
|
||||
const asset = await utils.createAsset(admin.accessToken, {
|
||||
assetData: {
|
||||
filename,
|
||||
bytes: imageBytes,
|
||||
},
|
||||
fileCreatedAt: oldDate.toISOString(),
|
||||
fileModifiedAt: oldDate.toISOString(),
|
||||
});
|
||||
|
||||
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
|
||||
|
||||
const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });
|
||||
|
||||
expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
|
||||
expect(assetInfo.exifInfo?.latitude).toBeCloseTo(51.5074, 4);
|
||||
expect(assetInfo.exifInfo?.longitude).toBeCloseTo(-0.1278, 4);
|
||||
// Should fall back to file timestamps, which we set to 2020-01-01
|
||||
expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
|
||||
new Date('2020-01-01T00:00:00.000Z').getTime(),
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
/*
 * NOTE: The following EXIF date tags are NOT effectively usable with JPEG test files:
 *
 * NOT WRITABLE to JPEG:
 * - MediaCreateDate: Can be read from video files but not written to JPEG
 * - DateTimeCreated: Read-only tag in JPEG format
 * - DateTimeUTC: Cannot be written to JPEG files
 * - SonyDateTime2: Proprietary Sony tag, not writable to JPEG
 * - SubSecMediaCreateDate: Tag not defined for JPEG format
 * - SourceImageCreateTime: Non-standard insta360 tag, not writable to JPEG
 *
 * WRITABLE but NOT READABLE from JPEG:
 * - SubSecDateTimeOriginal: Can be written but not read back from JPEG
 * - SubSecCreateDate: Can be written but not read back from JPEG
 *
 * EFFECTIVELY TESTABLE TAGS (writable and readable):
 * - DateTimeOriginal ✓
 * - CreateDate ✓
 * - CreationDate ✓
 * - GPSDateTime ✓
 *
 * The metadata service correctly handles non-readable tags and will fall back to
 * file timestamps when only non-readable tags are present.
 */
|
||||
describe('Date tag priority order', () => {
|
||||
it('should respect the complete date tag priority order', async () => {
|
||||
// Test cases using only EFFECTIVELY TESTABLE tags (writable AND readable from JPEG)
|
||||
const testCases = [
|
||||
{
|
||||
name: 'DateTimeOriginal has highest priority among testable tags',
|
||||
exifData: {
|
||||
DateTimeOriginal: '2023:04:04 04:00:00', // TESTABLE - highest priority among readable tags
|
||||
CreateDate: '2023:05:05 05:00:00', // TESTABLE
|
||||
CreationDate: '2023:07:07 07:00:00', // TESTABLE
|
||||
GPSDateTime: '2023:10:10 10:00:00', // TESTABLE
|
||||
},
|
||||
expectedDate: '2023-04-04T04:00:00.000Z',
|
||||
},
|
||||
{
|
||||
name: 'CreationDate when DateTimeOriginal missing',
|
||||
exifData: {
|
||||
CreationDate: '2023:05:05 05:00:00', // TESTABLE
|
||||
CreateDate: '2023:07:07 07:00:00', // TESTABLE
|
||||
GPSDateTime: '2023:10:10 10:00:00', // TESTABLE
|
||||
},
|
||||
expectedDate: '2023-05-05T05:00:00.000Z',
|
||||
},
|
||||
{
|
||||
name: 'CreationDate when standard EXIF tags missing',
|
||||
exifData: {
|
||||
CreationDate: '2023:07:07 07:00:00', // TESTABLE
|
||||
GPSDateTime: '2023:10:10 10:00:00', // TESTABLE
|
||||
},
|
||||
expectedDate: '2023-07-07T07:00:00.000Z',
|
||||
},
|
||||
{
|
||||
name: 'GPSDateTime when no other testable date tags present',
|
||||
exifData: {
|
||||
GPSDateTime: '2023:10:10 10:00:00', // TESTABLE
|
||||
Make: 'SONY',
|
||||
},
|
||||
expectedDate: '2023-10-10T10:00:00.000Z',
|
||||
},
|
||||
];
|
||||
|
||||
for (const testCase of testCases) {
|
||||
const { imageBytes, filename } = await createTestImageWithExif(
|
||||
`${testCase.name.replaceAll(/\s+/g, '-').toLowerCase()}.jpg`,
|
||||
testCase.exifData,
|
||||
);
|
||||
|
||||
const asset = await utils.createAsset(admin.accessToken, {
|
||||
assetData: {
|
||||
filename,
|
||||
bytes: imageBytes,
|
||||
},
|
||||
});
|
||||
|
||||
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
|
||||
|
||||
const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });
|
||||
|
||||
expect(assetInfo.exifInfo?.dateTimeOriginal, `Failed for: ${testCase.name}`).toBeDefined();
|
||||
expect(
|
||||
new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime(),
|
||||
`Date mismatch for: ${testCase.name}`,
|
||||
).toBe(new Date(testCase.expectedDate).getTime());
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe('Edge cases for date tag handling', () => {
|
||||
it('should fall back to file timestamps with GPSDateStamp alone', async () => {
|
||||
const { imageBytes, filename } = await createTestImageWithExif('gps-datestamp-only.jpg', {
|
||||
GPSDateStamp: '2023:08:08', // Date-only tag, should not be used for dateTimeOriginal
|
||||
// Intentionally no GPSTimeStamp
|
||||
// Exclude all other date tags
|
||||
SubSecDateTimeOriginal: undefined,
|
||||
DateTimeOriginal: undefined,
|
||||
SubSecCreateDate: undefined,
|
||||
SubSecMediaCreateDate: undefined,
|
||||
CreateDate: undefined,
|
||||
MediaCreateDate: undefined,
|
||||
CreationDate: undefined,
|
||||
DateTimeCreated: undefined,
|
||||
TimeCreated: undefined,
|
||||
GPSDateTime: undefined,
|
||||
DateTimeUTC: undefined,
|
||||
});
|
||||
|
||||
const oldDate = new Date('2020-01-01T00:00:00.000Z');
|
||||
const asset = await utils.createAsset(admin.accessToken, {
|
||||
assetData: {
|
||||
filename,
|
||||
bytes: imageBytes,
|
||||
},
|
||||
fileCreatedAt: oldDate.toISOString(),
|
||||
fileModifiedAt: oldDate.toISOString(),
|
||||
});
|
||||
|
||||
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
|
||||
|
||||
const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });
|
||||
|
||||
expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
|
||||
// Should fall back to file timestamps, which we set to 2020-01-01
|
||||
expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
|
||||
new Date('2020-01-01T00:00:00.000Z').getTime(),
|
||||
);
|
||||
});
|
||||
|
||||
it('should handle all testable date tags present to verify complete priority order', async () => {
|
||||
const { imageBytes, filename } = await createTestImageWithExif('all-testable-date-tags.jpg', {
|
||||
// All TESTABLE date tags to JPEG format (writable AND readable)
|
||||
DateTimeOriginal: '2023:04:04 04:00:00', // TESTABLE - highest priority among readable tags
|
||||
CreateDate: '2023:05:05 05:00:00', // TESTABLE
|
||||
CreationDate: '2023:07:07 07:00:00', // TESTABLE
|
||||
GPSDateTime: '2023:10:10 10:00:00', // TESTABLE
|
||||
// Note: Excluded non-testable tags:
|
||||
// SubSec tags: writable but not readable from JPEG
|
||||
// Non-writable tags: MediaCreateDate, DateTimeCreated, DateTimeUTC, SonyDateTime2, etc.
|
||||
// Time-only/date-only tags: already excluded from EXIF_DATE_TAGS
|
||||
});
|
||||
|
||||
const asset = await utils.createAsset(admin.accessToken, {
|
||||
assetData: {
|
||||
filename,
|
||||
bytes: imageBytes,
|
||||
},
|
||||
});
|
||||
|
||||
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
|
||||
|
||||
const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });
|
||||
|
||||
expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
|
||||
// Should use DateTimeOriginal as it has the highest priority among testable tags
|
||||
expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
|
||||
new Date('2023-04-04T04:00:00.000Z').getTime(),
|
||||
);
|
||||
});
|
||||
|
||||
it('should use CreationDate when SubSec tags are missing', async () => {
|
||||
const { imageBytes, filename } = await createTestImageWithExif('creation-date-priority.jpg', {
|
||||
CreationDate: '2023:07:07 07:00:00', // WRITABLE
|
||||
GPSDateTime: '2023:10:10 10:00:00', // WRITABLE
|
||||
// Note: DateTimeCreated, DateTimeUTC, SonyDateTime2 are NOT writable to JPEG
|
||||
// Note: TimeCreated and GPSDateStamp are excluded from EXIF_DATE_TAGS (time-only/date-only)
|
||||
// Exclude SubSec and standard EXIF tags
|
||||
SubSecDateTimeOriginal: undefined,
|
||||
DateTimeOriginal: undefined,
|
||||
SubSecCreateDate: undefined,
|
||||
CreateDate: undefined,
|
||||
});
|
||||
|
||||
const asset = await utils.createAsset(admin.accessToken, {
|
||||
assetData: {
|
||||
filename,
|
||||
bytes: imageBytes,
|
||||
},
|
||||
});
|
||||
|
||||
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
|
||||
|
||||
const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });
|
||||
|
||||
expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
|
||||
// Should use CreationDate when available
|
||||
expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
|
||||
new Date('2023-07-07T07:00:00.000Z').getTime(),
|
||||
);
|
||||
});
|
||||
|
||||
it('should skip invalid date formats and use next valid tag', async () => {
|
||||
const { imageBytes, filename } = await createTestImageWithExif('invalid-date-handling.jpg', {
|
||||
// Note: Testing invalid date handling with only WRITABLE tags
|
||||
GPSDateTime: '2023:10:10 10:00:00', // WRITABLE - Valid date
|
||||
CreationDate: '2023:13:13 13:00:00', // WRITABLE - Valid date
|
||||
// Note: TimeCreated excluded (time-only), DateTimeCreated not writable to JPEG
|
||||
// Exclude other date tags
|
||||
SubSecDateTimeOriginal: undefined,
|
||||
DateTimeOriginal: undefined,
|
||||
SubSecCreateDate: undefined,
|
||||
CreateDate: undefined,
|
||||
});
|
||||
|
||||
const asset = await utils.createAsset(admin.accessToken, {
|
||||
assetData: {
|
||||
filename,
|
||||
bytes: imageBytes,
|
||||
},
|
||||
});
|
||||
|
||||
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
|
||||
|
||||
const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });
|
||||
|
||||
expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
|
||||
// Should skip invalid dates and use the first valid one (GPSDateTime)
|
||||
expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
|
||||
new Date('2023-10-10T10:00:00.000Z').getTime(),
|
||||
);
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('POST /assets/exist', () => {
|
||||
it('ignores invalid deviceAssetIds', async () => {
|
||||
const response = await utils.checkExistingAssets(user1.accessToken, {
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { JobCommand, JobName, LoginResponseDto, updateConfig } from '@immich/sdk';
|
||||
import { LoginResponseDto, QueueCommand, QueueName, updateConfig } from '@immich/sdk';
|
||||
import { cpSync, rmSync } from 'node:fs';
|
||||
import { readFile } from 'node:fs/promises';
|
||||
import { basename } from 'node:path';
|
||||
@@ -17,28 +17,28 @@ describe('/jobs', () => {
|
||||
|
||||
describe('PUT /jobs', () => {
|
||||
afterEach(async () => {
|
||||
await utils.jobCommand(admin.accessToken, JobName.MetadataExtraction, {
|
||||
command: JobCommand.Resume,
|
||||
await utils.queueCommand(admin.accessToken, QueueName.MetadataExtraction, {
|
||||
command: QueueCommand.Resume,
|
||||
force: false,
|
||||
});
|
||||
|
||||
await utils.jobCommand(admin.accessToken, JobName.ThumbnailGeneration, {
|
||||
command: JobCommand.Resume,
|
||||
await utils.queueCommand(admin.accessToken, QueueName.ThumbnailGeneration, {
|
||||
command: QueueCommand.Resume,
|
||||
force: false,
|
||||
});
|
||||
|
||||
await utils.jobCommand(admin.accessToken, JobName.FaceDetection, {
|
||||
command: JobCommand.Resume,
|
||||
await utils.queueCommand(admin.accessToken, QueueName.FaceDetection, {
|
||||
command: QueueCommand.Resume,
|
||||
force: false,
|
||||
});
|
||||
|
||||
await utils.jobCommand(admin.accessToken, JobName.SmartSearch, {
|
||||
command: JobCommand.Resume,
|
||||
await utils.queueCommand(admin.accessToken, QueueName.SmartSearch, {
|
||||
command: QueueCommand.Resume,
|
||||
force: false,
|
||||
});
|
||||
|
||||
await utils.jobCommand(admin.accessToken, JobName.DuplicateDetection, {
|
||||
command: JobCommand.Resume,
|
||||
await utils.queueCommand(admin.accessToken, QueueName.DuplicateDetection, {
|
||||
command: QueueCommand.Resume,
|
||||
force: false,
|
||||
});
|
||||
|
||||
@@ -59,8 +59,8 @@ describe('/jobs', () => {
|
||||
it('should queue metadata extraction for missing assets', async () => {
|
||||
const path = `${testAssetDir}/formats/raw/Nikon/D700/philadelphia.nef`;
|
||||
|
||||
await utils.jobCommand(admin.accessToken, JobName.MetadataExtraction, {
|
||||
command: JobCommand.Pause,
|
||||
await utils.queueCommand(admin.accessToken, QueueName.MetadataExtraction, {
|
||||
command: QueueCommand.Pause,
|
||||
force: false,
|
||||
});
|
||||
|
||||
@@ -77,20 +77,20 @@ describe('/jobs', () => {
|
||||
expect(asset.exifInfo?.make).toBeNull();
|
||||
}
|
||||
|
||||
await utils.jobCommand(admin.accessToken, JobName.MetadataExtraction, {
|
||||
command: JobCommand.Empty,
|
||||
await utils.queueCommand(admin.accessToken, QueueName.MetadataExtraction, {
|
||||
command: QueueCommand.Empty,
|
||||
force: false,
|
||||
});
|
||||
|
||||
await utils.waitForQueueFinish(admin.accessToken, 'metadataExtraction');
|
||||
|
||||
await utils.jobCommand(admin.accessToken, JobName.MetadataExtraction, {
|
||||
command: JobCommand.Resume,
|
||||
await utils.queueCommand(admin.accessToken, QueueName.MetadataExtraction, {
|
||||
command: QueueCommand.Resume,
|
||||
force: false,
|
||||
});
|
||||
|
||||
await utils.jobCommand(admin.accessToken, JobName.MetadataExtraction, {
|
||||
command: JobCommand.Start,
|
||||
await utils.queueCommand(admin.accessToken, QueueName.MetadataExtraction, {
|
||||
command: QueueCommand.Start,
|
||||
force: false,
|
||||
});
|
||||
|
||||
@@ -124,8 +124,8 @@ describe('/jobs', () => {
|
||||
|
||||
cpSync(`${testAssetDir}/formats/raw/Nikon/D80/glarus.nef`, path);
|
||||
|
||||
await utils.jobCommand(admin.accessToken, JobName.MetadataExtraction, {
|
||||
command: JobCommand.Start,
|
||||
await utils.queueCommand(admin.accessToken, QueueName.MetadataExtraction, {
|
||||
command: QueueCommand.Start,
|
||||
force: false,
|
||||
});
|
||||
|
||||
@@ -144,8 +144,8 @@ describe('/jobs', () => {
|
||||
it('should queue thumbnail extraction for assets missing thumbs', async () => {
|
||||
const path = `${testAssetDir}/albums/nature/tanners_ridge.jpg`;
|
||||
|
||||
await utils.jobCommand(admin.accessToken, JobName.ThumbnailGeneration, {
|
||||
command: JobCommand.Pause,
|
||||
await utils.queueCommand(admin.accessToken, QueueName.ThumbnailGeneration, {
|
||||
command: QueueCommand.Pause,
|
||||
force: false,
|
||||
});
|
||||
|
||||
@@ -153,32 +153,32 @@ describe('/jobs', () => {
|
||||
assetData: { bytes: await readFile(path), filename: basename(path) },
|
||||
});
|
||||
|
||||
await utils.waitForQueueFinish(admin.accessToken, JobName.MetadataExtraction);
|
||||
await utils.waitForQueueFinish(admin.accessToken, JobName.ThumbnailGeneration);
|
||||
await utils.waitForQueueFinish(admin.accessToken, QueueName.MetadataExtraction);
|
||||
await utils.waitForQueueFinish(admin.accessToken, QueueName.ThumbnailGeneration);
|
||||
|
||||
const assetBefore = await utils.getAssetInfo(admin.accessToken, id);
|
||||
expect(assetBefore.thumbhash).toBeNull();
|
||||
|
||||
await utils.jobCommand(admin.accessToken, JobName.ThumbnailGeneration, {
|
||||
command: JobCommand.Empty,
|
||||
await utils.queueCommand(admin.accessToken, QueueName.ThumbnailGeneration, {
|
||||
command: QueueCommand.Empty,
|
||||
force: false,
|
||||
});
|
||||
|
||||
await utils.waitForQueueFinish(admin.accessToken, JobName.MetadataExtraction);
|
||||
await utils.waitForQueueFinish(admin.accessToken, JobName.ThumbnailGeneration);
|
||||
await utils.waitForQueueFinish(admin.accessToken, QueueName.MetadataExtraction);
|
||||
await utils.waitForQueueFinish(admin.accessToken, QueueName.ThumbnailGeneration);
|
||||
|
||||
await utils.jobCommand(admin.accessToken, JobName.ThumbnailGeneration, {
|
||||
command: JobCommand.Resume,
|
||||
await utils.queueCommand(admin.accessToken, QueueName.ThumbnailGeneration, {
|
||||
command: QueueCommand.Resume,
|
||||
force: false,
|
||||
});
|
||||
|
||||
await utils.jobCommand(admin.accessToken, JobName.ThumbnailGeneration, {
|
||||
command: JobCommand.Start,
|
||||
await utils.queueCommand(admin.accessToken, QueueName.ThumbnailGeneration, {
|
||||
command: QueueCommand.Start,
|
||||
force: false,
|
||||
});
|
||||
|
||||
await utils.waitForQueueFinish(admin.accessToken, JobName.MetadataExtraction);
|
||||
await utils.waitForQueueFinish(admin.accessToken, JobName.ThumbnailGeneration);
|
||||
await utils.waitForQueueFinish(admin.accessToken, QueueName.MetadataExtraction);
|
||||
await utils.waitForQueueFinish(admin.accessToken, QueueName.ThumbnailGeneration);
|
||||
|
||||
const assetAfter = await utils.getAssetInfo(admin.accessToken, id);
|
||||
expect(assetAfter.thumbhash).not.toBeNull();
|
||||
@@ -193,26 +193,26 @@ describe('/jobs', () => {
|
||||
assetData: { bytes: await readFile(path), filename: basename(path) },
|
||||
});
|
||||
|
||||
await utils.waitForQueueFinish(admin.accessToken, JobName.MetadataExtraction);
|
||||
await utils.waitForQueueFinish(admin.accessToken, JobName.ThumbnailGeneration);
|
||||
await utils.waitForQueueFinish(admin.accessToken, QueueName.MetadataExtraction);
|
||||
await utils.waitForQueueFinish(admin.accessToken, QueueName.ThumbnailGeneration);
|
||||
|
||||
const assetBefore = await utils.getAssetInfo(admin.accessToken, id);
|
||||
|
||||
cpSync(`${testAssetDir}/albums/nature/notocactus_minimus.jpg`, path);
|
||||
|
||||
await utils.jobCommand(admin.accessToken, JobName.ThumbnailGeneration, {
|
||||
command: JobCommand.Resume,
|
||||
await utils.queueCommand(admin.accessToken, QueueName.ThumbnailGeneration, {
|
||||
command: QueueCommand.Resume,
|
||||
force: false,
|
||||
});
|
||||
|
||||
// This runs the missing thumbnail job
|
||||
await utils.jobCommand(admin.accessToken, JobName.ThumbnailGeneration, {
|
||||
command: JobCommand.Start,
|
||||
await utils.queueCommand(admin.accessToken, QueueName.ThumbnailGeneration, {
|
||||
command: QueueCommand.Start,
|
||||
force: false,
|
||||
});
|
||||
|
||||
await utils.waitForQueueFinish(admin.accessToken, JobName.MetadataExtraction);
|
||||
await utils.waitForQueueFinish(admin.accessToken, JobName.ThumbnailGeneration);
|
||||
await utils.waitForQueueFinish(admin.accessToken, QueueName.MetadataExtraction);
|
||||
await utils.waitForQueueFinish(admin.accessToken, QueueName.ThumbnailGeneration);
|
||||
|
||||
const assetAfter = await utils.getAssetInfo(admin.accessToken, id);
|
||||
|
||||
|
||||
172
e2e/src/api/specs/maintenance.e2e-spec.ts
Normal file
@@ -0,0 +1,172 @@
|
||||
import { LoginResponseDto } from '@immich/sdk';
|
||||
import { createUserDto } from 'src/fixtures';
|
||||
import { errorDto } from 'src/responses';
|
||||
import { app, utils } from 'src/utils';
|
||||
import request from 'supertest';
|
||||
import { beforeAll, describe, expect, it } from 'vitest';
|
||||
|
||||
describe('/admin/maintenance', () => {
|
||||
let cookie: string | undefined;
|
||||
let admin: LoginResponseDto;
|
||||
let nonAdmin: LoginResponseDto;
|
||||
|
||||
beforeAll(async () => {
|
||||
await utils.resetDatabase();
|
||||
admin = await utils.adminSetup();
|
||||
nonAdmin = await utils.userSetup(admin.accessToken, createUserDto.user1);
|
||||
});
|
||||
|
||||
// => outside of maintenance mode
|
||||
|
||||
describe('GET ~/server/config', async () => {
|
||||
it('should indicate we are out of maintenance mode', async () => {
|
||||
const { status, body } = await request(app).get('/server/config');
|
||||
expect(status).toBe(200);
|
||||
expect(body.maintenanceMode).toBeFalsy();
|
||||
});
|
||||
});
|
||||
|
||||
describe('POST /login', async () => {
|
||||
it('should not work out of maintenance mode', async () => {
|
||||
const { status, body } = await request(app).post('/admin/maintenance/login').send({ token: 'token' });
|
||||
expect(status).toBe(400);
|
||||
expect(body).toEqual(errorDto.badRequest('Not in maintenance mode'));
|
||||
});
|
||||
});
|
||||
|
||||
// => enter maintenance mode
|
||||
|
||||
describe.sequential('POST /', () => {
|
||||
it('should require authentication', async () => {
|
||||
const { status, body } = await request(app).post('/admin/maintenance').send({
|
||||
action: 'end',
|
||||
});
|
||||
expect(status).toBe(401);
|
||||
expect(body).toEqual(errorDto.unauthorized);
|
||||
});
|
||||
|
||||
it('should only work for admins', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.post('/admin/maintenance')
|
||||
.set('Authorization', `Bearer ${nonAdmin.accessToken}`)
|
||||
.send({ action: 'end' });
|
||||
expect(status).toBe(403);
|
||||
expect(body).toEqual(errorDto.forbidden);
|
||||
});
|
||||
|
||||
it('should be a no-op if try to exit maintenance mode', async () => {
|
||||
const { status } = await request(app)
|
||||
.post('/admin/maintenance')
|
||||
.set('Authorization', `Bearer ${admin.accessToken}`)
|
||||
.send({ action: 'end' });
|
||||
expect(status).toBe(201);
|
||||
});
|
||||
|
||||
it('should enter maintenance mode', async () => {
|
||||
const { status, headers } = await request(app)
|
||||
.post('/admin/maintenance')
|
||||
.set('Authorization', `Bearer ${admin.accessToken}`)
|
||||
.send({
|
||||
action: 'start',
|
||||
});
|
||||
expect(status).toBe(201);
|
||||
|
||||
cookie = headers['set-cookie'][0].split(';')[0];
|
||||
expect(cookie).toEqual(
|
||||
expect.stringMatching(/^immich_maintenance_token=[A-Za-z0-9-_]*\.[A-Za-z0-9-_]*\.[A-Za-z0-9-_]*$/),
|
||||
);
|
||||
|
||||
await expect
|
||||
.poll(
|
||||
async () => {
|
||||
const { body } = await request(app).get('/server/config');
|
||||
return body.maintenanceMode;
|
||||
},
|
||||
{
|
||||
interval: 5e2,
|
||||
timeout: 1e4,
|
||||
},
|
||||
)
|
||||
.toBeTruthy();
|
||||
});
|
||||
});
|
||||
|
||||
// => in maintenance mode
|
||||
|
||||
describe.sequential('in maintenance mode', () => {
|
||||
describe('GET ~/server/config', async () => {
|
||||
it('should indicate we are in maintenance mode', async () => {
|
||||
const { status, body } = await request(app).get('/server/config');
|
||||
expect(status).toBe(200);
|
||||
expect(body.maintenanceMode).toBeTruthy();
|
||||
});
|
||||
});
|
||||
|
||||
describe('POST /login', async () => {
|
||||
it('should fail without cookie or token in body', async () => {
|
||||
const { status, body } = await request(app).post('/admin/maintenance/login').send({});
|
||||
expect(status).toBe(401);
|
||||
expect(body).toEqual(errorDto.unauthorizedWithMessage('Missing JWT Token'));
|
||||
});
|
||||
|
||||
it('should succeed with cookie', async () => {
|
||||
const { status, body } = await request(app).post('/admin/maintenance/login').set('cookie', cookie!).send({});
|
||||
expect(status).toBe(201);
|
||||
expect(body).toEqual(
|
||||
expect.objectContaining({
|
||||
username: 'Immich Admin',
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
it('should succeed with token', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.post('/admin/maintenance/login')
|
||||
.send({
|
||||
token: cookie!.split('=')[1].trim(),
|
||||
});
|
||||
expect(status).toBe(201);
|
||||
expect(body).toEqual(
|
||||
expect.objectContaining({
|
||||
username: 'Immich Admin',
|
||||
}),
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('POST /', async () => {
|
||||
it('should be a no-op if try to enter maintenance mode', async () => {
|
||||
const { status } = await request(app)
|
||||
.post('/admin/maintenance')
|
||||
.set('cookie', cookie!)
|
||||
.send({ action: 'start' });
|
||||
expect(status).toBe(201);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
// => exit maintenance mode
|
||||
|
||||
describe.sequential('POST /', () => {
|
||||
it('should exit maintenance mode', async () => {
|
||||
const { status } = await request(app).post('/admin/maintenance').set('cookie', cookie!).send({
|
||||
action: 'end',
|
||||
});
|
||||
|
||||
expect(status).toBe(201);
|
||||
|
||||
await expect
|
||||
.poll(
|
||||
async () => {
|
||||
const { body } = await request(app).get('/server/config');
|
||||
return body.maintenanceMode;
|
||||
},
|
||||
{
|
||||
interval: 5e2,
|
||||
timeout: 1e4,
|
||||
},
|
||||
)
|
||||
.toBeFalsy();
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -136,6 +136,7 @@ describe('/server', () => {
|
||||
externalDomain: '',
|
||||
publicUsers: true,
|
||||
isOnboarded: false,
|
||||
maintenanceMode: false,
|
||||
mapDarkStyleUrl: 'https://tiles.immich.cloud/v1/style/dark.json',
|
||||
mapLightStyleUrl: 'https://tiles.immich.cloud/v1/style/light.json',
|
||||
});
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import {
|
||||
JobName,
|
||||
LoginResponseDto,
|
||||
QueueName,
|
||||
createStack,
|
||||
deleteUserAdmin,
|
||||
getMyUser,
|
||||
@@ -328,7 +328,7 @@ describe('/admin/users', () => {
|
||||
{ headers: asBearerAuth(user.accessToken) },
|
||||
);
|
||||
|
||||
await utils.waitForQueueFinish(admin.accessToken, JobName.BackgroundTask);
|
||||
await utils.waitForQueueFinish(admin.accessToken, QueueName.BackgroundTask);
|
||||
|
||||
const { status, body } = await request(app)
|
||||
.delete(`/admin/users/${user.userId}`)
|
||||
|
||||
@@ -1,178 +0,0 @@
|
||||
#!/usr/bin/env node
|
||||
|
||||
/**
|
||||
* Script to generate test images with additional EXIF date tags
|
||||
* This creates actual JPEG images with embedded metadata for testing
|
||||
* Images are generated into e2e/test-assets/metadata/dates/
|
||||
*/
|
||||
|
||||
import { execSync } from 'node:child_process';
|
||||
import { writeFileSync } from 'node:fs';
|
||||
import { dirname, join } from 'node:path';
|
||||
import { fileURLToPath } from 'node:url';
|
||||
import sharp from 'sharp';
|
||||
|
||||
interface TestImage {
|
||||
filename: string;
|
||||
description: string;
|
||||
exifTags: Record<string, string>;
|
||||
}
|
||||
|
||||
const testImages: TestImage[] = [
|
||||
{
|
||||
filename: 'time-created.jpg',
|
||||
description: 'Image with TimeCreated tag',
|
||||
exifTags: {
|
||||
TimeCreated: '2023:11:15 14:30:00',
|
||||
Make: 'Canon',
|
||||
Model: 'EOS R5',
|
||||
},
|
||||
},
|
||||
{
|
||||
filename: 'gps-datetime.jpg',
|
||||
description: 'Image with GPSDateTime and coordinates',
|
||||
exifTags: {
|
||||
GPSDateTime: '2023:11:15 12:30:00Z',
|
||||
GPSLatitude: '37.7749',
|
||||
GPSLongitude: '-122.4194',
|
||||
GPSLatitudeRef: 'N',
|
||||
GPSLongitudeRef: 'W',
|
||||
},
|
||||
},
|
||||
{
|
||||
filename: 'datetime-utc.jpg',
|
||||
description: 'Image with DateTimeUTC tag',
|
||||
exifTags: {
|
||||
DateTimeUTC: '2023:11:15 10:30:00',
|
||||
Make: 'Nikon',
|
||||
Model: 'D850',
|
||||
},
|
||||
},
|
||||
{
|
||||
filename: 'gps-datestamp.jpg',
|
||||
description: 'Image with GPSDateStamp and GPSTimeStamp',
|
||||
exifTags: {
|
||||
GPSDateStamp: '2023:11:15',
|
||||
GPSTimeStamp: '08:30:00',
|
||||
GPSLatitude: '51.5074',
|
||||
GPSLongitude: '-0.1278',
|
||||
GPSLatitudeRef: 'N',
|
||||
GPSLongitudeRef: 'W',
|
||||
},
|
||||
},
|
||||
{
|
||||
filename: 'sony-datetime2.jpg',
|
||||
description: 'Sony camera image with SonyDateTime2 tag',
|
||||
exifTags: {
|
||||
SonyDateTime2: '2023:11:15 06:30:00',
|
||||
Make: 'SONY',
|
||||
Model: 'ILCE-7RM5',
|
||||
},
|
||||
},
|
||||
{
|
||||
filename: 'date-priority-test.jpg',
|
||||
description: 'Image with multiple date tags to test priority',
|
||||
exifTags: {
|
||||
SubSecDateTimeOriginal: '2023:01:01 01:00:00',
|
||||
DateTimeOriginal: '2023:02:02 02:00:00',
|
||||
SubSecCreateDate: '2023:03:03 03:00:00',
|
||||
CreateDate: '2023:04:04 04:00:00',
|
||||
CreationDate: '2023:05:05 05:00:00',
|
||||
DateTimeCreated: '2023:06:06 06:00:00',
|
||||
TimeCreated: '2023:07:07 07:00:00',
|
||||
GPSDateTime: '2023:08:08 08:00:00',
|
||||
DateTimeUTC: '2023:09:09 09:00:00',
|
||||
GPSDateStamp: '2023:10:10',
|
||||
SonyDateTime2: '2023:11:11 11:00:00',
|
||||
},
|
||||
},
|
||||
{
|
||||
filename: 'new-tags-only.jpg',
|
||||
description: 'Image with only additional date tags (no standard tags)',
|
||||
exifTags: {
|
||||
TimeCreated: '2023:12:01 15:45:30',
|
||||
GPSDateTime: '2023:12:01 13:45:30Z',
|
||||
DateTimeUTC: '2023:12:01 13:45:30',
|
||||
GPSDateStamp: '2023:12:01',
|
||||
SonyDateTime2: '2023:12:01 08:45:30',
|
||||
GPSLatitude: '40.7128',
|
||||
GPSLongitude: '-74.0060',
|
||||
GPSLatitudeRef: 'N',
|
||||
GPSLongitudeRef: 'W',
|
||||
},
|
||||
},
|
||||
];
|
||||
|
||||
const generateTestImages = async (): Promise<void> => {
|
||||
// Target directory: e2e/test-assets/metadata/dates/
|
||||
// Current file is in: e2e/src/
|
||||
const __filename = fileURLToPath(import.meta.url);
|
||||
const __dirname = dirname(__filename);
|
||||
const targetDir = join(__dirname, '..', 'test-assets', 'metadata', 'dates');
|
||||
|
||||
console.log('Generating test images with additional EXIF date tags...');
|
||||
console.log(`Target directory: ${targetDir}`);
|
||||
|
||||
for (const image of testImages) {
|
||||
try {
|
||||
const imagePath = join(targetDir, image.filename);
|
||||
|
||||
// Create unique JPEG file using Sharp
|
||||
const r = Math.floor(Math.random() * 256);
|
||||
const g = Math.floor(Math.random() * 256);
|
||||
const b = Math.floor(Math.random() * 256);
|
||||
|
||||
const jpegData = await sharp({
|
||||
create: {
|
||||
width: 100,
|
||||
height: 100,
|
||||
channels: 3,
|
||||
background: { r, g, b },
|
||||
},
|
||||
})
|
||||
.jpeg({ quality: 90 })
|
||||
.toBuffer();
|
||||
|
||||
writeFileSync(imagePath, jpegData);
|
||||
|
||||
// Build exiftool command to add EXIF data
|
||||
const exifArgs = Object.entries(image.exifTags)
|
||||
.map(([tag, value]) => `-${tag}="${value}"`)
|
||||
.join(' ');
|
||||
|
||||
const command = `exiftool ${exifArgs} -overwrite_original "${imagePath}"`;
|
||||
|
||||
console.log(`Creating ${image.filename}: ${image.description}`);
|
||||
execSync(command, { stdio: 'pipe' });
|
||||
|
||||
// Verify the tags were written
|
||||
const verifyCommand = `exiftool -json "${imagePath}"`;
|
||||
const result = execSync(verifyCommand, { encoding: 'utf8' });
|
||||
const metadata = JSON.parse(result)[0];
|
||||
|
||||
console.log(` ✓ Created with ${Object.keys(image.exifTags).length} EXIF tags`);
|
||||
|
||||
// Log first date tag found for verification
|
||||
const firstDateTag = Object.keys(image.exifTags).find(
|
||||
(tag) => tag.includes('Date') || tag.includes('Time') || tag.includes('Created'),
|
||||
);
|
||||
if (firstDateTag && metadata[firstDateTag]) {
|
||||
console.log(` ✓ Verified ${firstDateTag}: ${metadata[firstDateTag]}`);
|
||||
}
|
||||
} catch (error) {
|
||||
console.error(`Failed to create ${image.filename}:`, (error as Error).message);
|
||||
}
|
||||
}
|
||||
|
||||
console.log('\nTest image generation complete!');
|
||||
console.log('Files created in:', targetDir);
|
||||
console.log('\nTo test these images:');
|
||||
console.log(`cd ${targetDir} && exiftool -time:all -gps:all *.jpg`);
|
||||
};
|
||||
|
||||
export { generateTestImages };
|
||||
|
||||
// Run the generator if this file is executed directly
|
||||
if (import.meta.url === `file://${process.argv[1]}`) {
|
||||
generateTestImages().catch(console.error);
|
||||
}
|
||||
@@ -7,6 +7,12 @@ export const errorDto = {
|
||||
message: 'Authentication required',
|
||||
correlationId: expect.any(String),
|
||||
},
|
||||
unauthorizedWithMessage: (message: string) => ({
|
||||
error: 'Unauthorized',
|
||||
statusCode: 401,
|
||||
message,
|
||||
correlationId: expect.any(String),
|
||||
}),
|
||||
forbidden: {
|
||||
error: 'Forbidden',
|
||||
statusCode: 403,
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
import {
|
||||
AllJobStatusResponseDto,
|
||||
AssetMediaCreateDto,
|
||||
AssetMediaResponseDto,
|
||||
AssetResponseDto,
|
||||
@@ -7,11 +6,13 @@ import {
|
||||
CheckExistingAssetsDto,
|
||||
CreateAlbumDto,
|
||||
CreateLibraryDto,
|
||||
JobCommandDto,
|
||||
JobName,
|
||||
MaintenanceAction,
|
||||
MetadataSearchDto,
|
||||
Permission,
|
||||
PersonCreateDto,
|
||||
QueueCommandDto,
|
||||
QueueName,
|
||||
QueuesResponseDto,
|
||||
SharedLinkCreateDto,
|
||||
UpdateLibraryDto,
|
||||
UserAdminCreateDto,
|
||||
@@ -27,15 +28,16 @@ import {
|
||||
createStack,
|
||||
createUserAdmin,
|
||||
deleteAssets,
|
||||
getAllJobsStatus,
|
||||
getAssetInfo,
|
||||
getConfig,
|
||||
getConfigDefaults,
|
||||
getQueuesLegacy,
|
||||
login,
|
||||
runQueueCommandLegacy,
|
||||
scanLibrary,
|
||||
searchAssets,
|
||||
sendJobCommand,
|
||||
setBaseUrl,
|
||||
setMaintenanceMode,
|
||||
signUpAdmin,
|
||||
tagAssets,
|
||||
updateAdminOnboarding,
|
||||
@@ -477,8 +479,8 @@ export const utils = {
|
||||
tagAssets: (accessToken: string, tagId: string, assetIds: string[]) =>
|
||||
tagAssets({ id: tagId, bulkIdsDto: { ids: assetIds } }, { headers: asBearerAuth(accessToken) }),
|
||||
|
||||
jobCommand: async (accessToken: string, jobName: JobName, jobCommandDto: JobCommandDto) =>
|
||||
sendJobCommand({ id: jobName, jobCommandDto }, { headers: asBearerAuth(accessToken) }),
|
||||
queueCommand: async (accessToken: string, name: QueueName, queueCommandDto: QueueCommandDto) =>
|
||||
runQueueCommandLegacy({ name, queueCommandDto }, { headers: asBearerAuth(accessToken) }),
|
||||
|
||||
setAuthCookies: async (context: BrowserContext, accessToken: string, domain = '127.0.0.1') =>
|
||||
await context.addCookies([
|
||||
@@ -514,6 +516,42 @@ export const utils = {
|
||||
},
|
||||
]),
|
||||
|
||||
setMaintenanceAuthCookie: async (context: BrowserContext, token: string, domain = '127.0.0.1') =>
|
||||
await context.addCookies([
|
||||
{
|
||||
name: 'immich_maintenance_token',
|
||||
value: token,
|
||||
domain,
|
||||
path: '/',
|
||||
expires: 2_058_028_213,
|
||||
httpOnly: true,
|
||||
secure: false,
|
||||
sameSite: 'Lax',
|
||||
},
|
||||
]),
|
||||
|
||||
enterMaintenance: async (accessToken: string) => {
|
||||
let setCookie: string[] | undefined;
|
||||
|
||||
await setMaintenanceMode(
|
||||
{
|
||||
setMaintenanceModeDto: {
|
||||
action: MaintenanceAction.Start,
|
||||
},
|
||||
},
|
||||
{
|
||||
headers: asBearerAuth(accessToken),
|
||||
fetch: (...args: Parameters<typeof fetch>) =>
|
||||
fetch(...args).then((response) => {
|
||||
setCookie = response.headers.getSetCookie();
|
||||
return response;
|
||||
}),
|
||||
},
|
||||
);
|
||||
|
||||
return setCookie;
|
||||
},
|
||||
|
||||
resetTempFolder: () => {
|
||||
rmSync(`${testAssetDir}/temp`, { recursive: true, force: true });
|
||||
mkdirSync(`${testAssetDir}/temp`, { recursive: true });
|
||||
@@ -524,13 +562,13 @@ export const utils = {
|
||||
await updateConfig({ systemConfigDto: defaultConfig }, { headers: asBearerAuth(accessToken) });
|
||||
},
|
||||
|
||||
isQueueEmpty: async (accessToken: string, queue: keyof AllJobStatusResponseDto) => {
|
||||
const queues = await getAllJobsStatus({ headers: asBearerAuth(accessToken) });
|
||||
isQueueEmpty: async (accessToken: string, queue: keyof QueuesResponseDto) => {
|
||||
const queues = await getQueuesLegacy({ headers: asBearerAuth(accessToken) });
|
||||
const jobCounts = queues[queue].jobCounts;
|
||||
return !jobCounts.active && !jobCounts.waiting;
|
||||
},
|
||||
|
||||
waitForQueueFinish: (accessToken: string, queue: keyof AllJobStatusResponseDto, ms?: number) => {
|
||||
waitForQueueFinish: (accessToken: string, queue: keyof QueuesResponseDto, ms?: number) => {
|
||||
// eslint-disable-next-line no-async-promise-executor
|
||||
return new Promise<void>(async (resolve, reject) => {
|
||||
const timeout = setTimeout(() => reject(new Error('Timed out waiting for queue to empty')), ms || 10_000);
|
||||
|
||||
52
e2e/src/web/specs/maintenance.e2e-spec.ts
Normal file
@@ -0,0 +1,52 @@
|
||||
import { LoginResponseDto } from '@immich/sdk';
|
||||
import { expect, test } from '@playwright/test';
|
||||
import { utils } from 'src/utils';
|
||||
|
||||
test.describe.configure({ mode: 'serial' });
|
||||
|
||||
test.describe('Maintenance', () => {
|
||||
let admin: LoginResponseDto;
|
||||
|
||||
test.beforeAll(async () => {
|
||||
utils.initSdk();
|
||||
await utils.resetDatabase();
|
||||
admin = await utils.adminSetup();
|
||||
});
|
||||
|
||||
test('enter and exit maintenance mode', async ({ context, page }) => {
|
||||
await utils.setAuthCookies(context, admin.accessToken);
|
||||
|
||||
await page.goto('/admin/system-settings?isOpen=maintenance');
|
||||
await page.getByRole('button', { name: 'Start maintenance mode' }).click();
|
||||
|
||||
await page.waitForURL(`/maintenance?${new URLSearchParams({ continue: '/admin/system-settings' })}`);
|
||||
await expect(page.getByText('Temporarily Unavailable')).toBeVisible();
|
||||
await page.getByRole('button', { name: 'End maintenance mode' }).click();
|
||||
await page.waitForURL('/admin/system-settings');
|
||||
});
|
||||
|
||||
test('maintenance shows no options to users until they authenticate', async ({ page }) => {
|
||||
const setCookie = await utils.enterMaintenance(admin.accessToken);
|
||||
const cookie = setCookie
|
||||
?.map((cookie) => cookie.split(';')[0].split('='))
|
||||
?.find(([name]) => name === 'immich_maintenance_token');
|
||||
|
||||
expect(cookie).toBeTruthy();
|
||||
|
||||
await expect(async () => {
|
||||
await page.goto('/');
|
||||
await page.waitForURL('/maintenance?**', {
|
||||
timeout: 1e3,
|
||||
});
|
||||
}).toPass({ timeout: 1e4 });
|
||||
|
||||
await expect(page.getByText('Temporarily Unavailable')).toBeVisible();
|
||||
await expect(page.getByRole('button', { name: 'End maintenance mode' })).toHaveCount(0);
|
||||
|
||||
await page.goto(`/maintenance?${new URLSearchParams({ token: cookie![1] })}`);
|
||||
await expect(page.getByText('Temporarily Unavailable')).toBeVisible();
|
||||
await expect(page.getByRole('button', { name: 'End maintenance mode' })).toBeVisible();
|
||||
await page.getByRole('button', { name: 'End maintenance mode' }).click();
|
||||
await page.waitForURL('/auth/login');
|
||||
});
|
||||
});
|
||||
@@ -52,14 +52,18 @@ test.describe('User Administration', () => {
|
||||
|
||||
await page.goto(`/admin/users/${user.userId}`);
|
||||
|
||||
await page.getByRole('button', { name: 'Edit user' }).click();
|
||||
await page.getByRole('button', { name: 'Edit' }).click();
|
||||
await expect(page.getByLabel('Admin User')).not.toBeChecked();
|
||||
await page.getByText('Admin User').click();
|
||||
await expect(page.getByLabel('Admin User')).toBeChecked();
|
||||
await page.getByRole('button', { name: 'Confirm' }).click();
|
||||
|
||||
const updated = await getUserAdmin({ id: user.userId }, { headers: asBearerAuth(admin.accessToken) });
|
||||
expect(updated.isAdmin).toBe(true);
|
||||
await expect
|
||||
.poll(async () => {
|
||||
const userAdmin = await getUserAdmin({ id: user.userId }, { headers: asBearerAuth(admin.accessToken) });
|
||||
return userAdmin.isAdmin;
|
||||
})
|
||||
.toBe(true);
|
||||
});
|
||||
|
||||
test('revoke admin access', async ({ context, page }) => {
|
||||
@@ -77,13 +81,17 @@ test.describe('User Administration', () => {
|
||||
|
||||
await page.goto(`/admin/users/${user.userId}`);
|
||||
|
||||
await page.getByRole('button', { name: 'Edit user' }).click();
|
||||
await page.getByRole('button', { name: 'Edit' }).click();
|
||||
await expect(page.getByLabel('Admin User')).toBeChecked();
|
||||
await page.getByText('Admin User').click();
|
||||
await expect(page.getByLabel('Admin User')).not.toBeChecked();
|
||||
await page.getByRole('button', { name: 'Confirm' }).click();
|
||||
|
||||
const updated = await getUserAdmin({ id: user.userId }, { headers: asBearerAuth(admin.accessToken) });
|
||||
expect(updated.isAdmin).toBe(false);
|
||||
await expect
|
||||
.poll(async () => {
|
||||
const userAdmin = await getUserAdmin({ id: user.userId }, { headers: asBearerAuth(admin.accessToken) });
|
||||
return userAdmin.isAdmin;
|
||||
})
|
||||
.toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
Submodule e2e/test-assets updated: 37f60ea537...163c251744
@@ -1716,6 +1716,7 @@
|
||||
"running": "Kører",
|
||||
"save": "Gem",
|
||||
"save_to_gallery": "Gem til galleri",
|
||||
"saved": "Gemt",
|
||||
"saved_api_key": "Gemt API-nøgle",
|
||||
"saved_profile": "Gemte profil",
|
||||
"saved_settings": "Gemte indstillinger",
|
||||
|
||||
10
i18n/de.json
@@ -155,12 +155,15 @@
|
||||
"machine_learning_min_recognized_faces": "Mindestens erkannte Gesichter",
|
||||
"machine_learning_min_recognized_faces_description": "Die Mindestanzahl von erkannten Gesichtern, damit eine Person erstellt werden kann. Eine Erhöhung dieses Wertes macht die Gesichtserkennung präziser, erhöht aber die Wahrscheinlichkeit, dass ein Gesicht nicht zu einer Person zugeordnet wird.",
|
||||
"machine_learning_ocr": "OCR",
|
||||
"machine_learning_ocr_description": "Maschinen lernen nutzen um Texte in Bildern zu erkennen",
|
||||
"machine_learning_ocr_description": "Maschinelles Lernen nutzen um Texte in Bildern zu erkennen",
|
||||
"machine_learning_ocr_enabled": "OCR aktivieren",
|
||||
"machine_learning_ocr_enabled_description": "Wenn deaktiviert, werden die Bilder nicht von der Texterkennung bearbeitet.",
|
||||
"machine_learning_ocr_max_resolution": "Maximale Auflösung",
|
||||
"machine_learning_ocr_max_resolution_description": "Vorschauen über dieser Auflösung werden unter Beibehaltung des Seitenverhältnisses verkleinert. Höhere Werte sind genauer, benötigen jedoch mehr Zeit für die Verarbeitung und verbrauchen mehr Speicher.",
|
||||
"machine_learning_ocr_min_detection_score": "Minimaler Erkennungswert",
|
||||
"machine_learning_ocr_min_detection_score_description": "Minimale Konfidenzrate für die Texterkennung von 0–1. Niedrigere Werte führen dazu, dass mehr Text erkannt wird, können jedoch zu falsch-positiven Ergebnissen führen.",
|
||||
"machine_learning_ocr_min_recognition_score": "Minimale Erkennungsrate",
|
||||
"machine_learning_ocr_min_score_recognition_description": "Minimale Konfidenzrate für die Erkennung von erkanntem Text von 0–1. Niedrigere Werte führen dazu, dass mehr Text erkannt wird, können jedoch zu falsch-positiven Ergebnissen führen.",
|
||||
"machine_learning_ocr_model": "OCR Modell",
|
||||
"machine_learning_ocr_model_description": "Server Modelle sind genauer als mobile Modelle, brauchen aber länger zur Verarbeitung und brauchen mehr Speicher.",
|
||||
"machine_learning_settings": "Einstellungen für maschinelles Lernen",
|
||||
@@ -254,7 +257,7 @@
|
||||
"oauth_storage_quota_default_description": "Kontingent in GiB, das verwendet werden soll, wenn keines übermittelt wird.",
|
||||
"oauth_timeout": "Zeitüberschreitung bei Anfrage",
|
||||
"oauth_timeout_description": "Zeitüberschreitung für Anfragen in Millisekunden",
|
||||
"ocr_job_description": "Verwende Machine Learning zur Ernennung von Text in Bildern",
|
||||
"ocr_job_description": "Verwende Machine Learning zur Erkennung von Text in Bildern",
|
||||
"password_enable_description": "Mit E-Mail und Passwort anmelden",
|
||||
"password_settings": "Passwort-Anmeldung",
|
||||
"password_settings_description": "Passwort-Anmeldeeinstellungen verwalten",
|
||||
@@ -1352,7 +1355,7 @@
|
||||
"memories_check_back_tomorrow": "Schau morgen wieder vorbei für weitere Erinnerungen",
|
||||
"memories_setting_description": "Verwalte, was du in deinen Erinnerungen siehst",
|
||||
"memories_start_over": "Erneut beginnen",
|
||||
"memories_swipe_to_close": "Nach oben Wischen zum schließen",
|
||||
"memories_swipe_to_close": "Nach oben Wischen zum Schließen",
|
||||
"memory": "Erinnerung",
|
||||
"memory_lane_title": "Foto-Erinnerungen {title}",
|
||||
"menu": "Menü",
|
||||
@@ -1713,6 +1716,7 @@
|
||||
"running": "Läuft",
|
||||
"save": "Speichern",
|
||||
"save_to_gallery": "In Galerie speichern",
|
||||
"saved": "Gespeichert",
|
||||
"saved_api_key": "API-Schlüssel wurde gespeichert",
|
||||
"saved_profile": "Profil gespeichert",
|
||||
"saved_settings": "Einstellungen gespeichert",
|
||||
|
||||
46
i18n/en.json
@@ -17,7 +17,6 @@
|
||||
"add_birthday": "Add a birthday",
|
||||
"add_endpoint": "Add endpoint",
|
||||
"add_exclusion_pattern": "Add exclusion pattern",
|
||||
"add_import_path": "Add import path",
|
||||
"add_location": "Add location",
|
||||
"add_more_users": "Add more users",
|
||||
"add_partner": "Add partner",
|
||||
@@ -32,6 +31,7 @@
|
||||
"add_to_album_toggle": "Toggle selection for {album}",
|
||||
"add_to_albums": "Add to albums",
|
||||
"add_to_albums_count": "Add to albums ({count})",
|
||||
"add_to_bottom_bar": "Add to",
|
||||
"add_to_shared_album": "Add to shared album",
|
||||
"add_upload_to_stack": "Add upload to stack",
|
||||
"add_url": "Add URL",
|
||||
@@ -112,13 +112,17 @@
|
||||
"jobs_failed": "{jobCount, plural, other {# failed}}",
|
||||
"library_created": "Created library: {library}",
|
||||
"library_deleted": "Library deleted",
|
||||
"library_import_path_description": "Specify a folder to import. This folder, including subfolders, will be scanned for images and videos.",
|
||||
"library_details": "Library details",
|
||||
"library_folder_description": "Specify a folder to import. This folder, including subfolders, will be scanned for images and videos.",
|
||||
"library_remove_exclusion_pattern_prompt": "Are you sure you want to remove this exclusion pattern?",
|
||||
"library_remove_folder_prompt": "Are you sure you want to remove this import folder?",
|
||||
"library_scanning": "Periodic Scanning",
|
||||
"library_scanning_description": "Configure periodic library scanning",
|
||||
"library_scanning_enable_description": "Enable periodic library scanning",
|
||||
"library_settings": "External Library",
|
||||
"library_settings_description": "Manage external library settings",
|
||||
"library_tasks_description": "Scan external libraries for new and/or changed assets",
|
||||
"library_updated": "Updated library",
|
||||
"library_watching_enable_description": "Watch external libraries for file changes",
|
||||
"library_watching_settings": "Library watching [EXPERIMENTAL]",
|
||||
"library_watching_settings_description": "Automatically watch for changed files",
|
||||
@@ -173,6 +177,10 @@
|
||||
"machine_learning_smart_search_enabled": "Enable smart search",
|
||||
"machine_learning_smart_search_enabled_description": "If disabled, images will not be encoded for smart search.",
|
||||
"machine_learning_url_description": "The URL of the machine learning server. If more than one URL is provided, each server will be attempted one-at-a-time until one responds successfully, in order from first to last. Servers that don't respond will be temporarily ignored until they come back online.",
|
||||
"maintenance_settings": "Maintenance",
|
||||
"maintenance_settings_description": "Put Immich into maintenance mode.",
|
||||
"maintenance_start": "Start maintenance mode",
|
||||
"maintenance_start_error": "Failed to start maintenance mode.",
|
||||
"manage_concurrency": "Manage Concurrency",
|
||||
"manage_log_settings": "Manage log settings",
|
||||
"map_dark_style": "Dark style",
|
||||
@@ -430,6 +438,7 @@
|
||||
"age_months": "Age {months, plural, one {# month} other {# months}}",
|
||||
"age_year_months": "Age 1 year, {months, plural, one {# month} other {# months}}",
|
||||
"age_years": "{years, plural, other {Age #}}",
|
||||
"album": "Album",
|
||||
"album_added": "Album added",
|
||||
"album_added_notification_setting_description": "Receive an email notification when you are added to a shared album",
|
||||
"album_cover_updated": "Album cover updated",
|
||||
@@ -475,6 +484,7 @@
|
||||
"allow_edits": "Allow edits",
|
||||
"allow_public_user_to_download": "Allow public user to download",
|
||||
"allow_public_user_to_upload": "Allow public user to upload",
|
||||
"allowed": "Allowed",
|
||||
"alt_text_qr_code": "QR code image",
|
||||
"anti_clockwise": "Anti-clockwise",
|
||||
"api_key": "API Key",
|
||||
@@ -894,8 +904,6 @@
|
||||
"edit_description_prompt": "Please select a new description:",
|
||||
"edit_exclusion_pattern": "Edit exclusion pattern",
|
||||
"edit_faces": "Edit faces",
|
||||
"edit_import_path": "Edit import path",
|
||||
"edit_import_paths": "Edit Import Paths",
|
||||
"edit_key": "Edit key",
|
||||
"edit_link": "Edit link",
|
||||
"edit_location": "Edit location",
|
||||
@@ -967,8 +975,8 @@
|
||||
"failed_to_stack_assets": "Failed to stack assets",
|
||||
"failed_to_unstack_assets": "Failed to un-stack assets",
|
||||
"failed_to_update_notification_status": "Failed to update notification status",
|
||||
"import_path_already_exists": "This import path already exists.",
|
||||
"incorrect_email_or_password": "Incorrect email or password",
|
||||
"library_folder_already_exists": "This import path already exists.",
|
||||
"paths_validation_failed": "{paths, plural, one {# path} other {# paths}} failed validation",
|
||||
"profile_picture_transparent_pixels": "Profile pictures cannot have transparent pixels. Please zoom in and/or move the image.",
|
||||
"quota_higher_than_disk_size": "You set a quota higher than the disk size",
|
||||
@@ -977,7 +985,6 @@
|
||||
"unable_to_add_assets_to_shared_link": "Unable to add assets to shared link",
|
||||
"unable_to_add_comment": "Unable to add comment",
|
||||
"unable_to_add_exclusion_pattern": "Unable to add exclusion pattern",
|
||||
"unable_to_add_import_path": "Unable to add import path",
|
||||
"unable_to_add_partners": "Unable to add partners",
|
||||
"unable_to_add_remove_archive": "Unable to {archived, select, true {remove asset from} other {add asset to}} archive",
|
||||
"unable_to_add_remove_favorites": "Unable to {favorite, select, true {add asset to} other {remove asset from}} favorites",
|
||||
@@ -1000,12 +1007,10 @@
|
||||
"unable_to_delete_asset": "Unable to delete asset",
|
||||
"unable_to_delete_assets": "Error deleting assets",
|
||||
"unable_to_delete_exclusion_pattern": "Unable to delete exclusion pattern",
|
||||
"unable_to_delete_import_path": "Unable to delete import path",
|
||||
"unable_to_delete_shared_link": "Unable to delete shared link",
|
||||
"unable_to_delete_user": "Unable to delete user",
|
||||
"unable_to_download_files": "Unable to download files",
|
||||
"unable_to_edit_exclusion_pattern": "Unable to edit exclusion pattern",
|
||||
"unable_to_edit_import_path": "Unable to edit import path",
|
||||
"unable_to_empty_trash": "Unable to empty trash",
|
||||
"unable_to_enter_fullscreen": "Unable to enter fullscreen",
|
||||
"unable_to_exit_fullscreen": "Unable to exit fullscreen",
|
||||
@@ -1056,6 +1061,7 @@
|
||||
"unable_to_update_user": "Unable to update user",
|
||||
"unable_to_upload_file": "Unable to upload file"
|
||||
},
|
||||
"exclusion_pattern": "Exclusion pattern",
|
||||
"exif": "Exif",
|
||||
"exif_bottom_sheet_description": "Add Description...",
|
||||
"exif_bottom_sheet_description_error": "Error updating description",
|
||||
@@ -1196,6 +1202,8 @@
|
||||
"import_path": "Import path",
|
||||
"in_albums": "In {count, plural, one {# album} other {# albums}}",
|
||||
"in_archive": "In archive",
|
||||
"in_year": "In {year}",
|
||||
"in_year_selector": "In",
|
||||
"include_archived": "Include archived",
|
||||
"include_shared_albums": "Include shared albums",
|
||||
"include_shared_partner_assets": "Include shared partner assets",
|
||||
@@ -1232,6 +1240,7 @@
|
||||
"language_setting_description": "Select your preferred language",
|
||||
"large_files": "Large Files",
|
||||
"last": "Last",
|
||||
"last_months": "{count, plural, one {Last month} other {Last # months}}",
|
||||
"last_seen": "Last seen",
|
||||
"latest_version": "Latest Version",
|
||||
"latitude": "Latitude",
|
||||
@@ -1241,6 +1250,8 @@
|
||||
"let_others_respond": "Let others respond",
|
||||
"level": "Level",
|
||||
"library": "Library",
|
||||
"library_add_folder": "Add folder",
|
||||
"library_edit_folder": "Edit folder",
|
||||
"library_options": "Library options",
|
||||
"library_page_device_albums": "Albums on Device",
|
||||
"library_page_new_album": "New album",
|
||||
@@ -1312,8 +1323,17 @@
|
||||
"loop_videos_description": "Enable to automatically loop a video in the detail viewer.",
|
||||
"main_branch_warning": "You're using a development version; we strongly recommend using a release version!",
|
||||
"main_menu": "Main menu",
|
||||
"maintenance_description": "Immich has been put into <link>maintenance mode</link>.",
|
||||
"maintenance_end": "End maintenance mode",
|
||||
"maintenance_end_error": "Failed to end maintenance mode.",
|
||||
"maintenance_logged_in_as": "Currently logged in as {user}",
|
||||
"maintenance_title": "Temporarily Unavailable",
|
||||
"make": "Make",
|
||||
"manage_geolocation": "Manage location",
|
||||
"manage_media_access_rationale": "This permission is required for proper handling of moving assets to the trash and restoring them from it.",
|
||||
"manage_media_access_settings": "Open settings",
|
||||
"manage_media_access_subtitle": "Allow the Immich app to manage and move media files.",
|
||||
"manage_media_access_title": "Media Management Access",
|
||||
"manage_shared_links": "Manage shared links",
|
||||
"manage_sharing_with_partners": "Manage sharing with partners",
|
||||
"manage_the_app_settings": "Manage the app settings",
|
||||
@@ -1377,6 +1397,7 @@
|
||||
"more": "More",
|
||||
"move": "Move",
|
||||
"move_off_locked_folder": "Move out of locked folder",
|
||||
"move_to": "Move to",
|
||||
"move_to_lock_folder_action_prompt": "{count} added to the locked folder",
|
||||
"move_to_locked_folder": "Move to locked folder",
|
||||
"move_to_locked_folder_confirmation": "These photos and video will be removed from all albums, and only viewable from the locked folder",
|
||||
@@ -1406,6 +1427,7 @@
|
||||
"new_pin_code": "New PIN code",
|
||||
"new_pin_code_subtitle": "This is your first time accessing the locked folder. Create a PIN code to securely access this page",
|
||||
"new_timeline": "New Timeline",
|
||||
"new_update": "New update",
|
||||
"new_user_created": "New user created",
|
||||
"new_version_available": "NEW VERSION AVAILABLE",
|
||||
"newest_first": "Newest first",
|
||||
@@ -1421,6 +1443,7 @@
|
||||
"no_cast_devices_found": "No cast devices found",
|
||||
"no_checksum_local": "No checksum available - cannot fetch local assets",
|
||||
"no_checksum_remote": "No checksum available - cannot fetch remote asset",
|
||||
"no_devices": "No authorized devices",
|
||||
"no_duplicates_found": "No duplicates were found.",
|
||||
"no_exif_info_available": "No exif info available",
|
||||
"no_explore_results_message": "Upload more photos to explore your collection.",
|
||||
@@ -1437,6 +1460,7 @@
|
||||
"no_results_description": "Try a synonym or more general keyword",
|
||||
"no_shared_albums_message": "Create an album to share photos and videos with people in your network",
|
||||
"no_uploads_in_progress": "No uploads in progress",
|
||||
"not_allowed": "Not allowed",
|
||||
"not_available": "N/A",
|
||||
"not_in_any_album": "Not in any album",
|
||||
"not_selected": "Not selected",
|
||||
@@ -1547,6 +1571,8 @@
|
||||
"photos_count": "{count, plural, one {{count, number} Photo} other {{count, number} Photos}}",
|
||||
"photos_from_previous_years": "Photos from previous years",
|
||||
"pick_a_location": "Pick a location",
|
||||
"pick_custom_range": "Custom range",
|
||||
"pick_date_range": "Select a date range",
|
||||
"pin_code_changed_successfully": "Successfully changed PIN code",
|
||||
"pin_code_reset_successfully": "Successfully reset PIN code",
|
||||
"pin_code_setup_successfully": "Successfully setup a PIN code",
|
||||
@@ -1814,6 +1840,8 @@
|
||||
"server_offline": "Server Offline",
|
||||
"server_online": "Server Online",
|
||||
"server_privacy": "Server Privacy",
|
||||
"server_restarting_description": "This page will refresh momentarily.",
|
||||
"server_restarting_title": "Server is restarting",
|
||||
"server_stats": "Server Stats",
|
||||
"server_update_available": "Server update is available",
|
||||
"server_version": "Server Version",
|
||||
@@ -2027,6 +2055,7 @@
|
||||
"third_party_resources": "Third-Party Resources",
|
||||
"time": "Time",
|
||||
"time_based_memories": "Time-based memories",
|
||||
"time_based_memories_duration": "Number of seconds to display each image.",
|
||||
"timeline": "Timeline",
|
||||
"timezone": "Timezone",
|
||||
"to_archive": "Archive",
|
||||
@@ -2167,6 +2196,7 @@
"welcome": "Welcome",
"welcome_to_immich": "Welcome to Immich",
"wifi_name": "Wi-Fi Name",
"workflow": "Workflow",
"wrong_pin_code": "Wrong PIN code",
"year": "Year",
"years_ago": "{years, plural, one {# year} other {# years}} ago",
@@ -157,7 +157,7 @@
"machine_learning_ocr": "OCR",
"machine_learning_ocr_description": "Utiliser l'apprentissage automatique pour reconnaître le texte dans les images",
"machine_learning_ocr_enabled": "Activer la reconnaissance de caractères",
"machine_learning_ocr_enabled_description": "Si désactivé, la reconnaissance de texte ne s'appliquera pas aux images",
"machine_learning_ocr_enabled_description": "Si désactivé, la reconnaissance de texte ne s'appliquera pas aux images.",
"machine_learning_ocr_max_resolution": "Résolution maximale",
"machine_learning_ocr_max_resolution_description": "Les prévisualisations au-dessus de cette résolution seront retaillées en conservant leur ratio. Des valeurs plus grandes sont plus précises, mais sont plus lentes et utilisent plus de mémoire.",
"machine_learning_ocr_min_detection_score": "Score minimum de détection",
96
i18n/hi.json
@@ -33,6 +33,7 @@
|
||||
"add_to_albums": "एकाधिक एल्बम में डाले",
|
||||
"add_to_albums_count": "एल्बमों में डालें ({count})",
|
||||
"add_to_shared_album": "शेयर किए गए एल्बम में डालें",
|
||||
"add_upload_to_stack": "स्टैक में अपलोड करें",
|
||||
"add_url": "URL डालें",
|
||||
"added_to_archive": "संग्रहीत कर दिया गया है",
|
||||
"added_to_favorites": "पसंदीदा में डाला गया",
|
||||
@@ -124,6 +125,13 @@
|
||||
"logging_enable_description": "लॉगिंग करने देना",
|
||||
"logging_level_description": "सक्षम होने पर, किस लॉग स्तर का उपयोग करना है।",
|
||||
"logging_settings": "लॉगिंग",
|
||||
"machine_learning_availability_checks": "उपलब्धता जांच",
|
||||
"machine_learning_availability_checks_description": "उपलब्ध मशीन लर्निंग सर्वर का स्वचालित रूप से पता लगाएं और प्राथमिकता दें",
|
||||
"machine_learning_availability_checks_enabled": "उपलब्धता जांच सक्षम करें",
|
||||
"machine_learning_availability_checks_interval": "अंतराल की जाँच करें",
|
||||
"machine_learning_availability_checks_interval_description": "उपलब्धता जांच के बीच मिलीसेकेंड में अंतराल",
|
||||
"machine_learning_availability_checks_timeout": "अनुरोध समयबाह्य हुआ",
|
||||
"machine_learning_availability_checks_timeout_description": "उपलब्धता जांच के लिए मिलीसेकंड में समयबाह्य अंतराल",
|
||||
"machine_learning_clip_model": "क्लिप मॉडल",
|
||||
"machine_learning_clip_model_description": "CLIP मॉडल का नाम <link>यहां</link> सूचीबद्ध है। ध्यान दें कि मॉडल बदलने पर आपको सभी छवियों के लिए 'स्मार्ट सर्च' जोब फिर से चलाना होगा।",
|
||||
"machine_learning_duplicate_detection": "डुप्लिकेट का पता लगाना",
|
||||
@@ -146,6 +154,18 @@
|
||||
"machine_learning_min_detection_score_description": "किसी चेहरे का पता लगाने के लिए न्यूनतम आत्मविश्वास स्कोर 0-1 होना चाहिए।",
|
||||
"machine_learning_min_recognized_faces": "निम्नतम पहचाने चेहरे",
|
||||
"machine_learning_min_recognized_faces_description": "किसी व्यक्ति के लिए पहचाने जाने वाले चेहरों की न्यूनतम संख्या।",
|
||||
"machine_learning_ocr": "ओ.सी.आर",
|
||||
"machine_learning_ocr_description": "चित्रों में पाठ को पहचानने के लिए मशीन लर्निंग का उपयोग करें",
|
||||
"machine_learning_ocr_enabled": "ओ.सी.आर. सक्षम करें",
|
||||
"machine_learning_ocr_enabled_description": "यदि अक्षम किया गया है, तो चित्रों पर पाठ-पहचान नहीं होगा।",
|
||||
"machine_learning_ocr_max_resolution": "अधिकतम रिज़ॉल्यूशन",
|
||||
"machine_learning_ocr_max_resolution_description": "इस रिज़ॉल्यूशन से ऊपर के प्रदर्शन का आकार मूल अनुपात को संरक्षित करते हुए बदल दिया जाएगा। उच्च मान अधिक सटीक होते हैं, लेकिन संसाधित होने में अधिक मेमोरी और समय लगाते हैं।",
|
||||
"machine_learning_ocr_min_detection_score": "न्यूनतम खोज अंक",
|
||||
"machine_learning_ocr_min_detection_score_description": "पाठ का पता लगाने के लिए 0-1 के बीच न्यूनतम आत्मविश्वास अंक। कम अंक अधिक पाठ का पता लगाएंगे लेकिन परिणाम गलत हो सकते हैं।",
|
||||
"machine_learning_ocr_min_recognition_score": "न्यूनतम पहचान अंक",
|
||||
"machine_learning_ocr_min_score_recognition_description": "पाठ को पहचानने के लिए 0-1 के बीच न्यूनतम आत्मविश्वास अंक। कम अंक अधिक पाठ को पहचानेंगे लेकिन परिणाम गलत हो सकते हैं।",
|
||||
"machine_learning_ocr_model": "ओसीआर प्रतिमान",
|
||||
"machine_learning_ocr_model_description": "सर्वर प्रतिमान मोबाइल प्रतिमान की तुलना में अधिक सटीक होते हैं, लेकिन संसाधित होने में अधिक मेमोरी और समय लेते हैं।",
|
||||
"machine_learning_settings": "मशीन लर्निंग सेटिंग्स",
|
||||
"machine_learning_settings_description": "मशीन लर्निंग सुविधाओं और सेटिंग्स को प्रबंधित करें",
|
||||
"machine_learning_smart_search": "स्मार्ट खोज",
|
||||
@@ -203,6 +223,8 @@
|
||||
"notification_email_ignore_certificate_errors_description": "टीएलएस प्रमाणपत्र सत्यापन त्रुटियों पर ध्यान न दें (अनुशंसित नहीं)",
|
||||
"notification_email_password_description": "ईमेल सर्वर से प्रमाणीकरण करते समय उपयोग किया जाने वाला पासवर्ड",
|
||||
"notification_email_port_description": "ईमेल सर्वर का पोर्ट (जैसे 25, 465, या 587)",
|
||||
"notification_email_secure": "एस एम टी पी एस",
|
||||
"notification_email_secure_description": "एस.एम.टी.पी.एस. प्रयोग करें (टी.एल.एस पर एस.एम.टी.पी)",
|
||||
"notification_email_sent_test_email_button": "परीक्षण ईमेल भेजें और सहेजें",
|
||||
"notification_email_setting_description": "ईमेल सूचनाएं भेजने के लिए सेटिंग्स",
|
||||
"notification_email_test_email": "परीक्षण ईमेल भेजें",
|
||||
@@ -235,6 +257,7 @@
|
||||
"oauth_storage_quota_default_description": "GiB में कोटा का उपयोग तब किया जाएगा जब कोई दावा प्रदान नहीं किया गया हो ।",
|
||||
"oauth_timeout": "ब्रेक का अनुरोध",
|
||||
"oauth_timeout_description": "अनुरोधों के लिए समय-सीमा मिलीसेकंड में",
|
||||
"ocr_job_description": "चित्रों में पाठ को पहचानने के लिए मशीन लर्निंग का उपयोग करें",
|
||||
"password_enable_description": "ईमेल और पासवर्ड से लॉगिन करें",
|
||||
"password_settings": "पासवर्ड लॉग इन",
|
||||
"password_settings_description": "पासवर्ड लॉगिन सेटिंग प्रबंधित करें",
|
||||
@@ -325,7 +348,7 @@
|
||||
"transcoding_max_b_frames": "अधिकतम बी-फ्रेम",
|
||||
"transcoding_max_b_frames_description": "उच्च मान संपीड़न दक्षता में सुधार करते हैं, लेकिन एन्कोडिंग को धीमा कर देते हैं।",
|
||||
"transcoding_max_bitrate": "अधिकतम बिटरेट",
|
||||
"transcoding_max_bitrate_description": "अधिकतम बिटरेट सेट करने से फ़ाइल आकार को गुणवत्ता पर मामूली लागत के साथ अधिक पूर्वानुमानित किया जा सकता है। 720p पर, सामान्य मान VP9 या HEVC के लिए 2600k kbit/s या H.264 के लिए 4500k kbit/s हैं। 0 पर सेट होने पर अक्षम।",
|
||||
"transcoding_max_bitrate_description": "अधिकतम बिटरेट सेट करने से फ़ाइल आकार को गुणवत्ता पर मामूली लागत के साथ अधिक पूर्वानुमानित किया जा सकता है। 720p पर, सामान्य मान VP9 या HEVC के लिए 2600k kbit/s या H.264 के लिए 4500k kbit/s हैं। 0 पर सेट होने पर अक्षम। जब कोई इकाई निर्दिष्ट नहीं की जाती है, तो k (kbit/s के लिए) मान लिया जाता है; इसलिए 5000, 5000k, और 5M (Mbit/s के लिए) समतुल्य हैं।",
|
||||
"transcoding_max_keyframe_interval": "अधिकतम मुख्यफ़्रेम अंतराल",
|
||||
"transcoding_max_keyframe_interval_description": "मुख्यफ़्रेम के बीच अधिकतम फ़्रेम दूरी निर्धारित करता है।",
|
||||
"transcoding_optimal_description": "लक्ष्य रिज़ॉल्यूशन से अधिक ऊंचे वीडियो या स्वीकृत प्रारूप में नहीं",
|
||||
@@ -359,6 +382,9 @@
|
||||
"trash_number_of_days_description": "संपत्तियों को स्थायी रूप से हटाने से पहले उन्हें कूड़ेदान में रखने के लिए दिनों की संख्या",
|
||||
"trash_settings": "ट्रैश सेटिंग",
|
||||
"trash_settings_description": "ट्रैश सेटिंग प्रबंधित करें",
|
||||
"unlink_all_oauth_accounts": "सभी ओ.औथ खातों से संपर्क तोड़ दें",
|
||||
"unlink_all_oauth_accounts_description": "नए प्रदाता पर सतानांतरण करने से पहले सभी ओ.औथ खातों से संपर्क तोड़ना याद रखें।",
|
||||
"unlink_all_oauth_accounts_prompt": "क्या आप वाकई सभी ओ.औथ खातों से संपर्क तोड़ना चाहते हैं? इससे प्रत्येक उपयोगकर्ता के लिए ओ.औथ आई.डी रद्द हो जाएगी और इसे पूर्ववत नहीं किया जा सकेगा।",
|
||||
"user_cleanup_job": "उपयोगकर्ता सफ़ाई",
|
||||
"user_delete_delay": "<b>{user}</b> के खाते और परिसंपत्तियों को {delay, plural, one {# day} other {# days}} में स्थायी रूप से हटाने के लिए शेड्यूल किया जाएगा।",
|
||||
"user_delete_delay_settings": "हटाने में देरी",
|
||||
@@ -392,6 +418,8 @@
|
||||
"advanced_settings_prefer_remote_title": "दूरस्थ छवियों को प्राथमिकता दें",
|
||||
"advanced_settings_proxy_headers_subtitle": "प्रत्येक नेटवर्क अनुरोध के साथ इम्मिच द्वारा भेजे जाने वाले प्रॉक्सी हेडर को परिभाषित करें",
|
||||
"advanced_settings_proxy_headers_title": "प्रॉक्सी हेडर",
|
||||
"advanced_settings_readonly_mode_subtitle": "रीड-ओनली प्रणाली को सक्षम करता है जहां चित्र को केवल देखा जा सकता है, एकाधिक चित्रों का चयन करना, साझा करना, कास्टिंग करना, हटाना जैसी सभी चीज़ें अक्षम हैं। मुख्य स्क्रीन में उपयोगकर्ता- अवतार के माध्यम से रीड-ओनली प्रणाली को सक्षम/अक्षम करें",
|
||||
"advanced_settings_readonly_mode_title": "रीड-ओनली प्रणाली",
|
||||
"advanced_settings_self_signed_ssl_subtitle": "सर्वर एंडपॉइंट के लिए SSL प्रमाणपत्र सत्यापन को छोड़ देता है। स्व-हस्ताक्षरित प्रमाणपत्रों के लिए आवश्यक है।",
|
||||
"advanced_settings_self_signed_ssl_title": "स्व-हस्ताक्षरित SSL प्रमाणपत्रों की अनुमति दें",
|
||||
"advanced_settings_sync_remote_deletions_subtitle": "वेब पर कार्रवाई किए जाने पर इस डिवाइस पर किसी संपत्ति को स्वचालित रूप से हटाएँ या पुनर्स्थापित करें",
|
||||
@@ -419,6 +447,7 @@
|
||||
"album_remove_user_confirmation": "क्या आप वाकई {user} को हटाना चाहते हैं?",
|
||||
"album_search_not_found": "आपकी खोज से मेल खाता कोई एल्बम नहीं मिला",
|
||||
"album_share_no_users": "ऐसा लगता है कि आपने यह एल्बम सभी उपयोगकर्ताओं के साथ साझा कर दिया है या आपके पास साझा करने के लिए कोई उपयोगकर्ता नहीं है।",
|
||||
"album_summary": "एल्बम सारांश",
|
||||
"album_updated": "एल्बम अपडेट किया गया",
|
||||
"album_updated_setting_description": "जब किसी साझा एल्बम में नई संपत्तियाँ हों तो एक ईमेल सूचना प्राप्त करें",
|
||||
"album_user_left": "बायाँ {album}",
|
||||
@@ -452,11 +481,16 @@
|
||||
"api_key_description": "यह की केवल एक बार दिखाई जाएगी। विंडो बंद करने से पहले कृपया इसे कॉपी करना सुनिश्चित करें।।",
|
||||
"api_key_empty": "आपका एपीआई कुंजी नाम खाली नहीं होना चाहिए",
|
||||
"api_keys": "एपीआई कीज",
|
||||
"app_architecture_variant": "रूपान्तर (स्थापत्य/आर्किटेक्चर)",
|
||||
"app_bar_signout_dialog_content": "क्या आप सुनिश्चित हैं कि आप लॉग आउट करना चाहते हैं?",
|
||||
"app_bar_signout_dialog_ok": "हाँ",
|
||||
"app_bar_signout_dialog_title": "लॉग आउट",
|
||||
"app_download_links": "ऐप डाउनलोड लिंक",
|
||||
"app_settings": "एप्लिकेशन सेटिंग",
|
||||
"app_stores": "ऐप स्टोर/गोदाम",
|
||||
"app_update_available": "आधुनिक ऐप उपलब्ध है",
|
||||
"appears_in": "प्रकट होता है",
|
||||
"apply_count": "लागू करें ({count, number})",
|
||||
"archive": "संग्रहालय",
|
||||
"archive_action_prompt": "{count} को संग्रह में जोड़ा गया",
|
||||
"archive_or_unarchive_photo": "फ़ोटो को संग्रहीत या असंग्रहीत करें",
|
||||
@@ -465,17 +499,17 @@
|
||||
"archive_size": "पुरालेख आकार",
|
||||
"archive_size_description": "डाउनलोड के लिए संग्रह आकार कॉन्फ़िगर करें (GiB में)",
|
||||
"archived": "संग्रहित",
|
||||
"archived_count": "{count, plural, other {# संग्रहीत किए गए}",
|
||||
"archived_count": "{count, plural, other {# संग्रहीत किए गए}}",
|
||||
"are_these_the_same_person": "क्या ये वही व्यक्ति हैं?",
|
||||
"are_you_sure_to_do_this": "क्या आप वास्तव में इसे करना चाहते हैं?",
|
||||
"asset_action_delete_err_read_only": "केवल पढ़ने योग्य परिसंपत्ति(ओं) को हटाया नहीं जा सकता, छोड़ा जा सकता है",
|
||||
"asset_action_share_err_offline": "ऑफ़लाइन परिसंपत्ति(एँ) प्राप्त नहीं की जा सकती, छोड़ी जा रही है",
|
||||
"asset_added_to_album": "एल्बम में डाला गया",
|
||||
"asset_adding_to_album": "एल्बम में डाला जा रहा है..।",
|
||||
"asset_adding_to_album": "एल्बम में डाला जा रहा है…",
|
||||
"asset_description_updated": "संपत्ति विवरण अद्यतन कर दिया गया है",
|
||||
"asset_filename_is_offline": "एसेट {filename} ऑफ़लाइन है",
|
||||
"asset_has_unassigned_faces": "एसेट में अनिर्धारित चेहरे हैं",
|
||||
"asset_hashing": "हैशिंग...।",
|
||||
"asset_hashing": "हैशिंग…",
|
||||
"asset_list_group_by_sub_title": "द्वारा समूह बनाएं",
|
||||
"asset_list_layout_settings_dynamic_layout_title": "गतिशील लेआउट",
|
||||
"asset_list_layout_settings_group_automatically": "स्वचालित",
|
||||
@@ -489,6 +523,8 @@
|
||||
"asset_restored_successfully": "संपत्ति(याँ) सफलतापूर्वक पुनर्स्थापित की गईं",
|
||||
"asset_skipped": "छोड़ा गया",
|
||||
"asset_skipped_in_trash": "कचरे में",
|
||||
"asset_trashed": "एसेट नष्ट किया गया",
|
||||
"asset_troubleshoot": "एसेट समस्या निवारण",
|
||||
"asset_uploaded": "अपलोड किए गए",
|
||||
"asset_uploading": "अपलोड हो रहा है…",
|
||||
"asset_viewer_settings_subtitle": "अपनी गैलरी व्यूअर सेटिंग प्रबंधित करें",
|
||||
@@ -496,7 +532,9 @@
|
||||
"assets": "संपत्तियां",
|
||||
"assets_added_count": "{count, plural, one {# asset} other {# assets}} जोड़ा गया",
|
||||
"assets_added_to_album_count": "एल्बम में {count, plural, one {# asset} other {# assets}} जोड़ा गया",
|
||||
"assets_added_to_albums_count": "{assetTotal, plural, one {# asset} other {# assets}} को {albumTotal, plural, one {# album} other {# albums}} से जोड़ा गया",
|
||||
"assets_cannot_be_added_to_album_count": "{count, plural, one {Asset} other {Assets}} को एल्बम में नहीं जोड़ा जा सकता",
|
||||
"assets_cannot_be_added_to_albums": "{count, plural, one {Asset} other {Assets}} किसी एल्बम से नहीं जोड़े जा सकते",
|
||||
"assets_count": "{count, plural, one {# आइटम} other {# आइटम्स}}",
|
||||
"assets_deleted_permanently": "{count} संपत्ति(याँ) स्थायी रूप से हटा दी गईं",
|
||||
"assets_deleted_permanently_from_server": "{count} संपत्ति(याँ) इमिच सर्वर से स्थायी रूप से हटा दी गईं",
|
||||
@@ -513,14 +551,17 @@
|
||||
"assets_trashed_count": "ट्रैश की गई {count, plural, one {# asset} other {# assets}}",
|
||||
"assets_trashed_from_server": "{count} संपत्ति(याँ) इमिच सर्वर से कचरे में डाली गईं",
|
||||
"assets_were_part_of_album_count": "{count, plural, one {Asset was} other {Assets were}}एल्बम का पहले से ही हिस्सा थे",
|
||||
"assets_were_part_of_albums_count": "{count, plural, one {Asset was} other {Assets were}} पहले ही एल्बम में संयोजित हैं",
|
||||
"authorized_devices": "अधिकृत उपकरण",
|
||||
"automatic_endpoint_switching_subtitle": "उपलब्ध होने पर निर्दिष्ट वाई-फाई से स्थानीय रूप से कनेक्ट करें और अन्यत्र वैकल्पिक कनेक्शन का उपयोग करें",
|
||||
"automatic_endpoint_switching_title": "स्वचालित URL स्विचिंग",
|
||||
"autoplay_slideshow": "ऑटोप्ले स्लाइड शो",
|
||||
"back": "वापस",
|
||||
"back_close_deselect": "वापस जाएँ, बंद करें, या अचयनित करें",
|
||||
"background_backup_running_error": "परिप्रेक्ष्य बैकअप अभी जारी है, नियमावली बैकअप प्रारंभ नहीं किया जा सकता",
|
||||
"background_location_permission": "पृष्ठभूमि स्थान अनुमति",
|
||||
"background_location_permission_content": "पृष्ठभूमि में चलते समय नेटवर्क बदलने के लिए, Immich के पास *हमेशा* सटीक स्थान तक पहुंच होनी चाहिए ताकि ऐप वाई-फाई नेटवर्क का नाम पढ़ सके",
|
||||
"background_options": "परिप्रेक्ष्य विकल्प",
|
||||
"backup": "बैकअप",
|
||||
"backup_album_selection_page_albums_device": "डिवाइस पर एल्बम ({count})",
|
||||
"backup_album_selection_page_albums_tap": "शामिल करने के लिए टैप करें, बाहर करने के लिए डबल टैप करें",
|
||||
@@ -528,8 +569,10 @@
|
||||
"backup_album_selection_page_select_albums": "एल्बम चुनें",
|
||||
"backup_album_selection_page_selection_info": "चयन जानकारी",
|
||||
"backup_album_selection_page_total_assets": "कुल अद्वितीय संपत्तियाँ",
|
||||
"backup_albums_sync": "बैकअप एल्बम का तुल्यकालन",
|
||||
"backup_all": "सभी",
|
||||
"backup_background_service_backup_failed_message": "संपत्तियों का बैकअप लेने में विफल. पुनः प्रयास किया जा रहा है…",
|
||||
"backup_background_service_complete_notification": "एसेट का बैकअप पूरा हुआ",
|
||||
"backup_background_service_connection_failed_message": "सर्वर से कनेक्ट करने में विफल. पुनः प्रयास किया जा रहा है…",
|
||||
"backup_background_service_current_upload_notification": "{filename} अपलोड हो रहा है",
|
||||
"backup_background_service_default_notification": "नई परिसंपत्तियों की जांच की जा रही है…",
|
||||
@@ -577,6 +620,7 @@
|
||||
"backup_controller_page_turn_on": "अग्रभूमि बैकअप चालू करें",
|
||||
"backup_controller_page_uploading_file_info": "फ़ाइल जानकारी अपलोड करना",
|
||||
"backup_err_only_album": "एकमात्र एल्बम नहीं हटाया जा सकता",
|
||||
"backup_error_sync_failed": "तुल्यकालन विफल. बैकअप संसाधित नहीं किया जा सकता।",
|
||||
"backup_info_card_assets": "संपत्ति",
|
||||
"backup_manual_cancelled": "रद्द",
|
||||
"backup_manual_in_progress": "अपलोड पहले से ही प्रगति पर है। कुछ देर बाद प्रयास करें",
|
||||
@@ -638,12 +682,16 @@
|
||||
"change_password_description": "यह या तो पहली बार है जब आप सिस्टम में साइन इन कर रहे हैं या आपका पासवर्ड बदलने का अनुरोध किया गया है।",
|
||||
"change_password_form_confirm_password": "पासवर्ड की पुष्टि कीजिये",
|
||||
"change_password_form_description": "नमस्ते {name},\n\nया तो आप पहली बार सिस्टम में साइन इन कर रहे हैं या फिर आपका पासवर्ड बदलने का अनुरोध किया गया है। कृपया नीचे नया पासवर्ड डालें।",
|
||||
"change_password_form_log_out": "अन्य सभी डिवाइस को लॉग आउट करें",
|
||||
"change_password_form_log_out_description": "अन्य सभी डिवाइस से लॉग आउट करना अनुशंसित है",
|
||||
"change_password_form_new_password": "नया पासवर्ड",
|
||||
"change_password_form_password_mismatch": "सांकेतिक शब्द मेल नहीं खाते",
|
||||
"change_password_form_reenter_new_password": "नया पासवर्ड पुनः दर्ज करें",
|
||||
"change_pin_code": "पिन कोड बदलें",
|
||||
"change_your_password": "अपना पासवर्ड बदलें",
|
||||
"changed_visibility_successfully": "दृश्यता सफलतापूर्वक परिवर्तित",
|
||||
"charging": "चार्जिंग",
|
||||
"charging_requirement_mobile_backup": "परिप्रेक्ष्य बैकअप के लिए डिवाइस का चार्जिंग पे लगे होना आवश्यक है",
|
||||
"check_corrupt_asset_backup": "दूषित परिसंपत्ति बैकअप की जाँच करें",
|
||||
"check_corrupt_asset_backup_button": "जाँच करें",
|
||||
"check_corrupt_asset_backup_description": "यह जाँच केवल वाई-फ़ाई पर ही करें और सभी संपत्तियों का बैकअप लेने के बाद ही करें। इस प्रक्रिया में कुछ मिनट लग सकते हैं।",
|
||||
@@ -665,7 +713,7 @@
|
||||
"client_cert_subtitle": "केवल PKCS12 (.p12, .pfx) फ़ॉर्मैट का समर्थन करता है। प्रमाणपत्र आयात/हटाएँ केवल लॉगिन से पहले उपलब्ध हैं",
|
||||
"client_cert_title": "SSL क्लाइंट प्रमाणपत्र",
|
||||
"clockwise": "दक्षिणावर्त",
|
||||
"close": "बंद",
|
||||
"close": "बंद करें",
|
||||
"collapse": "गिर जाना",
|
||||
"collapse_all": "सभी को संकुचित करें",
|
||||
"color": "रंग",
|
||||
@@ -675,8 +723,8 @@
|
||||
"comments_and_likes": "टिप्पणियाँ और पसंद",
|
||||
"comments_are_disabled": "टिप्पणियाँ अक्षम हैं",
|
||||
"common_create_new_album": "नया एल्बम बनाएँ",
|
||||
"completed": "पुरा होना",
|
||||
"confirm": "पुष्टि",
|
||||
"completed": "पूरित",
|
||||
"confirm": "पुष्टि करें",
|
||||
"confirm_admin_password": "एडमिन पासवर्ड की पुष्टि करें",
|
||||
"confirm_delete_face": "क्या आप वाकई एसेट से {name} चेहरा हटाना चाहते हैं?",
|
||||
"confirm_delete_shared_link": "क्या आप वाकई इस साझा लिंक को हटाना चाहते हैं?",
|
||||
@@ -685,13 +733,13 @@
|
||||
"confirm_password": "पासवर्ड की पुष्टि कीजिये",
|
||||
"confirm_tag_face": "क्या आप इस चेहरे को {name} के रूप में टैग करना चाहते हैं?",
|
||||
"confirm_tag_face_unnamed": "क्या आप इस चेहरे को टैग करना चाहते हैं?",
|
||||
"connected_device": "कनेक्टेड डिवाइस",
|
||||
"connected_device": "योजित यंत्र",
|
||||
"connected_to": "से जुड़ा",
|
||||
"contain": "समाहित",
|
||||
"context": "संदर्भ",
|
||||
"continue": "जारी",
|
||||
"control_bottom_app_bar_create_new_album": "नया एल्बम बनाएँ",
|
||||
"control_bottom_app_bar_delete_from_immich": "Immich से हटाएं",
|
||||
"control_bottom_app_bar_delete_from_immich": "इम्मिच से हटाएं",
|
||||
"control_bottom_app_bar_delete_from_local": "डिवाइस से हटाएं",
|
||||
"control_bottom_app_bar_edit_location": "स्थान संपादित करें",
|
||||
"control_bottom_app_bar_edit_time": "तारीख और समय संपादित करें",
|
||||
@@ -713,6 +761,7 @@
|
||||
"create": "तैयार करें",
|
||||
"create_album": "एल्बम बनाओ",
|
||||
"create_album_page_untitled": "शीर्षकहीन",
|
||||
"create_api_key": "ऐ.पी.आई. चाभी बनाएं",
|
||||
"create_library": "लाइब्रेरी बनाएं",
|
||||
"create_link": "लिंक बनाएं",
|
||||
"create_link_to_share": "शेयर करने के लिए लिंक बनाएं",
|
||||
@@ -729,6 +778,7 @@
|
||||
"create_user": "उपयोगकर्ता बनाइये",
|
||||
"created": "बनाया",
|
||||
"created_at": "बनाया था",
|
||||
"creating_linked_albums": "जुड़े हुए एल्बम बनाए जा रहे हैं..।",
|
||||
"crop": "छाँटें",
|
||||
"curated_object_page_title": "चीज़ें",
|
||||
"current_device": "वर्तमान उपकरण",
|
||||
@@ -741,6 +791,7 @@
|
||||
"daily_title_text_date_year": "ई, एमएमएम दिन, वर्ष",
|
||||
"dark": "डार्क",
|
||||
"dark_theme": "डार्क थीम टॉगल करें",
|
||||
"date": "दिनांक",
|
||||
"date_after": "इसके बाद की तारीख",
|
||||
"date_and_time": "तिथि और समय",
|
||||
"date_before": "पहले की तारीख",
|
||||
@@ -748,6 +799,7 @@
|
||||
"date_of_birth_saved": "जन्मतिथि सफलतापूर्वक सहेजी गई",
|
||||
"date_range": "तिथि सीमा",
|
||||
"day": "दिन",
|
||||
"days": "दिन",
|
||||
"deduplicate_all": "सभी को डुप्लिकेट करें",
|
||||
"deduplication_criteria_1": "छवि का आकार बाइट्स में",
|
||||
"deduplication_criteria_2": "EXIF डेटा की संख्या",
|
||||
@@ -836,6 +888,8 @@
|
||||
"edit_date": "संपादन की तारीख",
|
||||
"edit_date_and_time": "दिनांक और समय संपादित करें",
|
||||
"edit_date_and_time_action_prompt": "{count} तारीख और समय संपादित किए गए",
|
||||
"edit_date_and_time_by_offset": "अंकुर से दिनांक बदलें",
|
||||
"edit_date_and_time_by_offset_interval": "नयी दिनांक सीमा: {from} - {to}",
|
||||
"edit_description": "संपादित करें वर्णन",
|
||||
"edit_description_prompt": "कृपया एक नया विवरण चुनें:",
|
||||
"edit_exclusion_pattern": "बहिष्करण पैटर्न संपादित करें",
|
||||
@@ -874,7 +928,9 @@
|
||||
"error": "गलती",
|
||||
"error_change_sort_album": "एल्बम का क्रम बदलने में असफल रहा",
|
||||
"error_delete_face": "एसेट से चेहरे को हटाने में त्रुटि हुई",
|
||||
"error_getting_places": "स्थानों को प्राप्त करने में त्रुटि हुई",
|
||||
"error_loading_image": "छवि लोड करने में त्रुटि",
|
||||
"error_loading_partners": "जोड़ीदार लोड करने में त्रुटि हुई: {error}",
|
||||
"error_saving_image": "त्रुटि: {error}",
|
||||
"error_tag_face_bounding_box": "चेहरे को टैग करने में त्रुटि – बाउंडिंग बॉक्स निर्देशांक प्राप्त नहीं कर सके",
|
||||
"error_title": "त्रुटि - कुछ गलत हो गया",
|
||||
@@ -907,6 +963,7 @@
|
||||
"failed_to_load_notifications": "सूचनाएँ लोड करने में विफल",
|
||||
"failed_to_load_people": "लोगों को लोड करने में विफल",
|
||||
"failed_to_remove_product_key": "उत्पाद कुंजी निकालने में विफल",
|
||||
"failed_to_reset_pin_code": "पिन कोड रीसेट करना विफल हुआ",
|
||||
"failed_to_stack_assets": "परिसंपत्तियों का ढेर लगाने में विफल",
|
||||
"failed_to_unstack_assets": "परिसंपत्तियों का ढेर खोलने में विफल",
|
||||
"failed_to_update_notification_status": "सूचना की स्थिति अपडेट करने में विफल",
|
||||
@@ -915,6 +972,7 @@
|
||||
"paths_validation_failed": "{paths, plural, one {# पथ} other {# पथ}} सत्यापन में विफल रहे",
|
||||
"profile_picture_transparent_pixels": "प्रोफ़ाइल चित्रों में पारदर्शी पिक्सेल नहीं हो सकते।",
|
||||
"quota_higher_than_disk_size": "आपने डिस्क आकार से अधिक कोटा निर्धारित किया है",
|
||||
"something_went_wrong": "कुछ त्रुटि हुई",
|
||||
"unable_to_add_album_users": "उपयोगकर्ताओं को एल्बम में डालने में असमर्थ",
|
||||
"unable_to_add_assets_to_shared_link": "साझा लिंक में संपत्ति डालने में असमर्थ",
|
||||
"unable_to_add_comment": "टिप्पणी डालने में असमर्थ",
|
||||
@@ -1000,22 +1058,42 @@
|
||||
},
|
||||
"exif": "एक्सिफ",
|
||||
"exif_bottom_sheet_description": "विवरण जोड़ें..।",
|
||||
"exif_bottom_sheet_description_error": "विवरण के आधुनीकरण करने में त्रुटि हुई",
|
||||
"exif_bottom_sheet_details": "विवरण",
|
||||
"exif_bottom_sheet_location": "स्थान",
|
||||
"exif_bottom_sheet_no_description": "कोई विवरण नहीं",
|
||||
"exif_bottom_sheet_people": "लोग",
|
||||
"exif_bottom_sheet_person_add_person": "नाम डालें",
|
||||
"exit_slideshow": "स्लाइड शो से बाहर निकलें",
|
||||
"expand_all": "सभी का विस्तार",
|
||||
"experimental_settings_new_asset_list_subtitle": "कार्य प्रगति पर है",
|
||||
"experimental_settings_new_asset_list_title": "प्रयोगात्मक फोटो ग्रिड सक्षम करें",
|
||||
"experimental_settings_subtitle": "अपने जोखिम पर उपयोग करें!",
|
||||
"experimental_settings_title": "प्रयोगात्मक",
|
||||
"expire_after": "एक्सपायर आफ्टर",
|
||||
"expired": "खत्म हो चुका",
|
||||
"expires_date": "{date} को समाप्त हो रहा है",
|
||||
"explore": "अन्वेषण करना",
|
||||
"explorer": "समन्वेषक",
|
||||
"export": "निर्यात",
|
||||
"export_as_json": "JSON के रूप में निर्यात करें",
|
||||
"export_database": "डेटाबेस निर्यात करें",
|
||||
"export_database_description": "इस.क्यू.लाइट डेटाबेस निर्यात करें",
|
||||
"extension": "विस्तार",
|
||||
"external": "बाहरी",
|
||||
"external_libraries": "बाहरी पुस्तकालय",
|
||||
"external_network": "बाहरी नेटवर्क",
|
||||
"external_network_sheet_info": "When not on the preferred WiFi network, the app will connect to the server through the first of the below URLs it can reach, starting from top to bottom",
|
||||
"face_unassigned": "सौंपे नहीं गए",
|
||||
"failed": "विफल हुआ",
|
||||
"failed_to_authenticate": "प्रमाणित करने में विफल",
|
||||
"failed_to_load_assets": "एसेट लोड करने में विफल",
|
||||
"failed_to_load_folder": "फोल्डर लोड करने में विफल",
|
||||
"favorite": "पसंदीदा",
|
||||
"favorite_action_prompt": "{count} पसंदीदा संकलन में जोड़े गए",
|
||||
"favorite_or_unfavorite_photo": "पसंदीदा या नापसंद फोटो",
|
||||
"favorites": "पसंदीदा",
|
||||
"favorites_page_no_favorites": "कोई पसंदीदा एसेट नहीं मिले",
|
||||
"feature_photo_updated": "फ़ीचर फ़ोटो अपडेट किया गया",
|
||||
"file_name": "फ़ाइल का नाम",
|
||||
"file_name_or_extension": "फ़ाइल का नाम या एक्सटेंशन",
|
||||
@@ -1716,6 +1716,7 @@
"running": "Kjører",
"save": "Lagre",
"save_to_gallery": "Lagre til galleriet",
"saved": "Lagret",
"saved_api_key": "Lagret API-nøkkel",
"saved_profile": "Lagret profil",
"saved_settings": "Lagret instillinger",
@@ -1732,7 +1733,7 @@
"search_by_description_example": "Turdag i Sapa",
"search_by_filename": "Søk etter filnavn og filtype",
"search_by_filename_example": "f.eks. IMG_1234.JPG eller PNG",
"search_by_ocr": "Søk med OCR",
"search_by_ocr": "Søk etter tekst i bilde",
"search_by_ocr_example": "Latte",
"search_camera_lens_model": "Søk etter objektivmodell...",
"search_camera_make": "Søk etter kameramerke...",
@@ -1716,6 +1716,7 @@
"running": "W trakcie",
"save": "Zapisz",
"save_to_gallery": "Zapisz w galerii",
"saved": "Zapisano",
"saved_api_key": "Zapisany klucz API",
"saved_profile": "Zapisany profil",
"saved_settings": "Zapisane ustawienia",
90
i18n/pt.json
@@ -344,7 +344,7 @@
|
||||
"transcoding_hardware_acceleration": "Aceleração de hardware",
|
||||
"transcoding_hardware_acceleration_description": "Experimental; transcodificação mais rápida, mas poderá ter qualidade inferior com a mesma taxa de bits",
|
||||
"transcoding_hardware_decoding": "Decodificação de hardware",
|
||||
"transcoding_hardware_decoding_setting_description": "Permite a aceleração ponta a ponta em vez de apenas acelerar a codificação. Pode não funcionar em todos os formatos de arquivo.",
|
||||
"transcoding_hardware_decoding_setting_description": "Permite a aceleração ponta a ponta em vez de apenas acelerar a codificação. Pode não funcionar em todos os videos.",
|
||||
"transcoding_max_b_frames": "Máximo de quadros B",
|
||||
"transcoding_max_b_frames_description": "Valores mais altos melhoram a eficiência da compressão, mas tornam a codificação mais lenta. Pode não ser compatível com aceleração de hardware em dispositivos mais antigos. 0 desativa os quadros B, enquanto -1 define esse valor automaticamente.",
|
||||
"transcoding_max_bitrate": "Taxa de bits máxima",
|
||||
@@ -455,7 +455,7 @@
|
||||
"album_viewer_appbar_delete_confirm": "Tem certeza que deseja excluir este álbum da sua conta?",
|
||||
"album_viewer_appbar_share_err_delete": "Ocorreu um erro ao eliminar álbum",
|
||||
"album_viewer_appbar_share_err_leave": "Ocorreu um erro ao sair do álbum",
|
||||
"album_viewer_appbar_share_err_remove": "Houveram problemas ao remover arquivos do álbum",
|
||||
"album_viewer_appbar_share_err_remove": "Ocorreu um erro ao remover ficheiros do álbum",
|
||||
"album_viewer_appbar_share_err_title": "Ocorreu um erro ao alterar o título do álbum",
|
||||
"album_viewer_appbar_share_leave": "Deixar álbum",
|
||||
"album_viewer_appbar_share_to": "Compartilhar com",
|
||||
@@ -494,7 +494,7 @@
|
||||
"archive": "Arquivo",
|
||||
"archive_action_prompt": "{count} adicionados ao Arquivo",
|
||||
"archive_or_unarchive_photo": "Arquivar ou desarquivar foto",
|
||||
"archive_page_no_archived_assets": "Nenhum arquivo encontrado",
|
||||
"archive_page_no_archived_assets": "Nenhum ficheiro arquivado encontrado",
|
||||
"archive_page_title": "Arquivo ({count})",
|
||||
"archive_size": "Tamanho do arquivo",
|
||||
"archive_size_description": "Configure o tamanho do arquivo para transferências (em GiB)",
|
||||
@@ -502,8 +502,8 @@
|
||||
"archived_count": "{count, plural, one {#Arquivado # item} other {Arquivados # itens}}",
|
||||
"are_these_the_same_person": "Estas pessoas são a mesma pessoa?",
|
||||
"are_you_sure_to_do_this": "Tem a certeza de que quer fazer isto?",
|
||||
"asset_action_delete_err_read_only": "Não é possível excluir arquivo só leitura, ignorando",
|
||||
"asset_action_share_err_offline": "Não foi possível obter os arquivos offline, ignorando",
|
||||
"asset_action_delete_err_read_only": "Não é possível eliminar ficheiro só de leitura, a ignorar",
|
||||
"asset_action_share_err_offline": "Não foi possível obter os ficheiros offline, a ignorar",
|
||||
"asset_added_to_album": "Adicionado ao álbum",
|
||||
"asset_adding_to_album": "A adicionar ao álbum…",
|
||||
"asset_description_updated": "A descrição do ficheiro foi atualizada",
|
||||
@@ -513,14 +513,14 @@
|
||||
"asset_list_group_by_sub_title": "Agrupar por",
|
||||
"asset_list_layout_settings_dynamic_layout_title": "Layout dinâmico",
|
||||
"asset_list_layout_settings_group_automatically": "Automático",
|
||||
"asset_list_layout_settings_group_by": "Agrupar arquivos por",
|
||||
"asset_list_layout_settings_group_by": "Agrupar ficheiros por",
|
||||
"asset_list_layout_settings_group_by_month_day": "Mês + dia",
|
||||
"asset_list_layout_sub_title": "Disposição",
|
||||
"asset_list_settings_subtitle": "Configurações de disposição da grade de fotos",
|
||||
"asset_list_settings_title": "Grade de fotos",
|
||||
"asset_offline": "Ficheiro Indisponível",
|
||||
"asset_offline_description": "Este ficheiro externo deixou de estar disponível no disco. Contacte o seu administrador do Immich para obter ajuda.",
|
||||
"asset_restored_successfully": "Arquivo restaurado com sucesso",
|
||||
"asset_restored_successfully": "FIcheiro restaurado com sucesso",
|
||||
"asset_skipped": "Ignorado",
|
||||
"asset_skipped_in_trash": "Na reciclagem",
|
||||
"asset_trashed": "Ficheiro apagado",
|
||||
@@ -565,19 +565,19 @@
|
||||
"backup": "Cópia de segurança",
|
||||
"backup_album_selection_page_albums_device": "Álbuns no dispositivo ({count})",
|
||||
"backup_album_selection_page_albums_tap": "Toque para incluir, duplo toque para excluir",
|
||||
"backup_album_selection_page_assets_scatter": "Os arquivos podem estar espalhados em vários álbuns. Assim, os álbuns podem ser incluídos ou excluídos durante o processo de backup.",
|
||||
"backup_album_selection_page_assets_scatter": "Os ficheros podem estar espalhados por vários álbuns. Desta forma, os álbuns podem ser incluídos ou excluídos durante o processo de cópia de segurança.",
|
||||
"backup_album_selection_page_select_albums": "Selecione Álbuns",
|
||||
"backup_album_selection_page_selection_info": "Informações da Seleção",
|
||||
"backup_album_selection_page_total_assets": "Total de arquivos únicos",
|
||||
"backup_album_selection_page_total_assets": "Total de ficheiros únicos",
|
||||
"backup_albums_sync": "Cópia de segurança de sincronização de álbuns",
|
||||
"backup_all": "Tudo",
|
||||
"backup_background_service_backup_failed_message": "Ocorreu um erro ao efetuar cópia de segurança dos ficheiros. A tentar de novo…",
|
||||
"backup_background_service_complete_notification": "Cópia de conteúdos concluída",
|
||||
"backup_background_service_connection_failed_message": "Ocorreu um erro na ligação ao servidor. A tentar de novo…",
|
||||
"backup_background_service_current_upload_notification": "A enviar {filename}",
|
||||
"backup_background_service_default_notification": "Verificando novos arquivos…",
|
||||
"backup_background_service_default_notification": "A verificar se há novos ficheiros…",
|
||||
"backup_background_service_error_title": "Erro de backup",
|
||||
"backup_background_service_in_progress_notification": "Fazendo backup dos arquivos…",
|
||||
"backup_background_service_in_progress_notification": "A fazer cópia de segurança dos seus ficheiros…",
|
||||
"backup_background_service_upload_failure_notification": "Ocorreu um erro ao enviar {filename}",
|
||||
"backup_controller_page_albums": "Backup Álbuns",
|
||||
"backup_controller_page_background_app_refresh_disabled_content": "Para utilizar a cópia de segurança em segundo plano, ative a atualização da aplicação em segundo plano em Definições > Geral > Atualização da aplicação em segundo plano.",
|
||||
@@ -600,7 +600,7 @@
|
||||
"backup_controller_page_backup_selected": "Selecionado: ",
|
||||
"backup_controller_page_backup_sub": "Fotos e vídeos salvos em backup",
|
||||
"backup_controller_page_created": "Criado em: {date}",
|
||||
"backup_controller_page_desc_backup": "Ative o backup para enviar automáticamente novos arquivos para o servidor.",
|
||||
"backup_controller_page_desc_backup": "Ative a cópia de segurança em primeiro plano para enviar novos ficheiros automaticamente ao abrir a aplicação.",
|
||||
"backup_controller_page_excluded": "Eliminado: ",
|
||||
"backup_controller_page_failed": "Falhou ({count})",
|
||||
"backup_controller_page_filename": "Nome do ficheiro: {filename} [{size}]",
|
||||
@@ -618,10 +618,10 @@
|
||||
"backup_controller_page_total_sub": "Todas as fotos e vídeos dos álbuns selecionados",
|
||||
"backup_controller_page_turn_off": "Desativar backup",
|
||||
"backup_controller_page_turn_on": "Ativar backup",
|
||||
"backup_controller_page_uploading_file_info": "Enviando arquivo",
|
||||
"backup_controller_page_uploading_file_info": "A enviar informações do ficheiro",
|
||||
"backup_err_only_album": "Não é possível remover apenas o álbum",
|
||||
"backup_error_sync_failed": "A sincronização falhou. Não é possível fazer a cópia de segurança.",
|
||||
"backup_info_card_assets": "arquivos",
|
||||
"backup_info_card_assets": "ficheiros",
|
||||
"backup_manual_cancelled": "Cancelado",
|
||||
"backup_manual_in_progress": "Envio já está em progresso. Tente novamente mais tarde",
|
||||
"backup_manual_success": "Sucesso",
|
||||
@@ -694,7 +694,7 @@
|
||||
"charging_requirement_mobile_backup": "Cópia de segurança de fundo necessita que o dispositivo esteja a carregar",
|
||||
"check_corrupt_asset_backup": "Verificar por backups corrompidos",
|
||||
"check_corrupt_asset_backup_button": "Verificar",
|
||||
"check_corrupt_asset_backup_description": "Execute esta verificação somente em uma rede Wi-Fi e quando o backup de todos os arquivos já estiver concluído. O processo demora alguns minutos.",
|
||||
"check_corrupt_asset_backup_description": "Execute esta verificação apenas numa rede Wi-Fi e quando a cópia de segurança de todos os ficheiros já estiver concluída. O processo pode demorar alguns minutos.",
|
||||
"check_logs": "Verificar registos",
|
||||
"choose_matching_people_to_merge": "Escolha pessoas correspondentes para unir",
|
||||
"city": "Cidade/Localidade",
|
||||
@@ -770,7 +770,7 @@
|
||||
"create_new_person": "Criar nova pessoa",
|
||||
"create_new_person_hint": "Associe os ficheiros a uma nova pessoa",
|
||||
"create_new_user": "Criar novo utilizador",
|
||||
"create_shared_album_page_share_add_assets": "ADICIONAR ARQUIVOS",
|
||||
"create_shared_album_page_share_add_assets": "ADICIONAR FICHEIROS",
|
||||
"create_shared_album_page_share_select_photos": "Selecionar Fotos",
|
||||
"create_shared_link": "Criar link partilhado",
|
||||
"create_tag": "Criar etiqueta",
|
||||
@@ -812,10 +812,10 @@
|
||||
"delete_action_prompt": "{count} eliminados",
|
||||
"delete_album": "Apagar álbum",
|
||||
"delete_api_key_prompt": "Tem a certeza de que deseja remover esta chave de API?",
|
||||
"delete_dialog_alert": "Esses arquivos serão permanentemente apagados do Immich e de seu dispositivo",
|
||||
"delete_dialog_alert_local": "Estes arquivos serão permanentemente excluídos do seu dispositivo, mas continuarão disponíveis no servidor Immich",
|
||||
"delete_dialog_alert_local_non_backed_up": "Não há backup de alguns dos arquivos no servidor e eles serão excluídos permanentemente do seu dispositivo",
|
||||
"delete_dialog_alert_remote": "Estes arquivos serão permanentemente excluídos do servidor Immich",
|
||||
"delete_dialog_alert": "Estes ficheiros serão eliminados permanentemente do Immich e do seu dispositivo",
|
||||
"delete_dialog_alert_local": "Estes ficheiros serão eliminados permanentemente do seu dispositivo, mas continuarão disponíveis no servidor Immich",
|
||||
"delete_dialog_alert_local_non_backed_up": "Alguns dos ficheiros não têm cópia de segurança no Immich e serão eliminados permanentemente do seu dispositivo",
|
||||
"delete_dialog_alert_remote": "Estes ficheiros serão eliminados permanentemente do servidor Immich",
|
||||
"delete_dialog_ok_force": "Confirmo que quero excluir",
|
||||
"delete_dialog_title": "Excluir Permanentemente",
|
||||
"delete_duplicates_confirmation": "Tem a certeza de que deseja eliminar permanentemente estes itens duplicados?",
|
||||
@@ -824,7 +824,7 @@
|
||||
"delete_library": "Eliminar Biblioteca",
|
||||
"delete_link": "Eliminar link",
|
||||
"delete_local_action_prompt": "{count} eliminados localmente",
|
||||
"delete_local_dialog_ok_backed_up_only": "Excluir apenas arquivos com backup",
|
||||
"delete_local_dialog_ok_backed_up_only": "Eliminar apenas ficheiros com cópia de segurança",
|
||||
"delete_local_dialog_ok_force": "Excluir mesmo assim",
|
||||
"delete_others": "Excluir outros",
|
||||
"delete_permanently": "Eliminar permanentemente",
|
||||
@@ -872,7 +872,7 @@
|
||||
"download_settings_description": "Gerir definições relacionadas com a transferência de ficheiros",
|
||||
"download_started": "Iniciando",
|
||||
"download_sucess": "Baixado com sucesso",
|
||||
"download_sucess_android": "O arquivo foi baixado na pasta DCIM/Immich",
|
||||
"download_sucess_android": "O ficheiro foi descarregado para a pasta DCIM/Immich",
|
||||
"download_waiting_to_retry": "Tentando novamente",
|
||||
"downloading": "A transferir",
|
||||
"downloading_asset_filename": "A transferir o ficheiro {filename}",
|
||||
@@ -1134,7 +1134,7 @@
|
||||
"group_owner": "Agrupar por dono",
|
||||
"group_places_by": "Agrupar lugares por...",
|
||||
"group_year": "Agrupar por ano",
|
||||
"haptic_feedback_switch": "Habilitar vibração",
|
||||
"haptic_feedback_switch": "Ativar vibração",
|
||||
"haptic_feedback_title": "Vibração",
|
||||
"has_quota": "Tem quota",
|
||||
"hash_asset": "Criptografar ficheiro",
|
||||
@@ -1154,20 +1154,20 @@
|
||||
"hide_unnamed_people": "Ocultar pessoas sem nome",
|
||||
"home_page_add_to_album_conflicts": "Foram adicionados {added} ficheiros ao álbum {album}. {failed} ficheiros já estão no álbum.",
|
||||
"home_page_add_to_album_err_local": "Ainda não é possível adicionar recursos locais aos álbuns, ignorando",
|
||||
"home_page_add_to_album_success": "Adicionado {added} arquivos ao álbum {album}.",
|
||||
"home_page_album_err_partner": "Ainda não é possível adicionar arquivos do parceiro a um álbum, ignorando",
|
||||
"home_page_add_to_album_success": "{added} ficheiros foram adicionados ao álbum {album}.",
|
||||
"home_page_album_err_partner": "Ainda não é possível adicionar ficheiros do parceiro a um álbum, a ignorar",
|
||||
"home_page_archive_err_local": "Ainda não é possível arquivar recursos locais, ignorando",
|
||||
"home_page_archive_err_partner": "Não é possível arquivar Fotos e Videos do parceiro, ignorando",
|
||||
"home_page_building_timeline": "Construindo a linha do tempo",
|
||||
"home_page_delete_err_partner": "Não é possível excluir arquivos do parceiro, ignorando",
|
||||
"home_page_delete_remote_err_local": "Foram selecionados arquivos locais para excluir remotamente, ignorando",
|
||||
"home_page_delete_err_partner": "Não é possível eliminar ficheiros do parceiro, a ignorar",
|
||||
"home_page_delete_remote_err_local": "Foram selecionados ficheiros locais para excluir remotamente, a ignorar",
|
||||
"home_page_favorite_err_local": "Ainda não é possível adicionar recursos locais favoritos, ignorando",
|
||||
"home_page_favorite_err_partner": "Ainda não é possível marcar arquivos do parceiro como favoritos, ignorando",
|
||||
"home_page_favorite_err_partner": "Ainda não é possível marcar ficheiros do parceiro como favoritos, a ignorar",
|
||||
"home_page_first_time_notice": "Se é a primeira vez que utiliza a aplicação, certifique-se de que marca pelo menos um álbum do dispositivo para cópia de segurança, para a linha do tempo poder ser preenchida com fotos e vídeos",
|
||||
"home_page_locked_error_local": "Não foi possível mover ficheiros locais para a pasta trancada, a continuar",
|
||||
"home_page_locked_error_partner": "Não foi possível mover ficheiros do parceiro para a pasta trancada, a continuar",
|
||||
"home_page_share_err_local": "Não é possível compartilhar arquivos locais com um link, ignorando",
|
||||
"home_page_upload_err_limit": "Só é possível enviar 30 arquivos por vez, ignorando",
|
||||
"home_page_share_err_local": "Não é possível partilhar ficheiros locais com um link, a ignorar",
|
||||
"home_page_upload_err_limit": "Só é possível enviar um máximo de 30 ficheiros de cada vez, a ignorar",
|
||||
"host": "Servidor",
|
||||
"hour": "Hora",
|
||||
"hours": "Horas",
|
||||
@@ -1187,7 +1187,7 @@
|
||||
"image_alt_text_date_place_3_people": "{isVideo, select, true {Vídeo gravado} other {Foto tirada}} em {city}, {country} com {person1}, {person2}, e {person3} em {date}",
|
||||
"image_alt_text_date_place_4_or_more_people": "{isVideo, select, true {Vídeo gravado} other {Foto tirada}} em {city}, {country} com {person1}, {person2}, e outras {additionalCount, number} pessoas em {date}",
|
||||
"image_saved_successfully": "Imagem salva",
|
||||
"image_viewer_page_state_provider_download_started": "Baixando arquivo",
|
||||
"image_viewer_page_state_provider_download_started": "A descarregar ficheiro",
|
||||
"image_viewer_page_state_provider_download_success": "Baixado com sucesso",
|
||||
"image_viewer_page_state_provider_share_error": "Erro ao compartilhar",
|
||||
"immich_logo": "Logotipo do Immich",
|
||||
@@ -1244,7 +1244,7 @@
|
||||
"library_options": "Opções da biblioteca",
|
||||
"library_page_device_albums": "Álbuns no dispositivo",
|
||||
"library_page_new_album": "Novo álbum",
|
||||
"library_page_sort_asset_count": "Quantidade de arquivos",
|
||||
"library_page_sort_asset_count": "Quantidade de ficheiros",
|
||||
"library_page_sort_created": "Data de criação",
|
||||
"library_page_sort_last_modified": "Última modificação",
|
||||
"library_page_sort_title": "Título do álbum",
|
||||
@@ -1383,8 +1383,8 @@
|
||||
"moved_to_archive": "{count, plural, one {Foi movido # ficheiro} other {Foram movidos # ficheiros}} para o arquivo",
|
||||
"moved_to_library": "{count, plural, one {Foi movido # ficheiro} other {Foram movidos # ficheiros}} para a biblioteca",
|
||||
"moved_to_trash": "Enviado para a reciclagem",
|
||||
"multiselect_grid_edit_date_time_err_read_only": "Não é possível editar a data de arquivo só leitura, ignorando",
|
||||
"multiselect_grid_edit_gps_err_read_only": "Não é possível editar a localização de arquivo só leitura, ignorando",
|
||||
"multiselect_grid_edit_date_time_err_read_only": "Não é possível editar a data de um ficheiro só de leitura, a ignorar",
|
||||
"multiselect_grid_edit_gps_err_read_only": "Não é possível editar a localização de um ficheiro só de leitura, a ignorar",
|
||||
"mute_memories": "Silenciar Memórias",
|
||||
"my_albums": "Os meus álbuns",
|
||||
"name": "Nome",
|
||||
@@ -1417,7 +1417,7 @@
|
||||
"no_albums_yet": "Parece que ainda não tem nenhum álbum.",
|
||||
"no_archived_assets_message": "Arquive fotos e vídeos para os ocultar da sua visualização de fotos",
|
||||
"no_assets_message": "FAÇA CLIQUE PARA CARREGAR A SUA PRIMEIRA FOTO",
|
||||
"no_assets_to_show": "Não há arquivos para exibir",
|
||||
"no_assets_to_show": "Não há ficheiros para exibir",
|
||||
"no_cast_devices_found": "Nenhum dispositivo de transmissão encontrado",
|
||||
"no_checksum_local": "Sem cálculo de verificação disponível - não pode capturar conteúdos locais",
|
||||
"no_checksum_remote": "Soma de verificação (checksum) não disponível - não é possível obter o recurso remoto",
|
||||
@@ -1586,7 +1586,7 @@
|
||||
"public_album": "Álbum público",
|
||||
"public_share": "Partilhar Publicamente",
|
||||
"purchase_account_info": "Apoiante",
|
||||
"purchase_activated_subtitle": "Agradecemos por apoiar o Immich e software de código aberto",
|
||||
"purchase_activated_subtitle": "Agradecemos o seu apoio ao Immich e ao software de código aberto",
|
||||
"purchase_activated_time": "Ativado em {date}",
|
||||
"purchase_activated_title": "A sua chave foi ativada com sucesso",
|
||||
"purchase_button_activate": "Ativar",
|
||||
@@ -1747,7 +1747,7 @@
|
||||
"search_filter_date_title": "Selecione a data",
|
||||
"search_filter_display_option_not_in_album": "Fora de álbum",
|
||||
"search_filter_display_options": "Opções de exibição",
|
||||
"search_filter_filename": "Pesquisar por nome do arquivo",
|
||||
"search_filter_filename": "Pesquisar por nome do ficheiro",
|
||||
"search_filter_location": "Localização",
|
||||
"search_filter_location_title": "Selecione a localização",
|
||||
"search_filter_media_type": "Tipo da mídia",
|
||||
@@ -1839,15 +1839,15 @@
|
||||
"setting_notifications_notify_minutes": "{count} minutos",
|
||||
"setting_notifications_notify_never": "Nunca",
|
||||
"setting_notifications_notify_seconds": "{count} segundos",
|
||||
"setting_notifications_single_progress_subtitle": "Informações detalhadas sobre o progresso do envio por arquivo",
|
||||
"setting_notifications_single_progress_subtitle": "Informações detalhadas sobre o progresso do envio por ficheiro",
|
||||
"setting_notifications_single_progress_title": "Mostrar progresso detalhado do backup em segundo plano",
|
||||
"setting_notifications_subtitle": "Ajuste as preferências de notificação",
|
||||
"setting_notifications_total_progress_subtitle": "Progresso do envio de arquivos (concluídos/total)",
|
||||
"setting_notifications_total_progress_subtitle": "Progresso do envio de ficheiro (concluídos/total)",
|
||||
"setting_notifications_total_progress_title": "Mostrar progresso total do backup em segundo plano",
|
||||
"setting_video_viewer_auto_play_subtitle": "Reproduzir os vídeos automaticamente quando abertos",
|
||||
"setting_video_viewer_auto_play_title": "Reproduzir vídeos automaticamente",
|
||||
"setting_video_viewer_looping_title": "Repetir",
|
||||
"setting_video_viewer_original_video_subtitle": "Ao transmitir um vídeo do servidor, usar o arquivo original, mesmo quando uma versão transcodificada esteja disponível. Pode fazer com que o vídeo demore para carregar. Vídeos disponíveis localmente são exibidos na qualidade original independente desta configuração.",
|
||||
"setting_video_viewer_original_video_subtitle": "Ao transmitir um vídeo do servidor, usar o ficheiro original, mesmo se uma versão transcodificada estiver disponível. Pode causar interrupções. Vídeos disponíveis localmente são exibidos na qualidade original independentemente desta definição.",
|
||||
"setting_video_viewer_original_video_title": "Forçar vídeo original",
|
||||
"settings": "Definições",
|
||||
"settings_require_restart": "Reinicie o Immich para aplicar essa configuração",
|
||||
@@ -2019,7 +2019,7 @@
|
||||
"theme_setting_primary_color_subtitle": "Selecione a cor primária, utilizada nas ações principais e nos realces.",
|
||||
"theme_setting_primary_color_title": "Cor primária",
|
||||
"theme_setting_system_primary_color_title": "Use a cor do sistema",
|
||||
"theme_setting_system_theme_switch": "Automático (Siga a configuração do sistema)",
|
||||
"theme_setting_system_theme_switch": "Automático (Seguir a configuração do sistema)",
|
||||
"theme_setting_theme_subtitle": "Escolha a configuração do tema da aplicação",
|
||||
"theme_setting_three_stage_loading_subtitle": "O carregamento em três estágios pode aumentar o desempenho do carregamento, mas causa uma carga de rede significativamente maior",
|
||||
"theme_setting_three_stage_loading_title": "Habilitar carregamento em três estágios",
|
||||
@@ -2048,11 +2048,11 @@
|
||||
"trash_emptied": "Lixeira esvaziada",
|
||||
"trash_no_results_message": "Fotos e vídeos enviados para a reciclagem aparecem aqui.",
|
||||
"trash_page_delete_all": "Excluir tudo",
|
||||
"trash_page_empty_trash_dialog_content": "Deseja esvaziar a lixera? Estes arquivos serão apagados de forma permanente do Immich",
|
||||
"trash_page_empty_trash_dialog_content": "Deseja esvaziar a reciclagem? Estes ficheiros serão apagados permanentemente do Immich",
|
||||
"trash_page_info": "Ficheiros na reciclagem irão ser eliminados permanentemente após {days} dias",
|
||||
"trash_page_no_assets": "Lixeira vazia",
|
||||
"trash_page_restore_all": "Restaurar tudo",
|
||||
"trash_page_select_assets_btn": "Selecionar arquivos",
|
||||
"trash_page_select_assets_btn": "Selecionar ficheiros",
|
||||
"trash_page_title": "Reciclagem ({count})",
|
||||
"trashed_items_will_be_permanently_deleted_after": "Os itens da reciclagem são eliminados permanentemente após {days, plural, one {# dia} other {# dias}}.",
|
||||
"troubleshoot": "Diagnosticar problemas",
|
||||
@@ -2094,8 +2094,8 @@
|
||||
"upload_action_prompt": "{count} à espera de carregar",
|
||||
"upload_concurrency": "Carregamentos em simultâneo",
|
||||
"upload_details": "Detalhes do Carregamento",
|
||||
"upload_dialog_info": "Deseja fazer o backup dos arquivos selecionados no servidor?",
|
||||
"upload_dialog_title": "Enviar arquivo",
|
||||
"upload_dialog_info": "Deseja realizar uma cópia de segurança dos ficheiros selecionados para o servidor?",
|
||||
"upload_dialog_title": "Enviar ficheiro",
|
||||
"upload_errors": "Envio completo com {count, plural, one {# erro} other {# erros}}, atualize a página para ver os novos ficheiros enviados.",
|
||||
"upload_finished": "Carregamento acabado",
|
||||
"upload_progress": "Restante(s) {remaining, number} - Processado(s) {processed, number}/{total, number}",
|
||||
|
||||
@@ -485,7 +485,7 @@
"app_bar_signout_dialog_content": "Вы уверены, что хотите выйти?",
"app_bar_signout_dialog_ok": "Да",
"app_bar_signout_dialog_title": "Выйти",
"app_download_links": "Загрузка приложения",
"app_download_links": "Ссылки на загрузку мобильного приложения",
"app_settings": "Параметры приложения",
"app_stores": "Магазины приложений",
"app_update_available": "Доступна новая версия приложения",
@@ -1346,7 +1346,7 @@
"map_zoom_to_see_photos": "Уменьшение масштаба для просмотра фотографий",
"mark_all_as_read": "Прочитано",
"mark_as_read": "Отметить как прочитанное",
"marked_all_as_read": "Отмечены как прочитанные",
"marked_all_as_read": "Все уведомления отмечены как прочитанные",
"matches": "Совпадения",
"matching_assets": "Соответствующие объекты",
"media_type": "Тип медиа",
@@ -1453,7 +1453,7 @@
"oauth": "OAuth",
"obtainium_configurator": "Настройка Obtainium",
"obtainium_configurator_instructions": "Для установки и обновления Android приложения Immich напрямую из источников на GitHub (минуя магазины приложений) можно использовать Obtainium. Создайте новый API ключ и укажите архитектуру приложения для формирования ссылки для Obtainium.",
"ocr": "Распознавание текста",
"ocr": "OCR",
"official_immich_resources": "Официальные ресурсы Immich",
"offline": "Недоступен",
"offset": "Смещение",
@@ -1858,7 +1858,7 @@
"share_add_photos": "Добавить фото",
"share_assets_selected": "{count} выбрано",
"share_dialog_preparing": "Подготовка...",
"share_link": "Поделиться ссылкой",
"share_link": "Создать ссылку",
"shared": "Общиe",
"shared_album_activities_input_disable": "Комментарии отключены",
"shared_album_activity_remove_content": "Удалить сообщение?",

@@ -418,7 +418,7 @@
"advanced_settings_prefer_remote_title": "Föredra bilder från servern",
"advanced_settings_proxy_headers_subtitle": "Definiera proxy-headers som Immich ska skicka med i varje närverksanrop",
"advanced_settings_proxy_headers_title": "Anpassade proxyheaders [EXPERIMENTELLT]",
"advanced_settings_readonly_mode_subtitle": "Aktiverar skrivskyddat läge där foton endast kan visas. Följande funktioner inaktiveras: välj flera bilder, dela, casta, ta bort bilder. Aktivera/inaktivera skrivskyddat läge via profilbilden på appens hemskärm",
"advanced_settings_readonly_mode_subtitle": "Aktiverar skrivskyddat-läge där foton endast kan visas. Följande funktioner inaktiveras: välj flera bilder, dela, casta, ta bort bilder. Aktivera/inaktivera skrivskyddat läge via profilbilden på appens hemskärm",
"advanced_settings_readonly_mode_title": "Skrivskyddat läge",
"advanced_settings_self_signed_ssl_subtitle": "Hoppar över verifiering av serverns SSL-certifikat. Krävs för självsignerade certifikat.",
"advanced_settings_self_signed_ssl_title": "Tillåt självsignerade SSL-certifikat [EXPERIMENTELLT]",
@@ -791,6 +791,7 @@
"daily_title_text_date_year": "E, dd MMM, yyyy",
"dark": "Mörk",
"dark_theme": "Växla mörkt tema",
"date": "Datum",
"date_after": "Datum efter",
"date_and_time": "Datum och Tid",
"date_before": "Datum före",
@@ -1099,6 +1100,7 @@
"features_setting_description": "Hantera appens funktioner",
"file_name": "Filnamn",
"file_name_or_extension": "Filnamn eller -tillägg",
"file_size": "Filstorlek",
"filename": "Filnamn",
"filetype": "Filtyp",
"filter": "Filter",
@@ -1262,6 +1264,7 @@
"local_media_summary": "Sammanfattning av lokala medier",
"local_network": "Lokalt nätverk",
"local_network_sheet_info": "Appen kommer ansluta till servern via denna URL när det specificerade WiFi-nätverket används",
"location": "Position",
"location_permission": "Plats-rättighet",
"location_permission_content": "För att använda funktionen för automatisk växling behöver Immich behörighet till exakt plats så att appen kan läsa av det aktuella Wi-Fi-nätverkets namn",
"location_picker_choose_on_map": "Välj på karta",
@@ -1694,6 +1697,7 @@
"reset_sqlite_confirmation": "Är du säker på att du vill återställa SQLite-databasen? Du måste logga ut och logga in igen för att synkronisera om data",
"reset_sqlite_success": "Återställde SQLite-databasen",
"reset_to_default": "Återställ till standard",
"resolution": "Upplösning",
"resolve_duplicates": "Lös dubletter",
"resolved_all_duplicates": "Lös alla dubletter",
"restore": "Återställ",
@@ -1712,6 +1716,7 @@
"running": "Igångsatt",
"save": "Spara",
"save_to_gallery": "Spara i galleri",
"saved": "Sparad",
"saved_api_key": "Sparad API-nyckel",
"saved_profile": "Sparade profil",
"saved_settings": "Sparade inställningar",
@@ -2020,6 +2025,7 @@
"theme_setting_three_stage_loading_title": "Aktivera trestegsladdning",
"they_will_be_merged_together": "De kommer att slås samman",
"third_party_resources": "Tredjepartsresurser",
"time": "Tid",
"time_based_memories": "Tidsbaserade minnen",
"timeline": "Tidslinje",
"timezone": "Tidszon",

i18n/ta.json
@@ -154,6 +154,18 @@
|
||||
"machine_learning_min_detection_score_description": "ஒரு முகம் 0-1 முதல் கண்டறியப்படுவதற்கு குறைந்தபட்ச நம்பிக்கை மதிப்பெண். குறைந்த மதிப்புகள் அதிக முகங்களைக் கண்டறியும், ஆனால் தவறான நேர்மறைகளை ஏற்படுத்தக்கூடும்.",
|
||||
"machine_learning_min_recognized_faces": "குறைந்தபட்ச அங்கீகரிக்கப்பட்ட முகங்கள்",
|
||||
"machine_learning_min_recognized_faces_description": "ஒரு நபருக்கு உருவாக்கப்பட வேண்டிய அங்கீகரிக்கப்பட்ட முகங்களின் குறைந்தபட்ச எண்ணிக்கை. இதை அதிகரிப்பது, ஒரு நபருக்கு முகம் ஒதுக்கப்படாமல் போகும் வாய்ப்பை அதிகரிக்கும் செலவில், முக அங்கீகாரத்தை மிகவும் துல்லியமாக்குகிறது.",
|
||||
"machine_learning_ocr": "ஓசிஆர்",
|
||||
"machine_learning_ocr_description": "படங்களில் உள்ள உரையை அடையாளம் காண இயந்திர கற்றலைப் பயன்படுத்தவும்",
|
||||
"machine_learning_ocr_enabled": "ஓசிஆர் ஐ இயலச்செய்",
|
||||
"machine_learning_ocr_enabled_description": "முடக்கப்பட்டால், படங்கள் உரை அடையாளம் காணப்படாது.",
|
||||
"machine_learning_ocr_max_resolution": "அதிகபட்ச தெளிவுத்திறன்",
|
||||
"machine_learning_ocr_max_resolution_description": "இந்தத் தெளிவுத்திறனுக்கு மேலே உள்ள மாதிரிக்காட்சிகள், தோற்ற விகிதத்தைப் பாதுகாக்கும் அதே வேளையில், அளவு மாற்றப்படும். அதிக மதிப்புகள் மிகவும் துல்லியமானவை, ஆனால் செயலாக்கவும் அதிக நினைவகத்தைப் பயன்படுத்தவும் அதிக நேரம் எடுக்கும்.",
|
||||
"machine_learning_ocr_min_detection_score": "குறைந்தபட்ச கண்டறிதல் மதிப்பெண்",
|
||||
"machine_learning_ocr_min_detection_score_description": "உரையைக் கண்டறிய குறைந்தபட்ச நம்பிக்கை மதிப்பெண் 0-1. குறைந்த மதிப்பெண் அதிக உரையைக் கண்டறியும், ஆனால் தவறான தகவல்க்கு வழிவகுக்கும்.",
|
||||
"machine_learning_ocr_min_recognition_score": "குறைந்தபட்ச அங்கீகார மதிப்பெண்",
|
||||
"machine_learning_ocr_min_score_recognition_description": "கண்டறியப்பட்ட உரையை அங்கீகரிக்க குறைந்தபட்ச நம்பிக்கை மதிப்பெண் 0-1 ஆகும். குறைந்த மதிப்பெண் அதிக உரையை அங்கீகரிக்கும், ஆனால் தவறான நேர்மறைகளுக்கு வழிவகுக்கும்.",
|
||||
"machine_learning_ocr_model": "ஓசிஆர் மாதிரி",
|
||||
"machine_learning_ocr_model_description": "மொபைல் மாடல்களை விட சர்வர் மாடல்கள் மிகவும் துல்லியமானவை, ஆனால் அதிக நினைவகத்தை செயலாக்கி பயன்படுத்த அதிக நேரம் எடுக்கும்.",
|
||||
"machine_learning_settings": "இயந்திர கற்றல் அமைப்புகள்",
|
||||
"machine_learning_settings_description": "இயந்திர கற்றல் அம்சங்கள் மற்றும் அமைப்புகளை நிர்வகிக்கவும்",
|
||||
"machine_learning_smart_search": "ஸ்மார்ட் தேடல்",
|
||||
@@ -245,6 +257,7 @@
|
||||
"oauth_storage_quota_default_description": "GiB இல் உள்ள ஒதுக்கீடு எந்த உரிமைகோரலும் வழங்கப்படாதபோது பயன்படுத்தப்படும் .",
|
||||
"oauth_timeout": "கோரிக்கை நேரம் முடிந்தது",
|
||||
"oauth_timeout_description": "கோரிக்கைகளுக்கான காலக்கெடு மில்லி வினாடிகளில்",
|
||||
"ocr_job_description": "படங்களில் உள்ள உரையை அடையாளம் காண இயந்திர கற்றலைப் பயன்படுத்தவும்",
|
||||
"password_enable_description": "மின்னஞ்சல் மற்றும் கடவுச்சொல் மூலம் உள்நுழையவும்",
|
||||
"password_settings": "கடவுச்சொல் உள்நுழைவு",
|
||||
"password_settings_description": "கடவுச்சொல் உள்நுழைவு அமைப்புகளை நிர்வகிக்கவும்",
|
||||
@@ -669,6 +682,8 @@
|
||||
"change_password_description": "நீங்கள் கணினியில் கையொப்பமிடுவது இதுவே முதல் முறை அல்லது உங்கள் கடவுச்சொல்லை மாற்றுவதற்கான கோரிக்கை செய்யப்பட்டுள்ளது. கீழே புதிய கடவுச்சொல்லை உள்ளிடவும்.",
|
||||
"change_password_form_confirm_password": "கடவுச்சொல்லை உறுதிப்படுத்தவும்",
|
||||
"change_password_form_description": "ஆய் {name}, \n\nநீங்கள் கணினியில் கையொப்பமிடுவது இதுவே முதல் முறை அல்லது உங்கள் கடவுச்சொல்லை மாற்றுவதற்கான கோரிக்கை செய்யப்பட்டுள்ளது. கீழே புதிய கடவுச்சொல்லை உள்ளிடவும்.",
|
||||
"change_password_form_log_out": "மற்ற எல்லா சாதனங்களிலிருந்தும் வெளியேறு",
|
||||
"change_password_form_log_out_description": "மற்ற எல்லா சாதனங்களிலிருந்தும் வெளியேற பரிந்துரைக்கப்படுகிறது",
|
||||
"change_password_form_new_password": "புதிய கடவுச்சொல்",
|
||||
"change_password_form_password_mismatch": "கடவுச்சொற்கள் பொருந்தவில்லை",
|
||||
"change_password_form_reenter_new_password": "புதிய கடவுச்சொல்லை மீண்டும் உள்ளிடவும்",
|
||||
@@ -776,6 +791,7 @@
|
||||
"daily_title_text_date_year": "E, mmm dd, yyyy",
|
||||
"dark": "இருண்ட",
|
||||
"dark_theme": "இருண்ட கருப்பொருளை மாற்றவும்",
|
||||
"date": "தேதி",
|
||||
"date_after": "தேதி",
|
||||
"date_and_time": "தேதி மற்றும் நேரம்",
|
||||
"date_before": "முன் தேதி",
|
||||
@@ -1084,6 +1100,7 @@
|
||||
"features_setting_description": "பயன்பாட்டு அம்சங்களை நிர்வகிக்கவும்",
|
||||
"file_name": "கோப்பு பெயர்",
|
||||
"file_name_or_extension": "கோப்பு பெயர் அல்லது நீட்டிப்பு",
|
||||
"file_size": "கோப்பு அளவு",
|
||||
"filename": "கோப்புப்பெயர்",
|
||||
"filetype": "பைல்டைப்",
|
||||
"filter": "வடிப்பி",
|
||||
@@ -1247,6 +1264,7 @@
|
||||
"local_media_summary": "உள்ளக ஊடக சுருக்கம்",
|
||||
"local_network": "உள்ளக பிணையம்",
|
||||
"local_network_sheet_info": "குறிப்பிட்ட வைஃபை நெட்வொர்க்கைப் பயன்படுத்தும் போது பயன்பாடு இந்த முகவரி மூலம் சேவையகத்துடன் இணைக்கப்படும்",
|
||||
"location": "இடம்",
|
||||
"location_permission": "இருப்பிட இசைவு",
|
||||
"location_permission_content": "ஆட்டோ-ச்விட்சிங் அம்சத்தைப் பயன்படுத்த, இம்மிக்கு துல்லியமான இருப்பிட இசைவு தேவை, எனவே இது தற்போதைய வைஃபை நெட்வொர்க்கின் பெயரைப் படிக்க முடியும்",
|
||||
"location_picker_choose_on_map": "வரைபடத்தில் தேர்வு செய்யவும்",
|
||||
@@ -1435,6 +1453,7 @@
|
||||
"oauth": "Oauth",
|
||||
"obtainium_configurator": "ஒப்டெய்னியம் கட்டமைப்பாளர்",
|
||||
"obtainium_configurator_instructions": "Immich GitHub வெளியீட்டில் இருந்து நேரடியாக ஆண்ட்ராய்டு பயன்பாட்டை நிறுவவும் புதுப்பிக்கவும் Obtainium ஐப் பயன்படுத்தவும். பநிஇ விசையை உருவாக்கி, உங்கள் ஒப்டெய்னியம் உள்ளமைவு இணைப்பை உருவாக்க ஒரு மாறுபாட்டைத் தேர்ந்தெடுக்கவும்",
|
||||
"ocr": "ஓசிஆர்",
|
||||
"official_immich_resources": "உத்தியோகபூர்வ இம்மா வளங்கள்",
|
||||
"offline": "இணையமில்லாமல்",
|
||||
"offset": "ஈடுசெய்யும்",
|
||||
@@ -1678,6 +1697,7 @@
|
||||
"reset_sqlite_confirmation": "SQLITE தரவுத்தளத்தை மீட்டமைக்க விரும்புகிறீர்களா? தரவை மீண்டும் ஒத்திசைக்க நீங்கள் வெளியேறி மீண்டும் உள்நுழைய வேண்டும்",
|
||||
"reset_sqlite_success": "SQLITE தரவுத்தளத்தை வெற்றிகரமாக மீட்டமைக்கவும்",
|
||||
"reset_to_default": "இயல்புநிலைக்கு மீட்டமைக்கவும்",
|
||||
"resolution": "தெளிவுத்திறன்",
|
||||
"resolve_duplicates": "நகல்களைத் தீர்க்கவும்",
|
||||
"resolved_all_duplicates": "அனைத்து நகல்களையும் தீர்க்கும்",
|
||||
"restore": "மீட்டமை",
|
||||
@@ -1696,6 +1716,7 @@
|
||||
"running": "இயங்கும்",
|
||||
"save": "சேமி",
|
||||
"save_to_gallery": "கேலரியில் சேமிக்கவும்",
|
||||
"saved": "சேமிக்கப்பட்டது",
|
||||
"saved_api_key": "சேமித்த பநிஇ விசை",
|
||||
"saved_profile": "சேமித்த சுயவிவரம்",
|
||||
"saved_settings": "சேமித்த அமைப்புகள்",
|
||||
@@ -1712,6 +1733,8 @@
|
||||
"search_by_description_example": "சப்பாவில் நடைபயணம்",
|
||||
"search_by_filename": "கோப்பு பெயர் அல்லது நீட்டிப்பு மூலம் தேடுங்கள்",
|
||||
"search_by_filename_example": "I.E. IMG_1234.JPG அல்லது PNG",
|
||||
"search_by_ocr": "ஓசிஆர் மூலம் தேடு",
|
||||
"search_by_ocr_example": "லேட்",
|
||||
"search_camera_lens_model": "கண்ணாடி வில்லை மாதிரியைத் தேடு...",
|
||||
"search_camera_make": "தேடல் கேமரா செய்யுங்கள் ...",
|
||||
"search_camera_model": "கேமரா மாதிரியைத் தேடுங்கள் ...",
|
||||
@@ -1729,6 +1752,7 @@
|
||||
"search_filter_location_title": "இருப்பிடத்தைத் தேர்ந்தெடுக்கவும்",
|
||||
"search_filter_media_type": "ஊடக வகை",
|
||||
"search_filter_media_type_title": "மீடியா வகையைத் தேர்ந்தெடுக்கவும்",
|
||||
"search_filter_ocr": "ஓசிஆர் மூலம் தேடு",
|
||||
"search_filter_people_title": "மக்களைத் தேர்ந்தெடுக்கவும்",
|
||||
"search_for": "தேடுங்கள்",
|
||||
"search_for_existing_person": "இருக்கும் நபரைத் தேடுங்கள்",
|
||||
@@ -2001,6 +2025,7 @@
|
||||
"theme_setting_three_stage_loading_title": "மூன்று-நிலை ஏற்றுதலை இயக்கவும்",
|
||||
"they_will_be_merged_together": "அவர்கள் ஒன்றாக இணைக்கப்படுவார்கள்",
|
||||
"third_party_resources": "மூன்றாம் தரப்பு வளங்கள்",
|
||||
"time": "நேரம்",
|
||||
"time_based_memories": "நேர அடிப்படையிலான நினைவுகள்",
|
||||
"timeline": "காலவரிசை",
|
||||
"timezone": "நேர மண்டலம்",
|
||||
|
||||
@@ -163,6 +163,7 @@
"machine_learning_ocr_min_detection_score": "En düşük tespit puanı",
"machine_learning_ocr_min_detection_score_description": "Metnin tespit edilmesi için minimum güven puanı 0-1 arasındadır. Düşük değerler daha fazla metin tespit eder, ancak yanlış pozitif sonuçlara yol açabilir.",
"machine_learning_ocr_min_recognition_score": "Minimum tespit puanı",
"machine_learning_ocr_model": "OCR modeli",
"machine_learning_settings": "Makine Öğrenmesi ayarları",
"machine_learning_settings_description": "Makine öğrenmesi özelliklerini ve ayarlarını yönet",
"machine_learning_smart_search": "Akıllı Arama",
@@ -785,6 +786,7 @@
"daily_title_text_date_year": "dd MMM yyyy E",
"dark": "Koyu",
"dark_theme": "Karanlık temaya geç",
"date": "Tarih",
"date_after": "Sonraki tarih",
"date_and_time": "Tarih ve Zaman",
"date_before": "Önceki tarih",
@@ -1093,6 +1095,7 @@
"features_setting_description": "Uygulamanın özelliklerini yönet",
"file_name": "Dosya adı",
"file_name_or_extension": "Dosya adı veya uzantı",
"file_size": "Dosya boyutu",
"filename": "Dosya adı",
"filetype": "Dosya tipi",
"filter": "Filtre",
@@ -1444,6 +1447,7 @@
"oauth": "OAuth",
"obtainium_configurator": "Obtainium Yapılandırıcı",
"obtainium_configurator_instructions": "Obtainium kullanarak Android uygulamasını doğrudan Immich GitHub sürümünden yükleyin ve güncelleyin. Bir API anahtarı oluşturun ve bir varyant seçerek Obtainium yapılandırma bağlantınızı oluşturun",
"ocr": "OCR",
"official_immich_resources": "Resmi Immich Kaynakları",
"offline": "Çevrim dışı",
"offset": "Ofset",
@@ -1687,6 +1691,7 @@
"reset_sqlite_confirmation": "SQLite veritabanını sıfırlamak istediğinizden emin misiniz? Verileri yeniden eşzamanlamak için oturumu kapatıp tekrar oturum açmanız gerekecektir",
"reset_sqlite_success": "SQLite veritabanını başarıyla sıfırladınız",
"reset_to_default": "Varsayılana sıfırla",
"resolution": "Çözünürlük",
"resolve_duplicates": "Çiftleri çöz",
"resolved_all_duplicates": "Tüm çiftler çözüldü",
"restore": "Geri yükle",
@@ -2010,6 +2015,7 @@
"theme_setting_three_stage_loading_title": "Üç aşamalı yüklemeyi etkinleştir",
"they_will_be_merged_together": "Birlikte birleştirilecekler",
"third_party_resources": "Üçüncü taraf kaynaklar",
"time": "Zaman",
"time_based_memories": "Zaman bazlı anılar",
"timeline": "Zaman Çizelgesi",
"timezone": "Zaman dilimi",

@@ -154,6 +154,8 @@
"machine_learning_min_detection_score_description": "臉孔偵測的最低信心分數,範圍為 0 至 1。數值較低時會偵測到更多臉孔,但可能導致誤判。",
"machine_learning_min_recognized_faces": "最低臉部辨識數量",
"machine_learning_min_recognized_faces_description": "建立新人物所需的最低已辨識臉孔數量。提高此數值可讓臉孔辨識更精確,但同時會增加臉孔未被指派給任何人物的可能性。",
"machine_learning_ocr": "文字辨識(OCR)",
"machine_learning_ocr_description": "使用機器學習來識別圖片中的文字",
"machine_learning_settings": "機器學習設定",
"machine_learning_settings_description": "管理機器學習的功能和設定",
"machine_learning_smart_search": "智慧搜尋",
@@ -245,6 +247,7 @@
"oauth_storage_quota_default_description": "未提供宣告時所使用的配額(GiB)。",
"oauth_timeout": "請求逾時",
"oauth_timeout_description": "請求的逾時時間(毫秒)",
"ocr_job_description": "使用機器學習來識別圖像中的文字",
"password_enable_description": "使用電子郵件和密碼登入",
"password_settings": "密碼登入",
"password_settings_description": "管理密碼登入設定",

@@ -13,6 +13,8 @@ from rich.logging import RichHandler
from uvicorn import Server
from uvicorn.workers import UvicornWorker

from .schemas import ModelPrecision


class ClipSettings(BaseModel):
    textual: str | None = None
@@ -24,6 +26,11 @@ class FacialRecognitionSettings(BaseModel):
    detection: str | None = None


class OcrSettings(BaseModel):
    recognition: str | None = None
    detection: str | None = None


class PreloadModelData(BaseModel):
    clip_fallback: str | None = os.getenv("MACHINE_LEARNING_PRELOAD__CLIP", None)
    facial_recognition_fallback: str | None = os.getenv("MACHINE_LEARNING_PRELOAD__FACIAL_RECOGNITION", None)
@@ -37,6 +44,7 @@ class PreloadModelData(BaseModel):
            del os.environ["MACHINE_LEARNING_PRELOAD__FACIAL_RECOGNITION"]
    clip: ClipSettings = ClipSettings()
    facial_recognition: FacialRecognitionSettings = FacialRecognitionSettings()
    ocr: OcrSettings = OcrSettings()


class MaxBatchSize(BaseModel):
@@ -70,6 +78,7 @@ class Settings(BaseSettings):
    rknn_threads: int = 1
    preload: PreloadModelData | None = None
    max_batch_size: MaxBatchSize | None = None
    openvino_precision: ModelPrecision = ModelPrecision.FP32

    @property
    def device_id(self) -> str:

@@ -103,6 +103,20 @@ async def preload_models(preload: PreloadModelData) -> None:
            ModelTask.FACIAL_RECOGNITION,
        )

    if preload.ocr.detection is not None:
        await load_models(
            preload.ocr.detection,
            ModelType.DETECTION,
            ModelTask.OCR,
        )

    if preload.ocr.recognition is not None:
        await load_models(
            preload.ocr.recognition,
            ModelType.RECOGNITION,
            ModelTask.OCR,
        )

    if preload.clip_fallback is not None:
        log.warning(
            "Deprecated env variable: 'MACHINE_LEARNING_PRELOAD__CLIP'. "

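With the new OcrSettings preload hooks above, the OCR detector and recognizer can be warmed up at startup in the same way as the CLIP and facial-recognition models. The nested double-underscore environment variables exercised by the tests further down in this diff look like:

MACHINE_LEARNING_PRELOAD__OCR__DETECTION=PP-OCRv5_mobile
MACHINE_LEARNING_PRELOAD__OCR__RECOGNITION=PP-OCRv5_mobile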
@@ -78,6 +78,14 @@ _INSIGHTFACE_MODELS = {
_PADDLE_MODELS = {
    "PP-OCRv5_server",
    "PP-OCRv5_mobile",
    "CH__PP-OCRv5_server",
    "CH__PP-OCRv5_mobile",
    "EL__PP-OCRv5_mobile",
    "EN__PP-OCRv5_mobile",
    "ESLAV__PP-OCRv5_mobile",
    "KOREAN__PP-OCRv5_mobile",
    "LATIN__PP-OCRv5_mobile",
    "TH__PP-OCRv5_mobile",
}

SUPPORTED_PROVIDERS = [

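The double-underscore prefixes added to _PADDLE_MODELS encode a language variant: judging from the TextDetector and TextRecognizer changes elsewhere in this diff, the name is split on "__" so the prefix selects the recognition language while the suffix selects the weights. A standalone sketch of that parsing, illustrative only:

import re  # not required; shown only to keep the sketch self-contained

# Hypothetical user-selected model name; mirrors the split("__") logic in this diff.
model_name = "KOREAN__PP-OCRv5_mobile"
lang_key = model_name.split("__")[0] if "__" in model_name else "CH"  # -> "KOREAN" (LangRec key)
weights = model_name.split("__")[-1]                                  # -> "PP-OCRv5_mobile"
print(lang_key, weights)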
@@ -1,20 +1,21 @@
from typing import Any

import cv2
import numpy as np
from numpy.typing import NDArray
from PIL import Image
from rapidocr.ch_ppocr_det import TextDetector as RapidTextDetector
from rapidocr.ch_ppocr_det.utils import DBPostProcess
from rapidocr.inference_engine.base import FileInfo, InferSession
from rapidocr.utils import DownloadFile, DownloadFileInput
from rapidocr.utils.download_file import DownloadFile, DownloadFileInput
from rapidocr.utils.typings import EngineType, LangDet, OCRVersion, TaskType
from rapidocr.utils.typings import ModelType as RapidModelType

from immich_ml.config import log
from immich_ml.models.base import InferenceModel
from immich_ml.models.transforms import decode_cv2
from immich_ml.schemas import ModelFormat, ModelSession, ModelTask, ModelType
from immich_ml.sessions.ort import OrtSession

from .schemas import OcrOptions, TextDetectionOutput
from .schemas import TextDetectionOutput


class TextDetector(InferenceModel):
@@ -22,15 +23,22 @@ class TextDetector(InferenceModel):
    identity = (ModelType.DETECTION, ModelTask.OCR)

    def __init__(self, model_name: str, **model_kwargs: Any) -> None:
        super().__init__(model_name, **model_kwargs, model_format=ModelFormat.ONNX)
        super().__init__(model_name.split("__")[-1], **model_kwargs, model_format=ModelFormat.ONNX)
        self.max_resolution = 736
        self.min_score = 0.5
        self.score_mode = "fast"
        self.mean = np.array([0.5, 0.5, 0.5], dtype=np.float32)
        self.std_inv = np.float32(1.0) / (np.array([0.5, 0.5, 0.5], dtype=np.float32) * 255.0)
        self._empty: TextDetectionOutput = {
            "image": np.empty(0, dtype=np.float32),
            "boxes": np.empty(0, dtype=np.float32),
            "scores": np.empty(0, dtype=np.float32),
        }
        self.postprocess = DBPostProcess(
            thresh=0.3,
            box_thresh=model_kwargs.get("minScore", 0.5),
            max_candidates=1000,
            unclip_ratio=1.6,
            use_dilation=True,
            score_mode="fast",
        )

    def _download(self) -> None:
        model_info = InferSession.get_model_url(
@@ -52,35 +60,65 @@ class TextDetector(InferenceModel):

    def _load(self) -> ModelSession:
        # TODO: support other runtime sessions
        session = OrtSession(self.model_path)
        self.model = RapidTextDetector(
            OcrOptions(
                session=session.session,
                limit_side_len=self.max_resolution,
                limit_type="min",
                box_thresh=self.min_score,
                score_mode=self.score_mode,
            )
        )
        return session
        return OrtSession(self.model_path)

    def _predict(self, inputs: bytes | Image.Image) -> TextDetectionOutput:
        results = self.model(decode_cv2(inputs))
        if results.boxes is None or results.scores is None or results.img is None:
    # partly adapted from RapidOCR
    def _predict(self, inputs: Image.Image) -> TextDetectionOutput:
        w, h = inputs.size
        if w < 32 or h < 32:
            return self._empty
        out = self.session.run(None, {"x": self._transform(inputs)})[0]
        boxes, scores = self.postprocess(out, (h, w))
        if len(boxes) == 0:
            return self._empty
        return {
            "image": results.img,
            "boxes": np.array(results.boxes, dtype=np.float32),
            "scores": np.array(results.scores, dtype=np.float32),
            "boxes": self.sorted_boxes(boxes),
            "scores": np.array(scores, dtype=np.float32),
        }

    # adapted from RapidOCR
    def _transform(self, img: Image.Image) -> NDArray[np.float32]:
        if img.height < img.width:
            ratio = float(self.max_resolution) / img.height
        else:
            ratio = float(self.max_resolution) / img.width

        resize_h = int(img.height * ratio)
        resize_w = int(img.width * ratio)

        resize_h = int(round(resize_h / 32) * 32)
        resize_w = int(round(resize_w / 32) * 32)
        resized_img = img.resize((int(resize_w), int(resize_h)), resample=Image.Resampling.LANCZOS)

        img_np: NDArray[np.float32] = cv2.cvtColor(np.array(resized_img, dtype=np.float32), cv2.COLOR_RGB2BGR)  # type: ignore
        img_np -= self.mean
        img_np *= self.std_inv
        img_np = np.transpose(img_np, (2, 0, 1))
        return np.expand_dims(img_np, axis=0)

    def sorted_boxes(self, dt_boxes: NDArray[np.float32]) -> NDArray[np.float32]:
        if len(dt_boxes) == 0:
            return dt_boxes

        # Sort by y, then identify lines, then sort by (line, x)
        y_order = np.argsort(dt_boxes[:, 0, 1], kind="stable")
        sorted_y = dt_boxes[y_order, 0, 1]

        line_ids = np.empty(len(dt_boxes), dtype=np.int32)
        line_ids[0] = 0
        np.cumsum(np.abs(np.diff(sorted_y)) >= 10, out=line_ids[1:])

        # Create composite sort key for final ordering
        # Shift line_ids by large factor, add x for tie-breaking
        sort_key = line_ids[y_order] * 1e6 + dt_boxes[y_order, 0, 0]
        final_order = np.argsort(sort_key, kind="stable")
        sorted_boxes: NDArray[np.float32] = dt_boxes[y_order[final_order]]
        return sorted_boxes

    def configure(self, **kwargs: Any) -> None:
        if (max_resolution := kwargs.get("maxResolution")) is not None:
            self.max_resolution = max_resolution
            self.model.limit_side_len = max_resolution
        if (min_score := kwargs.get("minScore")) is not None:
            self.min_score = min_score
            self.model.postprocess_op.box_thresh = min_score
            self.postprocess.box_thresh = min_score
        if (score_mode := kwargs.get("scoreMode")) is not None:
            self.score_mode = score_mode
            self.model.postprocess_op.score_mode = score_mode
            self.postprocess.score_mode = score_mode

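The new sorted_boxes helper above reproduces the usual top-to-bottom, left-to-right reading order without a Python loop: boxes are bucketed into text lines (a y-gap of 10 px or more starts a new line) and then ordered by a single composite key. A small standalone sketch of the same idea, with made-up coordinates:

import numpy as np

# top-left (x, y) of three hypothetical detection boxes: two on one line, one below
tops = np.array([[120.0, 12.0], [10.0, 14.0], [15.0, 60.0]], dtype=np.float32)

y_order = np.argsort(tops[:, 1], kind="stable")            # rough top-to-bottom order
line_ids = np.empty(len(tops), dtype=np.int32)
line_ids[0] = 0
np.cumsum(np.abs(np.diff(tops[y_order, 1])) >= 10, out=line_ids[1:])  # new line when the y-gap >= 10

sort_key = line_ids * 1e6 + tops[y_order, 0]               # line number dominates, x breaks ties
order = y_order[np.argsort(sort_key, kind="stable")]
print(order)  # [1 0 2]: left box, then right box on the same line, then the lower line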
@@ -1,18 +1,19 @@
from typing import Any

import cv2
import numpy as np
from numpy.typing import NDArray
from PIL.Image import Image
from PIL import Image
from rapidocr.ch_ppocr_rec import TextRecInput
from rapidocr.ch_ppocr_rec import TextRecognizer as RapidTextRecognizer
from rapidocr.inference_engine.base import FileInfo, InferSession
from rapidocr.utils import DownloadFile, DownloadFileInput
from rapidocr.utils.download_file import DownloadFile, DownloadFileInput
from rapidocr.utils.typings import EngineType, LangRec, OCRVersion, TaskType
from rapidocr.utils.typings import ModelType as RapidModelType
from rapidocr.utils.vis_res import VisRes

from immich_ml.config import log, settings
from immich_ml.models.base import InferenceModel
from immich_ml.models.transforms import pil_to_cv2
from immich_ml.schemas import ModelFormat, ModelSession, ModelTask, ModelType
from immich_ml.sessions.ort import OrtSession

@@ -24,6 +25,7 @@ class TextRecognizer(InferenceModel):
    identity = (ModelType.RECOGNITION, ModelTask.OCR)

    def __init__(self, model_name: str, **model_kwargs: Any) -> None:
        self.language = LangRec[model_name.split("__")[0]] if "__" in model_name else LangRec.CH
        self.min_score = model_kwargs.get("minScore", 0.9)
        self._empty: TextRecognitionOutput = {
            "box": np.empty(0, dtype=np.float32),
@@ -31,6 +33,7 @@ class TextRecognizer(InferenceModel):
            "text": [],
            "textScore": np.empty(0, dtype=np.float32),
        }
        VisRes.__init__ = lambda self, **kwargs: None  # pyright: ignore[reportAttributeAccessIssue]
        super().__init__(model_name, **model_kwargs, model_format=ModelFormat.ONNX)

    def _download(self) -> None:
@@ -39,7 +42,7 @@ class TextRecognizer(InferenceModel):
                engine_type=EngineType.ONNXRUNTIME,
                ocr_version=OCRVersion.PPOCRV5,
                task_type=TaskType.REC,
                lang_type=LangRec.CH,
                lang_type=self.language,
                model_type=RapidModelType.MOBILE if "mobile" in self.model_name else RapidModelType.SERVER,
            )
        )
@@ -59,21 +62,21 @@ class TextRecognizer(InferenceModel):
                session=session.session,
                rec_batch_num=settings.max_batch_size.text_recognition if settings.max_batch_size is not None else 6,
                rec_img_shape=(3, 48, 320),
                lang_type=self.language,
            )
        )
        return session

    def _predict(self, _: Image, texts: TextDetectionOutput) -> TextRecognitionOutput:
        boxes, img, box_scores = texts["boxes"], texts["image"], texts["scores"]
    def _predict(self, img: Image.Image, texts: TextDetectionOutput) -> TextRecognitionOutput:
        boxes, box_scores = texts["boxes"], texts["scores"]
        if boxes.shape[0] == 0:
            return self._empty
        rec = self.model(TextRecInput(img=self.get_crop_img_list(img, boxes)))
        if rec.txts is None:
            return self._empty

        height, width = img.shape[0:2]
        boxes[:, :, 0] /= width
        boxes[:, :, 1] /= height
        boxes[:, :, 0] /= img.width
        boxes[:, :, 1] /= img.height

        text_scores = np.array(rec.scores)
        valid_text_score_idx = text_scores > self.min_score
@@ -85,7 +88,7 @@ class TextRecognizer(InferenceModel):
            "textScore": text_scores[valid_text_score_idx],
        }

    def get_crop_img_list(self, img: NDArray[np.float32], boxes: NDArray[np.float32]) -> list[NDArray[np.float32]]:
    def get_crop_img_list(self, img: Image.Image, boxes: NDArray[np.float32]) -> list[NDArray[np.uint8]]:
        img_crop_width = np.maximum(
            np.linalg.norm(boxes[:, 1] - boxes[:, 0], axis=1), np.linalg.norm(boxes[:, 2] - boxes[:, 3], axis=1)
        ).astype(np.int32)
@@ -96,22 +99,55 @@ class TextRecognizer(InferenceModel):
        pts_std[:, 1:3, 0] = img_crop_width[:, None]
        pts_std[:, 2:4, 1] = img_crop_height[:, None]

        img_crop_sizes = np.stack([img_crop_width, img_crop_height], axis=1).tolist()
        imgs: list[NDArray[np.float32]] = []
        for box, pts_std, dst_size in zip(list(boxes), list(pts_std), img_crop_sizes):
            M = cv2.getPerspectiveTransform(box, pts_std)
            dst_img: NDArray[np.float32] = cv2.warpPerspective(
                img,
                M,
                dst_size,
                borderMode=cv2.BORDER_REPLICATE,
                flags=cv2.INTER_CUBIC,
            )  # type: ignore
            dst_height, dst_width = dst_img.shape[0:2]
        img_crop_sizes = np.stack([img_crop_width, img_crop_height], axis=1)
        all_coeffs = self._get_perspective_transform(pts_std, boxes)
        imgs: list[NDArray[np.uint8]] = []
        for coeffs, dst_size in zip(all_coeffs, img_crop_sizes):
            dst_img = img.transform(
                size=tuple(dst_size),
                method=Image.Transform.PERSPECTIVE,
                data=tuple(coeffs),
                resample=Image.Resampling.BICUBIC,
            )

            dst_width, dst_height = dst_img.size
            if dst_height * 1.0 / dst_width >= 1.5:
                dst_img = np.rot90(dst_img)
            imgs.append(dst_img)
                dst_img = dst_img.rotate(90, expand=True)
            imgs.append(pil_to_cv2(dst_img))

        return imgs

    def _get_perspective_transform(self, src: NDArray[np.float32], dst: NDArray[np.float32]) -> NDArray[np.float32]:
        N = src.shape[0]
        x, y = src[:, :, 0], src[:, :, 1]
        u, v = dst[:, :, 0], dst[:, :, 1]
        A = np.zeros((N, 8, 9), dtype=np.float32)

        # Fill even rows (0, 2, 4, 6): [x, y, 1, 0, 0, 0, -u*x, -u*y, -u]
        A[:, ::2, 0] = x
        A[:, ::2, 1] = y
        A[:, ::2, 2] = 1
        A[:, ::2, 6] = -u * x
        A[:, ::2, 7] = -u * y
        A[:, ::2, 8] = -u

        # Fill odd rows (1, 3, 5, 7): [0, 0, 0, x, y, 1, -v*x, -v*y, -v]
        A[:, 1::2, 3] = x
        A[:, 1::2, 4] = y
        A[:, 1::2, 5] = 1
        A[:, 1::2, 6] = -v * x
        A[:, 1::2, 7] = -v * y
        A[:, 1::2, 8] = -v

        # Solve using SVD for all matrices at once
        _, _, Vt = np.linalg.svd(A)
        H = Vt[:, -1, :].reshape(N, 3, 3)
        H = H / H[:, 2:3, 2:3]

        # Extract the 8 coefficients for each transformation
        return np.column_stack(
            [H[:, 0, 0], H[:, 0, 1], H[:, 0, 2], H[:, 1, 0], H[:, 1, 1], H[:, 1, 2], H[:, 2, 0], H[:, 2, 1]]
        )  # pyright: ignore[reportReturnType]

    def configure(self, **kwargs: Any) -> None:
        self.min_score = kwargs.get("minScore", self.min_score)

@@ -7,7 +7,6 @@ from typing_extensions import TypedDict


class TextDetectionOutput(TypedDict):
    image: npt.NDArray[np.float32]
    boxes: npt.NDArray[np.float32]
    scores: npt.NDArray[np.float32]

@@ -21,8 +20,8 @@ class TextRecognitionOutput(TypedDict):

# RapidOCR expects `engine_type`, `lang_type`, and `font_path` to be attributes
class OcrOptions(dict[str, Any]):
    def __init__(self, **options: Any) -> None:
    def __init__(self, lang_type: LangRec | None = None, **options: Any) -> None:
        super().__init__(**options)
        self.engine_type = EngineType.ONNXRUNTIME
        self.lang_type = LangRec.CH
        self.lang_type = lang_type
        self.font_path = None

@@ -46,6 +46,11 @@ class ModelSource(StrEnum):
    PADDLE = "paddle"


class ModelPrecision(StrEnum):
    FP16 = "FP16"
    FP32 = "FP32"


ModelIdentity = tuple[ModelType, ModelTask]

@@ -93,10 +93,12 @@ class OrtSession:
            case "CUDAExecutionProvider" | "ROCMExecutionProvider":
                options = {"arena_extend_strategy": "kSameAsRequested", "device_id": settings.device_id}
            case "OpenVINOExecutionProvider":
                openvino_dir = self.model_path.parent / "openvino"
                device = f"GPU.{settings.device_id}"
                options = {
                    "device_type": f"GPU.{settings.device_id}",
                    "precision": "FP32",
                    "cache_dir": (self.model_path.parent / "openvino").as_posix(),
                    "device_type": device,
                    "precision": settings.openvino_precision.value,
                    "cache_dir": openvino_dir.as_posix(),
                }
            case "CoreMLExecutionProvider":
                options = {

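The OpenVINO provider now takes its precision from settings instead of the hard-coded "FP32". Assuming the service keeps the MACHINE_LEARNING_ prefix used by the other environment-driven settings in this diff (e.g. MACHINE_LEARNING_DEVICE_ID), switching to half precision would presumably look like:

MACHINE_LEARNING_OPENVINO_PRECISION=FP16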
@@ -1,6 +1,6 @@
[project]
name = "immich-ml"
version = "2.2.0"
version = "2.2.3"
description = ""
authors = [{ name = "Hau Tran", email = "alex.tran1502@gmail.com" }]
requires-python = ">=3.10,<4.0"
@@ -22,7 +22,6 @@ dependencies = [
  "rich>=13.4.2",
  "tokenizers>=0.15.0,<1.0",
  "uvicorn[standard]>=0.22.0,<1.0",
  "setuptools>=78.1.0",
  "rapidocr>=3.1.0",
]

@@ -26,7 +26,7 @@ from immich_ml.models.clip.textual import MClipTextualEncoder, OpenClipTextualEn
from immich_ml.models.clip.visual import OpenClipVisualEncoder
from immich_ml.models.facial_recognition.detection import FaceDetector
from immich_ml.models.facial_recognition.recognition import FaceRecognizer
from immich_ml.schemas import ModelFormat, ModelTask, ModelType
from immich_ml.schemas import ModelFormat, ModelPrecision, ModelTask, ModelType
from immich_ml.sessions.ann import AnnSession
from immich_ml.sessions.ort import OrtSession
from immich_ml.sessions.rknn import RknnSession, run_inference
@@ -240,11 +240,16 @@ class TestOrtSession:

    @pytest.mark.ov_device_ids(["GPU.0", "CPU"])
    def test_sets_default_provider_options(self, ov_device_ids: list[str]) -> None:
        model_path = "/cache/ViT-B-32__openai/model.onnx"
        model_path = "/cache/ViT-B-32__openai/textual/model.onnx"

        session = OrtSession(model_path, providers=["OpenVINOExecutionProvider", "CPUExecutionProvider"])

        assert session.provider_options == [
            {"device_type": "GPU.0", "precision": "FP32", "cache_dir": "/cache/ViT-B-32__openai/openvino"},
            {
                "device_type": "GPU.0",
                "precision": "FP32",
                "cache_dir": "/cache/ViT-B-32__openai/textual/openvino",
            },
            {"arena_extend_strategy": "kSameAsRequested"},
        ]

@@ -262,6 +267,21 @@ class TestOrtSession:
            }
        ]

    def test_sets_openvino_to_fp16_if_enabled(self, mocker: MockerFixture) -> None:
        model_path = "/cache/ViT-B-32__openai/textual/model.onnx"
        os.environ["MACHINE_LEARNING_DEVICE_ID"] = "1"
        mocker.patch.object(settings, "openvino_precision", ModelPrecision.FP16)

        session = OrtSession(model_path, providers=["OpenVINOExecutionProvider"])

        assert session.provider_options == [
            {
                "device_type": "GPU.1",
                "precision": "FP16",
                "cache_dir": "/cache/ViT-B-32__openai/textual/openvino",
            }
        ]

    def test_sets_provider_options_for_cuda(self) -> None:
        os.environ["MACHINE_LEARNING_DEVICE_ID"] = "1"

@@ -417,7 +437,7 @@ class TestRknnSession:
        session.run(None, input_feed)

        rknn_session.return_value.put.assert_called_once_with([input1, input2])
        np_spy.call_count == 2
        assert np_spy.call_count == 2
        np_spy.assert_has_calls([mock.call(input1), mock.call(input2)])


@@ -925,11 +945,34 @@ class TestCache:
            any_order=True,
        )

    async def test_preloads_ocr_models(self, monkeypatch: MonkeyPatch, mock_get_model: mock.Mock) -> None:
        os.environ["MACHINE_LEARNING_PRELOAD__OCR__DETECTION"] = "PP-OCRv5_mobile"
        os.environ["MACHINE_LEARNING_PRELOAD__OCR__RECOGNITION"] = "PP-OCRv5_mobile"

        settings = Settings()
        assert settings.preload is not None
        assert settings.preload.ocr.detection == "PP-OCRv5_mobile"
        assert settings.preload.ocr.recognition == "PP-OCRv5_mobile"

        model_cache = ModelCache()
        monkeypatch.setattr("immich_ml.main.model_cache", model_cache)

        await preload_models(settings.preload)
        mock_get_model.assert_has_calls(
            [
                mock.call("PP-OCRv5_mobile", ModelType.DETECTION, ModelTask.OCR),
                mock.call("PP-OCRv5_mobile", ModelType.RECOGNITION, ModelTask.OCR),
            ],
            any_order=True,
        )

    async def test_preloads_all_models(self, monkeypatch: MonkeyPatch, mock_get_model: mock.Mock) -> None:
        os.environ["MACHINE_LEARNING_PRELOAD__CLIP__TEXTUAL"] = "ViT-B-32__openai"
        os.environ["MACHINE_LEARNING_PRELOAD__CLIP__VISUAL"] = "ViT-B-32__openai"
        os.environ["MACHINE_LEARNING_PRELOAD__FACIAL_RECOGNITION__RECOGNITION"] = "buffalo_s"
        os.environ["MACHINE_LEARNING_PRELOAD__FACIAL_RECOGNITION__DETECTION"] = "buffalo_s"
        os.environ["MACHINE_LEARNING_PRELOAD__OCR__DETECTION"] = "PP-OCRv5_mobile"
        os.environ["MACHINE_LEARNING_PRELOAD__OCR__RECOGNITION"] = "PP-OCRv5_mobile"

        settings = Settings()
        assert settings.preload is not None
@@ -937,6 +980,8 @@ class TestCache:
        assert settings.preload.clip.textual == "ViT-B-32__openai"
        assert settings.preload.facial_recognition.recognition == "buffalo_s"
        assert settings.preload.facial_recognition.detection == "buffalo_s"
        assert settings.preload.ocr.detection == "PP-OCRv5_mobile"
        assert settings.preload.ocr.recognition == "PP-OCRv5_mobile"

        model_cache = ModelCache()
        monkeypatch.setattr("immich_ml.main.model_cache", model_cache)
@@ -948,6 +993,8 @@ class TestCache:
                mock.call("ViT-B-32__openai", ModelType.VISUAL, ModelTask.SEARCH),
                mock.call("buffalo_s", ModelType.DETECTION, ModelTask.FACIAL_RECOGNITION),
                mock.call("buffalo_s", ModelType.RECOGNITION, ModelTask.FACIAL_RECOGNITION),
                mock.call("PP-OCRv5_mobile", ModelType.DETECTION, ModelTask.OCR),
                mock.call("PP-OCRv5_mobile", ModelType.RECOGNITION, ModelTask.OCR),
            ],
            any_order=True,
        )

machine-learning/uv.lock (generated)
@@ -1100,7 +1100,6 @@ dependencies = [
    { name = "python-multipart" },
    { name = "rapidocr" },
    { name = "rich" },
    { name = "setuptools" },
    { name = "tokenizers" },
    { name = "uvicorn", extra = ["standard"] },
]
@@ -1188,7 +1187,6 @@ requires-dist = [
    { name = "rapidocr", specifier = ">=3.1.0" },
    { name = "rich", specifier = ">=13.4.2" },
    { name = "rknn-toolkit-lite2", marker = "extra == 'rknn'", specifier = ">=2.3.0,<3" },
    { name = "setuptools", specifier = ">=78.1.0" },
    { name = "tokenizers", specifier = ">=0.15.0,<1.0" },
    { name = "uvicorn", extras = ["standard"], specifier = ">=0.22.0,<1.0" },
]

@@ -3,12 +3,12 @@
#
# Pump one or both of the server/mobile versions in appropriate files
#
# usage: './scripts/pump-version.sh -s <major|minor|patch> <-m>
# usage: './scripts/pump-version.sh -s <major|minor|patch> <-m> <true|false>
#
# examples:
# ./scripts/pump-version.sh -s major # 1.0.0+50 => 2.0.0+50
# ./scripts/pump-version.sh -s minor -m # 1.0.0+50 => 1.1.0+51
# ./scripts/pump-version.sh -m # 1.0.0+50 => 1.0.0+51
# ./scripts/pump-version.sh -s major # 1.0.0+50 => 2.0.0+50
# ./scripts/pump-version.sh -s minor -m true # 1.0.0+50 => 1.1.0+51
# ./scripts/pump-version.sh -m true # 1.0.0+50 => 1.0.0+51
#

SERVER_PUMP="false"
@@ -88,7 +88,6 @@ if [ "$CURRENT_MOBILE" != "$NEXT_MOBILE" ]; then
fi

sed -i "s/\"android\.injected\.version\.name\" => \"$CURRENT_SERVER\",/\"android\.injected\.version\.name\" => \"$NEXT_SERVER\",/" mobile/android/fastlane/Fastfile
sed -i "s/version_number: \"$CURRENT_SERVER\"$/version_number: \"$NEXT_SERVER\"/" mobile/ios/fastlane/Fastfile
sed -i "s/\"android\.injected\.version\.code\" => $CURRENT_MOBILE,/\"android\.injected\.version\.code\" => $NEXT_MOBILE,/" mobile/android/fastlane/Fastfile
sed -i "s/^version: $CURRENT_SERVER+$CURRENT_MOBILE$/version: $NEXT_SERVER+$NEXT_MOBILE/" mobile/pubspec.yaml

mise.toml
@@ -1,7 +1,9 @@
experimental_monorepo_root = true

[tools]
node = "24.11.0"
flutter = "3.35.7"
pnpm = "10.19.0"
pnpm = "10.20.0"
terragrunt = "0.91.2"
opentofu = "1.10.6"

@@ -14,514 +16,21 @@ postinstall = "chmod +x $MISE_TOOL_INSTALL_PATH/dcm"
experimental = true
pin = true

# .github
[tasks."github:install"]
run = "pnpm install --filter github --frozen-lockfile"

[tasks."github:format"]
env._.path = "./.github/node_modules/.bin"
dir = ".github"
run = "prettier --check ."

[tasks."github:format-fix"]
env._.path = "./.github/node_modules/.bin"
dir = ".github"
run = "prettier --write ."

# @immich/cli
[tasks."cli:install"]
run = "pnpm install --filter @immich/cli --frozen-lockfile"

[tasks."cli:build"]
env._.path = "./cli/node_modules/.bin"
dir = "cli"
run = "vite build"

[tasks."cli:test"]
env._.path = "./cli/node_modules/.bin"
dir = "cli"
run = "vite"

[tasks."cli:lint"]
env._.path = "./cli/node_modules/.bin"
dir = "cli"
run = "eslint \"src/**/*.ts\" --max-warnings 0"

[tasks."cli:lint-fix"]
run = "mise run cli:lint --fix"

[tasks."cli:format"]
env._.path = "./cli/node_modules/.bin"
dir = "cli"
run = "prettier --check ."

[tasks."cli:format-fix"]
env._.path = "./cli/node_modules/.bin"
dir = "cli"
run = "prettier --write ."

[tasks."cli:check"]
env._.path = "./cli/node_modules/.bin"
dir = "cli"
run = "tsc --noEmit"

# @immich/sdk
# SDK tasks
[tasks."sdk:install"]
dir = "open-api/typescript-sdk"
run = "pnpm install --filter @immich/sdk --frozen-lockfile"

[tasks."sdk:build"]
env._.path = "./open-api/typescript-sdk/node_modules/.bin"
dir = "./open-api/typescript-sdk"
dir = "open-api/typescript-sdk"
env._.path = "./node_modules/.bin"
run = "tsc"

# docs
[tasks."docs:install"]
run = "pnpm install --filter documentation --frozen-lockfile"

[tasks."docs:start"]
env._.path = "./docs/node_modules/.bin"
dir = "docs"
run = "docusaurus --port 3005"

[tasks."docs:build"]
env._.path = "./docs/node_modules/.bin"
dir = "docs"
run = [
  "jq -c < ../open-api/immich-openapi-specs.json > ./static/openapi.json || exit 0",
  "docusaurus build",
]

[tasks."docs:preview"]
env._.path = "./docs/node_modules/.bin"
dir = "docs"
run = "docusaurus serve"

[tasks."docs:format"]
env._.path = "./docs/node_modules/.bin"
dir = "docs"
run = "prettier --check ."

[tasks."docs:format-fix"]
env._.path = "./docs/node_modules/.bin"
dir = "docs"
run = "prettier --write ."

# e2e
[tasks."e2e:install"]
run = "pnpm install --filter immich-e2e --frozen-lockfile"

[tasks."e2e:test"]
env._.path = "./e2e/node_modules/.bin"
dir = "e2e"
run = "vitest --run"

[tasks."e2e:test-web"]
env._.path = "./e2e/node_modules/.bin"
dir = "e2e"
run = "playwright test"

[tasks."e2e:format"]
env._.path = "./e2e/node_modules/.bin"
dir = "e2e"
run = "prettier --check ."

[tasks."e2e:format-fix"]
env._.path = "./e2e/node_modules/.bin"
dir = "e2e"
run = "prettier --write ."

[tasks."e2e:lint"]
env._.path = "./e2e/node_modules/.bin"
dir = "e2e"
run = "eslint \"src/**/*.ts\" --max-warnings 0"

[tasks."e2e:lint-fix"]
run = "mise run e2e:lint --fix"

[tasks."e2e:check"]
env._.path = "./e2e/node_modules/.bin"
dir = "e2e"
run = "tsc --noEmit"

# i18n
# i18n tasks
[tasks."i18n:format"]
run = "mise run i18n:format-fix"
dir = "i18n"
run = { task = ":i18n:format-fix" }

[tasks."i18n:format-fix"]
run = "pnpm dlx sort-json ./i18n/*.json"

# server
[tasks."server:install"]
run = "pnpm install --filter immich --frozen-lockfile"

[tasks."server:build"]
env._.path = "./server/node_modules/.bin"
dir = "server"
run = "nest build"

[tasks."server:test"]
env._.path = "./server/node_modules/.bin"
dir = "server"
run = "vitest --config test/vitest.config.mjs"

[tasks."server:test-medium"]
env._.path = "./server/node_modules/.bin"
dir = "server"
run = "vitest --config test/vitest.config.medium.mjs"

[tasks."server:format"]
env._.path = "./server/node_modules/.bin"
dir = "server"
run = "prettier --check ."

[tasks."server:format-fix"]
env._.path = "./server/node_modules/.bin"
dir = "server"
run = "prettier --write ."

[tasks."server:lint"]
env._.path = "./server/node_modules/.bin"
dir = "server"
run = "eslint \"src/**/*.ts\" \"test/**/*.ts\" --max-warnings 0"

[tasks."server:lint-fix"]
run = "mise run server:lint --fix"

[tasks."server:check"]
env._.path = "./server/node_modules/.bin"
dir = "server"
run = "tsc --noEmit"

[tasks."server:sql"]
dir = "server"
run = "node ./dist/bin/sync-open-api.js"

[tasks."server:open-api"]
dir = "server"
run = "node ./dist/bin/sync-open-api.js"

[tasks."server:migrations"]
dir = "server"
run = "node ./dist/bin/migrations.js"
description = "Run database migration commands (create, generate, run, debug, or query)"

[tasks."server:schema-drop"]
run = "mise run server:migrations query 'DROP schema public cascade; CREATE schema public;'"

[tasks."server:schema-reset"]
run = "mise run server:schema-drop && mise run server:migrations run"

[tasks."server:email-dev"]
env._.path = "./server/node_modules/.bin"
dir = "server"
run = "email dev -p 3050 --dir src/emails"

[tasks."server:checklist"]
run = [
  "mise run server:install",
  "mise run server:format",
  "mise run server:lint",
  "mise run server:check",
  "mise run server:test-medium --run",
  "mise run server:test --run",
]

# web
[tasks."web:install"]
run = "pnpm install --filter immich-web --frozen-lockfile"

[tasks."web:svelte-kit-sync"]
env._.path = "./web/node_modules/.bin"
dir = "web"
run = "svelte-kit sync"

[tasks."web:build"]
env._.path = "./web/node_modules/.bin"
dir = "web"
run = "vite build"

[tasks."web:build-stats"]
env.BUILD_STATS = "true"
env._.path = "./web/node_modules/.bin"
dir = "web"
run = "vite build"

[tasks."web:preview"]
env._.path = "./web/node_modules/.bin"
dir = "web"
run = "vite preview"

[tasks."web:start"]
env._.path = "web/node_modules/.bin"
dir = "web"
run = "vite dev --host 0.0.0.0 --port 3000"

[tasks."web:test"]
depends = "web:svelte-kit-sync"
env._.path = "web/node_modules/.bin"
dir = "web"
run = "vitest"

[tasks."web:format"]
env._.path = "web/node_modules/.bin"
dir = "web"
run = "prettier --check ."

[tasks."web:format-fix"]
env._.path = "web/node_modules/.bin"
dir = "web"
run = "prettier --write ."

[tasks."web:lint"]
env._.path = "web/node_modules/.bin"
dir = "web"
run = "eslint . --max-warnings 0 --concurrency 4"

[tasks."web:lint-fix"]
run = "mise run web:lint --fix"

[tasks."web:check"]
depends = "web:svelte-kit-sync"
env._.path = "web/node_modules/.bin"
dir = "web"
run = "tsc --noEmit"

[tasks."web:check-svelte"]
depends = "web:svelte-kit-sync"
env._.path = "web/node_modules/.bin"
dir = "web"
run = "svelte-check --no-tsconfig --fail-on-warnings"

[tasks."web:checklist"]
run = [
  "mise run web:install",
  "mise run web:format",
  "mise run web:check",
  "mise run web:test --run",
  "mise run web:lint",
]

# mobile
[tasks."mobile:codegen:dart"]
alias = "mobile:codegen"
description = "Execute build_runner to auto-generate dart code"
dir = "mobile"
sources = [
  "pubspec.yaml",
  "build.yaml",
  "lib/**/*.dart",
  "infrastructure/**/*.drift",
]
outputs = { auto = true }
run = "dart run build_runner build --delete-conflicting-outputs"

[tasks."mobile:codegen:pigeon"]
alias = "mobile:pigeon"
description = "Generate pigeon platform code"
dir = "mobile"
depends = [
  "mobile:pigeon:native-sync",
  "mobile:pigeon:thumbnail",
  "mobile:pigeon:background-worker",
  "mobile:pigeon:background-worker-lock",
  "mobile:pigeon:connectivity",
]

[tasks."mobile:codegen:translation"]
alias = "mobile:translation"
description = "Generate translations from i18n JSONs"
dir = "mobile"
run = [
  { task = "i18n:format-fix" },
  { tasks = [
    "mobile:i18n:loader",
    "mobile:i18n:keys",
  ] },
]

[tasks."mobile:codegen:app-icon"]
description = "Generate app icons"
dir = "mobile"
run = "flutter pub run flutter_launcher_icons:main"

[tasks."mobile:codegen:splash"]
description = "Generate splash screen"
dir = "mobile"
run = "flutter pub run flutter_native_splash:create"

[tasks."mobile:test"]
description = "Run mobile tests"
dir = "mobile"
run = "flutter test"

[tasks."mobile:lint"]
description = "Analyze Dart code"
dir = "mobile"
depends = ["mobile:analyze:dart", "mobile:analyze:dcm"]

[tasks."mobile:lint-fix"]
description = "Auto-fix Dart code"
dir = "mobile"
depends = ["mobile:analyze:fix:dart", "mobile:analyze:fix:dcm"]

[tasks."mobile:format"]
description = "Format Dart code"
dir = "mobile"
run = "dart format --set-exit-if-changed $(find lib -name '*.dart' -not \\( -name '*.g.dart' -o -name '*.drift.dart' -o -name '*.gr.dart' \\))"

[tasks."mobile:build:android"]
description = "Build Android release"
dir = "mobile"
run = "flutter build appbundle"

[tasks."mobile:drift:migration"]
alias = "mobile:migration"
description = "Generate database migrations"
dir = "mobile"
run = "dart run drift_dev make-migrations"

# mobile internal tasks
[tasks."mobile:pigeon:native-sync"]
description = "Generate native sync API pigeon code"
dir = "mobile"
hide = true
sources = ["pigeon/native_sync_api.dart"]
outputs = [
  "lib/platform/native_sync_api.g.dart",
  "ios/Runner/Sync/Messages.g.swift",
  "android/app/src/main/kotlin/app/alextran/immich/sync/Messages.g.kt",
]
run = [
  "dart run pigeon --input pigeon/native_sync_api.dart",
  "dart format lib/platform/native_sync_api.g.dart",
]

[tasks."mobile:pigeon:thumbnail"]
description = "Generate thumbnail API pigeon code"
dir = "mobile"
hide = true
sources = ["pigeon/thumbnail_api.dart"]
outputs = [
  "lib/platform/thumbnail_api.g.dart",
  "ios/Runner/Images/Thumbnails.g.swift",
  "android/app/src/main/kotlin/app/alextran/immich/images/Thumbnails.g.kt",
]
run = [
  "dart run pigeon --input pigeon/thumbnail_api.dart",
  "dart format lib/platform/thumbnail_api.g.dart",
]

[tasks."mobile:pigeon:background-worker"]
description = "Generate background worker API pigeon code"
|
||||
dir = "mobile"
|
||||
hide = true
|
||||
sources = ["pigeon/background_worker_api.dart"]
|
||||
outputs = [
|
||||
"lib/platform/background_worker_api.g.dart",
|
||||
"ios/Runner/Background/BackgroundWorker.g.swift",
|
||||
"android/app/src/main/kotlin/app/alextran/immich/background/BackgroundWorker.g.kt",
|
||||
]
|
||||
run = [
|
||||
"dart run pigeon --input pigeon/background_worker_api.dart",
|
||||
"dart format lib/platform/background_worker_api.g.dart",
|
||||
]
|
||||
|
||||
[tasks."mobile:pigeon:background-worker-lock"]
|
||||
description = "Generate background worker lock API pigeon code"
|
||||
dir = "mobile"
|
||||
hide = true
|
||||
sources = ["pigeon/background_worker_lock_api.dart"]
|
||||
outputs = [
|
||||
"lib/platform/background_worker_lock_api.g.dart",
|
||||
"android/app/src/main/kotlin/app/alextran/immich/background/BackgroundWorkerLock.g.kt",
|
||||
]
|
||||
run = [
|
||||
"dart run pigeon --input pigeon/background_worker_lock_api.dart",
|
||||
"dart format lib/platform/background_worker_lock_api.g.dart",
|
||||
]
|
||||
|
||||
[tasks."mobile:pigeon:connectivity"]
|
||||
description = "Generate connectivity API pigeon code"
|
||||
dir = "mobile"
|
||||
hide = true
|
||||
sources = ["pigeon/connectivity_api.dart"]
|
||||
outputs = [
|
||||
"lib/platform/connectivity_api.g.dart",
|
||||
"ios/Runner/Connectivity/Connectivity.g.swift",
|
||||
"android/app/src/main/kotlin/app/alextran/immich/connectivity/Connectivity.g.kt",
|
||||
]
|
||||
run = [
|
||||
"dart run pigeon --input pigeon/connectivity_api.dart",
|
||||
"dart format lib/platform/connectivity_api.g.dart",
|
||||
]
|
||||
|
||||
[tasks."mobile:i18n:loader"]
|
||||
description = "Generate i18n loader"
|
||||
dir = "mobile"
|
||||
hide = true
|
||||
sources = ["i18n/"]
|
||||
outputs = "lib/generated/codegen_loader.g.dart"
|
||||
run = [
|
||||
"dart run easy_localization:generate -S ../i18n",
|
||||
"dart format lib/generated/codegen_loader.g.dart",
|
||||
]
|
||||
|
||||
[tasks."mobile:i18n:keys"]
|
||||
description = "Generate i18n keys"
|
||||
dir = "mobile"
|
||||
hide = true
|
||||
sources = ["i18n/en.json"]
|
||||
outputs = "lib/generated/intl_keys.g.dart"
|
||||
run = [
|
||||
"dart run bin/generate_keys.dart",
|
||||
"dart format lib/generated/intl_keys.g.dart",
|
||||
]
|
||||
|
||||
[tasks."mobile:analyze:dart"]
|
||||
description = "Run Dart analysis"
|
||||
dir = "mobile"
|
||||
hide = true
|
||||
run = "dart analyze --fatal-infos"
|
||||
|
||||
[tasks."mobile:analyze:dcm"]
|
||||
description = "Run Dart Code Metrics"
|
||||
dir = "mobile"
|
||||
hide = true
|
||||
run = "dcm analyze lib --fatal-style --fatal-warnings"
|
||||
|
||||
[tasks."mobile:analyze:fix:dart"]
|
||||
description = "Auto-fix Dart analysis"
|
||||
dir = "mobile"
|
||||
hide = true
|
||||
run = "dart fix --apply"
|
||||
|
||||
[tasks."mobile:analyze:fix:dcm"]
|
||||
description = "Auto-fix Dart Code Metrics"
|
||||
dir = "mobile"
|
||||
hide = true
|
||||
run = "dcm fix lib"
|
||||
|
||||
# docs deployment
|
||||
[tasks."tg:fmt"]
|
||||
run = "terragrunt hclfmt"
|
||||
description = "Format terragrunt files"
|
||||
|
||||
[tasks.tf]
|
||||
run = "terragrunt run --all"
|
||||
description = "Wrapper for terragrunt run-all"
|
||||
dir = "{{cwd}}"
|
||||
|
||||
[tasks."tf:fmt"]
|
||||
run = "tofu fmt -recursive tf/"
|
||||
description = "Format terraform files"
|
||||
|
||||
[tasks."tf:init"]
|
||||
run = "mise run tf init -- -reconfigure"
|
||||
dir = "{{cwd}}"
|
||||
dir = "i18n"
|
||||
run = "pnpm dlx sort-json *.json"
|
||||
|
||||
@@ -143,7 +143,7 @@ class BackgroundServicePlugin : FlutterPlugin, MethodChannel.MethodCallHandler,
|
||||
val mediaUrls = call.argument<List<String>>("mediaUrls")
|
||||
if (mediaUrls != null) {
|
||||
if ((Build.VERSION.SDK_INT >= Build.VERSION_CODES.R) && hasManageMediaPermission()) {
|
||||
moveToTrash(mediaUrls, result)
|
||||
moveToTrash(mediaUrls, result)
|
||||
} else {
|
||||
result.error("PERMISSION_DENIED", "Media permission required", null)
|
||||
}
|
||||
@@ -155,15 +155,23 @@ class BackgroundServicePlugin : FlutterPlugin, MethodChannel.MethodCallHandler,
|
||||
"restoreFromTrash" -> {
|
||||
val fileName = call.argument<String>("fileName")
|
||||
val type = call.argument<Int>("type")
|
||||
val mediaId = call.argument<String>("mediaId")
|
||||
if (fileName != null && type != null) {
|
||||
if ((Build.VERSION.SDK_INT >= Build.VERSION_CODES.R) && hasManageMediaPermission()) {
|
||||
restoreFromTrash(fileName, type, result)
|
||||
} else {
|
||||
result.error("PERMISSION_DENIED", "Media permission required", null)
|
||||
}
|
||||
} else {
|
||||
result.error("INVALID_NAME", "The file name is not specified.", null)
|
||||
}
|
||||
} else
|
||||
if (mediaId != null && type != null) {
|
||||
if ((Build.VERSION.SDK_INT >= Build.VERSION_CODES.R) && hasManageMediaPermission()) {
|
||||
restoreFromTrashById(mediaId, type, result)
|
||||
} else {
|
||||
result.error("PERMISSION_DENIED", "Media permission required", null)
|
||||
}
|
||||
} else {
|
||||
result.error("INVALID_PARAMS", "Required params are not specified.", null)
|
||||
}
|
||||
}
|
||||
|
||||
"requestManageMediaPermission" -> {
|
||||
@@ -175,6 +183,17 @@ class BackgroundServicePlugin : FlutterPlugin, MethodChannel.MethodCallHandler,
|
||||
}
|
||||
}
|
||||
|
||||
"hasManageMediaPermission" -> {
|
||||
if (hasManageMediaPermission()) {
|
||||
Log.i("Manage storage permission", "Permission already granted")
|
||||
result.success(true)
|
||||
} else {
|
||||
result.success(false)
|
||||
}
|
||||
}
|
||||
|
||||
"manageMediaPermission" -> requestManageMediaPermission(result)
|
||||
|
||||
else -> result.notImplemented()
|
||||
}
|
||||
}
|
||||
@@ -224,25 +243,47 @@ class BackgroundServicePlugin : FlutterPlugin, MethodChannel.MethodCallHandler,
|
||||
}
|
||||
|
||||
@RequiresApi(Build.VERSION_CODES.R)
|
||||
private fun toggleTrash(contentUris: List<Uri>, isTrashed: Boolean, result: Result) {
|
||||
val activity = activityBinding?.activity
|
||||
val contentResolver = context?.contentResolver
|
||||
if (activity == null || contentResolver == null) {
|
||||
result.error("TrashError", "Activity or ContentResolver not available", null)
|
||||
return
|
||||
}
|
||||
private fun restoreFromTrashById(mediaId: String, type: Int, result: Result) {
|
||||
val id = mediaId.toLongOrNull()
|
||||
if (id == null) {
|
||||
result.error("INVALID_ID", "The file id is not a valid number: $mediaId", null)
|
||||
return
|
||||
}
|
||||
if (!isInTrash(id)) {
|
||||
result.error("TrashNotFound", "Item with id=$id not found in trash", null)
|
||||
return
|
||||
}
|
||||
|
||||
try {
|
||||
val pendingIntent = MediaStore.createTrashRequest(contentResolver, contentUris, isTrashed)
|
||||
pendingResult = result // Store for onActivityResult
|
||||
activity.startIntentSenderForResult(
|
||||
pendingIntent.intentSender,
|
||||
trashRequestCode,
|
||||
null, 0, 0, 0
|
||||
)
|
||||
} catch (e: Exception) {
|
||||
Log.e("TrashError", "Error creating or starting trash request", e)
|
||||
result.error("TrashError", "Error creating or starting trash request", null)
|
||||
val uri = ContentUris.withAppendedId(contentUriForType(type), id)
|
||||
|
||||
try {
|
||||
Log.i(TAG, "restoreFromTrashById: uri=$uri (type=$type,id=$id)")
|
||||
restoreUris(listOf(uri), result)
|
||||
} catch (e: Exception) {
|
||||
Log.w(TAG, "restoreFromTrashById failed", e)
|
||||
}
|
||||
}
|
||||
|
||||
@RequiresApi(Build.VERSION_CODES.R)
|
||||
private fun toggleTrash(contentUris: List<Uri>, isTrashed: Boolean, result: Result) {
|
||||
val activity = activityBinding?.activity
|
||||
val contentResolver = context?.contentResolver
|
||||
if (activity == null || contentResolver == null) {
|
||||
result.error("TrashError", "Activity or ContentResolver not available", null)
|
||||
return
|
||||
}
|
||||
|
||||
try {
|
||||
val pendingIntent = MediaStore.createTrashRequest(contentResolver, contentUris, isTrashed)
|
||||
pendingResult = result // Store for onActivityResult
|
||||
activity.startIntentSenderForResult(
|
||||
pendingIntent.intentSender,
|
||||
trashRequestCode,
|
||||
null, 0, 0, 0
|
||||
)
|
||||
} catch (e: Exception) {
|
||||
Log.e("TrashError", "Error creating or starting trash request", e)
|
||||
result.error("TrashError", "Error creating or starting trash request", null)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -264,14 +305,7 @@ class BackgroundServicePlugin : FlutterPlugin, MethodChannel.MethodCallHandler,
|
||||
contentResolver.query(queryUri, projection, queryArgs, null)?.use { cursor ->
|
||||
if (cursor.moveToFirst()) {
|
||||
val id = cursor.getLong(cursor.getColumnIndexOrThrow(MediaStore.Files.FileColumns._ID))
|
||||
// same order as AssetType from dart
|
||||
val contentUri = when (type) {
|
||||
1 -> MediaStore.Images.Media.EXTERNAL_CONTENT_URI
|
||||
2 -> MediaStore.Video.Media.EXTERNAL_CONTENT_URI
|
||||
3 -> MediaStore.Audio.Media.EXTERNAL_CONTENT_URI
|
||||
else -> queryUri
|
||||
}
|
||||
return ContentUris.withAppendedId(contentUri, id)
|
||||
return ContentUris.withAppendedId(contentUriForType(type), id)
|
||||
}
|
||||
}
|
||||
return null
|
||||
@@ -315,6 +349,40 @@ class BackgroundServicePlugin : FlutterPlugin, MethodChannel.MethodCallHandler,
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
@RequiresApi(Build.VERSION_CODES.R)
|
||||
private fun isInTrash(id: Long): Boolean {
|
||||
val contentResolver = context?.contentResolver ?: return false
|
||||
val filesUri = MediaStore.Files.getContentUri(MediaStore.VOLUME_EXTERNAL)
|
||||
val args = Bundle().apply {
|
||||
putString(ContentResolver.QUERY_ARG_SQL_SELECTION, "${MediaStore.Files.FileColumns._ID}=?")
|
||||
putStringArray(ContentResolver.QUERY_ARG_SQL_SELECTION_ARGS, arrayOf(id.toString()))
|
||||
putInt(MediaStore.QUERY_ARG_MATCH_TRASHED, MediaStore.MATCH_ONLY)
|
||||
putInt(ContentResolver.QUERY_ARG_LIMIT, 1)
|
||||
}
|
||||
return contentResolver.query(filesUri, arrayOf(MediaStore.Files.FileColumns._ID), args, null)
|
||||
?.use { it.moveToFirst() } == true
|
||||
}
|
||||
|
||||
@RequiresApi(Build.VERSION_CODES.R)
|
||||
private fun restoreUris(uris: List<Uri>, result: Result) {
|
||||
if (uris.isEmpty()) {
|
||||
result.error("TrashError", "No URIs to restore", null)
|
||||
return
|
||||
}
|
||||
Log.i(TAG, "restoreUris: count=${uris.size}, first=${uris.first()}")
|
||||
toggleTrash(uris, false, result)
|
||||
}
|
||||
|
||||
@RequiresApi(Build.VERSION_CODES.Q)
|
||||
private fun contentUriForType(type: Int): Uri =
|
||||
when (type) {
|
||||
// same order as AssetType from dart
|
||||
1 -> MediaStore.Images.Media.EXTERNAL_CONTENT_URI
|
||||
2 -> MediaStore.Video.Media.EXTERNAL_CONTENT_URI
|
||||
3 -> MediaStore.Audio.Media.EXTERNAL_CONTENT_URI
|
||||
else -> MediaStore.Files.getContentUri(MediaStore.VOLUME_EXTERNAL)
|
||||
}
|
||||
}
|
||||
|
||||
private const val TAG = "BackgroundServicePlugin"
|
||||
|
||||
@@ -305,6 +305,7 @@ interface NativeSyncApi {
|
||||
fun getAssetsForAlbum(albumId: String, updatedTimeCond: Long?): List<PlatformAsset>
|
||||
fun hashAssets(assetIds: List<String>, allowNetworkAccess: Boolean, callback: (Result<List<HashResult>>) -> Unit)
|
||||
fun cancelHashing()
|
||||
fun getTrashedAssets(): Map<String, List<PlatformAsset>>
|
||||
|
||||
companion object {
|
||||
/** The codec used by NativeSyncApi. */
|
||||
@@ -483,6 +484,21 @@ interface NativeSyncApi {
|
||||
channel.setMessageHandler(null)
|
||||
}
|
||||
}
|
||||
run {
|
||||
val channel = BasicMessageChannel<Any?>(binaryMessenger, "dev.flutter.pigeon.immich_mobile.NativeSyncApi.getTrashedAssets$separatedMessageChannelSuffix", codec, taskQueue)
|
||||
if (api != null) {
|
||||
channel.setMessageHandler { _, reply ->
|
||||
val wrapped: List<Any?> = try {
|
||||
listOf(api.getTrashedAssets())
|
||||
} catch (exception: Throwable) {
|
||||
MessagesPigeonUtils.wrapError(exception)
|
||||
}
|
||||
reply.reply(wrapped)
|
||||
}
|
||||
} else {
|
||||
channel.setMessageHandler(null)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -21,4 +21,9 @@ class NativeSyncApiImpl26(context: Context) : NativeSyncApiImplBase(context), Na
|
||||
override fun getMediaChanges(): SyncDelta {
|
||||
throw IllegalStateException("Method not supported on this Android version.")
|
||||
}
|
||||
|
||||
override fun getTrashedAssets(): Map<String, List<PlatformAsset>> {
|
||||
//Method not supported on this Android version.
|
||||
return emptyMap()
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,7 +1,9 @@
|
||||
package app.alextran.immich.sync
|
||||
|
||||
import android.content.ContentResolver
|
||||
import android.content.Context
|
||||
import android.os.Build
|
||||
import android.os.Bundle
|
||||
import android.provider.MediaStore
|
||||
import androidx.annotation.RequiresApi
|
||||
import androidx.annotation.RequiresExtension
|
||||
@@ -86,4 +88,29 @@ class NativeSyncApiImpl30(context: Context) : NativeSyncApiImplBase(context), Na
|
||||
// Unmounted volumes are handled in dart when the album is removed
|
||||
return SyncDelta(hasChanges, changed, deleted, assetAlbums)
|
||||
}
|
||||
|
||||
override fun getTrashedAssets(): Map<String, List<PlatformAsset>> {
|
||||
|
||||
val result = LinkedHashMap<String, MutableList<PlatformAsset>>()
|
||||
val volumes = MediaStore.getExternalVolumeNames(ctx)
|
||||
|
||||
for (volume in volumes) {
|
||||
|
||||
val queryArgs = Bundle().apply {
|
||||
putString(ContentResolver.QUERY_ARG_SQL_SELECTION, MEDIA_SELECTION)
|
||||
putStringArray(ContentResolver.QUERY_ARG_SQL_SELECTION_ARGS, MEDIA_SELECTION_ARGS)
|
||||
putInt(MediaStore.QUERY_ARG_MATCH_TRASHED, MediaStore.MATCH_ONLY)
|
||||
}
|
||||
|
||||
getCursor(volume, queryArgs).use { cursor ->
|
||||
getAssets(cursor).forEach { res ->
|
||||
if (res is AssetResult.ValidAsset) {
|
||||
result.getOrPut(res.albumId) { mutableListOf() }.add(res.asset)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return result.mapValues { it.value.toList() }
|
||||
}
|
||||
}
|
||||
|
||||
@@ -4,6 +4,8 @@ import android.annotation.SuppressLint
|
||||
import android.content.ContentUris
|
||||
import android.content.Context
|
||||
import android.database.Cursor
|
||||
import android.net.Uri
|
||||
import android.os.Bundle
|
||||
import android.provider.MediaStore
|
||||
import android.util.Base64
|
||||
import androidx.core.database.getStringOrNull
|
||||
@@ -81,6 +83,16 @@ open class NativeSyncApiImplBase(context: Context) : ImmichPlugin() {
|
||||
sortOrder,
|
||||
)
|
||||
|
||||
protected fun getCursor(
|
||||
volume: String,
|
||||
queryArgs: Bundle
|
||||
): Cursor? = ctx.contentResolver.query(
|
||||
MediaStore.Files.getContentUri(volume),
|
||||
ASSET_PROJECTION,
|
||||
queryArgs,
|
||||
null
|
||||
)
|
||||
|
||||
protected fun getAssets(cursor: Cursor?): Sequence<AssetResult> {
|
||||
return sequence {
|
||||
cursor?.use { c ->
|
||||
|
||||
@@ -35,8 +35,8 @@ platform :android do
|
||||
task: 'bundle',
|
||||
build_type: 'Release',
|
||||
properties: {
|
||||
"android.injected.version.code" => 3023,
|
||||
"android.injected.version.name" => "2.2.0",
|
||||
"android.injected.version.code" => 3026,
|
||||
"android.injected.version.name" => "2.2.3",
|
||||
}
|
||||
)
|
||||
upload_to_play_store(skip_upload_apk: true, skip_upload_images: true, skip_upload_screenshots: true, aab: '../build/app/outputs/bundle/release/app-release.aab')
|
||||
|
||||
1
mobile/drift_schemas/main/drift_schema_v13.json
generated
Normal file
1
mobile/drift_schemas/main/drift_schema_v13.json
generated
Normal file
File diff suppressed because one or more lines are too long
@@ -1,4 +1,5 @@
|
||||
source "https://rubygems.org"
|
||||
|
||||
gem "fastlane"
|
||||
gem "cocoapods"
|
||||
gem "cocoapods"
|
||||
gem "abbrev" # Required for Ruby 3.4+
|
||||
@@ -32,6 +32,9 @@
|
||||
FEAFA8732E4D42F4001E47FE /* Thumbhash.swift in Sources */ = {isa = PBXBuildFile; fileRef = FEAFA8722E4D42F4001E47FE /* Thumbhash.swift */; };
|
||||
FED3B1962E253E9B0030FD97 /* ThumbnailsImpl.swift in Sources */ = {isa = PBXBuildFile; fileRef = FED3B1942E253E9B0030FD97 /* ThumbnailsImpl.swift */; };
|
||||
FED3B1972E253E9B0030FD97 /* Thumbnails.g.swift in Sources */ = {isa = PBXBuildFile; fileRef = FED3B1932E253E9B0030FD97 /* Thumbnails.g.swift */; };
|
||||
FEE084F82EC172460045228E /* SQLiteData in Frameworks */ = {isa = PBXBuildFile; productRef = FEE084F72EC172460045228E /* SQLiteData */; };
|
||||
FEE084FB2EC1725A0045228E /* RawStructuredFieldValues in Frameworks */ = {isa = PBXBuildFile; productRef = FEE084FA2EC1725A0045228E /* RawStructuredFieldValues */; };
|
||||
FEE084FD2EC1725A0045228E /* StructuredFieldValues in Frameworks */ = {isa = PBXBuildFile; productRef = FEE084FC2EC1725A0045228E /* StructuredFieldValues */; };
|
||||
/* End PBXBuildFile section */
|
||||
|
||||
/* Begin PBXContainerItemProxy section */
|
||||
@@ -153,6 +156,13 @@
|
||||
path = WidgetExtension;
|
||||
sourceTree = "<group>";
|
||||
};
|
||||
FEE084F22EC172080045228E /* Schemas */ = {
|
||||
isa = PBXFileSystemSynchronizedRootGroup;
|
||||
exceptions = (
|
||||
);
|
||||
path = Schemas;
|
||||
sourceTree = "<group>";
|
||||
};
|
||||
/* End PBXFileSystemSynchronizedRootGroup section */
|
||||
|
||||
/* Begin PBXFrameworksBuildPhase section */
|
||||
@@ -160,6 +170,9 @@
|
||||
isa = PBXFrameworksBuildPhase;
|
||||
buildActionMask = 2147483647;
|
||||
files = (
|
||||
FEE084F82EC172460045228E /* SQLiteData in Frameworks */,
|
||||
FEE084FB2EC1725A0045228E /* RawStructuredFieldValues in Frameworks */,
|
||||
FEE084FD2EC1725A0045228E /* StructuredFieldValues in Frameworks */,
|
||||
D218389C4A4C4693F141F7D1 /* Pods_Runner.framework in Frameworks */,
|
||||
);
|
||||
runOnlyForDeploymentPostprocessing = 0;
|
||||
@@ -254,6 +267,7 @@
|
||||
97C146F01CF9000F007C117D /* Runner */ = {
|
||||
isa = PBXGroup;
|
||||
children = (
|
||||
FEE084F22EC172080045228E /* Schemas */,
|
||||
B231F52D2E93A44A00BC45D1 /* Core */,
|
||||
B25D37792E72CA15008B6CA7 /* Connectivity */,
|
||||
B21E34A62E5AF9760031FDB9 /* Background */,
|
||||
@@ -341,6 +355,7 @@
|
||||
fileSystemSynchronizedGroups = (
|
||||
B231F52D2E93A44A00BC45D1 /* Core */,
|
||||
B2CF7F8C2DDE4EBB00744BF6 /* Sync */,
|
||||
FEE084F22EC172080045228E /* Schemas */,
|
||||
);
|
||||
name = Runner;
|
||||
productName = Runner;
|
||||
@@ -419,6 +434,10 @@
|
||||
Base,
|
||||
);
|
||||
mainGroup = 97C146E51CF9000F007C117D;
|
||||
packageReferences = (
|
||||
FEE084F62EC172460045228E /* XCRemoteSwiftPackageReference "sqlite-data" */,
|
||||
FEE084F92EC1725A0045228E /* XCRemoteSwiftPackageReference "swift-http-structured-headers" */,
|
||||
);
|
||||
preferredProjectObjectVersion = 77;
|
||||
productRefGroup = 97C146EF1CF9000F007C117D /* Products */;
|
||||
projectDirPath = "";
|
||||
@@ -714,7 +733,7 @@
|
||||
CODE_SIGN_ENTITLEMENTS = Runner/RunnerProfile.entitlements;
|
||||
CODE_SIGN_IDENTITY = "Apple Development";
|
||||
CODE_SIGN_STYLE = Automatic;
|
||||
CURRENT_PROJECT_VERSION = 231;
|
||||
CURRENT_PROJECT_VERSION = 233;
|
||||
CUSTOM_GROUP_ID = group.app.immich.share;
|
||||
DEVELOPMENT_TEAM = 2F67MQ8R79;
|
||||
ENABLE_BITCODE = NO;
|
||||
@@ -858,7 +877,7 @@
|
||||
CODE_SIGN_ENTITLEMENTS = Runner/Runner.entitlements;
|
||||
CODE_SIGN_IDENTITY = "Apple Development";
|
||||
CODE_SIGN_STYLE = Automatic;
|
||||
CURRENT_PROJECT_VERSION = 231;
|
||||
CURRENT_PROJECT_VERSION = 233;
|
||||
CUSTOM_GROUP_ID = group.app.immich.share;
|
||||
DEVELOPMENT_TEAM = 2F67MQ8R79;
|
||||
ENABLE_BITCODE = NO;
|
||||
@@ -888,7 +907,7 @@
|
||||
CODE_SIGN_ENTITLEMENTS = Runner/Runner.entitlements;
|
||||
CODE_SIGN_IDENTITY = "Apple Development";
|
||||
CODE_SIGN_STYLE = Automatic;
|
||||
CURRENT_PROJECT_VERSION = 231;
|
||||
CURRENT_PROJECT_VERSION = 233;
|
||||
CUSTOM_GROUP_ID = group.app.immich.share;
|
||||
DEVELOPMENT_TEAM = 2F67MQ8R79;
|
||||
ENABLE_BITCODE = NO;
|
||||
@@ -922,7 +941,7 @@
|
||||
CODE_SIGN_ENTITLEMENTS = WidgetExtension/WidgetExtension.entitlements;
|
||||
CODE_SIGN_IDENTITY = "Apple Development";
|
||||
CODE_SIGN_STYLE = Automatic;
|
||||
CURRENT_PROJECT_VERSION = 231;
|
||||
CURRENT_PROJECT_VERSION = 233;
|
||||
DEVELOPMENT_TEAM = 2F67MQ8R79;
|
||||
ENABLE_USER_SCRIPT_SANDBOXING = YES;
|
||||
GCC_C_LANGUAGE_STANDARD = gnu17;
|
||||
@@ -965,7 +984,7 @@
|
||||
CODE_SIGN_ENTITLEMENTS = WidgetExtension/WidgetExtension.entitlements;
|
||||
CODE_SIGN_IDENTITY = "Apple Development";
|
||||
CODE_SIGN_STYLE = Automatic;
|
||||
CURRENT_PROJECT_VERSION = 231;
|
||||
CURRENT_PROJECT_VERSION = 233;
|
||||
DEVELOPMENT_TEAM = 2F67MQ8R79;
|
||||
ENABLE_USER_SCRIPT_SANDBOXING = YES;
|
||||
GCC_C_LANGUAGE_STANDARD = gnu17;
|
||||
@@ -1005,7 +1024,7 @@
|
||||
CODE_SIGN_ENTITLEMENTS = WidgetExtension/WidgetExtension.entitlements;
|
||||
CODE_SIGN_IDENTITY = "Apple Development";
|
||||
CODE_SIGN_STYLE = Automatic;
|
||||
CURRENT_PROJECT_VERSION = 231;
|
||||
CURRENT_PROJECT_VERSION = 233;
|
||||
DEVELOPMENT_TEAM = 2F67MQ8R79;
|
||||
ENABLE_USER_SCRIPT_SANDBOXING = YES;
|
||||
GCC_C_LANGUAGE_STANDARD = gnu17;
|
||||
@@ -1044,7 +1063,7 @@
|
||||
CODE_SIGN_ENTITLEMENTS = ShareExtension/ShareExtension.entitlements;
|
||||
CODE_SIGN_IDENTITY = "Apple Development";
|
||||
CODE_SIGN_STYLE = Automatic;
|
||||
CURRENT_PROJECT_VERSION = 231;
|
||||
CURRENT_PROJECT_VERSION = 233;
|
||||
CUSTOM_GROUP_ID = group.app.immich.share;
|
||||
DEVELOPMENT_TEAM = 2F67MQ8R79;
|
||||
ENABLE_USER_SCRIPT_SANDBOXING = YES;
|
||||
@@ -1088,7 +1107,7 @@
|
||||
CODE_SIGN_ENTITLEMENTS = ShareExtension/ShareExtension.entitlements;
|
||||
CODE_SIGN_IDENTITY = "Apple Development";
|
||||
CODE_SIGN_STYLE = Automatic;
|
||||
CURRENT_PROJECT_VERSION = 231;
|
||||
CURRENT_PROJECT_VERSION = 233;
|
||||
CUSTOM_GROUP_ID = group.app.immich.share;
|
||||
DEVELOPMENT_TEAM = 2F67MQ8R79;
|
||||
ENABLE_USER_SCRIPT_SANDBOXING = YES;
|
||||
@@ -1129,7 +1148,7 @@
|
||||
CODE_SIGN_ENTITLEMENTS = ShareExtension/ShareExtension.entitlements;
|
||||
CODE_SIGN_IDENTITY = "Apple Development";
|
||||
CODE_SIGN_STYLE = Automatic;
|
||||
CURRENT_PROJECT_VERSION = 231;
|
||||
CURRENT_PROJECT_VERSION = 233;
|
||||
CUSTOM_GROUP_ID = group.app.immich.share;
|
||||
DEVELOPMENT_TEAM = 2F67MQ8R79;
|
||||
ENABLE_USER_SCRIPT_SANDBOXING = YES;
|
||||
@@ -1201,6 +1220,43 @@
|
||||
defaultConfigurationName = Release;
|
||||
};
|
||||
/* End XCConfigurationList section */
|
||||
|
||||
/* Begin XCRemoteSwiftPackageReference section */
|
||||
FEE084F62EC172460045228E /* XCRemoteSwiftPackageReference "sqlite-data" */ = {
|
||||
isa = XCRemoteSwiftPackageReference;
|
||||
repositoryURL = "https://github.com/pointfreeco/sqlite-data";
|
||||
requirement = {
|
||||
kind = upToNextMajorVersion;
|
||||
minimumVersion = 1.3.0;
|
||||
};
|
||||
};
|
||||
FEE084F92EC1725A0045228E /* XCRemoteSwiftPackageReference "swift-http-structured-headers" */ = {
|
||||
isa = XCRemoteSwiftPackageReference;
|
||||
repositoryURL = "https://github.com/apple/swift-http-structured-headers.git";
|
||||
requirement = {
|
||||
kind = upToNextMajorVersion;
|
||||
minimumVersion = 1.5.0;
|
||||
};
|
||||
};
|
||||
/* End XCRemoteSwiftPackageReference section */
|
||||
|
||||
/* Begin XCSwiftPackageProductDependency section */
|
||||
FEE084F72EC172460045228E /* SQLiteData */ = {
|
||||
isa = XCSwiftPackageProductDependency;
|
||||
package = FEE084F62EC172460045228E /* XCRemoteSwiftPackageReference "sqlite-data" */;
|
||||
productName = SQLiteData;
|
||||
};
|
||||
FEE084FA2EC1725A0045228E /* RawStructuredFieldValues */ = {
|
||||
isa = XCSwiftPackageProductDependency;
|
||||
package = FEE084F92EC1725A0045228E /* XCRemoteSwiftPackageReference "swift-http-structured-headers" */;
|
||||
productName = RawStructuredFieldValues;
|
||||
};
|
||||
FEE084FC2EC1725A0045228E /* StructuredFieldValues */ = {
|
||||
isa = XCSwiftPackageProductDependency;
|
||||
package = FEE084F92EC1725A0045228E /* XCRemoteSwiftPackageReference "swift-http-structured-headers" */;
|
||||
productName = StructuredFieldValues;
|
||||
};
|
||||
/* End XCSwiftPackageProductDependency section */
|
||||
};
|
||||
rootObject = 97C146E61CF9000F007C117D /* Project object */;
|
||||
}
|
||||
|
||||
@@ -0,0 +1,177 @@
|
||||
{
|
||||
"originHash" : "9be33bfaa68721646604aefff3cabbdaf9a193da192aae024c265065671f6c49",
|
||||
"pins" : [
|
||||
{
|
||||
"identity" : "combine-schedulers",
|
||||
"kind" : "remoteSourceControl",
|
||||
"location" : "https://github.com/pointfreeco/combine-schedulers",
|
||||
"state" : {
|
||||
"revision" : "fd16d76fd8b9a976d88bfb6cacc05ca8d19c91b6",
|
||||
"version" : "1.1.0"
|
||||
}
|
||||
},
|
||||
{
|
||||
"identity" : "grdb.swift",
|
||||
"kind" : "remoteSourceControl",
|
||||
"location" : "https://github.com/groue/GRDB.swift",
|
||||
"state" : {
|
||||
"revision" : "18497b68fdbb3a09528d260a0a0e1e7e61c8c53d",
|
||||
"version" : "7.8.0"
|
||||
}
|
||||
},
|
||||
{
|
||||
"identity" : "opencombine",
|
||||
"kind" : "remoteSourceControl",
|
||||
"location" : "https://github.com/OpenCombine/OpenCombine.git",
|
||||
"state" : {
|
||||
"revision" : "8576f0d579b27020beccbccc3ea6844f3ddfc2c2",
|
||||
"version" : "0.14.0"
|
||||
}
|
||||
},
|
||||
{
|
||||
"identity" : "sqlite-data",
|
||||
"kind" : "remoteSourceControl",
|
||||
"location" : "https://github.com/pointfreeco/sqlite-data",
|
||||
"state" : {
|
||||
"revision" : "b66b894b9a5710f1072c8eb6448a7edfc2d743d9",
|
||||
"version" : "1.3.0"
|
||||
}
|
||||
},
|
||||
{
|
||||
"identity" : "swift-case-paths",
|
||||
"kind" : "remoteSourceControl",
|
||||
"location" : "https://github.com/pointfreeco/swift-case-paths",
|
||||
"state" : {
|
||||
"revision" : "6989976265be3f8d2b5802c722f9ba168e227c71",
|
||||
"version" : "1.7.2"
|
||||
}
|
||||
},
|
||||
{
|
||||
"identity" : "swift-clocks",
|
||||
"kind" : "remoteSourceControl",
|
||||
"location" : "https://github.com/pointfreeco/swift-clocks",
|
||||
"state" : {
|
||||
"revision" : "cc46202b53476d64e824e0b6612da09d84ffde8e",
|
||||
"version" : "1.0.6"
|
||||
}
|
||||
},
|
||||
{
|
||||
"identity" : "swift-collections",
|
||||
"kind" : "remoteSourceControl",
|
||||
"location" : "https://github.com/apple/swift-collections",
|
||||
"state" : {
|
||||
"revision" : "7b847a3b7008b2dc2f47ca3110d8c782fb2e5c7e",
|
||||
"version" : "1.3.0"
|
||||
}
|
||||
},
|
||||
{
|
||||
"identity" : "swift-concurrency-extras",
|
||||
"kind" : "remoteSourceControl",
|
||||
"location" : "https://github.com/pointfreeco/swift-concurrency-extras",
|
||||
"state" : {
|
||||
"revision" : "5a3825302b1a0d744183200915a47b508c828e6f",
|
||||
"version" : "1.3.2"
|
||||
}
|
||||
},
|
||||
{
|
||||
"identity" : "swift-custom-dump",
|
||||
"kind" : "remoteSourceControl",
|
||||
"location" : "https://github.com/pointfreeco/swift-custom-dump",
|
||||
"state" : {
|
||||
"revision" : "82645ec760917961cfa08c9c0c7104a57a0fa4b1",
|
||||
"version" : "1.3.3"
|
||||
}
|
||||
},
|
||||
{
|
||||
"identity" : "swift-dependencies",
|
||||
"kind" : "remoteSourceControl",
|
||||
"location" : "https://github.com/pointfreeco/swift-dependencies",
|
||||
"state" : {
|
||||
"revision" : "a10f9feeb214bc72b5337b6ef6d5a029360db4cc",
|
||||
"version" : "1.10.0"
|
||||
}
|
||||
},
|
||||
{
|
||||
"identity" : "swift-http-structured-headers",
|
||||
"kind" : "remoteSourceControl",
|
||||
"location" : "https://github.com/apple/swift-http-structured-headers.git",
|
||||
"state" : {
|
||||
"revision" : "a9f3c352f4d46afd155e00b3c6e85decae6bcbeb",
|
||||
"version" : "1.5.0"
|
||||
}
|
||||
},
|
||||
{
|
||||
"identity" : "swift-identified-collections",
|
||||
"kind" : "remoteSourceControl",
|
||||
"location" : "https://github.com/pointfreeco/swift-identified-collections",
|
||||
"state" : {
|
||||
"revision" : "322d9ffeeba85c9f7c4984b39422ec7cc3c56597",
|
||||
"version" : "1.1.1"
|
||||
}
|
||||
},
|
||||
{
|
||||
"identity" : "swift-perception",
|
||||
"kind" : "remoteSourceControl",
|
||||
"location" : "https://github.com/pointfreeco/swift-perception",
|
||||
"state" : {
|
||||
"revision" : "4f47ebafed5f0b0172cf5c661454fa8e28fb2ac4",
|
||||
"version" : "2.0.9"
|
||||
}
|
||||
},
|
||||
{
|
||||
"identity" : "swift-sharing",
|
||||
"kind" : "remoteSourceControl",
|
||||
"location" : "https://github.com/pointfreeco/swift-sharing",
|
||||
"state" : {
|
||||
"revision" : "3bfc408cc2d0bee2287c174da6b1c76768377818",
|
||||
"version" : "2.7.4"
|
||||
}
|
||||
},
|
||||
{
|
||||
"identity" : "swift-snapshot-testing",
|
||||
"kind" : "remoteSourceControl",
|
||||
"location" : "https://github.com/pointfreeco/swift-snapshot-testing",
|
||||
"state" : {
|
||||
"revision" : "a8b7c5e0ed33d8ab8887d1654d9b59f2cbad529b",
|
||||
"version" : "1.18.7"
|
||||
}
|
||||
},
|
||||
{
|
||||
"identity" : "swift-structured-queries",
|
||||
"kind" : "remoteSourceControl",
|
||||
"location" : "https://github.com/pointfreeco/swift-structured-queries",
|
||||
"state" : {
|
||||
"revision" : "9c84335373bae5f5c9f7b5f0adf3ae10f2cab5b9",
|
||||
"version" : "0.25.2"
|
||||
}
|
||||
},
|
||||
{
|
||||
"identity" : "swift-syntax",
|
||||
"kind" : "remoteSourceControl",
|
||||
"location" : "https://github.com/swiftlang/swift-syntax",
|
||||
"state" : {
|
||||
"revision" : "4799286537280063c85a32f09884cfbca301b1a1",
|
||||
"version" : "602.0.0"
|
||||
}
|
||||
},
|
||||
{
|
||||
"identity" : "swift-tagged",
|
||||
"kind" : "remoteSourceControl",
|
||||
"location" : "https://github.com/pointfreeco/swift-tagged",
|
||||
"state" : {
|
||||
"revision" : "3907a9438f5b57d317001dc99f3f11b46882272b",
|
||||
"version" : "0.10.0"
|
||||
}
|
||||
},
|
||||
{
|
||||
"identity" : "xctest-dynamic-overlay",
|
||||
"kind" : "remoteSourceControl",
|
||||
"location" : "https://github.com/pointfreeco/xctest-dynamic-overlay",
|
||||
"state" : {
|
||||
"revision" : "4c27acf5394b645b70d8ba19dc249c0472d5f618",
|
||||
"version" : "1.7.0"
|
||||
}
|
||||
}
|
||||
],
|
||||
"version" : 3
|
||||
}
|
||||
@@ -0,0 +1,168 @@
|
||||
{
|
||||
"originHash" : "9be33bfaa68721646604aefff3cabbdaf9a193da192aae024c265065671f6c49",
|
||||
"pins" : [
|
||||
{
|
||||
"identity" : "combine-schedulers",
|
||||
"kind" : "remoteSourceControl",
|
||||
"location" : "https://github.com/pointfreeco/combine-schedulers",
|
||||
"state" : {
|
||||
"revision" : "5928286acce13def418ec36d05a001a9641086f2",
|
||||
"version" : "1.0.3"
|
||||
}
|
||||
},
|
||||
{
|
||||
"identity" : "grdb.swift",
|
||||
"kind" : "remoteSourceControl",
|
||||
"location" : "https://github.com/groue/GRDB.swift",
|
||||
"state" : {
|
||||
"revision" : "18497b68fdbb3a09528d260a0a0e1e7e61c8c53d",
|
||||
"version" : "7.8.0"
|
||||
}
|
||||
},
|
||||
{
|
||||
"identity" : "sqlite-data",
|
||||
"kind" : "remoteSourceControl",
|
||||
"location" : "https://github.com/pointfreeco/sqlite-data",
|
||||
"state" : {
|
||||
"revision" : "b66b894b9a5710f1072c8eb6448a7edfc2d743d9",
|
||||
"version" : "1.3.0"
|
||||
}
|
||||
},
|
||||
{
|
||||
"identity" : "swift-case-paths",
|
||||
"kind" : "remoteSourceControl",
|
||||
"location" : "https://github.com/pointfreeco/swift-case-paths",
|
||||
"state" : {
|
||||
"revision" : "6989976265be3f8d2b5802c722f9ba168e227c71",
|
||||
"version" : "1.7.2"
|
||||
}
|
||||
},
|
||||
{
|
||||
"identity" : "swift-clocks",
|
||||
"kind" : "remoteSourceControl",
|
||||
"location" : "https://github.com/pointfreeco/swift-clocks",
|
||||
"state" : {
|
||||
"revision" : "cc46202b53476d64e824e0b6612da09d84ffde8e",
|
||||
"version" : "1.0.6"
|
||||
}
|
||||
},
|
||||
{
|
||||
"identity" : "swift-collections",
|
||||
"kind" : "remoteSourceControl",
|
||||
"location" : "https://github.com/apple/swift-collections",
|
||||
"state" : {
|
||||
"revision" : "7b847a3b7008b2dc2f47ca3110d8c782fb2e5c7e",
|
||||
"version" : "1.3.0"
|
||||
}
|
||||
},
|
||||
{
|
||||
"identity" : "swift-concurrency-extras",
|
||||
"kind" : "remoteSourceControl",
|
||||
"location" : "https://github.com/pointfreeco/swift-concurrency-extras",
|
||||
"state" : {
|
||||
"revision" : "5a3825302b1a0d744183200915a47b508c828e6f",
|
||||
"version" : "1.3.2"
|
||||
}
|
||||
},
|
||||
{
|
||||
"identity" : "swift-custom-dump",
|
||||
"kind" : "remoteSourceControl",
|
||||
"location" : "https://github.com/pointfreeco/swift-custom-dump",
|
||||
"state" : {
|
||||
"revision" : "82645ec760917961cfa08c9c0c7104a57a0fa4b1",
|
||||
"version" : "1.3.3"
|
||||
}
|
||||
},
|
||||
{
|
||||
"identity" : "swift-dependencies",
|
||||
"kind" : "remoteSourceControl",
|
||||
"location" : "https://github.com/pointfreeco/swift-dependencies",
|
||||
"state" : {
|
||||
"revision" : "a10f9feeb214bc72b5337b6ef6d5a029360db4cc",
|
||||
"version" : "1.10.0"
|
||||
}
|
||||
},
|
||||
{
|
||||
"identity" : "swift-http-structured-headers",
|
||||
"kind" : "remoteSourceControl",
|
||||
"location" : "https://github.com/apple/swift-http-structured-headers.git",
|
||||
"state" : {
|
||||
"revision" : "a9f3c352f4d46afd155e00b3c6e85decae6bcbeb",
|
||||
"version" : "1.5.0"
|
||||
}
|
||||
},
|
||||
{
|
||||
"identity" : "swift-identified-collections",
|
||||
"kind" : "remoteSourceControl",
|
||||
"location" : "https://github.com/pointfreeco/swift-identified-collections",
|
||||
"state" : {
|
||||
"revision" : "322d9ffeeba85c9f7c4984b39422ec7cc3c56597",
|
||||
"version" : "1.1.1"
|
||||
}
|
||||
},
|
||||
{
|
||||
"identity" : "swift-perception",
|
||||
"kind" : "remoteSourceControl",
|
||||
"location" : "https://github.com/pointfreeco/swift-perception",
|
||||
"state" : {
|
||||
"revision" : "4f47ebafed5f0b0172cf5c661454fa8e28fb2ac4",
|
||||
"version" : "2.0.9"
|
||||
}
|
||||
},
|
||||
{
|
||||
"identity" : "swift-sharing",
|
||||
"kind" : "remoteSourceControl",
|
||||
"location" : "https://github.com/pointfreeco/swift-sharing",
|
||||
"state" : {
|
||||
"revision" : "3bfc408cc2d0bee2287c174da6b1c76768377818",
|
||||
"version" : "2.7.4"
|
||||
}
|
||||
},
|
||||
{
|
||||
"identity" : "swift-snapshot-testing",
|
||||
"kind" : "remoteSourceControl",
|
||||
"location" : "https://github.com/pointfreeco/swift-snapshot-testing",
|
||||
"state" : {
|
||||
"revision" : "a8b7c5e0ed33d8ab8887d1654d9b59f2cbad529b",
|
||||
"version" : "1.18.7"
|
||||
}
|
||||
},
|
||||
{
|
||||
"identity" : "swift-structured-queries",
|
||||
"kind" : "remoteSourceControl",
|
||||
"location" : "https://github.com/pointfreeco/swift-structured-queries",
|
||||
"state" : {
|
||||
"revision" : "1447ea20550f6f02c4b48cc80931c3ed40a9c756",
|
||||
"version" : "0.25.0"
|
||||
}
|
||||
},
|
||||
{
|
||||
"identity" : "swift-syntax",
|
||||
"kind" : "remoteSourceControl",
|
||||
"location" : "https://github.com/swiftlang/swift-syntax",
|
||||
"state" : {
|
||||
"revision" : "4799286537280063c85a32f09884cfbca301b1a1",
|
||||
"version" : "602.0.0"
|
||||
}
|
||||
},
|
||||
{
|
||||
"identity" : "swift-tagged",
|
||||
"kind" : "remoteSourceControl",
|
||||
"location" : "https://github.com/pointfreeco/swift-tagged",
|
||||
"state" : {
|
||||
"revision" : "3907a9438f5b57d317001dc99f3f11b46882272b",
|
||||
"version" : "0.10.0"
|
||||
}
|
||||
},
|
||||
{
|
||||
"identity" : "xctest-dynamic-overlay",
|
||||
"kind" : "remoteSourceControl",
|
||||
"location" : "https://github.com/pointfreeco/xctest-dynamic-overlay",
|
||||
"state" : {
|
||||
"revision" : "4c27acf5394b645b70d8ba19dc249c0472d5f618",
|
||||
"version" : "1.7.0"
|
||||
}
|
||||
}
|
||||
],
|
||||
"version" : 3
|
||||
}
|
||||
@@ -80,7 +80,7 @@
|
||||
<key>CFBundlePackageType</key>
|
||||
<string>APPL</string>
|
||||
<key>CFBundleShortVersionString</key>
|
||||
<string>2.1.0</string>
|
||||
<string>2.2.1</string>
|
||||
<key>CFBundleSignature</key>
|
||||
<string>????</string>
|
||||
<key>CFBundleURLTypes</key>
|
||||
@@ -107,7 +107,7 @@
|
||||
</dict>
|
||||
</array>
|
||||
<key>CFBundleVersion</key>
|
||||
<string>231</string>
|
||||
<string>233</string>
|
||||
<key>FLTEnableImpeller</key>
|
||||
<true/>
|
||||
<key>ITSAppUsesNonExemptEncryption</key>
|
||||
|
||||
177
mobile/ios/Runner/Schemas/Constants.swift
Normal file
177
mobile/ios/Runner/Schemas/Constants.swift
Normal file
@@ -0,0 +1,177 @@
|
||||
import SQLiteData
|
||||
|
||||
struct Endpoint: Codable {
|
||||
let url: URL
|
||||
let status: Status
|
||||
|
||||
enum Status: String, Codable {
|
||||
case loading, valid, error, unknown
|
||||
}
|
||||
}
|
||||
|
||||
enum StoreKey: Int, CaseIterable, QueryBindable {
|
||||
// MARK: - Int
|
||||
case _version = 0
|
||||
static let version = Typed<Int>(rawValue: ._version)
|
||||
case _deviceIdHash = 3
|
||||
static let deviceIdHash = Typed<Int>(rawValue: ._deviceIdHash)
|
||||
case _backupTriggerDelay = 8
|
||||
static let backupTriggerDelay = Typed<Int>(rawValue: ._backupTriggerDelay)
|
||||
case _tilesPerRow = 103
|
||||
static let tilesPerRow = Typed<Int>(rawValue: ._tilesPerRow)
|
||||
case _groupAssetsBy = 105
|
||||
static let groupAssetsBy = Typed<Int>(rawValue: ._groupAssetsBy)
|
||||
case _uploadErrorNotificationGracePeriod = 106
|
||||
static let uploadErrorNotificationGracePeriod = Typed<Int>(rawValue: ._uploadErrorNotificationGracePeriod)
|
||||
case _thumbnailCacheSize = 110
|
||||
static let thumbnailCacheSize = Typed<Int>(rawValue: ._thumbnailCacheSize)
|
||||
case _imageCacheSize = 111
|
||||
static let imageCacheSize = Typed<Int>(rawValue: ._imageCacheSize)
|
||||
case _albumThumbnailCacheSize = 112
|
||||
static let albumThumbnailCacheSize = Typed<Int>(rawValue: ._albumThumbnailCacheSize)
|
||||
case _selectedAlbumSortOrder = 113
|
||||
static let selectedAlbumSortOrder = Typed<Int>(rawValue: ._selectedAlbumSortOrder)
|
||||
case _logLevel = 115
|
||||
static let logLevel = Typed<Int>(rawValue: ._logLevel)
|
||||
case _mapRelativeDate = 119
|
||||
static let mapRelativeDate = Typed<Int>(rawValue: ._mapRelativeDate)
|
||||
case _mapThemeMode = 124
|
||||
static let mapThemeMode = Typed<Int>(rawValue: ._mapThemeMode)
|
||||
|
||||
// MARK: - String
|
||||
case _assetETag = 1
|
||||
static let assetETag = Typed<String>(rawValue: ._assetETag)
|
||||
case _currentUser = 2
|
||||
static let currentUser = Typed<String>(rawValue: ._currentUser)
|
||||
case _deviceId = 4
|
||||
static let deviceId = Typed<String>(rawValue: ._deviceId)
|
||||
case _accessToken = 11
|
||||
static let accessToken = Typed<String>(rawValue: ._accessToken)
|
||||
case _serverEndpoint = 12
|
||||
static let serverEndpoint = Typed<String>(rawValue: ._serverEndpoint)
|
||||
case _sslClientCertData = 15
|
||||
static let sslClientCertData = Typed<String>(rawValue: ._sslClientCertData)
|
||||
case _sslClientPasswd = 16
|
||||
static let sslClientPasswd = Typed<String>(rawValue: ._sslClientPasswd)
|
||||
case _themeMode = 102
|
||||
static let themeMode = Typed<String>(rawValue: ._themeMode)
|
||||
case _customHeaders = 127
|
||||
static let customHeaders = Typed<[String: String]>(rawValue: ._customHeaders)
|
||||
case _primaryColor = 128
|
||||
static let primaryColor = Typed<String>(rawValue: ._primaryColor)
|
||||
case _preferredWifiName = 133
|
||||
static let preferredWifiName = Typed<String>(rawValue: ._preferredWifiName)
|
||||
|
||||
// MARK: - Endpoint
|
||||
case _externalEndpointList = 135
|
||||
static let externalEndpointList = Typed<[Endpoint]>(rawValue: ._externalEndpointList)
|
||||
|
||||
// MARK: - URL
|
||||
case _localEndpoint = 134
|
||||
static let localEndpoint = Typed<URL>(rawValue: ._localEndpoint)
|
||||
case _serverUrl = 10
|
||||
static let serverUrl = Typed<URL>(rawValue: ._serverUrl)
|
||||
|
||||
// MARK: - Date
|
||||
case _backupFailedSince = 5
|
||||
static let backupFailedSince = Typed<Date>(rawValue: ._backupFailedSince)
|
||||
|
||||
// MARK: - Bool
|
||||
case _backupRequireWifi = 6
|
||||
static let backupRequireWifi = Typed<Bool>(rawValue: ._backupRequireWifi)
|
||||
case _backupRequireCharging = 7
|
||||
static let backupRequireCharging = Typed<Bool>(rawValue: ._backupRequireCharging)
|
||||
case _autoBackup = 13
|
||||
static let autoBackup = Typed<Bool>(rawValue: ._autoBackup)
|
||||
case _backgroundBackup = 14
|
||||
static let backgroundBackup = Typed<Bool>(rawValue: ._backgroundBackup)
|
||||
case _loadPreview = 100
|
||||
static let loadPreview = Typed<Bool>(rawValue: ._loadPreview)
|
||||
case _loadOriginal = 101
|
||||
static let loadOriginal = Typed<Bool>(rawValue: ._loadOriginal)
|
||||
case _dynamicLayout = 104
|
||||
static let dynamicLayout = Typed<Bool>(rawValue: ._dynamicLayout)
|
||||
case _backgroundBackupTotalProgress = 107
|
||||
static let backgroundBackupTotalProgress = Typed<Bool>(rawValue: ._backgroundBackupTotalProgress)
|
||||
case _backgroundBackupSingleProgress = 108
|
||||
static let backgroundBackupSingleProgress = Typed<Bool>(rawValue: ._backgroundBackupSingleProgress)
|
||||
case _storageIndicator = 109
|
||||
static let storageIndicator = Typed<Bool>(rawValue: ._storageIndicator)
|
||||
case _advancedTroubleshooting = 114
|
||||
static let advancedTroubleshooting = Typed<Bool>(rawValue: ._advancedTroubleshooting)
|
||||
case _preferRemoteImage = 116
|
||||
static let preferRemoteImage = Typed<Bool>(rawValue: ._preferRemoteImage)
|
||||
case _loopVideo = 117
|
||||
static let loopVideo = Typed<Bool>(rawValue: ._loopVideo)
|
||||
case _mapShowFavoriteOnly = 118
|
||||
static let mapShowFavoriteOnly = Typed<Bool>(rawValue: ._mapShowFavoriteOnly)
|
||||
case _selfSignedCert = 120
|
||||
static let selfSignedCert = Typed<Bool>(rawValue: ._selfSignedCert)
|
||||
case _mapIncludeArchived = 121
|
||||
static let mapIncludeArchived = Typed<Bool>(rawValue: ._mapIncludeArchived)
|
||||
case _ignoreIcloudAssets = 122
|
||||
static let ignoreIcloudAssets = Typed<Bool>(rawValue: ._ignoreIcloudAssets)
|
||||
case _selectedAlbumSortReverse = 123
|
||||
static let selectedAlbumSortReverse = Typed<Bool>(rawValue: ._selectedAlbumSortReverse)
|
||||
case _mapwithPartners = 125
|
||||
static let mapwithPartners = Typed<Bool>(rawValue: ._mapwithPartners)
|
||||
case _enableHapticFeedback = 126
|
||||
static let enableHapticFeedback = Typed<Bool>(rawValue: ._enableHapticFeedback)
|
||||
case _dynamicTheme = 129
|
||||
static let dynamicTheme = Typed<Bool>(rawValue: ._dynamicTheme)
|
||||
case _colorfulInterface = 130
|
||||
static let colorfulInterface = Typed<Bool>(rawValue: ._colorfulInterface)
|
||||
case _syncAlbums = 131
|
||||
static let syncAlbums = Typed<Bool>(rawValue: ._syncAlbums)
|
||||
case _autoEndpointSwitching = 132
|
||||
static let autoEndpointSwitching = Typed<Bool>(rawValue: ._autoEndpointSwitching)
|
||||
case _loadOriginalVideo = 136
|
||||
static let loadOriginalVideo = Typed<Bool>(rawValue: ._loadOriginalVideo)
|
||||
case _manageLocalMediaAndroid = 137
|
||||
static let manageLocalMediaAndroid = Typed<Bool>(rawValue: ._manageLocalMediaAndroid)
|
||||
case _readonlyModeEnabled = 138
|
||||
static let readonlyModeEnabled = Typed<Bool>(rawValue: ._readonlyModeEnabled)
|
||||
case _autoPlayVideo = 139
|
||||
static let autoPlayVideo = Typed<Bool>(rawValue: ._autoPlayVideo)
|
||||
case _photoManagerCustomFilter = 1000
|
||||
static let photoManagerCustomFilter = Typed<Bool>(rawValue: ._photoManagerCustomFilter)
|
||||
case _betaPromptShown = 1001
|
||||
static let betaPromptShown = Typed<Bool>(rawValue: ._betaPromptShown)
|
||||
case _betaTimeline = 1002
|
||||
static let betaTimeline = Typed<Bool>(rawValue: ._betaTimeline)
|
||||
case _enableBackup = 1003
|
||||
static let enableBackup = Typed<Bool>(rawValue: ._enableBackup)
|
||||
case _useWifiForUploadVideos = 1004
|
||||
static let useWifiForUploadVideos = Typed<Bool>(rawValue: ._useWifiForUploadVideos)
|
||||
case _useWifiForUploadPhotos = 1005
|
||||
static let useWifiForUploadPhotos = Typed<Bool>(rawValue: ._useWifiForUploadPhotos)
|
||||
case _needBetaMigration = 1006
|
||||
static let needBetaMigration = Typed<Bool>(rawValue: ._needBetaMigration)
|
||||
case _shouldResetSync = 1007
|
||||
static let shouldResetSync = Typed<Bool>(rawValue: ._shouldResetSync)
|
||||
|
||||
struct Typed<T>: RawRepresentable {
|
||||
let rawValue: StoreKey
|
||||
|
||||
@_transparent
|
||||
init(rawValue value: StoreKey) {
|
||||
self.rawValue = value
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
enum BackupSelection: Int, QueryBindable {
|
||||
case selected, none, excluded
|
||||
}
|
||||
|
||||
enum AvatarColor: Int, QueryBindable {
|
||||
case primary, pink, red, yellow, blue, green, purple, orange, gray, amber
|
||||
}
|
||||
|
||||
enum AlbumUserRole: Int, QueryBindable {
|
||||
case editor, viewer
|
||||
}
|
||||
|
||||
enum MemoryType: Int, QueryBindable {
|
||||
case onThisDay
|
||||
}
|
||||
146
mobile/ios/Runner/Schemas/Store.swift
Normal file
146
mobile/ios/Runner/Schemas/Store.swift
Normal file
@@ -0,0 +1,146 @@
|
||||
import SQLiteData
|
||||
|
||||
enum StoreError: Error {
|
||||
case invalidJSON(String)
|
||||
case invalidURL(String)
|
||||
case encodingFailed
|
||||
}
|
||||
|
||||
protocol StoreConvertible {
|
||||
associatedtype StorageType
|
||||
static func fromValue(_ value: StorageType) throws(StoreError) -> Self
|
||||
static func toValue(_ value: Self) throws(StoreError) -> StorageType
|
||||
}
|
||||
|
||||
extension Int: StoreConvertible {
|
||||
static func fromValue(_ value: Int) -> Int { value }
|
||||
static func toValue(_ value: Int) -> Int { value }
|
||||
}
|
||||
|
||||
extension Bool: StoreConvertible {
|
||||
static func fromValue(_ value: Int) -> Bool { value == 1 }
|
||||
static func toValue(_ value: Bool) -> Int { value ? 1 : 0 }
|
||||
}
|
||||
|
||||
extension Date: StoreConvertible {
|
||||
static func fromValue(_ value: Int) -> Date { Date(timeIntervalSince1970: TimeInterval(value) / 1000) }
|
||||
static func toValue(_ value: Date) -> Int { Int(value.timeIntervalSince1970 * 1000) }
|
||||
}
|
||||
|
||||
extension String: StoreConvertible {
|
||||
static func fromValue(_ value: String) -> String { value }
|
||||
static func toValue(_ value: String) -> String { value }
|
||||
}
|
||||
|
||||
extension URL: StoreConvertible {
|
||||
static func fromValue(_ value: String) throws(StoreError) -> URL {
|
||||
guard let url = URL(string: value) else {
|
||||
throw StoreError.invalidURL(value)
|
||||
}
|
||||
return url
|
||||
}
|
||||
static func toValue(_ value: URL) -> String { value.absoluteString }
|
||||
}
|
||||
|
||||
extension StoreConvertible where Self: Codable, StorageType == String {
|
||||
static var jsonDecoder: JSONDecoder { JSONDecoder() }
|
||||
static var jsonEncoder: JSONEncoder { JSONEncoder() }
|
||||
|
||||
static func fromValue(_ value: String) throws(StoreError) -> Self {
|
||||
do {
|
||||
return try jsonDecoder.decode(Self.self, from: Data(value.utf8))
|
||||
} catch {
|
||||
throw StoreError.invalidJSON(value)
|
||||
}
|
||||
}
|
||||
|
||||
static func toValue(_ value: Self) throws(StoreError) -> String {
|
||||
let encoded: Data
|
||||
do {
|
||||
encoded = try jsonEncoder.encode(value)
|
||||
} catch {
|
||||
throw StoreError.encodingFailed
|
||||
}
|
||||
|
||||
guard let string = String(data: encoded, encoding: .utf8) else {
|
||||
throw StoreError.encodingFailed
|
||||
}
|
||||
return string
|
||||
}
|
||||
}
|
||||
|
||||
extension Array: StoreConvertible where Element: Codable {
|
||||
typealias StorageType = String
|
||||
}
|
||||
|
||||
extension Dictionary: StoreConvertible where Key == String, Value: Codable {
|
||||
typealias StorageType = String
|
||||
}
|
||||
|
||||
class StoreRepository {
|
||||
private let db: DatabasePool
|
||||
|
||||
init(db: DatabasePool) {
|
||||
self.db = db
|
||||
}
|
||||
|
||||
func get<T: StoreConvertible>(_ key: StoreKey.Typed<T>) throws -> T? where T.StorageType == Int {
|
||||
let query = Store.select(\.intValue).where { $0.id.eq(key.rawValue) }
|
||||
if let value = try db.read({ conn in try query.fetchOne(conn) }) ?? nil {
|
||||
return try T.fromValue(value)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func get<T: StoreConvertible>(_ key: StoreKey.Typed<T>) throws -> T? where T.StorageType == String {
|
||||
let query = Store.select(\.stringValue).where { $0.id.eq(key.rawValue) }
|
||||
if let value = try db.read({ conn in try query.fetchOne(conn) }) ?? nil {
|
||||
return try T.fromValue(value)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func get<T: StoreConvertible>(_ key: StoreKey.Typed<T>) async throws -> T? where T.StorageType == Int {
|
||||
let query = Store.select(\.intValue).where { $0.id.eq(key.rawValue) }
|
||||
if let value = try await db.read({ conn in try query.fetchOne(conn) }) ?? nil {
|
||||
return try T.fromValue(value)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func get<T: StoreConvertible>(_ key: StoreKey.Typed<T>) async throws -> T? where T.StorageType == String {
|
||||
let query = Store.select(\.stringValue).where { $0.id.eq(key.rawValue) }
|
||||
if let value = try await db.read({ conn in try query.fetchOne(conn) }) ?? nil {
|
||||
return try T.fromValue(value)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func set<T: StoreConvertible>(_ key: StoreKey.Typed<T>, value: T) throws where T.StorageType == Int {
|
||||
let value = try T.toValue(value)
|
||||
try db.write { conn in
|
||||
try Store.upsert { Store(id: key.rawValue, stringValue: nil, intValue: value) }.execute(conn)
|
||||
}
|
||||
}
|
||||
|
||||
func set<T: StoreConvertible>(_ key: StoreKey.Typed<T>, value: T) throws where T.StorageType == String {
|
||||
let value = try T.toValue(value)
|
||||
try db.write { conn in
|
||||
try Store.upsert { Store(id: key.rawValue, stringValue: value, intValue: nil) }.execute(conn)
|
||||
}
|
||||
}
|
||||
|
||||
func set<T: StoreConvertible>(_ key: StoreKey.Typed<T>, value: T) async throws where T.StorageType == Int {
|
||||
let value = try T.toValue(value)
|
||||
try await db.write { conn in
|
||||
try Store.upsert { Store(id: key.rawValue, stringValue: nil, intValue: value) }.execute(conn)
|
||||
}
|
||||
}
|
||||
|
||||
func set<T: StoreConvertible>(_ key: StoreKey.Typed<T>, value: T) async throws where T.StorageType == String {
|
||||
let value = try T.toValue(value)
|
||||
try await db.write { conn in
|
||||
try Store.upsert { Store(id: key.rawValue, stringValue: value, intValue: nil) }.execute(conn)
|
||||
}
|
||||
}
|
||||
}
|
||||
237
mobile/ios/Runner/Schemas/Tables.swift
Normal file
237
mobile/ios/Runner/Schemas/Tables.swift
Normal file
@@ -0,0 +1,237 @@
|
||||
import GRDB
|
||||
import SQLiteData
|
||||
|
||||
@Table("asset_face_entity")
|
||||
struct AssetFace {
|
||||
let id: String
|
||||
let assetId: String
|
||||
let personId: String?
|
||||
let imageWidth: Int
|
||||
let imageHeight: Int
|
||||
let boundingBoxX1: Int
|
||||
let boundingBoxY1: Int
|
||||
let boundingBoxX2: Int
|
||||
let boundingBoxY2: Int
|
||||
let sourceType: String
|
||||
}
|
||||
|
||||
@Table("auth_user_entity")
|
||||
struct AuthUser {
|
||||
let id: String
|
||||
let name: String
|
||||
let email: String
|
||||
let isAdmin: Bool
|
||||
let hasProfileImage: Bool
|
||||
let profileChangedAt: Date
|
||||
let avatarColor: AvatarColor
|
||||
let quotaSizeInBytes: Int
|
||||
let quotaUsageInBytes: Int
|
||||
let pinCode: String?
|
||||
}
|
||||
|
||||
@Table("local_album_entity")
|
||||
struct LocalAlbum {
|
||||
let id: String
|
||||
let backupSelection: BackupSelection
|
||||
let linkedRemoteAlbumId: String?
|
||||
let marker_: Bool?
|
||||
let name: String
|
||||
let isIosSharedAlbum: Bool
|
||||
let updatedAt: Date
|
||||
}
|
||||
|
||||
@Table("local_album_asset_entity")
|
||||
struct LocalAlbumAsset {
|
||||
let id: ID
|
||||
let marker_: String?
|
||||
|
||||
@Selection
|
||||
struct ID {
|
||||
let assetId: String
|
||||
let albumId: String
|
||||
}
|
||||
}
|
||||
|
||||
@Table("local_asset_entity")
|
||||
struct LocalAsset {
|
||||
let id: String
|
||||
let checksum: String?
|
||||
let createdAt: Date
|
||||
let durationInSeconds: Int?
|
||||
let height: Int?
|
||||
let isFavorite: Bool
|
||||
let name: String
|
||||
let orientation: String
|
||||
let type: Int
|
||||
let updatedAt: Date
|
||||
let width: Int?
|
||||
}
|
||||
|
||||
@Table("memory_asset_entity")
|
||||
struct MemoryAsset {
|
||||
let id: ID
|
||||
|
||||
@Selection
|
||||
struct ID {
|
||||
let assetId: String
|
||||
let albumId: String
|
||||
}
|
||||
}
|
||||
|
||||
@Table("memory_entity")
|
||||
struct Memory {
|
||||
let id: String
|
||||
let createdAt: Date
|
||||
let updatedAt: Date
|
||||
let deletedAt: Date?
|
||||
let ownerId: String
|
||||
let type: MemoryType
|
||||
let data: String
|
||||
let isSaved: Bool
|
||||
let memoryAt: Date
|
||||
let seenAt: Date?
|
||||
let showAt: Date?
|
||||
let hideAt: Date?
|
||||
}
|
||||
|
||||
@Table("partner_entity")
|
||||
struct Partner {
|
||||
let id: ID
|
||||
let inTimeline: Bool
|
||||
|
||||
@Selection
|
||||
struct ID {
|
||||
let sharedById: String
|
||||
let sharedWithId: String
|
||||
}
|
||||
}
|
||||
|
||||
@Table("person_entity")
|
||||
struct Person {
|
||||
let id: String
|
||||
let createdAt: Date
|
||||
let updatedAt: Date
|
||||
let ownerId: String
|
||||
let name: String
|
||||
let faceAssetId: String?
|
||||
let isFavorite: Bool
|
||||
let isHidden: Bool
|
||||
let color: String?
|
||||
let birthDate: Date?
|
||||
}
|
||||
|
||||
@Table("remote_album_entity")
|
||||
struct RemoteAlbum {
|
||||
let id: String
|
||||
let createdAt: Date
|
||||
let description: String?
|
||||
let isActivityEnabled: Bool
|
||||
let name: String
|
||||
let order: Int
|
||||
let ownerId: String
|
||||
let thumbnailAssetId: String?
|
||||
let updatedAt: Date
|
||||
}
|
||||
|
||||
@Table("remote_album_asset_entity")
|
||||
struct RemoteAlbumAsset {
|
||||
let id: ID
|
||||
|
||||
@Selection
|
||||
struct ID {
|
||||
let assetId: String
|
||||
let albumId: String
|
||||
}
|
||||
}
|
||||
|
||||
@Table("remote_album_user_entity")
|
||||
struct RemoteAlbumUser {
|
||||
let id: ID
|
||||
let role: AlbumUserRole
|
||||
|
||||
@Selection
|
||||
struct ID {
|
||||
let albumId: String
|
||||
let userId: String
|
||||
}
|
||||
}
|
||||
|
||||
@Table("remote_asset_entity")
|
||||
struct RemoteAsset {
|
||||
let id: String
|
||||
let checksum: String?
|
||||
let deletedAt: Date?
|
||||
let isFavorite: Int
|
||||
let libraryId: String?
|
||||
let livePhotoVideoId: String?
|
||||
let localDateTime: Date?
|
||||
let orientation: String
|
||||
let ownerId: String
|
||||
let stackId: String?
|
||||
let visibility: Int
|
||||
}
|
||||
|
||||
@Table("remote_exif_entity")
|
||||
struct RemoteExif {
|
||||
@Column(primaryKey: true)
|
||||
let assetId: String
|
||||
let city: String?
|
||||
let state: String?
|
||||
let country: String?
|
||||
let dateTimeOriginal: Date?
|
||||
let description: String?
|
||||
let height: Int?
|
||||
let width: Int?
|
||||
let exposureTime: String?
|
||||
let fNumber: Double?
|
||||
let fileSize: Int?
|
||||
let focalLength: Double?
|
||||
let latitude: Double?
|
||||
let longitude: Double?
|
||||
let iso: Int?
|
||||
let make: String?
|
||||
let model: String?
|
||||
let lens: String?
|
||||
let orientation: String?
|
||||
let timeZone: String?
|
||||
let rating: Int?
|
||||
let projectionType: String?
|
||||
}
|
||||
|
||||
@Table("stack_entity")
|
||||
struct Stack {
|
||||
let id: String
|
||||
let createdAt: Date
|
||||
let updatedAt: Date
|
||||
let ownerId: String
|
||||
let primaryAssetId: String
|
||||
}
|
||||
|
||||
@Table("store_entity")
|
||||
struct Store {
|
||||
let id: StoreKey
|
||||
let stringValue: String?
|
||||
let intValue: Int?
|
||||
}
|
||||
|
||||
@Table("user_entity")
|
||||
struct User {
|
||||
let id: String
|
||||
let name: String
|
||||
let email: String
|
||||
let hasProfileImage: Bool
|
||||
let profileChangedAt: Date
|
||||
let avatarColor: AvatarColor
|
||||
}
|
||||
|
||||
@Table("user_metadata_entity")
|
||||
struct UserMetadata {
|
||||
let id: ID
|
||||
let value: Data
|
||||
|
||||
@Selection
|
||||
struct ID {
|
||||
let userId: String
|
||||
let key: Date
|
||||
}
|
||||
}
|
||||
@@ -364,6 +364,7 @@ protocol NativeSyncApi {
|
||||
func getAssetsForAlbum(albumId: String, updatedTimeCond: Int64?) throws -> [PlatformAsset]
|
||||
func hashAssets(assetIds: [String], allowNetworkAccess: Bool, completion: @escaping (Result<[HashResult], Error>) -> Void)
|
||||
func cancelHashing() throws
|
||||
func getTrashedAssets() throws -> [String: [PlatformAsset]]
|
||||
}
|
||||
|
||||
/// Generated setup class from Pigeon to handle messages through the `binaryMessenger`.
|
||||
@@ -532,5 +533,20 @@ class NativeSyncApiSetup {
|
||||
} else {
|
||||
cancelHashingChannel.setMessageHandler(nil)
|
||||
}
|
||||
let getTrashedAssetsChannel = taskQueue == nil
|
||||
? FlutterBasicMessageChannel(name: "dev.flutter.pigeon.immich_mobile.NativeSyncApi.getTrashedAssets\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec)
|
||||
: FlutterBasicMessageChannel(name: "dev.flutter.pigeon.immich_mobile.NativeSyncApi.getTrashedAssets\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec, taskQueue: taskQueue)
|
||||
if let api = api {
|
||||
getTrashedAssetsChannel.setMessageHandler { _, reply in
|
||||
do {
|
||||
let result = try api.getTrashedAssets()
|
||||
reply(wrapResult(result))
|
||||
} catch {
|
||||
reply(wrapError(error))
|
||||
}
|
||||
}
|
||||
} else {
|
||||
getTrashedAssetsChannel.setMessageHandler(nil)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
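For reference, the Dart side reaches this new `getTrashedAssets` handler through the generated Pigeon client in `native_sync_api.g.dart` (the same client `LocalSyncService` uses further down in this diff). A minimal sketch, with the `printTrashedAssets` function name being illustrative only and the iOS behaviour matching the `UNSUPPORTED_OS` error thrown above:

```dart
import 'package:immich_mobile/platform/native_sync_api.g.dart';

// Sketch: fetch trashed assets grouped by album id from the platform side.
// On iOS the native handler throws UNSUPPORTED_OS, so callers are expected
// to guard this behind an Android check.
Future<void> printTrashedAssets(NativeSyncApi nativeSyncApi) async {
  final trashedByAlbum = await nativeSyncApi.getTrashedAssets();
  for (final entry in trashedByAlbum.entries) {
    // Each key is a local album id, each value the assets currently in the OS trash.
    print('${entry.key}: ${entry.value.length} trashed assets');
  }
}
```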
@@ -3,15 +3,15 @@ import CryptoKit
|
||||
|
||||
struct AssetWrapper: Hashable, Equatable {
|
||||
let asset: PlatformAsset
|
||||
|
||||
|
||||
init(with asset: PlatformAsset) {
|
||||
self.asset = asset
|
||||
}
|
||||
|
||||
|
||||
func hash(into hasher: inout Hasher) {
|
||||
hasher.combine(self.asset.id)
|
||||
}
|
||||
|
||||
|
||||
static func == (lhs: AssetWrapper, rhs: AssetWrapper) -> Bool {
|
||||
return lhs.asset.id == rhs.asset.id
|
||||
}
|
||||
@@ -19,31 +19,31 @@ struct AssetWrapper: Hashable, Equatable {
|
||||
|
||||
class NativeSyncApiImpl: ImmichPlugin, NativeSyncApi, FlutterPlugin {
|
||||
static let name = "NativeSyncApi"
|
||||
|
||||
|
||||
static func register(with registrar: any FlutterPluginRegistrar) {
|
||||
let instance = NativeSyncApiImpl()
|
||||
NativeSyncApiSetup.setUp(binaryMessenger: registrar.messenger(), api: instance)
|
||||
registrar.publish(instance)
|
||||
}
|
||||
|
||||
|
||||
func detachFromEngine(for registrar: any FlutterPluginRegistrar) {
|
||||
super.detachFromEngine()
|
||||
}
|
||||
|
||||
|
||||
private let defaults: UserDefaults
|
||||
private let changeTokenKey = "immich:changeToken"
|
||||
private let albumTypes: [PHAssetCollectionType] = [.album, .smartAlbum]
|
||||
private let recoveredAlbumSubType = 1000000219
|
||||
|
||||
|
||||
private var hashTask: Task<Void?, Error>?
|
||||
private static let hashCancelledCode = "HASH_CANCELLED"
|
||||
private static let hashCancelled = Result<[HashResult], Error>.failure(PigeonError(code: hashCancelledCode, message: "Hashing cancelled", details: nil))
|
||||
|
||||
|
||||
|
||||
|
||||
init(with defaults: UserDefaults = .standard) {
|
||||
self.defaults = defaults
|
||||
}
|
||||
|
||||
|
||||
@available(iOS 16, *)
|
||||
private func getChangeToken() -> PHPersistentChangeToken? {
|
||||
guard let data = defaults.data(forKey: changeTokenKey) else {
|
||||
@@ -51,7 +51,7 @@ class NativeSyncApiImpl: ImmichPlugin, NativeSyncApi, FlutterPlugin {
|
||||
}
|
||||
return try? NSKeyedUnarchiver.unarchivedObject(ofClass: PHPersistentChangeToken.self, from: data)
|
||||
}
|
||||
|
||||
|
||||
@available(iOS 16, *)
|
||||
private func saveChangeToken(token: PHPersistentChangeToken) -> Void {
|
||||
guard let data = try? NSKeyedArchiver.archivedData(withRootObject: token, requiringSecureCoding: true) else {
|
||||
@@ -59,18 +59,18 @@ class NativeSyncApiImpl: ImmichPlugin, NativeSyncApi, FlutterPlugin {
|
||||
}
|
||||
defaults.set(data, forKey: changeTokenKey)
|
||||
}
|
||||
|
||||
|
||||
func clearSyncCheckpoint() -> Void {
|
||||
defaults.removeObject(forKey: changeTokenKey)
|
||||
}
|
||||
|
||||
|
||||
func checkpointSync() {
|
||||
guard #available(iOS 16, *) else {
|
||||
return
|
||||
}
|
||||
saveChangeToken(token: PHPhotoLibrary.shared().currentChangeToken)
|
||||
}
|
||||
|
||||
|
||||
func shouldFullSync() -> Bool {
|
||||
guard #available(iOS 16, *),
|
||||
PHPhotoLibrary.authorizationStatus(for: .readWrite) == .authorized,
|
||||
@@ -78,36 +78,36 @@ class NativeSyncApiImpl: ImmichPlugin, NativeSyncApi, FlutterPlugin {
|
||||
// When we do not have access to the photo library, are on an older iOS version, or have no token available, fall back to a full sync
|
||||
return true
|
||||
}
|
||||
|
||||
|
||||
guard let _ = try? PHPhotoLibrary.shared().fetchPersistentChanges(since: storedToken) else {
|
||||
// Cannot fetch persistent changes
|
||||
return true
|
||||
}
|
||||
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
|
||||
func getAlbums() throws -> [PlatformAlbum] {
|
||||
var albums: [PlatformAlbum] = []
|
||||
|
||||
|
||||
albumTypes.forEach { type in
|
||||
let collections = PHAssetCollection.fetchAssetCollections(with: type, subtype: .any, options: nil)
|
||||
for i in 0..<collections.count {
|
||||
let album = collections.object(at: i)
|
||||
|
||||
|
||||
// Ignore recovered album
|
||||
if(album.assetCollectionSubtype.rawValue == self.recoveredAlbumSubType) {
|
||||
continue;
|
||||
}
|
||||
|
||||
|
||||
let options = PHFetchOptions()
|
||||
options.sortDescriptors = [NSSortDescriptor(key: "modificationDate", ascending: false)]
|
||||
options.includeHiddenAssets = false
|
||||
|
||||
|
||||
let assets = getAssetsFromAlbum(in: album, options: options)
|
||||
|
||||
|
||||
let isCloud = album.assetCollectionSubtype == .albumCloudShared || album.assetCollectionSubtype == .albumMyPhotoStream
|
||||
|
||||
|
||||
var domainAlbum = PlatformAlbum(
|
||||
id: album.localIdentifier,
|
||||
name: album.localizedTitle!,
|
||||
@@ -115,57 +115,57 @@ class NativeSyncApiImpl: ImmichPlugin, NativeSyncApi, FlutterPlugin {
|
||||
isCloud: isCloud,
|
||||
assetCount: Int64(assets.count)
|
||||
)
|
||||
|
||||
|
||||
if let firstAsset = assets.firstObject {
|
||||
domainAlbum.updatedAt = firstAsset.modificationDate.map { Int64($0.timeIntervalSince1970) }
|
||||
}
|
||||
|
||||
|
||||
albums.append(domainAlbum)
|
||||
}
|
||||
}
|
||||
return albums.sorted { $0.id < $1.id }
|
||||
}
|
||||
|
||||
|
||||
func getMediaChanges() throws -> SyncDelta {
|
||||
guard #available(iOS 16, *) else {
|
||||
throw PigeonError(code: "UNSUPPORTED_OS", message: "This feature requires iOS 16 or later.", details: nil)
|
||||
}
|
||||
|
||||
|
||||
guard PHPhotoLibrary.authorizationStatus(for: .readWrite) == .authorized else {
|
||||
throw PigeonError(code: "NO_AUTH", message: "No photo library access", details: nil)
|
||||
}
|
||||
|
||||
|
||||
guard let storedToken = getChangeToken() else {
|
||||
// No token exists, definitely need a full sync
|
||||
print("MediaManager::getMediaChanges: No token found")
|
||||
throw PigeonError(code: "NO_TOKEN", message: "No stored change token", details: nil)
|
||||
}
|
||||
|
||||
|
||||
let currentToken = PHPhotoLibrary.shared().currentChangeToken
|
||||
if storedToken == currentToken {
|
||||
return SyncDelta(hasChanges: false, updates: [], deletes: [], assetAlbums: [:])
|
||||
}
|
||||
|
||||
|
||||
do {
|
||||
let changes = try PHPhotoLibrary.shared().fetchPersistentChanges(since: storedToken)
|
||||
|
||||
|
||||
var updatedAssets: Set<AssetWrapper> = []
|
||||
var deletedAssets: Set<String> = []
|
||||
|
||||
|
||||
for change in changes {
|
||||
guard let details = try? change.changeDetails(for: PHObjectType.asset) else { continue }
|
||||
|
||||
|
||||
let updated = details.updatedLocalIdentifiers.union(details.insertedLocalIdentifiers)
|
||||
deletedAssets.formUnion(details.deletedLocalIdentifiers)
|
||||
|
||||
|
||||
if (updated.isEmpty) { continue }
|
||||
|
||||
|
||||
let options = PHFetchOptions()
|
||||
options.includeHiddenAssets = false
|
||||
let result = PHAsset.fetchAssets(withLocalIdentifiers: Array(updated), options: options)
|
||||
for i in 0..<result.count {
|
||||
let asset = result.object(at: i)
|
||||
|
||||
|
||||
// AssetWrapper only uses the id for comparison. Multiple changes can contain the same asset, so skip duplicate entries
|
||||
let predicate = PlatformAsset(
|
||||
id: asset.localIdentifier,
|
||||
@@ -178,25 +178,25 @@ class NativeSyncApiImpl: ImmichPlugin, NativeSyncApi, FlutterPlugin {
|
||||
if (updatedAssets.contains(AssetWrapper(with: predicate))) {
|
||||
continue
|
||||
}
|
||||
|
||||
|
||||
let domainAsset = AssetWrapper(with: asset.toPlatformAsset())
|
||||
updatedAssets.insert(domainAsset)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
let updates = Array(updatedAssets.map { $0.asset })
|
||||
return SyncDelta(hasChanges: true, updates: updates, deletes: Array(deletedAssets), assetAlbums: buildAssetAlbumsMap(assets: updates))
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
private func buildAssetAlbumsMap(assets: Array<PlatformAsset>) -> [String: [String]] {
|
||||
guard !assets.isEmpty else {
|
||||
return [:]
|
||||
}
|
||||
|
||||
|
||||
var albumAssets: [String: [String]] = [:]
|
||||
|
||||
|
||||
for type in albumTypes {
|
||||
let collections = PHAssetCollection.fetchAssetCollections(with: type, subtype: .any, options: nil)
|
||||
collections.enumerateObjects { (album, _, _) in
|
||||
@@ -211,13 +211,13 @@ class NativeSyncApiImpl: ImmichPlugin, NativeSyncApi, FlutterPlugin {
|
||||
}
|
||||
return albumAssets
|
||||
}
|
||||
|
||||
|
||||
func getAssetIdsForAlbum(albumId: String) throws -> [String] {
|
||||
let collections = PHAssetCollection.fetchAssetCollections(withLocalIdentifiers: [albumId], options: nil)
|
||||
guard let album = collections.firstObject else {
|
||||
return []
|
||||
}
|
||||
|
||||
|
||||
var ids: [String] = []
|
||||
let options = PHFetchOptions()
|
||||
options.includeHiddenAssets = false
|
||||
@@ -227,13 +227,13 @@ class NativeSyncApiImpl: ImmichPlugin, NativeSyncApi, FlutterPlugin {
|
||||
}
|
||||
return ids
|
||||
}
|
||||
|
||||
|
||||
func getAssetsCountSince(albumId: String, timestamp: Int64) throws -> Int64 {
|
||||
let collections = PHAssetCollection.fetchAssetCollections(withLocalIdentifiers: [albumId], options: nil)
|
||||
guard let album = collections.firstObject else {
|
||||
return 0
|
||||
}
|
||||
|
||||
|
||||
let date = NSDate(timeIntervalSince1970: TimeInterval(timestamp))
|
||||
let options = PHFetchOptions()
|
||||
options.predicate = NSPredicate(format: "creationDate > %@ OR modificationDate > %@", date, date)
|
||||
@@ -241,32 +241,32 @@ class NativeSyncApiImpl: ImmichPlugin, NativeSyncApi, FlutterPlugin {
|
||||
let assets = getAssetsFromAlbum(in: album, options: options)
|
||||
return Int64(assets.count)
|
||||
}
|
||||
|
||||
|
||||
func getAssetsForAlbum(albumId: String, updatedTimeCond: Int64?) throws -> [PlatformAsset] {
|
||||
let collections = PHAssetCollection.fetchAssetCollections(withLocalIdentifiers: [albumId], options: nil)
|
||||
guard let album = collections.firstObject else {
|
||||
return []
|
||||
}
|
||||
|
||||
|
||||
let options = PHFetchOptions()
|
||||
options.includeHiddenAssets = false
|
||||
if(updatedTimeCond != nil) {
|
||||
let date = NSDate(timeIntervalSince1970: TimeInterval(updatedTimeCond!))
|
||||
options.predicate = NSPredicate(format: "creationDate > %@ OR modificationDate > %@", date, date)
|
||||
}
|
||||
|
||||
|
||||
let result = getAssetsFromAlbum(in: album, options: options)
|
||||
if(result.count == 0) {
|
||||
return []
|
||||
}
|
||||
|
||||
|
||||
var assets: [PlatformAsset] = []
|
||||
result.enumerateObjects { (asset, _, _) in
|
||||
assets.append(asset.toPlatformAsset())
|
||||
}
|
||||
return assets
|
||||
}
|
||||
|
||||
|
||||
func hashAssets(assetIds: [String], allowNetworkAccess: Bool, completion: @escaping (Result<[HashResult], Error>) -> Void) {
|
||||
if let prevTask = hashTask {
|
||||
prevTask.cancel()
|
||||
@@ -284,11 +284,11 @@ class NativeSyncApiImpl: ImmichPlugin, NativeSyncApi, FlutterPlugin {
|
||||
missingAssetIds.remove(asset.localIdentifier)
|
||||
assets.append(asset)
|
||||
}
|
||||
|
||||
|
||||
if Task.isCancelled {
|
||||
return self?.completeWhenActive(for: completion, with: Self.hashCancelled)
|
||||
}
|
||||
|
||||
|
||||
await withTaskGroup(of: HashResult?.self) { taskGroup in
|
||||
var results = [HashResult]()
|
||||
results.reserveCapacity(assets.count)
|
||||
@@ -301,28 +301,28 @@ class NativeSyncApiImpl: ImmichPlugin, NativeSyncApi, FlutterPlugin {
|
||||
return await self.hashAsset(asset, allowNetworkAccess: allowNetworkAccess)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
for await result in taskGroup {
|
||||
guard let result = result else {
|
||||
return self?.completeWhenActive(for: completion, with: Self.hashCancelled)
|
||||
}
|
||||
results.append(result)
|
||||
}
|
||||
|
||||
|
||||
for missing in missingAssetIds {
|
||||
results.append(HashResult(assetId: missing, error: "Asset not found in library", hash: nil))
|
||||
}
|
||||
|
||||
|
||||
return self?.completeWhenActive(for: completion, with: .success(results))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
func cancelHashing() {
|
||||
hashTask?.cancel()
|
||||
hashTask = nil
|
||||
}
|
||||
|
||||
|
||||
private func hashAsset(_ asset: PHAsset, allowNetworkAccess: Bool) async -> HashResult? {
|
||||
class RequestRef {
|
||||
var id: PHAssetResourceDataRequestID?
|
||||
@@ -332,21 +332,21 @@ class NativeSyncApiImpl: ImmichPlugin, NativeSyncApi, FlutterPlugin {
|
||||
if Task.isCancelled {
|
||||
return nil
|
||||
}
|
||||
|
||||
|
||||
guard let resource = asset.getResource() else {
|
||||
return HashResult(assetId: asset.localIdentifier, error: "Cannot get asset resource", hash: nil)
|
||||
}
|
||||
|
||||
|
||||
if Task.isCancelled {
|
||||
return nil
|
||||
}
|
||||
|
||||
|
||||
let options = PHAssetResourceRequestOptions()
|
||||
options.isNetworkAccessAllowed = allowNetworkAccess
|
||||
|
||||
|
||||
return await withCheckedContinuation { continuation in
|
||||
var hasher = Insecure.SHA1()
|
||||
|
||||
|
||||
requestRef.id = PHAssetResourceManager.default().requestData(
|
||||
for: resource,
|
||||
options: options,
|
||||
@@ -377,7 +377,11 @@ class NativeSyncApiImpl: ImmichPlugin, NativeSyncApi, FlutterPlugin {
|
||||
PHAssetResourceManager.default().cancelDataRequest(requestId)
|
||||
})
|
||||
}
|
||||
|
||||
|
||||
func getTrashedAssets() throws -> [String: [PlatformAsset]] {
|
||||
throw PigeonError(code: "UNSUPPORTED_OS", message: "This feature not supported on iOS.", details: nil)
|
||||
}
|
||||
|
||||
private func getAssetsFromAlbum(in album: PHAssetCollection, options: PHFetchOptions) -> PHFetchResult<PHAsset> {
|
||||
// Ensure we actually get all assets for the Recents album
|
||||
if (album.assetCollectionSubtype == .smartAlbumUserLibrary) {
|
||||
|
||||
@@ -16,42 +16,92 @@
|
||||
default_platform(:ios)
|
||||
|
||||
platform :ios do
|
||||
desc "iOS Release to TestFlight"
|
||||
lane :release_ci do
|
||||
# Setup CI environment
|
||||
setup_ci
|
||||
|
||||
# Load App Store Connect API Key
|
||||
api_key = app_store_connect_api_key(
|
||||
# Constants
|
||||
TEAM_ID = "2F67MQ8R79"
|
||||
CODE_SIGN_IDENTITY = "Apple Distribution: Hau Tran (#{TEAM_ID})"
|
||||
BASE_BUNDLE_ID = "app.alextran.immich"
|
||||
|
||||
# Helper method to get App Store Connect API key
|
||||
def get_api_key
|
||||
app_store_connect_api_key(
|
||||
key_id: ENV["APP_STORE_CONNECT_API_KEY_ID"],
|
||||
issuer_id: ENV["APP_STORE_CONNECT_API_KEY_ISSUER_ID"],
|
||||
key_filepath: "api_key.json"
|
||||
key_filepath: "#{Dir.home}/.appstoreconnect/private_keys/AuthKey_#{ENV['APP_STORE_CONNECT_API_KEY_ID']}.p8",
|
||||
duration: 1200,
|
||||
in_house: false
|
||||
)
|
||||
end
|
||||
|
||||
# Helper method to get version from pubspec.yaml
|
||||
def get_version_from_pubspec
|
||||
require 'yaml'
|
||||
|
||||
pubspec_path = File.join(Dir.pwd, "../..", "pubspec.yaml")
|
||||
pubspec = YAML.load_file(pubspec_path)
|
||||
|
||||
version_string = pubspec['version']
|
||||
version_string ? version_string.split('+').first : nil
|
||||
end
|
||||
|
||||
# Helper method to configure code signing for all targets
|
||||
def configure_code_signing(bundle_id_suffix: "")
|
||||
bundle_suffix = bundle_id_suffix.empty? ? "" : ".#{bundle_id_suffix}"
|
||||
|
||||
# Import certificate and provisioning profile
|
||||
import_certificate(
|
||||
certificate_path: "certificate.p12",
|
||||
certificate_password: ENV["IOS_CERTIFICATE_PASSWORD"],
|
||||
keychain_name: ENV["KEYCHAIN_NAME"],
|
||||
keychain_password: ENV["KEYCHAIN_PASSWORD"]
|
||||
)
|
||||
|
||||
# Install provisioning profile
|
||||
install_provisioning_profile(path: "profile.mobileprovision")
|
||||
|
||||
# Configure code signing
|
||||
# Runner (main app)
|
||||
update_code_signing_settings(
|
||||
use_automatic_signing: false,
|
||||
path: "./Runner.xcodeproj",
|
||||
team_id: ENV["FASTLANE_TEAM_ID"],
|
||||
profile_name: "app.alextran.immich AppStore"
|
||||
team_id: ENV["FASTLANE_TEAM_ID"] || TEAM_ID,
|
||||
code_sign_identity: CODE_SIGN_IDENTITY,
|
||||
bundle_identifier: "#{BASE_BUNDLE_ID}#{bundle_suffix}",
|
||||
profile_name: "#{BASE_BUNDLE_ID}#{bundle_suffix} AppStore",
|
||||
targets: ["Runner"]
|
||||
)
|
||||
|
||||
# ShareExtension
|
||||
update_code_signing_settings(
|
||||
use_automatic_signing: false,
|
||||
path: "./Runner.xcodeproj",
|
||||
team_id: ENV["FASTLANE_TEAM_ID"] || TEAM_ID,
|
||||
code_sign_identity: CODE_SIGN_IDENTITY,
|
||||
bundle_identifier: "#{BASE_BUNDLE_ID}#{bundle_suffix}.ShareExtension",
|
||||
profile_name: "#{BASE_BUNDLE_ID}#{bundle_suffix}.ShareExtension AppStore",
|
||||
targets: ["ShareExtension"]
|
||||
)
|
||||
|
||||
# WidgetExtension
|
||||
update_code_signing_settings(
|
||||
use_automatic_signing: false,
|
||||
path: "./Runner.xcodeproj",
|
||||
team_id: ENV["FASTLANE_TEAM_ID"] || TEAM_ID,
|
||||
code_sign_identity: CODE_SIGN_IDENTITY,
|
||||
bundle_identifier: "#{BASE_BUNDLE_ID}#{bundle_suffix}.Widget",
|
||||
profile_name: "#{BASE_BUNDLE_ID}#{bundle_suffix}.Widget AppStore",
|
||||
targets: ["WidgetExtension"]
|
||||
)
|
||||
end
|
||||
|
||||
# Helper method to build and upload to TestFlight
|
||||
def build_and_upload(
|
||||
api_key:,
|
||||
bundle_id_suffix: "",
|
||||
configuration: "Release",
|
||||
distribute_external: true,
|
||||
version_number: nil
|
||||
)
|
||||
bundle_suffix = bundle_id_suffix.empty? ? "" : ".#{bundle_id_suffix}"
|
||||
app_identifier = "#{BASE_BUNDLE_ID}#{bundle_suffix}"
|
||||
|
||||
# Set version number if provided
|
||||
if version_number
|
||||
increment_version_number(version_number: version_number)
|
||||
end
|
||||
|
||||
# Increment build number
|
||||
increment_build_number(
|
||||
build_number: latest_testflight_build_number(
|
||||
api_key: api_key,
|
||||
app_identifier: "app.alextran.immich"
|
||||
app_identifier: app_identifier
|
||||
) + 1,
|
||||
xcodeproj: "./Runner.xcodeproj"
|
||||
)
|
||||
@@ -60,38 +110,137 @@ platform :ios do
|
||||
build_app(
|
||||
scheme: "Runner",
|
||||
workspace: "Runner.xcworkspace",
|
||||
configuration: configuration,
|
||||
export_method: "app-store",
|
||||
xcargs: "-skipMacroValidation CODE_SIGN_IDENTITY='#{CODE_SIGN_IDENTITY}' CODE_SIGN_STYLE=Manual",
|
||||
export_options: {
|
||||
provisioningProfiles: {
|
||||
"app.alextran.immich" => "app.alextran.immich AppStore"
|
||||
}
|
||||
"#{app_identifier}" => "#{app_identifier} AppStore",
|
||||
"#{app_identifier}.ShareExtension" => "#{app_identifier}.ShareExtension AppStore",
|
||||
"#{app_identifier}.Widget" => "#{app_identifier}.Widget AppStore"
|
||||
},
|
||||
signingStyle: "manual",
|
||||
signingCertificate: CODE_SIGN_IDENTITY
|
||||
}
|
||||
)
|
||||
|
||||
# Upload to TestFlight
|
||||
upload_to_testflight(
|
||||
api_key: api_key,
|
||||
skip_waiting_for_build_processing: true
|
||||
skip_waiting_for_build_processing: true,
|
||||
distribute_external: distribute_external
|
||||
)
|
||||
end
|
||||
|
||||
desc "iOS Development Build to TestFlight (requires separate bundle ID)"
|
||||
lane :gha_testflight_dev do
|
||||
api_key = get_api_key
|
||||
|
||||
# Install development provisioning profiles
|
||||
install_provisioning_profile(path: "profile_dev.mobileprovision")
|
||||
install_provisioning_profile(path: "profile_dev_share.mobileprovision")
|
||||
install_provisioning_profile(path: "profile_dev_widget.mobileprovision")
|
||||
|
||||
# Configure code signing for dev bundle IDs
|
||||
configure_code_signing(bundle_id_suffix: "development")
|
||||
|
||||
# Build and upload
|
||||
build_and_upload(
|
||||
api_key: api_key,
|
||||
bundle_id_suffix: "development",
|
||||
configuration: "Profile",
|
||||
distribute_external: false
|
||||
)
|
||||
end
|
||||
|
||||
desc "iOS Release"
|
||||
lane :release do
|
||||
desc "iOS Release to TestFlight"
|
||||
lane :gha_release_prod do
|
||||
api_key = get_api_key
|
||||
|
||||
# Install provisioning profiles
|
||||
install_provisioning_profile(path: "profile.mobileprovision")
|
||||
install_provisioning_profile(path: "profile_share.mobileprovision")
|
||||
install_provisioning_profile(path: "profile_widget.mobileprovision")
|
||||
|
||||
|
||||
# Configure code signing for production bundle IDs
|
||||
configure_code_signing
|
||||
|
||||
# Build and upload with version number
|
||||
build_and_upload(
|
||||
api_key: api_key,
|
||||
version_number: get_version_from_pubspec,
|
||||
distribute_external: false,
|
||||
)
|
||||
end
|
||||
|
||||
desc "iOS Manual Release"
|
||||
lane :release_manual do
|
||||
enable_automatic_code_signing(
|
||||
path: "./Runner.xcodeproj",
|
||||
targets: ["Runner", "ShareExtension", "WidgetExtension"]
|
||||
)
|
||||
|
||||
increment_version_number(
|
||||
version_number: "2.2.0"
|
||||
version_number: get_version_from_pubspec
|
||||
)
|
||||
increment_build_number(
|
||||
build_number: latest_testflight_build_number + 1,
|
||||
)
|
||||
build_app(scheme: "Runner",
|
||||
workspace: "Runner.xcworkspace",
|
||||
xcargs: "-allowProvisioningUpdates")
|
||||
|
||||
# Build archive with automatic signing
|
||||
gym(
|
||||
scheme: "Runner",
|
||||
workspace: "Runner.xcworkspace",
|
||||
configuration: "Release",
|
||||
export_method: "app-store",
|
||||
skip_package_ipa: false,
|
||||
xcargs: "-skipMacroValidation -allowProvisioningUpdates",
|
||||
export_options: {
|
||||
method: "app-store",
|
||||
signingStyle: "automatic",
|
||||
uploadBitcode: false,
|
||||
uploadSymbols: true,
|
||||
compileBitcode: false
|
||||
}
|
||||
)
|
||||
|
||||
upload_to_testflight(
|
||||
skip_waiting_for_build_processing: true
|
||||
)
|
||||
end
|
||||
|
||||
desc "iOS Build Only (no TestFlight upload)"
|
||||
lane :gha_build_only do
|
||||
# Use the same build process as production, just skip the upload
|
||||
# This ensures PR builds validate the same way as production builds
|
||||
|
||||
# Install provisioning profiles (use development profiles for PR builds)
|
||||
install_provisioning_profile(path: "profile_dev.mobileprovision")
|
||||
install_provisioning_profile(path: "profile_dev_share.mobileprovision")
|
||||
install_provisioning_profile(path: "profile_dev_widget.mobileprovision")
|
||||
|
||||
# Configure code signing for dev bundle IDs
|
||||
configure_code_signing(bundle_id_suffix: "development")
|
||||
|
||||
# Build the app (same as gha_testflight_dev but without upload)
|
||||
build_app(
|
||||
scheme: "Runner",
|
||||
workspace: "Runner.xcworkspace",
|
||||
configuration: "Release",
|
||||
export_method: "app-store",
|
||||
skip_package_ipa: true,
|
||||
xcargs: "-skipMacroValidation CODE_SIGN_IDENTITY='#{CODE_SIGN_IDENTITY}' CODE_SIGN_STYLE=Manual",
|
||||
export_options: {
|
||||
provisioningProfiles: {
|
||||
"#{BASE_BUNDLE_ID}.development" => "#{BASE_BUNDLE_ID}.development AppStore",
|
||||
"#{BASE_BUNDLE_ID}.development.ShareExtension" => "#{BASE_BUNDLE_ID}.development.ShareExtension AppStore",
|
||||
"#{BASE_BUNDLE_ID}.development.Widget" => "#{BASE_BUNDLE_ID}.development.Widget AppStore"
|
||||
},
|
||||
signingStyle: "manual",
|
||||
signingCertificate: CODE_SIGN_IDENTITY
|
||||
}
|
||||
)
|
||||
end
|
||||
|
||||
end
|
||||
|
||||
@@ -15,13 +15,29 @@ For _fastlane_ installation instructions, see [Installing _fastlane_](https://do

## iOS

### ios release
### ios gha_testflight_dev

```sh
[bundle exec] fastlane ios release
[bundle exec] fastlane ios gha_testflight_dev
```

iOS Release
iOS Development Build to TestFlight (requires separate bundle ID)

### ios gha_release_prod

```sh
[bundle exec] fastlane ios gha_release_prod
```

iOS Release to TestFlight

### ios release_manual

```sh
[bundle exec] fastlane ios release_manual
```

iOS Manual Release

----
@@ -53,3 +53,11 @@ const int kMinMonthsToEnableScrubberSnap = 12;
const String kImmichAppStoreLink = "https://apps.apple.com/app/immich/id6449244941";
const String kImmichPlayStoreLink = "https://play.google.com/store/apps/details?id=app.alextran.immich";
const String kImmichLatestRelease = "https://github.com/immich-app/immich/releases/latest";

const int kPhotoTabIndex = 0;
const int kSearchTabIndex = 1;
const int kAlbumTabIndex = 2;
const int kLibraryTabIndex = 3;

// Workaround for SQLite's variable limit (SQLITE_MAX_VARIABLE_NUMBER = 32766)
const int kDriftMaxChunk = 32000;
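The kDriftMaxChunk constant exists so callers can keep `WHERE ... IN` parameter lists below SQLite's variable limit. A minimal sketch of how such chunking might look; the `chunkIds` helper is hypothetical and only illustrates the intended use of the constant:

```dart
// Hypothetical helper: split a long id list into batches that stay below
// SQLite's variable limit (kDriftMaxChunk, defined above), so each batch
// can be issued as one query.
Iterable<List<String>> chunkIds(List<String> ids, {int size = kDriftMaxChunk}) sync* {
  for (var i = 0; i < ids.length; i += size) {
    yield ids.sublist(i, i + size > ids.length ? ids.length : i + size);
  }
}
```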
@@ -177,6 +177,12 @@ class BackgroundWorkerBgService extends BackgroundWorkerFlutterApi {
|
||||
}
|
||||
|
||||
Future<void> _cleanup() async {
|
||||
await runZonedGuarded(_handleCleanup, (error, stack) {
|
||||
dPrint(() => "Error during background worker cleanup: $error, $stack");
|
||||
});
|
||||
}
|
||||
|
||||
Future<void> _handleCleanup() async {
|
||||
// If ref is null, it means the service was never initialized properly
|
||||
if (_isCleanedUp || _ref == null) {
|
||||
return;
|
||||
@@ -186,11 +192,16 @@ class BackgroundWorkerBgService extends BackgroundWorkerFlutterApi {
|
||||
_isCleanedUp = true;
|
||||
final backgroundSyncManager = _ref?.read(backgroundSyncProvider);
|
||||
final nativeSyncApi = _ref?.read(nativeSyncApiProvider);
|
||||
|
||||
await _drift.close();
|
||||
await _driftLogger.close();
|
||||
|
||||
_ref?.dispose();
|
||||
_ref = null;
|
||||
|
||||
_cancellationToken.cancel();
|
||||
_logger.info("Cleaning up background worker");
|
||||
|
||||
final cleanupFutures = [
|
||||
nativeSyncApi?.cancelHashing(),
|
||||
workerManagerPatch.dispose().catchError((_) async {
|
||||
@@ -199,8 +210,7 @@ class BackgroundWorkerBgService extends BackgroundWorkerFlutterApi {
|
||||
}),
|
||||
LogService.I.dispose(),
|
||||
Store.dispose(),
|
||||
_drift.close(),
|
||||
_driftLogger.close(),
|
||||
|
||||
backgroundSyncManager?.cancel(),
|
||||
];
|
||||
|
||||
@@ -239,7 +249,7 @@ class BackgroundWorkerBgService extends BackgroundWorkerFlutterApi {
|
||||
final networkCapabilities = await _ref?.read(connectivityApiProvider).getCapabilities() ?? [];
|
||||
return _ref
|
||||
?.read(uploadServiceProvider)
|
||||
.startBackupWithHttpClient(currentUser.id, networkCapabilities.hasWifi, _cancellationToken);
|
||||
.startBackupWithHttpClient(currentUser.id, networkCapabilities.isUnmetered, _cancellationToken);
|
||||
},
|
||||
(error, stack) {
|
||||
dPrint(() => "Error in backup zone $error, $stack");
|
||||
|
||||
@@ -2,8 +2,10 @@ import 'package:flutter/services.dart';
|
||||
import 'package:immich_mobile/constants/constants.dart';
|
||||
import 'package:immich_mobile/domain/models/album/local_album.model.dart';
|
||||
import 'package:immich_mobile/domain/models/asset/base_asset.model.dart';
|
||||
import 'package:immich_mobile/extensions/platform_extensions.dart';
|
||||
import 'package:immich_mobile/infrastructure/repositories/local_album.repository.dart';
|
||||
import 'package:immich_mobile/infrastructure/repositories/local_asset.repository.dart';
|
||||
import 'package:immich_mobile/infrastructure/repositories/trashed_local_asset.repository.dart';
|
||||
import 'package:immich_mobile/platform/native_sync_api.g.dart';
|
||||
import 'package:logging/logging.dart';
|
||||
|
||||
@@ -13,6 +15,7 @@ class HashService {
|
||||
final int _batchSize;
|
||||
final DriftLocalAlbumRepository _localAlbumRepository;
|
||||
final DriftLocalAssetRepository _localAssetRepository;
|
||||
final DriftTrashedLocalAssetRepository _trashedLocalAssetRepository;
|
||||
final NativeSyncApi _nativeSyncApi;
|
||||
final bool Function()? _cancelChecker;
|
||||
final _log = Logger('HashService');
|
||||
@@ -20,11 +23,13 @@ class HashService {
|
||||
HashService({
|
||||
required DriftLocalAlbumRepository localAlbumRepository,
|
||||
required DriftLocalAssetRepository localAssetRepository,
|
||||
required DriftTrashedLocalAssetRepository trashedLocalAssetRepository,
|
||||
required NativeSyncApi nativeSyncApi,
|
||||
bool Function()? cancelChecker,
|
||||
int? batchSize,
|
||||
}) : _localAlbumRepository = localAlbumRepository,
|
||||
_localAssetRepository = localAssetRepository,
|
||||
_trashedLocalAssetRepository = trashedLocalAssetRepository,
|
||||
_cancelChecker = cancelChecker,
|
||||
_nativeSyncApi = nativeSyncApi,
|
||||
_batchSize = batchSize ?? kBatchHashFileLimit;
|
||||
@@ -49,6 +54,14 @@ class HashService {
|
||||
await _hashAssets(album, assetsToHash);
|
||||
}
|
||||
}
|
||||
if (CurrentPlatform.isAndroid && localAlbums.isNotEmpty) {
|
||||
final backupAlbumIds = localAlbums.map((e) => e.id);
|
||||
final trashedToHash = await _trashedLocalAssetRepository.getAssetsToHash(backupAlbumIds);
|
||||
if (trashedToHash.isNotEmpty) {
|
||||
final pseudoAlbum = LocalAlbum(id: '-pseudoAlbum', name: 'Trash', updatedAt: DateTime.now());
|
||||
await _hashAssets(pseudoAlbum, trashedToHash, isTrashed: true);
|
||||
}
|
||||
}
|
||||
} on PlatformException catch (e) {
|
||||
if (e.code == _kHashCancelledCode) {
|
||||
_log.warning("Hashing cancelled by platform");
|
||||
@@ -65,7 +78,7 @@ class HashService {
|
||||
/// Processes a list of [LocalAsset]s, storing their hashes and updating the DB entries for
/// those that were successfully hashed. Hashes are looked up in the [LocalAssetHashEntity]
/// table by local id. Only missing entries are newly hashed and added to the DB.
|
||||
Future<void> _hashAssets(LocalAlbum album, List<LocalAsset> assetsToHash) async {
|
||||
Future<void> _hashAssets(LocalAlbum album, List<LocalAsset> assetsToHash, {bool isTrashed = false}) async {
|
||||
final toHash = <String, LocalAsset>{};
|
||||
|
||||
for (final asset in assetsToHash) {
|
||||
@@ -76,16 +89,16 @@ class HashService {
|
||||
|
||||
toHash[asset.id] = asset;
|
||||
if (toHash.length == _batchSize) {
|
||||
await _processBatch(album, toHash);
|
||||
await _processBatch(album, toHash, isTrashed);
|
||||
toHash.clear();
|
||||
}
|
||||
}
|
||||
|
||||
await _processBatch(album, toHash);
|
||||
await _processBatch(album, toHash, isTrashed);
|
||||
}
|
||||
|
||||
/// Processes a batch of assets.
|
||||
Future<void> _processBatch(LocalAlbum album, Map<String, LocalAsset> toHash) async {
|
||||
Future<void> _processBatch(LocalAlbum album, Map<String, LocalAsset> toHash, bool isTrashed) async {
|
||||
if (toHash.isEmpty) {
|
||||
return;
|
||||
}
|
||||
@@ -120,7 +133,10 @@ class HashService {
|
||||
}
|
||||
|
||||
_log.fine("Hashed ${hashed.length}/${toHash.length} assets");
|
||||
|
||||
await _localAssetRepository.updateHashes(hashed);
|
||||
if (isTrashed) {
|
||||
await _trashedLocalAssetRepository.updateHashes(hashed);
|
||||
} else {
|
||||
await _localAssetRepository.updateHashes(hashed);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
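Since the HashService constructor now also requires the trashed-asset repository, call sites have to pass the extra dependency. A minimal sketch of the updated wiring; `buildHashService` is an illustrative wrapper and the repository/API instances are assumed to come from the app's providers, only the constructor shape is taken from this diff:

```dart
// Sketch: constructing HashService with the new DriftTrashedLocalAssetRepository
// dependency. Optional parameters (cancelChecker, batchSize) keep their defaults.
HashService buildHashService({
  required DriftLocalAlbumRepository localAlbumRepository,
  required DriftLocalAssetRepository localAssetRepository,
  required DriftTrashedLocalAssetRepository trashedLocalAssetRepository,
  required NativeSyncApi nativeSyncApi,
}) {
  return HashService(
    localAlbumRepository: localAlbumRepository,
    localAssetRepository: localAssetRepository,
    trashedLocalAssetRepository: trashedLocalAssetRepository,
    nativeSyncApi: nativeSyncApi,
  );
}
```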
@@ -4,9 +4,14 @@ import 'package:collection/collection.dart';
|
||||
import 'package:flutter/foundation.dart';
|
||||
import 'package:immich_mobile/domain/models/album/local_album.model.dart';
|
||||
import 'package:immich_mobile/domain/models/asset/base_asset.model.dart';
|
||||
import 'package:immich_mobile/domain/models/store.model.dart';
|
||||
import 'package:immich_mobile/entities/store.entity.dart';
|
||||
import 'package:immich_mobile/extensions/platform_extensions.dart';
|
||||
import 'package:immich_mobile/infrastructure/repositories/local_album.repository.dart';
|
||||
import 'package:immich_mobile/infrastructure/repositories/storage.repository.dart';
|
||||
import 'package:immich_mobile/infrastructure/repositories/trashed_local_asset.repository.dart';
|
||||
import 'package:immich_mobile/platform/native_sync_api.g.dart';
|
||||
import 'package:immich_mobile/repositories/local_files_manager.repository.dart';
|
||||
import 'package:immich_mobile/utils/datetime_helpers.dart';
|
||||
import 'package:immich_mobile/utils/diff.dart';
|
||||
import 'package:logging/logging.dart';
|
||||
@@ -14,15 +19,34 @@ import 'package:logging/logging.dart';
|
||||
class LocalSyncService {
|
||||
final DriftLocalAlbumRepository _localAlbumRepository;
|
||||
final NativeSyncApi _nativeSyncApi;
|
||||
final DriftTrashedLocalAssetRepository _trashedLocalAssetRepository;
|
||||
final LocalFilesManagerRepository _localFilesManager;
|
||||
final StorageRepository _storageRepository;
|
||||
final Logger _log = Logger("DeviceSyncService");
|
||||
|
||||
LocalSyncService({required DriftLocalAlbumRepository localAlbumRepository, required NativeSyncApi nativeSyncApi})
|
||||
: _localAlbumRepository = localAlbumRepository,
|
||||
_nativeSyncApi = nativeSyncApi;
|
||||
LocalSyncService({
|
||||
required DriftLocalAlbumRepository localAlbumRepository,
|
||||
required DriftTrashedLocalAssetRepository trashedLocalAssetRepository,
|
||||
required LocalFilesManagerRepository localFilesManager,
|
||||
required StorageRepository storageRepository,
|
||||
required NativeSyncApi nativeSyncApi,
|
||||
}) : _localAlbumRepository = localAlbumRepository,
|
||||
_trashedLocalAssetRepository = trashedLocalAssetRepository,
|
||||
_localFilesManager = localFilesManager,
|
||||
_storageRepository = storageRepository,
|
||||
_nativeSyncApi = nativeSyncApi;
|
||||
|
||||
Future<void> sync({bool full = false}) async {
|
||||
final Stopwatch stopwatch = Stopwatch()..start();
|
||||
try {
|
||||
if (CurrentPlatform.isAndroid && Store.get(StoreKey.manageLocalMediaAndroid, false)) {
|
||||
final hasPermission = await _localFilesManager.hasManageMediaPermission();
|
||||
if (hasPermission) {
|
||||
await _syncTrashedAssets();
|
||||
} else {
|
||||
_log.warning("syncTrashedAssets cannot proceed because MANAGE_MEDIA permission is missing");
|
||||
}
|
||||
}
|
||||
if (full || await _nativeSyncApi.shouldFullSync()) {
|
||||
_log.fine("Full sync request from ${full ? "user" : "native"}");
|
||||
return await fullSync();
|
||||
@@ -69,7 +93,6 @@ class LocalSyncService {
|
||||
await updateAlbum(dbAlbum, album);
|
||||
}
|
||||
}
|
||||
|
||||
await _nativeSyncApi.checkpointSync();
|
||||
} catch (e, s) {
|
||||
_log.severe("Error performing device sync", e, s);
|
||||
@@ -273,6 +296,48 @@ class LocalSyncService {
|
||||
bool _albumsEqual(LocalAlbum a, LocalAlbum b) {
|
||||
return a.name == b.name && a.assetCount == b.assetCount && a.updatedAt.isAtSameMomentAs(b.updatedAt);
|
||||
}
|
||||
|
||||
Future<void> _syncTrashedAssets() async {
|
||||
final trashedAssetMap = await _nativeSyncApi.getTrashedAssets();
|
||||
await processTrashedAssets(trashedAssetMap);
|
||||
}
|
||||
|
||||
@visibleForTesting
|
||||
Future<void> processTrashedAssets(Map<String, List<PlatformAsset>> trashedAssetMap) async {
|
||||
if (trashedAssetMap.isEmpty) {
|
||||
_log.info("syncTrashedAssets, No trashed assets found");
|
||||
}
|
||||
final trashedAssets = trashedAssetMap.cast<String, List<Object?>>().entries.expand(
|
||||
(entry) => entry.value.cast<PlatformAsset>().toTrashedAssets(entry.key),
|
||||
);
|
||||
|
||||
_log.fine("syncTrashedAssets, trashedAssets: ${trashedAssets.map((e) => e.asset.id)}");
|
||||
await _trashedLocalAssetRepository.processTrashSnapshot(trashedAssets);
|
||||
|
||||
final assetsToRestore = await _trashedLocalAssetRepository.getToRestore();
|
||||
if (assetsToRestore.isNotEmpty) {
|
||||
final restoredIds = await _localFilesManager.restoreAssetsFromTrash(assetsToRestore);
|
||||
await _trashedLocalAssetRepository.applyRestoredAssets(restoredIds);
|
||||
} else {
|
||||
_log.info("syncTrashedAssets, No remote assets found for restoration");
|
||||
}
|
||||
|
||||
final localAssetsToTrash = await _trashedLocalAssetRepository.getToTrash();
|
||||
if (localAssetsToTrash.isNotEmpty) {
|
||||
final mediaUrls = await Future.wait(
|
||||
localAssetsToTrash.values
|
||||
.expand((e) => e)
|
||||
.map((localAsset) => _storageRepository.getAssetEntityForAsset(localAsset).then((e) => e?.getMediaUrl())),
|
||||
);
|
||||
_log.info("Moving to trash ${mediaUrls.join(", ")} assets");
|
||||
final result = await _localFilesManager.moveToTrash(mediaUrls.nonNulls.toList());
|
||||
if (result) {
|
||||
await _trashedLocalAssetRepository.trashLocalAsset(localAssetsToTrash);
|
||||
}
|
||||
} else {
|
||||
_log.info("syncTrashedAssets, No assets found in backup-enabled albums for move to trash");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
extension on Iterable<PlatformAlbum> {
|
||||
@@ -290,20 +355,26 @@ extension on Iterable<PlatformAlbum> {
|
||||
|
||||
extension on Iterable<PlatformAsset> {
|
||||
List<LocalAsset> toLocalAssets() {
|
||||
return map(
|
||||
(e) => LocalAsset(
|
||||
id: e.id,
|
||||
name: e.name,
|
||||
checksum: null,
|
||||
type: AssetType.values.elementAtOrNull(e.type) ?? AssetType.other,
|
||||
createdAt: tryFromSecondsSinceEpoch(e.createdAt, isUtc: true) ?? DateTime.timestamp(),
|
||||
updatedAt: tryFromSecondsSinceEpoch(e.updatedAt, isUtc: true) ?? DateTime.timestamp(),
|
||||
width: e.width,
|
||||
height: e.height,
|
||||
durationInSeconds: e.durationInSeconds,
|
||||
orientation: e.orientation,
|
||||
isFavorite: e.isFavorite,
|
||||
),
|
||||
).toList();
|
||||
return map((e) => e.toLocalAsset()).toList();
|
||||
}
|
||||
|
||||
Iterable<TrashedAsset> toTrashedAssets(String albumId) {
|
||||
return map((e) => (albumId: albumId, asset: e.toLocalAsset()));
|
||||
}
|
||||
}
|
||||
|
||||
extension on PlatformAsset {
|
||||
LocalAsset toLocalAsset() => LocalAsset(
|
||||
id: id,
|
||||
name: name,
|
||||
checksum: null,
|
||||
type: AssetType.values.elementAtOrNull(type) ?? AssetType.other,
|
||||
createdAt: tryFromSecondsSinceEpoch(createdAt, isUtc: true) ?? DateTime.timestamp(),
|
||||
updatedAt: tryFromSecondsSinceEpoch(updatedAt, isUtc: true) ?? DateTime.timestamp(),
|
||||
width: width,
|
||||
height: height,
|
||||
durationInSeconds: durationInSeconds,
|
||||
isFavorite: isFavorite,
|
||||
orientation: orientation,
|
||||
);
|
||||
}
|
||||
|
||||
@@ -1,8 +1,15 @@
|
||||
import 'dart:async';
|
||||
|
||||
import 'package:immich_mobile/domain/models/store.model.dart';
|
||||
import 'package:immich_mobile/domain/models/sync_event.model.dart';
|
||||
import 'package:immich_mobile/entities/store.entity.dart';
|
||||
import 'package:immich_mobile/extensions/platform_extensions.dart';
|
||||
import 'package:immich_mobile/infrastructure/repositories/local_asset.repository.dart';
|
||||
import 'package:immich_mobile/infrastructure/repositories/storage.repository.dart';
|
||||
import 'package:immich_mobile/infrastructure/repositories/sync_api.repository.dart';
|
||||
import 'package:immich_mobile/infrastructure/repositories/sync_stream.repository.dart';
|
||||
import 'package:immich_mobile/infrastructure/repositories/trashed_local_asset.repository.dart';
|
||||
import 'package:immich_mobile/repositories/local_files_manager.repository.dart';
|
||||
import 'package:logging/logging.dart';
|
||||
import 'package:openapi/api.dart';
|
||||
|
||||
@@ -11,14 +18,26 @@ class SyncStreamService {
|
||||
|
||||
final SyncApiRepository _syncApiRepository;
|
||||
final SyncStreamRepository _syncStreamRepository;
|
||||
final DriftLocalAssetRepository _localAssetRepository;
|
||||
final DriftTrashedLocalAssetRepository _trashedLocalAssetRepository;
|
||||
final LocalFilesManagerRepository _localFilesManager;
|
||||
final StorageRepository _storageRepository;
|
||||
final bool Function()? _cancelChecker;
|
||||
|
||||
SyncStreamService({
|
||||
required SyncApiRepository syncApiRepository,
|
||||
required SyncStreamRepository syncStreamRepository,
|
||||
required DriftLocalAssetRepository localAssetRepository,
|
||||
required DriftTrashedLocalAssetRepository trashedLocalAssetRepository,
|
||||
required LocalFilesManagerRepository localFilesManager,
|
||||
required StorageRepository storageRepository,
|
||||
bool Function()? cancelChecker,
|
||||
}) : _syncApiRepository = syncApiRepository,
|
||||
_syncStreamRepository = syncStreamRepository,
|
||||
_localAssetRepository = localAssetRepository,
|
||||
_trashedLocalAssetRepository = trashedLocalAssetRepository,
|
||||
_localFilesManager = localFilesManager,
|
||||
_storageRepository = storageRepository,
|
||||
_cancelChecker = cancelChecker;
|
||||
|
||||
bool get isCancelled => _cancelChecker?.call() ?? false;
|
||||
@@ -83,7 +102,18 @@ class SyncStreamService {
|
||||
case SyncEntityType.partnerDeleteV1:
|
||||
return _syncStreamRepository.deletePartnerV1(data.cast());
|
||||
case SyncEntityType.assetV1:
|
||||
return _syncStreamRepository.updateAssetsV1(data.cast());
|
||||
final remoteSyncAssets = data.cast<SyncAssetV1>();
|
||||
await _syncStreamRepository.updateAssetsV1(remoteSyncAssets);
|
||||
if (CurrentPlatform.isAndroid && Store.get(StoreKey.manageLocalMediaAndroid, false)) {
|
||||
final hasPermission = await _localFilesManager.hasManageMediaPermission();
|
||||
if (hasPermission) {
|
||||
await _handleRemoteTrashed(remoteSyncAssets.where((e) => e.deletedAt != null).map((e) => e.checksum));
|
||||
await _applyRemoteRestoreToLocal();
|
||||
} else {
|
||||
_logger.warning("sync Trashed Assets cannot proceed because MANAGE_MEDIA permission is missing");
|
||||
}
|
||||
}
|
||||
return;
|
||||
case SyncEntityType.assetDeleteV1:
|
||||
return _syncStreamRepository.deleteAssetsV1(data.cast());
|
||||
case SyncEntityType.assetExifV1:
|
||||
@@ -132,7 +162,8 @@ class SyncStreamService {
|
||||
return;
|
||||
// SyncCompleteV1 is used to signal the completion of the sync process. Cleanup stale assets and signal completion
|
||||
case SyncEntityType.syncCompleteV1:
|
||||
return _syncStreamRepository.pruneAssets();
|
||||
return;
|
||||
// return _syncStreamRepository.pruneAssets();
|
||||
// Request to reset the client state. Clear everything related to remote entities
|
||||
case SyncEntityType.syncResetV1:
|
||||
return _syncStreamRepository.reset();
|
||||
@@ -211,4 +242,36 @@ class SyncStreamService {
|
||||
_logger.severe("Error processing AssetUploadReadyV1 websocket batch events", error, stackTrace);
|
||||
}
|
||||
}
|
||||
|
||||
Future<void> _handleRemoteTrashed(Iterable<String> checksums) async {
|
||||
if (checksums.isEmpty) {
|
||||
return Future.value();
|
||||
} else {
|
||||
final localAssetsToTrash = await _localAssetRepository.getAssetsFromBackupAlbums(checksums);
|
||||
if (localAssetsToTrash.isNotEmpty) {
|
||||
final mediaUrls = await Future.wait(
|
||||
localAssetsToTrash.values
|
||||
.expand((e) => e)
|
||||
.map((localAsset) => _storageRepository.getAssetEntityForAsset(localAsset).then((e) => e?.getMediaUrl())),
|
||||
);
|
||||
_logger.info("Moving to trash ${mediaUrls.join(", ")} assets");
|
||||
final result = await _localFilesManager.moveToTrash(mediaUrls.nonNulls.toList());
|
||||
if (result) {
|
||||
await _trashedLocalAssetRepository.trashLocalAsset(localAssetsToTrash);
|
||||
}
|
||||
} else {
|
||||
_logger.info("No assets found in backup-enabled albums for assets: $checksums");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Future<void> _applyRemoteRestoreToLocal() async {
|
||||
final assetsToRestore = await _trashedLocalAssetRepository.getToRestore();
|
||||
if (assetsToRestore.isNotEmpty) {
|
||||
final restoredIds = await _localFilesManager.restoreAssetsFromTrash(assetsToRestore);
|
||||
await _trashedLocalAssetRepository.applyRestoredAssets(restoredIds);
|
||||
} else {
|
||||
_logger.info("No remote assets found for restoration");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,40 @@
|
||||
import 'package:drift/drift.dart';
|
||||
import 'package:immich_mobile/domain/models/asset/base_asset.model.dart';
|
||||
import 'package:immich_mobile/infrastructure/entities/trashed_local_asset.entity.drift.dart';
|
||||
import 'package:immich_mobile/infrastructure/utils/asset.mixin.dart';
|
||||
import 'package:immich_mobile/infrastructure/utils/drift_default.mixin.dart';
|
||||
|
||||
@TableIndex.sql('CREATE INDEX IF NOT EXISTS idx_trashed_local_asset_checksum ON trashed_local_asset_entity (checksum)')
|
||||
@TableIndex.sql('CREATE INDEX IF NOT EXISTS idx_trashed_local_asset_album ON trashed_local_asset_entity (album_id)')
|
||||
class TrashedLocalAssetEntity extends Table with DriftDefaultsMixin, AssetEntityMixin {
|
||||
const TrashedLocalAssetEntity();
|
||||
|
||||
TextColumn get id => text()();
|
||||
|
||||
TextColumn get albumId => text()();
|
||||
|
||||
TextColumn get checksum => text().nullable()();
|
||||
|
||||
BoolColumn get isFavorite => boolean().withDefault(const Constant(false))();
|
||||
|
||||
IntColumn get orientation => integer().withDefault(const Constant(0))();
|
||||
|
||||
@override
|
||||
Set<Column> get primaryKey => {id, albumId};
|
||||
}
|
||||
|
||||
extension TrashedLocalAssetEntityDataDomainExtension on TrashedLocalAssetEntityData {
|
||||
LocalAsset toLocalAsset() => LocalAsset(
|
||||
id: id,
|
||||
name: name,
|
||||
checksum: checksum,
|
||||
type: type,
|
||||
createdAt: createdAt,
|
||||
updatedAt: updatedAt,
|
||||
durationInSeconds: durationInSeconds,
|
||||
isFavorite: isFavorite,
|
||||
height: height,
|
||||
width: width,
|
||||
orientation: orientation,
|
||||
);
|
||||
}
|
||||
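As a rough illustration of how the new table and its checksum index might be queried from inside the Drift database class, here is a sketch under the assumption of standard Drift query syntax; `findTrashedByChecksum` is illustrative and not part of this diff, but `trashedLocalAssetEntity` and `TrashedLocalAssetEntityData` are the generated names used elsewhere in this change:

```dart
// Sketch: look up trashed local assets by checksum using the generated
// trashedLocalAssetEntity table. Runs inside the Drift database class,
// so `select` and `trashedLocalAssetEntity` are in scope.
Future<List<TrashedLocalAssetEntityData>> findTrashedByChecksum(String checksum) {
  return (select(trashedLocalAssetEntity)
        ..where((t) => t.checksum.equals(checksum)))
      .get();
}
```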
1080
mobile/lib/infrastructure/entities/trashed_local_asset.entity.drift.dart
generated
Normal file
File diff suppressed because it is too large
@@ -10,6 +10,7 @@ import 'package:immich_mobile/infrastructure/entities/exif.entity.dart';
|
||||
import 'package:immich_mobile/infrastructure/entities/local_album.entity.dart';
|
||||
import 'package:immich_mobile/infrastructure/entities/local_album_asset.entity.dart';
|
||||
import 'package:immich_mobile/infrastructure/entities/local_asset.entity.dart';
|
||||
import 'package:immich_mobile/infrastructure/entities/trashed_local_asset.entity.dart';
|
||||
import 'package:immich_mobile/infrastructure/entities/memory.entity.dart';
|
||||
import 'package:immich_mobile/infrastructure/entities/memory_asset.entity.dart';
|
||||
import 'package:immich_mobile/infrastructure/entities/partner.entity.dart';
|
||||
@@ -62,6 +63,7 @@ class IsarDatabaseRepository implements IDatabaseRepository {
|
||||
PersonEntity,
|
||||
AssetFaceEntity,
|
||||
StoreEntity,
|
||||
TrashedLocalAssetEntity,
|
||||
],
|
||||
include: {'package:immich_mobile/infrastructure/entities/merged_asset.drift'},
|
||||
)
|
||||
@@ -93,7 +95,7 @@ class Drift extends $Drift implements IDatabaseRepository {
|
||||
}
|
||||
|
||||
@override
|
||||
int get schemaVersion => 12;
|
||||
int get schemaVersion => 13;
|
||||
|
||||
@override
|
||||
MigrationStrategy get migration => MigrationStrategy(
|
||||
@@ -178,6 +180,11 @@ class Drift extends $Drift implements IDatabaseRepository {
|
||||
);
|
||||
}
|
||||
},
|
||||
from12To13: (m, v13) async {
|
||||
await m.create(v13.trashedLocalAssetEntity);
|
||||
await m.createIndex(v13.idxTrashedLocalAssetChecksum);
|
||||
await m.createIndex(v13.idxTrashedLocalAssetAlbum);
|
||||
},
|
||||
),
|
||||
);
|
||||
|
||||
|
||||
@@ -37,9 +37,11 @@ import 'package:immich_mobile/infrastructure/entities/asset_face.entity.drift.da
|
||||
as i17;
|
||||
import 'package:immich_mobile/infrastructure/entities/store.entity.drift.dart'
|
||||
as i18;
|
||||
import 'package:immich_mobile/infrastructure/entities/merged_asset.drift.dart'
|
||||
import 'package:immich_mobile/infrastructure/entities/trashed_local_asset.entity.drift.dart'
|
||||
as i19;
|
||||
import 'package:drift/internal/modular.dart' as i20;
|
||||
import 'package:immich_mobile/infrastructure/entities/merged_asset.drift.dart'
|
||||
as i20;
|
||||
import 'package:drift/internal/modular.dart' as i21;
|
||||
|
||||
abstract class $Drift extends i0.GeneratedDatabase {
|
||||
$Drift(i0.QueryExecutor e) : super(e);
|
||||
@@ -77,9 +79,11 @@ abstract class $Drift extends i0.GeneratedDatabase {
|
||||
late final i17.$AssetFaceEntityTable assetFaceEntity = i17
|
||||
.$AssetFaceEntityTable(this);
|
||||
late final i18.$StoreEntityTable storeEntity = i18.$StoreEntityTable(this);
|
||||
i19.MergedAssetDrift get mergedAssetDrift => i20.ReadDatabaseContainer(
|
||||
late final i19.$TrashedLocalAssetEntityTable trashedLocalAssetEntity = i19
|
||||
.$TrashedLocalAssetEntityTable(this);
|
||||
i20.MergedAssetDrift get mergedAssetDrift => i21.ReadDatabaseContainer(
|
||||
this,
|
||||
).accessor<i19.MergedAssetDrift>(i19.MergedAssetDrift.new);
|
||||
).accessor<i20.MergedAssetDrift>(i20.MergedAssetDrift.new);
|
||||
@override
|
||||
Iterable<i0.TableInfo<i0.Table, Object?>> get allTables =>
|
||||
allSchemaEntities.whereType<i0.TableInfo<i0.Table, Object?>>();
|
||||
@@ -108,7 +112,10 @@ abstract class $Drift extends i0.GeneratedDatabase {
|
||||
personEntity,
|
||||
assetFaceEntity,
|
||||
storeEntity,
|
||||
trashedLocalAssetEntity,
|
||||
i11.idxLatLng,
|
||||
i19.idxTrashedLocalAssetChecksum,
|
||||
i19.idxTrashedLocalAssetAlbum,
|
||||
];
|
||||
@override
|
||||
i0.StreamQueryUpdateRules
|
||||
@@ -336,4 +343,9 @@ class $DriftManager {
|
||||
i17.$$AssetFaceEntityTableTableManager(_db, _db.assetFaceEntity);
|
||||
i18.$$StoreEntityTableTableManager get storeEntity =>
|
||||
i18.$$StoreEntityTableTableManager(_db, _db.storeEntity);
|
||||
i19.$$TrashedLocalAssetEntityTableTableManager get trashedLocalAssetEntity =>
|
||||
i19.$$TrashedLocalAssetEntityTableTableManager(
|
||||
_db,
|
||||
_db.trashedLocalAssetEntity,
|
||||
);
|
||||
}
|
||||
|
||||
@@ -5037,6 +5037,454 @@ final class Schema12 extends i0.VersionedSchema {
|
||||
);
|
||||
}
|
||||
|
||||
final class Schema13 extends i0.VersionedSchema {
|
||||
Schema13({required super.database}) : super(version: 13);
|
||||
@override
|
||||
late final List<i1.DatabaseSchemaEntity> entities = [
|
||||
userEntity,
|
||||
remoteAssetEntity,
|
||||
stackEntity,
|
||||
localAssetEntity,
|
||||
remoteAlbumEntity,
|
||||
localAlbumEntity,
|
||||
localAlbumAssetEntity,
|
||||
idxLocalAssetChecksum,
|
||||
idxRemoteAssetOwnerChecksum,
|
||||
uQRemoteAssetsOwnerChecksum,
|
||||
uQRemoteAssetsOwnerLibraryChecksum,
|
||||
idxRemoteAssetChecksum,
|
||||
authUserEntity,
|
||||
userMetadataEntity,
|
||||
partnerEntity,
|
||||
remoteExifEntity,
|
||||
remoteAlbumAssetEntity,
|
||||
remoteAlbumUserEntity,
|
||||
memoryEntity,
|
||||
memoryAssetEntity,
|
||||
personEntity,
|
||||
assetFaceEntity,
|
||||
storeEntity,
|
||||
trashedLocalAssetEntity,
|
||||
idxLatLng,
|
||||
idxTrashedLocalAssetChecksum,
|
||||
idxTrashedLocalAssetAlbum,
|
||||
];
|
||||
  late final Shape20 userEntity = Shape20(
    source: i0.VersionedTable(
      entityName: 'user_entity',
      withoutRowId: true,
      isStrict: true,
      tableConstraints: ['PRIMARY KEY(id)'],
      columns: [
        _column_0,
        _column_1,
        _column_3,
        _column_84,
        _column_85,
        _column_91,
      ],
      attachedDatabase: database,
    ),
    alias: null,
  );
  late final Shape17 remoteAssetEntity = Shape17(
    source: i0.VersionedTable(
      entityName: 'remote_asset_entity',
      withoutRowId: true,
      isStrict: true,
      tableConstraints: ['PRIMARY KEY(id)'],
      columns: [
        _column_1,
        _column_8,
        _column_9,
        _column_5,
        _column_10,
        _column_11,
        _column_12,
        _column_0,
        _column_13,
        _column_14,
        _column_15,
        _column_16,
        _column_17,
        _column_18,
        _column_19,
        _column_20,
        _column_21,
        _column_86,
      ],
      attachedDatabase: database,
    ),
    alias: null,
  );
  late final Shape3 stackEntity = Shape3(
    source: i0.VersionedTable(
      entityName: 'stack_entity',
      withoutRowId: true,
      isStrict: true,
      tableConstraints: ['PRIMARY KEY(id)'],
      columns: [_column_0, _column_9, _column_5, _column_15, _column_75],
      attachedDatabase: database,
    ),
    alias: null,
  );
  late final Shape2 localAssetEntity = Shape2(
    source: i0.VersionedTable(
      entityName: 'local_asset_entity',
      withoutRowId: true,
      isStrict: true,
      tableConstraints: ['PRIMARY KEY(id)'],
      columns: [
        _column_1,
        _column_8,
        _column_9,
        _column_5,
        _column_10,
        _column_11,
        _column_12,
        _column_0,
        _column_22,
        _column_14,
        _column_23,
      ],
      attachedDatabase: database,
    ),
    alias: null,
  );
  late final Shape9 remoteAlbumEntity = Shape9(
    source: i0.VersionedTable(
      entityName: 'remote_album_entity',
      withoutRowId: true,
      isStrict: true,
      tableConstraints: ['PRIMARY KEY(id)'],
      columns: [
        _column_0,
        _column_1,
        _column_56,
        _column_9,
        _column_5,
        _column_15,
        _column_57,
        _column_58,
        _column_59,
      ],
      attachedDatabase: database,
    ),
    alias: null,
  );
  late final Shape19 localAlbumEntity = Shape19(
    source: i0.VersionedTable(
      entityName: 'local_album_entity',
      withoutRowId: true,
      isStrict: true,
      tableConstraints: ['PRIMARY KEY(id)'],
      columns: [
        _column_0,
        _column_1,
        _column_5,
        _column_31,
        _column_32,
        _column_90,
        _column_33,
      ],
      attachedDatabase: database,
    ),
    alias: null,
  );
  late final Shape22 localAlbumAssetEntity = Shape22(
    source: i0.VersionedTable(
      entityName: 'local_album_asset_entity',
      withoutRowId: true,
      isStrict: true,
      tableConstraints: ['PRIMARY KEY(asset_id, album_id)'],
      columns: [_column_34, _column_35, _column_33],
      attachedDatabase: database,
    ),
    alias: null,
  );
  final i1.Index idxLocalAssetChecksum = i1.Index(
    'idx_local_asset_checksum',
    'CREATE INDEX IF NOT EXISTS idx_local_asset_checksum ON local_asset_entity (checksum)',
  );
  final i1.Index idxRemoteAssetOwnerChecksum = i1.Index(
    'idx_remote_asset_owner_checksum',
    'CREATE INDEX IF NOT EXISTS idx_remote_asset_owner_checksum ON remote_asset_entity (owner_id, checksum)',
  );
  final i1.Index uQRemoteAssetsOwnerChecksum = i1.Index(
    'UQ_remote_assets_owner_checksum',
    'CREATE UNIQUE INDEX IF NOT EXISTS UQ_remote_assets_owner_checksum ON remote_asset_entity (owner_id, checksum) WHERE(library_id IS NULL)',
  );
  final i1.Index uQRemoteAssetsOwnerLibraryChecksum = i1.Index(
    'UQ_remote_assets_owner_library_checksum',
    'CREATE UNIQUE INDEX IF NOT EXISTS UQ_remote_assets_owner_library_checksum ON remote_asset_entity (owner_id, library_id, checksum) WHERE(library_id IS NOT NULL)',
  );
  final i1.Index idxRemoteAssetChecksum = i1.Index(
    'idx_remote_asset_checksum',
    'CREATE INDEX IF NOT EXISTS idx_remote_asset_checksum ON remote_asset_entity (checksum)',
  );
  late final Shape21 authUserEntity = Shape21(
    source: i0.VersionedTable(
      entityName: 'auth_user_entity',
      withoutRowId: true,
      isStrict: true,
      tableConstraints: ['PRIMARY KEY(id)'],
      columns: [
        _column_0,
        _column_1,
        _column_3,
        _column_2,
        _column_84,
        _column_85,
        _column_92,
        _column_93,
        _column_7,
        _column_94,
      ],
      attachedDatabase: database,
    ),
    alias: null,
  );
  late final Shape4 userMetadataEntity = Shape4(
    source: i0.VersionedTable(
      entityName: 'user_metadata_entity',
      withoutRowId: true,
      isStrict: true,
      tableConstraints: ['PRIMARY KEY(user_id, "key")'],
      columns: [_column_25, _column_26, _column_27],
      attachedDatabase: database,
    ),
    alias: null,
  );
  late final Shape5 partnerEntity = Shape5(
    source: i0.VersionedTable(
      entityName: 'partner_entity',
      withoutRowId: true,
      isStrict: true,
      tableConstraints: ['PRIMARY KEY(shared_by_id, shared_with_id)'],
      columns: [_column_28, _column_29, _column_30],
      attachedDatabase: database,
    ),
    alias: null,
  );
  late final Shape8 remoteExifEntity = Shape8(
    source: i0.VersionedTable(
      entityName: 'remote_exif_entity',
      withoutRowId: true,
      isStrict: true,
      tableConstraints: ['PRIMARY KEY(asset_id)'],
      columns: [
        _column_36,
        _column_37,
        _column_38,
        _column_39,
        _column_40,
        _column_41,
        _column_11,
        _column_10,
        _column_42,
        _column_43,
        _column_44,
        _column_45,
        _column_46,
        _column_47,
        _column_48,
        _column_49,
        _column_50,
        _column_51,
        _column_52,
        _column_53,
        _column_54,
        _column_55,
      ],
      attachedDatabase: database,
    ),
    alias: null,
  );
  late final Shape7 remoteAlbumAssetEntity = Shape7(
    source: i0.VersionedTable(
      entityName: 'remote_album_asset_entity',
      withoutRowId: true,
      isStrict: true,
      tableConstraints: ['PRIMARY KEY(asset_id, album_id)'],
      columns: [_column_36, _column_60],
      attachedDatabase: database,
    ),
    alias: null,
  );
  late final Shape10 remoteAlbumUserEntity = Shape10(
    source: i0.VersionedTable(
      entityName: 'remote_album_user_entity',
      withoutRowId: true,
      isStrict: true,
      tableConstraints: ['PRIMARY KEY(album_id, user_id)'],
      columns: [_column_60, _column_25, _column_61],
      attachedDatabase: database,
    ),
    alias: null,
  );
  late final Shape11 memoryEntity = Shape11(
    source: i0.VersionedTable(
      entityName: 'memory_entity',
      withoutRowId: true,
      isStrict: true,
      tableConstraints: ['PRIMARY KEY(id)'],
      columns: [
        _column_0,
        _column_9,
        _column_5,
        _column_18,
        _column_15,
        _column_8,
        _column_62,
        _column_63,
        _column_64,
        _column_65,
        _column_66,
        _column_67,
      ],
      attachedDatabase: database,
    ),
    alias: null,
  );
  late final Shape12 memoryAssetEntity = Shape12(
    source: i0.VersionedTable(
      entityName: 'memory_asset_entity',
      withoutRowId: true,
      isStrict: true,
      tableConstraints: ['PRIMARY KEY(asset_id, memory_id)'],
      columns: [_column_36, _column_68],
      attachedDatabase: database,
    ),
    alias: null,
  );
  late final Shape14 personEntity = Shape14(
    source: i0.VersionedTable(
      entityName: 'person_entity',
      withoutRowId: true,
      isStrict: true,
      tableConstraints: ['PRIMARY KEY(id)'],
      columns: [
        _column_0,
        _column_9,
        _column_5,
        _column_15,
        _column_1,
        _column_69,
        _column_71,
        _column_72,
        _column_73,
        _column_74,
      ],
      attachedDatabase: database,
    ),
    alias: null,
  );
  late final Shape15 assetFaceEntity = Shape15(
    source: i0.VersionedTable(
      entityName: 'asset_face_entity',
      withoutRowId: true,
      isStrict: true,
      tableConstraints: ['PRIMARY KEY(id)'],
      columns: [
        _column_0,
        _column_36,
        _column_76,
        _column_77,
        _column_78,
        _column_79,
        _column_80,
        _column_81,
        _column_82,
        _column_83,
      ],
      attachedDatabase: database,
    ),
    alias: null,
  );
  late final Shape18 storeEntity = Shape18(
    source: i0.VersionedTable(
      entityName: 'store_entity',
      withoutRowId: true,
      isStrict: true,
      tableConstraints: ['PRIMARY KEY(id)'],
      columns: [_column_87, _column_88, _column_89],
      attachedDatabase: database,
    ),
    alias: null,
  );
  late final Shape23 trashedLocalAssetEntity = Shape23(
    source: i0.VersionedTable(
      entityName: 'trashed_local_asset_entity',
      withoutRowId: true,
      isStrict: true,
      tableConstraints: ['PRIMARY KEY(id, album_id)'],
      columns: [
        _column_1,
        _column_8,
        _column_9,
        _column_5,
        _column_10,
        _column_11,
        _column_12,
        _column_0,
        _column_95,
        _column_22,
        _column_14,
        _column_23,
      ],
      attachedDatabase: database,
    ),
    alias: null,
  );
  final i1.Index idxLatLng = i1.Index(
    'idx_lat_lng',
    'CREATE INDEX IF NOT EXISTS idx_lat_lng ON remote_exif_entity (latitude, longitude)',
  );
  final i1.Index idxTrashedLocalAssetChecksum = i1.Index(
    'idx_trashed_local_asset_checksum',
    'CREATE INDEX IF NOT EXISTS idx_trashed_local_asset_checksum ON trashed_local_asset_entity (checksum)',
  );
  final i1.Index idxTrashedLocalAssetAlbum = i1.Index(
    'idx_trashed_local_asset_album',
    'CREATE INDEX IF NOT EXISTS idx_trashed_local_asset_album ON trashed_local_asset_entity (album_id)',
  );
}
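A minimal, hypothetical sketch of how the new trashed_local_asset_entity table and the two trashed-asset indexes exposed by this schema snapshot could be created in the corresponding step migration. The callback name from12To13 matches the generated helpers further down; Migrator.createTable and Migrator.createIndex are standard drift APIs, but this code is illustrative only and is not part of the generated file or of this diff.

// Hypothetical migration callback for the 12 -> 13 schema bump (sketch only).
Future<void> from12To13(i1.Migrator m, Schema13 schema) async {
  // Create the trashed-local-asset table introduced in Schema13.
  await m.createTable(schema.trashedLocalAssetEntity);
  // Create its supporting indexes on checksum and album_id.
  await m.createIndex(schema.idxTrashedLocalAssetChecksum);
  await m.createIndex(schema.idxTrashedLocalAssetAlbum);
}
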
class Shape23 extends i0.VersionedTable {
  Shape23({required super.source, required super.alias}) : super.aliased();
  i1.GeneratedColumn<String> get name =>
      columnsByName['name']! as i1.GeneratedColumn<String>;
  i1.GeneratedColumn<int> get type =>
      columnsByName['type']! as i1.GeneratedColumn<int>;
  i1.GeneratedColumn<DateTime> get createdAt =>
      columnsByName['created_at']! as i1.GeneratedColumn<DateTime>;
  i1.GeneratedColumn<DateTime> get updatedAt =>
      columnsByName['updated_at']! as i1.GeneratedColumn<DateTime>;
  i1.GeneratedColumn<int> get width =>
      columnsByName['width']! as i1.GeneratedColumn<int>;
  i1.GeneratedColumn<int> get height =>
      columnsByName['height']! as i1.GeneratedColumn<int>;
  i1.GeneratedColumn<int> get durationInSeconds =>
      columnsByName['duration_in_seconds']! as i1.GeneratedColumn<int>;
  i1.GeneratedColumn<String> get id =>
      columnsByName['id']! as i1.GeneratedColumn<String>;
  i1.GeneratedColumn<String> get albumId =>
      columnsByName['album_id']! as i1.GeneratedColumn<String>;
  i1.GeneratedColumn<String> get checksum =>
      columnsByName['checksum']! as i1.GeneratedColumn<String>;
  i1.GeneratedColumn<bool> get isFavorite =>
      columnsByName['is_favorite']! as i1.GeneratedColumn<bool>;
  i1.GeneratedColumn<int> get orientation =>
      columnsByName['orientation']! as i1.GeneratedColumn<int>;
}
i1.GeneratedColumn<String> _column_95(String aliasedName) =>
    i1.GeneratedColumn<String>(
      'album_id',
      aliasedName,
      false,
      type: i1.DriftSqlType.string,
    );
i0.MigrationStepWithVersion migrationSteps({
  required Future<void> Function(i1.Migrator m, Schema2 schema) from1To2,
  required Future<void> Function(i1.Migrator m, Schema3 schema) from2To3,
@@ -5049,6 +5497,7 @@ i0.MigrationStepWithVersion migrationSteps({
  required Future<void> Function(i1.Migrator m, Schema10 schema) from9To10,
  required Future<void> Function(i1.Migrator m, Schema11 schema) from10To11,
  required Future<void> Function(i1.Migrator m, Schema12 schema) from11To12,
  required Future<void> Function(i1.Migrator m, Schema13 schema) from12To13,
}) {
  return (currentVersion, database) async {
    switch (currentVersion) {
@@ -5107,6 +5556,11 @@ i0.MigrationStepWithVersion migrationSteps({
        final migrator = i1.Migrator(database, schema);
        await from11To12(migrator, schema);
        return 12;
      case 12:
        final schema = Schema13(database: database);
        final migrator = i1.Migrator(database, schema);
        await from12To13(migrator, schema);
        return 13;
      default:
        throw ArgumentError.value('Unknown migration from $currentVersion');
    }
@@ -5125,6 +5579,7 @@ i1.OnUpgrade stepByStep({
  required Future<void> Function(i1.Migrator m, Schema10 schema) from9To10,
  required Future<void> Function(i1.Migrator m, Schema11 schema) from10To11,
  required Future<void> Function(i1.Migrator m, Schema12 schema) from11To12,
  required Future<void> Function(i1.Migrator m, Schema13 schema) from12To13,
}) => i0.VersionedSchema.stepByStepHelper(
  step: migrationSteps(
    from1To2: from1To2,
@@ -5138,5 +5593,6 @@ i1.OnUpgrade stepByStep({
    from9To10: from9To10,
    from10To11: from10To11,
    from11To12: from11To12,
    from12To13: from12To13,
  ),
);
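For reference, a hedged sketch of how the generated stepByStep helper above is typically wired into an application's drift database. The surrounding class, the callback bodies, and everything other than stepByStep and MigrationStrategy are placeholders assumed for illustration, not taken from this diff; the earlier callbacks are elided for brevity, so this fragment is not complete on its own.

// Illustrative wiring inside a @DriftDatabase class (sketch only).
@override
MigrationStrategy get migration => MigrationStrategy(
  onUpgrade: stepByStep(
    from1To2: (m, schema) async {
      // ... earlier steps elided ...
    },
    // from2To3 through from11To12 omitted for brevity.
    from12To13: (m, schema) async {
      // See the hypothetical from12To13 sketch shown earlier.
    },
  ),
);
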