mirror of https://github.com/immich-app/immich.git
synced 2026-02-15 13:28:24 +03:00

Compare commits: 1 commit, chore/open...shared-dee

Commit: 44b4239a4f
@@ -26,81 +26,7 @@
      "vitest.explorer",
      "ms-playwright.playwright",
      "ms-azuretools.vscode-docker"
    ],
    "settings": {
      "tasks": {
        "version": "2.0.0",
        "tasks": [
          {
            "label": "Fix Permissions, Install Dependencies",
            "type": "shell",
            "command": "[ -f /immich-devcontainer/container-start.sh ] && /immich-devcontainer/container-start.sh || exit 0",
            "isBackground": true,
            "presentation": {
              "echo": true,
              "reveal": "always",
              "focus": false,
              "panel": "dedicated",
              "showReuseMessage": true,
              "clear": false,
              "group": "Devcontainer tasks",
              "close": true
            },
            "runOptions": {
              "runOn": "default"
            },
            "problemMatcher": []
          },
          {
            "label": "Immich API Server (Nest)",
            "dependsOn": ["Fix Permissions, Install Dependencies"],
            "type": "shell",
            "command": "[ -f /immich-devcontainer/container-start-backend.sh ] && /immich-devcontainer/container-start-backend.sh || exit 0",
            "isBackground": true,
            "presentation": {
              "echo": true,
              "reveal": "always",
              "focus": false,
              "panel": "dedicated",
              "showReuseMessage": true,
              "clear": false,
              "group": "Devcontainer tasks",
              "close": true
            },
            "runOptions": {
              "runOn": "folderOpen"
            },
            "problemMatcher": []
          },
          {
            "label": "Immich Web Server (Vite)",
            "dependsOn": ["Fix Permissions, Install Dependencies"],
            "type": "shell",
            "command": "[ -f /immich-devcontainer/container-start-frontend.sh ] && /immich-devcontainer/container-start-frontend.sh || exit 0",
            "isBackground": true,
            "presentation": {
              "echo": true,
              "reveal": "always",
              "focus": false,
              "panel": "dedicated",
              "showReuseMessage": true,
              "clear": false,
              "group": "Devcontainer tasks",
              "close": true
            },
            "runOptions": {
              "runOn": "folderOpen"
            },
            "problemMatcher": []
          },
          {
            "label": "Build Immich CLI",
            "type": "shell",
            "command": "pnpm --filter cli build:dev"
          }
        ]
      }
    }
  ]
}
},
"features": {
.github/pull_request_template.md | 1 (vendored)

@@ -26,7 +26,6 @@ The `/api/something` endpoint is now `/api/something-else`

## Checklist:

- [ ] I have carefully read CONTRIBUTING.md
- [ ] I have performed a self-review of my own code
- [ ] I have made corresponding changes to the documentation if applicable
- [ ] I have no unrelated changes in the PR.
.github/workflows/build-mobile.yml | 7 (vendored)

@@ -178,12 +178,9 @@ jobs:
      contents: read
    # Run on main branch or workflow_dispatch, or on PRs/other branches (build only, no upload)
    if: ${{ !github.event.pull_request.head.repo.fork && fromJSON(needs.pre-job.outputs.should_run).mobile == true }}
    runs-on: macos-15
    runs-on: macos-latest

    steps:
      - name: Select Xcode 26
        run: sudo xcode-select -s /Applications/Xcode_26.2.app/Contents/Developer

      - name: Checkout code
        uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6
        with:
@@ -269,8 +266,6 @@
          ENVIRONMENT: ${{ inputs.environment || 'development' }}
          BUNDLE_ID_SUFFIX: ${{ inputs.environment == 'production' && '' || 'development' }}
          GITHUB_REF: ${{ github.ref }}
          FASTLANE_XCODEBUILD_SETTINGS_TIMEOUT: 120
          FASTLANE_XCODEBUILD_SETTINGS_RETRIES: 6
        working-directory: ./mobile/ios
        run: |
          # Only upload to TestFlight on main branch
.github/workflows/check-openapi.yml | 37 (vendored)

@@ -1,37 +0,0 @@ (entire file removed)
name: Check OpenAPI
on:
  workflow_dispatch:
  pull_request:
    paths:
      - 'open-api/**'
      - '.github/workflows/check-openapi.yml'
  push:
    branches: [main]
    paths:
      - 'open-api/**'
      - '.github/workflows/check-openapi.yml'

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

permissions: {}

jobs:
  check-openapi:
    runs-on: ubuntu-latest
    permissions:
      contents: read
    steps:
      - name: Checkout
        uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
        with:
          persist-credentials: false

      - name: Check for breaking API changes
        # sha is pinning to a commit instead of a tag since the action does not tag versions
        uses: oasdiff/oasdiff-action/breaking@ccb863950ce437a50f8f1a40d2a1112117e06ce4
        with:
          base: https://raw.githubusercontent.com/${{ github.repository }}/main/open-api/immich-openapi-specs.json
          revision: open-api/immich-openapi-specs.json
          fail-on: ERR
.github/workflows/cli.yml | 7 (vendored)

@@ -24,11 +24,10 @@ jobs:
    runs-on: ubuntu-latest
    permissions:
      contents: read
      id-token: write
      packages: write
    defaults:
      run:
        working-directory: ./cli

    steps:
      - id: token
        uses: immich-app/devtools/actions/create-workflow-token@da177fa133657503ddb7503f8ba53dccefec5da1 # create-workflow-token-action-v1.0.0
@@ -58,8 +57,10 @@
      - run: pnpm install --frozen-lockfile
      - run: pnpm build
      - run: pnpm publish --provenance --no-git-checks
      - run: pnpm publish --no-git-checks
        if: ${{ github.event_name == 'release' }}
        env:
          NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}

  docker:
    name: Docker
.github/workflows/close-llm-pr.yml | 38 (vendored)

@@ -1,38 +0,0 @@ (entire file removed)
name: Close LLM-generated PRs

on:
  pull_request:
    types: [labeled]

permissions: {}

jobs:
  comment_and_close:
    runs-on: ubuntu-latest
    if: ${{ github.event.label.name == 'llm-generated' }}
    permissions:
      pull-requests: write
    steps:
      - name: Comment and close
        env:
          GH_TOKEN: ${{ github.token }}
          NODE_ID: ${{ github.event.pull_request.node_id }}
        run: |
          gh api graphql \
            -f prId="$NODE_ID" \
            -f body="Thank you for your interest in contributing to Immich! Unfortunately this PR looks like it was generated using an LLM. As noted in our CONTRIBUTING.md, we request that you don't use LLMs to generate PRs as those are not a good use of maintainer time." \
            -f query='
              mutation CommentAndClosePR($prId: ID!, $body: String!) {
                addComment(input: {
                  subjectId: $prId,
                  body: $body
                }) {
                  __typename
                }

                closePullRequest(input: {
                  pullRequestId: $prId
                }) {
                  __typename
                }
              }'
.github/workflows/docker.yml | 2 (vendored)

@@ -131,7 +131,7 @@ jobs:
          - device: rocm
            suffixes: '-rocm'
            platforms: linux/amd64
            runner-mapping: '{"linux/amd64": "pokedex-giant"}'
            runner-mapping: '{"linux/amd64": "mich"}'
    uses: immich-app/devtools/.github/workflows/multi-runner-build.yml@0477486d82313fba68f7c82c034120a4b8981297 # multi-runner-build-workflow-v2.1.0
    permissions:
      contents: read
.github/workflows/prepare-release.yml | 6 (vendored)

@@ -109,6 +109,12 @@ jobs:
          APP_STORE_CONNECT_API_KEY: ${{ secrets.APP_STORE_CONNECT_API_KEY }}
          IOS_CERTIFICATE_P12: ${{ secrets.IOS_CERTIFICATE_P12 }}
          IOS_CERTIFICATE_PASSWORD: ${{ secrets.IOS_CERTIFICATE_PASSWORD }}
          IOS_PROVISIONING_PROFILE: ${{ secrets.IOS_PROVISIONING_PROFILE }}
          IOS_PROVISIONING_PROFILE_SHARE_EXTENSION: ${{ secrets.IOS_PROVISIONING_PROFILE_SHARE_EXTENSION }}
          IOS_PROVISIONING_PROFILE_WIDGET_EXTENSION: ${{ secrets.IOS_PROVISIONING_PROFILE_WIDGET_EXTENSION }}
          IOS_DEVELOPMENT_PROVISIONING_PROFILE: ${{ secrets.IOS_DEVELOPMENT_PROVISIONING_PROFILE }}
          IOS_DEVELOPMENT_PROVISIONING_PROFILE_SHARE_EXTENSION: ${{ secrets.IOS_DEVELOPMENT_PROVISIONING_PROFILE_SHARE_EXTENSION }}
          IOS_DEVELOPMENT_PROVISIONING_PROFILE_WIDGET_EXTENSION: ${{ secrets.IOS_DEVELOPMENT_PROVISIONING_PROFILE_WIDGET_EXTENSION }}
          FASTLANE_TEAM_ID: ${{ secrets.FASTLANE_TEAM_ID }}

        with:
.github/workflows/sdk.yml | 6 (vendored)

@@ -12,8 +12,6 @@ jobs:
    runs-on: ubuntu-latest
    permissions:
      contents: read
      id-token: write
      packages: write
    defaults:
      run:
        working-directory: ./open-api/typescript-sdk
@@ -44,4 +42,6 @@
      - name: Build
        run: pnpm build
      - name: Publish
        run: pnpm publish --provenance --no-git-checks
        run: pnpm publish --no-git-checks
        env:
          NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
.github/workflows/test.yml | 36 (vendored)

@@ -497,15 +497,14 @@ jobs:
        run: npx playwright install chromium --only-shell
        if: ${{ !cancelled() }}
      - name: Docker build
        run: docker compose up -d --build --renew-anon-volumes --force-recreate --remove-orphans --wait --wait-timeout 300
        run: docker compose build
        if: ${{ !cancelled() }}
      - name: Run e2e tests (web)
        env:
          CI: true
          PLAYWRIGHT_DISABLE_WEBSERVER: true
        run: npx playwright test --project=web
        run: npx playwright test --project=chromium
        if: ${{ !cancelled() }}
      - name: Archive e2e test (web) results
      - name: Archive web results
        uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
        if: success() || failure()
        with:
@@ -514,37 +513,14 @@
      - name: Run ui tests (web)
        env:
          CI: true
          PLAYWRIGHT_DISABLE_WEBSERVER: true
        run: npx playwright test --project=ui
        if: ${{ !cancelled() }}
      - name: Archive ui test (web) results
      - name: Archive ui results
        uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
        if: success() || failure()
        with:
          name: e2e-ui-test-results-${{ matrix.runner }}
          path: e2e/playwright-report/
      - name: Run maintenance tests
        env:
          CI: true
          PLAYWRIGHT_DISABLE_WEBSERVER: true
        run: npx playwright test --project=maintenance
        if: ${{ !cancelled() }}
      - name: Archive maintenance tests (web) results
        uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
        if: success() || failure()
        with:
          name: e2e-maintenance-isolated-test-results-${{ matrix.runner }}
          path: e2e/playwright-report/
      - name: Capture Docker logs
        if: always()
        run: docker compose logs --no-color > docker-compose-logs.txt
        working-directory: ./e2e
      - name: Archive Docker logs
        uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
        if: always()
        with:
          name: docker-compose-logs-${{ matrix.runner }}
          path: e2e/docker-compose-logs.txt
  success-check-e2e:
    name: End-to-End Tests Success
    needs: [e2e-tests-server-cli, e2e-tests-web]
@@ -615,9 +591,9 @@
      - name: Lint with ruff
        run: |
          uv run ruff check --output-format=github immich_ml
      - name: Format with ruff
      - name: Check black formatting
        run: |
          uv run ruff format --check immich_ml
          uv run black --check immich_ml
      - name: Run mypy type checking
        run: |
          uv run mypy --strict immich_ml/
.github/workflows/weblate-lock.yml | 2 (vendored)

@@ -36,7 +36,7 @@ jobs:
          github-token: ${{ steps.token.outputs.token }}
          filters: |
            i18n:
              - modified: 'i18n/!(en|package)**\.json'
              - modified: 'i18n/!(en)**\.json'
          skip-force-logic: 'true'

  enforce-lock:
.vscode/tasks.json | 80 (vendored, new file)

@@ -0,0 +1,80 @@
{
  "version": "2.0.0",
  "tasks": [
    {
      "label": "Fix Permissions, Install Dependencies",
      "type": "shell",
      "command": "[ -f /immich-devcontainer/container-start.sh ] && /immich-devcontainer/container-start.sh || exit 0",
      "isBackground": true,
      "presentation": {
        "echo": true,
        "reveal": "always",
        "focus": false,
        "panel": "dedicated",
        "showReuseMessage": true,
        "clear": false,
        "group": "Devcontainer tasks",
        "close": true
      },
      "runOptions": {
        "runOn": "default"
      },
      "problemMatcher": []
    },
    {
      "label": "Immich API Server (Nest)",
      "dependsOn": ["Fix Permissions, Install Dependencies"],
      "type": "shell",
      "command": "[ -f /immich-devcontainer/container-start-backend.sh ] && /immich-devcontainer/container-start-backend.sh || exit 0",
      "isBackground": true,
      "presentation": {
        "echo": true,
        "reveal": "always",
        "focus": false,
        "panel": "dedicated",
        "showReuseMessage": true,
        "clear": false,
        "group": "Devcontainer tasks",
        "close": true
      },
      "runOptions": {
        "runOn": "default"
      },
      "problemMatcher": []
    },
    {
      "label": "Immich Web Server (Vite)",
      "dependsOn": ["Fix Permissions, Install Dependencies"],
      "type": "shell",
      "command": "[ -f /immich-devcontainer/container-start-frontend.sh ] && /immich-devcontainer/container-start-frontend.sh || exit 0",
      "isBackground": true,
      "presentation": {
        "echo": true,
        "reveal": "always",
        "focus": false,
        "panel": "dedicated",
        "showReuseMessage": true,
        "clear": false,
        "group": "Devcontainer tasks",
        "close": true
      },
      "runOptions": {
        "runOn": "default"
      },
      "problemMatcher": []
    },
    {
      "label": "Immich Server and Web",
      "dependsOn": ["Immich Web Server (Vite)", "Immich API Server (Nest)"],
      "runOptions": {
        "runOn": "folderOpen"
      },
      "problemMatcher": []
    },
    {
      "label": "Build Immich CLI",
      "type": "shell",
      "command": "pnpm --filter cli build:dev"
    }
  ]
}
@@ -17,27 +17,15 @@ If you are looking for something to work on, there are discussions and issues wi

## Use of generative AI

We ask you not to open PRs generated with an LLM. We find that code generated like this tends to need a large amount of back-and-forth, which is a very inefficient use of our time. If we want LLM-generated code, it's much faster for us to use an LLM ourselves than to go through an intermediary via a pull request.
We generally discourage PRs generated entirely by an LLM. For any part generated by an LLM, please put extra effort into your self-review. Without a proper self-review, the time you save with generative AI becomes extra work for us in reviews and code cleanup. Please keep that in mind when submitting LLM-written code, and clearly state the use of LLMs/(generative) AI in your pull request as requested by the template.

## Feature freezes

From time to time, we put a feature freeze on parts of the codebase. For us, this means we won't accept most PRs that make changes in that area. Exempted from this are simple bug fixes that require only minor changes. We will close feature PRs that target a feature-frozen area, even if that feature is highly requested and you put a lot of work into it. Please keep that in mind, and if you're ever uncertain whether a PR would be accepted, reach out to us first (e.g., in the aforementioned `#contributing` channel). We hate to throw away work. Currently, we have feature freezes on:

- Sharing/Asset ownership
- (External) libraries
* Sharing/Asset ownership
* (External) libraries

## Non-code contributions

If you want to contribute to Immich but you don't feel comfortable programming in our tech stack, there are other ways you can help the team.

### Translations

All our translations are done through [Weblate](https://hosted.weblate.org/projects/immich). These rely entirely on the community; if you speak a language that isn't fully translated yet, submitting translations there is greatly appreciated!

### Datasets

Help us improve our [Immich Datasets](https://datasets.immich.app) by submitting photos and videos taken from a variety of devices, including smartphones, DSLRs, and action cameras, as well as photos with unique features, such as panoramas, burst photos, and photo spheres. These datasets will be publicly available for anyone to use, so do not submit private or sensitive photos.

### Community support

If you like helping others, answering Q&A discussions here on GitHub and replying to people on our Discord is also always appreciated.
If you want to contribute to Immich but you don't feel comfortable programming in our tech stack, there are other ways you can help the team. All our translations are done through [Weblate](https://hosted.weblate.org/projects/immich). These rely entirely on the community; if you speak a language that isn't fully translated yet, submitting translations there is greatly appreciated! If you like helping others, answering Q&A discussions here on GitHub and replying to people on our Discord is also always appreciated.
@@ -1,6 +1,6 @@
{
  "name": "@immich/cli",
  "version": "2.5.6",
  "version": "2.2.105",
  "description": "Command Line Interface (CLI) for Immich",
  "type": "module",
  "exports": "./dist/index.js",
@@ -14,13 +14,13 @@
  ],
  "devDependencies": {
    "@eslint/js": "^9.8.0",
    "@immich/sdk": "workspace:*",
    "@immich/sdk": "file:../open-api/typescript-sdk",
    "@types/byte-size": "^8.1.0",
    "@types/cli-progress": "^3.11.0",
    "@types/lodash-es": "^4.17.12",
    "@types/micromatch": "^4.0.9",
    "@types/mock-fs": "^4.13.1",
    "@types/node": "^24.10.11",
    "@types/node": "^24.10.8",
    "@vitest/coverage-v8": "^3.0.0",
    "byte-size": "^9.0.0",
    "cli-progress": "^3.12.0",
@@ -4,7 +4,6 @@ import {
  AssetBulkUploadCheckResult,
  AssetMediaResponseDto,
  AssetMediaStatus,
  Permission,
  addAssetsToAlbum,
  checkBulkUpload,
  createAlbum,
@@ -21,11 +20,13 @@ import { Stats, createReadStream } from 'node:fs';
import { stat, unlink } from 'node:fs/promises';
import path, { basename } from 'node:path';
import { Queue } from 'src/queue';
import { BaseOptions, Batcher, authenticate, crawl, requirePermissions, s, sha1 } from 'src/utils';
import { BaseOptions, Batcher, authenticate, crawl, sha1 } from 'src/utils';

const UPLOAD_WATCH_BATCH_SIZE = 100;
const UPLOAD_WATCH_DEBOUNCE_TIME_MS = 10_000;

const s = (count: number) => (count === 1 ? '' : 's');

// TODO figure out why `id` is missing
type AssetBulkUploadCheckResults = Array<AssetBulkUploadCheckResult & { id: string }>;
type Asset = { id: string; filepath: string };
@@ -135,7 +136,6 @@ export const startWatch = async (

export const upload = async (paths: string[], baseOptions: BaseOptions, options: UploadOptionsDto) => {
  await authenticate(baseOptions);
  await requirePermissions([Permission.AssetUpload]);

  const scanFiles = await scan(paths, options);

@@ -180,49 +180,18 @@ export const checkForDuplicates = async (files: string[], { concurrency, skipHas
  }

  let multiBar: MultiBar | undefined;
  let totalSize = 0;
  const statsMap = new Map<string, Stats>();

  // Calculate total size first
  for (const filepath of files) {
    const stats = await stat(filepath);
    statsMap.set(filepath, stats);
    totalSize += stats.size;
  }

  if (progress) {
    multiBar = new MultiBar(
      {
        format: '{message} | {bar} | {percentage}% | ETA: {eta_formatted} | {value}/{total}',
        formatValue: (v: number, options, type) => {
          // Don't format percentage
          if (type === 'percentage') {
            return v.toString();
          }
          return byteSize(v).toString();
        },
        etaBuffer: 100, // Increase samples for ETA calculation
      },
      { format: '{message} | {bar} | {percentage}% | ETA: {eta}s | {value}/{total} assets' },
      Presets.shades_classic,
    );

    // Ensure we restore cursor on interrupt
    process.on('SIGINT', () => {
      if (multiBar) {
        multiBar.stop();
      }
      process.exit(0);
    });
  } else {
    console.log(`Received ${files.length} files (${byteSize(totalSize)}), hashing...`);
    console.log(`Received ${files.length} files, hashing...`);
  }

  const hashProgressBar = multiBar?.create(totalSize, 0, {
    message: 'Hashing files ',
  });
  const checkProgressBar = multiBar?.create(totalSize, 0, {
    message: 'Checking for duplicates',
  });
  const hashProgressBar = multiBar?.create(files.length, 0, { message: 'Hashing files ' });
  const checkProgressBar = multiBar?.create(files.length, 0, { message: 'Checking for duplicates' });

  const newFiles: string[] = [];
  const duplicates: Asset[] = [];
@@ -242,13 +211,7 @@ export const checkForDuplicates = async (files: string[], { concurrency, skipHas
      }
    }

    // Update progress based on total size of processed files
    let processedSize = 0;
    for (const asset of assets) {
      const stats = statsMap.get(asset.id);
      processedSize += stats?.size || 0;
    }
    checkProgressBar?.increment(processedSize);
    checkProgressBar?.increment(assets.length);
    },
    { concurrency, retry: 3 },
  );
@@ -258,10 +221,6 @@ export const checkForDuplicates = async (files: string[], { concurrency, skipHas

  const queue = new Queue<string, AssetBulkUploadCheckItem[]>(
    async (filepath: string): Promise<AssetBulkUploadCheckItem[]> => {
      const stats = statsMap.get(filepath);
      if (!stats) {
        throw new Error(`Stats not found for ${filepath}`);
      }
      const dto = { id: filepath, checksum: await sha1(filepath) };

      results.push(dto);
@@ -272,7 +231,7 @@ export const checkForDuplicates = async (files: string[], { concurrency, skipHas
        void checkBulkUploadQueue.push(batch);
      }

      hashProgressBar?.increment(stats.size);
      hashProgressBar?.increment();
      return results;
    },
    { concurrency, retry: 3 },
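The `Queue` used above comes from `src/queue` and is not shown in this diff. As a reading aid, here is a minimal sketch of a concurrency-limited queue with per-item retries in the spirit of the `{ concurrency, retry: 3 }` options passed in these hunks; the class name, fields, and behavior below are assumptions for illustration, not the actual Immich implementation.

```typescript
// Minimal sketch of a concurrency-limited queue with retries.
// NOTE: illustration only; the real `src/queue` used by the CLI is not
// shown in this diff, and its API may differ.
type QueueOptions = { concurrency: number; retry: number };

class SketchQueue<T, R> {
  private active = 0;
  private waiting: Array<() => void> = [];

  constructor(
    private readonly worker: (item: T) => Promise<R>,
    private readonly options: QueueOptions,
  ) {}

  async push(item: T): Promise<R> {
    // Wait for a free slot if we are at the concurrency limit.
    if (this.active >= this.options.concurrency) {
      await new Promise<void>((resolve) => this.waiting.push(resolve));
    }
    this.active++;
    try {
      return await this.runWithRetry(item);
    } finally {
      this.active--;
      this.waiting.shift()?.(); // wake the next waiter, if any
    }
  }

  private async runWithRetry(item: T): Promise<R> {
    let lastError: unknown;
    for (let attempt = 0; attempt <= this.options.retry; attempt++) {
      try {
        return await this.worker(item);
      } catch (error) {
        lastError = error; // retry transient failures up to `retry` times
      }
    }
    throw lastError;
  }
}
```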
@@ -1,15 +1,7 @@
import { getMyUser, Permission } from '@immich/sdk';
import { getMyUser } from '@immich/sdk';
import { existsSync } from 'node:fs';
import { mkdir, unlink } from 'node:fs/promises';
import {
  BaseOptions,
  connect,
  getAuthFilePath,
  logError,
  requirePermissions,
  withError,
  writeAuthFile,
} from 'src/utils';
import { BaseOptions, connect, getAuthFilePath, logError, withError, writeAuthFile } from 'src/utils';

export const login = async (url: string, key: string, options: BaseOptions) => {
  console.log(`Logging in to ${url}`);
@@ -17,7 +9,6 @@ export const login = async (url: string, key: string, options: BaseOptions) => {
  const { configDirectory: configDir } = options;

  await connect(url, key);
  await requirePermissions([Permission.UserRead]);

  const [error, user] = await withError(getMyUser());
  if (error) {
@@ -1,9 +1,8 @@
import { getAssetStatistics, getMyUser, getServerVersion, getSupportedMediaTypes, Permission } from '@immich/sdk';
import { authenticate, BaseOptions, requirePermissions } from 'src/utils';
import { getAssetStatistics, getMyUser, getServerVersion, getSupportedMediaTypes } from '@immich/sdk';
import { BaseOptions, authenticate } from 'src/utils';

export const serverInfo = async (options: BaseOptions) => {
  const { url } = await authenticate(options);
  await requirePermissions([Permission.ServerAbout, Permission.AssetStatistics, Permission.UserRead]);

  const [versionInfo, mediaTypes, stats, userInfo] = await Promise.all([
    getServerVersion(),
@@ -1,4 +1,4 @@
import { ApiKeyResponseDto, getMyApiKey, getMyUser, init, isHttpError, Permission } from '@immich/sdk';
import { getMyUser, init, isHttpError } from '@immich/sdk';
import { convertPathToPattern, glob } from 'fast-glob';
import { createHash } from 'node:crypto';
import { createReadStream } from 'node:fs';
@@ -34,36 +34,6 @@ export const authenticate = async (options: BaseOptions): Promise<AuthDto> => {
  return auth;
};

export const s = (count: number) => (count === 1 ? '' : 's');

let _apiKey: ApiKeyResponseDto;
export const requirePermissions = async (permissions: Permission[]) => {
  if (!_apiKey) {
    _apiKey = await getMyApiKey();
  }

  if (_apiKey.permissions.includes(Permission.All)) {
    return;
  }

  const missing: Permission[] = [];

  for (const permission of permissions) {
    if (!_apiKey.permissions.includes(permission)) {
      missing.push(permission);
    }
  }

  if (missing.length > 0) {
    const combined = missing.map((permission) => `"${permission}"`).join(', ');
    console.log(
      `Missing required permission${s(missing.length)}: ${combined}.
Please make sure your API key has the correct permissions.`,
    );
    process.exit(1);
  }
};

export const connect = async (url: string, key: string) => {
  const wellKnownUrl = new URL('.well-known/immich', url);
  try {
@@ -1,6 +1,6 @@
[tools]
terragrunt = "0.98.0"
opentofu = "1.11.4"
terragrunt = "0.93.10"
opentofu = "1.10.7"

[tasks."tg:fmt"]
run = "terragrunt hclfmt"
@@ -127,7 +127,7 @@ services:

  redis:
    container_name: immich_redis
    image: docker.io/valkey/valkey:9@sha256:930b41430fb727f533c5982fe509b6f04233e26d0f7354e04de4b0d5c706e44e
    image: docker.io/valkey/valkey:9@sha256:546304417feac0874c3dd576e0952c6bb8f06bb4093ea0c9ca303c73cf458f63
    healthcheck:
      test: redis-cli ping || exit 1
@@ -56,7 +56,7 @@ services:

  redis:
    container_name: immich_redis
    image: docker.io/valkey/valkey:9@sha256:930b41430fb727f533c5982fe509b6f04233e26d0f7354e04de4b0d5c706e44e
    image: docker.io/valkey/valkey:9@sha256:546304417feac0874c3dd576e0952c6bb8f06bb4093ea0c9ca303c73cf458f63
    healthcheck:
      test: redis-cli ping || exit 1
    restart: always
@@ -97,7 +97,7 @@ services:
    command: ['./run.sh', '-disable-reporting']
    ports:
      - 3000:3000
    image: grafana/grafana:12.3.2-ubuntu@sha256:6cca4b429a1dc0d37d401dee54825c12d40056c3c6f3f56e3f0d6318ce77749b
    image: grafana/grafana:12.3.1-ubuntu@sha256:d57f1365197aec34c4d80869d8ca45bb7787c7663904950dab214dfb40c1c2fd
    volumes:
      - grafana-data:/var/lib/grafana
@@ -1,100 +0,0 @@ (entire file removed)
#
# WARNING: To install Immich, follow our guide: https://docs.immich.app/install/docker-compose
#
# Make sure to use the docker-compose.yml of the current release:
#
# https://github.com/immich-app/immich/releases/latest/download/docker-compose.yml
#
# The compose file on main may not be compatible with the latest release.

name: immich

services:
  immich-server:
    container_name: immich_server
    image: ghcr.io/immich-app/immich-server:${IMMICH_VERSION:-release}
    # extends:
    #   file: hwaccel.transcoding.yml
    #   service: cpu # set to one of [nvenc, quicksync, rkmpp, vaapi, vaapi-wsl] for accelerated transcoding
    user: '1000:1000'
    security_opt:
      - no-new-privileges:true
    cap_drop:
      - NET_RAW
    volumes:
      # Do not edit the next line. If you want to change the media storage location on your system, edit the value of UPLOAD_LOCATION in the .env file
      - ${UPLOAD_LOCATION}:/data
      - /etc/localtime:/etc/localtime:ro
    env_file:
      - .env
    ports:
      - '2283:2283'
    depends_on:
      - redis
      - database
    restart: always
    healthcheck:
      disable: false

  immich-machine-learning:
    container_name: immich_machine_learning
    # For hardware acceleration, add one of -[armnn, cuda, rocm, openvino, rknn] to the image tag.
    # Example tag: ${IMMICH_VERSION:-release}-cuda
    image: ghcr.io/immich-app/immich-machine-learning:${IMMICH_VERSION:-release}
    # extends: # uncomment this section for hardware acceleration - see https://docs.immich.app/features/ml-hardware-acceleration
    #   file: hwaccel.ml.yml
    #   service: cpu # set to one of [armnn, cuda, rocm, openvino, openvino-wsl, rknn] for accelerated inference - use the `-wsl` version for WSL2 where applicable
    user: '1000:1000'
    security_opt:
      - no-new-privileges:true
    cap_drop:
      - NET_RAW
    volumes:
      - ./ml-model-cache:/cache
      - ./ml-dotcache:/.cache
      - ./ml-config:/.config
    env_file:
      - .env
    restart: always
    healthcheck:
      disable: false

  redis:
    container_name: immich_redis
    image: docker.io/valkey/valkey:9@sha256:930b41430fb727f533c5982fe509b6f04233e26d0f7354e04de4b0d5c706e44e
    user: '1000:1000'
    security_opt:
      - no-new-privileges:true
    cap_drop:
      - NET_RAW
    volumes:
      - ./redis:/data
    healthcheck:
      test: redis-cli ping || exit 1
    restart: always

  database:
    container_name: immich_postgres
    image: ghcr.io/immich-app/postgres:14-vectorchord0.4.3-pgvectors0.2.0@sha256:bcf63357191b76a916ae5eb93464d65c07511da41e3bf7a8416db519b40b1c23
    user: '1000:1000'
    security_opt:
      - no-new-privileges:true
    cap_drop:
      - NET_RAW
    environment:
      POSTGRES_PASSWORD: ${DB_PASSWORD}
      POSTGRES_USER: ${DB_USERNAME}
      POSTGRES_DB: ${DB_DATABASE_NAME}
      POSTGRES_INITDB_ARGS: '--data-checksums'
      # Uncomment the DB_STORAGE_TYPE: 'HDD' var if your database isn't stored on SSDs
      # DB_STORAGE_TYPE: 'HDD'
    volumes:
      # Do not edit the next line. If you want to change the database storage location on your system, edit the value of DB_DATA_LOCATION in the .env file
      - ${DB_DATA_LOCATION}:/var/lib/postgresql/data
    shm_size: 128mb
    restart: always
    healthcheck:
      disable: false

volumes:
  model-cache:
@@ -49,7 +49,7 @@ services:

  redis:
    container_name: immich_redis
    image: docker.io/valkey/valkey:9@sha256:930b41430fb727f533c5982fe509b6f04233e26d0f7354e04de4b0d5c706e44e
    image: docker.io/valkey/valkey:9@sha256:546304417feac0874c3dd576e0952c6bb8f06bb4093ea0c9ca303c73cf458f63
    healthcheck:
      test: redis-cli ping || exit 1
    restart: always
@@ -402,9 +402,6 @@ To decrease Redis logs, you can add the following line to the `redis:` section o

### How can I run Immich as a non-root user?

You can change the user in the container by setting the `user` argument in `docker-compose.yml` for each service.

[Example docker-compose.yml file](https://github.com/immich-app/immich/blob/main/docker/docker-compose.rootless.yml)

You may need to add mount points or docker volumes for the following internal container paths:

- `immich-machine-learning:/.config`
@@ -140,8 +140,7 @@ For advanced users or automated recovery scenarios, you can restore a database b

```bash title='Backup'
# Replace <DB_USERNAME> with the database username - usually postgres unless you have changed it.
# Replace <DB_DATABASE_NAME> with the database name - usually immich unless you have changed it.
docker exec -t immich_postgres pg_dump --clean --if-exists --dbname=<DB_DATABASE_NAME> --username=<DB_USERNAME> | gzip > "/path/to/backup/dump.sql.gz"
docker exec -t immich_postgres pg_dumpall --clean --if-exists --username=<DB_USERNAME> | gzip > "/path/to/backup/dump.sql.gz"
```

```bash title='Restore'
@@ -154,10 +153,9 @@ docker start immich_postgres # Start Postgres server
sleep 10 # Wait for Postgres server to start up
# Check the database user if you deviated from the default
# Replace <DB_USERNAME> with the database username - usually postgres unless you have changed it.
# Replace <DB_DATABASE_NAME> with the database name - usually immich unless you have changed it.
gunzip --stdout "/path/to/backup/dump.sql.gz" \
| sed "s/SELECT pg_catalog.set_config('search_path', '', false);/SELECT pg_catalog.set_config('search_path', 'public, pg_catalog', true);/g" \
| docker exec -i immich_postgres psql --dbname=<DB_DATABASE_NAME> --username=<DB_USERNAME> --single-transaction --set ON_ERROR_STOP=on # Restore Backup
| docker exec -i immich_postgres psql --dbname=postgres --username=<DB_USERNAME> # Restore Backup
docker compose up -d # Start remainder of Immich apps
```

@@ -166,8 +164,7 @@ docker compose up -d # Start remainder of Immich apps

```powershell title='Backup'
# Replace <DB_USERNAME> with the database username - usually postgres unless you have changed it.
# Replace <DB_DATABASE_NAME> with the database name - usually immich unless you have changed it.
[System.IO.File]::WriteAllLines("C:\absolute\path\to\backup\dump.sql", (docker exec -t immich_postgres pg_dump --clean --if-exists --dbname=<DB_DATABASE_NAME> --username=<DB_USERNAME>))
[System.IO.File]::WriteAllLines("C:\absolute\path\to\backup\dump.sql", (docker exec -t immich_postgres pg_dumpall --clean --if-exists --username=<DB_USERNAME>))
```

```powershell title='Restore'
@@ -182,9 +179,8 @@ sleep 10 # Wait for Postgres server to
docker exec -it immich_postgres bash # Enter the Docker shell and run the following command
# If your backup ends in `.gz`, replace `cat` with `gunzip --stdout`
# Replace <DB_USERNAME> with the database username - usually postgres unless you have changed it.
# Replace <DB_DATABASE_NAME> with the database name - usually immich unless you have changed it.
cat "/dump.sql" | sed "s/SELECT pg_catalog.set_config('search_path', '', false);/SELECT pg_catalog.set_config('search_path', 'public, pg_catalog', true);/g" | psql --dbname=<DB_DATABASE_NAME> --username=<DB_USERNAME> --single-transaction --set ON_ERROR_STOP=on
cat "/dump.sql" | sed "s/SELECT pg_catalog.set_config('search_path', '', false);/SELECT pg_catalog.set_config('search_path', 'public, pg_catalog', true);/g" | psql --dbname=postgres --username=<DB_USERNAME>
exit # Exit the Docker shell
docker compose up -d # Start remainder of Immich apps
```

@@ -192,10 +188,6 @@ docker compose up -d # Start remainder of Immich ap
</TabItem>
</Tabs>

:::warning
The backup and restore process changed in v2.5.0. If you have a backup created with an older version of Immich, use the documentation version selector to find manual restore instructions for your backup.
:::

:::note
For the database restore to proceed properly, it requires a completely fresh install (i.e., the Immich server has never run since creating the Docker containers). If the Immich app has run, you may encounter Postgres conflicts (relation already exists, violated foreign key constraints, etc.). In this case, delete the `DB_DATA_LOCATION` folder to reset the database.
:::
@@ -204,10 +196,6 @@ For the database restore to proceed properly, it requires a completely fresh ins
Some deployment methods make it difficult to start the database without also starting the server. In these cases, set the environment variable `DB_SKIP_MIGRATIONS=true` before starting the services. This prevents the server from running migrations that interfere with the restore process. Remove this variable and restart services after the database is restored.
:::

:::tip
The provided restore process ensures your database is never in a broken state by committing all changes in one transaction. This may be undesirable behaviour in some circumstances; you can disable it by removing `--single-transaction --set ON_ERROR_STOP=on` from the command.
:::

## Filesystem

Immich stores two types of content in the filesystem: (a) original, unmodified assets (photos and videos), and (b) generated content. We recommend backing up the entire contents of `UPLOAD_LOCATION`, but only the original content is critical, which is stored in the following folders:
@@ -56,13 +56,11 @@ Once you have a new OAuth client application configured, Immich can be configure

| Setting | Type | Default | Description |
| --- | --- | --- | --- |
| Enabled | boolean | false | Enable/disable OAuth |
| `issuer_url` | URL | (required) | Required. Self-discovery URL for client (from previous step) |
| `client_id` | string | (required) | Required. Client ID (from previous step) |
| `client_secret` | string | (required) | Required. Client Secret (previous step) |
| `scope` | string | openid email profile | Full list of scopes to send with the request (space delimited) |
| `id_token_signed_response_alg` | string | RS256 | The algorithm used to sign the id token (examples: RS256, HS256) |
| `userinfo_signed_response_alg` | string | none | The algorithm used to sign the userinfo response (examples: RS256, HS256) |
| Request timeout | string | 30,000 (30 seconds) | Number of milliseconds to wait for http requests to complete before giving up |
| Issuer URL | URL | (required) | Required. Self-discovery URL for client (from previous step) |
| Client ID | string | (required) | Required. Client ID (from previous step) |
| Client Secret | string | (required) | Required. Client Secret (previous step) |
| Scope | string | openid email profile | Full list of scopes to send with the request (space delimited) |
| Signing Algorithm | string | RS256 | The algorithm used to sign the id token (examples: RS256, HS256) |
| Storage Label Claim | string | preferred_username | Claim mapping for the user's storage label**¹** |
| Role Claim | string | immich_role | Claim mapping for the user's role (should return "user" or "admin")**¹** |
| Storage Quota Claim | string | immich_quota | Claim mapping for the user's storage quota**¹** |
@@ -88,7 +88,7 @@ The easiest option is to have both extensions installed during the migration:

<details>
<summary>Migration steps (automatic)</summary>

1. Ensure you still have pgvecto.rs installed
2. Install `pgvector` (`>= 0.7, < 0.9`). The easiest way to do this on Debian/Ubuntu is by adding the [PostgreSQL Apt repository][pg-apt] and then running `apt install postgresql-NN-pgvector`, where `NN` is your Postgres version (e.g., `16`)
2. Install `pgvector` (`>= 0.7.0, < 1.0.0`). The easiest way to do this on Debian/Ubuntu is by adding the [PostgreSQL Apt repository][pg-apt] and then running `apt install postgresql-NN-pgvector`, where `NN` is your Postgres version (e.g., `16`)
3. [Install VectorChord][vchord-install]
4. Add `shared_preload_libraries = 'vchord.so, vectors.so'` to your `postgresql.conf`, making sure to include _both_ `vchord.so` and `vectors.so`. You may include other libraries here as well if needed
5. Restart the Postgres database
@@ -98,6 +98,7 @@ entryPoints:
    respondingTimeouts:
      readTimeout: 600s
      idleTimeout: 600s
      writeTimeout: 600s
```

The second part is in the `docker-compose.yml` file where Immich is defined. Add the Traefik-specific labels as in the example.
@@ -90,13 +90,10 @@ To see local changes to `@immich/ui` in Immich, do the following:

#### Setup

1. [Install mise](https://mise.jdx.dev/installing-mise.html).
2. Change to the immich (root) directory and trust the mise config with `mise trust`.
3. Install tools with mise: `mise install`.
4. Change to the `mobile/` directory.
5. Run `flutter pub get` to install the dependencies.
6. Run `make translation` to generate the translation file.
7. Run `flutter run` to start the app.
1. Set up the Flutter toolchain using FVM.
2. Run `flutter pub get` to install the dependencies.
3. Run `make translation` to generate the translation file.
4. Run `fvm flutter run` to start the app.

#### Translation
@@ -183,7 +183,7 @@ For example to get a list of files that would be uploaded for further
processing:

```bash
immich upload --dry-run --json-output . | tail -n +6 | jq .newFiles[]
immich upload --dry-run . | tail -n +6 | jq .newFiles[]
```

### Obtain the API Key
@@ -86,8 +86,8 @@ You do not need to redo any machine learning jobs after enabling hardware accele

## Setup

1. If you do not already have it, download the latest [`hwaccel.ml.yml`][hw-file] file and ensure it's in the same folder as the `docker-compose.yml`.
2. In `immich-machine-learning`, add one of -[armnn, cuda, rocm, openvino, rknn] to the `image` section's tag at the end of the line.
3. Still in the `docker-compose.yml` under `immich-machine-learning`, uncomment the `extends` section and change `cpu` to the appropriate backend.
2. In the `docker-compose.yml` under `immich-machine-learning`, uncomment the `extends` section and change `cpu` to the appropriate backend.
3. Still in `immich-machine-learning`, add one of -[armnn, cuda, rocm, openvino, rknn] to the `image` section's tag at the end of the line.
4. Redeploy the `immich-machine-learning` container with these updated settings.

### Confirming Device Usage
@@ -66,7 +66,7 @@ Now make sure that the local album is selected in the backup screen (steps 1-2 a

- **Keep on device:** You can choose to restrict removal to `Always keep` **All photos** or **All videos**, regardless of other settings. This setting can significantly hamper freeing up space: with 80 GB of videos and 40 GB of photos, selecting `Always keep photos` retains thousands of photos on your device.

2. **Scan & Review:** Before any files are removed, you are presented with a review screen to verify which items will be deleted and how much storage is reclaimable.
3. **Deletion:** Confirmed items are moved to your device's native Trash/Recycle Bin. For large queues, Immich processes deletion in batches for stability (`2000` assets per batch on Android, `10000` per batch on iOS).
3. **Deletion:** Confirmed items are moved to your device's native Trash/Recycle Bin.

:::info reclaim storage
To use the reclaimed space right away, you must empty the system/gallery trash manually outside of Immich.
@@ -26,16 +26,6 @@ docker image prune

[breaking]: https://github.com/immich-app/immich/discussions?discussions_q=label%3Achangelog%3Abreaking-change+sort%3Adate_created
[releases]: https://github.com/immich-app/immich/releases

## Versioning Policy

Immich follows [semantic versioning][semver], which tags releases in the format `<major>.<minor>.<patch>`. We intend for breaking changes to be limited to major version releases.
You can configure your Docker image to point to the current major version by using a metatag, such as `:v2`.

Currently, we have no plans to backport patches to earlier versions. We encourage all users to run the most recent release of Immich.
Switching back to an earlier version, even within the same minor release tag, is not supported.

[semver]: https://semver.org/

## Migrating to VectorChord

:::info

@@ -32,7 +32,3 @@ If you would like to migrate from one media location to another, simply successf
4. Start up Immich

After version `1.136.0`, Immich can detect when a media location has moved and will automatically update the database paths to keep them in sync.

## Schema drift

Schema drift occurs when the database schema is out of sync with the code. This could be the result of manual database tinkering, issues during a database restore, or something else. Schema drift can lead to data corruption, application bugs, and other unpredictable behavior. Please reconcile the differences as soon as possible. Specifically, missing `CONSTRAINT`s can result in duplicate assets being uploaded, since the server relies on a checksum `CONSTRAINT` to prevent duplicates.
docs/static/archived-versions.json | 32 (vendored)

@@ -1,20 +1,36 @@
[
  {
    "label": "v2.5.6",
    "url": "https://docs.v2.5.6.archive.immich.app"
  },
  {
    "label": "v2.4.1",
    "url": "https://docs.v2.4.1.archive.immich.app"
  },
  {
    "label": "v2.4.0",
    "url": "https://docs.v2.4.0.archive.immich.app"
  },
  {
    "label": "v2.3.1",
    "url": "https://docs.v2.3.1.archive.immich.app"
  },
  {
    "label": "v2.3.0",
    "url": "https://docs.v2.3.0.archive.immich.app"
  },
  {
    "label": "v2.2.3",
    "url": "https://docs.v2.2.3.archive.immich.app"
  },
  {
    "label": "v2.2.2",
    "url": "https://docs.v2.2.2.archive.immich.app"
  },
  {
    "label": "v2.2.1",
    "url": "https://docs.v2.2.1.archive.immich.app"
  },
  {
    "label": "v2.2.0",
    "url": "https://docs.v2.2.0.archive.immich.app"
  },
  {
    "label": "v2.1.0",
    "url": "https://docs.v2.1.0.archive.immich.app"
@@ -23,10 +39,18 @@
    "label": "v2.0.1",
    "url": "https://docs.v2.0.1.archive.immich.app"
  },
  {
    "label": "v2.0.0",
    "url": "https://docs.v2.0.0.archive.immich.app"
  },
  {
    "label": "v1.144.1",
    "url": "https://docs.v1.144.1.archive.immich.app"
  },
  {
    "label": "v1.144.0",
    "url": "https://docs.v1.144.0.archive.immich.app"
  },
  {
    "label": "v1.143.1",
    "url": "https://docs.v1.143.1.archive.immich.app"
@@ -70,7 +70,7 @@ services:
    restart: unless-stopped

  redis:
    image: redis:6.2-alpine@sha256:46884be93652d02a96a176ccf173d1040bef365c5706aa7b6a1931caec8bfeef
    image: redis:6.2-alpine@sha256:37e002448575b32a599109664107e374c8709546905c372a34d64919043b9ceb

  database:
    image: ghcr.io/immich-app/postgres:14-vectorchord0.3.0@sha256:6f3e9d2c2177af16c2988ff71425d79d89ca630ec2f9c8db03209ab716542338
@@ -42,7 +42,7 @@ services:
      - 2285:2285

  redis:
    image: redis:6.2-alpine@sha256:46884be93652d02a96a176ccf173d1040bef365c5706aa7b6a1931caec8bfeef
    image: redis:6.2-alpine@sha256:37e002448575b32a599109664107e374c8709546905c372a34d64919043b9ceb

  database:
    image: ghcr.io/immich-app/postgres:14-vectorchord0.3.0@sha256:6f3e9d2c2177af16c2988ff71425d79d89ca630ec2f9c8db03209ab716542338
@@ -53,7 +53,6 @@ services:
      POSTGRES_DB: immich
    ports:
      - 5435:5432
    shm_size: 128mb
    healthcheck:
      test: ['CMD-SHELL', 'pg_isready -U postgres -d immich']
      interval: 1s
@@ -1,6 +1,6 @@
{
  "name": "immich-e2e",
  "version": "2.5.6",
  "version": "2.4.1",
  "description": "",
  "main": "index.js",
  "type": "module",
@@ -21,13 +21,13 @@
  "devDependencies": {
    "@eslint/js": "^9.8.0",
    "@faker-js/faker": "^10.1.0",
    "@immich/cli": "workspace:*",
    "@immich/e2e-auth-server": "workspace:*",
    "@immich/sdk": "workspace:*",
    "@immich/cli": "file:../cli",
    "@immich/e2e-auth-server": "file:../e2e-auth-server",
    "@immich/sdk": "file:../open-api/typescript-sdk",
    "@playwright/test": "^1.44.1",
    "@socket.io/component-emitter": "^3.1.2",
    "@types/luxon": "^3.4.2",
    "@types/node": "^24.10.11",
    "@types/node": "^24.10.8",
    "@types/pg": "^8.15.1",
    "@types/pngjs": "^6.0.4",
    "@types/supertest": "^6.0.2",
@@ -14,8 +14,7 @@ export const playwrightDisableWebserver = process.env.PLAYWRIGHT_DISABLE_WEBSERV
process.env.PW_EXPERIMENTAL_SERVICE_WORKER_NETWORK_EVENTS = '1';

const config: PlaywrightTestConfig = {
  testDir: './src/specs/server',
  testMatch: /.*\.e2e-spec\.ts/,
  testDir: './src/web/specs',
  fullyParallel: false,
  forbidOnly: !!process.env.CI,
  retries: process.env.CI ? 4 : 0,
@@ -29,28 +28,54 @@ const config: PlaywrightTestConfig = {
    },
  },

  testMatch: /.*\.e2e-spec\.ts/,

  workers: process.env.CI ? 4 : Math.round(cpus().length * 0.75),

  projects: [
    {
      name: 'web',
      name: 'chromium',
      use: { ...devices['Desktop Chrome'] },
      testDir: './src/specs/web',
      testMatch: /.*\.e2e-spec\.ts/,
      workers: 1,
    },
    {
      name: 'ui',
      use: { ...devices['Desktop Chrome'] },
      testDir: './src/ui/specs',
      testMatch: /.*\.ui-spec\.ts/,
      fullyParallel: true,
      workers: process.env.CI ? 3 : Math.max(1, Math.round(cpus().length * 0.75) - 1),
    },
    {
      name: 'maintenance',
      use: { ...devices['Desktop Chrome'] },
      testDir: './src/specs/maintenance',
      workers: 1,
    },

    // {
    //   name: 'firefox',
    //   use: { ...devices['Desktop Firefox'] },
    // },

    // {
    //   name: 'webkit',
    //   use: { ...devices['Desktop Safari'] },
    // },

    /* Test against mobile viewports. */
    // {
    //   name: 'Mobile Chrome',
    //   use: { ...devices['Pixel 5'] },
    // },
    // {
    //   name: 'Mobile Safari',
    //   use: { ...devices['iPhone 12'] },
    // },

    /* Test against branded browsers. */
    // {
    //   name: 'Microsoft Edge',
    //   use: { ...devices['Desktop Edge'], channel: 'msedge' },
    // },
    // {
    //   name: 'Google Chrome',
    //   use: { ...devices['Desktop Chrome'], channel: 'chrome' },
    // },
  ],

  /* Run your local dev server before starting the tests */
@@ -473,7 +473,6 @@ describe('/asset', () => {
        id: user1Assets[0].id,
        exifInfo: expect.objectContaining({
          dateTimeOriginal: '2023-11-20T01:11:00+00:00',
          timeZone: 'UTC-7',
        }),
      });
      expect(status).toEqual(200);
@@ -239,7 +239,7 @@ describe('/shared-links', () => {
      const { status, body } = await request(app).get('/shared-links/me').query({ key: linkWithPassword.key });

      expect(status).toBe(401);
      expect(body).toEqual(errorDto.passwordRequired);
      expect(body).toEqual(errorDto.invalidSharePassword);
    });

    it('should get data for correct password protected link', async () => {
@@ -1,5 +1,5 @@
import { generateConsecutiveDays, generateDayAssets } from 'src/ui/generators/timeline/model-objects';
import { SeededRandom, selectRandomDays } from 'src/ui/generators/timeline/utils';
import { generateConsecutiveDays, generateDayAssets } from 'src/generators/timeline/model-objects';
import { SeededRandom, selectRandomDays } from 'src/generators/timeline/utils';
import type { MockTimelineAsset } from './timeline-config';
import { GENERATION_CONSTANTS } from './timeline-config';

@@ -1,5 +1,5 @@
import sharp from 'sharp';
import { SeededRandom } from 'src/ui/generators/timeline/utils';
import { SeededRandom } from 'src/generators/timeline/utils';

export const randomThumbnail = async (seed: string, ratio: number) => {
  const height = 235;
@@ -6,7 +6,7 @@ import { faker } from '@faker-js/faker';
import { AssetVisibility } from '@immich/sdk';
import { DateTime } from 'luxon';
import { writeFileSync } from 'node:fs';
import { SeededRandom } from 'src/ui/generators/timeline/utils';
import { SeededRandom } from 'src/generators/timeline/utils';
import type { DayPattern, MonthDistribution } from './distribution-patterns';
import { ASSET_DISTRIBUTION, DAY_DISTRIBUTION } from './distribution-patterns';
import type { MockTimelineAsset, MockTimelineData, SerializedTimelineData, TimelineConfig } from './timeline-config';
@@ -15,7 +15,7 @@ import {
} from '@immich/sdk';
import { DateTime } from 'luxon';
import { signupDto } from 'src/fixtures';
import { parseTimeBucketKey } from 'src/ui/generators/timeline/utils';
import { parseTimeBucketKey } from 'src/generators/timeline/utils';
import type { MockTimelineAsset, MockTimelineData } from './timeline-config';

/**
@@ -1,5 +1,5 @@
import type { AssetVisibility } from '@immich/sdk';
import { DayPattern, MonthDistribution } from 'src/ui/generators/timeline/distribution-patterns';
import { DayPattern, MonthDistribution } from 'src/generators/timeline/distribution-patterns';

// Constants for generation parameters
export const GENERATION_CONSTANTS = {
@@ -1,5 +1,5 @@
import { DateTime } from 'luxon';
import { GENERATION_CONSTANTS, MockTimelineAsset } from 'src/ui/generators/timeline/timeline-config';
import { GENERATION_CONSTANTS, MockTimelineAsset } from 'src/generators/timeline/timeline-config';

/**
 * Linear Congruential Generator for deterministic pseudo-random numbers
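The hunk above ends inside the doc comment for the timeline `SeededRandom` helper, whose body is not included in this diff. For context, a linear congruential generator of the kind that comment describes can be sketched as follows; the constants and method names are assumptions (the common Numerical Recipes parameters), not necessarily what Immich uses.

```typescript
// Sketch of a linear congruential generator (LCG) for deterministic
// pseudo-random numbers: state = (a * state + c) mod 2^32.
// NOTE: the multiplier/increment below are the widely used Numerical
// Recipes constants; the actual SeededRandom implementation in
// src/generators/timeline/utils is not shown in this diff.
class LcgSketch {
  private state: number;

  constructor(seed: number) {
    this.state = seed >>> 0; // coerce to an unsigned 32-bit integer
  }

  // Returns a float in [0, 1) and advances the internal state.
  next(): number {
    this.state = (Math.imul(1_664_525, this.state) + 1_013_904_223) >>> 0;
    return this.state / 2 ** 32;
  }

  // Returns an integer in [min, max).
  nextInt(min: number, max: number): number {
    return min + Math.floor(this.next() * (max - min));
  }
}

// The same seed always reproduces the same sequence, which is what makes
// generated timeline fixtures deterministic across test runs.
const rng = new LcgSketch(42);
console.log(rng.nextInt(0, 10), rng.nextInt(0, 10));
```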
@@ -10,8 +10,8 @@ import {
  randomPreview,
  randomThumbnail,
  TimelineData,
} from 'src/ui/generators/timeline';
import { sleep } from 'src/ui/specs/timeline/utils';
} from 'src/generators/timeline';
import { sleep } from 'src/web/specs/timeline/utils';

export class TimelineTestContext {
  slowBucket = false;
@@ -43,10 +43,10 @@ export const errorDto = {
    message: 'Invalid share key',
    correlationId: expect.any(String),
  },
  passwordRequired: {
  invalidSharePassword: {
    error: 'Unauthorized',
    statusCode: 401,
    message: 'Password required',
    message: 'Invalid password',
    correlationId: expect.any(String),
  },
  badRequest: (message: any = null) => ({
@@ -1,2 +0,0 @@
-export { generateMemoriesFromTimeline, generateMemory } from './memory/model-objects';
-export type { MemoryConfig, MemoryYearConfig } from './memory/model-objects';
@@ -1,84 +0,0 @@
-import { faker } from '@faker-js/faker';
-import { MemoryType, type MemoryResponseDto, type OnThisDayDto } from '@immich/sdk';
-import { DateTime } from 'luxon';
-import { toAssetResponseDto } from 'src/ui/generators/timeline/rest-response';
-import type { MockTimelineAsset } from 'src/ui/generators/timeline/timeline-config';
-import { SeededRandom, selectRandomMultiple } from 'src/ui/generators/timeline/utils';
-
-export type MemoryConfig = {
-  id?: string;
-  ownerId: string;
-  year: number;
-  memoryAt: string;
-  isSaved?: boolean;
-};
-
-export type MemoryYearConfig = {
-  year: number;
-  assetCount: number;
-};
-
-export function generateMemory(config: MemoryConfig, assets: MockTimelineAsset[]): MemoryResponseDto {
-  const now = new Date().toISOString();
-  const memoryId = config.id ?? faker.string.uuid();
-
-  return {
-    id: memoryId,
-    assets: assets.map((asset) => toAssetResponseDto(asset)),
-    data: { year: config.year } as OnThisDayDto,
-    memoryAt: config.memoryAt,
-    createdAt: now,
-    updatedAt: now,
-    isSaved: config.isSaved ?? false,
-    ownerId: config.ownerId,
-    type: MemoryType.OnThisDay,
-  };
-}
-
-export function generateMemoriesFromTimeline(
-  timelineAssets: MockTimelineAsset[],
-  ownerId: string,
-  memoryConfigs: MemoryYearConfig[],
-  seed: number = 42,
-): MemoryResponseDto[] {
-  const rng = new SeededRandom(seed);
-  const memories: MemoryResponseDto[] = [];
-  const usedAssetIds = new Set<string>();
-
-  for (const config of memoryConfigs) {
-    const yearAssets = timelineAssets.filter((asset) => {
-      const assetYear = DateTime.fromISO(asset.fileCreatedAt).year;
-      return assetYear === config.year && !usedAssetIds.has(asset.id);
-    });
-
-    if (yearAssets.length === 0) {
-      continue;
-    }
-
-    const countToSelect = Math.min(config.assetCount, yearAssets.length);
-    const selectedAssets = selectRandomMultiple(yearAssets, countToSelect, rng);
-
-    for (const asset of selectedAssets) {
-      usedAssetIds.add(asset.id);
-    }
-
-    selectedAssets.sort(
-      (a, b) => DateTime.fromISO(b.fileCreatedAt).diff(DateTime.fromISO(a.fileCreatedAt)).milliseconds,
-    );
-
-    const memoryAt = DateTime.now().set({ year: config.year }).toISO()!;
-
-    memories.push(
-      generateMemory(
-        {
-          ownerId,
-          year: config.year,
-          memoryAt,
-        },
-        selectedAssets,
-      ),
-    );
-  }
-
-  return memories;
-}
@@ -1,65 +0,0 @@
-import type { MemoryResponseDto } from '@immich/sdk';
-import { BrowserContext } from '@playwright/test';
-
-export type MemoryChanges = {
-  memoryDeletions: string[];
-  assetRemovals: Map<string, string[]>;
-};
-
-export const setupMemoryMockApiRoutes = async (
-  context: BrowserContext,
-  memories: MemoryResponseDto[],
-  changes: MemoryChanges,
-) => {
-  await context.route('**/api/memories*', async (route, request) => {
-    const url = new URL(request.url());
-    const pathname = url.pathname;
-
-    if (pathname === '/api/memories' && request.method() === 'GET') {
-      const activeMemories = memories
-        .filter((memory) => !changes.memoryDeletions.includes(memory.id))
-        .map((memory) => {
-          const removedAssets = changes.assetRemovals.get(memory.id) ?? [];
-          return {
-            ...memory,
-            assets: memory.assets.filter((asset) => !removedAssets.includes(asset.id)),
-          };
-        })
-        .filter((memory) => memory.assets.length > 0);
-
-      return route.fulfill({
-        status: 200,
-        contentType: 'application/json',
-        json: activeMemories,
-      });
-    }
-
-    const memoryMatch = pathname.match(/\/api\/memories\/([^/]+)$/);
-    if (memoryMatch && request.method() === 'GET') {
-      const memoryId = memoryMatch[1];
-      const memory = memories.find((m) => m.id === memoryId);
-
-      if (!memory || changes.memoryDeletions.includes(memoryId)) {
-        return route.fulfill({ status: 404 });
-      }
-
-      const removedAssets = changes.assetRemovals.get(memoryId) ?? [];
-      return route.fulfill({
-        status: 200,
-        contentType: 'application/json',
-        json: {
-          ...memory,
-          assets: memory.assets.filter((asset) => !removedAssets.includes(asset.id)),
-        },
-      });
-    }
-
-    if (/\/api\/memories\/([^/]+)$/.test(pathname) && request.method() === 'DELETE') {
-      const memoryId = pathname.split('/').pop()!;
-      changes.memoryDeletions.push(memoryId);
-      return route.fulfill({ status: 204 });
-    }
-
-    await route.fallback();
-  });
-};
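Editor's note: the deleted helper above is an instance of Playwright's standard request-interception pattern — register one context.route handler for a URL family, branch on method and path inside it, fulfill matches with canned JSON, and fall back for everything else. A stripped-down sketch of that pattern, with a hypothetical /api/things endpoint standing in for the real routes:

    import { BrowserContext } from '@playwright/test';

    // Sketch: serve canned JSON for GET /api/things, pass everything else through.
    export const mockThings = async (context: BrowserContext, things: unknown[]) => {
      await context.route('**/api/things*', async (route, request) => {
        if (request.method() === 'GET') {
          return route.fulfill({ status: 200, contentType: 'application/json', json: things });
        }
        await route.fallback(); // defer to other matching handlers, then the real network
      });
    };

Because route.fallback() defers unmatched requests instead of aborting them, mocks like this compose: per-suite helpers (base, timeline, memory) can each claim their own URL family without interfering.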
@@ -1,289 +0,0 @@
-import { faker } from '@faker-js/faker';
-import type { MemoryResponseDto } from '@immich/sdk';
-import { test } from '@playwright/test';
-import { generateMemoriesFromTimeline } from 'src/ui/generators/memory';
-import {
-  Changes,
-  createDefaultTimelineConfig,
-  generateTimelineData,
-  TimelineAssetConfig,
-  TimelineData,
-} from 'src/ui/generators/timeline';
-import { setupBaseMockApiRoutes } from 'src/ui/mock-network/base-network';
-import { MemoryChanges, setupMemoryMockApiRoutes } from 'src/ui/mock-network/memory-network';
-import { setupTimelineMockApiRoutes, TimelineTestContext } from 'src/ui/mock-network/timeline-network';
-import { memoryAssetViewerUtils, memoryGalleryUtils, memoryViewerUtils } from './utils';
-
-test.describe.configure({ mode: 'parallel' });
-
-test.describe('Memory Viewer - Gallery Asset Viewer Navigation', () => {
-  let adminUserId: string;
-  let timelineRestData: TimelineData;
-  let memories: MemoryResponseDto[];
-  const assets: TimelineAssetConfig[] = [];
-  const testContext = new TimelineTestContext();
-  const changes: Changes = {
-    albumAdditions: [],
-    assetDeletions: [],
-    assetArchivals: [],
-    assetFavorites: [],
-  };
-  const memoryChanges: MemoryChanges = {
-    memoryDeletions: [],
-    assetRemovals: new Map(),
-  };
-
-  test.beforeAll(async () => {
-    adminUserId = faker.string.uuid();
-    testContext.adminId = adminUserId;
-
-    timelineRestData = generateTimelineData({
-      ...createDefaultTimelineConfig(),
-      ownerId: adminUserId,
-    });
-
-    for (const timeBucket of timelineRestData.buckets.values()) {
-      assets.push(...timeBucket);
-    }
-
-    memories = generateMemoriesFromTimeline(
-      assets,
-      adminUserId,
-      [
-        { year: 2024, assetCount: 3 },
-        { year: 2023, assetCount: 2 },
-        { year: 2022, assetCount: 4 },
-      ],
-      42,
-    );
-  });
-
-  test.beforeEach(async ({ context }) => {
-    await setupBaseMockApiRoutes(context, adminUserId);
-    await setupTimelineMockApiRoutes(context, timelineRestData, changes, testContext);
-    await setupMemoryMockApiRoutes(context, memories, memoryChanges);
-  });
-
-  test.afterEach(() => {
-    testContext.slowBucket = false;
-    changes.albumAdditions = [];
-    changes.assetDeletions = [];
-    changes.assetArchivals = [];
-    changes.assetFavorites = [];
-    memoryChanges.memoryDeletions = [];
-    memoryChanges.assetRemovals.clear();
-  });
-
-  test.describe('Asset viewer navigation from gallery', () => {
-    test('shows both prev/next buttons for middle asset within a memory', async ({ page }) => {
-      const firstMemory = memories[0];
-      const middleAsset = firstMemory.assets[1];
-
-      await memoryViewerUtils.openMemoryPageWithAsset(page, middleAsset.id);
-      await memoryGalleryUtils.clickThumbnail(page, middleAsset.id);
-
-      await memoryAssetViewerUtils.waitForViewerOpen(page);
-      await memoryAssetViewerUtils.waitForAssetLoad(page, middleAsset);
-
-      await memoryAssetViewerUtils.expectPreviousButtonVisible(page);
-      await memoryAssetViewerUtils.expectNextButtonVisible(page);
-    });
-
-    test('shows next button when at last asset of first memory (next memory exists)', async ({ page }) => {
-      const firstMemory = memories[0];
-      const lastAssetOfFirstMemory = firstMemory.assets.at(-1)!;
-
-      await memoryViewerUtils.openMemoryPageWithAsset(page, lastAssetOfFirstMemory.id);
-      await memoryGalleryUtils.clickThumbnail(page, lastAssetOfFirstMemory.id);
-
-      await memoryAssetViewerUtils.waitForViewerOpen(page);
-      await memoryAssetViewerUtils.waitForAssetLoad(page, lastAssetOfFirstMemory);
-
-      await memoryAssetViewerUtils.expectNextButtonVisible(page);
-      await memoryAssetViewerUtils.expectPreviousButtonVisible(page);
-    });
-
-    test('shows prev button when at first asset of last memory (prev memory exists)', async ({ page }) => {
-      const lastMemory = memories.at(-1)!;
-      const firstAssetOfLastMemory = lastMemory.assets[0];
-
-      await memoryViewerUtils.openMemoryPageWithAsset(page, firstAssetOfLastMemory.id);
-      await memoryGalleryUtils.clickThumbnail(page, firstAssetOfLastMemory.id);
-
-      await memoryAssetViewerUtils.waitForViewerOpen(page);
-      await memoryAssetViewerUtils.waitForAssetLoad(page, firstAssetOfLastMemory);
-
-      await memoryAssetViewerUtils.expectPreviousButtonVisible(page);
-      await memoryAssetViewerUtils.expectNextButtonVisible(page);
-    });
-
-    test('can navigate from last asset of memory to first asset of next memory', async ({ page }) => {
-      const firstMemory = memories[0];
-      const secondMemory = memories[1];
-      const lastAssetOfFirst = firstMemory.assets.at(-1)!;
-      const firstAssetOfSecond = secondMemory.assets[0];
-
-      await memoryViewerUtils.openMemoryPageWithAsset(page, lastAssetOfFirst.id);
-      await memoryGalleryUtils.clickThumbnail(page, lastAssetOfFirst.id);
-
-      await memoryAssetViewerUtils.waitForViewerOpen(page);
-      await memoryAssetViewerUtils.waitForAssetLoad(page, lastAssetOfFirst);
-
-      await memoryAssetViewerUtils.clickNextButton(page);
-      await memoryAssetViewerUtils.waitForAssetLoad(page, firstAssetOfSecond);
-
-      await memoryAssetViewerUtils.expectCurrentAssetId(page, firstAssetOfSecond.id);
-    });
-
-    test('can navigate from first asset of memory to last asset of previous memory', async ({ page }) => {
-      const firstMemory = memories[0];
-      const secondMemory = memories[1];
-      const lastAssetOfFirst = firstMemory.assets.at(-1)!;
-      const firstAssetOfSecond = secondMemory.assets[0];
-
-      await memoryViewerUtils.openMemoryPageWithAsset(page, firstAssetOfSecond.id);
-      await memoryGalleryUtils.clickThumbnail(page, firstAssetOfSecond.id);
-
-      await memoryAssetViewerUtils.waitForViewerOpen(page);
-      await memoryAssetViewerUtils.waitForAssetLoad(page, firstAssetOfSecond);
-
-      await memoryAssetViewerUtils.clickPreviousButton(page);
-      await memoryAssetViewerUtils.waitForAssetLoad(page, lastAssetOfFirst);
-    });
-
-    test('hides prev button at very first asset (first memory, first asset, no prev memory)', async ({ page }) => {
-      const firstMemory = memories[0];
-      const veryFirstAsset = firstMemory.assets[0];
-
-      await memoryViewerUtils.openMemoryPageWithAsset(page, veryFirstAsset.id);
-      await memoryGalleryUtils.clickThumbnail(page, veryFirstAsset.id);
-
-      await memoryAssetViewerUtils.waitForViewerOpen(page);
-      await memoryAssetViewerUtils.waitForAssetLoad(page, veryFirstAsset);
-
-      await memoryAssetViewerUtils.expectPreviousButtonNotVisible(page);
-      await memoryAssetViewerUtils.expectNextButtonVisible(page);
-    });
-
-    test('hides next button at very last asset (last memory, last asset, no next memory)', async ({ page }) => {
-      const lastMemory = memories.at(-1)!;
-      const veryLastAsset = lastMemory.assets.at(-1)!;
-
-      await memoryViewerUtils.openMemoryPageWithAsset(page, veryLastAsset.id);
-      await memoryGalleryUtils.clickThumbnail(page, veryLastAsset.id);
-
-      await memoryAssetViewerUtils.waitForViewerOpen(page);
-      await memoryAssetViewerUtils.waitForAssetLoad(page, veryLastAsset);
-
-      await memoryAssetViewerUtils.expectNextButtonNotVisible(page);
-      await memoryAssetViewerUtils.expectPreviousButtonVisible(page);
-    });
-  });
-
-  test.describe('Keyboard navigation', () => {
-    test('ArrowLeft navigates to previous asset across memory boundary', async ({ page }) => {
-      const firstMemory = memories[0];
-      const secondMemory = memories[1];
-      const lastAssetOfFirst = firstMemory.assets.at(-1)!;
-      const firstAssetOfSecond = secondMemory.assets[0];
-
-      await memoryViewerUtils.openMemoryPageWithAsset(page, firstAssetOfSecond.id);
-      await memoryGalleryUtils.clickThumbnail(page, firstAssetOfSecond.id);
-
-      await memoryAssetViewerUtils.waitForViewerOpen(page);
-      await memoryAssetViewerUtils.waitForAssetLoad(page, firstAssetOfSecond);
-
-      await page.keyboard.press('ArrowLeft');
-      await memoryAssetViewerUtils.waitForAssetLoad(page, lastAssetOfFirst);
-    });
-
-    test('ArrowRight navigates to next asset across memory boundary', async ({ page }) => {
-      const firstMemory = memories[0];
-      const secondMemory = memories[1];
-      const lastAssetOfFirst = firstMemory.assets.at(-1)!;
-      const firstAssetOfSecond = secondMemory.assets[0];
-
-      await memoryViewerUtils.openMemoryPageWithAsset(page, lastAssetOfFirst.id);
-      await memoryGalleryUtils.clickThumbnail(page, lastAssetOfFirst.id);
-
-      await memoryAssetViewerUtils.waitForViewerOpen(page);
-      await memoryAssetViewerUtils.waitForAssetLoad(page, lastAssetOfFirst);
-
-      await page.keyboard.press('ArrowRight');
-      await memoryAssetViewerUtils.waitForAssetLoad(page, firstAssetOfSecond);
-    });
-  });
-});
-
-test.describe('Memory Viewer - Single Asset Memory Edge Cases', () => {
-  let adminUserId: string;
-  let timelineRestData: TimelineData;
-  let memories: MemoryResponseDto[];
-  const assets: TimelineAssetConfig[] = [];
-  const testContext = new TimelineTestContext();
-  const changes: Changes = {
-    albumAdditions: [],
-    assetDeletions: [],
-    assetArchivals: [],
-    assetFavorites: [],
-  };
-  const memoryChanges: MemoryChanges = {
-    memoryDeletions: [],
-    assetRemovals: new Map(),
-  };
-
-  test.beforeAll(async () => {
-    adminUserId = faker.string.uuid();
-    testContext.adminId = adminUserId;
-
-    timelineRestData = generateTimelineData({
-      ...createDefaultTimelineConfig(),
-      ownerId: adminUserId,
-    });
-
-    for (const timeBucket of timelineRestData.buckets.values()) {
-      assets.push(...timeBucket);
-    }
-
-    memories = generateMemoriesFromTimeline(
-      assets,
-      adminUserId,
-      [
-        { year: 2024, assetCount: 2 },
-        { year: 2023, assetCount: 1 },
-        { year: 2022, assetCount: 2 },
-      ],
-      123,
-    );
-  });
-
-  test.beforeEach(async ({ context }) => {
-    await setupBaseMockApiRoutes(context, adminUserId);
-    await setupTimelineMockApiRoutes(context, timelineRestData, changes, testContext);
-    await setupMemoryMockApiRoutes(context, memories, memoryChanges);
-  });
-
-  test.afterEach(() => {
-    testContext.slowBucket = false;
-    changes.albumAdditions = [];
-    changes.assetDeletions = [];
-    changes.assetArchivals = [];
-    changes.assetFavorites = [];
-    memoryChanges.memoryDeletions = [];
-    memoryChanges.assetRemovals.clear();
-  });
-
-  test('single asset memory shows both prev/next when surrounded by other memories', async ({ page }) => {
-    const singleAssetMemory = memories[1];
-    const singleAsset = singleAssetMemory.assets[0];
-
-    await memoryViewerUtils.openMemoryPageWithAsset(page, singleAsset.id);
-    await memoryGalleryUtils.clickThumbnail(page, singleAsset.id);
-
-    await memoryAssetViewerUtils.waitForViewerOpen(page);
-    await memoryAssetViewerUtils.waitForAssetLoad(page, singleAsset);
-
-    await memoryAssetViewerUtils.expectPreviousButtonVisible(page);
-    await memoryAssetViewerUtils.expectNextButtonVisible(page);
-  });
-});
@@ -1,123 +0,0 @@
-import type { AssetResponseDto } from '@immich/sdk';
-import { expect, Page } from '@playwright/test';
-
-function getAssetIdFromUrl(url: URL): string | null {
-  const pathMatch = url.pathname.match(/\/memory\/photos\/([^/]+)/);
-  if (pathMatch) {
-    return pathMatch[1];
-  }
-  return url.searchParams.get('id');
-}
-
-export const memoryViewerUtils = {
-  locator(page: Page) {
-    return page.locator('#memory-viewer');
-  },
-
-  async waitForMemoryLoad(page: Page) {
-    await expect(this.locator(page)).toBeVisible();
-    await expect(page.locator('#memory-viewer img').first()).toBeVisible();
-  },
-
-  async openMemoryPage(page: Page) {
-    await page.goto('/memory');
-    await this.waitForMemoryLoad(page);
-  },
-
-  async openMemoryPageWithAsset(page: Page, assetId: string) {
-    await page.goto(`/memory?id=${assetId}`);
-    await this.waitForMemoryLoad(page);
-  },
-};
-
-export const memoryGalleryUtils = {
-  locator(page: Page) {
-    return page.locator('#gallery-memory');
-  },
-
-  thumbnailWithAssetId(page: Page, assetId: string) {
-    return page.locator(`#gallery-memory [data-thumbnail-focus-container][data-asset="${assetId}"]`);
-  },
-
-  async scrollToGallery(page: Page) {
-    const showGalleryButton = page.getByLabel('Show gallery');
-    if (await showGalleryButton.isVisible()) {
-      await showGalleryButton.click();
-    }
-    await expect(this.locator(page)).toBeInViewport();
-  },
-
-  async clickThumbnail(page: Page, assetId: string) {
-    await this.scrollToGallery(page);
-    await this.thumbnailWithAssetId(page, assetId).click();
-  },
-
-  async getAllThumbnails(page: Page) {
-    await this.scrollToGallery(page);
-    return page.locator('#gallery-memory [data-thumbnail-focus-container]');
-  },
-};
-
-export const memoryAssetViewerUtils = {
-  locator(page: Page) {
-    return page.locator('#immich-asset-viewer');
-  },
-
-  async waitForViewerOpen(page: Page) {
-    await expect(this.locator(page)).toBeVisible();
-  },
-
-  async waitForAssetLoad(page: Page, asset: AssetResponseDto) {
-    const viewer = this.locator(page);
-    const imgLocator = viewer.locator(`img[draggable="false"][src*="/api/assets/${asset.id}/thumbnail?size=preview"]`);
-    const videoLocator = viewer.locator(`video[poster*="/api/assets/${asset.id}/thumbnail?size=preview"]`);
-
-    await imgLocator.or(videoLocator).waitFor({ timeout: 10_000 });
-  },
-
-  nextButton(page: Page) {
-    return page.getByLabel('View next asset');
-  },
-
-  previousButton(page: Page) {
-    return page.getByLabel('View previous asset');
-  },
-
-  async expectNextButtonVisible(page: Page) {
-    await expect(this.nextButton(page)).toBeVisible();
-  },
-
-  async expectNextButtonNotVisible(page: Page) {
-    await expect(this.nextButton(page)).toHaveCount(0);
-  },
-
-  async expectPreviousButtonVisible(page: Page) {
-    await expect(this.previousButton(page)).toBeVisible();
-  },
-
-  async expectPreviousButtonNotVisible(page: Page) {
-    await expect(this.previousButton(page)).toHaveCount(0);
-  },
-
-  async clickNextButton(page: Page) {
-    await this.nextButton(page).click();
-  },
-
-  async clickPreviousButton(page: Page) {
-    await this.previousButton(page).click();
-  },
-
-  async closeViewer(page: Page) {
-    await page.keyboard.press('Escape');
-    await expect(this.locator(page)).not.toBeVisible();
-  },
-
-  getCurrentAssetId(page: Page): string | null {
-    const url = new URL(page.url());
-    return getAssetIdFromUrl(url);
-  },
-
-  async expectCurrentAssetId(page: Page, expectedAssetId: string) {
-    await expect.poll(() => this.getCurrentAssetId(page)).toBe(expectedAssetId);
-  },
-};
@@ -1,116 +0,0 @@
-import { faker } from '@faker-js/faker';
-import { expect, test } from '@playwright/test';
-import {
-  Changes,
-  createDefaultTimelineConfig,
-  generateTimelineData,
-  TimelineAssetConfig,
-  TimelineData,
-} from 'src/ui/generators/timeline';
-import { setupBaseMockApiRoutes } from 'src/ui/mock-network/base-network';
-import { setupTimelineMockApiRoutes, TimelineTestContext } from 'src/ui/mock-network/timeline-network';
-import { assetViewerUtils } from '../timeline/utils';
-
-const buildSearchUrl = (assetId: string) => {
-  const searchQuery = encodeURIComponent(JSON.stringify({ originalFileName: 'test' }));
-  return `/search/photos/${assetId}?query=${searchQuery}`;
-};
-
-test.describe.configure({ mode: 'parallel' });
-test.describe('search gallery-viewer', () => {
-  let adminUserId: string;
-  let timelineRestData: TimelineData;
-  const assets: TimelineAssetConfig[] = [];
-  const testContext = new TimelineTestContext();
-  const changes: Changes = {
-    albumAdditions: [],
-    assetDeletions: [],
-    assetArchivals: [],
-    assetFavorites: [],
-  };
-
-  test.beforeAll(async () => {
-    adminUserId = faker.string.uuid();
-    testContext.adminId = adminUserId;
-    timelineRestData = generateTimelineData({ ...createDefaultTimelineConfig(), ownerId: adminUserId });
-    for (const timeBucket of timelineRestData.buckets.values()) {
-      assets.push(...timeBucket);
-    }
-  });
-
-  test.beforeEach(async ({ context }) => {
-    await setupBaseMockApiRoutes(context, adminUserId);
-    await setupTimelineMockApiRoutes(context, timelineRestData, changes, testContext);
-
-    await context.route('**/api/search/metadata', async (route, request) => {
-      if (request.method() === 'POST') {
-        const searchAssets = assets.slice(0, 5).filter((asset) => !changes.assetDeletions.includes(asset.id));
-        return route.fulfill({
-          status: 200,
-          contentType: 'application/json',
-          json: {
-            albums: { total: 0, count: 0, items: [], facets: [] },
-            assets: {
-              total: searchAssets.length,
-              count: searchAssets.length,
-              items: searchAssets,
-              facets: [],
-              nextPage: null,
-            },
-          },
-        });
-      }
-      await route.fallback();
-    });
-  });
-
-  test.afterEach(() => {
-    testContext.slowBucket = false;
-    changes.albumAdditions = [];
-    changes.assetDeletions = [];
-    changes.assetArchivals = [];
-    changes.assetFavorites = [];
-  });
-
-  test.describe('/search/photos/:id', () => {
-    test('Deleting a photo advances to the next photo', async ({ page }) => {
-      const asset = assets[0];
-      await page.goto(buildSearchUrl(asset.id));
-      await assetViewerUtils.waitForViewerLoad(page, asset);
-      await page.getByLabel('Delete').click();
-      await assetViewerUtils.waitForViewerLoad(page, assets[1]);
-    });
-
-    test('Deleting two photos in a row advances to the next photo each time', async ({ page }) => {
-      const asset = assets[0];
-      await page.goto(buildSearchUrl(asset.id));
-      await assetViewerUtils.waitForViewerLoad(page, asset);
-      await page.getByLabel('Delete').click();
-      await assetViewerUtils.waitForViewerLoad(page, assets[1]);
-      await page.getByLabel('Delete').click();
-      await assetViewerUtils.waitForViewerLoad(page, assets[2]);
-    });
-
-    test('Navigating backward then deleting advances to the next photo', async ({ page }) => {
-      const asset = assets[1];
-      await page.goto(buildSearchUrl(asset.id));
-      await assetViewerUtils.waitForViewerLoad(page, asset);
-      await page.getByLabel('View previous asset').click();
-      await assetViewerUtils.waitForViewerLoad(page, assets[0]);
-      await page.getByLabel('View next asset').click();
-      await assetViewerUtils.waitForViewerLoad(page, asset);
-      await page.getByLabel('Delete').click();
-      await assetViewerUtils.waitForViewerLoad(page, assets[2]);
-    });
-
-    test('Deleting the last photo advances to the previous photo', async ({ page }) => {
-      const lastAsset = assets[4];
-      await page.goto(buildSearchUrl(lastAsset.id));
-      await assetViewerUtils.waitForViewerLoad(page, lastAsset);
-      await expect(page.getByLabel('View next asset')).toHaveCount(0);
-      await page.getByLabel('Delete').click();
-      await assetViewerUtils.waitForViewerLoad(page, assets[3]);
-      await expect(page.getByLabel('View previous asset')).toBeVisible();
-    });
-  });
-});
@@ -8,11 +8,11 @@ import {
   selectRandom,
   TimelineAssetConfig,
   TimelineData,
-} from 'src/ui/generators/timeline';
-import { setupBaseMockApiRoutes } from 'src/ui/mock-network/base-network';
-import { setupTimelineMockApiRoutes, TimelineTestContext } from 'src/ui/mock-network/timeline-network';
+} from 'src/generators/timeline';
+import { setupBaseMockApiRoutes } from 'src/mock-network/base-network';
+import { setupTimelineMockApiRoutes, TimelineTestContext } from 'src/mock-network/timeline-network';
 import { utils } from 'src/utils';
-import { assetViewerUtils } from '../timeline/utils';
+import { assetViewerUtils } from 'src/web/specs/timeline/utils';
 
 test.describe.configure({ mode: 'parallel' });
 test.describe('asset-viewer', () => {
@@ -12,15 +12,18 @@ import {
   selectRandomMultiple,
   TimelineAssetConfig,
   TimelineData,
-} from 'src/ui/generators/timeline';
-import { setupBaseMockApiRoutes } from 'src/ui/mock-network/base-network';
-import {
-  pageRoutePromise,
-  setupTimelineMockApiRoutes,
-  TimelineTestContext,
-} from 'src/ui/mock-network/timeline-network';
+} from 'src/generators/timeline';
+import { setupBaseMockApiRoutes } from 'src/mock-network/base-network';
+import { pageRoutePromise, setupTimelineMockApiRoutes, TimelineTestContext } from 'src/mock-network/timeline-network';
 import { utils } from 'src/utils';
-import { assetViewerUtils, padYearMonth, pageUtils, poll, thumbnailUtils, timelineUtils } from './utils';
+import {
+  assetViewerUtils,
+  padYearMonth,
+  pageUtils,
+  poll,
+  thumbnailUtils,
+  timelineUtils,
+} from 'src/web/specs/timeline/utils';
 
 test.describe.configure({ mode: 'parallel' });
 test.describe('Timeline', () => {
@@ -1,6 +1,6 @@
 import { BrowserContext, expect, Page } from '@playwright/test';
 import { DateTime } from 'luxon';
-import { TimelineAssetConfig } from 'src/ui/generators/timeline';
+import { TimelineAssetConfig } from 'src/generators/timeline';
 
 export const sleep = (ms: number) => {
   return new Promise((resolve) => setTimeout(resolve, ms));
Some files were not shown because too many files have changed in this diff