feat: initial asset editing implementation

feat: db insertions for edits

feat: get asset edits endpoint

feat: wip apply edits

feat: finish asset files changes

feat: wip

feat: wip

fix: openapi

fix: tests

the failing tests were badly set up; simply adding [] to the param list resolved them

feat: more wip

feat: more wip

feat: some more tests and fixes

chore: fix default for getting thumbnail and add todo for tests

feat: LRTB validation

chore: code cleanup

chore: more test checks for cleanup

feat: show edit pane

fix: state issues

chore: restructure web editor

feat: restructure edit manager

feat: refactor cropManager

chore: combine all editing

chore: web editing improvements

fix: handling when no crops

fix: openapi enum

chore: more edit refactoring

fix: make image decoding more efficient

chore: more refactoring

fix: getCrop LRTB algorithm

fix: missing await

chore: use relative coordinates for edit

chore: update sql

fix: use resize observer instead of svelte:doc resize hook

chore: simplify quad box generation

fix: light mode styling

chore: refactor to not be a recursive job call

this simplifies the logic, and the job now only completes once the thumbhash and the other derived files are properly updated

chore: more refactoring

feat: use affine transforms for most operations

feat: bounding box edit transformation

feat: tests

chore: sql and openapi sync

fix: medium tests

fix: rotated OCR

chore: cleanup transform test

fix: remove rebase issue

fix(server): block edits for live photos, gifs, panoramic photos

fix: openapi enum validation

chore: rename edit endpoint

chore: remove public modifiers

feat: delete endpoint

chore: use === and !== explicitly

fix: require 1 edit for the editAsset endpoint

fix: remove thumbnail edit notification and use on_upload_success instead

fix: primary key on asset edit table

chore: refactor to isPanorama

chore: rename editRepository to assetEditRepository

fix: missing toLowerCase

fix: db migrations

chore: update sql files
Branch: pull/24155/head
Author: bwees, 2025-11-20 23:26:45 -06:00
Parent: 81ed88fb99
Commit: 7f5b5aac9a
93 changed files with 5858 additions and 1304 deletions

View File

@ -934,6 +934,7 @@
"editor_close_without_save_title": "Close editor?",
"editor_crop_tool_h2_aspect_ratios": "Aspect ratios",
"editor_crop_tool_h2_rotation": "Rotation",
"editor_reset_all_changes": "Reset all changes",
"email": "Email",
"email_notifications": "Email notifications",
"empty_folder": "This folder is empty",

View File

@ -101,7 +101,9 @@ Class | Method | HTTP request | Description
*AssetsApi* | [**deleteAssetMetadata**](doc//AssetsApi.md#deleteassetmetadata) | **DELETE** /assets/{id}/metadata/{key} | Delete asset metadata by key
*AssetsApi* | [**deleteAssets**](doc//AssetsApi.md#deleteassets) | **DELETE** /assets | Delete assets
*AssetsApi* | [**downloadAsset**](doc//AssetsApi.md#downloadasset) | **GET** /assets/{id}/original | Download original asset
*AssetsApi* | [**editAsset**](doc//AssetsApi.md#editasset) | **PUT** /assets/{id}/edits | Applies edits to an existing asset
*AssetsApi* | [**getAllUserAssetsByDeviceId**](doc//AssetsApi.md#getalluserassetsbydeviceid) | **GET** /assets/device/{deviceId} | Retrieve assets by device ID
*AssetsApi* | [**getAssetEdits**](doc//AssetsApi.md#getassetedits) | **GET** /assets/{id}/edits | Retrieve edits for an existing asset
*AssetsApi* | [**getAssetInfo**](doc//AssetsApi.md#getassetinfo) | **GET** /assets/{id} | Retrieve an asset
*AssetsApi* | [**getAssetMetadata**](doc//AssetsApi.md#getassetmetadata) | **GET** /assets/{id}/metadata | Get asset metadata
*AssetsApi* | [**getAssetMetadataByKey**](doc//AssetsApi.md#getassetmetadatabykey) | **GET** /assets/{id}/metadata/{key} | Retrieve asset metadata by key
@ -109,6 +111,7 @@ Class | Method | HTTP request | Description
*AssetsApi* | [**getAssetStatistics**](doc//AssetsApi.md#getassetstatistics) | **GET** /assets/statistics | Get asset statistics
*AssetsApi* | [**getRandom**](doc//AssetsApi.md#getrandom) | **GET** /assets/random | Get random assets
*AssetsApi* | [**playAssetVideo**](doc//AssetsApi.md#playassetvideo) | **GET** /assets/{id}/video/playback | Play asset video
*AssetsApi* | [**removeAssetEdits**](doc//AssetsApi.md#removeassetedits) | **DELETE** /assets/{id}/edits | Remove edits from an existing asset
*AssetsApi* | [**replaceAsset**](doc//AssetsApi.md#replaceasset) | **PUT** /assets/{id}/original | Replace asset
*AssetsApi* | [**runAssetJobs**](doc//AssetsApi.md#runassetjobs) | **POST** /assets/jobs | Run an asset job
*AssetsApi* | [**updateAsset**](doc//AssetsApi.md#updateasset) | **PUT** /assets/{id} | Update an asset
@ -343,6 +346,8 @@ Class | Method | HTTP request | Description
- [AssetCopyDto](doc//AssetCopyDto.md)
- [AssetDeltaSyncDto](doc//AssetDeltaSyncDto.md)
- [AssetDeltaSyncResponseDto](doc//AssetDeltaSyncResponseDto.md)
- [AssetEditsDto](doc//AssetEditsDto.md)
- [AssetEditsDtoEditsInner](doc//AssetEditsDtoEditsInner.md)
- [AssetFaceCreateDto](doc//AssetFaceCreateDto.md)
- [AssetFaceDeleteDto](doc//AssetFaceDeleteDto.md)
- [AssetFaceResponseDto](doc//AssetFaceResponseDto.md)
@ -386,6 +391,7 @@ Class | Method | HTTP request | Description
- [CreateAlbumDto](doc//CreateAlbumDto.md)
- [CreateLibraryDto](doc//CreateLibraryDto.md)
- [CreateProfileImageResponseDto](doc//CreateProfileImageResponseDto.md)
- [CropParameters](doc//CropParameters.md)
- [DatabaseBackupConfig](doc//DatabaseBackupConfig.md)
- [DownloadArchiveInfo](doc//DownloadArchiveInfo.md)
- [DownloadInfoDto](doc//DownloadInfoDto.md)
@ -394,6 +400,11 @@ Class | Method | HTTP request | Description
- [DownloadUpdate](doc//DownloadUpdate.md)
- [DuplicateDetectionConfig](doc//DuplicateDetectionConfig.md)
- [DuplicateResponseDto](doc//DuplicateResponseDto.md)
- [EditAction](doc//EditAction.md)
- [EditActionCrop](doc//EditActionCrop.md)
- [EditActionListDto](doc//EditActionListDto.md)
- [EditActionMirror](doc//EditActionMirror.md)
- [EditActionRotate](doc//EditActionRotate.md)
- [EmailNotificationsResponse](doc//EmailNotificationsResponse.md)
- [EmailNotificationsUpdate](doc//EmailNotificationsUpdate.md)
- [ExifResponseDto](doc//ExifResponseDto.md)
@ -430,6 +441,8 @@ Class | Method | HTTP request | Description
- [MemoryUpdateDto](doc//MemoryUpdateDto.md)
- [MergePersonDto](doc//MergePersonDto.md)
- [MetadataSearchDto](doc//MetadataSearchDto.md)
- [MirrorAxis](doc//MirrorAxis.md)
- [MirrorParameters](doc//MirrorParameters.md)
- [NotificationCreateDto](doc//NotificationCreateDto.md)
- [NotificationDeleteAllDto](doc//NotificationDeleteAllDto.md)
- [NotificationDto](doc//NotificationDto.md)
@ -489,6 +502,7 @@ Class | Method | HTTP request | Description
- [ReactionLevel](doc//ReactionLevel.md)
- [ReactionType](doc//ReactionType.md)
- [ReverseGeocodingStateResponseDto](doc//ReverseGeocodingStateResponseDto.md)
- [RotateParameters](doc//RotateParameters.md)
- [SearchAlbumResponseDto](doc//SearchAlbumResponseDto.md)
- [SearchAssetResponseDto](doc//SearchAssetResponseDto.md)
- [SearchExploreItem](doc//SearchExploreItem.md)
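
The README diff above surfaces the three new edits operations on AssetsApi. Purely as an illustration of the request shape, here is a minimal TypeScript sketch that applies a crop, a rotate, and a mirror in one call using the generated SDK functions that appear later in this diff; the `@immich/sdk` import path, the asset id, and the client configuration are assumptions rather than part of this change.

```typescript
import { editAsset, EditAction, MirrorAxis, type EditActionListDto } from '@immich/sdk';

// Hypothetical asset id; base URL and API-key auth are assumed to be configured for the SDK elsewhere.
const assetId = '00000000-0000-0000-0000-000000000000';

// At least one edit is required (see "fix: require 1 edit for the editAsset endpoint").
const editActionListDto: EditActionListDto = {
  edits: [
    { action: EditAction.Crop, parameters: { x: 100, y: 50, width: 800, height: 600 } },
    { action: EditAction.Rotate, parameters: { angle: 90 } },
    { action: EditAction.Mirror, parameters: { axis: MirrorAxis.Horizontal } },
  ],
};

// PUT /assets/{id}/edits (requires the asset.edit permission)
const saved = await editAsset({ id: assetId, editActionListDto });
console.log(saved.edits.length); // 3
```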

View File

@ -95,6 +95,8 @@ part 'model/asset_bulk_upload_check_result.dart';
part 'model/asset_copy_dto.dart';
part 'model/asset_delta_sync_dto.dart';
part 'model/asset_delta_sync_response_dto.dart';
part 'model/asset_edits_dto.dart';
part 'model/asset_edits_dto_edits_inner.dart';
part 'model/asset_face_create_dto.dart';
part 'model/asset_face_delete_dto.dart';
part 'model/asset_face_response_dto.dart';
@ -138,6 +140,7 @@ part 'model/contributor_count_response_dto.dart';
part 'model/create_album_dto.dart';
part 'model/create_library_dto.dart';
part 'model/create_profile_image_response_dto.dart';
part 'model/crop_parameters.dart';
part 'model/database_backup_config.dart';
part 'model/download_archive_info.dart';
part 'model/download_info_dto.dart';
@ -146,6 +149,11 @@ part 'model/download_response_dto.dart';
part 'model/download_update.dart';
part 'model/duplicate_detection_config.dart';
part 'model/duplicate_response_dto.dart';
part 'model/edit_action.dart';
part 'model/edit_action_crop.dart';
part 'model/edit_action_list_dto.dart';
part 'model/edit_action_mirror.dart';
part 'model/edit_action_rotate.dart';
part 'model/email_notifications_response.dart';
part 'model/email_notifications_update.dart';
part 'model/exif_response_dto.dart';
@ -182,6 +190,8 @@ part 'model/memory_type.dart';
part 'model/memory_update_dto.dart';
part 'model/merge_person_dto.dart';
part 'model/metadata_search_dto.dart';
part 'model/mirror_axis.dart';
part 'model/mirror_parameters.dart';
part 'model/notification_create_dto.dart';
part 'model/notification_delete_all_dto.dart';
part 'model/notification_dto.dart';
@ -241,6 +251,7 @@ part 'model/ratings_update.dart';
part 'model/reaction_level.dart';
part 'model/reaction_type.dart';
part 'model/reverse_geocoding_state_response_dto.dart';
part 'model/rotate_parameters.dart';
part 'model/search_album_response_dto.dart';
part 'model/search_asset_response_dto.dart';
part 'model/search_explore_item.dart';

View File

@ -288,10 +288,12 @@ class AssetsApi {
///
/// * [String] id (required):
///
/// * [bool] edited:
///
/// * [String] key:
///
/// * [String] slug:
Future<Response> downloadAssetWithHttpInfo(String id, { String? key, String? slug, }) async {
Future<Response> downloadAssetWithHttpInfo(String id, { bool? edited, String? key, String? slug, }) async {
// ignore: prefer_const_declarations
final apiPath = r'/assets/{id}/original'
.replaceAll('{id}', id);
@ -303,6 +305,9 @@ class AssetsApi {
final headerParams = <String, String>{};
final formParams = <String, String>{};
if (edited != null) {
queryParams.addAll(_queryParams('', 'edited', edited));
}
if (key != null) {
queryParams.addAll(_queryParams('', 'key', key));
}
@ -332,11 +337,13 @@ class AssetsApi {
///
/// * [String] id (required):
///
/// * [bool] edited:
///
/// * [String] key:
///
/// * [String] slug:
Future<MultipartFile?> downloadAsset(String id, { String? key, String? slug, }) async {
final response = await downloadAssetWithHttpInfo(id, key: key, slug: slug, );
Future<MultipartFile?> downloadAsset(String id, { bool? edited, String? key, String? slug, }) async {
final response = await downloadAssetWithHttpInfo(id, edited: edited, key: key, slug: slug, );
if (response.statusCode >= HttpStatus.badRequest) {
throw ApiException(response.statusCode, await _decodeBodyBytes(response));
}
@ -350,6 +357,67 @@ class AssetsApi {
return null;
}
/// Applies edits to an existing asset
///
/// Applies a series of edit actions (crop, rotate, mirror) to the specified asset.
///
/// Note: This method returns the HTTP [Response].
///
/// Parameters:
///
/// * [String] id (required):
///
/// * [EditActionListDto] editActionListDto (required):
Future<Response> editAssetWithHttpInfo(String id, EditActionListDto editActionListDto,) async {
// ignore: prefer_const_declarations
final apiPath = r'/assets/{id}/edits'
.replaceAll('{id}', id);
// ignore: prefer_final_locals
Object? postBody = editActionListDto;
final queryParams = <QueryParam>[];
final headerParams = <String, String>{};
final formParams = <String, String>{};
const contentTypes = <String>['application/json'];
return apiClient.invokeAPI(
apiPath,
'PUT',
queryParams,
postBody,
headerParams,
formParams,
contentTypes.isEmpty ? null : contentTypes.first,
);
}
/// Applies edits to an existing asset
///
/// Applies a series of edit actions (crop, rotate, mirror) to the specified asset.
///
/// Parameters:
///
/// * [String] id (required):
///
/// * [EditActionListDto] editActionListDto (required):
Future<AssetEditsDto?> editAsset(String id, EditActionListDto editActionListDto,) async {
final response = await editAssetWithHttpInfo(id, editActionListDto,);
if (response.statusCode >= HttpStatus.badRequest) {
throw ApiException(response.statusCode, await _decodeBodyBytes(response));
}
// When a remote server returns no body with a status of 204, we shall not decode it.
// At the time of writing this, `dart:convert` will throw an "Unexpected end of input"
// FormatException when trying to decode an empty string.
if (response.body.isNotEmpty && response.statusCode != HttpStatus.noContent) {
return await apiClient.deserializeAsync(await _decodeBodyBytes(response), 'AssetEditsDto',) as AssetEditsDto;
}
return null;
}
/// Retrieve assets by device ID
///
/// Get all assets of a device that are in the database, IDs only.
@ -410,6 +478,63 @@ class AssetsApi {
return null;
}
/// Retrieve edits for an existing asset
///
/// Retrieve a series of edit actions (crop, rotate, mirror) associated with the specified asset.
///
/// Note: This method returns the HTTP [Response].
///
/// Parameters:
///
/// * [String] id (required):
Future<Response> getAssetEditsWithHttpInfo(String id,) async {
// ignore: prefer_const_declarations
final apiPath = r'/assets/{id}/edits'
.replaceAll('{id}', id);
// ignore: prefer_final_locals
Object? postBody;
final queryParams = <QueryParam>[];
final headerParams = <String, String>{};
final formParams = <String, String>{};
const contentTypes = <String>[];
return apiClient.invokeAPI(
apiPath,
'GET',
queryParams,
postBody,
headerParams,
formParams,
contentTypes.isEmpty ? null : contentTypes.first,
);
}
/// Retrieve edits for an existing asset
///
/// Retrieve a series of edit actions (crop, rotate, mirror) associated with the specified asset.
///
/// Parameters:
///
/// * [String] id (required):
Future<AssetEditsDto?> getAssetEdits(String id,) async {
final response = await getAssetEditsWithHttpInfo(id,);
if (response.statusCode >= HttpStatus.badRequest) {
throw ApiException(response.statusCode, await _decodeBodyBytes(response));
}
// When a remote server returns no body with a status of 204, we shall not decode it.
// At the time of writing this, `dart:convert` will throw an "Unexpected end of input"
// FormatException when trying to decode an empty string.
if (response.body.isNotEmpty && response.statusCode != HttpStatus.noContent) {
return await apiClient.deserializeAsync(await _decodeBodyBytes(response), 'AssetEditsDto',) as AssetEditsDto;
}
return null;
}
/// Retrieve an asset
///
/// Retrieve detailed information about a specific asset.
@ -873,6 +998,55 @@ class AssetsApi {
return null;
}
/// Remove edits from an existing asset
///
/// Removes all edit actions (crop, rotate, mirror) associated with the specified asset.
///
/// Note: This method returns the HTTP [Response].
///
/// Parameters:
///
/// * [String] id (required):
Future<Response> removeAssetEditsWithHttpInfo(String id,) async {
// ignore: prefer_const_declarations
final apiPath = r'/assets/{id}/edits'
.replaceAll('{id}', id);
// ignore: prefer_final_locals
Object? postBody;
final queryParams = <QueryParam>[];
final headerParams = <String, String>{};
final formParams = <String, String>{};
const contentTypes = <String>[];
return apiClient.invokeAPI(
apiPath,
'DELETE',
queryParams,
postBody,
headerParams,
formParams,
contentTypes.isEmpty ? null : contentTypes.first,
);
}
/// Remove edits from an existing asset
///
/// Removes all edit actions (crop, rotate, mirror) associated with the specified asset.
///
/// Parameters:
///
/// * [String] id (required):
Future<void> removeAssetEdits(String id,) async {
final response = await removeAssetEditsWithHttpInfo(id,);
if (response.statusCode >= HttpStatus.badRequest) {
throw ApiException(response.statusCode, await _decodeBodyBytes(response));
}
}
/// Replace asset
///
/// Replace the asset with new file, without changing its id.
@ -1418,12 +1592,14 @@ class AssetsApi {
///
/// * [String] id (required):
///
/// * [bool] edited:
///
/// * [String] key:
///
/// * [AssetMediaSize] size:
///
/// * [String] slug:
Future<Response> viewAssetWithHttpInfo(String id, { String? key, AssetMediaSize? size, String? slug, }) async {
Future<Response> viewAssetWithHttpInfo(String id, { bool? edited, String? key, AssetMediaSize? size, String? slug, }) async {
// ignore: prefer_const_declarations
final apiPath = r'/assets/{id}/thumbnail'
.replaceAll('{id}', id);
@ -1435,6 +1611,9 @@ class AssetsApi {
final headerParams = <String, String>{};
final formParams = <String, String>{};
if (edited != null) {
queryParams.addAll(_queryParams('', 'edited', edited));
}
if (key != null) {
queryParams.addAll(_queryParams('', 'key', key));
}
@ -1467,13 +1646,15 @@ class AssetsApi {
///
/// * [String] id (required):
///
/// * [bool] edited:
///
/// * [String] key:
///
/// * [AssetMediaSize] size:
///
/// * [String] slug:
Future<MultipartFile?> viewAsset(String id, { String? key, AssetMediaSize? size, String? slug, }) async {
final response = await viewAssetWithHttpInfo(id, key: key, size: size, slug: slug, );
Future<MultipartFile?> viewAsset(String id, { bool? edited, String? key, AssetMediaSize? size, String? slug, }) async {
final response = await viewAssetWithHttpInfo(id, edited: edited, key: key, size: size, slug: slug, );
if (response.statusCode >= HttpStatus.badRequest) {
throw ApiException(response.statusCode, await _decodeBodyBytes(response));
}
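
Alongside the Dart changes above, downloadAsset and viewAsset gain an optional `edited` query flag (the spec diff further down gives it a default of true on the original download), so callers can choose between the edited rendition and the untouched source. A short, hedged sketch against the TypeScript SDK counterparts shown near the end of this diff; the import path and asset id are illustrative.

```typescript
import { downloadAsset, viewAsset, AssetMediaSize } from '@immich/sdk';

const assetId = '00000000-0000-0000-0000-000000000000'; // hypothetical

// Per the spec diff, `edited` defaults to true on downloadAsset,
// so pass edited: false to fetch the untouched source file.
const originalBlob = await downloadAsset({ id: assetId, edited: false });

// Thumbnails can likewise be requested with the stored edits applied.
const editedFullsize = await viewAsset({ id: assetId, edited: true, size: AssetMediaSize.Fullsize });

console.log(originalBlob.size, editedFullsize.size);
```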

View File

@ -238,6 +238,10 @@ class ApiClient {
return AssetDeltaSyncDto.fromJson(value);
case 'AssetDeltaSyncResponseDto':
return AssetDeltaSyncResponseDto.fromJson(value);
case 'AssetEditsDto':
return AssetEditsDto.fromJson(value);
case 'AssetEditsDtoEditsInner':
return AssetEditsDtoEditsInner.fromJson(value);
case 'AssetFaceCreateDto':
return AssetFaceCreateDto.fromJson(value);
case 'AssetFaceDeleteDto':
@ -324,6 +328,8 @@ class ApiClient {
return CreateLibraryDto.fromJson(value);
case 'CreateProfileImageResponseDto':
return CreateProfileImageResponseDto.fromJson(value);
case 'CropParameters':
return CropParameters.fromJson(value);
case 'DatabaseBackupConfig':
return DatabaseBackupConfig.fromJson(value);
case 'DownloadArchiveInfo':
@ -340,6 +346,16 @@ class ApiClient {
return DuplicateDetectionConfig.fromJson(value);
case 'DuplicateResponseDto':
return DuplicateResponseDto.fromJson(value);
case 'EditAction':
return EditActionTypeTransformer().decode(value);
case 'EditActionCrop':
return EditActionCrop.fromJson(value);
case 'EditActionListDto':
return EditActionListDto.fromJson(value);
case 'EditActionMirror':
return EditActionMirror.fromJson(value);
case 'EditActionRotate':
return EditActionRotate.fromJson(value);
case 'EmailNotificationsResponse':
return EmailNotificationsResponse.fromJson(value);
case 'EmailNotificationsUpdate':
@ -412,6 +428,10 @@ class ApiClient {
return MergePersonDto.fromJson(value);
case 'MetadataSearchDto':
return MetadataSearchDto.fromJson(value);
case 'MirrorAxis':
return MirrorAxisTypeTransformer().decode(value);
case 'MirrorParameters':
return MirrorParameters.fromJson(value);
case 'NotificationCreateDto':
return NotificationCreateDto.fromJson(value);
case 'NotificationDeleteAllDto':
@ -530,6 +550,8 @@ class ApiClient {
return ReactionTypeTypeTransformer().decode(value);
case 'ReverseGeocodingStateResponseDto':
return ReverseGeocodingStateResponseDto.fromJson(value);
case 'RotateParameters':
return RotateParameters.fromJson(value);
case 'SearchAlbumResponseDto':
return SearchAlbumResponseDto.fromJson(value);
case 'SearchAssetResponseDto':

View File

@ -91,6 +91,9 @@ String parameterToString(dynamic value) {
if (value is Colorspace) {
return ColorspaceTypeTransformer().encode(value).toString();
}
if (value is EditAction) {
return EditActionTypeTransformer().encode(value).toString();
}
if (value is ImageFormat) {
return ImageFormatTypeTransformer().encode(value).toString();
}
@ -112,6 +115,9 @@ String parameterToString(dynamic value) {
if (value is MemoryType) {
return MemoryTypeTypeTransformer().encode(value).toString();
}
if (value is MirrorAxis) {
return MirrorAxisTypeTransformer().encode(value).toString();
}
if (value is NotificationLevel) {
return NotificationLevelTypeTransformer().encode(value).toString();
}

View File

@ -0,0 +1,108 @@
//
// AUTO-GENERATED FILE, DO NOT MODIFY!
//
// @dart=2.18
// ignore_for_file: unused_element, unused_import
// ignore_for_file: always_put_required_named_parameters_first
// ignore_for_file: constant_identifier_names
// ignore_for_file: lines_longer_than_80_chars
part of openapi.api;
class AssetEditsDto {
/// Returns a new [AssetEditsDto] instance.
AssetEditsDto({
required this.assetId,
this.edits = const [],
});
String assetId;
/// list of edits
List<AssetEditsDtoEditsInner> edits;
@override
bool operator ==(Object other) => identical(this, other) || other is AssetEditsDto &&
other.assetId == assetId &&
_deepEquality.equals(other.edits, edits);
@override
int get hashCode =>
// ignore: unnecessary_parenthesis
(assetId.hashCode) +
(edits.hashCode);
@override
String toString() => 'AssetEditsDto[assetId=$assetId, edits=$edits]';
Map<String, dynamic> toJson() {
final json = <String, dynamic>{};
json[r'assetId'] = this.assetId;
json[r'edits'] = this.edits;
return json;
}
/// Returns a new [AssetEditsDto] instance and imports its values from
/// [value] if it's a [Map], null otherwise.
// ignore: prefer_constructors_over_static_methods
static AssetEditsDto? fromJson(dynamic value) {
upgradeDto(value, "AssetEditsDto");
if (value is Map) {
final json = value.cast<String, dynamic>();
return AssetEditsDto(
assetId: mapValueOfType<String>(json, r'assetId')!,
edits: AssetEditsDtoEditsInner.listFromJson(json[r'edits']),
);
}
return null;
}
static List<AssetEditsDto> listFromJson(dynamic json, {bool growable = false,}) {
final result = <AssetEditsDto>[];
if (json is List && json.isNotEmpty) {
for (final row in json) {
final value = AssetEditsDto.fromJson(row);
if (value != null) {
result.add(value);
}
}
}
return result.toList(growable: growable);
}
static Map<String, AssetEditsDto> mapFromJson(dynamic json) {
final map = <String, AssetEditsDto>{};
if (json is Map && json.isNotEmpty) {
json = json.cast<String, dynamic>(); // ignore: parameter_assignments
for (final entry in json.entries) {
final value = AssetEditsDto.fromJson(entry.value);
if (value != null) {
map[entry.key] = value;
}
}
}
return map;
}
// maps a json object with a list of AssetEditsDto-objects as value to a dart map
static Map<String, List<AssetEditsDto>> mapListFromJson(dynamic json, {bool growable = false,}) {
final map = <String, List<AssetEditsDto>>{};
if (json is Map && json.isNotEmpty) {
// ignore: parameter_assignments
json = json.cast<String, dynamic>();
for (final entry in json.entries) {
map[entry.key] = AssetEditsDto.listFromJson(entry.value, growable: growable,);
}
}
return map;
}
/// The list of required keys that must be present in a JSON.
static const requiredKeys = <String>{
'assetId',
'edits',
};
}

View File

@ -0,0 +1,107 @@
//
// AUTO-GENERATED FILE, DO NOT MODIFY!
//
// @dart=2.18
// ignore_for_file: unused_element, unused_import
// ignore_for_file: always_put_required_named_parameters_first
// ignore_for_file: constant_identifier_names
// ignore_for_file: lines_longer_than_80_chars
part of openapi.api;
class AssetEditsDtoEditsInner {
/// Returns a new [AssetEditsDtoEditsInner] instance.
AssetEditsDtoEditsInner({
required this.action,
required this.parameters,
});
EditAction action;
MirrorParameters parameters;
@override
bool operator ==(Object other) => identical(this, other) || other is AssetEditsDtoEditsInner &&
other.action == action &&
other.parameters == parameters;
@override
int get hashCode =>
// ignore: unnecessary_parenthesis
(action.hashCode) +
(parameters.hashCode);
@override
String toString() => 'AssetEditsDtoEditsInner[action=$action, parameters=$parameters]';
Map<String, dynamic> toJson() {
final json = <String, dynamic>{};
json[r'action'] = this.action;
json[r'parameters'] = this.parameters;
return json;
}
/// Returns a new [AssetEditsDtoEditsInner] instance and imports its values from
/// [value] if it's a [Map], null otherwise.
// ignore: prefer_constructors_over_static_methods
static AssetEditsDtoEditsInner? fromJson(dynamic value) {
upgradeDto(value, "AssetEditsDtoEditsInner");
if (value is Map) {
final json = value.cast<String, dynamic>();
return AssetEditsDtoEditsInner(
action: EditAction.fromJson(json[r'action'])!,
parameters: MirrorParameters.fromJson(json[r'parameters'])!,
);
}
return null;
}
static List<AssetEditsDtoEditsInner> listFromJson(dynamic json, {bool growable = false,}) {
final result = <AssetEditsDtoEditsInner>[];
if (json is List && json.isNotEmpty) {
for (final row in json) {
final value = AssetEditsDtoEditsInner.fromJson(row);
if (value != null) {
result.add(value);
}
}
}
return result.toList(growable: growable);
}
static Map<String, AssetEditsDtoEditsInner> mapFromJson(dynamic json) {
final map = <String, AssetEditsDtoEditsInner>{};
if (json is Map && json.isNotEmpty) {
json = json.cast<String, dynamic>(); // ignore: parameter_assignments
for (final entry in json.entries) {
final value = AssetEditsDtoEditsInner.fromJson(entry.value);
if (value != null) {
map[entry.key] = value;
}
}
}
return map;
}
// maps a json object with a list of AssetEditsDtoEditsInner-objects as value to a dart map
static Map<String, List<AssetEditsDtoEditsInner>> mapListFromJson(dynamic json, {bool growable = false,}) {
final map = <String, List<AssetEditsDtoEditsInner>>{};
if (json is Map && json.isNotEmpty) {
// ignore: parameter_assignments
json = json.cast<String, dynamic>();
for (final entry in json.entries) {
map[entry.key] = AssetEditsDtoEditsInner.listFromJson(entry.value, growable: growable,);
}
}
return map;
}
/// The list of required keys that must be present in a JSON.
static const requiredKeys = <String>{
'action',
'parameters',
};
}

View File

@ -0,0 +1,135 @@
//
// AUTO-GENERATED FILE, DO NOT MODIFY!
//
// @dart=2.18
// ignore_for_file: unused_element, unused_import
// ignore_for_file: always_put_required_named_parameters_first
// ignore_for_file: constant_identifier_names
// ignore_for_file: lines_longer_than_80_chars
part of openapi.api;
class CropParameters {
/// Returns a new [CropParameters] instance.
CropParameters({
required this.height,
required this.width,
required this.x,
required this.y,
});
/// Height of the crop
///
/// Minimum value: 1
num height;
/// Width of the crop
///
/// Minimum value: 1
num width;
/// Top-Left X coordinate of crop
///
/// Minimum value: 0
num x;
/// Top-Left Y coordinate of crop
///
/// Minimum value: 0
num y;
@override
bool operator ==(Object other) => identical(this, other) || other is CropParameters &&
other.height == height &&
other.width == width &&
other.x == x &&
other.y == y;
@override
int get hashCode =>
// ignore: unnecessary_parenthesis
(height.hashCode) +
(width.hashCode) +
(x.hashCode) +
(y.hashCode);
@override
String toString() => 'CropParameters[height=$height, width=$width, x=$x, y=$y]';
Map<String, dynamic> toJson() {
final json = <String, dynamic>{};
json[r'height'] = this.height;
json[r'width'] = this.width;
json[r'x'] = this.x;
json[r'y'] = this.y;
return json;
}
/// Returns a new [CropParameters] instance and imports its values from
/// [value] if it's a [Map], null otherwise.
// ignore: prefer_constructors_over_static_methods
static CropParameters? fromJson(dynamic value) {
upgradeDto(value, "CropParameters");
if (value is Map) {
final json = value.cast<String, dynamic>();
return CropParameters(
height: num.parse('${json[r'height']}'),
width: num.parse('${json[r'width']}'),
x: num.parse('${json[r'x']}'),
y: num.parse('${json[r'y']}'),
);
}
return null;
}
static List<CropParameters> listFromJson(dynamic json, {bool growable = false,}) {
final result = <CropParameters>[];
if (json is List && json.isNotEmpty) {
for (final row in json) {
final value = CropParameters.fromJson(row);
if (value != null) {
result.add(value);
}
}
}
return result.toList(growable: growable);
}
static Map<String, CropParameters> mapFromJson(dynamic json) {
final map = <String, CropParameters>{};
if (json is Map && json.isNotEmpty) {
json = json.cast<String, dynamic>(); // ignore: parameter_assignments
for (final entry in json.entries) {
final value = CropParameters.fromJson(entry.value);
if (value != null) {
map[entry.key] = value;
}
}
}
return map;
}
// maps a json object with a list of CropParameters-objects as value to a dart map
static Map<String, List<CropParameters>> mapListFromJson(dynamic json, {bool growable = false,}) {
final map = <String, List<CropParameters>>{};
if (json is Map && json.isNotEmpty) {
// ignore: parameter_assignments
json = json.cast<String, dynamic>();
for (final entry in json.entries) {
map[entry.key] = CropParameters.listFromJson(entry.value, growable: growable,);
}
}
return map;
}
/// The list of required keys that must be present in a JSON.
static const requiredKeys = <String>{
'height',
'width',
'x',
'y',
};
}

View File

@ -0,0 +1,88 @@
//
// AUTO-GENERATED FILE, DO NOT MODIFY!
//
// @dart=2.18
// ignore_for_file: unused_element, unused_import
// ignore_for_file: always_put_required_named_parameters_first
// ignore_for_file: constant_identifier_names
// ignore_for_file: lines_longer_than_80_chars
part of openapi.api;
class EditAction {
/// Instantiate a new enum with the provided [value].
const EditAction._(this.value);
/// The underlying value of this enum member.
final String value;
@override
String toString() => value;
String toJson() => value;
static const crop = EditAction._(r'crop');
static const rotate = EditAction._(r'rotate');
static const mirror = EditAction._(r'mirror');
/// List of all possible values in this [enum][EditAction].
static const values = <EditAction>[
crop,
rotate,
mirror,
];
static EditAction? fromJson(dynamic value) => EditActionTypeTransformer().decode(value);
static List<EditAction> listFromJson(dynamic json, {bool growable = false,}) {
final result = <EditAction>[];
if (json is List && json.isNotEmpty) {
for (final row in json) {
final value = EditAction.fromJson(row);
if (value != null) {
result.add(value);
}
}
}
return result.toList(growable: growable);
}
}
/// Transformation class that can [encode] an instance of [EditAction] to String,
/// and [decode] dynamic data back to [EditAction].
class EditActionTypeTransformer {
factory EditActionTypeTransformer() => _instance ??= const EditActionTypeTransformer._();
const EditActionTypeTransformer._();
String encode(EditAction data) => data.value;
/// Decodes a [dynamic value][data] to a EditAction.
///
/// If [allowNull] is true and the [dynamic value][data] cannot be decoded successfully,
/// then null is returned. However, if [allowNull] is false and the [dynamic value][data]
/// cannot be decoded successfully, then an [UnimplementedError] is thrown.
///
/// The [allowNull] is very handy when an API changes and a new enum value is added or removed,
/// and users are still using an old app with the old code.
EditAction? decode(dynamic data, {bool allowNull = true}) {
if (data != null) {
switch (data) {
case r'crop': return EditAction.crop;
case r'rotate': return EditAction.rotate;
case r'mirror': return EditAction.mirror;
default:
if (!allowNull) {
throw ArgumentError('Unknown enum value to decode: $data');
}
}
}
return null;
}
/// Singleton [EditActionTypeTransformer] instance.
static EditActionTypeTransformer? _instance;
}

View File

@ -0,0 +1,107 @@
//
// AUTO-GENERATED FILE, DO NOT MODIFY!
//
// @dart=2.18
// ignore_for_file: unused_element, unused_import
// ignore_for_file: always_put_required_named_parameters_first
// ignore_for_file: constant_identifier_names
// ignore_for_file: lines_longer_than_80_chars
part of openapi.api;
class EditActionCrop {
/// Returns a new [EditActionCrop] instance.
EditActionCrop({
required this.action,
required this.parameters,
});
EditAction action;
CropParameters parameters;
@override
bool operator ==(Object other) => identical(this, other) || other is EditActionCrop &&
other.action == action &&
other.parameters == parameters;
@override
int get hashCode =>
// ignore: unnecessary_parenthesis
(action.hashCode) +
(parameters.hashCode);
@override
String toString() => 'EditActionCrop[action=$action, parameters=$parameters]';
Map<String, dynamic> toJson() {
final json = <String, dynamic>{};
json[r'action'] = this.action;
json[r'parameters'] = this.parameters;
return json;
}
/// Returns a new [EditActionCrop] instance and imports its values from
/// [value] if it's a [Map], null otherwise.
// ignore: prefer_constructors_over_static_methods
static EditActionCrop? fromJson(dynamic value) {
upgradeDto(value, "EditActionCrop");
if (value is Map) {
final json = value.cast<String, dynamic>();
return EditActionCrop(
action: EditAction.fromJson(json[r'action'])!,
parameters: CropParameters.fromJson(json[r'parameters'])!,
);
}
return null;
}
static List<EditActionCrop> listFromJson(dynamic json, {bool growable = false,}) {
final result = <EditActionCrop>[];
if (json is List && json.isNotEmpty) {
for (final row in json) {
final value = EditActionCrop.fromJson(row);
if (value != null) {
result.add(value);
}
}
}
return result.toList(growable: growable);
}
static Map<String, EditActionCrop> mapFromJson(dynamic json) {
final map = <String, EditActionCrop>{};
if (json is Map && json.isNotEmpty) {
json = json.cast<String, dynamic>(); // ignore: parameter_assignments
for (final entry in json.entries) {
final value = EditActionCrop.fromJson(entry.value);
if (value != null) {
map[entry.key] = value;
}
}
}
return map;
}
// maps a json object with a list of EditActionCrop-objects as value to a dart map
static Map<String, List<EditActionCrop>> mapListFromJson(dynamic json, {bool growable = false,}) {
final map = <String, List<EditActionCrop>>{};
if (json is Map && json.isNotEmpty) {
// ignore: parameter_assignments
json = json.cast<String, dynamic>();
for (final entry in json.entries) {
map[entry.key] = EditActionCrop.listFromJson(entry.value, growable: growable,);
}
}
return map;
}
/// The list of required keys that must be present in a JSON.
static const requiredKeys = <String>{
'action',
'parameters',
};
}

View File

@ -0,0 +1,100 @@
//
// AUTO-GENERATED FILE, DO NOT MODIFY!
//
// @dart=2.18
// ignore_for_file: unused_element, unused_import
// ignore_for_file: always_put_required_named_parameters_first
// ignore_for_file: constant_identifier_names
// ignore_for_file: lines_longer_than_80_chars
part of openapi.api;
class EditActionListDto {
/// Returns a new [EditActionListDto] instance.
EditActionListDto({
this.edits = const [],
});
/// list of edits
List<AssetEditsDtoEditsInner> edits;
@override
bool operator ==(Object other) => identical(this, other) || other is EditActionListDto &&
_deepEquality.equals(other.edits, edits);
@override
int get hashCode =>
// ignore: unnecessary_parenthesis
(edits.hashCode);
@override
String toString() => 'EditActionListDto[edits=$edits]';
Map<String, dynamic> toJson() {
final json = <String, dynamic>{};
json[r'edits'] = this.edits;
return json;
}
/// Returns a new [EditActionListDto] instance and imports its values from
/// [value] if it's a [Map], null otherwise.
// ignore: prefer_constructors_over_static_methods
static EditActionListDto? fromJson(dynamic value) {
upgradeDto(value, "EditActionListDto");
if (value is Map) {
final json = value.cast<String, dynamic>();
return EditActionListDto(
edits: AssetEditsDtoEditsInner.listFromJson(json[r'edits']),
);
}
return null;
}
static List<EditActionListDto> listFromJson(dynamic json, {bool growable = false,}) {
final result = <EditActionListDto>[];
if (json is List && json.isNotEmpty) {
for (final row in json) {
final value = EditActionListDto.fromJson(row);
if (value != null) {
result.add(value);
}
}
}
return result.toList(growable: growable);
}
static Map<String, EditActionListDto> mapFromJson(dynamic json) {
final map = <String, EditActionListDto>{};
if (json is Map && json.isNotEmpty) {
json = json.cast<String, dynamic>(); // ignore: parameter_assignments
for (final entry in json.entries) {
final value = EditActionListDto.fromJson(entry.value);
if (value != null) {
map[entry.key] = value;
}
}
}
return map;
}
// maps a json object with a list of EditActionListDto-objects as value to a dart map
static Map<String, List<EditActionListDto>> mapListFromJson(dynamic json, {bool growable = false,}) {
final map = <String, List<EditActionListDto>>{};
if (json is Map && json.isNotEmpty) {
// ignore: parameter_assignments
json = json.cast<String, dynamic>();
for (final entry in json.entries) {
map[entry.key] = EditActionListDto.listFromJson(entry.value, growable: growable,);
}
}
return map;
}
/// The list of required keys that must be present in a JSON.
static const requiredKeys = <String>{
'edits',
};
}

View File

@ -0,0 +1,107 @@
//
// AUTO-GENERATED FILE, DO NOT MODIFY!
//
// @dart=2.18
// ignore_for_file: unused_element, unused_import
// ignore_for_file: always_put_required_named_parameters_first
// ignore_for_file: constant_identifier_names
// ignore_for_file: lines_longer_than_80_chars
part of openapi.api;
class EditActionMirror {
/// Returns a new [EditActionMirror] instance.
EditActionMirror({
required this.action,
required this.parameters,
});
EditAction action;
MirrorParameters parameters;
@override
bool operator ==(Object other) => identical(this, other) || other is EditActionMirror &&
other.action == action &&
other.parameters == parameters;
@override
int get hashCode =>
// ignore: unnecessary_parenthesis
(action.hashCode) +
(parameters.hashCode);
@override
String toString() => 'EditActionMirror[action=$action, parameters=$parameters]';
Map<String, dynamic> toJson() {
final json = <String, dynamic>{};
json[r'action'] = this.action;
json[r'parameters'] = this.parameters;
return json;
}
/// Returns a new [EditActionMirror] instance and imports its values from
/// [value] if it's a [Map], null otherwise.
// ignore: prefer_constructors_over_static_methods
static EditActionMirror? fromJson(dynamic value) {
upgradeDto(value, "EditActionMirror");
if (value is Map) {
final json = value.cast<String, dynamic>();
return EditActionMirror(
action: EditAction.fromJson(json[r'action'])!,
parameters: MirrorParameters.fromJson(json[r'parameters'])!,
);
}
return null;
}
static List<EditActionMirror> listFromJson(dynamic json, {bool growable = false,}) {
final result = <EditActionMirror>[];
if (json is List && json.isNotEmpty) {
for (final row in json) {
final value = EditActionMirror.fromJson(row);
if (value != null) {
result.add(value);
}
}
}
return result.toList(growable: growable);
}
static Map<String, EditActionMirror> mapFromJson(dynamic json) {
final map = <String, EditActionMirror>{};
if (json is Map && json.isNotEmpty) {
json = json.cast<String, dynamic>(); // ignore: parameter_assignments
for (final entry in json.entries) {
final value = EditActionMirror.fromJson(entry.value);
if (value != null) {
map[entry.key] = value;
}
}
}
return map;
}
// maps a json object with a list of EditActionMirror-objects as value to a dart map
static Map<String, List<EditActionMirror>> mapListFromJson(dynamic json, {bool growable = false,}) {
final map = <String, List<EditActionMirror>>{};
if (json is Map && json.isNotEmpty) {
// ignore: parameter_assignments
json = json.cast<String, dynamic>();
for (final entry in json.entries) {
map[entry.key] = EditActionMirror.listFromJson(entry.value, growable: growable,);
}
}
return map;
}
/// The list of required keys that must be present in a JSON.
static const requiredKeys = <String>{
'action',
'parameters',
};
}

View File

@ -0,0 +1,107 @@
//
// AUTO-GENERATED FILE, DO NOT MODIFY!
//
// @dart=2.18
// ignore_for_file: unused_element, unused_import
// ignore_for_file: always_put_required_named_parameters_first
// ignore_for_file: constant_identifier_names
// ignore_for_file: lines_longer_than_80_chars
part of openapi.api;
class EditActionRotate {
/// Returns a new [EditActionRotate] instance.
EditActionRotate({
required this.action,
required this.parameters,
});
EditAction action;
RotateParameters parameters;
@override
bool operator ==(Object other) => identical(this, other) || other is EditActionRotate &&
other.action == action &&
other.parameters == parameters;
@override
int get hashCode =>
// ignore: unnecessary_parenthesis
(action.hashCode) +
(parameters.hashCode);
@override
String toString() => 'EditActionRotate[action=$action, parameters=$parameters]';
Map<String, dynamic> toJson() {
final json = <String, dynamic>{};
json[r'action'] = this.action;
json[r'parameters'] = this.parameters;
return json;
}
/// Returns a new [EditActionRotate] instance and imports its values from
/// [value] if it's a [Map], null otherwise.
// ignore: prefer_constructors_over_static_methods
static EditActionRotate? fromJson(dynamic value) {
upgradeDto(value, "EditActionRotate");
if (value is Map) {
final json = value.cast<String, dynamic>();
return EditActionRotate(
action: EditAction.fromJson(json[r'action'])!,
parameters: RotateParameters.fromJson(json[r'parameters'])!,
);
}
return null;
}
static List<EditActionRotate> listFromJson(dynamic json, {bool growable = false,}) {
final result = <EditActionRotate>[];
if (json is List && json.isNotEmpty) {
for (final row in json) {
final value = EditActionRotate.fromJson(row);
if (value != null) {
result.add(value);
}
}
}
return result.toList(growable: growable);
}
static Map<String, EditActionRotate> mapFromJson(dynamic json) {
final map = <String, EditActionRotate>{};
if (json is Map && json.isNotEmpty) {
json = json.cast<String, dynamic>(); // ignore: parameter_assignments
for (final entry in json.entries) {
final value = EditActionRotate.fromJson(entry.value);
if (value != null) {
map[entry.key] = value;
}
}
}
return map;
}
// maps a json object with a list of EditActionRotate-objects as value to a dart map
static Map<String, List<EditActionRotate>> mapListFromJson(dynamic json, {bool growable = false,}) {
final map = <String, List<EditActionRotate>>{};
if (json is Map && json.isNotEmpty) {
// ignore: parameter_assignments
json = json.cast<String, dynamic>();
for (final entry in json.entries) {
map[entry.key] = EditActionRotate.listFromJson(entry.value, growable: growable,);
}
}
return map;
}
/// The list of required keys that must be present in a JSON.
static const requiredKeys = <String>{
'action',
'parameters',
};
}

View File

@ -0,0 +1,85 @@
//
// AUTO-GENERATED FILE, DO NOT MODIFY!
//
// @dart=2.18
// ignore_for_file: unused_element, unused_import
// ignore_for_file: always_put_required_named_parameters_first
// ignore_for_file: constant_identifier_names
// ignore_for_file: lines_longer_than_80_chars
part of openapi.api;
/// Axis to mirror along
class MirrorAxis {
/// Instantiate a new enum with the provided [value].
const MirrorAxis._(this.value);
/// The underlying value of this enum member.
final String value;
@override
String toString() => value;
String toJson() => value;
static const horizontal = MirrorAxis._(r'horizontal');
static const vertical = MirrorAxis._(r'vertical');
/// List of all possible values in this [enum][MirrorAxis].
static const values = <MirrorAxis>[
horizontal,
vertical,
];
static MirrorAxis? fromJson(dynamic value) => MirrorAxisTypeTransformer().decode(value);
static List<MirrorAxis> listFromJson(dynamic json, {bool growable = false,}) {
final result = <MirrorAxis>[];
if (json is List && json.isNotEmpty) {
for (final row in json) {
final value = MirrorAxis.fromJson(row);
if (value != null) {
result.add(value);
}
}
}
return result.toList(growable: growable);
}
}
/// Transformation class that can [encode] an instance of [MirrorAxis] to String,
/// and [decode] dynamic data back to [MirrorAxis].
class MirrorAxisTypeTransformer {
factory MirrorAxisTypeTransformer() => _instance ??= const MirrorAxisTypeTransformer._();
const MirrorAxisTypeTransformer._();
String encode(MirrorAxis data) => data.value;
/// Decodes a [dynamic value][data] to a MirrorAxis.
///
/// If [allowNull] is true and the [dynamic value][data] cannot be decoded successfully,
/// then null is returned. However, if [allowNull] is false and the [dynamic value][data]
/// cannot be decoded successfully, then an [UnimplementedError] is thrown.
///
/// The [allowNull] is very handy when an API changes and a new enum value is added or removed,
/// and users are still using an old app with the old code.
MirrorAxis? decode(dynamic data, {bool allowNull = true}) {
if (data != null) {
switch (data) {
case r'horizontal': return MirrorAxis.horizontal;
case r'vertical': return MirrorAxis.vertical;
default:
if (!allowNull) {
throw ArgumentError('Unknown enum value to decode: $data');
}
}
}
return null;
}
/// Singleton [MirrorAxisTypeTransformer] instance.
static MirrorAxisTypeTransformer? _instance;
}

View File

@ -0,0 +1,100 @@
//
// AUTO-GENERATED FILE, DO NOT MODIFY!
//
// @dart=2.18
// ignore_for_file: unused_element, unused_import
// ignore_for_file: always_put_required_named_parameters_first
// ignore_for_file: constant_identifier_names
// ignore_for_file: lines_longer_than_80_chars
part of openapi.api;
class MirrorParameters {
/// Returns a new [MirrorParameters] instance.
MirrorParameters({
required this.axis,
});
/// Axis to mirror along
MirrorAxis axis;
@override
bool operator ==(Object other) => identical(this, other) || other is MirrorParameters &&
other.axis == axis;
@override
int get hashCode =>
// ignore: unnecessary_parenthesis
(axis.hashCode);
@override
String toString() => 'MirrorParameters[axis=$axis]';
Map<String, dynamic> toJson() {
final json = <String, dynamic>{};
json[r'axis'] = this.axis;
return json;
}
/// Returns a new [MirrorParameters] instance and imports its values from
/// [value] if it's a [Map], null otherwise.
// ignore: prefer_constructors_over_static_methods
static MirrorParameters? fromJson(dynamic value) {
upgradeDto(value, "MirrorParameters");
if (value is Map) {
final json = value.cast<String, dynamic>();
return MirrorParameters(
axis: MirrorAxis.fromJson(json[r'axis'])!,
);
}
return null;
}
static List<MirrorParameters> listFromJson(dynamic json, {bool growable = false,}) {
final result = <MirrorParameters>[];
if (json is List && json.isNotEmpty) {
for (final row in json) {
final value = MirrorParameters.fromJson(row);
if (value != null) {
result.add(value);
}
}
}
return result.toList(growable: growable);
}
static Map<String, MirrorParameters> mapFromJson(dynamic json) {
final map = <String, MirrorParameters>{};
if (json is Map && json.isNotEmpty) {
json = json.cast<String, dynamic>(); // ignore: parameter_assignments
for (final entry in json.entries) {
final value = MirrorParameters.fromJson(entry.value);
if (value != null) {
map[entry.key] = value;
}
}
}
return map;
}
// maps a json object with a list of MirrorParameters-objects as value to a dart map
static Map<String, List<MirrorParameters>> mapListFromJson(dynamic json, {bool growable = false,}) {
final map = <String, List<MirrorParameters>>{};
if (json is Map && json.isNotEmpty) {
// ignore: parameter_assignments
json = json.cast<String, dynamic>();
for (final entry in json.entries) {
map[entry.key] = MirrorParameters.listFromJson(entry.value, growable: growable,);
}
}
return map;
}
/// The list of required keys that must be present in a JSON.
static const requiredKeys = <String>{
'axis',
};
}

View File

@ -43,6 +43,8 @@ class Permission {
static const assetPeriodUpload = Permission._(r'asset.upload');
static const assetPeriodReplace = Permission._(r'asset.replace');
static const assetPeriodCopy = Permission._(r'asset.copy');
static const assetPeriodDerive = Permission._(r'asset.derive');
static const assetPeriodEdit = Permission._(r'asset.edit');
static const albumPeriodCreate = Permission._(r'album.create');
static const albumPeriodRead = Permission._(r'album.read');
static const albumPeriodUpdate = Permission._(r'album.update');
@ -191,6 +193,8 @@ class Permission {
assetPeriodUpload,
assetPeriodReplace,
assetPeriodCopy,
assetPeriodDerive,
assetPeriodEdit,
albumPeriodCreate,
albumPeriodRead,
albumPeriodUpdate,
@ -374,6 +378,8 @@ class PermissionTypeTransformer {
case r'asset.upload': return Permission.assetPeriodUpload;
case r'asset.replace': return Permission.assetPeriodReplace;
case r'asset.copy': return Permission.assetPeriodCopy;
case r'asset.derive': return Permission.assetPeriodDerive;
case r'asset.edit': return Permission.assetPeriodEdit;
case r'album.create': return Permission.albumPeriodCreate;
case r'album.read': return Permission.albumPeriodRead;
case r'album.update': return Permission.albumPeriodUpdate;

View File

@ -0,0 +1,100 @@
//
// AUTO-GENERATED FILE, DO NOT MODIFY!
//
// @dart=2.18
// ignore_for_file: unused_element, unused_import
// ignore_for_file: always_put_required_named_parameters_first
// ignore_for_file: constant_identifier_names
// ignore_for_file: lines_longer_than_80_chars
part of openapi.api;
class RotateParameters {
/// Returns a new [RotateParameters] instance.
RotateParameters({
required this.angle,
});
/// Rotation angle in degrees
num angle;
@override
bool operator ==(Object other) => identical(this, other) || other is RotateParameters &&
other.angle == angle;
@override
int get hashCode =>
// ignore: unnecessary_parenthesis
(angle.hashCode);
@override
String toString() => 'RotateParameters[angle=$angle]';
Map<String, dynamic> toJson() {
final json = <String, dynamic>{};
json[r'angle'] = this.angle;
return json;
}
/// Returns a new [RotateParameters] instance and imports its values from
/// [value] if it's a [Map], null otherwise.
// ignore: prefer_constructors_over_static_methods
static RotateParameters? fromJson(dynamic value) {
upgradeDto(value, "RotateParameters");
if (value is Map) {
final json = value.cast<String, dynamic>();
return RotateParameters(
angle: num.parse('${json[r'angle']}'),
);
}
return null;
}
static List<RotateParameters> listFromJson(dynamic json, {bool growable = false,}) {
final result = <RotateParameters>[];
if (json is List && json.isNotEmpty) {
for (final row in json) {
final value = RotateParameters.fromJson(row);
if (value != null) {
result.add(value);
}
}
}
return result.toList(growable: growable);
}
static Map<String, RotateParameters> mapFromJson(dynamic json) {
final map = <String, RotateParameters>{};
if (json is Map && json.isNotEmpty) {
json = json.cast<String, dynamic>(); // ignore: parameter_assignments
for (final entry in json.entries) {
final value = RotateParameters.fromJson(entry.value);
if (value != null) {
map[entry.key] = value;
}
}
}
return map;
}
// maps a json object with a list of RotateParameters-objects as value to a dart map
static Map<String, List<RotateParameters>> mapListFromJson(dynamic json, {bool growable = false,}) {
final map = <String, List<RotateParameters>>{};
if (json is Map && json.isNotEmpty) {
// ignore: parameter_assignments
json = json.cast<String, dynamic>();
for (final entry in json.entries) {
map[entry.key] = RotateParameters.listFromJson(entry.value, growable: growable,);
}
}
return map;
}
/// The list of required keys that must be present in a JSON.
static const requiredKeys = <String>{
'angle',
};
}

View File

@ -3187,6 +3187,173 @@
"x-immich-state": "Stable"
}
},
"/assets/{id}/edits": {
"delete": {
"description": "Removes all edit actions (crop, rotate, mirror) associated with the specified asset.",
"operationId": "removeAssetEdits",
"parameters": [
{
"name": "id",
"required": true,
"in": "path",
"schema": {
"format": "uuid",
"type": "string"
}
}
],
"responses": {
"204": {
"description": ""
}
},
"security": [
{
"bearer": []
},
{
"cookie": []
},
{
"api_key": []
}
],
"summary": "Remove edits from an existing asset",
"tags": [
"Assets"
],
"x-immich-history": [
{
"version": "v2",
"state": "Added"
},
{
"version": "v2",
"state": "Beta"
}
],
"x-immich-permission": "asset.edit",
"x-immich-state": "Beta"
},
"get": {
"description": "Retrieve a series of edit actions (crop, rotate, mirror) associated with the specified asset.",
"operationId": "getAssetEdits",
"parameters": [
{
"name": "id",
"required": true,
"in": "path",
"schema": {
"format": "uuid",
"type": "string"
}
}
],
"responses": {
"200": {
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/AssetEditsDto"
}
}
},
"description": ""
}
},
"security": [
{
"bearer": []
},
{
"cookie": []
},
{
"api_key": []
}
],
"summary": "Retrieve edits for an existing asset",
"tags": [
"Assets"
],
"x-immich-history": [
{
"version": "v2",
"state": "Added"
},
{
"version": "v2",
"state": "Beta"
}
],
"x-immich-permission": "asset.read",
"x-immich-state": "Beta"
},
"put": {
"description": "Applies a series of edit actions (crop, rotate, mirror) to the specified asset.",
"operationId": "editAsset",
"parameters": [
{
"name": "id",
"required": true,
"in": "path",
"schema": {
"format": "uuid",
"type": "string"
}
}
],
"requestBody": {
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/EditActionListDto"
}
}
},
"required": true
},
"responses": {
"200": {
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/AssetEditsDto"
}
}
},
"description": ""
}
},
"security": [
{
"bearer": []
},
{
"cookie": []
},
{
"api_key": []
}
],
"summary": "Applies edits to an existing asset",
"tags": [
"Assets"
],
"x-immich-history": [
{
"version": "v2",
"state": "Added"
},
{
"version": "v2",
"state": "Beta"
}
],
"x-immich-permission": "asset.edit",
"x-immich-state": "Beta"
}
},
"/assets/{id}/metadata": {
"get": {
"description": "Retrieve all metadata key-value pairs associated with the specified asset.",
@ -3516,6 +3683,15 @@
"description": "Downloads the original file of the specified asset.",
"operationId": "downloadAsset",
"parameters": [
{
"name": "edited",
"required": false,
"in": "query",
"schema": {
"default": true,
"type": "boolean"
}
},
{
"name": "id",
"required": true,
@ -3676,6 +3852,14 @@
"description": "Retrieve the thumbnail image for the specified asset.",
"operationId": "viewAsset",
"parameters": [
{
"name": "edited",
"required": false,
"in": "query",
"schema": {
"type": "boolean"
}
},
{
"name": "id",
"required": true,
@ -15106,6 +15290,36 @@
],
"type": "object"
},
"AssetEditsDto": {
"properties": {
"assetId": {
"format": "uuid",
"type": "string"
},
"edits": {
"description": "list of edits",
"items": {
"anyOf": [
{
"$ref": "#/components/schemas/EditActionCrop"
},
{
"$ref": "#/components/schemas/EditActionRotate"
},
{
"$ref": "#/components/schemas/EditActionMirror"
}
]
},
"type": "array"
}
},
"required": [
"assetId",
"edits"
],
"type": "object"
},
"AssetFaceCreateDto": {
"properties": {
"assetId": {
@ -16223,6 +16437,37 @@
],
"type": "object"
},
"CropParameters": {
"properties": {
"height": {
"description": "Height of the crop",
"minimum": 1,
"type": "number"
},
"width": {
"description": "Width of the crop",
"minimum": 1,
"type": "number"
},
"x": {
"description": "Top-Left X coordinate of crop",
"minimum": 0,
"type": "number"
},
"y": {
"description": "Top-Left Y coordinate of crop",
"minimum": 0,
"type": "number"
}
},
"required": [
"height",
"width",
"x",
"y"
],
"type": "object"
},
"DatabaseBackupConfig": {
"properties": {
"cronExpression": {
@ -16367,6 +16612,96 @@
],
"type": "object"
},
"EditAction": {
"enum": [
"crop",
"rotate",
"mirror"
],
"type": "string"
},
"EditActionCrop": {
"properties": {
"action": {
"allOf": [
{
"$ref": "#/components/schemas/EditAction"
}
]
},
"parameters": {
"$ref": "#/components/schemas/CropParameters"
}
},
"required": [
"action",
"parameters"
],
"type": "object"
},
"EditActionListDto": {
"properties": {
"edits": {
"description": "list of edits",
"items": {
"anyOf": [
{
"$ref": "#/components/schemas/EditActionCrop"
},
{
"$ref": "#/components/schemas/EditActionRotate"
},
{
"$ref": "#/components/schemas/EditActionMirror"
}
]
},
"type": "array"
}
},
"required": [
"edits"
],
"type": "object"
},
"EditActionMirror": {
"properties": {
"action": {
"allOf": [
{
"$ref": "#/components/schemas/EditAction"
}
]
},
"parameters": {
"$ref": "#/components/schemas/MirrorParameters"
}
},
"required": [
"action",
"parameters"
],
"type": "object"
},
"EditActionRotate": {
"properties": {
"action": {
"allOf": [
{
"$ref": "#/components/schemas/EditAction"
}
]
},
"parameters": {
"$ref": "#/components/schemas/RotateParameters"
}
},
"required": [
"action",
"parameters"
],
"type": "object"
},
"EmailNotificationsResponse": {
"properties": {
"albumInvite": {
@ -17377,6 +17712,30 @@
},
"type": "object"
},
"MirrorAxis": {
"description": "Axis to mirror along",
"enum": [
"horizontal",
"vertical"
],
"type": "string"
},
"MirrorParameters": {
"properties": {
"axis": {
"allOf": [
{
"$ref": "#/components/schemas/MirrorAxis"
}
],
"description": "Axis to mirror along"
}
},
"required": [
"axis"
],
"type": "object"
},
"NotificationCreateDto": {
"properties": {
"data": {
@ -17857,6 +18216,8 @@
"asset.upload",
"asset.replace",
"asset.copy",
"asset.derive",
"asset.edit",
"album.create",
"album.read",
"album.update",
@ -18913,6 +19274,18 @@
],
"type": "object"
},
"RotateParameters": {
"properties": {
"angle": {
"description": "Rotation angle in degrees",
"type": "number"
}
},
"required": [
"angle"
],
"type": "object"
},
"SearchAlbumResponseDto": {
"properties": {
"count": {

View File

@ -555,6 +555,45 @@ export type UpdateAssetDto = {
rating?: number;
visibility?: AssetVisibility;
};
export type CropParameters = {
/** Height of the crop */
height: number;
/** Width of the crop */
width: number;
/** Top-Left X coordinate of crop */
x: number;
/** Top-Left Y coordinate of crop */
y: number;
};
export type EditActionCrop = {
action: EditAction;
parameters: CropParameters;
};
export type RotateParameters = {
/** Rotation angle in degrees */
angle: number;
};
export type EditActionRotate = {
action: EditAction;
parameters: RotateParameters;
};
export type MirrorParameters = {
/** Axis to mirror along */
axis: MirrorAxis;
};
export type EditActionMirror = {
action: EditAction;
parameters: MirrorParameters;
};
export type AssetEditsDto = {
assetId: string;
/** list of edits */
edits: (EditActionCrop | EditActionRotate | EditActionMirror)[];
};
export type EditActionListDto = {
/** list of edits */
edits: (EditActionCrop | EditActionRotate | EditActionMirror)[];
};
export type AssetMetadataResponseDto = {
key: AssetMetadataKey;
updatedAt: string;
@ -2527,6 +2566,46 @@ export function updateAsset({ id, updateAssetDto }: {
body: updateAssetDto
})));
}
/**
* Remove edits from an existing asset
*/
export function removeAssetEdits({ id }: {
id: string;
}, opts?: Oazapfts.RequestOpts) {
return oazapfts.ok(oazapfts.fetchText(`/assets/${encodeURIComponent(id)}/edits`, {
...opts,
method: "DELETE"
}));
}
/**
* Retrieve edits for an existing asset
*/
export function getAssetEdits({ id }: {
id: string;
}, opts?: Oazapfts.RequestOpts) {
return oazapfts.ok(oazapfts.fetchJson<{
status: 200;
data: AssetEditsDto;
}>(`/assets/${encodeURIComponent(id)}/edits`, {
...opts
}));
}
/**
* Applies edits to an existing asset
*/
export function editAsset({ id, editActionListDto }: {
id: string;
editActionListDto: EditActionListDto;
}, opts?: Oazapfts.RequestOpts) {
return oazapfts.ok(oazapfts.fetchJson<{
status: 200;
data: AssetEditsDto;
}>(`/assets/${encodeURIComponent(id)}/edits`, oazapfts.json({
...opts,
method: "PUT",
body: editActionListDto
})));
}
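// Illustrative usage only (not emitted by the SDK generator): a minimal sketch of how a client
// might apply, read back, and clear edits via the functions above. The asset id and edit values
// are placeholders.
async function exampleEditRoundTrip(assetId: string) {
  await editAsset({
    id: assetId,
    editActionListDto: {
      edits: [
        { action: EditAction.Crop, parameters: { x: 0, y: 0, width: 800, height: 600 } },
        { action: EditAction.Rotate, parameters: { angle: 90 } },
      ],
    },
  });
  const { edits } = await getAssetEdits({ id: assetId });
  console.log(edits);
  await removeAssetEdits({ id: assetId }); // clears all stored edits for the asset
}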
/**
* Get asset metadata
*/
@ -2598,7 +2677,8 @@ export function getAssetOcr({ id }: {
/**
* Download original asset
*/
export function downloadAsset({ id, key, slug }: {
export function downloadAsset({ edited, id, key, slug }: {
edited?: boolean;
id: string;
key?: string;
slug?: string;
@ -2607,6 +2687,7 @@ export function downloadAsset({ id, key, slug }: {
status: 200;
data: Blob;
}>(`/assets/${encodeURIComponent(id)}/original${QS.query(QS.explode({
edited,
key,
slug
}))}`, {
@ -2637,7 +2718,8 @@ export function replaceAsset({ id, key, slug, assetMediaReplaceDto }: {
/**
* View asset thumbnail
*/
export function viewAsset({ id, key, size, slug }: {
export function viewAsset({ edited, id, key, size, slug }: {
edited?: boolean;
id: string;
key?: string;
size?: AssetMediaSize;
@ -2647,6 +2729,7 @@ export function viewAsset({ id, key, size, slug }: {
status: 200;
data: Blob;
}>(`/assets/${encodeURIComponent(id)}/thumbnail${QS.query(QS.explode({
edited,
key,
size,
slug
@ -5221,6 +5304,8 @@ export enum Permission {
AssetUpload = "asset.upload",
AssetReplace = "asset.replace",
AssetCopy = "asset.copy",
AssetDerive = "asset.derive",
AssetEdit = "asset.edit",
AlbumCreate = "album.create",
AlbumRead = "album.read",
AlbumUpdate = "album.update",
@ -5369,6 +5454,15 @@ export enum AssetJobName {
RegenerateThumbnail = "regenerate-thumbnail",
TranscodeVideo = "transcode-video"
}
export enum EditAction {
Crop = "crop",
Rotate = "rotate",
Mirror = "mirror"
}
export enum MirrorAxis {
Horizontal = "horizontal",
Vertical = "vertical"
}
export enum AssetMediaSize {
Fullsize = "fullsize",
Preview = "preview",

View File

@ -547,6 +547,9 @@ importers:
thumbhash:
specifier: ^0.1.1
version: 0.1.1
transformation-matrix:
specifier: ^3.1.0
version: 3.1.0
ua-parser-js:
specifier: ^2.0.0
version: 2.0.7
@ -11005,6 +11008,13 @@ packages:
resolution: {integrity: sha512-l7FvfAHlcmulp8kr+flpQZmVwtu7nfRV7NZujtN0OqES8EL4O4e0qqzL0DC5gAvx/ZC/9lk6rhcUwYvkBnBnYA==}
engines: {node: '>=12'}
tr46@5.1.1:
resolution: {integrity: sha512-hdF5ZgjTqgAntKkklYw0R03MG2x/bSzTtkxmIRw/sTNV8YXsCJ1tfLAX23lhxhHJlEf3CRCOCGGWw3vI3GaSPw==}
engines: {node: '>=18'}
transformation-matrix@3.1.0:
resolution: {integrity: sha512-oYubRWTi2tYFHAL2J8DLvPIqIYcYZ0fSOi2vmSy042Ho4jBW2ce6VP7QfD44t65WQz6bw5w1Pk22J7lcUpaTKA==}
tree-dump@1.1.0:
resolution: {integrity: sha512-rMuvhU4MCDbcbnleZTFezWsaZXRFemSqAM+7jPnzUl1fo9w3YEKOxAeui0fz3OI4EU4hf23iyA7uQRVko+UaBA==}
engines: {node: '>=10.0'}
@ -24161,6 +24171,16 @@ snapshots:
punycode: 2.3.1
optional: true
tr46@5.1.1:
dependencies:
punycode: 2.3.1
optional: true
transformation-matrix@3.1.0: {}
tree-dump@1.1.0(tslib@2.8.1):
dependencies:
tslib: 2.8.1

View File

@ -110,6 +110,7 @@
"socket.io": "^4.8.1",
"tailwindcss-preset-email": "^1.4.0",
"thumbhash": "^0.1.1",
"transformation-matrix": "^3.1.0",
"ua-parser-js": "^2.0.0",
"uuid": "^11.1.0",
"validator": "^13.12.0"
@ -128,8 +129,8 @@
"@types/cookie-parser": "^1.4.8",
"@types/express": "^5.0.0",
"@types/fluent-ffmpeg": "^2.1.21",
"@types/jsonwebtoken": "^9.0.10",
"@types/js-yaml": "^4.0.9",
"@types/jsonwebtoken": "^9.0.10",
"@types/lodash": "^4.14.197",
"@types/luxon": "^3.6.2",
"@types/mock-fs": "^4.13.1",

View File

@ -33,6 +33,7 @@ import {
CheckExistingAssetsDto,
UploadFieldName,
} from 'src/dtos/asset-media.dto';
import { AssetDownloadOriginalDto } from 'src/dtos/asset.dto';
import { AuthDto } from 'src/dtos/auth.dto';
import { ApiTag, ImmichHeader, Permission, RouteKey } from 'src/enum';
import { AssetUploadInterceptor } from 'src/middleware/asset-upload.interceptor';
@ -94,10 +95,11 @@ export class AssetMediaController {
async downloadAsset(
@Auth() auth: AuthDto,
@Param() { id }: UUIDParamDto,
@Query() { edited }: AssetDownloadOriginalDto,
@Res() res: Response,
@Next() next: NextFunction,
) {
await sendFile(res, next, () => this.service.downloadOriginal(auth, id), this.logger);
await sendFile(res, next, () => this.service.downloadOriginal(auth, id, edited ?? true), this.logger);
}
@Put(':id/original')

View File

@ -17,6 +17,7 @@ import {
UpdateAssetDto,
} from 'src/dtos/asset.dto';
import { AuthDto } from 'src/dtos/auth.dto';
import { AssetEditsDto, EditActionListDto } from 'src/dtos/editing.dto';
import { AssetOcrResponseDto } from 'src/dtos/ocr.dto';
import { ApiTag, Permission, RouteKey } from 'src/enum';
import { Auth, Authenticated } from 'src/middleware/auth.guard';
@ -197,4 +198,42 @@ export class AssetController {
deleteAssetMetadata(@Auth() auth: AuthDto, @Param() { id, key }: AssetMetadataRouteParams): Promise<void> {
return this.service.deleteMetadataByKey(auth, id, key);
}
@Put(':id/edits')
@Authenticated({ permission: Permission.AssetEdit })
@Endpoint({
summary: 'Applies edits to an existing asset',
description: 'Applies a series of edit actions (crop, rotate, mirror) to the specified asset.',
history: new HistoryBuilder().added('v2').beta('v2'),
})
editAsset(
@Auth() auth: AuthDto,
@Param() { id }: UUIDParamDto,
@Body() dto: EditActionListDto,
): Promise<AssetEditsDto> {
return this.service.editAsset(auth, id, dto);
}
@Get(':id/edits')
@Authenticated({ permission: Permission.AssetRead })
@Endpoint({
summary: 'Retrieve edits for an existing asset',
description: 'Retrieve a series of edit actions (crop, rotate, mirror) associated with the specified asset.',
history: new HistoryBuilder().added('v2').beta('v2'),
})
getAssetEdits(@Auth() auth: AuthDto, @Param() { id }: UUIDParamDto): Promise<AssetEditsDto> {
return this.service.getAssetEdits(auth, id);
}
@Delete(':id/edits')
@Authenticated({ permission: Permission.AssetEdit })
@HttpCode(HttpStatus.NO_CONTENT)
@Endpoint({
summary: 'Remove edits from an existing asset',
description: 'Removes all edit actions (crop, rotate, mirror) associated with the specified asset.',
history: new HistoryBuilder().added('v2').beta('v2'),
})
removeAssetEdits(@Auth() auth: AuthDto, @Param() { id }: UUIDParamDto): Promise<void> {
return this.service.removeAssetEdits(auth, id);
}
}
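// Example request (illustrative values only): PUT /assets/{id}/edits with a JSON body matching
// EditActionListDto, e.g.
//   { "edits": [
//       { "action": "crop", "parameters": { "x": 100, "y": 200, "width": 700, "height": 300 } },
//       { "action": "mirror", "parameters": { "axis": "horizontal" } } ] }
// The declared response type is AssetEditsDto (the asset id plus its stored edits).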

View File

@ -24,7 +24,13 @@ export interface MoveRequest {
};
}
export type GeneratedImageType = AssetPathType.Preview | AssetPathType.Thumbnail | AssetPathType.FullSize;
export type GeneratedImageType =
| AssetPathType.Preview
| AssetPathType.Thumbnail
| AssetPathType.FullSize
| AssetPathType.EditedPreview
| AssetPathType.EditedThumbnail
| AssetPathType.EditedFullSize;
export type GeneratedAssetType = GeneratedImageType | AssetPathType.EncodedVideo;
export type ThumbnailPathEntity = { id: string; ownerId: string };

View File

@ -272,6 +272,7 @@ export type AssetFace = {
person?: Person | null;
updatedAt: Date;
updateId: string;
isVisible: boolean;
};
export type Plugin = Selectable<PluginTable>;

View File

@ -19,6 +19,9 @@ export enum AssetMediaSize {
export class AssetMediaOptionsDto {
@ValidateEnum({ enum: AssetMediaSize, name: 'AssetMediaSize', optional: true })
size?: AssetMediaSize;
@ValidateBoolean({ optional: true })
edited?: boolean;
}
export enum UploadFieldName {

View File

@ -3,6 +3,7 @@ import { Selectable } from 'kysely';
import { AssetFace, AssetFile, Exif, Stack, Tag, User } from 'src/database';
import { HistoryBuilder, Property } from 'src/decorators';
import { AuthDto } from 'src/dtos/auth.dto';
import { EditActionItem } from 'src/dtos/editing.dto';
import { ExifResponseDto, mapExif } from 'src/dtos/exif.dto';
import {
AssetFaceWithoutPersonResponseDto,
@ -13,6 +14,8 @@ import {
import { TagResponseDto, mapTag } from 'src/dtos/tag.dto';
import { UserResponseDto, mapUser } from 'src/dtos/user.dto';
import { AssetStatus, AssetType, AssetVisibility } from 'src/enum';
import { ImageDimensions } from 'src/types';
import { getDimensions } from 'src/utils/asset.util';
import { hexOrBufferToBase64 } from 'src/utils/bytes';
import { mimeTypes } from 'src/utils/mime-types';
import { ValidateEnum } from 'src/validation';
@ -109,6 +112,7 @@ export type MapAsset = {
deviceId: string;
duplicateId: string | null;
duration: string | null;
edits?: EditActionItem[];
encodedVideoPath: string | null;
exifInfo?: Selectable<Exif> | null;
faces?: AssetFace[];
@ -151,16 +155,20 @@ export type AssetMapOptions = {
};
// TODO: this is inefficient
const peopleWithFaces = (faces?: AssetFace[]): PersonWithFacesResponseDto[] => {
const peopleWithFaces = (
faces?: AssetFace[],
edits?: EditActionItem[],
assetDimensions?: ImageDimensions,
): PersonWithFacesResponseDto[] => {
const result: PersonWithFacesResponseDto[] = [];
if (faces) {
if (faces && edits && assetDimensions) {
for (const face of faces) {
if (face.person) {
const existingPersonEntry = result.find((item) => item.id === face.person!.id);
if (existingPersonEntry) {
existingPersonEntry.faces.push(face);
} else {
result.push({ ...mapPerson(face.person!), faces: [mapFacesWithoutPerson(face)] });
result.push({ ...mapPerson(face.person!), faces: [mapFacesWithoutPerson(face, edits, assetDimensions)] });
}
}
}
@ -200,6 +208,8 @@ export function mapAsset(entity: MapAsset, options: AssetMapOptions = {}): Asset
return sanitizedAssetResponse as AssetResponseDto;
}
const assetDimensions = entity.exifInfo ? getDimensions(entity.exifInfo) : undefined;
return {
id: entity.id,
createdAt: entity.createdAt,
@ -225,7 +235,7 @@ export function mapAsset(entity: MapAsset, options: AssetMapOptions = {}): Asset
exifInfo: entity.exifInfo ? mapExif(entity.exifInfo) : undefined,
livePhotoVideoId: entity.livePhotoVideoId,
tags: entity.tags?.map((tag) => mapTag(tag)),
people: peopleWithFaces(entity.faces),
people: peopleWithFaces(entity.faces, entity.edits, assetDimensions),
unassignedFaces: entity.faces?.filter((face) => !face.person).map((a) => mapFacesWithoutPerson(a)),
checksum: hexOrBufferToBase64(entity.checksum)!,
stack: withStack ? mapStack(entity) : undefined,

View File

@ -197,6 +197,11 @@ export class AssetCopyDto {
favorite?: boolean;
}
export class AssetDownloadOriginalDto {
@ValidateBoolean({ optional: true, default: true })
edited?: boolean;
}
export const mapStats = (stats: AssetStats): AssetStatsResponseDto => {
return {
images: stats[AssetType.Image],

View File

@ -0,0 +1,122 @@
import { ApiExtraModels, ApiProperty, getSchemaPath } from '@nestjs/swagger';
import { ClassConstructor, plainToInstance, Transform, Type } from 'class-transformer';
import { IsEnum, IsInt, Min, ValidateNested } from 'class-validator';
import { IsAxisAlignedRotation, ValidateUUID } from 'src/validation';
export enum EditAction {
Crop = 'crop',
Rotate = 'rotate',
Mirror = 'mirror',
}
export enum MirrorAxis {
Horizontal = 'horizontal',
Vertical = 'vertical',
}
export class CropParameters {
@IsInt()
@Min(0)
@ApiProperty({ description: 'Top-Left X coordinate of crop' })
x!: number;
@IsInt()
@Min(0)
@ApiProperty({ description: 'Top-Left Y coordinate of crop' })
y!: number;
@IsInt()
@Min(1)
@ApiProperty({ description: 'Width of the crop' })
width!: number;
@IsInt()
@Min(1)
@ApiProperty({ description: 'Height of the crop' })
height!: number;
}
export class RotateParameters {
@IsAxisAlignedRotation()
@ApiProperty({ description: 'Rotation angle in degrees' })
angle!: number;
}
export class MirrorParameters {
@IsEnum(MirrorAxis)
@ApiProperty({ enum: MirrorAxis, enumName: 'MirrorAxis', description: 'Axis to mirror along' })
axis!: MirrorAxis;
}
class EditActionBase {
@IsEnum(EditAction)
@ApiProperty({ enum: EditAction, enumName: 'EditAction' })
action!: EditAction;
}
export class EditActionCrop extends EditActionBase {
@ValidateNested()
@Type(() => CropParameters)
@ApiProperty({ type: CropParameters })
parameters!: CropParameters;
}
export class EditActionRotate extends EditActionBase {
@ValidateNested()
@Type(() => RotateParameters)
@ApiProperty({ type: RotateParameters })
parameters!: RotateParameters;
}
export class EditActionMirror extends EditActionBase {
@ValidateNested()
@Type(() => MirrorParameters)
@ApiProperty({ type: MirrorParameters })
parameters!: MirrorParameters;
}
export type EditActionItem =
| {
action: EditAction.Crop;
parameters: CropParameters;
}
| {
action: EditAction.Rotate;
parameters: RotateParameters;
}
| {
action: EditAction.Mirror;
parameters: MirrorParameters;
};
export type EditActionParameter = {
[EditAction.Crop]: CropParameters;
[EditAction.Rotate]: RotateParameters;
[EditAction.Mirror]: MirrorParameters;
};
type EditActions = EditActionCrop | EditActionRotate | EditActionMirror;
const actionToClass: Record<EditAction, ClassConstructor<EditActions>> = {
[EditAction.Crop]: EditActionCrop,
[EditAction.Rotate]: EditActionRotate,
[EditAction.Mirror]: EditActionMirror,
} as const;
const getActionClass = (item: { action: EditAction }): ClassConstructor<EditActions> => actionToClass[item.action];
@ApiExtraModels(EditActionRotate, EditActionMirror, EditActionCrop)
export class EditActionListDto {
/** list of edits */
@ValidateNested({ each: true })
@Transform(({ value: edits }) =>
Array.isArray(edits) ? edits.map((item) => plainToInstance(getActionClass(item), item)) : edits,
)
@ApiProperty({ anyOf: Object.values(actionToClass).map((target) => ({ $ref: getSchemaPath(target) })) })
edits!: EditActionItem[];
}
export class AssetEditsDto extends EditActionListDto {
@ValidateUUID()
@ApiProperty()
assetId!: string;
}
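// Hedged illustration of the Transform above: each raw item in an incoming body is mapped to its
// concrete EditAction* class before validation, so the nested parameter validators run against
// the matching parameter type. Values are placeholders.
//
// const dto = plainToInstance(EditActionListDto, {
//   edits: [
//     { action: 'crop', parameters: { x: 0, y: 0, width: 500, height: 500 } },
//     { action: 'rotate', parameters: { angle: 180 } },
//   ],
// });
// // dto.edits[0] instanceof EditActionCrop and dto.edits[1] instanceof EditActionRotate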

View File

@ -6,9 +6,12 @@ import { DateTime } from 'luxon';
import { AssetFace, Person } from 'src/database';
import { HistoryBuilder, Property } from 'src/decorators';
import { AuthDto } from 'src/dtos/auth.dto';
import { EditActionItem } from 'src/dtos/editing.dto';
import { SourceType } from 'src/enum';
import { AssetFaceTable } from 'src/schema/tables/asset-face.table';
import { ImageDimensions } from 'src/types';
import { asDateString } from 'src/utils/date';
import { transformFaceBoundingBox } from 'src/utils/transform';
import {
IsDateStringFormat,
MaxDateString,
@ -233,29 +236,37 @@ export function mapPerson(person: Person): PersonResponseDto {
};
}
export function mapFacesWithoutPerson(face: Selectable<AssetFaceTable>): AssetFaceWithoutPersonResponseDto {
export function mapFacesWithoutPerson(
face: Selectable<AssetFaceTable>,
edits?: EditActionItem[],
assetDimensions?: ImageDimensions,
): AssetFaceWithoutPersonResponseDto {
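// Run the stored bounding box (and its reference dimensions) through the asset's edits, falling
// back to an empty edit list and the face's own image size, so the reported coordinates line up
// with the edited image.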
return {
id: face.id,
imageHeight: face.imageHeight,
imageWidth: face.imageWidth,
boundingBoxX1: face.boundingBoxX1,
boundingBoxX2: face.boundingBoxX2,
boundingBoxY1: face.boundingBoxY1,
boundingBoxY2: face.boundingBoxY2,
...transformFaceBoundingBox(
{
boundingBoxX1: face.boundingBoxX1,
boundingBoxY1: face.boundingBoxY1,
boundingBoxX2: face.boundingBoxX2,
boundingBoxY2: face.boundingBoxY2,
imageWidth: face.imageWidth,
imageHeight: face.imageHeight,
},
edits ?? [],
assetDimensions ?? { width: face.imageWidth, height: face.imageHeight },
),
sourceType: face.sourceType,
};
}
export function mapFaces(face: AssetFace, auth: AuthDto): AssetFaceResponseDto {
export function mapFaces(
face: AssetFace,
auth: AuthDto,
edits?: EditActionItem[],
assetDimensions?: ImageDimensions,
): AssetFaceResponseDto {
return {
id: face.id,
imageHeight: face.imageHeight,
imageWidth: face.imageWidth,
boundingBoxX1: face.boundingBoxX1,
boundingBoxX2: face.boundingBoxX2,
boundingBoxY1: face.boundingBoxY1,
boundingBoxY2: face.boundingBoxY2,
sourceType: face.sourceType,
...mapFacesWithoutPerson(face, edits, assetDimensions),
person: face.person?.ownerId === auth.user.id ? mapPerson(face.person) : null,
};
}

View File

@ -45,6 +45,9 @@ export enum AssetFileType {
Preview = 'preview',
Thumbnail = 'thumbnail',
Sidecar = 'sidecar',
EditedFullSize = 'fullsize_edited',
EditedPreview = 'preview_edited',
EditedThumbnail = 'thumbnail_edited',
}
export enum AlbumUserRole {
@ -106,6 +109,8 @@ export enum Permission {
AssetUpload = 'asset.upload',
AssetReplace = 'asset.replace',
AssetCopy = 'asset.copy',
AssetDerive = 'asset.derive',
AssetEdit = 'asset.edit',
AlbumCreate = 'album.create',
AlbumRead = 'album.read',
@ -358,6 +363,9 @@ export enum AssetPathType {
Original = 'original',
FullSize = 'fullsize',
Preview = 'preview',
EditedFullSize = 'edited_fullsize',
EditedPreview = 'edited_preview',
EditedThumbnail = 'edited_thumbnail',
Thumbnail = 'thumbnail',
EncodedVideo = 'encoded_video',
Sidecar = 'sidecar',

View File

@ -0,0 +1,22 @@
-- NOTE: This file is auto generated by ./sql-generator
-- AssetEditRepository.storeEdits
begin
delete from "asset_edit"
where
"assetId" = $1
rollback
-- AssetEditRepository.getEditsForAsset
select
"action",
"parameters"
from
"asset_edit"
where
"assetId" = $1
-- AssetEditRepository.deleteEditsForAsset
delete from "asset_edit"
where
"assetId" = $1

View File

@ -103,7 +103,21 @@ select
where
"asset_file"."assetId" = "asset"."id"
) as agg
) as "files"
) as "files",
(
select
coalesce(json_agg(agg), '[]')
from
(
select
"asset_edit"."action",
"asset_edit"."parameters"
from
"asset_edit"
where
"asset_edit"."assetId" = "asset"."id"
) as agg
) as "edits"
from
"asset"
inner join "asset_job_status" on "asset_job_status"."assetId" = "asset"."id"
@ -165,6 +179,20 @@ select
"asset_file"."assetId" = "asset"."id"
) as agg
) as "files",
(
select
coalesce(json_agg(agg), '[]')
from
(
select
"asset_edit"."action",
"asset_edit"."parameters"
from
"asset_edit"
where
"asset_edit"."assetId" = "asset"."id"
) as agg
) as "edits",
to_json("asset_exif") as "exifInfo"
from
"asset"
@ -201,6 +229,7 @@ select
where
"asset_face"."assetId" = "asset"."id"
and "asset_face"."deletedAt" is null
and "asset_face"."isVisible" = $1
) as agg
) as "faces",
(
@ -392,6 +421,7 @@ select
where
"asset_face"."assetId" = "asset"."id"
and "asset_face"."deletedAt" is null
and "asset_face"."isVisible" is true
) as agg
) as "faces",
(

View File

@ -144,6 +144,7 @@ select
where
"asset_face"."assetId" = "asset"."id"
and "asset_face"."deletedAt" is null
and "asset_face"."isVisible" is true
) as agg
) as "faces",
(

View File

@ -15,6 +15,7 @@ from
"asset_ocr"
where
"asset_ocr"."assetId" = $1
and "asset_ocr"."isVisible" = $2
-- OcrRepository.upsert
with
@ -66,3 +67,10 @@ with
)
select
1 as "dummy"
-- OcrRepository.updateOcrVisibilities
update "ocr_search"
set
"text" = $1
where
"assetId" = $2

View File

@ -35,6 +35,7 @@ from
where
"person"."ownerId" = $1
and "asset_face"."deletedAt" is null
and "asset_face"."isVisible" is true
and "person"."isHidden" = $2
group by
"person"."id"
@ -63,6 +64,7 @@ from
left join "asset_face" on "asset_face"."personId" = "person"."id"
where
"asset_face"."deletedAt" is null
and "asset_face"."isVisible" is true
group by
"person"."id"
having
@ -89,6 +91,7 @@ from
where
"asset_face"."assetId" = $1
and "asset_face"."deletedAt" is null
and "asset_face"."isVisible" = $2
order by
"asset_face"."boundingBoxX1" asc
@ -229,6 +232,7 @@ from
and "asset"."deletedAt" is null
where
"asset_face"."deletedAt" is null
and "asset_face"."isVisible" is true
-- PersonRepository.getNumberOfPeople
select
@ -250,6 +254,7 @@ where
where
"asset_face"."personId" = "person"."id"
and "asset_face"."deletedAt" is null
and "asset_face"."isVisible" = $2
and exists (
select
from
@ -260,7 +265,7 @@ where
and "asset"."deletedAt" is null
)
)
and "person"."ownerId" = $2
and "person"."ownerId" = $3
-- PersonRepository.refreshFaces
with
@ -321,6 +326,7 @@ from
where
"asset_face"."personId" = $1
and "asset_face"."deletedAt" is null
and "asset_face"."isVisible" is true
-- PersonRepository.getLatestFaceDate
select

View File

@ -544,6 +544,7 @@ where
"asset_face"."updateId" < $1
and "asset_face"."updateId" > $2
and "asset"."ownerId" = $3
and "asset_face"."isVisible" = $4
order by
"asset_face"."updateId" asc

View File

@ -0,0 +1,45 @@
import { Injectable } from '@nestjs/common';
import { Kysely } from 'kysely';
import { InjectKysely } from 'nestjs-kysely';
import { DummyValue, GenerateSql } from 'src/decorators';
import { EditActionItem } from 'src/dtos/editing.dto';
import { DB } from 'src/schema';
@Injectable()
export class AssetEditRepository {
constructor(@InjectKysely() private db: Kysely<DB>) {}
@GenerateSql({
params: [DummyValue.UUID],
})
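// Replaces the full edit list for an asset atomically: any existing rows are deleted and the new
// edits (if any) are inserted within a single transaction.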
async storeEdits(assetId: string, edits: EditActionItem[]): Promise<void> {
await this.db.transaction().execute(async (trx) => {
await trx.deleteFrom('asset_edit').where('assetId', '=', assetId).execute();
if (edits.length > 0) {
await trx
.insertInto('asset_edit')
.values(edits.map((edit) => ({ assetId, ...edit })))
.execute();
}
});
}
@GenerateSql({
params: [DummyValue.UUID],
})
async getEditsForAsset(assetId: string): Promise<EditActionItem[]> {
return this.db
.selectFrom('asset_edit')
.select(['action', 'parameters'])
.where('assetId', '=', assetId)
.execute() as Promise<EditActionItem[]>;
}
@GenerateSql({
params: [DummyValue.UUID],
})
async deleteEditsForAsset(assetId: string): Promise<void> {
await this.db.deleteFrom('asset_edit').where('assetId', '=', assetId).execute();
}
}

View File

@ -11,6 +11,7 @@ import {
asUuid,
toJson,
withDefaultVisibility,
withEdits,
withExif,
withExifInner,
withFaces,
@ -71,6 +72,7 @@ export class AssetJobRepository {
.selectFrom('asset')
.select(['asset.id', 'asset.thumbhash'])
.select(withFiles)
.select(withEdits)
.where('asset.deletedAt', 'is', null)
.where('asset.visibility', '!=', AssetVisibility.Hidden)
.$if(!force, (qb) =>
@ -112,6 +114,7 @@ export class AssetJobRepository {
'asset.type',
])
.select(withFiles)
.select(withEdits)
.$call(withExifInner)
.where('asset.id', '=', id)
.executeTakeFirst();
@ -189,7 +192,7 @@ export class AssetJobRepository {
.selectFrom('asset')
.select(['asset.id', 'asset.visibility'])
.$call(withExifInner)
.select((eb) => withFaces(eb, true))
.select((eb) => withFaces(eb, true, true))
.select((eb) => withFiles(eb, AssetFileType.Preview))
.where('asset.id', '=', id)
.executeTakeFirst();

View File

@ -19,6 +19,7 @@ import {
truncatedDate,
unnest,
withDefaultVisibility,
withEdits,
withExif,
withFaces,
withFacesAndPeople,
@ -111,6 +112,7 @@ interface GetByIdsRelations {
smartSearch?: boolean;
stack?: { assets?: boolean };
tags?: boolean;
edits?: boolean;
}
@Injectable()
@ -408,7 +410,10 @@ export class AssetRepository {
}
@GenerateSql({ params: [DummyValue.UUID] })
getById(id: string, { exifInfo, faces, files, library, owner, smartSearch, stack, tags }: GetByIdsRelations = {}) {
getById(
id: string,
{ exifInfo, faces, files, library, owner, smartSearch, stack, tags, edits }: GetByIdsRelations = {},
) {
return this.db
.selectFrom('asset')
.selectAll('asset')
@ -445,6 +450,7 @@ export class AssetRepository {
)
.$if(!!files, (qb) => qb.select(withFiles))
.$if(!!tags, (qb) => qb.select(withTags))
.$if(!!edits, (qb) => qb.select(withEdits))
.limit(1)
.executeTakeFirst();
}

View File

@ -4,6 +4,7 @@ import { AlbumUserRepository } from 'src/repositories/album-user.repository';
import { AlbumRepository } from 'src/repositories/album.repository';
import { ApiKeyRepository } from 'src/repositories/api-key.repository';
import { AppRepository } from 'src/repositories/app.repository';
import { AssetEditRepository } from 'src/repositories/asset-edit.repository';
import { AssetJobRepository } from 'src/repositories/asset-job.repository';
import { AssetRepository } from 'src/repositories/asset.repository';
import { AuditRepository } from 'src/repositories/audit.repository';
@ -59,6 +60,7 @@ export const repositories = [
ApiKeyRepository,
AppRepository,
AssetRepository,
AssetEditRepository,
AssetJobRepository,
ConfigRepository,
CronRepository,

View File

@ -0,0 +1,711 @@
import sharp from 'sharp';
import { AssetFace } from 'src/database';
import { EditAction, EditActionCrop, MirrorAxis } from 'src/dtos/editing.dto';
import { AssetOcrResponseDto } from 'src/dtos/ocr.dto';
import { SourceType } from 'src/enum';
import { LoggingRepository } from 'src/repositories/logging.repository';
import { MediaRepository } from 'src/repositories/media.repository';
import { automock } from 'test/utils';
const getPixelColor = async (buffer: Buffer, x: number, y: number) => {
const metadata = await sharp(buffer).metadata();
const width = metadata.width!;
const { data } = await sharp(buffer).raw().toBuffer({ resolveWithObject: true });
const idx = (y * width + x) * 4;
return {
r: data[idx],
g: data[idx + 1],
b: data[idx + 2],
};
};
const buildTestQuadImage = async () => {
// build a 4 quadrant image for testing mirroring
const base = sharp({
create: { width: 1000, height: 1000, channels: 3, background: { r: 0, g: 0, b: 0 } },
}).png();
const tl = await sharp({
create: { width: 500, height: 500, channels: 3, background: { r: 255, g: 0, b: 0 } },
})
.png()
.toBuffer();
const tr = await sharp({
create: { width: 500, height: 500, channels: 3, background: { r: 0, g: 255, b: 0 } },
})
.png()
.toBuffer();
const bl = await sharp({
create: { width: 500, height: 500, channels: 3, background: { r: 0, g: 0, b: 255 } },
})
.png()
.toBuffer();
const br = await sharp({
create: { width: 500, height: 500, channels: 3, background: { r: 255, g: 255, b: 0 } },
})
.png()
.toBuffer();
const image = base.composite([
{ input: tl, left: 0, top: 0 }, // top-left
{ input: tr, left: 500, top: 0 }, // top-right
{ input: bl, left: 0, top: 500 }, // bottom-left
{ input: br, left: 500, top: 500 }, // bottom-right
]);
return image.png().toBuffer();
};
describe(MediaRepository.name, () => {
let sut: MediaRepository;
beforeEach(() => {
// eslint-disable-next-line no-sparse-arrays
sut = new MediaRepository(automock(LoggingRepository, { args: [, { getEnv: () => ({}) }], strict: false }));
});
describe('applyEdits (single actions)', () => {
it('should apply crop edit correctly', async () => {
const result = await sut['applyEdits'](
sharp({
create: {
width: 1000,
height: 1000,
channels: 4,
background: { r: 255, g: 0, b: 0, alpha: 0.5 },
},
}).png(),
[
{
action: EditAction.Crop,
parameters: {
x: 100,
y: 200,
width: 700,
height: 300,
},
},
],
);
const metadata = await result.toBuffer().then((buf) => sharp(buf).metadata());
expect(metadata.width).toBe(700);
expect(metadata.height).toBe(300);
});
it('should apply rotate edit correctly', async () => {
const result = await sut['applyEdits'](
sharp({
create: {
width: 500,
height: 1000,
channels: 4,
background: { r: 255, g: 0, b: 0, alpha: 0.5 },
},
}).png(),
[
{
action: EditAction.Rotate,
parameters: {
angle: 90,
},
},
],
);
const metadata = await result.toBuffer().then((buf) => sharp(buf).metadata());
expect(metadata.width).toBe(1000);
expect(metadata.height).toBe(500);
});
it('should apply mirror edit correctly', async () => {
const resultHorizontal = await sut['applyEdits'](sharp(await buildTestQuadImage()), [
{
action: EditAction.Mirror,
parameters: {
axis: MirrorAxis.Horizontal,
},
},
]);
const bufferHorizontal = await resultHorizontal.toBuffer();
const metadataHorizontal = await resultHorizontal.metadata();
expect(metadataHorizontal.width).toBe(1000);
expect(metadataHorizontal.height).toBe(1000);
expect(await getPixelColor(bufferHorizontal, 10, 10)).toEqual({ r: 0, g: 255, b: 0 });
expect(await getPixelColor(bufferHorizontal, 990, 10)).toEqual({ r: 255, g: 0, b: 0 });
expect(await getPixelColor(bufferHorizontal, 10, 990)).toEqual({ r: 255, g: 255, b: 0 });
expect(await getPixelColor(bufferHorizontal, 990, 990)).toEqual({ r: 0, g: 0, b: 255 });
const resultVertical = await sut['applyEdits'](sharp(await buildTestQuadImage()), [
{
action: EditAction.Mirror,
parameters: {
axis: MirrorAxis.Vertical,
},
},
]);
const bufferVertical = await resultVertical.toBuffer();
const metadataVertical = await resultVertical.metadata();
expect(metadataVertical.width).toBe(1000);
expect(metadataVertical.height).toBe(1000);
// top-left should now be bottom-left (blue)
expect(await getPixelColor(bufferVertical, 10, 10)).toEqual({ r: 0, g: 0, b: 255 });
// top-right should now be bottom-right (yellow)
expect(await getPixelColor(bufferVertical, 990, 10)).toEqual({ r: 255, g: 255, b: 0 });
// bottom-left should now be top-left (red)
expect(await getPixelColor(bufferVertical, 10, 990)).toEqual({ r: 255, g: 0, b: 0 });
// bottom-right should now be top-right (green)
expect(await getPixelColor(bufferVertical, 990, 990)).toEqual({ r: 0, g: 255, b: 0 });
});
});
describe('applyEdits (multiple sequential edits)', () => {
it('should apply horizontal mirror then vertical mirror (equivalent to 180° rotation)', async () => {
const imageBuffer = await buildTestQuadImage();
const result = await sut['applyEdits'](sharp(imageBuffer), [
{ action: EditAction.Mirror, parameters: { axis: MirrorAxis.Horizontal } },
{ action: EditAction.Mirror, parameters: { axis: MirrorAxis.Vertical } },
]);
const buffer = await result.png().toBuffer();
const metadata = await sharp(buffer).metadata();
expect(metadata.width).toBe(1000);
expect(metadata.height).toBe(1000);
expect(await getPixelColor(buffer, 10, 10)).toEqual({ r: 255, g: 255, b: 0 });
expect(await getPixelColor(buffer, 990, 10)).toEqual({ r: 0, g: 0, b: 255 });
expect(await getPixelColor(buffer, 10, 990)).toEqual({ r: 0, g: 255, b: 0 });
expect(await getPixelColor(buffer, 990, 990)).toEqual({ r: 255, g: 0, b: 0 });
});
it('should apply rotate 90° then horizontal mirror', async () => {
const imageBuffer = await buildTestQuadImage();
const result = await sut['applyEdits'](sharp(imageBuffer), [
{ action: EditAction.Rotate, parameters: { angle: 90 } },
{ action: EditAction.Mirror, parameters: { axis: MirrorAxis.Horizontal } },
]);
const buffer = await result.png().toBuffer();
const metadata = await sharp(buffer).metadata();
expect(metadata.width).toBe(1000);
expect(metadata.height).toBe(1000);
expect(await getPixelColor(buffer, 10, 10)).toEqual({ r: 255, g: 0, b: 0 });
expect(await getPixelColor(buffer, 990, 10)).toEqual({ r: 0, g: 0, b: 255 });
expect(await getPixelColor(buffer, 10, 990)).toEqual({ r: 0, g: 255, b: 0 });
expect(await getPixelColor(buffer, 990, 990)).toEqual({ r: 255, g: 255, b: 0 });
});
it('should apply 180° rotation', async () => {
const imageBuffer = await buildTestQuadImage();
const result = await sut['applyEdits'](sharp(imageBuffer), [
{ action: EditAction.Rotate, parameters: { angle: 180 } },
]);
const buffer = await result.png().toBuffer();
const metadata = await sharp(buffer).metadata();
expect(metadata.width).toBe(1000);
expect(metadata.height).toBe(1000);
expect(await getPixelColor(buffer, 10, 10)).toEqual({ r: 255, g: 255, b: 0 });
expect(await getPixelColor(buffer, 990, 10)).toEqual({ r: 0, g: 0, b: 255 });
expect(await getPixelColor(buffer, 10, 990)).toEqual({ r: 0, g: 255, b: 0 });
expect(await getPixelColor(buffer, 990, 990)).toEqual({ r: 255, g: 0, b: 0 });
});
it('should apply 270° rotation', async () => {
const imageBuffer = await buildTestQuadImage();
const result = await sut['applyEdits'](sharp(imageBuffer), [
{ action: EditAction.Rotate, parameters: { angle: 270 } },
]);
const buffer = await result.png().toBuffer();
const metadata = await sharp(buffer).metadata();
expect(metadata.width).toBe(1000);
expect(metadata.height).toBe(1000);
expect(await getPixelColor(buffer, 10, 10)).toEqual({ r: 0, g: 255, b: 0 });
expect(await getPixelColor(buffer, 990, 10)).toEqual({ r: 255, g: 255, b: 0 });
expect(await getPixelColor(buffer, 10, 990)).toEqual({ r: 255, g: 0, b: 0 });
expect(await getPixelColor(buffer, 990, 990)).toEqual({ r: 0, g: 0, b: 255 });
});
it('should apply crop then rotate 90°', async () => {
const imageBuffer = await buildTestQuadImage();
const result = await sut['applyEdits'](sharp(imageBuffer), [
{ action: EditAction.Crop, parameters: { x: 0, y: 0, width: 1000, height: 500 } },
{ action: EditAction.Rotate, parameters: { angle: 90 } },
]);
const buffer = await result.png().toBuffer();
const metadata = await sharp(buffer).metadata();
expect(metadata.width).toBe(500);
expect(metadata.height).toBe(1000);
expect(await getPixelColor(buffer, 10, 10)).toEqual({ r: 255, g: 0, b: 0 });
expect(await getPixelColor(buffer, 10, 990)).toEqual({ r: 0, g: 255, b: 0 });
});
it('should apply crop (left half) then rotate 90°', async () => {
const imageBuffer = await buildTestQuadImage();
const result = await sut['applyEdits'](sharp(imageBuffer), [
{ action: EditAction.Crop, parameters: { x: 0, y: 0, width: 500, height: 1000 } },
{ action: EditAction.Rotate, parameters: { angle: 90 } },
]);
const buffer = await result.png().toBuffer();
const metadata = await sharp(buffer).metadata();
expect(metadata.width).toBe(1000);
expect(metadata.height).toBe(500);
expect(await getPixelColor(buffer, 10, 10)).toEqual({ r: 0, g: 0, b: 255 });
expect(await getPixelColor(buffer, 990, 10)).toEqual({ r: 255, g: 0, b: 0 });
});
it('should apply vertical mirror then horizontal mirror then rotate 90°', async () => {
const imageBuffer = await buildTestQuadImage();
const result = await sut['applyEdits'](sharp(imageBuffer), [
{ action: EditAction.Mirror, parameters: { axis: MirrorAxis.Vertical } },
{ action: EditAction.Mirror, parameters: { axis: MirrorAxis.Horizontal } },
{ action: EditAction.Rotate, parameters: { angle: 90 } },
]);
const buffer = await result.png().toBuffer();
const metadata = await sharp(buffer).metadata();
expect(metadata.width).toBe(1000);
expect(metadata.height).toBe(1000);
expect(await getPixelColor(buffer, 10, 10)).toEqual({ r: 0, g: 255, b: 0 });
expect(await getPixelColor(buffer, 990, 10)).toEqual({ r: 255, g: 255, b: 0 });
expect(await getPixelColor(buffer, 10, 990)).toEqual({ r: 255, g: 0, b: 0 });
expect(await getPixelColor(buffer, 990, 990)).toEqual({ r: 0, g: 0, b: 255 });
});
it('should apply crop to single quadrant then mirror', async () => {
const imageBuffer = await buildTestQuadImage();
const result = await sut['applyEdits'](sharp(imageBuffer), [
{ action: EditAction.Crop, parameters: { x: 0, y: 0, width: 500, height: 500 } },
{ action: EditAction.Mirror, parameters: { axis: MirrorAxis.Horizontal } },
]);
const buffer = await result.png().toBuffer();
const metadata = await sharp(buffer).metadata();
expect(metadata.width).toBe(500);
expect(metadata.height).toBe(500);
expect(await getPixelColor(buffer, 10, 10)).toEqual({ r: 255, g: 0, b: 0 });
expect(await getPixelColor(buffer, 490, 10)).toEqual({ r: 255, g: 0, b: 0 });
expect(await getPixelColor(buffer, 10, 490)).toEqual({ r: 255, g: 0, b: 0 });
expect(await getPixelColor(buffer, 490, 490)).toEqual({ r: 255, g: 0, b: 0 });
});
it('should apply all operations: crop, rotate, mirror', async () => {
const imageBuffer = await buildTestQuadImage();
const result = await sut['applyEdits'](sharp(imageBuffer), [
{ action: EditAction.Crop, parameters: { x: 0, y: 0, width: 500, height: 1000 } },
{ action: EditAction.Rotate, parameters: { angle: 90 } },
{ action: EditAction.Mirror, parameters: { axis: MirrorAxis.Horizontal } },
]);
const buffer = await result.png().toBuffer();
const metadata = await sharp(buffer).metadata();
expect(metadata.width).toBe(1000);
expect(metadata.height).toBe(500);
expect(await getPixelColor(buffer, 10, 10)).toEqual({ r: 255, g: 0, b: 0 });
expect(await getPixelColor(buffer, 990, 10)).toEqual({ r: 0, g: 0, b: 255 });
});
});
describe('checkFaceVisibility', () => {
const baseFace: AssetFace = {
id: 'face-1',
assetId: 'asset-1',
personId: 'person-1',
boundingBoxX1: 100,
boundingBoxY1: 100,
boundingBoxX2: 200,
boundingBoxY2: 200,
imageWidth: 1000,
imageHeight: 800,
sourceType: SourceType.MachineLearning,
isVisible: true,
updatedAt: new Date(),
deletedAt: null,
updateId: '',
};
const assetDimensions = { width: 1000, height: 800 };
describe('with no crop edit', () => {
it('should return all faces as visible when no crop is provided', () => {
const faces = [baseFace];
const result = sut.checkFaceVisibility(faces, assetDimensions);
expect(result.visible).toEqual(faces);
expect(result.hidden).toEqual([]);
});
});
describe('with crop edit', () => {
it('should mark face as visible when fully inside crop area', () => {
const crop: EditActionCrop = {
action: EditAction.Crop,
parameters: { x: 0, y: 0, width: 500, height: 400 },
};
const faces = [baseFace];
const result = sut.checkFaceVisibility(faces, assetDimensions, crop);
expect(result.visible).toEqual(faces);
expect(result.hidden).toEqual([]);
});
it('should mark face as hidden when only 25% is inside the crop area', () => {
const crop: EditActionCrop = {
action: EditAction.Crop,
parameters: { x: 150, y: 150, width: 500, height: 400 },
};
// Face at (100,100)-(200,200), crop starts at (150,150)
// Overlap: (150,150)-(200,200) = 50x50 = 2500
// Face area: 100x100 = 10000
// Overlap percentage: 25% - should be hidden
const faces = [baseFace];
const result = sut.checkFaceVisibility(faces, assetDimensions, crop);
expect(result.visible).toEqual([]);
expect(result.hidden).toEqual(faces);
});
it('should mark face as hidden when less than 50% inside crop area', () => {
const crop: EditActionCrop = {
action: EditAction.Crop,
parameters: { x: 250, y: 250, width: 500, height: 400 },
};
// Face completely outside crop area
const faces = [baseFace];
const result = sut.checkFaceVisibility(faces, assetDimensions, crop);
expect(result.visible).toEqual([]);
expect(result.hidden).toEqual(faces);
});
it('should mark face as hidden when completely outside crop area', () => {
const crop: EditActionCrop = {
action: EditAction.Crop,
parameters: { x: 500, y: 500, width: 200, height: 200 },
};
const faces = [baseFace];
const result = sut.checkFaceVisibility(faces, assetDimensions, crop);
expect(result.visible).toEqual([]);
expect(result.hidden).toEqual(faces);
});
it('should handle multiple faces with mixed visibility', () => {
const crop: EditActionCrop = {
action: EditAction.Crop,
parameters: { x: 0, y: 0, width: 300, height: 300 },
};
const faceInside: AssetFace = {
...baseFace,
id: 'face-inside',
boundingBoxX1: 50,
boundingBoxY1: 50,
boundingBoxX2: 150,
boundingBoxY2: 150,
};
const faceOutside: AssetFace = {
...baseFace,
id: 'face-outside',
boundingBoxX1: 400,
boundingBoxY1: 400,
boundingBoxX2: 500,
boundingBoxY2: 500,
};
const faces = [faceInside, faceOutside];
const result = sut.checkFaceVisibility(faces, assetDimensions, crop);
expect(result.visible).toEqual([faceInside]);
expect(result.hidden).toEqual([faceOutside]);
});
it('should handle face at exactly 50% overlap threshold', () => {
// Face at (0,0)-(100,100), crop at (50,0)-(150,100)
// Overlap: (50,0)-(100,100) = 50x100 = 5000
// Face area: 100x100 = 10000
// Overlap percentage: 50% - exactly at threshold, should be visible
const faceAtEdge: AssetFace = {
...baseFace,
id: 'face-edge',
boundingBoxX1: 0,
boundingBoxY1: 0,
boundingBoxX2: 100,
boundingBoxY2: 100,
};
const crop: EditActionCrop = {
action: EditAction.Crop,
parameters: { x: 50, y: 0, width: 100, height: 100 },
};
const faces = [faceAtEdge];
const result = sut.checkFaceVisibility(faces, assetDimensions, crop);
expect(result.visible).toEqual([faceAtEdge]);
expect(result.hidden).toEqual([]);
});
});
describe('with scaled dimensions', () => {
it('should handle faces when asset dimensions differ from face image dimensions', () => {
// Face stored at 1000x800 resolution, but displaying at 500x400
const scaledDimensions = { width: 500, height: 400 };
const crop: EditActionCrop = {
action: EditAction.Crop,
parameters: { x: 0, y: 0, width: 250, height: 200 },
};
// Face at (100,100)-(200,200) on 1000x800
// Scaled to 500x400: (50,50)-(100,100)
// Crop at (0,0)-(250,200) - face is fully inside
const faces = [baseFace];
const result = sut.checkFaceVisibility(faces, scaledDimensions, crop);
expect(result.visible).toEqual(faces);
expect(result.hidden).toEqual([]);
});
});
describe('visibility is only affected by crop (not rotate or mirror)', () => {
it('should keep all faces visible when there is no crop regardless of other transforms', () => {
// Rotate and mirror edits don't affect visibility - only crop does
// The visibility functions only take an optional crop parameter
const faces = [baseFace];
// Without any crop, all faces remain visible
const result = sut.checkFaceVisibility(faces, assetDimensions);
expect(result.visible).toEqual(faces);
expect(result.hidden).toEqual([]);
});
it('should only consider crop for visibility calculation', () => {
// Even if the image will be rotated/mirrored, visibility is determined
// solely by whether the face overlaps with the crop area
const crop: EditActionCrop = {
action: EditAction.Crop,
parameters: { x: 0, y: 0, width: 300, height: 300 },
};
const faceInsideCrop: AssetFace = {
...baseFace,
id: 'face-inside',
boundingBoxX1: 50,
boundingBoxY1: 50,
boundingBoxX2: 150,
boundingBoxY2: 150,
};
const faceOutsideCrop: AssetFace = {
...baseFace,
id: 'face-outside',
boundingBoxX1: 400,
boundingBoxY1: 400,
boundingBoxX2: 500,
boundingBoxY2: 500,
};
const faces = [faceInsideCrop, faceOutsideCrop];
const result = sut.checkFaceVisibility(faces, assetDimensions, crop);
// Face inside crop area is visible, face outside is hidden
// This is true regardless of any subsequent rotate/mirror operations
expect(result.visible).toEqual([faceInsideCrop]);
expect(result.hidden).toEqual([faceOutsideCrop]);
});
});
});
describe('checkOcrVisibility', () => {
const baseOcr: AssetOcrResponseDto = {
id: 'ocr-1',
assetId: 'asset-1',
x1: 0.1,
y1: 0.1,
x2: 0.2,
y2: 0.1,
x3: 0.2,
y3: 0.2,
x4: 0.1,
y4: 0.2,
boxScore: 0.9,
textScore: 0.85,
text: 'Test OCR',
};
const assetDimensions = { width: 1000, height: 800 };
describe('with no crop edit', () => {
it('should return all OCR items as visible when no crop is provided', () => {
const ocrs = [baseOcr];
const result = sut.checkOcrVisibility(ocrs, assetDimensions);
expect(result.visible).toEqual(ocrs);
expect(result.hidden).toEqual([]);
});
});
describe('with crop edit', () => {
it('should mark OCR as visible when fully inside crop area', () => {
const crop: EditActionCrop = {
action: EditAction.Crop,
parameters: { x: 0, y: 0, width: 500, height: 400 },
};
// OCR box: (0.1,0.1)-(0.2,0.2) on 1000x800 = (100,80)-(200,160)
// Crop: (0,0)-(500,400) - OCR fully inside
const ocrs = [baseOcr];
const result = sut.checkOcrVisibility(ocrs, assetDimensions, crop);
expect(result.visible).toEqual(ocrs);
expect(result.hidden).toEqual([]);
});
it('should mark OCR as hidden when completely outside crop area', () => {
const crop: EditActionCrop = {
action: EditAction.Crop,
parameters: { x: 500, y: 500, width: 200, height: 200 },
};
// OCR box: (100,80)-(200,160) - completely outside crop
const ocrs = [baseOcr];
const result = sut.checkOcrVisibility(ocrs, assetDimensions, crop);
expect(result.visible).toEqual([]);
expect(result.hidden).toEqual(ocrs);
});
it('should mark OCR as hidden when less than 50% inside crop area', () => {
const crop: EditActionCrop = {
action: EditAction.Crop,
parameters: { x: 150, y: 120, width: 500, height: 400 },
};
// OCR box: (100,80)-(200,160)
// Crop: (150,120)-(650,520)
// Overlap: (150,120)-(200,160) = 50x40 = 2000
// OCR area: 100x80 = 8000
// Overlap percentage: 25% - should be hidden
const ocrs = [baseOcr];
const result = sut.checkOcrVisibility(ocrs, assetDimensions, crop);
expect(result.visible).toEqual([]);
expect(result.hidden).toEqual(ocrs);
});
it('should handle multiple OCR items with mixed visibility', () => {
const crop: EditActionCrop = {
action: EditAction.Crop,
parameters: { x: 0, y: 0, width: 300, height: 300 },
};
const ocrInside: AssetOcrResponseDto = {
...baseOcr,
id: 'ocr-inside',
};
const ocrOutside: AssetOcrResponseDto = {
...baseOcr,
id: 'ocr-outside',
x1: 0.5,
y1: 0.5,
x2: 0.6,
y2: 0.5,
x3: 0.6,
y3: 0.6,
x4: 0.5,
y4: 0.6,
};
const ocrs = [ocrInside, ocrOutside];
const result = sut.checkOcrVisibility(ocrs, assetDimensions, crop);
expect(result.visible).toEqual([ocrInside]);
expect(result.hidden).toEqual([ocrOutside]);
});
it('should handle OCR boxes with rotated/skewed polygons', () => {
// OCR with a rotated bounding box (not axis-aligned)
const rotatedOcr: AssetOcrResponseDto = {
...baseOcr,
id: 'ocr-rotated',
x1: 0.15,
y1: 0.1,
x2: 0.25,
y2: 0.15,
x3: 0.2,
y3: 0.25,
x4: 0.1,
y4: 0.2,
};
const crop: EditActionCrop = {
action: EditAction.Crop,
parameters: { x: 0, y: 0, width: 300, height: 300 },
};
const ocrs = [rotatedOcr];
const result = sut.checkOcrVisibility(ocrs, assetDimensions, crop);
expect(result.visible).toEqual([rotatedOcr]);
expect(result.hidden).toEqual([]);
});
});
describe('visibility is only affected by crop (not rotate or mirror)', () => {
it('should keep all OCR items visible when there is no crop regardless of other transforms', () => {
// Rotate and mirror edits don't affect visibility - only crop does
// The visibility functions only take an optional crop parameter
const ocrs = [baseOcr];
// Without any crop, all OCR items remain visible
const result = sut.checkOcrVisibility(ocrs, assetDimensions);
expect(result.visible).toEqual(ocrs);
expect(result.hidden).toEqual([]);
});
it('should only consider crop for visibility calculation', () => {
// Even if the image will be rotated/mirrored, visibility is determined
// solely by whether the OCR box overlaps with the crop area
const crop: EditActionCrop = {
action: EditAction.Crop,
parameters: { x: 0, y: 0, width: 300, height: 300 },
};
const ocrInsideCrop: AssetOcrResponseDto = {
...baseOcr,
id: 'ocr-inside',
// OCR at (0.1,0.1)-(0.2,0.2) = (100,80)-(200,160) on 1000x800, inside crop
};
const ocrOutsideCrop: AssetOcrResponseDto = {
...baseOcr,
id: 'ocr-outside',
x1: 0.5,
y1: 0.5,
x2: 0.6,
y2: 0.5,
x3: 0.6,
y3: 0.6,
x4: 0.5,
y4: 0.6,
// OCR at (500,400)-(600,480) on 1000x800, outside crop
};
const ocrs = [ocrInsideCrop, ocrOutsideCrop];
const result = sut.checkOcrVisibility(ocrs, assetDimensions, crop);
// OCR inside crop area is visible, OCR outside is hidden
// This is true regardless of any subsequent rotate/mirror operations
expect(result.visible).toEqual([ocrInsideCrop]);
expect(result.hidden).toEqual([ocrOutsideCrop]);
});
});
});
});

View File

@ -6,7 +6,9 @@ import fs from 'node:fs/promises';
import { Writable } from 'node:stream';
import sharp from 'sharp';
import { ORIENTATION_TO_SHARP_ROTATION } from 'src/constants';
import { Exif } from 'src/database';
import { AssetFace, Exif } from 'src/database';
import { EditAction, EditActionCrop, EditActionItem } from 'src/dtos/editing.dto';
import { AssetOcrResponseDto } from 'src/dtos/ocr.dto';
import { Colorspace, LogLevel, RawExtractedFormat } from 'src/enum';
import { LoggingRepository } from 'src/repositories/logging.repository';
import {
@ -19,6 +21,7 @@ import {
VideoInfo,
} from 'src/types';
import { handlePromiseError } from 'src/utils/misc';
import { createAffineMatrix } from 'src/utils/transform';
const probe = (input: string, options: string[]): Promise<FfprobeData> =>
new Promise((resolve, reject) =>
@ -138,21 +141,48 @@ export class MediaRepository {
}
}
decodeImage(input: string | Buffer, options: DecodeToBufferOptions) {
return this.getImageDecodingPipeline(input, options).raw().toBuffer({ resolveWithObject: true });
async decodeImage(input: string | Buffer, options: DecodeToBufferOptions) {
const pipeline = await this.getImageDecodingPipeline(input, options);
return pipeline.raw().toBuffer({ resolveWithObject: true });
}
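// Edits are applied in two stages: a crop (if present) is extracted first, using coordinates in
// the original image space, then the remaining rotate/mirror actions are composed into a single
// 2x2 affine matrix that sharp applies in one pass.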
private async applyEdits(pipeline: sharp.Sharp, edits: EditActionItem[]): Promise<sharp.Sharp> {
const affineEditOperations = edits.filter((edit) => edit.action !== EditAction.Crop);
const matrix = createAffineMatrix(affineEditOperations);
const crop = edits.find((edit) => edit.action === EditAction.Crop);
if (crop) {
pipeline = pipeline.extract({
left: Math.round(crop.parameters.x),
top: Math.round(crop.parameters.y),
width: Math.round(crop.parameters.width),
height: Math.round(crop.parameters.height),
});
}
const { a, b, c, d } = matrix;
pipeline = pipeline.affine([
[a, b],
[c, d],
]);
return pipeline;
}
async generateThumbnail(input: string | Buffer, options: GenerateThumbnailOptions, output: string): Promise<void> {
await this.getImageDecodingPipeline(input, options)
.toFormat(options.format, {
quality: options.quality,
// this is default in libvips (except the threshold is 90), but we need to set it manually in sharp
chromaSubsampling: options.quality >= 80 ? '4:4:4' : '4:2:0',
})
.toFile(output);
const pipeline = await this.getImageDecodingPipeline(input, options);
const decoded = pipeline.toFormat(options.format, {
quality: options.quality,
// this is default in libvips (except the threshold is 90), but we need to set it manually in sharp
chromaSubsampling: options.quality >= 80 ? '4:4:4' : '4:2:0',
});
await decoded.toFile(output);
}
private getImageDecodingPipeline(input: string | Buffer, options: DecodeToBufferOptions) {
private async getImageDecodingPipeline(input: string | Buffer, options: DecodeToBufferOptions) {
let pipeline = sharp(input, {
// some invalid images can still be processed by sharp, but we want to fail on them by default to avoid crashes
failOn: options.processInvalidImages ? 'none' : 'error',
@ -175,8 +205,8 @@ export class MediaRepository {
}
}
if (options.crop) {
pipeline = pipeline.extract(options.crop);
if (options.edits && options.edits.length > 0) {
pipeline = await this.applyEdits(pipeline, options.edits);
}
if (options.size !== undefined) {
@ -186,17 +216,127 @@ export class MediaRepository {
}
async generateThumbhash(input: string | Buffer, options: GenerateThumbhashOptions): Promise<Buffer> {
const [{ rgbaToThumbHash }, { data, info }] = await Promise.all([
const [{ rgbaToThumbHash }, decodingPipeline] = await Promise.all([
import('thumbhash'),
sharp(input, options)
.resize(100, 100, { fit: 'inside', withoutEnlargement: true })
.raw()
.ensureAlpha()
.toBuffer({ resolveWithObject: true }),
this.getImageDecodingPipeline(input, {
colorspace: options.colorspace,
processInvalidImages: options.processInvalidImages,
raw: options.raw,
edits: options.edits,
}),
]);
const pipeline = decodingPipeline.resize(100, 100, { fit: 'inside', withoutEnlargement: true }).raw().ensureAlpha();
const { data, info } = await pipeline.toBuffer({ resolveWithObject: true });
return Buffer.from(rgbaToThumbHash(info.width, info.height, data));
}
private boundingBoxOverlap(
boxA: { x1: number; y1: number; x2: number; y2: number },
boxB: { x1: number; y1: number; x2: number; y2: number },
) {
const overlapX1 = Math.max(boxA.x1, boxB.x1);
const overlapY1 = Math.max(boxA.y1, boxB.y1);
const overlapX2 = Math.min(boxA.x2, boxB.x2);
const overlapY2 = Math.min(boxA.y2, boxB.y2);
const overlapArea = Math.max(0, overlapX2 - overlapX1) * Math.max(0, overlapY2 - overlapY1);
const areaA = (boxA.x2 - boxA.x1) * (boxA.y2 - boxA.y1);
return overlapArea / areaA;
}
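// Worked example (mirrors the unit tests): a face box (100,100)-(200,200) against a crop of
// (150,150)-(650,550) overlaps in (150,150)-(200,200), i.e. 2500 of the 10000 box area, giving
// 0.25, which is below the 0.5 visibility threshold applied below.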
checkFaceVisibility(
faces: AssetFace[],
assetDimensions: ImageDimensions,
crop?: EditActionCrop,
): { visible: AssetFace[]; hidden: AssetFace[] } {
if (!crop) {
return {
visible: faces,
hidden: [],
};
}
const cropArea = {
x1: crop.parameters.x,
y1: crop.parameters.y,
x2: crop.parameters.x + crop.parameters.width,
y2: crop.parameters.y + crop.parameters.height,
};
const status = faces.map((face) => {
const faceArea = {
x1: (face.boundingBoxX1 / face.imageWidth) * assetDimensions.width,
y1: (face.boundingBoxY1 / face.imageHeight) * assetDimensions.height,
x2: (face.boundingBoxX2 / face.imageWidth) * assetDimensions.width,
y2: (face.boundingBoxY2 / face.imageHeight) * assetDimensions.height,
};
const overlapPercentage = this.boundingBoxOverlap(faceArea, cropArea);
return {
face,
isVisible: overlapPercentage >= 0.5,
};
});
return {
visible: status.filter((s) => s.isVisible).map((s) => s.face),
hidden: status.filter((s) => !s.isVisible).map((s) => s.face),
};
}
checkOcrVisibility(
ocrs: AssetOcrResponseDto[],
assetDimensions: ImageDimensions,
crop?: EditActionCrop,
): { visible: AssetOcrResponseDto[]; hidden: AssetOcrResponseDto[] } {
if (!crop) {
return {
visible: ocrs,
hidden: [],
};
}
const cropArea = {
x1: crop.parameters.x,
y1: crop.parameters.y,
x2: crop.parameters.x + crop.parameters.width,
y2: crop.parameters.y + crop.parameters.height,
};
const status = ocrs.map((ocr) => {
// OCR boxes are stored as normalized (0-1) coordinates from the ML-scaled image, so scale them to the asset dimensions
const ocrPolygon = [
{ x: ocr.x1 * assetDimensions.width, y: ocr.y1 * assetDimensions.height },
{ x: ocr.x2 * assetDimensions.width, y: ocr.y2 * assetDimensions.height },
{ x: ocr.x3 * assetDimensions.width, y: ocr.y3 * assetDimensions.height },
{ x: ocr.x4 * assetDimensions.width, y: ocr.y4 * assetDimensions.height },
];
const ocrBox = {
x1: Math.min(ocrPolygon[0].x, ocrPolygon[1].x, ocrPolygon[2].x, ocrPolygon[3].x),
y1: Math.min(ocrPolygon[0].y, ocrPolygon[1].y, ocrPolygon[2].y, ocrPolygon[3].y),
x2: Math.max(ocrPolygon[0].x, ocrPolygon[1].x, ocrPolygon[2].x, ocrPolygon[3].x),
y2: Math.max(ocrPolygon[0].y, ocrPolygon[1].y, ocrPolygon[2].y, ocrPolygon[3].y),
};
const overlapPercentage = this.boundingBoxOverlap(ocrBox, cropArea);
return {
ocr,
isVisible: overlapPercentage >= 0.5,
};
});
return {
visible: status.filter((s) => s.isVisible).map((s) => s.ocr),
hidden: status.filter((s) => !s.isVisible).map((s) => s.ocr),
};
}
async probe(input: string, options?: ProbeOptions): Promise<VideoInfo> {
const results = await probe(input, options?.countFrames ? ['-count_packets'] : []); // gets frame count quickly: https://stackoverflow.com/a/28376817
return {

View File

@ -2,6 +2,7 @@ import { Injectable } from '@nestjs/common';
import { Insertable, Kysely, sql } from 'kysely';
import { InjectKysely } from 'nestjs-kysely';
import { DummyValue, GenerateSql } from 'src/decorators';
import { AssetOcrResponseDto } from 'src/dtos/ocr.dto';
import { DB } from 'src/schema';
import { AssetOcrTable } from 'src/schema/tables/asset-ocr.table';
@ -15,8 +16,13 @@ export class OcrRepository {
}
@GenerateSql({ params: [DummyValue.UUID] })
getByAssetId(id: string) {
return this.db.selectFrom('asset_ocr').selectAll('asset_ocr').where('asset_ocr.assetId', '=', id).execute();
getByAssetId(id: string, { onlyVisible = true }: { onlyVisible?: boolean } = {}) {
return this.db
.selectFrom('asset_ocr')
.selectAll('asset_ocr')
.where('asset_ocr.assetId', '=', id)
.$if(onlyVisible, (qb) => qb.where('asset_ocr.isVisible', '=', true))
.execute();
}
deleteAll() {
@ -65,4 +71,38 @@ export class OcrRepository {
return query.selectNoFrom(sql`1`.as('dummy')).execute();
}
@GenerateSql({ params: [DummyValue.UUID, [], []] })
async updateOcrVisibilities(
assetId: string,
visible: AssetOcrResponseDto[],
hidden: AssetOcrResponseDto[],
): Promise<void> {
if (visible.length > 0) {
await this.db
.updateTable('asset_ocr')
.set({ isVisible: true })
.where(
'asset_ocr.id',
'in',
visible.map((i) => i.id),
)
.execute();
}
if (hidden.length > 0) {
await this.db
.updateTable('asset_ocr')
.set({ isVisible: false })
.where(
'asset_ocr.id',
'in',
hidden.map((i) => i.id),
)
.execute();
}
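// Rebuild the searchable text from the visible OCR results only, presumably so text hidden by
// a crop no longer matches search queries.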
const searchText = visible.map((item) => item.text.trim()).join(' ');
await this.db.updateTable('ocr_search').set({ text: searchText }).where('assetId', '=', assetId).execute();
}
}

View File

@ -2,6 +2,7 @@ import { Injectable } from '@nestjs/common';
import { ExpressionBuilder, Insertable, Kysely, NotNull, Selectable, sql, Updateable } from 'kysely';
import { jsonObjectFrom } from 'kysely/helpers/postgres';
import { InjectKysely } from 'nestjs-kysely';
import { AssetFace } from 'src/database';
import { Chunked, ChunkedArray, DummyValue, GenerateSql } from 'src/decorators';
import { AssetFileType, AssetVisibility, SourceType } from 'src/enum';
import { DB } from 'src/schema';
@ -121,6 +122,7 @@ export class PersonRepository {
.$if(!!options.sourceType, (qb) => qb.where('asset_face.sourceType', '=', options.sourceType!))
.$if(!!options.assetId, (qb) => qb.where('asset_face.assetId', '=', options.assetId!))
.where('asset_face.deletedAt', 'is', null)
.where('asset_face.isVisible', 'is', true)
.stream();
}
@ -160,6 +162,7 @@ export class PersonRepository {
)
.where('person.ownerId', '=', userId)
.where('asset_face.deletedAt', 'is', null)
.where('asset_face.isVisible', 'is', true)
.orderBy('person.isHidden', 'asc')
.orderBy('person.isFavorite', 'desc')
.having((eb) =>
@ -208,19 +211,21 @@ export class PersonRepository {
.selectAll('person')
.leftJoin('asset_face', 'asset_face.personId', 'person.id')
.where('asset_face.deletedAt', 'is', null)
.where('asset_face.isVisible', 'is', true)
.having((eb) => eb.fn.count('asset_face.assetId'), '=', 0)
.groupBy('person.id')
.execute();
}
@GenerateSql({ params: [DummyValue.UUID] })
getFaces(assetId: string) {
getFaces(assetId: string, { onlyVisible = true }: { onlyVisible?: boolean } = {}) {
return this.db
.selectFrom('asset_face')
.selectAll('asset_face')
.select(withPerson)
.where('asset_face.assetId', '=', assetId)
.where('asset_face.deletedAt', 'is', null)
.$if(onlyVisible, (qb) => qb.where('asset_face.isVisible', '=', true))
.orderBy('asset_face.boundingBoxX1', 'asc')
.execute();
}
@ -350,6 +355,7 @@ export class PersonRepository {
)
.select((eb) => eb.fn.count(eb.fn('distinct', ['asset.id'])).as('count'))
.where('asset_face.deletedAt', 'is', null)
.where('asset_face.isVisible', 'is', true)
.executeTakeFirst();
return {
@ -368,6 +374,7 @@ export class PersonRepository {
.selectFrom('asset_face')
.whereRef('asset_face.personId', '=', 'person.id')
.where('asset_face.deletedAt', 'is', null)
.where('asset_face.isVisible', '=', true)
.where((eb) =>
eb.exists((eb) =>
eb
@ -495,6 +502,7 @@ export class PersonRepository {
.selectAll('asset_face')
.where('asset_face.personId', '=', personId)
.where('asset_face.deletedAt', 'is', null)
.where('asset_face.isVisible', 'is', true)
.executeTakeFirst();
}
@ -539,4 +547,35 @@ export class PersonRepository {
}
return this.db.selectFrom('person').select(['id', 'thumbnailPath']).where('id', 'in', ids).execute();
}
@GenerateSql({ params: [[], []] })
async updateFaceVisibilities(visible: AssetFace[], hidden: AssetFace[]): Promise<void> {
if (visible.length === 0 && hidden.length === 0) {
return;
}
if (visible.length > 0) {
await this.db
.updateTable('asset_face')
.set({ isVisible: true })
.where(
'asset_face.id',
'in',
visible.map(({ id }) => id),
)
.execute();
}
if (hidden.length > 0) {
await this.db
.updateTable('asset_face')
.set({ isVisible: false })
.where(
'asset_face.id',
'in',
hidden.map(({ id }) => id),
)
.execute();
}
}
}

View File

@ -483,6 +483,7 @@ class AssetFaceSync extends BaseSync {
])
.leftJoin('asset', 'asset.id', 'asset_face.assetId')
.where('asset.ownerId', '=', options.userId)
.where('asset_face.isVisible', '=', true)
.stream();
}
}

View File

@ -28,6 +28,7 @@ import { AlbumUserTable } from 'src/schema/tables/album-user.table';
import { AlbumTable } from 'src/schema/tables/album.table';
import { ApiKeyTable } from 'src/schema/tables/api-key.table';
import { AssetAuditTable } from 'src/schema/tables/asset-audit.table';
import { AssetEditTable } from 'src/schema/tables/asset-edit.table';
import { AssetExifTable } from 'src/schema/tables/asset-exif.table';
import { AssetFaceAuditTable } from 'src/schema/tables/asset-face-audit.table';
import { AssetFaceTable } from 'src/schema/tables/asset-face.table';
@ -86,6 +87,7 @@ export class ImmichDatabase {
AlbumTable,
ApiKeyTable,
AssetAuditTable,
AssetEditTable,
AssetFaceTable,
AssetFaceAuditTable,
AssetMetadataTable,
@ -179,6 +181,7 @@ export interface DB {
asset: AssetTable;
asset_audit: AssetAuditTable;
asset_edit: AssetEditTable;
asset_exif: AssetExifTable;
asset_face: AssetFaceTable;
asset_face_audit: AssetFaceAuditTable;

View File

@ -0,0 +1,22 @@
import { Kysely, sql } from 'kysely';
export async function up(db: Kysely<any>): Promise<void> {
await sql`
CREATE TABLE "asset_edit" (
"id" uuid NOT NULL DEFAULT uuid_generate_v4(),
"assetId" uuid NOT NULL,
"action" varchar NOT NULL,
"parameters" jsonb NOT NULL
);
`.execute(db);
await sql`ALTER TABLE "asset_edit" ADD CONSTRAINT "asset_edit_pkey" PRIMARY KEY ("id");`.execute(db);
await sql`ALTER TABLE "asset_edit" ADD CONSTRAINT "asset_edit_assetId_fkey" FOREIGN KEY ("assetId") REFERENCES "asset" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(
db,
);
await sql`CREATE INDEX "asset_edit_assetId_idx" ON "asset_edit" ("assetId")`.execute(db);
}
export async function down(db: Kysely<any>): Promise<void> {
await sql`DROP TABLE IF EXISTS "asset_edit";`.execute(db);
}

View File

@ -0,0 +1,11 @@
import { Kysely, sql } from 'kysely';
export async function up(db: Kysely<any>): Promise<void> {
await sql`ALTER TABLE "asset_ocr" ADD COLUMN "isVisible" boolean NOT NULL DEFAULT TRUE`.execute(db);
await sql`ALTER TABLE "asset_face" ADD COLUMN "isVisible" boolean NOT NULL DEFAULT TRUE`.execute(db);
}
export async function down(db: Kysely<any>): Promise<void> {
await sql`ALTER TABLE "asset_ocr" DROP COLUMN "isVisible";`.execute(db);
await sql`ALTER TABLE "asset_face" DROP COLUMN "isVisible";`.execute(db);
}

View File

@ -0,0 +1,17 @@
import { EditAction, EditActionParameter } from 'src/dtos/editing.dto';
import { AssetTable } from 'src/schema/tables/asset.table';
import { Column, ForeignKeyColumn, Generated, PrimaryGeneratedColumn } from 'src/sql-tools';
export class AssetEditTable<T extends EditAction = EditAction> {
@PrimaryGeneratedColumn()
id!: Generated<string>;
@ForeignKeyColumn(() => AssetTable, { onDelete: 'CASCADE', onUpdate: 'CASCADE', nullable: false, primary: true })
assetId!: string;
@Column()
action!: T;
@Column({ type: 'jsonb' })
parameters!: EditActionParameter[T];
}
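// EditAction and EditActionParameter live in src/dtos/editing.dto.ts and are not shown in this diff.
// Inferred from how they are used throughout this change set (crop { x, y, width, height },
// rotate { angle }, mirror { axis }); the rotate/mirror enum string values below are assumptions:
enum EditActionSketch {
  Crop = 'crop',
  Rotate = 'rotate',
  Mirror = 'mirror',
}
type EditActionItemSketch =
  | { action: EditActionSketch.Crop; parameters: { x: number; y: number; width: number; height: number } }
  | { action: EditActionSketch.Rotate; parameters: { angle: number } }
  | { action: EditActionSketch.Mirror; parameters: { axis: 'horizontal' | 'vertical' } };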

View File

@ -78,4 +78,7 @@ export class AssetFaceTable {
@UpdateIdColumn()
updateId!: Generated<string>;
@Column({ type: 'boolean', default: true })
isVisible!: Generated<boolean>;
}

View File

@ -42,4 +42,7 @@ export class AssetOcrTable {
@Column({ type: 'text' })
text!: string;
@Column({ type: 'boolean', default: true })
isVisible!: Generated<boolean>;
}

View File

@ -489,7 +489,7 @@ describe(AssetMediaService.name, () => {
describe('downloadOriginal', () => {
it('should require the asset.download permission', async () => {
await expect(sut.downloadOriginal(authStub.admin, 'asset-1')).rejects.toBeInstanceOf(BadRequestException);
await expect(sut.downloadOriginal(authStub.admin, 'asset-1', true)).rejects.toBeInstanceOf(BadRequestException);
expect(mocks.access.asset.checkOwnerAccess).toHaveBeenCalledWith(
authStub.admin.user.id,
@ -503,16 +503,16 @@ describe(AssetMediaService.name, () => {
it('should throw an error if the asset is not found', async () => {
mocks.access.asset.checkOwnerAccess.mockResolvedValue(new Set(['asset-1']));
await expect(sut.downloadOriginal(authStub.admin, 'asset-1')).rejects.toBeInstanceOf(NotFoundException);
await expect(sut.downloadOriginal(authStub.admin, 'asset-1', true)).rejects.toBeInstanceOf(NotFoundException);
expect(mocks.asset.getById).toHaveBeenCalledWith('asset-1', { files: true });
expect(mocks.asset.getById).toHaveBeenCalledWith('asset-1', { files: true, edits: true });
});
it('should download a file', async () => {
mocks.access.asset.checkOwnerAccess.mockResolvedValue(new Set(['asset-1']));
mocks.asset.getById.mockResolvedValue(assetStub.image);
await expect(sut.downloadOriginal(authStub.admin, 'asset-1')).resolves.toEqual(
await expect(sut.downloadOriginal(authStub.admin, 'asset-1', true)).resolves.toEqual(
new ImmichFileResponse({
path: '/original/path.jpg',
fileName: 'asset-id.jpg',
@ -521,6 +521,8 @@ describe(AssetMediaService.name, () => {
}),
);
});
// TODO: Edited asset tests
});
describe('viewThumbnail', () => {
@ -620,6 +622,8 @@ describe(AssetMediaService.name, () => {
}),
);
});
// TODO: Edited asset tests
});
describe('playbackVideo', () => {

View File

@ -193,11 +193,24 @@ export class AssetMediaService extends BaseService {
}
}
async downloadOriginal(auth: AuthDto, id: string): Promise<ImmichFileResponse> {
async downloadOriginal(auth: AuthDto, id: string, edited: boolean): Promise<ImmichFileResponse> {
await this.requireAccess({ auth, permission: Permission.AssetDownload, ids: [id] });
const asset = await this.findOrFail(id);
if (asset.edits!.length > 0 && edited) {
const { editedFullsizeFile } = getAssetFiles(asset.files ?? []);
if (editedFullsizeFile) {
return new ImmichFileResponse({
path: editedFullsizeFile.path,
fileName: getFileNameWithoutExtension(asset.originalFileName) + getFilenameExtension(editedFullsizeFile.path),
contentType: mimeTypes.lookup(editedFullsizeFile.path),
cacheControl: CacheControl.PrivateWithCache,
});
}
}
return new ImmichFileResponse({
path: asset.originalPath,
fileName: asset.originalFileName,
@ -216,12 +229,20 @@ export class AssetMediaService extends BaseService {
const asset = await this.findOrFail(id);
const size = dto.size ?? AssetMediaSize.THUMBNAIL;
const { thumbnailFile, previewFile, fullsizeFile } = getAssetFiles(asset.files ?? []);
const files = getAssetFiles(asset.files ?? []);
const requestingEdited = (dto.edited ?? true) && asset.edits!.length > 0;
const { fullsizeFile, previewFile, thumbnailFile } = {
fullsizeFile: requestingEdited ? files.editedFullsizeFile : files.fullsizeFile,
previewFile: requestingEdited ? files.editedPreviewFile : files.previewFile,
thumbnailFile: requestingEdited ? files.editedThumbnailFile : files.thumbnailFile,
};
let filepath = previewFile?.path;
if (size === AssetMediaSize.THUMBNAIL && thumbnailFile) {
filepath = thumbnailFile.path;
} else if (size === AssetMediaSize.FULLSIZE) {
if (mimeTypes.isWebSupportedImage(asset.originalPath)) {
if (mimeTypes.isWebSupportedImage(asset.originalPath) && !dto.edited) {
// use original file for web supported images
return { targetSize: 'original' };
}
@ -456,7 +477,7 @@ export class AssetMediaService extends BaseService {
}
private async findOrFail(id: string) {
const asset = await this.assetRepository.getById(id, { files: true });
const asset = await this.assetRepository.getById(id, { files: true, edits: true });
if (!asset) {
throw new NotFoundException('Asset not found');
}

View File

@ -585,6 +585,9 @@ describe(AssetService.name, () => {
'/uploads/user-id/webp/path.ext',
'/uploads/user-id/thumbs/path.jpg',
'/uploads/user-id/fullsize/path.webp',
undefined,
undefined,
undefined,
assetWithFace.originalPath,
],
},
@ -646,6 +649,11 @@ describe(AssetService.name, () => {
'/uploads/user-id/webp/path.ext',
'/uploads/user-id/thumbs/path.jpg',
'/uploads/user-id/fullsize/path.webp',
undefined,
undefined,
undefined,
undefined,
undefined,
'fake_path/asset_1.jpeg',
],
},
@ -672,6 +680,11 @@ describe(AssetService.name, () => {
'/uploads/user-id/webp/path.ext',
'/uploads/user-id/thumbs/path.jpg',
'/uploads/user-id/fullsize/path.webp',
undefined,
undefined,
undefined,
undefined,
undefined,
'fake_path/asset_1.jpeg',
],
},
@ -709,6 +722,7 @@ describe(AssetService.name, () => {
mocks.access.asset.checkOwnerAccess.mockResolvedValue(new Set(['asset-1']));
mocks.ocr.getByAssetId.mockResolvedValue([ocr1, ocr2]);
mocks.asset.getById.mockResolvedValue(assetStub.image);
await expect(sut.getOcr(authStub.admin, 'asset-1')).resolves.toEqual([ocr1, ocr2]);
@ -723,7 +737,7 @@ describe(AssetService.name, () => {
it('should return empty array when no OCR data exists', async () => {
mocks.access.asset.checkOwnerAccess.mockResolvedValue(new Set(['asset-1']));
mocks.ocr.getByAssetId.mockResolvedValue([]);
mocks.asset.getById.mockResolvedValue(assetStub.image);
await expect(sut.getOcr(authStub.admin, 'asset-1')).resolves.toEqual([]);
expect(mocks.ocr.getByAssetId).toHaveBeenCalledWith('asset-1');

View File

@ -18,11 +18,13 @@ import {
mapStats,
} from 'src/dtos/asset.dto';
import { AuthDto } from 'src/dtos/auth.dto';
import { AssetEditsDto, EditAction, EditActionListDto } from 'src/dtos/editing.dto';
import { AssetOcrResponseDto } from 'src/dtos/ocr.dto';
import {
AssetFileType,
AssetMetadataKey,
AssetStatus,
AssetType,
AssetVisibility,
JobName,
JobStatus,
@ -32,7 +34,16 @@ import {
import { BaseService } from 'src/services/base.service';
import { ISidecarWriteJob, JobItem, JobOf } from 'src/types';
import { requireElevatedPermission } from 'src/utils/access';
import { getAssetFiles, getMyPartnerIds, onAfterUnlink, onBeforeLink, onBeforeUnlink } from 'src/utils/asset.util';
import {
getAssetFiles,
getDimensions,
getMyPartnerIds,
isPanorama,
onAfterUnlink,
onBeforeLink,
onBeforeUnlink,
} from 'src/utils/asset.util';
import { transformOcrBoundingBox } from 'src/utils/transform';
@Injectable()
export class AssetService extends BaseService {
@ -67,6 +78,7 @@ export class AssetService extends BaseService {
owner: true,
faces: { person: true },
stack: { assets: true },
edits: true,
tags: true,
});
@ -360,11 +372,19 @@ export class AssetService extends BaseService {
}
}
const { fullsizeFile, previewFile, thumbnailFile, sidecarFile } = getAssetFiles(asset.files ?? []);
const files = [thumbnailFile?.path, previewFile?.path, fullsizeFile?.path, asset.encodedVideoPath];
const assetFiles = getAssetFiles(asset.files ?? []);
const files = [
assetFiles.thumbnailFile?.path,
assetFiles.previewFile?.path,
assetFiles.fullsizeFile?.path,
assetFiles.editedFullsizeFile?.path,
assetFiles.editedPreviewFile?.path,
assetFiles.editedThumbnailFile?.path,
asset.encodedVideoPath,
];
if (deleteOnDisk && !asset.isOffline) {
files.push(sidecarFile?.path, asset.originalPath);
files.push(assetFiles.sidecarFile?.path, asset.originalPath);
}
await this.jobRepository.queue({ name: JobName.FileDelete, data: { files: files.filter(Boolean) } });
@ -393,7 +413,16 @@ export class AssetService extends BaseService {
async getOcr(auth: AuthDto, id: string): Promise<AssetOcrResponseDto[]> {
await this.requireAccess({ auth, permission: Permission.AssetRead, ids: [id] });
return this.ocrRepository.getByAssetId(id);
const ocr = await this.ocrRepository.getByAssetId(id);
const asset = await this.assetRepository.getById(id, { exifInfo: true, edits: true });
if (!asset || !asset.exifInfo || !asset.edits) {
throw new BadRequestException('Asset not found');
}
const dimensions = getDimensions(asset.exifInfo);
return ocr.map((item) => transformOcrBoundingBox(item, asset.edits!, dimensions));
}
async upsertMetadata(auth: AuthDto, id: string, dto: AssetMetadataUpsertDto): Promise<AssetMetadataResponseDto[]> {
@ -464,4 +493,95 @@ export class AssetService extends BaseService {
await this.jobRepository.queue({ name: JobName.SidecarWrite, data: { id, ...writes } });
}
}
async getAssetEdits(auth: AuthDto, id: string): Promise<AssetEditsDto> {
await this.requireAccess({ auth, permission: Permission.AssetRead, ids: [id] });
const edits = await this.assetEditRepository.getEditsForAsset(id);
return {
assetId: id,
edits,
};
}
async editAsset(auth: AuthDto, id: string, dto: EditActionListDto): Promise<AssetEditsDto> {
await this.requireAccess({ auth, permission: Permission.AssetEdit, ids: [id] });
if (dto.edits.length === 0) {
throw new BadRequestException('At least one edit action must be provided');
}
const asset = await this.assetRepository.getById(id, { exifInfo: true });
if (!asset) {
throw new BadRequestException('Asset not found');
}
if (asset.type !== AssetType.Image) {
throw new BadRequestException('Only images can be edited');
}
if (asset.livePhotoVideoId !== null) {
throw new BadRequestException('Editing live photos is not supported');
}
if (isPanorama(asset)) {
throw new BadRequestException('Editing panorama images is not supported');
}
if (asset.originalPath?.toLowerCase().endsWith('.gif')) {
throw new BadRequestException('Editing GIF images is not supported');
}
// verify that the edit actions are unique
// mirror may appear more than once, but only with different parameters
const actionSet = new Set<string>();
for (const edit of dto.edits) {
const key = edit.action === EditAction.Mirror ? `${edit.action}-${JSON.stringify(edit.parameters)}` : edit.action;
if (actionSet.has(key)) {
throw new BadRequestException('Duplicate edit actions are not allowed');
}
actionSet.add(key);
}
// check that crop parameters will not go out of bounds
const { width: assetWidth, height: assetHeight } = getDimensions(asset.exifInfo!);
if (!assetWidth || !assetHeight) {
throw new BadRequestException('Asset dimensions are not available for editing');
}
const crop = dto.edits.find((e) => e.action === EditAction.Crop)?.parameters;
if (crop) {
const { x, y, width, height } = crop;
if (x + width > assetWidth || y + height > assetHeight) {
throw new BadRequestException('Crop parameters are out of bounds');
}
}
await this.assetEditRepository.storeEdits(id, dto.edits);
await this.jobRepository.queue({
name: JobName.AssetGenerateThumbnails,
data: { id, source: 'edit', notify: true },
});
// Return the asset and its applied edits
return {
assetId: id,
edits: dto.edits,
};
}
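// Example request body for the edit endpoint above, reusing the crop parameters that appear in the
// thumbnail tests in this diff; a duplicate crop entry, or a crop whose x + width or y + height
// exceeds the EXIF dimensions, is rejected with a BadRequestException:
//   {
//     "edits": [
//       { "action": "crop", "parameters": { "x": 216, "y": 1512, "width": 1512, "height": 1152 } },
//       { "action": "rotate", "parameters": { "angle": 90 } }
//     ]
//   }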
async removeAssetEdits(auth: AuthDto, id: string): Promise<void> {
await this.requireAccess({ auth, permission: Permission.AssetEdit, ids: [id] });
const asset = await this.assetRepository.getById(id);
if (!asset) {
throw new BadRequestException('Asset not found');
}
await this.assetEditRepository.deleteEditsForAsset(id);
await this.jobRepository.queue({
name: JobName.AssetGenerateThumbnails,
data: { id, source: 'edit', notify: true },
});
}
}

View File

@ -11,6 +11,7 @@ import { AlbumUserRepository } from 'src/repositories/album-user.repository';
import { AlbumRepository } from 'src/repositories/album.repository';
import { ApiKeyRepository } from 'src/repositories/api-key.repository';
import { AppRepository } from 'src/repositories/app.repository';
import { AssetEditRepository } from 'src/repositories/asset-edit.repository';
import { AssetJobRepository } from 'src/repositories/asset-job.repository';
import { AssetRepository } from 'src/repositories/asset.repository';
import { AuditRepository } from 'src/repositories/audit.repository';
@ -69,6 +70,7 @@ export const BASE_SERVICE_DEPENDENCIES = [
ApiKeyRepository,
AppRepository,
AssetRepository,
AssetEditRepository,
AssetJobRepository,
AuditRepository,
ConfigRepository,
@ -127,6 +129,7 @@ export class BaseService {
protected apiKeyRepository: ApiKeyRepository,
protected appRepository: AppRepository,
protected assetRepository: AssetRepository,
protected assetEditRepository: AssetEditRepository,
protected assetJobRepository: AssetJobRepository,
protected auditRepository: AuditRepository,
protected configRepository: ConfigRepository,

View File

@ -313,6 +313,7 @@ describe(MediaService.name, () => {
quality: 80,
processInvalidImages: false,
raw: rawInfo,
edits: [],
},
expect.any(String),
);
@ -325,6 +326,7 @@ describe(MediaService.name, () => {
quality: 80,
processInvalidImages: false,
raw: rawInfo,
edits: [],
},
expect.any(String),
);
@ -334,6 +336,7 @@ describe(MediaService.name, () => {
colorspace: Colorspace.P3,
processInvalidImages: false,
raw: rawInfo,
edits: [],
});
expect(mocks.asset.upsertFiles).toHaveBeenCalledWith([
@ -527,6 +530,7 @@ describe(MediaService.name, () => {
quality: 80,
processInvalidImages: false,
raw: rawInfo,
edits: [],
},
previewPath,
);
@ -539,6 +543,7 @@ describe(MediaService.name, () => {
quality: 80,
processInvalidImages: false,
raw: rawInfo,
edits: [],
},
thumbnailPath,
);
@ -572,6 +577,7 @@ describe(MediaService.name, () => {
quality: 80,
processInvalidImages: false,
raw: rawInfo,
edits: [],
},
previewPath,
);
@ -584,6 +590,7 @@ describe(MediaService.name, () => {
quality: 80,
processInvalidImages: false,
raw: rawInfo,
edits: [],
},
thumbnailPath,
);
@ -641,7 +648,6 @@ describe(MediaService.name, () => {
processInvalidImages: false,
size: 1440,
});
expect(mocks.media.getImageDimensions).not.toHaveBeenCalled();
});
it('should resize original image if embedded image extraction is not enabled', async () => {
@ -657,7 +663,6 @@ describe(MediaService.name, () => {
processInvalidImages: false,
size: 1440,
});
expect(mocks.media.getImageDimensions).not.toHaveBeenCalled();
});
it('should process invalid images if enabled', async () => {
@ -691,7 +696,6 @@ describe(MediaService.name, () => {
expect.objectContaining({ processInvalidImages: false }),
);
expect(mocks.media.getImageDimensions).not.toHaveBeenCalled();
vi.unstubAllEnvs();
});
@ -722,6 +726,7 @@ describe(MediaService.name, () => {
quality: 80,
processInvalidImages: false,
raw: rawInfo,
edits: [],
},
expect.any(String),
);
@ -752,6 +757,7 @@ describe(MediaService.name, () => {
quality: 80,
processInvalidImages: false,
raw: rawInfo,
edits: [],
},
expect.any(String),
);
@ -764,6 +770,7 @@ describe(MediaService.name, () => {
quality: 80,
processInvalidImages: false,
raw: rawInfo,
edits: [],
},
expect.any(String),
);
@ -792,6 +799,7 @@ describe(MediaService.name, () => {
quality: 80,
processInvalidImages: false,
raw: rawInfo,
edits: [],
},
expect.any(String),
);
@ -804,6 +812,7 @@ describe(MediaService.name, () => {
size: 1440,
processInvalidImages: false,
raw: rawInfo,
edits: [],
},
expect.any(String),
);
@ -833,6 +842,7 @@ describe(MediaService.name, () => {
quality: 80,
processInvalidImages: false,
raw: rawInfo,
edits: [],
},
expect.any(String),
);
@ -888,6 +898,7 @@ describe(MediaService.name, () => {
quality: 80,
processInvalidImages: false,
raw: rawInfo,
edits: [],
},
expect.any(String),
);
@ -926,10 +937,162 @@ describe(MediaService.name, () => {
quality: 90,
processInvalidImages: false,
raw: rawInfo,
edits: [],
},
expect.any(String),
);
});
it('should apply edits when generating thumbnails', async () => {
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue({
...assetStub.withCropEdit,
});
await sut.handleGenerateThumbnails({ id: assetStub.image.id, source: 'edit' });
expect(mocks.media.generateThumbnail).toHaveBeenCalledWith(
rawBuffer,
expect.objectContaining({
edits: [
{
action: 'crop',
parameters: { height: 1152, width: 1512, x: 216, y: 1512 },
},
],
}),
expect.any(String),
);
});
it('should not generate edited files when job source is not edit', async () => {
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue({
...assetStub.withCropEdit,
});
await sut.handleGenerateThumbnails({ id: assetStub.image.id, source: 'upload' });
expect(mocks.media.generateThumbnail).toHaveBeenCalledWith(
rawBuffer,
expect.objectContaining({
edits: [],
}),
expect.any(String),
);
});
it('should clean up edited files if an asset has no edits', async () => {
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue({
...assetStub.withRevertedEdits,
});
const status = await sut.handleGenerateThumbnails({ id: assetStub.image.id, source: 'edit' });
expect(mocks.storage.unlink).toHaveBeenCalledWith('/uploads/user-id/fullsize/path_edited.jpg');
expect(mocks.storage.unlink).toHaveBeenCalledWith('/uploads/user-id/thumbnail/path_edited.jpg');
expect(mocks.storage.unlink).toHaveBeenCalledWith('/uploads/user-id/preview/path_edited.jpg');
expect(mocks.asset.deleteFiles).toHaveBeenCalledWith(
expect.arrayContaining([
expect.objectContaining({ path: '/uploads/user-id/fullsize/path_edited.jpg' }),
expect.objectContaining({ path: '/uploads/user-id/preview/path_edited.jpg' }),
expect.objectContaining({ path: '/uploads/user-id/thumbnail/path_edited.jpg' }),
]),
);
expect(status).toBe(JobStatus.Success);
expect(mocks.media.generateThumbnail).toHaveBeenCalled();
// ensure that we switched to non-edit mode
expect(mocks.asset.upsertFiles).toHaveBeenCalledWith(
expect.arrayContaining([
expect.objectContaining({ type: AssetFileType.Preview }),
expect.objectContaining({ type: AssetFileType.Thumbnail }),
]),
);
});
it('should generate all 3 edited files if an asset has edits', async () => {
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue({
...assetStub.withCropEdit,
});
await sut.handleGenerateThumbnails({ id: assetStub.image.id, source: 'edit' });
expect(mocks.media.generateThumbnail).toHaveBeenCalledTimes(3);
expect(mocks.media.generateThumbnail).toHaveBeenCalledWith(
rawBuffer,
expect.anything(),
expect.stringContaining('edited_preview.jpeg'),
);
expect(mocks.media.generateThumbnail).toHaveBeenCalledWith(
rawBuffer,
expect.anything(),
expect.stringContaining('edited_thumbnail.webp'),
);
expect(mocks.media.generateThumbnail).toHaveBeenCalledWith(
rawBuffer,
expect.anything(),
expect.stringContaining('edited_fullsize.jpeg'),
);
});
it('should skip thumbhash saving if job source is not edit and edits exist', async () => {
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue({
...assetStub.withCropEdit,
});
const thumbhashBuffer = Buffer.from('a thumbhash', 'utf8');
mocks.media.generateThumbhash.mockResolvedValue(thumbhashBuffer);
await sut.handleGenerateThumbnails({ id: assetStub.image.id, source: 'upload' });
expect(mocks.asset.update).not.toHaveBeenCalledWith(
expect.objectContaining({
thumbhash: thumbhashBuffer,
}),
);
});
it('should apply thumbhash if job source is edit and edits exist', async () => {
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue({
...assetStub.withCropEdit,
});
const thumbhashBuffer = Buffer.from('a thumbhash', 'utf8');
mocks.media.generateThumbhash.mockResolvedValue(thumbhashBuffer);
await sut.handleGenerateThumbnails({ id: assetStub.image.id, source: 'edit' });
expect(mocks.asset.update).toHaveBeenCalledWith(
expect.objectContaining({
thumbhash: thumbhashBuffer,
}),
);
});
it('should upsert 3 edited files for edit jobs', async () => {
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue({
...assetStub.withCropEdit,
});
const thumbhashBuffer = Buffer.from('a thumbhash', 'utf8');
mocks.media.generateThumbhash.mockResolvedValue(thumbhashBuffer);
await sut.handleGenerateThumbnails({ id: assetStub.image.id, source: 'edit' });
expect(mocks.asset.upsertFiles).toHaveBeenCalledWith(
expect.arrayContaining([
expect.objectContaining({ type: AssetFileType.EditedFullSize }),
expect.objectContaining({ type: AssetFileType.EditedPreview }),
expect.objectContaining({ type: AssetFileType.EditedThumbnail }),
]),
);
});
it('should reject videos for edit thumbnail jobs', async () => {
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(assetStub.video);
await expect(sut.handleGenerateThumbnails({ id: assetStub.video.id, source: 'edit' })).resolves.toBe(
JobStatus.Skipped,
);
expect(mocks.media.generateThumbnail).not.toHaveBeenCalled();
});
});
describe('handleGeneratePersonThumbnail', () => {
@ -981,12 +1144,17 @@ describe(MediaService.name, () => {
colorspace: Colorspace.P3,
format: ImageFormat.Jpeg,
quality: 80,
crop: {
left: 238,
top: 163,
width: 274,
height: 274,
},
edits: [
{
action: 'crop',
parameters: {
height: 274,
width: 274,
x: 238,
y: 163,
},
},
],
raw: info,
processInvalidImages: false,
size: 250,
@ -1020,12 +1188,17 @@ describe(MediaService.name, () => {
colorspace: Colorspace.P3,
format: ImageFormat.Jpeg,
quality: 80,
crop: {
left: 238,
top: 163,
width: 274,
height: 274,
},
edits: [
{
action: 'crop',
parameters: {
height: 274,
width: 274,
x: 238,
y: 163,
},
},
],
raw: info,
processInvalidImages: false,
size: 250,
@ -1057,12 +1230,17 @@ describe(MediaService.name, () => {
colorspace: Colorspace.P3,
format: ImageFormat.Jpeg,
quality: 80,
crop: {
left: 0,
top: 85,
width: 510,
height: 510,
},
edits: [
{
action: 'crop',
parameters: {
height: 510,
width: 510,
x: 0,
y: 85,
},
},
],
raw: info,
processInvalidImages: false,
size: 250,
@ -1094,12 +1272,17 @@ describe(MediaService.name, () => {
colorspace: Colorspace.P3,
format: ImageFormat.Jpeg,
quality: 80,
crop: {
left: 591,
top: 591,
width: 408,
height: 408,
},
edits: [
{
action: 'crop',
parameters: {
height: 408,
width: 408,
x: 591,
y: 591,
},
},
],
raw: info,
processInvalidImages: false,
size: 250,
@ -1131,12 +1314,17 @@ describe(MediaService.name, () => {
colorspace: Colorspace.P3,
format: ImageFormat.Jpeg,
quality: 80,
crop: {
left: 0,
top: 62,
width: 412,
height: 412,
},
edits: [
{
action: 'crop',
parameters: {
height: 412,
width: 412,
x: 0,
y: 62,
},
},
],
raw: info,
processInvalidImages: false,
size: 250,
@ -1168,12 +1356,17 @@ describe(MediaService.name, () => {
colorspace: Colorspace.P3,
format: ImageFormat.Jpeg,
quality: 80,
crop: {
left: 4485,
top: 94,
width: 138,
height: 138,
},
edits: [
{
action: 'crop',
parameters: {
height: 138,
width: 138,
x: 4485,
y: 94,
},
},
],
raw: info,
processInvalidImages: false,
size: 250,
@ -1210,12 +1403,17 @@ describe(MediaService.name, () => {
colorspace: Colorspace.P3,
format: ImageFormat.Jpeg,
quality: 80,
crop: {
height: 844,
left: 388,
top: 730,
width: 844,
},
edits: [
{
action: 'crop',
parameters: {
height: 844,
width: 844,
x: 388,
y: 730,
},
},
],
raw: info,
processInvalidImages: false,
size: 250,

View File

@ -3,6 +3,7 @@ import { FACE_THUMBNAIL_SIZE, JOBS_ASSET_PAGINATION_SIZE } from 'src/constants';
import { StorageCore, ThumbnailPathEntity } from 'src/cores/storage.core';
import { Exif } from 'src/database';
import { OnEvent, OnJob } from 'src/decorators';
import { CropParameters, EditAction, EditActionItem } from 'src/dtos/editing.dto';
import { SystemConfigFFmpegDto } from 'src/dtos/system-config.dto';
import {
AssetFileType,
@ -28,8 +29,8 @@ import { BoundingBox } from 'src/repositories/machine-learning.repository';
import { BaseService } from 'src/services/base.service';
import {
AudioStreamInfo,
CropOptions,
DecodeToBufferOptions,
GenerateThumbnailOptions,
ImageDimensions,
JobItem,
JobOf,
@ -37,7 +38,7 @@ import {
VideoInterfaces,
VideoStreamInfo,
} from 'src/types';
import { getAssetFiles } from 'src/utils/asset.util';
import { getAssetFiles, getDimensions } from 'src/utils/asset.util';
import { BaseConfig, ThumbnailConfig } from 'src/utils/media';
import { mimeTypes } from 'src/utils/mime-types';
import { clamp, isFaceImportEnabled, isFacialRecognitionEnabled } from 'src/utils/misc';
@ -67,12 +68,19 @@ export class MediaService extends BaseService {
};
for await (const asset of this.assetJobRepository.streamForThumbnailJob(!!force)) {
const { previewFile, thumbnailFile } = getAssetFiles(asset.files);
const assetFiles = getAssetFiles(asset.files);
if (!previewFile || !thumbnailFile || !asset.thumbhash || force) {
if (!assetFiles.previewFile || !assetFiles.thumbnailFile || !asset.thumbhash || force) {
jobs.push({ name: JobName.AssetGenerateThumbnails, data: { id: asset.id } });
}
if (
asset.edits.length > 0 &&
(!assetFiles.editedPreviewFile || !assetFiles.editedThumbnailFile || !assetFiles.editedFullsizeFile || force)
) {
jobs.push({ name: JobName.AssetGenerateThumbnails, data: { id: asset.id, source: 'edit' } });
}
if (jobs.length >= JOBS_ASSET_PAGINATION_SIZE) {
await queueAll();
}
@ -155,8 +163,10 @@ export class MediaService extends BaseService {
}
@OnJob({ name: JobName.AssetGenerateThumbnails, queue: QueueName.ThumbnailGeneration })
async handleGenerateThumbnails({ id }: JobOf<JobName.AssetGenerateThumbnails>): Promise<JobStatus> {
async handleGenerateThumbnails({ id, source }: JobOf<JobName.AssetGenerateThumbnails>): Promise<JobStatus> {
const asset = await this.assetJobRepository.getForGenerateThumbnailJob(id);
let applyEdits = source === 'edit';
if (!asset) {
this.logger.warn(`Thumbnail generation failed for asset ${id}: not found`);
return JobStatus.Failed;
@ -167,35 +177,74 @@ export class MediaService extends BaseService {
return JobStatus.Skipped;
}
if (asset.type !== AssetType.Image && applyEdits) {
this.logger.warn(`Thumbnail generation for edits is only supported for images. Asset ${id} is a ${asset.type}`);
return JobStatus.Skipped;
}
// clean up edited files if no edits exist
if (applyEdits && asset.edits.length === 0) {
const assetFiles = getAssetFiles(asset.files);
const files = [
assetFiles.editedFullsizeFile,
assetFiles.editedPreviewFile,
assetFiles.editedThumbnailFile,
].filter((file) => file !== undefined);
if (files.length > 0) {
await this.assetRepository.deleteFiles(files);
await Promise.all(files.map((file) => this.storageRepository.unlink(file.path)));
}
applyEdits = false;
}
let generated: {
previewPath: string;
thumbnailPath: string;
fullsizePath?: string;
thumbhash: Buffer;
fullsizeDimensions: ImageDimensions;
};
if (asset.type === AssetType.Video || asset.originalFileName.toLowerCase().endsWith('.gif')) {
this.logger.verbose(`Thumbnail generation for video ${id} ${asset.originalPath}`);
generated = await this.generateVideoThumbnails(asset);
} else if (asset.type === AssetType.Image) {
this.logger.verbose(`Thumbnail generation for image ${id} ${asset.originalPath}`);
generated = await this.generateImageThumbnails(asset);
generated = await this.generateImageThumbnails(asset, applyEdits);
} else {
this.logger.warn(`Skipping thumbnail generation for asset ${id}: ${asset.type} is not an image or video`);
return JobStatus.Skipped;
}
const { previewFile, thumbnailFile, fullsizeFile } = getAssetFiles(asset.files);
const assetFiles = getAssetFiles(asset.files);
const previewFile = applyEdits ? assetFiles.editedPreviewFile : assetFiles.previewFile;
const thumbnailFile = applyEdits ? assetFiles.editedThumbnailFile : assetFiles.thumbnailFile;
const fullsizeFile = applyEdits ? assetFiles.editedFullsizeFile : assetFiles.fullsizeFile;
const toUpsert: UpsertFileOptions[] = [];
if (previewFile?.path !== generated.previewPath) {
toUpsert.push({ assetId: asset.id, path: generated.previewPath, type: AssetFileType.Preview });
toUpsert.push({
assetId: asset.id,
path: generated.previewPath,
type: applyEdits ? AssetFileType.EditedPreview : AssetFileType.Preview,
});
}
if (thumbnailFile?.path !== generated.thumbnailPath) {
toUpsert.push({ assetId: asset.id, path: generated.thumbnailPath, type: AssetFileType.Thumbnail });
toUpsert.push({
assetId: asset.id,
path: generated.thumbnailPath,
type: applyEdits ? AssetFileType.EditedThumbnail : AssetFileType.Thumbnail,
});
}
if (generated.fullsizePath && fullsizeFile?.path !== generated.fullsizePath) {
toUpsert.push({ assetId: asset.id, path: generated.fullsizePath, type: AssetFileType.FullSize });
toUpsert.push({
assetId: asset.id,
path: generated.fullsizePath,
type: applyEdits ? AssetFileType.EditedFullSize : AssetFileType.FullSize,
});
}
if (toUpsert.length > 0) {
@ -226,11 +275,28 @@ export class MediaService extends BaseService {
await Promise.all(pathsToDelete.map((path) => this.storageRepository.unlink(path)));
}
if (!asset.thumbhash || Buffer.compare(asset.thumbhash, generated.thumbhash) !== 0) {
await this.assetRepository.update({ id: asset.id, thumbhash: generated.thumbhash });
// We don't want the non-edit job overwriting the thumbhash/dimensions of an edit job
if (applyEdits === asset.edits.length > 0) {
if (!asset.thumbhash || Buffer.compare(asset.thumbhash, generated.thumbhash) !== 0) {
await this.assetRepository.update({ id: asset.id, thumbhash: generated.thumbhash });
}
await this.assetRepository.update({ id: asset.id, ...generated.fullsizeDimensions });
}
await this.assetRepository.upsertJobStatus({ assetId: asset.id, previewAt: new Date(), thumbnailAt: new Date() });
if (source === 'edit') {
// check if the edits modify faces or ocr
const assetFaces = await this.personRepository.getFaces(asset.id, { onlyVisible: false });
const ocrData = await this.ocrRepository.getByAssetId(asset.id, { onlyVisible: false });
const crop = asset.edits.find((e) => e.action === EditAction.Crop);
const originalDimensions = getDimensions(asset.exifInfo!);
const faceStatuses = this.mediaRepository.checkFaceVisibility(assetFaces, originalDimensions, crop);
await this.personRepository.updateFaceVisibilities(faceStatuses.visible, faceStatuses.hidden);
const ocrStatuses = this.mediaRepository.checkOcrVisibility(ocrData, originalDimensions, crop);
await this.ocrRepository.updateOcrVisibilities(asset.id, ocrStatuses.visible, ocrStatuses.hidden);
}
return JobStatus.Success;
}
@ -258,23 +324,35 @@ export class MediaService extends BaseService {
return { info, data, colorspace };
}
private async generateImageThumbnails(asset: {
id: string;
ownerId: string;
originalFileName: string;
originalPath: string;
exifInfo: Exif;
}) {
private async generateImageThumbnails(
asset: {
id: string;
ownerId: string;
originalFileName: string;
originalPath: string;
exifInfo: Exif;
edits: EditActionItem[];
},
useEdits: boolean,
) {
const { image } = await this.getConfig({ withCache: true });
const previewPath = StorageCore.getImagePath(asset, AssetPathType.Preview, image.preview.format);
const thumbnailPath = StorageCore.getImagePath(asset, AssetPathType.Thumbnail, image.thumbnail.format);
const previewPath = StorageCore.getImagePath(
asset,
useEdits ? AssetPathType.EditedPreview : AssetPathType.Preview,
image.preview.format,
);
const thumbnailPath = StorageCore.getImagePath(
asset,
useEdits ? AssetPathType.EditedThumbnail : AssetPathType.Thumbnail,
image.thumbnail.format,
);
this.storageCore.ensureFolders(previewPath);
// Handle embedded preview extraction for RAW files
const extractEmbedded = image.extractEmbedded && mimeTypes.isRaw(asset.originalFileName);
const extracted = extractEmbedded ? await this.extractImage(asset.originalPath, image.preview.size) : null;
const generateFullsize =
(image.fullsize.enabled || asset.exifInfo.projectionType == 'EQUIRECTANGULAR') &&
(image.fullsize.enabled || asset.exifInfo.projectionType === 'EQUIRECTANGULAR') &&
!mimeTypes.isWebSupportedImage(asset.originalPath);
const convertFullsize = generateFullsize && (!extracted || !mimeTypes.isWebSupportedImage(`.${extracted.format}`));
@ -283,23 +361,39 @@ export class MediaService extends BaseService {
// only specify orientation for extracted images, which don't have EXIF orientation data,
// otherwise the image can be double-rotated
extracted ? asset.exifInfo : { ...asset.exifInfo, orientation: null },
convertFullsize ? undefined : image.preview.size,
convertFullsize || useEdits ? undefined : image.preview.size,
);
// generate final images
const thumbnailOptions = { colorspace, processInvalidImages: false, raw: info };
const thumbnailOptions = { colorspace, processInvalidImages: false, raw: info, edits: useEdits ? asset.edits : [] };
const promises = [
this.mediaRepository.generateThumbhash(data, thumbnailOptions),
this.mediaRepository.generateThumbnail(data, { ...image.thumbnail, ...thumbnailOptions }, thumbnailPath),
this.mediaRepository.generateThumbnail(data, { ...image.preview, ...thumbnailOptions }, previewPath),
this.mediaRepository.generateThumbnail(
data,
{ ...image.thumbnail, ...thumbnailOptions, edits: useEdits ? asset.edits : [] },
thumbnailPath,
),
this.mediaRepository.generateThumbnail(
data,
{ ...image.preview, ...thumbnailOptions, edits: useEdits ? asset.edits : [] },
previewPath,
),
];
let fullsizePath: string | undefined;
if (convertFullsize) {
if (convertFullsize || useEdits) {
// convert a new fullsize image from the same source as the thumbnail
fullsizePath = StorageCore.getImagePath(asset, AssetPathType.FullSize, image.fullsize.format);
const fullsizeOptions = { format: image.fullsize.format, quality: image.fullsize.quality, ...thumbnailOptions };
fullsizePath = StorageCore.getImagePath(
asset,
useEdits ? AssetPathType.EditedFullSize : AssetPathType.FullSize,
image.fullsize.format,
);
const fullsizeOptions = {
format: image.fullsize.format,
quality: image.fullsize.quality,
...thumbnailOptions,
};
promises.push(this.mediaRepository.generateThumbnail(data, fullsizeOptions, fullsizePath));
} else if (generateFullsize && extracted && extracted.format === RawExtractedFormat.Jpeg) {
fullsizePath = StorageCore.getImagePath(asset, AssetPathType.FullSize, extracted.format);
@ -328,7 +422,9 @@ export class MediaService extends BaseService {
await Promise.all(promises);
}
return { previewPath, thumbnailPath, fullsizePath, thumbhash: outputs[0] as Buffer };
const dims = await this.mediaRepository.getImageDimensions(fullsizePath ?? asset.originalPath);
return { previewPath, thumbnailPath, fullsizePath, thumbhash: outputs[0] as Buffer, fullsizeDimensions: dims };
}
@OnJob({ name: JobName.PersonGenerateThumbnail, queue: QueueName.ThumbnailGeneration })
@ -369,17 +465,22 @@ export class MediaService extends BaseService {
const thumbnailPath = StorageCore.getPersonThumbnailPath({ id, ownerId });
this.storageCore.ensureFolders(thumbnailPath);
const thumbnailOptions = {
const thumbnailOptions: GenerateThumbnailOptions = {
colorspace: image.colorspace,
format: ImageFormat.Jpeg,
raw: info,
quality: image.thumbnail.quality,
crop: this.getCrop(
{ old: { width: oldWidth, height: oldHeight }, new: { width: info.width, height: info.height } },
{ x1, y1, x2, y2 },
),
processInvalidImages: false,
size: FACE_THUMBNAIL_SIZE,
edits: [
{
action: EditAction.Crop,
parameters: this.getCrop(
{ old: { width: oldWidth, height: oldHeight }, new: { width: info.width, height: info.height } },
{ x1, y1, x2, y2 },
),
},
],
};
await this.mediaRepository.generateThumbnail(decodedImage, thumbnailOptions, thumbnailPath);
@ -388,7 +489,10 @@ export class MediaService extends BaseService {
return JobStatus.Success;
}
private getCrop(dims: { old: ImageDimensions; new: ImageDimensions }, { x1, y1, x2, y2 }: BoundingBox): CropOptions {
private getCrop(
dims: { old: ImageDimensions; new: ImageDimensions },
{ x1, y1, x2, y2 }: BoundingBox,
): CropParameters {
// face bounding boxes can spill outside the image dimensions
const clampedX1 = clamp(x1, 0, dims.old.width);
const clampedY1 = clamp(y1, 0, dims.old.height);
@ -416,8 +520,8 @@ export class MediaService extends BaseService {
);
return {
left: middleX - newHalfSize,
top: middleY - newHalfSize,
x: middleX - newHalfSize,
y: middleY - newHalfSize,
width: newHalfSize * 2,
height: newHalfSize * 2,
};
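// Worked example for the square crop returned above, using the values expected by the first
// handleGeneratePersonThumbnail test in this diff: with newHalfSize = 137 and a clamped box centered
// at (middleX, middleY) = (375, 300), x = 375 - 137 = 238, y = 300 - 137 = 163, and width = height =
// 2 * 137 = 274, matching the expected crop parameters { x: 238, y: 163, width: 274, height: 274 }.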
@ -454,7 +558,12 @@ export class MediaService extends BaseService {
processInvalidImages: process.env.IMMICH_PROCESS_INVALID_IMAGES === 'true',
});
return { previewPath, thumbnailPath, thumbhash };
return {
previewPath,
thumbnailPath,
thumbhash,
fullsizeDimensions: { width: mainVideoStream.width, height: mainVideoStream.height },
};
}
@OnJob({ name: JobName.AssetEncodeVideoQueueAll, queue: QueueName.VideoConversion })

View File

@ -354,6 +354,7 @@ describe(PersonService.name, () => {
it('should get the bounding boxes for an asset', async () => {
mocks.access.asset.checkOwnerAccess.mockResolvedValue(new Set([faceStub.face1.assetId]));
mocks.person.getFaces.mockResolvedValue([faceStub.primaryFace1]);
mocks.asset.getById.mockResolvedValue(assetStub.image);
await expect(sut.getFacesById(authStub.admin, { id: faceStub.face1.assetId })).resolves.toStrictEqual([
mapFaces(faceStub.primaryFace1, authStub.admin),
]);

View File

@ -40,6 +40,7 @@ import { AssetFaceTable } from 'src/schema/tables/asset-face.table';
import { FaceSearchTable } from 'src/schema/tables/face-search.table';
import { BaseService } from 'src/services/base.service';
import { JobItem, JobOf } from 'src/types';
import { getDimensions } from 'src/utils/asset.util';
import { ImmichFileResponse } from 'src/utils/file';
import { mimeTypes } from 'src/utils/mime-types';
import { isFacialRecognitionEnabled } from 'src/utils/misc';
@ -126,7 +127,10 @@ export class PersonService extends BaseService {
async getFacesById(auth: AuthDto, dto: FaceDto): Promise<AssetFaceResponseDto[]> {
await this.requireAccess({ auth, permission: Permission.AssetRead, ids: [dto.id] });
const faces = await this.personRepository.getFaces(dto.id);
return faces.map((asset) => mapFaces(asset, auth));
const asset = await this.assetRepository.getById(dto.id, { edits: true, exifInfo: true });
const assetDimensions = getDimensions(asset!.exifInfo!);
return faces.map((face) => mapFaces(face, auth, asset!.edits!, assetDimensions));
}
async createNewFeaturePhoto(changeFeaturePhoto: string[]) {

View File

@ -3,6 +3,7 @@ import { VECTOR_EXTENSIONS } from 'src/constants';
import { Asset, AssetFile } from 'src/database';
import { UploadFieldName } from 'src/dtos/asset-media.dto';
import { AuthDto } from 'src/dtos/auth.dto';
import { EditActionItem } from 'src/dtos/editing.dto';
import {
AssetOrder,
AssetType,
@ -25,13 +26,6 @@ export type DeepPartial<T> = T extends object ? { [K in keyof T]?: DeepPartial<T
export type RepositoryInterface<T extends object> = Pick<T, keyof T>;
export interface CropOptions {
top: number;
left: number;
width: number;
height: number;
}
export interface FullsizeImageOptions {
format: ImageFormat;
quality: number;
@ -52,9 +46,9 @@ export interface RawImageInfo {
interface DecodeImageOptions {
colorspace: string;
crop?: CropOptions;
processInvalidImages: boolean;
raw?: RawImageInfo;
edits?: EditActionItem[];
}
export interface DecodeToBufferOptions extends DecodeImageOptions {
@ -72,7 +66,6 @@ export type GenerateThumbhashFromBufferOptions = GenerateThumbhashOptions & { ra
export interface GenerateThumbnailsOptions {
colorspace: string;
crop?: CropOptions;
preview?: ImageOptions;
processInvalidImages: boolean;
thumbhash?: boolean;
@ -186,7 +179,7 @@ export interface IDelayedJob extends IBaseJob {
delay?: number;
}
export type JobSource = 'upload' | 'sidecar-write' | 'copy';
export type JobSource = 'upload' | 'sidecar-write' | 'copy' | 'edit';
export interface IEntityJob extends IBaseJob {
id: string;
source?: JobSource;

View File

@ -157,6 +157,14 @@ const checkOtherAccess = async (access: AccessRepository, request: OtherAccessRe
return await access.asset.checkOwnerAccess(auth.user.id, ids, auth.session?.hasElevatedPermission);
}
case Permission.AssetEdit: {
return await access.asset.checkOwnerAccess(auth.user.id, ids, auth.session?.hasElevatedPermission);
}
case Permission.AssetDerive: {
return await access.asset.checkOwnerAccess(auth.user.id, ids, auth.session?.hasElevatedPermission);
}
case Permission.AlbumRead: {
const isOwner = await access.album.checkOwnerAccess(auth.user.id, ids);
const isShared = await access.album.checkSharedAlbumAccess(

View File

@ -1,9 +1,10 @@
import { BadRequestException } from '@nestjs/common';
import { GeneratedImageType, StorageCore } from 'src/cores/storage.core';
import { AssetFile } from 'src/database';
import { AssetFile, Exif } from 'src/database';
import { BulkIdErrorReason, BulkIdResponseDto } from 'src/dtos/asset-ids.response.dto';
import { UploadFieldName } from 'src/dtos/asset-media.dto';
import { AuthDto } from 'src/dtos/auth.dto';
import { ExifResponseDto } from 'src/dtos/exif.dto';
import { AssetFileType, AssetType, AssetVisibility, Permission } from 'src/enum';
import { AuthRequest } from 'src/middleware/auth.guard';
import { AccessRepository } from 'src/repositories/access.repository';
@ -22,6 +23,10 @@ export const getAssetFiles = (files: AssetFile[]) => ({
previewFile: getAssetFile(files, AssetFileType.Preview),
thumbnailFile: getAssetFile(files, AssetFileType.Thumbnail),
sidecarFile: getAssetFile(files, AssetFileType.Sidecar),
editedFullsizeFile: getAssetFile(files, AssetFileType.EditedFullSize),
editedPreviewFile: getAssetFile(files, AssetFileType.EditedPreview),
editedThumbnailFile: getAssetFile(files, AssetFileType.EditedThumbnail),
});
export const addAssets = async (
@ -199,3 +204,26 @@ export const asUploadRequest = (request: AuthRequest, file: Express.Multer.File)
file: mapToUploadFile(file as ImmichFile),
};
};
const isFlipped = (orientation?: string | null) => {
const value = Number(orientation);
return value && [5, 6, 7, 8, -90, 90].includes(value);
};
export const getDimensions = (exifInfo: ExifResponseDto | Exif) => {
const { exifImageWidth: width, exifImageHeight: height } = exifInfo;
if (!width || !height) {
return { width: 0, height: 0 };
}
if (isFlipped(exifInfo.orientation)) {
return { width: height, height: width };
}
return { width, height };
};
export const isPanorama = (asset: { exifInfo?: Exif | null; originalFileName: string }) => {
return asset.exifInfo?.projectionType === 'EQUIRECTANGULAR' || asset.originalFileName.toLowerCase().endsWith('.insp');
};
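// A small usage sketch for getDimensions with a hypothetical, partial EXIF record (cast only for
// illustration): orientation values that imply a 90/270-degree rotation (5-8, +/-90) swap the
// reported width and height.
const rotatedExifExample = { exifImageWidth: 4000, exifImageHeight: 3000, orientation: '6' } as unknown as Exif;
const rotatedDimensionsExample = getDimensions(rotatedExifExample); // => { width: 3000, height: 4000 }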

View File

@ -1,4 +1,5 @@
import {
AliasedRawBuilder,
DeduplicateJoinsPlugin,
Expression,
ExpressionBuilder,
@ -16,6 +17,7 @@ import { jsonArrayFrom, jsonObjectFrom } from 'kysely/helpers/postgres';
import { parse } from 'pg-connection-string';
import postgres, { Notice, PostgresError } from 'postgres';
import { columns, Exif, Person } from 'src/database';
import { EditActionItem } from 'src/dtos/editing.dto';
import { AssetFileType, AssetVisibility, DatabaseExtension, DatabaseSslMode } from 'src/enum';
import { AssetSearchBuilderOptions } from 'src/repositories/search.repository';
import { DB } from 'src/schema';
@ -180,13 +182,14 @@ export function withSmartSearch<O>(qb: SelectQueryBuilder<DB, 'asset', O>) {
.select((eb) => toJson(eb, 'smart_search').as('smartSearch'));
}
export function withFaces(eb: ExpressionBuilder<DB, 'asset'>, withDeletedFace?: boolean) {
export function withFaces(eb: ExpressionBuilder<DB, 'asset'>, withHidden?: boolean, withDeletedFace?: boolean) {
return jsonArrayFrom(
eb
.selectFrom('asset_face')
.selectAll('asset_face')
.whereRef('asset_face.assetId', '=', 'asset.id')
.$if(!withDeletedFace, (qb) => qb.where('asset_face.deletedAt', 'is', null)),
.$if(!withDeletedFace, (qb) => qb.where('asset_face.deletedAt', 'is', null))
.$if(!withHidden, (qb) => qb.where('asset_face.isVisible', '=', true)),
).as('faces');
}
@ -208,7 +211,11 @@ export function withFilePath(eb: ExpressionBuilder<DB, 'asset'>, type: AssetFile
.where('asset_file.type', '=', type);
}
export function withFacesAndPeople(eb: ExpressionBuilder<DB, 'asset'>, withDeletedFace?: boolean) {
export function withFacesAndPeople(
eb: ExpressionBuilder<DB, 'asset'>,
withHidden?: boolean,
withDeletedFace?: boolean,
) {
return jsonArrayFrom(
eb
.selectFrom('asset_face')
@ -220,7 +227,8 @@ export function withFacesAndPeople(eb: ExpressionBuilder<DB, 'asset'>, withDelet
.selectAll('asset_face')
.select((eb) => eb.table('person').$castTo<Person>().as('person'))
.whereRef('asset_face.assetId', '=', 'asset.id')
.$if(!withDeletedFace, (qb) => qb.where('asset_face.deletedAt', 'is', null)),
.$if(!withDeletedFace, (qb) => qb.where('asset_face.deletedAt', 'is', null))
.$if(!withHidden, (qb) => qb.where('asset_face.isVisible', 'is', true)),
).as('faces');
}
@ -232,6 +240,7 @@ export function hasPeople<O>(qb: SelectQueryBuilder<DB, 'asset', O>, personIds:
.select('assetId')
.where('personId', '=', anyUuid(personIds!))
.where('deletedAt', 'is', null)
.where('isVisible', 'is', true)
.groupBy('assetId')
.having((eb) => eb.fn.count('personId').distinct(), '=', personIds.length)
.as('has_people'),
@ -346,6 +355,17 @@ export const tokenizeForSearch = (text: string): string[] => {
return tokens;
};
// needed to properly type the return value as the EditActionItem discriminated union
type AliasedEditActions = AliasedRawBuilder<EditActionItem[], 'edits'>;
export function withEdits(eb: ExpressionBuilder<DB, 'asset'>): AliasedEditActions {
return jsonArrayFrom(
eb
.selectFrom('asset_edit')
.select(['asset_edit.action', 'asset_edit.parameters'])
.whereRef('asset_edit.assetId', '=', 'asset.id'),
).as('edits') as AliasedEditActions;
}
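// A minimal usage sketch, not part of this diff: withEdits plugs into a select the same way as the
// other with* helpers above and attaches the asset's edits as a typed `edits` JSON array. The query
// below is hypothetical and assumes Kysely is imported from 'kysely' alongside the imports above.
function getAssetWithEditsSketch(db: Kysely<DB>, assetId: string) {
  return db
    .selectFrom('asset')
    .selectAll('asset')
    .select((eb) => withEdits(eb))
    .where('asset.id', '=', assetId)
    .executeTakeFirst();
}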
const joinDeduplicationPlugin = new DeduplicateJoinsPlugin();
/** TODO: This should only be used for search-related queries, not as a general purpose query builder */

View File

@ -0,0 +1,285 @@
import { EditAction, EditActionItem, MirrorAxis } from 'src/dtos/editing.dto';
import { AssetOcrResponseDto } from 'src/dtos/ocr.dto';
import { transformFaceBoundingBox, transformOcrBoundingBox } from 'src/utils/transform';
import { describe, expect, it } from 'vitest';
describe('transformFaceBoundingBox', () => {
const baseFace = {
boundingBoxX1: 100,
boundingBoxY1: 100,
boundingBoxX2: 200,
boundingBoxY2: 200,
imageWidth: 1000,
imageHeight: 800,
};
const baseDimensions = { width: 1000, height: 800 };
describe('with no edits', () => {
it('should return unchanged bounding box', () => {
const result = transformFaceBoundingBox(baseFace, [], baseDimensions);
expect(result).toEqual(baseFace);
});
});
describe('with crop edit', () => {
it('should adjust bounding box for crop offset', () => {
const edits: EditActionItem[] = [
{ action: EditAction.Crop, parameters: { x: 50, y: 50, width: 400, height: 300 } },
];
const result = transformFaceBoundingBox(baseFace, edits, baseDimensions);
expect(result.boundingBoxX1).toBe(50);
expect(result.boundingBoxY1).toBe(50);
expect(result.boundingBoxX2).toBe(150);
expect(result.boundingBoxY2).toBe(150);
expect(result.imageWidth).toBe(400);
expect(result.imageHeight).toBe(300);
});
it('should handle face partially outside crop area', () => {
const edits: EditActionItem[] = [
{ action: EditAction.Crop, parameters: { x: 150, y: 150, width: 400, height: 300 } },
];
const result = transformFaceBoundingBox(baseFace, edits, baseDimensions);
expect(result.boundingBoxX1).toBe(-50);
expect(result.boundingBoxY1).toBe(-50);
expect(result.boundingBoxX2).toBe(50);
expect(result.boundingBoxY2).toBe(50);
});
});
describe('with rotate edit', () => {
it('should rotate 90 degrees clockwise', () => {
const edits: EditActionItem[] = [{ action: EditAction.Rotate, parameters: { angle: 90 } }];
const result = transformFaceBoundingBox(baseFace, edits, baseDimensions);
expect(result.imageWidth).toBe(800);
expect(result.imageHeight).toBe(1000);
expect(result.boundingBoxX1).toBe(600);
expect(result.boundingBoxY1).toBe(100);
expect(result.boundingBoxX2).toBe(700);
expect(result.boundingBoxY2).toBe(200);
});
it('should rotate 180 degrees', () => {
const edits: EditActionItem[] = [{ action: EditAction.Rotate, parameters: { angle: 180 } }];
const result = transformFaceBoundingBox(baseFace, edits, baseDimensions);
expect(result.imageWidth).toBe(1000);
expect(result.imageHeight).toBe(800);
expect(result.boundingBoxX1).toBe(800);
expect(result.boundingBoxY1).toBe(600);
expect(result.boundingBoxX2).toBe(900);
expect(result.boundingBoxY2).toBe(700);
});
it('should rotate 270 degrees', () => {
const edits: EditActionItem[] = [{ action: EditAction.Rotate, parameters: { angle: 270 } }];
const result = transformFaceBoundingBox(baseFace, edits, baseDimensions);
expect(result.imageWidth).toBe(800);
expect(result.imageHeight).toBe(1000);
});
});
describe('with mirror edit', () => {
it('should mirror horizontally', () => {
const edits: EditActionItem[] = [{ action: EditAction.Mirror, parameters: { axis: MirrorAxis.Horizontal } }];
const result = transformFaceBoundingBox(baseFace, edits, baseDimensions);
expect(result.boundingBoxX1).toBe(800);
expect(result.boundingBoxY1).toBe(100);
expect(result.boundingBoxX2).toBe(900);
expect(result.boundingBoxY2).toBe(200);
expect(result.imageWidth).toBe(1000);
expect(result.imageHeight).toBe(800);
});
it('should mirror vertically', () => {
const edits: EditActionItem[] = [{ action: EditAction.Mirror, parameters: { axis: MirrorAxis.Vertical } }];
const result = transformFaceBoundingBox(baseFace, edits, baseDimensions);
expect(result.boundingBoxX1).toBe(100);
expect(result.boundingBoxY1).toBe(600);
expect(result.boundingBoxX2).toBe(200);
expect(result.boundingBoxY2).toBe(700);
expect(result.imageWidth).toBe(1000);
expect(result.imageHeight).toBe(800);
});
});
describe('with combined edits', () => {
it('should apply crop then rotate', () => {
const edits: EditActionItem[] = [
{ action: EditAction.Crop, parameters: { x: 50, y: 50, width: 400, height: 300 } },
{ action: EditAction.Rotate, parameters: { angle: 90 } },
];
const result = transformFaceBoundingBox(baseFace, edits, baseDimensions);
expect(result.imageWidth).toBe(300);
expect(result.imageHeight).toBe(400);
});
it('should apply crop then mirror', () => {
const edits: EditActionItem[] = [
{ action: EditAction.Crop, parameters: { x: 0, y: 0, width: 500, height: 400 } },
{ action: EditAction.Mirror, parameters: { axis: MirrorAxis.Vertical } },
];
const result = transformFaceBoundingBox(baseFace, edits, baseDimensions);
expect(result.boundingBoxX1).toBe(100);
expect(result.boundingBoxX2).toBe(200);
expect(result.boundingBoxY1).toBe(200);
expect(result.boundingBoxY2).toBe(300);
});
});
describe('with scaled dimensions', () => {
it('should scale face to match different image dimensions', () => {
const scaledDimensions = { width: 500, height: 400 }; // Half the original size
const edits: EditActionItem[] = [
{ action: EditAction.Crop, parameters: { x: 50, y: 50, width: 200, height: 150 } },
];
const result = transformFaceBoundingBox(baseFace, edits, scaledDimensions);
expect(result.boundingBoxX1).toBe(0);
expect(result.boundingBoxY1).toBe(0);
expect(result.boundingBoxX2).toBe(50);
expect(result.boundingBoxY2).toBe(50);
});
});
});
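// A sketch of the crop step that transformFaceBoundingBox appears to apply, inferred from the
// expectations above (the real implementation lives in src/utils/transform.ts and also covers the
// rotate and mirror cases): scale the stored face box from the dimensions it was detected at to the
// current asset dimensions, shift it by the crop origin, and report the crop size as the new image
// dimensions. Names and structure here are assumptions for illustration only.
const cropFaceSketch = (
  face: {
    boundingBoxX1: number;
    boundingBoxY1: number;
    boundingBoxX2: number;
    boundingBoxY2: number;
    imageWidth: number;
    imageHeight: number;
  },
  crop: { x: number; y: number; width: number; height: number },
  dims: { width: number; height: number },
) => ({
  // scale from detection-time dimensions to the asset dimensions, then offset by the crop origin
  boundingBoxX1: face.boundingBoxX1 * (dims.width / face.imageWidth) - crop.x,
  boundingBoxY1: face.boundingBoxY1 * (dims.height / face.imageHeight) - crop.y,
  boundingBoxX2: face.boundingBoxX2 * (dims.width / face.imageWidth) - crop.x,
  boundingBoxY2: face.boundingBoxY2 * (dims.height / face.imageHeight) - crop.y,
  imageWidth: crop.width,
  imageHeight: crop.height,
});
// e.g. the first crop test above: 100 * (1000 / 1000) - 50 = 50 and 200 - 50 = 150, with 400x300 as
// the new image dimensions.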
describe('transformOcrBoundingBox', () => {
const baseOcr: AssetOcrResponseDto = {
id: 'ocr-1',
assetId: 'asset-1',
x1: 0.1,
y1: 0.1,
x2: 0.2,
y2: 0.1,
x3: 0.2,
y3: 0.2,
x4: 0.1,
y4: 0.2,
boxScore: 0.9,
textScore: 0.85,
text: 'Test OCR',
};
const baseDimensions = { width: 1000, height: 800 };
describe('with no edits', () => {
it('should return unchanged bounding box', () => {
const result = transformOcrBoundingBox(baseOcr, [], baseDimensions);
expect(result).toEqual(baseOcr);
});
});
describe('with crop edit', () => {
it('should adjust normalized coordinates for crop', () => {
const edits: EditActionItem[] = [
{ action: EditAction.Crop, parameters: { x: 100, y: 80, width: 400, height: 320 } },
];
const result = transformOcrBoundingBox(baseOcr, edits, baseDimensions);
// Original OCR: (0.1,0.1)-(0.2,0.2) on 1000x800 = (100,80)-(200,160)
// After crop offset (100,80): (0,0)-(100,80)
// Normalized to 400x320: (0,0)-(0.25,0.25)
expect(result.x1).toBeCloseTo(0, 5);
expect(result.y1).toBeCloseTo(0, 5);
expect(result.x2).toBeCloseTo(0.25, 5);
expect(result.y2).toBeCloseTo(0, 5);
expect(result.x3).toBeCloseTo(0.25, 5);
expect(result.y3).toBeCloseTo(0.25, 5);
expect(result.x4).toBeCloseTo(0, 5);
expect(result.y4).toBeCloseTo(0.25, 5);
});
});
describe('with rotate edit', () => {
it('should rotate normalized coordinates 90 degrees and reorder points', () => {
const edits: EditActionItem[] = [{ action: EditAction.Rotate, parameters: { angle: 90 } }];
const result = transformOcrBoundingBox(baseOcr, edits, baseDimensions);
expect(result.id).toBe(baseOcr.id);
expect(result.text).toBe(baseOcr.text);
expect(result.x1).toBeCloseTo(0.8, 5);
expect(result.y1).toBeCloseTo(0.1, 5);
expect(result.x2).toBeCloseTo(0.9, 5);
expect(result.y2).toBeCloseTo(0.1, 5);
expect(result.x3).toBeCloseTo(0.9, 5);
expect(result.y3).toBeCloseTo(0.2, 5);
expect(result.x4).toBeCloseTo(0.8, 5);
expect(result.y4).toBeCloseTo(0.2, 5);
});
it('should rotate 180 degrees and reorder points', () => {
const edits: EditActionItem[] = [{ action: EditAction.Rotate, parameters: { angle: 180 } }];
const result = transformOcrBoundingBox(baseOcr, edits, baseDimensions);
expect(result.x1).toBeCloseTo(0.8, 5);
expect(result.y1).toBeCloseTo(0.8, 5);
expect(result.x2).toBeCloseTo(0.9, 5);
expect(result.y2).toBeCloseTo(0.8, 5);
expect(result.x3).toBeCloseTo(0.9, 5);
expect(result.y3).toBeCloseTo(0.9, 5);
expect(result.x4).toBeCloseTo(0.8, 5);
expect(result.y4).toBeCloseTo(0.9, 5);
});
it('should rotate 270 degrees and reorder points', () => {
const edits: EditActionItem[] = [{ action: EditAction.Rotate, parameters: { angle: 270 } }];
const result = transformOcrBoundingBox(baseOcr, edits, baseDimensions);
expect(result.id).toBe(baseOcr.id);
expect(result.text).toBe(baseOcr.text);
expect(result.x1).toBeCloseTo(0.1, 5);
expect(result.y1).toBeCloseTo(0.8, 5);
expect(result.x2).toBeCloseTo(0.2, 5);
expect(result.y2).toBeCloseTo(0.8, 5);
expect(result.x3).toBeCloseTo(0.2, 5);
expect(result.y3).toBeCloseTo(0.9, 5);
expect(result.x4).toBeCloseTo(0.1, 5);
expect(result.y4).toBeCloseTo(0.9, 5);
});
});
describe('with mirror edit', () => {
it('should mirror horizontally', () => {
const edits: EditActionItem[] = [{ action: EditAction.Mirror, parameters: { axis: MirrorAxis.Horizontal } }];
const result = transformOcrBoundingBox(baseOcr, edits, baseDimensions);
expect(result.x1).toBeCloseTo(0.9, 5);
expect(result.y1).toBeCloseTo(0.1, 5);
});
it('should mirror vertically', () => {
const edits: EditActionItem[] = [{ action: EditAction.Mirror, parameters: { axis: MirrorAxis.Vertical } }];
const result = transformOcrBoundingBox(baseOcr, edits, baseDimensions);
expect(result.x1).toBeCloseTo(0.1, 5);
expect(result.y1).toBeCloseTo(0.9, 5);
});
});
describe('with combined edits', () => {
it('should preserve OCR metadata through transforms', () => {
const edits: EditActionItem[] = [
{ action: EditAction.Crop, parameters: { x: 0, y: 0, width: 500, height: 400 } },
{ action: EditAction.Rotate, parameters: { angle: 90 } },
];
const result = transformOcrBoundingBox(baseOcr, edits, baseDimensions);
expect(result.id).toBe(baseOcr.id);
expect(result.assetId).toBe(baseOcr.assetId);
expect(result.boxScore).toBe(baseOcr.boxScore);
expect(result.textScore).toBe(baseOcr.textScore);
expect(result.text).toBe(baseOcr.text);
});
});
});

View File

@ -0,0 +1,203 @@
import { EditAction, EditActionItem } from 'src/dtos/editing.dto';
import { AssetOcrResponseDto } from 'src/dtos/ocr.dto';
import { ImageDimensions } from 'src/types';
import { applyToPoint, compose, flipX, flipY, identity, Matrix, rotate, scale, translate } from 'transformation-matrix';
export const createAffineMatrix = (
edits: EditActionItem[],
scalingParameters?: {
pointSpace: ImageDimensions;
targetSpace: ImageDimensions;
},
): Matrix => {
let scalingMatrix: Matrix = identity();
if (scalingParameters) {
const { pointSpace, targetSpace } = scalingParameters;
const scaleX = targetSpace.width / pointSpace.width;
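// Uniform scale derived from the width ratio (the two spaces are assumed to share an aspect ratio)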
scalingMatrix = scale(scaleX);
}
return compose(
scalingMatrix,
...edits.map((edit) => {
switch (edit.action) {
case 'rotate': {
const angleInRadians = (-edit.parameters.angle * Math.PI) / 180;
return rotate(angleInRadians);
}
case 'mirror': {
return edit.parameters.axis === 'horizontal' ? flipY() : flipX();
}
default: {
return identity();
}
}
}),
);
};
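// Usage sketch (illustrative only, not part of this change): build a matrix for a set of edits
// and map a point with applyToPoint from transformation-matrix.
//   const matrix = createAffineMatrix(edits, { pointSpace: { width: 1000, height: 800 }, targetSpace: { width: 500, height: 400 } });
//   const mapped = applyToPoint(matrix, { x: 100, y: 80 });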
type Point = { x: number; y: number };
type TransformState = {
points: Point[];
currentWidth: number;
currentHeight: number;
};
/**
* Transforms an array of points through a series of edit operations (crop, rotate, mirror).
* Points should be in absolute pixel coordinates relative to the starting dimensions.
*/
const transformPoints = (
points: Point[],
edits: EditActionItem[],
startingDimensions: ImageDimensions,
): TransformState => {
let currentWidth = startingDimensions.width;
let currentHeight = startingDimensions.height;
let transformedPoints = [...points];
// Handle crop first
const crop = edits.find((edit) => edit.action === 'crop');
if (crop) {
const { x: cropX, y: cropY, width: cropWidth, height: cropHeight } = crop.parameters;
transformedPoints = transformedPoints.map((p) => ({
x: p.x - cropX,
y: p.y - cropY,
}));
currentWidth = cropWidth;
currentHeight = cropHeight;
}
// Apply rotate and mirror transforms
for (const edit of edits) {
let matrix: Matrix = identity();
if (edit.action === 'rotate') {
const angleDegrees = edit.parameters.angle;
const angleRadians = (angleDegrees * Math.PI) / 180;
const newWidth = angleDegrees === 90 || angleDegrees === 270 ? currentHeight : currentWidth;
const newHeight = angleDegrees === 90 || angleDegrees === 270 ? currentWidth : currentHeight;
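// Shift the old center to the origin, rotate, then re-center within the new (possibly swapped) canvas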
matrix = compose(
translate(newWidth / 2, newHeight / 2),
rotate(angleRadians),
translate(-currentWidth / 2, -currentHeight / 2),
);
currentWidth = newWidth;
currentHeight = newHeight;
} else if (edit.action === 'mirror') {
matrix = compose(
translate(currentWidth / 2, currentHeight / 2),
edit.parameters.axis === 'horizontal' ? flipY() : flipX(),
translate(-currentWidth / 2, -currentHeight / 2),
);
} else {
// Crop is handled above; skip any other edit types that do not change geometry
continue;
}
transformedPoints = transformedPoints.map((p) => applyToPoint(matrix, p));
}
return {
points: transformedPoints,
currentWidth,
currentHeight,
};
};
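// Worked example: on a 1000x800 image, the point (100, 80) under a crop of
// { x: 100, y: 80, width: 400, height: 320 } maps to (0, 0), and the working
// dimensions become 400x320 for any rotate/mirror steps that follow.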
type FaceBoundingBox = {
boundingBoxX1: number;
boundingBoxX2: number;
boundingBoxY1: number;
boundingBoxY2: number;
imageWidth: number;
imageHeight: number;
};
export const transformFaceBoundingBox = (
box: FaceBoundingBox,
edits: EditActionItem[],
imageDimensions: ImageDimensions,
): FaceBoundingBox => {
if (edits.length === 0) {
return box;
}
const scaleX = imageDimensions.width / box.imageWidth;
const scaleY = imageDimensions.height / box.imageHeight;
const points: Point[] = [
{ x: box.boundingBoxX1 * scaleX, y: box.boundingBoxY1 * scaleY },
{ x: box.boundingBoxX2 * scaleX, y: box.boundingBoxY2 * scaleY },
];
const { points: transformedPoints, currentWidth, currentHeight } = transformPoints(points, edits, imageDimensions);
// Ensure x1,y1 is top-left and x2,y2 is bottom-right
const [p1, p2] = transformedPoints;
return {
boundingBoxX1: Math.min(p1.x, p2.x),
boundingBoxY1: Math.min(p1.y, p2.y),
boundingBoxX2: Math.max(p1.x, p2.x),
boundingBoxY2: Math.max(p1.y, p2.y),
imageWidth: currentWidth,
imageHeight: currentHeight,
};
};
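// Example (matching the combined-edit test): a face box on a 1000x800 image, after a 400x300
// crop followed by a 90° rotation, is reported against imageWidth 300 and imageHeight 400,
// with (x1, y1)/(x2, y2) re-sorted so they remain the top-left/bottom-right corners.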
const reorderQuadPointsForRotation = (points: Point[], rotationDegrees: number): Point[] => {
const [p1, p2, p3, p4] = points;
switch (rotationDegrees) {
case 90: {
return [p4, p1, p2, p3];
}
case 180: {
return [p3, p4, p1, p2];
}
case 270: {
return [p2, p3, p4, p1];
}
default: {
return points;
}
}
};
export const transformOcrBoundingBox = (
box: AssetOcrResponseDto,
edits: EditActionItem[],
imageDimensions: ImageDimensions,
): AssetOcrResponseDto => {
if (edits.length === 0) {
return box;
}
const points: Point[] = [
{ x: box.x1 * imageDimensions.width, y: box.y1 * imageDimensions.height },
{ x: box.x2 * imageDimensions.width, y: box.y2 * imageDimensions.height },
{ x: box.x3 * imageDimensions.width, y: box.y3 * imageDimensions.height },
{ x: box.x4 * imageDimensions.width, y: box.y4 * imageDimensions.height },
];
const { points: transformedPoints, currentWidth, currentHeight } = transformPoints(points, edits, imageDimensions);
// Reorder points to maintain semantic ordering (topLeft, topRight, bottomRight, bottomLeft)
const netRotation = (edits.find((e) => e.action === EditAction.Rotate)?.parameters.angle ?? 0) % 360;
const reorderedPoints = reorderQuadPointsForRotation(transformedPoints, netRotation);
const [p1, p2, p3, p4] = reorderedPoints;
return {
...box,
x1: p1.x / currentWidth,
y1: p1.y / currentHeight,
x2: p2.x / currentWidth,
y2: p2.y / currentHeight,
x3: p3.x / currentWidth,
y3: p3.y / currentHeight,
x4: p4.x / currentWidth,
y4: p4.y / currentHeight,
};
};
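// Usage sketch (illustrative only): project a stored OCR quad into the edited image's coordinate
// space; coordinates stay normalized to [0, 1] and the text/score metadata is preserved.
//   const edited = transformOcrBoundingBox(ocr, edits, { width: 1000, height: 800 });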

View File

@ -81,6 +81,24 @@ export const ValidateUUID = (options?: UUIDOptions & ApiPropertyOptions) => {
);
};
export function IsAxisAlignedRotation() {
return ValidateBy(
{
name: 'isAxisAlignedRotation',
validator: {
validate(value: any) {
return [0, 90, 180, 270].includes(value);
},
defaultMessage: buildMessage(
(eachPrefix) => eachPrefix + '$property must be one of the following values: 0, 90, 180, 270',
{},
),
},
},
{},
);
}
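// Usage sketch (hypothetical DTO shown only for illustration):
//   class RotateParametersDto {
//     @IsAxisAlignedRotation()
//     angle!: number;
//   }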
export class UUIDParamDto {
@IsNotEmpty()
@IsUUID('4')

View File

@ -1,5 +1,6 @@
import { AssetFace, AssetFile, Exif } from 'src/database';
import { MapAsset } from 'src/dtos/asset-response.dto';
import { EditAction, EditActionItem } from 'src/dtos/editing.dto';
import { AssetFileType, AssetStatus, AssetType, AssetVisibility } from 'src/enum';
import { StorageAsset } from 'src/types';
import { authStub } from 'test/fixtures/auth.stub';
@ -36,8 +37,35 @@ const sidecarFileWithoutExt: AssetFile = {
path: '/original/path.xmp',
};
const editedPreviewFile: AssetFile = {
id: 'file-4',
type: AssetFileType.EditedPreview,
path: '/uploads/user-id/preview/path_edited.jpg',
};
const editedThumbnailFile: AssetFile = {
id: 'file-5',
type: AssetFileType.EditedThumbnail,
path: '/uploads/user-id/thumbnail/path_edited.jpg',
};
const editedFullsizeFile: AssetFile = {
id: 'file-6',
type: AssetFileType.EditedFullSize,
path: '/uploads/user-id/fullsize/path_edited.jpg',
};
const files: AssetFile[] = [fullsizeFile, previewFile, thumbnailFile];
const editedFiles: AssetFile[] = [
fullsizeFile,
previewFile,
thumbnailFile,
editedFullsizeFile,
editedPreviewFile,
editedThumbnailFile,
];
export const stackStub = (stackId: string, assets: (MapAsset & { exifInfo: Exif })[]) => {
return {
id: stackId,
@ -101,6 +129,9 @@ export const assetStub = {
stackId: null,
updateId: '42',
visibility: AssetVisibility.Timeline,
width: null,
height: null,
edits: [],
}),
noWebpPath: Object.freeze({
@ -139,6 +170,9 @@ export const assetStub = {
stackId: null,
updateId: '42',
visibility: AssetVisibility.Timeline,
width: null,
height: null,
edits: [],
}),
noThumbhash: Object.freeze({
@ -174,6 +208,9 @@ export const assetStub = {
stackId: null,
updateId: '42',
visibility: AssetVisibility.Timeline,
width: null,
height: null,
edits: [],
}),
primaryImage: Object.freeze({
@ -219,6 +256,9 @@ export const assetStub = {
updateId: '42',
libraryId: null,
visibility: AssetVisibility.Timeline,
width: null,
height: null,
edits: [],
}),
image: Object.freeze({
@ -264,6 +304,7 @@ export const assetStub = {
height: null,
width: null,
visibility: AssetVisibility.Timeline,
edits: [],
}),
trashed: Object.freeze({
@ -304,6 +345,9 @@ export const assetStub = {
stackId: null,
updateId: '42',
visibility: AssetVisibility.Timeline,
width: null,
height: null,
edits: [],
}),
trashedOffline: Object.freeze({
@ -344,6 +388,9 @@ export const assetStub = {
stackId: null,
updateId: '42',
visibility: AssetVisibility.Timeline,
width: null,
height: null,
edits: [],
}),
archived: Object.freeze({
id: 'asset-id',
@ -383,6 +430,9 @@ export const assetStub = {
stackId: null,
updateId: '42',
visibility: AssetVisibility.Timeline,
width: null,
height: null,
edits: [],
}),
external: Object.freeze({
@ -422,6 +472,9 @@ export const assetStub = {
stackId: null,
stack: null,
visibility: AssetVisibility.Timeline,
width: null,
height: null,
edits: [],
}),
image1: Object.freeze({
@ -461,6 +514,9 @@ export const assetStub = {
libraryId: null,
stack: null,
visibility: AssetVisibility.Timeline,
width: null,
height: null,
edits: [],
}),
imageFrom2015: Object.freeze({
@ -499,6 +555,9 @@ export const assetStub = {
duplicateId: null,
isOffline: false,
visibility: AssetVisibility.Timeline,
width: null,
height: null,
edits: [],
}),
video: Object.freeze({
@ -539,6 +598,9 @@ export const assetStub = {
libraryId: null,
stackId: null,
visibility: AssetVisibility.Timeline,
width: null,
height: null,
edits: [],
}),
livePhotoMotionAsset: Object.freeze({
@ -556,7 +618,10 @@ export const assetStub = {
files: [] as AssetFile[],
libraryId: null,
visibility: AssetVisibility.Hidden,
} as MapAsset & { faces: AssetFace[]; files: AssetFile[]; exifInfo: Exif }),
width: null,
height: null,
edits: [] as EditActionItem[],
} as MapAsset & { faces: AssetFace[]; files: AssetFile[]; exifInfo: Exif; edits: EditActionItem[] }),
livePhotoStillAsset: Object.freeze({
id: 'live-photo-still-asset',
@ -574,7 +639,10 @@ export const assetStub = {
files,
faces: [] as AssetFace[],
visibility: AssetVisibility.Timeline,
} as MapAsset & { faces: AssetFace[]; files: AssetFile[] }),
width: null,
height: null,
edits: [] as EditActionItem[],
} as MapAsset & { faces: AssetFace[]; files: AssetFile[]; edits: EditActionItem[] }),
livePhotoWithOriginalFileName: Object.freeze({
id: 'live-photo-still-asset',
@ -594,7 +662,10 @@ export const assetStub = {
libraryId: null,
faces: [] as AssetFace[],
visibility: AssetVisibility.Timeline,
} as MapAsset & { faces: AssetFace[]; files: AssetFile[] }),
width: null,
height: null,
edits: [] as EditActionItem[],
} as MapAsset & { faces: AssetFace[]; files: AssetFile[]; edits: EditActionItem[] }),
withLocation: Object.freeze({
id: 'asset-with-favorite-id',
@ -638,6 +709,9 @@ export const assetStub = {
isOffline: false,
tags: [],
visibility: AssetVisibility.Timeline,
width: null,
height: null,
edits: [],
}),
sidecar: Object.freeze({
@ -673,6 +747,9 @@ export const assetStub = {
libraryId: null,
stackId: null,
visibility: AssetVisibility.Timeline,
width: null,
height: null,
edits: [],
}),
sidecarWithoutExt: Object.freeze({
@ -705,6 +782,9 @@ export const assetStub = {
duplicateId: null,
isOffline: false,
visibility: AssetVisibility.Timeline,
width: null,
height: null,
edits: [],
}),
hasEncodedVideo: Object.freeze({
@ -744,6 +824,9 @@ export const assetStub = {
stackId: null,
stack: null,
visibility: AssetVisibility.Timeline,
width: null,
height: null,
edits: [],
}),
hasFileExtension: Object.freeze({
@ -780,6 +863,9 @@ export const assetStub = {
duplicateId: null,
isOffline: false,
visibility: AssetVisibility.Timeline,
width: null,
height: null,
edits: [],
}),
imageDng: Object.freeze({
@ -820,6 +906,9 @@ export const assetStub = {
libraryId: null,
stackId: null,
visibility: AssetVisibility.Timeline,
width: null,
height: null,
edits: [],
}),
imageHif: Object.freeze({
@ -860,6 +949,9 @@ export const assetStub = {
libraryId: null,
stackId: null,
visibility: AssetVisibility.Timeline,
width: null,
height: null,
edits: [],
}),
panoramaTif: Object.freeze({
id: 'asset-id',
@ -899,5 +991,110 @@ export const assetStub = {
libraryId: null,
stackId: null,
visibility: AssetVisibility.Timeline,
width: null,
height: null,
edits: [],
}),
withCropEdit: Object.freeze({
id: 'asset-id',
status: AssetStatus.Active,
deviceAssetId: 'device-asset-id',
fileModifiedAt: new Date('2023-02-23T05:06:29.716Z'),
fileCreatedAt: new Date('2023-02-23T05:06:29.716Z'),
owner: userStub.user1,
ownerId: 'user-id',
deviceId: 'device-id',
originalPath: '/original/path.jpg',
files,
checksum: Buffer.from('file hash', 'utf8'),
type: AssetType.Image,
thumbhash: Buffer.from('blablabla', 'base64'),
encodedVideoPath: null,
createdAt: new Date('2023-02-23T05:06:29.716Z'),
updatedAt: new Date('2023-02-23T05:06:29.716Z'),
localDateTime: new Date('2025-01-01T01:02:03.456Z'),
isFavorite: true,
duration: null,
isExternal: false,
livePhotoVideo: null,
livePhotoVideoId: null,
updateId: 'foo',
libraryId: null,
stackId: null,
sharedLinks: [],
originalFileName: 'asset-id.jpg',
faces: [],
deletedAt: null,
sidecarPath: null,
exifInfo: {
fileSizeInByte: 5000,
exifImageHeight: 3840,
exifImageWidth: 2160,
} as Exif,
duplicateId: null,
isOffline: false,
stack: null,
orientation: '',
projectionType: null,
height: 3840,
width: 2160,
visibility: AssetVisibility.Timeline,
edits: [
{
action: EditAction.Crop,
parameters: {
width: 1512,
height: 1152,
x: 216,
y: 1512,
},
},
] as EditActionItem[],
}),
withRevertedEdits: Object.freeze({
id: 'asset-id',
status: AssetStatus.Active,
deviceAssetId: 'device-asset-id',
fileModifiedAt: new Date('2023-02-23T05:06:29.716Z'),
fileCreatedAt: new Date('2023-02-23T05:06:29.716Z'),
owner: userStub.user1,
ownerId: 'user-id',
deviceId: 'device-id',
originalPath: '/original/path.jpg',
files: editedFiles,
checksum: Buffer.from('file hash', 'utf8'),
type: AssetType.Image,
thumbhash: Buffer.from('blablabla', 'base64'),
encodedVideoPath: null,
createdAt: new Date('2023-02-23T05:06:29.716Z'),
updatedAt: new Date('2023-02-23T05:06:29.716Z'),
localDateTime: new Date('2025-01-01T01:02:03.456Z'),
isFavorite: true,
duration: null,
isExternal: false,
livePhotoVideo: null,
livePhotoVideoId: null,
updateId: 'foo',
libraryId: null,
stackId: null,
sharedLinks: [],
originalFileName: 'asset-id.jpg',
faces: [],
deletedAt: null,
sidecarPath: null,
exifInfo: {
fileSizeInByte: 5000,
exifImageHeight: 3840,
exifImageWidth: 2160,
} as Exif,
duplicateId: null,
isOffline: false,
stack: null,
orientation: '',
projectionType: null,
height: 3840,
width: 2160,
visibility: AssetVisibility.Timeline,
edits: [],
}),
};

View File

@ -25,6 +25,7 @@ export const faceStub = {
deletedAt: new Date(),
updatedAt: new Date('2023-01-01T00:00:00Z'),
updateId: '0d1173e3-4d80-4d76-b41e-57d56de21125',
isVisible: true,
}),
primaryFace1: Object.freeze({
id: 'assetFaceId2',
@ -43,6 +44,7 @@ export const faceStub = {
deletedAt: null,
updatedAt: new Date('2023-01-01T00:00:00Z'),
updateId: '0d1173e3-4d80-4d76-b41e-57d56de21125',
isVisible: true,
}),
mergeFace1: Object.freeze({
id: 'assetFaceId3',
@ -61,6 +63,7 @@ export const faceStub = {
deletedAt: null,
updatedAt: new Date('2023-01-01T00:00:00Z'),
updateId: '0d1173e3-4d80-4d76-b41e-57d56de21125',
isVisible: true,
}),
noPerson1: Object.freeze({
id: 'assetFaceId8',
@ -79,6 +82,7 @@ export const faceStub = {
deletedAt: null,
updatedAt: new Date('2023-01-01T00:00:00Z'),
updateId: '0d1173e3-4d80-4d76-b41e-57d56de21125',
isVisible: true,
}),
noPerson2: Object.freeze({
id: 'assetFaceId9',
@ -97,6 +101,7 @@ export const faceStub = {
deletedAt: null,
updatedAt: new Date('2023-01-01T00:00:00Z'),
updateId: '0d1173e3-4d80-4d76-b41e-57d56de21125',
isVisible: true,
}),
fromExif1: Object.freeze({
id: 'assetFaceId9',
@ -114,6 +119,7 @@ export const faceStub = {
deletedAt: null,
updatedAt: new Date('2023-01-01T00:00:00Z'),
updateId: '0d1173e3-4d80-4d76-b41e-57d56de21125',
isVisible: true,
}),
fromExif2: Object.freeze({
id: 'assetFaceId9',
@ -131,6 +137,7 @@ export const faceStub = {
deletedAt: null,
updatedAt: new Date('2023-01-01T00:00:00Z'),
updateId: '0d1173e3-4d80-4d76-b41e-57d56de21125',
isVisible: true,
}),
withBirthDate: Object.freeze({
id: 'assetFaceId10',
@ -148,5 +155,6 @@ export const faceStub = {
deletedAt: null,
updatedAt: new Date('2023-01-01T00:00:00Z'),
updateId: '0d1173e3-4d80-4d76-b41e-57d56de21125',
isVisible: true,
}),
};

View File

@ -573,6 +573,7 @@ const assetFaceInsert = (assetFace: Partial<AssetFace> & { assetId: string }) =>
imageWidth: assetFace.imageWidth ?? 10,
personId: assetFace.personId ?? null,
sourceType: assetFace.sourceType ?? SourceType.MachineLearning,
isVisible: assetFace.isVisible ?? true,
};
return {

View File

@ -57,6 +57,7 @@ describe(OcrService.name, () => {
id: expect.any(String),
text: 'Test OCR',
textScore: 0.95,
isVisible: true,
x1: 10,
y1: 10,
x2: 50,
@ -106,6 +107,7 @@ describe(OcrService.name, () => {
id: expect.any(String),
text: 'One',
textScore: 0.9,
isVisible: true,
x1: 0,
y1: 1,
x2: 2,
@ -121,6 +123,7 @@ describe(OcrService.name, () => {
id: expect.any(String),
text: 'Two',
textScore: 0.89,
isVisible: true,
x1: 8,
y1: 9,
x2: 10,
@ -136,6 +139,7 @@ describe(OcrService.name, () => {
id: expect.any(String),
text: 'Three',
textScore: 0.88,
isVisible: true,
x1: 16,
y1: 17,
x2: 18,
@ -151,6 +155,7 @@ describe(OcrService.name, () => {
id: expect.any(String),
text: 'Four',
textScore: 0.87,
isVisible: true,
x1: 24,
y1: 25,
x2: 26,
@ -166,6 +171,7 @@ describe(OcrService.name, () => {
id: expect.any(String),
text: 'Five',
textScore: 0.86,
isVisible: true,
x1: 32,
y1: 33,
x2: 34,

View File

@ -8,10 +8,12 @@ export const newMediaRepositoryMock = (): Mocked<RepositoryInterface<MediaReposi
writeExif: vitest.fn().mockImplementation(() => Promise.resolve()),
copyTagGroup: vitest.fn().mockImplementation(() => Promise.resolve()),
generateThumbhash: vitest.fn().mockResolvedValue(Buffer.from('')),
decodeImage: vitest.fn().mockResolvedValue({ data: Buffer.from(''), info: {} }),
decodeImage: vitest.fn().mockImplementation(() => Promise.resolve({ data: Buffer.from(''), info: {} })),
extract: vitest.fn().mockResolvedValue(null),
probe: vitest.fn(),
transcode: vitest.fn(),
getImageDimensions: vitest.fn(),
checkFaceVisibility: vitest.fn().mockImplementation(() => ({ visible: [], hidden: [] })),
checkOcrVisibility: vitest.fn().mockImplementation(() => ({ visible: [], hidden: [] })),
};
};

View File

@ -349,6 +349,7 @@ const assetOcrFactory = (
boxScore?: number;
textScore?: number;
text?: string;
isVisible?: boolean;
} = {},
) => ({
id: newUuid(),
@ -364,6 +365,7 @@ const assetOcrFactory = (
boxScore: 0.95,
textScore: 0.92,
text: 'Sample Text',
isVisible: true,
...ocr,
});

View File

@ -20,6 +20,7 @@ import { AlbumUserRepository } from 'src/repositories/album-user.repository';
import { AlbumRepository } from 'src/repositories/album.repository';
import { ApiKeyRepository } from 'src/repositories/api-key.repository';
import { AppRepository } from 'src/repositories/app.repository';
import { AssetEditRepository } from 'src/repositories/asset-edit.repository';
import { AssetJobRepository } from 'src/repositories/asset-job.repository';
import { AssetRepository } from 'src/repositories/asset.repository';
import { AuditRepository } from 'src/repositories/audit.repository';
@ -216,6 +217,7 @@ export type ServiceOverrides = {
app: AppRepository;
audit: AuditRepository;
asset: AssetRepository;
assetEdit: AssetEditRepository;
assetJob: AssetJobRepository;
config: ConfigRepository;
cron: CronRepository;
@ -289,6 +291,7 @@ export const getMocks = () => {
album: automock(AlbumRepository, { strict: false }),
albumUser: automock(AlbumUserRepository),
asset: newAssetRepositoryMock(),
assetEdit: automock(AssetEditRepository),
assetJob: automock(AssetJobRepository),
app: automock(AppRepository, { strict: false }),
config: newConfigRepositoryMock(),
@ -356,6 +359,7 @@ export const newTestService = <T extends BaseService>(
overrides.apiKey || (mocks.apiKey as As<ApiKeyRepository>),
overrides.app || (mocks.app as As<AppRepository>),
overrides.asset || (mocks.asset as As<AssetRepository>),
overrides.assetEdit || (mocks.assetEdit as As<AssetEditRepository>),
overrides.assetJob || (mocks.assetJob as As<AssetJobRepository>),
overrides.audit || (mocks.audit as As<AuditRepository>),
overrides.config || (mocks.config as As<ConfigRepository> as ConfigRepository),

View File

@ -0,0 +1,20 @@
<script lang="ts">
import { IconButton } from '@immich/ui';
import { mdiPencil } from '@mdi/js';
import { t } from 'svelte-i18n';
interface Props {
onAction: () => void;
}
let { onAction }: Props = $props();
</script>
<IconButton
color="secondary"
shape="round"
variant="ghost"
icon={mdiPencil}
aria-label={$t('editor')}
onclick={() => onAction()}
/>

View File

@ -9,6 +9,7 @@
import CloseAction from '$lib/components/asset-viewer/actions/close-action.svelte';
import DeleteAction from '$lib/components/asset-viewer/actions/delete-action.svelte';
import DownloadAction from '$lib/components/asset-viewer/actions/download-action.svelte';
import EditAction from '$lib/components/asset-viewer/actions/edit-action.svelte';
import FavoriteAction from '$lib/components/asset-viewer/actions/favorite-action.svelte';
import KeepThisDeleteOthersAction from '$lib/components/asset-viewer/actions/keep-this-delete-others.svelte';
import RemoveAssetFromStack from '$lib/components/asset-viewer/actions/remove-asset-from-stack.svelte';
@ -23,7 +24,7 @@
import UnstackAction from '$lib/components/asset-viewer/actions/unstack-action.svelte';
import ButtonContextMenu from '$lib/components/shared-components/context-menu/button-context-menu.svelte';
import MenuOption from '$lib/components/shared-components/context-menu/menu-option.svelte';
import { AppRoute } from '$lib/constants';
import { AppRoute, ProjectionType } from '$lib/constants';
import { featureFlagsManager } from '$lib/managers/feature-flags-manager.svelte';
import { handleReplaceAsset } from '$lib/services/asset.service';
import { photoViewerImgElement } from '$lib/stores/assets-store.svelte';
@ -76,7 +77,7 @@
onRunJob: (name: AssetJobName) => void;
onPlaySlideshow: () => void;
onShowDetail: () => void;
// export let showEditorHandler: () => void;
onEdit: () => void;
onClose: () => void;
motionPhoto?: Snippet;
playOriginalVideo: boolean;
@ -99,6 +100,7 @@
onPlaySlideshow,
onShowDetail,
onClose,
onEdit,
motionPhoto,
playOriginalVideo = false,
setPlayOriginalVideo,
@ -109,16 +111,16 @@
let showDownloadButton = $derived(sharedLink ? sharedLink.allowDownload : !asset.isOffline);
let isLocked = $derived(asset.visibility === AssetVisibility.Locked);
let smartSearchEnabled = $derived(featureFlagsManager.value.smartSearch);
// $: showEditorButton =
// isOwner &&
// asset.type === AssetTypeEnum.Image &&
// !(
// asset.exifInfo?.projectionType === ProjectionType.EQUIRECTANGULAR ||
// (asset.originalPath && asset.originalPath.toLowerCase().endsWith('.insp'))
// ) &&
// !(asset.originalPath && asset.originalPath.toLowerCase().endsWith('.gif')) &&
// !asset.livePhotoVideoId;
let showEditorButton = $derived(
isOwner &&
asset.type === AssetTypeEnum.Image &&
!(
asset.exifInfo?.projectionType === ProjectionType.EQUIRECTANGULAR ||
(asset.originalPath && asset.originalPath.toLowerCase().endsWith('.insp'))
) &&
!(asset.originalPath && asset.originalPath.toLowerCase().endsWith('.gif')) &&
!asset.livePhotoVideoId,
);
</script>
<div
@ -181,6 +183,10 @@
<FavoriteAction {asset} {onAction} />
{/if}
{#if showEditorButton}
<EditAction onAction={onEdit} />
{/if}
{#if isOwner}
<DeleteAction {asset} {onAction} {preAction} />

View File

@ -10,8 +10,8 @@
import { AppRoute, AssetAction, ProjectionType } from '$lib/constants';
import { activityManager } from '$lib/managers/activity-manager.svelte';
import { authManager } from '$lib/managers/auth-manager.svelte';
import { editManager, EditToolType } from '$lib/managers/edit/edit-manager.svelte';
import type { TimelineAsset } from '$lib/managers/timeline-manager/types';
import { closeEditorCofirm } from '$lib/stores/asset-editor.store';
import { assetViewingStore } from '$lib/stores/asset-viewing.store';
import { ocrManager } from '$lib/stores/ocr.svelte';
import { alwaysLoadOriginalVideo, isShowDetail } from '$lib/stores/preferences.store';
@ -110,7 +110,6 @@
let isShowEditor = $state(false);
let fullscreenElement = $state<Element>();
let unsubscribes: (() => void)[] = [];
let selectedEditType: string = $state('');
let stack: StackResponseDto | null = $state(null);
let zoomToggle = $state(() => void 0);
@ -228,10 +227,15 @@
onClose(asset);
};
const closeEditor = () => {
closeEditorCofirm(() => {
const closeEditor = async () => {
if (await editManager.closeConfirm()) {
// If edits were applied, refresh the asset to show the new image
if (editManager.hasAppliedEdits) {
const refreshedAsset = await getAssetInfo({ id: asset.id });
asset = refreshedAsset;
}
isShowEditor = false;
});
}
};
const navigateAsset = async (order?: 'previous' | 'next', e?: Event) => {
@ -269,12 +273,12 @@
}
};
// const showEditorHandler = () => {
// if (isShowActivity) {
// isShowActivity = false;
// }
// isShowEditor = !isShowEditor;
// };
const showEditor = () => {
if (isShowActivity) {
isShowActivity = false;
}
isShowEditor = !isShowEditor;
};
const handleRunJob = async (name: AssetJobName) => {
try {
@ -363,10 +367,6 @@
onAction?.(action);
};
const handleUpdateSelectedEditType = (type: string) => {
selectedEditType = type;
};
const handleAssetReplace = async ({ oldAssetId, newAssetId }: { oldAssetId: string; newAssetId: string }) => {
if (oldAssetId !== asset.id) {
return;
@ -434,6 +434,7 @@
onPlaySlideshow={() => ($slideshowState = SlideshowState.PlaySlideshow)}
onShowDetail={toggleDetailPanel}
onClose={closeViewer}
onEdit={showEditor}
{playOriginalVideo}
{setPlayOriginalVideo}
>
@ -513,7 +514,7 @@
.toLowerCase()
.endsWith('.insp'))}
<ImagePanoramaViewer bind:zoomToggle {asset} />
{:else if isShowEditor && selectedEditType === 'crop'}
{:else if isShowEditor && editManager.selectedTool?.type === EditToolType.Transform}
<CropArea {asset} />
{:else}
<PhotoViewer
@ -588,7 +589,7 @@
class="row-start-1 row-span-4 w-[400px] overflow-y-auto transition-all dark:border-l dark:border-s-immich-dark-gray"
translate="yes"
>
<EditorPanel {asset} onUpdateSelectedType={handleUpdateSelectedEditType} onClose={closeEditor} />
<EditorPanel {asset} onClose={closeEditor} />
</div>
{/if}

View File

@ -1,24 +1,8 @@
<script lang="ts">
import { getAssetOriginalUrl } from '$lib/utils';
import { handleError } from '$lib/utils/handle-error';
import { transformManager } from '$lib/managers/edit/transform-manager.svelte';
import { getAltText } from '$lib/utils/thumbnail-util';
import { onDestroy, onMount, tick } from 'svelte';
import { t } from 'svelte-i18n';
import {
changedOriention,
cropAspectRatio,
cropSettings,
resetGlobalCropStore,
rotateDegrees,
} from '$lib/stores/asset-editor.store';
import { toTimelineAsset } from '$lib/utils/timeline-util';
import type { AssetResponseDto } from '@immich/sdk';
import { animateCropChange, recalculateCrop } from './crop-settings';
import { cropAreaEl, cropFrame, imgElement, isResizingOrDragging, overlayEl, resetCropStore } from './crop-store';
import { draw } from './drawing';
import { onImageLoad, resizeCanvas } from './image-loading';
import { handleMouseDown, handleMouseMove, handleMouseUp } from './mouse-handlers';
interface Props {
asset: AssetResponseDto;
@ -26,69 +10,50 @@
let { asset }: Props = $props();
let img = $state<HTMLImageElement>();
let canvasContainer = $state<HTMLElement | null>(null);
$effect(() => {
if (!img) {
if (!canvasContainer) {
return;
}
imgElement.set(img);
});
const resizeObserver = new ResizeObserver(() => {
transformManager.resizeCanvas();
});
cropAspectRatio.subscribe((value) => {
if (!img || !$cropAreaEl) {
return;
}
const newCrop = recalculateCrop($cropSettings, $cropAreaEl, value, true);
if (newCrop) {
animateCropChange($cropSettings, newCrop, () => draw($cropSettings));
}
});
resizeObserver.observe(canvasContainer);
onMount(async () => {
resetGlobalCropStore();
img = new Image();
await tick();
img.src = getAssetOriginalUrl({ id: asset.id, cacheKey: asset.thumbhash });
img.addEventListener('load', () => onImageLoad(true), { passive: true });
img.addEventListener('error', (error) => handleError(error, $t('error_loading_image')), { passive: true });
globalThis.addEventListener('mousemove', handleMouseMove, { passive: true });
});
onDestroy(() => {
globalThis.removeEventListener('mousemove', handleMouseMove);
resetCropStore();
resetGlobalCropStore();
});
$effect(() => {
resizeCanvas();
return () => {
resizeObserver.disconnect();
};
});
</script>
<div class="canvas-container">
<div class="canvas-container" bind:this={canvasContainer}>
<button
class={`crop-area ${$changedOriention ? 'changedOriention' : ''}`}
style={`rotate:${$rotateDegrees}deg`}
bind:this={$cropAreaEl}
onmousedown={handleMouseDown}
onmouseup={handleMouseUp}
class={`crop-area ${transformManager.orientationChanged ? 'changedOriention' : ''}`}
style={`rotate:${transformManager.imageRotation}deg`}
bind:this={transformManager.cropAreaEl}
onmousedown={(e) => transformManager.handleMouseDown(e)}
onmouseup={() => transformManager.handleMouseUp()}
aria-label="Crop area"
type="button"
>
<img draggable="false" src={img?.src} alt={$getAltText(toTimelineAsset(asset))} />
<div class={`${$isResizingOrDragging ? 'resizing' : ''} crop-frame`} bind:this={$cropFrame}>
<img draggable="false" src={transformManager.imgElement?.src} alt={$getAltText(toTimelineAsset(asset))} />
<div
class={`${transformManager.isInteracting ? 'resizing' : ''} crop-frame`}
bind:this={transformManager.cropFrame}
>
<div class="grid"></div>
<div class="corner top-left"></div>
<div class="corner top-right"></div>
<div class="corner bottom-left"></div>
<div class="corner bottom-right"></div>
</div>
<div class={`${$isResizingOrDragging ? 'light' : ''} overlay`} bind:this={$overlayEl}></div>
<div
class={`${transformManager.isInteracting ? 'light' : ''} overlay`}
bind:this={transformManager.overlayEl}
></div>
</button>
</div>

View File

@ -1,5 +1,5 @@
<script lang="ts">
import type { CropAspectRatio } from '$lib/stores/asset-editor.store';
import type { CropAspectRatio } from '$lib/managers/edit/transform-manager.svelte';
import { Button, Icon, type Color } from '@immich/ui';
interface Props {

View File

@ -1,159 +0,0 @@
import type { CropAspectRatio, CropSettings } from '$lib/stores/asset-editor.store';
import { get } from 'svelte/store';
import { cropAreaEl } from './crop-store';
import { checkEdits } from './mouse-handlers';
export function recalculateCrop(
crop: CropSettings,
canvas: HTMLElement,
aspectRatio: CropAspectRatio,
returnNewCrop = false,
): CropSettings | null {
const canvasW = canvas.clientWidth;
const canvasH = canvas.clientHeight;
let newWidth = crop.width;
let newHeight = crop.height;
const { newWidth: w, newHeight: h } = keepAspectRatio(newWidth, newHeight, aspectRatio);
if (w > canvasW) {
newWidth = canvasW;
newHeight = canvasW / (w / h);
} else if (h > canvasH) {
newHeight = canvasH;
newWidth = canvasH * (w / h);
} else {
newWidth = w;
newHeight = h;
}
const newX = Math.max(0, Math.min(crop.x, canvasW - newWidth));
const newY = Math.max(0, Math.min(crop.y, canvasH - newHeight));
const newCrop = {
width: newWidth,
height: newHeight,
x: newX,
y: newY,
};
if (returnNewCrop) {
setTimeout(() => {
checkEdits();
}, 1);
return newCrop;
} else {
crop.width = newWidth;
crop.height = newHeight;
crop.x = newX;
crop.y = newY;
return null;
}
}
export function animateCropChange(crop: CropSettings, newCrop: CropSettings, draw: () => void, duration = 100) {
const cropArea = get(cropAreaEl);
if (!cropArea) {
return;
}
const cropFrame = cropArea.querySelector('.crop-frame') as HTMLElement;
if (!cropFrame) {
return;
}
const startTime = performance.now();
const initialCrop = { ...crop };
const animate = (currentTime: number) => {
const elapsedTime = currentTime - startTime;
const progress = Math.min(elapsedTime / duration, 1);
crop.x = initialCrop.x + (newCrop.x - initialCrop.x) * progress;
crop.y = initialCrop.y + (newCrop.y - initialCrop.y) * progress;
crop.width = initialCrop.width + (newCrop.width - initialCrop.width) * progress;
crop.height = initialCrop.height + (newCrop.height - initialCrop.height) * progress;
draw();
if (progress < 1) {
requestAnimationFrame(animate);
}
};
requestAnimationFrame(animate);
}
export function keepAspectRatio(newWidth: number, newHeight: number, aspectRatio: CropAspectRatio) {
const [widthRatio, heightRatio] = aspectRatio.split(':').map(Number);
if (widthRatio && heightRatio) {
const calculatedWidth = (newHeight * widthRatio) / heightRatio;
return { newWidth: calculatedWidth, newHeight };
}
return { newWidth, newHeight };
}
export function adjustDimensions(
newWidth: number,
newHeight: number,
aspectRatio: CropAspectRatio,
xLimit: number,
yLimit: number,
minSize: number,
) {
let w = newWidth;
let h = newHeight;
let aspectMultiplier: number;
if (aspectRatio === 'free') {
aspectMultiplier = newWidth / newHeight;
} else {
const [widthRatio, heightRatio] = aspectRatio.split(':').map(Number);
aspectMultiplier = widthRatio && heightRatio ? widthRatio / heightRatio : newWidth / newHeight;
}
if (aspectRatio !== 'free') {
h = w / aspectMultiplier;
}
if (w > xLimit) {
w = xLimit;
if (aspectRatio !== 'free') {
h = w / aspectMultiplier;
}
}
if (h > yLimit) {
h = yLimit;
if (aspectRatio !== 'free') {
w = h * aspectMultiplier;
}
}
if (w < minSize) {
w = minSize;
if (aspectRatio !== 'free') {
h = w / aspectMultiplier;
}
}
if (h < minSize) {
h = minSize;
if (aspectRatio !== 'free') {
w = h * aspectMultiplier;
}
}
if (aspectRatio !== 'free' && w / h !== aspectMultiplier) {
if (w < minSize) {
h = w / aspectMultiplier;
}
if (h < minSize) {
w = h * aspectMultiplier;
}
}
return { newWidth: w, newHeight: h };
}

View File

@ -1,27 +0,0 @@
import { writable } from 'svelte/store';
export const darkenLevel = writable(0.65);
export const isResizingOrDragging = writable(false);
export const animationFrame = writable<ReturnType<typeof requestAnimationFrame> | null>(null);
export const canvasCursor = writable('default');
export const dragOffset = writable({ x: 0, y: 0 });
export const resizeSide = writable('');
export const imgElement = writable<HTMLImageElement | null>(null);
export const cropAreaEl = writable<HTMLElement | null>(null);
export const isDragging = writable<boolean>(false);
export const overlayEl = writable<HTMLElement | null>(null);
export const cropFrame = writable<HTMLElement | null>(null);
export function resetCropStore() {
darkenLevel.set(0.65);
isResizingOrDragging.set(false);
animationFrame.set(null);
canvasCursor.set('default');
dragOffset.set({ x: 0, y: 0 });
resizeSide.set('');
imgElement.set(null);
cropAreaEl.set(null);
isDragging.set(false);
overlayEl.set(null);
}

View File

@ -1,22 +1,11 @@
<script lang="ts">
import {
cropAspectRatio,
cropImageScale,
cropImageSize,
cropSettings,
cropSettingsChanged,
normaizedRorateDegrees,
rotateDegrees,
type CropAspectRatio,
} from '$lib/stores/asset-editor.store';
import { transformManager, type CropAspectRatio } from '$lib/managers/edit/transform-manager.svelte';
import { IconButton } from '@immich/ui';
import { mdiBackupRestore, mdiCropFree, mdiRotateLeft, mdiRotateRight, mdiSquareOutline } from '@mdi/js';
import { tick } from 'svelte';
import { t } from 'svelte-i18n';
import CropPreset from './crop-preset.svelte';
import { onImageLoad } from './image-loading';
let rotateHorizontal = $derived([90, 270].includes($normaizedRorateDegrees));
let rotateHorizontal = $derived([90, 270].includes(transformManager.normalizedRotation));
const icon_16_9 = `M200-280q-33 0-56.5-23.5T120-360v-240q0-33 23.5-56.5T200-680h560q33 0 56.5 23.5T840-600v240q0 33-23.5 56.5T760-280H200Zm0-80h560v-240H200v240Zm0 0v-240 240Z`;
const icon_4_3 = `M19 5H5c-1.1 0-2 .9-2 2v10c0 1.1.9 2 2 2h14c1.1 0 2-.9 2-2V7c0-1.1-.9-2-2-2zm0 12H5V7h14v10z`;
const icon_3_2 = `M200-240q-33 0-56.5-23.5T120-320v-320q0-33 23.5-56.5T200-720h560q33 0 56.5 23.5T840-640v320q0 33-23.5 56.5T760-240H200Zm0-80h560v-320H200v320Zm0 0v-320 320Z`;
@ -92,55 +81,38 @@
},
];
let selectedSize: CropAspectRatio = $state('free');
$effect(() => {
$cropAspectRatio = selectedSize;
});
let sizesRows = $derived([
sizes.filter((s) => s.rotate === false),
sizes.filter((s) => s.rotate === undefined),
sizes.filter((s) => s.rotate === true),
]);
async function rotate(clock: boolean) {
rotateDegrees.update((v) => {
return v + 90 * (clock ? 1 : -1);
});
await tick();
onImageLoad();
}
function selectType(size: CropAspectRatio) {
if (size === 'reset') {
selectedSize = 'free';
let cropImageSizeM = $cropImageSize;
let cropImageScaleM = $cropImageScale;
$cropSettings = {
transformManager.cropAspectRatio = 'free';
let cropImageSizeM = transformManager.cropImageSize;
let cropImageScaleM = transformManager.cropImageScale;
transformManager.region = {
x: 0,
y: 0,
width: cropImageSizeM[0] * cropImageScaleM - 1,
height: cropImageSizeM[1] * cropImageScaleM - 1,
};
$cropAspectRatio = selectedSize;
$cropSettingsChanged = false;
return;
size = 'free';
}
selectedSize = size;
$cropAspectRatio = size;
transformManager.setAspectRatio(size);
}
</script>
<div class="mt-3 px-4 py-4">
<div class="mt-3 px-4">
<div class="flex h-10 w-full items-center justify-between text-sm">
<h2 class="uppercase">{$t('editor_crop_tool_h2_aspect_ratios')}</h2>
<h2 class="uppercase">{$t('crop')}</h2>
</div>
{#each sizesRows as sizesRow, index (index)}
<ul class="flex-wrap flex-row flex gap-x-6 py-2 justify-evenly">
{#each sizesRow as size (size.name)}
<CropPreset {size} {selectedSize} {rotateHorizontal} {selectType} />
<CropPreset {size} selectedSize={transformManager.cropAspectRatio} {rotateHorizontal} {selectType} />
{/each}
</ul>
{/each}
@ -154,7 +126,7 @@
variant="ghost"
color="secondary"
aria-label={$t('anti_clockwise')}
onclick={() => rotate(false)}
onclick={() => transformManager.rotate(-90)}
icon={mdiRotateLeft}
/>
</li>
@ -164,7 +136,7 @@
variant="ghost"
color="secondary"
aria-label={$t('clockwise')}
onclick={() => rotate(true)}
onclick={() => transformManager.rotate(90)}
icon={mdiRotateRight}
/>
</li>

View File

@ -1,40 +0,0 @@
import type { CropSettings } from '$lib/stores/asset-editor.store';
import { get } from 'svelte/store';
import { cropFrame, overlayEl } from './crop-store';
export function draw(crop: CropSettings) {
const mCropFrame = get(cropFrame);
if (!mCropFrame) {
return;
}
mCropFrame.style.left = `${crop.x}px`;
mCropFrame.style.top = `${crop.y}px`;
mCropFrame.style.width = `${crop.width}px`;
mCropFrame.style.height = `${crop.height}px`;
drawOverlay(crop);
}
export function drawOverlay(crop: CropSettings) {
const overlay = get(overlayEl);
if (!overlay) {
return;
}
overlay.style.clipPath = `
polygon(
0% 0%,
0% 100%,
100% 100%,
100% 0%,
0% 0%,
${crop.x}px ${crop.y}px,
${crop.x + crop.width}px ${crop.y}px,
${crop.x + crop.width}px ${crop.y + crop.height}px,
${crop.x}px ${crop.y + crop.height}px,
${crop.x}px ${crop.y}px
)
`;
}

View File

@ -1,117 +0,0 @@
import { cropImageScale, cropImageSize, cropSettings, type CropSettings } from '$lib/stores/asset-editor.store';
import { get } from 'svelte/store';
import { cropAreaEl, cropFrame, imgElement } from './crop-store';
import { draw } from './drawing';
export function onImageLoad(resetSize: boolean = false) {
const img = get(imgElement);
const cropArea = get(cropAreaEl);
if (!cropArea || !img) {
return;
}
const containerWidth = cropArea.clientWidth ?? 0;
const containerHeight = cropArea.clientHeight ?? 0;
const scale = calculateScale(img, containerWidth, containerHeight);
cropImageSize.set([img.width, img.height]);
if (resetSize) {
cropSettings.update((crop) => {
crop.x = 0;
crop.y = 0;
crop.width = img.width * scale;
crop.height = img.height * scale;
return crop;
});
} else {
const cropFrameEl = get(cropFrame);
cropFrameEl?.classList.add('transition');
cropSettings.update((crop) => normalizeCropArea(crop, img, scale));
cropFrameEl?.classList.add('transition');
cropFrameEl?.addEventListener('transitionend', () => cropFrameEl?.classList.remove('transition'), {
passive: true,
});
}
cropImageScale.set(scale);
img.style.width = `${img.width * scale}px`;
img.style.height = `${img.height * scale}px`;
draw(get(cropSettings));
}
export function calculateScale(img: HTMLImageElement, containerWidth: number, containerHeight: number): number {
const imageAspectRatio = img.width / img.height;
let scale: number;
if (imageAspectRatio > 1) {
scale = containerWidth / img.width;
if (img.height * scale > containerHeight) {
scale = containerHeight / img.height;
}
} else {
scale = containerHeight / img.height;
if (img.width * scale > containerWidth) {
scale = containerWidth / img.width;
}
}
return scale;
}
export function normalizeCropArea(crop: CropSettings, img: HTMLImageElement, scale: number) {
const prevScale = get(cropImageScale);
const scaleRatio = scale / prevScale;
crop.x *= scaleRatio;
crop.y *= scaleRatio;
crop.width *= scaleRatio;
crop.height *= scaleRatio;
crop.width = Math.min(crop.width, img.width * scale);
crop.height = Math.min(crop.height, img.height * scale);
crop.x = Math.max(0, Math.min(crop.x, img.width * scale - crop.width));
crop.y = Math.max(0, Math.min(crop.y, img.height * scale - crop.height));
return crop;
}
export function resizeCanvas() {
const img = get(imgElement);
const cropArea = get(cropAreaEl);
if (!cropArea || !img) {
return;
}
const containerWidth = cropArea?.clientWidth ?? 0;
const containerHeight = cropArea?.clientHeight ?? 0;
const imageAspectRatio = img.width / img.height;
let scale;
if (imageAspectRatio > 1) {
scale = containerWidth / img.width;
if (img.height * scale > containerHeight) {
scale = containerHeight / img.height;
}
} else {
scale = containerHeight / img.height;
if (img.width * scale > containerWidth) {
scale = containerWidth / img.width;
}
}
img.style.width = `${img.width * scale}px`;
img.style.height = `${img.height * scale}px`;
const cropFrame = cropArea.querySelector('.crop-frame') as HTMLElement;
if (cropFrame) {
cropFrame.style.width = `${img.width * scale}px`;
cropFrame.style.height = `${img.height * scale}px`;
}
draw(get(cropSettings));
}

View File

@ -1,536 +0,0 @@
import {
cropAspectRatio,
cropImageScale,
cropImageSize,
cropSettings,
cropSettingsChanged,
normaizedRorateDegrees,
rotateDegrees,
showCancelConfirmDialog,
type CropSettings,
} from '$lib/stores/asset-editor.store';
import { get } from 'svelte/store';
import { adjustDimensions, keepAspectRatio } from './crop-settings';
import {
canvasCursor,
cropAreaEl,
dragOffset,
isDragging,
isResizingOrDragging,
overlayEl,
resizeSide,
} from './crop-store';
import { draw } from './drawing';
export function handleMouseDown(e: MouseEvent) {
const canvas = get(cropAreaEl);
if (!canvas) {
return;
}
const crop = get(cropSettings);
const { mouseX, mouseY } = getMousePosition(e);
const {
onLeftBoundary,
onRightBoundary,
onTopBoundary,
onBottomBoundary,
onTopLeftCorner,
onTopRightCorner,
onBottomLeftCorner,
onBottomRightCorner,
} = isOnCropBoundary(mouseX, mouseY, crop);
if (
onTopLeftCorner ||
onTopRightCorner ||
onBottomLeftCorner ||
onBottomRightCorner ||
onLeftBoundary ||
onRightBoundary ||
onTopBoundary ||
onBottomBoundary
) {
setResizeSide(mouseX, mouseY);
} else if (isInCropArea(mouseX, mouseY, crop)) {
startDragging(mouseX, mouseY);
}
document.body.style.userSelect = 'none';
globalThis.addEventListener('mouseup', handleMouseUp, { passive: true });
}
export function handleMouseMove(e: MouseEvent) {
const canvas = get(cropAreaEl);
if (!canvas) {
return;
}
const resizeSideValue = get(resizeSide);
const { mouseX, mouseY } = getMousePosition(e);
if (get(isDragging)) {
moveCrop(mouseX, mouseY);
} else if (resizeSideValue) {
resizeCrop(mouseX, mouseY);
} else {
updateCursor(mouseX, mouseY);
}
}
export function handleMouseUp() {
globalThis.removeEventListener('mouseup', handleMouseUp);
document.body.style.userSelect = '';
stopInteraction();
}
function getMousePosition(e: MouseEvent) {
let offsetX = e.clientX;
let offsetY = e.clientY;
const clienRect = getBoundingClientRectCached(get(cropAreaEl));
const rotateDeg = get(normaizedRorateDegrees);
if (rotateDeg == 90) {
offsetX = e.clientY - (clienRect?.top ?? 0);
offsetY = window.innerWidth - e.clientX - (window.innerWidth - (clienRect?.right ?? 0));
} else if (rotateDeg == 180) {
offsetX = window.innerWidth - e.clientX - (window.innerWidth - (clienRect?.right ?? 0));
offsetY = window.innerHeight - e.clientY - (window.innerHeight - (clienRect?.bottom ?? 0));
} else if (rotateDeg == 270) {
offsetX = window.innerHeight - e.clientY - (window.innerHeight - (clienRect?.bottom ?? 0));
offsetY = e.clientX - (clienRect?.left ?? 0);
} else if (rotateDeg == 0) {
offsetX -= clienRect?.left ?? 0;
offsetY -= clienRect?.top ?? 0;
}
return { mouseX: offsetX, mouseY: offsetY };
}
type BoundingClientRect = ReturnType<HTMLElement['getBoundingClientRect']>;
let getBoundingClientRectCache: { data: BoundingClientRect | null; time: number } = {
data: null,
time: 0,
};
rotateDegrees.subscribe(() => {
getBoundingClientRectCache.time = 0;
});
function getBoundingClientRectCached(el: HTMLElement | null) {
if (Date.now() - getBoundingClientRectCache.time > 5000 || getBoundingClientRectCache.data === null) {
getBoundingClientRectCache = {
time: Date.now(),
data: el?.getBoundingClientRect() ?? null,
};
}
return getBoundingClientRectCache.data;
}
function isOnCropBoundary(mouseX: number, mouseY: number, crop: CropSettings) {
const { x, y, width, height } = crop;
const sensitivity = 10;
const cornerSensitivity = 15;
const outOfBound = mouseX > get(cropImageSize)[0] || mouseY > get(cropImageSize)[1] || mouseX < 0 || mouseY < 0;
if (outOfBound) {
return {
onLeftBoundary: false,
onRightBoundary: false,
onTopBoundary: false,
onBottomBoundary: false,
onTopLeftCorner: false,
onTopRightCorner: false,
onBottomLeftCorner: false,
onBottomRightCorner: false,
};
}
const onLeftBoundary = mouseX >= x - sensitivity && mouseX <= x + sensitivity && mouseY >= y && mouseY <= y + height;
const onRightBoundary =
mouseX >= x + width - sensitivity && mouseX <= x + width + sensitivity && mouseY >= y && mouseY <= y + height;
const onTopBoundary = mouseY >= y - sensitivity && mouseY <= y + sensitivity && mouseX >= x && mouseX <= x + width;
const onBottomBoundary =
mouseY >= y + height - sensitivity && mouseY <= y + height + sensitivity && mouseX >= x && mouseX <= x + width;
const onTopLeftCorner =
mouseX >= x - cornerSensitivity &&
mouseX <= x + cornerSensitivity &&
mouseY >= y - cornerSensitivity &&
mouseY <= y + cornerSensitivity;
const onTopRightCorner =
mouseX >= x + width - cornerSensitivity &&
mouseX <= x + width + cornerSensitivity &&
mouseY >= y - cornerSensitivity &&
mouseY <= y + cornerSensitivity;
const onBottomLeftCorner =
mouseX >= x - cornerSensitivity &&
mouseX <= x + cornerSensitivity &&
mouseY >= y + height - cornerSensitivity &&
mouseY <= y + height + cornerSensitivity;
const onBottomRightCorner =
mouseX >= x + width - cornerSensitivity &&
mouseX <= x + width + cornerSensitivity &&
mouseY >= y + height - cornerSensitivity &&
mouseY <= y + height + cornerSensitivity;
return {
onLeftBoundary,
onRightBoundary,
onTopBoundary,
onBottomBoundary,
onTopLeftCorner,
onTopRightCorner,
onBottomLeftCorner,
onBottomRightCorner,
};
}
function isInCropArea(mouseX: number, mouseY: number, crop: CropSettings) {
const { x, y, width, height } = crop;
return mouseX >= x && mouseX <= x + width && mouseY >= y && mouseY <= y + height;
}
function setResizeSide(mouseX: number, mouseY: number) {
const crop = get(cropSettings);
const {
onLeftBoundary,
onRightBoundary,
onTopBoundary,
onBottomBoundary,
onTopLeftCorner,
onTopRightCorner,
onBottomLeftCorner,
onBottomRightCorner,
} = isOnCropBoundary(mouseX, mouseY, crop);
if (onTopLeftCorner) {
resizeSide.set('top-left');
} else if (onTopRightCorner) {
resizeSide.set('top-right');
} else if (onBottomLeftCorner) {
resizeSide.set('bottom-left');
} else if (onBottomRightCorner) {
resizeSide.set('bottom-right');
} else if (onLeftBoundary) {
resizeSide.set('left');
} else if (onRightBoundary) {
resizeSide.set('right');
} else if (onTopBoundary) {
resizeSide.set('top');
} else if (onBottomBoundary) {
resizeSide.set('bottom');
}
}
function startDragging(mouseX: number, mouseY: number) {
isDragging.set(true);
const crop = get(cropSettings);
isResizingOrDragging.set(true);
dragOffset.set({ x: mouseX - crop.x, y: mouseY - crop.y });
fadeOverlay(false);
}
function moveCrop(mouseX: number, mouseY: number) {
const cropArea = get(cropAreaEl);
if (!cropArea) {
return;
}
const crop = get(cropSettings);
const { x, y } = get(dragOffset);
let newX = mouseX - x;
let newY = mouseY - y;
newX = Math.max(0, Math.min(cropArea.clientWidth - crop.width, newX));
newY = Math.max(0, Math.min(cropArea.clientHeight - crop.height, newY));
cropSettings.update((crop) => {
crop.x = newX;
crop.y = newY;
return crop;
});
draw(crop);
}
function resizeCrop(mouseX: number, mouseY: number) {
const canvas = get(cropAreaEl);
const crop = get(cropSettings);
const resizeSideValue = get(resizeSide);
if (!canvas || !resizeSideValue) {
return;
}
fadeOverlay(false);
const { x, y, width, height } = crop;
const minSize = 50;
let newWidth = width;
let newHeight = height;
switch (resizeSideValue) {
case 'left': {
newWidth = width + x - mouseX;
newHeight = height;
if (newWidth >= minSize && mouseX >= 0) {
const { newWidth: w, newHeight: h } = keepAspectRatio(newWidth, newHeight, get(cropAspectRatio));
cropSettings.update((crop) => {
crop.width = Math.max(minSize, Math.min(w, canvas.clientWidth));
crop.height = Math.max(minSize, Math.min(h, canvas.clientHeight));
crop.x = Math.max(0, x + width - crop.width);
return crop;
});
}
break;
}
case 'right': {
newWidth = mouseX - x;
newHeight = height;
if (newWidth >= minSize && mouseX <= canvas.clientWidth) {
const { newWidth: w, newHeight: h } = keepAspectRatio(newWidth, newHeight, get(cropAspectRatio));
cropSettings.update((crop) => {
crop.width = Math.max(minSize, Math.min(w, canvas.clientWidth - x));
crop.height = Math.max(minSize, Math.min(h, canvas.clientHeight));
return crop;
});
}
break;
}
case 'top': {
newHeight = height + y - mouseY;
newWidth = width;
if (newHeight >= minSize && mouseY >= 0) {
const { newWidth: w, newHeight: h } = adjustDimensions(
newWidth,
newHeight,
get(cropAspectRatio),
canvas.clientWidth,
canvas.clientHeight,
minSize,
);
cropSettings.update((crop) => {
crop.y = Math.max(0, y + height - h);
crop.width = w;
crop.height = h;
return crop;
});
}
break;
}
case 'bottom': {
newHeight = mouseY - y;
newWidth = width;
if (newHeight >= minSize && mouseY <= canvas.clientHeight) {
const { newWidth: w, newHeight: h } = adjustDimensions(
newWidth,
newHeight,
get(cropAspectRatio),
canvas.clientWidth,
canvas.clientHeight - y,
minSize,
);
cropSettings.update((crop) => {
crop.width = w;
crop.height = h;
return crop;
});
}
break;
}
case 'top-left': {
newWidth = width + x - Math.max(mouseX, 0);
newHeight = height + y - Math.max(mouseY, 0);
const { newWidth: w, newHeight: h } = adjustDimensions(
newWidth,
newHeight,
get(cropAspectRatio),
canvas.clientWidth,
canvas.clientHeight,
minSize,
);
cropSettings.update((crop) => {
crop.width = w;
crop.height = h;
crop.x = Math.max(0, x + width - crop.width);
crop.y = Math.max(0, y + height - crop.height);
return crop;
});
break;
}
case 'top-right': {
newWidth = Math.max(mouseX, 0) - x;
newHeight = height + y - Math.max(mouseY, 0);
const { newWidth: w, newHeight: h } = adjustDimensions(
newWidth,
newHeight,
get(cropAspectRatio),
canvas.clientWidth - x,
y + height,
minSize,
);
cropSettings.update((crop) => {
crop.width = w;
crop.height = h;
crop.y = Math.max(0, y + height - crop.height);
return crop;
});
break;
}
case 'bottom-left': {
newWidth = width + x - Math.max(mouseX, 0);
newHeight = Math.max(mouseY, 0) - y;
const { newWidth: w, newHeight: h } = adjustDimensions(
newWidth,
newHeight,
get(cropAspectRatio),
canvas.clientWidth,
canvas.clientHeight - y,
minSize,
);
cropSettings.update((crop) => {
crop.width = w;
crop.height = h;
crop.x = Math.max(0, x + width - crop.width);
return crop;
});
break;
}
case 'bottom-right': {
newWidth = Math.max(mouseX, 0) - x;
newHeight = Math.max(mouseY, 0) - y;
const { newWidth: w, newHeight: h } = adjustDimensions(
newWidth,
newHeight,
get(cropAspectRatio),
canvas.clientWidth - x,
canvas.clientHeight - y,
minSize,
);
cropSettings.update((crop) => {
crop.width = w;
crop.height = h;
return crop;
});
break;
}
}
cropSettings.update((crop) => {
crop.x = Math.max(0, Math.min(crop.x, canvas.clientWidth - crop.width));
crop.y = Math.max(0, Math.min(crop.y, canvas.clientHeight - crop.height));
return crop;
});
draw(crop);
}
function updateCursor(mouseX: number, mouseY: number) {
const canvas = get(cropAreaEl);
if (!canvas) {
return;
}
const crop = get(cropSettings);
const rotateDeg = get(normaizedRorateDegrees);
let {
onLeftBoundary,
onRightBoundary,
onTopBoundary,
onBottomBoundary,
onTopLeftCorner,
onTopRightCorner,
onBottomLeftCorner,
onBottomRightCorner,
} = isOnCropBoundary(mouseX, mouseY, crop);
if (rotateDeg == 90) {
[onTopBoundary, onRightBoundary, onBottomBoundary, onLeftBoundary] = [
onLeftBoundary,
onTopBoundary,
onRightBoundary,
onBottomBoundary,
];
[onTopLeftCorner, onTopRightCorner, onBottomRightCorner, onBottomLeftCorner] = [
onBottomLeftCorner,
onTopLeftCorner,
onTopRightCorner,
onBottomRightCorner,
];
} else if (rotateDeg == 180) {
[onTopBoundary, onBottomBoundary] = [onBottomBoundary, onTopBoundary];
[onLeftBoundary, onRightBoundary] = [onRightBoundary, onLeftBoundary];
[onTopLeftCorner, onBottomRightCorner] = [onBottomRightCorner, onTopLeftCorner];
[onTopRightCorner, onBottomLeftCorner] = [onBottomLeftCorner, onTopRightCorner];
} else if (rotateDeg == 270) {
[onTopBoundary, onRightBoundary, onBottomBoundary, onLeftBoundary] = [
onRightBoundary,
onBottomBoundary,
onLeftBoundary,
onTopBoundary,
];
[onTopLeftCorner, onTopRightCorner, onBottomRightCorner, onBottomLeftCorner] = [
onTopRightCorner,
onBottomRightCorner,
onBottomLeftCorner,
onTopLeftCorner,
];
}
if (onTopLeftCorner || onBottomRightCorner) {
setCursor('nwse-resize');
} else if (onTopRightCorner || onBottomLeftCorner) {
setCursor('nesw-resize');
} else if (onLeftBoundary || onRightBoundary) {
setCursor('ew-resize');
} else if (onTopBoundary || onBottomBoundary) {
setCursor('ns-resize');
} else if (isInCropArea(mouseX, mouseY, crop)) {
setCursor('move');
} else {
setCursor('default');
}
function setCursor(cursorName: string) {
if (get(canvasCursor) != cursorName && canvas && !get(showCancelConfirmDialog)) {
canvasCursor.set(cursorName);
document.body.style.cursor = cursorName;
canvas.style.cursor = cursorName;
}
}
}
function stopInteraction() {
isResizingOrDragging.set(false);
isDragging.set(false);
resizeSide.set('');
fadeOverlay(true); // Darken the background
setTimeout(() => {
checkEdits();
}, 1);
}
export function checkEdits() {
const cropImageSizeParams = get(cropSettings);
const originalImgSize = get(cropImageSize).map((el) => el * get(cropImageScale));
const changed =
Math.abs(originalImgSize[0] - cropImageSizeParams.width) > 2 ||
Math.abs(originalImgSize[1] - cropImageSizeParams.height) > 2;
cropSettingsChanged.set(changed);
}
function fadeOverlay(toDark: boolean) {
const overlay = get(overlayEl);
const cropFrame = document.querySelector('.crop-frame');
if (toDark) {
overlay?.classList.remove('light');
cropFrame?.classList.remove('resizing');
} else {
overlay?.classList.add('light');
cropFrame?.classList.add('resizing');
}
isResizingOrDragging.set(!toDark);
}

View File

@ -1,11 +1,11 @@
<script lang="ts">
import { shortcut } from '$lib/actions/shortcut';
import { editTypes, showCancelConfirmDialog } from '$lib/stores/asset-editor.store';
import { editManager, EditToolType } from '$lib/managers/edit/edit-manager.svelte';
import { websocketEvents } from '$lib/stores/websocket';
import { type AssetResponseDto } from '@immich/sdk';
import { ConfirmModal, IconButton } from '@immich/ui';
import { mdiClose } from '@mdi/js';
import { onMount } from 'svelte';
import { getAssetEdits, type AssetResponseDto } from '@immich/sdk';
import { Button, IconButton, VStack } from '@immich/ui';
import { mdiClose, mdiFloppy, mdiRefresh } from '@mdi/js';
import { onDestroy, onMount } from 'svelte';
import { t } from 'svelte-i18n';
onMount(() => {
@ -18,30 +18,32 @@
interface Props {
asset: AssetResponseDto;
onUpdateSelectedType: (type: string) => void;
onClose: () => void;
}
let { asset = $bindable(), onUpdateSelectedType, onClose }: Props = $props();
onMount(async () => {
const edits = await getAssetEdits({ id: asset.id });
await editManager.activateTool(EditToolType.Transform, asset, edits);
});
let selectedType: string = $state(editTypes[0].name);
let selectedTypeObj = $derived(editTypes.find((t) => t.name === selectedType) || editTypes[0]);
onDestroy(() => {
editManager.cleanup();
});
setTimeout(() => {
onUpdateSelectedType(selectedType);
}, 1);
async function applyEdits() {
const success = await editManager.applyEdits();
function selectType(name: string) {
selectedType = name;
onUpdateSelectedType(selectedType);
if (success) {
onClose();
}
}
const onConfirm = () => (typeof $showCancelConfirmDialog === 'boolean' ? null : $showCancelConfirmDialog());
let { asset = $bindable(), onClose }: Props = $props();
</script>
<svelte:document use:shortcut={{ shortcut: { key: 'Escape' }, onShortcut: onClose }} />
<section class="relative p-2 dark:bg-immich-dark-bg dark:text-immich-dark-fg">
<section class="relative flex flex-col h-full p-2 dark:bg-immich-dark-bg dark:text-immich-dark-fg dark">
<div class="flex place-items-center gap-2">
<IconButton
shape="round"
@ -53,32 +55,28 @@
/>
<p class="text-lg text-immich-fg dark:text-immich-dark-fg capitalize">{$t('editor')}</p>
</div>
<section class="px-4 py-4">
<ul class="flex w-full justify-around">
{#each editTypes as etype (etype.name)}
<li>
<IconButton
shape="round"
color={etype.name === selectedType ? 'primary' : 'secondary'}
icon={etype.icon}
aria-label={etype.name}
onclick={() => selectType(etype.name)}
/>
</li>
{/each}
</ul>
</section>
<section>
<selectedTypeObj.component />
{#if editManager.selectedTool}
<editManager.selectedTool.component />
{/if}
</section>
<div class="flex-1"></div>
<section class="p-4">
<VStack gap={4}>
<Button
fullWidth
leadingIcon={mdiFloppy}
color="success"
onclick={() => applyEdits()}
loading={editManager.isApplyingEdits}
>
{$t('save')}
</Button>
<!-- TODO make this clear all edits -->
<Button fullWidth leadingIcon={mdiRefresh} color="danger" onclick={() => editManager.resetAllChanges()}>
{$t('editor_reset_all_changes')}
</Button>
</VStack>
</section>
</section>
{#if $showCancelConfirmDialog}
<ConfirmModal
title={$t('editor_close_without_save_title')}
prompt={$t('editor_close_without_save_prompt')}
confirmColor="danger"
confirmText={$t('close')}
onClose={(confirmed) => (confirmed ? onConfirm() : ($showCancelConfirmDialog = false))}
/>
{/if}

View File

@ -0,0 +1,147 @@
import CropTool from '$lib/components/asset-viewer/editor/crop-tool/crop-tool.svelte';
import { transformManager } from '$lib/managers/edit/transform-manager.svelte';
import { waitForWebsocketEvent } from '$lib/stores/websocket';
import { editAsset, removeAssetEdits, type AssetEditsDto, type AssetResponseDto } from '@immich/sdk';
import { ConfirmModal, modalManager, toastManager } from '@immich/ui';
import { mdiCropRotate } from '@mdi/js';
import type { Component } from 'svelte';
export type EditAction = AssetEditsDto['edits'][number];
export type EditActions = EditAction[];
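// Contract implemented by each edit tool's manager so the EditManager can activate it with an
// asset's existing edits, reset it, and collect the edit actions it produces.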
export interface EditToolManager {
onActivate: (asset: AssetResponseDto, edits: EditActions) => Promise<void>;
onDeactivate: () => void;
resetAllChanges: () => Promise<void>;
hasChanges: boolean;
edits: EditAction[];
}
export enum EditToolType {
Transform = 'transform',
}
export interface EditTool {
type: EditToolType;
icon: string;
component: Component;
manager: EditToolManager;
}
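// Orchestrates the individual edit tools for the asset being edited. A sketch of the typical
// flow, mirroring the editor panel component shown earlier in this diff:
//
//   const edits = await getAssetEdits({ id: asset.id });
//   await editManager.activateTool(EditToolType.Transform, asset, edits);
//   // ...user adjusts the crop/rotation...
//   const saved = await editManager.applyEdits(); // true once the server confirms the edit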
export class EditManager {
tools: EditTool[] = [
{
type: EditToolType.Transform,
icon: mdiCropRotate,
component: CropTool,
manager: transformManager,
},
];
currentAsset = $state<AssetResponseDto | null>(null);
selectedTool = $state<EditTool | null>(null);
hasChanges = $derived(this.tools.some((t) => t.manager.hasChanges));
// used to disable multiple confirm dialogs and mouse events while one is open
isShowingConfirmDialog = $state(false);
isApplyingEdits = $state(false);
hasAppliedEdits = $state(false);
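// Returns true when the editor may close: there is nothing to discard (no changes, or the edits
// were already applied) or the user confirmed discarding; returns false while a dialog is open.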
async closeConfirm(): Promise<boolean> {
// Prevent multiple dialogs (usually happens with rapid escape key presses)
if (this.isShowingConfirmDialog) {
return false;
}
if (!this.hasChanges || this.hasAppliedEdits) {
return true;
}
this.isShowingConfirmDialog = true;
const confirmed = await modalManager.show(ConfirmModal, {
title: 'Discard Edits?',
prompt: 'You have unsaved edits. Are you sure you want to discard them?',
confirmText: 'Discard Edits',
});
this.isShowingConfirmDialog = false;
return confirmed;
}
reset() {
for (const tool of this.tools) {
tool.manager.onDeactivate?.();
}
this.selectedTool = this.tools[0];
}
async activateTool(toolType: EditToolType, asset: AssetResponseDto, edits: AssetEditsDto) {
this.hasAppliedEdits = false;
if (this.selectedTool?.type === toolType) {
return;
}
this.currentAsset = asset;
this.selectedTool?.manager.onDeactivate?.();
const newTool = this.tools.find((t) => t.type === toolType);
if (newTool) {
this.selectedTool = newTool;
await newTool.manager.onActivate?.(asset, edits.edits);
}
}
cleanup() {
for (const tool of this.tools) {
tool.manager.onDeactivate?.();
}
this.currentAsset = null;
this.selectedTool = null;
}
async resetAllChanges() {
for (const tool of this.tools) {
await tool.manager.resetAllChanges();
}
}
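// Collects the edits from every tool and submits them; an empty list clears the asset's edits.
// Resolves true only after the asset's on_upload_success websocket event arrives, so the caller
// knows server-side processing has finished; a timeout or API error resolves false with a toast.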
async applyEdits(): Promise<boolean> {
this.isApplyingEdits = true;
const edits = this.tools.flatMap((tool) => tool.manager.edits);
try {
// Setup the websocket listener before sending the edit request
const editCompleted = waitForWebsocketEvent(
'on_upload_success',
(asset) => asset.id === this.currentAsset!.id,
10_000,
);
await (edits.length === 0
? removeAssetEdits({ id: this.currentAsset!.id })
: editAsset({
id: this.currentAsset!.id,
editActionListDto: {
edits,
},
}));
const t = Date.now();
await editCompleted;
console.log(`Edit completed in ${Date.now() - t}ms`);
toastManager.success('Edits applied successfully');
this.hasAppliedEdits = true;
return true;
} catch {
toastManager.danger('Failed to apply edits');
return false;
} finally {
this.isApplyingEdits = false;
}
}
}
export const editManager = new EditManager();

View File

@ -0,0 +1,929 @@
import { editManager, type EditActions, type EditToolManager } from '$lib/managers/edit/edit-manager.svelte';
import { getAssetOriginalUrl } from '$lib/utils';
import { handleError } from '$lib/utils/handle-error';
import { EditAction, type AssetResponseDto, type CropParameters, type RotateParameters } from '@immich/sdk';
import { tick } from 'svelte';
export type CropAspectRatio =
| '1:1'
| '16:9'
| '4:3'
| '3:2'
| '7:5'
| '9:16'
| '3:4'
| '2:3'
| '5:7'
| 'free'
| 'reset';
export type CropSettings = {
x: number;
y: number;
width: number;
height: number;
};
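// Crop/rotate tool. `region` is tracked in display pixels, `cropImageSize` holds the image's
// natural size, and `cropImageScale` converts between the two when edits are generated or loaded.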
class TransformManager implements EditToolManager {
hasChanges: boolean = $derived.by(() => this.checkEdits());
darkenLevel = $state(0.65);
isInteracting = $state(false);
isDragging = $state(false);
animationFrame = $state<ReturnType<typeof requestAnimationFrame> | null>(null);
canvasCursor = $state('default');
dragOffset = $state({ x: 0, y: 0 });
resizeSide = $state('');
imgElement = $state<HTMLImageElement | null>(null);
cropAreaEl = $state<HTMLElement | null>(null);
overlayEl = $state<HTMLElement | null>(null);
cropFrame = $state<HTMLElement | null>(null);
cropImageSize = $state([1000, 1000]);
cropImageScale = $state(1);
cropAspectRatio = $state('free' as CropAspectRatio);
region = $state({ x: 0, y: 0, width: 100, height: 100 });
imageRotation = $state(0);
normalizedRotation = $derived.by(() => {
const newAngle = this.imageRotation % 360;
return newAngle < 0 ? newAngle + 360 : newAngle;
});
orientationChanged = $derived.by(() => this.normalizedRotation % 180 > 0);
edits = $derived.by(() => this.getEdits());
setAspectRatio(aspectRatio: CropAspectRatio) {
this.cropAspectRatio = aspectRatio;
if (!this.imgElement || !this.cropAreaEl) {
return;
}
const newCrop = transformManager.recalculateCrop(aspectRatio);
if (newCrop) {
transformManager.animateCropChange(this.cropAreaEl, this.region, newCrop);
this.region = newCrop;
}
}
checkEdits() {
const originalImgSize = this.cropImageSize.map((el) => el * this.cropImageScale);
return (
Math.abs(originalImgSize[0] - this.region.width) > 2 || Math.abs(originalImgSize[1] - this.region.height) > 2
);
}
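// Builds the edit actions sent to the server: the crop rectangle is converted from display
// pixels back to original-image pixels via cropImageScale, and a rotate action is added for any
// non-zero normalized rotation.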
getEdits(): EditActions {
const edits: EditActions = [];
if (this.checkEdits()) {
const { x, y, width, height } = this.region;
edits.push({
action: EditAction.Crop,
parameters: {
x: Math.round(x / this.cropImageScale),
y: Math.round(y / this.cropImageScale),
width: Math.round(width / this.cropImageScale),
height: Math.round(height / this.cropImageScale),
},
});
}
if (this.normalizedRotation !== 0) {
edits.push({
action: EditAction.Rotate,
parameters: {
angle: this.normalizedRotation,
},
});
}
return edits;
}
async resetAllChanges() {
this.imageRotation = 0;
await tick();
this.onImageLoad([]);
}
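// Loads the unedited original (edited: false) and re-applies any saved rotate/crop edits so the
// editor starts from the asset's current edit state.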
async onActivate(asset: AssetResponseDto, edits: EditActions): Promise<void> {
this.imgElement = new Image();
this.imgElement.src = getAssetOriginalUrl({ id: asset.id, cacheKey: asset.thumbhash, edited: false });
this.imgElement.addEventListener('load', () => transformManager.onImageLoad(edits), { passive: true });
this.imgElement.addEventListener('error', (error) => handleError(error, 'ErrorLoadingImage'), {
passive: true,
});
globalThis.addEventListener('mousemove', this.handleMouseMove, { passive: true });
// set the rotation before loading the image
const rotateEdit = edits.find((e) => e.action === 'rotate');
if (rotateEdit) {
this.imageRotation = (rotateEdit.parameters as RotateParameters).angle;
}
await tick();
this.resizeCanvas();
}
onDeactivate() {
globalThis.removeEventListener('mousemove', transformManager.handleMouseMove);
this.reset();
}
reset() {
this.darkenLevel = 0.65;
this.isInteracting = false;
this.animationFrame = null;
this.canvasCursor = 'default';
this.dragOffset = { x: 0, y: 0 };
this.resizeSide = '';
this.imgElement = null;
this.cropAreaEl = null;
this.isDragging = false;
this.overlayEl = null;
this.imageRotation = 0;
this.region = { x: 0, y: 0, width: 100, height: 100 };
this.cropImageSize = [1000, 1000];
this.cropImageScale = 1;
}
async rotate(angle: number) {
this.imageRotation += angle;
await tick();
this.onImageLoad();
}
recalculateCrop(aspectRatio: CropAspectRatio = this.cropAspectRatio): CropSettings {
if (!this.cropAreaEl) {
return this.region;
}
const canvasW = this.cropAreaEl.clientWidth;
const canvasH = this.cropAreaEl.clientHeight;
let newWidth = this.region.width;
let newHeight = this.region.height;
const { newWidth: w, newHeight: h } = this.keepAspectRatio(newWidth, newHeight, aspectRatio);
if (w > canvasW) {
newWidth = canvasW;
newHeight = canvasW / (w / h);
} else if (h > canvasH) {
newHeight = canvasH;
newWidth = canvasH * (w / h);
} else {
newWidth = w;
newHeight = h;
}
const newX = Math.max(0, Math.min(this.region.x, canvasW - newWidth));
const newY = Math.max(0, Math.min(this.region.y, canvasH - newHeight));
const newCrop = {
width: newWidth,
height: newHeight,
x: newX,
y: newY,
};
return newCrop;
}
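// Tweens the crop rectangle from `from` to `to` over `duration` ms with requestAnimationFrame,
// mutating `from` in place and redrawing the frame and overlay on every step.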
animateCropChange(element: HTMLElement, from: CropSettings, to: CropSettings, duration = 100) {
const cropFrame = element.querySelector('.crop-frame') as HTMLElement;
if (!cropFrame) {
return;
}
const startTime = performance.now();
const initialCrop = { ...from };
const animate = (currentTime: number) => {
const elapsedTime = currentTime - startTime;
const progress = Math.min(elapsedTime / duration, 1);
from.x = initialCrop.x + (to.x - initialCrop.x) * progress;
from.y = initialCrop.y + (to.y - initialCrop.y) * progress;
from.width = initialCrop.width + (to.width - initialCrop.width) * progress;
from.height = initialCrop.height + (to.height - initialCrop.height) * progress;
this.draw(from);
if (progress < 1) {
requestAnimationFrame(animate);
}
};
requestAnimationFrame(animate);
}
keepAspectRatio(newWidth: number, newHeight: number, aspectRatio: CropAspectRatio = this.cropAspectRatio) {
const [widthRatio, heightRatio] = aspectRatio.split(':').map(Number);
if (widthRatio && heightRatio) {
const calculatedWidth = (newHeight * widthRatio) / heightRatio;
return { newWidth: calculatedWidth, newHeight };
}
return { newWidth, newHeight };
}
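// Clamps a proposed crop size to the canvas limits and the minimum size while preserving the
// requested aspect ratio; with 'free', width and height are clamped independently.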
adjustDimensions(
newWidth: number,
newHeight: number,
aspectRatio: CropAspectRatio,
xLimit: number,
yLimit: number,
minSize: number,
) {
let w = newWidth;
let h = newHeight;
let aspectMultiplier: number;
if (aspectRatio === 'free') {
aspectMultiplier = newWidth / newHeight;
} else {
const [widthRatio, heightRatio] = aspectRatio.split(':').map(Number);
aspectMultiplier = widthRatio && heightRatio ? widthRatio / heightRatio : newWidth / newHeight;
}
if (aspectRatio !== 'free') {
h = w / aspectMultiplier;
}
if (w > xLimit) {
w = xLimit;
if (aspectRatio !== 'free') {
h = w / aspectMultiplier;
}
}
if (h > yLimit) {
h = yLimit;
if (aspectRatio !== 'free') {
w = h * aspectMultiplier;
}
}
if (w < minSize) {
w = minSize;
if (aspectRatio !== 'free') {
h = w / aspectMultiplier;
}
}
if (h < minSize) {
h = minSize;
if (aspectRatio !== 'free') {
w = h * aspectMultiplier;
}
}
if (aspectRatio !== 'free' && w / h !== aspectMultiplier) {
if (w < minSize) {
h = w / aspectMultiplier;
}
if (h < minSize) {
w = h * aspectMultiplier;
}
}
return { newWidth: w, newHeight: h };
}
draw(crop: CropSettings = this.region) {
if (!this.cropFrame) {
return;
}
this.cropFrame.style.left = `${crop.x}px`;
this.cropFrame.style.top = `${crop.y}px`;
this.cropFrame.style.width = `${crop.width}px`;
this.cropFrame.style.height = `${crop.height}px`;
this.drawOverlay(crop);
}
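// Clips the darkening overlay to a single polygon made of the full canvas outline plus the crop
// rectangle, leaving the crop area unobscured.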
drawOverlay(crop: CropSettings) {
if (!this.overlayEl) {
return;
}
this.overlayEl.style.clipPath = `
polygon(
0% 0%,
0% 100%,
100% 100%,
100% 0%,
0% 0%,
${crop.x}px ${crop.y}px,
${crop.x + crop.width}px ${crop.y}px,
${crop.x + crop.width}px ${crop.y + crop.height}px,
${crop.x}px ${crop.y + crop.height}px,
${crop.x}px ${crop.y}px
)
`;
}
onImageLoad(edits: EditActions | null = null) {
const img = this.imgElement;
if (!this.cropAreaEl || !img) {
return;
}
this.cropImageSize = [img.width, img.height];
const scale = this.calculateScale();
if (edits === null) {
const cropFrameEl = this.cropFrame;
cropFrameEl?.classList.add('transition');
this.region = this.normalizeCropArea(scale);
cropFrameEl?.addEventListener('transitionend', () => cropFrameEl?.classList.remove('transition'), {
passive: true,
});
} else {
const cropEdit = edits.find((e) => e.action === EditAction.Crop);
if (cropEdit) {
const params = cropEdit.parameters as CropParameters;
// Convert from absolute pixel coordinates to display coordinates
this.region = {
x: params.x * scale,
y: params.y * scale,
width: params.width * scale,
height: params.height * scale,
};
} else {
this.region = {
x: 0,
y: 0,
width: img.width * scale,
height: img.height * scale,
};
}
}
this.cropImageScale = scale;
img.style.width = `${img.width * scale}px`;
img.style.height = `${img.height * scale}px`;
this.draw();
}
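// Returns the largest scale at which the whole image fits inside the crop area (contain-style fit).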
calculateScale(): number {
const img = this.imgElement;
const cropArea = this.cropAreaEl;
if (!cropArea || !img) {
return 1;
}
const containerWidth = cropArea?.clientWidth ?? 0;
const containerHeight = cropArea?.clientHeight ?? 0;
const imageAspectRatio = img.width / img.height;
let scale: number;
if (imageAspectRatio > 1) {
scale = containerWidth / img.width;
if (img.height * scale > containerHeight) {
scale = containerHeight / img.height;
}
} else {
scale = containerHeight / img.height;
if (img.width * scale > containerWidth) {
scale = containerWidth / img.width;
}
}
return scale;
}
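// Rescales the current crop region from the previous display scale to the new one and clamps it
// inside the scaled image bounds.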
normalizeCropArea(scale: number) {
const img = this.imgElement;
const crop = { ...this.region };
if (!img) {
return crop;
}
const prevScale = this.cropImageScale;
const scaleRatio = scale / prevScale;
crop.x *= scaleRatio;
crop.y *= scaleRatio;
crop.width *= scaleRatio;
crop.height *= scaleRatio;
crop.width = Math.min(crop.width, img.width * scale);
crop.height = Math.min(crop.height, img.height * scale);
crop.x = Math.max(0, Math.min(crop.x, img.width * scale - crop.width));
crop.y = Math.max(0, Math.min(crop.y, img.height * scale - crop.height));
return crop;
}
resizeCanvas() {
const img = this.imgElement;
const cropArea = this.cropAreaEl;
if (!cropArea || !img) {
return;
}
const scale = this.calculateScale();
this.region = this.normalizeCropArea(scale);
this.cropImageScale = scale;
img.style.width = `${img.width * scale}px`;
img.style.height = `${img.height * scale}px`;
this.draw();
}
handleMouseDown(e: MouseEvent) {
const canvas = this.cropAreaEl;
if (!canvas) {
return;
}
const { mouseX, mouseY } = this.getMousePosition(e);
const {
onLeftBoundary,
onRightBoundary,
onTopBoundary,
onBottomBoundary,
onTopLeftCorner,
onTopRightCorner,
onBottomLeftCorner,
onBottomRightCorner,
} = this.isOnCropBoundary(mouseX, mouseY);
if (
onTopLeftCorner ||
onTopRightCorner ||
onBottomLeftCorner ||
onBottomRightCorner ||
onLeftBoundary ||
onRightBoundary ||
onTopBoundary ||
onBottomBoundary
) {
this.setResizeSide(mouseX, mouseY);
} else if (this.isInCropArea(mouseX, mouseY)) {
this.startDragging(mouseX, mouseY);
}
document.body.style.userSelect = 'none';
globalThis.addEventListener('mouseup', this.handleMouseUp, { passive: true });
}
// arrow property keeps `this` bound so the same reference works with add/removeEventListener
handleMouseMove = (e: MouseEvent) => {
const canvas = this.cropAreaEl;
if (!canvas) {
return;
}
const resizeSideValue = this.resizeSide;
const { mouseX, mouseY } = this.getMousePosition(e);
if (this.isDragging) {
this.moveCrop(mouseX, mouseY);
} else if (resizeSideValue) {
this.resizeCrop(mouseX, mouseY);
} else {
this.updateCursor(mouseX, mouseY);
}
};
handleMouseUp = () => {
globalThis.removeEventListener('mouseup', this.handleMouseUp);
document.body.style.userSelect = '';
this.isInteracting = false;
this.isDragging = false;
this.resizeSide = '';
this.fadeOverlay(true); // Darken the background
};
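// Maps viewport mouse coordinates into the crop area's unrotated local space, compensating for
// the current 90/180/270 degree canvas rotation.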
getMousePosition(e: MouseEvent) {
let offsetX = e.clientX;
let offsetY = e.clientY;
const clientRect = this.cropAreaEl?.getBoundingClientRect();
const rotateDeg = this.normalizedRotation;
if (rotateDeg === 90) {
offsetX = e.clientY - (clientRect?.top ?? 0);
offsetY = window.innerWidth - e.clientX - (window.innerWidth - (clientRect?.right ?? 0));
} else if (rotateDeg === 180) {
offsetX = window.innerWidth - e.clientX - (window.innerWidth - (clientRect?.right ?? 0));
offsetY = window.innerHeight - e.clientY - (window.innerHeight - (clientRect?.bottom ?? 0));
} else if (rotateDeg === 270) {
offsetX = window.innerHeight - e.clientY - (window.innerHeight - (clientRect?.bottom ?? 0));
offsetY = e.clientX - (clientRect?.left ?? 0);
} else if (rotateDeg === 0) {
offsetX -= clientRect?.left ?? 0;
offsetY -= clientRect?.top ?? 0;
}
return { mouseX: offsetX, mouseY: offsetY };
}
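// Hit-tests the pointer against the crop rectangle's edges and corners; corners use a larger
// tolerance (15px vs 10px) so the diagonal handles are easier to grab.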
isOnCropBoundary(mouseX: number, mouseY: number) {
const { x, y, width, height } = this.region;
const sensitivity = 10;
const cornerSensitivity = 15;
const [imgWidth, imgHeight] = this.cropImageSize;
const outOfBound = mouseX > imgWidth || mouseY > imgHeight || mouseX < 0 || mouseY < 0;
if (outOfBound) {
return {
onLeftBoundary: false,
onRightBoundary: false,
onTopBoundary: false,
onBottomBoundary: false,
onTopLeftCorner: false,
onTopRightCorner: false,
onBottomLeftCorner: false,
onBottomRightCorner: false,
};
}
const onLeftBoundary =
mouseX >= x - sensitivity && mouseX <= x + sensitivity && mouseY >= y && mouseY <= y + height;
const onRightBoundary =
mouseX >= x + width - sensitivity && mouseX <= x + width + sensitivity && mouseY >= y && mouseY <= y + height;
const onTopBoundary = mouseY >= y - sensitivity && mouseY <= y + sensitivity && mouseX >= x && mouseX <= x + width;
const onBottomBoundary =
mouseY >= y + height - sensitivity && mouseY <= y + height + sensitivity && mouseX >= x && mouseX <= x + width;
const onTopLeftCorner =
mouseX >= x - cornerSensitivity &&
mouseX <= x + cornerSensitivity &&
mouseY >= y - cornerSensitivity &&
mouseY <= y + cornerSensitivity;
const onTopRightCorner =
mouseX >= x + width - cornerSensitivity &&
mouseX <= x + width + cornerSensitivity &&
mouseY >= y - cornerSensitivity &&
mouseY <= y + cornerSensitivity;
const onBottomLeftCorner =
mouseX >= x - cornerSensitivity &&
mouseX <= x + cornerSensitivity &&
mouseY >= y + height - cornerSensitivity &&
mouseY <= y + height + cornerSensitivity;
const onBottomRightCorner =
mouseX >= x + width - cornerSensitivity &&
mouseX <= x + width + cornerSensitivity &&
mouseY >= y + height - cornerSensitivity &&
mouseY <= y + height + cornerSensitivity;
return {
onLeftBoundary,
onRightBoundary,
onTopBoundary,
onBottomBoundary,
onTopLeftCorner,
onTopRightCorner,
onBottomLeftCorner,
onBottomRightCorner,
};
}
isInCropArea(mouseX: number, mouseY: number) {
const { x, y, width, height } = this.region;
return mouseX >= x && mouseX <= x + width && mouseY >= y && mouseY <= y + height;
}
setResizeSide(mouseX: number, mouseY: number) {
const {
onLeftBoundary,
onRightBoundary,
onTopBoundary,
onBottomBoundary,
onTopLeftCorner,
onTopRightCorner,
onBottomLeftCorner,
onBottomRightCorner,
} = this.isOnCropBoundary(mouseX, mouseY);
if (onTopLeftCorner) {
this.resizeSide = 'top-left';
} else if (onTopRightCorner) {
this.resizeSide = 'top-right';
} else if (onBottomLeftCorner) {
this.resizeSide = 'bottom-left';
} else if (onBottomRightCorner) {
this.resizeSide = 'bottom-right';
} else if (onLeftBoundary) {
this.resizeSide = 'left';
} else if (onRightBoundary) {
this.resizeSide = 'right';
} else if (onTopBoundary) {
this.resizeSide = 'top';
} else if (onBottomBoundary) {
this.resizeSide = 'bottom';
}
}
startDragging(mouseX: number, mouseY: number) {
this.isDragging = true;
const crop = this.region;
this.isInteracting = true;
this.dragOffset = { x: mouseX - crop.x, y: mouseY - crop.y };
this.fadeOverlay(false);
}
moveCrop(mouseX: number, mouseY: number) {
const cropArea = this.cropAreaEl;
if (!cropArea) {
return;
}
const crop = this.region;
const { x, y } = this.dragOffset;
let newX = mouseX - x;
let newY = mouseY - y;
newX = Math.max(0, Math.min(cropArea.clientWidth - crop.width, newX));
newY = Math.max(0, Math.min(cropArea.clientHeight - crop.height, newY));
this.region = {
...this.region,
x: newX,
y: newY,
};
this.draw();
}
resizeCrop(mouseX: number, mouseY: number) {
const canvas = this.cropAreaEl;
const crop = this.region;
const resizeSideValue = this.resizeSide;
if (!canvas || !resizeSideValue) {
return;
}
this.fadeOverlay(false);
const { x, y, width, height } = crop;
const minSize = 50;
let newWidth = width;
let newHeight = height;
switch (resizeSideValue) {
case 'left': {
newWidth = width + x - mouseX;
newHeight = height;
if (newWidth >= minSize && mouseX >= 0) {
const { newWidth: w, newHeight: h } = this.keepAspectRatio(newWidth, newHeight);
this.region = {
...this.region,
width: Math.max(minSize, Math.min(w, canvas.clientWidth)),
height: Math.max(minSize, Math.min(h, canvas.clientHeight)),
x: Math.max(0, x + width - this.region.width),
};
}
break;
}
case 'right': {
newWidth = mouseX - x;
newHeight = height;
if (newWidth >= minSize && mouseX <= canvas.clientWidth) {
const { newWidth: w, newHeight: h } = this.keepAspectRatio(newWidth, newHeight);
this.region = {
...this.region,
width: Math.max(minSize, Math.min(w, canvas.clientWidth - x)),
height: Math.max(minSize, Math.min(h, canvas.clientHeight)),
};
}
break;
}
case 'top': {
newHeight = height + y - mouseY;
newWidth = width;
if (newHeight >= minSize && mouseY >= 0) {
const { newWidth: w, newHeight: h } = this.adjustDimensions(
newWidth,
newHeight,
this.cropAspectRatio,
canvas.clientWidth,
canvas.clientHeight,
minSize,
);
this.region = {
...this.region,
y: Math.max(0, y + height - h),
width: w,
height: h,
};
}
break;
}
case 'bottom': {
newHeight = mouseY - y;
newWidth = width;
if (newHeight >= minSize && mouseY <= canvas.clientHeight) {
const { newWidth: w, newHeight: h } = this.adjustDimensions(
newWidth,
newHeight,
this.cropAspectRatio,
canvas.clientWidth,
canvas.clientHeight - y,
minSize,
);
this.region = {
...this.region,
width: w,
height: h,
};
}
break;
}
case 'top-left': {
newWidth = width + x - Math.max(mouseX, 0);
newHeight = height + y - Math.max(mouseY, 0);
const { newWidth: w, newHeight: h } = this.adjustDimensions(
newWidth,
newHeight,
this.cropAspectRatio,
canvas.clientWidth,
canvas.clientHeight,
minSize,
);
this.region = {
width: w,
height: h,
x: Math.max(0, x + width - w),
y: Math.max(0, y + height - h),
};
break;
}
case 'top-right': {
newWidth = Math.max(mouseX, 0) - x;
newHeight = height + y - Math.max(mouseY, 0);
const { newWidth: w, newHeight: h } = this.adjustDimensions(
newWidth,
newHeight,
this.cropAspectRatio,
canvas.clientWidth - x,
y + height,
minSize,
);
this.region = {
...this.region,
width: w,
height: h,
y: Math.max(0, y + height - h),
};
break;
}
case 'bottom-left': {
newWidth = width + x - Math.max(mouseX, 0);
newHeight = Math.max(mouseY, 0) - y;
const { newWidth: w, newHeight: h } = this.adjustDimensions(
newWidth,
newHeight,
this.cropAspectRatio,
canvas.clientWidth,
canvas.clientHeight - y,
minSize,
);
this.region = {
...this.region,
width: w,
height: h,
x: Math.max(0, x + width - w),
};
break;
}
case 'bottom-right': {
newWidth = Math.max(mouseX, 0) - x;
newHeight = Math.max(mouseY, 0) - y;
const { newWidth: w, newHeight: h } = this.adjustDimensions(
newWidth,
newHeight,
this.cropAspectRatio,
canvas.clientWidth - x,
canvas.clientHeight - y,
minSize,
);
this.region = {
...this.region,
width: w,
height: h,
};
break;
}
}
this.region = {
...this.region,
x: Math.max(0, Math.min(this.region.x, canvas.clientWidth - this.region.width)),
y: Math.max(0, Math.min(this.region.y, canvas.clientHeight - this.region.height)),
};
this.draw();
}
updateCursor(mouseX: number, mouseY: number) {
if (!this.cropAreaEl) {
return;
}
let {
onLeftBoundary,
onRightBoundary,
onTopBoundary,
onBottomBoundary,
onTopLeftCorner,
onTopRightCorner,
onBottomLeftCorner,
onBottomRightCorner,
} = this.isOnCropBoundary(mouseX, mouseY);
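// The boundary flags are computed in unrotated image space, so remap them to the on-screen
// orientation before choosing a cursor.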
if (this.normalizedRotation === 90) {
[onTopBoundary, onRightBoundary, onBottomBoundary, onLeftBoundary] = [
onLeftBoundary,
onTopBoundary,
onRightBoundary,
onBottomBoundary,
];
[onTopLeftCorner, onTopRightCorner, onBottomRightCorner, onBottomLeftCorner] = [
onBottomLeftCorner,
onTopLeftCorner,
onTopRightCorner,
onBottomRightCorner,
];
} else if (this.normalizedRotation === 180) {
[onTopBoundary, onBottomBoundary] = [onBottomBoundary, onTopBoundary];
[onLeftBoundary, onRightBoundary] = [onRightBoundary, onLeftBoundary];
[onTopLeftCorner, onBottomRightCorner] = [onBottomRightCorner, onTopLeftCorner];
[onTopRightCorner, onBottomLeftCorner] = [onBottomLeftCorner, onTopRightCorner];
} else if (this.normalizedRotation === 270) {
[onTopBoundary, onRightBoundary, onBottomBoundary, onLeftBoundary] = [
onRightBoundary,
onBottomBoundary,
onLeftBoundary,
onTopBoundary,
];
[onTopLeftCorner, onTopRightCorner, onBottomRightCorner, onBottomLeftCorner] = [
onTopRightCorner,
onBottomRightCorner,
onBottomLeftCorner,
onTopLeftCorner,
];
}
let cursorName = '';
if (onTopLeftCorner || onBottomRightCorner) {
cursorName = 'nwse-resize';
} else if (onTopRightCorner || onBottomLeftCorner) {
cursorName = 'nesw-resize';
} else if (onLeftBoundary || onRightBoundary) {
cursorName = 'ew-resize';
} else if (onTopBoundary || onBottomBoundary) {
cursorName = 'ns-resize';
} else if (this.isInCropArea(mouseX, mouseY)) {
cursorName = 'move';
} else {
cursorName = 'default';
}
if (this.canvasCursor !== cursorName && this.cropAreaEl && !editManager.isShowingConfirmDialog) {
this.canvasCursor = cursorName;
document.body.style.cursor = cursorName;
this.cropAreaEl.style.cursor = cursorName;
}
}
fadeOverlay(toDark: boolean) {
const overlay = this.overlayEl;
const cropFrame = document.querySelector('.crop-frame');
if (toDark) {
overlay?.classList.remove('light');
cropFrame?.classList.remove('resizing');
} else {
overlay?.classList.add('light');
cropFrame?.classList.add('resizing');
}
this.isInteracting = !toDark;
}
}
export const transformManager = new TransformManager();

View File

@ -1,74 +1,4 @@
import CropTool from '$lib/components/asset-viewer/editor/crop-tool/crop-tool.svelte';
import { mdiCropRotate } from '@mdi/js';
import { derived, get, writable } from 'svelte/store';
import { writable } from 'svelte/store';
//---------crop
export const cropSettings = writable<CropSettings>({ x: 0, y: 0, width: 100, height: 100 });
export const cropImageSize = writable([1000, 1000]);
export const cropImageScale = writable(1);
export const cropAspectRatio = writable<CropAspectRatio>('free');
export const cropSettingsChanged = writable<boolean>(false);
//---------rotate
export const rotateDegrees = writable<number>(0);
export const normaizedRorateDegrees = derived(rotateDegrees, (v) => {
const newAngle = v % 360;
return newAngle < 0 ? newAngle + 360 : newAngle;
});
export const changedOriention = derived(normaizedRorateDegrees, () => get(normaizedRorateDegrees) % 180 > 0);
//-----other
export const showCancelConfirmDialog = writable<boolean | CallableFunction>(false);
export const lastChosenLocation = writable<{ lng: number; lat: number } | null>(null);
export const editTypes = [
{
name: 'crop',
icon: mdiCropRotate,
component: CropTool,
changesFlag: cropSettingsChanged,
},
];
export function closeEditorCofirm(closeCallback: CallableFunction) {
if (get(hasChanges)) {
showCancelConfirmDialog.set(closeCallback);
} else {
closeCallback();
}
}
export const hasChanges = derived(
editTypes.map((t) => t.changesFlag),
($flags) => {
return $flags.some(Boolean);
},
);
export function resetGlobalCropStore() {
cropSettings.set({ x: 0, y: 0, width: 100, height: 100 });
cropImageSize.set([1000, 1000]);
cropImageScale.set(1);
cropAspectRatio.set('free');
cropSettingsChanged.set(false);
showCancelConfirmDialog.set(false);
rotateDegrees.set(0);
}
export type CropAspectRatio =
| '1:1'
| '16:9'
| '4:3'
| '3:2'
| '7:5'
| '9:16'
| '3:4'
| '2:3'
| '5:7'
| 'free'
| 'reset';
export type CropSettings = {
x: number;
y: number;
width: number;
height: number;
};

View File

@ -73,3 +73,25 @@ export const openWebsocketConnection = () => {
export const closeWebsocketConnection = () => {
websocket.disconnect();
};
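// Resolves with the event's arguments the first time `event` fires and the optional predicate
// matches, or rejects after `timeout` ms. The edit manager uses this to wait for the server to
// finish processing an edit, e.g. (assetId being the edited asset's id):
//   await waitForWebsocketEvent('on_upload_success', (asset) => asset.id === assetId, 10_000);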
export const waitForWebsocketEvent = <T extends keyof Events>(
event: T,
predicate?: (...args: Parameters<Events[T]>) => boolean,
timeout: number = 10_000,
): Promise<Parameters<Events[T]>> => {
return new Promise((resolve, reject) => {
// @ts-expect-error: the generic handler signature here does not satisfy websocketEvents.on's per-event typings
const cleanup = websocketEvents.on(event, (...args: Parameters<Events[T]>) => {
if (!predicate || predicate(...args)) {
cleanup();
clearTimeout(timer);
resolve(args);
}
});
const timer = setTimeout(() => {
cleanup();
reject(new Error(`Timeout waiting for event: ${String(event)}`));
}, timeout);
});
};

View File

@ -189,14 +189,14 @@ const createUrl = (path: string, parameters?: Record<string, unknown>) => {
return getBaseUrl() + url.pathname + url.search + url.hash;
};
type AssetUrlOptions = { id: string; cacheKey?: string | null };
type AssetUrlOptions = { id: string; cacheKey?: string | null; edited?: boolean };
export const getAssetOriginalUrl = (options: string | AssetUrlOptions) => {
if (typeof options === 'string') {
options = { id: options };
}
const { id, cacheKey } = options;
return createUrl(getAssetOriginalPath(id), { ...authManager.params, c: cacheKey });
const { id, cacheKey, edited } = options;
return createUrl(getAssetOriginalPath(id), { ...authManager.params, c: cacheKey, edited });
};
export const getAssetThumbnailUrl = (options: string | (AssetUrlOptions & { size?: AssetMediaSize })) => {